Compare commits

..

388 Commits
v0.10 ... v0.12

Author SHA1 Message Date
Max Brunsfeld
e27fafb824 v0.12 2022-01-14 12:03:29 -08:00
Max Brunsfeld
485554cd0c Merge pull request #342 from zed-industries/symbolic-nav
Introduce outline view
2022-01-14 12:02:43 -08:00
Max Brunsfeld
f3239fe1d5 Apply scroll_max after uniform list autoscrolls 2022-01-14 11:56:28 -08:00
Max Brunsfeld
dd8e5ee543 Add bottom margin to the outline view 2022-01-14 11:01:20 -08:00
Max Brunsfeld
5de5e4b6f2 Avoid panic in OutlineView when active item isn't an editor 2022-01-14 10:51:26 -08:00
Max Brunsfeld
b7561c6cef Add select_first and select_last bindings to outline view
Co-Authored-By: Antonio Scandurra <me@as-cii.com>
Co-Authored-By: Nathan Sobo <nathan@zed.dev>
2022-01-14 10:45:37 -08:00
Max Brunsfeld
ea69dcd42a Match on names only when outline query has no spaces
Co-Authored-By: Antonio Scandurra <me@as-cii.com>
Co-Authored-By: Nathan Sobo <nathan@zed.dev>
2022-01-14 09:16:29 -08:00
Antonio Scandurra
ce51196eab Center the selected item when updating outline query
Co-Authored-By: Max Brunsfeld <max@zed.dev>
Co-Authored-By: Nathan Sobo <nathan@zed.dev>
2022-01-14 17:53:06 +01:00
Antonio Scandurra
e4c0fc6ad5 Dismiss outline view when the query editor is blurred
Co-Authored-By: Nathan Sobo <nathan@zed.dev>
2022-01-14 17:25:24 +01:00
Nathan Sobo
b52db22544 Only enable smart case if the query contains an uppercase character
Co-Authored-By: Antonio Scandurra <me@as-cii.com>
2022-01-14 09:16:09 -07:00
Nathan Sobo
f934370e7f Match full path when query contains spaces
Co-Authored-By: Antonio Scandurra <me@as-cii.com>
2022-01-14 09:02:04 -07:00
Antonio Scandurra
be24e58926 Associate StringMatchCandidate with an id
Co-Authored-By: Nathan Sobo <nathan@zed.dev>
2022-01-14 14:55:03 +01:00
Antonio Scandurra
e538beb920 Highlight matches by increasing the font weight
Co-Authored-By: Nathan Sobo <nathan@zed.dev>
2022-01-14 14:39:15 +01:00
Antonio Scandurra
a64ba8b687 Allow matching of context items in outline view 2022-01-14 11:09:02 +01:00
Antonio Scandurra
e7f1398f3a 💄 2022-01-14 10:20:04 +01:00
Antonio Scandurra
b0033bb6d4 Don't emit duplicate text when mixing syntax highlighting and match indices 2022-01-14 10:08:08 +01:00
Antonio Scandurra
ecba761e18 Make mod a @context 2022-01-14 09:22:20 +01:00
Antonio Scandurra
deb679b8f5 Report all matching strings in fuzzy matcher even if they're duplicates 2022-01-14 09:16:39 +01:00
Antonio Scandurra
9c1f58ee89 Maintain order of outline items when filling out tree's missing parts 2022-01-14 09:12:30 +01:00
Max Brunsfeld
adeb7e6864 Incorporate syntax highlighting into symbol outline view
Still need to figure out how to style the fuzzy match characters
now that there's syntax highlighting. Right now, they are
underlined in red.
2022-01-13 18:10:02 -08:00
Max Brunsfeld
7913a1ea22 Include highlighting runs in Outline 2022-01-13 14:46:15 -08:00
Max Brunsfeld
3e1c559b2d Allow multiple disjoint nodes to be captured as matchable in the outline query 2022-01-13 14:04:25 -08:00
Max Brunsfeld
950b06674f Add more items to rust outline query 2022-01-13 12:01:36 -08:00
Max Brunsfeld
f2cef0b795 Implement navigation via outline modal
Co-Authored-By: Antonio Scandurra <me@as-cii.com>
2022-01-13 11:48:44 -08:00
Max Brunsfeld
373fe6fadf Change Editor::set_highlighted_row to take a row range
Co-Authored-By: Antonio Scandurra <me@as-cii.com>
2022-01-13 09:49:46 -08:00
Antonio Scandurra
055d48cfb2 Select the closest outline item when the outline view's query is empty
Co-Authored-By: Max Brunsfeld <max@zed.dev>
2022-01-13 18:43:49 +01:00
Antonio Scandurra
2660d37ad8 Return Outline<Anchor> from MultiBuffer::outline
Co-Authored-By: Max Brunsfeld <max@zed.dev>
2022-01-13 18:24:00 +01:00
Antonio Scandurra
e165f1e16c Use OutlineItem::depth to include ancestors of matching candidates
Co-Authored-By: Nathan Sobo <nathan@zed.dev>
2022-01-13 16:59:52 +01:00
Antonio Scandurra
aee3bb98f2 Implement selecting prev and next in outline view
Co-Authored-By: Nathan Sobo <nathan@zed.dev>
2022-01-13 16:44:06 +01:00
Antonio Scandurra
8d7a57a01e Merge pull request #340 from zed-industries/split-project-diagnostics
Allow splitting of project diagnostics
2022-01-13 16:20:19 +01:00
Antonio Scandurra
d74658fdb5 Allow searching of outline items 2022-01-13 15:10:50 +01:00
Antonio Scandurra
06ba1c64cf Implement Outline::search 2022-01-13 15:10:29 +01:00
Antonio Scandurra
5e64f1aca8 Report the candidate's index when matching strings 2022-01-13 15:09:27 +01:00
Antonio Scandurra
5f2ac61401 Use only lowercase characters to determine if query matches a candidate 2022-01-13 15:07:48 +01:00
Antonio Scandurra
d6ed2ba642 Start on rendering the outline view 2022-01-13 12:01:11 +01:00
Antonio Scandurra
ef596c64f8 Add OutlineItem::depth so that we can render a tree in the outline view 2022-01-13 11:35:43 +01:00
Antonio Scandurra
08c3fddc65 Allow splitting of project diagnostics 2022-01-13 10:24:41 +01:00
Antonio Scandurra
bb3fc8efd7 Merge pull request #339 from zed-industries/update-curl
Update curl to avoid setting `MACOSX_DEPLOYMENT_TARGET` on server
2022-01-13 09:05:21 +01:00
Antonio Scandurra
9422e27f97 Update curl to avoid setting MACOSX_DEPLOYMENT_TARGET on server 2022-01-13 08:52:42 +01:00
Max Brunsfeld
63a401ac5d Add Buffer::outline method
Co-Authored-By: Nathan Sobo <nathan@zed.dev>
2022-01-12 18:17:49 -08:00
Nathan Sobo
057dc62b90 Merge pull request #338 from zed-industries/use-wildcard-cert
Associate staging and production load balancers with wildcard cert
2022-01-12 17:23:04 -07:00
Nathan Sobo
a93502bb64 Add placeholder environment variables to our deploy
We have the ability to define environment-specific environment variables, but don't currently need it. This keeps these files in place while avoiding noise from running `export` with no args due to the files being empty.

Co-Authored-By: Max Brunsfeld <maxbrunsfeld@gmail.com>
2022-01-12 16:33:41 -07:00
Nathan Sobo
e71b989041 Use the same wildcard certificate on production and staging
Co-Authored-By: Max Brunsfeld <maxbrunsfeld@gmail.com>
2022-01-12 16:28:09 -07:00
Nathan Sobo
3a82d0d8e1 Use a wildcard cert on DigitalOcean in the staging environment
If this works, we'll use it for production as well and delete the other certs.

Co-Authored-By: Max Brunsfeld <maxbrunsfeld@gmail.com>
2022-01-12 16:17:21 -07:00
Max Brunsfeld
abd05cc82e Merge pull request #337 from zed-industries/project-diagnostics-styling
Restructure the project diagnostics view to match some aspects of current designs
2022-01-12 12:22:23 -08:00
Max Brunsfeld
1a672929e0 Adjust BlockMap tests to reflect new tiebreaking behavior
Co-Authored-By: Nathan Sobo <nathan@zed.dev>
2022-01-12 12:11:55 -08:00
Max Brunsfeld
ed88fdcea2 Add unit test for diagnostic + path header ordering 2022-01-12 11:34:57 -08:00
Max Brunsfeld
6ad9ff10c1 Ensure path headers appear before first diagnostic header
Co-Authored-By: Nathan Sobo <nathan@zed.dev>
2022-01-12 10:51:28 -08:00
Max Brunsfeld
ac0d55222f Adjust project diagnostics test to reflect new block structure 2022-01-12 10:51:08 -08:00
Max Brunsfeld
9ccf2f3f58 Tweak theming of project diagnostics 2022-01-12 10:51:08 -08:00
Max Brunsfeld
b5ee095da9 Deduplicate path names in the project diagnostics view 2022-01-12 10:51:08 -08:00
Max Brunsfeld
a9937ee8be Expand block decorations' bounds to include the gutter 2022-01-12 10:51:08 -08:00
Nathan Sobo
d346b1bfd9 Merge pull request #336 from zed-industries/format-on-save
Format on save
2022-01-12 11:48:12 -07:00
Nathan Sobo
30225678c0 Test ordering of responses with respect to uni-directional messages
Co-Authored-By: Max Brunsfeld <maxbrunsfeld@gmail.com>
Co-Authored-By: Antonio Scandurra <me@as-cii.com>
2022-01-12 11:19:17 -07:00
Antonio Scandurra
66694b4c9a Fix failing tests
Co-Authored-By: Max Brunsfeld <max@zed.dev>
2022-01-12 18:43:23 +01:00
Antonio Scandurra
8b53868f8a Preserve the order of responses with respect to all other incoming messages
Co-Authored-By: Nathan Sobo <nathan@zed.dev>
Co-Authored-By: Max Brunsfeld <max@zed.dev>
2022-01-12 18:26:00 +01:00
Antonio Scandurra
9e4b118214 Use synchronous locks for Peer state
We hold these locks for a short amount of time anyway, and using an
async lock could cause parallel sends to happen in an order different
than the order in which `send`/`request` was called.

Co-Authored-By: Nathan Sobo <nathan@zed.dev>
2022-01-12 18:02:41 +01:00
Antonio Scandurra
310def2923 Implement Buffer::format
Co-Authored-By: Nathan Sobo <nathan@zed.dev>
2022-01-12 18:01:20 +01:00
Antonio Scandurra
67991b413c Merge pull request #335 from zed-industries/fix-refresh-selections
Fix panic in `Editor::refresh_selections` due to calling `summaries_for_anchors` without sorting the anchors
2022-01-12 10:03:33 +01:00
Antonio Scandurra
6fbbbab7ba Process selection anchors in a sorted fashion when refreshing them 2022-01-12 09:28:09 +01:00
Antonio Scandurra
b768a3977c Add unit test reproducing a panic when refreshing selections 2022-01-12 09:27:03 +01:00
Antonio Scandurra
7daa4b5b04 Don't return a Result in test-only method select_display_ranges 2022-01-12 09:14:48 +01:00
Nathan Sobo
a6dd9a20d4 Fix binding to dump element JSON 2022-01-11 17:52:26 -07:00
Max Brunsfeld
9602bc6f8e Remove stray dbg! calls 2022-01-11 13:56:07 -08:00
Max Brunsfeld
5941f5fca0 Upgrade tree-sitter-markdown 2022-01-11 10:36:31 -08:00
Max Brunsfeld
5a889b04df Merge pull request #329 from zed-industries/fix-newline-in-multibuffer
Fix cursor position when inserting newlines on a repeated excerpt
2022-01-11 09:39:05 -08:00
Antonio Scandurra
89ead1c44d Merge pull request #314 from zed-industries/auto-connect
Auto-connect to server on startup if credentials are on the keychain
2022-01-11 18:34:20 +01:00
Antonio Scandurra
c16820166b Fix cursor position when inserting newlines on a repeated excerpt
Co-Authored-By: Nathan Sobo <nathan@zed.dev>
Co-Authored-By: Max Brunsfeld <max@zed.dev>
2022-01-11 18:30:25 +01:00
Antonio Scandurra
58e45dd9be Merge pull request #328 from zed-industries/fix-multibuffer-anchors
Randomize test multibuffer anchors and fix resulting issues
2022-01-11 17:42:53 +01:00
Antonio Scandurra
aa543a4b0a Ensure selections stay sorted after refreshing them
Co-Authored-By: Nathan Sobo <nathan@zed.dev>
2022-01-11 17:16:45 +01:00
Antonio Scandurra
e70b728758 Verify Anchor::buffer_id before resolving it or comparing it
This commit also verifies some properties about anchor resolution in the
multibuffer randomized test.

Co-Authored-By: Nathan Sobo <nathan@zed.dev>
2022-01-11 16:40:12 +01:00
Antonio Scandurra
2d5e72251e Merge pull request #325 from zed-industries/fix-more-subscription-panics
Don't register an entity ID extractor for non-entity subscriptions
2022-01-11 15:29:23 +01:00
Antonio Scandurra
d7fcb049d4 Don't register an entity ID extractor for non-entity subscriptions
This commit fixes a panic that could occur when registering N subscriptions for
N entities of the same kind. Before, when dropping the first of the
subscriptions, we would remove the entity ID extractor as well. This was,
however, used by all the other N - 1 subscriptions which would then start
losing messages. In addition, dropping yet another subscription of that kind
would result in a panic, because we wouldn't find the extractor in the map
upon invoking `Subscription::drop`.

With this change we will avoid removing the ID extractor when dropping a
subscription. Crucially, we also avoid inserting extractors for simple message
subscriptions. This enables these non-entity subscriptions to be dropped and
re-registered without seeing a "registered handler for the same message twice"
panic.
2022-01-11 08:21:35 +01:00
Max Brunsfeld
2ea78c5ade Merge pull request #320 from zed-industries/more-diagnostics-polish
Keep the cursor at the top when first opening the project diagnostics view
2022-01-10 16:43:20 -08:00
Max Brunsfeld
a0a558318c In diagnostics editor, attempt to open excerpts in a different pane
Co-Authored-By: Nathan Sobo <nathan@zed.dev>
2022-01-10 16:33:25 -08:00
Max Brunsfeld
747d9e8784 Add files to project diagnostics view in order
Co-Authored-By: Nathan Sobo <nathan@zed.dev>
2022-01-10 16:17:56 -08:00
Max Brunsfeld
c7eb6a6a60 Tweak color of share icon in titlebar 2022-01-10 11:26:07 -08:00
Max Brunsfeld
7244fe9c7f Merge pull request #315 from zed-industries/fix-outdent-column-0
Fix outdent not working when cursor is at column 0
2022-01-10 11:16:30 -08:00
Max Brunsfeld
8ee106e6aa Merge pull request #316 from zed-industries/fix-subscription-panic
Fix `rpc::Client` subscription panics
2022-01-10 11:15:17 -08:00
Antonio Scandurra
4992a8a407 🎨 2022-01-10 16:10:29 +01:00
Antonio Scandurra
b44ae46559 Fix panic if subscribing after dropping a subscription for the same message 2022-01-10 16:09:06 +01:00
Antonio Scandurra
dff812b38e Don't panic when dropping a subscription in a subscription handler 2022-01-10 16:04:49 +01:00
Antonio Scandurra
9f6c53b547 Fix dev dependencies in client/Cargo.toml 2022-01-10 16:03:55 +01:00
Antonio Scandurra
b1de9a945d Fix outdent not working when cursor is at column 0 2022-01-10 15:32:28 +01:00
Antonio Scandurra
e8bbd370e4 Auto-connect to server on startup if credentials are on the keychain 2022-01-10 15:06:38 +01:00
Antonio Scandurra
8d7bb8b1a3 Merge pull request #313 from zed-industries/polish-project-diagnostics
Polish project diagnostics UX
2022-01-10 14:33:26 +01:00
Antonio Scandurra
5c3ae8808b Fix diagnostic unit test assertions 2022-01-10 14:28:25 +01:00
Antonio Scandurra
eb353648e6 🎨 2022-01-10 12:14:52 +01:00
Antonio Scandurra
a1597578ff Compare singleton buffers in test_open_and_save_new_file 2022-01-10 11:56:00 +01:00
Antonio Scandurra
0742640b39 Correctly report line boundaries when a map contains both folds and wraps
This fixes the randomized test failures that were occurring on main.
2022-01-10 11:26:48 +01:00
Antonio Scandurra
1a53d5b7ba Use a new Workspace::activate_item API in project diagnostics
Previously, we would only activate the pane without switching the
pane's *active item*.
2022-01-10 10:10:11 +01:00
Max Brunsfeld
f933d54469 When selections lose their excerpts, move them to the next primary diagnostic 2022-01-07 14:53:33 -08:00
Max Brunsfeld
ce6f3d7f3e Reuse views when moving between diagnostic view and editors
Co-Authored-By: Nathan Sobo <nathan@zed.dev>
2022-01-07 11:00:12 -08:00
Max Brunsfeld
ea263822fa Finish implementing ProjectDiagnostics::open_excerpts
* Build workspace item views with a reference to the workspace
* Add randomized test for MultiBuffer::excerpted_buffers and fix a small bug

Co-Authored-By: Nathan Sobo <nathan@zed.dev>
2022-01-07 09:59:27 -08:00
Antonio Scandurra
e5c520a265 Use Buffer handles instead of MultiBuffer as editor workspace items
Co-Authored-By: Nathan Sobo <nathan@zed.dev>
2022-01-07 17:48:31 +01:00
Antonio Scandurra
794d214eee Refactor opening workspace items
Co-Authored-By: Nathan Sobo <nathan@zed.dev>
2022-01-07 17:38:37 +01:00
Antonio Scandurra
3cab32d201 WIP: Add keybinding to open buffers under cursors
Co-Authored-By: Nathan Sobo <nathan@zed.dev>
2022-01-07 16:12:50 +01:00
Antonio Scandurra
cf62d26ed8 Display a "Checking..." message when running disk-based diagnostics
Co-Authored-By: Nathan Sobo <nathan@zed.dev>
2022-01-07 15:03:19 +01:00
Antonio Scandurra
e39be35e17 Show status bar item for project diagnostic summary
Co-Authored-By: Nathan Sobo <nathan@zed.dev>
2022-01-07 14:14:21 +01:00
Antonio Scandurra
56496c2585 Move back diagnostic_summaries into Worktree
This fixes an issue where updating the snapshot's entries would
override the diagnostic summaries received on the remote side.

Co-Authored-By: Nathan Sobo <nathan@zed.dev>
2022-01-07 13:38:20 +01:00
Antonio Scandurra
089542c6f4 Avoid removing diagnostics from Worktree after opening a buffer
This allows re-opening the same buffer and supplying the previous
diagnostics.
2022-01-07 10:33:21 +01:00
Antonio Scandurra
67f672d0cc Clear selections on other excerpted buffers when setting active selections 2022-01-07 10:19:28 +01:00
Max Brunsfeld
94e9c7fd5b Give a full-width background to the diagnostic headers 2022-01-06 17:55:56 -08:00
Max Brunsfeld
2b36ab0de7 Introduce Expanded element
Co-Authored-By: Nathan Sobo <nathan@zed.dev>
2022-01-06 17:35:45 -08:00
Max Brunsfeld
1f762e482d Unify Flexible and Expanded elements
We'll use the name Expanded for something else now.

Co-Authored-By: Nathan Sobo <nathan@zed.dev>
2022-01-06 17:29:34 -08:00
Max Brunsfeld
b19d92e918 Keep selections at the top of the project diagnostics view when it is first populated
Co-Authored-By: Nathan Sobo <nathan@zed.dev>
2022-01-06 17:01:13 -08:00
Max Brunsfeld
9bbe67f0ea Don't clobber diagnostics when getting new snapshot from background scanner
Co-Authored-By: Nathan Sobo <nathan@zed.dev>
2022-01-06 15:04:06 -08:00
Max Brunsfeld
7357b3ff2a Revert "Remove special handling of multi-line primary diagnostic messages and fix tests"
This reverts commit ce4142eab3.
2022-01-06 14:38:13 -08:00
Max Brunsfeld
10548c2038 Always group diagnostics the way they're grouped in the LSP message
Co-Authored-By: Nathan Sobo <nathan@zed.dev>
2022-01-06 14:22:28 -08:00
Nathan Sobo
943571af2a Report backtraces of pending conditions when deterministic executor illegally parks
Co-Authored-By: Max Brunsfeld <maxbrunsfeld@gmail.com>
2022-01-06 13:33:55 -07:00
Nathan Sobo
2dbee1d914 Send diagnostic summaries to guests when they join the project
Co-Authored-By: Max Brunsfeld <maxbrunsfeld@gmail.com>
2022-01-06 12:12:09 -07:00
Nathan Sobo
d7a78e14ac Allow disk-based diagnostic progress begin/end events to interleave
When multiple saves occur, we can have multiple start events followed by multiple end events. We don't want to update our project diagnostics view until all pending progress is finished.

Co-Authored-By: Antonio Scandurra <me@as-cii.com>
2022-01-06 09:32:08 -07:00
Nathan Sobo
571d0386e2 Re-focus diagnostics editor when transitioning from an empty to a populated state
Co-Authored-By: Antonio Scandurra <me@as-cii.com>
2022-01-06 08:35:31 -07:00
Antonio Scandurra
1875a0e349 Polish rendering of inline errors
- Don't soft-wrap
- Render multiple lines

Co-Authored-By: Nathan Sobo <nathan@zed.dev>
2022-01-06 16:17:53 +01:00
Antonio Scandurra
d0f7e5f075 Maintain excerpt ordering correctly when some errors don't change 2022-01-06 15:33:02 +01:00
Antonio Scandurra
f37f839330 Wire up refresh_anchors in Editor::refresh_selections and call it 2022-01-06 15:32:37 +01:00
Max Brunsfeld
7340e83059 WIP - MultiBuffer::refresh_anchors 2022-01-05 21:12:49 -08:00
Max Brunsfeld
fee7657fd7 Merge branch 'main' into polish-project-diagnostics 2022-01-05 20:38:20 -08:00
Nathan Sobo
b10f06d084 Merge pull request #308 from zed-industries/fix-collaboration-regressions
Fix collaboration regressions
2022-01-05 19:56:51 -07:00
Max Brunsfeld
f9f75e98f8 Fix Locator::from_index
Enhance language::tests::test_random_collaborators so that it checks buffer invariants.

Co-Authored-By: Nathan Sobo <nathan@zed.dev>
2022-01-05 17:58:24 -08:00
Max Brunsfeld
e5faaeb2f2 Fix Global::gt and rename it to changed_since
A false negative return value of `gt` was preventing guests' multibuffers from
syncing correctly.

Co-Authored-By: Nathan Sobo <nathan@zed.dev>
2022-01-05 17:36:12 -08:00
Max Brunsfeld
5a53eeef63 Don't scroll editors away from the top of their buffer when content changes
Co-Authored-By: Nathan Sobo <nathan@zed.dev>
2022-01-05 12:31:00 -08:00
Max Brunsfeld
85a13fa477 Fix panic when resolving anchors after an excerpt id has been recycled
Co-Authored-By: Nathan Sobo <nathan@zed.dev>
2022-01-05 11:28:49 -08:00
Max Brunsfeld
8728d3292d Merge branch 'main' into polish-project-diagnostics
Also fix false failure in ModelHandle::condition when parking is not forbidden.
2022-01-05 10:53:18 -08:00
Antonio Scandurra
29b63ae4c6 Remove invalid excerpts as opposed to styling them differently 2022-01-05 18:21:17 +01:00
Antonio Scandurra
4b22e49ce1 Merge pull request #306 from zed-industries/serialize-buffer-undos
Fix buffer replication bugs that would lead to divergence among replicas
2022-01-05 18:14:09 +01:00
Antonio Scandurra
fe28abe8cf Show a message when no diagnostics are available 2022-01-05 17:25:03 +01:00
Antonio Scandurra
e56609cf0c Ensure prior, deferred selections don't override newer selections 2022-01-05 15:43:26 +01:00
Antonio Scandurra
eb65a5d29a Fix TreeMap::get always returning None 2022-01-05 15:41:30 +01:00
Antonio Scandurra
f8c2620166 Fix Buffer::remote_selections_in_range at query range boundaries 2022-01-05 15:04:50 +01:00
Antonio Scandurra
587a908225 Populate deferred operations when an operation can't be applied 2022-01-05 15:04:19 +01:00
Antonio Scandurra
bf044506ed Verify that selections are replicated correctly in randomized test 2022-01-05 15:04:08 +01:00
Antonio Scandurra
870fa5f278 Serialize deferred operations 2022-01-05 11:52:41 +01:00
Antonio Scandurra
d383ff30ce Introduce randomized test for collaboration on buffers
This test will exercise serialization of operations as well as peers
replicating from an existing buffer.
2022-01-05 11:51:41 +01:00
Antonio Scandurra
7bc8eb4f3d Fix compile errors and get serialization unit test passing 2022-01-05 10:29:29 +01:00
Max Brunsfeld
984e366c32 WIP - Serialize buffer in terms of its state, not its base text + ops
The main reason for this is that we need to include information about
a buffer's UndoMap into its protobuf representation. But it's a bit
complex to correctly incorporate this information into the current
protobuf representation.

If we want to continue reusing `Buffer::apply_remote_edit` for
incorporating the historical operations, we need to either make
that method capable of incorporating already-undone edits, or
serialize the UndoMap into undo *operations*, so that we can apply
these undo operations after the fact when deserializing. But this is
not trivial, because an UndoOperation requires information about
the full offset ranges that were undone.
2022-01-04 18:06:16 -08:00
Max Brunsfeld
0bcd0a3f08 Forward events from remote worktrees to their projects 2022-01-04 16:01:25 -08:00
Max Brunsfeld
d7ecbdcc1d Add unit test showing problem with serialization of undo ops 2022-01-04 16:01:12 -08:00
Max Brunsfeld
d8b888c9cb Replicate diagnostic summaries
Co-Authored-By: Antonio Scandurra <me@as-cii.com>
2022-01-04 14:29:22 -08:00
Antonio Scandurra
b2f0c78924 Merge branch 'main' into polish-project-diagnostics
Co-Authored-By: Max Brunsfeld <max@zed.dev>
2022-01-04 18:35:28 +01:00
Antonio Scandurra
5d45c5711d Merge pull request #303 from zed-industries/project-sharing-fixes
Miscellaneous bug fixes after switching to a project-centric sharing model
2022-01-04 18:29:46 +01:00
Antonio Scandurra
b3b56c36d0 Release v0.11.0
Co-Authored-By: Max Brunsfeld <max@zed.dev>
2022-01-04 18:27:35 +01:00
Antonio Scandurra
ad1db117e6 Re-enable cargo check for rust-analyzer 2022-01-04 18:26:16 +01:00
Antonio Scandurra
508b9dc024 Rip out "diagnostic providers" 2022-01-04 16:32:17 +01:00
Antonio Scandurra
496066db59 Run Project::diagnose when registering a new language on Worktree 2022-01-04 15:17:37 +01:00
Antonio Scandurra
2b31a48ef9 Clip right when moving to next word in case we land on a block line 2022-01-04 15:17:37 +01:00
Antonio Scandurra
ed361f2d1a Position selections correctly when duplicating lines in a multi-buffer 2022-01-04 15:17:37 +01:00
Antonio Scandurra
8dc9197324 Position cursors correctly in Editor::delete_line in a multi-buffer 2022-01-04 15:17:37 +01:00
Antonio Scandurra
05a6137549 Capture a new buffer snapshot for excerpts whose selections got updated 2022-01-04 12:22:51 +01:00
Max Brunsfeld
a4027aacb5 Introduce a WorktreeId struct, fix incorrect use of remote worktrees' handle id 2022-01-04 11:28:44 +01:00
Max Brunsfeld
7f8e76e0f1 Remove worktree-specific methods from language::File trait
Use downcasting instead for accessing worktree-specific state of the Files.

This will allow us to introduce a WorktreeId type and use that everywhere
for identifying worktrees. It also just removes some unnecessary coupling
between the language crate and the worktree.
2022-01-04 11:28:44 +01:00
Antonio Scandurra
8270e8e758 Merge pull request #302 from zed-industries/sign-in-redirect
Allow the zed app to connect to both the old and new rpc endpoints
2022-01-04 10:39:28 +01:00
Max Brunsfeld
a080ae98c6 Allow the zed app to connect to both the old and new rpc endpoints
In the case of the new Next.js app, the app will follow a redirect
from 'zed.dev/rpc' to the subdomain where the rust service is hosted.
Until then, the app will connect directly to zed.dev/rpc.
2022-01-03 15:29:26 -08:00
Nathan Sobo
f499a1dfc2 Merge pull request #301 from zed-industries/move-lines-in-multibuffer
Support moving lines up and down in multi-buffers
2021-12-30 01:13:31 -08:00
Nathan Sobo
6d6a82655a Create blocks with anchors to allow a bias to be specified
This allows us to respect the bias on anchors we use to create excerpt headers so that they always remain above any content inserted at the start of an excerpt.
2021-12-30 01:03:19 -08:00
Nathan Sobo
ba75007259 Merge pull request #300 from zed-industries/fix-move-line-panic
Fix panics when moving lines with block decorations and simplify line boundary detection
2021-12-29 23:47:25 -08:00
Nathan Sobo
984378e12c Use anchors for line movement edits to support multi-buffers
Because multi-buffers can contain the same content multiple times, we need to use anchors to track our desired insertion and removal locations when moving lines. This is because deleting a line in order to move it might end up deleting *multiple* lines.
2021-12-29 23:47:03 -08:00
Nathan Sobo
7c9e4e513c Provide an accurate panic message when translating points off the end of a line
Maybe we should fail more gracefully in this case, but I think we should at least make the message accurate and see how we do.
2021-12-29 23:11:54 -08:00
Max Brunsfeld
137fbd0088 Update editor element to use new {next,prev}_line_boundary methods
Since these methods take buffer points instead of display points, this adjusts
the logic for retrieving the visible selections, so that they are initially returned
in terms of buffer points.
2021-12-28 13:47:09 -08:00
Nathan Sobo
7f786ca8a6 WIP: Start moving toward a simpler interface for detecting prev/next line boundaries 2021-12-27 22:11:05 -08:00
Nathan Sobo
89bbfb8154 wip 2021-12-27 21:14:23 -08:00
Max Brunsfeld
6057d819b0 Add a unit test showing panic in move_line_down 2021-12-27 20:58:01 -08:00
Nathan Sobo
93a516d588 Fix warning 2021-12-27 18:00:15 -08:00
Max Brunsfeld
accf90e843 Add MultiBufferSnapshot::range_contains_excerpt_boundary
Use this method to disable move_line_down across excerpt boundaries.

Co-Authored-By: Nathan Sobo <nathan@zed.dev>
2021-12-27 17:08:31 -08:00
Nathan Sobo
cbc162acf5 WIP: Allow lines to be moved down across excerpts
This is still a bit weird because we can't remove the last line of an excerpt but we still move it into another buffer. There also seem to be issues with undo.
2021-12-27 15:46:19 -08:00
Nathan Sobo
835af35839 Simplify prev/next_row_boundary methods
We added clipping of points against the buffer when excerpt headers were in the buffer, but now that they're just blocks, I think we can avoid the potential to panic in these methods by going back to not clipping.
2021-12-27 15:46:19 -08:00
Max Brunsfeld
d3521650d3 Merge pull request #296 from zed-industries/fix-autoindent
Fix regressions that happened when moving selections into Editor
2021-12-27 15:45:58 -08:00
Max Brunsfeld
3040cfece1 Fix Editor::newest_selection
Co-Authored-By: Nathan Sobo <nathan@zed.dev>
2021-12-27 15:42:37 -08:00
Max Brunsfeld
f5d4e26799 Remove unused context variable 2021-12-27 15:34:07 -08:00
Max Brunsfeld
cbd9e186b5 Store selections with a right start bias so that autoindent moves them
Previously, cursors at column 0 had to be explicitly moved when those lines
were autoindented. This behavior was lost when we moved selections from
the buffer to the editor. Now, with the right bias, we get this behavior automatically.

Co-Authored-By: Nathan Sobo <nathan@zed.dev>
2021-12-27 15:33:57 -08:00
Max Brunsfeld
43db9e826b Clear autoindent requests when applying autoindent
Co-Authored-By: Nathan Sobo <nathan@zed.dev>
2021-12-27 13:33:48 -08:00
Nathan Sobo
6f26fa013a Merge pull request #295 from zed-industries/api
Add remaining API endpoints needed to support the new website
2021-12-27 12:54:34 -08:00
Nathan Sobo
13ed9dc1f1 Document database setup and fix issue in script/seed-db
Co-Authored-By: Max Brunsfeld <maxbrunsfeld@gmail.com>
2021-12-27 12:52:15 -08:00
Nathan Sobo
8937d877e3 💄 2021-12-27 12:22:59 -08:00
Nathan Sobo
63238a2938 Destroy access tokens before destroying users and word-smith method names 2021-12-25 17:46:02 -07:00
Nathan Sobo
b949b30f24 Add delete user endpoint 2021-12-25 11:57:37 -07:00
Nathan Sobo
56930972fe Add endpoints for listing, creating, and updating users 2021-12-25 11:55:10 -07:00
Nathan Sobo
07a4cfeefd Streamline running both next and collab servers in development
Co-Authored-By: Antonio Scandurra <me@as-cii.com>
2021-12-25 11:55:10 -07:00
Nathan Sobo
fe5465a265 Enable authentication via the NextJS site 2021-12-25 11:55:10 -07:00
Nathan Sobo
6dd23c250b Merge pull request #294 from zed-industries/simplify-keymap-contexts
Don't merge keymap contexts from containing elements
2021-12-24 16:54:47 -07:00
Nathan Sobo
e9a750be71 Don't merge keymap contexts from containing elements
Co-Authored-By: Aaron Hillegass <charmedliferaft@gmail.com>
2021-12-24 16:44:35 -07:00
Nathan Sobo
9fc2ddb8da Merge pull request #293 from zed-industries/project-diagnostics
Project diagnostics: First pass
2021-12-24 16:42:00 -07:00
Nathan Sobo
cf81f5a555 Update tests to reflect that we no longer attempt to recycle group ids 2021-12-24 16:36:21 -07:00
Nathan Sobo
ce4142eab3 Remove special handling of multi-line primary diagnostic messages and fix tests 2021-12-24 13:47:45 -07:00
Nathan Sobo
a3df597155 Make diagnostics disk-based in test 2021-12-24 13:33:11 -07:00
Antonio Scandurra
adeea9da66 Parse children from cargo check output to provide hints
Co-Authored-By: Nathan Sobo <nathan@zed.dev>
2021-12-24 17:06:18 +01:00
Antonio Scandurra
a85e400b35 Start on a DiagnosticProvider implementation for Rust
Co-Authored-By: Nathan Sobo <nathan@zed.dev>
2021-12-24 16:22:22 +01:00
Antonio Scandurra
393009a05c Implement Buffer::diagnostic_group 2021-12-24 12:08:55 +01:00
Antonio Scandurra
11e3874b4a Remove duplication when assigning diagnostics and hardcode provider names 2021-12-24 12:07:26 +01:00
Antonio Scandurra
3149a4297c Add API_TOKEN environment variable to manifest 2021-12-24 10:02:24 +01:00
Max Brunsfeld
4f774e2bde wip 2021-12-23 23:10:28 -08:00
Nate Butler
78564dcc68 Add job post to Zed.dev 2021-12-23 23:22:57 -05:00
Max Brunsfeld
d5a17053df Get code compiling with some todos 2021-12-23 14:21:10 -08:00
Nathan Sobo
e3ecd87081 WIP
Co-Authored-By: Antonio Scandurra <me@as-cii.com>
Co-Authored-By: Max Brunsfeld <maxbrunsfeld@gmail.com>
2021-12-23 11:24:35 -07:00
Antonio Scandurra
7b453beebc WIP: Use cargo check for on-disk diagnostics
Co-Authored-By: Nathan Sobo <nathan@zed.dev>
Co-Authored-By: Max Brunsfeld <max@zed.dev>
2021-12-23 18:35:50 +01:00
Antonio Scandurra
b9d1ca4341 Show only disk-based diagnostics in ProjectDiagnosticsEditor
Co-Authored-By: Nathan Sobo <nathan@zed.dev>
2021-12-23 16:47:54 +01:00
Antonio Scandurra
304afc1813 Only preserve excerpts for invalid diagnostics if they contain cursors
Co-Authored-By: Nathan Sobo <nathan@zed.dev>
2021-12-23 16:33:50 +01:00
Antonio Scandurra
dcf26acaac Use a different invalidation strategy for project-wide diagnostics 2021-12-23 15:41:43 +01:00
Antonio Scandurra
da460edb8b Remove BufferState when the last buffer's excerpt is removed 2021-12-23 09:59:39 +01:00
Antonio Scandurra
9164c5f239 Emit an UpdateDiagnostics from Worktree when buffer diagnostics change 2021-12-23 09:57:50 +01:00
Max Brunsfeld
c47340000d Fix remove_excerpts when removing the last N excerpts, N > 1
Also, generalize the randomized test to remove multiple excerpts at a time
2021-12-22 21:02:20 -08:00
Max Brunsfeld
3e59c61a34 Use MultiBuffer::insert_excerpt_after to update project diagnostics view 2021-12-22 18:00:53 -08:00
Max Brunsfeld
435d405d10 Implement MultiBuffer::insert_excerpt_after 2021-12-22 17:59:44 -08:00
Max Brunsfeld
a86ba57983 Add Editor::remove_blocks 2021-12-22 17:30:14 -08:00
Max Brunsfeld
5d8ed535be Clear out old disk-based diagnostics after 2 seconds 2021-12-22 15:51:51 -08:00
Max Brunsfeld
b9551ae8b1 Preserve group ids when updating diagnostics 2021-12-22 14:50:51 -08:00
Max Brunsfeld
06d2cdc20d Remove unused multi_buffer::FromAnchor trait 2021-12-22 13:27:43 -08:00
Max Brunsfeld
0faf5308ac Add a unit test for preserving disk-based diagnostics 2021-12-22 13:27:43 -08:00
Max Brunsfeld
1544da887e Start work on preserving continuity of disk-based diagnostics 2021-12-22 12:52:50 -08:00
Max Brunsfeld
e31205c95e Revert "Implement MultiBuffer::remove_excerpts by inserting tombstones"
This reverts commit 275b7e8d4f.
2021-12-22 10:18:33 -08:00
Antonio Scandurra
275b7e8d4f Implement MultiBuffer::remove_excerpts by inserting tombstones
This will make it easier to use anchors in the presence of deletes.
2021-12-22 17:57:36 +01:00
Max Brunsfeld
2c3efdea8c WIP - Start work on updating project diagnostics view 2021-12-21 16:39:23 -08:00
Max Brunsfeld
a888620e5f Implement MultiBuffer::remove_excerpts
We'll need this for updating project diagnostics
2021-12-21 15:25:57 -08:00
Max Brunsfeld
a93f5e5fb4 Avoid repeated subscriptions + clones when adding another excerpt for same buffer 2021-12-21 14:28:23 -08:00
Max Brunsfeld
3c26f67ea3 Minor cleanup in Buffer::update_diagnostics 2021-12-21 14:07:50 -08:00
Max Brunsfeld
bc906fef9c Store worktree's diagnostics summaries ordered by path 2021-12-21 14:07:09 -08:00
Max Brunsfeld
699dafbbd4 Avoid cloning diagnostic messages from language server 2021-12-21 14:06:17 -08:00
Max Brunsfeld
8492c6e7ac Fix maintenance of MultiBuffer's buffer states 2021-12-21 13:07:43 -08:00
Max Brunsfeld
13ecd16685 Index max buffer row on MultiBuffer 2021-12-21 12:36:46 -08:00
Nathan Sobo
61b806e485 Add an endpoint for creating an access token for a GitHub login 2021-12-21 13:05:32 -07:00
Max Brunsfeld
04d577e326 Fix context line handling in project diagnostic view 2021-12-21 11:46:47 -08:00
Max Brunsfeld
60f7169008 Remove header heights from multibuffer randomized test 2021-12-21 10:24:01 -08:00
Antonio Scandurra
eec1748dc7 Render excerpt headers using DisplayMap::insert_blocks
Co-Authored-By: Max Brunsfeld <max@zed.dev>
2021-12-21 18:32:27 +01:00
Antonio Scandurra
91c786a8db WIP: Insert blocks in BlockMap for MultiBuffer headers 2021-12-21 17:38:03 +01:00
Antonio Scandurra
8534a9cc41 Don't insert headers in MultiBuffer
This lays the groundwork to insert headers in the block map instead.
2021-12-21 16:38:18 +01:00
Antonio Scandurra
99317bbd62 Delete unit test
Sharing/unsharing is already exercised via the integration tests.
2021-12-21 14:51:09 +01:00
Antonio Scandurra
89c0b358a7 Allow sharing/unsharing of projects 2021-12-21 12:45:20 +01:00
Antonio Scandurra
17094ec542 Allow opening of remote projects via the contacts panel 2021-12-21 12:05:38 +01:00
Antonio Scandurra
5d2c4807db Fix invalid theme variables 2021-12-21 10:25:37 +01:00
Antonio Scandurra
c6dd797f4e Drop project instead of worktree to simulate client leaving 2021-12-21 10:20:05 +01:00
Antonio Scandurra
afec4152f4 Update contacts as projects/worktrees get registered/unregistered 2021-12-21 10:17:26 +01:00
Antonio Scandurra
40da3b233f Get more integration tests passing 2021-12-21 09:50:11 +01:00
Antonio Scandurra
1e8ef8a4c1 Register local worktrees after acquiring a project remote id 2021-12-21 09:49:13 +01:00
Antonio Scandurra
4053d683d9 Re-enable commented out worktree test 2021-12-21 08:35:08 +01:00
Max Brunsfeld
788bb4a368 Get some RPC integration tests passing 2021-12-20 18:08:53 -08:00
Max Brunsfeld
636931373e Add missing RPC handlers for local projects 2021-12-20 18:08:06 -08:00
Max Brunsfeld
870b73aa36 Send a LeaveProject message when dropping a remote project 2021-12-20 18:07:51 -08:00
Max Brunsfeld
a138955943 Fix logic for waiting for project's remote id 2021-12-20 18:07:34 -08:00
Max Brunsfeld
5d8d7de68d Fix accidental usages of local worktree id instead of remote id 2021-12-20 18:06:58 -08:00
Max Brunsfeld
55910c0d79 Get the server and integration tests compiling 2021-12-20 16:30:29 -08:00
Max Brunsfeld
466a377e1d Merge branch 'main' into share-project
Co-Authored-By: Nathan Sobo <nathan@zed.dev>
2021-12-20 11:42:59 -08:00
Max Brunsfeld
614ee4eac7 Send worktree info only when sharing worktree
Co-Authored-By: Antonio Scandurra <me@as-cii.com>
Co-Authored-By: Nathan Sobo <nathan@zed.dev>
2021-12-20 11:36:59 -08:00
Antonio Scandurra
697e641e8e Get back to a compiling state for client
This temporarily disables some tests and there are still some server-side
compiler errors.
2021-12-20 16:08:34 +01:00
Nathan Sobo
323e1f7367 Add the fetching of user JSON by github login with a token header 2021-12-19 09:43:13 -07:00
Nathan Sobo
f4b9772ec2 Relocate admin routes to make room for API
I want to use the top-level /users route for the API that we'll access from the front-end site running on Vercel, and this is the easiest way to make space. Eventually we won't have admin pages, but I want to be additive for now.
2021-12-19 09:06:57 -07:00
Nathan Sobo
29bc2db6e8 Fix journal format strings 2021-12-18 12:15:07 -07:00
Max Brunsfeld
c41b958829 WIP - start restructuring collaboration around entire projects
Co-Authored-By: Nathan Sobo <nathan@zed.dev>
2021-12-17 22:00:39 -08:00
Antonio Scandurra
88d663a253 Allow saving of all buffers contained in project diagnostics editor
Co-Authored-By: Nathan Sobo <nathan@zed.dev>
2021-12-17 16:48:16 +01:00
Antonio Scandurra
f0fe346e15 Gracefully degrade diagnostics_in_range, diagnostic_group and file
Co-Authored-By: Nathan Sobo <nathan@zed.dev>
2021-12-17 16:37:12 +01:00
Antonio Scandurra
6685d5aa7d Implement MultiBuffer::save
This is still not integrated with `workspace::ItemView`.
2021-12-17 16:11:18 +01:00
Antonio Scandurra
7d2b74a93b Implement MultiBuffer::{is_dirty,has_conflict} 2021-12-17 15:33:09 +01:00
Antonio Scandurra
5f819b6edc Implement MultiBuffer::enclosing_bracket_ranges 2021-12-17 15:05:05 +01:00
Antonio Scandurra
c9cbc2fe1e Implement MultiBuffer::range_for_syntax_ancestor 2021-12-17 14:57:42 +01:00
Antonio Scandurra
a2ee38f37b Make MultiBuffer::is_parsing a test-only method 2021-12-17 14:57:24 +01:00
Antonio Scandurra
3914d1d072 Display filename on the first excerpt's header for a group 2021-12-17 13:49:21 +01:00
Antonio Scandurra
63f171200e Enhance diagnostic unit test and correctly display primary diagnostic
That is, if the diagnostic has more than one line we will display the
first line in the header and all the other message lines at the error
location.
2021-12-17 12:16:09 +01:00
Max Brunsfeld
528d64d3cc WIP - Improve project diagnostic context rendering 2021-12-16 18:34:29 -08:00
Max Brunsfeld
fb492a9fb8 Correctly incorporate editor settings into diagnostic header rendering 2021-12-16 16:36:33 -08:00
Max Brunsfeld
ae147a379d Don't terminate on an empty input chunk in ExcerptChunks 2021-12-16 16:05:28 -08:00
Max Brunsfeld
31eeffa5a7 Autoscroll after inserting blocks 2021-12-16 14:20:01 -08:00
Max Brunsfeld
9cd4e5ba04 Transfer focus from ProjectDiagnostics view to its editor 2021-12-16 14:14:22 -08:00
Max Brunsfeld
6444fcd442 Integrate MultiBuffer::buffer_rows into the display map 2021-12-16 13:53:32 -08:00
Max Brunsfeld
db33e4935a Implement MultiBuffer::buffer_rows 2021-12-16 12:17:47 -08:00
Max Brunsfeld
a293e9c0c5 Suppress unused field warnings 2021-12-16 11:17:06 -08:00
Max Brunsfeld
38df091b06 Fix up/down movement across excerpt headers
Implement these movements in terms of clipping, instead of with explicit loops
2021-12-16 11:16:48 -08:00
Max Brunsfeld
dcd05ef96b Resolve Anchor::min and ::max to valid positions
Co-Authored-By: Antonio Scandurra <me@as-cii.com>
2021-12-16 10:25:09 -08:00
Max Brunsfeld
80f3173fbd Always panic if invalid point is passed to {prev,next}_row_boundary
Co-Authored-By: Antonio Scandurra <antonio@zed.dev>
2021-12-16 10:23:45 -08:00
Antonio Scandurra
0fc2db6d6e Account for folds when inserting/removing block decorations 2021-12-16 16:44:15 +01:00
Antonio Scandurra
7660159164 Test blocks in display map randomized tests
This highlighted some errors in the implementation.
2021-12-16 16:15:14 +01:00
Antonio Scandurra
de679cae78 Re-enable creating multiple blocks at once in BlockMap tests 2021-12-16 12:41:48 +01:00
Antonio Scandurra
abf96e6ad6 Fix movement tests in DisplayMap 2021-12-16 12:36:27 +01:00
Antonio Scandurra
64e2f6d506 Ensure BlockMap::clip_point always yield a valid buffer location 2021-12-16 12:29:37 +01:00
Antonio Scandurra
ec39c9d335 Allow specifying MAX_EXCERPTS via an env variable in random tests 2021-12-16 12:28:54 +01:00
Max Brunsfeld
3e2f684545 Fix prev_row_boundary when a wrap follows a fold
Co-Authored-By: Nathan Sobo <nathan@zed.dev>
2021-12-15 18:30:09 -08:00
Max Brunsfeld
4c22774694 Always clip buffer points when clipping display points
Co-Authored-By: Nathan Sobo <nathan@zed.dev>
2021-12-15 17:58:24 -08:00
Max Brunsfeld
f898dc6dae Guard against inverted ranges when building edits in unfold
The multibuffer lets you refer to offsets inside of headers,
so it's possible to create a fold that appears non-empty,
but which spans zero characters in the underlying buffers.

Fold ranges are biased inward: the start is biased right, and
the end is biased left.

Because of these two things, it's possible to create a fold
that becomes "inverted" when you insert text at that position.
2021-12-15 17:29:15 -08:00
Max Brunsfeld
e8570b5c26 Allow multibuffer to clip to the ends of excerpts, before trailing newlines 2021-12-15 17:04:57 -08:00
Max Brunsfeld
f8ef605cbd Update all MultiBuffer unit tests, removing expected trailing newline 2021-12-15 17:04:57 -08:00
Max Brunsfeld
f4115ddc3c 🎨 point_to_display_point & display_point_to_point 2021-12-15 15:45:02 -08:00
Max Brunsfeld
368b4447ff Clip buffer points in DisplayMap::{prev,next}_row_boundary 2021-12-15 15:41:38 -08:00
Max Brunsfeld
2930ea8fb0 Fix handling of excerpts surrounded by edits in MultiBuffer::edit 2021-12-15 12:12:39 -08:00
Max Brunsfeld
4bea16eb31 Ensure multibuffer anchors are contained within their excerpt ranges
Co-Authored-By: Nathan Sobo <nathan@zed.dev>
Co-Authored-By: Antonio Scandurra <me@as-cii.com>
2021-12-15 10:52:27 -08:00
Max Brunsfeld
cec0c5912c Create multiple excerpts in random BlockMap test
Co-Authored-By: Antonio Scandurra <me@as-cii.com>
Co-Authored-By: Nathan Sobo <nathan@zed.dev>
2021-12-15 10:21:11 -08:00
Antonio Scandurra
80abd84050 Create MultiBuffers with more than one fragment in more randomized tests
Co-Authored-By: Nathan Sobo <nathan@zed.dev>
Co-Authored-By: Max Brunsfeld <max@zed.dev>
2021-12-15 18:38:37 +01:00
Antonio Scandurra
1bdaeda43e Remove disk diagnostics that were invalidated by a buffer edit
Co-Authored-By: Nathan Sobo <nathan@zed.dev>
Co-Authored-By: Max Brunsfeld <max@zed.dev>
2021-12-15 18:29:01 +01:00
Antonio Scandurra
4ab307f0a1 Re-enable multi-byte random character generation
Co-Authored-By: Nathan Sobo <nathan@zed.dev>
2021-12-15 17:51:11 +01:00
Antonio Scandurra
5118f27a90 Overhaul MultiBuffer::chunks
Co-Authored-By: Nathan Sobo <nathan@zed.dev>
2021-12-15 17:22:04 +01:00
Antonio Scandurra
bcdb4ffd88 Allow edits at the end of MultiBuffer
Co-Authored-By: Nathan Sobo <nathan@zed.dev>
2021-12-15 16:45:01 +01:00
Antonio Scandurra
7bbaa1d930 Don't insert a newline after the last excerpt of a MultiBuffer
Co-Authored-By: Nathan Sobo <nathan@zed.dev>
2021-12-15 16:39:09 +01:00
Antonio Scandurra
ae0fa75abe Start testing the integration of display layers with MultiBuffers
Co-Authored-By: Nathan Sobo <nathan@zed.dev>
2021-12-15 16:05:26 +01:00
Antonio Scandurra
59121a238a Forward notifications from Buffer in MultiBuffer
Co-Authored-By: Nathan Sobo <nathan@zed.dev>
2021-12-15 15:07:42 +01:00
Nathan Sobo
437145afbe Remove assertion and don't consume 0 bytes
Co-Authored-By: Antonio Scandurra <me@as-cii.com>
2021-12-15 06:52:37 -07:00
Nathan Sobo
fbba417f09 Implement MultiBuffer::bytes_in_range
Co-Authored-By: Antonio Scandurra <me@as-cii.com>
2021-12-15 06:49:10 -07:00
Antonio Scandurra
95137ecb2a WIP 2021-12-15 13:20:11 +01:00
Antonio Scandurra
e23965e7c9 Implement MultiBuffer::reversed_chars_at 2021-12-15 10:06:45 +01:00
Antonio Scandurra
9cbb680fb2 Fix panic on creation of a left-biased anchor at the end of MultiBuffer 2021-12-15 10:06:43 +01:00
Antonio Scandurra
7bcce23dc9 Fix compile error in server integration tests 2021-12-15 08:48:50 +01:00
Nathan Sobo
6c5b27af1d Group diagnostics by primary
Render primary message above the excerpt and supporting messages as block decorations with a `Below` disposition. This is still super rough.

Co-Authored-By: Max Brunsfeld <maxbrunsfeld@gmail.com>
2021-12-14 18:26:42 -07:00
Nathan Sobo
e1a2897d53 Render basic diagnostic messages in project diagnostics view
Co-Authored-By: Max Brunsfeld <maxbrunsfeld@gmail.com>
2021-12-14 17:00:57 -07:00
Nathan Sobo
ad05c0cc7a Implement MultiBufferSnapshot::excerpt_headers_in_range
Co-Authored-By: Max Brunsfeld <maxbrunsfeld@gmail.com>
2021-12-14 16:24:48 -07:00
Max Brunsfeld
60e2c6bc52 Fix multibuffer anchors before the ends of excerpts 2021-12-14 13:37:05 -08:00
Max Brunsfeld
06e241117c Fix assertions in test for selection restoration after undo/redo
Co-Authored-By: Nathan Sobo <nathan@zed.dev>
2021-12-14 13:04:51 -08:00
Max Brunsfeld
e38c1814d5 Update selections on text insertion using anchors
The delta-based approach doesn't work for multi-excerpt buffers.

Co-Authored-By: Nathan Sobo <nathan@zed.dev>
2021-12-14 12:15:26 -08:00
Max Brunsfeld
4ed96bb5a6 Fix assertion in multibuffer history test
Co-Authored-By: Antonio Scandurra <me@as-cii.com>
Co-Authored-By: Nathan Sobo <nathan@zed.dev>
2021-12-14 11:47:22 -08:00
Max Brunsfeld
bf9daf1529 Allow left-biased anchors at the beginnings of excerpts
Co-Authored-By: Antonio Scandurra <me@as-cii.com>
2021-12-14 09:58:28 -08:00
Antonio Scandurra
358a6ff66c Implement MultiBufferSnapshot::contains_str_at 2021-12-14 17:51:14 +01:00
Antonio Scandurra
08e9f3e1e3 Maintain a different undo/redo stack in MultiBuffer
This only applies to singleton mode.
2021-12-14 17:43:41 +01:00
Antonio Scandurra
523cbe781b Return the transaction id after grouping in end_transaction_at
Co-Authored-By: Nathan Sobo <nathan@zed.dev>
2021-12-14 17:41:42 +01:00
Antonio Scandurra
119d44caf7 Remove test-only transaction_group_interval method from MultiBuffer 2021-12-14 14:19:04 +01:00
Antonio Scandurra
2d1ff8f606 Clip anchors created on MultiBuffer's trailing newlines or headers 2021-12-14 12:29:05 +01:00
Antonio Scandurra
1b67f19edc Implement MultiBuffer::set_active_selections 2021-12-14 12:13:19 +01:00
Antonio Scandurra
920daa8a8f Remove stray log statement 2021-12-14 12:01:19 +01:00
Antonio Scandurra
163ce95171 Implement MultiBufferSnapshot::remote_selections_in_range 2021-12-14 11:34:26 +01:00
Antonio Scandurra
174b37cdf0 Assume all excerpts in the multi buffer have the same language for now 2021-12-14 11:33:53 +01:00
Antonio Scandurra
04ffca95c6 Keep a separate diagnostic update count and parse count in MultiBuffer 2021-12-14 11:32:49 +01:00
Antonio Scandurra
9e15c57f91 Display a rudimentary project diagnostic view on alt-shift-d 2021-12-14 11:32:05 +01:00
Max Brunsfeld
4efdc53d9f WIP 2021-12-13 17:44:20 -08:00
Max Brunsfeld
0b1c27956b Add Project::open_buffer method 2021-12-13 17:44:15 -08:00
Max Brunsfeld
fe571f1d70 Store diagnostic summaries on worktrees 2021-12-13 16:36:53 -08:00
Max Brunsfeld
6ab795c629 Handle buffer deduping in the worktree instead of in workspace
Previously, buffers were only deduped by file if they were opened
through Workspace::open_entry
2021-12-13 16:35:46 -08:00
Max Brunsfeld
52b8e3d1a2 Get tests passing after diagnostic + selection changes
Co-Authored-By: Nathan Sobo <nathan@zed.dev>
2021-12-13 11:34:56 -08:00
Max Brunsfeld
418a9a3d66 Get things compiling with diagnostics on worktree
Co-Authored-By: Antonio Scandurra <me@as-cii.com>
Co-Authored-By: Nathan Sobo <nathan@zed.dev>
2021-12-13 11:15:03 -08:00
Antonio Scandurra
85674ba506 WIP
Co-Authored-By: Nathan Sobo <nathan@zed.dev>
2021-12-13 16:46:35 +01:00
Antonio Scandurra
6645e2820c First attempt at implementing MultiBuffer::edit_internal
Co-Authored-By: Nathan Sobo <nathan@zed.dev>
2021-12-13 15:43:26 +01:00
Antonio Scandurra
c984b39aaa Show remote collaborators' active selections 2021-12-13 11:38:38 +01:00
Antonio Scandurra
2adf11e204 Write a simple unit test for TreeMap and fix bug in remove 2021-12-13 11:20:10 +01:00
Nathan Sobo
cdbcbdfe6d Test undo/redo at editor layer, including selection restoration 2021-12-12 15:04:19 -07:00
Nathan Sobo
44cd0be068 Restore selections upon undo/redo of edits performed in the current editor 2021-12-12 14:12:03 -07:00
Nathan Sobo
1e7184ea07 Get selections rendering again when local selections are owned by Editor 2021-12-11 13:42:46 -07:00
Nathan Sobo
4dd0752e80 More messy progress towards selections in editors 2021-12-11 00:29:34 -07:00
Nathan Sobo
0639c8331c Relax TreeMap value bounds, fix warnings, simplify cmp 2021-12-10 23:35:24 -07:00
Nathan Sobo
49d1c9d1ba Introduce sum_tree::TreeMap<K, V>
I think this will be useful to avoid cloning HashMaps in certain cases such as snapshots.
2021-12-10 23:33:15 -07:00
Nathan Sobo
f5c775fcd1 WIP 2021-12-10 22:16:39 -07:00
Nathan Sobo
8432daef6a WIP: Start on removing selections from buffer in favor of editor 2021-12-10 19:23:34 -07:00
Nathan Sobo
f35c419f43 Return optional transaction ids from undo/redo
This will allow the editor to restore selections that it associated with the start or end of a transaction.
2021-12-10 18:08:26 -07:00
Nathan Sobo
77defe6e28 Return optional transaction ids when starting/ending a transaction
If the transaction was nested, we return None. Otherwise we return the transaction id in preparation for editors to maintain their own selection state.
2021-12-10 18:00:09 -07:00
Nathan Sobo
c8b43e3078 Move multi_buffer to editor crate 2021-12-10 17:37:53 -07:00
Max Brunsfeld
6caf016df9 Get tests passing w/ multibuffer in editor
Co-Authored-By: Nathan Sobo <nathan@zed.dev>
2021-12-10 15:54:25 -08:00
Max Brunsfeld
75dd37d873 Update multibuffer when buffers' syntax trees or diagnostics change
Co-Authored-By: Nathan Sobo <nathan@zed.dev>
2021-12-10 15:54:06 -08:00
Max Brunsfeld
ceff57d02f Don't append a trailing newline in singleton MultiBuffer 2021-12-10 14:27:58 -08:00
Max Brunsfeld
a758bd4f8d Fill in some missing methods on MultiBuffer, MultiBufferSnapshot 2021-12-10 14:27:04 -08:00
Antonio Scandurra
5b31c1ba4e Start making MultiBuffer work with a singleton buffer
Co-Authored-By: Nathan Sobo <nathan@zed.dev>
2021-12-10 18:04:11 +01:00
Antonio Scandurra
7524974f19 Get everything compiling again
Co-Authored-By: Nathan Sobo <nathan@zed.dev>
2021-12-10 17:15:16 +01:00
Antonio Scandurra
da09247e5e WIP
Co-Authored-By: Nathan Sobo <nathan@zed.dev>
2021-12-10 16:14:12 +01:00
Antonio Scandurra
9c74deb9ec Finish removing anchor collections from MultiBuffer
Co-Authored-By: Nathan Sobo <nathan@zed.dev>
2021-12-10 15:58:37 +01:00
Antonio Scandurra
d9da8effd4 Re-implement edits_since_in_range in terms of Locator
Co-Authored-By: Nathan Sobo <nathan@zed.dev>
2021-12-10 14:22:43 +01:00
Antonio Scandurra
c8d5e19492 Merge branch 'fragment-locators' into project-diagnostics 2021-12-10 14:01:17 +01:00
Antonio Scandurra
cb97b7cd1d Fix diagnostic unit test 2021-12-10 09:43:21 +01:00
Antonio Scandurra
eeba0993aa Optimize anchor comparison and take full advantage of fragment IDs 2021-12-10 09:16:58 +01:00
Max Brunsfeld
5e516f59c0 Merge branch 'fragment-locators' into HEAD 2021-12-09 14:49:04 -08:00
Max Brunsfeld
1ed1ec21dd Batch anchor resolution, avoid cloning fragment ids when seeking 2021-12-09 11:00:40 -08:00
Antonio Scandurra
e9c385e7a6 WIP 2021-12-09 18:27:54 +01:00
Antonio Scandurra
91a7bbbba2 Fix some of the diagnostic tests and make DiagnosticEntry generic
Co-Authored-By: Nathan Sobo <nathan@zed.dev>
2021-12-09 17:53:08 +01:00
Antonio Scandurra
65711b2256 Remove anchor collections
Co-Authored-By: Nathan Sobo <nathan@zed.dev>
2021-12-09 17:28:27 +01:00
Antonio Scandurra
67686dd1c2 Don't use an enum for anchors and model min/max more implicitly
This will make it easier to serialize an anchor.
2021-12-09 12:01:17 +01:00
Antonio Scandurra
cbe136c0cb Implement anchor resolution using locators 2021-12-09 11:18:01 +01:00
Antonio Scandurra
b7535dfba4 Store only clock::Local in InsertionFragment 2021-12-09 09:21:52 +01:00
Antonio Scandurra
dc81b5f57a Make remote edit randomized tests pass with locators 2021-12-09 09:15:19 +01:00
Antonio Scandurra
b4ebe179f9 Make local edit randomized tests pass with locators 2021-12-09 09:11:16 +01:00
Nathan Sobo
dd38eb1264 Start on maintaining an insertions tree
I'm correctly assigning fragment ids to all fragments in the fragments tree, but I have a randomized test failure when making sure that the insertions tree matches the state of the fragments tree.
2021-12-08 22:05:13 -07:00
Nathan Sobo
ec54010e3c Sketch in type-level changes to track insertion splits 2021-12-08 21:04:22 -07:00
Nathan Sobo
98f726974e WIP 2021-12-08 20:04:30 -07:00
Nathan Sobo
4ee404a0af Take a cx in MultiBuffer::start_transaction 2021-12-08 19:30:52 -07:00
Nathan Sobo
87d16c271e Get Editor compiling with MultiBuffer as its buffer
There's a bunch of unimplemented methods in MultiBuffer, but everything compiles.

Co-Authored-By: Max Brunsfeld <maxbrunsfeld@gmail.com>
2021-12-08 19:23:04 -07:00
Max Brunsfeld
daedf179b2 Implement Anchor, AnchorRangeMap, SelectionSet in multi_buffer
Co-Authored-By: Nathan Sobo <nathan@zed.dev>
2021-12-08 12:56:09 -08:00
Max Brunsfeld
a7634ccd5f Rename ExcerptList to MultiBuffer
Co-Authored-By: Nathan Sobo <nathan@zed.dev>
2021-12-08 10:07:15 -08:00
Max Brunsfeld
5f8e406c18 Fill out ExcerptList API
This restores the improvements that we had made on the `project-diagnostics-generic` branch.

Co-Authored-By: Antonio Scandurra <me@as-cii.com>
Co-Authored-By: Nathan Sobo <nathan@zed.dev>
2021-12-08 10:04:22 -08:00
Antonio Scandurra
a88cff4fa0 Remove lifetime parameter from TextDimension trait
Co-Authored-By: Antonio Scandurra <me@as-cii.com>
2021-12-08 09:45:57 -08:00
Max Brunsfeld
6a44a7448e Consolidate Edit types in editor crate 2021-12-08 09:33:55 -08:00
Max Brunsfeld
fa379885f1 Give more specific names to all snapshot and text iterator types 2021-12-08 09:24:00 -08:00
Max Brunsfeld
ad33111a22 Fix assertion in excerpt unit test after fixing edits 2021-12-06 17:40:32 -08:00
Max Brunsfeld
39cc0cac93 Fix Subscription re-export after moving it into its own module 2021-12-06 17:40:17 -08:00
Max Brunsfeld
102926d171 Implement and randomized test excerpt list point translation and clipping 2021-12-06 17:39:31 -08:00
Max Brunsfeld
09c0c3a0e7 🎨 excerpt_list::Chunks::next 2021-12-06 16:28:44 -08:00
Max Brunsfeld
416033a01c Get random excerpts test passing w/ text in range, edits 2021-12-06 16:17:31 -08:00
Nathan Sobo
02f42f2877 WIP
Co-Authored-By: Max Brunsfeld <maxbrunsfeld@gmail.com>
2021-12-06 14:51:23 -07:00
Nathan Sobo
88e3d87098 Get randomized test passing on basic excerpt list features
Co-Authored-By: Max Brunsfeld <maxbrunsfeld@gmail.com>
2021-12-06 13:48:07 -07:00
Nathan Sobo
4578938ea1 Implement ExcerptList::subscribe
Co-Authored-By: Max Brunsfeld <maxbrunsfeld@gmail.com>
2021-12-06 13:37:17 -07:00
Nathan Sobo
a02a29944c Get the basic ExcerptList unit test passing again
Co-Authored-By: Max Brunsfeld <maxbrunsfeld@gmail.com>
2021-12-06 13:01:09 -07:00
Nathan Sobo
6965117dd8 Allow patches to be composed with edit iterators in addition to other Patches
This can avoid an extra allocation in some cases.

Co-Authored-By: Max Brunsfeld <maxbrunsfeld@gmail.com>
2021-12-06 13:00:51 -07:00
Nathan Sobo
cff610e1ec Rename FragmentList to ExcerptList
Co-Authored-By: Max Brunsfeld <maxbrunsfeld@gmail.com>
2021-12-06 11:59:32 -07:00
Antonio Scandurra
42eba7268d Introduce Buffer::edits_since_in_range
Co-Authored-By: Nathan Sobo <nathan@zed.dev>
Co-Authored-By: Max Brunsfeld <max@zed.dev>
2021-12-06 19:48:45 +01:00
Antonio Scandurra
e37908cf3b Start on a simple randomized test for FragmentList 2021-12-06 16:08:17 +01:00
Antonio Scandurra
8354d1520d 🎨 2021-12-06 14:03:38 +01:00
Antonio Scandurra
45d6f5ab04 Start on maintaining edits in FragmentList 2021-12-06 12:10:25 +01:00
Antonio Scandurra
8f90d42723 Merge branch 'main' into project-diagnostics 2021-12-06 09:39:03 +01:00
Nathan Sobo
0be897d5ac WIP: Edit one of the excerpted buffers and add an assertion
We'll need to detect edits on the child buffers and understand their impact on the tree.
2021-12-04 07:19:30 -07:00
Nathan Sobo
811696670a Start on a new FragmentList
Here I'm exploring a new approach to the project-wide diagnostics view that can exactly mirror the contents of cargo check. The `FragmentList` composes an arbitrary list of fragments from other buffers and presents them as if they were a single buffer.
2021-12-04 06:57:56 -07:00
124 changed files with 17239 additions and 7506 deletions

85
Cargo.lock generated
View File

@@ -486,9 +486,9 @@ dependencies = [
[[package]]
name = "async-tungstenite"
version = "0.14.0"
version = "0.16.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8645e929ec7964448a901db9da30cd2ae8c7fecf4d6176af427837531dbbb63b"
checksum = "5682ea0913e5c20780fe5785abacb85a411e7437bf52a1bedb93ddb3972cb8dd"
dependencies = [
"async-tls",
"futures-io",
@@ -1326,9 +1326,9 @@ dependencies = [
[[package]]
name = "curl"
version = "0.4.38"
version = "0.4.42"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "003cb79c1c6d1c93344c7e1201bb51c2148f24ec2bd9c253709d6b2efb796515"
checksum = "7de97b894edd5b5bcceef8b78d7da9b75b1d2f2f9a910569d0bde3dd31d84939"
dependencies = [
"curl-sys",
"libc",
@@ -1341,9 +1341,9 @@ dependencies = [
[[package]]
name = "curl-sys"
version = "0.4.44+curl-7.77.0"
version = "0.4.52+curl-7.81.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4b6d85e9322b193f117c966e79c2d6929ec08c02f339f950044aba12e20bbaf1"
checksum = "14b8c2d1023ea5fded5b7b892e4b8e95f70038a421126a056761a84246a28971"
dependencies = [
"cc",
"libc",
@@ -1399,6 +1399,24 @@ dependencies = [
"const-oid",
]
[[package]]
name = "diagnostics"
version = "0.1.0"
dependencies = [
"anyhow",
"client",
"collections",
"editor",
"gpui",
"language",
"postage",
"project",
"serde_json",
"unindent",
"util",
"workspace",
]
[[package]]
name = "digest"
version = "0.8.1"
@@ -1525,9 +1543,11 @@ dependencies = [
"aho-corasick",
"anyhow",
"clock",
"collections",
"ctor",
"env_logger",
"gpui",
"itertools",
"language",
"lazy_static",
"log",
@@ -2432,15 +2452,6 @@ version = "0.2.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "64e9829a50b42bb782c1df523f78d332fe371b10c661e78b7a3c34b0198e9fac"
[[package]]
name = "input_buffer"
version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f97967975f448f1a7ddb12b0bc41069d09ed6a1c161a92687e057325db35d413"
dependencies = [
"bytes 1.0.1",
]
[[package]]
name = "instant"
version = "0.1.9"
@@ -2585,8 +2596,13 @@ name = "language"
version = "0.1.0"
dependencies = [
"anyhow",
"async-trait",
"clock",
"collections",
"ctor",
"env_logger",
"futures",
"fuzzy",
"gpui",
"lazy_static",
"log",
@@ -2597,7 +2613,9 @@ dependencies = [
"rpc",
"serde",
"similar",
"smallvec",
"smol",
"sum_tree",
"text",
"theme",
"tree-sitter",
@@ -3104,6 +3122,21 @@ version = "2.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "afb2e1c3ee07430c2cf76151675e583e0f19985fa6efae47d6848a3e2c824f85"
[[package]]
name = "outline"
version = "0.1.0"
dependencies = [
"editor",
"fuzzy",
"gpui",
"language",
"ordered-float",
"postage",
"smol",
"text",
"workspace",
]
[[package]]
name = "p256"
version = "0.9.0"
@@ -3449,6 +3482,7 @@ dependencies = [
"async-trait",
"client",
"clock",
"collections",
"fsevent",
"futures",
"fuzzy",
@@ -3484,6 +3518,7 @@ dependencies = [
"project",
"serde_json",
"theme",
"util",
"workspace",
]
@@ -3818,6 +3853,7 @@ dependencies = [
"async-tungstenite",
"base64 0.13.0",
"futures",
"gpui",
"log",
"parking_lot",
"postage",
@@ -4834,11 +4870,13 @@ dependencies = [
"ctor",
"env_logger",
"gpui",
"lazy_static",
"log",
"parking_lot",
"rand 0.8.3",
"smallvec",
"sum_tree",
"util",
]
[[package]]
@@ -5128,7 +5166,7 @@ dependencies = [
[[package]]
name = "tree-sitter-markdown"
version = "0.0.1"
source = "git+https://github.com/maxbrunsfeld/tree-sitter-markdown?rev=b2b4eefd51ada972ef8bb581b83b6b8e7a28c7a6#b2b4eefd51ada972ef8bb581b83b6b8e7a28c7a6"
source = "git+https://github.com/MDeiml/tree-sitter-markdown?rev=330ecab87a3e3a7211ac69bbadc19eabecdb1cca#330ecab87a3e3a7211ac69bbadc19eabecdb1cca"
dependencies = [
"cc",
"tree-sitter",
@@ -5158,16 +5196,15 @@ checksum = "85e00391c1f3d171490a3f8bd79999b0002ae38d3da0d6a3a306c754b053d71b"
[[package]]
name = "tungstenite"
version = "0.13.0"
version = "0.16.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5fe8dada8c1a3aeca77d6b51a4f1314e0f4b8e438b7b1b71e3ddaca8080e4093"
checksum = "6ad3713a14ae247f22a728a0456a545df14acf3867f905adff84be99e23b3ad1"
dependencies = [
"base64 0.13.0",
"byteorder",
"bytes 1.0.1",
"http",
"httparse",
"input_buffer",
"log",
"rand 0.8.3",
"sha-1 0.9.6",
@@ -5364,8 +5401,10 @@ name = "util"
version = "0.1.0"
dependencies = [
"anyhow",
"clock",
"futures",
"log",
"rand 0.8.3",
"serde_json",
"surf",
"tempdir",
@@ -5628,6 +5667,8 @@ version = "0.1.0"
dependencies = [
"anyhow",
"client",
"clock",
"collections",
"gpui",
"language",
"log",
@@ -5664,18 +5705,19 @@ checksum = "ec7a2a501ed189703dba8b08142f057e887dfc4b2cc4db2d343ac6376ba3e0b9"
[[package]]
name = "zed"
version = "0.9.0"
version = "0.12.0"
dependencies = [
"anyhow",
"async-recursion",
"async-trait",
"async-tungstenite",
"chat_panel",
"client",
"clock",
"collections",
"contacts_panel",
"crossbeam-channel",
"ctor",
"diagnostics",
"dirs 3.0.1",
"easy-parallel",
"editor",
@@ -5698,6 +5740,7 @@ dependencies = [
"log-panics",
"lsp",
"num_cpus",
"outline",
"parking_lot",
"postage",
"project",

2
Procfile Normal file
View File

@@ -0,0 +1,2 @@
web: cd ../zed.dev && PORT=3000 npx next dev
collab: cd crates/server && cargo run

View File

@@ -6,17 +6,41 @@ Welcome to Zed, a lightning-fast, collaborative code editor that makes your drea
## Development tips
### Compiling on macOS Monterey
### Testing against locally-running servers
The Zed server uses libcurl, which currently triggers [a bug](https://github.com/rust-lang/rust/issues/90342) in `rustc`. To work around this bug, export the following environment variable:
Make sure you have `zed.dev` cloned as a sibling to this repo.
```
export MACOSX_DEPLOYMENT_TARGET=10.7
cd ..
git clone https://github.com/zed-industries/zed.dev
```
Make sure your local database is created, migrated, and seeded with initial data. Install [Postgres](https://postgresapp.com), then from the `zed` repository root, run:
```
script/sqlx database create
script/sqlx migrate run
script/seed-db
```
Run `zed.dev` and the collaboration server.
```
brew install foreman
foreman start
```
If you want to run Zed pointed at the local servers, you can run:
```
script/zed_with_local_servers
# or...
script/zed_with_local_servers --release
```
### Dump element JSON
If you trigger `cmd-shift-i`, Zed will copy a JSON representation of the current window contents to the clipboard. You can paste this in a tool like [DJSON](https://chrome.google.com/webstore/detail/djson-json-viewer-formatt/chaeijjekipecdajnijdldjjipaegdjc?hl=en) to navigate the state of on-screen elements in a structured way.
If you trigger `cmd-alt-i`, Zed will copy a JSON representation of the current window contents to the clipboard. You can paste this in a tool like [DJSON](https://chrome.google.com/webstore/detail/djson-json-viewer-formatt/chaeijjekipecdajnijdldjjipaegdjc?hl=en) to navigate the state of on-screen elements in a structured way.
## Roadmap
@@ -34,12 +58,12 @@ Establish basic infrastructure for building the app bundle and uploading an arti
[Tracking issue](https://github.com/zed-industries/zed/issues/6)
Turn the minimal text editor into a collaborative *code* editor. This will include the minimal features that the Zed team needs to collaborate in Zed to build Zed without net loss in developer productivity. This includes productivity-critical features such as:
Turn the minimal text editor into a collaborative _code_ editor. This will include the minimal features that the Zed team needs to collaborate in Zed to build Zed without net loss in developer productivity. This includes productivity-critical features such as:
* Syntax highlighting and syntax-aware editing and navigation
* The ability to see and edit non-local working copies of a repository
* Language server support for Rust code navigation, refactoring, diagnostics, etc.
* Project browsing and project-wide search and replace
- Syntax highlighting and syntax-aware editing and navigation
- The ability to see and edit non-local working copies of a repository
- Language server support for Rust code navigation, refactoring, diagnostics, etc.
- Project browsing and project-wide search and replace
We want to tackle collaboration fairly early so that the rest of the design of the product can flow around that assumption. We could probably produce a single-player code editor more quickly, but at the risk of having collaboration feel more "bolted on" when we eventually add it.

View File

@@ -56,14 +56,14 @@ impl ChatPanel {
4,
{
let settings = settings.clone();
move |_| {
Arc::new(move |_| {
let settings = settings.borrow();
EditorSettings {
tab_size: settings.tab_size,
style: settings.theme.chat_panel.input_editor.as_editor(),
soft_wrap: editor::SoftWrap::EditorWidth,
}
}
})
},
cx,
)
@@ -233,7 +233,7 @@ impl ChatPanel {
Empty::new().boxed()
};
Expanded::new(1., messages).boxed()
Flexible::new(1., true, messages).boxed()
}
fn render_message(&self, message: &ChannelMessage) -> ElementBox {

View File

@@ -7,7 +7,7 @@ edition = "2018"
path = "src/client.rs"
[features]
test-support = ["rpc/test-support"]
test-support = ["gpui/test-support", "rpc/test-support"]
[dependencies]
gpui = { path = "../gpui" }
@@ -16,7 +16,7 @@ rpc = { path = "../rpc" }
sum_tree = { path = "../sum_tree" }
anyhow = "1.0.38"
async-recursion = "0.3"
async-tungstenite = { version = "0.14", features = ["async-tls"] }
async-tungstenite = { version = "0.16", features = ["async-tls"] }
futures = "0.3"
image = "0.23"
lazy_static = "1.4.0"
@@ -29,3 +29,7 @@ surf = "2.2"
thiserror = "1.0.29"
time = "0.3"
tiny_http = "0.8"
[dev-dependencies]
gpui = { path = "../gpui", features = ["test-support"] }
rpc = { path = "../rpc", features = ["test-support"] }

View File

@@ -599,8 +599,8 @@ mod tests {
#[gpui::test]
async fn test_channel_messages(mut cx: TestAppContext) {
let user_id = 5;
let mut client = Client::new();
let http_client = FakeHttpClient::new(|_| async move { Ok(Response::new(404)) });
let mut client = Client::new(http_client.clone());
let server = FakeServer::for_client(user_id, &mut client, &cx).await;
let user_store = cx.add_model(|cx| UserStore::new(client.clone(), http_client, cx));

View File

@@ -11,10 +11,12 @@ use async_tungstenite::tungstenite::{
error::Error as WebsocketError,
http::{Request, StatusCode},
};
use futures::StreamExt;
use gpui::{action, AsyncAppContext, Entity, ModelContext, MutableAppContext, Task};
use http::HttpClient;
use lazy_static::lazy_static;
use parking_lot::RwLock;
use postage::{prelude::Stream, watch};
use postage::watch;
use rand::prelude::*;
use rpc::proto::{AnyTypedEnvelope, EntityMessage, EnvelopedMessage, RequestMessage};
use std::{
@@ -26,7 +28,7 @@ use std::{
sync::{Arc, Weak},
time::{Duration, Instant},
};
use surf::Url;
use surf::{http::Method, Url};
use thiserror::Error;
use util::{ResultExt, TryFutureExt};
@@ -36,7 +38,7 @@ pub use user::*;
lazy_static! {
static ref ZED_SERVER_URL: String =
std::env::var("ZED_SERVER_URL").unwrap_or("https://zed.dev:443".to_string());
std::env::var("ZED_SERVER_URL").unwrap_or("https://zed.dev".to_string());
static ref IMPERSONATE_LOGIN: Option<String> = std::env::var("ZED_IMPERSONATE")
.ok()
.and_then(|s| if s.is_empty() { None } else { Some(s) });
@@ -54,6 +56,7 @@ pub fn init(rpc: Arc<Client>, cx: &mut MutableAppContext) {
pub struct Client {
peer: Arc<Peer>,
http: Arc<dyn HttpClient>,
state: RwLock<ClientState>,
authenticate:
Option<Box<dyn 'static + Send + Sync + Fn(&AsyncAppContext) -> Task<Result<Credentials>>>>,
@@ -122,14 +125,14 @@ struct ClientState {
status: (watch::Sender<Status>, watch::Receiver<Status>),
entity_id_extractors: HashMap<TypeId, Box<dyn Send + Sync + Fn(&dyn AnyTypedEnvelope) -> u64>>,
model_handlers: HashMap<
(TypeId, u64),
Box<dyn Send + Sync + FnMut(Box<dyn AnyTypedEnvelope>, &mut AsyncAppContext)>,
(TypeId, Option<u64>),
Option<Box<dyn Send + Sync + FnMut(Box<dyn AnyTypedEnvelope>, &mut AsyncAppContext)>>,
>,
_maintain_connection: Option<Task<()>>,
heartbeat_interval: Duration,
}
#[derive(Clone)]
#[derive(Clone, Debug)]
pub struct Credentials {
pub user_id: u64,
pub access_token: String,
@@ -150,28 +153,23 @@ impl Default for ClientState {
pub struct Subscription {
client: Weak<Client>,
id: (TypeId, u64),
id: (TypeId, Option<u64>),
}
impl Drop for Subscription {
fn drop(&mut self) {
if let Some(client) = self.client.upgrade() {
drop(
client
.state
.write()
.model_handlers
.remove(&self.id)
.unwrap(),
);
let mut state = client.state.write();
let _ = state.model_handlers.remove(&self.id).unwrap();
}
}
}
impl Client {
pub fn new() -> Arc<Self> {
pub fn new(http: Arc<dyn HttpClient>) -> Arc<Self> {
Arc::new(Self {
peer: Peer::new(),
http,
state: Default::default(),
authenticate: None,
establish_connection: None,
@@ -269,20 +267,13 @@ impl Client {
+ Sync
+ FnMut(&mut M, TypedEnvelope<T>, Arc<Self>, &mut ModelContext<M>) -> Result<()>,
{
let subscription_id = (TypeId::of::<T>(), Default::default());
let subscription_id = (TypeId::of::<T>(), None);
let client = self.clone();
let mut state = self.state.write();
let model = cx.weak_handle();
let prev_extractor = state
.entity_id_extractors
.insert(subscription_id.0, Box::new(|_| Default::default()));
if prev_extractor.is_some() {
panic!("registered a handler for the same entity twice")
}
state.model_handlers.insert(
let prev_handler = state.model_handlers.insert(
subscription_id,
Box::new(move |envelope, cx| {
Some(Box::new(move |envelope, cx| {
if let Some(model) = model.upgrade(cx) {
let envelope = envelope.into_any().downcast::<TypedEnvelope<T>>().unwrap();
model.update(cx, |model, cx| {
@@ -291,8 +282,11 @@ impl Client {
}
});
}
}),
})),
);
if prev_handler.is_some() {
panic!("registered handler for the same message twice");
}
Subscription {
client: Arc::downgrade(self),
@@ -314,7 +308,7 @@ impl Client {
+ Sync
+ FnMut(&mut M, TypedEnvelope<T>, Arc<Self>, &mut ModelContext<M>) -> Result<()>,
{
let subscription_id = (TypeId::of::<T>(), remote_id);
let subscription_id = (TypeId::of::<T>(), Some(remote_id));
let client = self.clone();
let mut state = self.state.write();
let model = cx.weak_handle();
@@ -332,7 +326,7 @@ impl Client {
});
let prev_handler = state.model_handlers.insert(
subscription_id,
Box::new(move |envelope, cx| {
Some(Box::new(move |envelope, cx| {
if let Some(model) = model.upgrade(cx) {
let envelope = envelope.into_any().downcast::<TypedEnvelope<T>>().unwrap();
model.update(cx, |model, cx| {
@@ -341,7 +335,7 @@ impl Client {
}
});
}
}),
})),
);
if prev_handler.is_some() {
panic!("registered a handler for the same entity twice")
@@ -353,6 +347,10 @@ impl Client {
}
}
pub fn has_keychain_credentials(&self, cx: &AsyncAppContext) -> bool {
read_credentials_from_keychain(cx).is_some()
}
#[async_recursion(?Send)]
pub async fn authenticate_and_connect(
self: &Arc<Self>,
@@ -403,7 +401,6 @@ impl Client {
match self.establish_connection(&credentials, cx).await {
Ok(conn) => {
log::info!("connected to rpc address {}", *ZED_SERVER_URL);
self.state.write().credentials = Some(credentials.clone());
if !used_keychain && IMPERSONATE_LOGIN.is_none() {
write_credentials_to_keychain(&credentials, cx).log_err();
@@ -440,29 +437,29 @@ impl Client {
let mut cx = cx.clone();
let this = self.clone();
async move {
while let Some(message) = incoming.recv().await {
while let Some(message) = incoming.next().await {
let mut state = this.state.write();
if let Some(extract_entity_id) =
let payload_type_id = message.payload_type_id();
let entity_id = if let Some(extract_entity_id) =
state.entity_id_extractors.get(&message.payload_type_id())
{
let payload_type_id = message.payload_type_id();
let entity_id = (extract_entity_id)(message.as_ref());
let handler_key = (payload_type_id, entity_id);
if let Some(mut handler) = state.model_handlers.remove(&handler_key) {
drop(state); // Avoid deadlocks if the handler interacts with rpc::Client
let start_time = Instant::now();
log::info!("RPC client message {}", message.payload_type_name());
(handler)(message, &mut cx);
log::info!(
"RPC message handled. duration:{:?}",
start_time.elapsed()
);
this.state
.write()
.model_handlers
.insert(handler_key, handler);
} else {
log::info!("unhandled message {}", message.payload_type_name());
Some((extract_entity_id)(message.as_ref()))
} else {
None
};
let handler_key = (payload_type_id, entity_id);
if let Some(handler) = state.model_handlers.get_mut(&handler_key) {
let mut handler = handler.take().unwrap();
drop(state); // Avoid deadlocks if the handler interacts with rpc::Client
let start_time = Instant::now();
log::info!("RPC client message {}", message.payload_type_name());
(handler)(message, &mut cx);
log::info!("RPC message handled. duration:{:?}", start_time.elapsed());
let mut state = this.state.write();
if state.model_handlers.contains_key(&handler_key) {
state.model_handlers.insert(handler_key, Some(handler));
}
} else {
log::info!("unhandled message {}", message.payload_type_name());
@@ -521,20 +518,57 @@ impl Client {
format!("{} {}", credentials.user_id, credentials.access_token),
)
.header("X-Zed-Protocol-Version", rpc::PROTOCOL_VERSION);
let http = self.http.clone();
cx.background().spawn(async move {
if let Some(host) = ZED_SERVER_URL.strip_prefix("https://") {
let stream = smol::net::TcpStream::connect(host).await?;
let request = request.uri(format!("wss://{}/rpc", host)).body(())?;
let (stream, _) =
async_tungstenite::async_tls::client_async_tls(request, stream).await?;
Ok(Connection::new(stream))
} else if let Some(host) = ZED_SERVER_URL.strip_prefix("http://") {
let stream = smol::net::TcpStream::connect(host).await?;
let request = request.uri(format!("ws://{}/rpc", host)).body(())?;
let (stream, _) = async_tungstenite::client_async(request, stream).await?;
Ok(Connection::new(stream))
} else {
Err(anyhow!("invalid server url: {}", *ZED_SERVER_URL))?
let mut rpc_url = format!("{}/rpc", *ZED_SERVER_URL);
let rpc_request = surf::Request::new(
Method::Get,
surf::Url::parse(&rpc_url).context("invalid ZED_SERVER_URL")?,
);
let rpc_response = http.send(rpc_request).await?;
if rpc_response.status().is_redirection() {
rpc_url = rpc_response
.header("Location")
.ok_or_else(|| anyhow!("missing location header in /rpc response"))?
.as_str()
.to_string();
}
// Until we switch the zed.dev domain to point to the new Next.js app, there
// will be no redirect required, and the app will connect directly to
// wss://zed.dev/rpc.
else if rpc_response.status() != surf::StatusCode::UpgradeRequired {
Err(anyhow!(
"unexpected /rpc response status {}",
rpc_response.status()
))?
}
let mut rpc_url = surf::Url::parse(&rpc_url).context("invalid rpc url")?;
let rpc_host = rpc_url
.host_str()
.zip(rpc_url.port_or_known_default())
.ok_or_else(|| anyhow!("missing host in rpc url"))?;
let stream = smol::net::TcpStream::connect(rpc_host).await?;
log::info!("connected to rpc endpoint {}", rpc_url);
match rpc_url.scheme() {
"https" => {
rpc_url.set_scheme("wss").unwrap();
let request = request.uri(rpc_url.as_str()).body(())?;
let (stream, _) =
async_tungstenite::async_tls::client_async_tls(request, stream).await?;
Ok(Connection::new(stream))
}
"http" => {
rpc_url.set_scheme("ws").unwrap();
let request = request.uri(rpc_url.as_str()).body(())?;
let (stream, _) = async_tungstenite::client_async(request, stream).await?;
Ok(Connection::new(stream))
}
_ => Err(anyhow!("invalid rpc url: {}", rpc_url))?,
}
})
}
@@ -561,7 +595,7 @@ impl Client {
// Open the Zed sign-in page in the user's browser, with query parameters that indicate
// that the user is signing in from a Zed app running on the same device.
let mut url = format!(
"{}/sign_in?native_app_port={}&native_app_public_key={}",
"{}/native_app_signin?native_app_port={}&native_app_public_key={}",
*ZED_SERVER_URL, port, public_key_string
);
@@ -592,9 +626,16 @@ impl Client {
user_id = Some(value.to_string());
}
}
let post_auth_url =
format!("{}/native_app_signin_succeeded", *ZED_SERVER_URL);
req.respond(
tiny_http::Response::from_string(LOGIN_RESPONSE).with_header(
tiny_http::Header::from_bytes("Content-Type", "text/html").unwrap(),
tiny_http::Response::empty(302).with_header(
tiny_http::Header::from_bytes(
&b"Location"[..],
post_auth_url.as_bytes(),
)
.unwrap(),
),
)
.context("failed to respond to login http request")?;
@@ -621,9 +662,9 @@ impl Client {
})
}
pub async fn disconnect(self: &Arc<Self>, cx: &AsyncAppContext) -> Result<()> {
pub fn disconnect(self: &Arc<Self>, cx: &AsyncAppContext) -> Result<()> {
let conn_id = self.connection_id()?;
self.peer.disconnect(conn_id).await;
self.peer.disconnect(conn_id);
self.set_status(Status::SignedOut, cx);
Ok(())
}
@@ -651,6 +692,14 @@ impl Client {
) -> impl Future<Output = Result<()>> {
self.peer.respond(receipt, response)
}
pub fn respond_with_error<T: RequestMessage>(
&self,
receipt: Receipt<T>,
error: proto::Error,
) -> impl Future<Output = Result<()>> {
self.peer.respond_with_error(receipt, error)
}
}
fn read_credentials_from_keychain(cx: &AsyncAppContext) -> Option<Credentials> {
@@ -694,17 +743,10 @@ pub fn decode_worktree_url(url: &str) -> Option<(u64, String)> {
Some((id, access_token.to_string()))
}
const LOGIN_RESPONSE: &'static str = "
<!DOCTYPE html>
<html>
<script>window.close();</script>
</html>
";
#[cfg(test)]
mod tests {
use super::*;
use crate::test::FakeServer;
use crate::test::{FakeHttpClient, FakeServer};
use gpui::TestAppContext;
#[gpui::test(iterations = 10)]
@@ -712,7 +754,7 @@ mod tests {
cx.foreground().forbid_parking();
let user_id = 5;
let mut client = Client::new();
let mut client = Client::new(FakeHttpClient::with_404_response());
let server = FakeServer::for_client(user_id, &mut client, &cx).await;
cx.foreground().advance_clock(Duration::from_secs(10));
@@ -723,7 +765,7 @@ mod tests {
let ping = server.receive::<proto::Ping>().await.unwrap();
server.respond(ping.receipt(), proto::Ack {}).await;
client.disconnect(&cx.to_async()).await.unwrap();
client.disconnect(&cx.to_async()).unwrap();
assert!(server.receive::<proto::Ping>().await.is_err());
}
@@ -732,27 +774,27 @@ mod tests {
cx.foreground().forbid_parking();
let user_id = 5;
let mut client = Client::new();
let mut client = Client::new(FakeHttpClient::with_404_response());
let server = FakeServer::for_client(user_id, &mut client, &cx).await;
let mut status = client.status();
assert!(matches!(
status.recv().await,
status.next().await,
Some(Status::Connected { .. })
));
assert_eq!(server.auth_count(), 1);
server.forbid_connections();
server.disconnect().await;
while !matches!(status.recv().await, Some(Status::ReconnectionError { .. })) {}
server.disconnect();
while !matches!(status.next().await, Some(Status::ReconnectionError { .. })) {}
server.allow_connections();
cx.foreground().advance_clock(Duration::from_secs(10));
while !matches!(status.recv().await, Some(Status::Connected { .. })) {}
while !matches!(status.next().await, Some(Status::Connected { .. })) {}
assert_eq!(server.auth_count(), 1); // Client reused the cached credentials when reconnecting
server.forbid_connections();
server.disconnect().await;
while !matches!(status.recv().await, Some(Status::ReconnectionError { .. })) {}
server.disconnect();
while !matches!(status.next().await, Some(Status::ReconnectionError { .. })) {}
// Clear cached credentials after authentication fails
server.roll_access_token();
@@ -760,7 +802,7 @@ mod tests {
cx.foreground().advance_clock(Duration::from_secs(10));
assert_eq!(server.auth_count(), 1);
cx.foreground().advance_clock(Duration::from_secs(10));
while !matches!(status.recv().await, Some(Status::Connected { .. })) {}
while !matches!(status.next().await, Some(Status::Connected { .. })) {}
assert_eq!(server.auth_count(), 2); // Client re-authenticated due to an invalid token
}
@@ -774,4 +816,113 @@ mod tests {
);
assert_eq!(decode_worktree_url("not://the-right-format"), None);
}
#[gpui::test]
async fn test_subscribing_to_entity(mut cx: TestAppContext) {
cx.foreground().forbid_parking();
let user_id = 5;
let mut client = Client::new(FakeHttpClient::with_404_response());
let server = FakeServer::for_client(user_id, &mut client, &cx).await;
let model = cx.add_model(|_| Model { subscription: None });
let (mut done_tx1, mut done_rx1) = postage::oneshot::channel();
let (mut done_tx2, mut done_rx2) = postage::oneshot::channel();
let _subscription1 = model.update(&mut cx, |_, cx| {
client.subscribe_to_entity(
1,
cx,
move |_, _: TypedEnvelope<proto::UnshareProject>, _, _| {
postage::sink::Sink::try_send(&mut done_tx1, ()).unwrap();
Ok(())
},
)
});
let _subscription2 = model.update(&mut cx, |_, cx| {
client.subscribe_to_entity(
2,
cx,
move |_, _: TypedEnvelope<proto::UnshareProject>, _, _| {
postage::sink::Sink::try_send(&mut done_tx2, ()).unwrap();
Ok(())
},
)
});
// Ensure dropping a subscription for the same entity type still allows receiving of
// messages for other entity IDs of the same type.
let subscription3 = model.update(&mut cx, |_, cx| {
client.subscribe_to_entity(
3,
cx,
move |_, _: TypedEnvelope<proto::UnshareProject>, _, _| Ok(()),
)
});
drop(subscription3);
server.send(proto::UnshareProject { project_id: 1 }).await;
server.send(proto::UnshareProject { project_id: 2 }).await;
done_rx1.next().await.unwrap();
done_rx2.next().await.unwrap();
}
#[gpui::test]
async fn test_subscribing_after_dropping_subscription(mut cx: TestAppContext) {
cx.foreground().forbid_parking();
let user_id = 5;
let mut client = Client::new(FakeHttpClient::with_404_response());
let server = FakeServer::for_client(user_id, &mut client, &cx).await;
let model = cx.add_model(|_| Model { subscription: None });
let (mut done_tx1, _done_rx1) = postage::oneshot::channel();
let (mut done_tx2, mut done_rx2) = postage::oneshot::channel();
let subscription1 = model.update(&mut cx, |_, cx| {
client.subscribe(cx, move |_, _: TypedEnvelope<proto::Ping>, _, _| {
postage::sink::Sink::try_send(&mut done_tx1, ()).unwrap();
Ok(())
})
});
drop(subscription1);
let _subscription2 = model.update(&mut cx, |_, cx| {
client.subscribe(cx, move |_, _: TypedEnvelope<proto::Ping>, _, _| {
postage::sink::Sink::try_send(&mut done_tx2, ()).unwrap();
Ok(())
})
});
server.send(proto::Ping {}).await;
done_rx2.next().await.unwrap();
}
#[gpui::test]
async fn test_dropping_subscription_in_handler(mut cx: TestAppContext) {
cx.foreground().forbid_parking();
let user_id = 5;
let mut client = Client::new(FakeHttpClient::with_404_response());
let server = FakeServer::for_client(user_id, &mut client, &cx).await;
let model = cx.add_model(|_| Model { subscription: None });
let (mut done_tx, mut done_rx) = postage::oneshot::channel();
model.update(&mut cx, |model, cx| {
model.subscription = Some(client.subscribe(
cx,
move |model, _: TypedEnvelope<proto::Ping>, _, _| {
model.subscription.take();
postage::sink::Sink::try_send(&mut done_tx, ()).unwrap();
Ok(())
},
));
});
server.send(proto::Ping {}).await;
done_rx.next().await.unwrap();
}
struct Model {
subscription: Option<Subscription>,
}
impl Entity for Model {
type Event = ();
}
}

View File

@@ -1,10 +1,9 @@
use super::Client;
use super::*;
use crate::http::{HttpClient, Request, Response, ServerResponse};
use futures::{future::BoxFuture, Future};
use futures::{future::BoxFuture, stream::BoxStream, Future, StreamExt};
use gpui::{ModelHandle, TestAppContext};
use parking_lot::Mutex;
use postage::{mpsc, prelude::Stream};
use rpc::{proto, ConnectionId, Peer, Receipt, TypedEnvelope};
use std::fmt;
use std::sync::atomic::Ordering::SeqCst;
@@ -15,7 +14,7 @@ use std::sync::{
pub struct FakeServer {
peer: Arc<Peer>,
incoming: Mutex<Option<mpsc::Receiver<Box<dyn proto::AnyTypedEnvelope>>>>,
incoming: Mutex<Option<BoxStream<'static, Box<dyn proto::AnyTypedEnvelope>>>>,
connection_id: Mutex<Option<ConnectionId>>,
forbid_connections: AtomicBool,
auth_count: AtomicUsize,
@@ -72,8 +71,8 @@ impl FakeServer {
server
}
pub async fn disconnect(&self) {
self.peer.disconnect(self.connection_id()).await;
pub fn disconnect(&self) {
self.peer.disconnect(self.connection_id());
self.connection_id.lock().take();
self.incoming.lock().take();
}
@@ -129,7 +128,7 @@ impl FakeServer {
.lock()
.as_mut()
.expect("not connected")
.recv()
.next()
.await
.ok_or_else(|| anyhow!("other half hung up"))?;
let type_name = message.payload_type_name();

View File

@@ -22,14 +22,14 @@ pub struct User {
#[derive(Debug)]
pub struct Contact {
pub user: Arc<User>,
pub worktrees: Vec<WorktreeMetadata>,
pub projects: Vec<ProjectMetadata>,
}
#[derive(Debug)]
pub struct WorktreeMetadata {
pub struct ProjectMetadata {
pub id: u64,
pub root_name: String,
pub is_shared: bool,
pub worktree_root_names: Vec<String>,
pub guests: Vec<Arc<User>>,
}
@@ -112,7 +112,7 @@ impl UserStore {
let mut user_ids = HashSet::new();
for contact in &message.contacts {
user_ids.insert(contact.user_id);
user_ids.extend(contact.worktrees.iter().flat_map(|w| &w.guests).copied());
user_ids.extend(contact.projects.iter().flat_map(|w| &w.guests).copied());
}
let load_users = self.load_users(user_ids.into_iter().collect(), cx);
@@ -221,10 +221,10 @@ impl Contact {
user_store.fetch_user(contact.user_id, cx)
})
.await?;
let mut worktrees = Vec::new();
for worktree in contact.worktrees {
let mut projects = Vec::new();
for project in contact.projects {
let mut guests = Vec::new();
for participant_id in worktree.guests {
for participant_id in project.guests {
guests.push(
user_store
.update(cx, |user_store, cx| {
@@ -233,14 +233,14 @@ impl Contact {
.await?,
);
}
worktrees.push(WorktreeMetadata {
id: worktree.id,
root_name: worktree.root_name,
is_shared: worktree.is_shared,
projects.push(ProjectMetadata {
id: project.id,
worktree_root_names: project.worktree_root_names.clone(),
is_shared: project.is_shared,
guests,
});
}
Ok(Self { user, worktrees })
Ok(Self { user, projects })
}
}

View File

@@ -21,6 +21,15 @@ pub struct Lamport {
}
impl Local {
pub const MIN: Self = Self {
replica_id: ReplicaId::MIN,
value: Seq::MIN,
};
pub const MAX: Self = Self {
replica_id: ReplicaId::MAX,
value: Seq::MAX,
};
pub fn new(replica_id: ReplicaId) -> Self {
Self {
replica_id,
@@ -169,7 +178,7 @@ impl Global {
}
}
pub fn ge(&self, other: &Self) -> bool {
pub fn observed_all(&self, other: &Self) -> bool {
let mut lhs = self.0.iter();
let mut rhs = other.0.iter();
loop {
@@ -187,22 +196,16 @@ impl Global {
}
}
pub fn gt(&self, other: &Self) -> bool {
let mut lhs = self.0.iter();
let mut rhs = other.0.iter();
loop {
if let Some(left) = lhs.next() {
if let Some(right) = rhs.next() {
if left <= right {
return false;
}
} else {
return true;
}
} else {
return rhs.next().is_none();
pub fn changed_since(&self, other: &Self) -> bool {
if self.0.len() > other.0.len() {
return true;
}
for (left, right) in self.0.iter().zip(other.0.iter()) {
if left > right {
return true;
}
}
false
}
pub fn iter<'a>(&'a self) -> impl 'a + Iterator<Item = Local> {

View File

@@ -1,27 +1,15 @@
use std::sync::Arc;
use client::{Contact, UserStore};
use gpui::{
action,
elements::*,
geometry::{rect::RectF, vector::vec2f},
platform::CursorStyle,
Element, ElementBox, Entity, LayoutContext, ModelHandle, MutableAppContext, RenderContext,
Subscription, View, ViewContext,
Element, ElementBox, Entity, LayoutContext, ModelHandle, RenderContext, Subscription, View,
ViewContext,
};
use postage::watch;
use theme::Theme;
use workspace::{Settings, Workspace};
action!(JoinWorktree, u64);
action!(LeaveWorktree, u64);
action!(ShareWorktree, u64);
action!(UnshareWorktree, u64);
pub fn init(cx: &mut MutableAppContext) {
cx.add_action(ContactsPanel::share_worktree);
cx.add_action(ContactsPanel::unshare_worktree);
cx.add_action(ContactsPanel::join_worktree);
cx.add_action(ContactsPanel::leave_worktree);
}
use workspace::{AppState, JoinProject, JoinProjectParams, Settings};
pub struct ContactsPanel {
contacts: ListState,
@@ -31,78 +19,33 @@ pub struct ContactsPanel {
}
impl ContactsPanel {
pub fn new(
user_store: ModelHandle<UserStore>,
settings: watch::Receiver<Settings>,
cx: &mut ViewContext<Self>,
) -> Self {
pub fn new(app_state: Arc<AppState>, cx: &mut ViewContext<Self>) -> Self {
Self {
contacts: ListState::new(
user_store.read(cx).contacts().len(),
app_state.user_store.read(cx).contacts().len(),
Orientation::Top,
1000.,
{
let user_store = user_store.clone();
let settings = settings.clone();
let app_state = app_state.clone();
move |ix, cx| {
let user_store = user_store.read(cx);
let user_store = app_state.user_store.read(cx);
let contacts = user_store.contacts().clone();
let current_user_id = user_store.current_user().map(|user| user.id);
Self::render_collaborator(
&contacts[ix],
current_user_id,
&settings.borrow().theme,
app_state.clone(),
cx,
)
}
},
),
_maintain_contacts: cx.observe(&user_store, Self::update_contacts),
user_store,
settings,
_maintain_contacts: cx.observe(&app_state.user_store, Self::update_contacts),
user_store: app_state.user_store.clone(),
settings: app_state.settings.clone(),
}
}
fn share_worktree(
workspace: &mut Workspace,
action: &ShareWorktree,
cx: &mut ViewContext<Workspace>,
) {
workspace
.project()
.update(cx, |p, cx| p.share_worktree(action.0, cx));
}
fn unshare_worktree(
workspace: &mut Workspace,
action: &UnshareWorktree,
cx: &mut ViewContext<Workspace>,
) {
workspace
.project()
.update(cx, |p, cx| p.unshare_worktree(action.0, cx));
}
fn join_worktree(
workspace: &mut Workspace,
action: &JoinWorktree,
cx: &mut ViewContext<Workspace>,
) {
workspace
.project()
.update(cx, |p, cx| p.add_remote_worktree(action.0, cx).detach());
}
fn leave_worktree(
workspace: &mut Workspace,
action: &LeaveWorktree,
cx: &mut ViewContext<Workspace>,
) {
workspace
.project()
.update(cx, |p, cx| p.close_remote_worktree(action.0, cx));
}
fn update_contacts(&mut self, _: ModelHandle<UserStore>, cx: &mut ViewContext<Self>) {
self.contacts
.reset(self.user_store.read(cx).contacts().len());
@@ -112,20 +55,16 @@ impl ContactsPanel {
fn render_collaborator(
collaborator: &Contact,
current_user_id: Option<u64>,
theme: &Theme,
app_state: Arc<AppState>,
cx: &mut LayoutContext,
) -> ElementBox {
let theme = &theme.contacts_panel;
let worktree_count = collaborator.worktrees.len();
let theme = &app_state.settings.borrow().theme.contacts_panel;
let project_count = collaborator.projects.len();
let font_cache = cx.font_cache();
let line_height = theme.unshared_worktree.name.text.line_height(font_cache);
let cap_height = theme.unshared_worktree.name.text.cap_height(font_cache);
let baseline_offset = theme
.unshared_worktree
.name
.text
.baseline_offset(font_cache)
+ (theme.unshared_worktree.height - line_height) / 2.;
let line_height = theme.unshared_project.name.text.line_height(font_cache);
let cap_height = theme.unshared_project.name.text.cap_height(font_cache);
let baseline_offset = theme.unshared_project.name.text.baseline_offset(font_cache)
+ (theme.unshared_project.height - line_height) / 2.;
let tree_branch_width = theme.tree_branch_width;
let tree_branch_color = theme.tree_branch_color;
let host_avatar_height = theme
@@ -161,11 +100,11 @@ impl ContactsPanel {
)
.with_children(
collaborator
.worktrees
.projects
.iter()
.enumerate()
.map(|(ix, worktree)| {
let worktree_id = worktree.id;
.map(|(ix, project)| {
let project_id = project.id;
Flex::row()
.with_child(
@@ -182,7 +121,7 @@ impl ContactsPanel {
vec2f(start_x, start_y),
vec2f(
start_x + tree_branch_width,
if ix + 1 == worktree_count {
if ix + 1 == project_count {
end_y
} else {
bounds.max_y()
@@ -210,28 +149,28 @@ impl ContactsPanel {
.with_child({
let is_host = Some(collaborator.user.id) == current_user_id;
let is_guest = !is_host
&& worktree
&& project
.guests
.iter()
.any(|guest| Some(guest.id) == current_user_id);
let is_shared = worktree.is_shared;
let is_shared = project.is_shared;
let app_state = app_state.clone();
MouseEventHandler::new::<ContactsPanel, _, _, _>(
worktree_id as usize,
project_id as usize,
cx,
|mouse_state, _| {
let style = match (worktree.is_shared, mouse_state.hovered)
{
(false, false) => &theme.unshared_worktree,
(false, true) => &theme.hovered_unshared_worktree,
(true, false) => &theme.shared_worktree,
(true, true) => &theme.hovered_shared_worktree,
let style = match (project.is_shared, mouse_state.hovered) {
(false, false) => &theme.unshared_project,
(false, true) => &theme.hovered_unshared_project,
(true, false) => &theme.shared_project,
(true, true) => &theme.hovered_shared_project,
};
Flex::row()
.with_child(
Label::new(
worktree.root_name.clone(),
project.worktree_root_names.join(", "),
style.name.text.clone(),
)
.aligned()
@@ -240,7 +179,7 @@ impl ContactsPanel {
.with_style(style.name.container)
.boxed(),
)
.with_children(worktree.guests.iter().filter_map(
.with_children(project.guests.iter().filter_map(
|participant| {
participant.avatar.clone().map(|avatar| {
Image::new(avatar)
@@ -268,23 +207,18 @@ impl ContactsPanel {
CursorStyle::Arrow
})
.on_click(move |cx| {
if is_shared {
if is_host {
cx.dispatch_action(UnshareWorktree(worktree_id));
} else if is_guest {
cx.dispatch_action(LeaveWorktree(worktree_id));
} else {
cx.dispatch_action(JoinWorktree(worktree_id))
}
} else if is_host {
cx.dispatch_action(ShareWorktree(worktree_id));
if !is_host && !is_guest {
cx.dispatch_global_action(JoinProject(JoinProjectParams {
project_id,
app_state: app_state.clone(),
}));
}
})
.expanded(1.0)
.flexible(1., true)
.boxed()
})
.constrained()
.with_height(theme.unshared_worktree.height)
.with_height(theme.unshared_project.height)
.boxed()
}),
)

View File

@@ -0,0 +1,27 @@
[package]
name = "diagnostics"
version = "0.1.0"
edition = "2021"
[lib]
path = "src/diagnostics.rs"
[dependencies]
anyhow = "1.0"
collections = { path = "../collections" }
editor = { path = "../editor" }
language = { path = "../language" }
gpui = { path = "../gpui" }
project = { path = "../project" }
util = { path = "../util" }
workspace = { path = "../workspace" }
postage = { version = "0.4", features = ["futures-traits"] }
[dev-dependencies]
unindent = "0.1"
client = { path = "../client", features = ["test-support"] }
editor = { path = "../editor", features = ["test-support"] }
language = { path = "../language", features = ["test-support"] }
gpui = { path = "../gpui", features = ["test-support"] }
workspace = { path = "../workspace", features = ["test-support"] }
serde_json = { version = "1", features = ["preserve_order"] }

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,87 @@
use gpui::{
elements::*, platform::CursorStyle, Entity, ModelHandle, RenderContext, View, ViewContext,
};
use postage::watch;
use project::Project;
use std::fmt::Write;
use workspace::{Settings, StatusItemView};
pub struct DiagnosticSummary {
settings: watch::Receiver<Settings>,
summary: project::DiagnosticSummary,
in_progress: bool,
}
impl DiagnosticSummary {
pub fn new(
project: &ModelHandle<Project>,
settings: watch::Receiver<Settings>,
cx: &mut ViewContext<Self>,
) -> Self {
cx.subscribe(project, |this, project, event, cx| match event {
project::Event::DiskBasedDiagnosticsUpdated { .. } => {
this.summary = project.read(cx).diagnostic_summary(cx);
cx.notify();
}
project::Event::DiskBasedDiagnosticsStarted => {
this.in_progress = true;
cx.notify();
}
project::Event::DiskBasedDiagnosticsFinished => {
this.in_progress = false;
cx.notify();
}
_ => {}
})
.detach();
Self {
settings,
summary: project.read(cx).diagnostic_summary(cx),
in_progress: project.read(cx).is_running_disk_based_diagnostics(),
}
}
}
impl Entity for DiagnosticSummary {
type Event = ();
}
impl View for DiagnosticSummary {
fn ui_name() -> &'static str {
"DiagnosticSummary"
}
fn render(&mut self, cx: &mut RenderContext<Self>) -> ElementBox {
enum Tag {}
let theme = &self.settings.borrow().theme.project_diagnostics;
let mut message = String::new();
if self.in_progress {
message.push_str("Checking... ");
}
write!(
message,
"Errors: {}, Warnings: {}",
self.summary.error_count, self.summary.warning_count
)
.unwrap();
MouseEventHandler::new::<Tag, _, _, _>(0, cx, |_, _| {
Label::new(message, theme.status_bar_item.text.clone())
.contained()
.with_style(theme.status_bar_item.container)
.boxed()
})
.with_cursor_style(CursorStyle::PointingHand)
.on_click(|cx| cx.dispatch_action(crate::Deploy))
.boxed()
}
}
impl StatusItemView for DiagnosticSummary {
fn set_active_pane_item(
&mut self,
_: Option<&dyn workspace::ItemViewHandle>,
_: &mut ViewContext<Self>,
) {
}
}

View File

@@ -1,21 +1,24 @@
[package]
name = "editor"
version = "0.1.0"
edition = "2018"
edition = "2021"
[lib]
path = "src/editor.rs"
[features]
test-support = [
"rand",
"text/test-support",
"language/test-support",
"gpui/test-support",
"util/test-support",
]
[dependencies]
text = { path = "../text" }
clock = { path = "../clock" }
collections = { path = "../collections" }
gpui = { path = "../gpui" }
language = { path = "../language" }
project = { path = "../project" }
@@ -25,10 +28,12 @@ util = { path = "../util" }
workspace = { path = "../workspace" }
aho-corasick = "0.7"
anyhow = "1.0"
itertools = "0.10"
lazy_static = "1.4"
log = "0.4"
parking_lot = "0.11"
postage = { version = "0.4", features = ["futures-traits"] }
rand = { version = "0.8.3", optional = true }
serde = { version = "1", features = ["derive", "rc"] }
smallvec = { version = "1.6", features = ["union"] }
smol = "1.2"
@@ -37,6 +42,7 @@ smol = "1.2"
text = { path = "../text", features = ["test-support"] }
language = { path = "../language", features = ["test-support"] }
gpui = { path = "../gpui", features = ["test-support"] }
util = { path = "../util", features = ["test-support"] }
ctor = "0.1"
env_logger = "0.8"
rand = "0.8"

View File

@@ -3,28 +3,29 @@ mod fold_map;
mod tab_map;
mod wrap_map;
pub use block_map::{
AlignedBlock, BlockContext, BlockDisposition, BlockId, BlockProperties, BufferRows, Chunks,
};
use crate::{Anchor, MultiBuffer, MultiBufferSnapshot, ToOffset, ToPoint};
use block_map::{BlockMap, BlockPoint};
use collections::{HashMap, HashSet};
use fold_map::{FoldMap, ToFoldPoint as _};
use gpui::{fonts::FontId, ElementBox, Entity, ModelContext, ModelHandle};
use language::{Anchor, Buffer, Point, Subscription as BufferSubscription, ToOffset, ToPoint};
use std::{
collections::{HashMap, HashSet},
ops::Range,
};
use gpui::{fonts::FontId, Entity, ModelContext, ModelHandle};
use language::{Point, Subscription as BufferSubscription};
use std::ops::Range;
use sum_tree::Bias;
use tab_map::TabMap;
use theme::SyntaxTheme;
use wrap_map::WrapMap;
pub use block_map::{
AlignedBlock, BlockBufferRows as DisplayBufferRows, BlockChunks as DisplayChunks, BlockContext,
BlockDisposition, BlockId, BlockProperties, RenderBlock,
};
pub trait ToDisplayPoint {
fn to_display_point(&self, map: &DisplayMapSnapshot) -> DisplayPoint;
fn to_display_point(&self, map: &DisplaySnapshot) -> DisplayPoint;
}
pub struct DisplayMap {
buffer: ModelHandle<Buffer>,
buffer: ModelHandle<MultiBuffer>,
buffer_subscription: BufferSubscription,
fold_map: FoldMap,
tab_map: TabMap,
@@ -38,7 +39,7 @@ impl Entity for DisplayMap {
impl DisplayMap {
pub fn new(
buffer: ModelHandle<Buffer>,
buffer: ModelHandle<MultiBuffer>,
tab_size: usize,
font_id: FontId,
font_size: f32,
@@ -46,10 +47,10 @@ impl DisplayMap {
cx: &mut ModelContext<Self>,
) -> Self {
let buffer_subscription = buffer.update(cx, |buffer, _| buffer.subscribe());
let (fold_map, snapshot) = FoldMap::new(buffer.read(cx).snapshot());
let (fold_map, snapshot) = FoldMap::new(buffer.read(cx).snapshot(cx));
let (tab_map, snapshot) = TabMap::new(snapshot, tab_size);
let (wrap_map, snapshot) = WrapMap::new(snapshot, font_id, font_size, wrap_width, cx);
let block_map = BlockMap::new(buffer.clone(), snapshot);
let block_map = BlockMap::new(snapshot);
cx.observe(&wrap_map, |_, _, cx| cx.notify()).detach();
DisplayMap {
buffer,
@@ -61,18 +62,18 @@ impl DisplayMap {
}
}
pub fn snapshot(&self, cx: &mut ModelContext<Self>) -> DisplayMapSnapshot {
let buffer_snapshot = self.buffer.read(cx).snapshot();
pub fn snapshot(&self, cx: &mut ModelContext<Self>) -> DisplaySnapshot {
let buffer_snapshot = self.buffer.read(cx).snapshot(cx);
let edits = self.buffer_subscription.consume().into_inner();
let (folds_snapshot, edits) = self.fold_map.read(buffer_snapshot, edits);
let (tabs_snapshot, edits) = self.tab_map.sync(folds_snapshot.clone(), edits);
let (wraps_snapshot, edits) = self
.wrap_map
.update(cx, |map, cx| map.sync(tabs_snapshot.clone(), edits, cx));
let blocks_snapshot = self.block_map.read(wraps_snapshot.clone(), edits, cx);
let blocks_snapshot = self.block_map.read(wraps_snapshot.clone(), edits);
DisplayMapSnapshot {
buffer_snapshot: self.buffer.read(cx).snapshot(),
DisplaySnapshot {
buffer_snapshot: self.buffer.read(cx).snapshot(cx),
folds_snapshot,
tabs_snapshot,
wraps_snapshot,
@@ -85,20 +86,20 @@ impl DisplayMap {
ranges: impl IntoIterator<Item = Range<T>>,
cx: &mut ModelContext<Self>,
) {
let snapshot = self.buffer.read(cx).snapshot();
let snapshot = self.buffer.read(cx).snapshot(cx);
let edits = self.buffer_subscription.consume().into_inner();
let (mut fold_map, snapshot, edits) = self.fold_map.write(snapshot, edits);
let (snapshot, edits) = self.tab_map.sync(snapshot, edits);
let (snapshot, edits) = self
.wrap_map
.update(cx, |map, cx| map.sync(snapshot, edits, cx));
self.block_map.read(snapshot, edits, cx);
self.block_map.read(snapshot, edits);
let (snapshot, edits) = fold_map.fold(ranges);
let (snapshot, edits) = self.tab_map.sync(snapshot, edits);
let (snapshot, edits) = self
.wrap_map
.update(cx, |map, cx| map.sync(snapshot, edits, cx));
self.block_map.read(snapshot, edits, cx);
self.block_map.read(snapshot, edits);
}
pub fn unfold<T: ToOffset>(
@@ -106,58 +107,52 @@ impl DisplayMap {
ranges: impl IntoIterator<Item = Range<T>>,
cx: &mut ModelContext<Self>,
) {
let snapshot = self.buffer.read(cx).snapshot();
let snapshot = self.buffer.read(cx).snapshot(cx);
let edits = self.buffer_subscription.consume().into_inner();
let (mut fold_map, snapshot, edits) = self.fold_map.write(snapshot, edits);
let (snapshot, edits) = self.tab_map.sync(snapshot, edits);
let (snapshot, edits) = self
.wrap_map
.update(cx, |map, cx| map.sync(snapshot, edits, cx));
self.block_map.read(snapshot, edits, cx);
self.block_map.read(snapshot, edits);
let (snapshot, edits) = fold_map.unfold(ranges);
let (snapshot, edits) = self.tab_map.sync(snapshot, edits);
let (snapshot, edits) = self
.wrap_map
.update(cx, |map, cx| map.sync(snapshot, edits, cx));
self.block_map.read(snapshot, edits, cx);
self.block_map.read(snapshot, edits);
}
pub fn insert_blocks<P>(
pub fn insert_blocks(
&mut self,
blocks: impl IntoIterator<Item = BlockProperties<P>>,
blocks: impl IntoIterator<Item = BlockProperties<Anchor>>,
cx: &mut ModelContext<Self>,
) -> Vec<BlockId>
where
P: ToOffset + Clone,
{
let snapshot = self.buffer.read(cx).snapshot();
) -> Vec<BlockId> {
let snapshot = self.buffer.read(cx).snapshot(cx);
let edits = self.buffer_subscription.consume().into_inner();
let (snapshot, edits) = self.fold_map.read(snapshot, edits);
let (snapshot, edits) = self.tab_map.sync(snapshot, edits);
let (snapshot, edits) = self
.wrap_map
.update(cx, |map, cx| map.sync(snapshot, edits, cx));
let mut block_map = self.block_map.write(snapshot, edits, cx);
block_map.insert(blocks, cx)
let mut block_map = self.block_map.write(snapshot, edits);
block_map.insert(blocks)
}
pub fn replace_blocks<F>(&mut self, styles: HashMap<BlockId, F>)
where
F: 'static + Fn(&BlockContext) -> ElementBox,
{
pub fn replace_blocks(&mut self, styles: HashMap<BlockId, RenderBlock>) {
self.block_map.replace(styles);
}
pub fn remove_blocks(&mut self, ids: HashSet<BlockId>, cx: &mut ModelContext<Self>) {
let snapshot = self.buffer.read(cx).snapshot();
let snapshot = self.buffer.read(cx).snapshot(cx);
let edits = self.buffer_subscription.consume().into_inner();
let (snapshot, edits) = self.fold_map.read(snapshot, edits);
let (snapshot, edits) = self.tab_map.sync(snapshot, edits);
let (snapshot, edits) = self
.wrap_map
.update(cx, |map, cx| map.sync(snapshot, edits, cx));
let mut block_map = self.block_map.write(snapshot, edits, cx);
block_map.remove(ids, cx);
let mut block_map = self.block_map.write(snapshot, edits);
block_map.remove(ids);
}
pub fn set_font(&self, font_id: FontId, font_size: f32, cx: &mut ModelContext<Self>) {
@@ -176,15 +171,15 @@ impl DisplayMap {
}
}
pub struct DisplayMapSnapshot {
pub buffer_snapshot: language::Snapshot,
folds_snapshot: fold_map::Snapshot,
tabs_snapshot: tab_map::Snapshot,
wraps_snapshot: wrap_map::Snapshot,
pub struct DisplaySnapshot {
pub buffer_snapshot: MultiBufferSnapshot,
folds_snapshot: fold_map::FoldSnapshot,
tabs_snapshot: tab_map::TabSnapshot,
wraps_snapshot: wrap_map::WrapSnapshot,
blocks_snapshot: block_map::BlockSnapshot,
}
impl DisplayMapSnapshot {
impl DisplaySnapshot {
#[cfg(test)]
pub fn fold_count(&self) -> usize {
self.folds_snapshot.fold_count()
@@ -194,56 +189,60 @@ impl DisplayMapSnapshot {
self.buffer_snapshot.len() == 0
}
pub fn buffer_rows<'a>(&'a self, start_row: u32) -> BufferRows<'a> {
pub fn buffer_rows<'a>(&'a self, start_row: u32) -> DisplayBufferRows<'a> {
self.blocks_snapshot.buffer_rows(start_row)
}
pub fn buffer_row_count(&self) -> u32 {
self.buffer_snapshot.max_point().row + 1
pub fn max_buffer_row(&self) -> u32 {
self.buffer_snapshot.max_buffer_row()
}
pub fn prev_row_boundary(&self, mut display_point: DisplayPoint) -> (DisplayPoint, Point) {
pub fn prev_line_boundary(&self, mut point: Point) -> (Point, DisplayPoint) {
loop {
let mut fold_point = point.to_fold_point(&self.folds_snapshot, Bias::Left);
*fold_point.column_mut() = 0;
point = fold_point.to_buffer_point(&self.folds_snapshot);
let mut display_point = self.point_to_display_point(point, Bias::Left);
*display_point.column_mut() = 0;
let mut point = display_point.to_point(self);
point.column = 0;
let next_display_point = self.point_to_display_point(point, Bias::Left);
if next_display_point == display_point {
return (display_point, point);
let next_point = self.display_point_to_point(display_point, Bias::Left);
if next_point == point {
return (point, display_point);
}
display_point = next_display_point;
point = next_point;
}
}
pub fn next_row_boundary(&self, mut display_point: DisplayPoint) -> (DisplayPoint, Point) {
pub fn next_line_boundary(&self, mut point: Point) -> (Point, DisplayPoint) {
loop {
let mut fold_point = point.to_fold_point(&self.folds_snapshot, Bias::Right);
*fold_point.column_mut() = self.folds_snapshot.line_len(fold_point.row());
point = fold_point.to_buffer_point(&self.folds_snapshot);
let mut display_point = self.point_to_display_point(point, Bias::Right);
*display_point.column_mut() = self.line_len(display_point.row());
let mut point = display_point.to_point(self);
point.column = self.buffer_snapshot.line_len(point.row);
let next_display_point = self.point_to_display_point(point, Bias::Right);
if next_display_point == display_point {
return (display_point, point);
let next_point = self.display_point_to_point(display_point, Bias::Right);
if next_point == point {
return (point, display_point);
}
display_point = next_display_point;
point = next_point;
}
}
fn point_to_display_point(&self, point: Point, bias: Bias) -> DisplayPoint {
DisplayPoint(
self.blocks_snapshot.to_block_point(
self.wraps_snapshot.from_tab_point(
self.tabs_snapshot
.to_tab_point(point.to_fold_point(&self.folds_snapshot, bias)),
),
),
)
let fold_point = point.to_fold_point(&self.folds_snapshot, bias);
let tab_point = self.tabs_snapshot.to_tab_point(fold_point);
let wrap_point = self.wraps_snapshot.from_tab_point(tab_point);
let block_point = self.blocks_snapshot.to_block_point(wrap_point);
DisplayPoint(block_point)
}
fn display_point_to_point(&self, point: DisplayPoint, bias: Bias) -> Point {
let unblocked_point = self.blocks_snapshot.to_wrap_point(point.0);
let unwrapped_point = self.wraps_snapshot.to_tab_point(unblocked_point);
let unexpanded_point = self.tabs_snapshot.to_fold_point(unwrapped_point, bias).0;
unexpanded_point.to_buffer_point(&self.folds_snapshot)
let block_point = point.0;
let wrap_point = self.blocks_snapshot.to_wrap_point(block_point);
let tab_point = self.wraps_snapshot.to_tab_point(wrap_point);
let fold_point = self.tabs_snapshot.to_fold_point(tab_point, bias).0;
fold_point.to_buffer_point(&self.folds_snapshot)
}
pub fn max_point(&self) -> DisplayPoint {
@@ -260,7 +259,7 @@ impl DisplayMapSnapshot {
&'a self,
display_rows: Range<u32>,
theme: Option<&'a SyntaxTheme>,
) -> block_map::Chunks<'a> {
) -> DisplayChunks<'a> {
self.blocks_snapshot.chunks(display_rows, theme)
}
@@ -420,11 +419,11 @@ impl DisplayPoint {
&mut self.0.column
}
pub fn to_point(self, map: &DisplayMapSnapshot) -> Point {
pub fn to_point(self, map: &DisplaySnapshot) -> Point {
map.display_point_to_point(self, Bias::Left)
}
pub fn to_offset(self, map: &DisplayMapSnapshot, bias: Bias) -> usize {
pub fn to_offset(self, map: &DisplaySnapshot, bias: Bias) -> usize {
let unblocked_point = map.blocks_snapshot.to_wrap_point(self.0);
let unwrapped_point = map.wraps_snapshot.to_tab_point(unblocked_point);
let unexpanded_point = map.tabs_snapshot.to_fold_point(unwrapped_point, bias).0;
@@ -433,19 +432,19 @@ impl DisplayPoint {
}
impl ToDisplayPoint for usize {
fn to_display_point(&self, map: &DisplayMapSnapshot) -> DisplayPoint {
fn to_display_point(&self, map: &DisplaySnapshot) -> DisplayPoint {
map.point_to_display_point(self.to_point(&map.buffer_snapshot), Bias::Left)
}
}
impl ToDisplayPoint for Point {
fn to_display_point(&self, map: &DisplayMapSnapshot) -> DisplayPoint {
fn to_display_point(&self, map: &DisplaySnapshot) -> DisplayPoint {
map.point_to_display_point(*self, Bias::Left)
}
}
impl ToDisplayPoint for Anchor {
fn to_display_point(&self, map: &DisplayMapSnapshot) -> DisplayPoint {
fn to_display_point(&self, map: &DisplaySnapshot) -> DisplayPoint {
self.to_point(&map.buffer_snapshot).to_display_point(map)
}
}
@@ -453,16 +452,18 @@ impl ToDisplayPoint for Anchor {
#[cfg(test)]
mod tests {
use super::*;
use crate::{movement, test::*};
use gpui::{color::Color, MutableAppContext};
use language::{Language, LanguageConfig, RandomCharIter, SelectionGoal};
use rand::{prelude::StdRng, Rng};
use crate::movement;
use gpui::{color::Color, elements::*, test::observe, MutableAppContext};
use language::{Buffer, Language, LanguageConfig, RandomCharIter, SelectionGoal};
use rand::{prelude::*, Rng};
use smol::stream::StreamExt;
use std::{env, sync::Arc};
use theme::SyntaxTheme;
use util::test::sample_text;
use Bias::*;
#[gpui::test(iterations = 100)]
async fn test_random(mut cx: gpui::TestAppContext, mut rng: StdRng) {
async fn test_random_display_map(mut cx: gpui::TestAppContext, mut rng: StdRng) {
cx.foreground().set_block_on_ticks(0..=50);
cx.foreground().forbid_parking();
let operations = env::var("OPERATIONS")
@@ -486,17 +487,30 @@ mod tests {
log::info!("tab size: {}", tab_size);
log::info!("wrap width: {:?}", wrap_width);
let buffer = cx.add_model(|cx| {
let len = rng.gen_range(0..10);
let text = RandomCharIter::new(&mut rng).take(len).collect::<String>();
Buffer::new(0, text, cx)
let buffer = cx.update(|cx| {
if rng.gen() {
let len = rng.gen_range(0..10);
let text = RandomCharIter::new(&mut rng).take(len).collect::<String>();
MultiBuffer::build_simple(&text, cx)
} else {
MultiBuffer::build_random(&mut rng, cx)
}
});
let map = cx.add_model(|cx| {
DisplayMap::new(buffer.clone(), tab_size, font_id, font_size, wrap_width, cx)
});
let (_observer, notifications) = Observer::new(&map, &mut cx);
let mut notifications = observe(&map, &mut cx);
let mut fold_count = 0;
let mut blocks = Vec::new();
let snapshot = map.update(&mut cx, |map, cx| map.snapshot(cx));
log::info!("buffer text: {:?}", snapshot.buffer_snapshot.text());
log::info!("fold text: {:?}", snapshot.folds_snapshot.text());
log::info!("tab text: {:?}", snapshot.tabs_snapshot.text());
log::info!("wrap text: {:?}", snapshot.wraps_snapshot.text());
log::info!("block text: {:?}", snapshot.blocks_snapshot.text());
log::info!("display text: {:?}", snapshot.text());
for _i in 0..operations {
match rng.gen_range(0..100) {
@@ -509,10 +523,55 @@ mod tests {
log::info!("setting wrap width to {:?}", wrap_width);
map.update(&mut cx, |map, cx| map.set_wrap_width(wrap_width, cx));
}
20..=80 => {
20..=44 => {
map.update(&mut cx, |map, cx| {
if rng.gen() || blocks.is_empty() {
let buffer = map.snapshot(cx).buffer_snapshot;
let block_properties = (0..rng.gen_range(1..=1))
.map(|_| {
let position =
buffer.anchor_after(buffer.clip_offset(
rng.gen_range(0..=buffer.len()),
Bias::Left,
));
let disposition = if rng.gen() {
BlockDisposition::Above
} else {
BlockDisposition::Below
};
let height = rng.gen_range(1..5);
log::info!(
"inserting block {:?} {:?} with height {}",
disposition,
position.to_point(&buffer),
height
);
BlockProperties {
position,
height,
disposition,
render: Arc::new(|_| Empty::new().boxed()),
}
})
.collect::<Vec<_>>();
blocks.extend(map.insert_blocks(block_properties, cx));
} else {
blocks.shuffle(&mut rng);
let remove_count = rng.gen_range(1..=4.min(blocks.len()));
let block_ids_to_remove = (0..remove_count)
.map(|_| blocks.remove(rng.gen_range(0..blocks.len())))
.collect();
log::info!("removing block ids {:?}", block_ids_to_remove);
map.remove_blocks(block_ids_to_remove, cx);
}
});
}
45..=79 => {
let mut ranges = Vec::new();
for _ in 0..rng.gen_range(1..=3) {
buffer.read_with(&cx, |buffer, _| {
buffer.read_with(&cx, |buffer, cx| {
let buffer = buffer.read(cx);
let end = buffer.clip_offset(rng.gen_range(0..=buffer.len()), Right);
let start = buffer.clip_offset(rng.gen_range(0..=end), Left);
ranges.push(start..end);
@@ -537,33 +596,34 @@ mod tests {
}
if map.read_with(&cx, |map, cx| map.is_rewrapping(cx)) {
notifications.recv().await.unwrap();
notifications.next().await.unwrap();
}
let snapshot = map.update(&mut cx, |map, cx| map.snapshot(cx));
fold_count = snapshot.fold_count();
log::info!("buffer text: {:?}", buffer.read_with(&cx, |b, _| b.text()));
log::info!("buffer text: {:?}", snapshot.buffer_snapshot.text());
log::info!("fold text: {:?}", snapshot.folds_snapshot.text());
log::info!("tab text: {:?}", snapshot.tabs_snapshot.text());
log::info!("wrap text: {:?}", snapshot.wraps_snapshot.text());
log::info!("block text: {:?}", snapshot.blocks_snapshot.text());
log::info!("display text: {:?}", snapshot.text());
// Line boundaries
let buffer = &snapshot.buffer_snapshot;
for _ in 0..5 {
let row = rng.gen_range(0..=snapshot.max_point().row());
let column = rng.gen_range(0..=snapshot.line_len(row));
let point = snapshot.clip_point(DisplayPoint::new(row, column), Left);
let row = rng.gen_range(0..=buffer.max_point().row);
let column = rng.gen_range(0..=buffer.line_len(row));
let point = buffer.clip_point(Point::new(row, column), Left);
let (prev_display_bound, prev_buffer_bound) = snapshot.prev_row_boundary(point);
let (next_display_bound, next_buffer_bound) = snapshot.next_row_boundary(point);
let (prev_buffer_bound, prev_display_bound) = snapshot.prev_line_boundary(point);
let (next_buffer_bound, next_display_bound) = snapshot.next_line_boundary(point);
assert!(prev_display_bound <= point);
assert!(next_display_bound >= point);
assert!(prev_buffer_bound <= point);
assert!(next_buffer_bound >= point);
assert_eq!(prev_buffer_bound.column, 0);
assert_eq!(prev_display_bound.column(), 0);
if next_display_bound < snapshot.max_point() {
assert_eq!(
buffer
.read_with(&cx, |buffer, _| buffer.chars_at(next_buffer_bound).next()),
Some('\n')
)
if next_buffer_bound < buffer.max_point() {
assert_eq!(buffer.chars_at(next_buffer_bound).next(), Some('\n'));
}
assert_eq!(
@@ -597,6 +657,8 @@ mod tests {
}
// Movement
let min_point = snapshot.clip_point(DisplayPoint::new(0, 0), Left);
let max_point = snapshot.clip_point(snapshot.max_point(), Right);
for _ in 0..5 {
let row = rng.gen_range(0..=snapshot.max_point().row());
let column = rng.gen_range(0..=snapshot.line_len(row));
@@ -606,7 +668,7 @@ mod tests {
let moved_right = movement::right(&snapshot, point).unwrap();
log::info!("Right {:?}", moved_right);
if point < snapshot.max_point() {
if point < max_point {
assert!(moved_right > point);
if point.column() == snapshot.line_len(point.row())
|| snapshot.soft_wrap_indent(point.row()).is_some()
@@ -620,13 +682,13 @@ mod tests {
let moved_left = movement::left(&snapshot, point).unwrap();
log::info!("Left {:?}", moved_left);
if !point.is_zero() {
if point > min_point {
assert!(moved_left < point);
if point.column() == 0 {
assert!(moved_left.row() < point.row());
}
} else {
assert!(moved_left.is_zero());
assert_eq!(moved_left, point);
}
}
}
@@ -648,7 +710,7 @@ mod tests {
let wrap_width = Some(64.);
let text = "one two three four five\nsix seven eight";
let buffer = cx.add_model(|cx| Buffer::new(0, text.to_string(), cx));
let buffer = MultiBuffer::build_simple(text, cx);
let map = cx.add_model(|cx| {
DisplayMap::new(buffer.clone(), tab_size, font_id, font_size, wrap_width, cx)
});
@@ -697,8 +759,8 @@ mod tests {
(DisplayPoint::new(2, 4), SelectionGoal::Column(10))
);
let ix = snapshot.buffer_snapshot.text().find("seven").unwrap();
buffer.update(cx, |buffer, cx| {
let ix = buffer.text().find("seven").unwrap();
buffer.edit(vec![ix..ix], "and ", cx);
});
@@ -720,8 +782,8 @@ mod tests {
#[gpui::test]
fn test_text_chunks(cx: &mut gpui::MutableAppContext) {
let text = sample_text(6, 6);
let buffer = cx.add_model(|cx| Buffer::new(0, text, cx));
let text = sample_text(6, 6, 'a');
let buffer = MultiBuffer::build_simple(&text, cx);
let tab_size = 4;
let family_id = cx.font_cache().load_family(&["Helvetica"]).unwrap();
let font_id = cx
@@ -800,6 +862,7 @@ mod tests {
let buffer =
cx.add_model(|cx| Buffer::new(0, text, cx).with_language(Some(lang), None, cx));
buffer.condition(&cx, |buf, _| !buf.is_parsing()).await;
let buffer = cx.add_model(|cx| MultiBuffer::singleton(buffer, cx));
let tab_size = 2;
let font_cache = cx.font_cache();
@@ -887,6 +950,7 @@ mod tests {
let buffer =
cx.add_model(|cx| Buffer::new(0, text, cx).with_language(Some(lang), None, cx));
buffer.condition(&cx, |buf, _| !buf.is_parsing()).await;
let buffer = cx.add_model(|cx| MultiBuffer::singleton(buffer, cx));
let font_cache = cx.font_cache();
@@ -932,7 +996,7 @@ mod tests {
let text = "\n'a', 'α',\t'✋',\t'❎', '🍐'\n";
let display_text = "\n'a', 'α', '✋', '❎', '🍐'\n";
let buffer = cx.add_model(|cx| Buffer::new(0, text, cx));
let buffer = MultiBuffer::build_simple(text, cx);
let tab_size = 4;
let font_cache = cx.font_cache();
@@ -976,7 +1040,7 @@ mod tests {
#[gpui::test]
fn test_tabs_with_multibyte_chars(cx: &mut gpui::MutableAppContext) {
let text = "\t\tα\nβ\t\n🏀β\t\tγ";
let buffer = cx.add_model(|cx| Buffer::new(0, text, cx));
let buffer = MultiBuffer::build_simple(text, cx);
let tab_size = 4;
let font_cache = cx.font_cache();
let family_id = font_cache.load_family(&["Helvetica"]).unwrap();
@@ -1035,7 +1099,7 @@ mod tests {
#[gpui::test]
fn test_max_point(cx: &mut gpui::MutableAppContext) {
let buffer = cx.add_model(|cx| Buffer::new(0, "aaa\n\t\tbbb", cx));
let buffer = MultiBuffer::build_simple("aaa\n\t\tbbb", cx);
let tab_size = 4;
let font_cache = cx.font_cache();
let family_id = font_cache.load_family(&["Helvetica"]).unwrap();

View File

@@ -1,10 +1,11 @@
use super::wrap_map::{self, Edit as WrapEdit, Snapshot as WrapSnapshot, WrapPoint};
use gpui::{AppContext, ElementBox, ModelHandle};
use language::{Buffer, Chunk};
use super::wrap_map::{self, WrapEdit, WrapPoint, WrapSnapshot};
use crate::{Anchor, ToPoint as _};
use collections::{HashMap, HashSet};
use gpui::{AppContext, ElementBox};
use language::Chunk;
use parking_lot::Mutex;
use std::{
cmp::{self, Ordering},
collections::{HashMap, HashSet},
cmp::{self, Ordering, Reverse},
fmt::Debug,
ops::{Deref, Range},
sync::{
@@ -12,14 +13,13 @@ use std::{
Arc,
},
};
use sum_tree::SumTree;
use text::{Anchor, Bias, Edit, Point, ToOffset, ToPoint as _};
use sum_tree::{Bias, SumTree};
use text::{Edit, Point};
use theme::SyntaxTheme;
const NEWLINES: &'static [u8] = &[b'\n'; u8::MAX as usize];
pub struct BlockMap {
buffer: ModelHandle<Buffer>,
next_block_id: AtomicUsize,
wrap_snapshot: Mutex<WrapSnapshot>,
blocks: Vec<Arc<Block>>,
@@ -45,11 +45,13 @@ struct BlockRow(u32);
#[derive(Copy, Clone, Debug, Default, Eq, Ord, PartialOrd, PartialEq)]
struct WrapRow(u32);
pub type RenderBlock = Arc<dyn Fn(&BlockContext) -> ElementBox>;
pub struct Block {
id: BlockId,
position: Anchor,
height: u8,
render: Mutex<Arc<dyn Fn(&BlockContext) -> ElementBox>>,
render: Mutex<RenderBlock>,
disposition: BlockDisposition,
}
@@ -67,6 +69,7 @@ where
pub struct BlockContext<'a> {
pub cx: &'a AppContext,
pub anchor_x: f32,
pub line_number_x: f32,
}
#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord)]
@@ -93,25 +96,24 @@ struct TransformSummary {
output_rows: u32,
}
pub struct Chunks<'a> {
pub struct BlockChunks<'a> {
transforms: sum_tree::Cursor<'a, Transform, (BlockRow, WrapRow)>,
input_chunks: wrap_map::Chunks<'a>,
input_chunks: wrap_map::WrapChunks<'a>,
input_chunk: Chunk<'a>,
output_row: u32,
max_output_row: u32,
}
pub struct BufferRows<'a> {
pub struct BlockBufferRows<'a> {
transforms: sum_tree::Cursor<'a, Transform, (BlockRow, WrapRow)>,
input_buffer_rows: wrap_map::BufferRows<'a>,
input_buffer_rows: wrap_map::WrapBufferRows<'a>,
output_row: u32,
started: bool,
}
impl BlockMap {
pub fn new(buffer: ModelHandle<Buffer>, wrap_snapshot: WrapSnapshot) -> Self {
pub fn new(wrap_snapshot: WrapSnapshot) -> Self {
Self {
buffer,
next_block_id: AtomicUsize::new(0),
blocks: Vec::new(),
transforms: Mutex::new(SumTree::from_item(
@@ -122,13 +124,8 @@ impl BlockMap {
}
}
pub fn read(
&self,
wrap_snapshot: WrapSnapshot,
edits: Vec<WrapEdit>,
cx: &AppContext,
) -> BlockSnapshot {
self.sync(&wrap_snapshot, edits, cx);
pub fn read(&self, wrap_snapshot: WrapSnapshot, edits: Vec<WrapEdit>) -> BlockSnapshot {
self.sync(&wrap_snapshot, edits);
*self.wrap_snapshot.lock() = wrap_snapshot.clone();
BlockSnapshot {
wrap_snapshot,
@@ -136,23 +133,18 @@ impl BlockMap {
}
}
pub fn write(
&mut self,
wrap_snapshot: WrapSnapshot,
edits: Vec<WrapEdit>,
cx: &AppContext,
) -> BlockMapWriter {
self.sync(&wrap_snapshot, edits, cx);
pub fn write(&mut self, wrap_snapshot: WrapSnapshot, edits: Vec<WrapEdit>) -> BlockMapWriter {
self.sync(&wrap_snapshot, edits);
*self.wrap_snapshot.lock() = wrap_snapshot;
BlockMapWriter(self)
}
fn sync(&self, wrap_snapshot: &WrapSnapshot, edits: Vec<WrapEdit>, cx: &AppContext) {
fn sync(&self, wrap_snapshot: &WrapSnapshot, edits: Vec<WrapEdit>) {
if edits.is_empty() {
return;
}
let buffer = self.buffer.read(cx);
let buffer = wrap_snapshot.buffer_snapshot();
let mut transforms = self.transforms.lock();
let mut new_transforms = SumTree::new();
let old_row_count = transforms.summary().input_rows;
@@ -236,26 +228,29 @@ impl BlockMap {
}
// Find the blocks within this edited region.
let new_start = wrap_snapshot.to_point(WrapPoint::new(new_start.0, 0), Bias::Left);
let start_anchor = buffer.anchor_before(new_start);
let new_buffer_start =
wrap_snapshot.to_point(WrapPoint::new(new_start.0, 0), Bias::Left);
let start_anchor = buffer.anchor_before(new_buffer_start);
let start_block_ix = match self.blocks[last_block_ix..].binary_search_by(|probe| {
probe
.position
.cmp(&start_anchor, buffer)
.cmp(&start_anchor, &buffer)
.unwrap()
.then(Ordering::Greater)
}) {
Ok(ix) | Err(ix) => last_block_ix + ix,
};
let end_block_ix = if new_end.0 > wrap_snapshot.max_point().row() {
self.blocks.len()
} else {
let new_end = wrap_snapshot.to_point(WrapPoint::new(new_end.0, 0), Bias::Left);
let end_anchor = buffer.anchor_before(new_end);
let new_buffer_end =
wrap_snapshot.to_point(WrapPoint::new(new_end.0, 0), Bias::Left);
let end_anchor = buffer.anchor_before(new_buffer_end);
match self.blocks[start_block_ix..].binary_search_by(|probe| {
probe
.position
.cmp(&end_anchor, buffer)
.cmp(&end_anchor, &buffer)
.unwrap()
.then(Ordering::Greater)
}) {
@@ -263,12 +258,13 @@ impl BlockMap {
}
};
last_block_ix = end_block_ix;
blocks_in_edit.clear();
debug_assert!(blocks_in_edit.is_empty());
blocks_in_edit.extend(
self.blocks[start_block_ix..end_block_ix]
.iter()
.map(|block| {
let mut position = block.position.to_point(buffer);
let mut position = block.position.to_point(&buffer);
let column = wrap_snapshot.from_point(position, Bias::Left).column();
match block.disposition {
BlockDisposition::Above => position.column = 0,
@@ -277,22 +273,25 @@ impl BlockMap {
}
}
let position = wrap_snapshot.from_point(position, Bias::Left);
(position.row(), column, block)
(position.row(), column, block.clone())
}),
);
// When multiple blocks are on the same row, newer blocks appear above older
// blocks. This is arbitrary, but we currently rely on it in ProjectDiagnosticsEditor.
blocks_in_edit
.sort_unstable_by_key(|(row, _, block)| (*row, block.disposition, block.id));
.sort_by_key(|(row, _, block)| (*row, block.disposition, Reverse(block.id)));
// For each of these blocks, insert a new isomorphic transform preceding the block,
// and then insert the block itself.
for (block_row, column, block) in blocks_in_edit.iter().copied() {
for (block_row, column, block) in blocks_in_edit.drain(..) {
let insertion_row = match block.disposition {
BlockDisposition::Above => block_row,
BlockDisposition::Below => block_row + 1,
};
let extent_before_block = insertion_row - new_transforms.summary().input_rows;
push_isomorphic(&mut new_transforms, extent_before_block);
new_transforms.push(Transform::block(block.clone(), column), &());
new_transforms.push(Transform::block(block, column), &());
}
old_end = WrapRow(old_end.0.min(old_row_count));
@@ -317,13 +316,10 @@ impl BlockMap {
*transforms = new_transforms;
}
pub fn replace<F>(&mut self, mut element_builders: HashMap<BlockId, F>)
where
F: 'static + Fn(&BlockContext) -> ElementBox,
{
pub fn replace(&mut self, mut renderers: HashMap<BlockId, RenderBlock>) {
for block in &self.blocks {
if let Some(build_element) = element_builders.remove(&block.id) {
*block.render.lock() = Arc::new(build_element);
if let Some(render) = renderers.remove(&block.id) {
*block.render.lock() = render;
}
}
}
@@ -371,40 +367,33 @@ impl std::ops::DerefMut for BlockPoint {
}
impl<'a> BlockMapWriter<'a> {
pub fn insert<P>(
pub fn insert(
&mut self,
blocks: impl IntoIterator<Item = BlockProperties<P>>,
cx: &AppContext,
) -> Vec<BlockId>
where
P: ToOffset + Clone,
{
let buffer = self.0.buffer.read(cx);
blocks: impl IntoIterator<Item = BlockProperties<Anchor>>,
) -> Vec<BlockId> {
let mut ids = Vec::new();
let mut edits = Vec::<Edit<u32>>::new();
let wrap_snapshot = &*self.0.wrap_snapshot.lock();
let buffer = wrap_snapshot.buffer_snapshot();
for block in blocks {
let id = BlockId(self.0.next_block_id.fetch_add(1, SeqCst));
ids.push(id);
let position = buffer.anchor_after(block.position);
let point = position.to_point(buffer);
let start_row = wrap_snapshot
let position = block.position;
let point = position.to_point(&buffer);
let wrap_row = wrap_snapshot
.from_point(Point::new(point.row, 0), Bias::Left)
.row();
let end_row = if point.row == buffer.max_point().row {
wrap_snapshot.max_point().row() + 1
} else {
wrap_snapshot
.from_point(Point::new(point.row + 1, 0), Bias::Left)
.row()
};
let start_row = wrap_snapshot.prev_row_boundary(WrapPoint::new(wrap_row, 0));
let end_row = wrap_snapshot
.next_row_boundary(WrapPoint::new(wrap_row, 0))
.unwrap_or(wrap_snapshot.max_point().row() + 1);
let block_ix = match self
.0
.blocks
.binary_search_by(|probe| probe.position.cmp(&position, buffer).unwrap())
.binary_search_by(|probe| probe.position.cmp(&position, &buffer).unwrap())
{
Ok(ix) | Err(ix) => ix,
};
@@ -430,30 +419,27 @@ impl<'a> BlockMapWriter<'a> {
}
}
self.0.sync(wrap_snapshot, edits, cx);
self.0.sync(wrap_snapshot, edits);
ids
}
pub fn remove(&mut self, block_ids: HashSet<BlockId>, cx: &AppContext) {
let buffer = self.0.buffer.read(cx);
pub fn remove(&mut self, block_ids: HashSet<BlockId>) {
let wrap_snapshot = &*self.0.wrap_snapshot.lock();
let buffer = wrap_snapshot.buffer_snapshot();
let mut edits = Vec::new();
let mut last_block_buffer_row = None;
self.0.blocks.retain(|block| {
if block_ids.contains(&block.id) {
let buffer_row = block.position.to_point(buffer).row;
let buffer_row = block.position.to_point(&buffer).row;
if last_block_buffer_row != Some(buffer_row) {
last_block_buffer_row = Some(buffer_row);
let start_row = wrap_snapshot
let wrap_row = wrap_snapshot
.from_point(Point::new(buffer_row, 0), Bias::Left)
.row();
let start_row = wrap_snapshot.prev_row_boundary(WrapPoint::new(wrap_row, 0));
let end_row = wrap_snapshot
.from_point(
Point::new(buffer_row, buffer.line_len(buffer_row)),
Bias::Left,
)
.row()
+ 1;
.next_row_boundary(WrapPoint::new(wrap_row, 0))
.unwrap_or(wrap_snapshot.max_point().row() + 1);
edits.push(Edit {
old: start_row..end_row,
new: start_row..end_row,
@@ -464,19 +450,23 @@ impl<'a> BlockMapWriter<'a> {
true
}
});
self.0.sync(wrap_snapshot, edits, cx);
self.0.sync(wrap_snapshot, edits);
}
}
impl BlockSnapshot {
#[cfg(test)]
fn text(&mut self) -> String {
pub fn text(&self) -> String {
self.chunks(0..self.transforms.summary().output_rows, None)
.map(|chunk| chunk.text)
.collect()
}
pub fn chunks<'a>(&'a self, rows: Range<u32>, theme: Option<&'a SyntaxTheme>) -> Chunks<'a> {
pub fn chunks<'a>(
&'a self,
rows: Range<u32>,
theme: Option<&'a SyntaxTheme>,
) -> BlockChunks<'a> {
let max_output_row = cmp::min(rows.end, self.transforms.summary().output_rows);
let mut cursor = self.transforms.cursor::<(BlockRow, WrapRow)>();
let input_end = {
@@ -503,7 +493,7 @@ impl BlockSnapshot {
};
cursor.start().1 .0 + overshoot
};
Chunks {
BlockChunks {
input_chunks: self.wrap_snapshot.chunks(input_start..input_end, theme),
input_chunk: Default::default(),
transforms: cursor,
@@ -512,7 +502,7 @@ impl BlockSnapshot {
}
}
pub fn buffer_rows<'a>(&'a self, start_row: u32) -> BufferRows<'a> {
pub fn buffer_rows<'a>(&'a self, start_row: u32) -> BlockBufferRows<'a> {
let mut cursor = self.transforms.cursor::<(BlockRow, WrapRow)>();
cursor.seek(&BlockRow(start_row), Bias::Right, &());
let (output_start, input_start) = cursor.start();
@@ -522,7 +512,7 @@ impl BlockSnapshot {
0
};
let input_start_row = input_start.0 + overshoot;
BufferRows {
BlockBufferRows {
transforms: cursor,
input_buffer_rows: self.wrap_snapshot.buffer_rows(input_start_row),
output_row: start_row,
@@ -590,41 +580,45 @@ impl BlockSnapshot {
cursor.seek(&BlockRow(point.row), Bias::Right, &());
let max_input_row = WrapRow(self.transforms.summary().input_rows);
let search_left =
let mut search_left =
(bias == Bias::Left && cursor.start().1 .0 > 0) || cursor.end(&()).1 == max_input_row;
let mut reversed = false;
loop {
if let Some(transform) = cursor.item() {
if transform.is_isomorphic() {
let (output_start_row, input_start_row) = cursor.start();
let (output_end_row, input_end_row) = cursor.end(&());
if point.row >= output_end_row.0 {
return BlockPoint::new(
output_end_row.0 - 1,
self.wrap_snapshot.line_len(input_end_row.0 - 1),
);
}
let output_start = Point::new(output_start_row.0, 0);
if point.0 > output_start {
let output_overshoot = point.0 - output_start;
let input_start = Point::new(input_start_row.0, 0);
let input_point = self
.wrap_snapshot
.clip_point(WrapPoint(input_start + output_overshoot), bias);
let input_overshoot = input_point.0 - input_start;
return BlockPoint(output_start + input_overshoot);
let input_start = Point::new(input_start_row.0, 0);
let input_end = Point::new(input_end_row.0, 0);
let input_point = if point.row >= output_end_row.0 {
let line_len = self.wrap_snapshot.line_len(input_end_row.0 - 1);
self.wrap_snapshot
.clip_point(WrapPoint::new(input_end_row.0 - 1, line_len), bias)
} else {
return BlockPoint(output_start);
let output_overshoot = point.0.saturating_sub(output_start);
self.wrap_snapshot
.clip_point(WrapPoint(input_start + output_overshoot), bias)
};
if (input_start..input_end).contains(&input_point.0) {
let input_overshoot = input_point.0.saturating_sub(input_start);
return BlockPoint(output_start + input_overshoot);
}
} else if search_left {
}
if search_left {
cursor.prev(&());
} else {
cursor.next(&());
}
} else {
} else if reversed {
return self.max_point();
} else {
reversed = true;
search_left = !search_left;
cursor.seek(&BlockRow(point.row), Bias::Right, &());
}
}
}
@@ -693,7 +687,7 @@ impl Transform {
}
}
impl<'a> Iterator for Chunks<'a> {
impl<'a> Iterator for BlockChunks<'a> {
type Item = Chunk<'a>;
fn next(&mut self) -> Option<Self::Item> {
@@ -756,7 +750,7 @@ impl<'a> Iterator for Chunks<'a> {
}
}
impl<'a> Iterator for BufferRows<'a> {
impl<'a> Iterator for BlockBufferRows<'a> {
type Item = Option<u32>;
fn next(&mut self) -> Option<Self::Item> {
@@ -840,6 +834,14 @@ impl Deref for AlignedBlock {
}
}
impl<'a> Deref for BlockContext<'a> {
type Target = AppContext;
fn deref(&self) -> &Self::Target {
&self.cx
}
}
impl Debug for Block {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
f.debug_struct("Block")
@@ -872,8 +874,8 @@ fn offset_for_row(s: &str, target: u32) -> (u32, usize) {
mod tests {
use super::*;
use crate::display_map::{fold_map::FoldMap, tab_map::TabMap, wrap_map::WrapMap};
use crate::multi_buffer::MultiBuffer;
use gpui::{elements::Empty, Element};
use language::Buffer;
use rand::prelude::*;
use std::env;
use text::RandomCharIter;
@@ -902,38 +904,37 @@ mod tests {
let text = "aaa\nbbb\nccc\nddd";
let buffer = cx.add_model(|cx| Buffer::new(0, text, cx));
let (fold_map, folds_snapshot) = FoldMap::new(buffer.read(cx).snapshot());
let buffer = MultiBuffer::build_simple(text, cx);
let buffer_snapshot = buffer.read(cx).snapshot(cx);
let subscription = buffer.update(cx, |buffer, _| buffer.subscribe());
let (fold_map, folds_snapshot) = FoldMap::new(buffer_snapshot.clone());
let (tab_map, tabs_snapshot) = TabMap::new(folds_snapshot.clone(), 1);
let (wrap_map, wraps_snapshot) = WrapMap::new(tabs_snapshot, font_id, 14.0, None, cx);
let mut block_map = BlockMap::new(buffer.clone(), wraps_snapshot.clone());
let mut block_map = BlockMap::new(wraps_snapshot.clone());
let mut writer = block_map.write(wraps_snapshot.clone(), vec![], cx);
writer.insert(
vec![
BlockProperties {
position: Point::new(1, 0),
height: 1,
disposition: BlockDisposition::Above,
render: Arc::new(|_| Empty::new().named("block 1")),
},
BlockProperties {
position: Point::new(1, 2),
height: 2,
disposition: BlockDisposition::Above,
render: Arc::new(|_| Empty::new().named("block 2")),
},
BlockProperties {
position: Point::new(3, 3),
height: 3,
disposition: BlockDisposition::Below,
render: Arc::new(|_| Empty::new().named("block 3")),
},
],
cx,
);
let mut writer = block_map.write(wraps_snapshot.clone(), vec![]);
writer.insert(vec![
BlockProperties {
position: buffer_snapshot.anchor_after(Point::new(1, 0)),
height: 1,
disposition: BlockDisposition::Above,
render: Arc::new(|_| Empty::new().named("block 1")),
},
BlockProperties {
position: buffer_snapshot.anchor_after(Point::new(1, 2)),
height: 2,
disposition: BlockDisposition::Above,
render: Arc::new(|_| Empty::new().named("block 2")),
},
BlockProperties {
position: buffer_snapshot.anchor_after(Point::new(3, 3)),
height: 3,
disposition: BlockDisposition::Below,
render: Arc::new(|_| Empty::new().named("block 3")),
},
]);
let mut snapshot = block_map.read(wraps_snapshot, vec![], cx);
let snapshot = block_map.read(wraps_snapshot, vec![]);
assert_eq!(snapshot.text(), "aaa\n\n\n\nbbb\nccc\nddd\n\n\n");
let blocks = snapshot
@@ -943,18 +944,24 @@ mod tests {
start_row..start_row + block.height(),
block.column(),
block
.render(&BlockContext { cx, anchor_x: 0. })
.render(&BlockContext {
cx,
anchor_x: 0.,
line_number_x: 0.,
})
.name()
.unwrap()
.to_string(),
)
})
.collect::<Vec<_>>();
// When multiple blocks are on the same line, the newer blocks appear first.
assert_eq!(
blocks,
&[
(1..2, 0, "block 1".to_string()),
(2..4, 2, "block 2".to_string()),
(1..3, 2, "block 2".to_string()),
(3..4, 0, "block 1".to_string()),
(7..10, 3, "block 3".to_string()),
]
);
@@ -1046,20 +1053,19 @@ mod tests {
]
);
// Insert a line break, separating two block decorations into separate
// lines.
let (buffer_snapshot, buffer_edits) = buffer.update(cx, |buffer, cx| {
let v0 = buffer.version();
// Insert a line break, separating two block decorations into separate lines.
let buffer_snapshot = buffer.update(cx, |buffer, cx| {
buffer.edit([Point::new(1, 1)..Point::new(1, 1)], "!!!\n", cx);
(buffer.snapshot(), buffer.edits_since(&v0).collect())
buffer.snapshot(cx)
});
let (folds_snapshot, fold_edits) = fold_map.read(buffer_snapshot, buffer_edits);
let (folds_snapshot, fold_edits) =
fold_map.read(buffer_snapshot, subscription.consume().into_inner());
let (tabs_snapshot, tab_edits) = tab_map.sync(folds_snapshot, fold_edits);
let (wraps_snapshot, wrap_edits) = wrap_map.update(cx, |wrap_map, cx| {
wrap_map.sync(tabs_snapshot, tab_edits, cx)
});
let mut snapshot = block_map.read(wraps_snapshot, wrap_edits, cx);
let snapshot = block_map.read(wraps_snapshot, wrap_edits);
assert_eq!(snapshot.text(), "aaa\n\nb!!!\n\n\nbb\nccc\nddd\n\n\n");
}
@@ -1073,34 +1079,32 @@ mod tests {
let text = "one two three\nfour five six\nseven eight";
let buffer = cx.add_model(|cx| Buffer::new(0, text, cx));
let (_, folds_snapshot) = FoldMap::new(buffer.read(cx).snapshot());
let buffer = MultiBuffer::build_simple(text, cx);
let buffer_snapshot = buffer.read(cx).snapshot(cx);
let (_, folds_snapshot) = FoldMap::new(buffer_snapshot.clone());
let (_, tabs_snapshot) = TabMap::new(folds_snapshot.clone(), 1);
let (_, wraps_snapshot) = WrapMap::new(tabs_snapshot, font_id, 14.0, Some(60.), cx);
let mut block_map = BlockMap::new(buffer.clone(), wraps_snapshot.clone());
let mut block_map = BlockMap::new(wraps_snapshot.clone());
let mut writer = block_map.write(wraps_snapshot.clone(), vec![], cx);
writer.insert(
vec![
BlockProperties {
position: Point::new(1, 12),
disposition: BlockDisposition::Above,
render: Arc::new(|_| Empty::new().named("block 1")),
height: 1,
},
BlockProperties {
position: Point::new(1, 1),
disposition: BlockDisposition::Below,
render: Arc::new(|_| Empty::new().named("block 2")),
height: 1,
},
],
cx,
);
let mut writer = block_map.write(wraps_snapshot.clone(), vec![]);
writer.insert(vec![
BlockProperties {
position: buffer_snapshot.anchor_after(Point::new(1, 12)),
disposition: BlockDisposition::Above,
render: Arc::new(|_| Empty::new().named("block 1")),
height: 1,
},
BlockProperties {
position: buffer_snapshot.anchor_after(Point::new(1, 1)),
disposition: BlockDisposition::Below,
render: Arc::new(|_| Empty::new().named("block 2")),
height: 1,
},
]);
// Blocks with an 'above' disposition go above their corresponding buffer line.
// Blocks with a 'below' disposition go below their corresponding buffer line.
let mut snapshot = block_map.read(wraps_snapshot, vec![], cx);
let snapshot = block_map.read(wraps_snapshot, vec![]);
assert_eq!(
snapshot.text(),
"one two \nthree\n\nfour five \nsix\n\nseven \neight"
@@ -1128,18 +1132,21 @@ mod tests {
log::info!("Wrap width: {:?}", wrap_width);
let buffer = cx.add_model(|cx| {
let buffer = if rng.gen() {
let len = rng.gen_range(0..10);
let text = RandomCharIter::new(&mut rng).take(len).collect::<String>();
log::info!("initial buffer text: {:?}", text);
Buffer::new(0, text, cx)
});
let mut buffer_snapshot = buffer.read(cx).snapshot();
MultiBuffer::build_simple(&text, cx)
} else {
MultiBuffer::build_random(&mut rng, cx)
};
let mut buffer_snapshot = buffer.read(cx).snapshot(cx);
let (fold_map, folds_snapshot) = FoldMap::new(buffer_snapshot.clone());
let (tab_map, tabs_snapshot) = TabMap::new(folds_snapshot.clone(), tab_size);
let (wrap_map, wraps_snapshot) =
WrapMap::new(tabs_snapshot, font_id, font_size, wrap_width, cx);
let mut block_map = BlockMap::new(buffer.clone(), wraps_snapshot);
let mut block_map = BlockMap::new(wraps_snapshot);
let mut expected_blocks = Vec::new();
for _ in 0..operations {
@@ -1155,10 +1162,10 @@ mod tests {
wrap_map.update(cx, |map, cx| map.set_wrap_width(wrap_width, cx));
}
20..=39 => {
let block_count = rng.gen_range(1..=1);
let block_count = rng.gen_range(1..=5);
let block_properties = (0..block_count)
.map(|_| {
let buffer = buffer.read(cx);
let buffer = buffer.read(cx).read(cx);
let position = buffer.anchor_after(
buffer.clip_offset(rng.gen_range(0..=buffer.len()), Bias::Left),
);
@@ -1172,7 +1179,7 @@ mod tests {
log::info!(
"inserting block {:?} {:?} with height {}",
disposition,
position.to_point(buffer),
position.to_point(&buffer),
height
);
BlockProperties {
@@ -1190,8 +1197,8 @@ mod tests {
let (wraps_snapshot, wrap_edits) = wrap_map.update(cx, |wrap_map, cx| {
wrap_map.sync(tabs_snapshot, tab_edits, cx)
});
let mut block_map = block_map.write(wraps_snapshot, wrap_edits, cx);
let block_ids = block_map.insert(block_properties.clone(), cx);
let mut block_map = block_map.write(wraps_snapshot, wrap_edits);
let block_ids = block_map.insert(block_properties.clone());
for (block_id, props) in block_ids.into_iter().zip(block_properties) {
expected_blocks.push((block_id, props));
}
@@ -1212,17 +1219,17 @@ mod tests {
let (wraps_snapshot, wrap_edits) = wrap_map.update(cx, |wrap_map, cx| {
wrap_map.sync(tabs_snapshot, tab_edits, cx)
});
let mut block_map = block_map.write(wraps_snapshot, wrap_edits, cx);
block_map.remove(block_ids_to_remove, cx);
let mut block_map = block_map.write(wraps_snapshot, wrap_edits);
block_map.remove(block_ids_to_remove);
}
_ => {
buffer.update(cx, |buffer, cx| {
let v0 = buffer.version();
let edit_count = rng.gen_range(1..=5);
let subscription = buffer.subscribe();
buffer.randomly_edit(&mut rng, edit_count, cx);
log::info!("buffer text: {:?}", buffer.text());
buffer_edits.extend(buffer.edits_since(&v0));
buffer_snapshot = buffer.snapshot();
buffer_snapshot = buffer.snapshot(cx);
buffer_edits.extend(subscription.consume());
log::info!("buffer text: {:?}", buffer_snapshot.text());
});
}
}
@@ -1232,26 +1239,25 @@ mod tests {
let (wraps_snapshot, wrap_edits) = wrap_map.update(cx, |wrap_map, cx| {
wrap_map.sync(tabs_snapshot, tab_edits, cx)
});
let mut blocks_snapshot = block_map.read(wraps_snapshot.clone(), wrap_edits, cx);
let blocks_snapshot = block_map.read(wraps_snapshot.clone(), wrap_edits);
assert_eq!(
blocks_snapshot.transforms.summary().input_rows,
wraps_snapshot.max_point().row() + 1
);
log::info!("blocks text: {:?}", blocks_snapshot.text());
let buffer = buffer.read(cx);
let mut sorted_blocks = expected_blocks
.iter()
.cloned()
.map(|(id, block)| {
let mut position = block.position.to_point(buffer);
let mut position = block.position.to_point(&buffer_snapshot);
let column = wraps_snapshot.from_point(position, Bias::Left).column();
match block.disposition {
BlockDisposition::Above => {
position.column = 0;
}
BlockDisposition::Below => {
position.column = buffer.line_len(position.row);
position.column = buffer_snapshot.line_len(position.row);
}
};
let row = wraps_snapshot.from_point(position, Bias::Left).row();
@@ -1266,12 +1272,15 @@ mod tests {
)
})
.collect::<Vec<_>>();
sorted_blocks
.sort_unstable_by_key(|(id, block)| (block.position.row, block.disposition, *id));
let mut sorted_blocks = sorted_blocks.into_iter().peekable();
sorted_blocks.sort_unstable_by_key(|(id, block)| {
(block.position.row, block.disposition, Reverse(*id))
});
let mut sorted_blocks_iter = sorted_blocks.iter().peekable();
let input_buffer_rows = buffer_snapshot.buffer_rows(0).collect::<Vec<_>>();
let mut expected_buffer_rows = Vec::new();
let mut expected_text = String::new();
let mut expected_block_positions = Vec::new();
let input_text = wraps_snapshot.text();
for (row, input_line) in input_text.split('\n').enumerate() {
let row = row as u32;
@@ -1279,35 +1288,39 @@ mod tests {
expected_text.push('\n');
}
let buffer_row = wraps_snapshot
let buffer_row = input_buffer_rows[wraps_snapshot
.to_point(WrapPoint::new(row, 0), Bias::Left)
.row;
.row as usize];
while let Some((_, block)) = sorted_blocks.peek() {
while let Some((block_id, block)) = sorted_blocks_iter.peek() {
if block.position.row == row && block.disposition == BlockDisposition::Above {
expected_block_positions
.push((expected_text.matches('\n').count() as u32, *block_id));
let text = "\n".repeat(block.height as usize);
expected_text.push_str(&text);
for _ in 0..block.height {
expected_buffer_rows.push(None);
}
sorted_blocks.next();
sorted_blocks_iter.next();
} else {
break;
}
}
let soft_wrapped = wraps_snapshot.to_tab_point(WrapPoint::new(row, 0)).column() > 0;
expected_buffer_rows.push(if soft_wrapped { None } else { Some(buffer_row) });
expected_buffer_rows.push(if soft_wrapped { None } else { buffer_row });
expected_text.push_str(input_line);
while let Some((_, block)) = sorted_blocks.peek() {
while let Some((block_id, block)) = sorted_blocks_iter.peek() {
if block.position.row == row && block.disposition == BlockDisposition::Below {
expected_block_positions
.push((expected_text.matches('\n').count() as u32 + 1, *block_id));
let text = "\n".repeat(block.height as usize);
expected_text.push_str(&text);
for _ in 0..block.height {
expected_buffer_rows.push(None);
}
sorted_blocks.next();
sorted_blocks_iter.next();
} else {
break;
}
@@ -1335,6 +1348,14 @@ mod tests {
);
}
assert_eq!(
blocks_snapshot
.blocks_in_range(0..(expected_row_count as u32))
.map(|(row, block)| (row, block.id))
.collect::<Vec<_>>(),
expected_block_positions
);
let mut expected_longest_rows = Vec::new();
let mut longest_line_len = -1_isize;
for (row, line) in expected_lines.iter().enumerate() {
@@ -1377,16 +1398,30 @@ mod tests {
let mut block_point = BlockPoint::new(0, 0);
for c in expected_text.chars() {
let left_point = blocks_snapshot.clip_point(block_point, Bias::Left);
let right_point = blocks_snapshot.clip_point(block_point, Bias::Right);
let left_buffer_point = blocks_snapshot.to_point(left_point, Bias::Left);
assert_eq!(
blocks_snapshot.to_block_point(blocks_snapshot.to_wrap_point(left_point)),
left_point
);
assert_eq!(
left_buffer_point,
buffer_snapshot.clip_point(left_buffer_point, Bias::Right),
"{:?} is not valid in buffer coordinates",
left_point
);
let right_point = blocks_snapshot.clip_point(block_point, Bias::Right);
let right_buffer_point = blocks_snapshot.to_point(right_point, Bias::Right);
assert_eq!(
blocks_snapshot.to_block_point(blocks_snapshot.to_wrap_point(right_point)),
right_point
);
assert_eq!(
right_buffer_point,
buffer_snapshot.clip_point(right_buffer_point, Bias::Left),
"{:?} is not valid in buffer coordinates",
right_point
);
if c == '\n' {
block_point.0 += Point::new(1, 0);
@@ -1396,4 +1431,10 @@ mod tests {
}
}
}
impl BlockSnapshot {
fn to_point(&self, point: BlockPoint, bias: Bias) -> Point {
self.wrap_snapshot.to_point(self.to_wrap_point(point), bias)
}
}
}

View File

@@ -1,19 +1,20 @@
use language::{
Anchor, AnchorRangeExt, Chunk, Edit, Point, PointUtf16, Snapshot as BufferSnapshot,
TextSummary, ToOffset,
use crate::{
multi_buffer::MultiBufferRows, Anchor, AnchorRangeExt, MultiBufferChunks, MultiBufferSnapshot,
ToOffset,
};
use language::{Chunk, Edit, Point, PointUtf16, TextSummary};
use parking_lot::Mutex;
use std::{
cmp::{self, Ordering},
iter,
ops::Range,
ops::{Range, Sub},
sync::atomic::{AtomicUsize, Ordering::SeqCst},
};
use sum_tree::{Bias, Cursor, FilterCursor, SumTree};
use theme::SyntaxTheme;
pub trait ToFoldPoint {
fn to_fold_point(&self, snapshot: &Snapshot, bias: Bias) -> FoldPoint;
fn to_fold_point(&self, snapshot: &FoldSnapshot, bias: Bias) -> FoldPoint;
}
#[derive(Copy, Clone, Debug, Default, Eq, Ord, PartialOrd, PartialEq)]
@@ -36,28 +37,27 @@ impl FoldPoint {
&mut self.0.row
}
#[cfg(test)]
pub fn column_mut(&mut self) -> &mut u32 {
&mut self.0.column
}
pub fn to_buffer_point(&self, snapshot: &Snapshot) -> Point {
pub fn to_buffer_point(&self, snapshot: &FoldSnapshot) -> Point {
let mut cursor = snapshot.transforms.cursor::<(FoldPoint, Point)>();
cursor.seek(self, Bias::Right, &());
let overshoot = self.0 - cursor.start().0 .0;
cursor.start().1 + overshoot
}
pub fn to_buffer_offset(&self, snapshot: &Snapshot) -> usize {
pub fn to_buffer_offset(&self, snapshot: &FoldSnapshot) -> usize {
let mut cursor = snapshot.transforms.cursor::<(FoldPoint, Point)>();
cursor.seek(self, Bias::Right, &());
let overshoot = self.0 - cursor.start().0 .0;
snapshot
.buffer_snapshot
.to_offset(cursor.start().1 + overshoot)
.point_to_offset(cursor.start().1 + overshoot)
}
pub fn to_offset(&self, snapshot: &Snapshot) -> FoldOffset {
pub fn to_offset(&self, snapshot: &FoldSnapshot) -> FoldOffset {
let mut cursor = snapshot
.transforms
.cursor::<(FoldPoint, TransformSummary)>();
@@ -69,7 +69,7 @@ impl FoldPoint {
assert!(transform.output_text.is_none());
let end_buffer_offset = snapshot
.buffer_snapshot
.to_offset(cursor.start().1.input.lines + overshoot);
.point_to_offset(cursor.start().1.input.lines + overshoot);
offset += end_buffer_offset - cursor.start().1.input.bytes;
}
FoldOffset(offset)
@@ -77,7 +77,7 @@ impl FoldPoint {
}
impl ToFoldPoint for Point {
fn to_fold_point(&self, snapshot: &Snapshot, bias: Bias) -> FoldPoint {
fn to_fold_point(&self, snapshot: &FoldSnapshot, bias: Bias) -> FoldPoint {
let mut cursor = snapshot.transforms.cursor::<(Point, FoldPoint)>();
cursor.seek(self, Bias::Right, &());
if cursor.item().map_or(false, |t| t.is_fold()) {
@@ -102,7 +102,7 @@ impl<'a> FoldMapWriter<'a> {
pub fn fold<T: ToOffset>(
&mut self,
ranges: impl IntoIterator<Item = Range<T>>,
) -> (Snapshot, Vec<FoldEdit>) {
) -> (FoldSnapshot, Vec<FoldEdit>) {
let mut edits = Vec::new();
let mut folds = Vec::new();
let buffer = self.0.buffer.lock().clone();
@@ -133,7 +133,7 @@ impl<'a> FoldMapWriter<'a> {
consolidate_buffer_edits(&mut edits);
let edits = self.0.sync(buffer.clone(), edits);
let snapshot = Snapshot {
let snapshot = FoldSnapshot {
transforms: self.0.transforms.lock().clone(),
folds: self.0.folds.clone(),
buffer_snapshot: buffer,
@@ -145,7 +145,7 @@ impl<'a> FoldMapWriter<'a> {
pub fn unfold<T: ToOffset>(
&mut self,
ranges: impl IntoIterator<Item = Range<T>>,
) -> (Snapshot, Vec<FoldEdit>) {
) -> (FoldSnapshot, Vec<FoldEdit>) {
let mut edits = Vec::new();
let mut fold_ixs_to_delete = Vec::new();
let buffer = self.0.buffer.lock().clone();
@@ -154,10 +154,12 @@ impl<'a> FoldMapWriter<'a> {
let mut folds_cursor = intersecting_folds(&buffer, &self.0.folds, range, true);
while let Some(fold) = folds_cursor.item() {
let offset_range = fold.0.start.to_offset(&buffer)..fold.0.end.to_offset(&buffer);
edits.push(text::Edit {
old: offset_range.clone(),
new: offset_range,
});
if offset_range.end > offset_range.start {
edits.push(text::Edit {
old: offset_range.clone(),
new: offset_range,
});
}
fold_ixs_to_delete.push(*folds_cursor.start());
folds_cursor.next(&buffer);
}
@@ -179,7 +181,7 @@ impl<'a> FoldMapWriter<'a> {
consolidate_buffer_edits(&mut edits);
let edits = self.0.sync(buffer.clone(), edits);
let snapshot = Snapshot {
let snapshot = FoldSnapshot {
transforms: self.0.transforms.lock().clone(),
folds: self.0.folds.clone(),
buffer_snapshot: buffer,
@@ -190,14 +192,14 @@ impl<'a> FoldMapWriter<'a> {
}
pub struct FoldMap {
buffer: Mutex<BufferSnapshot>,
buffer: Mutex<MultiBufferSnapshot>,
transforms: Mutex<SumTree<Transform>>,
folds: SumTree<Fold>,
version: AtomicUsize,
}
impl FoldMap {
pub fn new(buffer: BufferSnapshot) -> (Self, Snapshot) {
pub fn new(buffer: MultiBufferSnapshot) -> (Self, FoldSnapshot) {
let this = Self {
buffer: Mutex::new(buffer.clone()),
folds: Default::default(),
@@ -214,7 +216,7 @@ impl FoldMap {
version: Default::default(),
};
let snapshot = Snapshot {
let snapshot = FoldSnapshot {
transforms: this.transforms.lock().clone(),
folds: this.folds.clone(),
buffer_snapshot: this.buffer.lock().clone(),
@@ -225,12 +227,12 @@ impl FoldMap {
pub fn read(
&self,
buffer: BufferSnapshot,
buffer: MultiBufferSnapshot,
edits: Vec<Edit<usize>>,
) -> (Snapshot, Vec<FoldEdit>) {
) -> (FoldSnapshot, Vec<FoldEdit>) {
let edits = self.sync(buffer, edits);
self.check_invariants();
let snapshot = Snapshot {
let snapshot = FoldSnapshot {
transforms: self.transforms.lock().clone(),
folds: self.folds.clone(),
buffer_snapshot: self.buffer.lock().clone(),
@@ -241,9 +243,9 @@ impl FoldMap {
pub fn write(
&mut self,
buffer: BufferSnapshot,
buffer: MultiBufferSnapshot,
edits: Vec<Edit<usize>>,
) -> (FoldMapWriter, Snapshot, Vec<FoldEdit>) {
) -> (FoldMapWriter, FoldSnapshot, Vec<FoldEdit>) {
let (snapshot, edits) = self.read(buffer, edits);
(FoldMapWriter(self), snapshot, edits)
}
@@ -260,7 +262,7 @@ impl FoldMap {
fn sync(
&self,
new_buffer: BufferSnapshot,
new_buffer: MultiBufferSnapshot,
buffer_edits: Vec<text::Edit<usize>>,
) -> Vec<FoldEdit> {
if buffer_edits.is_empty() {
@@ -457,8 +459,8 @@ impl FoldMap {
new_transforms.start().1 .0 + (edit.new.end - new_transforms.start().0);
fold_edits.push(FoldEdit {
old_bytes: FoldOffset(old_start)..FoldOffset(old_end),
new_bytes: FoldOffset(new_start)..FoldOffset(new_end),
old: FoldOffset(old_start)..FoldOffset(old_end),
new: FoldOffset(new_start)..FoldOffset(new_end),
});
}
@@ -474,14 +476,18 @@ impl FoldMap {
}
#[derive(Clone)]
pub struct Snapshot {
pub struct FoldSnapshot {
transforms: SumTree<Transform>,
folds: SumTree<Fold>,
buffer_snapshot: language::Snapshot,
buffer_snapshot: MultiBufferSnapshot,
pub version: usize,
}
impl Snapshot {
impl FoldSnapshot {
pub fn buffer_snapshot(&self) -> &MultiBufferSnapshot {
&self.buffer_snapshot
}
#[cfg(test)]
pub fn text(&self) -> String {
self.chunks(FoldOffset(0)..self.len(), None)
@@ -542,7 +548,6 @@ impl Snapshot {
FoldOffset(self.transforms.summary().output.bytes)
}
#[cfg(test)]
pub fn line_len(&self, row: u32) -> u32 {
let line_start = FoldPoint::new(row, 0).to_offset(self).0;
let line_end = if row >= self.max_point().row() {
@@ -553,15 +558,24 @@ impl Snapshot {
(line_end - line_start) as u32
}
pub fn buffer_rows(&self, start_row: u32) -> BufferRows {
pub fn buffer_rows(&self, start_row: u32) -> FoldBufferRows {
if start_row > self.transforms.summary().output.lines.row {
panic!("invalid display row {}", start_row);
}
let fold_point = FoldPoint::new(start_row, 0);
let mut cursor = self.transforms.cursor();
let mut cursor = self.transforms.cursor::<(FoldPoint, Point)>();
cursor.seek(&fold_point, Bias::Left, &());
BufferRows { fold_point, cursor }
let overshoot = fold_point.0 - cursor.start().0 .0;
let buffer_point = cursor.start().1 + overshoot;
let input_buffer_rows = self.buffer_snapshot.buffer_rows(buffer_point.row);
FoldBufferRows {
fold_point,
input_buffer_rows,
cursor,
}
}
pub fn max_point(&self) -> FoldPoint {
@@ -624,7 +638,7 @@ impl Snapshot {
&'a self,
range: Range<FoldOffset>,
theme: Option<&'a SyntaxTheme>,
) -> Chunks<'a> {
) -> FoldChunks<'a> {
let mut transform_cursor = self.transforms.cursor::<(FoldOffset, usize)>();
transform_cursor.seek(&range.end, Bias::Right, &());
@@ -635,7 +649,7 @@ impl Snapshot {
let overshoot = range.start.0 - transform_cursor.start().0 .0;
let buffer_start = transform_cursor.start().1 + overshoot;
Chunks {
FoldChunks {
transform_cursor,
buffer_chunks: self.buffer_snapshot.chunks(buffer_start..buffer_end, theme),
buffer_chunk: None,
@@ -687,11 +701,7 @@ impl Snapshot {
let buffer_position = cursor.start().1 + overshoot;
let clipped_buffer_position =
self.buffer_snapshot.clip_point(buffer_position, bias);
FoldPoint::new(
point.row(),
((point.column() as i32) + clipped_buffer_position.column as i32
- buffer_position.column as i32) as u32,
)
FoldPoint(cursor.start().0 .0 + (clipped_buffer_position - cursor.start().1))
}
} else {
FoldPoint(self.transforms.summary().output.lines)
@@ -700,7 +710,7 @@ impl Snapshot {
}
fn intersecting_folds<'a, T>(
buffer: &'a text::Snapshot,
buffer: &'a MultiBufferSnapshot,
folds: &'a SumTree<Fold>,
range: Range<T>,
inclusive: bool,
@@ -750,20 +760,20 @@ fn consolidate_buffer_edits(edits: &mut Vec<text::Edit<usize>>) {
fn consolidate_fold_edits(edits: &mut Vec<FoldEdit>) {
edits.sort_unstable_by(|a, b| {
a.old_bytes
a.old
.start
.cmp(&b.old_bytes.start)
.then_with(|| b.old_bytes.end.cmp(&a.old_bytes.end))
.cmp(&b.old.start)
.then_with(|| b.old.end.cmp(&a.old.end))
});
let mut i = 1;
while i < edits.len() {
let edit = edits[i].clone();
let prev_edit = &mut edits[i - 1];
if prev_edit.old_bytes.end >= edit.old_bytes.start {
prev_edit.old_bytes.end = prev_edit.old_bytes.end.max(edit.old_bytes.end);
prev_edit.new_bytes.start = prev_edit.new_bytes.start.min(edit.new_bytes.start);
prev_edit.new_bytes.end = prev_edit.new_bytes.end.max(edit.new_bytes.end);
if prev_edit.old.end >= edit.old.start {
prev_edit.old.end = prev_edit.old.end.max(edit.old.end);
prev_edit.new.start = prev_edit.new.start.min(edit.new.start);
prev_edit.new.end = prev_edit.new.end.max(edit.new.end);
edits.remove(i);
continue;
}
@@ -851,9 +861,9 @@ impl Default for FoldSummary {
}
impl sum_tree::Summary for FoldSummary {
type Context = text::Snapshot;
type Context = MultiBufferSnapshot;
fn add_summary(&mut self, other: &Self, buffer: &text::Snapshot) {
fn add_summary(&mut self, other: &Self, buffer: &MultiBufferSnapshot) {
if other.min_start.cmp(&self.min_start, buffer).unwrap() == Ordering::Less {
self.min_start = other.min_start.clone();
}
@@ -877,62 +887,66 @@ impl sum_tree::Summary for FoldSummary {
}
impl<'a> sum_tree::Dimension<'a, FoldSummary> for Fold {
fn add_summary(&mut self, summary: &'a FoldSummary, _: &text::Snapshot) {
fn add_summary(&mut self, summary: &'a FoldSummary, _: &MultiBufferSnapshot) {
self.0.start = summary.start.clone();
self.0.end = summary.end.clone();
}
}
impl<'a> sum_tree::SeekTarget<'a, FoldSummary, Fold> for Fold {
fn cmp(&self, other: &Self, buffer: &text::Snapshot) -> Ordering {
fn cmp(&self, other: &Self, buffer: &MultiBufferSnapshot) -> Ordering {
self.0.cmp(&other.0, buffer).unwrap()
}
}
impl<'a> sum_tree::Dimension<'a, FoldSummary> for usize {
fn add_summary(&mut self, summary: &'a FoldSummary, _: &text::Snapshot) {
fn add_summary(&mut self, summary: &'a FoldSummary, _: &MultiBufferSnapshot) {
*self += summary.count;
}
}
pub struct BufferRows<'a> {
pub struct FoldBufferRows<'a> {
cursor: Cursor<'a, Transform, (FoldPoint, Point)>,
input_buffer_rows: MultiBufferRows<'a>,
fold_point: FoldPoint,
}
impl<'a> Iterator for BufferRows<'a> {
type Item = u32;
impl<'a> Iterator for FoldBufferRows<'a> {
type Item = Option<u32>;
fn next(&mut self) -> Option<Self::Item> {
let mut traversed_fold = false;
while self.fold_point > self.cursor.end(&()).0 {
self.cursor.next(&());
traversed_fold = true;
if self.cursor.item().is_none() {
// TODO: Return a bool from next?
break;
}
}
if self.cursor.item().is_some() {
let overshoot = self.fold_point.0 - self.cursor.start().0 .0;
let buffer_point = self.cursor.start().1 + overshoot;
if traversed_fold {
self.input_buffer_rows.seek(self.cursor.start().1.row);
self.input_buffer_rows.next();
}
*self.fold_point.row_mut() += 1;
Some(buffer_point.row)
self.input_buffer_rows.next()
} else {
None
}
}
}
pub struct Chunks<'a> {
pub struct FoldChunks<'a> {
transform_cursor: Cursor<'a, Transform, (FoldOffset, usize)>,
buffer_chunks: language::Chunks<'a>,
buffer_chunks: MultiBufferChunks<'a>,
buffer_chunk: Option<(usize, Chunk<'a>)>,
buffer_offset: usize,
output_offset: usize,
max_output_offset: usize,
}
impl<'a> Iterator for Chunks<'a> {
impl<'a> Iterator for FoldChunks<'a> {
type Item = Chunk<'a>;
fn next(&mut self) -> Option<Self::Item> {
@@ -1006,7 +1020,7 @@ impl<'a> sum_tree::Dimension<'a, TransformSummary> for FoldPoint {
pub struct FoldOffset(pub usize);
impl FoldOffset {
pub fn to_point(&self, snapshot: &Snapshot) -> FoldPoint {
pub fn to_point(&self, snapshot: &FoldSnapshot) -> FoldPoint {
let mut cursor = snapshot
.transforms
.cursor::<(FoldOffset, TransformSummary)>();
@@ -1015,13 +1029,21 @@ impl FoldOffset {
Point::new(0, (self.0 - cursor.start().0 .0) as u32)
} else {
let buffer_offset = cursor.start().1.input.bytes + self.0 - cursor.start().0 .0;
let buffer_point = snapshot.buffer_snapshot.to_point(buffer_offset);
let buffer_point = snapshot.buffer_snapshot.offset_to_point(buffer_offset);
buffer_point - cursor.start().1.input.lines
};
FoldPoint(cursor.start().1.output.lines + overshoot)
}
}
impl Sub for FoldOffset {
type Output = Self;
fn sub(self, rhs: Self) -> Self::Output {
Self(self.0 - rhs.0)
}
}
impl<'a> sum_tree::Dimension<'a, TransformSummary> for FoldOffset {
fn add_summary(&mut self, summary: &'a TransformSummary, _: &()) {
self.0 += &summary.output.bytes;
@@ -1040,41 +1062,23 @@ impl<'a> sum_tree::Dimension<'a, TransformSummary> for usize {
}
}
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct FoldEdit {
pub old_bytes: Range<FoldOffset>,
pub new_bytes: Range<FoldOffset>,
}
#[cfg(test)]
impl FoldEdit {
pub fn delta(&self) -> isize {
self.inserted_bytes() as isize - self.deleted_bytes() as isize
}
pub fn deleted_bytes(&self) -> usize {
self.old_bytes.end.0 - self.old_bytes.start.0
}
pub fn inserted_bytes(&self) -> usize {
self.new_bytes.end.0 - self.new_bytes.start.0
}
}
pub type FoldEdit = Edit<FoldOffset>;
#[cfg(test)]
mod tests {
use super::*;
use crate::{test::sample_text, ToPoint};
use language::Buffer;
use crate::{MultiBuffer, ToPoint};
use rand::prelude::*;
use std::{env, mem};
use text::RandomCharIter;
use util::test::sample_text;
use Bias::{Left, Right};
#[gpui::test]
fn test_basic_folds(cx: &mut gpui::MutableAppContext) {
let buffer = cx.add_model(|cx| Buffer::new(0, sample_text(5, 6), cx));
let buffer_snapshot = buffer.read(cx).snapshot();
let buffer = MultiBuffer::build_simple(&sample_text(5, 6, 'a'), cx);
let subscription = buffer.update(cx, |buffer, _| buffer.subscribe());
let buffer_snapshot = buffer.read(cx).snapshot(cx);
let mut map = FoldMap::new(buffer_snapshot.clone()).0;
let (mut writer, _, _) = map.write(buffer_snapshot.clone(), vec![]);
@@ -1087,18 +1091,17 @@ mod tests {
edits,
&[
FoldEdit {
old_bytes: FoldOffset(2)..FoldOffset(16),
new_bytes: FoldOffset(2)..FoldOffset(5),
old: FoldOffset(2)..FoldOffset(16),
new: FoldOffset(2)..FoldOffset(5),
},
FoldEdit {
old_bytes: FoldOffset(18)..FoldOffset(29),
new_bytes: FoldOffset(7)..FoldOffset(10)
old: FoldOffset(18)..FoldOffset(29),
new: FoldOffset(7)..FoldOffset(10)
},
]
);
let (buffer_snapshot, edits) = buffer.update(cx, |buffer, cx| {
let v0 = buffer.version();
let buffer_snapshot = buffer.update(cx, |buffer, cx| {
buffer.edit(
vec![
Point::new(0, 0)..Point::new(0, 1),
@@ -1107,30 +1110,30 @@ mod tests {
"123",
cx,
);
(buffer.snapshot(), buffer.edits_since(&v0).collect())
buffer.snapshot(cx)
});
let (snapshot3, edits) = map.read(buffer_snapshot.clone(), edits);
let (snapshot3, edits) =
map.read(buffer_snapshot.clone(), subscription.consume().into_inner());
assert_eq!(snapshot3.text(), "123a…c123c…eeeee");
assert_eq!(
edits,
&[
FoldEdit {
old_bytes: FoldOffset(0)..FoldOffset(1),
new_bytes: FoldOffset(0)..FoldOffset(3),
old: FoldOffset(0)..FoldOffset(1),
new: FoldOffset(0)..FoldOffset(3),
},
FoldEdit {
old_bytes: FoldOffset(6)..FoldOffset(6),
new_bytes: FoldOffset(8)..FoldOffset(11),
old: FoldOffset(6)..FoldOffset(6),
new: FoldOffset(8)..FoldOffset(11),
},
]
);
let (buffer_snapshot, edits) = buffer.update(cx, |buffer, cx| {
let v0 = buffer.version();
let buffer_snapshot = buffer.update(cx, |buffer, cx| {
buffer.edit(vec![Point::new(2, 6)..Point::new(4, 3)], "456", cx);
(buffer.snapshot(), buffer.edits_since(&v0).collect())
buffer.snapshot(cx)
});
let (snapshot4, _) = map.read(buffer_snapshot.clone(), edits);
let (snapshot4, _) = map.read(buffer_snapshot.clone(), subscription.consume().into_inner());
assert_eq!(snapshot4.text(), "123a…c123456eee");
let (mut writer, _, _) = map.write(buffer_snapshot.clone(), vec![]);
@@ -1141,8 +1144,9 @@ mod tests {
#[gpui::test]
fn test_adjacent_folds(cx: &mut gpui::MutableAppContext) {
let buffer = cx.add_model(|cx| Buffer::new(0, "abcdefghijkl", cx));
let buffer_snapshot = buffer.read(cx).snapshot();
let buffer = MultiBuffer::build_simple("abcdefghijkl", cx);
let subscription = buffer.update(cx, |buffer, _| buffer.subscribe());
let buffer_snapshot = buffer.read(cx).snapshot(cx);
{
let mut map = FoldMap::new(buffer_snapshot.clone()).0;
@@ -1175,20 +1179,20 @@ mod tests {
assert_eq!(snapshot.text(), "…fghijkl");
// Edit within one of the folds.
let (buffer_snapshot, edits) = buffer.update(cx, |buffer, cx| {
let v0 = buffer.version();
let buffer_snapshot = buffer.update(cx, |buffer, cx| {
buffer.edit(vec![0..1], "12345", cx);
(buffer.snapshot(), buffer.edits_since(&v0).collect())
buffer.snapshot(cx)
});
let (snapshot, _) = map.read(buffer_snapshot.clone(), edits);
let (snapshot, _) =
map.read(buffer_snapshot.clone(), subscription.consume().into_inner());
assert_eq!(snapshot.text(), "12345…fghijkl");
}
}
#[gpui::test]
fn test_overlapping_folds(cx: &mut gpui::MutableAppContext) {
let buffer = cx.add_model(|cx| Buffer::new(0, sample_text(5, 6), cx));
let buffer_snapshot = buffer.read(cx).snapshot();
let buffer = MultiBuffer::build_simple(&sample_text(5, 6, 'a'), cx);
let buffer_snapshot = buffer.read(cx).snapshot(cx);
let mut map = FoldMap::new(buffer_snapshot.clone()).0;
let (mut writer, _, _) = map.write(buffer_snapshot.clone(), vec![]);
writer.fold(vec![
@@ -1203,8 +1207,9 @@ mod tests {
#[gpui::test]
fn test_merging_folds_via_edit(cx: &mut gpui::MutableAppContext) {
let buffer = cx.add_model(|cx| Buffer::new(0, sample_text(5, 6), cx));
let buffer_snapshot = buffer.read(cx).snapshot();
let buffer = MultiBuffer::build_simple(&sample_text(5, 6, 'a'), cx);
let subscription = buffer.update(cx, |buffer, _| buffer.subscribe());
let buffer_snapshot = buffer.read(cx).snapshot(cx);
let mut map = FoldMap::new(buffer_snapshot.clone()).0;
let (mut writer, _, _) = map.write(buffer_snapshot.clone(), vec![]);
@@ -1215,21 +1220,19 @@ mod tests {
let (snapshot, _) = map.read(buffer_snapshot.clone(), vec![]);
assert_eq!(snapshot.text(), "aa…cccc\nd…eeeee");
let (buffer_snapshot, edits) = buffer.update(cx, |buffer, cx| {
let v0 = buffer.version();
let buffer_snapshot = buffer.update(cx, |buffer, cx| {
buffer.edit(Some(Point::new(2, 2)..Point::new(3, 1)), "", cx);
(buffer.snapshot(), buffer.edits_since(&v0).collect())
buffer.snapshot(cx)
});
let (snapshot, _) = map.read(buffer_snapshot.clone(), edits);
let (snapshot, _) = map.read(buffer_snapshot.clone(), subscription.consume().into_inner());
assert_eq!(snapshot.text(), "aa…eeeee");
}
#[gpui::test]
fn test_folds_in_range(cx: &mut gpui::MutableAppContext) {
let buffer = cx.add_model(|cx| Buffer::new(0, sample_text(5, 6), cx));
let buffer_snapshot = buffer.read(cx).snapshot();
let buffer = MultiBuffer::build_simple(&sample_text(5, 6, 'a'), cx);
let buffer_snapshot = buffer.read(cx).snapshot(cx);
let mut map = FoldMap::new(buffer_snapshot.clone()).0;
let buffer = buffer.read(cx);
let (mut writer, _, _) = map.write(buffer_snapshot.clone(), vec![]);
writer.fold(vec![
@@ -1241,7 +1244,7 @@ mod tests {
let (snapshot, _) = map.read(buffer_snapshot.clone(), vec![]);
let fold_ranges = snapshot
.folds_in_range(Point::new(1, 0)..Point::new(1, 3))
.map(|fold| fold.start.to_point(buffer)..fold.end.to_point(buffer))
.map(|fold| fold.start.to_point(&buffer_snapshot)..fold.end.to_point(&buffer_snapshot))
.collect::<Vec<_>>();
assert_eq!(
fold_ranges,
@@ -1258,55 +1261,73 @@ mod tests {
.map(|i| i.parse().expect("invalid `OPERATIONS` variable"))
.unwrap_or(10);
let buffer = cx.add_model(|cx| {
let len = rng.gen_range(0..10);
let text = RandomCharIter::new(&mut rng).take(len).collect::<String>();
Buffer::new(0, text, cx)
});
let buffer_snapshot = buffer.read(cx).snapshot();
let len = rng.gen_range(0..10);
let text = RandomCharIter::new(&mut rng).take(len).collect::<String>();
let buffer = if rng.gen() {
MultiBuffer::build_simple(&text, cx)
} else {
MultiBuffer::build_random(&mut rng, cx)
};
let mut buffer_snapshot = buffer.read(cx).snapshot(cx);
let mut map = FoldMap::new(buffer_snapshot.clone()).0;
let (mut initial_snapshot, _) = map.read(buffer_snapshot.clone(), vec![]);
let mut snapshot_edits = Vec::new();
for _ in 0..operations {
log::info!("text: {:?}", buffer.read(cx).text());
let buffer_edits = match rng.gen_range(0..=100) {
log::info!("text: {:?}", buffer_snapshot.text());
let mut buffer_edits = Vec::new();
match rng.gen_range(0..=100) {
0..=59 => {
snapshot_edits.extend(map.randomly_mutate(&mut rng));
vec![]
}
_ => buffer.update(cx, |buffer, cx| {
let start_version = buffer.version.clone();
let subscription = buffer.subscribe();
let edit_count = rng.gen_range(1..=5);
buffer.randomly_edit(&mut rng, edit_count, cx);
let edits = buffer
.edits_since::<Point>(&start_version)
.collect::<Vec<_>>();
buffer_snapshot = buffer.snapshot(cx);
let edits = subscription.consume().into_inner();
log::info!("editing {:?}", edits);
buffer.edits_since::<usize>(&start_version).collect()
buffer_edits.extend(edits);
}),
};
let buffer_snapshot = buffer.read(cx).snapshot();
let (snapshot, edits) = map.read(buffer_snapshot.clone(), buffer_edits);
snapshot_edits.push((snapshot.clone(), edits));
let mut expected_text: String = buffer_snapshot.text().to_string();
let mut expected_buffer_rows = Vec::new();
let mut next_row = buffer_snapshot.max_point().row;
for fold_range in map.merged_fold_ranges().into_iter().rev() {
let fold_start = buffer_snapshot.point_for_offset(fold_range.start).unwrap();
let fold_end = buffer_snapshot.point_for_offset(fold_range.end).unwrap();
expected_buffer_rows.extend((fold_end.row + 1..=next_row).rev());
next_row = fold_start.row;
expected_text.replace_range(fold_range.start..fold_range.end, "");
}
expected_buffer_rows.extend((0..=next_row).rev());
expected_buffer_rows.reverse();
assert_eq!(snapshot.text(), expected_text);
log::info!(
"fold text {:?} ({} lines)",
expected_text,
expected_text.matches('\n').count() + 1
);
let mut prev_row = 0;
let mut expected_buffer_rows = Vec::new();
for fold_range in map.merged_fold_ranges().into_iter() {
let fold_start = buffer_snapshot.offset_to_point(fold_range.start).row;
let fold_end = buffer_snapshot.offset_to_point(fold_range.end).row;
expected_buffer_rows.extend(
buffer_snapshot
.buffer_rows(prev_row)
.take((1 + fold_start - prev_row) as usize),
);
prev_row = 1 + fold_end;
}
expected_buffer_rows.extend(buffer_snapshot.buffer_rows(prev_row));
assert_eq!(
expected_buffer_rows.len(),
expected_text.matches('\n').count() + 1,
"wrong expected buffer rows {:?}. text: {:?}",
expected_buffer_rows,
expected_text
);
for (output_row, line) in expected_text.lines().enumerate() {
let line_len = snapshot.line_len(output_row as u32);
@@ -1375,7 +1396,6 @@ mod tests {
}
let text = &expected_text[start.0..end.0];
log::info!("slicing {:?}..{:?} (text: {:?})", start, end, text);
assert_eq!(
snapshot
.chunks(start..end, None)
@@ -1385,14 +1405,19 @@ mod tests {
);
}
for (idx, buffer_row) in expected_buffer_rows.iter().enumerate() {
let fold_row = Point::new(*buffer_row, 0)
.to_fold_point(&snapshot, Right)
let mut fold_row = 0;
while fold_row < expected_buffer_rows.len() as u32 {
fold_row = snapshot
.clip_point(FoldPoint::new(fold_row, 0), Bias::Right)
.row();
eprintln!("fold_row: {} of {}", fold_row, expected_buffer_rows.len());
assert_eq!(
snapshot.buffer_rows(fold_row).collect::<Vec<_>>(),
expected_buffer_rows[idx..],
expected_buffer_rows[(fold_row as usize)..],
"wrong buffer rows starting at fold row {}",
fold_row,
);
fold_row += 1;
}
for fold_range in map.merged_fold_ranges() {
@@ -1454,12 +1479,9 @@ mod tests {
let mut text = initial_snapshot.text();
for (snapshot, edits) in snapshot_edits.drain(..) {
let new_text = snapshot.text();
let mut delta = 0isize;
for edit in edits {
let old_bytes = ((edit.old_bytes.start.0 as isize) + delta) as usize
..((edit.old_bytes.end.0 as isize) + delta) as usize;
let new_bytes = edit.new_bytes.start.0..edit.new_bytes.end.0;
delta += edit.delta();
let old_bytes = edit.new.start.0..edit.new.start.0 + edit.old_len().0;
let new_bytes = edit.new.start.0..edit.new.end.0;
text.replace_range(old_bytes, &new_text[new_bytes]);
}
@@ -1471,10 +1493,10 @@ mod tests {
#[gpui::test]
fn test_buffer_rows(cx: &mut gpui::MutableAppContext) {
let text = sample_text(6, 6) + "\n";
let buffer = cx.add_model(|cx| Buffer::new(0, text, cx));
let text = sample_text(6, 6, 'a') + "\n";
let buffer = MultiBuffer::build_simple(&text, cx);
let buffer_snapshot = buffer.read(cx).snapshot();
let buffer_snapshot = buffer.read(cx).snapshot(cx);
let mut map = FoldMap::new(buffer_snapshot.clone()).0;
let (mut writer, _, _) = map.write(buffer_snapshot.clone(), vec![]);
@@ -1485,8 +1507,11 @@ mod tests {
let (snapshot, _) = map.read(buffer_snapshot.clone(), vec![]);
assert_eq!(snapshot.text(), "aa…cccc\nd…eeeee\nffffff\n");
assert_eq!(snapshot.buffer_rows(0).collect::<Vec<_>>(), [0, 3, 5, 6]);
assert_eq!(snapshot.buffer_rows(3).collect::<Vec<_>>(), [6]);
assert_eq!(
snapshot.buffer_rows(0).collect::<Vec<_>>(),
[Some(0), Some(3), Some(5), Some(6)]
);
assert_eq!(snapshot.buffer_rows(3).collect::<Vec<_>>(), [Some(6)]);
}
impl FoldMap {
@@ -1519,7 +1544,10 @@ mod tests {
merged_ranges
}
pub fn randomly_mutate(&mut self, rng: &mut impl Rng) -> Vec<(Snapshot, Vec<FoldEdit>)> {
pub fn randomly_mutate(
&mut self,
rng: &mut impl Rng,
) -> Vec<(FoldSnapshot, Vec<FoldEdit>)> {
let mut snapshot_edits = Vec::new();
match rng.gen_range(0..=100) {
0..=39 if !self.folds.is_empty() => {

View File

@@ -1,4 +1,5 @@
use super::fold_map::{self, FoldEdit, FoldPoint, Snapshot as FoldSnapshot, ToFoldPoint};
use super::fold_map::{self, FoldEdit, FoldPoint, FoldSnapshot, ToFoldPoint};
use crate::MultiBufferSnapshot;
use language::{rope, Chunk};
use parking_lot::Mutex;
use std::{cmp, mem, ops::Range};
@@ -6,11 +7,11 @@ use sum_tree::Bias;
use text::Point;
use theme::SyntaxTheme;
pub struct TabMap(Mutex<Snapshot>);
pub struct TabMap(Mutex<TabSnapshot>);
impl TabMap {
pub fn new(input: FoldSnapshot, tab_size: usize) -> (Self, Snapshot) {
let snapshot = Snapshot {
pub fn new(input: FoldSnapshot, tab_size: usize) -> (Self, TabSnapshot) {
let snapshot = TabSnapshot {
fold_snapshot: input,
tab_size,
};
@@ -21,10 +22,10 @@ impl TabMap {
&self,
fold_snapshot: FoldSnapshot,
mut fold_edits: Vec<FoldEdit>,
) -> (Snapshot, Vec<Edit>) {
) -> (TabSnapshot, Vec<TabEdit>) {
let mut old_snapshot = self.0.lock();
let max_offset = old_snapshot.fold_snapshot.len();
let new_snapshot = Snapshot {
let new_snapshot = TabSnapshot {
fold_snapshot,
tab_size: old_snapshot.tab_size,
};
@@ -34,13 +35,13 @@ impl TabMap {
let mut delta = 0;
for chunk in old_snapshot
.fold_snapshot
.chunks(fold_edit.old_bytes.end..max_offset, None)
.chunks(fold_edit.old.end..max_offset, None)
{
let patterns: &[_] = &['\t', '\n'];
if let Some(ix) = chunk.text.find(patterns) {
if &chunk.text[ix..ix + 1] == "\t" {
fold_edit.old_bytes.end.0 += delta + ix + 1;
fold_edit.new_bytes.end.0 += delta + ix + 1;
fold_edit.old.end.0 += delta + ix + 1;
fold_edit.new.end.0 += delta + ix + 1;
}
break;
@@ -55,9 +56,9 @@ impl TabMap {
let (prev_edits, next_edits) = fold_edits.split_at_mut(ix);
let prev_edit = prev_edits.last_mut().unwrap();
let edit = &next_edits[0];
if prev_edit.old_bytes.end >= edit.old_bytes.start {
prev_edit.old_bytes.end = edit.old_bytes.end;
prev_edit.new_bytes.end = edit.new_bytes.end;
if prev_edit.old.end >= edit.old.start {
prev_edit.old.end = edit.old.end;
prev_edit.new.end = edit.new.end;
fold_edits.remove(ix);
} else {
ix += 1;
@@ -65,25 +66,13 @@ impl TabMap {
}
for fold_edit in fold_edits {
let old_start = fold_edit
.old_bytes
.start
.to_point(&old_snapshot.fold_snapshot);
let old_end = fold_edit
.old_bytes
.end
.to_point(&old_snapshot.fold_snapshot);
let new_start = fold_edit
.new_bytes
.start
.to_point(&new_snapshot.fold_snapshot);
let new_end = fold_edit
.new_bytes
.end
.to_point(&new_snapshot.fold_snapshot);
tab_edits.push(Edit {
old_lines: old_snapshot.to_tab_point(old_start)..old_snapshot.to_tab_point(old_end),
new_lines: new_snapshot.to_tab_point(new_start)..new_snapshot.to_tab_point(new_end),
let old_start = fold_edit.old.start.to_point(&old_snapshot.fold_snapshot);
let old_end = fold_edit.old.end.to_point(&old_snapshot.fold_snapshot);
let new_start = fold_edit.new.start.to_point(&new_snapshot.fold_snapshot);
let new_end = fold_edit.new.end.to_point(&new_snapshot.fold_snapshot);
tab_edits.push(TabEdit {
old: old_snapshot.to_tab_point(old_start)..old_snapshot.to_tab_point(old_end),
new: new_snapshot.to_tab_point(new_start)..new_snapshot.to_tab_point(new_end),
});
}
@@ -93,12 +82,16 @@ impl TabMap {
}
#[derive(Clone)]
pub struct Snapshot {
pub struct TabSnapshot {
pub fold_snapshot: FoldSnapshot,
pub tab_size: usize,
}
impl Snapshot {
impl TabSnapshot {
pub fn buffer_snapshot(&self) -> &MultiBufferSnapshot {
self.fold_snapshot.buffer_snapshot()
}
pub fn text_summary(&self) -> TextSummary {
self.text_summary_for_range(TabPoint::zero()..self.max_point())
}
@@ -155,7 +148,7 @@ impl Snapshot {
&'a self,
range: Range<TabPoint>,
theme: Option<&'a SyntaxTheme>,
) -> Chunks<'a> {
) -> TabChunks<'a> {
let (input_start, expanded_char_column, to_next_stop) =
self.to_fold_point(range.start, Bias::Left);
let input_start = input_start.to_offset(&self.fold_snapshot);
@@ -169,7 +162,7 @@ impl Snapshot {
to_next_stop
};
Chunks {
TabChunks {
fold_chunks: self.fold_snapshot.chunks(input_start..input_end, theme),
column: expanded_char_column,
output_position: range.start.0,
@@ -183,7 +176,7 @@ impl Snapshot {
}
}
pub fn buffer_rows(&self, row: u32) -> fold_map::BufferRows {
pub fn buffer_rows(&self, row: u32) -> fold_map::FoldBufferRows {
self.fold_snapshot.buffer_rows(row)
}
@@ -322,11 +315,7 @@ impl From<super::Point> for TabPoint {
}
}
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct Edit {
pub old_lines: Range<TabPoint>,
pub new_lines: Range<TabPoint>,
}
pub type TabEdit = text::Edit<TabPoint>;
#[derive(Clone, Debug, Default, Eq, PartialEq)]
pub struct TextSummary {
@@ -380,8 +369,8 @@ impl<'a> std::ops::AddAssign<&'a Self> for TextSummary {
// Handles a tab width <= 16
const SPACES: &'static str = " ";
pub struct Chunks<'a> {
fold_chunks: fold_map::Chunks<'a>,
pub struct TabChunks<'a> {
fold_chunks: fold_map::FoldChunks<'a>,
chunk: Chunk<'a>,
column: usize,
output_position: Point,
@@ -390,7 +379,7 @@ pub struct Chunks<'a> {
skip_leading_tab: bool,
}
impl<'a> Iterator for Chunks<'a> {
impl<'a> Iterator for TabChunks<'a> {
type Item = Chunk<'a>;
fn next(&mut self) -> Option<Self::Item> {
@@ -450,28 +439,29 @@ impl<'a> Iterator for Chunks<'a> {
#[cfg(test)]
mod tests {
use super::*;
use crate::display_map::fold_map::FoldMap;
use language::Buffer;
use crate::{display_map::fold_map::FoldMap, MultiBuffer};
use rand::{prelude::StdRng, Rng};
use text::{RandomCharIter, Rope};
#[test]
fn test_expand_tabs() {
assert_eq!(Snapshot::expand_tabs("\t".chars(), 0, 4), 0);
assert_eq!(Snapshot::expand_tabs("\t".chars(), 1, 4), 4);
assert_eq!(Snapshot::expand_tabs("\ta".chars(), 2, 4), 5);
assert_eq!(TabSnapshot::expand_tabs("\t".chars(), 0, 4), 0);
assert_eq!(TabSnapshot::expand_tabs("\t".chars(), 1, 4), 4);
assert_eq!(TabSnapshot::expand_tabs("\ta".chars(), 2, 4), 5);
}
#[gpui::test(iterations = 100)]
fn test_random(cx: &mut gpui::MutableAppContext, mut rng: StdRng) {
fn test_random_tabs(cx: &mut gpui::MutableAppContext, mut rng: StdRng) {
let tab_size = rng.gen_range(1..=4);
let buffer = cx.add_model(|cx| {
let len = rng.gen_range(0..30);
let len = rng.gen_range(0..30);
let buffer = if rng.gen() {
let text = RandomCharIter::new(&mut rng).take(len).collect::<String>();
Buffer::new(0, text, cx)
});
let buffer_snapshot = buffer.read(cx).snapshot();
log::info!("Buffer text: {:?}", buffer.read(cx).text());
MultiBuffer::build_simple(&text, cx)
} else {
MultiBuffer::build_random(&mut rng, cx)
};
let buffer_snapshot = buffer.read(cx).snapshot(cx);
log::info!("Buffer text: {:?}", buffer_snapshot.text());
let (mut fold_map, _) = FoldMap::new(buffer_snapshot.clone());
fold_map.randomly_mutate(&mut rng);
@@ -502,13 +492,15 @@ mod tests {
.chunks_in_range(text.point_to_offset(start.0)..text.point_to_offset(end.0))
.collect::<String>();
let expected_summary = TextSummary::from(expected_text.as_str());
log::info!("slicing {:?}..{:?} (text: {:?})", start, end, text);
assert_eq!(
expected_text,
tabs_snapshot
.chunks(start..end, None)
.map(|c| c.text)
.collect::<String>()
.collect::<String>(),
"chunks({:?}..{:?})",
start,
end
);
let mut actual_summary = tabs_snapshot.text_summary_for_range(start..end);

View File

@@ -1,24 +1,25 @@
use super::{
fold_map,
tab_map::{self, Edit as TabEdit, Snapshot as TabSnapshot, TabPoint},
tab_map::{self, TabEdit, TabPoint, TabSnapshot},
};
use crate::{MultiBufferSnapshot, Point};
use gpui::{
fonts::FontId, text_layout::LineWrapper, Entity, ModelContext, ModelHandle, MutableAppContext,
Task,
};
use language::{Chunk, Point};
use language::Chunk;
use lazy_static::lazy_static;
use smol::future::yield_now;
use std::{collections::VecDeque, mem, ops::Range, time::Duration};
use std::{cmp, collections::VecDeque, mem, ops::Range, time::Duration};
use sum_tree::{Bias, Cursor, SumTree};
use text::Patch;
use theme::SyntaxTheme;
pub use super::tab_map::TextSummary;
pub type Edit = text::Edit<u32>;
pub type WrapEdit = text::Edit<u32>;
pub struct WrapMap {
snapshot: Snapshot,
snapshot: WrapSnapshot,
pending_edits: VecDeque<(TabSnapshot, Vec<TabEdit>)>,
interpolated_edits: Patch<u32>,
edits_since_sync: Patch<u32>,
@@ -32,7 +33,7 @@ impl Entity for WrapMap {
}
#[derive(Clone)]
pub struct Snapshot {
pub struct WrapSnapshot {
tab_snapshot: TabSnapshot,
transforms: SumTree<Transform>,
interpolated: bool,
@@ -53,17 +54,17 @@ struct TransformSummary {
#[derive(Copy, Clone, Debug, Default, Eq, Ord, PartialOrd, PartialEq)]
pub struct WrapPoint(pub super::Point);
pub struct Chunks<'a> {
input_chunks: tab_map::Chunks<'a>,
pub struct WrapChunks<'a> {
input_chunks: tab_map::TabChunks<'a>,
input_chunk: Chunk<'a>,
output_position: WrapPoint,
max_output_row: u32,
transforms: Cursor<'a, Transform, (WrapPoint, TabPoint)>,
}
pub struct BufferRows<'a> {
input_buffer_rows: fold_map::BufferRows<'a>,
input_buffer_row: u32,
pub struct WrapBufferRows<'a> {
input_buffer_rows: fold_map::FoldBufferRows<'a>,
input_buffer_row: Option<u32>,
output_row: u32,
soft_wrapped: bool,
max_output_row: u32,
@@ -77,7 +78,7 @@ impl WrapMap {
font_size: f32,
wrap_width: Option<f32>,
cx: &mut MutableAppContext,
) -> (ModelHandle<Self>, Snapshot) {
) -> (ModelHandle<Self>, WrapSnapshot) {
let handle = cx.add_model(|cx| {
let mut this = Self {
font: (font_id, font_size),
@@ -85,7 +86,7 @@ impl WrapMap {
pending_edits: Default::default(),
interpolated_edits: Default::default(),
edits_since_sync: Default::default(),
snapshot: Snapshot::new(tab_snapshot),
snapshot: WrapSnapshot::new(tab_snapshot),
background_task: None,
};
this.set_wrap_width(wrap_width, cx);
@@ -106,7 +107,7 @@ impl WrapMap {
tab_snapshot: TabSnapshot,
edits: Vec<TabEdit>,
cx: &mut ModelContext<Self>,
) -> (Snapshot, Vec<Edit>) {
) -> (WrapSnapshot, Vec<WrapEdit>) {
if self.wrap_width.is_some() {
self.pending_edits.push_back((tab_snapshot, edits));
self.flush_edits(cx);
@@ -157,8 +158,8 @@ impl WrapMap {
.update(
tab_snapshot,
&[TabEdit {
old_lines: range.clone(),
new_lines: range.clone(),
old: range.clone(),
new: range.clone(),
}],
wrap_width,
&mut line_wrapper,
@@ -203,7 +204,7 @@ impl WrapMap {
}
let new_rows = self.snapshot.transforms.summary().output.lines.row + 1;
self.snapshot.interpolated = false;
self.edits_since_sync = self.edits_since_sync.compose(&Patch::new(vec![Edit {
self.edits_since_sync = self.edits_since_sync.compose(&Patch::new(vec![WrapEdit {
old: 0..old_rows,
new: 0..new_rows,
}]));
@@ -291,7 +292,7 @@ impl WrapMap {
}
}
impl Snapshot {
impl WrapSnapshot {
fn new(tab_snapshot: TabSnapshot) -> Self {
let mut transforms = SumTree::new();
let extent = tab_snapshot.text_summary();
@@ -305,6 +306,10 @@ impl Snapshot {
}
}
pub fn buffer_snapshot(&self) -> &MultiBufferSnapshot {
self.tab_snapshot.buffer_snapshot()
}
fn interpolate(&mut self, new_tab_snapshot: TabSnapshot, tab_edits: &[TabEdit]) -> Patch<u32> {
let mut new_transforms;
if tab_edits.is_empty() {
@@ -313,47 +318,44 @@ impl Snapshot {
let mut old_cursor = self.transforms.cursor::<TabPoint>();
let mut tab_edits_iter = tab_edits.iter().peekable();
new_transforms = old_cursor.slice(
&tab_edits_iter.peek().unwrap().old_lines.start,
Bias::Right,
&(),
);
new_transforms =
old_cursor.slice(&tab_edits_iter.peek().unwrap().old.start, Bias::Right, &());
while let Some(edit) = tab_edits_iter.next() {
if edit.new_lines.start > TabPoint::from(new_transforms.summary().input.lines) {
if edit.new.start > TabPoint::from(new_transforms.summary().input.lines) {
let summary = new_tab_snapshot.text_summary_for_range(
TabPoint::from(new_transforms.summary().input.lines)..edit.new_lines.start,
TabPoint::from(new_transforms.summary().input.lines)..edit.new.start,
);
new_transforms.push_or_extend(Transform::isomorphic(summary));
}
if !edit.new_lines.is_empty() {
if !edit.new.is_empty() {
new_transforms.push_or_extend(Transform::isomorphic(
new_tab_snapshot.text_summary_for_range(edit.new_lines.clone()),
new_tab_snapshot.text_summary_for_range(edit.new.clone()),
));
}
old_cursor.seek_forward(&edit.old_lines.end, Bias::Right, &());
old_cursor.seek_forward(&edit.old.end, Bias::Right, &());
if let Some(next_edit) = tab_edits_iter.peek() {
if next_edit.old_lines.start > old_cursor.end(&()) {
if old_cursor.end(&()) > edit.old_lines.end {
if next_edit.old.start > old_cursor.end(&()) {
if old_cursor.end(&()) > edit.old.end {
let summary = self
.tab_snapshot
.text_summary_for_range(edit.old_lines.end..old_cursor.end(&()));
.text_summary_for_range(edit.old.end..old_cursor.end(&()));
new_transforms.push_or_extend(Transform::isomorphic(summary));
}
old_cursor.next(&());
new_transforms.push_tree(
old_cursor.slice(&next_edit.old_lines.start, Bias::Right, &()),
old_cursor.slice(&next_edit.old.start, Bias::Right, &()),
&(),
);
}
} else {
if old_cursor.end(&()) > edit.old_lines.end {
if old_cursor.end(&()) > edit.old.end {
let summary = self
.tab_snapshot
.text_summary_for_range(edit.old_lines.end..old_cursor.end(&()));
.text_summary_for_range(edit.old.end..old_cursor.end(&()));
new_transforms.push_or_extend(Transform::isomorphic(summary));
}
old_cursor.next(&());
@@ -364,7 +366,7 @@ impl Snapshot {
let old_snapshot = mem::replace(
self,
Snapshot {
WrapSnapshot {
tab_snapshot: new_tab_snapshot,
transforms: new_transforms,
interpolated: true,
@@ -391,14 +393,14 @@ impl Snapshot {
let mut row_edits = Vec::new();
while let Some(edit) = tab_edits_iter.next() {
let mut row_edit = RowEdit {
old_rows: edit.old_lines.start.row()..edit.old_lines.end.row() + 1,
new_rows: edit.new_lines.start.row()..edit.new_lines.end.row() + 1,
old_rows: edit.old.start.row()..edit.old.end.row() + 1,
new_rows: edit.new.start.row()..edit.new.end.row() + 1,
};
while let Some(next_edit) = tab_edits_iter.peek() {
if next_edit.old_lines.start.row() <= row_edit.old_rows.end {
row_edit.old_rows.end = next_edit.old_lines.end.row() + 1;
row_edit.new_rows.end = next_edit.new_lines.end.row() + 1;
if next_edit.old.start.row() <= row_edit.old_rows.end {
row_edit.old_rows.end = next_edit.old.end.row() + 1;
row_edit.new_rows.end = next_edit.new.end.row() + 1;
tab_edits_iter.next();
} else {
break;
@@ -513,7 +515,7 @@ impl Snapshot {
let old_snapshot = mem::replace(
self,
Snapshot {
WrapSnapshot {
tab_snapshot: new_tab_snapshot,
transforms: new_transforms,
interpolated: false,
@@ -523,33 +525,33 @@ impl Snapshot {
old_snapshot.compute_edits(tab_edits, self)
}
fn compute_edits(&self, tab_edits: &[TabEdit], new_snapshot: &Snapshot) -> Patch<u32> {
fn compute_edits(&self, tab_edits: &[TabEdit], new_snapshot: &WrapSnapshot) -> Patch<u32> {
let mut wrap_edits = Vec::new();
let mut old_cursor = self.transforms.cursor::<TransformSummary>();
let mut new_cursor = new_snapshot.transforms.cursor::<TransformSummary>();
for mut tab_edit in tab_edits.iter().cloned() {
tab_edit.old_lines.start.0.column = 0;
tab_edit.old_lines.end.0 += Point::new(1, 0);
tab_edit.new_lines.start.0.column = 0;
tab_edit.new_lines.end.0 += Point::new(1, 0);
tab_edit.old.start.0.column = 0;
tab_edit.old.end.0 += Point::new(1, 0);
tab_edit.new.start.0.column = 0;
tab_edit.new.end.0 += Point::new(1, 0);
old_cursor.seek(&tab_edit.old_lines.start, Bias::Right, &());
old_cursor.seek(&tab_edit.old.start, Bias::Right, &());
let mut old_start = old_cursor.start().output.lines;
old_start += tab_edit.old_lines.start.0 - old_cursor.start().input.lines;
old_start += tab_edit.old.start.0 - old_cursor.start().input.lines;
old_cursor.seek(&tab_edit.old_lines.end, Bias::Right, &());
old_cursor.seek(&tab_edit.old.end, Bias::Right, &());
let mut old_end = old_cursor.start().output.lines;
old_end += tab_edit.old_lines.end.0 - old_cursor.start().input.lines;
old_end += tab_edit.old.end.0 - old_cursor.start().input.lines;
new_cursor.seek(&tab_edit.new_lines.start, Bias::Right, &());
new_cursor.seek(&tab_edit.new.start, Bias::Right, &());
let mut new_start = new_cursor.start().output.lines;
new_start += tab_edit.new_lines.start.0 - new_cursor.start().input.lines;
new_start += tab_edit.new.start.0 - new_cursor.start().input.lines;
new_cursor.seek(&tab_edit.new_lines.end, Bias::Right, &());
new_cursor.seek(&tab_edit.new.end, Bias::Right, &());
let mut new_end = new_cursor.start().output.lines;
new_end += tab_edit.new_lines.end.0 - new_cursor.start().input.lines;
new_end += tab_edit.new.end.0 - new_cursor.start().input.lines;
wrap_edits.push(Edit {
wrap_edits.push(WrapEdit {
old: old_start.row..old_end.row,
new: new_start.row..new_end.row,
});
@@ -564,7 +566,11 @@ impl Snapshot {
.map(|h| h.text)
}
pub fn chunks<'a>(&'a self, rows: Range<u32>, theme: Option<&'a SyntaxTheme>) -> Chunks<'a> {
pub fn chunks<'a>(
&'a self,
rows: Range<u32>,
theme: Option<&'a SyntaxTheme>,
) -> WrapChunks<'a> {
let output_start = WrapPoint::new(rows.start, 0);
let output_end = WrapPoint::new(rows.end, 0);
let mut transforms = self.transforms.cursor::<(WrapPoint, TabPoint)>();
@@ -576,7 +582,7 @@ impl Snapshot {
let input_end = self
.to_tab_point(output_end)
.min(self.tab_snapshot.max_point());
Chunks {
WrapChunks {
input_chunks: self.tab_snapshot.chunks(input_start..input_end, theme),
input_chunk: Default::default(),
output_position: output_start,
@@ -622,7 +628,7 @@ impl Snapshot {
self.transforms.summary().output.longest_row
}
pub fn buffer_rows(&self, start_row: u32) -> BufferRows {
pub fn buffer_rows(&self, start_row: u32) -> WrapBufferRows {
let mut transforms = self.transforms.cursor::<(WrapPoint, TabPoint)>();
transforms.seek(&WrapPoint::new(start_row, 0), Bias::Left, &());
let mut input_row = transforms.start().1.row();
@@ -632,7 +638,7 @@ impl Snapshot {
let soft_wrapped = transforms.item().map_or(false, |t| !t.is_isomorphic());
let mut input_buffer_rows = self.tab_snapshot.buffer_rows(input_row);
let input_buffer_row = input_buffer_rows.next().unwrap();
BufferRows {
WrapBufferRows {
transforms,
input_buffer_row,
input_buffer_rows,
@@ -679,6 +685,46 @@ impl Snapshot {
self.from_tab_point(self.tab_snapshot.clip_point(self.to_tab_point(point), bias))
}
pub fn prev_row_boundary(&self, mut point: WrapPoint) -> u32 {
if self.transforms.is_empty() {
return 0;
}
*point.column_mut() = 0;
let mut cursor = self.transforms.cursor::<(WrapPoint, TabPoint)>();
cursor.seek(&point, Bias::Right, &());
if cursor.item().is_none() {
cursor.prev(&());
}
while let Some(transform) = cursor.item() {
if transform.is_isomorphic() && cursor.start().1.column() == 0 {
return cmp::min(cursor.end(&()).0.row(), point.row());
} else {
cursor.prev(&());
}
}
unreachable!()
}
pub fn next_row_boundary(&self, mut point: WrapPoint) -> Option<u32> {
point.0 += Point::new(1, 0);
let mut cursor = self.transforms.cursor::<(WrapPoint, TabPoint)>();
cursor.seek(&point, Bias::Right, &());
while let Some(transform) = cursor.item() {
if transform.is_isomorphic() && cursor.start().1.column() == 0 {
return Some(cmp::max(cursor.start().0.row(), point.row()));
} else {
cursor.next(&());
}
}
None
}
fn check_invariants(&self) {
#[cfg(test)]
{
@@ -696,22 +742,19 @@ impl Snapshot {
}
}
let input_buffer_rows = self.buffer_snapshot().buffer_rows(0).collect::<Vec<_>>();
let mut expected_buffer_rows = Vec::new();
let mut buffer_row = 0;
let mut prev_tab_row = 0;
for display_row in 0..=self.max_point().row() {
let tab_point = self.to_tab_point(WrapPoint::new(display_row, 0));
let soft_wrapped;
if tab_point.row() == prev_tab_row {
soft_wrapped = display_row != 0;
if tab_point.row() == prev_tab_row && display_row != 0 {
expected_buffer_rows.push(None);
} else {
let fold_point = self.tab_snapshot.to_fold_point(tab_point, Bias::Left).0;
let buffer_point = fold_point.to_buffer_point(&self.tab_snapshot.fold_snapshot);
buffer_row = buffer_point.row;
expected_buffer_rows.push(input_buffer_rows[buffer_point.row as usize]);
prev_tab_row = tab_point.row();
soft_wrapped = false;
}
expected_buffer_rows.push(if soft_wrapped { None } else { Some(buffer_row) });
}
for start_display_row in 0..expected_buffer_rows.len() {
@@ -727,7 +770,7 @@ impl Snapshot {
}
}
impl<'a> Iterator for Chunks<'a> {
impl<'a> Iterator for WrapChunks<'a> {
type Item = Chunk<'a>;
fn next(&mut self) -> Option<Self::Item> {
@@ -790,7 +833,7 @@ impl<'a> Iterator for Chunks<'a> {
}
}
impl<'a> Iterator for BufferRows<'a> {
impl<'a> Iterator for WrapBufferRows<'a> {
type Item = Option<u32>;
fn next(&mut self) -> Option<Self::Item> {
@@ -811,7 +854,7 @@ impl<'a> Iterator for BufferRows<'a> {
self.soft_wrapped = true;
}
Some(if soft_wrapped { None } else { Some(buffer_row) })
Some(if soft_wrapped { None } else { buffer_row })
}
}
@@ -951,7 +994,7 @@ impl<'a> sum_tree::Dimension<'a, TransformSummary> for WrapPoint {
}
}
fn consolidate_wrap_edits(edits: &mut Vec<Edit>) {
fn consolidate_wrap_edits(edits: &mut Vec<WrapEdit>) {
let mut i = 1;
while i < edits.len() {
let edit = edits[i].clone();
@@ -971,10 +1014,12 @@ mod tests {
use super::*;
use crate::{
display_map::{fold_map::FoldMap, tab_map::TabMap},
test::Observer,
MultiBuffer,
};
use language::{Buffer, RandomCharIter};
use gpui::test::observe;
use language::RandomCharIter;
use rand::prelude::*;
use smol::stream::StreamExt;
use std::{cmp, env};
use text::Rope;
@@ -1003,18 +1048,19 @@ mod tests {
log::info!("Tab size: {}", tab_size);
log::info!("Wrap width: {:?}", wrap_width);
let buffer = cx.add_model(|cx| {
let len = rng.gen_range(0..10);
let text = RandomCharIter::new(&mut rng).take(len).collect::<String>();
Buffer::new(0, text, cx)
let buffer = cx.update(|cx| {
if rng.gen() {
MultiBuffer::build_random(&mut rng, cx)
} else {
let len = rng.gen_range(0..10);
let text = RandomCharIter::new(&mut rng).take(len).collect::<String>();
MultiBuffer::build_simple(&text, cx)
}
});
let buffer_snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot());
let mut buffer_snapshot = buffer.read_with(&cx, |buffer, cx| buffer.snapshot(cx));
let (mut fold_map, folds_snapshot) = FoldMap::new(buffer_snapshot.clone());
let (tab_map, tabs_snapshot) = TabMap::new(folds_snapshot.clone(), tab_size);
log::info!(
"Unwrapped text (no folds): {:?}",
buffer.read_with(&cx, |buf, _| buf.text())
);
log::info!("Unwrapped text (no folds): {:?}", buffer_snapshot.text());
log::info!(
"Unwrapped text (unexpanded tabs): {:?}",
folds_snapshot.text()
@@ -1027,10 +1073,10 @@ mod tests {
let (wrap_map, _) =
cx.update(|cx| WrapMap::new(tabs_snapshot.clone(), font_id, font_size, wrap_width, cx));
let (_observer, notifications) = Observer::new(&wrap_map, &mut cx);
let mut notifications = observe(&wrap_map, &mut cx);
if wrap_map.read_with(&cx, |map, _| map.is_rewrapping()) {
notifications.recv().await.unwrap();
notifications.next().await.unwrap();
}
let (initial_snapshot, _) = wrap_map.update(&mut cx, |map, cx| {
@@ -1073,17 +1119,17 @@ mod tests {
}
_ => {
buffer.update(&mut cx, |buffer, cx| {
let v0 = buffer.version();
let subscription = buffer.subscribe();
let edit_count = rng.gen_range(1..=5);
buffer.randomly_edit(&mut rng, edit_count, cx);
buffer_edits.extend(buffer.edits_since(&v0));
buffer_snapshot = buffer.snapshot(cx);
buffer_edits.extend(subscription.consume());
});
}
}
let buffer_snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot());
log::info!("Unwrapped text (no folds): {:?}", buffer_snapshot.text());
let (folds_snapshot, fold_edits) = fold_map.read(buffer_snapshot, buffer_edits);
let (folds_snapshot, fold_edits) = fold_map.read(buffer_snapshot.clone(), buffer_edits);
log::info!(
"Unwrapped text (unexpanded tabs): {:?}",
folds_snapshot.text()
@@ -1103,7 +1149,7 @@ mod tests {
if wrap_map.read_with(&cx, |map, _| map.is_rewrapping()) && rng.gen_bool(0.4) {
log::info!("Waiting for wrapping to finish");
while wrap_map.read_with(&cx, |map, _| map.is_rewrapping()) {
notifications.recv().await.unwrap();
notifications.next().await.unwrap();
}
wrap_map.read_with(&cx, |map, _| assert!(map.pending_edits.is_empty()));
}
@@ -1191,7 +1237,7 @@ mod tests {
if wrap_map.read_with(&cx, |map, _| map.is_rewrapping()) {
log::info!("Waiting for wrapping to finish");
while wrap_map.read_with(&cx, |map, _| map.is_rewrapping()) {
notifications.recv().await.unwrap();
notifications.next().await.unwrap();
}
}
wrap_map.read_with(&cx, |map, _| assert!(map.pending_edits.is_empty()));
@@ -1224,7 +1270,7 @@ mod tests {
}
}
impl Snapshot {
impl WrapSnapshot {
pub fn text(&self) -> String {
self.text_chunks(0).collect()
}

File diff suppressed because it is too large Load Diff

View File

@@ -1,12 +1,14 @@
use crate::display_map::{BlockContext, ToDisplayPoint};
use super::{
DisplayPoint, Editor, EditorMode, EditorSettings, EditorStyle, Input, Scroll, Select,
SelectPhase, Snapshot, SoftWrap, MAX_LINE_LEN,
display_map::{BlockContext, ToDisplayPoint},
Anchor, DisplayPoint, Editor, EditorMode, EditorSettings, EditorSnapshot, EditorStyle, Input,
Scroll, Select, SelectPhase, SoftWrap, ToPoint, MAX_LINE_LEN,
};
use clock::ReplicaId;
use collections::{BTreeMap, HashMap};
use gpui::{
color::Color,
elements::layout_highlighted_chunks,
fonts::HighlightStyle,
geometry::{
rect::RectF,
vector::{vec2f, Vector2F},
@@ -19,11 +21,10 @@ use gpui::{
MutableAppContext, PaintContext, Quad, Scene, SizeConstraint, ViewContext, WeakViewHandle,
};
use json::json;
use language::{Chunk, ToPoint};
use language::Bias;
use smallvec::SmallVec;
use std::{
cmp::{self, Ordering},
collections::{BTreeMap, HashMap},
fmt::Write,
ops::Range,
};
@@ -49,7 +50,7 @@ impl EditorElement {
self.view.upgrade(cx).unwrap().update(cx, f)
}
fn snapshot(&self, cx: &mut MutableAppContext) -> Snapshot {
fn snapshot(&self, cx: &mut MutableAppContext) -> EditorSnapshot {
self.update_view(cx, |view, cx| view.snapshot(cx))
}
@@ -264,12 +265,16 @@ impl EditorElement {
}
}
if let Some(highlighted_row) = layout.highlighted_row {
if let Some(highlighted_rows) = &layout.highlighted_rows {
let origin = vec2f(
bounds.origin_x(),
bounds.origin_y() + (layout.line_height * highlighted_row as f32) - scroll_top,
bounds.origin_y() + (layout.line_height * highlighted_rows.start as f32)
- scroll_top,
);
let size = vec2f(
bounds.width(),
layout.line_height * highlighted_rows.len() as f32,
);
let size = vec2f(bounds.width(), layout.line_height);
cx.scene.push_quad(Quad {
bounds: RectF::new(origin, size),
background: Some(style.highlighted_line_background),
@@ -418,7 +423,7 @@ impl EditorElement {
fn paint_blocks(
&mut self,
text_bounds: RectF,
bounds: RectF,
visible_bounds: RectF,
layout: &mut LayoutState,
cx: &mut PaintContext,
@@ -428,14 +433,14 @@ impl EditorElement {
let scroll_top = scroll_position.y() * layout.line_height;
for (row, element) in &mut layout.blocks {
let origin = text_bounds.origin()
let origin = bounds.origin()
+ vec2f(-scroll_left, *row as f32 * layout.line_height - scroll_top);
element.paint(origin, visible_bounds, cx);
}
}
fn max_line_number_width(&self, snapshot: &Snapshot, cx: &LayoutContext) -> f32 {
let digit_count = (snapshot.buffer_row_count() as f32).log10().floor() as usize + 1;
fn max_line_number_width(&self, snapshot: &EditorSnapshot, cx: &LayoutContext) -> f32 {
let digit_count = (snapshot.max_buffer_row() as f32).log10().floor() as usize + 1;
let style = &self.settings.style;
cx.text_layout_cache
@@ -458,7 +463,7 @@ impl EditorElement {
&self,
rows: Range<u32>,
active_rows: &BTreeMap<u32, bool>,
snapshot: &Snapshot,
snapshot: &EditorSnapshot,
cx: &LayoutContext,
) -> Vec<Option<text_layout::Line>> {
let style = &self.settings.style;
@@ -504,7 +509,7 @@ impl EditorElement {
fn layout_lines(
&mut self,
mut rows: Range<u32>,
snapshot: &mut Snapshot,
snapshot: &mut EditorSnapshot,
cx: &LayoutContext,
) -> Vec<text_layout::Line> {
rows.end = cmp::min(rows.end, snapshot.max_point().row() + 1);
@@ -538,93 +543,46 @@ impl EditorElement {
)
})
.collect();
}
let style = &self.settings.style;
let mut prev_font_properties = style.text.font_properties.clone();
let mut prev_font_id = style.text.font_id;
let mut layouts = Vec::with_capacity(rows.len());
let mut line = String::new();
let mut styles = Vec::new();
let mut row = rows.start;
let mut line_exceeded_max_len = false;
let chunks = snapshot.chunks(rows.clone(), Some(&style.syntax));
let newline_chunk = Chunk {
text: "\n",
..Default::default()
};
'outer: for chunk in chunks.chain([newline_chunk]) {
for (ix, mut line_chunk) in chunk.text.split('\n').enumerate() {
if ix > 0 {
layouts.push(cx.text_layout_cache.layout_str(
&line,
style.text.font_size,
&styles,
));
line.clear();
styles.clear();
row += 1;
line_exceeded_max_len = false;
if row == rows.end {
break 'outer;
}
}
if !line_chunk.is_empty() && !line_exceeded_max_len {
let highlight_style =
chunk.highlight_style.unwrap_or(style.text.clone().into());
// Avoid a lookup if the font properties match the previous ones.
let font_id = if highlight_style.font_properties == prev_font_properties {
prev_font_id
} else {
cx.font_cache
.select_font(
style.text.font_family_id,
&highlight_style.font_properties,
)
.unwrap_or(style.text.font_id)
};
if line.len() + line_chunk.len() > MAX_LINE_LEN {
let mut chunk_len = MAX_LINE_LEN - line.len();
while !line_chunk.is_char_boundary(chunk_len) {
chunk_len -= 1;
} else {
let style = &self.settings.style;
let chunks = snapshot
.chunks(rows.clone(), Some(&style.syntax))
.map(|chunk| {
let highlight = if let Some(severity) = chunk.diagnostic {
let underline = Some(super::diagnostic_style(severity, true, style).text);
if let Some(mut highlight) = chunk.highlight_style {
highlight.underline = underline;
Some(highlight)
} else {
Some(HighlightStyle {
underline,
color: style.text.color,
font_properties: style.text.font_properties,
})
}
line_chunk = &line_chunk[..chunk_len];
line_exceeded_max_len = true;
}
let underline = if let Some(severity) = chunk.diagnostic {
Some(super::diagnostic_style(severity, true, style).text)
} else {
highlight_style.underline
chunk.highlight_style
};
line.push_str(line_chunk);
styles.push((
line_chunk.len(),
RunStyle {
font_id,
color: highlight_style.color,
underline,
},
));
prev_font_id = font_id;
prev_font_properties = highlight_style.font_properties;
}
}
(chunk.text, highlight)
});
layout_highlighted_chunks(
chunks,
&style.text,
&cx.text_layout_cache,
&cx.font_cache,
MAX_LINE_LEN,
rows.len() as usize,
)
}
layouts
}
fn layout_blocks(
&mut self,
rows: Range<u32>,
snapshot: &Snapshot,
text_width: f32,
snapshot: &EditorSnapshot,
width: f32,
line_number_x: f32,
text_x: f32,
line_height: f32,
style: &EditorStyle,
line_layouts: &[text_layout::Line],
@@ -639,19 +597,24 @@ impl EditorElement {
.to_display_point(snapshot)
.row();
let anchor_x = if rows.contains(&anchor_row) {
line_layouts[(anchor_row - rows.start) as usize]
.x_for_index(block.column() as usize)
} else {
layout_line(anchor_row, snapshot, style, cx.text_layout_cache)
.x_for_index(block.column() as usize)
};
let anchor_x = text_x
+ if rows.contains(&anchor_row) {
line_layouts[(anchor_row - rows.start) as usize]
.x_for_index(block.column() as usize)
} else {
layout_line(anchor_row, snapshot, style, cx.text_layout_cache)
.x_for_index(block.column() as usize)
};
let mut element = block.render(&BlockContext { cx, anchor_x });
let mut element = block.render(&BlockContext {
cx,
anchor_x,
line_number_x,
});
element.layout(
SizeConstraint {
min: Vector2F::zero(),
max: vec2f(text_width, block.height() as f32 * line_height),
max: vec2f(width, block.height() as f32 * line_height),
},
cx,
);
@@ -732,35 +695,69 @@ impl Element for EditorElement {
let scroll_top = scroll_position.y() * line_height;
let end_row = ((scroll_top + size.y()) / line_height).ceil() as u32 + 1; // Add 1 to ensure selections bleed off screen
let mut selections = HashMap::new();
let mut active_rows = BTreeMap::new();
let mut highlighted_row = None;
self.update_view(cx.app, |view, cx| {
highlighted_row = view.highlighted_row();
for selection_set_id in view.active_selection_sets(cx).collect::<Vec<_>>() {
let replica_selections = view
.intersecting_selections(
selection_set_id,
DisplayPoint::new(start_row, 0)..DisplayPoint::new(end_row, 0),
cx,
)
.collect::<Vec<_>>();
for selection in &replica_selections {
if selection_set_id == view.selection_set_id {
let is_empty = selection.start == selection.end;
let selection_start = snapshot.prev_row_boundary(selection.start).0;
let selection_end = snapshot.next_row_boundary(selection.end).0;
for row in cmp::max(selection_start.row(), start_row)
..=cmp::min(selection_end.row(), end_row)
{
let contains_non_empty_selection =
active_rows.entry(row).or_insert(!is_empty);
*contains_non_empty_selection |= !is_empty;
}
}
}
let start_anchor = if start_row == 0 {
Anchor::min()
} else {
snapshot
.buffer_snapshot
.anchor_before(DisplayPoint::new(start_row, 0).to_offset(&snapshot, Bias::Left))
};
let end_anchor = if end_row > snapshot.max_point().row() {
Anchor::max()
} else {
snapshot
.buffer_snapshot
.anchor_before(DisplayPoint::new(end_row, 0).to_offset(&snapshot, Bias::Right))
};
selections.insert(selection_set_id.replica_id, replica_selections);
let mut selections = HashMap::default();
let mut active_rows = BTreeMap::new();
let mut highlighted_rows = None;
self.update_view(cx.app, |view, cx| {
highlighted_rows = view.highlighted_rows();
let display_map = view.display_map.update(cx, |map, cx| map.snapshot(cx));
let local_selections = view
.local_selections_in_range(start_anchor.clone()..end_anchor.clone(), &display_map);
for selection in &local_selections {
let is_empty = selection.start == selection.end;
let selection_start = snapshot.prev_line_boundary(selection.start).1;
let selection_end = snapshot.next_line_boundary(selection.end).1;
for row in cmp::max(selection_start.row(), start_row)
..=cmp::min(selection_end.row(), end_row)
{
let contains_non_empty_selection = active_rows.entry(row).or_insert(!is_empty);
*contains_non_empty_selection |= !is_empty;
}
}
selections.insert(
view.replica_id(cx),
local_selections
.into_iter()
.map(|selection| crate::Selection {
id: selection.id,
goal: selection.goal,
reversed: selection.reversed,
start: selection.start.to_display_point(&display_map),
end: selection.end.to_display_point(&display_map),
})
.collect(),
);
for (replica_id, selection) in display_map
.buffer_snapshot
.remote_selections_in_range(&(start_anchor..end_anchor))
{
selections
.entry(replica_id)
.or_insert(Vec::new())
.push(crate::Selection {
id: selection.id,
goal: selection.goal,
reversed: selection.reversed,
start: selection.start.to_display_point(&display_map),
end: selection.end.to_display_point(&display_map),
});
}
});
@@ -777,7 +774,9 @@ impl Element for EditorElement {
let blocks = self.layout_blocks(
start_row..end_row,
&snapshot,
text_size.x(),
size.x(),
gutter_padding,
gutter_width + text_offset.x(),
line_height,
&style,
&line_layouts,
@@ -794,7 +793,7 @@ impl Element for EditorElement {
snapshot,
style: self.settings.style.clone(),
active_rows,
highlighted_row,
highlighted_rows,
line_layouts,
line_number_layouts,
blocks,
@@ -853,7 +852,7 @@ impl Element for EditorElement {
self.paint_gutter(gutter_bounds, visible_bounds, layout, cx);
}
self.paint_text(text_bounds, visible_bounds, layout, cx);
self.paint_blocks(text_bounds, visible_bounds, layout, cx);
self.paint_blocks(bounds, visible_bounds, layout, cx);
cx.scene.pop_layer();
@@ -923,9 +922,9 @@ pub struct LayoutState {
gutter_padding: f32,
text_size: Vector2F,
style: EditorStyle,
snapshot: Snapshot,
snapshot: EditorSnapshot,
active_rows: BTreeMap<u32, bool>,
highlighted_row: Option<u32>,
highlighted_rows: Option<Range<u32>>,
line_layouts: Vec<text_layout::Line>,
line_number_layouts: Vec<Option<text_layout::Line>>,
blocks: Vec<(u32, ElementBox)>,
@@ -961,7 +960,7 @@ impl LayoutState {
fn layout_line(
row: u32,
snapshot: &Snapshot,
snapshot: &EditorSnapshot,
style: &EditorStyle,
layout_cache: &TextLayoutCache,
) -> text_layout::Line {
@@ -998,7 +997,7 @@ pub struct PaintState {
impl PaintState {
fn point_for_position(
&self,
snapshot: &Snapshot,
snapshot: &EditorSnapshot,
layout: &LayoutState,
position: Vector2F,
) -> (DisplayPoint, u32) {
@@ -1164,23 +1163,20 @@ fn scale_horizontal_mouse_autoscroll_delta(delta: f32) -> f32 {
#[cfg(test)]
mod tests {
use super::*;
use crate::{
test::sample_text,
{Editor, EditorSettings},
};
use language::Buffer;
use crate::{Editor, EditorSettings, MultiBuffer};
use std::sync::Arc;
use util::test::sample_text;
#[gpui::test]
fn test_layout_line_numbers(cx: &mut gpui::MutableAppContext) {
let settings = EditorSettings::test(cx);
let buffer = cx.add_model(|cx| Buffer::new(0, sample_text(6, 6), cx));
let buffer = MultiBuffer::build_simple(&sample_text(6, 6, 'a'), cx);
let (window_id, editor) = cx.add_window(Default::default(), |cx| {
Editor::for_buffer(
buffer,
{
let settings = settings.clone();
move |_| settings.clone()
Arc::new(move |_| settings.clone())
},
cx,
)

View File

@@ -1,19 +1,20 @@
use crate::{Editor, EditorSettings, Event};
use crate::{Autoscroll, Editor, Event};
use crate::{MultiBuffer, ToPoint as _};
use anyhow::Result;
use gpui::{
elements::*, fonts::TextStyle, AppContext, Entity, ModelContext, ModelHandle,
MutableAppContext, RenderContext, Subscription, Task, View, ViewContext, ViewHandle,
WeakModelHandle,
elements::*, AppContext, Entity, ModelContext, ModelHandle, MutableAppContext, RenderContext,
Subscription, Task, View, ViewContext, ViewHandle, WeakModelHandle,
};
use language::{Buffer, Diagnostic, File as _};
use postage::watch;
use project::{ProjectPath, Worktree};
use project::{File, ProjectPath, Worktree};
use std::fmt::Write;
use std::path::Path;
use text::{Point, Selection, ToPoint};
use text::{Point, Selection};
use util::TryFutureExt;
use workspace::{
settings, EntryOpener, ItemHandle, ItemView, ItemViewHandle, Settings, StatusItemView,
WeakItemHandle,
ItemHandle, ItemView, ItemViewHandle, PathOpener, Settings, StatusItemView, WeakItemHandle,
Workspace,
};
pub struct BufferOpener;
@@ -24,7 +25,7 @@ pub struct BufferItemHandle(pub ModelHandle<Buffer>);
#[derive(Clone)]
struct WeakBufferItemHandle(WeakModelHandle<Buffer>);
impl EntryOpener for BufferOpener {
impl PathOpener for BufferOpener {
fn open(
&self,
worktree: &mut Worktree,
@@ -33,9 +34,8 @@ impl EntryOpener for BufferOpener {
) -> Option<Task<Result<Box<dyn ItemHandle>>>> {
let buffer = worktree.open_buffer(project_path.path, cx);
let task = cx.spawn(|_, _| async move {
buffer
.await
.map(|buffer| Box::new(BufferItemHandle(buffer)) as Box<dyn ItemHandle>)
let buffer = buffer.await?;
Ok(Box::new(BufferItemHandle(buffer)) as Box<dyn ItemHandle>)
});
Some(task)
}
@@ -45,49 +45,15 @@ impl ItemHandle for BufferItemHandle {
fn add_view(
&self,
window_id: usize,
settings: watch::Receiver<Settings>,
workspace: &Workspace,
cx: &mut MutableAppContext,
) -> Box<dyn ItemViewHandle> {
let buffer = self.0.downgrade();
let buffer = cx.add_model(|cx| MultiBuffer::singleton(self.0.clone(), cx));
let weak_buffer = buffer.downgrade();
Box::new(cx.add_view(window_id, |cx| {
Editor::for_buffer(
self.0.clone(),
move |cx| {
let settings = settings.borrow();
let font_cache = cx.font_cache();
let font_family_id = settings.buffer_font_family;
let font_family_name = cx.font_cache().family_name(font_family_id).unwrap();
let font_properties = Default::default();
let font_id = font_cache
.select_font(font_family_id, &font_properties)
.unwrap();
let font_size = settings.buffer_font_size;
let mut theme = settings.theme.editor.clone();
theme.text = TextStyle {
color: theme.text.color,
font_family_name,
font_family_id,
font_id,
font_size,
font_properties,
underline: None,
};
let language = buffer.upgrade(cx).and_then(|buf| buf.read(cx).language());
let soft_wrap = match settings.soft_wrap(language) {
settings::SoftWrap::None => crate::SoftWrap::None,
settings::SoftWrap::EditorWidth => crate::SoftWrap::EditorWidth,
settings::SoftWrap::PreferredLineLength => crate::SoftWrap::Column(
settings.preferred_line_length(language).saturating_sub(1),
),
};
EditorSettings {
tab_size: settings.tab_size,
soft_wrap,
style: theme,
}
},
buffer,
crate::settings_builder(weak_buffer, workspace.settings()),
cx,
)
}))
@@ -97,16 +63,24 @@ impl ItemHandle for BufferItemHandle {
Box::new(self.clone())
}
fn to_any(&self) -> gpui::AnyModelHandle {
self.0.clone().into()
}
fn downgrade(&self) -> Box<dyn workspace::WeakItemHandle> {
Box::new(WeakBufferItemHandle(self.0.downgrade()))
}
fn project_path(&self, cx: &AppContext) -> Option<ProjectPath> {
self.0.read(cx).file().map(|f| ProjectPath {
worktree_id: f.worktree_id(),
File::from_dyn(self.0.read(cx).file()).map(|f| ProjectPath {
worktree_id: f.worktree_id(cx),
path: f.path().clone(),
})
}
fn id(&self) -> usize {
self.0.id()
}
}
impl WeakItemHandle for WeakBufferItemHandle {
@@ -115,29 +89,24 @@ impl WeakItemHandle for WeakBufferItemHandle {
.upgrade(cx)
.map(|buffer| Box::new(BufferItemHandle(buffer)) as Box<dyn ItemHandle>)
}
fn id(&self) -> usize {
self.0.id()
}
}
impl ItemView for Editor {
fn should_activate_item_on_event(event: &Event) -> bool {
matches!(event, Event::Activate)
}
type ItemHandle = BufferItemHandle;
fn should_close_item_on_event(event: &Event) -> bool {
matches!(event, Event::Closed)
}
fn should_update_tab_on_event(event: &Event) -> bool {
matches!(
event,
Event::Saved | Event::Dirtied | Event::FileHandleChanged
)
fn item_handle(&self, cx: &AppContext) -> Self::ItemHandle {
BufferItemHandle(self.buffer.read(cx).as_singleton().unwrap())
}
fn title(&self, cx: &AppContext) -> String {
let filename = self
.buffer()
.read(cx)
.file()
.file(cx)
.and_then(|file| file.file_name());
if let Some(name) = filename {
name.to_string_lossy().into()
@@ -147,8 +116,8 @@ impl ItemView for Editor {
}
fn project_path(&self, cx: &AppContext) -> Option<ProjectPath> {
self.buffer().read(cx).file().map(|file| ProjectPath {
worktree_id: file.worktree_id(),
File::from_dyn(self.buffer().read(cx).file(cx)).map(|file| ProjectPath {
worktree_id: file.worktree_id(cx),
path: file.path().clone(),
})
}
@@ -160,21 +129,52 @@ impl ItemView for Editor {
Some(self.clone(cx))
}
fn is_dirty(&self, cx: &AppContext) -> bool {
self.buffer().read(cx).read(cx).is_dirty()
}
fn has_conflict(&self, cx: &AppContext) -> bool {
self.buffer().read(cx).read(cx).has_conflict()
}
fn can_save(&self, cx: &AppContext) -> bool {
self.project_path(cx).is_some()
}
fn save(&mut self, cx: &mut ViewContext<Self>) -> Result<Task<Result<()>>> {
let save = self.buffer().update(cx, |b, cx| b.save(cx))?;
Ok(cx.spawn(|_, _| async move {
save.await?;
let buffer = self.buffer().clone();
Ok(cx.spawn(|editor, mut cx| async move {
buffer
.update(&mut cx, |buffer, cx| buffer.format(cx).log_err())
.await;
editor.update(&mut cx, |editor, cx| {
editor.request_autoscroll(Autoscroll::Fit, cx)
});
buffer
.update(&mut cx, |buffer, cx| buffer.save(cx))?
.await?;
Ok(())
}))
}
fn can_save_as(&self, _: &AppContext) -> bool {
true
}
fn save_as(
&mut self,
worktree: ModelHandle<Worktree>,
path: &Path,
cx: &mut ViewContext<Self>,
) -> Task<Result<()>> {
self.buffer().update(cx, |buffer, cx| {
let buffer = self
.buffer()
.read(cx)
.as_singleton()
.expect("cannot call save_as on an excerpt list")
.clone();
buffer.update(cx, |buffer, cx| {
let handle = cx.handle();
let text = buffer.as_rope().clone();
let version = buffer.version();
@@ -191,12 +191,12 @@ impl ItemView for Editor {
let (language, language_server) = worktree.update(&mut cx, |worktree, cx| {
let worktree = worktree.as_local_mut().unwrap();
let language = worktree
.languages()
.language_registry()
.select_language(new_file.full_path())
.cloned();
let language_server = language
.as_ref()
.and_then(|language| worktree.ensure_language_server(language, cx));
.and_then(|language| worktree.register_language(language, cx));
(language, language_server.clone())
});
@@ -209,12 +209,19 @@ impl ItemView for Editor {
})
}
fn is_dirty(&self, cx: &AppContext) -> bool {
self.buffer().read(cx).is_dirty()
fn should_activate_item_on_event(event: &Event) -> bool {
matches!(event, Event::Activate)
}
fn has_conflict(&self, cx: &AppContext) -> bool {
self.buffer().read(cx).has_conflict()
fn should_close_item_on_event(event: &Event) -> bool {
matches!(event, Event::Closed)
}
fn should_update_tab_on_event(event: &Event) -> bool {
matches!(
event,
Event::Saved | Event::Dirtied | Event::FileHandleChanged
)
}
}
@@ -237,11 +244,11 @@ impl CursorPosition {
fn update_position(&mut self, editor: ViewHandle<Editor>, cx: &mut ViewContext<Self>) {
let editor = editor.read(cx);
let buffer = editor.buffer().read(cx);
let buffer = editor.buffer().read(cx).snapshot(cx);
self.selected_count = 0;
let mut last_selection: Option<Selection<usize>> = None;
for selection in editor.selections::<usize>(cx) {
for selection in editor.local_selections::<usize>(cx) {
self.selected_count += selection.end - selection.start;
if last_selection
.as_ref()
@@ -250,7 +257,7 @@ impl CursorPosition {
last_selection = Some(selection);
}
}
self.position = last_selection.map(|s| s.head().to_point(buffer));
self.position = last_selection.map(|s| s.head().to_point(&buffer));
cx.notify();
}
@@ -314,14 +321,14 @@ impl DiagnosticMessage {
fn update(&mut self, editor: ViewHandle<Editor>, cx: &mut ViewContext<Self>) {
let editor = editor.read(cx);
let cursor_position = editor.newest_selection(cx).head();
let new_diagnostic = editor
.buffer()
let buffer = editor.buffer().read(cx);
let cursor_position = editor.newest_selection::<usize>(&buffer.read(cx)).head();
let new_diagnostic = buffer
.read(cx)
.diagnostics_in_range::<usize, usize>(cursor_position..cursor_position)
.filter(|(range, _)| !range.is_empty())
.min_by_key(|(range, diagnostic)| (diagnostic.severity, range.len()))
.map(|(_, diagnostic)| diagnostic.clone());
.diagnostics_in_range::<_, usize>(cursor_position..cursor_position)
.filter(|entry| !entry.range.is_empty())
.min_by_key(|entry| (entry.diagnostic.severity, entry.range.len()))
.map(|entry| entry.diagnostic);
if new_diagnostic != self.diagnostic {
self.diagnostic = new_diagnostic;
cx.notify();

View File

@@ -1,9 +1,9 @@
use super::{Bias, DisplayMapSnapshot, DisplayPoint, SelectionGoal, ToDisplayPoint};
use super::{Bias, DisplayPoint, DisplaySnapshot, SelectionGoal, ToDisplayPoint};
use crate::ToPoint;
use anyhow::Result;
use std::{cmp, ops::Range};
use text::ToPoint;
pub fn left(map: &DisplayMapSnapshot, mut point: DisplayPoint) -> Result<DisplayPoint> {
pub fn left(map: &DisplaySnapshot, mut point: DisplayPoint) -> Result<DisplayPoint> {
if point.column() > 0 {
*point.column_mut() -= 1;
} else if point.row() > 0 {
@@ -13,7 +13,7 @@ pub fn left(map: &DisplayMapSnapshot, mut point: DisplayPoint) -> Result<Display
Ok(map.clip_point(point, Bias::Left))
}
pub fn right(map: &DisplayMapSnapshot, mut point: DisplayPoint) -> Result<DisplayPoint> {
pub fn right(map: &DisplaySnapshot, mut point: DisplayPoint) -> Result<DisplayPoint> {
let max_column = map.line_len(point.row());
if point.column() < max_column {
*point.column_mut() += 1;
@@ -25,27 +25,26 @@ pub fn right(map: &DisplayMapSnapshot, mut point: DisplayPoint) -> Result<Displa
}
pub fn up(
map: &DisplayMapSnapshot,
mut point: DisplayPoint,
map: &DisplaySnapshot,
start: DisplayPoint,
goal: SelectionGoal,
) -> Result<(DisplayPoint, SelectionGoal)> {
let goal_column = if let SelectionGoal::Column(column) = goal {
let mut goal_column = if let SelectionGoal::Column(column) = goal {
column
} else {
map.column_to_chars(point.row(), point.column())
map.column_to_chars(start.row(), start.column())
};
loop {
if point.row() > 0 {
*point.row_mut() -= 1;
*point.column_mut() = map.column_from_chars(point.row(), goal_column);
if !map.is_block_line(point.row()) {
break;
}
} else {
point = DisplayPoint::new(0, 0);
break;
}
let prev_row = start.row().saturating_sub(1);
let mut point = map.clip_point(
DisplayPoint::new(prev_row, map.line_len(prev_row)),
Bias::Left,
);
if point.row() < start.row() {
*point.column_mut() = map.column_from_chars(point.row(), goal_column);
} else {
point = DisplayPoint::new(0, 0);
goal_column = 0;
}
let clip_bias = if point.column() == map.line_len(point.row()) {
@@ -61,28 +60,23 @@ pub fn up(
}
pub fn down(
map: &DisplayMapSnapshot,
mut point: DisplayPoint,
map: &DisplaySnapshot,
start: DisplayPoint,
goal: SelectionGoal,
) -> Result<(DisplayPoint, SelectionGoal)> {
let max_point = map.max_point();
let goal_column = if let SelectionGoal::Column(column) = goal {
let mut goal_column = if let SelectionGoal::Column(column) = goal {
column
} else {
map.column_to_chars(point.row(), point.column())
map.column_to_chars(start.row(), start.column())
};
loop {
if point.row() < max_point.row() {
*point.row_mut() += 1;
*point.column_mut() = map.column_from_chars(point.row(), goal_column);
if !map.is_block_line(point.row()) {
break;
}
} else {
point = max_point;
break;
}
let next_row = start.row() + 1;
let mut point = map.clip_point(DisplayPoint::new(next_row, 0), Bias::Right);
if point.row() > start.row() {
*point.column_mut() = map.column_from_chars(point.row(), goal_column);
} else {
point = map.max_point();
goal_column = map.column_to_chars(point.row(), point.column())
}
let clip_bias = if point.column() == map.line_len(point.row()) {
@@ -98,7 +92,7 @@ pub fn down(
}
pub fn line_beginning(
map: &DisplayMapSnapshot,
map: &DisplaySnapshot,
point: DisplayPoint,
toggle_indent: bool,
) -> DisplayPoint {
@@ -110,12 +104,12 @@ pub fn line_beginning(
}
}
pub fn line_end(map: &DisplayMapSnapshot, point: DisplayPoint) -> DisplayPoint {
pub fn line_end(map: &DisplaySnapshot, point: DisplayPoint) -> DisplayPoint {
let line_end = DisplayPoint::new(point.row(), map.line_len(point.row()));
map.clip_point(line_end, Bias::Left)
}
pub fn prev_word_boundary(map: &DisplayMapSnapshot, mut point: DisplayPoint) -> DisplayPoint {
pub fn prev_word_boundary(map: &DisplaySnapshot, mut point: DisplayPoint) -> DisplayPoint {
let mut line_start = 0;
if point.row() > 0 {
if let Some(indent) = map.soft_wrap_indent(point.row() - 1) {
@@ -154,7 +148,7 @@ pub fn prev_word_boundary(map: &DisplayMapSnapshot, mut point: DisplayPoint) ->
boundary
}
pub fn next_word_boundary(map: &DisplayMapSnapshot, mut point: DisplayPoint) -> DisplayPoint {
pub fn next_word_boundary(map: &DisplaySnapshot, mut point: DisplayPoint) -> DisplayPoint {
let mut prev_char_kind = None;
for c in map.chars_at(point) {
let char_kind = char_kind(c);
@@ -178,10 +172,10 @@ pub fn next_word_boundary(map: &DisplayMapSnapshot, mut point: DisplayPoint) ->
}
prev_char_kind = Some(char_kind);
}
point
map.clip_point(point, Bias::Right)
}
pub fn is_inside_word(map: &DisplayMapSnapshot, point: DisplayPoint) -> bool {
pub fn is_inside_word(map: &DisplaySnapshot, point: DisplayPoint) -> bool {
let ix = map.clip_point(point, Bias::Left).to_offset(map, Bias::Left);
let text = &map.buffer_snapshot;
let next_char_kind = text.chars_at(ix).next().map(char_kind);
@@ -189,7 +183,7 @@ pub fn is_inside_word(map: &DisplayMapSnapshot, point: DisplayPoint) -> bool {
prev_char_kind.zip(next_char_kind) == Some((CharKind::Word, CharKind::Word))
}
pub fn surrounding_word(map: &DisplayMapSnapshot, point: DisplayPoint) -> Range<DisplayPoint> {
pub fn surrounding_word(map: &DisplaySnapshot, point: DisplayPoint) -> Range<DisplayPoint> {
let mut start = map.clip_point(point, Bias::Left).to_offset(map, Bias::Left);
let mut end = start;
@@ -244,7 +238,120 @@ fn char_kind(c: char) -> CharKind {
#[cfg(test)]
mod tests {
use super::*;
use crate::{display_map::DisplayMap, Buffer};
use crate::{
display_map::{BlockDisposition, BlockProperties},
Buffer, DisplayMap, ExcerptProperties, MultiBuffer,
};
use gpui::{elements::Empty, Element};
use language::Point;
use std::sync::Arc;
#[gpui::test]
fn test_move_up_and_down_with_excerpts(cx: &mut gpui::MutableAppContext) {
let family_id = cx.font_cache().load_family(&["Helvetica"]).unwrap();
let font_id = cx
.font_cache()
.select_font(family_id, &Default::default())
.unwrap();
let buffer = cx.add_model(|cx| Buffer::new(0, "abc\ndefg\nhijkl\nmn", cx));
let mut excerpt1_header_position = None;
let mut excerpt2_header_position = None;
let multibuffer = cx.add_model(|cx| {
let mut multibuffer = MultiBuffer::new(0);
let excerpt1_id = multibuffer.push_excerpt(
ExcerptProperties {
buffer: &buffer,
range: Point::new(0, 0)..Point::new(1, 4),
},
cx,
);
let excerpt2_id = multibuffer.push_excerpt(
ExcerptProperties {
buffer: &buffer,
range: Point::new(2, 0)..Point::new(3, 2),
},
cx,
);
excerpt1_header_position = Some(
multibuffer
.read(cx)
.anchor_in_excerpt(excerpt1_id, language::Anchor::min()),
);
excerpt2_header_position = Some(
multibuffer
.read(cx)
.anchor_in_excerpt(excerpt2_id, language::Anchor::min()),
);
multibuffer
});
let display_map =
cx.add_model(|cx| DisplayMap::new(multibuffer, 2, font_id, 14.0, None, cx));
display_map.update(cx, |display_map, cx| {
display_map.insert_blocks(
[
BlockProperties {
position: excerpt1_header_position.unwrap(),
height: 2,
render: Arc::new(|_| Empty::new().boxed()),
disposition: BlockDisposition::Above,
},
BlockProperties {
position: excerpt2_header_position.unwrap(),
height: 3,
render: Arc::new(|_| Empty::new().boxed()),
disposition: BlockDisposition::Above,
},
],
cx,
)
});
let snapshot = display_map.update(cx, |map, cx| map.snapshot(cx));
assert_eq!(snapshot.text(), "\n\nabc\ndefg\n\n\n\nhijkl\nmn");
// Can't move up into the first excerpt's header
assert_eq!(
up(&snapshot, DisplayPoint::new(2, 2), SelectionGoal::Column(2)).unwrap(),
(DisplayPoint::new(2, 0), SelectionGoal::Column(0)),
);
assert_eq!(
up(&snapshot, DisplayPoint::new(2, 0), SelectionGoal::None).unwrap(),
(DisplayPoint::new(2, 0), SelectionGoal::Column(0)),
);
// Move up and down within first excerpt
assert_eq!(
up(&snapshot, DisplayPoint::new(3, 4), SelectionGoal::Column(4)).unwrap(),
(DisplayPoint::new(2, 3), SelectionGoal::Column(4)),
);
assert_eq!(
down(&snapshot, DisplayPoint::new(2, 3), SelectionGoal::Column(4)).unwrap(),
(DisplayPoint::new(3, 4), SelectionGoal::Column(4)),
);
// Move up and down across second excerpt's header
assert_eq!(
up(&snapshot, DisplayPoint::new(7, 5), SelectionGoal::Column(5)).unwrap(),
(DisplayPoint::new(3, 4), SelectionGoal::Column(5)),
);
assert_eq!(
down(&snapshot, DisplayPoint::new(3, 4), SelectionGoal::Column(5)).unwrap(),
(DisplayPoint::new(7, 5), SelectionGoal::Column(5)),
);
// Can't move down off the end
assert_eq!(
down(&snapshot, DisplayPoint::new(8, 0), SelectionGoal::Column(0)).unwrap(),
(DisplayPoint::new(8, 2), SelectionGoal::Column(2)),
);
assert_eq!(
down(&snapshot, DisplayPoint::new(8, 2), SelectionGoal::Column(2)).unwrap(),
(DisplayPoint::new(8, 2), SelectionGoal::Column(2)),
);
}
#[gpui::test]
fn test_prev_next_word_boundary_multibyte(cx: &mut gpui::MutableAppContext) {
@@ -256,7 +363,7 @@ mod tests {
.unwrap();
let font_size = 14.0;
let buffer = cx.add_model(|cx| Buffer::new(0, "a bcΔ defγ hi—jk", cx));
let buffer = MultiBuffer::build_simple("a bcΔ defγ hi—jk", cx);
let display_map =
cx.add_model(|cx| DisplayMap::new(buffer, tab_size, font_id, font_size, None, cx));
let snapshot = display_map.update(cx, |map, cx| map.snapshot(cx));
@@ -312,7 +419,7 @@ mod tests {
.select_font(family_id, &Default::default())
.unwrap();
let font_size = 14.0;
let buffer = cx.add_model(|cx| Buffer::new(0, "lorem ipsum dolor\n sit", cx));
let buffer = MultiBuffer::build_simple("lorem ipsum dolor\n sit", cx);
let display_map =
cx.add_model(|cx| DisplayMap::new(buffer, tab_size, font_id, font_size, None, cx));
let snapshot = display_map.update(cx, |map, cx| map.snapshot(cx));

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,141 @@
use super::{ExcerptId, MultiBufferSnapshot, ToOffset, ToPoint};
use anyhow::Result;
use std::{
cmp::Ordering,
ops::{Range, Sub},
};
use sum_tree::Bias;
use text::{rope::TextDimension, Point};
#[derive(Clone, Eq, PartialEq, Debug, Hash)]
pub struct Anchor {
pub(crate) buffer_id: usize,
pub(crate) excerpt_id: ExcerptId,
pub(crate) text_anchor: text::Anchor,
}
impl Anchor {
pub fn min() -> Self {
Self {
buffer_id: 0,
excerpt_id: ExcerptId::min(),
text_anchor: text::Anchor::min(),
}
}
pub fn max() -> Self {
Self {
buffer_id: 0,
excerpt_id: ExcerptId::max(),
text_anchor: text::Anchor::max(),
}
}
pub fn excerpt_id(&self) -> &ExcerptId {
&self.excerpt_id
}
pub fn cmp<'a>(&self, other: &Anchor, snapshot: &MultiBufferSnapshot) -> Result<Ordering> {
let excerpt_id_cmp = self.excerpt_id.cmp(&other.excerpt_id);
if excerpt_id_cmp.is_eq() {
if self.excerpt_id == ExcerptId::min() || self.excerpt_id == ExcerptId::max() {
Ok(Ordering::Equal)
} else if let Some((buffer_id, buffer_snapshot)) =
snapshot.buffer_snapshot_for_excerpt(&self.excerpt_id)
{
// Even though the anchor refers to a valid excerpt the underlying buffer might have
// changed. In that case, treat the anchor as if it were at the start of that
// excerpt.
if self.buffer_id == buffer_id && other.buffer_id == buffer_id {
self.text_anchor.cmp(&other.text_anchor, buffer_snapshot)
} else if self.buffer_id == buffer_id {
Ok(Ordering::Greater)
} else if other.buffer_id == buffer_id {
Ok(Ordering::Less)
} else {
Ok(Ordering::Equal)
}
} else {
Ok(Ordering::Equal)
}
} else {
Ok(excerpt_id_cmp)
}
}
pub fn bias_left(&self, snapshot: &MultiBufferSnapshot) -> Anchor {
if self.text_anchor.bias != Bias::Left {
if let Some((buffer_id, buffer_snapshot)) =
snapshot.buffer_snapshot_for_excerpt(&self.excerpt_id)
{
if self.buffer_id == buffer_id {
return Self {
buffer_id: self.buffer_id,
excerpt_id: self.excerpt_id.clone(),
text_anchor: self.text_anchor.bias_left(buffer_snapshot),
};
}
}
}
self.clone()
}
pub fn bias_right(&self, snapshot: &MultiBufferSnapshot) -> Anchor {
if self.text_anchor.bias != Bias::Right {
if let Some((buffer_id, buffer_snapshot)) =
snapshot.buffer_snapshot_for_excerpt(&self.excerpt_id)
{
if self.buffer_id == buffer_id {
return Self {
buffer_id: self.buffer_id,
excerpt_id: self.excerpt_id.clone(),
text_anchor: self.text_anchor.bias_right(buffer_snapshot),
};
}
}
}
self.clone()
}
pub fn summary<D>(&self, snapshot: &MultiBufferSnapshot) -> D
where
D: TextDimension + Ord + Sub<D, Output = D>,
{
snapshot.summary_for_anchor(self)
}
}
impl ToOffset for Anchor {
fn to_offset<'a>(&self, snapshot: &MultiBufferSnapshot) -> usize {
self.summary(snapshot)
}
}
impl ToPoint for Anchor {
fn to_point<'a>(&self, snapshot: &MultiBufferSnapshot) -> Point {
self.summary(snapshot)
}
}
pub trait AnchorRangeExt {
fn cmp(&self, b: &Range<Anchor>, buffer: &MultiBufferSnapshot) -> Result<Ordering>;
fn to_offset(&self, content: &MultiBufferSnapshot) -> Range<usize>;
fn to_point(&self, content: &MultiBufferSnapshot) -> Range<Point>;
}
impl AnchorRangeExt for Range<Anchor> {
fn cmp(&self, other: &Range<Anchor>, buffer: &MultiBufferSnapshot) -> Result<Ordering> {
Ok(match self.start.cmp(&other.start, buffer)? {
Ordering::Equal => other.end.cmp(&self.end, buffer)?,
ord @ _ => ord,
})
}
fn to_offset(&self, content: &MultiBufferSnapshot) -> Range<usize> {
self.start.to_offset(&content)..self.end.to_offset(&content)
}
fn to_point(&self, content: &MultiBufferSnapshot) -> Range<Point> {
self.start.to_point(&content)..self.end.to_point(&content)
}
}

View File

@@ -1,46 +1,6 @@
use gpui::{Entity, ModelHandle};
use smol::channel;
use std::marker::PhantomData;
#[cfg(test)]
#[ctor::ctor]
fn init_logger() {
// std::env::set_var("RUST_LOG", "info");
env_logger::init();
}
pub fn sample_text(rows: usize, cols: usize) -> String {
let mut text = String::new();
for row in 0..rows {
let c: char = ('a' as u32 + row as u32) as u8 as char;
let mut line = c.to_string().repeat(cols);
if row < rows - 1 {
line.push('\n');
}
text += &line;
}
text
}
pub struct Observer<T>(PhantomData<T>);
impl<T: 'static> Entity for Observer<T> {
type Event = ();
}
impl<T: Entity> Observer<T> {
pub fn new(
handle: &ModelHandle<T>,
cx: &mut gpui::TestAppContext,
) -> (ModelHandle<Self>, channel::Receiver<()>) {
let (notify_tx, notify_rx) = channel::unbounded();
let observer = cx.add_model(|cx| {
cx.observe(handle, move |_, _, _| {
let _ = notify_tx.try_send(());
})
.detach();
Observer(PhantomData)
});
(observer, notify_rx)
}
}

View File

@@ -3,16 +3,12 @@ use fuzzy::PathMatch;
use gpui::{
action,
elements::*,
keymap::{
self,
menu::{SelectNext, SelectPrev},
Binding,
},
keymap::{self, Binding},
AppContext, Axis, Entity, ModelHandle, MutableAppContext, RenderContext, Task, View,
ViewContext, ViewHandle, WeakViewHandle,
};
use postage::watch;
use project::{Project, ProjectPath};
use project::{Project, ProjectPath, WorktreeId};
use std::{
cmp,
path::Path,
@@ -22,7 +18,10 @@ use std::{
},
};
use util::post_inc;
use workspace::{Settings, Workspace};
use workspace::{
menu::{Confirm, SelectNext, SelectPrev},
Settings, Workspace,
};
pub struct FileFinder {
handle: WeakViewHandle<Self>,
@@ -40,7 +39,6 @@ pub struct FileFinder {
}
action!(Toggle);
action!(Confirm);
action!(Select, ProjectPath);
pub fn init(cx: &mut MutableAppContext) {
@@ -53,7 +51,6 @@ pub fn init(cx: &mut MutableAppContext) {
cx.add_bindings(vec![
Binding::new("cmd-p", Toggle, None),
Binding::new("escape", Toggle, Some("FileFinder")),
Binding::new("enter", Confirm, Some("FileFinder")),
]);
}
@@ -83,7 +80,7 @@ impl View for FileFinder {
.with_style(settings.theme.selector.input_editor.container)
.boxed(),
)
.with_child(Flexible::new(1.0, self.render_matches()).boxed())
.with_child(Flexible::new(1.0, false, self.render_matches()).boxed())
.boxed(),
)
.with_style(settings.theme.selector.container)
@@ -175,6 +172,7 @@ impl FileFinder {
.with_child(
Flexible::new(
1.0,
false,
Flex::column()
.with_child(
Label::new(file_name.to_string(), style.label.clone())
@@ -195,7 +193,7 @@ impl FileFinder {
.with_style(style.container);
let action = Select(ProjectPath {
worktree_id: path_match.worktree_id,
worktree_id: WorktreeId::from_usize(path_match.worktree_id),
path: path_match.path.clone(),
});
EventHandler::new(container.boxed())
@@ -249,8 +247,8 @@ impl FileFinder {
match event {
Event::Selected(project_path) => {
workspace
.open_entry(project_path.clone(), cx)
.map(|d| d.detach());
.open_path(project_path.clone(), cx)
.detach_and_log_err(cx);
workspace.dismiss_modal(cx);
}
Event::Dismissed => {
@@ -270,14 +268,14 @@ impl FileFinder {
Editor::single_line(
{
let settings = settings.clone();
move |_| {
Arc::new(move |_| {
let settings = settings.borrow();
EditorSettings {
style: settings.theme.selector.input_editor.as_editor(),
tab_size: settings.tab_size,
soft_wrap: editor::SoftWrap::None,
}
}
})
},
cx,
)
@@ -352,7 +350,8 @@ impl FileFinder {
let mat = &self.matches[selected_index];
self.selected = Some((mat.worktree_id, mat.path.clone()));
}
self.list_state.scroll_to(selected_index);
self.list_state
.scroll_to(ScrollTarget::Show(selected_index));
cx.notify();
}
@@ -363,14 +362,15 @@ impl FileFinder {
let mat = &self.matches[selected_index];
self.selected = Some((mat.worktree_id, mat.path.clone()));
}
self.list_state.scroll_to(selected_index);
self.list_state
.scroll_to(ScrollTarget::Show(selected_index));
cx.notify();
}
fn confirm(&mut self, _: &Confirm, cx: &mut ViewContext<Self>) {
if let Some(m) = self.matches.get(self.selected_index()) {
cx.emit(Event::Selected(ProjectPath {
worktree_id: m.worktree_id,
worktree_id: WorktreeId::from_usize(m.worktree_id),
path: m.path.clone(),
}));
}
@@ -414,7 +414,8 @@ impl FileFinder {
}
self.latest_search_query = query;
self.latest_search_did_cancel = did_cancel;
self.list_state.scroll_to(self.selected_index());
self.list_state
.scroll_to(ScrollTarget::Show(self.selected_index()));
cx.notify();
}
}
@@ -430,14 +431,14 @@ mod tests {
#[gpui::test]
async fn test_matching_paths(mut cx: gpui::TestAppContext) {
let mut entry_openers = Vec::new();
let mut path_openers = Vec::new();
cx.update(|cx| {
super::init(cx);
editor::init(cx, &mut entry_openers);
editor::init(cx, &mut path_openers);
});
let mut params = cx.update(WorkspaceParams::test);
params.entry_openers = Arc::from(entry_openers);
params.path_openers = Arc::from(path_openers);
params
.fs
.as_fake()

View File

@@ -9,6 +9,7 @@ impl CharBag {
}
fn insert(&mut self, c: char) {
let c = c.to_ascii_lowercase();
if c >= 'a' && c <= 'z' {
let mut count = self.0;
let idx = c as u8 - 'a' as u8;

View File

@@ -55,6 +55,7 @@ pub struct PathMatch {
#[derive(Clone, Debug)]
pub struct StringMatchCandidate {
pub id: usize,
pub string: String,
pub char_bag: CharBag,
}
@@ -109,6 +110,7 @@ impl<'a> MatchCandidate for &'a StringMatchCandidate {
#[derive(Clone, Debug)]
pub struct StringMatch {
pub candidate_id: usize,
pub score: f64,
pub positions: Vec<usize>,
pub string: String,
@@ -116,7 +118,7 @@ pub struct StringMatch {
impl PartialEq for StringMatch {
fn eq(&self, other: &Self) -> bool {
self.score.eq(&other.score)
self.cmp(other).is_eq()
}
}
@@ -133,13 +135,13 @@ impl Ord for StringMatch {
self.score
.partial_cmp(&other.score)
.unwrap_or(Ordering::Equal)
.then_with(|| self.string.cmp(&other.string))
.then_with(|| self.candidate_id.cmp(&other.candidate_id))
}
}
impl PartialEq for PathMatch {
fn eq(&self, other: &Self) -> bool {
self.score.eq(&other.score)
self.cmp(other).is_eq()
}
}
@@ -187,8 +189,8 @@ pub async fn match_strings(
for (segment_idx, results) in segment_results.iter_mut().enumerate() {
let cancel_flag = &cancel_flag;
scope.spawn(async move {
let segment_start = segment_idx * segment_size;
let segment_end = segment_start + segment_size;
let segment_start = cmp::min(segment_idx * segment_size, candidates.len());
let segment_end = cmp::min(segment_start + segment_size, candidates.len());
let mut matcher = Matcher::new(
query,
lowercase_query,
@@ -330,6 +332,7 @@ impl<'a> Matcher<'a> {
results,
cancel_flag,
|candidate, score| StringMatch {
candidate_id: candidate.id,
score,
positions: Vec::new(),
string: candidate.string.to_string(),
@@ -433,13 +436,17 @@ impl<'a> Matcher<'a> {
}
}
fn find_last_positions(&mut self, prefix: &[char], path: &[char]) -> bool {
let mut path = path.iter();
let mut prefix_iter = prefix.iter();
for (i, char) in self.query.iter().enumerate().rev() {
if let Some(j) = path.rposition(|c| c == char) {
self.last_positions[i] = j + prefix.len();
} else if let Some(j) = prefix_iter.rposition(|c| c == char) {
fn find_last_positions(
&mut self,
lowercase_prefix: &[char],
lowercase_candidate: &[char],
) -> bool {
let mut lowercase_prefix = lowercase_prefix.iter();
let mut lowercase_candidate = lowercase_candidate.iter();
for (i, char) in self.lowercase_query.iter().enumerate().rev() {
if let Some(j) = lowercase_candidate.rposition(|c| c == char) {
self.last_positions[i] = j + lowercase_prefix.len();
} else if let Some(j) = lowercase_prefix.rposition(|c| c == char) {
self.last_positions[i] = j;
} else {
return false;

View File

@@ -1,10 +1,11 @@
use text::{Bias, Point, Selection};
use editor::{display_map::ToDisplayPoint, Autoscroll, Editor, EditorSettings};
use gpui::{
action, elements::*, geometry::vector::Vector2F, keymap::Binding, Axis, Entity,
MutableAppContext, RenderContext, View, ViewContext, ViewHandle,
};
use postage::watch;
use std::sync::Arc;
use text::{Bias, Point, Selection};
use workspace::{Settings, Workspace};
action!(Toggle);
@@ -25,7 +26,7 @@ pub struct GoToLine {
line_editor: ViewHandle<Editor>,
active_editor: ViewHandle<Editor>,
restore_state: Option<RestoreState>,
line_selection: Option<Selection<usize>>,
line_selection_id: Option<usize>,
cursor_point: Point,
max_point: Point,
}
@@ -49,14 +50,14 @@ impl GoToLine {
Editor::single_line(
{
let settings = settings.clone();
move |_| {
Arc::new(move |_| {
let settings = settings.borrow();
EditorSettings {
tab_size: settings.tab_size,
style: settings.theme.selector.input_editor.as_editor(),
soft_wrap: editor::SoftWrap::None,
}
}
})
},
cx,
)
@@ -67,13 +68,14 @@ impl GoToLine {
let (restore_state, cursor_point, max_point) = active_editor.update(cx, |editor, cx| {
let restore_state = Some(RestoreState {
scroll_position: editor.scroll_position(cx),
selections: editor.selections::<usize>(cx).collect(),
selections: editor.local_selections::<usize>(cx),
});
let buffer = editor.buffer().read(cx).read(cx);
(
restore_state,
editor.newest_selection(cx).head(),
editor.buffer().read(cx).max_point(),
editor.newest_selection(&buffer).head(),
buffer.max_point(),
)
});
@@ -82,7 +84,7 @@ impl GoToLine {
line_editor,
active_editor,
restore_state,
line_selection: None,
line_selection_id: None,
cursor_point,
max_point,
}
@@ -127,7 +129,7 @@ impl GoToLine {
match event {
editor::Event::Blurred => cx.emit(Event::Dismissed),
editor::Event::Edited => {
let line_editor = self.line_editor.read(cx).buffer().read(cx).text();
let line_editor = self.line_editor.read(cx).buffer().read(cx).read(cx).text();
let mut components = line_editor.trim().split(&[',', ':'][..]);
let row = components.next().and_then(|row| row.parse::<u32>().ok());
let column = components.next().and_then(|row| row.parse::<u32>().ok());
@@ -137,13 +139,18 @@ impl GoToLine {
column.map(|column| column.saturating_sub(1)).unwrap_or(0),
)
}) {
self.line_selection = self.active_editor.update(cx, |active_editor, cx| {
self.line_selection_id = self.active_editor.update(cx, |active_editor, cx| {
let snapshot = active_editor.snapshot(cx).display_snapshot;
let point = snapshot.buffer_snapshot.clip_point(point, Bias::Left);
let display_point = point.to_display_point(&snapshot);
let row = display_point.row();
active_editor.select_ranges([point..point], Some(Autoscroll::Center), cx);
active_editor.set_highlighted_row(Some(display_point.row()));
Some(active_editor.newest_selection(cx))
active_editor.set_highlighted_rows(Some(row..row + 1));
Some(
active_editor
.newest_selection::<usize>(&snapshot.buffer_snapshot)
.id,
)
});
cx.notify();
}
@@ -157,12 +164,14 @@ impl Entity for GoToLine {
type Event = Event;
fn release(&mut self, cx: &mut MutableAppContext) {
let line_selection = self.line_selection.take();
let line_selection_id = self.line_selection_id.take();
let restore_state = self.restore_state.take();
self.active_editor.update(cx, |editor, cx| {
editor.set_highlighted_row(None);
if let Some((line_selection, restore_state)) = line_selection.zip(restore_state) {
if line_selection.id == editor.newest_selection::<usize>(cx).id {
editor.set_highlighted_rows(None);
if let Some((line_selection_id, restore_state)) = line_selection_id.zip(restore_state) {
let newest_selection =
editor.newest_selection::<usize>(&editor.buffer().read(cx).read(cx));
if line_selection_id == newest_selection.id {
editor.set_scroll_position(restore_state.scroll_position, cx);
editor.update_selections(restore_state.selections, None, cx);
}
@@ -215,6 +224,4 @@ impl View for GoToLine {
fn on_focus(&mut self, cx: &mut ViewContext<Self>) {
cx.focus(&self.line_editor);
}
fn on_blur(&mut self, _: &mut ViewContext<Self>) {}
}

View File

@@ -992,7 +992,7 @@ impl MutableAppContext {
})
}
fn observe<E, H, F>(&mut self, handle: &H, mut callback: F) -> Subscription
pub fn observe<E, H, F>(&mut self, handle: &H, mut callback: F) -> Subscription
where
E: Entity,
E::Event: 'static,
@@ -1161,11 +1161,9 @@ impl MutableAppContext {
keystroke: &Keystroke,
) -> Result<bool> {
let mut context_chain = Vec::new();
let mut context = keymap::Context::default();
for view_id in &responder_chain {
if let Some(view) = self.cx.views.get(&(window_id, *view_id)) {
context.extend(view.keymap_context(self.as_ref()));
context_chain.push(context.clone());
context_chain.push(view.keymap_context(self.as_ref()));
} else {
return Err(anyhow!(
"View {} in responder chain does not exist",
@@ -2674,9 +2672,11 @@ impl<T: Entity> ModelHandle<T> {
}
}
cx.borrow().foreground().start_waiting();
rx.recv()
.await
.expect("model dropped with pending condition");
cx.borrow().foreground().finish_waiting();
}
})
.await
@@ -2773,6 +2773,10 @@ impl<T: Entity> WeakModelHandle<T> {
}
}
pub fn id(&self) -> usize {
self.model_id
}
pub fn upgrade(self, cx: &impl UpgradeModelHandle) -> Option<ModelHandle<T>> {
cx.upgrade_model_handle(self)
}
@@ -2866,6 +2870,28 @@ impl<T: View> ViewHandle<T> {
.map_or(false, |focused_id| focused_id == self.view_id)
}
pub fn next_notification(&self, cx: &TestAppContext) -> impl Future<Output = ()> {
let (mut tx, mut rx) = mpsc::channel(1);
let mut cx = cx.cx.borrow_mut();
let subscription = cx.observe(self, move |_, _| {
tx.blocking_send(()).ok();
});
let duration = if std::env::var("CI").is_ok() {
Duration::from_secs(5)
} else {
Duration::from_secs(1)
};
async move {
let notification = timeout(duration, rx.recv())
.await
.expect("next notification timed out");
drop(subscription);
notification.expect("model dropped while test was waiting for its next notification")
}
}
pub fn condition(
&self,
cx: &TestAppContext,
@@ -2916,9 +2942,11 @@ impl<T: View> ViewHandle<T> {
}
}
cx.borrow().foreground().start_waiting();
rx.recv()
.await
.expect("view dropped with pending condition");
cx.borrow().foreground().finish_waiting();
}
})
.await
@@ -3091,14 +3119,39 @@ impl Drop for AnyViewHandle {
pub struct AnyModelHandle {
model_id: usize,
model_type: TypeId,
ref_counts: Arc<Mutex<RefCounts>>,
}
impl AnyModelHandle {
pub fn downcast<T: Entity>(self) -> Option<ModelHandle<T>> {
if self.is::<T>() {
let result = Some(ModelHandle {
model_id: self.model_id,
model_type: PhantomData,
ref_counts: self.ref_counts.clone(),
});
unsafe {
Arc::decrement_strong_count(&self.ref_counts);
}
std::mem::forget(self);
result
} else {
None
}
}
pub fn is<T: Entity>(&self) -> bool {
self.model_type == TypeId::of::<T>()
}
}
impl<T: Entity> From<ModelHandle<T>> for AnyModelHandle {
fn from(handle: ModelHandle<T>) -> Self {
handle.ref_counts.lock().inc_model(handle.model_id);
Self {
model_id: handle.model_id,
model_type: TypeId::of::<T>(),
ref_counts: handle.ref_counts.clone(),
}
}
@@ -4094,7 +4147,10 @@ mod tests {
let mut view_2 = View::new(2);
let mut view_3 = View::new(3);
view_1.keymap_context.set.insert("a".into());
view_2.keymap_context.set.insert("a".into());
view_2.keymap_context.set.insert("b".into());
view_3.keymap_context.set.insert("a".into());
view_3.keymap_context.set.insert("b".into());
view_3.keymap_context.set.insert("c".into());
let (window_id, view_1) = cx.add_window(Default::default(), |_| view_1);

View File

@@ -4,6 +4,7 @@ mod constrained_box;
mod container;
mod empty;
mod event_handler;
mod expanded;
mod flex;
mod hook;
mod image;
@@ -16,6 +17,7 @@ mod svg;
mod text;
mod uniform_list;
use self::expanded::Expanded;
pub use self::{
align::*, canvas::*, constrained_box::*, container::*, empty::*, event_handler::*, flex::*,
hook::*, image::*, label::*, list::*, mouse_event_handler::*, overlay::*, stack::*, svg::*,
@@ -130,11 +132,18 @@ pub trait Element {
Container::new(self.boxed())
}
fn expanded(self, flex: f32) -> Expanded
fn expanded(self) -> Expanded
where
Self: 'static + Sized,
{
Expanded::new(flex, self.boxed())
Expanded::new(self.boxed())
}
fn flexible(self, flex: f32, expanded: bool) -> Flexible
where
Self: 'static + Sized,
{
Flexible::new(flex, expanded, self.boxed())
}
}

View File

@@ -52,6 +52,11 @@ impl Container {
self
}
pub fn with_margin_bottom(mut self, margin: f32) -> Self {
self.style.margin.bottom = margin;
self
}
pub fn with_margin_left(mut self, margin: f32) -> Self {
self.style.margin.left = margin;
self

View File

@@ -0,0 +1,90 @@
use crate::{
geometry::{rect::RectF, vector::Vector2F},
json, DebugContext, Element, ElementBox, Event, EventContext, LayoutContext, PaintContext,
SizeConstraint,
};
use serde_json::json;
pub struct Expanded {
child: ElementBox,
full_width: bool,
full_height: bool,
}
impl Expanded {
pub fn new(child: ElementBox) -> Self {
Self {
child,
full_width: true,
full_height: true,
}
}
pub fn to_full_width(mut self) -> Self {
self.full_width = true;
self.full_height = false;
self
}
pub fn to_full_height(mut self) -> Self {
self.full_width = false;
self.full_height = true;
self
}
}
impl Element for Expanded {
type LayoutState = ();
type PaintState = ();
fn layout(
&mut self,
mut constraint: SizeConstraint,
cx: &mut LayoutContext,
) -> (Vector2F, Self::LayoutState) {
if self.full_width {
constraint.min.set_x(constraint.max.x());
}
if self.full_height {
constraint.min.set_y(constraint.max.y());
}
let size = self.child.layout(constraint, cx);
(size, ())
}
fn paint(
&mut self,
bounds: RectF,
visible_bounds: RectF,
_: &mut Self::LayoutState,
cx: &mut PaintContext,
) -> Self::PaintState {
self.child.paint(bounds.origin(), visible_bounds, cx);
}
fn dispatch_event(
&mut self,
event: &Event,
_: RectF,
_: &mut Self::LayoutState,
_: &mut Self::PaintState,
cx: &mut EventContext,
) -> bool {
self.child.dispatch_event(event, cx)
}
fn debug(
&self,
_: RectF,
_: &Self::LayoutState,
_: &Self::PaintState,
cx: &DebugContext,
) -> json::Value {
json!({
"type": "Expanded",
"full_width": self.full_width,
"full_height": self.full_height,
"child": self.child.debug(cx)
})
}
}

View File

@@ -228,88 +228,15 @@ struct FlexParentData {
expanded: bool,
}
pub struct Expanded {
metadata: FlexParentData,
child: ElementBox,
}
impl Expanded {
pub fn new(flex: f32, child: ElementBox) -> Self {
Expanded {
metadata: FlexParentData {
flex,
expanded: true,
},
child,
}
}
}
impl Element for Expanded {
type LayoutState = ();
type PaintState = ();
fn layout(
&mut self,
constraint: SizeConstraint,
cx: &mut LayoutContext,
) -> (Vector2F, Self::LayoutState) {
let size = self.child.layout(constraint, cx);
(size, ())
}
fn paint(
&mut self,
bounds: RectF,
visible_bounds: RectF,
_: &mut Self::LayoutState,
cx: &mut PaintContext,
) -> Self::PaintState {
self.child.paint(bounds.origin(), visible_bounds, cx)
}
fn dispatch_event(
&mut self,
event: &Event,
_: RectF,
_: &mut Self::LayoutState,
_: &mut Self::PaintState,
cx: &mut EventContext,
) -> bool {
self.child.dispatch_event(event, cx)
}
fn metadata(&self) -> Option<&dyn Any> {
Some(&self.metadata)
}
fn debug(
&self,
_: RectF,
_: &Self::LayoutState,
_: &Self::PaintState,
cx: &DebugContext,
) -> Value {
json!({
"type": "Expanded",
"flex": self.metadata.flex,
"child": self.child.debug(cx)
})
}
}
pub struct Flexible {
metadata: FlexParentData,
child: ElementBox,
}
impl Flexible {
pub fn new(flex: f32, child: ElementBox) -> Self {
pub fn new(flex: f32, expanded: bool, child: ElementBox) -> Self {
Flexible {
metadata: FlexParentData {
flex,
expanded: false,
},
metadata: FlexParentData { flex, expanded },
child,
}
}

View File

@@ -1,35 +1,56 @@
use std::{ops::Range, sync::Arc};
use crate::{
color::Color,
fonts::TextStyle,
fonts::{HighlightStyle, TextStyle},
geometry::{
rect::RectF,
vector::{vec2f, Vector2F},
},
json::{ToJson, Value},
text_layout::{Line, ShapedBoundary},
DebugContext, Element, Event, EventContext, LayoutContext, PaintContext, SizeConstraint,
text_layout::{Line, RunStyle, ShapedBoundary},
DebugContext, Element, Event, EventContext, FontCache, LayoutContext, PaintContext,
SizeConstraint, TextLayoutCache,
};
use serde_json::json;
pub struct Text {
text: String,
style: TextStyle,
soft_wrap: bool,
highlights: Vec<(Range<usize>, HighlightStyle)>,
}
pub struct LayoutState {
lines: Vec<(Line, Vec<ShapedBoundary>)>,
shaped_lines: Vec<Line>,
wrap_boundaries: Vec<Vec<ShapedBoundary>>,
line_height: f32,
}
impl Text {
pub fn new(text: String, style: TextStyle) -> Self {
Self { text, style }
Self {
text,
style,
soft_wrap: true,
highlights: Vec::new(),
}
}
pub fn with_default_color(mut self, color: Color) -> Self {
self.style.color = color;
self
}
pub fn with_highlights(mut self, runs: Vec<(Range<usize>, HighlightStyle)>) -> Self {
self.highlights = runs;
self
}
pub fn with_soft_wrap(mut self, soft_wrap: bool) -> Self {
self.soft_wrap = soft_wrap;
self
}
}
impl Element for Text {
@@ -41,28 +62,59 @@ impl Element for Text {
constraint: SizeConstraint,
cx: &mut LayoutContext,
) -> (Vector2F, Self::LayoutState) {
let font_id = self.style.font_id;
let line_height = cx.font_cache.line_height(font_id, self.style.font_size);
// Convert the string and highlight ranges into an iterator of highlighted chunks.
let mut offset = 0;
let mut highlight_ranges = self.highlights.iter().peekable();
let chunks = std::iter::from_fn(|| {
let result;
if let Some((range, highlight)) = highlight_ranges.peek() {
if offset < range.start {
result = Some((&self.text[offset..range.start], None));
offset = range.start;
} else {
result = Some((&self.text[range.clone()], Some(*highlight)));
highlight_ranges.next();
offset = range.end;
}
} else if offset < self.text.len() {
result = Some((&self.text[offset..], None));
offset = self.text.len();
} else {
result = None;
}
result
});
let mut wrapper = cx.font_cache.line_wrapper(font_id, self.style.font_size);
let mut lines = Vec::new();
// Perform shaping on these highlighted chunks
let shaped_lines = layout_highlighted_chunks(
chunks,
&self.style,
cx.text_layout_cache,
&cx.font_cache,
usize::MAX,
self.text.matches('\n').count() + 1,
);
// If line wrapping is enabled, wrap each of the shaped lines.
let font_id = self.style.font_id;
let mut line_count = 0;
let mut max_line_width = 0_f32;
for line in self.text.lines() {
let shaped_line = cx.text_layout_cache.layout_str(
line,
self.style.font_size,
&[(line.len(), self.style.to_run())],
);
let wrap_boundaries = wrapper
.wrap_shaped_line(line, &shaped_line, constraint.max.x())
.collect::<Vec<_>>();
let mut wrap_boundaries = Vec::new();
let mut wrapper = cx.font_cache.line_wrapper(font_id, self.style.font_size);
for (line, shaped_line) in self.text.lines().zip(&shaped_lines) {
if self.soft_wrap {
let boundaries = wrapper
.wrap_shaped_line(line, shaped_line, constraint.max.x())
.collect::<Vec<_>>();
line_count += boundaries.len() + 1;
wrap_boundaries.push(boundaries);
} else {
line_count += 1;
}
max_line_width = max_line_width.max(shaped_line.width());
line_count += wrap_boundaries.len() + 1;
lines.push((shaped_line, wrap_boundaries));
}
let line_height = cx.font_cache.line_height(font_id, self.style.font_size);
let size = vec2f(
max_line_width
.ceil()
@@ -70,7 +122,14 @@ impl Element for Text {
.min(constraint.max.x()),
(line_height * line_count as f32).ceil(),
);
(size, LayoutState { lines, line_height })
(
size,
LayoutState {
shaped_lines,
wrap_boundaries,
line_height,
},
)
}
fn paint(
@@ -81,8 +140,10 @@ impl Element for Text {
cx: &mut PaintContext,
) -> Self::PaintState {
let mut origin = bounds.origin();
for (line, wrap_boundaries) in &layout.lines {
let wrapped_line_boundaries = RectF::new(
let empty = Vec::new();
for (ix, line) in layout.shaped_lines.iter().enumerate() {
let wrap_boundaries = layout.wrap_boundaries.get(ix).unwrap_or(&empty);
let boundaries = RectF::new(
origin,
vec2f(
bounds.width(),
@@ -90,16 +151,20 @@ impl Element for Text {
),
);
if wrapped_line_boundaries.intersects(visible_bounds) {
line.paint_wrapped(
origin,
visible_bounds,
layout.line_height,
wrap_boundaries.iter().copied(),
cx,
);
if boundaries.intersects(visible_bounds) {
if self.soft_wrap {
line.paint_wrapped(
origin,
visible_bounds,
layout.line_height,
wrap_boundaries.iter().copied(),
cx,
);
} else {
line.paint(origin, visible_bounds, layout.line_height, cx);
}
}
origin.set_y(wrapped_line_boundaries.max_y());
origin.set_y(boundaries.max_y());
}
}
@@ -129,3 +194,71 @@ impl Element for Text {
})
}
}
/// Perform text layout on a series of highlighted chunks of text.
pub fn layout_highlighted_chunks<'a>(
chunks: impl Iterator<Item = (&'a str, Option<HighlightStyle>)>,
style: &'a TextStyle,
text_layout_cache: &'a TextLayoutCache,
font_cache: &'a Arc<FontCache>,
max_line_len: usize,
max_line_count: usize,
) -> Vec<Line> {
let mut layouts = Vec::with_capacity(max_line_count);
let mut prev_font_properties = style.font_properties.clone();
let mut prev_font_id = style.font_id;
let mut line = String::new();
let mut styles = Vec::new();
let mut row = 0;
let mut line_exceeded_max_len = false;
for (chunk, highlight_style) in chunks.chain([("\n", None)]) {
for (ix, mut line_chunk) in chunk.split('\n').enumerate() {
if ix > 0 {
layouts.push(text_layout_cache.layout_str(&line, style.font_size, &styles));
line.clear();
styles.clear();
row += 1;
line_exceeded_max_len = false;
if row == max_line_count {
return layouts;
}
}
if !line_chunk.is_empty() && !line_exceeded_max_len {
let highlight_style = highlight_style.unwrap_or(style.clone().into());
// Avoid a lookup if the font properties match the previous ones.
let font_id = if highlight_style.font_properties == prev_font_properties {
prev_font_id
} else {
font_cache
.select_font(style.font_family_id, &highlight_style.font_properties)
.unwrap_or(style.font_id)
};
if line.len() + line_chunk.len() > max_line_len {
let mut chunk_len = max_line_len - line.len();
while !line_chunk.is_char_boundary(chunk_len) {
chunk_len -= 1;
}
line_chunk = &line_chunk[..chunk_len];
line_exceeded_max_len = true;
}
line.push_str(line_chunk);
styles.push((
line_chunk.len(),
RunStyle {
font_id,
color: highlight_style.color,
underline: highlight_style.underline,
},
));
prev_font_id = font_id;
prev_font_properties = highlight_style.font_properties;
}
}
}
layouts
}

View File

@@ -14,9 +14,15 @@ use std::{cmp, ops::Range, sync::Arc};
#[derive(Clone, Default)]
pub struct UniformListState(Arc<Mutex<StateInner>>);
#[derive(Debug)]
pub enum ScrollTarget {
Show(usize),
Center(usize),
}
impl UniformListState {
pub fn scroll_to(&self, item_ix: usize) {
self.0.lock().scroll_to = Some(item_ix);
pub fn scroll_to(&self, scroll_to: ScrollTarget) {
self.0.lock().scroll_to = Some(scroll_to);
}
pub fn scroll_top(&self) -> f32 {
@@ -27,7 +33,7 @@ impl UniformListState {
#[derive(Default)]
struct StateInner {
scroll_top: f32,
scroll_to: Option<usize>,
scroll_to: Option<ScrollTarget>,
}
pub struct LayoutState {
@@ -93,20 +99,38 @@ where
fn autoscroll(&mut self, scroll_max: f32, list_height: f32, item_height: f32) {
let mut state = self.state.0.lock();
if state.scroll_top > scroll_max {
state.scroll_top = scroll_max;
}
if let Some(scroll_to) = state.scroll_to.take() {
let item_ix;
let center;
match scroll_to {
ScrollTarget::Show(ix) => {
item_ix = ix;
center = false;
}
ScrollTarget::Center(ix) => {
item_ix = ix;
center = true;
}
}
if let Some(item_ix) = state.scroll_to.take() {
let item_top = self.padding_top + item_ix as f32 * item_height;
let item_bottom = item_top + item_height;
if item_top < state.scroll_top {
state.scroll_top = item_top;
} else if item_bottom > (state.scroll_top + list_height) {
state.scroll_top = item_bottom - list_height;
if center {
let item_center = item_top + item_height / 2.;
state.scroll_top = (item_center - list_height / 2.).max(0.);
} else {
let scroll_bottom = state.scroll_top + list_height;
if item_top < state.scroll_top {
state.scroll_top = item_top;
} else if item_bottom > scroll_bottom {
state.scroll_top = item_bottom - list_height;
}
}
}
if state.scroll_top > scroll_max {
state.scroll_top = scroll_max;
}
}
fn scroll_top(&self) -> f32 {

View File

@@ -7,7 +7,7 @@ use rand::prelude::*;
use smol::{channel, prelude::*, Executor, Timer};
use std::{
any::Any,
fmt::{self, Debug},
fmt::{self, Debug, Display},
marker::PhantomData,
mem,
ops::RangeInclusive,
@@ -25,7 +25,7 @@ use waker_fn::waker_fn;
use crate::{
platform::{self, Dispatcher},
util,
util, MutableAppContext,
};
pub enum Foreground {
@@ -54,6 +54,7 @@ type AnyLocalTask = async_task::Task<Box<dyn Any + 'static>>;
#[must_use]
pub enum Task<T> {
Ready(Option<T>),
Local {
any_task: AnyLocalTask,
result_type: PhantomData<T>,
@@ -76,6 +77,7 @@ struct DeterministicState {
block_on_ticks: RangeInclusive<usize>,
now: Instant,
pending_timers: Vec<(Instant, barrier::Sender)>,
waiting_backtrace: Option<Backtrace>,
}
pub struct Deterministic {
@@ -96,6 +98,7 @@ impl Deterministic {
block_on_ticks: 0..=1000,
now: Instant::now(),
pending_timers: Default::default(),
waiting_backtrace: None,
})),
parker: Default::default(),
}
@@ -142,8 +145,8 @@ impl Deterministic {
return result;
}
if !woken.load(SeqCst) && self.state.lock().forbid_parking {
panic!("deterministic executor parked after a call to forbid_parking");
if !woken.load(SeqCst) {
self.state.lock().will_park();
}
woken.store(false, SeqCst);
@@ -205,6 +208,7 @@ impl Deterministic {
}
let state = self.state.lock();
if state.scheduled_from_foreground.is_empty()
&& state.scheduled_from_background.is_empty()
&& state.spawned_from_foreground.is_empty()
@@ -243,11 +247,9 @@ impl Deterministic {
if let Poll::Ready(result) = future.as_mut().poll(&mut cx) {
return Some(result);
}
let state = self.state.lock();
let mut state = self.state.lock();
if state.scheduled_from_background.is_empty() {
if state.forbid_parking {
panic!("deterministic executor parked after a call to forbid_parking");
}
state.will_park();
drop(state);
self.parker.lock().park();
}
@@ -260,6 +262,26 @@ impl Deterministic {
}
}
impl DeterministicState {
fn will_park(&mut self) {
if self.forbid_parking {
let mut backtrace_message = String::new();
if let Some(backtrace) = self.waiting_backtrace.as_mut() {
backtrace.resolve();
backtrace_message = format!(
"\nbacktrace of waiting future:\n{:?}",
CwdBacktrace::new(backtrace)
);
}
panic!(
"deterministic executor parked after a call to forbid_parking{}",
backtrace_message
);
}
}
}
#[derive(Default)]
struct Trace {
executed: Vec<Backtrace>,
@@ -305,32 +327,53 @@ impl Trace {
}
}
impl Debug for Trace {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
struct FirstCwdFrameInBacktrace<'a>(&'a Backtrace);
struct CwdBacktrace<'a> {
backtrace: &'a Backtrace,
first_frame_only: bool,
}
impl<'a> Debug for FirstCwdFrameInBacktrace<'a> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> std::fmt::Result {
let cwd = std::env::current_dir().unwrap();
let mut print_path = |fmt: &mut fmt::Formatter<'_>, path: BytesOrWideString<'_>| {
fmt::Display::fmt(&path, fmt)
};
let mut fmt = BacktraceFmt::new(f, backtrace::PrintFmt::Full, &mut print_path);
for frame in self.0.frames() {
let mut formatted_frame = fmt.frame();
if frame
.symbols()
.iter()
.any(|s| s.filename().map_or(false, |f| f.starts_with(&cwd)))
{
formatted_frame.backtrace_frame(frame)?;
break;
}
impl<'a> CwdBacktrace<'a> {
fn new(backtrace: &'a Backtrace) -> Self {
Self {
backtrace,
first_frame_only: false,
}
}
fn first_frame(backtrace: &'a Backtrace) -> Self {
Self {
backtrace,
first_frame_only: true,
}
}
}
impl<'a> Debug for CwdBacktrace<'a> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> std::fmt::Result {
let cwd = std::env::current_dir().unwrap();
let mut print_path = |fmt: &mut fmt::Formatter<'_>, path: BytesOrWideString<'_>| {
fmt::Display::fmt(&path, fmt)
};
let mut fmt = BacktraceFmt::new(f, backtrace::PrintFmt::Full, &mut print_path);
for frame in self.backtrace.frames() {
let mut formatted_frame = fmt.frame();
if frame
.symbols()
.iter()
.any(|s| s.filename().map_or(false, |f| f.starts_with(&cwd)))
{
formatted_frame.backtrace_frame(frame)?;
if self.first_frame_only {
break;
}
fmt.finish()
}
}
fmt.finish()
}
}
impl Debug for Trace {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
for ((backtrace, scheduled), spawned_from_foreground) in self
.executed
.iter()
@@ -339,7 +382,7 @@ impl Debug for Trace {
{
writeln!(f, "Scheduled")?;
for backtrace in scheduled {
writeln!(f, "- {:?}", FirstCwdFrameInBacktrace(backtrace))?;
writeln!(f, "- {:?}", CwdBacktrace::first_frame(backtrace))?;
}
if scheduled.is_empty() {
writeln!(f, "None")?;
@@ -348,14 +391,14 @@ impl Debug for Trace {
writeln!(f, "Spawned from foreground")?;
for backtrace in spawned_from_foreground {
writeln!(f, "- {:?}", FirstCwdFrameInBacktrace(backtrace))?;
writeln!(f, "- {:?}", CwdBacktrace::first_frame(backtrace))?;
}
if spawned_from_foreground.is_empty() {
writeln!(f, "None")?;
}
writeln!(f, "==========")?;
writeln!(f, "Run: {:?}", FirstCwdFrameInBacktrace(backtrace))?;
writeln!(f, "Run: {:?}", CwdBacktrace::first_frame(backtrace))?;
writeln!(f, "+++++++++++++++++++")?;
}
@@ -432,6 +475,31 @@ impl Foreground {
*any_value.downcast().unwrap()
}
pub fn parking_forbidden(&self) -> bool {
match self {
Self::Deterministic(executor) => executor.state.lock().forbid_parking,
_ => panic!("this method can only be called on a deterministic executor"),
}
}
pub fn start_waiting(&self) {
match self {
Self::Deterministic(executor) => {
executor.state.lock().waiting_backtrace = Some(Backtrace::new_unresolved());
}
_ => panic!("this method can only be called on a deterministic executor"),
}
}
pub fn finish_waiting(&self) {
match self {
Self::Deterministic(executor) => {
executor.state.lock().waiting_backtrace.take();
}
_ => panic!("this method can only be called on a deterministic executor"),
}
}
pub fn forbid_parking(&self) {
match self {
Self::Deterministic(executor) => {
@@ -594,6 +662,10 @@ pub fn deterministic(seed: u64) -> (Rc<Foreground>, Arc<Background>) {
}
impl<T> Task<T> {
pub fn ready(value: T) -> Self {
Self::Ready(Some(value))
}
fn local(any_task: AnyLocalTask) -> Self {
Self::Local {
any_task,
@@ -603,12 +675,24 @@ impl<T> Task<T> {
pub fn detach(self) {
match self {
Task::Ready(_) => {}
Task::Local { any_task, .. } => any_task.detach(),
Task::Send { any_task, .. } => any_task.detach(),
}
}
}
impl<T: 'static, E: 'static + Display> Task<Result<T, E>> {
pub fn detach_and_log_err(self, cx: &mut MutableAppContext) {
cx.spawn(|_| async move {
if let Err(err) = self.await {
log::error!("{}", err);
}
})
.detach();
}
}
impl<T: Send> Task<T> {
fn send(any_task: AnyTask) -> Self {
Self::Send {
@@ -621,6 +705,7 @@ impl<T: Send> Task<T> {
impl<T: fmt::Debug> fmt::Debug for Task<T> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self {
Task::Ready(value) => value.fmt(f),
Task::Local { any_task, .. } => any_task.fmt(f),
Task::Send { any_task, .. } => any_task.fmt(f),
}
@@ -632,6 +717,7 @@ impl<T: 'static> Future for Task<T> {
fn poll(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Self::Output> {
match unsafe { self.get_unchecked_mut() } {
Task::Ready(value) => Poll::Ready(value.take().unwrap()),
Task::Local { any_task, .. } => {
any_task.poll(cx).map(|value| *value.downcast().unwrap())
}

View File

@@ -30,7 +30,7 @@ pub struct TextStyle {
pub underline: Option<Color>,
}
#[derive(Copy, Clone, Debug, Default)]
#[derive(Copy, Clone, Debug, Default, PartialEq, Eq)]
pub struct HighlightStyle {
pub color: Color,
pub font_properties: Properties,

View File

@@ -23,6 +23,7 @@ struct Pending {
context: Option<Context>,
}
#[derive(Default)]
pub struct Keymap(Vec<Binding>);
pub struct Binding {
@@ -153,24 +154,6 @@ impl Keymap {
}
}
pub mod menu {
use crate::action;
action!(SelectPrev);
action!(SelectNext);
}
impl Default for Keymap {
fn default() -> Self {
Self(vec![
Binding::new("up", menu::SelectPrev, Some("menu")),
Binding::new("ctrl-p", menu::SelectPrev, Some("menu")),
Binding::new("down", menu::SelectNext, Some("menu")),
Binding::new("ctrl-n", menu::SelectNext, Some("menu")),
])
}
}
impl Binding {
pub fn new<A: Action>(keystrokes: &str, action: A, context: Option<&str>) -> Self {
let context = if let Some(context) = context {

View File

@@ -7,7 +7,13 @@ use std::{
},
};
use crate::{executor, platform, FontCache, MutableAppContext, Platform, TestAppContext};
use futures::StreamExt;
use smol::channel;
use crate::{
executor, platform, Entity, FontCache, Handle, MutableAppContext, Platform, Subscription,
TestAppContext,
};
#[cfg(test)]
#[ctor::ctor]
@@ -87,3 +93,47 @@ pub fn run_test(
}
}
}
pub struct Observation<T> {
rx: channel::Receiver<T>,
_subscription: Subscription,
}
impl<T> futures::Stream for Observation<T> {
type Item = T;
fn poll_next(
mut self: std::pin::Pin<&mut Self>,
cx: &mut std::task::Context<'_>,
) -> std::task::Poll<Option<Self::Item>> {
self.rx.poll_next_unpin(cx)
}
}
pub fn observe<T: Entity>(entity: &impl Handle<T>, cx: &mut TestAppContext) -> Observation<()> {
let (tx, rx) = smol::channel::unbounded();
let _subscription = cx.update(|cx| {
cx.observe(entity, move |_, _| {
let _ = smol::block_on(tx.send(()));
})
});
Observation { rx, _subscription }
}
pub fn subscribe<T: Entity>(
entity: &impl Handle<T>,
cx: &mut TestAppContext,
) -> Observation<T::Event>
where
T::Event: Clone,
{
let (tx, rx) = smol::channel::unbounded();
let _subscription = cx.update(|cx| {
cx.subscribe(entity, move |_, event, _| {
let _ = smol::block_on(tx.send(event.clone()));
})
});
Observation { rx, _subscription }
}

View File

@@ -24,13 +24,13 @@ pub fn new_journal_entry(app_state: Arc<AppState>, cx: &mut MutableAppContext) {
let journal_dir = home_dir.join("journal");
let month_dir = journal_dir
.join(format!("{:2}", now.year()))
.join(format!("{:2}", now.month()));
let entry_path = month_dir.join(format!("{:2}.md", now.day()));
.join(format!("{:02}", now.year()))
.join(format!("{:02}", now.month()));
let entry_path = month_dir.join(format!("{:02}.md", now.day()));
let now = now.time();
let (pm, hour) = now.hour12();
let am_or_pm = if pm { "PM" } else { "AM" };
let entry_heading = format!("# {}:{:2} {}\n\n", hour, now.minute(), am_or_pm);
let entry_heading = format!("# {}:{:02} {}\n\n", hour, now.minute(), am_or_pm);
let create_entry = cx.background().spawn(async move {
std::fs::create_dir_all(month_dir)?;
@@ -57,7 +57,7 @@ pub fn new_journal_entry(app_state: Arc<AppState>, cx: &mut MutableAppContext) {
if let Some(Some(Ok(item))) = opened.first() {
if let Some(editor) = item.to_any().downcast::<Editor>() {
editor.update(&mut cx, |editor, cx| {
let len = editor.buffer().read(cx).len();
let len = editor.buffer().read(cx).read(cx).len();
editor.select_ranges([len..len], Some(Autoscroll::Center), cx);
if len > 0 {
editor.insert("\n\n", cx);

View File

@@ -1,7 +1,7 @@
[package]
name = "language"
version = "0.1.0"
edition = "2018"
edition = "2021"
[lib]
path = "src/language.rs"
@@ -9,20 +9,26 @@ path = "src/language.rs"
[features]
test-support = [
"rand",
"text/test-support",
"collections/test-support",
"lsp/test-support",
"text/test-support",
"tree-sitter-rust",
"util/test-support",
]
[dependencies]
text = { path = "../text" }
clock = { path = "../clock" }
collections = { path = "../collections" }
fuzzy = { path = "../fuzzy" }
gpui = { path = "../gpui" }
lsp = { path = "../lsp" }
rpc = { path = "../rpc" }
sum_tree = { path = "../sum_tree" }
text = { path = "../text" }
theme = { path = "../theme" }
util = { path = "../util" }
anyhow = "1.0.38"
async-trait = "0.1"
futures = "0.3"
lazy_static = "1.4"
log = "0.4"
@@ -31,14 +37,19 @@ postage = { version = "0.4.1", features = ["futures-traits"] }
rand = { version = "0.8.3", optional = true }
serde = { version = "1", features = ["derive"] }
similar = "1.3"
smallvec = { version = "1.6", features = ["union"] }
smol = "1.2"
tree-sitter = "0.20.0"
tree-sitter-rust = { version = "0.20.0", optional = true }
[dev-dependencies]
text = { path = "../text", features = ["test-support"] }
collections = { path = "../collections", features = ["test-support"] }
gpui = { path = "../gpui", features = ["test-support"] }
lsp = { path = "../lsp", features = ["test-support"] }
text = { path = "../text", features = ["test-support"] }
util = { path = "../util", features = ["test-support"] }
ctor = "0.1"
env_logger = "0.8"
rand = "0.8.3"
tree-sitter-rust = "0.20.0"
unindent = "0.1.7"

View File

@@ -3,4 +3,3 @@ fn main() {
println!("cargo:rustc-env=ZED_BUNDLE={}", bundled);
}
}

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,211 @@
use crate::Diagnostic;
use collections::HashMap;
use std::{
cmp::{Ordering, Reverse},
iter,
ops::Range,
};
use sum_tree::{self, Bias, SumTree};
use text::{Anchor, FromAnchor, Point, ToOffset};
#[derive(Clone, Debug)]
pub struct DiagnosticSet {
diagnostics: SumTree<DiagnosticEntry<Anchor>>,
}
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct DiagnosticEntry<T> {
pub range: Range<T>,
pub diagnostic: Diagnostic,
}
#[derive(Debug)]
pub struct DiagnosticGroup<T> {
pub entries: Vec<DiagnosticEntry<T>>,
pub primary_ix: usize,
}
#[derive(Clone, Debug)]
pub struct Summary {
start: Anchor,
end: Anchor,
min_start: Anchor,
max_end: Anchor,
count: usize,
}
impl DiagnosticSet {
pub fn from_sorted_entries<I>(iter: I, buffer: &text::BufferSnapshot) -> Self
where
I: IntoIterator<Item = DiagnosticEntry<Anchor>>,
{
Self {
diagnostics: SumTree::from_iter(iter, buffer),
}
}
pub fn new<I>(iter: I, buffer: &text::BufferSnapshot) -> Self
where
I: IntoIterator<Item = DiagnosticEntry<Point>>,
{
let mut entries = iter.into_iter().collect::<Vec<_>>();
entries.sort_unstable_by_key(|entry| (entry.range.start, Reverse(entry.range.end)));
Self {
diagnostics: SumTree::from_iter(
entries.into_iter().map(|entry| DiagnosticEntry {
range: buffer.anchor_before(entry.range.start)
..buffer.anchor_after(entry.range.end),
diagnostic: entry.diagnostic,
}),
buffer,
),
}
}
pub fn iter(&self) -> impl Iterator<Item = &DiagnosticEntry<Anchor>> {
self.diagnostics.iter()
}
pub fn range<'a, T, O>(
&'a self,
range: Range<T>,
buffer: &'a text::BufferSnapshot,
inclusive: bool,
) -> impl 'a + Iterator<Item = DiagnosticEntry<O>>
where
T: 'a + ToOffset,
O: FromAnchor,
{
let end_bias = if inclusive { Bias::Right } else { Bias::Left };
let range = buffer.anchor_before(range.start)..buffer.anchor_at(range.end, end_bias);
let mut cursor = self.diagnostics.filter::<_, ()>(
{
move |summary: &Summary| {
let start_cmp = range.start.cmp(&summary.max_end, buffer).unwrap();
let end_cmp = range.end.cmp(&summary.min_start, buffer).unwrap();
if inclusive {
start_cmp <= Ordering::Equal && end_cmp >= Ordering::Equal
} else {
start_cmp == Ordering::Less && end_cmp == Ordering::Greater
}
}
},
buffer,
);
iter::from_fn({
move || {
if let Some(diagnostic) = cursor.item() {
cursor.next(buffer);
Some(diagnostic.resolve(buffer))
} else {
None
}
}
})
}
pub fn groups(&self, output: &mut Vec<DiagnosticGroup<Anchor>>, buffer: &text::BufferSnapshot) {
let mut groups = HashMap::default();
for entry in self.diagnostics.iter() {
groups
.entry(entry.diagnostic.group_id)
.or_insert(Vec::new())
.push(entry.clone());
}
let start_ix = output.len();
output.extend(groups.into_values().filter_map(|mut entries| {
entries.sort_unstable_by(|a, b| a.range.start.cmp(&b.range.start, buffer).unwrap());
entries
.iter()
.position(|entry| entry.diagnostic.is_primary)
.map(|primary_ix| DiagnosticGroup {
entries,
primary_ix,
})
}));
output[start_ix..].sort_unstable_by(|a, b| {
a.entries[a.primary_ix]
.range
.start
.cmp(&b.entries[b.primary_ix].range.start, buffer)
.unwrap()
});
}
pub fn group<'a, O: FromAnchor>(
&'a self,
group_id: usize,
buffer: &'a text::BufferSnapshot,
) -> impl 'a + Iterator<Item = DiagnosticEntry<O>> {
self.iter()
.filter(move |entry| entry.diagnostic.group_id == group_id)
.map(|entry| entry.resolve(buffer))
}
}
impl Default for DiagnosticSet {
fn default() -> Self {
Self {
diagnostics: Default::default(),
}
}
}
impl sum_tree::Item for DiagnosticEntry<Anchor> {
type Summary = Summary;
fn summary(&self) -> Self::Summary {
Summary {
start: self.range.start.clone(),
end: self.range.end.clone(),
min_start: self.range.start.clone(),
max_end: self.range.end.clone(),
count: 1,
}
}
}
impl DiagnosticEntry<Anchor> {
pub fn resolve<O: FromAnchor>(&self, buffer: &text::BufferSnapshot) -> DiagnosticEntry<O> {
DiagnosticEntry {
range: O::from_anchor(&self.range.start, buffer)
..O::from_anchor(&self.range.end, buffer),
diagnostic: self.diagnostic.clone(),
}
}
}
impl Default for Summary {
fn default() -> Self {
Self {
start: Anchor::min(),
end: Anchor::max(),
min_start: Anchor::max(),
max_end: Anchor::min(),
count: 0,
}
}
}
impl sum_tree::Summary for Summary {
type Context = text::BufferSnapshot;
fn add_summary(&mut self, other: &Self, buffer: &Self::Context) {
if other
.min_start
.cmp(&self.min_start, buffer)
.unwrap()
.is_lt()
{
self.min_start = other.min_start.clone();
}
if other.max_end.cmp(&self.max_end, buffer).unwrap().is_gt() {
self.max_end = other.max_end.clone();
}
self.start = other.start.clone();
self.end = other.end.clone();
self.count += other.count;
}
}

View File

@@ -1,5 +1,7 @@
mod buffer;
mod diagnostic_set;
mod highlight_map;
mod outline;
pub mod proto;
#[cfg(test)]
mod tests;
@@ -7,13 +9,15 @@ mod tests;
use anyhow::{anyhow, Result};
pub use buffer::Operation;
pub use buffer::*;
use gpui::{executor::Background, AppContext};
use collections::HashSet;
pub use diagnostic_set::DiagnosticEntry;
use gpui::AppContext;
use highlight_map::HighlightMap;
use lazy_static::lazy_static;
use lsp::LanguageServer;
pub use outline::{Outline, OutlineItem};
use parking_lot::Mutex;
use serde::Deserialize;
use std::{collections::HashSet, path::Path, str, sync::Arc};
use std::{ops::Range, path::Path, str, sync::Arc};
use theme::SyntaxTheme;
use tree_sitter::{self, Query};
pub use tree_sitter::{Parser, Tree};
@@ -31,6 +35,10 @@ lazy_static! {
));
}
pub trait ToPointUtf16 {
fn to_point_utf16(self) -> PointUtf16;
}
#[derive(Default, Deserialize)]
pub struct LanguageConfig {
pub name: String,
@@ -44,9 +52,10 @@ pub struct LanguageConfig {
pub struct LanguageServerConfig {
pub binary: String,
pub disk_based_diagnostic_sources: HashSet<String>,
pub disk_based_diagnostics_progress_token: Option<String>,
#[cfg(any(test, feature = "test-support"))]
#[serde(skip)]
pub fake_server: Option<(Arc<LanguageServer>, Arc<std::sync::atomic::AtomicBool>)>,
pub fake_server: Option<(Arc<lsp::LanguageServer>, Arc<std::sync::atomic::AtomicBool>)>,
}
#[derive(Clone, Debug, Deserialize)]
@@ -67,6 +76,7 @@ pub struct Grammar {
pub(crate) highlights_query: Query,
pub(crate) brackets_query: Query,
pub(crate) indents_query: Query,
pub(crate) outline_query: Query,
pub(crate) highlight_map: Mutex<HighlightMap>,
}
@@ -120,6 +130,7 @@ impl Language {
brackets_query: Query::new(ts_language, "").unwrap(),
highlights_query: Query::new(ts_language, "").unwrap(),
indents_query: Query::new(ts_language, "").unwrap(),
outline_query: Query::new(ts_language, "").unwrap(),
ts_language,
highlight_map: Default::default(),
})
@@ -157,6 +168,16 @@ impl Language {
Ok(self)
}
pub fn with_outline_query(mut self, source: &str) -> Result<Self> {
let grammar = self
.grammar
.as_mut()
.and_then(Arc::get_mut)
.ok_or_else(|| anyhow!("grammar does not exist or is already being used"))?;
grammar.outline_query = Query::new(grammar.ts_language, source)?;
Ok(self)
}
pub fn name(&self) -> &str {
self.config.name.as_str()
}
@@ -197,6 +218,13 @@ impl Language {
.map(|config| &config.disk_based_diagnostic_sources)
}
pub fn disk_based_diagnostics_progress_token(&self) -> Option<&String> {
self.config
.language_server
.as_ref()
.and_then(|config| config.disk_based_diagnostics_progress_token.as_ref())
}
pub fn brackets(&self) -> &[BracketPair] {
&self.config.brackets
}
@@ -217,7 +245,9 @@ impl Grammar {
#[cfg(any(test, feature = "test-support"))]
impl LanguageServerConfig {
pub async fn fake(executor: Arc<Background>) -> (Self, lsp::FakeLanguageServer) {
pub async fn fake(
executor: Arc<gpui::executor::Background>,
) -> (Self, lsp::FakeLanguageServer) {
let (server, fake) = lsp::LanguageServer::fake(executor).await;
fake.started
.store(false, std::sync::atomic::Ordering::SeqCst);
@@ -225,9 +255,22 @@ impl LanguageServerConfig {
(
Self {
fake_server: Some((server, started)),
disk_based_diagnostics_progress_token: Some("fakeServer/check".to_string()),
..Default::default()
},
fake,
)
}
}
impl ToPointUtf16 for lsp::Position {
fn to_point_utf16(self) -> PointUtf16 {
PointUtf16::new(self.line, self.character)
}
}
pub fn range_from_lsp(range: lsp::Range) -> Range<PointUtf16> {
let start = PointUtf16::new(range.start.line, range.start.character);
let end = PointUtf16::new(range.end.line, range.end.character);
start..end
}

View File

@@ -0,0 +1,146 @@
use fuzzy::{StringMatch, StringMatchCandidate};
use gpui::{executor::Background, fonts::HighlightStyle};
use std::{ops::Range, sync::Arc};
#[derive(Debug)]
pub struct Outline<T> {
pub items: Vec<OutlineItem<T>>,
candidates: Vec<StringMatchCandidate>,
path_candidates: Vec<StringMatchCandidate>,
path_candidate_prefixes: Vec<usize>,
}
#[derive(Clone, Debug)]
pub struct OutlineItem<T> {
pub depth: usize,
pub range: Range<T>,
pub text: String,
pub highlight_ranges: Vec<(Range<usize>, HighlightStyle)>,
pub name_ranges: Vec<Range<usize>>,
}
impl<T> Outline<T> {
    /// Builds an outline from depth-annotated items, precomputing the fuzzy
    /// match candidates described on the struct. Items are assumed to be in
    /// buffer order, with `depth` describing tree nesting.
    pub fn new(items: Vec<OutlineItem<T>>) -> Self {
        let mut candidates = Vec::new();
        let mut path_candidates = Vec::new();
        let mut path_candidate_prefixes = Vec::new();
        // `path_text` accumulates the space-joined ancestor path of the
        // current item; `path_stack` records `path_text`'s length after each
        // ancestor so we can truncate back when the depth decreases.
        let mut path_text = String::new();
        let mut path_stack = Vec::new();
        for (id, item) in items.iter().enumerate() {
            if item.depth < path_stack.len() {
                // Popped out of one or more nesting levels: shrink the path
                // back to the nearest surviving ancestor.
                path_stack.truncate(item.depth);
                path_text.truncate(path_stack.last().copied().unwrap_or(0));
            }
            if !path_text.is_empty() {
                path_text.push(' ');
            }
            // Record where this item's own text starts within the path
            // (after the separating space), so path-match positions can
            // later be rebased onto the item text.
            path_candidate_prefixes.push(path_text.len());
            path_text.push_str(&item.text);
            path_stack.push(path_text.len());
            // Name-only candidate text: the concatenation of the item's
            // name ranges (skipping keywords/punctuation between them).
            let candidate_text = item
                .name_ranges
                .iter()
                .map(|range| &item.text[range.start as usize..range.end as usize])
                .collect::<String>();
            path_candidates.push(StringMatchCandidate {
                id,
                char_bag: path_text.as_str().into(),
                string: path_text.clone(),
            });
            candidates.push(StringMatchCandidate {
                id,
                char_bag: candidate_text.as_str().into(),
                string: candidate_text,
            });
        }
        Self {
            candidates,
            path_candidates,
            path_candidate_prefixes,
            items,
        }
    }

    /// Fuzzy-searches the outline. A query containing a space matches against
    /// full ancestor paths; otherwise only item names are matched. Smart case
    /// is enabled when the query contains an uppercase character. The returned
    /// matches are in buffer order and include zero-score placeholder matches
    /// for the ancestors of each real match, so results render as a tree.
    pub async fn search(&self, query: &str, executor: Arc<Background>) -> Vec<StringMatch> {
        let query = query.trim_start();
        let is_path_query = query.contains(' ');
        let smart_case = query.chars().any(|c| c.is_uppercase());
        let mut matches = fuzzy::match_strings(
            if is_path_query {
                &self.path_candidates
            } else {
                &self.candidates
            },
            query,
            smart_case,
            100,
            &Default::default(),
            executor.clone(),
        )
        .await;
        // Buffer order, so ancestor insertion below stays well-formed.
        matches.sort_unstable_by_key(|m| m.candidate_id);

        let mut tree_matches = Vec::new();
        let mut prev_item_ix = 0;
        for mut string_match in matches {
            let outline_match = &self.items[string_match.candidate_id];

            if is_path_query {
                // Drop highlight positions that fall in the ancestor prefix,
                // and rebase the rest onto the item's own text.
                let prefix_len = self.path_candidate_prefixes[string_match.candidate_id];
                string_match
                    .positions
                    .retain(|position| *position >= prefix_len);
                for position in &mut string_match.positions {
                    *position -= prefix_len;
                }
            } else {
                // Positions refer to the concatenated name-range text; map
                // each one back into the item's full `text`. Positions are
                // ascending, so a single forward walk over `name_ranges`
                // suffices.
                let mut name_ranges = outline_match.name_ranges.iter();
                let mut name_range = name_ranges.next().unwrap();
                let mut preceding_ranges_len = 0;
                for position in &mut string_match.positions {
                    while *position >= preceding_ranges_len + name_range.len() as usize {
                        preceding_ranges_len += name_range.len();
                        name_range = name_ranges.next().unwrap();
                    }
                    *position = name_range.start as usize + (*position - preceding_ranges_len);
                }
            }

            // Insert placeholder (zero-score) matches for any not-yet-emitted
            // ancestors of this match, scanning backwards from the match and
            // picking the closest item at each shallower depth.
            let insertion_ix = tree_matches.len();
            let mut cur_depth = outline_match.depth;
            for (ix, item) in self.items[prev_item_ix..string_match.candidate_id]
                .iter()
                .enumerate()
                .rev()
            {
                if cur_depth == 0 {
                    break;
                }
                let candidate_index = ix + prev_item_ix;
                if item.depth == cur_depth - 1 {
                    tree_matches.insert(
                        insertion_ix,
                        StringMatch {
                            candidate_id: candidate_index,
                            score: Default::default(),
                            positions: Default::default(),
                            string: Default::default(),
                        },
                    );
                    cur_depth -= 1;
                }
            }
            prev_item_ix = string_match.candidate_id + 1;
            tree_matches.push(string_match);
        }
        tree_matches
    }
}

View File

@@ -1,13 +1,13 @@
use std::sync::Arc;
use crate::{Diagnostic, Operation};
use crate::{diagnostic_set::DiagnosticEntry, Diagnostic, Operation};
use anyhow::{anyhow, Result};
use clock::ReplicaId;
use collections::HashSet;
use lsp::DiagnosticSeverity;
use rpc::proto;
use std::sync::Arc;
use text::*;
pub use proto::Buffer;
pub use proto::{Buffer, SelectionSet};
pub fn serialize_operation(operation: &Operation) -> proto::Operation {
proto::Operation {
@@ -33,7 +33,7 @@ pub fn serialize_operation(operation: &Operation) -> proto::Operation {
counts: undo
.counts
.iter()
.map(|(edit_id, count)| proto::operation::UndoCount {
.map(|(edit_id, count)| proto::UndoCount {
replica_id: edit_id.replica_id as u32,
local_timestamp: edit_id.value,
count: *count,
@@ -41,46 +41,23 @@ pub fn serialize_operation(operation: &Operation) -> proto::Operation {
.collect(),
version: From::from(&undo.version),
}),
Operation::Buffer(text::Operation::UpdateSelections {
set_id,
Operation::UpdateSelections {
replica_id,
selections,
lamport_timestamp,
}) => proto::operation::Variant::UpdateSelections(proto::operation::UpdateSelections {
replica_id: set_id.replica_id as u32,
local_timestamp: set_id.value,
} => proto::operation::Variant::UpdateSelections(proto::operation::UpdateSelections {
replica_id: *replica_id as u32,
lamport_timestamp: lamport_timestamp.value,
version: selections.version().into(),
selections: selections
.full_offset_ranges()
.map(|(range, state)| proto::Selection {
id: state.id as u64,
start: range.start.0 as u64,
end: range.end.0 as u64,
reversed: state.reversed,
})
.collect(),
selections: serialize_selections(selections),
}),
Operation::Buffer(text::Operation::RemoveSelections {
set_id,
Operation::UpdateDiagnostics {
diagnostics,
lamport_timestamp,
}) => proto::operation::Variant::RemoveSelections(proto::operation::RemoveSelections {
replica_id: set_id.replica_id as u32,
local_timestamp: set_id.value,
} => proto::operation::Variant::UpdateDiagnostics(proto::UpdateDiagnostics {
replica_id: lamport_timestamp.replica_id as u32,
lamport_timestamp: lamport_timestamp.value,
diagnostics: serialize_diagnostics(diagnostics.iter()),
}),
Operation::Buffer(text::Operation::SetActiveSelections {
set_id,
lamport_timestamp,
}) => proto::operation::Variant::SetActiveSelections(
proto::operation::SetActiveSelections {
replica_id: lamport_timestamp.replica_id as u32,
local_timestamp: set_id.map(|set_id| set_id.value),
lamport_timestamp: lamport_timestamp.value,
},
),
Operation::UpdateDiagnostics(diagnostic_set) => {
proto::operation::Variant::UpdateDiagnostics(serialize_diagnostics(diagnostic_set))
}
}),
}
}
@@ -104,45 +81,89 @@ pub fn serialize_edit_operation(operation: &EditOperation) -> proto::operation::
}
}
pub fn serialize_selection_set(set: &SelectionSet) -> proto::SelectionSet {
let version = set.selections.version();
let entries = set.selections.full_offset_ranges();
proto::SelectionSet {
replica_id: set.id.replica_id as u32,
lamport_timestamp: set.id.value as u32,
is_active: set.active,
version: version.into(),
selections: entries
.map(|(range, state)| proto::Selection {
id: state.id as u64,
start: range.start.0 as u64,
end: range.end.0 as u64,
reversed: state.reversed,
/// Converts one undo-map entry — an edit id plus the undo counts recorded
/// against it — into its protobuf form for buffer synchronization.
pub fn serialize_undo_map_entry(
    (edit_id, counts): (&clock::Local, &[(clock::Local, u32)]),
) -> proto::UndoMapEntry {
    proto::UndoMapEntry {
        replica_id: edit_id.replica_id as u32,
        local_timestamp: edit_id.value,
        counts: counts
            .iter()
            .map(|(undo_id, count)| proto::UndoCount {
                replica_id: undo_id.replica_id as u32,
                local_timestamp: undo_id.value,
                count: *count,
            })
            .collect(),
    }
}
pub fn serialize_diagnostics(map: &AnchorRangeMultimap<Diagnostic>) -> proto::DiagnosticSet {
proto::DiagnosticSet {
version: map.version().into(),
diagnostics: map
.full_offset_ranges()
.map(|(range, diagnostic)| proto::Diagnostic {
start: range.start.0 as u64,
end: range.end.0 as u64,
message: diagnostic.message.clone(),
severity: match diagnostic.severity {
DiagnosticSeverity::ERROR => proto::diagnostic::Severity::Error,
DiagnosticSeverity::WARNING => proto::diagnostic::Severity::Warning,
DiagnosticSeverity::INFORMATION => proto::diagnostic::Severity::Information,
DiagnosticSeverity::HINT => proto::diagnostic::Severity::Hint,
_ => proto::diagnostic::Severity::None,
} as i32,
group_id: diagnostic.group_id as u64,
is_primary: diagnostic.is_primary,
/// Converts a CRDT text fragment into its protobuf form. The fragment's
/// position is implied by its index in the serialized sequence, so only
/// timestamps, offsets, visibility, and undo/deletion metadata are encoded.
pub fn serialize_buffer_fragment(fragment: &text::Fragment) -> proto::BufferFragment {
    proto::BufferFragment {
        replica_id: fragment.insertion_timestamp.replica_id as u32,
        local_timestamp: fragment.insertion_timestamp.local,
        lamport_timestamp: fragment.insertion_timestamp.lamport,
        insertion_offset: fragment.insertion_offset as u32,
        len: fragment.len as u32,
        visible: fragment.visible,
        deletions: fragment
            .deletions
            .iter()
            .map(|clock| proto::VectorClockEntry {
                replica_id: clock.replica_id as u32,
                timestamp: clock.value,
            })
            .collect(),
        max_undos: From::from(&fragment.max_undos),
    }
}
/// Converts a shared slice of anchored selections into protobuf selections,
/// preserving order. Anchors are serialized via [`serialize_anchor`].
pub fn serialize_selections(selections: &Arc<[Selection<Anchor>]>) -> Vec<proto::Selection> {
    let mut result = Vec::with_capacity(selections.len());
    for selection in selections.iter() {
        result.push(proto::Selection {
            id: selection.id as u64,
            start: Some(serialize_anchor(&selection.start)),
            end: Some(serialize_anchor(&selection.end)),
            reversed: selection.reversed,
        });
    }
    result
}
/// Converts anchored diagnostic entries into protobuf diagnostics.
/// Severities outside the four LSP-defined levels map to `Severity::None`.
pub fn serialize_diagnostics<'a>(
    diagnostics: impl IntoIterator<Item = &'a DiagnosticEntry<Anchor>>,
) -> Vec<proto::Diagnostic> {
    diagnostics
        .into_iter()
        .map(|entry| proto::Diagnostic {
            start: Some(serialize_anchor(&entry.range.start)),
            end: Some(serialize_anchor(&entry.range.end)),
            message: entry.diagnostic.message.clone(),
            severity: match entry.diagnostic.severity {
                DiagnosticSeverity::ERROR => proto::diagnostic::Severity::Error,
                DiagnosticSeverity::WARNING => proto::diagnostic::Severity::Warning,
                DiagnosticSeverity::INFORMATION => proto::diagnostic::Severity::Information,
                DiagnosticSeverity::HINT => proto::diagnostic::Severity::Hint,
                _ => proto::diagnostic::Severity::None,
            } as i32,
            group_id: entry.diagnostic.group_id as u64,
            is_primary: entry.diagnostic.is_primary,
            is_valid: entry.diagnostic.is_valid,
            code: entry.diagnostic.code.clone(),
            is_disk_based: entry.diagnostic.is_disk_based,
        })
        .collect()
}
/// Converts a text anchor into its protobuf form.
fn serialize_anchor(anchor: &Anchor) -> proto::Anchor {
    let bias = match anchor.bias {
        Bias::Left => proto::Bias::Left,
        Bias::Right => proto::Bias::Right,
    };
    proto::Anchor {
        replica_id: anchor.timestamp.replica_id as u32,
        local_timestamp: anchor.timestamp.value,
        offset: anchor.offset as u64,
        bias: bias as i32,
    }
}
@@ -187,67 +208,36 @@ pub fn deserialize_operation(message: proto::Operation) -> Result<Operation> {
},
}),
proto::operation::Variant::UpdateSelections(message) => {
let version = message.version.into();
let entries = message
let selections = message
.selections
.iter()
.map(|selection| {
let range = FullOffset(selection.start as usize)
..FullOffset(selection.end as usize);
let state = SelectionState {
.into_iter()
.filter_map(|selection| {
Some(Selection {
id: selection.id as usize,
start: deserialize_anchor(selection.start?)?,
end: deserialize_anchor(selection.end?)?,
reversed: selection.reversed,
goal: SelectionGoal::None,
};
(range, state)
})
})
.collect();
let selections = AnchorRangeMap::from_full_offset_ranges(
version,
Bias::Left,
Bias::Left,
entries,
);
.collect::<Vec<_>>();
Operation::Buffer(text::Operation::UpdateSelections {
set_id: clock::Lamport {
replica_id: message.replica_id as ReplicaId,
value: message.local_timestamp,
},
Operation::UpdateSelections {
replica_id: message.replica_id as ReplicaId,
lamport_timestamp: clock::Lamport {
replica_id: message.replica_id as ReplicaId,
value: message.lamport_timestamp,
},
selections: Arc::from(selections),
})
}
proto::operation::Variant::RemoveSelections(message) => {
Operation::Buffer(text::Operation::RemoveSelections {
set_id: clock::Lamport {
replica_id: message.replica_id as ReplicaId,
value: message.local_timestamp,
},
lamport_timestamp: clock::Lamport {
replica_id: message.replica_id as ReplicaId,
value: message.lamport_timestamp,
},
})
}
proto::operation::Variant::SetActiveSelections(message) => {
Operation::Buffer(text::Operation::SetActiveSelections {
set_id: message.local_timestamp.map(|value| clock::Lamport {
replica_id: message.replica_id as ReplicaId,
value,
}),
lamport_timestamp: clock::Lamport {
replica_id: message.replica_id as ReplicaId,
value: message.lamport_timestamp,
},
})
}
proto::operation::Variant::UpdateDiagnostics(message) => {
Operation::UpdateDiagnostics(deserialize_diagnostics(message))
}
}
proto::operation::Variant::UpdateDiagnostics(message) => Operation::UpdateDiagnostics {
diagnostics: deserialize_diagnostics(message.diagnostics),
lamport_timestamp: clock::Lamport {
replica_id: message.replica_id as ReplicaId,
value: message.lamport_timestamp,
},
},
},
)
}
@@ -270,43 +260,79 @@ pub fn deserialize_edit_operation(edit: proto::operation::Edit) -> EditOperation
}
}
pub fn deserialize_selection_set(set: proto::SelectionSet) -> SelectionSet {
SelectionSet {
id: clock::Lamport {
replica_id: set.replica_id as u16,
value: set.lamport_timestamp,
pub fn deserialize_undo_map_entry(
entry: proto::UndoMapEntry,
) -> (clock::Local, Vec<(clock::Local, u32)>) {
(
clock::Local {
replica_id: entry.replica_id as u16,
value: entry.local_timestamp,
},
active: set.is_active,
selections: Arc::new(AnchorRangeMap::from_full_offset_ranges(
set.version.into(),
Bias::Left,
Bias::Left,
set.selections
.into_iter()
.map(|selection| {
let range =
FullOffset(selection.start as usize)..FullOffset(selection.end as usize);
let state = SelectionState {
id: selection.id as usize,
reversed: selection.reversed,
goal: SelectionGoal::None,
};
(range, state)
})
.collect(),
)),
entry
.counts
.into_iter()
.map(|undo_count| {
(
clock::Local {
replica_id: undo_count.replica_id as u16,
value: undo_count.local_timestamp,
},
undo_count.count,
)
})
.collect(),
)
}
/// Reconstructs a CRDT text fragment from its protobuf form.
///
/// `ix` is the fragment's position within the serialized sequence and
/// `count` the total number of fragments; together they regenerate the
/// fragment's locator, which is not transmitted on the wire.
pub fn deserialize_buffer_fragment(
    message: proto::BufferFragment,
    ix: usize,
    count: usize,
) -> Fragment {
    Fragment {
        id: locator::Locator::from_index(ix, count),
        insertion_timestamp: InsertionTimestamp {
            replica_id: message.replica_id as ReplicaId,
            local: message.local_timestamp,
            lamport: message.lamport_timestamp,
        },
        insertion_offset: message.insertion_offset as usize,
        len: message.len as usize,
        visible: message.visible,
        deletions: HashSet::from_iter(message.deletions.into_iter().map(|entry| clock::Local {
            replica_id: entry.replica_id as ReplicaId,
            value: entry.timestamp,
        })),
        max_undos: From::from(message.max_undos),
    }
}
pub fn deserialize_diagnostics(message: proto::DiagnosticSet) -> AnchorRangeMultimap<Diagnostic> {
AnchorRangeMultimap::from_full_offset_ranges(
message.version.into(),
Bias::Left,
Bias::Right,
message.diagnostics.into_iter().filter_map(|diagnostic| {
Some((
FullOffset(diagnostic.start as usize)..FullOffset(diagnostic.end as usize),
Diagnostic {
/// Reconstructs anchored selections from protobuf form. Entries whose start
/// or end anchor is missing or undecodable are silently skipped, matching
/// the lenient decoding used elsewhere in this module.
pub fn deserialize_selections(selections: Vec<proto::Selection>) -> Arc<[Selection<Anchor>]> {
    let mut result = Vec::with_capacity(selections.len());
    for selection in selections {
        // Both anchors must be present and valid; otherwise drop the entry.
        let (start, end) = match (selection.start, selection.end) {
            (Some(start), Some(end)) => (start, end),
            _ => continue,
        };
        if let (Some(start), Some(end)) = (deserialize_anchor(start), deserialize_anchor(end)) {
            result.push(Selection {
                id: selection.id as usize,
                start,
                end,
                reversed: selection.reversed,
                goal: SelectionGoal::None,
            });
        }
    }
    Arc::from(result)
}
pub fn deserialize_diagnostics(
diagnostics: Vec<proto::Diagnostic>,
) -> Arc<[DiagnosticEntry<Anchor>]> {
diagnostics
.into_iter()
.filter_map(|diagnostic| {
Some(DiagnosticEntry {
range: deserialize_anchor(diagnostic.start?)?..deserialize_anchor(diagnostic.end?)?,
diagnostic: Diagnostic {
severity: match proto::diagnostic::Severity::from_i32(diagnostic.severity)? {
proto::diagnostic::Severity::Error => DiagnosticSeverity::ERROR,
proto::diagnostic::Severity::Warning => DiagnosticSeverity::WARNING,
@@ -316,9 +342,26 @@ pub fn deserialize_diagnostics(message: proto::DiagnosticSet) -> AnchorRangeMult
},
message: diagnostic.message,
group_id: diagnostic.group_id as usize,
code: diagnostic.code,
is_valid: diagnostic.is_valid,
is_primary: diagnostic.is_primary,
is_disk_based: diagnostic.is_disk_based,
},
))
}),
)
})
})
.collect()
}
/// Reconstructs a text anchor from its protobuf form. Returns `None` when
/// the encoded bias value is not a recognized `proto::Bias` variant.
fn deserialize_anchor(anchor: proto::Anchor) -> Option<Anchor> {
    let bias = match proto::Bias::from_i32(anchor.bias)? {
        proto::Bias::Left => Bias::Left,
        proto::Bias::Right => Bias::Right,
    };
    let timestamp = clock::Local {
        replica_id: anchor.replica_id as ReplicaId,
        value: anchor.local_timestamp,
    };
    Some(Anchor {
        timestamp,
        offset: anchor.offset as usize,
        bias,
    })
}

File diff suppressed because it is too large Load Diff

View File

@@ -16,7 +16,7 @@ use std::{
io::Write,
str::FromStr,
sync::{
atomic::{AtomicBool, AtomicUsize, Ordering::SeqCst},
atomic::{AtomicUsize, Ordering::SeqCst},
Arc,
},
};
@@ -28,7 +28,7 @@ pub use lsp_types::*;
const JSON_RPC_VERSION: &'static str = "2.0";
const CONTENT_LEN_HEADER: &'static str = "Content-Length: ";
type NotificationHandler = Box<dyn Send + Sync + Fn(&str)>;
type NotificationHandler = Box<dyn Send + Sync + FnMut(&str)>;
type ResponseHandler = Box<dyn Send + FnOnce(Result<&str, Error>)>;
pub struct LanguageServer {
@@ -139,7 +139,7 @@ impl LanguageServer {
if let Ok(AnyNotification { method, params }) =
serde_json::from_slice(&buffer)
{
if let Some(handler) = notification_handlers.read().get(method) {
if let Some(handler) = notification_handlers.write().get_mut(method) {
handler(params.get());
} else {
log::info!(
@@ -231,6 +231,10 @@ impl LanguageServer {
experimental: Some(json!({
"serverStatusNotification": true,
})),
window: Some(lsp_types::WindowClientCapabilities {
work_done_progress: Some(true),
..Default::default()
}),
..Default::default()
},
trace: Default::default(),
@@ -279,10 +283,10 @@ impl LanguageServer {
}
}
pub fn on_notification<T, F>(&self, f: F) -> Subscription
pub fn on_notification<T, F>(&self, mut f: F) -> Subscription
where
T: lsp_types::notification::Notification,
F: 'static + Send + Sync + Fn(T::Params),
F: 'static + Send + Sync + FnMut(T::Params),
{
let prev_handler = self.notification_handlers.write().insert(
T::METHOD,
@@ -427,7 +431,7 @@ pub struct FakeLanguageServer {
buffer: Vec<u8>,
stdin: smol::io::BufReader<async_pipe::PipeReader>,
stdout: smol::io::BufWriter<async_pipe::PipeWriter>,
pub started: Arc<AtomicBool>,
pub started: Arc<std::sync::atomic::AtomicBool>,
}
#[cfg(any(test, feature = "test-support"))]
@@ -445,7 +449,7 @@ impl LanguageServer {
stdin: smol::io::BufReader::new(stdin.1),
stdout: smol::io::BufWriter::new(stdout.0),
buffer: Vec::new(),
started: Arc::new(AtomicBool::new(true)),
started: Arc::new(std::sync::atomic::AtomicBool::new(true)),
};
let server = Self::new_internal(stdin.0, stdout.1, Path::new("/"), executor).unwrap();
@@ -490,17 +494,25 @@ impl FakeLanguageServer {
}
pub async fn receive_request<T: request::Request>(&mut self) -> (RequestId<T>, T::Params) {
self.receive().await;
let request = serde_json::from_slice::<Request<T::Params>>(&self.buffer).unwrap();
assert_eq!(request.method, T::METHOD);
assert_eq!(request.jsonrpc, JSON_RPC_VERSION);
(
RequestId {
id: request.id,
_type: std::marker::PhantomData,
},
request.params,
)
loop {
self.receive().await;
if let Ok(request) = serde_json::from_slice::<Request<T::Params>>(&self.buffer) {
assert_eq!(request.method, T::METHOD);
assert_eq!(request.jsonrpc, JSON_RPC_VERSION);
return (
RequestId {
id: request.id,
_type: std::marker::PhantomData,
},
request.params,
);
} else {
println!(
"skipping message in fake language server {:?}",
std::str::from_utf8(&self.buffer)
);
}
}
}
pub async fn receive_notification<T: notification::Notification>(&mut self) -> T::Params {
@@ -510,6 +522,22 @@ impl FakeLanguageServer {
notification.params
}
/// Sends a `$/progress` Begin notification for `token`, simulating a
/// language server starting a long-running operation (e.g. disk-based
/// diagnostics) in tests.
pub async fn start_progress(&mut self, token: impl Into<String>) {
    self.notify::<notification::Progress>(ProgressParams {
        token: NumberOrString::String(token.into()),
        value: ProgressParamsValue::WorkDone(WorkDoneProgress::Begin(Default::default())),
    })
    .await;
}
/// Sends a `$/progress` End notification for `token`, simulating a language
/// server finishing the operation previously begun with `start_progress`.
pub async fn end_progress(&mut self, token: impl Into<String>) {
    self.notify::<notification::Progress>(ProgressParams {
        token: NumberOrString::String(token.into()),
        value: ProgressParamsValue::WorkDone(WorkDoneProgress::End(Default::default())),
    })
    .await;
}
async fn send(&mut self, message: Vec<u8>) {
self.stdout
.write_all(CONTENT_LEN_HEADER.as_bytes())

18
crates/outline/Cargo.toml Normal file
View File

@@ -0,0 +1,18 @@
[package]
name = "outline"
version = "0.1.0"
edition = "2021"
[lib]
path = "src/outline.rs"
[dependencies]
editor = { path = "../editor" }
fuzzy = { path = "../fuzzy" }
gpui = { path = "../gpui" }
language = { path = "../language" }
text = { path = "../text" }
workspace = { path = "../workspace" }
ordered-float = "2.1.1"
postage = { version = "0.4", features = ["futures-traits"] }
smol = "1.2"

View File

@@ -0,0 +1,540 @@
use editor::{
display_map::ToDisplayPoint, Anchor, AnchorRangeExt, Autoscroll, Editor, EditorSettings,
ToPoint,
};
use fuzzy::StringMatch;
use gpui::{
action,
elements::*,
fonts::{self, HighlightStyle},
geometry::vector::Vector2F,
keymap::{self, Binding},
AppContext, Axis, Entity, MutableAppContext, RenderContext, View, ViewContext, ViewHandle,
WeakViewHandle,
};
use language::{Outline, Selection};
use ordered_float::OrderedFloat;
use postage::watch;
use std::{
cmp::{self, Reverse},
ops::Range,
sync::Arc,
};
use workspace::{
menu::{Confirm, SelectFirst, SelectLast, SelectNext, SelectPrev},
Settings, Workspace,
};
action!(Toggle);
/// Registers the outline view's key bindings and actions with the app.
/// `cmd-shift-O` toggles the view from an editor; `escape` toggles (dismisses)
/// it from within the outline view itself.
pub fn init(cx: &mut MutableAppContext) {
    cx.add_bindings([
        Binding::new("cmd-shift-O", Toggle, Some("Editor")),
        Binding::new("escape", Toggle, Some("OutlineView")),
    ]);
    cx.add_action(OutlineView::toggle);
    cx.add_action(OutlineView::confirm);
    cx.add_action(OutlineView::select_prev);
    cx.add_action(OutlineView::select_next);
    cx.add_action(OutlineView::select_first);
    cx.add_action(OutlineView::select_last);
}
/// Modal view listing a buffer's symbol outline with a fuzzy-search query
/// editor, letting the user preview and jump to symbols.
struct OutlineView {
    /// Weak handle to self, used by the uniform list's render closure.
    handle: WeakViewHandle<Self>,
    /// The editor whose outline is being browsed.
    active_editor: ViewHandle<Editor>,
    /// The outline being searched, anchored into the editor's buffer.
    outline: Outline<Anchor>,
    /// Index into `matches` of the currently highlighted row.
    selected_match_index: usize,
    /// Scroll position and selections to restore if the view is dismissed
    /// without confirming; taken (cleared) on confirm.
    restore_state: Option<RestoreState>,
    /// Id of the selection created by previewing a symbol — used to detect
    /// whether the user moved the cursor since the preview.
    symbol_selection_id: Option<usize>,
    /// Current fuzzy-match results (all items when the query is empty).
    matches: Vec<StringMatch>,
    /// Single-line editor holding the search query.
    query_editor: ViewHandle<Editor>,
    /// Scroll state of the results list.
    list_state: UniformListState,
    settings: watch::Receiver<Settings>,
}
/// Editor state captured when the outline view opens, restored on dismissal
/// so previewing symbols doesn't permanently move the user's cursor.
struct RestoreState {
    scroll_position: Vector2F,
    selections: Vec<Selection<usize>>,
}
/// Events emitted by [`OutlineView`]; the workspace listens for `Dismissed`
/// to close the modal.
pub enum Event {
    Dismissed,
}
impl Entity for OutlineView {
    type Event = Event;

    // On teardown, undo any preview navigation so the editor returns to the
    // state it had before the outline view opened.
    fn release(&mut self, cx: &mut MutableAppContext) {
        self.restore_active_editor(cx);
    }
}
impl View for OutlineView {
    fn ui_name() -> &'static str {
        "OutlineView"
    }

    // Adds the "menu" keymap context so the shared menu bindings
    // (select next/prev/first/last, confirm) apply while this view is focused.
    fn keymap_context(&self, _: &AppContext) -> keymap::Context {
        let mut cx = Self::default_keymap_context();
        cx.set.insert("menu".into());
        cx
    }

    // Renders the query editor above the match list, constrained and
    // top-aligned like the other modal selectors.
    fn render(&mut self, _: &mut RenderContext<Self>) -> ElementBox {
        let settings = self.settings.borrow();
        Flex::new(Axis::Vertical)
            .with_child(
                Container::new(ChildView::new(self.query_editor.id()).boxed())
                    .with_style(settings.theme.selector.input_editor.container)
                    .boxed(),
            )
            .with_child(Flexible::new(1.0, false, self.render_matches()).boxed())
            .contained()
            .with_style(settings.theme.selector.container)
            .constrained()
            .with_max_width(800.0)
            .with_max_height(1200.0)
            .aligned()
            .top()
            .named("outline view")
    }

    // Focusing the view forwards focus to the query editor so typing
    // immediately filters the outline.
    fn on_focus(&mut self, cx: &mut ViewContext<Self>) {
        cx.focus(&self.query_editor);
    }
}
impl OutlineView {
    /// Creates the view: builds the single-line query editor, captures the
    /// active editor's scroll/selection state for later restoration, and
    /// populates the initial (empty-query) match list.
    fn new(
        outline: Outline<Anchor>,
        editor: ViewHandle<Editor>,
        settings: watch::Receiver<Settings>,
        cx: &mut ViewContext<Self>,
    ) -> Self {
        let query_editor = cx.add_view(|cx| {
            Editor::single_line(
                {
                    let settings = settings.clone();
                    Arc::new(move |_| {
                        let settings = settings.borrow();
                        EditorSettings {
                            style: settings.theme.selector.input_editor.as_editor(),
                            tab_size: settings.tab_size,
                            soft_wrap: editor::SoftWrap::None,
                        }
                    })
                },
                cx,
            )
        });
        cx.subscribe(&query_editor, Self::on_query_editor_event)
            .detach();

        // Snapshot the editor state now so previews can be undone on dismiss.
        let restore_state = editor.update(cx, |editor, cx| {
            Some(RestoreState {
                scroll_position: editor.scroll_position(cx),
                selections: editor.local_selections::<usize>(cx),
            })
        });

        let mut this = Self {
            handle: cx.weak_handle(),
            active_editor: editor,
            matches: Default::default(),
            selected_match_index: 0,
            restore_state,
            symbol_selection_id: None,
            outline,
            query_editor,
            list_state: Default::default(),
            settings,
        };
        this.update_matches(cx);
        this
    }

    /// Workspace action: opens the outline view as a modal for the active
    /// editor, if the active item is an editor and its buffer has an outline.
    fn toggle(workspace: &mut Workspace, _: &Toggle, cx: &mut ViewContext<Workspace>) {
        if let Some(editor) = workspace
            .active_item(cx)
            .and_then(|item| item.to_any().downcast::<Editor>())
        {
            let settings = workspace.settings();
            let buffer = editor
                .read(cx)
                .buffer()
                .read(cx)
                .read(cx)
                .outline(Some(settings.borrow().theme.editor.syntax.as_ref()));
            if let Some(outline) = buffer {
                workspace.toggle_modal(cx, |cx, _| {
                    let view = cx.add_view(|cx| OutlineView::new(outline, editor, settings, cx));
                    cx.subscribe(&view, Self::on_event).detach();
                    view
                })
            }
        }
    }

    fn select_prev(&mut self, _: &SelectPrev, cx: &mut ViewContext<Self>) {
        if self.selected_match_index > 0 {
            self.select(self.selected_match_index - 1, true, false, cx);
        }
    }

    fn select_next(&mut self, _: &SelectNext, cx: &mut ViewContext<Self>) {
        if self.selected_match_index + 1 < self.matches.len() {
            self.select(self.selected_match_index + 1, true, false, cx);
        }
    }

    fn select_first(&mut self, _: &SelectFirst, cx: &mut ViewContext<Self>) {
        self.select(0, true, false, cx);
    }

    fn select_last(&mut self, _: &SelectLast, cx: &mut ViewContext<Self>) {
        self.select(self.matches.len().saturating_sub(1), true, false, cx);
    }

    /// Selects the match at `index`, scrolling the list to show (or center)
    /// it. When `navigate` is true, previews the symbol in the editor:
    /// moves the cursor to the symbol's start, highlights its rows, and
    /// remembers the resulting selection id so the preview can be undone.
    fn select(&mut self, index: usize, navigate: bool, center: bool, cx: &mut ViewContext<Self>) {
        self.selected_match_index = index;
        self.list_state.scroll_to(if center {
            ScrollTarget::Center(index)
        } else {
            ScrollTarget::Show(index)
        });
        if navigate {
            let selected_match = &self.matches[self.selected_match_index];
            let outline_item = &self.outline.items[selected_match.candidate_id];
            self.symbol_selection_id = self.active_editor.update(cx, |active_editor, cx| {
                let snapshot = active_editor.snapshot(cx).display_snapshot;
                let buffer_snapshot = &snapshot.buffer_snapshot;
                let start = outline_item.range.start.to_point(&buffer_snapshot);
                let end = outline_item.range.end.to_point(&buffer_snapshot);
                let display_rows = start.to_display_point(&snapshot).row()
                    ..end.to_display_point(&snapshot).row() + 1;
                active_editor.select_ranges([start..start], Some(Autoscroll::Center), cx);
                active_editor.set_highlighted_rows(Some(display_rows));
                Some(active_editor.newest_selection::<usize>(&buffer_snapshot).id)
            });
            cx.notify();
        }
    }

    /// Confirms the current selection: drops the saved restore state (so the
    /// preview navigation becomes permanent) and dismisses the view.
    fn confirm(&mut self, _: &Confirm, cx: &mut ViewContext<Self>) {
        self.restore_state.take();
        cx.emit(Event::Dismissed);
    }

    /// Clears row highlights and, if the newest editor selection is still the
    /// one created by our preview (i.e. the user hasn't moved the cursor
    /// independently), restores the original scroll position and selections.
    fn restore_active_editor(&mut self, cx: &mut MutableAppContext) {
        let symbol_selection_id = self.symbol_selection_id.take();
        self.active_editor.update(cx, |editor, cx| {
            editor.set_highlighted_rows(None);
            if let Some((symbol_selection_id, restore_state)) =
                symbol_selection_id.zip(self.restore_state.as_ref())
            {
                let newest_selection =
                    editor.newest_selection::<usize>(&editor.buffer().read(cx).read(cx));
                if symbol_selection_id == newest_selection.id {
                    editor.set_scroll_position(restore_state.scroll_position, cx);
                    editor.update_selections(restore_state.selections.clone(), None, cx);
                }
            }
        })
    }

    // Workspace-side handler: closes the modal when the view reports
    // dismissal.
    fn on_event(
        workspace: &mut Workspace,
        _: ViewHandle<Self>,
        event: &Event,
        cx: &mut ViewContext<Workspace>,
    ) {
        match event {
            Event::Dismissed => workspace.dismiss_modal(cx),
        }
    }

    // Re-filter on edits; dismiss when the query editor loses focus.
    fn on_query_editor_event(
        &mut self,
        _: ViewHandle<Editor>,
        event: &editor::Event,
        cx: &mut ViewContext<Self>,
    ) {
        match event {
            editor::Event::Blurred => cx.emit(Event::Dismissed),
            editor::Event::Edited => self.update_matches(cx),
            _ => {}
        }
    }

    /// Recomputes `matches` from the current query. With an empty query,
    /// every outline item is shown and selection jumps to the item closest
    /// to (preferring deepest item containing) the cursor, without
    /// navigating. With a non-empty query, runs the fuzzy search and selects
    /// the best-scoring match, navigating to preview it.
    fn update_matches(&mut self, cx: &mut ViewContext<Self>) {
        let selected_index;
        let navigate_to_selected_index;
        let query = self.query_editor.update(cx, |buffer, cx| buffer.text(cx));
        if query.is_empty() {
            self.restore_active_editor(cx);
            self.matches = self
                .outline
                .items
                .iter()
                .enumerate()
                .map(|(index, _)| StringMatch {
                    candidate_id: index,
                    score: Default::default(),
                    positions: Default::default(),
                    string: Default::default(),
                })
                .collect();

            let editor = self.active_editor.read(cx);
            let buffer = editor.buffer().read(cx).read(cx);
            let cursor_offset = editor.newest_selection::<usize>(&buffer).head();
            // Pick the deepest item containing the cursor; among items that
            // don't contain it, pick the one whose nearest endpoint is
            // closest (hence Reverse on distance).
            selected_index = self
                .outline
                .items
                .iter()
                .enumerate()
                .map(|(ix, item)| {
                    let range = item.range.to_offset(&buffer);
                    let distance_to_closest_endpoint = cmp::min(
                        (range.start as isize - cursor_offset as isize).abs() as usize,
                        (range.end as isize - cursor_offset as isize).abs() as usize,
                    );
                    let depth = if range.contains(&cursor_offset) {
                        Some(item.depth)
                    } else {
                        None
                    };
                    (ix, depth, distance_to_closest_endpoint)
                })
                .max_by_key(|(_, depth, distance)| (*depth, Reverse(*distance)))
                .unwrap()
                .0;
            navigate_to_selected_index = false;
        } else {
            self.matches = smol::block_on(self.outline.search(&query, cx.background().clone()));
            selected_index = self
                .matches
                .iter()
                .enumerate()
                .max_by_key(|(_, m)| OrderedFloat(m.score))
                .map(|(ix, _)| ix)
                .unwrap_or(0);
            navigate_to_selected_index = !self.matches.is_empty();
        }
        self.select(selected_index, navigate_to_selected_index, true, cx);
    }

    /// Renders the match list, or an empty-state label when nothing matched.
    fn render_matches(&self) -> ElementBox {
        if self.matches.is_empty() {
            let settings = self.settings.borrow();
            return Container::new(
                Label::new(
                    "No matches".into(),
                    settings.theme.selector.empty.label.clone(),
                )
                .boxed(),
            )
            .with_style(settings.theme.selector.empty.container)
            .named("empty matches");
        }

        let handle = self.handle.clone();
        let list = UniformList::new(
            self.list_state.clone(),
            self.matches.len(),
            move |mut range, items, cx| {
                let cx = cx.as_ref();
                let view = handle.upgrade(cx).unwrap();
                let view = view.read(cx);
                let start = range.start;
                // Clamp in case the match list shrank since the last layout.
                range.end = cmp::min(range.end, view.matches.len());
                items.extend(
                    view.matches[range]
                        .iter()
                        .enumerate()
                        .map(move |(ix, m)| view.render_match(m, start + ix)),
                );
            },
        );
        Container::new(list.boxed())
            .with_margin_top(6.0)
            .named("matches")
    }

    /// Renders one match row: item text with combined syntax + fuzzy-match
    /// highlighting, indented 20px per outline depth level.
    fn render_match(&self, string_match: &StringMatch, index: usize) -> ElementBox {
        let settings = self.settings.borrow();
        let style = if index == self.selected_match_index {
            &settings.theme.selector.active_item
        } else {
            &settings.theme.selector.item
        };
        let outline_item = &self.outline.items[string_match.candidate_id];
        Text::new(outline_item.text.clone(), style.label.text.clone())
            .with_soft_wrap(false)
            .with_highlights(combine_syntax_and_fuzzy_match_highlights(
                &outline_item.text,
                style.label.text.clone().into(),
                &outline_item.highlight_ranges,
                &string_match.positions,
            ))
            .contained()
            .with_padding_left(20. * outline_item.depth as f32)
            .contained()
            .with_style(style.container)
            .boxed()
    }
}
/// Merges syntax-highlight ranges with fuzzy-match character positions into a
/// single list of non-overlapping highlight ranges. Matched characters are
/// emphasized by bolding the style they would otherwise have (syntax style
/// inside a syntax range, `default_style` outside). `syntax_ranges` and
/// `match_indices` must both be sorted ascending and non-overlapping.
fn combine_syntax_and_fuzzy_match_highlights(
    text: &str,
    default_style: HighlightStyle,
    syntax_ranges: &[(Range<usize>, HighlightStyle)],
    match_indices: &[usize],
) -> Vec<(Range<usize>, HighlightStyle)> {
    let mut result = Vec::new();
    let mut match_indices = match_indices.iter().copied().peekable();

    // The sentinel `usize::MAX..0` range flushes any match indices that fall
    // after the last real syntax range.
    for (range, mut syntax_highlight) in syntax_ranges
        .iter()
        .cloned()
        .chain([(usize::MAX..0, Default::default())])
    {
        // Normalize weight so boldness below comes only from fuzzy matches.
        // NOTE(review): assumes `weight()` mutates the properties in place
        // (builder-style) — the returned reference is intentionally unused.
        syntax_highlight.font_properties.weight(Default::default());

        // Add highlights for any fuzzy match characters before the next
        // syntax highlight range.
        while let Some(&match_index) = match_indices.peek() {
            if match_index >= range.start {
                break;
            }
            match_indices.next();
            let end_index = char_ix_after(match_index, text);
            let mut match_style = default_style;
            match_style.font_properties.weight(fonts::Weight::BOLD);
            result.push((match_index..end_index, match_style));
        }

        if range.start == usize::MAX {
            break;
        }

        // Add highlights for any fuzzy match characters within the
        // syntax highlight range.
        let mut offset = range.start;
        while let Some(&match_index) = match_indices.peek() {
            if match_index >= range.end {
                break;
            }
            match_indices.next();
            // Emit the un-matched span preceding this match character.
            if match_index > offset {
                result.push((offset..match_index, syntax_highlight));
            }
            // Coalesce runs of consecutive match characters into one range.
            let mut end_index = char_ix_after(match_index, text);
            while let Some(&next_match_index) = match_indices.peek() {
                if next_match_index == end_index && next_match_index < range.end {
                    end_index = char_ix_after(next_match_index, text);
                    match_indices.next();
                } else {
                    break;
                }
            }
            let mut match_style = syntax_highlight;
            match_style.font_properties.weight(fonts::Weight::BOLD);
            result.push((match_index..end_index, match_style));
            offset = end_index;
        }
        // Emit the trailing un-matched portion of the syntax range.
        if offset < range.end {
            result.push((offset..range.end, syntax_highlight));
        }
    }
    result
}
fn char_ix_after(ix: usize, text: &str) -> usize {
ix + text[ix..].chars().next().unwrap().len_utf8()
}
#[cfg(test)]
mod tests {
    use super::*;
    use gpui::{color::Color, fonts::HighlightStyle};

    // Verifies the merge of syntax ranges and fuzzy-match positions:
    // match index 4 splits the green range, the run 6..8 coalesces into a
    // single bold range, and index 8 (outside all syntax ranges) gets the
    // bolded default style. The gap 3..4 (no syntax, no match) produces no
    // range at all.
    #[test]
    fn test_combine_syntax_and_fuzzy_match_highlights() {
        let string = "abcdefghijklmnop";
        let default = HighlightStyle::default();
        let syntax_ranges = [
            (
                0..3,
                HighlightStyle {
                    color: Color::red(),
                    ..default
                },
            ),
            (
                4..8,
                HighlightStyle {
                    color: Color::green(),
                    ..default
                },
            ),
        ];
        let match_indices = [4, 6, 7, 8];
        assert_eq!(
            combine_syntax_and_fuzzy_match_highlights(
                &string,
                default,
                &syntax_ranges,
                &match_indices,
            ),
            &[
                (
                    0..3,
                    HighlightStyle {
                        color: Color::red(),
                        ..default
                    },
                ),
                (
                    4..5,
                    HighlightStyle {
                        color: Color::green(),
                        font_properties: *fonts::Properties::default().weight(fonts::Weight::BOLD),
                        ..default
                    },
                ),
                (
                    5..6,
                    HighlightStyle {
                        color: Color::green(),
                        ..default
                    },
                ),
                (
                    6..8,
                    HighlightStyle {
                        color: Color::green(),
                        font_properties: *fonts::Properties::default().weight(fonts::Weight::BOLD),
                        ..default
                    },
                ),
                (
                    8..9,
                    HighlightStyle {
                        font_properties: *fonts::Properties::default().weight(fonts::Weight::BOLD),
                        ..default
                    },
                ),
            ]
        );
    }
}

View File

@@ -13,6 +13,7 @@ test-support = ["language/test-support", "text/test-support"]
text = { path = "../text" }
client = { path = "../client" }
clock = { path = "../clock" }
collections = { path = "../collections" }
fsevent = { path = "../fsevent" }
fuzzy = { path = "../fuzzy" }
gpui = { path = "../gpui" }
@@ -37,6 +38,7 @@ toml = "0.5"
[dev-dependencies]
client = { path = "../client", features = ["test-support"] }
collections = { path = "../collections", features = ["test-support"] }
gpui = { path = "../gpui", features = ["test-support"] }
language = { path = "../language", features = ["test-support"] }
lsp = { path = "../lsp", features = ["test-support"] }

View File

@@ -134,6 +134,7 @@ impl Fs for RealFs {
}
}
#[cfg(any(test, feature = "test-support"))]
#[derive(Clone, Debug)]
struct FakeFsEntry {
metadata: Metadata,

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

View File

@@ -10,6 +10,7 @@ path = "src/project_panel.rs"
gpui = { path = "../gpui" }
project = { path = "../project" }
theme = { path = "../theme" }
util = { path = "../util" }
workspace = { path = "../workspace" }
postage = { version = "0.4.1", features = ["futures-traits"] }

View File

@@ -1,32 +1,31 @@
use gpui::{
action,
elements::{
Align, ConstrainedBox, Empty, Flex, Label, MouseEventHandler, ParentElement, Svg,
UniformList, UniformListState,
},
keymap::{
self,
menu::{SelectNext, SelectPrev},
Binding,
Align, ConstrainedBox, Empty, Flex, Label, MouseEventHandler, ParentElement, ScrollTarget,
Svg, UniformList, UniformListState,
},
keymap::{self, Binding},
platform::CursorStyle,
AppContext, Element, ElementBox, Entity, ModelHandle, MutableAppContext, ReadModel, View,
ViewContext, ViewHandle, WeakViewHandle,
};
use postage::watch;
use project::{Project, ProjectEntry, ProjectPath, Worktree};
use project::{Project, ProjectEntry, ProjectPath, Worktree, WorktreeId};
use std::{
collections::{hash_map, HashMap},
ffi::OsStr,
ops::Range,
};
use workspace::{Settings, Workspace};
use workspace::{
menu::{SelectNext, SelectPrev},
Settings, Workspace,
};
pub struct ProjectPanel {
project: ModelHandle<Project>,
list: UniformListState,
visible_entries: Vec<Vec<usize>>,
expanded_dir_ids: HashMap<usize, Vec<usize>>,
expanded_dir_ids: HashMap<WorktreeId, Vec<usize>>,
selection: Option<Selection>,
settings: watch::Receiver<Settings>,
handle: WeakViewHandle<Self>,
@@ -34,7 +33,7 @@ pub struct ProjectPanel {
#[derive(Copy, Clone)]
struct Selection {
worktree_id: usize,
worktree_id: WorktreeId,
entry_id: usize,
index: usize,
}
@@ -67,7 +66,10 @@ pub fn init(cx: &mut MutableAppContext) {
}
pub enum Event {
OpenedEntry { worktree_id: usize, entry_id: usize },
OpenedEntry {
worktree_id: WorktreeId,
entry_id: usize,
},
}
impl ProjectPanel {
@@ -114,21 +116,21 @@ impl ProjectPanel {
this
});
cx.subscribe(&project_panel, move |workspace, _, event, cx| match event {
Event::OpenedEntry {
&Event::OpenedEntry {
worktree_id,
entry_id,
} => {
if let Some(worktree) = project.read(cx).worktree_for_id(*worktree_id) {
if let Some(entry) = worktree.read(cx).entry_for_id(*entry_id) {
if let Some(worktree) = project.read(cx).worktree_for_id(worktree_id, cx) {
if let Some(entry) = worktree.read(cx).entry_for_id(entry_id) {
workspace
.open_entry(
.open_path(
ProjectPath {
worktree_id: worktree.id(),
worktree_id,
path: entry.path.clone(),
},
cx,
)
.map(|t| t.detach());
.detach_and_log_err(cx);
}
}
}
@@ -259,8 +261,8 @@ impl ProjectPanel {
fn select_first(&mut self, cx: &mut ViewContext<Self>) {
if let Some(worktree) = self.project.read(cx).worktrees().first() {
let worktree_id = worktree.id();
let worktree = worktree.read(cx);
let worktree_id = worktree.id();
if let Some(root_entry) = worktree.root_entry() {
self.selection = Some(Selection {
worktree_id,
@@ -275,7 +277,7 @@ impl ProjectPanel {
fn autoscroll(&mut self) {
if let Some(selection) = self.selection {
self.list.scroll_to(selection.index);
self.list.scroll_to(ScrollTarget::Show(selection.index));
}
}
@@ -307,13 +309,13 @@ impl ProjectPanel {
fn selected_entry<'a>(&self, cx: &'a AppContext) -> Option<(&'a Worktree, &'a project::Entry)> {
let selection = self.selection?;
let project = self.project.read(cx);
let worktree = project.worktree_for_id(selection.worktree_id)?.read(cx);
let worktree = project.worktree_for_id(selection.worktree_id, cx)?.read(cx);
Some((worktree, worktree.entry_for_id(selection.entry_id)?))
}
fn update_visible_entries(
&mut self,
new_selected_entry: Option<(usize, usize)>,
new_selected_entry: Option<(WorktreeId, usize)>,
cx: &mut ViewContext<Self>,
) {
let worktrees = self.project.read(cx).worktrees();
@@ -322,7 +324,7 @@ impl ProjectPanel {
let mut entry_ix = 0;
for worktree in worktrees {
let snapshot = worktree.read(cx).snapshot();
let worktree_id = worktree.id();
let worktree_id = snapshot.id();
let expanded_dir_ids = match self.expanded_dir_ids.entry(worktree_id) {
hash_map::Entry::Occupied(e) => e.into_mut(),
@@ -342,7 +344,7 @@ impl ProjectPanel {
while let Some(item) = entry_iter.entry() {
visible_worktree_entries.push(entry_iter.offset());
if let Some(new_selected_entry) = new_selected_entry {
if new_selected_entry == (worktree.id(), item.id) {
if new_selected_entry == (worktree_id, item.id) {
self.selection = Some(Selection {
worktree_id,
entry_id: item.id,
@@ -371,10 +373,15 @@ impl ProjectPanel {
}
}
fn expand_entry(&mut self, worktree_id: usize, entry_id: usize, cx: &mut ViewContext<Self>) {
fn expand_entry(
&mut self,
worktree_id: WorktreeId,
entry_id: usize,
cx: &mut ViewContext<Self>,
) {
let project = self.project.read(cx);
if let Some((worktree, expanded_dir_ids)) = project
.worktree_for_id(worktree_id)
.worktree_for_id(worktree_id, cx)
.zip(self.expanded_dir_ids.get_mut(&worktree_id))
{
let worktree = worktree.read(cx);
@@ -417,12 +424,12 @@ impl ProjectPanel {
let end_ix = range.end.min(ix + visible_worktree_entries.len());
let worktree = &worktrees[worktree_ix];
let snapshot = worktree.read(cx).snapshot();
let expanded_entry_ids = self
.expanded_dir_ids
.get(&worktree.id())
.get(&snapshot.id())
.map(Vec::as_slice)
.unwrap_or(&[]);
let snapshot = worktree.read(cx).snapshot();
let root_name = OsStr::new(snapshot.root_name());
let mut cursor = snapshot.entries(false);
@@ -439,11 +446,11 @@ impl ProjectPanel {
is_dir: entry.is_dir(),
is_expanded: expanded_entry_ids.binary_search(&entry.id).is_ok(),
is_selected: self.selection.map_or(false, |e| {
e.worktree_id == worktree.id() && e.entry_id == entry.id
e.worktree_id == snapshot.id() && e.entry_id == entry.id
}),
};
let entry = ProjectEntry {
worktree_id: worktree.id(),
worktree_id: snapshot.id(),
entry_id: entry.id,
};
callback(entry, details, cx);
@@ -461,7 +468,7 @@ impl ProjectPanel {
) -> ElementBox {
let is_dir = details.is_dir;
MouseEventHandler::new::<Self, _, _, _>(
(entry.worktree_id, entry.entry_id),
(entry.worktree_id.to_usize(), entry.entry_id),
cx,
|state, _| {
let style = match (details.is_selected, state.hovered) {
@@ -617,17 +624,18 @@ mod tests {
)
.await;
let project = cx.add_model(|_| {
Project::new(
params.languages.clone(),
let project = cx.update(|cx| {
Project::local(
params.client.clone(),
params.user_store.clone(),
params.languages.clone(),
params.fs.clone(),
cx,
)
});
let root1 = project
.update(&mut cx, |project, cx| {
project.add_local_worktree("/root1".as_ref(), cx)
project.add_local_worktree("/root1", cx)
})
.await
.unwrap();
@@ -636,7 +644,7 @@ mod tests {
.await;
let root2 = project
.update(&mut cx, |project, cx| {
project.add_local_worktree("/root2".as_ref(), cx)
project.add_local_worktree("/root2", cx)
})
.await
.unwrap();

View File

@@ -13,7 +13,7 @@ test-support = []
[dependencies]
anyhow = "1.0"
async-lock = "2.4"
async-tungstenite = "0.14"
async-tungstenite = "0.16"
base64 = "0.13"
futures = "0.3"
log = "0.4"
@@ -30,5 +30,6 @@ zstd = "0.9"
prost-build = "0.8"
[dev-dependencies]
gpui = { path = "../gpui", features = ["test-support"] }
smol = "1.2.5"
tempdir = "0.3.7"

View File

@@ -9,37 +9,49 @@ message Envelope {
Ack ack = 4;
Error error = 5;
Ping ping = 6;
ShareWorktree share_worktree = 7;
ShareWorktreeResponse share_worktree_response = 8;
JoinWorktree join_worktree = 9;
JoinWorktreeResponse join_worktree_response = 10;
UpdateWorktree update_worktree = 11;
CloseWorktree close_worktree = 12;
OpenBuffer open_buffer = 13;
OpenBufferResponse open_buffer_response = 14;
CloseBuffer close_buffer = 15;
UpdateBuffer update_buffer = 16;
SaveBuffer save_buffer = 17;
BufferSaved buffer_saved = 18;
AddCollaborator add_collaborator = 19;
RemoveCollaborator remove_collaborator = 20;
GetChannels get_channels = 21;
GetChannelsResponse get_channels_response = 22;
GetUsers get_users = 23;
GetUsersResponse get_users_response = 24;
JoinChannel join_channel = 25;
JoinChannelResponse join_channel_response = 26;
LeaveChannel leave_channel = 27;
SendChannelMessage send_channel_message = 28;
SendChannelMessageResponse send_channel_message_response = 29;
ChannelMessageSent channel_message_sent = 30;
GetChannelMessages get_channel_messages = 31;
GetChannelMessagesResponse get_channel_messages_response = 32;
OpenWorktree open_worktree = 33;
OpenWorktreeResponse open_worktree_response = 34;
UnshareWorktree unshare_worktree = 35;
UpdateContacts update_contacts = 36;
LeaveWorktree leave_worktree = 37;
RegisterProject register_project = 7;
RegisterProjectResponse register_project_response = 8;
UnregisterProject unregister_project = 9;
ShareProject share_project = 10;
UnshareProject unshare_project = 11;
JoinProject join_project = 12;
JoinProjectResponse join_project_response = 13;
LeaveProject leave_project = 14;
AddProjectCollaborator add_project_collaborator = 15;
RemoveProjectCollaborator remove_project_collaborator = 16;
RegisterWorktree register_worktree = 17;
UnregisterWorktree unregister_worktree = 18;
ShareWorktree share_worktree = 19;
UpdateWorktree update_worktree = 20;
UpdateDiagnosticSummary update_diagnostic_summary = 21;
DiskBasedDiagnosticsUpdating disk_based_diagnostics_updating = 22;
DiskBasedDiagnosticsUpdated disk_based_diagnostics_updated = 23;
OpenBuffer open_buffer = 24;
OpenBufferResponse open_buffer_response = 25;
CloseBuffer close_buffer = 26;
UpdateBuffer update_buffer = 27;
SaveBuffer save_buffer = 28;
BufferSaved buffer_saved = 29;
FormatBuffer format_buffer = 30;
GetChannels get_channels = 31;
GetChannelsResponse get_channels_response = 32;
JoinChannel join_channel = 33;
JoinChannelResponse join_channel_response = 34;
LeaveChannel leave_channel = 35;
SendChannelMessage send_channel_message = 36;
SendChannelMessageResponse send_channel_message_response = 37;
ChannelMessageSent channel_message_sent = 38;
GetChannelMessages get_channel_messages = 39;
GetChannelMessagesResponse get_channel_messages_response = 40;
UpdateContacts update_contacts = 41;
GetUsers get_users = 42;
GetUsersResponse get_users_response = 43;
}
}
@@ -53,62 +65,77 @@ message Error {
string message = 1;
}
message OpenWorktree {
string root_name = 1;
repeated string authorized_logins = 2;
message RegisterProject {}
message RegisterProjectResponse {
uint64 project_id = 1;
}
message OpenWorktreeResponse {
uint64 worktree_id = 1;
message UnregisterProject {
uint64 project_id = 1;
}
message ShareWorktree {
Worktree worktree = 1;
message ShareProject {
uint64 project_id = 1;
}
message ShareWorktreeResponse {}
message UnshareWorktree {
uint64 worktree_id = 1;
message UnshareProject {
uint64 project_id = 1;
}
message JoinWorktree {
uint64 worktree_id = 1;
message JoinProject {
uint64 project_id = 1;
}
message LeaveWorktree {
uint64 worktree_id = 1;
}
message JoinWorktreeResponse {
Worktree worktree = 2;
uint32 replica_id = 3;
message JoinProjectResponse {
uint32 replica_id = 2;
repeated Worktree worktrees = 3;
repeated Collaborator collaborators = 4;
}
message LeaveProject {
uint64 project_id = 1;
}
message RegisterWorktree {
uint64 project_id = 1;
uint64 worktree_id = 2;
string root_name = 3;
repeated string authorized_logins = 4;
}
message UnregisterWorktree {
uint64 project_id = 1;
uint64 worktree_id = 2;
}
message ShareWorktree {
uint64 project_id = 1;
Worktree worktree = 2;
}
message UpdateWorktree {
uint64 worktree_id = 1;
repeated Entry updated_entries = 2;
repeated uint64 removed_entries = 3;
uint64 project_id = 1;
uint64 worktree_id = 2;
string root_name = 3;
repeated Entry updated_entries = 4;
repeated uint64 removed_entries = 5;
}
message CloseWorktree {
uint64 worktree_id = 1;
}
message AddCollaborator {
uint64 worktree_id = 1;
message AddProjectCollaborator {
uint64 project_id = 1;
Collaborator collaborator = 2;
}
message RemoveCollaborator {
uint64 worktree_id = 1;
message RemoveProjectCollaborator {
uint64 project_id = 1;
uint32 peer_id = 2;
}
message OpenBuffer {
uint64 worktree_id = 1;
string path = 2;
uint64 project_id = 1;
uint64 worktree_id = 2;
string path = 3;
}
message OpenBufferResponse {
@@ -116,26 +143,60 @@ message OpenBufferResponse {
}
message CloseBuffer {
uint64 worktree_id = 1;
uint64 buffer_id = 2;
uint64 project_id = 1;
uint64 worktree_id = 2;
uint64 buffer_id = 3;
}
message UpdateBuffer {
uint64 worktree_id = 1;
uint64 buffer_id = 2;
repeated Operation operations = 3;
uint64 project_id = 1;
uint64 worktree_id = 2;
uint64 buffer_id = 3;
repeated Operation operations = 4;
}
message SaveBuffer {
uint64 worktree_id = 1;
uint64 buffer_id = 2;
uint64 project_id = 1;
uint64 worktree_id = 2;
uint64 buffer_id = 3;
}
message BufferSaved {
uint64 worktree_id = 1;
uint64 buffer_id = 2;
repeated VectorClockEntry version = 3;
Timestamp mtime = 4;
uint64 project_id = 1;
uint64 worktree_id = 2;
uint64 buffer_id = 3;
repeated VectorClockEntry version = 4;
Timestamp mtime = 5;
}
message FormatBuffer {
uint64 project_id = 1;
uint64 worktree_id = 2;
uint64 buffer_id = 3;
}
message UpdateDiagnosticSummary {
uint64 project_id = 1;
uint64 worktree_id = 2;
DiagnosticSummary summary = 3;
}
message DiagnosticSummary {
string path = 3;
uint32 error_count = 4;
uint32 warning_count = 5;
uint32 info_count = 6;
uint32 hint_count = 7;
}
message DiskBasedDiagnosticsUpdating {
uint64 project_id = 1;
uint64 worktree_id = 2;
}
message DiskBasedDiagnosticsUpdated {
uint64 project_id = 1;
uint64 worktree_id = 2;
}
message GetChannels {}
@@ -212,6 +273,7 @@ message Worktree {
uint64 id = 1;
string root_name = 2;
repeated Entry entries = 3;
repeated DiagnosticSummary diagnostic_summaries = 4;
}
message Entry {
@@ -226,39 +288,70 @@ message Entry {
message Buffer {
uint64 id = 1;
string content = 2;
repeated Operation.Edit history = 3;
repeated SelectionSet selections = 4;
DiagnosticSet diagnostics = 5;
string visible_text = 2;
string deleted_text = 3;
repeated BufferFragment fragments = 4;
repeated UndoMapEntry undo_map = 5;
repeated VectorClockEntry version = 6;
repeated SelectionSet selections = 7;
repeated Diagnostic diagnostics = 8;
uint32 lamport_timestamp = 9;
repeated Operation deferred_operations = 10;
}
message BufferFragment {
uint32 replica_id = 1;
uint32 local_timestamp = 2;
uint32 lamport_timestamp = 3;
uint32 insertion_offset = 4;
uint32 len = 5;
bool visible = 6;
repeated VectorClockEntry deletions = 7;
repeated VectorClockEntry max_undos = 8;
}
message SelectionSet {
uint32 replica_id = 1;
uint32 lamport_timestamp = 2;
bool is_active = 3;
repeated VectorClockEntry version = 4;
repeated Selection selections = 5;
repeated Selection selections = 2;
uint32 lamport_timestamp = 3;
}
message Selection {
uint64 id = 1;
uint64 start = 2;
uint64 end = 3;
Anchor start = 2;
Anchor end = 3;
bool reversed = 4;
}
message DiagnosticSet {
repeated VectorClockEntry version = 1;
repeated Diagnostic diagnostics = 2;
message Anchor {
uint32 replica_id = 1;
uint32 local_timestamp = 2;
uint64 offset = 3;
Bias bias = 4;
}
enum Bias {
Left = 0;
Right = 1;
}
message UpdateDiagnostics {
uint32 replica_id = 1;
uint32 lamport_timestamp = 2;
repeated Diagnostic diagnostics = 3;
}
message Diagnostic {
uint64 start = 1;
uint64 end = 2;
Anchor start = 1;
Anchor end = 2;
Severity severity = 3;
string message = 4;
uint64 group_id = 5;
bool is_primary = 6;
optional string code = 5;
uint64 group_id = 6;
bool is_primary = 7;
bool is_valid = 8;
bool is_disk_based = 9;
enum Severity {
None = 0;
Error = 1;
@@ -268,16 +361,12 @@ message Diagnostic {
}
}
message Operation {
oneof variant {
Edit edit = 1;
Undo undo = 2;
UpdateSelections update_selections = 3;
RemoveSelections remove_selections = 4;
SetActiveSelections set_active_selections = 5;
DiagnosticSet update_diagnostics = 6;
UpdateDiagnostics update_diagnostics = 4;
}
message Edit {
@@ -298,31 +387,23 @@ message Operation {
repeated UndoCount counts = 6;
}
message UndoCount {
uint32 replica_id = 1;
uint32 local_timestamp = 2;
uint32 count = 3;
}
message UpdateSelections {
uint32 replica_id = 1;
uint32 local_timestamp = 2;
uint32 lamport_timestamp = 3;
repeated VectorClockEntry version = 4;
repeated Selection selections = 5;
repeated Selection selections = 4;
}
}
message RemoveSelections {
uint32 replica_id = 1;
uint32 local_timestamp = 2;
uint32 lamport_timestamp = 3;
}
message UndoMapEntry {
uint32 replica_id = 1;
uint32 local_timestamp = 2;
repeated UndoCount counts = 3;
}
message SetActiveSelections {
uint32 replica_id = 1;
optional uint32 local_timestamp = 2;
uint32 lamport_timestamp = 3;
}
message UndoCount {
uint32 replica_id = 1;
uint32 local_timestamp = 2;
uint32 count = 3;
}
message VectorClockEntry {
@@ -360,12 +441,12 @@ message ChannelMessage {
message Contact {
uint64 user_id = 1;
repeated WorktreeMetadata worktrees = 2;
repeated ProjectMetadata projects = 2;
}
message WorktreeMetadata {
message ProjectMetadata {
uint64 id = 1;
string root_name = 2;
bool is_shared = 3;
bool is_shared = 2;
repeated string worktree_root_names = 3;
repeated uint64 guests = 4;
}

View File

@@ -1,8 +1,9 @@
use super::proto::{self, AnyTypedEnvelope, EnvelopedMessage, MessageStream, RequestMessage};
use super::Connection;
use anyhow::{anyhow, Context, Result};
use async_lock::{Mutex, RwLock};
use futures::FutureExt as _;
use futures::stream::BoxStream;
use futures::{FutureExt as _, StreamExt};
use parking_lot::{Mutex, RwLock};
use postage::{
mpsc,
prelude::{Sink as _, Stream as _},
@@ -109,7 +110,7 @@ impl Peer {
) -> (
ConnectionId,
impl Future<Output = anyhow::Result<()>> + Send,
mpsc::Receiver<Box<dyn AnyTypedEnvelope>>,
BoxStream<'static, Box<dyn AnyTypedEnvelope>>,
) {
let connection_id = ConnectionId(self.next_connection_id.fetch_add(1, SeqCst));
let (mut incoming_tx, incoming_rx) = mpsc::channel(64);
@@ -132,23 +133,9 @@ impl Peer {
futures::select_biased! {
incoming = read_message => match incoming {
Ok(incoming) => {
if let Some(responding_to) = incoming.responding_to {
let channel = response_channels.lock().await.as_mut().unwrap().remove(&responding_to);
if let Some(mut tx) = channel {
tx.send(incoming).await.ok();
} else {
log::warn!("received RPC response to unknown request {}", responding_to);
}
} else {
if let Some(envelope) = proto::build_typed_envelope(connection_id, incoming) {
if incoming_tx.send(envelope).await.is_err() {
break 'outer Ok(())
}
} else {
log::error!("unable to construct a typed envelope");
}
if incoming_tx.send(incoming).await.is_err() {
break 'outer Ok(());
}
break;
}
Err(error) => {
@@ -169,25 +156,47 @@ impl Peer {
}
};
response_channels.lock().await.take();
this.connections.write().await.remove(&connection_id);
response_channels.lock().take();
this.connections.write().remove(&connection_id);
result
};
let response_channels = connection_state.response_channels.clone();
self.connections
.write()
.await
.insert(connection_id, connection_state);
(connection_id, handle_io, incoming_rx)
let incoming_rx = incoming_rx.filter_map(move |incoming| {
let response_channels = response_channels.clone();
async move {
if let Some(responding_to) = incoming.responding_to {
let channel = response_channels.lock().as_mut()?.remove(&responding_to);
if let Some(mut tx) = channel {
tx.send(incoming).await.ok();
} else {
log::warn!("received RPC response to unknown request {}", responding_to);
}
None
} else {
if let Some(envelope) = proto::build_typed_envelope(connection_id, incoming) {
Some(envelope)
} else {
log::error!("unable to construct a typed envelope");
None
}
}
}
});
(connection_id, handle_io, incoming_rx.boxed())
}
pub async fn disconnect(&self, connection_id: ConnectionId) {
self.connections.write().await.remove(&connection_id);
pub fn disconnect(&self, connection_id: ConnectionId) {
self.connections.write().remove(&connection_id);
}
pub async fn reset(&self) {
self.connections.write().await.clear();
pub fn reset(&self) {
self.connections.write().clear();
}
pub fn request<T: RequestMessage>(
@@ -216,12 +225,11 @@ impl Peer {
let this = self.clone();
let (tx, mut rx) = mpsc::channel(1);
async move {
let mut connection = this.connection_state(receiver_id).await?;
let mut connection = this.connection_state(receiver_id)?;
let message_id = connection.next_message_id.fetch_add(1, SeqCst);
connection
.response_channels
.lock()
.await
.as_mut()
.ok_or_else(|| anyhow!("connection was closed"))?
.insert(message_id, tx);
@@ -250,7 +258,7 @@ impl Peer {
) -> impl Future<Output = Result<()>> {
let this = self.clone();
async move {
let mut connection = this.connection_state(receiver_id).await?;
let mut connection = this.connection_state(receiver_id)?;
let message_id = connection
.next_message_id
.fetch_add(1, atomic::Ordering::SeqCst);
@@ -270,7 +278,7 @@ impl Peer {
) -> impl Future<Output = Result<()>> {
let this = self.clone();
async move {
let mut connection = this.connection_state(receiver_id).await?;
let mut connection = this.connection_state(receiver_id)?;
let message_id = connection
.next_message_id
.fetch_add(1, atomic::Ordering::SeqCst);
@@ -289,7 +297,7 @@ impl Peer {
) -> impl Future<Output = Result<()>> {
let this = self.clone();
async move {
let mut connection = this.connection_state(receipt.sender_id).await?;
let mut connection = this.connection_state(receipt.sender_id)?;
let message_id = connection
.next_message_id
.fetch_add(1, atomic::Ordering::SeqCst);
@@ -308,7 +316,7 @@ impl Peer {
) -> impl Future<Output = Result<()>> {
let this = self.clone();
async move {
let mut connection = this.connection_state(receipt.sender_id).await?;
let mut connection = this.connection_state(receipt.sender_id)?;
let message_id = connection
.next_message_id
.fetch_add(1, atomic::Ordering::SeqCst);
@@ -320,18 +328,12 @@ impl Peer {
}
}
fn connection_state(
self: &Arc<Self>,
connection_id: ConnectionId,
) -> impl Future<Output = Result<ConnectionState>> {
let this = self.clone();
async move {
let connections = this.connections.read().await;
let connection = connections
.get(&connection_id)
.ok_or_else(|| anyhow!("no such connection: {}", connection_id))?;
Ok(connection.clone())
}
fn connection_state(&self, connection_id: ConnectionId) -> Result<ConnectionState> {
let connections = self.connections.read();
let connection = connections
.get(&connection_id)
.ok_or_else(|| anyhow!("no such connection: {}", connection_id))?;
Ok(connection.clone())
}
}
@@ -340,204 +342,311 @@ mod tests {
use super::*;
use crate::TypedEnvelope;
use async_tungstenite::tungstenite::Message as WebSocketMessage;
use futures::StreamExt as _;
use gpui::TestAppContext;
#[test]
fn test_request_response() {
smol::block_on(async move {
// create 2 clients connected to 1 server
let server = Peer::new();
let client1 = Peer::new();
let client2 = Peer::new();
#[gpui::test(iterations = 10)]
async fn test_request_response(cx: TestAppContext) {
let executor = cx.foreground();
let (client1_to_server_conn, server_to_client_1_conn, _) = Connection::in_memory();
let (client1_conn_id, io_task1, _) =
client1.add_connection(client1_to_server_conn).await;
let (_, io_task2, incoming1) = server.add_connection(server_to_client_1_conn).await;
// create 2 clients connected to 1 server
let server = Peer::new();
let client1 = Peer::new();
let client2 = Peer::new();
let (client2_to_server_conn, server_to_client_2_conn, _) = Connection::in_memory();
let (client2_conn_id, io_task3, _) =
client2.add_connection(client2_to_server_conn).await;
let (_, io_task4, incoming2) = server.add_connection(server_to_client_2_conn).await;
let (client1_to_server_conn, server_to_client_1_conn, _) = Connection::in_memory();
let (client1_conn_id, io_task1, client1_incoming) =
client1.add_connection(client1_to_server_conn).await;
let (_, io_task2, server_incoming1) = server.add_connection(server_to_client_1_conn).await;
smol::spawn(io_task1).detach();
smol::spawn(io_task2).detach();
smol::spawn(io_task3).detach();
smol::spawn(io_task4).detach();
smol::spawn(handle_messages(incoming1, server.clone())).detach();
smol::spawn(handle_messages(incoming2, server.clone())).detach();
let (client2_to_server_conn, server_to_client_2_conn, _) = Connection::in_memory();
let (client2_conn_id, io_task3, client2_incoming) =
client2.add_connection(client2_to_server_conn).await;
let (_, io_task4, server_incoming2) = server.add_connection(server_to_client_2_conn).await;
assert_eq!(
client1
.request(client1_conn_id, proto::Ping {},)
.await
.unwrap(),
proto::Ack {}
);
executor.spawn(io_task1).detach();
executor.spawn(io_task2).detach();
executor.spawn(io_task3).detach();
executor.spawn(io_task4).detach();
executor
.spawn(handle_messages(server_incoming1, server.clone()))
.detach();
executor
.spawn(handle_messages(client1_incoming, client1.clone()))
.detach();
executor
.spawn(handle_messages(server_incoming2, server.clone()))
.detach();
executor
.spawn(handle_messages(client2_incoming, client2.clone()))
.detach();
assert_eq!(
client2
.request(client2_conn_id, proto::Ping {},)
.await
.unwrap(),
proto::Ack {}
);
assert_eq!(
client1
.request(client1_conn_id, proto::Ping {},)
.await
.unwrap(),
proto::Ack {}
);
assert_eq!(
client1
.request(
client1_conn_id,
proto::OpenBuffer {
worktree_id: 1,
path: "path/one".to_string(),
},
)
.await
.unwrap(),
proto::OpenBufferResponse {
buffer: Some(proto::Buffer {
id: 101,
content: "path/one content".to_string(),
history: vec![],
selections: vec![],
diagnostics: None,
}),
}
);
assert_eq!(
client2
.request(client2_conn_id, proto::Ping {},)
.await
.unwrap(),
proto::Ack {}
);
assert_eq!(
client2
.request(
client2_conn_id,
proto::OpenBuffer {
worktree_id: 2,
path: "path/two".to_string(),
},
)
.await
.unwrap(),
proto::OpenBufferResponse {
buffer: Some(proto::Buffer {
id: 102,
content: "path/two content".to_string(),
history: vec![],
selections: vec![],
diagnostics: None,
}),
}
);
client1.disconnect(client1_conn_id).await;
client2.disconnect(client1_conn_id).await;
async fn handle_messages(
mut messages: mpsc::Receiver<Box<dyn AnyTypedEnvelope>>,
peer: Arc<Peer>,
) -> Result<()> {
while let Some(envelope) = messages.next().await {
let envelope = envelope.into_any();
if let Some(envelope) = envelope.downcast_ref::<TypedEnvelope<proto::Ping>>() {
let receipt = envelope.receipt();
peer.respond(receipt, proto::Ack {}).await?
} else if let Some(envelope) =
envelope.downcast_ref::<TypedEnvelope<proto::OpenBuffer>>()
{
let message = &envelope.payload;
let receipt = envelope.receipt();
let response = match message.path.as_str() {
"path/one" => {
assert_eq!(message.worktree_id, 1);
proto::OpenBufferResponse {
buffer: Some(proto::Buffer {
id: 101,
content: "path/one content".to_string(),
history: vec![],
selections: vec![],
diagnostics: None,
}),
}
}
"path/two" => {
assert_eq!(message.worktree_id, 2);
proto::OpenBufferResponse {
buffer: Some(proto::Buffer {
id: 102,
content: "path/two content".to_string(),
history: vec![],
selections: vec![],
diagnostics: None,
}),
}
}
_ => {
panic!("unexpected path {}", message.path);
}
};
peer.respond(receipt, response).await?
} else {
panic!("unknown message type");
}
}
Ok(())
assert_eq!(
client1
.request(
client1_conn_id,
proto::OpenBuffer {
project_id: 0,
worktree_id: 1,
path: "path/one".to_string(),
},
)
.await
.unwrap(),
proto::OpenBufferResponse {
buffer: Some(proto::Buffer {
id: 101,
visible_text: "path/one content".to_string(),
..Default::default()
}),
}
});
);
assert_eq!(
client2
.request(
client2_conn_id,
proto::OpenBuffer {
project_id: 0,
worktree_id: 2,
path: "path/two".to_string(),
},
)
.await
.unwrap(),
proto::OpenBufferResponse {
buffer: Some(proto::Buffer {
id: 102,
visible_text: "path/two content".to_string(),
..Default::default()
}),
}
);
client1.disconnect(client1_conn_id);
client2.disconnect(client1_conn_id);
async fn handle_messages(
mut messages: BoxStream<'static, Box<dyn AnyTypedEnvelope>>,
peer: Arc<Peer>,
) -> Result<()> {
while let Some(envelope) = messages.next().await {
let envelope = envelope.into_any();
if let Some(envelope) = envelope.downcast_ref::<TypedEnvelope<proto::Ping>>() {
let receipt = envelope.receipt();
peer.respond(receipt, proto::Ack {}).await?
} else if let Some(envelope) =
envelope.downcast_ref::<TypedEnvelope<proto::OpenBuffer>>()
{
let message = &envelope.payload;
let receipt = envelope.receipt();
let response = match message.path.as_str() {
"path/one" => {
assert_eq!(message.worktree_id, 1);
proto::OpenBufferResponse {
buffer: Some(proto::Buffer {
id: 101,
visible_text: "path/one content".to_string(),
..Default::default()
}),
}
}
"path/two" => {
assert_eq!(message.worktree_id, 2);
proto::OpenBufferResponse {
buffer: Some(proto::Buffer {
id: 102,
visible_text: "path/two content".to_string(),
..Default::default()
}),
}
}
_ => {
panic!("unexpected path {}", message.path);
}
};
peer.respond(receipt, response).await?
} else {
panic!("unknown message type");
}
}
Ok(())
}
}
#[test]
fn test_disconnect() {
smol::block_on(async move {
let (client_conn, mut server_conn, _) = Connection::in_memory();
#[gpui::test(iterations = 10)]
async fn test_order_of_response_and_incoming(cx: TestAppContext) {
let executor = cx.foreground();
let server = Peer::new();
let client = Peer::new();
let client = Peer::new();
let (connection_id, io_handler, mut incoming) =
client.add_connection(client_conn).await;
let (client_to_server_conn, server_to_client_conn, _) = Connection::in_memory();
let (client_to_server_conn_id, io_task1, mut client_incoming) =
client.add_connection(client_to_server_conn).await;
let (server_to_client_conn_id, io_task2, mut server_incoming) =
server.add_connection(server_to_client_conn).await;
let (mut io_ended_tx, mut io_ended_rx) = postage::barrier::channel();
smol::spawn(async move {
executor.spawn(io_task1).detach();
executor.spawn(io_task2).detach();
executor
.spawn(async move {
let request = server_incoming
.next()
.await
.unwrap()
.into_any()
.downcast::<TypedEnvelope<proto::Ping>>()
.unwrap();
server
.send(
server_to_client_conn_id,
proto::Error {
message: "message 1".to_string(),
},
)
.await
.unwrap();
server
.send(
server_to_client_conn_id,
proto::Error {
message: "message 2".to_string(),
},
)
.await
.unwrap();
server
.respond(request.receipt(), proto::Ack {})
.await
.unwrap();
// Prevent the connection from being dropped
server_incoming.next().await;
})
.detach();
let events = Arc::new(Mutex::new(Vec::new()));
let response = client.request(client_to_server_conn_id, proto::Ping {});
let response_task = executor.spawn({
let events = events.clone();
async move {
response.await.unwrap();
events.lock().push("response".to_string());
}
});
executor
.spawn({
let events = events.clone();
async move {
let incoming1 = client_incoming
.next()
.await
.unwrap()
.into_any()
.downcast::<TypedEnvelope<proto::Error>>()
.unwrap();
events.lock().push(incoming1.payload.message);
let incoming2 = client_incoming
.next()
.await
.unwrap()
.into_any()
.downcast::<TypedEnvelope<proto::Error>>()
.unwrap();
events.lock().push(incoming2.payload.message);
// Prevent the connection from being dropped
client_incoming.next().await;
}
})
.detach();
response_task.await;
assert_eq!(
&*events.lock(),
&[
"message 1".to_string(),
"message 2".to_string(),
"response".to_string()
]
);
}
#[gpui::test(iterations = 10)]
async fn test_disconnect(cx: TestAppContext) {
let executor = cx.foreground();
let (client_conn, mut server_conn, _) = Connection::in_memory();
let client = Peer::new();
let (connection_id, io_handler, mut incoming) = client.add_connection(client_conn).await;
let (mut io_ended_tx, mut io_ended_rx) = postage::barrier::channel();
executor
.spawn(async move {
io_handler.await.ok();
io_ended_tx.send(()).await.unwrap();
})
.detach();
let (mut messages_ended_tx, mut messages_ended_rx) = postage::barrier::channel();
smol::spawn(async move {
let (mut messages_ended_tx, mut messages_ended_rx) = postage::barrier::channel();
executor
.spawn(async move {
incoming.next().await;
messages_ended_tx.send(()).await.unwrap();
})
.detach();
client.disconnect(connection_id).await;
client.disconnect(connection_id);
io_ended_rx.recv().await;
messages_ended_rx.recv().await;
assert!(server_conn
.send(WebSocketMessage::Binary(vec![]))
.await
.is_err());
});
io_ended_rx.recv().await;
messages_ended_rx.recv().await;
assert!(server_conn
.send(WebSocketMessage::Binary(vec![]))
.await
.is_err());
}
#[test]
fn test_io_error() {
smol::block_on(async move {
let (client_conn, mut server_conn, _) = Connection::in_memory();
#[gpui::test(iterations = 10)]
async fn test_io_error(cx: TestAppContext) {
let executor = cx.foreground();
let (client_conn, mut server_conn, _) = Connection::in_memory();
let client = Peer::new();
let (connection_id, io_handler, mut incoming) =
client.add_connection(client_conn).await;
smol::spawn(io_handler).detach();
smol::spawn(async move { incoming.next().await }).detach();
let client = Peer::new();
let (connection_id, io_handler, mut incoming) = client.add_connection(client_conn).await;
executor.spawn(io_handler).detach();
executor
.spawn(async move { incoming.next().await })
.detach();
let response = smol::spawn(client.request(connection_id, proto::Ping {}));
let _request = server_conn.rx.next().await.unwrap().unwrap();
let response = executor.spawn(client.request(connection_id, proto::Ping {}));
let _request = server_conn.rx.next().await.unwrap().unwrap();
drop(server_conn);
assert_eq!(
response.await.unwrap_err().to_string(),
"connection was closed"
);
});
drop(server_conn);
assert_eq!(
response.await.unwrap_err().to_string(),
"connection was closed"
);
}
}

View File

@@ -121,69 +121,83 @@ macro_rules! entity_messages {
messages!(
Ack,
AddCollaborator,
AddProjectCollaborator,
BufferSaved,
ChannelMessageSent,
CloseBuffer,
CloseWorktree,
DiskBasedDiagnosticsUpdated,
DiskBasedDiagnosticsUpdating,
Error,
FormatBuffer,
GetChannelMessages,
GetChannelMessagesResponse,
GetChannels,
GetChannelsResponse,
UpdateContacts,
GetUsers,
GetUsersResponse,
JoinChannel,
JoinChannelResponse,
JoinWorktree,
JoinWorktreeResponse,
JoinProject,
JoinProjectResponse,
LeaveChannel,
LeaveWorktree,
LeaveProject,
OpenBuffer,
OpenBufferResponse,
OpenWorktree,
OpenWorktreeResponse,
RegisterProjectResponse,
Ping,
RemoveCollaborator,
RegisterProject,
RegisterWorktree,
RemoveProjectCollaborator,
SaveBuffer,
SendChannelMessage,
SendChannelMessageResponse,
ShareProject,
ShareWorktree,
ShareWorktreeResponse,
UnshareWorktree,
UnregisterProject,
UnregisterWorktree,
UnshareProject,
UpdateBuffer,
UpdateContacts,
UpdateDiagnosticSummary,
UpdateWorktree,
);
request_messages!(
(FormatBuffer, Ack),
(GetChannelMessages, GetChannelMessagesResponse),
(GetChannels, GetChannelsResponse),
(GetUsers, GetUsersResponse),
(JoinChannel, JoinChannelResponse),
(JoinProject, JoinProjectResponse),
(OpenBuffer, OpenBufferResponse),
(JoinWorktree, JoinWorktreeResponse),
(OpenWorktree, OpenWorktreeResponse),
(Ping, Ack),
(RegisterProject, RegisterProjectResponse),
(RegisterWorktree, Ack),
(SaveBuffer, BufferSaved),
(UpdateBuffer, Ack),
(ShareWorktree, ShareWorktreeResponse),
(UnshareWorktree, Ack),
(SendChannelMessage, SendChannelMessageResponse),
(GetChannelMessages, GetChannelMessagesResponse),
(ShareProject, Ack),
(ShareWorktree, Ack),
(UpdateBuffer, Ack),
);
entity_messages!(
worktree_id,
AddCollaborator,
project_id,
AddProjectCollaborator,
BufferSaved,
CloseBuffer,
CloseWorktree,
DiskBasedDiagnosticsUpdated,
DiskBasedDiagnosticsUpdating,
FormatBuffer,
JoinProject,
LeaveProject,
OpenBuffer,
JoinWorktree,
RemoveCollaborator,
RemoveProjectCollaborator,
SaveBuffer,
UnshareWorktree,
ShareWorktree,
UnregisterWorktree,
UnshareProject,
UpdateBuffer,
UpdateDiagnosticSummary,
UpdateWorktree,
);

View File

@@ -5,4 +5,4 @@ pub mod proto;
pub use conn::Connection;
pub use peer::*;
pub const PROTOCOL_VERSION: u32 = 3;
pub const PROTOCOL_VERSION: u32 = 4;

View File

@@ -5,6 +5,7 @@ HTTP_PORT = 8080
DATABASE_URL = "postgres://postgres@localhost/zed"
SESSION_SECRET = "6E1GS6IQNOLIBKWMEVWF1AFO4H78KNU8"
API_TOKEN = "secret"
# Available at https://github.com/organizations/zed-industries/settings/apps/zed-local-development
GITHUB_APP_ID = 115633

View File

@@ -19,7 +19,7 @@ rpc = { path = "../rpc" }
anyhow = "1.0.40"
async-std = { version = "1.8.0", features = ["attributes"] }
async-trait = "0.1.50"
async-tungstenite = "0.14"
async-tungstenite = "0.16"
base64 = "0.13"
clap = "=3.0.0-beta.2"
comrak = "0.10"
@@ -38,6 +38,7 @@ rand = "0.8"
rust-embed = { version = "6.2", features = ["include-exclude"] }
scrypt = "0.7"
serde = { version = "1.0", features = ["derive"] }
serde_json = "1.0"
sha-1 = "0.9"
surf = "2.2.0"
tide = "0.16.0"

View File

@@ -1 +1 @@
ZED_LOAD_BALANCER_CERT_ID=6f857971-20fb-4c68-a7d6-35fef9e5ec4c
ZED_ENVIRONMENT=production

View File

@@ -1 +1 @@
ZED_LOAD_BALANCER_CERT_ID=b0d48941-4895-4d75-a966-fe5a571b1fff
ZED_ENVIRONMENT=staging

View File

@@ -11,7 +11,7 @@ metadata:
name: zed
annotations:
service.beta.kubernetes.io/do-loadbalancer-tls-ports: "443"
service.beta.kubernetes.io/do-loadbalancer-certificate-id: "${ZED_LOAD_BALANCER_CERT_ID}"
service.beta.kubernetes.io/do-loadbalancer-certificate-id: "2634d353-1ab4-437f-add2-4ffd8f315233"
spec:
type: LoadBalancer
selector:
@@ -76,6 +76,11 @@ spec:
secretKeyRef:
name: github
key: privateKey
- name: API_TOKEN
valueFrom:
secretKeyRef:
name: api
key: token
securityContext:
capabilities:
# FIXME - Switch to the more restrictive `PERFMON` capability.

View File

@@ -30,10 +30,10 @@ impl RequestExt for Request {
pub fn add_routes(app: &mut tide::Server<Arc<AppState>>) {
app.at("/admin").get(get_admin_page);
app.at("/users").post(post_user);
app.at("/users/:id").put(put_user);
app.at("/users/:id/delete").post(delete_user);
app.at("/signups/:id/delete").post(delete_signup);
app.at("/admin/users").post(post_user);
app.at("/admin/users/:id").put(put_user);
app.at("/admin/users/:id/delete").post(delete_user);
app.at("/admin/signups/:id/delete").post(delete_signup);
}
#[derive(Serialize)]
@@ -105,13 +105,13 @@ async fn put_user(mut request: Request) -> tide::Result {
async fn delete_user(request: Request) -> tide::Result {
request.require_admin().await?;
let user_id = db::UserId(request.param("id")?.parse()?);
request.db().delete_user(user_id).await?;
request.db().destroy_user(user_id).await?;
Ok(tide::Redirect::new("/admin").into())
}
async fn delete_signup(request: Request) -> tide::Result {
request.require_admin().await?;
let signup_id = db::SignupId(request.param("id")?.parse()?);
request.db().delete_signup(signup_id).await?;
request.db().destroy_signup(signup_id).await?;
Ok(tide::Redirect::new("/admin").into())
}

179
crates/server/src/api.rs Normal file
View File

@@ -0,0 +1,179 @@
use crate::{auth, db::UserId, AppState, Request, RequestExt as _};
use async_trait::async_trait;
use serde::Deserialize;
use serde_json::json;
use std::sync::Arc;
use surf::StatusCode;
pub fn add_routes(app: &mut tide::Server<Arc<AppState>>) {
app.at("/users").get(get_users);
app.at("/users").post(create_user);
app.at("/users/:id").put(update_user);
app.at("/users/:id").delete(destroy_user);
app.at("/users/:github_login").get(get_user);
app.at("/users/:github_login/access_tokens")
.post(create_access_token);
}
async fn get_user(request: Request) -> tide::Result {
request.require_token().await?;
let user = request
.db()
.get_user_by_github_login(request.param("github_login")?)
.await?
.ok_or_else(|| surf::Error::from_str(404, "user not found"))?;
Ok(tide::Response::builder(StatusCode::Ok)
.body(tide::Body::from_json(&user)?)
.build())
}
async fn get_users(request: Request) -> tide::Result {
request.require_token().await?;
let users = request.db().get_all_users().await?;
Ok(tide::Response::builder(StatusCode::Ok)
.body(tide::Body::from_json(&users)?)
.build())
}
async fn create_user(mut request: Request) -> tide::Result {
request.require_token().await?;
#[derive(Deserialize)]
struct Params {
github_login: String,
admin: bool,
}
let params = request.body_json::<Params>().await?;
let user_id = request
.db()
.create_user(&params.github_login, params.admin)
.await?;
let user = request.db().get_user_by_id(user_id).await?.ok_or_else(|| {
surf::Error::from_str(
StatusCode::InternalServerError,
"couldn't find the user we just created",
)
})?;
Ok(tide::Response::builder(StatusCode::Ok)
.body(tide::Body::from_json(&user)?)
.build())
}
async fn update_user(mut request: Request) -> tide::Result {
request.require_token().await?;
#[derive(Deserialize)]
struct Params {
admin: bool,
}
let user_id = UserId(
request
.param("id")?
.parse::<i32>()
.map_err(|error| surf::Error::from_str(StatusCode::BadRequest, error.to_string()))?,
);
let params = request.body_json::<Params>().await?;
request
.db()
.set_user_is_admin(user_id, params.admin)
.await?;
Ok(tide::Response::builder(StatusCode::Ok).build())
}
async fn destroy_user(request: Request) -> tide::Result {
request.require_token().await?;
let user_id = UserId(
request
.param("id")?
.parse::<i32>()
.map_err(|error| surf::Error::from_str(StatusCode::BadRequest, error.to_string()))?,
);
request.db().destroy_user(user_id).await?;
Ok(tide::Response::builder(StatusCode::Ok).build())
}
async fn create_access_token(request: Request) -> tide::Result {
request.require_token().await?;
let user = request
.db()
.get_user_by_github_login(request.param("github_login")?)
.await?
.ok_or_else(|| surf::Error::from_str(StatusCode::NotFound, "user not found"))?;
let access_token = auth::create_access_token(request.db(), user.id).await?;
#[derive(Deserialize)]
struct QueryParams {
public_key: String,
impersonate: Option<String>,
}
let query_params: QueryParams = request.query().map_err(|_| {
surf::Error::from_str(StatusCode::UnprocessableEntity, "invalid query params")
})?;
let encrypted_access_token =
auth::encrypt_access_token(&access_token, query_params.public_key.clone())?;
let mut user_id = user.id;
if let Some(impersonate) = query_params.impersonate {
if user.admin {
if let Some(impersonated_user) =
request.db().get_user_by_github_login(&impersonate).await?
{
user_id = impersonated_user.id;
} else {
return Ok(tide::Response::builder(StatusCode::UnprocessableEntity)
.body(format!(
"Can't impersonate non-existent user {}",
impersonate
))
.build());
}
} else {
return Ok(tide::Response::builder(StatusCode::Unauthorized)
.body(format!(
"Can't impersonate user {} because the real user isn't an admin",
impersonate
))
.build());
}
}
Ok(tide::Response::builder(StatusCode::Ok)
.body(json!({"user_id": user_id, "encrypted_access_token": encrypted_access_token}))
.build())
}
#[async_trait]
pub trait RequestExt {
async fn require_token(&self) -> tide::Result<()>;
}
#[async_trait]
impl RequestExt for Request {
async fn require_token(&self) -> tide::Result<()> {
let token = self
.header("Authorization")
.and_then(|header| header.get(0))
.and_then(|header| header.as_str().strip_prefix("token "))
.ok_or_else(|| surf::Error::from_str(403, "invalid authorization header"))?;
if token == self.state().config.api_token {
Ok(())
} else {
Err(tide::Error::from_str(403, "invalid authorization token"))
}
}
}

View File

@@ -11,6 +11,7 @@ use oauth2::{
TokenResponse as _, TokenUrl,
};
use rand::thread_rng;
use rpc::auth as zed_auth;
use scrypt::{
password_hash::{PasswordHash, PasswordHasher, PasswordVerifier, SaltString},
Scrypt,
@@ -19,7 +20,6 @@ use serde::{Deserialize, Serialize};
use std::{borrow::Cow, convert::TryFrom, sync::Arc};
use surf::{StatusCode, Url};
use tide::{log, Error, Server};
use rpc::auth as zed_auth;
static CURRENT_GITHUB_USER: &'static str = "current_github_user";
static GITHUB_AUTH_URL: &'static str = "https://github.com/login/oauth/authorize";
@@ -112,6 +112,9 @@ pub fn add_routes(app: &mut Server<Arc<AppState>>) {
app.at("/sign_in").get(get_sign_in);
app.at("/sign_out").post(post_sign_out);
app.at("/auth_callback").get(get_auth_callback);
app.at("/native_app_signin").get(get_sign_in);
app.at("/native_app_signin_succeeded")
.get(get_app_signin_success);
}
#[derive(Debug, Deserialize)]
@@ -166,6 +169,10 @@ async fn get_sign_in(mut request: Request) -> tide::Result {
Ok(tide::Redirect::new(auth_url).into())
}
async fn get_app_signin_success(_: Request) -> tide::Result {
Ok(tide::Redirect::new("/").into())
}
async fn get_auth_callback(mut request: Request) -> tide::Result {
#[derive(Debug, Deserialize)]
struct Query {
@@ -238,12 +245,10 @@ async fn get_auth_callback(mut request: Request) -> tide::Result {
}
let access_token = create_access_token(request.db(), user_id).await?;
let native_app_public_key =
zed_auth::PublicKey::try_from(app_sign_in_params.native_app_public_key.clone())
.context("failed to parse app public key")?;
let encrypted_access_token = native_app_public_key
.encrypt_string(&access_token)
.context("failed to encrypt access token with public key")?;
let encrypted_access_token = encrypt_access_token(
&access_token,
app_sign_in_params.native_app_public_key.clone(),
)?;
return Ok(tide::Redirect::new(&format!(
"http://127.0.0.1:{}?user_id={}&access_token={}",
@@ -289,6 +294,15 @@ fn hash_access_token(token: &str) -> tide::Result<String> {
.to_string())
}
pub fn encrypt_access_token(access_token: &str, public_key: String) -> tide::Result<String> {
let native_app_public_key =
zed_auth::PublicKey::try_from(public_key).context("failed to parse app public key")?;
let encrypted_access_token = native_app_public_key
.encrypt_string(&access_token)
.context("failed to encrypt access token with public key")?;
Ok(encrypted_access_token)
}
pub fn verify_access_token(token: &str, hash: &str) -> tide::Result<bool> {
let hash = PasswordHash::new(hash)?;
Ok(Scrypt.verify_password(token.as_bytes(), &hash).is_ok())

View File

@@ -1,23 +1,13 @@
use db::{Db, UserId};
use rand::prelude::*;
use tide::log;
use time::{Duration, OffsetDateTime};
#[allow(unused)]
#[path = "../db.rs"]
mod db;
#[path = "../env.rs"]
mod env;
#[async_std::main]
async fn main() {
if let Err(error) = env::load_dotenv() {
log::error!(
"error loading .env.toml (this is expected in production): {}",
error
);
}
let mut rng = StdRng::from_entropy();
let database_url = std::env::var("DATABASE_URL").expect("missing DATABASE_URL env var");
let db = Db::new(&database_url, 5)

View File

@@ -0,0 +1,15 @@
use crate::{AppState, Request, RequestExt};
use std::sync::Arc;
use tide::http::mime;
pub fn add_routes(app: &mut tide::Server<Arc<AppState>>) {
app.at("/careers").get(get_careers);
}
async fn get_careers(mut request: Request) -> tide::Result {
let data = request.layout_data().await?;
Ok(tide::Response::builder(200)
.body(request.state().render_template("careers.hbs", &data)?)
.content_type(mime::HTML)
.build())
}

View File

@@ -84,7 +84,7 @@ impl Db {
})
}
pub async fn delete_signup(&self, id: SignupId) -> Result<()> {
pub async fn destroy_signup(&self, id: SignupId) -> Result<()> {
test_support!(self, {
let query = "DELETE FROM signups WHERE id = $1";
sqlx::query(query)
@@ -121,6 +121,11 @@ impl Db {
})
}
pub async fn get_user_by_id(&self, id: UserId) -> Result<Option<User>> {
let users = self.get_users_by_ids([id]).await?;
Ok(users.into_iter().next())
}
pub async fn get_users_by_ids(
&self,
ids: impl IntoIterator<Item = UserId>,
@@ -159,8 +164,14 @@ impl Db {
})
}
pub async fn delete_user(&self, id: UserId) -> Result<()> {
pub async fn destroy_user(&self, id: UserId) -> Result<()> {
test_support!(self, {
let query = "DELETE FROM access_tokens WHERE user_id = $1;";
sqlx::query(query)
.bind(id.0)
.execute(&self.pool)
.await
.map(drop)?;
let query = "DELETE FROM users WHERE id = $1;";
sqlx::query(query)
.bind(id.0)
@@ -443,7 +454,9 @@ impl Db {
macro_rules! id_type {
($name:ident) => {
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, sqlx::Type, Serialize)]
#[derive(
Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Hash, sqlx::Type, Serialize,
)]
#[sqlx(transparent)]
#[serde(transparent)]
pub struct $name(pub i32);

View File

@@ -1,4 +1,5 @@
mod admin;
mod api;
mod assets;
mod auth;
mod community;
@@ -11,6 +12,7 @@ mod home;
mod releases;
mod rpc;
mod team;
mod careers;
use self::errors::TideResultExt as _;
use ::rpc::Peer;
@@ -43,6 +45,7 @@ pub struct Config {
pub github_client_id: String,
pub github_client_secret: String,
pub github_private_key: String,
pub api_token: String,
}
pub struct AppState {
@@ -173,8 +176,10 @@ pub async fn run_server(
.with_same_site_policy(SameSite::Lax), // Required obtain our session in /auth_callback
);
web.with(errors::Middleware);
api::add_routes(&mut web);
home::add_routes(&mut web);
team::add_routes(&mut web);
careers::add_routes(&mut web);
releases::add_routes(&mut web);
community::add_routes(&mut web);
admin::add_routes(&mut web);

View File

@@ -2,16 +2,15 @@ use crate::{
auth::RequestExt as _, github::Release, AppState, LayoutData, Request, RequestExt as _,
};
use comrak::ComrakOptions;
use serde::{Serialize};
use serde::Serialize;
use std::sync::Arc;
use tide::{http::mime};
use tide::http::mime;
pub fn add_routes(releases: &mut tide::Server<Arc<AppState>>) {
releases.at("/releases").get(get_releases);
}
async fn get_releases(mut request: Request) -> tide::Result {
#[derive(Serialize)]
struct ReleasesData {
#[serde(flatten)]
@@ -52,4 +51,4 @@ async fn get_releases(mut request: Request) -> tide::Result {
.body(request.state().render_template("releases.hbs", &data)?)
.content_type(mime::HTML)
.build())
}
}

File diff suppressed because it is too large Load Diff

View File

@@ -1,37 +1,47 @@
use crate::db::{ChannelId, UserId};
use anyhow::anyhow;
use collections::{HashMap, HashSet};
use collections::{BTreeMap, HashMap, HashSet};
use rpc::{proto, ConnectionId};
use std::collections::hash_map;
use std::{collections::hash_map, path::PathBuf};
#[derive(Default)]
pub struct Store {
connections: HashMap<ConnectionId, ConnectionState>,
connections_by_user_id: HashMap<UserId, HashSet<ConnectionId>>,
worktrees: HashMap<u64, Worktree>,
visible_worktrees_by_user_id: HashMap<UserId, HashSet<u64>>,
projects: HashMap<u64, Project>,
visible_projects_by_user_id: HashMap<UserId, HashSet<u64>>,
channels: HashMap<ChannelId, Channel>,
next_worktree_id: u64,
next_project_id: u64,
}
struct ConnectionState {
user_id: UserId,
worktrees: HashSet<u64>,
projects: HashSet<u64>,
channels: HashSet<ChannelId>,
}
pub struct Worktree {
pub struct Project {
pub host_connection_id: ConnectionId,
pub host_user_id: UserId,
pub share: Option<ProjectShare>,
pub worktrees: HashMap<u64, Worktree>,
}
pub struct Worktree {
pub authorized_user_ids: Vec<UserId>,
pub root_name: String,
pub share: Option<WorktreeShare>,
}
pub struct WorktreeShare {
#[derive(Default)]
pub struct ProjectShare {
pub guests: HashMap<ConnectionId, (ReplicaId, UserId)>,
pub active_replica_ids: HashSet<ReplicaId>,
}
pub struct WorktreeShare {
pub entries: HashMap<u64, proto::Entry>,
pub diagnostic_summaries: BTreeMap<PathBuf, proto::DiagnosticSummary>,
}
#[derive(Default)]
@@ -43,14 +53,14 @@ pub type ReplicaId = u16;
#[derive(Default)]
pub struct RemovedConnectionState {
pub hosted_worktrees: HashMap<u64, Worktree>,
pub guest_worktree_ids: HashMap<u64, Vec<ConnectionId>>,
pub hosted_projects: HashMap<u64, Project>,
pub guest_project_ids: HashMap<u64, Vec<ConnectionId>>,
pub contact_ids: HashSet<UserId>,
}
pub struct JoinedWorktree<'a> {
pub struct JoinedProject<'a> {
pub replica_id: ReplicaId,
pub worktree: &'a Worktree,
pub project: &'a Project,
}
pub struct UnsharedWorktree {
@@ -58,7 +68,7 @@ pub struct UnsharedWorktree {
pub authorized_user_ids: Vec<UserId>,
}
pub struct LeftWorktree {
pub struct LeftProject {
pub connection_ids: Vec<ConnectionId>,
pub authorized_user_ids: Vec<UserId>,
}
@@ -69,7 +79,7 @@ impl Store {
connection_id,
ConnectionState {
user_id,
worktrees: Default::default(),
projects: Default::default(),
channels: Default::default(),
},
);
@@ -105,17 +115,15 @@ impl Store {
}
let mut result = RemovedConnectionState::default();
for worktree_id in connection.worktrees.clone() {
if let Ok(worktree) = self.remove_worktree(worktree_id, connection_id) {
for project_id in connection.projects.clone() {
if let Some(project) = self.unregister_project(project_id, connection_id) {
result.contact_ids.extend(project.authorized_user_ids());
result.hosted_projects.insert(project_id, project);
} else if let Some(project) = self.leave_project(connection_id, project_id) {
result
.contact_ids
.extend(worktree.authorized_user_ids.iter().copied());
result.hosted_worktrees.insert(worktree_id, worktree);
} else if let Some(worktree) = self.leave_worktree(connection_id, worktree_id) {
result
.guest_worktree_ids
.insert(worktree_id, worktree.connection_ids);
result.contact_ids.extend(worktree.authorized_user_ids);
.guest_project_ids
.insert(project_id, project.connection_ids);
result.contact_ids.extend(project.authorized_user_ids);
}
}
@@ -174,15 +182,15 @@ impl Store {
pub fn contacts_for_user(&self, user_id: UserId) -> Vec<proto::Contact> {
let mut contacts = HashMap::default();
for worktree_id in self
.visible_worktrees_by_user_id
for project_id in self
.visible_projects_by_user_id
.get(&user_id)
.unwrap_or(&HashSet::default())
{
let worktree = &self.worktrees[worktree_id];
let project = &self.projects[project_id];
let mut guests = HashSet::default();
if let Ok(share) = worktree.share() {
if let Ok(share) = project.share() {
for guest_connection_id in share.guests.keys() {
if let Ok(user_id) = self.user_id_for_connection(*guest_connection_id) {
guests.insert(user_id.to_proto());
@@ -190,18 +198,24 @@ impl Store {
}
}
if let Ok(host_user_id) = self.user_id_for_connection(worktree.host_connection_id) {
if let Ok(host_user_id) = self.user_id_for_connection(project.host_connection_id) {
let mut worktree_root_names = project
.worktrees
.values()
.map(|worktree| worktree.root_name.clone())
.collect::<Vec<_>>();
worktree_root_names.sort_unstable();
contacts
.entry(host_user_id)
.or_insert_with(|| proto::Contact {
user_id: host_user_id.to_proto(),
worktrees: Vec::new(),
projects: Vec::new(),
})
.worktrees
.push(proto::WorktreeMetadata {
id: *worktree_id,
root_name: worktree.root_name.clone(),
is_shared: worktree.share.is_some(),
.projects
.push(proto::ProjectMetadata {
id: *project_id,
worktree_root_names,
is_shared: project.share.is_some(),
guests: guests.into_iter().collect(),
});
}
@@ -210,107 +224,147 @@ impl Store {
contacts.into_values().collect()
}
pub fn add_worktree(&mut self, worktree: Worktree) -> u64 {
let worktree_id = self.next_worktree_id;
for authorized_user_id in &worktree.authorized_user_ids {
self.visible_worktrees_by_user_id
.entry(*authorized_user_id)
.or_default()
.insert(worktree_id);
}
self.next_worktree_id += 1;
if let Some(connection) = self.connections.get_mut(&worktree.host_connection_id) {
connection.worktrees.insert(worktree_id);
}
self.worktrees.insert(worktree_id, worktree);
#[cfg(test)]
self.check_invariants();
worktree_id
pub fn register_project(
&mut self,
host_connection_id: ConnectionId,
host_user_id: UserId,
) -> u64 {
let project_id = self.next_project_id;
self.projects.insert(
project_id,
Project {
host_connection_id,
host_user_id,
share: None,
worktrees: Default::default(),
},
);
self.next_project_id += 1;
project_id
}
pub fn remove_worktree(
pub fn register_worktree(
&mut self,
project_id: u64,
worktree_id: u64,
worktree: Worktree,
) -> bool {
if let Some(project) = self.projects.get_mut(&project_id) {
for authorized_user_id in &worktree.authorized_user_ids {
self.visible_projects_by_user_id
.entry(*authorized_user_id)
.or_default()
.insert(project_id);
}
if let Some(connection) = self.connections.get_mut(&project.host_connection_id) {
connection.projects.insert(project_id);
}
project.worktrees.insert(worktree_id, worktree);
#[cfg(test)]
self.check_invariants();
true
} else {
false
}
}
pub fn unregister_project(
&mut self,
project_id: u64,
connection_id: ConnectionId,
) -> Option<Project> {
match self.projects.entry(project_id) {
hash_map::Entry::Occupied(e) => {
if e.get().host_connection_id == connection_id {
for user_id in e.get().authorized_user_ids() {
if let hash_map::Entry::Occupied(mut projects) =
self.visible_projects_by_user_id.entry(user_id)
{
projects.get_mut().remove(&project_id);
}
}
Some(e.remove())
} else {
None
}
}
hash_map::Entry::Vacant(_) => None,
}
}
pub fn unregister_worktree(
&mut self,
project_id: u64,
worktree_id: u64,
acting_connection_id: ConnectionId,
) -> tide::Result<Worktree> {
let worktree = if let hash_map::Entry::Occupied(e) = self.worktrees.entry(worktree_id) {
if e.get().host_connection_id != acting_connection_id {
Err(anyhow!("not your worktree"))?;
}
e.remove()
} else {
return Err(anyhow!("no such worktree"))?;
};
if let Some(connection) = self.connections.get_mut(&worktree.host_connection_id) {
connection.worktrees.remove(&worktree_id);
) -> tide::Result<(Worktree, Vec<ConnectionId>)> {
let project = self
.projects
.get_mut(&project_id)
.ok_or_else(|| anyhow!("no such project"))?;
if project.host_connection_id != acting_connection_id {
Err(anyhow!("not your worktree"))?;
}
if let Some(share) = &worktree.share {
for connection_id in share.guests.keys() {
if let Some(connection) = self.connections.get_mut(connection_id) {
connection.worktrees.remove(&worktree_id);
let worktree = project
.worktrees
.remove(&worktree_id)
.ok_or_else(|| anyhow!("no such worktree"))?;
let mut guest_connection_ids = Vec::new();
if let Some(share) = &project.share {
guest_connection_ids.extend(share.guests.keys());
}
for authorized_user_id in &worktree.authorized_user_ids {
if let Some(visible_projects) =
self.visible_projects_by_user_id.get_mut(authorized_user_id)
{
if !project.has_authorized_user_id(*authorized_user_id) {
visible_projects.remove(&project_id);
}
}
}
for authorized_user_id in &worktree.authorized_user_ids {
if let Some(visible_worktrees) = self
.visible_worktrees_by_user_id
.get_mut(&authorized_user_id)
{
visible_worktrees.remove(&worktree_id);
}
}
#[cfg(test)]
self.check_invariants();
Ok(worktree)
Ok((worktree, guest_connection_ids))
}
pub fn share_worktree(
&mut self,
worktree_id: u64,
connection_id: ConnectionId,
entries: HashMap<u64, proto::Entry>,
) -> Option<Vec<UserId>> {
if let Some(worktree) = self.worktrees.get_mut(&worktree_id) {
if worktree.host_connection_id == connection_id {
worktree.share = Some(WorktreeShare {
guests: Default::default(),
active_replica_ids: Default::default(),
entries,
});
return Some(worktree.authorized_user_ids.clone());
pub fn share_project(&mut self, project_id: u64, connection_id: ConnectionId) -> bool {
if let Some(project) = self.projects.get_mut(&project_id) {
if project.host_connection_id == connection_id {
project.share = Some(ProjectShare::default());
return true;
}
}
None
false
}
pub fn unshare_worktree(
pub fn unshare_project(
&mut self,
worktree_id: u64,
project_id: u64,
acting_connection_id: ConnectionId,
) -> tide::Result<UnsharedWorktree> {
let worktree = if let Some(worktree) = self.worktrees.get_mut(&worktree_id) {
worktree
let project = if let Some(project) = self.projects.get_mut(&project_id) {
project
} else {
return Err(anyhow!("no such worktree"))?;
return Err(anyhow!("no such project"))?;
};
if worktree.host_connection_id != acting_connection_id {
return Err(anyhow!("not your worktree"))?;
if project.host_connection_id != acting_connection_id {
return Err(anyhow!("not your project"))?;
}
let connection_ids = worktree.connection_ids();
let authorized_user_ids = worktree.authorized_user_ids.clone();
if let Some(share) = worktree.share.take() {
let connection_ids = project.connection_ids();
let authorized_user_ids = project.authorized_user_ids();
if let Some(share) = project.share.take() {
for connection_id in share.guests.into_keys() {
if let Some(connection) = self.connections.get_mut(&connection_id) {
connection.worktrees.remove(&worktree_id);
connection.projects.remove(&project_id);
}
}
@@ -322,34 +376,76 @@ impl Store {
authorized_user_ids,
})
} else {
Err(anyhow!("worktree is not shared"))?
Err(anyhow!("project is not shared"))?
}
}
pub fn join_worktree(
pub fn share_worktree(
&mut self,
project_id: u64,
worktree_id: u64,
connection_id: ConnectionId,
entries: HashMap<u64, proto::Entry>,
diagnostic_summaries: BTreeMap<PathBuf, proto::DiagnosticSummary>,
) -> Option<Vec<UserId>> {
let project = self.projects.get_mut(&project_id)?;
let worktree = project.worktrees.get_mut(&worktree_id)?;
if project.host_connection_id == connection_id && project.share.is_some() {
worktree.share = Some(WorktreeShare {
entries,
diagnostic_summaries,
});
Some(project.authorized_user_ids())
} else {
None
}
}
pub fn update_diagnostic_summary(
&mut self,
project_id: u64,
worktree_id: u64,
connection_id: ConnectionId,
summary: proto::DiagnosticSummary,
) -> Option<Vec<ConnectionId>> {
let project = self.projects.get_mut(&project_id)?;
let worktree = project.worktrees.get_mut(&worktree_id)?;
if project.host_connection_id == connection_id {
if let Some(share) = worktree.share.as_mut() {
share
.diagnostic_summaries
.insert(summary.path.clone().into(), summary);
return Some(project.connection_ids());
}
}
None
}
pub fn join_project(
&mut self,
connection_id: ConnectionId,
user_id: UserId,
worktree_id: u64,
) -> tide::Result<JoinedWorktree> {
project_id: u64,
) -> tide::Result<JoinedProject> {
let connection = self
.connections
.get_mut(&connection_id)
.ok_or_else(|| anyhow!("no such connection"))?;
let worktree = self
.worktrees
.get_mut(&worktree_id)
.and_then(|worktree| {
if worktree.authorized_user_ids.contains(&user_id) {
Some(worktree)
let project = self
.projects
.get_mut(&project_id)
.and_then(|project| {
if project.has_authorized_user_id(user_id) {
Some(project)
} else {
None
}
})
.ok_or_else(|| anyhow!("no such worktree"))?;
.ok_or_else(|| anyhow!("no such project"))?;
let share = worktree.share_mut()?;
connection.worktrees.insert(worktree_id);
let share = project.share_mut()?;
connection.projects.insert(project_id);
let mut replica_id = 1;
while share.active_replica_ids.contains(&replica_id) {
@@ -361,33 +457,33 @@ impl Store {
#[cfg(test)]
self.check_invariants();
Ok(JoinedWorktree {
Ok(JoinedProject {
replica_id,
worktree: &self.worktrees[&worktree_id],
project: &self.projects[&project_id],
})
}
pub fn leave_worktree(
pub fn leave_project(
&mut self,
connection_id: ConnectionId,
worktree_id: u64,
) -> Option<LeftWorktree> {
let worktree = self.worktrees.get_mut(&worktree_id)?;
let share = worktree.share.as_mut()?;
project_id: u64,
) -> Option<LeftProject> {
let project = self.projects.get_mut(&project_id)?;
let share = project.share.as_mut()?;
let (replica_id, _) = share.guests.remove(&connection_id)?;
share.active_replica_ids.remove(&replica_id);
if let Some(connection) = self.connections.get_mut(&connection_id) {
connection.worktrees.remove(&worktree_id);
connection.projects.remove(&project_id);
}
let connection_ids = worktree.connection_ids();
let authorized_user_ids = worktree.authorized_user_ids.clone();
let connection_ids = project.connection_ids();
let authorized_user_ids = project.authorized_user_ids();
#[cfg(test)]
self.check_invariants();
Some(LeftWorktree {
Some(LeftProject {
connection_ids,
authorized_user_ids,
})
@@ -396,115 +492,80 @@ impl Store {
pub fn update_worktree(
&mut self,
connection_id: ConnectionId,
project_id: u64,
worktree_id: u64,
removed_entries: &[u64],
updated_entries: &[proto::Entry],
) -> tide::Result<Vec<ConnectionId>> {
let worktree = self.write_worktree(worktree_id, connection_id)?;
let share = worktree.share_mut()?;
) -> Option<Vec<ConnectionId>> {
let project = self.write_project(project_id, connection_id)?;
let share = project.worktrees.get_mut(&worktree_id)?.share.as_mut()?;
for entry_id in removed_entries {
share.entries.remove(&entry_id);
}
for entry in updated_entries {
share.entries.insert(entry.id, entry.clone());
}
Ok(worktree.connection_ids())
Some(project.connection_ids())
}
pub fn worktree_host_connection_id(
pub fn project_connection_ids(
&self,
connection_id: ConnectionId,
worktree_id: u64,
) -> tide::Result<ConnectionId> {
Ok(self
.read_worktree(worktree_id, connection_id)?
.host_connection_id)
}
pub fn worktree_guest_connection_ids(
&self,
connection_id: ConnectionId,
worktree_id: u64,
) -> tide::Result<Vec<ConnectionId>> {
Ok(self
.read_worktree(worktree_id, connection_id)?
.share()?
.guests
.keys()
.copied()
.collect())
}
pub fn worktree_connection_ids(
&self,
connection_id: ConnectionId,
worktree_id: u64,
) -> tide::Result<Vec<ConnectionId>> {
Ok(self
.read_worktree(worktree_id, connection_id)?
.connection_ids())
project_id: u64,
acting_connection_id: ConnectionId,
) -> Option<Vec<ConnectionId>> {
Some(
self.read_project(project_id, acting_connection_id)?
.connection_ids(),
)
}
pub fn channel_connection_ids(&self, channel_id: ChannelId) -> Option<Vec<ConnectionId>> {
Some(self.channels.get(&channel_id)?.connection_ids())
}
fn read_worktree(
&self,
worktree_id: u64,
connection_id: ConnectionId,
) -> tide::Result<&Worktree> {
let worktree = self
.worktrees
.get(&worktree_id)
.ok_or_else(|| anyhow!("worktree not found"))?;
#[cfg(test)]
pub fn project(&self, project_id: u64) -> Option<&Project> {
self.projects.get(&project_id)
}
if worktree.host_connection_id == connection_id
|| worktree.share()?.guests.contains_key(&connection_id)
pub fn read_project(&self, project_id: u64, connection_id: ConnectionId) -> Option<&Project> {
let project = self.projects.get(&project_id)?;
if project.host_connection_id == connection_id
|| project.share.as_ref()?.guests.contains_key(&connection_id)
{
Ok(worktree)
Some(project)
} else {
Err(anyhow!(
"{} is not a member of worktree {}",
connection_id,
worktree_id
))?
None
}
}
fn write_worktree(
fn write_project(
&mut self,
worktree_id: u64,
project_id: u64,
connection_id: ConnectionId,
) -> tide::Result<&mut Worktree> {
let worktree = self
.worktrees
.get_mut(&worktree_id)
.ok_or_else(|| anyhow!("worktree not found"))?;
if worktree.host_connection_id == connection_id
|| worktree
.share
.as_ref()
.map_or(false, |share| share.guests.contains_key(&connection_id))
) -> Option<&mut Project> {
let project = self.projects.get_mut(&project_id)?;
if project.host_connection_id == connection_id
|| project.share.as_ref()?.guests.contains_key(&connection_id)
{
Ok(worktree)
Some(project)
} else {
Err(anyhow!(
"{} is not a member of worktree {}",
connection_id,
worktree_id
))?
None
}
}
#[cfg(test)]
fn check_invariants(&self) {
for (connection_id, connection) in &self.connections {
for worktree_id in &connection.worktrees {
let worktree = &self.worktrees.get(&worktree_id).unwrap();
if worktree.host_connection_id != *connection_id {
assert!(worktree.share().unwrap().guests.contains_key(connection_id));
for project_id in &connection.projects {
let project = &self.projects.get(&project_id).unwrap();
if project.host_connection_id != *connection_id {
assert!(project
.share
.as_ref()
.unwrap()
.guests
.contains_key(connection_id));
}
}
for channel_id in &connection.channels {
@@ -527,22 +588,22 @@ impl Store {
}
}
for (worktree_id, worktree) in &self.worktrees {
let host_connection = self.connections.get(&worktree.host_connection_id).unwrap();
assert!(host_connection.worktrees.contains(worktree_id));
for (project_id, project) in &self.projects {
let host_connection = self.connections.get(&project.host_connection_id).unwrap();
assert!(host_connection.projects.contains(project_id));
for authorized_user_ids in &worktree.authorized_user_ids {
let visible_worktree_ids = self
.visible_worktrees_by_user_id
.get(authorized_user_ids)
for authorized_user_ids in project.authorized_user_ids() {
let visible_project_ids = self
.visible_projects_by_user_id
.get(&authorized_user_ids)
.unwrap();
assert!(visible_worktree_ids.contains(worktree_id));
assert!(visible_project_ids.contains(project_id));
}
if let Some(share) = &worktree.share {
if let Some(share) = &project.share {
for guest_connection_id in share.guests.keys() {
let guest_connection = self.connections.get(guest_connection_id).unwrap();
assert!(guest_connection.worktrees.contains(worktree_id));
assert!(guest_connection.projects.contains(project_id));
}
assert_eq!(share.active_replica_ids.len(), share.guests.len(),);
assert_eq!(
@@ -556,10 +617,10 @@ impl Store {
}
}
for (user_id, visible_worktree_ids) in &self.visible_worktrees_by_user_id {
for worktree_id in visible_worktree_ids {
let worktree = self.worktrees.get(worktree_id).unwrap();
assert!(worktree.authorized_user_ids.contains(user_id));
for (user_id, visible_project_ids) in &self.visible_projects_by_user_id {
for project_id in visible_project_ids {
let project = self.projects.get(project_id).unwrap();
assert!(project.authorized_user_ids().contains(user_id));
}
}
@@ -572,7 +633,33 @@ impl Store {
}
}
impl Worktree {
impl Project {
pub fn has_authorized_user_id(&self, user_id: UserId) -> bool {
self.worktrees
.values()
.any(|worktree| worktree.authorized_user_ids.contains(&user_id))
}
pub fn authorized_user_ids(&self) -> Vec<UserId> {
let mut ids = self
.worktrees
.values()
.flat_map(|worktree| worktree.authorized_user_ids.iter())
.copied()
.collect::<Vec<_>>();
ids.sort_unstable();
ids.dedup();
ids
}
pub fn guest_connection_ids(&self) -> Vec<ConnectionId> {
if let Some(share) = &self.share {
share.guests.keys().copied().collect()
} else {
Vec::new()
}
}
pub fn connection_ids(&self) -> Vec<ConnectionId> {
if let Some(share) = &self.share {
share
@@ -586,14 +673,14 @@ impl Worktree {
}
}
pub fn share(&self) -> tide::Result<&WorktreeShare> {
pub fn share(&self) -> tide::Result<&ProjectShare> {
Ok(self
.share
.as_ref()
.ok_or_else(|| anyhow!("worktree is not shared"))?)
}
fn share_mut(&mut self) -> tide::Result<&mut WorktreeShare> {
fn share_mut(&mut self) -> tide::Result<&mut ProjectShare> {
Ok(self
.share
.as_mut()

View File

@@ -24,13 +24,15 @@
<div class="flex flex-row items-center justify-between p-5 border-b border-white pl-7 pr-7 admin-nav">
<h1 class="font-display font-extralight">Admin</h1>
<ul class="flex flex-row">
<li><a href="#userlist" class="mr-4 leading-relaxed no-underline lowercase text-main hover:underline">Users</a></li>
<li><a href="#signuplist" class="leading-relaxed no-underline lowercase text-main hover:underline">Signups</a></li>
<li><a href="#userlist"
class="mr-4 leading-relaxed no-underline lowercase text-main hover:underline">Users</a></li>
<li><a href="#signuplist"
class="leading-relaxed no-underline lowercase text-main hover:underline">Signups</a></li>
</ul>
</div>
<h2 id="userlist" class="pt-10 mb-5 text-white pl-7 pr-7 font-display font-extralight">Users</h2>
<div class="flex flex-col w-full pb-5 font-mono text-xs" id="users">
<div class="flex flex-row pl-5 pr-10 font-bold">
<p class="w-1/3 p-2">Github Username</p>
@@ -38,16 +40,17 @@
<p class="w-24 p-2"> </p>
</div>
<div class="flex flex-col pl-5 pr-10 text-gray-100">
<form action="/users" method="post" class="m-0">
<form action="/admin/users" method="post" class="m-0">
<div class="flex flex-row items-center">
<p class="w-1/3 p-2"><input class="block w-full p-2 text-xs bg-transparent border border-white" type="text" name="github_login" required minlength="4" placeholder="@github_handle"></p>
<p class="w-1/3 p-2"><input class="block w-full p-2 text-xs bg-transparent border border-white"
type="text" name="github_login" required minlength="4" placeholder="@github_handle"></p>
<p class="w-32 p-2 text-center"><input type="checkbox" id="admin" name="admin" value="true"></p>
<p class="w-24 p-2"><button class="underline hover:no-underline">Add</button></p>
</div>
</form>
</div>
</div>
<div class="flex flex-col w-full pb-10 font-mono text-xs border-b border-white">
<div class="flex flex-row pl-5 pr-10 font-bold">
<p class="w-1/3 p-2">Github Username</p>
@@ -56,10 +59,11 @@
</div>
{{#each users}}
<div class="flex flex-col pl-5 pr-10 text-gray-100 alternate-bg">
<form action="/users/{{id}}/delete" method="post" class="m-0">
<form action="/admin/users/{{id}}/delete" method="post" class="m-0">
<div class="flex flex-row items-center">
<p class="w-1/3 p-2">{{github_login}}</p>
<p class="w-32 p-2 text-center"><input action="/users/{{id}}" type="checkbox" {{#if admin}}checked{{/if}}></p>
<p class="w-32 p-2 text-center"><input action="/admin/users/{{id}}" type="checkbox" {{#if
admin}}checked{{/if}}></p>
<p class="w-24 p-2"><button class="underline hover:no-underline">Remove</button></p>
</div>
</form>
@@ -70,7 +74,7 @@
<h2 class="pt-10 mb-5 text-white pl-7 pr-7 font-display font-extralight">Signups</h2>
<div class="flex flex-col w-full pb-10 font-mono text-xs border-b border-white">
<div class="flex flex-row justify-between pl-5 pr-10 font-bold">
<p class="w-1/5 p-2">Email</p>
<p class="w-1/5 p-2">Github</p>
@@ -81,17 +85,19 @@
</div>
{{#each signups}}
<div class="flex flex-col pb-1 pl-5 pr-10 text-gray-100 alternate-bg">
<form action="/signups/{{id}}/delete" method="post" class="m-0">
<form action="/admin/signups/{{id}}/delete" method="post" class="m-0">
<div class="flex flex-row items-center justify-between">
<p class="w-1/5 p-2 pb-1">{{email_address}}</p>
<p class="w-1/5 p-2 pb-1">{{github_login}}</p>
<p class="w-24 p-2 pb-1 text-center">{{#if wants_releases}}[✓]{{else}}[ ]{{/if}}</p>
<p class="w-24 p-2 pb-1 text-center">{{#if wants_updates}}[✓]{{else}}[ ]{{/if}}</p>
<p class="w-24 p-2 pb-1 text-center">{{#if wants_community}}[✓]{{else}}[ ]{{/if}}</p>
<p class="w-24 p-2 pb-1 text-right"><button class="text-lg text-gray-500 hover:text-white">×</button></p>
<p class="w-24 p-2 pb-1 text-right"><button
class="text-lg text-gray-500 hover:text-white">×</button></p>
</div>
</form>
<p class="max-w-full p-2 pt-0 overflow-hidden leading-normal text-gray-400 max-h-5 whitespace-nowrap overflow-ellipsis" title="{{about}}">{{about}}</p>
<p class="max-w-full p-2 pt-0 overflow-hidden leading-normal text-gray-400 max-h-5 whitespace-nowrap overflow-ellipsis"
title="{{about}}">{{about}}</p>
</div>
{{/each}}
</div>

View File

@@ -0,0 +1,20 @@
{{#> layout }}
<div class="max-w-screen-lg p-5 mx-auto font-extralight text-main lg:p-20">
<h1 class="mb-10 text-4xl text-white font-display font-extralight">
We think there's a better way to write code&mdash;and we've been working for more than a decade to bring it into existence.
</h1>
<p class="mt-5 leading-relaxed">Zed is looking for a Rust engineer to help us advance the art of code editing. We're currently three engineers and a designer developing a new editor with a focus on performance, clean design, and collaboration.</p>
<p class="mt-5 leading-relaxed">We're working on exciting technologies, including a custom, GPU-accelerated UI framework, heavy use of parallelism and persistent data structures, conflict-free replicated data types, and an incremental parsing framework called Tree-sitter that was created by one of our founders.</p>
<p class="mt-5 leading-relaxed">Our codebase consists of about 60,000 lines of well-factored, thoroughly-tested Rust that compiles quickly. In addition to the Zed editor, we're also developing a server-side component to facilitate collaboration that tightly integrates with the client.</p>
<p class="mt-10 leading-relaxed">We'd love to hear from you if you're excited to build the world's best code editor and meet this description:</p>
<ul class="mt-10 mb-16 leading-relaxed">
<li class="mt-5 leading-relaxed">You're experienced in Rust. You've developed a substantial piece of software or meaningfully contributed to an open source project.</li>
<li class="mt-5 leading-relaxed">You'd be excited to to pair program with us multiple times a week to learn the codebase.</li>
<li class="mt-5 leading-relaxed">You know how to maintain code quality while being pragmatic and prioritizing the needs of the customer.</li>
<li class="mt-5 leading-relaxed">You know how to write performant software, including algorithmic analysis and profile-driven optimization.</li>
<li class="mt-5 leading-relaxed">Bonus: You have experience working on code editors, compilers, distributed systems, CRDTs, or real-time graphics.</li>
</ul>
</div>
{{/layout}}

View File

@@ -4,7 +4,7 @@
<article class="">
<h1 class="mb-10 text-4xl leading-tight text-white font-display font-extralight">Introducing Zed&mdash;A lightning-fast, collaborative code editor written in Rust.</h1>
<p class="mt-5 leading-relaxed">
We think theres a better way to write code, and it starts with the following assumptions:
We think there's a better way to write code, and it starts with the following assumptions:
</p>
<h3 class="mt-10 leading-tight text-white font-display font-extralight">Mission-critical tools should be hyper-responsive.</h3>
@@ -20,11 +20,11 @@
<h3 class="mt-10 leading-tight text-white font-display font-extralight">Real-time collaboration produces better software.</h3>
<p class="mt-3 leading-relaxed">
Pull requests have their place, but sometimes you just want to code together. Its more fun than code review, builds trust between colleagues, and its one of the best ways to distribute knowledge through a team. But it has to be easy.
Pull requests have their place, but sometimes you just want to code together. It's more fun than code review, builds trust between colleagues, and it's one of the best ways to distribute knowledge through a team. But it has to be easy.
</p>
<p class="mt-5 leading-relaxed">
If you want someones perspective on a piece of code, it should be as easy as sending a message to pull them into your working copy and start coding.
If you want someone's perspective on a piece of code, it should be as easy as sending a message to pull them into your working copy and start coding.
</p>
<h3 class="mt-10 leading-tight text-white font-display font-extralight">Conversations about software should happen close to the code.</h3>
@@ -40,7 +40,7 @@
<h3 class="mt-10 leading-tight text-white font-display font-extralight">Our goal is to make you as efficient as possible.</h3>
<p class="mt-3 leading-relaxed">
If youre living in a tool for hours every day, you want it to disappear. Every pixel must carry its weight, and the software youre creating should always be the focus. So we constantly ask ourselves how we can maximize signal and minimize noise.
If you're living in a tool for hours every day, you want it to disappear. Every pixel must carry its weight, and the software you're creating should always be the focus. So we constantly ask ourselves how we can maximize signal and minimize noise.
</p>
<p class="mt-5 leading-relaxed">
@@ -49,15 +49,21 @@
<p class="mt-10 mb-10 leading-relaxed">&mdash;&mdash;&mdash;</p>
<p class="mt-5 leading-relaxed">
We're looking to add another engineer to the team. Want to help us build the future? <a href="/careers">Join us</a>.
</p>
<p class="mt-10 mb-10 leading-relaxed">&mdash;&mdash;&mdash;</p>
<h2 class="mt-16 mb-5 leading-tight text-white font-display font-extralight">Under the hood</h2>
<article class="leading-relaxed">
<h3 class="mt-10 leading-tight text-white font-display font-extralight">Building in Rust</h3>
<p class="mt-3">Rust offers expressivity and developer productivity rivaling languages that depend on a VM, while simultaneously offering the control required to fully utilize the underlying hardware.</p>
<p class="mt-5">Rusts unique ownership model is a challenge at the beginning, but once you internalize it, you can write extremely efficient multi-core code without fear of invoking undefined behavior.</p>
<p class="mt-5">Rust's unique ownership model is a challenge at the beginning, but once you internalize it, you can write extremely efficient multi-core code without fear of invoking undefined behavior.</p>
<p class="mt-5">It also makes it straightforward to call directly into the C-based APIs of the operating system. Rust makes it possible for a small team to build a complex product quickly, and Zed wouldnt have been possible without it.</p>
<p class="mt-5">It also makes it straightforward to call directly into the C-based APIs of the operating system. Rust makes it possible for a small team to build a complex product quickly, and Zed wouldn't have been possible without it.</p>
<p class="mt-5">In the past, to write software with this performant you would need to use C++. Rust, for the first time, enables us to write software at that level as a very small team.</p>
</article>
@@ -68,11 +74,11 @@
<p class="mt-5"></p>We call it GPUI.</p>
<p class="mt-5">We took a lot of inspiration from Mozillas Webrender project. The key insight was that modern graphics hardware can render complex 3D graphics at high frame rates, so why not use it to render relatively simple 2D user interfaces with an immediate mode architecture? </p>
<p class="mt-5">We took a lot of inspiration from Mozilla's Webrender project. The key insight was that modern graphics hardware can render complex 3D graphics at high frame rates, so why not use it to render relatively simple 2D user interfaces with an immediate mode architecture? </p>
<p class="mt-5">Rusts ownership model required us to rethink much of what we learned in other UI programming paradigms, but the result is a framework thats productive to use and remarkably easy to reason about.</p>
<p class="mt-5">Rust's ownership model required us to rethink much of what we learned in other UI programming paradigms, but the result is a framework that's productive to use and remarkably easy to reason about.</p>
<p class="mt-5">Its liberating to control every pixel, and its a rush to push those pixels at lightning speed.</p>
<p class="mt-5">It's liberating to control every pixel, and it's a rush to push those pixels at lightning speed.</p>
</article>
<article class="leading-relaxed">
@@ -82,17 +88,17 @@
<p class="mt-5">After being applied locally, edits are transmitted to collaborators over the network, whose copies may have also changed in the meantime. This means that as participants edit together, their replicas continuously diverge and reconverge. Turns out this is a tricky problem.</p>
<p class="mt-5">To solve it, were using conflict-free replicated data types, which have emerged in the last decade as a general framework for achieving eventual consistency in a variety of circumstances.</p>
<p class="mt-5">To solve it, we're using conflict-free replicated data types, which have emerged in the last decade as a general framework for achieving eventual consistency in a variety of circumstances.</p>
<p class="mt-5">Making Zeds buffers CRDTs allows for collaborative editing, but it also helps us reason about concurrent change so that we can push work into background threads and keep the UI thread responsive.</p>
<p class="mt-5">Making Zed's buffers CRDTs allows for collaborative editing, but it also helps us reason about concurrent change so that we can push work into background threads and keep the UI thread responsive.</p>
</article>
<article class="leading-relaxed">
<h3 class="mt-10 mb-4 leading-tight text-white font-display font-extralight">Tree-sitter</h3>
<p class="mt-3">We plan to integrate with the Language Server Protocol to support advanced IDE features, but we also think its important for a code editor to have a rich, native understanding of syntax.</p>
<p class="mt-3">We plan to integrate with the Language Server Protocol to support advanced IDE features, but we also think it's important for a code editor to have a rich, native understanding of syntax.</p>
<p class="mt-5">Thats why we built Tree-sitter, a fast, general, incremental parsing library that can provide Zed with syntax trees for over 50 languages. Tree-sitter already powers production functionality on GitHub, and well use it to deliver syntactically-precise syntax highlighting, tree-based selection and cursor navigation, robust auto-indent support, symbolic navigation, and more.</p>
<p class="mt-5">That's why we built Tree-sitter, a fast, general, incremental parsing library that can provide Zed with syntax trees for over 50 languages. Tree-sitter already powers production functionality on GitHub, and we'll use it to deliver syntactically-precise syntax highlighting, tree-based selection and cursor navigation, robust auto-indent support, symbolic navigation, and more.</p>
</article>
<p class="mt-10 mb-10 leading-relaxed">&mdash;&mdash;&mdash;</p>

View File

@@ -114,6 +114,12 @@
</span>
The Team
</a>
<a href="/careers" class="relative mt-2 text-base no-underline lowercase hover:underline">
<span class="absolute items-center justify-center hidden h-full align-middle nav-active-arrow -left-7">
<svg width="16" height="16" viewBox="0 0 16 16" fill="none" xmlns="http://www.w3.org/2000/svg"><path d="M10.1594 12.1606C10.0309 12.0567 9.96803 11.8899 9.96803 11.7204C9.96803 11.5509 10.0321 11.3846 10.1603 11.2564L12.7596 8.65813H1.6562C1.29392 8.65813 1 8.36284 1 7.97732C1 7.63829 1.29392 7.34573 1.6562 7.34573H12.7596L10.1613 4.74747C9.90501 4.49114 9.90501 4.07596 10.1613 3.81949C10.4177 3.56303 10.8328 3.56317 11.0893 3.81949L14.8078 7.53794C15.0641 7.79427 15.0641 8.20945 14.8078 8.46592L11.0893 12.1844C10.832 12.4395 10.4164 12.4395 10.1594 12.1606Z" fill="white"/></svg>
</span>
Careers
</a>
<a href="/community" class="relative mt-2 text-base no-underline lowercase hover:underline">
<span class="absolute items-center justify-center hidden h-full align-middle nav-active-arrow -left-7">
<svg width="16" height="16" viewBox="0 0 16 16" fill="none" xmlns="http://www.w3.org/2000/svg"><path d="M10.1594 12.1606C10.0309 12.0567 9.96803 11.8899 9.96803 11.7204C9.96803 11.5509 10.0321 11.3846 10.1603 11.2564L12.7596 8.65813H1.6562C1.29392 8.65813 1 8.36284 1 7.97732C1 7.63829 1.29392 7.34573 1.6562 7.34573H12.7596L10.1613 4.74747C9.90501 4.49114 9.90501 4.07596 10.1613 3.81949C10.4177 3.56303 10.8328 3.56317 11.0893 3.81949L14.8078 7.53794C15.0641 7.79427 15.0641 8.20945 14.8078 8.46592L11.0893 12.1844C10.832 12.4395 10.4164 12.4395 10.1594 12.1606Z" fill="white"/></svg>

View File

@@ -18,6 +18,11 @@ pub struct Cursor<'a, T: Item, D> {
at_end: bool,
}
pub struct Iter<'a, T: Item> {
tree: &'a SumTree<T>,
stack: ArrayVec<StackEntry<'a, T, ()>, 16>,
}
impl<'a, T, D> Cursor<'a, T, D>
where
T: Item,
@@ -487,6 +492,71 @@ where
}
}
impl<'a, T: Item> Iter<'a, T> {
pub(crate) fn new(tree: &'a SumTree<T>) -> Self {
Self {
tree,
stack: Default::default(),
}
}
}
impl<'a, T: Item> Iterator for Iter<'a, T> {
type Item = &'a T;
fn next(&mut self) -> Option<Self::Item> {
let mut descend = false;
if self.stack.is_empty() {
self.stack.push(StackEntry {
tree: self.tree,
index: 0,
position: (),
});
descend = true;
}
while self.stack.len() > 0 {
let new_subtree = {
let entry = self.stack.last_mut().unwrap();
match entry.tree.0.as_ref() {
Node::Internal { child_trees, .. } => {
if !descend {
entry.index += 1;
}
child_trees.get(entry.index)
}
Node::Leaf { items, .. } => {
if !descend {
entry.index += 1;
}
if let Some(next_item) = items.get(entry.index) {
return Some(next_item);
} else {
None
}
}
}
};
if let Some(subtree) = new_subtree {
descend = true;
self.stack.push(StackEntry {
tree: subtree,
index: 0,
position: (),
});
} else {
descend = false;
self.stack.pop();
}
}
None
}
}
impl<'a, T, S, D> Iterator for Cursor<'a, T, D>
where
T: Item<Summary = S>,

View File

@@ -1,10 +1,11 @@
mod cursor;
mod tree_map;
use arrayvec::ArrayVec;
pub use cursor::Cursor;
pub use cursor::FilterCursor;
pub use cursor::{Cursor, FilterCursor, Iter};
use std::marker::PhantomData;
use std::{cmp::Ordering, fmt, iter::FromIterator, sync::Arc};
pub use tree_map::TreeMap;
#[cfg(test)]
const TREE_BASE: usize = 2;
@@ -156,6 +157,10 @@ impl<T: Item> SumTree<T> {
items
}
pub fn iter(&self) -> Iter<T> {
Iter::new(self)
}
pub fn cursor<'a, S>(&'a self) -> Cursor<T, S>
where
S: Dimension<'a, T::Summary>,
@@ -722,6 +727,10 @@ mod tests {
};
assert_eq!(tree.items(&()), reference_items);
assert_eq!(
tree.iter().collect::<Vec<_>>(),
tree.cursor::<()>().collect::<Vec<_>>()
);
let mut filter_cursor =
tree.filter::<_, Count>(|summary| summary.contains_even, &());

View File

@@ -0,0 +1,173 @@
use std::{cmp::Ordering, fmt::Debug};
use crate::{Bias, Dimension, Item, KeyedItem, SeekTarget, SumTree, Summary};
#[derive(Clone)]
pub struct TreeMap<K, V>(SumTree<MapEntry<K, V>>)
where
K: Clone + Debug + Default + Ord,
V: Clone + Debug;
#[derive(Clone)]
pub struct MapEntry<K, V> {
key: K,
value: V,
}
#[derive(Clone, Debug, Default, PartialEq, Eq, PartialOrd, Ord)]
pub struct MapKey<K>(K);
#[derive(Clone, Debug, Default)]
pub struct MapKeyRef<'a, K>(Option<&'a K>);
impl<K: Clone + Debug + Default + Ord, V: Clone + Debug> TreeMap<K, V> {
pub fn from_ordered_entries(entries: impl IntoIterator<Item = (K, V)>) -> Self {
let tree = SumTree::from_iter(
entries
.into_iter()
.map(|(key, value)| MapEntry { key, value }),
&(),
);
Self(tree)
}
pub fn get<'a>(&self, key: &'a K) -> Option<&V> {
let mut cursor = self.0.cursor::<MapKeyRef<'_, K>>();
cursor.seek(&MapKeyRef(Some(key)), Bias::Left, &());
if let Some(item) = cursor.item() {
if *key == item.key().0 {
Some(&item.value)
} else {
None
}
} else {
None
}
}
pub fn insert(&mut self, key: K, value: V) {
self.0.insert_or_replace(MapEntry { key, value }, &());
}
pub fn remove<'a>(&mut self, key: &'a K) -> Option<V> {
let mut removed = None;
let mut cursor = self.0.cursor::<MapKeyRef<'_, K>>();
let key = MapKeyRef(Some(key));
let mut new_tree = cursor.slice(&key, Bias::Left, &());
if key.cmp(&cursor.end(&()), &()) == Ordering::Equal {
removed = Some(cursor.item().unwrap().value.clone());
cursor.next(&());
}
new_tree.push_tree(cursor.suffix(&()), &());
drop(cursor);
self.0 = new_tree;
removed
}
pub fn iter<'a>(&'a self) -> impl 'a + Iterator<Item = (&'a K, &'a V)> {
self.0.iter().map(|entry| (&entry.key, &entry.value))
}
}
impl<K, V> Default for TreeMap<K, V>
where
K: Clone + Debug + Default + Ord,
V: Clone + Debug,
{
fn default() -> Self {
Self(Default::default())
}
}
impl<K, V> Item for MapEntry<K, V>
where
K: Clone + Debug + Default + Ord,
V: Clone,
{
type Summary = MapKey<K>;
fn summary(&self) -> Self::Summary {
self.key()
}
}
impl<K, V> KeyedItem for MapEntry<K, V>
where
K: Clone + Debug + Default + Ord,
V: Clone,
{
type Key = MapKey<K>;
fn key(&self) -> Self::Key {
MapKey(self.key.clone())
}
}
impl<K> Summary for MapKey<K>
where
K: Clone + Debug + Default,
{
type Context = ();
fn add_summary(&mut self, summary: &Self, _: &()) {
*self = summary.clone()
}
}
impl<'a, K> Dimension<'a, MapKey<K>> for MapKeyRef<'a, K>
where
K: Clone + Debug + Default + Ord,
{
fn add_summary(&mut self, summary: &'a MapKey<K>, _: &()) {
self.0 = Some(&summary.0)
}
}
impl<'a, K> SeekTarget<'a, MapKey<K>, MapKeyRef<'a, K>> for MapKeyRef<'_, K>
where
K: Clone + Debug + Default + Ord,
{
fn cmp(&self, cursor_location: &MapKeyRef<K>, _: &()) -> Ordering {
self.0.cmp(&cursor_location.0)
}
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_basic() {
let mut map = TreeMap::default();
assert_eq!(map.iter().collect::<Vec<_>>(), vec![]);
map.insert(3, "c");
assert_eq!(map.get(&3), Some(&"c"));
assert_eq!(map.iter().collect::<Vec<_>>(), vec![(&3, &"c")]);
map.insert(1, "a");
assert_eq!(map.get(&1), Some(&"a"));
assert_eq!(map.iter().collect::<Vec<_>>(), vec![(&1, &"a"), (&3, &"c")]);
map.insert(2, "b");
assert_eq!(map.get(&2), Some(&"b"));
assert_eq!(map.get(&1), Some(&"a"));
assert_eq!(map.get(&3), Some(&"c"));
assert_eq!(
map.iter().collect::<Vec<_>>(),
vec![(&1, &"a"), (&2, &"b"), (&3, &"c")]
);
map.remove(&2);
assert_eq!(map.get(&2), None);
assert_eq!(map.iter().collect::<Vec<_>>(), vec![(&1, &"a"), (&3, &"c")]);
map.remove(&3);
assert_eq!(map.get(&3), None);
assert_eq!(map.iter().collect::<Vec<_>>(), vec![(&1, &"a")]);
map.remove(&1);
assert_eq!(map.get(&1), None);
assert_eq!(map.iter().collect::<Vec<_>>(), vec![]);
}
}

View File

@@ -15,6 +15,7 @@ collections = { path = "../collections" }
sum_tree = { path = "../sum_tree" }
anyhow = "1.0.38"
arrayvec = "0.7.1"
lazy_static = "1.4"
log = "0.4"
parking_lot = "0.11"
rand = { version = "0.8.3", optional = true }
@@ -23,6 +24,7 @@ smallvec = { version = "1.6", features = ["union"] }
[dev-dependencies]
collections = { path = "../collections", features = ["test-support"] }
gpui = { path = "../gpui", features = ["test-support"] }
util = { path = "../util", features = ["test-support"] }
ctor = "0.1"
env_logger = "0.8"
rand = "0.8.3"

View File

@@ -1,105 +1,48 @@
use crate::{rope::TextDimension, Snapshot};
use super::{Buffer, FromAnchor, FullOffset, Point, ToOffset};
use super::{Point, ToOffset};
use crate::{rope::TextDimension, BufferSnapshot};
use anyhow::Result;
use std::{
cmp::Ordering,
fmt::{Debug, Formatter},
ops::Range,
};
use sum_tree::{Bias, SumTree};
use std::{cmp::Ordering, fmt::Debug, ops::Range};
use sum_tree::Bias;
#[derive(Clone, Eq, PartialEq, Debug, Hash)]
pub struct Anchor {
pub full_offset: FullOffset,
pub timestamp: clock::Local,
pub offset: usize,
pub bias: Bias,
pub version: clock::Global,
}
#[derive(Clone)]
pub struct AnchorMap<T> {
pub(crate) version: clock::Global,
pub(crate) bias: Bias,
pub(crate) entries: Vec<(FullOffset, T)>,
}
#[derive(Clone)]
pub struct AnchorSet(pub(crate) AnchorMap<()>);
#[derive(Clone)]
pub struct AnchorRangeMap<T> {
pub(crate) version: clock::Global,
pub(crate) entries: Vec<(Range<FullOffset>, T)>,
pub(crate) start_bias: Bias,
pub(crate) end_bias: Bias,
}
#[derive(Clone)]
pub struct AnchorRangeSet(pub(crate) AnchorRangeMap<()>);
#[derive(Clone)]
pub struct AnchorRangeMultimap<T: Clone> {
pub(crate) entries: SumTree<AnchorRangeMultimapEntry<T>>,
pub(crate) version: clock::Global,
pub(crate) start_bias: Bias,
pub(crate) end_bias: Bias,
}
#[derive(Clone)]
pub(crate) struct AnchorRangeMultimapEntry<T> {
pub(crate) range: FullOffsetRange,
pub(crate) value: T,
}
#[derive(Clone, Debug)]
pub(crate) struct FullOffsetRange {
pub(crate) start: FullOffset,
pub(crate) end: FullOffset,
}
#[derive(Clone, Debug)]
pub(crate) struct AnchorRangeMultimapSummary {
start: FullOffset,
end: FullOffset,
min_start: FullOffset,
max_end: FullOffset,
count: usize,
}
impl Anchor {
pub fn min() -> Self {
Self {
full_offset: FullOffset(0),
timestamp: clock::Local::MIN,
offset: usize::MIN,
bias: Bias::Left,
version: Default::default(),
}
}
pub fn max() -> Self {
Self {
full_offset: FullOffset::MAX,
timestamp: clock::Local::MAX,
offset: usize::MAX,
bias: Bias::Right,
version: Default::default(),
}
}
pub fn cmp<'a>(&self, other: &Anchor, buffer: &Snapshot) -> Result<Ordering> {
if self == other {
return Ok(Ordering::Equal);
}
let offset_comparison = if self.version == other.version {
self.full_offset.cmp(&other.full_offset)
pub fn cmp(&self, other: &Anchor, buffer: &BufferSnapshot) -> Result<Ordering> {
let fragment_id_comparison = if self.timestamp == other.timestamp {
Ordering::Equal
} else {
buffer
.full_offset_for_anchor(self)
.cmp(&buffer.full_offset_for_anchor(other))
.fragment_id_for_anchor(self)
.cmp(&buffer.fragment_id_for_anchor(other))
};
Ok(offset_comparison.then_with(|| self.bias.cmp(&other.bias)))
Ok(fragment_id_comparison
.then_with(|| self.offset.cmp(&other.offset))
.then_with(|| self.bias.cmp(&other.bias)))
}
pub fn bias_left(&self, buffer: &Buffer) -> Anchor {
pub fn bias_left(&self, buffer: &BufferSnapshot) -> Anchor {
if self.bias == Bias::Left {
self.clone()
} else {
@@ -107,7 +50,7 @@ impl Anchor {
}
}
pub fn bias_right(&self, buffer: &Buffer) -> Anchor {
pub fn bias_right(&self, buffer: &BufferSnapshot) -> Anchor {
if self.bias == Bias::Right {
self.clone()
} else {
@@ -115,464 +58,33 @@ impl Anchor {
}
}
pub fn summary<'a, D>(&self, content: &'a Snapshot) -> D
pub fn summary<'a, D>(&self, content: &'a BufferSnapshot) -> D
where
D: TextDimension<'a>,
D: TextDimension,
{
content.summary_for_anchor(self)
}
}
impl<T> AnchorMap<T> {
    /// The buffer version the stored positions were captured at.
    pub fn version(&self) -> &clock::Global {
        &self.version
    }

    /// Number of stored entries.
    pub fn len(&self) -> usize {
        self.entries.len()
    }

    /// Resolves each stored position against `snapshot`, yielding it in
    /// dimension `D` paired with its value, in storage order. The zip relies
    /// on `summaries_for_anchors` yielding exactly one summary per input.
    pub fn iter<'a, D>(&'a self, snapshot: &'a Snapshot) -> impl Iterator<Item = (D, &'a T)> + 'a
    where
        D: 'a + TextDimension<'a>,
    {
        snapshot
            .summaries_for_anchors(
                self.version.clone(),
                self.bias,
                self.entries.iter().map(|e| &e.0),
            )
            .zip(self.entries.iter().map(|e| &e.1))
    }
}
impl AnchorSet {
    /// The buffer version the underlying map was captured at.
    pub fn version(&self) -> &clock::Global {
        &self.0.version
    }

    /// Number of stored positions.
    pub fn len(&self) -> usize {
        self.0.len()
    }

    /// Resolves each stored position against `content`, discarding the
    /// associated values of the wrapped `AnchorMap`.
    pub fn iter<'a, D>(&'a self, content: &'a Snapshot) -> impl Iterator<Item = D> + 'a
    where
        D: 'a + TextDimension<'a>,
    {
        self.0.iter(content).map(|(position, _)| position)
    }
}
impl<T> AnchorRangeMap<T> {
    /// The buffer version the stored full-offset ranges were captured at.
    pub fn version(&self) -> &clock::Global {
        &self.version
    }

    /// Number of stored ranges.
    pub fn len(&self) -> usize {
        self.entries.len()
    }

    /// Builds a map directly from `(full-offset range, value)` pairs captured
    /// at `version`, applying `start_bias`/`end_bias` when the endpoints are
    /// later resolved against a snapshot.
    pub fn from_full_offset_ranges(
        version: clock::Global,
        start_bias: Bias,
        end_bias: Bias,
        entries: Vec<(Range<FullOffset>, T)>,
    ) -> Self {
        Self {
            version,
            start_bias,
            end_bias,
            entries,
        }
    }

    /// Resolves every stored range against `content`, yielding `(range, value)`
    /// pairs in storage order with ranges expressed in dimension `D`.
    pub fn ranges<'a, D>(
        &'a self,
        content: &'a Snapshot,
    ) -> impl Iterator<Item = (Range<D>, &'a T)> + 'a
    where
        D: 'a + TextDimension<'a>,
    {
        content
            .summaries_for_anchor_ranges(
                self.version.clone(),
                self.start_bias,
                self.end_bias,
                self.entries.iter().map(|e| &e.0),
            )
            .zip(self.entries.iter().map(|e| &e.1))
    }

    /// Like `ranges`, but skips leading entries that end before `range.start`.
    ///
    /// NOTE(review): `range.end` is computed but never used to stop the
    /// iteration, so entries past the query's end are still yielded; confirm
    /// callers rely on that before tightening it.
    pub fn intersecting_ranges<'a, D, I>(
        &'a self,
        range: Range<(I, Bias)>,
        content: &'a Snapshot,
    ) -> impl Iterator<Item = (Range<D>, &'a T)> + 'a
    where
        D: 'a + TextDimension<'a>,
        I: ToOffset,
    {
        let range = content.anchor_at(range.start.0, range.start.1)
            ..content.anchor_at(range.end.0, range.end.1);
        // Scratch anchor reused to compare each stored end offset (captured at
        // `self.version`) with the query's start anchor.
        let mut probe_anchor = Anchor {
            full_offset: Default::default(),
            bias: self.start_bias,
            version: self.version.clone(),
        };
        // Binary-search for the first entry whose end is not before the query
        // start; `Ok` and `Err` both give a valid starting index.
        let start_ix = self.entries.binary_search_by(|probe| {
            probe_anchor.full_offset = probe.0.end;
            probe_anchor.cmp(&range.start, &content).unwrap()
        });
        match start_ix {
            Ok(start_ix) | Err(start_ix) => content
                .summaries_for_anchor_ranges(
                    self.version.clone(),
                    self.start_bias,
                    self.end_bias,
                    self.entries[start_ix..].iter().map(|e| &e.0),
                )
                // Fix: take the values from the same `start_ix..` slice the
                // ranges come from. Zipping against `self.entries.iter()`
                // (from index 0) paired each resolved range with the wrong
                // value whenever `start_ix > 0`.
                .zip(self.entries[start_ix..].iter().map(|e| &e.1)),
        }
    }

    /// Iterates the raw stored `(full-offset range, value)` pairs without
    /// resolving them against any snapshot.
    pub fn full_offset_ranges(&self) -> impl Iterator<Item = &(Range<FullOffset>, T)> {
        self.entries.iter()
    }

    /// Returns the entry whose key (as computed by `extract_key` from the
    /// value) is smallest, with its range resolved against `content`.
    pub fn min_by_key<'a, D, F, K>(
        &self,
        content: &'a Snapshot,
        mut extract_key: F,
    ) -> Option<(Range<D>, &T)>
    where
        D: 'a + TextDimension<'a>,
        F: FnMut(&T) -> K,
        K: Ord,
    {
        self.entries
            .iter()
            .min_by_key(|(_, value)| extract_key(value))
            .map(|(range, value)| (self.resolve_range(range, &content), value))
    }

    /// Returns the entry whose key (as computed by `extract_key` from the
    /// value) is largest, with its range resolved against `content`.
    pub fn max_by_key<'a, D, F, K>(
        &self,
        content: &'a Snapshot,
        mut extract_key: F,
    ) -> Option<(Range<D>, &T)>
    where
        D: 'a + TextDimension<'a>,
        F: FnMut(&T) -> K,
        K: Ord,
    {
        self.entries
            .iter()
            .max_by_key(|(_, value)| extract_key(value))
            .map(|(range, value)| (self.resolve_range(range, &content), value))
    }

    /// Resolves a single stored full-offset range into dimension `D` by
    /// summarizing an anchor at each endpoint with the map's biases.
    fn resolve_range<'a, D>(&self, range: &Range<FullOffset>, content: &'a Snapshot) -> Range<D>
    where
        D: 'a + TextDimension<'a>,
    {
        let mut anchor = Anchor {
            full_offset: range.start,
            bias: self.start_bias,
            version: self.version.clone(),
        };
        let start = content.summary_for_anchor(&anchor);
        anchor.full_offset = range.end;
        anchor.bias = self.end_bias;
        let end = content.summary_for_anchor(&anchor);
        start..end
    }
}
impl<T: PartialEq> PartialEq for AnchorRangeMap<T> {
    /// Maps are equal when captured at the same buffer version and holding
    /// identical entries.
    fn eq(&self, other: &Self) -> bool {
        if self.version != other.version {
            return false;
        }
        self.entries == other.entries
    }
}
// Marker impl: full equality follows from `PartialEq` when the values are `Eq`.
impl<T: Eq> Eq for AnchorRangeMap<T> {}
impl<T: Debug> Debug for AnchorRangeMap<T> {
    /// Formats the map as `{range: value, ...}` using the raw stored
    /// full-offset ranges as keys.
    fn fmt(&self, f: &mut Formatter<'_>) -> Result<(), std::fmt::Error> {
        f.debug_map()
            .entries(self.entries.iter().map(|(range, value)| (range, value)))
            .finish()
    }
}
impl Debug for AnchorRangeSet {
    /// Formats the set as `{range, ...}`, omitting the unused values of the
    /// wrapped map.
    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
        f.debug_set()
            .entries(self.0.entries.iter().map(|(range, _)| range))
            .finish()
    }
}
impl AnchorRangeSet {
    /// Number of stored ranges.
    pub fn len(&self) -> usize {
        self.0.len()
    }

    /// The buffer version the underlying map was captured at.
    pub fn version(&self) -> &clock::Global {
        self.0.version()
    }

    /// Resolves each stored range against `content`, discarding the values.
    ///
    /// NOTE(review): the type parameter `D` is never used — the return type
    /// pins the inner resolution to `Point` — so it could likely be removed
    /// once callers that spell it out are updated.
    pub fn ranges<'a, D>(&'a self, content: &'a Snapshot) -> impl 'a + Iterator<Item = Range<Point>>
    where
        D: 'a + TextDimension<'a>,
    {
        self.0.ranges(content).map(|(range, _)| range)
    }
}
impl<T: Clone> Default for AnchorRangeMultimap<T> {
fn default() -> Self {
Self {
entries: Default::default(),
version: Default::default(),
start_bias: Bias::Left,
end_bias: Bias::Left,
}
}
}
impl<T: Clone> AnchorRangeMultimap<T> {
    /// The buffer version the stored full-offset ranges were captured at.
    pub fn version(&self) -> &clock::Global {
        &self.version
    }

    /// Yields `(index, range, value)` for each entry whose range intersects
    /// `range` once resolved against `content`. With `inclusive`, entries
    /// that merely touch the query's endpoints are also reported.
    pub fn intersecting_ranges<'a, I, O>(
        &'a self,
        range: Range<I>,
        content: &'a Snapshot,
        inclusive: bool,
    ) -> impl Iterator<Item = (usize, Range<O>, &T)> + 'a
    where
        I: ToOffset,
        O: FromAnchor,
    {
        // Convert the query to full offsets; a right bias on the end makes
        // the query closed on that side.
        let end_bias = if inclusive { Bias::Right } else { Bias::Left };
        let range = range.start.to_full_offset(&content, Bias::Left)
            ..range.end.to_full_offset(&content, end_bias);
        // Filtering cursor: descend only into subtrees whose summarized
        // extent [min_start, max_end] can intersect the query.
        let mut cursor = self.entries.filter::<_, usize>(
            {
                // Scratch anchor reused to re-resolve stored full offsets
                // (captured at `self.version`) against `content`.
                let mut endpoint = Anchor {
                    full_offset: FullOffset(0),
                    bias: Bias::Right,
                    version: self.version.clone(),
                };
                move |summary: &AnchorRangeMultimapSummary| {
                    endpoint.full_offset = summary.max_end;
                    endpoint.bias = self.end_bias;
                    let max_end = endpoint.to_full_offset(&content, self.end_bias);
                    let start_cmp = range.start.cmp(&max_end);

                    endpoint.full_offset = summary.min_start;
                    endpoint.bias = self.start_bias;
                    let min_start = endpoint.to_full_offset(&content, self.start_bias);
                    let end_cmp = range.end.cmp(&min_start);

                    if inclusive {
                        start_cmp <= Ordering::Equal && end_cmp >= Ordering::Equal
                    } else {
                        start_cmp == Ordering::Less && end_cmp == Ordering::Greater
                    }
                }
            },
            &(),
        );
        // Lazily drain the cursor, resolving each matching entry's endpoints
        // into the caller's dimension `O`; the yielded index is the entry's
        // position in the tree (the cursor's `usize` dimension).
        std::iter::from_fn({
            let mut endpoint = Anchor {
                full_offset: FullOffset(0),
                bias: Bias::Left,
                version: self.version.clone(),
            };
            move || {
                if let Some(item) = cursor.item() {
                    let ix = *cursor.start();
                    endpoint.full_offset = item.range.start;
                    endpoint.bias = self.start_bias;
                    let start = O::from_anchor(&endpoint, &content);
                    endpoint.full_offset = item.range.end;
                    endpoint.bias = self.end_bias;
                    let end = O::from_anchor(&endpoint, &content);
                    let value = &item.value;
                    cursor.next(&());
                    Some((ix, start..end, value))
                } else {
                    None
                }
            }
        })
    }

    /// Builds a multimap from pre-resolved `(full-offset range, value)` pairs
    /// captured at `version`, applying the given endpoint biases.
    pub fn from_full_offset_ranges(
        version: clock::Global,
        start_bias: Bias,
        end_bias: Bias,
        entries: impl Iterator<Item = (Range<FullOffset>, T)>,
    ) -> Self {
        Self {
            version,
            start_bias,
            end_bias,
            entries: SumTree::from_iter(
                entries.map(|(range, value)| AnchorRangeMultimapEntry {
                    range: FullOffsetRange {
                        start: range.start,
                        end: range.end,
                    },
                    value,
                }),
                &(),
            ),
        }
    }

    /// Iterates the raw stored ranges and values without resolving them
    /// against any snapshot.
    pub fn full_offset_ranges(&self) -> impl Iterator<Item = (Range<FullOffset>, &T)> {
        self.entries
            .cursor::<()>()
            .map(|entry| (entry.range.start..entry.range.end, &entry.value))
    }

    /// Yields `(index, range, value)` for every entry whose value passes `f`,
    /// resolving endpoints against `content` into dimension `O`.
    pub fn filter<'a, O, F>(
        &'a self,
        content: &'a Snapshot,
        mut f: F,
    ) -> impl 'a + Iterator<Item = (usize, Range<O>, &T)>
    where
        O: FromAnchor,
        F: 'a + FnMut(&'a T) -> bool,
    {
        // Scratch anchor reused for resolving both endpoints of each entry.
        let mut endpoint = Anchor {
            full_offset: FullOffset(0),
            bias: Bias::Left,
            version: self.version.clone(),
        };
        self.entries
            .cursor::<()>()
            .enumerate()
            .filter_map(move |(ix, entry)| {
                if f(&entry.value) {
                    endpoint.full_offset = entry.range.start;
                    endpoint.bias = self.start_bias;
                    let start = O::from_anchor(&endpoint, &content);
                    endpoint.full_offset = entry.range.end;
                    endpoint.bias = self.end_bias;
                    let end = O::from_anchor(&endpoint, &content);
                    Some((ix, start..end, &entry.value))
                } else {
                    None
                }
            })
    }
}
impl<T: Clone> sum_tree::Item for AnchorRangeMultimapEntry<T> {
    type Summary = AnchorRangeMultimapSummary;

    /// A leaf entry summarizes to its own range on every axis with a count of
    /// one; internal nodes aggregate these via `Summary::add_summary`.
    fn summary(&self) -> Self::Summary {
        let start = self.range.start;
        let end = self.range.end;
        AnchorRangeMultimapSummary {
            start,
            end,
            min_start: start,
            max_end: end,
            count: 1,
        }
    }
}
impl Default for AnchorRangeMultimapSummary {
    fn default() -> Self {
        // The extrema start inverted (min_start at MAX, max_end at 0) so the
        // first `add_summary` call overwrites them. `start = 0, end = MAX`
        // makes the ordering debug-assertions in `add_summary` hold trivially
        // against whatever summary is merged in first.
        Self {
            start: FullOffset(0),
            end: FullOffset::MAX,
            min_start: FullOffset::MAX,
            max_end: FullOffset(0),
            count: 0,
        }
    }
}
impl sum_tree::Summary for AnchorRangeMultimapSummary {
    type Context = ();

    // Merges `other` — summarizing entries to the right of `self` — into
    // `self`.
    fn add_summary(&mut self, other: &Self, _: &Self::Context) {
        // Extrema accumulate across the whole subtree.
        self.min_start = self.min_start.min(other.min_start);
        self.max_end = self.max_end.max(other.max_end);

        // Entries must be ordered by start ascending, then end descending;
        // verify that invariant in debug builds.
        #[cfg(debug_assertions)]
        {
            let start_comparison = self.start.cmp(&other.start);
            assert!(start_comparison <= Ordering::Equal);
            if start_comparison == Ordering::Equal {
                assert!(self.end.cmp(&other.end) >= Ordering::Equal);
            }
        }

        // `start`/`end` always describe the rightmost summarized entry.
        self.start = other.start;
        self.end = other.end;
        self.count += other.count;
    }
}
impl Default for FullOffsetRange {
    fn default() -> Self {
        // Defaults to the widest possible range, 0..MAX.
        Self {
            start: FullOffset(0),
            end: FullOffset::MAX,
        }
    }
}
impl<'a> sum_tree::Dimension<'a, AnchorRangeMultimapSummary> for usize {
    // Accumulating entry counts lets a cursor report the index of the current
    // entry as its `usize` dimension.
    fn add_summary(&mut self, summary: &'a AnchorRangeMultimapSummary, _: &()) {
        *self += summary.count;
    }
}
impl<'a> sum_tree::Dimension<'a, AnchorRangeMultimapSummary> for FullOffsetRange {
    // Tracks the range of the rightmost entry summarized so far (the summary's
    // `start`/`end` fields, not the subtree extrema).
    fn add_summary(&mut self, summary: &'a AnchorRangeMultimapSummary, _: &()) {
        self.start = summary.start;
        self.end = summary.end;
    }
}
impl<'a> sum_tree::SeekTarget<'a, AnchorRangeMultimapSummary, FullOffsetRange> for FullOffsetRange {
    fn cmp(&self, cursor_location: &FullOffsetRange, _: &()) -> Ordering {
        // Order by start ascending, then end *descending* — note the swapped
        // operands in the second comparison — matching the entry ordering
        // asserted in `AnchorRangeMultimapSummary::add_summary`.
        Ord::cmp(&self.start, &cursor_location.start)
            .then_with(|| Ord::cmp(&cursor_location.end, &self.end))
    }
}
pub trait AnchorRangeExt {
fn cmp(&self, b: &Range<Anchor>, buffer: &Snapshot) -> Result<Ordering>;
fn to_offset(&self, content: &Snapshot) -> Range<usize>;
fn cmp(&self, b: &Range<Anchor>, buffer: &BufferSnapshot) -> Result<Ordering>;
fn to_offset(&self, content: &BufferSnapshot) -> Range<usize>;
fn to_point(&self, content: &BufferSnapshot) -> Range<Point>;
}
impl AnchorRangeExt for Range<Anchor> {
fn cmp(&self, other: &Range<Anchor>, buffer: &Snapshot) -> Result<Ordering> {
fn cmp(&self, other: &Range<Anchor>, buffer: &BufferSnapshot) -> Result<Ordering> {
Ok(match self.start.cmp(&other.start, buffer)? {
Ordering::Equal => other.end.cmp(&self.end, buffer)?,
ord @ _ => ord,
})
}
fn to_offset(&self, content: &Snapshot) -> Range<usize> {
fn to_offset(&self, content: &BufferSnapshot) -> Range<usize> {
self.start.to_offset(&content)..self.end.to_offset(&content)
}
fn to_point(&self, content: &BufferSnapshot) -> Range<Point> {
self.start.summary::<Point>(&content)..self.end.summary::<Point>(&content)
}
}

View File

@@ -0,0 +1,94 @@
use lazy_static::lazy_static;
use smallvec::{smallvec, SmallVec};
use std::iter;
lazy_static! {
    /// Shared instance of the smallest possible locator.
    pub static ref MIN: Locator = Locator::min();
    /// Shared instance of the largest possible locator.
    pub static ref MAX: Locator = Locator::max();
}
/// A dense, totally ordered identifier: a sequence of `u64` digits compared
/// lexicographically (via the derived `Ord`). Between any two distinct
/// locators another one can always be constructed — see `Locator::between`.
#[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct Locator(SmallVec<[u64; 4]>);
impl Locator {
    /// The smallest locator: a single minimum digit.
    pub fn min() -> Self {
        Self(smallvec![u64::MIN])
    }

    /// The largest locator: a single maximum digit.
    pub fn max() -> Self {
        Self(smallvec![u64::MAX])
    }

    /// The `ix`-th of `count` single-digit locators spaced evenly across the
    /// `u64` range, with room left below the first and above the last
    /// (division by `count + 2`).
    pub fn from_index(ix: usize, count: usize) -> Self {
        let id = (1 + ix as u64) * (u64::MAX / (count as u64 + 2));
        Self(smallvec![id])
    }

    /// Overwrites `self` with a copy of `other`, reusing `self`'s buffer.
    pub fn assign(&mut self, other: &Self) {
        self.0.resize(other.0.len(), 0);
        self.0.copy_from_slice(&other.0);
    }

    /// Returns a locator strictly between `lhs` and `rhs`; requires
    /// `lhs < rhs` (see the invariants exercised in `tests::test_locators`).
    /// The shorter operand is padded with MIN/MAX digits respectively so the
    /// digit walk can always descend another level.
    pub fn between(lhs: &Self, rhs: &Self) -> Self {
        let lhs = lhs.0.iter().copied().chain(iter::repeat(u64::MIN));
        let rhs = rhs.0.iter().copied().chain(iter::repeat(u64::MAX));
        let mut location = SmallVec::new();
        for (lhs, rhs) in lhs.zip(rhs) {
            // Step only gap/2^48 past `lhs`, keeping the result near the left
            // endpoint. If the gap at this digit is below 2^48 the step is
            // zero, `mid == lhs`, and the loop descends to the next, less
            // significant digit instead of terminating.
            let mid = lhs + ((rhs.saturating_sub(lhs)) >> 48);
            location.push(mid);
            if mid > lhs {
                break;
            }
        }
        Self(location)
    }

    /// Number of digits in this locator.
    pub fn len(&self) -> usize {
        self.0.len()
    }
}
impl Default for Locator {
    /// Defaults to the minimum locator.
    fn default() -> Self {
        Self::min()
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use rand::prelude::*;
    use std::mem;

    // Property test: `between` always produces a locator strictly inside the
    // (lhs, rhs) interval, and builds it out of the endpoints' digits.
    #[gpui::test(iterations = 100)]
    fn test_locators(mut rng: StdRng) {
        // Generate two distinct random locators of 1..=5 digits in 0..=100.
        let mut lhs = Default::default();
        let mut rhs = Default::default();
        while lhs == rhs {
            lhs = Locator(
                (0..rng.gen_range(1..=5))
                    .map(|_| rng.gen_range(0..=100))
                    .collect(),
            );
            rhs = Locator(
                (0..rng.gen_range(1..=5))
                    .map(|_| rng.gen_range(0..=100))
                    .collect(),
            );
        }
        // Order them so that lhs < rhs, as `between` requires.
        if lhs > rhs {
            mem::swap(&mut lhs, &mut rhs);
        }

        let middle = Locator::between(&lhs, &rhs);
        assert!(middle > lhs);
        assert!(middle < rhs);
        // Every digit except the last must be copied verbatim from one of the
        // endpoints (missing digits are treated as zero padding).
        for ix in 0..middle.0.len() - 1 {
            assert!(
                middle.0[ix] == *lhs.0.get(ix).unwrap_or(&0)
                    || middle.0[ix] == *rhs.0.get(ix).unwrap_or(&0)
            );
        }
    }
}

View File

@@ -1,9 +1,15 @@
use super::Operation;
use std::{fmt::Debug, ops::Add};
use sum_tree::{Cursor, Dimension, Edit, Item, KeyedItem, SumTree, Summary};
use sum_tree::{Dimension, Edit, Item, KeyedItem, SumTree, Summary};
pub trait Operation: Clone + Debug {
fn lamport_timestamp(&self) -> clock::Lamport;
}
#[derive(Clone, Debug)]
pub struct OperationQueue(SumTree<Operation>);
struct OperationItem<T>(T);
#[derive(Clone, Debug)]
pub struct OperationQueue<T: Operation>(SumTree<OperationItem<T>>);
#[derive(Clone, Copy, Debug, Default, Eq, Ord, PartialEq, PartialOrd)]
pub struct OperationKey(clock::Lamport);
@@ -20,7 +26,7 @@ impl OperationKey {
}
}
impl OperationQueue {
impl<T: Operation> OperationQueue<T> {
pub fn new() -> Self {
OperationQueue(SumTree::new())
}
@@ -29,11 +35,15 @@ impl OperationQueue {
self.0.summary().len
}
pub fn insert(&mut self, mut ops: Vec<Operation>) {
pub fn insert(&mut self, mut ops: Vec<T>) {
ops.sort_by_key(|op| op.lamport_timestamp());
ops.dedup_by_key(|op| op.lamport_timestamp());
self.0
.edit(ops.into_iter().map(Edit::Insert).collect(), &());
self.0.edit(
ops.into_iter()
.map(|op| Edit::Insert(OperationItem(op)))
.collect(),
&(),
);
}
pub fn drain(&mut self) -> Self {
@@ -42,8 +52,8 @@ impl OperationQueue {
clone
}
pub fn cursor(&self) -> Cursor<Operation, ()> {
self.0.cursor()
pub fn iter(&self) -> impl Iterator<Item = &T> {
self.0.iter().map(|i| &i.0)
}
}
@@ -76,22 +86,22 @@ impl<'a> Dimension<'a, OperationSummary> for OperationKey {
}
}
impl Item for Operation {
impl<T: Operation> Item for OperationItem<T> {
type Summary = OperationSummary;
fn summary(&self) -> Self::Summary {
OperationSummary {
key: OperationKey::new(self.lamport_timestamp()),
key: OperationKey::new(self.0.lamport_timestamp()),
len: 1,
}
}
}
impl KeyedItem for Operation {
impl<T: Operation> KeyedItem for OperationItem<T> {
type Key = OperationKey;
fn key(&self) -> Self::Key {
OperationKey::new(self.lamport_timestamp())
OperationKey::new(self.0.lamport_timestamp())
}
}
@@ -107,21 +117,27 @@ mod tests {
assert_eq!(queue.len(), 0);
queue.insert(vec![
Operation::Test(clock.tick()),
Operation::Test(clock.tick()),
TestOperation(clock.tick()),
TestOperation(clock.tick()),
]);
assert_eq!(queue.len(), 2);
queue.insert(vec![Operation::Test(clock.tick())]);
queue.insert(vec![TestOperation(clock.tick())]);
assert_eq!(queue.len(), 3);
drop(queue.drain());
assert_eq!(queue.len(), 0);
queue.insert(vec![Operation::Test(clock.tick())]);
queue.insert(vec![TestOperation(clock.tick())]);
assert_eq!(queue.len(), 1);
}
#[derive(Clone, Debug, Eq, PartialEq)]
struct TestOperation(clock::Lamport);
impl Operation for TestOperation {
fn lamport_timestamp(&self) -> clock::Lamport {
self.0
}
}
}

View File

@@ -9,7 +9,8 @@ pub struct Patch<T>(Vec<Edit<T>>);
impl<T> Patch<T>
where
T: Clone
T: 'static
+ Clone
+ Copy
+ Ord
+ Sub<T, Output = T>
@@ -33,13 +34,17 @@ where
Self(edits)
}
pub fn edits(&self) -> &[Edit<T>] {
&self.0
}
pub fn into_inner(self) -> Vec<Edit<T>> {
self.0
}
pub fn compose(&self, other: &Self) -> Self {
pub fn compose(&self, new_edits_iter: impl IntoIterator<Item = Edit<T>>) -> Self {
let mut old_edits_iter = self.0.iter().cloned().peekable();
let mut new_edits_iter = other.0.iter().cloned().peekable();
let mut new_edits_iter = new_edits_iter.into_iter().peekable();
let mut composed = Patch(Vec::new());
let mut old_start = T::default();
@@ -196,6 +201,33 @@ where
}
}
impl<T: Clone> IntoIterator for Patch<T> {
type Item = Edit<T>;
type IntoIter = std::vec::IntoIter<Edit<T>>;
fn into_iter(self) -> Self::IntoIter {
self.0.into_iter()
}
}
impl<'a, T: Clone> IntoIterator for &'a Patch<T> {
type Item = Edit<T>;
type IntoIter = std::iter::Cloned<std::slice::Iter<'a, Edit<T>>>;
fn into_iter(self) -> Self::IntoIter {
self.0.iter().cloned()
}
}
impl<'a, T: Clone> IntoIterator for &'a mut Patch<T> {
type Item = Edit<T>;
type IntoIter = std::iter::Cloned<std::slice::Iter<'a, Edit<T>>>;
fn into_iter(self) -> Self::IntoIter {
self.0.iter().cloned()
}
}
#[cfg(test)]
mod tests {
use super::*;

View File

@@ -35,6 +35,14 @@ impl Point {
pub fn is_zero(&self) -> bool {
self.row == 0 && self.column == 0
}
pub fn saturating_sub(self, other: Self) -> Self {
if self < other {
Self::zero()
} else {
self - other
}
}
}
impl<'a> Add<&'a Self> for Point {

View File

@@ -26,6 +26,14 @@ impl PointUtf16 {
pub fn is_zero(&self) -> bool {
self.row == 0 && self.column == 0
}
pub fn saturating_sub(self, other: Self) -> Self {
if self < other {
Self::zero()
} else {
self - other
}
}
}
impl<'a> Add<&'a Self> for PointUtf16 {

View File

@@ -12,14 +12,22 @@ impl<T: Rng> Iterator for RandomCharIter<T> {
type Item = char;
fn next(&mut self) -> Option<Self::Item> {
if std::env::var("SIMPLE_TEXT").map_or(false, |v| !v.is_empty()) {
return if self.0.gen_range(0..100) < 5 {
Some('\n')
} else {
Some(self.0.gen_range(b'a'..b'z' + 1).into())
};
}
match self.0.gen_range(0..100) {
// whitespace
0..=19 => [' ', '\n', '\t'].choose(&mut self.0).copied(),
// two-byte greek letters
20..=32 => char::from_u32(self.0.gen_range(('α' as u32)..('ω' as u32 + 1))),
// three-byte characters
// // three-byte characters
33..=45 => ['✋', '✅', '❌', '❎', '⭐'].choose(&mut self.0).copied(),
// four-byte characters
// // four-byte characters
46..=58 => ['🍐', '🏀', '🍗', '🎉'].choose(&mut self.0).copied(),
// ascii letters
_ => Some(self.0.gen_range(b'a'..b'z' + 1).into()),

View File

@@ -205,6 +205,19 @@ impl Rope {
.map_or(0, |chunk| chunk.point_utf16_to_offset(overshoot))
}
pub fn point_utf16_to_point(&self, point: PointUtf16) -> Point {
if point >= self.summary().lines_utf16 {
return self.summary().lines;
}
let mut cursor = self.chunks.cursor::<(PointUtf16, Point)>();
cursor.seek(&point, Bias::Left, &());
let overshoot = point - cursor.start().0;
cursor.start().1
+ cursor
.item()
.map_or(Point::zero(), |chunk| chunk.point_utf16_to_point(overshoot))
}
pub fn clip_offset(&self, mut offset: usize, bias: Bias) -> usize {
let mut cursor = self.chunks.cursor::<usize>();
cursor.seek(&offset, Bias::Left, &());
@@ -327,7 +340,7 @@ impl<'a> Cursor<'a> {
slice
}
pub fn summary<D: TextDimension<'a>>(&mut self, end_offset: usize) -> D {
pub fn summary<D: TextDimension>(&mut self, end_offset: usize) -> D {
debug_assert!(end_offset >= self.offset);
let mut summary = D::default();
@@ -552,6 +565,12 @@ impl Chunk {
if ch == '\n' {
point.row += 1;
if point.row > target.row {
panic!(
"point {:?} is beyond the end of a line with length {}",
target, point.column
);
}
point.column = 0;
} else {
point.column += ch.len_utf8() as u32;
@@ -574,6 +593,12 @@ impl Chunk {
if ch == '\n' {
point.row += 1;
if point.row > target.row {
panic!(
"point {:?} is beyond the end of a line with length {}",
target, point.column
);
}
point.column = 0;
} else {
point.column += ch.len_utf16() as u32;
@@ -583,6 +608,28 @@ impl Chunk {
offset
}
fn point_utf16_to_point(&self, target: PointUtf16) -> Point {
let mut point = Point::zero();
let mut point_utf16 = PointUtf16::zero();
for ch in self.0.chars() {
if point_utf16 >= target {
if point_utf16 > target {
panic!("point {:?} is inside of character {:?}", target, ch);
}
break;
}
if ch == '\n' {
point_utf16 += PointUtf16::new(1, 0);
point += Point::new(1, 0);
} else {
point_utf16 += PointUtf16::new(0, ch.len_utf16() as u32);
point += Point::new(0, ch.len_utf8() as u32);
}
}
point
}
fn clip_point(&self, target: Point, bias: Bias) -> Point {
for (row, line) in self.0.split('\n').enumerate() {
if row == target.row as usize {
@@ -685,6 +732,15 @@ impl sum_tree::Summary for TextSummary {
}
}
impl<'a> std::ops::Add<Self> for TextSummary {
type Output = Self;
fn add(mut self, rhs: Self) -> Self::Output {
self.add_assign(&rhs);
self
}
}
impl<'a> std::ops::AddAssign<&'a Self> for TextSummary {
fn add_assign(&mut self, other: &'a Self) {
let joined_chars = self.last_line_chars + other.first_line_chars;
@@ -719,12 +775,12 @@ impl std::ops::AddAssign<Self> for TextSummary {
}
}
pub trait TextDimension<'a>: Dimension<'a, TextSummary> {
pub trait TextDimension: 'static + for<'a> Dimension<'a, TextSummary> {
fn from_text_summary(summary: &TextSummary) -> Self;
fn add_assign(&mut self, other: &Self);
}
impl<'a, D1: TextDimension<'a>, D2: TextDimension<'a>> TextDimension<'a> for (D1, D2) {
impl<'a, D1: TextDimension, D2: TextDimension> TextDimension for (D1, D2) {
fn from_text_summary(summary: &TextSummary) -> Self {
(
D1::from_text_summary(summary),
@@ -738,7 +794,7 @@ impl<'a, D1: TextDimension<'a>, D2: TextDimension<'a>> TextDimension<'a> for (D1
}
}
impl<'a> TextDimension<'a> for TextSummary {
impl TextDimension for TextSummary {
fn from_text_summary(summary: &TextSummary) -> Self {
summary.clone()
}
@@ -754,7 +810,7 @@ impl<'a> sum_tree::Dimension<'a, TextSummary> for usize {
}
}
impl<'a> TextDimension<'a> for usize {
impl TextDimension for usize {
fn from_text_summary(summary: &TextSummary) -> Self {
summary.bytes
}
@@ -770,7 +826,7 @@ impl<'a> sum_tree::Dimension<'a, TextSummary> for Point {
}
}
impl<'a> TextDimension<'a> for Point {
impl TextDimension for Point {
fn from_text_summary(summary: &TextSummary) -> Self {
summary.lines
}
@@ -786,7 +842,7 @@ impl<'a> sum_tree::Dimension<'a, TextSummary> for PointUtf16 {
}
}
impl<'a> TextDimension<'a> for PointUtf16 {
impl TextDimension for PointUtf16 {
fn from_text_summary(summary: &TextSummary) -> Self {
summary.lines_utf16
}
@@ -949,6 +1005,22 @@ mod tests {
}
}
let mut point_utf16 = PointUtf16::zero();
for unit in expected.encode_utf16() {
let left_point = actual.clip_point_utf16(point_utf16, Bias::Left);
let right_point = actual.clip_point_utf16(point_utf16, Bias::Right);
assert!(right_point >= left_point);
// Ensure translating UTF-16 points to offsets doesn't panic.
actual.point_utf16_to_offset(left_point);
actual.point_utf16_to_offset(right_point);
if unit == b'\n' as u16 {
point_utf16 += PointUtf16::new(1, 0);
} else {
point_utf16 += PointUtf16::new(0, 1);
}
}
for _ in 0..5 {
let end_ix = clip_offset(&expected, rng.gen_range(0..=expected.len()), Right);
let start_ix = clip_offset(&expected, rng.gen_range(0..=end_ix), Left);

View File

@@ -1,12 +1,6 @@
use sum_tree::Bias;
use crate::{rope::TextDimension, Snapshot};
use super::{AnchorRangeMap, Buffer, Point, ToOffset, ToPoint};
use std::{cmp::Ordering, ops::Range, sync::Arc};
pub type SelectionSetId = clock::Lamport;
pub type SelectionsVersion = usize;
use crate::Anchor;
use crate::{rope::TextDimension, BufferSnapshot, ToOffset, ToPoint};
use std::cmp::Ordering;
#[derive(Copy, Clone, Debug, Eq, PartialEq)]
pub enum SelectionGoal {
@@ -24,20 +18,6 @@ pub struct Selection<T> {
pub goal: SelectionGoal,
}
#[derive(Clone, Debug, Eq, PartialEq)]
pub struct SelectionSet {
pub id: SelectionSetId,
pub active: bool,
pub selections: Arc<AnchorRangeMap<SelectionState>>,
}
#[derive(Debug, Eq, PartialEq)]
pub struct SelectionState {
pub id: usize,
pub reversed: bool,
pub goal: SelectionGoal,
}
impl<T: Clone> Selection<T> {
pub fn head(&self) -> T {
if self.reversed {
@@ -76,98 +56,19 @@ impl<T: ToOffset + ToPoint + Copy + Ord> Selection<T> {
self.end = head;
}
}
}
pub fn point_range(&self, buffer: &Buffer) -> Range<Point> {
let start = self.start.to_point(buffer);
let end = self.end.to_point(buffer);
if self.reversed {
end..start
} else {
start..end
}
}
pub fn offset_range(&self, buffer: &Buffer) -> Range<usize> {
let start = self.start.to_offset(buffer);
let end = self.end.to_offset(buffer);
if self.reversed {
end..start
} else {
start..end
impl Selection<Anchor> {
pub fn resolve<'a, D: 'a + TextDimension>(
&'a self,
snapshot: &'a BufferSnapshot,
) -> Selection<D> {
Selection {
id: self.id,
start: snapshot.summary_for_anchor(&self.start),
end: snapshot.summary_for_anchor(&self.end),
reversed: self.reversed,
goal: self.goal,
}
}
}
impl SelectionSet {
pub fn len(&self) -> usize {
self.selections.len()
}
pub fn selections<'a, D>(
&'a self,
content: &'a Snapshot,
) -> impl 'a + Iterator<Item = Selection<D>>
where
D: 'a + TextDimension<'a>,
{
self.selections
.ranges(content)
.map(|(range, state)| Selection {
id: state.id,
start: range.start,
end: range.end,
reversed: state.reversed,
goal: state.goal,
})
}
pub fn intersecting_selections<'a, D, I>(
&'a self,
range: Range<(I, Bias)>,
content: &'a Snapshot,
) -> impl 'a + Iterator<Item = Selection<D>>
where
D: 'a + TextDimension<'a>,
I: 'a + ToOffset,
{
self.selections
.intersecting_ranges(range, content)
.map(|(range, state)| Selection {
id: state.id,
start: range.start,
end: range.end,
reversed: state.reversed,
goal: state.goal,
})
}
pub fn oldest_selection<'a, D>(&'a self, content: &'a Snapshot) -> Option<Selection<D>>
where
D: 'a + TextDimension<'a>,
{
self.selections
.min_by_key(content, |selection| selection.id)
.map(|(range, state)| Selection {
id: state.id,
start: range.start,
end: range.end,
reversed: state.reversed,
goal: state.goal,
})
}
pub fn newest_selection<'a, D>(&'a self, content: &'a Snapshot) -> Option<Selection<D>>
where
D: 'a + TextDimension<'a>,
{
self.selections
.max_by_key(content, |selection| selection.id)
.map(|(range, state)| Selection {
id: state.id,
start: range.start,
end: range.end,
reversed: state.reversed,
goal: state.goal,
})
}
}

View File

@@ -0,0 +1,48 @@
use crate::{Edit, Patch};
use parking_lot::Mutex;
use std::{
mem,
sync::{Arc, Weak},
};
#[derive(Default)]
pub struct Topic(Mutex<Vec<Weak<Mutex<Patch<usize>>>>>);
pub struct Subscription(Arc<Mutex<Patch<usize>>>);
impl Topic {
pub fn subscribe(&mut self) -> Subscription {
let subscription = Subscription(Default::default());
self.0.get_mut().push(Arc::downgrade(&subscription.0));
subscription
}
pub fn publish(&self, edits: impl Clone + IntoIterator<Item = Edit<usize>>) {
publish(&mut *self.0.lock(), edits);
}
pub fn publish_mut(&mut self, edits: impl Clone + IntoIterator<Item = Edit<usize>>) {
publish(self.0.get_mut(), edits);
}
}
impl Subscription {
pub fn consume(&self) -> Patch<usize> {
mem::take(&mut *self.0.lock())
}
}
fn publish(
subscriptions: &mut Vec<Weak<Mutex<Patch<usize>>>>,
edits: impl Clone + IntoIterator<Item = Edit<usize>>,
) {
subscriptions.retain(|subscription| {
if let Some(subscription) = subscription.upgrade() {
let mut patch = subscription.lock();
*patch = patch.compose(edits.clone());
true
} else {
false
}
});
}

View File

@@ -7,6 +7,7 @@ use std::{
iter::Iterator,
time::{Duration, Instant},
};
use util::test::Network;
#[cfg(test)]
#[ctor::ctor]
@@ -78,6 +79,8 @@ fn test_random_edits(mut rng: StdRng) {
TextSummary::from(&reference_string[range])
);
buffer.check_invariants();
if rng.gen_bool(0.3) {
buffer_versions.push((buffer.clone(), buffer.subscribe()));
}
@@ -102,6 +105,32 @@ fn test_random_edits(mut rng: StdRng) {
}
assert_eq!(text.to_string(), buffer.text());
for _ in 0..5 {
let end_ix = old_buffer.clip_offset(rng.gen_range(0..=old_buffer.len()), Bias::Right);
let start_ix = old_buffer.clip_offset(rng.gen_range(0..=end_ix), Bias::Left);
let range = old_buffer.anchor_before(start_ix)..old_buffer.anchor_after(end_ix);
let mut old_text = old_buffer.text_for_range(range.clone()).collect::<String>();
let edits = buffer
.edits_since_in_range::<usize>(&old_buffer.version, range.clone())
.collect::<Vec<_>>();
log::info!(
"applying edits since version {:?} to old text in range {:?}: {:?}: {:?}",
old_buffer.version(),
start_ix..end_ix,
old_text,
edits,
);
let new_text = buffer.text_for_range(range).collect::<String>();
for edit in edits {
old_text.replace_range(
edit.new.start..edit.new.start + edit.old_len(),
&new_text[edit.new],
);
}
assert_eq!(old_text, new_text);
}
let subscription_edits = subscription.consume();
log::info!(
"applying subscription edits since version {:?} to old text: {:?}: {:?}",
@@ -432,63 +461,41 @@ fn test_history() {
let mut now = Instant::now();
let mut buffer = Buffer::new(0, 0, History::new("123456".into()));
let set_id = if let Operation::UpdateSelections { set_id, .. } =
buffer.add_selection_set(&buffer.selections_from_ranges(vec![4..4]).unwrap())
{
set_id
} else {
unreachable!()
};
buffer.start_transaction_at(Some(set_id), now).unwrap();
buffer.start_transaction_at(now);
buffer.edit(vec![2..4], "cd");
buffer.end_transaction_at(Some(set_id), now).unwrap();
buffer.end_transaction_at(now);
assert_eq!(buffer.text(), "12cd56");
assert_eq!(buffer.selection_ranges(set_id).unwrap(), vec![4..4]);
buffer.start_transaction_at(Some(set_id), now).unwrap();
buffer
.update_selection_set(set_id, &buffer.selections_from_ranges(vec![1..3]).unwrap())
.unwrap();
buffer.start_transaction_at(now);
buffer.edit(vec![4..5], "e");
buffer.end_transaction_at(Some(set_id), now).unwrap();
buffer.end_transaction_at(now).unwrap();
assert_eq!(buffer.text(), "12cde6");
assert_eq!(buffer.selection_ranges(set_id).unwrap(), vec![1..3]);
now += buffer.history.group_interval + Duration::from_millis(1);
buffer.start_transaction_at(Some(set_id), now).unwrap();
buffer
.update_selection_set(set_id, &buffer.selections_from_ranges(vec![2..2]).unwrap())
.unwrap();
buffer.start_transaction_at(now);
buffer.edit(vec![0..1], "a");
buffer.edit(vec![1..1], "b");
buffer.end_transaction_at(Some(set_id), now).unwrap();
buffer.end_transaction_at(now).unwrap();
assert_eq!(buffer.text(), "ab2cde6");
assert_eq!(buffer.selection_ranges(set_id).unwrap(), vec![3..3]);
// Last transaction happened past the group interval, undo it on its
// own.
// Last transaction happened past the group interval, undo it on its own.
buffer.undo();
assert_eq!(buffer.text(), "12cde6");
assert_eq!(buffer.selection_ranges(set_id).unwrap(), vec![1..3]);
// First two transactions happened within the group interval, undo them
// together.
// First two transactions happened within the group interval, undo them together.
buffer.undo();
assert_eq!(buffer.text(), "123456");
assert_eq!(buffer.selection_ranges(set_id).unwrap(), vec![4..4]);
// Redo the first two transactions together.
buffer.redo();
assert_eq!(buffer.text(), "12cde6");
assert_eq!(buffer.selection_ranges(set_id).unwrap(), vec![1..3]);
// Redo the last transaction on its own.
buffer.redo();
assert_eq!(buffer.text(), "ab2cde6");
assert_eq!(buffer.selection_ranges(set_id).unwrap(), vec![3..3]);
buffer.start_transaction_at(None, now).unwrap();
assert!(buffer.end_transaction_at(None, now).is_none());
buffer.start_transaction_at(now);
assert!(buffer.end_transaction_at(now).is_none());
buffer.undo();
assert_eq!(buffer.text(), "12cde6");
}
@@ -554,8 +561,8 @@ fn test_random_concurrent_edits(mut rng: StdRng) {
let buffer = &mut buffers[replica_index];
match rng.gen_range(0..=100) {
0..=50 if mutation_count != 0 => {
let ops = buffer.randomly_mutate(&mut rng);
network.broadcast(buffer.replica_id, ops);
let op = buffer.randomly_edit(&mut rng, 5).2;
network.broadcast(buffer.replica_id, vec![op]);
log::info!("buffer {} text: {:?}", buffer.replica_id, buffer.text());
mutation_count -= 1;
}
@@ -577,6 +584,7 @@ fn test_random_concurrent_edits(mut rng: StdRng) {
}
_ => {}
}
buffer.check_invariants();
if mutation_count == 0 && network.is_idle() {
break;
@@ -591,95 +599,6 @@ fn test_random_concurrent_edits(mut rng: StdRng) {
"Replica {} text != Replica 0 text",
buffer.replica_id
);
assert_eq!(
buffer.selection_sets().collect::<HashMap<_, _>>(),
first_buffer.selection_sets().collect::<HashMap<_, _>>()
);
assert_eq!(
buffer
.all_selection_ranges::<usize>()
.collect::<HashMap<_, _>>(),
first_buffer
.all_selection_ranges::<usize>()
.collect::<HashMap<_, _>>()
);
}
}
#[derive(Clone)]
struct Envelope<T: Clone> {
message: T,
sender: ReplicaId,
}
struct Network<T: Clone, R: rand::Rng> {
inboxes: std::collections::BTreeMap<ReplicaId, Vec<Envelope<T>>>,
all_messages: Vec<T>,
rng: R,
}
impl<T: Clone, R: rand::Rng> Network<T, R> {
fn new(rng: R) -> Self {
Network {
inboxes: Default::default(),
all_messages: Vec::new(),
rng,
}
}
fn add_peer(&mut self, id: ReplicaId) {
self.inboxes.insert(id, Vec::new());
}
fn is_idle(&self) -> bool {
self.inboxes.values().all(|i| i.is_empty())
}
fn broadcast(&mut self, sender: ReplicaId, messages: Vec<T>) {
for (replica, inbox) in self.inboxes.iter_mut() {
if *replica != sender {
for message in &messages {
let min_index = inbox
.iter()
.enumerate()
.rev()
.find_map(|(index, envelope)| {
if sender == envelope.sender {
Some(index + 1)
} else {
None
}
})
.unwrap_or(0);
// Insert one or more duplicates of this message *after* the previous
// message delivered by this replica.
for _ in 0..self.rng.gen_range(1..4) {
let insertion_index = self.rng.gen_range(min_index..inbox.len() + 1);
inbox.insert(
insertion_index,
Envelope {
message: message.clone(),
sender,
},
);
}
}
}
}
self.all_messages.extend(messages);
}
fn has_unreceived(&self, receiver: ReplicaId) -> bool {
!self.inboxes[&receiver].is_empty()
}
fn receive(&mut self, receiver: ReplicaId) -> Vec<T> {
let inbox = self.inboxes.get_mut(&receiver).unwrap();
let count = self.rng.gen_range(0..inbox.len() + 1);
inbox
.drain(0..count)
.map(|envelope| envelope.message)
.collect()
buffer.check_invariants();
}
}

File diff suppressed because it is too large Load Diff

Some files were not shown because too many files have changed in this diff Show More