Compare commits


232 Commits
v0.2.2 ... v0.5

Author SHA1 Message Date
Max Brunsfeld
a0c8b60a1b Merge pull request #203 from zed-industries/autoclose-pairs
Autoclose pairs
2021-10-06 11:17:41 -07:00
Antonio Scandurra
724272931a Skip autoclosed pairs
Co-Authored-By: Nathan Sobo <nathan@zed.dev>
Co-Authored-By: Max Brunsfeld <max@zed.dev>
2021-10-06 19:04:55 +02:00
Antonio Scandurra
05d7e9c4e7 Start on autoclosing pairs
Co-Authored-By: Nathan Sobo <nathan@zed.dev>
2021-10-06 16:34:57 +02:00
Max Brunsfeld
3cb7ba0f57 Make the fields of buffer::Language private 2021-10-05 14:19:33 -07:00
Max Brunsfeld
0282e6f255 Add Cargo.toml features to fix compilation of individual crates 2021-10-05 14:18:04 -07:00
Max Brunsfeld
cefb90269e Merge pull request #202 from zed-industries/crates
Break project into crates to improve incremental compilation time
2021-10-05 10:54:15 -07:00
Max Brunsfeld
2fcb90652b Fix path to zed crate in script/bundle 2021-10-05 10:46:59 -07:00
Antonio Scandurra
f70e3878b6 Flip the dependency between editor and theme
Co-Authored-By: Nathan Sobo <nathan@zed.dev>
Co-Authored-By: Max Brunsfeld <max@zed.dev>
2021-10-05 19:21:13 +02:00
Antonio Scandurra
f09798c4a7 Use the same test::run_test function for async gpui::tests 2021-10-05 18:04:22 +02:00
Antonio Scandurra
9c7ef39da6 Minimize code generation for synchronous gpui::test macro
Co-Authored-By: Nathan Sobo <nathan@zed.dev>
2021-10-05 17:47:46 +02:00
Antonio Scandurra
7a05461c50 Avoid loading Rust grammar in zed tests 2021-10-05 15:43:57 +02:00
Antonio Scandurra
2280c75103 Extract theme_selector into its own crate 2021-10-05 15:38:25 +02:00
Antonio Scandurra
47b29a5f21 Extract people_panel into its own crate 2021-10-05 14:36:38 +02:00
Antonio Scandurra
cd6378e848 Extract file_finder into its own crate 2021-10-05 14:30:56 +02:00
Antonio Scandurra
1ec0afb2d1 Extract chat_panel into its own crate 2021-10-05 14:23:45 +02:00
Antonio Scandurra
d04a11405c Extract project_panel into its own crate 2021-10-05 14:19:57 +02:00
Antonio Scandurra
499616d769 Move workspace module into its own crate 2021-10-05 13:49:10 +02:00
Antonio Scandurra
2087c4731f Extract theme into its own crate 2021-10-05 11:14:30 +02:00
Antonio Scandurra
0022c6b828 Move settings::test helper into zed::test 2021-10-05 11:00:46 +02:00
Antonio Scandurra
5105596918 Move sidebar-specific code out of Workspace 2021-10-05 10:59:20 +02:00
Antonio Scandurra
36594ecf1d Use edition = 2018 instead of 2021 for the editor crate 2021-10-05 10:16:13 +02:00
Max Brunsfeld
bbb27b9654 Move ChannelList, UserStore into client crate
Co-Authored-By: Nathan Sobo <nathan@zed.dev>
2021-10-04 17:30:11 -07:00
Max Brunsfeld
94209d2b6d Rename rpc_client -> client
Co-Authored-By: Nathan Sobo <nathan@zed.dev>
2021-10-04 17:14:21 -07:00
Max Brunsfeld
2f0212ee98 Rename worktree crate to project, pull in Project
Also, move the high-level fuzzy matching functions in
zed::fuzzy into the fuzzy crate so that project can
use them.

This required defining a 'PathMatchCandidateSet' trait
to avoid a circular dependency from fuzzy to worktree.

Co-Authored-By: Nathan Sobo <nathan@zed.dev>
2021-10-04 16:45:05 -07:00
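A minimal sketch of how a trait like `PathMatchCandidateSet` can break the cycle: the fuzzy crate defines the trait and matches against it, while the project crate implements it. Names and signatures here are illustrative assumptions, not the actual definitions.

```rust
use std::path::Path;
use std::sync::Arc;

// Defined in the `fuzzy` crate: it knows nothing about worktrees, only that
// a candidate set can hand out paths to score against a query.
pub trait PathMatchCandidateSet {
    fn candidates(&self) -> Vec<Arc<Path>>;
}

pub struct PathMatch {
    pub score: f64,
    pub path: Arc<Path>,
}

// Purely illustrative scoring; the real matcher does fuzzy, scored matching.
pub fn match_paths<S: PathMatchCandidateSet>(set: &S, query: &str) -> Vec<PathMatch> {
    set.candidates()
        .into_iter()
        .filter(|p| p.to_string_lossy().contains(query))
        .map(|path| PathMatch { score: 1.0, path })
        .collect()
}

// Implemented in the `project` crate, which depends on `fuzzy` (not the other
// way around), so no circular dependency is introduced.
pub struct WorktreeCandidates {
    pub paths: Vec<Arc<Path>>,
}

impl PathMatchCandidateSet for WorktreeCandidates {
    fn candidates(&self) -> Vec<Arc<Path>> {
        self.paths.clone()
    }
}
```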
Max Brunsfeld
748598e419 Improve logging when avatar request fails
Co-Authored-By: Nathan Sobo <nathan@zed.dev>
2021-10-04 15:47:18 -07:00
Max Brunsfeld
75cf2488db List path dependencies first in all Cargo.toml files
Co-Authored-By: Nathan Sobo <nathan@zed.dev>
2021-10-04 15:36:52 -07:00
Max Brunsfeld
1d97f08901 Move editor into its own crate
Co-Authored-By: Nathan Sobo <nathan@zed.dev>
2021-10-04 15:23:10 -07:00
Nathan Sobo
d5b60ad124 Rename zrpc to rpc
Co-Authored-By: Max Brunsfeld <maxbrunsfeld@gmail.com>
2021-10-04 13:28:00 -06:00
Nathan Sobo
fdfed3d7db Move all crates to a top-level crates folder
Co-Authored-By: Max Brunsfeld <maxbrunsfeld@gmail.com>
2021-10-04 13:22:21 -06:00
Nathan Sobo
d768224182 Clean commented dependencies
Co-Authored-By: Max Brunsfeld <maxbrunsfeld@gmail.com>
2021-10-04 13:15:45 -06:00
Nathan Sobo
c236b0828c Extract worktree, rpc_client, and util crates
Co-Authored-By: Max Brunsfeld <maxbrunsfeld@gmail.com>
2021-10-04 13:07:35 -06:00
Antonio Scandurra
154620233b Move LanguageRegistry into buffer
Co-Authored-By: Nathan Sobo <nathan@zed.dev>
Co-Authored-By: Max Brunsfeld <max@zed.dev>
2021-10-04 19:59:03 +02:00
Antonio Scandurra
becae9feee Pull out buffer into its own crate 2021-10-04 16:50:12 +02:00
Antonio Scandurra
034aed053c Extract a clock subcrate for logical clocks 2021-10-04 14:34:02 +02:00
Nathan Sobo
5b75fcd0aa 💄 2021-10-02 19:46:07 -06:00
Nathan Sobo
6239b5ec60 Introduce ProjectPath and ProjectEntry structs
These replace tuples of (usize, Arc<Path>) and (usize, usize) respectively.
2021-10-02 19:39:04 -06:00
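A sketch of the shape these structs take, derived from the tuples they replace (field names are assumptions):

```rust
use std::path::Path;
use std::sync::Arc;

// Replaces the (usize, Arc<Path>) tuple: a path addressed relative to a
// particular worktree within the project.
#[derive(Clone, Debug, PartialEq, Eq, Hash)]
pub struct ProjectPath {
    pub worktree_id: usize,
    pub path: Arc<Path>,
}

// Replaces the (usize, usize) tuple: an entry identified by worktree id and
// entry id.
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
pub struct ProjectEntry {
    pub worktree_id: usize,
    pub entry_id: usize,
}
```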
Nathan Sobo
f00045544f Introduce a File trait object to buffer
This will remove the dependency of buffer on `worktree::File`
2021-10-02 19:01:29 -06:00
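One way to express the decoupling, as a hedged sketch (method names are assumptions): the buffer crate defines a `File` trait capturing only what a buffer needs, and the worktree's concrete file type implements it.

```rust
use std::path::{Path, PathBuf};

// Lives in the buffer crate.
pub trait File {
    fn path(&self) -> &Path;
    fn is_deleted(&self) -> bool;
}

pub struct Buffer {
    // The buffer stores a trait object instead of a concrete worktree type.
    file: Option<Box<dyn File>>,
    text: String,
}

impl Buffer {
    pub fn new(text: String, file: Option<Box<dyn File>>) -> Self {
        Self { file, text }
    }

    pub fn file(&self) -> Option<&dyn File> {
        self.file.as_deref()
    }
}

// Lives in the worktree/project crate, which depends on buffer.
pub struct LocalFile {
    pub abs_path: PathBuf,
}

impl File for LocalFile {
    fn path(&self) -> &Path {
        &self.abs_path
    }

    fn is_deleted(&self) -> bool {
        false
    }
}
```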
Nathan Sobo
7728467790 Remove File::select_language
We want to make File a trait object defined in the buffer crate to decouple buffer from worktree, and this method is in the way.

Co-Authored-By: Max Brunsfeld <maxbrunsfeld@gmail.com>
2021-10-01 16:56:28 -07:00
Nathan Sobo
90cc01a50a Select the language in Editor::save_as instead of Buffer::did_save
Co-Authored-By: Max Brunsfeld <maxbrunsfeld@gmail.com>
2021-10-01 17:38:08 -06:00
Nathan Sobo
fcf6a9d58a Move Buffer::save_as to Editor
This removes buffer's dependency on Worktree, preparing the path for us to pull it into its own crate.

Co-Authored-By: Max Brunsfeld <maxbrunsfeld@gmail.com>
2021-10-01 17:32:22 -06:00
Nathan Sobo
74a47a1384 Extract fuzzy module to its own crate
We still have a fuzzy module in the zed crate with some app-specific logic.

Co-Authored-By: Max Brunsfeld <maxbrunsfeld@gmail.com>
2021-10-01 17:08:19 -06:00
Nathan Sobo
626fd2a2aa Fix warnings
Co-Authored-By: Max Brunsfeld <maxbrunsfeld@gmail.com>
2021-10-01 16:45:24 -06:00
Nathan Sobo
706925b781 Move fuzzy::match_paths to Project::match_paths
This prepares the way to extract the rest of fuzzy to its own crate.

Co-Authored-By: Max Brunsfeld <maxbrunsfeld@gmail.com>
2021-10-01 16:44:08 -06:00
Nathan Sobo
cf270b4dff Extract sum_tree to its own crate
Co-Authored-By: Max Brunsfeld <maxbrunsfeld@gmail.com>
2021-10-01 15:55:21 -06:00
Nathan Sobo
bd2d71a582 Merge pull request #194 from zed-industries/investigate-compile-time
Explore some ways to reduce Zed's compile time
2021-10-01 15:35:51 -06:00
Nathan Sobo
514d1b66c3 Reduce generic instantiations in read/update_view/model
Co-Authored-By: Max Brunsfeld <maxbrunsfeld@gmail.com>
2021-10-01 15:23:35 -06:00
Max Brunsfeld
8370b3075e Avoid spawn_local instantiations due to different scheduling closure
Co-Authored-By: Nathan Sobo <nathan@zed.dev>
2021-10-01 12:00:37 -07:00
Max Brunsfeld
48f9bc972a Box future outputs before passing them to async_task
Co-Authored-By: Nathan Sobo <nathan@zed.dev>
2021-10-01 11:13:17 -07:00
Max Brunsfeld
7577a2be32 Box futures before passing them to async_task APIs
This reduces the number of copies of each async_task function that get compiled.

Co-Authored-By: Nathan Sobo <nathan@zed.dev>
2021-10-01 10:07:27 -07:00
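The idea, roughly: if every distinct future type reaches the executor generically, the executor machinery is monomorphized once per future type; boxing first means it is instantiated only for the boxed type. A hedged sketch, not the actual gpui API:

```rust
use std::future::Future;
use std::pin::Pin;

type BoxedFuture<T> = Pin<Box<dyn Future<Output = T> + Send + 'static>>;

// Generic entry point: monomorphized per caller, but it only does the boxing.
pub fn spawn<F, T>(future: F)
where
    F: Future<Output = T> + Send + 'static,
    T: Send + 'static,
{
    spawn_boxed(Box::pin(future));
}

// Generic only over the output type: the bulk of the scheduling code is
// compiled once per output type instead of once per concrete future.
fn spawn_boxed<T: Send + 'static>(future: BoxedFuture<T>) {
    // Hand the boxed future to async_task / the platform scheduler here.
    let _ = future;
}
```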
Max Brunsfeld
24918b5cbc Remove type parameters from Cursor::seek_internal
Instead, use trait objects for the target dimension and aggregation
2021-09-30 17:17:33 -07:00
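A sketch of the general technique with illustrative stand-in types (not the real sum_tree internals): the public, generic `seek` stays ergonomic, while the body that does the work takes trait objects and is compiled once.

```rust
use std::cmp::Ordering;

pub trait SeekDimension {
    fn advance(&mut self, by: usize);
    fn cmp_target(&self, target: usize) -> Ordering;
}

pub struct Cursor {
    position: usize,
}

impl Cursor {
    // Public API stays generic for callers...
    pub fn seek<D: SeekDimension>(&mut self, dimension: &mut D, target: usize) {
        self.seek_internal(dimension, target)
    }

    // ...but the internal walk takes a trait object, so only one copy of it
    // is generated no matter how many dimension types exist.
    fn seek_internal(&mut self, dimension: &mut dyn SeekDimension, target: usize) {
        while dimension.cmp_target(target) == Ordering::Less {
            dimension.advance(1);
            self.position += 1;
        }
    }
}
```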
Max Brunsfeld
3719a9ee23 Use published prost crates
The previous version of prost didn't build on nightly rust.
2021-09-30 15:37:04 -07:00
Max Brunsfeld
6a20937ab7 Merge pull request #193 from zed-industries/project-browser
Add the project panel
2021-09-30 15:35:47 -07:00
Max Brunsfeld
2b3e5945c6 Add key bindings to toggle the project panel
- Use `cmd-1` to open the project panel and toggle focus between it and the workspace center.
- Use `cmd-shift-1` to open or close the project panel.
2021-09-30 15:26:14 -07:00
Max Brunsfeld
8dcd38c25a Fix ProjectPanel test failure 2021-09-30 15:02:27 -07:00
Max Brunsfeld
6007fa6ab3 Style the active entry in the project panel
Co-Authored-By: Nathan Sobo <nathan@zed.dev>
2021-09-30 14:16:28 -07:00
Max Brunsfeld
ad92bfe79f Avoid duplicate notifications for the same entity in the same effect cycle
Co-Authored-By: Nathan Sobo <nathan@zed.dev>
2021-09-30 14:06:09 -07:00
Max Brunsfeld
fef0bdf6c3 Don't autoscroll the project panel when worktree changes
Co-Authored-By: Nathan Sobo <nathan@zed.dev>
2021-09-30 13:44:07 -07:00
Max Brunsfeld
5fb112ac4c Vertically align root folder in project panel with tabs and sidebar icons
Co-Authored-By: Nathan Sobo <nathan@zed.dev>
2021-09-30 13:30:24 -07:00
Max Brunsfeld
177306d494 Add 'overlay' property to border
For containers, this causes the border to be drawn on top of the child element.

Co-Authored-By: Nathan Sobo <nathan@zed.dev>
2021-09-30 13:29:26 -07:00
Max Brunsfeld
e37ea6c6c8 Avoid rendering artifacts when label text is shaped with a non-integer width
Co-Authored-By: Nathan Sobo <nathan@zed.dev>
2021-09-30 13:28:45 -07:00
Max Brunsfeld
6fba17a5e1 Add disclosure arrows to the project panel
Co-Authored-By: Nathan Sobo <nathan@zed.dev>
2021-09-30 10:24:52 -07:00
Nathan Sobo
18a379f20c Scope the left and right bindings in the project panel 2021-09-30 10:50:58 -06:00
Antonio Scandurra
41fea2de1c Open buffer when trying to expand a file 2021-09-30 16:19:24 +02:00
Antonio Scandurra
e98731876c 💄 2021-09-30 16:00:01 +02:00
Antonio Scandurra
2f508af017 Rename active entry to selected entry 2021-09-30 14:41:12 +02:00
Antonio Scandurra
19325fd3f1 Allow expanding/collapsing active entry using the keyboard 2021-09-30 14:38:21 +02:00
Antonio Scandurra
789d5dfaee Allow selecting prev and next entries via the keyboard 2021-09-30 14:13:07 +02:00
Antonio Scandurra
7a1cdc9ad1 Fix panic when rendering multiple worktrees in project panel 2021-09-30 10:39:15 +02:00
Antonio Scandurra
5cd2c56f95 Merge branch 'main' into project-browser 2021-09-30 10:01:48 +02:00
Max Brunsfeld
d182182ae2 Avoid ProjectPanel panic when worktree has no root entry
Also, avoid a bug where too many UniformList elements were rendered.
2021-09-29 22:08:31 -07:00
Max Brunsfeld
a389395e9c Merge pull request #192 from zed-industries/perfmon
Configure production containers to allow performance monitoring
2021-09-29 16:28:49 -07:00
Max Brunsfeld
e22a181750 Remove stray logging 2021-09-29 16:04:14 -07:00
Max Brunsfeld
8e4685b718 Merge pull request #190 from zed-industries/worktree-cursor
Unify all worktree traversal into a single cursor/iterator
2021-09-29 16:02:19 -07:00
Max Brunsfeld
a4169f1007 Add kube-shell script, for running shell commands in a deployed container
Co-Authored-By: Nathan Sobo <nathan@zed.dev>
2021-09-29 14:55:16 -07:00
Max Brunsfeld
09f38bbd16 Fix errors in deploy script
Also, allow deploying to staging with uncommitted changes.

Co-Authored-By: Nathan Sobo <nathan@zed.dev>
2021-09-29 14:53:52 -07:00
Max Brunsfeld
39ac723f5e Add SYS_ADMIN capability to the server container
This will let us run perf(1) on the zed-server to debug
any performance problems we encounter in production.

Co-Authored-By: Nathan Sobo <nathan@zed.dev>
2021-09-29 14:53:20 -07:00
Max Brunsfeld
7599ac1e8c Generalize deploy scripts to work w/ both production and staging
Co-Authored-By: Nathan Sobo <nathan@zed.dev>
2021-09-29 14:10:40 -07:00
Max Brunsfeld
1cfdd07d40 Avoid fetching GH releases in home route
Co-Authored-By: Antonio Scandurra <me@as-cii.com>
2021-09-29 14:10:40 -07:00
Max Brunsfeld
6e8d35379c Remove session and errors middleware from static route
Co-Authored-By: Antonio Scandurra <me@as-cii.com>
Co-Authored-By: Nathan Sobo <nathan@zed.dev>
2021-09-29 14:10:40 -07:00
Nate
51a617dd5d Revert "Update link hrefs to absolute urls"
This reverts commit 62edcd70e2.
2021-09-29 12:12:35 -04:00
Nate
62edcd70e2 Update link hrefs to absolute urls 2021-09-29 12:03:56 -04:00
Antonio Scandurra
1519e1d45f Maintain active entry Project and render it in ProjectPanel 2021-09-29 17:05:03 +02:00
Antonio Scandurra
67c40eb4be Allow opening entries from project panel 2021-09-29 13:08:27 +02:00
Antonio Scandurra
e030917769 Refine project panel styling 2021-09-29 11:32:06 +02:00
Nathan Sobo
99003ef2d1 Merge pull request #185 from zed-industries/site-v2
Update zed.dev to version 0.2.0
2021-09-28 16:00:48 -06:00
Nathan Sobo
5536bf6dcc Merge branch 'main' into site-v2 2021-09-28 15:33:44 -06:00
Nathan Sobo
bd7bf82d18 Load .gitignore files from the Fs object so we can test this with a FakeFs
Co-Authored-By: Max Brunsfeld <maxbrunsfeld@gmail.com>
2021-09-28 13:21:19 -06:00
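The testability pattern in play, as a hedged sketch (the real `Fs` trait has a richer, async API): worktree code loads `.gitignore` contents only through the trait, so tests can substitute an in-memory implementation.

```rust
use std::collections::HashMap;
use std::io;
use std::path::{Path, PathBuf};

trait Fs {
    fn load(&self, path: &Path) -> io::Result<String>;
}

struct RealFs;

impl Fs for RealFs {
    fn load(&self, path: &Path) -> io::Result<String> {
        std::fs::read_to_string(path)
    }
}

// In-memory implementation used by tests: gitignore handling can be exercised
// without touching the disk.
#[derive(Default)]
struct FakeFs {
    files: HashMap<PathBuf, String>,
}

impl Fs for FakeFs {
    fn load(&self, path: &Path) -> io::Result<String> {
        self.files
            .get(path)
            .cloned()
            .ok_or_else(|| io::Error::new(io::ErrorKind::NotFound, "no such file"))
    }
}
```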
Nathan Sobo
dde782a006 Remove top/bottom padding from project panel
Co-Authored-By: Max Brunsfeld <maxbrunsfeld@gmail.com>
2021-09-28 12:26:26 -06:00
Nathan Sobo
14bc2c6857 Fix scrolling of project panel
Co-Authored-By: Max Brunsfeld <maxbrunsfeld@gmail.com>
2021-09-28 12:26:15 -06:00
Nathan Sobo
a3f45c0d3b Merge branch 'worktree-cursor' into project-browser 2021-09-28 12:13:47 -06:00
Nathan Sobo
60799c5a7c 💄
Co-Authored-By: Max Brunsfeld <maxbrunsfeld@gmail.com>
2021-09-28 12:03:11 -06:00
Nathan Sobo
b018191d52 Merge pull request #186 from zed-industries/sum-tree-seek-target
Allow seeking SumTree cursor by multiple seek types per dimension
2021-09-28 11:56:32 -06:00
Nathan Sobo
b9b08879f2 Get tests passing
Co-Authored-By: Max Brunsfeld <maxbrunsfeld@gmail.com>
Co-Authored-By: Antonio Scandurra <me@as-cii.com>
2021-09-28 11:23:09 -06:00
Antonio Scandurra
366552f3bb Use Bias::Left when traversing by path 2021-09-28 13:42:46 +02:00
Nathan Sobo
c3e8a5baa0 Unify all worktree traversal in a Traversal cursor/iterator
This uses our new ability to have a SeekTarget that's distinct from Dimension.
2021-09-27 21:58:22 -06:00
Max Brunsfeld
ab31ddfc31 Allow seeking SumTree cursor by multiple seek types per dimension
Also, remove the cursor's sum_dimension. Replace it with a
blanket implementation of Dimension for two-element tuples
of dimensions.

Co-Authored-By: Nathan Sobo <nathan@zed.dev>
2021-09-24 18:23:53 -07:00
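A hedged sketch of what such a blanket implementation can look like (the real sum_tree trait has a different shape; this just illustrates the tuple idea that lets cursors like `cursor::<(usize, Point)>()` track two dimensions at once):

```rust
// A dimension accumulates a summary as the cursor advances.
pub trait Dimension<'a, S> {
    fn add_summary(&mut self, summary: &'a S);
}

// Blanket impl: any pair of dimensions is itself a dimension, so a cursor
// parameterized over a single dimension type can carry two via a tuple,
// removing the need for a separate "sum dimension" slot.
impl<'a, S, D1, D2> Dimension<'a, S> for (D1, D2)
where
    D1: Dimension<'a, S>,
    D2: Dimension<'a, S>,
{
    fn add_summary(&mut self, summary: &'a S) {
        self.0.add_summary(summary);
        self.1.add_summary(summary);
    }
}
```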
Nate
6e50f2a413 Disable JIT mode for Tailwind 2021-09-24 18:20:31 -04:00
Nate
aa8ca8704f update sign out link 2021-09-24 18:19:46 -04:00
Nate
1c7727b815 Edit pass text cleanup + fix mismatched max-w 2021-09-24 18:16:13 -04:00
Max Brunsfeld
443a7826bb Start work on expanding and collapsing directories in project panel
Co-Authored-By: Nathan Sobo <nathan@zed.dev>
2021-09-24 15:12:49 -07:00
Nate
dc2c235211 Update some styles to improve rendering on mobile
* Add active links for nav on mobile
* Smaller page margins for a more edge-to-edge look
2021-09-24 17:37:00 -04:00
Nate
d955be3ed5 Update custom checkbox style 2021-09-24 17:20:33 -04:00
Nate
4cdb1ad0c8 Fix type on the community page 2021-09-24 17:09:11 -04:00
Nate
4013e45af6 Merge the updates and releases pages
* Remove unused story page
* Remove updates page
* Remove updates from nav
2021-09-24 17:04:02 -04:00
Nate
a90ccbdc86 Merge branch 'main' into site-v2 2021-09-24 16:52:06 -04:00
Nate
d0c4d2015e Update content on all major pages
Co-Authored-By: Nathan Sobo <nathan@zed.dev>
2021-09-24 16:50:10 -04:00
Nate
337afb0d9d Clean up some content on home and team 2021-09-24 15:28:23 -04:00
Nate
7b2803b69e Make leading and font-weights more consistent 2021-09-24 15:19:53 -04:00
Nate
0980bb1f83 Style the community form
* Add custom checkboxes
* Reorganize form
* Update copy
* Update signup.hbs with new style
2021-09-24 15:11:13 -04:00
Max Brunsfeld
7eda614c4a Start work on ProjectPanel 2021-09-23 18:14:15 -07:00
Nate
9168f64bcf combine team + story, clean up nav 2021-09-23 20:20:43 -04:00
Max Brunsfeld
d561f50ab1 Extract from Workspace a Project model that stores a window's worktrees 2021-09-23 16:34:06 -07:00
Nate
c126cd5b1a add home content 2021-09-23 19:31:36 -04:00
Max Brunsfeld
39fbf7d4d1 Avoid verifying access tokens for out-of-date clients
Replace the 'VerifyToken' middleware with a 'process_auth_header' function
that we call in the '/rpc' handler after checking that the client's protocol
version matches.
2021-09-23 15:32:23 -07:00
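A rough sketch of the reordering (framework details omitted; only the function names follow the commit message, everything else is assumed): the cheap protocol-version check runs first, and token verification only happens for up-to-date clients.

```rust
struct Request {
    protocol_version: u32,
    auth_header: Option<String>,
}

struct UserId(u64);

const CURRENT_PROTOCOL_VERSION: u32 = 1;

// Previously a VerifyToken middleware verified the token on every request,
// even requests that would be rejected for an old protocol version anyway.
async fn handle_rpc(request: Request) -> Result<UserId, String> {
    if request.protocol_version != CURRENT_PROTOCOL_VERSION {
        return Err("client is out of date".to_string());
    }
    process_auth_header(request.auth_header.as_deref()).await
}

async fn process_auth_header(header: Option<&str>) -> Result<UserId, String> {
    let token = header.ok_or_else(|| "missing auth header".to_string())?;
    // The (comparatively expensive) access-token verification happens here.
    if token.is_empty() {
        return Err("invalid token".to_string());
    }
    Ok(UserId(0))
}
```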
Max Brunsfeld
d2192fd986 Merge pull request #183 from zed-industries/speed-up-access-token-verification
Speed up login by avoiding unnecessary access token verification
2021-09-23 14:12:34 -07:00
Nate
68a3ba5e4d Clean up styling of pre and code blocks inside prose 2021-09-23 16:55:08 -04:00
Nate
61194982f8 favicon + og tags update 2021-09-23 16:45:10 -04:00
Nate
ffaf3154bb wip home updates 2021-09-23 16:44:48 -04:00
Max Brunsfeld
06efb0968c Speed up login by avoiding unnecessary access token verification 2021-09-23 13:34:53 -07:00
Nate
5d76e00b6a update story 2021-09-23 15:48:38 -04:00
Nate
2c851bf99c favicon + social media assets 2021-09-23 15:43:35 -04:00
Nate
e07702a72b style updates 2021-09-23 15:43:21 -04:00
Max Brunsfeld
82ec1b73e0 Flush pending selection before performing any action on all selections
Closes #155

Co-Authored-By: Antonio Scandurra <me@as-cii.com>
Co-Authored-By: Nathan Sobo <nathan@zed.dev>
2021-09-23 11:20:56 -07:00
Antonio Scandurra
948aa788cb Merge pull request #181 from zed-industries/rpc-compression
Compress RPC messages using zstd
2021-09-23 18:56:36 +02:00
Antonio Scandurra
d403281fdc Merge pull request #182 from zed-industries/retry-flaky-wrap-shaped-line-test
Retry `test_wrap_shaped_line` 5 times before reporting a failure
2021-09-23 18:49:13 +02:00
Antonio Scandurra
21daddb60e Retry test_wrap_shaped_line 5 times before reporting a failure
It doesn't seem like there's anything fundamentally broken and it
might just be something weird with CI. Also, there's precedent for
retrying tests depending on fonts, e.g. `test_layout_str`.
2021-09-23 18:32:39 +02:00
Antonio Scandurra
96961a7dfe Bump zrpc version to 1 2021-09-23 18:27:12 +02:00
Antonio Scandurra
5b40dcaeed Remove stray logging 2021-09-23 18:26:42 +02:00
Max Brunsfeld
8bfee93be4 Start work on compressing RPC messages
Co-Authored-By: Antonio Scandurra <me@as-cii.com>
2021-09-23 18:26:42 +02:00
Antonio Scandurra
286846cafd Merge pull request #179 from zed-industries/rpc-protocol-version
Forbid connections from outdated clients
2021-09-23 18:26:12 +02:00
Antonio Scandurra
e32f1f8b80 Create test_app_state with FakeFs instead of RealFs
Co-Authored-By: Nathan Sobo <nathan@zed.dev>
2021-09-23 18:17:49 +02:00
Antonio Scandurra
969bdb2390 Merge pull request #180 from zed-industries/peer-test-io-error-flaky
Fix flaky `zrpc::tests::peer::test_io_error` test
2021-09-23 17:56:40 +02:00
Antonio Scandurra
0aadc94442 Properly initialize response_channels
Co-Authored-By: Nathan Sobo <nathan@zed.dev>
2021-09-23 17:50:13 +02:00
Antonio Scandurra
2352725c58 💄
Co-Authored-By: Nathan Sobo <nathan@zed.dev>
2021-09-23 17:32:37 +02:00
Antonio Scandurra
742bfe1d5f Merge pull request #176 from zed-industries/worktree-share-exclude-ignored
Exclude ignored entries when sharing a worktree
2021-09-23 17:26:41 +02:00
Antonio Scandurra
6b0ed820bf Prevent requests from hanging when shutting down a connection
When closing a connection (either due to an error or simply because the
user wanted to), we will now *take* `response_channels` as opposed to
clearing them. This ensures that `Peer::request` can't succeed in both
adding the oneshot channel to the `response_channels` map _and_ submitting the
message onto the `outgoing_tx` channel.

This also streamlines how we close a connection by unifying all the exit
code paths of the IO handling future.
2021-09-23 16:35:40 +02:00
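A distilled sketch of the race being closed (types simplified; the real Peer keys oneshot channels by message id):

```rust
use std::collections::HashMap;
use std::sync::Mutex;

type MessageId = u32;
type ResponseSender = std::sync::mpsc::Sender<String>;

struct Peer {
    // `None` once the connection has been shut down.
    response_channels: Mutex<Option<HashMap<MessageId, ResponseSender>>>,
}

impl Peer {
    fn request(&self, id: MessageId, tx: ResponseSender) -> Result<(), &'static str> {
        let mut channels = self.response_channels.lock().unwrap();
        match channels.as_mut() {
            // Registering the response channel and (conceptually) enqueueing
            // the outgoing message happen under the same check, so a request
            // can't slip in after shutdown and then wait forever.
            Some(map) => {
                map.insert(id, tx);
                Ok(())
            }
            None => Err("connection closed"),
        }
    }

    fn shutdown(&self) {
        // *Take* the map instead of clearing it: pending senders are dropped
        // (waking their waiters with an error) and later requests see `None`.
        let _channels = self.response_channels.lock().unwrap().take();
    }
}
```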
Antonio Scandurra
c70b4a99c9 Remove non-determinism from zrpc::tests::peer::test_io_error 2021-09-23 15:55:28 +02:00
Antonio Scandurra
374b05a379 Display warning in titlebar when Zed is out-of-date 2021-09-23 15:36:32 +02:00
Antonio Scandurra
511cbaa2bd Version the zrpc protocol using a X-ZRPC-VERSION header 2021-09-23 15:35:35 +02:00
Antonio Scandurra
9efd8ce323 Exclude ignored entries when sharing a worktree
This commit also adds a randomized test that exercises building and
applying snapshot updates.
2021-09-23 11:20:53 +02:00
Antonio Scandurra
d07ff73999 Bump PathEntry::scan_id when the ignore status of an entry changes 2021-09-23 11:20:53 +02:00
Antonio Scandurra
a1ca507498 Merge pull request #174 from zed-industries/tweak-word-boundaries
Adjust word-boundary motion, treating whitespace more like other editors
2021-09-23 09:09:49 +02:00
Max Brunsfeld
1402f842f7 Adjust word-boundary motion, treating whitespace more like other editors
Also, fix the behavior of 'delete_to_{prev,next}_word_boundary' commands when
there are non-empty selections.
2021-09-22 17:00:57 -07:00
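A minimal sketch of the classification involved (the rules here are illustrative, not the editor's actual movement code): characters are bucketed into kinds, and motion stops where the kind changes.

```rust
#[derive(PartialEq, Eq, Clone, Copy)]
enum CharKind {
    Whitespace,
    Word,
    Punctuation,
}

fn char_kind(c: char) -> CharKind {
    if c.is_whitespace() {
        CharKind::Whitespace
    } else if c.is_alphanumeric() || c == '_' {
        CharKind::Word
    } else {
        CharKind::Punctuation
    }
}

// Walk forward until the character class changes. The behavior the commit
// tunes is which of these transitions count as word boundaries, in
// particular how runs of whitespace are coalesced.
fn next_class_change(text: &str, offset: usize) -> usize {
    let mut iter = text[offset..].char_indices();
    let first = match iter.next() {
        Some((_, c)) => char_kind(c),
        None => return text.len(),
    };
    for (ix, c) in iter {
        if char_kind(c) != first {
            return offset + ix;
        }
    }
    text.len()
}
```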
Max Brunsfeld
84c7064699 Fix bytes/chars error in Editor::paste
Fixes #156
2021-09-22 14:36:30 -07:00
Nate
94d1c9d9a6 update styles 2021-09-22 13:04:14 -04:00
Nate
4e6e789f34 Merge branch 'main' into site-v2 2021-09-22 12:14:58 -04:00
Nate
09fdce4ac9 update styles 2021-09-22 11:38:14 -04:00
Nate
1bcbb9417a shrink tailwind package size by removing unused core components 2021-09-22 11:38:09 -04:00
Antonio Scandurra
796139e4ab Merge pull request #170 from zed-industries/people-panel
People panel
2021-09-22 17:10:50 +02:00
Antonio Scandurra
6120ce3747 Move people panel up
Co-Authored-By: Nathan Sobo <nathan@zed.dev>
2021-09-22 17:09:24 +02:00
Antonio Scandurra
23d77e2b9f Refine people panel styling
Co-Authored-By: Nathan Sobo <nathan@zed.dev>
2021-09-22 17:02:50 +02:00
Antonio Scandurra
257744ac36 Fix resolution of extends directive when children are unresolved
Co-Authored-By: Nathan Sobo <nathan@zed.dev>
2021-09-22 16:04:44 +02:00
Antonio Scandurra
3e65fb4267 Leave joined worktrees when guest loses connection 2021-09-22 15:33:13 +02:00
Antonio Scandurra
b47422ae28 Maintain connections correctly when leaving or unsharing worktrees 2021-09-22 15:24:11 +02:00
Antonio Scandurra
78fbd1307a Remove remote worktrees and close their buffers when host unshares 2021-09-22 14:50:32 +02:00
Antonio Scandurra
d67227177a Allow leaving worktree while opening a buffer 2021-09-22 12:38:31 +02:00
Max Brunsfeld
c24d439eb1 Allow clicking on worktrees to share, unshare, join, and leave
Co-Authored-By: Nathan Sobo <nathan@zed.dev>
2021-09-21 16:27:57 -07:00
Max Brunsfeld
41a1514cec Add chainable Element methods for common containers
Co-Authored-By: Nathan Sobo <nathan@zed.dev>
2021-09-21 15:24:14 -07:00
Max Brunsfeld
b576397610 Vertically align avatars with labels in people panel
Co-Authored-By: Nathan Sobo <nathan@zed.dev>
2021-09-21 15:09:31 -07:00
Max Brunsfeld
6d0b84a467 💄 2021-09-21 13:48:34 -07:00
Max Brunsfeld
729896d32a Style worktree rows in people panel
Co-Authored-By: Nathan Sobo <nathan@zed.dev>
2021-09-21 13:35:00 -07:00
Max Brunsfeld
9352c4e076 Ignore empty ZED_IMPERSONATE env var 2021-09-21 13:29:06 -07:00
Max Brunsfeld
412535420b Allow joining worktrees by clicking them in the people panel
Co-Authored-By: Antonio Scandurra <me@as-cii.com>
Co-Authored-By: Nathan Sobo <nathan@zed.dev>
2021-09-21 12:21:03 -07:00
Max Brunsfeld
65a3af9bde Rerender list elements when they notify during events
Co-Authored-By: Nathan Sobo <nathan@zed.dev>
2021-09-21 12:20:32 -07:00
Antonio Scandurra
c90dc7235e Rename participants to guests in proto
Co-Authored-By: Nathan Sobo <nathan@zed.dev>
2021-09-21 18:30:49 +02:00
Antonio Scandurra
1bd6cd0978 Allow size to be specified in ImageStyle
Co-Authored-By: Nathan Sobo <nathan@zed.dev>
2021-09-21 18:27:26 +02:00
Antonio Scandurra
f8990b707a Style people panel
Co-Authored-By: Nathan Sobo <nathan@zed.dev>
2021-09-21 18:13:02 +02:00
Antonio Scandurra
4a723a1fb9 Merge branch 'main' into people-panel 2021-09-21 17:29:07 +02:00
Antonio Scandurra
7c10faeccf Use more concrete names for Store return values
Co-Authored-By: Nathan Sobo <nathan@zed.dev>
2021-09-21 17:21:30 +02:00
Antonio Scandurra
b9d46366ed Fix more failing/hanging unit tests 2021-09-21 16:20:20 +02:00
Antonio Scandurra
d381020a60 Add Server::{state,state_mut} to catch most deadlocks statically 2021-09-21 12:19:52 +02:00
Antonio Scandurra
0b11192fe3 Remove deadlock and make integration tests pass again 2021-09-21 11:58:31 +02:00
Antonio Scandurra
1954c6b00e Replace returned tuples with named structs in Store 2021-09-21 11:38:50 +02:00
Nathan Sobo
aa671f1041 Fix remaining compilation errors 2021-09-20 19:44:30 -06:00
Max Brunsfeld
5dfd4be174 WIP 2021-09-20 18:05:46 -07:00
Nathan Sobo
e6a0a46476 Merge pull request #167 from zed-industries/fix-language-selection
Fix language selection when saving new buffers as a single-file worktree
2021-09-20 17:51:10 -07:00
Max Brunsfeld
8b1a2c8cd2 Fix warnings in people_panel 2021-09-20 15:45:41 -07:00
Max Brunsfeld
8f578e7521 Maintain server state consistency when removing a connection
Co-Authored-By: Nathan Sobo <nathan@zed.dev>
2021-09-20 15:45:33 -07:00
Max Brunsfeld
8de9c362c9 Add .zed.toml 2021-09-20 15:44:42 -07:00
Max Brunsfeld
a068019d94 Add ZED_IMPERSONATE env var, for testing
Co-Authored-By: Nathan Sobo <nathan@zed.dev>
2021-09-20 15:44:28 -07:00
Nate
d8ebbd5a51 update home 2021-09-20 18:31:06 -04:00
Nate
cbb7e882e8 update page spacing on mobile 2021-09-20 18:23:01 -04:00
Nate
10ed388d41 Form design changes 2021-09-20 18:21:00 -04:00
Nate
1470184d45 Add active page indicator 2021-09-20 18:02:04 -04:00
Nate
9c0b9f0bcc update signup form and admin panel 2021-09-20 17:53:10 -04:00
Max Brunsfeld
4ca5814470 Add missing people panel properties to base theme
Co-Authored-By: Nathan Sobo <nathan@zed.dev>
2021-09-20 13:38:59 -07:00
Nathan Sobo
4279451150 Fix language selection when saving new buffers as a single-file worktree
Co-Authored-By: Max Brunsfeld <maxbrunsfeld@gmail.com>
2021-09-20 14:28:02 -06:00
Nate
ccf4cf165f Update admin + community to use new interest values from form 2021-09-20 16:06:40 -04:00
Nate
f933b40fe2 Add "interest" booleans to signups form
Co-Authored-By: Max Brunsfeld <max@zed.dev>
2021-09-20 16:06:16 -04:00
Nate
8d08abf2a6 Merge branch 'main' into site-v2 2021-09-20 16:01:03 -04:00
Nate
b2c7bf1dae mobile styles 2021-09-20 15:42:30 -04:00
Max Brunsfeld
7252ed2451 Add 'active' class to the active nav link
Co-Authored-By: Nate Butler <nate@zed.dev>
2021-09-20 12:04:26 -07:00
Nate
6ed1b3cba6 mobile styles 2021-09-20 14:41:52 -04:00
Antonio Scandurra
23652f2ba6 Start on PeoplePanel::render
Co-Authored-By: Max Brunsfeld <max@zed.dev>
Co-Authored-By: Nathan Sobo <nathan@zed.dev>
2021-09-20 20:04:48 +02:00
Antonio Scandurra
cb2d8bac1d Use bullseye-slim for migration Dockerfile
Closes #154

Co-Authored-By: Nathan Sobo <nathan@zed.dev>
Co-Authored-By: Max Brunsfeld <max@zed.dev>
2021-09-20 19:42:24 +02:00
Nate
6eca2f465b mobile style updates 2021-09-20 12:32:43 -04:00
Nate
fe5cb691b5 wip mobile nav 2021-09-20 12:02:42 -04:00
Nate
6c59da6496 update error page 2021-09-20 12:02:35 -04:00
Antonio Scandurra
67946b7224 Add an integration test to verify collaborators are kept up to date 2021-09-20 16:40:31 +02:00
Nate
893ff2f26f layout updates 2021-09-20 10:34:18 -04:00
Antonio Scandurra
3d4dbf3140 Maintain collaborators in UserStore 2021-09-20 15:05:41 +02:00
Antonio Scandurra
32111092bd Turn UserStore into a model 2021-09-20 14:40:06 +02:00
Antonio Scandurra
6f2c8ffb00 Introduce a Client::subscribe method that doesn't need an entity 2021-09-20 14:14:07 +02:00
Antonio Scandurra
ae9fb65315 Fix integration tests 2021-09-20 12:52:13 +02:00
Antonio Scandurra
d8ea220acc Update collaborators as worktrees are opened/shared/closed 2021-09-20 12:29:32 +02:00
Nate
493fb4fc24 minimal styles for admin page 2021-09-18 12:57:04 -04:00
Nate
8f8806445e update team & story 2021-09-17 16:25:15 -04:00
Nate
80fce4e18b remove tech from nav 2021-09-17 15:40:03 -04:00
Nate
a4d35d6c09 add community page content 2021-09-17 15:37:34 -04:00
Nate
d3b9e333a7 update home + story 2021-09-17 15:37:27 -04:00
Nate
93f3c59d5a release page cleanup 2021-09-17 15:06:38 -04:00
Nate
8cd16982b1 add releases page 2021-09-17 14:59:02 -04:00
Nate
105206aef1 style prose, add code highlighting, style updates page 2021-09-17 14:02:31 -04:00
Antonio Scandurra
f13af7dc59 WIP
Co-Authored-By: Nathan Sobo <nathan@zed.dev>
2021-09-17 18:37:00 +02:00
Nate
cb34321d69 add prose.css for manual prose styles 2021-09-17 12:33:10 -04:00
Nate
f4d0225b75 Update sign in button to link 2021-09-17 10:36:47 -04:00
Antonio Scandurra
f2825cceba Start on Presence as the backing model for the people panel 2021-09-17 16:18:41 +02:00
Antonio Scandurra
12fa309b7c Implement proto::GetCollaborators request 2021-09-17 14:36:50 +02:00
Max Brunsfeld
e3c0d6980c Switch to a new flow for advertising, sharing and joining worktrees
Now, when you open a local worktree, we immediately send an `OpenWorktree` message
to the server, telling it the name of the folder that you've opened, and the names of all the
collaborators (based on a `.zed.toml` file). The server responds with a unique id for the
worktree.

When starting to share this local worktree, you now include this previously-assigned id
in the `ShareWorktree` message.

When joining a worktree, there is no longer a need to provide an access token. The access
is controlled by the set of "collaborator logins" that were provided when the worktree
was initially opened by the host.
2021-09-16 18:39:29 -07:00
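The flow described above, restated as a sketch of the messages involved (field names are guesses based on the commit message, not the actual protobuf definitions):

```rust
// Sent immediately when a local worktree is opened, before any sharing.
struct OpenWorktree {
    root_name: String,
    // Read from the worktree's .zed.toml file.
    collaborator_logins: Vec<String>,
}

// The server's reply assigns a stable id for this worktree.
struct OpenWorktreeResponse {
    worktree_id: u64,
}

// Sharing later reuses the previously assigned id...
struct ShareWorktree {
    worktree_id: u64,
}

// ...and joining needs no access token: the server checks the joiner's login
// against the collaborator list supplied when the worktree was opened.
struct JoinWorktree {
    worktree_id: u64,
}
```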
Nate
18fd689177 hook up updates + more styles 2021-09-16 17:12:24 -04:00
Max Brunsfeld
176ca9086e Rename OpenWorktree message to JoinWorktree
We'll repurpose the name 'OpenWorktree' for opening a local
worktree while signed in, and advertising the open status to
the worktree's collaborators.
2021-09-16 12:57:19 -07:00
Nate
eb32675652 fix scrollbar jumping 2021-09-16 15:40:12 -04:00
Nate
2ea7368cfb style updates 2021-09-16 15:01:50 -04:00
Max Brunsfeld
f0093974ee Always store an rpc client on local worktrees
Even before sharing, we now want to advertise that the user is working
on a certain worktree, to make that visible to all of the worktree's
collaborators.
2021-09-16 11:06:49 -07:00
Nate
fc7dc73f57 add additional pages and set base styles 2021-09-15 18:28:38 -04:00
Nate
51f0250466 swap logged in avatar -> username 2021-09-15 14:58:59 -04:00
Nate
0eb9822e0d style nav 2021-09-15 14:47:50 -04:00
Nate
0906b96a23 add new font stack 2021-09-15 13:21:04 -04:00
Nate
8eb1ba9fe7 split home and releases 2021-09-15 12:57:25 -04:00
Nate
c0ad095513 add wishful nav items 2021-09-15 12:54:19 -04:00
Nate
68926908d0 new home markup 2021-09-15 12:54:07 -04:00
Nate
0f2ba45267 wip test new site content 2021-09-14 17:54:23 -04:00
315 changed files with 207188 additions and 5944 deletions

1
.zed.toml Normal file
View File

@@ -0,0 +1 @@
collaborators = ["nathansobo", "as-cii", "maxbrunsfeld", "iamnbutler"]

390
Cargo.lock generated
View File

@@ -742,6 +742,30 @@ dependencies = [
"memchr",
]
[[package]]
name = "buffer"
version = "0.1.0"
dependencies = [
"anyhow",
"arrayvec 0.7.1",
"clock",
"gpui",
"lazy_static",
"log",
"parking_lot",
"rand 0.8.3",
"rpc",
"seahash",
"serde 1.0.125",
"similar",
"smallvec",
"sum_tree",
"theme",
"tree-sitter",
"tree-sitter-rust",
"unindent",
]
[[package]]
name = "build_const"
version = "0.2.2"
@@ -867,6 +891,9 @@ name = "cc"
version = "1.0.67"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e3c69b077ad434294d3ce9f1f6143a2a4b89a8a2d54ef813d85003a4fd1137fd"
dependencies = [
"jobserver",
]
[[package]]
name = "cexpr"
@@ -899,6 +926,20 @@ version = "1.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd"
[[package]]
name = "chat_panel"
version = "0.1.0"
dependencies = [
"client",
"editor",
"gpui",
"postage",
"theme",
"time 0.3.2",
"util",
"workspace",
]
[[package]]
name = "chrono"
version = "0.4.19"
@@ -995,6 +1036,39 @@ dependencies = [
"syn",
]
[[package]]
name = "client"
version = "0.1.0"
dependencies = [
"anyhow",
"async-recursion",
"async-tungstenite",
"futures",
"gpui",
"image 0.23.14",
"lazy_static",
"log",
"parking_lot",
"postage",
"rand 0.8.3",
"rpc",
"smol",
"sum_tree",
"surf",
"thiserror",
"time 0.3.2",
"tiny_http",
"util",
]
[[package]]
name = "clock"
version = "0.1.0"
dependencies = [
"rpc",
"smallvec",
]
[[package]]
name = "cloudabi"
version = "0.0.3"
@@ -1541,6 +1615,30 @@ dependencies = [
"getrandom 0.2.2",
]
[[package]]
name = "editor"
version = "0.1.0"
dependencies = [
"anyhow",
"buffer",
"clock",
"gpui",
"lazy_static",
"log",
"parking_lot",
"postage",
"rand 0.8.3",
"serde 1.0.125",
"smallvec",
"smol",
"sum_tree",
"theme",
"tree-sitter",
"tree-sitter-rust",
"unindent",
"util",
]
[[package]]
name = "either"
version = "1.6.1"
@@ -1759,6 +1857,21 @@ dependencies = [
"subtle",
]
[[package]]
name = "file_finder"
version = "0.1.0"
dependencies = [
"editor",
"fuzzy",
"gpui",
"postage",
"project",
"serde_json 1.0.64",
"theme",
"util",
"workspace",
]
[[package]]
name = "filetime"
version = "0.2.14"
@@ -2037,6 +2150,14 @@ dependencies = [
"slab",
]
[[package]]
name = "fuzzy"
version = "0.1.0"
dependencies = [
"gpui",
"util",
]
[[package]]
name = "generator"
version = "0.6.23"
@@ -2172,7 +2293,6 @@ name = "gpui"
version = "0.1.0"
dependencies = [
"anyhow",
"arrayvec 0.7.1",
"async-task",
"backtrace",
"bindgen",
@@ -2209,6 +2329,7 @@ dependencies = [
"simplelog",
"smallvec",
"smol",
"sum_tree",
"time 0.3.2",
"tiny-skia",
"tree-sitter",
@@ -2584,15 +2705,6 @@ dependencies = [
"waker-fn",
]
[[package]]
name = "itertools"
version = "0.9.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "284f18f85651fe11e8a991b2adb42cb078325c996ed026d994719efcfca1d54b"
dependencies = [
"either",
]
[[package]]
name = "itertools"
version = "0.10.1"
@@ -2614,6 +2726,15 @@ version = "0.4.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "dd25036021b0de88a0aff6b850051563c6516d0bf53f8638938edbb9de732736"
[[package]]
name = "jobserver"
version = "0.1.24"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "af25a77299a7f711a01975c35a6a424eb6862092cc2d6c72c4ed6cbc56dfc1fa"
dependencies = [
"libc",
]
[[package]]
name = "jpeg-decoder"
version = "0.1.22"
@@ -3321,6 +3442,17 @@ dependencies = [
"regex",
]
[[package]]
name = "people_panel"
version = "0.1.0"
dependencies = [
"client",
"gpui",
"postage",
"theme",
"workspace",
]
[[package]]
name = "percent-encoding"
version = "2.1.0"
@@ -3614,36 +3746,70 @@ dependencies = [
]
[[package]]
name = "prost"
version = "0.7.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9e6984d2f1a23009bd270b8bb56d0926810a3d483f59c987d77969e9d8e840b2"
name = "project"
version = "0.1.0"
dependencies = [
"bytes 1.0.1",
"prost-derive 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)",
"anyhow",
"async-trait",
"buffer",
"client",
"clock",
"fsevent",
"futures",
"fuzzy",
"gpui",
"ignore",
"lazy_static",
"libc",
"log",
"parking_lot",
"postage",
"rand 0.8.3",
"rpc",
"serde 1.0.125",
"serde_json 1.0.64",
"smol",
"sum_tree",
"tempdir",
"toml 0.5.8",
"util",
]
[[package]]
name = "project_panel"
version = "0.1.0"
dependencies = [
"gpui",
"postage",
"project",
"serde_json 1.0.64",
"theme",
"workspace",
]
[[package]]
name = "prost"
version = "0.7.0"
source = "git+https://github.com/tokio-rs/prost?rev=6cf97ea422b09d98de34643c4dda2d4f8b7e23e6#6cf97ea422b09d98de34643c4dda2d4f8b7e23e6"
version = "0.8.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "de5e2533f59d08fcf364fd374ebda0692a70bd6d7e66ef97f306f45c6c5d8020"
dependencies = [
"bytes 1.0.1",
"prost-derive 0.7.0 (git+https://github.com/tokio-rs/prost?rev=6cf97ea422b09d98de34643c4dda2d4f8b7e23e6)",
"prost-derive",
]
[[package]]
name = "prost-build"
version = "0.7.0"
source = "git+https://github.com/tokio-rs/prost?rev=6cf97ea422b09d98de34643c4dda2d4f8b7e23e6#6cf97ea422b09d98de34643c4dda2d4f8b7e23e6"
version = "0.8.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "355f634b43cdd80724ee7848f95770e7e70eefa6dcf14fea676216573b8fd603"
dependencies = [
"bytes 1.0.1",
"heck",
"itertools 0.10.1",
"itertools",
"log",
"multimap",
"petgraph",
"prost 0.7.0 (git+https://github.com/tokio-rs/prost?rev=6cf97ea422b09d98de34643c4dda2d4f8b7e23e6)",
"prost",
"prost-types",
"tempfile",
"which 4.1.0",
@@ -3651,24 +3817,12 @@ dependencies = [
[[package]]
name = "prost-derive"
version = "0.7.0"
version = "0.8.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "169a15f3008ecb5160cba7d37bcd690a7601b6d30cfb87a117d45e59d52af5d4"
checksum = "600d2f334aa05acb02a755e217ef1ab6dea4d51b58b7846588b747edec04efba"
dependencies = [
"anyhow",
"itertools 0.9.0",
"proc-macro2",
"quote",
"syn",
]
[[package]]
name = "prost-derive"
version = "0.7.0"
source = "git+https://github.com/tokio-rs/prost?rev=6cf97ea422b09d98de34643c4dda2d4f8b7e23e6#6cf97ea422b09d98de34643c4dda2d4f8b7e23e6"
dependencies = [
"anyhow",
"itertools 0.10.1",
"itertools",
"proc-macro2",
"quote",
"syn",
@@ -3676,11 +3830,12 @@ dependencies = [
[[package]]
name = "prost-types"
version = "0.7.0"
source = "git+https://github.com/tokio-rs/prost?rev=6cf97ea422b09d98de34643c4dda2d4f8b7e23e6#6cf97ea422b09d98de34643c4dda2d4f8b7e23e6"
version = "0.8.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "603bbd6394701d13f3f25aada59c7de9d35a6a5887cfc156181234a44002771b"
dependencies = [
"bytes 1.0.1",
"prost 0.7.0 (git+https://github.com/tokio-rs/prost?rev=6cf97ea422b09d98de34643c4dda2d4f8b7e23e6)",
"prost",
]
[[package]]
@@ -4068,6 +4223,28 @@ dependencies = [
"xmlparser",
]
[[package]]
name = "rpc"
version = "0.1.0"
dependencies = [
"anyhow",
"async-lock",
"async-tungstenite",
"base64 0.13.0",
"futures",
"log",
"parking_lot",
"postage",
"prost",
"prost-build",
"rand 0.8.3",
"rsa",
"serde 1.0.125",
"smol",
"tempdir",
"zstd",
]
[[package]]
name = "rsa"
version = "0.4.0"
@@ -4956,6 +5133,14 @@ version = "2.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1e81da0851ada1f3e9d4312c704aa4f8806f0f9d69faaf8df2f3464b4a9437c2"
[[package]]
name = "sum_tree"
version = "0.1.0"
dependencies = [
"arrayvec 0.7.1",
"rand 0.8.3",
]
[[package]]
name = "surf"
version = "2.2.0"
@@ -5128,6 +5313,35 @@ dependencies = [
"unicode-width",
]
[[package]]
name = "theme"
version = "0.1.0"
dependencies = [
"anyhow",
"gpui",
"indexmap",
"parking_lot",
"serde 1.0.125",
"serde_json 1.0.64",
"serde_path_to_error",
"toml 0.5.8",
]
[[package]]
name = "theme_selector"
version = "0.1.0"
dependencies = [
"editor",
"fuzzy",
"gpui",
"log",
"parking_lot",
"postage",
"smol",
"theme",
"workspace",
]
[[package]]
name = "thiserror"
version = "1.0.29"
@@ -5598,6 +5812,18 @@ version = "0.7.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "09cc8ee72d2a9becf2f2febe0205bbed8fc6615b7cb429ad062dc7b7ddd036a9"
[[package]]
name = "util"
version = "0.1.0"
dependencies = [
"anyhow",
"futures",
"log",
"serde_json 1.0.64",
"surf",
"tempdir",
]
[[package]]
name = "uuid"
version = "0.5.1"
@@ -5870,6 +6096,24 @@ dependencies = [
"winapi 0.3.9",
]
[[package]]
name = "workspace"
version = "0.1.0"
dependencies = [
"anyhow",
"buffer",
"client",
"editor",
"gpui",
"log",
"postage",
"project",
"serde_json 1.0.64",
"theme",
"tree-sitter",
"tree-sitter-rust",
]
[[package]]
name = "wyz"
version = "0.2.0"
@@ -5908,18 +6152,24 @@ name = "zed"
version = "0.1.0"
dependencies = [
"anyhow",
"arrayvec 0.7.1",
"async-recursion",
"async-trait",
"async-tungstenite",
"buffer",
"cargo-bundle",
"chat_panel",
"client",
"clock",
"crossbeam-channel",
"ctor",
"dirs 3.0.1",
"easy-parallel",
"editor",
"env_logger",
"file_finder",
"fsevent",
"futures",
"fuzzy",
"gpui",
"http-auth-basic",
"ignore",
@@ -5931,20 +6181,25 @@ dependencies = [
"log-panics",
"num_cpus",
"parking_lot",
"people_panel",
"postage",
"project",
"project_panel",
"rand 0.8.3",
"rpc",
"rsa",
"rust-embed",
"seahash",
"serde 1.0.125",
"serde_json 1.0.64",
"serde_path_to_error",
"similar",
"simplelog",
"smallvec",
"smol",
"sum_tree",
"surf",
"tempdir",
"theme",
"theme_selector",
"thiserror",
"time 0.3.2",
"tiny_http",
@@ -5953,7 +6208,8 @@ dependencies = [
"tree-sitter-rust",
"unindent",
"url",
"zrpc",
"util",
"workspace",
]
[[package]]
@@ -5982,6 +6238,7 @@ dependencies = [
"parking_lot",
"postage",
"rand 0.8.3",
"rpc",
"rust-embed",
"scrypt",
"serde 1.0.125",
@@ -5994,7 +6251,6 @@ dependencies = [
"time 0.2.25",
"toml 0.5.8",
"zed",
"zrpc",
]
[[package]]
@@ -6019,22 +6275,30 @@ dependencies = [
]
[[package]]
name = "zrpc"
version = "0.1.0"
name = "zstd"
version = "0.9.0+zstd.1.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "07749a5dc2cb6b36661290245e350f15ec3bbb304e493db54a1d354480522ccd"
dependencies = [
"anyhow",
"async-lock",
"async-tungstenite",
"base64 0.13.0",
"futures",
"log",
"parking_lot",
"postage",
"prost 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)",
"prost-build",
"rand 0.8.3",
"rsa",
"serde 1.0.125",
"smol",
"tempdir",
"zstd-safe",
]
[[package]]
name = "zstd-safe"
version = "4.1.1+zstd.1.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c91c90f2c593b003603e5e0493c837088df4469da25aafff8bce42ba48caf079"
dependencies = [
"libc",
"zstd-sys",
]
[[package]]
name = "zstd-sys"
version = "1.6.1+zstd.1.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "615120c7a2431d16cf1cf979e7fc31ba7a5b5e5707b29c8a99e5dbf8a8392a33"
dependencies = [
"cc",
"libc",
]

View File

@@ -1,6 +1,6 @@
[workspace]
members = ["fsevent", "gpui", "gpui_macros", "server", "zed", "zrpc"]
default-members = ["zed"]
members = ["crates/*"]
default-members = ["crates/zed"]
[patch.crates-io]
async-task = { git = "https://github.com/zed-industries/async-task", rev = "341b57d6de98cdfd7b418567b8de2022ca993a6e" }

View File

@@ -1,12 +1,12 @@
# syntax = docker/dockerfile:1.2
FROM rust as builder
FROM rust:1.55-bullseye as builder
WORKDIR app
RUN --mount=type=cache,target=/usr/local/cargo/registry \
--mount=type=cache,target=./target \
cargo install sqlx-cli --root=/app --target-dir=/app/target --version 0.5.7
FROM debian:buster-slim as runtime
FROM debian:bullseye-slim as runtime
RUN apt-get update; \
apt-get install -y --no-install-recommends libssl1.1
WORKDIR app

32
crates/buffer/Cargo.toml Normal file
View File

@@ -0,0 +1,32 @@
[package]
name = "buffer"
version = "0.1.0"
edition = "2018"
[features]
test-support = ["rand"]
[dependencies]
clock = { path = "../clock" }
gpui = { path = "../gpui" }
rpc = { path = "../rpc" }
sum_tree = { path = "../sum_tree" }
theme = { path = "../theme" }
anyhow = "1.0.38"
arrayvec = "0.7.1"
lazy_static = "1.4"
log = "0.4"
parking_lot = "0.11.1"
rand = { version = "0.8.3", optional = true }
seahash = "4.1"
serde = { version = "1", features = ["derive"] }
similar = "1.3"
smallvec = { version = "1.6", features = ["union"] }
tree-sitter = "0.19.5"
[dev-dependencies]
gpui = { path = "../gpui", features = ["test-support"] }
rand = "0.8.3"
tree-sitter-rust = "0.19.0"
unindent = "0.1.7"

View File

@@ -1,13 +1,13 @@
use super::{Buffer, Content};
use crate::{time, util::Bias};
use anyhow::Result;
use std::{cmp::Ordering, ops::Range};
use sum_tree::Bias;
#[derive(Clone, Eq, PartialEq, Debug, Hash)]
pub struct Anchor {
pub offset: usize,
pub bias: Bias,
pub version: time::Global,
pub version: clock::Global,
}
impl Anchor {

View File

@@ -1,5 +1,6 @@
use super::SyntaxTheme;
use gpui::fonts::HighlightStyle;
use std::sync::Arc;
use theme::SyntaxTheme;
#[derive(Clone, Debug)]
pub struct HighlightMap(Arc<[HighlightId]>);
@@ -49,6 +50,20 @@ impl HighlightMap {
}
}
impl HighlightId {
pub fn style(&self, theme: &SyntaxTheme) -> Option<HighlightStyle> {
theme
.highlights
.get(self.0 as usize)
.map(|entry| entry.1.clone())
}
#[cfg(any(test, feature = "test-support"))]
pub fn name<'a>(&self, theme: &'a SyntaxTheme) -> Option<&'a str> {
theme.highlights.get(self.0 as usize).map(|e| e.0.as_str())
}
}
impl Default for HighlightMap {
fn default() -> Self {
Self(Arc::new([]))
@@ -89,8 +104,8 @@ mod tests {
];
let map = HighlightMap::new(capture_names, &theme);
assert_eq!(theme.highlight_name(map.get(0)), Some("function"));
assert_eq!(theme.highlight_name(map.get(1)), Some("function.async"));
assert_eq!(theme.highlight_name(map.get(2)), Some("variable.builtin"));
assert_eq!(map.get(0).name(&theme), Some("function"));
assert_eq!(map.get(1).name(&theme), Some("function.async"));
assert_eq!(map.get(2).name(&theme), Some("variable.builtin"));
}
}

View File

@@ -1,65 +1,45 @@
use crate::{settings::HighlightMap, theme::SyntaxTheme};
use crate::HighlightMap;
use anyhow::Result;
use parking_lot::Mutex;
use rust_embed::RustEmbed;
use serde::Deserialize;
use std::{path::Path, str, sync::Arc};
use theme::SyntaxTheme;
use tree_sitter::{Language as Grammar, Query};
pub use tree_sitter::{Parser, Tree};
#[derive(RustEmbed)]
#[folder = "languages"]
pub struct LanguageDir;
#[derive(Default, Deserialize)]
pub struct LanguageConfig {
pub name: String,
pub path_suffixes: Vec<String>,
pub autoclose_pairs: Vec<AutoclosePair>,
}
#[derive(Deserialize)]
pub struct BracketPair {
#[derive(Clone, Deserialize)]
pub struct AutoclosePair {
pub start: String,
pub end: String,
}
pub struct Language {
pub config: LanguageConfig,
pub grammar: Grammar,
pub highlight_query: Query,
pub brackets_query: Query,
pub highlight_map: Mutex<HighlightMap>,
pub(crate) config: LanguageConfig,
pub(crate) grammar: Grammar,
pub(crate) highlight_query: Query,
pub(crate) brackets_query: Query,
pub(crate) highlight_map: Mutex<HighlightMap>,
}
#[derive(Default)]
pub struct LanguageRegistry {
languages: Vec<Arc<Language>>,
}
impl Language {
pub fn highlight_map(&self) -> HighlightMap {
self.highlight_map.lock().clone()
}
pub fn set_theme(&self, theme: &SyntaxTheme) {
*self.highlight_map.lock() = HighlightMap::new(self.highlight_query.capture_names(), theme);
}
}
impl LanguageRegistry {
pub fn new() -> Self {
let grammar = tree_sitter_rust::language();
let rust_config =
toml::from_slice(&LanguageDir::get("rust/config.toml").unwrap().data).unwrap();
let rust_language = Language {
config: rust_config,
grammar,
highlight_query: Self::load_query(grammar, "rust/highlights.scm"),
brackets_query: Self::load_query(grammar, "rust/brackets.scm"),
highlight_map: Mutex::new(HighlightMap::default()),
};
Self::default()
}
Self {
languages: vec![Arc::new(rust_language)],
}
pub fn add(&mut self, language: Arc<Language>) {
self.languages.push(language);
}
pub fn set_theme(&self, theme: &SyntaxTheme) {
@@ -81,19 +61,43 @@ impl LanguageRegistry {
.any(|suffix| path_suffixes.contains(&Some(suffix.as_str())))
})
}
fn load_query(grammar: tree_sitter::Language, path: &str) -> Query {
Query::new(
grammar,
str::from_utf8(&LanguageDir::get(path).unwrap().data).unwrap(),
)
.unwrap()
}
}
impl Default for LanguageRegistry {
fn default() -> Self {
Self::new()
impl Language {
pub fn new(config: LanguageConfig, grammar: Grammar) -> Self {
Self {
config,
brackets_query: Query::new(grammar, "").unwrap(),
highlight_query: Query::new(grammar, "").unwrap(),
grammar,
highlight_map: Default::default(),
}
}
pub fn with_highlights_query(mut self, highlights_query_source: &str) -> Result<Self> {
self.highlight_query = Query::new(self.grammar, highlights_query_source)?;
Ok(self)
}
pub fn with_brackets_query(mut self, brackets_query_source: &str) -> Result<Self> {
self.brackets_query = Query::new(self.grammar, brackets_query_source)?;
Ok(self)
}
pub fn name(&self) -> &str {
self.config.name.as_str()
}
pub fn autoclose_pairs(&self) -> &[AutoclosePair] {
&self.config.autoclose_pairs
}
pub fn highlight_map(&self) -> HighlightMap {
self.highlight_map.lock().clone()
}
pub fn set_theme(&self, theme: &SyntaxTheme) {
*self.highlight_map.lock() = HighlightMap::new(self.highlight_query.capture_names(), theme);
}
}
@@ -133,27 +137,26 @@ mod tests {
// matching file extension
assert_eq!(
registry.select_language("zed/lib.rs").map(get_name),
registry.select_language("zed/lib.rs").map(|l| l.name()),
Some("Rust")
);
assert_eq!(
registry.select_language("zed/lib.mk").map(get_name),
registry.select_language("zed/lib.mk").map(|l| l.name()),
Some("Make")
);
// matching filename
assert_eq!(
registry.select_language("zed/Makefile").map(get_name),
registry.select_language("zed/Makefile").map(|l| l.name()),
Some("Make")
);
// matching suffix that is not the full file extension or filename
assert_eq!(registry.select_language("zed/cars").map(get_name), None);
assert_eq!(registry.select_language("zed/a.cars").map(get_name), None);
assert_eq!(registry.select_language("zed/sumk").map(get_name), None);
fn get_name(language: &Arc<Language>) -> &str {
language.config.name.as_str()
}
assert_eq!(registry.select_language("zed/cars").map(|l| l.name()), None);
assert_eq!(
registry.select_language("zed/a.cars").map(|l| l.name()),
None
);
assert_eq!(registry.select_language("zed/sumk").map(|l| l.name()), None);
}
}

File diff suppressed because it is too large

View File

@@ -1,13 +1,12 @@
use super::Operation;
use crate::time;
use gpui::sum_tree::{Cursor, Dimension, Edit, Item, KeyedItem, SumTree, Summary};
use std::{fmt::Debug, ops::Add};
use sum_tree::{Cursor, Dimension, Edit, Item, KeyedItem, SumTree, Summary};
#[derive(Clone, Debug)]
pub struct OperationQueue(SumTree<Operation>);
#[derive(Clone, Copy, Debug, Default, Eq, Ord, PartialEq, PartialOrd)]
pub struct OperationKey(time::Lamport);
pub struct OperationKey(clock::Lamport);
#[derive(Clone, Copy, Debug, Default, Eq, PartialEq)]
pub struct OperationSummary {
@@ -16,7 +15,7 @@ pub struct OperationSummary {
}
impl OperationKey {
pub fn new(timestamp: time::Lamport) -> Self {
pub fn new(timestamp: clock::Lamport) -> Self {
Self(timestamp)
}
}
@@ -43,7 +42,7 @@ impl OperationQueue {
clone
}
pub fn cursor(&self) -> Cursor<Operation, (), ()> {
pub fn cursor(&self) -> Cursor<Operation, ()> {
self.0.cursor()
}
}
@@ -102,7 +101,7 @@ mod tests {
#[test]
fn test_len() {
let mut clock = time::Lamport::new(0);
let mut clock = clock::Lamport::new(0);
let mut queue = OperationQueue::new();
assert_eq!(queue.len(), 0);
@@ -124,5 +123,5 @@ mod tests {
}
#[derive(Clone, Debug, Eq, PartialEq)]
struct TestOperation(time::Lamport);
struct TestOperation(clock::Lamport);
}

View File

@@ -0,0 +1,28 @@
use rand::prelude::*;
pub struct RandomCharIter<T: Rng>(T);
impl<T: Rng> RandomCharIter<T> {
pub fn new(rng: T) -> Self {
Self(rng)
}
}
impl<T: Rng> Iterator for RandomCharIter<T> {
type Item = char;
fn next(&mut self) -> Option<Self::Item> {
match self.0.gen_range(0..100) {
// whitespace
0..=19 => [' ', '\n', '\t'].choose(&mut self.0).copied(),
// two-byte greek letters
20..=32 => char::from_u32(self.0.gen_range(('α' as u32)..('ω' as u32 + 1))),
// three-byte characters
33..=45 => ['✋', '✅', '❌', '❎', '⭐'].choose(&mut self.0).copied(),
// four-byte characters
46..=58 => ['🍐', '🏀', '🍗', '🎉'].choose(&mut self.0).copied(),
// ascii letters
_ => Some(self.0.gen_range(b'a'..b'z' + 1).into()),
}
}
}

View File

@@ -1,9 +1,8 @@
use super::Point;
use crate::util::Bias;
use arrayvec::ArrayString;
use gpui::sum_tree::{self, SumTree};
use smallvec::SmallVec;
use std::{cmp, ops::Range, str};
use sum_tree::{Bias, SumTree};
#[cfg(test)]
const CHUNK_BASE: usize = 6;
@@ -22,7 +21,7 @@ impl Rope {
}
pub fn append(&mut self, rope: Rope) {
let mut chunks = rope.chunks.cursor::<(), ()>();
let mut chunks = rope.chunks.cursor::<()>();
chunks.next(&());
if let Some(chunk) = chunks.item() {
if self.chunks.last().map_or(false, |c| c.0.len() < CHUNK_BASE)
@@ -83,7 +82,7 @@ impl Rope {
{
// Ensure all chunks except maybe the last one are not underflowing.
// Allow some wiggle room for multibyte characters at chunk boundaries.
let mut chunks = self.chunks.cursor::<(), ()>().peekable();
let mut chunks = self.chunks.cursor::<()>().peekable();
while let Some(chunk) = chunks.next() {
if chunks.peek().is_some() {
assert!(chunk.0.len() + 3 >= CHUNK_BASE);
@@ -116,6 +115,10 @@ impl Rope {
self.chunks_in_range(start..self.len()).flat_map(str::chars)
}
pub fn bytes_at(&self, start: usize) -> impl Iterator<Item = u8> + '_ {
self.chunks_in_range(start..self.len()).flat_map(str::bytes)
}
pub fn chunks<'a>(&'a self) -> Chunks<'a> {
self.chunks_in_range(0..self.len())
}
@@ -126,10 +129,10 @@ impl Rope {
pub fn to_point(&self, offset: usize) -> Point {
assert!(offset <= self.summary().bytes);
let mut cursor = self.chunks.cursor::<usize, Point>();
let mut cursor = self.chunks.cursor::<(usize, Point)>();
cursor.seek(&offset, Bias::Left, &());
let overshoot = offset - cursor.seek_start();
*cursor.sum_start()
let overshoot = offset - cursor.start().0;
cursor.start().1
+ cursor
.item()
.map_or(Point::zero(), |chunk| chunk.to_point(overshoot))
@@ -137,17 +140,17 @@ impl Rope {
pub fn to_offset(&self, point: Point) -> usize {
assert!(point <= self.summary().lines);
let mut cursor = self.chunks.cursor::<Point, usize>();
let mut cursor = self.chunks.cursor::<(Point, usize)>();
cursor.seek(&point, Bias::Left, &());
let overshoot = point - cursor.seek_start();
cursor.sum_start() + cursor.item().map_or(0, |chunk| chunk.to_offset(overshoot))
let overshoot = point - cursor.start().0;
cursor.start().1 + cursor.item().map_or(0, |chunk| chunk.to_offset(overshoot))
}
pub fn clip_offset(&self, mut offset: usize, bias: Bias) -> usize {
let mut cursor = self.chunks.cursor::<usize, ()>();
let mut cursor = self.chunks.cursor::<usize>();
cursor.seek(&offset, Bias::Left, &());
if let Some(chunk) = cursor.item() {
let mut ix = offset - cursor.seek_start();
let mut ix = offset - cursor.start();
while !chunk.0.is_char_boundary(ix) {
match bias {
Bias::Left => {
@@ -167,11 +170,11 @@ impl Rope {
}
pub fn clip_point(&self, point: Point, bias: Bias) -> Point {
let mut cursor = self.chunks.cursor::<Point, ()>();
let mut cursor = self.chunks.cursor::<Point>();
cursor.seek(&point, Bias::Right, &());
if let Some(chunk) = cursor.item() {
let overshoot = point - cursor.seek_start();
*cursor.seek_start() + chunk.clip_point(overshoot, bias)
let overshoot = point - cursor.start();
*cursor.start() + chunk.clip_point(overshoot, bias)
} else {
self.summary().lines
}
@@ -194,7 +197,7 @@ impl Into<String> for Rope {
pub struct Cursor<'a> {
rope: &'a Rope,
chunks: sum_tree::Cursor<'a, Chunk, usize, ()>,
chunks: sum_tree::Cursor<'a, Chunk, usize>,
offset: usize,
}
@@ -226,18 +229,18 @@ impl<'a> Cursor<'a> {
let mut slice = Rope::new();
if let Some(start_chunk) = self.chunks.item() {
let start_ix = self.offset - self.chunks.seek_start();
let end_ix = cmp::min(end_offset, self.chunks.seek_end(&())) - self.chunks.seek_start();
let start_ix = self.offset - self.chunks.start();
let end_ix = cmp::min(end_offset, self.chunks.end(&())) - self.chunks.start();
slice.push(&start_chunk.0[start_ix..end_ix]);
}
if end_offset > self.chunks.seek_end(&()) {
if end_offset > self.chunks.end(&()) {
self.chunks.next(&());
slice.append(Rope {
chunks: self.chunks.slice(&end_offset, Bias::Right, &()),
});
if let Some(end_chunk) = self.chunks.item() {
let end_ix = end_offset - self.chunks.seek_start();
let end_ix = end_offset - self.chunks.start();
slice.push(&end_chunk.0[..end_ix]);
}
}
@@ -251,16 +254,16 @@ impl<'a> Cursor<'a> {
let mut summary = TextSummary::default();
if let Some(start_chunk) = self.chunks.item() {
let start_ix = self.offset - self.chunks.seek_start();
let end_ix = cmp::min(end_offset, self.chunks.seek_end(&())) - self.chunks.seek_start();
let start_ix = self.offset - self.chunks.start();
let end_ix = cmp::min(end_offset, self.chunks.end(&())) - self.chunks.start();
summary = TextSummary::from(&start_chunk.0[start_ix..end_ix]);
}
if end_offset > self.chunks.seek_end(&()) {
if end_offset > self.chunks.end(&()) {
self.chunks.next(&());
summary += &self.chunks.summary(&end_offset, Bias::Right, &());
if let Some(end_chunk) = self.chunks.item() {
let end_ix = end_offset - self.chunks.seek_start();
let end_ix = end_offset - self.chunks.start();
summary += TextSummary::from(&end_chunk.0[..end_ix]);
}
}
@@ -278,7 +281,7 @@ impl<'a> Cursor<'a> {
}
pub struct Chunks<'a> {
chunks: sum_tree::Cursor<'a, Chunk, usize, ()>,
chunks: sum_tree::Cursor<'a, Chunk, usize>,
range: Range<usize>,
}
@@ -290,11 +293,11 @@ impl<'a> Chunks<'a> {
}
pub fn offset(&self) -> usize {
self.range.start.max(*self.chunks.seek_start())
self.range.start.max(*self.chunks.start())
}
pub fn seek(&mut self, offset: usize) {
if offset >= self.chunks.seek_end(&()) {
if offset >= self.chunks.end(&()) {
self.chunks.seek_forward(&offset, Bias::Right, &());
} else {
self.chunks.seek(&offset, Bias::Right, &());
@@ -304,10 +307,10 @@ impl<'a> Chunks<'a> {
pub fn peek(&self) -> Option<&'a str> {
if let Some(chunk) = self.chunks.item() {
let offset = *self.chunks.seek_start();
let offset = *self.chunks.start();
if self.range.end > offset {
let start = self.range.start.saturating_sub(*self.chunks.seek_start());
let end = self.range.end - self.chunks.seek_start();
let start = self.range.start.saturating_sub(*self.chunks.start());
let end = self.range.end - self.chunks.start();
return Some(&chunk.0[start..chunk.0.len().min(end)]);
}
}
@@ -486,12 +489,6 @@ impl std::ops::AddAssign<Self> for TextSummary {
}
}
impl<'a> sum_tree::Dimension<'a, TextSummary> for TextSummary {
fn add_summary(&mut self, summary: &'a TextSummary, _: &()) {
*self += summary;
}
}
impl<'a> sum_tree::Dimension<'a, TextSummary> for usize {
fn add_summary(&mut self, summary: &'a TextSummary, _: &()) {
*self += summary.bytes;
@@ -526,7 +523,7 @@ fn find_split_ix(text: &str) -> usize {
#[cfg(test)]
mod tests {
use super::*;
use crate::util::RandomCharIter;
use crate::random_char_iter::RandomCharIter;
use rand::prelude::*;
use std::env;
use Bias::{Left, Right};
@@ -611,7 +608,7 @@ mod tests {
impl Rope {
fn text(&self) -> String {
let mut text = String::new();
for chunk in self.chunks.cursor::<(), ()>() {
for chunk in self.chunks.cursor::<()>() {
text.push_str(&chunk.0);
}
text

View File

@@ -1,13 +1,7 @@
use crate::{
editor::{
buffer::{Anchor, Buffer, Point, ToOffset as _, ToPoint as _},
Bias, DisplayMapSnapshot, DisplayPoint,
},
time,
};
use crate::{Anchor, Buffer, Point, ToOffset as _, ToPoint as _};
use std::{cmp::Ordering, mem, ops::Range};
pub type SelectionSetId = time::Lamport;
pub type SelectionSetId = clock::Lamport;
pub type SelectionsVersion = usize;
#[derive(Copy, Clone, Debug, Eq, PartialEq)]
@@ -17,11 +11,6 @@ pub enum SelectionGoal {
ColumnRange { start: u32, end: u32 },
}
pub struct SpannedRows {
pub buffer_rows: Range<u32>,
pub display_rows: Range<u32>,
}
#[derive(Clone, Debug, Eq, PartialEq)]
pub struct Selection {
pub id: usize,
@@ -83,38 +72,4 @@ impl Selection {
start..end
}
}
pub fn display_range(&self, map: &DisplayMapSnapshot) -> Range<DisplayPoint> {
let start = self.start.to_display_point(map, Bias::Left);
let end = self.end.to_display_point(map, Bias::Left);
if self.reversed {
end..start
} else {
start..end
}
}
pub fn spanned_rows(
&self,
include_end_if_at_line_start: bool,
map: &DisplayMapSnapshot,
) -> SpannedRows {
let display_start = self.start.to_display_point(map, Bias::Left);
let mut display_end = self.end.to_display_point(map, Bias::Right);
if !include_end_if_at_line_start
&& display_end.row() != map.max_point().row()
&& display_start.row() != display_end.row()
&& display_end.column() == 0
{
*display_end.row_mut() -= 1;
}
let (display_start, buffer_start) = map.prev_row_boundary(display_start);
let (display_end, buffer_end) = map.next_row_boundary(display_end);
SpannedRows {
buffer_rows: buffer_start.row..buffer_end.row + 1,
display_rows: display_start.row()..display_end.row() + 1,
}
}
}

View File

@@ -0,0 +1,14 @@
[package]
name = "chat_panel"
version = "0.1.0"
edition = "2018"
[dependencies]
client = { path = "../client" }
editor = { path = "../editor" }
gpui = { path = "../gpui" }
theme = { path = "../theme" }
util = { path = "../util" }
workspace = { path = "../workspace" }
postage = { version = "0.4.1", features = ["futures-traits"] }
time = "0.3"

View File

@@ -1,13 +1,8 @@
use std::sync::Arc;
use crate::{
use client::{
channel::{Channel, ChannelEvent, ChannelList, ChannelMessage},
editor::Editor,
rpc::{self, Client},
theme,
util::{ResultExt, TryFutureExt},
Settings,
Client,
};
use editor::{Editor, EditorSettings};
use gpui::{
action,
elements::*,
@@ -18,7 +13,10 @@ use gpui::{
ViewContext, ViewHandle,
};
use postage::{prelude::Stream, watch};
use std::sync::Arc;
use time::{OffsetDateTime, UtcOffset};
use util::{ResultExt, TryFutureExt};
use workspace::Settings;
const MESSAGE_LOADING_THRESHOLD: usize = 50;
@@ -56,10 +54,15 @@ impl ChatPanel {
let input_editor = cx.add_view(|cx| {
Editor::auto_height(
4,
settings.clone(),
{
let settings = settings.clone();
move |_| settings.borrow().theme.chat_panel.input_editor.as_editor()
move |_| {
let settings = settings.borrow();
EditorSettings {
tab_size: settings.tab_size,
style: settings.theme.chat_panel.input_editor.as_editor(),
}
}
},
cx,
)
@@ -406,7 +409,10 @@ impl View for ChatPanel {
}
fn on_focus(&mut self, cx: &mut ViewContext<Self>) {
if matches!(*self.rpc.status().borrow(), rpc::Status::Connected { .. }) {
if matches!(
*self.rpc.status().borrow(),
client::Status::Connected { .. }
) {
cx.focus(&self.input_editor);
}
}

crates/client/Cargo.toml Normal file

@@ -0,0 +1,28 @@
[package]
name = "client"
version = "0.1.0"
edition = "2018"
[features]
test-support = ["rpc/test-support"]
[dependencies]
gpui = { path = "../gpui" }
util = { path = "../util" }
rpc = { path = "../rpc" }
sum_tree = { path = "../sum_tree" }
anyhow = "1.0.38"
async-recursion = "0.3"
async-tungstenite = { version = "0.14", features = ["async-tls"] }
futures = "0.3"
image = "0.23"
lazy_static = "1.4.0"
log = "0.4"
parking_lot = "0.11.1"
postage = { version = "0.4.1", features = ["futures-traits"] }
rand = "0.8.3"
smol = "1.2.5"
surf = "2.2"
thiserror = "1.0.29"
time = "0.3"
tiny_http = "0.8"

View File

@@ -1,12 +1,11 @@
use crate::{
rpc::{self, Client},
use super::{
proto,
user::{User, UserStore},
util::{post_inc, TryFutureExt},
Client, Status, Subscription, TypedEnvelope,
};
use anyhow::{anyhow, Context, Result};
use gpui::{
sum_tree::{self, Bias, SumTree},
Entity, ModelContext, ModelHandle, MutableAppContext, Task, WeakModelHandle,
AsyncAppContext, Entity, ModelContext, ModelHandle, MutableAppContext, Task, WeakModelHandle,
};
use postage::prelude::Stream;
use rand::prelude::*;
@@ -16,17 +15,15 @@ use std::{
ops::Range,
sync::Arc,
};
use sum_tree::{Bias, SumTree};
use time::OffsetDateTime;
use zrpc::{
proto::{self, ChannelMessageSent},
TypedEnvelope,
};
use util::{post_inc, TryFutureExt};
pub struct ChannelList {
available_channels: Option<Vec<ChannelDetails>>,
channels: HashMap<u64, WeakModelHandle<Channel>>,
rpc: Arc<Client>,
user_store: Arc<UserStore>,
client: Arc<Client>,
user_store: ModelHandle<UserStore>,
_task: Task<Option<()>>,
}
@@ -41,10 +38,10 @@ pub struct Channel {
messages: SumTree<ChannelMessage>,
loaded_all_messages: bool,
next_pending_message_id: usize,
user_store: Arc<UserStore>,
user_store: ModelHandle<UserStore>,
rpc: Arc<Client>,
rng: StdRng,
_subscription: rpc::Subscription,
_subscription: Subscription,
}
#[derive(Clone, Debug)]
@@ -68,7 +65,7 @@ pub struct ChannelMessageSummary {
count: usize,
}
#[derive(Copy, Clone, Debug, Default)]
#[derive(Copy, Clone, Debug, Default, PartialEq, Eq, PartialOrd, Ord)]
struct Count(usize);
pub enum ChannelListEvent {}
@@ -87,8 +84,8 @@ impl Entity for ChannelList {
impl ChannelList {
pub fn new(
user_store: Arc<UserStore>,
rpc: Arc<rpc::Client>,
user_store: ModelHandle<UserStore>,
rpc: Arc<Client>,
cx: &mut ModelContext<Self>,
) -> Self {
let _task = cx.spawn_weak(|this, mut cx| {
@@ -97,7 +94,7 @@ impl ChannelList {
let mut status = rpc.status();
while let Some((status, this)) = status.recv().await.zip(this.upgrade(&cx)) {
match status {
rpc::Status::Connected { .. } => {
Status::Connected { .. } => {
let response = rpc
.request(proto::GetChannels {})
.await
@@ -121,7 +118,7 @@ impl ChannelList {
cx.notify();
});
}
rpc::Status::SignedOut { .. } => {
Status::SignedOut { .. } => {
this.update(&mut cx, |this, cx| {
this.available_channels = None;
this.channels.clear();
@@ -140,7 +137,7 @@ impl ChannelList {
available_channels: None,
channels: Default::default(),
user_store,
rpc,
client: rpc,
_task,
}
}
@@ -160,8 +157,9 @@ impl ChannelList {
let channels = self.available_channels.as_ref()?;
let details = channels.iter().find(|details| details.id == id)?.clone();
let channel =
cx.add_model(|cx| Channel::new(details, self.user_store.clone(), self.rpc.clone(), cx));
let channel = cx.add_model(|cx| {
Channel::new(details, self.user_store.clone(), self.client.clone(), cx)
});
self.channels.insert(id, channel.downgrade());
Some(channel)
}
@@ -186,11 +184,11 @@ impl Entity for Channel {
impl Channel {
pub fn new(
details: ChannelDetails,
user_store: Arc<UserStore>,
user_store: ModelHandle<UserStore>,
rpc: Arc<Client>,
cx: &mut ModelContext<Self>,
) -> Self {
let _subscription = rpc.subscribe_from_model(details.id, cx, Self::handle_message_sent);
let _subscription = rpc.subscribe_to_entity(details.id, cx, Self::handle_message_sent);
{
let user_store = user_store.clone();
@@ -199,7 +197,8 @@ impl Channel {
cx.spawn(|channel, mut cx| {
async move {
let response = rpc.request(proto::JoinChannel { channel_id }).await?;
let messages = messages_from_proto(response.messages, &user_store).await?;
let messages =
messages_from_proto(response.messages, &user_store, &mut cx).await?;
let loaded_all_messages = response.done;
channel.update(&mut cx, |channel, cx| {
@@ -241,6 +240,7 @@ impl Channel {
let current_user = self
.user_store
.read(cx)
.current_user()
.ok_or_else(|| anyhow!("current_user is not present"))?;
@@ -272,6 +272,7 @@ impl Channel {
let message = ChannelMessage::from_proto(
response.message.ok_or_else(|| anyhow!("invalid message"))?,
&user_store,
&mut cx,
)
.await?;
this.update(&mut cx, |this, cx| {
@@ -301,7 +302,8 @@ impl Channel {
})
.await?;
let loaded_all_messages = response.done;
let messages = messages_from_proto(response.messages, &user_store).await?;
let messages =
messages_from_proto(response.messages, &user_store, &mut cx).await?;
this.update(&mut cx, |this, cx| {
this.loaded_all_messages = loaded_all_messages;
this.insert_messages(messages, cx);
@@ -324,7 +326,7 @@ impl Channel {
cx.spawn(|this, mut cx| {
async move {
let response = rpc.request(proto::JoinChannel { channel_id }).await?;
let messages = messages_from_proto(response.messages, &user_store).await?;
let messages = messages_from_proto(response.messages, &user_store, &mut cx).await?;
let loaded_all_messages = response.done;
let pending_messages = this.update(&mut cx, |this, cx| {
@@ -359,6 +361,7 @@ impl Channel {
let message = ChannelMessage::from_proto(
response.message.ok_or_else(|| anyhow!("invalid message"))?,
&user_store,
&mut cx,
)
.await?;
this.update(&mut cx, |this, cx| {
@@ -382,27 +385,27 @@ impl Channel {
}
pub fn message(&self, ix: usize) -> &ChannelMessage {
let mut cursor = self.messages.cursor::<Count, ()>();
let mut cursor = self.messages.cursor::<Count>();
cursor.seek(&Count(ix), Bias::Right, &());
cursor.item().unwrap()
}
pub fn messages_in_range(&self, range: Range<usize>) -> impl Iterator<Item = &ChannelMessage> {
let mut cursor = self.messages.cursor::<Count, ()>();
let mut cursor = self.messages.cursor::<Count>();
cursor.seek(&Count(range.start), Bias::Right, &());
cursor.take(range.len())
}
pub fn pending_messages(&self) -> impl Iterator<Item = &ChannelMessage> {
let mut cursor = self.messages.cursor::<ChannelMessageId, ()>();
let mut cursor = self.messages.cursor::<ChannelMessageId>();
cursor.seek(&ChannelMessageId::Pending(0), Bias::Left, &());
cursor
}
fn handle_message_sent(
&mut self,
message: TypedEnvelope<ChannelMessageSent>,
_: Arc<rpc::Client>,
message: TypedEnvelope<proto::ChannelMessageSent>,
_: Arc<Client>,
cx: &mut ModelContext<Self>,
) -> Result<()> {
let user_store = self.user_store.clone();
@@ -413,7 +416,7 @@ impl Channel {
cx.spawn(|this, mut cx| {
async move {
let message = ChannelMessage::from_proto(message, &user_store).await?;
let message = ChannelMessage::from_proto(message, &user_store, &mut cx).await?;
this.update(&mut cx, |this, cx| {
this.insert_messages(SumTree::from_item(message, &()), cx)
});
@@ -428,13 +431,13 @@ impl Channel {
fn insert_messages(&mut self, messages: SumTree<ChannelMessage>, cx: &mut ModelContext<Self>) {
if let Some((first_message, last_message)) = messages.first().zip(messages.last()) {
let nonces = messages
.cursor::<(), ()>()
.cursor::<()>()
.map(|m| m.nonce)
.collect::<HashSet<_>>();
let mut old_cursor = self.messages.cursor::<ChannelMessageId, Count>();
let mut old_cursor = self.messages.cursor::<(ChannelMessageId, Count)>();
let mut new_messages = old_cursor.slice(&first_message.id, Bias::Left, &());
let start_ix = old_cursor.sum_start().0;
let start_ix = old_cursor.start().1 .0;
let removed_messages = old_cursor.slice(&last_message.id, Bias::Right, &());
let removed_count = removed_messages.summary().count;
let new_count = messages.summary().count;
@@ -452,7 +455,7 @@ impl Channel {
);
while let Some(message) = old_cursor.item() {
let message_ix = old_cursor.sum_start().0;
let message_ix = old_cursor.start().1 .0;
if nonces.contains(&message.nonce) {
if ranges.last().map_or(false, |r| r.end == message_ix) {
ranges.last_mut().unwrap().end += 1;
@@ -486,7 +489,8 @@ impl Channel {
async fn messages_from_proto(
proto_messages: Vec<proto::ChannelMessage>,
user_store: &UserStore,
user_store: &ModelHandle<UserStore>,
cx: &mut AsyncAppContext,
) -> Result<SumTree<ChannelMessage>> {
let unique_user_ids = proto_messages
.iter()
@@ -494,11 +498,15 @@ async fn messages_from_proto(
.collect::<HashSet<_>>()
.into_iter()
.collect();
user_store.load_users(unique_user_ids).await?;
user_store
.update(cx, |user_store, cx| {
user_store.load_users(unique_user_ids, cx)
})
.await?;
let mut messages = Vec::with_capacity(proto_messages.len());
for message in proto_messages {
messages.push(ChannelMessage::from_proto(message, &user_store).await?);
messages.push(ChannelMessage::from_proto(message, user_store, cx).await?);
}
let mut result = SumTree::new();
result.extend(messages, &());
@@ -517,9 +525,14 @@ impl From<proto::Channel> for ChannelDetails {
impl ChannelMessage {
pub async fn from_proto(
message: proto::ChannelMessage,
user_store: &UserStore,
user_store: &ModelHandle<UserStore>,
cx: &mut AsyncAppContext,
) -> Result<Self> {
let sender = user_store.fetch_user(message.sender_id).await?;
let sender = user_store
.update(cx, |user_store, cx| {
user_store.fetch_user(message.sender_id, cx)
})
.await?;
Ok(ChannelMessage {
id: ChannelMessageId::Saved(message.id),
body: message.body,
@@ -576,12 +589,6 @@ impl<'a> sum_tree::Dimension<'a, ChannelMessageSummary> for Count {
}
}
impl<'a> sum_tree::SeekDimension<'a, ChannelMessageSummary> for Count {
fn cmp(&self, other: &Self, _: &()) -> std::cmp::Ordering {
Ord::cmp(&self.0, &other.0)
}
}
#[cfg(test)]
mod tests {
use super::*;
@@ -595,26 +602,11 @@ mod tests {
let mut client = Client::new();
let http_client = FakeHttpClient::new(|_| async move { Ok(Response::new(404)) });
let server = FakeServer::for_client(user_id, &mut client, &cx).await;
let user_store = UserStore::new(client.clone(), http_client, cx.background().as_ref());
let user_store = cx.add_model(|cx| UserStore::new(client.clone(), http_client, cx));
let channel_list = cx.add_model(|cx| ChannelList::new(user_store, client.clone(), cx));
channel_list.read_with(&cx, |list, _| assert_eq!(list.available_channels(), None));
let get_users = server.receive::<proto::GetUsers>().await.unwrap();
assert_eq!(get_users.payload.user_ids, vec![5]);
server
.respond(
get_users.receipt(),
proto::GetUsersResponse {
users: vec![proto::User {
id: 5,
github_login: "nathansobo".into(),
avatar_url: "http://avatar.com/nathansobo".into(),
}],
},
)
.await;
// Get the available channels.
let get_channels = server.receive::<proto::GetChannels>().await.unwrap();
server
@@ -639,6 +631,21 @@ mod tests {
)
});
let get_users = server.receive::<proto::GetUsers>().await.unwrap();
assert_eq!(get_users.payload.user_ids, vec![5]);
server
.respond(
get_users.receipt(),
proto::GetUsersResponse {
users: vec![proto::User {
id: 5,
github_login: "nathansobo".into(),
avatar_url: "http://avatar.com/nathansobo".into(),
}],
},
)
.await;
// Join a channel and populate its existing messages.
let channel = channel_list
.update(&mut cx, |list, cx| {

View File

@@ -1,34 +1,55 @@
use crate::util::ResultExt;
#[cfg(any(test, feature = "test-support"))]
pub mod test;
pub mod channel;
pub mod http;
pub mod user;
use anyhow::{anyhow, Context, Result};
use async_recursion::async_recursion;
use async_tungstenite::tungstenite::{
error::Error as WebsocketError,
http::{Request, StatusCode},
};
use gpui::{AsyncAppContext, Entity, ModelContext, Task};
use gpui::{action, AsyncAppContext, Entity, ModelContext, MutableAppContext, Task};
use lazy_static::lazy_static;
use parking_lot::RwLock;
use postage::{prelude::Stream, watch};
use rand::prelude::*;
use rpc::proto::{AnyTypedEnvelope, EntityMessage, EnvelopedMessage, RequestMessage};
use std::{
any::TypeId,
collections::HashMap,
convert::TryFrom,
fmt::Write as _,
future::Future,
sync::{Arc, Weak},
time::{Duration, Instant},
};
use surf::Url;
use thiserror::Error;
pub use zrpc::{proto, ConnectionId, PeerId, TypedEnvelope};
use zrpc::{
proto::{AnyTypedEnvelope, EntityMessage, EnvelopedMessage, RequestMessage},
Connection, Peer, Receipt,
};
use util::{ResultExt, TryFutureExt};
pub use channel::*;
pub use rpc::*;
pub use user::*;
lazy_static! {
static ref ZED_SERVER_URL: String =
std::env::var("ZED_SERVER_URL").unwrap_or("https://zed.dev:443".to_string());
static ref IMPERSONATE_LOGIN: Option<String> = std::env::var("ZED_IMPERSONATE")
.ok()
.and_then(|s| if s.is_empty() { None } else { Some(s) });
}
action!(Authenticate);
pub fn init(rpc: Arc<Client>, cx: &mut MutableAppContext) {
cx.add_global_action(move |_: &Authenticate, cx| {
let rpc = rpc.clone();
cx.spawn(|cx| async move { rpc.authenticate_and_connect(&cx).log_err().await })
.detach();
});
}
pub struct Client {
@@ -51,6 +72,8 @@ pub struct Client {
#[derive(Error, Debug)]
pub enum EstablishConnectionError {
#[error("upgrade required")]
UpgradeRequired,
#[error("unauthorized")]
Unauthorized,
#[error("{0}")]
@@ -64,8 +87,10 @@ pub enum EstablishConnectionError {
impl From<WebsocketError> for EstablishConnectionError {
fn from(error: WebsocketError) -> Self {
if let WebsocketError::Http(response) = &error {
if response.status() == StatusCode::UNAUTHORIZED {
return EstablishConnectionError::Unauthorized;
match response.status() {
StatusCode::UNAUTHORIZED => return EstablishConnectionError::Unauthorized,
StatusCode::UPGRADE_REQUIRED => return EstablishConnectionError::UpgradeRequired,
_ => {}
}
}
EstablishConnectionError::Other(error.into())
@@ -81,6 +106,7 @@ impl EstablishConnectionError {
#[derive(Copy, Clone, Debug)]
pub enum Status {
SignedOut,
UpgradeRequired,
Authenticating,
Connecting,
ConnectionError,
@@ -223,14 +249,58 @@ impl Client {
}
}));
}
Status::SignedOut => {
Status::SignedOut | Status::UpgradeRequired => {
state._maintain_connection.take();
}
_ => {}
}
}
pub fn subscribe_from_model<T, M, F>(
pub fn subscribe<T, M, F>(
self: &Arc<Self>,
cx: &mut ModelContext<M>,
mut handler: F,
) -> Subscription
where
T: EnvelopedMessage,
M: Entity,
F: 'static
+ Send
+ Sync
+ FnMut(&mut M, TypedEnvelope<T>, Arc<Self>, &mut ModelContext<M>) -> Result<()>,
{
let subscription_id = (TypeId::of::<T>(), Default::default());
let client = self.clone();
let mut state = self.state.write();
let model = cx.handle().downgrade();
let prev_extractor = state
.entity_id_extractors
.insert(subscription_id.0, Box::new(|_| Default::default()));
if prev_extractor.is_some() {
panic!("registered a handler for the same entity twice")
}
state.model_handlers.insert(
subscription_id,
Box::new(move |envelope, cx| {
if let Some(model) = model.upgrade(cx) {
let envelope = envelope.into_any().downcast::<TypedEnvelope<T>>().unwrap();
model.update(cx, |model, cx| {
if let Err(error) = handler(model, *envelope, client.clone(), cx) {
log::error!("error handling message: {}", error)
}
});
}
}),
);
Subscription {
client: Arc::downgrade(self),
id: subscription_id,
}
}
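// Illustrative sketch, not part of the diff: subscribe() registers a handler
// keyed only by the message type, while subscribe_to_entity() below also
// routes on a remote entity id. The pattern mirrors how UserStore consumes
// UpdateCollaborators elsewhere in this change; the generic model type here is
// only an example.
fn subscribe_sketch<M: Entity>(client: &Arc<Client>, cx: &mut ModelContext<M>) -> Subscription {
    client.subscribe(
        cx,
        |_: &mut M, envelope: TypedEnvelope<proto::UpdateCollaborators>, _, _| {
            log::info!("{} collaborators", envelope.payload.collaborators.len());
            Ok(())
        },
    )
}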
pub fn subscribe_to_entity<T, M, F>(
self: &Arc<Self>,
remote_id: u64,
cx: &mut ModelContext<M>,
@@ -298,6 +368,7 @@ impl Client {
| Status::Reconnecting { .. }
| Status::Authenticating
| Status::Reauthenticating => return Ok(()),
Status::UpgradeRequired => return Err(EstablishConnectionError::UpgradeRequired)?,
};
if was_disconnected {
@@ -306,12 +377,12 @@ impl Client {
self.set_status(Status::Reauthenticating, cx)
}
let mut read_from_keychain = false;
let mut used_keychain = false;
let credentials = self.state.read().credentials.clone();
let credentials = if let Some(credentials) = credentials {
credentials
} else if let Some(credentials) = read_credentials_from_keychain(cx) {
read_from_keychain = true;
used_keychain = true;
credentials
} else {
let credentials = match self.authenticate(&cx).await {
@@ -334,28 +405,31 @@ impl Client {
Ok(conn) => {
log::info!("connected to rpc address {}", *ZED_SERVER_URL);
self.state.write().credentials = Some(credentials.clone());
if !read_from_keychain {
if !used_keychain && IMPERSONATE_LOGIN.is_none() {
write_credentials_to_keychain(&credentials, cx).log_err();
}
self.set_connection(conn, cx).await;
Ok(())
}
Err(err) => {
if matches!(err, EstablishConnectionError::Unauthorized) {
self.state.write().credentials.take();
Err(EstablishConnectionError::Unauthorized) => {
self.state.write().credentials.take();
if used_keychain {
cx.platform().delete_credentials(&ZED_SERVER_URL).log_err();
if read_from_keychain {
self.set_status(Status::SignedOut, cx);
self.authenticate_and_connect(cx).await
} else {
self.set_status(Status::ConnectionError, cx);
Err(err)?
}
self.set_status(Status::SignedOut, cx);
self.authenticate_and_connect(cx).await
} else {
self.set_status(Status::ConnectionError, cx);
Err(err)?
Err(EstablishConnectionError::Unauthorized)?
}
}
Err(EstablishConnectionError::UpgradeRequired) => {
self.set_status(Status::UpgradeRequired, cx);
Err(EstablishConnectionError::UpgradeRequired)?
}
Err(error) => {
self.set_status(Status::ConnectionError, cx);
Err(error)?
}
}
}
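// Descriptive note, not part of the diff: in the error handling above,
// Unauthorized clears the cached credentials; if they came from the keychain
// it also deletes them there and retries authentication, otherwise it reports
// a connection error. UpgradeRequired moves the client into the new
// Status::UpgradeRequired state, which also halts the reconnect loop, and any
// other failure sets Status::ConnectionError.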
@@ -441,10 +515,12 @@ impl Client {
credentials: &Credentials,
cx: &AsyncAppContext,
) -> Task<Result<Connection, EstablishConnectionError>> {
let request = Request::builder().header(
"Authorization",
format!("{} {}", credentials.user_id, credentials.access_token),
);
let request = Request::builder()
.header(
"Authorization",
format!("{} {}", credentials.user_id, credentials.access_token),
)
.header("X-Zed-Protocol-Version", rpc::PROTOCOL_VERSION);
cx.background().spawn(async move {
if let Some(host) = ZED_SERVER_URL.strip_prefix("https://") {
let stream = smol::net::TcpStream::connect(host).await?;
@@ -474,7 +550,7 @@ impl Client {
// zed server to encrypt the user's access token, so that it can't be intercepted by
// any other app running on the user's device.
let (public_key, private_key) =
zrpc::auth::keypair().expect("failed to generate keypair for auth");
rpc::auth::keypair().expect("failed to generate keypair for auth");
let public_key_string =
String::try_from(public_key).expect("failed to serialize public key for auth");
@@ -484,10 +560,17 @@ impl Client {
// Open the Zed sign-in page in the user's browser, with query parameters that indicate
// that the user is signing in from a Zed app running on the same device.
platform.open_url(&format!(
let mut url = format!(
"{}/sign_in?native_app_port={}&native_app_public_key={}",
*ZED_SERVER_URL, port, public_key_string
));
);
if let Some(impersonate_login) = IMPERSONATE_LOGIN.as_ref() {
log::info!("impersonating user @{}", impersonate_login);
write!(&mut url, "&impersonate={}", impersonate_login).unwrap();
}
platform.open_url(&url);
// Receive the HTTP request from the user's browser. Retrieve the user id and encrypted
// access token from the query params.
@@ -571,6 +654,10 @@ impl Client {
}
fn read_credentials_from_keychain(cx: &AsyncAppContext) -> Option<Credentials> {
if IMPERSONATE_LOGIN.is_some() {
return None;
}
let (user_id, access_token) = cx
.platform()
.read_credentials(&ZED_SERVER_URL)

crates/client/src/test.rs Normal file

@@ -0,0 +1,188 @@
use super::Client;
use super::*;
use crate::http::{HttpClient, Request, Response, ServerResponse};
use futures::{future::BoxFuture, Future};
use gpui::TestAppContext;
use parking_lot::Mutex;
use postage::{mpsc, prelude::Stream};
use rpc::{proto, ConnectionId, Peer, Receipt, TypedEnvelope};
use std::fmt;
use std::sync::atomic::Ordering::SeqCst;
use std::sync::{
atomic::{AtomicBool, AtomicUsize},
Arc,
};
pub struct FakeServer {
peer: Arc<Peer>,
incoming: Mutex<Option<mpsc::Receiver<Box<dyn proto::AnyTypedEnvelope>>>>,
connection_id: Mutex<Option<ConnectionId>>,
forbid_connections: AtomicBool,
auth_count: AtomicUsize,
access_token: AtomicUsize,
user_id: u64,
}
impl FakeServer {
pub async fn for_client(
client_user_id: u64,
client: &mut Arc<Client>,
cx: &TestAppContext,
) -> Arc<Self> {
let server = Arc::new(Self {
peer: Peer::new(),
incoming: Default::default(),
connection_id: Default::default(),
forbid_connections: Default::default(),
auth_count: Default::default(),
access_token: Default::default(),
user_id: client_user_id,
});
Arc::get_mut(client)
.unwrap()
.override_authenticate({
let server = server.clone();
move |cx| {
server.auth_count.fetch_add(1, SeqCst);
let access_token = server.access_token.load(SeqCst).to_string();
cx.spawn(move |_| async move {
Ok(Credentials {
user_id: client_user_id,
access_token,
})
})
}
})
.override_establish_connection({
let server = server.clone();
move |credentials, cx| {
let credentials = credentials.clone();
cx.spawn({
let server = server.clone();
move |cx| async move { server.establish_connection(&credentials, &cx).await }
})
}
});
client
.authenticate_and_connect(&cx.to_async())
.await
.unwrap();
server
}
pub async fn disconnect(&self) {
self.peer.disconnect(self.connection_id()).await;
self.connection_id.lock().take();
self.incoming.lock().take();
}
async fn establish_connection(
&self,
credentials: &Credentials,
cx: &AsyncAppContext,
) -> Result<Connection, EstablishConnectionError> {
assert_eq!(credentials.user_id, self.user_id);
if self.forbid_connections.load(SeqCst) {
Err(EstablishConnectionError::Other(anyhow!(
"server is forbidding connections"
)))?
}
if credentials.access_token != self.access_token.load(SeqCst).to_string() {
Err(EstablishConnectionError::Unauthorized)?
}
let (client_conn, server_conn, _) = Connection::in_memory();
let (connection_id, io, incoming) = self.peer.add_connection(server_conn).await;
cx.background().spawn(io).detach();
*self.incoming.lock() = Some(incoming);
*self.connection_id.lock() = Some(connection_id);
Ok(client_conn)
}
pub fn auth_count(&self) -> usize {
self.auth_count.load(SeqCst)
}
pub fn roll_access_token(&self) {
self.access_token.fetch_add(1, SeqCst);
}
pub fn forbid_connections(&self) {
self.forbid_connections.store(true, SeqCst);
}
pub fn allow_connections(&self) {
self.forbid_connections.store(false, SeqCst);
}
pub async fn send<T: proto::EnvelopedMessage>(&self, message: T) {
self.peer.send(self.connection_id(), message).await.unwrap();
}
pub async fn receive<M: proto::EnvelopedMessage>(&self) -> Result<TypedEnvelope<M>> {
let message = self
.incoming
.lock()
.as_mut()
.expect("not connected")
.recv()
.await
.ok_or_else(|| anyhow!("other half hung up"))?;
let type_name = message.payload_type_name();
Ok(*message
.into_any()
.downcast::<TypedEnvelope<M>>()
.unwrap_or_else(|_| {
panic!(
"fake server received unexpected message type: {:?}",
type_name
);
}))
}
pub async fn respond<T: proto::RequestMessage>(
&self,
receipt: Receipt<T>,
response: T::Response,
) {
self.peer.respond(receipt, response).await.unwrap()
}
fn connection_id(&self) -> ConnectionId {
self.connection_id.lock().expect("not connected")
}
}
pub struct FakeHttpClient {
handler:
Box<dyn 'static + Send + Sync + Fn(Request) -> BoxFuture<'static, Result<ServerResponse>>>,
}
impl FakeHttpClient {
pub fn new<Fut, F>(handler: F) -> Arc<dyn HttpClient>
where
Fut: 'static + Send + Future<Output = Result<ServerResponse>>,
F: 'static + Send + Sync + Fn(Request) -> Fut,
{
Arc::new(Self {
handler: Box::new(move |req| Box::pin(handler(req))),
})
}
}
impl fmt::Debug for FakeHttpClient {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.debug_struct("FakeHttpClient").finish()
}
}
impl HttpClient for FakeHttpClient {
fn send<'a>(&'a self, req: Request) -> BoxFuture<'a, Result<Response>> {
let future = (self.handler)(req);
Box::pin(async move { future.await.map(Into::into) })
}
}

crates/client/src/user.rs Normal file

@@ -0,0 +1,268 @@
use super::{
http::{HttpClient, Method, Request, Url},
proto, Client, Status, TypedEnvelope,
};
use anyhow::{anyhow, Context, Result};
use futures::future;
use gpui::{AsyncAppContext, Entity, ImageData, ModelContext, ModelHandle, Task};
use postage::{prelude::Stream, sink::Sink, watch};
use std::{
collections::{HashMap, HashSet},
sync::Arc,
};
use util::TryFutureExt as _;
#[derive(Debug)]
pub struct User {
pub id: u64,
pub github_login: String,
pub avatar: Option<Arc<ImageData>>,
}
#[derive(Debug)]
pub struct Collaborator {
pub user: Arc<User>,
pub worktrees: Vec<WorktreeMetadata>,
}
#[derive(Debug)]
pub struct WorktreeMetadata {
pub id: u64,
pub root_name: String,
pub is_shared: bool,
pub guests: Vec<Arc<User>>,
}
pub struct UserStore {
users: HashMap<u64, Arc<User>>,
current_user: watch::Receiver<Option<Arc<User>>>,
collaborators: Arc<[Collaborator]>,
rpc: Arc<Client>,
http: Arc<dyn HttpClient>,
_maintain_collaborators: Task<()>,
_maintain_current_user: Task<()>,
}
pub enum Event {}
impl Entity for UserStore {
type Event = Event;
}
impl UserStore {
pub fn new(rpc: Arc<Client>, http: Arc<dyn HttpClient>, cx: &mut ModelContext<Self>) -> Self {
let (mut current_user_tx, current_user_rx) = watch::channel();
let (mut update_collaborators_tx, mut update_collaborators_rx) =
watch::channel::<Option<proto::UpdateCollaborators>>();
let update_collaborators_subscription = rpc.subscribe(
cx,
move |_: &mut Self, msg: TypedEnvelope<proto::UpdateCollaborators>, _, _| {
let _ = update_collaborators_tx.blocking_send(Some(msg.payload));
Ok(())
},
);
Self {
users: Default::default(),
current_user: current_user_rx,
collaborators: Arc::from([]),
rpc: rpc.clone(),
http,
_maintain_collaborators: cx.spawn_weak(|this, mut cx| async move {
let _subscription = update_collaborators_subscription;
while let Some(message) = update_collaborators_rx.recv().await {
if let Some((message, this)) = message.zip(this.upgrade(&cx)) {
this.update(&mut cx, |this, cx| this.update_collaborators(message, cx))
.log_err()
.await;
}
}
}),
_maintain_current_user: cx.spawn_weak(|this, mut cx| async move {
let mut status = rpc.status();
while let Some(status) = status.recv().await {
match status {
Status::Connected { .. } => {
if let Some((this, user_id)) = this.upgrade(&cx).zip(rpc.user_id()) {
let user = this
.update(&mut cx, |this, cx| this.fetch_user(user_id, cx))
.log_err()
.await;
current_user_tx.send(user).await.ok();
}
}
Status::SignedOut => {
current_user_tx.send(None).await.ok();
}
_ => {}
}
}
}),
}
}
fn update_collaborators(
&mut self,
message: proto::UpdateCollaborators,
cx: &mut ModelContext<Self>,
) -> Task<Result<()>> {
let mut user_ids = HashSet::new();
for collaborator in &message.collaborators {
user_ids.insert(collaborator.user_id);
user_ids.extend(
collaborator
.worktrees
.iter()
.flat_map(|w| &w.guests)
.copied(),
);
}
let load_users = self.load_users(user_ids.into_iter().collect(), cx);
cx.spawn(|this, mut cx| async move {
load_users.await?;
let mut collaborators = Vec::new();
for collaborator in message.collaborators {
collaborators.push(Collaborator::from_proto(collaborator, &this, &mut cx).await?);
}
this.update(&mut cx, |this, cx| {
collaborators.sort_by(|a, b| a.user.github_login.cmp(&b.user.github_login));
this.collaborators = collaborators.into();
cx.notify();
});
Ok(())
})
}
pub fn collaborators(&self) -> &Arc<[Collaborator]> {
&self.collaborators
}
pub fn load_users(
&mut self,
mut user_ids: Vec<u64>,
cx: &mut ModelContext<Self>,
) -> Task<Result<()>> {
let rpc = self.rpc.clone();
let http = self.http.clone();
user_ids.retain(|id| !self.users.contains_key(id));
cx.spawn_weak(|this, mut cx| async move {
if !user_ids.is_empty() {
let response = rpc.request(proto::GetUsers { user_ids }).await?;
let new_users = future::join_all(
response
.users
.into_iter()
.map(|user| User::new(user, http.as_ref())),
)
.await;
if let Some(this) = this.upgrade(&cx) {
this.update(&mut cx, |this, _| {
for user in new_users {
this.users.insert(user.id, Arc::new(user));
}
});
}
}
Ok(())
})
}
pub fn fetch_user(
&mut self,
user_id: u64,
cx: &mut ModelContext<Self>,
) -> Task<Result<Arc<User>>> {
if let Some(user) = self.users.get(&user_id).cloned() {
return cx.spawn_weak(|_, _| async move { Ok(user) });
}
let load_users = self.load_users(vec![user_id], cx);
cx.spawn(|this, mut cx| async move {
load_users.await?;
this.update(&mut cx, |this, _| {
this.users
.get(&user_id)
.cloned()
.ok_or_else(|| anyhow!("server responded with no users"))
})
})
}
pub fn current_user(&self) -> Option<Arc<User>> {
self.current_user.borrow().clone()
}
pub fn watch_current_user(&self) -> watch::Receiver<Option<Arc<User>>> {
self.current_user.clone()
}
}
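// Illustrative sketch, not part of the diff: with UserStore owned as a gpui
// ModelHandle rather than an Arc, callers reach it through update() and await
// the returned Task, as Collaborator::from_proto does below.
async fn fetch_user_sketch(
    user_store: &ModelHandle<UserStore>,
    user_id: u64,
    cx: &mut AsyncAppContext,
) -> Result<Arc<User>> {
    user_store
        .update(cx, |user_store, cx| user_store.fetch_user(user_id, cx))
        .await
}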
impl User {
async fn new(message: proto::User, http: &dyn HttpClient) -> Self {
User {
id: message.id,
github_login: message.github_login,
avatar: fetch_avatar(http, &message.avatar_url).warn_on_err().await,
}
}
}
impl Collaborator {
async fn from_proto(
collaborator: proto::Collaborator,
user_store: &ModelHandle<UserStore>,
cx: &mut AsyncAppContext,
) -> Result<Self> {
let user = user_store
.update(cx, |user_store, cx| {
user_store.fetch_user(collaborator.user_id, cx)
})
.await?;
let mut worktrees = Vec::new();
for worktree in collaborator.worktrees {
let mut guests = Vec::new();
for participant_id in worktree.guests {
guests.push(
user_store
.update(cx, |user_store, cx| {
user_store.fetch_user(participant_id, cx)
})
.await?,
);
}
worktrees.push(WorktreeMetadata {
id: worktree.id,
root_name: worktree.root_name,
is_shared: worktree.is_shared,
guests,
});
}
Ok(Self { user, worktrees })
}
}
async fn fetch_avatar(http: &dyn HttpClient, url: &str) -> Result<Arc<ImageData>> {
let url = Url::parse(url).with_context(|| format!("failed to parse avatar url {:?}", url))?;
let mut request = Request::new(Method::Get, url);
request.middleware(surf::middleware::Redirect::default());
let mut response = http
.send(request)
.await
.map_err(|e| anyhow!("failed to send user avatar request: {}", e))?;
if !response.status().is_success() {
return Err(anyhow!("avatar request failed {:?}", response.status()));
}
let bytes = response
.body_bytes()
.await
.map_err(|e| anyhow!("failed to read user avatar response body: {}", e))?;
let format = image::guess_format(&bytes)?;
let image = image::load_from_memory_with_format(&bytes, format)?.into_bgra8();
Ok(ImageData::new(image))
}

crates/clock/Cargo.toml Normal file

@@ -0,0 +1,8 @@
[package]
name = "clock"
version = "0.1.0"
edition = "2018"
[dependencies]
smallvec = { version = "1.6", features = ["union"] }
rpc = { path = "../rpc" }

View File

@@ -61,8 +61,8 @@ impl<'a> AddAssign<&'a Local> for Local {
#[derive(Clone, Default, Hash, Eq, PartialEq)]
pub struct Global(SmallVec<[Local; 3]>);
impl From<Vec<zrpc::proto::VectorClockEntry>> for Global {
fn from(message: Vec<zrpc::proto::VectorClockEntry>) -> Self {
impl From<Vec<rpc::proto::VectorClockEntry>> for Global {
fn from(message: Vec<rpc::proto::VectorClockEntry>) -> Self {
let mut version = Self::new();
for entry in message {
version.observe(Local {
@@ -74,11 +74,11 @@ impl From<Vec<zrpc::proto::VectorClockEntry>> for Global {
}
}
impl<'a> From<&'a Global> for Vec<zrpc::proto::VectorClockEntry> {
impl<'a> From<&'a Global> for Vec<rpc::proto::VectorClockEntry> {
fn from(version: &'a Global) -> Self {
version
.iter()
.map(|entry| zrpc::proto::VectorClockEntry {
.map(|entry| rpc::proto::VectorClockEntry {
replica_id: entry.replica_id as u32,
timestamp: entry.value,
})

crates/editor/Cargo.toml Normal file

@@ -0,0 +1,31 @@
[package]
name = "editor"
version = "0.1.0"
edition = "2018"
[features]
test-support = ["buffer/test-support", "gpui/test-support"]
[dependencies]
buffer = { path = "../buffer" }
clock = { path = "../clock" }
gpui = { path = "../gpui" }
sum_tree = { path = "../sum_tree" }
theme = { path = "../theme" }
util = { path = "../util" }
anyhow = "1.0"
lazy_static = "1.4"
log = "0.4"
parking_lot = "0.11"
postage = { version = "0.4", features = ["futures-traits"] }
serde = { version = "1", features = ["derive", "rc"] }
smallvec = { version = "1.6", features = ["union"] }
smol = "1.2"
[dev-dependencies]
buffer = { path = "../buffer", features = ["test-support"] }
gpui = { path = "../gpui", features = ["test-support"] }
rand = "0.8"
unindent = "0.1.7"
tree-sitter = "0.19"
tree-sitter-rust = "0.19"

View File

@@ -2,14 +2,19 @@ mod fold_map;
mod tab_map;
mod wrap_map;
use super::{buffer, Anchor, Bias, Buffer, Point, ToOffset, ToPoint};
use fold_map::FoldMap;
use buffer::{Anchor, Buffer, Point, ToOffset, ToPoint};
use fold_map::{FoldMap, ToFoldPoint as _};
use gpui::{fonts::FontId, Entity, ModelContext, ModelHandle};
use std::ops::Range;
use sum_tree::Bias;
use tab_map::TabMap;
use wrap_map::WrapMap;
pub use wrap_map::{BufferRows, HighlightedChunks};
pub trait ToDisplayPoint {
fn to_display_point(&self, map: &DisplayMapSnapshot, bias: Bias) -> DisplayPoint;
}
pub struct DisplayMap {
buffer: ModelHandle<Buffer>,
fold_map: FoldMap,
@@ -333,8 +338,8 @@ impl DisplayPoint {
}
}
impl Point {
pub fn to_display_point(self, map: &DisplayMapSnapshot, bias: Bias) -> DisplayPoint {
impl ToDisplayPoint for Point {
fn to_display_point(&self, map: &DisplayMapSnapshot, bias: Bias) -> DisplayPoint {
let fold_point = self.to_fold_point(&map.folds_snapshot, bias);
let tab_point = map.tabs_snapshot.to_tab_point(fold_point);
let wrap_point = map.wraps_snapshot.to_wrap_point(tab_point);
@@ -342,8 +347,8 @@ impl Point {
}
}
impl Anchor {
pub fn to_display_point(&self, map: &DisplayMapSnapshot, bias: Bias) -> DisplayPoint {
impl ToDisplayPoint for Anchor {
fn to_display_point(&self, map: &DisplayMapSnapshot, bias: Bias) -> DisplayPoint {
self.to_point(&map.buffer_snapshot)
.to_display_point(map, bias)
}
@@ -352,17 +357,12 @@ impl Anchor {
#[cfg(test)]
mod tests {
use super::*;
use crate::{
editor::movement,
language::{Language, LanguageConfig},
test::*,
theme::SyntaxTheme,
util::RandomCharIter,
};
use buffer::{History, SelectionGoal};
use crate::{movement, test::*};
use buffer::{History, Language, LanguageConfig, RandomCharIter, SelectionGoal};
use gpui::{color::Color, MutableAppContext};
use rand::{prelude::StdRng, Rng};
use std::{env, sync::Arc};
use theme::SyntaxTheme;
use Bias::*;
#[gpui::test(iterations = 100)]
@@ -670,7 +670,6 @@ mod tests {
async fn test_highlighted_chunks_at(mut cx: gpui::TestAppContext) {
use unindent::Unindent as _;
let grammar = tree_sitter_rust::language();
let text = r#"
fn outer() {}
@@ -678,28 +677,28 @@ mod tests {
fn inner() {}
}"#
.unindent();
let highlight_query = tree_sitter::Query::new(
grammar,
r#"
(mod_item name: (identifier) body: _ @mod.body)
(function_item name: (identifier) @fn.name)"#,
)
.unwrap();
let theme = SyntaxTheme::new(vec![
("mod.body".to_string(), Color::from_u32(0xff0000ff).into()),
("fn.name".to_string(), Color::from_u32(0x00ff00ff).into()),
]);
let lang = Arc::new(Language {
config: LanguageConfig {
name: "Test".to_string(),
path_suffixes: vec![".test".to_string()],
..Default::default()
},
grammar: grammar.clone(),
highlight_query,
brackets_query: tree_sitter::Query::new(grammar, "").unwrap(),
highlight_map: Default::default(),
});
let lang = Arc::new(
Language::new(
LanguageConfig {
name: "Test".to_string(),
path_suffixes: vec![".test".to_string()],
..Default::default()
},
tree_sitter_rust::language(),
)
.with_highlights_query(
r#"
(mod_item name: (identifier) body: _ @mod.body)
(function_item name: (identifier) @fn.name)
"#,
)
.unwrap(),
);
lang.set_theme(&theme);
let buffer = cx.add_model(|cx| {
@@ -759,7 +758,6 @@ mod tests {
cx.foreground().set_block_on_ticks(usize::MAX..=usize::MAX);
let grammar = tree_sitter_rust::language();
let text = r#"
fn outer() {}
@@ -767,28 +765,28 @@ mod tests {
fn inner() {}
}"#
.unindent();
let highlight_query = tree_sitter::Query::new(
grammar,
r#"
(mod_item name: (identifier) body: _ @mod.body)
(function_item name: (identifier) @fn.name)"#,
)
.unwrap();
let theme = SyntaxTheme::new(vec![
("mod.body".to_string(), Color::from_u32(0xff0000ff).into()),
("fn.name".to_string(), Color::from_u32(0x00ff00ff).into()),
]);
let lang = Arc::new(Language {
config: LanguageConfig {
name: "Test".to_string(),
path_suffixes: vec![".test".to_string()],
..Default::default()
},
grammar: grammar.clone(),
highlight_query,
brackets_query: tree_sitter::Query::new(grammar, "").unwrap(),
highlight_map: Default::default(),
});
let lang = Arc::new(
Language::new(
LanguageConfig {
name: "Test".to_string(),
path_suffixes: vec![".test".to_string()],
..Default::default()
},
tree_sitter_rust::language(),
)
.with_highlights_query(
r#"
(mod_item name: (identifier) body: _ @mod.body)
(function_item name: (identifier) @fn.name)
"#,
)
.unwrap(),
);
lang.set_theme(&theme);
let buffer = cx.add_model(|cx| {
@@ -977,7 +975,7 @@ mod tests {
let mut snapshot = map.update(cx, |map, cx| map.snapshot(cx));
let mut chunks: Vec<(String, Option<&str>)> = Vec::new();
for (chunk, style_id) in snapshot.highlighted_chunks_for_rows(rows) {
let style_name = theme.highlight_name(style_id);
let style_name = style_id.name(theme);
if let Some((last_chunk, last_style_name)) = chunks.last_mut() {
if style_name == *last_style_name {
last_chunk.push_str(chunk);

View File

@@ -1,12 +1,5 @@
use super::{
buffer::{AnchorRangeExt, TextSummary},
Anchor, Buffer, Point, ToOffset,
};
use crate::{editor::buffer, settings::HighlightId, time, util::Bias};
use gpui::{
sum_tree::{self, Cursor, FilterCursor, SumTree},
AppContext, ModelHandle,
};
use buffer::{Anchor, Buffer, Point, ToOffset, AnchorRangeExt, HighlightId, TextSummary};
use gpui::{AppContext, ModelHandle};
use parking_lot::Mutex;
use std::{
cmp::{self, Ordering},
@@ -14,6 +7,11 @@ use std::{
ops::Range,
sync::atomic::{AtomicUsize, Ordering::SeqCst},
};
use sum_tree::{Bias, Cursor, FilterCursor, SumTree};
pub trait ToFoldPoint {
fn to_fold_point(&self, snapshot: &Snapshot, bias: Bias) -> FoldPoint;
}
#[derive(Copy, Clone, Debug, Default, Eq, Ord, PartialOrd, PartialEq)]
pub struct FoldPoint(pub super::Point);
@@ -41,53 +39,55 @@ impl FoldPoint {
}
pub fn to_buffer_point(&self, snapshot: &Snapshot) -> Point {
let mut cursor = snapshot.transforms.cursor::<FoldPoint, Point>();
let mut cursor = snapshot.transforms.cursor::<(FoldPoint, Point)>();
cursor.seek(self, Bias::Right, &());
let overshoot = self.0 - cursor.seek_start().0;
*cursor.sum_start() + overshoot
let overshoot = self.0 - cursor.start().0 .0;
cursor.start().1 + overshoot
}
pub fn to_buffer_offset(&self, snapshot: &Snapshot) -> usize {
let mut cursor = snapshot.transforms.cursor::<FoldPoint, Point>();
let mut cursor = snapshot.transforms.cursor::<(FoldPoint, Point)>();
cursor.seek(self, Bias::Right, &());
let overshoot = self.0 - cursor.seek_start().0;
let overshoot = self.0 - cursor.start().0 .0;
snapshot
.buffer_snapshot
.to_offset(*cursor.sum_start() + overshoot)
.to_offset(cursor.start().1 + overshoot)
}
pub fn to_offset(&self, snapshot: &Snapshot) -> FoldOffset {
let mut cursor = snapshot.transforms.cursor::<FoldPoint, TransformSummary>();
let mut cursor = snapshot
.transforms
.cursor::<(FoldPoint, TransformSummary)>();
cursor.seek(self, Bias::Right, &());
let overshoot = self.0 - cursor.sum_start().output.lines;
let mut offset = cursor.sum_start().output.bytes;
let overshoot = self.0 - cursor.start().1.output.lines;
let mut offset = cursor.start().1.output.bytes;
if !overshoot.is_zero() {
let transform = cursor.item().expect("display point out of range");
assert!(transform.output_text.is_none());
let end_buffer_offset = snapshot
.buffer_snapshot
.to_offset(cursor.sum_start().input.lines + overshoot);
offset += end_buffer_offset - cursor.sum_start().input.bytes;
.to_offset(cursor.start().1.input.lines + overshoot);
offset += end_buffer_offset - cursor.start().1.input.bytes;
}
FoldOffset(offset)
}
}
impl Point {
pub fn to_fold_point(&self, snapshot: &Snapshot, bias: Bias) -> FoldPoint {
let mut cursor = snapshot.transforms.cursor::<Point, FoldPoint>();
impl ToFoldPoint for Point {
fn to_fold_point(&self, snapshot: &Snapshot, bias: Bias) -> FoldPoint {
let mut cursor = snapshot.transforms.cursor::<(Point, FoldPoint)>();
cursor.seek(self, Bias::Right, &());
if cursor.item().map_or(false, |t| t.is_fold()) {
if bias == Bias::Left || *self == *cursor.seek_start() {
*cursor.sum_start()
if bias == Bias::Left || *self == cursor.start().0 {
cursor.start().1
} else {
cursor.sum_end(&())
cursor.end(&()).1
}
} else {
let overshoot = *self - cursor.seek_start();
let overshoot = *self - cursor.start().0;
FoldPoint(cmp::min(
cursor.sum_start().0 + overshoot,
cursor.sum_end(&()).0,
cursor.start().1 .0 + overshoot,
cursor.end(&()).1 .0,
))
}
}
@@ -117,11 +117,11 @@ impl<'a> FoldMapWriter<'a> {
}
}
folds.sort_unstable_by(|a, b| sum_tree::SeekDimension::cmp(a, b, &buffer));
folds.sort_unstable_by(|a, b| sum_tree::SeekTarget::cmp(a, b, &buffer));
self.0.folds = {
let mut new_tree = SumTree::new();
let mut cursor = self.0.folds.cursor::<_, ()>();
let mut cursor = self.0.folds.cursor::<Fold>();
for fold in folds {
new_tree.push_tree(cursor.slice(&fold, Bias::Right, &buffer), &buffer);
new_tree.push(fold, &buffer);
@@ -168,7 +168,7 @@ impl<'a> FoldMapWriter<'a> {
fold_ixs_to_delete.dedup();
self.0.folds = {
let mut cursor = self.0.folds.cursor::<_, ()>();
let mut cursor = self.0.folds.cursor::<usize>();
let mut folds = SumTree::new();
for fold_ix in fold_ixs_to_delete {
folds.push_tree(cursor.slice(&fold_ix, Bias::Right, &buffer), &buffer);
@@ -200,7 +200,7 @@ pub struct FoldMap {
#[derive(Clone)]
struct SyncState {
version: time::Global,
version: clock::Global,
parse_count: usize,
}
@@ -287,20 +287,20 @@ impl FoldMap {
let mut new_transforms = SumTree::new();
let mut transforms = self.transforms.lock();
let mut cursor = transforms.cursor::<usize, ()>();
let mut cursor = transforms.cursor::<usize>();
cursor.seek(&0, Bias::Right, &());
while let Some(mut edit) = buffer_edits_iter.next() {
new_transforms.push_tree(cursor.slice(&edit.old_bytes.start, Bias::Left, &()), &());
edit.new_bytes.start -= edit.old_bytes.start - cursor.seek_start();
edit.old_bytes.start = *cursor.seek_start();
edit.new_bytes.start -= edit.old_bytes.start - cursor.start();
edit.old_bytes.start = *cursor.start();
cursor.seek(&edit.old_bytes.end, Bias::Right, &());
cursor.next(&());
let mut delta = edit.delta();
loop {
edit.old_bytes.end = *cursor.seek_start();
edit.old_bytes.end = *cursor.start();
if let Some(next_edit) = buffer_edits_iter.peek() {
if next_edit.old_bytes.start > edit.old_bytes.end {
@@ -324,7 +324,7 @@ impl FoldMap {
((edit.new_bytes.start + edit.deleted_bytes()) as isize + delta) as usize;
let anchor = buffer.anchor_before(edit.new_bytes.start);
let mut folds_cursor = self.folds.cursor::<_, ()>();
let mut folds_cursor = self.folds.cursor::<Fold>();
folds_cursor.seek(&Fold(anchor..Anchor::max()), Bias::Left, &buffer);
let mut folds = iter::from_fn({
@@ -432,39 +432,39 @@ impl FoldMap {
let mut fold_edits = Vec::with_capacity(buffer_edits.len());
{
let mut old_transforms = transforms.cursor::<usize, FoldOffset>();
let mut new_transforms = new_transforms.cursor::<usize, FoldOffset>();
let mut old_transforms = transforms.cursor::<(usize, FoldOffset)>();
let mut new_transforms = new_transforms.cursor::<(usize, FoldOffset)>();
for mut edit in buffer_edits {
old_transforms.seek(&edit.old_bytes.start, Bias::Left, &());
if old_transforms.item().map_or(false, |t| t.is_fold()) {
edit.old_bytes.start = *old_transforms.seek_start();
edit.old_bytes.start = old_transforms.start().0;
}
let old_start = old_transforms.sum_start().0
+ (edit.old_bytes.start - old_transforms.seek_start());
let old_start =
old_transforms.start().1 .0 + (edit.old_bytes.start - old_transforms.start().0);
old_transforms.seek_forward(&edit.old_bytes.end, Bias::Right, &());
if old_transforms.item().map_or(false, |t| t.is_fold()) {
old_transforms.next(&());
edit.old_bytes.end = *old_transforms.seek_start();
edit.old_bytes.end = old_transforms.start().0;
}
let old_end = old_transforms.sum_start().0
+ (edit.old_bytes.end - old_transforms.seek_start());
let old_end =
old_transforms.start().1 .0 + (edit.old_bytes.end - old_transforms.start().0);
new_transforms.seek(&edit.new_bytes.start, Bias::Left, &());
if new_transforms.item().map_or(false, |t| t.is_fold()) {
edit.new_bytes.start = *new_transforms.seek_start();
edit.new_bytes.start = new_transforms.start().0;
}
let new_start = new_transforms.sum_start().0
+ (edit.new_bytes.start - new_transforms.seek_start());
let new_start =
new_transforms.start().1 .0 + (edit.new_bytes.start - new_transforms.start().0);
new_transforms.seek_forward(&edit.new_bytes.end, Bias::Right, &());
if new_transforms.item().map_or(false, |t| t.is_fold()) {
new_transforms.next(&());
edit.new_bytes.end = *new_transforms.seek_start();
edit.new_bytes.end = new_transforms.start().0;
}
let new_end = new_transforms.sum_start().0
+ (edit.new_bytes.end - new_transforms.seek_start());
let new_end =
new_transforms.start().1 .0 + (edit.new_bytes.end - new_transforms.start().0);
fold_edits.push(FoldEdit {
old_bytes: FoldOffset(old_start)..FoldOffset(old_end),
@@ -503,38 +503,37 @@ impl Snapshot {
pub fn text_summary_for_range(&self, range: Range<FoldPoint>) -> TextSummary {
let mut summary = TextSummary::default();
let mut cursor = self.transforms.cursor::<FoldPoint, Point>();
let mut cursor = self.transforms.cursor::<(FoldPoint, Point)>();
cursor.seek(&range.start, Bias::Right, &());
if let Some(transform) = cursor.item() {
let start_in_transform = range.start.0 - cursor.seek_start().0;
let end_in_transform =
cmp::min(range.end, cursor.seek_end(&())).0 - cursor.seek_start().0;
let start_in_transform = range.start.0 - cursor.start().0 .0;
let end_in_transform = cmp::min(range.end, cursor.end(&()).0).0 - cursor.start().0 .0;
if let Some(output_text) = transform.output_text {
summary = TextSummary::from(
&output_text
[start_in_transform.column as usize..end_in_transform.column as usize],
);
} else {
let buffer_start = *cursor.sum_start() + start_in_transform;
let buffer_end = *cursor.sum_start() + end_in_transform;
let buffer_start = cursor.start().1 + start_in_transform;
let buffer_end = cursor.start().1 + end_in_transform;
summary = self
.buffer_snapshot
.text_summary_for_range(buffer_start..buffer_end);
}
}
if range.end > cursor.seek_end(&()) {
if range.end > cursor.end(&()).0 {
cursor.next(&());
summary += &cursor
.summary::<TransformSummary>(&range.end, Bias::Right, &())
.summary::<_, TransformSummary>(&range.end, Bias::Right, &())
.output;
if let Some(transform) = cursor.item() {
let end_in_transform = range.end.0 - cursor.seek_start().0;
let end_in_transform = range.end.0 - cursor.start().0 .0;
if let Some(output_text) = transform.output_text {
summary += TextSummary::from(&output_text[..end_in_transform.column as usize]);
} else {
let buffer_start = *cursor.sum_start();
let buffer_end = *cursor.sum_start() + end_in_transform;
let buffer_start = cursor.start().1;
let buffer_end = cursor.start().1 + end_in_transform;
summary += self
.buffer_snapshot
.text_summary_for_range(buffer_start..buffer_end);
@@ -545,6 +544,7 @@ impl Snapshot {
summary
}
#[cfg(test)]
pub fn len(&self) -> FoldOffset {
FoldOffset(self.transforms.summary().output.bytes)
}
@@ -600,19 +600,19 @@ impl Snapshot {
T: ToOffset,
{
let offset = offset.to_offset(&self.buffer_snapshot);
let mut cursor = self.transforms.cursor::<usize, ()>();
let mut cursor = self.transforms.cursor::<usize>();
cursor.seek(&offset, Bias::Right, &());
cursor.item().map_or(false, |t| t.output_text.is_some())
}
pub fn is_line_folded(&self, output_row: u32) -> bool {
let mut cursor = self.transforms.cursor::<FoldPoint, ()>();
let mut cursor = self.transforms.cursor::<FoldPoint>();
cursor.seek(&FoldPoint::new(output_row, 0), Bias::Right, &());
while let Some(transform) = cursor.item() {
if transform.output_text.is_some() {
return true;
}
if cursor.seek_end(&()).row() == output_row {
if cursor.end(&()).row() == output_row {
cursor.next(&())
} else {
break;
@@ -622,10 +622,10 @@ impl Snapshot {
}
pub fn chunks_at(&self, offset: FoldOffset) -> Chunks {
let mut transform_cursor = self.transforms.cursor::<FoldOffset, usize>();
let mut transform_cursor = self.transforms.cursor::<(FoldOffset, usize)>();
transform_cursor.seek(&offset, Bias::Right, &());
let overshoot = offset.0 - transform_cursor.seek_start().0;
let buffer_offset = transform_cursor.sum_start() + overshoot;
let overshoot = offset.0 - transform_cursor.start().0 .0;
let buffer_offset = transform_cursor.start().1 + overshoot;
Chunks {
transform_cursor,
buffer_offset,
@@ -636,15 +636,15 @@ impl Snapshot {
}
pub fn highlighted_chunks(&mut self, range: Range<FoldOffset>) -> HighlightedChunks {
let mut transform_cursor = self.transforms.cursor::<FoldOffset, usize>();
let mut transform_cursor = self.transforms.cursor::<(FoldOffset, usize)>();
transform_cursor.seek(&range.end, Bias::Right, &());
let overshoot = range.end.0 - transform_cursor.seek_start().0;
let buffer_end = transform_cursor.sum_start() + overshoot;
let overshoot = range.end.0 - transform_cursor.start().0 .0;
let buffer_end = transform_cursor.start().1 + overshoot;
transform_cursor.seek(&range.start, Bias::Right, &());
let overshoot = range.start.0 - transform_cursor.seek_start().0;
let buffer_start = transform_cursor.sum_start() + overshoot;
let overshoot = range.start.0 - transform_cursor.start().0 .0;
let buffer_start = transform_cursor.start().1 + overshoot;
HighlightedChunks {
transform_cursor,
@@ -663,19 +663,19 @@ impl Snapshot {
#[cfg(test)]
pub fn clip_offset(&self, offset: FoldOffset, bias: Bias) -> FoldOffset {
let mut cursor = self.transforms.cursor::<FoldOffset, usize>();
let mut cursor = self.transforms.cursor::<(FoldOffset, usize)>();
cursor.seek(&offset, Bias::Right, &());
if let Some(transform) = cursor.item() {
let transform_start = cursor.seek_start().0;
let transform_start = cursor.start().0 .0;
if transform.output_text.is_some() {
if offset.0 == transform_start || matches!(bias, Bias::Left) {
FoldOffset(transform_start)
} else {
FoldOffset(cursor.seek_end(&()).0)
FoldOffset(cursor.end(&()).0 .0)
}
} else {
let overshoot = offset.0 - transform_start;
let buffer_offset = cursor.sum_start() + overshoot;
let buffer_offset = cursor.start().1 + overshoot;
let clipped_buffer_offset = self.buffer_snapshot.clip_offset(buffer_offset, bias);
FoldOffset(
(offset.0 as isize + (clipped_buffer_offset as isize - buffer_offset as isize))
@@ -688,19 +688,19 @@ impl Snapshot {
}
pub fn clip_point(&self, point: FoldPoint, bias: Bias) -> FoldPoint {
let mut cursor = self.transforms.cursor::<FoldPoint, Point>();
let mut cursor = self.transforms.cursor::<(FoldPoint, Point)>();
cursor.seek(&point, Bias::Right, &());
if let Some(transform) = cursor.item() {
let transform_start = cursor.seek_start().0;
let transform_start = cursor.start().0 .0;
if transform.output_text.is_some() {
if point.0 == transform_start || matches!(bias, Bias::Left) {
FoldPoint(transform_start)
} else {
FoldPoint(cursor.seek_end(&()).0)
FoldPoint(cursor.end(&()).0 .0)
}
} else {
let overshoot = point.0 - transform_start;
let buffer_position = *cursor.sum_start() + overshoot;
let buffer_position = cursor.start().1 + overshoot;
let clipped_buffer_position =
self.buffer_snapshot.clip_point(buffer_position, bias);
FoldPoint::new(
@@ -822,12 +822,6 @@ impl sum_tree::Summary for TransformSummary {
}
}
impl<'a> sum_tree::Dimension<'a, TransformSummary> for TransformSummary {
fn add_summary(&mut self, summary: &'a TransformSummary, _: &()) {
sum_tree::Summary::add_summary(self, summary, &());
}
}
#[derive(Clone, Debug)]
struct Fold(Range<Anchor>);
@@ -905,7 +899,7 @@ impl<'a> sum_tree::Dimension<'a, FoldSummary> for Fold {
}
}
impl<'a> sum_tree::SeekDimension<'a, FoldSummary> for Fold {
impl<'a> sum_tree::SeekTarget<'a, FoldSummary, Fold> for Fold {
fn cmp(&self, other: &Self, buffer: &buffer::Snapshot) -> Ordering {
self.0.cmp(&other.0, buffer).unwrap()
}
@@ -918,7 +912,7 @@ impl<'a> sum_tree::Dimension<'a, FoldSummary> for usize {
}
pub struct BufferRows<'a> {
cursor: Cursor<'a, Transform, FoldPoint, Point>,
cursor: Cursor<'a, Transform, (FoldPoint, Point)>,
fold_point: FoldPoint,
}
@@ -926,7 +920,7 @@ impl<'a> Iterator for BufferRows<'a> {
type Item = u32;
fn next(&mut self) -> Option<Self::Item> {
while self.fold_point > self.cursor.seek_end(&()) {
while self.fold_point > self.cursor.end(&()).0 {
self.cursor.next(&());
if self.cursor.item().is_none() {
// TODO: Return a bool from next?
@@ -935,8 +929,8 @@ impl<'a> Iterator for BufferRows<'a> {
}
if self.cursor.item().is_some() {
let overshoot = self.fold_point.0 - self.cursor.seek_start().0;
let buffer_point = *self.cursor.sum_start() + overshoot;
let overshoot = self.fold_point.0 - self.cursor.start().0 .0;
let buffer_point = self.cursor.start().1 + overshoot;
*self.fold_point.row_mut() += 1;
Some(buffer_point.row)
} else {
@@ -946,7 +940,7 @@ impl<'a> Iterator for BufferRows<'a> {
}
pub struct Chunks<'a> {
transform_cursor: Cursor<'a, Transform, FoldOffset, usize>,
transform_cursor: Cursor<'a, Transform, (FoldOffset, usize)>,
buffer_chunks: buffer::Chunks<'a>,
buffer_offset: usize,
}
@@ -967,7 +961,7 @@ impl<'a> Iterator for Chunks<'a> {
self.buffer_offset += transform.summary.input.bytes;
self.buffer_chunks.seek(self.buffer_offset);
while self.buffer_offset >= self.transform_cursor.sum_end(&())
while self.buffer_offset >= self.transform_cursor.end(&()).1
&& self.transform_cursor.item().is_some()
{
self.transform_cursor.next(&());
@@ -982,7 +976,7 @@ impl<'a> Iterator for Chunks<'a> {
chunk = &chunk[offset_in_chunk..];
// Truncate the chunk so that it ends at the next fold.
let region_end = self.transform_cursor.sum_end(&()) - self.buffer_offset;
let region_end = self.transform_cursor.end(&()).1 - self.buffer_offset;
if chunk.len() >= region_end {
chunk = &chunk[0..region_end];
self.transform_cursor.next(&());
@@ -999,7 +993,7 @@ impl<'a> Iterator for Chunks<'a> {
}
pub struct HighlightedChunks<'a> {
transform_cursor: Cursor<'a, Transform, FoldOffset, usize>,
transform_cursor: Cursor<'a, Transform, (FoldOffset, usize)>,
buffer_chunks: buffer::HighlightedChunks<'a>,
buffer_chunk: Option<(usize, &'a str, HighlightId)>,
buffer_offset: usize,
@@ -1022,7 +1016,7 @@ impl<'a> Iterator for HighlightedChunks<'a> {
self.buffer_offset += transform.summary.input.bytes;
self.buffer_chunks.seek(self.buffer_offset);
while self.buffer_offset >= self.transform_cursor.sum_end(&())
while self.buffer_offset >= self.transform_cursor.end(&()).1
&& self.transform_cursor.item().is_some()
{
self.transform_cursor.next(&());
@@ -1046,7 +1040,7 @@ impl<'a> Iterator for HighlightedChunks<'a> {
chunk = &chunk[offset_in_chunk..];
// Truncate the chunk so that it ends at the next fold.
let region_end = self.transform_cursor.sum_end(&()) - self.buffer_offset;
let region_end = self.transform_cursor.end(&()).1 - self.buffer_offset;
if chunk.len() >= region_end {
chunk = &chunk[0..region_end];
self.transform_cursor.next(&());
@@ -1073,16 +1067,18 @@ pub struct FoldOffset(pub usize);
impl FoldOffset {
pub fn to_point(&self, snapshot: &Snapshot) -> FoldPoint {
let mut cursor = snapshot.transforms.cursor::<FoldOffset, TransformSummary>();
let mut cursor = snapshot
.transforms
.cursor::<(FoldOffset, TransformSummary)>();
cursor.seek(self, Bias::Right, &());
let overshoot = if cursor.item().map_or(true, |t| t.is_fold()) {
Point::new(0, (self.0 - cursor.seek_start().0) as u32)
Point::new(0, (self.0 - cursor.start().0 .0) as u32)
} else {
let buffer_offset = cursor.sum_start().input.bytes + self.0 - cursor.seek_start().0;
let buffer_offset = cursor.start().1.input.bytes + self.0 - cursor.start().0 .0;
let buffer_point = snapshot.buffer_snapshot.to_point(buffer_offset);
buffer_point - cursor.sum_start().input.lines
buffer_point - cursor.start().1.input.lines
};
FoldPoint(cursor.sum_start().output.lines + overshoot)
FoldPoint(cursor.start().1.output.lines + overshoot)
}
}
@@ -1128,7 +1124,8 @@ impl FoldEdit {
#[cfg(test)]
mod tests {
use super::*;
use crate::{editor::ToPoint, test::sample_text, util::RandomCharIter};
use crate::{test::sample_text, ToPoint};
use buffer::RandomCharIter;
use rand::prelude::*;
use std::{env, mem};
use Bias::{Left, Right};
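Note: the recurring change in this file (and in the tab/wrap maps below) is the sum_tree cursor API: cursors now carry a tuple of dimensions instead of separate seek/summary type parameters, so seek_start(), sum_start() and seek_end() become start().0, start().1 and end(&()). A minimal, hypothetical sketch of the non-fold branch seen in clip_point above, using only the calls that appear in this diff:

// Illustrative only: maps a FoldPoint inside an isomorphic transform back to a
// buffer Point, without the clipping step.
fn fold_point_to_buffer_point(snapshot: &Snapshot, point: FoldPoint) -> Point {
    let mut cursor = snapshot.transforms.cursor::<(FoldPoint, Point)>();
    cursor.seek(&point, Bias::Right, &());
    // start() returns the (FoldPoint, Point) pair summed up to the current transform.
    let overshoot = point.0 - cursor.start().0 .0;
    cursor.start().1 + overshoot
}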

View File

@@ -1,8 +1,8 @@
use parking_lot::Mutex;
use super::fold_map::{self, FoldEdit, FoldPoint, Snapshot as FoldSnapshot};
use crate::{editor::rope, settings::HighlightId, util::Bias};
use buffer::{rope, HighlightId};
use parking_lot::Mutex;
use std::{mem, ops::Range};
use sum_tree::Bias;
pub struct TabMap(Mutex<Snapshot>);

View File

@@ -2,16 +2,12 @@ use super::{
fold_map,
tab_map::{self, Edit as TabEdit, Snapshot as TabSnapshot, TabPoint, TextSummary},
};
use crate::{editor::Point, settings::HighlightId, util::Bias};
use gpui::{
fonts::FontId,
sum_tree::{self, Cursor, SumTree},
text_layout::LineWrapper,
Entity, ModelContext, Task,
};
use buffer::{HighlightId, Point};
use gpui::{fonts::FontId, text_layout::LineWrapper, Entity, ModelContext, Task};
use lazy_static::lazy_static;
use smol::future::yield_now;
use std::{collections::VecDeque, ops::Range, time::Duration};
use sum_tree::{Bias, Cursor, SumTree};
pub struct WrapMap {
snapshot: Snapshot,
@@ -51,7 +47,7 @@ pub struct Chunks<'a> {
input_chunks: tab_map::Chunks<'a>,
input_chunk: &'a str,
output_position: WrapPoint,
transforms: Cursor<'a, Transform, WrapPoint, TabPoint>,
transforms: Cursor<'a, Transform, (WrapPoint, TabPoint)>,
}
pub struct HighlightedChunks<'a> {
@@ -60,7 +56,7 @@ pub struct HighlightedChunks<'a> {
style_id: HighlightId,
output_position: WrapPoint,
max_output_row: u32,
transforms: Cursor<'a, Transform, WrapPoint, TabPoint>,
transforms: Cursor<'a, Transform, (WrapPoint, TabPoint)>,
}
pub struct BufferRows<'a> {
@@ -69,7 +65,7 @@ pub struct BufferRows<'a> {
output_row: u32,
soft_wrapped: bool,
max_output_row: u32,
transforms: Cursor<'a, Transform, WrapPoint, TabPoint>,
transforms: Cursor<'a, Transform, (WrapPoint, TabPoint)>,
}
impl WrapMap {
@@ -272,7 +268,7 @@ impl Snapshot {
if edits.is_empty() {
new_transforms = self.transforms.clone();
} else {
let mut old_cursor = self.transforms.cursor::<TabPoint, ()>();
let mut old_cursor = self.transforms.cursor::<TabPoint>();
let mut edits = edits.into_iter().peekable();
new_transforms =
old_cursor.slice(&edits.peek().unwrap().old_lines.start, Bias::Right, &());
@@ -293,11 +289,11 @@ impl Snapshot {
old_cursor.seek_forward(&edit.old_lines.end, Bias::Right, &());
if let Some(next_edit) = edits.peek() {
if next_edit.old_lines.start > old_cursor.seek_end(&()) {
if old_cursor.seek_end(&()) > edit.old_lines.end {
let summary = self.tab_snapshot.text_summary_for_range(
edit.old_lines.end..old_cursor.seek_end(&()),
);
if next_edit.old_lines.start > old_cursor.end(&()) {
if old_cursor.end(&()) > edit.old_lines.end {
let summary = self
.tab_snapshot
.text_summary_for_range(edit.old_lines.end..old_cursor.end(&()));
new_transforms.push_or_extend(Transform::isomorphic(summary));
}
old_cursor.next(&());
@@ -307,10 +303,10 @@ impl Snapshot {
);
}
} else {
if old_cursor.seek_end(&()) > edit.old_lines.end {
if old_cursor.end(&()) > edit.old_lines.end {
let summary = self
.tab_snapshot
.text_summary_for_range(edit.old_lines.end..old_cursor.seek_end(&()));
.text_summary_for_range(edit.old_lines.end..old_cursor.end(&()));
new_transforms.push_or_extend(Transform::isomorphic(summary));
}
old_cursor.next(&());
@@ -364,7 +360,7 @@ impl Snapshot {
new_transforms = self.transforms.clone();
} else {
let mut row_edits = row_edits.into_iter().peekable();
let mut old_cursor = self.transforms.cursor::<TabPoint, ()>();
let mut old_cursor = self.transforms.cursor::<TabPoint>();
new_transforms = old_cursor.slice(
&TabPoint::new(row_edits.peek().unwrap().old_rows.start, 0),
@@ -427,10 +423,10 @@ impl Snapshot {
old_cursor.seek_forward(&TabPoint::new(edit.old_rows.end, 0), Bias::Right, &());
if let Some(next_edit) = row_edits.peek() {
if next_edit.old_rows.start > old_cursor.seek_end(&()).row() {
if old_cursor.seek_end(&()) > TabPoint::new(edit.old_rows.end, 0) {
if next_edit.old_rows.start > old_cursor.end(&()).row() {
if old_cursor.end(&()) > TabPoint::new(edit.old_rows.end, 0) {
let summary = self.tab_snapshot.text_summary_for_range(
TabPoint::new(edit.old_rows.end, 0)..old_cursor.seek_end(&()),
TabPoint::new(edit.old_rows.end, 0)..old_cursor.end(&()),
);
new_transforms.push_or_extend(Transform::isomorphic(summary));
}
@@ -445,9 +441,9 @@ impl Snapshot {
);
}
} else {
if old_cursor.seek_end(&()) > TabPoint::new(edit.old_rows.end, 0) {
if old_cursor.end(&()) > TabPoint::new(edit.old_rows.end, 0) {
let summary = self.tab_snapshot.text_summary_for_range(
TabPoint::new(edit.old_rows.end, 0)..old_cursor.seek_end(&()),
TabPoint::new(edit.old_rows.end, 0)..old_cursor.end(&()),
);
new_transforms.push_or_extend(Transform::isomorphic(summary));
}
@@ -465,11 +461,11 @@ impl Snapshot {
pub fn chunks_at(&self, wrap_row: u32) -> Chunks {
let point = WrapPoint::new(wrap_row, 0);
let mut transforms = self.transforms.cursor::<WrapPoint, TabPoint>();
let mut transforms = self.transforms.cursor::<(WrapPoint, TabPoint)>();
transforms.seek(&point, Bias::Right, &());
let mut input_position = TabPoint(transforms.sum_start().0);
let mut input_position = TabPoint(transforms.start().1 .0);
if transforms.item().map_or(false, |t| t.is_isomorphic()) {
input_position.0 += point.0 - transforms.seek_start().0;
input_position.0 += point.0 - transforms.start().0 .0;
}
let input_chunks = self.tab_snapshot.chunks_at(input_position);
Chunks {
@@ -483,11 +479,11 @@ impl Snapshot {
pub fn highlighted_chunks_for_rows(&mut self, rows: Range<u32>) -> HighlightedChunks {
let output_start = WrapPoint::new(rows.start, 0);
let output_end = WrapPoint::new(rows.end, 0);
let mut transforms = self.transforms.cursor::<WrapPoint, TabPoint>();
let mut transforms = self.transforms.cursor::<(WrapPoint, TabPoint)>();
transforms.seek(&output_start, Bias::Right, &());
let mut input_start = TabPoint(transforms.sum_start().0);
let mut input_start = TabPoint(transforms.start().1 .0);
if transforms.item().map_or(false, |t| t.is_isomorphic()) {
input_start.0 += output_start.0 - transforms.seek_start().0;
input_start.0 += output_start.0 - transforms.start().0 .0;
}
let input_end = self
.to_tab_point(output_end)
@@ -520,7 +516,7 @@ impl Snapshot {
}
pub fn soft_wrap_indent(&self, row: u32) -> Option<u32> {
let mut cursor = self.transforms.cursor::<_, ()>();
let mut cursor = self.transforms.cursor::<WrapPoint>();
cursor.seek(&WrapPoint::new(row + 1, 0), Bias::Right, &());
cursor.item().and_then(|transform| {
if transform.is_isomorphic() {
@@ -536,11 +532,11 @@ impl Snapshot {
}
pub fn buffer_rows(&self, start_row: u32) -> BufferRows {
let mut transforms = self.transforms.cursor::<WrapPoint, TabPoint>();
let mut transforms = self.transforms.cursor::<(WrapPoint, TabPoint)>();
transforms.seek(&WrapPoint::new(start_row, 0), Bias::Left, &());
let mut input_row = transforms.sum_start().row();
let mut input_row = transforms.start().1.row();
if transforms.item().map_or(false, |t| t.is_isomorphic()) {
input_row += start_row - transforms.seek_start().row();
input_row += start_row - transforms.start().0.row();
}
let soft_wrapped = transforms.item().map_or(false, |t| !t.is_isomorphic());
let mut input_buffer_rows = self.tab_snapshot.buffer_rows(input_row);
@@ -556,27 +552,27 @@ impl Snapshot {
}
pub fn to_tab_point(&self, point: WrapPoint) -> TabPoint {
let mut cursor = self.transforms.cursor::<WrapPoint, TabPoint>();
let mut cursor = self.transforms.cursor::<(WrapPoint, TabPoint)>();
cursor.seek(&point, Bias::Right, &());
let mut tab_point = cursor.sum_start().0;
let mut tab_point = cursor.start().1 .0;
if cursor.item().map_or(false, |t| t.is_isomorphic()) {
tab_point += point.0 - cursor.seek_start().0;
tab_point += point.0 - cursor.start().0 .0;
}
TabPoint(tab_point)
}
pub fn to_wrap_point(&self, point: TabPoint) -> WrapPoint {
let mut cursor = self.transforms.cursor::<TabPoint, WrapPoint>();
let mut cursor = self.transforms.cursor::<(TabPoint, WrapPoint)>();
cursor.seek(&point, Bias::Right, &());
WrapPoint(cursor.sum_start().0 + (point.0 - cursor.seek_start().0))
WrapPoint(cursor.start().1 .0 + (point.0 - cursor.start().0 .0))
}
pub fn clip_point(&self, mut point: WrapPoint, bias: Bias) -> WrapPoint {
if bias == Bias::Left {
let mut cursor = self.transforms.cursor::<WrapPoint, ()>();
let mut cursor = self.transforms.cursor::<WrapPoint>();
cursor.seek(&point, Bias::Right, &());
if cursor.item().map_or(false, |t| !t.is_isomorphic()) {
point = *cursor.seek_start();
point = *cursor.start();
*point.column_mut() -= 1;
}
}
@@ -593,7 +589,7 @@ impl Snapshot {
);
{
let mut transforms = self.transforms.cursor::<(), ()>().peekable();
let mut transforms = self.transforms.cursor::<()>().peekable();
while let Some(transform) = transforms.next() {
if let Some(next_transform) = transforms.peek() {
assert!(transform.is_isomorphic() != next_transform.is_isomorphic());
@@ -638,7 +634,7 @@ impl<'a> Iterator for Chunks<'a> {
fn next(&mut self) -> Option<Self::Item> {
let transform = self.transforms.item()?;
if let Some(display_text) = transform.display_text {
if self.output_position > *self.transforms.seek_start() {
if self.output_position > self.transforms.start().0 {
self.output_position.0.column += transform.summary.output.lines.column;
self.transforms.next(&());
return Some(&display_text[1..]);
@@ -654,7 +650,7 @@ impl<'a> Iterator for Chunks<'a> {
}
let mut input_len = 0;
let transform_end = self.transforms.seek_end(&());
let transform_end = self.transforms.end(&()).0;
for c in self.input_chunk.chars() {
let char_len = c.len_utf8();
input_len += char_len;
@@ -691,7 +687,7 @@ impl<'a> Iterator for HighlightedChunks<'a> {
let mut end_ix = display_text.len();
let mut summary = transform.summary.output.lines;
if self.output_position > *self.transforms.seek_start() {
if self.output_position > self.transforms.start().0 {
// Exclude newline starting prior to the desired row.
start_ix = 1;
summary.row = 0;
@@ -713,7 +709,7 @@ impl<'a> Iterator for HighlightedChunks<'a> {
}
let mut input_len = 0;
let transform_end = self.transforms.seek_end(&());
let transform_end = self.transforms.end(&()).0;
for c in self.input_chunk.chars() {
let char_len = c.len_utf8();
input_len += char_len;
@@ -900,13 +896,10 @@ impl<'a> sum_tree::Dimension<'a, TransformSummary> for WrapPoint {
mod tests {
use super::*;
use crate::{
editor::{
display_map::{fold_map::FoldMap, tab_map::TabMap},
Buffer,
},
display_map::{fold_map::FoldMap, tab_map::TabMap},
test::Observer,
util::RandomCharIter,
};
use buffer::{Buffer, RandomCharIter};
use rand::prelude::*;
use std::env;

View File

@@ -1,8 +1,9 @@
use super::{
DisplayPoint, Editor, EditorMode, EditorStyle, Insert, Scroll, Select, SelectPhase, Snapshot,
MAX_LINE_LEN,
DisplayPoint, Editor, EditorMode, EditorSettings, EditorStyle, Input, Scroll, Select,
SelectPhase, Snapshot, MAX_LINE_LEN,
};
use crate::{theme::HighlightId, time::ReplicaId};
use buffer::HighlightId;
use clock::ReplicaId;
use gpui::{
color::Color,
geometry::{
@@ -27,12 +28,12 @@ use std::{
pub struct EditorElement {
view: WeakViewHandle<Editor>,
style: EditorStyle,
settings: EditorSettings,
}
impl EditorElement {
pub fn new(view: WeakViewHandle<Editor>, style: EditorStyle) -> Self {
Self { view, style }
pub fn new(view: WeakViewHandle<Editor>, settings: EditorSettings) -> Self {
Self { view, settings }
}
fn view<'a>(&self, cx: &'a AppContext) -> &'a Editor {
@@ -142,7 +143,7 @@ impl EditorElement {
if chars.chars().any(|c| c.is_control()) || keystroke.cmd || keystroke.ctrl {
false
} else {
cx.dispatch_action(Insert(chars.to_string()));
cx.dispatch_action(Input(chars.to_string()));
true
}
}
@@ -195,15 +196,16 @@ impl EditorElement {
let bounds = gutter_bounds.union_rect(text_bounds);
let scroll_top = layout.snapshot.scroll_position().y() * layout.line_height;
let editor = self.view(cx.app);
let style = &self.settings.style;
cx.scene.push_quad(Quad {
bounds: gutter_bounds,
background: Some(self.style.gutter_background),
background: Some(style.gutter_background),
border: Border::new(0., Color::transparent_black()),
corner_radius: 0.,
});
cx.scene.push_quad(Quad {
bounds: text_bounds,
background: Some(self.style.background),
background: Some(style.background),
border: Border::new(0., Color::transparent_black()),
corner_radius: 0.,
});
@@ -230,7 +232,7 @@ impl EditorElement {
);
cx.scene.push_quad(Quad {
bounds: RectF::new(origin, size),
background: Some(self.style.active_line_background),
background: Some(style.active_line_background),
border: Border::default(),
corner_radius: 0.,
});
@@ -267,8 +269,7 @@ impl EditorElement {
cx: &mut PaintContext,
) {
let view = self.view(cx.app);
let settings = self.view(cx.app).settings.borrow();
let theme = &settings.theme.editor;
let style = &self.settings.style;
let local_replica_id = view.replica_id(cx);
let scroll_position = layout.snapshot.scroll_position();
let start_row = scroll_position.y() as u32;
@@ -286,11 +287,11 @@ impl EditorElement {
let content_origin = bounds.origin() + layout.text_offset;
for (replica_id, selections) in &layout.selections {
let style_ix = *replica_id as usize % (theme.guest_selections.len() + 1);
let style_ix = *replica_id as usize % (style.guest_selections.len() + 1);
let style = if style_ix == 0 {
&theme.selection
&style.selection
} else {
&theme.guest_selections[style_ix - 1]
&style.guest_selections[style_ix - 1]
};
for selection in selections {
@@ -382,15 +383,16 @@ impl EditorElement {
fn max_line_number_width(&self, snapshot: &Snapshot, cx: &LayoutContext) -> f32 {
let digit_count = (snapshot.buffer_row_count() as f32).log10().floor() as usize + 1;
let style = &self.settings.style;
cx.text_layout_cache
.layout_str(
"1".repeat(digit_count).as_str(),
self.style.text.font_size,
style.text.font_size,
&[(
digit_count,
RunStyle {
font_id: self.style.text.font_id,
font_id: style.text.font_id,
color: Color::black(),
underline: false,
},
@@ -406,6 +408,7 @@ impl EditorElement {
snapshot: &Snapshot,
cx: &LayoutContext,
) -> Vec<Option<text_layout::Line>> {
let style = &self.settings.style;
let mut layouts = Vec::with_capacity(rows.len());
let mut line_number = String::new();
for (ix, (buffer_row, soft_wrapped)) in snapshot
@@ -415,9 +418,9 @@ impl EditorElement {
{
let display_row = rows.start + ix as u32;
let color = if active_rows.contains_key(&display_row) {
self.style.line_number_active
style.line_number_active
} else {
self.style.line_number
style.line_number
};
if soft_wrapped {
layouts.push(None);
@@ -426,11 +429,11 @@ impl EditorElement {
write!(&mut line_number, "{}", buffer_row + 1).unwrap();
layouts.push(Some(cx.text_layout_cache.layout_str(
&line_number,
self.style.text.font_size,
style.text.font_size,
&[(
line_number.len(),
RunStyle {
font_id: self.style.text.font_id,
font_id: style.text.font_id,
color,
underline: false,
},
@@ -455,7 +458,7 @@ impl EditorElement {
// When the editor is empty and unfocused, then show the placeholder.
if snapshot.is_empty() && !snapshot.is_focused() {
let placeholder_style = self.style.placeholder_text();
let placeholder_style = self.settings.style.placeholder_text();
let placeholder_text = snapshot.placeholder_text();
let placeholder_lines = placeholder_text
.as_ref()
@@ -481,10 +484,10 @@ impl EditorElement {
.collect();
}
let mut prev_font_properties = self.style.text.font_properties.clone();
let mut prev_font_id = self.style.text.font_id;
let style = &self.settings.style;
let mut prev_font_properties = style.text.font_properties.clone();
let mut prev_font_id = style.text.font_id;
let theme = snapshot.theme().clone();
let mut layouts = Vec::with_capacity(rows.len());
let mut line = String::new();
let mut styles = Vec::new();
@@ -497,7 +500,7 @@ impl EditorElement {
if ix > 0 {
layouts.push(cx.text_layout_cache.layout_str(
&line,
self.style.text.font_size,
style.text.font_size,
&styles,
));
line.clear();
@@ -510,17 +513,19 @@ impl EditorElement {
}
if !line_chunk.is_empty() && !line_exceeded_max_len {
let style = theme
.syntax
.highlight_style(style_ix)
.unwrap_or(self.style.text.clone().into());
let highlight_style = style_ix
.style(&style.syntax)
.unwrap_or(style.text.clone().into());
// Avoid a lookup if the font properties match the previous ones.
let font_id = if style.font_properties == prev_font_properties {
let font_id = if highlight_style.font_properties == prev_font_properties {
prev_font_id
} else {
cx.font_cache
.select_font(self.style.text.font_family_id, &style.font_properties)
.unwrap_or(self.style.text.font_id)
.select_font(
style.text.font_family_id,
&highlight_style.font_properties,
)
.unwrap_or(style.text.font_id)
};
if line.len() + line_chunk.len() > MAX_LINE_LEN {
@@ -537,12 +542,12 @@ impl EditorElement {
line_chunk.len(),
RunStyle {
font_id,
color: style.color,
underline: style.underline,
color: highlight_style.color,
underline: highlight_style.underline,
},
));
prev_font_id = font_id;
prev_font_properties = style.font_properties;
prev_font_properties = highlight_style.font_properties;
}
}
}
@@ -566,12 +571,13 @@ impl Element for EditorElement {
}
let snapshot = self.snapshot(cx.app);
let line_height = self.style.text.line_height(cx.font_cache);
let style = self.settings.style.clone();
let line_height = style.text.line_height(cx.font_cache);
let gutter_padding;
let gutter_width;
if snapshot.mode == EditorMode::Full {
gutter_padding = self.style.text.em_width(cx.font_cache);
gutter_padding = style.text.em_width(cx.font_cache);
gutter_width = self.max_line_number_width(&snapshot, cx) + gutter_padding * 2.0;
} else {
gutter_padding = 0.0;
@@ -579,8 +585,8 @@ impl Element for EditorElement {
};
let text_width = size.x() - gutter_width;
let text_offset = vec2f(-self.style.text.descent(cx.font_cache), 0.);
let em_width = self.style.text.em_width(cx.font_cache);
let text_offset = vec2f(-style.text.descent(cx.font_cache), 0.);
let em_width = style.text.em_width(cx.font_cache);
let overscroll = vec2f(em_width, 0.);
let wrap_width = text_width - text_offset.x() - overscroll.x() - em_width;
let snapshot = self.update_view(cx.app, |view, cx| {
@@ -676,7 +682,7 @@ impl Element for EditorElement {
overscroll,
text_offset,
snapshot,
style: self.style.clone(),
style: self.settings.style.clone(),
active_rows,
line_layouts,
line_number_layouts,
@@ -688,7 +694,7 @@ impl Element for EditorElement {
let scroll_max = layout.scroll_max(cx.font_cache, cx.text_layout_cache).x();
let scroll_width = layout.scroll_width(cx.text_layout_cache);
let max_glyph_width = self.style.text.em_width(&cx.font_cache);
let max_glyph_width = style.text.em_width(&cx.font_cache);
self.update_view(cx.app, |view, cx| {
let clamped = view.clamp_scroll_left(scroll_max);
let autoscrolled;
@@ -1034,30 +1040,27 @@ fn scale_horizontal_mouse_autoscroll_delta(delta: f32) -> f32 {
mod tests {
use super::*;
use crate::{
editor::{Buffer, Editor, EditorStyle},
settings,
test::sample_text,
{Editor, EditorSettings},
};
use buffer::Buffer;
#[gpui::test]
fn test_layout_line_numbers(cx: &mut gpui::MutableAppContext) {
let font_cache = cx.font_cache().clone();
let settings = settings::test(&cx).1;
let style = EditorStyle::test(&font_cache);
let settings = EditorSettings::test(cx);
let buffer = cx.add_model(|cx| Buffer::new(0, sample_text(6, 6), cx));
let (window_id, editor) = cx.add_window(Default::default(), |cx| {
Editor::for_buffer(
buffer,
settings.clone(),
{
let style = style.clone();
move |_| style.clone()
let settings = settings.clone();
move |_| settings.clone()
},
cx,
)
});
let element = EditorElement::new(editor.downgrade(), style);
let element = EditorElement::new(editor.downgrade(), settings);
let layouts = editor.update(cx, |editor, cx| {
let snapshot = editor.snapshot(cx);

File diff suppressed because it is too large

View File

@@ -101,7 +101,10 @@ pub fn line_end(map: &DisplayMapSnapshot, point: DisplayPoint) -> Result<Display
Ok(map.clip_point(line_end, Bias::Left))
}
pub fn prev_word_boundary(map: &DisplayMapSnapshot, point: DisplayPoint) -> Result<DisplayPoint> {
pub fn prev_word_boundary(
map: &DisplayMapSnapshot,
mut point: DisplayPoint,
) -> Result<DisplayPoint> {
let mut line_start = 0;
if point.row() > 0 {
if let Some(indent) = map.soft_wrap_indent(point.row() - 1) {
@@ -111,39 +114,52 @@ pub fn prev_word_boundary(map: &DisplayMapSnapshot, point: DisplayPoint) -> Resu
if point.column() == line_start {
if point.row() == 0 {
Ok(DisplayPoint::new(0, 0))
return Ok(DisplayPoint::new(0, 0));
} else {
let row = point.row() - 1;
Ok(map.clip_point(DisplayPoint::new(row, map.line_len(row)), Bias::Left))
point = map.clip_point(DisplayPoint::new(row, map.line_len(row)), Bias::Left);
}
} else {
let mut boundary = DisplayPoint::new(point.row(), 0);
let mut column = 0;
let mut prev_c = None;
for c in map.chars_at(DisplayPoint::new(point.row(), 0)) {
if column >= point.column() {
break;
}
if prev_c.is_none() || char_kind(prev_c.unwrap()) != char_kind(c) {
*boundary.column_mut() = column;
}
prev_c = Some(c);
column += c.len_utf8() as u32;
}
Ok(boundary)
}
let mut boundary = DisplayPoint::new(point.row(), 0);
let mut column = 0;
let mut prev_char_kind = CharKind::Newline;
for c in map.chars_at(DisplayPoint::new(point.row(), 0)) {
if column >= point.column() {
break;
}
let char_kind = char_kind(c);
if char_kind != prev_char_kind
&& char_kind != CharKind::Whitespace
&& char_kind != CharKind::Newline
{
*boundary.column_mut() = column;
}
prev_char_kind = char_kind;
column += c.len_utf8() as u32;
}
Ok(boundary)
}
pub fn next_word_boundary(
map: &DisplayMapSnapshot,
mut point: DisplayPoint,
) -> Result<DisplayPoint> {
let mut prev_c = None;
let mut prev_char_kind = None;
for c in map.chars_at(point) {
if prev_c.is_some() && (c == '\n' || char_kind(prev_c.unwrap()) != char_kind(c)) {
break;
let char_kind = char_kind(c);
if let Some(prev_char_kind) = prev_char_kind {
if c == '\n' {
break;
}
if prev_char_kind != char_kind
&& prev_char_kind != CharKind::Whitespace
&& prev_char_kind != CharKind::Newline
{
break;
}
}
if c == '\n' {
@@ -152,7 +168,7 @@ pub fn next_word_boundary(
} else {
*point.column_mut() += c.len_utf8() as u32;
}
prev_c = Some(c);
prev_char_kind = Some(char_kind);
}
Ok(point)
}
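Note: the behavioral change above is that a transition into whitespace (or a newline) no longer starts a word of its own, which is why the expectations in the test below move from the preceding space to the start of the previous word. A self-contained sketch of the rule, with a simplified Kind standing in for the crate's CharKind:

#[derive(PartialEq, Clone, Copy)]
enum Kind { Whitespace, Word, Punct }

fn kind(c: char) -> Kind {
    if c.is_whitespace() { Kind::Whitespace }
    else if c.is_alphanumeric() || c == '_' { Kind::Word }
    else { Kind::Punct }
}

// Byte column of the previous word boundary on a single line.
fn prev_boundary(line: &str, col: usize) -> usize {
    let mut boundary = 0;
    let mut column = 0;
    let mut prev = Kind::Whitespace;
    for c in line.chars() {
        if column >= col { break; }
        let k = kind(c);
        // Only a transition into a non-whitespace kind starts a new word.
        if k != prev && k != Kind::Whitespace { boundary = column; }
        prev = k;
        column += c.len_utf8();
    }
    boundary
}

// prev_boundary("a bcΔ defγ", 7) == 2 and prev_boundary("a bcΔ defγ", 2) == 0,
// matching the updated expectations below.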
@@ -180,7 +196,7 @@ fn char_kind(c: char) -> CharKind {
#[cfg(test)]
mod tests {
use super::*;
use crate::editor::{display_map::DisplayMap, Buffer};
use crate::{display_map::DisplayMap, Buffer};
#[gpui::test]
fn test_prev_next_word_boundary_multibyte(cx: &mut gpui::MutableAppContext) {
@@ -192,7 +208,7 @@ mod tests {
.unwrap();
let font_size = 14.0;
let buffer = cx.add_model(|cx| Buffer::new(0, "a bcΔ defγ", cx));
let buffer = cx.add_model(|cx| Buffer::new(0, "a bcΔ defγ hi—jk", cx));
let display_map =
cx.add_model(|cx| DisplayMap::new(buffer, tab_size, font_id, font_size, None, cx));
let snapshot = display_map.update(cx, |map, cx| map.snapshot(cx));
@@ -202,7 +218,7 @@ mod tests {
);
assert_eq!(
prev_word_boundary(&snapshot, DisplayPoint::new(0, 7)).unwrap(),
DisplayPoint::new(0, 6)
DisplayPoint::new(0, 2)
);
assert_eq!(
prev_word_boundary(&snapshot, DisplayPoint::new(0, 6)).unwrap(),
@@ -210,7 +226,7 @@ mod tests {
);
assert_eq!(
prev_word_boundary(&snapshot, DisplayPoint::new(0, 2)).unwrap(),
DisplayPoint::new(0, 1)
DisplayPoint::new(0, 0)
);
assert_eq!(
prev_word_boundary(&snapshot, DisplayPoint::new(0, 1)).unwrap(),
@@ -223,7 +239,7 @@ mod tests {
);
assert_eq!(
next_word_boundary(&snapshot, DisplayPoint::new(0, 1)).unwrap(),
DisplayPoint::new(0, 2)
DisplayPoint::new(0, 6)
);
assert_eq!(
next_word_boundary(&snapshot, DisplayPoint::new(0, 2)).unwrap(),
@@ -231,7 +247,7 @@ mod tests {
);
assert_eq!(
next_word_boundary(&snapshot, DisplayPoint::new(0, 6)).unwrap(),
DisplayPoint::new(0, 7)
DisplayPoint::new(0, 12)
);
assert_eq!(
next_word_boundary(&snapshot, DisplayPoint::new(0, 7)).unwrap(),

crates/editor/src/test.rs Normal file
View File

@@ -0,0 +1,39 @@
use gpui::{Entity, ModelHandle};
use smol::channel;
use std::marker::PhantomData;
pub fn sample_text(rows: usize, cols: usize) -> String {
let mut text = String::new();
for row in 0..rows {
let c: char = ('a' as u32 + row as u32) as u8 as char;
let mut line = c.to_string().repeat(cols);
if row < rows - 1 {
line.push('\n');
}
text += &line;
}
text
}
pub struct Observer<T>(PhantomData<T>);
impl<T: 'static> Entity for Observer<T> {
type Event = ();
}
impl<T: Entity> Observer<T> {
pub fn new(
handle: &ModelHandle<T>,
cx: &mut gpui::TestAppContext,
) -> (ModelHandle<Self>, channel::Receiver<()>) {
let (notify_tx, notify_rx) = channel::unbounded();
let observer = cx.add_model(|cx| {
cx.observe(handle, move |_, _, _| {
let _ = notify_tx.try_send(());
})
.detach();
Observer(PhantomData)
});
(observer, notify_rx)
}
}
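Note: a hypothetical use of the Observer helper above in a gpui test, assuming a TestAppContext cx and some model handle:

// let (_observer, notifications) = Observer::new(&model, &mut cx);
// model.update(&mut cx, |_, cx| cx.notify());
// notifications.recv().await.unwrap(); // resolves once the model notifies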

View File

@@ -0,0 +1,18 @@
[package]
name = "file_finder"
version = "0.1.0"
edition = "2018"
[dependencies]
editor = { path = "../editor" }
fuzzy = { path = "../fuzzy" }
gpui = { path = "../gpui" }
project = { path = "../project" }
util = { path = "../util" }
theme = { path = "../theme" }
workspace = { path = "../workspace" }
postage = { version = "0.4.1", features = ["futures-traits"] }
[dev-dependencies]
serde_json = { version = "1.0.64", features = ["preserve_order"] }
workspace = { path = "../workspace", features = ["test-support"] }

View File

@@ -1,10 +1,5 @@
use crate::{
editor::{self, Editor},
settings::Settings,
util,
workspace::Workspace,
worktree::{match_paths, PathMatch},
};
use editor::{Editor, EditorSettings};
use fuzzy::PathMatch;
use gpui::{
action,
elements::*,
@@ -13,10 +8,11 @@ use gpui::{
menu::{SelectNext, SelectPrev},
Binding,
},
AppContext, Axis, Entity, MutableAppContext, RenderContext, Task, View, ViewContext,
ViewHandle, WeakViewHandle,
AppContext, Axis, Entity, ModelHandle, MutableAppContext, RenderContext, Task, View,
ViewContext, ViewHandle, WeakViewHandle,
};
use postage::watch;
use project::{Project, ProjectPath};
use std::{
cmp,
path::Path,
@@ -25,11 +21,13 @@ use std::{
Arc,
},
};
use util::post_inc;
use workspace::{Settings, Workspace};
pub struct FileFinder {
handle: WeakViewHandle<Self>,
settings: watch::Receiver<Settings>,
workspace: WeakViewHandle<Workspace>,
project: ModelHandle<Project>,
query_editor: ViewHandle<Editor>,
search_count: usize,
latest_search_id: usize,
@@ -43,13 +41,7 @@ pub struct FileFinder {
action!(Toggle);
action!(Confirm);
action!(Select, Entry);
#[derive(Clone)]
pub struct Entry {
worktree_id: usize,
path: Arc<Path>,
}
action!(Select, ProjectPath);
pub fn init(cx: &mut MutableAppContext) {
cx.add_action(FileFinder::toggle);
@@ -66,7 +58,7 @@ pub fn init(cx: &mut MutableAppContext) {
}
pub enum Event {
Selected(usize, Arc<Path>),
Selected(ProjectPath),
Dismissed,
}
@@ -202,8 +194,8 @@ impl FileFinder {
)
.with_style(style.container);
let action = Select(Entry {
worktree_id: path_match.tree_id,
let action = Select(ProjectPath {
worktree_id: path_match.worktree_id,
path: path_match.path.clone(),
});
EventHandler::new(container.boxed())
@@ -241,8 +233,8 @@ impl FileFinder {
fn toggle(workspace: &mut Workspace, _: &Toggle, cx: &mut ViewContext<Workspace>) {
workspace.toggle_modal(cx, |cx, workspace| {
let handle = cx.handle();
let finder = cx.add_view(|cx| Self::new(workspace.settings.clone(), handle, cx));
let project = workspace.project().clone();
let finder = cx.add_view(|cx| Self::new(workspace.settings.clone(), project, cx));
cx.subscribe(&finder, Self::on_event).detach();
finder
});
@@ -255,9 +247,9 @@ impl FileFinder {
cx: &mut ViewContext<Workspace>,
) {
match event {
Event::Selected(tree_id, path) => {
Event::Selected(project_path) => {
workspace
.open_entry((*tree_id, path.clone()), cx)
.open_entry(project_path.clone(), cx)
.map(|d| d.detach());
workspace.dismiss_modal(cx);
}
@@ -269,17 +261,22 @@ impl FileFinder {
pub fn new(
settings: watch::Receiver<Settings>,
workspace: ViewHandle<Workspace>,
project: ModelHandle<Project>,
cx: &mut ViewContext<Self>,
) -> Self {
cx.observe(&workspace, Self::workspace_updated).detach();
cx.observe(&project, Self::project_updated).detach();
let query_editor = cx.add_view(|cx| {
Editor::single_line(
settings.clone(),
{
let settings = settings.clone();
move |_| settings.borrow().theme.selector.input_editor.as_editor()
move |_| {
let settings = settings.borrow();
EditorSettings {
style: settings.theme.selector.input_editor.as_editor(),
tab_size: settings.tab_size,
}
}
},
cx,
)
@@ -290,7 +287,7 @@ impl FileFinder {
Self {
handle: cx.handle().downgrade(),
settings,
workspace: workspace.downgrade(),
project,
query_editor,
search_count: 0,
latest_search_id: 0,
@@ -303,7 +300,7 @@ impl FileFinder {
}
}
fn workspace_updated(&mut self, _: ViewHandle<Workspace>, cx: &mut ViewContext<Self>) {
fn project_updated(&mut self, _: ModelHandle<Project>, cx: &mut ViewContext<Self>) {
let query = self.query_editor.update(cx, |buffer, cx| buffer.text(cx));
if let Some(task) = self.spawn_search(query, cx) {
task.detach();
@@ -320,7 +317,7 @@ impl FileFinder {
editor::Event::Edited => {
let query = self.query_editor.update(cx, |buffer, cx| buffer.text(cx));
if query.is_empty() {
self.latest_search_id = util::post_inc(&mut self.search_count);
self.latest_search_id = post_inc(&mut self.search_count);
self.matches.clear();
cx.notify();
} else {
@@ -337,7 +334,7 @@ impl FileFinder {
fn selected_index(&self) -> usize {
if let Some(selected) = self.selected.as_ref() {
for (ix, path_match) in self.matches.iter().enumerate() {
if (path_match.tree_id, path_match.path.as_ref())
if (path_match.worktree_id, path_match.path.as_ref())
== (selected.0, selected.1.as_ref())
{
return ix;
@@ -352,7 +349,7 @@ impl FileFinder {
if selected_index > 0 {
selected_index -= 1;
let mat = &self.matches[selected_index];
self.selected = Some((mat.tree_id, mat.path.clone()));
self.selected = Some((mat.worktree_id, mat.path.clone()));
}
self.list_state.scroll_to(selected_index);
cx.notify();
@@ -363,7 +360,7 @@ impl FileFinder {
if selected_index + 1 < self.matches.len() {
selected_index += 1;
let mat = &self.matches[selected_index];
self.selected = Some((mat.tree_id, mat.path.clone()));
self.selected = Some((mat.worktree_id, mat.path.clone()));
}
self.list_state.scroll_to(selected_index);
cx.notify();
@@ -371,40 +368,30 @@ impl FileFinder {
fn confirm(&mut self, _: &Confirm, cx: &mut ViewContext<Self>) {
if let Some(m) = self.matches.get(self.selected_index()) {
cx.emit(Event::Selected(m.tree_id, m.path.clone()));
cx.emit(Event::Selected(ProjectPath {
worktree_id: m.worktree_id,
path: m.path.clone(),
}));
}
}
fn select(&mut self, Select(entry): &Select, cx: &mut ViewContext<Self>) {
cx.emit(Event::Selected(entry.worktree_id, entry.path.clone()));
fn select(&mut self, Select(project_path): &Select, cx: &mut ViewContext<Self>) {
cx.emit(Event::Selected(project_path.clone()));
}
#[must_use]
fn spawn_search(&mut self, query: String, cx: &mut ViewContext<Self>) -> Option<Task<()>> {
let snapshots = self
.workspace
.upgrade(&cx)?
.read(cx)
.worktrees()
.iter()
.map(|tree| tree.read(cx).snapshot())
.collect::<Vec<_>>();
let search_id = util::post_inc(&mut self.search_count);
let background = cx.as_ref().background().clone();
self.cancel_flag.store(true, atomic::Ordering::Relaxed);
self.cancel_flag = Arc::new(AtomicBool::new(false));
let cancel_flag = self.cancel_flag.clone();
let project = self.project.clone();
Some(cx.spawn(|this, mut cx| async move {
let matches = match_paths(
&snapshots,
&query,
false,
false,
100,
cancel_flag.as_ref(),
background,
)
.await;
let matches = project
.read_with(&cx, |project, cx| {
project.match_paths(&query, false, false, 100, cancel_flag.as_ref(), cx)
})
.await;
let did_cancel = cancel_flag.load(atomic::Ordering::Relaxed);
this.update(&mut cx, |this, cx| {
this.update_matches((search_id, did_cancel, query, matches), cx)
@@ -435,32 +422,36 @@ impl FileFinder {
#[cfg(test)]
mod tests {
use super::*;
use crate::{
editor::{self, Insert},
fs::FakeFs,
test::{temp_tree, test_app_state},
workspace::Workspace,
};
use editor::Input;
use serde_json::json;
use std::fs;
use tempdir::TempDir;
use std::path::PathBuf;
use workspace::{Workspace, WorkspaceParams};
#[gpui::test]
async fn test_matching_paths(mut cx: gpui::TestAppContext) {
let tmp_dir = TempDir::new("example").unwrap();
fs::create_dir(tmp_dir.path().join("a")).unwrap();
fs::write(tmp_dir.path().join("a/banana"), "banana").unwrap();
fs::write(tmp_dir.path().join("a/bandana"), "bandana").unwrap();
let params = cx.update(WorkspaceParams::test);
params
.fs
.as_fake()
.insert_tree(
"/root",
json!({
"a": {
"banana": "",
"bandana": "",
}
}),
)
.await;
cx.update(|cx| {
super::init(cx);
editor::init(cx);
});
let app_state = cx.update(test_app_state);
let (window_id, workspace) = cx.add_window(|cx| Workspace::new(&app_state, cx));
let (window_id, workspace) = cx.add_window(|cx| Workspace::new(&params, cx));
workspace
.update(&mut cx, |workspace, cx| {
workspace.add_worktree(tmp_dir.path(), cx)
workspace.add_worktree(Path::new("/root"), cx)
})
.await
.unwrap();
@@ -480,9 +471,9 @@ mod tests {
let query_buffer = cx.read(|cx| finder.read(cx).query_editor.clone());
let chain = vec![finder.id(), query_buffer.id()];
cx.dispatch_action(window_id, chain.clone(), Insert("b".into()));
cx.dispatch_action(window_id, chain.clone(), Insert("n".into()));
cx.dispatch_action(window_id, chain.clone(), Insert("a".into()));
cx.dispatch_action(window_id, chain.clone(), Input("b".into()));
cx.dispatch_action(window_id, chain.clone(), Input("n".into()));
cx.dispatch_action(window_id, chain.clone(), Input("a".into()));
finder
.condition(&cx, |finder, _| finder.matches.len() == 2)
.await;
@@ -501,7 +492,8 @@ mod tests {
#[gpui::test]
async fn test_matching_cancellation(mut cx: gpui::TestAppContext) {
let fs = Arc::new(FakeFs::new());
let params = cx.update(WorkspaceParams::test);
let fs = params.fs.as_fake();
fs.insert_tree(
"/dir",
json!({
@@ -516,10 +508,7 @@ mod tests {
)
.await;
let mut app_state = cx.update(test_app_state);
Arc::get_mut(&mut app_state).unwrap().fs = fs;
let (_, workspace) = cx.add_window(|cx| Workspace::new(&app_state, cx));
let (_, workspace) = cx.add_window(|cx| Workspace::new(&params, cx));
workspace
.update(&mut cx, |workspace, cx| {
workspace.add_worktree("/dir".as_ref(), cx)
@@ -528,8 +517,13 @@ mod tests {
.unwrap();
cx.read(|cx| workspace.read(cx).worktree_scans_complete(cx))
.await;
let (_, finder) =
cx.add_window(|cx| FileFinder::new(app_state.settings.clone(), workspace.clone(), cx));
let (_, finder) = cx.add_window(|cx| {
FileFinder::new(
params.settings.clone(),
workspace.read(cx).project().clone(),
cx,
)
});
let query = "hi".to_string();
finder
@@ -572,24 +566,29 @@ mod tests {
#[gpui::test]
async fn test_single_file_worktrees(mut cx: gpui::TestAppContext) {
let temp_dir = TempDir::new("test-single-file-worktrees").unwrap();
let dir_path = temp_dir.path().join("the-parent-dir");
let file_path = dir_path.join("the-file");
fs::create_dir(&dir_path).unwrap();
fs::write(&file_path, "").unwrap();
let params = cx.update(WorkspaceParams::test);
params
.fs
.as_fake()
.insert_tree("/root", json!({ "the-parent-dir": { "the-file": "" } }))
.await;
let app_state = cx.update(test_app_state);
let (_, workspace) = cx.add_window(|cx| Workspace::new(&app_state, cx));
let (_, workspace) = cx.add_window(|cx| Workspace::new(&params, cx));
workspace
.update(&mut cx, |workspace, cx| {
workspace.add_worktree(&file_path, cx)
workspace.add_worktree(Path::new("/root/the-parent-dir/the-file"), cx)
})
.await
.unwrap();
cx.read(|cx| workspace.read(cx).worktree_scans_complete(cx))
.await;
let (_, finder) =
cx.add_window(|cx| FileFinder::new(app_state.settings.clone(), workspace.clone(), cx));
let (_, finder) = cx.add_window(|cx| {
FileFinder::new(
params.settings.clone(),
workspace.read(cx).project().clone(),
cx,
)
});
// Even though there is only one worktree, that worktree's filename
// is included in the matching, because the worktree is a single file.
@@ -620,18 +619,25 @@ mod tests {
#[gpui::test(retries = 5)]
async fn test_multiple_matches_with_same_relative_path(mut cx: gpui::TestAppContext) {
let tmp_dir = temp_tree(json!({
"dir1": { "a.txt": "" },
"dir2": { "a.txt": "" }
}));
let params = cx.update(WorkspaceParams::test);
params
.fs
.as_fake()
.insert_tree(
"/root",
json!({
"dir1": { "a.txt": "" },
"dir2": { "a.txt": "" }
}),
)
.await;
let app_state = cx.update(test_app_state);
let (_, workspace) = cx.add_window(|cx| Workspace::new(&app_state, cx));
let (_, workspace) = cx.add_window(|cx| Workspace::new(&params, cx));
workspace
.update(&mut cx, |workspace, cx| {
workspace.open_paths(
&[tmp_dir.path().join("dir1"), tmp_dir.path().join("dir2")],
&[PathBuf::from("/root/dir1"), PathBuf::from("/root/dir2")],
cx,
)
})
@@ -639,8 +645,13 @@ mod tests {
cx.read(|cx| workspace.read(cx).worktree_scans_complete(cx))
.await;
let (_, finder) =
cx.add_window(|cx| FileFinder::new(app_state.settings.clone(), workspace.clone(), cx));
let (_, finder) = cx.add_window(|cx| {
FileFinder::new(
params.settings.clone(),
workspace.read(cx).project().clone(),
cx,
)
});
// Run a search that matches two files with the same relative path.
finder

crates/fuzzy/Cargo.toml Normal file
View File

@@ -0,0 +1,8 @@
[package]
name = "fuzzy"
version = "0.1.0"
edition = "2018"
[dependencies]
gpui = { path = "../gpui" }
util = { path = "../util" }

View File

@@ -1,13 +1,9 @@
mod char_bag;
use crate::{
util,
worktree::{EntryKind, Snapshot},
};
use gpui::executor;
use std::{
borrow::Cow,
cmp::{max, min, Ordering},
cmp::{self, Ordering},
path::Path,
sync::atomic::{self, AtomicBool},
sync::Arc,
@@ -19,7 +15,7 @@ const BASE_DISTANCE_PENALTY: f64 = 0.6;
const ADDITIONAL_DISTANCE_PENALTY: f64 = 0.05;
const MIN_DISTANCE_PENALTY: f64 = 0.2;
struct Matcher<'a> {
pub struct Matcher<'a> {
query: &'a [char],
lowercase_query: &'a [char],
query_char_bag: CharBag,
@@ -52,7 +48,7 @@ pub struct PathMatchCandidate<'a> {
pub struct PathMatch {
pub score: f64,
pub positions: Vec<usize>,
pub tree_id: usize,
pub worktree_id: usize,
pub path: Arc<Path>,
pub path_prefix: Arc<str>,
}
@@ -63,6 +59,14 @@ pub struct StringMatchCandidate {
pub char_bag: CharBag,
}
pub trait PathMatchCandidateSet<'a>: Send + Sync {
type Candidates: Iterator<Item = PathMatchCandidate<'a>>;
fn id(&self) -> usize;
fn len(&self) -> usize;
fn prefix(&self) -> Arc<str>;
fn candidates(&'a self, start: usize) -> Self::Candidates;
}
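Note: a minimal, hypothetical implementor of the new trait, shown only to illustrate its surface; the real candidate sets are supplied elsewhere (the file finder above now goes through Project::match_paths).

// EmptySet is invented for illustration and yields no candidates.
struct EmptySet;

impl<'a> PathMatchCandidateSet<'a> for EmptySet {
    type Candidates = std::iter::Empty<PathMatchCandidate<'a>>;

    fn id(&self) -> usize { 0 }
    fn len(&self) -> usize { 0 }
    fn prefix(&self) -> Arc<str> { "".into() }
    fn candidates(&'a self, _start: usize) -> Self::Candidates { std::iter::empty() }
}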
impl Match for PathMatch {
fn score(&self) -> f64 {
self.score
@@ -152,7 +156,7 @@ impl Ord for PathMatch {
self.score
.partial_cmp(&other.score)
.unwrap_or(Ordering::Equal)
.then_with(|| self.tree_id.cmp(&other.tree_id))
.then_with(|| self.worktree_id.cmp(&other.worktree_id))
.then_with(|| Arc::as_ptr(&self.path).cmp(&Arc::as_ptr(&other.path)))
}
}
@@ -213,20 +217,15 @@ pub async fn match_strings(
results
}
pub async fn match_paths(
snapshots: &[Snapshot],
pub async fn match_paths<'a, Set: PathMatchCandidateSet<'a>>(
candidate_sets: &'a [Set],
query: &str,
include_ignored: bool,
smart_case: bool,
max_results: usize,
cancel_flag: &AtomicBool,
background: Arc<executor::Background>,
) -> Vec<PathMatch> {
let path_count: usize = if include_ignored {
snapshots.iter().map(Snapshot::file_count).sum()
} else {
snapshots.iter().map(Snapshot::visible_file_count).sum()
};
let path_count: usize = candidate_sets.iter().map(|s| s.len()).sum();
if path_count == 0 {
return Vec::new();
}
@@ -259,45 +258,18 @@ pub async fn match_paths(
);
let mut tree_start = 0;
for snapshot in snapshots {
let tree_end = if include_ignored {
tree_start + snapshot.file_count()
} else {
tree_start + snapshot.visible_file_count()
};
for candidate_set in candidate_sets {
let tree_end = tree_start + candidate_set.len();
if tree_start < segment_end && segment_start < tree_end {
let path_prefix: Arc<str> =
if snapshot.root_entry().map_or(false, |e| e.is_file()) {
snapshot.root_name().into()
} else if snapshots.len() > 1 {
format!("{}/", snapshot.root_name()).into()
} else {
"".into()
};
let start = max(tree_start, segment_start) - tree_start;
let end = min(tree_end, segment_end) - tree_start;
let entries = if include_ignored {
snapshot.files(start).take(end - start)
} else {
snapshot.visible_files(start).take(end - start)
};
let paths = entries.map(|entry| {
if let EntryKind::File(char_bag) = entry.kind {
PathMatchCandidate {
path: &entry.path,
char_bag,
}
} else {
unreachable!()
}
});
let start = cmp::max(tree_start, segment_start) - tree_start;
let end = cmp::min(tree_end, segment_end) - tree_start;
let candidates = candidate_set.candidates(start).take(end - start);
matcher.match_paths(
snapshot.id(),
path_prefix,
paths,
candidate_set.id(),
candidate_set.prefix(),
candidates,
results,
&cancel_flag,
);
@@ -324,7 +296,7 @@ pub async fn match_paths(
}
impl<'a> Matcher<'a> {
fn new(
pub fn new(
query: &'a [char],
lowercase_query: &'a [char],
query_char_bag: CharBag,
@@ -345,7 +317,7 @@ impl<'a> Matcher<'a> {
}
}
fn match_strings(
pub fn match_strings(
&mut self,
candidates: &[StringMatchCandidate],
results: &mut Vec<StringMatch>,
@@ -365,11 +337,11 @@ impl<'a> Matcher<'a> {
)
}
fn match_paths(
pub fn match_paths<'c: 'a>(
&mut self,
tree_id: usize,
path_prefix: Arc<str>,
path_entries: impl Iterator<Item = PathMatchCandidate<'a>>,
path_entries: impl Iterator<Item = PathMatchCandidate<'c>>,
results: &mut Vec<PathMatch>,
cancel_flag: &AtomicBool,
) {
@@ -386,7 +358,7 @@ impl<'a> Matcher<'a> {
cancel_flag,
|candidate, score| PathMatch {
score,
tree_id,
worktree_id: tree_id,
positions: Vec::new(),
path: candidate.path.clone(),
path_prefix: path_prefix.clone(),

View File

@@ -5,15 +5,16 @@ name = "gpui"
version = "0.1.0"
[features]
test-support = []
test-support = ["env_logger"]
[dependencies]
arrayvec = "0.7.1"
gpui_macros = { path = "../gpui_macros" }
sum_tree = { path = "../sum_tree" }
async-task = "4.0.3"
backtrace = "0.3"
ctor = "0.1"
env_logger = { version = "0.8", optional = true }
etagere = "0.2"
gpui_macros = { path = "../gpui_macros" }
image = "0.23"
lazy_static = "1.4.0"
log = "0.4"

View File

@@ -1,6 +1,6 @@
use crate::{
elements::ElementBox,
executor,
executor::{self, Task},
keymap::{self, Keystroke},
platform::{self, CursorStyle, Platform, PromptLevel, WindowOptions},
presenter::Presenter,
@@ -8,7 +8,6 @@ use crate::{
AssetCache, AssetSource, ClipboardItem, FontCache, PathPromptOptions, TextLayoutCache,
};
use anyhow::{anyhow, Result};
use async_task::Task;
use keymap::MatchResult;
use parking_lot::Mutex;
use platform::Event;
@@ -58,18 +57,19 @@ pub trait ReadModel {
}
pub trait ReadModelWith {
fn read_model_with<E: Entity, F: FnOnce(&E, &AppContext) -> T, T>(
fn read_model_with<E: Entity, T>(
&self,
handle: &ModelHandle<E>,
read: F,
read: &mut dyn FnMut(&E, &AppContext) -> T,
) -> T;
}
pub trait UpdateModel {
fn update_model<T, F, S>(&mut self, handle: &ModelHandle<T>, update: F) -> S
where
T: Entity,
F: FnOnce(&mut T, &mut ModelContext<T>) -> S;
fn update_model<T: Entity, O>(
&mut self,
handle: &ModelHandle<T>,
update: &mut dyn FnMut(&mut T, &mut ModelContext<T>) -> O,
) -> O;
}
pub trait UpgradeModelHandle {
@@ -82,17 +82,23 @@ pub trait ReadView {
}
pub trait ReadViewWith {
fn read_view_with<V, F, T>(&self, handle: &ViewHandle<V>, read: F) -> T
fn read_view_with<V, T>(
&self,
handle: &ViewHandle<V>,
read: &mut dyn FnMut(&V, &AppContext) -> T,
) -> T
where
V: View,
F: FnOnce(&V, &AppContext) -> T;
V: View;
}
pub trait UpdateView {
fn update_view<T, F, S>(&mut self, handle: &ViewHandle<T>, update: F) -> S
fn update_view<T, S>(
&mut self,
handle: &ViewHandle<T>,
update: &mut dyn FnMut(&mut T, &mut ViewContext<T>) -> S,
) -> S
where
T: View,
F: FnOnce(&mut T, &mut ViewContext<T>) -> S;
T: View;
}
pub trait Action: 'static + AnyAction {
@@ -312,6 +318,7 @@ impl App {
let mut state = self.0.borrow_mut();
state.pending_flushes += 1;
let result = callback(&mut *state);
state.pending_notifications.clear();
state.flush_effects();
result
}
@@ -531,11 +538,11 @@ impl AsyncAppContext {
}
impl UpdateModel for AsyncAppContext {
fn update_model<T, F, S>(&mut self, handle: &ModelHandle<T>, update: F) -> S
where
T: Entity,
F: FnOnce(&mut T, &mut ModelContext<T>) -> S,
{
fn update_model<E: Entity, O>(
&mut self,
handle: &ModelHandle<E>,
update: &mut dyn FnMut(&mut E, &mut ModelContext<E>) -> O,
) -> O {
let mut state = self.0.borrow_mut();
state.pending_flushes += 1;
let result = state.update_model(handle, update);
@@ -554,10 +561,10 @@ impl UpgradeModelHandle for AsyncAppContext {
}
impl ReadModelWith for AsyncAppContext {
fn read_model_with<E: Entity, F: FnOnce(&E, &AppContext) -> T, T>(
fn read_model_with<E: Entity, T>(
&self,
handle: &ModelHandle<E>,
read: F,
read: &mut dyn FnMut(&E, &AppContext) -> T,
) -> T {
let cx = self.0.borrow();
let cx = cx.as_ref();
@@ -566,10 +573,13 @@ impl ReadModelWith for AsyncAppContext {
}
impl UpdateView for AsyncAppContext {
fn update_view<T, F, S>(&mut self, handle: &ViewHandle<T>, update: F) -> S
fn update_view<T, S>(
&mut self,
handle: &ViewHandle<T>,
update: &mut dyn FnMut(&mut T, &mut ViewContext<T>) -> S,
) -> S
where
T: View,
F: FnOnce(&mut T, &mut ViewContext<T>) -> S,
{
let mut state = self.0.borrow_mut();
state.pending_flushes += 1;
@@ -580,10 +590,13 @@ impl UpdateView for AsyncAppContext {
}
impl ReadViewWith for AsyncAppContext {
fn read_view_with<V, F, T>(&self, handle: &ViewHandle<V>, read: F) -> T
fn read_view_with<V, T>(
&self,
handle: &ViewHandle<V>,
read: &mut dyn FnMut(&V, &AppContext) -> T,
) -> T
where
V: View,
F: FnOnce(&V, &AppContext) -> T,
{
let cx = self.0.borrow();
let cx = cx.as_ref();
@@ -592,11 +605,11 @@ impl ReadViewWith for AsyncAppContext {
}
impl UpdateModel for TestAppContext {
fn update_model<T, F, S>(&mut self, handle: &ModelHandle<T>, update: F) -> S
where
T: Entity,
F: FnOnce(&mut T, &mut ModelContext<T>) -> S,
{
fn update_model<T: Entity, O>(
&mut self,
handle: &ModelHandle<T>,
update: &mut dyn FnMut(&mut T, &mut ModelContext<T>) -> O,
) -> O {
let mut state = self.cx.borrow_mut();
state.pending_flushes += 1;
let result = state.update_model(handle, update);
@@ -606,10 +619,10 @@ impl UpdateModel for TestAppContext {
}
impl ReadModelWith for TestAppContext {
fn read_model_with<E: Entity, F: FnOnce(&E, &AppContext) -> T, T>(
fn read_model_with<E: Entity, T>(
&self,
handle: &ModelHandle<E>,
read: F,
read: &mut dyn FnMut(&E, &AppContext) -> T,
) -> T {
let cx = self.cx.borrow();
let cx = cx.as_ref();
@@ -618,10 +631,13 @@ impl ReadModelWith for TestAppContext {
}
impl UpdateView for TestAppContext {
fn update_view<T, F, S>(&mut self, handle: &ViewHandle<T>, update: F) -> S
fn update_view<T, S>(
&mut self,
handle: &ViewHandle<T>,
update: &mut dyn FnMut(&mut T, &mut ViewContext<T>) -> S,
) -> S
where
T: View,
F: FnOnce(&mut T, &mut ViewContext<T>) -> S,
{
let mut state = self.cx.borrow_mut();
state.pending_flushes += 1;
@@ -632,10 +648,13 @@ impl UpdateView for TestAppContext {
}
impl ReadViewWith for TestAppContext {
fn read_view_with<V, F, T>(&self, handle: &ViewHandle<V>, read: F) -> T
fn read_view_with<V, T>(
&self,
handle: &ViewHandle<V>,
read: &mut dyn FnMut(&V, &AppContext) -> T,
) -> T
where
V: View,
F: FnOnce(&V, &AppContext) -> T,
{
let cx = self.cx.borrow();
let cx = cx.as_ref();
@@ -668,6 +687,7 @@ pub struct MutableAppContext {
debug_elements_callbacks: HashMap<usize, Box<dyn Fn(&AppContext) -> crate::json::Value>>,
foreground: Rc<executor::Foreground>,
pending_effects: VecDeque<Effect>,
pending_notifications: HashSet<usize>,
pending_flushes: usize,
flushing_effects: bool,
next_cursor_style_handle_id: Arc<AtomicUsize>,
@@ -708,6 +728,7 @@ impl MutableAppContext {
debug_elements_callbacks: HashMap::new(),
foreground,
pending_effects: VecDeque::new(),
pending_notifications: HashSet::new(),
pending_flushes: 0,
flushing_effects: false,
next_cursor_style_handle_id: Default::default(),
@@ -1015,10 +1036,18 @@ impl MutableAppContext {
observations: Some(Arc::downgrade(&self.observations)),
}
}
pub(crate) fn notify_model(&mut self, model_id: usize) {
if self.pending_notifications.insert(model_id) {
self.pending_effects
.push_back(Effect::ModelNotification { model_id });
}
}
pub(crate) fn notify_view(&mut self, window_id: usize, view_id: usize) {
self.pending_effects
.push_back(Effect::ViewNotification { window_id, view_id });
if self.pending_notifications.insert(view_id) {
self.pending_effects
.push_back(Effect::ViewNotification { window_id, view_id });
}
}
pub fn dispatch_action<A: Action>(
@@ -1400,6 +1429,7 @@ impl MutableAppContext {
refreshing = true;
}
}
self.pending_notifications.clear();
self.remove_dropped_entities();
} else {
self.remove_dropped_entities();
@@ -1411,6 +1441,7 @@ impl MutableAppContext {
if self.pending_effects.is_empty() {
self.flushing_effects = false;
self.pending_notifications.clear();
break;
} else {
refreshing = false;
@@ -1617,11 +1648,11 @@ impl ReadModel for MutableAppContext {
}
impl UpdateModel for MutableAppContext {
fn update_model<T, F, S>(&mut self, handle: &ModelHandle<T>, update: F) -> S
where
T: Entity,
F: FnOnce(&mut T, &mut ModelContext<T>) -> S,
{
fn update_model<T: Entity, V>(
&mut self,
handle: &ModelHandle<T>,
update: &mut dyn FnMut(&mut T, &mut ModelContext<T>) -> V,
) -> V {
if let Some(mut model) = self.cx.models.remove(&handle.model_id) {
self.pending_flushes += 1;
let mut cx = ModelContext::new(self, handle.model_id);
@@ -1661,10 +1692,13 @@ impl ReadView for MutableAppContext {
}
impl UpdateView for MutableAppContext {
fn update_view<T, F, S>(&mut self, handle: &ViewHandle<T>, update: F) -> S
fn update_view<T, S>(
&mut self,
handle: &ViewHandle<T>,
update: &mut dyn FnMut(&mut T, &mut ViewContext<T>) -> S,
) -> S
where
T: View,
F: FnOnce(&mut T, &mut ViewContext<T>) -> S,
{
self.pending_flushes += 1;
let mut view = self
@@ -1983,11 +2017,7 @@ impl<'a, T: Entity> ModelContext<'a, T> {
}
pub fn notify(&mut self) {
self.app
.pending_effects
.push_back(Effect::ModelNotification {
model_id: self.model_id,
});
self.app.notify_model(self.model_id);
}
pub fn subscribe<S: Entity, F>(
@@ -2075,11 +2105,11 @@ impl<M> ReadModel for ModelContext<'_, M> {
}
impl<M> UpdateModel for ModelContext<'_, M> {
fn update_model<T, F, S>(&mut self, handle: &ModelHandle<T>, update: F) -> S
where
T: Entity,
F: FnOnce(&mut T, &mut ModelContext<T>) -> S,
{
fn update_model<T: Entity, V>(
&mut self,
handle: &ModelHandle<T>,
update: &mut dyn FnMut(&mut T, &mut ModelContext<T>) -> V,
) -> V {
self.app.update_model(handle, update)
}
}
@@ -2336,11 +2366,11 @@ impl<V: View> ReadModel for RenderContext<'_, V> {
}
impl<V: View> UpdateModel for RenderContext<'_, V> {
fn update_model<T, F, S>(&mut self, handle: &ModelHandle<T>, update: F) -> S
where
T: Entity,
F: FnOnce(&mut T, &mut ModelContext<T>) -> S,
{
fn update_model<T: Entity, O>(
&mut self,
handle: &ModelHandle<T>,
update: &mut dyn FnMut(&mut T, &mut ModelContext<T>) -> O,
) -> O {
self.app.update_model(handle, update)
}
}
@@ -2387,11 +2417,11 @@ impl<V> UpgradeModelHandle for ViewContext<'_, V> {
}
impl<V: View> UpdateModel for ViewContext<'_, V> {
fn update_model<T, F, S>(&mut self, handle: &ModelHandle<T>, update: F) -> S
where
T: Entity,
F: FnOnce(&mut T, &mut ModelContext<T>) -> S,
{
fn update_model<T: Entity, O>(
&mut self,
handle: &ModelHandle<T>,
update: &mut dyn FnMut(&mut T, &mut ModelContext<T>) -> O,
) -> O {
self.app.update_model(handle, update)
}
}
@@ -2403,10 +2433,13 @@ impl<V: View> ReadView for ViewContext<'_, V> {
}
impl<V: View> UpdateView for ViewContext<'_, V> {
fn update_view<T, F, S>(&mut self, handle: &ViewHandle<T>, update: F) -> S
fn update_view<T, S>(
&mut self,
handle: &ViewHandle<T>,
update: &mut dyn FnMut(&mut T, &mut ViewContext<T>) -> S,
) -> S
where
T: View,
F: FnOnce(&mut T, &mut ViewContext<T>) -> S,
{
self.app.update_view(handle, update)
}
@@ -2461,7 +2494,11 @@ impl<T: Entity> ModelHandle<T> {
C: ReadModelWith,
F: FnOnce(&T, &AppContext) -> S,
{
cx.read_model_with(self, read)
let mut read = Some(read);
cx.read_model_with(self, &mut |model, cx| {
let read = read.take().unwrap();
read(model, cx)
})
}
pub fn update<C, F, S>(&self, cx: &mut C, update: F) -> S
@@ -2469,7 +2506,11 @@ impl<T: Entity> ModelHandle<T> {
C: UpdateModel,
F: FnOnce(&mut T, &mut ModelContext<T>) -> S,
{
cx.update_model(self, update)
let mut update = Some(update);
cx.update_model(self, &mut |model, cx| {
let update = update.take().unwrap();
update(model, cx)
})
}
pub fn next_notification(&self, cx: &TestAppContext) -> impl Future<Output = ()> {
@@ -2735,7 +2776,11 @@ impl<T: View> ViewHandle<T> {
C: ReadViewWith,
F: FnOnce(&T, &AppContext) -> S,
{
cx.read_view_with(self, read)
let mut read = Some(read);
cx.read_view_with(self, &mut |view, cx| {
let read = read.take().unwrap();
read(view, cx)
})
}
pub fn update<C, F, S>(&self, cx: &mut C, update: F) -> S
@@ -2743,7 +2788,11 @@ impl<T: View> ViewHandle<T> {
C: UpdateView,
F: FnOnce(&mut T, &mut ViewContext<T>) -> S,
{
cx.update_view(self, update)
let mut update = Some(update);
cx.update_view(self, &mut |view, cx| {
let update = update.take().unwrap();
update(view, cx)
})
}
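Note: the handle methods above keep their FnOnce signatures while the context traits now take &mut dyn FnMut; the FnOnce closure is parked in an Option and taken on first call. The same trick in isolation, as a standalone sketch:

// Calls a FnOnce closure at most once through a &mut dyn FnMut interface.
fn call_once_via_dyn<T>(f: impl FnOnce() -> T) -> T {
    fn run<T>(f: &mut dyn FnMut() -> Option<T>) -> T {
        f().expect("closure was already taken")
    }
    let mut f = Some(f);
    run(&mut || f.take().map(|f| f()))
}

// call_once_via_dyn(|| 42) == 42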
pub fn is_focused(&self, cx: &AppContext) -> bool {
@@ -2891,6 +2940,11 @@ impl AnyViewHandle {
TypeId::of::<T>() == self.view_type
}
pub fn is_focused(&self, cx: &AppContext) -> bool {
cx.focused_view_id(self.window_id)
.map_or(false, |focused_id| focused_id == self.view_id)
}
pub fn downcast<T: View>(self) -> Option<ViewHandle<T>> {
if self.is::<T>() {
let result = Some(ViewHandle {

View File

@@ -33,6 +33,14 @@ impl Color {
Self(ColorU::from_u32(0xff0000ff))
}
pub fn green() -> Self {
Self(ColorU::from_u32(0x00ff00ff))
}
pub fn blue() -> Self {
Self(ColorU::from_u32(0x0000ffff))
}
pub fn new(r: u8, g: u8, b: u8, a: u8) -> Self {
Self(ColorU::new(r, g, b, a))
}

View File

@@ -8,7 +8,6 @@ mod flex;
mod hook;
mod image;
mod label;
mod line_box;
mod list;
mod mouse_event_handler;
mod overlay;
@@ -19,8 +18,8 @@ mod uniform_list;
pub use self::{
align::*, canvas::*, constrained_box::*, container::*, empty::*, event_handler::*, flex::*,
hook::*, image::*, label::*, line_box::*, list::*, mouse_event_handler::*, overlay::*,
stack::*, svg::*, text::*, uniform_list::*,
hook::*, image::*, label::*, list::*, mouse_event_handler::*, overlay::*, stack::*, svg::*,
text::*, uniform_list::*,
};
pub use crate::presenter::ChildView;
use crate::{
@@ -109,6 +108,34 @@ pub trait Element {
element: Rc::new(RefCell::new(Lifecycle::Init { element: self })),
})
}
fn constrained(self) -> ConstrainedBox
where
Self: 'static + Sized,
{
ConstrainedBox::new(self.boxed())
}
fn aligned(self) -> Align
where
Self: 'static + Sized,
{
Align::new(self.boxed())
}
fn contained(self) -> Container
where
Self: 'static + Sized,
{
Container::new(self.boxed())
}
fn expanded(self, flex: f32) -> Expanded
where
Self: 'static + Sized,
{
Expanded::new(flex, self.boxed())
}
}
pub enum Lifecycle<T: Element> {
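The new constrained/aligned/contained/expanded methods above are default trait methods that wrap `self.boxed()` in a decorator element, so call sites can chain builders instead of nesting constructors. A toy, self-contained model of that combinator style (Widget/Text/Padded/Centered are stand-ins, not gpui types):

trait Widget {
    fn draw(&self) -> String;

    // Default methods that wrap `self` in a decorator, mirroring Element::contained() etc.
    fn padded(self, amount: u32) -> Padded
    where
        Self: 'static + Sized,
    {
        Padded { child: Box::new(self), amount }
    }

    fn centered(self) -> Centered
    where
        Self: 'static + Sized,
    {
        Centered { child: Box::new(self) }
    }
}

struct Text(String);
struct Padded { child: Box<dyn Widget>, amount: u32 }
struct Centered { child: Box<dyn Widget> }

impl Widget for Text {
    fn draw(&self) -> String { self.0.clone() }
}
impl Widget for Padded {
    fn draw(&self) -> String { format!("pad({}, {})", self.amount, self.child.draw()) }
}
impl Widget for Centered {
    fn draw(&self) -> String { format!("center({})", self.child.draw()) }
}

fn main() {
    // Reads like the gpui call sites the new methods enable: label.contained().aligned().
    let ui = Text("hello".into()).padded(4).centered();
    println!("{}", ui.draw());
}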

View File

@@ -25,6 +25,11 @@ impl Align {
self
}
pub fn left(mut self) -> Self {
self.alignment.set_x(-1.0);
self
}
pub fn right(mut self) -> Self {
self.alignment.set_x(1.0);
self

View File

@@ -57,6 +57,11 @@ impl Container {
self
}
pub fn with_margin_right(mut self, margin: f32) -> Self {
self.style.margin.right = margin;
self
}
pub fn with_horizontal_padding(mut self, padding: f32) -> Self {
self.style.padding.left = padding;
self.style.padding.right = padding;
@@ -79,6 +84,11 @@ impl Container {
self
}
pub fn with_padding_left(mut self, padding: f32) -> Self {
self.style.padding.left = padding;
self
}
pub fn with_padding_right(mut self, padding: f32) -> Self {
self.style.padding.right = padding;
self
@@ -157,7 +167,10 @@ impl Element for Container {
constraint: SizeConstraint,
cx: &mut LayoutContext,
) -> (Vector2F, Self::LayoutState) {
let size_buffer = self.margin_size() + self.padding_size() + self.border_size();
let mut size_buffer = self.margin_size() + self.padding_size();
if !self.style.border.overlay {
size_buffer += self.border_size();
}
let child_constraint = SizeConstraint {
min: (constraint.min - size_buffer).max(Vector2F::zero()),
max: (constraint.max - size_buffer).max(Vector2F::zero()),
@@ -186,20 +199,43 @@ impl Element for Container {
color: shadow.color,
});
}
cx.scene.push_quad(Quad {
bounds: quad_bounds,
background: self.style.background_color,
border: self.style.border,
corner_radius: self.style.corner_radius,
});
let child_origin = quad_bounds.origin()
+ vec2f(self.style.padding.left, self.style.padding.top)
+ vec2f(
self.style.border.left_width(),
self.style.border.top_width(),
);
self.child.paint(child_origin, visible_bounds, cx);
let child_origin =
quad_bounds.origin() + vec2f(self.style.padding.left, self.style.padding.top);
if self.style.border.overlay {
cx.scene.push_quad(Quad {
bounds: quad_bounds,
background: self.style.background_color,
border: Default::default(),
corner_radius: self.style.corner_radius,
});
self.child.paint(child_origin, visible_bounds, cx);
cx.scene.push_layer(None);
cx.scene.push_quad(Quad {
bounds: quad_bounds,
background: Default::default(),
border: self.style.border,
corner_radius: self.style.corner_radius,
});
cx.scene.pop_layer();
} else {
cx.scene.push_quad(Quad {
bounds: quad_bounds,
background: self.style.background_color,
border: self.style.border,
corner_radius: self.style.corner_radius,
});
let child_origin = child_origin
+ vec2f(
self.style.border.left_width(),
self.style.border.top_width(),
);
self.child.paint(child_origin, visible_bounds, cx);
}
}
fn dispatch_event(
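The Container change above has two effects when `border.overlay` is set: the border stops reserving layout space, and it is painted in a separate layer on top of the child instead of insetting it. An illustrative, self-contained model of the child-origin consequence (Border here is a stand-in with only the fields the example needs):

struct Border { left: f32, top: f32, overlay: bool }

fn child_origin(origin: (f32, f32), padding: (f32, f32), border: &Border) -> (f32, f32) {
    let (mut x, mut y) = (origin.0 + padding.0, origin.1 + padding.1);
    if !border.overlay {
        // Only a non-overlay border takes up layout space and pushes the child inward.
        x += border.left;
        y += border.top;
    }
    (x, y)
}

fn main() {
    let inset = Border { left: 2., top: 2., overlay: false };
    let overlay = Border { left: 2., top: 2., overlay: true };
    assert_eq!(child_origin((0., 0.), (4., 4.), &inset), (6., 6.));
    assert_eq!(child_origin((0., 0.), (4., 4.), &overlay), (4., 4.));
}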

View File

@@ -1,6 +1,9 @@
use super::constrain_size_preserving_aspect_ratio;
use crate::{
geometry::{rect::RectF, vector::Vector2F},
geometry::{
rect::RectF,
vector::{vec2f, Vector2F},
},
json::{json, ToJson},
scene, Border, DebugContext, Element, Event, EventContext, ImageData, LayoutContext,
PaintContext, SizeConstraint,
@@ -16,9 +19,13 @@ pub struct Image {
#[derive(Copy, Clone, Default, Deserialize)]
pub struct ImageStyle {
#[serde(default)]
border: Border,
pub border: Border,
#[serde(default)]
corner_radius: f32,
pub corner_radius: f32,
#[serde(default)]
pub height: Option<f32>,
#[serde(default)]
pub width: Option<f32>,
}
impl Image {
@@ -44,8 +51,14 @@ impl Element for Image {
constraint: SizeConstraint,
_: &mut LayoutContext,
) -> (Vector2F, Self::LayoutState) {
let size =
constrain_size_preserving_aspect_ratio(constraint.max, self.data.size().to_f32());
let desired_size = vec2f(
self.style.width.unwrap_or(constraint.max.x()),
self.style.height.unwrap_or(constraint.max.y()),
);
let size = constrain_size_preserving_aspect_ratio(
constraint.constrain(desired_size),
self.data.size().to_f32(),
);
(size, ())
}
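With the Image change above, an optional style width/height caps the desired size before the aspect-ratio-preserving fit. A rough, self-contained sketch of that sizing order, assuming the fit only scales the image down to the available box (function names here are illustrative):

fn fit_preserving_aspect_ratio(max: (f32, f32), image: (f32, f32)) -> (f32, f32) {
    let scale = (max.0 / image.0).min(max.1 / image.1);
    (image.0 * scale, image.1 * scale)
}

fn image_size(
    constraint_max: (f32, f32),
    style_width: Option<f32>,
    style_height: Option<f32>,
    image: (f32, f32),
) -> (f32, f32) {
    // Style dimensions win where given, then the constraint clamps them.
    let desired = (
        style_width.unwrap_or(constraint_max.0).min(constraint_max.0),
        style_height.unwrap_or(constraint_max.1).min(constraint_max.1),
    );
    fit_preserving_aspect_ratio(desired, image)
}

fn main() {
    // A 200x100 image in an 80x80 box keeps its 2:1 ratio.
    assert_eq!(image_size((80., 80.), None, None, (200., 100.)), (80., 40.));
    // An explicit style height of 20 tightens the fit further.
    assert_eq!(image_size((80., 80.), None, Some(20.), (200., 100.)), (40., 20.));
}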

View File

@@ -18,7 +18,7 @@ pub struct Label {
highlight_indices: Vec<usize>,
}
#[derive(Clone, Debug, Deserialize)]
#[derive(Clone, Debug, Deserialize, Default)]
pub struct LabelStyle {
pub text: TextStyle,
pub highlight_text: Option<TextStyle>,
@@ -135,10 +135,12 @@ impl Element for Label {
);
let size = vec2f(
line.width().max(constraint.min.x()).min(constraint.max.x()),
line.width()
.ceil()
.max(constraint.min.x())
.min(constraint.max.x()),
cx.font_cache
.line_height(self.style.text.font_id, self.style.text.font_size)
.ceil(),
.line_height(self.style.text.font_id, self.style.text.font_size),
);
(size, line)

View File

@@ -4,14 +4,15 @@ use crate::{
vector::{vec2f, Vector2F},
},
json::json,
sum_tree::{self, Bias, SumTree},
DebugContext, Element, ElementBox, ElementRc, Event, EventContext, LayoutContext, PaintContext,
SizeConstraint,
};
use std::{cell::RefCell, collections::VecDeque, ops::Range, rc::Rc};
use sum_tree::{Bias, SumTree};
pub struct List {
state: ListState,
invalidated_elements: Vec<ElementRc>,
}
#[derive(Clone)]
@@ -79,7 +80,10 @@ struct Height(f32);
impl List {
pub fn new(state: ListState) -> Self {
Self { state }
Self {
state,
invalidated_elements: Default::default(),
}
}
}
@@ -124,7 +128,7 @@ impl Element for List {
});
// Render items after the scroll top, including those in the trailing overdraw.
let mut cursor = old_items.cursor::<Count, ()>();
let mut cursor = old_items.cursor::<Count>();
cursor.seek(&Count(scroll_top.item_ix), Bias::Right, &());
for (ix, item) in cursor.by_ref().enumerate() {
if rendered_height - scroll_top.offset_in_item >= size.y() + state.overdraw {
@@ -145,8 +149,7 @@ impl Element for List {
while rendered_height < size.y() {
cursor.prev(&());
if let Some(item) = cursor.item() {
let element =
state.render_item(cursor.seek_start().0, item, item_constraint, cx);
let element = state.render_item(cursor.start().0, item, item_constraint, cx);
rendered_height += element.size().y();
rendered_items.push_front(ListItem::Rendered(element));
} else {
@@ -155,7 +158,7 @@ impl Element for List {
}
scroll_top = ListOffset {
item_ix: cursor.seek_start().0,
item_ix: cursor.start().0,
offset_in_item: rendered_height - size.y(),
};
@@ -166,7 +169,7 @@ impl Element for List {
}
Orientation::Bottom => {
scroll_top = ListOffset {
item_ix: cursor.seek_start().0,
item_ix: cursor.start().0,
offset_in_item: rendered_height - size.y(),
};
state.logical_scroll_top = None;
@@ -179,7 +182,7 @@ impl Element for List {
while leading_overdraw < state.overdraw {
cursor.prev(&());
if let Some(item) = cursor.item() {
let element = state.render_item(cursor.seek_start().0, item, item_constraint, cx);
let element = state.render_item(cursor.start().0, item, item_constraint, cx);
leading_overdraw += element.size().y();
rendered_items.push_front(ListItem::Rendered(element));
} else {
@@ -187,10 +190,9 @@ impl Element for List {
}
}
let new_rendered_range =
cursor.seek_start().0..(cursor.seek_start().0 + rendered_items.len());
let new_rendered_range = cursor.start().0..(cursor.start().0 + rendered_items.len());
let mut cursor = old_items.cursor::<Count, ()>();
let mut cursor = old_items.cursor::<Count>();
if state.rendered_range.start < new_rendered_range.start {
new_items.push_tree(
@@ -198,7 +200,7 @@ impl Element for List {
&(),
);
let remove_to = state.rendered_range.end.min(new_rendered_range.start);
while cursor.seek_start().0 < remove_to {
while cursor.start().0 < remove_to {
new_items.push(cursor.item().unwrap().remove(), &());
cursor.next(&());
}
@@ -217,7 +219,7 @@ impl Element for List {
&(),
);
}
while cursor.seek_start().0 < state.rendered_range.end {
while cursor.start().0 < state.rendered_range.end {
new_items.push(cursor.item().unwrap().remove(), &());
cursor.next(&());
}
@@ -258,10 +260,35 @@ impl Element for List {
let mut handled = false;
let mut state = self.state.0.borrow_mut();
for (mut element, _) in state.visible_elements(bounds, scroll_top) {
handled = element.dispatch_event(event, cx) || handled;
let mut item_origin = bounds.origin() - vec2f(0., scroll_top.offset_in_item);
let mut cursor = state.items.cursor::<Count>();
let mut new_items = cursor.slice(&Count(scroll_top.item_ix), Bias::Right, &());
while let Some(item) = cursor.item() {
if item_origin.y() > bounds.max_y() {
break;
}
if let ListItem::Rendered(element) = item {
let prev_notify_count = cx.notify_count();
let mut element = element.clone();
handled = element.dispatch_event(event, cx) || handled;
item_origin.set_y(item_origin.y() + element.size().y());
if cx.notify_count() > prev_notify_count {
new_items.push(ListItem::Unrendered, &());
self.invalidated_elements.push(element);
} else {
new_items.push(item.clone(), &());
}
cursor.next(&());
} else {
unreachable!();
}
}
new_items.push_tree(cursor.suffix(&()), &());
drop(cursor);
state.items = new_items;
match event {
Event::ScrollWheel {
position,
@@ -361,7 +388,7 @@ impl ListState {
new_end + state.rendered_range.end.saturating_sub(old_range.end);
}
let mut old_heights = state.items.cursor::<Count, ()>();
let mut old_heights = state.items.cursor::<Count>();
let mut new_heights = old_heights.slice(&Count(old_range.start), Bias::Right, &());
old_heights.seek_forward(&Count(old_range.end), Bias::Right, &());
@@ -397,12 +424,11 @@ impl StateInner {
}
fn visible_range(&self, height: f32, scroll_top: &ListOffset) -> Range<usize> {
let mut cursor = self.items.cursor::<Count, Height>();
let mut cursor = self.items.cursor::<ListItemSummary>();
cursor.seek(&Count(scroll_top.item_ix), Bias::Right, &());
let start_y = cursor.sum_start().0 + scroll_top.offset_in_item;
let mut cursor = cursor.swap_dimensions();
let start_y = cursor.start().height + scroll_top.offset_in_item;
cursor.seek_forward(&Height(start_y + height), Bias::Left, &());
scroll_top.item_ix..cursor.sum_start().0 + 1
scroll_top.item_ix..cursor.start().count + 1
}
fn visible_elements<'a>(
@@ -411,7 +437,7 @@ impl StateInner {
scroll_top: &ListOffset,
) -> impl Iterator<Item = (ElementRc, Vector2F)> + 'a {
let mut item_origin = bounds.origin() - vec2f(0., scroll_top.offset_in_item);
let mut cursor = self.items.cursor::<Count, ()>();
let mut cursor = self.items.cursor::<Count>();
cursor.seek(&Count(scroll_top.item_ix), Bias::Right, &());
std::iter::from_fn(move || {
while let Some(item) = cursor.item() {
@@ -453,10 +479,10 @@ impl StateInner {
if self.orientation == Orientation::Bottom && new_scroll_top == scroll_max {
self.logical_scroll_top = None;
} else {
let mut cursor = self.items.cursor::<Height, Count>();
let mut cursor = self.items.cursor::<ListItemSummary>();
cursor.seek(&Height(new_scroll_top), Bias::Right, &());
let item_ix = cursor.sum_start().0;
let offset_in_item = new_scroll_top - cursor.seek_start().0;
let item_ix = cursor.start().count;
let offset_in_item = new_scroll_top - cursor.start().height;
self.logical_scroll_top = Some(ListOffset {
item_ix,
offset_in_item,
@@ -473,9 +499,9 @@ impl StateInner {
}
fn scroll_top(&self, logical_scroll_top: &ListOffset) -> f32 {
let mut cursor = self.items.cursor::<Count, Height>();
let mut cursor = self.items.cursor::<ListItemSummary>();
cursor.seek(&Count(logical_scroll_top.item_ix), Bias::Right, &());
cursor.sum_start().0 + logical_scroll_top.offset_in_item
cursor.start().height + logical_scroll_top.offset_in_item
}
}
@@ -527,12 +553,6 @@ impl sum_tree::Summary for ListItemSummary {
}
}
impl<'a> sum_tree::Dimension<'a, ListItemSummary> for ListItemSummary {
fn add_summary(&mut self, summary: &'a ListItemSummary, _: &()) {
sum_tree::Summary::add_summary(self, summary, &());
}
}
impl<'a> sum_tree::Dimension<'a, ListItemSummary> for Count {
fn add_summary(&mut self, summary: &'a ListItemSummary, _: &()) {
self.0 += summary.count;
@@ -557,9 +577,15 @@ impl<'a> sum_tree::Dimension<'a, ListItemSummary> for Height {
}
}
impl<'a> sum_tree::SeekDimension<'a, ListItemSummary> for Height {
fn cmp(&self, other: &Self, _: &()) -> std::cmp::Ordering {
self.0.partial_cmp(&other.0).unwrap()
impl<'a> sum_tree::SeekTarget<'a, ListItemSummary, ListItemSummary> for Count {
fn cmp(&self, other: &ListItemSummary, _: &()) -> std::cmp::Ordering {
self.0.partial_cmp(&other.count).unwrap()
}
}
impl<'a> sum_tree::SeekTarget<'a, ListItemSummary, ListItemSummary> for Height {
fn cmp(&self, other: &ListItemSummary, _: &()) -> std::cmp::Ordering {
self.0.partial_cmp(&other.height).unwrap()
}
}
@@ -731,7 +757,7 @@ mod tests {
log::info!("splice({:?}, {:?})", start_ix..end_ix, new_elements);
state.splice(start_ix..end_ix, new_elements.len());
elements.splice(start_ix..end_ix, new_elements);
for (ix, item) in state.0.borrow().items.cursor::<(), ()>().enumerate() {
for (ix, item) in state.0.borrow().items.cursor::<()>().enumerate() {
if let ListItem::Rendered(element) = item {
let (expected_id, _) = elements[ix];
element.with_metadata(|metadata: Option<&usize>| {
@@ -768,7 +794,7 @@ mod tests {
let mut first_rendered_element_top = None;
let mut last_rendered_element_bottom = None;
assert_eq!(state.items.summary().count, elements.borrow().len());
for (ix, item) in state.items.cursor::<(), ()>().enumerate() {
for (ix, item) in state.items.cursor::<()>().enumerate() {
match item {
ListItem::Unrendered => {
let item_bottom = item_top;
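The List hunks above track a sum_tree API change: cursors now take a single dimension type, and seek targets compare themselves against the full ListItemSummary (count plus height) via a SeekTarget trait. A tiny, self-contained model of that idea, not gpui's actual sum_tree API:

use std::cmp::Ordering;

struct ItemSummary { count: usize, height: f32 }

struct Count(usize);
struct Height(f32);

trait SeekTarget {
    // A lightweight target compares itself against the accumulated summary.
    fn cmp(&self, summary: &ItemSummary) -> Ordering;
}

impl SeekTarget for Count {
    fn cmp(&self, summary: &ItemSummary) -> Ordering {
        self.0.cmp(&summary.count)
    }
}

impl SeekTarget for Height {
    fn cmp(&self, summary: &ItemSummary) -> Ordering {
        self.0.partial_cmp(&summary.height).unwrap()
    }
}

fn main() {
    // Prefix summaries after 1, 2 and 3 items.
    let prefixes = [
        ItemSummary { count: 1, height: 10. },
        ItemSummary { count: 2, height: 25. },
        ItemSummary { count: 3, height: 45. },
    ];
    // Seek to the first prefix that reaches 20 points of height...
    let by_height = Height(20.);
    let ix = prefixes.iter().position(|s| by_height.cmp(s) != Ordering::Greater).unwrap();
    assert_eq!(ix, 1);
    // ...or, with the same summary type, to the prefix containing the second item.
    let by_count = Count(2);
    let ix = prefixes.iter().position(|s| by_count.cmp(s) != Ordering::Greater).unwrap();
    assert_eq!(ix, 1);
}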

View File

@@ -5,7 +5,7 @@ use crate::{
vector::{vec2f, Vector2F},
},
json::{self, json},
ElementBox, MutableAppContext,
ElementBox,
};
use json::ToJson;
use parking_lot::Mutex;
@@ -38,25 +38,39 @@ pub struct LayoutState {
pub struct UniformList<F>
where
F: Fn(Range<usize>, &mut Vec<ElementBox>, &mut MutableAppContext),
F: Fn(Range<usize>, &mut Vec<ElementBox>, &mut LayoutContext),
{
state: UniformListState,
item_count: usize,
append_items: F,
padding_top: f32,
padding_bottom: f32,
}
impl<F> UniformList<F>
where
F: Fn(Range<usize>, &mut Vec<ElementBox>, &mut MutableAppContext),
F: Fn(Range<usize>, &mut Vec<ElementBox>, &mut LayoutContext),
{
pub fn new(state: UniformListState, item_count: usize, append_items: F) -> Self {
Self {
state,
item_count,
append_items,
padding_top: 0.,
padding_bottom: 0.,
}
}
pub fn with_padding_top(mut self, padding: f32) -> Self {
self.padding_top = padding;
self
}
pub fn with_padding_bottom(mut self, padding: f32) -> Self {
self.padding_bottom = padding;
self
}
fn scroll(
&self,
_: Vector2F,
@@ -84,7 +98,7 @@ where
}
if let Some(item_ix) = state.scroll_to.take() {
let item_top = item_ix as f32 * item_height;
let item_top = self.padding_top + item_ix as f32 * item_height;
let item_bottom = item_top + item_height;
if item_top < state.scroll_top {
@@ -102,7 +116,7 @@ where
impl<F> Element for UniformList<F>
where
F: Fn(Range<usize>, &mut Vec<ElementBox>, &mut MutableAppContext),
F: Fn(Range<usize>, &mut Vec<ElementBox>, &mut LayoutContext),
{
type LayoutState = LayoutState;
type PaintState = ();
@@ -124,7 +138,7 @@ where
let mut scroll_max = 0.;
let mut items = Vec::new();
(self.append_items)(0..1, &mut items, cx.app);
(self.append_items)(0..1, &mut items, cx);
if let Some(first_item) = items.first_mut() {
let mut item_size = first_item.layout(item_constraint, cx);
item_size.set_x(size.x());
@@ -137,16 +151,21 @@ where
size.set_y(size.y().min(scroll_height).max(constraint.min.y()));
}
scroll_max = item_height * self.item_count as f32 - size.y();
let scroll_height =
item_height * self.item_count as f32 + self.padding_top + self.padding_bottom;
scroll_max = (scroll_height - size.y()).max(0.);
self.autoscroll(scroll_max, size.y(), item_height);
items.clear();
let start = cmp::min((self.scroll_top() / item_height) as usize, self.item_count);
let start = cmp::min(
((self.scroll_top() - self.padding_top) / item_height) as usize,
self.item_count,
);
let end = cmp::min(
self.item_count,
start + (size.y() / item_height).ceil() as usize + 1,
);
(self.append_items)(start..end, &mut items, cx.app);
(self.append_items)(start..end, &mut items, cx);
for item in &mut items {
item.layout(item_constraint, cx);
}
@@ -173,8 +192,11 @@ where
) -> Self::PaintState {
cx.scene.push_layer(Some(bounds));
let mut item_origin =
bounds.origin() - vec2f(0.0, self.state.scroll_top() % layout.item_height);
let mut item_origin = bounds.origin()
- vec2f(
0.,
(self.state.scroll_top() - self.padding_top) % layout.item_height,
);
for item in &mut layout.items {
item.paint(item_origin, visible_bounds, cx);
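The UniformList hunks above add top/bottom padding: it contributes to the scrollable height, and the first visible row is derived from the scroll offset minus the top padding. A self-contained arithmetic sketch of that bookkeeping (names and numbers are made up for the example):

fn visible_range(
    scroll_top: f32,
    padding_top: f32,
    padding_bottom: f32,
    item_height: f32,
    item_count: usize,
    viewport_height: f32,
) -> (std::ops::Range<usize>, f32) {
    // Padding extends the scrollable content beyond the rows themselves.
    let scroll_height = item_height * item_count as f32 + padding_top + padding_bottom;
    let scroll_max = (scroll_height - viewport_height).max(0.);
    // The first laid-out row is measured from below the top padding.
    let start = (((scroll_top - padding_top) / item_height) as usize).min(item_count);
    let end = item_count.min(start + (viewport_height / item_height).ceil() as usize + 1);
    (start..end, scroll_max)
}

fn main() {
    // 100 rows of 20pt with 6pt padding at either end, in a 200pt viewport,
    // scrolled down by 50pt: rows 2..13 are laid out.
    let (range, scroll_max) = visible_range(50., 6., 6., 20., 100, 200.);
    assert_eq!(range, 2..13);
    assert_eq!(scroll_max, 1812.);
}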

View File

@@ -1,12 +1,12 @@
use anyhow::{anyhow, Result};
use async_task::Runnable;
pub use async_task::Task;
use backtrace::{Backtrace, BacktraceFmt, BytesOrWideString};
use parking_lot::Mutex;
use postage::{barrier, prelude::Stream as _};
use rand::prelude::*;
use smol::{channel, prelude::*, Executor, Timer};
use std::{
any::Any,
fmt::{self, Debug},
marker::PhantomData,
mem,
@@ -23,7 +23,10 @@ use std::{
};
use waker_fn::waker_fn;
use crate::{platform, util};
use crate::{
platform::{self, Dispatcher},
util,
};
pub enum Foreground {
Platform {
@@ -42,6 +45,24 @@ pub enum Background {
},
}
type AnyLocalFuture = Pin<Box<dyn 'static + Future<Output = Box<dyn Any + 'static>>>>;
type AnyFuture = Pin<Box<dyn 'static + Send + Future<Output = Box<dyn Any + Send + 'static>>>>;
type AnyTask = async_task::Task<Box<dyn Any + Send + 'static>>;
type AnyLocalTask = async_task::Task<Box<dyn Any + 'static>>;
pub enum Task<T> {
Local {
any_task: AnyLocalTask,
result_type: PhantomData<T>,
},
Send {
any_task: AnyTask,
result_type: PhantomData<T>,
},
}
unsafe impl<T: Send> Send for Task<T> {}
struct DeterministicState {
rng: StdRng,
seed: u64,
@@ -77,11 +98,7 @@ impl Deterministic {
}
}
pub fn spawn_from_foreground<F, T>(&self, future: F) -> Task<T>
where
T: 'static,
F: Future<Output = T> + 'static,
{
fn spawn_from_foreground(&self, future: AnyLocalFuture) -> AnyLocalTask {
let backtrace = Backtrace::new_unresolved();
let scheduled_once = AtomicBool::new(false);
let state = self.state.clone();
@@ -100,11 +117,7 @@ impl Deterministic {
task
}
pub fn spawn<F, T>(&self, future: F) -> Task<T>
where
T: 'static + Send,
F: 'static + Send + Future<Output = T>,
{
fn spawn(&self, future: AnyFuture) -> AnyTask {
let backtrace = Backtrace::new_unresolved();
let state = self.state.clone();
let unparker = self.parker.lock().unparker();
@@ -119,13 +132,8 @@ impl Deterministic {
task
}
pub fn run<F, T>(&self, future: F) -> T
where
T: 'static,
F: Future<Output = T> + 'static,
{
fn run(&self, mut future: AnyLocalFuture) -> Box<dyn Any> {
let woken = Arc::new(AtomicBool::new(false));
let mut future = Box::pin(future);
loop {
if let Some(result) = self.run_internal(woken.clone(), &mut future) {
return result;
@@ -142,16 +150,15 @@ impl Deterministic {
fn run_until_parked(&self) {
let woken = Arc::new(AtomicBool::new(false));
let future = std::future::pending::<()>();
smol::pin!(future);
self.run_internal(woken, future);
let mut future = any_local_future(std::future::pending::<()>());
self.run_internal(woken, &mut future);
}
pub fn run_internal<F, T>(&self, woken: Arc<AtomicBool>, mut future: F) -> Option<T>
where
T: 'static,
F: Future<Output = T> + Unpin,
{
fn run_internal(
&self,
woken: Arc<AtomicBool>,
future: &mut AnyLocalFuture,
) -> Option<Box<dyn Any>> {
let unparker = self.parker.lock().unparker();
let waker = waker_fn(move || {
woken.store(true, SeqCst);
@@ -205,13 +212,7 @@ impl Deterministic {
}
}
pub fn block_on<F, T>(&self, future: F) -> Option<T>
where
T: 'static,
F: Future<Output = T>,
{
smol::pin!(future);
fn block_on(&self, future: &mut AnyLocalFuture) -> Option<Box<dyn Any>> {
let unparker = self.parker.lock().unparker();
let waker = waker_fn(move || {
unparker.unpark();
@@ -396,25 +397,36 @@ impl Foreground {
}
pub fn spawn<T: 'static>(&self, future: impl Future<Output = T> + 'static) -> Task<T> {
match self {
let future = any_local_future(future);
let any_task = match self {
Self::Deterministic(executor) => executor.spawn_from_foreground(future),
Self::Platform { dispatcher, .. } => {
let dispatcher = dispatcher.clone();
let schedule = move |runnable: Runnable| dispatcher.run_on_main_thread(runnable);
let (runnable, task) = async_task::spawn_local(future, schedule);
runnable.schedule();
task
fn spawn_inner(
future: AnyLocalFuture,
dispatcher: &Arc<dyn Dispatcher>,
) -> AnyLocalTask {
let dispatcher = dispatcher.clone();
let schedule =
move |runnable: Runnable| dispatcher.run_on_main_thread(runnable);
let (runnable, task) = async_task::spawn_local(future, schedule);
runnable.schedule();
task
}
spawn_inner(future, dispatcher)
}
Self::Test(executor) => executor.spawn(future),
Self::Deterministic(executor) => executor.spawn_from_foreground(future),
}
};
Task::local(any_task)
}
pub fn run<T: 'static>(&self, future: impl 'static + Future<Output = T>) -> T {
match self {
let future = any_local_future(future);
let any_value = match self {
Self::Deterministic(executor) => executor.run(future),
Self::Platform { .. } => panic!("you can't call run on a platform foreground executor"),
Self::Test(executor) => smol::block_on(executor.run(future)),
Self::Deterministic(executor) => executor.run(future),
}
};
*any_value.downcast().unwrap()
}
pub fn forbid_parking(&self) {
@@ -500,29 +512,34 @@ impl Background {
T: 'static + Send,
F: Send + Future<Output = T> + 'static,
{
match self {
let future = any_future(future);
let any_task = match self {
Self::Production { executor, .. } => executor.spawn(future),
Self::Deterministic(executor) => executor.spawn(future),
}
};
Task::send(any_task)
}
pub fn block_with_timeout<F, T>(&self, timeout: Duration, mut future: F) -> Result<T, F>
pub fn block_with_timeout<F, T>(
&self,
timeout: Duration,
future: F,
) -> Result<T, impl Future<Output = T>>
where
T: 'static,
F: 'static + Unpin + Future<Output = T>,
{
let mut future = any_local_future(future);
if !timeout.is_zero() {
let output = match self {
Self::Production { .. } => {
smol::block_on(util::timeout(timeout, Pin::new(&mut future))).ok()
}
Self::Deterministic(executor) => executor.block_on(Pin::new(&mut future)),
Self::Production { .. } => smol::block_on(util::timeout(timeout, &mut future)).ok(),
Self::Deterministic(executor) => executor.block_on(&mut future),
};
if let Some(output) = output {
return Ok(output);
return Ok(*output.downcast().unwrap());
}
}
Err(future)
Err(async { *future.await.downcast().unwrap() })
}
pub async fn scoped<'scope, F>(&self, scheduler: F)
@@ -572,3 +589,68 @@ pub fn deterministic(seed: u64) -> (Rc<Foreground>, Arc<Background>) {
Arc::new(Background::Deterministic(executor)),
)
}
impl<T> Task<T> {
fn local(any_task: AnyLocalTask) -> Self {
Self::Local {
any_task,
result_type: PhantomData,
}
}
pub fn detach(self) {
match self {
Task::Local { any_task, .. } => any_task.detach(),
Task::Send { any_task, .. } => any_task.detach(),
}
}
}
impl<T: Send> Task<T> {
fn send(any_task: AnyTask) -> Self {
Self::Send {
any_task,
result_type: PhantomData,
}
}
}
impl<T: fmt::Debug> fmt::Debug for Task<T> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self {
Task::Local { any_task, .. } => any_task.fmt(f),
Task::Send { any_task, .. } => any_task.fmt(f),
}
}
}
impl<T: 'static> Future for Task<T> {
type Output = T;
fn poll(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Self::Output> {
match unsafe { self.get_unchecked_mut() } {
Task::Local { any_task, .. } => {
any_task.poll(cx).map(|value| *value.downcast().unwrap())
}
Task::Send { any_task, .. } => {
any_task.poll(cx).map(|value| *value.downcast().unwrap())
}
}
}
}
fn any_future<T, F>(future: F) -> AnyFuture
where
T: 'static + Send,
F: Future<Output = T> + Send + 'static,
{
async { Box::new(future.await) as Box<dyn Any + Send> }.boxed()
}
fn any_local_future<T, F>(future: F) -> AnyLocalFuture
where
T: 'static,
F: Future<Output = T> + 'static,
{
async { Box::new(future.await) as Box<dyn Any> }.boxed_local()
}
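The executor rework above funnels every spawned future through a type-erased Box<dyn Any> output and recovers the concrete type with downcast inside the typed Task<T> wrapper, so Foreground/Background no longer need generic spawn internals. A stripped-down, synchronous model of that erase-then-downcast round trip (any_callable/run are illustrative names):

use std::any::Any;

// Erase the concrete output type behind Box<dyn Any>, like any_local_future above.
fn any_callable<T: 'static>(f: impl FnOnce() -> T + 'static) -> Box<dyn FnOnce() -> Box<dyn Any>> {
    Box::new(move || Box::new(f()) as Box<dyn Any>)
}

// Recover the concrete type at the typed wrapper, like Task<T>::poll above.
fn run<T: 'static>(erased: Box<dyn FnOnce() -> Box<dyn Any>>) -> T {
    *erased().downcast::<T>().expect("output type must match the spawn site")
}

fn main() {
    let task = any_callable(|| 6 * 7);
    let answer: i32 = run(task);
    assert_eq!(answer, 42);
}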

View File

@@ -166,6 +166,10 @@ impl FontCache {
self.metric(font_id, |m| m.cap_height) * self.em_scale(font_id, font_size)
}
pub fn x_height(&self, font_id: FontId, font_size: f32) -> f32 {
self.metric(font_id, |m| m.x_height) * self.em_scale(font_id, font_size)
}
pub fn ascent(&self, font_id: FontId, font_size: f32) -> f32 {
self.metric(font_id, |m| m.ascent) * self.em_scale(font_id, font_size)
}
@@ -178,6 +182,14 @@ impl FontCache {
font_size / self.metric(font_id, |m| m.units_per_em as f32)
}
pub fn baseline_offset(&self, font_id: FontId, font_size: f32) -> f32 {
let line_height = self.line_height(font_id, font_size);
let ascent = self.ascent(font_id, font_size);
let descent = self.descent(font_id, font_size);
let padding_top = (line_height - ascent - descent) / 2.;
padding_top + ascent
}
pub fn line_wrapper(self: &Arc<Self>, font_id: FontId, font_size: f32) -> LineWrapperHandle {
let mut state = self.0.write();
let wrappers = state
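The new baseline_offset above treats half of the leading (line height minus ascent minus descent) as top padding and places the baseline an ascent below it. A worked example with illustrative numbers:

fn baseline_offset(line_height: f32, ascent: f32, descent: f32) -> f32 {
    let padding_top = (line_height - ascent - descent) / 2.;
    padding_top + ascent
}

fn main() {
    // A 20pt line with a 12pt ascent and 4pt descent has 4pt of leading,
    // so the baseline sits 2 + 12 = 14pt below the top of the line box.
    assert_eq!(baseline_offset(20., 12., 4.), 14.);
}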

View File

@@ -132,6 +132,14 @@ impl TextStyle {
font_cache.line_height(self.font_id, self.font_size)
}
pub fn cap_height(&self, font_cache: &FontCache) -> f32 {
font_cache.cap_height(self.font_id, self.font_size)
}
pub fn x_height(&self, font_cache: &FontCache) -> f32 {
font_cache.x_height(self.font_id, self.font_size)
}
pub fn em_width(&self, font_cache: &FontCache) -> f32 {
font_cache.em_width(self.font_id, self.font_size)
}
@@ -140,6 +148,10 @@ impl TextStyle {
font_cache.metric(self.font_id, |m| m.descent) * self.em_scale(font_cache)
}
pub fn baseline_offset(&self, font_cache: &FontCache) -> f32 {
font_cache.baseline_offset(self.font_id, self.font_size)
}
fn em_scale(&self, font_cache: &FontCache) -> f32 {
font_cache.em_scale(self.font_id, self.font_size)
}
@@ -155,6 +167,32 @@ impl From<TextStyle> for HighlightStyle {
}
}
impl Default for TextStyle {
fn default() -> Self {
FONT_CACHE.with(|font_cache| {
let font_cache = font_cache.borrow();
let font_cache = font_cache
.as_ref()
.expect("TextStyle::default can only be called within a call to with_font_cache");
let font_family_name = Arc::from("Courier");
let font_family_id = font_cache.load_family(&[&font_family_name]).unwrap();
let font_id = font_cache
.select_font(font_family_id, &Default::default())
.unwrap();
Self {
color: Default::default(),
font_family_name,
font_family_id,
font_id,
font_size: 14.,
font_properties: Default::default(),
underline: Default::default(),
}
})
}
}
impl HighlightStyle {
fn from_json(json: HighlightStyleJson) -> Self {
let font_properties = properties_from_json(json.weight, json.italic);

View File

@@ -1,9 +1,8 @@
mod app;
pub use app::*;
mod assets;
pub mod sum_tree;
#[cfg(test)]
mod test;
#[cfg(any(test, feature = "test-support"))]
pub mod test;
pub use assets::*;
pub mod elements;
pub mod font_cache;

View File

@@ -4,7 +4,7 @@ use crate::geometry::{
};
use etagere::BucketedAtlasAllocator;
use foreign_types::ForeignType;
use metal::{self, Device, TextureDescriptor};
use metal::{Device, TextureDescriptor};
use objc::{msg_send, sel, sel_impl};
pub struct AtlasAllocator {

Some files were not shown because too many files have changed in this diff.