Compare commits

...

414 Commits

Author SHA1 Message Date
Max Brunsfeld
c8b209306e collab 0.4.2 2022-12-19 11:29:22 -08:00
Max Brunsfeld
61c6c825b5 Merge pull request #1980 from zed-industries/following-panics
Fix panics when following
2022-12-19 11:26:28 -08:00
Julia
6f211292b2 Merge pull request #1984 from zed-industries/format-problematic-db-macros
Format problematic DB macros
2022-12-19 11:17:34 -05:00
Julia
c49573dc11 Format problematic DB macros 2022-12-19 11:11:10 -05:00
Julia
de9c58d216 Merge pull request #1983 from zed-industries/multi-buffer-git-gutter
Multi buffer git gutter
2022-12-19 10:53:42 -05:00
Antonio Scandurra
84a860e54d Merge pull request #1982 from zed-industries/fix-rust-analyzer
Update rust-analyzer's `disk_based_diagnostics_progress_token`
2022-12-19 16:33:01 +01:00
Antonio Scandurra
cb60eb8a57 Update rust-analyzer's disk_based_diagnostics_progress_token 2022-12-19 16:27:25 +01:00
Max Brunsfeld
1e02ebbd11 Replicate pending selections separately from other selections
This fixes a panic that would occur when a leader created
a pending selection that overlapped another selection,
because the follower would attempt to treat that pending
selection as non-pending, which would violate the invariant
that selections are sorted and disjoint.
2022-12-17 14:00:53 -08:00
Max Brunsfeld
8c64514570 Add ZED_STATELESS env var, for suppressing persistence
Use this env var in the start-local-collaboration script to make
the behavior more predictable.
2022-12-17 12:03:51 -08:00
Kay Simmons
6fcb3c9020 Merge pull request #1972 from zed-industries/recent-workspace
Recent Project Picker
2022-12-16 15:51:57 -08:00
Kay Simmons
2c47bd4a97 Clear stale projects if they no longer exist 2022-12-16 15:45:17 -08:00
Antonio Scandurra
a5f624203e collab 0.4.1 2022-12-16 12:02:03 +01:00
Antonio Scandurra
98d1b6ec5a Merge pull request #1975 from zed-industries/screen-share-after-reconnect
Prevent screen-sharing from being lost after a reconnection
2022-12-16 12:00:02 +01:00
Antonio Scandurra
457e1046c8 Bump protocol version 2022-12-16 11:48:14 +01:00
Antonio Scandurra
21ab1bb434 Remove unnecessary PeerId parsing code 2022-12-16 11:45:42 +01:00
Antonio Scandurra
aa44de3d16 Fix test ensuring room is left when disconnected from LiveKit 2022-12-16 10:52:32 +01:00
Max Brunsfeld
ad37034960 Identify LiveKit room participants by user id, not peer id
This way, their participant id can remain the same when they reconnect.
2022-12-15 17:19:32 -08:00
Julia
ebd0c5d000 Handle reversed=true for multi-buffer git-hunks-in-range iteration
Co-Authored-By: Nathan Sobo <nathan@zed.dev>
2022-12-15 18:17:32 -05:00
Julia
f88b413f6a Rewrite multi-buffer aware git hunks in range to be more correct
Less ad-hoc state tracking, rely more on values provided by the
underlying data

Co-Authored-By: Max Brunsfeld <max@zed.dev>
2022-12-15 17:09:09 -05:00
Antonio Scandurra
c2f5381e5a collab 0.4.0 2022-12-15 19:37:53 +01:00
Antonio Scandurra
ea1f6689b9 Merge pull request #1971 from zed-industries/update-app-icons
Update Zed & Zed Preview icons
2022-12-15 19:37:04 +01:00
Antonio Scandurra
b1affb13bb Merge pull request #1973 from zed-industries/fix-reconnects-after-deploy
Improve reconnections to server after it is redeployed
2022-12-15 19:35:42 +01:00
Antonio Scandurra
2679e245a5 Minor stylistic change 2022-12-15 16:40:16 +01:00
Antonio Scandurra
5a334622ea 💄 2022-12-15 16:34:59 +01:00
Antonio Scandurra
5720c43fe7 Merge branch 'main' into fix-reconnects-after-deploy 2022-12-15 15:32:05 +01:00
Joseph T. Lyons
af4d846428 Merge pull request #1954 from zed-industries/add-symlink-to-applications-directory-in-dmg 2022-12-15 08:06:17 -05:00
Antonio Scandurra
5fb522a9b1 collab 0.3.14 2022-12-15 11:31:51 +01:00
Antonio Scandurra
86e5ae1f2e Allow nulls in projects.host_connection_{id,server_id}
The server version on stable won't be able to fill values for those
columns when we deploy the migration to preview.

With this commit we're also dropping the unused `worktree_extensions`
and `project_activity_periods` tables. The last version of the server
on stable (0.2.6) doesn't contain any code that accesses those tables.
2022-12-15 11:30:51 +01:00
Antonio Scandurra
aadd7f2886 collab 0.3.13 2022-12-15 10:53:17 +01:00
Antonio Scandurra
067a19c971 Avoid logging an error when user who hasn't joined any room disconnects 2022-12-15 10:45:03 +01:00
Antonio Scandurra
688f179256 Use "id" nomenclature more consistently 2022-12-15 10:15:59 +01:00
Antonio Scandurra
af77f1188a Re-add server_id indices for room_participants/project_collaborators 2022-12-15 09:58:25 +01:00
Julia
0dedc1f3a4 Get tests building again 2022-12-15 00:17:28 -05:00
Max Brunsfeld
6c58a4f885 Fix stale server queries, use foreign keys from connections to servers 2022-12-14 17:34:24 -08:00
Kay Simmons
81e3b48f37 Add keybinding 2022-12-14 16:14:16 -08:00
Kay Simmons
6da59311d1 Add open recent project to file menu 2022-12-14 16:02:48 -08:00
Kay Simmons
2bc685281c Add recent project picker 2022-12-14 15:59:50 -08:00
Max Brunsfeld
7e0b6ed1c6 Bump RPC version due to multibuffer following PR 2022-12-14 15:34:22 -08:00
Max Brunsfeld
e08d6cd6de Merge pull request #1921 from zed-industries/multibuffer-following
Allow following collaborators into editors with multi-excerpt buffers (refactors + find-all-refs)
2022-12-14 15:33:11 -08:00
Max Brunsfeld
954c9ac3fd Add integration test coverage for following into multibuffers 2022-12-14 15:28:58 -08:00
Max Brunsfeld
e4c5dfcf6c Use run_until_parked instead of 'condition' in all integration tests 2022-12-14 15:05:35 -08:00
Nate Butler
5f6313d336 Update Zed & Zed Preview icons 2022-12-14 17:41:18 -05:00
Max Brunsfeld
70efd2bebe Introduce a ViewId message, identifying views across calls 2022-12-14 14:40:07 -08:00
Max Brunsfeld
43b7e16c89 Handle retina screens in start-local-collaboration script 2022-12-14 11:50:15 -08:00
Max Brunsfeld
f99f581bfc Clean up state matching in from_state_proto using let/else statements 2022-12-14 11:09:33 -08:00
Max Brunsfeld
09d3fbf04f In editor following test, apply excerpt removals to both followers 2022-12-14 11:08:08 -08:00
Antonio Scandurra
363e3cae4b WIP 2022-12-14 19:25:07 +01:00
Antonio Scandurra
930be6706f WIP 2022-12-14 18:02:39 +01:00
Antonio Scandurra
05e99eb67e Introduce an epoch to ConnectionId and PeerId 2022-12-14 15:55:56 +01:00
Antonio Scandurra
9bd400cf16 collab 0.3.12 2022-12-14 11:43:33 +01:00
Antonio Scandurra
553585b9a1 Add more logging to Room 2022-12-14 11:43:12 +01:00
Antonio Scandurra
674fddac87 Instrument rpc::Server::start and reduce cleanup timeout again 2022-12-14 11:42:12 +01:00
Antonio Scandurra
63e7b9189d collab 0.3.11 2022-12-14 11:25:04 +01:00
Antonio Scandurra
9530976f61 Try using a longer timeout for cleaning up stale rooms 2022-12-14 11:24:36 +01:00
Antonio Scandurra
02c30b0091 collab 0.3.10 2022-12-14 09:35:52 +01:00
Antonio Scandurra
b9c7796547 Reduce readiness probe delay and period 2022-12-14 09:35:36 +01:00
Antonio Scandurra
e00cb6b074 collab 0.3.9 2022-12-14 09:05:19 +01:00
Antonio Scandurra
dc47552180 Fix kubernetes configuration for readiness probe 2022-12-14 08:58:19 +01:00
Antonio Scandurra
98a593b263 collab 0.3.8 2022-12-14 08:56:02 +01:00
Antonio Scandurra
897506c797 Define readiness probe to know when the new server can accept traffic 2022-12-14 08:54:46 +01:00
Antonio Scandurra
59c9a57570 collab 0.3.7 2022-12-14 08:43:18 +01:00
Antonio Scandurra
dde6cf596e Don't wait for stale project deletion before listening for connections 2022-12-14 08:42:34 +01:00
Antonio Scandurra
2596fefa04 collab 0.3.6 2022-12-13 23:09:02 +01:00
Antonio Scandurra
34b69896e4 Listen to SIGTERM in addition to ctrl-c for graceful shutdown 2022-12-13 23:08:43 +01:00
Antonio Scandurra
7824ace58b collab 0.3.5 2022-12-13 22:40:55 +01:00
Antonio Scandurra
b150efbd96 Set log level to debug for preview deployment
Also, add a log statement when we receive the interrupt signal.

Co-Authored-By: Max Brunsfeld <max@zed.dev>
2022-12-13 22:38:55 +01:00
Max Brunsfeld
c20204d269 collab 0.3.4 2022-12-13 11:17:37 -08:00
Max Brunsfeld
45bfcfc3b8 Fix excessive delay before clearing stale room data
Co-authored-by: Antonio Scandurra <antonio@zed.dev>
2022-12-13 11:17:20 -08:00
Julia
cf72173282 Clamp end of visual git hunk to requested range 2022-12-13 13:58:50 -05:00
Max Brunsfeld
5218a2f966 collab 0.3.3 2022-12-13 10:17:01 -08:00
Max Brunsfeld
95748123b5 Merge remote-tracking branch 'origin/collab-0.3.x' 2022-12-13 10:16:25 -08:00
Max Brunsfeld
6ad326ac58 Wait longer before deleting outdated rooms from the database
Co-authored-by: Antonio Scandurra <antonio@zed.dev>
2022-12-13 10:14:43 -08:00
Mikayla Maki
b0652c55c6 Merge pull request #1968 from zed-industries/fix-text-size-in-updates
Fix text size in notifications
2022-12-13 10:00:24 -08:00
Mikayla Maki
790ef19a48 Fix mis-set variables 2022-12-13 09:58:04 -08:00
Julia
ecd44e6914 Git diff recalc in project diagnostics 2022-12-13 12:35:58 -05:00
Julia
2cd9987b54 Git diff recalc in project search 2022-12-13 12:35:58 -05:00
Julia
7c3dc1e3dc Cleanup 2022-12-13 12:35:58 -05:00
Julia
00b7c78e33 Initial hacky displaying of git gutter in multi-buffers 2022-12-13 12:35:58 -05:00
Max Brunsfeld
11800a8a78 Merge branch 'main' into multibuffer-following 2022-12-13 09:25:18 -08:00
Antonio Scandurra
99c5f8c713 collab 0.3.2 2022-12-13 15:16:41 +01:00
Antonio Scandurra
461c2400ad Merge pull request #1965 from zed-industries/preserve-calls-during-server-restarts
Automatically re-join call when server is restarted
2022-12-13 14:12:32 +01:00
Antonio Scandurra
073a2988e6 Move creation of room_id index into its own migration 2022-12-13 13:57:41 +01:00
Antonio Scandurra
70aac75dd5 Run until parked before asserting about participants in restart test 2022-12-13 13:53:51 +01:00
Antonio Scandurra
4dc838fbb7 Reset connection pool when tearing down the server in tests 2022-12-13 13:51:25 +01:00
Antonio Scandurra
d4c8fa3090 Use a synchronous mutex for ConnectionPool 2022-12-13 13:50:51 +01:00
Antonio Scandurra
a594ba8f8a Simulate server restarts in randomized test 2022-12-13 12:18:38 +01:00
Antonio Scandurra
f1884d608b Allow server to see client disconnection before giving up on reconnecting 2022-12-13 12:17:21 +01:00
Antonio Scandurra
417db95693 Fix typo in index name 2022-12-13 11:44:48 +01:00
Antonio Scandurra
0220d7ba5d Include room_id in CallCanceled message
This ensures we don't accidentally cancel old calls.
2022-12-13 11:43:09 +01:00
Antonio Scandurra
e2b132ef23 💄 2022-12-13 11:37:39 +01:00
Antonio Scandurra
7e8d9d52d3 Delete stray debug statement 2022-12-13 11:36:40 +01:00
Antonio Scandurra
6a6a032f1f Delete stale rooms/participants after RECONNECT_TIMEOUT 2022-12-13 11:32:37 +01:00
Mikayla Maki
fcea254e8e Merge pull request #1963 from zed-industries/fix-workspace-corner-cases
Fix small workspace deserialization corner cases
2022-12-12 17:56:43 -08:00
Mikayla Maki
9bf0a02eae Allow an empty center group to successfully deserialize into an empty pane.
Fix error when deserializing pane axis which caused its members.len() > 1 invariant to be violated
Fix failure to gain center pane focus when failing to deserialize a center pane entirely

Co-authored-by: Max <max@zed.dev>
2022-12-12 17:51:16 -08:00
Max Brunsfeld
2affbcc495 Merge pull request #1962 from zed-industries/scrolling-breaks-follow
Avoid breaking follow when syncing leader's scroll position
2022-12-12 16:37:58 -08:00
Mikayla Maki
8012e9fcbd Merge pull request #1961 from zed-industries/fix-next-screen-bug
Fixed issue where the NextScreen action would never have an effect
2022-12-12 15:47:16 -08:00
Mikayla Maki
cd2d593a6c Fixed issue where the NextScreen action would never have an effect 2022-12-12 15:36:51 -08:00
Max Brunsfeld
9ef00ea44c Avoid breaking follow when syncing leader's scroll position
Co-authored-by: Mikayla Maki <mikayla@zed.dev>
Co-authored-by: Kay Simmons <kay@zed.dev>
2022-12-12 15:36:30 -08:00
Mikayla Maki
91d6b66fc4 Merge pull request #1959 from zed-industries/serializing-bug-fixes
Add check for whether the user wants a blank workspace when deserializing
2022-12-12 13:37:43 -08:00
Mikayla Maki
5a29a74956 Fetch last workspace explicitly when starting Zed
co-authored-by: Max <max@zed.dev>
2022-12-12 13:29:18 -08:00
Mikayla Maki
db3119b553 Add check for whether the user wants a blank workspace when deserializing 2022-12-12 12:25:52 -08:00
Max Brunsfeld
f797dfb88f Merge branch 'main' into multibuffer-following 2022-12-12 11:47:39 -08:00
Antonio Scandurra
beea9b68ff Allow re-joining room after server restarts 2022-12-12 16:03:21 +01:00
Antonio Scandurra
82397f34d1 Merge pull request #1950 from zed-industries/reconnect-to-room
Automatically re-join call when client connection drops
2022-12-12 13:40:21 +01:00
Antonio Scandurra
3cd77bfcc4 Always cast connection ids to i32
Postgres doesn't support unsigned types. This also adds indices to
support querying `project_collaborators` and `room_participants`
by connection id.
2022-12-12 11:43:08 +01:00
Antonio Scandurra
456396ca6e Rename connection_lost to answering_connection_lost 2022-12-12 11:43:08 +01:00
Antonio Scandurra
26b5653427 Delete hosted projects from database when connection is lost 2022-12-12 11:43:08 +01:00
Antonio Scandurra
895c365485 Introduce random reconnections in the randomized test 2022-12-12 11:43:08 +01:00
Antonio Scandurra
8fa26bfe18 Fix test_calls_on_multiple_connections after adding room reconnection 2022-12-12 11:43:08 +01:00
Antonio Scandurra
aca3f02590 Re-join room when client temporarily loses connection 2022-12-12 11:43:08 +01:00
Antonio Scandurra
d74fb97158 Remove Executor trait from collab and use an enum instead
This will let us save off the executor and avoid using generics.
2022-12-12 11:43:08 +01:00
Joseph Lyons
7608875625 Remove extraneous newline 2022-12-10 09:56:42 -05:00
Joseph Lyons
dcf11ac7e5 Add symlink to applications directory in dmg 2022-12-10 09:48:40 -05:00
Mikayla Maki
5879dcc4e9 Merge pull request #1951 from zed-industries/dock-bugfix
Fix infinite loop in dock position when deserializing
2022-12-09 13:19:44 -08:00
Mikayla Maki
34388a1d31 Updated is_child() to omit self 2022-12-09 12:07:49 -08:00
Mikayla Maki
3a4f8d267a Fix infinite loop in dock position when deserializing 2022-12-09 11:50:24 -08:00
Antonio Scandurra
0366d725ea collab 0.3.1 2022-12-09 08:19:41 +01:00
Antonio Scandurra
8bd7b28056 Merge pull request #1949 from zed-industries/do-not-drop-unregistered
Don't drop `unregistered` column in reconnection support migration
2022-12-09 08:15:53 +01:00
Antonio Scandurra
2697112a8a Don't drop unregistered column in reconnection support migration
We don't use this column anymore because, when a project is unshared, we
simply remove it from the `projects` table. However, this column is expected
in the stable version of the server and the database is shared between stable
and preview. If we dropped it, stable would start throwing errors.
2022-12-09 08:11:18 +01:00
Mikayla Maki
9bd4bc8813 Merge pull request #1940 from zed-industries/terminal-collab-kickoff
WIP - move terminal to project as pre-prep for collaboration
2022-12-08 20:30:32 -08:00
Mikayla Maki
925c9e13bb Remove terminal container view, switch to notify errors 2022-12-08 20:21:00 -08:00
Mikayla Maki
da100a09fb WIP 2022-12-08 20:21:00 -08:00
Mikayla Maki
c42da5c9b9 WIP 2022-12-08 20:21:00 -08:00
Mikayla Maki
2733f91d8c Fix bugs resulting from refactoring the terminal into project and workspace halves 2022-12-08 20:21:00 -08:00
Mikayla Maki
83aefffa38 Rearrange the terminal code to not have a cyclic dependency with the project 2022-12-08 20:21:00 -08:00
Mikayla Maki
1b8763d0cf WIP - move terminal to project as pre-prep for collaboration 2022-12-08 20:21:00 -08:00
Max Brunsfeld
7dde54b052 v0.68.x dev 2022-12-08 15:33:02 -08:00
Kay Simmons
b1e37378dc Merge pull request #1944 from zed-industries/vim-page-movement
Add scroll commands to vim mode
2022-12-08 14:58:19 -08:00
Kay Simmons
e61a38b3a9 remove printline 2022-12-08 14:45:22 -08:00
Kay Simmons
2cf48c03f9 fix final failing tests 2022-12-08 14:39:48 -08:00
Joseph Lyons
ab978ff1a3 collab 0.3.0 2022-12-08 16:35:13 -05:00
Joseph T. Lyons
dcd4b8f7db Merge pull request #1941 from zed-industries/Allow-overwriting-signup-data
Allow overwriting signup data if a user signs up more than once with the same email address
2022-12-08 16:11:28 -05:00
Kay Simmons
2eb335158b Merge pull request #1946 from zed-industries/fix-zombie-tooltips
notify views when hover finishes in tooltip wrapper
2022-12-08 11:37:12 -08:00
Kay Simmons
10aecc310e notify views when hover finishes in tooltip wrapper 2022-12-08 11:26:46 -08:00
Kay Simmons
750e7eb833 Merge pull request #1945 from zed-industries/drag-and-drop-deadzones
Add deadzones to drag and drop
2022-12-08 11:15:42 -08:00
Kay Simmons
36bc90b2b8 Add deadzones to drag and drop 2022-12-07 17:46:00 -08:00
Kay Simmons
f6f41510d2 fix failing tests from incorrect follow behavior 2022-12-07 17:25:48 -08:00
Kay Simmons
cffb064c16 Refactor editor scrolling and implement scroll commands from vim mode 2022-12-07 16:39:32 -08:00
Antonio Scandurra
3313387b28 Merge pull request #1943 from zed-industries/fix-inviting-existing-users-via-different-mail
Fix inviting existing users via a different email address
2022-12-07 14:19:17 +01:00
Joseph Lyons
d71d543337 Ensure that subsequent signup happens after initial
We can't rely on the test running slowly enough for the two `created_at` timestamps to differ. This ensures the subsequent signup happens after the initial one and that the database doesn't overwrite the initial one.
2022-12-07 08:15:01 -05:00
Antonio Scandurra
665219fb00 Fix inviting user that had already signed up via a different email 2022-12-07 14:07:01 +01:00
Antonio Scandurra
1b8f23eeed Add failing test showcasing inviting existing user via different email 2022-12-07 14:06:59 +01:00
Joseph Lyons
5f31907127 Clean up test 2022-12-07 07:12:27 -05:00
Joseph Lyons
97989b04a0 Remove comment 2022-12-06 17:18:54 -05:00
Joseph Lyons
694840cdd6 Allow overwriting signup data if a user signs up more than once with the same email address 2022-12-06 17:12:12 -05:00
Antonio Scandurra
1920de81d9 Merge pull request #1938 from zed-industries/fix-metrics
Query project count as `i64` instead of `i32` when gathering metrics
2022-12-06 15:04:27 +01:00
Antonio Scandurra
3b5b48c043 Query project count as i64 instead of i32 when gathering metrics
Using the latter will cause a type mismatch when performing the query.
2022-12-06 15:00:32 +01:00
Antonio Scandurra
2080d3efff Merge pull request #1937 from zed-industries/fix-accepted-contact-busy-status
Fix busy status when accepting a contact request
2022-12-06 10:29:58 +01:00
Antonio Scandurra
fc7b01b74e Fix busy status when accepting a contact request
Previously, we would send a contact update when accepting a request
using the same `busy` status for both the requester and the responder.
This was obviously wrong and caused the requester to see their own
busy status as the newly-added responder contact's status.
2022-12-06 10:19:34 +01:00
Antonio Scandurra
f1b35981c2 Merge pull request #1935 from zed-industries/reconnections-2
Move in-memory server state to the database
2022-12-06 09:22:59 +01:00
Antonio Scandurra
744714b478 Remove unused UserId import from seed script 2022-12-06 09:07:25 +01:00
Max Brunsfeld
35549ffabe Merge pull request #1936 from zed-industries/c-outline-pointers
Include outline items for c/c++ functions returning pointers
2022-12-05 14:03:49 -08:00
Max Brunsfeld
855f17c378 Include outline items for c/c++ functions returning pointers-to-pointers, references
Co-authored-by: Julia Risley <julia@zed.dev>
2022-12-05 13:56:21 -08:00
Mikayla Maki
f23f294b86 Merge pull request #1858 from zed-industries/add-lisp
Added tree sitter support for scheme and racket
2022-12-05 11:40:52 -08:00
Mikayla Maki
0921178b42 Got tree sitter integration to a shippable place 2022-12-05 11:31:52 -08:00
Mikayla Maki
30872d3992 Added experimental support for scheme, racket, and commonlisp 2022-12-05 11:31:49 -08:00
Antonio Scandurra
cd08d289aa Fix warnings 2022-12-05 19:45:56 +01:00
Antonio Scandurra
9a62150dce Merge branch 'main' into reconnections-2 2022-12-05 19:18:40 +01:00
Antonio Scandurra
7bbd97cfb9 Send diagnostic summaries synchronously 2022-12-05 19:07:06 +01:00
Antonio Scandurra
5443d9cffe Return project collaborators and connection IDs in a RoomGuard 2022-12-05 18:37:01 +01:00
Antonio Scandurra
be3fb1e985 Update sea-orm to fix bug on failure to commit transactions
Co-Authored-By: Nathan Sobo <nathan@zed.dev>
2022-12-05 18:36:25 +01:00
Antonio Scandurra
b97c35a468 Remove project_id foreign key from room_participants 2022-12-05 15:16:06 +01:00
Antonio Scandurra
eec3df09be Upgrade sea-orm 2022-12-05 14:56:01 +01:00
Antonio Scandurra
d3c411677a Remove random pauses to prevent the database from deadlocking 2022-12-05 12:03:45 +01:00
Antonio Scandurra
d97a8364ad Retry transactions if there's a serialization failure during commit 2022-12-05 10:49:53 +01:00
Antonio Scandurra
0ed731780a Remove duplication between transaction and room_transaction 2022-12-05 09:46:03 +01:00
Julia
11c1254e71 Merge pull request #1924 from zed-industries/simon-says-dont-move
Do not reorder tab opened by follower to end of item list
2022-12-04 13:00:07 -05:00
Mikayla Maki
6ba225f3a5 Merge pull request #1798 from zed-industries/serializing-workspaces
Serializing workspaces
2022-12-03 16:56:02 -08:00
Mikayla Maki
55eb0a3742 Fixed an error message and properly initialized the DB 2022-12-03 16:46:35 -08:00
Mikayla Maki
1ce0863158 Removed old code 2022-12-03 16:27:45 -08:00
Mikayla Maki
d609237c32 Found db parallelism problem :( 2022-12-03 16:26:37 -08:00
Mikayla Maki
4288f10873 And library change 2022-12-03 16:13:02 -08:00
Mikayla Maki
80e035cc2c Fixed bad rebase 2022-12-03 16:12:07 -08:00
Mikayla Maki
a1f273278b Added user notifications 2022-12-03 16:06:02 -08:00
Mikayla Maki
ffcad4e4e2 WIP fixing dock problems 2022-12-03 16:06:02 -08:00
Mikayla Maki
5262e8c77e CHANGE LOCK TO NOT BE DROPPED INSTANTLY. DANG U RUST
co-authored-by: kay@zed.dev
2022-12-03 16:06:02 -08:00
Mikayla Maki
5e240f98f0 Reworked thread safe connection to be threadsafer... again
Co-Authored-By: kay@zed.dev
2022-12-03 16:06:02 -08:00
Mikayla Maki
189a820113 First draft of graceful corruption restoration 2022-12-03 16:06:02 -08:00
Mikayla Maki
b8d423555b Added side bar restoration 2022-12-03 16:06:02 -08:00
Kay Simmons
8a48567857 Reactivate the correct item in each pane when deserializing 2022-12-03 16:06:01 -08:00
Kay Simmons
f68e8d4664 Address some issues with the sqlez_macros 2022-12-03 16:06:01 -08:00
Kay Simmons
1b225fa37c fix test failures 2022-12-03 16:06:01 -08:00
Kay Simmons
a29ccb4ff8 make thread safe connection more thread safe
Co-Authored-By: Mikayla Maki <mikayla@zed.dev>
2022-12-03 16:06:01 -08:00
Mikayla Maki
9cd6894dc5 Added multi-threading problem test 2022-12-03 16:06:01 -08:00
Kay Simmons
dd9d20be25 Added sql! proc macro which checks syntax errors on sql code and displays them with reasonable underline locations
Co-Authored-By: Mikayla Maki <mikayla@zed.dev>
2022-12-03 16:06:01 -08:00
Mikayla Maki
260164a711 Added basic syntax checker to sqlez 2022-12-03 16:06:01 -08:00
Kay Simmons
359b8aaf47 rename sql_method to query and adjust the syntax to more closely match function definitions 2022-12-03 16:06:01 -08:00
Kay Simmons
1cc3e4820a working serialized writes with panics on failure. Everything seems to be working 2022-12-03 16:06:01 -08:00
Mikayla Maki
b01243109e Removed database test files 2022-12-03 16:06:01 -08:00
Mikayla Maki
3e0f9d27a7 Made dev tools not break everything about the db
Also improved multi statements to allow out of order parameter binding in statements
Ensured that all statements are run for maybe_row and single, and that, of all the statements, only one returns a single row
Made bind and column calls add useful context to errors

Co-authored-by: kay@zed.dev
2022-12-03 16:06:01 -08:00
Mikayla Maki
2dc1130902 Added extra sql methods 2022-12-03 16:06:01 -08:00
Mikayla Maki
37174f45f0 Touched up sql macro 2022-12-03 16:06:01 -08:00
Mikayla Maki
76c42af62a Finished terminal working directory restoration 2022-12-03 16:06:01 -08:00
Mikayla Maki
cf4c103660 Fixed workspace tests 2022-12-03 16:06:01 -08:00
Mikayla Maki
e1eff3f4cd WIP: Some bugs switching to database provided IDs, terminal titles don't reload when restored from serialized, workspace tests are no longer passing but should be easy to fix when it isn't 11:44 2022-12-03 16:06:01 -08:00
Mikayla Maki
a47f2ca445 Added UUID based, stable workspace ID for caching on item startup. Completed first sketch of terminal persistence. Still need to debug it though.... 2022-12-03 16:06:01 -08:00
Mikayla Maki
e659823e6c WIP terminal implementation. Need some way of getting the currently valid workspace ID 2022-12-03 16:06:01 -08:00
Mikayla Maki
a8ed95e1dc Implementing persistence for the terminal working directory, found an issue with my current data model. :( 2022-12-03 16:06:01 -08:00
Kay Simmons
cb1d2cd1f2 WIP serializing and deserializing editors 2022-12-03 16:06:01 -08:00
Mikayla Maki
9077b058a2 removed test file 2022-12-03 16:06:01 -08:00
Mikayla Maki
7ceb5e815e workspace level integration of serialization complete! Time for item level integration....
Co-Authored-By: kay@zed.dev
2022-12-03 16:06:01 -08:00
Mikayla Maki
992b94eef3 Rebased to main 2022-12-03 16:06:01 -08:00
Mikayla Maki
a0cb6542ba Polishing workspace data structures
Co-authored-by: kay@zed.dev
2022-12-03 16:06:01 -08:00
Mikayla Maki
6530658c3e Added center group deserialization 2022-12-03 16:06:01 -08:00
Kay Simmons
75d3d46b1b wip serialize editor 2022-12-03 16:06:01 -08:00
Kay Simmons
d20d21c6a2 Dock persistence working!
Co-Authored-By: Mikayla Maki <mikayla@zed.dev>
2022-12-03 16:06:01 -08:00
Kay Simmons
c1f7902309 wip 2022-12-03 16:06:01 -08:00
Mikayla Maki
4798161118 Distributed database pattern built.
Co-Authored-By: kay@zed.dev
2022-12-03 16:06:01 -08:00
Mikayla Maki
2a5565ca93 WIP 2022-12-03 16:06:00 -08:00
Mikayla Maki
a5edac312e Moved to workspaces crate... don't feel great about it 2022-12-03 16:05:26 -08:00
Mikayla Maki
e578f2530e WIP commit, migrating workspace serialization code into the workspace 2022-12-03 16:05:25 -08:00
Mikayla Maki
c84201fc9f Done first draft of strongly typed migrations 2022-12-03 16:05:25 -08:00
Kay Simmons
4a00f0b062 Add typed statements 2022-12-03 16:05:25 -08:00
Mikayla Maki
64ac84fdf4 Re-use big union statement for get_center_pane 2022-12-03 16:05:25 -08:00
Mikayla Maki
f27a9d77d1 Finished the bulk of workspace serialization. Just items and wiring it all through.
Co-Authored-By: kay@zed.dev
2022-12-03 16:05:25 -08:00
Mikayla Maki
0186289420 Refined sqlez, implemented 60% of workspace serialization sql 2022-12-03 16:05:25 -08:00
Mikayla Maki
6b214acbc4 Got Zed compiling again 🥰 2022-12-03 16:05:25 -08:00
Kay Simmons
d419f27d75 replace worktree roots table with serialized worktree roots list 2022-12-03 16:05:25 -08:00
Kay Simmons
eb0598dac2 more refactoring and slightly better api 2022-12-03 16:05:25 -08:00
Mikayla Maki
aa7b909b7b WIP3 2022-12-03 16:05:25 -08:00
Mikayla Maki
b552f1788c WIP2 2022-12-03 16:05:25 -08:00
Mikayla Maki
d492cbced9 WIP 2022-12-03 16:05:25 -08:00
Mikayla Maki
19aac6a57f Moved docks to a better position 2022-12-03 16:05:25 -08:00
Kay Simmons
685bc9fed3 impl bind and column and adjust pane tables 2022-12-03 16:05:25 -08:00
Mikayla Maki
406663c75e Converted to sqlez, so much nicer 2022-12-03 16:05:25 -08:00
Mikayla Maki
c8face33fa WIP, incorporating type parsing using new sqlez patterns 2022-12-03 16:05:25 -08:00
Mikayla Maki
3c1b747f64 WIP almost compiling with sqlez 2022-12-03 16:05:25 -08:00
Mikayla Maki
777f05eb76 Finished implementing the workspace stuff 2022-12-03 16:05:25 -08:00
Mikayla Maki
395070cb92 remove submodule 2022-12-03 16:05:25 -08:00
Mikayla Maki
a4a1859dfc Added sqlez api 2022-12-03 16:05:25 -08:00
Kay Simmons
e3fdfe02e5 WIP switching to sqlez 2022-12-03 16:05:24 -08:00
Mikayla Maki
7744c9ba45 Abandoning rusqlite, the API is miserable 2022-12-03 16:04:10 -08:00
Mikayla Maki
e6ca0adbcb Fixed failing serialization issues 2022-12-03 16:04:10 -08:00
Mikayla Maki
c105f41487 Started working on dock panes
co-authored-by: kay@zed.dev
2022-12-03 16:04:10 -08:00
Mikayla Maki
ddecba143f Refactored workspaces API and corrected method headers + fixed bug caused by migration failures
co-authored-by: kay@zed.dev
2022-12-03 16:04:10 -08:00
Mikayla Maki
3451a3c7fe Rebase - Got Zed compiling and fixed a build error due to conflicting dependencies that cargo didn't catch :(
Co-Authored-By: kay@zed.dev
2022-12-03 16:04:10 -08:00
Mikayla Maki
b9cbd4084e WIP: fixing up behavior of workspace initialization 2022-12-03 16:04:10 -08:00
Mikayla Maki
5505a776e6 Figured out a good schema for the pane serialization stuff 2022-12-03 16:04:10 -08:00
Mikayla Maki
46ff0885f0 WIP: Writing tests 2022-12-03 16:04:10 -08:00
Mikayla Maki
a9dc46c950 added stubs for more tests 2022-12-03 16:04:10 -08:00
Mikayla Maki
7d33520b2c Tidied up code, managed errors, etc. 2022-12-03 16:04:10 -08:00
Mikayla Maki
e9ea751f3d All workspace tests passing :D 2022-12-03 16:04:10 -08:00
Mikayla Maki
d7bbfb82a3 Rebase - Successfully detecting workspace IDs :D 2022-12-03 16:04:10 -08:00
Mikayla Maki
500ecbf915 Rebase fix + Started writing the real SQL we're going to need 2022-12-03 16:04:10 -08:00
K Simmons
e5c6393f85 rebase fix - almost have serialize_workspace piped to the workspace constructor. Just a few compile errors left 2022-12-03 16:04:10 -08:00
K Simmons
73f0459a0f wip 2022-12-03 16:04:10 -08:00
K Simmons
0c466f806c WIP 2022-12-03 16:04:10 -08:00
Mikayla Maki
b48e28b555 Built first draft of workspace serialization schemas, started writing DB tests
Co-Authored-By: kay@zed.dev
2022-12-03 16:04:10 -08:00
Mikayla Maki
60ebe33518 Rebase fix - Reworking approach to sql for take 2022-12-03 16:04:10 -08:00
Mikayla Maki
72c1ee904b Fix rebase - Broken tab 2022-12-03 16:04:10 -08:00
Julia
57e10b7dd5 Cleanup dbg 2022-12-02 16:42:49 -05:00
Julia
4bc1d77535 Fix tab following order test to wait for file open to propagate
Now it can actually repro the original bug

Co-Authored-By: Max Brunsfeld <max@zed.dev>
2022-12-02 16:09:37 -05:00
Antonio Scandurra
d96f524fb6 WIP: Manually roll back transactions to avoid spurious savepoint failure
TODO:
- Avoid unwrapping transaction after f(tx)
- Remove duplication between `transaction` and `room_transaction`
- Introduce random delay before and after committing a transaction
- Run lots of randomized tests
- Investigate diverging diagnostic summaries

Co-Authored-By: Max Brunsfeld <max@zed.dev>
2022-12-02 20:36:50 +01:00
Antonio Scandurra
1c30767592 Remove stale Error variant
Co-Authored-By: Max Brunsfeld <max@zed.dev>
2022-12-02 19:20:51 +01:00
Antonio Scandurra
969c314315 Merge branch 'main' into reconnections-2 2022-12-02 19:09:33 +01:00
Antonio Scandurra
568de814aa Delete empty rooms 2022-12-02 16:56:41 +01:00
Antonio Scandurra
27f6ae945d Clear stale data on startup
This is a stopgap measure until we introduce reconnection support.
2022-12-02 16:30:00 +01:00
Antonio Scandurra
1b46b7a7d6 Move modules into collab library as opposed to using the binary
This ensures that we can use collab's modules from the seed script
as well.
2022-12-02 14:37:52 +01:00
Antonio Scandurra
7502558631 Make all tests pass again after migration to sea-orm 2022-12-02 14:22:36 +01:00
Antonio Scandurra
48b6ee313f Use i32 to represent Postgres INTEGER types in Rust 2022-12-02 13:58:54 +01:00
Antonio Scandurra
dec5f37e4e Finish porting remaining db methods to sea-orm 2022-12-02 13:58:23 +01:00
Julia
239a04ea5b Add test that should have exercised tab reordering while following
Except it doesn't: it passes both with and without the prior commit.
Investigate further
2022-12-02 00:31:16 -05:00
Joseph T. Lyons
ea03b48243 Merge pull request #1876 from zed-industries/update-release-urls-to-match-new-zed.dev-url-format
Update release urls to match new zed.dev url format
2022-12-01 20:32:14 -05:00
Max Brunsfeld
82824f78b6 Make each Zed instance use half the screen in 'start-local-collaboration' script 2022-12-01 16:43:39 -08:00
Max Brunsfeld
e4507c1d74 Fetch missing buffers when adding excerpts to a multibuffer while following
Make FollowableItem::apply_update_proto asynchronous. Use a single
task per workspace to process all leader updates, to prevent updates
from being interleaved.

Co-authored-by: Antonio Scandurra <antonio@zed.dev>
2022-12-01 15:17:51 -08:00
Antonio Scandurra
585ac3e1be WIP 2022-12-01 18:39:24 +01:00
Antonio Scandurra
29a4baf346 Replace i32 with u32 for database columns
We never expect to return signed integers and so we shouldn't use
a signed type. I think this was a limitation of sqlx.
2022-12-01 17:47:51 +01:00
Antonio Scandurra
cfdf0a57b8 Implement Database::update_project 2022-12-01 17:36:36 +01:00
Antonio Scandurra
944d6554de Implement Database::unshare_project 2022-12-01 16:26:13 +01:00
Antonio Scandurra
e3ac67784a Implement Database::project_guest_connection_ids 2022-12-01 16:23:29 +01:00
Antonio Scandurra
62624b81d8 Avoid using col_expr whenever possible
...and use the more type-safe `::set`.
2022-12-01 16:17:27 +01:00
Antonio Scandurra
256e3e8e0f Get basic calls working again with sea-orm 2022-12-01 16:17:24 +01:00
Antonio Scandurra
aebc6326a9 Implement Database::create_room 2022-12-01 15:22:20 +01:00
Antonio Scandurra
db1d93576f Go back to a compiling state, panicking on unimplemented db methods 2022-12-01 15:13:57 +01:00
Antonio Scandurra
d2385bd6a0 Start using the new sea-orm backed database 2022-12-01 14:41:59 +01:00
Antonio Scandurra
19d14737bf Implement signups using sea-orm 2022-12-01 11:58:07 +01:00
Antonio Scandurra
4f864a20a7 Implement invite codes using sea-orm 2022-12-01 11:10:51 +01:00
Antonio Scandurra
2375741bdf Implement db2::Database::fuzzy_search_users 2022-12-01 10:09:53 +01:00
Julia
46f1d5f5c2 Avoid moving tab when leader item updates 2022-12-01 00:29:58 -05:00
Max Brunsfeld
d70996bb99 collab 0.2.5 2022-11-30 14:10:10 -08:00
Max Brunsfeld
9314c0e313 Replicate multibuffer excerpt additions and removals to followers 2022-11-30 13:20:13 -08:00
Julia
5a0c39cbed Merge pull request #1922 from zed-industries/dont-panic-clip-instead
Dont panic in point conversion, clip instead
2022-11-30 13:28:10 -05:00
Julia
41b2fde10d Style
Co-Authored-By: Max Brunsfeld <max@zed.dev>
2022-11-30 13:11:08 -05:00
Julia
023ecd595b Change verify macro to debug panic
Co-Authored-By: Max Brunsfeld <max@zed.dev>
2022-11-30 13:03:15 -05:00
Julia
2b979d3b88 Don't panic rope point conversions 2022-11-30 12:43:43 -05:00
Julia
5965113fc8 Add verify macros & use in one location for point conversion 2022-11-30 12:43:43 -05:00
Max Brunsfeld
a48cd9125b Start-local-collaboration script: put peers' windows at different positions 2022-11-30 09:29:49 -08:00
Antonio Scandurra
4c04d512db Implement db2::Database::remove_contact 2022-11-30 17:39:17 +01:00
Antonio Scandurra
d1a44b889e Implement contacts using sea-orm
Co-Authored-By: Nathan Sobo <nathan@zed.dev>
2022-11-30 17:36:25 +01:00
Antonio Scandurra
04d553d4d3 Implement db2::Database::get_user_metrics_id 2022-11-30 15:06:04 +01:00
Antonio Scandurra
2e24d128db Implement access tokens using sea-orm 2022-11-30 14:47:03 +01:00
Antonio Scandurra
9e59056e7f Implement db2::Database::get_user_by_github_account 2022-11-30 14:18:46 +01:00
Antonio Scandurra
d9a892a423 Make some db tests pass against the new sea-orm implementation 2022-11-30 12:13:16 +01:00
Joseph T. Lyons
3a1cd6ed3a Merge pull request #1913 from zed-industries/Add-column-to-signups-for-added-to-mailing-list
Add "added_to_mailing_list" column on signups table
2022-11-29 19:30:11 -05:00
Max Brunsfeld
6120d6488b Start work on following in multi-buffers 2022-11-29 14:50:43 -08:00
Max Brunsfeld
82abf31ef1 Add start-local-collaboration script 2022-11-29 14:50:12 -08:00
Joseph T. Lyons
9f9398476d Merge pull request #1920 from zed-industries/order-invites-by-creation-time
Order invites by creation time
2022-11-29 14:28:53 -05:00
Antonio Scandurra
b7294887c7 WIP: move to a non-generic test database struct
Co-Authored-By: Mikayla Maki <mikayla@zed.dev>
Co-Authored-By: Julia Risley <julia@zed.dev>
2022-11-29 19:20:11 +01:00
Joseph Lyons
049c0f8ba4 Order invites by creation time 2022-11-29 12:57:51 -05:00
Antonio Scandurra
11a39226e8 Start on a new db2 module that uses SeaORM 2022-11-29 16:49:04 +01:00
Antonio Scandurra
ac24600a40 Start moving towards using sea-query to construct queries 2022-11-29 13:55:08 +01:00
Antonio Scandurra
d525cfd697 Increase probability of creating new files in randomized test 2022-11-29 11:02:14 +01:00
Joseph Lyons
4436ec48eb Add "added_to_mailing_list" column on signups table 2022-11-29 02:13:13 -05:00
Joseph T. Lyons
5a9a0f9fa5 Merge pull request #1918 from zed-industries/remove-sign-in-telemetry-event
Remove sign in telemetry event
2022-11-29 01:59:33 -05:00
Max Brunsfeld
6d9b55a654 Send full multibuffer anchors to following peers 2022-11-28 18:00:38 -08:00
Max Brunsfeld
3eac3e20d5 Emit events from a multibuffer when adding/removing excerpts 2022-11-28 17:57:55 -08:00
Joseph Lyons
d2cd9c94f7 Remove sign in telemetry event 2022-11-28 18:56:27 -05:00
Max Brunsfeld
3adc0b947f Merge pull request #1917 from zed-industries/integer-excerpt-ids
Use integers for excerpt ids, map them to locators internally
2022-11-28 14:27:35 -08:00
Max Brunsfeld
718f802157 Implement Copy for multibuffer anchors 2022-11-28 14:18:49 -08:00
Max Brunsfeld
f71145bb32 Add a layer of indirection between excerpt ids and locators 2022-11-28 14:18:49 -08:00
Antonio Scandurra
cd2a8579b9 Capture runnable backtraces only when detecting nondeterminism 2022-11-28 19:35:33 +01:00
Antonio Scandurra
d0709e7bfa Error if project is disconnected after getting completions response 2022-11-28 19:19:24 +01:00
Antonio Scandurra
fa3f100eff Introduce a new detect_nondeterminism = true attribute to gpui::test 2022-11-28 19:01:28 +01:00
Antonio Scandurra
f0a721032d Remove non-determinism caused by random entropy when reconnecting 2022-11-28 18:56:11 +01:00
Antonio Scandurra
0a565c6bae 💄 2022-11-28 17:44:18 +01:00
Antonio Scandurra
af2a2d2494 Return error when waiting on a worktree snapshot after disconnecting 2022-11-28 17:43:40 +01:00
Antonio Scandurra
cd0b663f62 Introduce per-room lock acquired before committing a transaction 2022-11-28 17:00:47 +01:00
Antonio Scandurra
2a0ddd99d2 Error if project is disconnected after getting code actions response 2022-11-28 15:05:34 +01:00
Antonio Scandurra
5581674f8f After completing LSP request, return an error if guest is disconnected 2022-11-28 14:39:27 +01:00
Antonio Scandurra
b0e1d6bc7f Fix integration test incorrectly assuming a certain ordering 2022-11-28 13:57:15 +01:00
Antonio Scandurra
ae11e4f798 Check the correct serialization failure code when retrying transaction 2022-11-28 13:56:03 +01:00
Max Brunsfeld
0b0fe91545 Merge pull request #1912 from zed-industries/matching-brackets-must-contain-range
Fix enclosing-bracket bug that appeared in JS for loops
2022-11-23 13:44:48 -08:00
Max Brunsfeld
aeea47323a Fix enclosing-bracket bug that appeared in JS for loops
Previously, we were relying on the tree-sitter query's range restriction to
avoid returning brackets that did not contain the given range. But the
query's range restriction only guarantees that we don't descend into parent
nodes unless they intersect the range.
2022-11-23 13:37:22 -08:00
Julia
e4185f38cf Merge pull request #1910 from zed-industries/lsp-coordinate-clamp
Don't trust LSP coordinates to be within document bounds
2022-11-23 14:07:37 -05:00
Julia
09e6d44873 Move Unclipped into separate file 2022-11-23 14:02:11 -05:00
Julia
525d84e5bf Remove spurious lifetimes
Co-Authored-By: Max Brunsfeld <max@zed.dev>
2022-11-23 13:52:39 -05:00
Julia
55ca085d7d Consistency in prefix/suffix/signature of UTF16 point to point conversion
Co-Authored-By: Max Brunsfeld <max@zed.dev>
2022-11-23 13:52:18 -05:00
Julia
03cfd23ac5 Bump protocol version back down as proto changes are non-breaking 2022-11-23 13:40:49 -05:00
Julia
a666ca3e40 Collapse proto Point into the one kind of use case, utf-16 coords
Co-Authored-By: Max Brunsfeld <max@zed.dev>
2022-11-23 13:28:44 -05:00
Julia
b58ae8bdd7 Clip diagnostic range before and during empty range expansion
Co-Authored-By: Max Brunsfeld <max@zed.dev>
2022-11-23 13:21:05 -05:00
Max Brunsfeld
5e7652698d v0.67.x dev 2022-11-23 09:56:06 -08:00
Julia
e51cbf67ab Fixup compile errors 2022-11-22 02:49:47 -05:00
Julia
8c75df30cb Wrap a bunch of traits for Unclipped<T> 2022-11-21 15:58:44 -05:00
Julia
1c84e77c37 Start adding concept of Unclipped text coordinates
Co-Authored-By: Max Brunsfeld <max@zed.dev>
2022-11-21 15:48:25 -05:00
Max Brunsfeld
b3a92979a3 Merge pull request #1911 from zed-industries/single-file-worktree-event-extension
Fix file extension retrieval for single-file worktrees
2022-11-21 12:41:35 -08:00
Max Brunsfeld
55d3c09b6b Fix file extension retrieval for single-file worktrees
Previously, we used the file's 'path' method, which only returns the relative
path from the worktree root.
2022-11-21 12:34:36 -08:00
Julia
436c89650a Rename clamped -> clipped 2022-11-21 15:23:00 -05:00
Julia
4ead1ecbbf Simplify logic of this method
Co-Authored-By: Max Brunsfeld <max@zed.dev>
2022-11-21 14:25:01 -05:00
Julia
074e3cfbd6 Clamp UTF-16 to point conversions 2022-11-21 14:25:01 -05:00
Julia
bb32599ded Clamp for all UTF-16 to offset conversions which used to use ToOffset 2022-11-21 14:25:01 -05:00
Julia
f9cbed5a1f Clamp UTF-16 coordinate while performing LSP edits rather than panicking 2022-11-21 11:48:13 -05:00
Kay Simmons
0078bea877 change bump-version to install jq if it's not already installed 2022-11-18 13:42:46 -08:00
Kay Simmons
bb80cee19e Merge pull request #1814 from zed-industries/golden-ratio
Active Pane Magnification
2022-11-18 13:14:03 -08:00
Kay Simmons
0c50c0959d Merge pull request #1906 from zed-industries/mouse-down-capture-on-click-fix
Fix mouse down falling through popovers
2022-11-18 13:10:50 -08:00
Kay Simmons
75b8a12ab3 address issue where mouse down events weren't getting captured after the multiple handlers change 2022-11-18 13:04:27 -08:00
Antonio Scandurra
4c1b4953c1 Remove version from Room
We won't need it once we add the per-room lock.
2022-11-18 20:18:48 +01:00
Antonio Scandurra
c3d556d9bd Don't take an Arc<Server> in message handlers 2022-11-18 11:45:42 +01:00
Max Brunsfeld
d090d230e2 Merge pull request #1903 from zed-industries/override-pyright-completion-sorting
Add LspAdapter hook for processing completions, fix completion sorting from Pyright
2022-11-17 15:30:07 -08:00
Max Brunsfeld
bca635e5d3 Add LspAdapter hook for processing completions, fix completion sorting from Pyright 2022-11-17 15:26:46 -08:00
Julia
3938adf60a Merge pull request #1902 from zed-industries/event-handlers-are-multitude
Allow having multiple mouse event handlers of the same kind
2022-11-17 17:19:32 -05:00
Julia
6537def97e Allow having multiple mouse event handlers of the same kind
Co-Authored-By: Kay Simmons <kay@zed.dev>
2022-11-17 17:01:34 -05:00
Max Brunsfeld
5020c70a04 collab 0.2.4 2022-11-17 11:44:29 -08:00
Mikayla Maki
0a63d2e3e1 Merge pull request #1900 from zed-industries/fix-terminal-performance
Check for wakeups correctly
2022-11-17 11:17:13 -08:00
Mikayla Maki
ce0dfde8ee Check for wakeups correctly 2022-11-17 11:14:31 -08:00
Antonio Scandurra
44bb2ce024 Rename Store to ConnectionPool 2022-11-17 19:03:59 +01:00
Antonio Scandurra
6c83be3f89 Remove obsolete code from Store 2022-11-17 18:46:39 +01:00
Antonio Scandurra
0a4517f97e WIP: Introduce a db field to Session
Co-Authored-By: Nathan Sobo <nathan@zed.dev>
2022-11-17 17:30:26 +01:00
Antonio Scandurra
c34a5f3177 Introduce a new Session struct to server message handlers
Co-Authored-By: Nathan Sobo <nathan@zed.dev>
2022-11-17 17:11:06 +01:00
Antonio Scandurra
4f39181c4c Revert "Don't replace newer diagnostics with older ones"
This reverts commit 71eeeedc05.
2022-11-17 16:57:40 +01:00
Antonio Scandurra
e7e45be6e1 Revert "Wait for previous UpdateFollowers message ack before sending new ones"
This reverts commit fe93263ad4.
2022-11-17 16:57:32 +01:00
Antonio Scandurra
8621c88a3c Use int8 for scan_id and inode in Postgres 2022-11-17 16:56:43 +01:00
Antonio Scandurra
7dae21cb36 🎨 2022-11-17 15:35:18 +01:00
Antonio Scandurra
0f4598a243 Fix seed script 2022-11-17 15:34:35 +01:00
Antonio Scandurra
6415809b61 Fix errors in Postgres schema 2022-11-17 15:34:12 +01:00
Antonio Scandurra
fe93263ad4 Wait for previous UpdateFollowers message ack before sending new ones 2022-11-17 14:12:00 +01:00
Antonio Scandurra
3b34d858b5 Remove unwrap from Server::share_project 2022-11-17 13:33:26 +01:00
Antonio Scandurra
71eeeedc05 Don't replace newer diagnostics with older ones 2022-11-17 12:21:51 +01:00
Antonio Scandurra
532a599239 Use Db::get_guest_connection_ids in other db methods 2022-11-17 11:38:00 +01:00
Nathan Sobo
9eee22ff0a Fix column name in query 2022-11-16 19:40:53 -07:00
Nathan Sobo
94fe93c6ee Move unshare_project to db module 2022-11-16 18:28:45 -07:00
Joseph Lyons
93824dd239 Fix top-level header in discord webhook action 2022-11-16 20:02:15 -05:00
Nathan Sobo
e5f05c9f3b Move leave_project from Store to db module 2022-11-16 17:45:47 -07:00
Nathan Sobo
bdb521cb6b Fix typo in query 2022-11-16 16:52:05 -07:00
Joseph Lyons
c613c98e37 Move comment to correct location 2022-11-16 17:28:50 -05:00
Max Brunsfeld
4e4299d500 v0.66.x dev 2022-11-16 14:22:18 -08:00
Antonio Scandurra
c1291a093b WIP: Allow subscribing to remote entity before creating a model
Co-Authored-By: Nathan Sobo <nathan@zed.dev>
Co-Authored-By: Max Brunsfeld <max@zed.dev>
2022-11-16 19:51:24 +01:00
Antonio Scandurra
adf43c87dd Batch some of the new queries in Db
Co-Authored-By: Nathan Sobo <nathan@zed.dev>
2022-11-16 17:19:06 +01:00
Antonio Scandurra
faf265328e Wait for acknowledgment before sending the next diagnostic summary 2022-11-16 16:03:01 +01:00
Antonio Scandurra
9bc57c0c61 Move Store::start_language_server to Db 2022-11-16 15:48:26 +01:00
Antonio Scandurra
95369f92eb Move Store::update_diagnostic_summary to Db 2022-11-16 15:41:33 +01:00
Antonio Scandurra
117458f4f6 Send worktree updates after project metadata has been sent 2022-11-16 14:58:11 +01:00
Antonio Scandurra
eeb32fa888 Improve queries for composite primary keys 2022-11-16 11:07:39 +01:00
Antonio Scandurra
f9567ae116 Cascade deletes when project is deleted 2022-11-16 10:41:36 +01:00
Antonio Scandurra
c151c87e12 Correctly leave projects when leaving room 2022-11-16 10:36:48 +01:00
Antonio Scandurra
3190236396 Update worktree entry instead of erroring when it already exists 2022-11-16 08:57:19 +01:00
Antonio Scandurra
0817f905a2 Fix syntax error in schema 2022-11-15 18:02:07 +01:00
Antonio Scandurra
ad67f5e4de Always use the database to retrieve collaborators for a project 2022-11-15 17:49:37 +01:00
Antonio Scandurra
e9eadcaa6a Move Store::update_worktree to Db::update_worktree 2022-11-15 17:18:28 +01:00
Antonio Scandurra
4b1dcf2d55 Always use strings to represent paths over the wire
Previously, the protocol used a mix of strings and bytes without any consistency.

When we go to multiple platforms, we won't be able to mix encodings of paths anyway.
We don't know this is the right approach, but it at least makes things consistent
and easy to read in the database, on the wire, etc. Really, we should be using entry
ids etc to refer to entries on the wire anyway, but there's a chance this is the
wrong decision.

Co-Authored-By: Nathan Sobo <nathan@zed.dev>
2022-11-15 16:46:17 +01:00
Antonio Scandurra
974ef967a3 Move Store::join_project to Db::join_project
Co-Authored-By: Nathan Sobo <nathan@zed.dev>
2022-11-15 16:37:51 +01:00
Antonio Scandurra
be523617c9 Start reworking join_project to use the database 2022-11-15 11:44:26 +01:00
Antonio Scandurra
6cbf197226 Determine whether a contact is busy via the database 2022-11-15 10:41:21 +01:00
Antonio Scandurra
3e8fcb04f7 Finish implementing Db::update_project 2022-11-15 09:01:51 +01:00
Antonio Scandurra
42bb5f0e9f Add random delay after returning results from the database 2022-11-15 08:48:16 +01:00
Antonio Scandurra
b9af2ae66e Switch to serializable isolation
Co-Authored-By: Nathan Sobo <nathan@zed.dev>
2022-11-14 19:39:12 +01:00
Antonio Scandurra
d7369ace6a Skip applying room updates if they're older than the local room state 2022-11-14 15:35:39 +01:00
Antonio Scandurra
40073f6100 Wait for acknowledgment before sending the next project update 2022-11-14 15:32:49 +01:00
Antonio Scandurra
65c5adff05 Automatically decline call when user drops their last connection 2022-11-14 11:32:26 +01:00
Antonio Scandurra
59e8600e4c Implement Db::cancel_call 2022-11-14 11:12:23 +01:00
Antonio Scandurra
0310e27347 Fix query errors in Db::share_project 2022-11-14 10:53:11 +01:00
Antonio Scandurra
9902211af1 Leave room when connection is dropped 2022-11-14 10:13:36 +01:00
Joseph Lyons
1da5be6e8f Update release urls to match new zed.dev url format 2022-11-12 21:39:08 -05:00
Antonio Scandurra
2145965749 WIP 2022-11-11 19:36:20 +01:00
Antonio Scandurra
11caba4a4c Remove stray log statement 2022-11-11 18:54:08 +01:00
Antonio Scandurra
9f39dcf7cf Get basic calls test passing again 2022-11-11 18:53:23 +01:00
Antonio Scandurra
1135aeecb8 WIP: Move Store::leave_room to Db::leave_room 2022-11-11 16:59:54 +01:00
Antonio Scandurra
0d1d267213 Move Store::decline_call to Db::decline_call 2022-11-11 15:41:56 +01:00
Antonio Scandurra
c213c98ea4 Remove calls table and use just room_participants 2022-11-11 15:22:04 +01:00
Antonio Scandurra
cc58607c3b Move Store::join_room into Db::join_room 2022-11-11 14:43:40 +01:00
Antonio Scandurra
58947c5c72 Move incoming calls into Db 2022-11-11 14:28:26 +01:00
Antonio Scandurra
6871bbbc71 Start moving Store state into the database 2022-11-11 12:06:43 +01:00
Antonio Scandurra
28aa1567ce Include sender_user_id when handling a server message/request 2022-11-11 11:45:58 +01:00
Antonio Scandurra
f639c4c3d1 Add schema for reconnection support 2022-11-11 10:41:44 +01:00
K Simmons
e2ba8d6df7 Add active pane magnification setting which grows the active pane, making it easier to see its contents 2022-10-25 17:24:19 -07:00
223 changed files with 22273 additions and 11065 deletions

@@ -14,10 +14,10 @@ jobs:
           content: |
             📣 Zed ${{ github.event.release.tag_name }} was just released!
-            Restart your Zed or head to https://zed.dev/releases to grab it.
+            Restart your Zed or head to https://zed.dev/releases/latest to grab it.
             ```md
-            ### Changelog
+            # Changelog
             ${{ github.event.release.body }}
             ```

.gitignore

@@ -18,3 +18,4 @@ DerivedData/
 .swiftpm/config/registries.json
 .swiftpm/xcode/package.xcworkspace/contents.xcworkspacedata
 .netrc
+**/*.db

Cargo.lock (generated): diff suppressed because it is too large.

@@ -40,11 +40,14 @@ members = [
     "crates/project",
     "crates/project_panel",
     "crates/project_symbols",
+    "crates/recent_projects",
     "crates/rope",
     "crates/rpc",
     "crates/search",
     "crates/settings",
     "crates/snippet",
+    "crates/sqlez",
+    "crates/sqlez_macros",
     "crates/sum_tree",
     "crates/terminal",
     "crates/text",
@@ -81,3 +84,4 @@ split-debuginfo = "unpacked"
 [profile.release]
 debug = true

@@ -36,6 +36,7 @@
     "cmd-n": "workspace::NewFile",
     "cmd-shift-n": "workspace::NewWindow",
     "cmd-o": "workspace::Open",
+    "alt-cmd-o": "recent_projects::Toggle",
     "ctrl-`": "workspace::NewTerminal"
   }
 },

@@ -8,6 +8,22 @@
             "Namespace": "G"
         }
     ],
+    "i": [
+        "vim::PushOperator",
+        {
+            "Object": {
+                "around": false
+            }
+        }
+    ],
+    "a": [
+        "vim::PushOperator",
+        {
+            "Object": {
+                "around": true
+            }
+        }
+    ],
     "h": "vim::Left",
     "backspace": "vim::Backspace",
     "j": "vim::Down",
@@ -38,22 +54,6 @@
     ],
     "%": "vim::Matching",
     "escape": "editor::Cancel",
-    "i": [
-        "vim::PushOperator",
-        {
-            "Object": {
-                "around": false
-            }
-        }
-    ],
-    "a": [
-        "vim::PushOperator",
-        {
-            "Object": {
-                "around": true
-            }
-        }
-    ],
     "0": "vim::StartOfLine", // When no number operator present, use start of line motion
     "1": [
         "vim::Number",
@@ -110,6 +110,12 @@
         "vim::PushOperator",
         "Yank"
     ],
+    "z": [
+        "vim::PushOperator",
+        {
+            "Namespace": "Z"
+        }
+    ],
     "i": [
         "vim::SwitchMode",
         "Insert"
@@ -147,6 +153,30 @@
         {
             "focus": true
         }
     ],
+    "ctrl-f": [
+        "vim::Scroll",
+        "PageDown"
+    ],
+    "ctrl-b": [
+        "vim::Scroll",
+        "PageUp"
+    ],
+    "ctrl-d": [
+        "vim::Scroll",
+        "HalfPageDown"
+    ],
+    "ctrl-u": [
+        "vim::Scroll",
+        "HalfPageUp"
+    ],
+    "ctrl-e": [
+        "vim::Scroll",
+        "LineDown"
+    ],
+    "ctrl-y": [
+        "vim::Scroll",
+        "LineUp"
+    ]
     }
 },
@@ -188,6 +218,18 @@
         "y": "vim::CurrentLine"
     }
 },
+{
+    "context": "Editor && vim_operator == z",
+    "bindings": {
+        "t": "editor::ScrollCursorTop",
+        "z": "editor::ScrollCursorCenter",
+        "b": "editor::ScrollCursorBottom",
+        "escape": [
+            "vim::SwitchMode",
+            "Normal"
+        ]
+    }
+},
 {
     "context": "Editor && VimObject",
     "bindings": {

@@ -1,230 +1,233 @@
{
// The name of the Zed theme to use for the UI
"theme": "One Dark",
// The name of a font to use for rendering text in the editor
"buffer_font_family": "Zed Mono",
// The default font size for text in the editor
"buffer_font_size": 15,
// Whether to enable vim modes and key bindings
"vim_mode": false,
// Whether to show the informational hover box when moving the mouse
// over symbols in the editor.
"hover_popover_enabled": true,
// Whether the cursor blinks in the editor.
"cursor_blink": true,
// Whether to pop the completions menu while typing in an editor without
// explicitly requesting it.
"show_completions_on_input": true,
// Whether new projects should start out 'online'. Online projects
// appear in the contacts panel under your name, so that your contacts
// can see which projects you are working on. Regardless of this
// setting, projects keep their last online status when you reopen them.
"projects_online_by_default": true,
// Whether to use language servers to provide code intelligence.
"enable_language_server": true,
// When to automatically save edited buffers. This setting can
// take four values.
//
// 1. Never automatically save:
// "autosave": "off",
// 2. Save when changing focus away from the Zed window:
// "autosave": "on_window_change",
// 3. Save when changing focus away from a specific buffer:
// "autosave": "on_focus_change",
// 4. Save when idle for a certain amount of time:
// "autosave": { "after_delay": {"milliseconds": 500} },
"autosave": "off",
// Where to place the dock by default. This setting can take three
// values:
//
// 1. Position the dock attached to the bottom of the workspace
// "default_dock_anchor": "bottom"
// 2. Position the dock to the right of the workspace like a side panel
// "default_dock_anchor": "right"
// 3. Position the dock full screen over the entire workspace"
// "default_dock_anchor": "expanded"
"default_dock_anchor": "right",
// Whether or not to perform a buffer format before saving
"format_on_save": "on",
// How to perform a buffer format. This setting can take two values:
//
// 1. Format code using the current language server:
// "format_on_save": "language_server"
// 2. Format code using an external command:
// "format_on_save": {
// "external": {
// "command": "prettier",
// "arguments": ["--stdin-filepath", "{buffer_path}"]
// }
// }
"formatter": "language_server",
// How to soft-wrap long lines of text. This setting can take
// three values:
//
// 1. Do not soft wrap.
// "soft_wrap": "none",
// 2. Soft wrap lines that overflow the editor:
// "soft_wrap": "editor_width",
// 3. Soft wrap lines at the preferred line length
// "soft_wrap": "preferred_line_length",
"soft_wrap": "none",
// The column at which to soft-wrap lines, for buffers where soft-wrap
// is enabled.
"preferred_line_length": 80,
// Whether to indent lines using tab characters, as opposed to multiple
// spaces.
"hard_tabs": false,
// How many columns a tab should occupy.
"tab_size": 4,
// Git gutter behavior configuration.
"git": {
// Control whether the git gutter is shown. May take 2 values:
// 1. Show the gutter
// "git_gutter": "tracked_files"
// 2. Hide the gutter
// "git_gutter": "hide"
"git_gutter": "tracked_files"
},
// Settings specific to journaling
"journal": {
// The path of the directory where journal entries are stored
"path": "~",
// What format to display the hours in
// May take 2 values:
// 1. hour12
// 2. hour24
"hour_format": "hour12"
},
// Settings specific to the terminal
"terminal": {
// What shell to use when opening a terminal. May take 3 values:
// 1. Use the system's default terminal configuration (e.g. $TERM).
// "shell": "system"
// 2. A program:
// "shell": {
// "program": "sh"
// }
// 3. A program with arguments:
// "shell": {
// "with_arguments": {
// "program": "/bin/bash",
// "arguments": ["--login"]
// }
// }
"shell": "system",
// What working directory to use when launching the terminal.
// May take 4 values:
// 1. Use the current file's project directory. Will Fallback to the
// first project directory strategy if unsuccessful
// "working_directory": "current_project_directory"
// 2. Use the first project in this workspace's directory
// "working_directory": "first_project_directory"
// 3. Always use this platform's home directory (if we can find it)
// "working_directory": "always_home"
// 4. Always use a specific directory. This value will be shell expanded.
// If this path is not a valid directory the terminal will default to
// this platform's home directory (if we can find it)
// "working_directory": {
// "always": {
// "directory": "~/zed/projects/"
// }
// }
"working_directory": "current_project_directory",
// Set the cursor blinking behavior in the terminal.
// May take 3 values:
// 1. Never blink the cursor, ignoring the terminal mode
// "blinking": "off",
// 2. Default the cursor blink to off, but allow the terminal to
// set blinking
// "blinking": "terminal_controlled",
// 3. Always blink the cursor, ignoring the terminal mode
// "blinking": "on",
"blinking": "terminal_controlled",
// Set whether Alternate Scroll mode (code: ?1007) is active by default.
// Alternate Scroll mode converts mouse scroll events into up / down key
// presses when in the alternate screen (e.g. when running applications
// like vim or less). The terminal can still set and unset this mode.
// May take 2 values:
// 1. Default alternate scroll mode to on
// "alternate_scroll": "on",
// 2. Default alternate scroll mode to off
// "alternate_scroll": "off",
"alternate_scroll": "off",
// Set whether the option key behaves as the meta key.
// May take 2 values:
// 1. Rely on default platform handling of the option key; on macOS
// this means generating certain unicode characters
// "option_as_meta": false,
// 2. Make the option key behave as a 'meta' key, e.g. for emacs
// "option_as_meta": true,
"option_as_meta": false,
// Whether or not selecting text in the terminal will automatically
// copy to the system clipboard.
"copy_on_select": false,
// Any key-value pairs added to this list will be added to the terminal's
// environment. Use `:` to separate multiple values.
"env": {
// "KEY": "value1:value2"
}
// Set the terminal's font size. If this option is not included,
// the terminal will default to matching the buffer's font size.
// "font_size": "15"
// Set the terminal's font family. If this option is not included,
// the terminal will default to matching the buffer's font family.
// "font_family": "Zed Mono"
},
// Different settings for specific languages.
"languages": {
"Plain Text": {
"soft_wrap": "preferred_line_length"
},
"C": {
"tab_size": 2
},
"C++": {
"tab_size": 2
},
"Elixir": {
"tab_size": 2
},
"Go": {
"tab_size": 4,
"hard_tabs": true
},
"Markdown": {
"soft_wrap": "preferred_line_length"
},
"Rust": {
"tab_size": 4
},
"JavaScript": {
"tab_size": 2
},
"TypeScript": {
"tab_size": 2
},
"TSX": {
"tab_size": 2
}
},
// LSP Specific settings.
"lsp": {
// Specify the LSP name as a key here.
// As of 8/10/22, supported LSPs are:
// pyright
// gopls
// rust-analyzer
// typescript-language-server
// vscode-json-languageserver
// "rust_analyzer": {
// //These initialization options are merged into Zed's defaults
// "initialization_options": {
// "checkOnSave": {
// "command": "clippy"
// }
// }
// }
}
}
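For reference, the string-or-object shape used by settings such as "autosave" above maps naturally onto an externally tagged serde enum. The sketch below is illustrative only, assuming the serde and serde_json crates; `Autosave` is a hypothetical stand-in, not Zed's actual settings type.

use serde::Deserialize;

// Hypothetical mirror of the "autosave" setting's JSON shape. Unit variants
// parse from plain strings like "off"; the struct variant parses from
// { "after_delay": { "milliseconds": 500 } }.
#[derive(Debug, Deserialize)]
#[serde(rename_all = "snake_case")]
enum Autosave {
    Off,
    OnWindowChange,
    OnFocusChange,
    AfterDelay { milliseconds: u64 },
}

fn main() {
    let simple: Autosave = serde_json::from_str(r#""on_focus_change""#).unwrap();
    let delayed: Autosave =
        serde_json::from_str(r#"{ "after_delay": { "milliseconds": 500 } }"#).unwrap();
    println!("{simple:?} {delayed:?}");
}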

View File

@@ -11,7 +11,7 @@ use settings::Settings;
use smallvec::SmallVec;
use std::{cmp::Reverse, fmt::Write, sync::Arc};
use util::ResultExt;
use workspace::{ItemHandle, StatusItemView, Workspace};
use workspace::{item::ItemHandle, StatusItemView, Workspace};
actions!(lsp_status, [ShowErrorMessage]);

View File

@@ -8,6 +8,7 @@ path = "src/auto_update.rs"
doctest = false
[dependencies]
db = { path = "../db" }
client = { path = "../client" }
gpui = { path = "../gpui" }
menu = { path = "../menu" }

View File

@@ -1,17 +1,18 @@
mod update_notification;
use anyhow::{anyhow, Context, Result};
use client::{http::HttpClient, ZED_SECRET_CLIENT_TOKEN, ZED_SERVER_URL};
use client::{http::HttpClient, ZED_SECRET_CLIENT_TOKEN};
use db::kvp::KEY_VALUE_STORE;
use gpui::{
actions, platform::AppVersion, AppContext, AsyncAppContext, Entity, ModelContext, ModelHandle,
MutableAppContext, Task, WeakViewHandle,
};
use lazy_static::lazy_static;
use serde::Deserialize;
use settings::ReleaseChannel;
use smol::{fs::File, io::AsyncReadExt, process::Command};
use std::{env, ffi::OsString, path::PathBuf, sync::Arc, time::Duration};
use update_notification::UpdateNotification;
use util::channel::ReleaseChannel;
use workspace::Workspace;
const SHOULD_SHOW_UPDATE_NOTIFICATION_KEY: &str = "auto-updater-should-show-updated-notification";
@@ -41,7 +42,6 @@ pub struct AutoUpdater {
current_version: AppVersion,
http_client: Arc<dyn HttpClient>,
pending_poll: Option<Task<()>>,
db: project::Db,
server_url: String,
}
@@ -55,11 +55,11 @@ impl Entity for AutoUpdater {
type Event = ();
}
pub fn init(db: project::Db, http_client: Arc<dyn HttpClient>, cx: &mut MutableAppContext) {
pub fn init(http_client: Arc<dyn HttpClient>, server_url: String, cx: &mut MutableAppContext) {
if let Some(version) = (*ZED_APP_VERSION).or_else(|| cx.platform().app_version().ok()) {
let server_url = ZED_SERVER_URL.to_string();
let server_url = server_url;
let auto_updater = cx.add_model(|cx| {
let updater = AutoUpdater::new(version, db.clone(), http_client, server_url.clone());
let updater = AutoUpdater::new(version, http_client, server_url.clone());
updater.start_polling(cx).detach();
updater
});
@@ -70,7 +70,14 @@ pub fn init(db: project::Db, http_client: Arc<dyn HttpClient>, cx: &mut MutableA
}
});
cx.add_global_action(move |_: &ViewReleaseNotes, cx| {
cx.platform().open_url(&format!("{server_url}/releases"));
let latest_release_url = if cx.has_global::<ReleaseChannel>()
&& *cx.global::<ReleaseChannel>() == ReleaseChannel::Preview
{
format!("{server_url}/releases/preview/latest")
} else {
format!("{server_url}/releases/latest")
};
cx.platform().open_url(&latest_release_url);
});
cx.add_action(UpdateNotification::dismiss);
}
@@ -113,14 +120,12 @@ impl AutoUpdater {
fn new(
current_version: AppVersion,
db: project::Db,
http_client: Arc<dyn HttpClient>,
server_url: String,
) -> Self {
Self {
status: AutoUpdateStatus::Idle,
current_version,
db,
http_client,
server_url,
pending_poll: None,
@@ -290,20 +295,28 @@ impl AutoUpdater {
should_show: bool,
cx: &AppContext,
) -> Task<Result<()>> {
let db = self.db.clone();
cx.background().spawn(async move {
if should_show {
db.write_kvp(SHOULD_SHOW_UPDATE_NOTIFICATION_KEY, "")?;
KEY_VALUE_STORE
.write_kvp(
SHOULD_SHOW_UPDATE_NOTIFICATION_KEY.to_string(),
"".to_string(),
)
.await?;
} else {
db.delete_kvp(SHOULD_SHOW_UPDATE_NOTIFICATION_KEY)?;
KEY_VALUE_STORE
.delete_kvp(SHOULD_SHOW_UPDATE_NOTIFICATION_KEY.to_string())
.await?;
}
Ok(())
})
}
fn should_show_update_notification(&self, cx: &AppContext) -> Task<Result<bool>> {
let db = self.db.clone();
cx.background()
.spawn(async move { Ok(db.read_kvp(SHOULD_SHOW_UPDATE_NOTIFICATION_KEY)?.is_some()) })
cx.background().spawn(async move {
Ok(KEY_VALUE_STORE
.read_kvp(SHOULD_SHOW_UPDATE_NOTIFICATION_KEY)?
.is_some())
})
}
}

View File

@@ -5,8 +5,9 @@ use gpui::{
Element, Entity, MouseButton, View, ViewContext,
};
use menu::Cancel;
use settings::{ReleaseChannel, Settings};
use workspace::Notification;
use settings::Settings;
use util::channel::ReleaseChannel;
use workspace::notifications::Notification;
pub struct UpdateNotification {
version: AppVersion,
@@ -29,7 +30,7 @@ impl View for UpdateNotification {
let theme = cx.global::<Settings>().theme.clone();
let theme = &theme.update_notification;
let app_name = cx.global::<ReleaseChannel>().name();
let app_name = cx.global::<ReleaseChannel>().display_name();
MouseEventHandler::<ViewReleaseNotes>::new(0, cx, |state, cx| {
Flex::column()

View File

@@ -4,7 +4,10 @@ use gpui::{
use itertools::Itertools;
use search::ProjectSearchView;
use settings::Settings;
use workspace::{ItemEvent, ItemHandle, ToolbarItemLocation, ToolbarItemView};
use workspace::{
item::{ItemEvent, ItemHandle},
ToolbarItemLocation, ToolbarItemView,
};
pub enum Event {
UpdateLocation,

View File

@@ -21,6 +21,7 @@ test-support = [
client = { path = "../client" }
collections = { path = "../collections" }
gpui = { path = "../gpui" }
log = "0.4"
live_kit_client = { path = "../live_kit_client" }
media = { path = "../media" }
project = { path = "../project" }

View File

@@ -22,7 +22,7 @@ pub fn init(client: Arc<Client>, user_store: ModelHandle<UserStore>, cx: &mut Mu
#[derive(Clone)]
pub struct IncomingCall {
pub room_id: u64,
pub caller: Arc<User>,
pub calling_user: Arc<User>,
pub participants: Vec<Arc<User>>,
pub initial_project: Option<proto::ParticipantProject>,
}
@@ -78,9 +78,9 @@ impl ActiveCall {
user_store.get_users(envelope.payload.participant_user_ids, cx)
})
.await?,
caller: user_store
calling_user: user_store
.update(&mut cx, |user_store, cx| {
user_store.get_user(envelope.payload.caller_user_id, cx)
user_store.get_user(envelope.payload.calling_user_id, cx)
})
.await?,
initial_project: envelope.payload.initial_project,
@@ -94,12 +94,18 @@ impl ActiveCall {
async fn handle_call_canceled(
this: ModelHandle<Self>,
_: TypedEnvelope<proto::CallCanceled>,
envelope: TypedEnvelope<proto::CallCanceled>,
_: Arc<Client>,
mut cx: AsyncAppContext,
) -> Result<()> {
this.update(&mut cx, |this, _| {
*this.incoming_call.0.borrow_mut() = None;
let mut incoming_call = this.incoming_call.0.borrow_mut();
if incoming_call
.as_ref()
.map_or(false, |call| call.room_id == envelope.payload.room_id)
{
incoming_call.take();
}
});
Ok(())
}
@@ -110,13 +116,13 @@ impl ActiveCall {
pub fn invite(
&mut self,
recipient_user_id: u64,
called_user_id: u64,
initial_project: Option<ModelHandle<Project>>,
cx: &mut ModelContext<Self>,
) -> Task<Result<()>> {
let client = self.client.clone();
let user_store = self.user_store.clone();
if !self.pending_invites.insert(recipient_user_id) {
if !self.pending_invites.insert(called_user_id) {
return Task::ready(Err(anyhow!("user was already invited")));
}
@@ -136,13 +142,13 @@ impl ActiveCall {
};
room.update(&mut cx, |room, cx| {
room.call(recipient_user_id, initial_project_id, cx)
room.call(called_user_id, initial_project_id, cx)
})
.await?;
} else {
let room = cx
.update(|cx| {
Room::create(recipient_user_id, initial_project, client, user_store, cx)
Room::create(called_user_id, initial_project, client, user_store, cx)
})
.await?;
@@ -155,7 +161,7 @@ impl ActiveCall {
let result = invite.await;
this.update(&mut cx, |this, cx| {
this.pending_invites.remove(&recipient_user_id);
this.pending_invites.remove(&called_user_id);
cx.notify();
});
result
@@ -164,7 +170,7 @@ impl ActiveCall {
pub fn cancel_invite(
&mut self,
recipient_user_id: u64,
called_user_id: u64,
cx: &mut ModelContext<Self>,
) -> Task<Result<()>> {
let room_id = if let Some(room) = self.room() {
@@ -178,7 +184,7 @@ impl ActiveCall {
client
.request(proto::CancelCall {
room_id,
recipient_user_id,
called_user_id,
})
.await?;
anyhow::Ok(())
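The room_id guard added to handle_call_canceled above prevents a stale cancellation from dismissing a newer incoming call. A reduced sketch of that pattern, with simplified types rather than the actual crate types:

struct IncomingCall {
    room_id: u64,
}

// Only clear the pending call when the cancellation refers to the same
// room; a CallCanceled for an older, already-replaced call is ignored.
fn handle_cancel(incoming: &mut Option<IncomingCall>, canceled_room_id: u64) {
    if incoming
        .as_ref()
        .map_or(false, |call| call.room_id == canceled_room_id)
    {
        incoming.take();
    }
}

fn main() {
    let mut slot = Some(IncomingCall { room_id: 7 });
    handle_cancel(&mut slot, 3); // stale cancellation: call survives
    assert!(slot.is_some());
    handle_cancel(&mut slot, 7); // matching room: call is cleared
    assert!(slot.is_none());
}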

View File

@@ -4,7 +4,7 @@ use collections::HashMap;
use gpui::WeakModelHandle;
pub use live_kit_client::Frame;
use project::Project;
use std::sync::Arc;
use std::{fmt, sync::Arc};
#[derive(Copy, Clone, Debug, Eq, PartialEq)]
pub enum ParticipantLocation {
@@ -36,9 +36,10 @@ pub struct LocalParticipant {
pub active_project: Option<WeakModelHandle<Project>>,
}
#[derive(Clone)]
#[derive(Clone, Debug)]
pub struct RemoteParticipant {
pub user: Arc<User>,
pub peer_id: proto::PeerId,
pub projects: Vec<proto::ParticipantProject>,
pub location: ParticipantLocation,
pub tracks: HashMap<live_kit_client::Sid, Arc<RemoteVideoTrack>>,
@@ -49,6 +50,12 @@ pub struct RemoteVideoTrack {
pub(crate) live_kit_track: Arc<live_kit_client::RemoteVideoTrack>,
}
impl fmt::Debug for RemoteVideoTrack {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.debug_struct("RemoteVideoTrack").finish()
}
}
impl RemoteVideoTrack {
pub fn frames(&self) -> async_broadcast::Receiver<Frame> {
self.live_kit_track.frames()

View File

@@ -3,23 +3,30 @@ use crate::{
IncomingCall,
};
use anyhow::{anyhow, Result};
use client::{proto, Client, PeerId, TypedEnvelope, User, UserStore};
use client::{
proto::{self, PeerId},
Client, TypedEnvelope, User, UserStore,
};
use collections::{BTreeMap, HashSet};
use futures::StreamExt;
use gpui::{AsyncAppContext, Entity, ModelContext, ModelHandle, MutableAppContext, Task};
use futures::{FutureExt, StreamExt};
use gpui::{
AsyncAppContext, Entity, ModelContext, ModelHandle, MutableAppContext, Task, WeakModelHandle,
};
use live_kit_client::{LocalTrackPublication, LocalVideoTrack, RemoteVideoTrackUpdate};
use postage::stream::Stream;
use project::Project;
use std::{mem, os::unix::prelude::OsStrExt, sync::Arc};
use util::{post_inc, ResultExt};
use std::{mem, sync::Arc, time::Duration};
use util::{post_inc, ResultExt, TryFutureExt};
pub const RECONNECT_TIMEOUT: Duration = client::RECEIVE_TIMEOUT;
#[derive(Clone, Debug, PartialEq, Eq)]
pub enum Event {
ParticipantLocationChanged {
participant_id: PeerId,
participant_id: proto::PeerId,
},
RemoteVideoTracksChanged {
participant_id: PeerId,
participant_id: proto::PeerId,
},
RemoteProjectShared {
owner: Arc<User>,
@@ -37,7 +44,7 @@ pub struct Room {
live_kit: Option<LiveKitRoom>,
status: RoomStatus,
local_participant: LocalParticipant,
remote_participants: BTreeMap<PeerId, RemoteParticipant>,
remote_participants: BTreeMap<u64, RemoteParticipant>,
pending_participants: Vec<Arc<User>>,
participant_user_ids: HashSet<u64>,
pending_call_count: usize,
@@ -46,6 +53,7 @@ pub struct Room {
user_store: ModelHandle<UserStore>,
subscriptions: Vec<client::Subscription>,
pending_room_update: Option<Task<()>>,
maintain_connection: Option<Task<Option<()>>>,
}
impl Entity for Room {
@@ -53,7 +61,8 @@ impl Entity for Room {
fn release(&mut self, _: &mut MutableAppContext) {
if self.status.is_online() {
self.client.send(proto::LeaveRoom { id: self.id }).log_err();
log::info!("room was released, sending leave message");
self.client.send(proto::LeaveRoom {}).log_err();
}
}
}
@@ -66,21 +75,6 @@ impl Room {
user_store: ModelHandle<UserStore>,
cx: &mut ModelContext<Self>,
) -> Self {
let mut client_status = client.status();
cx.spawn_weak(|this, mut cx| async move {
let is_connected = client_status
.next()
.await
.map_or(false, |s| s.is_connected());
// Even if we're initially connected, any future change of the status means we momentarily disconnected.
if !is_connected || client_status.next().await.is_some() {
if let Some(this) = this.upgrade(&cx) {
let _ = this.update(&mut cx, |this, cx| this.leave(cx));
}
}
})
.detach();
let live_kit_room = if let Some(connection_info) = live_kit_connection_info {
let room = live_kit_client::Room::new();
let mut status = room.status();
@@ -131,6 +125,9 @@ impl Room {
None
};
let maintain_connection =
cx.spawn_weak(|this, cx| Self::maintain_connection(this, client.clone(), cx).log_err());
Self {
id,
live_kit: live_kit_room,
@@ -145,11 +142,12 @@ impl Room {
pending_room_update: None,
client,
user_store,
maintain_connection: Some(maintain_connection),
}
}
pub(crate) fn create(
recipient_user_id: u64,
called_user_id: u64,
initial_project: Option<ModelHandle<Project>>,
client: Arc<Client>,
user_store: ModelHandle<UserStore>,
@@ -182,7 +180,7 @@ impl Room {
match room
.update(&mut cx, |room, cx| {
room.leave_when_empty = true;
room.call(recipient_user_id, initial_project_id, cx)
room.call(called_user_id, initial_project_id, cx)
})
.await
{
@@ -235,16 +233,113 @@ impl Room {
cx.notify();
cx.emit(Event::Left);
log::info!("leaving room");
self.status = RoomStatus::Offline;
self.remote_participants.clear();
self.pending_participants.clear();
self.participant_user_ids.clear();
self.subscriptions.clear();
self.live_kit.take();
self.client.send(proto::LeaveRoom { id: self.id })?;
self.pending_room_update.take();
self.maintain_connection.take();
self.client.send(proto::LeaveRoom {})?;
Ok(())
}
async fn maintain_connection(
this: WeakModelHandle<Self>,
client: Arc<Client>,
mut cx: AsyncAppContext,
) -> Result<()> {
let mut client_status = client.status();
loop {
let is_connected = client_status
.next()
.await
.map_or(false, |s| s.is_connected());
// Even if we're initially connected, any future change of the status means we momentarily disconnected.
if !is_connected || client_status.next().await.is_some() {
log::info!("detected client disconnection");
let room_id = this
.upgrade(&cx)
.ok_or_else(|| anyhow!("room was dropped"))?
.update(&mut cx, |this, cx| {
this.status = RoomStatus::Rejoining;
cx.notify();
this.id
});
// Wait for client to re-establish a connection to the server.
{
let mut reconnection_timeout = cx.background().timer(RECONNECT_TIMEOUT).fuse();
let client_reconnection = async {
let mut remaining_attempts = 3;
while remaining_attempts > 0 {
log::info!(
"waiting for client status change, remaining attempts {}",
remaining_attempts
);
if let Some(status) = client_status.next().await {
if status.is_connected() {
log::info!("client reconnected, attempting to rejoin room");
let rejoin_room = async {
let response =
client.request(proto::JoinRoom { id: room_id }).await?;
let room_proto =
response.room.ok_or_else(|| anyhow!("invalid room"))?;
this.upgrade(&cx)
.ok_or_else(|| anyhow!("room was dropped"))?
.update(&mut cx, |this, cx| {
this.status = RoomStatus::Online;
this.apply_room_update(room_proto, cx)
})?;
anyhow::Ok(())
};
if rejoin_room.await.log_err().is_some() {
return true;
} else {
remaining_attempts -= 1;
}
}
} else {
return false;
}
}
false
}
.fuse();
futures::pin_mut!(client_reconnection);
futures::select_biased! {
reconnected = client_reconnection => {
if reconnected {
log::info!("successfully reconnected to room");
// If we successfully joined the room, go back around the loop
// waiting for future connection status changes.
continue;
}
}
_ = reconnection_timeout => {
log::info!("room reconnection timeout expired");
}
}
}
// The client failed to re-establish a connection to the server
// or an error occurred while trying to re-join the room. Either way
// we leave the room and return an error.
if let Some(this) = this.upgrade(&cx) {
log::info!("reconnection failed, leaving room");
let _ = this.update(&mut cx, |this, cx| this.leave(cx));
}
return Err(anyhow!(
"can't reconnect to room: client failed to re-establish connection"
));
}
}
}
pub fn id(&self) -> u64 {
self.id
}
@@ -257,10 +352,16 @@ impl Room {
&self.local_participant
}
pub fn remote_participants(&self) -> &BTreeMap<PeerId, RemoteParticipant> {
pub fn remote_participants(&self) -> &BTreeMap<u64, RemoteParticipant> {
&self.remote_participants
}
pub fn remote_participant_for_peer_id(&self, peer_id: PeerId) -> Option<&RemoteParticipant> {
self.remote_participants
.values()
.find(|p| p.peer_id == peer_id)
}
pub fn pending_participants(&self) -> &[Arc<User>] {
&self.pending_participants
}
@@ -294,6 +395,11 @@ impl Room {
.position(|participant| Some(participant.user_id) == self.client.user_id());
let local_participant = local_participant_ix.map(|ix| room.participants.swap_remove(ix));
let pending_participant_user_ids = room
.pending_participants
.iter()
.map(|p| p.user_id)
.collect::<Vec<_>>();
let remote_participant_user_ids = room
.participants
.iter()
@@ -303,7 +409,7 @@ impl Room {
self.user_store.update(cx, move |user_store, cx| {
(
user_store.get_users(remote_participant_user_ids, cx),
user_store.get_users(room.pending_participant_user_ids, cx),
user_store.get_users(pending_participant_user_ids, cx),
)
});
self.pending_room_update = Some(cx.spawn(|this, mut cx| async move {
@@ -321,12 +427,12 @@ impl Room {
if let Some(participants) = remote_participants.log_err() {
for (participant, user) in room.participants.into_iter().zip(participants) {
let peer_id = PeerId(participant.peer_id);
let Some(peer_id) = participant.peer_id else { continue };
this.participant_user_ids.insert(participant.user_id);
let old_projects = this
.remote_participants
.get(&peer_id)
.get(&participant.user_id)
.into_iter()
.flat_map(|existing| &existing.projects)
.map(|project| project.id)
@@ -355,9 +461,11 @@ impl Room {
let location = ParticipantLocation::from_proto(participant.location)
.unwrap_or(ParticipantLocation::External);
if let Some(remote_participant) = this.remote_participants.get_mut(&peer_id)
if let Some(remote_participant) =
this.remote_participants.get_mut(&participant.user_id)
{
remote_participant.projects = participant.projects;
remote_participant.peer_id = peer_id;
if location != remote_participant.location {
remote_participant.location = location;
cx.emit(Event::ParticipantLocationChanged {
@@ -366,9 +474,10 @@ impl Room {
}
} else {
this.remote_participants.insert(
peer_id,
participant.user_id,
RemoteParticipant {
user: user.clone(),
peer_id,
projects: participant.projects,
location,
tracks: Default::default(),
@@ -377,7 +486,7 @@ impl Room {
if let Some(live_kit) = this.live_kit.as_ref() {
let tracks =
live_kit.room.remote_video_tracks(&peer_id.0.to_string());
live_kit.room.remote_video_tracks(&peer_id.to_string());
for track in tracks {
this.remote_video_track_updated(
RemoteVideoTrackUpdate::Subscribed(track),
@@ -389,8 +498,8 @@ impl Room {
}
}
this.remote_participants.retain(|_, participant| {
if this.participant_user_ids.contains(&participant.user.id) {
this.remote_participants.retain(|user_id, participant| {
if this.participant_user_ids.contains(user_id) {
true
} else {
for project in &participant.projects {
@@ -412,6 +521,7 @@ impl Room {
this.pending_room_update.take();
if this.should_leave() {
log::info!("room is empty, leaving");
let _ = this.leave(cx);
}
@@ -431,11 +541,11 @@ impl Room {
) -> Result<()> {
match change {
RemoteVideoTrackUpdate::Subscribed(track) => {
let peer_id = PeerId(track.publisher_id().parse()?);
let user_id = track.publisher_id().parse()?;
let track_id = track.sid().to_string();
let participant = self
.remote_participants
.get_mut(&peer_id)
.get_mut(&user_id)
.ok_or_else(|| anyhow!("subscribed to track by unknown participant"))?;
participant.tracks.insert(
track_id.clone(),
@@ -444,21 +554,21 @@ impl Room {
}),
);
cx.emit(Event::RemoteVideoTracksChanged {
participant_id: peer_id,
participant_id: participant.peer_id,
});
}
RemoteVideoTrackUpdate::Unsubscribed {
publisher_id,
track_id,
} => {
let peer_id = PeerId(publisher_id.parse()?);
let user_id = publisher_id.parse()?;
let participant = self
.remote_participants
.get_mut(&peer_id)
.get_mut(&user_id)
.ok_or_else(|| anyhow!("unsubscribed from track by unknown participant"))?;
participant.tracks.remove(&track_id);
cx.emit(Event::RemoteVideoTracksChanged {
participant_id: peer_id,
participant_id: participant.peer_id,
});
}
}
@@ -472,10 +582,12 @@ impl Room {
{
for participant in self.remote_participants.values() {
assert!(self.participant_user_ids.contains(&participant.user.id));
assert_ne!(participant.user.id, self.client.user_id().unwrap());
}
for participant in &self.pending_participants {
assert!(self.participant_user_ids.contains(&participant.id));
assert_ne!(participant.id, self.client.user_id().unwrap());
}
assert_eq!(
@@ -487,7 +599,7 @@ impl Room {
pub(crate) fn call(
&mut self,
recipient_user_id: u64,
called_user_id: u64,
initial_project_id: Option<u64>,
cx: &mut ModelContext<Self>,
) -> Task<Result<()>> {
@@ -503,7 +615,7 @@ impl Room {
let result = client
.request(proto::Call {
room_id,
recipient_user_id,
called_user_id,
initial_project_id,
})
.await;
@@ -538,7 +650,7 @@ impl Room {
id: worktree.id().to_proto(),
root_name: worktree.root_name().into(),
visible: worktree.is_visible(),
abs_path: worktree.abs_path().as_os_str().as_bytes().to_vec(),
abs_path: worktree.abs_path().to_string_lossy().into(),
}
})
.collect(),
@@ -746,6 +858,7 @@ impl Default for ScreenTrack {
#[derive(Copy, Clone, PartialEq, Eq)]
pub enum RoomStatus {
Online,
Rejoining,
Offline,
}
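The new maintain_connection task above races a bounded rejoin loop against a timeout with select_biased!, preferring the rejoin future when both are ready. A minimal sketch of that pattern, assuming the futures and smol crates; try_rejoin is a hypothetical stand-in for the real JoinRoom request:

use futures::FutureExt;
use std::time::Duration;

async fn rejoin_with_timeout(timeout: Duration) -> bool {
    // Give up entirely once the overall timeout elapses, even if rejoin
    // attempts are still in flight.
    let mut deadline = smol::Timer::after(timeout).fuse();
    let attempts = async {
        for attempt in 1..=3 {
            println!("rejoin attempt {attempt}");
            if try_rejoin().await {
                return true;
            }
        }
        false
    }
    .fuse();
    futures::pin_mut!(attempts);
    futures::select_biased! {
        rejoined = attempts => rejoined,
        _ = deadline => false,
    }
}

// Placeholder for the real `client.request(proto::JoinRoom { .. })` call.
async fn try_rejoin() -> bool {
    true
}

fn main() {
    assert!(smol::block_on(rejoin_with_timeout(Duration::from_secs(1))));
}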

View File

@@ -11,29 +11,27 @@ use async_tungstenite::tungstenite::{
error::Error as WebsocketError,
http::{Request, StatusCode},
};
use db::Db;
use futures::{future::LocalBoxFuture, AsyncReadExt, FutureExt, SinkExt, StreamExt, TryStreamExt};
use gpui::{
actions,
serde_json::{self, Value},
AnyModelHandle, AnyViewHandle, AnyWeakModelHandle, AnyWeakViewHandle, AppContext,
AsyncAppContext, Entity, ModelContext, ModelHandle, MutableAppContext, Task, View, ViewContext,
ViewHandle,
AsyncAppContext, Entity, ModelHandle, MutableAppContext, Task, View, ViewContext, ViewHandle,
};
use http::HttpClient;
use lazy_static::lazy_static;
use parking_lot::RwLock;
use postage::watch;
use rand::prelude::*;
use rpc::proto::{AnyTypedEnvelope, EntityMessage, EnvelopedMessage, RequestMessage};
use rpc::proto::{AnyTypedEnvelope, EntityMessage, EnvelopedMessage, PeerId, RequestMessage};
use serde::Deserialize;
use settings::ReleaseChannel;
use std::{
any::TypeId,
collections::HashMap,
convert::TryFrom,
fmt::Write as _,
future::Future,
marker::PhantomData,
path::PathBuf,
sync::{Arc, Weak},
time::{Duration, Instant},
@@ -41,6 +39,7 @@ use std::{
use telemetry::Telemetry;
use thiserror::Error;
use url::Url;
use util::channel::ReleaseChannel;
use util::{ResultExt, TryFutureExt};
pub use rpc::*;
@@ -141,7 +140,7 @@ impl EstablishConnectionError {
}
}
#[derive(Copy, Clone, Debug, Eq, PartialEq)]
#[derive(Copy, Clone, Debug, PartialEq)]
pub enum Status {
SignedOut,
UpgradeRequired,
@@ -172,7 +171,7 @@ struct ClientState {
entity_id_extractors: HashMap<TypeId, fn(&dyn AnyTypedEnvelope) -> u64>,
_reconnect_task: Option<Task<()>>,
reconnect_interval: Duration,
entities_by_type_and_remote_id: HashMap<(TypeId, u64), AnyWeakEntityHandle>,
entities_by_type_and_remote_id: HashMap<(TypeId, u64), WeakSubscriber>,
models_by_message_type: HashMap<TypeId, AnyWeakModelHandle>,
entity_types_by_message_type: HashMap<TypeId, TypeId>,
#[allow(clippy::type_complexity)]
@@ -182,7 +181,7 @@ struct ClientState {
dyn Send
+ Sync
+ Fn(
AnyEntityHandle,
Subscriber,
Box<dyn AnyTypedEnvelope>,
&Arc<Client>,
AsyncAppContext,
@@ -191,12 +190,13 @@ struct ClientState {
>,
}
enum AnyWeakEntityHandle {
enum WeakSubscriber {
Model(AnyWeakModelHandle),
View(AnyWeakViewHandle),
Pending(Vec<Box<dyn AnyTypedEnvelope>>),
}
enum AnyEntityHandle {
enum Subscriber {
Model(AnyModelHandle),
View(AnyViewHandle),
}
@@ -254,11 +254,59 @@ impl Drop for Subscription {
}
}
pub struct PendingEntitySubscription<T: Entity> {
client: Arc<Client>,
remote_id: u64,
_entity_type: PhantomData<T>,
consumed: bool,
}
impl<T: Entity> PendingEntitySubscription<T> {
pub fn set_model(mut self, model: &ModelHandle<T>, cx: &mut AsyncAppContext) -> Subscription {
self.consumed = true;
let mut state = self.client.state.write();
let id = (TypeId::of::<T>(), self.remote_id);
let Some(WeakSubscriber::Pending(messages)) =
state.entities_by_type_and_remote_id.remove(&id)
else {
unreachable!()
};
state
.entities_by_type_and_remote_id
.insert(id, WeakSubscriber::Model(model.downgrade().into()));
drop(state);
for message in messages {
self.client.handle_message(message, cx);
}
Subscription::Entity {
client: Arc::downgrade(&self.client),
id,
}
}
}
impl<T: Entity> Drop for PendingEntitySubscription<T> {
fn drop(&mut self) {
if !self.consumed {
let mut state = self.client.state.write();
if let Some(WeakSubscriber::Pending(messages)) = state
.entities_by_type_and_remote_id
.remove(&(TypeId::of::<T>(), self.remote_id))
{
for message in messages {
log::info!("unhandled message {}", message.payload_type_name());
}
}
}
}
}
impl Client {
pub fn new(http: Arc<dyn HttpClient>, cx: &AppContext) -> Arc<Self> {
Arc::new(Self {
id: 0,
peer: Peer::new(),
peer: Peer::new(0),
telemetry: Telemetry::new(http.clone(), cx),
http,
state: Default::default(),
@@ -285,14 +333,14 @@ impl Client {
}
#[cfg(any(test, feature = "test-support"))]
pub fn tear_down(&self) {
pub fn teardown(&self) {
let mut state = self.state.write();
state._reconnect_task.take();
state.message_handlers.clear();
state.models_by_message_type.clear();
state.entities_by_type_and_remote_id.clear();
state.entity_id_extractors.clear();
self.peer.reset();
self.peer.teardown();
}
#[cfg(any(test, feature = "test-support"))]
@@ -349,7 +397,11 @@ impl Client {
let this = self.clone();
let reconnect_interval = state.reconnect_interval;
state._reconnect_task = Some(cx.spawn(|cx| async move {
#[cfg(any(test, feature = "test-support"))]
let mut rng = StdRng::seed_from_u64(0);
#[cfg(not(any(test, feature = "test-support")))]
let mut rng = StdRng::from_entropy();
let mut delay = INITIAL_RECONNECTION_DELAY;
while let Err(error) = this.authenticate_and_connect(true, &cx).await {
log::error!("failed to connect {}", error);
@@ -387,26 +439,28 @@ impl Client {
self.state
.write()
.entities_by_type_and_remote_id
.insert(id, AnyWeakEntityHandle::View(cx.weak_handle().into()));
.insert(id, WeakSubscriber::View(cx.weak_handle().into()));
Subscription::Entity {
client: Arc::downgrade(self),
id,
}
}
pub fn add_model_for_remote_entity<T: Entity>(
pub fn subscribe_to_entity<T: Entity>(
self: &Arc<Self>,
remote_id: u64,
cx: &mut ModelContext<T>,
) -> Subscription {
) -> PendingEntitySubscription<T> {
let id = (TypeId::of::<T>(), remote_id);
self.state
.write()
.entities_by_type_and_remote_id
.insert(id, AnyWeakEntityHandle::Model(cx.weak_handle().into()));
Subscription::Entity {
client: Arc::downgrade(self),
id,
.insert(id, WeakSubscriber::Pending(Default::default()));
PendingEntitySubscription {
client: self.clone(),
remote_id,
consumed: false,
_entity_type: PhantomData,
}
}
@@ -434,7 +488,7 @@ impl Client {
let prev_handler = state.message_handlers.insert(
message_type_id,
Arc::new(move |handle, envelope, client, cx| {
let handle = if let AnyEntityHandle::Model(handle) = handle {
let handle = if let Subscriber::Model(handle) = handle {
handle
} else {
unreachable!();
@@ -488,7 +542,7 @@ impl Client {
F: 'static + Future<Output = Result<()>>,
{
self.add_entity_message_handler::<M, E, _, _>(move |handle, message, client, cx| {
if let AnyEntityHandle::View(handle) = handle {
if let Subscriber::View(handle) = handle {
handler(handle.downcast::<E>().unwrap(), message, client, cx)
} else {
unreachable!();
@@ -507,7 +561,7 @@ impl Client {
F: 'static + Future<Output = Result<()>>,
{
self.add_entity_message_handler::<M, E, _, _>(move |handle, message, client, cx| {
if let AnyEntityHandle::Model(handle) = handle {
if let Subscriber::Model(handle) = handle {
handler(handle.downcast::<E>().unwrap(), message, client, cx)
} else {
unreachable!();
@@ -522,7 +576,7 @@ impl Client {
H: 'static
+ Send
+ Sync
+ Fn(AnyEntityHandle, TypedEnvelope<M>, Arc<Self>, AsyncAppContext) -> F,
+ Fn(Subscriber, TypedEnvelope<M>, Arc<Self>, AsyncAppContext) -> F,
F: 'static + Future<Output = Result<()>>,
{
let model_type_id = TypeId::of::<E>();
@@ -756,7 +810,11 @@ impl Client {
hello_message_type_name
)
})?;
Ok(PeerId(hello.payload.peer_id))
let peer_id = hello
.payload
.peer_id
.ok_or_else(|| anyhow!("invalid peer id"))?;
Ok(peer_id)
};
let peer_id = match peer_id.await {
@@ -768,7 +826,7 @@ impl Client {
};
log::info!(
"set status to connected (connection id: {}, peer id: {})",
"set status to connected (connection id: {:?}, peer id: {:?})",
connection_id,
peer_id
);
@@ -784,94 +842,8 @@ impl Client {
let cx = cx.clone();
let this = self.clone();
async move {
let mut message_id = 0_usize;
while let Some(message) = incoming.next().await {
let mut state = this.state.write();
message_id += 1;
let type_name = message.payload_type_name();
let payload_type_id = message.payload_type_id();
let sender_id = message.original_sender_id().map(|id| id.0);
let model = state
.models_by_message_type
.get(&payload_type_id)
.and_then(|model| model.upgrade(&cx))
.map(AnyEntityHandle::Model)
.or_else(|| {
let entity_type_id =
*state.entity_types_by_message_type.get(&payload_type_id)?;
let entity_id = state
.entity_id_extractors
.get(&message.payload_type_id())
.map(|extract_entity_id| {
(extract_entity_id)(message.as_ref())
})?;
let entity = state
.entities_by_type_and_remote_id
.get(&(entity_type_id, entity_id))?;
if let Some(entity) = entity.upgrade(&cx) {
Some(entity)
} else {
state
.entities_by_type_and_remote_id
.remove(&(entity_type_id, entity_id));
None
}
});
let model = if let Some(model) = model {
model
} else {
log::info!("unhandled message {}", type_name);
continue;
};
let handler = state.message_handlers.get(&payload_type_id).cloned();
// Dropping the state prevents deadlocks if the handler interacts with rpc::Client.
// It also ensures we don't hold the lock while yielding back to the executor, as
// that might cause the executor thread driving this future to block indefinitely.
drop(state);
if let Some(handler) = handler {
let future = handler(model, message, &this, cx.clone());
let client_id = this.id;
log::debug!(
"rpc message received. client_id:{}, message_id:{}, sender_id:{:?}, type:{}",
client_id,
message_id,
sender_id,
type_name
);
cx.foreground()
.spawn(async move {
match future.await {
Ok(()) => {
log::debug!(
"rpc message handled. client_id:{}, message_id:{}, sender_id:{:?}, type:{}",
client_id,
message_id,
sender_id,
type_name
);
}
Err(error) => {
log::error!(
"error handling message. client_id:{}, message_id:{}, sender_id:{:?}, type:{}, error:{:?}",
client_id,
message_id,
sender_id,
type_name,
error
);
}
}
})
.detach();
} else {
log::info!("unhandled message {}", type_name);
}
this.handle_message(message, &cx);
// Don't starve the main thread when receiving lots of messages at once.
smol::future::yield_now().await;
}
@@ -885,7 +857,7 @@ impl Client {
.spawn(async move {
match handle_io.await {
Ok(()) => {
if *this.status().borrow()
if this.status().borrow().clone()
== (Status::Connected {
connection_id,
peer_id,
@@ -1218,8 +1190,99 @@ impl Client {
self.peer.respond_with_error(receipt, error)
}
pub fn start_telemetry(&self, db: Db) {
self.telemetry.start(db.clone());
fn handle_message(
self: &Arc<Client>,
message: Box<dyn AnyTypedEnvelope>,
cx: &AsyncAppContext,
) {
let mut state = self.state.write();
let type_name = message.payload_type_name();
let payload_type_id = message.payload_type_id();
let sender_id = message.original_sender_id();
let mut subscriber = None;
if let Some(message_model) = state
.models_by_message_type
.get(&payload_type_id)
.and_then(|model| model.upgrade(cx))
{
subscriber = Some(Subscriber::Model(message_model));
} else if let Some((extract_entity_id, entity_type_id)) =
state.entity_id_extractors.get(&payload_type_id).zip(
state
.entity_types_by_message_type
.get(&payload_type_id)
.copied(),
)
{
let entity_id = (extract_entity_id)(message.as_ref());
match state
.entities_by_type_and_remote_id
.get_mut(&(entity_type_id, entity_id))
{
Some(WeakSubscriber::Pending(pending)) => {
pending.push(message);
return;
}
Some(weak_subscriber @ _) => subscriber = weak_subscriber.upgrade(cx),
_ => {}
}
}
let subscriber = if let Some(subscriber) = subscriber {
subscriber
} else {
log::info!("unhandled message {}", type_name);
return;
};
let handler = state.message_handlers.get(&payload_type_id).cloned();
// Dropping the state prevents deadlocks if the handler interacts with rpc::Client.
// It also ensures we don't hold the lock while yielding back to the executor, as
// that might cause the executor thread driving this future to block indefinitely.
drop(state);
if let Some(handler) = handler {
let future = handler(subscriber, message, &self, cx.clone());
let client_id = self.id;
log::debug!(
"rpc message received. client_id:{}, sender_id:{:?}, type:{}",
client_id,
sender_id,
type_name
);
cx.foreground()
.spawn(async move {
match future.await {
Ok(()) => {
log::debug!(
"rpc message handled. client_id:{}, sender_id:{:?}, type:{}",
client_id,
sender_id,
type_name
);
}
Err(error) => {
log::error!(
"error handling message. client_id:{}, sender_id:{:?}, type:{}, error:{:?}",
client_id,
sender_id,
type_name,
error
);
}
}
})
.detach();
} else {
log::info!("unhandled message {}", type_name);
}
}
pub fn start_telemetry(&self) {
self.telemetry.start();
}
pub fn report_event(&self, kind: &str, properties: Value) {
@@ -1231,11 +1294,12 @@ impl Client {
}
}
impl AnyWeakEntityHandle {
fn upgrade(&self, cx: &AsyncAppContext) -> Option<AnyEntityHandle> {
impl WeakSubscriber {
fn upgrade(&self, cx: &AsyncAppContext) -> Option<Subscriber> {
match self {
AnyWeakEntityHandle::Model(handle) => handle.upgrade(cx).map(AnyEntityHandle::Model),
AnyWeakEntityHandle::View(handle) => handle.upgrade(cx).map(AnyEntityHandle::View),
WeakSubscriber::Model(handle) => handle.upgrade(cx).map(Subscriber::Model),
WeakSubscriber::View(handle) => handle.upgrade(cx).map(Subscriber::View),
WeakSubscriber::Pending(_) => None,
}
}
}
@@ -1480,11 +1544,17 @@ mod tests {
subscription: None,
});
let _subscription1 = model1.update(cx, |_, cx| client.add_model_for_remote_entity(1, cx));
let _subscription2 = model2.update(cx, |_, cx| client.add_model_for_remote_entity(2, cx));
let _subscription1 = client
.subscribe_to_entity(1)
.set_model(&model1, &mut cx.to_async());
let _subscription2 = client
.subscribe_to_entity(2)
.set_model(&model2, &mut cx.to_async());
// Ensure dropping a subscription for the same entity type still allows receiving of
// messages for other entity IDs of the same type.
let subscription3 = model3.update(cx, |_, cx| client.add_model_for_remote_entity(3, cx));
let subscription3 = client
.subscribe_to_entity(3)
.set_model(&model3, &mut cx.to_async());
drop(subscription3);
server.send(proto::JoinProject { project_id: 1 });
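The PendingEntitySubscription introduced above closes a race: messages for a remote entity can arrive between subscribing and constructing the model, and are now buffered and replayed by set_model. A toy model of that buffering scheme, with simplified types rather than the real client's:

use std::collections::HashMap;

enum Slot {
    // Messages that arrive before a handler is registered are buffered.
    Pending(Vec<String>),
    Ready(fn(&str)),
}

struct Router {
    slots: HashMap<u64, Slot>,
}

impl Router {
    fn subscribe(&mut self, id: u64) {
        self.slots.insert(id, Slot::Pending(Vec::new()));
    }

    // The analogue of `set_model`: swap in the handler, then drain any
    // messages that raced ahead of registration.
    fn register(&mut self, id: u64, handler: fn(&str)) {
        if let Some(Slot::Pending(buffered)) = self.slots.insert(id, Slot::Ready(handler)) {
            for message in buffered {
                handler(&message);
            }
        }
    }

    fn deliver(&mut self, id: u64, message: &str) {
        match self.slots.get_mut(&id) {
            Some(Slot::Pending(buffered)) => buffered.push(message.to_string()),
            Some(Slot::Ready(handler)) => handler(message),
            None => println!("unhandled message: {message}"),
        }
    }
}

fn main() {
    let mut router = Router { slots: HashMap::new() };
    router.subscribe(1);
    router.deliver(1, "early"); // buffered until register runs
    router.register(1, |m| println!("handled: {m}"));
    router.deliver(1, "late"); // handled immediately
}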

View File

@@ -1,5 +1,5 @@
use crate::http::HttpClient;
use db::Db;
use db::kvp::KEY_VALUE_STORE;
use gpui::{
executor::Background,
serde_json::{self, value::Map, Value},
@@ -10,7 +10,6 @@ use lazy_static::lazy_static;
use parking_lot::Mutex;
use serde::Serialize;
use serde_json::json;
use settings::ReleaseChannel;
use std::{
io::Write,
mem,
@@ -19,7 +18,7 @@ use std::{
time::{Duration, SystemTime, UNIX_EPOCH},
};
use tempfile::NamedTempFile;
use util::{post_inc, ResultExt, TryFutureExt};
use util::{channel::ReleaseChannel, post_inc, ResultExt, TryFutureExt};
use uuid::Uuid;
pub struct Telemetry {
@@ -107,7 +106,7 @@ impl Telemetry {
pub fn new(client: Arc<dyn HttpClient>, cx: &AppContext) -> Arc<Self> {
let platform = cx.platform();
let release_channel = if cx.has_global::<ReleaseChannel>() {
Some(cx.global::<ReleaseChannel>().name())
Some(cx.global::<ReleaseChannel>().display_name())
} else {
None
};
@@ -148,18 +147,21 @@ impl Telemetry {
Some(self.state.lock().log_file.as_ref()?.path().to_path_buf())
}
pub fn start(self: &Arc<Self>, db: Db) {
pub fn start(self: &Arc<Self>) {
let this = self.clone();
self.executor
.spawn(
async move {
let device_id = if let Ok(Some(device_id)) = db.read_kvp("device_id") {
device_id
} else {
let device_id = Uuid::new_v4().to_string();
db.write_kvp("device_id", &device_id)?;
device_id
};
let device_id =
if let Ok(Some(device_id)) = KEY_VALUE_STORE.read_kvp("device_id") {
device_id
} else {
let device_id = Uuid::new_v4().to_string();
KEY_VALUE_STORE
.write_kvp("device_id".to_string(), device_id.clone())
.await?;
device_id
};
let device_id: Arc<str> = device_id.into();
let mut state = this.state.lock();

View File

@@ -35,7 +35,7 @@ impl FakeServer {
cx: &TestAppContext,
) -> Self {
let server = Self {
peer: Peer::new(),
peer: Peer::new(0),
state: Default::default(),
user_id: client_user_id,
executor: cx.foreground(),
@@ -92,7 +92,7 @@ impl FakeServer {
peer.send(
connection_id,
proto::Hello {
peer_id: connection_id.0,
peer_id: Some(connection_id.into()),
},
)
.unwrap();

View File

@@ -150,7 +150,6 @@ impl UserStore {
client.telemetry.set_authenticated_user_info(None, false);
}
client.telemetry.report_event("sign in", Default::default());
current_user_tx.send(user).await.ok();
}
}

View File

@@ -2,6 +2,7 @@ DATABASE_URL = "postgres://postgres@localhost/zed"
HTTP_PORT = 8080
API_TOKEN = "secret"
INVITE_LINK_PREFIX = "http://localhost:3000/invites/"
ZED_ENVIRONMENT = "development"
LIVE_KIT_SERVER = "http://localhost:7880"
LIVE_KIT_KEY = "devkey"
LIVE_KIT_SECRET = "secret"

View File

@@ -3,7 +3,7 @@ authors = ["Nathan Sobo <nathan@zed.dev>"]
default-run = "collab"
edition = "2021"
name = "collab"
version = "0.2.3"
version = "0.4.2"
[[bin]]
name = "collab"
@@ -19,12 +19,12 @@ rpc = { path = "../rpc" }
util = { path = "../util" }
anyhow = "1.0.40"
async-trait = "0.1.50"
async-tungstenite = "0.16"
axum = { version = "0.5", features = ["json", "headers", "ws"] }
axum-extra = { version = "0.3", features = ["erased-json"] }
base64 = "0.13"
clap = { version = "3.1", features = ["derive"], optional = true }
dashmap = "5.4"
envy = "0.4.2"
futures = "0.3"
hyper = "0.14"
@@ -36,9 +36,13 @@ prometheus = "0.13"
rand = "0.8"
reqwest = { version = "0.11", features = ["json"], optional = true }
scrypt = "0.7"
# Remove fork dependency when a version with https://github.com/SeaQL/sea-orm/pull/1283 is released.
sea-orm = { git = "https://github.com/zed-industries/sea-orm", rev = "18f4c691085712ad014a51792af75a9044bacee6", features = ["sqlx-postgres", "postgres-array", "runtime-tokio-rustls"] }
sea-query = "0.27"
serde = { version = "1.0", features = ["derive", "rc"] }
serde_json = "1.0"
sha-1 = "0.9"
sqlx = { version = "0.6", features = ["runtime-tokio-rustls", "postgres", "json", "time", "uuid", "any"] }
time = { version = "0.3", features = ["serde", "serde-well-known"] }
tokio = { version = "1", features = ["full"] }
tokio-tungstenite = "0.17"
@@ -49,11 +53,6 @@ tracing = "0.1.34"
tracing-log = "0.1.3"
tracing-subscriber = { version = "0.3.11", features = ["env-filter", "json"] }
[dependencies.sqlx]
git = "https://github.com/launchbadge/sqlx"
rev = "4b7053807c705df312bcb9b6281e184bf7534eb3"
features = ["runtime-tokio-rustls", "postgres", "json", "time", "uuid"]
[dev-dependencies]
collections = { path = "../collections", features = ["test-support"] }
gpui = { path = "../gpui", features = ["test-support"] }
@@ -65,6 +64,7 @@ fs = { path = "../fs", features = ["test-support"] }
git = { path = "../git", features = ["test-support"] }
live_kit_client = { path = "../live_kit_client", features = ["test-support"] }
lsp = { path = "../lsp", features = ["test-support"] }
pretty_assertions = "1.3.0"
project = { path = "../project", features = ["test-support"] }
rpc = { path = "../rpc", features = ["test-support"] }
settings = { path = "../settings", features = ["test-support"] }
@@ -76,13 +76,10 @@ env_logger = "0.9"
log = { version = "0.4.16", features = ["kv_unstable_serde"] }
util = { path = "../util" }
lazy_static = "1.4"
sea-orm = { git = "https://github.com/zed-industries/sea-orm", rev = "18f4c691085712ad014a51792af75a9044bacee6", features = ["sqlx-sqlite"] }
serde_json = { version = "1.0", features = ["preserve_order"] }
sqlx = { version = "0.6", features = ["sqlite"] }
unindent = "0.1"
[dev-dependencies.sqlx]
git = "https://github.com/launchbadge/sqlx"
rev = "4b7053807c705df312bcb9b6281e184bf7534eb3"
features = ["sqlite"]
[features]
seed-support = ["clap", "lipsum", "reqwest"]

View File

@@ -59,6 +59,12 @@ spec:
ports:
- containerPort: 8080
protocol: TCP
readinessProbe:
httpGet:
path: /
port: 8080
initialDelaySeconds: 1
periodSeconds: 1
env:
- name: HTTP_PORT
value: "8080"
@@ -93,6 +99,8 @@ spec:
value: ${RUST_LOG}
- name: LOG_JSON
value: "true"
- name: ZED_ENVIRONMENT
value: ${ZED_ENVIRONMENT}
securityContext:
capabilities:
# FIXME - Switch to the more restrictive `PERFMON` capability.

View File

@@ -1,5 +1,5 @@
CREATE TABLE IF NOT EXISTS "users" (
"id" INTEGER PRIMARY KEY,
CREATE TABLE "users" (
"id" INTEGER PRIMARY KEY AUTOINCREMENT,
"github_login" VARCHAR,
"admin" BOOLEAN,
"email_address" VARCHAR(255) DEFAULT NULL,
@@ -8,7 +8,7 @@ CREATE TABLE IF NOT EXISTS "users" (
"inviter_id" INTEGER REFERENCES users (id),
"connected_once" BOOLEAN NOT NULL DEFAULT false,
"created_at" TIMESTAMP NOT NULL DEFAULT now,
"metrics_id" VARCHAR(255),
"metrics_id" TEXT,
"github_user_id" INTEGER
);
CREATE UNIQUE INDEX "index_users_github_login" ON "users" ("github_login");
@@ -16,15 +16,15 @@ CREATE UNIQUE INDEX "index_invite_code_users" ON "users" ("invite_code");
CREATE INDEX "index_users_on_email_address" ON "users" ("email_address");
CREATE INDEX "index_users_on_github_user_id" ON "users" ("github_user_id");
CREATE TABLE IF NOT EXISTS "access_tokens" (
"id" INTEGER PRIMARY KEY,
CREATE TABLE "access_tokens" (
"id" INTEGER PRIMARY KEY AUTOINCREMENT,
"user_id" INTEGER REFERENCES users (id),
"hash" VARCHAR(128)
);
CREATE INDEX "index_access_tokens_user_id" ON "access_tokens" ("user_id");
CREATE TABLE IF NOT EXISTS "contacts" (
"id" INTEGER PRIMARY KEY,
CREATE TABLE "contacts" (
"id" INTEGER PRIMARY KEY AUTOINCREMENT,
"user_id_a" INTEGER REFERENCES users (id) NOT NULL,
"user_id_b" INTEGER REFERENCES users (id) NOT NULL,
"a_to_b" BOOLEAN NOT NULL,
@@ -34,8 +34,109 @@ CREATE TABLE IF NOT EXISTS "contacts" (
CREATE UNIQUE INDEX "index_contacts_user_ids" ON "contacts" ("user_id_a", "user_id_b");
CREATE INDEX "index_contacts_user_id_b" ON "contacts" ("user_id_b");
CREATE TABLE IF NOT EXISTS "projects" (
"id" INTEGER PRIMARY KEY,
"host_user_id" INTEGER REFERENCES users (id) NOT NULL,
"unregistered" BOOLEAN NOT NULL DEFAULT false
CREATE TABLE "rooms" (
"id" INTEGER PRIMARY KEY AUTOINCREMENT,
"live_kit_room" VARCHAR NOT NULL
);
CREATE TABLE "projects" (
"id" INTEGER PRIMARY KEY AUTOINCREMENT,
"room_id" INTEGER REFERENCES rooms (id) NOT NULL,
"host_user_id" INTEGER REFERENCES users (id) NOT NULL,
"host_connection_id" INTEGER,
"host_connection_server_id" INTEGER REFERENCES servers (id) ON DELETE CASCADE,
"unregistered" BOOLEAN NOT NULL DEFAULT FALSE
);
CREATE INDEX "index_projects_on_host_connection_server_id" ON "projects" ("host_connection_server_id");
CREATE INDEX "index_projects_on_host_connection_id_and_host_connection_server_id" ON "projects" ("host_connection_id", "host_connection_server_id");
CREATE TABLE "worktrees" (
"project_id" INTEGER NOT NULL REFERENCES projects (id) ON DELETE CASCADE,
"id" INTEGER NOT NULL,
"root_name" VARCHAR NOT NULL,
"abs_path" VARCHAR NOT NULL,
"visible" BOOL NOT NULL,
"scan_id" INTEGER NOT NULL,
"is_complete" BOOL NOT NULL,
PRIMARY KEY(project_id, id)
);
CREATE INDEX "index_worktrees_on_project_id" ON "worktrees" ("project_id");
CREATE TABLE "worktree_entries" (
"project_id" INTEGER NOT NULL,
"worktree_id" INTEGER NOT NULL,
"id" INTEGER NOT NULL,
"is_dir" BOOL NOT NULL,
"path" VARCHAR NOT NULL,
"inode" INTEGER NOT NULL,
"mtime_seconds" INTEGER NOT NULL,
"mtime_nanos" INTEGER NOT NULL,
"is_symlink" BOOL NOT NULL,
"is_ignored" BOOL NOT NULL,
PRIMARY KEY(project_id, worktree_id, id),
FOREIGN KEY(project_id, worktree_id) REFERENCES worktrees (project_id, id) ON DELETE CASCADE
);
CREATE INDEX "index_worktree_entries_on_project_id" ON "worktree_entries" ("project_id");
CREATE INDEX "index_worktree_entries_on_project_id_and_worktree_id" ON "worktree_entries" ("project_id", "worktree_id");
CREATE TABLE "worktree_diagnostic_summaries" (
"project_id" INTEGER NOT NULL,
"worktree_id" INTEGER NOT NULL,
"path" VARCHAR NOT NULL,
"language_server_id" INTEGER NOT NULL,
"error_count" INTEGER NOT NULL,
"warning_count" INTEGER NOT NULL,
PRIMARY KEY(project_id, worktree_id, path),
FOREIGN KEY(project_id, worktree_id) REFERENCES worktrees (project_id, id) ON DELETE CASCADE
);
CREATE INDEX "index_worktree_diagnostic_summaries_on_project_id" ON "worktree_diagnostic_summaries" ("project_id");
CREATE INDEX "index_worktree_diagnostic_summaries_on_project_id_and_worktree_id" ON "worktree_diagnostic_summaries" ("project_id", "worktree_id");
CREATE TABLE "language_servers" (
"id" INTEGER NOT NULL,
"project_id" INTEGER NOT NULL REFERENCES projects (id) ON DELETE CASCADE,
"name" VARCHAR NOT NULL,
PRIMARY KEY(project_id, id)
);
CREATE INDEX "index_language_servers_on_project_id" ON "language_servers" ("project_id");
CREATE TABLE "project_collaborators" (
"id" INTEGER PRIMARY KEY AUTOINCREMENT,
"project_id" INTEGER NOT NULL REFERENCES projects (id) ON DELETE CASCADE,
"connection_id" INTEGER NOT NULL,
"connection_server_id" INTEGER NOT NULL REFERENCES servers (id) ON DELETE CASCADE,
"user_id" INTEGER NOT NULL,
"replica_id" INTEGER NOT NULL,
"is_host" BOOLEAN NOT NULL
);
CREATE INDEX "index_project_collaborators_on_project_id" ON "project_collaborators" ("project_id");
CREATE UNIQUE INDEX "index_project_collaborators_on_project_id_and_replica_id" ON "project_collaborators" ("project_id", "replica_id");
CREATE INDEX "index_project_collaborators_on_connection_server_id" ON "project_collaborators" ("connection_server_id");
CREATE INDEX "index_project_collaborators_on_connection_id" ON "project_collaborators" ("connection_id");
CREATE UNIQUE INDEX "index_project_collaborators_on_project_id_connection_id_and_server_id" ON "project_collaborators" ("project_id", "connection_id", "connection_server_id");
CREATE TABLE "room_participants" (
"id" INTEGER PRIMARY KEY AUTOINCREMENT,
"room_id" INTEGER NOT NULL REFERENCES rooms (id),
"user_id" INTEGER NOT NULL REFERENCES users (id),
"answering_connection_id" INTEGER,
"answering_connection_server_id" INTEGER REFERENCES servers (id) ON DELETE CASCADE,
"answering_connection_lost" BOOLEAN NOT NULL,
"location_kind" INTEGER,
"location_project_id" INTEGER,
"initial_project_id" INTEGER,
"calling_user_id" INTEGER NOT NULL REFERENCES users (id),
"calling_connection_id" INTEGER NOT NULL,
"calling_connection_server_id" INTEGER REFERENCES servers (id) ON DELETE SET NULL
);
CREATE UNIQUE INDEX "index_room_participants_on_user_id" ON "room_participants" ("user_id");
CREATE INDEX "index_room_participants_on_room_id" ON "room_participants" ("room_id");
CREATE INDEX "index_room_participants_on_answering_connection_server_id" ON "room_participants" ("answering_connection_server_id");
CREATE INDEX "index_room_participants_on_calling_connection_server_id" ON "room_participants" ("calling_connection_server_id");
CREATE INDEX "index_room_participants_on_answering_connection_id" ON "room_participants" ("answering_connection_id");
CREATE UNIQUE INDEX "index_room_participants_on_answering_connection_id_and_answering_connection_server_id" ON "room_participants" ("answering_connection_id", "answering_connection_server_id");
CREATE TABLE "servers" (
"id" INTEGER PRIMARY KEY AUTOINCREMENT,
"environment" VARCHAR NOT NULL
);

View File

@@ -0,0 +1,90 @@
CREATE TABLE IF NOT EXISTS "rooms" (
"id" SERIAL PRIMARY KEY,
"live_kit_room" VARCHAR NOT NULL
);
ALTER TABLE "projects"
ADD "room_id" INTEGER REFERENCES rooms (id),
ADD "host_connection_id" INTEGER,
ADD "host_connection_epoch" UUID;
CREATE INDEX "index_projects_on_host_connection_epoch" ON "projects" ("host_connection_epoch");
CREATE TABLE "worktrees" (
"project_id" INTEGER NOT NULL REFERENCES projects (id) ON DELETE CASCADE,
"id" INT8 NOT NULL,
"root_name" VARCHAR NOT NULL,
"abs_path" VARCHAR NOT NULL,
"visible" BOOL NOT NULL,
"scan_id" INT8 NOT NULL,
"is_complete" BOOL NOT NULL,
PRIMARY KEY(project_id, id)
);
CREATE INDEX "index_worktrees_on_project_id" ON "worktrees" ("project_id");
CREATE TABLE "worktree_entries" (
"project_id" INTEGER NOT NULL,
"worktree_id" INT8 NOT NULL,
"id" INT8 NOT NULL,
"is_dir" BOOL NOT NULL,
"path" VARCHAR NOT NULL,
"inode" INT8 NOT NULL,
"mtime_seconds" INT8 NOT NULL,
"mtime_nanos" INTEGER NOT NULL,
"is_symlink" BOOL NOT NULL,
"is_ignored" BOOL NOT NULL,
PRIMARY KEY(project_id, worktree_id, id),
FOREIGN KEY(project_id, worktree_id) REFERENCES worktrees (project_id, id) ON DELETE CASCADE
);
CREATE INDEX "index_worktree_entries_on_project_id" ON "worktree_entries" ("project_id");
CREATE INDEX "index_worktree_entries_on_project_id_and_worktree_id" ON "worktree_entries" ("project_id", "worktree_id");
CREATE TABLE "worktree_diagnostic_summaries" (
"project_id" INTEGER NOT NULL,
"worktree_id" INT8 NOT NULL,
"path" VARCHAR NOT NULL,
"language_server_id" INT8 NOT NULL,
"error_count" INTEGER NOT NULL,
"warning_count" INTEGER NOT NULL,
PRIMARY KEY(project_id, worktree_id, path),
FOREIGN KEY(project_id, worktree_id) REFERENCES worktrees (project_id, id) ON DELETE CASCADE
);
CREATE INDEX "index_worktree_diagnostic_summaries_on_project_id" ON "worktree_diagnostic_summaries" ("project_id");
CREATE INDEX "index_worktree_diagnostic_summaries_on_project_id_and_worktree_id" ON "worktree_diagnostic_summaries" ("project_id", "worktree_id");
CREATE TABLE "language_servers" (
"project_id" INTEGER NOT NULL REFERENCES projects (id) ON DELETE CASCADE,
"id" INT8 NOT NULL,
"name" VARCHAR NOT NULL,
PRIMARY KEY(project_id, id)
);
CREATE INDEX "index_language_servers_on_project_id" ON "language_servers" ("project_id");
CREATE TABLE "project_collaborators" (
"id" SERIAL PRIMARY KEY,
"project_id" INTEGER NOT NULL REFERENCES projects (id) ON DELETE CASCADE,
"connection_id" INTEGER NOT NULL,
"connection_epoch" UUID NOT NULL,
"user_id" INTEGER NOT NULL,
"replica_id" INTEGER NOT NULL,
"is_host" BOOLEAN NOT NULL
);
CREATE INDEX "index_project_collaborators_on_project_id" ON "project_collaborators" ("project_id");
CREATE UNIQUE INDEX "index_project_collaborators_on_project_id_and_replica_id" ON "project_collaborators" ("project_id", "replica_id");
CREATE INDEX "index_project_collaborators_on_connection_epoch" ON "project_collaborators" ("connection_epoch");
CREATE TABLE "room_participants" (
"id" SERIAL PRIMARY KEY,
"room_id" INTEGER NOT NULL REFERENCES rooms (id),
"user_id" INTEGER NOT NULL REFERENCES users (id),
"answering_connection_id" INTEGER,
"answering_connection_epoch" UUID,
"location_kind" INTEGER,
"location_project_id" INTEGER,
"initial_project_id" INTEGER,
"calling_user_id" INTEGER NOT NULL REFERENCES users (id),
"calling_connection_id" INTEGER NOT NULL,
"calling_connection_epoch" UUID NOT NULL
);
CREATE UNIQUE INDEX "index_room_participants_on_user_id" ON "room_participants" ("user_id");
CREATE INDEX "index_room_participants_on_answering_connection_epoch" ON "room_participants" ("answering_connection_epoch");
CREATE INDEX "index_room_participants_on_calling_connection_epoch" ON "room_participants" ("calling_connection_epoch");

View File

@@ -0,0 +1,2 @@
ALTER TABLE "signups"
ADD "added_to_mailing_list" BOOLEAN NOT NULL DEFAULT FALSE;

View File

@@ -0,0 +1,7 @@
ALTER TABLE "room_participants"
ADD "answering_connection_lost" BOOLEAN NOT NULL DEFAULT FALSE;
CREATE INDEX "index_project_collaborators_on_connection_id" ON "project_collaborators" ("connection_id");
CREATE UNIQUE INDEX "index_project_collaborators_on_project_id_connection_id_and_epoch" ON "project_collaborators" ("project_id", "connection_id", "connection_epoch");
CREATE INDEX "index_room_participants_on_answering_connection_id" ON "room_participants" ("answering_connection_id");
CREATE UNIQUE INDEX "index_room_participants_on_answering_connection_id_and_answering_connection_epoch" ON "room_participants" ("answering_connection_id", "answering_connection_epoch");

View File

@@ -0,0 +1 @@
CREATE INDEX "index_room_participants_on_room_id" ON "room_participants" ("room_id");

View File

@@ -0,0 +1,30 @@
CREATE TABLE servers (
id SERIAL PRIMARY KEY,
environment VARCHAR NOT NULL
);
DROP TABLE worktree_extensions;
DROP TABLE project_activity_periods;
DELETE FROM projects;
ALTER TABLE projects
DROP COLUMN host_connection_epoch,
ADD COLUMN host_connection_server_id INTEGER REFERENCES servers (id) ON DELETE CASCADE;
CREATE INDEX "index_projects_on_host_connection_server_id" ON "projects" ("host_connection_server_id");
CREATE INDEX "index_projects_on_host_connection_id_and_host_connection_server_id" ON "projects" ("host_connection_id", "host_connection_server_id");
DELETE FROM project_collaborators;
ALTER TABLE project_collaborators
DROP COLUMN connection_epoch,
ADD COLUMN connection_server_id INTEGER NOT NULL REFERENCES servers (id) ON DELETE CASCADE;
CREATE INDEX "index_project_collaborators_on_connection_server_id" ON "project_collaborators" ("connection_server_id");
CREATE UNIQUE INDEX "index_project_collaborators_on_project_id_connection_id_and_server_id" ON "project_collaborators" ("project_id", "connection_id", "connection_server_id");
DELETE FROM room_participants;
ALTER TABLE room_participants
DROP COLUMN answering_connection_epoch,
DROP COLUMN calling_connection_epoch,
ADD COLUMN answering_connection_server_id INTEGER REFERENCES servers (id) ON DELETE CASCADE,
ADD COLUMN calling_connection_server_id INTEGER REFERENCES servers (id) ON DELETE SET NULL;
CREATE INDEX "index_room_participants_on_answering_connection_server_id" ON "room_participants" ("answering_connection_server_id");
CREATE INDEX "index_room_participants_on_calling_connection_server_id" ON "room_participants" ("calling_connection_server_id");
CREATE UNIQUE INDEX "index_room_participants_on_answering_connection_id_and_answering_connection_server_id" ON "room_participants" ("answering_connection_id", "answering_connection_server_id");

View File

@@ -1,6 +1,6 @@
use crate::{
auth,
db::{Invite, NewUserParams, Signup, User, UserId, WaitlistSummary},
db::{Invite, NewSignup, NewUserParams, User, UserId, WaitlistSummary},
rpc::{self, ResultExt},
AppState, Error, Result,
};
@@ -204,7 +204,7 @@ async fn create_user(
#[derive(Deserialize)]
struct UpdateUserParams {
admin: Option<bool>,
invite_count: Option<u32>,
invite_count: Option<i32>,
}
async fn update_user(
@@ -335,7 +335,7 @@ async fn get_user_for_invite_code(
}
async fn create_signup(
Json(params): Json<Signup>,
Json(params): Json<NewSignup>,
Extension(app): Extension<Arc<AppState>>,
) -> Result<()> {
app.db.create_signup(&params).await?;

View File

@@ -75,7 +75,7 @@ pub async fn validate_header<B>(mut req: Request<B>, next: Next<B>) -> impl Into
const MAX_ACCESS_TOKENS_TO_STORE: usize = 8;
pub async fn create_access_token(db: &db::DefaultDb, user_id: UserId) -> Result<String> {
pub async fn create_access_token(db: &db::Database, user_id: UserId) -> Result<String> {
let access_token = rpc::auth::random_token();
let access_token_hash =
hash_access_token(&access_token).context("failed to hash access token")?;

View File

@@ -1,12 +1,8 @@
use collab::{Error, Result};
use db::DefaultDb;
use collab::db;
use db::{ConnectOptions, Database};
use serde::{de::DeserializeOwned, Deserialize};
use std::fmt::Write;
#[allow(unused)]
#[path = "../db.rs"]
mod db;
#[derive(Debug, Deserialize)]
struct GitHubUser {
id: i32,
@@ -17,7 +13,7 @@ struct GitHubUser {
#[tokio::main]
async fn main() {
let database_url = std::env::var("DATABASE_URL").expect("missing DATABASE_URL env var");
let db = DefaultDb::new(&database_url, 5)
let db = Database::new(ConnectOptions::new(database_url))
.await
.expect("failed to connect to postgres database");
let github_token = std::env::var("GITHUB_TOKEN").expect("missing GITHUB_TOKEN env var");

File diff suppressed because it is too large

View File

@@ -0,0 +1,29 @@
use super::{AccessTokenId, UserId};
use sea_orm::entity::prelude::*;
#[derive(Clone, Debug, PartialEq, Eq, DeriveEntityModel)]
#[sea_orm(table_name = "access_tokens")]
pub struct Model {
#[sea_orm(primary_key)]
pub id: AccessTokenId,
pub user_id: UserId,
pub hash: String,
}
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
pub enum Relation {
#[sea_orm(
belongs_to = "super::user::Entity",
from = "Column::UserId",
to = "super::user::Column::Id"
)]
User,
}
impl Related<super::user::Entity> for Entity {
fn to() -> RelationDef {
Relation::User.def()
}
}
impl ActiveModelBehavior for ActiveModel {}

View File

@@ -0,0 +1,58 @@
use super::{ContactId, UserId};
use sea_orm::entity::prelude::*;
#[derive(Clone, Debug, Default, PartialEq, Eq, DeriveEntityModel)]
#[sea_orm(table_name = "contacts")]
pub struct Model {
#[sea_orm(primary_key)]
pub id: ContactId,
pub user_id_a: UserId,
pub user_id_b: UserId,
pub a_to_b: bool,
pub should_notify: bool,
pub accepted: bool,
}
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
pub enum Relation {
#[sea_orm(
belongs_to = "super::room_participant::Entity",
from = "Column::UserIdA",
to = "super::room_participant::Column::UserId"
)]
UserARoomParticipant,
#[sea_orm(
belongs_to = "super::room_participant::Entity",
from = "Column::UserIdB",
to = "super::room_participant::Column::UserId"
)]
UserBRoomParticipant,
}
impl ActiveModelBehavior for ActiveModel {}
#[derive(Clone, Debug, PartialEq, Eq)]
pub enum Contact {
Accepted {
user_id: UserId,
should_notify: bool,
busy: bool,
},
Outgoing {
user_id: UserId,
},
Incoming {
user_id: UserId,
should_notify: bool,
},
}
impl Contact {
pub fn user_id(&self) -> UserId {
match self {
Contact::Accepted { user_id, .. } => *user_id,
Contact::Outgoing { user_id } => *user_id,
Contact::Incoming { user_id, .. } => *user_id,
}
}
}

View File

@@ -0,0 +1,30 @@
use super::ProjectId;
use sea_orm::entity::prelude::*;
#[derive(Clone, Debug, PartialEq, Eq, DeriveEntityModel)]
#[sea_orm(table_name = "language_servers")]
pub struct Model {
#[sea_orm(primary_key)]
pub project_id: ProjectId,
#[sea_orm(primary_key)]
pub id: i64,
pub name: String,
}
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
pub enum Relation {
#[sea_orm(
belongs_to = "super::project::Entity",
from = "Column::ProjectId",
to = "super::project::Column::Id"
)]
Project,
}
impl Related<super::project::Entity> for Entity {
fn to() -> RelationDef {
Relation::Project.def()
}
}
impl ActiveModelBehavior for ActiveModel {}

View File

@@ -0,0 +1,84 @@
use super::{ProjectId, Result, RoomId, ServerId, UserId};
use anyhow::anyhow;
use rpc::ConnectionId;
use sea_orm::entity::prelude::*;
#[derive(Clone, Debug, PartialEq, Eq, DeriveEntityModel)]
#[sea_orm(table_name = "projects")]
pub struct Model {
#[sea_orm(primary_key)]
pub id: ProjectId,
pub room_id: RoomId,
pub host_user_id: UserId,
pub host_connection_id: Option<i32>,
pub host_connection_server_id: Option<ServerId>,
}
impl Model {
pub fn host_connection(&self) -> Result<ConnectionId> {
let host_connection_server_id = self
.host_connection_server_id
.ok_or_else(|| anyhow!("empty host_connection_server_id"))?;
let host_connection_id = self
.host_connection_id
.ok_or_else(|| anyhow!("empty host_connection_id"))?;
Ok(ConnectionId {
owner_id: host_connection_server_id.0 as u32,
id: host_connection_id as u32,
})
}
}
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
pub enum Relation {
#[sea_orm(
belongs_to = "super::user::Entity",
from = "Column::HostUserId",
to = "super::user::Column::Id"
)]
HostUser,
#[sea_orm(
belongs_to = "super::room::Entity",
from = "Column::RoomId",
to = "super::room::Column::Id"
)]
Room,
#[sea_orm(has_many = "super::worktree::Entity")]
Worktrees,
#[sea_orm(has_many = "super::project_collaborator::Entity")]
Collaborators,
#[sea_orm(has_many = "super::language_server::Entity")]
LanguageServers,
}
impl Related<super::user::Entity> for Entity {
fn to() -> RelationDef {
Relation::HostUser.def()
}
}
impl Related<super::room::Entity> for Entity {
fn to() -> RelationDef {
Relation::Room.def()
}
}
impl Related<super::worktree::Entity> for Entity {
fn to() -> RelationDef {
Relation::Worktrees.def()
}
}
impl Related<super::project_collaborator::Entity> for Entity {
fn to() -> RelationDef {
Relation::Collaborators.def()
}
}
impl Related<super::language_server::Entity> for Entity {
fn to() -> RelationDef {
Relation::LanguageServers.def()
}
}
impl ActiveModelBehavior for ActiveModel {}

View File

@@ -0,0 +1,33 @@
use super::{ProjectCollaboratorId, ProjectId, ReplicaId, ServerId, UserId};
use sea_orm::entity::prelude::*;
#[derive(Clone, Debug, PartialEq, Eq, DeriveEntityModel)]
#[sea_orm(table_name = "project_collaborators")]
pub struct Model {
#[sea_orm(primary_key)]
pub id: ProjectCollaboratorId,
pub project_id: ProjectId,
pub connection_id: i32,
pub connection_server_id: ServerId,
pub user_id: UserId,
pub replica_id: ReplicaId,
pub is_host: bool,
}
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
pub enum Relation {
#[sea_orm(
belongs_to = "super::project::Entity",
from = "Column::ProjectId",
to = "super::project::Column::Id"
)]
Project,
}
impl Related<super::project::Entity> for Entity {
fn to() -> RelationDef {
Relation::Project.def()
}
}
impl ActiveModelBehavior for ActiveModel {}

View File

@@ -0,0 +1,32 @@
use super::RoomId;
use sea_orm::entity::prelude::*;
#[derive(Clone, Debug, PartialEq, Eq, DeriveEntityModel)]
#[sea_orm(table_name = "rooms")]
pub struct Model {
#[sea_orm(primary_key)]
pub id: RoomId,
pub live_kit_room: String,
}
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
pub enum Relation {
#[sea_orm(has_many = "super::room_participant::Entity")]
RoomParticipant,
#[sea_orm(has_many = "super::project::Entity")]
Project,
}
impl Related<super::room_participant::Entity> for Entity {
fn to() -> RelationDef {
Relation::RoomParticipant.def()
}
}
impl Related<super::project::Entity> for Entity {
fn to() -> RelationDef {
Relation::Project.def()
}
}
impl ActiveModelBehavior for ActiveModel {}

View File

@@ -0,0 +1,50 @@
use super::{ProjectId, RoomId, RoomParticipantId, ServerId, UserId};
use sea_orm::entity::prelude::*;
#[derive(Clone, Debug, PartialEq, Eq, DeriveEntityModel)]
#[sea_orm(table_name = "room_participants")]
pub struct Model {
#[sea_orm(primary_key)]
pub id: RoomParticipantId,
pub room_id: RoomId,
pub user_id: UserId,
pub answering_connection_id: Option<i32>,
pub answering_connection_server_id: Option<ServerId>,
pub answering_connection_lost: bool,
pub location_kind: Option<i32>,
pub location_project_id: Option<ProjectId>,
pub initial_project_id: Option<ProjectId>,
pub calling_user_id: UserId,
pub calling_connection_id: i32,
pub calling_connection_server_id: Option<ServerId>,
}
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
pub enum Relation {
#[sea_orm(
belongs_to = "super::user::Entity",
from = "Column::UserId",
to = "super::user::Column::Id"
)]
User,
#[sea_orm(
belongs_to = "super::room::Entity",
from = "Column::RoomId",
to = "super::room::Column::Id"
)]
Room,
}
impl Related<super::user::Entity> for Entity {
fn to() -> RelationDef {
Relation::User.def()
}
}
impl Related<super::room::Entity> for Entity {
fn to() -> RelationDef {
Relation::Room.def()
}
}
impl ActiveModelBehavior for ActiveModel {}

View File

@@ -0,0 +1,15 @@
use super::ServerId;
use sea_orm::entity::prelude::*;
#[derive(Clone, Debug, PartialEq, Eq, DeriveEntityModel)]
#[sea_orm(table_name = "servers")]
pub struct Model {
#[sea_orm(primary_key)]
pub id: ServerId,
pub environment: String,
}
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
pub enum Relation {}
impl ActiveModelBehavior for ActiveModel {}

View File

@@ -0,0 +1,57 @@
use super::{SignupId, UserId};
use sea_orm::{entity::prelude::*, FromQueryResult};
use serde::{Deserialize, Serialize};
#[derive(Clone, Debug, PartialEq, Eq, DeriveEntityModel)]
#[sea_orm(table_name = "signups")]
pub struct Model {
#[sea_orm(primary_key)]
pub id: SignupId,
pub email_address: String,
pub email_confirmation_code: String,
pub email_confirmation_sent: bool,
pub created_at: DateTime,
pub device_id: Option<String>,
pub user_id: Option<UserId>,
pub inviting_user_id: Option<UserId>,
pub platform_mac: bool,
pub platform_linux: bool,
pub platform_windows: bool,
pub platform_unknown: bool,
pub editor_features: Option<Vec<String>>,
pub programming_languages: Option<Vec<String>>,
pub added_to_mailing_list: bool,
}
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
pub enum Relation {}
impl ActiveModelBehavior for ActiveModel {}
#[derive(Clone, Debug, PartialEq, Eq, FromQueryResult, Serialize, Deserialize)]
pub struct Invite {
pub email_address: String,
pub email_confirmation_code: String,
}
#[derive(Clone, Debug, Deserialize)]
pub struct NewSignup {
pub email_address: String,
pub platform_mac: bool,
pub platform_windows: bool,
pub platform_linux: bool,
pub editor_features: Vec<String>,
pub programming_languages: Vec<String>,
pub device_id: Option<String>,
pub added_to_mailing_list: bool,
pub created_at: Option<DateTime>,
}
#[derive(Clone, Debug, PartialEq, Deserialize, Serialize, FromQueryResult)]
pub struct WaitlistSummary {
pub count: i64,
pub linux_count: i64,
pub mac_count: i64,
pub windows_count: i64,
pub unknown_count: i64,
}

View File

@@ -1,19 +1,22 @@
use super::db::*;
use super::*;
use gpui::executor::{Background, Deterministic};
use std::sync::Arc;
#[cfg(test)]
use pretty_assertions::{assert_eq, assert_ne};
macro_rules! test_both_dbs {
($postgres_test_name:ident, $sqlite_test_name:ident, $db:ident, $body:block) => {
#[gpui::test]
async fn $postgres_test_name() {
let test_db = PostgresTestDb::new(Deterministic::new(0).build_background());
let test_db = TestDb::postgres(Deterministic::new(0).build_background());
let $db = test_db.db();
$body
}
#[gpui::test]
async fn $sqlite_test_name() {
let test_db = SqliteTestDb::new(Deterministic::new(0).build_background());
let test_db = TestDb::sqlite(Deterministic::new(0).build_background());
let $db = test_db.db();
$body
}
@@ -26,9 +29,10 @@ test_both_dbs!(
db,
{
let mut user_ids = Vec::new();
let mut user_metric_ids = Vec::new();
for i in 1..=4 {
user_ids.push(
db.create_user(
let user = db
.create_user(
&format!("user{i}@example.com"),
false,
NewUserParams {
@@ -38,9 +42,9 @@ test_both_dbs!(
},
)
.await
.unwrap()
.user_id,
);
.unwrap();
user_ids.push(user.user_id);
user_metric_ids.push(user.metrics_id);
}
assert_eq!(
@@ -52,6 +56,7 @@ test_both_dbs!(
github_user_id: Some(1),
email_address: Some("user1@example.com".to_string()),
admin: false,
metrics_id: user_metric_ids[0].parse().unwrap(),
..Default::default()
},
User {
@@ -60,6 +65,7 @@ test_both_dbs!(
github_user_id: Some(2),
email_address: Some("user2@example.com".to_string()),
admin: false,
metrics_id: user_metric_ids[1].parse().unwrap(),
..Default::default()
},
User {
@@ -68,6 +74,7 @@ test_both_dbs!(
github_user_id: Some(3),
email_address: Some("user3@example.com".to_string()),
admin: false,
metrics_id: user_metric_ids[2].parse().unwrap(),
..Default::default()
},
User {
@@ -76,6 +83,7 @@ test_both_dbs!(
github_user_id: Some(4),
email_address: Some("user4@example.com".to_string()),
admin: false,
metrics_id: user_metric_ids[3].parse().unwrap(),
..Default::default()
}
]
@@ -258,7 +266,8 @@ test_both_dbs!(test_add_contacts_postgres, test_add_contacts_sqlite, db, {
db.get_contacts(user_1).await.unwrap(),
&[Contact::Accepted {
user_id: user_2,
should_notify: true
should_notify: true,
busy: false,
}],
);
assert!(db.has_contact(user_1, user_2).await.unwrap());
@@ -268,6 +277,7 @@ test_both_dbs!(test_add_contacts_postgres, test_add_contacts_sqlite, db, {
&[Contact::Accepted {
user_id: user_1,
should_notify: false,
busy: false,
}]
);
@@ -284,6 +294,7 @@ test_both_dbs!(test_add_contacts_postgres, test_add_contacts_sqlite, db, {
&[Contact::Accepted {
user_id: user_2,
should_notify: true,
busy: false,
}]
);
@@ -296,6 +307,7 @@ test_both_dbs!(test_add_contacts_postgres, test_add_contacts_sqlite, db, {
&[Contact::Accepted {
user_id: user_2,
should_notify: false,
busy: false,
}]
);
@@ -309,10 +321,12 @@ test_both_dbs!(test_add_contacts_postgres, test_add_contacts_sqlite, db, {
Contact::Accepted {
user_id: user_2,
should_notify: false,
busy: false,
},
Contact::Accepted {
user_id: user_3,
should_notify: false
should_notify: false,
busy: false,
}
]
);
@@ -320,7 +334,8 @@ test_both_dbs!(test_add_contacts_postgres, test_add_contacts_sqlite, db, {
db.get_contacts(user_3).await.unwrap(),
&[Contact::Accepted {
user_id: user_1,
should_notify: false
should_notify: false,
busy: false,
}],
);
@@ -335,14 +350,16 @@ test_both_dbs!(test_add_contacts_postgres, test_add_contacts_sqlite, db, {
db.get_contacts(user_2).await.unwrap(),
&[Contact::Accepted {
user_id: user_1,
should_notify: false
should_notify: false,
busy: false,
}]
);
assert_eq!(
db.get_contacts(user_3).await.unwrap(),
&[Contact::Accepted {
user_id: user_1,
should_notify: false
should_notify: false,
busy: false,
}],
);
});
@@ -388,16 +405,91 @@ test_both_dbs!(test_metrics_id_postgres, test_metrics_id_sqlite, db, {
assert_ne!(metrics_id1, metrics_id2);
});
test_both_dbs!(
test_project_count_postgres,
test_project_count_sqlite,
db,
{
let owner_id = db.create_server("test").await.unwrap().0 as u32;
let user1 = db
.create_user(
&format!("admin@example.com"),
true,
NewUserParams {
github_login: "admin".into(),
github_user_id: 0,
invite_count: 0,
},
)
.await
.unwrap();
let user2 = db
.create_user(
&format!("user@example.com"),
false,
NewUserParams {
github_login: "user".into(),
github_user_id: 1,
invite_count: 0,
},
)
.await
.unwrap();
let room_id = RoomId::from_proto(
db.create_room(user1.user_id, ConnectionId { owner_id, id: 0 }, "")
.await
.unwrap()
.id,
);
db.call(
room_id,
user1.user_id,
ConnectionId { owner_id, id: 0 },
user2.user_id,
None,
)
.await
.unwrap();
db.join_room(room_id, user2.user_id, ConnectionId { owner_id, id: 1 })
.await
.unwrap();
assert_eq!(db.project_count_excluding_admins().await.unwrap(), 0);
db.share_project(room_id, ConnectionId { owner_id, id: 1 }, &[])
.await
.unwrap();
assert_eq!(db.project_count_excluding_admins().await.unwrap(), 1);
db.share_project(room_id, ConnectionId { owner_id, id: 1 }, &[])
.await
.unwrap();
assert_eq!(db.project_count_excluding_admins().await.unwrap(), 2);
// Projects shared by admins aren't counted.
db.share_project(room_id, ConnectionId { owner_id, id: 0 }, &[])
.await
.unwrap();
assert_eq!(db.project_count_excluding_admins().await.unwrap(), 2);
db.leave_room(ConnectionId { owner_id, id: 1 })
.await
.unwrap();
assert_eq!(db.project_count_excluding_admins().await.unwrap(), 0);
}
);
#[test]
fn test_fuzzy_like_string() {
assert_eq!(DefaultDb::fuzzy_like_string("abcd"), "%a%b%c%d%");
assert_eq!(DefaultDb::fuzzy_like_string("x y"), "%x%y%");
assert_eq!(DefaultDb::fuzzy_like_string(" z "), "%z%");
assert_eq!(Database::fuzzy_like_string("abcd"), "%a%b%c%d%");
assert_eq!(Database::fuzzy_like_string("x y"), "%x%y%");
assert_eq!(Database::fuzzy_like_string(" z "), "%z%");
}
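A plausible implementation consistent with these assertions (not necessarily the one in the db crate) interleaves % wildcards around every non-whitespace character:

// Sketch only: reproduces the three test cases above.
fn fuzzy_like_string(query: &str) -> String {
    let mut like = String::with_capacity(query.len() * 2 + 1);
    like.push('%');
    for c in query.chars().filter(|c| !c.is_whitespace()) {
        like.push(c);
        like.push('%');
    }
    like
}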
#[gpui::test]
async fn test_fuzzy_search_users() {
let test_db = PostgresTestDb::new(build_background_executor());
let test_db = TestDb::postgres(build_background_executor());
let db = test_db.db();
for (i, github_login) in [
"California",
@@ -433,7 +525,7 @@ async fn test_fuzzy_search_users() {
&["rhode-island", "colorado", "oregon"],
);
async fn fuzzy_search_user_names(db: &Db<sqlx::Postgres>, query: &str) -> Vec<String> {
async fn fuzzy_search_user_names(db: &Database, query: &str) -> Vec<String> {
db.fuzzy_search_users(query, 10)
.await
.unwrap()
@@ -445,7 +537,7 @@ async fn test_fuzzy_search_users() {
#[gpui::test]
async fn test_invite_codes() {
let test_db = PostgresTestDb::new(build_background_executor());
let test_db = TestDb::postgres(build_background_executor());
let db = test_db.db();
let NewUserResult { user_id: user1, .. } = db
@@ -504,16 +596,20 @@ async fn test_invite_codes() {
db.get_contacts(user1).await.unwrap(),
[Contact::Accepted {
user_id: user2,
should_notify: true
should_notify: true,
busy: false,
}]
);
assert_eq!(
db.get_contacts(user2).await.unwrap(),
[Contact::Accepted {
user_id: user1,
should_notify: false
should_notify: false,
busy: false,
}]
);
assert!(db.has_contact(user1, user2).await.unwrap());
assert!(db.has_contact(user2, user1).await.unwrap());
assert_eq!(
db.get_invite_code_for_user(user2).await.unwrap().unwrap().1,
7
@@ -550,11 +646,13 @@ async fn test_invite_codes() {
[
Contact::Accepted {
user_id: user2,
should_notify: true
should_notify: true,
busy: false,
},
Contact::Accepted {
user_id: user3,
should_notify: true
should_notify: true,
busy: false,
}
]
);
@@ -562,9 +660,12 @@ async fn test_invite_codes() {
db.get_contacts(user3).await.unwrap(),
[Contact::Accepted {
user_id: user1,
should_notify: false
should_notify: false,
busy: false,
}]
);
assert!(db.has_contact(user1, user3).await.unwrap());
assert!(db.has_contact(user3, user1).await.unwrap());
assert_eq!(
db.get_invite_code_for_user(user3).await.unwrap().unwrap().1,
3
@@ -607,15 +708,18 @@ async fn test_invite_codes() {
[
Contact::Accepted {
user_id: user2,
should_notify: true
should_notify: true,
busy: false,
},
Contact::Accepted {
user_id: user3,
should_notify: true
should_notify: true,
busy: false,
},
Contact::Accepted {
user_id: user4,
should_notify: true
should_notify: true,
busy: false,
}
]
);
@@ -623,9 +727,12 @@ async fn test_invite_codes() {
db.get_contacts(user4).await.unwrap(),
[Contact::Accepted {
user_id: user1,
should_notify: false
should_notify: false,
busy: false,
}]
);
assert!(db.has_contact(user1, user4).await.unwrap());
assert!(db.has_contact(user4, user1).await.unwrap());
assert_eq!(
db.get_invite_code_for_user(user4).await.unwrap().unwrap().1,
5
@@ -637,20 +744,170 @@ async fn test_invite_codes() {
.unwrap_err();
let (_, invite_count) = db.get_invite_code_for_user(user1).await.unwrap().unwrap();
assert_eq!(invite_count, 1);
// A newer user can invite an existing one via a different email address
// than the one they used to sign up.
let user5 = db
.create_user(
"user5@example.com",
false,
NewUserParams {
github_login: "user5".into(),
github_user_id: 5,
invite_count: 0,
},
)
.await
.unwrap()
.user_id;
db.set_invite_count_for_user(user5, 5).await.unwrap();
let (user5_invite_code, _) = db.get_invite_code_for_user(user5).await.unwrap().unwrap();
let user5_invite_to_user1 = db
.create_invite_from_code(&user5_invite_code, "user1@different.com", None)
.await
.unwrap();
let user1_2 = db
.create_user_from_invite(
&user5_invite_to_user1,
NewUserParams {
github_login: "user1".into(),
github_user_id: 1,
invite_count: 5,
},
)
.await
.unwrap()
.unwrap()
.user_id;
assert_eq!(user1_2, user1);
assert_eq!(
db.get_contacts(user1).await.unwrap(),
[
Contact::Accepted {
user_id: user2,
should_notify: true,
busy: false,
},
Contact::Accepted {
user_id: user3,
should_notify: true,
busy: false,
},
Contact::Accepted {
user_id: user4,
should_notify: true,
busy: false,
},
Contact::Accepted {
user_id: user5,
should_notify: false,
busy: false,
}
]
);
assert_eq!(
db.get_contacts(user5).await.unwrap(),
[Contact::Accepted {
user_id: user1,
should_notify: true,
busy: false,
}]
);
assert!(db.has_contact(user1, user5).await.unwrap());
assert!(db.has_contact(user5, user1).await.unwrap());
}
#[gpui::test]
async fn test_multiple_signup_overwrite() {
let test_db = TestDb::postgres(build_background_executor());
let db = test_db.db();
let email_address = "user_1@example.com".to_string();
let initial_signup_created_at_milliseconds = 0;
let initial_signup = NewSignup {
email_address: email_address.clone(),
platform_mac: false,
platform_linux: true,
platform_windows: false,
editor_features: vec!["speed".into()],
programming_languages: vec!["rust".into(), "c".into()],
device_id: Some("device_id".to_string()),
added_to_mailing_list: false,
created_at: Some(
DateTime::from_timestamp_millis(initial_signup_created_at_milliseconds).unwrap(),
),
};
db.create_signup(&initial_signup).await.unwrap();
let initial_signup_from_db = db.get_signup(&email_address).await.unwrap();
assert_eq!(
initial_signup_from_db.clone(),
signup::Model {
email_address: initial_signup.email_address,
platform_mac: initial_signup.platform_mac,
platform_linux: initial_signup.platform_linux,
platform_windows: initial_signup.platform_windows,
editor_features: Some(initial_signup.editor_features),
programming_languages: Some(initial_signup.programming_languages),
added_to_mailing_list: initial_signup.added_to_mailing_list,
..initial_signup_from_db
}
);
let subsequent_signup = NewSignup {
email_address: email_address.clone(),
platform_mac: true,
platform_linux: false,
platform_windows: true,
editor_features: vec!["git integration".into(), "clean design".into()],
programming_languages: vec!["d".into(), "elm".into()],
device_id: Some("different_device_id".to_string()),
added_to_mailing_list: true,
// subsequent signup happens next day
created_at: Some(
DateTime::from_timestamp_millis(
initial_signup_created_at_milliseconds + (1000 * 60 * 60 * 24),
)
.unwrap(),
),
};
db.create_signup(&subsequent_signup).await.unwrap();
let subsequent_signup_from_db = db.get_signup(&email_address).await.unwrap();
assert_eq!(
subsequent_signup_from_db.clone(),
signup::Model {
platform_mac: subsequent_signup.platform_mac,
platform_linux: subsequent_signup.platform_linux,
platform_windows: subsequent_signup.platform_windows,
editor_features: Some(subsequent_signup.editor_features),
programming_languages: Some(subsequent_signup.programming_languages),
device_id: subsequent_signup.device_id,
added_to_mailing_list: subsequent_signup.added_to_mailing_list,
// shouldn't overwrite their creation DateTime; the user shouldn't lose their spot in line
created_at: initial_signup_from_db.created_at,
..subsequent_signup_from_db
}
);
}
#[gpui::test]
async fn test_signups() {
let test_db = PostgresTestDb::new(build_background_executor());
let test_db = TestDb::postgres(build_background_executor());
let db = test_db.db();
let usernames = (0..8).map(|i| format!("person-{i}")).collect::<Vec<_>>();
// people sign up on the waitlist
let all_signups = usernames
.iter()
.enumerate()
.map(|(i, username)| Signup {
.map(|(i, username)| NewSignup {
email_address: format!("{username}@example.com"),
platform_mac: true,
platform_linux: i % 2 == 0,
@@ -658,11 +915,14 @@ async fn test_signups() {
editor_features: vec!["speed".into()],
programming_languages: vec!["rust".into(), "c".into()],
device_id: Some(format!("device_id_{i}")),
added_to_mailing_list: i != 0, // One user failed to subscribe
created_at: Some(DateTime::from_timestamp_millis(i as i64).unwrap()), // Signups are consecutive
})
.collect::<Vec<Signup>>();
.collect::<Vec<NewSignup>>();
// people sign up on the waitlist
for signup in &all_signups {
// Users can sign up multiple times without issues
// users can sign up multiple times without issues
for _ in 0..2 {
db.create_signup(&signup).await.unwrap();
}

View File

@@ -0,0 +1,49 @@
use super::UserId;
use sea_orm::entity::prelude::*;
use serde::Serialize;
#[derive(Clone, Debug, Default, PartialEq, Eq, DeriveEntityModel, Serialize)]
#[sea_orm(table_name = "users")]
pub struct Model {
#[sea_orm(primary_key)]
pub id: UserId,
pub github_login: String,
pub github_user_id: Option<i32>,
pub email_address: Option<String>,
pub admin: bool,
pub invite_code: Option<String>,
pub invite_count: i32,
pub inviter_id: Option<UserId>,
pub connected_once: bool,
pub metrics_id: Uuid,
}
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
pub enum Relation {
#[sea_orm(has_many = "super::access_token::Entity")]
AccessToken,
#[sea_orm(has_one = "super::room_participant::Entity")]
RoomParticipant,
#[sea_orm(has_many = "super::project::Entity")]
HostedProjects,
}
impl Related<super::access_token::Entity> for Entity {
fn to() -> RelationDef {
Relation::AccessToken.def()
}
}
impl Related<super::room_participant::Entity> for Entity {
fn to() -> RelationDef {
Relation::RoomParticipant.def()
}
}
impl Related<super::project::Entity> for Entity {
fn to() -> RelationDef {
Relation::HostedProjects.def()
}
}
impl ActiveModelBehavior for ActiveModel {}

View File

@@ -0,0 +1,34 @@
use super::ProjectId;
use sea_orm::entity::prelude::*;
#[derive(Clone, Debug, PartialEq, Eq, DeriveEntityModel)]
#[sea_orm(table_name = "worktrees")]
pub struct Model {
#[sea_orm(primary_key)]
pub id: i64,
#[sea_orm(primary_key)]
pub project_id: ProjectId,
pub abs_path: String,
pub root_name: String,
pub visible: bool,
pub scan_id: i64,
pub is_complete: bool,
}
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
pub enum Relation {
#[sea_orm(
belongs_to = "super::project::Entity",
from = "Column::ProjectId",
to = "super::project::Column::Id"
)]
Project,
}
impl Related<super::project::Entity> for Entity {
fn to() -> RelationDef {
Relation::Project.def()
}
}
impl ActiveModelBehavior for ActiveModel {}

View File

@@ -0,0 +1,21 @@
use super::ProjectId;
use sea_orm::entity::prelude::*;
#[derive(Clone, Debug, PartialEq, Eq, DeriveEntityModel)]
#[sea_orm(table_name = "worktree_diagnostic_summaries")]
pub struct Model {
#[sea_orm(primary_key)]
pub project_id: ProjectId,
#[sea_orm(primary_key)]
pub worktree_id: i64,
#[sea_orm(primary_key)]
pub path: String,
pub language_server_id: i64,
pub error_count: i32,
pub warning_count: i32,
}
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
pub enum Relation {}
impl ActiveModelBehavior for ActiveModel {}

View File

@@ -0,0 +1,25 @@
use super::ProjectId;
use sea_orm::entity::prelude::*;
#[derive(Clone, Debug, PartialEq, Eq, DeriveEntityModel)]
#[sea_orm(table_name = "worktree_entries")]
pub struct Model {
#[sea_orm(primary_key)]
pub project_id: ProjectId,
#[sea_orm(primary_key)]
pub worktree_id: i64,
#[sea_orm(primary_key)]
pub id: i64,
pub is_dir: bool,
pub path: String,
pub inode: i64,
pub mtime_seconds: i64,
pub mtime_nanos: i32,
pub is_symlink: bool,
pub is_ignored: bool,
}
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
pub enum Relation {}
impl ActiveModelBehavior for ActiveModel {}

View File

@@ -0,0 +1,36 @@
use std::{future::Future, time::Duration};
#[derive(Clone)]
pub enum Executor {
Production,
#[cfg(test)]
Deterministic(std::sync::Arc<gpui::executor::Background>),
}
impl Executor {
pub fn spawn_detached<F>(&self, future: F)
where
F: 'static + Send + Future<Output = ()>,
{
match self {
Executor::Production => {
tokio::spawn(future);
}
#[cfg(test)]
Executor::Deterministic(background) => {
background.spawn(future).detach();
}
}
}
pub fn sleep(&self, duration: Duration) -> impl Future<Output = ()> {
let this = self.clone();
async move {
match this {
Executor::Production => tokio::time::sleep(duration).await,
#[cfg(test)]
Executor::Deterministic(background) => background.timer(duration).await,
}
}
}
}
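A minimal usage sketch (ours, not part of the diff): both variants expose the same interface, so server code can spawn background work and sleep without knowing whether it runs under Tokio or a deterministic test executor.

use std::time::Duration;

// Assumes the Executor type above is in scope. Under Executor::Production
// this must be polled inside a Tokio runtime, since spawn_detached and
// sleep delegate to tokio::spawn and tokio::time::sleep.
async fn poll_forever(executor: Executor) {
    loop {
        executor.spawn_detached(async { /* e.g. prune stale rooms */ });
        executor.sleep(Duration::from_secs(60)).await;
    }
}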

File diff suppressed because it is too large

View File

@@ -1,9 +1,22 @@
pub mod api;
pub mod auth;
pub mod db;
pub mod env;
pub mod executor;
#[cfg(test)]
mod integration_tests;
pub mod rpc;
use axum::{http::StatusCode, response::IntoResponse};
use db::Database;
use serde::Deserialize;
use std::{path::PathBuf, sync::Arc};
pub type Result<T, E = Error> = std::result::Result<T, E>;
pub enum Error {
Http(StatusCode, String),
Database(sea_orm::error::DbErr),
Internal(anyhow::Error),
}
@@ -13,9 +26,9 @@ impl From<anyhow::Error> for Error {
}
}
impl From<sqlx::Error> for Error {
fn from(error: sqlx::Error) -> Self {
Self::Internal(error.into())
impl From<sea_orm::error::DbErr> for Error {
fn from(error: sea_orm::error::DbErr) -> Self {
Self::Database(error)
}
}
@@ -41,6 +54,9 @@ impl IntoResponse for Error {
fn into_response(self) -> axum::response::Response {
match self {
Error::Http(code, message) => (code, message).into_response(),
Error::Database(error) => {
(StatusCode::INTERNAL_SERVER_ERROR, format!("{}", &error)).into_response()
}
Error::Internal(error) => {
(StatusCode::INTERNAL_SERVER_ERROR, format!("{}", &error)).into_response()
}
@@ -52,6 +68,7 @@ impl std::fmt::Debug for Error {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match self {
Error::Http(code, message) => (code, message).fmt(f),
Error::Database(error) => error.fmt(f),
Error::Internal(error) => error.fmt(f),
}
}
@@ -61,9 +78,65 @@ impl std::fmt::Display for Error {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match self {
Error::Http(code, message) => write!(f, "{code}: {message}"),
Error::Database(error) => error.fmt(f),
Error::Internal(error) => error.fmt(f),
}
}
}
impl std::error::Error for Error {}
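The new Database variant means handlers can apply ? to sea-orm results directly; a small demonstration (the function is ours):

// The From impl above performs this conversion implicitly wherever ?
// is used on a Result<_, sea_orm::error::DbErr> inside a handler, and
// IntoResponse then renders the error as a 500.
fn lift(db_err: sea_orm::error::DbErr) -> Error {
    Error::from(db_err)
}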
#[derive(Default, Deserialize)]
pub struct Config {
pub http_port: u16,
pub database_url: String,
pub api_token: String,
pub invite_link_prefix: String,
pub live_kit_server: Option<String>,
pub live_kit_key: Option<String>,
pub live_kit_secret: Option<String>,
pub rust_log: Option<String>,
pub log_json: Option<bool>,
pub zed_environment: String,
}
#[derive(Default, Deserialize)]
pub struct MigrateConfig {
pub database_url: String,
pub migrations_path: Option<PathBuf>,
}
pub struct AppState {
pub db: Arc<Database>,
pub live_kit_client: Option<Arc<dyn live_kit_server::api::Client>>,
pub config: Config,
}
impl AppState {
pub async fn new(config: Config) -> Result<Arc<Self>> {
let mut db_options = db::ConnectOptions::new(config.database_url.clone());
db_options.max_connections(5);
let db = Database::new(db_options).await?;
let live_kit_client = if let Some(((server, key), secret)) = config
.live_kit_server
.as_ref()
.zip(config.live_kit_key.as_ref())
.zip(config.live_kit_secret.as_ref())
{
Some(Arc::new(live_kit_server::api::LiveKitClient::new(
server.clone(),
key.clone(),
secret.clone(),
)) as Arc<dyn live_kit_server::api::Client>)
} else {
None
};
let this = Self {
db: Arc::new(db),
live_kit_client,
config,
};
Ok(Arc::new(this))
}
}

View File

@@ -1,86 +1,19 @@
mod api;
mod auth;
mod db;
mod env;
mod rpc;
#[cfg(test)]
mod db_tests;
#[cfg(test)]
mod integration_tests;
use crate::rpc::ResultExt as _;
use anyhow::anyhow;
use axum::{routing::get, Router};
use collab::{Error, Result};
use db::DefaultDb as Db;
use serde::Deserialize;
use collab::{db, env, executor::Executor, AppState, Config, MigrateConfig, Result};
use db::Database;
use std::{
env::args,
net::{SocketAddr, TcpListener},
path::{Path, PathBuf},
sync::Arc,
time::Duration,
path::Path,
};
use tokio::signal;
use tokio::signal::unix::SignalKind;
use tracing_log::LogTracer;
use tracing_subscriber::{filter::EnvFilter, fmt::format::JsonFields, Layer};
use util::ResultExt;
const VERSION: &'static str = env!("CARGO_PKG_VERSION");
#[derive(Default, Deserialize)]
pub struct Config {
pub http_port: u16,
pub database_url: String,
pub api_token: String,
pub invite_link_prefix: String,
pub live_kit_server: Option<String>,
pub live_kit_key: Option<String>,
pub live_kit_secret: Option<String>,
pub rust_log: Option<String>,
pub log_json: Option<bool>,
}
#[derive(Default, Deserialize)]
pub struct MigrateConfig {
pub database_url: String,
pub migrations_path: Option<PathBuf>,
}
pub struct AppState {
db: Arc<Db>,
live_kit_client: Option<Arc<dyn live_kit_server::api::Client>>,
config: Config,
}
impl AppState {
async fn new(config: Config) -> Result<Arc<Self>> {
let db = Db::new(&config.database_url, 5).await?;
let live_kit_client = if let Some(((server, key), secret)) = config
.live_kit_server
.as_ref()
.zip(config.live_kit_key.as_ref())
.zip(config.live_kit_secret.as_ref())
{
Some(Arc::new(live_kit_server::api::LiveKitClient::new(
server.clone(),
key.clone(),
secret.clone(),
)) as Arc<dyn live_kit_server::api::Client>)
} else {
None
};
let this = Self {
db: Arc::new(db),
live_kit_client,
config,
};
Ok(Arc::new(this))
}
}
#[tokio::main]
async fn main() -> Result<()> {
if let Err(error) = env::load_dotenv() {
@@ -96,7 +29,9 @@ async fn main() -> Result<()> {
}
Some("migrate") => {
let config = envy::from_env::<MigrateConfig>().expect("error loading config");
let db = Db::new(&config.database_url, 5).await?;
let mut db_options = db::ConnectOptions::new(config.database_url.clone());
db_options.max_connections(5);
let db = Database::new(db_options).await?;
let migrations_path = config
.migrations_path
@@ -118,18 +53,35 @@ async fn main() -> Result<()> {
init_tracing(&config);
let state = AppState::new(config).await?;
let listener = TcpListener::bind(&format!("0.0.0.0:{}", state.config.http_port))
.expect("failed to bind TCP listener");
let rpc_server = rpc::Server::new(state.clone());
let epoch = state
.db
.create_server(&state.config.zed_environment)
.await?;
let rpc_server = collab::rpc::Server::new(epoch, state.clone(), Executor::Production);
rpc_server.start().await?;
let app = api::routes(rpc_server.clone(), state.clone())
.merge(rpc::routes(rpc_server.clone()))
let app = collab::api::routes(rpc_server.clone(), state.clone())
.merge(collab::rpc::routes(rpc_server.clone()))
.merge(Router::new().route("/", get(handle_root)));
axum::Server::from_tcp(listener)?
.serve(app.into_make_service_with_connect_info::<SocketAddr>())
.with_graceful_shutdown(graceful_shutdown(rpc_server, state))
.with_graceful_shutdown(async move {
let mut sigterm = tokio::signal::unix::signal(SignalKind::terminate())
.expect("failed to listen for interrupt signal");
let mut sigint = tokio::signal::unix::signal(SignalKind::interrupt())
.expect("failed to listen for interrupt signal");
let sigterm = sigterm.recv();
let sigint = sigint.recv();
futures::pin_mut!(sigterm, sigint);
futures::future::select(sigterm, sigint).await;
tracing::info!("Received interrupt signal");
rpc_server.teardown();
})
.await?;
}
_ => {
@@ -174,52 +126,3 @@ pub fn init_tracing(config: &Config) -> Option<()> {
None
}
async fn graceful_shutdown(rpc_server: Arc<rpc::Server>, state: Arc<AppState>) {
let ctrl_c = async {
signal::ctrl_c()
.await
.expect("failed to install Ctrl+C handler");
};
#[cfg(unix)]
let terminate = async {
signal::unix::signal(signal::unix::SignalKind::terminate())
.expect("failed to install signal handler")
.recv()
.await;
};
#[cfg(not(unix))]
let terminate = std::future::pending::<()>();
tokio::select! {
_ = ctrl_c => {},
_ = terminate => {},
}
if let Some(live_kit) = state.live_kit_client.as_ref() {
let deletions = rpc_server
.store()
.await
.rooms()
.values()
.map(|room| {
let name = room.live_kit_room.clone();
async {
live_kit.delete_room(name).await.trace_err();
}
})
.collect::<Vec<_>>();
tracing::info!("deleting all live-kit rooms");
if let Err(_) = tokio::time::timeout(
Duration::from_secs(10),
futures::future::join_all(deletions),
)
.await
{
tracing::error!("timed out waiting for live-kit room deletion");
}
}
}

File diff suppressed because it is too large

View File

@@ -0,0 +1,98 @@
use crate::db::UserId;
use anyhow::{anyhow, Result};
use collections::{BTreeMap, HashSet};
use rpc::ConnectionId;
use serde::Serialize;
use tracing::instrument;
#[derive(Default, Serialize)]
pub struct ConnectionPool {
connections: BTreeMap<ConnectionId, Connection>,
connected_users: BTreeMap<UserId, ConnectedUser>,
}
#[derive(Default, Serialize)]
struct ConnectedUser {
connection_ids: HashSet<ConnectionId>,
}
#[derive(Serialize)]
pub struct Connection {
pub user_id: UserId,
pub admin: bool,
}
impl ConnectionPool {
pub fn reset(&mut self) {
self.connections.clear();
self.connected_users.clear();
}
#[instrument(skip(self))]
pub fn add_connection(&mut self, connection_id: ConnectionId, user_id: UserId, admin: bool) {
self.connections
.insert(connection_id, Connection { user_id, admin });
let connected_user = self.connected_users.entry(user_id).or_default();
connected_user.connection_ids.insert(connection_id);
}
#[instrument(skip(self))]
pub fn remove_connection(&mut self, connection_id: ConnectionId) -> Result<()> {
let connection = self
.connections
.get_mut(&connection_id)
.ok_or_else(|| anyhow!("no such connection"))?;
let user_id = connection.user_id;
let connected_user = self.connected_users.get_mut(&user_id).unwrap();
connected_user.connection_ids.remove(&connection_id);
if connected_user.connection_ids.is_empty() {
self.connected_users.remove(&user_id);
}
self.connections.remove(&connection_id).unwrap();
Ok(())
}
pub fn connections(&self) -> impl Iterator<Item = &Connection> {
self.connections.values()
}
pub fn user_connection_ids(&self, user_id: UserId) -> impl Iterator<Item = ConnectionId> + '_ {
self.connected_users
.get(&user_id)
.into_iter()
.map(|state| &state.connection_ids)
.flatten()
.copied()
}
pub fn is_user_online(&self, user_id: UserId) -> bool {
!self
.connected_users
.get(&user_id)
.unwrap_or(&Default::default())
.connection_ids
.is_empty()
}
#[cfg(test)]
pub fn check_invariants(&self) {
for (connection_id, connection) in &self.connections {
assert!(self
.connected_users
.get(&connection.user_id)
.unwrap()
.connection_ids
.contains(connection_id));
}
for (user_id, state) in &self.connected_users {
for connection_id in &state.connection_ids {
assert_eq!(
self.connections.get(connection_id).unwrap().user_id,
*user_id
);
}
}
}
}
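A usage sketch (ours) of the invariant the pool maintains: a user counts as online while any of their connections remains. It assumes UserId is a tuple-style newtype over i32 and that both id types are Copy, as the other id types in this diff appear to be.

// ConnectionId and UserId are already imported at the top of this file.
fn demo(pool: &mut ConnectionPool) -> anyhow::Result<()> {
    let user = UserId(1); // hypothetical id
    let a = ConnectionId { owner_id: 1, id: 1 };
    let b = ConnectionId { owner_id: 1, id: 2 };
    pool.add_connection(a, user, false);
    pool.add_connection(b, user, false);
    assert_eq!(pool.user_connection_ids(user).count(), 2);
    pool.remove_connection(a)?;
    assert!(pool.is_user_online(user)); // still online via b
    pool.remove_connection(b)?;
    assert!(!pool.is_user_online(user));
    Ok(())
}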

File diff suppressed because it is too large

View File

@@ -1,6 +1,6 @@
use crate::{contact_notification::ContactNotification, contacts_popover};
use call::{ActiveCall, ParticipantLocation};
use client::{Authenticate, ContactEventKind, PeerId, User, UserStore};
use client::{proto::PeerId, Authenticate, ContactEventKind, User, UserStore};
use clock::ReplicaId;
use contacts_popover::ContactsPopover;
use gpui::{
@@ -342,24 +342,27 @@ impl CollabTitlebarItem {
let mut participants = room
.read(cx)
.remote_participants()
.iter()
.map(|(peer_id, collaborator)| (*peer_id, collaborator.clone()))
.values()
.cloned()
.collect::<Vec<_>>();
participants
.sort_by_key(|(peer_id, _)| Some(project.collaborators().get(peer_id)?.replica_id));
participants.sort_by_key(|p| Some(project.collaborators().get(&p.peer_id)?.replica_id));
participants
.into_iter()
.filter_map(|(peer_id, participant)| {
.filter_map(|participant| {
let project = workspace.read(cx).project().read(cx);
let replica_id = project
.collaborators()
.get(&peer_id)
.get(&participant.peer_id)
.map(|collaborator| collaborator.replica_id);
let user = participant.user.clone();
Some(self.render_avatar(
&user,
replica_id,
Some((peer_id, &user.github_login, participant.location)),
Some((
participant.peer_id,
&user.github_login,
participant.location,
)),
workspace,
theme,
cx,
@@ -474,7 +477,7 @@ impl CollabTitlebarItem {
cx.dispatch_action(ToggleFollow(peer_id))
})
.with_tooltip::<ToggleFollow, _>(
peer_id.0 as usize,
peer_id.as_u64() as usize,
if is_followed {
format!("Unfollow {}", peer_github_login)
} else {
@@ -487,22 +490,24 @@ impl CollabTitlebarItem {
.boxed()
} else if let ParticipantLocation::SharedProject { project_id } = location {
let user_id = user.id;
MouseEventHandler::<JoinProject>::new(peer_id.0 as usize, cx, move |_, _| content)
.with_cursor_style(CursorStyle::PointingHand)
.on_click(MouseButton::Left, move |_, cx| {
cx.dispatch_action(JoinProject {
project_id,
follow_user_id: user_id,
})
MouseEventHandler::<JoinProject>::new(peer_id.as_u64() as usize, cx, move |_, _| {
content
})
.with_cursor_style(CursorStyle::PointingHand)
.on_click(MouseButton::Left, move |_, cx| {
cx.dispatch_action(JoinProject {
project_id,
follow_user_id: user_id,
})
.with_tooltip::<JoinProject, _>(
peer_id.0 as usize,
format!("Follow {} into external project", peer_github_login),
Some(Box::new(FollowNextCollaborator)),
theme.tooltip.clone(),
cx,
)
.boxed()
})
.with_tooltip::<JoinProject, _>(
peer_id.as_u64() as usize,
format!("Follow {} into external project", peer_github_login),
Some(Box::new(FollowNextCollaborator)),
theme.tooltip.clone(),
cx,
)
.boxed()
} else {
content
}

View File

@@ -43,7 +43,6 @@ pub fn init(app_state: Arc<AppState>, cx: &mut MutableAppContext) {
project_id,
app_state.client.clone(),
app_state.user_store.clone(),
app_state.project_store.clone(),
app_state.languages.clone(),
app_state.fs.clone(),
cx.clone(),
@@ -51,7 +50,13 @@ pub fn init(app_state: Arc<AppState>, cx: &mut MutableAppContext) {
.await?;
let (_, workspace) = cx.add_window((app_state.build_window_options)(), |cx| {
let mut workspace = Workspace::new(project, app_state.default_item_factory, cx);
let mut workspace = Workspace::new(
Default::default(),
0,
project,
app_state.dock_default_item_factory,
cx,
);
(app_state.initialize_workspace)(&mut workspace, &app_state, cx);
workspace
});
@@ -68,7 +73,7 @@ pub fn init(app_state: Arc<AppState>, cx: &mut MutableAppContext) {
.remote_participants()
.iter()
.find(|(_, participant)| participant.user.id == follow_user_id)
.map(|(peer_id, _)| *peer_id)
.map(|(_, p)| p.peer_id)
.or_else(|| {
// If we couldn't follow the given user, follow the host instead.
let collaborator = workspace

View File

@@ -2,7 +2,7 @@ use std::{mem, sync::Arc};
use crate::contacts_popover;
use call::ActiveCall;
use client::{Contact, PeerId, User, UserStore};
use client::{proto::PeerId, Contact, User, UserStore};
use editor::{Cancel, Editor};
use fuzzy::{match_strings, StringMatchCandidate};
use gpui::{
@@ -461,15 +461,13 @@ impl ContactList {
// Populate remote participants.
self.match_candidates.clear();
self.match_candidates
.extend(
room.remote_participants()
.iter()
.map(|(peer_id, participant)| StringMatchCandidate {
id: peer_id.0 as usize,
string: participant.user.github_login.clone(),
char_bag: participant.user.github_login.chars().collect(),
}),
);
.extend(room.remote_participants().iter().map(|(_, participant)| {
StringMatchCandidate {
id: participant.user.id as usize,
string: participant.user.github_login.clone(),
char_bag: participant.user.github_login.chars().collect(),
}
}));
let matches = executor.block(match_strings(
&self.match_candidates,
&query,
@@ -479,8 +477,8 @@ impl ContactList {
executor.clone(),
));
for mat in matches {
let peer_id = PeerId(mat.candidate_id as u32);
let participant = &room.remote_participants()[&peer_id];
let user_id = mat.candidate_id as u64;
let participant = &room.remote_participants()[&user_id];
participant_entries.push(ContactEntry::CallParticipant {
user: participant.user.clone(),
is_pending: false,
@@ -496,7 +494,7 @@ impl ContactList {
}
if !participant.tracks.is_empty() {
participant_entries.push(ContactEntry::ParticipantScreen {
peer_id,
peer_id: participant.peer_id,
is_last: true,
});
}
@@ -881,75 +879,80 @@ impl ContactList {
let baseline_offset =
row.name.text.baseline_offset(font_cache) + (theme.row_height - line_height) / 2.;
MouseEventHandler::<OpenSharedScreen>::new(peer_id.0 as usize, cx, |mouse_state, _| {
let tree_branch = *tree_branch.style_for(mouse_state, is_selected);
let row = theme.project_row.style_for(mouse_state, is_selected);
MouseEventHandler::<OpenSharedScreen>::new(
peer_id.as_u64() as usize,
cx,
|mouse_state, _| {
let tree_branch = *tree_branch.style_for(mouse_state, is_selected);
let row = theme.project_row.style_for(mouse_state, is_selected);
Flex::row()
.with_child(
Stack::new()
.with_child(
Canvas::new(move |bounds, _, cx| {
let start_x = bounds.min_x() + (bounds.width() / 2.)
- (tree_branch.width / 2.);
let end_x = bounds.max_x();
let start_y = bounds.min_y();
let end_y = bounds.min_y() + baseline_offset - (cap_height / 2.);
Flex::row()
.with_child(
Stack::new()
.with_child(
Canvas::new(move |bounds, _, cx| {
let start_x = bounds.min_x() + (bounds.width() / 2.)
- (tree_branch.width / 2.);
let end_x = bounds.max_x();
let start_y = bounds.min_y();
let end_y =
bounds.min_y() + baseline_offset - (cap_height / 2.);
cx.scene.push_quad(gpui::Quad {
bounds: RectF::from_points(
vec2f(start_x, start_y),
vec2f(
start_x + tree_branch.width,
if is_last { end_y } else { bounds.max_y() },
cx.scene.push_quad(gpui::Quad {
bounds: RectF::from_points(
vec2f(start_x, start_y),
vec2f(
start_x + tree_branch.width,
if is_last { end_y } else { bounds.max_y() },
),
),
),
background: Some(tree_branch.color),
border: gpui::Border::default(),
corner_radius: 0.,
});
cx.scene.push_quad(gpui::Quad {
bounds: RectF::from_points(
vec2f(start_x, end_y),
vec2f(end_x, end_y + tree_branch.width),
),
background: Some(tree_branch.color),
border: gpui::Border::default(),
corner_radius: 0.,
});
})
background: Some(tree_branch.color),
border: gpui::Border::default(),
corner_radius: 0.,
});
cx.scene.push_quad(gpui::Quad {
bounds: RectF::from_points(
vec2f(start_x, end_y),
vec2f(end_x, end_y + tree_branch.width),
),
background: Some(tree_branch.color),
border: gpui::Border::default(),
corner_radius: 0.,
});
})
.boxed(),
)
.constrained()
.with_width(host_avatar_height)
.boxed(),
)
.constrained()
.with_width(host_avatar_height)
.boxed(),
)
.with_child(
Svg::new("icons/disable_screen_sharing_12.svg")
.with_color(row.icon.color)
.constrained()
.with_width(row.icon.width)
.aligned()
.left()
.contained()
.with_style(row.icon.container)
.boxed(),
)
.with_child(
Label::new("Screen".into(), row.name.text.clone())
.aligned()
.left()
.contained()
.with_style(row.name.container)
.flex(1., false)
.boxed(),
)
.constrained()
.with_height(theme.row_height)
.contained()
.with_style(row.container)
.boxed()
})
)
.with_child(
Svg::new("icons/disable_screen_sharing_12.svg")
.with_color(row.icon.color)
.constrained()
.with_width(row.icon.width)
.aligned()
.left()
.contained()
.with_style(row.icon.container)
.boxed(),
)
.with_child(
Label::new("Screen".into(), row.name.text.clone())
.aligned()
.left()
.contained()
.with_style(row.name.container)
.flex(1., false)
.boxed(),
)
.constrained()
.with_height(theme.row_height)
.contained()
.with_style(row.container)
.boxed()
},
)
.with_cursor_style(CursorStyle::PointingHand)
.on_click(MouseButton::Left, move |_, cx| {
cx.dispatch_action(OpenSharedScreen { peer_id });

View File

@@ -6,7 +6,7 @@ use gpui::{
elements::*, impl_internal_actions, Entity, ModelHandle, MutableAppContext, RenderContext,
View, ViewContext,
};
use workspace::Notification;
use workspace::notifications::Notification;
impl_internal_actions!(contact_notifications, [Dismiss, RespondToContactRequest]);

View File

@@ -74,7 +74,7 @@ impl IncomingCallNotification {
let active_call = ActiveCall::global(cx);
if action.accept {
let join = active_call.update(cx, |active_call, cx| active_call.accept_incoming(cx));
let caller_user_id = self.call.caller.id;
let caller_user_id = self.call.calling_user.id;
let initial_project_id = self.call.initial_project.as_ref().map(|project| project.id);
cx.spawn_weak(|_, mut cx| async move {
join.await?;
@@ -105,7 +105,7 @@ impl IncomingCallNotification {
.as_ref()
.unwrap_or(&default_project);
Flex::row()
.with_children(self.call.caller.avatar.clone().map(|avatar| {
.with_children(self.call.calling_user.avatar.clone().map(|avatar| {
Image::new(avatar)
.with_style(theme.caller_avatar)
.aligned()
@@ -115,7 +115,7 @@ impl IncomingCallNotification {
Flex::column()
.with_child(
Label::new(
self.call.caller.github_login.clone(),
self.call.calling_user.github_login.clone(),
theme.caller_username.text.clone(),
)
.contained()

View File

@@ -350,8 +350,9 @@ mod tests {
});
let project = Project::test(app_state.fs.clone(), [], cx).await;
let (_, workspace) =
cx.add_window(|cx| Workspace::new(project, |_, _| unimplemented!(), cx));
let (_, workspace) = cx.add_window(|cx| {
Workspace::new(Default::default(), 0, project, |_, _| unimplemented!(), cx)
});
let editor = cx.add_view(&workspace, |cx| {
let mut editor = Editor::single_line(None, cx);
editor.set_text("abc", cx);

View File

@@ -12,16 +12,20 @@ test-support = []
[dependencies]
collections = { path = "../collections" }
gpui = { path = "../gpui" }
sqlez = { path = "../sqlez" }
sqlez_macros = { path = "../sqlez_macros" }
util = { path = "../util" }
anyhow = "1.0.57"
indoc = "1.0.4"
async-trait = "0.1"
lazy_static = "1.4.0"
log = { version = "0.4.16", features = ["kv_unstable_serde"] }
parking_lot = "0.11.1"
rusqlite = { version = "0.28.0", features = ["bundled", "serde_json"] }
rusqlite_migration = { git = "https://github.com/cljoly/rusqlite_migration", rev = "c433555d7c1b41b103426e35756eb3144d0ebbc6" }
serde = { workspace = true }
serde_rusqlite = "0.31.0"
serde = { version = "1.0", features = ["derive"] }
smol = "1.2"
[dev-dependencies]
gpui = { path = "../gpui", features = ["test-support"] }
env_logger = "0.9.1"
tempdir = { version = "0.3.7" }

crates/db/README.md (new file)
View File

@@ -0,0 +1,5 @@
# Building Queries
First, craft your test data. The examples folder contains a template for building a test DB; run it with `cargo run --example [your-example]`.
To actually use and test your queries, import the generated DB file into https://sqliteonline.com/
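A minimal sketch of such an example (the file name and key/value strings are hypothetical; this assumes the `KEY_VALUE_STORE` connection defined in kvp.rs):

// crates/db/examples/kvp_example.rs (hypothetical)
// Writes one row so the generated DB file has something to inspect.
fn main() {
    db::smol::block_on(async {
        db::kvp::KEY_VALUE_STORE
            .write_kvp("example-key".to_string(), "example-value".to_string())
            .await
            .expect("write_kvp failed");
    });
}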

View File

@@ -1,119 +1,396 @@
mod kvp;
mod migrations;
pub mod kvp;
pub mod query;
use std::fs;
// Re-export
pub use anyhow;
use anyhow::Context;
pub use indoc::indoc;
pub use lazy_static;
use parking_lot::{Mutex, RwLock};
pub use smol;
pub use sqlez;
pub use sqlez_macros;
pub use util::channel::{RELEASE_CHANNEL, RELEASE_CHANNEL_NAME};
pub use util::paths::DB_DIR;
use sqlez::domain::Migrator;
use sqlez::thread_safe_connection::ThreadSafeConnection;
use sqlez_macros::sql;
use std::fs::create_dir_all;
use std::path::{Path, PathBuf};
use std::sync::Arc;
use std::sync::atomic::{AtomicBool, Ordering};
use std::time::{SystemTime, UNIX_EPOCH};
use util::channel::ReleaseChannel;
use util::{async_iife, ResultExt};
use anyhow::Result;
use log::error;
use parking_lot::Mutex;
use rusqlite::Connection;
const CONNECTION_INITIALIZE_QUERY: &'static str = sql!(
PRAGMA foreign_keys=TRUE;
);
use migrations::MIGRATIONS;
const DB_INITIALIZE_QUERY: &'static str = sql!(
PRAGMA journal_mode=WAL;
PRAGMA busy_timeout=1;
PRAGMA case_sensitive_like=TRUE;
PRAGMA synchronous=NORMAL;
);
#[derive(Clone)]
pub enum Db {
Real(Arc<RealDb>),
Null,
const FALLBACK_DB_NAME: &'static str = "FALLBACK_MEMORY_DB";
const DB_FILE_NAME: &'static str = "db.sqlite";
lazy_static::lazy_static! {
static ref ZED_STATELESS: bool = std::env::var("ZED_STATELESS").map_or(false, |v| !v.is_empty());
static ref DB_FILE_OPERATIONS: Mutex<()> = Mutex::new(());
pub static ref BACKUP_DB_PATH: RwLock<Option<PathBuf>> = RwLock::new(None);
pub static ref ALL_FILE_DB_FAILED: AtomicBool = AtomicBool::new(false);
}
pub struct RealDb {
connection: Mutex<Connection>,
path: Option<PathBuf>,
}
impl Db {
/// Open or create a database at the given directory path.
pub fn open(db_dir: &Path, channel: &'static str) -> Self {
// Use 0 for now. Will implement incrementing and clearing of old db files soon TM
let current_db_dir = db_dir.join(Path::new(&format!("0-{}", channel)));
fs::create_dir_all(&current_db_dir)
.expect("Should be able to create the database directory");
let db_path = current_db_dir.join(Path::new("db.sqlite"));
Connection::open(db_path)
.map_err(Into::into)
.and_then(|connection| Self::initialize(connection))
.map(|connection| {
Db::Real(Arc::new(RealDb {
connection,
path: Some(db_dir.to_path_buf()),
}))
})
.unwrap_or_else(|e| {
error!(
"Connecting to file backed db failed. Reverting to null db. {}",
e
);
Self::Null
})
/// Open or create a database at the given directory path.
/// This will retry a couple times if there are failures. If opening fails once, the db directory
/// is moved to a backup folder and a new one is created. If that fails, a shared in memory db is created.
/// In either case, static variables are set so that the user can be notified.
pub async fn open_db<M: Migrator + 'static>(
db_dir: &Path,
release_channel: &ReleaseChannel,
) -> ThreadSafeConnection<M> {
if *ZED_STATELESS {
return open_fallback_db().await;
}
/// Open an in-memory database for testing and as a fallback.
#[cfg(any(test, feature = "test-support"))]
pub fn open_in_memory() -> Self {
Connection::open_in_memory()
.map_err(Into::into)
.and_then(|connection| Self::initialize(connection))
.map(|connection| {
Db::Real(Arc::new(RealDb {
connection,
path: None,
}))
})
.unwrap_or_else(|e| {
error!(
"Connecting to in memory db failed. Reverting to null db. {}",
e
);
Self::Null
})
}
let release_channel_name = release_channel.dev_name();
let main_db_dir = db_dir.join(Path::new(&format!("0-{}", release_channel_name)));
fn initialize(mut conn: Connection) -> Result<Mutex<Connection>> {
MIGRATIONS.to_latest(&mut conn)?;
let connection = async_iife!({
// Note: This still has a race condition where 1 set of migrations succeeds
// (e.g. (Workspace, Editor)) and another fails (e.g. (Workspace, Terminal))
// This will cause the first connection to have the database taken out
// from under it. This *should* be fine though. The second database failure will
// cause errors in the log and so should be observed by developers while writing
// soon-to-be good migrations. If user databases are corrupted, we toss them out
// and try again from a blank. As long as running all migrations from start to end
// on a blank database is ok, this race condition will never be triggered.
//
// Basically: Don't ever push invalid migrations to stable or everyone will have
// a bad time.
conn.pragma_update(None, "journal_mode", "WAL")?;
conn.pragma_update(None, "synchronous", "NORMAL")?;
conn.pragma_update(None, "foreign_keys", true)?;
conn.pragma_update(None, "case_sensitive_like", true)?;
// If no db folder, create one at 0-{channel}
create_dir_all(&main_db_dir).context("Could not create db directory")?;
let db_path = main_db_dir.join(Path::new(DB_FILE_NAME));
Ok(Mutex::new(conn))
}
pub fn persisting(&self) -> bool {
self.real().and_then(|db| db.path.as_ref()).is_some()
}
pub fn real(&self) -> Option<&RealDb> {
match self {
Db::Real(db) => Some(&db),
_ => None,
// Optimistically open databases in parallel
if !DB_FILE_OPERATIONS.is_locked() {
// Try building a connection
if let Some(connection) = open_main_db(&db_path).await {
return Ok(connection)
};
}
// Take a lock in the failure case so that we move the db once per process instead
// of potentially multiple times from different threads. This shouldn't happen in the
// normal path
let _lock = DB_FILE_OPERATIONS.lock();
if let Some(connection) = open_main_db(&db_path).await {
return Ok(connection)
};
let backup_timestamp = SystemTime::now()
.duration_since(UNIX_EPOCH)
.expect("System clock is set before the unix timestamp, Zed does not support this region of spacetime")
.as_millis();
// If failed, move 0-{channel} to {current unix timestamp}-{channel}
let backup_db_dir = db_dir.join(Path::new(&format!(
"{}-{}",
backup_timestamp,
release_channel_name,
)));
std::fs::rename(&main_db_dir, &backup_db_dir)
.context("Failed clean up corrupted database, panicking.")?;
// Set a static ref with the failed timestamp and error so we can notify the user
{
let mut guard = BACKUP_DB_PATH.write();
*guard = Some(backup_db_dir);
}
// Create a new 0-{channel}
create_dir_all(&main_db_dir).context("Should be able to create the database directory")?;
let db_path = main_db_dir.join(Path::new(DB_FILE_NAME));
// Try again
open_main_db(&db_path).await.context("Could not newly created db")
}).await.log_err();
if let Some(connection) = connection {
return connection;
}
// Set another static ref so that we can escalate the notification
ALL_FILE_DB_FAILED.store(true, Ordering::Release);
// If still failed, create an in-memory db with a known name
open_fallback_db().await
}
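As a usage sketch of `open_db` (the domain type `Workspace` here is hypothetical; the `define_connection!` macro below expands to exactly this call with the re-exported `DB_DIR` and `RELEASE_CHANNEL`):

// Hypothetical direct call; macro-generated statics block on this.
let connection: ThreadSafeConnection<Workspace> =
    smol::block_on(open_db::<Workspace>(&DB_DIR, &RELEASE_CHANNEL));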
impl Drop for Db {
fn drop(&mut self) {
match self {
Db::Real(real_db) => {
let lock = real_db.connection.lock();
async fn open_main_db<M: Migrator>(db_path: &PathBuf) -> Option<ThreadSafeConnection<M>> {
log::info!("Opening main db");
ThreadSafeConnection::<M>::builder(db_path.to_string_lossy().as_ref(), true)
.with_db_initialization_query(DB_INITIALIZE_QUERY)
.with_connection_initialize_query(CONNECTION_INITIALIZE_QUERY)
.build()
.await
.log_err()
}
let _ = lock.pragma_update(None, "analysis_limit", "500");
let _ = lock.pragma_update(None, "optimize", "");
async fn open_fallback_db<M: Migrator>() -> ThreadSafeConnection<M> {
log::info!("Opening fallback db");
ThreadSafeConnection::<M>::builder(FALLBACK_DB_NAME, false)
.with_db_initialization_query(DB_INITIALIZE_QUERY)
.with_connection_initialize_query(CONNECTION_INITIALIZE_QUERY)
.build()
.await
.expect(
"Fallback in memory database failed. Likely initialization queries or migrations have fundamental errors",
)
}
#[cfg(any(test, feature = "test-support"))]
pub async fn open_test_db<M: Migrator>(db_name: &str) -> ThreadSafeConnection<M> {
use sqlez::thread_safe_connection::locking_queue;
ThreadSafeConnection::<M>::builder(db_name, false)
.with_db_initialization_query(DB_INITIALIZE_QUERY)
.with_connection_initialize_query(CONNECTION_INITIALIZE_QUERY)
// Serialize queued writes via a mutex and run them synchronously
.with_write_queue_constructor(locking_queue())
.build()
.await
.unwrap()
}
/// Implements a basic DB wrapper for a given domain
#[macro_export]
macro_rules! define_connection {
(pub static ref $id:ident: $t:ident<()> = $migrations:expr;) => {
pub struct $t($crate::sqlez::thread_safe_connection::ThreadSafeConnection<$t>);
impl ::std::ops::Deref for $t {
type Target = $crate::sqlez::thread_safe_connection::ThreadSafeConnection<$t>;
fn deref(&self) -> &Self::Target {
&self.0
}
Db::Null => {}
}
}
impl $crate::sqlez::domain::Domain for $t {
fn name() -> &'static str {
stringify!($t)
}
fn migrations() -> &'static [&'static str] {
$migrations
}
}
#[cfg(any(test, feature = "test-support"))]
$crate::lazy_static::lazy_static! {
pub static ref $id: $t = $t($crate::smol::block_on($crate::open_test_db(stringify!($id))));
}
#[cfg(not(any(test, feature = "test-support")))]
$crate::lazy_static::lazy_static! {
pub static ref $id: $t = $t($crate::smol::block_on($crate::open_db(&$crate::DB_DIR, &$crate::RELEASE_CHANNEL)));
}
};
(pub static ref $id:ident: $t:ident<$($d:ty),+> = $migrations:expr;) => {
pub struct $t($crate::sqlez::thread_safe_connection::ThreadSafeConnection<( $($d),+, $t )>);
impl ::std::ops::Deref for $t {
type Target = $crate::sqlez::thread_safe_connection::ThreadSafeConnection<($($d),+, $t)>;
fn deref(&self) -> &Self::Target {
&self.0
}
}
impl $crate::sqlez::domain::Domain for $t {
fn name() -> &'static str {
stringify!($t)
}
fn migrations() -> &'static [&'static str] {
$migrations
}
}
#[cfg(any(test, feature = "test-support"))]
$crate::lazy_static::lazy_static! {
pub static ref $id: $t = $t($crate::smol::block_on($crate::open_test_db(stringify!($id))));
}
#[cfg(not(any(test, feature = "test-support")))]
$crate::lazy_static::lazy_static! {
pub static ref $id: $t = $t($crate::smol::block_on($crate::open_db(&$crate::DB_DIR, &$crate::RELEASE_CHANNEL)));
}
};
}
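For reference, a usage sketch of this macro, mirroring the `KEY_VALUE_STORE` definition in the kvp.rs diff further below:

define_connection!(pub static ref KEY_VALUE_STORE: KeyValueStore<()> =
    &[sql!(
        CREATE TABLE IF NOT EXISTS kv_store(
            key TEXT PRIMARY KEY,
            value TEXT NOT NULL
        ) STRICT;
    )];
);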
#[cfg(test)]
mod tests {
use crate::migrations::MIGRATIONS;
use std::{fs, thread};
#[test]
fn test_migrations() {
assert!(MIGRATIONS.validate().is_ok());
use sqlez::{connection::Connection, domain::Domain};
use sqlez_macros::sql;
use tempdir::TempDir;
use crate::{open_db, DB_FILE_NAME};
// Test bad migration panics
#[gpui::test]
#[should_panic]
async fn test_bad_migration_panics() {
enum BadDB {}
impl Domain for BadDB {
fn name() -> &'static str {
"db_tests"
}
fn migrations() -> &'static [&'static str] {
&[
sql!(CREATE TABLE test(value);),
// failure because test already exists
sql!(CREATE TABLE test(value);),
]
}
}
let tempdir = TempDir::new("DbTests").unwrap();
let _bad_db = open_db::<BadDB>(tempdir.path(), &util::channel::ReleaseChannel::Dev).await;
}
/// Test that DB exists but corrupted (causing recreate)
#[gpui::test]
async fn test_db_corruption() {
enum CorruptedDB {}
impl Domain for CorruptedDB {
fn name() -> &'static str {
"db_tests"
}
fn migrations() -> &'static [&'static str] {
&[sql!(CREATE TABLE test(value);)]
}
}
enum GoodDB {}
impl Domain for GoodDB {
fn name() -> &'static str {
"db_tests" //Notice same name
}
fn migrations() -> &'static [&'static str] {
&[sql!(CREATE TABLE test2(value);)] //But different migration
}
}
let tempdir = TempDir::new("DbTests").unwrap();
{
let corrupt_db =
open_db::<CorruptedDB>(tempdir.path(), &util::channel::ReleaseChannel::Dev).await;
assert!(corrupt_db.persistent());
}
let good_db = open_db::<GoodDB>(tempdir.path(), &util::channel::ReleaseChannel::Dev).await;
assert!(
good_db.select_row::<usize>("SELECT * FROM test2").unwrap()()
.unwrap()
.is_none()
);
let mut corrupted_backup_dir = fs::read_dir(tempdir.path())
.unwrap()
.find(|entry| {
!entry
.as_ref()
.unwrap()
.file_name()
.to_str()
.unwrap()
.starts_with("0")
})
.unwrap()
.unwrap()
.path();
corrupted_backup_dir.push(DB_FILE_NAME);
dbg!(&corrupted_backup_dir);
let backup = Connection::open_file(&corrupted_backup_dir.to_string_lossy());
assert!(backup.select_row::<usize>("SELECT * FROM test").unwrap()()
.unwrap()
.is_none());
}
/// Test that DB exists but corrupted (causing recreate)
#[gpui::test]
async fn test_simultaneous_db_corruption() {
enum CorruptedDB {}
impl Domain for CorruptedDB {
fn name() -> &'static str {
"db_tests"
}
fn migrations() -> &'static [&'static str] {
&[sql!(CREATE TABLE test(value);)]
}
}
enum GoodDB {}
impl Domain for GoodDB {
fn name() -> &'static str {
"db_tests" //Notice same name
}
fn migrations() -> &'static [&'static str] {
&[sql!(CREATE TABLE test2(value);)] //But different migration
}
}
let tempdir = TempDir::new("DbTests").unwrap();
{
// Setup the bad database
let corrupt_db =
open_db::<CorruptedDB>(tempdir.path(), &util::channel::ReleaseChannel::Dev).await;
assert!(corrupt_db.persistent());
}
// Try to connect to it a bunch of times at once
let mut guards = vec![];
for _ in 0..10 {
let tmp_path = tempdir.path().to_path_buf();
let guard = thread::spawn(move || {
let good_db = smol::block_on(open_db::<GoodDB>(
tmp_path.as_path(),
&util::channel::ReleaseChannel::Dev,
));
assert!(
good_db.select_row::<usize>("SELECT * FROM test2").unwrap()()
.unwrap()
.is_none()
);
});
guards.push(guard);
}
for guard in guards.into_iter() {
assert!(guard.join().is_ok());
}
}
}

View File

@@ -1,311 +0,0 @@
use std::{ffi::OsStr, fmt::Display, hash::Hash, os::unix::prelude::OsStrExt, path::PathBuf};
use anyhow::Result;
use collections::HashSet;
use rusqlite::{named_params, params};
use super::Db;
pub(crate) const ITEMS_M_1: &str = "
CREATE TABLE items(
id INTEGER PRIMARY KEY,
kind TEXT
) STRICT;
CREATE TABLE item_path(
item_id INTEGER PRIMARY KEY,
path BLOB
) STRICT;
CREATE TABLE item_query(
item_id INTEGER PRIMARY KEY,
query TEXT
) STRICT;
";
#[derive(PartialEq, Eq, Hash, Debug)]
pub enum SerializedItemKind {
Editor,
Terminal,
ProjectSearch,
Diagnostics,
}
impl Display for SerializedItemKind {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
f.write_str(&format!("{:?}", self))
}
}
#[derive(Clone, Debug, PartialEq, Eq, Hash)]
pub enum SerializedItem {
Editor(usize, PathBuf),
Terminal(usize),
ProjectSearch(usize, String),
Diagnostics(usize),
}
impl SerializedItem {
fn kind(&self) -> SerializedItemKind {
match self {
SerializedItem::Editor(_, _) => SerializedItemKind::Editor,
SerializedItem::Terminal(_) => SerializedItemKind::Terminal,
SerializedItem::ProjectSearch(_, _) => SerializedItemKind::ProjectSearch,
SerializedItem::Diagnostics(_) => SerializedItemKind::Diagnostics,
}
}
fn id(&self) -> usize {
match self {
SerializedItem::Editor(id, _)
| SerializedItem::Terminal(id)
| SerializedItem::ProjectSearch(id, _)
| SerializedItem::Diagnostics(id) => *id,
}
}
}
impl Db {
fn write_item(&self, serialized_item: SerializedItem) -> Result<()> {
self.real()
.map(|db| {
let mut lock = db.connection.lock();
let tx = lock.transaction()?;
// Serialize the item
let id = serialized_item.id();
{
let mut stmt = tx.prepare_cached(
"INSERT OR REPLACE INTO items(id, kind) VALUES ((?), (?))",
)?;
dbg!("inserting item");
stmt.execute(params![id, serialized_item.kind().to_string()])?;
}
// Serialize item data
match &serialized_item {
SerializedItem::Editor(_, path) => {
dbg!("inserting path");
let mut stmt = tx.prepare_cached(
"INSERT OR REPLACE INTO item_path(item_id, path) VALUES ((?), (?))",
)?;
let path_bytes = path.as_os_str().as_bytes();
stmt.execute(params![id, path_bytes])?;
}
SerializedItem::ProjectSearch(_, query) => {
dbg!("inserting query");
let mut stmt = tx.prepare_cached(
"INSERT OR REPLACE INTO item_query(item_id, query) VALUES ((?), (?))",
)?;
stmt.execute(params![id, query])?;
}
_ => {}
}
tx.commit()?;
let mut stmt = lock.prepare_cached("SELECT id, kind FROM items")?;
let _ = stmt
.query_map([], |row| {
let zero: usize = row.get(0)?;
let one: String = row.get(1)?;
dbg!(zero, one);
Ok(())
})?
.collect::<Vec<Result<(), _>>>();
Ok(())
})
.unwrap_or(Ok(()))
}
fn delete_item(&self, item_id: usize) -> Result<()> {
self.real()
.map(|db| {
let lock = db.connection.lock();
let mut stmt = lock.prepare_cached(
r#"
DELETE FROM items WHERE id = (:id);
DELETE FROM item_path WHERE id = (:id);
DELETE FROM item_query WHERE id = (:id);
"#,
)?;
stmt.execute(named_params! {":id": item_id})?;
Ok(())
})
.unwrap_or(Ok(()))
}
fn take_items(&self) -> Result<HashSet<SerializedItem>> {
self.real()
.map(|db| {
let mut lock = db.connection.lock();
let tx = lock.transaction()?;
// When working with transactions in rusqlite, need to make this kind of scope
// To make the borrow stuff work correctly. Don't know why, rust is wild.
let result = {
let mut editors_stmt = tx.prepare_cached(
r#"
SELECT items.id, item_path.path
FROM items
LEFT JOIN item_path
ON items.id = item_path.item_id
WHERE items.kind = ?;
"#,
)?;
let editors_iter = editors_stmt.query_map(
[SerializedItemKind::Editor.to_string()],
|row| {
let id: usize = row.get(0)?;
let buf: Vec<u8> = row.get(1)?;
let path: PathBuf = OsStr::from_bytes(&buf).into();
Ok(SerializedItem::Editor(id, path))
},
)?;
let mut terminals_stmt = tx.prepare_cached(
r#"
SELECT items.id
FROM items
WHERE items.kind = ?;
"#,
)?;
let terminals_iter = terminals_stmt.query_map(
[SerializedItemKind::Terminal.to_string()],
|row| {
let id: usize = row.get(0)?;
Ok(SerializedItem::Terminal(id))
},
)?;
let mut search_stmt = tx.prepare_cached(
r#"
SELECT items.id, item_query.query
FROM items
LEFT JOIN item_query
ON items.id = item_query.item_id
WHERE items.kind = ?;
"#,
)?;
let searches_iter = search_stmt.query_map(
[SerializedItemKind::ProjectSearch.to_string()],
|row| {
let id: usize = row.get(0)?;
let query = row.get(1)?;
Ok(SerializedItem::ProjectSearch(id, query))
},
)?;
#[cfg(debug_assertions)]
let tmp =
searches_iter.collect::<Vec<Result<SerializedItem, rusqlite::Error>>>();
#[cfg(debug_assertions)]
debug_assert!(tmp.len() == 0 || tmp.len() == 1);
#[cfg(debug_assertions)]
let searches_iter = tmp.into_iter();
let mut diagnostic_stmt = tx.prepare_cached(
r#"
SELECT items.id
FROM items
WHERE items.kind = ?;
"#,
)?;
let diagnostics_iter = diagnostic_stmt.query_map(
[SerializedItemKind::Diagnostics.to_string()],
|row| {
let id: usize = row.get(0)?;
Ok(SerializedItem::Diagnostics(id))
},
)?;
#[cfg(debug_assertions)]
let tmp =
diagnostics_iter.collect::<Vec<Result<SerializedItem, rusqlite::Error>>>();
#[cfg(debug_assertions)]
debug_assert!(tmp.len() == 0 || tmp.len() == 1);
#[cfg(debug_assertions)]
let diagnostics_iter = tmp.into_iter();
let res = editors_iter
.chain(terminals_iter)
.chain(diagnostics_iter)
.chain(searches_iter)
.collect::<Result<HashSet<SerializedItem>, rusqlite::Error>>()?;
let mut delete_stmt = tx.prepare_cached(
r#"
DELETE FROM items;
DELETE FROM item_path;
DELETE FROM item_query;
"#,
)?;
delete_stmt.execute([])?;
res
};
tx.commit()?;
Ok(result)
})
.unwrap_or(Ok(HashSet::default()))
}
}
#[cfg(test)]
mod test {
use anyhow::Result;
use super::*;
#[test]
fn test_items_round_trip() -> Result<()> {
let db = Db::open_in_memory();
let mut items = vec![
SerializedItem::Editor(0, PathBuf::from("/tmp/test.txt")),
SerializedItem::Terminal(1),
SerializedItem::ProjectSearch(2, "Test query!".to_string()),
SerializedItem::Diagnostics(3),
]
.into_iter()
.collect::<HashSet<_>>();
for item in items.iter() {
dbg!("Inserting... ");
db.write_item(item.clone())?;
}
assert_eq!(items, db.take_items()?);
// Check that it's empty, as expected
assert_eq!(HashSet::default(), db.take_items()?);
for item in items.iter() {
db.write_item(item.clone())?;
}
items.remove(&SerializedItem::ProjectSearch(2, "Test query!".to_string()));
db.delete_item(2)?;
assert_eq!(items, db.take_items()?);
Ok(())
}
}

View File

@@ -1,82 +1,62 @@
use anyhow::Result;
use rusqlite::OptionalExtension;
use sqlez_macros::sql;
use super::Db;
use crate::{define_connection, query};
pub(crate) const KVP_M_1_UP: &str = "
CREATE TABLE kv_store(
key TEXT PRIMARY KEY,
value TEXT NOT NULL
) STRICT;
";
define_connection!(pub static ref KEY_VALUE_STORE: KeyValueStore<()> =
&[sql!(
CREATE TABLE IF NOT EXISTS kv_store(
key TEXT PRIMARY KEY,
value TEXT NOT NULL
) STRICT;
)];
);
impl Db {
pub fn read_kvp(&self, key: &str) -> Result<Option<String>> {
self.real()
.map(|db| {
let lock = db.connection.lock();
let mut stmt = lock.prepare_cached("SELECT value FROM kv_store WHERE key = (?)")?;
Ok(stmt.query_row([key], |row| row.get(0)).optional()?)
})
.unwrap_or(Ok(None))
impl KeyValueStore {
query! {
pub fn read_kvp(key: &str) -> Result<Option<String>> {
SELECT value FROM kv_store WHERE key = (?)
}
}
pub fn write_kvp(&self, key: &str, value: &str) -> Result<()> {
self.real()
.map(|db| {
let lock = db.connection.lock();
let mut stmt = lock.prepare_cached(
"INSERT OR REPLACE INTO kv_store(key, value) VALUES ((?), (?))",
)?;
stmt.execute([key, value])?;
Ok(())
})
.unwrap_or(Ok(()))
query! {
pub async fn write_kvp(key: String, value: String) -> Result<()> {
INSERT OR REPLACE INTO kv_store(key, value) VALUES ((?), (?))
}
}
pub fn delete_kvp(&self, key: &str) -> Result<()> {
self.real()
.map(|db| {
let lock = db.connection.lock();
let mut stmt = lock.prepare_cached("DELETE FROM kv_store WHERE key = (?)")?;
stmt.execute([key])?;
Ok(())
})
.unwrap_or(Ok(()))
query! {
pub async fn delete_kvp(key: String) -> Result<()> {
DELETE FROM kv_store WHERE key = (?)
}
}
}
#[cfg(test)]
mod tests {
use anyhow::Result;
use crate::kvp::KeyValueStore;
use super::*;
#[gpui::test]
async fn test_kvp() {
let db = KeyValueStore(crate::open_test_db("test_kvp").await);
#[test]
fn test_kvp() -> Result<()> {
let db = Db::open_in_memory();
assert_eq!(db.read_kvp("key-1").unwrap(), None);
assert_eq!(db.read_kvp("key-1")?, None);
db.write_kvp("key-1".to_string(), "one".to_string())
.await
.unwrap();
assert_eq!(db.read_kvp("key-1").unwrap(), Some("one".to_string()));
db.write_kvp("key-1", "one")?;
assert_eq!(db.read_kvp("key-1")?, Some("one".to_string()));
db.write_kvp("key-1".to_string(), "one-2".to_string())
.await
.unwrap();
assert_eq!(db.read_kvp("key-1").unwrap(), Some("one-2".to_string()));
db.write_kvp("key-1", "one-2")?;
assert_eq!(db.read_kvp("key-1")?, Some("one-2".to_string()));
db.write_kvp("key-2".to_string(), "two".to_string())
.await
.unwrap();
assert_eq!(db.read_kvp("key-2").unwrap(), Some("two".to_string()));
db.write_kvp("key-2", "two")?;
assert_eq!(db.read_kvp("key-2")?, Some("two".to_string()));
db.delete_kvp("key-1")?;
assert_eq!(db.read_kvp("key-1")?, None);
Ok(())
db.delete_kvp("key-1".to_string()).await.unwrap();
assert_eq!(db.read_kvp("key-1").unwrap(), None);
}
}

View File

@@ -1,15 +0,0 @@
use rusqlite_migration::{Migrations, M};
// use crate::items::ITEMS_M_1;
use crate::kvp::KVP_M_1_UP;
// This must be ordered by development time! Only ever add new migrations to the end!!
// Bad things will probably happen if you don't monotonically edit this vec!!!!
// And no re-ordering ever!!!!!!!!!! The results of these migrations are on the user's
// file system and so everything we do here is locked in _f_o_r_e_v_e_r_.
lazy_static::lazy_static! {
pub static ref MIGRATIONS: Migrations<'static> = Migrations::new(vec![
M::up(KVP_M_1_UP),
// M::up(ITEMS_M_1),
]);
}

crates/db/src/query.rs (new file)
View File

@@ -0,0 +1,314 @@
#[macro_export]
macro_rules! query {
($vis:vis fn $id:ident() -> Result<()> { $($sql:tt)+ }) => {
$vis fn $id(&self) -> $crate::anyhow::Result<()> {
use $crate::anyhow::Context;
let sql_stmt = $crate::sqlez_macros::sql!($($sql)+);
self.exec(sql_stmt)?().context(::std::format!(
"Error in {}, exec failed to execute or parse for: {}",
::std::stringify!($id),
sql_stmt,
))
}
};
($vis:vis async fn $id:ident() -> Result<()> { $($sql:tt)+ }) => {
$vis async fn $id(&self) -> $crate::anyhow::Result<()> {
use $crate::anyhow::Context;
self.write(|connection| {
let sql_stmt = $crate::sqlez_macros::sql!($($sql)+);
connection.exec(sql_stmt)?().context(::std::format!(
"Error in {}, exec failed to execute or parse for: {}",
::std::stringify!($id),
sql_stmt
))
}).await
}
};
($vis:vis fn $id:ident($($arg:ident: $arg_type:ty),+) -> Result<()> { $($sql:tt)+ }) => {
$vis fn $id(&self, $($arg: $arg_type),+) -> $crate::anyhow::Result<()> {
use $crate::anyhow::Context;
let sql_stmt = $crate::sqlez_macros::sql!($($sql)+);
self.exec_bound::<($($arg_type),+)>(sql_stmt)?(($($arg),+))
.context(::std::format!(
"Error in {}, exec_bound failed to execute or parse for: {}",
::std::stringify!($id),
sql_stmt
))
}
};
($vis:vis async fn $id:ident($arg:ident: $arg_type:ty) -> Result<()> { $($sql:tt)+ }) => {
$vis async fn $id(&self, $arg: $arg_type) -> $crate::anyhow::Result<()> {
use $crate::anyhow::Context;
self.write(move |connection| {
let sql_stmt = $crate::sqlez_macros::sql!($($sql)+);
connection.exec_bound::<$arg_type>(sql_stmt)?($arg)
.context(::std::format!(
"Error in {}, exec_bound failed to execute or parse for: {}",
::std::stringify!($id),
sql_stmt
))
}).await
}
};
($vis:vis async fn $id:ident($($arg:ident: $arg_type:ty),+) -> Result<()> { $($sql:tt)+ }) => {
$vis async fn $id(&self, $($arg: $arg_type),+) -> $crate::anyhow::Result<()> {
use $crate::anyhow::Context;
self.write(move |connection| {
let sql_stmt = $crate::sqlez_macros::sql!($($sql)+);
connection.exec_bound::<($($arg_type),+)>(sql_stmt)?(($($arg),+))
.context(::std::format!(
"Error in {}, exec_bound failed to execute or parse for: {}",
::std::stringify!($id),
sql_stmt
))
}).await
}
};
($vis:vis fn $id:ident() -> Result<Vec<$return_type:ty>> { $($sql:tt)+ }) => {
$vis fn $id(&self) -> $crate::anyhow::Result<Vec<$return_type>> {
use $crate::anyhow::Context;
let sql_stmt = $crate::sqlez_macros::sql!($($sql)+);
self.select::<$return_type>(sql_stmt)?()
.context(::std::format!(
"Error in {}, select_row failed to execute or parse for: {}",
::std::stringify!($id),
sql_stmt
))
}
};
($vis:vis async fn $id:ident() -> Result<Vec<$return_type:ty>> { $($sql:tt)+ }) => {
$vis async fn $id(&self) -> $crate::anyhow::Result<Vec<$return_type>> {
use $crate::anyhow::Context;
self.write(|connection| {
let sql_stmt = $crate::sqlez_macros::sql!($($sql)+);
connection.select::<$return_type>(sql_stmt)?()
.context(::std::format!(
"Error in {}, select_row failed to execute or parse for: {}",
::std::stringify!($id),
sql_stmt
))
}).await
}
};
($vis:vis fn $id:ident($($arg:ident: $arg_type:ty),+) -> Result<Vec<$return_type:ty>> { $($sql:tt)+ }) => {
$vis fn $id(&self, $($arg: $arg_type),+) -> $crate::anyhow::Result<Vec<$return_type>> {
use $crate::anyhow::Context;
let sql_stmt = $crate::sqlez_macros::sql!($($sql)+);
self.select_bound::<($($arg_type),+), $return_type>(sql_stmt)?(($($arg),+))
.context(::std::format!(
"Error in {}, exec_bound failed to execute or parse for: {}",
::std::stringify!($id),
sql_stmt
))
}
};
($vis:vis async fn $id:ident($($arg:ident: $arg_type:ty),+) -> Result<Vec<$return_type:ty>> { $($sql:tt)+ }) => {
$vis async fn $id(&self, $($arg: $arg_type),+) -> $crate::anyhow::Result<Vec<$return_type>> {
use $crate::anyhow::Context;
self.write(|connection| {
let sql_stmt = $crate::sqlez_macros::sql!($($sql)+);
connection.select_bound::<($($arg_type),+), $return_type>(sql_stmt)?(($($arg),+))
.context(::std::format!(
"Error in {}, exec_bound failed to execute or parse for: {}",
::std::stringify!($id),
sql_stmt
))
}).await
}
};
($vis:vis fn $id:ident() -> Result<Option<$return_type:ty>> { $($sql:tt)+ }) => {
$vis fn $id(&self) -> $crate::anyhow::Result<Option<$return_type>> {
use $crate::anyhow::Context;
let sql_stmt = $crate::sqlez_macros::sql!($($sql)+);
self.select_row::<$return_type>(sql_stmt)?()
.context(::std::format!(
"Error in {}, select_row failed to execute or parse for: {}",
::std::stringify!($id),
sql_stmt
))
}
};
($vis:vis async fn $id:ident() -> Result<Option<$return_type:ty>> { $($sql:tt)+ }) => {
$vis async fn $id(&self) -> $crate::anyhow::Result<Option<$return_type>> {
use $crate::anyhow::Context;
self.write(|connection| {
let sql_stmt = $crate::sqlez_macros::sql!($($sql)+);
connection.select_row::<$return_type>(sql_stmt)?()
.context(::std::format!(
"Error in {}, select_row failed to execute or parse for: {}",
::std::stringify!($id),
sql_stmt
))
}).await
}
};
($vis:vis fn $id:ident($arg:ident: $arg_type:ty) -> Result<Option<$return_type:ty>> { $($sql:tt)+ }) => {
$vis fn $id(&self, $arg: $arg_type) -> $crate::anyhow::Result<Option<$return_type>> {
use $crate::anyhow::Context;
let sql_stmt = $crate::sqlez_macros::sql!($($sql)+);
self.select_row_bound::<$arg_type, $return_type>(sql_stmt)?($arg)
.context(::std::format!(
"Error in {}, select_row_bound failed to execute or parse for: {}",
::std::stringify!($id),
sql_stmt
))
}
};
($vis:vis fn $id:ident($($arg:ident: $arg_type:ty),+) -> Result<Option<$return_type:ty>> { $($sql:tt)+ }) => {
$vis fn $id(&self, $($arg: $arg_type),+) -> $crate::anyhow::Result<Option<$return_type>> {
use $crate::anyhow::Context;
let sql_stmt = $crate::sqlez_macros::sql!($($sql)+);
self.select_row_bound::<($($arg_type),+), $return_type>(sql_stmt)?(($($arg),+))
.context(::std::format!(
"Error in {}, select_row_bound failed to execute or parse for: {}",
::std::stringify!($id),
sql_stmt
))
}
};
($vis:vis async fn $id:ident($($arg:ident: $arg_type:ty),+) -> Result<Option<$return_type:ty>> { $($sql:tt)+ }) => {
$vis async fn $id(&self, $($arg: $arg_type),+) -> $crate::anyhow::Result<Option<$return_type>> {
use $crate::anyhow::Context;
self.write(move |connection| {
let sql_stmt = $crate::sqlez_macros::sql!($($sql)+);
connection.select_row_bound::<($($arg_type),+), $return_type>(sql_stmt)?(($($arg),+))
.context(::std::format!(
"Error in {}, select_row_bound failed to execute or parse for: {}",
::std::stringify!($id),
sql_stmt
))
}).await
}
};
($vis:vis fn $id:ident() -> Result<$return_type:ty> { $($sql:tt)+ }) => {
$vis fn $id(&self) -> $crate::anyhow::Result<$return_type> {
use $crate::anyhow::Context;
let sql_stmt = $crate::sqlez_macros::sql!($($sql)+);
self.select_row::<$return_type>(indoc! { $sql })?()
.context(::std::format!(
"Error in {}, select_row_bound failed to execute or parse for: {}",
::std::stringify!($id),
sql_stmt
))?
.context(::std::format!(
"Error in {}, select_row_bound expected single row result but found none for: {}",
::std::stringify!($id),
sql_stmt
))
}
};
($vis:vis async fn $id:ident() -> Result<$return_type:ty> { $($sql:tt)+ }) => {
$vis async fn $id(&self) -> $crate::anyhow::Result<$return_type> {
use $crate::anyhow::Context;
self.write(|connection| {
let sql_stmt = $crate::sqlez_macros::sql!($($sql)+);
connection.select_row::<$return_type>(sql_stmt)?()
.context(::std::format!(
"Error in {}, select_row_bound failed to execute or parse for: {}",
::std::stringify!($id),
sql_stmt
))?
.context(::std::format!(
"Error in {}, select_row_bound expected single row result but found none for: {}",
::std::stringify!($id),
sql_stmt
))
}).await
}
};
($vis:vis fn $id:ident($arg:ident: $arg_type:ty) -> Result<$return_type:ty> { $($sql:tt)+ }) => {
pub fn $id(&self, $arg: $arg_type) -> $crate::anyhow::Result<$return_type> {
use $crate::anyhow::Context;
let sql_stmt = $crate::sqlez_macros::sql!($($sql)+);
self.select_row_bound::<$arg_type, $return_type>(sql_stmt)?($arg)
.context(::std::format!(
"Error in {}, select_row_bound failed to execute or parse for: {}",
::std::stringify!($id),
sql_stmt
))?
.context(::std::format!(
"Error in {}, select_row_bound expected single row result but found none for: {}",
::std::stringify!($id),
sql_stmt
))
}
};
($vis:vis fn $id:ident($($arg:ident: $arg_type:ty),+) -> Result<$return_type:ty> { $($sql:tt)+ }) => {
$vis fn $id(&self, $($arg: $arg_type),+) -> $crate::anyhow::Result<$return_type> {
use $crate::anyhow::Context;
let sql_stmt = $crate::sqlez_macros::sql!($($sql)+);
self.select_row_bound::<($($arg_type),+), $return_type>(sql_stmt)?(($($arg),+))
.context(::std::format!(
"Error in {}, select_row_bound failed to execute or parse for: {}",
::std::stringify!($id),
sql_stmt
))?
.context(::std::format!(
"Error in {}, select_row_bound expected single row result but found none for: {}",
::std::stringify!($id),
sql_stmt
))
}
};
($vis:vis async fn $id:ident($($arg:ident: $arg_type:ty),+) -> Result<$return_type:ty> { $($sql:tt)+ }) => {
$vis async fn $id(&self, $($arg: $arg_type),+) -> $crate::anyhow::Result<$return_type> {
use $crate::anyhow::Context;
self.write(|connection| {
let sql_stmt = $crate::sqlez_macros::sql!($($sql)+);
connection.select_row_bound::<($($arg_type),+), $return_type>(sql_stmt)?(($($arg),+))
.context(::std::format!(
"Error in {}, select_row_bound failed to execute or parse for: {}",
::std::stringify!($id),
sql_stmt
))?
.context(::std::format!(
"Error in {}, select_row_bound expected single row result but found none for: {}",
::std::stringify!($id),
sql_stmt
))
}).await
}
};
}
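A usage sketch, mirroring the kvp.rs diff later in this set: inside the `impl` block of a `define_connection!` type, each `query!` invocation expands to a method that runs the embedded SQL.

impl KeyValueStore {
    query! {
        pub fn read_kvp(key: &str) -> Result<Option<String>> {
            SELECT value FROM kv_store WHERE key = (?)
        }
    }

    query! {
        pub async fn delete_kvp(key: String) -> Result<()> {
            DELETE FROM kv_store WHERE key = (?)
        }
    }
}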

View File

@@ -5,8 +5,9 @@ use collections::{BTreeMap, HashSet};
use editor::{
diagnostic_block_renderer,
display_map::{BlockDisposition, BlockId, BlockProperties, BlockStyle, RenderBlock},
highlight_diagnostic_message, Autoscroll, Editor, ExcerptId, ExcerptRange, MultiBuffer,
ToOffset,
highlight_diagnostic_message,
scroll::autoscroll::Autoscroll,
Editor, ExcerptId, ExcerptRange, MultiBuffer, ToOffset,
};
use gpui::{
actions, elements::*, fonts::TextStyle, impl_internal_actions, serde_json, AnyViewHandle,
@@ -29,7 +30,10 @@ use std::{
sync::Arc,
};
use util::TryFutureExt;
use workspace::{ItemHandle as _, ItemNavHistory, Workspace};
use workspace::{
item::{Item, ItemEvent, ItemHandle},
ItemNavHistory, Pane, Workspace,
};
actions!(diagnostics, [Deploy]);
@@ -160,7 +164,7 @@ impl ProjectDiagnosticsEditor {
editor.set_vertical_scroll_margin(5, cx);
editor
});
cx.subscribe(&editor, |_, _, event, cx| cx.emit(*event))
cx.subscribe(&editor, |_, _, event, cx| cx.emit(event.clone()))
.detach();
let project = project_handle.read(cx);
@@ -322,7 +326,7 @@ impl ProjectDiagnosticsEditor {
);
let excerpt_id = excerpts
.insert_excerpts_after(
&prev_excerpt_id,
prev_excerpt_id,
buffer.clone(),
[ExcerptRange {
context: excerpt_start..excerpt_end,
@@ -384,7 +388,7 @@ impl ProjectDiagnosticsEditor {
groups_to_add.push(group_state);
} else if let Some((group_ix, group_state)) = to_remove {
excerpts.remove_excerpts(group_state.excerpts.iter(), excerpts_cx);
excerpts.remove_excerpts(group_state.excerpts.iter().copied(), excerpts_cx);
group_ixs_to_remove.push(group_ix);
blocks_to_remove.extend(group_state.blocks.iter().copied());
} else if let Some((_, group)) = to_keep {
@@ -457,10 +461,15 @@ impl ProjectDiagnosticsEditor {
}
// If any selection has lost its position, move it to start of the next primary diagnostic.
let snapshot = editor.snapshot(cx);
for selection in &mut selections {
if let Some(new_excerpt_id) = new_excerpt_ids_by_selection_id.get(&selection.id) {
let group_ix = match groups.binary_search_by(|probe| {
probe.excerpts.last().unwrap().cmp(new_excerpt_id)
probe
.excerpts
.last()
.unwrap()
.cmp(new_excerpt_id, &snapshot.buffer_snapshot)
}) {
Ok(ix) | Err(ix) => ix,
};
@@ -498,7 +507,7 @@ impl ProjectDiagnosticsEditor {
}
}
impl workspace::Item for ProjectDiagnosticsEditor {
impl Item for ProjectDiagnosticsEditor {
fn tab_content(
&self,
_detail: Option<usize>,
@@ -566,7 +575,16 @@ impl workspace::Item for ProjectDiagnosticsEditor {
unreachable!()
}
fn to_item_events(event: &Self::Event) -> Vec<workspace::ItemEvent> {
fn git_diff_recalc(
&mut self,
project: ModelHandle<Project>,
cx: &mut ViewContext<Self>,
) -> Task<Result<()>> {
self.editor
.update(cx, |editor, cx| editor.git_diff_recalc(project, cx))
}
fn to_item_events(event: &Self::Event) -> Vec<ItemEvent> {
Editor::to_item_events(event)
}
@@ -576,7 +594,11 @@ impl workspace::Item for ProjectDiagnosticsEditor {
});
}
fn clone_on_split(&self, cx: &mut ViewContext<Self>) -> Option<Self>
fn clone_on_split(
&self,
_workspace_id: workspace::WorkspaceId,
cx: &mut ViewContext<Self>,
) -> Option<Self>
where
Self: Sized,
{
@@ -605,6 +627,20 @@ impl workspace::Item for ProjectDiagnosticsEditor {
fn deactivated(&mut self, cx: &mut ViewContext<Self>) {
self.editor.update(cx, |editor, cx| editor.deactivated(cx));
}
fn serialized_item_kind() -> Option<&'static str> {
Some("diagnostics")
}
fn deserialize(
project: ModelHandle<Project>,
workspace: WeakViewHandle<Workspace>,
_workspace_id: workspace::WorkspaceId,
_item_id: workspace::ItemId,
cx: &mut ViewContext<Pane>,
) -> Task<Result<ViewHandle<Self>>> {
Task::ready(Ok(cx.add_view(|cx| Self::new(project, workspace, cx))))
}
}
fn diagnostic_header_renderer(diagnostic: Diagnostic) -> RenderBlock {
@@ -738,7 +774,7 @@ mod tests {
DisplayPoint,
};
use gpui::TestAppContext;
use language::{Diagnostic, DiagnosticEntry, DiagnosticSeverity, PointUtf16};
use language::{Diagnostic, DiagnosticEntry, DiagnosticSeverity, PointUtf16, Unclipped};
use serde_json::json;
use unindent::Unindent as _;
use workspace::AppState;
@@ -776,8 +812,15 @@ mod tests {
.await;
let project = Project::test(app_state.fs.clone(), ["/test".as_ref()], cx).await;
let (_, workspace) =
cx.add_window(|cx| Workspace::new(project.clone(), |_, _| unimplemented!(), cx));
let (_, workspace) = cx.add_window(|cx| {
Workspace::new(
Default::default(),
0,
project.clone(),
|_, _| unimplemented!(),
cx,
)
});
// Create some diagnostics
project.update(cx, |project, cx| {
@@ -788,7 +831,7 @@ mod tests {
None,
vec![
DiagnosticEntry {
range: PointUtf16::new(1, 8)..PointUtf16::new(1, 9),
range: Unclipped(PointUtf16::new(1, 8))..Unclipped(PointUtf16::new(1, 9)),
diagnostic: Diagnostic {
message:
"move occurs because `x` has type `Vec<char>`, which does not implement the `Copy` trait"
@@ -801,7 +844,7 @@ mod tests {
},
},
DiagnosticEntry {
range: PointUtf16::new(2, 8)..PointUtf16::new(2, 9),
range: Unclipped(PointUtf16::new(2, 8))..Unclipped(PointUtf16::new(2, 9)),
diagnostic: Diagnostic {
message:
"move occurs because `y` has type `Vec<char>`, which does not implement the `Copy` trait"
@@ -814,7 +857,7 @@ mod tests {
},
},
DiagnosticEntry {
range: PointUtf16::new(3, 6)..PointUtf16::new(3, 7),
range: Unclipped(PointUtf16::new(3, 6))..Unclipped(PointUtf16::new(3, 7)),
diagnostic: Diagnostic {
message: "value moved here".to_string(),
severity: DiagnosticSeverity::INFORMATION,
@@ -825,7 +868,7 @@ mod tests {
},
},
DiagnosticEntry {
range: PointUtf16::new(4, 6)..PointUtf16::new(4, 7),
range: Unclipped(PointUtf16::new(4, 6))..Unclipped(PointUtf16::new(4, 7)),
diagnostic: Diagnostic {
message: "value moved here".to_string(),
severity: DiagnosticSeverity::INFORMATION,
@@ -836,7 +879,7 @@ mod tests {
},
},
DiagnosticEntry {
range: PointUtf16::new(7, 6)..PointUtf16::new(7, 7),
range: Unclipped(PointUtf16::new(7, 6))..Unclipped(PointUtf16::new(7, 7)),
diagnostic: Diagnostic {
message: "use of moved value\nvalue used here after move".to_string(),
severity: DiagnosticSeverity::ERROR,
@@ -847,7 +890,7 @@ mod tests {
},
},
DiagnosticEntry {
range: PointUtf16::new(8, 6)..PointUtf16::new(8, 7),
range: Unclipped(PointUtf16::new(8, 6))..Unclipped(PointUtf16::new(8, 7)),
diagnostic: Diagnostic {
message: "use of moved value\nvalue used here after move".to_string(),
severity: DiagnosticSeverity::ERROR,
@@ -939,7 +982,7 @@ mod tests {
PathBuf::from("/test/consts.rs"),
None,
vec![DiagnosticEntry {
range: PointUtf16::new(0, 15)..PointUtf16::new(0, 15),
range: Unclipped(PointUtf16::new(0, 15))..Unclipped(PointUtf16::new(0, 15)),
diagnostic: Diagnostic {
message: "mismatched types\nexpected `usize`, found `char`".to_string(),
severity: DiagnosticSeverity::ERROR,
@@ -1040,7 +1083,8 @@ mod tests {
None,
vec![
DiagnosticEntry {
range: PointUtf16::new(0, 15)..PointUtf16::new(0, 15),
range: Unclipped(PointUtf16::new(0, 15))
..Unclipped(PointUtf16::new(0, 15)),
diagnostic: Diagnostic {
message: "mismatched types\nexpected `usize`, found `char`"
.to_string(),
@@ -1052,7 +1096,8 @@ mod tests {
},
},
DiagnosticEntry {
range: PointUtf16::new(1, 15)..PointUtf16::new(1, 15),
range: Unclipped(PointUtf16::new(1, 15))
..Unclipped(PointUtf16::new(1, 15)),
diagnostic: Diagnostic {
message: "unresolved name `c`".to_string(),
severity: DiagnosticSeverity::ERROR,

View File

@@ -7,7 +7,7 @@ use gpui::{
use language::Diagnostic;
use project::Project;
use settings::Settings;
use workspace::StatusItemView;
use workspace::{item::ItemHandle, StatusItemView};
pub struct DiagnosticIndicator {
summary: project::DiagnosticSummary,
@@ -219,7 +219,7 @@ impl View for DiagnosticIndicator {
impl StatusItemView for DiagnosticIndicator {
fn set_active_pane_item(
&mut self,
active_pane_item: Option<&dyn workspace::ItemHandle>,
active_pane_item: Option<&dyn ItemHandle>,
cx: &mut ViewContext<Self>,
) {
if let Some(editor) = active_pane_item.and_then(|item| item.downcast::<Editor>()) {

View File

@@ -9,11 +9,17 @@ use gpui::{
View, WeakViewHandle,
};
const DEAD_ZONE: f32 = 4.;
enum State<V: View> {
Down {
region_offset: Vector2F,
region: RectF,
},
DeadZone {
region_offset: Vector2F,
region: RectF,
},
Dragging {
window_id: usize,
position: Vector2F,
@@ -35,6 +41,13 @@ impl<V: View> Clone for State<V> {
region_offset,
region,
},
&State::DeadZone {
region_offset,
region,
} => State::DeadZone {
region_offset,
region,
},
State::Dragging {
window_id,
position,
@@ -101,7 +114,7 @@ impl<V: View> DragAndDrop<V> {
pub fn drag_started(event: MouseDown, cx: &mut EventContext) {
cx.update_global(|this: &mut Self, _| {
this.currently_dragged = Some(State::Down {
region_offset: event.region.origin() - event.position,
region_offset: event.position - event.region.origin(),
region: event.region,
});
})
@@ -122,7 +135,29 @@ impl<V: View> DragAndDrop<V> {
region_offset,
region,
})
| Some(&State::Dragging {
| Some(&State::DeadZone {
region_offset,
region,
}) => {
if (event.position - (region.origin() + region_offset)).length() > DEAD_ZONE {
this.currently_dragged = Some(State::Dragging {
window_id,
region_offset,
region,
position: event.position,
payload,
render: Rc::new(move |payload, cx| {
render(payload.downcast_ref::<T>().unwrap(), cx)
}),
});
} else {
this.currently_dragged = Some(State::DeadZone {
region_offset,
region,
})
}
}
Some(&State::Dragging {
region_offset,
region,
..
@@ -151,6 +186,7 @@ impl<V: View> DragAndDrop<V> {
.and_then(|state| {
match state {
State::Down { .. } => None,
State::DeadZone { .. } => None,
State::Dragging {
window_id,
region_offset,
@@ -163,7 +199,7 @@ impl<V: View> DragAndDrop<V> {
return None;
}
let position = position + region_offset;
let position = position - region_offset;
Some(
Overlay::new(
MouseEventHandler::<DraggedElementHandler>::new(0, cx, |_, cx| {
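A minimal sketch of the corrected drag math above (the `down` and `moved` event names are illustrative): `region_offset` now stores the pointer's offset from the region origin at mouse-down, the drag is promoted out of the dead zone only once the pointer travels more than `DEAD_ZONE` (4 px), and rendering subtracts the offset so the grab point stays under the cursor.

// Illustrative only; `down` and `moved` stand in for the mouse events.
let region_offset = down.position - region.origin(); // grab offset
let travelled = (moved.position - (region.origin() + region_offset)).length();
if travelled > DEAD_ZONE {
    let render_origin = moved.position - region_offset; // grab point stays fixed
}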

View File

@@ -23,6 +23,7 @@ test-support = [
drag_and_drop = { path = "../drag_and_drop" }
text = { path = "../text" }
clock = { path = "../clock" }
db = { path = "../db" }
collections = { path = "../collections" }
context_menu = { path = "../context_menu" }
fuzzy = { path = "../fuzzy" }
@@ -37,6 +38,7 @@ snippet = { path = "../snippet" }
sum_tree = { path = "../sum_tree" }
theme = { path = "../theme" }
util = { path = "../util" }
sqlez = { path = "../sqlez" }
workspace = { path = "../workspace" }
aho-corasick = "0.7"
anyhow = "1.0"

View File

@@ -2,7 +2,7 @@ use super::{
wrap_map::{self, WrapEdit, WrapPoint, WrapSnapshot},
TextHighlights,
};
use crate::{Anchor, ExcerptRange, ToPoint as _};
use crate::{Anchor, ExcerptId, ExcerptRange, ToPoint as _};
use collections::{Bound, HashMap, HashSet};
use gpui::{ElementBox, RenderContext};
use language::{BufferSnapshot, Chunk, Patch, Point};
@@ -107,7 +107,7 @@ struct Transform {
pub enum TransformBlock {
Custom(Arc<Block>),
ExcerptHeader {
key: usize,
id: ExcerptId,
buffer: BufferSnapshot,
range: ExcerptRange<text::Anchor>,
height: u8,
@@ -371,7 +371,7 @@ impl BlockMap {
.make_wrap_point(Point::new(excerpt_boundary.row, 0), Bias::Left)
.row(),
TransformBlock::ExcerptHeader {
key: excerpt_boundary.key,
id: excerpt_boundary.id,
buffer: excerpt_boundary.buffer,
range: excerpt_boundary.range,
height: if excerpt_boundary.starts_new_buffer {

File diff suppressed because it is too large.

View File

@@ -1,8 +1,7 @@
use std::{cell::RefCell, rc::Rc, time::Instant};
use drag_and_drop::DragAndDrop;
use futures::StreamExt;
use indoc::indoc;
use std::{cell::RefCell, rc::Rc, time::Instant};
use unindent::Unindent;
use super::*;
@@ -12,7 +11,7 @@ use crate::test::{
};
use gpui::{
executor::Deterministic,
geometry::rect::RectF,
geometry::{rect::RectF, vector::vec2f},
platform::{WindowBounds, WindowOptions},
};
use language::{FakeLspAdapter, LanguageConfig, LanguageRegistry, Point};
@@ -22,7 +21,10 @@ use util::{
assert_set_eq,
test::{marked_text_ranges, marked_text_ranges_by, sample_text, TextRangeMarker},
};
use workspace::{FollowableItem, ItemHandle, NavigationEntry, Pane};
use workspace::{
item::{FollowableItem, ItemHandle},
NavigationEntry, Pane, ViewId,
};
#[gpui::test]
fn test_edit_events(cx: &mut MutableAppContext) {
@@ -38,7 +40,7 @@ fn test_edit_events(cx: &mut MutableAppContext) {
event,
Event::Edited | Event::BufferEdited | Event::DirtyChanged
) {
events.borrow_mut().push(("editor1", *event));
events.borrow_mut().push(("editor1", event.clone()));
}
})
.detach();
@@ -53,7 +55,7 @@ fn test_edit_events(cx: &mut MutableAppContext) {
event,
Event::Edited | Event::BufferEdited | Event::DirtyChanged
) {
events.borrow_mut().push(("editor2", *event));
events.borrow_mut().push(("editor2", event.clone()));
}
})
.detach();
@@ -475,7 +477,7 @@ fn test_clone(cx: &mut gpui::MutableAppContext) {
fn test_navigation_history(cx: &mut gpui::MutableAppContext) {
cx.set_global(Settings::test(cx));
cx.set_global(DragAndDrop::<Workspace>::default());
use workspace::Item;
use workspace::item::Item;
let (_, pane) = cx.add_window(Default::default(), |cx| Pane::new(None, cx));
let buffer = MultiBuffer::build_simple(&sample_text(300, 5, 'a'), cx);
@@ -541,31 +543,30 @@ fn test_navigation_history(cx: &mut gpui::MutableAppContext) {
// Set scroll position to check later
editor.set_scroll_position(Vector2F::new(5.5, 5.5), cx);
let original_scroll_position = editor.scroll_position;
let original_scroll_top_anchor = editor.scroll_top_anchor.clone();
let original_scroll_position = editor.scroll_manager.anchor();
// Jump to the end of the document and adjust scroll
editor.move_to_end(&MoveToEnd, cx);
editor.set_scroll_position(Vector2F::new(-2.5, -0.5), cx);
assert_ne!(editor.scroll_position, original_scroll_position);
assert_ne!(editor.scroll_top_anchor, original_scroll_top_anchor);
assert_ne!(editor.scroll_manager.anchor(), original_scroll_position);
let nav_entry = pop_history(&mut editor, cx).unwrap();
editor.navigate(nav_entry.data.unwrap(), cx);
assert_eq!(editor.scroll_position, original_scroll_position);
assert_eq!(editor.scroll_top_anchor, original_scroll_top_anchor);
assert_eq!(editor.scroll_manager.anchor(), original_scroll_position);
// Ensure we don't panic when navigation data contains invalid anchors *and* points.
let mut invalid_anchor = editor.scroll_top_anchor.clone();
let mut invalid_anchor = editor.scroll_manager.anchor().top_anchor;
invalid_anchor.text_anchor.buffer_id = Some(999);
let invalid_point = Point::new(9999, 0);
editor.navigate(
Box::new(NavigationData {
cursor_anchor: invalid_anchor.clone(),
cursor_anchor: invalid_anchor,
cursor_position: invalid_point,
scroll_top_anchor: invalid_anchor,
scroll_anchor: ScrollAnchor {
top_anchor: invalid_anchor,
offset: Default::default(),
},
scroll_top_row: invalid_point.row,
scroll_position: Default::default(),
}),
cx,
);
@@ -4718,9 +4719,7 @@ fn test_refresh_selections(cx: &mut gpui::MutableAppContext) {
// Refreshing selections is a no-op when excerpts haven't changed.
editor.update(cx, |editor, cx| {
editor.change_selections(None, cx, |s| {
s.refresh();
});
editor.change_selections(None, cx, |s| s.refresh());
assert_eq!(
editor.selections.ranges(cx),
[
@@ -4731,7 +4730,7 @@ fn test_refresh_selections(cx: &mut gpui::MutableAppContext) {
});
multibuffer.update(cx, |multibuffer, cx| {
multibuffer.remove_excerpts([&excerpt1_id.unwrap()], cx);
multibuffer.remove_excerpts([excerpt1_id.unwrap()], cx);
});
editor.update(cx, |editor, cx| {
// Removing an excerpt causes the first selection to become degenerate.
@@ -4745,9 +4744,7 @@ fn test_refresh_selections(cx: &mut gpui::MutableAppContext) {
// Refreshing selections will relocate the first selection to the original buffer
// location.
editor.change_selections(None, cx, |s| {
s.refresh();
});
editor.change_selections(None, cx, |s| s.refresh());
assert_eq!(
editor.selections.ranges(cx),
[
@@ -4801,7 +4798,7 @@ fn test_refresh_selections_while_selecting_with_mouse(cx: &mut gpui::MutableAppC
});
multibuffer.update(cx, |multibuffer, cx| {
multibuffer.remove_excerpts([&excerpt1_id.unwrap()], cx);
multibuffer.remove_excerpts([excerpt1_id.unwrap()], cx);
});
editor.update(cx, |editor, cx| {
assert_eq!(
@@ -4810,9 +4807,7 @@ fn test_refresh_selections_while_selecting_with_mouse(cx: &mut gpui::MutableAppC
);
// Ensure we don't panic when selections are refreshed and that the pending selection is finalized.
editor.change_selections(None, cx, |s| {
s.refresh();
});
editor.change_selections(None, cx, |s| s.refresh());
assert_eq!(
editor.selections.ranges(cx),
[Point::new(0, 3)..Point::new(0, 3)]
@@ -4973,23 +4968,178 @@ fn test_highlighted_ranges(cx: &mut gpui::MutableAppContext) {
}
#[gpui::test]
fn test_following(cx: &mut gpui::MutableAppContext) {
let buffer = MultiBuffer::build_simple(&sample_text(16, 8, 'a'), cx);
async fn test_following(cx: &mut gpui::TestAppContext) {
Settings::test_async(cx);
let fs = FakeFs::new(cx.background());
let project = Project::test(fs, ["/file.rs".as_ref()], cx).await;
cx.set_global(Settings::test(cx));
let (_, leader) = cx.add_window(Default::default(), |cx| build_editor(buffer.clone(), cx));
let (_, follower) = cx.add_window(
WindowOptions {
bounds: WindowBounds::Fixed(RectF::from_points(vec2f(0., 0.), vec2f(10., 80.))),
..Default::default()
},
|cx| build_editor(buffer.clone(), cx),
);
let buffer = project.update(cx, |project, cx| {
let buffer = project
.create_buffer(&sample_text(16, 8, 'a'), None, cx)
.unwrap();
cx.add_model(|cx| MultiBuffer::singleton(buffer, cx))
});
let (_, leader) = cx.add_window(|cx| build_editor(buffer.clone(), cx));
let (_, follower) = cx.update(|cx| {
cx.add_window(
WindowOptions {
bounds: WindowBounds::Fixed(RectF::from_points(vec2f(0., 0.), vec2f(10., 80.))),
..Default::default()
},
|cx| build_editor(buffer.clone(), cx),
)
});
let is_still_following = Rc::new(RefCell::new(true));
let pending_update = Rc::new(RefCell::new(None));
follower.update(cx, {
let update = pending_update.clone();
let is_still_following = is_still_following.clone();
|_, cx| {
cx.subscribe(&leader, move |_, leader, event, cx| {
leader
.read(cx)
.add_event_to_update_proto(event, &mut *update.borrow_mut(), cx);
})
.detach();
cx.subscribe(&follower, move |_, _, event, cx| {
if Editor::should_unfollow_on_event(event, cx) {
*is_still_following.borrow_mut() = false;
}
})
.detach();
}
});
// Update the selections only
leader.update(cx, |leader, cx| {
leader.change_selections(None, cx, |s| s.select_ranges([1..1]));
});
follower
.update(cx, |follower, cx| {
follower.apply_update_proto(&project, pending_update.borrow_mut().take().unwrap(), cx)
})
.await
.unwrap();
follower.read_with(cx, |follower, cx| {
assert_eq!(follower.selections.ranges(cx), vec![1..1]);
});
assert_eq!(*is_still_following.borrow(), true);
// Update the scroll position only
leader.update(cx, |leader, cx| {
leader.set_scroll_position(vec2f(1.5, 3.5), cx);
});
follower
.update(cx, |follower, cx| {
follower.apply_update_proto(&project, pending_update.borrow_mut().take().unwrap(), cx)
})
.await
.unwrap();
assert_eq!(
follower.update(cx, |follower, cx| follower.scroll_position(cx)),
vec2f(1.5, 3.5)
);
assert_eq!(*is_still_following.borrow(), true);
// Update the selections and scroll position. The follower's scroll position is updated
// via autoscroll, not via the leader's exact scroll position.
leader.update(cx, |leader, cx| {
leader.change_selections(None, cx, |s| s.select_ranges([0..0]));
leader.request_autoscroll(Autoscroll::newest(), cx);
leader.set_scroll_position(vec2f(1.5, 3.5), cx);
});
follower
.update(cx, |follower, cx| {
follower.apply_update_proto(&project, pending_update.borrow_mut().take().unwrap(), cx)
})
.await
.unwrap();
follower.update(cx, |follower, cx| {
assert_eq!(follower.scroll_position(cx), vec2f(1.5, 0.0));
assert_eq!(follower.selections.ranges(cx), vec![0..0]);
});
assert_eq!(*is_still_following.borrow(), true);
// Creating a pending selection that precedes another selection
leader.update(cx, |leader, cx| {
leader.change_selections(None, cx, |s| s.select_ranges([1..1]));
leader.begin_selection(DisplayPoint::new(0, 0), true, 1, cx);
});
follower
.update(cx, |follower, cx| {
follower.apply_update_proto(&project, pending_update.borrow_mut().take().unwrap(), cx)
})
.await
.unwrap();
follower.read_with(cx, |follower, cx| {
assert_eq!(follower.selections.ranges(cx), vec![0..0, 1..1]);
});
assert_eq!(*is_still_following.borrow(), true);
// Extend the pending selection so that it surrounds another selection
leader.update(cx, |leader, cx| {
leader.extend_selection(DisplayPoint::new(0, 2), 1, cx);
});
follower
.update(cx, |follower, cx| {
follower.apply_update_proto(&project, pending_update.borrow_mut().take().unwrap(), cx)
})
.await
.unwrap();
follower.read_with(cx, |follower, cx| {
assert_eq!(follower.selections.ranges(cx), vec![0..2]);
});
// Scrolling locally breaks the follow
follower.update(cx, |follower, cx| {
let top_anchor = follower.buffer().read(cx).read(cx).anchor_after(0);
follower.set_scroll_anchor(
ScrollAnchor {
top_anchor,
offset: vec2f(0.0, 0.5),
},
cx,
);
});
assert_eq!(*is_still_following.borrow(), false);
}
#[gpui::test]
async fn test_following_with_multiple_excerpts(cx: &mut gpui::TestAppContext) {
Settings::test_async(cx);
let fs = FakeFs::new(cx.background());
let project = Project::test(fs, ["/file.rs".as_ref()], cx).await;
let (_, pane) = cx.add_window(|cx| Pane::new(None, cx));
let leader = pane.update(cx, |_, cx| {
let multibuffer = cx.add_model(|_| MultiBuffer::new(0));
cx.add_view(|cx| build_editor(multibuffer.clone(), cx))
});
// Start following the editor when it has no excerpts.
let mut state_message = leader.update(cx, |leader, cx| leader.to_state_proto(cx));
let follower_1 = cx
.update(|cx| {
Editor::from_state_proto(
pane.clone(),
project.clone(),
ViewId {
creator: Default::default(),
id: 0,
},
&mut state_message,
cx,
)
})
.unwrap()
.await
.unwrap();
let update_message = Rc::new(RefCell::new(None));
follower_1.update(cx, {
let update = update_message.clone();
|_, cx| {
cx.subscribe(&leader, move |_, leader, event, cx| {
leader
@@ -5000,69 +5150,119 @@ fn test_following(cx: &mut gpui::MutableAppContext) {
}
});
// Update the selections only
leader.update(cx, |leader, cx| {
leader.change_selections(None, cx, |s| s.select_ranges([1..1]));
let (buffer_1, buffer_2) = project.update(cx, |project, cx| {
(
project
.create_buffer("abc\ndef\nghi\njkl\n", None, cx)
.unwrap(),
project
.create_buffer("mno\npqr\nstu\nvwx\n", None, cx)
.unwrap(),
)
});
follower.update(cx, |follower, cx| {
follower
.apply_update_proto(pending_update.borrow_mut().take().unwrap(), cx)
.unwrap();
});
assert_eq!(follower.read(cx).selections.ranges(cx), vec![1..1]);
// Update the scroll position only
// Insert some excerpts.
leader.update(cx, |leader, cx| {
leader.set_scroll_position(vec2f(1.5, 3.5), cx);
});
follower.update(cx, |follower, cx| {
follower
.apply_update_proto(pending_update.borrow_mut().take().unwrap(), cx)
.unwrap();
leader.buffer.update(cx, |multibuffer, cx| {
let excerpt_ids = multibuffer.push_excerpts(
buffer_1.clone(),
[
ExcerptRange {
context: 1..6,
primary: None,
},
ExcerptRange {
context: 12..15,
primary: None,
},
ExcerptRange {
context: 0..3,
primary: None,
},
],
cx,
);
multibuffer.insert_excerpts_after(
excerpt_ids[0],
buffer_2.clone(),
[
ExcerptRange {
context: 8..12,
primary: None,
},
ExcerptRange {
context: 0..6,
primary: None,
},
],
cx,
);
});
});
// Apply the update of adding the excerpts.
follower_1
.update(cx, |follower, cx| {
follower.apply_update_proto(&project, update_message.borrow().clone().unwrap(), cx)
})
.await
.unwrap();
assert_eq!(
follower.update(cx, |follower, cx| follower.scroll_position(cx)),
vec2f(1.5, 3.5)
follower_1.read_with(cx, Editor::text),
leader.read_with(cx, Editor::text)
);
update_message.borrow_mut().take();
// Start following separately after it already has excerpts.
let mut state_message = leader.update(cx, |leader, cx| leader.to_state_proto(cx));
let follower_2 = cx
.update(|cx| {
Editor::from_state_proto(
pane.clone(),
project.clone(),
ViewId {
creator: Default::default(),
id: 0,
},
&mut state_message,
cx,
)
})
.unwrap()
.await
.unwrap();
assert_eq!(
follower_2.read_with(cx, Editor::text),
leader.read_with(cx, Editor::text)
);
// Update the selections and scroll position
// Remove some excerpts.
leader.update(cx, |leader, cx| {
leader.change_selections(None, cx, |s| s.select_ranges([0..0]));
leader.request_autoscroll(Autoscroll::newest(), cx);
leader.set_scroll_position(vec2f(1.5, 3.5), cx);
leader.buffer.update(cx, |multibuffer, cx| {
let excerpt_ids = multibuffer.excerpt_ids();
multibuffer.remove_excerpts([excerpt_ids[1], excerpt_ids[2]], cx);
multibuffer.remove_excerpts([excerpt_ids[0]], cx);
});
});
follower.update(cx, |follower, cx| {
let initial_scroll_position = follower.scroll_position(cx);
follower
.apply_update_proto(pending_update.borrow_mut().take().unwrap(), cx)
.unwrap();
assert_eq!(follower.scroll_position(cx), initial_scroll_position);
assert!(follower.autoscroll_request.is_some());
});
assert_eq!(follower.read(cx).selections.ranges(cx), vec![0..0]);
// Creating a pending selection that precedes another selection
leader.update(cx, |leader, cx| {
leader.change_selections(None, cx, |s| s.select_ranges([1..1]));
leader.begin_selection(DisplayPoint::new(0, 0), true, 1, cx);
});
follower.update(cx, |follower, cx| {
follower
.apply_update_proto(pending_update.borrow_mut().take().unwrap(), cx)
.unwrap();
});
assert_eq!(follower.read(cx).selections.ranges(cx), vec![0..0, 1..1]);
// Extend the pending selection so that it surrounds another selection
leader.update(cx, |leader, cx| {
leader.extend_selection(DisplayPoint::new(0, 2), 1, cx);
});
follower.update(cx, |follower, cx| {
follower
.apply_update_proto(pending_update.borrow_mut().take().unwrap(), cx)
.unwrap();
});
assert_eq!(follower.read(cx).selections.ranges(cx), vec![0..2]);
// Apply the update of removing the excerpts.
follower_1
.update(cx, |follower, cx| {
follower.apply_update_proto(&project, update_message.borrow().clone().unwrap(), cx)
})
.await
.unwrap();
follower_2
.update(cx, |follower, cx| {
follower.apply_update_proto(&project, update_message.borrow().clone().unwrap(), cx)
})
.await
.unwrap();
update_message.borrow_mut().take();
assert_eq!(
follower_1.read_with(cx, Editor::text),
leader.read_with(cx, Editor::text)
);
}
#[test]


@@ -1,7 +1,7 @@
use super::{
display_map::{BlockContext, ToDisplayPoint},
Anchor, DisplayPoint, Editor, EditorMode, EditorSnapshot, Scroll, Select, SelectPhase,
SoftWrap, ToPoint, MAX_LINE_LEN,
Anchor, DisplayPoint, Editor, EditorMode, EditorSnapshot, Select, SelectPhase, SoftWrap,
ToPoint, MAX_LINE_LEN,
};
use crate::{
display_map::{BlockStyle, DisplaySnapshot, TransformBlock},
@@ -13,6 +13,7 @@ use crate::{
GoToFetchedDefinition, GoToFetchedTypeDefinition, UpdateGoToDefinitionLink,
},
mouse_context_menu::DeployMouseContextMenu,
scroll::actions::Scroll,
EditorStyle,
};
use clock::ReplicaId;
@@ -955,7 +956,7 @@ impl EditorElement {
move |_, cx| {
if let Some(view) = view.upgrade(cx.deref_mut()) {
view.update(cx.deref_mut(), |view, cx| {
view.make_scrollbar_visible(cx);
view.scroll_manager.show_scrollbar(cx);
});
}
}
@@ -977,7 +978,7 @@ impl EditorElement {
position.set_y(top_row as f32);
view.set_scroll_position(position, cx);
} else {
view.make_scrollbar_visible(cx);
view.scroll_manager.show_scrollbar(cx);
}
});
}
@@ -1298,7 +1299,7 @@ impl EditorElement {
};
let tooltip_style = cx.global::<Settings>().theme.tooltip.clone();
let scroll_x = snapshot.scroll_position.x();
let scroll_x = snapshot.scroll_anchor.offset.x();
let (fixed_blocks, non_fixed_blocks) = snapshot
.blocks_in_range(rows.clone())
.partition::<Vec<_>, _>(|(_, block)| match block {
@@ -1334,12 +1335,13 @@ impl EditorElement {
})
}
TransformBlock::ExcerptHeader {
key,
id,
buffer,
range,
starts_new_buffer,
..
} => {
let id = *id;
let jump_icon = project::File::from_dyn(buffer.file()).map(|file| {
let jump_position = range
.primary
@@ -1356,7 +1358,7 @@ impl EditorElement {
enum JumpIcon {}
cx.render(&editor, |_, cx| {
MouseEventHandler::<JumpIcon>::new(*key, cx, |state, _| {
MouseEventHandler::<JumpIcon>::new(id.into(), cx, |state, _| {
let style = style.jump_icon.style_for(state, false);
Svg::new("icons/arrow_up_right_8.svg")
.with_color(style.color)
@@ -1375,7 +1377,7 @@ impl EditorElement {
cx.dispatch_action(jump_action.clone())
})
.with_tooltip::<JumpIcon, _>(
*key,
id.into(),
"Jump to Buffer".to_string(),
Some(Box::new(crate::OpenExcerpts)),
tooltip_style.clone(),
@@ -1606,16 +1608,13 @@ impl Element for EditorElement {
highlighted_rows = view.highlighted_rows();
let theme = cx.global::<Settings>().theme.as_ref();
highlighted_ranges = view.background_highlights_in_range(
start_anchor.clone()..end_anchor.clone(),
&display_map,
theme,
);
highlighted_ranges =
view.background_highlights_in_range(start_anchor..end_anchor, &display_map, theme);
let mut remote_selections = HashMap::default();
for (replica_id, line_mode, cursor_shape, selection) in display_map
.buffer_snapshot
.remote_selections_in_range(&(start_anchor.clone()..end_anchor.clone()))
.remote_selections_in_range(&(start_anchor..end_anchor))
{
// The local selections match the leader's selections.
if Some(replica_id) == view.leader_replica_id {
@@ -1672,7 +1671,7 @@ impl Element for EditorElement {
));
}
show_scrollbars = view.show_scrollbars();
show_scrollbars = view.scroll_manager.scrollbars_visible();
include_root = view
.project
.as_ref()
@@ -1727,7 +1726,7 @@ impl Element for EditorElement {
);
self.update_view(cx.app, |view, cx| {
let clamped = view.clamp_scroll_left(scroll_max.x());
let clamped = view.scroll_manager.clamp_scroll_left(scroll_max.x());
let autoscrolled = if autoscroll_horizontally {
view.autoscroll_horizontally(


@@ -221,7 +221,7 @@ fn show_hover(
start..end
} else {
anchor.clone()..anchor.clone()
anchor..anchor
};
Some(InfoPopover {


@@ -1,14 +1,18 @@
use crate::{
display_map::ToDisplayPoint, link_go_to_definition::hide_link_definition,
movement::surrounding_word, Anchor, Autoscroll, Editor, Event, ExcerptId, MultiBuffer,
MultiBufferSnapshot, NavigationData, ToPoint as _, FORMAT_TIMEOUT,
movement::surrounding_word, persistence::DB, scroll::ScrollAnchor, Anchor, Autoscroll, Editor,
Event, ExcerptId, ExcerptRange, MultiBuffer, MultiBufferSnapshot, NavigationData, ToPoint as _,
FORMAT_TIMEOUT,
};
use anyhow::{anyhow, Result};
use anyhow::{anyhow, Context, Result};
use collections::HashSet;
use futures::future::try_join_all;
use futures::FutureExt;
use gpui::{
elements::*, geometry::vector::vec2f, AppContext, Entity, ModelHandle, MutableAppContext,
RenderContext, Subscription, Task, View, ViewContext, ViewHandle,
RenderContext, Subscription, Task, View, ViewContext, ViewHandle, WeakViewHandle,
};
use language::proto::serialize_anchor as serialize_text_anchor;
use language::{Bias, Buffer, File as _, OffsetRangeExt, Point, SelectionGoal};
use project::{File, FormatTrigger, Project, ProjectEntryId, ProjectPath};
use rpc::proto::{self, update_view};
@@ -18,88 +22,140 @@ use std::{
borrow::Cow,
cmp::{self, Ordering},
fmt::Write,
iter,
ops::Range,
path::{Path, PathBuf},
};
use text::Selection;
use util::TryFutureExt;
use util::{ResultExt, TryFutureExt};
use workspace::item::FollowableItemHandle;
use workspace::{
item::{FollowableItem, Item, ItemEvent, ItemHandle, ProjectItem},
searchable::{Direction, SearchEvent, SearchableItem, SearchableItemHandle},
FollowableItem, Item, ItemEvent, ItemHandle, ItemNavHistory, ProjectItem, StatusItemView,
ToolbarItemLocation,
ItemId, ItemNavHistory, Pane, StatusItemView, ToolbarItemLocation, ViewId, Workspace,
WorkspaceId,
};
pub const MAX_TAB_TITLE_LEN: usize = 24;
impl FollowableItem for Editor {
fn remote_id(&self) -> Option<ViewId> {
self.remote_id
}
fn from_state_proto(
pane: ViewHandle<workspace::Pane>,
project: ModelHandle<Project>,
remote_id: ViewId,
state: &mut Option<proto::view::Variant>,
cx: &mut MutableAppContext,
) -> Option<Task<Result<ViewHandle<Self>>>> {
let state = if matches!(state, Some(proto::view::Variant::Editor(_))) {
if let Some(proto::view::Variant::Editor(state)) = state.take() {
state
} else {
unreachable!()
}
} else {
return None;
};
let Some(proto::view::Variant::Editor(_)) = state else { return None };
let Some(proto::view::Variant::Editor(state)) = state.take() else { unreachable!() };
let buffer = project.update(cx, |project, cx| {
project.open_buffer_by_id(state.buffer_id, cx)
let client = project.read(cx).client();
let replica_id = project.read(cx).replica_id();
let buffer_ids = state
.excerpts
.iter()
.map(|excerpt| excerpt.buffer_id)
.collect::<HashSet<_>>();
let buffers = project.update(cx, |project, cx| {
buffer_ids
.iter()
.map(|id| project.open_buffer_by_id(*id, cx))
.collect::<Vec<_>>()
});
Some(cx.spawn(|mut cx| async move {
let buffer = buffer.await?;
let editor = pane
.read_with(&cx, |pane, cx| {
pane.items_of_type::<Self>().find(|editor| {
editor.read(cx).buffer.read(cx).as_singleton().as_ref() == Some(&buffer)
})
let mut buffers = futures::future::try_join_all(buffers).await?;
let editor = pane.read_with(&cx, |pane, cx| {
let mut editors = pane.items_of_type::<Self>();
editors.find(|editor| {
editor.remote_id(&client, cx) == Some(remote_id)
|| state.singleton
&& buffers.len() == 1
&& editor.read(cx).buffer.read(cx).as_singleton().as_ref()
== Some(&buffers[0])
})
.unwrap_or_else(|| {
pane.update(&mut cx, |_, cx| {
cx.add_view(|cx| Editor::for_buffer(buffer, Some(project), cx))
})
});
});
let editor = editor.unwrap_or_else(|| {
pane.update(&mut cx, |_, cx| {
let multibuffer = cx.add_model(|cx| {
let mut multibuffer;
if state.singleton && buffers.len() == 1 {
multibuffer = MultiBuffer::singleton(buffers.pop().unwrap(), cx)
} else {
multibuffer = MultiBuffer::new(replica_id);
let mut excerpts = state.excerpts.into_iter().peekable();
while let Some(excerpt) = excerpts.peek() {
let buffer_id = excerpt.buffer_id;
let buffer_excerpts = iter::from_fn(|| {
let excerpt = excerpts.peek()?;
(excerpt.buffer_id == buffer_id)
.then(|| excerpts.next().unwrap())
});
let buffer =
buffers.iter().find(|b| b.read(cx).remote_id() == buffer_id);
if let Some(buffer) = buffer {
multibuffer.push_excerpts(
buffer.clone(),
buffer_excerpts.filter_map(deserialize_excerpt_range),
cx,
);
}
}
};
if let Some(title) = &state.title {
multibuffer = multibuffer.with_title(title.clone())
}
multibuffer
});
cx.add_view(|cx| Editor::for_multibuffer(multibuffer, Some(project), cx))
})
});
editor.update(&mut cx, |editor, cx| {
let excerpt_id;
let buffer_id;
{
let buffer = editor.buffer.read(cx).read(cx);
let singleton = buffer.as_singleton().unwrap();
excerpt_id = singleton.0.clone();
buffer_id = singleton.1;
}
editor.remote_id = Some(remote_id);
let buffer = editor.buffer.read(cx).read(cx);
let selections = state
.selections
.into_iter()
.map(|selection| {
deserialize_selection(&excerpt_id, buffer_id, selection)
deserialize_selection(&buffer, selection)
.ok_or_else(|| anyhow!("invalid selection"))
})
.collect::<Result<Vec<_>>>()?;
if !selections.is_empty() {
editor.set_selections_from_remote(selections, cx);
let pending_selection = state
.pending_selection
.map(|selection| deserialize_selection(&buffer, selection))
.flatten();
let scroll_top_anchor = state
.scroll_top_anchor
.and_then(|anchor| deserialize_anchor(&buffer, anchor));
drop(buffer);
if !selections.is_empty() || pending_selection.is_some() {
editor.set_selections_from_remote(selections, pending_selection, cx);
}
if let Some(anchor) = state.scroll_top_anchor {
editor.set_scroll_top_anchor(
Anchor {
buffer_id: Some(state.buffer_id as usize),
excerpt_id,
text_anchor: language::proto::deserialize_anchor(anchor)
.ok_or_else(|| anyhow!("invalid scroll top"))?,
if let Some(scroll_top_anchor) = scroll_top_anchor {
editor.set_scroll_anchor_remote(
ScrollAnchor {
top_anchor: scroll_top_anchor,
offset: vec2f(state.scroll_x, state.scroll_y),
},
vec2f(state.scroll_x, state.scroll_y),
cx,
);
}
Ok::<_, anyhow::Error>(())
anyhow::Ok(())
})?;
Ok(editor)
}))
}
@@ -130,20 +186,45 @@ impl FollowableItem for Editor {
}
fn to_state_proto(&self, cx: &AppContext) -> Option<proto::view::Variant> {
let buffer_id = self.buffer.read(cx).as_singleton()?.read(cx).remote_id();
let buffer = self.buffer.read(cx);
let scroll_anchor = self.scroll_manager.anchor();
let excerpts = buffer
.read(cx)
.excerpts()
.map(|(id, buffer, range)| proto::Excerpt {
id: id.to_proto(),
buffer_id: buffer.remote_id(),
context_start: Some(serialize_text_anchor(&range.context.start)),
context_end: Some(serialize_text_anchor(&range.context.end)),
primary_start: range
.primary
.as_ref()
.map(|range| serialize_text_anchor(&range.start)),
primary_end: range
.primary
.as_ref()
.map(|range| serialize_text_anchor(&range.end)),
})
.collect();
Some(proto::view::Variant::Editor(proto::view::Editor {
buffer_id,
scroll_top_anchor: Some(language::proto::serialize_anchor(
&self.scroll_top_anchor.text_anchor,
)),
scroll_x: self.scroll_position.x(),
scroll_y: self.scroll_position.y(),
singleton: buffer.is_singleton(),
title: (!buffer.is_singleton()).then(|| buffer.title(cx).into()),
excerpts,
scroll_top_anchor: Some(serialize_anchor(&scroll_anchor.top_anchor)),
scroll_x: scroll_anchor.offset.x(),
scroll_y: scroll_anchor.offset.y(),
selections: self
.selections
.disjoint_anchors()
.iter()
.map(serialize_selection)
.collect(),
pending_selection: self
.selections
.pending_anchor()
.as_ref()
.map(serialize_selection),
}))
}
@@ -151,19 +232,45 @@ impl FollowableItem for Editor {
&self,
event: &Self::Event,
update: &mut Option<proto::update_view::Variant>,
_: &AppContext,
cx: &AppContext,
) -> bool {
let update =
update.get_or_insert_with(|| proto::update_view::Variant::Editor(Default::default()));
match update {
proto::update_view::Variant::Editor(update) => match event {
Event::ExcerptsAdded {
buffer,
predecessor,
excerpts,
} => {
let buffer_id = buffer.read(cx).remote_id();
let mut excerpts = excerpts.iter();
if let Some((id, range)) = excerpts.next() {
update.inserted_excerpts.push(proto::ExcerptInsertion {
previous_excerpt_id: Some(predecessor.to_proto()),
excerpt: serialize_excerpt(buffer_id, id, range),
});
update.inserted_excerpts.extend(excerpts.map(|(id, range)| {
proto::ExcerptInsertion {
previous_excerpt_id: None,
excerpt: serialize_excerpt(buffer_id, id, range),
}
}))
}
true
}
Event::ExcerptsRemoved { ids } => {
update
.deleted_excerpts
.extend(ids.iter().map(ExcerptId::to_proto));
true
}
Event::ScrollPositionChanged { .. } => {
update.scroll_top_anchor = Some(language::proto::serialize_anchor(
&self.scroll_top_anchor.text_anchor,
));
update.scroll_x = self.scroll_position.x();
update.scroll_y = self.scroll_position.y();
let scroll_anchor = self.scroll_manager.anchor();
update.scroll_top_anchor = Some(serialize_anchor(&scroll_anchor.top_anchor));
update.scroll_x = scroll_anchor.offset.x();
update.scroll_y = scroll_anchor.offset.y();
true
}
Event::SelectionsChanged { .. } => {
@@ -171,9 +278,13 @@ impl FollowableItem for Editor {
.selections
.disjoint_anchors()
.iter()
.chain(self.selections.pending_anchor().as_ref())
.map(serialize_selection)
.collect();
update.pending_selection = self
.selections
.pending_anchor()
.as_ref()
.map(serialize_selection);
true
}
_ => false,
@@ -183,43 +294,102 @@ impl FollowableItem for Editor {
fn apply_update_proto(
&mut self,
project: &ModelHandle<Project>,
message: update_view::Variant,
cx: &mut ViewContext<Self>,
) -> Result<()> {
match message {
update_view::Variant::Editor(message) => {
let buffer = self.buffer.read(cx);
let buffer = buffer.read(cx);
let (excerpt_id, buffer_id, _) = buffer.as_singleton().unwrap();
let excerpt_id = excerpt_id.clone();
drop(buffer);
) -> Task<Result<()>> {
let update_view::Variant::Editor(message) = message;
let multibuffer = self.buffer.read(cx);
let multibuffer = multibuffer.read(cx);
let selections = message
.selections
.into_iter()
.filter_map(|selection| {
deserialize_selection(&excerpt_id, buffer_id, selection)
})
.collect::<Vec<_>>();
let buffer_ids = message
.inserted_excerpts
.iter()
.filter_map(|insertion| Some(insertion.excerpt.as_ref()?.buffer_id))
.collect::<HashSet<_>>();
if !selections.is_empty() {
self.set_selections_from_remote(selections, cx);
self.request_autoscroll_remotely(Autoscroll::newest(), cx);
} else if let Some(anchor) = message.scroll_top_anchor {
self.set_scroll_top_anchor(
Anchor {
buffer_id: Some(buffer_id),
excerpt_id,
text_anchor: language::proto::deserialize_anchor(anchor)
.ok_or_else(|| anyhow!("invalid scroll top"))?,
},
vec2f(message.scroll_x, message.scroll_y),
cx,
);
let mut removals = message
.deleted_excerpts
.into_iter()
.map(ExcerptId::from_proto)
.collect::<Vec<_>>();
removals.sort_by(|a, b| a.cmp(&b, &multibuffer));
let selections = message
.selections
.into_iter()
.filter_map(|selection| deserialize_selection(&multibuffer, selection))
.collect::<Vec<_>>();
let pending_selection = message
.pending_selection
.and_then(|selection| deserialize_selection(&multibuffer, selection));
let scroll_top_anchor = message
.scroll_top_anchor
.and_then(|anchor| deserialize_anchor(&multibuffer, anchor));
drop(multibuffer);
let buffers = project.update(cx, |project, cx| {
buffer_ids
.into_iter()
.map(|id| project.open_buffer_by_id(id, cx))
.collect::<Vec<_>>()
});
let project = project.clone();
cx.spawn(|this, mut cx| async move {
let _buffers = try_join_all(buffers).await?;
this.update(&mut cx, |this, cx| {
this.buffer.update(cx, |multibuffer, cx| {
let mut insertions = message.inserted_excerpts.into_iter().peekable();
while let Some(insertion) = insertions.next() {
let Some(excerpt) = insertion.excerpt else { continue };
let Some(previous_excerpt_id) = insertion.previous_excerpt_id else { continue };
let buffer_id = excerpt.buffer_id;
let Some(buffer) = project.read(cx).buffer_for_id(buffer_id, cx) else { continue };
let adjacent_excerpts = iter::from_fn(|| {
let insertion = insertions.peek()?;
if insertion.previous_excerpt_id.is_none()
&& insertion.excerpt.as_ref()?.buffer_id == buffer_id
{
insertions.next()?.excerpt
} else {
None
}
});
multibuffer.insert_excerpts_with_ids_after(
ExcerptId::from_proto(previous_excerpt_id),
buffer,
[excerpt]
.into_iter()
.chain(adjacent_excerpts)
.filter_map(|excerpt| {
Some((
ExcerptId::from_proto(excerpt.id),
deserialize_excerpt_range(excerpt)?,
))
}),
cx,
);
}
multibuffer.remove_excerpts(removals, cx);
});
if !selections.is_empty() || pending_selection.is_some() {
this.set_selections_from_remote(selections, pending_selection, cx);
this.request_autoscroll_remotely(Autoscroll::newest(), cx);
} else if let Some(anchor) = scroll_top_anchor {
this.set_scroll_anchor_remote(ScrollAnchor {
top_anchor: anchor,
offset: vec2f(message.scroll_x, message.scroll_y)
}, cx);
}
}
}
Ok(())
});
Ok(())
})
}
fn should_unfollow_on_event(event: &Self::Event, _: &AppContext) -> bool {
@@ -232,41 +402,82 @@ impl FollowableItem for Editor {
}
}
fn serialize_excerpt(
buffer_id: u64,
id: &ExcerptId,
range: &ExcerptRange<language::Anchor>,
) -> Option<proto::Excerpt> {
Some(proto::Excerpt {
id: id.to_proto(),
buffer_id,
context_start: Some(serialize_text_anchor(&range.context.start)),
context_end: Some(serialize_text_anchor(&range.context.end)),
primary_start: range
.primary
.as_ref()
.map(|r| serialize_text_anchor(&r.start)),
primary_end: range
.primary
.as_ref()
.map(|r| serialize_text_anchor(&r.end)),
})
}
fn serialize_selection(selection: &Selection<Anchor>) -> proto::Selection {
proto::Selection {
id: selection.id as u64,
start: Some(language::proto::serialize_anchor(
&selection.start.text_anchor,
)),
end: Some(language::proto::serialize_anchor(
&selection.end.text_anchor,
)),
start: Some(serialize_anchor(&selection.start)),
end: Some(serialize_anchor(&selection.end)),
reversed: selection.reversed,
}
}
fn serialize_anchor(anchor: &Anchor) -> proto::EditorAnchor {
proto::EditorAnchor {
excerpt_id: anchor.excerpt_id.to_proto(),
anchor: Some(serialize_text_anchor(&anchor.text_anchor)),
}
}
fn deserialize_excerpt_range(excerpt: proto::Excerpt) -> Option<ExcerptRange<language::Anchor>> {
let context = {
let start = language::proto::deserialize_anchor(excerpt.context_start?)?;
let end = language::proto::deserialize_anchor(excerpt.context_end?)?;
start..end
};
let primary = excerpt
.primary_start
.zip(excerpt.primary_end)
.and_then(|(start, end)| {
let start = language::proto::deserialize_anchor(start)?;
let end = language::proto::deserialize_anchor(end)?;
Some(start..end)
});
Some(ExcerptRange { context, primary })
}
fn deserialize_selection(
excerpt_id: &ExcerptId,
buffer_id: usize,
buffer: &MultiBufferSnapshot,
selection: proto::Selection,
) -> Option<Selection<Anchor>> {
Some(Selection {
id: selection.id as usize,
start: Anchor {
buffer_id: Some(buffer_id),
excerpt_id: excerpt_id.clone(),
text_anchor: language::proto::deserialize_anchor(selection.start?)?,
},
end: Anchor {
buffer_id: Some(buffer_id),
excerpt_id: excerpt_id.clone(),
text_anchor: language::proto::deserialize_anchor(selection.end?)?,
},
start: deserialize_anchor(buffer, selection.start?)?,
end: deserialize_anchor(buffer, selection.end?)?,
reversed: selection.reversed,
goal: SelectionGoal::None,
})
}
fn deserialize_anchor(buffer: &MultiBufferSnapshot, anchor: proto::EditorAnchor) -> Option<Anchor> {
let excerpt_id = ExcerptId::from_proto(anchor.excerpt_id);
Some(Anchor {
excerpt_id,
text_anchor: language::proto::deserialize_anchor(anchor.anchor?)?,
buffer_id: buffer.buffer_id_for_excerpt(excerpt_id),
})
}
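A hedged round-trip sketch tying `serialize_anchor` and `deserialize_anchor` together; `snapshot` stands in for any `MultiBufferSnapshot`, and the result is only meaningful while the anchor's excerpt still exists in that snapshot:
fn anchor_round_trip(snapshot: &MultiBufferSnapshot, anchor: Anchor) -> Option<Anchor> {
    // Serialization keeps only the excerpt id and text anchor; buffer_id is
    // not sent over the wire and is re-derived from the snapshot on the way back.
    let proto = serialize_anchor(&anchor);
    deserialize_anchor(snapshot, proto)
}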
impl Item for Editor {
fn navigate(&mut self, data: Box<dyn std::any::Any>, cx: &mut ViewContext<Self>) -> bool {
if let Ok(data) = data.downcast::<NavigationData>() {
@@ -278,13 +489,12 @@ impl Item for Editor {
buffer.clip_point(data.cursor_position, Bias::Left)
};
let scroll_top_anchor = if buffer.can_resolve(&data.scroll_top_anchor) {
data.scroll_top_anchor
} else {
buffer.anchor_before(
let mut scroll_anchor = data.scroll_anchor;
if !buffer.can_resolve(&scroll_anchor.top_anchor) {
scroll_anchor.top_anchor = buffer.anchor_before(
buffer.clip_point(Point::new(data.scroll_top_row, 0), Bias::Left),
)
};
);
}
drop(buffer);
@@ -292,8 +502,7 @@ impl Item for Editor {
false
} else {
let nav_history = self.nav_history.take();
self.scroll_position = data.scroll_position;
self.scroll_top_anchor = scroll_top_anchor;
self.set_scroll_anchor(scroll_anchor, cx);
self.change_selections(Some(Autoscroll::fit()), cx, |s| {
s.select_ranges([offset..offset])
});
@@ -367,7 +576,7 @@ impl Item for Editor {
self.buffer.read(cx).is_singleton()
}
fn clone_on_split(&self, cx: &mut ViewContext<Self>) -> Option<Self>
fn clone_on_split(&self, _workspace_id: WorkspaceId, cx: &mut ViewContext<Self>) -> Option<Self>
where
Self: Sized,
{
@@ -490,7 +699,7 @@ impl Item for Editor {
Task::ready(Ok(()))
}
fn to_item_events(event: &Self::Event) -> Vec<workspace::ItemEvent> {
fn to_item_events(event: &Self::Event) -> Vec<ItemEvent> {
let mut result = Vec::new();
match event {
Event::Closed => result.push(ItemEvent::CloseItem),
@@ -552,6 +761,87 @@ impl Item for Editor {
}));
Some(breadcrumbs)
}
fn added_to_workspace(&mut self, workspace: &mut Workspace, cx: &mut ViewContext<Self>) {
let workspace_id = workspace.database_id();
let item_id = cx.view_id();
fn serialize(
buffer: ModelHandle<Buffer>,
workspace_id: WorkspaceId,
item_id: ItemId,
cx: &mut MutableAppContext,
) {
if let Some(file) = buffer.read(cx).file().and_then(|file| file.as_local()) {
let path = file.abs_path(cx);
cx.background()
.spawn(async move {
DB.save_path(item_id, workspace_id, path.clone())
.await
.log_err()
})
.detach();
}
}
if let Some(buffer) = self.buffer().read(cx).as_singleton() {
serialize(buffer.clone(), workspace_id, item_id, cx);
cx.subscribe(&buffer, |this, buffer, event, cx| {
if let Some(workspace_id) = this.workspace_id {
if let language::Event::FileHandleChanged = event {
serialize(buffer, workspace_id, cx.view_id(), cx);
}
}
})
.detach();
}
}
fn serialized_item_kind() -> Option<&'static str> {
Some("Editor")
}
fn deserialize(
project: ModelHandle<Project>,
_workspace: WeakViewHandle<Workspace>,
workspace_id: workspace::WorkspaceId,
item_id: ItemId,
cx: &mut ViewContext<Pane>,
) -> Task<Result<ViewHandle<Self>>> {
let project_item: Result<_> = project.update(cx, |project, cx| {
// Look up the path associated with this item and workspace, then create an editor for that path
let path = DB
.get_path(item_id, workspace_id)?
.context("No path stored for this editor")?;
let (worktree, path) = project
.find_local_worktree(&path, cx)
.with_context(|| format!("No worktree for path: {path:?}"))?;
let project_path = ProjectPath {
worktree_id: worktree.read(cx).id(),
path: path.into(),
};
Ok(project.open_path(project_path, cx))
});
project_item
.map(|project_item| {
cx.spawn(|pane, mut cx| async move {
let (_, project_item) = project_item.await?;
let buffer = project_item
.downcast::<Buffer>()
.context("Project item at stored path was not a buffer")?;
Ok(cx.update(|cx| {
cx.add_view(pane, |cx| Editor::for_buffer(buffer, Some(project), cx))
}))
})
})
.unwrap_or_else(|error| Task::ready(Err(error)))
}
}
impl ProjectItem for Editor {

File diff suppressed because it is too large


@@ -6,7 +6,7 @@ use std::{
};
use sum_tree::Bias;
#[derive(Clone, Eq, PartialEq, Debug, Hash)]
#[derive(Clone, Copy, Eq, PartialEq, Debug, Hash)]
pub struct Anchor {
pub(crate) buffer_id: Option<usize>,
pub(crate) excerpt_id: ExcerptId,
@@ -30,16 +30,16 @@ impl Anchor {
}
}
pub fn excerpt_id(&self) -> &ExcerptId {
&self.excerpt_id
pub fn excerpt_id(&self) -> ExcerptId {
self.excerpt_id
}
pub fn cmp(&self, other: &Anchor, snapshot: &MultiBufferSnapshot) -> Ordering {
let excerpt_id_cmp = self.excerpt_id.cmp(&other.excerpt_id);
let excerpt_id_cmp = self.excerpt_id.cmp(&other.excerpt_id, snapshot);
if excerpt_id_cmp.is_eq() {
if self.excerpt_id == ExcerptId::min() || self.excerpt_id == ExcerptId::max() {
Ordering::Equal
} else if let Some(excerpt) = snapshot.excerpt(&self.excerpt_id) {
} else if let Some(excerpt) = snapshot.excerpt(self.excerpt_id) {
self.text_anchor.cmp(&other.text_anchor, &excerpt.buffer)
} else {
Ordering::Equal
@@ -51,7 +51,7 @@ impl Anchor {
pub fn bias_left(&self, snapshot: &MultiBufferSnapshot) -> Anchor {
if self.text_anchor.bias != Bias::Left {
if let Some(excerpt) = snapshot.excerpt(&self.excerpt_id) {
if let Some(excerpt) = snapshot.excerpt(self.excerpt_id) {
return Self {
buffer_id: self.buffer_id,
excerpt_id: self.excerpt_id.clone(),
@@ -64,7 +64,7 @@ impl Anchor {
pub fn bias_right(&self, snapshot: &MultiBufferSnapshot) -> Anchor {
if self.text_anchor.bias != Bias::Right {
if let Some(excerpt) = snapshot.excerpt(&self.excerpt_id) {
if let Some(excerpt) = snapshot.excerpt(self.excerpt_id) {
return Self {
buffer_id: self.buffer_id,
excerpt_id: self.excerpt_id.clone(),


@@ -0,0 +1,36 @@
use std::path::PathBuf;
use db::sqlez_macros::sql;
use db::{define_connection, query};
use workspace::{ItemId, WorkspaceDb, WorkspaceId};
define_connection!(
pub static ref DB: EditorDb<WorkspaceDb> =
&[sql! (
CREATE TABLE editors(
item_id INTEGER NOT NULL,
workspace_id INTEGER NOT NULL,
path BLOB NOT NULL,
PRIMARY KEY(item_id, workspace_id),
FOREIGN KEY(workspace_id) REFERENCES workspaces(workspace_id)
ON DELETE CASCADE
ON UPDATE CASCADE
) STRICT;
)];
);
impl EditorDb {
query! {
pub fn get_path(item_id: ItemId, workspace_id: WorkspaceId) -> Result<Option<PathBuf>> {
SELECT path FROM editors
WHERE item_id = ? AND workspace_id = ?
}
}
query! {
pub async fn save_path(item_id: ItemId, workspace_id: WorkspaceId, path: PathBuf) -> Result<()> {
INSERT OR REPLACE INTO editors(item_id, workspace_id, path)
VALUES (?, ?, ?)
}
}
}
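For orientation, a minimal sketch (not part of the diff) of how the two generated queries pair up at runtime; the id arguments are hypothetical placeholders, and the real call sites are `added_to_workspace` and `deserialize` above:
// Hedged sketch: persist an editor's path, then read it back.
async fn path_round_trip(
    item_id: ItemId,
    workspace_id: WorkspaceId,
    path: std::path::PathBuf,
) -> anyhow::Result<()> {
    DB.save_path(item_id, workspace_id, path.clone()).await?;
    let stored = DB.get_path(item_id, workspace_id)?;
    assert_eq!(stored, Some(path));
    Ok(())
}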

crates/editor/src/scroll.rs (new file)

@@ -0,0 +1,348 @@
pub mod actions;
pub mod autoscroll;
pub mod scroll_amount;
use std::{
cmp::Ordering,
time::{Duration, Instant},
};
use gpui::{
geometry::vector::{vec2f, Vector2F},
Axis, MutableAppContext, Task, ViewContext,
};
use language::Bias;
use crate::{
display_map::{DisplaySnapshot, ToDisplayPoint},
hover_popover::hide_hover,
Anchor, DisplayPoint, Editor, EditorMode, Event, MultiBufferSnapshot, ToPoint,
};
use self::{
autoscroll::{Autoscroll, AutoscrollStrategy},
scroll_amount::ScrollAmount,
};
pub const SCROLL_EVENT_SEPARATION: Duration = Duration::from_millis(28);
const SCROLLBAR_SHOW_INTERVAL: Duration = Duration::from_secs(1);
#[derive(Default)]
pub struct ScrollbarAutoHide(pub bool);
#[derive(Clone, Copy, Debug, PartialEq)]
pub struct ScrollAnchor {
pub offset: Vector2F,
pub top_anchor: Anchor,
}
impl ScrollAnchor {
fn new() -> Self {
Self {
offset: Vector2F::zero(),
top_anchor: Anchor::min(),
}
}
pub fn scroll_position(&self, snapshot: &DisplaySnapshot) -> Vector2F {
let mut scroll_position = self.offset;
if self.top_anchor != Anchor::min() {
let scroll_top = self.top_anchor.to_display_point(snapshot).row() as f32;
scroll_position.set_y(scroll_top + scroll_position.y());
} else {
scroll_position.set_y(0.);
}
scroll_position
}
pub fn top_row(&self, buffer: &MultiBufferSnapshot) -> u32 {
self.top_anchor.to_point(buffer).row
}
}
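The decomposition here is: effective vertical scroll = display row of `top_anchor` plus the fractional `offset.y()`, with `Anchor::min()` meaning "pinned to the very top". A sketch of just the arithmetic, with plain numbers standing in for the editor types:
// Hedged sketch of ScrollAnchor::scroll_position, detached from editor types.
// `top_anchor_row` is None when the anchor is Anchor::min().
fn effective_scroll(top_anchor_row: Option<f32>, offset: (f32, f32)) -> (f32, f32) {
    match top_anchor_row {
        Some(row) => (offset.0, row + offset.1), // offset.y is relative to the anchor row
        None => (offset.0, 0.0),                 // pinned to the top; the y offset is ignored
    }
}
// e.g. an anchor at display row 40 with offset (0.0, 0.5) scrolls to y = 40.5.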
#[derive(Clone, Copy, Debug)]
pub struct OngoingScroll {
last_event: Instant,
axis: Option<Axis>,
}
impl OngoingScroll {
fn new() -> Self {
Self {
last_event: Instant::now() - SCROLL_EVENT_SEPARATION,
axis: None,
}
}
pub fn filter(&self, delta: &mut Vector2F) -> Option<Axis> {
const UNLOCK_PERCENT: f32 = 1.9;
const UNLOCK_LOWER_BOUND: f32 = 6.;
let mut axis = self.axis;
let x = delta.x().abs();
let y = delta.y().abs();
let duration = Instant::now().duration_since(self.last_event);
if duration > SCROLL_EVENT_SEPARATION {
// A new ongoing scroll is starting; determine its axis
axis = if x <= y {
Some(Axis::Vertical)
} else {
Some(Axis::Horizontal)
};
} else if x.max(y) >= UNLOCK_LOWER_BOUND {
// Check whether the current ongoing scroll needs to unlock its axis
match axis {
Some(Axis::Vertical) => {
if x > y && x >= y * UNLOCK_PERCENT {
axis = None;
}
}
Some(Axis::Horizontal) => {
if y > x && y >= x * UNLOCK_PERCENT {
axis = None;
}
}
None => {}
}
}
match axis {
Some(Axis::Vertical) => *delta = vec2f(0., delta.y()),
Some(Axis::Horizontal) => *delta = vec2f(delta.x(), 0.),
None => {}
}
axis
}
}
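A hedged usage sketch of the axis-locking filter, written as if inside this module (both `new` and `filter` are module-private); it assumes the derived comparisons on `gpui::Axis` and `Vector2F` behave as usual:
// Hedged sketch: on a fresh scroll, the dominant component wins the axis
// and the other component is zeroed.
fn filter_sketch() {
    let ongoing = OngoingScroll::new(); // last_event is already SCROLL_EVENT_SEPARATION in the past
    let mut delta = vec2f(1.0, 10.0);
    let axis = ongoing.filter(&mut delta);
    assert!(matches!(axis, Some(Axis::Vertical))); // y dominated, so the axis locks vertical
    assert_eq!(delta.x(), 0.0); // minor component zeroed
    assert_eq!(delta.y(), 10.0);
}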
pub struct ScrollManager {
vertical_scroll_margin: f32,
anchor: ScrollAnchor,
ongoing: OngoingScroll,
autoscroll_request: Option<(Autoscroll, bool)>,
last_autoscroll: Option<(Vector2F, f32, f32, AutoscrollStrategy)>,
show_scrollbars: bool,
hide_scrollbar_task: Option<Task<()>>,
visible_line_count: Option<f32>,
}
impl ScrollManager {
pub fn new() -> Self {
ScrollManager {
vertical_scroll_margin: 3.0,
anchor: ScrollAnchor::new(),
ongoing: OngoingScroll::new(),
autoscroll_request: None,
show_scrollbars: true,
hide_scrollbar_task: None,
last_autoscroll: None,
visible_line_count: None,
}
}
pub fn clone_state(&mut self, other: &Self) {
self.anchor = other.anchor;
self.ongoing = other.ongoing;
}
pub fn anchor(&self) -> ScrollAnchor {
self.anchor
}
pub fn ongoing_scroll(&self) -> OngoingScroll {
self.ongoing
}
pub fn update_ongoing_scroll(&mut self, axis: Option<Axis>) {
self.ongoing.last_event = Instant::now();
self.ongoing.axis = axis;
}
pub fn scroll_position(&self, snapshot: &DisplaySnapshot) -> Vector2F {
self.anchor.scroll_position(snapshot)
}
fn set_scroll_position(
&mut self,
scroll_position: Vector2F,
map: &DisplaySnapshot,
local: bool,
cx: &mut ViewContext<Editor>,
) {
let new_anchor = if scroll_position.y() <= 0. {
ScrollAnchor {
top_anchor: Anchor::min(),
offset: scroll_position.max(vec2f(0., 0.)),
}
} else {
let scroll_top_buffer_offset =
DisplayPoint::new(scroll_position.y() as u32, 0).to_offset(&map, Bias::Right);
let top_anchor = map
.buffer_snapshot
.anchor_at(scroll_top_buffer_offset, Bias::Right);
ScrollAnchor {
top_anchor,
offset: vec2f(
scroll_position.x(),
scroll_position.y() - top_anchor.to_display_point(&map).row() as f32,
),
}
};
self.set_anchor(new_anchor, local, cx);
}
fn set_anchor(&mut self, anchor: ScrollAnchor, local: bool, cx: &mut ViewContext<Editor>) {
self.anchor = anchor;
cx.emit(Event::ScrollPositionChanged { local });
self.show_scrollbar(cx);
self.autoscroll_request.take();
cx.notify();
}
pub fn show_scrollbar(&mut self, cx: &mut ViewContext<Editor>) {
if !self.show_scrollbars {
self.show_scrollbars = true;
cx.notify();
}
if cx.default_global::<ScrollbarAutoHide>().0 {
self.hide_scrollbar_task = Some(cx.spawn_weak(|editor, mut cx| async move {
cx.background().timer(SCROLLBAR_SHOW_INTERVAL).await;
if let Some(editor) = editor.upgrade(&cx) {
editor.update(&mut cx, |editor, cx| {
editor.scroll_manager.show_scrollbars = false;
cx.notify();
});
}
}));
} else {
self.hide_scrollbar_task = None;
}
}
pub fn scrollbars_visible(&self) -> bool {
self.show_scrollbars
}
pub fn has_autoscroll_request(&self) -> bool {
self.autoscroll_request.is_some()
}
pub fn clamp_scroll_left(&mut self, max: f32) -> bool {
if max < self.anchor.offset.x() {
self.anchor.offset.set_x(max);
true
} else {
false
}
}
}
impl Editor {
pub fn vertical_scroll_margin(&mut self) -> usize {
self.scroll_manager.vertical_scroll_margin as usize
}
pub fn set_vertical_scroll_margin(&mut self, margin_rows: usize, cx: &mut ViewContext<Self>) {
self.scroll_manager.vertical_scroll_margin = margin_rows as f32;
cx.notify();
}
pub fn visible_line_count(&self) -> Option<f32> {
self.scroll_manager.visible_line_count
}
pub(crate) fn set_visible_line_count(&mut self, lines: f32) {
self.scroll_manager.visible_line_count = Some(lines)
}
pub fn set_scroll_position(&mut self, scroll_position: Vector2F, cx: &mut ViewContext<Self>) {
self.set_scroll_position_internal(scroll_position, true, cx);
}
pub(crate) fn set_scroll_position_internal(
&mut self,
scroll_position: Vector2F,
local: bool,
cx: &mut ViewContext<Self>,
) {
let map = self.display_map.update(cx, |map, cx| map.snapshot(cx));
hide_hover(self, cx);
self.scroll_manager
.set_scroll_position(scroll_position, &map, local, cx);
}
pub fn scroll_position(&self, cx: &mut ViewContext<Self>) -> Vector2F {
let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx));
self.scroll_manager.anchor.scroll_position(&display_map)
}
pub fn set_scroll_anchor(&mut self, scroll_anchor: ScrollAnchor, cx: &mut ViewContext<Self>) {
hide_hover(self, cx);
self.scroll_manager.set_anchor(scroll_anchor, true, cx);
}
pub(crate) fn set_scroll_anchor_remote(
&mut self,
scroll_anchor: ScrollAnchor,
cx: &mut ViewContext<Self>,
) {
hide_hover(self, cx);
self.scroll_manager.set_anchor(scroll_anchor, false, cx);
}
pub fn scroll_screen(&mut self, amount: &ScrollAmount, cx: &mut ViewContext<Self>) {
if matches!(self.mode, EditorMode::SingleLine) {
cx.propagate_action();
return;
}
if self.take_rename(true, cx).is_some() {
return;
}
if amount.move_context_menu_selection(self, cx) {
return;
}
let cur_position = self.scroll_position(cx);
let new_pos = cur_position + vec2f(0., amount.lines(self) - 1.);
self.set_scroll_position(new_pos, cx);
}
/// Returns an ordering. The newest selection is:
/// Ordering::Equal => on screen
/// Ordering::Less => above the screen
/// Ordering::Greater => below the screen
pub fn newest_selection_on_screen(&self, cx: &mut MutableAppContext) -> Ordering {
let snapshot = self.display_map.update(cx, |map, cx| map.snapshot(cx));
let newest_head = self
.selections
.newest_anchor()
.head()
.to_display_point(&snapshot);
let screen_top = self
.scroll_manager
.anchor
.top_anchor
.to_display_point(&snapshot);
if screen_top > newest_head {
return Ordering::Less;
}
if let Some(visible_lines) = self.visible_line_count() {
if newest_head.row() < screen_top.row() + visible_lines as u32 {
return Ordering::Equal;
}
}
Ordering::Greater
}
}
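A hedged caller's-eye sketch of the ordering contract documented above; `editor` and `cx` are assumed to be in scope:
// Hedged sketch: interpreting newest_selection_on_screen at a call site.
match editor.newest_selection_on_screen(cx) {
    Ordering::Less => { /* newest cursor is above the viewport: scroll up to reach it */ }
    Ordering::Equal => { /* newest cursor is already visible */ }
    Ordering::Greater => { /* newest cursor is below the viewport: scroll down */ }
}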


@@ -0,0 +1,159 @@
use gpui::{
actions, geometry::vector::Vector2F, impl_internal_actions, Axis, MutableAppContext,
ViewContext,
};
use language::Bias;
use crate::{Editor, EditorMode};
use super::{autoscroll::Autoscroll, scroll_amount::ScrollAmount, ScrollAnchor};
actions!(
editor,
[
LineDown,
LineUp,
HalfPageDown,
HalfPageUp,
PageDown,
PageUp,
NextScreen,
ScrollCursorTop,
ScrollCursorCenter,
ScrollCursorBottom,
]
);
#[derive(Clone, PartialEq)]
pub struct Scroll {
pub scroll_position: Vector2F,
pub axis: Option<Axis>,
}
impl_internal_actions!(editor, [Scroll]);
pub fn init(cx: &mut MutableAppContext) {
cx.add_action(Editor::next_screen);
cx.add_action(Editor::scroll);
cx.add_action(Editor::scroll_cursor_top);
cx.add_action(Editor::scroll_cursor_center);
cx.add_action(Editor::scroll_cursor_bottom);
cx.add_action(|this: &mut Editor, _: &LineDown, cx| {
this.scroll_screen(&ScrollAmount::LineDown, cx)
});
cx.add_action(|this: &mut Editor, _: &LineUp, cx| {
this.scroll_screen(&ScrollAmount::LineUp, cx)
});
cx.add_action(|this: &mut Editor, _: &HalfPageDown, cx| {
this.scroll_screen(&ScrollAmount::HalfPageDown, cx)
});
cx.add_action(|this: &mut Editor, _: &HalfPageUp, cx| {
this.scroll_screen(&ScrollAmount::HalfPageUp, cx)
});
cx.add_action(|this: &mut Editor, _: &PageDown, cx| {
this.scroll_screen(&ScrollAmount::PageDown, cx)
});
cx.add_action(|this: &mut Editor, _: &PageUp, cx| {
this.scroll_screen(&ScrollAmount::PageUp, cx)
});
}
impl Editor {
pub fn next_screen(&mut self, _: &NextScreen, cx: &mut ViewContext<Editor>) -> Option<()> {
if self.take_rename(true, cx).is_some() {
return None;
}
if self.mouse_context_menu.read(cx).visible() {
return None;
}
if matches!(self.mode, EditorMode::SingleLine) {
cx.propagate_action();
return None;
}
self.request_autoscroll(Autoscroll::Next, cx);
Some(())
}
fn scroll(&mut self, action: &Scroll, cx: &mut ViewContext<Self>) {
self.scroll_manager.update_ongoing_scroll(action.axis);
self.set_scroll_position(action.scroll_position, cx);
}
fn scroll_cursor_top(editor: &mut Editor, _: &ScrollCursorTop, cx: &mut ViewContext<Editor>) {
let snapshot = editor.snapshot(cx).display_snapshot;
let scroll_margin_rows = editor.vertical_scroll_margin() as u32;
let mut new_screen_top = editor.selections.newest_display(cx).head();
*new_screen_top.row_mut() = new_screen_top.row().saturating_sub(scroll_margin_rows);
*new_screen_top.column_mut() = 0;
let new_screen_top = new_screen_top.to_offset(&snapshot, Bias::Left);
let new_anchor = snapshot.buffer_snapshot.anchor_before(new_screen_top);
editor.set_scroll_anchor(
ScrollAnchor {
top_anchor: new_anchor,
offset: Default::default(),
},
cx,
)
}
fn scroll_cursor_center(
editor: &mut Editor,
_: &ScrollCursorCenter,
cx: &mut ViewContext<Editor>,
) {
let snapshot = editor.snapshot(cx).display_snapshot;
let visible_rows = if let Some(visible_rows) = editor.visible_line_count() {
visible_rows as u32
} else {
return;
};
let mut new_screen_top = editor.selections.newest_display(cx).head();
*new_screen_top.row_mut() = new_screen_top.row().saturating_sub(visible_rows / 2);
*new_screen_top.column_mut() = 0;
let new_screen_top = new_screen_top.to_offset(&snapshot, Bias::Left);
let new_anchor = snapshot.buffer_snapshot.anchor_before(new_screen_top);
editor.set_scroll_anchor(
ScrollAnchor {
top_anchor: new_anchor,
offset: Default::default(),
},
cx,
)
}
fn scroll_cursor_bottom(
editor: &mut Editor,
_: &ScrollCursorBottom,
cx: &mut ViewContext<Editor>,
) {
let snapshot = editor.snapshot(cx).display_snapshot;
let scroll_margin_rows = editor.vertical_scroll_margin() as u32;
let visible_rows = if let Some(visible_rows) = editor.visible_line_count() {
visible_rows as u32
} else {
return;
};
let mut new_screen_top = editor.selections.newest_display(cx).head();
*new_screen_top.row_mut() = new_screen_top
.row()
.saturating_sub(visible_rows.saturating_sub(scroll_margin_rows));
*new_screen_top.column_mut() = 0;
let new_screen_top = new_screen_top.to_offset(&snapshot, Bias::Left);
let new_anchor = snapshot.buffer_snapshot.anchor_before(new_screen_top);
editor.set_scroll_anchor(
ScrollAnchor {
top_anchor: new_anchor,
offset: Default::default(),
},
cx,
)
}
}


@@ -0,0 +1,246 @@
use std::cmp;
use gpui::{text_layout, ViewContext};
use language::Point;
use crate::{display_map::ToDisplayPoint, Editor, EditorMode};
#[derive(PartialEq, Eq)]
pub enum Autoscroll {
Next,
Strategy(AutoscrollStrategy),
}
impl Autoscroll {
pub fn fit() -> Self {
Self::Strategy(AutoscrollStrategy::Fit)
}
pub fn newest() -> Self {
Self::Strategy(AutoscrollStrategy::Newest)
}
pub fn center() -> Self {
Self::Strategy(AutoscrollStrategy::Center)
}
}
#[derive(PartialEq, Eq, Default)]
pub enum AutoscrollStrategy {
Fit,
Newest,
#[default]
Center,
Top,
Bottom,
}
impl AutoscrollStrategy {
fn next(&self) -> Self {
match self {
AutoscrollStrategy::Center => AutoscrollStrategy::Top,
AutoscrollStrategy::Top => AutoscrollStrategy::Bottom,
_ => AutoscrollStrategy::Center,
}
}
}
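So repeated `Autoscroll::Next` requests cycle Center -> Top -> Bottom -> Center. A hedged sketch, written as if inside this module since `next` is private:
// Hedged sketch of the strategy cycle driven by AutoscrollStrategy::next().
fn cycle_sketch() {
    assert!(matches!(AutoscrollStrategy::Center.next(), AutoscrollStrategy::Top));
    assert!(matches!(AutoscrollStrategy::Top.next(), AutoscrollStrategy::Bottom));
    assert!(matches!(AutoscrollStrategy::Bottom.next(), AutoscrollStrategy::Center));
}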
impl Editor {
pub fn autoscroll_vertically(
&mut self,
viewport_height: f32,
line_height: f32,
cx: &mut ViewContext<Editor>,
) -> bool {
let visible_lines = viewport_height / line_height;
let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx));
let mut scroll_position = self.scroll_manager.scroll_position(&display_map);
let max_scroll_top = if matches!(self.mode, EditorMode::AutoHeight { .. }) {
(display_map.max_point().row() as f32 - visible_lines + 1.).max(0.)
} else {
display_map.max_point().row() as f32
};
if scroll_position.y() > max_scroll_top {
scroll_position.set_y(max_scroll_top);
self.set_scroll_position(scroll_position, cx);
}
let (autoscroll, local) =
if let Some(autoscroll) = self.scroll_manager.autoscroll_request.take() {
autoscroll
} else {
return false;
};
let first_cursor_top;
let last_cursor_bottom;
if let Some(highlighted_rows) = &self.highlighted_rows {
first_cursor_top = highlighted_rows.start as f32;
last_cursor_bottom = first_cursor_top + 1.;
} else if autoscroll == Autoscroll::newest() {
let newest_selection = self.selections.newest::<Point>(cx);
first_cursor_top = newest_selection.head().to_display_point(&display_map).row() as f32;
last_cursor_bottom = first_cursor_top + 1.;
} else {
let selections = self.selections.all::<Point>(cx);
first_cursor_top = selections
.first()
.unwrap()
.head()
.to_display_point(&display_map)
.row() as f32;
last_cursor_bottom = selections
.last()
.unwrap()
.head()
.to_display_point(&display_map)
.row() as f32
+ 1.0;
}
let margin = if matches!(self.mode, EditorMode::AutoHeight { .. }) {
0.
} else {
((visible_lines - (last_cursor_bottom - first_cursor_top)) / 2.0).floor()
};
if margin < 0.0 {
return false;
}
let strategy = match autoscroll {
Autoscroll::Strategy(strategy) => strategy,
Autoscroll::Next => {
let last_autoscroll = &self.scroll_manager.last_autoscroll;
if let Some(last_autoscroll) = last_autoscroll {
if self.scroll_manager.anchor.offset == last_autoscroll.0
&& first_cursor_top == last_autoscroll.1
&& last_cursor_bottom == last_autoscroll.2
{
last_autoscroll.3.next()
} else {
AutoscrollStrategy::default()
}
} else {
AutoscrollStrategy::default()
}
}
};
match strategy {
AutoscrollStrategy::Fit | AutoscrollStrategy::Newest => {
let margin = margin.min(self.scroll_manager.vertical_scroll_margin);
let target_top = (first_cursor_top - margin).max(0.0);
let target_bottom = last_cursor_bottom + margin;
let start_row = scroll_position.y();
let end_row = start_row + visible_lines;
if target_top < start_row {
scroll_position.set_y(target_top);
self.set_scroll_position_internal(scroll_position, local, cx);
} else if target_bottom >= end_row {
scroll_position.set_y(target_bottom - visible_lines);
self.set_scroll_position_internal(scroll_position, local, cx);
}
}
AutoscrollStrategy::Center => {
scroll_position.set_y((first_cursor_top - margin).max(0.0));
self.set_scroll_position_internal(scroll_position, local, cx);
}
AutoscrollStrategy::Top => {
scroll_position.set_y((first_cursor_top).max(0.0));
self.set_scroll_position_internal(scroll_position, local, cx);
}
AutoscrollStrategy::Bottom => {
scroll_position.set_y((last_cursor_bottom - visible_lines).max(0.0));
self.set_scroll_position_internal(scroll_position, local, cx);
}
}
self.scroll_manager.last_autoscroll = Some((
self.scroll_manager.anchor.offset,
first_cursor_top,
last_cursor_bottom,
strategy,
));
true
}
pub fn autoscroll_horizontally(
&mut self,
start_row: u32,
viewport_width: f32,
scroll_width: f32,
max_glyph_width: f32,
layouts: &[text_layout::Line],
cx: &mut ViewContext<Self>,
) -> bool {
let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx));
let selections = self.selections.all::<Point>(cx);
let mut target_left;
let mut target_right;
if self.highlighted_rows.is_some() {
target_left = 0.0_f32;
target_right = 0.0_f32;
} else {
target_left = std::f32::INFINITY;
target_right = 0.0_f32;
for selection in selections {
let head = selection.head().to_display_point(&display_map);
if head.row() >= start_row && head.row() < start_row + layouts.len() as u32 {
let start_column = head.column().saturating_sub(3);
let end_column = cmp::min(display_map.line_len(head.row()), head.column() + 3);
target_left = target_left.min(
layouts[(head.row() - start_row) as usize]
.x_for_index(start_column as usize),
);
target_right = target_right.max(
layouts[(head.row() - start_row) as usize].x_for_index(end_column as usize)
+ max_glyph_width,
);
}
}
}
target_right = target_right.min(scroll_width);
if target_right - target_left > viewport_width {
return false;
}
let scroll_left = self.scroll_manager.anchor.offset.x() * max_glyph_width;
let scroll_right = scroll_left + viewport_width;
if target_left < scroll_left {
self.scroll_manager
.anchor
.offset
.set_x(target_left / max_glyph_width);
true
} else if target_right > scroll_right {
self.scroll_manager
.anchor
.offset
.set_x((target_right - viewport_width) / max_glyph_width);
true
} else {
false
}
}
pub fn request_autoscroll(&mut self, autoscroll: Autoscroll, cx: &mut ViewContext<Self>) {
self.scroll_manager.autoscroll_request = Some((autoscroll, true));
cx.notify();
}
pub(crate) fn request_autoscroll_remotely(
&mut self,
autoscroll: Autoscroll,
cx: &mut ViewContext<Self>,
) {
self.scroll_manager.autoscroll_request = Some((autoscroll, false));
cx.notify();
}
}


@@ -0,0 +1,48 @@
use gpui::ViewContext;
use serde::Deserialize;
use util::iife;
use crate::Editor;
#[derive(Clone, PartialEq, Deserialize)]
pub enum ScrollAmount {
LineUp,
LineDown,
HalfPageUp,
HalfPageDown,
PageUp,
PageDown,
}
impl ScrollAmount {
pub fn move_context_menu_selection(
&self,
editor: &mut Editor,
cx: &mut ViewContext<Editor>,
) -> bool {
iife!({
let context_menu = editor.context_menu.as_mut()?;
match self {
Self::LineDown | Self::HalfPageDown => context_menu.select_next(cx),
Self::LineUp | Self::HalfPageUp => context_menu.select_prev(cx),
Self::PageDown => context_menu.select_last(cx),
Self::PageUp => context_menu.select_first(cx),
}
.then_some(())
})
.is_some()
}
pub fn lines(&self, editor: &mut Editor) -> f32 {
match self {
Self::LineDown => 1.,
Self::LineUp => -1.,
Self::HalfPageDown => editor.visible_line_count().map(|l| l / 2.).unwrap_or(1.),
Self::HalfPageUp => -editor.visible_line_count().map(|l| l / 2.).unwrap_or(1.),
// Minus 1. here so that there is a pivot line that stays on the screen
Self::PageDown => editor.visible_line_count().unwrap_or(1.) - 1.,
Self::PageUp => -editor.visible_line_count().unwrap_or(1.) - 1.,
}
}
}
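Worked values for `lines` (hedged), assuming `visible_line_count()` returns `Some(40.)`; note that, as written, `PageUp` evaluates to `-40. - 1.` rather than the `-(40. - 1.)` that would mirror `PageDown`:
// Hedged worked example, visible_line_count == Some(40.):
//   LineDown     ->   1.
//   LineUp       ->  -1.
//   HalfPageDown ->  20.
//   HalfPageUp   -> -20.
//   PageDown     ->  40. - 1. =  39.   (keeps one pivot line on screen)
//   PageUp       -> -40. - 1. = -41.   (one line more than a page, per the code above)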


@@ -61,7 +61,7 @@ impl SelectionsCollection {
self.buffer.read(cx).read(cx)
}
pub fn set_state(&mut self, other: &SelectionsCollection) {
pub fn clone_state(&mut self, other: &SelectionsCollection) {
self.next_selection_id = other.next_selection_id;
self.line_mode = other.line_mode;
self.disjoint = other.disjoint.clone();
@@ -544,11 +544,21 @@ impl<'a> MutableSelectionsCollection<'a> {
T: ToOffset,
{
let buffer = self.buffer.read(self.cx).snapshot(self.cx);
let ranges = ranges
.into_iter()
.map(|range| range.start.to_offset(&buffer)..range.end.to_offset(&buffer));
self.select_offset_ranges(ranges);
}
fn select_offset_ranges<I>(&mut self, ranges: I)
where
I: IntoIterator<Item = Range<usize>>,
{
let selections = ranges
.into_iter()
.map(|range| {
let mut start = range.start.to_offset(&buffer);
let mut end = range.end.to_offset(&buffer);
let mut start = range.start;
let mut end = range.end;
let reversed = if start > end {
mem::swap(&mut start, &mut end);
true


@@ -63,8 +63,15 @@ impl<'a> EditorLspTestContext<'a> {
.insert_tree("/root", json!({ "dir": { file_name: "" }}))
.await;
let (window_id, workspace) =
cx.add_window(|cx| Workspace::new(project.clone(), |_, _| unimplemented!(), cx));
let (window_id, workspace) = cx.add_window(|cx| {
Workspace::new(
Default::default(),
0,
project.clone(),
|_, _| unimplemented!(),
cx,
)
});
project
.update(cx, |project, cx| {
project.find_or_create_local_worktree("/root", true, cx)


@@ -254,7 +254,7 @@ impl<'a> EditorTestContext<'a> {
Actual selections:
{}
"},
"},
self.assertion_context(),
expected_marked_text,
actual_marked_text,


@@ -62,11 +62,12 @@ impl View for FileFinder {
impl FileFinder {
fn labels_for_match(&self, path_match: &PathMatch) -> (String, Vec<usize>, String, Vec<usize>) {
let path_string = path_match.path.to_string_lossy();
let path = &path_match.path;
let path_string = path.to_string_lossy();
let full_path = [path_match.path_prefix.as_ref(), path_string.as_ref()].join("");
let path_positions = path_match.positions.clone();
let file_name = path_match.path.file_name().map_or_else(
let file_name = path.file_name().map_or_else(
|| path_match.path_prefix.to_string(),
|file_name| file_name.to_string_lossy().to_string(),
);
@@ -161,7 +162,7 @@ impl FileFinder {
self.cancel_flag = Arc::new(AtomicBool::new(false));
let cancel_flag = self.cancel_flag.clone();
cx.spawn(|this, mut cx| async move {
let matches = fuzzy::match_paths(
let matches = fuzzy::match_path_sets(
candidate_sets.as_slice(),
&query,
false,
@@ -316,8 +317,9 @@ mod tests {
.await;
let project = Project::test(app_state.fs.clone(), ["/root".as_ref()], cx).await;
let (window_id, workspace) =
cx.add_window(|cx| Workspace::new(project, |_, _| unimplemented!(), cx));
let (window_id, workspace) = cx.add_window(|cx| {
Workspace::new(Default::default(), 0, project, |_, _| unimplemented!(), cx)
});
cx.dispatch_action(window_id, Toggle);
let finder = cx.read(|cx| workspace.read(cx).modal::<FileFinder>().unwrap());
@@ -371,8 +373,9 @@ mod tests {
.await;
let project = Project::test(app_state.fs.clone(), ["/dir".as_ref()], cx).await;
let (_, workspace) =
cx.add_window(|cx| Workspace::new(project, |_, _| unimplemented!(), cx));
let (_, workspace) = cx.add_window(|cx| {
Workspace::new(Default::default(), 0, project, |_, _| unimplemented!(), cx)
});
let (_, finder) =
cx.add_window(|cx| FileFinder::new(workspace.read(cx).project().clone(), cx));
@@ -446,8 +449,9 @@ mod tests {
cx,
)
.await;
let (_, workspace) =
cx.add_window(|cx| Workspace::new(project, |_, _| unimplemented!(), cx));
let (_, workspace) = cx.add_window(|cx| {
Workspace::new(Default::default(), 0, project, |_, _| unimplemented!(), cx)
});
let (_, finder) =
cx.add_window(|cx| FileFinder::new(workspace.read(cx).project().clone(), cx));
finder
@@ -471,8 +475,9 @@ mod tests {
cx,
)
.await;
let (_, workspace) =
cx.add_window(|cx| Workspace::new(project, |_, _| unimplemented!(), cx));
let (_, workspace) = cx.add_window(|cx| {
Workspace::new(Default::default(), 0, project, |_, _| unimplemented!(), cx)
});
let (_, finder) =
cx.add_window(|cx| FileFinder::new(workspace.read(cx).project().clone(), cx));
@@ -524,8 +529,9 @@ mod tests {
cx,
)
.await;
let (_, workspace) =
cx.add_window(|cx| Workspace::new(project, |_, _| unimplemented!(), cx));
let (_, workspace) = cx.add_window(|cx| {
Workspace::new(Default::default(), 0, project, |_, _| unimplemented!(), cx)
});
let (_, finder) =
cx.add_window(|cx| FileFinder::new(workspace.read(cx).project().clone(), cx));
@@ -563,8 +569,9 @@ mod tests {
.await;
let project = Project::test(app_state.fs.clone(), ["/root".as_ref()], cx).await;
let (_, workspace) =
cx.add_window(|cx| Workspace::new(project, |_, _| unimplemented!(), cx));
let (_, workspace) = cx.add_window(|cx| {
Workspace::new(Default::default(), 0, project, |_, _| unimplemented!(), cx)
});
let (_, finder) =
cx.add_window(|cx| FileFinder::new(workspace.read(cx).project().clone(), cx));
finder


@@ -1,794 +1,8 @@
mod char_bag;
use gpui::executor;
use std::{
borrow::Cow,
cmp::{self, Ordering},
path::Path,
sync::atomic::{self, AtomicBool},
sync::Arc,
};
mod matcher;
mod paths;
mod strings;
pub use char_bag::CharBag;
const BASE_DISTANCE_PENALTY: f64 = 0.6;
const ADDITIONAL_DISTANCE_PENALTY: f64 = 0.05;
const MIN_DISTANCE_PENALTY: f64 = 0.2;
pub struct Matcher<'a> {
query: &'a [char],
lowercase_query: &'a [char],
query_char_bag: CharBag,
smart_case: bool,
max_results: usize,
min_score: f64,
match_positions: Vec<usize>,
last_positions: Vec<usize>,
score_matrix: Vec<Option<f64>>,
best_position_matrix: Vec<usize>,
}
trait Match: Ord {
fn score(&self) -> f64;
fn set_positions(&mut self, positions: Vec<usize>);
}
trait MatchCandidate {
fn has_chars(&self, bag: CharBag) -> bool;
fn to_string(&self) -> Cow<'_, str>;
}
#[derive(Clone, Debug)]
pub struct PathMatchCandidate<'a> {
pub path: &'a Arc<Path>,
pub char_bag: CharBag,
}
#[derive(Clone, Debug)]
pub struct PathMatch {
pub score: f64,
pub positions: Vec<usize>,
pub worktree_id: usize,
pub path: Arc<Path>,
pub path_prefix: Arc<str>,
}
#[derive(Clone, Debug)]
pub struct StringMatchCandidate {
pub id: usize,
pub string: String,
pub char_bag: CharBag,
}
pub trait PathMatchCandidateSet<'a>: Send + Sync {
type Candidates: Iterator<Item = PathMatchCandidate<'a>>;
fn id(&self) -> usize;
fn len(&self) -> usize;
fn is_empty(&self) -> bool {
self.len() == 0
}
fn prefix(&self) -> Arc<str>;
fn candidates(&'a self, start: usize) -> Self::Candidates;
}
impl Match for PathMatch {
fn score(&self) -> f64 {
self.score
}
fn set_positions(&mut self, positions: Vec<usize>) {
self.positions = positions;
}
}
impl Match for StringMatch {
fn score(&self) -> f64 {
self.score
}
fn set_positions(&mut self, positions: Vec<usize>) {
self.positions = positions;
}
}
impl<'a> MatchCandidate for PathMatchCandidate<'a> {
fn has_chars(&self, bag: CharBag) -> bool {
self.char_bag.is_superset(bag)
}
fn to_string(&self) -> Cow<'a, str> {
self.path.to_string_lossy()
}
}
impl StringMatchCandidate {
pub fn new(id: usize, string: String) -> Self {
Self {
id,
char_bag: CharBag::from(string.as_str()),
string,
}
}
}
impl<'a> MatchCandidate for &'a StringMatchCandidate {
fn has_chars(&self, bag: CharBag) -> bool {
self.char_bag.is_superset(bag)
}
fn to_string(&self) -> Cow<'a, str> {
self.string.as_str().into()
}
}
#[derive(Clone, Debug)]
pub struct StringMatch {
pub candidate_id: usize,
pub score: f64,
pub positions: Vec<usize>,
pub string: String,
}
impl PartialEq for StringMatch {
fn eq(&self, other: &Self) -> bool {
self.cmp(other).is_eq()
}
}
impl Eq for StringMatch {}
impl PartialOrd for StringMatch {
fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
Some(self.cmp(other))
}
}
impl Ord for StringMatch {
fn cmp(&self, other: &Self) -> Ordering {
self.score
.partial_cmp(&other.score)
.unwrap_or(Ordering::Equal)
.then_with(|| self.candidate_id.cmp(&other.candidate_id))
}
}
impl PartialEq for PathMatch {
fn eq(&self, other: &Self) -> bool {
self.cmp(other).is_eq()
}
}
impl Eq for PathMatch {}
impl PartialOrd for PathMatch {
fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
Some(self.cmp(other))
}
}
impl Ord for PathMatch {
fn cmp(&self, other: &Self) -> Ordering {
self.score
.partial_cmp(&other.score)
.unwrap_or(Ordering::Equal)
.then_with(|| self.worktree_id.cmp(&other.worktree_id))
.then_with(|| Arc::as_ptr(&self.path).cmp(&Arc::as_ptr(&other.path)))
}
}
pub async fn match_strings(
candidates: &[StringMatchCandidate],
query: &str,
smart_case: bool,
max_results: usize,
cancel_flag: &AtomicBool,
background: Arc<executor::Background>,
) -> Vec<StringMatch> {
if candidates.is_empty() || max_results == 0 {
return Default::default();
}
if query.is_empty() {
return candidates
.iter()
.map(|candidate| StringMatch {
candidate_id: candidate.id,
score: 0.,
positions: Default::default(),
string: candidate.string.clone(),
})
.collect();
}
let lowercase_query = query.to_lowercase().chars().collect::<Vec<_>>();
let query = query.chars().collect::<Vec<_>>();
let lowercase_query = &lowercase_query;
let query = &query;
let query_char_bag = CharBag::from(&lowercase_query[..]);
let num_cpus = background.num_cpus().min(candidates.len());
let segment_size = (candidates.len() + num_cpus - 1) / num_cpus;
let mut segment_results = (0..num_cpus)
.map(|_| Vec::with_capacity(max_results.min(candidates.len())))
.collect::<Vec<_>>();
background
.scoped(|scope| {
for (segment_idx, results) in segment_results.iter_mut().enumerate() {
let cancel_flag = &cancel_flag;
scope.spawn(async move {
let segment_start = cmp::min(segment_idx * segment_size, candidates.len());
let segment_end = cmp::min(segment_start + segment_size, candidates.len());
let mut matcher = Matcher::new(
query,
lowercase_query,
query_char_bag,
smart_case,
max_results,
);
matcher.match_strings(
&candidates[segment_start..segment_end],
results,
cancel_flag,
);
});
}
})
.await;
let mut results = Vec::new();
for segment_result in segment_results {
if results.is_empty() {
results = segment_result;
} else {
util::extend_sorted(&mut results, segment_result, max_results, |a, b| b.cmp(a));
}
}
results
}
pub async fn match_paths<'a, Set: PathMatchCandidateSet<'a>>(
candidate_sets: &'a [Set],
query: &str,
smart_case: bool,
max_results: usize,
cancel_flag: &AtomicBool,
background: Arc<executor::Background>,
) -> Vec<PathMatch> {
let path_count: usize = candidate_sets.iter().map(|s| s.len()).sum();
if path_count == 0 {
return Vec::new();
}
let lowercase_query = query.to_lowercase().chars().collect::<Vec<_>>();
let query = query.chars().collect::<Vec<_>>();
let lowercase_query = &lowercase_query;
let query = &query;
let query_char_bag = CharBag::from(&lowercase_query[..]);
let num_cpus = background.num_cpus().min(path_count);
let segment_size = (path_count + num_cpus - 1) / num_cpus;
let mut segment_results = (0..num_cpus)
.map(|_| Vec::with_capacity(max_results))
.collect::<Vec<_>>();
background
.scoped(|scope| {
for (segment_idx, results) in segment_results.iter_mut().enumerate() {
scope.spawn(async move {
let segment_start = segment_idx * segment_size;
let segment_end = segment_start + segment_size;
let mut matcher = Matcher::new(
query,
lowercase_query,
query_char_bag,
smart_case,
max_results,
);
let mut tree_start = 0;
for candidate_set in candidate_sets {
let tree_end = tree_start + candidate_set.len();
if tree_start < segment_end && segment_start < tree_end {
let start = cmp::max(tree_start, segment_start) - tree_start;
let end = cmp::min(tree_end, segment_end) - tree_start;
let candidates = candidate_set.candidates(start).take(end - start);
matcher.match_paths(
candidate_set.id(),
candidate_set.prefix(),
candidates,
results,
cancel_flag,
);
}
if tree_end >= segment_end {
break;
}
tree_start = tree_end;
}
})
}
})
.await;
let mut results = Vec::new();
for segment_result in segment_results {
if results.is_empty() {
results = segment_result;
} else {
util::extend_sorted(&mut results, segment_result, max_results, |a, b| b.cmp(a));
}
}
results
}
impl<'a> Matcher<'a> {
pub fn new(
query: &'a [char],
lowercase_query: &'a [char],
query_char_bag: CharBag,
smart_case: bool,
max_results: usize,
) -> Self {
Self {
query,
lowercase_query,
query_char_bag,
min_score: 0.0,
last_positions: vec![0; query.len()],
match_positions: vec![0; query.len()],
score_matrix: Vec::new(),
best_position_matrix: Vec::new(),
smart_case,
max_results,
}
}
pub fn match_strings(
&mut self,
candidates: &[StringMatchCandidate],
results: &mut Vec<StringMatch>,
cancel_flag: &AtomicBool,
) {
self.match_internal(
&[],
&[],
candidates.iter(),
results,
cancel_flag,
|candidate, score| StringMatch {
candidate_id: candidate.id,
score,
positions: Vec::new(),
string: candidate.string.to_string(),
},
)
}
pub fn match_paths<'c: 'a>(
&mut self,
tree_id: usize,
path_prefix: Arc<str>,
path_entries: impl Iterator<Item = PathMatchCandidate<'c>>,
results: &mut Vec<PathMatch>,
cancel_flag: &AtomicBool,
) {
let prefix = path_prefix.chars().collect::<Vec<_>>();
let lowercase_prefix = prefix
.iter()
.map(|c| c.to_ascii_lowercase())
.collect::<Vec<_>>();
self.match_internal(
&prefix,
&lowercase_prefix,
path_entries,
results,
cancel_flag,
|candidate, score| PathMatch {
score,
worktree_id: tree_id,
positions: Vec::new(),
path: candidate.path.clone(),
path_prefix: path_prefix.clone(),
},
)
}
fn match_internal<C: MatchCandidate, R, F>(
&mut self,
prefix: &[char],
lowercase_prefix: &[char],
candidates: impl Iterator<Item = C>,
results: &mut Vec<R>,
cancel_flag: &AtomicBool,
build_match: F,
) where
R: Match,
F: Fn(&C, f64) -> R,
{
let mut candidate_chars = Vec::new();
let mut lowercase_candidate_chars = Vec::new();
for candidate in candidates {
if !candidate.has_chars(self.query_char_bag) {
continue;
}
if cancel_flag.load(atomic::Ordering::Relaxed) {
break;
}
candidate_chars.clear();
lowercase_candidate_chars.clear();
for c in candidate.to_string().chars() {
candidate_chars.push(c);
lowercase_candidate_chars.push(c.to_ascii_lowercase());
}
if !self.find_last_positions(lowercase_prefix, &lowercase_candidate_chars) {
continue;
}
let matrix_len = self.query.len() * (prefix.len() + candidate_chars.len());
self.score_matrix.clear();
self.score_matrix.resize(matrix_len, None);
self.best_position_matrix.clear();
self.best_position_matrix.resize(matrix_len, 0);
let score = self.score_match(
&candidate_chars,
&lowercase_candidate_chars,
prefix,
lowercase_prefix,
);
if score > 0.0 {
let mut mat = build_match(&candidate, score);
if let Err(i) = results.binary_search_by(|m| mat.cmp(m)) {
if results.len() < self.max_results {
mat.set_positions(self.match_positions.clone());
results.insert(i, mat);
} else if i < results.len() {
results.pop();
mat.set_positions(self.match_positions.clone());
results.insert(i, mat);
}
if results.len() == self.max_results {
self.min_score = results.last().unwrap().score();
}
}
}
}
}
fn find_last_positions(
&mut self,
lowercase_prefix: &[char],
lowercase_candidate: &[char],
) -> bool {
let mut lowercase_prefix = lowercase_prefix.iter();
let mut lowercase_candidate = lowercase_candidate.iter();
for (i, char) in self.lowercase_query.iter().enumerate().rev() {
if let Some(j) = lowercase_candidate.rposition(|c| c == char) {
self.last_positions[i] = j + lowercase_prefix.len();
} else if let Some(j) = lowercase_prefix.rposition(|c| c == char) {
self.last_positions[i] = j;
} else {
return false;
}
}
true
}
fn score_match(
&mut self,
path: &[char],
path_cased: &[char],
prefix: &[char],
lowercase_prefix: &[char],
) -> f64 {
let score = self.recursive_score_match(
path,
path_cased,
prefix,
lowercase_prefix,
0,
0,
self.query.len() as f64,
) * self.query.len() as f64;
if score <= 0.0 {
return 0.0;
}
let path_len = prefix.len() + path.len();
let mut cur_start = 0;
let mut byte_ix = 0;
let mut char_ix = 0;
for i in 0..self.query.len() {
let match_char_ix = self.best_position_matrix[i * path_len + cur_start];
while char_ix < match_char_ix {
let ch = prefix
.get(char_ix)
.or_else(|| path.get(char_ix - prefix.len()))
.unwrap();
byte_ix += ch.len_utf8();
char_ix += 1;
}
cur_start = match_char_ix + 1;
self.match_positions[i] = byte_ix;
}
score
}
#[allow(clippy::too_many_arguments)]
fn recursive_score_match(
&mut self,
path: &[char],
path_cased: &[char],
prefix: &[char],
lowercase_prefix: &[char],
query_idx: usize,
path_idx: usize,
cur_score: f64,
) -> f64 {
if query_idx == self.query.len() {
return 1.0;
}
let path_len = prefix.len() + path.len();
if let Some(memoized) = self.score_matrix[query_idx * path_len + path_idx] {
return memoized;
}
let mut score = 0.0;
let mut best_position = 0;
let query_char = self.lowercase_query[query_idx];
let limit = self.last_positions[query_idx];
let mut last_slash = 0;
for j in path_idx..=limit {
let path_char = if j < prefix.len() {
lowercase_prefix[j]
} else {
path_cased[j - prefix.len()]
};
let is_path_sep = path_char == '/' || path_char == '\\';
if query_idx == 0 && is_path_sep {
last_slash = j;
}
if query_char == path_char || (is_path_sep && query_char == '_' || query_char == '\\') {
let curr = if j < prefix.len() {
prefix[j]
} else {
path[j - prefix.len()]
};
let mut char_score = 1.0;
if j > path_idx {
let last = if j - 1 < prefix.len() {
prefix[j - 1]
} else {
path[j - 1 - prefix.len()]
};
if last == '/' {
char_score = 0.9;
} else if (last == '-' || last == '_' || last == ' ' || last.is_numeric())
|| (last.is_lowercase() && curr.is_uppercase())
{
char_score = 0.8;
} else if last == '.' {
char_score = 0.7;
} else if query_idx == 0 {
char_score = BASE_DISTANCE_PENALTY;
} else {
char_score = MIN_DISTANCE_PENALTY.max(
BASE_DISTANCE_PENALTY
- (j - path_idx - 1) as f64 * ADDITIONAL_DISTANCE_PENALTY,
);
}
}
// Apply a severe penalty if the case doesn't match.
// This will make the exact matches have higher score than the case-insensitive and the
// path insensitive matches.
if (self.smart_case || curr == '/') && self.query[query_idx] != curr {
char_score *= 0.001;
}
let mut multiplier = char_score;
// Scale the score based on how deep within the path we found the match.
if query_idx == 0 {
multiplier /= ((prefix.len() + path.len()) - last_slash) as f64;
}
let mut next_score = 1.0;
if self.min_score > 0.0 {
next_score = cur_score * multiplier;
// Scores only decrease. If we can't pass the previous best, bail
if next_score < self.min_score {
// Ensure that score is non-zero so we use it in the memo table.
if score == 0.0 {
score = 1e-18;
}
continue;
}
}
let new_score = self.recursive_score_match(
path,
path_cased,
prefix,
lowercase_prefix,
query_idx + 1,
j + 1,
next_score,
) * multiplier;
if new_score > score {
score = new_score;
best_position = j;
// Optimization: can't score better than 1.
if new_score == 1.0 {
break;
}
}
}
}
if best_position != 0 {
self.best_position_matrix[query_idx * path_len + path_idx] = best_position;
}
self.score_matrix[query_idx * path_len + path_idx] = Some(score);
score
}
}
#[cfg(test)]
mod tests {
use super::*;
use std::path::PathBuf;
#[test]
fn test_get_last_positions() {
let mut query: &[char] = &['d', 'c'];
let mut matcher = Matcher::new(query, query, query.into(), false, 10);
let result = matcher.find_last_positions(&['a', 'b', 'c'], &['b', 'd', 'e', 'f']);
assert!(!result);
query = &['c', 'd'];
let mut matcher = Matcher::new(query, query, query.into(), false, 10);
let result = matcher.find_last_positions(&['a', 'b', 'c'], &['b', 'd', 'e', 'f']);
assert!(result);
assert_eq!(matcher.last_positions, vec![2, 4]);
query = &['z', '/', 'z', 'f'];
let mut matcher = Matcher::new(query, query, query.into(), false, 10);
let result = matcher.find_last_positions(&['z', 'e', 'd', '/'], &['z', 'e', 'd', '/', 'f']);
assert!(result);
assert_eq!(matcher.last_positions, vec![0, 3, 4, 8]);
}
#[test]
fn test_match_path_entries() {
let paths = vec![
"",
"a",
"ab",
"abC",
"abcd",
"alphabravocharlie",
"AlphaBravoCharlie",
"thisisatestdir",
"/////ThisIsATestDir",
"/this/is/a/test/dir",
"/test/tiatd",
];
assert_eq!(
match_query("abc", false, &paths),
vec![
("abC", vec![0, 1, 2]),
("abcd", vec![0, 1, 2]),
("AlphaBravoCharlie", vec![0, 5, 10]),
("alphabravocharlie", vec![4, 5, 10]),
]
);
assert_eq!(
match_query("t/i/a/t/d", false, &paths),
vec![("/this/is/a/test/dir", vec![1, 5, 6, 8, 9, 10, 11, 15, 16]),]
);
assert_eq!(
match_query("tiatd", false, &paths),
vec![
("/test/tiatd", vec![6, 7, 8, 9, 10]),
("/this/is/a/test/dir", vec![1, 6, 9, 11, 16]),
("/////ThisIsATestDir", vec![5, 9, 11, 12, 16]),
("thisisatestdir", vec![0, 2, 6, 7, 11]),
]
);
}
#[test]
fn test_match_multibyte_path_entries() {
let paths = vec!["aαbβ/cγ", "αβγδ/bcde", "c1⃣2⃣3⃣/d4⃣5⃣6⃣/e7⃣8⃣9⃣/f", "/d/🆒/h"];
assert_eq!("1".len(), 7);
assert_eq!(
match_query("bcd", false, &paths),
vec![
("αβγδ/bcde", vec![9, 10, 11]),
("aαbβ/cγ", vec![3, 7, 10]),
]
);
assert_eq!(
match_query("cde", false, &paths),
vec![
("αβγδ/bcde", vec![10, 11, 12]),
("c1⃣2⃣3⃣/d4⃣5⃣6⃣/e7⃣8⃣9⃣/f", vec![0, 23, 46]),
]
);
}
fn match_query<'a>(
query: &str,
smart_case: bool,
paths: &[&'a str],
) -> Vec<(&'a str, Vec<usize>)> {
let lowercase_query = query.to_lowercase().chars().collect::<Vec<_>>();
let query = query.chars().collect::<Vec<_>>();
let query_chars = CharBag::from(&lowercase_query[..]);
let path_arcs = paths
.iter()
.map(|path| Arc::from(PathBuf::from(path)))
.collect::<Vec<_>>();
let mut path_entries = Vec::new();
for (i, path) in paths.iter().enumerate() {
let lowercase_path = path.to_lowercase().chars().collect::<Vec<_>>();
let char_bag = CharBag::from(lowercase_path.as_slice());
path_entries.push(PathMatchCandidate {
char_bag,
path: path_arcs.get(i).unwrap(),
});
}
let mut matcher = Matcher::new(&query, &lowercase_query, query_chars, smart_case, 100);
let cancel_flag = AtomicBool::new(false);
let mut results = Vec::new();
matcher.match_paths(
0,
"".into(),
path_entries.into_iter(),
&mut results,
&cancel_flag,
);
results
.into_iter()
.map(|result| {
(
paths
.iter()
.copied()
.find(|p| result.path.as_ref() == Path::new(p))
.unwrap(),
result.positions,
)
})
.collect()
}
}
pub use paths::{match_path_sets, PathMatch, PathMatchCandidate, PathMatchCandidateSet};
pub use strings::{match_strings, StringMatch, StringMatchCandidate};
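Both `match_strings` and `match_paths` above shard the candidate list across CPUs, let each worker fill its own sorted vector, and then fold the shards together with `util::extend_sorted`. That helper is not shown in this diff; below is a minimal standalone sketch of the merge it is being asked to do, with the signature inferred from the call sites above (Zed's real `util::extend_sorted` may differ in detail).

use std::cmp::Ordering;

// Sketch of the shard merge: insert each new item into an already-sorted
// vec by binary search, never letting it grow past `limit`. Exact ties
// (Ok from the search) are dropped, which is safe here only because
// matches order totally via their candidate ids / paths.
fn extend_sorted<T>(
    items: &mut Vec<T>,
    new_items: impl IntoIterator<Item = T>,
    limit: usize,
    mut cmp: impl FnMut(&T, &T) -> Ordering,
) {
    for new in new_items {
        if let Err(ix) = items.binary_search_by(|probe| cmp(probe, &new)) {
            if ix < limit {
                items.insert(ix, new);
                items.truncate(limit);
            }
        }
    }
}

Called with `|a, b| b.cmp(a)` as in the loops above, the vectors stay sorted best-match-first while capped at `max_results`.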

crates/fuzzy/src/matcher.rs (new file, 463 lines)

@@ -0,0 +1,463 @@
use std::{
borrow::Cow,
sync::atomic::{self, AtomicBool},
};
use crate::CharBag;
const BASE_DISTANCE_PENALTY: f64 = 0.6;
const ADDITIONAL_DISTANCE_PENALTY: f64 = 0.05;
const MIN_DISTANCE_PENALTY: f64 = 0.2;
pub struct Matcher<'a> {
query: &'a [char],
lowercase_query: &'a [char],
query_char_bag: CharBag,
smart_case: bool,
max_results: usize,
min_score: f64,
match_positions: Vec<usize>,
last_positions: Vec<usize>,
score_matrix: Vec<Option<f64>>,
best_position_matrix: Vec<usize>,
}
pub trait Match: Ord {
fn score(&self) -> f64;
fn set_positions(&mut self, positions: Vec<usize>);
}
pub trait MatchCandidate {
fn has_chars(&self, bag: CharBag) -> bool;
fn to_string(&self) -> Cow<'_, str>;
}
impl<'a> Matcher<'a> {
pub fn new(
query: &'a [char],
lowercase_query: &'a [char],
query_char_bag: CharBag,
smart_case: bool,
max_results: usize,
) -> Self {
Self {
query,
lowercase_query,
query_char_bag,
min_score: 0.0,
last_positions: vec![0; query.len()],
match_positions: vec![0; query.len()],
score_matrix: Vec::new(),
best_position_matrix: Vec::new(),
smart_case,
max_results,
}
}
pub fn match_candidates<C: MatchCandidate, R, F>(
&mut self,
prefix: &[char],
lowercase_prefix: &[char],
candidates: impl Iterator<Item = C>,
results: &mut Vec<R>,
cancel_flag: &AtomicBool,
build_match: F,
) where
R: Match,
F: Fn(&C, f64) -> R,
{
let mut candidate_chars = Vec::new();
let mut lowercase_candidate_chars = Vec::new();
for candidate in candidates {
if !candidate.has_chars(self.query_char_bag) {
continue;
}
if cancel_flag.load(atomic::Ordering::Relaxed) {
break;
}
candidate_chars.clear();
lowercase_candidate_chars.clear();
for c in candidate.to_string().chars() {
candidate_chars.push(c);
lowercase_candidate_chars.push(c.to_ascii_lowercase());
}
if !self.find_last_positions(lowercase_prefix, &lowercase_candidate_chars) {
continue;
}
let matrix_len = self.query.len() * (prefix.len() + candidate_chars.len());
self.score_matrix.clear();
self.score_matrix.resize(matrix_len, None);
self.best_position_matrix.clear();
self.best_position_matrix.resize(matrix_len, 0);
let score = self.score_match(
&candidate_chars,
&lowercase_candidate_chars,
prefix,
lowercase_prefix,
);
if score > 0.0 {
let mut mat = build_match(&candidate, score);
if let Err(i) = results.binary_search_by(|m| mat.cmp(m)) {
if results.len() < self.max_results {
mat.set_positions(self.match_positions.clone());
results.insert(i, mat);
} else if i < results.len() {
results.pop();
mat.set_positions(self.match_positions.clone());
results.insert(i, mat);
}
if results.len() == self.max_results {
self.min_score = results.last().unwrap().score();
}
}
}
}
}
fn find_last_positions(
&mut self,
lowercase_prefix: &[char],
lowercase_candidate: &[char],
) -> bool {
let mut lowercase_prefix = lowercase_prefix.iter();
let mut lowercase_candidate = lowercase_candidate.iter();
for (i, char) in self.lowercase_query.iter().enumerate().rev() {
if let Some(j) = lowercase_candidate.rposition(|c| c == char) {
self.last_positions[i] = j + lowercase_prefix.len();
} else if let Some(j) = lowercase_prefix.rposition(|c| c == char) {
self.last_positions[i] = j;
} else {
return false;
}
}
true
}
fn score_match(
&mut self,
path: &[char],
path_cased: &[char],
prefix: &[char],
lowercase_prefix: &[char],
) -> f64 {
let score = self.recursive_score_match(
path,
path_cased,
prefix,
lowercase_prefix,
0,
0,
self.query.len() as f64,
) * self.query.len() as f64;
if score <= 0.0 {
return 0.0;
}
let path_len = prefix.len() + path.len();
let mut cur_start = 0;
let mut byte_ix = 0;
let mut char_ix = 0;
for i in 0..self.query.len() {
let match_char_ix = self.best_position_matrix[i * path_len + cur_start];
while char_ix < match_char_ix {
let ch = prefix
.get(char_ix)
.or_else(|| path.get(char_ix - prefix.len()))
.unwrap();
byte_ix += ch.len_utf8();
char_ix += 1;
}
cur_start = match_char_ix + 1;
self.match_positions[i] = byte_ix;
}
score
}
#[allow(clippy::too_many_arguments)]
fn recursive_score_match(
&mut self,
path: &[char],
path_cased: &[char],
prefix: &[char],
lowercase_prefix: &[char],
query_idx: usize,
path_idx: usize,
cur_score: f64,
) -> f64 {
if query_idx == self.query.len() {
return 1.0;
}
let path_len = prefix.len() + path.len();
if let Some(memoized) = self.score_matrix[query_idx * path_len + path_idx] {
return memoized;
}
let mut score = 0.0;
let mut best_position = 0;
let query_char = self.lowercase_query[query_idx];
let limit = self.last_positions[query_idx];
let mut last_slash = 0;
for j in path_idx..=limit {
let path_char = if j < prefix.len() {
lowercase_prefix[j]
} else {
path_cased[j - prefix.len()]
};
let is_path_sep = path_char == '/' || path_char == '\\';
if query_idx == 0 && is_path_sep {
last_slash = j;
}
if query_char == path_char || (is_path_sep && query_char == '_' || query_char == '\\') {
let curr = if j < prefix.len() {
prefix[j]
} else {
path[j - prefix.len()]
};
let mut char_score = 1.0;
if j > path_idx {
let last = if j - 1 < prefix.len() {
prefix[j - 1]
} else {
path[j - 1 - prefix.len()]
};
if last == '/' {
char_score = 0.9;
} else if (last == '-' || last == '_' || last == ' ' || last.is_numeric())
|| (last.is_lowercase() && curr.is_uppercase())
{
char_score = 0.8;
} else if last == '.' {
char_score = 0.7;
} else if query_idx == 0 {
char_score = BASE_DISTANCE_PENALTY;
} else {
char_score = MIN_DISTANCE_PENALTY.max(
BASE_DISTANCE_PENALTY
- (j - path_idx - 1) as f64 * ADDITIONAL_DISTANCE_PENALTY,
);
}
}
// Apply a severe penalty if the case doesn't match.
// This will make the exact matches have higher score than the case-insensitive and the
// path insensitive matches.
if (self.smart_case || curr == '/') && self.query[query_idx] != curr {
char_score *= 0.001;
}
let mut multiplier = char_score;
// Scale the score based on how deep within the path we found the match.
if query_idx == 0 {
multiplier /= ((prefix.len() + path.len()) - last_slash) as f64;
}
let mut next_score = 1.0;
if self.min_score > 0.0 {
next_score = cur_score * multiplier;
// Scores only decrease. If we can't pass the previous best, bail
if next_score < self.min_score {
// Ensure that score is non-zero so we use it in the memo table.
if score == 0.0 {
score = 1e-18;
}
continue;
}
}
let new_score = self.recursive_score_match(
path,
path_cased,
prefix,
lowercase_prefix,
query_idx + 1,
j + 1,
next_score,
) * multiplier;
if new_score > score {
score = new_score;
best_position = j;
// Optimization: can't score better than 1.
if new_score == 1.0 {
break;
}
}
}
}
if best_position != 0 {
self.best_position_matrix[query_idx * path_len + path_idx] = best_position;
}
self.score_matrix[query_idx * path_len + path_idx] = Some(score);
score
}
}
#[cfg(test)]
mod tests {
use crate::{PathMatch, PathMatchCandidate};
use super::*;
use std::{
path::{Path, PathBuf},
sync::Arc,
};
#[test]
fn test_get_last_positions() {
let mut query: &[char] = &['d', 'c'];
let mut matcher = Matcher::new(query, query, query.into(), false, 10);
let result = matcher.find_last_positions(&['a', 'b', 'c'], &['b', 'd', 'e', 'f']);
assert!(!result);
query = &['c', 'd'];
let mut matcher = Matcher::new(query, query, query.into(), false, 10);
let result = matcher.find_last_positions(&['a', 'b', 'c'], &['b', 'd', 'e', 'f']);
assert!(result);
assert_eq!(matcher.last_positions, vec![2, 4]);
query = &['z', '/', 'z', 'f'];
let mut matcher = Matcher::new(query, query, query.into(), false, 10);
let result = matcher.find_last_positions(&['z', 'e', 'd', '/'], &['z', 'e', 'd', '/', 'f']);
assert!(result);
assert_eq!(matcher.last_positions, vec![0, 3, 4, 8]);
}
#[test]
fn test_match_path_entries() {
let paths = vec![
"",
"a",
"ab",
"abC",
"abcd",
"alphabravocharlie",
"AlphaBravoCharlie",
"thisisatestdir",
"/////ThisIsATestDir",
"/this/is/a/test/dir",
"/test/tiatd",
];
assert_eq!(
match_single_path_query("abc", false, &paths),
vec![
("abC", vec![0, 1, 2]),
("abcd", vec![0, 1, 2]),
("AlphaBravoCharlie", vec![0, 5, 10]),
("alphabravocharlie", vec![4, 5, 10]),
]
);
assert_eq!(
match_single_path_query("t/i/a/t/d", false, &paths),
vec![("/this/is/a/test/dir", vec![1, 5, 6, 8, 9, 10, 11, 15, 16]),]
);
assert_eq!(
match_single_path_query("tiatd", false, &paths),
vec![
("/test/tiatd", vec![6, 7, 8, 9, 10]),
("/this/is/a/test/dir", vec![1, 6, 9, 11, 16]),
("/////ThisIsATestDir", vec![5, 9, 11, 12, 16]),
("thisisatestdir", vec![0, 2, 6, 7, 11]),
]
);
}
#[test]
fn test_match_multibyte_path_entries() {
let paths = vec!["aαbβ/cγ", "αβγδ/bcde", "c1⃣2⃣3⃣/d4⃣5⃣6⃣/e7⃣8⃣9⃣/f", "/d/🆒/h"];
assert_eq!("1".len(), 7);
assert_eq!(
match_single_path_query("bcd", false, &paths),
vec![
("αβγδ/bcde", vec![9, 10, 11]),
("aαbβ/cγ", vec![3, 7, 10]),
]
);
assert_eq!(
match_single_path_query("cde", false, &paths),
vec![
("αβγδ/bcde", vec![10, 11, 12]),
("c1⃣2⃣3⃣/d4⃣5⃣6⃣/e7⃣8⃣9⃣/f", vec![0, 23, 46]),
]
);
}
fn match_single_path_query<'a>(
query: &str,
smart_case: bool,
paths: &[&'a str],
) -> Vec<(&'a str, Vec<usize>)> {
let lowercase_query = query.to_lowercase().chars().collect::<Vec<_>>();
let query = query.chars().collect::<Vec<_>>();
let query_chars = CharBag::from(&lowercase_query[..]);
let path_arcs: Vec<Arc<Path>> = paths
.iter()
.map(|path| Arc::from(PathBuf::from(path)))
.collect::<Vec<_>>();
let mut path_entries = Vec::new();
for (i, path) in paths.iter().enumerate() {
let lowercase_path = path.to_lowercase().chars().collect::<Vec<_>>();
let char_bag = CharBag::from(lowercase_path.as_slice());
path_entries.push(PathMatchCandidate {
char_bag,
path: &path_arcs[i],
});
}
let mut matcher = Matcher::new(&query, &lowercase_query, query_chars, smart_case, 100);
let cancel_flag = AtomicBool::new(false);
let mut results = Vec::new();
matcher.match_candidates(
&[],
&[],
path_entries.into_iter(),
&mut results,
&cancel_flag,
|candidate, score| PathMatch {
score,
worktree_id: 0,
positions: Vec::new(),
path: candidate.path.clone(),
path_prefix: "".into(),
},
);
results
.into_iter()
.map(|result| {
(
paths
.iter()
.copied()
.find(|p| result.path.as_ref() == Path::new(p))
.unwrap(),
result.positions,
)
})
.collect()
}
}
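Within a shard, `match_candidates` keeps `results` sorted best-first and capped at `max_results` via a reversed binary search. Here is the same bookkeeping in isolation, with a plain `(score, id)` tuple standing in for a real match type (the tuple and function name are illustrative only):

// `results` stays sorted descending because the probe closure compares the
// new element against each existing one (`mat.cmp(m)`), flipping the order.
fn push_top_n(results: &mut Vec<(u64, usize)>, mat: (u64, usize), max_results: usize) {
    if let Err(i) = results.binary_search_by(|m| mat.cmp(m)) {
        if results.len() < max_results {
            results.insert(i, mat);
        } else if i < results.len() {
            // Vec is full and the new match beats the current worst:
            // drop the tail entry, then insert in place.
            results.pop();
            results.insert(i, mat);
        }
    }
}

One subtlety carried over from the original: an exact tie (Ok from the search) is discarded outright, which is safe only because candidate ids make every real match distinct. The `min_score` bookkeeping from the original is omitted here.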

crates/fuzzy/src/paths.rs (new file, 174 lines)

@@ -0,0 +1,174 @@
use std::{
borrow::Cow,
cmp::{self, Ordering},
path::Path,
sync::{atomic::AtomicBool, Arc},
};
use gpui::executor;
use crate::{
matcher::{Match, MatchCandidate, Matcher},
CharBag,
};
#[derive(Clone, Debug)]
pub struct PathMatchCandidate<'a> {
pub path: &'a Arc<Path>,
pub char_bag: CharBag,
}
#[derive(Clone, Debug)]
pub struct PathMatch {
pub score: f64,
pub positions: Vec<usize>,
pub worktree_id: usize,
pub path: Arc<Path>,
pub path_prefix: Arc<str>,
}
pub trait PathMatchCandidateSet<'a>: Send + Sync {
type Candidates: Iterator<Item = PathMatchCandidate<'a>>;
fn id(&self) -> usize;
fn len(&self) -> usize;
fn is_empty(&self) -> bool {
self.len() == 0
}
fn prefix(&self) -> Arc<str>;
fn candidates(&'a self, start: usize) -> Self::Candidates;
}
impl Match for PathMatch {
fn score(&self) -> f64 {
self.score
}
fn set_positions(&mut self, positions: Vec<usize>) {
self.positions = positions;
}
}
impl<'a> MatchCandidate for PathMatchCandidate<'a> {
fn has_chars(&self, bag: CharBag) -> bool {
self.char_bag.is_superset(bag)
}
fn to_string(&self) -> Cow<'a, str> {
self.path.to_string_lossy()
}
}
impl PartialEq for PathMatch {
fn eq(&self, other: &Self) -> bool {
self.cmp(other).is_eq()
}
}
impl Eq for PathMatch {}
impl PartialOrd for PathMatch {
fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
Some(self.cmp(other))
}
}
impl Ord for PathMatch {
fn cmp(&self, other: &Self) -> Ordering {
self.score
.partial_cmp(&other.score)
.unwrap_or(Ordering::Equal)
.then_with(|| self.worktree_id.cmp(&other.worktree_id))
.then_with(|| self.path.cmp(&other.path))
}
}
pub async fn match_path_sets<'a, Set: PathMatchCandidateSet<'a>>(
candidate_sets: &'a [Set],
query: &str,
smart_case: bool,
max_results: usize,
cancel_flag: &AtomicBool,
background: Arc<executor::Background>,
) -> Vec<PathMatch> {
let path_count: usize = candidate_sets.iter().map(|s| s.len()).sum();
if path_count == 0 {
return Vec::new();
}
let lowercase_query = query.to_lowercase().chars().collect::<Vec<_>>();
let query = query.chars().collect::<Vec<_>>();
let lowercase_query = &lowercase_query;
let query = &query;
let query_char_bag = CharBag::from(&lowercase_query[..]);
let num_cpus = background.num_cpus().min(path_count);
let segment_size = (path_count + num_cpus - 1) / num_cpus;
let mut segment_results = (0..num_cpus)
.map(|_| Vec::with_capacity(max_results))
.collect::<Vec<_>>();
background
.scoped(|scope| {
for (segment_idx, results) in segment_results.iter_mut().enumerate() {
scope.spawn(async move {
let segment_start = segment_idx * segment_size;
let segment_end = segment_start + segment_size;
let mut matcher = Matcher::new(
query,
lowercase_query,
query_char_bag,
smart_case,
max_results,
);
let mut tree_start = 0;
for candidate_set in candidate_sets {
let tree_end = tree_start + candidate_set.len();
if tree_start < segment_end && segment_start < tree_end {
let start = cmp::max(tree_start, segment_start) - tree_start;
let end = cmp::min(tree_end, segment_end) - tree_start;
let candidates = candidate_set.candidates(start).take(end - start);
let worktree_id = candidate_set.id();
let prefix = candidate_set.prefix().chars().collect::<Vec<_>>();
let lowercase_prefix = prefix
.iter()
.map(|c| c.to_ascii_lowercase())
.collect::<Vec<_>>();
matcher.match_candidates(
&prefix,
&lowercase_prefix,
candidates,
results,
cancel_flag,
|candidate, score| PathMatch {
score,
worktree_id,
positions: Vec::new(),
path: candidate.path.clone(),
path_prefix: candidate_set.prefix(),
},
);
}
if tree_end >= segment_end {
break;
}
tree_start = tree_end;
}
})
}
})
.await;
let mut results = Vec::new();
for segment_result in segment_results {
if results.is_empty() {
results = segment_result;
} else {
util::extend_sorted(&mut results, segment_result, max_results, |a, b| b.cmp(a));
}
}
results
}
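Callers hand `match_path_sets` one `PathMatchCandidateSet` per worktree. A minimal, hypothetical Vec-backed implementation is sketched below; the fn-pointer `Candidates` type is just a way to name the iterator without `impl Trait` in a trait, and the sketch assumes `CharBag` is `Copy` (its by-value use above suggests it is).

use std::{path::Path, sync::Arc};

// Hypothetical candidate set over pre-computed (path, CharBag) pairs; only
// the trait and PathMatchCandidate come from this crate.
struct VecPathSet {
    id: usize,
    prefix: Arc<str>,
    entries: Vec<(Arc<Path>, CharBag)>,
}

impl<'a> PathMatchCandidateSet<'a> for VecPathSet {
    type Candidates = std::iter::Map<
        std::slice::Iter<'a, (Arc<Path>, CharBag)>,
        fn(&'a (Arc<Path>, CharBag)) -> PathMatchCandidate<'a>,
    >;

    fn id(&self) -> usize {
        self.id
    }

    fn len(&self) -> usize {
        self.entries.len()
    }

    fn prefix(&self) -> Arc<str> {
        self.prefix.clone()
    }

    fn candidates(&'a self, start: usize) -> Self::Candidates {
        fn to_candidate<'b>(entry: &'b (Arc<Path>, CharBag)) -> PathMatchCandidate<'b> {
            PathMatchCandidate {
                path: &entry.0,
                char_bag: entry.1, // assumes CharBag: Copy
            }
        }
        self.entries[start..]
            .iter()
            .map(to_candidate as fn(&'a (Arc<Path>, CharBag)) -> PathMatchCandidate<'a>)
    }
}

With a slice of such sets, `match_path_sets(&sets, query, false, 100, &cancel_flag, background).await` fans the work out across CPUs exactly as `match_strings` does.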

crates/fuzzy/src/strings.rs (new file, 161 lines)

@@ -0,0 +1,161 @@
use std::{
borrow::Cow,
cmp::{self, Ordering},
sync::{atomic::AtomicBool, Arc},
};
use gpui::executor;
use crate::{
matcher::{Match, MatchCandidate, Matcher},
CharBag,
};
#[derive(Clone, Debug)]
pub struct StringMatchCandidate {
pub id: usize,
pub string: String,
pub char_bag: CharBag,
}
impl Match for StringMatch {
fn score(&self) -> f64 {
self.score
}
fn set_positions(&mut self, positions: Vec<usize>) {
self.positions = positions;
}
}
impl StringMatchCandidate {
pub fn new(id: usize, string: String) -> Self {
Self {
id,
char_bag: CharBag::from(string.as_str()),
string,
}
}
}
impl<'a> MatchCandidate for &'a StringMatchCandidate {
fn has_chars(&self, bag: CharBag) -> bool {
self.char_bag.is_superset(bag)
}
fn to_string(&self) -> Cow<'a, str> {
self.string.as_str().into()
}
}
#[derive(Clone, Debug)]
pub struct StringMatch {
pub candidate_id: usize,
pub score: f64,
pub positions: Vec<usize>,
pub string: String,
}
impl PartialEq for StringMatch {
fn eq(&self, other: &Self) -> bool {
self.cmp(other).is_eq()
}
}
impl Eq for StringMatch {}
impl PartialOrd for StringMatch {
fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
Some(self.cmp(other))
}
}
impl Ord for StringMatch {
fn cmp(&self, other: &Self) -> Ordering {
self.score
.partial_cmp(&other.score)
.unwrap_or(Ordering::Equal)
.then_with(|| self.candidate_id.cmp(&other.candidate_id))
}
}
pub async fn match_strings(
candidates: &[StringMatchCandidate],
query: &str,
smart_case: bool,
max_results: usize,
cancel_flag: &AtomicBool,
background: Arc<executor::Background>,
) -> Vec<StringMatch> {
if candidates.is_empty() || max_results == 0 {
return Default::default();
}
if query.is_empty() {
return candidates
.iter()
.map(|candidate| StringMatch {
candidate_id: candidate.id,
score: 0.,
positions: Default::default(),
string: candidate.string.clone(),
})
.collect();
}
let lowercase_query = query.to_lowercase().chars().collect::<Vec<_>>();
let query = query.chars().collect::<Vec<_>>();
let lowercase_query = &lowercase_query;
let query = &query;
let query_char_bag = CharBag::from(&lowercase_query[..]);
let num_cpus = background.num_cpus().min(candidates.len());
let segment_size = (candidates.len() + num_cpus - 1) / num_cpus;
let mut segment_results = (0..num_cpus)
.map(|_| Vec::with_capacity(max_results.min(candidates.len())))
.collect::<Vec<_>>();
background
.scoped(|scope| {
for (segment_idx, results) in segment_results.iter_mut().enumerate() {
let cancel_flag = &cancel_flag;
scope.spawn(async move {
let segment_start = cmp::min(segment_idx * segment_size, candidates.len());
let segment_end = cmp::min(segment_start + segment_size, candidates.len());
let mut matcher = Matcher::new(
query,
lowercase_query,
query_char_bag,
smart_case,
max_results,
);
matcher.match_candidates(
&[],
&[],
candidates[segment_start..segment_end].iter(),
results,
cancel_flag,
|candidate, score| StringMatch {
candidate_id: candidate.id,
score,
positions: Vec::new(),
string: candidate.string.to_string(),
},
);
});
}
})
.await;
let mut results = Vec::new();
for segment_result in segment_results {
if results.is_empty() {
results = segment_result;
} else {
util::extend_sorted(&mut results, segment_result, max_results, |a, b| b.cmp(a));
}
}
results
}
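As a usage sketch, here is how a hypothetical picker could drive `match_strings`. The `gpui::executor::Background` handle is an assumption about the caller's environment (inside Zed it would come from something like `cx.background()`), as is the function itself:

use std::sync::{atomic::AtomicBool, Arc};

// Hypothetical: filter picker items against a user query. Only
// `match_strings`, `StringMatch`, and `StringMatchCandidate` come from
// this crate.
async fn filter_items(
    items: &[String],
    query: &str,
    background: Arc<gpui::executor::Background>, // assumed to be in scope
) -> Vec<StringMatch> {
    let candidates: Vec<StringMatchCandidate> = items
        .iter()
        .enumerate()
        .map(|(id, item)| StringMatchCandidate::new(id, item.clone()))
        .collect();
    // Flip this to true from another task to abandon an in-flight query.
    let cancel_flag = AtomicBool::new(false);
    match_strings(&candidates, query, false, 10, &cancel_flag, background).await
}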


@@ -71,18 +71,26 @@ impl BufferDiff {
}
}
pub fn hunks_in_range<'a>(
pub fn hunks_in_row_range<'a>(
&'a self,
query_row_range: Range<u32>,
range: Range<u32>,
buffer: &'a BufferSnapshot,
reversed: bool,
) -> impl 'a + Iterator<Item = DiffHunk<u32>> {
let start = buffer.anchor_before(Point::new(query_row_range.start, 0));
let end = buffer.anchor_after(Point::new(query_row_range.end, 0));
let start = buffer.anchor_before(Point::new(range.start, 0));
let end = buffer.anchor_after(Point::new(range.end, 0));
self.hunks_intersecting_range(start..end, buffer, reversed)
}
pub fn hunks_intersecting_range<'a>(
&'a self,
range: Range<Anchor>,
buffer: &'a BufferSnapshot,
reversed: bool,
) -> impl 'a + Iterator<Item = DiffHunk<u32>> {
let mut cursor = self.tree.filter::<_, DiffHunkSummary>(move |summary| {
let before_start = summary.buffer_range.end.cmp(&start, buffer).is_lt();
let after_end = summary.buffer_range.start.cmp(&end, buffer).is_gt();
let before_start = summary.buffer_range.end.cmp(&range.start, buffer).is_lt();
let after_end = summary.buffer_range.start.cmp(&range.end, buffer).is_gt();
!before_start && !after_end
});
@@ -141,7 +149,9 @@ impl BufferDiff {
#[cfg(test)]
fn hunks<'a>(&'a self, text: &'a BufferSnapshot) -> impl 'a + Iterator<Item = DiffHunk<u32>> {
self.hunks_in_range(0..u32::MAX, text, false)
let start = text.anchor_before(Point::new(0, 0));
let end = text.anchor_after(Point::new(u32::MAX, u32::MAX));
self.hunks_intersecting_range(start..end, text, false)
}
fn diff<'a>(head: &'a str, current: &'a str) -> Option<GitPatch<'a>> {
@@ -355,7 +365,7 @@ mod tests {
assert_eq!(diff.hunks(&buffer).count(), 8);
assert_hunks(
diff.hunks_in_range(7..12, &buffer, false),
diff.hunks_in_row_range(7..12, &buffer, false),
&buffer,
&diff_base,
&[


@@ -1,6 +1,6 @@
use std::sync::Arc;
use editor::{display_map::ToDisplayPoint, Autoscroll, DisplayPoint, Editor};
use editor::{display_map::ToDisplayPoint, scroll::autoscroll::Autoscroll, DisplayPoint, Editor};
use gpui::{
actions, elements::*, geometry::vector::Vector2F, AnyViewHandle, Axis, Entity,
MutableAppContext, RenderContext, View, ViewContext, ViewHandle,


@@ -17,6 +17,7 @@ collections = { path = "../collections" }
gpui_macros = { path = "../gpui_macros" }
util = { path = "../util" }
sum_tree = { path = "../sum_tree" }
sqlez = { path = "../sqlez" }
async-task = "4.0.3"
backtrace = { version = "0.3", optional = true }
ctor = "0.1"


@@ -1,10 +1,10 @@
#include "nan.h"
#include "tree_sitter/parser.h"
#include <node.h>
#include "nan.h"
using namespace v8;
extern "C" TSLanguage * tree_sitter_context_predicate();
extern "C" TSLanguage *tree_sitter_context_predicate();
namespace {
@@ -16,13 +16,15 @@ void Init(Local<Object> exports, Local<Object> module) {
tpl->InstanceTemplate()->SetInternalFieldCount(1);
Local<Function> constructor = Nan::GetFunction(tpl).ToLocalChecked();
Local<Object> instance = constructor->NewInstance(Nan::GetCurrentContext()).ToLocalChecked();
Local<Object> instance =
constructor->NewInstance(Nan::GetCurrentContext()).ToLocalChecked();
Nan::SetInternalFieldPointer(instance, 0, tree_sitter_context_predicate());
Nan::Set(instance, Nan::New("name").ToLocalChecked(), Nan::New("context_predicate").ToLocalChecked());
Nan::Set(instance, Nan::New("name").ToLocalChecked(),
Nan::New("context_predicate").ToLocalChecked());
Nan::Set(module, Nan::New("exports").ToLocalChecked(), instance);
}
NODE_MODULE(tree_sitter_context_predicate_binding, Init)
} // namespace
} // namespace


@@ -594,6 +594,9 @@ type ReleaseObservationCallback = Box<dyn FnOnce(&dyn Any, &mut MutableAppContex
type ActionObservationCallback = Box<dyn FnMut(TypeId, &mut MutableAppContext)>;
type WindowActivationCallback = Box<dyn FnMut(bool, &mut MutableAppContext) -> bool>;
type WindowFullscreenCallback = Box<dyn FnMut(bool, &mut MutableAppContext) -> bool>;
type KeystrokeCallback = Box<
dyn FnMut(&Keystroke, &MatchResult, Option<&Box<dyn Action>>, &mut MutableAppContext) -> bool,
>;
type DeserializeActionCallback = fn(json: &str) -> anyhow::Result<Box<dyn Action>>;
type WindowShouldCloseSubscriptionCallback = Box<dyn FnMut(&mut MutableAppContext) -> bool>;
@@ -619,6 +622,7 @@ pub struct MutableAppContext {
observations: CallbackCollection<usize, ObservationCallback>,
window_activation_observations: CallbackCollection<usize, WindowActivationCallback>,
window_fullscreen_observations: CallbackCollection<usize, WindowFullscreenCallback>,
keystroke_observations: CallbackCollection<usize, KeystrokeCallback>,
release_observations: Arc<Mutex<HashMap<usize, BTreeMap<usize, ReleaseObservationCallback>>>>,
action_dispatch_observations: Arc<Mutex<BTreeMap<usize, ActionObservationCallback>>>,
@@ -678,6 +682,7 @@ impl MutableAppContext {
global_observations: Default::default(),
window_activation_observations: Default::default(),
window_fullscreen_observations: Default::default(),
keystroke_observations: Default::default(),
action_dispatch_observations: Default::default(),
presenters_and_platform_windows: Default::default(),
foreground,
@@ -763,11 +768,11 @@ impl MutableAppContext {
.with_context(|| format!("invalid data for action {}", name))
}
pub fn add_action<A, V, F>(&mut self, handler: F)
pub fn add_action<A, V, F, R>(&mut self, handler: F)
where
A: Action,
V: View,
F: 'static + FnMut(&mut V, &A, &mut ViewContext<V>),
F: 'static + FnMut(&mut V, &A, &mut ViewContext<V>) -> R,
{
self.add_action_internal(handler, false)
}
@@ -781,11 +786,11 @@ impl MutableAppContext {
self.add_action_internal(handler, true)
}
fn add_action_internal<A, V, F>(&mut self, mut handler: F, capture: bool)
fn add_action_internal<A, V, F, R>(&mut self, mut handler: F, capture: bool)
where
A: Action,
V: View,
F: 'static + FnMut(&mut V, &A, &mut ViewContext<V>),
F: 'static + FnMut(&mut V, &A, &mut ViewContext<V>) -> R,
{
let handler = Box::new(
move |view: &mut dyn AnyView,
@@ -1255,6 +1260,27 @@ impl MutableAppContext {
}
}
pub fn observe_keystrokes<F>(&mut self, window_id: usize, callback: F) -> Subscription
where
F: 'static
+ FnMut(
&Keystroke,
&MatchResult,
Option<&Box<dyn Action>>,
&mut MutableAppContext,
) -> bool,
{
let subscription_id = post_inc(&mut self.next_subscription_id);
self.keystroke_observations
.add_callback(window_id, subscription_id, Box::new(callback));
Subscription::KeystrokeObservation {
id: subscription_id,
window_id,
observations: Some(self.keystroke_observations.downgrade()),
}
}
pub fn defer(&mut self, callback: impl 'static + FnOnce(&mut MutableAppContext)) {
self.pending_effects.push_back(Effect::Deferred {
callback: Box::new(callback),
@@ -1405,8 +1431,8 @@ impl MutableAppContext {
true
}
// Returns an iterator over all of the view ids from the passed view up to the root of the window
// Includes the passed view itself
/// Returns an iterator over all of the view ids from the passed view up to the root of the window
/// Includes the passed view itself
fn ancestors(&self, window_id: usize, mut view_id: usize) -> impl Iterator<Item = usize> + '_ {
std::iter::once(view_id)
.into_iter()
@@ -1538,27 +1564,39 @@ impl MutableAppContext {
})
.collect();
match self
let match_result = self
.keystroke_matcher
.push_keystroke(keystroke.clone(), dispatch_path)
{
.push_keystroke(keystroke.clone(), dispatch_path);
let mut handled_by = None;
let keystroke_handled = match &match_result {
MatchResult::None => false,
MatchResult::Pending => true,
MatchResult::Matches(matches) => {
for (view_id, action) in matches {
if self.handle_dispatch_action_from_effect(
window_id,
Some(view_id),
Some(*view_id),
action.as_ref(),
) {
self.keystroke_matcher.clear_pending();
return true;
handled_by = Some(action.boxed_clone());
break;
}
}
false
handled_by.is_some()
}
}
};
self.keystroke(
window_id,
keystroke.clone(),
handled_by,
match_result.clone(),
);
keystroke_handled
} else {
self.keystroke(window_id, keystroke.clone(), None, MatchResult::None);
false
}
}
@@ -2110,6 +2148,12 @@ impl MutableAppContext {
} => {
self.handle_window_should_close_subscription_effect(window_id, callback)
}
Effect::Keystroke {
window_id,
keystroke,
handled_by,
result,
} => self.handle_keystroke_effect(window_id, keystroke, handled_by, result),
}
self.pending_notifications.clear();
self.remove_dropped_entities();
@@ -2188,6 +2232,21 @@ impl MutableAppContext {
});
}
fn keystroke(
&mut self,
window_id: usize,
keystroke: Keystroke,
handled_by: Option<Box<dyn Action>>,
result: MatchResult,
) {
self.pending_effects.push_back(Effect::Keystroke {
window_id,
keystroke,
handled_by,
result,
});
}
pub fn refresh_windows(&mut self) {
self.pending_effects.push_back(Effect::RefreshWindows);
}
@@ -2299,6 +2358,21 @@ impl MutableAppContext {
});
}
fn handle_keystroke_effect(
&mut self,
window_id: usize,
keystroke: Keystroke,
handled_by: Option<Box<dyn Action>>,
result: MatchResult,
) {
self.update(|this| {
let mut observations = this.keystroke_observations.clone();
observations.emit_and_cleanup(window_id, this, {
move |callback, this| callback(&keystroke, &result, handled_by.as_ref(), this)
});
});
}
fn handle_window_activation_effect(&mut self, window_id: usize, active: bool) {
//Short circuit evaluation if we're already g2g
if self
@@ -2852,6 +2926,12 @@ pub enum Effect {
subscription_id: usize,
callback: WindowFullscreenCallback,
},
Keystroke {
window_id: usize,
keystroke: Keystroke,
handled_by: Option<Box<dyn Action>>,
result: MatchResult,
},
RefreshWindows,
DispatchActionFrom {
window_id: usize,
@@ -2995,6 +3075,21 @@ impl Debug for Effect {
.debug_struct("Effect::WindowShouldCloseSubscription")
.field("window_id", window_id)
.finish(),
Effect::Keystroke {
window_id,
keystroke,
handled_by,
result,
} => f
.debug_struct("Effect::Keystroke")
.field("window_id", window_id)
.field("keystroke", keystroke)
.field(
"keystroke",
&handled_by.as_ref().map(|handled_by| handled_by.name()),
)
.field("result", result)
.finish(),
}
}
}
@@ -3600,6 +3695,7 @@ impl<'a, T: View> ViewContext<'a, T> {
return false;
}
self.ancestors(view.window_id, view.view_id)
.skip(1) // Skip self id
.any(|parent| parent == self.view_id)
}
@@ -3826,6 +3922,33 @@ impl<'a, T: View> ViewContext<'a, T> {
})
}
pub fn observe_keystroke<F>(&mut self, mut callback: F) -> Subscription
where
F: 'static
+ FnMut(
&mut T,
&Keystroke,
Option<&Box<dyn Action>>,
&MatchResult,
&mut ViewContext<T>,
) -> bool,
{
let observer = self.weak_handle();
self.app.observe_keystrokes(
self.window_id(),
move |keystroke, result, handled_by, cx| {
if let Some(observer) = observer.upgrade(cx) {
observer.update(cx, |observer, cx| {
callback(observer, keystroke, handled_by, result, cx);
});
true
} else {
false
}
},
)
}
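Putting the new view-level hook to work, a hypothetical view could record the most recent keystroke like this (the view type is invented; the callback shape is exactly the one declared above, and the usual `Entity`/`View` impls are assumed):

// Hypothetical "key echo" state for a HUD-style overlay.
struct KeystrokeEcho {
    last: Option<Keystroke>,
    _observation: Subscription, // dropping this detaches the observer
}

impl KeystrokeEcho {
    fn new(cx: &mut ViewContext<Self>) -> Self {
        let observation = cx.observe_keystroke(|this, keystroke, _handled_by, _result, cx| {
            this.last = Some(keystroke.clone());
            cx.notify();
            true // keep the observation alive
        });
        Self {
            last: None,
            _observation: observation,
        }
    }
}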
pub fn emit(&mut self, payload: T::Event) {
self.app.pending_effects.push_back(Effect::Event {
entity_id: self.view_id,
@@ -5018,6 +5141,11 @@ pub enum Subscription {
window_id: usize,
observations: Option<Weak<Mapping<usize, WindowFullscreenCallback>>>,
},
KeystrokeObservation {
id: usize,
window_id: usize,
observations: Option<Weak<Mapping<usize, KeystrokeCallback>>>,
},
ReleaseObservation {
id: usize,
@@ -5056,6 +5184,9 @@ impl Subscription {
Subscription::ActionObservation { observations, .. } => {
observations.take();
}
Subscription::KeystrokeObservation { observations, .. } => {
observations.take();
}
Subscription::WindowActivationObservation { observations, .. } => {
observations.take();
}
@@ -5175,6 +5306,27 @@ impl Drop for Subscription {
observations.lock().remove(id);
}
}
Subscription::KeystrokeObservation {
id,
window_id,
observations,
} => {
if let Some(observations) = observations.as_ref().and_then(Weak::upgrade) {
match observations
.lock()
.entry(*window_id)
.or_default()
.entry(*id)
{
btree_map::Entry::Vacant(entry) => {
entry.insert(None);
}
btree_map::Entry::Occupied(entry) => {
entry.remove();
}
}
}
}
Subscription::WindowActivationObservation {
id,
window_id,


@@ -115,6 +115,7 @@ impl Tooltip {
} else {
state.visible.set(false);
state.debounce.take();
cx.notify();
}
}
})


@@ -66,21 +66,32 @@ struct DeterministicState {
rng: rand::prelude::StdRng,
seed: u64,
scheduled_from_foreground: collections::HashMap<usize, Vec<ForegroundRunnable>>,
scheduled_from_background: Vec<Runnable>,
scheduled_from_background: Vec<BackgroundRunnable>,
forbid_parking: bool,
block_on_ticks: std::ops::RangeInclusive<usize>,
now: std::time::Instant,
next_timer_id: usize,
pending_timers: Vec<(usize, std::time::Instant, postage::barrier::Sender)>,
waiting_backtrace: Option<backtrace::Backtrace>,
next_runnable_id: usize,
poll_history: Vec<usize>,
enable_runnable_backtraces: bool,
runnable_backtraces: collections::HashMap<usize, backtrace::Backtrace>,
}
#[cfg(any(test, feature = "test-support"))]
struct ForegroundRunnable {
id: usize,
runnable: Runnable,
main: bool,
}
#[cfg(any(test, feature = "test-support"))]
struct BackgroundRunnable {
id: usize,
runnable: Runnable,
}
#[cfg(any(test, feature = "test-support"))]
pub struct Deterministic {
state: Arc<parking_lot::Mutex<DeterministicState>>,
@@ -117,11 +128,29 @@ impl Deterministic {
next_timer_id: Default::default(),
pending_timers: Default::default(),
waiting_backtrace: None,
next_runnable_id: 0,
poll_history: Default::default(),
enable_runnable_backtraces: false,
runnable_backtraces: Default::default(),
})),
parker: Default::default(),
})
}
pub fn runnable_history(&self) -> Vec<usize> {
self.state.lock().poll_history.clone()
}
pub fn enable_runnable_backtrace(&self) {
self.state.lock().enable_runnable_backtraces = true;
}
pub fn runnable_backtrace(&self, runnable_id: usize) -> backtrace::Backtrace {
let mut backtrace = self.state.lock().runnable_backtraces[&runnable_id].clone();
backtrace.resolve();
backtrace
}
pub fn build_background(self: &Arc<Self>) -> Arc<Background> {
Arc::new(Background::Deterministic {
executor: self.clone(),
@@ -142,6 +171,17 @@ impl Deterministic {
main: bool,
) -> AnyLocalTask {
let state = self.state.clone();
let id;
{
let mut state = state.lock();
id = util::post_inc(&mut state.next_runnable_id);
if state.enable_runnable_backtraces {
state
.runnable_backtraces
.insert(id, backtrace::Backtrace::new_unresolved());
}
}
let unparker = self.parker.lock().unparker();
let (runnable, task) = async_task::spawn_local(future, move |runnable| {
let mut state = state.lock();
@@ -149,7 +189,7 @@ impl Deterministic {
.scheduled_from_foreground
.entry(cx_id)
.or_default()
.push(ForegroundRunnable { runnable, main });
.push(ForegroundRunnable { id, runnable, main });
unparker.unpark();
});
runnable.schedule();
@@ -158,10 +198,23 @@ impl Deterministic {
fn spawn(&self, future: AnyFuture) -> AnyTask {
let state = self.state.clone();
let id;
{
let mut state = state.lock();
id = util::post_inc(&mut state.next_runnable_id);
if state.enable_runnable_backtraces {
state
.runnable_backtraces
.insert(id, backtrace::Backtrace::new_unresolved());
}
}
let unparker = self.parker.lock().unparker();
let (runnable, task) = async_task::spawn(future, move |runnable| {
let mut state = state.lock();
state.scheduled_from_background.push(runnable);
state
.scheduled_from_background
.push(BackgroundRunnable { id, runnable });
unparker.unpark();
});
runnable.schedule();
@@ -178,15 +231,27 @@ impl Deterministic {
let woken = Arc::new(AtomicBool::new(false));
let state = self.state.clone();
let id;
{
let mut state = state.lock();
id = util::post_inc(&mut state.next_runnable_id);
if state.enable_runnable_backtraces {
state
.runnable_backtraces
.insert(id, backtrace::Backtrace::new_unresolved());
}
}
let unparker = self.parker.lock().unparker();
let (runnable, mut main_task) = unsafe {
async_task::spawn_unchecked(main_future, move |runnable| {
let mut state = state.lock();
let state = &mut *state.lock();
state
.scheduled_from_foreground
.entry(cx_id)
.or_default()
.push(ForegroundRunnable {
id: util::post_inc(&mut state.next_runnable_id),
runnable,
main: true,
});
@@ -248,9 +313,10 @@ impl Deterministic {
if !state.scheduled_from_background.is_empty() && state.rng.gen() {
let background_len = state.scheduled_from_background.len();
let ix = state.rng.gen_range(0..background_len);
let runnable = state.scheduled_from_background.remove(ix);
let background_runnable = state.scheduled_from_background.remove(ix);
state.poll_history.push(background_runnable.id);
drop(state);
runnable.run();
background_runnable.runnable.run();
} else if !state.scheduled_from_foreground.is_empty() {
let available_cx_ids = state
.scheduled_from_foreground
@@ -266,6 +332,7 @@ impl Deterministic {
if scheduled_from_cx.is_empty() {
state.scheduled_from_foreground.remove(&cx_id_to_run);
}
state.poll_history.push(foreground_runnable.id);
drop(state);
@@ -298,9 +365,10 @@ impl Deterministic {
let runnable_count = state.scheduled_from_background.len();
let ix = state.rng.gen_range(0..=runnable_count);
if ix < state.scheduled_from_background.len() {
let runnable = state.scheduled_from_background.remove(ix);
let background_runnable = state.scheduled_from_background.remove(ix);
state.poll_history.push(background_runnable.id);
drop(state);
runnable.run();
background_runnable.runnable.run();
} else {
drop(state);
if let Poll::Ready(result) = future.poll(&mut cx) {
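Taken together, these executor changes tag every runnable with an id at spawn time, log the order in which runnables are polled (`poll_history`), and optionally capture a spawn-site backtrace. A hypothetical harness could use that to pinpoint where two seeded runs diverge; only `Deterministic` and `runnable_history` come from the diff above:

// Hypothetical: compare this run's poll order against a recorded one and
// return the index of the first divergence, if any. A pure length
// difference with an identical prefix is ignored here.
fn first_divergence(deterministic: &Deterministic, recorded: &[usize]) -> Option<usize> {
    let history = deterministic.runnable_history();
    history
        .iter()
        .zip(recorded)
        .position(|(current, previous)| current != previous)
}

Once a divergent runnable id is in hand, `runnable_backtrace(id)` (after `enable_runnable_backtrace()`) resolves where that task was spawned.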


@@ -112,6 +112,21 @@ impl PartialEq for MatchResult {
impl Eq for MatchResult {}
impl Clone for MatchResult {
fn clone(&self) -> Self {
match self {
MatchResult::None => MatchResult::None,
MatchResult::Pending => MatchResult::Pending,
MatchResult::Matches(matches) => MatchResult::Matches(
matches
.iter()
.map(|(view_id, action)| (*view_id, Action::boxed_clone(action.as_ref())))
.collect(),
),
}
}
}
impl Matcher {
pub fn new(keymap: Keymap) -> Self {
Self {

Some files were not shown because too many files have changed in this diff.