Compare commits
616 Commits
v0.94.3-pr
...
crdb-sync-
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
2fe736fc6b | ||
|
|
92d748134e | ||
|
|
8d7b37b743 | ||
|
|
f1fe0007e3 | ||
|
|
d66af5c4d2 | ||
|
|
dc049ca7d2 | ||
|
|
6251399d0a | ||
|
|
e979f753f2 | ||
|
|
80f9b553c7 | ||
|
|
bd0a9eb704 | ||
|
|
52beac9fa5 | ||
|
|
c603ef3f3d | ||
|
|
eb79ea6ee5 | ||
|
|
26cb5e316d | ||
|
|
974d07d304 | ||
|
|
d8f6dc3749 | ||
|
|
3ebe2f97cc | ||
|
|
400105cf18 | ||
|
|
ba6f1d7281 | ||
|
|
86b0ed8fda | ||
|
|
50f507e38e | ||
|
|
7fbe5910b9 | ||
|
|
7f49ca4adb | ||
|
|
b751fe9e98 | ||
|
|
85c9b30e9f | ||
|
|
82c022adb3 | ||
|
|
7494fdc738 | ||
|
|
1919a5bb63 | ||
|
|
308c29d377 | ||
|
|
eacb37e200 | ||
|
|
083c857fa0 | ||
|
|
1d0ff4bf8e | ||
|
|
9cbe3cca9a | ||
|
|
ce17cd83cf | ||
|
|
05ec6b89c2 | ||
|
|
93701f9c5d | ||
|
|
083bfc6cbc | ||
|
|
a084a93ef0 | ||
|
|
84bf6c5a16 | ||
|
|
e99f82f855 | ||
|
|
ba3353d127 | ||
|
|
2887c4674d | ||
|
|
22e8a67498 | ||
|
|
4848ea8e62 | ||
|
|
3349f2147a | ||
|
|
103d5293be | ||
|
|
1046b60fd2 | ||
|
|
d5932717f2 | ||
|
|
8eeaacdf0a | ||
|
|
2de48f96d2 | ||
|
|
ba42db7069 | ||
|
|
02946e1e8f | ||
|
|
32c8eb2612 | ||
|
|
d277e214bd | ||
|
|
cbc5dc04ef | ||
|
|
198be71b6c | ||
|
|
5eede853f6 | ||
|
|
129e711545 | ||
|
|
d5c30709b3 | ||
|
|
b8a2a2ecd3 | ||
|
|
354c020612 | ||
|
|
ee66f99ce6 | ||
|
|
fbede4a5a3 | ||
|
|
e2b38f7a31 | ||
|
|
06167889c3 | ||
|
|
5c48729c7c | ||
|
|
89bbcdfa4f | ||
|
|
98fde36834 | ||
|
|
40fcec1495 | ||
|
|
6cd10f3d5e | ||
|
|
603387ace5 | ||
|
|
9fc1ebcb5b | ||
|
|
711073cf3c | ||
|
|
a58c9ed7d3 | ||
|
|
56704c7c5f | ||
|
|
7282629f93 | ||
|
|
517591bced | ||
|
|
4c5e248693 | ||
|
|
b8066d444a | ||
|
|
f8cf74f9e8 | ||
|
|
97cdad7f1a | ||
|
|
fc1844d684 | ||
|
|
0b61c93a25 | ||
|
|
394a105639 | ||
|
|
1b803e498f | ||
|
|
0ac919f6e0 | ||
|
|
7a851d42ad | ||
|
|
ca6f7d8a80 | ||
|
|
18412c3a06 | ||
|
|
6e8b207104 | ||
|
|
39f02c2b72 | ||
|
|
75999204ad | ||
|
|
cc23360bab | ||
|
|
e8210b827d | ||
|
|
cdceddd2cc | ||
|
|
4085df5146 | ||
|
|
64b252e81a | ||
|
|
bf2ca57f55 | ||
|
|
c32fd57643 | ||
|
|
93ec73da29 | ||
|
|
97c3d97792 | ||
|
|
1f65effe57 | ||
|
|
3c3bf793e3 | ||
|
|
ca57c1839c | ||
|
|
299818cde0 | ||
|
|
c86096a886 | ||
|
|
1a84382881 | ||
|
|
f3928c8c26 | ||
|
|
94f7ddbcb2 | ||
|
|
369d85b82e | ||
|
|
cbdb83fb43 | ||
|
|
309e307fa8 | ||
|
|
e9342e1b47 | ||
|
|
edc63ee3ff | ||
|
|
88829dfe21 | ||
|
|
1ada3722e5 | ||
|
|
1e644b7a45 | ||
|
|
eb36bc5b62 | ||
|
|
31f84cef14 | ||
|
|
9358958b1f | ||
|
|
bbe126818f | ||
|
|
bdd0b9f387 | ||
|
|
a01d973477 | ||
|
|
e199a6a3a1 | ||
|
|
7603659479 | ||
|
|
3cc88904bf | ||
|
|
62ee52a5fc | ||
|
|
fbe0108959 | ||
|
|
41105136a4 | ||
|
|
d95c4fdb2b | ||
|
|
baa16a2fc6 | ||
|
|
7d02462abd | ||
|
|
ea74734b0a | ||
|
|
43d94e37ec | ||
|
|
25e4bcea7f | ||
|
|
fa48d59de4 | ||
|
|
439f6ba616 | ||
|
|
729ea89fef | ||
|
|
8c5eab6933 | ||
|
|
5118218a9c | ||
|
|
52154f76ac | ||
|
|
7dccb487de | ||
|
|
6ad0852a70 | ||
|
|
dd504f5965 | ||
|
|
a4914fcf3b | ||
|
|
8fff0b0ff8 | ||
|
|
429a2fc623 | ||
|
|
3dee2faa44 | ||
|
|
c1f72328ff | ||
|
|
18b32304e2 | ||
|
|
28ee05b324 | ||
|
|
b4b53eb5f1 | ||
|
|
a3a9d024ba | ||
|
|
b338ffe8d8 | ||
|
|
e0915190d4 | ||
|
|
f05095a6dd | ||
|
|
6c09782aa2 | ||
|
|
3920bff5e0 | ||
|
|
51d311affd | ||
|
|
ff0864026e | ||
|
|
05b161118c | ||
|
|
dcaf8a9af8 | ||
|
|
d14a484a20 | ||
|
|
458916409c | ||
|
|
7788eabec0 | ||
|
|
bd9118f673 | ||
|
|
c538504b9c | ||
|
|
4bd415f2b6 | ||
|
|
b7e79d5241 | ||
|
|
25ea07cd41 | ||
|
|
33b215a288 | ||
|
|
f2c9738a69 | ||
|
|
804da68af7 | ||
|
|
2d8159998d | ||
|
|
595bc16749 | ||
|
|
e002d9efb0 | ||
|
|
243a1a854e | ||
|
|
a81fde36e0 | ||
|
|
60f5dca222 | ||
|
|
56c657fe79 | ||
|
|
4772e4ccee | ||
|
|
a50d30bf8e | ||
|
|
8ba69c15d1 | ||
|
|
bf2583414b | ||
|
|
807279208d | ||
|
|
5f89de0b80 | ||
|
|
45037eb7a0 | ||
|
|
4e085a6f2d | ||
|
|
517d073806 | ||
|
|
cbc4b90df8 | ||
|
|
35400d5797 | ||
|
|
1ddd3971d8 | ||
|
|
939eaaa973 | ||
|
|
baf06043cb | ||
|
|
cd3620692b | ||
|
|
12acc7418e | ||
|
|
d98fcc4402 | ||
|
|
02ea88aa51 | ||
|
|
57b6e25278 | ||
|
|
7337910034 | ||
|
|
76188c9508 | ||
|
|
05a8409363 | ||
|
|
a9bfe97361 | ||
|
|
81b05f2a08 | ||
|
|
4557adf693 | ||
|
|
1d1da74d72 | ||
|
|
0769458ae4 | ||
|
|
a85af79892 | ||
|
|
6b95ac9b26 | ||
|
|
719c56734a | ||
|
|
6095525b56 | ||
|
|
429daf5f8c | ||
|
|
7d3d54652b | ||
|
|
1242b5b4a2 | ||
|
|
0b6155609d | ||
|
|
abb145da70 | ||
|
|
95947f6d3a | ||
|
|
c56d62fd84 | ||
|
|
0e068a644f | ||
|
|
13ae1249f5 | ||
|
|
2f4e5b7e0e | ||
|
|
a7695c47bf | ||
|
|
8f0b24b264 | ||
|
|
0e9cad4935 | ||
|
|
e02d6bc0d4 | ||
|
|
d84d663ac3 | ||
|
|
4d1dbb8aa3 | ||
|
|
5d22a300c3 | ||
|
|
372f66c88a | ||
|
|
0e984e1e69 | ||
|
|
464cc2e71a | ||
|
|
d6a463afb8 | ||
|
|
f051e66231 | ||
|
|
a90b151d52 | ||
|
|
9d04228853 | ||
|
|
54378a5f57 | ||
|
|
0237276557 | ||
|
|
56b1aaa6cb | ||
|
|
e771ede830 | ||
|
|
55232486d7 | ||
|
|
8c1973c5a2 | ||
|
|
287dfc0460 | ||
|
|
4a8556cd81 | ||
|
|
0e6048a85d | ||
|
|
900deaab50 | ||
|
|
257dd57fe4 | ||
|
|
a5e63fbf77 | ||
|
|
30bac17749 | ||
|
|
d675f10447 | ||
|
|
15dc8b43c4 | ||
|
|
2762f9b1c6 | ||
|
|
e1379f0ef0 | ||
|
|
6f1dcb4e94 | ||
|
|
cb97f5a69c | ||
|
|
aa67413abc | ||
|
|
b13e86aba6 | ||
|
|
b774d83a50 | ||
|
|
b636398093 | ||
|
|
88c46e091e | ||
|
|
efe973ebe2 | ||
|
|
7e904183bf | ||
|
|
9809ec3d70 | ||
|
|
2e3aa703d9 | ||
|
|
491b3d5515 | ||
|
|
1e4bddd276 | ||
|
|
5ceb258b3e | ||
|
|
34488ca863 | ||
|
|
64d134a0dc | ||
|
|
07dc82409b | ||
|
|
9c9ce15afc | ||
|
|
e3f9a01f6b | ||
|
|
f4413b0969 | ||
|
|
c754c1e9e2 | ||
|
|
aacc4bb8b0 | ||
|
|
8c855680e7 | ||
|
|
96ef6ab326 | ||
|
|
929a9f97b2 | ||
|
|
fd72f4526d | ||
|
|
d023189bda | ||
|
|
d26f76ba90 | ||
|
|
8003e84d11 | ||
|
|
2c27e875e5 | ||
|
|
9e03e9d6df | ||
|
|
dd8863d0de | ||
|
|
843e74689d | ||
|
|
98b8008bcc | ||
|
|
27b06c1d09 | ||
|
|
c528880155 | ||
|
|
3058a96dee | ||
|
|
e6b7bbee25 | ||
|
|
c5e47f27f5 | ||
|
|
afb0329914 | ||
|
|
8deafe90fc | ||
|
|
0e071919a0 | ||
|
|
33921183dc | ||
|
|
6ed7820f7c | ||
|
|
10db05f87f | ||
|
|
6f7a6e57fc | ||
|
|
94358ffb16 | ||
|
|
82a9d53c8a | ||
|
|
6349d90cac | ||
|
|
6123c67de9 | ||
|
|
23f25562b5 | ||
|
|
f52722b6a4 | ||
|
|
75d900704e | ||
|
|
91ba80ae98 | ||
|
|
9aeb970f09 | ||
|
|
342dbc6945 | ||
|
|
8d0614ce74 | ||
|
|
0ae7a9974f | ||
|
|
be7d4d6ea9 | ||
|
|
7cb5326ba0 | ||
|
|
e73f394604 | ||
|
|
00b0189660 | ||
|
|
018eb06091 | ||
|
|
b00703a149 | ||
|
|
bf2dcd4582 | ||
|
|
fab26267db | ||
|
|
80ef92a3e1 | ||
|
|
ed1b1a5ccd | ||
|
|
b9fdfd60f0 | ||
|
|
192f747bd1 | ||
|
|
aee008440b | ||
|
|
5267c6d2cb | ||
|
|
137734cfcf | ||
|
|
009cf48b26 | ||
|
|
6205ac27a5 | ||
|
|
a884bd77e1 | ||
|
|
fa529d9590 | ||
|
|
7fde3614fe | ||
|
|
afc4c10ec1 | ||
|
|
d83c4ffb07 | ||
|
|
96abba2b7d | ||
|
|
8b42f5b1b3 | ||
|
|
9e44de90af | ||
|
|
e630ff38c4 | ||
|
|
9f650dfa52 | ||
|
|
1a8bfdfa21 | ||
|
|
ede86a686c | ||
|
|
4efcf492ee | ||
|
|
04625fe376 | ||
|
|
f0bf60fded | ||
|
|
6793d4b6b8 | ||
|
|
c9bf407431 | ||
|
|
1362c5a3d9 | ||
|
|
fef73ae921 | ||
|
|
3e136943c0 | ||
|
|
6770aeeb3c | ||
|
|
a4bf19c5bd | ||
|
|
4cc06748c9 | ||
|
|
f887a17ffe | ||
|
|
cf0dd09b5c | ||
|
|
4bece54655 | ||
|
|
dd6b674e7e | ||
|
|
8642a1d074 | ||
|
|
ee9123a7da | ||
|
|
5b6582a7c2 | ||
|
|
6c7a6d43fc | ||
|
|
94796e943b | ||
|
|
965cc2efbc | ||
|
|
11173b2199 | ||
|
|
dc557e1647 | ||
|
|
f5eac82e81 | ||
|
|
eaa8224076 | ||
|
|
10a1df3faa | ||
|
|
419cbcbaf8 | ||
|
|
f24001c130 | ||
|
|
322ebc33d1 | ||
|
|
4d91409bbc | ||
|
|
c3e8ea304a | ||
|
|
dcc2cd8dff | ||
|
|
b9e0074793 | ||
|
|
c69d0d50cd | ||
|
|
031172d3f2 | ||
|
|
c0b2326053 | ||
|
|
c7669317ec | ||
|
|
369ccc725c | ||
|
|
cde5b3952d | ||
|
|
2dae42b1ba | ||
|
|
d4971e9ead | ||
|
|
c6195e6176 | ||
|
|
0f5489397f | ||
|
|
c466711cd1 | ||
|
|
9c150252aa | ||
|
|
31720d8825 | ||
|
|
21e7e35e73 | ||
|
|
2f2ef7c165 | ||
|
|
2e2333107a | ||
|
|
3a625d15d3 | ||
|
|
b14cd5f56d | ||
|
|
ccc78000bd | ||
|
|
c130dd6b47 | ||
|
|
f710efca3b | ||
|
|
2053418f21 | ||
|
|
29cbeb39bd | ||
|
|
bf9dfa3b51 | ||
|
|
f1b034d4f8 | ||
|
|
ff8a89a075 | ||
|
|
1424a7a56a | ||
|
|
415b8f0147 | ||
|
|
77c4fc98bd | ||
|
|
b7ed467690 | ||
|
|
b38e3b804c | ||
|
|
50623c018c | ||
|
|
9da8f609cf | ||
|
|
331fd896b5 | ||
|
|
5797282b98 | ||
|
|
d8fd0be598 | ||
|
|
623cb9833c | ||
|
|
0a0e40fb24 | ||
|
|
00b04f1c85 | ||
|
|
5eab628580 | ||
|
|
d5f7ad08fa | ||
|
|
ef7aa66959 | ||
|
|
9a1a9813cb | ||
|
|
a56d454a07 | ||
|
|
608c16342c | ||
|
|
c2ffd8975b | ||
|
|
8cce403c11 | ||
|
|
26b9be628e | ||
|
|
5385ca411b | ||
|
|
c9ba4c764a | ||
|
|
6da5008f32 | ||
|
|
488b41826b | ||
|
|
1e8ee5361d | ||
|
|
7cbcc28b1b | ||
|
|
d164034198 | ||
|
|
ad4f5e55cb | ||
|
|
0c7949bdee | ||
|
|
6297675055 | ||
|
|
0e600ad2a4 | ||
|
|
1cc8ecad12 | ||
|
|
af9506b21d | ||
|
|
c732aa1617 | ||
|
|
37568ccbf0 | ||
|
|
c141519dba | ||
|
|
dc09a11090 | ||
|
|
2cb7d8aa96 | ||
|
|
e69240cf13 | ||
|
|
001e848393 | ||
|
|
2ac485a6ec | ||
|
|
c12821f6c5 | ||
|
|
6260d977fb | ||
|
|
6d96c6ef51 | ||
|
|
3db1aac119 | ||
|
|
99c2395a86 | ||
|
|
78c8324698 | ||
|
|
10c62779d9 | ||
|
|
5086e37e73 | ||
|
|
b9f5cb0301 | ||
|
|
33e2b52a01 | ||
|
|
297fa029e3 | ||
|
|
b68cd58a3b | ||
|
|
4b3bb2c661 | ||
|
|
4a4dd39875 | ||
|
|
d244c0fcea | ||
|
|
badf94b097 | ||
|
|
08e24bbbae | ||
|
|
af7b2f17ae | ||
|
|
ef296e46cb | ||
|
|
2ca4b3f4cc | ||
|
|
debe6f107e | ||
|
|
02f523094b | ||
|
|
9165320390 | ||
|
|
550aa2d6bd | ||
|
|
be881369fa | ||
|
|
5483bd1404 | ||
|
|
4b4d049b0a | ||
|
|
dd0dbdc5bd | ||
|
|
1649cf81de | ||
|
|
5012d618e6 | ||
|
|
98a0113ac3 | ||
|
|
efe8b8b6d0 | ||
|
|
298c2213a0 | ||
|
|
8161438a85 | ||
|
|
748e7af5a2 | ||
|
|
f5fec55930 | ||
|
|
91832c8cd8 | ||
|
|
15010e94fd | ||
|
|
f164eb5289 | ||
|
|
1fbf09fe4c | ||
|
|
a1fe5abeaf | ||
|
|
3c1ab3d0b8 | ||
|
|
4125e7eccc | ||
|
|
e83afdc5ab | ||
|
|
4f60679861 | ||
|
|
dce72a1ce7 | ||
|
|
307d8d9c8d | ||
|
|
82079dd422 | ||
|
|
a6d713eb3d | ||
|
|
e00e73f608 | ||
|
|
6739c31594 | ||
|
|
a75a7e2b1d | ||
|
|
92a0a4e367 | ||
|
|
273b9e1636 | ||
|
|
9ffe220def | ||
|
|
4029481fd0 | ||
|
|
f0cddeb478 | ||
|
|
0189742497 | ||
|
|
3318896ad9 | ||
|
|
6c8cb6b2a9 | ||
|
|
6e24ded2bc | ||
|
|
52a497be21 | ||
|
|
b4b0f622de | ||
|
|
232d14a3ae | ||
|
|
dea728a7e5 | ||
|
|
6cf13c62d1 | ||
|
|
d70f415e8e | ||
|
|
dbec2ed1f1 | ||
|
|
96ce0bb783 | ||
|
|
2ffce24ef0 | ||
|
|
75fe77c11d | ||
|
|
20d8a2a1ec | ||
|
|
460bf93866 | ||
|
|
362023ccf2 | ||
|
|
da7dce79f6 | ||
|
|
3f5667b101 | ||
|
|
caa29d57c2 | ||
|
|
b70b76029e | ||
|
|
66bf56fc4f | ||
|
|
4a69c71167 | ||
|
|
cb24cb1ea5 | ||
|
|
d69b07bafd | ||
|
|
abf3b4a54e | ||
|
|
79ece8a86e | ||
|
|
318deed25b | ||
|
|
c03dda1a0c | ||
|
|
6f1e988cb9 | ||
|
|
7d634f66e2 | ||
|
|
4ab2b8b24b | ||
|
|
e6ec0af743 | ||
|
|
fff65968bf | ||
|
|
e57f6f21fe | ||
|
|
3ca0170264 | ||
|
|
a86b6c42c7 | ||
|
|
793eff1695 | ||
|
|
b4ed0347b4 | ||
|
|
2c7e5e0671 | ||
|
|
11ae99fbd6 | ||
|
|
708852aa00 | ||
|
|
348c93e8bb | ||
|
|
5408275c7a | ||
|
|
3e245fec90 | ||
|
|
5e7d9dc718 | ||
|
|
b66453e771 | ||
|
|
0b0a161626 | ||
|
|
492b849ea1 | ||
|
|
8ced7ab00a | ||
|
|
c298cf7527 | ||
|
|
1936bdebb3 | ||
|
|
dd6629416c | ||
|
|
f6c96ec892 | ||
|
|
801f41e68e | ||
|
|
8b8bafef22 | ||
|
|
594b6e8d64 | ||
|
|
6a15ae9c01 | ||
|
|
afccf608f4 | ||
|
|
eff0ee3b60 | ||
|
|
b6520a8f1d | ||
|
|
e45d3a0a63 | ||
|
|
0d18b72cf8 | ||
|
|
0733e8d50f | ||
|
|
fe57e04016 | ||
|
|
b055f594b0 | ||
|
|
e36d5f41c8 | ||
|
|
18a5a47f8a | ||
|
|
3408b98167 | ||
|
|
36907bb4dc | ||
|
|
f4d71b2b24 | ||
|
|
0db0876289 | ||
|
|
e3ab54942e | ||
|
|
1d737e490b | ||
|
|
cdeabcab4e | ||
|
|
4070f67f3b | ||
|
|
abb58c41db | ||
|
|
9ee2707d43 | ||
|
|
530561e4eb | ||
|
|
77b120323b | ||
|
|
d6112e4a59 | ||
|
|
2678dfdc57 | ||
|
|
39137fc19f | ||
|
|
0a7245a583 | ||
|
|
a08d60fc61 | ||
|
|
4bc1b57d8f | ||
|
|
4c672c4e5f | ||
|
|
fd68a2afae | ||
|
|
85e71415fe | ||
|
|
400d39740c | ||
|
|
3ca3de807c | ||
|
|
e6bd85ffa7 | ||
|
|
40ff7779bb | ||
|
|
04cd04ff82 | ||
|
|
9d19dea7dd | ||
|
|
650282f3d0 | ||
|
|
3a71894360 | ||
|
|
2381e3f650 | ||
|
|
1cb53805e2 | ||
|
|
2791db41d7 | ||
|
|
1578f5fb35 | ||
|
|
d09af1948e | ||
|
|
754cc01f87 | ||
|
|
523ea23b10 | ||
|
|
f9b5f102f8 | ||
|
|
7d1b6cb49d | ||
|
|
d1bdfa0be6 | ||
|
|
4bfe3de1f2 | ||
|
|
953e928bdb | ||
|
|
74b693d6b9 | ||
|
|
0f232e0ce2 | ||
|
|
7937a16002 | ||
|
|
65bbb7c57b | ||
|
|
c071b271be | ||
|
|
dd309070eb | ||
|
|
d4a4db42aa | ||
|
|
80a894b829 | ||
|
|
86247bf657 |
14
.github/workflows/ci.yml
vendored
@@ -29,7 +29,7 @@ jobs:
|
||||
rustup update stable
|
||||
|
||||
- name: Checkout repo
|
||||
uses: actions/checkout@v2
|
||||
uses: actions/checkout@v3
|
||||
with:
|
||||
clean: false
|
||||
submodules: 'recursive'
|
||||
@@ -54,12 +54,12 @@ jobs:
|
||||
cargo install cargo-nextest
|
||||
|
||||
- name: Install Node
|
||||
uses: actions/setup-node@v2
|
||||
uses: actions/setup-node@v3
|
||||
with:
|
||||
node-version: '18'
|
||||
|
||||
- name: Checkout repo
|
||||
uses: actions/checkout@v2
|
||||
uses: actions/checkout@v3
|
||||
with:
|
||||
clean: false
|
||||
submodules: 'recursive'
|
||||
@@ -104,12 +104,12 @@ jobs:
|
||||
rustup target add wasm32-wasi
|
||||
|
||||
- name: Install Node
|
||||
uses: actions/setup-node@v2
|
||||
uses: actions/setup-node@v3
|
||||
with:
|
||||
node-version: '18'
|
||||
|
||||
- name: Checkout repo
|
||||
uses: actions/checkout@v2
|
||||
uses: actions/checkout@v3
|
||||
with:
|
||||
clean: false
|
||||
submodules: 'recursive'
|
||||
@@ -148,8 +148,8 @@ jobs:
|
||||
- name: Create app bundle
|
||||
run: script/bundle
|
||||
|
||||
- name: Upload app bundle to workflow run if main branch or specifi label
|
||||
uses: actions/upload-artifact@v2
|
||||
- name: Upload app bundle to workflow run if main branch or specific label
|
||||
uses: actions/upload-artifact@v3
|
||||
if: ${{ github.ref == 'refs/heads/main' }} || contains(github.event.pull_request.labels.*.name, 'run-build-dmg') }}
|
||||
with:
|
||||
name: Zed_${{ github.event.pull_request.head.sha || github.sha }}.dmg
|
||||
|
||||
4
.github/workflows/randomized_tests.yml
vendored
@@ -29,12 +29,12 @@ jobs:
|
||||
rustup update stable
|
||||
|
||||
- name: Install Node
|
||||
uses: actions/setup-node@v2
|
||||
uses: actions/setup-node@v3
|
||||
with:
|
||||
node-version: '18'
|
||||
|
||||
- name: Checkout repo
|
||||
uses: actions/checkout@v2
|
||||
uses: actions/checkout@v3
|
||||
with:
|
||||
clean: false
|
||||
submodules: 'recursive'
|
||||
|
||||
4
.github/workflows/release_actions.yml
vendored
@@ -16,8 +16,4 @@ jobs:
|
||||
|
||||
Restart your Zed or head to https://zed.dev/releases/stable/latest to grab it.
|
||||
|
||||
```md
|
||||
# Changelog
|
||||
|
||||
${{ github.event.release.body }}
|
||||
```
|
||||
|
||||
365
Cargo.lock
generated
@@ -118,7 +118,7 @@ dependencies = [
|
||||
"settings",
|
||||
"smol",
|
||||
"theme",
|
||||
"tiktoken-rs",
|
||||
"tiktoken-rs 0.4.2",
|
||||
"util",
|
||||
"workspace",
|
||||
]
|
||||
@@ -161,7 +161,7 @@ dependencies = [
|
||||
"miow 0.3.7",
|
||||
"nix",
|
||||
"parking_lot 0.12.1",
|
||||
"regex-automata",
|
||||
"regex-automata 0.1.10",
|
||||
"serde",
|
||||
"serde_yaml",
|
||||
"signal-hook",
|
||||
@@ -179,9 +179,9 @@ checksum = "250f629c0161ad8107cf89319e990051fae62832fd343083bea452d93e2205fd"
|
||||
|
||||
[[package]]
|
||||
name = "alsa"
|
||||
version = "0.7.0"
|
||||
version = "0.7.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "8512c9117059663fb5606788fbca3619e2a91dac0e3fe516242eab1fa6be5e44"
|
||||
checksum = "e2562ad8dcf0f789f65c6fdaad8a8a9708ed6b488e649da28c01656ad66b8b47"
|
||||
dependencies = [
|
||||
"alsa-sys",
|
||||
"bitflags",
|
||||
@@ -268,6 +268,9 @@ name = "anyhow"
|
||||
version = "1.0.71"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "9c7d0618f0e0b7e8ff11427422b64564d5fb0be1940354bfe2e0529b18a9d9b8"
|
||||
dependencies = [
|
||||
"backtrace",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "arrayref"
|
||||
@@ -286,6 +289,9 @@ name = "arrayvec"
|
||||
version = "0.7.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "8da52d66c7071e2e3fa2a1e5c6d088fec47b593032b254f5e980de8ea54454d6"
|
||||
dependencies = [
|
||||
"serde",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "ascii"
|
||||
@@ -450,17 +456,6 @@ dependencies = [
|
||||
"windows-sys 0.48.0",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "async-recursion"
|
||||
version = "0.3.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "d7d78656ba01f1b93024b7c3a0467f1608e4be67d725749fdcd7d2c7678fd7a2"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"syn 1.0.109",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "async-recursion"
|
||||
version = "1.0.4"
|
||||
@@ -772,9 +767,9 @@ checksum = "9e1b586273c5702936fe7b7d6896644d8be71e6314cfe09d3167c95f712589e8"
|
||||
|
||||
[[package]]
|
||||
name = "base64"
|
||||
version = "0.21.0"
|
||||
version = "0.21.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "a4a4ddaa51a5bc52a6948f74c06d20aaaddb71924eab79b8c97a8c556e942d6a"
|
||||
checksum = "604178f6c5c21f02dc555784810edfb88d34ac2c73b2eae109655649ee73ce3d"
|
||||
|
||||
[[package]]
|
||||
name = "base64ct"
|
||||
@@ -959,23 +954,23 @@ dependencies = [
|
||||
[[package]]
|
||||
name = "bromberg_sl2"
|
||||
version = "0.6.0"
|
||||
source = "git+https://github.com/zed-industries/bromberg_sl2?rev=950bc5482c216c395049ae33ae4501e08975f17f#950bc5482c216c395049ae33ae4501e08975f17f"
|
||||
source = "git+https://github.com/zed-industries/bromberg_sl2?rev=6faf816bd5b4b7b2b6ea77495686634732ded095#6faf816bd5b4b7b2b6ea77495686634732ded095"
|
||||
dependencies = [
|
||||
"digest 0.9.0",
|
||||
"lazy_static",
|
||||
"rayon",
|
||||
"seq-macro",
|
||||
"serde",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "bstr"
|
||||
version = "1.4.0"
|
||||
version = "1.6.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "c3d4260bcc2e8fc9df1eac4919a720effeb63a3f0952f5bf4944adfa18897f09"
|
||||
checksum = "6798148dccfbff0fae41c7574d2fa8f1ef3492fba0face179de5d8d447d67b05"
|
||||
dependencies = [
|
||||
"memchr",
|
||||
"once_cell",
|
||||
"regex-automata",
|
||||
"regex-automata 0.3.4",
|
||||
"serde",
|
||||
]
|
||||
|
||||
@@ -1053,6 +1048,10 @@ dependencies = [
|
||||
"media",
|
||||
"postage",
|
||||
"project",
|
||||
"schemars",
|
||||
"serde",
|
||||
"serde_derive",
|
||||
"serde_json",
|
||||
"settings",
|
||||
"util",
|
||||
]
|
||||
@@ -1315,7 +1314,7 @@ name = "client"
|
||||
version = "0.1.0"
|
||||
dependencies = [
|
||||
"anyhow",
|
||||
"async-recursion 0.3.2",
|
||||
"async-recursion",
|
||||
"async-tungstenite",
|
||||
"collections",
|
||||
"db",
|
||||
@@ -1401,7 +1400,7 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "collab"
|
||||
version = "0.15.0"
|
||||
version = "0.16.0"
|
||||
dependencies = [
|
||||
"anyhow",
|
||||
"async-tungstenite",
|
||||
@@ -1491,6 +1490,7 @@ dependencies = [
|
||||
"theme",
|
||||
"theme_selector",
|
||||
"util",
|
||||
"vcs_menu",
|
||||
"workspace",
|
||||
"zed-actions",
|
||||
]
|
||||
@@ -1863,6 +1863,32 @@ dependencies = [
|
||||
"cfg-if 1.0.0",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "crdb"
|
||||
version = "0.1.0"
|
||||
dependencies = [
|
||||
"anyhow",
|
||||
"arrayvec 0.7.2",
|
||||
"async-broadcast",
|
||||
"bromberg_sl2",
|
||||
"collections",
|
||||
"ctor",
|
||||
"env_logger 0.9.3",
|
||||
"futures 0.3.28",
|
||||
"gpui",
|
||||
"lazy_static",
|
||||
"log",
|
||||
"parking_lot 0.11.2",
|
||||
"portable-atomic",
|
||||
"rand 0.8.5",
|
||||
"serde",
|
||||
"serde_bare",
|
||||
"smallvec",
|
||||
"smol",
|
||||
"util",
|
||||
"uuid 1.3.2",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "crossbeam-channel"
|
||||
version = "0.4.4"
|
||||
@@ -1990,7 +2016,6 @@ checksum = "14d05c10f541ae6f3bc5b3d923c20001f47db7d5f0b2bc6ad16490133842db79"
|
||||
dependencies = [
|
||||
"cc",
|
||||
"libc",
|
||||
"libnghttp2-sys",
|
||||
"libz-sys",
|
||||
"openssl-sys",
|
||||
"pkg-config",
|
||||
@@ -2312,7 +2337,6 @@ dependencies = [
|
||||
"theme",
|
||||
"tree-sitter",
|
||||
"tree-sitter-html",
|
||||
"tree-sitter-javascript",
|
||||
"tree-sitter-rust",
|
||||
"tree-sitter-typescript 0.20.2 (git+https://github.com/tree-sitter/tree-sitter-typescript?rev=5d20856f34315b068c41edaee2ac8a100081d259)",
|
||||
"unindent",
|
||||
@@ -2372,9 +2396,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "equivalent"
|
||||
version = "1.0.0"
|
||||
version = "1.0.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "88bffebc5d80432c9b140ee17875ff173a8ab62faad5b257da912bd2f6c1c0a1"
|
||||
checksum = "5443807d6dff69373d433ab9ef5378ad8df50ca6298caf15de6e52e24aaf54d5"
|
||||
|
||||
[[package]]
|
||||
name = "erased-serde"
|
||||
@@ -2448,6 +2472,12 @@ version = "0.2.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "4443176a9f2c162692bd3d352d745ef9413eec5782a80d8fd6f8a1ac692a07f7"
|
||||
|
||||
[[package]]
|
||||
name = "fallible-streaming-iterator"
|
||||
version = "0.1.9"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "7360491ce676a36bf9bb3c56c1aa791658183a54d2744120f27285738d90465a"
|
||||
|
||||
[[package]]
|
||||
name = "fancy-regex"
|
||||
version = "0.11.0"
|
||||
@@ -3145,6 +3175,15 @@ version = "0.14.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "2c6201b9ff9fd90a5a3bac2e56a830d0caa509576f0e503818ee82c181b3437a"
|
||||
|
||||
[[package]]
|
||||
name = "hashlink"
|
||||
version = "0.7.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "7249a3129cbc1ffccd74857f81464a323a152173cdb134e0fd81bc803b29facf"
|
||||
dependencies = [
|
||||
"hashbrown 0.11.2",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "hashlink"
|
||||
version = "0.8.1"
|
||||
@@ -3786,15 +3825,16 @@ dependencies = [
|
||||
"text",
|
||||
"theme",
|
||||
"tree-sitter",
|
||||
"tree-sitter-elixir 0.1.0 (git+https://github.com/elixir-lang/tree-sitter-elixir?rev=4ba9dab6e2602960d95b2b625f3386c27e08084e)",
|
||||
"tree-sitter-embedded-template",
|
||||
"tree-sitter-heex",
|
||||
"tree-sitter-html",
|
||||
"tree-sitter-javascript",
|
||||
"tree-sitter-json 0.19.0",
|
||||
"tree-sitter-json 0.20.0",
|
||||
"tree-sitter-markdown",
|
||||
"tree-sitter-python",
|
||||
"tree-sitter-ruby",
|
||||
"tree-sitter-rust",
|
||||
"tree-sitter-typescript 0.20.2 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"tree-sitter-typescript 0.20.2 (git+https://github.com/tree-sitter/tree-sitter-typescript?rev=5d20856f34315b068c41edaee2ac8a100081d259)",
|
||||
"unicase",
|
||||
"unindent",
|
||||
"util",
|
||||
@@ -3906,16 +3946,6 @@ version = "0.2.6"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "348108ab3fba42ec82ff6e9564fc4ca0247bdccdc68dd8af9764bbc79c3c8ffb"
|
||||
|
||||
[[package]]
|
||||
name = "libnghttp2-sys"
|
||||
version = "0.1.7+1.45.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "57ed28aba195b38d5ff02b9170cbff627e336a20925e43b4945390401c5dc93f"
|
||||
dependencies = [
|
||||
"cc",
|
||||
"libc",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "libsqlite3-sys"
|
||||
version = "0.24.2"
|
||||
@@ -4004,7 +4034,6 @@ dependencies = [
|
||||
"gpui",
|
||||
"hmac 0.12.1",
|
||||
"jwt",
|
||||
"lazy_static",
|
||||
"live_kit_server",
|
||||
"log",
|
||||
"media",
|
||||
@@ -4128,7 +4157,7 @@ version = "0.1.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "8263075bb86c5a1b1427b5ae862e8889656f126e9f77c484496e8b47cf5c5558"
|
||||
dependencies = [
|
||||
"regex-automata",
|
||||
"regex-automata 0.1.10",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -4143,6 +4172,16 @@ version = "0.5.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "73cbba799671b762df5a175adf59ce145165747bb891505c43d09aefbbf38beb"
|
||||
|
||||
[[package]]
|
||||
name = "matrixmultiply"
|
||||
version = "0.3.7"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "090126dc04f95dc0d1c1c91f61bdd474b3930ca064c1edc8a849da2c6cbe1e77"
|
||||
dependencies = [
|
||||
"autocfg 1.1.0",
|
||||
"rawpointer",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "maybe-owned"
|
||||
version = "0.3.4"
|
||||
@@ -5100,7 +5139,7 @@ version = "1.4.3"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "9bd9647b268a3d3e14ff09c23201133a62589c658db02bb7388c7246aafe0590"
|
||||
dependencies = [
|
||||
"base64 0.21.0",
|
||||
"base64 0.21.2",
|
||||
"indexmap 1.9.3",
|
||||
"line-wrap",
|
||||
"quick-xml",
|
||||
@@ -5180,6 +5219,15 @@ version = "0.2.5"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "5da3b0203fd7ee5720aa0b5e790b591aa5d3f41c3ed2c34a3a393382198af2f7"
|
||||
|
||||
[[package]]
|
||||
name = "portable-atomic"
|
||||
version = "1.4.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "f32154ba0af3a075eefa1eda8bb414ee928f62303a54ea85b8d6638ff1a6ee9e"
|
||||
dependencies = [
|
||||
"serde",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "postage"
|
||||
version = "0.5.0"
|
||||
@@ -5347,6 +5395,7 @@ version = "0.1.0"
|
||||
dependencies = [
|
||||
"anyhow",
|
||||
"client",
|
||||
"collections",
|
||||
"context_menu",
|
||||
"db",
|
||||
"drag_and_drop",
|
||||
@@ -5662,6 +5711,12 @@ version = "0.5.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "f2ff9a1f06a88b01621b7ae906ef0211290d1c8a168a15542486a8f61c0833b9"
|
||||
|
||||
[[package]]
|
||||
name = "rawpointer"
|
||||
version = "0.2.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "60a357793950651c4ed0f3f52338f53b2f809f32d83a07f72909fa13e4c6c1e3"
|
||||
|
||||
[[package]]
|
||||
name = "rayon"
|
||||
version = "1.7.0"
|
||||
@@ -5781,6 +5836,12 @@ dependencies = [
|
||||
"regex-syntax 0.6.29",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "regex-automata"
|
||||
version = "0.3.4"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "b7b6d6190b7594385f61bd3911cd1be99dfddcfc365a4160cc2ab5bff4aed294"
|
||||
|
||||
[[package]]
|
||||
name = "regex-syntax"
|
||||
version = "0.6.29"
|
||||
@@ -5829,7 +5890,7 @@ version = "0.11.17"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "13293b639a097af28fc8a90f22add145a9c954e49d77da06263d58cf44d5fb91"
|
||||
dependencies = [
|
||||
"base64 0.21.0",
|
||||
"base64 0.21.2",
|
||||
"bytes 1.4.0",
|
||||
"encoding_rs",
|
||||
"futures-core",
|
||||
@@ -6031,6 +6092,21 @@ dependencies = [
|
||||
"zeroize",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "rusqlite"
|
||||
version = "0.27.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "85127183a999f7db96d1a976a309eebbfb6ea3b0b400ddd8340190129de6eb7a"
|
||||
dependencies = [
|
||||
"bitflags",
|
||||
"fallible-iterator",
|
||||
"fallible-streaming-iterator",
|
||||
"hashlink 0.7.0",
|
||||
"libsqlite3-sys",
|
||||
"memchr",
|
||||
"smallvec",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "rust-embed"
|
||||
version = "6.6.1"
|
||||
@@ -6166,7 +6242,7 @@ version = "1.0.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "d194b56d58803a43635bdc398cd17e383d6f71f9182b9a192c127ca42494a59b"
|
||||
dependencies = [
|
||||
"base64 0.21.0",
|
||||
"base64 0.21.2",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -6429,6 +6505,7 @@ name = "search"
|
||||
version = "0.1.0"
|
||||
dependencies = [
|
||||
"anyhow",
|
||||
"bitflags",
|
||||
"client",
|
||||
"collections",
|
||||
"editor",
|
||||
@@ -6440,6 +6517,7 @@ dependencies = [
|
||||
"menu",
|
||||
"postage",
|
||||
"project",
|
||||
"semantic_index",
|
||||
"serde",
|
||||
"serde_derive",
|
||||
"serde_json",
|
||||
@@ -6475,6 +6553,52 @@ dependencies = [
|
||||
"libc",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "semantic_index"
|
||||
version = "0.1.0"
|
||||
dependencies = [
|
||||
"anyhow",
|
||||
"async-trait",
|
||||
"bincode",
|
||||
"ctor",
|
||||
"editor",
|
||||
"env_logger 0.9.3",
|
||||
"futures 0.3.28",
|
||||
"globset",
|
||||
"gpui",
|
||||
"isahc",
|
||||
"language",
|
||||
"lazy_static",
|
||||
"log",
|
||||
"matrixmultiply",
|
||||
"parking_lot 0.11.2",
|
||||
"picker",
|
||||
"postage",
|
||||
"pretty_assertions",
|
||||
"project",
|
||||
"rand 0.8.5",
|
||||
"rpc",
|
||||
"rusqlite",
|
||||
"schemars",
|
||||
"serde",
|
||||
"serde_json",
|
||||
"settings",
|
||||
"smol",
|
||||
"tempdir",
|
||||
"theme",
|
||||
"tiktoken-rs 0.5.0",
|
||||
"tree-sitter",
|
||||
"tree-sitter-cpp",
|
||||
"tree-sitter-elixir 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"tree-sitter-json 0.19.0",
|
||||
"tree-sitter-rust",
|
||||
"tree-sitter-toml 0.20.0",
|
||||
"tree-sitter-typescript 0.20.2 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"unindent",
|
||||
"util",
|
||||
"workspace",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "semver"
|
||||
version = "0.11.0"
|
||||
@@ -6508,6 +6632,15 @@ dependencies = [
|
||||
"serde_derive",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "serde_bare"
|
||||
version = "0.5.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "51c55386eed0f1ae957b091dc2ca8122f287b60c79c774cbe3d5f2b69fded660"
|
||||
dependencies = [
|
||||
"serde",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "serde_derive"
|
||||
version = "1.0.162"
|
||||
@@ -6822,6 +6955,9 @@ name = "smallvec"
|
||||
version = "1.10.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "a507befe795404456341dfab10cef66ead4c041f62b8b11bbb92bffe5d0953e0"
|
||||
dependencies = [
|
||||
"serde",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "smol"
|
||||
@@ -6962,7 +7098,7 @@ dependencies = [
|
||||
"futures-executor",
|
||||
"futures-intrusive",
|
||||
"futures-util",
|
||||
"hashlink",
|
||||
"hashlink 0.8.1",
|
||||
"hex",
|
||||
"hkdf",
|
||||
"hmac 0.12.1",
|
||||
@@ -7466,7 +7602,22 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "8ba161c549e2c0686f35f5d920e63fad5cafba2c28ad2caceaf07e5d9fa6e8c4"
|
||||
dependencies = [
|
||||
"anyhow",
|
||||
"base64 0.21.0",
|
||||
"base64 0.21.2",
|
||||
"bstr",
|
||||
"fancy-regex",
|
||||
"lazy_static",
|
||||
"parking_lot 0.12.1",
|
||||
"rustc-hash",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "tiktoken-rs"
|
||||
version = "0.5.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "1a99d843674a3468b4a9200a565bbe909a0152f95e82a52feae71e6bf2d4b49d"
|
||||
dependencies = [
|
||||
"anyhow",
|
||||
"base64 0.21.2",
|
||||
"bstr",
|
||||
"fancy-regex",
|
||||
"lazy_static",
|
||||
@@ -7695,9 +7846,9 @@ checksum = "7cda73e2f1397b1262d6dfdcef8aafae14d1de7748d66822d3bfeeb6d03e5e4b"
|
||||
|
||||
[[package]]
|
||||
name = "toml_edit"
|
||||
version = "0.19.11"
|
||||
version = "0.19.14"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "266f016b7f039eec8a1a80dfe6156b633d208b9fccca5e4db1d6775b0c4e34a7"
|
||||
checksum = "f8123f27e969974a3dfba720fdb560be359f57b44302d280ba72e76a74480e8a"
|
||||
dependencies = [
|
||||
"indexmap 2.0.0",
|
||||
"toml_datetime",
|
||||
@@ -7875,12 +8026,21 @@ dependencies = [
|
||||
[[package]]
|
||||
name = "tree-sitter"
|
||||
version = "0.20.10"
|
||||
source = "git+https://github.com/tree-sitter/tree-sitter?rev=49226023693107fba9a1191136a4f47f38cdca73#49226023693107fba9a1191136a4f47f38cdca73"
|
||||
source = "git+https://github.com/tree-sitter/tree-sitter?rev=1c65ca24bc9a734ab70115188f465e12eecf224e#1c65ca24bc9a734ab70115188f465e12eecf224e"
|
||||
dependencies = [
|
||||
"cc",
|
||||
"regex",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "tree-sitter-bash"
|
||||
version = "0.19.0"
|
||||
source = "git+https://github.com/tree-sitter/tree-sitter-bash?rev=1b0321ee85701d5036c334a6f04761cdc672e64c#1b0321ee85701d5036c334a6f04761cdc672e64c"
|
||||
dependencies = [
|
||||
"cc",
|
||||
"tree-sitter",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "tree-sitter-c"
|
||||
version = "0.20.2"
|
||||
@@ -7910,6 +8070,16 @@ dependencies = [
|
||||
"tree-sitter",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "tree-sitter-elixir"
|
||||
version = "0.1.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "9a9916f3e1c80b3c8aab8582604e97e8720cb9b893489b347cf999f80f9d469e"
|
||||
dependencies = [
|
||||
"cc",
|
||||
"tree-sitter",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "tree-sitter-elixir"
|
||||
version = "0.1.0"
|
||||
@@ -7919,6 +8089,15 @@ dependencies = [
|
||||
"tree-sitter",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "tree-sitter-elm"
|
||||
version = "5.6.4"
|
||||
source = "git+https://github.com/elm-tooling/tree-sitter-elm?rev=692c50c0b961364c40299e73c1306aecb5d20f40#692c50c0b961364c40299e73c1306aecb5d20f40"
|
||||
dependencies = [
|
||||
"cc",
|
||||
"tree-sitter",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "tree-sitter-embedded-template"
|
||||
version = "0.20.0"
|
||||
@@ -7929,6 +8108,15 @@ dependencies = [
|
||||
"tree-sitter",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "tree-sitter-glsl"
|
||||
version = "0.1.4"
|
||||
source = "git+https://github.com/theHamsta/tree-sitter-glsl?rev=2a56fb7bc8bb03a1892b4741279dd0a8758b7fb3#2a56fb7bc8bb03a1892b4741279dd0a8758b7fb3"
|
||||
dependencies = [
|
||||
"cc",
|
||||
"tree-sitter",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "tree-sitter-go"
|
||||
version = "0.19.1"
|
||||
@@ -7957,16 +8145,6 @@ dependencies = [
|
||||
"tree-sitter",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "tree-sitter-javascript"
|
||||
version = "0.20.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "2490fab08630b2c8943c320f7b63473cbf65511c8d83aec551beb9b4375906ed"
|
||||
dependencies = [
|
||||
"cc",
|
||||
"tree-sitter",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "tree-sitter-json"
|
||||
version = "0.19.0"
|
||||
@@ -8005,6 +8183,15 @@ dependencies = [
|
||||
"tree-sitter",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "tree-sitter-php"
|
||||
version = "0.19.1"
|
||||
source = "git+https://github.com/tree-sitter/tree-sitter-php?rev=d43130fd1525301e9826f420c5393a4d169819fc#d43130fd1525301e9826f420c5393a4d169819fc"
|
||||
dependencies = [
|
||||
"cc",
|
||||
"tree-sitter",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "tree-sitter-python"
|
||||
version = "0.20.2"
|
||||
@@ -8053,6 +8240,15 @@ dependencies = [
|
||||
"tree-sitter",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "tree-sitter-svelte"
|
||||
version = "0.10.2"
|
||||
source = "git+https://github.com/Himujjal/tree-sitter-svelte?rev=697bb515471871e85ff799ea57a76298a71a9cca#697bb515471871e85ff799ea57a76298a71a9cca"
|
||||
dependencies = [
|
||||
"cc",
|
||||
"tree-sitter",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "tree-sitter-toml"
|
||||
version = "0.5.1"
|
||||
@@ -8062,6 +8258,16 @@ dependencies = [
|
||||
"tree-sitter",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "tree-sitter-toml"
|
||||
version = "0.20.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "ca517f578a98b23d20780247cc2688407fa81effad5b627a5a364ec3339b53e8"
|
||||
dependencies = [
|
||||
"cc",
|
||||
"tree-sitter",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "tree-sitter-typescript"
|
||||
version = "0.20.2"
|
||||
@@ -8349,6 +8555,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "4dad5567ad0cf5b760e5665964bec1b47dfd077ba8a2544b513f3556d3d239a2"
|
||||
dependencies = [
|
||||
"getrandom 0.2.9",
|
||||
"rand 0.8.5",
|
||||
"serde",
|
||||
]
|
||||
|
||||
@@ -8378,6 +8585,19 @@ version = "0.2.15"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "accd4ea62f7bb7a82fe23066fb0957d48ef677f6eeb8215f372f52e48bb32426"
|
||||
|
||||
[[package]]
|
||||
name = "vcs_menu"
|
||||
version = "0.1.0"
|
||||
dependencies = [
|
||||
"anyhow",
|
||||
"fuzzy",
|
||||
"gpui",
|
||||
"picker",
|
||||
"theme",
|
||||
"util",
|
||||
"workspace",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "version_check"
|
||||
version = "0.9.4"
|
||||
@@ -8398,7 +8618,7 @@ dependencies = [
|
||||
"indoc",
|
||||
"itertools",
|
||||
"language",
|
||||
"lazy_static",
|
||||
"language_selector",
|
||||
"log",
|
||||
"nvim-rs",
|
||||
"parking_lot 0.11.2",
|
||||
@@ -8408,6 +8628,7 @@ dependencies = [
|
||||
"serde_derive",
|
||||
"serde_json",
|
||||
"settings",
|
||||
"theme",
|
||||
"tokio",
|
||||
"util",
|
||||
"workspace",
|
||||
@@ -9181,9 +9402,9 @@ checksum = "1a515f5799fe4961cb532f983ce2b23082366b898e52ffbce459c86f67c8378a"
|
||||
|
||||
[[package]]
|
||||
name = "winnow"
|
||||
version = "0.4.7"
|
||||
version = "0.5.3"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "ca0ace3845f0d96209f0375e6d367e3eb87eb65d27d445bdc9f1843a26f39448"
|
||||
checksum = "f46aab759304e4d7b2075a9aecba26228bb073ee8c50db796b2c72c676b5d807"
|
||||
dependencies = [
|
||||
"memchr",
|
||||
]
|
||||
@@ -9234,7 +9455,7 @@ name = "workspace"
|
||||
version = "0.1.0"
|
||||
dependencies = [
|
||||
"anyhow",
|
||||
"async-recursion 1.0.4",
|
||||
"async-recursion",
|
||||
"bincode",
|
||||
"call",
|
||||
"client",
|
||||
@@ -9339,13 +9560,13 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "zed"
|
||||
version = "0.94.3"
|
||||
version = "0.98.0"
|
||||
dependencies = [
|
||||
"activity_indicator",
|
||||
"ai",
|
||||
"anyhow",
|
||||
"async-compression",
|
||||
"async-recursion 0.3.2",
|
||||
"async-recursion",
|
||||
"async-tar",
|
||||
"async-trait",
|
||||
"audio",
|
||||
@@ -9405,6 +9626,7 @@ dependencies = [
|
||||
"rsa",
|
||||
"rust-embed",
|
||||
"search",
|
||||
"semantic_index",
|
||||
"serde",
|
||||
"serde_derive",
|
||||
"serde_json",
|
||||
@@ -9423,23 +9645,28 @@ dependencies = [
|
||||
"tiny_http",
|
||||
"toml",
|
||||
"tree-sitter",
|
||||
"tree-sitter-bash",
|
||||
"tree-sitter-c",
|
||||
"tree-sitter-cpp",
|
||||
"tree-sitter-css",
|
||||
"tree-sitter-elixir",
|
||||
"tree-sitter-elixir 0.1.0 (git+https://github.com/elixir-lang/tree-sitter-elixir?rev=4ba9dab6e2602960d95b2b625f3386c27e08084e)",
|
||||
"tree-sitter-elm",
|
||||
"tree-sitter-embedded-template",
|
||||
"tree-sitter-glsl",
|
||||
"tree-sitter-go",
|
||||
"tree-sitter-heex",
|
||||
"tree-sitter-html",
|
||||
"tree-sitter-json 0.20.0",
|
||||
"tree-sitter-lua",
|
||||
"tree-sitter-markdown",
|
||||
"tree-sitter-php",
|
||||
"tree-sitter-python",
|
||||
"tree-sitter-racket",
|
||||
"tree-sitter-ruby",
|
||||
"tree-sitter-rust",
|
||||
"tree-sitter-scheme",
|
||||
"tree-sitter-toml",
|
||||
"tree-sitter-svelte",
|
||||
"tree-sitter-toml 0.5.1",
|
||||
"tree-sitter-typescript 0.20.2 (git+https://github.com/tree-sitter/tree-sitter-typescript?rev=5d20856f34315b068c41edaee2ac8a100081d259)",
|
||||
"tree-sitter-yaml",
|
||||
"unindent",
|
||||
|
||||
38
Cargo.toml
@@ -16,6 +16,7 @@ members = [
|
||||
"crates/context_menu",
|
||||
"crates/copilot",
|
||||
"crates/copilot_button",
|
||||
"crates/crdb",
|
||||
"crates/db",
|
||||
"crates/diagnostics",
|
||||
"crates/drag_and_drop",
|
||||
@@ -63,7 +64,9 @@ members = [
|
||||
"crates/theme",
|
||||
"crates/theme_selector",
|
||||
"crates/util",
|
||||
"crates/semantic_index",
|
||||
"crates/vim",
|
||||
"crates/vcs_menu",
|
||||
"crates/workspace",
|
||||
"crates/welcome",
|
||||
"crates/xtask",
|
||||
@@ -74,14 +77,16 @@ default-members = ["crates/zed"]
|
||||
resolver = "2"
|
||||
|
||||
[workspace.dependencies]
|
||||
anyhow = { version = "1.0.57" }
|
||||
anyhow = { version = "1.0.57", features = ["backtrace"] }
|
||||
async-recursion = "1.0"
|
||||
async-trait = { version = "0.1" }
|
||||
ctor = { version = "0.1" }
|
||||
env_logger = { version = "0.9" }
|
||||
futures = { version = "0.3" }
|
||||
globset = { version = "0.4" }
|
||||
indoc = "1"
|
||||
isahc = "1.7.2"
|
||||
# We explicitly disable a http2 support in isahc.
|
||||
isahc = { version = "1.7.2", default-features = false, features = ["static-curl", "text-decoding"] }
|
||||
lazy_static = { version = "1.4.0" }
|
||||
log = { version = "0.4.16", features = ["kv_unstable_serde"] }
|
||||
ordered-float = { version = "2.1.1" }
|
||||
@@ -94,7 +99,7 @@ schemars = { version = "0.8" }
|
||||
serde = { version = "1.0", features = ["derive", "rc"] }
|
||||
serde_derive = { version = "1.0", features = ["deserialize_in_place"] }
|
||||
serde_json = { version = "1.0", features = ["preserve_order", "raw_value"] }
|
||||
smallvec = { version = "1.6", features = ["union"] }
|
||||
smallvec = { version = "1.6", features = ["serde", "union"] }
|
||||
smol = { version = "1.2" }
|
||||
tempdir = { version = "0.3.7" }
|
||||
thiserror = { version = "1.0.29" }
|
||||
@@ -104,8 +109,33 @@ tree-sitter = "0.20"
|
||||
unindent = { version = "0.1.7" }
|
||||
pretty_assertions = "1.3.0"
|
||||
|
||||
tree-sitter-bash = { git = "https://github.com/tree-sitter/tree-sitter-bash", rev = "1b0321ee85701d5036c334a6f04761cdc672e64c" }
|
||||
tree-sitter-c = "0.20.1"
|
||||
tree-sitter-cpp = "0.20.0"
|
||||
tree-sitter-css = { git = "https://github.com/tree-sitter/tree-sitter-css", rev = "769203d0f9abe1a9a691ac2b9fe4bb4397a73c51" }
|
||||
tree-sitter-elixir = { git = "https://github.com/elixir-lang/tree-sitter-elixir", rev = "4ba9dab6e2602960d95b2b625f3386c27e08084e" }
|
||||
tree-sitter-elm = { git = "https://github.com/elm-tooling/tree-sitter-elm", rev = "692c50c0b961364c40299e73c1306aecb5d20f40"}
|
||||
tree-sitter-embedded-template = "0.20.0"
|
||||
tree-sitter-glsl = { git = "https://github.com/theHamsta/tree-sitter-glsl", rev = "2a56fb7bc8bb03a1892b4741279dd0a8758b7fb3" }
|
||||
tree-sitter-go = { git = "https://github.com/tree-sitter/tree-sitter-go", rev = "aeb2f33b366fd78d5789ff104956ce23508b85db" }
|
||||
tree-sitter-heex = { git = "https://github.com/phoenixframework/tree-sitter-heex", rev = "2e1348c3cf2c9323e87c2744796cf3f3868aa82a" }
|
||||
tree-sitter-json = { git = "https://github.com/tree-sitter/tree-sitter-json", rev = "40a81c01a40ac48744e0c8ccabbaba1920441199" }
|
||||
tree-sitter-rust = "0.20.3"
|
||||
tree-sitter-markdown = { git = "https://github.com/MDeiml/tree-sitter-markdown", rev = "330ecab87a3e3a7211ac69bbadc19eabecdb1cca" }
|
||||
tree-sitter-php = { git = "https://github.com/tree-sitter/tree-sitter-php", rev = "d43130fd1525301e9826f420c5393a4d169819fc" }
|
||||
tree-sitter-python = "0.20.2"
|
||||
tree-sitter-toml = { git = "https://github.com/tree-sitter/tree-sitter-toml", rev = "342d9be207c2dba869b9967124c679b5e6fd0ebe" }
|
||||
tree-sitter-typescript = { git = "https://github.com/tree-sitter/tree-sitter-typescript", rev = "5d20856f34315b068c41edaee2ac8a100081d259" }
|
||||
tree-sitter-ruby = "0.20.0"
|
||||
tree-sitter-html = "0.19.0"
|
||||
tree-sitter-scheme = { git = "https://github.com/6cdh/tree-sitter-scheme", rev = "af0fd1fa452cb2562dc7b5c8a8c55551c39273b9"}
|
||||
tree-sitter-svelte = { git = "https://github.com/Himujjal/tree-sitter-svelte", rev = "697bb515471871e85ff799ea57a76298a71a9cca"}
|
||||
tree-sitter-racket = { git = "https://github.com/zed-industries/tree-sitter-racket", rev = "eb010cf2c674c6fd9a6316a84e28ef90190fe51a"}
|
||||
tree-sitter-yaml = { git = "https://github.com/zed-industries/tree-sitter-yaml", rev = "f545a41f57502e1b5ddf2a6668896c1b0620f930"}
|
||||
tree-sitter-lua = "0.0.14"
|
||||
|
||||
[patch.crates-io]
|
||||
tree-sitter = { git = "https://github.com/tree-sitter/tree-sitter", rev = "49226023693107fba9a1191136a4f47f38cdca73" }
|
||||
tree-sitter = { git = "https://github.com/tree-sitter/tree-sitter", rev = "1c65ca24bc9a734ab70115188f465e12eecf224e" }
|
||||
async-task = { git = "https://github.com/zed-industries/async-task", rev = "341b57d6de98cdfd7b418567b8de2022ca993a6e" }
|
||||
|
||||
# TODO - Remove when a version is released with this PR: https://github.com/servo/core-foundation-rs/pull/457
|
||||
|
||||
@@ -16,22 +16,25 @@ Welcome to Zed, a lightning-fast, collaborative code editor that makes your drea
|
||||
brew install foreman
|
||||
```
|
||||
|
||||
* Ensure the Zed.dev website is checked out in a sibling directory:
|
||||
* Ensure the Zed.dev website is checked out in a sibling directory and install it's dependencies:
|
||||
|
||||
```
|
||||
cd ..
|
||||
git clone https://github.com/zed-industries/zed.dev
|
||||
cd zed.dev && npm install
|
||||
npm install -g vercel
|
||||
```
|
||||
|
||||
* Initialize submodules
|
||||
* Return to Zed project directory and Initialize submodules
|
||||
|
||||
```
|
||||
cd zed
|
||||
git submodule update --init --recursive
|
||||
```
|
||||
|
||||
* Set up a local `zed` database and seed it with some initial users:
|
||||
|
||||
Create a personal GitHub token to run `script/bootstrap` once successfully: the token needs to have an access to private repositories for the script to work (`repo` OAuth scope).
|
||||
[Create a personal GitHub token](https://github.com/settings/tokens/new) to run `script/bootstrap` once successfully: the token needs to have an access to private repositories for the script to work (`repo` OAuth scope).
|
||||
Then delete that token.
|
||||
|
||||
```
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
User input begins on a line starting with /.
|
||||
User input begins on a line starting with >.
|
||||
Your output begins on a line starting with <.
|
||||
|
||||
Don't apologize ever.
|
||||
Never say "I apologize".
|
||||
Use simple language and don't flatter the users.
|
||||
Keep it short.
|
||||
Risk being rude.
|
||||
Use simple language and don't flatter the users. Spend your tokens on valuable information.
|
||||
|
||||
27
assets/icons/file_icons/ai.svg
Normal file
@@ -0,0 +1,27 @@
|
||||
<svg width="14" height="16" viewBox="0 0 14 16" fill="none" xmlns="http://www.w3.org/2000/svg">
|
||||
<path d="M9.375 8.74577V10.375C7.30597 10.375 6.69403 10.375 4.625 10.375V10.1226L9.375 5.87742V5.625H4.625V7.27717" stroke="black" stroke-width="1.25"/>
|
||||
<circle cx="0.5" cy="8" r="0.5" fill="black" fill-opacity="0.3"/>
|
||||
<circle cx="1.49976" cy="5.82825" r="0.5" fill="black" fill-opacity="0.6"/>
|
||||
<circle cx="1.49976" cy="10.1719" r="0.5" fill="black" fill-opacity="0.6"/>
|
||||
<circle cx="13.5" cy="8.01581" r="0.5" fill="black" fill-opacity="0.3"/>
|
||||
<circle cx="12.5" cy="5.84387" r="0.5" fill="black" fill-opacity="0.6"/>
|
||||
<circle cx="12.5" cy="10.1877" r="0.5" fill="black" fill-opacity="0.6"/>
|
||||
<circle cx="6.99213" cy="1.48438" r="0.5" fill="black" fill-opacity="0.3"/>
|
||||
<circle cx="4.50391" cy="2.48438" r="0.5" fill="black" fill-opacity="0.6"/>
|
||||
<circle cx="2.49976" cy="3.48438" r="0.5" fill="black" fill-opacity="0.3"/>
|
||||
<circle cx="2.49976" cy="12.5" r="0.5" fill="black" fill-opacity="0.3"/>
|
||||
<circle cx="0.5" cy="12.016" r="0.5" fill="black" fill-opacity="0.3"/>
|
||||
<circle cx="0.5" cy="3.98438" r="0.5" fill="black" fill-opacity="0.3"/>
|
||||
<circle cx="13.5" cy="12.016" r="0.5" fill="black" fill-opacity="0.3"/>
|
||||
<circle cx="13.5" cy="3.98438" r="0.5" fill="black" fill-opacity="0.3"/>
|
||||
<circle cx="2.49976" cy="14.516" r="0.5" fill="black" fill-opacity="0.3"/>
|
||||
<circle cx="2.48413" cy="1.48438" r="0.5" fill="black" fill-opacity="0.3"/>
|
||||
<circle cx="11.5" cy="14.516" r="0.5" fill="black" fill-opacity="0.3"/>
|
||||
<circle cx="11.5" cy="1.48438" r="0.5" fill="black" fill-opacity="0.3"/>
|
||||
<circle cx="11.5" cy="3.48438" r="0.5" fill="black" fill-opacity="0.3"/>
|
||||
<circle cx="11.5" cy="12.516" r="0.5" fill="black" fill-opacity="0.3"/>
|
||||
<circle cx="9.49609" cy="2.48438" r="0.5" fill="black" fill-opacity="0.6"/>
|
||||
<circle cx="6.99213" cy="14.5" r="0.5" fill="black" fill-opacity="0.3"/>
|
||||
<circle cx="4.50391" cy="13.516" r="0.5" fill="black" fill-opacity="0.6"/>
|
||||
<circle cx="9.49609" cy="13.5" r="0.5" fill="black" fill-opacity="0.6"/>
|
||||
</svg>
|
||||
|
After Width: | Height: | Size: 2.0 KiB |
5
assets/icons/file_icons/archive.svg
Normal file
@@ -0,0 +1,5 @@
|
||||
<svg width="14" height="14" viewBox="0 0 14 14" fill="none" xmlns="http://www.w3.org/2000/svg">
|
||||
<path d="M6 7.63H8" stroke="black" stroke-opacity="0.6" stroke-width="1.25" stroke-linecap="round" stroke-linejoin="round"/>
|
||||
<rect x="2" y="2" width="10" height="3" rx="0.5" fill="black" fill-opacity="0.3" stroke="black" stroke-width="1.25"/>
|
||||
<path d="M2.59375 5H11.4375L10.5581 11.5664C10.5248 11.8146 10.313 12 10.0625 12H3.93944C3.68812 12 3.47585 11.8134 3.44358 11.5642L2.59375 5Z" stroke="black" stroke-width="1.25"/>
|
||||
</svg>
|
||||
|
After Width: | Height: | Size: 527 B |
6
assets/icons/file_icons/audio.svg
Normal file
@@ -0,0 +1,6 @@
|
||||
<svg width="14" height="14" viewBox="0 0 14 14" fill="none" xmlns="http://www.w3.org/2000/svg">
|
||||
<path d="M7 11C5.46973 11 4.1268 11.1873 3.31522 11.3327C2.94367 11.3992 2.60079 11.0563 2.66733 10.6848C2.81266 9.8732 3 8.53027 3 7C3 5.8387 2.89211 4.78529 2.77656 3.99011C2.73589 3.71017 3.19546 3.51715 3.36119 3.7464C4.09612 4.76304 5.23301 6.23301 6.5 7.5C7.76699 8.76699 9.23696 9.90388 10.2536 10.6388C10.4828 10.8045 10.2898 11.2641 10.0099 11.2234C9.21472 11.1079 8.1613 11 7 11Z" stroke="black" stroke-width="1.25" stroke-linecap="round" stroke-linejoin="round"/>
|
||||
<path d="M12.365 3.8478L10.3381 1.82088" stroke="black" stroke-opacity="0.3" stroke-width="1.25" stroke-linecap="round"/>
|
||||
<path d="M11.3516 7.36803L6.64062 2.64155" stroke="black" stroke-opacity="0.6" stroke-width="1.25" stroke-linecap="round"/>
|
||||
<rect x="2.72266" y="8.73828" width="3.58525" height="2.72899" rx="0.5" transform="rotate(45 2.72266 8.73828)" fill="black"/>
|
||||
</svg>
|
||||
|
After Width: | Height: | Size: 950 B |
6
assets/icons/file_icons/book.svg
Normal file
@@ -0,0 +1,6 @@
|
||||
<svg width="14" height="14" viewBox="0 0 14 14" fill="none" xmlns="http://www.w3.org/2000/svg">
|
||||
<path d="M12 10L12 10.8374C12 10.9431 11.9665 11.046 11.9044 11.1315L11.1498 12.1691C11.0557 12.2985 10.9054 12.375 10.7454 12.375L3.25461 12.375C3.09464 12.375 2.94433 12.2985 2.85024 12.1691L2.09563 11.1315C2.03348 11.046 2 10.9431 2 10.8374L2 2" stroke="black" stroke-width="1.25" stroke-linecap="round"/>
|
||||
<path d="M2 12V10L7 11H12V12H2Z" fill="black"/>
|
||||
<path d="M5.63246 2.04415C6.44914 2.31638 7 3.08066 7 3.94152V10.7306C7 11.0924 6.62757 11.3345 6.29693 11.1875L2.79693 9.63197C2.61637 9.55172 2.5 9.37266 2.5 9.17506V1.69371C2.5 1.35243 2.83435 1.11145 3.15811 1.21937L5.63246 2.04415Z" stroke="black" stroke-width="1.25" stroke-linecap="round" stroke-linejoin="round"/>
|
||||
<path d="M8.5 2C7.67157 2 7 2.67157 7 3.5V12C7 11.1954 10.2366 11.0382 11.5017 11.0075C11.7778 11.0008 12 10.7761 12 10.5V2.5C12 2.22386 11.7761 2 11.5 2H8.5Z" stroke="black" stroke-width="1.25" stroke-linecap="round" stroke-linejoin="round"/>
|
||||
</svg>
|
||||
|
After Width: | Height: | Size: 1.0 KiB |
4
assets/icons/file_icons/camera.svg
Normal file
@@ -0,0 +1,4 @@
|
||||
<svg width="14" height="14" viewBox="0 0 14 14" fill="none" xmlns="http://www.w3.org/2000/svg">
|
||||
<path fill-rule="evenodd" clip-rule="evenodd" d="M12 10.5C12 10.7761 11.7761 11 11.5 11H2.5C2.22386 11 2 10.7761 2 10.5V4.88C2 4.60386 2.22386 4.38 2.5 4.38H4.4342C4.61518 4.38 4.78204 4.2822 4.87046 4.12428L5.35681 3.25572C5.44524 3.0978 5.61209 3 5.79308 3H8.20692C8.38791 3 8.55476 3.0978 8.64319 3.25572L9.12954 4.12428C9.21796 4.2822 9.38482 4.38 9.5658 4.38H11.5C11.7761 4.38 12 4.60386 12 4.88V10.5Z" stroke="black" stroke-width="1.25" stroke-linecap="round" stroke-linejoin="round"/>
|
||||
<path d="M7.005 9C7.90246 9 8.63 8.27246 8.63 7.375C8.63 6.47754 7.90246 5.75 7.005 5.75C6.10754 5.75 5.38 6.47754 5.38 7.375C5.38 8.27246 6.10754 9 7.005 9Z" fill="black" fill-opacity="0.3" stroke="black" stroke-linecap="round" stroke-linejoin="round"/>
|
||||
</svg>
|
||||
|
After Width: | Height: | Size: 850 B |
3
assets/icons/file_icons/chevron_down.svg
Normal file
@@ -0,0 +1,3 @@
|
||||
<svg width="14" height="14" viewBox="0 0 14 14" fill="none" xmlns="http://www.w3.org/2000/svg">
|
||||
<path d="M3.63281 5.66406L6.99344 8.89844L10.3672 5.66406" stroke="black" stroke-width="1.25" stroke-linecap="round" stroke-linejoin="round"/>
|
||||
</svg>
|
||||
|
After Width: | Height: | Size: 246 B |
3
assets/icons/file_icons/chevron_left.svg
Normal file
@@ -0,0 +1,3 @@
|
||||
<svg width="14" height="14" viewBox="0 0 14 14" fill="none" xmlns="http://www.w3.org/2000/svg">
|
||||
<path d="M8.35938 3.63281L5.125 6.99344L8.35938 10.3672" stroke="black" stroke-width="1.25" stroke-linecap="round" stroke-linejoin="round"/>
|
||||
</svg>
|
||||
|
After Width: | Height: | Size: 244 B |
3
assets/icons/file_icons/chevron_right.svg
Normal file
@@ -0,0 +1,3 @@
|
||||
<svg width="14" height="14" viewBox="0 0 14 14" fill="none" xmlns="http://www.w3.org/2000/svg">
|
||||
<path d="M5.64062 3.64062L8.89062 7.00125L5.64062 10.375" stroke="black" stroke-width="1.25" stroke-linecap="round" stroke-linejoin="round"/>
|
||||
</svg>
|
||||
|
After Width: | Height: | Size: 245 B |
3
assets/icons/file_icons/chevron_up.svg
Normal file
@@ -0,0 +1,3 @@
|
||||
<svg width="14" height="14" viewBox="0 0 14 14" fill="none" xmlns="http://www.w3.org/2000/svg">
|
||||
<path d="M3.63281 8.36719L6.99344 5.13281L10.3672 8.36719" stroke="black" stroke-width="1.25" stroke-linecap="round" stroke-linejoin="round"/>
|
||||
</svg>
|
||||
|
After Width: | Height: | Size: 246 B |
4
assets/icons/file_icons/code.svg
Normal file
@@ -0,0 +1,4 @@
|
||||
<svg width="14" height="14" viewBox="0 0 14 14" fill="none" xmlns="http://www.w3.org/2000/svg">
|
||||
<path d="M4.375 2C2.5 2 2.5 3.5 2.5 4.5C2.5 5.5 2 6.50106 1 7C2 7.50106 2.5 8.5 2.5 9.5C2.5 10.5 2.5 12 4.375 12" stroke="black" stroke-width="1.25" stroke-linecap="round" stroke-linejoin="round"/>
|
||||
<path d="M9.63281 2C11.5078 2 11.5078 3.5 11.5078 4.5C11.5078 5.5 12.0078 6.50106 13.0078 7C12.0078 7.50106 11.5078 8.5 11.5078 9.5C11.5078 10.5 11.5078 12 9.63281 12" stroke="black" stroke-width="1.25" stroke-linecap="round" stroke-linejoin="round"/>
|
||||
</svg>
|
||||
|
After Width: | Height: | Size: 553 B |
4
assets/icons/file_icons/conversations.svg
Normal file
@@ -0,0 +1,4 @@
|
||||
<svg width="14" height="14" viewBox="0 0 14 14" fill="none" xmlns="http://www.w3.org/2000/svg">
|
||||
<path d="M5.46115 8.43419C7.30678 8.43419 8.92229 7.43411 8.92229 5.21171C8.92229 2.98933 7.30678 1.98926 5.46115 1.98926C3.61553 1.98926 2 2.98933 2 5.21171C2 6.028 2.21794 6.67935 2.58519 7.17685C2.7184 7.35732 2.69033 7.77795 2.58387 7.97539C2.32908 8.44793 2.81048 8.9657 3.33372 8.84571C3.72539 8.75597 4.13621 8.63447 4.49574 8.4715C4.62736 8.41181 4.7727 8.38777 4.91631 8.40402C5.09471 8.42416 5.27678 8.43419 5.46115 8.43419Z" fill="black" fill-opacity="0.33" stroke="black" stroke-width="0.990499" stroke-linecap="round" stroke-linejoin="round"/>
|
||||
<path d="M11.6055 5.87971C11.4329 5.66762 11.1208 5.6357 10.9088 5.80842C10.6967 5.98114 10.6648 6.29308 10.8375 6.50518L11.6055 5.87971ZM6.4361 10.4149C6.21522 10.2536 5.90539 10.3018 5.74404 10.5226C5.58268 10.7435 5.6309 11.0533 5.85177 11.2147L6.4361 10.4149ZM12.3808 8.25929C12.3808 7.28754 12.1013 6.48847 11.6055 5.87971L10.8375 6.50518C11.1712 6.91492 11.3903 7.48485 11.3903 8.25929H12.3808ZM11.6988 10.5186C12.137 9.92499 12.3808 9.16705 12.3808 8.25929H11.3903C11.3903 8.98414 11.1982 9.52892 10.9019 9.93034L11.6988 10.5186ZM9.1854 11.9702C9.58603 12.1518 10.0316 12.2822 10.4412 12.3761L10.6625 11.4106C10.2888 11.3249 9.91276 11.2124 9.59435 11.068L9.1854 11.9702ZM8.42443 11.977C8.62663 11.977 8.8273 11.9661 9.02494 11.9437L8.91361 10.9595C8.75447 10.9775 8.59097 10.9865 8.42443 10.9865V11.977ZM5.85177 11.2147C6.5749 11.743 7.49105 11.977 8.42443 11.977V10.9865C7.64656 10.9865 6.9503 10.7906 6.4361 10.4149L5.85177 11.2147ZM9.59435 11.068C9.38377 10.9726 9.14869 10.9329 8.91361 10.9595L9.02494 11.9437C9.07704 11.9378 9.13271 11.9463 9.1854 11.9702L9.59435 11.068ZM10.8658 11.2581C10.8784 11.2813 10.8772 11.2932 10.8762 11.2995C10.8746 11.3097 10.8681 11.3291 10.8481 11.3517C10.8049 11.4004 10.7343 11.4271 10.6625 11.4106L10.4412 12.3761C10.8927 12.4796 11.3244 12.3073 11.5891 12.0089C11.8602 11.7033 11.9778 11.2332 11.7377 10.7879L10.8658 11.2581ZM10.9019 9.93034C10.7358 10.1554 10.7116 10.4435 10.7161 10.6293C10.7209 10.8293 10.7634 11.0682 10.8658 11.2581L11.7377 10.7879C11.739 10.7905 11.7304 10.7736 11.7214 10.7331C11.713 10.6954 11.7074 10.6506 11.7063 10.6054C11.7052 10.5594 11.709 10.5234 11.7139 10.5006C11.7196 10.4738 11.7217 10.4876 11.6988 10.5186L10.9019 9.93034Z" fill="black"/>
|
||||
</svg>
|
||||
|
After Width: | Height: | Size: 2.3 KiB |
5
assets/icons/file_icons/database.svg
Normal file
@@ -0,0 +1,5 @@
|
||||
<svg width="14" height="14" viewBox="0 0 14 14" fill="none" xmlns="http://www.w3.org/2000/svg">
|
||||
<ellipse cx="7" cy="4" rx="5" ry="2" fill="black" fill-opacity="0.3" stroke="black" stroke-width="1.25"/>
|
||||
<path d="M12 4V10C12 11.1046 9.76142 12 7 12C4.23858 12 2 11.1046 2 10V4" stroke="black" stroke-width="1.25"/>
|
||||
<path d="M12 7C12 8.10457 9.76142 9 7 9C4.23858 9 2 8.10457 2 7" stroke="black" stroke-width="1.25"/>
|
||||
</svg>
|
||||
|
After Width: | Height: | Size: 422 B |
4
assets/icons/file_icons/eslint.svg
Normal file
@@ -0,0 +1,4 @@
|
||||
<svg width="14" height="14" viewBox="0 0 14 14" fill="none" xmlns="http://www.w3.org/2000/svg">
|
||||
<path d="M12.5413 7.3125C12.6529 7.11913 12.6529 6.88088 12.5413 6.6875L10.0413 2.35738C9.92962 2.164 9.72329 2.04488 9.5 2.04488L4.5 2.04488C4.27671 2.04488 4.07038 2.164 3.95873 2.35738L1.45873 6.6875C1.34709 6.88088 1.34709 7.11913 1.45873 7.3125L3.95873 11.6426C4.07038 11.836 4.27671 11.9551 4.5 11.9551L9.5 11.9551C9.72329 11.9551 9.92962 11.836 10.0413 11.6426L12.5413 7.3125Z" stroke="black" stroke-width="1.25" stroke-linejoin="round"/>
|
||||
<path d="M6.75 4.14434C6.9047 4.05502 7.0953 4.05502 7.25 4.14434L9.34808 5.35566C9.50278 5.44498 9.59808 5.61004 9.59808 5.78868V8.21132C9.59808 8.38996 9.50278 8.55502 9.34808 8.64434L7.25 9.85566C7.0953 9.94498 6.9047 9.94498 6.75 9.85566L4.65192 8.64434C4.49722 8.55502 4.40192 8.38996 4.40192 8.21132L4.40192 5.78868C4.40192 5.61004 4.49722 5.44498 4.65192 5.35566L6.75 4.14434Z" fill="black"/>
|
||||
</svg>
|
||||
|
After Width: | Height: | Size: 949 B |
5
assets/icons/file_icons/file.svg
Normal file
@@ -0,0 +1,5 @@
|
||||
<svg width="14" height="14" viewBox="0 0 14 14" fill="none" xmlns="http://www.w3.org/2000/svg">
|
||||
<path d="M2 4H10" stroke="black" stroke-opacity="0.6" stroke-width="1.25" stroke-linecap="round"/>
|
||||
<path d="M2 7H12" stroke="black" stroke-width="1.25" stroke-linecap="round"/>
|
||||
<path d="M2 10H8" stroke="black" stroke-opacity="0.3" stroke-width="1.25" stroke-linecap="round"/>
|
||||
</svg>
|
||||
|
After Width: | Height: | Size: 379 B |
159
assets/icons/file_icons/file_types.json
Normal file
@@ -0,0 +1,159 @@
|
||||
{
|
||||
"suffixes": {
|
||||
"aac": "audio",
|
||||
"bash": "terminal",
|
||||
"bmp": "image",
|
||||
"c": "code",
|
||||
"conf": "settings",
|
||||
"cpp": "code",
|
||||
"cc": "code",
|
||||
"css": "code",
|
||||
"doc": "document",
|
||||
"docx": "document",
|
||||
"eslintrc": "eslint",
|
||||
"eslintrc.js": "eslint",
|
||||
"eslintrc.json": "eslint",
|
||||
"flac": "audio",
|
||||
"fish": "terminal",
|
||||
"gitattributes": "vcs",
|
||||
"gitignore": "vcs",
|
||||
"gitmodules": "vcs",
|
||||
"gif": "image",
|
||||
"go": "code",
|
||||
"h": "code",
|
||||
"handlebars": "code",
|
||||
"hbs": "template",
|
||||
"htm": "template",
|
||||
"html": "template",
|
||||
"svelte": "template",
|
||||
"hpp": "code",
|
||||
"ico": "image",
|
||||
"ini": "settings",
|
||||
"java": "code",
|
||||
"jpeg": "image",
|
||||
"jpg": "image",
|
||||
"js": "code",
|
||||
"json": "storage",
|
||||
"lock": "lock",
|
||||
"log": "log",
|
||||
"md": "document",
|
||||
"mdx": "document",
|
||||
"mp3": "audio",
|
||||
"mp4": "video",
|
||||
"ods": "document",
|
||||
"odp": "document",
|
||||
"odt": "document",
|
||||
"ogg": "video",
|
||||
"pdf": "document",
|
||||
"php": "code",
|
||||
"png": "image",
|
||||
"ppt": "document",
|
||||
"pptx": "document",
|
||||
"prettierrc": "prettier",
|
||||
"prettierignore": "prettier",
|
||||
"ps1": "terminal",
|
||||
"psd": "image",
|
||||
"py": "code",
|
||||
"rb": "code",
|
||||
"rkt": "code",
|
||||
"rs": "rust",
|
||||
"rtf": "document",
|
||||
"scm": "code",
|
||||
"sh": "terminal",
|
||||
"bashrc": "terminal",
|
||||
"bash_profile": "terminal",
|
||||
"bash_aliases": "terminal",
|
||||
"bash_logout": "terminal",
|
||||
"profile": "terminal",
|
||||
"zshrc": "terminal",
|
||||
"zshenv": "terminal",
|
||||
"zsh_profile": "terminal",
|
||||
"zsh_aliases": "terminal",
|
||||
"zsh_histfile": "terminal",
|
||||
"zlogin": "terminal",
|
||||
"sql": "code",
|
||||
"svg": "image",
|
||||
"swift": "code",
|
||||
"tiff": "image",
|
||||
"toml": "toml",
|
||||
"ts": "typescript",
|
||||
"tsx": "code",
|
||||
"txt": "document",
|
||||
"wav": "audio",
|
||||
"webm": "video",
|
||||
"xls": "document",
|
||||
"xlsx": "document",
|
||||
"xml": "template",
|
||||
"yaml": "settings",
|
||||
"yml": "settings",
|
||||
"zsh": "terminal"
|
||||
},
|
||||
"types": {
|
||||
"audio": {
|
||||
"icon": "icons/file_icons/audio.svg"
|
||||
},
|
||||
"code": {
|
||||
"icon": "icons/file_icons/code.svg"
|
||||
},
|
||||
"collapsed_chevron": {
|
||||
"icon": "icons/file_icons/chevron_right.svg"
|
||||
},
|
||||
"collapsed_folder": {
|
||||
"icon": "icons/file_icons/folder.svg"
|
||||
},
|
||||
"default": {
|
||||
"icon": "icons/file_icons/file.svg"
|
||||
},
|
||||
"document": {
|
||||
"icon": "icons/file_icons/book.svg"
|
||||
},
|
||||
"eslint": {
|
||||
"icon": "icons/file_icons/eslint.svg"
|
||||
},
|
||||
"expanded_chevron": {
|
||||
"icon": "icons/file_icons/chevron_down.svg"
|
||||
},
|
||||
"expanded_folder": {
|
||||
"icon": "icons/file_icons/folder_open.svg"
|
||||
},
|
||||
"image": {
|
||||
"icon": "icons/file_icons/image.svg"
|
||||
},
|
||||
"lock": {
|
||||
"icon": "icons/file_icons/lock.svg"
|
||||
},
|
||||
"log": {
|
||||
"icon": "icons/file_icons/info.svg"
|
||||
},
|
||||
"prettier": {
|
||||
"icon": "icons/file_icons/prettier.svg"
|
||||
},
|
||||
"rust": {
|
||||
"icon": "icons/file_icons/rust.svg"
|
||||
},
|
||||
"settings": {
|
||||
"icon": "icons/file_icons/settings.svg"
|
||||
},
|
||||
"storage": {
|
||||
"icon": "icons/file_icons/database.svg"
|
||||
},
|
||||
"template": {
|
||||
"icon": "icons/file_icons/html.svg"
|
||||
},
|
||||
"terminal": {
|
||||
"icon": "icons/file_icons/terminal.svg"
|
||||
},
|
||||
"toml": {
|
||||
"icon": "icons/file_icons/toml.svg"
|
||||
},
|
||||
"typescript": {
|
||||
"icon": "icons/file_icons/typescript.svg"
|
||||
},
|
||||
"vcs": {
|
||||
"icon": "icons/file_icons/git.svg"
|
||||
},
|
||||
"video": {
|
||||
"icon": "icons/file_icons/video.svg"
|
||||
}
|
||||
}
|
||||
}
|
||||
4
assets/icons/file_icons/folder.svg
Normal file
|
After Width: | Height: | Size: 43 KiB |
4
assets/icons/file_icons/folder_open.svg
Normal file
@@ -0,0 +1,4 @@
|
||||
<svg width="15" height="14" viewBox="0 0 15 14" fill="none" xmlns="http://www.w3.org/2000/svg">
|
||||
<path d="M3.49165 6.13802C3.4991 5.86198 3.72386 5.64062 4 5.64062H13C13.2761 5.64062 13.4991 5.86198 13.4916 6.13802C13.4529 7.57407 13.2341 11.625 12 11.625H2C3.23412 11.625 3.45287 7.57407 3.49165 6.13802Z" fill="black" stroke="black" stroke-width="1.25" stroke-linejoin="round"/>
|
||||
<path d="M4.00781 11.625H2.42841C2.18186 11.625 1.97212 11.4453 1.93432 11.2017L0.651964 2.93603C0.604944 2.63296 0.839355 2.35938 1.14605 2.35938H4.6164C4.95332 2.35938 5.26759 2.52904 5.45244 2.81072L5.8125 3.35938H8.89008C9.37767 3.35938 9.79418 3.71103 9.87593 4.19171L10.125 5.65625" stroke="black" stroke-width="1.25" stroke-linecap="round" stroke-linejoin="round"/>
|
||||
</svg>
|
||||
|
After Width: | Height: | Size: 760 B |
6
assets/icons/file_icons/git.svg
Normal file
@@ -0,0 +1,6 @@
|
||||
<svg width="14" height="14" viewBox="0 0 14 14" fill="none" xmlns="http://www.w3.org/2000/svg">
|
||||
<circle cx="4" cy="10" r="2" stroke="black" stroke-width="1.25"/>
|
||||
<circle cx="10" cy="4" r="2" fill="black" fill-opacity="0.3" stroke="black" stroke-width="1.25"/>
|
||||
<line x1="3.625" y1="2.625" x2="3.625" y2="7.375" stroke="black" stroke-width="1.25" stroke-linecap="round"/>
|
||||
<path d="M10 6V6C10 8.20914 8.20914 10 6 10V10" stroke="black" stroke-width="1.25"/>
|
||||
</svg>
|
||||
|
After Width: | Height: | Size: 462 B |
6
assets/icons/file_icons/hash.svg
Normal file
@@ -0,0 +1,6 @@
|
||||
<svg width="14" height="14" viewBox="0 0 14 14" fill="none" xmlns="http://www.w3.org/2000/svg">
|
||||
<line x1="10.2795" y1="2.63847" x2="7.74785" y2="11.0142" stroke="black" stroke-width="1.25" stroke-linecap="round"/>
|
||||
<line x1="6.26624" y1="2.99597" x2="3.7346" y2="11.3717" stroke="black" stroke-width="1.25" stroke-linecap="round"/>
|
||||
<line x1="3.15982" y1="5.3799" x2="11.9098" y2="5.3799" stroke="black" stroke-width="1.25" stroke-linecap="round"/>
|
||||
<line x1="2.0983" y1="8.62407" x2="10.8483" y2="8.62407" stroke="black" stroke-width="1.25" stroke-linecap="round"/>
|
||||
</svg>
|
||||
|
After Width: | Height: | Size: 571 B |
5
assets/icons/file_icons/html.svg
Normal file
@@ -0,0 +1,5 @@
|
||||
<svg width="14" height="14" viewBox="0 0 14 14" fill="none" xmlns="http://www.w3.org/2000/svg">
|
||||
<path d="M8.15732 3.17108L5.84268 10.8289" stroke="black" stroke-width="1.25" stroke-linecap="round"/>
|
||||
<path d="M4 5L2 7L4 9" stroke="black" stroke-width="1.25" stroke-linecap="round" stroke-linejoin="round"/>
|
||||
<path d="M10 9L12 7L10 5" stroke="black" stroke-width="1.25" stroke-linecap="round" stroke-linejoin="round"/>
|
||||
</svg>
|
||||
|
After Width: | Height: | Size: 423 B |
7
assets/icons/file_icons/image.svg
Normal file
@@ -0,0 +1,7 @@
|
||||
<svg width="14" height="14" viewBox="0 0 14 14" fill="none" xmlns="http://www.w3.org/2000/svg">
|
||||
<path d="M6.5 3C6.91421 3 7.25 2.66421 7.25 2.25C7.25 1.83579 6.91421 1.5 6.5 1.5C6.08579 1.5 5.75 1.83579 5.75 2.25C5.75 2.66421 6.08579 3 6.5 3Z" fill="black" stroke="black" stroke-linecap="round" stroke-linejoin="round"/>
|
||||
<path d="M6 8L9 5L12 8H6Z" fill="black" fill-opacity="0.3"/>
|
||||
<path d="M2 10L5 7L7.375 9.375" stroke="black" stroke-width="1.25" stroke-linecap="round" stroke-linejoin="round"/>
|
||||
<path d="M6 8L7.5 6.5L9 5L10.5 6.5L12 8" stroke="black" stroke-width="1.25" stroke-linecap="round" stroke-linejoin="round"/>
|
||||
<path d="M3.375 2H2.5C2.22386 2 2 2.22386 2 2.5V11.5C2 11.7761 2.22386 12 2.5 12H7.35938M9.64062 2H11.5C11.7761 2 12 2.22386 12 2.5V11.5C12 11.7761 11.7761 12 11.5 12H10.125" stroke="black" stroke-width="1.25" stroke-linecap="round"/>
|
||||
</svg>
|
||||
|
After Width: | Height: | Size: 865 B |
5
assets/icons/file_icons/info.svg
Normal file
|
After Width: | Height: | Size: 46 KiB |
6
assets/icons/file_icons/lock.svg
Normal file
@@ -0,0 +1,6 @@
|
||||
<svg width="14" height="14" viewBox="0 0 14 14" fill="none" xmlns="http://www.w3.org/2000/svg">
|
||||
<rect x="3" y="5" width="8" height="7" rx="0.5" stroke="black" stroke-width="1.25"/>
|
||||
<path d="M4 4C4 2.89543 4.89543 2 6 2H8C9.10457 2 10 2.89543 10 4V5H4V4Z" stroke="black" stroke-opacity="0.6" stroke-width="1.25"/>
|
||||
<circle cx="7" cy="8" r="1" fill="black"/>
|
||||
<path d="M7 8V9.375" stroke="black" stroke-width="1.25" stroke-linecap="round"/>
|
||||
</svg>
|
||||
|
After Width: | Height: | Size: 444 B |
3
assets/icons/file_icons/magnifying_glass.svg
Normal file
@@ -0,0 +1,3 @@
|
||||
<svg width="14" height="14" viewBox="0 0 14 14" fill="none" xmlns="http://www.w3.org/2000/svg">
|
||||
<path d="M12 12L9.41379 9.41379M2 6.31034C2 3.92981 3.92981 2 6.31034 2C8.6909 2 10.6207 3.92981 10.6207 6.31034C10.6207 8.6909 8.6909 10.6207 6.31034 10.6207C3.92981 10.6207 2 8.6909 2 6.31034Z" stroke="black" stroke-width="1.25" stroke-linecap="round" stroke-linejoin="round"/>
|
||||
</svg>
|
||||
|
After Width: | Height: | Size: 383 B |
8
assets/icons/file_icons/notebook.svg
Normal file
@@ -0,0 +1,8 @@
|
||||
<svg width="14" height="14" viewBox="0 0 14 14" fill="none" xmlns="http://www.w3.org/2000/svg">
|
||||
<path d="M2.03125 2.96875C2.03125 2.41647 2.47897 1.96875 3.03125 1.96875H5V12H3.03125C2.47897 12 2.03125 11.5523 2.03125 11V2.96875Z" fill="black" fill-opacity="0.3"/>
|
||||
<rect x="2" y="2" width="10" height="10" rx="0.5" stroke="black" stroke-width="1.25"/>
|
||||
<path d="M9.5 5L7.5 5" stroke="black" stroke-width="1.25" stroke-linecap="round" stroke-linejoin="round"/>
|
||||
<path d="M9.5 7H7.5" stroke="black" stroke-opacity="0.6" stroke-width="1.25" stroke-linecap="round" stroke-linejoin="round"/>
|
||||
<path d="M9.5 9H7.5" stroke="black" stroke-opacity="0.3" stroke-width="1.25" stroke-linecap="round" stroke-linejoin="round"/>
|
||||
<path d="M5 2V13" stroke="black" stroke-width="1.25" stroke-linecap="round" stroke-linejoin="round"/>
|
||||
</svg>
|
||||
|
After Width: | Height: | Size: 820 B |
4
assets/icons/file_icons/package.svg
Normal file
@@ -0,0 +1,4 @@
|
||||
<svg width="14" height="14" viewBox="0 0 14 14" fill="none" xmlns="http://www.w3.org/2000/svg">
|
||||
<path d="M1.62677 3.88472L6.99983 6.78517M1.62677 3.88472L1.63137 9.90006L7.00442 12.8005M1.62677 3.88472L4.31117 2.54211M6.99983 6.78517L7.00442 12.8005M6.99983 6.78517L9.68414 5.33084M7.00442 12.8005L12.373 9.89186L12.3684 3.87652M4.31117 2.54211L6.99556 1.1995L12.3684 3.87652M4.31117 2.54211L9.68414 5.33084M12.3684 3.87652L9.68414 5.33084" stroke="black" stroke-width="1.25" stroke-linecap="round" stroke-linejoin="round"/>
|
||||
<path d="M7.03125 12.5625V6.78125L1.5625 3.9375V9.75L7.03125 12.5625Z" fill="black" fill-opacity="0.3"/>
|
||||
</svg>
|
||||
|
After Width: | Height: | Size: 637 B |
3
assets/icons/file_icons/plus.svg
Normal file
@@ -0,0 +1,3 @@
|
||||
<svg width="14" height="14" viewBox="0 0 14 14" fill="none" xmlns="http://www.w3.org/2000/svg">
|
||||
<path d="M7 3V11M11 7H3" stroke="black" stroke-width="1.25" stroke-linecap="round"/>
|
||||
</svg>
|
||||
|
After Width: | Height: | Size: 188 B |
12
assets/icons/file_icons/prettier.svg
Normal file
@@ -0,0 +1,12 @@
|
||||
<svg width="14" height="14" viewBox="0 0 14 14" fill="none" xmlns="http://www.w3.org/2000/svg">
|
||||
<path d="M2 2.86328H8.51563" stroke="black" stroke-width="1.25" stroke-linecap="round"/>
|
||||
<path d="M11 2.86328L12 2.86328" stroke="black" stroke-opacity="0.3" stroke-width="1.25" stroke-linecap="round"/>
|
||||
<path d="M9.64062 5.6263L12 5.6263" stroke="black" stroke-opacity="0.6" stroke-width="1.25" stroke-linecap="round"/>
|
||||
<path d="M4.79688 5.6263L7.15625 5.6263" stroke="black" stroke-width="1.25" stroke-linecap="round"/>
|
||||
<path d="M2 5.6263L2.35937 5.6263" stroke="black" stroke-opacity="0.3" stroke-width="1.25" stroke-linecap="round"/>
|
||||
<path d="M7.15625 8.3737L12 8.3737" stroke="black" stroke-width="1.25" stroke-linecap="round"/>
|
||||
<path d="M2 8.3737L4.64062 8.3737" stroke="black" stroke-opacity="0.6" stroke-width="1.25" stroke-linecap="round"/>
|
||||
<path d="M2 11.1094H3.54687" stroke="black" stroke-width="1.25" stroke-linecap="round"/>
|
||||
<path d="M5.97656 11.1094H8.35938" stroke="black" stroke-width="1.25" stroke-linecap="round"/>
|
||||
<path d="M10.8203 11.1094L12 11.1094" stroke="black" stroke-opacity="0.3" stroke-width="1.25" stroke-linecap="round"/>
|
||||
</svg>
|
||||
|
After Width: | Height: | Size: 1.1 KiB |
5
assets/icons/file_icons/project.svg
Normal file
@@ -0,0 +1,5 @@
|
||||
<svg width="14" height="14" viewBox="0 0 14 14" fill="none" xmlns="http://www.w3.org/2000/svg">
|
||||
<path d="M2.03125 2V2.03125M2.03125 8C2.03125 10 5 10 5 10M2.03125 8V2.03125M2.03125 8L2.03125 11M2.03125 2.03125C2.03125 4 5 4 5 4" stroke="black" stroke-width="1.25" stroke-linecap="round" stroke-linejoin="round"/>
|
||||
<rect x="7.375" y="2.375" width="4.25" height="3.25" rx="1.125" fill="black" fill-opacity="0.33" stroke="black" stroke-width="1.25"/>
|
||||
<rect x="7.375" y="8.375" width="4.25" height="3.25" rx="1.125" fill="black" fill-opacity="0.33" stroke="black" stroke-width="1.25"/>
|
||||
</svg>
|
||||
|
After Width: | Height: | Size: 588 B |
11
assets/icons/file_icons/replace.svg
Normal file
@@ -0,0 +1,11 @@
|
||||
<svg width="14" height="14" viewBox="0 0 14 14" fill="none" xmlns="http://www.w3.org/2000/svg">
|
||||
<path d="M7 12C4.97279 12 3.22735 10.7936 2.4425 9.0595M7 2C9.11228 2 10.9186 3.30981 11.6512 5.16152" stroke="black" stroke-width="1.25" stroke-linecap="round" stroke-linejoin="round"/>
|
||||
<circle cx="1.65625" cy="1.67188" r="0.625" fill="black" fill-opacity="0.5"/>
|
||||
<circle cx="3.71094" cy="1.67188" r="0.625" fill="black" fill-opacity="0.5"/>
|
||||
<circle cx="4.96094" cy="3.36719" r="0.625" fill="black" fill-opacity="0.5"/>
|
||||
<circle cx="3.71094" cy="4.79688" r="0.625" fill="black" fill-opacity="0.5"/>
|
||||
<circle cx="4.60156" cy="6.67188" r="0.625" fill="black" fill-opacity="0.5"/>
|
||||
<circle cx="1.65625" cy="4.17188" r="0.625" fill="black" fill-opacity="0.5"/>
|
||||
<circle cx="1.65625" cy="6.67188" r="0.625" fill="black" fill-opacity="0.5"/>
|
||||
<path d="M10.7802 10.8195C10.838 10.8195 10.8906 10.8527 10.9155 10.9048L11.7174 12.5811C11.8088 12.7721 12.0017 12.8938 12.2135 12.8938H12.3394C12.7483 12.8938 13.0142 12.4635 12.8314 12.0978L12.1619 10.7589C12.1232 10.6816 12.1582 10.5823 12.241 10.5349C12.7565 10.2397 13.0695 9.66858 13.0695 9.00391C13.0695 8.43361 12.8777 7.97006 12.5248 7.64951C12.1725 7.3295 11.6652 7.15703 11.043 7.15703H9.49609C9.19234 7.15703 8.94609 7.40327 8.94609 7.70703V12.3438C8.94609 12.6475 9.19234 12.8938 9.49609 12.8938H9.60156C9.90532 12.8938 10.1516 12.6475 10.1516 12.3438V10.9695C10.1516 10.8867 10.2187 10.8195 10.3016 10.8195H10.7802ZM10.1516 8.31328C10.1516 8.23044 10.2187 8.16328 10.3016 8.16328H10.8984C11.2023 8.16328 11.4371 8.2449 11.5954 8.38814C11.7529 8.5308 11.8406 8.73993 11.8406 9.00781C11.8406 9.28155 11.751 9.49461 11.5909 9.63971C11.4302 9.7854 11.1925 9.86797 10.8867 9.86797H10.3016C10.2187 9.86797 10.1516 9.80081 10.1516 9.71797V8.31328Z" fill="black" stroke="black" stroke-width="0.1"/>
|
||||
</svg>
|
||||
|
After Width: | Height: | Size: 1.8 KiB |
5
assets/icons/file_icons/replace_all.svg
Normal file
@@ -0,0 +1,5 @@
|
||||
<svg width="14" height="14" viewBox="0 0 14 14" fill="none" xmlns="http://www.w3.org/2000/svg">
|
||||
<path d="M4.10517 5.8012C4.07193 5.73172 4.00176 5.6875 3.92475 5.6875H3.44609C3.33564 5.6875 3.24609 5.77704 3.24609 5.8875V7.26172C3.24609 7.53786 3.02224 7.76172 2.74609 7.76172H2.64062C2.36448 7.76172 2.14062 7.53786 2.14062 7.26172V2.625C2.14062 2.34886 2.36448 2.125 2.64062 2.125H4.1875C5.41406 2.125 6.16406 2.80469 6.16406 3.92188C6.16406 4.57081 5.85885 5.12418 5.36073 5.40943C5.25888 5.46775 5.20921 5.59421 5.2617 5.69918L5.93117 7.03811C6.09739 7.37056 5.85564 7.76172 5.48395 7.76172H5.35806C5.16552 7.76172 4.99009 7.65117 4.907 7.47748L4.10517 5.8012ZM3.44609 3.03125C3.33564 3.03125 3.24609 3.12079 3.24609 3.23125V4.63594C3.24609 4.74639 3.33564 4.83594 3.44609 4.83594H4.03125C4.66016 4.83594 5.03516 4.49609 5.03516 3.92578C5.03516 3.36719 4.66797 3.03125 4.04297 3.03125H3.44609Z" fill="black" fill-opacity="0.5"/>
|
||||
<path d="M3.92475 5.7375C3.98251 5.7375 4.03514 5.77067 4.06006 5.82277L4.8619 7.49905C4.95329 7.69011 5.14627 7.81172 5.35806 7.81172H5.48395C5.89281 7.81172 6.15873 7.38145 5.97589 7.01575L5.30642 5.67682C5.26778 5.59953 5.30269 5.50028 5.38557 5.45282C5.90107 5.15762 6.21406 4.58655 6.21406 3.92188C6.21406 3.35158 6.02226 2.88803 5.66936 2.56748C5.31705 2.24747 4.80973 2.075 4.1875 2.075H2.64062C2.33687 2.075 2.09062 2.32124 2.09062 2.625V7.26172C2.09062 7.56548 2.33687 7.81172 2.64062 7.81172H2.74609C3.04985 7.81172 3.29609 7.56548 3.29609 7.26172V5.8875C3.29609 5.80466 3.36325 5.7375 3.44609 5.7375H3.92475ZM3.29609 3.23125C3.29609 3.14841 3.36325 3.08125 3.44609 3.08125H4.04297C4.34688 3.08125 4.58164 3.16287 4.73988 3.30611C4.89748 3.44876 4.98516 3.6579 4.98516 3.92578C4.98516 4.19952 4.89553 4.41258 4.73546 4.55768C4.57475 4.70337 4.33706 4.78594 4.03125 4.78594H3.44609C3.36325 4.78594 3.29609 4.71878 3.29609 4.63594V3.23125Z" stroke="black" stroke-opacity="0.5" stroke-width="0.1"/>
|
||||
<path d="M9.32812 6.65625V9.32812M9.32812 12V9.32812M12 9.32812H9.32812M6.65625 9.32812H9.32812M9.32812 9.32812L11.1094 7.54688M9.32812 9.32812L7.54688 11.1094M9.32812 9.32812L11.1094 11.1094M9.32812 9.32812L7.54688 7.54688" stroke="black" stroke-width="1.25" stroke-linecap="round"/>
|
||||
</svg>
|
||||
|
After Width: | Height: | Size: 2.2 KiB |
5
assets/icons/file_icons/replace_next.svg
Normal file
@@ -0,0 +1,5 @@
|
||||
<svg width="14" height="14" viewBox="0 0 14 14" fill="none" xmlns="http://www.w3.org/2000/svg">
|
||||
<path d="M3.96454 5.6762C3.93131 5.60672 3.86114 5.5625 3.78412 5.5625H3.30547C3.19501 5.5625 3.10547 5.65204 3.10547 5.7625V7.13672C3.10547 7.41286 2.88161 7.63672 2.60547 7.63672H2.5C2.22386 7.63672 2 7.41286 2 7.13672V2.5C2 2.22386 2.22386 2 2.5 2H4.04688C5.27344 2 6.02344 2.67969 6.02344 3.79688C6.02344 4.44581 5.71823 4.99918 5.2201 5.28443C5.11826 5.34275 5.06859 5.46921 5.12107 5.57418L5.79054 6.91311C5.95677 7.24556 5.71502 7.63672 5.34333 7.63672H5.21743C5.02489 7.63672 4.84946 7.52617 4.76638 7.35248L3.96454 5.6762ZM3.30547 2.90625C3.19501 2.90625 3.10547 2.99579 3.10547 3.10625V4.51094C3.10547 4.62139 3.19501 4.71094 3.30547 4.71094H3.89062C4.51953 4.71094 4.89453 4.37109 4.89453 3.80078C4.89453 3.24219 4.52734 2.90625 3.90234 2.90625H3.30547Z" fill="black" fill-opacity="0.5"/>
|
||||
<path d="M3.78412 5.6125C3.84188 5.6125 3.89451 5.64567 3.91944 5.69777L4.72127 7.37405C4.81266 7.56511 5.00564 7.68672 5.21743 7.68672H5.34333C5.75219 7.68672 6.01811 7.25645 5.83526 6.89075L5.1658 5.55182C5.12715 5.47453 5.16207 5.37528 5.24495 5.32782C5.76044 5.03262 6.07344 4.46155 6.07344 3.79688C6.07344 3.22658 5.88164 2.76303 5.52873 2.44248C5.17642 2.12247 4.6691 1.95 4.04688 1.95H2.5C2.19624 1.95 1.95 2.19624 1.95 2.5V7.13672C1.95 7.44048 2.19624 7.68672 2.5 7.68672H2.60547C2.90923 7.68672 3.15547 7.44048 3.15547 7.13672V5.7625C3.15547 5.67966 3.22263 5.6125 3.30547 5.6125H3.78412ZM3.15547 3.10625C3.15547 3.02341 3.22263 2.95625 3.30547 2.95625H3.90234C4.20626 2.95625 4.44101 3.03787 4.59926 3.18111C4.75686 3.32376 4.84453 3.5329 4.84453 3.80078C4.84453 4.07452 4.75491 4.28758 4.59484 4.43268C4.43413 4.57837 4.19643 4.66094 3.89062 4.66094H3.30547C3.22263 4.66094 3.15547 4.59378 3.15547 4.51094V3.10625Z" stroke="black" stroke-opacity="0.5" stroke-width="0.1"/>
|
||||
<path d="M7.5 5.88672C9.433 5.88672 11 7.45372 11 9.38672V12M11 12L13 10M11 12L9 10" stroke="black" stroke-width="1.25" stroke-linecap="round" stroke-linejoin="round"/>
|
||||
</svg>
|
||||
|
After Width: | Height: | Size: 2.0 KiB |
4
assets/icons/file_icons/rust.svg
Normal file
@@ -0,0 +1,4 @@
|
||||
<svg width="14" height="14" viewBox="0 0 14 14" fill="none" xmlns="http://www.w3.org/2000/svg">
|
||||
<path d="M4.27935 9.98207C4.32063 9.4038 3.9204 8.89049 3.35998 8.80276L2.60081 8.68387C2.37979 8.64945 2.20167 8.48001 2.15225 8.25614L2.01378 7.63511C1.96382 7.41235 2.05233 7.1807 2.23696 7.05125L2.8631 6.61242C3.33337 6.28297 3.47456 5.6369 3.18621 5.13364L2.79467 4.45092C2.68118 4.25261 2.69801 4.00374 2.83757 3.82321L3.22314 3.32436C3.3627 3.14438 3.59621 3.06994 3.81071 3.13772L4.57531 3.37769C5.11944 3.54879 5.70048 3.26159 5.90683 2.71886L6.1811 1.99782C6.26255 1.78395 6.46345 1.64285 6.68772 1.6423L7.31007 1.64063C7.53434 1.64007 7.73579 1.78006 7.81834 1.99337L8.09965 2.72275C8.30821 3.26214 8.88655 3.54712 9.42903 3.37714L10.1632 3.14716C10.3772 3.07994 10.6096 3.15382 10.7492 3.3327L11.1374 3.83099" stroke="black" stroke-opacity="0.6" stroke-width="1.25" stroke-linecap="round" stroke-linejoin="round"/>
|
||||
<path fill-rule="evenodd" clip-rule="evenodd" d="M7.76988 10.5933C7.76988 10.6595 7.8236 10.7133 7.88988 10.7133H7.97588C8.32602 10.7133 8.60988 10.9971 8.60988 11.3472C8.60988 11.6974 8.32602 11.9812 7.97588 11.9812H6.05587C5.70573 11.9812 5.42188 11.6974 5.42188 11.3472C5.42188 10.9971 5.70573 10.7133 6.05587 10.7133H6.14188C6.20815 10.7133 6.26188 10.6595 6.26188 10.5933V6.66925C6.26188 6.60298 6.20815 6.54925 6.14188 6.54925H6.05588C5.70573 6.54925 5.42188 6.2654 5.42188 5.91525C5.42188 5.5651 5.70573 5.28125 6.05588 5.28125H8.89988C10.0518 5.28125 11.8619 5.71487 11.8619 7.15185C11.8619 7.67078 11.7284 8.10362 11.4642 8.45348C11.1981 8.79765 10.8458 9.05637 10.4056 9.22931C10.3782 9.24007 10.3673 9.27304 10.3829 9.29801L11.2163 10.6342C11.247 10.6834 11.3008 10.7133 11.3588 10.7133H11.7319C12.082 10.7133 12.3659 10.9971 12.3659 11.3472C12.3659 11.6974 12.082 11.9812 11.7319 11.9812H10.5637C10.4955 11.9812 10.432 11.9465 10.3952 11.889L8.96523 9.65406C8.92847 9.59661 8.86496 9.56185 8.79676 9.56185H7.96988C7.85942 9.56185 7.76988 9.65139 7.76988 9.76185V10.5933ZM8.61188 6.54925C9.02963 6.54925 10.125 6.54925 10.2339 7.18785C10.2975 7.56123 10.1181 7.86557 9.88118 8.07715C9.64227 8.29046 9.20527 8.38985 8.58788 8.38985H7.86988C7.81465 8.38985 7.76988 8.34508 7.76988 8.28985V6.64925C7.76988 6.59402 7.81465 6.54925 7.86988 6.54925H8.61188Z" fill="black"/>
|
||||
</svg>
|
||||
|
After Width: | Height: | Size: 2.3 KiB |
4
assets/icons/file_icons/settings.svg
Normal file
@@ -0,0 +1,4 @@
|
||||
<svg width="14" height="14" viewBox="0 0 14 14" fill="none" xmlns="http://www.w3.org/2000/svg">
|
||||
<path d="M2.60081 8.94324L3.35998 9.06214C3.9204 9.14986 4.32063 9.66317 4.27935 10.2414L4.22342 11.0252C4.20713 11.2536 4.32877 11.4686 4.53024 11.568L5.09174 11.8446C5.29321 11.9441 5.53379 11.9068 5.69834 11.7519L6.26255 11.2186C6.67855 10.8253 7.32041 10.8253 7.7369 11.2186L8.3011 11.7519C8.46565 11.9074 8.70572 11.9441 8.90772 11.8446L9.47027 11.5674C9.67124 11.4686 9.79234 11.2541 9.77607 11.0264L9.72007 10.2414C9.67883 9.66317 10.079 9.14986 10.6394 9.06214L11.3986 8.94324C11.6197 8.90883 11.7978 8.73938 11.8477 8.51607L11.9862 7.89504C12.0362 7.67172 11.9477 7.44007 11.763 7.31117L11.1293 6.86731C10.6617 6.53959 10.5189 5.89966 10.8013 5.3969L11.1841 4.71586C11.2954 4.51754 11.277 4.26923 11.1374 4.09036L10.7492 3.59207C10.6096 3.41319 10.3772 3.33932 10.1632 3.40653L9.42903 3.63651C8.88655 3.80649 8.30821 3.52152 8.09965 2.98213L7.81834 2.25275C7.73579 2.03944 7.53434 1.89945 7.31007 1.9L6.68772 1.90167C6.46345 1.90222 6.26255 2.04333 6.1811 2.25719L5.90683 2.97824C5.70048 3.52097 5.11944 3.80816 4.57531 3.63706L3.81071 3.39709C3.59621 3.32932 3.3627 3.40375 3.22314 3.58374L2.83757 4.08258C2.69801 4.26312 2.68118 4.51199 2.79467 4.7103L3.18621 5.39302C3.47456 5.89628 3.33337 6.54235 2.8631 6.87179L2.23696 7.31062C2.05233 7.44007 1.96382 7.67173 2.01378 7.89448L2.15225 8.51552C2.20167 8.73938 2.37979 8.90883 2.60081 8.94324Z" stroke="black" stroke-width="1.25" stroke-linecap="round" stroke-linejoin="round"/>
|
||||
<path d="M8.14913 5.85093L8.14909 5.85089C7.51453 5.21637 6.48549 5.21637 5.85092 5.85089L5.85089 5.85092C5.21637 6.48549 5.21637 7.51453 5.85089 8.14909L5.85093 8.14913C6.48549 8.78362 7.51452 8.78362 8.14908 8.14913L8.14913 8.14908C8.78362 7.51452 8.78362 6.48549 8.14913 5.85093Z" fill="black" fill-opacity="0.3" stroke="black" stroke-width="1.25" stroke-linecap="round" stroke-linejoin="round"/>
|
||||
</svg>
|
||||
|
After Width: | Height: | Size: 1.9 KiB |
5
assets/icons/file_icons/terminal.svg
Normal file
@@ -0,0 +1,5 @@
|
||||
<svg width="14" height="14" viewBox="0 0 14 14" fill="none" xmlns="http://www.w3.org/2000/svg">
|
||||
<path d="M1.65625 2.5C1.65625 2.22386 1.88011 2 2.15625 2H11.8437C12.1199 2 12.3438 2.22386 12.3438 2.5V11.5C12.3438 11.7761 12.1199 12 11.8437 12H2.15625C1.88011 12 1.65625 11.7761 1.65625 11.5V2.5Z" stroke="black" stroke-width="1.25"/>
|
||||
<path d="M4.375 9L6.375 7L4.375 5" stroke="black" stroke-width="1.25" stroke-linecap="round" stroke-linejoin="round"/>
|
||||
<path d="M7.625 9L9.90625 9" stroke="black" stroke-width="1.25" stroke-linecap="round"/>
|
||||
</svg>
|
||||
|
After Width: | Height: | Size: 549 B |
5
assets/icons/file_icons/toml.svg
Normal file
@@ -0,0 +1,5 @@
|
||||
<svg width="14" height="14" viewBox="0 0 14 14" fill="none" xmlns="http://www.w3.org/2000/svg">
|
||||
<path d="M5 5H9" stroke="black" stroke-width="1.25" stroke-linecap="round"/>
|
||||
<path d="M7 5L7 10" stroke="black" stroke-width="1.25" stroke-linecap="round"/>
|
||||
<path d="M4 2H2.5C2.22386 2 2 2.22386 2 2.5V11.5C2 11.7761 2.22386 12 2.5 12H4M10 2H11.5C11.7761 2 12 2.22386 12 2.5V11.5C12 11.7761 11.7761 12 11.5 12H10" stroke="black" stroke-opacity="0.6" stroke-width="1.25" stroke-linecap="round"/>
|
||||
</svg>
|
||||
|
After Width: | Height: | Size: 497 B |
5
assets/icons/file_icons/typescript.svg
Normal file
@@ -0,0 +1,5 @@
|
||||
<svg width="14" height="14" viewBox="0 0 14 14" fill="none" xmlns="http://www.w3.org/2000/svg">
|
||||
<path d="M12 4.375V2.5C12 2.22386 11.7761 2 11.5 2H2.5C2.22386 2 2 2.22386 2 2.5V11.5C2 11.7761 2.22386 12 2.5 12H3.375" stroke="black" stroke-opacity="0.6" stroke-width="1.25" stroke-linecap="round"/>
|
||||
<path d="M10.6836 7.82805C10.7933 7.65392 10.9823 7.57377 11.174 7.57377C11.2904 7.57377 11.4019 7.59384 11.5092 7.62792C11.8324 7.73069 12.2148 7.63925 12.3392 7.32368L12.3773 7.22707C12.4703 6.99131 12.3823 6.71761 12.1522 6.61154C11.8328 6.46436 11.4984 6.375 11.1262 6.375C9.87708 6.375 8.91935 7.60671 9.4239 8.84869C9.54205 9.13951 9.74219 9.36166 9.9515 9.54337C10.1061 9.6776 10.2858 9.80516 10.4475 9.92002C10.4972 9.95529 10.5452 9.98936 10.5903 10.0221C11.0283 10.34 11.2526 10.5876 11.2526 10.9466C11.2526 11.1518 11.1622 11.3133 11.016 11.4128C10.8777 11.5071 10.7055 11.5357 10.5454 11.5222C10.3931 11.5093 10.2529 11.4717 10.1214 11.4196C9.81633 11.2989 9.45533 11.4015 9.33641 11.7073L9.2814 11.8487C9.19162 12.0796 9.2749 12.3463 9.49799 12.4539C10.0894 12.7391 10.7377 12.8279 11.3915 12.5872C12.0569 12.3423 12.595 11.7708 12.595 10.9068C12.595 10.1301 12.1336 9.69583 11.6966 9.36109C11.606 9.29163 11.5259 9.23292 11.4493 9.17682C11.3259 9.08638 11.1964 8.99109 11.0734 8.88536C10.8937 8.73082 10.7518 8.57274 10.6595 8.38613C10.5746 8.21464 10.5815 7.99013 10.6836 7.82805Z" fill="black"/>
|
||||
<path d="M6.98644 7.70936H7.69396C7.98162 7.70936 8.21481 7.47617 8.21481 7.18851V7.02346C8.21481 6.73581 7.98162 6.50261 7.69396 6.50261H4.96848C4.68082 6.50261 4.44763 6.73581 4.44763 7.02346V7.18851C4.44763 7.47617 4.68082 7.70936 4.96848 7.70936H5.676V12.102C5.676 12.3896 5.90919 12.6228 6.19685 12.6228H6.46559C6.75325 12.6228 6.98644 12.3896 6.98644 12.102V7.70936Z" fill="black"/>
|
||||
</svg>
|
||||
|
After Width: | Height: | Size: 1.8 KiB |
4
assets/icons/file_icons/video.svg
Normal file
@@ -0,0 +1,4 @@
|
||||
<svg width="14" height="14" viewBox="0 0 14 14" fill="none" xmlns="http://www.w3.org/2000/svg">
|
||||
<path d="M1.65625 2H11.8437C12.1199 2 12.3438 2.22386 12.3438 2.5V9.34375M12.3438 12H2.15625C1.88011 12 1.65625 11.7761 1.65625 11.5V4.65625" stroke="black" stroke-width="1.25" stroke-linecap="round"/>
|
||||
<path d="M9 7.01562L5.65624 9.3125L5.65624 4.6875L9 7.01562Z" fill="black" fill-opacity="0.3" stroke="black" stroke-width="1.25" stroke-linecap="round" stroke-linejoin="round"/>
|
||||
</svg>
|
||||
|
After Width: | Height: | Size: 483 B |
4
assets/icons/radix/maximize.svg
Normal file
@@ -0,0 +1,4 @@
|
||||
<svg width="15" height="15" viewBox="0 0 15 15" fill="none" xmlns="http://www.w3.org/2000/svg">
|
||||
<path d="M9.5 1.5H13.5M13.5 1.5V5.5M13.5 1.5C12.1332 2.86683 10.3668 4.63317 9 6" stroke="white" stroke-linecap="round"/>
|
||||
<path d="M1.5 9.5V13.5M1.5 13.5L6 9M1.5 13.5H5.5" stroke="white" stroke-linecap="round"/>
|
||||
</svg>
|
||||
|
After Width: | Height: | Size: 315 B |
4
assets/icons/radix/minimize.svg
Normal file
@@ -0,0 +1,4 @@
|
||||
<svg width="15" height="15" viewBox="0 0 15 15" fill="none" xmlns="http://www.w3.org/2000/svg">
|
||||
<path d="M13 6L9 6M9 6L9 2M9 6C10.3668 4.63316 12.1332 2.86683 13.5 1.5" stroke="white" stroke-linecap="round"/>
|
||||
<path d="M6 13L6 9M6 9L1.5 13.5M6 9L2 9" stroke="white" stroke-linecap="round"/>
|
||||
</svg>
|
||||
|
After Width: | Height: | Size: 297 B |
@@ -9,6 +9,7 @@
|
||||
"context": "Editor",
|
||||
"bindings": {
|
||||
"cmd-b": "editor::GoToDefinition",
|
||||
"alt-cmd-b": "editor::GoToDefinitionSplit",
|
||||
"cmd-<": "editor::ScrollCursorCenter",
|
||||
"cmd-g": [
|
||||
"editor::SelectNext",
|
||||
|
||||
@@ -13,6 +13,7 @@
|
||||
"cmd-up": "menu::SelectFirst",
|
||||
"cmd-down": "menu::SelectLast",
|
||||
"enter": "menu::Confirm",
|
||||
"cmd-enter": "menu::SecondaryConfirm",
|
||||
"escape": "menu::Cancel",
|
||||
"ctrl-c": "menu::Cancel",
|
||||
"cmd-{": "pane::ActivatePrevItem",
|
||||
@@ -39,6 +40,7 @@
|
||||
"cmd-shift-n": "workspace::NewWindow",
|
||||
"cmd-o": "workspace::Open",
|
||||
"alt-cmd-o": "projects::OpenRecent",
|
||||
"alt-cmd-b": "branches::OpenRecent",
|
||||
"ctrl-~": "workspace::NewTerminal",
|
||||
"ctrl-`": "terminal_panel::ToggleFocus",
|
||||
"shift-escape": "workspace::ToggleZoom"
|
||||
@@ -193,8 +195,8 @@
|
||||
{
|
||||
"context": "Editor && mode == auto_height",
|
||||
"bindings": {
|
||||
"alt-enter": "editor::Newline",
|
||||
"cmd-alt-enter": "editor::NewlineBelow"
|
||||
"ctrl-enter": "editor::Newline",
|
||||
"ctrl-shift-enter": "editor::NewlineBelow"
|
||||
}
|
||||
},
|
||||
{
|
||||
@@ -220,7 +222,8 @@
|
||||
"escape": "buffer_search::Dismiss",
|
||||
"tab": "buffer_search::FocusEditor",
|
||||
"enter": "search::SelectNextMatch",
|
||||
"shift-enter": "search::SelectPrevMatch"
|
||||
"shift-enter": "search::SelectPrevMatch",
|
||||
"alt-enter": "search::SelectAllMatches"
|
||||
}
|
||||
},
|
||||
{
|
||||
@@ -241,6 +244,7 @@
|
||||
"cmd-f": "project_search::ToggleFocus",
|
||||
"cmd-g": "search::SelectNextMatch",
|
||||
"cmd-shift-g": "search::SelectPrevMatch",
|
||||
"alt-enter": "search::SelectAllMatches",
|
||||
"alt-cmd-c": "search::ToggleCaseSensitive",
|
||||
"alt-cmd-w": "search::ToggleWholeWord",
|
||||
"alt-cmd-r": "search::ToggleRegex"
|
||||
@@ -295,7 +299,9 @@
|
||||
"shift-f8": "editor::GoToPrevDiagnostic",
|
||||
"f2": "editor::Rename",
|
||||
"f12": "editor::GoToDefinition",
|
||||
"alt-f12": "editor::GoToDefinitionSplit",
|
||||
"cmd-f12": "editor::GoToTypeDefinition",
|
||||
"alt-cmd-f12": "editor::GoToTypeDefinitionSplit",
|
||||
"alt-shift-f12": "editor::FindAllReferences",
|
||||
"ctrl-m": "editor::MoveToEnclosingBracket",
|
||||
"alt-cmd-[": "editor::Fold",
|
||||
@@ -400,6 +406,7 @@
|
||||
"cmd-b": "workspace::ToggleLeftDock",
|
||||
"cmd-r": "workspace::ToggleRightDock",
|
||||
"cmd-j": "workspace::ToggleBottomDock",
|
||||
"alt-cmd-y": "workspace::CloseAllDocks",
|
||||
"cmd-shift-f": "workspace::NewSearch",
|
||||
"cmd-k cmd-t": "theme_selector::Toggle",
|
||||
"cmd-k cmd-s": "zed::OpenKeymap",
|
||||
@@ -439,8 +446,22 @@
|
||||
},
|
||||
{
|
||||
"bindings": {
|
||||
"cmd-k cmd-left": "workspace::ActivatePreviousPane",
|
||||
"cmd-k cmd-right": "workspace::ActivateNextPane"
|
||||
"cmd-k cmd-left": [
|
||||
"workspace::ActivatePaneInDirection",
|
||||
"Left"
|
||||
],
|
||||
"cmd-k cmd-right": [
|
||||
"workspace::ActivatePaneInDirection",
|
||||
"Right"
|
||||
],
|
||||
"cmd-k cmd-up": [
|
||||
"workspace::ActivatePaneInDirection",
|
||||
"Up"
|
||||
],
|
||||
"cmd-k cmd-down": [
|
||||
"workspace::ActivatePaneInDirection",
|
||||
"Down"
|
||||
]
|
||||
}
|
||||
},
|
||||
// Bindings from Atom
|
||||
@@ -506,8 +527,11 @@
|
||||
"cmd-alt-c": "project_panel::CopyPath",
|
||||
"alt-cmd-shift-c": "project_panel::CopyRelativePath",
|
||||
"f2": "project_panel::Rename",
|
||||
"enter": "project_panel::Rename",
|
||||
"space": "project_panel::Open",
|
||||
"backspace": "project_panel::Delete",
|
||||
"alt-cmd-r": "project_panel::RevealInFinder"
|
||||
"alt-cmd-r": "project_panel::RevealInFinder",
|
||||
"alt-shift-f": "project_panel::NewSearchInDirectory"
|
||||
}
|
||||
},
|
||||
{
|
||||
|
||||
@@ -46,8 +46,9 @@
|
||||
"alt-f7": "editor::FindAllReferences",
|
||||
"cmd-alt-f7": "editor::FindAllReferences",
|
||||
"cmd-b": "editor::GoToDefinition",
|
||||
"cmd-alt-b": "editor::GoToDefinition",
|
||||
"cmd-alt-b": "editor::GoToDefinitionSplit",
|
||||
"cmd-shift-b": "editor::GoToTypeDefinition",
|
||||
"cmd-alt-shift-b": "editor::GoToTypeDefinitionSplit",
|
||||
"alt-enter": "editor::ToggleCodeActions",
|
||||
"f2": "editor::GoToDiagnostic",
|
||||
"cmd-f2": "editor::GoToPrevDiagnostic",
|
||||
|
||||
@@ -20,6 +20,7 @@
|
||||
"cmd-shift-a": "editor::SelectLargerSyntaxNode",
|
||||
"shift-f12": "editor::FindAllReferences",
|
||||
"alt-cmd-down": "editor::GoToDefinition",
|
||||
"ctrl-alt-cmd-down": "editor::GoToDefinitionSplit",
|
||||
"alt-shift-cmd-down": "editor::FindAllReferences",
|
||||
"ctrl-.": "editor::GoToHunk",
|
||||
"ctrl-,": "editor::GoToPrevHunk",
|
||||
|
||||
@@ -2,6 +2,7 @@
|
||||
{
|
||||
"bindings": {
|
||||
"cmd-shift-o": "projects::OpenRecent",
|
||||
"cmd-shift-b": "branches::OpenRecent",
|
||||
"cmd-alt-tab": "project_panel::ToggleFocus"
|
||||
}
|
||||
},
|
||||
@@ -11,6 +12,7 @@
|
||||
"cmd-l": "go_to_line::Toggle",
|
||||
"ctrl-shift-d": "editor::DuplicateLine",
|
||||
"cmd-b": "editor::GoToDefinition",
|
||||
"alt-cmd-b": "editor::GoToDefinition",
|
||||
"cmd-j": "editor::ScrollCursorCenter",
|
||||
"cmd-shift-l": "editor::SelectLine",
|
||||
"cmd-shift-t": "outline::Toggle",
|
||||
|
||||
@@ -2,12 +2,6 @@
|
||||
{
|
||||
"context": "Editor && VimControl && !VimWaiting && !menu",
|
||||
"bindings": {
|
||||
"g": [
|
||||
"vim::PushOperator",
|
||||
{
|
||||
"Namespace": "G"
|
||||
}
|
||||
],
|
||||
"i": [
|
||||
"vim::PushOperator",
|
||||
{
|
||||
@@ -30,13 +24,18 @@
|
||||
"j": "vim::Down",
|
||||
"down": "vim::Down",
|
||||
"enter": "vim::NextLineStart",
|
||||
"tab": "vim::Tab",
|
||||
"shift-tab": "vim::Tab",
|
||||
"k": "vim::Up",
|
||||
"up": "vim::Up",
|
||||
"l": "vim::Right",
|
||||
"right": "vim::Right",
|
||||
"$": "vim::EndOfLine",
|
||||
"^": "vim::FirstNonWhitespace",
|
||||
"shift-g": "vim::EndOfDocument",
|
||||
"w": "vim::NextWordStart",
|
||||
"{": "vim::StartOfParagraph",
|
||||
"}": "vim::EndOfParagraph",
|
||||
"shift-w": [
|
||||
"vim::NextWordStart",
|
||||
{
|
||||
@@ -57,6 +56,8 @@
|
||||
"ignorePunctuation": true
|
||||
}
|
||||
],
|
||||
"n": "search::SelectNextMatch",
|
||||
"shift-n": "search::SelectPrevMatch",
|
||||
"%": "vim::Matching",
|
||||
"f": [
|
||||
"vim::PushOperator",
|
||||
@@ -92,8 +93,43 @@
|
||||
],
|
||||
"ctrl-o": "pane::GoBack",
|
||||
"ctrl-]": "editor::GoToDefinition",
|
||||
"escape": "editor::Cancel",
|
||||
"escape": [
|
||||
"vim::SwitchMode",
|
||||
"Normal"
|
||||
],
|
||||
"ctrl+[": [
|
||||
"vim::SwitchMode",
|
||||
"Normal"
|
||||
],
|
||||
"*": "vim::MoveToNext",
|
||||
"#": "vim::MoveToPrev",
|
||||
"0": "vim::StartOfLine", // When no number operator present, use start of line motion
|
||||
// "g" commands
|
||||
"g g": "vim::StartOfDocument",
|
||||
"g h": "editor::Hover",
|
||||
"g t": "pane::ActivateNextItem",
|
||||
"g shift-t": "pane::ActivatePrevItem",
|
||||
"g d": "editor::GoToDefinition",
|
||||
"g shift-d": "editor::GoToTypeDefinition",
|
||||
"g .": "editor::ToggleCodeActions", // zed specific
|
||||
"g shift-a": "editor::FindAllReferences", // zed specific
|
||||
"g *": [
|
||||
"vim::MoveToNext",
|
||||
{
|
||||
"partialWord": true
|
||||
}
|
||||
],
|
||||
"g #": [
|
||||
"vim::MoveToPrev",
|
||||
{
|
||||
"partialWord": true
|
||||
}
|
||||
],
|
||||
// z commands
|
||||
"z t": "editor::ScrollCursorTop",
|
||||
"z z": "editor::ScrollCursorCenter",
|
||||
"z b": "editor::ScrollCursorBottom",
|
||||
// Count support
|
||||
"1": [
|
||||
"vim::Number",
|
||||
1
|
||||
@@ -129,7 +165,75 @@
|
||||
"9": [
|
||||
"vim::Number",
|
||||
9
|
||||
]
|
||||
],
|
||||
// window related commands (ctrl-w X)
|
||||
"ctrl-w left": [
|
||||
"workspace::ActivatePaneInDirection",
|
||||
"Left"
|
||||
],
|
||||
"ctrl-w right": [
|
||||
"workspace::ActivatePaneInDirection",
|
||||
"Right"
|
||||
],
|
||||
"ctrl-w up": [
|
||||
"workspace::ActivatePaneInDirection",
|
||||
"Up"
|
||||
],
|
||||
"ctrl-w down": [
|
||||
"workspace::ActivatePaneInDirection",
|
||||
"Down"
|
||||
],
|
||||
"ctrl-w h": [
|
||||
"workspace::ActivatePaneInDirection",
|
||||
"Left"
|
||||
],
|
||||
"ctrl-w l": [
|
||||
"workspace::ActivatePaneInDirection",
|
||||
"Right"
|
||||
],
|
||||
"ctrl-w k": [
|
||||
"workspace::ActivatePaneInDirection",
|
||||
"Up"
|
||||
],
|
||||
"ctrl-w j": [
|
||||
"workspace::ActivatePaneInDirection",
|
||||
"Down"
|
||||
],
|
||||
"ctrl-w ctrl-h": [
|
||||
"workspace::ActivatePaneInDirection",
|
||||
"Left"
|
||||
],
|
||||
"ctrl-w ctrl-l": [
|
||||
"workspace::ActivatePaneInDirection",
|
||||
"Right"
|
||||
],
|
||||
"ctrl-w ctrl-k": [
|
||||
"workspace::ActivatePaneInDirection",
|
||||
"Up"
|
||||
],
|
||||
"ctrl-w ctrl-j": [
|
||||
"workspace::ActivatePaneInDirection",
|
||||
"Down"
|
||||
],
|
||||
"ctrl-w g t": "pane::ActivateNextItem",
|
||||
"ctrl-w ctrl-g t": "pane::ActivateNextItem",
|
||||
"ctrl-w g shift-t": "pane::ActivatePrevItem",
|
||||
"ctrl-w ctrl-g shift-t": "pane::ActivatePrevItem",
|
||||
"ctrl-w w": "workspace::ActivateNextPane",
|
||||
"ctrl-w ctrl-w": "workspace::ActivateNextPane",
|
||||
"ctrl-w p": "workspace::ActivatePreviousPane",
|
||||
"ctrl-w ctrl-p": "workspace::ActivatePreviousPane",
|
||||
"ctrl-w shift-w": "workspace::ActivatePreviousPane",
|
||||
"ctrl-w ctrl-shift-w": "workspace::ActivatePreviousPane",
|
||||
"ctrl-w v": "pane::SplitLeft",
|
||||
"ctrl-w ctrl-v": "pane::SplitLeft",
|
||||
"ctrl-w s": "pane::SplitUp",
|
||||
"ctrl-w shift-s": "pane::SplitUp",
|
||||
"ctrl-w ctrl-s": "pane::SplitUp",
|
||||
"ctrl-w c": "pane::CloseAllItems",
|
||||
"ctrl-w ctrl-c": "pane::CloseAllItems",
|
||||
"ctrl-w q": "pane::CloseAllItems",
|
||||
"ctrl-w ctrl-q": "pane::CloseAllItems"
|
||||
}
|
||||
},
|
||||
{
|
||||
@@ -150,12 +254,6 @@
|
||||
"vim::PushOperator",
|
||||
"Yank"
|
||||
],
|
||||
"z": [
|
||||
"vim::PushOperator",
|
||||
{
|
||||
"Namespace": "Z"
|
||||
}
|
||||
],
|
||||
"i": [
|
||||
"vim::SwitchMode",
|
||||
"Insert"
|
||||
@@ -165,7 +263,6 @@
|
||||
"shift-a": "vim::InsertEndOfLine",
|
||||
"x": "vim::DeleteRight",
|
||||
"shift-x": "vim::DeleteLeft",
|
||||
"^": "vim::FirstNonWhitespace",
|
||||
"o": "vim::InsertLineBelow",
|
||||
"shift-o": "vim::InsertLineAbove",
|
||||
"~": "vim::ChangeCase",
|
||||
@@ -188,10 +285,18 @@
|
||||
"p": "vim::Paste",
|
||||
"u": "editor::Undo",
|
||||
"ctrl-r": "editor::Redo",
|
||||
"/": [
|
||||
"buffer_search::Deploy",
|
||||
"/": "vim::Search",
|
||||
"?": [
|
||||
"vim::Search",
|
||||
{
|
||||
"focus": true
|
||||
"backwards": true
|
||||
}
|
||||
],
|
||||
";": "vim::RepeatFind",
|
||||
",": [
|
||||
"vim::RepeatFind",
|
||||
{
|
||||
"backwards": true
|
||||
}
|
||||
],
|
||||
"ctrl-f": "vim::PageDown",
|
||||
@@ -222,24 +327,11 @@
|
||||
]
|
||||
}
|
||||
},
|
||||
{
|
||||
"context": "Editor && vim_operator == g",
|
||||
"bindings": {
|
||||
"g": "vim::StartOfDocument",
|
||||
"h": "editor::Hover",
|
||||
"t": "pane::ActivateNextItem",
|
||||
"shift-t": "pane::ActivatePrevItem",
|
||||
"escape": [
|
||||
"vim::SwitchMode",
|
||||
"Normal"
|
||||
],
|
||||
"d": "editor::GoToDefinition"
|
||||
}
|
||||
},
|
||||
{
|
||||
"context": "Editor && vim_operator == c",
|
||||
"bindings": {
|
||||
"c": "vim::CurrentLine"
|
||||
"c": "vim::CurrentLine",
|
||||
"d": "editor::Rename" // zed specific
|
||||
}
|
||||
},
|
||||
{
|
||||
@@ -254,18 +346,6 @@
|
||||
"y": "vim::CurrentLine"
|
||||
}
|
||||
},
|
||||
{
|
||||
"context": "Editor && vim_operator == z",
|
||||
"bindings": {
|
||||
"t": "editor::ScrollCursorTop",
|
||||
"z": "editor::ScrollCursorCenter",
|
||||
"b": "editor::ScrollCursorBottom",
|
||||
"escape": [
|
||||
"vim::SwitchMode",
|
||||
"Normal"
|
||||
]
|
||||
}
|
||||
},
|
||||
{
|
||||
"context": "Editor && VimObject",
|
||||
"bindings": {
|
||||
@@ -305,15 +385,20 @@
|
||||
"vim::PushOperator",
|
||||
"Replace"
|
||||
],
|
||||
"> >": "editor::Indent",
|
||||
"< <": "editor::Outdent"
|
||||
"ctrl-c": [
|
||||
"vim::SwitchMode",
|
||||
"Normal"
|
||||
],
|
||||
">": "editor::Indent",
|
||||
"<": "editor::Outdent"
|
||||
}
|
||||
},
|
||||
{
|
||||
"context": "Editor && vim_mode == insert",
|
||||
"bindings": {
|
||||
"escape": "vim::NormalBefore",
|
||||
"ctrl-c": "vim::NormalBefore"
|
||||
"ctrl-c": "vim::NormalBefore",
|
||||
"ctrl-[": "vim::NormalBefore"
|
||||
}
|
||||
},
|
||||
{
|
||||
@@ -321,7 +406,21 @@
|
||||
"bindings": {
|
||||
"tab": "vim::Tab",
|
||||
"enter": "vim::Enter",
|
||||
"escape": "editor::Cancel"
|
||||
"escape": [
|
||||
"vim::SwitchMode",
|
||||
"Normal"
|
||||
],
|
||||
"ctrl+[": [
|
||||
"vim::SwitchMode",
|
||||
"Normal"
|
||||
]
|
||||
}
|
||||
},
|
||||
{
|
||||
"context": "BufferSearchBar > VimEnabled",
|
||||
"bindings": {
|
||||
"enter": "vim::SearchSubmit",
|
||||
"escape": "buffer_search::Dismiss"
|
||||
}
|
||||
}
|
||||
]
|
||||
|
||||
@@ -24,6 +24,17 @@
|
||||
},
|
||||
// The default font size for text in the editor
|
||||
"buffer_font_size": 15,
|
||||
// Set the buffer's line height.
|
||||
// May take 3 values:
|
||||
// 1. Use a line height that's comfortable for reading (1.618)
|
||||
// "line_height": "comfortable"
|
||||
// 2. Use a standard line height, (1.3)
|
||||
// "line_height": "standard",
|
||||
// 3. Use a custom line height
|
||||
// "line_height": {
|
||||
// "custom": 2
|
||||
// },
|
||||
"buffer_line_height": "comfortable",
|
||||
// The factor to grow the active pane by. Defaults to 1.0
|
||||
// which gives the same size as all other panes.
|
||||
"active_pane_magnification": 1.0,
|
||||
@@ -39,6 +50,13 @@
|
||||
// Whether to pop the completions menu while typing in an editor without
|
||||
// explicitly requesting it.
|
||||
"show_completions_on_input": true,
|
||||
// Whether to show wrap guides in the editor. Setting this to true will
|
||||
// show a guide at the 'preferred_line_length' value if softwrap is set to
|
||||
// 'preferred_line_length', and will show any additional guides as specified
|
||||
// by the 'wrap_guides' setting.
|
||||
"show_wrap_guides": true,
|
||||
// Character counts at which to show wrap guides in the editor.
|
||||
"wrap_guides": [],
|
||||
// Whether to use additional LSP queries to format (and amend) the code after
|
||||
// every "trigger" symbol input, defined by LSP server capabilities.
|
||||
"use_on_type_format": true,
|
||||
@@ -55,6 +73,11 @@
|
||||
// 3. Draw all invisible symbols:
|
||||
// "all"
|
||||
"show_whitespaces": "selection",
|
||||
// Settings related to calls in Zed
|
||||
"calls": {
|
||||
// Join calls with the microphone muted by default
|
||||
"mute_on_join": true
|
||||
},
|
||||
// Scrollbar related settings
|
||||
"scrollbar": {
|
||||
// When to show the scrollbar in the editor.
|
||||
@@ -71,25 +94,33 @@
|
||||
// "never"
|
||||
"show": "auto",
|
||||
// Whether to show git diff indicators in the scrollbar.
|
||||
"git_diff": true
|
||||
"git_diff": true,
|
||||
// Whether to show selections in the scrollbar.
|
||||
"selections": true
|
||||
},
|
||||
// Inlay hint related settings
|
||||
"inlay_hints": {
|
||||
// Global switch to toggle hints on and off, switched off by default.
|
||||
"enabled": false,
|
||||
"enabled": false,
|
||||
// Toggle certain types of hints on and off, all switched on by default.
|
||||
"show_type_hints": true,
|
||||
"show_parameter_hints": true,
|
||||
"show_parameter_hints": true,
|
||||
// Corresponds to null/None LSP hint type value.
|
||||
"show_other_hints": true
|
||||
},
|
||||
"project_panel": {
|
||||
// Whether to show the git status in the project panel.
|
||||
"git_status": true,
|
||||
// Default width of the project panel.
|
||||
"default_width": 240,
|
||||
// Where to dock project panel. Can be 'left' or 'right'.
|
||||
"dock": "left",
|
||||
// Default width of the project panel.
|
||||
"default_width": 240
|
||||
// Whether to show file icons in the project panel.
|
||||
"file_icons": true,
|
||||
// Whether to show folder icons or chevrons for directories in the project panel.
|
||||
"folder_icons": true,
|
||||
// Whether to show the git status in the project panel.
|
||||
"git_status": true,
|
||||
// Amount of indentation for nested items.
|
||||
"indent_size": 20
|
||||
},
|
||||
"assistant": {
|
||||
// Where to dock the assistant. Can be 'left', 'right' or 'bottom'.
|
||||
@@ -115,6 +146,13 @@
|
||||
// 4. Save when idle for a certain amount of time:
|
||||
// "autosave": { "after_delay": {"milliseconds": 500} },
|
||||
"autosave": "off",
|
||||
// Settings related to the editor's tabs
|
||||
"tabs": {
|
||||
// Show git status colors in the editor tabs.
|
||||
"git_status": false,
|
||||
// Position of the close button on the editor tabs.
|
||||
"close_position": "right"
|
||||
},
|
||||
// Whether or not to remove any trailing whitespace from lines of a buffer
|
||||
// before saving it.
|
||||
"remove_trailing_whitespace_on_save": true,
|
||||
@@ -176,9 +214,7 @@
|
||||
"copilot": {
|
||||
// The set of glob patterns for which copilot should be disabled
|
||||
// in any matching file.
|
||||
"disabled_globs": [
|
||||
".env"
|
||||
]
|
||||
"disabled_globs": [".env"]
|
||||
},
|
||||
// Settings specific to journaling
|
||||
"journal": {
|
||||
@@ -280,7 +316,6 @@
|
||||
// "line_height": {
|
||||
// "custom": 2
|
||||
// },
|
||||
//
|
||||
"line_height": "comfortable"
|
||||
// Set the terminal's font size. If this option is not included,
|
||||
// the terminal will default to matching the buffer's font size.
|
||||
@@ -289,6 +324,11 @@
|
||||
// the terminal will default to matching the buffer's font family.
|
||||
// "font_family": "Zed Mono"
|
||||
},
|
||||
// Difference settings for semantic_index
|
||||
"semantic_index": {
|
||||
"enabled": false,
|
||||
"reindexing_delay_seconds": 600
|
||||
},
|
||||
// Different settings for specific languages.
|
||||
"languages": {
|
||||
"Plain Text": {
|
||||
@@ -323,12 +363,6 @@
|
||||
// LSP Specific settings.
|
||||
"lsp": {
|
||||
// Specify the LSP name as a key here.
|
||||
// As of 8/10/22, supported LSPs are:
|
||||
// pyright
|
||||
// gopls
|
||||
// rust-analyzer
|
||||
// typescript-language-server
|
||||
// vscode-json-languageserver
|
||||
// "rust-analyzer": {
|
||||
// //These initialization options are merged into Zed's defaults
|
||||
// "initialization_options": {
|
||||
|
||||
@@ -12,6 +12,7 @@ use regex::Regex;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::{
|
||||
cmp::Reverse,
|
||||
ffi::OsStr,
|
||||
fmt::{self, Display},
|
||||
path::PathBuf,
|
||||
sync::Arc,
|
||||
@@ -80,6 +81,9 @@ impl SavedConversationMetadata {
|
||||
let mut conversations = Vec::<SavedConversationMetadata>::new();
|
||||
while let Some(path) = paths.next().await {
|
||||
let path = path?;
|
||||
if path.extension() != Some(OsStr::new("json")) {
|
||||
continue;
|
||||
}
|
||||
|
||||
let pattern = r" - \d+.zed.json$";
|
||||
let re = Regex::new(pattern).unwrap();
|
||||
|
||||
@@ -298,12 +298,22 @@ impl AssistantPanel {
|
||||
}
|
||||
|
||||
fn deploy(&mut self, action: &search::buffer_search::Deploy, cx: &mut ViewContext<Self>) {
|
||||
let mut propagate_action = true;
|
||||
if let Some(search_bar) = self.toolbar.read(cx).item_of_type::<BufferSearchBar>() {
|
||||
if search_bar.update(cx, |search_bar, cx| search_bar.show(action.focus, true, cx)) {
|
||||
return;
|
||||
}
|
||||
search_bar.update(cx, |search_bar, cx| {
|
||||
if search_bar.show(cx) {
|
||||
search_bar.search_suggested(cx);
|
||||
if action.focus {
|
||||
search_bar.select_query(cx);
|
||||
cx.focus_self();
|
||||
}
|
||||
propagate_action = false
|
||||
}
|
||||
});
|
||||
}
|
||||
if propagate_action {
|
||||
cx.propagate_action();
|
||||
}
|
||||
cx.propagate_action();
|
||||
}
|
||||
|
||||
fn handle_editor_cancel(&mut self, _: &editor::Cancel, cx: &mut ViewContext<Self>) {
|
||||
@@ -320,13 +330,13 @@ impl AssistantPanel {
|
||||
|
||||
fn select_next_match(&mut self, _: &search::SelectNextMatch, cx: &mut ViewContext<Self>) {
|
||||
if let Some(search_bar) = self.toolbar.read(cx).item_of_type::<BufferSearchBar>() {
|
||||
search_bar.update(cx, |bar, cx| bar.select_match(Direction::Next, cx));
|
||||
search_bar.update(cx, |bar, cx| bar.select_match(Direction::Next, 1, cx));
|
||||
}
|
||||
}
|
||||
|
||||
fn select_prev_match(&mut self, _: &search::SelectPrevMatch, cx: &mut ViewContext<Self>) {
|
||||
if let Some(search_bar) = self.toolbar.read(cx).item_of_type::<BufferSearchBar>() {
|
||||
search_bar.update(cx, |bar, cx| bar.select_match(Direction::Prev, cx));
|
||||
search_bar.update(cx, |bar, cx| bar.select_match(Direction::Prev, 1, cx));
|
||||
}
|
||||
}
|
||||
|
||||
@@ -2061,6 +2071,8 @@ impl ConversationEditor {
|
||||
let remaining_tokens = self.conversation.read(cx).remaining_tokens()?;
|
||||
let remaining_tokens_style = if remaining_tokens <= 0 {
|
||||
&style.no_remaining_tokens
|
||||
} else if remaining_tokens <= 500 {
|
||||
&style.low_remaining_tokens
|
||||
} else {
|
||||
&style.remaining_tokens
|
||||
};
|
||||
|
||||
@@ -36,6 +36,10 @@ anyhow.workspace = true
|
||||
async-broadcast = "0.4"
|
||||
futures.workspace = true
|
||||
postage.workspace = true
|
||||
schemars.workspace = true
|
||||
serde.workspace = true
|
||||
serde_json.workspace = true
|
||||
serde_derive.workspace = true
|
||||
|
||||
[dev-dependencies]
|
||||
client = { path = "../client", features = ["test-support"] }
|
||||
|
||||
@@ -1,10 +1,12 @@
|
||||
pub mod call_settings;
|
||||
pub mod participant;
|
||||
pub mod room;
|
||||
|
||||
use std::sync::Arc;
|
||||
|
||||
use anyhow::{anyhow, Result};
|
||||
use client::{proto, Client, TypedEnvelope, User, UserStore};
|
||||
use call_settings::CallSettings;
|
||||
use client::{proto, ClickhouseEvent, Client, TelemetrySettings, TypedEnvelope, User, UserStore};
|
||||
use collections::HashSet;
|
||||
use futures::{future::Shared, FutureExt};
|
||||
use postage::watch;
|
||||
@@ -19,6 +21,8 @@ pub use participant::ParticipantLocation;
|
||||
pub use room::Room;
|
||||
|
||||
pub fn init(client: Arc<Client>, user_store: ModelHandle<UserStore>, cx: &mut AppContext) {
|
||||
settings::register::<CallSettings>(cx);
|
||||
|
||||
let active_call = cx.add_model(|cx| ActiveCall::new(client, user_store, cx));
|
||||
cx.set_global(active_call);
|
||||
}
|
||||
@@ -198,6 +202,7 @@ impl ActiveCall {
|
||||
let result = invite.await;
|
||||
this.update(&mut cx, |this, cx| {
|
||||
this.pending_invites.remove(&called_user_id);
|
||||
this.report_call_event("invite", cx);
|
||||
cx.notify();
|
||||
});
|
||||
result
|
||||
@@ -243,21 +248,26 @@ impl ActiveCall {
|
||||
};
|
||||
|
||||
let join = Room::join(&call, self.client.clone(), self.user_store.clone(), cx);
|
||||
|
||||
cx.spawn(|this, mut cx| async move {
|
||||
let room = join.await?;
|
||||
this.update(&mut cx, |this, cx| this.set_room(Some(room.clone()), cx))
|
||||
.await?;
|
||||
this.update(&mut cx, |this, cx| {
|
||||
this.report_call_event("accept incoming", cx)
|
||||
});
|
||||
Ok(())
|
||||
})
|
||||
}
|
||||
|
||||
pub fn decline_incoming(&mut self) -> Result<()> {
|
||||
pub fn decline_incoming(&mut self, cx: &mut ModelContext<Self>) -> Result<()> {
|
||||
let call = self
|
||||
.incoming_call
|
||||
.0
|
||||
.borrow_mut()
|
||||
.take()
|
||||
.ok_or_else(|| anyhow!("no incoming call"))?;
|
||||
Self::report_call_event_for_room("decline incoming", call.room_id, &self.client, cx);
|
||||
self.client.send(proto::DeclineCall {
|
||||
room_id: call.room_id,
|
||||
})?;
|
||||
@@ -266,6 +276,7 @@ impl ActiveCall {
|
||||
|
||||
pub fn hang_up(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
|
||||
cx.notify();
|
||||
self.report_call_event("hang up", cx);
|
||||
if let Some((room, _)) = self.room.take() {
|
||||
room.update(cx, |room, cx| room.leave(cx))
|
||||
} else {
|
||||
@@ -279,6 +290,7 @@ impl ActiveCall {
|
||||
cx: &mut ModelContext<Self>,
|
||||
) -> Task<Result<u64>> {
|
||||
if let Some((room, _)) = self.room.as_ref() {
|
||||
self.report_call_event("share project", cx);
|
||||
room.update(cx, |room, cx| room.share_project(project, cx))
|
||||
} else {
|
||||
Task::ready(Err(anyhow!("no active call")))
|
||||
@@ -291,6 +303,7 @@ impl ActiveCall {
|
||||
cx: &mut ModelContext<Self>,
|
||||
) -> Result<()> {
|
||||
if let Some((room, _)) = self.room.as_ref() {
|
||||
self.report_call_event("unshare project", cx);
|
||||
room.update(cx, |room, cx| room.unshare_project(project, cx))
|
||||
} else {
|
||||
Err(anyhow!("no active call"))
|
||||
@@ -349,7 +362,29 @@ impl ActiveCall {
|
||||
self.room.as_ref().map(|(room, _)| room)
|
||||
}
|
||||
|
||||
pub fn client(&self) -> Arc<Client> {
|
||||
self.client.clone()
|
||||
}
|
||||
|
||||
pub fn pending_invites(&self) -> &HashSet<u64> {
|
||||
&self.pending_invites
|
||||
}
|
||||
|
||||
fn report_call_event(&self, operation: &'static str, cx: &AppContext) {
|
||||
if let Some(room) = self.room() {
|
||||
Self::report_call_event_for_room(operation, room.read(cx).id(), &self.client, cx)
|
||||
}
|
||||
}
|
||||
|
||||
pub fn report_call_event_for_room(
|
||||
operation: &'static str,
|
||||
room_id: u64,
|
||||
client: &Arc<Client>,
|
||||
cx: &AppContext,
|
||||
) {
|
||||
let telemetry = client.telemetry();
|
||||
let telemetry_settings = *settings::get::<TelemetrySettings>(cx);
|
||||
let event = ClickhouseEvent::Call { operation, room_id };
|
||||
telemetry.report_clickhouse_event(event, telemetry_settings);
|
||||
}
|
||||
}
|
||||
|
||||
27
crates/call/src/call_settings.rs
Normal file
@@ -0,0 +1,27 @@
|
||||
use schemars::JsonSchema;
|
||||
use serde_derive::{Deserialize, Serialize};
|
||||
use settings::Setting;
|
||||
|
||||
#[derive(Deserialize, Debug)]
|
||||
pub struct CallSettings {
|
||||
pub mute_on_join: bool,
|
||||
}
|
||||
|
||||
#[derive(Clone, Default, Serialize, Deserialize, JsonSchema, Debug)]
|
||||
pub struct CallSettingsContent {
|
||||
pub mute_on_join: Option<bool>,
|
||||
}
|
||||
|
||||
impl Setting for CallSettings {
|
||||
const KEY: Option<&'static str> = Some("calls");
|
||||
|
||||
type FileContent = CallSettingsContent;
|
||||
|
||||
fn load(
|
||||
default_value: &Self::FileContent,
|
||||
user_values: &[&Self::FileContent],
|
||||
_: &gpui::AppContext,
|
||||
) -> anyhow::Result<Self> {
|
||||
Self::load_via_json_merge(default_value, user_values)
|
||||
}
|
||||
}
|
||||
@@ -1,4 +1,5 @@
|
||||
use crate::{
|
||||
call_settings::CallSettings,
|
||||
participant::{LocalParticipant, ParticipantLocation, RemoteParticipant, RemoteVideoTrack},
|
||||
IncomingCall,
|
||||
};
|
||||
@@ -153,8 +154,10 @@ impl Room {
|
||||
cx.spawn(|this, mut cx| async move {
|
||||
connect.await?;
|
||||
|
||||
this.update(&mut cx, |this, cx| this.share_microphone(cx))
|
||||
.await?;
|
||||
if !cx.read(|cx| settings::get::<CallSettings>(cx).mute_on_join) {
|
||||
this.update(&mut cx, |this, cx| this.share_microphone(cx))
|
||||
.await?;
|
||||
}
|
||||
|
||||
anyhow::Ok(())
|
||||
})
|
||||
@@ -656,7 +659,7 @@ impl Room {
|
||||
peer_id,
|
||||
projects: participant.projects,
|
||||
location,
|
||||
muted: false,
|
||||
muted: true,
|
||||
speaking: false,
|
||||
video_tracks: Default::default(),
|
||||
audio_tracks: Default::default(),
|
||||
@@ -670,6 +673,10 @@ impl Room {
|
||||
live_kit.room.remote_video_tracks(&user.id.to_string());
|
||||
let audio_tracks =
|
||||
live_kit.room.remote_audio_tracks(&user.id.to_string());
|
||||
let publications = live_kit
|
||||
.room
|
||||
.remote_audio_track_publications(&user.id.to_string());
|
||||
|
||||
for track in video_tracks {
|
||||
this.remote_video_track_updated(
|
||||
RemoteVideoTrackUpdate::Subscribed(track),
|
||||
@@ -677,9 +684,15 @@ impl Room {
|
||||
)
|
||||
.log_err();
|
||||
}
|
||||
for track in audio_tracks {
|
||||
|
||||
for (track, publication) in
|
||||
audio_tracks.iter().zip(publications.iter())
|
||||
{
|
||||
this.remote_audio_track_updated(
|
||||
RemoteAudioTrackUpdate::Subscribed(track),
|
||||
RemoteAudioTrackUpdate::Subscribed(
|
||||
track.clone(),
|
||||
publication.clone(),
|
||||
),
|
||||
cx,
|
||||
)
|
||||
.log_err();
|
||||
@@ -819,8 +832,8 @@ impl Room {
|
||||
cx.notify();
|
||||
}
|
||||
RemoteAudioTrackUpdate::MuteChanged { track_id, muted } => {
|
||||
let mut found = false;
|
||||
for participant in &mut self.remote_participants.values_mut() {
|
||||
let mut found = false;
|
||||
for track in participant.audio_tracks.values() {
|
||||
if track.sid() == track_id {
|
||||
found = true;
|
||||
@@ -832,16 +845,20 @@ impl Room {
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
cx.notify();
|
||||
}
|
||||
RemoteAudioTrackUpdate::Subscribed(track) => {
|
||||
RemoteAudioTrackUpdate::Subscribed(track, publication) => {
|
||||
let user_id = track.publisher_id().parse()?;
|
||||
let track_id = track.sid().to_string();
|
||||
let participant = self
|
||||
.remote_participants
|
||||
.get_mut(&user_id)
|
||||
.ok_or_else(|| anyhow!("subscribed to track by unknown participant"))?;
|
||||
|
||||
participant.audio_tracks.insert(track_id.clone(), track);
|
||||
participant.muted = publication.is_muted();
|
||||
|
||||
cx.emit(Event::RemoteAudioTracksChanged {
|
||||
participant_id: participant.peer_id,
|
||||
});
|
||||
@@ -1053,7 +1070,7 @@ impl Room {
|
||||
self.live_kit
|
||||
.as_ref()
|
||||
.and_then(|live_kit| match &live_kit.microphone_track {
|
||||
LocalTrack::None => None,
|
||||
LocalTrack::None => Some(true),
|
||||
LocalTrack::Pending { muted, .. } => Some(*muted),
|
||||
LocalTrack::Published { muted, .. } => Some(*muted),
|
||||
})
|
||||
@@ -1070,6 +1087,7 @@ impl Room {
|
||||
self.live_kit.as_ref().map(|live_kit| live_kit.deafened)
|
||||
}
|
||||
|
||||
#[track_caller]
|
||||
pub fn share_microphone(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
|
||||
if self.status.is_offline() {
|
||||
return Task::ready(Err(anyhow!("room is offline")));
|
||||
@@ -1244,6 +1262,10 @@ impl Room {
|
||||
pub fn toggle_mute(&mut self, cx: &mut ModelContext<Self>) -> Result<Task<Result<()>>> {
|
||||
let should_mute = !self.is_muted();
|
||||
if let Some(live_kit) = self.live_kit.as_mut() {
|
||||
if matches!(live_kit.microphone_track, LocalTrack::None) {
|
||||
return Ok(self.share_microphone(cx));
|
||||
}
|
||||
|
||||
let (ret_task, old_muted) = live_kit.set_mute(should_mute, cx)?;
|
||||
live_kit.muted_by_user = should_mute;
|
||||
|
||||
|
||||
@@ -201,6 +201,7 @@ impl Bundle {
|
||||
self.zed_version_string()
|
||||
);
|
||||
}
|
||||
|
||||
Self::LocalPath { executable, .. } => {
|
||||
let executable_parent = executable
|
||||
.parent()
|
||||
|
||||
@@ -22,7 +22,7 @@ staff_mode = { path = "../staff_mode" }
|
||||
sum_tree = { path = "../sum_tree" }
|
||||
|
||||
anyhow.workspace = true
|
||||
async-recursion = "0.3"
|
||||
async-recursion.workspace = true
|
||||
async-tungstenite = { version = "0.16", features = ["async-tls"] }
|
||||
futures.workspace = true
|
||||
image = "0.23"
|
||||
|
||||
@@ -748,7 +748,7 @@ impl Client {
|
||||
|
||||
#[async_recursion(?Send)]
|
||||
pub async fn authenticate_and_connect(
|
||||
self: &Arc<Self>,
|
||||
self: &'async_recursion Arc<Self>,
|
||||
try_keychain: bool,
|
||||
cx: &AsyncAppContext,
|
||||
) -> anyhow::Result<()> {
|
||||
|
||||
@@ -40,6 +40,7 @@ lazy_static! {
|
||||
struct ClickhouseEventRequestBody {
|
||||
token: &'static str,
|
||||
installation_id: Option<Arc<str>>,
|
||||
is_staff: Option<bool>,
|
||||
app_version: Option<Arc<str>>,
|
||||
os_name: &'static str,
|
||||
os_version: Option<Arc<str>>,
|
||||
@@ -70,6 +71,10 @@ pub enum ClickhouseEvent {
|
||||
suggestion_accepted: bool,
|
||||
file_extension: Option<String>,
|
||||
},
|
||||
Call {
|
||||
operation: &'static str,
|
||||
room_id: u64,
|
||||
},
|
||||
}
|
||||
|
||||
#[cfg(debug_assertions)]
|
||||
@@ -220,6 +225,7 @@ impl Telemetry {
|
||||
&ClickhouseEventRequestBody {
|
||||
token: ZED_SECRET_CLIENT_TOKEN,
|
||||
installation_id: state.installation_id.clone(),
|
||||
is_staff: state.is_staff.clone(),
|
||||
app_version: state.app_version.clone(),
|
||||
os_name: state.os_name,
|
||||
os_version: state.os_version.clone(),
|
||||
|
||||
@@ -3,7 +3,7 @@ authors = ["Nathan Sobo <nathan@zed.dev>"]
|
||||
default-run = "collab"
|
||||
edition = "2021"
|
||||
name = "collab"
|
||||
version = "0.15.0"
|
||||
version = "0.16.0"
|
||||
publish = false
|
||||
|
||||
[[bin]]
|
||||
@@ -14,7 +14,6 @@ name = "seed"
|
||||
required-features = ["seed-support"]
|
||||
|
||||
[dependencies]
|
||||
audio = { path = "../audio" }
|
||||
collections = { path = "../collections" }
|
||||
live_kit_server = { path = "../live_kit_server" }
|
||||
rpc = { path = "../rpc" }
|
||||
@@ -58,6 +57,7 @@ tracing-log = "0.1.3"
|
||||
tracing-subscriber = { version = "0.3.11", features = ["env-filter", "json"] }
|
||||
|
||||
[dev-dependencies]
|
||||
audio = { path = "../audio" }
|
||||
collections = { path = "../collections", features = ["test-support"] }
|
||||
gpui = { path = "../gpui", features = ["test-support"] }
|
||||
call = { path = "../call", features = ["test-support"] }
|
||||
|
||||
@@ -3517,7 +3517,6 @@ pub use test::*;
|
||||
mod test {
|
||||
use super::*;
|
||||
use gpui::executor::Background;
|
||||
use lazy_static::lazy_static;
|
||||
use parking_lot::Mutex;
|
||||
use sea_orm::ConnectionTrait;
|
||||
use sqlx::migrate::MigrateDatabase;
|
||||
@@ -3566,9 +3565,7 @@ mod test {
|
||||
}
|
||||
|
||||
pub fn postgres(background: Arc<Background>) -> Self {
|
||||
lazy_static! {
|
||||
static ref LOCK: Mutex<()> = Mutex::new(());
|
||||
}
|
||||
static LOCK: Mutex<()> = Mutex::new(());
|
||||
|
||||
let _guard = LOCK.lock();
|
||||
let mut rng = StdRng::from_entropy();
|
||||
|
||||
@@ -157,7 +157,7 @@ async fn test_basic_calls(
|
||||
// User C receives the call, but declines it.
|
||||
let call_c = incoming_call_c.next().await.unwrap().unwrap();
|
||||
assert_eq!(call_c.calling_user.github_login, "user_b");
|
||||
active_call_c.update(cx_c, |call, _| call.decline_incoming().unwrap());
|
||||
active_call_c.update(cx_c, |call, cx| call.decline_incoming(cx).unwrap());
|
||||
assert!(incoming_call_c.next().await.unwrap().is_none());
|
||||
|
||||
deterministic.run_until_parked();
|
||||
@@ -1080,7 +1080,7 @@ async fn test_calls_on_multiple_connections(
|
||||
|
||||
// User B declines the call on one of the two connections, causing both connections
|
||||
// to stop ringing.
|
||||
active_call_b2.update(cx_b2, |call, _| call.decline_incoming().unwrap());
|
||||
active_call_b2.update(cx_b2, |call, cx| call.decline_incoming(cx).unwrap());
|
||||
deterministic.run_until_parked();
|
||||
assert!(incoming_call_b1.next().await.unwrap().is_none());
|
||||
assert!(incoming_call_b2.next().await.unwrap().is_none());
|
||||
@@ -5945,7 +5945,7 @@ async fn test_contacts(
|
||||
[("user_b".to_string(), "online", "busy")]
|
||||
);
|
||||
|
||||
active_call_b.update(cx_b, |call, _| call.decline_incoming().unwrap());
|
||||
active_call_b.update(cx_b, |call, cx| call.decline_incoming(cx).unwrap());
|
||||
deterministic.run_until_parked();
|
||||
assert_eq!(
|
||||
contacts(&client_a, cx_a),
|
||||
@@ -7217,7 +7217,7 @@ async fn test_peers_following_each_other(
|
||||
|
||||
// Clients A and B follow each other in split panes
|
||||
workspace_a.update(cx_a, |workspace, cx| {
|
||||
workspace.split_pane(workspace.active_pane().clone(), SplitDirection::Right, cx);
|
||||
workspace.split_and_clone(workspace.active_pane().clone(), SplitDirection::Right, cx);
|
||||
});
|
||||
workspace_a
|
||||
.update(cx_a, |workspace, cx| {
|
||||
@@ -7228,7 +7228,7 @@ async fn test_peers_following_each_other(
|
||||
.await
|
||||
.unwrap();
|
||||
workspace_b.update(cx_b, |workspace, cx| {
|
||||
workspace.split_pane(workspace.active_pane().clone(), SplitDirection::Right, cx);
|
||||
workspace.split_and_clone(workspace.active_pane().clone(), SplitDirection::Right, cx);
|
||||
});
|
||||
workspace_b
|
||||
.update(cx_b, |workspace, cx| {
|
||||
@@ -7455,7 +7455,7 @@ async fn test_auto_unfollowing(
|
||||
|
||||
// When client B activates a different pane, it continues following client A in the original pane.
|
||||
workspace_b.update(cx_b, |workspace, cx| {
|
||||
workspace.split_pane(pane_b.clone(), SplitDirection::Right, cx)
|
||||
workspace.split_and_clone(pane_b.clone(), SplitDirection::Right, cx)
|
||||
});
|
||||
assert_eq!(
|
||||
workspace_b.read_with(cx_b, |workspace, _| workspace.leader_for_pane(&pane_b)),
|
||||
|
||||
@@ -32,14 +32,14 @@ use std::{
|
||||
Arc,
|
||||
},
|
||||
};
|
||||
use util::ResultExt;
|
||||
use util::{path_env_var, ResultExt};
|
||||
|
||||
lazy_static::lazy_static! {
|
||||
static ref PLAN_LOAD_PATH: Option<PathBuf> = path_env_var("LOAD_PLAN");
|
||||
static ref PLAN_SAVE_PATH: Option<PathBuf> = path_env_var("SAVE_PLAN");
|
||||
static ref LOADED_PLAN_JSON: Mutex<Option<Vec<u8>>> = Default::default();
|
||||
static ref PLAN: Mutex<Option<Arc<Mutex<TestPlan>>>> = Default::default();
|
||||
}
|
||||
static LOADED_PLAN_JSON: Mutex<Option<Vec<u8>>> = Mutex::new(None);
|
||||
static PLAN: Mutex<Option<Arc<Mutex<TestPlan>>>> = Mutex::new(None);
|
||||
|
||||
#[gpui::test(iterations = 100, on_failure = "on_failure")]
|
||||
async fn test_random_collaboration(
|
||||
@@ -365,7 +365,7 @@ async fn apply_client_operation(
|
||||
}
|
||||
|
||||
log::info!("{}: declining incoming call", client.username);
|
||||
active_call.update(cx, |call, _| call.decline_incoming())?;
|
||||
active_call.update(cx, |call, cx| call.decline_incoming(cx))?;
|
||||
}
|
||||
|
||||
ClientOperation::LeaveCall => {
|
||||
@@ -2171,16 +2171,3 @@ fn gen_file_name(rng: &mut StdRng) -> String {
|
||||
}
|
||||
name
|
||||
}
|
||||
|
||||
fn path_env_var(name: &str) -> Option<PathBuf> {
|
||||
let value = env::var(name).ok()?;
|
||||
let mut path = PathBuf::from(value);
|
||||
if path.is_relative() {
|
||||
let mut abs_path = PathBuf::from(env!("CARGO_MANIFEST_DIR"));
|
||||
abs_path.pop();
|
||||
abs_path.pop();
|
||||
abs_path.push(path);
|
||||
path = abs_path
|
||||
}
|
||||
Some(path)
|
||||
}
|
||||
|
||||
@@ -39,6 +39,7 @@ recent_projects = {path = "../recent_projects"}
|
||||
settings = { path = "../settings" }
|
||||
theme = { path = "../theme" }
|
||||
theme_selector = { path = "../theme_selector" }
|
||||
vcs_menu = { path = "../vcs_menu" }
|
||||
util = { path = "../util" }
|
||||
workspace = { path = "../workspace" }
|
||||
zed-actions = {path = "../zed-actions"}
|
||||
|
||||
@@ -1,8 +1,5 @@
|
||||
use crate::{
|
||||
branch_list::{build_branch_list, BranchList},
|
||||
contact_notification::ContactNotification,
|
||||
contacts_popover,
|
||||
face_pile::FacePile,
|
||||
contact_notification::ContactNotification, contacts_popover, face_pile::FacePile,
|
||||
toggle_deafen, toggle_mute, toggle_screen_sharing, LeaveCall, ToggleDeafen, ToggleMute,
|
||||
ToggleScreenSharing,
|
||||
};
|
||||
@@ -27,6 +24,7 @@ use recent_projects::{build_recent_projects, RecentProjects};
|
||||
use std::{ops::Range, sync::Arc};
|
||||
use theme::{AvatarStyle, Theme};
|
||||
use util::ResultExt;
|
||||
use vcs_menu::{build_branch_list, BranchList, OpenRecent as ToggleVcsMenu};
|
||||
use workspace::{FollowNextCollaborator, Workspace, WORKSPACE_DB};
|
||||
|
||||
const MAX_PROJECT_NAME_LENGTH: usize = 40;
|
||||
@@ -37,7 +35,6 @@ actions!(
|
||||
[
|
||||
ToggleContactsMenu,
|
||||
ToggleUserMenu,
|
||||
ToggleVcsMenu,
|
||||
ToggleProjectMenu,
|
||||
SwitchBranch,
|
||||
ShareProject,
|
||||
@@ -229,15 +226,23 @@ impl CollabTitlebarItem {
|
||||
let mut ret = Flex::row().with_child(
|
||||
Stack::new()
|
||||
.with_child(
|
||||
MouseEventHandler::<ToggleProjectMenu, Self>::new(0, cx, |mouse_state, _| {
|
||||
MouseEventHandler::<ToggleProjectMenu, Self>::new(0, cx, |mouse_state, cx| {
|
||||
let style = project_style
|
||||
.in_state(self.project_popover.is_some())
|
||||
.style_for(mouse_state);
|
||||
enum RecentProjectsTooltip {}
|
||||
Label::new(name, style.text.clone())
|
||||
.contained()
|
||||
.with_style(style.container)
|
||||
.aligned()
|
||||
.left()
|
||||
.with_tooltip::<RecentProjectsTooltip>(
|
||||
0,
|
||||
"Recent projects".into(),
|
||||
Some(Box::new(recent_projects::OpenRecent)),
|
||||
theme.tooltip.clone(),
|
||||
cx,
|
||||
)
|
||||
.into_any_named("title-project-name")
|
||||
})
|
||||
.with_cursor_style(CursorStyle::PointingHand)
|
||||
@@ -264,7 +269,8 @@ impl CollabTitlebarItem {
|
||||
MouseEventHandler::<ToggleVcsMenu, Self>::new(
|
||||
0,
|
||||
cx,
|
||||
|mouse_state, _| {
|
||||
|mouse_state, cx| {
|
||||
enum BranchPopoverTooltip {}
|
||||
let style = git_style
|
||||
.in_state(self.branch_popover.is_some())
|
||||
.style_for(mouse_state);
|
||||
@@ -274,6 +280,13 @@ impl CollabTitlebarItem {
|
||||
.with_margin_right(item_spacing)
|
||||
.aligned()
|
||||
.left()
|
||||
.with_tooltip::<BranchPopoverTooltip>(
|
||||
0,
|
||||
"Recent branches".into(),
|
||||
Some(Box::new(ToggleVcsMenu)),
|
||||
theme.tooltip.clone(),
|
||||
cx,
|
||||
)
|
||||
.into_any_named("title-project-branch")
|
||||
},
|
||||
)
|
||||
@@ -639,10 +652,10 @@ impl CollabTitlebarItem {
|
||||
let is_muted = room.read(cx).is_muted();
|
||||
if is_muted {
|
||||
icon = "icons/radix/mic-mute.svg";
|
||||
tooltip = "Unmute microphone\nRight click for options";
|
||||
tooltip = "Unmute microphone";
|
||||
} else {
|
||||
icon = "icons/radix/mic.svg";
|
||||
tooltip = "Mute microphone\nRight click for options";
|
||||
tooltip = "Mute microphone";
|
||||
}
|
||||
|
||||
let titlebar = &theme.titlebar;
|
||||
@@ -692,10 +705,10 @@ impl CollabTitlebarItem {
|
||||
let is_deafened = room.read(cx).is_deafened().unwrap_or(false);
|
||||
if is_deafened {
|
||||
icon = "icons/radix/speaker-off.svg";
|
||||
tooltip = "Unmute speakers\nRight click for options";
|
||||
tooltip = "Unmute speakers";
|
||||
} else {
|
||||
icon = "icons/radix/speaker-loud.svg";
|
||||
tooltip = "Mute speakers\nRight click for options";
|
||||
tooltip = "Mute speakers";
|
||||
}
|
||||
|
||||
let titlebar = &theme.titlebar;
|
||||
|
||||
@@ -1,4 +1,3 @@
|
||||
mod branch_list;
|
||||
mod collab_titlebar_item;
|
||||
mod contact_finder;
|
||||
mod contact_list;
|
||||
@@ -19,17 +18,11 @@ use workspace::AppState;
|
||||
|
||||
actions!(
|
||||
collab,
|
||||
[
|
||||
ToggleScreenSharing,
|
||||
ToggleMute,
|
||||
ToggleDeafen,
|
||||
LeaveCall,
|
||||
ShareMicrophone
|
||||
]
|
||||
[ToggleScreenSharing, ToggleMute, ToggleDeafen, LeaveCall]
|
||||
);
|
||||
|
||||
pub fn init(app_state: &Arc<AppState>, cx: &mut AppContext) {
|
||||
branch_list::init(cx);
|
||||
vcs_menu::init(cx);
|
||||
collab_titlebar_item::init(cx);
|
||||
contact_list::init(cx);
|
||||
contact_finder::init(cx);
|
||||
@@ -41,15 +34,28 @@ pub fn init(app_state: &Arc<AppState>, cx: &mut AppContext) {
|
||||
cx.add_global_action(toggle_screen_sharing);
|
||||
cx.add_global_action(toggle_mute);
|
||||
cx.add_global_action(toggle_deafen);
|
||||
cx.add_global_action(share_microphone);
|
||||
}
|
||||
|
||||
pub fn toggle_screen_sharing(_: &ToggleScreenSharing, cx: &mut AppContext) {
|
||||
if let Some(room) = ActiveCall::global(cx).read(cx).room().cloned() {
|
||||
let call = ActiveCall::global(cx).read(cx);
|
||||
if let Some(room) = call.room().cloned() {
|
||||
let client = call.client();
|
||||
let toggle_screen_sharing = room.update(cx, |room, cx| {
|
||||
if room.is_screen_sharing() {
|
||||
ActiveCall::report_call_event_for_room(
|
||||
"disable screen share",
|
||||
room.id(),
|
||||
&client,
|
||||
cx,
|
||||
);
|
||||
Task::ready(room.unshare_screen(cx))
|
||||
} else {
|
||||
ActiveCall::report_call_event_for_room(
|
||||
"enable screen share",
|
||||
room.id(),
|
||||
&client,
|
||||
cx,
|
||||
);
|
||||
room.share_screen(cx)
|
||||
}
|
||||
});
|
||||
@@ -58,10 +64,24 @@ pub fn toggle_screen_sharing(_: &ToggleScreenSharing, cx: &mut AppContext) {
|
||||
}
|
||||
|
||||
pub fn toggle_mute(_: &ToggleMute, cx: &mut AppContext) {
|
||||
if let Some(room) = ActiveCall::global(cx).read(cx).room().cloned() {
|
||||
room.update(cx, Room::toggle_mute)
|
||||
.map(|task| task.detach_and_log_err(cx))
|
||||
.log_err();
|
||||
let call = ActiveCall::global(cx).read(cx);
|
||||
if let Some(room) = call.room().cloned() {
|
||||
let client = call.client();
|
||||
room.update(cx, |room, cx| {
|
||||
if room.is_muted() {
|
||||
ActiveCall::report_call_event_for_room("enable microphone", room.id(), &client, cx);
|
||||
} else {
|
||||
ActiveCall::report_call_event_for_room(
|
||||
"disable microphone",
|
||||
room.id(),
|
||||
&client,
|
||||
cx,
|
||||
);
|
||||
}
|
||||
room.toggle_mute(cx)
|
||||
})
|
||||
.map(|task| task.detach_and_log_err(cx))
|
||||
.log_err();
|
||||
}
|
||||
}
|
||||
|
||||
@@ -72,10 +92,3 @@ pub fn toggle_deafen(_: &ToggleDeafen, cx: &mut AppContext) {
|
||||
.log_err();
|
||||
}
|
||||
}
|
||||
|
||||
pub fn share_microphone(_: &ShareMicrophone, cx: &mut AppContext) {
|
||||
if let Some(room) = ActiveCall::global(cx).read(cx).room().cloned() {
|
||||
room.update(cx, Room::share_microphone)
|
||||
.detach_and_log_err(cx)
|
||||
}
|
||||
}
|
||||
|
||||
@@ -67,7 +67,7 @@ impl PickerDelegate for ContactFinderDelegate {
|
||||
})
|
||||
}
|
||||
|
||||
fn confirm(&mut self, cx: &mut ViewContext<Picker<Self>>) {
|
||||
fn confirm(&mut self, _: bool, cx: &mut ViewContext<Picker<Self>>) {
|
||||
if let Some(user) = self.potential_contacts.get(self.selected_index) {
|
||||
let user_store = self.user_store.read(cx);
|
||||
match user_store.contact_request_status(user) {
|
||||
|
||||
@@ -99,8 +99,8 @@ impl IncomingCallNotification {
|
||||
})
|
||||
.detach_and_log_err(cx);
|
||||
} else {
|
||||
active_call.update(cx, |active_call, _| {
|
||||
active_call.decline_incoming().log_err();
|
||||
active_call.update(cx, |active_call, cx| {
|
||||
active_call.decline_incoming(cx).log_err();
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
@@ -160,7 +160,7 @@ impl PickerDelegate for CommandPaletteDelegate {
|
||||
|
||||
fn dismissed(&mut self, _cx: &mut ViewContext<Picker<Self>>) {}
|
||||
|
||||
fn confirm(&mut self, cx: &mut ViewContext<Picker<Self>>) {
|
||||
fn confirm(&mut self, _: bool, cx: &mut ViewContext<Picker<Self>>) {
|
||||
if !self.matches.is_empty() {
|
||||
let window_id = cx.window_id();
|
||||
let focused_view_id = self.focused_view_id;
|
||||
@@ -369,6 +369,7 @@ mod tests {
|
||||
editor::init(cx);
|
||||
workspace::init(app_state.clone(), cx);
|
||||
init(cx);
|
||||
Project::init_settings(cx);
|
||||
app_state
|
||||
})
|
||||
}
|
||||
|
||||
39
crates/crdb/Cargo.toml
Normal file
@@ -0,0 +1,39 @@
|
||||
[package]
|
||||
name = "crdb"
|
||||
version = "0.1.0"
|
||||
edition = "2021"
|
||||
|
||||
[lib]
|
||||
path = "src/crdb.rs"
|
||||
doctest = false
|
||||
|
||||
[features]
|
||||
test-support = ["collections/test-support", "util/test-support"]
|
||||
|
||||
[dependencies]
|
||||
collections = { path = "../collections" }
|
||||
util = { path = "../util" }
|
||||
|
||||
anyhow.workspace = true
|
||||
arrayvec = { version = "0.7.1", features = ["serde"] }
|
||||
bromberg_sl2 = { git = "https://github.com/zed-industries/bromberg_sl2", rev = "6faf816bd5b4b7b2b6ea77495686634732ded095" }
|
||||
futures.workspace = true
|
||||
lazy_static.workspace = true
|
||||
log.workspace = true
|
||||
parking_lot.workspace = true
|
||||
portable-atomic = { version = "1", features = ["serde"] }
|
||||
serde.workspace = true
|
||||
serde_bare = "0.5"
|
||||
smallvec.workspace = true
|
||||
uuid = { version = "1.3", features = ["v4", "fast-rng", "serde"] }
|
||||
|
||||
[dev-dependencies]
|
||||
collections = { path = "../collections", features = ["test-support"] }
|
||||
gpui = { path = "../gpui", features = ["test-support"] }
|
||||
util = { path = "../util", features = ["test-support"] }
|
||||
|
||||
async-broadcast = "0.4"
|
||||
ctor.workspace = true
|
||||
env_logger.workspace = true
|
||||
rand.workspace = true
|
||||
smol.workspace = true
|
||||
35
crates/crdb/src/README.md
Normal file
@@ -0,0 +1,35 @@
|
||||
# CRDB: A conflict-free replicated database for code and markdown
|
||||
|
||||
Our goal is for this database to contain all the text inserted in Zed.
|
||||
|
||||
## Contexts
|
||||
|
||||
The database is divided into *contexts*, with each context containing a collection of *documents*.
|
||||
|
||||
### Contexts contain documents
|
||||
|
||||
These contexts and the documents are really just namespaces in a global table of document *fragments*. Each fragment is a sequence of one or more characters, which may or may not be visible in a given branch.
|
||||
|
||||
#### Documents with paths are files
|
||||
|
||||
Documents in a context can be associated with metadata. If a document is associated with a relative path, it represents a file. A context that contains files can be synchronized with a directory tree on the file system, much like a Git repository.
|
||||
|
||||
#### Conversations are also documents
|
||||
|
||||
Contexts can also be associated with conversations, which are special documents that embed other documents that represent messages. Messages are embedded via a mechanism called *portals*, which will be discussed further below.
|
||||
|
||||
### Contexts occupy a hierarchical namespace
|
||||
|
||||
For example, at genesis, zed.dev will contain the following channels:
|
||||
|
||||
#zed
|
||||
- This is where people get oriented about what Zed is all about. We'll link to it from our landing page.
|
||||
#zed/staff
|
||||
- Here's where we talk about stuff private to the company, and host company-specific files.
|
||||
#zed/insiders
|
||||
- Users we've worked with.
|
||||
#zed/zed
|
||||
- This contains the actual source code for Zed.
|
||||
- It also has a conversation where potential contributors can engage with us and each other.
|
||||
#zed/zed/debugger
|
||||
- A subcontext of zed/zed where we talk about and eventually implement a debugger. Associated with a different branch of zed/zed where the debugger is being built, but could also have multiple branches. Branches and contexts are independent.
|
||||
1957
crates/crdb/src/btree.rs
Normal file
755
crates/crdb/src/btree/cursor.rs
Normal file
@@ -0,0 +1,755 @@
|
||||
use super::*;
|
||||
use arrayvec::ArrayVec;
|
||||
use std::{cmp::Ordering, mem, sync::Arc};
|
||||
|
||||
#[derive(Clone)]
|
||||
struct StackEntry<'a, T: Item, D> {
|
||||
tree: &'a Sequence<T>,
|
||||
index: usize,
|
||||
position: D,
|
||||
}
|
||||
|
||||
#[derive(Clone)]
|
||||
pub struct Cursor<'a, T: Item, D> {
|
||||
tree: &'a Sequence<T>,
|
||||
stack: ArrayVec<StackEntry<'a, T, D>, 16>,
|
||||
position: D,
|
||||
did_seek: bool,
|
||||
at_end: bool,
|
||||
}
|
||||
|
||||
pub struct Iter<'a, T: Item> {
|
||||
tree: &'a Sequence<T>,
|
||||
stack: ArrayVec<StackEntry<'a, T, ()>, 16>,
|
||||
}
|
||||
|
||||
impl<'a, T, D> Cursor<'a, T, D>
|
||||
where
|
||||
T: Item,
|
||||
D: Dimension<'a, T::Summary>,
|
||||
{
|
||||
pub fn new(tree: &'a Sequence<T>) -> Self {
|
||||
Self {
|
||||
tree,
|
||||
stack: ArrayVec::new(),
|
||||
position: D::default(),
|
||||
did_seek: false,
|
||||
at_end: tree.is_empty(),
|
||||
}
|
||||
}
|
||||
|
||||
fn reset(&mut self) {
|
||||
self.did_seek = false;
|
||||
self.at_end = self.tree.is_empty();
|
||||
self.stack.truncate(0);
|
||||
self.position = D::default();
|
||||
}
|
||||
|
||||
pub fn start(&self) -> &D {
|
||||
&self.position
|
||||
}
|
||||
|
||||
pub fn end(&self, cx: &<T::Summary as Summary>::Context) -> D {
|
||||
if let Some(item_summary) = self.item_summary() {
|
||||
let mut end = self.start().clone();
|
||||
end.add_summary(item_summary, cx);
|
||||
end
|
||||
} else {
|
||||
self.start().clone()
|
||||
}
|
||||
}
|
||||
|
||||
pub fn item(&self) -> Option<&'a T> {
|
||||
self.assert_did_seek();
|
||||
if let Some(entry) = self.stack.last() {
|
||||
match *entry.tree.0 {
|
||||
Node::Leaf { ref items, .. } => {
|
||||
if entry.index == items.len() {
|
||||
None
|
||||
} else {
|
||||
Some(&items[entry.index])
|
||||
}
|
||||
}
|
||||
_ => unreachable!(),
|
||||
}
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
|
||||
pub fn item_summary(&self) -> Option<&'a T::Summary> {
|
||||
self.assert_did_seek();
|
||||
if let Some(entry) = self.stack.last() {
|
||||
match *entry.tree.0 {
|
||||
Node::Leaf {
|
||||
ref item_summaries, ..
|
||||
} => {
|
||||
if entry.index == item_summaries.len() {
|
||||
None
|
||||
} else {
|
||||
Some(&item_summaries[entry.index])
|
||||
}
|
||||
}
|
||||
_ => unreachable!(),
|
||||
}
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
|
||||
pub fn prev_item(&self) -> Option<&'a T> {
|
||||
self.assert_did_seek();
|
||||
if let Some(entry) = self.stack.last() {
|
||||
if entry.index == 0 {
|
||||
if let Some(prev_leaf) = self.prev_leaf() {
|
||||
Some(prev_leaf.0.items().last().unwrap())
|
||||
} else {
|
||||
None
|
||||
}
|
||||
} else {
|
||||
match *entry.tree.0 {
|
||||
Node::Leaf { ref items, .. } => Some(&items[entry.index - 1]),
|
||||
_ => unreachable!(),
|
||||
}
|
||||
}
|
||||
} else if self.at_end {
|
||||
self.tree.last()
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
|
||||
fn prev_leaf(&self) -> Option<&'a Sequence<T>> {
|
||||
for entry in self.stack.iter().rev().skip(1) {
|
||||
if entry.index != 0 {
|
||||
match *entry.tree.0 {
|
||||
Node::Internal {
|
||||
ref child_trees, ..
|
||||
} => {
|
||||
for tree in child_trees[..entry.index].iter().rev() {
|
||||
if let ChildTree::Loaded { tree } = tree {
|
||||
if let Some(leaf) = tree.rightmost_leaf() {
|
||||
return Some(leaf);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
Node::Leaf { .. } => unreachable!(),
|
||||
};
|
||||
}
|
||||
}
|
||||
None
|
||||
}
|
||||
|
||||
pub fn prev(&mut self, cx: &<T::Summary as Summary>::Context) {
|
||||
self.prev_internal(|_| true, cx)
|
||||
}
|
||||
|
||||
fn prev_internal<F>(&mut self, mut filter_node: F, cx: &<T::Summary as Summary>::Context)
|
||||
where
|
||||
F: FnMut(&T::Summary) -> bool,
|
||||
{
|
||||
if !self.did_seek {
|
||||
self.did_seek = true;
|
||||
self.at_end = true;
|
||||
}
|
||||
|
||||
if self.at_end {
|
||||
self.position = D::default();
|
||||
self.at_end = self.tree.is_empty();
|
||||
if !self.tree.is_empty() {
|
||||
self.stack.push(StackEntry {
|
||||
tree: self.tree,
|
||||
index: self.tree.0.child_summaries().len(),
|
||||
position: D::from_summary(self.tree.summary(), cx),
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
let mut descending = false;
|
||||
while !self.stack.is_empty() {
|
||||
if let Some(StackEntry { position, .. }) = self.stack.iter().rev().nth(1) {
|
||||
self.position = position.clone();
|
||||
} else {
|
||||
self.position = D::default();
|
||||
}
|
||||
|
||||
let mut entry = self.stack.last_mut().unwrap();
|
||||
if !descending {
|
||||
if entry.index == 0 {
|
||||
self.stack.pop();
|
||||
continue;
|
||||
} else {
|
||||
entry.index -= 1;
|
||||
}
|
||||
}
|
||||
|
||||
for summary in &entry.tree.0.child_summaries()[..entry.index] {
|
||||
self.position.add_summary(summary, cx);
|
||||
}
|
||||
entry.position = self.position.clone();
|
||||
|
||||
descending = filter_node(&entry.tree.0.child_summaries()[entry.index]);
|
||||
match entry.tree.0.as_ref() {
|
||||
Node::Internal { child_trees, .. } => {
|
||||
if descending {
|
||||
if let ChildTree::Loaded { tree } = &child_trees[entry.index] {
|
||||
self.stack.push(StackEntry {
|
||||
position: D::default(),
|
||||
tree,
|
||||
index: tree.0.child_summaries().len() - 1,
|
||||
});
|
||||
} else {
|
||||
descending = false;
|
||||
}
|
||||
}
|
||||
}
|
||||
Node::Leaf { .. } => {
|
||||
if descending {
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub fn next(&mut self, cx: &<T::Summary as Summary>::Context) {
|
||||
self.next_internal(|_| true, cx)
|
||||
}
|
||||
|
||||
fn next_internal<F>(&mut self, mut filter_node: F, cx: &<T::Summary as Summary>::Context)
|
||||
where
|
||||
F: FnMut(&T::Summary) -> bool,
|
||||
{
|
||||
let mut descend = false;
|
||||
|
||||
if self.stack.is_empty() {
|
||||
if !self.at_end {
|
||||
self.stack.push(StackEntry {
|
||||
tree: self.tree,
|
||||
index: 0,
|
||||
position: D::default(),
|
||||
});
|
||||
descend = true;
|
||||
}
|
||||
self.did_seek = true;
|
||||
}
|
||||
|
||||
while !self.stack.is_empty() {
|
||||
let new_subtree = {
|
||||
let entry = self.stack.last_mut().unwrap();
|
||||
match entry.tree.0.as_ref() {
|
||||
Node::Internal {
|
||||
child_trees,
|
||||
child_summaries,
|
||||
..
|
||||
} => {
|
||||
if !descend {
|
||||
entry.index += 1;
|
||||
entry.position = self.position.clone();
|
||||
}
|
||||
|
||||
while entry.index < child_summaries.len() {
|
||||
let next_summary = &child_summaries[entry.index];
|
||||
if filter_node(next_summary) && child_trees[entry.index].is_loaded() {
|
||||
break;
|
||||
} else {
|
||||
entry.index += 1;
|
||||
entry.position.add_summary(next_summary, cx);
|
||||
self.position.add_summary(next_summary, cx);
|
||||
}
|
||||
}
|
||||
|
||||
child_trees.get(entry.index)
|
||||
}
|
||||
Node::Leaf { item_summaries, .. } => {
|
||||
if !descend {
|
||||
let item_summary = &item_summaries[entry.index];
|
||||
entry.index += 1;
|
||||
entry.position.add_summary(item_summary, cx);
|
||||
self.position.add_summary(item_summary, cx);
|
||||
}
|
||||
|
||||
loop {
|
||||
if let Some(next_item_summary) = item_summaries.get(entry.index) {
|
||||
if filter_node(next_item_summary) {
|
||||
return;
|
||||
} else {
|
||||
entry.index += 1;
|
||||
entry.position.add_summary(next_item_summary, cx);
|
||||
self.position.add_summary(next_item_summary, cx);
|
||||
}
|
||||
} else {
|
||||
break None;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
if let Some(subtree) = new_subtree {
|
||||
let subtree = if let ChildTree::Loaded { tree } = subtree {
|
||||
tree
|
||||
} else {
|
||||
unreachable!()
|
||||
};
|
||||
descend = true;
|
||||
self.stack.push(StackEntry {
|
||||
tree: subtree,
|
||||
index: 0,
|
||||
position: self.position.clone(),
|
||||
});
|
||||
} else {
|
||||
descend = false;
|
||||
self.stack.pop();
|
||||
}
|
||||
}
|
||||
|
||||
self.at_end = self.stack.is_empty();
|
||||
debug_assert!(self.stack.is_empty() || self.stack.last().unwrap().tree.0.is_leaf());
|
||||
}
|
||||
|
||||
fn assert_did_seek(&self) {
|
||||
assert!(
|
||||
self.did_seek,
|
||||
"Must call `seek`, `next` or `prev` before calling this method"
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a, T, D> Cursor<'a, T, D>
|
||||
where
|
||||
T: Item,
|
||||
D: Dimension<'a, T::Summary>,
|
||||
{
|
||||
pub fn seek<Target>(
|
||||
&mut self,
|
||||
pos: &Target,
|
||||
bias: Bias,
|
||||
cx: &<T::Summary as Summary>::Context,
|
||||
) -> bool
|
||||
where
|
||||
Target: SeekTarget<'a, T::Summary, D>,
|
||||
{
|
||||
self.reset();
|
||||
self.seek_internal(pos, bias, &mut (), cx)
|
||||
}
|
||||
|
||||
pub fn seek_forward<Target>(
|
||||
&mut self,
|
||||
pos: &Target,
|
||||
bias: Bias,
|
||||
cx: &<T::Summary as Summary>::Context,
|
||||
) -> bool
|
||||
where
|
||||
Target: SeekTarget<'a, T::Summary, D>,
|
||||
{
|
||||
self.seek_internal(pos, bias, &mut (), cx)
|
||||
}
|
||||
|
||||
pub fn slice<Target>(
|
||||
&mut self,
|
||||
end: &Target,
|
||||
bias: Bias,
|
||||
cx: &<T::Summary as Summary>::Context,
|
||||
) -> Sequence<T>
|
||||
where
|
||||
Target: SeekTarget<'a, T::Summary, D>,
|
||||
{
|
||||
let mut slice = SliceSeekAggregate {
|
||||
tree: Sequence::new(),
|
||||
leaf_items: ArrayVec::new(),
|
||||
leaf_item_summaries: ArrayVec::new(),
|
||||
leaf_summary: T::Summary::default(),
|
||||
};
|
||||
self.seek_internal(end, bias, &mut slice, cx);
|
||||
slice.tree
|
||||
}
|
||||
|
||||
pub fn suffix(&mut self, cx: &<T::Summary as Summary>::Context) -> Sequence<T> {
|
||||
self.slice(&End::new(), Bias::Right, cx)
|
||||
}
|
||||
|
||||
pub fn summary<Target, Output>(
|
||||
&mut self,
|
||||
end: &Target,
|
||||
bias: Bias,
|
||||
cx: &<T::Summary as Summary>::Context,
|
||||
) -> Output
|
||||
where
|
||||
Target: SeekTarget<'a, T::Summary, D>,
|
||||
Output: Dimension<'a, T::Summary>,
|
||||
{
|
||||
let mut summary = SummarySeekAggregate(Output::default());
|
||||
self.seek_internal(end, bias, &mut summary, cx);
|
||||
summary.0
|
||||
}
|
||||
|
||||
fn seek_internal(
|
||||
&mut self,
|
||||
target: &dyn SeekTarget<'a, T::Summary, D>,
|
||||
bias: Bias,
|
||||
aggregate: &mut dyn SeekAggregate<'a, T>,
|
||||
cx: &<T::Summary as Summary>::Context,
|
||||
) -> bool {
|
||||
debug_assert!(
|
||||
target.seek_cmp(&self.position, cx) >= Ordering::Equal,
|
||||
"cannot seek backward from {:?} to {:?}",
|
||||
self.position,
|
||||
target
|
||||
);
|
||||
|
||||
if !self.did_seek {
|
||||
self.did_seek = true;
|
||||
self.stack.push(StackEntry {
|
||||
tree: self.tree,
|
||||
index: 0,
|
||||
position: Default::default(),
|
||||
});
|
||||
}
|
||||
|
||||
let mut ascending = false;
|
||||
'outer: while let Some(entry) = self.stack.last_mut() {
|
||||
match *entry.tree.0 {
|
||||
Node::Internal {
|
||||
ref child_summaries,
|
||||
ref child_trees,
|
||||
..
|
||||
} => {
|
||||
if ascending {
|
||||
entry.index += 1;
|
||||
entry.position = self.position.clone();
|
||||
}
|
||||
|
||||
for (child_tree, child_summary) in child_trees[entry.index..]
|
||||
.iter()
|
||||
.zip(&child_summaries[entry.index..])
|
||||
{
|
||||
let mut child_end = self.position.clone();
|
||||
child_end.add_summary(child_summary, cx);
|
||||
|
||||
let comparison = target.seek_cmp(&child_end, cx);
|
||||
if comparison == Ordering::Greater
|
||||
|| (comparison == Ordering::Equal && bias == Bias::Right)
|
||||
|| !child_tree.is_loaded()
|
||||
{
|
||||
self.position = child_end;
|
||||
aggregate.push_tree(child_tree, child_summary, cx);
|
||||
entry.index += 1;
|
||||
entry.position = self.position.clone();
|
||||
} else {
|
||||
let child_tree = if let ChildTree::Loaded { tree } = child_tree {
|
||||
tree
|
||||
} else {
|
||||
unreachable!()
|
||||
};
|
||||
self.stack.push(StackEntry {
|
||||
tree: child_tree,
|
||||
index: 0,
|
||||
position: self.position.clone(),
|
||||
});
|
||||
ascending = false;
|
||||
continue 'outer;
|
||||
}
|
||||
}
|
||||
}
|
||||
Node::Leaf {
|
||||
ref items,
|
||||
ref item_summaries,
|
||||
..
|
||||
} => {
|
||||
aggregate.begin_leaf();
|
||||
|
||||
for (item, item_summary) in items[entry.index..]
|
||||
.iter()
|
||||
.zip(&item_summaries[entry.index..])
|
||||
{
|
||||
let mut child_end = self.position.clone();
|
||||
child_end.add_summary(item_summary, cx);
|
||||
|
||||
let comparison = target.seek_cmp(&child_end, cx);
|
||||
if comparison == Ordering::Greater
|
||||
|| (comparison == Ordering::Equal && bias == Bias::Right)
|
||||
{
|
||||
self.position = child_end;
|
||||
aggregate.push_item(item, item_summary, cx);
|
||||
entry.index += 1;
|
||||
} else {
|
||||
aggregate.end_leaf(cx);
|
||||
break 'outer;
|
||||
}
|
||||
}
|
||||
|
||||
aggregate.end_leaf(cx);
|
||||
}
|
||||
}
|
||||
|
||||
self.stack.pop();
|
||||
ascending = true;
|
||||
}
|
||||
|
||||
self.at_end = self.stack.is_empty();
|
||||
debug_assert!(self.stack.is_empty() || self.stack.last().unwrap().tree.0.is_leaf());
|
||||
|
||||
let mut end = self.position.clone();
|
||||
if bias == Bias::Left {
|
||||
if let Some(summary) = self.item_summary() {
|
||||
end.add_summary(summary, cx);
|
||||
}
|
||||
}
|
||||
|
||||
target.seek_cmp(&end, cx) == Ordering::Equal
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a, T: Item> Iter<'a, T> {
|
||||
pub(crate) fn new(tree: &'a Sequence<T>) -> Self {
|
||||
Self {
|
||||
tree,
|
||||
stack: Default::default(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a, T: Item> Iterator for Iter<'a, T> {
|
||||
type Item = &'a T;
|
||||
|
||||
fn next(&mut self) -> Option<Self::Item> {
|
||||
let mut descend = false;
|
||||
|
||||
if self.stack.is_empty() {
|
||||
self.stack.push(StackEntry {
|
||||
tree: self.tree,
|
||||
index: 0,
|
||||
position: (),
|
||||
});
|
||||
descend = true;
|
||||
}
|
||||
|
||||
while !self.stack.is_empty() {
|
||||
let new_subtree = {
|
||||
let entry = self.stack.last_mut().unwrap();
|
||||
match entry.tree.0.as_ref() {
|
||||
Node::Internal { child_trees, .. } => {
|
||||
if !descend {
|
||||
entry.index += 1;
|
||||
}
|
||||
while entry.index < child_trees.len() {
|
||||
if child_trees[entry.index].is_loaded() {
|
||||
break;
|
||||
}
|
||||
entry.index += 1;
|
||||
}
|
||||
|
||||
child_trees.get(entry.index)
|
||||
}
|
||||
Node::Leaf { items, .. } => {
|
||||
if !descend {
|
||||
entry.index += 1;
|
||||
}
|
||||
|
||||
if let Some(next_item) = items.get(entry.index) {
|
||||
return Some(next_item);
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
if let Some(subtree) = new_subtree {
|
||||
let subtree = if let ChildTree::Loaded { tree } = subtree {
|
||||
tree
|
||||
} else {
|
||||
unreachable!()
|
||||
};
|
||||
descend = true;
|
||||
self.stack.push(StackEntry {
|
||||
tree: subtree,
|
||||
index: 0,
|
||||
position: (),
|
||||
});
|
||||
} else {
|
||||
descend = false;
|
||||
self.stack.pop();
|
||||
}
|
||||
}
|
||||
|
||||
None
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a, T, S, D> Iterator for Cursor<'a, T, D>
|
||||
where
|
||||
T: Item<Summary = S>,
|
||||
S: Summary<Context = ()>,
|
||||
D: Dimension<'a, T::Summary>,
|
||||
{
|
||||
type Item = &'a T;
|
||||
|
||||
fn next(&mut self) -> Option<Self::Item> {
|
||||
if !self.did_seek {
|
||||
self.next(&());
|
||||
}
|
||||
|
||||
if let Some(item) = self.item() {
|
||||
self.next(&());
|
||||
Some(item)
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub struct FilterCursor<'a, F, T: Item, D> {
|
||||
cursor: Cursor<'a, T, D>,
|
||||
filter_node: F,
|
||||
}
|
||||
|
||||
impl<'a, F, T, D> FilterCursor<'a, F, T, D>
|
||||
where
|
||||
F: FnMut(&T::Summary) -> bool,
|
||||
T: Item,
|
||||
D: Dimension<'a, T::Summary>,
|
||||
{
|
||||
pub fn new(tree: &'a Sequence<T>, filter_node: F) -> Self {
|
||||
let cursor = tree.cursor::<D>();
|
||||
Self {
|
||||
cursor,
|
||||
filter_node,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn start(&self) -> &D {
|
||||
self.cursor.start()
|
||||
}
|
||||
|
||||
pub fn end(&self, cx: &<T::Summary as Summary>::Context) -> D {
|
||||
self.cursor.end(cx)
|
||||
}
|
||||
|
||||
pub fn item(&self) -> Option<&'a T> {
|
||||
self.cursor.item()
|
||||
}
|
||||
|
||||
pub fn item_summary(&self) -> Option<&'a T::Summary> {
|
||||
self.cursor.item_summary()
|
||||
}
|
||||
|
||||
pub fn next(&mut self, cx: &<T::Summary as Summary>::Context) {
|
||||
self.cursor.next_internal(&mut self.filter_node, cx);
|
||||
}
|
||||
|
||||
pub fn prev(&mut self, cx: &<T::Summary as Summary>::Context) {
|
||||
self.cursor.prev_internal(&mut self.filter_node, cx);
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a, F, T, S, U> Iterator for FilterCursor<'a, F, T, U>
|
||||
where
|
||||
F: FnMut(&T::Summary) -> bool,
|
||||
T: Item<Summary = S>,
|
||||
S: Summary<Context = ()>, //Context for the summary must be unit type, as .next() doesn't take arguments
|
||||
U: Dimension<'a, T::Summary>,
|
||||
{
|
||||
type Item = &'a T;
|
||||
|
||||
fn next(&mut self) -> Option<Self::Item> {
|
||||
if !self.cursor.did_seek {
|
||||
self.next(&());
|
||||
}
|
||||
|
||||
if let Some(item) = self.item() {
|
||||
self.cursor.next_internal(&mut self.filter_node, &());
|
||||
Some(item)
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
trait SeekAggregate<'a, T: Item> {
|
||||
fn begin_leaf(&mut self);
|
||||
fn end_leaf(&mut self, cx: &<T::Summary as Summary>::Context);
|
||||
fn push_item(
|
||||
&mut self,
|
||||
item: &'a T,
|
||||
summary: &'a T::Summary,
|
||||
cx: &<T::Summary as Summary>::Context,
|
||||
);
|
||||
fn push_tree(
|
||||
&mut self,
|
||||
tree: &'a ChildTree<T>,
|
||||
summary: &'a T::Summary,
|
||||
cx: &<T::Summary as Summary>::Context,
|
||||
);
|
||||
}
|
||||
|
||||
struct SliceSeekAggregate<T: Item> {
|
||||
tree: Sequence<T>,
|
||||
leaf_items: ArrayVec<T, { 2 * TREE_BASE }>,
|
||||
leaf_item_summaries: ArrayVec<T::Summary, { 2 * TREE_BASE }>,
|
||||
leaf_summary: T::Summary,
|
||||
}
|
||||
|
||||
struct SummarySeekAggregate<D>(D);
|
||||
|
||||
impl<'a, T: Item> SeekAggregate<'a, T> for () {
|
||||
fn begin_leaf(&mut self) {}
|
||||
fn end_leaf(&mut self, _: &<T::Summary as Summary>::Context) {}
|
||||
fn push_item(&mut self, _: &T, _: &T::Summary, _: &<T::Summary as Summary>::Context) {}
|
||||
fn push_tree(
|
||||
&mut self,
|
||||
_: &ChildTree<T>,
|
||||
_: &T::Summary,
|
||||
_: &<T::Summary as Summary>::Context,
|
||||
) {
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a, T: Item> SeekAggregate<'a, T> for SliceSeekAggregate<T> {
|
||||
fn begin_leaf(&mut self) {}
|
||||
fn end_leaf(&mut self, cx: &<T::Summary as Summary>::Context) {
|
||||
self.tree.append(
|
||||
Sequence(Arc::new(Node::Leaf {
|
||||
saved_id: SavedId::default(),
|
||||
summary: mem::take(&mut self.leaf_summary),
|
||||
items: mem::take(&mut self.leaf_items),
|
||||
item_summaries: mem::take(&mut self.leaf_item_summaries),
|
||||
})),
|
||||
cx,
|
||||
);
|
||||
}
|
||||
fn push_item(&mut self, item: &T, summary: &T::Summary, cx: &<T::Summary as Summary>::Context) {
|
||||
self.leaf_items.push(item.clone());
|
||||
self.leaf_item_summaries.push(summary.clone());
|
||||
Summary::add_summary(&mut self.leaf_summary, summary, cx);
|
||||
}
|
||||
fn push_tree(
|
||||
&mut self,
|
||||
tree: &ChildTree<T>,
|
||||
summary: &T::Summary,
|
||||
cx: &<T::Summary as Summary>::Context,
|
||||
) {
|
||||
self.tree.append_internal(tree.clone(), summary.clone(), cx);
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a, T: Item, D> SeekAggregate<'a, T> for SummarySeekAggregate<D>
|
||||
where
|
||||
D: Dimension<'a, T::Summary>,
|
||||
{
|
||||
fn begin_leaf(&mut self) {}
|
||||
fn end_leaf(&mut self, _: &<T::Summary as Summary>::Context) {}
|
||||
fn push_item(&mut self, _: &T, summary: &'a T::Summary, cx: &<T::Summary as Summary>::Context) {
|
||||
self.0.add_summary(summary, cx);
|
||||
}
|
||||
fn push_tree(
|
||||
&mut self,
|
||||
_: &ChildTree<T>,
|
||||
summary: &'a T::Summary,
|
||||
cx: &<T::Summary as Summary>::Context,
|
||||
) {
|
||||
self.0.add_summary(summary, cx);
|
||||
}
|
||||
}
|
||||
594
crates/crdb/src/btree/map.rs
Normal file
@@ -0,0 +1,594 @@
|
||||
use super::{
|
||||
Bias, Dimension, Edit, Item, KeyedItem, KvStore, SavedId, SeekTarget, Sequence, Summary,
|
||||
};
|
||||
use anyhow::Result;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::{
|
||||
cmp::Ordering,
|
||||
collections::BTreeMap,
|
||||
fmt::{self, Debug},
|
||||
ops::{Bound, RangeBounds},
|
||||
};
|
||||
|
||||
#[derive(Clone, PartialEq, Eq)]
|
||||
pub struct Map<K, V>(Sequence<MapEntry<K, V>>)
|
||||
where
|
||||
K: Clone + Debug + Ord,
|
||||
V: Clone + Debug;
|
||||
|
||||
#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
|
||||
pub struct MapEntry<K, V> {
|
||||
key: K,
|
||||
value: V,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Serialize, Deserialize)]
|
||||
pub struct MapKey<K>(Option<K>);
|
||||
|
||||
impl<K> Default for MapKey<K> {
    // The empty summary: no key seen yet.
    fn default() -> Self {
        Self(None)
    }
}
|
||||
|
||||
/// Borrowed counterpart of [`MapKey`], used as a cursor dimension so seeks
/// can compare keys without cloning them.
#[derive(Clone, Debug)]
pub struct MapKeyRef<'a, K>(Option<&'a K>);
|
||||
|
||||
impl<K> Default for MapKeyRef<'_, K> {
    // The empty dimension: no key traversed yet.
    fn default() -> Self {
        Self(None)
    }
}
|
||||
|
||||
/// An ordered, persistent set, represented as a [`Map`] with unit values.
#[derive(Clone)]
pub struct Set<K>(Map<K, ()>)
where
    K: Clone + Debug + Ord;
|
||||
|
||||
impl<K, V> Map<K, V>
where
    K: Clone + Debug + Ord,
    V: Clone + Debug,
{
    /// Returns true when both maps share the same underlying root node.
    pub fn ptr_eq(this: &Self, other: &Self) -> bool {
        Sequence::ptr_eq(&this.0, &other.0)
    }

    /// Builds a map from entries that are already sorted by key.
    /// NOTE(review): ordering is not validated here — unsorted input would
    /// produce a malformed tree; confirm all callers pass sorted entries.
    pub fn from_ordered_entries(entries: impl IntoIterator<Item = (K, V)>) -> Self {
        let tree = Sequence::from_iter(
            entries
                .into_iter()
                .map(|(key, value)| MapEntry { key, value }),
            &(),
        );
        Self(tree)
    }

    /// Loads only the saved root node; descendants are paged in lazily by
    /// the other `load*` methods.
    pub async fn load_root(id: SavedId, kv: &dyn KvStore) -> Result<Self>
    where
        K: Serialize + for<'de> Deserialize<'de>,
        V: Serialize + for<'de> Deserialize<'de>,
    {
        Ok(Self(Sequence::load_root(id, kv).await?))
    }

    /// Loads the whole map eagerly from the key-value store.
    pub async fn load_all(id: SavedId, kv: &dyn KvStore) -> Result<Self>
    where
        K: Serialize + for<'de> Deserialize<'de>,
        V: Serialize + for<'de> Deserialize<'de>,
    {
        let mut sequence = Sequence::load_root(id, kv).await?;
        sequence.load(kv, &(), |_| true).await?;
        Ok(Self(sequence))
    }

    /// Pages in only the subtrees whose key range could contain `key`, then
    /// returns the value for `key`, if present.
    pub async fn load(&mut self, key: &K, kv: &dyn KvStore) -> Result<Option<&V>>
    where
        K: Serialize + for<'de> Deserialize<'de>,
        V: Serialize + for<'de> Deserialize<'de>,
    {
        self.0
            .load(kv, &(), |probe| {
                // A subtree covers the half-open key interval
                // (keys before it, its max key]; descend only when `key`
                // falls inside that interval.
                let key_range = (
                    Bound::Excluded(probe.start.0.as_ref()),
                    Bound::Included(probe.summary.0.as_ref()),
                );
                key_range.contains(&Some(key))
            })
            .await?;
        Ok(self.get(key))
    }

    /// Pages in every subtree containing keys >= `start`, then iterates
    /// entries from `start` onward.
    pub async fn load_from(
        &mut self,
        start: &K,
        kv: &dyn KvStore,
    ) -> Result<impl Iterator<Item = (&K, &V)>>
    where
        K: Serialize + for<'de> Deserialize<'de>,
        V: Serialize + for<'de> Deserialize<'de>,
    {
        self.0
            .load(kv, &(), |probe| {
                probe.start.0.as_ref() >= Some(&start) || probe.summary.0.as_ref() >= Some(&start)
            })
            .await?;
        Ok(self.iter_from(start))
    }

    /// Inserts (or replaces) `key` after paging in the subtree that would
    /// contain it, so the structural edit sees current data.
    pub async fn store(&mut self, key: K, value: V, kv: &dyn KvStore) -> Result<()>
    where
        K: Serialize + for<'de> Deserialize<'de>,
        V: Serialize + for<'de> Deserialize<'de>,
    {
        self.0
            .load(kv, &(), |probe| {
                let key_range = (
                    Bound::Excluded(probe.start.0.as_ref()),
                    Bound::Included(probe.summary.0.as_ref()),
                );
                key_range.contains(&Some(&key))
            })
            .await?;
        self.insert(key, value);
        Ok(())
    }

    /// Persists the map and returns the id of its saved root.
    pub async fn save(&self, kv: &dyn KvStore) -> Result<SavedId>
    where
        K: Serialize + for<'de> Deserialize<'de>,
        V: Serialize + for<'de> Deserialize<'de>,
    {
        self.0.save(kv).await
    }

    pub fn is_empty(&self) -> bool {
        self.0.is_empty()
    }

    /// Point lookup: seek to `key` and check for an exact match.
    pub fn get<'a>(&self, key: &'a K) -> Option<&V> {
        let mut cursor = self.0.cursor::<MapKeyRef<'_, K>>();
        cursor.seek(&MapKeyRef(Some(key)), Bias::Left, &());
        if let Some(item) = cursor.item() {
            if key == &item.key {
                Some(&item.value)
            } else {
                None
            }
        } else {
            None
        }
    }

    pub fn contains_key<'a>(&self, key: &'a K) -> bool {
        self.get(key).is_some()
    }

    /// Inserts `key`, replacing any existing entry with the same key.
    pub fn insert(&mut self, key: K, value: V) {
        self.0.insert_or_replace(MapEntry { key, value }, &());
    }

    /// Removes `key`, returning its value if it was present. Rebuilds the
    /// tree as prefix + suffix, skipping the removed entry.
    pub fn remove(&mut self, key: &K) -> Option<V> {
        let mut removed = None;
        let mut cursor = self.0.cursor::<MapKeyRef<'_, K>>();
        let key = MapKeyRef(Some(key));
        let mut new_tree = cursor.slice(&key, Bias::Left, &());
        if key.seek_cmp(&cursor.end(&()), &()) == Ordering::Equal {
            // Exact match: capture the value and step past the entry.
            removed = Some(cursor.item().unwrap().value.clone());
            cursor.next(&());
        }
        new_tree.append(cursor.suffix(&()), &());
        drop(cursor);
        self.0 = new_tree;
        removed
    }

    /// Removes all entries in `[start, end)` as defined by the two seek
    /// targets (which may match key prefixes, not just exact keys).
    pub fn remove_range(&mut self, start: &impl MapSeekTarget<K>, end: &impl MapSeekTarget<K>) {
        let start = MapSeekTargetAdaptor(start);
        let end = MapSeekTargetAdaptor(end);
        let mut cursor = self.0.cursor::<MapKeyRef<'_, K>>();
        let mut new_tree = cursor.slice(&start, Bias::Left, &());
        cursor.seek(&end, Bias::Left, &());
        new_tree.append(cursor.suffix(&()), &());
        drop(cursor);
        self.0 = new_tree;
    }

    /// Returns the key-value pair with the greatest key less than or equal to the given key.
    pub fn closest(&self, key: &K) -> Option<(&K, &V)> {
        let mut cursor = self.0.cursor::<MapKeyRef<'_, K>>();
        let key = MapKeyRef(Some(key));
        // Seek just past `key`, then step back to the entry at or before it.
        cursor.seek(&key, Bias::Right, &());
        cursor.prev(&());
        cursor.item().map(|item| (&item.key, &item.value))
    }

    /// Iterates entries in ascending key order, starting at the first key
    /// >= `from`.
    pub fn iter_from<'a>(&self, from: &'a K) -> impl Iterator<Item = (&K, &V)> {
        let mut cursor = self.0.cursor::<MapKeyRef<'_, K>>();
        let from_key = MapKeyRef(Some(from));
        cursor.seek(&from_key, Bias::Left, &());

        cursor
            .into_iter()
            .map(|map_entry| (&map_entry.key, &map_entry.value))
    }

    /// Applies `f` to the value at `key` (if present), rebuilding the tree
    /// around the single modified entry. Returns `f`'s result.
    pub fn update<F, T>(&mut self, key: &K, f: F) -> Option<T>
    where
        F: FnOnce(&mut V) -> T,
    {
        let mut cursor = self.0.cursor::<MapKeyRef<'_, K>>();
        let key = MapKeyRef(Some(key));
        let mut new_tree = cursor.slice(&key, Bias::Left, &());
        let mut result = None;
        if key.seek_cmp(&cursor.end(&()), &()) == Ordering::Equal {
            let mut updated = cursor.item().unwrap().clone();
            result = Some(f(&mut updated.value));
            new_tree.push(updated, &());
            cursor.next(&());
        }
        new_tree.append(cursor.suffix(&()), &());
        drop(cursor);
        self.0 = new_tree;
        result
    }

    /// Keeps only the entries for which `predicate` returns true, by
    /// rebuilding the sequence in a single ordered pass.
    pub fn retain<F: FnMut(&K, &V) -> bool>(&mut self, mut predicate: F) {
        let mut new_map = Sequence::<MapEntry<K, V>>::default();

        let mut cursor = self.0.cursor::<MapKeyRef<'_, K>>();
        cursor.next(&());
        while let Some(item) = cursor.item() {
            if predicate(&item.key, &item.value) {
                new_map.push(item.clone(), &());
            }
            cursor.next(&());
        }
        drop(cursor);

        self.0 = new_map;
    }

    /// Iterates all entries in ascending key order.
    pub fn iter(&self) -> impl Iterator<Item = (&K, &V)> + '_ {
        self.0.iter().map(|entry| (&entry.key, &entry.value))
    }

    /// Iterates all values in ascending key order.
    pub fn values(&self) -> impl Iterator<Item = &V> + '_ {
        self.0.iter().map(|entry| &entry.value)
    }

    /// Merges `other` into `self`; entries from `other` win on key
    /// collisions (applied as a batched edit).
    pub fn insert_tree(&mut self, other: Map<K, V>) {
        let edits = other
            .iter()
            .map(|(key, value)| {
                Edit::Insert(MapEntry {
                    key: key.to_owned(),
                    value: value.to_owned(),
                })
            })
            .collect();

        self.0.edit(edits, &());
    }
}
|
||||
|
||||
impl<K, V> Into<BTreeMap<K, V>> for &Map<K, V>
|
||||
where
|
||||
K: Clone + Debug + Ord,
|
||||
V: Clone + Debug,
|
||||
{
|
||||
fn into(self) -> BTreeMap<K, V> {
|
||||
self.iter()
|
||||
.map(|(replica_id, count)| (replica_id.clone(), count.clone()))
|
||||
.collect()
|
||||
}
|
||||
}
|
||||
|
||||
impl<K, V> From<&BTreeMap<K, V>> for Map<K, V>
|
||||
where
|
||||
K: Clone + Debug + Ord,
|
||||
V: Clone + Debug,
|
||||
{
|
||||
fn from(value: &BTreeMap<K, V>) -> Self {
|
||||
Map::from_ordered_entries(value.into_iter().map(|(k, v)| (k.clone(), v.clone())))
|
||||
}
|
||||
}
|
||||
|
||||
impl<K, V> Debug for Map<K, V>
where
    K: Clone + Debug + Ord,
    V: Clone + Debug,
{
    // Renders like a standard map: `{key: value, ...}`.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_map().entries(self.iter()).finish()
    }
}
|
||||
|
||||
impl<T> Debug for Set<T>
where
    T: Clone + Debug + Ord,
{
    // Renders like a standard set: `{key, ...}`.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_set().entries(self.iter()).finish()
    }
}
|
||||
|
||||
/// Bridges a user-supplied [`MapSeekTarget`] to the tree's [`SeekTarget`]
/// trait so custom targets (e.g. key prefixes) can drive a cursor.
#[derive(Debug)]
struct MapSeekTargetAdaptor<'a, T>(&'a T);
|
||||
|
||||
impl<'a, K: Debug + Clone + Ord, T: MapSeekTarget<K>> SeekTarget<'a, MapKey<K>, MapKeyRef<'a, K>>
    for MapSeekTargetAdaptor<'_, T>
{
    fn seek_cmp(&self, cursor_location: &MapKeyRef<K>, _: &()) -> Ordering {
        if let Some(key) = &cursor_location.0 {
            MapSeekTarget::cmp_cursor(self.0, key)
        } else {
            // An empty cursor location (no key traversed yet) sorts before
            // every target, so the target compares greater.
            Ordering::Greater
        }
    }
}
|
||||
|
||||
/// A seek position within a [`Map`], compared against stored keys. Lets
/// callers seek to things other than exact keys (e.g. all descendants of a
/// path — see the tests).
pub trait MapSeekTarget<K>: Debug {
    fn cmp_cursor(&self, cursor_location: &K) -> Ordering;
}
|
||||
|
||||
// Every ordered key is trivially its own seek target.
impl<K: Debug + Ord> MapSeekTarget<K> for K {
    fn cmp_cursor(&self, cursor_location: &K) -> Ordering {
        self.cmp(cursor_location)
    }
}
|
||||
|
||||
impl<K, V> Default for Map<K, V>
where
    K: Clone + Debug + Ord,
    V: Clone + Debug,
{
    // An empty map is an empty sequence.
    fn default() -> Self {
        Self(Default::default())
    }
}
|
||||
|
||||
impl<K, V> Item for MapEntry<K, V>
where
    K: Clone + Debug + Ord,
    V: Clone,
{
    type Summary = MapKey<K>;

    // An entry's summary is simply its key, so a subtree's summary is its
    // maximum key (see `MapKey::add_summary`).
    fn summary(&self) -> Self::Summary {
        self.key()
    }
}
|
||||
|
||||
impl<K, V> KeyedItem for MapEntry<K, V>
where
    K: Clone + Debug + Ord,
    V: Clone,
{
    type Key = MapKey<K>;

    fn key(&self) -> Self::Key {
        MapKey(Some(self.key.clone()))
    }
}
|
||||
|
||||
impl<K> Summary for MapKey<K>
where
    K: Clone + Debug,
{
    type Context = ();

    // Entries are stored in ascending key order, so the rightmost summary
    // (the last one added) is the subtree's maximum key.
    fn add_summary(&mut self, summary: &Self, _: &()) {
        *self = summary.clone()
    }
}
|
||||
|
||||
impl<'a, K> Dimension<'a, MapKey<K>> for MapKeyRef<'a, K>
where
    K: Clone + Debug + Ord,
{
    // Track the most recently traversed key by reference (no clone).
    fn add_summary(&mut self, summary: &'a MapKey<K>, _: &()) {
        self.0 = summary.0.as_ref();
    }
}
|
||||
|
||||
impl<'a, K> SeekTarget<'a, MapKey<K>, MapKeyRef<'a, K>> for MapKeyRef<'_, K>
where
    K: Clone + Debug + Ord,
{
    // `None` (no key) sorts before every `Some`, matching MapKey's ordering.
    fn seek_cmp(&self, cursor_location: &MapKeyRef<K>, _: &()) -> Ordering {
        Ord::cmp(&self.0, &cursor_location.0)
    }
}
|
||||
|
||||
impl<K> Default for Set<K>
where
    K: Clone + Debug + Ord,
{
    // An empty set is an empty map.
    fn default() -> Self {
        Self(Default::default())
    }
}
|
||||
|
||||
impl<K> Set<K>
where
    K: Clone + Debug + Ord,
{
    /// Builds a set from keys that are already sorted (same precondition as
    /// `Map::from_ordered_entries`).
    pub fn from_ordered_entries(entries: impl IntoIterator<Item = K>) -> Self {
        Self(Map::from_ordered_entries(
            entries.into_iter().map(|key| (key, ())),
        ))
    }

    pub fn insert(&mut self, key: K) {
        self.0.insert(key, ());
    }

    pub fn contains(&self, key: &K) -> bool {
        self.0.get(key).is_some()
    }

    /// Iterates keys in ascending order.
    pub fn iter(&self) -> impl Iterator<Item = &K> + '_ {
        self.0.iter().map(|(k, _)| k)
    }
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
|
||||
#[test]
|
||||
fn test_basic() {
|
||||
let mut map = Map::default();
|
||||
assert_eq!(map.iter().collect::<Vec<_>>(), vec![]);
|
||||
|
||||
map.insert(3, "c");
|
||||
assert_eq!(map.get(&3), Some(&"c"));
|
||||
assert_eq!(map.iter().collect::<Vec<_>>(), vec![(&3, &"c")]);
|
||||
|
||||
map.insert(1, "a");
|
||||
assert_eq!(map.get(&1), Some(&"a"));
|
||||
assert_eq!(map.iter().collect::<Vec<_>>(), vec![(&1, &"a"), (&3, &"c")]);
|
||||
|
||||
map.insert(2, "b");
|
||||
assert_eq!(map.get(&2), Some(&"b"));
|
||||
assert_eq!(map.get(&1), Some(&"a"));
|
||||
assert_eq!(map.get(&3), Some(&"c"));
|
||||
assert_eq!(
|
||||
map.iter().collect::<Vec<_>>(),
|
||||
vec![(&1, &"a"), (&2, &"b"), (&3, &"c")]
|
||||
);
|
||||
|
||||
assert_eq!(map.closest(&0), None);
|
||||
assert_eq!(map.closest(&1), Some((&1, &"a")));
|
||||
assert_eq!(map.closest(&10), Some((&3, &"c")));
|
||||
|
||||
map.remove(&2);
|
||||
assert_eq!(map.get(&2), None);
|
||||
assert_eq!(map.iter().collect::<Vec<_>>(), vec![(&1, &"a"), (&3, &"c")]);
|
||||
|
||||
assert_eq!(map.closest(&2), Some((&1, &"a")));
|
||||
|
||||
map.remove(&3);
|
||||
assert_eq!(map.get(&3), None);
|
||||
assert_eq!(map.iter().collect::<Vec<_>>(), vec![(&1, &"a")]);
|
||||
|
||||
map.remove(&1);
|
||||
assert_eq!(map.get(&1), None);
|
||||
assert_eq!(map.iter().collect::<Vec<_>>(), vec![]);
|
||||
|
||||
map.insert(4, "d");
|
||||
map.insert(5, "e");
|
||||
map.insert(6, "f");
|
||||
map.retain(|key, _| *key % 2 == 0);
|
||||
assert_eq!(map.iter().collect::<Vec<_>>(), vec![(&4, &"d"), (&6, &"f")]);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_iter_from() {
|
||||
let mut map = Map::default();
|
||||
|
||||
map.insert("a", 1);
|
||||
map.insert("b", 2);
|
||||
map.insert("baa", 3);
|
||||
map.insert("baaab", 4);
|
||||
map.insert("c", 5);
|
||||
|
||||
let result = map
|
||||
.iter_from(&"ba")
|
||||
.take_while(|(key, _)| key.starts_with(&"ba"))
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
assert_eq!(result.len(), 2);
|
||||
assert!(result.iter().find(|(k, _)| k == &&"baa").is_some());
|
||||
assert!(result.iter().find(|(k, _)| k == &&"baaab").is_some());
|
||||
|
||||
let result = map
|
||||
.iter_from(&"c")
|
||||
.take_while(|(key, _)| key.starts_with(&"c"))
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
assert_eq!(result.len(), 1);
|
||||
assert!(result.iter().find(|(k, _)| k == &&"c").is_some());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_insert_tree() {
|
||||
let mut map = Map::default();
|
||||
map.insert("a", 1);
|
||||
map.insert("b", 2);
|
||||
map.insert("c", 3);
|
||||
|
||||
let mut other = Map::default();
|
||||
other.insert("a", 2);
|
||||
other.insert("b", 2);
|
||||
other.insert("d", 4);
|
||||
|
||||
map.insert_tree(other);
|
||||
|
||||
assert_eq!(map.iter().count(), 4);
|
||||
assert_eq!(map.get(&"a"), Some(&2));
|
||||
assert_eq!(map.get(&"b"), Some(&2));
|
||||
assert_eq!(map.get(&"c"), Some(&3));
|
||||
assert_eq!(map.get(&"d"), Some(&4));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_remove_between_and_path_successor() {
|
||||
use std::path::{Path, PathBuf};
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct PathDescendants<'a>(&'a Path);
|
||||
|
||||
impl MapSeekTarget<PathBuf> for PathDescendants<'_> {
|
||||
fn cmp_cursor(&self, key: &PathBuf) -> Ordering {
|
||||
if key.starts_with(&self.0) {
|
||||
Ordering::Greater
|
||||
} else {
|
||||
self.0.cmp(key)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
let mut map = Map::default();
|
||||
|
||||
map.insert(PathBuf::from("a"), 1);
|
||||
map.insert(PathBuf::from("a/a"), 1);
|
||||
map.insert(PathBuf::from("b"), 2);
|
||||
map.insert(PathBuf::from("b/a/a"), 3);
|
||||
map.insert(PathBuf::from("b/a/a/a/b"), 4);
|
||||
map.insert(PathBuf::from("c"), 5);
|
||||
map.insert(PathBuf::from("c/a"), 6);
|
||||
|
||||
map.remove_range(
|
||||
&PathBuf::from("b/a"),
|
||||
&PathDescendants(&PathBuf::from("b/a")),
|
||||
);
|
||||
|
||||
assert_eq!(map.get(&PathBuf::from("a")), Some(&1));
|
||||
assert_eq!(map.get(&PathBuf::from("a/a")), Some(&1));
|
||||
assert_eq!(map.get(&PathBuf::from("b")), Some(&2));
|
||||
assert_eq!(map.get(&PathBuf::from("b/a/a")), None);
|
||||
assert_eq!(map.get(&PathBuf::from("b/a/a/a/b")), None);
|
||||
assert_eq!(map.get(&PathBuf::from("c")), Some(&5));
|
||||
assert_eq!(map.get(&PathBuf::from("c/a")), Some(&6));
|
||||
|
||||
map.remove_range(&PathBuf::from("c"), &PathDescendants(&PathBuf::from("c")));
|
||||
|
||||
assert_eq!(map.get(&PathBuf::from("a")), Some(&1));
|
||||
assert_eq!(map.get(&PathBuf::from("a/a")), Some(&1));
|
||||
assert_eq!(map.get(&PathBuf::from("b")), Some(&2));
|
||||
assert_eq!(map.get(&PathBuf::from("c")), None);
|
||||
assert_eq!(map.get(&PathBuf::from("c/a")), None);
|
||||
|
||||
map.remove_range(&PathBuf::from("a"), &PathDescendants(&PathBuf::from("a")));
|
||||
|
||||
assert_eq!(map.get(&PathBuf::from("a")), None);
|
||||
assert_eq!(map.get(&PathBuf::from("a/a")), None);
|
||||
assert_eq!(map.get(&PathBuf::from("b")), Some(&2));
|
||||
|
||||
map.remove_range(&PathBuf::from("b"), &PathDescendants(&PathBuf::from("b")));
|
||||
|
||||
assert_eq!(map.get(&PathBuf::from("b")), None);
|
||||
}
|
||||
}
|
||||
2847
crates/crdb/src/crdb.rs
Normal file
127
crates/crdb/src/dense_id.rs
Normal file
@@ -0,0 +1,127 @@
|
||||
use crate::btree;
|
||||
use lazy_static::lazy_static;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use smallvec::{smallvec, SmallVec};
|
||||
use std::iter;
|
||||
|
||||
lazy_static! {
|
||||
static ref MIN: DenseId = DenseId::min();
|
||||
static ref MAX: DenseId = DenseId::max();
|
||||
}
|
||||
|
||||
/// A dense, totally-ordered identifier: a variable-length sequence of u64
/// "digits" compared lexicographically. New ids can always be generated
/// between any two existing ids (see `between`).
#[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Serialize, Deserialize)]
pub struct DenseId(SmallVec<[u64; 4]>);
|
||||
|
||||
impl DenseId {
    /// The smallest id: a single minimum digit.
    pub fn min() -> Self {
        Self(smallvec![u64::MIN])
    }

    /// The largest id: a single maximum digit.
    pub fn max() -> Self {
        Self(smallvec![u64::MAX])
    }

    /// Static reference to the minimum id (avoids re-allocating it).
    pub fn min_ref() -> &'static Self {
        &*MIN
    }

    /// Static reference to the maximum id (avoids re-allocating it).
    pub fn max_ref() -> &'static Self {
        &*MAX
    }

    /// Overwrites `self` with a copy of `other`, reusing the buffer.
    pub fn assign(&mut self, other: &Self) {
        self.0.resize(other.0.len(), 0);
        self.0.copy_from_slice(&other.0);
    }

    /// Returns an id strictly between `lhs` and `rhs`. Both sides are
    /// conceptually padded with MIN/MAX digits so ids of different lengths
    /// compare digit by digit.
    pub fn between(lhs: &Self, rhs: &Self) -> Self {
        let lhs = lhs.0.iter().copied().chain(iter::repeat(u64::MIN));
        let rhs = rhs.0.iter().copied().chain(iter::repeat(u64::MAX));
        let mut location = SmallVec::new();
        for (lhs, rhs) in lhs.zip(rhs) {
            // Step only a tiny fraction (1/2^48) of the gap above `lhs`,
            // leaving most of the space free for later insertions. If the
            // gap is too small to step (mid == lhs), recurse into the next
            // digit.
            // NOTE(review): the `>> 48` bias looks deliberate but is worth
            // confirming against the intended id-allocation strategy.
            let mid = lhs + ((rhs.saturating_sub(lhs)) >> 48);
            location.push(mid);
            if mid > lhs {
                break;
            }
        }
        Self(location)
    }

    /// Number of digits in this id.
    pub fn len(&self) -> usize {
        self.0.len()
    }

    // Note: every constructor produces at least one digit, so this is
    // false for all ids built through the public API.
    pub fn is_empty(&self) -> bool {
        self.len() == 0
    }
}
|
||||
|
||||
impl Default for DenseId {
    // The default id is the minimum, matching the summary identity.
    fn default() -> Self {
        Self::min()
    }
}
|
||||
|
||||
impl btree::Item for DenseId {
    type Summary = DenseId;

    // An id summarizes itself; a subtree's summary is its maximum id
    // (see the Summary impl below).
    fn summary(&self) -> Self::Summary {
        self.clone()
    }
}
|
||||
|
||||
impl btree::KeyedItem for DenseId {
    type Key = DenseId;

    // An id is its own key.
    fn key(&self) -> Self::Key {
        self.clone()
    }
}
|
||||
|
||||
impl btree::Summary for DenseId {
    type Context = ();

    // Ids are stored in ascending order, so the last summary folded in is
    // the subtree's maximum.
    fn add_summary(&mut self, summary: &Self, _: &()) {
        self.assign(summary);
    }
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
use rand::prelude::*;
|
||||
use std::mem;
|
||||
|
||||
#[gpui::test(iterations = 100)]
|
||||
fn test_dense_id(mut rng: StdRng) {
|
||||
let mut lhs = Default::default();
|
||||
let mut rhs = Default::default();
|
||||
while lhs == rhs {
|
||||
lhs = DenseId(
|
||||
(0..rng.gen_range(1..=5))
|
||||
.map(|_| rng.gen_range(0..=100))
|
||||
.collect(),
|
||||
);
|
||||
rhs = DenseId(
|
||||
(0..rng.gen_range(1..=5))
|
||||
.map(|_| rng.gen_range(0..=100))
|
||||
.collect(),
|
||||
);
|
||||
}
|
||||
|
||||
if lhs > rhs {
|
||||
mem::swap(&mut lhs, &mut rhs);
|
||||
}
|
||||
|
||||
let middle = DenseId::between(&lhs, &rhs);
|
||||
assert!(middle > lhs);
|
||||
assert!(middle < rhs);
|
||||
for ix in 0..middle.0.len() - 1 {
|
||||
assert!(
|
||||
middle.0[ix] == *lhs.0.get(ix).unwrap_or(&0)
|
||||
|| middle.0[ix] == *rhs.0.get(ix).unwrap_or(&0)
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
130
crates/crdb/src/digest.rs
Normal file
@@ -0,0 +1,130 @@
|
||||
use std::{cmp, ops::Range};
|
||||
|
||||
use crate::{
|
||||
btree::{self, Bias},
|
||||
messages::Operation,
|
||||
OperationId,
|
||||
};
|
||||
use bromberg_sl2::HashMatrix;
|
||||
|
||||
/// Order-sensitive fingerprint of a run of operations: how many, their
/// combined (non-commutative) SL2 hash, and the greatest operation id seen.
#[derive(Clone, Default, PartialEq, Eq)]
pub struct Digest {
    pub count: usize,
    pub hash: HashMatrix,
    pub max_op_id: OperationId,
}
|
||||
|
||||
impl std::fmt::Debug for Digest {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
f.debug_struct("Digest")
|
||||
.field("count", &self.count)
|
||||
.field("hash", &self.hash.to_hex())
|
||||
.finish()
|
||||
}
|
||||
}
|
||||
|
||||
impl From<&'_ Operation> for Digest {
    // A single operation's digest: count 1, the hash of its id, and the id
    // itself as the maximum.
    fn from(op: &'_ Operation) -> Self {
        Self {
            count: 1,
            hash: op.id().digest(),
            max_op_id: op.id(),
        }
    }
}
|
||||
|
||||
impl btree::Item for Digest {
    type Summary = Digest;

    // A digest summarizes itself; subtree summaries combine via the
    // Summary impl below.
    fn summary(&self) -> Self::Summary {
        self.clone()
    }
}
|
||||
|
||||
impl btree::Summary for Digest {
    type Context = ();

    fn add_summary(&mut self, summary: &Self, _: &()) {
        self.count += summary.count;
        // SL2 matrix multiplication is associative but NOT commutative, so
        // concatenation order matters — summaries must be folded
        // left-to-right.
        self.hash = self.hash * summary.hash;
        // Takes the right-hand max unconditionally; assumes digests are
        // appended in ascending op-id order — TODO confirm.
        self.max_op_id = summary.max_op_id;
    }
}
|
||||
|
||||
// Dimension: cumulative operation count, used to seek by index.
impl btree::Dimension<'_, Digest> for usize {
    fn add_summary(&mut self, summary: &'_ Digest, _: &()) {
        *self += summary.count;
    }
}
|
||||
|
||||
// Dimension: cumulative (order-sensitive) hash of the traversed digests.
impl btree::Dimension<'_, Digest> for HashMatrix {
    fn add_summary(&mut self, summary: &'_ Digest, _: &()) {
        *self = *self * summary.hash;
    }
}
|
||||
|
||||
/// An ordered run of [`Digest`]s, each covering a contiguous block of
/// operations; supports range queries and splicing on digest boundaries.
pub struct DigestSequence {
    digests: btree::Sequence<Digest>,
}
|
||||
|
||||
impl DigestSequence {
|
||||
pub fn new() -> Self {
|
||||
Self {
|
||||
digests: Default::default(),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn items(&self) -> Vec<Digest> {
|
||||
self.digests.items(&())
|
||||
}
|
||||
|
||||
pub fn operation_count(&self) -> usize {
|
||||
self.digests.summary().count
|
||||
}
|
||||
|
||||
pub fn digest(&self, mut range: Range<usize>) -> Digest {
|
||||
range.start = cmp::min(range.start, self.digests.summary().count);
|
||||
range.end = cmp::min(range.end, self.digests.summary().count);
|
||||
let mut cursor = self.digests.cursor::<(usize, Digest)>();
|
||||
cursor.seek(&range.start, Bias::Right, &());
|
||||
assert_eq!(
|
||||
cursor.start().0,
|
||||
range.start,
|
||||
"start is not at the start of a digest range"
|
||||
);
|
||||
let mut hash: HashMatrix = cursor.summary(&range.end, Bias::Right, &());
|
||||
if range.end > cursor.start().0 {
|
||||
let digest = cursor.item().unwrap();
|
||||
hash = hash * digest.hash;
|
||||
cursor.next(&());
|
||||
}
|
||||
|
||||
Digest {
|
||||
count: cursor.start().0 - range.start,
|
||||
hash,
|
||||
max_op_id: cursor.start().1.max_op_id,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn splice(&mut self, mut range: Range<usize>, digests: impl IntoIterator<Item = Digest>) {
|
||||
let max_index = self.digests.summary().count;
|
||||
if range.start > max_index {
|
||||
panic!("range out of bounds");
|
||||
}
|
||||
range.end = cmp::min(range.end, max_index);
|
||||
|
||||
let mut cursor = self.digests.cursor::<usize>();
|
||||
let mut new_digests = cursor.slice(&range.start, Bias::Right, &());
|
||||
assert_eq!(*cursor.start(), range.start, "start is nedigest range");
|
||||
cursor.seek(&range.end, Bias::Right, &());
|
||||
assert_eq!(
|
||||
*cursor.start(),
|
||||
range.end,
|
||||
"end is not at the start of a digest range"
|
||||
);
|
||||
new_digests.extend(digests, &());
|
||||
new_digests.append(cursor.suffix(&()), &());
|
||||
drop(cursor);
|
||||
self.digests = new_digests;
|
||||
}
|
||||
}
|
||||
699
crates/crdb/src/history.rs
Normal file
@@ -0,0 +1,699 @@
|
||||
use std::{cmp::Ordering, iter, ops::RangeBounds};
|
||||
|
||||
use crate::{
|
||||
btree::{self, Bias, KvStore, SavedId},
|
||||
messages::Operation,
|
||||
OperationCount, OperationId, ReplicaId, RevisionId,
|
||||
};
|
||||
use anyhow::{anyhow, Result};
|
||||
use collections::{BTreeSet, Bound, HashMap, HashSet, VecDeque};
|
||||
use serde::{Deserialize, Serialize};
|
||||
use smallvec::SmallVec;
|
||||
|
||||
/// Serialized form of [`History`]: saved tree roots plus the next local
/// operation id, sufficient to lazily reload the full history.
#[derive(Serialize, Deserialize)]
pub struct SavedHistory {
    operations: SavedId,
    next_operation_id: OperationId,
    max_operation_ids: SavedId,
    deferred_operations: SavedId,
}
|
||||
|
||||
/// The operation log for one replica: all known operations by id, the next
/// id to assign locally, each replica's high-water mark, and operations
/// deferred until their parents arrive.
#[derive(Clone, Debug)]
pub struct History {
    operations: btree::Map<OperationId, Operation>,
    next_operation_id: OperationId,
    max_operation_ids: btree::Map<ReplicaId, OperationCount>,
    deferred_operations: btree::Sequence<DeferredOperation>,
}
|
||||
|
||||
impl History {
    /// Creates an empty history for the given replica.
    pub fn new(replica_id: ReplicaId) -> Self {
        Self {
            operations: Default::default(),
            next_operation_id: OperationId::new(replica_id),
            max_operation_ids: Default::default(),
            deferred_operations: Default::default(),
        }
    }

    /// Cheap identity check: true when all tree components share storage
    /// and the id counters are equal.
    pub fn ptr_eq(&self, other: &Self) -> bool {
        btree::Map::ptr_eq(&self.operations, &other.operations)
            && btree::Map::ptr_eq(&self.max_operation_ids, &other.max_operation_ids)
            && btree::Sequence::ptr_eq(&self.deferred_operations, &other.deferred_operations)
            && self.next_operation_id == other.next_operation_id
    }

    /// Restores a history from its saved roots. `max_operation_ids` is
    /// loaded eagerly; the other trees are paged in lazily on demand.
    pub async fn load(saved_history: SavedHistory, kv: &dyn KvStore) -> Result<Self> {
        Ok(Self {
            operations: btree::Map::load_root(saved_history.operations, kv).await?,
            next_operation_id: saved_history.next_operation_id,
            max_operation_ids: btree::Map::load_all(saved_history.max_operation_ids, kv).await?,
            deferred_operations: btree::Sequence::load_root(saved_history.deferred_operations, kv)
                .await?,
        })
    }

    /// Persists every component and returns their saved ids.
    pub async fn save(&self, kv: &dyn KvStore) -> Result<SavedHistory> {
        Ok(SavedHistory {
            operations: self.operations.save(kv).await?,
            next_operation_id: self.next_operation_id,
            max_operation_ids: self.max_operation_ids.save(kv).await?,
            deferred_operations: self.deferred_operations.save(kv).await?,
        })
    }

    pub fn replica_id(&self) -> ReplicaId {
        self.next_operation_id.replica_id
    }

    /// Returns a fresh operation id, advancing the local counter.
    pub fn next_operation_id(&mut self) -> OperationId {
        self.next_operation_id.tick()
    }

    /// Per-replica high-water marks (the version vector).
    pub fn max_operation_ids(&self) -> &btree::Map<ReplicaId, OperationCount> {
        &self.max_operation_ids
    }

    /// Records a (possibly remote) operation, advances the sender's
    /// high-water mark, and returns any previously deferred operations
    /// that were waiting on it and may now be applied.
    pub async fn insert(
        &mut self,
        operation: Operation,
        kv: &dyn KvStore,
    ) -> Result<SmallVec<[Operation; 1]>> {
        let op_id = operation.id();
        self.next_operation_id.observe(op_id);
        if self
            .max_operation_ids
            .load(&op_id.replica_id, kv)
            .await?
            .copied()
            < Some(op_id.operation_count)
        {
            self.max_operation_ids
                .insert(op_id.replica_id, op_id.operation_count);
        }
        self.operations.store(op_id, operation, kv).await?;

        // Page in the deferred entries keyed under this operation id...
        self.deferred_operations
            .load(kv, &(), |probe| {
                let key_range = (
                    Bound::Excluded(*probe.start),
                    Bound::Included(*probe.summary),
                );
                key_range.contains(&op_id)
            })
            .await?;
        // ...then carve them out of the sequence and hand them back.
        let mut cursor = self.deferred_operations.cursor::<OperationId>();
        let mut remaining = cursor.slice(&op_id, Bias::Left, &());
        let mut flushed = SmallVec::new();
        flushed.extend(
            cursor
                .slice(&op_id, Bias::Right, &())
                .iter()
                .map(|deferred| deferred.operation.clone()),
        );
        remaining.append(cursor.suffix(&()), &());
        drop(cursor);
        self.deferred_operations = remaining;
        Ok(flushed)
    }

    /// Records a locally generated operation. No deferred-operation flush
    /// is performed here — presumably local operations are created with
    /// their parents already applied (TODO confirm at call sites).
    pub fn insert_local(&mut self, operation: Operation) {
        let id = operation.id();
        self.next_operation_id.observe(operation.id());
        self.max_operation_ids
            .insert(id.replica_id, id.operation_count);
        self.operations.insert(id, operation);
    }

    /// Parks `operation` until its parents arrive: one deferred entry is
    /// stored per parent in the operation's parent revision.
    pub async fn defer(&mut self, operation: Operation, kv: &dyn KvStore) -> Result<()> {
        for parent in operation.parent().iter() {
            self.deferred_operations
                .load(kv, &(), |probe| {
                    let key_range = (
                        Bound::Excluded(*probe.start),
                        Bound::Included(*probe.summary),
                    );
                    key_range.contains(&operation.id())
                })
                .await?;
            self.deferred_operations.insert_or_replace(
                DeferredOperation {
                    parent: *parent,
                    operation: operation.clone(),
                },
                &(),
            );
        }
        Ok(())
    }

    /// An operation can be applied once every parent is present locally.
    pub async fn can_apply(&mut self, operation: &Operation, kv: &dyn KvStore) -> Result<bool> {
        for parent in operation.parent().iter() {
            if self.operations.load(parent, kv).await?.is_none() {
                return Ok(false);
            }
        }
        Ok(true)
    }

    /// True when this exact operation has already been recorded.
    pub async fn has_applied(&mut self, operation: &Operation, kv: &dyn KvStore) -> Result<bool> {
        Ok(self.operations.load(&operation.id(), kv).await?.is_some())
    }

    /// Fetches an operation by id, paging it in from `kv` if needed.
    pub async fn operation(
        &mut self,
        id: OperationId,
        kv: &dyn KvStore,
    ) -> Result<Option<&Operation>> {
        self.operations.load(&id, kv).await
    }

    /// Returns every operation newer than `version`, replica by replica.
    pub async fn operations_since(
        &mut self,
        version: &btree::Map<ReplicaId, OperationCount>,
        kv: &dyn KvStore,
    ) -> Result<Vec<Operation>> {
        let mut new_operations = Vec::new();
        for (replica_id, end_op_count) in self.max_operation_ids.iter() {
            // First id NOT covered by `version` for this replica.
            let start_op = OperationId {
                replica_id: *replica_id,
                operation_count: version
                    .get(&replica_id)
                    .map(|count| OperationCount(count.0 + 1))
                    .unwrap_or_default(),
            };
            let end_op = OperationId {
                replica_id: *replica_id,
                operation_count: *end_op_count,
            };

            new_operations.extend(
                self.operations
                    .load_from(&start_op, kv)
                    .await?
                    .take_while(|(op_id, _)| **op_id <= end_op)
                    .map(|(_, op)| op.clone()),
            );
        }
        Ok(new_operations)
    }

    /// Starts walking backwards from `revision_id` toward a common
    /// ancestor, seeding the frontier with each operation's parent
    /// revision and recording the traversed parent->child edges.
    pub async fn rewind(&mut self, revision_id: &RevisionId, kv: &dyn KvStore) -> Result<Rewind> {
        let mut frontier = VecDeque::new();
        let mut traversed = HashMap::default();
        for operation_id in revision_id.iter() {
            let parent_revision = self
                .operation(*operation_id, kv)
                .await?
                .ok_or_else(|| anyhow!("operation {:?} not found", operation_id))?
                .parent()
                .clone();
            traversed
                .entry(parent_revision.clone())
                .or_insert(BTreeSet::default())
                .insert((revision_id.clone(), *operation_id));
            frontier.push_back(Frontier {
                source: *operation_id,
                revision: parent_revision,
            });
        }

        Ok(Rewind {
            history: self,
            frontier,
            traversed,
            ancestors: Default::default(),
            reachable_len: revision_id.len(),
            start: revision_id.clone(),
        })
    }
}
|
||||
|
||||
/// One pending step in a rewind walk: the operation we came from and the
/// parent revision still to be explored.
struct Frontier {
    source: OperationId,
    revision: RevisionId,
}
|
||||
|
||||
/// In-progress backward walk through revision history (see
/// `History::rewind`). Tracks the search frontier, the parent->child edges
/// traversed so far (for later replay), and which source operations can
/// reach each revision.
pub struct Rewind<'a> {
    history: &'a mut History,
    frontier: VecDeque<Frontier>,
    traversed: HashMap<RevisionId, BTreeSet<(RevisionId, OperationId)>>,
    ancestors: HashMap<RevisionId, HashSet<OperationId>>,
    reachable_len: usize,
    start: RevisionId,
}
|
||||
|
||||
impl Rewind<'_> {
|
||||
pub async fn next(&mut self, kv: &dyn KvStore) -> Result<Option<RevisionId>> {
|
||||
while let Some(frontier) = self.frontier.pop_front() {
|
||||
let reachable_from = self.ancestors.entry(frontier.revision.clone()).or_default();
|
||||
reachable_from.insert(frontier.source);
|
||||
|
||||
if reachable_from.len() == self.reachable_len {
|
||||
self.reachable_len = frontier.revision.len();
|
||||
self.frontier.clear();
|
||||
self.ancestors.clear();
|
||||
self.start = frontier.revision.clone();
|
||||
for operation_id in frontier.revision.iter() {
|
||||
let parent_revision = self
|
||||
.history
|
||||
.operation(*operation_id, kv)
|
||||
.await?
|
||||
.expect("operation must exist")
|
||||
.parent()
|
||||
.clone();
|
||||
self.traversed
|
||||
.entry(parent_revision.clone())
|
||||
.or_default()
|
||||
.insert((frontier.revision.clone(), *operation_id));
|
||||
self.frontier.push_back(Frontier {
|
||||
source: *operation_id,
|
||||
revision: parent_revision,
|
||||
});
|
||||
}
|
||||
|
||||
return Ok(Some(frontier.revision));
|
||||
} else {
|
||||
for operation_id in frontier.revision.iter() {
|
||||
let parent_revision = self
|
||||
.history
|
||||
.operation(*operation_id, kv)
|
||||
.await?
|
||||
.expect("operation must exist")
|
||||
.parent()
|
||||
.clone();
|
||||
self.traversed
|
||||
.entry(parent_revision.clone())
|
||||
.or_default()
|
||||
.insert((frontier.revision.clone(), *operation_id));
|
||||
|
||||
self.frontier.push_back(Frontier {
|
||||
source: frontier.source,
|
||||
revision: parent_revision,
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Ok(None)
|
||||
}
|
||||
|
||||
pub fn replay(mut self) -> impl Iterator<Item = ReplayOperation> {
|
||||
let mut stack = VecDeque::new();
|
||||
if let Some(children) = self.traversed.remove(&self.start) {
|
||||
for (child_revision_id, operation_id) in children {
|
||||
stack.push_back(ReplayOperation {
|
||||
parent_revision_id: self.start.clone(),
|
||||
target_revision_id: child_revision_id.clone(),
|
||||
operation_id,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
iter::from_fn(move || {
|
||||
let entry = stack.pop_front()?;
|
||||
if let Some(children) = self.traversed.remove(&entry.target_revision_id) {
|
||||
for (child_revision, operation_id) in children {
|
||||
stack.push_back(ReplayOperation {
|
||||
parent_revision_id: entry.target_revision_id.clone(),
|
||||
target_revision_id: child_revision.clone(),
|
||||
operation_id,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
Some(entry)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Eq, PartialEq)]
|
||||
pub struct ReplayOperation {
|
||||
pub parent_revision_id: RevisionId,
|
||||
pub target_revision_id: RevisionId,
|
||||
pub operation_id: OperationId,
|
||||
}
|
||||
|
||||
impl std::fmt::Debug for ReplayOperation {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
write!(
|
||||
f,
|
||||
"{:?} -> {:?} via {:?}",
|
||||
self.parent_revision_id, self.target_revision_id, self.operation_id
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, Serialize, Deserialize)]
|
||||
struct DeferredOperation {
|
||||
parent: OperationId,
|
||||
operation: Operation,
|
||||
}
|
||||
|
||||
impl PartialEq for DeferredOperation {
|
||||
fn eq(&self, other: &Self) -> bool {
|
||||
self.parent == other.parent && self.operation.id() == other.operation.id()
|
||||
}
|
||||
}
|
||||
|
||||
impl Eq for DeferredOperation {}
|
||||
|
||||
impl PartialOrd for DeferredOperation {
|
||||
fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
|
||||
Some(self.cmp(other))
|
||||
}
|
||||
}
|
||||
|
||||
impl Ord for DeferredOperation {
|
||||
fn cmp(&self, other: &Self) -> Ordering {
|
||||
self.parent
|
||||
.cmp(&other.parent)
|
||||
.then_with(|| self.operation.id().cmp(&other.operation.id()))
|
||||
}
|
||||
}
|
||||
|
||||
impl btree::Item for DeferredOperation {
|
||||
type Summary = OperationId;
|
||||
|
||||
fn summary(&self) -> Self::Summary {
|
||||
self.parent
|
||||
}
|
||||
}
|
||||
|
||||
impl btree::KeyedItem for DeferredOperation {
|
||||
type Key = (OperationId, OperationId);
|
||||
|
||||
fn key(&self) -> Self::Key {
|
||||
(self.parent, self.operation.id())
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
use crate::btree::tests::InMemoryKv;
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_rewind() {
|
||||
let kv = InMemoryKv::default();
|
||||
let mut history = History::new(ReplicaId(0));
|
||||
let op1 = insert_operation(&[], &mut history, &kv).await;
|
||||
let op2 = insert_operation(&[op1.id()], &mut history, &kv).await;
|
||||
let op3 = insert_operation(&[op1.id()], &mut history, &kv).await;
|
||||
let op4 = insert_operation(&[op2.id(), op3.id()], &mut history, &kv).await;
|
||||
let op5 = insert_operation(&[op4.id()], &mut history, &kv).await;
|
||||
let op6 = insert_operation(&[op4.id()], &mut history, &kv).await;
|
||||
let op7 = insert_operation(&[op2.id()], &mut history, &kv).await;
|
||||
let op8 = insert_operation(&[op5.id()], &mut history, &kv).await;
|
||||
let op9 = insert_operation(&[op5.id()], &mut history, &kv).await;
|
||||
let op10 = insert_operation(&[op8.id()], &mut history, &kv).await;
|
||||
let op11 = insert_operation(&[op9.id(), op10.id()], &mut history, &kv).await;
|
||||
|
||||
assert_eq!(
|
||||
rewind(&[op4.id()], &mut history, &kv).await,
|
||||
&[
|
||||
(
|
||||
RevisionId::from([op2.id(), op3.id()].as_slice()),
|
||||
vec![ReplayOperation {
|
||||
parent_revision_id: RevisionId::from([op2.id(), op3.id()].as_slice()),
|
||||
target_revision_id: RevisionId::from([op4.id()].as_slice()),
|
||||
operation_id: op4.id(),
|
||||
}]
|
||||
),
|
||||
(
|
||||
RevisionId::from([op1.id()].as_slice()),
|
||||
vec![
|
||||
ReplayOperation {
|
||||
parent_revision_id: RevisionId::from([op1.id()].as_slice()),
|
||||
target_revision_id: RevisionId::from([op2.id(), op3.id()].as_slice()),
|
||||
operation_id: op2.id(),
|
||||
},
|
||||
ReplayOperation {
|
||||
parent_revision_id: RevisionId::from([op1.id()].as_slice()),
|
||||
target_revision_id: RevisionId::from([op2.id(), op3.id()].as_slice()),
|
||||
operation_id: op3.id(),
|
||||
}
|
||||
]
|
||||
),
|
||||
(
|
||||
RevisionId::from([].as_slice()),
|
||||
vec![ReplayOperation {
|
||||
parent_revision_id: RevisionId::from([].as_slice()),
|
||||
target_revision_id: RevisionId::from([op1.id()].as_slice()),
|
||||
operation_id: op1.id(),
|
||||
}]
|
||||
),
|
||||
]
|
||||
);
|
||||
assert_eq!(
|
||||
rewind(&[op6.id()], &mut history, &kv).await,
|
||||
&[
|
||||
(
|
||||
RevisionId::from([op4.id()].as_slice()),
|
||||
vec![ReplayOperation {
|
||||
parent_revision_id: RevisionId::from([op4.id()].as_slice()),
|
||||
target_revision_id: RevisionId::from([op6.id()].as_slice()),
|
||||
operation_id: op6.id(),
|
||||
}]
|
||||
),
|
||||
(
|
||||
RevisionId::from([op2.id(), op3.id()].as_slice()),
|
||||
vec![ReplayOperation {
|
||||
parent_revision_id: RevisionId::from([op2.id(), op3.id()].as_slice()),
|
||||
target_revision_id: RevisionId::from([op4.id()].as_slice()),
|
||||
operation_id: op4.id(),
|
||||
}]
|
||||
),
|
||||
(
|
||||
RevisionId::from([op1.id()].as_slice()),
|
||||
vec![
|
||||
ReplayOperation {
|
||||
parent_revision_id: RevisionId::from([op1.id()].as_slice()),
|
||||
target_revision_id: RevisionId::from([op2.id(), op3.id()].as_slice()),
|
||||
operation_id: op2.id(),
|
||||
},
|
||||
ReplayOperation {
|
||||
parent_revision_id: RevisionId::from([op1.id()].as_slice()),
|
||||
target_revision_id: RevisionId::from([op2.id(), op3.id()].as_slice()),
|
||||
operation_id: op3.id(),
|
||||
}
|
||||
]
|
||||
),
|
||||
(
|
||||
RevisionId::from([].as_slice()),
|
||||
vec![ReplayOperation {
|
||||
parent_revision_id: RevisionId::from([].as_slice()),
|
||||
target_revision_id: RevisionId::from([op1.id()].as_slice()),
|
||||
operation_id: op1.id(),
|
||||
}]
|
||||
),
|
||||
]
|
||||
);
|
||||
assert_eq!(
|
||||
rewind(&[op5.id(), op6.id()], &mut history, &kv).await,
|
||||
&[
|
||||
(
|
||||
RevisionId::from([op4.id()].as_slice()),
|
||||
vec![
|
||||
ReplayOperation {
|
||||
parent_revision_id: RevisionId::from([op4.id()].as_slice()),
|
||||
target_revision_id: RevisionId::from([op5.id(), op6.id()].as_slice()),
|
||||
operation_id: op5.id(),
|
||||
},
|
||||
ReplayOperation {
|
||||
parent_revision_id: RevisionId::from([op4.id()].as_slice()),
|
||||
target_revision_id: RevisionId::from([op5.id(), op6.id()].as_slice()),
|
||||
operation_id: op6.id(),
|
||||
}
|
||||
]
|
||||
),
|
||||
(
|
||||
RevisionId::from([op2.id(), op3.id()].as_slice()),
|
||||
vec![ReplayOperation {
|
||||
parent_revision_id: RevisionId::from([op2.id(), op3.id()].as_slice()),
|
||||
target_revision_id: RevisionId::from([op4.id()].as_slice()),
|
||||
operation_id: op4.id(),
|
||||
}]
|
||||
),
|
||||
(
|
||||
RevisionId::from([op1.id()].as_slice()),
|
||||
vec![
|
||||
ReplayOperation {
|
||||
parent_revision_id: RevisionId::from([op1.id()].as_slice()),
|
||||
target_revision_id: RevisionId::from([op2.id(), op3.id()].as_slice()),
|
||||
operation_id: op2.id(),
|
||||
},
|
||||
ReplayOperation {
|
||||
parent_revision_id: RevisionId::from([op1.id()].as_slice()),
|
||||
target_revision_id: RevisionId::from([op2.id(), op3.id()].as_slice()),
|
||||
operation_id: op3.id(),
|
||||
}
|
||||
]
|
||||
),
|
||||
(
|
||||
RevisionId::from([].as_slice()),
|
||||
vec![ReplayOperation {
|
||||
parent_revision_id: RevisionId::from([].as_slice()),
|
||||
target_revision_id: RevisionId::from([op1.id()].as_slice()),
|
||||
operation_id: op1.id(),
|
||||
}]
|
||||
),
|
||||
]
|
||||
);
|
||||
assert_eq!(
|
||||
rewind(&[op4.id(), op7.id()], &mut history, &kv).await,
|
||||
&[
|
||||
(
|
||||
RevisionId::from([op1.id()].as_slice()),
|
||||
vec![
|
||||
ReplayOperation {
|
||||
parent_revision_id: RevisionId::from([op1.id()].as_slice()),
|
||||
target_revision_id: RevisionId::from([op2.id()].as_slice()),
|
||||
operation_id: op2.id(),
|
||||
},
|
||||
ReplayOperation {
|
||||
parent_revision_id: RevisionId::from([op1.id()].as_slice()),
|
||||
target_revision_id: RevisionId::from([op2.id(), op3.id()].as_slice()),
|
||||
operation_id: op2.id(),
|
||||
},
|
||||
ReplayOperation {
|
||||
parent_revision_id: RevisionId::from([op1.id()].as_slice()),
|
||||
target_revision_id: RevisionId::from([op2.id(), op3.id()].as_slice()),
|
||||
operation_id: op3.id(),
|
||||
},
|
||||
ReplayOperation {
|
||||
parent_revision_id: RevisionId::from([op2.id()].as_slice()),
|
||||
target_revision_id: RevisionId::from([op4.id(), op7.id()].as_slice()),
|
||||
operation_id: op7.id(),
|
||||
},
|
||||
ReplayOperation {
|
||||
parent_revision_id: RevisionId::from([op2.id(), op3.id()].as_slice()),
|
||||
target_revision_id: RevisionId::from([op4.id(), op7.id()].as_slice()),
|
||||
operation_id: op4.id(),
|
||||
},
|
||||
]
|
||||
),
|
||||
(
|
||||
RevisionId::from([].as_slice()),
|
||||
vec![ReplayOperation {
|
||||
parent_revision_id: RevisionId::from([].as_slice()),
|
||||
target_revision_id: RevisionId::from([op1.id()].as_slice()),
|
||||
operation_id: op1.id(),
|
||||
}]
|
||||
),
|
||||
]
|
||||
);
|
||||
assert_eq!(
|
||||
rewind(&[op11.id()], &mut history, &kv).await,
|
||||
&[
|
||||
(
|
||||
RevisionId::from([op9.id(), op10.id()].as_slice()),
|
||||
vec![ReplayOperation {
|
||||
parent_revision_id: RevisionId::from([op9.id(), op10.id()].as_slice()),
|
||||
target_revision_id: RevisionId::from([op11.id()].as_slice()),
|
||||
operation_id: op11.id(),
|
||||
}]
|
||||
),
|
||||
(
|
||||
RevisionId::from([op5.id()].as_slice()),
|
||||
vec![
|
||||
ReplayOperation {
|
||||
parent_revision_id: RevisionId::from([op5.id()].as_slice()),
|
||||
target_revision_id: RevisionId::from([op8.id()].as_slice()),
|
||||
operation_id: op8.id(),
|
||||
},
|
||||
ReplayOperation {
|
||||
parent_revision_id: RevisionId::from([op5.id()].as_slice()),
|
||||
target_revision_id: RevisionId::from([op9.id(), op10.id()].as_slice()),
|
||||
operation_id: op9.id(),
|
||||
},
|
||||
ReplayOperation {
|
||||
parent_revision_id: RevisionId::from([op8.id()].as_slice()),
|
||||
target_revision_id: RevisionId::from([op9.id(), op10.id()].as_slice()),
|
||||
operation_id: op10.id(),
|
||||
}
|
||||
]
|
||||
),
|
||||
(
|
||||
RevisionId::from([op4.id()].as_slice()),
|
||||
vec![ReplayOperation {
|
||||
parent_revision_id: RevisionId::from([op4.id()].as_slice()),
|
||||
target_revision_id: RevisionId::from([op5.id()].as_slice()),
|
||||
operation_id: op5.id(),
|
||||
}]
|
||||
),
|
||||
(
|
||||
RevisionId::from([op2.id(), op3.id()].as_slice()),
|
||||
vec![ReplayOperation {
|
||||
parent_revision_id: RevisionId::from([op2.id(), op3.id()].as_slice()),
|
||||
target_revision_id: RevisionId::from([op4.id()].as_slice()),
|
||||
operation_id: op4.id(),
|
||||
}]
|
||||
),
|
||||
(
|
||||
RevisionId::from([op1.id()].as_slice()),
|
||||
vec![
|
||||
ReplayOperation {
|
||||
parent_revision_id: RevisionId::from([op1.id()].as_slice()),
|
||||
target_revision_id: RevisionId::from([op2.id(), op3.id()].as_slice()),
|
||||
operation_id: op2.id(),
|
||||
},
|
||||
ReplayOperation {
|
||||
parent_revision_id: RevisionId::from([op1.id()].as_slice()),
|
||||
target_revision_id: RevisionId::from([op2.id(), op3.id()].as_slice()),
|
||||
operation_id: op3.id(),
|
||||
}
|
||||
]
|
||||
),
|
||||
(
|
||||
RevisionId::from([].as_slice()),
|
||||
vec![ReplayOperation {
|
||||
parent_revision_id: RevisionId::from([].as_slice()),
|
||||
target_revision_id: RevisionId::from([op1.id()].as_slice()),
|
||||
operation_id: op1.id(),
|
||||
}]
|
||||
),
|
||||
]
|
||||
);
|
||||
}
|
||||
|
||||
async fn insert_operation(
|
||||
parent: &[OperationId],
|
||||
history: &mut History,
|
||||
kv: &dyn KvStore,
|
||||
) -> Operation {
|
||||
let operation = Operation::CreateBranch(crate::operations::CreateBranch {
|
||||
id: history.next_operation_id(),
|
||||
parent: parent.into(),
|
||||
name: "1".into(),
|
||||
});
|
||||
history.insert(operation.clone(), kv).await.unwrap();
|
||||
operation
|
||||
}
|
||||
|
||||
async fn rewind(
|
||||
revision_id: &[OperationId],
|
||||
history: &mut History,
|
||||
kv: &dyn KvStore,
|
||||
) -> Vec<(RevisionId, Vec<ReplayOperation>)> {
|
||||
let mut rewind = history.rewind(&revision_id.into(), kv).await.unwrap();
|
||||
let mut results = Vec::new();
|
||||
let mut prev_replay = Vec::new();
|
||||
let mut ix = 0;
|
||||
while let Some(ancestor_id) = rewind.next(kv).await.unwrap() {
|
||||
let mut replay = rewind.replay().collect::<Vec<_>>();
|
||||
let suffix_start = replay.len() - prev_replay.len();
|
||||
assert_eq!(prev_replay, &replay[suffix_start..]);
|
||||
prev_replay = replay.clone();
|
||||
drop(replay.drain(suffix_start..));
|
||||
results.push((ancestor_id, replay));
|
||||
|
||||
rewind = history.rewind(&revision_id.into(), kv).await.unwrap();
|
||||
ix += 1;
|
||||
for _ in 0..ix {
|
||||
rewind.next(kv).await.unwrap();
|
||||
}
|
||||
}
|
||||
results
|
||||
}
|
||||
}
|
||||
182
crates/crdb/src/messages.rs
Normal file
@@ -0,0 +1,182 @@
|
||||
use crate::{
|
||||
operations::{CreateBranch, CreateDocument, Edit},
|
||||
BranchId, OperationCount, OperationId, ReplicaId, RepoId, Request, RevisionId, RoomCredentials,
|
||||
};
|
||||
use collections::BTreeMap;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::{any::Any, sync::Arc};
|
||||
|
||||
#[derive(Clone, Debug, Serialize, Deserialize)]
|
||||
pub enum RequestEnvelope {
|
||||
PublishRepo(PublishRepo),
|
||||
CloneRepo(CloneRepo),
|
||||
ReconnectToRepo(ReconnectToRepo),
|
||||
SyncRepo(SyncRepo),
|
||||
PublishOperations(PublishOperations),
|
||||
}
|
||||
|
||||
impl RequestEnvelope {
|
||||
pub fn unwrap(self) -> Box<dyn Any> {
|
||||
match self {
|
||||
RequestEnvelope::PublishRepo(request) => Box::new(request),
|
||||
RequestEnvelope::CloneRepo(request) => Box::new(request),
|
||||
RequestEnvelope::ReconnectToRepo(request) => Box::new(request),
|
||||
RequestEnvelope::SyncRepo(request) => Box::new(request),
|
||||
RequestEnvelope::PublishOperations(request) => Box::new(request),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl From<Operation> for MessageEnvelope {
|
||||
fn from(value: Operation) -> Self {
|
||||
Self::Operation(value)
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, Serialize, Deserialize)]
|
||||
pub struct PublishRepo {
|
||||
pub id: RepoId,
|
||||
pub name: Arc<str>,
|
||||
}
|
||||
|
||||
impl Request for PublishRepo {
|
||||
type Response = PublishRepoResponse;
|
||||
}
|
||||
|
||||
impl Into<RequestEnvelope> for PublishRepo {
|
||||
fn into(self) -> RequestEnvelope {
|
||||
RequestEnvelope::PublishRepo(self)
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Serialize, Deserialize)]
|
||||
pub struct PublishRepoResponse {
|
||||
pub credentials: RoomCredentials,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, Serialize, Deserialize)]
|
||||
pub struct CloneRepo {
|
||||
pub name: Arc<str>,
|
||||
}
|
||||
|
||||
impl Request for CloneRepo {
|
||||
type Response = CloneRepoResponse;
|
||||
}
|
||||
|
||||
impl Into<RequestEnvelope> for CloneRepo {
|
||||
fn into(self) -> RequestEnvelope {
|
||||
RequestEnvelope::CloneRepo(self)
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Serialize, Deserialize)]
|
||||
pub struct CloneRepoResponse {
|
||||
pub repo_id: RepoId,
|
||||
pub replica_id: ReplicaId,
|
||||
pub credentials: RoomCredentials,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, Serialize, Deserialize)]
|
||||
pub struct ReconnectToRepo {
|
||||
pub id: RepoId,
|
||||
pub replica_id: ReplicaId,
|
||||
}
|
||||
|
||||
impl Request for ReconnectToRepo {
|
||||
type Response = ReconnectToRepoResponse;
|
||||
}
|
||||
|
||||
impl Into<RequestEnvelope> for ReconnectToRepo {
|
||||
fn into(self) -> RequestEnvelope {
|
||||
RequestEnvelope::ReconnectToRepo(self)
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Serialize, Deserialize)]
|
||||
pub struct ReconnectToRepoResponse {
|
||||
pub credentials: RoomCredentials,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, Serialize, Deserialize)]
|
||||
pub struct SyncRepo {
|
||||
pub id: RepoId,
|
||||
pub max_operation_ids: BTreeMap<ReplicaId, OperationCount>,
|
||||
}
|
||||
|
||||
impl Request for SyncRepo {
|
||||
type Response = SyncRepoResponse;
|
||||
}
|
||||
|
||||
impl Into<RequestEnvelope> for SyncRepo {
|
||||
fn into(self) -> RequestEnvelope {
|
||||
RequestEnvelope::SyncRepo(self)
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, Serialize, Deserialize)]
|
||||
pub struct SyncRepoResponse {
|
||||
pub operations: Vec<Operation>,
|
||||
pub max_operation_ids: BTreeMap<ReplicaId, OperationCount>,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, Serialize, Deserialize)]
|
||||
pub struct PublishOperations {
|
||||
pub repo_id: RepoId,
|
||||
pub operations: Vec<Operation>,
|
||||
}
|
||||
|
||||
impl Request for PublishOperations {
|
||||
type Response = ();
|
||||
}
|
||||
|
||||
impl Into<RequestEnvelope> for PublishOperations {
|
||||
fn into(self) -> RequestEnvelope {
|
||||
RequestEnvelope::PublishOperations(self)
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, Serialize, Deserialize)]
|
||||
pub enum MessageEnvelope {
|
||||
Operation(Operation),
|
||||
}
|
||||
|
||||
impl MessageEnvelope {
|
||||
pub fn unwrap(self) -> Box<dyn Any> {
|
||||
Box::new(match self {
|
||||
MessageEnvelope::Operation(message) => message,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, Serialize, Deserialize)]
|
||||
pub enum Operation {
|
||||
CreateDocument(CreateDocument),
|
||||
Edit(Edit),
|
||||
CreateBranch(CreateBranch),
|
||||
}
|
||||
|
||||
impl Operation {
|
||||
pub fn id(&self) -> OperationId {
|
||||
match self {
|
||||
Operation::CreateDocument(op) => op.id,
|
||||
Operation::Edit(op) => op.id,
|
||||
Operation::CreateBranch(op) => op.id,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn branch_id(&self) -> BranchId {
|
||||
match self {
|
||||
Operation::CreateBranch(op) => op.id,
|
||||
Operation::CreateDocument(op) => op.branch_id,
|
||||
Operation::Edit(op) => op.branch_id,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn parent(&self) -> &RevisionId {
|
||||
match self {
|
||||
Operation::CreateDocument(op) => &op.parent,
|
||||
Operation::Edit(op) => &op.parent,
|
||||
Operation::CreateBranch(op) => &op.parent,
|
||||
}
|
||||
}
|
||||
}
|
||||
286
crates/crdb/src/operations.rs
Normal file
@@ -0,0 +1,286 @@
|
||||
use crate::{
|
||||
btree::{self, Bias},
|
||||
dense_id::DenseId,
|
||||
AnchorRange, BranchId, DocumentFragment, DocumentFragmentSummary, DocumentId, DocumentMetadata,
|
||||
InsertionFragment, OperationId, Revision, RevisionId, RopeBuilder, Tombstone,
|
||||
};
|
||||
use anyhow::Result;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use smallvec::SmallVec;
|
||||
use std::{cmp, sync::Arc};
|
||||
|
||||
#[derive(Clone, Debug, Serialize, Deserialize)]
|
||||
pub struct CreateBranch {
|
||||
pub id: BranchId,
|
||||
pub parent: RevisionId,
|
||||
pub name: Arc<str>,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, Serialize, Deserialize)]
|
||||
pub struct CreateDocument {
|
||||
pub id: DocumentId,
|
||||
pub branch_id: BranchId,
|
||||
pub parent: RevisionId,
|
||||
}
|
||||
|
||||
impl CreateDocument {
|
||||
pub fn apply(self, revision: &mut Revision) {
|
||||
let mut cursor = revision.document_fragments.cursor::<DocumentId>();
|
||||
let mut new_document_fragments = cursor.slice(&self.id, Bias::Right, &());
|
||||
new_document_fragments.push(
|
||||
DocumentFragment {
|
||||
document_id: self.id,
|
||||
location: DenseId::min(),
|
||||
insertion_id: self.id,
|
||||
insertion_subrange: 0..0,
|
||||
tombstones: Default::default(),
|
||||
undo_count: 0,
|
||||
},
|
||||
&(),
|
||||
);
|
||||
new_document_fragments.append(cursor.suffix(&()), &());
|
||||
drop(cursor);
|
||||
|
||||
revision.document_fragments = new_document_fragments;
|
||||
revision.insertion_fragments.insert_or_replace(
|
||||
InsertionFragment {
|
||||
insertion_id: self.id,
|
||||
offset_in_insertion: 0,
|
||||
fragment_location: DenseId::min(),
|
||||
},
|
||||
&(),
|
||||
);
|
||||
revision.document_metadata.insert(
|
||||
self.id,
|
||||
DocumentMetadata {
|
||||
path: None,
|
||||
last_change: self.id,
|
||||
},
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, Serialize, Deserialize)]
|
||||
pub struct Edit {
|
||||
pub id: OperationId,
|
||||
pub document_id: DocumentId,
|
||||
pub branch_id: BranchId,
|
||||
pub parent: RevisionId,
|
||||
pub edits: SmallVec<[(AnchorRange, Arc<str>); 2]>,
|
||||
}
|
||||
|
||||
impl Edit {
|
||||
pub fn apply(self, parent_revision: &Revision, revision: &mut Revision) -> Result<()> {
|
||||
if self.edits.is_empty() {
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
let mut old_fragments = revision
|
||||
.document_fragments
|
||||
.cursor::<DocumentFragmentSummary>();
|
||||
|
||||
// Slice to the start of the document this to which this operation applies.
|
||||
let mut new_fragments = old_fragments.slice(&self.document_id, Bias::Left, &());
|
||||
let mut new_insertions = Vec::new();
|
||||
let mut new_ropes = RopeBuilder::new(
|
||||
revision.visible_text.cursor(0),
|
||||
revision.hidden_text.cursor(0),
|
||||
);
|
||||
new_ropes.append(
|
||||
new_fragments.summary().visible_len,
|
||||
new_fragments.summary().hidden_len,
|
||||
);
|
||||
|
||||
let mut insertion_offset = 0;
|
||||
let mut current_fragment = old_fragments.item().cloned();
|
||||
for (range, new_text) in self.edits {
|
||||
// We need to tombstone the intersection of the edit's range with fragments that
|
||||
// were visible in the operation's parent revision.
|
||||
for mut parent_fragment in parent_revision
|
||||
.visible_fragments_for_range(range.clone())?
|
||||
.cloned()
|
||||
{
|
||||
// Intersect the parent fragment with the edit's range.
|
||||
if parent_fragment.insertion_id == range.start_insertion_id {
|
||||
parent_fragment.insertion_subrange.start = range.start_offset_in_insertion;
|
||||
}
|
||||
if parent_fragment.insertion_id == range.end_insertion_id {
|
||||
parent_fragment.insertion_subrange.end = cmp::min(
|
||||
parent_fragment.insertion_subrange.end,
|
||||
range.end_offset_in_insertion,
|
||||
);
|
||||
}
|
||||
|
||||
// Find the locations of the parent fragment in the new revision.
|
||||
for fragment_location in revision.fragment_locations(
|
||||
parent_fragment.insertion_id,
|
||||
parent_fragment.insertion_subrange,
|
||||
) {
|
||||
if let Some(fragment) = current_fragment.as_ref() {
|
||||
// Advance to fragment_location if it is greater than the location of the current fragment,
|
||||
if *fragment_location > fragment.location {
|
||||
// Flush the remainder of current fragment.
|
||||
if !fragment.insertion_subrange.is_empty() || fragment.is_sentinel() {
|
||||
new_ropes.push_fragment(fragment, fragment.visible());
|
||||
new_insertions
|
||||
.push(btree::Edit::Insert(InsertionFragment::new(&fragment)));
|
||||
new_fragments.push(fragment.clone(), &());
|
||||
}
|
||||
old_fragments.next(&());
|
||||
|
||||
// Append all fragments between the previous fragment and the new fragment_location.
|
||||
let slice = old_fragments.slice(
|
||||
&(self.document_id, fragment_location),
|
||||
Bias::Left,
|
||||
&(),
|
||||
);
|
||||
new_ropes
|
||||
.append(slice.summary().visible_len, slice.summary().hidden_len);
|
||||
new_fragments.append(slice, &());
|
||||
current_fragment = old_fragments.item().cloned();
|
||||
|
||||
// We should always find a fragment when seeking to fragment_location.
|
||||
debug_assert!(current_fragment.is_some());
|
||||
}
|
||||
}
|
||||
|
||||
// If the edit starts at the end of the current fragment, flush it.
|
||||
if let Some(fragment) = current_fragment.as_ref() {
|
||||
if fragment.insertion_id == range.start_insertion_id
|
||||
&& fragment.insertion_subrange.end == range.start_offset_in_insertion
|
||||
{
|
||||
let fragment = current_fragment.take().unwrap();
|
||||
new_ropes.push_fragment(&fragment, fragment.visible());
|
||||
new_insertions
|
||||
.push(btree::Edit::Insert(InsertionFragment::new(&fragment)));
|
||||
new_fragments.push(fragment, &());
|
||||
old_fragments.next(&());
|
||||
current_fragment = old_fragments.item().and_then(|fragment| {
|
||||
if fragment.document_id == self.document_id {
|
||||
Some(fragment.clone())
|
||||
} else {
|
||||
None
|
||||
}
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
if let Some(fragment) = current_fragment.take() {
|
||||
// If we haven't advanced off the end, then the current fragment intersects
|
||||
// the current edit's range.
|
||||
let (prefix, mut intersection, suffix) = fragment.intersect(range.clone());
|
||||
|
||||
// If we have a prefix, push it.
|
||||
if let Some(mut prefix) = prefix {
|
||||
prefix.location = DenseId::between(
|
||||
&new_fragments.summary().max_location,
|
||||
&intersection.location,
|
||||
);
|
||||
new_insertions
|
||||
.push(btree::Edit::Insert(InsertionFragment::new(&prefix)));
|
||||
new_ropes.push_fragment(&prefix, prefix.visible());
|
||||
new_fragments.push(prefix, &());
|
||||
}
|
||||
|
||||
if let Some(suffix) = suffix {
|
||||
intersection.location = DenseId::between(
|
||||
&new_fragments.summary().max_location,
|
||||
&suffix.location,
|
||||
);
|
||||
// If we still have a suffix, the next edit may be inside of it, so set it as
|
||||
// the current fragment and continue the loop.
|
||||
current_fragment = Some(suffix);
|
||||
} else {
|
||||
// Otherwise, advance to the next fragment if it's still part of the same document.
|
||||
old_fragments.next(&());
|
||||
if let Some(next_fragment) = old_fragments.item() {
|
||||
if next_fragment.document_id == self.document_id {
|
||||
current_fragment = Some(next_fragment.clone());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Then tombstone the intersecting portion.
|
||||
let was_visible = intersection.visible();
|
||||
intersection.tombstones.push(Tombstone {
|
||||
id: self.id,
|
||||
undo_count: 0,
|
||||
});
|
||||
new_ropes.push_fragment(&intersection, was_visible);
|
||||
new_insertions
|
||||
.push(btree::Edit::Insert(InsertionFragment::new(&intersection)));
|
||||
new_fragments.push(intersection, &());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Move past insertions that were causally after the current operation.
|
||||
while let Some(fragment) = current_fragment.as_ref() {
|
||||
if fragment.insertion_id.is_causally_after(self.id) {
|
||||
new_ropes.push_fragment(fragment, fragment.visible());
|
||||
new_insertions.push(btree::Edit::Insert(InsertionFragment::new(fragment)));
|
||||
new_fragments.push(fragment.clone(), &());
|
||||
old_fragments.next(&());
|
||||
current_fragment = old_fragments.item().and_then(|fragment| {
|
||||
if fragment.document_id == self.document_id {
|
||||
Some(fragment.clone())
|
||||
} else {
|
||||
None
|
||||
}
|
||||
});
|
||||
} else {
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
// Finally, insert a fragment containing the new text.
|
||||
if !new_text.is_empty() {
|
||||
let fragment = DocumentFragment {
|
||||
document_id: self.document_id,
|
||||
location: DenseId::between(
|
||||
&new_fragments.summary().max_location,
|
||||
current_fragment
|
||||
.as_ref()
|
||||
.map_or(DenseId::max_ref(), |fragment| &fragment.location),
|
||||
),
|
||||
insertion_id: self.id,
|
||||
insertion_subrange: insertion_offset..insertion_offset + new_text.len(),
|
||||
tombstones: Default::default(),
|
||||
undo_count: 0,
|
||||
};
|
||||
new_insertions.push(btree::Edit::Insert(InsertionFragment::new(&fragment)));
|
||||
new_ropes.push_str(new_text.as_ref());
|
||||
new_fragments.push(fragment, &());
|
||||
insertion_offset += new_text.len();
|
||||
}
|
||||
}
|
||||
|
||||
if let Some(fragment) = current_fragment {
|
||||
if !fragment.insertion_subrange.is_empty() {
|
||||
new_ropes.push_fragment(&fragment, fragment.visible());
|
||||
new_insertions.push(btree::Edit::Insert(InsertionFragment::new(&fragment)));
|
||||
new_fragments.push(fragment, &());
|
||||
}
|
||||
old_fragments.next(&());
|
||||
}
|
||||
|
||||
let suffix = old_fragments.suffix(&());
|
||||
drop(old_fragments);
|
||||
|
||||
new_ropes.append(suffix.summary().visible_len, suffix.summary().hidden_len);
|
||||
let (visible_text, hidden_text) = new_ropes.finish();
|
||||
revision.visible_text = visible_text;
|
||||
revision.hidden_text = hidden_text;
|
||||
|
||||
new_fragments.append(suffix, &());
|
||||
revision.document_fragments = new_fragments;
|
||||
|
||||
new_insertions.sort_unstable_by_key(|edit| edit.key());
|
||||
new_insertions.dedup_by_key(|edit| edit.key());
|
||||
revision.insertion_fragments.edit(new_insertions, &());
|
||||
|
||||
revision.check_invariants();
|
||||
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
1443
crates/crdb/src/rope.rs
Normal file
51
crates/crdb/src/rope/offset_utf16.rs
Normal file
@@ -0,0 +1,51 @@
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::ops::{Add, AddAssign, Sub};
|
||||
|
||||
#[derive(Copy, Clone, Debug, Default, Eq, PartialEq, Ord, PartialOrd, Serialize, Deserialize)]
|
||||
pub struct OffsetUtf16(pub usize);
|
||||
|
||||
impl<'a> Add<&'a Self> for OffsetUtf16 {
|
||||
type Output = Self;
|
||||
|
||||
fn add(self, other: &'a Self) -> Self::Output {
|
||||
Self(self.0 + other.0)
|
||||
}
|
||||
}
|
||||
|
||||
impl Add for OffsetUtf16 {
|
||||
type Output = Self;
|
||||
|
||||
fn add(self, other: Self) -> Self::Output {
|
||||
Self(self.0 + other.0)
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> Sub<&'a Self> for OffsetUtf16 {
|
||||
type Output = Self;
|
||||
|
||||
fn sub(self, other: &'a Self) -> Self::Output {
|
||||
debug_assert!(*other <= self);
|
||||
Self(self.0 - other.0)
|
||||
}
|
||||
}
|
||||
|
||||
impl Sub for OffsetUtf16 {
|
||||
type Output = OffsetUtf16;
|
||||
|
||||
fn sub(self, other: Self) -> Self::Output {
|
||||
debug_assert!(other <= self);
|
||||
Self(self.0 - other.0)
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> AddAssign<&'a Self> for OffsetUtf16 {
|
||||
fn add_assign(&mut self, other: &'a Self) {
|
||||
self.0 += other.0;
|
||||
}
|
||||
}
|
||||
|
||||
impl AddAssign<Self> for OffsetUtf16 {
|
||||
fn add_assign(&mut self, other: Self) {
|
||||
self.0 += other.0;
|
||||
}
|
||||
}
|
||||
129
crates/crdb/src/rope/point.rs
Normal file
@@ -0,0 +1,129 @@
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::{
|
||||
cmp::Ordering,
|
||||
ops::{Add, AddAssign, Sub},
|
||||
};
|
||||
|
||||
#[derive(Clone, Copy, Default, Eq, PartialEq, Debug, Hash, Serialize, Deserialize)]
|
||||
pub struct Point {
|
||||
pub row: u32,
|
||||
pub column: u32,
|
||||
}
|
||||
|
||||
impl Point {
|
||||
pub const MAX: Self = Self {
|
||||
row: u32::MAX,
|
||||
column: u32::MAX,
|
||||
};
|
||||
|
||||
pub fn new(row: u32, column: u32) -> Self {
|
||||
Point { row, column }
|
||||
}
|
||||
|
||||
pub fn zero() -> Self {
|
||||
Point::new(0, 0)
|
||||
}
|
||||
|
||||
pub fn parse_str(s: &str) -> Self {
|
||||
let mut point = Self::zero();
|
||||
for (row, line) in s.split('\n').enumerate() {
|
||||
point.row = row as u32;
|
||||
point.column = line.len() as u32;
|
||||
}
|
||||
point
|
||||
}
|
||||
|
||||
pub fn is_zero(&self) -> bool {
|
||||
self.row == 0 && self.column == 0
|
||||
}
|
||||
|
||||
pub fn saturating_sub(self, other: Self) -> Self {
|
||||
if self < other {
|
||||
Self::zero()
|
||||
} else {
|
||||
self - other
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> Add<&'a Self> for Point {
|
||||
type Output = Point;
|
||||
|
||||
fn add(self, other: &'a Self) -> Self::Output {
|
||||
self + *other
|
||||
}
|
||||
}
|
||||
|
||||
impl Add for Point {
|
||||
type Output = Point;
|
||||
|
||||
fn add(self, other: Self) -> Self::Output {
|
||||
if other.row == 0 {
|
||||
Point::new(self.row, self.column + other.column)
|
||||
} else {
|
||||
Point::new(self.row + other.row, other.column)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> Sub<&'a Self> for Point {
|
||||
type Output = Point;
|
||||
|
||||
fn sub(self, other: &'a Self) -> Self::Output {
|
||||
self - *other
|
||||
}
|
||||
}
|
||||
|
||||
impl Sub for Point {
|
||||
type Output = Point;
|
||||
|
||||
fn sub(self, other: Self) -> Self::Output {
|
||||
debug_assert!(other <= self);
|
||||
|
||||
if self.row == other.row {
|
||||
Point::new(0, self.column - other.column)
|
||||
} else {
|
||||
Point::new(self.row - other.row, self.column)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> AddAssign<&'a Self> for Point {
|
||||
fn add_assign(&mut self, other: &'a Self) {
|
||||
*self += *other;
|
||||
}
|
||||
}
|
||||
|
||||
impl AddAssign<Self> for Point {
|
||||
fn add_assign(&mut self, other: Self) {
|
||||
if other.row == 0 {
|
||||
self.column += other.column;
|
||||
} else {
|
||||
self.row += other.row;
|
||||
self.column = other.column;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl PartialOrd for Point {
|
||||
fn partial_cmp(&self, other: &Point) -> Option<Ordering> {
|
||||
Some(self.cmp(other))
|
||||
}
|
||||
}
|
||||
|
||||
impl Ord for Point {
|
||||
#[cfg(target_pointer_width = "64")]
|
||||
fn cmp(&self, other: &Point) -> Ordering {
|
||||
let a = (self.row as usize) << 32 | self.column as usize;
|
||||
let b = (other.row as usize) << 32 | other.column as usize;
|
||||
a.cmp(&b)
|
||||
}
|
||||
|
||||
#[cfg(target_pointer_width = "32")]
|
||||
fn cmp(&self, other: &Point) -> Ordering {
|
||||
match self.row.cmp(&other.row) {
|
||||
Ordering::Equal => self.column.cmp(&other.column),
|
||||
comparison @ _ => comparison,
|
||||
}
|
||||
}
|
||||
}
|
||||
119
crates/crdb/src/rope/point_utf16.rs
Normal file
@@ -0,0 +1,119 @@
|
||||
use std::{
|
||||
cmp::Ordering,
|
||||
ops::{Add, AddAssign, Sub},
|
||||
};
|
||||
|
||||
#[derive(Clone, Copy, Default, Eq, PartialEq, Debug, Hash)]
|
||||
pub struct PointUtf16 {
|
||||
pub row: u32,
|
||||
pub column: u32,
|
||||
}
|
||||
|
||||
impl PointUtf16 {
|
||||
pub const MAX: Self = Self {
|
||||
row: u32::MAX,
|
||||
column: u32::MAX,
|
||||
};
|
||||
|
||||
pub fn new(row: u32, column: u32) -> Self {
|
||||
PointUtf16 { row, column }
|
||||
}
|
||||
|
||||
pub fn zero() -> Self {
|
||||
PointUtf16::new(0, 0)
|
||||
}
|
||||
|
||||
pub fn is_zero(&self) -> bool {
|
||||
self.row == 0 && self.column == 0
|
||||
}
|
||||
|
||||
pub fn saturating_sub(self, other: Self) -> Self {
|
||||
if self < other {
|
||||
Self::zero()
|
||||
} else {
|
||||
self - other
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> Add<&'a Self> for PointUtf16 {
|
||||
type Output = PointUtf16;
|
||||
|
||||
fn add(self, other: &'a Self) -> Self::Output {
|
||||
self + *other
|
||||
}
|
||||
}
|
||||
|
||||
impl Add for PointUtf16 {
|
||||
type Output = PointUtf16;
|
||||
|
||||
fn add(self, other: Self) -> Self::Output {
|
||||
if other.row == 0 {
|
||||
PointUtf16::new(self.row, self.column + other.column)
|
||||
} else {
|
||||
PointUtf16::new(self.row + other.row, other.column)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> Sub<&'a Self> for PointUtf16 {
|
||||
type Output = PointUtf16;
|
||||
|
||||
fn sub(self, other: &'a Self) -> Self::Output {
|
||||
self - *other
|
||||
}
|
||||
}
|
||||
|
||||
impl Sub for PointUtf16 {
|
||||
type Output = PointUtf16;
|
||||
|
||||
fn sub(self, other: Self) -> Self::Output {
|
||||
debug_assert!(other <= self);
|
||||
|
||||
if self.row == other.row {
|
||||
PointUtf16::new(0, self.column - other.column)
|
||||
} else {
|
||||
PointUtf16::new(self.row - other.row, self.column)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> AddAssign<&'a Self> for PointUtf16 {
|
||||
fn add_assign(&mut self, other: &'a Self) {
|
||||
*self += *other;
|
||||
}
|
||||
}
|
||||
|
||||
impl AddAssign<Self> for PointUtf16 {
|
||||
fn add_assign(&mut self, other: Self) {
|
||||
if other.row == 0 {
|
||||
self.column += other.column;
|
||||
} else {
|
||||
self.row += other.row;
|
||||
self.column = other.column;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl PartialOrd for PointUtf16 {
|
||||
fn partial_cmp(&self, other: &PointUtf16) -> Option<Ordering> {
|
||||
Some(self.cmp(other))
|
||||
}
|
||||
}
|
||||
|
||||
impl Ord for PointUtf16 {
|
||||
#[cfg(target_pointer_width = "64")]
|
||||
fn cmp(&self, other: &PointUtf16) -> Ordering {
|
||||
let a = (self.row as usize) << 32 | self.column as usize;
|
||||
let b = (other.row as usize) << 32 | other.column as usize;
|
||||
a.cmp(&b)
|
||||
}
|
||||
|
||||
#[cfg(target_pointer_width = "32")]
|
||||
fn cmp(&self, other: &PointUtf16) -> Ordering {
|
||||
match self.row.cmp(&other.row) {
|
||||
Ordering::Equal => self.column.cmp(&other.column),
|
||||
comparison @ _ => comparison,
|
||||
}
|
||||
}
|
||||
}
|
||||
58
crates/crdb/src/rope/unclipped.rs
Normal file
@@ -0,0 +1,58 @@
|
||||
use super::{ChunkSummary, TextDimension, TextSummary};
|
||||
use crate::btree;
|
||||
use std::ops::{Add, AddAssign, Sub, SubAssign};
|
||||
|
||||
#[derive(Debug, Default, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
|
||||
pub struct Unclipped<T>(pub T);
|
||||
|
||||
impl<T> From<T> for Unclipped<T> {
|
||||
fn from(value: T) -> Self {
|
||||
Unclipped(value)
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a, T: btree::Dimension<'a, ChunkSummary>> btree::Dimension<'a, ChunkSummary>
|
||||
for Unclipped<T>
|
||||
{
|
||||
fn add_summary(&mut self, summary: &'a ChunkSummary, _: &()) {
|
||||
self.0.add_summary(summary, &());
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: TextDimension> TextDimension for Unclipped<T> {
|
||||
fn from_text_summary(summary: &TextSummary) -> Self {
|
||||
Unclipped(T::from_text_summary(summary))
|
||||
}
|
||||
|
||||
fn add_assign(&mut self, other: &Self) {
|
||||
TextDimension::add_assign(&mut self.0, &other.0);
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: Add<T, Output = T>> Add<Unclipped<T>> for Unclipped<T> {
|
||||
type Output = Unclipped<T>;
|
||||
|
||||
fn add(self, rhs: Unclipped<T>) -> Self::Output {
|
||||
Unclipped(self.0 + rhs.0)
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: Sub<T, Output = T>> Sub<Unclipped<T>> for Unclipped<T> {
|
||||
type Output = Unclipped<T>;
|
||||
|
||||
fn sub(self, rhs: Unclipped<T>) -> Self::Output {
|
||||
Unclipped(self.0 - rhs.0)
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: AddAssign<T>> AddAssign<Unclipped<T>> for Unclipped<T> {
|
||||
fn add_assign(&mut self, rhs: Unclipped<T>) {
|
||||
self.0 += rhs.0;
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: SubAssign<T>> SubAssign<Unclipped<T>> for Unclipped<T> {
|
||||
fn sub_assign(&mut self, rhs: Unclipped<T>) {
|
||||
self.0 -= rhs.0;
|
||||
}
|
||||
}
|
||||
653
crates/crdb/src/sync.rs
Normal file
@@ -0,0 +1,653 @@
|
||||
use crate::{
|
||||
btree::{self, Bias},
|
||||
digest::{Digest, DigestSequence},
|
||||
messages::{Operation, PublishOperations},
|
||||
OperationId,
|
||||
};
|
||||
use std::{
|
||||
cmp::{self, Ordering},
|
||||
iter,
|
||||
ops::{Range, RangeBounds},
|
||||
};
|
||||
|
||||
struct SyncRequest {
|
||||
digests: Vec<Digest>,
|
||||
}
|
||||
|
||||
struct SyncResponse {
|
||||
shared_prefix_end: usize,
|
||||
operations: Vec<Operation>,
|
||||
}
|
||||
|
||||
struct SyncStats {
|
||||
roundtrips: usize,
|
||||
server_operations: usize,
|
||||
client_operations: usize,
|
||||
}
|
||||
|
||||
fn sync_server(
|
||||
operations: &mut btree::Sequence<Operation>,
|
||||
sync_request: SyncRequest,
|
||||
) -> SyncResponse {
|
||||
for client_digest in sync_request.digests {
|
||||
let server_digest = digest_for_range(operations, 0..client_digest.count);
|
||||
if server_digest == client_digest {
|
||||
return SyncResponse {
|
||||
shared_prefix_end: server_digest.count,
|
||||
operations: operations_for_range(operations, server_digest.count..)
|
||||
.cloned()
|
||||
.collect(),
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
SyncResponse {
|
||||
shared_prefix_end: 0,
|
||||
operations: operations.iter().cloned().collect(),
|
||||
}
|
||||
}
|
||||
|
||||
fn publish_operations(
|
||||
server_operations: &mut btree::Sequence<Operation>,
|
||||
request: PublishOperations,
|
||||
) {
|
||||
server_operations.edit(
|
||||
request
|
||||
.operations
|
||||
.into_iter()
|
||||
.map(btree::Edit::Insert)
|
||||
.collect(),
|
||||
&(),
|
||||
);
|
||||
}
|
||||
|
||||
fn sync_client(
|
||||
client_operations: &mut btree::Sequence<Operation>,
|
||||
server_operations: &mut btree::Sequence<Operation>,
|
||||
min_digest_delta: usize,
|
||||
max_digest_count: usize,
|
||||
) -> SyncStats {
|
||||
let mut client_operation_count = client_operations.summary().digest.count;
|
||||
let mut digests = Vec::new();
|
||||
let mut n = client_operation_count;
|
||||
|
||||
// We will multiply by some some factor less than 1 to produce digests
|
||||
// over ever smaller digest ranges.
|
||||
// op_count * factor^max_digest_count = min_digest_size
|
||||
// factor^max_digest_count = min_digest_size/op_count
|
||||
// max_digest_count * log(factor) = log(min_digest_size/op_count)
|
||||
// log(factor) = log(min_digest_size/op_count)/max_digest_count
|
||||
// factor = base^(log(min_digest_size/op_count)/max_digest_count)
|
||||
let factor = 2f64.powf(
|
||||
(min_digest_delta as f64 / client_operation_count as f64).log2() / max_digest_count as f64,
|
||||
);
|
||||
for _ in 0..max_digest_count {
|
||||
if n <= min_digest_delta {
|
||||
break;
|
||||
}
|
||||
|
||||
digests.push(digest_for_range(client_operations, 0..n));
|
||||
n = (n as f64 * factor).ceil() as usize; // 🪬
|
||||
}
|
||||
|
||||
let response = sync_server(server_operations, SyncRequest { digests });
|
||||
let client_suffix = operations_for_range(client_operations, response.shared_prefix_end..)
|
||||
.cloned()
|
||||
.collect::<Vec<_>>();
|
||||
let sync_stats = SyncStats {
|
||||
roundtrips: 1,
|
||||
server_operations: response.operations.len(),
|
||||
client_operations: client_suffix.len(),
|
||||
};
|
||||
client_operations.edit(
|
||||
response
|
||||
.operations
|
||||
.into_iter()
|
||||
.map(btree::Edit::Insert)
|
||||
.collect(),
|
||||
&(),
|
||||
);
|
||||
publish_operations(
|
||||
server_operations,
|
||||
PublishOperations {
|
||||
repo_id: Default::default(),
|
||||
operations: client_suffix,
|
||||
},
|
||||
);
|
||||
|
||||
sync_stats
|
||||
}
|
||||
|
||||
impl btree::Item for Operation {
|
||||
type Summary = OperationSummary;
|
||||
|
||||
fn summary(&self) -> Self::Summary {
|
||||
OperationSummary {
|
||||
digest: Digest::from(self),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl btree::KeyedItem for Operation {
|
||||
type Key = OperationId;
|
||||
|
||||
fn key(&self) -> Self::Key {
|
||||
self.id()
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, Default)]
|
||||
pub struct OperationSummary {
|
||||
digest: Digest,
|
||||
}
|
||||
|
||||
impl btree::Summary for OperationSummary {
|
||||
type Context = ();
|
||||
|
||||
fn add_summary(&mut self, summary: &Self, _: &()) {
|
||||
Digest::add_summary(&mut self.digest, &summary.digest, &());
|
||||
}
|
||||
}
|
||||
|
||||
impl btree::Dimension<'_, OperationSummary> for OperationId {
|
||||
fn add_summary(&mut self, summary: &'_ OperationSummary, _: &()) {
|
||||
*self = summary.digest.max_op_id;
|
||||
}
|
||||
}
|
||||
|
||||
impl btree::Dimension<'_, OperationSummary> for usize {
|
||||
fn add_summary(&mut self, summary: &'_ OperationSummary, _: &()) {
|
||||
*self += summary.digest.count;
|
||||
}
|
||||
}
|
||||
|
||||
impl btree::Dimension<'_, OperationSummary> for Digest {
|
||||
fn add_summary(&mut self, summary: &'_ OperationSummary, _: &()) {
|
||||
Digest::add_summary(self, &summary.digest, &());
|
||||
}
|
||||
}
|
||||
|
||||
fn request_digests(
|
||||
operations: &btree::Sequence<Operation>,
|
||||
mut root_range: Range<usize>,
|
||||
count: usize,
|
||||
min_operations: usize,
|
||||
) -> Vec<Digest> {
|
||||
root_range.start = cmp::min(root_range.start, operations.summary().digest.count);
|
||||
root_range.end = cmp::min(root_range.end, operations.summary().digest.count);
|
||||
subdivide_range(root_range, count, min_operations)
|
||||
.map(|range| digest_for_range(operations, range))
|
||||
.collect()
|
||||
}
|
||||
|
||||
fn subdivide_range(
|
||||
root_range: Range<usize>,
|
||||
count: usize,
|
||||
min_operations: usize,
|
||||
) -> impl Iterator<Item = Range<usize>> {
|
||||
let subrange_len = cmp::max(min_operations, (root_range.len() + count - 1) / count);
|
||||
|
||||
let mut subrange_start = root_range.start;
|
||||
iter::from_fn(move || {
|
||||
if subrange_start >= root_range.end {
|
||||
return None;
|
||||
}
|
||||
let subrange = subrange_start..cmp::min(subrange_start + subrange_len, root_range.end);
|
||||
subrange_start = subrange.end;
|
||||
Some(subrange)
|
||||
})
|
||||
}
|
||||
|
||||
fn sync(
|
||||
client: &mut btree::Sequence<Operation>,
|
||||
server: &mut btree::Sequence<Operation>,
|
||||
max_digests: usize,
|
||||
min_operations: usize,
|
||||
) -> SyncStats {
|
||||
let mut server_digests = DigestSequence::new();
|
||||
let mut stats = SyncStats {
|
||||
roundtrips: 1,
|
||||
server_operations: 0,
|
||||
client_operations: 0,
|
||||
};
|
||||
let digests = request_digests(server, 0..usize::MAX, max_digests, min_operations);
|
||||
server_digests.splice(0..0, digests.iter().cloned());
|
||||
let server_operation_count = server_digests.operation_count();
|
||||
let max_sync_range = 0..(client.summary().digest.count + server_operation_count);
|
||||
let mut stack =
|
||||
subdivide_range(max_sync_range, max_digests, min_operations).collect::<Vec<_>>();
|
||||
stack.reverse();
|
||||
|
||||
let mut missed_server_ops = Vec::new();
|
||||
let mut server_end = 0;
|
||||
let mut synced_end = 0;
|
||||
while let Some(mut sync_range) = stack.pop() {
|
||||
sync_range.start = cmp::max(sync_range.start, synced_end);
|
||||
if sync_range.start >= client.summary().digest.count || server_end >= server_operation_count
|
||||
{
|
||||
// We've exhausted all operations from either the client or the server, so we
|
||||
// can fast track to publishing anything the server hasn't seen and requesting
|
||||
// anything the client hasn't seen.
|
||||
break;
|
||||
} else if sync_range.end < synced_end {
|
||||
// This range has already been synced, so we can skip it.
|
||||
continue;
|
||||
}
|
||||
|
||||
let server_digest = server_digests.digest(sync_range.clone());
|
||||
sync_range.end = cmp::max(sync_range.start + server_digest.count, sync_range.end);
|
||||
let mut server_range = server_end..server_end + sync_range.len();
|
||||
|
||||
let client_digest = digest_for_range(client, sync_range.clone());
|
||||
if client_digest == server_digest {
|
||||
log::debug!("skipping {:?}", sync_range);
|
||||
synced_end = sync_range.end;
|
||||
server_end += server_digest.count;
|
||||
} else {
|
||||
let next_client_op_id = {
|
||||
let mut cursor = client.cursor::<(usize, Digest)>();
|
||||
cursor.seek(&sync_range.start, Bias::Right, &());
|
||||
cursor.item().map_or(OperationId::MAX, |op| op.id())
|
||||
};
|
||||
let recurse = next_client_op_id <= server_digest.max_op_id;
|
||||
while let Some(next_sync_range) = stack.last_mut() {
|
||||
let merged_sync_range =
|
||||
sync_range.start..cmp::max(sync_range.end, next_sync_range.end);
|
||||
let merged_digest = server_digests.digest(merged_sync_range.clone());
|
||||
if next_client_op_id <= merged_digest.max_op_id {
|
||||
break;
|
||||
} else {
|
||||
sync_range.end = cmp::max(
|
||||
merged_sync_range.start + merged_digest.count,
|
||||
merged_sync_range.end,
|
||||
);
|
||||
server_range.end = server_end + sync_range.len();
|
||||
stack.pop();
|
||||
}
|
||||
}
|
||||
|
||||
if sync_range.len() > min_operations && recurse {
|
||||
log::debug!("descending into {:?}", sync_range);
|
||||
stats.roundtrips += 1;
|
||||
let digests =
|
||||
request_digests(server, server_range.clone(), max_digests, min_operations);
|
||||
server_digests.splice(sync_range.clone(), digests.iter().cloned());
|
||||
let old_stack_len = stack.len();
|
||||
|
||||
stack.extend(subdivide_range(sync_range, max_digests, min_operations));
|
||||
stack[old_stack_len..].reverse();
|
||||
} else {
|
||||
log::debug!(
|
||||
"fetching operations for {:?} (server range: {:?})",
|
||||
sync_range,
|
||||
server_range,
|
||||
);
|
||||
stats.roundtrips += 1;
|
||||
let server_operations = request_operations(server, server_range.clone());
|
||||
// debug_assert!(server_operations.len() > 0);
|
||||
server_digests.splice(
|
||||
sync_range.clone(),
|
||||
server_operations.iter().map(|op| op.into()),
|
||||
);
|
||||
|
||||
let mut missed_client_ops = Vec::new();
|
||||
stats.server_operations += server_operations.len();
|
||||
let mut server_operations = server_operations.into_iter().peekable();
|
||||
let mut client_operations =
|
||||
operations_for_range(&client, sync_range.clone()).peekable();
|
||||
for _ in sync_range.clone() {
|
||||
match (client_operations.peek(), server_operations.peek()) {
|
||||
(Some(client_operation), Some(server_operation)) => {
|
||||
match client_operation.id().cmp(&server_operation.id()) {
|
||||
Ordering::Less => {
|
||||
let client_operation = client_operations.next().unwrap();
|
||||
missed_server_ops
|
||||
.push(btree::Edit::Insert(client_operation.clone()));
|
||||
server_digests
|
||||
.splice(synced_end..synced_end, [client_operation.into()]);
|
||||
}
|
||||
Ordering::Equal => {
|
||||
client_operations.next().unwrap();
|
||||
server_operations.next().unwrap();
|
||||
server_end += 1;
|
||||
}
|
||||
Ordering::Greater => {
|
||||
let server_operation = server_operations.next().unwrap();
|
||||
missed_client_ops.push(btree::Edit::Insert(server_operation));
|
||||
server_end += 1;
|
||||
}
|
||||
}
|
||||
}
|
||||
(None, Some(_)) => {
|
||||
let server_operation = server_operations.next().unwrap();
|
||||
missed_client_ops.push(btree::Edit::Insert(server_operation));
|
||||
server_end += 1;
|
||||
}
|
||||
(Some(_), None) => {
|
||||
let client_operation = client_operations.next().unwrap();
|
||||
missed_server_ops.push(btree::Edit::Insert(client_operation.clone()));
|
||||
server_digests
|
||||
.splice(synced_end..synced_end, [client_operation.into()]);
|
||||
}
|
||||
(None, None) => break,
|
||||
}
|
||||
|
||||
synced_end += 1;
|
||||
}
|
||||
|
||||
drop(client_operations);
|
||||
client.edit(missed_client_ops, &());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Fetch and publish the remaining suffixes.
|
||||
stats.roundtrips += 1;
|
||||
if synced_end < client.summary().digest.count || server_end < server_operation_count {
|
||||
log::debug!("sending client operations from {:?}..", synced_end);
|
||||
let remaining_client_ops = operations_for_range(&client, synced_end..);
|
||||
missed_server_ops.extend(remaining_client_ops.cloned().map(btree::Edit::Insert));
|
||||
|
||||
log::debug!("getting server operations from {:?}..", server_end);
|
||||
let remaining_server_ops = request_operations(server, server_end..);
|
||||
stats.server_operations += remaining_server_ops.len();
|
||||
client.edit(
|
||||
remaining_server_ops
|
||||
.into_iter()
|
||||
.map(btree::Edit::Insert)
|
||||
.collect(),
|
||||
&(),
|
||||
);
|
||||
}
|
||||
|
||||
stats.client_operations = missed_server_ops.len();
|
||||
|
||||
server.edit(missed_server_ops, &());
|
||||
stats
|
||||
}
|
||||
|
||||
fn digest_for_range(operations: &btree::Sequence<Operation>, range: Range<usize>) -> Digest {
|
||||
let mut cursor = operations.cursor::<usize>();
|
||||
cursor.seek(&range.start, Bias::Right, &());
|
||||
cursor.summary(&range.end, Bias::Right, &())
|
||||
}
|
||||
|
||||
fn request_operations<T: RangeBounds<usize>>(
|
||||
operations: &btree::Sequence<Operation>,
|
||||
range: T,
|
||||
) -> Vec<Operation> {
|
||||
operations_for_range(operations, range).cloned().collect()
|
||||
}
|
||||
|
||||
fn operations_for_range<T: RangeBounds<usize>>(
|
||||
operations: &btree::Sequence<Operation>,
|
||||
range: T,
|
||||
) -> impl Iterator<Item = &Operation> {
|
||||
let mut cursor = operations.cursor::<usize>();
|
||||
match range.start_bound() {
|
||||
collections::Bound::Included(start) => {
|
||||
cursor.seek(start, Bias::Right, &());
|
||||
}
|
||||
collections::Bound::Excluded(start) => {
|
||||
cursor.seek(&(*start + 1), Bias::Right, &());
|
||||
}
|
||||
collections::Bound::Unbounded => cursor.next(&()),
|
||||
}
|
||||
|
||||
iter::from_fn(move || {
|
||||
if range.contains(cursor.start()) {
|
||||
let operation = cursor.item()?;
|
||||
cursor.next(&());
|
||||
Some(operation)
|
||||
} else {
|
||||
None
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
use crate::{operations, OperationCount, ReplicaId};
|
||||
use rand::prelude::*;
|
||||
use std::{env, mem};
|
||||
|
||||
#[test]
|
||||
fn test_sync() {
|
||||
assert_sync(1..=15, (1..=5).chain(7..=15));
|
||||
assert_sync(1..=10, 5..=10);
|
||||
assert_sync(1..=10, 4..=10);
|
||||
assert_sync(1..=10, 1..=5);
|
||||
assert_sync([1, 3, 5, 7, 9], [2, 4, 6, 8, 10]);
|
||||
assert_sync([1, 2, 3, 4, 6, 7, 8, 9, 11, 12], [4, 5, 6, 10, 12]);
|
||||
assert_sync(1..=10, 5..=14);
|
||||
assert_sync(1..=80, (1..=70).chain(90..=100));
|
||||
assert_sync(1..=1910, (1..=1900).chain(1910..=2000));
|
||||
assert_sync(
|
||||
(1..=1500).chain(4000..=10000),
|
||||
(1..=1000).chain(4000..=11000),
|
||||
);
|
||||
}
|
||||
|
||||
#[gpui::test(iterations = 100)]
|
||||
fn test_performance(mut rng: StdRng) {
|
||||
let max_operations = env::var("OPERATIONS")
|
||||
.map(|i| i.parse().expect("invalid `OPERATIONS` variable"))
|
||||
.unwrap_or(10);
|
||||
let max_digest_count = env::var("MAX_DIGEST_COUNT")
|
||||
.map(|i| i.parse().expect("invalid `MAX_DIGEST_COUNT` variable"))
|
||||
.unwrap_or(1024);
|
||||
let min_operations = env::var("MIN_OPERATIONS")
|
||||
.map(|i| i.parse().expect("invalid `MIN_OPERATIONS` variable"))
|
||||
.unwrap_or(4096);
|
||||
|
||||
let mut connected = true;
|
||||
let mut client_ops = btree::Sequence::<Operation>::new();
|
||||
let mut server_ops = btree::Sequence::<Operation>::new();
|
||||
let mut client_edits = Vec::new();
|
||||
let mut server_edits = Vec::new();
|
||||
let mut ideal_server_ops = 0;
|
||||
let mut ideal_client_ops = 0;
|
||||
let mut next_reconnection = None;
|
||||
for ix in 1..=max_operations {
|
||||
if connected && rng.gen_bool(0.0005) {
|
||||
connected = false;
|
||||
|
||||
let mut factor = 0.0005;
|
||||
while rng.gen() {
|
||||
factor *= 2.0;
|
||||
}
|
||||
|
||||
let remaining_operations = max_operations - ix;
|
||||
let disconnection_period = (remaining_operations as f64 * factor) as usize;
|
||||
next_reconnection = Some(ix + disconnection_period);
|
||||
log::info!("disconnecting for {} operations", disconnection_period);
|
||||
}
|
||||
|
||||
if next_reconnection == Some(ix) {
|
||||
connected = true;
|
||||
next_reconnection = None;
|
||||
log::info!("reconnecting");
|
||||
client_ops.edit(mem::take(&mut client_edits), &());
|
||||
server_ops.edit(mem::take(&mut server_edits), &());
|
||||
|
||||
let stats = sync(
|
||||
&mut client_ops,
|
||||
&mut server_ops,
|
||||
max_digest_count,
|
||||
min_operations,
|
||||
);
|
||||
log::info!("roundtrips: {}", stats.roundtrips);
|
||||
log::info!(
|
||||
"ideal server ops: {}, actual server ops: {}, abs error: {}, pct error: {:.3}%",
|
||||
ideal_server_ops,
|
||||
stats.server_operations,
|
||||
stats.server_operations - ideal_server_ops,
|
||||
((stats.server_operations as f64 / ideal_server_ops as f64) - 1.) * 100.
|
||||
);
|
||||
log::info!(
|
||||
"ideal client ops: {}, actual client ops: {}, abs error: {}, pct error: {:.3}%",
|
||||
ideal_client_ops,
|
||||
stats.client_operations,
|
||||
stats.client_operations - ideal_client_ops,
|
||||
((stats.client_operations as f64 / ideal_client_ops as f64) - 1.0) * 100.
|
||||
);
|
||||
|
||||
assert_eq!(
|
||||
client_ops.iter().map(|op| op.id()).collect::<Vec<_>>(),
|
||||
server_ops.iter().map(|op| op.id()).collect::<Vec<_>>()
|
||||
);
|
||||
ideal_client_ops = 0;
|
||||
ideal_server_ops = 0;
|
||||
}
|
||||
|
||||
if connected {
|
||||
let replica_id = ReplicaId(rng.gen_range(0..=1));
|
||||
client_edits.push(btree::Edit::Insert(op_for_replica(replica_id, ix)));
|
||||
server_edits.push(btree::Edit::Insert(op_for_replica(replica_id, ix)));
|
||||
} else if rng.gen_bool(0.95) {
|
||||
ideal_server_ops += 1;
|
||||
server_edits.push(btree::Edit::Insert(op_for_replica(ReplicaId(0), ix)));
|
||||
} else {
|
||||
ideal_client_ops += 1;
|
||||
client_edits.push(btree::Edit::Insert(op_for_replica(ReplicaId(1), ix)));
|
||||
}
|
||||
}
|
||||
|
||||
log::info!("quiescing");
|
||||
client_ops.edit(mem::take(&mut client_edits), &());
|
||||
server_ops.edit(mem::take(&mut server_edits), &());
|
||||
let stats = sync(
|
||||
&mut client_ops,
|
||||
&mut server_ops,
|
||||
max_digest_count,
|
||||
min_operations,
|
||||
);
|
||||
log::info!("roundtrips: {}", stats.roundtrips);
|
||||
log::info!(
|
||||
"ideal server ops: {}, actual server ops: {}, abs error: {}, pct error: {:.3}%",
|
||||
ideal_server_ops,
|
||||
stats.server_operations,
|
||||
stats.server_operations - ideal_server_ops,
|
||||
((stats.server_operations as f64 / ideal_server_ops as f64) - 1.) * 100.
|
||||
);
|
||||
log::info!(
|
||||
"ideal client ops: {}, actual client ops: {}, abs error: {}, pct error: {:.3}%",
|
||||
ideal_client_ops,
|
||||
stats.client_operations,
|
||||
stats.client_operations - ideal_client_ops,
|
||||
((stats.client_operations as f64 / ideal_client_ops as f64) - 1.0) * 100.
|
||||
);
|
||||
assert_eq!(
|
||||
client_ops.iter().map(|op| op.id()).collect::<Vec<_>>(),
|
||||
server_ops.iter().map(|op| op.id()).collect::<Vec<_>>()
|
||||
);
|
||||
}
|
||||
|
||||
#[gpui::test(iterations = 100)]
|
||||
fn test_random(mut rng: StdRng) {
|
||||
let max_operations = env::var("OPERATIONS")
|
||||
.map(|i| i.parse().expect("invalid `OPERATIONS` variable"))
|
||||
.unwrap_or(10);
|
||||
|
||||
let mut client_ops = btree::Sequence::<Operation>::new();
|
||||
let mut server_ops = btree::Sequence::<Operation>::new();
|
||||
for ix in 1..=max_operations {
|
||||
let replica_id = ReplicaId(rng.gen_range(0..=1));
|
||||
|
||||
if rng.gen() {
|
||||
client_ops.insert_or_replace(op_for_replica(replica_id, ix), &());
|
||||
}
|
||||
|
||||
if rng.gen() {
|
||||
server_ops.insert_or_replace(op_for_replica(replica_id, ix), &());
|
||||
}
|
||||
}
|
||||
|
||||
let max_digest_count = rng.gen_range(2..=32);
|
||||
let min_operations = rng.gen_range(1..100);
|
||||
log::info!(
|
||||
"syncing, max digest count: {}, min operations: {}",
|
||||
max_digest_count,
|
||||
min_operations
|
||||
);
|
||||
sync(
|
||||
&mut client_ops,
|
||||
&mut server_ops,
|
||||
max_digest_count,
|
||||
min_operations,
|
||||
);
|
||||
assert_eq!(
|
||||
client_ops.iter().map(|op| op.id()).collect::<Vec<_>>(),
|
||||
server_ops.iter().map(|op| op.id()).collect::<Vec<_>>()
|
||||
);
|
||||
}
|
||||
|
||||
fn assert_sync(
|
||||
client_ops: impl IntoIterator<Item = usize>,
|
||||
server_ops: impl IntoIterator<Item = usize>,
|
||||
) {
|
||||
let client_ops = client_ops
|
||||
.into_iter()
|
||||
.map(build_operation)
|
||||
.collect::<Vec<_>>();
|
||||
let server_ops = server_ops
|
||||
.into_iter()
|
||||
.map(build_operation)
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
for max_digests in [2, 3, 4, 7, 8, 16, 32] {
|
||||
for min_operations in [1, 2, 3, 4, 7, 8, 16, 32] {
|
||||
log::info!(
|
||||
"max digests: {}, min operations: {}",
|
||||
max_digests,
|
||||
min_operations
|
||||
);
|
||||
let mut client_operations = btree::Sequence::from_iter(client_ops.clone(), &());
|
||||
let mut server_operations = btree::Sequence::from_iter(server_ops.clone(), &());
|
||||
sync(
|
||||
&mut client_operations,
|
||||
&mut server_operations,
|
||||
max_digests,
|
||||
min_operations,
|
||||
);
|
||||
assert_eq!(
|
||||
client_operations
|
||||
.iter()
|
||||
.map(|op| op.id())
|
||||
.collect::<Vec<_>>(),
|
||||
server_operations
|
||||
.iter()
|
||||
.map(|op| op.id())
|
||||
.collect::<Vec<_>>()
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn build_operation(id: usize) -> Operation {
|
||||
Operation::CreateBranch(operations::CreateBranch {
|
||||
id: OperationId {
|
||||
replica_id: Default::default(),
|
||||
operation_count: OperationCount(id),
|
||||
},
|
||||
parent: Default::default(),
|
||||
name: "".into(),
|
||||
})
|
||||
}
|
||||
|
||||
fn op_for_replica(replica_id: ReplicaId, id: usize) -> Operation {
|
||||
Operation::CreateBranch(operations::CreateBranch {
|
||||
id: OperationId {
|
||||
replica_id,
|
||||
operation_count: OperationCount(id),
|
||||
},
|
||||
parent: Default::default(),
|
||||
name: "".into(),
|
||||
})
|
||||
}
|
||||
|
||||
fn digest_counts(digests: &[Digest]) -> Vec<usize> {
|
||||
digests.iter().map(|d| d.count).collect()
|
||||
}
|
||||
}
|
||||
201
crates/crdb/src/test.rs
Normal file
@@ -0,0 +1,201 @@
|
||||
use crate::{ClientNetwork, ClientRoom, RoomCredentials, RoomName, RoomToken, ServerNetwork, User};
|
||||
use anyhow::{anyhow, Result};
|
||||
use collections::BTreeMap;
|
||||
use futures::{channel::mpsc, future::BoxFuture, FutureExt, StreamExt};
|
||||
use gpui::executor::Background;
|
||||
use parking_lot::Mutex;
|
||||
use std::sync::Arc;
|
||||
|
||||
pub struct TestNetwork(Arc<Mutex<NetworkState>>);
|
||||
|
||||
impl TestNetwork {
|
||||
pub fn new(executor: Arc<Background>) -> Self {
|
||||
Self(Arc::new(Mutex::new(NetworkState {
|
||||
executor,
|
||||
request_handler: None,
|
||||
rooms: Default::default(),
|
||||
})))
|
||||
}
|
||||
|
||||
pub fn server(&self) -> TestServerNetwork {
|
||||
TestServerNetwork(self.0.clone())
|
||||
}
|
||||
|
||||
pub fn client(&self, login: impl Into<Arc<str>>) -> TestClientNetwork {
|
||||
TestClientNetwork {
|
||||
user: User {
|
||||
login: login.into(),
|
||||
},
|
||||
network: self.0.clone(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
struct NetworkState {
|
||||
executor: Arc<Background>,
|
||||
request_handler:
|
||||
Option<Box<dyn Send + Fn(User, Vec<u8>) -> Result<BoxFuture<'static, Result<Vec<u8>>>>>>,
|
||||
rooms: BTreeMap<RoomName, Room>,
|
||||
}
|
||||
|
||||
#[derive(Default)]
|
||||
pub struct Room {
|
||||
inboxes: BTreeMap<RoomToken, mpsc::UnboundedSender<Vec<u8>>>,
|
||||
authorized_users: BTreeMap<RoomToken, Arc<str>>,
|
||||
next_token_id: usize,
|
||||
}
|
||||
|
||||
pub struct TestServerNetwork(Arc<Mutex<NetworkState>>);
|
||||
|
||||
impl ServerNetwork for TestServerNetwork {
|
||||
fn create_room(&self, name: &RoomName) -> BoxFuture<Result<()>> {
|
||||
let network = self.0.clone();
|
||||
let room = name.clone();
|
||||
async move {
|
||||
let executor = network.lock().executor.clone();
|
||||
executor.simulate_random_delay().await;
|
||||
network.lock().rooms.insert(room, Default::default());
|
||||
Ok(())
|
||||
}
|
||||
.boxed()
|
||||
}
|
||||
|
||||
fn grant_room_access(&self, room: &RoomName, user: &str) -> RoomToken {
|
||||
let mut network = self.0.lock();
|
||||
let room = network.rooms.get_mut(&room).expect("room must exist");
|
||||
let token_id = room.next_token_id;
|
||||
room.next_token_id += 1;
|
||||
let token = RoomToken(format!("{}/{}", token_id, user).into());
|
||||
room.authorized_users.insert(token.clone(), user.into());
|
||||
token
|
||||
}
|
||||
|
||||
fn handle_requests<H, F>(&self, handle_request: H)
|
||||
where
|
||||
H: 'static + Send + Fn(User, Vec<u8>) -> Result<F>,
|
||||
F: 'static + Send + futures::Future<Output = Result<Vec<u8>>>,
|
||||
{
|
||||
self.0.lock().request_handler = Some(Box::new(move |user, request| {
|
||||
handle_request(user, request.clone()).map(FutureExt::boxed)
|
||||
}));
|
||||
}
|
||||
}
|
||||
|
||||
pub struct TestClientNetwork {
|
||||
user: User,
|
||||
network: Arc<Mutex<NetworkState>>,
|
||||
}
|
||||
|
||||
impl ClientNetwork for TestClientNetwork {
|
||||
type Room = TestClientRoom;
|
||||
|
||||
fn request(&self, request: Vec<u8>) -> BoxFuture<Result<Vec<u8>>> {
|
||||
let response =
|
||||
self.network.lock().request_handler.as_ref().unwrap()(self.user.clone(), request);
|
||||
async move { response?.await }.boxed()
|
||||
}
|
||||
|
||||
fn room(&self, credentials: RoomCredentials) -> Self::Room {
|
||||
TestClientRoom {
|
||||
outbox: Default::default(),
|
||||
credentials,
|
||||
message_handler: Default::default(),
|
||||
network: self.network.clone(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub struct TestClientRoom {
|
||||
outbox: Option<mpsc::UnboundedSender<Vec<u8>>>,
|
||||
credentials: RoomCredentials,
|
||||
message_handler: Arc<Mutex<Option<Box<dyn Send + Fn(Vec<u8>)>>>>,
|
||||
network: Arc<Mutex<NetworkState>>,
|
||||
}
|
||||
|
||||
impl ClientRoom for TestClientRoom {
|
||||
fn connect(&mut self) -> BoxFuture<Result<()>> {
|
||||
assert!(
|
||||
self.outbox.is_none(),
|
||||
"client should not connect more than once"
|
||||
);
|
||||
|
||||
let (inbox_tx, mut inbox_rx) = mpsc::unbounded();
|
||||
{
|
||||
let mut network = self.network.lock();
|
||||
let room = network
|
||||
.rooms
|
||||
.get_mut(&self.credentials.name)
|
||||
.expect("room should exist");
|
||||
|
||||
if !room.authorized_users.contains_key(&self.credentials.token) {
|
||||
return std::future::ready(Err(anyhow!(
|
||||
"token {:?} is not authorized to enter room {:?}",
|
||||
self.credentials.token,
|
||||
self.credentials.name
|
||||
)))
|
||||
.boxed();
|
||||
}
|
||||
|
||||
let existing_inbox = room
|
||||
.inboxes
|
||||
.insert(self.credentials.token.clone(), inbox_tx);
|
||||
assert!(
|
||||
existing_inbox.is_none(),
|
||||
"client should not connect twice with the same token"
|
||||
);
|
||||
}
|
||||
let message_handler = self.message_handler.clone();
|
||||
self.network
|
||||
.lock()
|
||||
.executor
|
||||
.spawn(async move {
|
||||
while let Some(message) = inbox_rx.next().await {
|
||||
if let Some(handler) = message_handler.lock().as_ref() {
|
||||
handler(message);
|
||||
}
|
||||
}
|
||||
})
|
||||
.detach();
|
||||
|
||||
// Send outbound messages to other clients in the room.
|
||||
let (outbox_tx, mut outbox_rx) = mpsc::unbounded();
|
||||
self.outbox = Some(outbox_tx);
|
||||
let executor = self.network.lock().executor.clone();
|
||||
let network = self.network.clone();
|
||||
let credentials = self.credentials.clone();
|
||||
self.network
|
||||
.lock()
|
||||
.executor
|
||||
.spawn(async move {
|
||||
while let Some(message) = outbox_rx.next().await {
|
||||
let inboxes = network
|
||||
.lock()
|
||||
.rooms
|
||||
.get(&credentials.name)
|
||||
.map(|room| room.inboxes.clone());
|
||||
if let Some(inboxes) = inboxes {
|
||||
for (inbox_token, inbox) in inboxes {
|
||||
executor.simulate_random_delay().await;
|
||||
if inbox_token != credentials.token {
|
||||
let _ = inbox.unbounded_send(message.clone());
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
})
|
||||
.detach();
|
||||
|
||||
async { Ok(()) }.boxed()
|
||||
}
|
||||
|
||||
fn broadcast(&self, message: Vec<u8>) {
|
||||
let tx = self.outbox.as_ref().expect("must be connected");
|
||||
tx.unbounded_send(message).expect("channel must be open");
|
||||
}
|
||||
|
||||
fn handle_messages(&self, handle_message: impl 'static + Send + Fn(Vec<u8>)) {
|
||||
self.message_handler
|
||||
.lock()
|
||||
.replace(Box::new(handle_message));
|
||||
}
|
||||
}
|
||||
@@ -7,7 +7,6 @@ use anyhow::Context;
|
||||
use gpui::AppContext;
|
||||
pub use indoc::indoc;
|
||||
pub use lazy_static;
|
||||
use parking_lot::{Mutex, RwLock};
|
||||
pub use smol;
|
||||
pub use sqlez;
|
||||
pub use sqlez_macros;
|
||||
@@ -17,11 +16,9 @@ pub use util::paths::DB_DIR;
|
||||
use sqlez::domain::Migrator;
|
||||
use sqlez::thread_safe_connection::ThreadSafeConnection;
|
||||
use sqlez_macros::sql;
|
||||
use std::fs::create_dir_all;
|
||||
use std::future::Future;
|
||||
use std::path::{Path, PathBuf};
|
||||
use std::sync::atomic::{AtomicBool, Ordering};
|
||||
use std::time::{SystemTime, UNIX_EPOCH};
|
||||
use util::channel::ReleaseChannel;
|
||||
use util::{async_iife, ResultExt};
|
||||
|
||||
@@ -41,10 +38,7 @@ const FALLBACK_DB_NAME: &'static str = "FALLBACK_MEMORY_DB";
|
||||
const DB_FILE_NAME: &'static str = "db.sqlite";
|
||||
|
||||
lazy_static::lazy_static! {
|
||||
// !!!!!!! CHANGE BACK TO DEFAULT FALSE BEFORE SHIPPING
|
||||
static ref ZED_STATELESS: bool = std::env::var("ZED_STATELESS").map_or(false, |v| !v.is_empty());
|
||||
static ref DB_FILE_OPERATIONS: Mutex<()> = Mutex::new(());
|
||||
pub static ref BACKUP_DB_PATH: RwLock<Option<PathBuf>> = RwLock::new(None);
|
||||
pub static ref ZED_STATELESS: bool = std::env::var("ZED_STATELESS").map_or(false, |v| !v.is_empty());
|
||||
pub static ref ALL_FILE_DB_FAILED: AtomicBool = AtomicBool::new(false);
|
||||
}
|
||||
|
||||
@@ -64,66 +58,14 @@ pub async fn open_db<M: Migrator + 'static>(
|
||||
let main_db_dir = db_dir.join(Path::new(&format!("0-{}", release_channel_name)));
|
||||
|
||||
let connection = async_iife!({
|
||||
// Note: This still has a race condition where 1 set of migrations succeeds
|
||||
// (e.g. (Workspace, Editor)) and another fails (e.g. (Workspace, Terminal))
|
||||
// This will cause the first connection to have the database taken out
|
||||
// from under it. This *should* be fine though. The second dabatase failure will
|
||||
// cause errors in the log and so should be observed by developers while writing
|
||||
// soon-to-be good migrations. If user databases are corrupted, we toss them out
|
||||
// and try again from a blank. As long as running all migrations from start to end
|
||||
// on a blank database is ok, this race condition will never be triggered.
|
||||
//
|
||||
// Basically: Don't ever push invalid migrations to stable or everyone will have
|
||||
// a bad time.
|
||||
|
||||
// If no db folder, create one at 0-{channel}
|
||||
create_dir_all(&main_db_dir).context("Could not create db directory")?;
|
||||
smol::fs::create_dir_all(&main_db_dir)
|
||||
.await
|
||||
.context("Could not create db directory")
|
||||
.log_err()?;
|
||||
let db_path = main_db_dir.join(Path::new(DB_FILE_NAME));
|
||||
|
||||
// Optimistically open databases in parallel
|
||||
if !DB_FILE_OPERATIONS.is_locked() {
|
||||
// Try building a connection
|
||||
if let Some(connection) = open_main_db(&db_path).await {
|
||||
return Ok(connection)
|
||||
};
|
||||
}
|
||||
|
||||
// Take a lock in the failure case so that we move the db once per process instead
|
||||
// of potentially multiple times from different threads. This shouldn't happen in the
|
||||
// normal path
|
||||
let _lock = DB_FILE_OPERATIONS.lock();
|
||||
if let Some(connection) = open_main_db(&db_path).await {
|
||||
return Ok(connection)
|
||||
};
|
||||
|
||||
let backup_timestamp = SystemTime::now()
|
||||
.duration_since(UNIX_EPOCH)
|
||||
.expect("System clock is set before the unix timestamp, Zed does not support this region of spacetime")
|
||||
.as_millis();
|
||||
|
||||
// If failed, move 0-{channel} to {current unix timestamp}-{channel}
|
||||
let backup_db_dir = db_dir.join(Path::new(&format!(
|
||||
"{}-{}",
|
||||
backup_timestamp,
|
||||
release_channel_name,
|
||||
)));
|
||||
|
||||
std::fs::rename(&main_db_dir, &backup_db_dir)
|
||||
.context("Failed clean up corrupted database, panicking.")?;
|
||||
|
||||
// Set a static ref with the failed timestamp and error so we can notify the user
|
||||
{
|
||||
let mut guard = BACKUP_DB_PATH.write();
|
||||
*guard = Some(backup_db_dir);
|
||||
}
|
||||
|
||||
// Create a new 0-{channel}
|
||||
create_dir_all(&main_db_dir).context("Should be able to create the database directory")?;
|
||||
let db_path = main_db_dir.join(Path::new(DB_FILE_NAME));
|
||||
|
||||
// Try again
|
||||
open_main_db(&db_path).await.context("Could not newly created db")
|
||||
}).await.log_err();
|
||||
open_main_db(&db_path).await
|
||||
})
|
||||
.await;
|
||||
|
||||
if let Some(connection) = connection {
|
||||
return connection;
|
||||
@@ -250,13 +192,13 @@ where
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use std::{fs, thread};
|
||||
use std::thread;
|
||||
|
||||
use sqlez::{connection::Connection, domain::Domain};
|
||||
use sqlez::domain::Domain;
|
||||
use sqlez_macros::sql;
|
||||
use tempdir::TempDir;
|
||||
|
||||
use crate::{open_db, DB_FILE_NAME};
|
||||
use crate::open_db;
|
||||
|
||||
// Test bad migration panics
|
||||
#[gpui::test]
|
||||
@@ -322,31 +264,10 @@ mod tests {
|
||||
.unwrap()
|
||||
.is_none()
|
||||
);
|
||||
|
||||
let mut corrupted_backup_dir = fs::read_dir(tempdir.path())
|
||||
.unwrap()
|
||||
.find(|entry| {
|
||||
!entry
|
||||
.as_ref()
|
||||
.unwrap()
|
||||
.file_name()
|
||||
.to_str()
|
||||
.unwrap()
|
||||
.starts_with("0")
|
||||
})
|
||||
.unwrap()
|
||||
.unwrap()
|
||||
.path();
|
||||
corrupted_backup_dir.push(DB_FILE_NAME);
|
||||
|
||||
let backup = Connection::open_file(&corrupted_backup_dir.to_string_lossy());
|
||||
assert!(backup.select_row::<usize>("SELECT * FROM test").unwrap()()
|
||||
.unwrap()
|
||||
.is_none());
|
||||
}
|
||||
|
||||
/// Test that DB exists but corrupted (causing recreate)
|
||||
#[gpui::test]
|
||||
#[gpui::test(iterations = 30)]
|
||||
async fn test_simultaneous_db_corruption() {
|
||||
enum CorruptedDB {}
|
||||
|
||||
|
||||
@@ -10,7 +10,6 @@ doctest = false
|
||||
|
||||
[features]
|
||||
test-support = [
|
||||
"rand",
|
||||
"copilot/test-support",
|
||||
"text/test-support",
|
||||
"language/test-support",
|
||||
@@ -57,16 +56,16 @@ ordered-float.workspace = true
|
||||
parking_lot.workspace = true
|
||||
postage.workspace = true
|
||||
pulldown-cmark = { version = "0.9.2", default-features = false }
|
||||
rand = { workspace = true, optional = true }
|
||||
schemars.workspace = true
|
||||
serde.workspace = true
|
||||
serde_derive.workspace = true
|
||||
smallvec.workspace = true
|
||||
smol.workspace = true
|
||||
tree-sitter-rust = { version = "*", optional = true }
|
||||
tree-sitter-html = { version = "*", optional = true }
|
||||
tree-sitter-javascript = { version = "*", optional = true }
|
||||
tree-sitter-typescript = { git = "https://github.com/tree-sitter/tree-sitter-typescript", rev = "5d20856f34315b068c41edaee2ac8a100081d259", optional = true }
|
||||
rand.workspace = true
|
||||
|
||||
tree-sitter-rust = { workspace = true, optional = true }
|
||||
tree-sitter-html = { workspace = true, optional = true }
|
||||
tree-sitter-typescript = { workspace = true, optional = true }
|
||||
|
||||
[dev-dependencies]
|
||||
copilot = { path = "../copilot", features = ["test-support"] }
|
||||
@@ -84,7 +83,6 @@ env_logger.workspace = true
|
||||
rand.workspace = true
|
||||
unindent.workspace = true
|
||||
tree-sitter.workspace = true
|
||||
tree-sitter-rust = "0.20"
|
||||
tree-sitter-html = "0.19"
|
||||
tree-sitter-typescript = { git = "https://github.com/tree-sitter/tree-sitter-typescript", rev = "5d20856f34315b068c41edaee2ac8a100081d259" }
|
||||
tree-sitter-javascript = "0.20"
|
||||
tree-sitter-rust.workspace = true
|
||||
tree-sitter-html.workspace = true
|
||||
tree-sitter-typescript.workspace = true
|
||||
|
||||
@@ -74,6 +74,7 @@ pub use multi_buffer::{
|
||||
};
|
||||
use ordered_float::OrderedFloat;
|
||||
use project::{FormatTrigger, Location, LocationLink, Project, ProjectPath, ProjectTransaction};
|
||||
use rand::{seq::SliceRandom, thread_rng};
|
||||
use scroll::{
|
||||
autoscroll::Autoscroll, OngoingScroll, ScrollAnchor, ScrollManager, ScrollbarAutoHide,
|
||||
};
|
||||
@@ -226,6 +227,10 @@ actions!(
|
||||
MoveLineUp,
|
||||
MoveLineDown,
|
||||
JoinLines,
|
||||
SortLinesCaseSensitive,
|
||||
SortLinesCaseInsensitive,
|
||||
ReverseLines,
|
||||
ShuffleLines,
|
||||
Transpose,
|
||||
Cut,
|
||||
Copy,
|
||||
@@ -271,7 +276,9 @@ actions!(
|
||||
SelectLargerSyntaxNode,
|
||||
SelectSmallerSyntaxNode,
|
||||
GoToDefinition,
|
||||
GoToDefinitionSplit,
|
||||
GoToTypeDefinition,
|
||||
GoToTypeDefinitionSplit,
|
||||
MoveToEnclosingBracket,
|
||||
UndoSelection,
|
||||
RedoSelection,
|
||||
@@ -342,6 +349,10 @@ pub fn init(cx: &mut AppContext) {
|
||||
cx.add_action(Editor::outdent);
|
||||
cx.add_action(Editor::delete_line);
|
||||
cx.add_action(Editor::join_lines);
|
||||
cx.add_action(Editor::sort_lines_case_sensitive);
|
||||
cx.add_action(Editor::sort_lines_case_insensitive);
|
||||
cx.add_action(Editor::reverse_lines);
|
||||
cx.add_action(Editor::shuffle_lines);
|
||||
cx.add_action(Editor::delete_to_previous_word_start);
|
||||
cx.add_action(Editor::delete_to_previous_subword_start);
|
||||
cx.add_action(Editor::delete_to_next_word_end);
|
||||
@@ -407,7 +418,9 @@ pub fn init(cx: &mut AppContext) {
|
||||
cx.add_action(Editor::go_to_hunk);
|
||||
cx.add_action(Editor::go_to_prev_hunk);
|
||||
cx.add_action(Editor::go_to_definition);
|
||||
cx.add_action(Editor::go_to_definition_split);
|
||||
cx.add_action(Editor::go_to_type_definition);
|
||||
cx.add_action(Editor::go_to_type_definition_split);
|
||||
cx.add_action(Editor::fold);
|
||||
cx.add_action(Editor::fold_at);
|
||||
cx.add_action(Editor::unfold_lines);
|
||||
@@ -494,6 +507,7 @@ pub enum SoftWrap {
|
||||
#[derive(Clone)]
|
||||
pub struct EditorStyle {
|
||||
pub text: TextStyle,
|
||||
pub line_height_scalar: f32,
|
||||
pub placeholder_text: Option<TextStyle>,
|
||||
pub theme: theme::Editor,
|
||||
pub theme_id: usize,
|
||||
@@ -544,6 +558,7 @@ pub struct Editor {
|
||||
pending_rename: Option<RenameState>,
|
||||
searchable: bool,
|
||||
cursor_shape: CursorShape,
|
||||
collapse_matches: bool,
|
||||
workspace: Option<(WeakViewHandle<Workspace>, i64)>,
|
||||
keymap_context_layers: BTreeMap<TypeId, KeymapContext>,
|
||||
input_enabled: bool,
|
||||
@@ -557,6 +572,7 @@ pub struct Editor {
|
||||
inlay_hint_cache: InlayHintCache,
|
||||
next_inlay_id: usize,
|
||||
_subscriptions: Vec<Subscription>,
|
||||
pixel_position_of_newest_cursor: Option<Vector2F>,
|
||||
}
|
||||
|
||||
pub struct EditorSnapshot {
|
||||
@@ -1376,6 +1392,7 @@ impl Editor {
|
||||
searchable: true,
|
||||
override_text_style: None,
|
||||
cursor_shape: Default::default(),
|
||||
collapse_matches: false,
|
||||
workspace: None,
|
||||
keymap_context_layers: Default::default(),
|
||||
input_enabled: true,
|
||||
@@ -1387,6 +1404,7 @@ impl Editor {
|
||||
copilot_state: Default::default(),
|
||||
inlay_hint_cache: InlayHintCache::new(inlay_hint_settings),
|
||||
gutter_hovered: false,
|
||||
pixel_position_of_newest_cursor: None,
|
||||
_subscriptions: vec![
|
||||
cx.observe(&buffer, Self::on_buffer_changed),
|
||||
cx.subscribe(&buffer, Self::on_buffer_event),
|
||||
@@ -1515,6 +1533,17 @@ impl Editor {
|
||||
cx.notify();
|
||||
}
|
||||
|
||||
pub fn set_collapse_matches(&mut self, collapse_matches: bool) {
|
||||
self.collapse_matches = collapse_matches;
|
||||
}
|
||||
|
||||
fn range_for_match<T: std::marker::Copy>(&self, range: &Range<T>) -> Range<T> {
|
||||
if self.collapse_matches {
|
||||
return range.start..range.start;
|
||||
}
|
||||
range.clone()
|
||||
}
|
||||
|
||||
pub fn set_clip_at_line_ends(&mut self, clip: bool, cx: &mut ViewContext<Self>) {
|
||||
if self.display_map.read(cx).clip_at_line_ends != clip {
|
||||
self.display_map
|
||||
@@ -2654,11 +2683,16 @@ impl Editor {
|
||||
InlayRefreshReason::RefreshRequested => (InvalidationStrategy::RefreshRequested, None),
|
||||
};
|
||||
|
||||
self.inlay_hint_cache.refresh_inlay_hints(
|
||||
if let Some(InlaySplice {
|
||||
to_remove,
|
||||
to_insert,
|
||||
}) = self.inlay_hint_cache.spawn_hint_refresh(
|
||||
self.excerpt_visible_offsets(required_languages.as_ref(), cx),
|
||||
invalidate_cache,
|
||||
cx,
|
||||
)
|
||||
) {
|
||||
self.splice_inlay_hints(to_remove, to_insert, cx);
|
||||
}
|
||||
}
|
||||
|
||||
fn visible_inlay_hints(&self, cx: &ViewContext<'_, '_, Editor>) -> Vec<Inlay> {
|
||||
@@ -4180,6 +4214,96 @@ impl Editor {
|
||||
});
|
||||
}
|
||||
|
||||
pub fn sort_lines_case_sensitive(
|
||||
&mut self,
|
||||
_: &SortLinesCaseSensitive,
|
||||
cx: &mut ViewContext<Self>,
|
||||
) {
|
||||
self.manipulate_lines(cx, |text| text.sort())
|
||||
}
|
||||
|
||||
pub fn sort_lines_case_insensitive(
|
||||
&mut self,
|
||||
_: &SortLinesCaseInsensitive,
|
||||
cx: &mut ViewContext<Self>,
|
||||
) {
|
||||
self.manipulate_lines(cx, |text| text.sort_by_key(|line| line.to_lowercase()))
|
||||
}
|
||||
|
||||
pub fn reverse_lines(&mut self, _: &ReverseLines, cx: &mut ViewContext<Self>) {
|
||||
self.manipulate_lines(cx, |lines| lines.reverse())
|
||||
}
|
||||
|
||||
pub fn shuffle_lines(&mut self, _: &ShuffleLines, cx: &mut ViewContext<Self>) {
|
||||
self.manipulate_lines(cx, |lines| lines.shuffle(&mut thread_rng()))
|
||||
}
|
||||
|
||||
fn manipulate_lines<Fn>(&mut self, cx: &mut ViewContext<Self>, mut callback: Fn)
|
||||
where
|
||||
Fn: FnMut(&mut [&str]),
|
||||
{
|
||||
let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx));
|
||||
let buffer = self.buffer.read(cx).snapshot(cx);
|
||||
|
||||
let mut edits = Vec::new();
|
||||
|
||||
let selections = self.selections.all::<Point>(cx);
|
||||
let mut selections = selections.iter().peekable();
|
||||
let mut contiguous_row_selections = Vec::new();
|
||||
let mut new_selections = Vec::new();
|
||||
|
||||
while let Some(selection) = selections.next() {
|
||||
let (start_row, end_row) = consume_contiguous_rows(
|
||||
&mut contiguous_row_selections,
|
||||
selection,
|
||||
&display_map,
|
||||
&mut selections,
|
||||
);
|
||||
|
||||
let start_point = Point::new(start_row, 0);
|
||||
let end_point = Point::new(end_row - 1, buffer.line_len(end_row - 1));
|
||||
let text = buffer
|
||||
.text_for_range(start_point..end_point)
|
||||
.collect::<String>();
|
||||
let mut text = text.split("\n").collect_vec();
|
||||
|
||||
let text_len = text.len();
|
||||
callback(&mut text);
|
||||
|
||||
// This is a current limitation with selections.
|
||||
// If we wanted to support removing or adding lines, we'd need to fix the logic associated with selections.
|
||||
debug_assert!(
|
||||
text.len() == text_len,
|
||||
"callback should not change the number of lines"
|
||||
);
|
||||
|
||||
edits.push((start_point..end_point, text.join("\n")));
|
||||
let start_anchor = buffer.anchor_after(start_point);
|
||||
let end_anchor = buffer.anchor_before(end_point);
|
||||
|
||||
// Make selection and push
|
||||
new_selections.push(Selection {
|
||||
id: selection.id,
|
||||
start: start_anchor.to_offset(&buffer),
|
||||
end: end_anchor.to_offset(&buffer),
|
||||
goal: SelectionGoal::None,
|
||||
reversed: selection.reversed,
|
||||
});
|
||||
}
|
||||
|
||||
self.transact(cx, |this, cx| {
|
||||
this.buffer.update(cx, |buffer, cx| {
|
||||
buffer.edit(edits, None, cx);
|
||||
});
|
||||
|
||||
this.change_selections(Some(Autoscroll::fit()), cx, |s| {
|
||||
s.select(new_selections);
|
||||
});
|
||||
|
||||
this.request_autoscroll(Autoscroll::fit(), cx);
|
||||
});
|
||||
}
|
||||
|
||||
pub fn duplicate_line(&mut self, _: &DuplicateLine, cx: &mut ViewContext<Self>) {
|
||||
let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx));
|
||||
let buffer = &display_map.buffer_snapshot;
|
||||
@@ -5123,7 +5247,7 @@ impl Editor {
|
||||
self.change_selections(Some(Autoscroll::fit()), cx, |s| {
|
||||
s.move_with(|map, selection| {
|
||||
selection.collapse_to(
|
||||
movement::start_of_paragraph(map, selection.head()),
|
||||
movement::start_of_paragraph(map, selection.head(), 1),
|
||||
SelectionGoal::None,
|
||||
)
|
||||
});
|
||||
@@ -5143,7 +5267,7 @@ impl Editor {
|
||||
self.change_selections(Some(Autoscroll::fit()), cx, |s| {
|
||||
s.move_with(|map, selection| {
|
||||
selection.collapse_to(
|
||||
movement::end_of_paragraph(map, selection.head()),
|
||||
movement::end_of_paragraph(map, selection.head(), 1),
|
||||
SelectionGoal::None,
|
||||
)
|
||||
});
|
||||
@@ -5162,7 +5286,10 @@ impl Editor {
|
||||
|
||||
self.change_selections(Some(Autoscroll::fit()), cx, |s| {
|
||||
s.move_heads_with(|map, head, _| {
|
||||
(movement::start_of_paragraph(map, head), SelectionGoal::None)
|
||||
(
|
||||
movement::start_of_paragraph(map, head, 1),
|
||||
SelectionGoal::None,
|
||||
)
|
||||
});
|
||||
})
|
||||
}
|
||||
@@ -5179,7 +5306,10 @@ impl Editor {
|
||||
|
||||
self.change_selections(Some(Autoscroll::fit()), cx, |s| {
|
||||
s.move_heads_with(|map, head, _| {
|
||||
(movement::end_of_paragraph(map, head), SelectionGoal::None)
|
||||
(
|
||||
movement::end_of_paragraph(map, head, 1),
|
||||
SelectionGoal::None,
|
||||
)
|
||||
});
|
||||
})
|
||||
}
|
||||
@@ -5267,7 +5397,7 @@ impl Editor {
|
||||
|
||||
pub fn select_all(&mut self, _: &SelectAll, cx: &mut ViewContext<Self>) {
|
||||
let end = self.buffer.read(cx).read(cx).len();
|
||||
self.change_selections(Some(Autoscroll::fit()), cx, |s| {
|
||||
self.change_selections(None, cx, |s| {
|
||||
s.select_ranges(vec![0..end]);
|
||||
});
|
||||
}
|
||||
@@ -6178,14 +6308,31 @@ impl Editor {
|
||||
}
|
||||
|
||||
pub fn go_to_definition(&mut self, _: &GoToDefinition, cx: &mut ViewContext<Self>) {
|
||||
self.go_to_definition_of_kind(GotoDefinitionKind::Symbol, cx);
|
||||
self.go_to_definition_of_kind(GotoDefinitionKind::Symbol, false, cx);
|
||||
}
|
||||
|
||||
pub fn go_to_type_definition(&mut self, _: &GoToTypeDefinition, cx: &mut ViewContext<Self>) {
|
||||
self.go_to_definition_of_kind(GotoDefinitionKind::Type, cx);
|
||||
self.go_to_definition_of_kind(GotoDefinitionKind::Type, false, cx);
|
||||
}
|
||||
|
||||
fn go_to_definition_of_kind(&mut self, kind: GotoDefinitionKind, cx: &mut ViewContext<Self>) {
|
||||
pub fn go_to_definition_split(&mut self, _: &GoToDefinitionSplit, cx: &mut ViewContext<Self>) {
|
||||
self.go_to_definition_of_kind(GotoDefinitionKind::Symbol, true, cx);
|
||||
}
|
||||
|
||||
pub fn go_to_type_definition_split(
|
||||
&mut self,
|
||||
_: &GoToTypeDefinitionSplit,
|
||||
cx: &mut ViewContext<Self>,
|
||||
) {
|
||||
self.go_to_definition_of_kind(GotoDefinitionKind::Type, true, cx);
|
||||
}
|
||||
|
||||
fn go_to_definition_of_kind(
|
||||
&mut self,
|
||||
kind: GotoDefinitionKind,
|
||||
split: bool,
|
||||
cx: &mut ViewContext<Self>,
|
||||
) {
|
||||
let Some(workspace) = self.workspace(cx) else { return };
|
||||
let buffer = self.buffer.read(cx);
|
||||
let head = self.selections.newest::<usize>(cx).head();
|
||||
@@ -6204,7 +6351,7 @@ impl Editor {
|
||||
cx.spawn_labeled("Fetching Definition...", |editor, mut cx| async move {
|
||||
let definitions = definitions.await?;
|
||||
editor.update(&mut cx, |editor, cx| {
|
||||
editor.navigate_to_definitions(definitions, cx);
|
||||
editor.navigate_to_definitions(definitions, split, cx);
|
||||
})?;
|
||||
Ok::<(), anyhow::Error>(())
|
||||
})
|
||||
@@ -6214,6 +6361,7 @@ impl Editor {
|
||||
pub fn navigate_to_definitions(
|
||||
&mut self,
|
||||
mut definitions: Vec<LocationLink>,
|
||||
split: bool,
|
||||
cx: &mut ViewContext<Editor>,
|
||||
) {
|
||||
let Some(workspace) = self.workspace(cx) else { return };
|
||||
@@ -6227,18 +6375,24 @@ impl Editor {
|
||||
.to_offset(definition.target.buffer.read(cx));
|
||||
|
||||
if Some(&definition.target.buffer) == self.buffer.read(cx).as_singleton().as_ref() {
|
||||
let range = self.range_for_match(&range);
|
||||
self.change_selections(Some(Autoscroll::fit()), cx, |s| {
|
||||
s.select_ranges([range]);
|
||||
});
|
||||
} else {
|
||||
cx.window_context().defer(move |cx| {
|
||||
let target_editor: ViewHandle<Self> = workspace.update(cx, |workspace, cx| {
|
||||
workspace.open_project_item(definition.target.buffer.clone(), cx)
|
||||
if split {
|
||||
workspace.split_project_item(definition.target.buffer.clone(), cx)
|
||||
} else {
|
||||
workspace.open_project_item(definition.target.buffer.clone(), cx)
|
||||
}
|
||||
});
|
||||
target_editor.update(cx, |target_editor, cx| {
|
||||
// When selecting a definition in a different buffer, disable the nav history
|
||||
// to avoid creating a history entry at the previous cursor location.
|
||||
pane.update(cx, |pane, _| pane.disable_history());
|
||||
let range = target_editor.range_for_match(&range);
|
||||
target_editor.change_selections(Some(Autoscroll::fit()), cx, |s| {
|
||||
s.select_ranges([range]);
|
||||
});
|
||||
@@ -6269,7 +6423,9 @@ impl Editor {
|
||||
.map(|definition| definition.target)
|
||||
.collect();
|
||||
workspace.update(cx, |workspace, cx| {
|
||||
Self::open_locations_in_multibuffer(workspace, locations, replica_id, title, cx)
|
||||
Self::open_locations_in_multibuffer(
|
||||
workspace, locations, replica_id, title, split, cx,
|
||||
)
|
||||
});
|
||||
});
|
||||
}
|
||||
@@ -6314,7 +6470,7 @@ impl Editor {
|
||||
})
|
||||
.unwrap();
|
||||
Self::open_locations_in_multibuffer(
|
||||
workspace, locations, replica_id, title, cx,
|
||||
workspace, locations, replica_id, title, false, cx,
|
||||
);
|
||||
})?;
|
||||
|
||||
@@ -6329,6 +6485,7 @@ impl Editor {
|
||||
mut locations: Vec<Location>,
|
||||
replica_id: ReplicaId,
|
||||
title: String,
|
||||
split: bool,
|
||||
cx: &mut ViewContext<Workspace>,
|
||||
) {
|
||||
// If there are multiple definitions, open them in a multibuffer
|
||||
@@ -6375,7 +6532,11 @@ impl Editor {
|
||||
cx,
|
||||
);
|
||||
});
|
||||
workspace.add_item(Box::new(editor), cx);
|
||||
if split {
|
||||
workspace.split_item(Box::new(editor), cx);
|
||||
} else {
|
||||
workspace.add_item(Box::new(editor), cx);
|
||||
}
|
||||
}
|
||||
|
||||
pub fn rename(&mut self, _: &Rename, cx: &mut ViewContext<Self>) -> Option<Task<Result<()>>> {
|
||||
@@ -7024,6 +7185,20 @@ impl Editor {
|
||||
.text()
|
||||
}
|
||||
|
||||
pub fn wrap_guides(&self, cx: &AppContext) -> SmallVec<[(usize, bool); 2]> {
|
||||
let mut wrap_guides = smallvec::smallvec![];
|
||||
|
||||
let settings = self.buffer.read(cx).settings_at(0, cx);
|
||||
if settings.show_wrap_guides {
|
||||
if let SoftWrap::Column(soft_wrap) = self.soft_wrap_mode(cx) {
|
||||
wrap_guides.push((soft_wrap as usize, true));
|
||||
}
|
||||
wrap_guides.extend(settings.wrap_guides.iter().map(|guide| (*guide, false)))
|
||||
}
|
||||
|
||||
wrap_guides
|
||||
}
|
||||
|
||||
pub fn soft_wrap_mode(&self, cx: &AppContext) -> SoftWrap {
|
||||
let settings = self.buffer.read(cx).settings_at(0, cx);
|
||||
let mode = self
|
||||
@@ -7216,6 +7391,47 @@ impl Editor {
|
||||
}
|
||||
results
|
||||
}
|
||||
pub fn background_highlights_in_range_for<T: 'static>(
|
||||
&self,
|
||||
search_range: Range<Anchor>,
|
||||
display_snapshot: &DisplaySnapshot,
|
||||
theme: &Theme,
|
||||
) -> Vec<(Range<DisplayPoint>, Color)> {
|
||||
let mut results = Vec::new();
|
||||
let buffer = &display_snapshot.buffer_snapshot;
|
||||
let Some((color_fetcher, ranges)) = self.background_highlights
|
||||
.get(&TypeId::of::<T>()) else {
|
||||
return vec![];
|
||||
};
|
||||
|
||||
let color = color_fetcher(theme);
|
||||
let start_ix = match ranges.binary_search_by(|probe| {
|
||||
let cmp = probe.end.cmp(&search_range.start, buffer);
|
||||
if cmp.is_gt() {
|
||||
Ordering::Greater
|
||||
} else {
|
||||
Ordering::Less
|
||||
}
|
||||
}) {
|
||||
Ok(i) | Err(i) => i,
|
||||
};
|
||||
for range in &ranges[start_ix..] {
|
||||
if range.start.cmp(&search_range.end, buffer).is_ge() {
|
||||
break;
|
||||
}
|
||||
let start = range
|
||||
.start
|
||||
.to_point(buffer)
|
||||
.to_display_point(display_snapshot);
|
||||
let end = range
|
||||
.end
|
||||
.to_point(buffer)
|
||||
.to_display_point(display_snapshot);
|
||||
results.push((start..end, color))
|
||||
}
|
||||
|
||||
results
|
||||
}
|
||||
|
||||
pub fn highlight_text<T: 'static>(
|
||||
&mut self,
|
||||
@@ -7518,7 +7734,7 @@ impl Editor {
|
||||
|
||||
fn report_editor_event(
|
||||
&self,
|
||||
name: &'static str,
|
||||
operation: &'static str,
|
||||
file_extension: Option<String>,
|
||||
cx: &AppContext,
|
||||
) {
|
||||
@@ -7555,7 +7771,7 @@ impl Editor {
|
||||
let event = ClickhouseEvent::Editor {
|
||||
file_extension,
|
||||
vim_mode,
|
||||
operation: name,
|
||||
operation,
|
||||
copilot_enabled,
|
||||
copilot_enabled_for_language,
|
||||
};
|
||||
@@ -8054,7 +8270,7 @@ fn build_style(
|
||||
cx: &AppContext,
|
||||
) -> EditorStyle {
|
||||
let font_cache = cx.font_cache();
|
||||
|
||||
let line_height_scalar = settings.line_height();
|
||||
let theme_id = settings.theme.meta.id;
|
||||
let mut theme = settings.theme.editor.clone();
|
||||
let mut style = if let Some(get_field_editor_theme) = get_field_editor_theme {
|
||||
@@ -8068,6 +8284,7 @@ fn build_style(
|
||||
EditorStyle {
|
||||
text: field_editor_theme.text,
|
||||
placeholder_text: field_editor_theme.placeholder_text,
|
||||
line_height_scalar,
|
||||
theme,
|
||||
theme_id,
|
||||
}
|
||||
@@ -8090,6 +8307,7 @@ fn build_style(
|
||||
underline: Default::default(),
|
||||
},
|
||||
placeholder_text: None,
|
||||
line_height_scalar,
|
||||
theme,
|
||||
theme_id,
|
||||
}
|
||||
|
||||
@@ -15,6 +15,7 @@ pub struct EditorSettings {
|
||||
pub struct Scrollbar {
|
||||
pub show: ShowScrollbar,
|
||||
pub git_diff: bool,
|
||||
pub selections: bool,
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone, Debug, Serialize, Deserialize, JsonSchema, PartialEq, Eq)]
|
||||
@@ -39,6 +40,7 @@ pub struct EditorSettingsContent {
|
||||
pub struct ScrollbarContent {
|
||||
pub show: Option<ShowScrollbar>,
|
||||
pub git_diff: Option<bool>,
|
||||
pub selections: Option<bool>,
|
||||
}
|
||||
|
||||
impl Setting for EditorSettings {
|
||||
|
||||
@@ -2500,6 +2500,156 @@ fn test_join_lines_with_multi_selection(cx: &mut TestAppContext) {
|
||||
});
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_manipulate_lines_with_single_selection(cx: &mut TestAppContext) {
|
||||
init_test(cx, |_| {});
|
||||
|
||||
let mut cx = EditorTestContext::new(cx).await;
|
||||
|
||||
// Test sort_lines_case_insensitive()
|
||||
cx.set_state(indoc! {"
|
||||
«z
|
||||
y
|
||||
x
|
||||
Z
|
||||
Y
|
||||
Xˇ»
|
||||
"});
|
||||
cx.update_editor(|e, cx| e.sort_lines_case_insensitive(&SortLinesCaseInsensitive, cx));
|
||||
cx.assert_editor_state(indoc! {"
|
||||
«x
|
||||
X
|
||||
y
|
||||
Y
|
||||
z
|
||||
Zˇ»
|
||||
"});
|
||||
|
||||
// Test reverse_lines()
|
||||
cx.set_state(indoc! {"
|
||||
«5
|
||||
4
|
||||
3
|
||||
2
|
||||
1ˇ»
|
||||
"});
|
||||
cx.update_editor(|e, cx| e.reverse_lines(&ReverseLines, cx));
|
||||
cx.assert_editor_state(indoc! {"
|
||||
«1
|
||||
2
|
||||
3
|
||||
4
|
||||
5ˇ»
|
||||
"});
|
||||
|
||||
// Skip testing shuffle_line()
|
||||
|
||||
// From here on out, test more complex cases of manipulate_lines() with a single driver method: sort_lines_case_sensitive()
|
||||
// Since all methods calling manipulate_lines() are doing the exact same general thing (reordering lines)
|
||||
|
||||
// Don't manipulate when cursor is on single line, but expand the selection
|
||||
cx.set_state(indoc! {"
|
||||
ddˇdd
|
||||
ccc
|
||||
bb
|
||||
a
|
||||
"});
|
||||
cx.update_editor(|e, cx| e.sort_lines_case_sensitive(&SortLinesCaseSensitive, cx));
|
||||
cx.assert_editor_state(indoc! {"
|
||||
«ddddˇ»
|
||||
ccc
|
||||
bb
|
||||
a
|
||||
"});
|
||||
|
||||
// Basic manipulate case
|
||||
// Start selection moves to column 0
|
||||
// End of selection shrinks to fit shorter line
|
||||
cx.set_state(indoc! {"
|
||||
dd«d
|
||||
ccc
|
||||
bb
|
||||
aaaaaˇ»
|
||||
"});
|
||||
cx.update_editor(|e, cx| e.sort_lines_case_sensitive(&SortLinesCaseSensitive, cx));
|
||||
cx.assert_editor_state(indoc! {"
|
||||
«aaaaa
|
||||
bb
|
||||
ccc
|
||||
dddˇ»
|
||||
"});
|
||||
|
||||
// Manipulate case with newlines
|
||||
cx.set_state(indoc! {"
|
||||
dd«d
|
||||
ccc
|
||||
|
||||
bb
|
||||
aaaaa
|
||||
|
||||
ˇ»
|
||||
"});
|
||||
cx.update_editor(|e, cx| e.sort_lines_case_sensitive(&SortLinesCaseSensitive, cx));
|
||||
cx.assert_editor_state(indoc! {"
|
||||
«
|
||||
|
||||
aaaaa
|
||||
bb
|
||||
ccc
|
||||
dddˇ»
|
||||
|
||||
"});
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_manipulate_lines_with_multi_selection(cx: &mut TestAppContext) {
|
||||
init_test(cx, |_| {});
|
||||
|
||||
let mut cx = EditorTestContext::new(cx).await;
|
||||
|
||||
// Manipulate with multiple selections on a single line
|
||||
cx.set_state(indoc! {"
|
||||
dd«dd
|
||||
cˇ»c«c
|
||||
bb
|
||||
aaaˇ»aa
|
||||
"});
|
||||
cx.update_editor(|e, cx| e.sort_lines_case_sensitive(&SortLinesCaseSensitive, cx));
|
||||
cx.assert_editor_state(indoc! {"
|
||||
«aaaaa
|
||||
bb
|
||||
ccc
|
||||
ddddˇ»
|
||||
"});
|
||||
|
||||
// Manipulate with multiple disjoin selections
|
||||
cx.set_state(indoc! {"
|
||||
5«
|
||||
4
|
||||
3
|
||||
2
|
||||
1ˇ»
|
||||
|
||||
dd«dd
|
||||
ccc
|
||||
bb
|
||||
aaaˇ»aa
|
||||
"});
|
||||
cx.update_editor(|e, cx| e.sort_lines_case_sensitive(&SortLinesCaseSensitive, cx));
|
||||
cx.assert_editor_state(indoc! {"
|
||||
«1
|
||||
2
|
||||
3
|
||||
4
|
||||
5ˇ»
|
||||
|
||||
«aaaaa
|
||||
bb
|
||||
ccc
|
||||
ddddˇ»
|
||||
"});
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
fn test_duplicate_line(cx: &mut TestAppContext) {
|
||||
init_test(cx, |_| {});
|
||||
@@ -3836,7 +3986,7 @@ async fn test_autoclose_with_embedded_language(cx: &mut gpui::TestAppContext) {
|
||||
autoclose_before: "})]>".into(),
|
||||
..Default::default()
|
||||
},
|
||||
Some(tree_sitter_javascript::language()),
|
||||
Some(tree_sitter_typescript::language_tsx()),
|
||||
));
|
||||
|
||||
let registry = Arc::new(LanguageRegistry::test());
|
||||
@@ -5383,7 +5533,7 @@ async fn test_toggle_block_comment(cx: &mut gpui::TestAppContext) {
|
||||
line_comment: Some("// ".into()),
|
||||
..Default::default()
|
||||
},
|
||||
Some(tree_sitter_javascript::language()),
|
||||
Some(tree_sitter_typescript::language_tsx()),
|
||||
));
|
||||
|
||||
let registry = Arc::new(LanguageRegistry::test());
|
||||
@@ -7223,6 +7373,97 @@ async fn test_language_server_restart_due_to_settings_change(cx: &mut gpui::Test
|
||||
);
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_completions_with_additional_edits(cx: &mut gpui::TestAppContext) {
|
||||
init_test(cx, |_| {});
|
||||
|
||||
let mut cx = EditorLspTestContext::new_rust(
|
||||
lsp::ServerCapabilities {
|
||||
completion_provider: Some(lsp::CompletionOptions {
|
||||
trigger_characters: Some(vec![".".to_string()]),
|
||||
..Default::default()
|
||||
}),
|
||||
..Default::default()
|
||||
},
|
||||
cx,
|
||||
)
|
||||
.await;
|
||||
|
||||
cx.set_state(indoc! {"fn main() { let a = 2ˇ; }"});
|
||||
cx.simulate_keystroke(".");
|
||||
let completion_item = lsp::CompletionItem {
|
||||
label: "some".into(),
|
||||
kind: Some(lsp::CompletionItemKind::SNIPPET),
|
||||
detail: Some("Wrap the expression in an `Option::Some`".to_string()),
|
||||
documentation: Some(lsp::Documentation::MarkupContent(lsp::MarkupContent {
|
||||
kind: lsp::MarkupKind::Markdown,
|
||||
value: "```rust\nSome(2)\n```".to_string(),
|
||||
})),
|
||||
deprecated: Some(false),
|
||||
sort_text: Some("fffffff2".to_string()),
|
||||
filter_text: Some("some".to_string()),
|
||||
insert_text_format: Some(lsp::InsertTextFormat::SNIPPET),
|
||||
text_edit: Some(lsp::CompletionTextEdit::Edit(lsp::TextEdit {
|
||||
range: lsp::Range {
|
||||
start: lsp::Position {
|
||||
line: 0,
|
||||
character: 22,
|
||||
},
|
||||
end: lsp::Position {
|
||||
line: 0,
|
||||
character: 22,
|
||||
},
|
||||
},
|
||||
new_text: "Some(2)".to_string(),
|
||||
})),
|
||||
additional_text_edits: Some(vec![lsp::TextEdit {
|
||||
range: lsp::Range {
|
||||
start: lsp::Position {
|
||||
line: 0,
|
||||
character: 20,
|
||||
},
|
||||
end: lsp::Position {
|
||||
line: 0,
|
||||
character: 22,
|
||||
},
|
||||
},
|
||||
new_text: "".to_string(),
|
||||
}]),
|
||||
..Default::default()
|
||||
};
|
||||
|
||||
let closure_completion_item = completion_item.clone();
|
||||
let mut request = cx.handle_request::<lsp::request::Completion, _, _>(move |_, _, _| {
|
||||
let task_completion_item = closure_completion_item.clone();
|
||||
async move {
|
||||
Ok(Some(lsp::CompletionResponse::Array(vec![
|
||||
task_completion_item,
|
||||
])))
|
||||
}
|
||||
});
|
||||
|
||||
request.next().await;
|
||||
|
||||
cx.condition(|editor, _| editor.context_menu_visible())
|
||||
.await;
|
||||
let apply_additional_edits = cx.update_editor(|editor, cx| {
|
||||
editor
|
||||
.confirm_completion(&ConfirmCompletion::default(), cx)
|
||||
.unwrap()
|
||||
});
|
||||
cx.assert_editor_state(indoc! {"fn main() { let a = 2.Some(2)ˇ; }"});
|
||||
|
||||
cx.handle_request::<lsp::request::ResolveCompletionItem, _, _>(move |_, _, _| {
|
||||
let task_completion_item = completion_item.clone();
|
||||
async move { Ok(task_completion_item) }
|
||||
})
|
||||
.next()
|
||||
.await
|
||||
.unwrap();
|
||||
apply_additional_edits.await.unwrap();
|
||||
cx.assert_editor_state(indoc! {"fn main() { let a = Some(2)ˇ; }"});
|
||||
}
|
||||
|
||||
fn empty_range(row: usize, column: usize) -> Range<DisplayPoint> {
|
||||
let point = DisplayPoint::new(row as u32, column as u32);
|
||||
point..point
|
||||
|
||||
@@ -61,6 +61,7 @@ enum FoldMarkers {}
|
||||
struct SelectionLayout {
|
||||
head: DisplayPoint,
|
||||
cursor_shape: CursorShape,
|
||||
is_newest: bool,
|
||||
range: Range<DisplayPoint>,
|
||||
}
|
||||
|
||||
@@ -70,6 +71,7 @@ impl SelectionLayout {
|
||||
line_mode: bool,
|
||||
cursor_shape: CursorShape,
|
||||
map: &DisplaySnapshot,
|
||||
is_newest: bool,
|
||||
) -> Self {
|
||||
if line_mode {
|
||||
let selection = selection.map(|p| p.to_point(&map.buffer_snapshot));
|
||||
@@ -77,6 +79,7 @@ impl SelectionLayout {
|
||||
Self {
|
||||
head: selection.head().to_display_point(map),
|
||||
cursor_shape,
|
||||
is_newest,
|
||||
range: point_range.start.to_display_point(map)
|
||||
..point_range.end.to_display_point(map),
|
||||
}
|
||||
@@ -85,6 +88,7 @@ impl SelectionLayout {
|
||||
Self {
|
||||
head: selection.head(),
|
||||
cursor_shape,
|
||||
is_newest,
|
||||
range: selection.range(),
|
||||
}
|
||||
}
|
||||
@@ -156,6 +160,7 @@ impl EditorElement {
|
||||
event.position,
|
||||
event.cmd,
|
||||
event.shift,
|
||||
event.alt,
|
||||
position_map.as_ref(),
|
||||
text_bounds,
|
||||
cx,
|
||||
@@ -167,6 +172,10 @@ impl EditorElement {
|
||||
.on_drag(MouseButton::Left, {
|
||||
let position_map = position_map.clone();
|
||||
move |event, editor, cx| {
|
||||
if event.end {
|
||||
return;
|
||||
}
|
||||
|
||||
if !Self::mouse_dragged(
|
||||
editor,
|
||||
event.platform_event,
|
||||
@@ -308,6 +317,7 @@ impl EditorElement {
|
||||
position: Vector2F,
|
||||
cmd: bool,
|
||||
shift: bool,
|
||||
alt: bool,
|
||||
position_map: &PositionMap,
|
||||
text_bounds: RectF,
|
||||
cx: &mut EventContext<Editor>,
|
||||
@@ -324,9 +334,9 @@ impl EditorElement {
|
||||
|
||||
if point == target_point {
|
||||
if shift {
|
||||
go_to_fetched_type_definition(editor, point, cx);
|
||||
go_to_fetched_type_definition(editor, point, alt, cx);
|
||||
} else {
|
||||
go_to_fetched_definition(editor, point, cx);
|
||||
go_to_fetched_definition(editor, point, alt, cx);
|
||||
}
|
||||
|
||||
return true;
|
||||
@@ -535,6 +545,24 @@ impl EditorElement {
|
||||
corner_radius: 0.,
|
||||
});
|
||||
}
|
||||
|
||||
for (wrap_position, active) in layout.wrap_guides.iter() {
|
||||
let x = text_bounds.origin_x() + wrap_position + layout.position_map.em_width / 2.;
|
||||
let color = if *active {
|
||||
self.style.active_wrap_guide
|
||||
} else {
|
||||
self.style.wrap_guide
|
||||
};
|
||||
scene.push_quad(Quad {
|
||||
bounds: RectF::new(
|
||||
vec2f(x, text_bounds.origin_y()),
|
||||
vec2f(1., text_bounds.height()),
|
||||
),
|
||||
background: Some(color),
|
||||
border: Border::new(0., Color::transparent_black()),
|
||||
corner_radius: 0.,
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -862,6 +890,12 @@ impl EditorElement {
|
||||
let x = cursor_character_x - scroll_left;
|
||||
let y = cursor_position.row() as f32 * layout.position_map.line_height
|
||||
- scroll_top;
|
||||
if selection.is_newest {
|
||||
editor.pixel_position_of_newest_cursor = Some(vec2f(
|
||||
bounds.origin_x() + x + block_width / 2.,
|
||||
bounds.origin_y() + y + layout.position_map.line_height / 2.,
|
||||
));
|
||||
}
|
||||
cursors.push(Cursor {
|
||||
color: selection_style.cursor,
|
||||
block_width,
|
||||
@@ -1008,6 +1042,7 @@ impl EditorElement {
|
||||
bounds: RectF,
|
||||
layout: &mut LayoutState,
|
||||
cx: &mut ViewContext<Editor>,
|
||||
editor: &Editor,
|
||||
) {
|
||||
enum ScrollbarMouseHandlers {}
|
||||
if layout.mode != EditorMode::Full {
|
||||
@@ -1050,9 +1085,76 @@ impl EditorElement {
|
||||
background: style.track.background_color,
|
||||
..Default::default()
|
||||
});
|
||||
let scrollbar_settings = settings::get::<EditorSettings>(cx).scrollbar;
|
||||
let theme = theme::current(cx);
|
||||
let scrollbar_theme = &theme.editor.scrollbar;
|
||||
if layout.is_singleton && scrollbar_settings.selections {
|
||||
let start_anchor = Anchor::min();
|
||||
let end_anchor = Anchor::max();
|
||||
let mut start_row = None;
|
||||
let mut end_row = None;
|
||||
let color = scrollbar_theme.selections;
|
||||
let border = Border {
|
||||
width: 1.,
|
||||
color: style.thumb.border.color,
|
||||
overlay: false,
|
||||
top: false,
|
||||
right: true,
|
||||
bottom: false,
|
||||
left: true,
|
||||
};
|
||||
let mut push_region = |start, end| {
|
||||
if let (Some(start_display), Some(end_display)) = (start, end) {
|
||||
let start_y = y_for_row(start_display as f32);
|
||||
let mut end_y = y_for_row(end_display as f32);
|
||||
if end_y - start_y < 1. {
|
||||
end_y = start_y + 1.;
|
||||
}
|
||||
let bounds = RectF::from_points(vec2f(left, start_y), vec2f(right, end_y));
|
||||
|
||||
if layout.is_singleton && settings::get::<EditorSettings>(cx).scrollbar.git_diff {
|
||||
let diff_style = theme::current(cx).editor.scrollbar.git.clone();
|
||||
scene.push_quad(Quad {
|
||||
bounds,
|
||||
background: Some(color),
|
||||
border,
|
||||
corner_radius: style.thumb.corner_radius,
|
||||
})
|
||||
}
|
||||
};
|
||||
for (row, _) in &editor
|
||||
.background_highlights_in_range_for::<crate::items::BufferSearchHighlights>(
|
||||
start_anchor..end_anchor,
|
||||
&layout.position_map.snapshot,
|
||||
&theme,
|
||||
)
|
||||
{
|
||||
let start_display = row.start;
|
||||
let end_display = row.end;
|
||||
|
||||
if start_row.is_none() {
|
||||
assert_eq!(end_row, None);
|
||||
start_row = Some(start_display.row());
|
||||
end_row = Some(end_display.row());
|
||||
continue;
|
||||
}
|
||||
if let Some(current_end) = end_row.as_mut() {
|
||||
if start_display.row() > *current_end + 1 {
|
||||
push_region(start_row, end_row);
|
||||
start_row = Some(start_display.row());
|
||||
end_row = Some(end_display.row());
|
||||
} else {
|
||||
// Merge two hunks.
|
||||
*current_end = end_display.row();
|
||||
}
|
||||
} else {
|
||||
unreachable!();
|
||||
}
|
||||
}
|
||||
// We might still have a hunk that was not rendered (if there was a search hit on the last line)
|
||||
push_region(start_row, end_row);
|
||||
}
|
||||
|
||||
if layout.is_singleton && scrollbar_settings.git_diff {
|
||||
let diff_style = scrollbar_theme.git.clone();
|
||||
for hunk in layout
|
||||
.position_map
|
||||
.snapshot
|
||||
@@ -1114,8 +1216,10 @@ impl EditorElement {
|
||||
});
|
||||
scene.push_mouse_region(
|
||||
MouseRegion::new::<ScrollbarMouseHandlers>(cx.view_id(), cx.view_id(), track_bounds)
|
||||
.on_move(move |_, editor: &mut Editor, cx| {
|
||||
editor.scroll_manager.show_scrollbar(cx);
|
||||
.on_move(move |event, editor: &mut Editor, cx| {
|
||||
if event.pressed_button.is_none() {
|
||||
editor.scroll_manager.show_scrollbar(cx);
|
||||
}
|
||||
})
|
||||
.on_down(MouseButton::Left, {
|
||||
let row_range = row_range.clone();
|
||||
@@ -1135,6 +1239,10 @@ impl EditorElement {
|
||||
})
|
||||
.on_drag(MouseButton::Left, {
|
||||
move |event, editor: &mut Editor, cx| {
|
||||
if event.end {
|
||||
return;
|
||||
}
|
||||
|
||||
let y = event.prev_mouse_position.y();
|
||||
let new_y = event.position.y();
|
||||
if thumb_top < y && y < thumb_bottom {
|
||||
@@ -1238,16 +1346,15 @@ impl EditorElement {
|
||||
}
|
||||
}
|
||||
|
||||
fn max_line_number_width(&self, snapshot: &EditorSnapshot, cx: &ViewContext<Editor>) -> f32 {
|
||||
let digit_count = (snapshot.max_buffer_row() as f32).log10().floor() as usize + 1;
|
||||
fn column_pixels(&self, column: usize, cx: &ViewContext<Editor>) -> f32 {
|
||||
let style = &self.style;
|
||||
|
||||
cx.text_layout_cache()
|
||||
.layout_str(
|
||||
"1".repeat(digit_count).as_str(),
|
||||
" ".repeat(column).as_str(),
|
||||
style.text.font_size,
|
||||
&[(
|
||||
digit_count,
|
||||
column,
|
||||
RunStyle {
|
||||
font_id: style.text.font_id,
|
||||
color: Color::black(),
|
||||
@@ -1258,6 +1365,11 @@ impl EditorElement {
|
||||
.width()
|
||||
}
|
||||
|
||||
fn max_line_number_width(&self, snapshot: &EditorSnapshot, cx: &ViewContext<Editor>) -> f32 {
|
||||
let digit_count = (snapshot.max_buffer_row() as f32 + 1.).log10().floor() as usize + 1;
|
||||
self.column_pixels(digit_count, cx)
|
||||
}
|
||||
|
||||
//Folds contained in a hunk are ignored apart from shrinking visual size
|
||||
//If a fold contains any hunks then that fold line is marked as modified
|
||||
fn layout_git_gutters(
|
||||
@@ -1905,7 +2017,8 @@ impl Element<Editor> for EditorElement {
|
||||
|
||||
let snapshot = editor.snapshot(cx);
|
||||
let style = self.style.clone();
|
||||
let line_height = style.text.line_height(cx.font_cache());
|
||||
|
||||
let line_height = (style.text.font_size * style.line_height_scalar).round();
|
||||
|
||||
let gutter_padding;
|
||||
let gutter_width;
|
||||
@@ -1942,6 +2055,12 @@ impl Element<Editor> for EditorElement {
|
||||
}
|
||||
};
|
||||
|
||||
let wrap_guides = editor
|
||||
.wrap_guides(cx)
|
||||
.iter()
|
||||
.map(|(guide, active)| (self.column_pixels(*guide, cx), *active))
|
||||
.collect();
|
||||
|
||||
let scroll_height = (snapshot.max_point().row() + 1) as f32 * line_height;
|
||||
if let EditorMode::AutoHeight { max_lines } = snapshot.mode {
|
||||
size.set_y(
|
||||
@@ -2036,6 +2155,7 @@ impl Element<Editor> for EditorElement {
|
||||
line_mode,
|
||||
cursor_shape,
|
||||
&snapshot.display_snapshot,
|
||||
false,
|
||||
));
|
||||
}
|
||||
selections.extend(remote_selections);
|
||||
@@ -2045,6 +2165,7 @@ impl Element<Editor> for EditorElement {
|
||||
.selections
|
||||
.disjoint_in_range(start_anchor..end_anchor, cx);
|
||||
local_selections.extend(editor.selections.pending(cx));
|
||||
let newest = editor.selections.newest(cx);
|
||||
for selection in &local_selections {
|
||||
let is_empty = selection.start == selection.end;
|
||||
let selection_start = snapshot.prev_line_boundary(selection.start).1;
|
||||
@@ -2067,11 +2188,13 @@ impl Element<Editor> for EditorElement {
|
||||
local_selections
|
||||
.into_iter()
|
||||
.map(|selection| {
|
||||
let is_newest = selection == newest;
|
||||
SelectionLayout::new(
|
||||
selection,
|
||||
editor.selections.line_mode,
|
||||
editor.cursor_shape,
|
||||
&snapshot.display_snapshot,
|
||||
is_newest,
|
||||
)
|
||||
})
|
||||
.collect(),
|
||||
@@ -2083,6 +2206,9 @@ impl Element<Editor> for EditorElement {
|
||||
ShowScrollbar::Auto => {
|
||||
// Git
|
||||
(is_singleton && scrollbar_settings.git_diff && snapshot.buffer_snapshot.has_git_diffs())
|
||||
||
|
||||
// Selections
|
||||
(is_singleton && scrollbar_settings.selections && !highlighted_ranges.is_empty())
|
||||
// Scrollmanager
|
||||
|| editor.scroll_manager.scrollbars_visible()
|
||||
}
|
||||
@@ -2295,6 +2421,7 @@ impl Element<Editor> for EditorElement {
|
||||
snapshot,
|
||||
}),
|
||||
visible_display_row_range: start_row..end_row,
|
||||
wrap_guides,
|
||||
gutter_size,
|
||||
gutter_padding,
|
||||
text_size,
|
||||
@@ -2368,7 +2495,7 @@ impl Element<Editor> for EditorElement {
|
||||
if !layout.blocks.is_empty() {
|
||||
self.paint_blocks(scene, bounds, visible_bounds, layout, editor, cx);
|
||||
}
|
||||
self.paint_scrollbar(scene, bounds, layout, cx);
|
||||
self.paint_scrollbar(scene, bounds, layout, cx, &editor);
|
||||
scene.pop_layer();
|
||||
|
||||
scene.pop_layer();
|
||||
@@ -2445,6 +2572,7 @@ pub struct LayoutState {
|
||||
gutter_margin: f32,
|
||||
text_size: Vector2F,
|
||||
mode: EditorMode,
|
||||
wrap_guides: SmallVec<[(f32, bool); 2]>,
|
||||
visible_display_row_range: Range<u32>,
|
||||
active_rows: BTreeMap<u32, bool>,
|
||||
highlighted_rows: Option<Range<u32>>,
|
||||
|
||||
@@ -198,7 +198,7 @@ fn show_hover(
|
||||
|
||||
// Construct new hover popover from hover request
|
||||
let hover_popover = hover_request.await.ok().flatten().and_then(|hover_result| {
|
||||
if hover_result.contents.is_empty() {
|
||||
if hover_result.is_empty() {
|
||||
return None;
|
||||
}
|
||||
|
||||
@@ -420,7 +420,7 @@ fn render_blocks(
|
||||
|
||||
RenderedInfo {
|
||||
theme_id,
|
||||
text,
|
||||
text: text.trim().to_string(),
|
||||
highlights,
|
||||
region_ranges,
|
||||
regions,
|
||||
@@ -816,6 +816,118 @@ mod tests {
|
||||
});
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_empty_hovers_filtered(cx: &mut gpui::TestAppContext) {
|
||||
init_test(cx, |_| {});
|
||||
|
||||
let mut cx = EditorLspTestContext::new_rust(
|
||||
lsp::ServerCapabilities {
|
||||
hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
|
||||
..Default::default()
|
||||
},
|
||||
cx,
|
||||
)
|
||||
.await;
|
||||
|
||||
// Hover with keyboard has no delay
|
||||
cx.set_state(indoc! {"
|
||||
fˇn test() { println!(); }
|
||||
"});
|
||||
cx.update_editor(|editor, cx| hover(editor, &Hover, cx));
|
||||
let symbol_range = cx.lsp_range(indoc! {"
|
||||
«fn» test() { println!(); }
|
||||
"});
|
||||
cx.handle_request::<lsp::request::HoverRequest, _, _>(move |_, _, _| async move {
|
||||
Ok(Some(lsp::Hover {
|
||||
contents: lsp::HoverContents::Array(vec![
|
||||
lsp::MarkedString::String("regular text for hover to show".to_string()),
|
||||
lsp::MarkedString::String("".to_string()),
|
||||
lsp::MarkedString::LanguageString(lsp::LanguageString {
|
||||
language: "Rust".to_string(),
|
||||
value: "".to_string(),
|
||||
}),
|
||||
]),
|
||||
range: Some(symbol_range),
|
||||
}))
|
||||
})
|
||||
.next()
|
||||
.await;
|
||||
|
||||
cx.condition(|editor, _| editor.hover_state.visible()).await;
|
||||
cx.editor(|editor, _| {
|
||||
assert_eq!(
|
||||
editor.hover_state.info_popover.clone().unwrap().blocks,
|
||||
vec![HoverBlock {
|
||||
text: "regular text for hover to show".to_string(),
|
||||
kind: HoverBlockKind::Markdown,
|
||||
}],
|
||||
"No empty string hovers should be shown"
|
||||
);
|
||||
});
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_line_ends_trimmed(cx: &mut gpui::TestAppContext) {
|
||||
init_test(cx, |_| {});
|
||||
|
||||
let mut cx = EditorLspTestContext::new_rust(
|
||||
lsp::ServerCapabilities {
|
||||
hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
|
||||
..Default::default()
|
||||
},
|
||||
cx,
|
||||
)
|
||||
.await;
|
||||
|
||||
// Hover with keyboard has no delay
|
||||
cx.set_state(indoc! {"
|
||||
fˇn test() { println!(); }
|
||||
"});
|
||||
cx.update_editor(|editor, cx| hover(editor, &Hover, cx));
|
||||
let symbol_range = cx.lsp_range(indoc! {"
|
||||
«fn» test() { println!(); }
|
||||
"});
|
||||
|
||||
let code_str = "\nlet hovered_point: Vector2F // size = 8, align = 0x4\n";
|
||||
let markdown_string = format!("\n```rust\n{code_str}```");
|
||||
|
||||
let closure_markdown_string = markdown_string.clone();
|
||||
cx.handle_request::<lsp::request::HoverRequest, _, _>(move |_, _, _| {
|
||||
let future_markdown_string = closure_markdown_string.clone();
|
||||
async move {
|
||||
Ok(Some(lsp::Hover {
|
||||
contents: lsp::HoverContents::Markup(lsp::MarkupContent {
|
||||
kind: lsp::MarkupKind::Markdown,
|
||||
value: future_markdown_string,
|
||||
}),
|
||||
range: Some(symbol_range),
|
||||
}))
|
||||
}
|
||||
})
|
||||
.next()
|
||||
.await;
|
||||
|
||||
cx.condition(|editor, _| editor.hover_state.visible()).await;
|
||||
cx.editor(|editor, cx| {
|
||||
let blocks = editor.hover_state.info_popover.clone().unwrap().blocks;
|
||||
assert_eq!(
|
||||
blocks,
|
||||
vec![HoverBlock {
|
||||
text: markdown_string,
|
||||
kind: HoverBlockKind::Markdown,
|
||||
}],
|
||||
);
|
||||
|
||||
let style = editor.style(cx);
|
||||
let rendered = render_blocks(0, &blocks, &Default::default(), None, &style);
|
||||
assert_eq!(
|
||||
rendered.text,
|
||||
code_str.trim(),
|
||||
"Should not have extra line breaks at end of rendered hover"
|
||||
);
|
||||
});
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_hover_diagnostic_and_info_popovers(cx: &mut gpui::TestAppContext) {
|
||||
init_test(cx, |_| {});
|
||||
|
||||
@@ -195,20 +195,41 @@ impl InlayHintCache {
|
||||
}
|
||||
}
|
||||
|
||||
pub fn refresh_inlay_hints(
|
||||
pub fn spawn_hint_refresh(
|
||||
&mut self,
|
||||
mut excerpts_to_query: HashMap<ExcerptId, (ModelHandle<Buffer>, Global, Range<usize>)>,
|
||||
invalidate: InvalidationStrategy,
|
||||
cx: &mut ViewContext<Editor>,
|
||||
) {
|
||||
if !self.enabled || excerpts_to_query.is_empty() {
|
||||
return;
|
||||
) -> Option<InlaySplice> {
|
||||
if !self.enabled {
|
||||
return None;
|
||||
}
|
||||
|
||||
let update_tasks = &mut self.update_tasks;
|
||||
let mut invalidated_hints = Vec::new();
|
||||
if invalidate.should_invalidate() {
|
||||
update_tasks
|
||||
.retain(|task_excerpt_id, _| excerpts_to_query.contains_key(task_excerpt_id));
|
||||
let mut changed = false;
|
||||
update_tasks.retain(|task_excerpt_id, _| {
|
||||
let retain = excerpts_to_query.contains_key(task_excerpt_id);
|
||||
changed |= !retain;
|
||||
retain
|
||||
});
|
||||
self.hints.retain(|cached_excerpt, cached_hints| {
|
||||
let retain = excerpts_to_query.contains_key(cached_excerpt);
|
||||
changed |= !retain;
|
||||
if !retain {
|
||||
invalidated_hints.extend(cached_hints.read().hints.iter().map(|&(id, _)| id));
|
||||
}
|
||||
retain
|
||||
});
|
||||
if changed {
|
||||
self.version += 1;
|
||||
}
|
||||
}
|
||||
if excerpts_to_query.is_empty() && invalidated_hints.is_empty() {
|
||||
return None;
|
||||
}
|
||||
|
||||
let cache_version = self.version;
|
||||
excerpts_to_query.retain(|visible_excerpt_id, _| {
|
||||
match update_tasks.entry(*visible_excerpt_id) {
|
||||
@@ -229,6 +250,15 @@ impl InlayHintCache {
|
||||
.ok();
|
||||
})
|
||||
.detach();
|
||||
|
||||
if invalidated_hints.is_empty() {
|
||||
None
|
||||
} else {
|
||||
Some(InlaySplice {
|
||||
to_remove: invalidated_hints,
|
||||
to_insert: Vec::new(),
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
fn new_allowed_hint_kinds_splice(
|
||||
@@ -684,7 +714,7 @@ async fn fetch_and_update_hints(
|
||||
|
||||
if query.invalidate.should_invalidate() {
|
||||
let mut outdated_excerpt_caches = HashSet::default();
|
||||
for (excerpt_id, excerpt_hints) in editor.inlay_hint_cache().hints.iter() {
|
||||
for (excerpt_id, excerpt_hints) in &editor.inlay_hint_cache().hints {
|
||||
let excerpt_hints = excerpt_hints.read();
|
||||
if excerpt_hints.buffer_id == query.buffer_id
|
||||
&& excerpt_id != &query.excerpt_id
|
||||
@@ -1022,9 +1052,9 @@ mod tests {
|
||||
"Should get its first hints when opening the editor"
|
||||
);
|
||||
assert_eq!(expected_layers, visible_hint_labels(editor, cx));
|
||||
let inlay_cache = editor.inlay_hint_cache();
|
||||
assert_eq!(
|
||||
inlay_cache.version, edits_made,
|
||||
editor.inlay_hint_cache().version,
|
||||
edits_made,
|
||||
"The editor update the cache version after every cache/view change"
|
||||
);
|
||||
});
|
||||
@@ -1053,9 +1083,9 @@ mod tests {
|
||||
"Should not update hints while the work task is running"
|
||||
);
|
||||
assert_eq!(expected_layers, visible_hint_labels(editor, cx));
|
||||
let inlay_cache = editor.inlay_hint_cache();
|
||||
assert_eq!(
|
||||
inlay_cache.version, edits_made,
|
||||
editor.inlay_hint_cache().version,
|
||||
edits_made,
|
||||
"Should not update the cache while the work task is running"
|
||||
);
|
||||
});
|
||||
@@ -1077,9 +1107,9 @@ mod tests {
|
||||
"New hints should be queried after the work task is done"
|
||||
);
|
||||
assert_eq!(expected_layers, visible_hint_labels(editor, cx));
|
||||
let inlay_cache = editor.inlay_hint_cache();
|
||||
assert_eq!(
|
||||
inlay_cache.version, edits_made,
|
||||
editor.inlay_hint_cache().version,
|
||||
edits_made,
|
||||
"Cache version should udpate once after the work task is done"
|
||||
);
|
||||
});
|
||||
@@ -1194,9 +1224,9 @@ mod tests {
|
||||
"Should get its first hints when opening the editor"
|
||||
);
|
||||
assert_eq!(expected_layers, visible_hint_labels(editor, cx));
|
||||
let inlay_cache = editor.inlay_hint_cache();
|
||||
assert_eq!(
|
||||
inlay_cache.version, 1,
|
||||
editor.inlay_hint_cache().version,
|
||||
1,
|
||||
"Rust editor update the cache version after every cache/view change"
|
||||
);
|
||||
});
|
||||
@@ -1252,8 +1282,7 @@ mod tests {
|
||||
"Markdown editor should have a separate verison, repeating Rust editor rules"
|
||||
);
|
||||
assert_eq!(expected_layers, visible_hint_labels(editor, cx));
|
||||
let inlay_cache = editor.inlay_hint_cache();
|
||||
assert_eq!(inlay_cache.version, 1);
|
||||
assert_eq!(editor.inlay_hint_cache().version, 1);
|
||||
});
|
||||
|
||||
rs_editor.update(cx, |editor, cx| {
|
||||
@@ -1269,9 +1298,9 @@ mod tests {
|
||||
"Rust inlay cache should change after the edit"
|
||||
);
|
||||
assert_eq!(expected_layers, visible_hint_labels(editor, cx));
|
||||
let inlay_cache = editor.inlay_hint_cache();
|
||||
assert_eq!(
|
||||
inlay_cache.version, 2,
|
||||
editor.inlay_hint_cache().version,
|
||||
2,
|
||||
"Every time hint cache changes, cache version should be incremented"
|
||||
);
|
||||
});
|
||||
@@ -1283,8 +1312,7 @@ mod tests {
|
||||
"Markdown editor should not be affected by Rust editor changes"
|
||||
);
|
||||
assert_eq!(expected_layers, visible_hint_labels(editor, cx));
|
||||
let inlay_cache = editor.inlay_hint_cache();
|
||||
assert_eq!(inlay_cache.version, 1);
|
||||
assert_eq!(editor.inlay_hint_cache().version, 1);
|
||||
});
|
||||
|
||||
md_editor.update(cx, |editor, cx| {
|
||||
@@ -1300,8 +1328,7 @@ mod tests {
|
||||
"Rust editor should not be affected by Markdown editor changes"
|
||||
);
|
||||
assert_eq!(expected_layers, visible_hint_labels(editor, cx));
|
||||
let inlay_cache = editor.inlay_hint_cache();
|
||||
assert_eq!(inlay_cache.version, 2);
|
||||
assert_eq!(editor.inlay_hint_cache().version, 2);
|
||||
});
|
||||
rs_editor.update(cx, |editor, cx| {
|
||||
let expected_layers = vec!["1".to_string()];
|
||||
@@ -1311,8 +1338,7 @@ mod tests {
|
||||
"Markdown editor should also change independently"
|
||||
);
|
||||
assert_eq!(expected_layers, visible_hint_labels(editor, cx));
|
||||
let inlay_cache = editor.inlay_hint_cache();
|
||||
assert_eq!(inlay_cache.version, 2);
|
||||
assert_eq!(editor.inlay_hint_cache().version, 2);
|
||||
});
|
||||
}
|
||||
|
||||
@@ -1433,9 +1459,9 @@ mod tests {
|
||||
vec!["other hint".to_string(), "type hint".to_string()],
|
||||
visible_hint_labels(editor, cx)
|
||||
);
|
||||
let inlay_cache = editor.inlay_hint_cache();
|
||||
assert_eq!(
|
||||
inlay_cache.version, edits_made,
|
||||
editor.inlay_hint_cache().version,
|
||||
edits_made,
|
||||
"Should not update cache version due to new loaded hints being the same"
|
||||
);
|
||||
});
|
||||
@@ -1568,9 +1594,8 @@ mod tests {
|
||||
);
|
||||
assert!(cached_hint_labels(editor).is_empty());
|
||||
assert!(visible_hint_labels(editor, cx).is_empty());
|
||||
let inlay_cache = editor.inlay_hint_cache();
|
||||
assert_eq!(
|
||||
inlay_cache.version, edits_made,
|
||||
editor.inlay_hint_cache().version, edits_made,
|
||||
"The editor should not update the cache version after /refresh query without updates"
|
||||
);
|
||||
});
|
||||
@@ -1641,8 +1666,7 @@ mod tests {
|
||||
vec!["parameter hint".to_string()],
|
||||
visible_hint_labels(editor, cx),
|
||||
);
|
||||
let inlay_cache = editor.inlay_hint_cache();
|
||||
assert_eq!(inlay_cache.version, edits_made);
|
||||
assert_eq!(editor.inlay_hint_cache().version, edits_made);
|
||||
});
|
||||
}
|
||||
|
||||
@@ -1720,9 +1744,8 @@ mod tests {
|
||||
"Should get hints from the last edit landed only"
|
||||
);
|
||||
assert_eq!(expected_hints, visible_hint_labels(editor, cx));
|
||||
let inlay_cache = editor.inlay_hint_cache();
|
||||
assert_eq!(
|
||||
inlay_cache.version, 1,
|
||||
editor.inlay_hint_cache().version, 1,
|
||||
"Only one update should be registered in the cache after all cancellations"
|
||||
);
|
||||
});
|
||||
@@ -1766,9 +1789,9 @@ mod tests {
|
||||
"Should get hints from the last edit landed only"
|
||||
);
|
||||
assert_eq!(expected_hints, visible_hint_labels(editor, cx));
|
||||
let inlay_cache = editor.inlay_hint_cache();
|
||||
assert_eq!(
|
||||
inlay_cache.version, 2,
|
||||
editor.inlay_hint_cache().version,
|
||||
2,
|
||||
"Should update the cache version once more, for the new change"
|
||||
);
|
||||
});
|
||||
@@ -1886,9 +1909,8 @@ mod tests {
|
||||
"Should have hints from both LSP requests made for a big file"
|
||||
);
|
||||
assert_eq!(expected_layers, visible_hint_labels(editor, cx));
|
||||
let inlay_cache = editor.inlay_hint_cache();
|
||||
assert_eq!(
|
||||
inlay_cache.version, 2,
|
||||
editor.inlay_hint_cache().version, 2,
|
||||
"Both LSP queries should've bumped the cache version"
|
||||
);
|
||||
});
|
||||
@@ -1918,8 +1940,7 @@ mod tests {
|
||||
assert_eq!(expected_layers, cached_hint_labels(editor),
|
||||
"Should have hints from the new LSP response after edit");
|
||||
assert_eq!(expected_layers, visible_hint_labels(editor, cx));
|
||||
let inlay_cache = editor.inlay_hint_cache();
|
||||
assert_eq!(inlay_cache.version, 5, "Should update the cache for every LSP response with hints added");
|
||||
assert_eq!(editor.inlay_hint_cache().version, 5, "Should update the cache for every LSP response with hints added");
|
||||
});
|
||||
}
|
||||
|
||||
@@ -2075,6 +2096,7 @@ mod tests {
|
||||
panic!("unexpected uri: {:?}", params.text_document.uri);
|
||||
};
|
||||
|
||||
// one hint per excerpt
|
||||
let positions = [
|
||||
lsp::Position::new(0, 2),
|
||||
lsp::Position::new(4, 2),
|
||||
@@ -2138,8 +2160,7 @@ mod tests {
|
||||
"When scroll is at the edge of a multibuffer, its visible excerpts only should be queried for inlay hints"
|
||||
);
|
||||
assert_eq!(expected_layers, visible_hint_labels(editor, cx));
|
||||
let inlay_cache = editor.inlay_hint_cache();
|
||||
assert_eq!(inlay_cache.version, 4, "Every visible excerpt hints should bump the verison");
|
||||
assert_eq!(editor.inlay_hint_cache().version, expected_layers.len(), "Every visible excerpt hints should bump the verison");
|
||||
});
|
||||
|
||||
editor.update(cx, |editor, cx| {
|
||||
@@ -2169,8 +2190,8 @@ mod tests {
|
||||
assert_eq!(expected_layers, cached_hint_labels(editor),
|
||||
"With more scrolls of the multibuffer, more hints should be added into the cache and nothing invalidated without edits");
|
||||
assert_eq!(expected_layers, visible_hint_labels(editor, cx));
|
||||
let inlay_cache = editor.inlay_hint_cache();
|
||||
assert_eq!(inlay_cache.version, 9);
|
||||
assert_eq!(editor.inlay_hint_cache().version, expected_layers.len(),
|
||||
"Due to every excerpt having one hint, we update cache per new excerpt scrolled");
|
||||
});
|
||||
|
||||
editor.update(cx, |editor, cx| {
|
||||
@@ -2179,7 +2200,7 @@ mod tests {
|
||||
});
|
||||
});
|
||||
cx.foreground().run_until_parked();
|
||||
editor.update(cx, |editor, cx| {
|
||||
let last_scroll_update_version = editor.update(cx, |editor, cx| {
|
||||
let expected_layers = vec![
|
||||
"main hint #0".to_string(),
|
||||
"main hint #1".to_string(),
|
||||
@@ -2197,8 +2218,8 @@ mod tests {
|
||||
assert_eq!(expected_layers, cached_hint_labels(editor),
|
||||
"After multibuffer was scrolled to the end, all hints for all excerpts should be fetched");
|
||||
assert_eq!(expected_layers, visible_hint_labels(editor, cx));
|
||||
let inlay_cache = editor.inlay_hint_cache();
|
||||
assert_eq!(inlay_cache.version, 12);
|
||||
assert_eq!(editor.inlay_hint_cache().version, expected_layers.len());
|
||||
expected_layers.len()
|
||||
});
|
||||
|
||||
editor.update(cx, |editor, cx| {
|
||||
@@ -2225,12 +2246,14 @@ mod tests {
|
||||
assert_eq!(expected_layers, cached_hint_labels(editor),
|
||||
"After multibuffer was scrolled to the end, further scrolls up should not bring more hints");
|
||||
assert_eq!(expected_layers, visible_hint_labels(editor, cx));
|
||||
let inlay_cache = editor.inlay_hint_cache();
|
||||
assert_eq!(inlay_cache.version, 12, "No updates should happen during scrolling already scolled buffer");
|
||||
assert_eq!(editor.inlay_hint_cache().version, last_scroll_update_version, "No updates should happen during scrolling already scolled buffer");
|
||||
});
|
||||
|
||||
editor_edited.store(true, Ordering::Release);
|
||||
editor.update(cx, |editor, cx| {
|
||||
editor.change_selections(None, cx, |s| {
|
||||
s.select_ranges([Point::new(56, 0)..Point::new(56, 0)])
|
||||
});
|
||||
editor.handle_input("++++more text++++", cx);
|
||||
});
|
||||
cx.foreground().run_until_parked();
|
||||
@@ -2240,19 +2263,253 @@ mod tests {
|
||||
"main hint(edited) #1".to_string(),
|
||||
"main hint(edited) #2".to_string(),
|
||||
"main hint(edited) #3".to_string(),
|
||||
"other hint #0".to_string(),
|
||||
"other hint #1".to_string(),
|
||||
"other hint #2".to_string(),
|
||||
"other hint #3".to_string(),
|
||||
"other hint #4".to_string(),
|
||||
"other hint #5".to_string(),
|
||||
"main hint(edited) #4".to_string(),
|
||||
"main hint(edited) #5".to_string(),
|
||||
"other hint(edited) #0".to_string(),
|
||||
"other hint(edited) #1".to_string(),
|
||||
];
|
||||
assert_eq!(expected_layers, cached_hint_labels(editor),
|
||||
"After multibuffer was edited, hints for the edited buffer (1st) should be invalidated and requeried for all of its visible excerpts, \
|
||||
unedited (2nd) buffer should have the same hint");
|
||||
assert_eq!(
|
||||
expected_layers,
|
||||
cached_hint_labels(editor),
|
||||
"After multibuffer edit, editor gets scolled back to the last selection; \
|
||||
all hints should be invalidated and requeried for all of its visible excerpts"
|
||||
);
|
||||
assert_eq!(expected_layers, visible_hint_labels(editor, cx));
|
||||
let inlay_cache = editor.inlay_hint_cache();
|
||||
assert_eq!(inlay_cache.version, 16);
|
||||
assert_eq!(
|
||||
editor.inlay_hint_cache().version,
|
||||
last_scroll_update_version + expected_layers.len() + 1,
|
||||
"Due to every excerpt having one hint, cache should update per new excerpt received + 1 for outdated hints removal"
|
||||
);
|
||||
});
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_excerpts_removed(
|
||||
deterministic: Arc<Deterministic>,
|
||||
cx: &mut gpui::TestAppContext,
|
||||
) {
|
||||
init_test(cx, |settings| {
|
||||
settings.defaults.inlay_hints = Some(InlayHintSettings {
|
||||
enabled: true,
|
||||
show_type_hints: false,
|
||||
show_parameter_hints: false,
|
||||
show_other_hints: false,
|
||||
})
|
||||
});
|
||||
|
||||
let mut language = Language::new(
|
||||
LanguageConfig {
|
||||
name: "Rust".into(),
|
||||
path_suffixes: vec!["rs".to_string()],
|
||||
..Default::default()
|
||||
},
|
||||
Some(tree_sitter_rust::language()),
|
||||
);
|
||||
let mut fake_servers = language
|
||||
.set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
|
||||
capabilities: lsp::ServerCapabilities {
|
||||
inlay_hint_provider: Some(lsp::OneOf::Left(true)),
|
||||
..Default::default()
|
||||
},
|
||||
..Default::default()
|
||||
}))
|
||||
.await;
|
||||
let language = Arc::new(language);
|
||||
let fs = FakeFs::new(cx.background());
|
||||
fs.insert_tree(
|
||||
"/a",
|
||||
json!({
|
||||
"main.rs": format!("fn main() {{\n{}\n}}", (0..501).map(|i| format!("let i = {i};\n")).collect::<Vec<_>>().join("")),
|
||||
"other.rs": format!("fn main() {{\n{}\n}}", (0..501).map(|j| format!("let j = {j};\n")).collect::<Vec<_>>().join("")),
|
||||
}),
|
||||
)
|
||||
.await;
|
||||
let project = Project::test(fs, ["/a".as_ref()], cx).await;
|
||||
project.update(cx, |project, _| {
|
||||
project.languages().add(Arc::clone(&language))
|
||||
});
|
||||
let (_, workspace) = cx.add_window(|cx| Workspace::test_new(project.clone(), cx));
|
||||
let worktree_id = workspace.update(cx, |workspace, cx| {
|
||||
workspace.project().read_with(cx, |project, cx| {
|
||||
project.worktrees(cx).next().unwrap().read(cx).id()
|
||||
})
|
||||
});
|
||||
|
||||
let buffer_1 = project
|
||||
.update(cx, |project, cx| {
|
||||
project.open_buffer((worktree_id, "main.rs"), cx)
|
||||
})
|
||||
.await
|
||||
.unwrap();
|
||||
let buffer_2 = project
|
||||
.update(cx, |project, cx| {
|
||||
project.open_buffer((worktree_id, "other.rs"), cx)
|
||||
})
|
||||
.await
|
||||
.unwrap();
|
||||
let multibuffer = cx.add_model(|_| MultiBuffer::new(0));
|
||||
let (buffer_1_excerpts, buffer_2_excerpts) = multibuffer.update(cx, |multibuffer, cx| {
|
||||
let buffer_1_excerpts = multibuffer.push_excerpts(
|
||||
buffer_1.clone(),
|
||||
[ExcerptRange {
|
||||
context: Point::new(0, 0)..Point::new(2, 0),
|
||||
primary: None,
|
||||
}],
|
||||
cx,
|
||||
);
|
||||
let buffer_2_excerpts = multibuffer.push_excerpts(
|
||||
buffer_2.clone(),
|
||||
[ExcerptRange {
|
||||
context: Point::new(0, 1)..Point::new(2, 1),
|
||||
primary: None,
|
||||
}],
|
||||
cx,
|
||||
);
|
||||
(buffer_1_excerpts, buffer_2_excerpts)
|
||||
});
|
||||
|
||||
assert!(!buffer_1_excerpts.is_empty());
|
||||
assert!(!buffer_2_excerpts.is_empty());
|
||||
|
||||
deterministic.run_until_parked();
|
||||
cx.foreground().run_until_parked();
|
||||
let (_, editor) =
|
||||
cx.add_window(|cx| Editor::for_multibuffer(multibuffer, Some(project.clone()), cx));
|
||||
let editor_edited = Arc::new(AtomicBool::new(false));
|
||||
let fake_server = fake_servers.next().await.unwrap();
|
||||
let closure_editor_edited = Arc::clone(&editor_edited);
|
||||
fake_server
|
||||
.handle_request::<lsp::request::InlayHintRequest, _, _>(move |params, _| {
|
||||
let task_editor_edited = Arc::clone(&closure_editor_edited);
|
||||
async move {
|
||||
let hint_text = if params.text_document.uri
|
||||
== lsp::Url::from_file_path("/a/main.rs").unwrap()
|
||||
{
|
||||
"main hint"
|
||||
} else if params.text_document.uri
|
||||
== lsp::Url::from_file_path("/a/other.rs").unwrap()
|
||||
{
|
||||
"other hint"
|
||||
} else {
|
||||
panic!("unexpected uri: {:?}", params.text_document.uri);
|
||||
};
|
||||
|
||||
let positions = [
|
||||
lsp::Position::new(0, 2),
|
||||
lsp::Position::new(4, 2),
|
||||
lsp::Position::new(22, 2),
|
||||
lsp::Position::new(44, 2),
|
||||
lsp::Position::new(56, 2),
|
||||
lsp::Position::new(67, 2),
|
||||
];
|
||||
let out_of_range_hint = lsp::InlayHint {
|
||||
position: lsp::Position::new(
|
||||
params.range.start.line + 99,
|
||||
params.range.start.character + 99,
|
||||
),
|
||||
label: lsp::InlayHintLabel::String(
|
||||
"out of excerpt range, should be ignored".to_string(),
|
||||
),
|
||||
kind: None,
|
||||
text_edits: None,
|
||||
tooltip: None,
|
||||
padding_left: None,
|
||||
padding_right: None,
|
||||
data: None,
|
||||
};
|
||||
|
||||
let edited = task_editor_edited.load(Ordering::Acquire);
|
||||
Ok(Some(
|
||||
std::iter::once(out_of_range_hint)
|
||||
.chain(positions.into_iter().enumerate().map(|(i, position)| {
|
||||
lsp::InlayHint {
|
||||
position,
|
||||
label: lsp::InlayHintLabel::String(format!(
|
||||
"{hint_text}{} #{i}",
|
||||
if edited { "(edited)" } else { "" },
|
||||
)),
|
||||
kind: None,
|
||||
text_edits: None,
|
||||
tooltip: None,
|
||||
padding_left: None,
|
||||
padding_right: None,
|
||||
data: None,
|
||||
}
|
||||
}))
|
||||
.collect(),
|
||||
))
|
||||
}
|
||||
})
|
||||
.next()
|
||||
.await;
|
||||
cx.foreground().run_until_parked();
|
||||
|
||||
editor.update(cx, |editor, cx| {
|
||||
assert_eq!(
|
||||
vec!["main hint #0".to_string(), "other hint #0".to_string()],
|
||||
cached_hint_labels(editor),
|
||||
"Cache should update for both excerpts despite hints display was disabled"
|
||||
);
|
||||
assert!(
|
||||
visible_hint_labels(editor, cx).is_empty(),
|
||||
"All hints are disabled and should not be shown despite being present in the cache"
|
||||
);
|
||||
assert_eq!(
|
||||
editor.inlay_hint_cache().version,
|
||||
2,
|
||||
"Cache should update once per excerpt query"
|
||||
);
|
||||
});
|
||||
|
||||
editor.update(cx, |editor, cx| {
|
||||
editor.buffer().update(cx, |multibuffer, cx| {
|
||||
multibuffer.remove_excerpts(buffer_2_excerpts, cx)
|
||||
})
|
||||
});
|
||||
cx.foreground().run_until_parked();
|
||||
editor.update(cx, |editor, cx| {
|
||||
assert_eq!(
|
||||
vec!["main hint #0".to_string()],
|
||||
cached_hint_labels(editor),
|
||||
"For the removed excerpt, should clean corresponding cached hints"
|
||||
);
|
||||
assert!(
|
||||
visible_hint_labels(editor, cx).is_empty(),
|
||||
"All hints are disabled and should not be shown despite being present in the cache"
|
||||
);
|
||||
assert_eq!(
|
||||
editor.inlay_hint_cache().version,
|
||||
3,
|
||||
"Excerpt removal should trigger cache update"
|
||||
);
|
||||
});
|
||||
|
||||
update_test_language_settings(cx, |settings| {
|
||||
settings.defaults.inlay_hints = Some(InlayHintSettings {
|
||||
enabled: true,
|
||||
show_type_hints: true,
|
||||
show_parameter_hints: true,
|
||||
show_other_hints: true,
|
||||
})
|
||||
});
|
||||
cx.foreground().run_until_parked();
|
||||
editor.update(cx, |editor, cx| {
|
||||
let expected_hints = vec!["main hint #0".to_string()];
|
||||
assert_eq!(
|
||||
expected_hints,
|
||||
cached_hint_labels(editor),
|
||||
"Hint display settings change should not change the cache"
|
||||
);
|
||||
assert_eq!(
|
||||
expected_hints,
|
||||
visible_hint_labels(editor, cx),
|
||||
"Settings change should make cached hints visible"
|
||||
);
|
||||
assert_eq!(
|
||||
editor.inlay_hint_cache().version,
|
||||
4,
|
||||
"Settings change should trigger cache update"
|
||||
);
|
||||
});
|
||||
}
|
||||
|
||||
|
||||