Compare commits
998 Commits
10  .github/workflows/ci.yml  vendored

@@ -32,8 +32,13 @@ jobs:
with:
clean: false

- name: Download rust-analyzer
run: |
script/download-rust-analyzer
echo "$PWD/vendor/bin" >> $GITHUB_PATH

- name: Run tests
run: cargo test --no-fail-fast
run: cargo test --workspace --no-fail-fast

bundle:
name: Bundle app
@@ -63,6 +68,9 @@ jobs:
with:
clean: false

- name: Download rust-analyzer
run: script/download-rust-analyzer

- name: Create app bundle
run: script/bundle
5  .gitignore  vendored

@@ -2,5 +2,6 @@
/zed.xcworkspace
.DS_Store
/script/node_modules
/server/.env.toml
/server/static/styles.css
/crates/server/.env.toml
/crates/server/static/styles.css
/vendor/bin
1  .zed.toml  Normal file

@@ -0,0 +1 @@
collaborators = ["nathansobo", "as-cii", "maxbrunsfeld", "iamnbutler"]
1480  Cargo.lock  generated

File diff suppressed because it is too large
Cargo.toml

@@ -1,10 +1,9 @@
[workspace]
members = ["fsevent", "gpui", "gpui_macros", "server", "zed", "zrpc"]
default-members = ["zed"]
members = ["crates/*"]
default-members = ["crates/zed"]

[patch.crates-io]
async-task = { git = "https://github.com/zed-industries/async-task", rev = "341b57d6de98cdfd7b418567b8de2022ca993a6e" }
tree-sitter = { git = "https://github.com/tree-sitter/tree-sitter", rev = "d72771a19f4143530b1cfd23808e344f1276e176" }
# TODO - Remove when a version is released with this PR: https://github.com/servo/core-foundation-rs/pull/457
cocoa = { git = "https://github.com/servo/core-foundation-rs", rev = "025dcb3c0d1ef01530f57ef65f3b1deb948f5737" }
cocoa-foundation = { git = "https://github.com/servo/core-foundation-rs", rev = "025dcb3c0d1ef01530f57ef65f3b1deb948f5737" }
@@ -1,6 +1,6 @@
# syntax = docker/dockerfile:1.2

FROM rust as builder
FROM rust:1.56-bullseye as builder
WORKDIR app
RUN curl -fsSL https://deb.nodesource.com/setup_16.x | bash -
RUN apt-get install -y nodejs
@@ -25,7 +25,7 @@ RUN --mount=type=cache,target=./target \
cp /app/target/release/zed-server /app/zed-server

# Copy server binary to the runtime image
FROM debian:buster-slim as runtime
FROM debian:bullseye-slim as runtime
RUN apt-get update; \
apt-get install -y --no-install-recommends libcurl4-openssl-dev ca-certificates
WORKDIR app
@@ -1,12 +1,12 @@
# syntax = docker/dockerfile:1.2

FROM rust as builder
FROM rust:1.55-bullseye as builder
WORKDIR app
RUN --mount=type=cache,target=/usr/local/cargo/registry \
--mount=type=cache,target=./target \
cargo install sqlx-cli --root=/app --target-dir=/app/target --version 0.5.7

FROM debian:buster-slim as runtime
FROM debian:bullseye-slim as runtime
RUN apt-get update; \
apt-get install -y --no-install-recommends libssl1.1
WORKDIR app
2  Procfile  Normal file

@@ -0,0 +1,2 @@
web: cd ../zed.dev && PORT=3000 npx next dev
collab: cd crates/server && cargo run
50  README.md

@@ -6,6 +6,46 @@ Welcome to Zed, a lightning-fast, collaborative code editor that makes your drea

## Development tips

### Compiling on macOS Monterey

The Zed server uses libcurl, which currently triggers [a bug](https://github.com/rust-lang/rust/issues/90342) in `rustc`. To work around this bug, export the following environment variable:

```
export MACOSX_DEPLOYMENT_TARGET=10.7
```

### Testing against locally-running servers

Make sure you have `zed.dev` cloned as a sibling to this repo.

```
cd ..
git clone https://github.com/zed-industries/zed.dev
```

Make sure your local database is created, migrated, and seeded with initial data. Install [Postgres](https://postgresapp.com), then from the `zed` repository root, run:

```
script/sqlx database create
script/sqlx migrate run
script/seed-db
```

Run `zed.dev` and the collaboration server.

```
brew install foreman
foreman start
```

If you want to run Zed pointed at the local servers, you can run:

```
script/zed_with_local_servers
# or...
script/zed_with_local_servers --release
```

### Dump element JSON

If you trigger `cmd-shift-i`, Zed will copy a JSON representation of the current window contents to the clipboard. You can paste this in a tool like [DJSON](https://chrome.google.com/webstore/detail/djson-json-viewer-formatt/chaeijjekipecdajnijdldjjipaegdjc?hl=en) to navigate the state of on-screen elements in a structured way.
@@ -26,12 +66,12 @@ Establish basic infrastructure for building the app bundle and uploading an arti

[Tracking issue](https://github.com/zed-industries/zed/issues/6)

Turn the minimal text editor into a collaborative *code* editor. This will include the minimal features that the Zed team needs to collaborate in Zed to build Zed without net loss in developer productivity. This includes productivity-critical features such as:
Turn the minimal text editor into a collaborative _code_ editor. This will include the minimal features that the Zed team needs to collaborate in Zed to build Zed without net loss in developer productivity. This includes productivity-critical features such as:

* Syntax highlighting and syntax-aware editing and navigation
* The ability to see and edit non-local working copies of a repository
* Language server support for Rust code navigation, refactoring, diagnostics, etc.
* Project browsing and project-wide search and replace
- Syntax highlighting and syntax-aware editing and navigation
- The ability to see and edit non-local working copies of a repository
- Language server support for Rust code navigation, refactoring, diagnostics, etc.
- Project browsing and project-wide search and replace

We want to tackle collaboration fairly early so that the rest of the design of the product can flow around that assumption. We could probably produce a single-player code editor more quickly, but at the risk of having collaboration feel more "bolted on" when we eventually add it.
17  crates/chat_panel/Cargo.toml  Normal file

@@ -0,0 +1,17 @@
[package]
name = "chat_panel"
version = "0.1.0"
edition = "2018"

[lib]
path = "src/chat_panel.rs"

[dependencies]
client = { path = "../client" }
editor = { path = "../editor" }
gpui = { path = "../gpui" }
theme = { path = "../theme" }
util = { path = "../util" }
workspace = { path = "../workspace" }
postage = { version = "0.4.1", features = ["futures-traits"] }
time = "0.3"
crates/chat_panel/src/chat_panel.rs

@@ -1,13 +1,8 @@
use std::sync::Arc;

use crate::{
use client::{
channel::{Channel, ChannelEvent, ChannelList, ChannelMessage},
editor::Editor,
rpc::{self, Client},
theme,
util::{ResultExt, TryFutureExt},
Settings,
Client,
};
use editor::{Editor, EditorSettings};
use gpui::{
action,
elements::*,
@@ -18,7 +13,10 @@ use gpui::{
ViewContext, ViewHandle,
};
use postage::{prelude::Stream, watch};
use std::sync::Arc;
use time::{OffsetDateTime, UtcOffset};
use util::{ResultExt, TryFutureExt};
use workspace::Settings;

const MESSAGE_LOADING_THRESHOLD: usize = 50;

@@ -54,10 +52,21 @@ impl ChatPanel {
cx: &mut ViewContext<Self>,
) -> Self {
let input_editor = cx.add_view(|cx| {
Editor::auto_height(4, settings.clone(), cx).with_style({
let settings = settings.clone();
move |_| settings.borrow().theme.chat_panel.input_editor.as_editor()
})
Editor::auto_height(
4,
{
let settings = settings.clone();
Arc::new(move |_| {
let settings = settings.borrow();
EditorSettings {
tab_size: settings.tab_size,
style: settings.theme.chat_panel.input_editor.as_editor(),
soft_wrap: editor::SoftWrap::EditorWidth,
}
})
},
cx,
)
});
let channel_select = cx.add_view(|cx| {
let channel_list = channel_list.clone();
@@ -87,7 +96,7 @@ impl ChatPanel {
});

let mut message_list = ListState::new(0, Orientation::Bottom, 1000., {
let this = cx.handle().downgrade();
let this = cx.weak_handle();
move |ix, cx| {
let this = this.upgrade(cx).unwrap().read(cx);
let message = this.active_channel.as_ref().unwrap().0.read(cx).message(ix);
@@ -209,7 +218,7 @@ impl ChatPanel {
Flex::column()
.with_child(
Container::new(ChildView::new(self.channel_select.id()).boxed())
.with_style(&theme.chat_panel.channel_select.container)
.with_style(theme.chat_panel.channel_select.container)
.boxed(),
)
.with_child(self.render_active_channel_messages())
@@ -224,13 +233,18 @@ impl ChatPanel {
Empty::new().boxed()
};

Expanded::new(1., messages).boxed()
Flexible::new(1., true, messages).boxed()
}

fn render_message(&self, message: &ChannelMessage) -> ElementBox {
let now = OffsetDateTime::now_utc();
let settings = self.settings.borrow();
let theme = &settings.theme.chat_panel.message;
let theme = if message.is_pending() {
&settings.theme.chat_panel.pending_message
} else {
&settings.theme.chat_panel.message
};

Container::new(
Flex::column()
.with_child(
@@ -243,7 +257,7 @@ impl ChatPanel {
)
.boxed(),
)
.with_style(&theme.sender.container)
.with_style(theme.sender.container)
.boxed(),
)
.with_child(
@@ -254,7 +268,7 @@ impl ChatPanel {
)
.boxed(),
)
.with_style(&theme.timestamp.container)
.with_style(theme.timestamp.container)
.boxed(),
)
.boxed(),
@@ -262,14 +276,14 @@ impl ChatPanel {
.with_child(Text::new(message.body.clone(), theme.body.clone()).boxed())
.boxed(),
)
.with_style(&theme.container)
.with_style(theme.container)
.boxed()
}

fn render_input_box(&self) -> ElementBox {
let theme = &self.settings.borrow().theme;
Container::new(ChildView::new(self.input_editor.id()).boxed())
.with_style(&theme.chat_panel.input_editor.container)
.with_style(theme.chat_panel.input_editor.container)
.boxed()
}

@@ -293,13 +307,13 @@ impl ChatPanel {
Flex::row()
.with_child(
Container::new(Label::new("#".to_string(), theme.hash.text.clone()).boxed())
.with_style(&theme.hash.container)
.with_style(theme.hash.container)
.boxed(),
)
.with_child(Label::new(channel.name.clone(), theme.name.clone()).boxed())
.boxed(),
)
.with_style(&theme.container)
.with_style(theme.container)
.boxed()
}

@@ -381,13 +395,14 @@ impl View for ChatPanel {

fn render(&mut self, cx: &mut RenderContext<Self>) -> ElementBox {
let theme = &self.settings.borrow().theme;
let element = match *self.rpc.status().borrow() {
rpc::Status::Connected { .. } => self.render_channel(),
_ => self.render_sign_in_prompt(cx),
let element = if self.rpc.user_id().is_some() {
self.render_channel()
} else {
self.render_sign_in_prompt(cx)
};
ConstrainedBox::new(
Container::new(element)
.with_style(&theme.chat_panel.container)
.with_style(theme.chat_panel.container)
.boxed(),
)
.with_min_width(150.)
@@ -395,7 +410,10 @@ impl View for ChatPanel {
}

fn on_focus(&mut self, cx: &mut ViewContext<Self>) {
if matches!(*self.rpc.status().borrow(), rpc::Status::Connected { .. }) {
if matches!(
*self.rpc.status().borrow(),
client::Status::Connected { .. }
) {
cx.focus(&self.input_editor);
}
}
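The `Editor::auto_height` change above replaces a fixed settings value plus `with_style` with a build-settings closure that re-reads the shared settings state every time it runs, so tab size and theme stay current after the user changes them. A minimal, self-contained sketch of that pattern follows; the `Settings`, `EditorSettings`, and `AutoHeightEditor` types here are stand-ins for illustration, not the real gpui/editor API:

```rust
use std::{cell::RefCell, rc::Rc, sync::Arc};

// Stand-ins for the real shared Settings and per-editor EditorSettings types.
struct Settings { tab_size: usize, theme_name: String }
#[derive(Debug)]
struct EditorSettings { tab_size: usize, style: String }

// The editor stores the closure and calls it whenever it needs fresh settings.
struct AutoHeightEditor {
    max_lines: usize,
    build_settings: Arc<dyn Fn() -> EditorSettings>,
}

impl AutoHeightEditor {
    fn new(max_lines: usize, build_settings: Arc<dyn Fn() -> EditorSettings>) -> Self {
        Self { max_lines, build_settings }
    }

    fn render(&self) {
        // Re-evaluate the settings on every render instead of caching a snapshot.
        let settings = (self.build_settings)();
        println!("rendering up to {} lines with {:?}", self.max_lines, settings);
    }
}

fn main() {
    let settings = Rc::new(RefCell::new(Settings { tab_size: 4, theme_name: "dark".into() }));
    let editor = AutoHeightEditor::new(4, {
        let settings = settings.clone();
        Arc::new(move || {
            let settings = settings.borrow();
            EditorSettings { tab_size: settings.tab_size, style: settings.theme_name.clone() }
        })
    });

    editor.render();                    // tab_size: 4, style: "dark"
    settings.borrow_mut().tab_size = 2; // later settings changes...
    editor.render();                    // ...are picked up automatically
}
```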
35  crates/client/Cargo.toml  Normal file

@@ -0,0 +1,35 @@
[package]
name = "client"
version = "0.1.0"
edition = "2018"

[lib]
path = "src/client.rs"

[features]
test-support = ["gpui/test-support", "rpc/test-support"]

[dependencies]
gpui = { path = "../gpui" }
util = { path = "../util" }
rpc = { path = "../rpc" }
sum_tree = { path = "../sum_tree" }
anyhow = "1.0.38"
async-recursion = "0.3"
async-tungstenite = { version = "0.16", features = ["async-tls"] }
futures = "0.3"
image = "0.23"
lazy_static = "1.4.0"
log = "0.4"
parking_lot = "0.11.1"
postage = { version = "0.4.1", features = ["futures-traits"] }
rand = "0.8.3"
smol = "1.2.5"
surf = "2.2"
thiserror = "1.0.29"
time = "0.3"
tiny_http = "0.8"

[dev-dependencies]
gpui = { path = "../gpui", features = ["test-support"] }
rpc = { path = "../rpc", features = ["test-support"] }
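The `test-support` feature follows a pattern used throughout this workspace: test helpers live in the library itself and are compiled only for the crate's own tests or when a downstream crate opts in. The `#[cfg(any(test, feature = "test-support"))]` gate that `client.rs` uses further down works roughly as in this sketch (the module and helper names are illustrative, not the real test module):

```rust
// In the library root (e.g. src/client.rs):
#[cfg(any(test, feature = "test-support"))]
pub mod test {
    /// Helper that exists only for unit tests and for crates that enable
    /// the `test-support` feature from their dev-dependencies.
    pub fn fake_credentials() -> (u64, String) {
        (1, "fake-access-token".to_string())
    }
}

#[cfg(test)]
mod tests {
    // Inside the crate, cfg(test) alone is enough to see the helpers.
    use super::test::fake_credentials;

    #[test]
    fn uses_test_helpers() {
        let (user_id, token) = fake_credentials();
        assert_eq!(user_id, 1);
        assert!(!token.is_empty());
    }
}
```

A downstream crate then pulls the helpers in with something like `client = { path = "../client", features = ["test-support"] }` under `[dev-dependencies]`, the same way the `gpui` and `rpc` entries above do.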
@@ -1,31 +1,29 @@
|
||||
use crate::{
|
||||
rpc::{self, Client},
|
||||
use super::{
|
||||
proto,
|
||||
user::{User, UserStore},
|
||||
util::TryFutureExt,
|
||||
Client, Status, Subscription, TypedEnvelope,
|
||||
};
|
||||
use anyhow::{anyhow, Context, Result};
|
||||
use gpui::{
|
||||
sum_tree::{self, Bias, SumTree},
|
||||
Entity, ModelContext, ModelHandle, MutableAppContext, Task, WeakModelHandle,
|
||||
AsyncAppContext, Entity, ModelContext, ModelHandle, MutableAppContext, Task, WeakModelHandle,
|
||||
};
|
||||
use postage::prelude::Stream;
|
||||
use rand::prelude::*;
|
||||
use std::{
|
||||
collections::{HashMap, HashSet},
|
||||
mem,
|
||||
ops::Range,
|
||||
sync::Arc,
|
||||
};
|
||||
use sum_tree::{Bias, SumTree};
|
||||
use time::OffsetDateTime;
|
||||
use zrpc::{
|
||||
proto::{self, ChannelMessageSent},
|
||||
TypedEnvelope,
|
||||
};
|
||||
use util::{post_inc, TryFutureExt};
|
||||
|
||||
pub struct ChannelList {
|
||||
available_channels: Option<Vec<ChannelDetails>>,
|
||||
channels: HashMap<u64, WeakModelHandle<Channel>>,
|
||||
rpc: Arc<Client>,
|
||||
user_store: Arc<UserStore>,
|
||||
client: Arc<Client>,
|
||||
user_store: ModelHandle<UserStore>,
|
||||
_task: Task<Option<()>>,
|
||||
}
|
||||
|
||||
@@ -39,33 +37,35 @@ pub struct Channel {
|
||||
details: ChannelDetails,
|
||||
messages: SumTree<ChannelMessage>,
|
||||
loaded_all_messages: bool,
|
||||
pending_messages: Vec<PendingChannelMessage>,
|
||||
next_local_message_id: u64,
|
||||
user_store: Arc<UserStore>,
|
||||
next_pending_message_id: usize,
|
||||
user_store: ModelHandle<UserStore>,
|
||||
rpc: Arc<Client>,
|
||||
_subscription: rpc::Subscription,
|
||||
rng: StdRng,
|
||||
_subscription: Subscription,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, PartialEq)]
|
||||
#[derive(Clone, Debug)]
|
||||
pub struct ChannelMessage {
|
||||
pub id: u64,
|
||||
pub id: ChannelMessageId,
|
||||
pub body: String,
|
||||
pub timestamp: OffsetDateTime,
|
||||
pub sender: Arc<User>,
|
||||
pub nonce: u128,
|
||||
}
|
||||
|
||||
pub struct PendingChannelMessage {
|
||||
pub body: String,
|
||||
local_id: u64,
|
||||
#[derive(Copy, Clone, Debug, PartialEq, Eq, PartialOrd, Ord)]
|
||||
pub enum ChannelMessageId {
|
||||
Saved(u64),
|
||||
Pending(usize),
|
||||
}
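Because `#[derive(PartialOrd, Ord)]` on an enum orders values by variant declaration order first, every `Saved(_)` id compares less than every `Pending(_)` id. That is what lets pending messages sit at the tail of the message tree and be located with a single seek to `ChannelMessageId::Pending(0)`. A standalone illustration of that ordering:

```rust
#[derive(Copy, Clone, Debug, PartialEq, Eq, PartialOrd, Ord)]
pub enum ChannelMessageId {
    Saved(u64),
    Pending(usize),
}

fn main() {
    use ChannelMessageId::*;

    // Within a variant, the payload decides the order...
    assert!(Saved(1) < Saved(2));
    assert!(Pending(0) < Pending(9));

    // ...but any Saved id sorts before any Pending id, regardless of payload,
    // because the derived Ord compares the variant (declaration order) first.
    assert!(Saved(u64::MAX) < Pending(0));

    let mut ids = vec![Pending(1), Saved(10), Pending(0), Saved(2)];
    ids.sort();
    assert_eq!(ids, vec![Saved(2), Saved(10), Pending(0), Pending(1)]);
}
```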
|
||||
|
||||
#[derive(Clone, Debug, Default)]
|
||||
pub struct ChannelMessageSummary {
|
||||
max_id: u64,
|
||||
max_id: ChannelMessageId,
|
||||
count: usize,
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone, Debug, Default)]
|
||||
#[derive(Copy, Clone, Debug, Default, PartialEq, Eq, PartialOrd, Ord)]
|
||||
struct Count(usize);
|
||||
|
||||
pub enum ChannelListEvent {}
|
||||
@@ -84,8 +84,8 @@ impl Entity for ChannelList {
|
||||
|
||||
impl ChannelList {
|
||||
pub fn new(
|
||||
user_store: Arc<UserStore>,
|
||||
rpc: Arc<rpc::Client>,
|
||||
user_store: ModelHandle<UserStore>,
|
||||
rpc: Arc<Client>,
|
||||
cx: &mut ModelContext<Self>,
|
||||
) -> Self {
|
||||
let _task = cx.spawn_weak(|this, mut cx| {
|
||||
@@ -94,7 +94,7 @@ impl ChannelList {
|
||||
let mut status = rpc.status();
|
||||
while let Some((status, this)) = status.recv().await.zip(this.upgrade(&cx)) {
|
||||
match status {
|
||||
rpc::Status::Connected { .. } => {
|
||||
Status::Connected { .. } => {
|
||||
let response = rpc
|
||||
.request(proto::GetChannels {})
|
||||
.await
|
||||
@@ -118,7 +118,7 @@ impl ChannelList {
|
||||
cx.notify();
|
||||
});
|
||||
}
|
||||
rpc::Status::Disconnected { .. } => {
|
||||
Status::SignedOut { .. } => {
|
||||
this.update(&mut cx, |this, cx| {
|
||||
this.available_channels = None;
|
||||
this.channels.clear();
|
||||
@@ -137,7 +137,7 @@ impl ChannelList {
|
||||
available_channels: None,
|
||||
channels: Default::default(),
|
||||
user_store,
|
||||
rpc,
|
||||
client: rpc,
|
||||
_task,
|
||||
}
|
||||
}
|
||||
@@ -157,8 +157,9 @@ impl ChannelList {
|
||||
|
||||
let channels = self.available_channels.as_ref()?;
|
||||
let details = channels.iter().find(|details| details.id == id)?.clone();
|
||||
let channel =
|
||||
cx.add_model(|cx| Channel::new(details, self.user_store.clone(), self.rpc.clone(), cx));
|
||||
let channel = cx.add_model(|cx| {
|
||||
Channel::new(details, self.user_store.clone(), self.client.clone(), cx)
|
||||
});
|
||||
self.channels.insert(id, channel.downgrade());
|
||||
Some(channel)
|
||||
}
|
||||
@@ -183,11 +184,11 @@ impl Entity for Channel {
|
||||
impl Channel {
|
||||
pub fn new(
|
||||
details: ChannelDetails,
|
||||
user_store: Arc<UserStore>,
|
||||
user_store: ModelHandle<UserStore>,
|
||||
rpc: Arc<Client>,
|
||||
cx: &mut ModelContext<Self>,
|
||||
) -> Self {
|
||||
let _subscription = rpc.subscribe_from_model(details.id, cx, Self::handle_message_sent);
|
||||
let _subscription = rpc.subscribe_to_entity(details.id, cx, Self::handle_message_sent);
|
||||
|
||||
{
|
||||
let user_store = user_store.clone();
|
||||
@@ -196,7 +197,8 @@ impl Channel {
|
||||
cx.spawn(|channel, mut cx| {
|
||||
async move {
|
||||
let response = rpc.request(proto::JoinChannel { channel_id }).await?;
|
||||
let messages = messages_from_proto(response.messages, &user_store).await?;
|
||||
let messages =
|
||||
messages_from_proto(response.messages, &user_store, &mut cx).await?;
|
||||
let loaded_all_messages = response.done;
|
||||
|
||||
channel.update(&mut cx, |channel, cx| {
|
||||
@@ -216,9 +218,9 @@ impl Channel {
|
||||
user_store,
|
||||
rpc,
|
||||
messages: Default::default(),
|
||||
pending_messages: Default::default(),
|
||||
loaded_all_messages: false,
|
||||
next_local_message_id: 0,
|
||||
next_pending_message_id: 0,
|
||||
rng: StdRng::from_entropy(),
|
||||
_subscription,
|
||||
}
|
||||
}
|
||||
@@ -236,31 +238,45 @@ impl Channel {
|
||||
Err(anyhow!("message body can't be empty"))?;
|
||||
}
|
||||
|
||||
let current_user = self
|
||||
.user_store
|
||||
.read(cx)
|
||||
.current_user()
|
||||
.ok_or_else(|| anyhow!("current_user is not present"))?;
|
||||
|
||||
let channel_id = self.details.id;
|
||||
let local_id = self.next_local_message_id;
|
||||
self.next_local_message_id += 1;
|
||||
self.pending_messages.push(PendingChannelMessage {
|
||||
local_id,
|
||||
body: body.clone(),
|
||||
});
|
||||
let pending_id = ChannelMessageId::Pending(post_inc(&mut self.next_pending_message_id));
|
||||
let nonce = self.rng.gen();
|
||||
self.insert_messages(
|
||||
SumTree::from_item(
|
||||
ChannelMessage {
|
||||
id: pending_id,
|
||||
body: body.clone(),
|
||||
sender: current_user,
|
||||
timestamp: OffsetDateTime::now_utc(),
|
||||
nonce,
|
||||
},
|
||||
&(),
|
||||
),
|
||||
cx,
|
||||
);
|
||||
let user_store = self.user_store.clone();
|
||||
let rpc = self.rpc.clone();
|
||||
Ok(cx.spawn(|this, mut cx| async move {
|
||||
let request = rpc.request(proto::SendChannelMessage { channel_id, body });
|
||||
let request = rpc.request(proto::SendChannelMessage {
|
||||
channel_id,
|
||||
body,
|
||||
nonce: Some(nonce.into()),
|
||||
});
|
||||
let response = request.await?;
|
||||
let message = ChannelMessage::from_proto(
|
||||
response.message.ok_or_else(|| anyhow!("invalid message"))?,
|
||||
&user_store,
|
||||
&mut cx,
|
||||
)
|
||||
.await?;
|
||||
this.update(&mut cx, |this, cx| {
|
||||
if let Ok(i) = this
|
||||
.pending_messages
|
||||
.binary_search_by_key(&local_id, |msg| msg.local_id)
|
||||
{
|
||||
this.pending_messages.remove(i);
|
||||
this.insert_messages(SumTree::from_item(message, &()), cx);
|
||||
}
|
||||
this.insert_messages(SumTree::from_item(message, &()), cx);
|
||||
Ok(())
|
||||
})
|
||||
}))
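`post_inc`, used above to mint `ChannelMessageId::Pending` values, comes from the workspace's `util` crate; its behavior is presumably the classic post-increment helper sketched below (return the old value, then bump the counter), which keeps id allocation a one-liner at the call site:

```rust
// A hedged sketch of a post_inc helper; the real one lives in the `util` crate.
fn post_inc(value: &mut usize) -> usize {
    let prev = *value;
    *value += 1;
    prev
}

fn main() {
    let mut next_pending_message_id = 0;
    let first = post_inc(&mut next_pending_message_id);
    let second = post_inc(&mut next_pending_message_id);
    assert_eq!((first, second, next_pending_message_id), (0, 1, 2));
}
```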
|
||||
@@ -271,7 +287,12 @@ impl Channel {
|
||||
let rpc = self.rpc.clone();
|
||||
let user_store = self.user_store.clone();
|
||||
let channel_id = self.details.id;
|
||||
if let Some(before_message_id) = self.messages.first().map(|message| message.id) {
|
||||
if let Some(before_message_id) =
|
||||
self.messages.first().and_then(|message| match message.id {
|
||||
ChannelMessageId::Saved(id) => Some(id),
|
||||
ChannelMessageId::Pending(_) => None,
|
||||
})
|
||||
{
|
||||
cx.spawn(|this, mut cx| {
|
||||
async move {
|
||||
let response = rpc
|
||||
@@ -281,7 +302,8 @@ impl Channel {
|
||||
})
|
||||
.await?;
|
||||
let loaded_all_messages = response.done;
|
||||
let messages = messages_from_proto(response.messages, &user_store).await?;
|
||||
let messages =
|
||||
messages_from_proto(response.messages, &user_store, &mut cx).await?;
|
||||
this.update(&mut cx, |this, cx| {
|
||||
this.loaded_all_messages = loaded_all_messages;
|
||||
this.insert_messages(messages, cx);
|
||||
@@ -301,32 +323,52 @@ impl Channel {
|
||||
let user_store = self.user_store.clone();
|
||||
let rpc = self.rpc.clone();
|
||||
let channel_id = self.details.id;
|
||||
cx.spawn(|channel, mut cx| {
|
||||
cx.spawn(|this, mut cx| {
|
||||
async move {
|
||||
let response = rpc.request(proto::JoinChannel { channel_id }).await?;
|
||||
let messages = messages_from_proto(response.messages, &user_store).await?;
|
||||
let messages = messages_from_proto(response.messages, &user_store, &mut cx).await?;
|
||||
let loaded_all_messages = response.done;
|
||||
|
||||
channel.update(&mut cx, |channel, cx| {
|
||||
let pending_messages = this.update(&mut cx, |this, cx| {
|
||||
if let Some((first_new_message, last_old_message)) =
|
||||
messages.first().zip(channel.messages.last())
|
||||
messages.first().zip(this.messages.last())
|
||||
{
|
||||
if first_new_message.id > last_old_message.id {
|
||||
let old_messages = mem::take(&mut channel.messages);
|
||||
let old_messages = mem::take(&mut this.messages);
|
||||
cx.emit(ChannelEvent::MessagesUpdated {
|
||||
old_range: 0..old_messages.summary().count,
|
||||
new_count: 0,
|
||||
});
|
||||
channel.loaded_all_messages = loaded_all_messages;
|
||||
this.loaded_all_messages = loaded_all_messages;
|
||||
}
|
||||
}
|
||||
|
||||
channel.insert_messages(messages, cx);
|
||||
this.insert_messages(messages, cx);
|
||||
if loaded_all_messages {
|
||||
channel.loaded_all_messages = loaded_all_messages;
|
||||
this.loaded_all_messages = loaded_all_messages;
|
||||
}
|
||||
|
||||
this.pending_messages().cloned().collect::<Vec<_>>()
|
||||
});
|
||||
|
||||
for pending_message in pending_messages {
|
||||
let request = rpc.request(proto::SendChannelMessage {
|
||||
channel_id,
|
||||
body: pending_message.body,
|
||||
nonce: Some(pending_message.nonce.into()),
|
||||
});
|
||||
let response = request.await?;
|
||||
let message = ChannelMessage::from_proto(
|
||||
response.message.ok_or_else(|| anyhow!("invalid message"))?,
|
||||
&user_store,
|
||||
&mut cx,
|
||||
)
|
||||
.await?;
|
||||
this.update(&mut cx, |this, cx| {
|
||||
this.insert_messages(SumTree::from_item(message, &()), cx);
|
||||
});
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
.log_err()
|
||||
@@ -343,25 +385,27 @@ impl Channel {
|
||||
}
|
||||
|
||||
pub fn message(&self, ix: usize) -> &ChannelMessage {
|
||||
let mut cursor = self.messages.cursor::<Count, ()>();
|
||||
let mut cursor = self.messages.cursor::<Count>();
|
||||
cursor.seek(&Count(ix), Bias::Right, &());
|
||||
cursor.item().unwrap()
|
||||
}
|
||||
|
||||
pub fn messages_in_range(&self, range: Range<usize>) -> impl Iterator<Item = &ChannelMessage> {
|
||||
let mut cursor = self.messages.cursor::<Count, ()>();
|
||||
let mut cursor = self.messages.cursor::<Count>();
|
||||
cursor.seek(&Count(range.start), Bias::Right, &());
|
||||
cursor.take(range.len())
|
||||
}
|
||||
|
||||
pub fn pending_messages(&self) -> &[PendingChannelMessage] {
|
||||
&self.pending_messages
|
||||
pub fn pending_messages(&self) -> impl Iterator<Item = &ChannelMessage> {
|
||||
let mut cursor = self.messages.cursor::<ChannelMessageId>();
|
||||
cursor.seek(&ChannelMessageId::Pending(0), Bias::Left, &());
|
||||
cursor
|
||||
}
|
||||
|
||||
fn handle_message_sent(
|
||||
&mut self,
|
||||
message: TypedEnvelope<ChannelMessageSent>,
|
||||
_: Arc<rpc::Client>,
|
||||
message: TypedEnvelope<proto::ChannelMessageSent>,
|
||||
_: Arc<Client>,
|
||||
cx: &mut ModelContext<Self>,
|
||||
) -> Result<()> {
|
||||
let user_store = self.user_store.clone();
|
||||
@@ -372,7 +416,7 @@ impl Channel {
|
||||
|
||||
cx.spawn(|this, mut cx| {
|
||||
async move {
|
||||
let message = ChannelMessage::from_proto(message, &user_store).await?;
|
||||
let message = ChannelMessage::from_proto(message, &user_store, &mut cx).await?;
|
||||
this.update(&mut cx, |this, cx| {
|
||||
this.insert_messages(SumTree::from_item(message, &()), cx)
|
||||
});
|
||||
@@ -386,19 +430,54 @@ impl Channel {
|
||||
|
||||
fn insert_messages(&mut self, messages: SumTree<ChannelMessage>, cx: &mut ModelContext<Self>) {
|
||||
if let Some((first_message, last_message)) = messages.first().zip(messages.last()) {
|
||||
let mut old_cursor = self.messages.cursor::<u64, Count>();
|
||||
let nonces = messages
|
||||
.cursor::<()>()
|
||||
.map(|m| m.nonce)
|
||||
.collect::<HashSet<_>>();
|
||||
|
||||
let mut old_cursor = self.messages.cursor::<(ChannelMessageId, Count)>();
|
||||
let mut new_messages = old_cursor.slice(&first_message.id, Bias::Left, &());
|
||||
let start_ix = old_cursor.sum_start().0;
|
||||
let start_ix = old_cursor.start().1 .0;
|
||||
let removed_messages = old_cursor.slice(&last_message.id, Bias::Right, &());
|
||||
let removed_count = removed_messages.summary().count;
|
||||
let new_count = messages.summary().count;
|
||||
let end_ix = start_ix + removed_count;
|
||||
|
||||
new_messages.push_tree(messages, &());
|
||||
new_messages.push_tree(old_cursor.suffix(&()), &());
|
||||
|
||||
let mut ranges = Vec::<Range<usize>>::new();
|
||||
if new_messages.last().unwrap().is_pending() {
|
||||
new_messages.push_tree(old_cursor.suffix(&()), &());
|
||||
} else {
|
||||
new_messages.push_tree(
|
||||
old_cursor.slice(&ChannelMessageId::Pending(0), Bias::Left, &()),
|
||||
&(),
|
||||
);
|
||||
|
||||
while let Some(message) = old_cursor.item() {
|
||||
let message_ix = old_cursor.start().1 .0;
|
||||
if nonces.contains(&message.nonce) {
|
||||
if ranges.last().map_or(false, |r| r.end == message_ix) {
|
||||
ranges.last_mut().unwrap().end += 1;
|
||||
} else {
|
||||
ranges.push(message_ix..message_ix + 1);
|
||||
}
|
||||
} else {
|
||||
new_messages.push(message.clone(), &());
|
||||
}
|
||||
old_cursor.next(&());
|
||||
}
|
||||
}
|
||||
|
||||
drop(old_cursor);
|
||||
self.messages = new_messages;
|
||||
|
||||
for range in ranges.into_iter().rev() {
|
||||
cx.emit(ChannelEvent::MessagesUpdated {
|
||||
old_range: range,
|
||||
new_count: 0,
|
||||
});
|
||||
}
|
||||
cx.emit(ChannelEvent::MessagesUpdated {
|
||||
old_range: start_ix..end_ix,
|
||||
new_count,
|
||||
@@ -410,7 +489,8 @@ impl Channel {
|
||||
|
||||
async fn messages_from_proto(
|
||||
proto_messages: Vec<proto::ChannelMessage>,
|
||||
user_store: &UserStore,
|
||||
user_store: &ModelHandle<UserStore>,
|
||||
cx: &mut AsyncAppContext,
|
||||
) -> Result<SumTree<ChannelMessage>> {
|
||||
let unique_user_ids = proto_messages
|
||||
.iter()
|
||||
@@ -418,11 +498,15 @@ async fn messages_from_proto(
|
||||
.collect::<HashSet<_>>()
|
||||
.into_iter()
|
||||
.collect();
|
||||
user_store.load_users(unique_user_ids).await?;
|
||||
user_store
|
||||
.update(cx, |user_store, cx| {
|
||||
user_store.load_users(unique_user_ids, cx)
|
||||
})
|
||||
.await?;
|
||||
|
||||
let mut messages = Vec::with_capacity(proto_messages.len());
|
||||
for message in proto_messages {
|
||||
messages.push(ChannelMessage::from_proto(message, &user_store).await?);
|
||||
messages.push(ChannelMessage::from_proto(message, user_store, cx).await?);
|
||||
}
|
||||
let mut result = SumTree::new();
|
||||
result.extend(messages, &());
|
||||
@@ -441,16 +525,29 @@ impl From<proto::Channel> for ChannelDetails {
|
||||
impl ChannelMessage {
|
||||
pub async fn from_proto(
|
||||
message: proto::ChannelMessage,
|
||||
user_store: &UserStore,
|
||||
user_store: &ModelHandle<UserStore>,
|
||||
cx: &mut AsyncAppContext,
|
||||
) -> Result<Self> {
|
||||
let sender = user_store.get_user(message.sender_id).await?;
|
||||
let sender = user_store
|
||||
.update(cx, |user_store, cx| {
|
||||
user_store.fetch_user(message.sender_id, cx)
|
||||
})
|
||||
.await?;
|
||||
Ok(ChannelMessage {
|
||||
id: message.id,
|
||||
id: ChannelMessageId::Saved(message.id),
|
||||
body: message.body,
|
||||
timestamp: OffsetDateTime::from_unix_timestamp(message.timestamp as i64)?,
|
||||
sender,
|
||||
nonce: message
|
||||
.nonce
|
||||
.ok_or_else(|| anyhow!("nonce is required"))?
|
||||
.into(),
|
||||
})
|
||||
}
|
||||
|
||||
pub fn is_pending(&self) -> bool {
|
||||
matches!(self.id, ChannelMessageId::Pending(_))
|
||||
}
|
||||
}
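The `nonce` carried by every `ChannelMessage` is what makes the `insert_messages` logic above safe to run when the server echoes back a message the client already inserted optimistically: the pending copy is recognized and dropped by nonce rather than by id. A reduced sketch of that reconciliation step, using plain `Vec`s instead of the real `SumTree`:

```rust
use std::collections::HashSet;

#[derive(Clone, Debug, PartialEq)]
struct Message {
    pending: bool,
    nonce: u128,
    body: String,
}

/// Drop any pending copies whose nonce matches an incoming saved message,
/// then append the saved messages. Mirrors the spirit of `insert_messages`,
/// not its SumTree-based implementation.
fn reconcile(existing: &mut Vec<Message>, incoming: Vec<Message>) {
    let incoming_nonces: HashSet<u128> = incoming.iter().map(|m| m.nonce).collect();
    existing.retain(|m| !(m.pending && incoming_nonces.contains(&m.nonce)));
    existing.extend(incoming);
}

fn main() {
    let mut messages = vec![Message { pending: true, nonce: 42, body: "hi".into() }];
    reconcile(
        &mut messages,
        vec![Message { pending: false, nonce: 42, body: "hi".into() }],
    );
    assert_eq!(messages.len(), 1);
    assert!(!messages[0].pending); // the optimistic copy was replaced, not duplicated
}
```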
|
||||
|
||||
impl sum_tree::Item for ChannelMessage {
|
||||
@@ -464,6 +561,12 @@ impl sum_tree::Item for ChannelMessage {
|
||||
}
|
||||
}
|
||||
|
||||
impl Default for ChannelMessageId {
|
||||
fn default() -> Self {
|
||||
Self::Saved(0)
|
||||
}
|
||||
}
|
||||
|
||||
impl sum_tree::Summary for ChannelMessageSummary {
|
||||
type Context = ();
|
||||
|
||||
@@ -473,7 +576,7 @@ impl sum_tree::Summary for ChannelMessageSummary {
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> sum_tree::Dimension<'a, ChannelMessageSummary> for u64 {
|
||||
impl<'a> sum_tree::Dimension<'a, ChannelMessageSummary> for ChannelMessageId {
|
||||
fn add_summary(&mut self, summary: &'a ChannelMessageSummary, _: &()) {
|
||||
debug_assert!(summary.max_id > *self);
|
||||
*self = summary.max_id;
|
||||
@@ -486,24 +589,20 @@ impl<'a> sum_tree::Dimension<'a, ChannelMessageSummary> for Count {
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> sum_tree::SeekDimension<'a, ChannelMessageSummary> for Count {
|
||||
fn cmp(&self, other: &Self, _: &()) -> std::cmp::Ordering {
|
||||
Ord::cmp(&self.0, &other.0)
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
use crate::test::FakeServer;
|
||||
use crate::test::{FakeHttpClient, FakeServer};
|
||||
use gpui::TestAppContext;
|
||||
use surf::http::Response;
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_channel_messages(mut cx: TestAppContext) {
|
||||
let user_id = 5;
|
||||
let mut client = Client::new();
|
||||
let http_client = FakeHttpClient::new(|_| async move { Ok(Response::new(404)) });
|
||||
let mut client = Client::new(http_client.clone());
|
||||
let server = FakeServer::for_client(user_id, &mut client, &cx).await;
|
||||
let user_store = Arc::new(UserStore::new(client.clone()));
|
||||
let user_store = cx.add_model(|cx| UserStore::new(client.clone(), http_client, cx));
|
||||
|
||||
let channel_list = cx.add_model(|cx| ChannelList::new(user_store, client.clone(), cx));
|
||||
channel_list.read_with(&cx, |list, _| assert_eq!(list.available_channels(), None));
|
||||
@@ -532,6 +631,21 @@ mod tests {
|
||||
)
|
||||
});
|
||||
|
||||
let get_users = server.receive::<proto::GetUsers>().await.unwrap();
|
||||
assert_eq!(get_users.payload.user_ids, vec![5]);
|
||||
server
|
||||
.respond(
|
||||
get_users.receipt(),
|
||||
proto::GetUsersResponse {
|
||||
users: vec![proto::User {
|
||||
id: 5,
|
||||
github_login: "nathansobo".into(),
|
||||
avatar_url: "http://avatar.com/nathansobo".into(),
|
||||
}],
|
||||
},
|
||||
)
|
||||
.await;
|
||||
|
||||
// Join a channel and populate its existing messages.
|
||||
let channel = channel_list
|
||||
.update(&mut cx, |list, cx| {
|
||||
@@ -551,12 +665,14 @@ mod tests {
|
||||
body: "a".into(),
|
||||
timestamp: 1000,
|
||||
sender_id: 5,
|
||||
nonce: Some(1.into()),
|
||||
},
|
||||
proto::ChannelMessage {
|
||||
id: 11,
|
||||
body: "b".into(),
|
||||
timestamp: 1001,
|
||||
sender_id: 6,
|
||||
nonce: Some(2.into()),
|
||||
},
|
||||
],
|
||||
done: false,
|
||||
@@ -567,23 +683,16 @@ mod tests {
|
||||
// Client requests all users for the received messages
|
||||
let mut get_users = server.receive::<proto::GetUsers>().await.unwrap();
|
||||
get_users.payload.user_ids.sort();
|
||||
assert_eq!(get_users.payload.user_ids, vec![5, 6]);
|
||||
assert_eq!(get_users.payload.user_ids, vec![6]);
|
||||
server
|
||||
.respond(
|
||||
get_users.receipt(),
|
||||
proto::GetUsersResponse {
|
||||
users: vec![
|
||||
proto::User {
|
||||
id: 5,
|
||||
github_login: "nathansobo".into(),
|
||||
avatar_url: "http://avatar.com/nathansobo".into(),
|
||||
},
|
||||
proto::User {
|
||||
id: 6,
|
||||
github_login: "maxbrunsfeld".into(),
|
||||
avatar_url: "http://avatar.com/maxbrunsfeld".into(),
|
||||
},
|
||||
],
|
||||
users: vec![proto::User {
|
||||
id: 6,
|
||||
github_login: "maxbrunsfeld".into(),
|
||||
avatar_url: "http://avatar.com/maxbrunsfeld".into(),
|
||||
}],
|
||||
},
|
||||
)
|
||||
.await;
|
||||
@@ -617,6 +726,7 @@ mod tests {
|
||||
body: "c".into(),
|
||||
timestamp: 1002,
|
||||
sender_id: 7,
|
||||
nonce: Some(3.into()),
|
||||
}),
|
||||
})
|
||||
.await;
|
||||
@@ -672,12 +782,14 @@ mod tests {
|
||||
body: "y".into(),
|
||||
timestamp: 998,
|
||||
sender_id: 5,
|
||||
nonce: Some(4.into()),
|
||||
},
|
||||
proto::ChannelMessage {
|
||||
id: 9,
|
||||
body: "z".into(),
|
||||
timestamp: 999,
|
||||
sender_id: 6,
|
||||
nonce: Some(5.into()),
|
||||
},
|
||||
],
|
||||
},
|
||||
919  crates/client/src/client.rs  Normal file

@@ -0,0 +1,919 @@
#[cfg(any(test, feature = "test-support"))]
pub mod test;

pub mod channel;
pub mod http;
pub mod user;

use anyhow::{anyhow, Context, Result};
use async_recursion::async_recursion;
use async_tungstenite::tungstenite::{
error::Error as WebsocketError,
http::{Request, StatusCode},
};
use gpui::{action, AsyncAppContext, Entity, ModelContext, MutableAppContext, Task};
use http::HttpClient;
use lazy_static::lazy_static;
use parking_lot::RwLock;
use postage::{prelude::Stream, watch};
use rand::prelude::*;
use rpc::proto::{AnyTypedEnvelope, EntityMessage, EnvelopedMessage, RequestMessage};
use std::{
any::TypeId,
collections::HashMap,
convert::TryFrom,
fmt::Write as _,
future::Future,
sync::{Arc, Weak},
time::{Duration, Instant},
};
use surf::{http::Method, Url};
use thiserror::Error;
use util::{ResultExt, TryFutureExt};

pub use channel::*;
pub use rpc::*;
pub use user::*;

lazy_static! {
static ref ZED_SERVER_URL: String =
std::env::var("ZED_SERVER_URL").unwrap_or("https://zed.dev".to_string());
static ref IMPERSONATE_LOGIN: Option<String> = std::env::var("ZED_IMPERSONATE")
.ok()
.and_then(|s| if s.is_empty() { None } else { Some(s) });
}

action!(Authenticate);

pub fn init(rpc: Arc<Client>, cx: &mut MutableAppContext) {
cx.add_global_action(move |_: &Authenticate, cx| {
let rpc = rpc.clone();
cx.spawn(|cx| async move { rpc.authenticate_and_connect(&cx).log_err().await })
.detach();
});
}

pub struct Client {
peer: Arc<Peer>,
http: Arc<dyn HttpClient>,
state: RwLock<ClientState>,
authenticate:
Option<Box<dyn 'static + Send + Sync + Fn(&AsyncAppContext) -> Task<Result<Credentials>>>>,
establish_connection: Option<
Box<
dyn 'static
+ Send
+ Sync
+ Fn(
&Credentials,
&AsyncAppContext,
) -> Task<Result<Connection, EstablishConnectionError>>,
>,
>,
}

#[derive(Error, Debug)]
pub enum EstablishConnectionError {
#[error("upgrade required")]
UpgradeRequired,
#[error("unauthorized")]
Unauthorized,
#[error("{0}")]
Other(#[from] anyhow::Error),
#[error("{0}")]
Io(#[from] std::io::Error),
#[error("{0}")]
Http(#[from] async_tungstenite::tungstenite::http::Error),
}

impl From<WebsocketError> for EstablishConnectionError {
fn from(error: WebsocketError) -> Self {
if let WebsocketError::Http(response) = &error {
match response.status() {
StatusCode::UNAUTHORIZED => return EstablishConnectionError::Unauthorized,
StatusCode::UPGRADE_REQUIRED => return EstablishConnectionError::UpgradeRequired,
_ => {}
}
}
EstablishConnectionError::Other(error.into())
}
}

impl EstablishConnectionError {
pub fn other(error: impl Into<anyhow::Error> + Send + Sync) -> Self {
Self::Other(error.into())
}
}

#[derive(Copy, Clone, Debug)]
pub enum Status {
SignedOut,
UpgradeRequired,
Authenticating,
Connecting,
ConnectionError,
Connected { connection_id: ConnectionId },
ConnectionLost,
Reauthenticating,
Reconnecting,
ReconnectionError { next_reconnection: Instant },
}
|
||||
|
||||
struct ClientState {
|
||||
credentials: Option<Credentials>,
|
||||
status: (watch::Sender<Status>, watch::Receiver<Status>),
|
||||
entity_id_extractors: HashMap<TypeId, Box<dyn Send + Sync + Fn(&dyn AnyTypedEnvelope) -> u64>>,
|
||||
model_handlers: HashMap<
|
||||
(TypeId, Option<u64>),
|
||||
Option<Box<dyn Send + Sync + FnMut(Box<dyn AnyTypedEnvelope>, &mut AsyncAppContext)>>,
|
||||
>,
|
||||
_maintain_connection: Option<Task<()>>,
|
||||
heartbeat_interval: Duration,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug)]
|
||||
pub struct Credentials {
|
||||
pub user_id: u64,
|
||||
pub access_token: String,
|
||||
}
|
||||
|
||||
impl Default for ClientState {
|
||||
fn default() -> Self {
|
||||
Self {
|
||||
credentials: None,
|
||||
status: watch::channel_with(Status::SignedOut),
|
||||
entity_id_extractors: Default::default(),
|
||||
model_handlers: Default::default(),
|
||||
_maintain_connection: None,
|
||||
heartbeat_interval: Duration::from_secs(5),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub struct Subscription {
|
||||
client: Weak<Client>,
|
||||
id: (TypeId, Option<u64>),
|
||||
}
|
||||
|
||||
impl Drop for Subscription {
|
||||
fn drop(&mut self) {
|
||||
if let Some(client) = self.client.upgrade() {
|
||||
let mut state = client.state.write();
|
||||
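// Unregister the handler; the dispatch loop only re-inserts a handler whose map entry still exists.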
let _ = state.model_handlers.remove(&self.id).unwrap();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Client {
|
||||
pub fn new(http: Arc<dyn HttpClient>) -> Arc<Self> {
|
||||
Arc::new(Self {
|
||||
peer: Peer::new(),
|
||||
http,
|
||||
state: Default::default(),
|
||||
authenticate: None,
|
||||
establish_connection: None,
|
||||
})
|
||||
}
|
||||
|
||||
#[cfg(any(test, feature = "test-support"))]
|
||||
pub fn override_authenticate<F>(&mut self, authenticate: F) -> &mut Self
|
||||
where
|
||||
F: 'static + Send + Sync + Fn(&AsyncAppContext) -> Task<Result<Credentials>>,
|
||||
{
|
||||
self.authenticate = Some(Box::new(authenticate));
|
||||
self
|
||||
}
|
||||
|
||||
#[cfg(any(test, feature = "test-support"))]
|
||||
pub fn override_establish_connection<F>(&mut self, connect: F) -> &mut Self
|
||||
where
|
||||
F: 'static
|
||||
+ Send
|
||||
+ Sync
|
||||
+ Fn(&Credentials, &AsyncAppContext) -> Task<Result<Connection, EstablishConnectionError>>,
|
||||
{
|
||||
self.establish_connection = Some(Box::new(connect));
|
||||
self
|
||||
}
|
||||
|
||||
pub fn user_id(&self) -> Option<u64> {
|
||||
self.state
|
||||
.read()
|
||||
.credentials
|
||||
.as_ref()
|
||||
.map(|credentials| credentials.user_id)
|
||||
}
|
||||
|
||||
pub fn status(&self) -> watch::Receiver<Status> {
|
||||
self.state.read().status.1.clone()
|
||||
}
|
||||
|
||||
fn set_status(self: &Arc<Self>, status: Status, cx: &AsyncAppContext) {
|
||||
let mut state = self.state.write();
|
||||
*state.status.0.borrow_mut() = status;
|
||||
|
||||
match status {
|
||||
Status::Connected { .. } => {
|
||||
let heartbeat_interval = state.heartbeat_interval;
|
||||
let this = self.clone();
|
||||
let foreground = cx.foreground();
|
||||
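// Keep the connection alive by pinging the server once per heartbeat interval.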
state._maintain_connection = Some(cx.foreground().spawn(async move {
|
||||
loop {
|
||||
foreground.timer(heartbeat_interval).await;
|
||||
let _ = this.request(proto::Ping {}).await;
|
||||
}
|
||||
}));
|
||||
}
|
||||
Status::ConnectionLost => {
|
||||
let this = self.clone();
|
||||
let foreground = cx.foreground();
|
||||
let heartbeat_interval = state.heartbeat_interval;
|
||||
state._maintain_connection = Some(cx.spawn(|cx| async move {
|
||||
let mut rng = StdRng::from_entropy();
|
||||
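// Retry the connection with exponential backoff and jitter, capping the delay at the heartbeat interval.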
let mut delay = Duration::from_millis(100);
|
||||
while let Err(error) = this.authenticate_and_connect(&cx).await {
|
||||
log::error!("failed to connect {}", error);
|
||||
this.set_status(
|
||||
Status::ReconnectionError {
|
||||
next_reconnection: Instant::now() + delay,
|
||||
},
|
||||
&cx,
|
||||
);
|
||||
foreground.timer(delay).await;
|
||||
delay = delay
|
||||
.mul_f32(rng.gen_range(1.0..=2.0))
|
||||
.min(heartbeat_interval);
|
||||
}
|
||||
}));
|
||||
}
|
||||
Status::SignedOut | Status::UpgradeRequired => {
|
||||
state._maintain_connection.take();
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
|
||||
pub fn subscribe<T, M, F>(
|
||||
self: &Arc<Self>,
|
||||
cx: &mut ModelContext<M>,
|
||||
mut handler: F,
|
||||
) -> Subscription
|
||||
where
|
||||
T: EnvelopedMessage,
|
||||
M: Entity,
|
||||
F: 'static
|
||||
+ Send
|
||||
+ Sync
|
||||
+ FnMut(&mut M, TypedEnvelope<T>, Arc<Self>, &mut ModelContext<M>) -> Result<()>,
|
||||
{
|
||||
let subscription_id = (TypeId::of::<T>(), None);
|
||||
let client = self.clone();
|
||||
let mut state = self.state.write();
|
||||
let model = cx.weak_handle();
|
||||
let prev_handler = state.model_handlers.insert(
|
||||
subscription_id,
|
||||
Some(Box::new(move |envelope, cx| {
|
||||
if let Some(model) = model.upgrade(cx) {
|
||||
let envelope = envelope.into_any().downcast::<TypedEnvelope<T>>().unwrap();
|
||||
model.update(cx, |model, cx| {
|
||||
if let Err(error) = handler(model, *envelope, client.clone(), cx) {
|
||||
log::error!("error handling message: {}", error)
|
||||
}
|
||||
});
|
||||
}
|
||||
})),
|
||||
);
|
||||
if prev_handler.is_some() {
|
||||
panic!("registered handler for the same message twice");
|
||||
}
|
||||
|
||||
Subscription {
|
||||
client: Arc::downgrade(self),
|
||||
id: subscription_id,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn subscribe_to_entity<T, M, F>(
|
||||
self: &Arc<Self>,
|
||||
remote_id: u64,
|
||||
cx: &mut ModelContext<M>,
|
||||
mut handler: F,
|
||||
) -> Subscription
|
||||
where
|
||||
T: EntityMessage,
|
||||
M: Entity,
|
||||
F: 'static
|
||||
+ Send
|
||||
+ Sync
|
||||
+ FnMut(&mut M, TypedEnvelope<T>, Arc<Self>, &mut ModelContext<M>) -> Result<()>,
|
||||
{
|
||||
let subscription_id = (TypeId::of::<T>(), Some(remote_id));
|
||||
let client = self.clone();
|
||||
let mut state = self.state.write();
|
||||
let model = cx.weak_handle();
|
||||
state
|
||||
.entity_id_extractors
|
||||
.entry(subscription_id.0)
|
||||
.or_insert_with(|| {
|
||||
Box::new(|envelope| {
|
||||
let envelope = envelope
|
||||
.as_any()
|
||||
.downcast_ref::<TypedEnvelope<T>>()
|
||||
.unwrap();
|
||||
envelope.payload.remote_entity_id()
|
||||
})
|
||||
});
|
||||
let prev_handler = state.model_handlers.insert(
|
||||
subscription_id,
|
||||
Some(Box::new(move |envelope, cx| {
|
||||
if let Some(model) = model.upgrade(cx) {
|
||||
let envelope = envelope.into_any().downcast::<TypedEnvelope<T>>().unwrap();
|
||||
model.update(cx, |model, cx| {
|
||||
if let Err(error) = handler(model, *envelope, client.clone(), cx) {
|
||||
log::error!("error handling message: {}", error)
|
||||
}
|
||||
});
|
||||
}
|
||||
})),
|
||||
);
|
||||
if prev_handler.is_some() {
|
||||
panic!("registered a handler for the same entity twice")
|
||||
}
|
||||
|
||||
Subscription {
|
||||
client: Arc::downgrade(self),
|
||||
id: subscription_id,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn has_keychain_credentials(&self, cx: &AsyncAppContext) -> bool {
|
||||
read_credentials_from_keychain(cx).is_some()
|
||||
}
|
||||
|
||||
#[async_recursion(?Send)]
|
||||
pub async fn authenticate_and_connect(
|
||||
self: &Arc<Self>,
|
||||
cx: &AsyncAppContext,
|
||||
) -> anyhow::Result<()> {
|
||||
let was_disconnected = match *self.status().borrow() {
|
||||
Status::SignedOut => true,
|
||||
Status::ConnectionError | Status::ConnectionLost | Status::ReconnectionError { .. } => {
|
||||
false
|
||||
}
|
||||
Status::Connected { .. }
|
||||
| Status::Connecting { .. }
|
||||
| Status::Reconnecting { .. }
|
||||
| Status::Authenticating
|
||||
| Status::Reauthenticating => return Ok(()),
|
||||
Status::UpgradeRequired => return Err(EstablishConnectionError::UpgradeRequired)?,
|
||||
};
|
||||
|
||||
if was_disconnected {
|
||||
self.set_status(Status::Authenticating, cx);
|
||||
} else {
|
||||
self.set_status(Status::Reauthenticating, cx)
|
||||
}
|
||||
|
||||
let mut used_keychain = false;
|
||||
let credentials = self.state.read().credentials.clone();
|
||||
let credentials = if let Some(credentials) = credentials {
|
||||
credentials
|
||||
} else if let Some(credentials) = read_credentials_from_keychain(cx) {
|
||||
used_keychain = true;
|
||||
credentials
|
||||
} else {
|
||||
let credentials = match self.authenticate(&cx).await {
|
||||
Ok(credentials) => credentials,
|
||||
Err(err) => {
|
||||
self.set_status(Status::ConnectionError, cx);
|
||||
return Err(err);
|
||||
}
|
||||
};
|
||||
credentials
|
||||
};
|
||||
|
||||
if was_disconnected {
|
||||
self.set_status(Status::Connecting, cx);
|
||||
} else {
|
||||
self.set_status(Status::Reconnecting, cx);
|
||||
}
|
||||
|
||||
match self.establish_connection(&credentials, cx).await {
|
||||
Ok(conn) => {
|
||||
self.state.write().credentials = Some(credentials.clone());
|
||||
if !used_keychain && IMPERSONATE_LOGIN.is_none() {
|
||||
write_credentials_to_keychain(&credentials, cx).log_err();
|
||||
}
|
||||
self.set_connection(conn, cx).await;
|
||||
Ok(())
|
||||
}
|
||||
Err(EstablishConnectionError::Unauthorized) => {
|
||||
self.state.write().credentials.take();
|
||||
if used_keychain {
|
||||
cx.platform().delete_credentials(&ZED_SERVER_URL).log_err();
|
||||
self.set_status(Status::SignedOut, cx);
|
||||
self.authenticate_and_connect(cx).await
|
||||
} else {
|
||||
self.set_status(Status::ConnectionError, cx);
|
||||
Err(EstablishConnectionError::Unauthorized)?
|
||||
}
|
||||
}
|
||||
Err(EstablishConnectionError::UpgradeRequired) => {
|
||||
self.set_status(Status::UpgradeRequired, cx);
|
||||
Err(EstablishConnectionError::UpgradeRequired)?
|
||||
}
|
||||
Err(error) => {
|
||||
self.set_status(Status::ConnectionError, cx);
|
||||
Err(error)?
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
async fn set_connection(self: &Arc<Self>, conn: Connection, cx: &AsyncAppContext) {
|
||||
let (connection_id, handle_io, mut incoming) = self.peer.add_connection(conn).await;
|
||||
cx.foreground()
|
||||
.spawn({
|
||||
let mut cx = cx.clone();
|
||||
let this = self.clone();
|
||||
async move {
|
||||
while let Some(message) = incoming.recv().await {
|
||||
let mut state = this.state.write();
|
||||
let payload_type_id = message.payload_type_id();
|
||||
let entity_id = if let Some(extract_entity_id) =
|
||||
state.entity_id_extractors.get(&message.payload_type_id())
|
||||
{
|
||||
Some((extract_entity_id)(message.as_ref()))
|
||||
} else {
|
||||
None
|
||||
};
|
||||
|
||||
let handler_key = (payload_type_id, entity_id);
|
||||
if let Some(handler) = state.model_handlers.get_mut(&handler_key) {
|
||||
let mut handler = handler.take().unwrap();
|
||||
drop(state); // Avoid deadlocks if the handler interacts with rpc::Client
|
||||
let start_time = Instant::now();
|
||||
log::info!("RPC client message {}", message.payload_type_name());
|
||||
(handler)(message, &mut cx);
|
||||
log::info!("RPC message handled. duration:{:?}", start_time.elapsed());
|
||||
|
||||
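// Re-insert the handler unless its subscription was dropped while it was running.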
let mut state = this.state.write();
|
||||
if state.model_handlers.contains_key(&handler_key) {
|
||||
state.model_handlers.insert(handler_key, Some(handler));
|
||||
}
|
||||
} else {
|
||||
log::info!("unhandled message {}", message.payload_type_name());
|
||||
}
|
||||
}
|
||||
}
|
||||
})
|
||||
.detach();
|
||||
|
||||
self.set_status(Status::Connected { connection_id }, cx);
|
||||
|
||||
let handle_io = cx.background().spawn(handle_io);
|
||||
let this = self.clone();
|
||||
let cx = cx.clone();
|
||||
cx.foreground()
|
||||
.spawn(async move {
|
||||
match handle_io.await {
|
||||
Ok(()) => this.set_status(Status::SignedOut, &cx),
|
||||
Err(err) => {
|
||||
log::error!("connection error: {:?}", err);
|
||||
this.set_status(Status::ConnectionLost, &cx);
|
||||
}
|
||||
}
|
||||
})
|
||||
.detach();
|
||||
}
|
||||
|
||||
fn authenticate(self: &Arc<Self>, cx: &AsyncAppContext) -> Task<Result<Credentials>> {
|
||||
if let Some(callback) = self.authenticate.as_ref() {
|
||||
callback(cx)
|
||||
} else {
|
||||
self.authenticate_with_browser(cx)
|
||||
}
|
||||
}
|
||||
|
||||
fn establish_connection(
|
||||
self: &Arc<Self>,
|
||||
credentials: &Credentials,
|
||||
cx: &AsyncAppContext,
|
||||
) -> Task<Result<Connection, EstablishConnectionError>> {
|
||||
if let Some(callback) = self.establish_connection.as_ref() {
|
||||
callback(credentials, cx)
|
||||
} else {
|
||||
self.establish_websocket_connection(credentials, cx)
|
||||
}
|
||||
}
|
||||
|
||||
fn establish_websocket_connection(
|
||||
self: &Arc<Self>,
|
||||
credentials: &Credentials,
|
||||
cx: &AsyncAppContext,
|
||||
) -> Task<Result<Connection, EstablishConnectionError>> {
|
||||
let request = Request::builder()
|
||||
.header(
|
||||
"Authorization",
|
||||
format!("{} {}", credentials.user_id, credentials.access_token),
|
||||
)
|
||||
.header("X-Zed-Protocol-Version", rpc::PROTOCOL_VERSION);
|
||||
|
||||
let http = self.http.clone();
|
||||
cx.background().spawn(async move {
|
||||
let mut rpc_url = format!("{}/rpc", *ZED_SERVER_URL);
|
||||
let rpc_request = surf::Request::new(
|
||||
Method::Get,
|
||||
surf::Url::parse(&rpc_url).context("invalid ZED_SERVER_URL")?,
|
||||
);
|
||||
let rpc_response = http.send(rpc_request).await?;
|
||||
|
||||
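// zed.dev/rpc may redirect to a dedicated RPC host; if it does, follow the Location header.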
if rpc_response.status().is_redirection() {
|
||||
rpc_url = rpc_response
|
||||
.header("Location")
|
||||
.ok_or_else(|| anyhow!("missing location header in /rpc response"))?
|
||||
.as_str()
|
||||
.to_string();
|
||||
}
|
||||
// Until we switch the zed.dev domain to point to the new Next.js app, there
|
||||
// will be no redirect required, and the app will connect directly to
|
||||
// wss://zed.dev/rpc.
|
||||
else if rpc_response.status() != surf::StatusCode::UpgradeRequired {
|
||||
Err(anyhow!(
|
||||
"unexpected /rpc response status {}",
|
||||
rpc_response.status()
|
||||
))?
|
||||
}
|
||||
|
||||
let mut rpc_url = surf::Url::parse(&rpc_url).context("invalid rpc url")?;
|
||||
let rpc_host = rpc_url
|
||||
.host_str()
|
||||
.zip(rpc_url.port_or_known_default())
|
||||
.ok_or_else(|| anyhow!("missing host in rpc url"))?;
|
||||
let stream = smol::net::TcpStream::connect(rpc_host).await?;
|
||||
|
||||
log::info!("connected to rpc endpoint {}", rpc_url);
|
||||
|
||||
match rpc_url.scheme() {
|
||||
"https" => {
|
||||
rpc_url.set_scheme("wss").unwrap();
|
||||
let request = request.uri(rpc_url.as_str()).body(())?;
|
||||
let (stream, _) =
|
||||
async_tungstenite::async_tls::client_async_tls(request, stream).await?;
|
||||
Ok(Connection::new(stream))
|
||||
}
|
||||
"http" => {
|
||||
rpc_url.set_scheme("ws").unwrap();
|
||||
let request = request.uri(rpc_url.as_str()).body(())?;
|
||||
let (stream, _) = async_tungstenite::client_async(request, stream).await?;
|
||||
Ok(Connection::new(stream))
|
||||
}
|
||||
_ => Err(anyhow!("invalid rpc url: {}", rpc_url))?,
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
pub fn authenticate_with_browser(
|
||||
self: &Arc<Self>,
|
||||
cx: &AsyncAppContext,
|
||||
) -> Task<Result<Credentials>> {
|
||||
let platform = cx.platform();
|
||||
let executor = cx.background();
|
||||
executor.clone().spawn(async move {
|
||||
// Generate a pair of asymmetric encryption keys. The public key will be used by the
|
||||
// zed server to encrypt the user's access token, so that it can't be intercepted by
|
||||
// any other app running on the user's device.
|
||||
let (public_key, private_key) =
|
||||
rpc::auth::keypair().expect("failed to generate keypair for auth");
|
||||
let public_key_string =
|
||||
String::try_from(public_key).expect("failed to serialize public key for auth");
|
||||
|
||||
// Start an HTTP server to receive the redirect from Zed's sign-in page.
|
||||
let server = tiny_http::Server::http("127.0.0.1:0").expect("failed to find open port");
|
||||
let port = server.server_addr().port();
|
||||
|
||||
// Open the Zed sign-in page in the user's browser, with query parameters that indicate
|
||||
// that the user is signing in from a Zed app running on the same device.
|
||||
let mut url = format!(
|
||||
"{}/native_app_signin?native_app_port={}&native_app_public_key={}",
|
||||
*ZED_SERVER_URL, port, public_key_string
|
||||
);
|
||||
|
||||
if let Some(impersonate_login) = IMPERSONATE_LOGIN.as_ref() {
|
||||
log::info!("impersonating user @{}", impersonate_login);
|
||||
write!(&mut url, "&impersonate={}", impersonate_login).unwrap();
|
||||
}
|
||||
|
||||
platform.open_url(&url);
|
||||
|
||||
// Receive the HTTP request from the user's browser. Retrieve the user id and encrypted
|
||||
// access token from the query params.
|
||||
//
|
||||
// TODO - Avoid ever starting more than one HTTP server. Maybe switch to using a
|
||||
// custom URL scheme instead of this local HTTP server.
|
||||
let (user_id, access_token) = executor
|
||||
.spawn(async move {
|
||||
if let Some(req) = server.recv_timeout(Duration::from_secs(10 * 60))? {
|
||||
let path = req.url();
|
||||
let mut user_id = None;
|
||||
let mut access_token = None;
|
||||
let url = Url::parse(&format!("http://example.com{}", path))
|
||||
.context("failed to parse login notification url")?;
|
||||
for (key, value) in url.query_pairs() {
|
||||
if key == "access_token" {
|
||||
access_token = Some(value.to_string());
|
||||
} else if key == "user_id" {
|
||||
user_id = Some(value.to_string());
|
||||
}
|
||||
}
|
||||
|
||||
let post_auth_url =
|
||||
format!("{}/native_app_signin_succeeded", *ZED_SERVER_URL);
|
||||
req.respond(
|
||||
tiny_http::Response::empty(302).with_header(
|
||||
tiny_http::Header::from_bytes(
|
||||
&b"Location"[..],
|
||||
post_auth_url.as_bytes(),
|
||||
)
|
||||
.unwrap(),
|
||||
),
|
||||
)
|
||||
.context("failed to respond to login http request")?;
|
||||
Ok((
|
||||
user_id.ok_or_else(|| anyhow!("missing user_id parameter"))?,
|
||||
access_token
|
||||
.ok_or_else(|| anyhow!("missing access_token parameter"))?,
|
||||
))
|
||||
} else {
|
||||
Err(anyhow!("didn't receive login redirect"))
|
||||
}
|
||||
})
|
||||
.await?;
|
||||
|
||||
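// The server encrypted the access token with the public key generated above; decrypt it with the matching private key.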
let access_token = private_key
|
||||
.decrypt_string(&access_token)
|
||||
.context("failed to decrypt access token")?;
|
||||
platform.activate(true);
|
||||
|
||||
Ok(Credentials {
|
||||
user_id: user_id.parse()?,
|
||||
access_token,
|
||||
})
|
||||
})
|
||||
}
|
||||
|
||||
pub async fn disconnect(self: &Arc<Self>, cx: &AsyncAppContext) -> Result<()> {
|
||||
let conn_id = self.connection_id()?;
|
||||
self.peer.disconnect(conn_id).await;
|
||||
self.set_status(Status::SignedOut, cx);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn connection_id(&self) -> Result<ConnectionId> {
|
||||
if let Status::Connected { connection_id, .. } = *self.status().borrow() {
|
||||
Ok(connection_id)
|
||||
} else {
|
||||
Err(anyhow!("not connected"))
|
||||
}
|
||||
}
|
||||
|
||||
pub async fn send<T: EnvelopedMessage>(&self, message: T) -> Result<()> {
|
||||
self.peer.send(self.connection_id()?, message).await
|
||||
}
|
||||
|
||||
pub async fn request<T: RequestMessage>(&self, request: T) -> Result<T::Response> {
|
||||
self.peer.request(self.connection_id()?, request).await
|
||||
}
|
||||
|
||||
pub fn respond<T: RequestMessage>(
|
||||
&self,
|
||||
receipt: Receipt<T>,
|
||||
response: T::Response,
|
||||
) -> impl Future<Output = Result<()>> {
|
||||
self.peer.respond(receipt, response)
|
||||
}
|
||||
}
|
||||
|
||||
fn read_credentials_from_keychain(cx: &AsyncAppContext) -> Option<Credentials> {
|
||||
if IMPERSONATE_LOGIN.is_some() {
|
||||
return None;
|
||||
}
|
||||
|
||||
let (user_id, access_token) = cx
|
||||
.platform()
|
||||
.read_credentials(&ZED_SERVER_URL)
|
||||
.log_err()
|
||||
.flatten()?;
|
||||
Some(Credentials {
|
||||
user_id: user_id.parse().ok()?,
|
||||
access_token: String::from_utf8(access_token).ok()?,
|
||||
})
|
||||
}
|
||||
|
||||
fn write_credentials_to_keychain(credentials: &Credentials, cx: &AsyncAppContext) -> Result<()> {
|
||||
cx.platform().write_credentials(
|
||||
&ZED_SERVER_URL,
|
||||
&credentials.user_id.to_string(),
|
||||
credentials.access_token.as_bytes(),
|
||||
)
|
||||
}
|
||||
|
||||
const WORKTREE_URL_PREFIX: &'static str = "zed://worktrees/";
|
||||
|
||||
pub fn encode_worktree_url(id: u64, access_token: &str) -> String {
|
||||
format!("{}{}/{}", WORKTREE_URL_PREFIX, id, access_token)
|
||||
}
|
||||
|
||||
pub fn decode_worktree_url(url: &str) -> Option<(u64, String)> {
|
||||
let path = url.trim().strip_prefix(WORKTREE_URL_PREFIX)?;
|
||||
let mut parts = path.split('/');
|
||||
let id = parts.next()?.parse::<u64>().ok()?;
|
||||
let access_token = parts.next()?;
|
||||
if access_token.is_empty() {
|
||||
return None;
|
||||
}
|
||||
Some((id, access_token.to_string()))
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
use crate::test::{FakeHttpClient, FakeServer};
|
||||
use gpui::TestAppContext;
|
||||
|
||||
#[gpui::test(iterations = 10)]
|
||||
async fn test_heartbeat(cx: TestAppContext) {
|
||||
cx.foreground().forbid_parking();
|
||||
|
||||
let user_id = 5;
|
||||
let mut client = Client::new(FakeHttpClient::with_404_response());
|
||||
let server = FakeServer::for_client(user_id, &mut client, &cx).await;
|
||||
|
||||
cx.foreground().advance_clock(Duration::from_secs(10));
|
||||
let ping = server.receive::<proto::Ping>().await.unwrap();
|
||||
server.respond(ping.receipt(), proto::Ack {}).await;
|
||||
|
||||
cx.foreground().advance_clock(Duration::from_secs(10));
|
||||
let ping = server.receive::<proto::Ping>().await.unwrap();
|
||||
server.respond(ping.receipt(), proto::Ack {}).await;
|
||||
|
||||
client.disconnect(&cx.to_async()).await.unwrap();
|
||||
assert!(server.receive::<proto::Ping>().await.is_err());
|
||||
}
|
||||
|
||||
#[gpui::test(iterations = 10)]
|
||||
async fn test_reconnection(cx: TestAppContext) {
|
||||
cx.foreground().forbid_parking();
|
||||
|
||||
let user_id = 5;
|
||||
let mut client = Client::new(FakeHttpClient::with_404_response());
|
||||
let server = FakeServer::for_client(user_id, &mut client, &cx).await;
|
||||
let mut status = client.status();
|
||||
assert!(matches!(
|
||||
status.recv().await,
|
||||
Some(Status::Connected { .. })
|
||||
));
|
||||
assert_eq!(server.auth_count(), 1);
|
||||
|
||||
server.forbid_connections();
|
||||
server.disconnect().await;
|
||||
while !matches!(status.recv().await, Some(Status::ReconnectionError { .. })) {}
|
||||
|
||||
server.allow_connections();
|
||||
cx.foreground().advance_clock(Duration::from_secs(10));
|
||||
while !matches!(status.recv().await, Some(Status::Connected { .. })) {}
|
||||
assert_eq!(server.auth_count(), 1); // Client reused the cached credentials when reconnecting
|
||||
|
||||
server.forbid_connections();
|
||||
server.disconnect().await;
|
||||
while !matches!(status.recv().await, Some(Status::ReconnectionError { .. })) {}
|
||||
|
||||
// Clear cached credentials after authentication fails
|
||||
server.roll_access_token();
|
||||
server.allow_connections();
|
||||
cx.foreground().advance_clock(Duration::from_secs(10));
|
||||
assert_eq!(server.auth_count(), 1);
|
||||
cx.foreground().advance_clock(Duration::from_secs(10));
|
||||
while !matches!(status.recv().await, Some(Status::Connected { .. })) {}
|
||||
assert_eq!(server.auth_count(), 2); // Client re-authenticated due to an invalid token
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_encode_and_decode_worktree_url() {
|
||||
let url = encode_worktree_url(5, "deadbeef");
|
||||
assert_eq!(decode_worktree_url(&url), Some((5, "deadbeef".to_string())));
|
||||
assert_eq!(
|
||||
decode_worktree_url(&format!("\n {}\t", url)),
|
||||
Some((5, "deadbeef".to_string()))
|
||||
);
|
||||
assert_eq!(decode_worktree_url("not://the-right-format"), None);
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_subscribing_to_entity(mut cx: TestAppContext) {
|
||||
cx.foreground().forbid_parking();
|
||||
|
||||
let user_id = 5;
|
||||
let mut client = Client::new(FakeHttpClient::with_404_response());
|
||||
let server = FakeServer::for_client(user_id, &mut client, &cx).await;
|
||||
|
||||
let model = cx.add_model(|_| Model { subscription: None });
|
||||
let (mut done_tx1, mut done_rx1) = postage::oneshot::channel();
|
||||
let (mut done_tx2, mut done_rx2) = postage::oneshot::channel();
|
||||
let _subscription1 = model.update(&mut cx, |_, cx| {
|
||||
client.subscribe_to_entity(
|
||||
1,
|
||||
cx,
|
||||
move |_, _: TypedEnvelope<proto::UnshareProject>, _, _| {
|
||||
postage::sink::Sink::try_send(&mut done_tx1, ()).unwrap();
|
||||
Ok(())
|
||||
},
|
||||
)
|
||||
});
|
||||
let _subscription2 = model.update(&mut cx, |_, cx| {
|
||||
client.subscribe_to_entity(
|
||||
2,
|
||||
cx,
|
||||
move |_, _: TypedEnvelope<proto::UnshareProject>, _, _| {
|
||||
postage::sink::Sink::try_send(&mut done_tx2, ()).unwrap();
|
||||
Ok(())
|
||||
},
|
||||
)
|
||||
});
|
||||
|
||||
// Ensure dropping a subscription for the same entity type still allows receiving of
|
||||
// messages for other entity IDs of the same type.
|
||||
let subscription3 = model.update(&mut cx, |_, cx| {
|
||||
client.subscribe_to_entity(
|
||||
3,
|
||||
cx,
|
||||
move |_, _: TypedEnvelope<proto::UnshareProject>, _, _| Ok(()),
|
||||
)
|
||||
});
|
||||
drop(subscription3);
|
||||
|
||||
server.send(proto::UnshareProject { project_id: 1 }).await;
|
||||
server.send(proto::UnshareProject { project_id: 2 }).await;
|
||||
done_rx1.recv().await.unwrap();
|
||||
done_rx2.recv().await.unwrap();
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_subscribing_after_dropping_subscription(mut cx: TestAppContext) {
|
||||
cx.foreground().forbid_parking();
|
||||
|
||||
let user_id = 5;
|
||||
let mut client = Client::new(FakeHttpClient::with_404_response());
|
||||
let server = FakeServer::for_client(user_id, &mut client, &cx).await;
|
||||
|
||||
let model = cx.add_model(|_| Model { subscription: None });
|
||||
let (mut done_tx1, _done_rx1) = postage::oneshot::channel();
|
||||
let (mut done_tx2, mut done_rx2) = postage::oneshot::channel();
|
||||
let subscription1 = model.update(&mut cx, |_, cx| {
|
||||
client.subscribe(cx, move |_, _: TypedEnvelope<proto::Ping>, _, _| {
|
||||
postage::sink::Sink::try_send(&mut done_tx1, ()).unwrap();
|
||||
Ok(())
|
||||
})
|
||||
});
|
||||
drop(subscription1);
|
||||
let _subscription2 = model.update(&mut cx, |_, cx| {
|
||||
client.subscribe(cx, move |_, _: TypedEnvelope<proto::Ping>, _, _| {
|
||||
postage::sink::Sink::try_send(&mut done_tx2, ()).unwrap();
|
||||
Ok(())
|
||||
})
|
||||
});
|
||||
server.send(proto::Ping {}).await;
|
||||
done_rx2.recv().await.unwrap();
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_dropping_subscription_in_handler(mut cx: TestAppContext) {
|
||||
cx.foreground().forbid_parking();
|
||||
|
||||
let user_id = 5;
|
||||
let mut client = Client::new(FakeHttpClient::with_404_response());
|
||||
let server = FakeServer::for_client(user_id, &mut client, &cx).await;
|
||||
|
||||
let model = cx.add_model(|_| Model { subscription: None });
|
||||
let (mut done_tx, mut done_rx) = postage::oneshot::channel();
|
||||
model.update(&mut cx, |model, cx| {
|
||||
model.subscription = Some(client.subscribe(
|
||||
cx,
|
||||
move |model, _: TypedEnvelope<proto::Ping>, _, _| {
|
||||
model.subscription.take();
|
||||
postage::sink::Sink::try_send(&mut done_tx, ()).unwrap();
|
||||
Ok(())
|
||||
},
|
||||
));
|
||||
});
|
||||
server.send(proto::Ping {}).await;
|
||||
done_rx.recv().await.unwrap();
|
||||
}
|
||||
|
||||
struct Model {
|
||||
subscription: Option<Subscription>,
|
||||
}
|
||||
|
||||
impl Entity for Model {
|
||||
type Event = ();
|
||||
}
|
||||
}
|
||||
26
crates/client/src/http.rs
Normal file
@@ -0,0 +1,26 @@
|
||||
pub use anyhow::{anyhow, Result};
|
||||
use futures::future::BoxFuture;
|
||||
use std::sync::Arc;
|
||||
pub use surf::{
|
||||
http::{Method, Response as ServerResponse},
|
||||
Request, Response, Url,
|
||||
};
|
||||
|
||||
pub trait HttpClient: Send + Sync {
|
||||
fn send<'a>(&'a self, req: Request) -> BoxFuture<'a, Result<Response>>;
|
||||
}
|
||||
|
||||
pub fn client() -> Arc<dyn HttpClient> {
|
||||
Arc::new(surf::client())
|
||||
}
|
||||
|
||||
impl HttpClient for surf::Client {
|
||||
fn send<'a>(&'a self, req: Request) -> BoxFuture<'a, Result<Response>> {
|
||||
Box::pin(async move {
|
||||
Ok(self
|
||||
.send(req)
|
||||
.await
|
||||
.map_err(|e| anyhow!("http request failed: {}", e))?)
|
||||
})
|
||||
}
|
||||
}
|
||||
210
crates/client/src/test.rs
Normal file
@@ -0,0 +1,210 @@
|
||||
use super::Client;
|
||||
use super::*;
|
||||
use crate::http::{HttpClient, Request, Response, ServerResponse};
|
||||
use futures::{future::BoxFuture, Future};
|
||||
use gpui::{ModelHandle, TestAppContext};
|
||||
use parking_lot::Mutex;
|
||||
use postage::{mpsc, prelude::Stream};
|
||||
use rpc::{proto, ConnectionId, Peer, Receipt, TypedEnvelope};
|
||||
use std::fmt;
|
||||
use std::sync::atomic::Ordering::SeqCst;
|
||||
use std::sync::{
|
||||
atomic::{AtomicBool, AtomicUsize},
|
||||
Arc,
|
||||
};
|
||||
|
||||
pub struct FakeServer {
|
||||
peer: Arc<Peer>,
|
||||
incoming: Mutex<Option<mpsc::Receiver<Box<dyn proto::AnyTypedEnvelope>>>>,
|
||||
connection_id: Mutex<Option<ConnectionId>>,
|
||||
forbid_connections: AtomicBool,
|
||||
auth_count: AtomicUsize,
|
||||
access_token: AtomicUsize,
|
||||
user_id: u64,
|
||||
}
|
||||
|
||||
impl FakeServer {
|
||||
pub async fn for_client(
|
||||
client_user_id: u64,
|
||||
client: &mut Arc<Client>,
|
||||
cx: &TestAppContext,
|
||||
) -> Arc<Self> {
|
||||
let server = Arc::new(Self {
|
||||
peer: Peer::new(),
|
||||
incoming: Default::default(),
|
||||
connection_id: Default::default(),
|
||||
forbid_connections: Default::default(),
|
||||
auth_count: Default::default(),
|
||||
access_token: Default::default(),
|
||||
user_id: client_user_id,
|
||||
});
|
||||
|
||||
Arc::get_mut(client)
|
||||
.unwrap()
|
||||
.override_authenticate({
|
||||
let server = server.clone();
|
||||
move |cx| {
|
||||
server.auth_count.fetch_add(1, SeqCst);
|
||||
let access_token = server.access_token.load(SeqCst).to_string();
|
||||
cx.spawn(move |_| async move {
|
||||
Ok(Credentials {
|
||||
user_id: client_user_id,
|
||||
access_token,
|
||||
})
|
||||
})
|
||||
}
|
||||
})
|
||||
.override_establish_connection({
|
||||
let server = server.clone();
|
||||
move |credentials, cx| {
|
||||
let credentials = credentials.clone();
|
||||
cx.spawn({
|
||||
let server = server.clone();
|
||||
move |cx| async move { server.establish_connection(&credentials, &cx).await }
|
||||
})
|
||||
}
|
||||
});
|
||||
|
||||
client
|
||||
.authenticate_and_connect(&cx.to_async())
|
||||
.await
|
||||
.unwrap();
|
||||
server
|
||||
}
|
||||
|
||||
pub async fn disconnect(&self) {
|
||||
self.peer.disconnect(self.connection_id()).await;
|
||||
self.connection_id.lock().take();
|
||||
self.incoming.lock().take();
|
||||
}
|
||||
|
||||
async fn establish_connection(
|
||||
&self,
|
||||
credentials: &Credentials,
|
||||
cx: &AsyncAppContext,
|
||||
) -> Result<Connection, EstablishConnectionError> {
|
||||
assert_eq!(credentials.user_id, self.user_id);
|
||||
|
||||
if self.forbid_connections.load(SeqCst) {
|
||||
Err(EstablishConnectionError::Other(anyhow!(
|
||||
"server is forbidding connections"
|
||||
)))?
|
||||
}
|
||||
|
||||
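// Reject credentials whose access token doesn't match the server's current token, simulating an expired or revoked token.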
if credentials.access_token != self.access_token.load(SeqCst).to_string() {
|
||||
Err(EstablishConnectionError::Unauthorized)?
|
||||
}
|
||||
|
||||
let (client_conn, server_conn, _) = Connection::in_memory();
|
||||
let (connection_id, io, incoming) = self.peer.add_connection(server_conn).await;
|
||||
cx.background().spawn(io).detach();
|
||||
*self.incoming.lock() = Some(incoming);
|
||||
*self.connection_id.lock() = Some(connection_id);
|
||||
Ok(client_conn)
|
||||
}
|
||||
|
||||
pub fn auth_count(&self) -> usize {
|
||||
self.auth_count.load(SeqCst)
|
||||
}
|
||||
|
||||
pub fn roll_access_token(&self) {
|
||||
self.access_token.fetch_add(1, SeqCst);
|
||||
}
|
||||
|
||||
pub fn forbid_connections(&self) {
|
||||
self.forbid_connections.store(true, SeqCst);
|
||||
}
|
||||
|
||||
pub fn allow_connections(&self) {
|
||||
self.forbid_connections.store(false, SeqCst);
|
||||
}
|
||||
|
||||
pub async fn send<T: proto::EnvelopedMessage>(&self, message: T) {
|
||||
self.peer.send(self.connection_id(), message).await.unwrap();
|
||||
}
|
||||
|
||||
pub async fn receive<M: proto::EnvelopedMessage>(&self) -> Result<TypedEnvelope<M>> {
|
||||
let message = self
|
||||
.incoming
|
||||
.lock()
|
||||
.as_mut()
|
||||
.expect("not connected")
|
||||
.recv()
|
||||
.await
|
||||
.ok_or_else(|| anyhow!("other half hung up"))?;
|
||||
let type_name = message.payload_type_name();
|
||||
Ok(*message
|
||||
.into_any()
|
||||
.downcast::<TypedEnvelope<M>>()
|
||||
.unwrap_or_else(|_| {
|
||||
panic!(
|
||||
"fake server received unexpected message type: {:?}",
|
||||
type_name
|
||||
);
|
||||
}))
|
||||
}
|
||||
|
||||
pub async fn respond<T: proto::RequestMessage>(
|
||||
&self,
|
||||
receipt: Receipt<T>,
|
||||
response: T::Response,
|
||||
) {
|
||||
self.peer.respond(receipt, response).await.unwrap()
|
||||
}
|
||||
|
||||
fn connection_id(&self) -> ConnectionId {
|
||||
self.connection_id.lock().expect("not connected")
|
||||
}
|
||||
|
||||
pub async fn build_user_store(
|
||||
&self,
|
||||
client: Arc<Client>,
|
||||
cx: &mut TestAppContext,
|
||||
) -> ModelHandle<UserStore> {
|
||||
let http_client = FakeHttpClient::with_404_response();
|
||||
let user_store = cx.add_model(|cx| UserStore::new(client, http_client, cx));
|
||||
assert_eq!(
|
||||
self.receive::<proto::GetUsers>()
|
||||
.await
|
||||
.unwrap()
|
||||
.payload
|
||||
.user_ids,
|
||||
&[self.user_id]
|
||||
);
|
||||
user_store
|
||||
}
|
||||
}
|
||||
|
||||
pub struct FakeHttpClient {
|
||||
handler:
|
||||
Box<dyn 'static + Send + Sync + Fn(Request) -> BoxFuture<'static, Result<ServerResponse>>>,
|
||||
}
|
||||
|
||||
impl FakeHttpClient {
|
||||
pub fn new<Fut, F>(handler: F) -> Arc<dyn HttpClient>
|
||||
where
|
||||
Fut: 'static + Send + Future<Output = Result<ServerResponse>>,
|
||||
F: 'static + Send + Sync + Fn(Request) -> Fut,
|
||||
{
|
||||
Arc::new(Self {
|
||||
handler: Box::new(move |req| Box::pin(handler(req))),
|
||||
})
|
||||
}
|
||||
|
||||
pub fn with_404_response() -> Arc<dyn HttpClient> {
|
||||
Self::new(|_| async move { Ok(ServerResponse::new(404)) })
|
||||
}
|
||||
}
|
||||
|
||||
impl fmt::Debug for FakeHttpClient {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
f.debug_struct("FakeHttpClient").finish()
|
||||
}
|
||||
}
|
||||
|
||||
impl HttpClient for FakeHttpClient {
|
||||
fn send<'a>(&'a self, req: Request) -> BoxFuture<'a, Result<Response>> {
|
||||
let future = (self.handler)(req);
|
||||
Box::pin(async move { future.await.map(Into::into) })
|
||||
}
|
||||
}
|
||||
266
crates/client/src/user.rs
Normal file
@@ -0,0 +1,266 @@
|
||||
use super::{
|
||||
http::{HttpClient, Method, Request, Url},
|
||||
proto, Client, Status, TypedEnvelope,
|
||||
};
|
||||
use anyhow::{anyhow, Context, Result};
|
||||
use futures::future;
|
||||
use gpui::{AsyncAppContext, Entity, ImageData, ModelContext, ModelHandle, Task};
|
||||
use postage::{prelude::Stream, sink::Sink, watch};
|
||||
use std::{
|
||||
collections::{HashMap, HashSet},
|
||||
sync::Arc,
|
||||
};
|
||||
use util::TryFutureExt as _;
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct User {
|
||||
pub id: u64,
|
||||
pub github_login: String,
|
||||
pub avatar: Option<Arc<ImageData>>,
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct Contact {
|
||||
pub user: Arc<User>,
|
||||
pub projects: Vec<ProjectMetadata>,
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct ProjectMetadata {
|
||||
pub id: u64,
|
||||
pub is_shared: bool,
|
||||
pub worktree_root_names: Vec<String>,
|
||||
pub guests: Vec<Arc<User>>,
|
||||
}
|
||||
|
||||
pub struct UserStore {
|
||||
users: HashMap<u64, Arc<User>>,
|
||||
current_user: watch::Receiver<Option<Arc<User>>>,
|
||||
contacts: Arc<[Contact]>,
|
||||
client: Arc<Client>,
|
||||
http: Arc<dyn HttpClient>,
|
||||
_maintain_contacts: Task<()>,
|
||||
_maintain_current_user: Task<()>,
|
||||
}
|
||||
|
||||
pub enum Event {}
|
||||
|
||||
impl Entity for UserStore {
|
||||
type Event = Event;
|
||||
}
|
||||
|
||||
impl UserStore {
|
||||
pub fn new(
|
||||
client: Arc<Client>,
|
||||
http: Arc<dyn HttpClient>,
|
||||
cx: &mut ModelContext<Self>,
|
||||
) -> Self {
|
||||
let (mut current_user_tx, current_user_rx) = watch::channel();
|
||||
let (mut update_contacts_tx, mut update_contacts_rx) =
|
||||
watch::channel::<Option<proto::UpdateContacts>>();
|
||||
let update_contacts_subscription = client.subscribe(
|
||||
cx,
|
||||
move |_: &mut Self, msg: TypedEnvelope<proto::UpdateContacts>, _, _| {
|
||||
let _ = update_contacts_tx.blocking_send(Some(msg.payload));
|
||||
Ok(())
|
||||
},
|
||||
);
|
||||
Self {
|
||||
users: Default::default(),
|
||||
current_user: current_user_rx,
|
||||
contacts: Arc::from([]),
|
||||
client: client.clone(),
|
||||
http,
|
||||
_maintain_contacts: cx.spawn_weak(|this, mut cx| async move {
|
||||
let _subscription = update_contacts_subscription;
|
||||
while let Some(message) = update_contacts_rx.recv().await {
|
||||
if let Some((message, this)) = message.zip(this.upgrade(&cx)) {
|
||||
this.update(&mut cx, |this, cx| this.update_contacts(message, cx))
|
||||
.log_err()
|
||||
.await;
|
||||
}
|
||||
}
|
||||
}),
|
||||
_maintain_current_user: cx.spawn_weak(|this, mut cx| async move {
|
||||
let mut status = client.status();
|
||||
while let Some(status) = status.recv().await {
|
||||
match status {
|
||||
Status::Connected { .. } => {
|
||||
if let Some((this, user_id)) = this.upgrade(&cx).zip(client.user_id()) {
|
||||
let user = this
|
||||
.update(&mut cx, |this, cx| this.fetch_user(user_id, cx))
|
||||
.log_err()
|
||||
.await;
|
||||
current_user_tx.send(user).await.ok();
|
||||
}
|
||||
}
|
||||
Status::SignedOut => {
|
||||
current_user_tx.send(None).await.ok();
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
}),
|
||||
}
|
||||
}
|
||||
|
||||
fn update_contacts(
|
||||
&mut self,
|
||||
message: proto::UpdateContacts,
|
||||
cx: &mut ModelContext<Self>,
|
||||
) -> Task<Result<()>> {
|
||||
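// Gather every user id referenced by the contact list, including project guests, so they can be loaded in one request.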
let mut user_ids = HashSet::new();
|
||||
for contact in &message.contacts {
|
||||
user_ids.insert(contact.user_id);
|
||||
user_ids.extend(contact.projects.iter().flat_map(|w| &w.guests).copied());
|
||||
}
|
||||
|
||||
let load_users = self.load_users(user_ids.into_iter().collect(), cx);
|
||||
cx.spawn(|this, mut cx| async move {
|
||||
load_users.await?;
|
||||
|
||||
let mut contacts = Vec::new();
|
||||
for contact in message.contacts {
|
||||
contacts.push(Contact::from_proto(contact, &this, &mut cx).await?);
|
||||
}
|
||||
|
||||
this.update(&mut cx, |this, cx| {
|
||||
contacts.sort_by(|a, b| a.user.github_login.cmp(&b.user.github_login));
|
||||
this.contacts = contacts.into();
|
||||
cx.notify();
|
||||
});
|
||||
|
||||
Ok(())
|
||||
})
|
||||
}
|
||||
|
||||
pub fn contacts(&self) -> &Arc<[Contact]> {
|
||||
&self.contacts
|
||||
}
|
||||
|
||||
pub fn load_users(
|
||||
&mut self,
|
||||
mut user_ids: Vec<u64>,
|
||||
cx: &mut ModelContext<Self>,
|
||||
) -> Task<Result<()>> {
|
||||
let rpc = self.client.clone();
|
||||
let http = self.http.clone();
|
||||
user_ids.retain(|id| !self.users.contains_key(id));
|
||||
cx.spawn_weak(|this, mut cx| async move {
|
||||
if !user_ids.is_empty() {
|
||||
let response = rpc.request(proto::GetUsers { user_ids }).await?;
|
||||
let new_users = future::join_all(
|
||||
response
|
||||
.users
|
||||
.into_iter()
|
||||
.map(|user| User::new(user, http.as_ref())),
|
||||
)
|
||||
.await;
|
||||
|
||||
if let Some(this) = this.upgrade(&cx) {
|
||||
this.update(&mut cx, |this, _| {
|
||||
for user in new_users {
|
||||
this.users.insert(user.id, Arc::new(user));
|
||||
}
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
Ok(())
|
||||
})
|
||||
}
|
||||
|
||||
pub fn fetch_user(
|
||||
&mut self,
|
||||
user_id: u64,
|
||||
cx: &mut ModelContext<Self>,
|
||||
) -> Task<Result<Arc<User>>> {
|
||||
if let Some(user) = self.users.get(&user_id).cloned() {
|
||||
return cx.spawn_weak(|_, _| async move { Ok(user) });
|
||||
}
|
||||
|
||||
let load_users = self.load_users(vec![user_id], cx);
|
||||
cx.spawn(|this, mut cx| async move {
|
||||
load_users.await?;
|
||||
this.update(&mut cx, |this, _| {
|
||||
this.users
|
||||
.get(&user_id)
|
||||
.cloned()
|
||||
.ok_or_else(|| anyhow!("server responded with no users"))
|
||||
})
|
||||
})
|
||||
}
|
||||
|
||||
pub fn current_user(&self) -> Option<Arc<User>> {
|
||||
self.current_user.borrow().clone()
|
||||
}
|
||||
|
||||
pub fn watch_current_user(&self) -> watch::Receiver<Option<Arc<User>>> {
|
||||
self.current_user.clone()
|
||||
}
|
||||
}
|
||||
|
||||
impl User {
|
||||
async fn new(message: proto::User, http: &dyn HttpClient) -> Self {
|
||||
User {
|
||||
id: message.id,
|
||||
github_login: message.github_login,
|
||||
avatar: fetch_avatar(http, &message.avatar_url).warn_on_err().await,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Contact {
|
||||
async fn from_proto(
|
||||
contact: proto::Contact,
|
||||
user_store: &ModelHandle<UserStore>,
|
||||
cx: &mut AsyncAppContext,
|
||||
) -> Result<Self> {
|
||||
let user = user_store
|
||||
.update(cx, |user_store, cx| {
|
||||
user_store.fetch_user(contact.user_id, cx)
|
||||
})
|
||||
.await?;
|
||||
let mut projects = Vec::new();
|
||||
for project in contact.projects {
|
||||
let mut guests = Vec::new();
|
||||
for participant_id in project.guests {
|
||||
guests.push(
|
||||
user_store
|
||||
.update(cx, |user_store, cx| {
|
||||
user_store.fetch_user(participant_id, cx)
|
||||
})
|
||||
.await?,
|
||||
);
|
||||
}
|
||||
projects.push(ProjectMetadata {
|
||||
id: project.id,
|
||||
worktree_root_names: project.worktree_root_names.clone(),
|
||||
is_shared: project.is_shared,
|
||||
guests,
|
||||
});
|
||||
}
|
||||
Ok(Self { user, projects })
|
||||
}
|
||||
}
|
||||
|
||||
async fn fetch_avatar(http: &dyn HttpClient, url: &str) -> Result<Arc<ImageData>> {
|
||||
let url = Url::parse(url).with_context(|| format!("failed to parse avatar url {:?}", url))?;
|
||||
let mut request = Request::new(Method::Get, url);
|
||||
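// Follow redirects, since avatar URLs may point through a CDN.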
request.middleware(surf::middleware::Redirect::default());
|
||||
|
||||
let mut response = http
|
||||
.send(request)
|
||||
.await
|
||||
.map_err(|e| anyhow!("failed to send user avatar request: {}", e))?;
|
||||
if !response.status().is_success() {
|
||||
return Err(anyhow!("avatar request failed {:?}", response.status()));
|
||||
}
|
||||
let bytes = response
|
||||
.body_bytes()
|
||||
.await
|
||||
.map_err(|e| anyhow!("failed to read user avatar response body: {}", e))?;
|
||||
let format = image::guess_format(&bytes)?;
|
||||
let image = image::load_from_memory_with_format(&bytes, format)?.into_bgra8();
|
||||
Ok(ImageData::new(image))
|
||||
}
|
||||
11
crates/clock/Cargo.toml
Normal file
@@ -0,0 +1,11 @@
|
||||
[package]
|
||||
name = "clock"
|
||||
version = "0.1.0"
|
||||
edition = "2018"
|
||||
|
||||
[lib]
|
||||
path = "src/clock.rs"
|
||||
|
||||
[dependencies]
|
||||
smallvec = { version = "1.6", features = ["union"] }
|
||||
rpc = { path = "../rpc" }
|
||||
@@ -1,9 +1,8 @@
|
||||
use smallvec::SmallVec;
|
||||
use std::{
|
||||
cmp::{self, Ordering},
|
||||
fmt,
|
||||
fmt, iter,
|
||||
ops::{Add, AddAssign},
|
||||
slice,
|
||||
};
|
||||
|
||||
pub type ReplicaId = u16;
|
||||
@@ -22,6 +21,15 @@ pub struct Lamport {
|
||||
}
|
||||
|
||||
impl Local {
|
||||
pub const MIN: Self = Self {
|
||||
replica_id: ReplicaId::MIN,
|
||||
value: Seq::MIN,
|
||||
};
|
||||
pub const MAX: Self = Self {
|
||||
replica_id: ReplicaId::MAX,
|
||||
value: Seq::MAX,
|
||||
};
|
||||
|
||||
pub fn new(replica_id: ReplicaId) -> Self {
|
||||
Self {
|
||||
replica_id,
|
||||
@@ -59,10 +67,10 @@ impl<'a> AddAssign<&'a Local> for Local {
|
||||
}
|
||||
|
||||
#[derive(Clone, Default, Hash, Eq, PartialEq)]
|
||||
pub struct Global(SmallVec<[Local; 3]>);
|
||||
pub struct Global(SmallVec<[u32; 8]>);
|
||||
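// The vector clock is now stored densely, indexed by replica id; a missing or zero entry means "not yet observed".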
|
||||
impl From<Vec<zrpc::proto::VectorClockEntry>> for Global {
|
||||
fn from(message: Vec<zrpc::proto::VectorClockEntry>) -> Self {
|
||||
impl From<Vec<rpc::proto::VectorClockEntry>> for Global {
|
||||
fn from(message: Vec<rpc::proto::VectorClockEntry>) -> Self {
|
||||
let mut version = Self::new();
|
||||
for entry in message {
|
||||
version.observe(Local {
|
||||
@@ -74,11 +82,11 @@ impl From<Vec<zrpc::proto::VectorClockEntry>> for Global {
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> From<&'a Global> for Vec<zrpc::proto::VectorClockEntry> {
|
||||
impl<'a> From<&'a Global> for Vec<rpc::proto::VectorClockEntry> {
|
||||
fn from(version: &'a Global) -> Self {
|
||||
version
|
||||
.iter()
|
||||
.map(|entry| zrpc::proto::VectorClockEntry {
|
||||
.map(|entry| rpc::proto::VectorClockEntry {
|
||||
replica_id: entry.replica_id as u32,
|
||||
timestamp: entry.value,
|
||||
})
|
||||
@@ -86,81 +94,125 @@ impl<'a> From<&'a Global> for Vec<zrpc::proto::VectorClockEntry> {
|
||||
}
|
||||
}
|
||||
|
||||
impl From<Global> for Vec<rpc::proto::VectorClockEntry> {
|
||||
fn from(version: Global) -> Self {
|
||||
(&version).into()
|
||||
}
|
||||
}
|
||||
|
||||
impl Global {
|
||||
pub fn new() -> Self {
|
||||
Self::default()
|
||||
}
|
||||
|
||||
pub fn get(&self, replica_id: ReplicaId) -> Seq {
|
||||
self.0
|
||||
.iter()
|
||||
.find(|t| t.replica_id == replica_id)
|
||||
.map_or(0, |t| t.value)
|
||||
self.0.get(replica_id as usize).copied().unwrap_or(0) as Seq
|
||||
}
|
||||
|
||||
pub fn observe(&mut self, timestamp: Local) {
|
||||
if let Some(entry) = self
|
||||
.0
|
||||
.iter_mut()
|
||||
.find(|t| t.replica_id == timestamp.replica_id)
|
||||
{
|
||||
entry.value = cmp::max(entry.value, timestamp.value);
|
||||
} else {
|
||||
self.0.push(timestamp);
|
||||
if timestamp.value > 0 {
|
||||
let new_len = timestamp.replica_id as usize + 1;
|
||||
if new_len > self.0.len() {
|
||||
self.0.resize(new_len, 0);
|
||||
}
|
||||
|
||||
let entry = &mut self.0[timestamp.replica_id as usize];
|
||||
*entry = cmp::max(*entry, timestamp.value);
|
||||
}
|
||||
}
|
||||
|
||||
pub fn join(&mut self, other: &Self) {
|
||||
for timestamp in other.0.iter() {
|
||||
self.observe(*timestamp);
|
||||
if other.0.len() > self.0.len() {
|
||||
self.0.resize(other.0.len(), 0);
|
||||
}
|
||||
|
||||
for (left, right) in self.0.iter_mut().zip(&other.0) {
|
||||
*left = cmp::max(*left, *right);
|
||||
}
|
||||
}
|
||||
|
||||
pub fn meet(&mut self, other: &Self) {
|
||||
for timestamp in other.0.iter() {
|
||||
if let Some(entry) = self
|
||||
.0
|
||||
.iter_mut()
|
||||
.find(|t| t.replica_id == timestamp.replica_id)
|
||||
{
|
||||
entry.value = cmp::min(entry.value, timestamp.value);
|
||||
} else {
|
||||
self.0.push(*timestamp);
|
||||
if other.0.len() > self.0.len() {
|
||||
self.0.resize(other.0.len(), 0);
|
||||
}
|
||||
|
||||
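// Take the element-wise minimum, treating zero as "unobserved", then trim trailing zero entries.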
let mut new_len = 0;
|
||||
for (ix, (left, right)) in self
|
||||
.0
|
||||
.iter_mut()
|
||||
.zip(other.0.iter().chain(iter::repeat(&0)))
|
||||
.enumerate()
|
||||
{
|
||||
if *left == 0 {
|
||||
*left = *right;
|
||||
} else if *right > 0 {
|
||||
*left = cmp::min(*left, *right);
|
||||
}
|
||||
|
||||
if *left != 0 {
|
||||
new_len = ix + 1;
|
||||
}
|
||||
}
|
||||
self.0.resize(new_len, 0);
|
||||
}
|
||||
|
||||
pub fn observed(&self, timestamp: Local) -> bool {
|
||||
self.get(timestamp.replica_id) >= timestamp.value
|
||||
}
|
||||
|
||||
pub fn changed_since(&self, other: &Self) -> bool {
|
||||
self.0.iter().any(|t| t.value > other.get(t.replica_id))
|
||||
}
|
||||
|
||||
pub fn iter(&self) -> slice::Iter<Local> {
|
||||
self.0.iter()
|
||||
}
|
||||
}
|
||||
|
||||
impl PartialOrd for Global {
|
||||
fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
|
||||
let mut global_ordering = Ordering::Equal;
|
||||
|
||||
for timestamp in self.0.iter().chain(other.0.iter()) {
|
||||
let ordering = self
|
||||
.get(timestamp.replica_id)
|
||||
.cmp(&other.get(timestamp.replica_id));
|
||||
if ordering != Ordering::Equal {
|
||||
if global_ordering == Ordering::Equal {
|
||||
global_ordering = ordering;
|
||||
} else if ordering != global_ordering {
|
||||
return None;
|
||||
pub fn observed_any(&self, other: &Self) -> bool {
|
||||
let mut lhs = self.0.iter();
|
||||
let mut rhs = other.0.iter();
|
||||
loop {
|
||||
if let Some(left) = lhs.next() {
|
||||
if let Some(right) = rhs.next() {
|
||||
if *right > 0 && left >= right {
|
||||
return true;
|
||||
}
|
||||
} else {
|
||||
return false;
|
||||
}
|
||||
} else {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Some(global_ordering)
|
||||
pub fn observed_all(&self, other: &Self) -> bool {
|
||||
let mut lhs = self.0.iter();
|
||||
let mut rhs = other.0.iter();
|
||||
loop {
|
||||
if let Some(left) = lhs.next() {
|
||||
if let Some(right) = rhs.next() {
|
||||
if left < right {
|
||||
return false;
|
||||
}
|
||||
} else {
|
||||
return true;
|
||||
}
|
||||
} else {
|
||||
return rhs.next().is_none();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub fn changed_since(&self, other: &Self) -> bool {
|
||||
if self.0.len() > other.0.len() {
|
||||
return true;
|
||||
}
|
||||
for (left, right) in self.0.iter().zip(other.0.iter()) {
|
||||
if left > right {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
false
|
||||
}
|
||||
|
||||
pub fn iter<'a>(&'a self) -> impl 'a + Iterator<Item = Local> {
|
||||
self.0.iter().enumerate().map(|(replica_id, seq)| Local {
|
||||
replica_id: replica_id as ReplicaId,
|
||||
value: *seq,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
@@ -213,11 +265,11 @@ impl fmt::Debug for Lamport {
|
||||
impl fmt::Debug for Global {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
write!(f, "Global {{")?;
|
||||
for (i, element) in self.0.iter().enumerate() {
|
||||
if i > 0 {
|
||||
for timestamp in self.iter() {
|
||||
if timestamp.replica_id > 0 {
|
||||
write!(f, ", ")?;
|
||||
}
|
||||
write!(f, "{}: {}", element.replica_id, element.value)?;
|
||||
write!(f, "{}: {}", timestamp.replica_id, timestamp.value)?;
|
||||
}
|
||||
write!(f, "}}")
|
||||
}
|
||||
13
crates/collections/Cargo.toml
Normal file
@@ -0,0 +1,13 @@
|
||||
[package]
|
||||
name = "collections"
|
||||
version = "0.1.0"
|
||||
edition = "2021"
|
||||
|
||||
[lib]
|
||||
path = "src/collections.rs"
|
||||
|
||||
[features]
|
||||
test-support = ["seahash"]
|
||||
|
||||
[dependencies]
|
||||
seahash = { version = "4.1", optional = true }
|
||||
26
crates/collections/src/collections.rs
Normal file
@@ -0,0 +1,26 @@
|
||||
#[cfg(feature = "test-support")]
|
||||
#[derive(Clone, Default)]
|
||||
pub struct DeterministicState;
|
||||
|
||||
#[cfg(feature = "test-support")]
|
||||
impl std::hash::BuildHasher for DeterministicState {
|
||||
type Hasher = seahash::SeaHasher;
|
||||
|
||||
fn build_hasher(&self) -> Self::Hasher {
|
||||
seahash::SeaHasher::new()
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(feature = "test-support")]
|
||||
pub type HashMap<K, V> = std::collections::HashMap<K, V, DeterministicState>;
|
||||
|
||||
#[cfg(feature = "test-support")]
|
||||
pub type HashSet<T> = std::collections::HashSet<T, DeterministicState>;
|
||||
|
||||
#[cfg(not(feature = "test-support"))]
|
||||
pub type HashMap<K, V> = std::collections::HashMap<K, V>;
|
||||
|
||||
#[cfg(not(feature = "test-support"))]
|
||||
pub type HashSet<T> = std::collections::HashSet<T>;
|
||||
|
||||
pub use std::collections::*;
|
||||
14
crates/contacts_panel/Cargo.toml
Normal file
@@ -0,0 +1,14 @@
|
||||
[package]
|
||||
name = "contacts_panel"
|
||||
version = "0.1.0"
|
||||
edition = "2018"
|
||||
|
||||
[lib]
|
||||
path = "src/contacts_panel.rs"
|
||||
|
||||
[dependencies]
|
||||
client = { path = "../client" }
|
||||
gpui = { path = "../gpui" }
|
||||
theme = { path = "../theme" }
|
||||
workspace = { path = "../workspace" }
|
||||
postage = { version = "0.4.1", features = ["futures-traits"] }
|
||||
246
crates/contacts_panel/src/contacts_panel.rs
Normal file
@@ -0,0 +1,246 @@
|
||||
use std::sync::Arc;
|
||||
|
||||
use client::{Contact, UserStore};
|
||||
use gpui::{
|
||||
elements::*,
|
||||
geometry::{rect::RectF, vector::vec2f},
|
||||
platform::CursorStyle,
|
||||
Element, ElementBox, Entity, LayoutContext, ModelHandle, RenderContext, Subscription, View,
|
||||
ViewContext,
|
||||
};
|
||||
use postage::watch;
|
||||
use workspace::{AppState, JoinProject, JoinProjectParams, Settings};
|
||||
|
||||
pub struct ContactsPanel {
|
||||
contacts: ListState,
|
||||
user_store: ModelHandle<UserStore>,
|
||||
settings: watch::Receiver<Settings>,
|
||||
_maintain_contacts: Subscription,
|
||||
}
|
||||
|
||||
impl ContactsPanel {
|
||||
pub fn new(app_state: Arc<AppState>, cx: &mut ViewContext<Self>) -> Self {
|
||||
Self {
|
||||
contacts: ListState::new(
|
||||
app_state.user_store.read(cx).contacts().len(),
|
||||
Orientation::Top,
|
||||
1000.,
|
||||
{
|
||||
let app_state = app_state.clone();
|
||||
move |ix, cx| {
|
||||
let user_store = app_state.user_store.read(cx);
|
||||
let contacts = user_store.contacts().clone();
|
||||
let current_user_id = user_store.current_user().map(|user| user.id);
|
||||
Self::render_collaborator(
|
||||
&contacts[ix],
|
||||
current_user_id,
|
||||
app_state.clone(),
|
||||
cx,
|
||||
)
|
||||
}
|
||||
},
|
||||
),
|
||||
_maintain_contacts: cx.observe(&app_state.user_store, Self::update_contacts),
|
||||
user_store: app_state.user_store.clone(),
|
||||
settings: app_state.settings.clone(),
|
||||
}
|
||||
}
|
||||
|
||||
fn update_contacts(&mut self, _: ModelHandle<UserStore>, cx: &mut ViewContext<Self>) {
|
||||
self.contacts
|
||||
.reset(self.user_store.read(cx).contacts().len());
|
||||
cx.notify();
|
||||
}
|
||||
|
||||
fn render_collaborator(
|
||||
collaborator: &Contact,
|
||||
current_user_id: Option<u64>,
|
||||
app_state: Arc<AppState>,
|
||||
cx: &mut LayoutContext,
|
||||
) -> ElementBox {
|
||||
let theme = &app_state.settings.borrow().theme.contacts_panel;
|
||||
let project_count = collaborator.projects.len();
|
||||
let font_cache = cx.font_cache();
|
||||
let line_height = theme.unshared_project.name.text.line_height(font_cache);
|
||||
let cap_height = theme.unshared_project.name.text.cap_height(font_cache);
|
||||
let baseline_offset = theme.unshared_project.name.text.baseline_offset(font_cache)
|
||||
+ (theme.unshared_project.height - line_height) / 2.;
|
||||
let tree_branch_width = theme.tree_branch_width;
|
||||
let tree_branch_color = theme.tree_branch_color;
|
||||
let host_avatar_height = theme
|
||||
.host_avatar
|
||||
.width
|
||||
.or(theme.host_avatar.height)
|
||||
.unwrap_or(0.);
|
||||
|
||||
Flex::column()
|
||||
.with_child(
|
||||
Flex::row()
|
||||
.with_children(collaborator.user.avatar.clone().map(|avatar| {
|
||||
Image::new(avatar)
|
||||
.with_style(theme.host_avatar)
|
||||
.aligned()
|
||||
.left()
|
||||
.boxed()
|
||||
}))
|
||||
.with_child(
|
||||
Label::new(
|
||||
collaborator.user.github_login.clone(),
|
||||
theme.host_username.text.clone(),
|
||||
)
|
||||
.contained()
|
||||
.with_style(theme.host_username.container)
|
||||
.aligned()
|
||||
.left()
|
||||
.boxed(),
|
||||
)
|
||||
.constrained()
|
||||
.with_height(theme.host_row_height)
|
||||
.boxed(),
|
||||
)
|
||||
.with_children(
|
||||
collaborator
|
||||
.projects
|
||||
.iter()
|
||||
.enumerate()
|
||||
.map(|(ix, project)| {
|
||||
let project_id = project.id;
|
||||
|
||||
Flex::row()
|
||||
.with_child(
|
||||
Canvas::new(move |bounds, _, cx| {
|
||||
let start_x = bounds.min_x() + (bounds.width() / 2.)
|
||||
- (tree_branch_width / 2.);
|
||||
let end_x = bounds.max_x();
|
||||
let start_y = bounds.min_y();
|
||||
let end_y =
|
||||
bounds.min_y() + baseline_offset - (cap_height / 2.);
|
||||
|
||||
cx.scene.push_quad(gpui::Quad {
|
||||
bounds: RectF::from_points(
|
||||
vec2f(start_x, start_y),
|
||||
vec2f(
|
||||
start_x + tree_branch_width,
|
||||
if ix + 1 == project_count {
|
||||
end_y
|
||||
} else {
|
||||
bounds.max_y()
|
||||
},
|
||||
),
|
||||
),
|
||||
background: Some(tree_branch_color),
|
||||
border: gpui::Border::default(),
|
||||
corner_radius: 0.,
|
||||
});
|
||||
cx.scene.push_quad(gpui::Quad {
|
||||
bounds: RectF::from_points(
|
||||
vec2f(start_x, end_y),
|
||||
vec2f(end_x, end_y + tree_branch_width),
|
||||
),
|
||||
background: Some(tree_branch_color),
|
||||
border: gpui::Border::default(),
|
||||
corner_radius: 0.,
|
||||
});
|
||||
})
|
||||
.constrained()
|
||||
.with_width(host_avatar_height)
|
||||
.boxed(),
|
||||
)
|
||||
.with_child({
|
||||
let is_host = Some(collaborator.user.id) == current_user_id;
|
||||
let is_guest = !is_host
|
||||
&& project
|
||||
.guests
|
||||
.iter()
|
||||
.any(|guest| Some(guest.id) == current_user_id);
|
||||
let is_shared = project.is_shared;
|
||||
let app_state = app_state.clone();
|
||||
|
||||
MouseEventHandler::new::<ContactsPanel, _, _, _>(
|
||||
project_id as usize,
|
||||
cx,
|
||||
|mouse_state, _| {
|
||||
let style = match (project.is_shared, mouse_state.hovered) {
|
||||
(false, false) => &theme.unshared_project,
|
||||
(false, true) => &theme.hovered_unshared_project,
|
||||
(true, false) => &theme.shared_project,
|
||||
(true, true) => &theme.hovered_shared_project,
|
||||
};
|
||||
|
||||
Flex::row()
|
||||
.with_child(
|
||||
Label::new(
|
||||
project.worktree_root_names.join(", "),
|
||||
style.name.text.clone(),
|
||||
)
|
||||
.aligned()
|
||||
.left()
|
||||
.contained()
|
||||
.with_style(style.name.container)
|
||||
.boxed(),
|
||||
)
|
||||
.with_children(project.guests.iter().filter_map(
|
||||
|participant| {
|
||||
participant.avatar.clone().map(|avatar| {
|
||||
Image::new(avatar)
|
||||
.with_style(style.guest_avatar)
|
||||
.aligned()
|
||||
.left()
|
||||
.contained()
|
||||
.with_margin_right(
|
||||
style.guest_avatar_spacing,
|
||||
)
|
||||
.boxed()
|
||||
})
|
||||
},
|
||||
))
|
||||
.contained()
|
||||
.with_style(style.container)
|
||||
.constrained()
|
||||
.with_height(style.height)
|
||||
.boxed()
|
||||
},
|
||||
)
|
||||
.with_cursor_style(if is_host || is_shared {
|
||||
CursorStyle::PointingHand
|
||||
} else {
|
||||
CursorStyle::Arrow
|
||||
})
|
||||
.on_click(move |cx| {
|
||||
if !is_host && !is_guest {
|
||||
cx.dispatch_global_action(JoinProject(JoinProjectParams {
|
||||
project_id,
|
||||
app_state: app_state.clone(),
|
||||
}));
|
||||
}
|
||||
})
|
||||
.flexible(1., true)
|
||||
.boxed()
|
||||
})
|
||||
.constrained()
|
||||
.with_height(theme.unshared_project.height)
|
||||
.boxed()
|
||||
}),
|
||||
)
|
||||
.boxed()
|
||||
}
|
||||
}
|
||||
|
||||
pub enum Event {}
|
||||
|
||||
impl Entity for ContactsPanel {
|
||||
type Event = Event;
|
||||
}
|
||||
|
||||
impl View for ContactsPanel {
|
||||
fn ui_name() -> &'static str {
|
||||
"ContactsPanel"
|
||||
}
|
||||
|
||||
fn render(&mut self, _: &mut RenderContext<Self>) -> ElementBox {
|
||||
let theme = &self.settings.borrow().theme.contacts_panel;
|
||||
Container::new(List::new(self.contacts.clone()).boxed())
|
||||
.with_style(theme.container)
|
||||
.boxed()
|
||||
}
|
||||
}
|
||||
27
crates/diagnostics/Cargo.toml
Normal file
27
crates/diagnostics/Cargo.toml
Normal file
@@ -0,0 +1,27 @@
|
||||
[package]
|
||||
name = "diagnostics"
|
||||
version = "0.1.0"
|
||||
edition = "2021"
|
||||
|
||||
[lib]
|
||||
path = "src/diagnostics.rs"
|
||||
|
||||
[dependencies]
|
||||
anyhow = "1.0"
|
||||
collections = { path = "../collections" }
|
||||
editor = { path = "../editor" }
|
||||
language = { path = "../language" }
|
||||
gpui = { path = "../gpui" }
|
||||
project = { path = "../project" }
|
||||
util = { path = "../util" }
|
||||
workspace = { path = "../workspace" }
|
||||
postage = { version = "0.4", features = ["futures-traits"] }
|
||||
|
||||
[dev-dependencies]
|
||||
unindent = "0.1"
|
||||
client = { path = "../client", features = ["test-support"] }
|
||||
editor = { path = "../editor", features = ["test-support"] }
|
||||
language = { path = "../language", features = ["test-support"] }
|
||||
gpui = { path = "../gpui", features = ["test-support"] }
|
||||
workspace = { path = "../workspace", features = ["test-support"] }
|
||||
serde_json = { version = "1", features = ["preserve_order"] }
|
||||
882
crates/diagnostics/src/diagnostics.rs
Normal file
882
crates/diagnostics/src/diagnostics.rs
Normal file
@@ -0,0 +1,882 @@
|
||||
pub mod items;
|
||||
|
||||
use anyhow::Result;
|
||||
use collections::{HashMap, HashSet, BTreeSet};
|
||||
use editor::{
|
||||
context_header_renderer, diagnostic_block_renderer, diagnostic_header_renderer,
|
||||
display_map::{BlockDisposition, BlockId, BlockProperties},
|
||||
items::BufferItemHandle,
|
||||
Autoscroll, BuildSettings, Editor, ExcerptId, ExcerptProperties, MultiBuffer, ToOffset,
|
||||
};
|
||||
use gpui::{
|
||||
action, elements::*, keymap::Binding, AppContext, Entity, ModelHandle, MutableAppContext,
|
||||
RenderContext, Task, View, ViewContext, ViewHandle, WeakViewHandle,
|
||||
};
|
||||
use language::{Bias, Buffer, DiagnosticEntry, Point, Selection, SelectionGoal};
|
||||
use postage::watch;
|
||||
use project::{Project, ProjectPath, WorktreeId};
|
||||
use std::{cmp::Ordering, mem, ops::Range};
|
||||
use util::TryFutureExt;
|
||||
use workspace::Workspace;
|
||||
|
||||
action!(Deploy);
|
||||
action!(OpenExcerpts);
|
||||
|
||||
const CONTEXT_LINE_COUNT: u32 = 1;
|
||||
|
||||
pub fn init(cx: &mut MutableAppContext) {
|
||||
cx.add_bindings([
|
||||
Binding::new("alt-shift-D", Deploy, Some("Workspace")),
|
||||
Binding::new(
|
||||
"alt-shift-D",
|
||||
OpenExcerpts,
|
||||
Some("ProjectDiagnosticsEditor"),
|
||||
),
|
||||
]);
|
||||
cx.add_action(ProjectDiagnosticsEditor::deploy);
|
||||
cx.add_action(ProjectDiagnosticsEditor::open_excerpts);
|
||||
}
|
||||
|
||||
type Event = editor::Event;
|
||||
|
||||
struct ProjectDiagnostics {
|
||||
project: ModelHandle<Project>,
|
||||
}
|
||||
|
||||
struct ProjectDiagnosticsEditor {
|
||||
model: ModelHandle<ProjectDiagnostics>,
|
||||
workspace: WeakViewHandle<Workspace>,
|
||||
editor: ViewHandle<Editor>,
|
||||
excerpts: ModelHandle<MultiBuffer>,
|
||||
path_states: Vec<(ProjectPath, Vec<DiagnosticGroupState>)>,
|
||||
paths_to_update: HashMap<WorktreeId, BTreeSet<ProjectPath>>,
|
||||
build_settings: BuildSettings,
|
||||
settings: watch::Receiver<workspace::Settings>,
|
||||
}
|
||||
|
||||
struct DiagnosticGroupState {
|
||||
primary_diagnostic: DiagnosticEntry<language::Anchor>,
|
||||
primary_excerpt_ix: usize,
|
||||
excerpts: Vec<ExcerptId>,
|
||||
blocks: HashSet<BlockId>,
|
||||
block_count: usize,
|
||||
}
|
||||
|
||||
impl ProjectDiagnostics {
|
||||
fn new(project: ModelHandle<Project>) -> Self {
|
||||
Self { project }
|
||||
}
|
||||
}
|
||||
|
||||
impl Entity for ProjectDiagnostics {
|
||||
type Event = ();
|
||||
}
|
||||
|
||||
impl Entity for ProjectDiagnosticsEditor {
|
||||
type Event = Event;
|
||||
}
|
||||
|
||||
impl View for ProjectDiagnosticsEditor {
|
||||
fn ui_name() -> &'static str {
|
||||
"ProjectDiagnosticsEditor"
|
||||
}
|
||||
|
||||
fn render(&mut self, _: &mut RenderContext<Self>) -> ElementBox {
|
||||
if self.path_states.is_empty() {
|
||||
let theme = &self.settings.borrow().theme.project_diagnostics;
|
||||
Label::new(
|
||||
"No problems detected in the project".to_string(),
|
||||
theme.empty_message.clone(),
|
||||
)
|
||||
.aligned()
|
||||
.contained()
|
||||
.with_style(theme.container)
|
||||
.boxed()
|
||||
} else {
|
||||
ChildView::new(self.editor.id()).boxed()
|
||||
}
|
||||
}
|
||||
|
||||
fn on_focus(&mut self, cx: &mut ViewContext<Self>) {
|
||||
if !self.path_states.is_empty() {
|
||||
cx.focus(&self.editor);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl ProjectDiagnosticsEditor {
|
||||
fn new(
|
||||
model: ModelHandle<ProjectDiagnostics>,
|
||||
workspace: WeakViewHandle<Workspace>,
|
||||
settings: watch::Receiver<workspace::Settings>,
|
||||
cx: &mut ViewContext<Self>,
|
||||
) -> Self {
|
||||
let project = model.read(cx).project.clone();
|
||||
cx.subscribe(&project, |this, _, event, cx| match event {
|
||||
project::Event::DiskBasedDiagnosticsUpdated { worktree_id } => {
|
||||
if let Some(paths) = this.paths_to_update.remove(&worktree_id) {
|
||||
this.update_excerpts(paths, cx);
|
||||
}
|
||||
}
|
||||
project::Event::DiagnosticsUpdated(path) => {
|
||||
this.paths_to_update
|
||||
.entry(path.worktree_id)
|
||||
.or_default()
|
||||
.insert(path.clone());
|
||||
}
|
||||
_ => {}
|
||||
})
|
||||
.detach();
|
||||
|
||||
let excerpts = cx.add_model(|cx| MultiBuffer::new(project.read(cx).replica_id()));
|
||||
let build_settings = editor::settings_builder(excerpts.downgrade(), settings.clone());
|
||||
let editor =
|
||||
cx.add_view(|cx| Editor::for_buffer(excerpts.clone(), build_settings.clone(), cx));
|
||||
cx.subscribe(&editor, |_, _, event, cx| cx.emit(*event))
|
||||
.detach();
|
||||
|
||||
let paths_to_update = project
|
||||
.read(cx)
|
||||
.diagnostic_summaries(cx)
|
||||
.map(|e| e.0)
|
||||
.collect();
|
||||
let this = Self {
|
||||
model,
|
||||
workspace,
|
||||
excerpts,
|
||||
editor,
|
||||
build_settings,
|
||||
settings,
|
||||
path_states: Default::default(),
|
||||
paths_to_update: Default::default(),
|
||||
};
|
||||
this.update_excerpts(paths_to_update, cx);
|
||||
this
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
fn text(&self, cx: &AppContext) -> String {
|
||||
self.editor.read(cx).text(cx)
|
||||
}
|
||||
|
||||
fn deploy(workspace: &mut Workspace, _: &Deploy, cx: &mut ViewContext<Workspace>) {
|
||||
if let Some(existing) = workspace.item_of_type::<ProjectDiagnostics>(cx) {
|
||||
workspace.activate_item(&existing, cx);
|
||||
} else {
|
||||
let diagnostics =
|
||||
cx.add_model(|_| ProjectDiagnostics::new(workspace.project().clone()));
|
||||
workspace.open_item(diagnostics, cx);
|
||||
}
|
||||
}
|
||||
|
||||
fn open_excerpts(&mut self, _: &OpenExcerpts, cx: &mut ViewContext<Self>) {
|
||||
if let Some(workspace) = self.workspace.upgrade(cx) {
|
||||
let editor = self.editor.read(cx);
|
||||
let excerpts = self.excerpts.read(cx);
|
||||
let mut new_selections_by_buffer = HashMap::default();
|
||||
|
||||
for selection in editor.local_selections::<usize>(cx) {
|
||||
for (buffer, mut range) in
|
||||
excerpts.excerpted_buffers(selection.start..selection.end, cx)
|
||||
{
|
||||
if selection.reversed {
|
||||
mem::swap(&mut range.start, &mut range.end);
|
||||
}
|
||||
new_selections_by_buffer
|
||||
.entry(buffer)
|
||||
.or_insert(Vec::new())
|
||||
.push(range)
|
||||
}
|
||||
}
|
||||
|
||||
workspace.update(cx, |workspace, cx| {
|
||||
for (buffer, ranges) in new_selections_by_buffer {
|
||||
let buffer = BufferItemHandle(buffer);
|
||||
if !workspace.activate_pane_for_item(&buffer, cx) {
|
||||
workspace.activate_next_pane(cx);
|
||||
}
|
||||
let editor = workspace
|
||||
.open_item(buffer, cx)
|
||||
.to_any()
|
||||
.downcast::<Editor>()
|
||||
.unwrap();
|
||||
editor.update(cx, |editor, cx| {
|
||||
editor.select_ranges(ranges, Some(Autoscroll::Center), cx)
|
||||
});
|
||||
}
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
fn update_excerpts(
|
||||
&self,
|
||||
paths: BTreeSet<ProjectPath>,
|
||||
cx: &mut ViewContext<Self>,
|
||||
) {
|
||||
let project = self.model.read(cx).project.clone();
|
||||
cx.spawn(|this, mut cx| {
|
||||
async move {
|
||||
for path in paths {
|
||||
let buffer = project
|
||||
.update(&mut cx, |project, cx| project.open_buffer(path.clone(), cx))
|
||||
.await?;
|
||||
this.update(&mut cx, |view, cx| {
|
||||
view.populate_excerpts(path, buffer, cx)
|
||||
})
|
||||
}
|
||||
Result::<_, anyhow::Error>::Ok(())
|
||||
}
|
||||
.log_err()
|
||||
})
|
||||
.detach();
|
||||
}
|
||||
|
||||
fn populate_excerpts(
|
||||
&mut self,
|
||||
path: ProjectPath,
|
||||
buffer: ModelHandle<Buffer>,
|
||||
cx: &mut ViewContext<Self>,
|
||||
) {
|
||||
let was_empty = self.path_states.is_empty();
|
||||
let snapshot = buffer.read(cx).snapshot();
|
||||
let path_ix = match self
|
||||
.path_states
|
||||
.binary_search_by_key(&&path, |e| &e.0)
|
||||
{
|
||||
Ok(ix) => ix,
|
||||
Err(ix) => {
|
||||
self.path_states
|
||||
.insert(ix, (path.clone(), Default::default()));
|
||||
ix
|
||||
}
|
||||
};
|
||||
|
||||
let mut prev_excerpt_id = if path_ix > 0 {
|
||||
let prev_path_last_group = &self.path_states[path_ix - 1].1.last().unwrap();
|
||||
prev_path_last_group.excerpts.last().unwrap().clone()
|
||||
} else {
|
||||
ExcerptId::min()
|
||||
};
|
||||
|
||||
let groups = &mut self.path_states[path_ix].1;
|
||||
let mut groups_to_add = Vec::new();
|
||||
let mut group_ixs_to_remove = Vec::new();
|
||||
let mut blocks_to_add = Vec::new();
|
||||
let mut blocks_to_remove = HashSet::default();
|
||||
let excerpts_snapshot = self.excerpts.update(cx, |excerpts, excerpts_cx| {
|
||||
let mut old_groups = groups.iter().enumerate().peekable();
|
||||
let mut new_groups = snapshot
|
||||
.diagnostic_groups()
|
||||
.into_iter()
|
||||
.filter(|group| group.entries[group.primary_ix].diagnostic.is_disk_based)
|
||||
.peekable();
|
||||
|
||||
loop {
|
||||
let mut to_insert = None;
|
||||
let mut to_invalidate = None;
|
||||
let mut to_keep = None;
|
||||
match (old_groups.peek(), new_groups.peek()) {
|
||||
(None, None) => break,
|
||||
(None, Some(_)) => to_insert = new_groups.next(),
|
||||
(Some(_), None) => to_invalidate = old_groups.next(),
|
||||
(Some((_, old_group)), Some(new_group)) => {
|
||||
let old_primary = &old_group.primary_diagnostic;
|
||||
let new_primary = &new_group.entries[new_group.primary_ix];
|
||||
match compare_diagnostics(old_primary, new_primary, &snapshot) {
|
||||
Ordering::Less => to_invalidate = old_groups.next(),
|
||||
Ordering::Equal => {
|
||||
to_keep = old_groups.next();
|
||||
new_groups.next();
|
||||
}
|
||||
Ordering::Greater => to_insert = new_groups.next(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if let Some(group) = to_insert {
|
||||
let mut group_state = DiagnosticGroupState {
|
||||
primary_diagnostic: group.entries[group.primary_ix].clone(),
|
||||
primary_excerpt_ix: 0,
|
||||
excerpts: Default::default(),
|
||||
blocks: Default::default(),
|
||||
block_count: 0,
|
||||
};
|
||||
let mut pending_range: Option<(Range<Point>, usize)> = None;
|
||||
let mut is_first_excerpt_for_group = true;
|
||||
for (ix, entry) in group.entries.iter().map(Some).chain([None]).enumerate() {
|
||||
let resolved_entry = entry.map(|e| e.resolve::<Point>(&snapshot));
|
||||
if let Some((range, start_ix)) = &mut pending_range {
|
||||
if let Some(entry) = resolved_entry.as_ref() {
|
||||
if entry.range.start.row
|
||||
<= range.end.row + 1 + CONTEXT_LINE_COUNT * 2
|
||||
{
|
||||
range.end = range.end.max(entry.range.end);
|
||||
continue;
|
||||
}
|
||||
}
|
||||
|
||||
let excerpt_start =
|
||||
Point::new(range.start.row.saturating_sub(CONTEXT_LINE_COUNT), 0);
|
||||
let excerpt_end = snapshot.clip_point(
|
||||
Point::new(range.end.row + CONTEXT_LINE_COUNT, u32::MAX),
|
||||
Bias::Left,
|
||||
);
|
||||
let excerpt_id = excerpts.insert_excerpt_after(
|
||||
&prev_excerpt_id,
|
||||
ExcerptProperties {
|
||||
buffer: &buffer,
|
||||
range: excerpt_start..excerpt_end,
|
||||
},
|
||||
excerpts_cx,
|
||||
);
|
||||
|
||||
prev_excerpt_id = excerpt_id.clone();
|
||||
group_state.excerpts.push(excerpt_id.clone());
|
||||
let header_position = (excerpt_id.clone(), language::Anchor::min());
|
||||
|
||||
if is_first_excerpt_for_group {
|
||||
is_first_excerpt_for_group = false;
|
||||
let primary = &group.entries[group.primary_ix].diagnostic;
|
||||
let mut header = primary.clone();
|
||||
header.message =
|
||||
primary.message.split('\n').next().unwrap().to_string();
|
||||
group_state.block_count += 1;
|
||||
blocks_to_add.push(BlockProperties {
|
||||
position: header_position,
|
||||
height: 3,
|
||||
render: diagnostic_header_renderer(
|
||||
buffer.clone(),
|
||||
header,
|
||||
true,
|
||||
self.build_settings.clone(),
|
||||
),
|
||||
disposition: BlockDisposition::Above,
|
||||
});
|
||||
} else {
|
||||
group_state.block_count += 1;
|
||||
blocks_to_add.push(BlockProperties {
|
||||
position: header_position,
|
||||
height: 1,
|
||||
render: context_header_renderer(self.build_settings.clone()),
|
||||
disposition: BlockDisposition::Above,
|
||||
});
|
||||
}
|
||||
|
||||
for entry in &group.entries[*start_ix..ix] {
|
||||
let mut diagnostic = entry.diagnostic.clone();
|
||||
if diagnostic.is_primary {
|
||||
group_state.primary_excerpt_ix = group_state.excerpts.len() - 1;
|
||||
diagnostic.message =
|
||||
entry.diagnostic.message.split('\n').skip(1).collect();
|
||||
}
|
||||
|
||||
if !diagnostic.message.is_empty() {
|
||||
group_state.block_count += 1;
|
||||
blocks_to_add.push(BlockProperties {
|
||||
position: (excerpt_id.clone(), entry.range.start.clone()),
|
||||
height: diagnostic.message.matches('\n').count() as u8 + 1,
|
||||
render: diagnostic_block_renderer(
|
||||
diagnostic,
|
||||
true,
|
||||
self.build_settings.clone(),
|
||||
),
|
||||
disposition: BlockDisposition::Below,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
pending_range.take();
|
||||
}
|
||||
|
||||
if let Some(entry) = resolved_entry {
|
||||
pending_range = Some((entry.range.clone(), ix));
|
||||
}
|
||||
}
|
||||
|
||||
groups_to_add.push(group_state);
|
||||
} else if let Some((group_ix, group_state)) = to_invalidate {
|
||||
excerpts.remove_excerpts(group_state.excerpts.iter(), excerpts_cx);
|
||||
group_ixs_to_remove.push(group_ix);
|
||||
blocks_to_remove.extend(group_state.blocks.iter().copied());
|
||||
} else if let Some((_, group)) = to_keep {
|
||||
prev_excerpt_id = group.excerpts.last().unwrap().clone();
|
||||
}
|
||||
}
|
||||
|
||||
excerpts.snapshot(excerpts_cx)
|
||||
});
|
||||
|
||||
self.editor.update(cx, |editor, cx| {
|
||||
editor.remove_blocks(blocks_to_remove, cx);
|
||||
let mut block_ids = editor
|
||||
.insert_blocks(
|
||||
blocks_to_add.into_iter().map(|block| {
|
||||
let (excerpt_id, text_anchor) = block.position;
|
||||
BlockProperties {
|
||||
position: excerpts_snapshot.anchor_in_excerpt(excerpt_id, text_anchor),
|
||||
height: block.height,
|
||||
render: block.render,
|
||||
disposition: block.disposition,
|
||||
}
|
||||
}),
|
||||
cx,
|
||||
)
|
||||
.into_iter();
|
||||
|
||||
for group_state in &mut groups_to_add {
|
||||
group_state.blocks = block_ids.by_ref().take(group_state.block_count).collect();
|
||||
}
|
||||
});
|
||||
|
||||
for ix in group_ixs_to_remove.into_iter().rev() {
|
||||
groups.remove(ix);
|
||||
}
|
||||
groups.extend(groups_to_add);
|
||||
groups.sort_unstable_by(|a, b| {
|
||||
let range_a = &a.primary_diagnostic.range;
|
||||
let range_b = &b.primary_diagnostic.range;
|
||||
range_a
|
||||
.start
|
||||
.cmp(&range_b.start, &snapshot)
|
||||
.unwrap()
|
||||
.then_with(|| range_a.end.cmp(&range_b.end, &snapshot).unwrap())
|
||||
});
|
||||
|
||||
if groups.is_empty() {
|
||||
self.path_states.remove(path_ix);
|
||||
}
|
||||
|
||||
self.editor.update(cx, |editor, cx| {
|
||||
let groups;
|
||||
let mut selections;
|
||||
let new_excerpt_ids_by_selection_id;
|
||||
if was_empty {
|
||||
groups = self.path_states.first()?.1.as_slice();
|
||||
new_excerpt_ids_by_selection_id = [(0, ExcerptId::min())].into_iter().collect();
|
||||
selections = vec![Selection {
|
||||
id: 0,
|
||||
start: 0,
|
||||
end: 0,
|
||||
reversed: false,
|
||||
goal: SelectionGoal::None,
|
||||
}];
|
||||
} else {
|
||||
groups = self.path_states.get(path_ix)?.1.as_slice();
|
||||
new_excerpt_ids_by_selection_id = editor.refresh_selections(cx);
|
||||
selections = editor.local_selections::<usize>(cx);
|
||||
}
|
||||
|
||||
// If any selection has lost its position, move it to start of the next primary diagnostic.
|
||||
for selection in &mut selections {
|
||||
if let Some(new_excerpt_id) = new_excerpt_ids_by_selection_id.get(&selection.id) {
|
||||
let group_ix = match groups.binary_search_by(|probe| {
|
||||
probe.excerpts.last().unwrap().cmp(&new_excerpt_id)
|
||||
}) {
|
||||
Ok(ix) | Err(ix) => ix,
|
||||
};
|
||||
if let Some(group) = groups.get(group_ix) {
|
||||
let offset = excerpts_snapshot
|
||||
.anchor_in_excerpt(
|
||||
group.excerpts[group.primary_excerpt_ix].clone(),
|
||||
group.primary_diagnostic.range.start.clone(),
|
||||
)
|
||||
.to_offset(&excerpts_snapshot);
|
||||
selection.start = offset;
|
||||
selection.end = offset;
|
||||
}
|
||||
}
|
||||
}
|
||||
editor.update_selections(selections, None, cx);
|
||||
Some(())
|
||||
});
|
||||
|
||||
if self.path_states.is_empty() {
|
||||
if self.editor.is_focused(cx) {
|
||||
cx.focus_self();
|
||||
}
|
||||
} else {
|
||||
if cx.handle().is_focused(cx) {
|
||||
cx.focus(&self.editor);
|
||||
}
|
||||
}
|
||||
cx.notify();
|
||||
}
|
||||
}
|
||||
|
||||
impl workspace::Item for ProjectDiagnostics {
|
||||
type View = ProjectDiagnosticsEditor;
|
||||
|
||||
fn build_view(
|
||||
handle: ModelHandle<Self>,
|
||||
workspace: &Workspace,
|
||||
cx: &mut ViewContext<Self::View>,
|
||||
) -> Self::View {
|
||||
ProjectDiagnosticsEditor::new(handle, workspace.weak_handle(), workspace.settings(), cx)
|
||||
}
|
||||
|
||||
fn project_path(&self) -> Option<project::ProjectPath> {
|
||||
None
|
||||
}
|
||||
}
|
||||
|
||||
impl workspace::ItemView for ProjectDiagnosticsEditor {
|
||||
type ItemHandle = ModelHandle<ProjectDiagnostics>;
|
||||
|
||||
fn item_handle(&self, _: &AppContext) -> Self::ItemHandle {
|
||||
self.model.clone()
|
||||
}
|
||||
|
||||
fn title(&self, _: &AppContext) -> String {
|
||||
"Project Diagnostics".to_string()
|
||||
}
|
||||
|
||||
fn project_path(&self, _: &AppContext) -> Option<project::ProjectPath> {
|
||||
None
|
||||
}
|
||||
|
||||
fn is_dirty(&self, cx: &AppContext) -> bool {
|
||||
self.excerpts.read(cx).read(cx).is_dirty()
|
||||
}
|
||||
|
||||
fn has_conflict(&self, cx: &AppContext) -> bool {
|
||||
self.excerpts.read(cx).read(cx).has_conflict()
|
||||
}
|
||||
|
||||
fn can_save(&self, _: &AppContext) -> bool {
|
||||
true
|
||||
}
|
||||
|
||||
fn save(&mut self, cx: &mut ViewContext<Self>) -> Result<Task<Result<()>>> {
|
||||
self.excerpts.update(cx, |excerpts, cx| excerpts.save(cx))
|
||||
}
|
||||
|
||||
fn can_save_as(&self, _: &AppContext) -> bool {
|
||||
false
|
||||
}
|
||||
|
||||
fn save_as(
|
||||
&mut self,
|
||||
_: ModelHandle<project::Worktree>,
|
||||
_: &std::path::Path,
|
||||
_: &mut ViewContext<Self>,
|
||||
) -> Task<Result<()>> {
|
||||
unreachable!()
|
||||
}
|
||||
|
||||
fn should_activate_item_on_event(event: &Self::Event) -> bool {
|
||||
Editor::should_activate_item_on_event(event)
|
||||
}
|
||||
|
||||
fn should_update_tab_on_event(event: &Event) -> bool {
|
||||
matches!(
|
||||
event,
|
||||
Event::Saved | Event::Dirtied | Event::FileHandleChanged
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
fn compare_diagnostics<L: language::ToOffset, R: language::ToOffset>(
|
||||
lhs: &DiagnosticEntry<L>,
|
||||
rhs: &DiagnosticEntry<R>,
|
||||
snapshot: &language::BufferSnapshot,
|
||||
) -> Ordering {
|
||||
lhs.range
|
||||
.start
|
||||
.to_offset(&snapshot)
|
||||
.cmp(&rhs.range.start.to_offset(snapshot))
|
||||
.then_with(|| {
|
||||
lhs.range
|
||||
.end
|
||||
.to_offset(&snapshot)
|
||||
.cmp(&rhs.range.end.to_offset(snapshot))
|
||||
})
|
||||
.then_with(|| lhs.diagnostic.message.cmp(&rhs.diagnostic.message))
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
use client::{http::ServerResponse, test::FakeHttpClient, Client, UserStore};
|
||||
use editor::DisplayPoint;
|
||||
use gpui::TestAppContext;
|
||||
use language::{Diagnostic, DiagnosticEntry, DiagnosticSeverity, LanguageRegistry, PointUtf16};
|
||||
use project::{worktree, FakeFs};
|
||||
use serde_json::json;
|
||||
use std::sync::Arc;
|
||||
use unindent::Unindent as _;
|
||||
use workspace::WorkspaceParams;
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_diagnostics(mut cx: TestAppContext) {
|
||||
let workspace_params = cx.update(WorkspaceParams::test);
|
||||
let settings = workspace_params.settings.clone();
|
||||
let http_client = FakeHttpClient::new(|_| async move { Ok(ServerResponse::new(404)) });
|
||||
let client = Client::new(http_client.clone());
|
||||
let user_store = cx.add_model(|cx| UserStore::new(client.clone(), http_client, cx));
|
||||
let fs = Arc::new(FakeFs::new());
|
||||
|
||||
let project = cx.update(|cx| {
|
||||
Project::local(
|
||||
client.clone(),
|
||||
user_store,
|
||||
Arc::new(LanguageRegistry::new()),
|
||||
fs.clone(),
|
||||
cx,
|
||||
)
|
||||
});
|
||||
|
||||
fs.insert_tree(
|
||||
"/test",
|
||||
json!({
|
||||
"a.rs": "
|
||||
const a: i32 = 'a';
|
||||
".unindent(),
|
||||
|
||||
"main.rs": "
|
||||
fn main() {
|
||||
let x = vec![];
|
||||
let y = vec![];
|
||||
a(x);
|
||||
b(y);
|
||||
// comment 1
|
||||
// comment 2
|
||||
c(y);
|
||||
d(x);
|
||||
}
|
||||
"
|
||||
.unindent(),
|
||||
}),
|
||||
)
|
||||
.await;
|
||||
|
||||
let worktree = project
|
||||
.update(&mut cx, |project, cx| {
|
||||
project.add_local_worktree("/test", cx)
|
||||
})
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
worktree.update(&mut cx, |worktree, cx| {
|
||||
worktree
|
||||
.update_diagnostic_entries(
|
||||
Arc::from("/test/main.rs".as_ref()),
|
||||
None,
|
||||
vec![
|
||||
DiagnosticEntry {
|
||||
range: PointUtf16::new(1, 8)..PointUtf16::new(1, 9),
|
||||
diagnostic: Diagnostic {
|
||||
message:
|
||||
"move occurs because `x` has type `Vec<char>`, which does not implement the `Copy` trait"
|
||||
.to_string(),
|
||||
severity: DiagnosticSeverity::INFORMATION,
|
||||
is_primary: false,
|
||||
is_disk_based: true,
|
||||
group_id: 1,
|
||||
..Default::default()
|
||||
},
|
||||
},
|
||||
DiagnosticEntry {
|
||||
range: PointUtf16::new(2, 8)..PointUtf16::new(2, 9),
|
||||
diagnostic: Diagnostic {
|
||||
message:
|
||||
"move occurs because `y` has type `Vec<char>`, which does not implement the `Copy` trait"
|
||||
.to_string(),
|
||||
severity: DiagnosticSeverity::INFORMATION,
|
||||
is_primary: false,
|
||||
is_disk_based: true,
|
||||
group_id: 0,
|
||||
..Default::default()
|
||||
},
|
||||
},
|
||||
DiagnosticEntry {
|
||||
range: PointUtf16::new(3, 6)..PointUtf16::new(3, 7),
|
||||
diagnostic: Diagnostic {
|
||||
message: "value moved here".to_string(),
|
||||
severity: DiagnosticSeverity::INFORMATION,
|
||||
is_primary: false,
|
||||
is_disk_based: true,
|
||||
group_id: 1,
|
||||
..Default::default()
|
||||
},
|
||||
},
|
||||
DiagnosticEntry {
|
||||
range: PointUtf16::new(4, 6)..PointUtf16::new(4, 7),
|
||||
diagnostic: Diagnostic {
|
||||
message: "value moved here".to_string(),
|
||||
severity: DiagnosticSeverity::INFORMATION,
|
||||
is_primary: false,
|
||||
is_disk_based: true,
|
||||
group_id: 0,
|
||||
..Default::default()
|
||||
},
|
||||
},
|
||||
DiagnosticEntry {
|
||||
range: PointUtf16::new(7, 6)..PointUtf16::new(7, 7),
|
||||
diagnostic: Diagnostic {
|
||||
message: "use of moved value\nvalue used here after move".to_string(),
|
||||
severity: DiagnosticSeverity::ERROR,
|
||||
is_primary: true,
|
||||
is_disk_based: true,
|
||||
group_id: 0,
|
||||
..Default::default()
|
||||
},
|
||||
},
|
||||
DiagnosticEntry {
|
||||
range: PointUtf16::new(8, 6)..PointUtf16::new(8, 7),
|
||||
diagnostic: Diagnostic {
|
||||
message: "use of moved value\nvalue used here after move".to_string(),
|
||||
severity: DiagnosticSeverity::ERROR,
|
||||
is_primary: true,
|
||||
is_disk_based: true,
|
||||
group_id: 1,
|
||||
..Default::default()
|
||||
},
|
||||
},
|
||||
],
|
||||
cx,
|
||||
)
|
||||
.unwrap();
|
||||
});
|
||||
|
||||
let model = cx.add_model(|_| ProjectDiagnostics::new(project.clone()));
|
||||
let workspace = cx.add_view(0, |cx| Workspace::new(&workspace_params, cx));
|
||||
|
||||
let view = cx.add_view(0, |cx| {
|
||||
ProjectDiagnosticsEditor::new(model, workspace.downgrade(), settings, cx)
|
||||
});
|
||||
|
||||
view.condition(&mut cx, |view, cx| view.text(cx).contains("fn main()"))
|
||||
.await;
|
||||
|
||||
view.update(&mut cx, |view, cx| {
|
||||
let editor = view.editor.update(cx, |editor, cx| editor.snapshot(cx));
|
||||
|
||||
assert_eq!(
|
||||
editor.text(),
|
||||
concat!(
|
||||
//
|
||||
// main.rs, diagnostic group 1
|
||||
//
|
||||
"\n", // padding
|
||||
"\n", // primary message
|
||||
"\n", // filename
|
||||
" let x = vec![];\n",
|
||||
" let y = vec![];\n",
|
||||
"\n", // supporting diagnostic
|
||||
" a(x);\n",
|
||||
" b(y);\n",
|
||||
"\n", // supporting diagnostic
|
||||
" // comment 1\n",
|
||||
" // comment 2\n",
|
||||
" c(y);\n",
|
||||
"\n", // supporting diagnostic
|
||||
" d(x);\n",
|
||||
//
|
||||
// main.rs, diagnostic group 2
|
||||
//
|
||||
"\n", // padding
|
||||
"\n", // primary message
|
||||
"\n", // filename
|
||||
"fn main() {\n",
|
||||
" let x = vec![];\n",
|
||||
"\n", // supporting diagnostic
|
||||
" let y = vec![];\n",
|
||||
" a(x);\n",
|
||||
"\n", // supporting diagnostic
|
||||
" b(y);\n",
|
||||
"\n", // context ellipsis
|
||||
" c(y);\n",
|
||||
" d(x);\n",
|
||||
"\n", // supporting diagnostic
|
||||
"}"
|
||||
)
|
||||
);
|
||||
|
||||
view.editor.update(cx, |editor, cx| {
|
||||
assert_eq!(
|
||||
editor.selected_display_ranges(cx),
|
||||
[DisplayPoint::new(11, 6)..DisplayPoint::new(11, 6)]
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
worktree.update(&mut cx, |worktree, cx| {
|
||||
worktree
|
||||
.update_diagnostic_entries(
|
||||
Arc::from("/test/a.rs".as_ref()),
|
||||
None,
|
||||
vec![DiagnosticEntry {
|
||||
range: PointUtf16::new(0, 15)..PointUtf16::new(0, 15),
|
||||
diagnostic: Diagnostic {
|
||||
message: "mismatched types\nexpected `usize`, found `char`".to_string(),
|
||||
severity: DiagnosticSeverity::ERROR,
|
||||
is_primary: true,
|
||||
is_disk_based: true,
|
||||
group_id: 0,
|
||||
..Default::default()
|
||||
},
|
||||
}],
|
||||
cx,
|
||||
)
|
||||
.unwrap();
|
||||
cx.emit(worktree::Event::DiskBasedDiagnosticsUpdated);
|
||||
});
|
||||
|
||||
view.condition(&mut cx, |view, cx| view.text(cx).contains("const a"))
|
||||
.await;
|
||||
|
||||
view.update(&mut cx, |view, cx| {
|
||||
let editor = view.editor.update(cx, |editor, cx| editor.snapshot(cx));
|
||||
|
||||
assert_eq!(
|
||||
editor.text(),
|
||||
concat!(
|
||||
//
|
||||
// a.rs
|
||||
//
|
||||
"\n", // padding
|
||||
"\n", // primary message
|
||||
"\n", // filename
|
||||
"const a: i32 = 'a';\n",
|
||||
"\n", // supporting diagnostic
|
||||
"\n", // context line
|
||||
//
|
||||
// main.rs, diagnostic group 1
|
||||
//
|
||||
"\n", // padding
|
||||
"\n", // primary message
|
||||
"\n", // filename
|
||||
" let x = vec![];\n",
|
||||
" let y = vec![];\n",
|
||||
"\n", // supporting diagnostic
|
||||
" a(x);\n",
|
||||
" b(y);\n",
|
||||
"\n", // supporting diagnostic
|
||||
" // comment 1\n",
|
||||
" // comment 2\n",
|
||||
" c(y);\n",
|
||||
"\n", // supporting diagnostic
|
||||
" d(x);\n",
|
||||
//
|
||||
// main.rs, diagnostic group 2
|
||||
//
|
||||
"\n", // padding
|
||||
"\n", // primary message
|
||||
"\n", // filename
|
||||
"fn main() {\n",
|
||||
" let x = vec![];\n",
|
||||
"\n", // supporting diagnostic
|
||||
" let y = vec![];\n",
|
||||
" a(x);\n",
|
||||
"\n", // supporting diagnostic
|
||||
" b(y);\n",
|
||||
"\n", // context ellipsis
|
||||
" c(y);\n",
|
||||
" d(x);\n",
|
||||
"\n", // supporting diagnostic
|
||||
"}"
|
||||
)
|
||||
);
|
||||
});
|
||||
}
|
||||
}
|
||||
87
crates/diagnostics/src/items.rs
Normal file
87
crates/diagnostics/src/items.rs
Normal file
@@ -0,0 +1,87 @@
|
||||
use gpui::{
|
||||
elements::*, platform::CursorStyle, Entity, ModelHandle, RenderContext, View, ViewContext,
|
||||
};
|
||||
use postage::watch;
|
||||
use project::Project;
|
||||
use std::fmt::Write;
|
||||
use workspace::{Settings, StatusItemView};
|
||||
|
||||
pub struct DiagnosticSummary {
|
||||
settings: watch::Receiver<Settings>,
|
||||
summary: project::DiagnosticSummary,
|
||||
in_progress: bool,
|
||||
}
|
||||
|
||||
impl DiagnosticSummary {
|
||||
pub fn new(
|
||||
project: &ModelHandle<Project>,
|
||||
settings: watch::Receiver<Settings>,
|
||||
cx: &mut ViewContext<Self>,
|
||||
) -> Self {
|
||||
cx.subscribe(project, |this, project, event, cx| match event {
|
||||
project::Event::DiskBasedDiagnosticsUpdated { .. } => {
|
||||
this.summary = project.read(cx).diagnostic_summary(cx);
|
||||
cx.notify();
|
||||
}
|
||||
project::Event::DiskBasedDiagnosticsStarted => {
|
||||
this.in_progress = true;
|
||||
cx.notify();
|
||||
}
|
||||
project::Event::DiskBasedDiagnosticsFinished => {
|
||||
this.in_progress = false;
|
||||
cx.notify();
|
||||
}
|
||||
_ => {}
|
||||
})
|
||||
.detach();
|
||||
Self {
|
||||
settings,
|
||||
summary: project.read(cx).diagnostic_summary(cx),
|
||||
in_progress: project.read(cx).is_running_disk_based_diagnostics(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Entity for DiagnosticSummary {
|
||||
type Event = ();
|
||||
}
|
||||
|
||||
impl View for DiagnosticSummary {
|
||||
fn ui_name() -> &'static str {
|
||||
"DiagnosticSummary"
|
||||
}
|
||||
|
||||
fn render(&mut self, cx: &mut RenderContext<Self>) -> ElementBox {
|
||||
enum Tag {}
|
||||
|
||||
let theme = &self.settings.borrow().theme.project_diagnostics;
|
||||
let mut message = String::new();
|
||||
if self.in_progress {
|
||||
message.push_str("Checking... ");
|
||||
}
|
||||
write!(
|
||||
message,
|
||||
"Errors: {}, Warnings: {}",
|
||||
self.summary.error_count, self.summary.warning_count
|
||||
)
|
||||
.unwrap();
|
||||
MouseEventHandler::new::<Tag, _, _, _>(0, cx, |_, _| {
|
||||
Label::new(message, theme.status_bar_item.text.clone())
|
||||
.contained()
|
||||
.with_style(theme.status_bar_item.container)
|
||||
.boxed()
|
||||
})
|
||||
.with_cursor_style(CursorStyle::PointingHand)
|
||||
.on_click(|cx| cx.dispatch_action(crate::Deploy))
|
||||
.boxed()
|
||||
}
|
||||
}
|
||||
|
||||
impl StatusItemView for DiagnosticSummary {
|
||||
fn set_active_pane_item(
|
||||
&mut self,
|
||||
_: Option<&dyn workspace::ItemViewHandle>,
|
||||
_: &mut ViewContext<Self>,
|
||||
) {
|
||||
}
|
||||
}
|
||||
51
crates/editor/Cargo.toml
Normal file
51
crates/editor/Cargo.toml
Normal file
@@ -0,0 +1,51 @@
|
||||
[package]
|
||||
name = "editor"
|
||||
version = "0.1.0"
|
||||
edition = "2021"
|
||||
|
||||
[lib]
|
||||
path = "src/editor.rs"
|
||||
|
||||
[features]
|
||||
test-support = [
|
||||
"rand",
|
||||
"text/test-support",
|
||||
"language/test-support",
|
||||
"gpui/test-support",
|
||||
"util/test-support",
|
||||
]
|
||||
|
||||
[dependencies]
|
||||
text = { path = "../text" }
|
||||
clock = { path = "../clock" }
|
||||
collections = { path = "../collections" }
|
||||
gpui = { path = "../gpui" }
|
||||
language = { path = "../language" }
|
||||
project = { path = "../project" }
|
||||
sum_tree = { path = "../sum_tree" }
|
||||
theme = { path = "../theme" }
|
||||
util = { path = "../util" }
|
||||
workspace = { path = "../workspace" }
|
||||
aho-corasick = "0.7"
|
||||
anyhow = "1.0"
|
||||
itertools = "0.10"
|
||||
lazy_static = "1.4"
|
||||
log = "0.4"
|
||||
parking_lot = "0.11"
|
||||
postage = { version = "0.4", features = ["futures-traits"] }
|
||||
rand = { version = "0.8.3", optional = true }
|
||||
serde = { version = "1", features = ["derive", "rc"] }
|
||||
smallvec = { version = "1.6", features = ["union"] }
|
||||
smol = "1.2"
|
||||
|
||||
[dev-dependencies]
|
||||
text = { path = "../text", features = ["test-support"] }
|
||||
language = { path = "../language", features = ["test-support"] }
|
||||
gpui = { path = "../gpui", features = ["test-support"] }
|
||||
util = { path = "../util", features = ["test-support"] }
|
||||
ctor = "0.1"
|
||||
env_logger = "0.8"
|
||||
rand = "0.8"
|
||||
unindent = "0.1.7"
|
||||
tree-sitter = "0.20"
|
||||
tree-sitter-rust = "0.20"
|
||||
1141
crates/editor/src/display_map.rs
Normal file
1141
crates/editor/src/display_map.rs
Normal file
File diff suppressed because it is too large
Load Diff
1415
crates/editor/src/display_map/block_map.rs
Normal file
1415
crates/editor/src/display_map/block_map.rs
Normal file
File diff suppressed because it is too large
Load Diff
File diff suppressed because it is too large
Load Diff
@@ -1,14 +1,17 @@
|
||||
use super::fold_map::{self, FoldEdit, FoldPoint, FoldSnapshot, ToFoldPoint};
|
||||
use crate::MultiBufferSnapshot;
|
||||
use language::{rope, Chunk};
|
||||
use parking_lot::Mutex;
|
||||
use std::{cmp, mem, ops::Range};
|
||||
use sum_tree::Bias;
|
||||
use text::Point;
|
||||
use theme::SyntaxTheme;
|
||||
|
||||
use super::fold_map::{self, FoldEdit, FoldPoint, Snapshot as FoldSnapshot};
|
||||
use crate::{editor::rope, settings::HighlightId, util::Bias};
|
||||
use std::{mem, ops::Range};
|
||||
|
||||
pub struct TabMap(Mutex<Snapshot>);
|
||||
pub struct TabMap(Mutex<TabSnapshot>);
|
||||
|
||||
impl TabMap {
|
||||
pub fn new(input: FoldSnapshot, tab_size: usize) -> (Self, Snapshot) {
|
||||
let snapshot = Snapshot {
|
||||
pub fn new(input: FoldSnapshot, tab_size: usize) -> (Self, TabSnapshot) {
|
||||
let snapshot = TabSnapshot {
|
||||
fold_snapshot: input,
|
||||
tab_size,
|
||||
};
|
||||
@@ -19,9 +22,10 @@ impl TabMap {
|
||||
&self,
|
||||
fold_snapshot: FoldSnapshot,
|
||||
mut fold_edits: Vec<FoldEdit>,
|
||||
) -> (Snapshot, Vec<Edit>) {
|
||||
) -> (TabSnapshot, Vec<TabEdit>) {
|
||||
let mut old_snapshot = self.0.lock();
|
||||
let new_snapshot = Snapshot {
|
||||
let max_offset = old_snapshot.fold_snapshot.len();
|
||||
let new_snapshot = TabSnapshot {
|
||||
fold_snapshot,
|
||||
tab_size: old_snapshot.tab_size,
|
||||
};
|
||||
@@ -31,19 +35,19 @@ impl TabMap {
|
||||
let mut delta = 0;
|
||||
for chunk in old_snapshot
|
||||
.fold_snapshot
|
||||
.chunks_at(fold_edit.old_bytes.end)
|
||||
.chunks(fold_edit.old.end..max_offset, None)
|
||||
{
|
||||
let patterns: &[_] = &['\t', '\n'];
|
||||
if let Some(ix) = chunk.find(patterns) {
|
||||
if &chunk[ix..ix + 1] == "\t" {
|
||||
fold_edit.old_bytes.end.0 += delta + ix + 1;
|
||||
fold_edit.new_bytes.end.0 += delta + ix + 1;
|
||||
if let Some(ix) = chunk.text.find(patterns) {
|
||||
if &chunk.text[ix..ix + 1] == "\t" {
|
||||
fold_edit.old.end.0 += delta + ix + 1;
|
||||
fold_edit.new.end.0 += delta + ix + 1;
|
||||
}
|
||||
|
||||
break;
|
||||
}
|
||||
|
||||
delta += chunk.len();
|
||||
delta += chunk.text.len();
|
||||
}
|
||||
}
|
||||
|
||||
@@ -52,9 +56,9 @@ impl TabMap {
|
||||
let (prev_edits, next_edits) = fold_edits.split_at_mut(ix);
|
||||
let prev_edit = prev_edits.last_mut().unwrap();
|
||||
let edit = &next_edits[0];
|
||||
if prev_edit.old_bytes.end >= edit.old_bytes.start {
|
||||
prev_edit.old_bytes.end = edit.old_bytes.end;
|
||||
prev_edit.new_bytes.end = edit.new_bytes.end;
|
||||
if prev_edit.old.end >= edit.old.start {
|
||||
prev_edit.old.end = edit.old.end;
|
||||
prev_edit.new.end = edit.new.end;
|
||||
fold_edits.remove(ix);
|
||||
} else {
|
||||
ix += 1;
|
||||
@@ -62,25 +66,13 @@ impl TabMap {
|
||||
}
|
||||
|
||||
for fold_edit in fold_edits {
|
||||
let old_start = fold_edit
|
||||
.old_bytes
|
||||
.start
|
||||
.to_point(&old_snapshot.fold_snapshot);
|
||||
let old_end = fold_edit
|
||||
.old_bytes
|
||||
.end
|
||||
.to_point(&old_snapshot.fold_snapshot);
|
||||
let new_start = fold_edit
|
||||
.new_bytes
|
||||
.start
|
||||
.to_point(&new_snapshot.fold_snapshot);
|
||||
let new_end = fold_edit
|
||||
.new_bytes
|
||||
.end
|
||||
.to_point(&new_snapshot.fold_snapshot);
|
||||
tab_edits.push(Edit {
|
||||
old_lines: old_snapshot.to_tab_point(old_start)..old_snapshot.to_tab_point(old_end),
|
||||
new_lines: new_snapshot.to_tab_point(new_start)..new_snapshot.to_tab_point(new_end),
|
||||
let old_start = fold_edit.old.start.to_point(&old_snapshot.fold_snapshot);
|
||||
let old_end = fold_edit.old.end.to_point(&old_snapshot.fold_snapshot);
|
||||
let new_start = fold_edit.new.start.to_point(&new_snapshot.fold_snapshot);
|
||||
let new_end = fold_edit.new.end.to_point(&new_snapshot.fold_snapshot);
|
||||
tab_edits.push(TabEdit {
|
||||
old: old_snapshot.to_tab_point(old_start)..old_snapshot.to_tab_point(old_end),
|
||||
new: new_snapshot.to_tab_point(new_start)..new_snapshot.to_tab_point(new_end),
|
||||
});
|
||||
}
|
||||
|
||||
@@ -90,12 +82,16 @@ impl TabMap {
|
||||
}
|
||||
|
||||
#[derive(Clone)]
|
||||
pub struct Snapshot {
|
||||
pub struct TabSnapshot {
|
||||
pub fold_snapshot: FoldSnapshot,
|
||||
pub tab_size: usize,
|
||||
}
|
||||
|
||||
impl Snapshot {
|
||||
impl TabSnapshot {
|
||||
pub fn buffer_snapshot(&self) -> &MultiBufferSnapshot {
|
||||
self.fold_snapshot.buffer_snapshot()
|
||||
}
|
||||
|
||||
pub fn text_summary(&self) -> TextSummary {
|
||||
self.text_summary_for_range(TabPoint::zero()..self.max_point())
|
||||
}
|
||||
@@ -108,28 +104,31 @@ impl Snapshot {
|
||||
.text_summary_for_range(input_start..input_end);
|
||||
|
||||
let mut first_line_chars = 0;
|
||||
let mut first_line_bytes = 0;
|
||||
for c in self.chunks_at(range.start).flat_map(|chunk| chunk.chars()) {
|
||||
if c == '\n'
|
||||
|| (range.start.row() == range.end.row() && first_line_bytes == range.end.column())
|
||||
{
|
||||
let line_end = if range.start.row() == range.end.row() {
|
||||
range.end
|
||||
} else {
|
||||
self.max_point()
|
||||
};
|
||||
for c in self
|
||||
.chunks(range.start..line_end, None)
|
||||
.flat_map(|chunk| chunk.text.chars())
|
||||
{
|
||||
if c == '\n' {
|
||||
break;
|
||||
}
|
||||
first_line_chars += 1;
|
||||
first_line_bytes += c.len_utf8() as u32;
|
||||
}
|
||||
|
||||
let mut last_line_chars = 0;
|
||||
let mut last_line_bytes = 0;
|
||||
for c in self
|
||||
.chunks_at(TabPoint::new(range.end.row(), 0).max(range.start))
|
||||
.flat_map(|chunk| chunk.chars())
|
||||
{
|
||||
if last_line_bytes == range.end.column() {
|
||||
break;
|
||||
if range.start.row() == range.end.row() {
|
||||
last_line_chars = first_line_chars;
|
||||
} else {
|
||||
for _ in self
|
||||
.chunks(TabPoint::new(range.end.row(), 0)..range.end, None)
|
||||
.flat_map(|chunk| chunk.text.chars())
|
||||
{
|
||||
last_line_chars += 1;
|
||||
}
|
||||
last_line_chars += 1;
|
||||
last_line_bytes += c.len_utf8() as u32;
|
||||
}
|
||||
|
||||
TextSummary {
|
||||
@@ -145,21 +144,11 @@ impl Snapshot {
|
||||
self.fold_snapshot.version
|
||||
}
|
||||
|
||||
pub fn chunks_at(&self, point: TabPoint) -> Chunks {
|
||||
let (point, expanded_char_column, to_next_stop) = self.to_fold_point(point, Bias::Left);
|
||||
let fold_chunks = self
|
||||
.fold_snapshot
|
||||
.chunks_at(point.to_offset(&self.fold_snapshot));
|
||||
Chunks {
|
||||
fold_chunks,
|
||||
column: expanded_char_column,
|
||||
tab_size: self.tab_size,
|
||||
chunk: &SPACES[0..to_next_stop],
|
||||
skip_leading_tab: to_next_stop > 0,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn highlighted_chunks(&mut self, range: Range<TabPoint>) -> HighlightedChunks {
|
||||
pub fn chunks<'a>(
|
||||
&'a self,
|
||||
range: Range<TabPoint>,
|
||||
theme: Option<&'a SyntaxTheme>,
|
||||
) -> TabChunks<'a> {
|
||||
let (input_start, expanded_char_column, to_next_stop) =
|
||||
self.to_fold_point(range.start, Bias::Left);
|
||||
let input_start = input_start.to_offset(&self.fold_snapshot);
|
||||
@@ -167,25 +156,35 @@ impl Snapshot {
|
||||
.to_fold_point(range.end, Bias::Right)
|
||||
.0
|
||||
.to_offset(&self.fold_snapshot);
|
||||
HighlightedChunks {
|
||||
fold_chunks: self
|
||||
.fold_snapshot
|
||||
.highlighted_chunks(input_start..input_end),
|
||||
let to_next_stop = if range.start.0 + Point::new(0, to_next_stop as u32) > range.end.0 {
|
||||
(range.end.column() - range.start.column()) as usize
|
||||
} else {
|
||||
to_next_stop
|
||||
};
|
||||
|
||||
TabChunks {
|
||||
fold_chunks: self.fold_snapshot.chunks(input_start..input_end, theme),
|
||||
column: expanded_char_column,
|
||||
output_position: range.start.0,
|
||||
max_output_position: range.end.0,
|
||||
tab_size: self.tab_size,
|
||||
chunk: &SPACES[0..to_next_stop],
|
||||
chunk: Chunk {
|
||||
text: &SPACES[0..to_next_stop],
|
||||
..Default::default()
|
||||
},
|
||||
skip_leading_tab: to_next_stop > 0,
|
||||
style_id: Default::default(),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn buffer_rows(&self, row: u32) -> fold_map::BufferRows {
|
||||
pub fn buffer_rows(&self, row: u32) -> fold_map::FoldBufferRows {
|
||||
self.fold_snapshot.buffer_rows(row)
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
pub fn text(&self) -> String {
|
||||
self.chunks_at(Default::default()).collect()
|
||||
self.chunks(TabPoint::zero()..self.max_point(), None)
|
||||
.map(|chunk| chunk.text)
|
||||
.collect()
|
||||
}
|
||||
|
||||
pub fn max_point(&self) -> TabPoint {
|
||||
@@ -205,6 +204,10 @@ impl Snapshot {
|
||||
TabPoint::new(input.row(), expanded as u32)
|
||||
}
|
||||
|
||||
pub fn from_point(&self, point: Point, bias: Bias) -> TabPoint {
|
||||
self.to_tab_point(point.to_fold_point(&self.fold_snapshot, bias))
|
||||
}
|
||||
|
||||
pub fn to_fold_point(&self, output: TabPoint, bias: Bias) -> (FoldPoint, usize, usize) {
|
||||
let chars = self.fold_snapshot.chars_at(FoldPoint::new(output.row(), 0));
|
||||
let expanded = output.column() as usize;
|
||||
@@ -217,6 +220,12 @@ impl Snapshot {
|
||||
)
|
||||
}
|
||||
|
||||
pub fn to_point(&self, point: TabPoint, bias: Bias) -> Point {
|
||||
self.to_fold_point(point, bias)
|
||||
.0
|
||||
.to_buffer_point(&self.fold_snapshot)
|
||||
}
|
||||
|
||||
fn expand_tabs(chars: impl Iterator<Item = char>, column: usize, tab_size: usize) -> usize {
|
||||
let mut expanded_chars = 0;
|
||||
let mut expanded_bytes = 0;
|
||||
@@ -306,11 +315,7 @@ impl From<super::Point> for TabPoint {
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, PartialEq, Eq)]
|
||||
pub struct Edit {
|
||||
pub old_lines: Range<TabPoint>,
|
||||
pub new_lines: Range<TabPoint>,
|
||||
}
|
||||
pub type TabEdit = text::Edit<TabPoint>;
|
||||
|
||||
#[derive(Clone, Debug, Default, Eq, PartialEq)]
|
||||
pub struct TextSummary {
|
||||
@@ -364,23 +369,25 @@ impl<'a> std::ops::AddAssign<&'a Self> for TextSummary {
|
||||
// Handles a tab width <= 16
|
||||
const SPACES: &'static str = " ";
|
||||
|
||||
pub struct Chunks<'a> {
|
||||
fold_chunks: fold_map::Chunks<'a>,
|
||||
chunk: &'a str,
|
||||
pub struct TabChunks<'a> {
|
||||
fold_chunks: fold_map::FoldChunks<'a>,
|
||||
chunk: Chunk<'a>,
|
||||
column: usize,
|
||||
output_position: Point,
|
||||
max_output_position: Point,
|
||||
tab_size: usize,
|
||||
skip_leading_tab: bool,
|
||||
}
|
||||
|
||||
impl<'a> Iterator for Chunks<'a> {
|
||||
type Item = &'a str;
|
||||
impl<'a> Iterator for TabChunks<'a> {
|
||||
type Item = Chunk<'a>;
|
||||
|
||||
fn next(&mut self) -> Option<Self::Item> {
|
||||
if self.chunk.is_empty() {
|
||||
if self.chunk.text.is_empty() {
|
||||
if let Some(chunk) = self.fold_chunks.next() {
|
||||
self.chunk = chunk;
|
||||
if self.skip_leading_tab {
|
||||
self.chunk = &self.chunk[1..];
|
||||
self.chunk.text = &self.chunk.text[1..];
|
||||
self.skip_leading_tab = false;
|
||||
}
|
||||
} else {
|
||||
@@ -388,88 +395,121 @@ impl<'a> Iterator for Chunks<'a> {
|
||||
}
|
||||
}
|
||||
|
||||
for (ix, c) in self.chunk.char_indices() {
|
||||
for (ix, c) in self.chunk.text.char_indices() {
|
||||
match c {
|
||||
'\t' => {
|
||||
if ix > 0 {
|
||||
let (prefix, suffix) = self.chunk.split_at(ix);
|
||||
self.chunk = suffix;
|
||||
return Some(prefix);
|
||||
let (prefix, suffix) = self.chunk.text.split_at(ix);
|
||||
self.chunk.text = suffix;
|
||||
return Some(Chunk {
|
||||
text: prefix,
|
||||
..self.chunk
|
||||
});
|
||||
} else {
|
||||
self.chunk = &self.chunk[1..];
|
||||
let len = self.tab_size - self.column % self.tab_size;
|
||||
self.chunk.text = &self.chunk.text[1..];
|
||||
let mut len = self.tab_size - self.column % self.tab_size;
|
||||
let next_output_position = cmp::min(
|
||||
self.output_position + Point::new(0, len as u32),
|
||||
self.max_output_position,
|
||||
);
|
||||
len = (next_output_position.column - self.output_position.column) as usize;
|
||||
self.column += len;
|
||||
return Some(&SPACES[0..len]);
|
||||
self.output_position = next_output_position;
|
||||
return Some(Chunk {
|
||||
text: &SPACES[0..len],
|
||||
..self.chunk
|
||||
});
|
||||
}
|
||||
}
|
||||
'\n' => self.column = 0,
|
||||
_ => self.column += 1,
|
||||
}
|
||||
}
|
||||
|
||||
let result = Some(self.chunk);
|
||||
self.chunk = "";
|
||||
result
|
||||
}
|
||||
}
|
||||
|
||||
pub struct HighlightedChunks<'a> {
|
||||
fold_chunks: fold_map::HighlightedChunks<'a>,
|
||||
chunk: &'a str,
|
||||
style_id: HighlightId,
|
||||
column: usize,
|
||||
tab_size: usize,
|
||||
skip_leading_tab: bool,
|
||||
}
|
||||
|
||||
impl<'a> Iterator for HighlightedChunks<'a> {
|
||||
type Item = (&'a str, HighlightId);
|
||||
|
||||
fn next(&mut self) -> Option<Self::Item> {
|
||||
if self.chunk.is_empty() {
|
||||
if let Some((chunk, style_id)) = self.fold_chunks.next() {
|
||||
self.chunk = chunk;
|
||||
self.style_id = style_id;
|
||||
if self.skip_leading_tab {
|
||||
self.chunk = &self.chunk[1..];
|
||||
self.skip_leading_tab = false;
|
||||
'\n' => {
|
||||
self.column = 0;
|
||||
self.output_position += Point::new(1, 0);
|
||||
}
|
||||
} else {
|
||||
return None;
|
||||
}
|
||||
}
|
||||
|
||||
for (ix, c) in self.chunk.char_indices() {
|
||||
match c {
|
||||
'\t' => {
|
||||
if ix > 0 {
|
||||
let (prefix, suffix) = self.chunk.split_at(ix);
|
||||
self.chunk = suffix;
|
||||
return Some((prefix, self.style_id));
|
||||
} else {
|
||||
self.chunk = &self.chunk[1..];
|
||||
let len = self.tab_size - self.column % self.tab_size;
|
||||
self.column += len;
|
||||
return Some((&SPACES[0..len], self.style_id));
|
||||
}
|
||||
_ => {
|
||||
self.column += 1;
|
||||
self.output_position.column += c.len_utf8() as u32;
|
||||
}
|
||||
'\n' => self.column = 0,
|
||||
_ => self.column += 1,
|
||||
}
|
||||
}
|
||||
|
||||
Some((mem::take(&mut self.chunk), mem::take(&mut self.style_id)))
|
||||
Some(mem::take(&mut self.chunk))
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
use crate::{display_map::fold_map::FoldMap, MultiBuffer};
|
||||
use rand::{prelude::StdRng, Rng};
|
||||
use text::{RandomCharIter, Rope};
|
||||
|
||||
#[test]
|
||||
fn test_expand_tabs() {
|
||||
assert_eq!(Snapshot::expand_tabs("\t".chars(), 0, 4), 0);
|
||||
assert_eq!(Snapshot::expand_tabs("\t".chars(), 1, 4), 4);
|
||||
assert_eq!(Snapshot::expand_tabs("\ta".chars(), 2, 4), 5);
|
||||
assert_eq!(TabSnapshot::expand_tabs("\t".chars(), 0, 4), 0);
|
||||
assert_eq!(TabSnapshot::expand_tabs("\t".chars(), 1, 4), 4);
|
||||
assert_eq!(TabSnapshot::expand_tabs("\ta".chars(), 2, 4), 5);
|
||||
}
|
||||
|
||||
#[gpui::test(iterations = 100)]
|
||||
fn test_random_tabs(cx: &mut gpui::MutableAppContext, mut rng: StdRng) {
|
||||
let tab_size = rng.gen_range(1..=4);
|
||||
let len = rng.gen_range(0..30);
|
||||
let buffer = if rng.gen() {
|
||||
let text = RandomCharIter::new(&mut rng).take(len).collect::<String>();
|
||||
MultiBuffer::build_simple(&text, cx)
|
||||
} else {
|
||||
MultiBuffer::build_random(&mut rng, cx)
|
||||
};
|
||||
let buffer_snapshot = buffer.read(cx).snapshot(cx);
|
||||
log::info!("Buffer text: {:?}", buffer_snapshot.text());
|
||||
|
||||
let (mut fold_map, _) = FoldMap::new(buffer_snapshot.clone());
|
||||
fold_map.randomly_mutate(&mut rng);
|
||||
let (folds_snapshot, _) = fold_map.read(buffer_snapshot.clone(), vec![]);
|
||||
log::info!("FoldMap text: {:?}", folds_snapshot.text());
|
||||
|
||||
let (_, tabs_snapshot) = TabMap::new(folds_snapshot.clone(), tab_size);
|
||||
let text = Rope::from(tabs_snapshot.text().as_str());
|
||||
log::info!(
|
||||
"TabMap text (tab size: {}): {:?}",
|
||||
tab_size,
|
||||
tabs_snapshot.text(),
|
||||
);
|
||||
|
||||
for _ in 0..5 {
|
||||
let end_row = rng.gen_range(0..=text.max_point().row);
|
||||
let end_column = rng.gen_range(0..=text.line_len(end_row));
|
||||
let mut end = TabPoint(text.clip_point(Point::new(end_row, end_column), Bias::Right));
|
||||
let start_row = rng.gen_range(0..=text.max_point().row);
|
||||
let start_column = rng.gen_range(0..=text.line_len(start_row));
|
||||
let mut start =
|
||||
TabPoint(text.clip_point(Point::new(start_row, start_column), Bias::Left));
|
||||
if start > end {
|
||||
mem::swap(&mut start, &mut end);
|
||||
}
|
||||
|
||||
let expected_text = text
|
||||
.chunks_in_range(text.point_to_offset(start.0)..text.point_to_offset(end.0))
|
||||
.collect::<String>();
|
||||
let expected_summary = TextSummary::from(expected_text.as_str());
|
||||
assert_eq!(
|
||||
expected_text,
|
||||
tabs_snapshot
|
||||
.chunks(start..end, None)
|
||||
.map(|c| c.text)
|
||||
.collect::<String>(),
|
||||
"chunks({:?}..{:?})",
|
||||
start,
|
||||
end
|
||||
);
|
||||
|
||||
let mut actual_summary = tabs_snapshot.text_summary_for_range(start..end);
|
||||
if tab_size > 1 && folds_snapshot.text().contains('\t') {
|
||||
actual_summary.longest_row = expected_summary.longest_row;
|
||||
actual_summary.longest_row_chars = expected_summary.longest_row_chars;
|
||||
}
|
||||
|
||||
assert_eq!(actual_summary, expected_summary,);
|
||||
}
|
||||
}
|
||||
}
|
||||
File diff suppressed because it is too large
Load Diff
6293
crates/editor/src/editor.rs
Normal file
6293
crates/editor/src/editor.rs
Normal file
File diff suppressed because it is too large
Load Diff
@@ -1,5 +1,10 @@
|
||||
use super::{DisplayPoint, Editor, EditorMode, Insert, Scroll, Select, SelectPhase, Snapshot};
|
||||
use crate::{theme::EditorStyle, time::ReplicaId};
|
||||
use super::{
|
||||
display_map::{BlockContext, ToDisplayPoint},
|
||||
Anchor, DisplayPoint, Editor, EditorMode, EditorSettings, EditorSnapshot, EditorStyle, Input,
|
||||
Scroll, Select, SelectPhase, SoftWrap, ToPoint, MAX_LINE_LEN,
|
||||
};
|
||||
use clock::ReplicaId;
|
||||
use collections::{BTreeMap, HashMap};
|
||||
use gpui::{
|
||||
color::Color,
|
||||
geometry::{
|
||||
@@ -9,26 +14,27 @@ use gpui::{
|
||||
},
|
||||
json::{self, ToJson},
|
||||
keymap::Keystroke,
|
||||
text_layout::{self, TextLayoutCache},
|
||||
AppContext, Axis, Border, Element, Event, EventContext, FontCache, LayoutContext,
|
||||
text_layout::{self, RunStyle, TextLayoutCache},
|
||||
AppContext, Axis, Border, Element, ElementBox, Event, EventContext, FontCache, LayoutContext,
|
||||
MutableAppContext, PaintContext, Quad, Scene, SizeConstraint, ViewContext, WeakViewHandle,
|
||||
};
|
||||
use json::json;
|
||||
use language::{Bias, Chunk};
|
||||
use smallvec::SmallVec;
|
||||
use std::{
|
||||
cmp::{self, Ordering},
|
||||
collections::{BTreeMap, HashMap},
|
||||
fmt::Write,
|
||||
ops::Range,
|
||||
};
|
||||
|
||||
pub struct EditorElement {
|
||||
view: WeakViewHandle<Editor>,
|
||||
style: EditorStyle,
|
||||
settings: EditorSettings,
|
||||
}
|
||||
|
||||
impl EditorElement {
|
||||
pub fn new(view: WeakViewHandle<Editor>, style: EditorStyle) -> Self {
|
||||
Self { view, style }
|
||||
pub fn new(view: WeakViewHandle<Editor>, settings: EditorSettings) -> Self {
|
||||
Self { view, settings }
|
||||
}
|
||||
|
||||
fn view<'a>(&self, cx: &'a AppContext) -> &'a Editor {
|
||||
@@ -42,26 +48,48 @@ impl EditorElement {
|
||||
self.view.upgrade(cx).unwrap().update(cx, f)
|
||||
}
|
||||
|
||||
fn snapshot(&self, cx: &mut MutableAppContext) -> Snapshot {
|
||||
fn snapshot(&self, cx: &mut MutableAppContext) -> EditorSnapshot {
|
||||
self.update_view(cx, |view, cx| view.snapshot(cx))
|
||||
}
|
||||
|
||||
fn mouse_down(
|
||||
&self,
|
||||
position: Vector2F,
|
||||
cmd: bool,
|
||||
alt: bool,
|
||||
shift: bool,
|
||||
mut click_count: usize,
|
||||
layout: &mut LayoutState,
|
||||
paint: &mut PaintState,
|
||||
cx: &mut EventContext,
|
||||
) -> bool {
|
||||
if paint.text_bounds.contains_point(position) {
|
||||
let snapshot = self.snapshot(cx.app);
|
||||
let position = paint.point_for_position(&snapshot, layout, position);
|
||||
cx.dispatch_action(Select(SelectPhase::Begin { position, add: cmd }));
|
||||
true
|
||||
} else {
|
||||
false
|
||||
if paint.gutter_bounds.contains_point(position) {
|
||||
click_count = 3; // Simulate triple-click when clicking the gutter to select lines
|
||||
} else if !paint.text_bounds.contains_point(position) {
|
||||
return false;
|
||||
}
|
||||
|
||||
let snapshot = self.snapshot(cx.app);
|
||||
let (position, overshoot) = paint.point_for_position(&snapshot, layout, position);
|
||||
|
||||
if shift && alt {
|
||||
cx.dispatch_action(Select(SelectPhase::BeginColumnar {
|
||||
position,
|
||||
overshoot,
|
||||
}));
|
||||
} else if shift {
|
||||
cx.dispatch_action(Select(SelectPhase::Extend {
|
||||
position,
|
||||
click_count,
|
||||
}));
|
||||
} else {
|
||||
cx.dispatch_action(Select(SelectPhase::Begin {
|
||||
position,
|
||||
add: alt,
|
||||
click_count,
|
||||
}));
|
||||
}
|
||||
|
||||
true
|
||||
}
|
||||
|
||||
fn mouse_up(&self, _position: Vector2F, cx: &mut EventContext) -> bool {
|
||||
@@ -113,10 +141,11 @@ impl EditorElement {
|
||||
let font_cache = cx.font_cache.clone();
|
||||
let text_layout_cache = cx.text_layout_cache.clone();
|
||||
let snapshot = self.snapshot(cx.app);
|
||||
let position = paint.point_for_position(&snapshot, layout, position);
|
||||
let (position, overshoot) = paint.point_for_position(&snapshot, layout, position);
|
||||
|
||||
cx.dispatch_action(Select(SelectPhase::Update {
|
||||
position,
|
||||
overshoot,
|
||||
scroll_position: (snapshot.scroll_position() + scroll_delta).clamp(
|
||||
Vector2F::zero(),
|
||||
layout.scroll_max(&font_cache, &text_layout_cache),
|
||||
@@ -138,7 +167,7 @@ impl EditorElement {
|
||||
if chars.chars().any(|c| c.is_control()) || keystroke.cmd || keystroke.ctrl {
|
||||
false
|
||||
} else {
|
||||
cx.dispatch_action(Insert(chars.to_string()));
|
||||
cx.dispatch_action(Input(chars.to_string()));
|
||||
true
|
||||
}
|
||||
}
|
||||
@@ -191,15 +220,16 @@ impl EditorElement {
|
||||
let bounds = gutter_bounds.union_rect(text_bounds);
|
||||
let scroll_top = layout.snapshot.scroll_position().y() * layout.line_height;
|
||||
let editor = self.view(cx.app);
|
||||
let style = &self.settings.style;
|
||||
cx.scene.push_quad(Quad {
|
||||
bounds: gutter_bounds,
|
||||
background: Some(self.style.gutter_background),
|
||||
background: Some(style.gutter_background),
|
||||
border: Border::new(0., Color::transparent_black()),
|
||||
corner_radius: 0.,
|
||||
});
|
||||
cx.scene.push_quad(Quad {
|
||||
bounds: text_bounds,
|
||||
background: Some(self.style.background),
|
||||
background: Some(style.background),
|
||||
border: Border::new(0., Color::transparent_black()),
|
||||
corner_radius: 0.,
|
||||
});
|
||||
@@ -226,12 +256,26 @@ impl EditorElement {
|
||||
);
|
||||
cx.scene.push_quad(Quad {
|
||||
bounds: RectF::new(origin, size),
|
||||
background: Some(self.style.active_line_background),
|
||||
background: Some(style.active_line_background),
|
||||
border: Border::default(),
|
||||
corner_radius: 0.,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
if let Some(highlighted_row) = layout.highlighted_row {
|
||||
let origin = vec2f(
|
||||
bounds.origin_x(),
|
||||
bounds.origin_y() + (layout.line_height * highlighted_row as f32) - scroll_top,
|
||||
);
|
||||
let size = vec2f(bounds.width(), layout.line_height);
|
||||
cx.scene.push_quad(Quad {
|
||||
bounds: RectF::new(origin, size),
|
||||
background: Some(style.highlighted_line_background),
|
||||
border: Border::default(),
|
||||
corner_radius: 0.,
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -263,8 +307,8 @@ impl EditorElement {
|
||||
cx: &mut PaintContext,
|
||||
) {
|
||||
let view = self.view(cx.app);
|
||||
let settings = self.view(cx.app).settings.borrow();
|
||||
let theme = &settings.theme.editor;
|
||||
let style = &self.settings.style;
|
||||
let local_replica_id = view.replica_id(cx);
|
||||
let scroll_position = layout.snapshot.scroll_position();
|
||||
let start_row = scroll_position.y() as u32;
|
||||
let scroll_top = scroll_position.y() * layout.line_height;
|
||||
@@ -281,22 +325,16 @@ impl EditorElement {
|
||||
let content_origin = bounds.origin() + layout.text_offset;
|
||||
|
||||
for (replica_id, selections) in &layout.selections {
|
||||
let style_ix = *replica_id as usize % (theme.guest_selections.len() + 1);
|
||||
let style = if style_ix == 0 {
|
||||
&theme.selection
|
||||
} else {
|
||||
&theme.guest_selections[style_ix - 1]
|
||||
};
|
||||
let style = style.replica_selection_style(*replica_id);
|
||||
|
||||
for selection in selections {
|
||||
if selection.start != selection.end {
|
||||
let range_start = cmp::min(selection.start, selection.end);
|
||||
let range_end = cmp::max(selection.start, selection.end);
|
||||
let row_range = if range_end.column() == 0 {
|
||||
cmp::max(range_start.row(), start_row)..cmp::min(range_end.row(), end_row)
|
||||
let row_range = if selection.end.column() == 0 {
|
||||
cmp::max(selection.start.row(), start_row)
|
||||
..cmp::min(selection.end.row(), end_row)
|
||||
} else {
|
||||
cmp::max(range_start.row(), start_row)
|
||||
..cmp::min(range_end.row() + 1, end_row)
|
||||
cmp::max(selection.start.row(), start_row)
|
||||
..cmp::min(selection.end.row() + 1, end_row)
|
||||
};
|
||||
|
||||
let selection = Selection {
|
||||
@@ -309,16 +347,18 @@ impl EditorElement {
|
||||
.map(|row| {
|
||||
let line_layout = &layout.line_layouts[(row - start_row) as usize];
|
||||
SelectionLine {
|
||||
start_x: if row == range_start.row() {
|
||||
start_x: if row == selection.start.row() {
|
||||
content_origin.x()
|
||||
+ line_layout.x_for_index(range_start.column() as usize)
|
||||
+ line_layout
|
||||
.x_for_index(selection.start.column() as usize)
|
||||
- scroll_left
|
||||
} else {
|
||||
content_origin.x() - scroll_left
|
||||
},
|
||||
end_x: if row == range_end.row() {
|
||||
end_x: if row == selection.end.row() {
|
||||
content_origin.x()
|
||||
+ line_layout.x_for_index(range_end.column() as usize)
|
||||
+ line_layout
|
||||
.x_for_index(selection.end.column() as usize)
|
||||
- scroll_left
|
||||
} else {
|
||||
content_origin.x()
|
||||
@@ -334,14 +374,14 @@ impl EditorElement {
|
||||
selection.paint(bounds, cx.scene);
|
||||
}
|
||||
|
||||
if view.cursors_visible() {
|
||||
let cursor_position = selection.end;
|
||||
if view.show_local_cursors() || *replica_id != local_replica_id {
|
||||
let cursor_position = selection.head();
|
||||
if (start_row..end_row).contains(&cursor_position.row()) {
|
||||
let cursor_row_layout =
|
||||
&layout.line_layouts[(selection.end.row() - start_row) as usize];
|
||||
let x = cursor_row_layout.x_for_index(selection.end.column() as usize)
|
||||
&layout.line_layouts[(cursor_position.row() - start_row) as usize];
|
||||
let x = cursor_row_layout.x_for_index(cursor_position.column() as usize)
|
||||
- scroll_left;
|
||||
let y = selection.end.row() as f32 * layout.line_height - scroll_top;
|
||||
let y = cursor_position.row() as f32 * layout.line_height - scroll_top;
|
||||
cursors.push(Cursor {
|
||||
color: style.cursor,
|
||||
origin: content_origin + vec2f(x, y),
|
||||
@@ -374,6 +414,250 @@ impl EditorElement {
|
||||
|
||||
cx.scene.pop_layer();
|
||||
}
|
||||
|
||||
fn paint_blocks(
|
||||
&mut self,
|
||||
text_bounds: RectF,
|
||||
visible_bounds: RectF,
|
||||
layout: &mut LayoutState,
|
||||
cx: &mut PaintContext,
|
||||
) {
|
||||
let scroll_position = layout.snapshot.scroll_position();
|
||||
let scroll_left = scroll_position.x() * layout.em_width;
|
||||
let scroll_top = scroll_position.y() * layout.line_height;
|
||||
|
||||
for (row, element) in &mut layout.blocks {
|
||||
let origin = text_bounds.origin()
|
||||
+ vec2f(-scroll_left, *row as f32 * layout.line_height - scroll_top);
|
||||
element.paint(origin, visible_bounds, cx);
|
||||
}
|
||||
}
|
||||
|
||||
fn max_line_number_width(&self, snapshot: &EditorSnapshot, cx: &LayoutContext) -> f32 {
|
||||
let digit_count = (snapshot.max_buffer_row() as f32).log10().floor() as usize + 1;
|
||||
let style = &self.settings.style;
|
||||
|
||||
cx.text_layout_cache
|
||||
.layout_str(
|
||||
"1".repeat(digit_count).as_str(),
|
||||
style.text.font_size,
|
||||
&[(
|
||||
digit_count,
|
||||
RunStyle {
|
||||
font_id: style.text.font_id,
|
||||
color: Color::black(),
|
||||
underline: None,
|
||||
},
|
||||
)],
|
||||
)
|
||||
.width()
|
||||
}
|
||||
|
||||
fn layout_rows(
|
||||
&self,
|
||||
rows: Range<u32>,
|
||||
active_rows: &BTreeMap<u32, bool>,
|
||||
snapshot: &EditorSnapshot,
|
||||
cx: &LayoutContext,
|
||||
) -> Vec<Option<text_layout::Line>> {
|
||||
let style = &self.settings.style;
|
||||
let include_line_numbers = snapshot.mode == EditorMode::Full;
|
||||
let mut line_number_layouts = Vec::with_capacity(rows.len());
|
||||
let mut line_number = String::new();
|
||||
for (ix, row) in snapshot
|
||||
.buffer_rows(rows.start)
|
||||
.take((rows.end - rows.start) as usize)
|
||||
.enumerate()
|
||||
{
|
||||
let display_row = rows.start + ix as u32;
|
||||
let color = if active_rows.contains_key(&display_row) {
|
||||
style.line_number_active
|
||||
} else {
|
||||
style.line_number
|
||||
};
|
||||
if let Some(buffer_row) = row {
|
||||
if include_line_numbers {
|
||||
line_number.clear();
|
||||
write!(&mut line_number, "{}", buffer_row + 1).unwrap();
|
||||
line_number_layouts.push(Some(cx.text_layout_cache.layout_str(
|
||||
&line_number,
|
||||
style.text.font_size,
|
||||
&[(
|
||||
line_number.len(),
|
||||
RunStyle {
|
||||
font_id: style.text.font_id,
|
||||
color,
|
||||
underline: None,
|
||||
},
|
||||
)],
|
||||
)));
|
||||
}
|
||||
} else {
|
||||
line_number_layouts.push(None);
|
||||
}
|
||||
}
|
||||
|
||||
line_number_layouts
|
||||
}
|
||||
|
||||
fn layout_lines(
|
||||
&mut self,
|
||||
mut rows: Range<u32>,
|
||||
snapshot: &mut EditorSnapshot,
|
||||
cx: &LayoutContext,
|
||||
) -> Vec<text_layout::Line> {
|
||||
rows.end = cmp::min(rows.end, snapshot.max_point().row() + 1);
|
||||
if rows.start >= rows.end {
|
||||
return Vec::new();
|
||||
}
|
||||
|
||||
// When the editor is empty and unfocused, then show the placeholder.
|
||||
if snapshot.is_empty() && !snapshot.is_focused() {
|
||||
let placeholder_style = self.settings.style.placeholder_text();
|
||||
let placeholder_text = snapshot.placeholder_text();
|
||||
let placeholder_lines = placeholder_text
|
||||
.as_ref()
|
||||
.map_or("", AsRef::as_ref)
|
||||
.split('\n')
|
||||
.skip(rows.start as usize)
|
||||
.take(rows.len());
|
||||
return placeholder_lines
|
||||
.map(|line| {
|
||||
cx.text_layout_cache.layout_str(
|
||||
line,
|
||||
placeholder_style.font_size,
|
||||
&[(
|
||||
line.len(),
|
||||
RunStyle {
|
||||
font_id: placeholder_style.font_id,
|
||||
color: placeholder_style.color,
|
||||
underline: None,
|
||||
},
|
||||
)],
|
||||
)
|
||||
})
|
||||
.collect();
|
||||
}
|
||||
|
||||
let style = &self.settings.style;
|
||||
let mut prev_font_properties = style.text.font_properties.clone();
|
||||
let mut prev_font_id = style.text.font_id;
|
||||
|
||||
let mut layouts = Vec::with_capacity(rows.len());
|
||||
let mut line = String::new();
|
||||
let mut styles = Vec::new();
|
||||
let mut row = rows.start;
|
||||
let mut line_exceeded_max_len = false;
|
||||
let chunks = snapshot.chunks(rows.clone(), Some(&style.syntax));
|
||||
|
||||
let newline_chunk = Chunk {
|
||||
text: "\n",
|
||||
..Default::default()
|
||||
};
|
||||
'outer: for chunk in chunks.chain([newline_chunk]) {
|
||||
for (ix, mut line_chunk) in chunk.text.split('\n').enumerate() {
|
||||
if ix > 0 {
|
||||
layouts.push(cx.text_layout_cache.layout_str(
|
||||
&line,
|
||||
style.text.font_size,
|
||||
&styles,
|
||||
));
|
||||
line.clear();
|
||||
styles.clear();
|
||||
row += 1;
|
||||
line_exceeded_max_len = false;
|
||||
if row == rows.end {
|
||||
break 'outer;
|
||||
}
|
||||
}
|
||||
|
||||
if !line_chunk.is_empty() && !line_exceeded_max_len {
|
||||
let highlight_style =
|
||||
chunk.highlight_style.unwrap_or(style.text.clone().into());
|
||||
// Avoid a lookup if the font properties match the previous ones.
|
||||
let font_id = if highlight_style.font_properties == prev_font_properties {
|
||||
prev_font_id
|
||||
} else {
|
||||
cx.font_cache
|
||||
.select_font(
|
||||
style.text.font_family_id,
|
||||
&highlight_style.font_properties,
|
||||
)
|
||||
.unwrap_or(style.text.font_id)
|
||||
};
|
||||
|
||||
if line.len() + line_chunk.len() > MAX_LINE_LEN {
|
||||
let mut chunk_len = MAX_LINE_LEN - line.len();
|
||||
while !line_chunk.is_char_boundary(chunk_len) {
|
||||
chunk_len -= 1;
|
||||
}
|
||||
line_chunk = &line_chunk[..chunk_len];
|
||||
line_exceeded_max_len = true;
|
||||
}
|
||||
|
||||
let underline = if let Some(severity) = chunk.diagnostic {
|
||||
Some(super::diagnostic_style(severity, true, style).text)
|
||||
} else {
|
||||
highlight_style.underline
|
||||
};
|
||||
|
||||
line.push_str(line_chunk);
|
||||
styles.push((
|
||||
line_chunk.len(),
|
||||
RunStyle {
|
||||
font_id,
|
||||
color: highlight_style.color,
|
||||
underline,
|
||||
},
|
||||
));
|
||||
prev_font_id = font_id;
|
||||
prev_font_properties = highlight_style.font_properties;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
layouts
|
||||
}
|
||||
|
||||
fn layout_blocks(
|
||||
&mut self,
|
||||
rows: Range<u32>,
|
||||
snapshot: &EditorSnapshot,
|
||||
text_width: f32,
|
||||
line_height: f32,
|
||||
style: &EditorStyle,
|
||||
line_layouts: &[text_layout::Line],
|
||||
cx: &mut LayoutContext,
|
||||
) -> Vec<(u32, ElementBox)> {
|
||||
snapshot
|
||||
.blocks_in_range(rows.clone())
|
||||
.map(|(start_row, block)| {
|
||||
let anchor_row = block
|
||||
.position()
|
||||
.to_point(&snapshot.buffer_snapshot)
|
||||
.to_display_point(snapshot)
|
||||
.row();
|
||||
|
||||
let anchor_x = if rows.contains(&anchor_row) {
|
||||
line_layouts[(anchor_row - rows.start) as usize]
|
||||
.x_for_index(block.column() as usize)
|
||||
} else {
|
||||
layout_line(anchor_row, snapshot, style, cx.text_layout_cache)
|
||||
.x_for_index(block.column() as usize)
|
||||
};
|
||||
|
||||
let mut element = block.render(&BlockContext { cx, anchor_x });
|
||||
element.layout(
|
||||
SizeConstraint {
|
||||
min: Vector2F::zero(),
|
||||
max: vec2f(text_width, block.height() as f32 * line_height),
|
||||
},
|
||||
cx,
|
||||
);
|
||||
(start_row, element)
|
||||
})
|
||||
.collect()
|
||||
}
|
||||
}
|
||||
|
||||
impl Element for EditorElement {
|
||||
@@ -390,32 +674,30 @@ impl Element for EditorElement {
|
||||
unimplemented!("we don't yet handle an infinite width constraint on buffer elements");
|
||||
}
|
||||
|
||||
let font_cache = &cx.font_cache;
|
||||
let layout_cache = &cx.text_layout_cache;
|
||||
let snapshot = self.snapshot(cx.app);
|
||||
let line_height = snapshot.line_height(font_cache);
|
||||
let style = self.settings.style.clone();
|
||||
let line_height = style.text.line_height(cx.font_cache);
|
||||
|
||||
let gutter_padding;
|
||||
let gutter_width;
|
||||
if snapshot.mode == EditorMode::Full {
|
||||
gutter_padding = snapshot.em_width(cx.font_cache);
|
||||
match snapshot.max_line_number_width(cx.font_cache, cx.text_layout_cache) {
|
||||
Err(error) => {
|
||||
log::error!("error computing max line number width: {}", error);
|
||||
return (size, None);
|
||||
}
|
||||
Ok(width) => gutter_width = width + gutter_padding * 2.0,
|
||||
}
|
||||
gutter_padding = style.text.em_width(cx.font_cache);
|
||||
gutter_width = self.max_line_number_width(&snapshot, cx) + gutter_padding * 2.0;
|
||||
} else {
|
||||
gutter_padding = 0.0;
|
||||
gutter_width = 0.0
|
||||
};
|
||||
|
||||
let text_width = size.x() - gutter_width;
|
||||
let text_offset = vec2f(-snapshot.font_descent(cx.font_cache), 0.);
|
||||
let em_width = snapshot.em_width(font_cache);
|
||||
let text_offset = vec2f(-style.text.descent(cx.font_cache), 0.);
|
||||
let em_width = style.text.em_width(cx.font_cache);
|
||||
let em_advance = style.text.em_advance(cx.font_cache);
|
||||
let overscroll = vec2f(em_width, 0.);
|
||||
let wrap_width = text_width - text_offset.x() - overscroll.x() - em_width;
|
||||
let wrap_width = match self.settings.soft_wrap {
|
||||
SoftWrap::None => None,
|
||||
SoftWrap::EditorWidth => Some(text_width - text_offset.x() - overscroll.x() - em_width),
|
||||
SoftWrap::Column(column) => Some(column as f32 * em_advance),
|
||||
};
|
||||
let snapshot = self.update_view(cx.app, |view, cx| {
|
||||
if view.set_wrap_width(wrap_width, cx) {
|
||||
view.snapshot(cx)
|
||||
@@ -449,90 +731,91 @@ impl Element for EditorElement {
|
||||
let scroll_top = scroll_position.y() * line_height;
|
||||
let end_row = ((scroll_top + size.y()) / line_height).ceil() as u32 + 1; // Add 1 to ensure selections bleed off screen
|
||||
|
||||
let mut selections = HashMap::new();
|
||||
let mut active_rows = BTreeMap::new();
|
||||
self.update_view(cx.app, |view, cx| {
|
||||
for selection_set_id in view.active_selection_sets(cx).collect::<Vec<_>>() {
|
||||
let mut set = Vec::new();
|
||||
for selection in view.selections_in_range(
|
||||
selection_set_id,
|
||||
DisplayPoint::new(start_row, 0)..DisplayPoint::new(end_row, 0),
|
||||
cx,
|
||||
) {
|
||||
set.push(selection.clone());
|
||||
if selection_set_id == view.selection_set_id {
|
||||
let is_empty = selection.start == selection.end;
|
||||
let mut selection_start;
|
||||
let mut selection_end;
|
||||
if selection.start < selection.end {
|
||||
selection_start = selection.start;
|
||||
selection_end = selection.end;
|
||||
} else {
|
||||
selection_start = selection.end;
|
||||
selection_end = selection.start;
|
||||
};
|
||||
selection_start = snapshot.prev_row_boundary(selection_start).0;
|
||||
selection_end = snapshot.next_row_boundary(selection_end).0;
|
||||
for row in cmp::max(selection_start.row(), start_row)
|
||||
..=cmp::min(selection_end.row(), end_row)
|
||||
{
|
||||
let contains_non_empty_selection =
|
||||
active_rows.entry(row).or_insert(!is_empty);
|
||||
*contains_non_empty_selection |= !is_empty;
|
||||
}
|
||||
}
|
||||
}
|
||||
let start_anchor = if start_row == 0 {
|
||||
Anchor::min()
|
||||
} else {
|
||||
snapshot
|
||||
.buffer_snapshot
|
||||
.anchor_before(DisplayPoint::new(start_row, 0).to_offset(&snapshot, Bias::Left))
|
||||
};
|
||||
let end_anchor = if end_row > snapshot.max_point().row() {
|
||||
Anchor::max()
|
||||
} else {
|
||||
snapshot
|
||||
.buffer_snapshot
|
||||
.anchor_before(DisplayPoint::new(end_row, 0).to_offset(&snapshot, Bias::Right))
|
||||
};
|
||||
|
||||
selections.insert(selection_set_id.replica_id, set);
|
||||
let mut selections = HashMap::default();
|
||||
let mut active_rows = BTreeMap::new();
|
||||
let mut highlighted_row = None;
|
||||
self.update_view(cx.app, |view, cx| {
|
||||
highlighted_row = view.highlighted_row();
|
||||
let display_map = view.display_map.update(cx, |map, cx| map.snapshot(cx));
|
||||
|
||||
let local_selections = view
|
||||
.local_selections_in_range(start_anchor.clone()..end_anchor.clone(), &display_map);
|
||||
for selection in &local_selections {
|
||||
let is_empty = selection.start == selection.end;
|
||||
let selection_start = snapshot.prev_line_boundary(selection.start).1;
|
||||
let selection_end = snapshot.next_line_boundary(selection.end).1;
|
||||
for row in cmp::max(selection_start.row(), start_row)
|
||||
..=cmp::min(selection_end.row(), end_row)
|
||||
{
|
||||
let contains_non_empty_selection = active_rows.entry(row).or_insert(!is_empty);
|
||||
*contains_non_empty_selection |= !is_empty;
|
||||
}
|
||||
}
|
||||
selections.insert(
|
||||
view.replica_id(cx),
|
||||
local_selections
|
||||
.into_iter()
|
||||
.map(|selection| crate::Selection {
|
||||
id: selection.id,
|
||||
goal: selection.goal,
|
||||
reversed: selection.reversed,
|
||||
start: selection.start.to_display_point(&display_map),
|
||||
end: selection.end.to_display_point(&display_map),
|
||||
})
|
||||
.collect(),
|
||||
);
|
||||
|
||||
for (replica_id, selection) in display_map
|
||||
.buffer_snapshot
|
||||
.remote_selections_in_range(&(start_anchor..end_anchor))
|
||||
{
|
||||
selections
|
||||
.entry(replica_id)
|
||||
.or_insert(Vec::new())
|
||||
.push(crate::Selection {
|
||||
id: selection.id,
|
||||
goal: selection.goal,
|
||||
reversed: selection.reversed,
|
||||
start: selection.start.to_display_point(&display_map),
|
||||
end: selection.end.to_display_point(&display_map),
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
let line_number_layouts = if snapshot.mode == EditorMode::Full {
|
||||
let settings = self
|
||||
.view
|
||||
.upgrade(cx.app)
|
||||
.unwrap()
|
||||
.read(cx.app)
|
||||
.settings
|
||||
.borrow();
|
||||
match snapshot.layout_line_numbers(
|
||||
start_row..end_row,
|
||||
&active_rows,
|
||||
cx.font_cache,
|
||||
cx.text_layout_cache,
|
||||
&settings.theme,
|
||||
) {
|
||||
Err(error) => {
|
||||
log::error!("error laying out line numbers: {}", error);
|
||||
return (size, None);
|
||||
}
|
||||
Ok(layouts) => layouts,
|
||||
}
|
||||
} else {
|
||||
Vec::new()
|
||||
};
|
||||
let line_number_layouts = self.layout_rows(start_row..end_row, &active_rows, &snapshot, cx);
|
||||
|
||||
let mut max_visible_line_width = 0.0;
|
||||
let line_layouts = match snapshot.layout_lines(
|
||||
start_row..end_row,
|
||||
&self.style,
|
||||
font_cache,
|
||||
layout_cache,
|
||||
) {
|
||||
Err(error) => {
|
||||
log::error!("error laying out lines: {}", error);
|
||||
return (size, None);
|
||||
let line_layouts = self.layout_lines(start_row..end_row, &mut snapshot, cx);
|
||||
for line in &line_layouts {
|
||||
if line.width() > max_visible_line_width {
|
||||
max_visible_line_width = line.width();
|
||||
}
|
||||
Ok(layouts) => {
|
||||
for line in &layouts {
|
||||
if line.width() > max_visible_line_width {
|
||||
max_visible_line_width = line.width();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
layouts
|
||||
}
|
||||
};
|
||||
let blocks = self.layout_blocks(
|
||||
start_row..end_row,
|
||||
&snapshot,
|
||||
text_size.x(),
|
||||
line_height,
|
||||
&style,
|
||||
&line_layouts,
|
||||
cx,
|
||||
);
|
||||
|
||||
let mut layout = LayoutState {
|
||||
size,
|
||||
@@ -542,24 +825,31 @@ impl Element for EditorElement {
|
||||
overscroll,
|
||||
text_offset,
|
||||
snapshot,
|
||||
style: self.settings.style.clone(),
|
||||
active_rows,
|
||||
highlighted_row,
|
||||
line_layouts,
|
||||
line_number_layouts,
|
||||
blocks,
|
||||
line_height,
|
||||
em_width,
|
||||
em_advance,
|
||||
selections,
|
||||
max_visible_line_width,
|
||||
};
|
||||
|
||||
let scroll_max = layout.scroll_max(cx.font_cache, cx.text_layout_cache).x();
|
||||
let scroll_width = layout.scroll_width(cx.text_layout_cache);
|
||||
let max_glyph_width = style.text.em_width(&cx.font_cache);
|
||||
self.update_view(cx.app, |view, cx| {
|
||||
let clamped = view.clamp_scroll_left(layout.scroll_max(font_cache, layout_cache).x());
|
||||
let clamped = view.clamp_scroll_left(scroll_max);
|
||||
let autoscrolled;
|
||||
if autoscroll_horizontally {
|
||||
autoscrolled = view.autoscroll_horizontally(
|
||||
start_row,
|
||||
layout.text_size.x(),
|
||||
layout.scroll_width(font_cache, layout_cache),
|
||||
layout.snapshot.em_width(font_cache),
|
||||
scroll_width,
|
||||
max_glyph_width,
|
||||
&layout.line_layouts,
|
||||
cx,
|
||||
);
|
||||
@@ -596,11 +886,13 @@ impl Element for EditorElement {
|
||||
self.paint_gutter(gutter_bounds, visible_bounds, layout, cx);
|
||||
}
|
||||
self.paint_text(text_bounds, visible_bounds, layout, cx);
|
||||
self.paint_blocks(text_bounds, visible_bounds, layout, cx);
|
||||
|
||||
cx.scene.pop_layer();
|
||||
|
||||
Some(PaintState {
|
||||
bounds,
|
||||
gutter_bounds,
|
||||
text_bounds,
|
||||
})
|
||||
} else {
|
||||
@@ -618,9 +910,13 @@ impl Element for EditorElement {
|
||||
) -> bool {
|
||||
if let (Some(layout), Some(paint)) = (layout, paint) {
|
||||
match event {
|
||||
Event::LeftMouseDown { position, cmd } => {
|
||||
self.mouse_down(*position, *cmd, layout, paint, cx)
|
||||
}
|
||||
Event::LeftMouseDown {
|
||||
position,
|
||||
alt,
|
||||
shift,
|
||||
click_count,
|
||||
..
|
||||
} => self.mouse_down(*position, *alt, *shift, *click_count, layout, paint, cx),
|
||||
Event::LeftMouseUp { position } => self.mouse_up(*position, cx),
|
||||
Event::LeftMouseDragged { position } => {
|
||||
self.mouse_dragged(*position, layout, paint, cx)
|
||||
@@ -659,33 +955,34 @@ pub struct LayoutState {
|
||||
gutter_size: Vector2F,
|
||||
gutter_padding: f32,
|
||||
text_size: Vector2F,
|
||||
snapshot: Snapshot,
|
||||
style: EditorStyle,
|
||||
snapshot: EditorSnapshot,
|
||||
active_rows: BTreeMap<u32, bool>,
|
||||
highlighted_row: Option<u32>,
|
||||
line_layouts: Vec<text_layout::Line>,
|
||||
line_number_layouts: Vec<Option<text_layout::Line>>,
|
||||
blocks: Vec<(u32, ElementBox)>,
|
||||
line_height: f32,
|
||||
em_width: f32,
|
||||
selections: HashMap<ReplicaId, Vec<Range<DisplayPoint>>>,
|
||||
em_advance: f32,
|
||||
selections: HashMap<ReplicaId, Vec<text::Selection<DisplayPoint>>>,
|
||||
overscroll: Vector2F,
|
||||
text_offset: Vector2F,
|
||||
max_visible_line_width: f32,
|
||||
}
|
||||
|
||||
impl LayoutState {
|
||||
fn scroll_width(&self, font_cache: &FontCache, layout_cache: &TextLayoutCache) -> f32 {
|
||||
fn scroll_width(&self, layout_cache: &TextLayoutCache) -> f32 {
|
||||
let row = self.snapshot.longest_row();
|
||||
let longest_line_width = self
|
||||
.snapshot
|
||||
.layout_line(row, font_cache, layout_cache)
|
||||
.unwrap()
|
||||
.width();
|
||||
let longest_line_width =
|
||||
layout_line(row, &self.snapshot, &self.style, layout_cache).width();
|
||||
longest_line_width.max(self.max_visible_line_width) + self.overscroll.x()
|
||||
}
|
||||
|
||||
fn scroll_max(&self, font_cache: &FontCache, layout_cache: &TextLayoutCache) -> Vector2F {
|
||||
let text_width = self.text_size.x();
|
||||
let scroll_width = self.scroll_width(font_cache, layout_cache);
|
||||
let em_width = self.snapshot.em_width(font_cache);
|
||||
let scroll_width = self.scroll_width(layout_cache);
|
||||
let em_width = self.style.text.em_width(font_cache);
|
||||
let max_row = self.snapshot.max_point().row();
|
||||
|
||||
vec2f(
|
||||
@@ -695,18 +992,49 @@ impl LayoutState {
|
||||
}
|
||||
}
|
||||
|
||||
fn layout_line(
|
||||
row: u32,
|
||||
snapshot: &EditorSnapshot,
|
||||
style: &EditorStyle,
|
||||
layout_cache: &TextLayoutCache,
|
||||
) -> text_layout::Line {
|
||||
let mut line = snapshot.line(row);
|
||||
|
||||
if line.len() > MAX_LINE_LEN {
|
||||
let mut len = MAX_LINE_LEN;
|
||||
while !line.is_char_boundary(len) {
|
||||
len -= 1;
|
||||
}
|
||||
line.truncate(len);
|
||||
}
|
||||
|
||||
layout_cache.layout_str(
|
||||
&line,
|
||||
style.text.font_size,
|
||||
&[(
|
||||
snapshot.line_len(row) as usize,
|
||||
RunStyle {
|
||||
font_id: style.text.font_id,
|
||||
color: Color::black(),
|
||||
underline: None,
|
||||
},
|
||||
)],
|
||||
)
|
||||
}
|
||||
|
||||
pub struct PaintState {
|
||||
bounds: RectF,
|
||||
gutter_bounds: RectF,
|
||||
text_bounds: RectF,
|
||||
}
|
||||
|
||||
impl PaintState {
|
||||
fn point_for_position(
|
||||
&self,
|
||||
snapshot: &Snapshot,
|
||||
snapshot: &EditorSnapshot,
|
||||
layout: &LayoutState,
|
||||
position: Vector2F,
|
||||
) -> DisplayPoint {
|
||||
) -> (DisplayPoint, u32) {
|
||||
let scroll_position = snapshot.scroll_position();
|
||||
let position = position - self.text_bounds.origin();
|
||||
let y = position.y().max(0.0).min(layout.size.y());
|
||||
@@ -718,12 +1046,13 @@ impl PaintState {
|
||||
let column = if x >= 0.0 {
|
||||
line.index_for_x(x)
|
||||
.map(|ix| ix as u32)
|
||||
.unwrap_or(snapshot.line_len(row))
|
||||
.unwrap_or_else(|| snapshot.line_len(row))
|
||||
} else {
|
||||
0
|
||||
};
|
||||
let overshoot = (0f32.max(x - line.width()) / layout.em_advance) as u32;
|
||||
|
||||
DisplayPoint::new(row, column)
|
||||
(DisplayPoint::new(row, column), overshoot)
|
||||
}
|
||||
}
|
||||
|
||||
@@ -864,3 +1193,36 @@ fn scale_vertical_mouse_autoscroll_delta(delta: f32) -> f32 {
|
||||
fn scale_horizontal_mouse_autoscroll_delta(delta: f32) -> f32 {
|
||||
delta.powf(1.2) / 300.0
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
use crate::{Editor, EditorSettings, MultiBuffer};
|
||||
use std::sync::Arc;
|
||||
use util::test::sample_text;
|
||||
|
||||
#[gpui::test]
|
||||
fn test_layout_line_numbers(cx: &mut gpui::MutableAppContext) {
|
||||
let settings = EditorSettings::test(cx);
|
||||
let buffer = MultiBuffer::build_simple(&sample_text(6, 6, 'a'), cx);
|
||||
let (window_id, editor) = cx.add_window(Default::default(), |cx| {
|
||||
Editor::for_buffer(
|
||||
buffer,
|
||||
{
|
||||
let settings = settings.clone();
|
||||
Arc::new(move |_| settings.clone())
|
||||
},
|
||||
cx,
|
||||
)
|
||||
});
|
||||
let element = EditorElement::new(editor.downgrade(), settings);
|
||||
|
||||
let layouts = editor.update(cx, |editor, cx| {
|
||||
let snapshot = editor.snapshot(cx);
|
||||
let mut presenter = cx.build_presenter(window_id, 30.);
|
||||
let mut layout_cx = presenter.build_layout_context(false, cx);
|
||||
element.layout_rows(0..6, &Default::default(), &snapshot, &mut layout_cx)
|
||||
});
|
||||
assert_eq!(layouts.len(), 6);
|
||||
}
|
||||
}
|
||||
381
crates/editor/src/items.rs
Normal file
@@ -0,0 +1,381 @@
|
||||
use crate::{Editor, Event};
|
||||
use crate::{MultiBuffer, ToPoint as _};
|
||||
use anyhow::Result;
|
||||
use gpui::{
|
||||
elements::*, AppContext, Entity, ModelContext, ModelHandle, MutableAppContext, RenderContext,
|
||||
Subscription, Task, View, ViewContext, ViewHandle, WeakModelHandle,
|
||||
};
|
||||
use language::{Buffer, Diagnostic, File as _};
|
||||
use postage::watch;
|
||||
use project::{File, ProjectPath, Worktree};
|
||||
use std::fmt::Write;
|
||||
use std::path::Path;
|
||||
use text::{Point, Selection};
|
||||
use workspace::{
|
||||
ItemHandle, ItemView, ItemViewHandle, PathOpener, Settings, StatusItemView, WeakItemHandle,
|
||||
Workspace,
|
||||
};
|
||||
|
||||
pub struct BufferOpener;
|
||||
|
||||
#[derive(Clone)]
|
||||
pub struct BufferItemHandle(pub ModelHandle<Buffer>);
|
||||
|
||||
#[derive(Clone)]
|
||||
struct WeakBufferItemHandle(WeakModelHandle<Buffer>);
|
||||
|
||||
impl PathOpener for BufferOpener {
|
||||
fn open(
|
||||
&self,
|
||||
worktree: &mut Worktree,
|
||||
project_path: ProjectPath,
|
||||
cx: &mut ModelContext<Worktree>,
|
||||
) -> Option<Task<Result<Box<dyn ItemHandle>>>> {
|
||||
let buffer = worktree.open_buffer(project_path.path, cx);
|
||||
let task = cx.spawn(|_, _| async move {
|
||||
let buffer = buffer.await?;
|
||||
Ok(Box::new(BufferItemHandle(buffer)) as Box<dyn ItemHandle>)
|
||||
});
|
||||
Some(task)
|
||||
}
|
||||
}
|
||||
|
||||
impl ItemHandle for BufferItemHandle {
|
||||
fn add_view(
|
||||
&self,
|
||||
window_id: usize,
|
||||
workspace: &Workspace,
|
||||
cx: &mut MutableAppContext,
|
||||
) -> Box<dyn ItemViewHandle> {
|
||||
let buffer = cx.add_model(|cx| MultiBuffer::singleton(self.0.clone(), cx));
|
||||
let weak_buffer = buffer.downgrade();
|
||||
Box::new(cx.add_view(window_id, |cx| {
|
||||
Editor::for_buffer(
|
||||
buffer,
|
||||
crate::settings_builder(weak_buffer, workspace.settings()),
|
||||
cx,
|
||||
)
|
||||
}))
|
||||
}
|
||||
|
||||
fn boxed_clone(&self) -> Box<dyn ItemHandle> {
|
||||
Box::new(self.clone())
|
||||
}
|
||||
|
||||
fn to_any(&self) -> gpui::AnyModelHandle {
|
||||
self.0.clone().into()
|
||||
}
|
||||
|
||||
fn downgrade(&self) -> Box<dyn workspace::WeakItemHandle> {
|
||||
Box::new(WeakBufferItemHandle(self.0.downgrade()))
|
||||
}
|
||||
|
||||
fn project_path(&self, cx: &AppContext) -> Option<ProjectPath> {
|
||||
File::from_dyn(self.0.read(cx).file()).map(|f| ProjectPath {
|
||||
worktree_id: f.worktree_id(cx),
|
||||
path: f.path().clone(),
|
||||
})
|
||||
}
|
||||
|
||||
fn id(&self) -> usize {
|
||||
self.0.id()
|
||||
}
|
||||
}
|
||||
|
||||
impl WeakItemHandle for WeakBufferItemHandle {
|
||||
fn upgrade(&self, cx: &AppContext) -> Option<Box<dyn ItemHandle>> {
|
||||
self.0
|
||||
.upgrade(cx)
|
||||
.map(|buffer| Box::new(BufferItemHandle(buffer)) as Box<dyn ItemHandle>)
|
||||
}
|
||||
|
||||
fn id(&self) -> usize {
|
||||
self.0.id()
|
||||
}
|
||||
}
|
||||
|
||||
impl ItemView for Editor {
|
||||
type ItemHandle = BufferItemHandle;
|
||||
|
||||
fn item_handle(&self, cx: &AppContext) -> Self::ItemHandle {
|
||||
BufferItemHandle(self.buffer.read(cx).as_singleton().unwrap())
|
||||
}
|
||||
|
||||
fn title(&self, cx: &AppContext) -> String {
|
||||
let filename = self
|
||||
.buffer()
|
||||
.read(cx)
|
||||
.file(cx)
|
||||
.and_then(|file| file.file_name());
|
||||
if let Some(name) = filename {
|
||||
name.to_string_lossy().into()
|
||||
} else {
|
||||
"untitled".into()
|
||||
}
|
||||
}
|
||||
|
||||
fn project_path(&self, cx: &AppContext) -> Option<ProjectPath> {
|
||||
File::from_dyn(self.buffer().read(cx).file(cx)).map(|file| ProjectPath {
|
||||
worktree_id: file.worktree_id(cx),
|
||||
path: file.path().clone(),
|
||||
})
|
||||
}
|
||||
|
||||
fn clone_on_split(&self, cx: &mut ViewContext<Self>) -> Option<Self>
|
||||
where
|
||||
Self: Sized,
|
||||
{
|
||||
Some(self.clone(cx))
|
||||
}
|
||||
|
||||
fn is_dirty(&self, cx: &AppContext) -> bool {
|
||||
self.buffer().read(cx).read(cx).is_dirty()
|
||||
}
|
||||
|
||||
fn has_conflict(&self, cx: &AppContext) -> bool {
|
||||
self.buffer().read(cx).read(cx).has_conflict()
|
||||
}
|
||||
|
||||
fn can_save(&self, cx: &AppContext) -> bool {
|
||||
self.project_path(cx).is_some()
|
||||
}
|
||||
|
||||
fn save(&mut self, cx: &mut ViewContext<Self>) -> Result<Task<Result<()>>> {
|
||||
let save = self.buffer().update(cx, |b, cx| b.save(cx))?;
|
||||
Ok(cx.spawn(|_, _| async move {
|
||||
save.await?;
|
||||
Ok(())
|
||||
}))
|
||||
}
|
||||
|
||||
fn can_save_as(&self, _: &AppContext) -> bool {
|
||||
true
|
||||
}
|
||||
|
||||
fn save_as(
|
||||
&mut self,
|
||||
worktree: ModelHandle<Worktree>,
|
||||
path: &Path,
|
||||
cx: &mut ViewContext<Self>,
|
||||
) -> Task<Result<()>> {
|
||||
let buffer = self
|
||||
.buffer()
|
||||
.read(cx)
|
||||
.as_singleton()
|
||||
.expect("cannot call save_as on an excerpt list")
|
||||
.clone();
|
||||
|
||||
buffer.update(cx, |buffer, cx| {
|
||||
let handle = cx.handle();
|
||||
let text = buffer.as_rope().clone();
|
||||
let version = buffer.version();
|
||||
|
||||
let save_as = worktree.update(cx, |worktree, cx| {
|
||||
worktree
|
||||
.as_local_mut()
|
||||
.unwrap()
|
||||
.save_buffer_as(handle, path, text, cx)
|
||||
});
|
||||
|
||||
cx.spawn(|buffer, mut cx| async move {
|
||||
save_as.await.map(|new_file| {
|
||||
let (language, language_server) = worktree.update(&mut cx, |worktree, cx| {
|
||||
let worktree = worktree.as_local_mut().unwrap();
|
||||
let language = worktree
|
||||
.language_registry()
|
||||
.select_language(new_file.full_path())
|
||||
.cloned();
|
||||
let language_server = language
|
||||
.as_ref()
|
||||
.and_then(|language| worktree.register_language(language, cx));
|
||||
(language, language_server.clone())
|
||||
});
|
||||
|
||||
buffer.update(&mut cx, |buffer, cx| {
|
||||
buffer.did_save(version, new_file.mtime, Some(Box::new(new_file)), cx);
|
||||
buffer.set_language(language, language_server, cx);
|
||||
});
|
||||
})
|
||||
})
|
||||
})
|
||||
}
|
||||
|
||||
fn should_activate_item_on_event(event: &Event) -> bool {
|
||||
matches!(event, Event::Activate)
|
||||
}
|
||||
|
||||
fn should_close_item_on_event(event: &Event) -> bool {
|
||||
matches!(event, Event::Closed)
|
||||
}
|
||||
|
||||
fn should_update_tab_on_event(event: &Event) -> bool {
|
||||
matches!(
|
||||
event,
|
||||
Event::Saved | Event::Dirtied | Event::FileHandleChanged
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
pub struct CursorPosition {
|
||||
position: Option<Point>,
|
||||
selected_count: usize,
|
||||
settings: watch::Receiver<Settings>,
|
||||
_observe_active_editor: Option<Subscription>,
|
||||
}
|
||||
|
||||
impl CursorPosition {
|
||||
pub fn new(settings: watch::Receiver<Settings>) -> Self {
|
||||
Self {
|
||||
position: None,
|
||||
selected_count: 0,
|
||||
settings,
|
||||
_observe_active_editor: None,
|
||||
}
|
||||
}
|
||||
|
||||
fn update_position(&mut self, editor: ViewHandle<Editor>, cx: &mut ViewContext<Self>) {
|
||||
let editor = editor.read(cx);
|
||||
let buffer = editor.buffer().read(cx).snapshot(cx);
|
||||
|
||||
self.selected_count = 0;
|
||||
let mut last_selection: Option<Selection<usize>> = None;
|
||||
for selection in editor.local_selections::<usize>(cx) {
|
||||
self.selected_count += selection.end - selection.start;
|
||||
if last_selection
|
||||
.as_ref()
|
||||
.map_or(true, |last_selection| selection.id > last_selection.id)
|
||||
{
|
||||
last_selection = Some(selection);
|
||||
}
|
||||
}
|
||||
self.position = last_selection.map(|s| s.head().to_point(&buffer));
|
||||
|
||||
cx.notify();
|
||||
}
|
||||
}
|
||||
|
||||
impl Entity for CursorPosition {
|
||||
type Event = ();
|
||||
}
|
||||
|
||||
impl View for CursorPosition {
|
||||
fn ui_name() -> &'static str {
|
||||
"CursorPosition"
|
||||
}
|
||||
|
||||
fn render(&mut self, _: &mut RenderContext<Self>) -> ElementBox {
|
||||
if let Some(position) = self.position {
|
||||
let theme = &self.settings.borrow().theme.workspace.status_bar;
|
||||
let mut text = format!("{},{}", position.row + 1, position.column + 1);
|
||||
if self.selected_count > 0 {
|
||||
write!(text, " ({} selected)", self.selected_count).unwrap();
|
||||
}
|
||||
Label::new(text, theme.cursor_position.clone()).boxed()
|
||||
} else {
|
||||
Empty::new().boxed()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl StatusItemView for CursorPosition {
|
||||
fn set_active_pane_item(
|
||||
&mut self,
|
||||
active_pane_item: Option<&dyn ItemViewHandle>,
|
||||
cx: &mut ViewContext<Self>,
|
||||
) {
|
||||
if let Some(editor) = active_pane_item.and_then(|item| item.to_any().downcast::<Editor>()) {
|
||||
self._observe_active_editor = Some(cx.observe(&editor, Self::update_position));
|
||||
self.update_position(editor, cx);
|
||||
} else {
|
||||
self.position = None;
|
||||
self._observe_active_editor = None;
|
||||
}
|
||||
|
||||
cx.notify();
|
||||
}
|
||||
}
|
||||
|
||||
pub struct DiagnosticMessage {
|
||||
settings: watch::Receiver<Settings>,
|
||||
diagnostic: Option<Diagnostic>,
|
||||
_observe_active_editor: Option<Subscription>,
|
||||
}
|
||||
|
||||
impl DiagnosticMessage {
|
||||
pub fn new(settings: watch::Receiver<Settings>) -> Self {
|
||||
Self {
|
||||
diagnostic: None,
|
||||
settings,
|
||||
_observe_active_editor: None,
|
||||
}
|
||||
}
|
||||
|
||||
fn update(&mut self, editor: ViewHandle<Editor>, cx: &mut ViewContext<Self>) {
|
||||
let editor = editor.read(cx);
|
||||
let buffer = editor.buffer().read(cx);
|
||||
let cursor_position = editor.newest_selection::<usize>(&buffer.read(cx)).head();
|
||||
let new_diagnostic = buffer
|
||||
.read(cx)
|
||||
.diagnostics_in_range::<_, usize>(cursor_position..cursor_position)
|
||||
.filter(|entry| !entry.range.is_empty())
|
||||
.min_by_key(|entry| (entry.diagnostic.severity, entry.range.len()))
|
||||
.map(|entry| entry.diagnostic);
|
||||
if new_diagnostic != self.diagnostic {
|
||||
self.diagnostic = new_diagnostic;
|
||||
cx.notify();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Entity for DiagnosticMessage {
|
||||
type Event = ();
|
||||
}
|
||||
|
||||
impl View for DiagnosticMessage {
|
||||
fn ui_name() -> &'static str {
|
||||
"DiagnosticMessage"
|
||||
}
|
||||
|
||||
fn render(&mut self, _: &mut RenderContext<Self>) -> ElementBox {
|
||||
if let Some(diagnostic) = &self.diagnostic {
|
||||
let theme = &self.settings.borrow().theme.workspace.status_bar;
|
||||
Flex::row()
|
||||
.with_child(
|
||||
Svg::new("icons/warning.svg")
|
||||
.with_color(theme.diagnostic_icon_color)
|
||||
.constrained()
|
||||
.with_height(theme.diagnostic_icon_size)
|
||||
.contained()
|
||||
.with_margin_right(theme.diagnostic_icon_spacing)
|
||||
.boxed(),
|
||||
)
|
||||
.with_child(
|
||||
Label::new(
|
||||
diagnostic.message.lines().next().unwrap().to_string(),
|
||||
theme.diagnostic_message.clone(),
|
||||
)
|
||||
.boxed(),
|
||||
)
|
||||
.boxed()
|
||||
} else {
|
||||
Empty::new().boxed()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl StatusItemView for DiagnosticMessage {
|
||||
fn set_active_pane_item(
|
||||
&mut self,
|
||||
active_pane_item: Option<&dyn ItemViewHandle>,
|
||||
cx: &mut ViewContext<Self>,
|
||||
) {
|
||||
if let Some(editor) = active_pane_item.and_then(|item| item.to_any().downcast::<Editor>()) {
|
||||
self._observe_active_editor = Some(cx.observe(&editor, Self::update));
|
||||
self.update(editor, cx);
|
||||
} else {
|
||||
self.diagnostic = Default::default();
|
||||
self._observe_active_editor = None;
|
||||
}
|
||||
cx.notify();
|
||||
}
|
||||
}
|
||||
484
crates/editor/src/movement.rs
Normal file
@@ -0,0 +1,484 @@
use super::{Bias, DisplayPoint, DisplaySnapshot, SelectionGoal, ToDisplayPoint};
use crate::ToPoint;
use anyhow::Result;
use std::{cmp, ops::Range};

pub fn left(map: &DisplaySnapshot, mut point: DisplayPoint) -> Result<DisplayPoint> {
if point.column() > 0 {
*point.column_mut() -= 1;
} else if point.row() > 0 {
*point.row_mut() -= 1;
*point.column_mut() = map.line_len(point.row());
}
Ok(map.clip_point(point, Bias::Left))
}

pub fn right(map: &DisplaySnapshot, mut point: DisplayPoint) -> Result<DisplayPoint> {
let max_column = map.line_len(point.row());
if point.column() < max_column {
*point.column_mut() += 1;
} else if point.row() < map.max_point().row() {
*point.row_mut() += 1;
*point.column_mut() = 0;
}
Ok(map.clip_point(point, Bias::Right))
}
|
||||
pub fn up(
|
||||
map: &DisplaySnapshot,
|
||||
start: DisplayPoint,
|
||||
goal: SelectionGoal,
|
||||
) -> Result<(DisplayPoint, SelectionGoal)> {
|
||||
let mut goal_column = if let SelectionGoal::Column(column) = goal {
|
||||
column
|
||||
} else {
|
||||
map.column_to_chars(start.row(), start.column())
|
||||
};
|
||||
|
||||
let prev_row = start.row().saturating_sub(1);
|
||||
let mut point = map.clip_point(
|
||||
DisplayPoint::new(prev_row, map.line_len(prev_row)),
|
||||
Bias::Left,
|
||||
);
|
||||
if point.row() < start.row() {
|
||||
*point.column_mut() = map.column_from_chars(point.row(), goal_column);
|
||||
} else {
|
||||
point = DisplayPoint::new(0, 0);
|
||||
goal_column = 0;
|
||||
}
|
||||
|
||||
let clip_bias = if point.column() == map.line_len(point.row()) {
|
||||
Bias::Left
|
||||
} else {
|
||||
Bias::Right
|
||||
};
|
||||
|
||||
Ok((
|
||||
map.clip_point(point, clip_bias),
|
||||
SelectionGoal::Column(goal_column),
|
||||
))
|
||||
}
|
||||
|
||||
pub fn down(
|
||||
map: &DisplaySnapshot,
|
||||
start: DisplayPoint,
|
||||
goal: SelectionGoal,
|
||||
) -> Result<(DisplayPoint, SelectionGoal)> {
|
||||
let mut goal_column = if let SelectionGoal::Column(column) = goal {
|
||||
column
|
||||
} else {
|
||||
map.column_to_chars(start.row(), start.column())
|
||||
};
|
||||
|
||||
let next_row = start.row() + 1;
|
||||
let mut point = map.clip_point(DisplayPoint::new(next_row, 0), Bias::Right);
|
||||
if point.row() > start.row() {
|
||||
*point.column_mut() = map.column_from_chars(point.row(), goal_column);
|
||||
} else {
|
||||
point = map.max_point();
|
||||
goal_column = map.column_to_chars(point.row(), point.column())
|
||||
}
|
||||
|
||||
let clip_bias = if point.column() == map.line_len(point.row()) {
|
||||
Bias::Left
|
||||
} else {
|
||||
Bias::Right
|
||||
};
|
||||
|
||||
Ok((
|
||||
map.clip_point(point, clip_bias),
|
||||
SelectionGoal::Column(goal_column),
|
||||
))
|
||||
}
|
||||
|
||||
pub fn line_beginning(
|
||||
map: &DisplaySnapshot,
|
||||
point: DisplayPoint,
|
||||
toggle_indent: bool,
|
||||
) -> DisplayPoint {
|
||||
let (indent, is_blank) = map.line_indent(point.row());
|
||||
if toggle_indent && !is_blank && point.column() != indent {
|
||||
DisplayPoint::new(point.row(), indent)
|
||||
} else {
|
||||
DisplayPoint::new(point.row(), 0)
|
||||
}
|
||||
}
|
||||
|
||||
pub fn line_end(map: &DisplaySnapshot, point: DisplayPoint) -> DisplayPoint {
|
||||
let line_end = DisplayPoint::new(point.row(), map.line_len(point.row()));
|
||||
map.clip_point(line_end, Bias::Left)
|
||||
}
|
||||
|
||||
pub fn prev_word_boundary(map: &DisplaySnapshot, mut point: DisplayPoint) -> DisplayPoint {
|
||||
let mut line_start = 0;
|
||||
if point.row() > 0 {
|
||||
if let Some(indent) = map.soft_wrap_indent(point.row() - 1) {
|
||||
line_start = indent;
|
||||
}
|
||||
}
|
||||
|
||||
if point.column() == line_start {
|
||||
if point.row() == 0 {
|
||||
return DisplayPoint::new(0, 0);
|
||||
} else {
|
||||
let row = point.row() - 1;
|
||||
point = map.clip_point(DisplayPoint::new(row, map.line_len(row)), Bias::Left);
|
||||
}
|
||||
}
|
||||
|
||||
let mut boundary = DisplayPoint::new(point.row(), 0);
|
||||
let mut column = 0;
|
||||
let mut prev_char_kind = CharKind::Newline;
|
||||
for c in map.chars_at(DisplayPoint::new(point.row(), 0)) {
|
||||
if column >= point.column() {
|
||||
break;
|
||||
}
|
||||
|
||||
let char_kind = char_kind(c);
|
||||
if char_kind != prev_char_kind
|
||||
&& char_kind != CharKind::Whitespace
|
||||
&& char_kind != CharKind::Newline
|
||||
{
|
||||
*boundary.column_mut() = column;
|
||||
}
|
||||
|
||||
prev_char_kind = char_kind;
|
||||
column += c.len_utf8() as u32;
|
||||
}
|
||||
boundary
|
||||
}
|
||||
|
||||
pub fn next_word_boundary(map: &DisplaySnapshot, mut point: DisplayPoint) -> DisplayPoint {
|
||||
let mut prev_char_kind = None;
|
||||
for c in map.chars_at(point) {
|
||||
let char_kind = char_kind(c);
|
||||
if let Some(prev_char_kind) = prev_char_kind {
|
||||
if c == '\n' {
|
||||
break;
|
||||
}
|
||||
if prev_char_kind != char_kind
|
||||
&& prev_char_kind != CharKind::Whitespace
|
||||
&& prev_char_kind != CharKind::Newline
|
||||
{
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
if c == '\n' {
|
||||
*point.row_mut() += 1;
|
||||
*point.column_mut() = 0;
|
||||
} else {
|
||||
*point.column_mut() += c.len_utf8() as u32;
|
||||
}
|
||||
prev_char_kind = Some(char_kind);
|
||||
}
|
||||
map.clip_point(point, Bias::Right)
|
||||
}
|
||||
|
||||
pub fn is_inside_word(map: &DisplaySnapshot, point: DisplayPoint) -> bool {
|
||||
let ix = map.clip_point(point, Bias::Left).to_offset(map, Bias::Left);
|
||||
let text = &map.buffer_snapshot;
|
||||
let next_char_kind = text.chars_at(ix).next().map(char_kind);
|
||||
let prev_char_kind = text.reversed_chars_at(ix).next().map(char_kind);
|
||||
prev_char_kind.zip(next_char_kind) == Some((CharKind::Word, CharKind::Word))
|
||||
}
|
||||
|
||||
pub fn surrounding_word(map: &DisplaySnapshot, point: DisplayPoint) -> Range<DisplayPoint> {
|
||||
let mut start = map.clip_point(point, Bias::Left).to_offset(map, Bias::Left);
|
||||
let mut end = start;
|
||||
|
||||
let text = &map.buffer_snapshot;
|
||||
let mut next_chars = text.chars_at(start).peekable();
|
||||
let mut prev_chars = text.reversed_chars_at(start).peekable();
|
||||
let word_kind = cmp::max(
|
||||
prev_chars.peek().copied().map(char_kind),
|
||||
next_chars.peek().copied().map(char_kind),
|
||||
);
|
||||
|
||||
for ch in prev_chars {
|
||||
if Some(char_kind(ch)) == word_kind {
|
||||
start -= ch.len_utf8();
|
||||
} else {
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
for ch in next_chars {
|
||||
if Some(char_kind(ch)) == word_kind {
|
||||
end += ch.len_utf8();
|
||||
} else {
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
start.to_point(&map.buffer_snapshot).to_display_point(map)
|
||||
..end.to_point(&map.buffer_snapshot).to_display_point(map)
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone, Eq, PartialEq, PartialOrd, Ord)]
|
||||
enum CharKind {
|
||||
Newline,
|
||||
Punctuation,
|
||||
Whitespace,
|
||||
Word,
|
||||
}
|
||||
|
||||
fn char_kind(c: char) -> CharKind {
|
||||
if c == '\n' {
|
||||
CharKind::Newline
|
||||
} else if c.is_whitespace() {
|
||||
CharKind::Whitespace
|
||||
} else if c.is_alphanumeric() || c == '_' {
|
||||
CharKind::Word
|
||||
} else {
|
||||
CharKind::Punctuation
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
use crate::{
|
||||
display_map::{BlockDisposition, BlockProperties},
|
||||
Buffer, DisplayMap, ExcerptProperties, MultiBuffer,
|
||||
};
|
||||
use gpui::{elements::Empty, Element};
|
||||
use language::Point;
|
||||
use std::sync::Arc;
|
||||
|
||||
#[gpui::test]
|
||||
fn test_move_up_and_down_with_excerpts(cx: &mut gpui::MutableAppContext) {
|
||||
let family_id = cx.font_cache().load_family(&["Helvetica"]).unwrap();
|
||||
let font_id = cx
|
||||
.font_cache()
|
||||
.select_font(family_id, &Default::default())
|
||||
.unwrap();
|
||||
|
||||
let buffer = cx.add_model(|cx| Buffer::new(0, "abc\ndefg\nhijkl\nmn", cx));
|
||||
let mut excerpt1_header_position = None;
|
||||
let mut excerpt2_header_position = None;
|
||||
let multibuffer = cx.add_model(|cx| {
|
||||
let mut multibuffer = MultiBuffer::new(0);
|
||||
let excerpt1_id = multibuffer.push_excerpt(
|
||||
ExcerptProperties {
|
||||
buffer: &buffer,
|
||||
range: Point::new(0, 0)..Point::new(1, 4),
|
||||
},
|
||||
cx,
|
||||
);
|
||||
let excerpt2_id = multibuffer.push_excerpt(
|
||||
ExcerptProperties {
|
||||
buffer: &buffer,
|
||||
range: Point::new(2, 0)..Point::new(3, 2),
|
||||
},
|
||||
cx,
|
||||
);
|
||||
|
||||
excerpt1_header_position = Some(
|
||||
multibuffer
|
||||
.read(cx)
|
||||
.anchor_in_excerpt(excerpt1_id, language::Anchor::min()),
|
||||
);
|
||||
excerpt2_header_position = Some(
|
||||
multibuffer
|
||||
.read(cx)
|
||||
.anchor_in_excerpt(excerpt2_id, language::Anchor::min()),
|
||||
);
|
||||
multibuffer
|
||||
});
|
||||
|
||||
let display_map =
|
||||
cx.add_model(|cx| DisplayMap::new(multibuffer, 2, font_id, 14.0, None, cx));
|
||||
display_map.update(cx, |display_map, cx| {
|
||||
display_map.insert_blocks(
|
||||
[
|
||||
BlockProperties {
|
||||
position: excerpt1_header_position.unwrap(),
|
||||
height: 2,
|
||||
render: Arc::new(|_| Empty::new().boxed()),
|
||||
disposition: BlockDisposition::Above,
|
||||
},
|
||||
BlockProperties {
|
||||
position: excerpt2_header_position.unwrap(),
|
||||
height: 3,
|
||||
render: Arc::new(|_| Empty::new().boxed()),
|
||||
disposition: BlockDisposition::Above,
|
||||
},
|
||||
],
|
||||
cx,
|
||||
)
|
||||
});
|
||||
|
||||
let snapshot = display_map.update(cx, |map, cx| map.snapshot(cx));
|
||||
assert_eq!(snapshot.text(), "\n\nabc\ndefg\n\n\n\nhijkl\nmn");
|
||||
|
||||
// Can't move up into the first excerpt's header
|
||||
assert_eq!(
|
||||
up(&snapshot, DisplayPoint::new(2, 2), SelectionGoal::Column(2)).unwrap(),
|
||||
(DisplayPoint::new(2, 0), SelectionGoal::Column(0)),
|
||||
);
|
||||
assert_eq!(
|
||||
up(&snapshot, DisplayPoint::new(2, 0), SelectionGoal::None).unwrap(),
|
||||
(DisplayPoint::new(2, 0), SelectionGoal::Column(0)),
|
||||
);
|
||||
|
||||
// Move up and down within first excerpt
|
||||
assert_eq!(
|
||||
up(&snapshot, DisplayPoint::new(3, 4), SelectionGoal::Column(4)).unwrap(),
|
||||
(DisplayPoint::new(2, 3), SelectionGoal::Column(4)),
|
||||
);
|
||||
assert_eq!(
|
||||
down(&snapshot, DisplayPoint::new(2, 3), SelectionGoal::Column(4)).unwrap(),
|
||||
(DisplayPoint::new(3, 4), SelectionGoal::Column(4)),
|
||||
);
|
||||
|
||||
// Move up and down across second excerpt's header
|
||||
assert_eq!(
|
||||
up(&snapshot, DisplayPoint::new(7, 5), SelectionGoal::Column(5)).unwrap(),
|
||||
(DisplayPoint::new(3, 4), SelectionGoal::Column(5)),
|
||||
);
|
||||
assert_eq!(
|
||||
down(&snapshot, DisplayPoint::new(3, 4), SelectionGoal::Column(5)).unwrap(),
|
||||
(DisplayPoint::new(7, 5), SelectionGoal::Column(5)),
|
||||
);
|
||||
|
||||
// Can't move down off the end
|
||||
assert_eq!(
|
||||
down(&snapshot, DisplayPoint::new(8, 0), SelectionGoal::Column(0)).unwrap(),
|
||||
(DisplayPoint::new(8, 2), SelectionGoal::Column(2)),
|
||||
);
|
||||
assert_eq!(
|
||||
down(&snapshot, DisplayPoint::new(8, 2), SelectionGoal::Column(2)).unwrap(),
|
||||
(DisplayPoint::new(8, 2), SelectionGoal::Column(2)),
|
||||
);
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
fn test_prev_next_word_boundary_multibyte(cx: &mut gpui::MutableAppContext) {
|
||||
let tab_size = 4;
|
||||
let family_id = cx.font_cache().load_family(&["Helvetica"]).unwrap();
|
||||
let font_id = cx
|
||||
.font_cache()
|
||||
.select_font(family_id, &Default::default())
|
||||
.unwrap();
|
||||
let font_size = 14.0;
|
||||
|
||||
let buffer = MultiBuffer::build_simple("a bcΔ defγ hi—jk", cx);
|
||||
let display_map =
|
||||
cx.add_model(|cx| DisplayMap::new(buffer, tab_size, font_id, font_size, None, cx));
|
||||
let snapshot = display_map.update(cx, |map, cx| map.snapshot(cx));
|
||||
assert_eq!(
|
||||
prev_word_boundary(&snapshot, DisplayPoint::new(0, 12)),
|
||||
DisplayPoint::new(0, 7)
|
||||
);
|
||||
assert_eq!(
|
||||
prev_word_boundary(&snapshot, DisplayPoint::new(0, 7)),
|
||||
DisplayPoint::new(0, 2)
|
||||
);
|
||||
assert_eq!(
|
||||
prev_word_boundary(&snapshot, DisplayPoint::new(0, 6)),
|
||||
DisplayPoint::new(0, 2)
|
||||
);
|
||||
assert_eq!(
|
||||
prev_word_boundary(&snapshot, DisplayPoint::new(0, 2)),
|
||||
DisplayPoint::new(0, 0)
|
||||
);
|
||||
assert_eq!(
|
||||
prev_word_boundary(&snapshot, DisplayPoint::new(0, 1)),
|
||||
DisplayPoint::new(0, 0)
|
||||
);
|
||||
|
||||
assert_eq!(
|
||||
next_word_boundary(&snapshot, DisplayPoint::new(0, 0)),
|
||||
DisplayPoint::new(0, 1)
|
||||
);
|
||||
assert_eq!(
|
||||
next_word_boundary(&snapshot, DisplayPoint::new(0, 1)),
|
||||
DisplayPoint::new(0, 6)
|
||||
);
|
||||
assert_eq!(
|
||||
next_word_boundary(&snapshot, DisplayPoint::new(0, 2)),
|
||||
DisplayPoint::new(0, 6)
|
||||
);
|
||||
assert_eq!(
|
||||
next_word_boundary(&snapshot, DisplayPoint::new(0, 6)),
|
||||
DisplayPoint::new(0, 12)
|
||||
);
|
||||
assert_eq!(
|
||||
next_word_boundary(&snapshot, DisplayPoint::new(0, 7)),
|
||||
DisplayPoint::new(0, 12)
|
||||
);
|
||||
}
|
||||
|
||||
#[gpui::test]
fn test_surrounding_word(cx: &mut gpui::MutableAppContext) {
    let tab_size = 4;
    let family_id = cx.font_cache().load_family(&["Helvetica"]).unwrap();
    let font_id = cx
        .font_cache()
        .select_font(family_id, &Default::default())
        .unwrap();
    let font_size = 14.0;
    let buffer = MultiBuffer::build_simple("lorem ipsum   dolor\n    sit", cx);
    let display_map =
        cx.add_model(|cx| DisplayMap::new(buffer, tab_size, font_id, font_size, None, cx));
    let snapshot = display_map.update(cx, |map, cx| map.snapshot(cx));

    assert_eq!(
        surrounding_word(&snapshot, DisplayPoint::new(0, 0)),
        DisplayPoint::new(0, 0)..DisplayPoint::new(0, 5)
    );
    assert_eq!(
        surrounding_word(&snapshot, DisplayPoint::new(0, 2)),
        DisplayPoint::new(0, 0)..DisplayPoint::new(0, 5)
    );
    assert_eq!(
        surrounding_word(&snapshot, DisplayPoint::new(0, 5)),
        DisplayPoint::new(0, 0)..DisplayPoint::new(0, 5)
    );
    assert_eq!(
        surrounding_word(&snapshot, DisplayPoint::new(0, 6)),
        DisplayPoint::new(0, 6)..DisplayPoint::new(0, 11)
    );
    assert_eq!(
        surrounding_word(&snapshot, DisplayPoint::new(0, 7)),
        DisplayPoint::new(0, 6)..DisplayPoint::new(0, 11)
    );
    assert_eq!(
        surrounding_word(&snapshot, DisplayPoint::new(0, 11)),
        DisplayPoint::new(0, 6)..DisplayPoint::new(0, 11)
    );
    assert_eq!(
        surrounding_word(&snapshot, DisplayPoint::new(0, 13)),
        DisplayPoint::new(0, 11)..DisplayPoint::new(0, 14)
    );
    assert_eq!(
        surrounding_word(&snapshot, DisplayPoint::new(0, 14)),
        DisplayPoint::new(0, 14)..DisplayPoint::new(0, 19)
    );
    assert_eq!(
        surrounding_word(&snapshot, DisplayPoint::new(0, 17)),
        DisplayPoint::new(0, 14)..DisplayPoint::new(0, 19)
    );
    assert_eq!(
        surrounding_word(&snapshot, DisplayPoint::new(0, 19)),
        DisplayPoint::new(0, 14)..DisplayPoint::new(0, 19)
    );
    assert_eq!(
        surrounding_word(&snapshot, DisplayPoint::new(1, 0)),
        DisplayPoint::new(1, 0)..DisplayPoint::new(1, 4)
    );
    assert_eq!(
        surrounding_word(&snapshot, DisplayPoint::new(1, 1)),
        DisplayPoint::new(1, 0)..DisplayPoint::new(1, 4)
    );
    assert_eq!(
        surrounding_word(&snapshot, DisplayPoint::new(1, 6)),
        DisplayPoint::new(1, 4)..DisplayPoint::new(1, 7)
    );
    assert_eq!(
        surrounding_word(&snapshot, DisplayPoint::new(1, 7)),
        DisplayPoint::new(1, 4)..DisplayPoint::new(1, 7)
    );
}
}
crates/editor/src/multi_buffer.rs (new file, 3120 lines): diff suppressed because it is too large.
crates/editor/src/multi_buffer/anchor.rs (new file, 141 lines)
@@ -0,0 +1,141 @@
|
||||
use super::{ExcerptId, MultiBufferSnapshot, ToOffset, ToPoint};
|
||||
use anyhow::Result;
|
||||
use std::{
|
||||
cmp::Ordering,
|
||||
ops::{Range, Sub},
|
||||
};
|
||||
use sum_tree::Bias;
|
||||
use text::{rope::TextDimension, Point};
|
||||
|
||||
#[derive(Clone, Eq, PartialEq, Debug, Hash)]
|
||||
pub struct Anchor {
|
||||
pub(crate) buffer_id: usize,
|
||||
pub(crate) excerpt_id: ExcerptId,
|
||||
pub(crate) text_anchor: text::Anchor,
|
||||
}
|
||||
|
||||
impl Anchor {
|
||||
pub fn min() -> Self {
|
||||
Self {
|
||||
buffer_id: 0,
|
||||
excerpt_id: ExcerptId::min(),
|
||||
text_anchor: text::Anchor::min(),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn max() -> Self {
|
||||
Self {
|
||||
buffer_id: 0,
|
||||
excerpt_id: ExcerptId::max(),
|
||||
text_anchor: text::Anchor::max(),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn excerpt_id(&self) -> &ExcerptId {
|
||||
&self.excerpt_id
|
||||
}
|
||||
|
||||
pub fn cmp<'a>(&self, other: &Anchor, snapshot: &MultiBufferSnapshot) -> Result<Ordering> {
|
||||
let excerpt_id_cmp = self.excerpt_id.cmp(&other.excerpt_id);
|
||||
if excerpt_id_cmp.is_eq() {
|
||||
if self.excerpt_id == ExcerptId::min() || self.excerpt_id == ExcerptId::max() {
|
||||
Ok(Ordering::Equal)
|
||||
} else if let Some((buffer_id, buffer_snapshot)) =
|
||||
snapshot.buffer_snapshot_for_excerpt(&self.excerpt_id)
|
||||
{
|
||||
// Even though the anchor refers to a valid excerpt the underlying buffer might have
|
||||
// changed. In that case, treat the anchor as if it were at the start of that
|
||||
// excerpt.
|
||||
if self.buffer_id == buffer_id && other.buffer_id == buffer_id {
|
||||
self.text_anchor.cmp(&other.text_anchor, buffer_snapshot)
|
||||
} else if self.buffer_id == buffer_id {
|
||||
Ok(Ordering::Greater)
|
||||
} else if other.buffer_id == buffer_id {
|
||||
Ok(Ordering::Less)
|
||||
} else {
|
||||
Ok(Ordering::Equal)
|
||||
}
|
||||
} else {
|
||||
Ok(Ordering::Equal)
|
||||
}
|
||||
} else {
|
||||
Ok(excerpt_id_cmp)
|
||||
}
|
||||
}
|
||||
|
||||
pub fn bias_left(&self, snapshot: &MultiBufferSnapshot) -> Anchor {
|
||||
if self.text_anchor.bias != Bias::Left {
|
||||
if let Some((buffer_id, buffer_snapshot)) =
|
||||
snapshot.buffer_snapshot_for_excerpt(&self.excerpt_id)
|
||||
{
|
||||
if self.buffer_id == buffer_id {
|
||||
return Self {
|
||||
buffer_id: self.buffer_id,
|
||||
excerpt_id: self.excerpt_id.clone(),
|
||||
text_anchor: self.text_anchor.bias_left(buffer_snapshot),
|
||||
};
|
||||
}
|
||||
}
|
||||
}
|
||||
self.clone()
|
||||
}
|
||||
|
||||
pub fn bias_right(&self, snapshot: &MultiBufferSnapshot) -> Anchor {
|
||||
if self.text_anchor.bias != Bias::Right {
|
||||
if let Some((buffer_id, buffer_snapshot)) =
|
||||
snapshot.buffer_snapshot_for_excerpt(&self.excerpt_id)
|
||||
{
|
||||
if self.buffer_id == buffer_id {
|
||||
return Self {
|
||||
buffer_id: self.buffer_id,
|
||||
excerpt_id: self.excerpt_id.clone(),
|
||||
text_anchor: self.text_anchor.bias_right(buffer_snapshot),
|
||||
};
|
||||
}
|
||||
}
|
||||
}
|
||||
self.clone()
|
||||
}
|
||||
|
||||
pub fn summary<D>(&self, snapshot: &MultiBufferSnapshot) -> D
|
||||
where
|
||||
D: TextDimension + Ord + Sub<D, Output = D>,
|
||||
{
|
||||
snapshot.summary_for_anchor(self)
|
||||
}
|
||||
}
|
||||
|
||||
impl ToOffset for Anchor {
|
||||
fn to_offset<'a>(&self, snapshot: &MultiBufferSnapshot) -> usize {
|
||||
self.summary(snapshot)
|
||||
}
|
||||
}
|
||||
|
||||
impl ToPoint for Anchor {
|
||||
fn to_point<'a>(&self, snapshot: &MultiBufferSnapshot) -> Point {
|
||||
self.summary(snapshot)
|
||||
}
|
||||
}
|
||||
|
||||
pub trait AnchorRangeExt {
|
||||
fn cmp(&self, b: &Range<Anchor>, buffer: &MultiBufferSnapshot) -> Result<Ordering>;
|
||||
fn to_offset(&self, content: &MultiBufferSnapshot) -> Range<usize>;
|
||||
fn to_point(&self, content: &MultiBufferSnapshot) -> Range<Point>;
|
||||
}
|
||||
|
||||
impl AnchorRangeExt for Range<Anchor> {
|
||||
fn cmp(&self, other: &Range<Anchor>, buffer: &MultiBufferSnapshot) -> Result<Ordering> {
|
||||
Ok(match self.start.cmp(&other.start, buffer)? {
|
||||
Ordering::Equal => other.end.cmp(&self.end, buffer)?,
|
||||
ord @ _ => ord,
|
||||
})
|
||||
}
|
||||
|
||||
fn to_offset(&self, content: &MultiBufferSnapshot) -> Range<usize> {
|
||||
self.start.to_offset(&content)..self.end.to_offset(&content)
|
||||
}
|
||||
|
||||
fn to_point(&self, content: &MultiBufferSnapshot) -> Range<Point> {
|
||||
self.start.to_point(&content)..self.end.to_point(&content)
|
||||
}
|
||||
}
|
||||
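The Range<Anchor> ordering above compares starts first and, when the starts tie, reverses the end comparison, so of two ranges that share a start the longer one sorts first. A minimal standalone sketch of that ordering convention on plain usize ranges (a simplified model for illustration, not the real multi-buffer API):

use std::cmp::Ordering;
use std::ops::Range;

// Same convention as AnchorRangeExt::cmp above, but on plain offsets:
// order by start ascending, then by end descending.
fn cmp_ranges(a: &Range<usize>, b: &Range<usize>) -> Ordering {
    match a.start.cmp(&b.start) {
        Ordering::Equal => b.end.cmp(&a.end),
        ord => ord,
    }
}

fn main() {
    let mut ranges = vec![2..3, 0..10, 0..4, 5..6];
    ranges.sort_by(|a, b| cmp_ranges(a, b));
    // The enclosing range 0..10 sorts before 0..4 because larger ends come first.
    assert_eq!(ranges, vec![0..10, 0..4, 2..3, 5..6]);
    println!("{:?}", ranges);
}
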
crates/editor/src/test.rs (new file, 6 lines)
@@ -0,0 +1,6 @@
#[cfg(test)]
#[ctor::ctor]
fn init_logger() {
    // std::env::set_var("RUST_LOG", "info");
    env_logger::init();
}
crates/file_finder/Cargo.toml (new file, 22 lines)
@@ -0,0 +1,22 @@
[package]
name = "file_finder"
version = "0.1.0"
edition = "2018"

[lib]
path = "src/file_finder.rs"

[dependencies]
editor = { path = "../editor" }
fuzzy = { path = "../fuzzy" }
gpui = { path = "../gpui" }
project = { path = "../project" }
util = { path = "../util" }
theme = { path = "../theme" }
workspace = { path = "../workspace" }
postage = { version = "0.4.1", features = ["futures-traits"] }

[dev-dependencies]
gpui = { path = "../gpui", features = ["test-support"] }
serde_json = { version = "1.0.64", features = ["preserve_order"] }
workspace = { path = "../workspace", features = ["test-support"] }
@@ -1,10 +1,5 @@
|
||||
use crate::{
|
||||
editor::{self, Editor},
|
||||
settings::Settings,
|
||||
util,
|
||||
workspace::Workspace,
|
||||
worktree::{match_paths, PathMatch},
|
||||
};
|
||||
use editor::{Editor, EditorSettings};
|
||||
use fuzzy::PathMatch;
|
||||
use gpui::{
|
||||
action,
|
||||
elements::*,
|
||||
@@ -13,10 +8,11 @@ use gpui::{
|
||||
menu::{SelectNext, SelectPrev},
|
||||
Binding,
|
||||
},
|
||||
AppContext, Axis, Entity, MutableAppContext, RenderContext, Task, View, ViewContext,
|
||||
ViewHandle, WeakViewHandle,
|
||||
AppContext, Axis, Entity, ModelHandle, MutableAppContext, RenderContext, Task, View,
|
||||
ViewContext, ViewHandle, WeakViewHandle,
|
||||
};
|
||||
use postage::watch;
|
||||
use project::{Project, ProjectPath, WorktreeId};
|
||||
use std::{
|
||||
cmp,
|
||||
path::Path,
|
||||
@@ -25,11 +21,13 @@ use std::{
|
||||
Arc,
|
||||
},
|
||||
};
|
||||
use util::post_inc;
|
||||
use workspace::{Settings, Workspace};
|
||||
|
||||
pub struct FileFinder {
|
||||
handle: WeakViewHandle<Self>,
|
||||
settings: watch::Receiver<Settings>,
|
||||
workspace: WeakViewHandle<Workspace>,
|
||||
project: ModelHandle<Project>,
|
||||
query_editor: ViewHandle<Editor>,
|
||||
search_count: usize,
|
||||
latest_search_id: usize,
|
||||
@@ -43,13 +41,7 @@ pub struct FileFinder {
|
||||
|
||||
action!(Toggle);
|
||||
action!(Confirm);
|
||||
action!(Select, Entry);
|
||||
|
||||
#[derive(Clone)]
|
||||
pub struct Entry {
|
||||
worktree_id: usize,
|
||||
path: Arc<Path>,
|
||||
}
|
||||
action!(Select, ProjectPath);
|
||||
|
||||
pub fn init(cx: &mut MutableAppContext) {
|
||||
cx.add_action(FileFinder::toggle);
|
||||
@@ -66,7 +58,7 @@ pub fn init(cx: &mut MutableAppContext) {
|
||||
}
|
||||
|
||||
pub enum Event {
|
||||
Selected(usize, Arc<Path>),
|
||||
Selected(ProjectPath),
|
||||
Dismissed,
|
||||
}
|
||||
|
||||
@@ -88,13 +80,13 @@ impl View for FileFinder {
|
||||
Flex::new(Axis::Vertical)
|
||||
.with_child(
|
||||
Container::new(ChildView::new(self.query_editor.id()).boxed())
|
||||
.with_style(&settings.theme.selector.input_editor.container)
|
||||
.with_style(settings.theme.selector.input_editor.container)
|
||||
.boxed(),
|
||||
)
|
||||
.with_child(Flexible::new(1.0, self.render_matches()).boxed())
|
||||
.with_child(Flexible::new(1.0, false, self.render_matches()).boxed())
|
||||
.boxed(),
|
||||
)
|
||||
.with_style(&settings.theme.selector.container)
|
||||
.with_style(settings.theme.selector.container)
|
||||
.boxed(),
|
||||
)
|
||||
.with_max_width(500.0)
|
||||
@@ -127,7 +119,7 @@ impl FileFinder {
|
||||
)
|
||||
.boxed(),
|
||||
)
|
||||
.with_style(&settings.theme.selector.empty.container)
|
||||
.with_style(settings.theme.selector.empty.container)
|
||||
.named("empty matches");
|
||||
}
|
||||
|
||||
@@ -183,6 +175,7 @@ impl FileFinder {
|
||||
.with_child(
|
||||
Flexible::new(
|
||||
1.0,
|
||||
false,
|
||||
Flex::column()
|
||||
.with_child(
|
||||
Label::new(file_name.to_string(), style.label.clone())
|
||||
@@ -200,10 +193,10 @@ impl FileFinder {
|
||||
)
|
||||
.boxed(),
|
||||
)
|
||||
.with_style(&style.container);
|
||||
.with_style(style.container);
|
||||
|
||||
let action = Select(Entry {
|
||||
worktree_id: path_match.tree_id,
|
||||
let action = Select(ProjectPath {
|
||||
worktree_id: WorktreeId::from_usize(path_match.worktree_id),
|
||||
path: path_match.path.clone(),
|
||||
});
|
||||
EventHandler::new(container.boxed())
|
||||
@@ -241,8 +234,8 @@ impl FileFinder {
|
||||
|
||||
fn toggle(workspace: &mut Workspace, _: &Toggle, cx: &mut ViewContext<Workspace>) {
|
||||
workspace.toggle_modal(cx, |cx, workspace| {
|
||||
let handle = cx.handle();
|
||||
let finder = cx.add_view(|cx| Self::new(workspace.settings.clone(), handle, cx));
|
||||
let project = workspace.project().clone();
|
||||
let finder = cx.add_view(|cx| Self::new(workspace.settings.clone(), project, cx));
|
||||
cx.subscribe(&finder, Self::on_event).detach();
|
||||
finder
|
||||
});
|
||||
@@ -255,10 +248,10 @@ impl FileFinder {
|
||||
cx: &mut ViewContext<Workspace>,
|
||||
) {
|
||||
match event {
|
||||
Event::Selected(tree_id, path) => {
|
||||
Event::Selected(project_path) => {
|
||||
workspace
|
||||
.open_entry((*tree_id, path.clone()), cx)
|
||||
.map(|d| d.detach());
|
||||
.open_path(project_path.clone(), cx)
|
||||
.detach_and_log_err(cx);
|
||||
workspace.dismiss_modal(cx);
|
||||
}
|
||||
Event::Dismissed => {
|
||||
@@ -269,24 +262,34 @@ impl FileFinder {
|
||||
|
||||
pub fn new(
|
||||
settings: watch::Receiver<Settings>,
|
||||
workspace: ViewHandle<Workspace>,
|
||||
project: ModelHandle<Project>,
|
||||
cx: &mut ViewContext<Self>,
|
||||
) -> Self {
|
||||
cx.observe(&workspace, Self::workspace_updated).detach();
|
||||
cx.observe(&project, Self::project_updated).detach();
|
||||
|
||||
let query_editor = cx.add_view(|cx| {
|
||||
Editor::single_line(settings.clone(), cx).with_style({
|
||||
let settings = settings.clone();
|
||||
move |_| settings.borrow().theme.selector.input_editor.as_editor()
|
||||
})
|
||||
Editor::single_line(
|
||||
{
|
||||
let settings = settings.clone();
|
||||
Arc::new(move |_| {
|
||||
let settings = settings.borrow();
|
||||
EditorSettings {
|
||||
style: settings.theme.selector.input_editor.as_editor(),
|
||||
tab_size: settings.tab_size,
|
||||
soft_wrap: editor::SoftWrap::None,
|
||||
}
|
||||
})
|
||||
},
|
||||
cx,
|
||||
)
|
||||
});
|
||||
cx.subscribe(&query_editor, Self::on_query_editor_event)
|
||||
.detach();
|
||||
|
||||
Self {
|
||||
handle: cx.handle().downgrade(),
|
||||
handle: cx.weak_handle(),
|
||||
settings,
|
||||
workspace: workspace.downgrade(),
|
||||
project,
|
||||
query_editor,
|
||||
search_count: 0,
|
||||
latest_search_id: 0,
|
||||
@@ -299,7 +302,7 @@ impl FileFinder {
|
||||
}
|
||||
}
|
||||
|
||||
fn workspace_updated(&mut self, _: ViewHandle<Workspace>, cx: &mut ViewContext<Self>) {
|
||||
fn project_updated(&mut self, _: ModelHandle<Project>, cx: &mut ViewContext<Self>) {
|
||||
let query = self.query_editor.update(cx, |buffer, cx| buffer.text(cx));
|
||||
if let Some(task) = self.spawn_search(query, cx) {
|
||||
task.detach();
|
||||
@@ -316,7 +319,7 @@ impl FileFinder {
|
||||
editor::Event::Edited => {
|
||||
let query = self.query_editor.update(cx, |buffer, cx| buffer.text(cx));
|
||||
if query.is_empty() {
|
||||
self.latest_search_id = util::post_inc(&mut self.search_count);
|
||||
self.latest_search_id = post_inc(&mut self.search_count);
|
||||
self.matches.clear();
|
||||
cx.notify();
|
||||
} else {
|
||||
@@ -333,7 +336,7 @@ impl FileFinder {
|
||||
fn selected_index(&self) -> usize {
|
||||
if let Some(selected) = self.selected.as_ref() {
|
||||
for (ix, path_match) in self.matches.iter().enumerate() {
|
||||
if (path_match.tree_id, path_match.path.as_ref())
|
||||
if (path_match.worktree_id, path_match.path.as_ref())
|
||||
== (selected.0, selected.1.as_ref())
|
||||
{
|
||||
return ix;
|
||||
@@ -348,7 +351,7 @@ impl FileFinder {
|
||||
if selected_index > 0 {
|
||||
selected_index -= 1;
|
||||
let mat = &self.matches[selected_index];
|
||||
self.selected = Some((mat.tree_id, mat.path.clone()));
|
||||
self.selected = Some((mat.worktree_id, mat.path.clone()));
|
||||
}
|
||||
self.list_state.scroll_to(selected_index);
|
||||
cx.notify();
|
||||
@@ -359,7 +362,7 @@ impl FileFinder {
|
||||
if selected_index + 1 < self.matches.len() {
|
||||
selected_index += 1;
|
||||
let mat = &self.matches[selected_index];
|
||||
self.selected = Some((mat.tree_id, mat.path.clone()));
|
||||
self.selected = Some((mat.worktree_id, mat.path.clone()));
|
||||
}
|
||||
self.list_state.scroll_to(selected_index);
|
||||
cx.notify();
|
||||
@@ -367,40 +370,30 @@ impl FileFinder {
|
||||
|
||||
fn confirm(&mut self, _: &Confirm, cx: &mut ViewContext<Self>) {
|
||||
if let Some(m) = self.matches.get(self.selected_index()) {
|
||||
cx.emit(Event::Selected(m.tree_id, m.path.clone()));
|
||||
cx.emit(Event::Selected(ProjectPath {
|
||||
worktree_id: WorktreeId::from_usize(m.worktree_id),
|
||||
path: m.path.clone(),
|
||||
}));
|
||||
}
|
||||
}
|
||||
|
||||
fn select(&mut self, Select(entry): &Select, cx: &mut ViewContext<Self>) {
|
||||
cx.emit(Event::Selected(entry.worktree_id, entry.path.clone()));
|
||||
fn select(&mut self, Select(project_path): &Select, cx: &mut ViewContext<Self>) {
|
||||
cx.emit(Event::Selected(project_path.clone()));
|
||||
}
|
||||
|
||||
#[must_use]
|
||||
fn spawn_search(&mut self, query: String, cx: &mut ViewContext<Self>) -> Option<Task<()>> {
|
||||
let snapshots = self
|
||||
.workspace
|
||||
.upgrade(&cx)?
|
||||
.read(cx)
|
||||
.worktrees()
|
||||
.iter()
|
||||
.map(|tree| tree.read(cx).snapshot())
|
||||
.collect::<Vec<_>>();
|
||||
let search_id = util::post_inc(&mut self.search_count);
|
||||
let background = cx.as_ref().background().clone();
|
||||
self.cancel_flag.store(true, atomic::Ordering::Relaxed);
|
||||
self.cancel_flag = Arc::new(AtomicBool::new(false));
|
||||
let cancel_flag = self.cancel_flag.clone();
|
||||
let project = self.project.clone();
|
||||
Some(cx.spawn(|this, mut cx| async move {
|
||||
let matches = match_paths(
|
||||
&snapshots,
|
||||
&query,
|
||||
false,
|
||||
false,
|
||||
100,
|
||||
cancel_flag.as_ref(),
|
||||
background,
|
||||
)
|
||||
.await;
|
||||
let matches = project
|
||||
.read_with(&cx, |project, cx| {
|
||||
project.match_paths(&query, false, false, 100, cancel_flag.as_ref(), cx)
|
||||
})
|
||||
.await;
|
||||
let did_cancel = cancel_flag.load(atomic::Ordering::Relaxed);
|
||||
this.update(&mut cx, |this, cx| {
|
||||
this.update_matches((search_id, did_cancel, query, matches), cx)
|
||||
@@ -431,32 +424,39 @@ impl FileFinder {
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
use crate::{
|
||||
editor::{self, Insert},
|
||||
fs::FakeFs,
|
||||
test::{temp_tree, test_app_state},
|
||||
workspace::Workspace,
|
||||
};
|
||||
use editor::Input;
|
||||
use serde_json::json;
|
||||
use std::fs;
|
||||
use tempdir::TempDir;
|
||||
use std::path::PathBuf;
|
||||
use workspace::{Workspace, WorkspaceParams};
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_matching_paths(mut cx: gpui::TestAppContext) {
|
||||
let tmp_dir = TempDir::new("example").unwrap();
|
||||
fs::create_dir(tmp_dir.path().join("a")).unwrap();
|
||||
fs::write(tmp_dir.path().join("a/banana"), "banana").unwrap();
|
||||
fs::write(tmp_dir.path().join("a/bandana"), "bandana").unwrap();
|
||||
let mut path_openers = Vec::new();
|
||||
cx.update(|cx| {
|
||||
super::init(cx);
|
||||
editor::init(cx);
|
||||
editor::init(cx, &mut path_openers);
|
||||
});
|
||||
|
||||
let app_state = cx.update(test_app_state);
|
||||
let (window_id, workspace) = cx.add_window(|cx| Workspace::new(&app_state, cx));
|
||||
let mut params = cx.update(WorkspaceParams::test);
|
||||
params.path_openers = Arc::from(path_openers);
|
||||
params
|
||||
.fs
|
||||
.as_fake()
|
||||
.insert_tree(
|
||||
"/root",
|
||||
json!({
|
||||
"a": {
|
||||
"banana": "",
|
||||
"bandana": "",
|
||||
}
|
||||
}),
|
||||
)
|
||||
.await;
|
||||
|
||||
let (window_id, workspace) = cx.add_window(|cx| Workspace::new(¶ms, cx));
|
||||
workspace
|
||||
.update(&mut cx, |workspace, cx| {
|
||||
workspace.add_worktree(tmp_dir.path(), cx)
|
||||
workspace.add_worktree(Path::new("/root"), cx)
|
||||
})
|
||||
.await
|
||||
.unwrap();
|
||||
@@ -476,9 +476,9 @@ mod tests {
|
||||
let query_buffer = cx.read(|cx| finder.read(cx).query_editor.clone());
|
||||
|
||||
let chain = vec![finder.id(), query_buffer.id()];
|
||||
cx.dispatch_action(window_id, chain.clone(), Insert("b".into()));
|
||||
cx.dispatch_action(window_id, chain.clone(), Insert("n".into()));
|
||||
cx.dispatch_action(window_id, chain.clone(), Insert("a".into()));
|
||||
cx.dispatch_action(window_id, chain.clone(), Input("b".into()));
|
||||
cx.dispatch_action(window_id, chain.clone(), Input("n".into()));
|
||||
cx.dispatch_action(window_id, chain.clone(), Input("a".into()));
|
||||
finder
|
||||
.condition(&cx, |finder, _| finder.matches.len() == 2)
|
||||
.await;
|
||||
@@ -497,7 +497,8 @@ mod tests {
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_matching_cancellation(mut cx: gpui::TestAppContext) {
|
||||
let fs = Arc::new(FakeFs::new());
|
||||
let params = cx.update(WorkspaceParams::test);
|
||||
let fs = params.fs.as_fake();
|
||||
fs.insert_tree(
|
||||
"/dir",
|
||||
json!({
|
||||
@@ -512,10 +513,7 @@ mod tests {
|
||||
)
|
||||
.await;
|
||||
|
||||
let mut app_state = cx.update(test_app_state);
|
||||
Arc::get_mut(&mut app_state).unwrap().fs = fs;
|
||||
|
||||
let (_, workspace) = cx.add_window(|cx| Workspace::new(&app_state, cx));
|
||||
let (_, workspace) = cx.add_window(|cx| Workspace::new(¶ms, cx));
|
||||
workspace
|
||||
.update(&mut cx, |workspace, cx| {
|
||||
workspace.add_worktree("/dir".as_ref(), cx)
|
||||
@@ -524,8 +522,13 @@ mod tests {
|
||||
.unwrap();
|
||||
cx.read(|cx| workspace.read(cx).worktree_scans_complete(cx))
|
||||
.await;
|
||||
let (_, finder) =
|
||||
cx.add_window(|cx| FileFinder::new(app_state.settings.clone(), workspace.clone(), cx));
|
||||
let (_, finder) = cx.add_window(|cx| {
|
||||
FileFinder::new(
|
||||
params.settings.clone(),
|
||||
workspace.read(cx).project().clone(),
|
||||
cx,
|
||||
)
|
||||
});
|
||||
|
||||
let query = "hi".to_string();
|
||||
finder
|
||||
@@ -568,24 +571,29 @@ mod tests {
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_single_file_worktrees(mut cx: gpui::TestAppContext) {
|
||||
let temp_dir = TempDir::new("test-single-file-worktrees").unwrap();
|
||||
let dir_path = temp_dir.path().join("the-parent-dir");
|
||||
let file_path = dir_path.join("the-file");
|
||||
fs::create_dir(&dir_path).unwrap();
|
||||
fs::write(&file_path, "").unwrap();
|
||||
let params = cx.update(WorkspaceParams::test);
|
||||
params
|
||||
.fs
|
||||
.as_fake()
|
||||
.insert_tree("/root", json!({ "the-parent-dir": { "the-file": "" } }))
|
||||
.await;
|
||||
|
||||
let app_state = cx.update(test_app_state);
|
||||
let (_, workspace) = cx.add_window(|cx| Workspace::new(&app_state, cx));
|
||||
let (_, workspace) = cx.add_window(|cx| Workspace::new(¶ms, cx));
|
||||
workspace
|
||||
.update(&mut cx, |workspace, cx| {
|
||||
workspace.add_worktree(&file_path, cx)
|
||||
workspace.add_worktree(Path::new("/root/the-parent-dir/the-file"), cx)
|
||||
})
|
||||
.await
|
||||
.unwrap();
|
||||
cx.read(|cx| workspace.read(cx).worktree_scans_complete(cx))
|
||||
.await;
|
||||
let (_, finder) =
|
||||
cx.add_window(|cx| FileFinder::new(app_state.settings.clone(), workspace.clone(), cx));
|
||||
let (_, finder) = cx.add_window(|cx| {
|
||||
FileFinder::new(
|
||||
params.settings.clone(),
|
||||
workspace.read(cx).project().clone(),
|
||||
cx,
|
||||
)
|
||||
});
|
||||
|
||||
// Even though there is only one worktree, that worktree's filename
|
||||
// is included in the matching, because the worktree is a single file.
|
||||
@@ -616,18 +624,25 @@ mod tests {
|
||||
|
||||
#[gpui::test(retries = 5)]
|
||||
async fn test_multiple_matches_with_same_relative_path(mut cx: gpui::TestAppContext) {
|
||||
let tmp_dir = temp_tree(json!({
|
||||
"dir1": { "a.txt": "" },
|
||||
"dir2": { "a.txt": "" }
|
||||
}));
|
||||
let params = cx.update(WorkspaceParams::test);
|
||||
params
|
||||
.fs
|
||||
.as_fake()
|
||||
.insert_tree(
|
||||
"/root",
|
||||
json!({
|
||||
"dir1": { "a.txt": "" },
|
||||
"dir2": { "a.txt": "" }
|
||||
}),
|
||||
)
|
||||
.await;
|
||||
|
||||
let app_state = cx.update(test_app_state);
|
||||
let (_, workspace) = cx.add_window(|cx| Workspace::new(&app_state, cx));
|
||||
let (_, workspace) = cx.add_window(|cx| Workspace::new(¶ms, cx));
|
||||
|
||||
workspace
|
||||
.update(&mut cx, |workspace, cx| {
|
||||
workspace.open_paths(
|
||||
&[tmp_dir.path().join("dir1"), tmp_dir.path().join("dir2")],
|
||||
&[PathBuf::from("/root/dir1"), PathBuf::from("/root/dir2")],
|
||||
cx,
|
||||
)
|
||||
})
|
||||
@@ -635,8 +650,13 @@ mod tests {
|
||||
cx.read(|cx| workspace.read(cx).worktree_scans_complete(cx))
|
||||
.await;
|
||||
|
||||
let (_, finder) =
|
||||
cx.add_window(|cx| FileFinder::new(app_state.settings.clone(), workspace.clone(), cx));
|
||||
let (_, finder) = cx.add_window(|cx| {
|
||||
FileFinder::new(
|
||||
params.settings.clone(),
|
||||
workspace.read(cx).project().clone(),
|
||||
cx,
|
||||
)
|
||||
});
|
||||
|
||||
// Run a search that matches two files with the same relative path.
|
||||
finder
|
||||
@@ -4,6 +4,9 @@ version = "2.0.2"
|
||||
license = "MIT"
|
||||
edition = "2018"
|
||||
|
||||
[lib]
|
||||
path = "src/fsevent.rs"
|
||||
|
||||
[dependencies]
|
||||
bitflags = "1"
|
||||
fsevent-sys = "3.0.2"
|
||||
crates/fuzzy/Cargo.toml (new file, 11 lines)
@@ -0,0 +1,11 @@
[package]
name = "fuzzy"
version = "0.1.0"
edition = "2018"

[lib]
path = "src/fuzzy.rs"

[dependencies]
gpui = { path = "../gpui" }
util = { path = "../util" }
@@ -1,13 +1,9 @@
|
||||
mod char_bag;
|
||||
|
||||
use crate::{
|
||||
util,
|
||||
worktree::{EntryKind, Snapshot},
|
||||
};
|
||||
use gpui::executor;
|
||||
use std::{
|
||||
borrow::Cow,
|
||||
cmp::{max, min, Ordering},
|
||||
cmp::{self, Ordering},
|
||||
path::Path,
|
||||
sync::atomic::{self, AtomicBool},
|
||||
sync::Arc,
|
||||
@@ -19,7 +15,7 @@ const BASE_DISTANCE_PENALTY: f64 = 0.6;
|
||||
const ADDITIONAL_DISTANCE_PENALTY: f64 = 0.05;
|
||||
const MIN_DISTANCE_PENALTY: f64 = 0.2;
|
||||
|
||||
struct Matcher<'a> {
|
||||
pub struct Matcher<'a> {
|
||||
query: &'a [char],
|
||||
lowercase_query: &'a [char],
|
||||
query_char_bag: CharBag,
|
||||
@@ -52,7 +48,7 @@ pub struct PathMatchCandidate<'a> {
|
||||
pub struct PathMatch {
|
||||
pub score: f64,
|
||||
pub positions: Vec<usize>,
|
||||
pub tree_id: usize,
|
||||
pub worktree_id: usize,
|
||||
pub path: Arc<Path>,
|
||||
pub path_prefix: Arc<str>,
|
||||
}
|
||||
@@ -63,6 +59,14 @@ pub struct StringMatchCandidate {
|
||||
pub char_bag: CharBag,
|
||||
}
|
||||
|
||||
pub trait PathMatchCandidateSet<'a>: Send + Sync {
|
||||
type Candidates: Iterator<Item = PathMatchCandidate<'a>>;
|
||||
fn id(&self) -> usize;
|
||||
fn len(&self) -> usize;
|
||||
fn prefix(&self) -> Arc<str>;
|
||||
fn candidates(&'a self, start: usize) -> Self::Candidates;
|
||||
}
|
||||
|
||||
impl Match for PathMatch {
|
||||
fn score(&self) -> f64 {
|
||||
self.score
|
||||
@@ -152,7 +156,7 @@ impl Ord for PathMatch {
|
||||
self.score
|
||||
.partial_cmp(&other.score)
|
||||
.unwrap_or(Ordering::Equal)
|
||||
.then_with(|| self.tree_id.cmp(&other.tree_id))
|
||||
.then_with(|| self.worktree_id.cmp(&other.worktree_id))
|
||||
.then_with(|| Arc::as_ptr(&self.path).cmp(&Arc::as_ptr(&other.path)))
|
||||
}
|
||||
}
|
||||
@@ -213,20 +217,15 @@ pub async fn match_strings(
|
||||
results
|
||||
}
|
||||
|
||||
pub async fn match_paths(
|
||||
snapshots: &[Snapshot],
|
||||
pub async fn match_paths<'a, Set: PathMatchCandidateSet<'a>>(
|
||||
candidate_sets: &'a [Set],
|
||||
query: &str,
|
||||
include_ignored: bool,
|
||||
smart_case: bool,
|
||||
max_results: usize,
|
||||
cancel_flag: &AtomicBool,
|
||||
background: Arc<executor::Background>,
|
||||
) -> Vec<PathMatch> {
|
||||
let path_count: usize = if include_ignored {
|
||||
snapshots.iter().map(Snapshot::file_count).sum()
|
||||
} else {
|
||||
snapshots.iter().map(Snapshot::visible_file_count).sum()
|
||||
};
|
||||
let path_count: usize = candidate_sets.iter().map(|s| s.len()).sum();
|
||||
if path_count == 0 {
|
||||
return Vec::new();
|
||||
}
|
||||
@@ -259,45 +258,18 @@ pub async fn match_paths(
|
||||
);
|
||||
|
||||
let mut tree_start = 0;
|
||||
for snapshot in snapshots {
|
||||
let tree_end = if include_ignored {
|
||||
tree_start + snapshot.file_count()
|
||||
} else {
|
||||
tree_start + snapshot.visible_file_count()
|
||||
};
|
||||
for candidate_set in candidate_sets {
|
||||
let tree_end = tree_start + candidate_set.len();
|
||||
|
||||
if tree_start < segment_end && segment_start < tree_end {
|
||||
let path_prefix: Arc<str> =
|
||||
if snapshot.root_entry().map_or(false, |e| e.is_file()) {
|
||||
snapshot.root_name().into()
|
||||
} else if snapshots.len() > 1 {
|
||||
format!("{}/", snapshot.root_name()).into()
|
||||
} else {
|
||||
"".into()
|
||||
};
|
||||
|
||||
let start = max(tree_start, segment_start) - tree_start;
|
||||
let end = min(tree_end, segment_end) - tree_start;
|
||||
let entries = if include_ignored {
|
||||
snapshot.files(start).take(end - start)
|
||||
} else {
|
||||
snapshot.visible_files(start).take(end - start)
|
||||
};
|
||||
let paths = entries.map(|entry| {
|
||||
if let EntryKind::File(char_bag) = entry.kind {
|
||||
PathMatchCandidate {
|
||||
path: &entry.path,
|
||||
char_bag,
|
||||
}
|
||||
} else {
|
||||
unreachable!()
|
||||
}
|
||||
});
|
||||
let start = cmp::max(tree_start, segment_start) - tree_start;
|
||||
let end = cmp::min(tree_end, segment_end) - tree_start;
|
||||
let candidates = candidate_set.candidates(start).take(end - start);
|
||||
|
||||
matcher.match_paths(
|
||||
snapshot.id(),
|
||||
path_prefix,
|
||||
paths,
|
||||
candidate_set.id(),
|
||||
candidate_set.prefix(),
|
||||
candidates,
|
||||
results,
|
||||
&cancel_flag,
|
||||
);
|
||||
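The loop above walks the candidate sets with a running offset (tree_start..tree_end) and clips each set against the segment of the global index space being searched. A standalone sketch of just that index arithmetic, with plain vectors standing in for PathMatchCandidateSet (illustrative only; clip_sets, set_lens, and segment are made-up names):

use std::cmp;
use std::ops::Range;

// For each set, return the local range of candidates that falls inside `segment`,
// mirroring the tree_start/tree_end clipping in match_paths above.
fn clip_sets(set_lens: &[usize], segment: Range<usize>) -> Vec<Range<usize>> {
    let mut slices = Vec::new();
    let mut tree_start = 0;
    for &len in set_lens {
        let tree_end = tree_start + len;
        if tree_start < segment.end && segment.start < tree_end {
            let start = cmp::max(tree_start, segment.start) - tree_start;
            let end = cmp::min(tree_end, segment.end) - tree_start;
            slices.push(start..end);
        } else {
            slices.push(0..0);
        }
        tree_start = tree_end;
    }
    slices
}

fn main() {
    // Three sets of 10, 5, and 20 candidates; search global indices 8..20.
    assert_eq!(clip_sets(&[10, 5, 20], 8..20), vec![8..10, 0..5, 0..5]);
}
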
@@ -324,7 +296,7 @@ pub async fn match_paths(
|
||||
}
|
||||
|
||||
impl<'a> Matcher<'a> {
|
||||
fn new(
|
||||
pub fn new(
|
||||
query: &'a [char],
|
||||
lowercase_query: &'a [char],
|
||||
query_char_bag: CharBag,
|
||||
@@ -345,7 +317,7 @@ impl<'a> Matcher<'a> {
|
||||
}
|
||||
}
|
||||
|
||||
fn match_strings(
|
||||
pub fn match_strings(
|
||||
&mut self,
|
||||
candidates: &[StringMatchCandidate],
|
||||
results: &mut Vec<StringMatch>,
|
||||
@@ -365,11 +337,11 @@ impl<'a> Matcher<'a> {
|
||||
)
|
||||
}
|
||||
|
||||
fn match_paths(
|
||||
pub fn match_paths<'c: 'a>(
|
||||
&mut self,
|
||||
tree_id: usize,
|
||||
path_prefix: Arc<str>,
|
||||
path_entries: impl Iterator<Item = PathMatchCandidate<'a>>,
|
||||
path_entries: impl Iterator<Item = PathMatchCandidate<'c>>,
|
||||
results: &mut Vec<PathMatch>,
|
||||
cancel_flag: &AtomicBool,
|
||||
) {
|
||||
@@ -386,7 +358,7 @@ impl<'a> Matcher<'a> {
|
||||
cancel_flag,
|
||||
|candidate, score| PathMatch {
|
||||
score,
|
||||
tree_id,
|
||||
worktree_id: tree_id,
|
||||
positions: Vec::new(),
|
||||
path: candidate.path.clone(),
|
||||
path_prefix: path_prefix.clone(),
|
||||
crates/go_to_line/Cargo.toml (new file, 14 lines)
@@ -0,0 +1,14 @@
[package]
name = "go_to_line"
version = "0.1.0"
edition = "2018"

[lib]
path = "src/go_to_line.rs"

[dependencies]
text = { path = "../text" }
editor = { path = "../editor" }
gpui = { path = "../gpui" }
workspace = { path = "../workspace" }
postage = { version = "0.4", features = ["futures-traits"] }
crates/go_to_line/src/go_to_line.rs (new file, 224 lines)
@@ -0,0 +1,224 @@
|
||||
use editor::{display_map::ToDisplayPoint, Autoscroll, Editor, EditorSettings};
|
||||
use gpui::{
|
||||
action, elements::*, geometry::vector::Vector2F, keymap::Binding, Axis, Entity,
|
||||
MutableAppContext, RenderContext, View, ViewContext, ViewHandle,
|
||||
};
|
||||
use postage::watch;
|
||||
use std::sync::Arc;
|
||||
use text::{Bias, Point, Selection};
|
||||
use workspace::{Settings, Workspace};
|
||||
|
||||
action!(Toggle);
|
||||
action!(Confirm);
|
||||
|
||||
pub fn init(cx: &mut MutableAppContext) {
|
||||
cx.add_bindings([
|
||||
Binding::new("ctrl-g", Toggle, Some("Editor")),
|
||||
Binding::new("escape", Toggle, Some("GoToLine")),
|
||||
Binding::new("enter", Confirm, Some("GoToLine")),
|
||||
]);
|
||||
cx.add_action(GoToLine::toggle);
|
||||
cx.add_action(GoToLine::confirm);
|
||||
}
|
||||
|
||||
pub struct GoToLine {
|
||||
settings: watch::Receiver<Settings>,
|
||||
line_editor: ViewHandle<Editor>,
|
||||
active_editor: ViewHandle<Editor>,
|
||||
restore_state: Option<RestoreState>,
|
||||
line_selection: Option<Selection<usize>>,
|
||||
cursor_point: Point,
|
||||
max_point: Point,
|
||||
}
|
||||
|
||||
struct RestoreState {
|
||||
scroll_position: Vector2F,
|
||||
selections: Vec<Selection<usize>>,
|
||||
}
|
||||
|
||||
pub enum Event {
|
||||
Dismissed,
|
||||
}
|
||||
|
||||
impl GoToLine {
|
||||
pub fn new(
|
||||
active_editor: ViewHandle<Editor>,
|
||||
settings: watch::Receiver<Settings>,
|
||||
cx: &mut ViewContext<Self>,
|
||||
) -> Self {
|
||||
let line_editor = cx.add_view(|cx| {
|
||||
Editor::single_line(
|
||||
{
|
||||
let settings = settings.clone();
|
||||
Arc::new(move |_| {
|
||||
let settings = settings.borrow();
|
||||
EditorSettings {
|
||||
tab_size: settings.tab_size,
|
||||
style: settings.theme.selector.input_editor.as_editor(),
|
||||
soft_wrap: editor::SoftWrap::None,
|
||||
}
|
||||
})
|
||||
},
|
||||
cx,
|
||||
)
|
||||
});
|
||||
cx.subscribe(&line_editor, Self::on_line_editor_event)
|
||||
.detach();
|
||||
|
||||
let (restore_state, cursor_point, max_point) = active_editor.update(cx, |editor, cx| {
|
||||
let restore_state = Some(RestoreState {
|
||||
scroll_position: editor.scroll_position(cx),
|
||||
selections: editor.local_selections::<usize>(cx),
|
||||
});
|
||||
|
||||
let buffer = editor.buffer().read(cx).read(cx);
|
||||
(
|
||||
restore_state,
|
||||
editor.newest_selection(&buffer).head(),
|
||||
buffer.max_point(),
|
||||
)
|
||||
});
|
||||
|
||||
Self {
|
||||
settings: settings.clone(),
|
||||
line_editor,
|
||||
active_editor,
|
||||
restore_state,
|
||||
line_selection: None,
|
||||
cursor_point,
|
||||
max_point,
|
||||
}
|
||||
}
|
||||
|
||||
fn toggle(workspace: &mut Workspace, _: &Toggle, cx: &mut ViewContext<Workspace>) {
|
||||
workspace.toggle_modal(cx, |cx, workspace| {
|
||||
let editor = workspace
|
||||
.active_item(cx)
|
||||
.unwrap()
|
||||
.to_any()
|
||||
.downcast::<Editor>()
|
||||
.unwrap();
|
||||
let view = cx.add_view(|cx| GoToLine::new(editor, workspace.settings.clone(), cx));
|
||||
cx.subscribe(&view, Self::on_event).detach();
|
||||
view
|
||||
});
|
||||
}
|
||||
|
||||
fn confirm(&mut self, _: &Confirm, cx: &mut ViewContext<Self>) {
|
||||
self.restore_state.take();
|
||||
cx.emit(Event::Dismissed);
|
||||
}
|
||||
|
||||
fn on_event(
|
||||
workspace: &mut Workspace,
|
||||
_: ViewHandle<Self>,
|
||||
event: &Event,
|
||||
cx: &mut ViewContext<Workspace>,
|
||||
) {
|
||||
match event {
|
||||
Event::Dismissed => workspace.dismiss_modal(cx),
|
||||
}
|
||||
}
|
||||
|
||||
fn on_line_editor_event(
|
||||
&mut self,
|
||||
_: ViewHandle<Editor>,
|
||||
event: &editor::Event,
|
||||
cx: &mut ViewContext<Self>,
|
||||
) {
|
||||
match event {
|
||||
editor::Event::Blurred => cx.emit(Event::Dismissed),
|
||||
editor::Event::Edited => {
|
||||
let line_editor = self.line_editor.read(cx).buffer().read(cx).read(cx).text();
|
||||
let mut components = line_editor.trim().split(&[',', ':'][..]);
|
||||
let row = components.next().and_then(|row| row.parse::<u32>().ok());
|
||||
let column = components.next().and_then(|row| row.parse::<u32>().ok());
|
||||
if let Some(point) = row.map(|row| {
|
||||
Point::new(
|
||||
row.saturating_sub(1),
|
||||
column.map(|column| column.saturating_sub(1)).unwrap_or(0),
|
||||
)
|
||||
}) {
|
||||
self.line_selection = self.active_editor.update(cx, |active_editor, cx| {
|
||||
let snapshot = active_editor.snapshot(cx).display_snapshot;
|
||||
let point = snapshot.buffer_snapshot.clip_point(point, Bias::Left);
|
||||
let display_point = point.to_display_point(&snapshot);
|
||||
active_editor.select_ranges([point..point], Some(Autoscroll::Center), cx);
|
||||
active_editor.set_highlighted_row(Some(display_point.row()));
|
||||
Some(active_editor.newest_selection(&snapshot.buffer_snapshot))
|
||||
});
|
||||
cx.notify();
|
||||
}
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
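The Edited arm above turns free-form input like "42", "42:7", or "42,7" into a zero-based position. A self-contained sketch of just that parsing step, with a plain (row, column) tuple standing in for text::Point and the buffer clamping (clip_point above) omitted:

// Parse "row", "row:column", or "row,column" (1-based) into a 0-based position.
fn parse_go_to_line(input: &str) -> Option<(u32, u32)> {
    let mut components = input.trim().split(&[',', ':'][..]);
    let row = components.next().and_then(|row| row.parse::<u32>().ok())?;
    let column = components.next().and_then(|col| col.parse::<u32>().ok());
    Some((
        row.saturating_sub(1),
        column.map(|column| column.saturating_sub(1)).unwrap_or(0),
    ))
}

fn main() {
    assert_eq!(parse_go_to_line("42"), Some((41, 0)));
    assert_eq!(parse_go_to_line(" 42:7 "), Some((41, 6)));
    assert_eq!(parse_go_to_line("1,1"), Some((0, 0)));
    assert_eq!(parse_go_to_line("abc"), None);
}
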
impl Entity for GoToLine {
|
||||
type Event = Event;
|
||||
|
||||
fn release(&mut self, cx: &mut MutableAppContext) {
|
||||
let line_selection = self.line_selection.take();
|
||||
let restore_state = self.restore_state.take();
|
||||
self.active_editor.update(cx, |editor, cx| {
|
||||
editor.set_highlighted_row(None);
|
||||
if let Some((line_selection, restore_state)) = line_selection.zip(restore_state) {
|
||||
let newest_selection =
|
||||
editor.newest_selection::<usize>(&editor.buffer().read(cx).read(cx));
|
||||
if line_selection.id == newest_selection.id {
|
||||
editor.set_scroll_position(restore_state.scroll_position, cx);
|
||||
editor.update_selections(restore_state.selections, None, cx);
|
||||
}
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
impl View for GoToLine {
|
||||
fn ui_name() -> &'static str {
|
||||
"GoToLine"
|
||||
}
|
||||
|
||||
fn render(&mut self, _: &mut RenderContext<Self>) -> ElementBox {
|
||||
let theme = &self.settings.borrow().theme.selector;
|
||||
|
||||
let label = format!(
|
||||
"{},{} of {} lines",
|
||||
self.cursor_point.row + 1,
|
||||
self.cursor_point.column + 1,
|
||||
self.max_point.row + 1
|
||||
);
|
||||
|
||||
Align::new(
|
||||
ConstrainedBox::new(
|
||||
Container::new(
|
||||
Flex::new(Axis::Vertical)
|
||||
.with_child(
|
||||
Container::new(ChildView::new(self.line_editor.id()).boxed())
|
||||
.with_style(theme.input_editor.container)
|
||||
.boxed(),
|
||||
)
|
||||
.with_child(
|
||||
Container::new(Label::new(label, theme.empty.label.clone()).boxed())
|
||||
.with_style(theme.empty.container)
|
||||
.boxed(),
|
||||
)
|
||||
.boxed(),
|
||||
)
|
||||
.with_style(theme.container)
|
||||
.boxed(),
|
||||
)
|
||||
.with_max_width(500.0)
|
||||
.boxed(),
|
||||
)
|
||||
.top()
|
||||
.named("go to line")
|
||||
}
|
||||
|
||||
fn on_focus(&mut self, cx: &mut ViewContext<Self>) {
|
||||
cx.focus(&self.line_editor);
|
||||
}
|
||||
|
||||
fn on_blur(&mut self, _: &mut ViewContext<Self>) {}
|
||||
}
|
||||
@@ -4,13 +4,22 @@ edition = "2018"
|
||||
name = "gpui"
|
||||
version = "0.1.0"
|
||||
|
||||
[lib]
|
||||
path = "src/gpui.rs"
|
||||
|
||||
[features]
|
||||
test-support = ["env_logger"]
|
||||
|
||||
[dependencies]
|
||||
arrayvec = "0.7.1"
|
||||
gpui_macros = { path = "../gpui_macros" }
|
||||
sum_tree = { path = "../sum_tree" }
|
||||
async-task = "4.0.3"
|
||||
backtrace = "0.3"
|
||||
ctor = "0.1"
|
||||
env_logger = { version = "0.8", optional = true }
|
||||
etagere = "0.2"
|
||||
gpui_macros = { path = "../gpui_macros" }
|
||||
futures = "0.3"
|
||||
image = "0.23"
|
||||
lazy_static = "1.4.0"
|
||||
log = "0.4"
|
||||
num_cpus = "1.13"
|
||||
@@ -29,7 +38,7 @@ smallvec = { version = "1.6", features = ["union"] }
|
||||
smol = "1.2"
|
||||
time = { version = "0.3" }
|
||||
tiny-skia = "0.5"
|
||||
tree-sitter = "0.19"
|
||||
tree-sitter = "0.20"
|
||||
usvg = "0.14"
|
||||
waker-fn = "1.1.0"
|
||||
|
||||
@@ -62,7 +62,7 @@ impl gpui::Element for TextElement {
|
||||
.select_font(family, &Default::default())
|
||||
.unwrap(),
|
||||
color: Color::default(),
|
||||
underline: false,
|
||||
underline: None,
|
||||
};
|
||||
let bold = RunStyle {
|
||||
font_id: cx
|
||||
@@ -76,7 +76,7 @@ impl gpui::Element for TextElement {
|
||||
)
|
||||
.unwrap(),
|
||||
color: Color::default(),
|
||||
underline: false,
|
||||
underline: None,
|
||||
};
|
||||
|
||||
let text = "Hello world!";
|
||||
@@ -14,7 +14,7 @@ include = ["bindings/rust/*", "grammar.js", "queries/*", "src/*"]
|
||||
path = "bindings/rust/lib.rs"
|
||||
|
||||
[dependencies]
|
||||
tree-sitter = "0.19.3"
|
||||
tree-sitter = "0.20"
|
||||
|
||||
[build-dependencies]
|
||||
cc = "1.0"
|
||||
File diff suppressed because it is too large.
@@ -29,6 +29,18 @@ impl Color {
|
||||
Self(ColorU::white())
|
||||
}
|
||||
|
||||
pub fn red() -> Self {
|
||||
Self(ColorU::from_u32(0xff0000ff))
|
||||
}
|
||||
|
||||
pub fn green() -> Self {
|
||||
Self(ColorU::from_u32(0x00ff00ff))
|
||||
}
|
||||
|
||||
pub fn blue() -> Self {
|
||||
Self(ColorU::from_u32(0x0000ffff))
|
||||
}
|
||||
|
||||
pub fn new(r: u8, g: u8, b: u8, a: u8) -> Self {
|
||||
Self(ColorU::new(r, g, b, a))
|
||||
}
|
||||
@@ -4,10 +4,11 @@ mod constrained_box;
|
||||
mod container;
|
||||
mod empty;
|
||||
mod event_handler;
|
||||
mod expanded;
|
||||
mod flex;
|
||||
mod hook;
|
||||
mod image;
|
||||
mod label;
|
||||
mod line_box;
|
||||
mod list;
|
||||
mod mouse_event_handler;
|
||||
mod overlay;
|
||||
@@ -16,27 +17,18 @@ mod svg;
|
||||
mod text;
|
||||
mod uniform_list;
|
||||
|
||||
use self::expanded::Expanded;
|
||||
pub use self::{
|
||||
align::*, canvas::*, constrained_box::*, container::*, empty::*, event_handler::*, flex::*,
|
||||
hook::*, image::*, label::*, list::*, mouse_event_handler::*, overlay::*, stack::*, svg::*,
|
||||
text::*, uniform_list::*,
|
||||
};
|
||||
pub use crate::presenter::ChildView;
|
||||
pub use align::*;
|
||||
pub use canvas::*;
|
||||
pub use constrained_box::*;
|
||||
pub use container::*;
|
||||
pub use empty::*;
|
||||
pub use event_handler::*;
|
||||
pub use flex::*;
|
||||
pub use hook::*;
|
||||
pub use label::*;
|
||||
pub use line_box::*;
|
||||
pub use list::*;
|
||||
pub use mouse_event_handler::*;
|
||||
pub use overlay::*;
|
||||
pub use stack::*;
|
||||
pub use svg::*;
|
||||
pub use text::*;
|
||||
pub use uniform_list::*;
|
||||
|
||||
use crate::{
|
||||
geometry::{rect::RectF, vector::Vector2F},
|
||||
geometry::{
|
||||
rect::RectF,
|
||||
vector::{vec2f, Vector2F},
|
||||
},
|
||||
json, DebugContext, Event, EventContext, LayoutContext, PaintContext, SizeConstraint,
|
||||
};
|
||||
use core::panic;
|
||||
@@ -118,6 +110,41 @@ pub trait Element {
|
||||
element: Rc::new(RefCell::new(Lifecycle::Init { element: self })),
|
||||
})
|
||||
}
|
||||
|
||||
fn constrained(self) -> ConstrainedBox
|
||||
where
|
||||
Self: 'static + Sized,
|
||||
{
|
||||
ConstrainedBox::new(self.boxed())
|
||||
}
|
||||
|
||||
fn aligned(self) -> Align
|
||||
where
|
||||
Self: 'static + Sized,
|
||||
{
|
||||
Align::new(self.boxed())
|
||||
}
|
||||
|
||||
fn contained(self) -> Container
|
||||
where
|
||||
Self: 'static + Sized,
|
||||
{
|
||||
Container::new(self.boxed())
|
||||
}
|
||||
|
||||
fn expanded(self) -> Expanded
|
||||
where
|
||||
Self: 'static + Sized,
|
||||
{
|
||||
Expanded::new(self.boxed())
|
||||
}
|
||||
|
||||
fn flexible(self, flex: f32, expanded: bool) -> Flexible
|
||||
where
|
||||
Self: 'static + Sized,
|
||||
{
|
||||
Flexible::new(flex, expanded, self.boxed())
|
||||
}
|
||||
}
|
||||
|
||||
pub enum Lifecycle<T: Element> {
|
||||
@@ -283,6 +310,10 @@ impl<T: Element> Default for Lifecycle<T> {
|
||||
}
|
||||
|
||||
impl ElementBox {
|
||||
pub fn name(&self) -> Option<&str> {
|
||||
self.0.name.as_deref()
|
||||
}
|
||||
|
||||
pub fn metadata<T: 'static>(&self) -> Option<&T> {
|
||||
let element = unsafe { &*self.0.element.as_ptr() };
|
||||
element.metadata().and_then(|m| m.downcast_ref())
|
||||
@@ -371,3 +402,13 @@ pub trait ParentElement<'a>: Extend<ElementBox> + Sized {
|
||||
}
|
||||
|
||||
impl<'a, T> ParentElement<'a> for T where T: Extend<ElementBox> {}
|
||||
|
||||
fn constrain_size_preserving_aspect_ratio(max_size: Vector2F, size: Vector2F) -> Vector2F {
|
||||
if max_size.x().is_infinite() && max_size.y().is_infinite() {
|
||||
size
|
||||
} else if max_size.x().is_infinite() || max_size.x() / max_size.y() > size.x() / size.y() {
|
||||
vec2f(size.x() * max_size.y() / size.y(), max_size.y())
|
||||
} else {
|
||||
vec2f(max_size.x(), size.y() * max_size.x() / size.x())
|
||||
}
|
||||
}
|
||||
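constrain_size_preserving_aspect_ratio above scales a natural size to fit inside a maximum box without distorting it. A standalone restatement of the same math with plain (width, height) tuples instead of Vector2F, plus a couple of spot checks (illustrative sketch only):

// Fit `size` inside `max_size`, keeping the aspect ratio of `size`.
fn constrain_preserving_aspect_ratio(max_size: (f32, f32), size: (f32, f32)) -> (f32, f32) {
    let (max_x, max_y) = max_size;
    let (x, y) = size;
    if max_x.is_infinite() && max_y.is_infinite() {
        size
    } else if max_x.is_infinite() || max_x / max_y > x / y {
        // Height is the limiting dimension.
        (x * max_y / y, max_y)
    } else {
        // Width is the limiting dimension.
        (max_x, y * max_x / x)
    }
}

fn main() {
    // A 400x300 image in a 200x200 box is limited by width.
    assert_eq!(constrain_preserving_aspect_ratio((200.0, 200.0), (400.0, 300.0)), (200.0, 150.0));
    // A 300x400 image in the same box is limited by height.
    assert_eq!(constrain_preserving_aspect_ratio((200.0, 200.0), (300.0, 400.0)), (150.0, 200.0));
}
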
@@ -25,6 +25,16 @@ impl Align {
|
||||
self
|
||||
}
|
||||
|
||||
pub fn bottom(mut self) -> Self {
|
||||
self.alignment.set_y(1.0);
|
||||
self
|
||||
}
|
||||
|
||||
pub fn left(mut self) -> Self {
|
||||
self.alignment.set_x(-1.0);
|
||||
self
|
||||
}
|
||||
|
||||
pub fn right(mut self) -> Self {
|
||||
self.alignment.set_x(1.0);
|
||||
self
|
||||
@@ -13,7 +13,7 @@ use crate::{
|
||||
Element, ElementBox, Event, EventContext, LayoutContext, PaintContext, SizeConstraint,
|
||||
};
|
||||
|
||||
#[derive(Clone, Debug, Default, Deserialize)]
|
||||
#[derive(Clone, Copy, Debug, Default, Deserialize)]
|
||||
pub struct ContainerStyle {
|
||||
#[serde(default)]
|
||||
pub margin: Margin,
|
||||
@@ -42,8 +42,8 @@ impl Container {
|
||||
}
|
||||
}
|
||||
|
||||
pub fn with_style(mut self, style: &ContainerStyle) -> Self {
|
||||
self.style = style.clone();
|
||||
pub fn with_style(mut self, style: ContainerStyle) -> Self {
|
||||
self.style = style;
|
||||
self
|
||||
}
|
||||
|
||||
@@ -57,6 +57,11 @@ impl Container {
|
||||
self
|
||||
}
|
||||
|
||||
pub fn with_margin_right(mut self, margin: f32) -> Self {
|
||||
self.style.margin.right = margin;
|
||||
self
|
||||
}
|
||||
|
||||
pub fn with_horizontal_padding(mut self, padding: f32) -> Self {
|
||||
self.style.padding.left = padding;
|
||||
self.style.padding.right = padding;
|
||||
@@ -79,6 +84,11 @@ impl Container {
|
||||
self
|
||||
}
|
||||
|
||||
pub fn with_padding_left(mut self, padding: f32) -> Self {
|
||||
self.style.padding.left = padding;
|
||||
self
|
||||
}
|
||||
|
||||
pub fn with_padding_right(mut self, padding: f32) -> Self {
|
||||
self.style.padding.right = padding;
|
||||
self
|
||||
@@ -157,7 +167,10 @@ impl Element for Container {
|
||||
constraint: SizeConstraint,
|
||||
cx: &mut LayoutContext,
|
||||
) -> (Vector2F, Self::LayoutState) {
|
||||
let size_buffer = self.margin_size() + self.padding_size() + self.border_size();
|
||||
let mut size_buffer = self.margin_size() + self.padding_size();
|
||||
if !self.style.border.overlay {
|
||||
size_buffer += self.border_size();
|
||||
}
|
||||
let child_constraint = SizeConstraint {
|
||||
min: (constraint.min - size_buffer).max(Vector2F::zero()),
|
||||
max: (constraint.max - size_buffer).max(Vector2F::zero()),
|
||||
@@ -186,20 +199,43 @@ impl Element for Container {
|
||||
color: shadow.color,
|
||||
});
|
||||
}
|
||||
cx.scene.push_quad(Quad {
|
||||
bounds: quad_bounds,
|
||||
background: self.style.background_color,
|
||||
border: self.style.border,
|
||||
corner_radius: self.style.corner_radius,
|
||||
});
|
||||
|
||||
let child_origin = quad_bounds.origin()
|
||||
+ vec2f(self.style.padding.left, self.style.padding.top)
|
||||
+ vec2f(
|
||||
self.style.border.left_width(),
|
||||
self.style.border.top_width(),
|
||||
);
|
||||
self.child.paint(child_origin, visible_bounds, cx);
|
||||
let child_origin =
|
||||
quad_bounds.origin() + vec2f(self.style.padding.left, self.style.padding.top);
|
||||
|
||||
if self.style.border.overlay {
|
||||
cx.scene.push_quad(Quad {
|
||||
bounds: quad_bounds,
|
||||
background: self.style.background_color,
|
||||
border: Default::default(),
|
||||
corner_radius: self.style.corner_radius,
|
||||
});
|
||||
|
||||
self.child.paint(child_origin, visible_bounds, cx);
|
||||
|
||||
cx.scene.push_layer(None);
|
||||
cx.scene.push_quad(Quad {
|
||||
bounds: quad_bounds,
|
||||
background: Default::default(),
|
||||
border: self.style.border,
|
||||
corner_radius: self.style.corner_radius,
|
||||
});
|
||||
cx.scene.pop_layer();
|
||||
} else {
|
||||
cx.scene.push_quad(Quad {
|
||||
bounds: quad_bounds,
|
||||
background: self.style.background_color,
|
||||
border: self.style.border,
|
||||
corner_radius: self.style.corner_radius,
|
||||
});
|
||||
|
||||
let child_origin = child_origin
|
||||
+ vec2f(
|
||||
self.style.border.left_width(),
|
||||
self.style.border.top_width(),
|
||||
);
|
||||
self.child.paint(child_origin, visible_bounds, cx);
|
||||
}
|
||||
}
|
||||
|
||||
fn dispatch_event(
|
||||
@@ -242,7 +278,7 @@ impl ToJson for ContainerStyle {
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, Default)]
|
||||
#[derive(Clone, Copy, Debug, Default)]
|
||||
pub struct Margin {
|
||||
pub top: f32,
|
||||
pub left: f32,
|
||||
@@ -269,7 +305,7 @@ impl ToJson for Margin {
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, Default)]
|
||||
#[derive(Clone, Copy, Debug, Default)]
|
||||
pub struct Padding {
|
||||
pub top: f32,
|
||||
pub left: f32,
|
||||
@@ -348,6 +384,17 @@ enum Spacing {
|
||||
},
|
||||
}
|
||||
|
||||
impl Padding {
|
||||
pub fn uniform(padding: f32) -> Self {
|
||||
Self {
|
||||
top: padding,
|
||||
left: padding,
|
||||
bottom: padding,
|
||||
right: padding,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl ToJson for Padding {
|
||||
fn to_json(&self) -> serde_json::Value {
|
||||
let mut value = json!({});
|
||||
@@ -367,7 +414,7 @@ impl ToJson for Padding {
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, Default, Deserialize)]
|
||||
#[derive(Clone, Copy, Debug, Default, Deserialize)]
|
||||
pub struct Shadow {
|
||||
#[serde(default, deserialize_with = "deserialize_vec2f")]
|
||||
offset: Vector2F,
|
||||
crates/gpui/src/elements/expanded.rs (new file, 90 lines)
@@ -0,0 +1,90 @@
|
||||
use crate::{
|
||||
geometry::{rect::RectF, vector::Vector2F},
|
||||
json, DebugContext, Element, ElementBox, Event, EventContext, LayoutContext, PaintContext,
|
||||
SizeConstraint,
|
||||
};
|
||||
use serde_json::json;
|
||||
|
||||
pub struct Expanded {
|
||||
child: ElementBox,
|
||||
full_width: bool,
|
||||
full_height: bool,
|
||||
}
|
||||
|
||||
impl Expanded {
|
||||
pub fn new(child: ElementBox) -> Self {
|
||||
Self {
|
||||
child,
|
||||
full_width: true,
|
||||
full_height: true,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn to_full_width(mut self) -> Self {
|
||||
self.full_width = true;
|
||||
self.full_height = false;
|
||||
self
|
||||
}
|
||||
|
||||
pub fn to_full_height(mut self) -> Self {
|
||||
self.full_width = false;
|
||||
self.full_height = true;
|
||||
self
|
||||
}
|
||||
}
|
||||
|
||||
impl Element for Expanded {
|
||||
type LayoutState = ();
|
||||
type PaintState = ();
|
||||
|
||||
fn layout(
|
||||
&mut self,
|
||||
mut constraint: SizeConstraint,
|
||||
cx: &mut LayoutContext,
|
||||
) -> (Vector2F, Self::LayoutState) {
|
||||
if self.full_width {
|
||||
constraint.min.set_x(constraint.max.x());
|
||||
}
|
||||
if self.full_height {
|
||||
constraint.min.set_y(constraint.max.y());
|
||||
}
|
||||
let size = self.child.layout(constraint, cx);
|
||||
(size, ())
|
||||
}
|
||||
|
||||
fn paint(
|
||||
&mut self,
|
||||
bounds: RectF,
|
||||
visible_bounds: RectF,
|
||||
_: &mut Self::LayoutState,
|
||||
cx: &mut PaintContext,
|
||||
) -> Self::PaintState {
|
||||
self.child.paint(bounds.origin(), visible_bounds, cx);
|
||||
}
|
||||
|
||||
fn dispatch_event(
|
||||
&mut self,
|
||||
event: &Event,
|
||||
_: RectF,
|
||||
_: &mut Self::LayoutState,
|
||||
_: &mut Self::PaintState,
|
||||
cx: &mut EventContext,
|
||||
) -> bool {
|
||||
self.child.dispatch_event(event, cx)
|
||||
}
|
||||
|
||||
fn debug(
|
||||
&self,
|
||||
_: RectF,
|
||||
_: &Self::LayoutState,
|
||||
_: &Self::PaintState,
|
||||
cx: &DebugContext,
|
||||
) -> json::Value {
|
||||
json!({
|
||||
"type": "Expanded",
|
||||
"full_width": self.full_width,
|
||||
"full_height": self.full_height,
|
||||
"child": self.child.debug(cx)
|
||||
})
|
||||
}
|
||||
}
|
||||
@@ -228,88 +228,15 @@ struct FlexParentData {
|
||||
expanded: bool,
|
||||
}
|
||||
|
||||
pub struct Expanded {
|
||||
metadata: FlexParentData,
|
||||
child: ElementBox,
|
||||
}
|
||||
|
||||
impl Expanded {
|
||||
pub fn new(flex: f32, child: ElementBox) -> Self {
|
||||
Expanded {
|
||||
metadata: FlexParentData {
|
||||
flex,
|
||||
expanded: true,
|
||||
},
|
||||
child,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Element for Expanded {
|
||||
type LayoutState = ();
|
||||
type PaintState = ();
|
||||
|
||||
fn layout(
|
||||
&mut self,
|
||||
constraint: SizeConstraint,
|
||||
cx: &mut LayoutContext,
|
||||
) -> (Vector2F, Self::LayoutState) {
|
||||
let size = self.child.layout(constraint, cx);
|
||||
(size, ())
|
||||
}
|
||||
|
||||
fn paint(
|
||||
&mut self,
|
||||
bounds: RectF,
|
||||
visible_bounds: RectF,
|
||||
_: &mut Self::LayoutState,
|
||||
cx: &mut PaintContext,
|
||||
) -> Self::PaintState {
|
||||
self.child.paint(bounds.origin(), visible_bounds, cx)
|
||||
}
|
||||
|
||||
fn dispatch_event(
|
||||
&mut self,
|
||||
event: &Event,
|
||||
_: RectF,
|
||||
_: &mut Self::LayoutState,
|
||||
_: &mut Self::PaintState,
|
||||
cx: &mut EventContext,
|
||||
) -> bool {
|
||||
self.child.dispatch_event(event, cx)
|
||||
}
|
||||
|
||||
fn metadata(&self) -> Option<&dyn Any> {
|
||||
Some(&self.metadata)
|
||||
}
|
||||
|
||||
fn debug(
|
||||
&self,
|
||||
_: RectF,
|
||||
_: &Self::LayoutState,
|
||||
_: &Self::PaintState,
|
||||
cx: &DebugContext,
|
||||
) -> Value {
|
||||
json!({
|
||||
"type": "Expanded",
|
||||
"flex": self.metadata.flex,
|
||||
"child": self.child.debug(cx)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
pub struct Flexible {
|
||||
metadata: FlexParentData,
|
||||
child: ElementBox,
|
||||
}
|
||||
|
||||
impl Flexible {
|
||||
pub fn new(flex: f32, child: ElementBox) -> Self {
|
||||
pub fn new(flex: f32, expanded: bool, child: ElementBox) -> Self {
|
||||
Flexible {
|
||||
metadata: FlexParentData {
|
||||
flex,
|
||||
expanded: false,
|
||||
},
|
||||
metadata: FlexParentData { flex, expanded },
|
||||
child,
|
||||
}
|
||||
}
|
||||
crates/gpui/src/elements/image.rs (new file, 103 lines)
@@ -0,0 +1,103 @@
|
||||
use super::constrain_size_preserving_aspect_ratio;
|
||||
use crate::{
|
||||
geometry::{
|
||||
rect::RectF,
|
||||
vector::{vec2f, Vector2F},
|
||||
},
|
||||
json::{json, ToJson},
|
||||
scene, Border, DebugContext, Element, Event, EventContext, ImageData, LayoutContext,
|
||||
PaintContext, SizeConstraint,
|
||||
};
|
||||
use serde::Deserialize;
|
||||
use std::sync::Arc;
|
||||
|
||||
pub struct Image {
|
||||
data: Arc<ImageData>,
|
||||
style: ImageStyle,
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone, Default, Deserialize)]
|
||||
pub struct ImageStyle {
|
||||
#[serde(default)]
|
||||
pub border: Border,
|
||||
#[serde(default)]
|
||||
pub corner_radius: f32,
|
||||
#[serde(default)]
|
||||
pub height: Option<f32>,
|
||||
#[serde(default)]
|
||||
pub width: Option<f32>,
|
||||
}
|
||||
|
||||
impl Image {
|
||||
pub fn new(data: Arc<ImageData>) -> Self {
|
||||
Self {
|
||||
data,
|
||||
style: Default::default(),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn with_style(mut self, style: ImageStyle) -> Self {
|
||||
self.style = style;
|
||||
self
|
||||
}
|
||||
}
|
||||
|
||||
impl Element for Image {
|
||||
type LayoutState = ();
|
||||
type PaintState = ();
|
||||
|
||||
fn layout(
|
||||
&mut self,
|
||||
constraint: SizeConstraint,
|
||||
_: &mut LayoutContext,
|
||||
) -> (Vector2F, Self::LayoutState) {
|
||||
let desired_size = vec2f(
|
||||
self.style.width.unwrap_or(constraint.max.x()),
|
||||
self.style.height.unwrap_or(constraint.max.y()),
|
||||
);
|
||||
let size = constrain_size_preserving_aspect_ratio(
|
||||
constraint.constrain(desired_size),
|
||||
self.data.size().to_f32(),
|
||||
);
|
||||
(size, ())
|
||||
}
|
||||
|
||||
fn paint(
|
||||
&mut self,
|
||||
bounds: RectF,
|
||||
_: RectF,
|
||||
_: &mut Self::LayoutState,
|
||||
cx: &mut PaintContext,
|
||||
) -> Self::PaintState {
|
||||
cx.scene.push_image(scene::Image {
|
||||
bounds,
|
||||
border: self.style.border,
|
||||
corner_radius: self.style.corner_radius,
|
||||
data: self.data.clone(),
|
||||
});
|
||||
}
|
||||
|
||||
fn dispatch_event(
|
||||
&mut self,
|
||||
_: &Event,
|
||||
_: RectF,
|
||||
_: &mut Self::LayoutState,
|
||||
_: &mut Self::PaintState,
|
||||
_: &mut EventContext,
|
||||
) -> bool {
|
||||
false
|
||||
}
|
||||
|
||||
fn debug(
|
||||
&self,
|
||||
bounds: RectF,
|
||||
_: &Self::LayoutState,
|
||||
_: &Self::PaintState,
|
||||
_: &DebugContext,
|
||||
) -> serde_json::Value {
|
||||
json!({
|
||||
"type": "Image",
|
||||
"bounds": bounds.to_json(),
|
||||
})
|
||||
}
|
||||
}
|
||||
@@ -18,7 +18,7 @@ pub struct Label {
|
||||
highlight_indices: Vec<usize>,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, Deserialize)]
|
||||
#[derive(Clone, Debug, Deserialize, Default)]
|
||||
pub struct LabelStyle {
|
||||
pub text: TextStyle,
|
||||
pub highlight_text: Option<TextStyle>,
|
||||
@@ -135,10 +135,12 @@ impl Element for Label {
|
||||
);
|
||||
|
||||
let size = vec2f(
|
||||
line.width().max(constraint.min.x()).min(constraint.max.x()),
|
||||
line.width()
|
||||
.ceil()
|
||||
.max(constraint.min.x())
|
||||
.min(constraint.max.x()),
|
||||
cx.font_cache
|
||||
.line_height(self.style.text.font_id, self.style.text.font_size)
|
||||
.ceil(),
|
||||
.line_height(self.style.text.font_id, self.style.text.font_size),
|
||||
);
|
||||
|
||||
(size, line)
|
||||
@@ -205,7 +207,7 @@ mod tests {
|
||||
"Menlo",
|
||||
12.,
|
||||
Default::default(),
|
||||
false,
|
||||
None,
|
||||
Color::black(),
|
||||
cx.font_cache(),
|
||||
)
|
||||
@@ -214,7 +216,7 @@ mod tests {
|
||||
"Menlo",
|
||||
12.,
|
||||
*FontProperties::new().weight(Weight::BOLD),
|
||||
false,
|
||||
None,
|
||||
Color::new(255, 0, 0, 255),
|
||||
cx.font_cache(),
|
||||
)
|
||||
@@ -4,14 +4,15 @@ use crate::{
|
||||
vector::{vec2f, Vector2F},
|
||||
},
|
||||
json::json,
|
||||
sum_tree::{self, Bias, SumTree},
|
||||
DebugContext, Element, ElementBox, ElementRc, Event, EventContext, LayoutContext, PaintContext,
|
||||
SizeConstraint,
|
||||
};
|
||||
use std::{cell::RefCell, collections::VecDeque, ops::Range, rc::Rc};
|
||||
use sum_tree::{Bias, SumTree};
|
||||
|
||||
pub struct List {
|
||||
state: ListState,
|
||||
invalidated_elements: Vec<ElementRc>,
|
||||
}
|
||||
|
||||
#[derive(Clone)]
|
||||
@@ -79,7 +80,10 @@ struct Height(f32);
|
||||
|
||||
impl List {
|
||||
pub fn new(state: ListState) -> Self {
|
||||
Self { state }
|
||||
Self {
|
||||
state,
|
||||
invalidated_elements: Default::default(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -124,7 +128,7 @@ impl Element for List {
|
||||
});
|
||||
|
||||
// Render items after the scroll top, including those in the trailing overdraw.
|
||||
let mut cursor = old_items.cursor::<Count, ()>();
|
||||
let mut cursor = old_items.cursor::<Count>();
|
||||
cursor.seek(&Count(scroll_top.item_ix), Bias::Right, &());
|
||||
for (ix, item) in cursor.by_ref().enumerate() {
|
||||
if rendered_height - scroll_top.offset_in_item >= size.y() + state.overdraw {
|
||||
@@ -145,8 +149,7 @@ impl Element for List {
|
||||
while rendered_height < size.y() {
|
||||
cursor.prev(&());
|
||||
if let Some(item) = cursor.item() {
|
||||
let element =
|
||||
state.render_item(cursor.seek_start().0, item, item_constraint, cx);
|
||||
let element = state.render_item(cursor.start().0, item, item_constraint, cx);
|
||||
rendered_height += element.size().y();
|
||||
rendered_items.push_front(ListItem::Rendered(element));
|
||||
} else {
|
||||
@@ -155,7 +158,7 @@ impl Element for List {
|
||||
}
|
||||
|
||||
scroll_top = ListOffset {
|
||||
item_ix: cursor.seek_start().0,
|
||||
item_ix: cursor.start().0,
|
||||
offset_in_item: rendered_height - size.y(),
|
||||
};
|
||||
|
||||
@@ -166,7 +169,7 @@ impl Element for List {
|
||||
}
|
||||
Orientation::Bottom => {
|
||||
scroll_top = ListOffset {
|
||||
item_ix: cursor.seek_start().0,
|
||||
item_ix: cursor.start().0,
|
||||
offset_in_item: rendered_height - size.y(),
|
||||
};
|
||||
state.logical_scroll_top = None;
|
||||
@@ -179,7 +182,7 @@ impl Element for List {
|
||||
while leading_overdraw < state.overdraw {
|
||||
cursor.prev(&());
|
||||
if let Some(item) = cursor.item() {
|
||||
let element = state.render_item(cursor.seek_start().0, item, item_constraint, cx);
|
||||
let element = state.render_item(cursor.start().0, item, item_constraint, cx);
|
||||
leading_overdraw += element.size().y();
|
||||
rendered_items.push_front(ListItem::Rendered(element));
|
||||
} else {
|
||||
@@ -187,10 +190,9 @@ impl Element for List {
|
||||
}
|
||||
}
|
||||
|
||||
let new_rendered_range =
|
||||
cursor.seek_start().0..(cursor.seek_start().0 + rendered_items.len());
|
||||
let new_rendered_range = cursor.start().0..(cursor.start().0 + rendered_items.len());
|
||||
|
||||
let mut cursor = old_items.cursor::<Count, ()>();
|
||||
let mut cursor = old_items.cursor::<Count>();
|
||||
|
||||
if state.rendered_range.start < new_rendered_range.start {
|
||||
new_items.push_tree(
|
||||
@@ -198,7 +200,7 @@ impl Element for List {
|
||||
&(),
|
||||
);
|
||||
let remove_to = state.rendered_range.end.min(new_rendered_range.start);
|
||||
while cursor.seek_start().0 < remove_to {
|
||||
while cursor.start().0 < remove_to {
|
||||
new_items.push(cursor.item().unwrap().remove(), &());
|
||||
cursor.next(&());
|
||||
}
|
||||
@@ -217,7 +219,7 @@ impl Element for List {
|
||||
&(),
|
||||
);
|
||||
}
|
||||
while cursor.seek_start().0 < state.rendered_range.end {
|
||||
while cursor.start().0 < state.rendered_range.end {
|
||||
new_items.push(cursor.item().unwrap().remove(), &());
|
||||
cursor.next(&());
|
||||
}
|
||||
@@ -258,10 +260,35 @@ impl Element for List {
|
||||
let mut handled = false;
|
||||
|
||||
let mut state = self.state.0.borrow_mut();
|
||||
for (mut element, _) in state.visible_elements(bounds, scroll_top) {
|
||||
handled = element.dispatch_event(event, cx) || handled;
|
||||
let mut item_origin = bounds.origin() - vec2f(0., scroll_top.offset_in_item);
|
||||
let mut cursor = state.items.cursor::<Count>();
|
||||
let mut new_items = cursor.slice(&Count(scroll_top.item_ix), Bias::Right, &());
|
||||
while let Some(item) = cursor.item() {
|
||||
if item_origin.y() > bounds.max_y() {
|
||||
break;
|
||||
}
|
||||
|
||||
if let ListItem::Rendered(element) = item {
|
||||
let prev_notify_count = cx.notify_count();
|
||||
let mut element = element.clone();
|
||||
handled = element.dispatch_event(event, cx) || handled;
|
||||
item_origin.set_y(item_origin.y() + element.size().y());
|
||||
if cx.notify_count() > prev_notify_count {
|
||||
new_items.push(ListItem::Unrendered, &());
|
||||
self.invalidated_elements.push(element);
|
||||
} else {
|
||||
new_items.push(item.clone(), &());
|
||||
}
|
||||
cursor.next(&());
|
||||
} else {
|
||||
unreachable!();
|
||||
}
|
||||
}
|
||||
|
||||
new_items.push_tree(cursor.suffix(&()), &());
|
||||
drop(cursor);
|
||||
state.items = new_items;
|
||||
|
||||
match event {
|
||||
Event::ScrollWheel {
|
||||
position,
|
||||
@@ -361,7 +388,7 @@ impl ListState {
|
||||
new_end + state.rendered_range.end.saturating_sub(old_range.end);
|
||||
}
|
||||
|
||||
let mut old_heights = state.items.cursor::<Count, ()>();
|
||||
let mut old_heights = state.items.cursor::<Count>();
|
||||
let mut new_heights = old_heights.slice(&Count(old_range.start), Bias::Right, &());
|
||||
old_heights.seek_forward(&Count(old_range.end), Bias::Right, &());
|
||||
|
||||
@@ -397,12 +424,11 @@ impl StateInner {
|
||||
}
|
||||
|
||||
fn visible_range(&self, height: f32, scroll_top: &ListOffset) -> Range<usize> {
|
||||
let mut cursor = self.items.cursor::<Count, Height>();
|
||||
let mut cursor = self.items.cursor::<ListItemSummary>();
|
||||
cursor.seek(&Count(scroll_top.item_ix), Bias::Right, &());
|
||||
let start_y = cursor.sum_start().0 + scroll_top.offset_in_item;
|
||||
let mut cursor = cursor.swap_dimensions();
|
||||
let start_y = cursor.start().height + scroll_top.offset_in_item;
|
||||
cursor.seek_forward(&Height(start_y + height), Bias::Left, &());
|
||||
scroll_top.item_ix..cursor.sum_start().0 + 1
|
||||
scroll_top.item_ix..cursor.start().count + 1
|
||||
}
|
||||
|
||||
fn visible_elements<'a>(
|
||||
@@ -411,7 +437,7 @@ impl StateInner {
|
||||
scroll_top: &ListOffset,
|
||||
) -> impl Iterator<Item = (ElementRc, Vector2F)> + 'a {
|
||||
let mut item_origin = bounds.origin() - vec2f(0., scroll_top.offset_in_item);
|
||||
let mut cursor = self.items.cursor::<Count, ()>();
|
||||
let mut cursor = self.items.cursor::<Count>();
|
||||
cursor.seek(&Count(scroll_top.item_ix), Bias::Right, &());
|
||||
std::iter::from_fn(move || {
|
||||
while let Some(item) = cursor.item() {
|
||||
@@ -453,10 +479,10 @@ impl StateInner {
|
||||
if self.orientation == Orientation::Bottom && new_scroll_top == scroll_max {
|
||||
self.logical_scroll_top = None;
|
||||
} else {
|
||||
let mut cursor = self.items.cursor::<Height, Count>();
|
||||
let mut cursor = self.items.cursor::<ListItemSummary>();
|
||||
cursor.seek(&Height(new_scroll_top), Bias::Right, &());
|
||||
let item_ix = cursor.sum_start().0;
|
||||
let offset_in_item = new_scroll_top - cursor.seek_start().0;
|
||||
let item_ix = cursor.start().count;
|
||||
let offset_in_item = new_scroll_top - cursor.start().height;
|
||||
self.logical_scroll_top = Some(ListOffset {
|
||||
item_ix,
|
||||
offset_in_item,
|
||||
@@ -473,9 +499,9 @@ impl StateInner {
|
||||
}
|
||||
|
||||
fn scroll_top(&self, logical_scroll_top: &ListOffset) -> f32 {
|
||||
let mut cursor = self.items.cursor::<Count, Height>();
|
||||
let mut cursor = self.items.cursor::<ListItemSummary>();
|
||||
cursor.seek(&Count(logical_scroll_top.item_ix), Bias::Right, &());
|
||||
cursor.sum_start().0 + logical_scroll_top.offset_in_item
|
||||
cursor.start().height + logical_scroll_top.offset_in_item
|
||||
}
|
||||
}
|
||||
|
||||
@@ -527,12 +553,6 @@ impl sum_tree::Summary for ListItemSummary {
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> sum_tree::Dimension<'a, ListItemSummary> for ListItemSummary {
|
||||
fn add_summary(&mut self, summary: &'a ListItemSummary, _: &()) {
|
||||
sum_tree::Summary::add_summary(self, summary, &());
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> sum_tree::Dimension<'a, ListItemSummary> for Count {
|
||||
fn add_summary(&mut self, summary: &'a ListItemSummary, _: &()) {
|
||||
self.0 += summary.count;
|
||||
@@ -557,9 +577,15 @@ impl<'a> sum_tree::Dimension<'a, ListItemSummary> for Height {
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> sum_tree::SeekDimension<'a, ListItemSummary> for Height {
|
||||
fn cmp(&self, other: &Self, _: &()) -> std::cmp::Ordering {
|
||||
self.0.partial_cmp(&other.0).unwrap()
|
||||
impl<'a> sum_tree::SeekTarget<'a, ListItemSummary, ListItemSummary> for Count {
|
||||
fn cmp(&self, other: &ListItemSummary, _: &()) -> std::cmp::Ordering {
|
||||
self.0.partial_cmp(&other.count).unwrap()
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> sum_tree::SeekTarget<'a, ListItemSummary, ListItemSummary> for Height {
|
||||
fn cmp(&self, other: &ListItemSummary, _: &()) -> std::cmp::Ordering {
|
||||
self.0.partial_cmp(&other.height).unwrap()
|
||||
}
|
||||
}
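The cursor changes throughout this file drop the old two-dimension cursors (Count, Height) and the SeekDimension trait in favor of seeking over the full ListItemSummary with SeekTarget: you seek by one field and read whichever field you need from cursor.start(). A standalone sketch of that idea with stand-in types (not the sum_tree API):

#[derive(Clone, Copy, Default)]
struct ListItemSummary {
    count: usize,
    height: f32,
}

fn seek_by_height(summaries: &[ListItemSummary], target_height: f32) -> (usize, f32) {
    // Accumulate whole items while they fit below the target height, then report
    // both dimensions of the position reached: item index and accumulated height.
    let mut acc = ListItemSummary::default();
    for s in summaries {
        if acc.height + s.height > target_height {
            break;
        }
        acc.count += s.count;
        acc.height += s.height;
    }
    (acc.count, acc.height)
}

fn main() {
    let items = [ListItemSummary { count: 1, height: 20.0 }; 5];
    // Seeking to y = 45 lands at the start of item 2, which begins at y = 40.
    assert_eq!(seek_by_height(&items, 45.0), (2, 40.0));
}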
|
||||
|
||||
@@ -603,7 +629,7 @@ mod tests {
|
||||
offset_in_item: 0.,
|
||||
},
|
||||
40.,
|
||||
vec2f(0., 54.),
|
||||
vec2f(0., -54.),
|
||||
true,
|
||||
&mut presenter.build_event_context(cx),
|
||||
);
|
||||
@@ -654,7 +680,7 @@ mod tests {
|
||||
assert_eq!(state.0.borrow().scroll_top(&logical_scroll_top), 114.);
|
||||
}
|
||||
|
||||
#[crate::test(self, iterations = 10000, seed = 0)]
|
||||
#[crate::test(self, iterations = 10, seed = 0)]
|
||||
fn test_random(cx: &mut crate::MutableAppContext, mut rng: StdRng) {
|
||||
let operations = env::var("OPERATIONS")
|
||||
.map(|i| i.parse().expect("invalid `OPERATIONS` variable"))
|
||||
@@ -731,7 +757,7 @@ mod tests {
|
||||
log::info!("splice({:?}, {:?})", start_ix..end_ix, new_elements);
|
||||
state.splice(start_ix..end_ix, new_elements.len());
|
||||
elements.splice(start_ix..end_ix, new_elements);
|
||||
for (ix, item) in state.0.borrow().items.cursor::<(), ()>().enumerate() {
|
||||
for (ix, item) in state.0.borrow().items.cursor::<()>().enumerate() {
|
||||
if let ListItem::Rendered(element) = item {
|
||||
let (expected_id, _) = elements[ix];
|
||||
element.with_metadata(|metadata: Option<&usize>| {
|
||||
@@ -768,7 +794,7 @@ mod tests {
|
||||
let mut first_rendered_element_top = None;
|
||||
let mut last_rendered_element_bottom = None;
|
||||
assert_eq!(state.items.summary().count, elements.borrow().len());
|
||||
for (ix, item) in state.items.cursor::<(), ()>().enumerate() {
|
||||
for (ix, item) in state.items.cursor::<()>().enumerate() {
|
||||
match item {
|
||||
ListItem::Unrendered => {
|
||||
let item_bottom = item_top;
|
||||
@@ -116,7 +116,8 @@ impl Element for MouseEventHandler {
|
||||
let hit_bounds = RectF::from_points(
|
||||
bounds.origin() - vec2f(self.padding.left, self.padding.top),
|
||||
bounds.lower_right() + vec2f(self.padding.right, self.padding.bottom),
|
||||
);
|
||||
)
|
||||
.round_out();
|
||||
|
||||
self.state.update(cx, |state, cx| match event {
|
||||
Event::MouseMoved {
|
||||
@@ -41,25 +41,15 @@ impl Element for Svg {
|
||||
) -> (Vector2F, Self::LayoutState) {
|
||||
match cx.asset_cache.svg(&self.path) {
|
||||
Ok(tree) => {
|
||||
let size = if constraint.max.x().is_infinite() && constraint.max.y().is_infinite() {
|
||||
let rect = from_usvg_rect(tree.svg_node().view_box.rect);
|
||||
rect.size()
|
||||
} else {
|
||||
let max_size = constraint.max;
|
||||
let svg_size = from_usvg_rect(tree.svg_node().view_box.rect).size();
|
||||
|
||||
if max_size.x().is_infinite()
|
||||
|| max_size.x() / max_size.y() > svg_size.x() / svg_size.y()
|
||||
{
|
||||
vec2f(svg_size.x() * max_size.y() / svg_size.y(), max_size.y())
|
||||
} else {
|
||||
vec2f(max_size.x(), svg_size.y() * max_size.x() / svg_size.x())
|
||||
}
|
||||
};
|
||||
let size = constrain_size_preserving_aspect_ratio(
|
||||
constraint.max,
|
||||
from_usvg_rect(tree.svg_node().view_box.rect).size(),
|
||||
);
|
||||
(size, Some(tree))
|
||||
}
|
||||
Err(error) => {
|
||||
log::error!("{}", error);
|
||||
Err(_error) => {
|
||||
#[cfg(not(any(test, feature = "test-support")))]
|
||||
log::error!("{}", _error);
|
||||
(constraint.min, None)
|
||||
}
|
||||
}
|
||||
@@ -111,6 +101,8 @@ impl Element for Svg {
|
||||
|
||||
use crate::json::ToJson;
|
||||
|
||||
use super::constrain_size_preserving_aspect_ratio;
|
||||
|
||||
fn from_usvg_rect(rect: usvg::Rect) -> RectF {
|
||||
RectF::new(
|
||||
vec2f(rect.x() as f32, rect.y() as f32),
|
||||
@@ -14,6 +14,7 @@ use serde_json::json;
|
||||
pub struct Text {
|
||||
text: String,
|
||||
style: TextStyle,
|
||||
soft_wrap: bool,
|
||||
}
|
||||
|
||||
pub struct LayoutState {
|
||||
@@ -23,13 +24,22 @@ pub struct LayoutState {
|
||||
|
||||
impl Text {
|
||||
pub fn new(text: String, style: TextStyle) -> Self {
|
||||
Self { text, style }
|
||||
Self {
|
||||
text,
|
||||
style,
|
||||
soft_wrap: true,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn with_default_color(mut self, color: Color) -> Self {
|
||||
self.style.color = color;
|
||||
self
|
||||
}
|
||||
|
||||
pub fn with_soft_wrap(mut self, soft_wrap: bool) -> Self {
|
||||
self.soft_wrap = soft_wrap;
|
||||
self
|
||||
}
|
||||
}
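A hypothetical call site for the new builder methods; the text, style, and color values are assumptions for illustration:

// Single-line status text with wrapping disabled.
let status = Text::new("1 pending".to_string(), style.clone())
    .with_default_color(Color::new(255, 0, 0, 255))
    .with_soft_wrap(false);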
|
||||
|
||||
impl Element for Text {
|
||||
@@ -54,9 +64,13 @@ impl Element for Text {
|
||||
self.style.font_size,
|
||||
&[(line.len(), self.style.to_run())],
|
||||
);
|
||||
let wrap_boundaries = wrapper
|
||||
.wrap_shaped_line(line, &shaped_line, constraint.max.x())
|
||||
.collect::<Vec<_>>();
|
||||
let wrap_boundaries = if self.soft_wrap {
|
||||
wrapper
|
||||
.wrap_shaped_line(line, &shaped_line, constraint.max.x())
|
||||
.collect::<Vec<_>>()
|
||||
} else {
|
||||
Vec::new()
|
||||
};
|
||||
|
||||
max_line_width = max_line_width.max(shaped_line.width());
|
||||
line_count += wrap_boundaries.len() + 1;
|
||||
@@ -5,7 +5,7 @@ use crate::{
|
||||
vector::{vec2f, Vector2F},
|
||||
},
|
||||
json::{self, json},
|
||||
ElementBox, MutableAppContext,
|
||||
ElementBox,
|
||||
};
|
||||
use json::ToJson;
|
||||
use parking_lot::Mutex;
|
||||
@@ -38,25 +38,39 @@ pub struct LayoutState {
|
||||
|
||||
pub struct UniformList<F>
|
||||
where
|
||||
F: Fn(Range<usize>, &mut Vec<ElementBox>, &mut MutableAppContext),
|
||||
F: Fn(Range<usize>, &mut Vec<ElementBox>, &mut LayoutContext),
|
||||
{
|
||||
state: UniformListState,
|
||||
item_count: usize,
|
||||
append_items: F,
|
||||
padding_top: f32,
|
||||
padding_bottom: f32,
|
||||
}
|
||||
|
||||
impl<F> UniformList<F>
|
||||
where
|
||||
F: Fn(Range<usize>, &mut Vec<ElementBox>, &mut MutableAppContext),
|
||||
F: Fn(Range<usize>, &mut Vec<ElementBox>, &mut LayoutContext),
|
||||
{
|
||||
pub fn new(state: UniformListState, item_count: usize, append_items: F) -> Self {
|
||||
Self {
|
||||
state,
|
||||
item_count,
|
||||
append_items,
|
||||
padding_top: 0.,
|
||||
padding_bottom: 0.,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn with_padding_top(mut self, padding: f32) -> Self {
|
||||
self.padding_top = padding;
|
||||
self
|
||||
}
|
||||
|
||||
pub fn with_padding_bottom(mut self, padding: f32) -> Self {
|
||||
self.padding_bottom = padding;
|
||||
self
|
||||
}
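A hypothetical construction using the new padding builders; state, item_count, and append_items are assumed to come from the caller:

let list = UniformList::new(state, item_count, append_items)
    .with_padding_top(4.)
    .with_padding_bottom(4.);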
|
||||
|
||||
fn scroll(
|
||||
&self,
|
||||
_: Vector2F,
|
||||
@@ -84,7 +98,7 @@ where
|
||||
}
|
||||
|
||||
if let Some(item_ix) = state.scroll_to.take() {
|
||||
let item_top = item_ix as f32 * item_height;
|
||||
let item_top = self.padding_top + item_ix as f32 * item_height;
|
||||
let item_bottom = item_top + item_height;
|
||||
|
||||
if item_top < state.scroll_top {
|
||||
@@ -102,7 +116,7 @@ where
|
||||
|
||||
impl<F> Element for UniformList<F>
|
||||
where
|
||||
F: Fn(Range<usize>, &mut Vec<ElementBox>, &mut MutableAppContext),
|
||||
F: Fn(Range<usize>, &mut Vec<ElementBox>, &mut LayoutContext),
|
||||
{
|
||||
type LayoutState = LayoutState;
|
||||
type PaintState = ();
|
||||
@@ -124,7 +138,7 @@ where
|
||||
let mut scroll_max = 0.;
|
||||
|
||||
let mut items = Vec::new();
|
||||
(self.append_items)(0..1, &mut items, cx.app);
|
||||
(self.append_items)(0..1, &mut items, cx);
|
||||
if let Some(first_item) = items.first_mut() {
|
||||
let mut item_size = first_item.layout(item_constraint, cx);
|
||||
item_size.set_x(size.x());
|
||||
@@ -137,16 +151,21 @@ where
|
||||
size.set_y(size.y().min(scroll_height).max(constraint.min.y()));
|
||||
}
|
||||
|
||||
scroll_max = item_height * self.item_count as f32 - size.y();
|
||||
let scroll_height =
|
||||
item_height * self.item_count as f32 + self.padding_top + self.padding_bottom;
|
||||
scroll_max = (scroll_height - size.y()).max(0.);
|
||||
self.autoscroll(scroll_max, size.y(), item_height);
|
||||
|
||||
items.clear();
|
||||
let start = cmp::min((self.scroll_top() / item_height) as usize, self.item_count);
|
||||
let start = cmp::min(
|
||||
((self.scroll_top() - self.padding_top) / item_height) as usize,
|
||||
self.item_count,
|
||||
);
|
||||
let end = cmp::min(
|
||||
self.item_count,
|
||||
start + (size.y() / item_height).ceil() as usize + 1,
|
||||
);
|
||||
(self.append_items)(start..end, &mut items, cx.app);
|
||||
(self.append_items)(start..end, &mut items, cx);
|
||||
for item in &mut items {
|
||||
item.layout(item_constraint, cx);
|
||||
}
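A worked sketch of the visible-range arithmetic above under assumed values (20 px items, 10 px top padding, a 95 px viewport scrolled to 130 px); this reproduces only the arithmetic, not the element code:

fn main() {
    let (item_height, padding_top, viewport, scroll_top) = (20.0_f32, 10.0_f32, 95.0_f32, 130.0_f32);
    let item_count = 100_usize;
    // First visible item: account for the top padding before dividing by the item height.
    let start = (((scroll_top - padding_top) / item_height) as usize).min(item_count); // 6
    // Render one extra row so a partially visible item at the bottom is covered.
    let end = (start + (viewport / item_height).ceil() as usize + 1).min(item_count); // 12
    assert_eq!((start, end), (6, 12));
}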
|
||||
@@ -173,8 +192,11 @@ where
|
||||
) -> Self::PaintState {
|
||||
cx.scene.push_layer(Some(bounds));
|
||||
|
||||
let mut item_origin =
|
||||
bounds.origin() - vec2f(0.0, self.state.scroll_top() % layout.item_height);
|
||||
let mut item_origin = bounds.origin()
|
||||
- vec2f(
|
||||
0.,
|
||||
(self.state.scroll_top() - self.padding_top) % layout.item_height,
|
||||
);
|
||||
|
||||
for item in &mut layout.items {
|
||||
item.paint(item_origin, visible_bounds, cx);
|
||||
@@ -1,13 +1,13 @@
|
||||
use anyhow::{anyhow, Result};
|
||||
use async_task::Runnable;
|
||||
pub use async_task::Task;
|
||||
use backtrace::{Backtrace, BacktraceFmt, BytesOrWideString};
|
||||
use parking_lot::Mutex;
|
||||
use postage::{barrier, prelude::Stream as _};
|
||||
use rand::prelude::*;
|
||||
use smol::{channel, prelude::*, Executor, Timer};
|
||||
use std::{
|
||||
fmt::{self, Debug},
|
||||
any::Any,
|
||||
fmt::{self, Debug, Display},
|
||||
marker::PhantomData,
|
||||
mem,
|
||||
ops::RangeInclusive,
|
||||
@@ -23,7 +23,10 @@ use std::{
|
||||
};
|
||||
use waker_fn::waker_fn;
|
||||
|
||||
use crate::{platform, util};
|
||||
use crate::{
|
||||
platform::{self, Dispatcher},
|
||||
util, MutableAppContext,
|
||||
};
|
||||
|
||||
pub enum Foreground {
|
||||
Platform {
|
||||
@@ -35,13 +38,35 @@ pub enum Foreground {
|
||||
}
|
||||
|
||||
pub enum Background {
|
||||
Deterministic(Arc<Deterministic>),
|
||||
Deterministic {
|
||||
executor: Arc<Deterministic>,
|
||||
},
|
||||
Production {
|
||||
executor: Arc<smol::Executor<'static>>,
|
||||
_stop: channel::Sender<()>,
|
||||
},
|
||||
}
|
||||
|
||||
type AnyLocalFuture = Pin<Box<dyn 'static + Future<Output = Box<dyn Any + 'static>>>>;
|
||||
type AnyFuture = Pin<Box<dyn 'static + Send + Future<Output = Box<dyn Any + Send + 'static>>>>;
|
||||
type AnyTask = async_task::Task<Box<dyn Any + Send + 'static>>;
|
||||
type AnyLocalTask = async_task::Task<Box<dyn Any + 'static>>;
|
||||
|
||||
#[must_use]
|
||||
pub enum Task<T> {
|
||||
Ready(Option<T>),
|
||||
Local {
|
||||
any_task: AnyLocalTask,
|
||||
result_type: PhantomData<T>,
|
||||
},
|
||||
Send {
|
||||
any_task: AnyTask,
|
||||
result_type: PhantomData<T>,
|
||||
},
|
||||
}
|
||||
|
||||
unsafe impl<T: Send> Send for Task<T> {}
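The Task variants above wrap type-erased async_task tasks whose outputs are boxed as dyn Any and downcast when polled. A minimal standalone illustration of that erase/downcast round trip (not the executor code itself):

use std::any::Any;

fn erase<T: 'static>(value: T) -> Box<dyn Any> {
    Box::new(value)
}

fn main() {
    let any = erase(42_usize);
    // Recover the concrete type the way Task::poll does with its boxed output.
    let value: usize = *any.downcast::<usize>().unwrap();
    assert_eq!(value, 42);
}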
|
||||
|
||||
struct DeterministicState {
|
||||
rng: StdRng,
|
||||
seed: u64,
|
||||
@@ -52,6 +77,7 @@ struct DeterministicState {
|
||||
block_on_ticks: RangeInclusive<usize>,
|
||||
now: Instant,
|
||||
pending_timers: Vec<(Instant, barrier::Sender)>,
|
||||
waiting_backtrace: Option<Backtrace>,
|
||||
}
|
||||
|
||||
pub struct Deterministic {
|
||||
@@ -72,16 +98,13 @@ impl Deterministic {
|
||||
block_on_ticks: 0..=1000,
|
||||
now: Instant::now(),
|
||||
pending_timers: Default::default(),
|
||||
waiting_backtrace: None,
|
||||
})),
|
||||
parker: Default::default(),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn spawn_from_foreground<F, T>(&self, future: F) -> Task<T>
|
||||
where
|
||||
T: 'static,
|
||||
F: Future<Output = T> + 'static,
|
||||
{
|
||||
fn spawn_from_foreground(&self, future: AnyLocalFuture) -> AnyLocalTask {
|
||||
let backtrace = Backtrace::new_unresolved();
|
||||
let scheduled_once = AtomicBool::new(false);
|
||||
let state = self.state.clone();
|
||||
@@ -100,11 +123,7 @@ impl Deterministic {
|
||||
task
|
||||
}
|
||||
|
||||
pub fn spawn<F, T>(&self, future: F) -> Task<T>
|
||||
where
|
||||
T: 'static + Send,
|
||||
F: 'static + Send + Future<Output = T>,
|
||||
{
|
||||
fn spawn(&self, future: AnyFuture) -> AnyTask {
|
||||
let backtrace = Backtrace::new_unresolved();
|
||||
let state = self.state.clone();
|
||||
let unparker = self.parker.lock().unparker();
|
||||
@@ -119,20 +138,15 @@ impl Deterministic {
|
||||
task
|
||||
}
|
||||
|
||||
pub fn run<F, T>(&self, future: F) -> T
|
||||
where
|
||||
T: 'static,
|
||||
F: Future<Output = T> + 'static,
|
||||
{
|
||||
fn run(&self, mut future: AnyLocalFuture) -> Box<dyn Any> {
|
||||
let woken = Arc::new(AtomicBool::new(false));
|
||||
let mut future = Box::pin(future);
|
||||
loop {
|
||||
if let Some(result) = self.run_internal(woken.clone(), &mut future) {
|
||||
return result;
|
||||
}
|
||||
|
||||
if !woken.load(SeqCst) && self.state.lock().forbid_parking {
|
||||
panic!("deterministic executor parked after a call to forbid_parking");
|
||||
if !woken.load(SeqCst) {
|
||||
self.state.lock().will_park();
|
||||
}
|
||||
|
||||
woken.store(false, SeqCst);
|
||||
@@ -142,16 +156,15 @@ impl Deterministic {
|
||||
|
||||
fn run_until_parked(&self) {
|
||||
let woken = Arc::new(AtomicBool::new(false));
|
||||
let future = std::future::pending::<()>();
|
||||
smol::pin!(future);
|
||||
self.run_internal(woken, future);
|
||||
let mut future = any_local_future(std::future::pending::<()>());
|
||||
self.run_internal(woken, &mut future);
|
||||
}
|
||||
|
||||
pub fn run_internal<F, T>(&self, woken: Arc<AtomicBool>, mut future: F) -> Option<T>
|
||||
where
|
||||
T: 'static,
|
||||
F: Future<Output = T> + Unpin,
|
||||
{
|
||||
fn run_internal(
|
||||
&self,
|
||||
woken: Arc<AtomicBool>,
|
||||
future: &mut AnyLocalFuture,
|
||||
) -> Option<Box<dyn Any>> {
|
||||
let unparker = self.parker.lock().unparker();
|
||||
let waker = waker_fn(move || {
|
||||
woken.store(true, SeqCst);
|
||||
@@ -195,6 +208,7 @@ impl Deterministic {
|
||||
}
|
||||
|
||||
let state = self.state.lock();
|
||||
|
||||
if state.scheduled_from_foreground.is_empty()
|
||||
&& state.scheduled_from_background.is_empty()
|
||||
&& state.spawned_from_foreground.is_empty()
|
||||
@@ -205,13 +219,7 @@ impl Deterministic {
|
||||
}
|
||||
}
|
||||
|
||||
pub fn block_on<F, T>(&self, future: F) -> Option<T>
|
||||
where
|
||||
T: 'static,
|
||||
F: Future<Output = T>,
|
||||
{
|
||||
smol::pin!(future);
|
||||
|
||||
fn block_on(&self, future: &mut AnyLocalFuture) -> Option<Box<dyn Any>> {
|
||||
let unparker = self.parker.lock().unparker();
|
||||
let waker = waker_fn(move || {
|
||||
unparker.unpark();
|
||||
@@ -239,11 +247,9 @@ impl Deterministic {
|
||||
if let Poll::Ready(result) = future.as_mut().poll(&mut cx) {
|
||||
return Some(result);
|
||||
}
|
||||
let state = self.state.lock();
|
||||
let mut state = self.state.lock();
|
||||
if state.scheduled_from_background.is_empty() {
|
||||
if state.forbid_parking {
|
||||
panic!("deterministic executor parked after a call to forbid_parking");
|
||||
}
|
||||
state.will_park();
|
||||
drop(state);
|
||||
self.parker.lock().park();
|
||||
}
|
||||
@@ -256,6 +262,26 @@ impl Deterministic {
|
||||
}
|
||||
}
|
||||
|
||||
impl DeterministicState {
|
||||
fn will_park(&mut self) {
|
||||
if self.forbid_parking {
|
||||
let mut backtrace_message = String::new();
|
||||
if let Some(backtrace) = self.waiting_backtrace.as_mut() {
|
||||
backtrace.resolve();
|
||||
backtrace_message = format!(
|
||||
"\nbacktrace of waiting future:\n{:?}",
|
||||
CwdBacktrace::new(backtrace)
|
||||
);
|
||||
}
|
||||
|
||||
panic!(
|
||||
"deterministic executor parked after a call to forbid_parking{}",
|
||||
backtrace_message
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Default)]
|
||||
struct Trace {
|
||||
executed: Vec<Backtrace>,
|
||||
@@ -301,32 +327,53 @@ impl Trace {
|
||||
}
|
||||
}
|
||||
|
||||
impl Debug for Trace {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
struct FirstCwdFrameInBacktrace<'a>(&'a Backtrace);
|
||||
struct CwdBacktrace<'a> {
|
||||
backtrace: &'a Backtrace,
|
||||
first_frame_only: bool,
|
||||
}
|
||||
|
||||
impl<'a> Debug for FirstCwdFrameInBacktrace<'a> {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
let cwd = std::env::current_dir().unwrap();
|
||||
let mut print_path = |fmt: &mut fmt::Formatter<'_>, path: BytesOrWideString<'_>| {
|
||||
fmt::Display::fmt(&path, fmt)
|
||||
};
|
||||
let mut fmt = BacktraceFmt::new(f, backtrace::PrintFmt::Full, &mut print_path);
|
||||
for frame in self.0.frames() {
|
||||
let mut formatted_frame = fmt.frame();
|
||||
if frame
|
||||
.symbols()
|
||||
.iter()
|
||||
.any(|s| s.filename().map_or(false, |f| f.starts_with(&cwd)))
|
||||
{
|
||||
formatted_frame.backtrace_frame(frame)?;
|
||||
break;
|
||||
}
|
||||
impl<'a> CwdBacktrace<'a> {
|
||||
fn new(backtrace: &'a Backtrace) -> Self {
|
||||
Self {
|
||||
backtrace,
|
||||
first_frame_only: false,
|
||||
}
|
||||
}
|
||||
|
||||
fn first_frame(backtrace: &'a Backtrace) -> Self {
|
||||
Self {
|
||||
backtrace,
|
||||
first_frame_only: true,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> Debug for CwdBacktrace<'a> {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
let cwd = std::env::current_dir().unwrap();
|
||||
let mut print_path = |fmt: &mut fmt::Formatter<'_>, path: BytesOrWideString<'_>| {
|
||||
fmt::Display::fmt(&path, fmt)
|
||||
};
|
||||
let mut fmt = BacktraceFmt::new(f, backtrace::PrintFmt::Full, &mut print_path);
|
||||
for frame in self.backtrace.frames() {
|
||||
let mut formatted_frame = fmt.frame();
|
||||
if frame
|
||||
.symbols()
|
||||
.iter()
|
||||
.any(|s| s.filename().map_or(false, |f| f.starts_with(&cwd)))
|
||||
{
|
||||
formatted_frame.backtrace_frame(frame)?;
|
||||
if self.first_frame_only {
|
||||
break;
|
||||
}
|
||||
fmt.finish()
|
||||
}
|
||||
}
|
||||
fmt.finish()
|
||||
}
|
||||
}
|
||||
|
||||
impl Debug for Trace {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
for ((backtrace, scheduled), spawned_from_foreground) in self
|
||||
.executed
|
||||
.iter()
|
||||
@@ -335,7 +382,7 @@ impl Debug for Trace {
|
||||
{
|
||||
writeln!(f, "Scheduled")?;
|
||||
for backtrace in scheduled {
|
||||
writeln!(f, "- {:?}", FirstCwdFrameInBacktrace(backtrace))?;
|
||||
writeln!(f, "- {:?}", CwdBacktrace::first_frame(backtrace))?;
|
||||
}
|
||||
if scheduled.is_empty() {
|
||||
writeln!(f, "None")?;
|
||||
@@ -344,14 +391,14 @@ impl Debug for Trace {
|
||||
|
||||
writeln!(f, "Spawned from foreground")?;
|
||||
for backtrace in spawned_from_foreground {
|
||||
writeln!(f, "- {:?}", FirstCwdFrameInBacktrace(backtrace))?;
|
||||
writeln!(f, "- {:?}", CwdBacktrace::first_frame(backtrace))?;
|
||||
}
|
||||
if spawned_from_foreground.is_empty() {
|
||||
writeln!(f, "None")?;
|
||||
}
|
||||
writeln!(f, "==========")?;
|
||||
|
||||
writeln!(f, "Run: {:?}", FirstCwdFrameInBacktrace(backtrace))?;
|
||||
writeln!(f, "Run: {:?}", CwdBacktrace::first_frame(backtrace))?;
|
||||
writeln!(f, "+++++++++++++++++++")?;
|
||||
}
|
||||
|
||||
@@ -396,24 +443,60 @@ impl Foreground {
|
||||
}
|
||||
|
||||
pub fn spawn<T: 'static>(&self, future: impl Future<Output = T> + 'static) -> Task<T> {
|
||||
match self {
|
||||
let future = any_local_future(future);
|
||||
let any_task = match self {
|
||||
Self::Deterministic(executor) => executor.spawn_from_foreground(future),
|
||||
Self::Platform { dispatcher, .. } => {
|
||||
let dispatcher = dispatcher.clone();
|
||||
let schedule = move |runnable: Runnable| dispatcher.run_on_main_thread(runnable);
|
||||
let (runnable, task) = async_task::spawn_local(future, schedule);
|
||||
runnable.schedule();
|
||||
task
|
||||
fn spawn_inner(
|
||||
future: AnyLocalFuture,
|
||||
dispatcher: &Arc<dyn Dispatcher>,
|
||||
) -> AnyLocalTask {
|
||||
let dispatcher = dispatcher.clone();
|
||||
let schedule =
|
||||
move |runnable: Runnable| dispatcher.run_on_main_thread(runnable);
|
||||
let (runnable, task) = async_task::spawn_local(future, schedule);
|
||||
runnable.schedule();
|
||||
task
|
||||
}
|
||||
spawn_inner(future, dispatcher)
|
||||
}
|
||||
Self::Test(executor) => executor.spawn(future),
|
||||
Self::Deterministic(executor) => executor.spawn_from_foreground(future),
|
||||
}
|
||||
};
|
||||
Task::local(any_task)
|
||||
}
|
||||
|
||||
pub fn run<T: 'static>(&self, future: impl 'static + Future<Output = T>) -> T {
|
||||
match self {
|
||||
let future = any_local_future(future);
|
||||
let any_value = match self {
|
||||
Self::Deterministic(executor) => executor.run(future),
|
||||
Self::Platform { .. } => panic!("you can't call run on a platform foreground executor"),
|
||||
Self::Test(executor) => smol::block_on(executor.run(future)),
|
||||
Self::Deterministic(executor) => executor.run(future),
|
||||
};
|
||||
*any_value.downcast().unwrap()
|
||||
}
|
||||
|
||||
pub fn parking_forbidden(&self) -> bool {
|
||||
match self {
|
||||
Self::Deterministic(executor) => executor.state.lock().forbid_parking,
|
||||
_ => panic!("this method can only be called on a deterministic executor"),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn start_waiting(&self) {
|
||||
match self {
|
||||
Self::Deterministic(executor) => {
|
||||
executor.state.lock().waiting_backtrace = Some(Backtrace::new_unresolved());
|
||||
}
|
||||
_ => panic!("this method can only be called on a deterministic executor"),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn finish_waiting(&self) {
|
||||
match self {
|
||||
Self::Deterministic(executor) => {
|
||||
executor.state.lock().waiting_backtrace.take();
|
||||
}
|
||||
_ => panic!("this method can only be called on a deterministic executor"),
|
||||
}
|
||||
}
|
||||
|
||||
@@ -500,29 +583,34 @@ impl Background {
|
||||
T: 'static + Send,
|
||||
F: Send + Future<Output = T> + 'static,
|
||||
{
|
||||
match self {
|
||||
let future = any_future(future);
|
||||
let any_task = match self {
|
||||
Self::Production { executor, .. } => executor.spawn(future),
|
||||
Self::Deterministic(executor) => executor.spawn(future),
|
||||
}
|
||||
Self::Deterministic { executor, .. } => executor.spawn(future),
|
||||
};
|
||||
Task::send(any_task)
|
||||
}
|
||||
|
||||
pub fn block_with_timeout<F, T>(&self, timeout: Duration, mut future: F) -> Result<T, F>
|
||||
pub fn block_with_timeout<F, T>(
|
||||
&self,
|
||||
timeout: Duration,
|
||||
future: F,
|
||||
) -> Result<T, impl Future<Output = T>>
|
||||
where
|
||||
T: 'static,
|
||||
F: 'static + Unpin + Future<Output = T>,
|
||||
{
|
||||
let mut future = any_local_future(future);
|
||||
if !timeout.is_zero() {
|
||||
let output = match self {
|
||||
Self::Production { .. } => {
|
||||
smol::block_on(util::timeout(timeout, Pin::new(&mut future))).ok()
|
||||
}
|
||||
Self::Deterministic(executor) => executor.block_on(Pin::new(&mut future)),
|
||||
Self::Production { .. } => smol::block_on(util::timeout(timeout, &mut future)).ok(),
|
||||
Self::Deterministic { executor, .. } => executor.block_on(&mut future),
|
||||
};
|
||||
if let Some(output) = output {
|
||||
return Ok(output);
|
||||
return Ok(*output.downcast().unwrap());
|
||||
}
|
||||
}
|
||||
Err(future)
|
||||
Err(async { *future.await.downcast().unwrap() })
|
||||
}
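With the new signature, a timed-out call hands the still-pending work back to the caller as a future rather than the original F. A standalone sketch of that pattern built directly on smol and futures-lite (an assumption; the gpui version above also routes through the deterministic executor and type-erased futures):

use std::time::Duration;

fn block_with_timeout<T, F>(timeout: Duration, mut future: F) -> Result<T, F>
where
    F: std::future::Future<Output = T> + Unpin,
{
    // Race the work against a timer; on timeout, return the unfinished future to the caller.
    let result = smol::block_on(futures_lite::future::or(
        async { Some((&mut future).await) },
        async {
            smol::Timer::after(timeout).await;
            None
        },
    ));
    match result {
        Some(value) => Ok(value),
        None => Err(future),
    }
}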
|
||||
|
||||
pub async fn scoped<'scope, F>(&self, scheduler: F)
|
||||
@@ -569,6 +657,89 @@ pub fn deterministic(seed: u64) -> (Rc<Foreground>, Arc<Background>) {
|
||||
let executor = Arc::new(Deterministic::new(seed));
|
||||
(
|
||||
Rc::new(Foreground::Deterministic(executor.clone())),
|
||||
Arc::new(Background::Deterministic(executor)),
|
||||
Arc::new(Background::Deterministic { executor }),
|
||||
)
|
||||
}
|
||||
|
||||
impl<T> Task<T> {
|
||||
pub fn ready(value: T) -> Self {
|
||||
Self::Ready(Some(value))
|
||||
}
|
||||
|
||||
fn local(any_task: AnyLocalTask) -> Self {
|
||||
Self::Local {
|
||||
any_task,
|
||||
result_type: PhantomData,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn detach(self) {
|
||||
match self {
|
||||
Task::Ready(_) => {}
|
||||
Task::Local { any_task, .. } => any_task.detach(),
|
||||
Task::Send { any_task, .. } => any_task.detach(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: 'static, E: 'static + Display> Task<Result<T, E>> {
|
||||
pub fn detach_and_log_err(self, cx: &mut MutableAppContext) {
|
||||
cx.spawn(|_| async move {
|
||||
if let Err(err) = self.await {
|
||||
log::error!("{}", err);
|
||||
}
|
||||
})
|
||||
.detach();
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: Send> Task<T> {
|
||||
fn send(any_task: AnyTask) -> Self {
|
||||
Self::Send {
|
||||
any_task,
|
||||
result_type: PhantomData,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: fmt::Debug> fmt::Debug for Task<T> {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
match self {
|
||||
Task::Ready(value) => value.fmt(f),
|
||||
Task::Local { any_task, .. } => any_task.fmt(f),
|
||||
Task::Send { any_task, .. } => any_task.fmt(f),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: 'static> Future for Task<T> {
|
||||
type Output = T;
|
||||
|
||||
fn poll(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Self::Output> {
|
||||
match unsafe { self.get_unchecked_mut() } {
|
||||
Task::Ready(value) => Poll::Ready(value.take().unwrap()),
|
||||
Task::Local { any_task, .. } => {
|
||||
any_task.poll(cx).map(|value| *value.downcast().unwrap())
|
||||
}
|
||||
Task::Send { any_task, .. } => {
|
||||
any_task.poll(cx).map(|value| *value.downcast().unwrap())
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn any_future<T, F>(future: F) -> AnyFuture
|
||||
where
|
||||
T: 'static + Send,
|
||||
F: Future<Output = T> + Send + 'static,
|
||||
{
|
||||
async { Box::new(future.await) as Box<dyn Any + Send> }.boxed()
|
||||
}
|
||||
|
||||
fn any_local_future<T, F>(future: F) -> AnyLocalFuture
|
||||
where
|
||||
T: 'static,
|
||||
F: Future<Output = T> + 'static,
|
||||
{
|
||||
async { Box::new(future.await) as Box<dyn Any> }.boxed_local()
|
||||
}
|
||||
@@ -17,7 +17,7 @@ use std::{
|
||||
pub struct FamilyId(usize);
|
||||
|
||||
struct Family {
|
||||
name: String,
|
||||
name: Arc<str>,
|
||||
font_ids: Vec<FontId>,
|
||||
}
|
||||
|
||||
@@ -49,7 +49,7 @@ impl FontCache {
|
||||
}))
|
||||
}
|
||||
|
||||
pub fn family_name(&self, family_id: FamilyId) -> Result<String> {
|
||||
pub fn family_name(&self, family_id: FamilyId) -> Result<Arc<str>> {
|
||||
self.0
|
||||
.read()
|
||||
.families
|
||||
@@ -62,7 +62,7 @@ impl FontCache {
|
||||
for name in names {
|
||||
let state = self.0.upgradable_read();
|
||||
|
||||
if let Some(ix) = state.families.iter().position(|f| f.name == *name) {
|
||||
if let Some(ix) = state.families.iter().position(|f| f.name.as_ref() == *name) {
|
||||
return Ok(FamilyId(ix));
|
||||
}
|
||||
|
||||
@@ -81,7 +81,7 @@ impl FontCache {
|
||||
}
|
||||
|
||||
state.families.push(Family {
|
||||
name: String::from(*name),
|
||||
name: Arc::from(*name),
|
||||
font_ids,
|
||||
});
|
||||
return Ok(family_id);
|
||||
@@ -141,8 +141,8 @@ impl FontCache {
|
||||
|
||||
pub fn bounding_box(&self, font_id: FontId, font_size: f32) -> Vector2F {
|
||||
let bounding_box = self.metric(font_id, |m| m.bounding_box);
|
||||
let width = self.scale_metric(bounding_box.width(), font_id, font_size);
|
||||
let height = self.scale_metric(bounding_box.height(), font_id, font_size);
|
||||
let width = bounding_box.width() * self.em_scale(font_id, font_size);
|
||||
let height = bounding_box.height() * self.em_scale(font_id, font_size);
|
||||
vec2f(width, height)
|
||||
}
|
||||
|
||||
@@ -154,28 +154,51 @@ impl FontCache {
|
||||
glyph_id = state.fonts.glyph_for_char(font_id, 'm').unwrap();
|
||||
bounds = state.fonts.typographic_bounds(font_id, glyph_id).unwrap();
|
||||
}
|
||||
self.scale_metric(bounds.width(), font_id, font_size)
|
||||
bounds.width() * self.em_scale(font_id, font_size)
|
||||
}
|
||||
|
||||
pub fn em_advance(&self, font_id: FontId, font_size: f32) -> f32 {
|
||||
let glyph_id;
|
||||
let advance;
|
||||
{
|
||||
let state = self.0.read();
|
||||
glyph_id = state.fonts.glyph_for_char(font_id, 'm').unwrap();
|
||||
advance = state.fonts.advance(font_id, glyph_id).unwrap();
|
||||
}
|
||||
advance.x() * self.em_scale(font_id, font_size)
|
||||
}
|
||||
|
||||
pub fn line_height(&self, font_id: FontId, font_size: f32) -> f32 {
|
||||
let height = self.metric(font_id, |m| m.bounding_box.height());
|
||||
self.scale_metric(height, font_id, font_size)
|
||||
(height * self.em_scale(font_id, font_size)).ceil()
|
||||
}
|
||||
|
||||
pub fn cap_height(&self, font_id: FontId, font_size: f32) -> f32 {
|
||||
self.scale_metric(self.metric(font_id, |m| m.cap_height), font_id, font_size)
|
||||
self.metric(font_id, |m| m.cap_height) * self.em_scale(font_id, font_size)
|
||||
}
|
||||
|
||||
pub fn x_height(&self, font_id: FontId, font_size: f32) -> f32 {
|
||||
self.metric(font_id, |m| m.x_height) * self.em_scale(font_id, font_size)
|
||||
}
|
||||
|
||||
pub fn ascent(&self, font_id: FontId, font_size: f32) -> f32 {
|
||||
self.scale_metric(self.metric(font_id, |m| m.ascent), font_id, font_size)
|
||||
self.metric(font_id, |m| m.ascent) * self.em_scale(font_id, font_size)
|
||||
}
|
||||
|
||||
pub fn descent(&self, font_id: FontId, font_size: f32) -> f32 {
|
||||
self.scale_metric(self.metric(font_id, |m| -m.descent), font_id, font_size)
|
||||
self.metric(font_id, |m| -m.descent) * self.em_scale(font_id, font_size)
|
||||
}
|
||||
|
||||
pub fn scale_metric(&self, metric: f32, font_id: FontId, font_size: f32) -> f32 {
|
||||
metric * font_size / self.metric(font_id, |m| m.units_per_em as f32)
|
||||
pub fn em_scale(&self, font_id: FontId, font_size: f32) -> f32 {
|
||||
font_size / self.metric(font_id, |m| m.units_per_em as f32)
|
||||
}
|
||||
|
||||
pub fn baseline_offset(&self, font_id: FontId, font_size: f32) -> f32 {
|
||||
let line_height = self.line_height(font_id, font_size);
|
||||
let ascent = self.ascent(font_id, font_size);
|
||||
let descent = self.descent(font_id, font_size);
|
||||
let padding_top = (line_height - ascent - descent) / 2.;
|
||||
padding_top + ascent
|
||||
}
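The metric helpers above now share a single conversion factor, font_size / units_per_em, with line_height additionally rounded up to a whole pixel. A worked example with assumed font metrics (1000 units per em, an 800-unit ascent, a 1200-unit bounding-box height, 14 px font size):

fn main() {
    let (units_per_em, font_size) = (1000.0_f32, 14.0_f32);
    let em_scale = font_size / units_per_em;          // 0.014
    let ascent_px = 800.0 * em_scale;                 // ~11.2
    let line_height_px = (1200.0 * em_scale).ceil();  // 16.8 rounded up to 17.0
    assert!((ascent_px - 11.2).abs() < 1e-3);
    assert_eq!(line_height_px, 17.0);
}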
|
||||
|
||||
pub fn line_wrapper(self: &Arc<Self>, font_id: FontId, font_size: f32) -> LineWrapperHandle {
|
||||
@@ -1,5 +1,6 @@
|
||||
use crate::{
|
||||
color::Color,
|
||||
font_cache::FamilyId,
|
||||
json::{json, ToJson},
|
||||
text_layout::RunStyle,
|
||||
FontCache,
|
||||
@@ -22,17 +23,18 @@ pub type GlyphId = u32;
|
||||
pub struct TextStyle {
|
||||
pub color: Color,
|
||||
pub font_family_name: Arc<str>,
|
||||
pub font_family_id: FamilyId,
|
||||
pub font_id: FontId,
|
||||
pub font_size: f32,
|
||||
pub font_properties: Properties,
|
||||
pub underline: bool,
|
||||
pub underline: Option<Color>,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, Default)]
|
||||
#[derive(Copy, Clone, Debug, Default)]
|
||||
pub struct HighlightStyle {
|
||||
pub color: Color,
|
||||
pub font_properties: Properties,
|
||||
pub underline: bool,
|
||||
pub underline: Option<Color>,
|
||||
}
|
||||
|
||||
#[allow(non_camel_case_types)]
|
||||
@@ -62,7 +64,7 @@ struct TextStyleJson {
|
||||
#[serde(default)]
|
||||
italic: bool,
|
||||
#[serde(default)]
|
||||
underline: bool,
|
||||
underline: UnderlineStyleJson,
|
||||
}
|
||||
|
||||
#[derive(Deserialize)]
|
||||
@@ -72,7 +74,14 @@ struct HighlightStyleJson {
|
||||
#[serde(default)]
|
||||
italic: bool,
|
||||
#[serde(default)]
|
||||
underline: bool,
|
||||
underline: UnderlineStyleJson,
|
||||
}
|
||||
|
||||
#[derive(Deserialize)]
|
||||
#[serde(untagged)]
|
||||
enum UnderlineStyleJson {
|
||||
Underlined(bool),
|
||||
UnderlinedWithColor(Color),
|
||||
}
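Because UnderlineStyleJson is untagged, a theme file can supply either a bool or a color for `underline`. A minimal standalone sketch of the same shape using serde_json and a numeric stand-in for the color type (gpui's Color is not used here):

use serde::Deserialize;

#[derive(Debug, Deserialize)]
#[serde(untagged)]
enum Underline {
    Underlined(bool),
    UnderlinedWithColor(u32), // stand-in for a color value
}

fn main() {
    // `true` matches the bool variant; anything else falls through to the color variant.
    let plain: Underline = serde_json::from_str("true").unwrap();
    let colored: Underline = serde_json::from_str("16711680").unwrap();
    println!("{:?} / {:?}", plain, colored);
}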
|
||||
|
||||
impl TextStyle {
|
||||
@@ -80,16 +89,17 @@ impl TextStyle {
|
||||
font_family_name: impl Into<Arc<str>>,
|
||||
font_size: f32,
|
||||
font_properties: Properties,
|
||||
underline: bool,
|
||||
underline: Option<Color>,
|
||||
color: Color,
|
||||
font_cache: &FontCache,
|
||||
) -> anyhow::Result<Self> {
|
||||
let font_family_name = font_family_name.into();
|
||||
let family_id = font_cache.load_family(&[&font_family_name])?;
|
||||
let font_id = font_cache.select_font(family_id, &font_properties)?;
|
||||
let font_family_id = font_cache.load_family(&[&font_family_name])?;
|
||||
let font_id = font_cache.select_font(font_family_id, &font_properties)?;
|
||||
Ok(Self {
|
||||
color,
|
||||
font_family_name,
|
||||
font_family_id,
|
||||
font_id,
|
||||
font_size,
|
||||
font_properties,
|
||||
@@ -113,7 +123,7 @@ impl TextStyle {
|
||||
json.family,
|
||||
json.size,
|
||||
font_properties,
|
||||
json.underline,
|
||||
underline_from_json(json.underline, json.color),
|
||||
json.color,
|
||||
font_cache,
|
||||
)
|
||||
@@ -124,6 +134,80 @@ impl TextStyle {
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
pub fn line_height(&self, font_cache: &FontCache) -> f32 {
|
||||
font_cache.line_height(self.font_id, self.font_size)
|
||||
}
|
||||
|
||||
pub fn cap_height(&self, font_cache: &FontCache) -> f32 {
|
||||
font_cache.cap_height(self.font_id, self.font_size)
|
||||
}
|
||||
|
||||
pub fn x_height(&self, font_cache: &FontCache) -> f32 {
|
||||
font_cache.x_height(self.font_id, self.font_size)
|
||||
}
|
||||
|
||||
pub fn em_width(&self, font_cache: &FontCache) -> f32 {
|
||||
font_cache.em_width(self.font_id, self.font_size)
|
||||
}
|
||||
|
||||
pub fn em_advance(&self, font_cache: &FontCache) -> f32 {
|
||||
font_cache.em_advance(self.font_id, self.font_size)
|
||||
}
|
||||
|
||||
pub fn descent(&self, font_cache: &FontCache) -> f32 {
|
||||
font_cache.metric(self.font_id, |m| m.descent) * self.em_scale(font_cache)
|
||||
}
|
||||
|
||||
pub fn baseline_offset(&self, font_cache: &FontCache) -> f32 {
|
||||
font_cache.baseline_offset(self.font_id, self.font_size)
|
||||
}
|
||||
|
||||
fn em_scale(&self, font_cache: &FontCache) -> f32 {
|
||||
font_cache.em_scale(self.font_id, self.font_size)
|
||||
}
|
||||
}
|
||||
|
||||
impl From<TextStyle> for HighlightStyle {
|
||||
fn from(other: TextStyle) -> Self {
|
||||
Self {
|
||||
color: other.color,
|
||||
font_properties: other.font_properties,
|
||||
underline: other.underline,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Default for UnderlineStyleJson {
|
||||
fn default() -> Self {
|
||||
Self::Underlined(false)
|
||||
}
|
||||
}
|
||||
|
||||
impl Default for TextStyle {
|
||||
fn default() -> Self {
|
||||
FONT_CACHE.with(|font_cache| {
|
||||
let font_cache = font_cache.borrow();
|
||||
let font_cache = font_cache
|
||||
.as_ref()
|
||||
.expect("TextStyle::default can only be called within a call to with_font_cache");
|
||||
|
||||
let font_family_name = Arc::from("Courier");
|
||||
let font_family_id = font_cache.load_family(&[&font_family_name]).unwrap();
|
||||
let font_id = font_cache
|
||||
.select_font(font_family_id, &Default::default())
|
||||
.unwrap();
|
||||
Self {
|
||||
color: Default::default(),
|
||||
font_family_name,
|
||||
font_family_id,
|
||||
font_id,
|
||||
font_size: 14.,
|
||||
font_properties: Default::default(),
|
||||
underline: Default::default(),
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
impl HighlightStyle {
|
||||
@@ -132,7 +216,7 @@ impl HighlightStyle {
|
||||
Self {
|
||||
color: json.color,
|
||||
font_properties,
|
||||
underline: json.underline,
|
||||
underline: underline_from_json(json.underline, json.color),
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -142,7 +226,7 @@ impl From<Color> for HighlightStyle {
|
||||
Self {
|
||||
color,
|
||||
font_properties: Default::default(),
|
||||
underline: false,
|
||||
underline: None,
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -181,12 +265,20 @@ impl<'de> Deserialize<'de> for HighlightStyle {
|
||||
Ok(Self {
|
||||
color: serde_json::from_value(json).map_err(de::Error::custom)?,
|
||||
font_properties: Properties::new(),
|
||||
underline: false,
|
||||
underline: None,
|
||||
})
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn underline_from_json(json: UnderlineStyleJson, text_color: Color) -> Option<Color> {
|
||||
match json {
|
||||
UnderlineStyleJson::Underlined(false) => None,
|
||||
UnderlineStyleJson::Underlined(true) => Some(text_color),
|
||||
UnderlineStyleJson::UnderlinedWithColor(color) => Some(color),
|
||||
}
|
||||
}
|
||||
|
||||
fn properties_from_json(weight: Option<WeightJson>, italic: bool) -> Properties {
|
||||
let weight = match weight.unwrap_or(WeightJson::normal) {
|
||||
WeightJson::thin => Weight::THIN,
|
||||
@@ -1,12 +1,13 @@
|
||||
mod app;
|
||||
pub use app::*;
|
||||
mod assets;
|
||||
pub mod sum_tree;
|
||||
#[cfg(test)]
|
||||
mod test;
|
||||
#[cfg(any(test, feature = "test-support"))]
|
||||
pub mod test;
|
||||
pub use assets::*;
|
||||
pub mod elements;
|
||||
pub mod font_cache;
|
||||
mod image_data;
|
||||
pub use crate::image_data::ImageData;
|
||||
pub mod views;
|
||||
pub use font_cache::FontCache;
|
||||
mod clipboard;
|
||||
crates/gpui/src/image_data.rs (new file, 43 lines)
@@ -0,0 +1,43 @@
|
||||
use crate::geometry::vector::{vec2i, Vector2I};
|
||||
use image::{Bgra, ImageBuffer};
|
||||
use std::{
|
||||
fmt,
|
||||
sync::{
|
||||
atomic::{AtomicUsize, Ordering::SeqCst},
|
||||
Arc,
|
||||
},
|
||||
};
|
||||
|
||||
pub struct ImageData {
|
||||
pub id: usize,
|
||||
data: ImageBuffer<Bgra<u8>, Vec<u8>>,
|
||||
}
|
||||
|
||||
impl ImageData {
|
||||
pub fn new(data: ImageBuffer<Bgra<u8>, Vec<u8>>) -> Arc<Self> {
|
||||
static NEXT_ID: AtomicUsize = AtomicUsize::new(0);
|
||||
|
||||
Arc::new(Self {
|
||||
id: NEXT_ID.fetch_add(1, SeqCst),
|
||||
data,
|
||||
})
|
||||
}
|
||||
|
||||
pub fn as_bytes(&self) -> &[u8] {
|
||||
&self.data
|
||||
}
|
||||
|
||||
pub fn size(&self) -> Vector2I {
|
||||
let (width, height) = self.data.dimensions();
|
||||
vec2i(width as i32, height as i32)
|
||||
}
|
||||
}
|
||||
|
||||
impl fmt::Debug for ImageData {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
f.debug_struct("ImageData")
|
||||
.field("id", &self.id)
|
||||
.field("size", &self.data.dimensions())
|
||||
.finish()
|
||||
}
|
||||
}
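A hypothetical call site for the new type; the buffer contents are illustrative, and Bgra comes from the image crate version this code targets:

// Wrap a 2x2 BGRA buffer; the renderer can key cached textures by `id`.
let buffer = image::ImageBuffer::from_pixel(2, 2, image::Bgra([0u8, 0, 0, 255]));
let data = ImageData::new(buffer);
assert_eq!(data.size(), vec2i(2, 2));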
|
||||
@@ -93,6 +93,10 @@ impl Matcher {
|
||||
self.keymap.add_bindings(bindings);
|
||||
}
|
||||
|
||||
pub fn clear_pending(&mut self) {
|
||||
self.pending.clear();
|
||||
}
|
||||
|
||||
pub fn push_keystroke(
|
||||
&mut self,
|
||||
keystroke: Keystroke,
|
||||
@@ -12,7 +12,7 @@ use crate::{
|
||||
fonts::{FontId, GlyphId, Metrics as FontMetrics, Properties as FontProperties},
|
||||
geometry::{
|
||||
rect::{RectF, RectI},
|
||||
vector::{vec2f, Vector2F},
|
||||
vector::Vector2F,
|
||||
},
|
||||
text_layout::{LineLayout, RunStyle},
|
||||
AnyAction, ClipboardItem, Menu, Scene,
|
||||
@@ -48,15 +48,19 @@ pub trait Platform: Send + Sync {
|
||||
|
||||
fn write_credentials(&self, url: &str, username: &str, password: &[u8]) -> Result<()>;
|
||||
fn read_credentials(&self, url: &str) -> Result<Option<(String, Vec<u8>)>>;
|
||||
fn delete_credentials(&self, url: &str) -> Result<()>;
|
||||
|
||||
fn set_cursor_style(&self, style: CursorStyle);
|
||||
|
||||
fn local_timezone(&self) -> UtcOffset;
|
||||
|
||||
fn path_for_resource(&self, name: Option<&str>, extension: Option<&str>) -> Result<PathBuf>;
|
||||
}
|
||||
|
||||
pub(crate) trait ForegroundPlatform {
|
||||
fn on_become_active(&self, callback: Box<dyn FnMut()>);
|
||||
fn on_resign_active(&self, callback: Box<dyn FnMut()>);
|
||||
fn on_quit(&self, callback: Box<dyn FnMut()>);
|
||||
fn on_event(&self, callback: Box<dyn FnMut(Event) -> bool>);
|
||||
fn on_open_files(&self, callback: Box<dyn FnMut(Vec<PathBuf>)>);
|
||||
fn run(&self, on_finish_launching: Box<dyn FnOnce() -> ()>);
|
||||
@@ -101,13 +105,20 @@ pub trait WindowContext {
|
||||
fn present_scene(&mut self, scene: Scene);
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct WindowOptions<'a> {
|
||||
pub bounds: RectF,
|
||||
pub bounds: WindowBounds,
|
||||
pub title: Option<&'a str>,
|
||||
pub titlebar_appears_transparent: bool,
|
||||
pub traffic_light_position: Option<Vector2F>,
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub enum WindowBounds {
|
||||
Maximized,
|
||||
Fixed(RectF),
|
||||
}
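A hypothetical construction of WindowOptions with the new bounds enum; the field values are illustrative:

let options = WindowOptions {
    bounds: WindowBounds::Fixed(RectF::new(vec2f(0., 0.), vec2f(1024., 768.))),
    title: Some("Zed"),
    titlebar_appears_transparent: true,
    traffic_light_position: Some(vec2f(8., 8.)),
};
// Or let the platform maximize the window:
// let maximized = WindowOptions { bounds: WindowBounds::Maximized, ..Default::default() };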
|
||||
|
||||
pub struct PathPromptOptions {
|
||||
pub files: bool,
|
||||
pub directories: bool,
|
||||
@@ -137,6 +148,7 @@ pub trait FontSystem: Send + Sync {
|
||||
) -> anyhow::Result<FontId>;
|
||||
fn font_metrics(&self, font_id: FontId) -> FontMetrics;
|
||||
fn typographic_bounds(&self, font_id: FontId, glyph_id: GlyphId) -> anyhow::Result<RectF>;
|
||||
fn advance(&self, font_id: FontId, glyph_id: GlyphId) -> anyhow::Result<Vector2F>;
|
||||
fn glyph_for_char(&self, font_id: FontId, ch: char) -> Option<GlyphId>;
|
||||
fn rasterize_glyph(
|
||||
&self,
|
||||
@@ -153,7 +165,7 @@ pub trait FontSystem: Send + Sync {
|
||||
impl<'a> Default for WindowOptions<'a> {
|
||||
fn default() -> Self {
|
||||
Self {
|
||||
bounds: RectF::new(Default::default(), vec2f(1024.0, 768.0)),
|
||||
bounds: WindowBounds::Maximized,
|
||||
title: Default::default(),
|
||||
titlebar_appears_transparent: Default::default(),
|
||||
traffic_light_position: Default::default(),
|
||||
@@ -14,7 +14,11 @@ pub enum Event {
|
||||
},
|
||||
LeftMouseDown {
|
||||
position: Vector2F,
|
||||
ctrl: bool,
|
||||
alt: bool,
|
||||
shift: bool,
|
||||
cmd: bool,
|
||||
click_count: usize,
|
||||
},
|
||||
LeftMouseUp {
|
||||
position: Vector2F,
|
||||
Some files were not shown because too many files have changed in this diff.