Compare commits
1556 Commits
v0.105.5
...
collab-v0.
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
51ecf8e6b4 | ||
|
|
b910bbf002 | ||
|
|
bd61d71018 | ||
|
|
7d300d3f5b | ||
|
|
1568ecbe1e | ||
|
|
51fa80ef06 | ||
|
|
b7625d81e8 | ||
|
|
bbe53895ef | ||
|
|
1b9619ca7f | ||
|
|
272f856460 | ||
|
|
36a73d657a | ||
|
|
18431051d9 | ||
|
|
795369a1e3 | ||
|
|
244e8ce101 | ||
|
|
ed5f1d3bdd | ||
|
|
66b520a513 | ||
|
|
0aa9c6b61d | ||
|
|
7b6514b178 | ||
|
|
6a3974ddbb | ||
|
|
84c5494949 | ||
|
|
45e695c906 | ||
|
|
6ee9beed73 | ||
|
|
88875fd006 | ||
|
|
c98a811c0b | ||
|
|
bcf3bac168 | ||
|
|
db34de6be4 | ||
|
|
f5b13071f1 | ||
|
|
6f1197e00c | ||
|
|
327a2f9967 | ||
|
|
58446c2715 | ||
|
|
d6abd8a2b4 | ||
|
|
30dffbb409 | ||
|
|
f33fc1b6fa | ||
|
|
d219ddbdaf | ||
|
|
b76ce65a63 | ||
|
|
607813e646 | ||
|
|
613afd3f66 | ||
|
|
49571127da | ||
|
|
609836397b | ||
|
|
a02d806715 | ||
|
|
991f58409e | ||
|
|
e0f68c77b0 | ||
|
|
b128377cd2 | ||
|
|
ccccf84867 | ||
|
|
fd6f6cc9f8 | ||
|
|
29a32039ba | ||
|
|
7841a56a11 | ||
|
|
b34f0c3bee | ||
|
|
033d0ae610 | ||
|
|
0128079de0 | ||
|
|
78573fb140 | ||
|
|
942167e046 | ||
|
|
0efbc81b0f | ||
|
|
c17b246bac | ||
|
|
bc4f8fbf4e | ||
|
|
5ff70f7dba | ||
|
|
ba789fc0c4 | ||
|
|
d5077d9d0c | ||
|
|
1a54ac0d69 | ||
|
|
04ab68502b | ||
|
|
14d24a9ac6 | ||
|
|
7bacdefa2e | ||
|
|
7b4e699d0e | ||
|
|
204aba07f6 | ||
|
|
510ab60bd4 | ||
|
|
e63a611c81 | ||
|
|
08e9b2e848 | ||
|
|
dc8a853842 | ||
|
|
b31a004def | ||
|
|
d3cd5f3ec5 | ||
|
|
3d8516b25f | ||
|
|
7a66ebae71 | ||
|
|
4986d4771d | ||
|
|
142b94bdd4 | ||
|
|
f3c113fe02 | ||
|
|
2879d231b1 | ||
|
|
3591ffe4a7 | ||
|
|
b8ae949dbe | ||
|
|
61cc3b93e3 | ||
|
|
a2c3971ad6 | ||
|
|
25e882d72a | ||
|
|
b3c3adab50 | ||
|
|
bd30ce837d | ||
|
|
6aca2289eb | ||
|
|
b46a4b5680 | ||
|
|
249bec3cac | ||
|
|
f16ff79ff8 | ||
|
|
96bbb5cdea | ||
|
|
37a561df0a | ||
|
|
dd89b2e6d4 | ||
|
|
34747bbbbc | ||
|
|
1e8b23d8fb | ||
|
|
558f54c424 | ||
|
|
10b581f250 | ||
|
|
6b65d76014 | ||
|
|
e27427dce8 | ||
|
|
f88ca2e7da | ||
|
|
ad7c49e4bb | ||
|
|
b4225ac82a | ||
|
|
ba189f2af1 | ||
|
|
7af77b1cf9 | ||
|
|
67ecc2fe04 | ||
|
|
af0c010b4a | ||
|
|
d2ab0d651e | ||
|
|
079de6fdf7 | ||
|
|
4a6a17d866 | ||
|
|
5f5234c5da | ||
|
|
c1904b493b | ||
|
|
ec9d79b6fe | ||
|
|
6c8bb4b05e | ||
|
|
bcdffc9963 | ||
|
|
a1c3826858 | ||
|
|
f75eb3f626 | ||
|
|
071e9b4871 | ||
|
|
30e7978dcf | ||
|
|
8e3314e680 | ||
|
|
832026a0a2 | ||
|
|
a23e05c20b | ||
|
|
516236e044 | ||
|
|
637cff3ebd | ||
|
|
8db3b3b4ca | ||
|
|
98d03f6e7a | ||
|
|
560e115656 | ||
|
|
973ca8d4f0 | ||
|
|
692aeff263 | ||
|
|
88a3a57789 | ||
|
|
61694bba6a | ||
|
|
65045b9c52 | ||
|
|
0eae962abf | ||
|
|
a569c82492 | ||
|
|
ed233d583f | ||
|
|
b3ad8c1c3e | ||
|
|
3cc91e5742 | ||
|
|
6784ccd9fe | ||
|
|
1887f3b594 | ||
|
|
8bc2071414 | ||
|
|
58e8012d8c | ||
|
|
8a70ef3e8f | ||
|
|
eb19071d84 | ||
|
|
7b4a895ab9 | ||
|
|
d62c51a4b8 | ||
|
|
fccc4ca85c | ||
|
|
42095f0673 | ||
|
|
88ef74ec8f | ||
|
|
6fe393db2a | ||
|
|
69e5ecc015 | ||
|
|
c9c9db903d | ||
|
|
4539cef6d7 | ||
|
|
ca82ec8e8e | ||
|
|
0eafb8886d | ||
|
|
27d2accb51 | ||
|
|
0a04c5734b | ||
|
|
8a0fb668f7 | ||
|
|
f4cff69729 | ||
|
|
a4b7e3c9f6 | ||
|
|
065d26f5b2 | ||
|
|
0285284ae1 | ||
|
|
8ecfea55cd | ||
|
|
170ebd8221 | ||
|
|
315744ec20 | ||
|
|
06c22206af | ||
|
|
6172cd9015 | ||
|
|
3bcae6d7de | ||
|
|
927278e20d | ||
|
|
71ad3e1b20 | ||
|
|
693246ba26 | ||
|
|
b875be9689 | ||
|
|
07b9ec29e6 | ||
|
|
db15db45ce | ||
|
|
6e9c779c51 | ||
|
|
483ac9491a | ||
|
|
6cf7d0b81c | ||
|
|
6185c935a6 | ||
|
|
45a8aea0f0 | ||
|
|
3447a9478c | ||
|
|
db7d12f628 | ||
|
|
4266ead958 | ||
|
|
5651ef7ca2 | ||
|
|
51aa0d6a94 | ||
|
|
bc3572f80e | ||
|
|
1ec6638c7f | ||
|
|
e31a9401a8 | ||
|
|
65389a8fac | ||
|
|
9fb9885931 | ||
|
|
8b972f6d8e | ||
|
|
965bfd8439 | ||
|
|
9c10152c89 | ||
|
|
28ef30f7a2 | ||
|
|
e6f2288a0c | ||
|
|
ffcec011f8 | ||
|
|
06bff41818 | ||
|
|
c5763cdb99 | ||
|
|
43d230cb2d | ||
|
|
7ec9cc08c7 | ||
|
|
5355007719 | ||
|
|
b5eae86b67 | ||
|
|
e67048ee7b | ||
|
|
5ba04dce2b | ||
|
|
7f6bb3d1eb | ||
|
|
412c0ff7dc | ||
|
|
1936ba5e30 | ||
|
|
9986e526e5 | ||
|
|
3460e95899 | ||
|
|
39480364bd | ||
|
|
1b75603f63 | ||
|
|
52f9f90ccb | ||
|
|
98e2490807 | ||
|
|
a0fe859d87 | ||
|
|
51fb33d8ef | ||
|
|
3a369bc207 | ||
|
|
92535ba2c4 | ||
|
|
6f6495ccbf | ||
|
|
08a4e53cfe | ||
|
|
c8a86c2dd1 | ||
|
|
841a5ef7b8 | ||
|
|
2c5caf91bc | ||
|
|
3566d920c5 | ||
|
|
cee8175c19 | ||
|
|
751df45263 | ||
|
|
eb8d376274 | ||
|
|
f67f42779b | ||
|
|
6b5947a1fa | ||
|
|
437d147935 | ||
|
|
0867175a4e | ||
|
|
4fcbc91aca | ||
|
|
e1032c5341 | ||
|
|
71bc35d241 | ||
|
|
c44d1cda9a | ||
|
|
26a3d41dc7 | ||
|
|
71c72d8e08 | ||
|
|
bb3f59252e | ||
|
|
258496700f | ||
|
|
8043d0d8a9 | ||
|
|
90f65ec9fe | ||
|
|
4f859e0253 | ||
|
|
dd34bb273e | ||
|
|
b5cbfb8f1d | ||
|
|
32367eba14 | ||
|
|
42259a4007 | ||
|
|
70eeefa1f8 | ||
|
|
6f173c64b3 | ||
|
|
b8bd070a83 | ||
|
|
c77376225d | ||
|
|
beb0af9763 | ||
|
|
1411b98a5d | ||
|
|
1c5b321058 | ||
|
|
a01b507ef4 | ||
|
|
dfc34e582a | ||
|
|
5b04f965fa | ||
|
|
77991ad865 | ||
|
|
afad2fbfa2 | ||
|
|
d6bd000aa8 | ||
|
|
bfdff4e2e0 | ||
|
|
4ca7ddfc42 | ||
|
|
51c66f508b | ||
|
|
5c03b6a610 | ||
|
|
cc9e92857b | ||
|
|
4740d6ed61 | ||
|
|
18930c33fe | ||
|
|
5fb8321ee0 | ||
|
|
09866ec3e9 | ||
|
|
9f8aaa4cdb | ||
|
|
1ad7f3a16d | ||
|
|
11953e613b | ||
|
|
0f03f8ff8a | ||
|
|
c8dfccff36 | ||
|
|
beb91fa094 | ||
|
|
9aaf7d0c46 | ||
|
|
c9e670397f | ||
|
|
463b24949e | ||
|
|
191285a8f1 | ||
|
|
9f9137cf76 | ||
|
|
9f48cdac83 | ||
|
|
b090cefdde | ||
|
|
bfb37bbe94 | ||
|
|
8117e7933c | ||
|
|
e9ce935991 | ||
|
|
2285a35016 | ||
|
|
0910760b76 | ||
|
|
cf380a9f62 | ||
|
|
24e21a4c29 | ||
|
|
bb35583998 | ||
|
|
0e48465adb | ||
|
|
4d5ca37edb | ||
|
|
ff497810dd | ||
|
|
feefb8d063 | ||
|
|
0dd45bbf21 | ||
|
|
23ad0a2c58 | ||
|
|
8ffe5a3ec7 | ||
|
|
9a53da28bc | ||
|
|
f2710f37c5 | ||
|
|
785901c75e | ||
|
|
171db00f6e | ||
|
|
6f5cf10acb | ||
|
|
6a532af1fd | ||
|
|
47f979d457 | ||
|
|
48cdefe6cf | ||
|
|
3358420f6a | ||
|
|
2b95db087b | ||
|
|
aa6990bb6b | ||
|
|
0e035c1a95 | ||
|
|
953857f8e3 | ||
|
|
18eb4a7292 | ||
|
|
e6087e0ed9 | ||
|
|
67e590202a | ||
|
|
97a0864134 | ||
|
|
6c163afb84 | ||
|
|
e99d862f3f | ||
|
|
c1c9db2ae2 | ||
|
|
ea4e67fb76 | ||
|
|
4d621f355d | ||
|
|
05cbceec24 | ||
|
|
297cef14ed | ||
|
|
0dfbfdd164 | ||
|
|
192b3512fd | ||
|
|
3a326bfa7e | ||
|
|
dd55ccef34 | ||
|
|
1e13e273d2 | ||
|
|
438cf529bb | ||
|
|
ec0b2e5430 | ||
|
|
21d4546a86 | ||
|
|
7832120a4c | ||
|
|
efbf0c828d | ||
|
|
c9d214e8ef | ||
|
|
38a7b39070 | ||
|
|
239b0c2f71 | ||
|
|
6e4e19d8fc | ||
|
|
cc445f7cef | ||
|
|
258fcaea94 | ||
|
|
fc927f7406 | ||
|
|
d1adce5890 | ||
|
|
a5836b0337 | ||
|
|
4e6fb9034d | ||
|
|
4d491ca3a1 | ||
|
|
2f0eaafc39 | ||
|
|
96f2c4a9de | ||
|
|
4e90e45999 | ||
|
|
8766e5f0d0 | ||
|
|
812ff9a97d | ||
|
|
a72434f67b | ||
|
|
c0e8ae5dfa | ||
|
|
0de4a93ec7 | ||
|
|
da8919002f | ||
|
|
5247f217fd | ||
|
|
56462ef793 | ||
|
|
b495669c86 | ||
|
|
3cf98c4fae | ||
|
|
9589f5573d | ||
|
|
a0b667a2ca | ||
|
|
e7c04d4aca | ||
|
|
72435af170 | ||
|
|
ce75be91e1 | ||
|
|
db6a3e1783 | ||
|
|
763b13e700 | ||
|
|
59adcc1744 | ||
|
|
50bbdd5cab | ||
|
|
5d10dc7e58 | ||
|
|
7171818d24 | ||
|
|
48033463c8 | ||
|
|
6ffeb048b3 | ||
|
|
23400a5a70 | ||
|
|
c20e781441 | ||
|
|
2b780ee7b2 | ||
|
|
d1dec8314a | ||
|
|
d813ae8845 | ||
|
|
05ae978cb7 | ||
|
|
3712794e56 | ||
|
|
a62baf34f2 | ||
|
|
0b57ab7303 | ||
|
|
5423012368 | ||
|
|
234ccbe51f | ||
|
|
4a0358a513 | ||
|
|
f4135e6bcf | ||
|
|
fce09e8c92 | ||
|
|
909fbb9538 | ||
|
|
89f4718ea1 | ||
|
|
2e2825ae98 | ||
|
|
3740c9d852 | ||
|
|
7bb99c9b9c | ||
|
|
aa3fb28f81 | ||
|
|
b7d30fca2b | ||
|
|
ef1a69156d | ||
|
|
4835c77840 | ||
|
|
106115676d | ||
|
|
e4fe9538d7 | ||
|
|
f3979a9f28 | ||
|
|
fc37abc356 | ||
|
|
b8936e5fca | ||
|
|
7e4de2ac16 | ||
|
|
5365fd2149 | ||
|
|
26638748bb | ||
|
|
7748848b6e | ||
|
|
0dae0f6027 | ||
|
|
2de34a905d | ||
|
|
7719ed0d6c | ||
|
|
fd8e6110b1 | ||
|
|
15fe2627d0 | ||
|
|
cbdd1d6d6c | ||
|
|
080638216a | ||
|
|
4dc2440024 | ||
|
|
a6d8915b6c | ||
|
|
f422a3c5d0 | ||
|
|
31872227f1 | ||
|
|
fae5b1e391 | ||
|
|
43d682f6b8 | ||
|
|
352a554c74 | ||
|
|
c7991ef04c | ||
|
|
1c36134cf9 | ||
|
|
b596b4153f | ||
|
|
cc7df91cc6 | ||
|
|
7c3a8a3774 | ||
|
|
808976ee28 | ||
|
|
825c352b6a | ||
|
|
b0b7f27f3a | ||
|
|
c831c5749a | ||
|
|
1409fc0da3 | ||
|
|
901af8de3f | ||
|
|
d9a030157e | ||
|
|
a0996c1807 | ||
|
|
a3dcaf21cb | ||
|
|
8ad7ebf02f | ||
|
|
e32aa95092 | ||
|
|
6844bb6510 | ||
|
|
47aa387b91 | ||
|
|
6150df71b2 | ||
|
|
fd94f2a5b5 | ||
|
|
847a1cb068 | ||
|
|
c1f7c9bb87 | ||
|
|
ac181183cc | ||
|
|
8a11053f1f | ||
|
|
b0acaed02f | ||
|
|
c3a917f8b3 | ||
|
|
766ee836b5 | ||
|
|
68bc22f9cd | ||
|
|
e03e5364d2 | ||
|
|
aa4b8d7246 | ||
|
|
0609628645 | ||
|
|
32028fbbb1 | ||
|
|
e3d948f60b | ||
|
|
40c6f738b4 | ||
|
|
296fc92721 | ||
|
|
21b4ae3fdc | ||
|
|
d69105bb77 | ||
|
|
4a6c8ff809 | ||
|
|
1f6d9369d6 | ||
|
|
3a70f02cbf | ||
|
|
44cb55fbe9 | ||
|
|
dd7e1c505c | ||
|
|
94f0140f62 | ||
|
|
28b29d0985 | ||
|
|
52f2521f6a | ||
|
|
e1e8b63eb5 | ||
|
|
a1aba32209 | ||
|
|
fa3916d1bf | ||
|
|
3ac7ef90ef | ||
|
|
4050bf43c4 | ||
|
|
851d7d0bc4 | ||
|
|
0fbf84e6bc | ||
|
|
d91b423a45 | ||
|
|
2bbce2f0fd | ||
|
|
1be1bffb29 | ||
|
|
92542e6b94 | ||
|
|
9c49191031 | ||
|
|
3932c1064e | ||
|
|
d446b91117 | ||
|
|
673257bbbc | ||
|
|
180ed7da81 | ||
|
|
38d8ab2285 | ||
|
|
c17a4d8453 | ||
|
|
e74285f6d2 | ||
|
|
33f06d3104 | ||
|
|
841cfac1f8 | ||
|
|
71fb23f769 | ||
|
|
150ed641ae | ||
|
|
5b90507310 | ||
|
|
0eff7c6ca9 | ||
|
|
2189983323 | ||
|
|
e45491d2f8 | ||
|
|
58650b7d2d | ||
|
|
19c2df4822 | ||
|
|
d5fc831321 | ||
|
|
743949753a | ||
|
|
184f5f2397 | ||
|
|
597aa0475e | ||
|
|
70984faee2 | ||
|
|
e657e4d1d1 | ||
|
|
d62f114c02 | ||
|
|
9e20ccc01a | ||
|
|
1343ea66c9 | ||
|
|
2b90b8d6b7 | ||
|
|
90d34c1251 | ||
|
|
93ff79febf | ||
|
|
7fef03a7db | ||
|
|
30269381e8 | ||
|
|
9985f388ac | ||
|
|
a869de3b1f | ||
|
|
4aac733238 | ||
|
|
7ed891e0c6 | ||
|
|
a1f7a97ff5 | ||
|
|
bca97f7186 | ||
|
|
61e09ff532 | ||
|
|
8e465b4393 | ||
|
|
b16d37953d | ||
|
|
f5c76d93bc | ||
|
|
98c0e00a2c | ||
|
|
3d8e9a593e | ||
|
|
ffa3362e16 | ||
|
|
999e9c3d5d | ||
|
|
40104c06e2 | ||
|
|
36731ba6b3 | ||
|
|
9d07561d99 | ||
|
|
3eb8aa8085 | ||
|
|
138fa45ecb | ||
|
|
cb76b2a6ad | ||
|
|
3853009d92 | ||
|
|
5257fb8533 | ||
|
|
ac54d2b927 | ||
|
|
e34a488b55 | ||
|
|
b07f9fe3b5 | ||
|
|
d05404a4df | ||
|
|
c22778bd92 | ||
|
|
65828c14fc | ||
|
|
178a84bcf6 | ||
|
|
2b11463567 | ||
|
|
7cb00aeb34 | ||
|
|
7b2782c0f6 | ||
|
|
587fd707ba | ||
|
|
f59f2eccd5 | ||
|
|
a0e01e075d | ||
|
|
3f076eeda6 | ||
|
|
32853c2044 | ||
|
|
a35d350cbd | ||
|
|
a6a50113da | ||
|
|
8b637e194e | ||
|
|
473067db31 | ||
|
|
289255d67a | ||
|
|
199740902a | ||
|
|
549e78d7b3 | ||
|
|
aa1825681c | ||
|
|
5a42ca6772 | ||
|
|
8dad3ad8ea | ||
|
|
655c9ece2d | ||
|
|
159d798c34 | ||
|
|
856d23626f | ||
|
|
8890636a56 | ||
|
|
4e68b588be | ||
|
|
b9bb27512c | ||
|
|
fa61c1b9c1 | ||
|
|
a0634fa79e | ||
|
|
03937a9f89 | ||
|
|
24086191af | ||
|
|
cf429ba284 | ||
|
|
178a79fc47 | ||
|
|
f0b9e9a89d | ||
|
|
99121ad5cd | ||
|
|
6e3393c93f | ||
|
|
aa41f97e38 | ||
|
|
2b53c67789 | ||
|
|
731ce1721a | ||
|
|
8321b9430e | ||
|
|
7f69350e4d | ||
|
|
1a156c1060 | ||
|
|
7149f99f02 | ||
|
|
5491398a64 | ||
|
|
4b8771f3e2 | ||
|
|
fea6d70d4d | ||
|
|
0e4bd485e0 | ||
|
|
ed8a2c8793 | ||
|
|
fecb27232e | ||
|
|
f58a9bad42 | ||
|
|
f4d50c4dca | ||
|
|
036e266bae | ||
|
|
d98c347902 | ||
|
|
1270bcc6ed | ||
|
|
8914b94577 | ||
|
|
acca8ea786 | ||
|
|
eaef1c8b8e | ||
|
|
0dfe70125b | ||
|
|
5afd83c883 | ||
|
|
0ce1ec5d15 | ||
|
|
a61b34cab5 | ||
|
|
ad1b96720a | ||
|
|
296a2b8e5d | ||
|
|
597a9f9548 | ||
|
|
e031718747 | ||
|
|
6452ff203e | ||
|
|
70aed4a605 | ||
|
|
821419ee5b | ||
|
|
72ed8a6dd2 | ||
|
|
ee87ac2f9b | ||
|
|
660021f5e5 | ||
|
|
783f05172b | ||
|
|
79e0509bf9 | ||
|
|
02853bbd60 | ||
|
|
218922d9f8 | ||
|
|
7a2b04a5d1 | ||
|
|
dc32e56a9c | ||
|
|
490cc7ded6 | ||
|
|
a874a09b7e | ||
|
|
4db0350f06 | ||
|
|
edc52e5b28 | ||
|
|
a1a1284696 | ||
|
|
6f849e8f64 | ||
|
|
3e32504526 | ||
|
|
6e84d3cce0 | ||
|
|
8c02de6c61 | ||
|
|
f09df31480 | ||
|
|
a8697df9e3 | ||
|
|
6f30d6b4d0 | ||
|
|
13c7bbbac6 | ||
|
|
cc390ba862 | ||
|
|
61490fbaa8 | ||
|
|
4ce7f059c3 | ||
|
|
04a28fe831 | ||
|
|
1c5e07f4a2 | ||
|
|
deb0e57c49 | ||
|
|
2795091f0c | ||
|
|
c380d437c6 | ||
|
|
a95cce9a60 | ||
|
|
08af830fd7 | ||
|
|
c872c86c4a | ||
|
|
ba5c188630 | ||
|
|
5a4161d293 | ||
|
|
33296802fb | ||
|
|
52834dbf21 | ||
|
|
8db389313b | ||
|
|
f2d36a47ae | ||
|
|
31241f48be | ||
|
|
19c1a54fea | ||
|
|
850d43c1e8 | ||
|
|
5b39fc8123 | ||
|
|
ec368c8102 | ||
|
|
3412becfc5 | ||
|
|
f225039d36 | ||
|
|
2456c077f6 | ||
|
|
ad92fe49c7 | ||
|
|
9cc55f895c | ||
|
|
851701cb6f | ||
|
|
c04171abf6 | ||
|
|
488d08b43c | ||
|
|
465d726bd4 | ||
|
|
18abb068b1 | ||
|
|
cec5280013 | ||
|
|
adabf0107f | ||
|
|
fd03915f85 | ||
|
|
a81484f13f | ||
|
|
c126ff10a7 | ||
|
|
bb348c1353 | ||
|
|
162f625716 | ||
|
|
fb1e7eef6b | ||
|
|
ac5b32c491 | ||
|
|
b526fc070d | ||
|
|
88ae4679d1 | ||
|
|
9e7a579365 | ||
|
|
b040ae8d4d | ||
|
|
c6e20aed9b | ||
|
|
695a24d8a7 | ||
|
|
b168bded1d | ||
|
|
2472142532 | ||
|
|
bf49f55c95 | ||
|
|
0df1eb71cb | ||
|
|
c12f0d2697 | ||
|
|
6ffbc3a0f5 | ||
|
|
500af6d775 | ||
|
|
2feb091961 | ||
|
|
4e7b35c917 | ||
|
|
c8b452d411 | ||
|
|
708034d1d3 | ||
|
|
c66385f0f9 | ||
|
|
247728b723 | ||
|
|
522b76e452 | ||
|
|
3127c78bc7 | ||
|
|
938dd8b9ca | ||
|
|
847376cd8f | ||
|
|
1a3650ef2a | ||
|
|
247cdb1e1a | ||
|
|
129273036a | ||
|
|
97d77440e7 | ||
|
|
75fbf2ca78 | ||
|
|
40755961ea | ||
|
|
29f45a2e38 | ||
|
|
5e1e0b4759 | ||
|
|
d2e769027a | ||
|
|
cc335db9e0 | ||
|
|
6f4008ebab | ||
|
|
ff245c61d2 | ||
|
|
cb7b011d6b | ||
|
|
5a0afcc835 | ||
|
|
83fb8d20b7 | ||
|
|
f6f9b5c8cb | ||
|
|
5e43c332f1 | ||
|
|
6891e86621 | ||
|
|
3c1ec2e9ca | ||
|
|
49caeeafce | ||
|
|
349ad7858b | ||
|
|
c70f220db3 | ||
|
|
f8fd77b83e | ||
|
|
af11cc6cfd | ||
|
|
e20bc87152 | ||
|
|
bb408936e9 | ||
|
|
e050d168a7 | ||
|
|
9c6f5de551 | ||
|
|
39e3ddb080 | ||
|
|
603765732e | ||
|
|
297b6b282c | ||
|
|
e590b43545 | ||
|
|
bc6ba5f547 | ||
|
|
8db86dcebf | ||
|
|
a8e352a473 | ||
|
|
2323fd17b0 | ||
|
|
fedb787b4f | ||
|
|
90f226193c | ||
|
|
bfbe4ae4b4 | ||
|
|
e477fa7a93 | ||
|
|
f3679b37a2 | ||
|
|
b30b1d145c | ||
|
|
e902d5d917 | ||
|
|
8bd4107423 | ||
|
|
7ba305e033 | ||
|
|
caa0eb6e29 | ||
|
|
c6d831a564 | ||
|
|
943c02bf79 | ||
|
|
c32b081029 | ||
|
|
16d9d77d88 | ||
|
|
803ab81eb6 | ||
|
|
634202340b | ||
|
|
525ff6bf74 | ||
|
|
65a0ebf975 | ||
|
|
da2b8082b3 | ||
|
|
ec4391b88e | ||
|
|
a7db2aa39d | ||
|
|
1c3ecc4ad2 | ||
|
|
034e9935d4 | ||
|
|
3241128840 | ||
|
|
fed3ffb681 | ||
|
|
1e1256dbdd | ||
|
|
69c65597d9 | ||
|
|
d1756b621f | ||
|
|
50cf25ae97 | ||
|
|
cf6ce0dbad | ||
|
|
c4fc9f7ed8 | ||
|
|
44a30e269e | ||
|
|
ef18aaa66f | ||
|
|
45f3a98359 | ||
|
|
36bca4f0d6 | ||
|
|
6e5ad75c5c | ||
|
|
79a61c28d7 | ||
|
|
d23bb3b05d | ||
|
|
bac43ae38e | ||
|
|
e900ea20b7 | ||
|
|
8496d02fe1 | ||
|
|
fc94c4ea40 | ||
|
|
c90d976d7a | ||
|
|
d320d3a8bf | ||
|
|
24bab48043 | ||
|
|
f5d6d7caca | ||
|
|
85fe11ff11 | ||
|
|
30979caf25 | ||
|
|
ce8533f83b | ||
|
|
78432d08ca | ||
|
|
540436a1f9 | ||
|
|
2e5461ee4d | ||
|
|
2044ccdc0b | ||
|
|
ca35573ad5 | ||
|
|
85332eacbd | ||
|
|
6dbe983461 | ||
|
|
262f5886a4 | ||
|
|
4688a94a54 | ||
|
|
207d843aee | ||
|
|
a6b872bb0c | ||
|
|
8cd112110e | ||
|
|
9581279919 | ||
|
|
002458f4c8 | ||
|
|
a50977e0fd | ||
|
|
ef73bf799c | ||
|
|
7aea95704e | ||
|
|
564a8bdc19 | ||
|
|
09ef3ccf67 | ||
|
|
1f84cdb88c | ||
|
|
12d7d8db0a | ||
|
|
1bfde4bfa2 | ||
|
|
80c0a6ead3 | ||
|
|
7f4ebf50d3 | ||
|
|
a528c6c686 | ||
|
|
23f11fcd5e | ||
|
|
3dad0d9811 | ||
|
|
d920f7edc1 | ||
|
|
690d9fb971 | ||
|
|
f37b83a0ea | ||
|
|
12573ed2e7 | ||
|
|
be1800884e | ||
|
|
f6d0934b5d | ||
|
|
93c233b1cf | ||
|
|
47b64a5074 | ||
|
|
a09ee3a41b | ||
|
|
d6fa06b3be | ||
|
|
0cec0c1c1d | ||
|
|
e2da2b232e | ||
|
|
bdf1731db3 | ||
|
|
5477b87774 | ||
|
|
7478e63ea0 | ||
|
|
922d1462a8 | ||
|
|
8f410d5e2e | ||
|
|
0d8c743dfe | ||
|
|
e50f4c0ee5 | ||
|
|
b6a9c58994 | ||
|
|
382693a199 | ||
|
|
acf2c2c6a5 | ||
|
|
006f840570 | ||
|
|
2d6725a41a | ||
|
|
7c867b6e54 | ||
|
|
457df8d3f3 | ||
|
|
b6e4208ea8 | ||
|
|
56fba5541a | ||
|
|
4a88a9e253 | ||
|
|
a69dbafe3c | ||
|
|
a9c69bf774 | ||
|
|
986a516bf1 | ||
|
|
9bf22c56cd | ||
|
|
b5705e079f | ||
|
|
2ec2036c2f | ||
|
|
faf1d38a6d | ||
|
|
6c1c7eaf75 | ||
|
|
2d5741aef8 | ||
|
|
a9f80a603c | ||
|
|
658b58378e | ||
|
|
8a807102a6 | ||
|
|
afee29ad3f | ||
|
|
6ec3927dd3 | ||
|
|
b109075bf2 | ||
|
|
f4667cbc33 | ||
|
|
d021842fa1 | ||
|
|
f42cb109a0 | ||
|
|
1b70e7d0df | ||
|
|
b687270207 | ||
|
|
06cac18d78 | ||
|
|
6cac58b34c | ||
|
|
4b15a2bd63 | ||
|
|
e8409a0108 | ||
|
|
39ad3a625c | ||
|
|
2a5b9b635b | ||
|
|
e2056756ef | ||
|
|
6a8e3fd02d | ||
|
|
2a68f01402 | ||
|
|
dca93fb177 | ||
|
|
010bb73ac2 | ||
|
|
bb2cc2d157 | ||
|
|
86618a64c6 | ||
|
|
1ff17bd15d | ||
|
|
12ea12e4e7 | ||
|
|
4f956d71e2 | ||
|
|
ce6b31d938 | ||
|
|
a8387b8b19 | ||
|
|
a420d9cdc7 | ||
|
|
a8dfa01362 | ||
|
|
92f23e626e | ||
|
|
553abd01be | ||
|
|
eced842dfc | ||
|
|
b6a3d9ce59 | ||
|
|
eebbc807e5 | ||
|
|
0fb7364235 | ||
|
|
76191fe47d | ||
|
|
f1cc62c21f | ||
|
|
f53b63eaf6 | ||
|
|
821997d372 | ||
|
|
85b76b1143 | ||
|
|
9004254fbf | ||
|
|
1de9add304 | ||
|
|
7a39455af9 | ||
|
|
96d60eff23 | ||
|
|
a69f93d214 | ||
|
|
8e1638b773 | ||
|
|
95ef61bc45 | ||
|
|
c142676b20 | ||
|
|
be843227a1 | ||
|
|
48d9b49ada | ||
|
|
19f774a4a4 | ||
|
|
d7d027bcf1 | ||
|
|
e6228ca682 | ||
|
|
f2ee61553f | ||
|
|
30088afa89 | ||
|
|
40430cf01b | ||
|
|
b1d88ced61 | ||
|
|
5b7ca6435c | ||
|
|
a6ae6b0752 | ||
|
|
61b8ad38bd | ||
|
|
e714653478 | ||
|
|
d70b4f04f6 | ||
|
|
9eff99de49 | ||
|
|
4855b8f3de | ||
|
|
84ad2cb827 | ||
|
|
0e537cced4 | ||
|
|
b366592878 | ||
|
|
5cf92980f0 | ||
|
|
8f7f38536d | ||
|
|
97edec6e72 | ||
|
|
40d58c9bc3 | ||
|
|
f76c9041bb | ||
|
|
66af1707a1 | ||
|
|
96fbf9fd06 | ||
|
|
801af95a13 | ||
|
|
f5af5f7334 | ||
|
|
48a12be538 | ||
|
|
012a7743ad | ||
|
|
678235023f | ||
|
|
a4afb72535 | ||
|
|
1db24e5f2a | ||
|
|
639ae671ae | ||
|
|
354882f2c0 | ||
|
|
1a4e9ecfef | ||
|
|
dcdd74dff4 | ||
|
|
ab050d1890 | ||
|
|
002e2cc42c | ||
|
|
ef7e2c5d86 | ||
|
|
e7badb38e9 | ||
|
|
dacc8cb5f4 | ||
|
|
a801a4aeef | ||
|
|
fe60f264c4 | ||
|
|
dfdb691f73 | ||
|
|
9fe5836240 | ||
|
|
8074e6b46a | ||
|
|
d4ef764305 | ||
|
|
f763ed9a7e | ||
|
|
8922437fcd | ||
|
|
6e98cd5aad | ||
|
|
08f4576aa6 | ||
|
|
1d29709c32 | ||
|
|
7610028a89 | ||
|
|
bdcbf9b92e | ||
|
|
b807b3c785 | ||
|
|
90b54a45e8 | ||
|
|
bb85d6f63e | ||
|
|
0d903f4d0d | ||
|
|
ba4f4e0a3e | ||
|
|
312f3d2ab9 | ||
|
|
6b710dc146 | ||
|
|
def67295e5 | ||
|
|
0823a18cff | ||
|
|
ca735ad70f | ||
|
|
af90077a6a | ||
|
|
9cba45910e | ||
|
|
613973d2b1 | ||
|
|
29ccdb3cd9 | ||
|
|
1e4f5145cf | ||
|
|
a0ab9fe56b | ||
|
|
fb57299a1d | ||
|
|
162cb19cff | ||
|
|
7020050b06 | ||
|
|
abfb4490d5 | ||
|
|
7b610f8dd8 | ||
|
|
8b3a357949 | ||
|
|
f73708d725 | ||
|
|
d3c79c7078 | ||
|
|
d889cdecde | ||
|
|
2654942b3c | ||
|
|
ed2c8cdc25 | ||
|
|
19434afe0a | ||
|
|
a7c4ae530d | ||
|
|
b2d735e573 | ||
|
|
044701e907 | ||
|
|
42e9800bde | ||
|
|
d956bd3743 | ||
|
|
6084486dcd | ||
|
|
100a4731e2 | ||
|
|
000ae27aff | ||
|
|
06b0707aa9 | ||
|
|
ac93449788 | ||
|
|
02d32de044 | ||
|
|
8f4d81903c | ||
|
|
333e3e4f01 | ||
|
|
f7721d0523 | ||
|
|
e5473fc51a | ||
|
|
a08ceadd1a | ||
|
|
dc2ddfb42c | ||
|
|
4eeed14d34 | ||
|
|
5dbda70235 | ||
|
|
38d53a6fe2 | ||
|
|
6a4c2a0d40 | ||
|
|
77a932fe3b | ||
|
|
4b2c24dd8c | ||
|
|
8814ea8241 | ||
|
|
8f6649e29e | ||
|
|
73360d37f7 | ||
|
|
eb642551ac | ||
|
|
f33d41af63 | ||
|
|
5e7954f152 | ||
|
|
9e79ad5a62 | ||
|
|
0dcbc47e15 | ||
|
|
b8b8fe6120 | ||
|
|
ff066ef177 | ||
|
|
63e834ce73 | ||
|
|
b118e60160 | ||
|
|
00e8531898 | ||
|
|
7c8d662315 | ||
|
|
2f6d67cad6 | ||
|
|
f5e5b44bc1 | ||
|
|
f795177ab6 | ||
|
|
a4bde421db | ||
|
|
f6a4151f60 | ||
|
|
34b7537948 | ||
|
|
66120fb97a | ||
|
|
6de69de868 | ||
|
|
82577b4acc | ||
|
|
f6bc229d1d | ||
|
|
8db7f7ed37 | ||
|
|
d5ffd4a1fb | ||
|
|
b53579858a | ||
|
|
28d504d7d3 | ||
|
|
63a230f92e | ||
|
|
56c2ac048d | ||
|
|
208d5df106 | ||
|
|
d09f53c380 | ||
|
|
f8ca86c6a7 | ||
|
|
4128e2ffcb | ||
|
|
696aee3891 | ||
|
|
bcad2f4e9e | ||
|
|
1cf5cdbeca | ||
|
|
8e94f3902b | ||
|
|
3412bb75be | ||
|
|
17925ed563 | ||
|
|
43da36948b | ||
|
|
88a6a41c7c | ||
|
|
b58c42cd53 | ||
|
|
d37785c214 | ||
|
|
b369a6dc2a | ||
|
|
9f32a6e209 | ||
|
|
3f66caedfc | ||
|
|
1dd82df59e | ||
|
|
81bc86be07 | ||
|
|
663649a100 | ||
|
|
5ee6814947 | ||
|
|
65cd4f5838 | ||
|
|
7fd35d68bb | ||
|
|
ad8187b151 | ||
|
|
1e557dddcc | ||
|
|
f18f870206 | ||
|
|
9d8cff1275 | ||
|
|
32a29cd4d3 | ||
|
|
8dca4c3f9a | ||
|
|
a881b1f5fb | ||
|
|
ea6f366d23 | ||
|
|
b8876f2b17 | ||
|
|
fe62423344 | ||
|
|
fcaf48eb49 | ||
|
|
77ba25328c | ||
|
|
ca88717f0c | ||
|
|
e8be14e5d6 | ||
|
|
370a3cafd0 | ||
|
|
1584dae9c2 | ||
|
|
e802c072f7 | ||
|
|
456baaa112 | ||
|
|
2c7e37e9ff | ||
|
|
2d99b327fc | ||
|
|
79ad5c08e4 | ||
|
|
391179657c | ||
|
|
ecfece3ac4 | ||
|
|
ed548a0de2 | ||
|
|
84553899f6 | ||
|
|
ca6eb5511c | ||
|
|
c46137e40d | ||
|
|
b391f5615b | ||
|
|
38ccf23567 | ||
|
|
c0a1328532 | ||
|
|
65c7765c07 | ||
|
|
e99f6c03c1 | ||
|
|
31062d424f | ||
|
|
559433bed0 | ||
|
|
8fafae2cfa | ||
|
|
b3c9473bc8 | ||
|
|
b77c815bcd | ||
|
|
13192fa03c | ||
|
|
b258ee5f77 | ||
|
|
a63eccf188 | ||
|
|
37de4a9990 | ||
|
|
c4870e1b6b | ||
|
|
6f7c305308 | ||
|
|
438dd42f7d | ||
|
|
f57d563578 | ||
|
|
c8535440d3 | ||
|
|
84ea34f918 | ||
|
|
44ada52185 | ||
|
|
78b1231386 | ||
|
|
fe3ef08f39 | ||
|
|
f1c743286d | ||
|
|
657a25178d | ||
|
|
0666fa80ac | ||
|
|
f3560caf93 | ||
|
|
2e056e9b0b | ||
|
|
92bda1231e | ||
|
|
ec1b4e6f85 | ||
|
|
7643bd61fd | ||
|
|
bf73b40529 | ||
|
|
ed20397a2b | ||
|
|
1c70ca2214 | ||
|
|
77b9a7aa5a | ||
|
|
0d0c760d94 | ||
|
|
177e385bb9 | ||
|
|
699a5d2944 | ||
|
|
d298afba01 | ||
|
|
45d08c70f0 | ||
|
|
77feecc623 | ||
|
|
acffc7e7f0 | ||
|
|
b0e56b7c54 | ||
|
|
df2fa87e6b | ||
|
|
a27be35325 | ||
|
|
2f3c3d510f | ||
|
|
d09767a90b | ||
|
|
427a857e9a | ||
|
|
e9842091e4 | ||
|
|
332f3f5617 | ||
|
|
73e78a2257 | ||
|
|
f7cd0e84f9 | ||
|
|
a4e77af571 | ||
|
|
c8bc68c267 | ||
|
|
02d6b91b73 | ||
|
|
5074bccae4 | ||
|
|
7d94b0325f | ||
|
|
ff1722d307 | ||
|
|
e68b24f839 | ||
|
|
339ba7986f | ||
|
|
6cb674a0aa | ||
|
|
6db47478cf | ||
|
|
01b45f4f23 | ||
|
|
4d61d01943 | ||
|
|
dd0edcd203 | ||
|
|
ebc80597d5 | ||
|
|
d28c81571c | ||
|
|
dc9a260425 | ||
|
|
249e6fe637 | ||
|
|
e84b8747a1 | ||
|
|
e548572f12 | ||
|
|
0323a60d85 | ||
|
|
a05cbf8169 | ||
|
|
5aa45607eb | ||
|
|
133c3a330c | ||
|
|
f9646208e9 | ||
|
|
4b793f44ef | ||
|
|
aae4f00a4b | ||
|
|
366a4918c3 | ||
|
|
bc1801fb03 | ||
|
|
25cd12cf33 | ||
|
|
90e22da930 | ||
|
|
e6c7e57711 | ||
|
|
d385bc9cce | ||
|
|
1816ab95a0 | ||
|
|
5c750b6880 | ||
|
|
cd1c137542 | ||
|
|
4cf2ba20c2 | ||
|
|
a1ee2db6d1 | ||
|
|
db8096ccdc | ||
|
|
25a2554bdd | ||
|
|
3bc7024f8b | ||
|
|
4ff80a7074 | ||
|
|
23ee8211c7 | ||
|
|
1e0ff65337 | ||
|
|
da211bef96 | ||
|
|
95342c8c33 | ||
|
|
61e0289014 | ||
|
|
af09861f5c | ||
|
|
7f9e3bc787 | ||
|
|
d995192dde | ||
|
|
c57e19c8fa | ||
|
|
550d9a9f71 | ||
|
|
4208ac2958 | ||
|
|
45429b5400 | ||
|
|
d3916b84c9 | ||
|
|
55d2b9b3c9 | ||
|
|
3b27d41c72 | ||
|
|
044fb9e2f5 | ||
|
|
6007c8705c | ||
|
|
a8c1958c75 | ||
|
|
d696b394c4 | ||
|
|
32c4138758 | ||
|
|
d8bfe77a3b | ||
|
|
8b0969b698 | ||
|
|
66dfa47c66 | ||
|
|
b10255a6dd | ||
|
|
3698e89b88 | ||
|
|
bfa211fb02 | ||
|
|
dc40ac854a | ||
|
|
2b6d041cb6 | ||
|
|
8a58733d91 | ||
|
|
e49b411205 | ||
|
|
933c21f3d3 | ||
|
|
f40d3e82c0 | ||
|
|
08464ee26e | ||
|
|
12ba10bc2c | ||
|
|
dcaf4c905f | ||
|
|
6046ed4f5c | ||
|
|
1a2756a232 | ||
|
|
ed894cc06f | ||
|
|
166ca2a227 | ||
|
|
cf5d89d13c | ||
|
|
9f160537ef | ||
|
|
bfe76467b0 | ||
|
|
18e7305b6d | ||
|
|
d9813a5bec | ||
|
|
d7867cd1e2 | ||
|
|
30afc8b1d2 | ||
|
|
32b4b4d24d | ||
|
|
7d32a717af | ||
|
|
892350fa2d | ||
|
|
0db4b29452 | ||
|
|
74ac6eb8a3 | ||
|
|
d9d997b218 | ||
|
|
84c4db13fb | ||
|
|
528fa5c57b | ||
|
|
9a9a35bf40 | ||
|
|
d14dc35efe | ||
|
|
27d784b23e | ||
|
|
9e1f7c4c18 | ||
|
|
77e67c19fe | ||
|
|
91582257fb | ||
|
|
66ef5549e9 | ||
|
|
79e1e1a747 | ||
|
|
0b13c0a437 | ||
|
|
08361eb84e | ||
|
|
3d68fcad0b | ||
|
|
7f44083a96 | ||
|
|
39af2bb0a4 | ||
|
|
e9637267ef | ||
|
|
f20f096a30 | ||
|
|
9dc292772a | ||
|
|
bf5d9e3224 | ||
|
|
d70014cfd0 | ||
|
|
a785eb9141 | ||
|
|
f52200a340 | ||
|
|
df7ac9b815 | ||
|
|
64a55681e6 | ||
|
|
1d5b665f13 | ||
|
|
51cf6a5ff3 | ||
|
|
e0ff7ba180 | ||
|
|
9ba975d6ad | ||
|
|
1469c02998 | ||
|
|
95e09dd2e9 | ||
|
|
e5e63ed201 | ||
|
|
4212a45767 | ||
|
|
ef01a64826 | ||
|
|
46b4118b9e | ||
|
|
c7fc5f3ab7 | ||
|
|
f50a23accd | ||
|
|
43a1296150 | ||
|
|
3b38641f98 | ||
|
|
8cac89d17c | ||
|
|
92bb9a5fdc | ||
|
|
963f179d7f | ||
|
|
103183f494 | ||
|
|
1cfc2f0c07 | ||
|
|
219715449d | ||
|
|
f011a3df52 | ||
|
|
7adaa2046d | ||
|
|
948871969f | ||
|
|
57707a80e6 | ||
|
|
55da5bc25d | ||
|
|
c718b810f6 | ||
|
|
afd293ee87 | ||
|
|
752bc5dcdd | ||
|
|
973f03e73e | ||
|
|
555c9847d4 | ||
|
|
c1a35a29a8 | ||
|
|
7a6c27cf24 | ||
|
|
d9c1cf9874 | ||
|
|
1155f1b0e1 | ||
|
|
dcc314f088 | ||
|
|
31ff5bffd6 | ||
|
|
4887ea3563 | ||
|
|
dbaaf4216d | ||
|
|
53c25690f9 | ||
|
|
3c12e711a4 | ||
|
|
9b7bd4e9ae | ||
|
|
026b3a1d0f | ||
|
|
d9c08de58a | ||
|
|
c379a6f2fb | ||
|
|
488a3eeace | ||
|
|
4dd9c9e2b9 | ||
|
|
ca0a4bdf8e | ||
|
|
247c7eff14 | ||
|
|
837ec5a27c | ||
|
|
5a15692589 | ||
|
|
0058702749 | ||
|
|
e34ebbc665 | ||
|
|
38a9e6fde1 | ||
|
|
f26ca0866c | ||
|
|
e7ee8a95f6 | ||
|
|
91adefedfa | ||
|
|
2f5eaa8475 | ||
|
|
da964fae93 | ||
|
|
e9c1ad6acd | ||
|
|
f965ee9b1b | ||
|
|
ce940da8e9 | ||
|
|
a8b35eb8f5 | ||
|
|
0c95e5a6ca | ||
|
|
0f39b63801 | ||
|
|
545b5e0161 | ||
|
|
3cf7164a54 | ||
|
|
3f50779a17 | ||
|
|
a8188a2f33 | ||
|
|
d30385f07c | ||
|
|
1b5ff68c43 | ||
|
|
97eabe6f81 | ||
|
|
57a95d1799 | ||
|
|
541dd994a9 | ||
|
|
81a107f503 | ||
|
|
5ab1034698 | ||
|
|
13ba450c4c | ||
|
|
c5470d4050 | ||
|
|
7e49c7d782 | ||
|
|
769a04517f | ||
|
|
768c991909 | ||
|
|
1ee70a0146 | ||
|
|
8be8047b8d | ||
|
|
51b24bbaf3 | ||
|
|
2cb320e246 | ||
|
|
7524f7fbe8 | ||
|
|
9fefb1d898 | ||
|
|
73fc1c1c56 | ||
|
|
d1baff1743 | ||
|
|
dd1cf5c3cf | ||
|
|
9246c11c35 | ||
|
|
0e6002dca2 | ||
|
|
78908bc5cb | ||
|
|
49672bfc5f | ||
|
|
f603d682cd | ||
|
|
b364d404a9 | ||
|
|
96f9c67e77 | ||
|
|
6cebcac805 | ||
|
|
3573896fe0 | ||
|
|
e9a84a21e4 | ||
|
|
25429f760c | ||
|
|
ece4875973 | ||
|
|
2c0547079a | ||
|
|
b3b3a56164 | ||
|
|
4242b45646 | ||
|
|
cab80cbe9d | ||
|
|
d671a8a21d | ||
|
|
6b88ac9c32 | ||
|
|
6ccaf55e54 | ||
|
|
edf29aa67d | ||
|
|
0e6fd645fd | ||
|
|
c63cc78ffd | ||
|
|
3682751455 | ||
|
|
abefa2738b | ||
|
|
4ccd69350b | ||
|
|
0d6880adb3 | ||
|
|
2f368de397 | ||
|
|
650a160f04 | ||
|
|
ecb037fc0e | ||
|
|
8e1bbf32be | ||
|
|
30bb3a109e | ||
|
|
37b6e1cbb7 | ||
|
|
cb83b49432 | ||
|
|
568fec0f54 | ||
|
|
7e2cef98a7 | ||
|
|
90f17d4a28 | ||
|
|
e8dd412ac1 | ||
|
|
54c63063e4 | ||
|
|
58dadad8ec | ||
|
|
e9e558d8c8 | ||
|
|
0897ed561f | ||
|
|
e263805847 | ||
|
|
bfe2205ecb | ||
|
|
04d3ea9563 | ||
|
|
8c47f117db | ||
|
|
36f022bb58 | ||
|
|
e75f56a0f2 | ||
|
|
342a00b89e | ||
|
|
330a71d28b | ||
|
|
ea278b5b12 | ||
|
|
5e7f0c65fe | ||
|
|
b131a2cb98 | ||
|
|
b5a39de3e2 | ||
|
|
42df5ef45e | ||
|
|
b29e295e1b | ||
|
|
8c90157990 | ||
|
|
b454f43b6c | ||
|
|
53194ede5e | ||
|
|
d17d38fe70 | ||
|
|
667fc25766 | ||
|
|
359847d047 | ||
|
|
15567493ba | ||
|
|
591ec02cea | ||
|
|
a1e080d495 | ||
|
|
c2fca054ae | ||
|
|
bf6c2f0dfd | ||
|
|
86ec0b1d9f | ||
|
|
769c330b3d | ||
|
|
5c75450a77 | ||
|
|
ad7c1f3c81 | ||
|
|
23767f734f | ||
|
|
80eaabd360 | ||
|
|
ff5d0f2aeb | ||
|
|
a278428bd5 | ||
|
|
0a491e773b | ||
|
|
45540a00ee | ||
|
|
55f4aa3b34 | ||
|
|
8b63e45f0b | ||
|
|
052cb459a6 | ||
|
|
a7803570dc | ||
|
|
b516ea2fe2 | ||
|
|
1fa45c69d6 | ||
|
|
c4abd93b9b | ||
|
|
91c1768939 | ||
|
|
1a5d6aa498 | ||
|
|
fb69f3d45f | ||
|
|
3fbe93f029 | ||
|
|
df388d9f33 | ||
|
|
0697d08e54 | ||
|
|
895386cfaf | ||
|
|
6a95f9e349 | ||
|
|
ad62a966a6 | ||
|
|
fe4248cf34 | ||
|
|
a237aa8164 | ||
|
|
3dc1e917bf | ||
|
|
27e3e09bb9 | ||
|
|
d1791a999d | ||
|
|
d0b15ed940 | ||
|
|
e4e9da7673 | ||
|
|
8b6e982495 | ||
|
|
71c1e36d1e | ||
|
|
343c426307 | ||
|
|
d8c6adf338 | ||
|
|
e979d75cb8 | ||
|
|
afa7045847 | ||
|
|
e84339ef4a | ||
|
|
fbd6b5b434 | ||
|
|
dc49dec4f0 | ||
|
|
68c37ca2a4 | ||
|
|
1f1c669673 | ||
|
|
d61565d227 | ||
|
|
a5e055f8a5 | ||
|
|
30b105afd5 | ||
|
|
d14e4d41ea | ||
|
|
f54634aeb2 | ||
|
|
5083ab7694 | ||
|
|
48e151495f | ||
|
|
66358f2900 | ||
|
|
5f6334696a | ||
|
|
02a85b1252 | ||
|
|
4628639ac6 | ||
|
|
8440ac3a54 | ||
|
|
1e6ac8caf2 | ||
|
|
7711530704 | ||
|
|
4ffa167256 | ||
|
|
baa07e935e | ||
|
|
c252eae32e | ||
|
|
92d3115f3d | ||
|
|
6bbf614a37 | ||
|
|
ed8b022b51 | ||
|
|
f34c6bd1ce | ||
|
|
c71566e7f5 | ||
|
|
83455028b0 | ||
|
|
d120d0cf2e | ||
|
|
a0416e9c6d | ||
|
|
a53c0b9472 | ||
|
|
3c2b05be90 | ||
|
|
8573c6e8c6 | ||
|
|
7b63369df2 | ||
|
|
997f362cc2 | ||
|
|
59e561dcf9 | ||
|
|
056353f8a8 | ||
|
|
19a9753663 | ||
|
|
66dd0e9ec0 | ||
|
|
d74b8ec4e3 | ||
|
|
dbfa1d7263 | ||
|
|
d090fd25e4 | ||
|
|
1c53b0a1c0 | ||
|
|
a2ac5ae478 | ||
|
|
ead7155b0f | ||
|
|
dfeb702544 | ||
|
|
32f8733313 | ||
|
|
4bf4c780be | ||
|
|
7a7ff4bb96 | ||
|
|
a59da3634b | ||
|
|
a25fcfdfa7 | ||
|
|
2d9db0fed1 | ||
|
|
6ad1f19a21 | ||
|
|
88a32ae48d | ||
|
|
a4f96e6452 | ||
|
|
e27b7d7812 | ||
|
|
ba5d84f7e8 | ||
|
|
ea3a1745f5 | ||
|
|
6d2b27689d | ||
|
|
a8bb3dd9a3 | ||
|
|
d42093e069 | ||
|
|
98482f0150 | ||
|
|
58f4efb579 | ||
|
|
fe10875285 | ||
|
|
e0fe97401d | ||
|
|
f2f507e619 | ||
|
|
f4d4a2f41b | ||
|
|
4ff44dfa3b | ||
|
|
ee16b2051e | ||
|
|
3633f091c5 | ||
|
|
44608517c1 | ||
|
|
841b4d648c | ||
|
|
5b0e333967 | ||
|
|
01b2db4845 | ||
|
|
e7d73b833b | ||
|
|
f7696114bb | ||
|
|
8de67fd9d9 | ||
|
|
be6690bf0b | ||
|
|
a86dc942d6 | ||
|
|
6dcb0bafb0 | ||
|
|
83dae46ec6 | ||
|
|
0cceb3fdf1 | ||
|
|
7885eaf974 | ||
|
|
37d0f06e07 | ||
|
|
2da664ed17 | ||
|
|
2699f170ca | ||
|
|
8406c0d9a3 | ||
|
|
e762f7f3c0 | ||
|
|
65aa4d5642 | ||
|
|
3a9f5d6ddc | ||
|
|
748ad5f05a | ||
|
|
89519b1521 | ||
|
|
7e300079ce | ||
|
|
26f442a675 | ||
|
|
8aa4fbea83 | ||
|
|
2701be91e3 | ||
|
|
f2e87a3429 | ||
|
|
c7a3186d08 | ||
|
|
a5e4ceb735 | ||
|
|
b725cadf48 | ||
|
|
df9a05ba92 | ||
|
|
db1dacde5d | ||
|
|
9f2a9d43b1 | ||
|
|
d6f24feb4a | ||
|
|
40e785fdff | ||
|
|
70a91c5426 | ||
|
|
0a2a5be71c | ||
|
|
4fd4f44bb7 | ||
|
|
378b2fbd9e | ||
|
|
1f9b52d5e1 | ||
|
|
b30bb56483 | ||
|
|
ba22da00bf | ||
|
|
cee5ddee53 | ||
|
|
c74c60a629 | ||
|
|
e4f8fe941e | ||
|
|
b9e1ca1385 | ||
|
|
fd10b49742 | ||
|
|
1c20a8cd31 | ||
|
|
a316e25034 | ||
|
|
f54f2c52e9 | ||
|
|
1adb7fa58c | ||
|
|
dcf6059a15 | ||
|
|
8a6e9f90be | ||
|
|
77af33ba5d | ||
|
|
faabed1df0 | ||
|
|
cae5c76bed | ||
|
|
53b698adb6 | ||
|
|
bbc4673f17 | ||
|
|
dc2733998e | ||
|
|
0d161519e4 | ||
|
|
5dad97779a | ||
|
|
3ba8857491 | ||
|
|
7cd416c63e |
7
.github/pull_request_template.md
vendored
7
.github/pull_request_template.md
vendored
@@ -2,11 +2,4 @@
|
||||
|
||||
Release Notes:
|
||||
|
||||
- N/A
|
||||
|
||||
or
|
||||
|
||||
- (Added|Fixed|Improved) ... ([#<public_issue_number_if_exists>](https://github.com/zed-industries/community/issues/<public_issue_number_if_exists>)).
|
||||
|
||||
If the release notes are only intended for a specific release channel only, add `(<release_channel>-only)` to the end of the release note line.
|
||||
These will be removed by the person making the release.
|
||||
|
||||
19
.github/workflows/release_actions.yml
vendored
19
.github/workflows/release_actions.yml
vendored
@@ -6,8 +6,8 @@ jobs:
|
||||
discord_release:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Get appropriate URL
|
||||
id: get-appropriate-url
|
||||
- name: Get release URL
|
||||
id: get-release-url
|
||||
run: |
|
||||
if [ "${{ github.event.release.prerelease }}" == "true" ]; then
|
||||
URL="https://zed.dev/releases/preview/latest"
|
||||
@@ -15,14 +15,17 @@ jobs:
|
||||
URL="https://zed.dev/releases/stable/latest"
|
||||
fi
|
||||
echo "::set-output name=URL::$URL"
|
||||
- name: Get content
|
||||
uses: 2428392/gh-truncate-string-action@v1.2.0
|
||||
id: get-content
|
||||
with:
|
||||
stringToTruncate: |
|
||||
📣 Zed [${{ github.event.release.tag_name }}](${{ steps.get-release-url.outputs.URL }}) was just released!
|
||||
|
||||
${{ github.event.release.body }}
|
||||
maxLength: 2000
|
||||
- name: Discord Webhook Action
|
||||
uses: tsickert/discord-webhook@v5.3.0
|
||||
with:
|
||||
webhook-url: ${{ secrets.DISCORD_WEBHOOK_URL }}
|
||||
content: |
|
||||
📣 Zed ${{ github.event.release.tag_name }} was just released!
|
||||
|
||||
Restart your Zed or head to ${{ steps.get-appropriate-url.outputs.URL }} to grab it.
|
||||
|
||||
${{ github.event.release.body }}
|
||||
content: ${{ steps.get-content.outputs.string }}
|
||||
|
||||
1902
Cargo.lock
generated
1902
Cargo.lock
generated
File diff suppressed because it is too large
Load Diff
39
Cargo.toml
39
Cargo.toml
@@ -2,13 +2,17 @@
|
||||
members = [
|
||||
"crates/activity_indicator",
|
||||
"crates/ai",
|
||||
"crates/assistant",
|
||||
"crates/audio",
|
||||
"crates/audio2",
|
||||
"crates/auto_update",
|
||||
"crates/breadcrumbs",
|
||||
"crates/call",
|
||||
"crates/call2",
|
||||
"crates/channel",
|
||||
"crates/cli",
|
||||
"crates/client",
|
||||
"crates/client2",
|
||||
"crates/clock",
|
||||
"crates/collab",
|
||||
"crates/collab_ui",
|
||||
@@ -17,18 +21,24 @@ members = [
|
||||
"crates/component_test",
|
||||
"crates/context_menu",
|
||||
"crates/copilot",
|
||||
"crates/copilot2",
|
||||
"crates/copilot_button",
|
||||
"crates/db",
|
||||
"crates/db2",
|
||||
"crates/refineable",
|
||||
"crates/refineable/derive_refineable",
|
||||
"crates/diagnostics",
|
||||
"crates/drag_and_drop",
|
||||
"crates/editor",
|
||||
"crates/feature_flags",
|
||||
"crates/feature_flags2",
|
||||
"crates/feedback",
|
||||
"crates/file_finder",
|
||||
"crates/fs",
|
||||
"crates/fs2",
|
||||
"crates/fsevent",
|
||||
"crates/fuzzy",
|
||||
"crates/fuzzy2",
|
||||
"crates/git",
|
||||
"crates/go_to_line",
|
||||
"crates/gpui",
|
||||
@@ -36,39 +46,55 @@ members = [
|
||||
"crates/gpui2",
|
||||
"crates/gpui2_macros",
|
||||
"crates/install_cli",
|
||||
"crates/install_cli2",
|
||||
"crates/journal",
|
||||
"crates/journal2",
|
||||
"crates/language",
|
||||
"crates/language2",
|
||||
"crates/language_selector",
|
||||
"crates/language_tools",
|
||||
"crates/live_kit_client",
|
||||
"crates/live_kit_server",
|
||||
"crates/lsp",
|
||||
"crates/lsp2",
|
||||
"crates/media",
|
||||
"crates/menu",
|
||||
"crates/menu2",
|
||||
"crates/multi_buffer",
|
||||
"crates/multi_buffer2",
|
||||
"crates/node_runtime",
|
||||
"crates/notifications",
|
||||
"crates/outline",
|
||||
"crates/picker",
|
||||
"crates/plugin",
|
||||
"crates/plugin_macros",
|
||||
"crates/plugin_runtime",
|
||||
"crates/prettier",
|
||||
"crates/prettier2",
|
||||
"crates/project",
|
||||
"crates/project2",
|
||||
"crates/project_panel",
|
||||
"crates/project_symbols",
|
||||
"crates/recent_projects",
|
||||
"crates/rope",
|
||||
"crates/rpc",
|
||||
"crates/rpc2",
|
||||
"crates/search",
|
||||
"crates/settings",
|
||||
"crates/settings2",
|
||||
"crates/snippet",
|
||||
"crates/sqlez",
|
||||
"crates/sqlez_macros",
|
||||
"crates/feature_flags",
|
||||
"crates/storybook",
|
||||
"crates/rich_text",
|
||||
"crates/storybook2",
|
||||
"crates/sum_tree",
|
||||
"crates/terminal",
|
||||
"crates/terminal2",
|
||||
"crates/text",
|
||||
"crates/theme",
|
||||
"crates/theme2",
|
||||
"crates/theme_selector",
|
||||
"crates/ui2",
|
||||
"crates/util",
|
||||
"crates/semantic_index",
|
||||
"crates/vim",
|
||||
@@ -77,6 +103,7 @@ members = [
|
||||
"crates/welcome",
|
||||
"crates/xtask",
|
||||
"crates/zed",
|
||||
"crates/zed2",
|
||||
"crates/zed-actions"
|
||||
]
|
||||
default-members = ["crates/zed"]
|
||||
@@ -103,12 +130,15 @@ rand = { version = "0.8.5" }
|
||||
refineable = { path = "./crates/refineable" }
|
||||
regex = { version = "1.5" }
|
||||
rust-embed = { version = "8.0", features = ["include-exclude"] }
|
||||
rusqlite = { version = "0.29.0", features = ["blob", "array", "modern_sqlite"] }
|
||||
schemars = { version = "0.8" }
|
||||
serde = { version = "1.0", features = ["derive", "rc"] }
|
||||
serde_derive = { version = "1.0", features = ["deserialize_in_place"] }
|
||||
serde_json = { version = "1.0", features = ["preserve_order", "raw_value"] }
|
||||
smallvec = { version = "1.6", features = ["union"] }
|
||||
smol = { version = "1.2" }
|
||||
strum = { version = "0.25.0", features = ["derive"] }
|
||||
sysinfo = "0.29.10"
|
||||
tempdir = { version = "0.3.7" }
|
||||
thiserror = { version = "1.0.29" }
|
||||
time = { version = "0.3", features = ["serde", "serde-well-known"] }
|
||||
@@ -117,8 +147,9 @@ tree-sitter = "0.20"
|
||||
unindent = { version = "0.1.7" }
|
||||
pretty_assertions = "1.3.0"
|
||||
git2 = { version = "0.15", default-features = false}
|
||||
uuid = { version = "1.1.2", features = ["v4"] }
|
||||
|
||||
tree-sitter-bash = { git = "https://github.com/tree-sitter/tree-sitter-bash", rev = "1b0321ee85701d5036c334a6f04761cdc672e64c" }
|
||||
tree-sitter-bash = { git = "https://github.com/tree-sitter/tree-sitter-bash", rev = "7331995b19b8f8aba2d5e26deb51d2195c18bc94" }
|
||||
tree-sitter-c = "0.20.1"
|
||||
tree-sitter-cpp = { git = "https://github.com/tree-sitter/tree-sitter-cpp", rev="f44509141e7e483323d2ec178f2d2e6c0fc041c1" }
|
||||
tree-sitter-css = { git = "https://github.com/tree-sitter/tree-sitter-css", rev = "769203d0f9abe1a9a691ac2b9fe4bb4397a73c51" }
|
||||
@@ -144,7 +175,7 @@ tree-sitter-yaml = { git = "https://github.com/zed-industries/tree-sitter-yaml",
|
||||
tree-sitter-lua = "0.0.14"
|
||||
tree-sitter-nix = { git = "https://github.com/nix-community/tree-sitter-nix", rev = "66e3e9ce9180ae08fc57372061006ef83f0abde7" }
|
||||
tree-sitter-nu = { git = "https://github.com/nushell/tree-sitter-nu", rev = "786689b0562b9799ce53e824cb45a1a2a04dc673"}
|
||||
|
||||
tree-sitter-vue = {git = "https://github.com/zed-industries/tree-sitter-vue", rev = "95b2890"}
|
||||
[patch.crates-io]
|
||||
tree-sitter = { git = "https://github.com/tree-sitter/tree-sitter", rev = "35a6052fbcafc5e5fc0f9415b8652be7dcaf7222" }
|
||||
async-task = { git = "https://github.com/zed-industries/async-task", rev = "341b57d6de98cdfd7b418567b8de2022ca993a6e" }
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
# syntax = docker/dockerfile:1.2
|
||||
|
||||
FROM rust:1.72-bullseye as builder
|
||||
FROM rust:1.73-bullseye as builder
|
||||
WORKDIR app
|
||||
COPY . .
|
||||
|
||||
|
||||
4
Procfile
4
Procfile
@@ -1,4 +1,4 @@
|
||||
web: cd ../zed.dev && PORT=3000 npx vercel dev
|
||||
collab: cd crates/collab && cargo run serve
|
||||
web: cd ../zed.dev && PORT=3000 npm run dev
|
||||
collab: cd crates/collab && RUST_LOG=${RUST_LOG:-warn,collab=info} cargo run serve
|
||||
livekit: livekit-server --dev
|
||||
postgrest: postgrest crates/collab/admin_api.conf
|
||||
|
||||
@@ -13,7 +13,7 @@ Welcome to Zed, a lightning-fast, collaborative code editor that makes your drea
|
||||
sudo xcodebuild -license
|
||||
```
|
||||
|
||||
* Install homebrew, node and rustup-init (rutup, rust, cargo, etc.)
|
||||
* Install homebrew, node and rustup-init (rustup, rust, cargo, etc.)
|
||||
```
|
||||
/bin/bash -c "$(curl -fsSL https://raw.githubusercontent.com/Homebrew/install/HEAD/install.sh)"
|
||||
brew install node rustup-init
|
||||
@@ -36,7 +36,7 @@ Welcome to Zed, a lightning-fast, collaborative code editor that makes your drea
|
||||
brew install foreman
|
||||
```
|
||||
|
||||
* Ensure the Zed.dev website is checked out in a sibling directory and install it's dependencies:
|
||||
* Ensure the Zed.dev website is checked out in a sibling directory and install its dependencies:
|
||||
|
||||
```
|
||||
cd ..
|
||||
@@ -83,9 +83,7 @@ foreman start
|
||||
If you want to run Zed pointed at the local servers, you can run:
|
||||
|
||||
```
|
||||
script/zed-with-local-servers
|
||||
# or...
|
||||
script/zed-with-local-servers --release
|
||||
script/zed-local
|
||||
```
|
||||
|
||||
### Dump element JSON
|
||||
|
||||
8
assets/icons/bell.svg
Normal file
8
assets/icons/bell.svg
Normal file
@@ -0,0 +1,8 @@
|
||||
<svg width="15" height="15" viewBox="0 0 15 15" fill="none" xmlns="http://www.w3.org/2000/svg">
|
||||
<path
|
||||
fill-rule="evenodd"
|
||||
clip-rule="evenodd"
|
||||
d="M8.60124 1.25086C8.60124 1.75459 8.26278 2.17927 7.80087 2.30989C10.1459 2.4647 12 4.41582 12 6.79999V10.25C12 11.0563 12.0329 11.7074 12.7236 12.0528C12.931 12.1565 13.0399 12.3892 12.9866 12.6149C12.9333 12.8406 12.7319 13 12.5 13H8.16144C8.36904 13.1832 8.49997 13.4513 8.49997 13.75C8.49997 14.3023 8.05226 14.75 7.49997 14.75C6.94769 14.75 6.49997 14.3023 6.49997 13.75C6.49997 13.4513 6.63091 13.1832 6.83851 13H2.49999C2.2681 13 2.06664 12.8406 2.01336 12.6149C1.96009 12.3892 2.06897 12.1565 2.27638 12.0528C2.96708 11.7074 2.99999 11.0563 2.99999 10.25V6.79999C2.99999 4.41537 4.85481 2.46396 7.20042 2.3098C6.73867 2.17908 6.40036 1.75448 6.40036 1.25086C6.40036 0.643104 6.89304 0.150421 7.5008 0.150421C8.10855 0.150421 8.60124 0.643104 8.60124 1.25086ZM7.49999 3.29999C5.56699 3.29999 3.99999 4.86699 3.99999 6.79999V10.25L4.00002 10.3009C4.0005 10.7463 4.00121 11.4084 3.69929 12H11.3007C10.9988 11.4084 10.9995 10.7463 11 10.3009L11 10.25V6.79999C11 4.86699 9.43299 3.29999 7.49999 3.29999Z"
|
||||
fill="currentColor"
|
||||
/>
|
||||
</svg>
|
||||
|
After Width: | Height: | Size: 1.2 KiB |
3
assets/icons/link.svg
Normal file
3
assets/icons/link.svg
Normal file
@@ -0,0 +1,3 @@
|
||||
<svg width="15" height="15" viewBox="0 0 15 15" fill="none" xmlns="http://www.w3.org/2000/svg">
|
||||
<path fill-rule="evenodd" clip-rule="evenodd" d="M8.51192 3.00541C9.18827 2.54594 10.0434 2.53694 10.6788 2.95419C10.823 3.04893 10.9771 3.1993 11.389 3.61119C11.8009 4.02307 11.9513 4.17714 12.046 4.32141C12.4632 4.95675 12.4542 5.81192 11.9948 6.48827C11.8899 6.64264 11.7276 6.80811 11.3006 7.23511L10.6819 7.85383C10.4866 8.04909 10.4866 8.36567 10.6819 8.56093C10.8772 8.7562 11.1937 8.7562 11.389 8.56093L12.0077 7.94221L12.0507 7.89929C12.4203 7.52976 12.6568 7.2933 12.822 7.0502C13.4972 6.05623 13.5321 4.76252 12.8819 3.77248C12.7233 3.53102 12.4922 3.30001 12.1408 2.94871L12.0961 2.90408L12.0515 2.85942C11.7002 2.508 11.4692 2.27689 11.2277 2.11832C10.2377 1.46813 8.94396 1.50299 7.94999 2.17822C7.70689 2.34336 7.47042 2.57991 7.10088 2.94955L7.05797 2.99247L6.43926 3.61119C6.24399 3.80645 6.24399 4.12303 6.43926 4.31829C6.63452 4.51355 6.9511 4.51355 7.14636 4.31829L7.76508 3.69957C8.19208 3.27257 8.35755 3.11027 8.51192 3.00541ZM4.31794 7.14672C4.5132 6.95146 4.5132 6.63487 4.31794 6.43961C4.12267 6.24435 3.80609 6.24435 3.61083 6.43961L2.99211 7.05833L2.9492 7.10124C2.57955 7.47077 2.34301 7.70724 2.17786 7.95035C1.50263 8.94432 1.46778 10.238 2.11797 11.2281C2.27654 11.4695 2.50764 11.7005 2.85908 12.0518L2.90372 12.0965L2.94835 12.1411C3.29965 12.4925 3.53066 12.7237 3.77212 12.8822C4.76217 13.5324 6.05587 13.4976 7.04984 12.8223C7.29294 12.6572 7.52941 12.4206 7.89894 12.051L7.89895 12.051L7.94186 12.0081L8.56058 11.3894C8.75584 11.1941 8.75584 10.8775 8.56058 10.6823C8.36531 10.487 8.04873 10.487 7.85347 10.6823L7.23475 11.301C6.80775 11.728 6.64228 11.8903 6.48792 11.9951C5.81156 12.4546 4.9564 12.4636 4.32105 12.0464C4.17679 11.9516 4.02272 11.8012 3.61083 11.3894C3.19894 10.9775 3.04858 10.8234 2.95383 10.6791C2.53659 10.0438 2.54558 9.18863 3.00505 8.51227C3.10991 8.35791 3.27222 8.19244 3.69922 7.76544L4.31794 7.14672ZM9.6217 6.08558C9.81696 5.89032 9.81696 5.57373 9.6217 5.37847C9.42644 5.18321 9.10986 5.18321 8.91459 5.37847L5.37906 
8.91401C5.1838 9.10927 5.1838 9.42585 5.37906 9.62111C5.57432 9.81637 5.8909 9.81637 6.08617 9.62111L9.6217 6.08558Z" fill="black"/>
|
||||
</svg>
|
||||
|
After Width: | Height: | Size: 2.2 KiB |
3
assets/icons/public.svg
Normal file
3
assets/icons/public.svg
Normal file
@@ -0,0 +1,3 @@
|
||||
<svg width="15" height="15" viewBox="0 0 15 15" fill="none" xmlns="http://www.w3.org/2000/svg">
|
||||
<path fill-rule="evenodd" clip-rule="evenodd" d="M3.74393 2.00204C3.41963 1.97524 3.13502 2.37572 3.10823 2.70001C3.08143 3.0243 3.32558 3.47321 3.64986 3.50001C7.99878 3.85934 11.1406 7.00122 11.5 11.3501C11.5267 11.6744 11.9756 12.0269 12.3 12C12.6243 11.9733 13.0247 11.5804 12.998 11.2561C12.5912 6.33295 8.66704 2.40882 3.74393 2.00204ZM2.9 6.00001C2.96411 5.68099 3.33084 5.29361 3.64986 5.35772C6.66377 5.96341 9.03654 8.33618 9.64223 11.3501C9.70634 11.6691 9.319 12.0359 8.99999 12.1C8.68097 12.1641 8.06411 11.819 7.99999 11.5C7.48788 8.95167 6.0483 7.51213 3.49999 7.00001C3.18097 6.9359 2.8359 6.31902 2.9 6.00001ZM2 9.20001C2.0641 8.88099 2.38635 8.65788 2.70537 8.722C4.50255 9.08317 5.91684 10.4975 6.27801 12.2946C6.34212 12.6137 6.13547 12.9242 5.81646 12.9883C5.49744 13.0525 4.86411 12.819 4.8 12.5C4.53239 11.1683 3.83158 10.4676 2.5 10.2C2.18098 10.1359 1.93588 9.51902 2 9.20001Z" fill="black"/>
|
||||
</svg>
|
||||
|
After Width: | Height: | Size: 1021 B |
5
assets/icons/stop_sharing.svg
Normal file
5
assets/icons/stop_sharing.svg
Normal file
File diff suppressed because one or more lines are too long
|
After Width: | Height: | Size: 8.6 KiB |
8
assets/icons/update.svg
Normal file
8
assets/icons/update.svg
Normal file
@@ -0,0 +1,8 @@
|
||||
<svg width="15" height="15" viewBox="0 0 15 15" fill="none" xmlns="http://www.w3.org/2000/svg">
|
||||
<path
|
||||
fill-rule="evenodd"
|
||||
clip-rule="evenodd"
|
||||
d="M1.90321 7.29677C1.90321 10.341 4.11041 12.4147 6.58893 12.8439C6.87255 12.893 7.06266 13.1627 7.01355 13.4464C6.96444 13.73 6.69471 13.9201 6.41109 13.871C3.49942 13.3668 0.86084 10.9127 0.86084 7.29677C0.860839 5.76009 1.55996 4.55245 2.37639 3.63377C2.96124 2.97568 3.63034 2.44135 4.16846 2.03202L2.53205 2.03202C2.25591 2.03202 2.03205 1.80816 2.03205 1.53202C2.03205 1.25588 2.25591 1.03202 2.53205 1.03202L5.53205 1.03202C5.80819 1.03202 6.03205 1.25588 6.03205 1.53202L6.03205 4.53202C6.03205 4.80816 5.80819 5.03202 5.53205 5.03202C5.25591 5.03202 5.03205 4.80816 5.03205 4.53202L5.03205 2.68645L5.03054 2.68759L5.03045 2.68766L5.03044 2.68767L5.03043 2.68767C4.45896 3.11868 3.76059 3.64538 3.15554 4.3262C2.44102 5.13021 1.90321 6.10154 1.90321 7.29677ZM13.0109 7.70321C13.0109 4.69115 10.8505 2.6296 8.40384 2.17029C8.12093 2.11718 7.93465 1.84479 7.98776 1.56188C8.04087 1.27898 8.31326 1.0927 8.59616 1.14581C11.4704 1.68541 14.0532 4.12605 14.0532 7.70321C14.0532 9.23988 13.3541 10.4475 12.5377 11.3662C11.9528 12.0243 11.2837 12.5586 10.7456 12.968L12.3821 12.968C12.6582 12.968 12.8821 13.1918 12.8821 13.468C12.8821 13.7441 12.6582 13.968 12.3821 13.968L9.38205 13.968C9.10591 13.968 8.88205 13.7441 8.88205 13.468L8.88205 10.468C8.88205 10.1918 9.10591 9.96796 9.38205 9.96796C9.65819 9.96796 9.88205 10.1918 9.88205 10.468L9.88205 12.3135L9.88362 12.3123C10.4551 11.8813 11.1535 11.3546 11.7585 10.6738C12.4731 9.86976 13.0109 8.89844 13.0109 7.70321Z"
|
||||
fill="currentColor"
|
||||
/>
|
||||
</svg>
|
||||
|
After Width: | Height: | Size: 1.6 KiB |
@@ -30,6 +30,7 @@
|
||||
"cmd-s": "workspace::Save",
|
||||
"cmd-shift-s": "workspace::SaveAs",
|
||||
"cmd-=": "zed::IncreaseBufferFontSize",
|
||||
"cmd-+": "zed::IncreaseBufferFontSize",
|
||||
"cmd--": "zed::DecreaseBufferFontSize",
|
||||
"cmd-0": "zed::ResetBufferFontSize",
|
||||
"cmd-,": "zed::OpenSettings",
|
||||
@@ -249,6 +250,7 @@
|
||||
"bindings": {
|
||||
"escape": "project_search::ToggleFocus",
|
||||
"alt-tab": "search::CycleMode",
|
||||
"cmd-shift-h": "search::ToggleReplace",
|
||||
"alt-cmd-g": "search::ActivateRegexMode",
|
||||
"alt-cmd-s": "search::ActivateSemanticMode",
|
||||
"alt-cmd-x": "search::ActivateTextMode"
|
||||
@@ -261,11 +263,19 @@
|
||||
"down": "search::NextHistoryQuery"
|
||||
}
|
||||
},
|
||||
{
|
||||
"context": "ProjectSearchBar && in_replace",
|
||||
"bindings": {
|
||||
"enter": "search::ReplaceNext",
|
||||
"cmd-enter": "search::ReplaceAll"
|
||||
}
|
||||
},
|
||||
{
|
||||
"context": "ProjectSearchView",
|
||||
"bindings": {
|
||||
"escape": "project_search::ToggleFocus",
|
||||
"alt-tab": "search::CycleMode",
|
||||
"cmd-shift-h": "search::ToggleReplace",
|
||||
"alt-cmd-g": "search::ActivateRegexMode",
|
||||
"alt-cmd-s": "search::ActivateSemanticMode",
|
||||
"alt-cmd-x": "search::ActivateTextMode"
|
||||
@@ -277,6 +287,7 @@
|
||||
"cmd-f": "project_search::ToggleFocus",
|
||||
"cmd-g": "search::SelectNextMatch",
|
||||
"cmd-shift-g": "search::SelectPrevMatch",
|
||||
"cmd-shift-h": "search::ToggleReplace",
|
||||
"alt-enter": "search::SelectAllMatches",
|
||||
"alt-cmd-c": "search::ToggleCaseSensitive",
|
||||
"alt-cmd-w": "search::ToggleWholeWord",
|
||||
@@ -359,42 +370,15 @@
|
||||
{
|
||||
"context": "Pane",
|
||||
"bindings": {
|
||||
"ctrl-1": [
|
||||
"pane::ActivateItem",
|
||||
0
|
||||
],
|
||||
"ctrl-2": [
|
||||
"pane::ActivateItem",
|
||||
1
|
||||
],
|
||||
"ctrl-3": [
|
||||
"pane::ActivateItem",
|
||||
2
|
||||
],
|
||||
"ctrl-4": [
|
||||
"pane::ActivateItem",
|
||||
3
|
||||
],
|
||||
"ctrl-5": [
|
||||
"pane::ActivateItem",
|
||||
4
|
||||
],
|
||||
"ctrl-6": [
|
||||
"pane::ActivateItem",
|
||||
5
|
||||
],
|
||||
"ctrl-7": [
|
||||
"pane::ActivateItem",
|
||||
6
|
||||
],
|
||||
"ctrl-8": [
|
||||
"pane::ActivateItem",
|
||||
7
|
||||
],
|
||||
"ctrl-9": [
|
||||
"pane::ActivateItem",
|
||||
8
|
||||
],
|
||||
"ctrl-1": ["pane::ActivateItem", 0],
|
||||
"ctrl-2": ["pane::ActivateItem", 1],
|
||||
"ctrl-3": ["pane::ActivateItem", 2],
|
||||
"ctrl-4": ["pane::ActivateItem", 3],
|
||||
"ctrl-5": ["pane::ActivateItem", 4],
|
||||
"ctrl-6": ["pane::ActivateItem", 5],
|
||||
"ctrl-7": ["pane::ActivateItem", 6],
|
||||
"ctrl-8": ["pane::ActivateItem", 7],
|
||||
"ctrl-9": ["pane::ActivateItem", 8],
|
||||
"ctrl-0": "pane::ActivateLastItem",
|
||||
"ctrl--": "pane::GoBack",
|
||||
"ctrl-_": "pane::GoForward",
|
||||
@@ -405,42 +389,15 @@
|
||||
{
|
||||
"context": "Workspace",
|
||||
"bindings": {
|
||||
"cmd-1": [
|
||||
"workspace::ActivatePane",
|
||||
0
|
||||
],
|
||||
"cmd-2": [
|
||||
"workspace::ActivatePane",
|
||||
1
|
||||
],
|
||||
"cmd-3": [
|
||||
"workspace::ActivatePane",
|
||||
2
|
||||
],
|
||||
"cmd-4": [
|
||||
"workspace::ActivatePane",
|
||||
3
|
||||
],
|
||||
"cmd-5": [
|
||||
"workspace::ActivatePane",
|
||||
4
|
||||
],
|
||||
"cmd-6": [
|
||||
"workspace::ActivatePane",
|
||||
5
|
||||
],
|
||||
"cmd-7": [
|
||||
"workspace::ActivatePane",
|
||||
6
|
||||
],
|
||||
"cmd-8": [
|
||||
"workspace::ActivatePane",
|
||||
7
|
||||
],
|
||||
"cmd-9": [
|
||||
"workspace::ActivatePane",
|
||||
8
|
||||
],
|
||||
"cmd-1": ["workspace::ActivatePane", 0],
|
||||
"cmd-2": ["workspace::ActivatePane", 1],
|
||||
"cmd-3": ["workspace::ActivatePane", 2],
|
||||
"cmd-4": ["workspace::ActivatePane", 3],
|
||||
"cmd-5": ["workspace::ActivatePane", 4],
|
||||
"cmd-6": ["workspace::ActivatePane", 5],
|
||||
"cmd-7": ["workspace::ActivatePane", 6],
|
||||
"cmd-8": ["workspace::ActivatePane", 7],
|
||||
"cmd-9": ["workspace::ActivatePane", 8],
|
||||
"cmd-b": "workspace::ToggleLeftDock",
|
||||
"cmd-r": "workspace::ToggleRightDock",
|
||||
"cmd-j": "workspace::ToggleBottomDock",
|
||||
@@ -483,22 +440,14 @@
|
||||
},
|
||||
{
|
||||
"bindings": {
|
||||
"cmd-k cmd-left": [
|
||||
"workspace::ActivatePaneInDirection",
|
||||
"Left"
|
||||
],
|
||||
"cmd-k cmd-right": [
|
||||
"workspace::ActivatePaneInDirection",
|
||||
"Right"
|
||||
],
|
||||
"cmd-k cmd-up": [
|
||||
"workspace::ActivatePaneInDirection",
|
||||
"Up"
|
||||
],
|
||||
"cmd-k cmd-down": [
|
||||
"workspace::ActivatePaneInDirection",
|
||||
"Down"
|
||||
]
|
||||
"cmd-k cmd-left": ["workspace::ActivatePaneInDirection", "Left"],
|
||||
"cmd-k cmd-right": ["workspace::ActivatePaneInDirection", "Right"],
|
||||
"cmd-k cmd-up": ["workspace::ActivatePaneInDirection", "Up"],
|
||||
"cmd-k cmd-down": ["workspace::ActivatePaneInDirection", "Down"],
|
||||
"cmd-k shift-left": ["workspace::SwapPaneInDirection", "Left"],
|
||||
"cmd-k shift-right": ["workspace::SwapPaneInDirection", "Right"],
|
||||
"cmd-k shift-up": ["workspace::SwapPaneInDirection", "Up"],
|
||||
"cmd-k shift-down": ["workspace::SwapPaneInDirection", "Down"]
|
||||
}
|
||||
},
|
||||
// Bindings from Atom
|
||||
@@ -562,7 +511,7 @@
|
||||
}
|
||||
},
|
||||
{
|
||||
"context": "ProjectSearchBar",
|
||||
"context": "ProjectSearchBar && !in_replace",
|
||||
"bindings": {
|
||||
"cmd-enter": "project_search::SearchInNew"
|
||||
}
|
||||
@@ -588,18 +537,16 @@
|
||||
}
|
||||
},
|
||||
{
|
||||
"context": "CollabPanel",
|
||||
"context": "CollabPanel && not_editing",
|
||||
"bindings": {
|
||||
"ctrl-backspace": "collab_panel::Remove",
|
||||
"space": "menu::Confirm"
|
||||
}
|
||||
},
|
||||
{
|
||||
"context": "CollabPanel > Editor",
|
||||
"context": "(CollabPanel && editing) > Editor",
|
||||
"bindings": {
|
||||
"cmd-c": "collab_panel::StartLinkChannel",
|
||||
"cmd-x": "collab_panel::StartMoveChannel",
|
||||
"cmd-v": "collab_panel::MoveOrLinkToSelected"
|
||||
"space": "collab_panel::InsertSpace"
|
||||
}
|
||||
},
|
||||
{
|
||||
@@ -622,57 +569,21 @@
|
||||
"cmd-v": "terminal::Paste",
|
||||
"cmd-k": "terminal::Clear",
|
||||
// Some nice conveniences
|
||||
"cmd-backspace": [
|
||||
"terminal::SendText",
|
||||
"\u0015"
|
||||
],
|
||||
"cmd-right": [
|
||||
"terminal::SendText",
|
||||
"\u0005"
|
||||
],
|
||||
"cmd-left": [
|
||||
"terminal::SendText",
|
||||
"\u0001"
|
||||
],
|
||||
"cmd-backspace": ["terminal::SendText", "\u0015"],
|
||||
"cmd-right": ["terminal::SendText", "\u0005"],
|
||||
"cmd-left": ["terminal::SendText", "\u0001"],
|
||||
// Terminal.app compatibility
|
||||
"alt-left": [
|
||||
"terminal::SendText",
|
||||
"\u001bb"
|
||||
],
|
||||
"alt-right": [
|
||||
"terminal::SendText",
|
||||
"\u001bf"
|
||||
],
|
||||
"alt-left": ["terminal::SendText", "\u001bb"],
|
||||
"alt-right": ["terminal::SendText", "\u001bf"],
|
||||
// There are conflicting bindings for these keys in the global context.
|
||||
// these bindings override them, remove at your own risk:
|
||||
"up": [
|
||||
"terminal::SendKeystroke",
|
||||
"up"
|
||||
],
|
||||
"pageup": [
|
||||
"terminal::SendKeystroke",
|
||||
"pageup"
|
||||
],
|
||||
"down": [
|
||||
"terminal::SendKeystroke",
|
||||
"down"
|
||||
],
|
||||
"pagedown": [
|
||||
"terminal::SendKeystroke",
|
||||
"pagedown"
|
||||
],
|
||||
"escape": [
|
||||
"terminal::SendKeystroke",
|
||||
"escape"
|
||||
],
|
||||
"enter": [
|
||||
"terminal::SendKeystroke",
|
||||
"enter"
|
||||
],
|
||||
"ctrl-c": [
|
||||
"terminal::SendKeystroke",
|
||||
"ctrl-c"
|
||||
]
|
||||
"up": ["terminal::SendKeystroke", "up"],
|
||||
"pageup": ["terminal::SendKeystroke", "pageup"],
|
||||
"down": ["terminal::SendKeystroke", "down"],
|
||||
"pagedown": ["terminal::SendKeystroke", "pagedown"],
|
||||
"escape": ["terminal::SendKeystroke", "escape"],
|
||||
"enter": ["terminal::SendKeystroke", "enter"],
|
||||
"ctrl-c": ["terminal::SendKeystroke", "ctrl-c"]
|
||||
}
|
||||
}
|
||||
]
|
||||
|
||||
@@ -18,6 +18,7 @@
|
||||
}
|
||||
}
|
||||
],
|
||||
":": "command_palette::Toggle",
|
||||
"h": "vim::Left",
|
||||
"left": "vim::Left",
|
||||
"backspace": "vim::Backspace",
|
||||
@@ -38,6 +39,7 @@
|
||||
"w": "vim::NextWordStart",
|
||||
"{": "vim::StartOfParagraph",
|
||||
"}": "vim::EndOfParagraph",
|
||||
"|": "vim::GoToColumn",
|
||||
"shift-w": [
|
||||
"vim::NextWordStart",
|
||||
{
|
||||
@@ -94,15 +96,10 @@
|
||||
}
|
||||
],
|
||||
"ctrl-o": "pane::GoBack",
|
||||
"ctrl-i": "pane::GoForward",
|
||||
"ctrl-]": "editor::GoToDefinition",
|
||||
"escape": [
|
||||
"vim::SwitchMode",
|
||||
"Normal"
|
||||
],
|
||||
"ctrl+[": [
|
||||
"vim::SwitchMode",
|
||||
"Normal"
|
||||
],
|
||||
"escape": ["vim::SwitchMode", "Normal"],
|
||||
"ctrl+[": ["vim::SwitchMode", "Normal"],
|
||||
"v": "vim::ToggleVisual",
|
||||
"shift-v": "vim::ToggleVisualLine",
|
||||
"ctrl-v": "vim::ToggleVisualBlock",
|
||||
@@ -125,10 +122,26 @@
|
||||
"g shift-t": "pane::ActivatePrevItem",
|
||||
"g d": "editor::GoToDefinition",
|
||||
"g shift-d": "editor::GoToTypeDefinition",
|
||||
"g n": "vim::SelectNext",
|
||||
"g shift-n": "vim::SelectPrevious",
|
||||
"g >": [
|
||||
"editor::SelectNext",
|
||||
{
|
||||
"replace_newest": true
|
||||
}
|
||||
],
|
||||
"g <": [
|
||||
"editor::SelectPrevious",
|
||||
{
|
||||
"replace_newest": true
|
||||
}
|
||||
],
|
||||
"g a": "editor::SelectAllMatches",
|
||||
"g s": "outline::Toggle",
|
||||
"g shift-s": "project_symbols::Toggle",
|
||||
"g .": "editor::ToggleCodeActions", // zed specific
|
||||
"g shift-a": "editor::FindAllReferences", // zed specific
|
||||
"g space": "editor::OpenExcerpts", // zed specific
|
||||
"g *": [
|
||||
"vim::MoveToNext",
|
||||
{
|
||||
@@ -205,101 +218,46 @@
|
||||
"shift-z shift-q": [
|
||||
"pane::CloseActiveItem",
|
||||
{
|
||||
"saveBehavior": "dontSave"
|
||||
"saveIntent": "skip"
|
||||
}
|
||||
],
|
||||
"shift-z shift-z": [
|
||||
"pane::CloseActiveItem",
|
||||
{
|
||||
"saveBehavior": "promptOnConflict"
|
||||
"saveIntent": "saveAll"
|
||||
}
|
||||
],
|
||||
// Count support
|
||||
"1": [
|
||||
"vim::Number",
|
||||
1
|
||||
],
|
||||
"2": [
|
||||
"vim::Number",
|
||||
2
|
||||
],
|
||||
"3": [
|
||||
"vim::Number",
|
||||
3
|
||||
],
|
||||
"4": [
|
||||
"vim::Number",
|
||||
4
|
||||
],
|
||||
"5": [
|
||||
"vim::Number",
|
||||
5
|
||||
],
|
||||
"6": [
|
||||
"vim::Number",
|
||||
6
|
||||
],
|
||||
"7": [
|
||||
"vim::Number",
|
||||
7
|
||||
],
|
||||
"8": [
|
||||
"vim::Number",
|
||||
8
|
||||
],
|
||||
"9": [
|
||||
"vim::Number",
|
||||
9
|
||||
],
|
||||
"1": ["vim::Number", 1],
|
||||
"2": ["vim::Number", 2],
|
||||
"3": ["vim::Number", 3],
|
||||
"4": ["vim::Number", 4],
|
||||
"5": ["vim::Number", 5],
|
||||
"6": ["vim::Number", 6],
|
||||
"7": ["vim::Number", 7],
|
||||
"8": ["vim::Number", 8],
|
||||
"9": ["vim::Number", 9],
|
||||
// window related commands (ctrl-w X)
|
||||
"ctrl-w left": [
|
||||
"workspace::ActivatePaneInDirection",
|
||||
"Left"
|
||||
],
|
||||
"ctrl-w right": [
|
||||
"workspace::ActivatePaneInDirection",
|
||||
"Right"
|
||||
],
|
||||
"ctrl-w up": [
|
||||
"workspace::ActivatePaneInDirection",
|
||||
"Up"
|
||||
],
|
||||
"ctrl-w down": [
|
||||
"workspace::ActivatePaneInDirection",
|
||||
"Down"
|
||||
],
|
||||
"ctrl-w h": [
|
||||
"workspace::ActivatePaneInDirection",
|
||||
"Left"
|
||||
],
|
||||
"ctrl-w l": [
|
||||
"workspace::ActivatePaneInDirection",
|
||||
"Right"
|
||||
],
|
||||
"ctrl-w k": [
|
||||
"workspace::ActivatePaneInDirection",
|
||||
"Up"
|
||||
],
|
||||
"ctrl-w j": [
|
||||
"workspace::ActivatePaneInDirection",
|
||||
"Down"
|
||||
],
|
||||
"ctrl-w ctrl-h": [
|
||||
"workspace::ActivatePaneInDirection",
|
||||
"Left"
|
||||
],
|
||||
"ctrl-w ctrl-l": [
|
||||
"workspace::ActivatePaneInDirection",
|
||||
"Right"
|
||||
],
|
||||
"ctrl-w ctrl-k": [
|
||||
"workspace::ActivatePaneInDirection",
|
||||
"Up"
|
||||
],
|
||||
"ctrl-w ctrl-j": [
|
||||
"workspace::ActivatePaneInDirection",
|
||||
"Down"
|
||||
],
|
||||
"ctrl-w left": ["workspace::ActivatePaneInDirection", "Left"],
|
||||
"ctrl-w right": ["workspace::ActivatePaneInDirection", "Right"],
|
||||
"ctrl-w up": ["workspace::ActivatePaneInDirection", "Up"],
|
||||
"ctrl-w down": ["workspace::ActivatePaneInDirection", "Down"],
|
||||
"ctrl-w h": ["workspace::ActivatePaneInDirection", "Left"],
|
||||
"ctrl-w l": ["workspace::ActivatePaneInDirection", "Right"],
|
||||
"ctrl-w k": ["workspace::ActivatePaneInDirection", "Up"],
|
||||
"ctrl-w j": ["workspace::ActivatePaneInDirection", "Down"],
|
||||
"ctrl-w ctrl-h": ["workspace::ActivatePaneInDirection", "Left"],
|
||||
"ctrl-w ctrl-l": ["workspace::ActivatePaneInDirection", "Right"],
|
||||
"ctrl-w ctrl-k": ["workspace::ActivatePaneInDirection", "Up"],
|
||||
"ctrl-w ctrl-j": ["workspace::ActivatePaneInDirection", "Down"],
|
||||
"ctrl-w shift-left": ["workspace::SwapPaneInDirection", "Left"],
|
||||
"ctrl-w shift-right": ["workspace::SwapPaneInDirection", "Right"],
|
||||
"ctrl-w shift-up": ["workspace::SwapPaneInDirection", "Up"],
|
||||
"ctrl-w shift-down": ["workspace::SwapPaneInDirection", "Down"],
|
||||
"ctrl-w shift-h": ["workspace::SwapPaneInDirection", "Left"],
|
||||
"ctrl-w shift-l": ["workspace::SwapPaneInDirection", "Right"],
|
||||
"ctrl-w shift-k": ["workspace::SwapPaneInDirection", "Up"],
|
||||
"ctrl-w shift-j": ["workspace::SwapPaneInDirection", "Down"],
|
||||
"ctrl-w g t": "pane::ActivateNextItem",
|
||||
"ctrl-w ctrl-g t": "pane::ActivateNextItem",
|
||||
"ctrl-w g shift-t": "pane::ActivatePrevItem",
|
||||
@@ -318,7 +276,11 @@
|
||||
"ctrl-w c": "pane::CloseAllItems",
|
||||
"ctrl-w ctrl-c": "pane::CloseAllItems",
|
||||
"ctrl-w q": "pane::CloseAllItems",
|
||||
"ctrl-w ctrl-q": "pane::CloseAllItems"
|
||||
"ctrl-w ctrl-q": "pane::CloseAllItems",
|
||||
"ctrl-w o": "workspace::CloseInactiveTabsAndPanes",
|
||||
"ctrl-w ctrl-o": "workspace::CloseInactiveTabsAndPanes",
|
||||
"ctrl-w n": ["workspace::NewFileInDirection", "Up"],
|
||||
"ctrl-w ctrl-n": ["workspace::NewFileInDirection", "Up"]
|
||||
}
|
||||
},
|
||||
{
|
||||
@@ -333,21 +295,13 @@
|
||||
"context": "Editor && vim_mode == normal && vim_operator == none && !VimWaiting",
|
||||
"bindings": {
|
||||
".": "vim::Repeat",
|
||||
"c": [
|
||||
"vim::PushOperator",
|
||||
"Change"
|
||||
],
|
||||
"c": ["vim::PushOperator", "Change"],
|
||||
"shift-c": "vim::ChangeToEndOfLine",
|
||||
"d": [
|
||||
"vim::PushOperator",
|
||||
"Delete"
|
||||
],
|
||||
"d": ["vim::PushOperator", "Delete"],
|
||||
"shift-d": "vim::DeleteToEndOfLine",
|
||||
"shift-j": "vim::JoinLines",
|
||||
"y": [
|
||||
"vim::PushOperator",
|
||||
"Yank"
|
||||
],
|
||||
"y": ["vim::PushOperator", "Yank"],
|
||||
"shift-y": "vim::YankLine",
|
||||
"i": "vim::InsertBefore",
|
||||
"shift-i": "vim::InsertFirstNonWhitespace",
|
||||
"a": "vim::InsertAfter",
|
||||
@@ -357,6 +311,8 @@
|
||||
"o": "vim::InsertLineBelow",
|
||||
"shift-o": "vim::InsertLineAbove",
|
||||
"~": "vim::ChangeCase",
|
||||
"ctrl-a": "vim::Increment",
|
||||
"ctrl-x": "vim::Decrement",
|
||||
"p": "vim::Paste",
|
||||
"shift-p": [
|
||||
"vim::Paste",
|
||||
@@ -380,10 +336,7 @@
|
||||
"backwards": true
|
||||
}
|
||||
],
|
||||
"r": [
|
||||
"vim::PushOperator",
|
||||
"Replace"
|
||||
],
|
||||
"r": ["vim::PushOperator", "Replace"],
|
||||
"s": "vim::Substitute",
|
||||
"shift-s": "vim::SubstituteLine",
|
||||
"> >": "editor::Indent",
|
||||
@@ -395,10 +348,7 @@
|
||||
{
|
||||
"context": "Editor && VimCount",
|
||||
"bindings": {
|
||||
"0": [
|
||||
"vim::Number",
|
||||
0
|
||||
]
|
||||
"0": ["vim::Number", 0]
|
||||
}
|
||||
},
|
||||
{
|
||||
@@ -434,12 +384,15 @@
|
||||
"'": "vim::Quotes",
|
||||
"`": "vim::BackQuotes",
|
||||
"\"": "vim::DoubleQuotes",
|
||||
"|": "vim::VerticalBars",
|
||||
"(": "vim::Parentheses",
|
||||
")": "vim::Parentheses",
|
||||
"b": "vim::Parentheses",
|
||||
"[": "vim::SquareBrackets",
|
||||
"]": "vim::SquareBrackets",
|
||||
"{": "vim::CurlyBrackets",
|
||||
"}": "vim::CurlyBrackets",
|
||||
"shift-b": "vim::CurlyBrackets",
|
||||
"<": "vim::AngleBrackets",
|
||||
">": "vim::AngleBrackets"
|
||||
}
|
||||
@@ -468,25 +421,27 @@
|
||||
"shift-r": "vim::SubstituteLine",
|
||||
"c": "vim::Substitute",
|
||||
"~": "vim::ChangeCase",
|
||||
"ctrl-a": "vim::Increment",
|
||||
"ctrl-x": "vim::Decrement",
|
||||
"g ctrl-a": [
|
||||
"vim::Increment",
|
||||
{
|
||||
"step": true
|
||||
}
|
||||
],
|
||||
"g ctrl-x": [
|
||||
"vim::Decrement",
|
||||
{
|
||||
"step": true
|
||||
}
|
||||
],
|
||||
"shift-i": "vim::InsertBefore",
|
||||
"shift-a": "vim::InsertAfter",
|
||||
"shift-j": "vim::JoinLines",
|
||||
"r": [
|
||||
"vim::PushOperator",
|
||||
"Replace"
|
||||
],
|
||||
"ctrl-c": [
|
||||
"vim::SwitchMode",
|
||||
"Normal"
|
||||
],
|
||||
"escape": [
|
||||
"vim::SwitchMode",
|
||||
"Normal"
|
||||
],
|
||||
"ctrl+[": [
|
||||
"vim::SwitchMode",
|
||||
"Normal"
|
||||
],
|
||||
"r": ["vim::PushOperator", "Replace"],
|
||||
"ctrl-c": ["vim::SwitchMode", "Normal"],
|
||||
"escape": ["vim::SwitchMode", "Normal"],
|
||||
"ctrl+[": ["vim::SwitchMode", "Normal"],
|
||||
">": "editor::Indent",
|
||||
"<": "editor::Outdent",
|
||||
"i": [
|
||||
@@ -508,11 +463,16 @@
|
||||
}
|
||||
},
|
||||
{
|
||||
"context": "Editor && vim_mode == insert && !menu",
|
||||
"context": "Editor && vim_mode == insert",
|
||||
"bindings": {
|
||||
"escape": "vim::NormalBefore",
|
||||
"ctrl-c": "vim::NormalBefore",
|
||||
"ctrl-[": "vim::NormalBefore"
|
||||
"ctrl-[": "vim::NormalBefore",
|
||||
"ctrl-x ctrl-o": "editor::ShowCompletions",
|
||||
"ctrl-x ctrl-a": "assistant::InlineAssist", // zed specific
|
||||
"ctrl-x ctrl-c": "copilot::Suggest", // zed specific
|
||||
"ctrl-x ctrl-l": "editor::ToggleCodeActions", // zed specific
|
||||
"ctrl-x ctrl-z": "editor::Cancel"
|
||||
}
|
||||
},
|
||||
{
|
||||
@@ -520,18 +480,12 @@
|
||||
"bindings": {
|
||||
"tab": "vim::Tab",
|
||||
"enter": "vim::Enter",
|
||||
"escape": [
|
||||
"vim::SwitchMode",
|
||||
"Normal"
|
||||
],
|
||||
"ctrl+[": [
|
||||
"vim::SwitchMode",
|
||||
"Normal"
|
||||
]
|
||||
"escape": ["vim::SwitchMode", "Normal"],
|
||||
"ctrl+[": ["vim::SwitchMode", "Normal"]
|
||||
}
|
||||
},
|
||||
{
|
||||
"context": "BufferSearchBar > VimEnabled",
|
||||
"context": "BufferSearchBar && !in_replace > VimEnabled",
|
||||
"bindings": {
|
||||
"enter": "vim::SearchSubmit",
|
||||
"escape": "buffer_search::Dismiss"
|
||||
|
||||
@@ -50,6 +50,9 @@
|
||||
// Whether to pop the completions menu while typing in an editor without
|
||||
// explicitly requesting it.
|
||||
"show_completions_on_input": true,
|
||||
// Whether to display inline and alongside documentation for items in the
|
||||
// completions menu
|
||||
"show_completion_documentation": true,
|
||||
// Whether to show wrap guides in the editor. Setting this to true will
|
||||
// show a guide at the 'preferred_line_length' value if softwrap is set to
|
||||
// 'preferred_line_length', and will show any additional guides as specified
|
||||
@@ -76,7 +79,7 @@
|
||||
// Settings related to calls in Zed
|
||||
"calls": {
|
||||
// Join calls with the microphone muted by default
|
||||
"mute_on_join": true
|
||||
"mute_on_join": false
|
||||
},
|
||||
// Scrollbar related settings
|
||||
"scrollbar": {
|
||||
@@ -139,6 +142,14 @@
|
||||
// Default width of the channels panel.
|
||||
"default_width": 240
|
||||
},
|
||||
"notification_panel": {
|
||||
// Whether to show the collaboration panel button in the status bar.
|
||||
"button": true,
|
||||
// Where to dock channels panel. Can be 'left' or 'right'.
|
||||
"dock": "right",
|
||||
// Default width of the channels panel.
|
||||
"default_width": 380
|
||||
},
|
||||
"assistant": {
|
||||
// Whether to show the assistant panel button in the status bar.
|
||||
"button": true,
|
||||
@@ -199,7 +210,12 @@
|
||||
// "arguments": ["--stdin-filepath", "{buffer_path}"]
|
||||
// }
|
||||
// }
|
||||
"formatter": "language_server",
|
||||
// 3. Format code using Zed's Prettier integration:
|
||||
// "formatter": "prettier"
|
||||
// 4. Default. Format files using Zed's Prettier integration (if applicable),
|
||||
// or falling back to formatting via language server:
|
||||
// "formatter": "auto"
|
||||
"formatter": "auto",
|
||||
// How to soft-wrap long lines of text. This setting can take
|
||||
// three values:
|
||||
//
|
||||
@@ -227,6 +243,11 @@
|
||||
},
|
||||
// Automatically update Zed
|
||||
"auto_update": true,
|
||||
// Diagnostics configuration.
|
||||
"diagnostics": {
|
||||
// Whether to show warnings or not by default.
|
||||
"include_warnings": true
|
||||
},
|
||||
// Git gutter behavior configuration.
|
||||
"git": {
|
||||
// Control whether the git gutter is shown. May take 2 values:
|
||||
@@ -356,7 +377,7 @@
|
||||
".venv",
|
||||
"venv"
|
||||
],
|
||||
// Can also be 'csh' and 'fish'
|
||||
// Can also be 'csh', 'fish', and `nushell`
|
||||
"activate_script": "default"
|
||||
}
|
||||
}
|
||||
@@ -370,7 +391,28 @@
|
||||
},
|
||||
// Difference settings for semantic_index
|
||||
"semantic_index": {
|
||||
"enabled": false
|
||||
"enabled": true
|
||||
},
|
||||
// Settings specific to our elixir integration
|
||||
"elixir": {
|
||||
// Change the LSP zed uses for elixir.
|
||||
// Note that changing this setting requires a restart of Zed
|
||||
// to take effect.
|
||||
//
|
||||
// May take 3 values:
|
||||
// 1. Use the standard ElixirLS, this is the default
|
||||
// "lsp": "elixir_ls"
|
||||
// 2. Use the experimental NextLs
|
||||
// "lsp": "next_ls",
|
||||
// 3. Use a language server installed locally on your machine:
|
||||
// "lsp": {
|
||||
// "local": {
|
||||
// "path": "~/next-ls/bin/start",
|
||||
// "arguments": ["--stdio"]
|
||||
// }
|
||||
// },
|
||||
//
|
||||
"lsp": "elixir_ls"
|
||||
},
|
||||
// Different settings for specific languages.
|
||||
"languages": {
|
||||
@@ -403,6 +445,16 @@
|
||||
"tab_size": 2
|
||||
}
|
||||
},
|
||||
// Zed's Prettier integration settings.
|
||||
// If Prettier is enabled, Zed will use this its Prettier instance for any applicable file, if
|
||||
// project has no other Prettier installed.
|
||||
"prettier": {
|
||||
// Use regular Prettier json configuration:
|
||||
// "trailingComma": "es5",
|
||||
// "tabWidth": 4,
|
||||
// "semi": false,
|
||||
// "singleQuote": true
|
||||
},
|
||||
// LSP Specific settings.
|
||||
"lsp": {
|
||||
// Specify the LSP name as a key here.
|
||||
|
||||
38
crates/Cargo.toml
Normal file
38
crates/Cargo.toml
Normal file
@@ -0,0 +1,38 @@
|
||||
[package]
|
||||
name = "ai"
|
||||
version = "0.1.0"
|
||||
edition = "2021"
|
||||
publish = false
|
||||
|
||||
[lib]
|
||||
path = "src/ai.rs"
|
||||
doctest = false
|
||||
|
||||
[features]
|
||||
test-support = []
|
||||
|
||||
[dependencies]
|
||||
gpui = { path = "../gpui" }
|
||||
util = { path = "../util" }
|
||||
language = { path = "../language" }
|
||||
async-trait.workspace = true
|
||||
anyhow.workspace = true
|
||||
futures.workspace = true
|
||||
lazy_static.workspace = true
|
||||
ordered-float.workspace = true
|
||||
parking_lot.workspace = true
|
||||
isahc.workspace = true
|
||||
regex.workspace = true
|
||||
serde.workspace = true
|
||||
serde_json.workspace = true
|
||||
postage.workspace = true
|
||||
rand.workspace = true
|
||||
log.workspace = true
|
||||
parse_duration = "2.1.1"
|
||||
tiktoken-rs = "0.5.0"
|
||||
matrixmultiply = "0.3.7"
|
||||
rusqlite = { version = "0.29.0", features = ["blob", "array", "modern_sqlite"] }
|
||||
bincode = "1.3.3"
|
||||
|
||||
[dev-dependencies]
|
||||
gpui = { path = "../gpui", features = ["test-support"] }
|
||||
@@ -8,40 +8,31 @@ publish = false
|
||||
path = "src/ai.rs"
|
||||
doctest = false
|
||||
|
||||
[dependencies]
|
||||
client = { path = "../client" }
|
||||
collections = { path = "../collections"}
|
||||
editor = { path = "../editor" }
|
||||
fs = { path = "../fs" }
|
||||
gpui = { path = "../gpui" }
|
||||
language = { path = "../language" }
|
||||
menu = { path = "../menu" }
|
||||
search = { path = "../search" }
|
||||
settings = { path = "../settings" }
|
||||
theme = { path = "../theme" }
|
||||
util = { path = "../util" }
|
||||
uuid = { version = "1.1.2", features = ["v4"] }
|
||||
workspace = { path = "../workspace" }
|
||||
[features]
|
||||
test-support = []
|
||||
|
||||
[dependencies]
|
||||
gpui = { path = "../gpui" }
|
||||
util = { path = "../util" }
|
||||
language = { path = "../language" }
|
||||
async-trait.workspace = true
|
||||
anyhow.workspace = true
|
||||
chrono = { version = "0.4", features = ["serde"] }
|
||||
futures.workspace = true
|
||||
indoc.workspace = true
|
||||
isahc.workspace = true
|
||||
lazy_static.workspace = true
|
||||
ordered-float.workspace = true
|
||||
parking_lot.workspace = true
|
||||
isahc.workspace = true
|
||||
regex.workspace = true
|
||||
schemars.workspace = true
|
||||
serde.workspace = true
|
||||
serde_json.workspace = true
|
||||
smol.workspace = true
|
||||
tiktoken-rs = "0.4"
|
||||
postage.workspace = true
|
||||
rand.workspace = true
|
||||
log.workspace = true
|
||||
parse_duration = "2.1.1"
|
||||
tiktoken-rs = "0.5.0"
|
||||
matrixmultiply = "0.3.7"
|
||||
rusqlite = { version = "0.29.0", features = ["blob", "array", "modern_sqlite"] }
|
||||
bincode = "1.3.3"
|
||||
|
||||
[dev-dependencies]
|
||||
editor = { path = "../editor", features = ["test-support"] }
|
||||
project = { path = "../project", features = ["test-support"] }
|
||||
|
||||
ctor.workspace = true
|
||||
env_logger.workspace = true
|
||||
log.workspace = true
|
||||
rand.workspace = true
|
||||
gpui = { path = "../gpui", features = ["test-support"] }
|
||||
|
||||
@@ -1,294 +1,8 @@
|
||||
pub mod assistant;
|
||||
mod assistant_settings;
|
||||
mod codegen;
|
||||
mod streaming_diff;
|
||||
|
||||
use anyhow::{anyhow, Result};
|
||||
pub use assistant::AssistantPanel;
|
||||
use assistant_settings::OpenAIModel;
|
||||
use chrono::{DateTime, Local};
|
||||
use collections::HashMap;
|
||||
use fs::Fs;
|
||||
use futures::{io::BufReader, AsyncBufReadExt, AsyncReadExt, Stream, StreamExt};
|
||||
use gpui::{executor::Background, AppContext};
|
||||
use isahc::{http::StatusCode, Request, RequestExt};
|
||||
use regex::Regex;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::{
|
||||
cmp::Reverse,
|
||||
ffi::OsStr,
|
||||
fmt::{self, Display},
|
||||
io,
|
||||
path::PathBuf,
|
||||
sync::Arc,
|
||||
};
|
||||
use util::paths::CONVERSATIONS_DIR;
|
||||
|
||||
const OPENAI_API_URL: &'static str = "https://api.openai.com/v1";
|
||||
|
||||
// Data types for chat completion requests
|
||||
#[derive(Debug, Default, Serialize)]
|
||||
pub struct OpenAIRequest {
|
||||
model: String,
|
||||
messages: Vec<RequestMessage>,
|
||||
stream: bool,
|
||||
}
|
||||
|
||||
#[derive(
|
||||
Copy, Clone, Debug, Default, Eq, PartialEq, PartialOrd, Ord, Hash, Serialize, Deserialize,
|
||||
)]
|
||||
struct MessageId(usize);
|
||||
|
||||
#[derive(Clone, Debug, Serialize, Deserialize)]
|
||||
struct MessageMetadata {
|
||||
role: Role,
|
||||
sent_at: DateTime<Local>,
|
||||
status: MessageStatus,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, Serialize, Deserialize)]
|
||||
enum MessageStatus {
|
||||
Pending,
|
||||
Done,
|
||||
Error(Arc<str>),
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize)]
|
||||
struct SavedMessage {
|
||||
id: MessageId,
|
||||
start: usize,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize)]
|
||||
struct SavedConversation {
|
||||
id: Option<String>,
|
||||
zed: String,
|
||||
version: String,
|
||||
text: String,
|
||||
messages: Vec<SavedMessage>,
|
||||
message_metadata: HashMap<MessageId, MessageMetadata>,
|
||||
summary: String,
|
||||
model: OpenAIModel,
|
||||
}
|
||||
|
||||
impl SavedConversation {
|
||||
const VERSION: &'static str = "0.1.0";
|
||||
}
|
||||
|
||||
struct SavedConversationMetadata {
|
||||
title: String,
|
||||
path: PathBuf,
|
||||
mtime: chrono::DateTime<chrono::Local>,
|
||||
}
|
||||
|
||||
impl SavedConversationMetadata {
|
||||
pub async fn list(fs: Arc<dyn Fs>) -> Result<Vec<Self>> {
|
||||
fs.create_dir(&CONVERSATIONS_DIR).await?;
|
||||
|
||||
let mut paths = fs.read_dir(&CONVERSATIONS_DIR).await?;
|
||||
let mut conversations = Vec::<SavedConversationMetadata>::new();
|
||||
while let Some(path) = paths.next().await {
|
||||
let path = path?;
|
||||
if path.extension() != Some(OsStr::new("json")) {
|
||||
continue;
|
||||
}
|
||||
|
||||
let pattern = r" - \d+.zed.json$";
|
||||
let re = Regex::new(pattern).unwrap();
|
||||
|
||||
let metadata = fs.metadata(&path).await?;
|
||||
if let Some((file_name, metadata)) = path
|
||||
.file_name()
|
||||
.and_then(|name| name.to_str())
|
||||
.zip(metadata)
|
||||
{
|
||||
let title = re.replace(file_name, "");
|
||||
conversations.push(Self {
|
||||
title: title.into_owned(),
|
||||
path,
|
||||
mtime: metadata.mtime.into(),
|
||||
});
|
||||
}
|
||||
}
|
||||
conversations.sort_unstable_by_key(|conversation| Reverse(conversation.mtime));
|
||||
|
||||
Ok(conversations)
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Eq, PartialEq)]
|
||||
struct RequestMessage {
|
||||
role: Role,
|
||||
content: String,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Eq, PartialEq)]
|
||||
pub struct ResponseMessage {
|
||||
role: Option<Role>,
|
||||
content: Option<String>,
|
||||
}
|
||||
|
||||
#[derive(Clone, Copy, Serialize, Deserialize, Debug, Eq, PartialEq)]
|
||||
#[serde(rename_all = "lowercase")]
|
||||
enum Role {
|
||||
User,
|
||||
Assistant,
|
||||
System,
|
||||
}
|
||||
|
||||
impl Role {
|
||||
pub fn cycle(&mut self) {
|
||||
*self = match self {
|
||||
Role::User => Role::Assistant,
|
||||
Role::Assistant => Role::System,
|
||||
Role::System => Role::User,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Display for Role {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
match self {
|
||||
Role::User => write!(f, "User"),
|
||||
Role::Assistant => write!(f, "Assistant"),
|
||||
Role::System => write!(f, "System"),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Deserialize, Debug)]
|
||||
pub struct OpenAIResponseStreamEvent {
|
||||
pub id: Option<String>,
|
||||
pub object: String,
|
||||
pub created: u32,
|
||||
pub model: String,
|
||||
pub choices: Vec<ChatChoiceDelta>,
|
||||
pub usage: Option<Usage>,
|
||||
}
|
||||
|
||||
#[derive(Deserialize, Debug)]
|
||||
pub struct Usage {
|
||||
pub prompt_tokens: u32,
|
||||
pub completion_tokens: u32,
|
||||
pub total_tokens: u32,
|
||||
}
|
||||
|
||||
#[derive(Deserialize, Debug)]
|
||||
pub struct ChatChoiceDelta {
|
||||
pub index: u32,
|
||||
pub delta: ResponseMessage,
|
||||
pub finish_reason: Option<String>,
|
||||
}
|
||||
|
||||
#[derive(Deserialize, Debug)]
|
||||
struct OpenAIUsage {
|
||||
prompt_tokens: u64,
|
||||
completion_tokens: u64,
|
||||
total_tokens: u64,
|
||||
}
|
||||
|
||||
#[derive(Deserialize, Debug)]
|
||||
struct OpenAIChoice {
|
||||
text: String,
|
||||
index: u32,
|
||||
logprobs: Option<serde_json::Value>,
|
||||
finish_reason: Option<String>,
|
||||
}
|
||||
|
||||
pub fn init(cx: &mut AppContext) {
|
||||
assistant::init(cx);
|
||||
}
|
||||
|
||||
pub async fn stream_completion(
|
||||
api_key: String,
|
||||
executor: Arc<Background>,
|
||||
mut request: OpenAIRequest,
|
||||
) -> Result<impl Stream<Item = Result<OpenAIResponseStreamEvent>>> {
|
||||
request.stream = true;
|
||||
|
||||
let (tx, rx) = futures::channel::mpsc::unbounded::<Result<OpenAIResponseStreamEvent>>();
|
||||
|
||||
let json_data = serde_json::to_string(&request)?;
|
||||
let mut response = Request::post(format!("{OPENAI_API_URL}/chat/completions"))
|
||||
.header("Content-Type", "application/json")
|
||||
.header("Authorization", format!("Bearer {}", api_key))
|
||||
.body(json_data)?
|
||||
.send_async()
|
||||
.await?;
|
||||
|
||||
let status = response.status();
|
||||
if status == StatusCode::OK {
|
||||
executor
|
||||
.spawn(async move {
|
||||
let mut lines = BufReader::new(response.body_mut()).lines();
|
||||
|
||||
fn parse_line(
|
||||
line: Result<String, io::Error>,
|
||||
) -> Result<Option<OpenAIResponseStreamEvent>> {
|
||||
if let Some(data) = line?.strip_prefix("data: ") {
|
||||
let event = serde_json::from_str(&data)?;
|
||||
Ok(Some(event))
|
||||
} else {
|
||||
Ok(None)
|
||||
}
|
||||
}
|
||||
|
||||
while let Some(line) = lines.next().await {
|
||||
if let Some(event) = parse_line(line).transpose() {
|
||||
let done = event.as_ref().map_or(false, |event| {
|
||||
event
|
||||
.choices
|
||||
.last()
|
||||
.map_or(false, |choice| choice.finish_reason.is_some())
|
||||
});
|
||||
if tx.unbounded_send(event).is_err() {
|
||||
break;
|
||||
}
|
||||
|
||||
if done {
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
anyhow::Ok(())
|
||||
})
|
||||
.detach();
|
||||
|
||||
Ok(rx)
|
||||
} else {
|
||||
let mut body = String::new();
|
||||
response.body_mut().read_to_string(&mut body).await?;
|
||||
|
||||
#[derive(Deserialize)]
|
||||
struct OpenAIResponse {
|
||||
error: OpenAIError,
|
||||
}
|
||||
|
||||
#[derive(Deserialize)]
|
||||
struct OpenAIError {
|
||||
message: String,
|
||||
}
|
||||
|
||||
match serde_json::from_str::<OpenAIResponse>(&body) {
|
||||
Ok(response) if !response.error.message.is_empty() => Err(anyhow!(
|
||||
"Failed to connect to OpenAI API: {}",
|
||||
response.error.message,
|
||||
)),
|
||||
|
||||
_ => Err(anyhow!(
|
||||
"Failed to connect to OpenAI API: {} {}",
|
||||
response.status(),
|
||||
body,
|
||||
)),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
#[ctor::ctor]
|
||||
fn init_logger() {
|
||||
if std::env::var("RUST_LOG").is_ok() {
|
||||
env_logger::init();
|
||||
}
|
||||
}
|
||||
pub mod auth;
|
||||
pub mod completion;
|
||||
pub mod embedding;
|
||||
pub mod models;
|
||||
pub mod prompts;
|
||||
pub mod providers;
|
||||
#[cfg(any(test, feature = "test-support"))]
|
||||
pub mod test;
|
||||
|
||||
15
crates/ai/src/auth.rs
Normal file
15
crates/ai/src/auth.rs
Normal file
@@ -0,0 +1,15 @@
|
||||
use gpui::AppContext;
|
||||
|
||||
#[derive(Clone, Debug)]
|
||||
pub enum ProviderCredential {
|
||||
Credentials { api_key: String },
|
||||
NoCredentials,
|
||||
NotNeeded,
|
||||
}
|
||||
|
||||
pub trait CredentialProvider: Send + Sync {
|
||||
fn has_credentials(&self) -> bool;
|
||||
fn retrieve_credentials(&self, cx: &AppContext) -> ProviderCredential;
|
||||
fn save_credentials(&self, cx: &AppContext, credential: ProviderCredential);
|
||||
fn delete_credentials(&self, cx: &AppContext);
|
||||
}
|
||||
23
crates/ai/src/completion.rs
Normal file
23
crates/ai/src/completion.rs
Normal file
@@ -0,0 +1,23 @@
|
||||
use anyhow::Result;
|
||||
use futures::{future::BoxFuture, stream::BoxStream};
|
||||
|
||||
use crate::{auth::CredentialProvider, models::LanguageModel};
|
||||
|
||||
pub trait CompletionRequest: Send + Sync {
|
||||
fn data(&self) -> serde_json::Result<String>;
|
||||
}
|
||||
|
||||
pub trait CompletionProvider: CredentialProvider {
|
||||
fn base_model(&self) -> Box<dyn LanguageModel>;
|
||||
fn complete(
|
||||
&self,
|
||||
prompt: Box<dyn CompletionRequest>,
|
||||
) -> BoxFuture<'static, Result<BoxStream<'static, Result<String>>>>;
|
||||
fn box_clone(&self) -> Box<dyn CompletionProvider>;
|
||||
}
|
||||
|
||||
impl Clone for Box<dyn CompletionProvider> {
|
||||
fn clone(&self) -> Box<dyn CompletionProvider> {
|
||||
self.box_clone()
|
||||
}
|
||||
}
|
||||
123
crates/ai/src/embedding.rs
Normal file
123
crates/ai/src/embedding.rs
Normal file
@@ -0,0 +1,123 @@
|
||||
use std::time::Instant;
|
||||
|
||||
use anyhow::Result;
|
||||
use async_trait::async_trait;
|
||||
use ordered_float::OrderedFloat;
|
||||
use rusqlite::types::{FromSql, FromSqlResult, ToSqlOutput, ValueRef};
|
||||
use rusqlite::ToSql;
|
||||
|
||||
use crate::auth::CredentialProvider;
|
||||
use crate::models::LanguageModel;
|
||||
|
||||
#[derive(Debug, PartialEq, Clone)]
|
||||
pub struct Embedding(pub Vec<f32>);
|
||||
|
||||
// This is needed for semantic index functionality
|
||||
// Unfortunately it has to live wherever the "Embedding" struct is created.
|
||||
// Keeping this in here though, introduces a 'rusqlite' dependency into AI
|
||||
// which is less than ideal
|
||||
impl FromSql for Embedding {
|
||||
fn column_result(value: ValueRef) -> FromSqlResult<Self> {
|
||||
let bytes = value.as_blob()?;
|
||||
let embedding: Result<Vec<f32>, Box<bincode::ErrorKind>> = bincode::deserialize(bytes);
|
||||
if embedding.is_err() {
|
||||
return Err(rusqlite::types::FromSqlError::Other(embedding.unwrap_err()));
|
||||
}
|
||||
Ok(Embedding(embedding.unwrap()))
|
||||
}
|
||||
}
|
||||
|
||||
impl ToSql for Embedding {
|
||||
fn to_sql(&self) -> rusqlite::Result<ToSqlOutput> {
|
||||
let bytes = bincode::serialize(&self.0)
|
||||
.map_err(|err| rusqlite::Error::ToSqlConversionFailure(Box::new(err)))?;
|
||||
Ok(ToSqlOutput::Owned(rusqlite::types::Value::Blob(bytes)))
|
||||
}
|
||||
}
|
||||
impl From<Vec<f32>> for Embedding {
|
||||
fn from(value: Vec<f32>) -> Self {
|
||||
Embedding(value)
|
||||
}
|
||||
}
|
||||
|
||||
impl Embedding {
|
||||
pub fn similarity(&self, other: &Self) -> OrderedFloat<f32> {
|
||||
let len = self.0.len();
|
||||
assert_eq!(len, other.0.len());
|
||||
|
||||
let mut result = 0.0;
|
||||
unsafe {
|
||||
matrixmultiply::sgemm(
|
||||
1,
|
||||
len,
|
||||
1,
|
||||
1.0,
|
||||
self.0.as_ptr(),
|
||||
len as isize,
|
||||
1,
|
||||
other.0.as_ptr(),
|
||||
1,
|
||||
len as isize,
|
||||
0.0,
|
||||
&mut result as *mut f32,
|
||||
1,
|
||||
1,
|
||||
);
|
||||
}
|
||||
OrderedFloat(result)
|
||||
}
|
||||
}
|
||||
|
||||
#[async_trait]
|
||||
pub trait EmbeddingProvider: CredentialProvider {
|
||||
fn base_model(&self) -> Box<dyn LanguageModel>;
|
||||
async fn embed_batch(&self, spans: Vec<String>) -> Result<Vec<Embedding>>;
|
||||
fn max_tokens_per_batch(&self) -> usize;
|
||||
fn rate_limit_expiration(&self) -> Option<Instant>;
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
use rand::prelude::*;
|
||||
|
||||
#[gpui::test]
|
||||
fn test_similarity(mut rng: StdRng) {
|
||||
assert_eq!(
|
||||
Embedding::from(vec![1., 0., 0., 0., 0.])
|
||||
.similarity(&Embedding::from(vec![0., 1., 0., 0., 0.])),
|
||||
0.
|
||||
);
|
||||
assert_eq!(
|
||||
Embedding::from(vec![2., 0., 0., 0., 0.])
|
||||
.similarity(&Embedding::from(vec![3., 1., 0., 0., 0.])),
|
||||
6.
|
||||
);
|
||||
|
||||
for _ in 0..100 {
|
||||
let size = 1536;
|
||||
let mut a = vec![0.; size];
|
||||
let mut b = vec![0.; size];
|
||||
for (a, b) in a.iter_mut().zip(b.iter_mut()) {
|
||||
*a = rng.gen();
|
||||
*b = rng.gen();
|
||||
}
|
||||
let a = Embedding::from(a);
|
||||
let b = Embedding::from(b);
|
||||
|
||||
assert_eq!(
|
||||
round_to_decimals(a.similarity(&b), 1),
|
||||
round_to_decimals(reference_dot(&a.0, &b.0), 1)
|
||||
);
|
||||
}
|
||||
|
||||
fn round_to_decimals(n: OrderedFloat<f32>, decimal_places: i32) -> f32 {
|
||||
let factor = (10.0 as f32).powi(decimal_places);
|
||||
(n * factor).round() / factor
|
||||
}
|
||||
|
||||
fn reference_dot(a: &[f32], b: &[f32]) -> OrderedFloat<f32> {
|
||||
OrderedFloat(a.iter().zip(b.iter()).map(|(a, b)| a * b).sum())
|
||||
}
|
||||
}
|
||||
}
|
||||
16
crates/ai/src/models.rs
Normal file
16
crates/ai/src/models.rs
Normal file
@@ -0,0 +1,16 @@
|
||||
pub enum TruncationDirection {
|
||||
Start,
|
||||
End,
|
||||
}
|
||||
|
||||
pub trait LanguageModel {
|
||||
fn name(&self) -> String;
|
||||
fn count_tokens(&self, content: &str) -> anyhow::Result<usize>;
|
||||
fn truncate(
|
||||
&self,
|
||||
content: &str,
|
||||
length: usize,
|
||||
direction: TruncationDirection,
|
||||
) -> anyhow::Result<String>;
|
||||
fn capacity(&self) -> anyhow::Result<usize>;
|
||||
}
|
||||
330
crates/ai/src/prompts/base.rs
Normal file
330
crates/ai/src/prompts/base.rs
Normal file
@@ -0,0 +1,330 @@
|
||||
use std::cmp::Reverse;
|
||||
use std::ops::Range;
|
||||
use std::sync::Arc;
|
||||
|
||||
use language::BufferSnapshot;
|
||||
use util::ResultExt;
|
||||
|
||||
use crate::models::LanguageModel;
|
||||
use crate::prompts::repository_context::PromptCodeSnippet;
|
||||
|
||||
pub(crate) enum PromptFileType {
|
||||
Text,
|
||||
Code,
|
||||
}
|
||||
|
||||
// TODO: Set this up to manage for defaults well
|
||||
pub struct PromptArguments {
|
||||
pub model: Arc<dyn LanguageModel>,
|
||||
pub user_prompt: Option<String>,
|
||||
pub language_name: Option<String>,
|
||||
pub project_name: Option<String>,
|
||||
pub snippets: Vec<PromptCodeSnippet>,
|
||||
pub reserved_tokens: usize,
|
||||
pub buffer: Option<BufferSnapshot>,
|
||||
pub selected_range: Option<Range<usize>>,
|
||||
}
|
||||
|
||||
impl PromptArguments {
|
||||
pub(crate) fn get_file_type(&self) -> PromptFileType {
|
||||
if self
|
||||
.language_name
|
||||
.as_ref()
|
||||
.and_then(|name| Some(!["Markdown", "Plain Text"].contains(&name.as_str())))
|
||||
.unwrap_or(true)
|
||||
{
|
||||
PromptFileType::Code
|
||||
} else {
|
||||
PromptFileType::Text
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub trait PromptTemplate {
|
||||
fn generate(
|
||||
&self,
|
||||
args: &PromptArguments,
|
||||
max_token_length: Option<usize>,
|
||||
) -> anyhow::Result<(String, usize)>;
|
||||
}
|
||||
|
||||
#[repr(i8)]
|
||||
#[derive(PartialEq, Eq, Ord)]
|
||||
pub enum PromptPriority {
|
||||
Mandatory, // Ignores truncation
|
||||
Ordered { order: usize }, // Truncates based on priority
|
||||
}
|
||||
|
||||
impl PartialOrd for PromptPriority {
|
||||
fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> {
|
||||
match (self, other) {
|
||||
(Self::Mandatory, Self::Mandatory) => Some(std::cmp::Ordering::Equal),
|
||||
(Self::Mandatory, Self::Ordered { .. }) => Some(std::cmp::Ordering::Greater),
|
||||
(Self::Ordered { .. }, Self::Mandatory) => Some(std::cmp::Ordering::Less),
|
||||
(Self::Ordered { order: a }, Self::Ordered { order: b }) => b.partial_cmp(a),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub struct PromptChain {
|
||||
args: PromptArguments,
|
||||
templates: Vec<(PromptPriority, Box<dyn PromptTemplate>)>,
|
||||
}
|
||||
|
||||
impl PromptChain {
|
||||
pub fn new(
|
||||
args: PromptArguments,
|
||||
templates: Vec<(PromptPriority, Box<dyn PromptTemplate>)>,
|
||||
) -> Self {
|
||||
PromptChain { args, templates }
|
||||
}
|
||||
|
||||
pub fn generate(&self, truncate: bool) -> anyhow::Result<(String, usize)> {
|
||||
// Argsort based on Prompt Priority
|
||||
let seperator = "\n";
|
||||
let seperator_tokens = self.args.model.count_tokens(seperator)?;
|
||||
let mut sorted_indices = (0..self.templates.len()).collect::<Vec<_>>();
|
||||
sorted_indices.sort_by_key(|&i| Reverse(&self.templates[i].0));
|
||||
|
||||
// If Truncate
|
||||
let mut tokens_outstanding = if truncate {
|
||||
Some(self.args.model.capacity()? - self.args.reserved_tokens)
|
||||
} else {
|
||||
None
|
||||
};
|
||||
|
||||
let mut prompts = vec!["".to_string(); sorted_indices.len()];
|
||||
for idx in sorted_indices {
|
||||
let (_, template) = &self.templates[idx];
|
||||
|
||||
if let Some((template_prompt, prompt_token_count)) =
|
||||
template.generate(&self.args, tokens_outstanding).log_err()
|
||||
{
|
||||
if template_prompt != "" {
|
||||
prompts[idx] = template_prompt;
|
||||
|
||||
if let Some(remaining_tokens) = tokens_outstanding {
|
||||
let new_tokens = prompt_token_count + seperator_tokens;
|
||||
tokens_outstanding = if remaining_tokens > new_tokens {
|
||||
Some(remaining_tokens - new_tokens)
|
||||
} else {
|
||||
Some(0)
|
||||
};
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
prompts.retain(|x| x != "");
|
||||
|
||||
let full_prompt = prompts.join(seperator);
|
||||
let total_token_count = self.args.model.count_tokens(&full_prompt)?;
|
||||
anyhow::Ok((prompts.join(seperator), total_token_count))
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
pub(crate) mod tests {
|
||||
use crate::models::TruncationDirection;
|
||||
use crate::test::FakeLanguageModel;
|
||||
|
||||
use super::*;
|
||||
|
||||
#[test]
|
||||
pub fn test_prompt_chain() {
|
||||
struct TestPromptTemplate {}
|
||||
impl PromptTemplate for TestPromptTemplate {
|
||||
fn generate(
|
||||
&self,
|
||||
args: &PromptArguments,
|
||||
max_token_length: Option<usize>,
|
||||
) -> anyhow::Result<(String, usize)> {
|
||||
let mut content = "This is a test prompt template".to_string();
|
||||
|
||||
let mut token_count = args.model.count_tokens(&content)?;
|
||||
if let Some(max_token_length) = max_token_length {
|
||||
if token_count > max_token_length {
|
||||
content = args.model.truncate(
|
||||
&content,
|
||||
max_token_length,
|
||||
TruncationDirection::End,
|
||||
)?;
|
||||
token_count = max_token_length;
|
||||
}
|
||||
}
|
||||
|
||||
anyhow::Ok((content, token_count))
|
||||
}
|
||||
}
|
||||
|
||||
struct TestLowPriorityTemplate {}
|
||||
impl PromptTemplate for TestLowPriorityTemplate {
|
||||
fn generate(
|
||||
&self,
|
||||
args: &PromptArguments,
|
||||
max_token_length: Option<usize>,
|
||||
) -> anyhow::Result<(String, usize)> {
|
||||
let mut content = "This is a low priority test prompt template".to_string();
|
||||
|
||||
let mut token_count = args.model.count_tokens(&content)?;
|
||||
if let Some(max_token_length) = max_token_length {
|
||||
if token_count > max_token_length {
|
||||
content = args.model.truncate(
|
||||
&content,
|
||||
max_token_length,
|
||||
TruncationDirection::End,
|
||||
)?;
|
||||
token_count = max_token_length;
|
||||
}
|
||||
}
|
||||
|
||||
anyhow::Ok((content, token_count))
|
||||
}
|
||||
}
|
||||
|
||||
let model: Arc<dyn LanguageModel> = Arc::new(FakeLanguageModel { capacity: 100 });
|
||||
let args = PromptArguments {
|
||||
model: model.clone(),
|
||||
language_name: None,
|
||||
project_name: None,
|
||||
snippets: Vec::new(),
|
||||
reserved_tokens: 0,
|
||||
buffer: None,
|
||||
selected_range: None,
|
||||
user_prompt: None,
|
||||
};
|
||||
|
||||
let templates: Vec<(PromptPriority, Box<dyn PromptTemplate>)> = vec![
|
||||
(
|
||||
PromptPriority::Ordered { order: 0 },
|
||||
Box::new(TestPromptTemplate {}),
|
||||
),
|
||||
(
|
||||
PromptPriority::Ordered { order: 1 },
|
||||
Box::new(TestLowPriorityTemplate {}),
|
||||
),
|
||||
];
|
||||
let chain = PromptChain::new(args, templates);
|
||||
|
||||
let (prompt, token_count) = chain.generate(false).unwrap();
|
||||
|
||||
assert_eq!(
|
||||
prompt,
|
||||
"This is a test prompt template\nThis is a low priority test prompt template"
|
||||
.to_string()
|
||||
);
|
||||
|
||||
assert_eq!(model.count_tokens(&prompt).unwrap(), token_count);
|
||||
|
||||
// Testing with Truncation Off
|
||||
// Should ignore capacity and return all prompts
|
||||
let model: Arc<dyn LanguageModel> = Arc::new(FakeLanguageModel { capacity: 20 });
|
||||
let args = PromptArguments {
|
||||
model: model.clone(),
|
||||
language_name: None,
|
||||
project_name: None,
|
||||
snippets: Vec::new(),
|
||||
reserved_tokens: 0,
|
||||
buffer: None,
|
||||
selected_range: None,
|
||||
user_prompt: None,
|
||||
};
|
||||
|
||||
let templates: Vec<(PromptPriority, Box<dyn PromptTemplate>)> = vec![
|
||||
(
|
||||
PromptPriority::Ordered { order: 0 },
|
||||
Box::new(TestPromptTemplate {}),
|
||||
),
|
||||
(
|
||||
PromptPriority::Ordered { order: 1 },
|
||||
Box::new(TestLowPriorityTemplate {}),
|
||||
),
|
||||
];
|
||||
let chain = PromptChain::new(args, templates);
|
||||
|
||||
let (prompt, token_count) = chain.generate(false).unwrap();
|
||||
|
||||
assert_eq!(
|
||||
prompt,
|
||||
"This is a test prompt template\nThis is a low priority test prompt template"
|
||||
.to_string()
|
||||
);
|
||||
|
||||
assert_eq!(model.count_tokens(&prompt).unwrap(), token_count);
|
||||
|
||||
// Testing with Truncation Off
|
||||
// Should ignore capacity and return all prompts
|
||||
let capacity = 20;
|
||||
let model: Arc<dyn LanguageModel> = Arc::new(FakeLanguageModel { capacity });
|
||||
let args = PromptArguments {
|
||||
model: model.clone(),
|
||||
language_name: None,
|
||||
project_name: None,
|
||||
snippets: Vec::new(),
|
||||
reserved_tokens: 0,
|
||||
buffer: None,
|
||||
selected_range: None,
|
||||
user_prompt: None,
|
||||
};
|
||||
|
||||
let templates: Vec<(PromptPriority, Box<dyn PromptTemplate>)> = vec![
|
||||
(
|
||||
PromptPriority::Ordered { order: 0 },
|
||||
Box::new(TestPromptTemplate {}),
|
||||
),
|
||||
(
|
||||
PromptPriority::Ordered { order: 1 },
|
||||
Box::new(TestLowPriorityTemplate {}),
|
||||
),
|
||||
(
|
||||
PromptPriority::Ordered { order: 2 },
|
||||
Box::new(TestLowPriorityTemplate {}),
|
||||
),
|
||||
];
|
||||
let chain = PromptChain::new(args, templates);
|
||||
|
||||
let (prompt, token_count) = chain.generate(true).unwrap();
|
||||
|
||||
assert_eq!(prompt, "This is a test promp".to_string());
|
||||
assert_eq!(token_count, capacity);
|
||||
|
||||
// Change Ordering of Prompts Based on Priority
|
||||
let capacity = 120;
|
||||
let reserved_tokens = 10;
|
||||
let model: Arc<dyn LanguageModel> = Arc::new(FakeLanguageModel { capacity });
|
||||
let args = PromptArguments {
|
||||
model: model.clone(),
|
||||
language_name: None,
|
||||
project_name: None,
|
||||
snippets: Vec::new(),
|
||||
reserved_tokens,
|
||||
buffer: None,
|
||||
selected_range: None,
|
||||
user_prompt: None,
|
||||
};
|
||||
let templates: Vec<(PromptPriority, Box<dyn PromptTemplate>)> = vec![
|
||||
(
|
||||
PromptPriority::Mandatory,
|
||||
Box::new(TestLowPriorityTemplate {}),
|
||||
),
|
||||
(
|
||||
PromptPriority::Ordered { order: 0 },
|
||||
Box::new(TestPromptTemplate {}),
|
||||
),
|
||||
(
|
||||
PromptPriority::Ordered { order: 1 },
|
||||
Box::new(TestLowPriorityTemplate {}),
|
||||
),
|
||||
];
|
||||
let chain = PromptChain::new(args, templates);
|
||||
|
||||
let (prompt, token_count) = chain.generate(true).unwrap();
|
||||
|
||||
assert_eq!(
|
||||
prompt,
|
||||
"This is a low priority test prompt template\nThis is a test prompt template\nThis is a low priority test prompt "
|
||||
.to_string()
|
||||
);
|
||||
assert_eq!(token_count, capacity - reserved_tokens);
|
||||
}
|
||||
}
|
||||
164
crates/ai/src/prompts/file_context.rs
Normal file
164
crates/ai/src/prompts/file_context.rs
Normal file
@@ -0,0 +1,164 @@
|
||||
use anyhow::anyhow;
|
||||
use language::BufferSnapshot;
|
||||
use language::ToOffset;
|
||||
|
||||
use crate::models::LanguageModel;
|
||||
use crate::models::TruncationDirection;
|
||||
use crate::prompts::base::PromptArguments;
|
||||
use crate::prompts::base::PromptTemplate;
|
||||
use std::fmt::Write;
|
||||
use std::ops::Range;
|
||||
use std::sync::Arc;
|
||||
|
||||
fn retrieve_context(
|
||||
buffer: &BufferSnapshot,
|
||||
selected_range: &Option<Range<usize>>,
|
||||
model: Arc<dyn LanguageModel>,
|
||||
max_token_count: Option<usize>,
|
||||
) -> anyhow::Result<(String, usize, bool)> {
|
||||
let mut prompt = String::new();
|
||||
let mut truncated = false;
|
||||
if let Some(selected_range) = selected_range {
|
||||
let start = selected_range.start.to_offset(buffer);
|
||||
let end = selected_range.end.to_offset(buffer);
|
||||
|
||||
let start_window = buffer.text_for_range(0..start).collect::<String>();
|
||||
|
||||
let mut selected_window = String::new();
|
||||
if start == end {
|
||||
write!(selected_window, "<|START|>").unwrap();
|
||||
} else {
|
||||
write!(selected_window, "<|START|").unwrap();
|
||||
}
|
||||
|
||||
write!(
|
||||
selected_window,
|
||||
"{}",
|
||||
buffer.text_for_range(start..end).collect::<String>()
|
||||
)
|
||||
.unwrap();
|
||||
|
||||
if start != end {
|
||||
write!(selected_window, "|END|>").unwrap();
|
||||
}
|
||||
|
||||
let end_window = buffer.text_for_range(end..buffer.len()).collect::<String>();
|
||||
|
||||
if let Some(max_token_count) = max_token_count {
|
||||
let selected_tokens = model.count_tokens(&selected_window)?;
|
||||
if selected_tokens > max_token_count {
|
||||
return Err(anyhow!(
|
||||
"selected range is greater than model context window, truncation not possible"
|
||||
));
|
||||
};
|
||||
|
||||
let mut remaining_tokens = max_token_count - selected_tokens;
|
||||
let start_window_tokens = model.count_tokens(&start_window)?;
|
||||
let end_window_tokens = model.count_tokens(&end_window)?;
|
||||
let outside_tokens = start_window_tokens + end_window_tokens;
|
||||
if outside_tokens > remaining_tokens {
|
||||
let (start_goal_tokens, end_goal_tokens) =
|
||||
if start_window_tokens < end_window_tokens {
|
||||
let start_goal_tokens = (remaining_tokens / 2).min(start_window_tokens);
|
||||
remaining_tokens -= start_goal_tokens;
|
||||
let end_goal_tokens = remaining_tokens.min(end_window_tokens);
|
||||
(start_goal_tokens, end_goal_tokens)
|
||||
} else {
|
||||
let end_goal_tokens = (remaining_tokens / 2).min(end_window_tokens);
|
||||
remaining_tokens -= end_goal_tokens;
|
||||
let start_goal_tokens = remaining_tokens.min(start_window_tokens);
|
||||
(start_goal_tokens, end_goal_tokens)
|
||||
};
|
||||
|
||||
let truncated_start_window =
|
||||
model.truncate(&start_window, start_goal_tokens, TruncationDirection::Start)?;
|
||||
let truncated_end_window =
|
||||
model.truncate(&end_window, end_goal_tokens, TruncationDirection::End)?;
|
||||
writeln!(
|
||||
prompt,
|
||||
"{truncated_start_window}{selected_window}{truncated_end_window}"
|
||||
)
|
||||
.unwrap();
|
||||
truncated = true;
|
||||
} else {
|
||||
writeln!(prompt, "{start_window}{selected_window}{end_window}").unwrap();
|
||||
}
|
||||
} else {
|
||||
// If we dont have a selected range, include entire file.
|
||||
writeln!(prompt, "{}", &buffer.text()).unwrap();
|
||||
|
||||
// Dumb truncation strategy
|
||||
if let Some(max_token_count) = max_token_count {
|
||||
if model.count_tokens(&prompt)? > max_token_count {
|
||||
truncated = true;
|
||||
prompt = model.truncate(&prompt, max_token_count, TruncationDirection::End)?;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
let token_count = model.count_tokens(&prompt)?;
|
||||
anyhow::Ok((prompt, token_count, truncated))
|
||||
}
|
||||
|
||||
pub struct FileContext {}
|
||||
|
||||
impl PromptTemplate for FileContext {
|
||||
fn generate(
|
||||
&self,
|
||||
args: &PromptArguments,
|
||||
max_token_length: Option<usize>,
|
||||
) -> anyhow::Result<(String, usize)> {
|
||||
if let Some(buffer) = &args.buffer {
|
||||
let mut prompt = String::new();
|
||||
// Add Initial Preamble
|
||||
// TODO: Do we want to add the path in here?
|
||||
writeln!(
|
||||
prompt,
|
||||
"The file you are currently working on has the following content:"
|
||||
)
|
||||
.unwrap();
|
||||
|
||||
let language_name = args
|
||||
.language_name
|
||||
.clone()
|
||||
.unwrap_or("".to_string())
|
||||
.to_lowercase();
|
||||
|
||||
let (context, _, truncated) = retrieve_context(
|
||||
buffer,
|
||||
&args.selected_range,
|
||||
args.model.clone(),
|
||||
max_token_length,
|
||||
)?;
|
||||
writeln!(prompt, "```{language_name}\n{context}\n```").unwrap();
|
||||
|
||||
if truncated {
|
||||
writeln!(prompt, "Note the content has been truncated and only represents a portion of the file.").unwrap();
|
||||
}
|
||||
|
||||
if let Some(selected_range) = &args.selected_range {
|
||||
let start = selected_range.start.to_offset(buffer);
|
||||
let end = selected_range.end.to_offset(buffer);
|
||||
|
||||
if start == end {
|
||||
writeln!(prompt, "In particular, the user's cursor is currently on the '<|START|>' span in the above content, with no text selected.").unwrap();
|
||||
} else {
|
||||
writeln!(prompt, "In particular, the user has selected a section of the text between the '<|START|' and '|END|>' spans.").unwrap();
|
||||
}
|
||||
}
|
||||
|
||||
// Really dumb truncation strategy
|
||||
if let Some(max_tokens) = max_token_length {
|
||||
prompt = args
|
||||
.model
|
||||
.truncate(&prompt, max_tokens, TruncationDirection::End)?;
|
||||
}
|
||||
|
||||
let token_count = args.model.count_tokens(&prompt)?;
|
||||
anyhow::Ok((prompt, token_count))
|
||||
} else {
|
||||
Err(anyhow!("no buffer provided to retrieve file context from"))
|
||||
}
|
||||
}
|
||||
}
|
||||
99
crates/ai/src/prompts/generate.rs
Normal file
99
crates/ai/src/prompts/generate.rs
Normal file
@@ -0,0 +1,99 @@
|
||||
use crate::prompts::base::{PromptArguments, PromptFileType, PromptTemplate};
|
||||
use anyhow::anyhow;
|
||||
use std::fmt::Write;
|
||||
|
||||
/// Uppercases the first character of `s` (Unicode-aware, so a single char
/// may expand to several) and leaves the rest of the string untouched.
pub fn capitalize(s: &str) -> String {
    let mut chars = s.chars();
    if let Some(first) = chars.next() {
        let mut capitalized: String = first.to_uppercase().collect();
        capitalized.push_str(chars.as_str());
        capitalized
    } else {
        String::new()
    }
}
|
||||
|
||||
pub struct GenerateInlineContent {}
|
||||
|
||||
impl PromptTemplate for GenerateInlineContent {
|
||||
fn generate(
|
||||
&self,
|
||||
args: &PromptArguments,
|
||||
max_token_length: Option<usize>,
|
||||
) -> anyhow::Result<(String, usize)> {
|
||||
let Some(user_prompt) = &args.user_prompt else {
|
||||
return Err(anyhow!("user prompt not provided"));
|
||||
};
|
||||
|
||||
let file_type = args.get_file_type();
|
||||
let content_type = match &file_type {
|
||||
PromptFileType::Code => "code",
|
||||
PromptFileType::Text => "text",
|
||||
};
|
||||
|
||||
let mut prompt = String::new();
|
||||
|
||||
if let Some(selected_range) = &args.selected_range {
|
||||
if selected_range.start == selected_range.end {
|
||||
writeln!(
|
||||
prompt,
|
||||
"Assume the cursor is located where the `<|START|>` span is."
|
||||
)
|
||||
.unwrap();
|
||||
writeln!(
|
||||
prompt,
|
||||
"{} can't be replaced, so assume your answer will be inserted at the cursor.",
|
||||
capitalize(content_type)
|
||||
)
|
||||
.unwrap();
|
||||
writeln!(
|
||||
prompt,
|
||||
"Generate {content_type} based on the users prompt: {user_prompt}",
|
||||
)
|
||||
.unwrap();
|
||||
} else {
|
||||
writeln!(prompt, "Modify the user's selected {content_type} based upon the users prompt: '{user_prompt}'").unwrap();
|
||||
writeln!(prompt, "You must reply with only the adjusted {content_type} (within the '<|START|' and '|END|>' spans) not the entire file.").unwrap();
|
||||
writeln!(prompt, "Double check that you only return code and not the '<|START|' and '|END|'> spans").unwrap();
|
||||
}
|
||||
} else {
|
||||
writeln!(
|
||||
prompt,
|
||||
"Generate {content_type} based on the users prompt: {user_prompt}"
|
||||
)
|
||||
.unwrap();
|
||||
}
|
||||
|
||||
if let Some(language_name) = &args.language_name {
|
||||
writeln!(
|
||||
prompt,
|
||||
"Your answer MUST always and only be valid {}.",
|
||||
language_name
|
||||
)
|
||||
.unwrap();
|
||||
}
|
||||
writeln!(prompt, "Never make remarks about the output.").unwrap();
|
||||
writeln!(
|
||||
prompt,
|
||||
"Do not return anything else, except the generated {content_type}."
|
||||
)
|
||||
.unwrap();
|
||||
|
||||
match file_type {
|
||||
PromptFileType::Code => {
|
||||
// writeln!(prompt, "Always wrap your code in a Markdown block.").unwrap();
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
|
||||
// Really dumb truncation strategy
|
||||
if let Some(max_tokens) = max_token_length {
|
||||
prompt = args.model.truncate(
|
||||
&prompt,
|
||||
max_tokens,
|
||||
crate::models::TruncationDirection::End,
|
||||
)?;
|
||||
}
|
||||
|
||||
let token_count = args.model.count_tokens(&prompt)?;
|
||||
|
||||
anyhow::Ok((prompt, token_count))
|
||||
}
|
||||
}
|
||||
5
crates/ai/src/prompts/mod.rs
Normal file
5
crates/ai/src/prompts/mod.rs
Normal file
@@ -0,0 +1,5 @@
|
||||
pub mod base;
|
||||
pub mod file_context;
|
||||
pub mod generate;
|
||||
pub mod preamble;
|
||||
pub mod repository_context;
|
||||
52
crates/ai/src/prompts/preamble.rs
Normal file
52
crates/ai/src/prompts/preamble.rs
Normal file
@@ -0,0 +1,52 @@
|
||||
use crate::prompts::base::{PromptArguments, PromptFileType, PromptTemplate};
|
||||
use std::fmt::Write;
|
||||
|
||||
pub struct EngineerPreamble {}
|
||||
|
||||
impl PromptTemplate for EngineerPreamble {
|
||||
fn generate(
|
||||
&self,
|
||||
args: &PromptArguments,
|
||||
max_token_length: Option<usize>,
|
||||
) -> anyhow::Result<(String, usize)> {
|
||||
let mut prompts = Vec::new();
|
||||
|
||||
match args.get_file_type() {
|
||||
PromptFileType::Code => {
|
||||
prompts.push(format!(
|
||||
"You are an expert {}engineer.",
|
||||
args.language_name.clone().unwrap_or("".to_string()) + " "
|
||||
));
|
||||
}
|
||||
PromptFileType::Text => {
|
||||
prompts.push("You are an expert engineer.".to_string());
|
||||
}
|
||||
}
|
||||
|
||||
if let Some(project_name) = args.project_name.clone() {
|
||||
prompts.push(format!(
|
||||
"You are currently working inside the '{project_name}' project in code editor Zed."
|
||||
));
|
||||
}
|
||||
|
||||
if let Some(mut remaining_tokens) = max_token_length {
|
||||
let mut prompt = String::new();
|
||||
let mut total_count = 0;
|
||||
for prompt_piece in prompts {
|
||||
let prompt_token_count =
|
||||
args.model.count_tokens(&prompt_piece)? + args.model.count_tokens("\n")?;
|
||||
if remaining_tokens > prompt_token_count {
|
||||
writeln!(prompt, "{prompt_piece}").unwrap();
|
||||
remaining_tokens -= prompt_token_count;
|
||||
total_count += prompt_token_count;
|
||||
}
|
||||
}
|
||||
|
||||
anyhow::Ok((prompt, total_count))
|
||||
} else {
|
||||
let prompt = prompts.join("\n");
|
||||
let token_count = args.model.count_tokens(&prompt)?;
|
||||
anyhow::Ok((prompt, token_count))
|
||||
}
|
||||
}
|
||||
}
|
||||
94
crates/ai/src/prompts/repository_context.rs
Normal file
94
crates/ai/src/prompts/repository_context.rs
Normal file
@@ -0,0 +1,94 @@
|
||||
use crate::prompts::base::{PromptArguments, PromptTemplate};
|
||||
use std::fmt::Write;
|
||||
use std::{ops::Range, path::PathBuf};
|
||||
|
||||
use gpui::{AsyncAppContext, ModelHandle};
|
||||
use language::{Anchor, Buffer};
|
||||
|
||||
#[derive(Clone)]
|
||||
pub struct PromptCodeSnippet {
|
||||
path: Option<PathBuf>,
|
||||
language_name: Option<String>,
|
||||
content: String,
|
||||
}
|
||||
|
||||
impl PromptCodeSnippet {
|
||||
pub fn new(buffer: ModelHandle<Buffer>, range: Range<Anchor>, cx: &AsyncAppContext) -> Self {
|
||||
let (content, language_name, file_path) = buffer.read_with(cx, |buffer, _| {
|
||||
let snapshot = buffer.snapshot();
|
||||
let content = snapshot.text_for_range(range.clone()).collect::<String>();
|
||||
|
||||
let language_name = buffer
|
||||
.language()
|
||||
.and_then(|language| Some(language.name().to_string().to_lowercase()));
|
||||
|
||||
let file_path = buffer
|
||||
.file()
|
||||
.and_then(|file| Some(file.path().to_path_buf()));
|
||||
|
||||
(content, language_name, file_path)
|
||||
});
|
||||
|
||||
PromptCodeSnippet {
|
||||
path: file_path,
|
||||
language_name,
|
||||
content,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl ToString for PromptCodeSnippet {
|
||||
fn to_string(&self) -> String {
|
||||
let path = self
|
||||
.path
|
||||
.as_ref()
|
||||
.and_then(|path| Some(path.to_string_lossy().to_string()))
|
||||
.unwrap_or("".to_string());
|
||||
let language_name = self.language_name.clone().unwrap_or("".to_string());
|
||||
let content = self.content.clone();
|
||||
|
||||
format!("The below code snippet may be relevant from file: {path}\n```{language_name}\n{content}\n```")
|
||||
}
|
||||
}
|
||||
|
||||
pub struct RepositoryContext {}
|
||||
|
||||
impl PromptTemplate for RepositoryContext {
|
||||
fn generate(
|
||||
&self,
|
||||
args: &PromptArguments,
|
||||
max_token_length: Option<usize>,
|
||||
) -> anyhow::Result<(String, usize)> {
|
||||
const MAXIMUM_SNIPPET_TOKEN_COUNT: usize = 500;
|
||||
let template = "You are working inside a large repository, here are a few code snippets that may be useful.";
|
||||
let mut prompt = String::new();
|
||||
|
||||
let mut remaining_tokens = max_token_length.clone();
|
||||
let seperator_token_length = args.model.count_tokens("\n")?;
|
||||
for snippet in &args.snippets {
|
||||
let mut snippet_prompt = template.to_string();
|
||||
let content = snippet.to_string();
|
||||
writeln!(snippet_prompt, "{content}").unwrap();
|
||||
|
||||
let token_count = args.model.count_tokens(&snippet_prompt)?;
|
||||
if token_count <= MAXIMUM_SNIPPET_TOKEN_COUNT {
|
||||
if let Some(tokens_left) = remaining_tokens {
|
||||
if tokens_left >= token_count {
|
||||
writeln!(prompt, "{snippet_prompt}").unwrap();
|
||||
remaining_tokens = if tokens_left >= (token_count + seperator_token_length)
|
||||
{
|
||||
Some(tokens_left - token_count - seperator_token_length)
|
||||
} else {
|
||||
Some(0)
|
||||
};
|
||||
}
|
||||
} else {
|
||||
writeln!(prompt, "{snippet_prompt}").unwrap();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
let total_token_count = args.model.count_tokens(&prompt)?;
|
||||
anyhow::Ok((prompt, total_token_count))
|
||||
}
|
||||
}
|
||||
1
crates/ai/src/providers/mod.rs
Normal file
1
crates/ai/src/providers/mod.rs
Normal file
@@ -0,0 +1 @@
|
||||
pub mod open_ai;
|
||||
298
crates/ai/src/providers/open_ai/completion.rs
Normal file
298
crates/ai/src/providers/open_ai/completion.rs
Normal file
@@ -0,0 +1,298 @@
|
||||
use anyhow::{anyhow, Result};
|
||||
use futures::{
|
||||
future::BoxFuture, io::BufReader, stream::BoxStream, AsyncBufReadExt, AsyncReadExt, FutureExt,
|
||||
Stream, StreamExt,
|
||||
};
|
||||
use gpui::{executor::Background, AppContext};
|
||||
use isahc::{http::StatusCode, Request, RequestExt};
|
||||
use parking_lot::RwLock;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::{
|
||||
env,
|
||||
fmt::{self, Display},
|
||||
io,
|
||||
sync::Arc,
|
||||
};
|
||||
use util::ResultExt;
|
||||
|
||||
use crate::{
|
||||
auth::{CredentialProvider, ProviderCredential},
|
||||
completion::{CompletionProvider, CompletionRequest},
|
||||
models::LanguageModel,
|
||||
};
|
||||
|
||||
use crate::providers::open_ai::{OpenAILanguageModel, OPENAI_API_URL};
|
||||
|
||||
#[derive(Clone, Copy, Serialize, Deserialize, Debug, Eq, PartialEq)]
|
||||
#[serde(rename_all = "lowercase")]
|
||||
pub enum Role {
|
||||
User,
|
||||
Assistant,
|
||||
System,
|
||||
}
|
||||
|
||||
impl Role {
|
||||
pub fn cycle(&mut self) {
|
||||
*self = match self {
|
||||
Role::User => Role::Assistant,
|
||||
Role::Assistant => Role::System,
|
||||
Role::System => Role::User,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Display for Role {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
match self {
|
||||
Role::User => write!(f, "User"),
|
||||
Role::Assistant => write!(f, "Assistant"),
|
||||
Role::System => write!(f, "System"),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// A single chat message sent to the OpenAI chat completions API.
#[derive(Serialize, Deserialize, Debug, Eq, PartialEq)]
pub struct RequestMessage {
    /// Who authored the message (user / assistant / system).
    pub role: Role,
    /// The message text.
    pub content: String,
}

/// Request body for the OpenAI `/chat/completions` endpoint.
#[derive(Debug, Default, Serialize)]
pub struct OpenAIRequest {
    /// Model identifier sent to the API.
    pub model: String,
    /// Conversation history.
    pub messages: Vec<RequestMessage>,
    /// When true, the API streams incremental deltas as server-sent events.
    pub stream: bool,
    /// Sequences at which the model should stop generating.
    pub stop: Vec<String>,
    /// Sampling temperature.
    pub temperature: f32,
}

impl CompletionRequest for OpenAIRequest {
    /// Serializes the request to the JSON body expected by the API.
    fn data(&self) -> serde_json::Result<String> {
        serde_json::to_string(self)
    }
}

/// A message (or streamed message fragment) in an API response; both fields
/// are optional because streamed deltas may carry only one of them.
#[derive(Serialize, Deserialize, Debug, Eq, PartialEq)]
pub struct ResponseMessage {
    pub role: Option<Role>,
    pub content: Option<String>,
}

/// Token accounting reported by the API.
#[derive(Deserialize, Debug)]
pub struct OpenAIUsage {
    pub prompt_tokens: u32,
    pub completion_tokens: u32,
    pub total_tokens: u32,
}

/// One choice delta within a streamed chat completion event.
#[derive(Deserialize, Debug)]
pub struct ChatChoiceDelta {
    pub index: u32,
    pub delta: ResponseMessage,
    /// Set on a choice's final delta; `stream_completion` uses this to
    /// detect the end of the stream.
    pub finish_reason: Option<String>,
}

/// A single server-sent event from the streaming chat completions API.
#[derive(Deserialize, Debug)]
pub struct OpenAIResponseStreamEvent {
    pub id: Option<String>,
    pub object: String,
    pub created: u32,
    pub model: String,
    pub choices: Vec<ChatChoiceDelta>,
    /// Usage is optional — presumably only present on the final/non-streamed
    /// response; confirm against the API.
    pub usage: Option<OpenAIUsage>,
}
|
||||
|
||||
/// Sends a completion request to the OpenAI chat API and streams back the
/// parsed server-sent events.
///
/// Requires `credential` to be `ProviderCredential::Credentials`; any other
/// variant is an error. On HTTP 200 the response body is read line by line on
/// a background task and each `data: ` event is forwarded through an
/// unbounded channel, which is returned as the stream. Forwarding stops when
/// a choice reports a `finish_reason` or the receiver is dropped. On any
/// other status the body is read and surfaced as an error, preferring the
/// API's own error message when one can be parsed from the body.
pub async fn stream_completion(
    credential: ProviderCredential,
    executor: Arc<Background>,
    request: Box<dyn CompletionRequest>,
) -> Result<impl Stream<Item = Result<OpenAIResponseStreamEvent>>> {
    let api_key = match credential {
        ProviderCredential::Credentials { api_key } => api_key,
        _ => {
            return Err(anyhow!("no credentials provider for completion"));
        }
    };

    let (tx, rx) = futures::channel::mpsc::unbounded::<Result<OpenAIResponseStreamEvent>>();

    let json_data = request.data()?;
    let mut response = Request::post(format!("{OPENAI_API_URL}/chat/completions"))
        .header("Content-Type", "application/json")
        .header("Authorization", format!("Bearer {}", api_key))
        .body(json_data)?
        .send_async()
        .await?;

    let status = response.status();
    if status == StatusCode::OK {
        // Parse the SSE body on a background task so the caller can consume
        // events from `rx` as they arrive.
        executor
            .spawn(async move {
                let mut lines = BufReader::new(response.body_mut()).lines();

                // Extracts the JSON event from a `data: {...}` SSE line;
                // non-data lines (blank separators, etc.) yield `None`.
                fn parse_line(
                    line: Result<String, io::Error>,
                ) -> Result<Option<OpenAIResponseStreamEvent>> {
                    if let Some(data) = line?.strip_prefix("data: ") {
                        let event = serde_json::from_str(&data)?;
                        Ok(Some(event))
                    } else {
                        Ok(None)
                    }
                }

                while let Some(line) = lines.next().await {
                    if let Some(event) = parse_line(line).transpose() {
                        // A finish_reason on the last choice marks the end of
                        // the stream; check before the event is moved below.
                        let done = event.as_ref().map_or(false, |event| {
                            event
                                .choices
                                .last()
                                .map_or(false, |choice| choice.finish_reason.is_some())
                        });
                        // Receiver dropped: stop reading the body.
                        if tx.unbounded_send(event).is_err() {
                            break;
                        }

                        if done {
                            break;
                        }
                    }
                }

                anyhow::Ok(())
            })
            .detach();

        Ok(rx)
    } else {
        let mut body = String::new();
        response.body_mut().read_to_string(&mut body).await?;

        // Minimal shape of OpenAI's error payload, used only to surface a
        // readable message.
        #[derive(Deserialize)]
        struct OpenAIResponse {
            error: OpenAIError,
        }

        #[derive(Deserialize)]
        struct OpenAIError {
            message: String,
        }

        match serde_json::from_str::<OpenAIResponse>(&body) {
            Ok(response) if !response.error.message.is_empty() => Err(anyhow!(
                "Failed to connect to OpenAI API: {}",
                response.error.message,
            )),

            _ => Err(anyhow!(
                "Failed to connect to OpenAI API: {} {}",
                response.status(),
                body,
            )),
        }
    }
}
|
||||
|
||||
#[derive(Clone)]
|
||||
pub struct OpenAICompletionProvider {
|
||||
model: OpenAILanguageModel,
|
||||
credential: Arc<RwLock<ProviderCredential>>,
|
||||
executor: Arc<Background>,
|
||||
}
|
||||
|
||||
impl OpenAICompletionProvider {
|
||||
pub fn new(model_name: &str, executor: Arc<Background>) -> Self {
|
||||
let model = OpenAILanguageModel::load(model_name);
|
||||
let credential = Arc::new(RwLock::new(ProviderCredential::NoCredentials));
|
||||
Self {
|
||||
model,
|
||||
credential,
|
||||
executor,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl CredentialProvider for OpenAICompletionProvider {
|
||||
fn has_credentials(&self) -> bool {
|
||||
match *self.credential.read() {
|
||||
ProviderCredential::Credentials { .. } => true,
|
||||
_ => false,
|
||||
}
|
||||
}
|
||||
fn retrieve_credentials(&self, cx: &AppContext) -> ProviderCredential {
|
||||
let mut credential = self.credential.write();
|
||||
match *credential {
|
||||
ProviderCredential::Credentials { .. } => {
|
||||
return credential.clone();
|
||||
}
|
||||
_ => {
|
||||
if let Ok(api_key) = env::var("OPENAI_API_KEY") {
|
||||
*credential = ProviderCredential::Credentials { api_key };
|
||||
} else if let Some((_, api_key)) = cx
|
||||
.platform()
|
||||
.read_credentials(OPENAI_API_URL)
|
||||
.log_err()
|
||||
.flatten()
|
||||
{
|
||||
if let Some(api_key) = String::from_utf8(api_key).log_err() {
|
||||
*credential = ProviderCredential::Credentials { api_key };
|
||||
}
|
||||
} else {
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
credential.clone()
|
||||
}
|
||||
|
||||
fn save_credentials(&self, cx: &AppContext, credential: ProviderCredential) {
|
||||
match credential.clone() {
|
||||
ProviderCredential::Credentials { api_key } => {
|
||||
cx.platform()
|
||||
.write_credentials(OPENAI_API_URL, "Bearer", api_key.as_bytes())
|
||||
.log_err();
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
|
||||
*self.credential.write() = credential;
|
||||
}
|
||||
fn delete_credentials(&self, cx: &AppContext) {
|
||||
cx.platform().delete_credentials(OPENAI_API_URL).log_err();
|
||||
*self.credential.write() = ProviderCredential::NoCredentials;
|
||||
}
|
||||
}
|
||||
|
||||
impl CompletionProvider for OpenAICompletionProvider {
    /// Returns a boxed clone of the underlying language model.
    fn base_model(&self) -> Box<dyn LanguageModel> {
        let model: Box<dyn LanguageModel> = Box::new(self.model.clone());
        model
    }
    /// Streams completion text for `prompt`, yielding each content delta as
    /// it arrives from the API.
    fn complete(
        &self,
        prompt: Box<dyn CompletionRequest>,
    ) -> BoxFuture<'static, Result<BoxStream<'static, Result<String>>>> {
        // Currently the CompletionRequest for OpenAI, includes a 'model' parameter
        // This means that the model is determined by the CompletionRequest and not the CompletionProvider,
        // which is currently model based, due to the language model.
        // At some point in the future we should rectify this.
        let credential = self.credential.read().clone();
        let request = stream_completion(credential, self.executor.clone(), prompt);
        async move {
            let response = request.await?;
            // Map each event to the content of its last choice; events with
            // no content are silently dropped (the `?` inside `Some(...)`
            // yields None, which filter_map discards). Errors pass through.
            let stream = response
                .filter_map(|response| async move {
                    match response {
                        Ok(mut response) => Some(Ok(response.choices.pop()?.delta.content?)),
                        Err(error) => Some(Err(error)),
                    }
                })
                .boxed();
            Ok(stream)
        }
        .boxed()
    }
    /// Clones this provider behind the trait object.
    fn box_clone(&self) -> Box<dyn CompletionProvider> {
        Box::new((*self).clone())
    }
}
|
||||
@@ -2,17 +2,14 @@ use anyhow::{anyhow, Result};
|
||||
use async_trait::async_trait;
|
||||
use futures::AsyncReadExt;
|
||||
use gpui::executor::Background;
|
||||
use gpui::serde_json;
|
||||
use gpui::{serde_json, AppContext};
|
||||
use isahc::http::StatusCode;
|
||||
use isahc::prelude::Configurable;
|
||||
use isahc::{AsyncBody, Response};
|
||||
use lazy_static::lazy_static;
|
||||
use ordered_float::OrderedFloat;
|
||||
use parking_lot::Mutex;
|
||||
use parking_lot::{Mutex, RwLock};
|
||||
use parse_duration::parse;
|
||||
use postage::watch;
|
||||
use rusqlite::types::{FromSql, FromSqlResult, ToSqlOutput, ValueRef};
|
||||
use rusqlite::ToSql;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::env;
|
||||
use std::ops::Add;
|
||||
@@ -20,70 +17,23 @@ use std::sync::Arc;
|
||||
use std::time::{Duration, Instant};
|
||||
use tiktoken_rs::{cl100k_base, CoreBPE};
|
||||
use util::http::{HttpClient, Request};
|
||||
use util::ResultExt;
|
||||
|
||||
use crate::auth::{CredentialProvider, ProviderCredential};
|
||||
use crate::embedding::{Embedding, EmbeddingProvider};
|
||||
use crate::models::LanguageModel;
|
||||
use crate::providers::open_ai::OpenAILanguageModel;
|
||||
|
||||
use crate::providers::open_ai::OPENAI_API_URL;
|
||||
|
||||
lazy_static! {
|
||||
static ref OPENAI_API_KEY: Option<String> = env::var("OPENAI_API_KEY").ok();
|
||||
static ref OPENAI_BPE_TOKENIZER: CoreBPE = cl100k_base().unwrap();
|
||||
}
|
||||
|
||||
#[derive(Debug, PartialEq, Clone)]
|
||||
pub struct Embedding(Vec<f32>);
|
||||
|
||||
impl From<Vec<f32>> for Embedding {
|
||||
fn from(value: Vec<f32>) -> Self {
|
||||
Embedding(value)
|
||||
}
|
||||
}
|
||||
|
||||
impl Embedding {
|
||||
pub fn similarity(&self, other: &Self) -> OrderedFloat<f32> {
|
||||
let len = self.0.len();
|
||||
assert_eq!(len, other.0.len());
|
||||
|
||||
let mut result = 0.0;
|
||||
unsafe {
|
||||
matrixmultiply::sgemm(
|
||||
1,
|
||||
len,
|
||||
1,
|
||||
1.0,
|
||||
self.0.as_ptr(),
|
||||
len as isize,
|
||||
1,
|
||||
other.0.as_ptr(),
|
||||
1,
|
||||
len as isize,
|
||||
0.0,
|
||||
&mut result as *mut f32,
|
||||
1,
|
||||
1,
|
||||
);
|
||||
}
|
||||
OrderedFloat(result)
|
||||
}
|
||||
}
|
||||
|
||||
impl FromSql for Embedding {
|
||||
fn column_result(value: ValueRef) -> FromSqlResult<Self> {
|
||||
let bytes = value.as_blob()?;
|
||||
let embedding: Result<Vec<f32>, Box<bincode::ErrorKind>> = bincode::deserialize(bytes);
|
||||
if embedding.is_err() {
|
||||
return Err(rusqlite::types::FromSqlError::Other(embedding.unwrap_err()));
|
||||
}
|
||||
Ok(Embedding(embedding.unwrap()))
|
||||
}
|
||||
}
|
||||
|
||||
impl ToSql for Embedding {
|
||||
fn to_sql(&self) -> rusqlite::Result<ToSqlOutput> {
|
||||
let bytes = bincode::serialize(&self.0)
|
||||
.map_err(|err| rusqlite::Error::ToSqlConversionFailure(Box::new(err)))?;
|
||||
Ok(ToSqlOutput::Owned(rusqlite::types::Value::Blob(bytes)))
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone)]
|
||||
pub struct OpenAIEmbeddings {
|
||||
pub struct OpenAIEmbeddingProvider {
|
||||
model: OpenAILanguageModel,
|
||||
credential: Arc<RwLock<ProviderCredential>>,
|
||||
pub client: Arc<dyn HttpClient>,
|
||||
pub executor: Arc<Background>,
|
||||
rate_limit_count_rx: watch::Receiver<Option<Instant>>,
|
||||
@@ -115,59 +65,17 @@ struct OpenAIEmbeddingUsage {
|
||||
total_tokens: usize,
|
||||
}
|
||||
|
||||
#[async_trait]
|
||||
pub trait EmbeddingProvider: Sync + Send {
|
||||
fn is_authenticated(&self) -> bool;
|
||||
async fn embed_batch(&self, spans: Vec<String>) -> Result<Vec<Embedding>>;
|
||||
fn max_tokens_per_batch(&self) -> usize;
|
||||
fn truncate(&self, span: &str) -> (String, usize);
|
||||
fn rate_limit_expiration(&self) -> Option<Instant>;
|
||||
}
|
||||
|
||||
pub struct DummyEmbeddings {}
|
||||
|
||||
#[async_trait]
|
||||
impl EmbeddingProvider for DummyEmbeddings {
|
||||
fn is_authenticated(&self) -> bool {
|
||||
true
|
||||
}
|
||||
fn rate_limit_expiration(&self) -> Option<Instant> {
|
||||
None
|
||||
}
|
||||
async fn embed_batch(&self, spans: Vec<String>) -> Result<Vec<Embedding>> {
|
||||
// 1024 is the OpenAI Embeddings size for ada models.
|
||||
// the model we will likely be starting with.
|
||||
let dummy_vec = Embedding::from(vec![0.32 as f32; 1536]);
|
||||
return Ok(vec![dummy_vec; spans.len()]);
|
||||
}
|
||||
|
||||
fn max_tokens_per_batch(&self) -> usize {
|
||||
OPENAI_INPUT_LIMIT
|
||||
}
|
||||
|
||||
fn truncate(&self, span: &str) -> (String, usize) {
|
||||
let mut tokens = OPENAI_BPE_TOKENIZER.encode_with_special_tokens(span);
|
||||
let token_count = tokens.len();
|
||||
let output = if token_count > OPENAI_INPUT_LIMIT {
|
||||
tokens.truncate(OPENAI_INPUT_LIMIT);
|
||||
let new_input = OPENAI_BPE_TOKENIZER.decode(tokens.clone());
|
||||
new_input.ok().unwrap_or_else(|| span.to_string())
|
||||
} else {
|
||||
span.to_string()
|
||||
};
|
||||
|
||||
(output, tokens.len())
|
||||
}
|
||||
}
|
||||
|
||||
const OPENAI_INPUT_LIMIT: usize = 8190;
|
||||
|
||||
impl OpenAIEmbeddings {
|
||||
impl OpenAIEmbeddingProvider {
|
||||
pub fn new(client: Arc<dyn HttpClient>, executor: Arc<Background>) -> Self {
|
||||
let (rate_limit_count_tx, rate_limit_count_rx) = watch::channel_with(None);
|
||||
let rate_limit_count_tx = Arc::new(Mutex::new(rate_limit_count_tx));
|
||||
|
||||
OpenAIEmbeddings {
|
||||
let model = OpenAILanguageModel::load("text-embedding-ada-002");
|
||||
let credential = Arc::new(RwLock::new(ProviderCredential::NoCredentials));
|
||||
|
||||
OpenAIEmbeddingProvider {
|
||||
model,
|
||||
credential,
|
||||
client,
|
||||
executor,
|
||||
rate_limit_count_rx,
|
||||
@@ -175,6 +83,13 @@ impl OpenAIEmbeddings {
|
||||
}
|
||||
}
|
||||
|
||||
fn get_api_key(&self) -> Result<String> {
|
||||
match self.credential.read().clone() {
|
||||
ProviderCredential::Credentials { api_key } => Ok(api_key),
|
||||
_ => Err(anyhow!("api credentials not provided")),
|
||||
}
|
||||
}
|
||||
|
||||
fn resolve_rate_limit(&self) {
|
||||
let reset_time = *self.rate_limit_count_tx.lock().borrow();
|
||||
|
||||
@@ -231,11 +146,64 @@ impl OpenAIEmbeddings {
|
||||
}
|
||||
}
|
||||
|
||||
#[async_trait]
|
||||
impl EmbeddingProvider for OpenAIEmbeddings {
|
||||
fn is_authenticated(&self) -> bool {
|
||||
OPENAI_API_KEY.as_ref().is_some()
|
||||
impl CredentialProvider for OpenAIEmbeddingProvider {
|
||||
fn has_credentials(&self) -> bool {
|
||||
match *self.credential.read() {
|
||||
ProviderCredential::Credentials { .. } => true,
|
||||
_ => false,
|
||||
}
|
||||
}
|
||||
fn retrieve_credentials(&self, cx: &AppContext) -> ProviderCredential {
|
||||
let mut credential = self.credential.write();
|
||||
match *credential {
|
||||
ProviderCredential::Credentials { .. } => {
|
||||
return credential.clone();
|
||||
}
|
||||
_ => {
|
||||
if let Ok(api_key) = env::var("OPENAI_API_KEY") {
|
||||
*credential = ProviderCredential::Credentials { api_key };
|
||||
} else if let Some((_, api_key)) = cx
|
||||
.platform()
|
||||
.read_credentials(OPENAI_API_URL)
|
||||
.log_err()
|
||||
.flatten()
|
||||
{
|
||||
if let Some(api_key) = String::from_utf8(api_key).log_err() {
|
||||
*credential = ProviderCredential::Credentials { api_key };
|
||||
}
|
||||
} else {
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
credential.clone()
|
||||
}
|
||||
|
||||
fn save_credentials(&self, cx: &AppContext, credential: ProviderCredential) {
|
||||
match credential.clone() {
|
||||
ProviderCredential::Credentials { api_key } => {
|
||||
cx.platform()
|
||||
.write_credentials(OPENAI_API_URL, "Bearer", api_key.as_bytes())
|
||||
.log_err();
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
|
||||
*self.credential.write() = credential;
|
||||
}
|
||||
fn delete_credentials(&self, cx: &AppContext) {
|
||||
cx.platform().delete_credentials(OPENAI_API_URL).log_err();
|
||||
*self.credential.write() = ProviderCredential::NoCredentials;
|
||||
}
|
||||
}
|
||||
|
||||
#[async_trait]
|
||||
impl EmbeddingProvider for OpenAIEmbeddingProvider {
|
||||
fn base_model(&self) -> Box<dyn LanguageModel> {
|
||||
let model: Box<dyn LanguageModel> = Box::new(self.model.clone());
|
||||
model
|
||||
}
|
||||
|
||||
fn max_tokens_per_batch(&self) -> usize {
|
||||
50000
|
||||
}
|
||||
@@ -243,28 +211,12 @@ impl EmbeddingProvider for OpenAIEmbeddings {
|
||||
fn rate_limit_expiration(&self) -> Option<Instant> {
|
||||
*self.rate_limit_count_rx.borrow()
|
||||
}
|
||||
fn truncate(&self, span: &str) -> (String, usize) {
|
||||
let mut tokens = OPENAI_BPE_TOKENIZER.encode_with_special_tokens(span);
|
||||
let output = if tokens.len() > OPENAI_INPUT_LIMIT {
|
||||
tokens.truncate(OPENAI_INPUT_LIMIT);
|
||||
OPENAI_BPE_TOKENIZER
|
||||
.decode(tokens.clone())
|
||||
.ok()
|
||||
.unwrap_or_else(|| span.to_string())
|
||||
} else {
|
||||
span.to_string()
|
||||
};
|
||||
|
||||
(output, tokens.len())
|
||||
}
|
||||
|
||||
async fn embed_batch(&self, spans: Vec<String>) -> Result<Vec<Embedding>> {
|
||||
const BACKOFF_SECONDS: [usize; 4] = [3, 5, 15, 45];
|
||||
const MAX_RETRIES: usize = 4;
|
||||
|
||||
let api_key = OPENAI_API_KEY
|
||||
.as_ref()
|
||||
.ok_or_else(|| anyhow!("no api key"))?;
|
||||
let api_key = self.get_api_key()?;
|
||||
|
||||
let mut request_number = 0;
|
||||
let mut rate_limiting = false;
|
||||
@@ -273,11 +225,12 @@ impl EmbeddingProvider for OpenAIEmbeddings {
|
||||
while request_number < MAX_RETRIES {
|
||||
response = self
|
||||
.send_request(
|
||||
api_key,
|
||||
&api_key,
|
||||
spans.iter().map(|x| &**x).collect(),
|
||||
request_timeout,
|
||||
)
|
||||
.await?;
|
||||
|
||||
request_number += 1;
|
||||
|
||||
match response.status() {
|
||||
@@ -351,49 +304,3 @@ impl EmbeddingProvider for OpenAIEmbeddings {
|
||||
Err(anyhow!("openai max retries"))
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
use rand::prelude::*;
|
||||
|
||||
#[gpui::test]
|
||||
fn test_similarity(mut rng: StdRng) {
|
||||
assert_eq!(
|
||||
Embedding::from(vec![1., 0., 0., 0., 0.])
|
||||
.similarity(&Embedding::from(vec![0., 1., 0., 0., 0.])),
|
||||
0.
|
||||
);
|
||||
assert_eq!(
|
||||
Embedding::from(vec![2., 0., 0., 0., 0.])
|
||||
.similarity(&Embedding::from(vec![3., 1., 0., 0., 0.])),
|
||||
6.
|
||||
);
|
||||
|
||||
for _ in 0..100 {
|
||||
let size = 1536;
|
||||
let mut a = vec![0.; size];
|
||||
let mut b = vec![0.; size];
|
||||
for (a, b) in a.iter_mut().zip(b.iter_mut()) {
|
||||
*a = rng.gen();
|
||||
*b = rng.gen();
|
||||
}
|
||||
let a = Embedding::from(a);
|
||||
let b = Embedding::from(b);
|
||||
|
||||
assert_eq!(
|
||||
round_to_decimals(a.similarity(&b), 1),
|
||||
round_to_decimals(reference_dot(&a.0, &b.0), 1)
|
||||
);
|
||||
}
|
||||
|
||||
fn round_to_decimals(n: OrderedFloat<f32>, decimal_places: i32) -> f32 {
|
||||
let factor = (10.0 as f32).powi(decimal_places);
|
||||
(n * factor).round() / factor
|
||||
}
|
||||
|
||||
fn reference_dot(a: &[f32], b: &[f32]) -> OrderedFloat<f32> {
|
||||
OrderedFloat(a.iter().zip(b.iter()).map(|(a, b)| a * b).sum())
|
||||
}
|
||||
}
|
||||
}
|
||||
9
crates/ai/src/providers/open_ai/mod.rs
Normal file
9
crates/ai/src/providers/open_ai/mod.rs
Normal file
@@ -0,0 +1,9 @@
|
||||
pub mod completion;
|
||||
pub mod embedding;
|
||||
pub mod model;
|
||||
|
||||
pub use completion::*;
|
||||
pub use embedding::*;
|
||||
pub use model::OpenAILanguageModel;
|
||||
|
||||
pub const OPENAI_API_URL: &'static str = "https://api.openai.com/v1";
|
||||
57
crates/ai/src/providers/open_ai/model.rs
Normal file
57
crates/ai/src/providers/open_ai/model.rs
Normal file
@@ -0,0 +1,57 @@
|
||||
use anyhow::anyhow;
|
||||
use tiktoken_rs::CoreBPE;
|
||||
use util::ResultExt;
|
||||
|
||||
use crate::models::{LanguageModel, TruncationDirection};
|
||||
|
||||
#[derive(Clone)]
|
||||
pub struct OpenAILanguageModel {
|
||||
name: String,
|
||||
bpe: Option<CoreBPE>,
|
||||
}
|
||||
|
||||
impl OpenAILanguageModel {
|
||||
pub fn load(model_name: &str) -> Self {
|
||||
let bpe = tiktoken_rs::get_bpe_from_model(model_name).log_err();
|
||||
OpenAILanguageModel {
|
||||
name: model_name.to_string(),
|
||||
bpe,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl LanguageModel for OpenAILanguageModel {
|
||||
fn name(&self) -> String {
|
||||
self.name.clone()
|
||||
}
|
||||
fn count_tokens(&self, content: &str) -> anyhow::Result<usize> {
|
||||
if let Some(bpe) = &self.bpe {
|
||||
anyhow::Ok(bpe.encode_with_special_tokens(content).len())
|
||||
} else {
|
||||
Err(anyhow!("bpe for open ai model was not retrieved"))
|
||||
}
|
||||
}
|
||||
fn truncate(
|
||||
&self,
|
||||
content: &str,
|
||||
length: usize,
|
||||
direction: TruncationDirection,
|
||||
) -> anyhow::Result<String> {
|
||||
if let Some(bpe) = &self.bpe {
|
||||
let tokens = bpe.encode_with_special_tokens(content);
|
||||
if tokens.len() > length {
|
||||
match direction {
|
||||
TruncationDirection::End => bpe.decode(tokens[..length].to_vec()),
|
||||
TruncationDirection::Start => bpe.decode(tokens[length..].to_vec()),
|
||||
}
|
||||
} else {
|
||||
bpe.decode(tokens)
|
||||
}
|
||||
} else {
|
||||
Err(anyhow!("bpe for open ai model was not retrieved"))
|
||||
}
|
||||
}
|
||||
fn capacity(&self) -> anyhow::Result<usize> {
|
||||
anyhow::Ok(tiktoken_rs::model::get_context_size(&self.name))
|
||||
}
|
||||
}
|
||||
11
crates/ai/src/providers/open_ai/new.rs
Normal file
11
crates/ai/src/providers/open_ai/new.rs
Normal file
@@ -0,0 +1,11 @@
|
||||
pub trait LanguageModel {
|
||||
fn name(&self) -> String;
|
||||
fn count_tokens(&self, content: &str) -> anyhow::Result<usize>;
|
||||
fn truncate(
|
||||
&self,
|
||||
content: &str,
|
||||
length: usize,
|
||||
direction: TruncationDirection,
|
||||
) -> anyhow::Result<String>;
|
||||
fn capacity(&self) -> anyhow::Result<usize>;
|
||||
}
|
||||
191
crates/ai/src/test.rs
Normal file
191
crates/ai/src/test.rs
Normal file
@@ -0,0 +1,191 @@
|
||||
use std::{
|
||||
sync::atomic::{self, AtomicUsize, Ordering},
|
||||
time::Instant,
|
||||
};
|
||||
|
||||
use async_trait::async_trait;
|
||||
use futures::{channel::mpsc, future::BoxFuture, stream::BoxStream, FutureExt, StreamExt};
|
||||
use gpui::AppContext;
|
||||
use parking_lot::Mutex;
|
||||
|
||||
use crate::{
|
||||
auth::{CredentialProvider, ProviderCredential},
|
||||
completion::{CompletionProvider, CompletionRequest},
|
||||
embedding::{Embedding, EmbeddingProvider},
|
||||
models::{LanguageModel, TruncationDirection},
|
||||
};
|
||||
|
||||
#[derive(Clone)]
|
||||
pub struct FakeLanguageModel {
|
||||
pub capacity: usize,
|
||||
}
|
||||
|
||||
impl LanguageModel for FakeLanguageModel {
|
||||
fn name(&self) -> String {
|
||||
"dummy".to_string()
|
||||
}
|
||||
fn count_tokens(&self, content: &str) -> anyhow::Result<usize> {
|
||||
anyhow::Ok(content.chars().collect::<Vec<char>>().len())
|
||||
}
|
||||
fn truncate(
|
||||
&self,
|
||||
content: &str,
|
||||
length: usize,
|
||||
direction: TruncationDirection,
|
||||
) -> anyhow::Result<String> {
|
||||
println!("TRYING TO TRUNCATE: {:?}", length.clone());
|
||||
|
||||
if length > self.count_tokens(content)? {
|
||||
println!("NOT TRUNCATING");
|
||||
return anyhow::Ok(content.to_string());
|
||||
}
|
||||
|
||||
anyhow::Ok(match direction {
|
||||
TruncationDirection::End => content.chars().collect::<Vec<char>>()[..length]
|
||||
.into_iter()
|
||||
.collect::<String>(),
|
||||
TruncationDirection::Start => content.chars().collect::<Vec<char>>()[length..]
|
||||
.into_iter()
|
||||
.collect::<String>(),
|
||||
})
|
||||
}
|
||||
fn capacity(&self) -> anyhow::Result<usize> {
|
||||
anyhow::Ok(self.capacity)
|
||||
}
|
||||
}
|
||||
|
||||
pub struct FakeEmbeddingProvider {
|
||||
pub embedding_count: AtomicUsize,
|
||||
}
|
||||
|
||||
impl Clone for FakeEmbeddingProvider {
|
||||
fn clone(&self) -> Self {
|
||||
FakeEmbeddingProvider {
|
||||
embedding_count: AtomicUsize::new(self.embedding_count.load(Ordering::SeqCst)),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Default for FakeEmbeddingProvider {
|
||||
fn default() -> Self {
|
||||
FakeEmbeddingProvider {
|
||||
embedding_count: AtomicUsize::default(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl FakeEmbeddingProvider {
|
||||
pub fn embedding_count(&self) -> usize {
|
||||
self.embedding_count.load(atomic::Ordering::SeqCst)
|
||||
}
|
||||
|
||||
pub fn embed_sync(&self, span: &str) -> Embedding {
|
||||
let mut result = vec![1.0; 26];
|
||||
for letter in span.chars() {
|
||||
let letter = letter.to_ascii_lowercase();
|
||||
if letter as u32 >= 'a' as u32 {
|
||||
let ix = (letter as u32) - ('a' as u32);
|
||||
if ix < 26 {
|
||||
result[ix as usize] += 1.0;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
let norm = result.iter().map(|x| x * x).sum::<f32>().sqrt();
|
||||
for x in &mut result {
|
||||
*x /= norm;
|
||||
}
|
||||
|
||||
result.into()
|
||||
}
|
||||
}
|
||||
|
||||
impl CredentialProvider for FakeEmbeddingProvider {
|
||||
fn has_credentials(&self) -> bool {
|
||||
true
|
||||
}
|
||||
fn retrieve_credentials(&self, _cx: &AppContext) -> ProviderCredential {
|
||||
ProviderCredential::NotNeeded
|
||||
}
|
||||
fn save_credentials(&self, _cx: &AppContext, _credential: ProviderCredential) {}
|
||||
fn delete_credentials(&self, _cx: &AppContext) {}
|
||||
}
|
||||
|
||||
#[async_trait]
|
||||
impl EmbeddingProvider for FakeEmbeddingProvider {
|
||||
fn base_model(&self) -> Box<dyn LanguageModel> {
|
||||
Box::new(FakeLanguageModel { capacity: 1000 })
|
||||
}
|
||||
fn max_tokens_per_batch(&self) -> usize {
|
||||
1000
|
||||
}
|
||||
|
||||
fn rate_limit_expiration(&self) -> Option<Instant> {
|
||||
None
|
||||
}
|
||||
|
||||
async fn embed_batch(&self, spans: Vec<String>) -> anyhow::Result<Vec<Embedding>> {
|
||||
self.embedding_count
|
||||
.fetch_add(spans.len(), atomic::Ordering::SeqCst);
|
||||
|
||||
anyhow::Ok(spans.iter().map(|span| self.embed_sync(span)).collect())
|
||||
}
|
||||
}
|
||||
|
||||
pub struct FakeCompletionProvider {
|
||||
last_completion_tx: Mutex<Option<mpsc::Sender<String>>>,
|
||||
}
|
||||
|
||||
impl Clone for FakeCompletionProvider {
|
||||
fn clone(&self) -> Self {
|
||||
Self {
|
||||
last_completion_tx: Mutex::new(None),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl FakeCompletionProvider {
|
||||
pub fn new() -> Self {
|
||||
Self {
|
||||
last_completion_tx: Mutex::new(None),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn send_completion(&self, completion: impl Into<String>) {
|
||||
let mut tx = self.last_completion_tx.lock();
|
||||
tx.as_mut().unwrap().try_send(completion.into()).unwrap();
|
||||
}
|
||||
|
||||
pub fn finish_completion(&self) {
|
||||
self.last_completion_tx.lock().take().unwrap();
|
||||
}
|
||||
}
|
||||
|
||||
impl CredentialProvider for FakeCompletionProvider {
|
||||
fn has_credentials(&self) -> bool {
|
||||
true
|
||||
}
|
||||
fn retrieve_credentials(&self, _cx: &AppContext) -> ProviderCredential {
|
||||
ProviderCredential::NotNeeded
|
||||
}
|
||||
fn save_credentials(&self, _cx: &AppContext, _credential: ProviderCredential) {}
|
||||
fn delete_credentials(&self, _cx: &AppContext) {}
|
||||
}
|
||||
|
||||
impl CompletionProvider for FakeCompletionProvider {
|
||||
fn base_model(&self) -> Box<dyn LanguageModel> {
|
||||
let model: Box<dyn LanguageModel> = Box::new(FakeLanguageModel { capacity: 8190 });
|
||||
model
|
||||
}
|
||||
fn complete(
|
||||
&self,
|
||||
_prompt: Box<dyn CompletionRequest>,
|
||||
) -> BoxFuture<'static, anyhow::Result<BoxStream<'static, anyhow::Result<String>>>> {
|
||||
let (tx, rx) = mpsc::channel(1);
|
||||
*self.last_completion_tx.lock() = Some(tx);
|
||||
async move { Ok(rx.map(|rx| Ok(rx)).boxed()) }.boxed()
|
||||
}
|
||||
fn box_clone(&self) -> Box<dyn CompletionProvider> {
|
||||
Box::new((*self).clone())
|
||||
}
|
||||
}
|
||||
38
crates/ai2/Cargo.toml
Normal file
38
crates/ai2/Cargo.toml
Normal file
@@ -0,0 +1,38 @@
|
||||
[package]
|
||||
name = "ai2"
|
||||
version = "0.1.0"
|
||||
edition = "2021"
|
||||
publish = false
|
||||
|
||||
[lib]
|
||||
path = "src/ai2.rs"
|
||||
doctest = false
|
||||
|
||||
[features]
|
||||
test-support = []
|
||||
|
||||
[dependencies]
|
||||
gpui2 = { path = "../gpui2" }
|
||||
util = { path = "../util" }
|
||||
language2 = { path = "../language2" }
|
||||
async-trait.workspace = true
|
||||
anyhow.workspace = true
|
||||
futures.workspace = true
|
||||
lazy_static.workspace = true
|
||||
ordered-float.workspace = true
|
||||
parking_lot.workspace = true
|
||||
isahc.workspace = true
|
||||
regex.workspace = true
|
||||
serde.workspace = true
|
||||
serde_json.workspace = true
|
||||
postage.workspace = true
|
||||
rand.workspace = true
|
||||
log.workspace = true
|
||||
parse_duration = "2.1.1"
|
||||
tiktoken-rs = "0.5.0"
|
||||
matrixmultiply = "0.3.7"
|
||||
rusqlite = { version = "0.29.0", features = ["blob", "array", "modern_sqlite"] }
|
||||
bincode = "1.3.3"
|
||||
|
||||
[dev-dependencies]
|
||||
gpui2 = { path = "../gpui2", features = ["test-support"] }
|
||||
8
crates/ai2/src/ai2.rs
Normal file
8
crates/ai2/src/ai2.rs
Normal file
@@ -0,0 +1,8 @@
|
||||
pub mod auth;
|
||||
pub mod completion;
|
||||
pub mod embedding;
|
||||
pub mod models;
|
||||
pub mod prompts;
|
||||
pub mod providers;
|
||||
#[cfg(any(test, feature = "test-support"))]
|
||||
pub mod test;
|
||||
17
crates/ai2/src/auth.rs
Normal file
17
crates/ai2/src/auth.rs
Normal file
@@ -0,0 +1,17 @@
|
||||
use async_trait::async_trait;
|
||||
use gpui2::AppContext;
|
||||
|
||||
#[derive(Clone, Debug)]
|
||||
pub enum ProviderCredential {
|
||||
Credentials { api_key: String },
|
||||
NoCredentials,
|
||||
NotNeeded,
|
||||
}
|
||||
|
||||
#[async_trait]
|
||||
pub trait CredentialProvider: Send + Sync {
|
||||
fn has_credentials(&self) -> bool;
|
||||
async fn retrieve_credentials(&self, cx: &mut AppContext) -> ProviderCredential;
|
||||
async fn save_credentials(&self, cx: &mut AppContext, credential: ProviderCredential);
|
||||
async fn delete_credentials(&self, cx: &mut AppContext);
|
||||
}
|
||||
23
crates/ai2/src/completion.rs
Normal file
23
crates/ai2/src/completion.rs
Normal file
@@ -0,0 +1,23 @@
|
||||
use anyhow::Result;
|
||||
use futures::{future::BoxFuture, stream::BoxStream};
|
||||
|
||||
use crate::{auth::CredentialProvider, models::LanguageModel};
|
||||
|
||||
pub trait CompletionRequest: Send + Sync {
|
||||
fn data(&self) -> serde_json::Result<String>;
|
||||
}
|
||||
|
||||
pub trait CompletionProvider: CredentialProvider {
|
||||
fn base_model(&self) -> Box<dyn LanguageModel>;
|
||||
fn complete(
|
||||
&self,
|
||||
prompt: Box<dyn CompletionRequest>,
|
||||
) -> BoxFuture<'static, Result<BoxStream<'static, Result<String>>>>;
|
||||
fn box_clone(&self) -> Box<dyn CompletionProvider>;
|
||||
}
|
||||
|
||||
impl Clone for Box<dyn CompletionProvider> {
|
||||
fn clone(&self) -> Box<dyn CompletionProvider> {
|
||||
self.box_clone()
|
||||
}
|
||||
}
|
||||
123
crates/ai2/src/embedding.rs
Normal file
123
crates/ai2/src/embedding.rs
Normal file
@@ -0,0 +1,123 @@
|
||||
use std::time::Instant;
|
||||
|
||||
use anyhow::Result;
|
||||
use async_trait::async_trait;
|
||||
use ordered_float::OrderedFloat;
|
||||
use rusqlite::types::{FromSql, FromSqlResult, ToSqlOutput, ValueRef};
|
||||
use rusqlite::ToSql;
|
||||
|
||||
use crate::auth::CredentialProvider;
|
||||
use crate::models::LanguageModel;
|
||||
|
||||
#[derive(Debug, PartialEq, Clone)]
|
||||
pub struct Embedding(pub Vec<f32>);
|
||||
|
||||
// This is needed for semantic index functionality
|
||||
// Unfortunately it has to live wherever the "Embedding" struct is created.
|
||||
// Keeping this in here though, introduces a 'rusqlite' dependency into AI
|
||||
// which is less than ideal
|
||||
impl FromSql for Embedding {
|
||||
fn column_result(value: ValueRef) -> FromSqlResult<Self> {
|
||||
let bytes = value.as_blob()?;
|
||||
let embedding: Result<Vec<f32>, Box<bincode::ErrorKind>> = bincode::deserialize(bytes);
|
||||
if embedding.is_err() {
|
||||
return Err(rusqlite::types::FromSqlError::Other(embedding.unwrap_err()));
|
||||
}
|
||||
Ok(Embedding(embedding.unwrap()))
|
||||
}
|
||||
}
|
||||
|
||||
impl ToSql for Embedding {
|
||||
fn to_sql(&self) -> rusqlite::Result<ToSqlOutput> {
|
||||
let bytes = bincode::serialize(&self.0)
|
||||
.map_err(|err| rusqlite::Error::ToSqlConversionFailure(Box::new(err)))?;
|
||||
Ok(ToSqlOutput::Owned(rusqlite::types::Value::Blob(bytes)))
|
||||
}
|
||||
}
|
||||
impl From<Vec<f32>> for Embedding {
|
||||
fn from(value: Vec<f32>) -> Self {
|
||||
Embedding(value)
|
||||
}
|
||||
}
|
||||
|
||||
impl Embedding {
|
||||
pub fn similarity(&self, other: &Self) -> OrderedFloat<f32> {
|
||||
let len = self.0.len();
|
||||
assert_eq!(len, other.0.len());
|
||||
|
||||
let mut result = 0.0;
|
||||
unsafe {
|
||||
matrixmultiply::sgemm(
|
||||
1,
|
||||
len,
|
||||
1,
|
||||
1.0,
|
||||
self.0.as_ptr(),
|
||||
len as isize,
|
||||
1,
|
||||
other.0.as_ptr(),
|
||||
1,
|
||||
len as isize,
|
||||
0.0,
|
||||
&mut result as *mut f32,
|
||||
1,
|
||||
1,
|
||||
);
|
||||
}
|
||||
OrderedFloat(result)
|
||||
}
|
||||
}
|
||||
|
||||
#[async_trait]
|
||||
pub trait EmbeddingProvider: CredentialProvider {
|
||||
fn base_model(&self) -> Box<dyn LanguageModel>;
|
||||
async fn embed_batch(&self, spans: Vec<String>) -> Result<Vec<Embedding>>;
|
||||
fn max_tokens_per_batch(&self) -> usize;
|
||||
fn rate_limit_expiration(&self) -> Option<Instant>;
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
use rand::prelude::*;
|
||||
|
||||
#[gpui2::test]
|
||||
fn test_similarity(mut rng: StdRng) {
|
||||
assert_eq!(
|
||||
Embedding::from(vec![1., 0., 0., 0., 0.])
|
||||
.similarity(&Embedding::from(vec![0., 1., 0., 0., 0.])),
|
||||
0.
|
||||
);
|
||||
assert_eq!(
|
||||
Embedding::from(vec![2., 0., 0., 0., 0.])
|
||||
.similarity(&Embedding::from(vec![3., 1., 0., 0., 0.])),
|
||||
6.
|
||||
);
|
||||
|
||||
for _ in 0..100 {
|
||||
let size = 1536;
|
||||
let mut a = vec![0.; size];
|
||||
let mut b = vec![0.; size];
|
||||
for (a, b) in a.iter_mut().zip(b.iter_mut()) {
|
||||
*a = rng.gen();
|
||||
*b = rng.gen();
|
||||
}
|
||||
let a = Embedding::from(a);
|
||||
let b = Embedding::from(b);
|
||||
|
||||
assert_eq!(
|
||||
round_to_decimals(a.similarity(&b), 1),
|
||||
round_to_decimals(reference_dot(&a.0, &b.0), 1)
|
||||
);
|
||||
}
|
||||
|
||||
fn round_to_decimals(n: OrderedFloat<f32>, decimal_places: i32) -> f32 {
|
||||
let factor = (10.0 as f32).powi(decimal_places);
|
||||
(n * factor).round() / factor
|
||||
}
|
||||
|
||||
fn reference_dot(a: &[f32], b: &[f32]) -> OrderedFloat<f32> {
|
||||
OrderedFloat(a.iter().zip(b.iter()).map(|(a, b)| a * b).sum())
|
||||
}
|
||||
}
|
||||
}
|
||||
16
crates/ai2/src/models.rs
Normal file
16
crates/ai2/src/models.rs
Normal file
@@ -0,0 +1,16 @@
|
||||
pub enum TruncationDirection {
|
||||
Start,
|
||||
End,
|
||||
}
|
||||
|
||||
pub trait LanguageModel {
|
||||
fn name(&self) -> String;
|
||||
fn count_tokens(&self, content: &str) -> anyhow::Result<usize>;
|
||||
fn truncate(
|
||||
&self,
|
||||
content: &str,
|
||||
length: usize,
|
||||
direction: TruncationDirection,
|
||||
) -> anyhow::Result<String>;
|
||||
fn capacity(&self) -> anyhow::Result<usize>;
|
||||
}
|
||||
330
crates/ai2/src/prompts/base.rs
Normal file
330
crates/ai2/src/prompts/base.rs
Normal file
@@ -0,0 +1,330 @@
|
||||
use std::cmp::Reverse;
|
||||
use std::ops::Range;
|
||||
use std::sync::Arc;
|
||||
|
||||
use language2::BufferSnapshot;
|
||||
use util::ResultExt;
|
||||
|
||||
use crate::models::LanguageModel;
|
||||
use crate::prompts::repository_context::PromptCodeSnippet;
|
||||
|
||||
pub(crate) enum PromptFileType {
|
||||
Text,
|
||||
Code,
|
||||
}
|
||||
|
||||
// TODO: Set this up to manage for defaults well
|
||||
pub struct PromptArguments {
|
||||
pub model: Arc<dyn LanguageModel>,
|
||||
pub user_prompt: Option<String>,
|
||||
pub language_name: Option<String>,
|
||||
pub project_name: Option<String>,
|
||||
pub snippets: Vec<PromptCodeSnippet>,
|
||||
pub reserved_tokens: usize,
|
||||
pub buffer: Option<BufferSnapshot>,
|
||||
pub selected_range: Option<Range<usize>>,
|
||||
}
|
||||
|
||||
impl PromptArguments {
|
||||
pub(crate) fn get_file_type(&self) -> PromptFileType {
|
||||
if self
|
||||
.language_name
|
||||
.as_ref()
|
||||
.and_then(|name| Some(!["Markdown", "Plain Text"].contains(&name.as_str())))
|
||||
.unwrap_or(true)
|
||||
{
|
||||
PromptFileType::Code
|
||||
} else {
|
||||
PromptFileType::Text
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub trait PromptTemplate {
|
||||
fn generate(
|
||||
&self,
|
||||
args: &PromptArguments,
|
||||
max_token_length: Option<usize>,
|
||||
) -> anyhow::Result<(String, usize)>;
|
||||
}
|
||||
|
||||
#[repr(i8)]
|
||||
#[derive(PartialEq, Eq, Ord)]
|
||||
pub enum PromptPriority {
|
||||
Mandatory, // Ignores truncation
|
||||
Ordered { order: usize }, // Truncates based on priority
|
||||
}
|
||||
|
||||
impl PartialOrd for PromptPriority {
|
||||
fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> {
|
||||
match (self, other) {
|
||||
(Self::Mandatory, Self::Mandatory) => Some(std::cmp::Ordering::Equal),
|
||||
(Self::Mandatory, Self::Ordered { .. }) => Some(std::cmp::Ordering::Greater),
|
||||
(Self::Ordered { .. }, Self::Mandatory) => Some(std::cmp::Ordering::Less),
|
||||
(Self::Ordered { order: a }, Self::Ordered { order: b }) => b.partial_cmp(a),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub struct PromptChain {
|
||||
args: PromptArguments,
|
||||
templates: Vec<(PromptPriority, Box<dyn PromptTemplate>)>,
|
||||
}
|
||||
|
||||
impl PromptChain {
|
||||
pub fn new(
|
||||
args: PromptArguments,
|
||||
templates: Vec<(PromptPriority, Box<dyn PromptTemplate>)>,
|
||||
) -> Self {
|
||||
PromptChain { args, templates }
|
||||
}
|
||||
|
||||
pub fn generate(&self, truncate: bool) -> anyhow::Result<(String, usize)> {
|
||||
// Argsort based on Prompt Priority
|
||||
let seperator = "\n";
|
||||
let seperator_tokens = self.args.model.count_tokens(seperator)?;
|
||||
let mut sorted_indices = (0..self.templates.len()).collect::<Vec<_>>();
|
||||
sorted_indices.sort_by_key(|&i| Reverse(&self.templates[i].0));
|
||||
|
||||
// If Truncate
|
||||
let mut tokens_outstanding = if truncate {
|
||||
Some(self.args.model.capacity()? - self.args.reserved_tokens)
|
||||
} else {
|
||||
None
|
||||
};
|
||||
|
||||
let mut prompts = vec!["".to_string(); sorted_indices.len()];
|
||||
for idx in sorted_indices {
|
||||
let (_, template) = &self.templates[idx];
|
||||
|
||||
if let Some((template_prompt, prompt_token_count)) =
|
||||
template.generate(&self.args, tokens_outstanding).log_err()
|
||||
{
|
||||
if template_prompt != "" {
|
||||
prompts[idx] = template_prompt;
|
||||
|
||||
if let Some(remaining_tokens) = tokens_outstanding {
|
||||
let new_tokens = prompt_token_count + seperator_tokens;
|
||||
tokens_outstanding = if remaining_tokens > new_tokens {
|
||||
Some(remaining_tokens - new_tokens)
|
||||
} else {
|
||||
Some(0)
|
||||
};
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
prompts.retain(|x| x != "");
|
||||
|
||||
let full_prompt = prompts.join(seperator);
|
||||
let total_token_count = self.args.model.count_tokens(&full_prompt)?;
|
||||
anyhow::Ok((prompts.join(seperator), total_token_count))
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
pub(crate) mod tests {
|
||||
use crate::models::TruncationDirection;
|
||||
use crate::test::FakeLanguageModel;
|
||||
|
||||
use super::*;
|
||||
|
||||
#[test]
|
||||
pub fn test_prompt_chain() {
|
||||
struct TestPromptTemplate {}
|
||||
impl PromptTemplate for TestPromptTemplate {
|
||||
fn generate(
|
||||
&self,
|
||||
args: &PromptArguments,
|
||||
max_token_length: Option<usize>,
|
||||
) -> anyhow::Result<(String, usize)> {
|
||||
let mut content = "This is a test prompt template".to_string();
|
||||
|
||||
let mut token_count = args.model.count_tokens(&content)?;
|
||||
if let Some(max_token_length) = max_token_length {
|
||||
if token_count > max_token_length {
|
||||
content = args.model.truncate(
|
||||
&content,
|
||||
max_token_length,
|
||||
TruncationDirection::End,
|
||||
)?;
|
||||
token_count = max_token_length;
|
||||
}
|
||||
}
|
||||
|
||||
anyhow::Ok((content, token_count))
|
||||
}
|
||||
}
|
||||
|
||||
struct TestLowPriorityTemplate {}
|
||||
impl PromptTemplate for TestLowPriorityTemplate {
|
||||
fn generate(
|
||||
&self,
|
||||
args: &PromptArguments,
|
||||
max_token_length: Option<usize>,
|
||||
) -> anyhow::Result<(String, usize)> {
|
||||
let mut content = "This is a low priority test prompt template".to_string();
|
||||
|
||||
let mut token_count = args.model.count_tokens(&content)?;
|
||||
if let Some(max_token_length) = max_token_length {
|
||||
if token_count > max_token_length {
|
||||
content = args.model.truncate(
|
||||
&content,
|
||||
max_token_length,
|
||||
TruncationDirection::End,
|
||||
)?;
|
||||
token_count = max_token_length;
|
||||
}
|
||||
}
|
||||
|
||||
anyhow::Ok((content, token_count))
|
||||
}
|
||||
}
|
||||
|
||||
let model: Arc<dyn LanguageModel> = Arc::new(FakeLanguageModel { capacity: 100 });
|
||||
let args = PromptArguments {
|
||||
model: model.clone(),
|
||||
language_name: None,
|
||||
project_name: None,
|
||||
snippets: Vec::new(),
|
||||
reserved_tokens: 0,
|
||||
buffer: None,
|
||||
selected_range: None,
|
||||
user_prompt: None,
|
||||
};
|
||||
|
||||
let templates: Vec<(PromptPriority, Box<dyn PromptTemplate>)> = vec![
|
||||
(
|
||||
PromptPriority::Ordered { order: 0 },
|
||||
Box::new(TestPromptTemplate {}),
|
||||
),
|
||||
(
|
||||
PromptPriority::Ordered { order: 1 },
|
||||
Box::new(TestLowPriorityTemplate {}),
|
||||
),
|
||||
];
|
||||
let chain = PromptChain::new(args, templates);
|
||||
|
||||
let (prompt, token_count) = chain.generate(false).unwrap();
|
||||
|
||||
assert_eq!(
|
||||
prompt,
|
||||
"This is a test prompt template\nThis is a low priority test prompt template"
|
||||
.to_string()
|
||||
);
|
||||
|
||||
assert_eq!(model.count_tokens(&prompt).unwrap(), token_count);
|
||||
|
||||
// Testing with Truncation Off
|
||||
// Should ignore capacity and return all prompts
|
||||
let model: Arc<dyn LanguageModel> = Arc::new(FakeLanguageModel { capacity: 20 });
|
||||
let args = PromptArguments {
|
||||
model: model.clone(),
|
||||
language_name: None,
|
||||
project_name: None,
|
||||
snippets: Vec::new(),
|
||||
reserved_tokens: 0,
|
||||
buffer: None,
|
||||
selected_range: None,
|
||||
user_prompt: None,
|
||||
};
|
||||
|
||||
let templates: Vec<(PromptPriority, Box<dyn PromptTemplate>)> = vec![
|
||||
(
|
||||
PromptPriority::Ordered { order: 0 },
|
||||
Box::new(TestPromptTemplate {}),
|
||||
),
|
||||
(
|
||||
PromptPriority::Ordered { order: 1 },
|
||||
Box::new(TestLowPriorityTemplate {}),
|
||||
),
|
||||
];
|
||||
let chain = PromptChain::new(args, templates);
|
||||
|
||||
let (prompt, token_count) = chain.generate(false).unwrap();
|
||||
|
||||
assert_eq!(
|
||||
prompt,
|
||||
"This is a test prompt template\nThis is a low priority test prompt template"
|
||||
.to_string()
|
||||
);
|
||||
|
||||
assert_eq!(model.count_tokens(&prompt).unwrap(), token_count);
|
||||
|
||||
// Testing with Truncation Off
|
||||
// Should ignore capacity and return all prompts
|
||||
let capacity = 20;
|
||||
let model: Arc<dyn LanguageModel> = Arc::new(FakeLanguageModel { capacity });
|
||||
let args = PromptArguments {
|
||||
model: model.clone(),
|
||||
language_name: None,
|
||||
project_name: None,
|
||||
snippets: Vec::new(),
|
||||
reserved_tokens: 0,
|
||||
buffer: None,
|
||||
selected_range: None,
|
||||
user_prompt: None,
|
||||
};
|
||||
|
||||
let templates: Vec<(PromptPriority, Box<dyn PromptTemplate>)> = vec![
|
||||
(
|
||||
PromptPriority::Ordered { order: 0 },
|
||||
Box::new(TestPromptTemplate {}),
|
||||
),
|
||||
(
|
||||
PromptPriority::Ordered { order: 1 },
|
||||
Box::new(TestLowPriorityTemplate {}),
|
||||
),
|
||||
(
|
||||
PromptPriority::Ordered { order: 2 },
|
||||
Box::new(TestLowPriorityTemplate {}),
|
||||
),
|
||||
];
|
||||
let chain = PromptChain::new(args, templates);
|
||||
|
||||
let (prompt, token_count) = chain.generate(true).unwrap();
|
||||
|
||||
assert_eq!(prompt, "This is a test promp".to_string());
|
||||
assert_eq!(token_count, capacity);
|
||||
|
||||
// Change Ordering of Prompts Based on Priority
|
||||
let capacity = 120;
|
||||
let reserved_tokens = 10;
|
||||
let model: Arc<dyn LanguageModel> = Arc::new(FakeLanguageModel { capacity });
|
||||
let args = PromptArguments {
|
||||
model: model.clone(),
|
||||
language_name: None,
|
||||
project_name: None,
|
||||
snippets: Vec::new(),
|
||||
reserved_tokens,
|
||||
buffer: None,
|
||||
selected_range: None,
|
||||
user_prompt: None,
|
||||
};
|
||||
let templates: Vec<(PromptPriority, Box<dyn PromptTemplate>)> = vec![
|
||||
(
|
||||
PromptPriority::Mandatory,
|
||||
Box::new(TestLowPriorityTemplate {}),
|
||||
),
|
||||
(
|
||||
PromptPriority::Ordered { order: 0 },
|
||||
Box::new(TestPromptTemplate {}),
|
||||
),
|
||||
(
|
||||
PromptPriority::Ordered { order: 1 },
|
||||
Box::new(TestLowPriorityTemplate {}),
|
||||
),
|
||||
];
|
||||
let chain = PromptChain::new(args, templates);
|
||||
|
||||
let (prompt, token_count) = chain.generate(true).unwrap();
|
||||
|
||||
assert_eq!(
|
||||
prompt,
|
||||
"This is a low priority test prompt template\nThis is a test prompt template\nThis is a low priority test prompt "
|
||||
.to_string()
|
||||
);
|
||||
assert_eq!(token_count, capacity - reserved_tokens);
|
||||
}
|
||||
}
|
||||
164
crates/ai2/src/prompts/file_context.rs
Normal file
164
crates/ai2/src/prompts/file_context.rs
Normal file
@@ -0,0 +1,164 @@
|
||||
use anyhow::anyhow;
|
||||
use language2::BufferSnapshot;
|
||||
use language2::ToOffset;
|
||||
|
||||
use crate::models::LanguageModel;
|
||||
use crate::models::TruncationDirection;
|
||||
use crate::prompts::base::PromptArguments;
|
||||
use crate::prompts::base::PromptTemplate;
|
||||
use std::fmt::Write;
|
||||
use std::ops::Range;
|
||||
use std::sync::Arc;
|
||||
|
||||
/// Builds prompt text for `buffer`, marking the user's selection with
/// `<|START|>` / `<|START|`..`|END|>` spans and truncating the text around
/// the selection to fit `max_token_count`.
///
/// Returns `(prompt, token_count, truncated)` where `truncated` reports
/// whether any content was dropped to fit the budget.
fn retrieve_context(
    buffer: &BufferSnapshot,
    selected_range: &Option<Range<usize>>,
    model: Arc<dyn LanguageModel>,
    max_token_count: Option<usize>,
) -> anyhow::Result<(String, usize, bool)> {
    let mut prompt = String::new();
    let mut truncated = false;
    // NOTE(review): when `selected_range` is `None` this whole block is
    // skipped and the returned prompt is empty — confirm that is intended.
    if let Some(selected_range) = selected_range {
        let start = selected_range.start.to_offset(buffer);
        let end = selected_range.end.to_offset(buffer);

        // Text before the selection.
        let start_window = buffer.text_for_range(0..start).collect::<String>();

        let mut selected_window = String::new();
        if start == end {
            // Zero-width selection: a single cursor marker.
            write!(selected_window, "<|START|>").unwrap();
        } else {
            // Non-empty selection: opening marker; `|END|>` is appended below.
            write!(selected_window, "<|START|").unwrap();
        }

        write!(
            selected_window,
            "{}",
            buffer.text_for_range(start..end).collect::<String>()
        )
        .unwrap();

        if start != end {
            write!(selected_window, "|END|>").unwrap();
        }

        // Text after the selection.
        let end_window = buffer.text_for_range(end..buffer.len()).collect::<String>();

        if let Some(max_token_count) = max_token_count {
            let selected_tokens = model.count_tokens(&selected_window)?;
            // The selection itself is never truncated; fail if it alone
            // exceeds the budget.
            if selected_tokens > max_token_count {
                return Err(anyhow!(
                    "selected range is greater than model context window, truncation not possible"
                ));
            };

            let mut remaining_tokens = max_token_count - selected_tokens;
            let start_window_tokens = model.count_tokens(&start_window)?;
            let end_window_tokens = model.count_tokens(&end_window)?;
            let outside_tokens = start_window_tokens + end_window_tokens;
            if outside_tokens > remaining_tokens {
                // Split the leftover budget between the two sides: the
                // smaller side takes at most half so the larger side can
                // absorb whatever remains.
                let (start_goal_tokens, end_goal_tokens) =
                    if start_window_tokens < end_window_tokens {
                        let start_goal_tokens = (remaining_tokens / 2).min(start_window_tokens);
                        remaining_tokens -= start_goal_tokens;
                        let end_goal_tokens = remaining_tokens.min(end_window_tokens);
                        (start_goal_tokens, end_goal_tokens)
                    } else {
                        let end_goal_tokens = (remaining_tokens / 2).min(end_window_tokens);
                        remaining_tokens -= end_goal_tokens;
                        let start_goal_tokens = remaining_tokens.min(start_window_tokens);
                        (start_goal_tokens, end_goal_tokens)
                    };

                // Keep the tail of the preceding text and the head of the
                // following text so the retained context hugs the selection.
                let truncated_start_window =
                    model.truncate(&start_window, start_goal_tokens, TruncationDirection::Start)?;
                let truncated_end_window =
                    model.truncate(&end_window, end_goal_tokens, TruncationDirection::End)?;
                writeln!(
                    prompt,
                    "{truncated_start_window}{selected_window}{truncated_end_window}"
                )
                .unwrap();
                truncated = true;
            } else {
                // Everything fits; no truncation needed.
                writeln!(prompt, "{start_window}{selected_window}{end_window}").unwrap();
            }
        } else {
            // If we dont have a selected range, include entire file.
            // NOTE(review): this `else` actually belongs to `max_token_count`
            // (a selected range DOES exist here, but no token budget), so the
            // selection markers built above are discarded — confirm intent.
            writeln!(prompt, "{}", &buffer.text()).unwrap();

            // Dumb truncation strategy
            // NOTE(review): dead code — `max_token_count` is `None` on this
            // branch, so this inner `if let` can never fire.
            if let Some(max_token_count) = max_token_count {
                if model.count_tokens(&prompt)? > max_token_count {
                    truncated = true;
                    prompt = model.truncate(&prompt, max_token_count, TruncationDirection::End)?;
                }
            }
        }
    }

    let token_count = model.count_tokens(&prompt)?;
    anyhow::Ok((prompt, token_count, truncated))
}
|
||||
|
||||
/// Prompt section that embeds the current buffer's (possibly truncated)
/// contents, fenced as a Markdown code block and annotated with selection
/// markers where applicable.
pub struct FileContext {}

impl PromptTemplate for FileContext {
    /// Generates the "current file" portion of a prompt from `args.buffer`.
    ///
    /// Returns `(prompt, token_count)`; errors when no buffer was supplied.
    fn generate(
        &self,
        args: &PromptArguments,
        max_token_length: Option<usize>,
    ) -> anyhow::Result<(String, usize)> {
        if let Some(buffer) = &args.buffer {
            let mut prompt = String::new();
            // Add Initial Preamble
            // TODO: Do we want to add the path in here?
            writeln!(
                prompt,
                "The file you are currently working on has the following content:"
            )
            .unwrap();

            // Lowercased language name used as the code-fence info string;
            // empty when the language is unknown.
            let language_name = args
                .language_name
                .clone()
                .unwrap_or("".to_string())
                .to_lowercase();

            let (context, _, truncated) = retrieve_context(
                buffer,
                &args.selected_range,
                args.model.clone(),
                max_token_length,
            )?;
            writeln!(prompt, "```{language_name}\n{context}\n```").unwrap();

            if truncated {
                writeln!(prompt, "Note the content has been truncated and only represents a portion of the file.").unwrap();
            }

            // Explain the marker convention: a single `<|START|>` for a
            // cursor, `<|START|`..`|END|>` for a non-empty selection.
            if let Some(selected_range) = &args.selected_range {
                let start = selected_range.start.to_offset(buffer);
                let end = selected_range.end.to_offset(buffer);

                if start == end {
                    writeln!(prompt, "In particular, the user's cursor is currently on the '<|START|>' span in the above content, with no text selected.").unwrap();
                } else {
                    writeln!(prompt, "In particular, the user has selected a section of the text between the '<|START|' and '|END|>' spans.").unwrap();
                }
            }

            // Really dumb truncation strategy
            // NOTE(review): this final pass can also cut the instruction
            // sentences added above, not just the file content — confirm
            // acceptable.
            if let Some(max_tokens) = max_token_length {
                prompt = args
                    .model
                    .truncate(&prompt, max_tokens, TruncationDirection::End)?;
            }

            let token_count = args.model.count_tokens(&prompt)?;
            anyhow::Ok((prompt, token_count))
        } else {
            Err(anyhow!("no buffer provided to retrieve file context from"))
        }
    }
}
|
||||
99
crates/ai2/src/prompts/generate.rs
Normal file
99
crates/ai2/src/prompts/generate.rs
Normal file
@@ -0,0 +1,99 @@
|
||||
use crate::prompts::base::{PromptArguments, PromptFileType, PromptTemplate};
|
||||
use anyhow::anyhow;
|
||||
use std::fmt::Write;
|
||||
|
||||
/// Returns `s` with its first character upper-cased (Unicode-aware, so a
/// single char may expand to several); the rest of the string is untouched.
pub fn capitalize(s: &str) -> String {
    let mut rest = s.chars();
    match rest.next() {
        Some(first) => first.to_uppercase().chain(rest).collect(),
        None => String::new(),
    }
}
|
||||
|
||||
pub struct GenerateInlineContent {}
|
||||
|
||||
impl PromptTemplate for GenerateInlineContent {
|
||||
fn generate(
|
||||
&self,
|
||||
args: &PromptArguments,
|
||||
max_token_length: Option<usize>,
|
||||
) -> anyhow::Result<(String, usize)> {
|
||||
let Some(user_prompt) = &args.user_prompt else {
|
||||
return Err(anyhow!("user prompt not provided"));
|
||||
};
|
||||
|
||||
let file_type = args.get_file_type();
|
||||
let content_type = match &file_type {
|
||||
PromptFileType::Code => "code",
|
||||
PromptFileType::Text => "text",
|
||||
};
|
||||
|
||||
let mut prompt = String::new();
|
||||
|
||||
if let Some(selected_range) = &args.selected_range {
|
||||
if selected_range.start == selected_range.end {
|
||||
writeln!(
|
||||
prompt,
|
||||
"Assume the cursor is located where the `<|START|>` span is."
|
||||
)
|
||||
.unwrap();
|
||||
writeln!(
|
||||
prompt,
|
||||
"{} can't be replaced, so assume your answer will be inserted at the cursor.",
|
||||
capitalize(content_type)
|
||||
)
|
||||
.unwrap();
|
||||
writeln!(
|
||||
prompt,
|
||||
"Generate {content_type} based on the users prompt: {user_prompt}",
|
||||
)
|
||||
.unwrap();
|
||||
} else {
|
||||
writeln!(prompt, "Modify the user's selected {content_type} based upon the users prompt: '{user_prompt}'").unwrap();
|
||||
writeln!(prompt, "You must reply with only the adjusted {content_type} (within the '<|START|' and '|END|>' spans) not the entire file.").unwrap();
|
||||
writeln!(prompt, "Double check that you only return code and not the '<|START|' and '|END|'> spans").unwrap();
|
||||
}
|
||||
} else {
|
||||
writeln!(
|
||||
prompt,
|
||||
"Generate {content_type} based on the users prompt: {user_prompt}"
|
||||
)
|
||||
.unwrap();
|
||||
}
|
||||
|
||||
if let Some(language_name) = &args.language_name {
|
||||
writeln!(
|
||||
prompt,
|
||||
"Your answer MUST always and only be valid {}.",
|
||||
language_name
|
||||
)
|
||||
.unwrap();
|
||||
}
|
||||
writeln!(prompt, "Never make remarks about the output.").unwrap();
|
||||
writeln!(
|
||||
prompt,
|
||||
"Do not return anything else, except the generated {content_type}."
|
||||
)
|
||||
.unwrap();
|
||||
|
||||
match file_type {
|
||||
PromptFileType::Code => {
|
||||
// writeln!(prompt, "Always wrap your code in a Markdown block.").unwrap();
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
|
||||
// Really dumb truncation strategy
|
||||
if let Some(max_tokens) = max_token_length {
|
||||
prompt = args.model.truncate(
|
||||
&prompt,
|
||||
max_tokens,
|
||||
crate::models::TruncationDirection::End,
|
||||
)?;
|
||||
}
|
||||
|
||||
let token_count = args.model.count_tokens(&prompt)?;
|
||||
|
||||
anyhow::Ok((prompt, token_count))
|
||||
}
|
||||
}
|
||||
5
crates/ai2/src/prompts/mod.rs
Normal file
5
crates/ai2/src/prompts/mod.rs
Normal file
@@ -0,0 +1,5 @@
|
||||
pub mod base;
|
||||
pub mod file_context;
|
||||
pub mod generate;
|
||||
pub mod preamble;
|
||||
pub mod repository_context;
|
||||
52
crates/ai2/src/prompts/preamble.rs
Normal file
52
crates/ai2/src/prompts/preamble.rs
Normal file
@@ -0,0 +1,52 @@
|
||||
use crate::prompts::base::{PromptArguments, PromptFileType, PromptTemplate};
|
||||
use std::fmt::Write;
|
||||
|
||||
pub struct EngineerPreamble {}
|
||||
|
||||
impl PromptTemplate for EngineerPreamble {
|
||||
fn generate(
|
||||
&self,
|
||||
args: &PromptArguments,
|
||||
max_token_length: Option<usize>,
|
||||
) -> anyhow::Result<(String, usize)> {
|
||||
let mut prompts = Vec::new();
|
||||
|
||||
match args.get_file_type() {
|
||||
PromptFileType::Code => {
|
||||
prompts.push(format!(
|
||||
"You are an expert {}engineer.",
|
||||
args.language_name.clone().unwrap_or("".to_string()) + " "
|
||||
));
|
||||
}
|
||||
PromptFileType::Text => {
|
||||
prompts.push("You are an expert engineer.".to_string());
|
||||
}
|
||||
}
|
||||
|
||||
if let Some(project_name) = args.project_name.clone() {
|
||||
prompts.push(format!(
|
||||
"You are currently working inside the '{project_name}' project in code editor Zed."
|
||||
));
|
||||
}
|
||||
|
||||
if let Some(mut remaining_tokens) = max_token_length {
|
||||
let mut prompt = String::new();
|
||||
let mut total_count = 0;
|
||||
for prompt_piece in prompts {
|
||||
let prompt_token_count =
|
||||
args.model.count_tokens(&prompt_piece)? + args.model.count_tokens("\n")?;
|
||||
if remaining_tokens > prompt_token_count {
|
||||
writeln!(prompt, "{prompt_piece}").unwrap();
|
||||
remaining_tokens -= prompt_token_count;
|
||||
total_count += prompt_token_count;
|
||||
}
|
||||
}
|
||||
|
||||
anyhow::Ok((prompt, total_count))
|
||||
} else {
|
||||
let prompt = prompts.join("\n");
|
||||
let token_count = args.model.count_tokens(&prompt)?;
|
||||
anyhow::Ok((prompt, token_count))
|
||||
}
|
||||
}
|
||||
}
|
||||
98
crates/ai2/src/prompts/repository_context.rs
Normal file
98
crates/ai2/src/prompts/repository_context.rs
Normal file
@@ -0,0 +1,98 @@
|
||||
use crate::prompts::base::{PromptArguments, PromptTemplate};
|
||||
use std::fmt::Write;
|
||||
use std::{ops::Range, path::PathBuf};
|
||||
|
||||
use gpui2::{AsyncAppContext, Model};
|
||||
use language2::{Anchor, Buffer};
|
||||
|
||||
#[derive(Clone)]
|
||||
pub struct PromptCodeSnippet {
|
||||
path: Option<PathBuf>,
|
||||
language_name: Option<String>,
|
||||
content: String,
|
||||
}
|
||||
|
||||
impl PromptCodeSnippet {
|
||||
pub fn new(
|
||||
buffer: Model<Buffer>,
|
||||
range: Range<Anchor>,
|
||||
cx: &mut AsyncAppContext,
|
||||
) -> anyhow::Result<Self> {
|
||||
let (content, language_name, file_path) = buffer.update(cx, |buffer, _| {
|
||||
let snapshot = buffer.snapshot();
|
||||
let content = snapshot.text_for_range(range.clone()).collect::<String>();
|
||||
|
||||
let language_name = buffer
|
||||
.language()
|
||||
.and_then(|language| Some(language.name().to_string().to_lowercase()));
|
||||
|
||||
let file_path = buffer
|
||||
.file()
|
||||
.and_then(|file| Some(file.path().to_path_buf()));
|
||||
|
||||
(content, language_name, file_path)
|
||||
})?;
|
||||
|
||||
anyhow::Ok(PromptCodeSnippet {
|
||||
path: file_path,
|
||||
language_name,
|
||||
content,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
impl ToString for PromptCodeSnippet {
|
||||
fn to_string(&self) -> String {
|
||||
let path = self
|
||||
.path
|
||||
.as_ref()
|
||||
.and_then(|path| Some(path.to_string_lossy().to_string()))
|
||||
.unwrap_or("".to_string());
|
||||
let language_name = self.language_name.clone().unwrap_or("".to_string());
|
||||
let content = self.content.clone();
|
||||
|
||||
format!("The below code snippet may be relevant from file: {path}\n```{language_name}\n{content}\n```")
|
||||
}
|
||||
}
|
||||
|
||||
pub struct RepositoryContext {}
|
||||
|
||||
impl PromptTemplate for RepositoryContext {
|
||||
fn generate(
|
||||
&self,
|
||||
args: &PromptArguments,
|
||||
max_token_length: Option<usize>,
|
||||
) -> anyhow::Result<(String, usize)> {
|
||||
const MAXIMUM_SNIPPET_TOKEN_COUNT: usize = 500;
|
||||
let template = "You are working inside a large repository, here are a few code snippets that may be useful.";
|
||||
let mut prompt = String::new();
|
||||
|
||||
let mut remaining_tokens = max_token_length.clone();
|
||||
let seperator_token_length = args.model.count_tokens("\n")?;
|
||||
for snippet in &args.snippets {
|
||||
let mut snippet_prompt = template.to_string();
|
||||
let content = snippet.to_string();
|
||||
writeln!(snippet_prompt, "{content}").unwrap();
|
||||
|
||||
let token_count = args.model.count_tokens(&snippet_prompt)?;
|
||||
if token_count <= MAXIMUM_SNIPPET_TOKEN_COUNT {
|
||||
if let Some(tokens_left) = remaining_tokens {
|
||||
if tokens_left >= token_count {
|
||||
writeln!(prompt, "{snippet_prompt}").unwrap();
|
||||
remaining_tokens = if tokens_left >= (token_count + seperator_token_length)
|
||||
{
|
||||
Some(tokens_left - token_count - seperator_token_length)
|
||||
} else {
|
||||
Some(0)
|
||||
};
|
||||
}
|
||||
} else {
|
||||
writeln!(prompt, "{snippet_prompt}").unwrap();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
let total_token_count = args.model.count_tokens(&prompt)?;
|
||||
anyhow::Ok((prompt, total_token_count))
|
||||
}
|
||||
}
|
||||
1
crates/ai2/src/providers/mod.rs
Normal file
1
crates/ai2/src/providers/mod.rs
Normal file
@@ -0,0 +1 @@
|
||||
pub mod open_ai;
|
||||
306
crates/ai2/src/providers/open_ai/completion.rs
Normal file
306
crates/ai2/src/providers/open_ai/completion.rs
Normal file
@@ -0,0 +1,306 @@
|
||||
use anyhow::{anyhow, Result};
|
||||
use async_trait::async_trait;
|
||||
use futures::{
|
||||
future::BoxFuture, io::BufReader, stream::BoxStream, AsyncBufReadExt, AsyncReadExt, FutureExt,
|
||||
Stream, StreamExt,
|
||||
};
|
||||
use gpui2::{AppContext, Executor};
|
||||
use isahc::{http::StatusCode, Request, RequestExt};
|
||||
use parking_lot::RwLock;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::{
|
||||
env,
|
||||
fmt::{self, Display},
|
||||
io,
|
||||
sync::Arc,
|
||||
};
|
||||
use util::ResultExt;
|
||||
|
||||
use crate::{
|
||||
auth::{CredentialProvider, ProviderCredential},
|
||||
completion::{CompletionProvider, CompletionRequest},
|
||||
models::LanguageModel,
|
||||
};
|
||||
|
||||
use crate::providers::open_ai::{OpenAILanguageModel, OPENAI_API_URL};
|
||||
|
||||
// Role of a chat message; serialized lowercase ("user" / "assistant" /
// "system") to match the OpenAI chat API wire format.
#[derive(Clone, Copy, Serialize, Deserialize, Debug, Eq, PartialEq)]
#[serde(rename_all = "lowercase")]
pub enum Role {
    User,
    Assistant,
    System,
}

impl Role {
    /// Advances to the next role in the fixed cycle
    /// User -> Assistant -> System -> User.
    pub fn cycle(&mut self) {
        *self = match self {
            Role::User => Role::Assistant,
            Role::Assistant => Role::System,
            Role::System => Role::User,
        }
    }
}

impl Display for Role {
    // Human-readable, capitalized form — distinct from the lowercase serde
    // representation above.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> std::fmt::Result {
        match self {
            Role::User => write!(f, "User"),
            Role::Assistant => write!(f, "Assistant"),
            Role::System => write!(f, "System"),
        }
    }
}
|
||||
|
||||
// A single chat message sent to the API.
#[derive(Serialize, Deserialize, Debug, Eq, PartialEq)]
pub struct RequestMessage {
    pub role: Role,
    pub content: String,
}

// Request payload for the OpenAI chat-completions endpoint.
#[derive(Debug, Default, Serialize)]
pub struct OpenAIRequest {
    pub model: String,
    pub messages: Vec<RequestMessage>,
    // When true the API streams the response as server-sent events.
    pub stream: bool,
    // Sequences at which the model should stop generating.
    pub stop: Vec<String>,
    pub temperature: f32,
}

impl CompletionRequest for OpenAIRequest {
    // Serializes the request to the JSON body sent over the wire.
    fn data(&self) -> serde_json::Result<String> {
        serde_json::to_string(self)
    }
}

// A (possibly partial) message within a streamed response; fields are
// optional because stream deltas carry only what changed.
#[derive(Serialize, Deserialize, Debug, Eq, PartialEq)]
pub struct ResponseMessage {
    pub role: Option<Role>,
    pub content: Option<String>,
}

// Token accounting reported by the API.
#[derive(Deserialize, Debug)]
pub struct OpenAIUsage {
    pub prompt_tokens: u32,
    pub completion_tokens: u32,
    pub total_tokens: u32,
}

// One choice's incremental update within a streamed chat completion.
#[derive(Deserialize, Debug)]
pub struct ChatChoiceDelta {
    pub index: u32,
    pub delta: ResponseMessage,
    // Set (e.g. "stop") on the final delta for this choice.
    pub finish_reason: Option<String>,
}

// One server-sent event from the streaming chat-completions endpoint.
#[derive(Deserialize, Debug)]
pub struct OpenAIResponseStreamEvent {
    pub id: Option<String>,
    pub object: String,
    pub created: u32,
    pub model: String,
    pub choices: Vec<ChatChoiceDelta>,
    pub usage: Option<OpenAIUsage>,
}
|
||||
|
||||
/// Starts a streaming chat completion against the OpenAI API.
///
/// Returns a stream of parsed server-sent events; on a non-OK HTTP status
/// the response body is read and surfaced as an error instead.
pub async fn stream_completion(
    credential: ProviderCredential,
    executor: Arc<Executor>,
    request: Box<dyn CompletionRequest>,
) -> Result<impl Stream<Item = Result<OpenAIResponseStreamEvent>>> {
    let api_key = match credential {
        ProviderCredential::Credentials { api_key } => api_key,
        _ => {
            return Err(anyhow!("no credentials provider for completion"));
        }
    };

    let (tx, rx) = futures::channel::mpsc::unbounded::<Result<OpenAIResponseStreamEvent>>();

    let json_data = request.data()?;
    let mut response = Request::post(format!("{OPENAI_API_URL}/chat/completions"))
        .header("Content-Type", "application/json")
        .header("Authorization", format!("Bearer {}", api_key))
        .body(json_data)?
        .send_async()
        .await?;

    let status = response.status();
    if status == StatusCode::OK {
        // Parse the SSE body on a background task, forwarding each event
        // through the channel until the stream finishes or the receiver
        // is dropped.
        executor
            .spawn(async move {
                let mut lines = BufReader::new(response.body_mut()).lines();

                // Extracts the JSON payload from one "data: {...}" SSE line;
                // non-data lines (e.g. blank keep-alives) yield `None`.
                // NOTE(review): the terminal "data: [DONE]" sentinel is not
                // special-cased and would surface as a parse error; the loop
                // normally breaks earlier on `finish_reason` — confirm.
                fn parse_line(
                    line: Result<String, io::Error>,
                ) -> Result<Option<OpenAIResponseStreamEvent>> {
                    if let Some(data) = line?.strip_prefix("data: ") {
                        let event = serde_json::from_str(&data)?;
                        Ok(Some(event))
                    } else {
                        Ok(None)
                    }
                }

                while let Some(line) = lines.next().await {
                    if let Some(event) = parse_line(line).transpose() {
                        // A set `finish_reason` on the last choice marks the
                        // end of the stream.
                        let done = event.as_ref().map_or(false, |event| {
                            event
                                .choices
                                .last()
                                .map_or(false, |choice| choice.finish_reason.is_some())
                        });
                        if tx.unbounded_send(event).is_err() {
                            // Receiver hung up; stop reading.
                            break;
                        }

                        if done {
                            break;
                        }
                    }
                }

                anyhow::Ok(())
            })
            .detach();

        Ok(rx)
    } else {
        // Non-OK status: read the whole body and try to extract the API's
        // structured error message for a clearer failure.
        let mut body = String::new();
        response.body_mut().read_to_string(&mut body).await?;

        #[derive(Deserialize)]
        struct OpenAIResponse {
            error: OpenAIError,
        }

        #[derive(Deserialize)]
        struct OpenAIError {
            message: String,
        }

        match serde_json::from_str::<OpenAIResponse>(&body) {
            Ok(response) if !response.error.message.is_empty() => Err(anyhow!(
                "Failed to connect to OpenAI API: {}",
                response.error.message,
            )),

            // Fall back to the raw status and body when the error payload
            // can't be parsed.
            _ => Err(anyhow!(
                "Failed to connect to OpenAI API: {} {}",
                response.status(),
                body,
            )),
        }
    }
}
|
||||
|
||||
// Completion provider backed by the OpenAI chat-completions API.
#[derive(Clone)]
pub struct OpenAICompletionProvider {
    model: OpenAILanguageModel,
    // Shared, mutable credential state; populated by `retrieve_credentials`.
    credential: Arc<RwLock<ProviderCredential>>,
    executor: Arc<Executor>,
}

impl OpenAICompletionProvider {
    /// Creates a provider for `model_name` with no credentials loaded yet.
    pub fn new(model_name: &str, executor: Arc<Executor>) -> Self {
        let model = OpenAILanguageModel::load(model_name);
        let credential = Arc::new(RwLock::new(ProviderCredential::NoCredentials));
        Self {
            model,
            credential,
            executor,
        }
    }
}
|
||||
|
||||
#[async_trait]
impl CredentialProvider for OpenAICompletionProvider {
    // True once real credentials have been retrieved or saved.
    fn has_credentials(&self) -> bool {
        match *self.credential.read() {
            ProviderCredential::Credentials { .. } => true,
            _ => false,
        }
    }

    /// Resolves credentials in priority order: the in-memory cache, the
    /// `OPENAI_API_KEY` environment variable, then the platform credential
    /// store. The result (including `NoCredentials`) is cached for reuse.
    async fn retrieve_credentials(&self, cx: &mut AppContext) -> ProviderCredential {
        let existing_credential = self.credential.read().clone();

        let retrieved_credential = cx
            .run_on_main(move |cx| match existing_credential {
                ProviderCredential::Credentials { .. } => {
                    return existing_credential.clone();
                }
                _ => {
                    if let Some(api_key) = env::var("OPENAI_API_KEY").log_err() {
                        return ProviderCredential::Credentials { api_key };
                    }

                    if let Some(Some((_, api_key))) = cx.read_credentials(OPENAI_API_URL).log_err()
                    {
                        // Stored credentials must be valid UTF-8.
                        if let Some(api_key) = String::from_utf8(api_key).log_err() {
                            return ProviderCredential::Credentials { api_key };
                        } else {
                            return ProviderCredential::NoCredentials;
                        }
                    } else {
                        return ProviderCredential::NoCredentials;
                    }
                }
            })
            .await;

        *self.credential.write() = retrieved_credential.clone();
        retrieved_credential
    }

    /// Caches `credential` in memory and, for real credentials, persists it
    /// to the platform credential store keyed by the API URL.
    async fn save_credentials(&self, cx: &mut AppContext, credential: ProviderCredential) {
        *self.credential.write() = credential.clone();
        let credential = credential.clone();
        cx.run_on_main(move |cx| match credential {
            ProviderCredential::Credentials { api_key } => {
                cx.write_credentials(OPENAI_API_URL, "Bearer", api_key.as_bytes())
                    .log_err();
            }
            _ => {}
        })
        .await;
    }

    /// Removes the stored credential and clears the in-memory cache.
    async fn delete_credentials(&self, cx: &mut AppContext) {
        cx.run_on_main(move |cx| cx.delete_credentials(OPENAI_API_URL).log_err())
            .await;
        *self.credential.write() = ProviderCredential::NoCredentials;
    }
}
||||
|
||||
impl CompletionProvider for OpenAICompletionProvider {
    // The language model used for token counting and truncation.
    fn base_model(&self) -> Box<dyn LanguageModel> {
        let model: Box<dyn LanguageModel> = Box::new(self.model.clone());
        model
    }
    /// Streams completion text chunks for `prompt`; events carrying no
    /// content (e.g. role-only deltas) are filtered out.
    fn complete(
        &self,
        prompt: Box<dyn CompletionRequest>,
    ) -> BoxFuture<'static, Result<BoxStream<'static, Result<String>>>> {
        // Currently the CompletionRequest for OpenAI, includes a 'model' parameter
        // This means that the model is determined by the CompletionRequest and not the CompletionProvider,
        // which is currently model based, due to the language model.
        // At some point in the future we should rectify this.
        let credential = self.credential.read().clone();
        let request = stream_completion(credential, self.executor.clone(), prompt);
        async move {
            let response = request.await?;
            let stream = response
                .filter_map(|response| async move {
                    match response {
                        // Only the last choice's content delta is surfaced.
                        Ok(mut response) => Some(Ok(response.choices.pop()?.delta.content?)),
                        Err(error) => Some(Err(error)),
                    }
                })
                .boxed();
            Ok(stream)
        }
        .boxed()
    }
    fn box_clone(&self) -> Box<dyn CompletionProvider> {
        Box::new((*self).clone())
    }
}
|
||||
313
crates/ai2/src/providers/open_ai/embedding.rs
Normal file
313
crates/ai2/src/providers/open_ai/embedding.rs
Normal file
@@ -0,0 +1,313 @@
|
||||
use anyhow::{anyhow, Result};
|
||||
use async_trait::async_trait;
|
||||
use futures::AsyncReadExt;
|
||||
use gpui2::Executor;
|
||||
use gpui2::{serde_json, AppContext};
|
||||
use isahc::http::StatusCode;
|
||||
use isahc::prelude::Configurable;
|
||||
use isahc::{AsyncBody, Response};
|
||||
use lazy_static::lazy_static;
|
||||
use parking_lot::{Mutex, RwLock};
|
||||
use parse_duration::parse;
|
||||
use postage::watch;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::env;
|
||||
use std::ops::Add;
|
||||
use std::sync::Arc;
|
||||
use std::time::{Duration, Instant};
|
||||
use tiktoken_rs::{cl100k_base, CoreBPE};
|
||||
use util::http::{HttpClient, Request};
|
||||
use util::ResultExt;
|
||||
|
||||
use crate::auth::{CredentialProvider, ProviderCredential};
|
||||
use crate::embedding::{Embedding, EmbeddingProvider};
|
||||
use crate::models::LanguageModel;
|
||||
use crate::providers::open_ai::OpenAILanguageModel;
|
||||
|
||||
use crate::providers::open_ai::OPENAI_API_URL;
|
||||
|
||||
lazy_static! {
    // Shared cl100k_base tokenizer for OpenAI token counting.
    static ref OPENAI_BPE_TOKENIZER: CoreBPE = cl100k_base().unwrap();
}

// Embedding provider backed by the OpenAI embeddings API, with client-side
// rate-limit tracking.
#[derive(Clone)]
pub struct OpenAIEmbeddingProvider {
    model: OpenAILanguageModel,
    // Shared, mutable credential state; populated by `retrieve_credentials`.
    credential: Arc<RwLock<ProviderCredential>>,
    pub client: Arc<dyn HttpClient>,
    pub executor: Arc<Executor>,
    // Instant at which the current rate limit (if any) is expected to lift.
    rate_limit_count_rx: watch::Receiver<Option<Instant>>,
    rate_limit_count_tx: Arc<Mutex<watch::Sender<Option<Instant>>>>,
}

// Request body for the embeddings endpoint.
#[derive(Serialize)]
struct OpenAIEmbeddingRequest<'a> {
    model: &'static str,
    input: Vec<&'a str>,
}

// Response body from the embeddings endpoint.
#[derive(Deserialize)]
struct OpenAIEmbeddingResponse {
    data: Vec<OpenAIEmbedding>,
    usage: OpenAIEmbeddingUsage,
}

// One embedding vector within the response.
#[derive(Debug, Deserialize)]
struct OpenAIEmbedding {
    embedding: Vec<f32>,
    index: usize,
    object: String,
}

// Token accounting reported by the embeddings endpoint.
#[derive(Deserialize)]
struct OpenAIEmbeddingUsage {
    prompt_tokens: usize,
    total_tokens: usize,
}
|
||||
|
||||
impl OpenAIEmbeddingProvider {
|
||||
pub fn new(client: Arc<dyn HttpClient>, executor: Arc<Executor>) -> Self {
|
||||
let (rate_limit_count_tx, rate_limit_count_rx) = watch::channel_with(None);
|
||||
let rate_limit_count_tx = Arc::new(Mutex::new(rate_limit_count_tx));
|
||||
|
||||
let model = OpenAILanguageModel::load("text-embedding-ada-002");
|
||||
let credential = Arc::new(RwLock::new(ProviderCredential::NoCredentials));
|
||||
|
||||
OpenAIEmbeddingProvider {
|
||||
model,
|
||||
credential,
|
||||
client,
|
||||
executor,
|
||||
rate_limit_count_rx,
|
||||
rate_limit_count_tx,
|
||||
}
|
||||
}
|
||||
|
||||
fn get_api_key(&self) -> Result<String> {
|
||||
match self.credential.read().clone() {
|
||||
ProviderCredential::Credentials { api_key } => Ok(api_key),
|
||||
_ => Err(anyhow!("api credentials not provided")),
|
||||
}
|
||||
}
|
||||
|
||||
fn resolve_rate_limit(&self) {
|
||||
let reset_time = *self.rate_limit_count_tx.lock().borrow();
|
||||
|
||||
if let Some(reset_time) = reset_time {
|
||||
if Instant::now() >= reset_time {
|
||||
*self.rate_limit_count_tx.lock().borrow_mut() = None
|
||||
}
|
||||
}
|
||||
|
||||
log::trace!(
|
||||
"resolving reset time: {:?}",
|
||||
*self.rate_limit_count_tx.lock().borrow()
|
||||
);
|
||||
}
|
||||
|
||||
fn update_reset_time(&self, reset_time: Instant) {
|
||||
let original_time = *self.rate_limit_count_tx.lock().borrow();
|
||||
|
||||
let updated_time = if let Some(original_time) = original_time {
|
||||
if reset_time < original_time {
|
||||
Some(reset_time)
|
||||
} else {
|
||||
Some(original_time)
|
||||
}
|
||||
} else {
|
||||
Some(reset_time)
|
||||
};
|
||||
|
||||
log::trace!("updating rate limit time: {:?}", updated_time);
|
||||
|
||||
*self.rate_limit_count_tx.lock().borrow_mut() = updated_time;
|
||||
}
|
||||
async fn send_request(
|
||||
&self,
|
||||
api_key: &str,
|
||||
spans: Vec<&str>,
|
||||
request_timeout: u64,
|
||||
) -> Result<Response<AsyncBody>> {
|
||||
let request = Request::post("https://api.openai.com/v1/embeddings")
|
||||
.redirect_policy(isahc::config::RedirectPolicy::Follow)
|
||||
.timeout(Duration::from_secs(request_timeout))
|
||||
.header("Content-Type", "application/json")
|
||||
.header("Authorization", format!("Bearer {}", api_key))
|
||||
.body(
|
||||
serde_json::to_string(&OpenAIEmbeddingRequest {
|
||||
input: spans.clone(),
|
||||
model: "text-embedding-ada-002",
|
||||
})
|
||||
.unwrap()
|
||||
.into(),
|
||||
)?;
|
||||
|
||||
Ok(self.client.send(request).await?)
|
||||
}
|
||||
}
|
||||
|
||||
#[async_trait]
impl CredentialProvider for OpenAIEmbeddingProvider {
    // True once real credentials have been retrieved or saved.
    fn has_credentials(&self) -> bool {
        match *self.credential.read() {
            ProviderCredential::Credentials { .. } => true,
            _ => false,
        }
    }

    /// Resolves credentials in priority order: the in-memory cache, the
    /// `OPENAI_API_KEY` environment variable, then the platform credential
    /// store. The result (including `NoCredentials`) is cached for reuse.
    /// NOTE(review): duplicated verbatim from the completion provider —
    /// consider extracting a shared helper.
    async fn retrieve_credentials(&self, cx: &mut AppContext) -> ProviderCredential {
        let existing_credential = self.credential.read().clone();

        let retrieved_credential = cx
            .run_on_main(move |cx| match existing_credential {
                ProviderCredential::Credentials { .. } => {
                    return existing_credential.clone();
                }
                _ => {
                    if let Some(api_key) = env::var("OPENAI_API_KEY").log_err() {
                        return ProviderCredential::Credentials { api_key };
                    }

                    if let Some(Some((_, api_key))) = cx.read_credentials(OPENAI_API_URL).log_err()
                    {
                        // Stored credentials must be valid UTF-8.
                        if let Some(api_key) = String::from_utf8(api_key).log_err() {
                            return ProviderCredential::Credentials { api_key };
                        } else {
                            return ProviderCredential::NoCredentials;
                        }
                    } else {
                        return ProviderCredential::NoCredentials;
                    }
                }
            })
            .await;

        *self.credential.write() = retrieved_credential.clone();
        retrieved_credential
    }

    /// Caches `credential` in memory and, for real credentials, persists it
    /// to the platform credential store keyed by the API URL.
    async fn save_credentials(&self, cx: &mut AppContext, credential: ProviderCredential) {
        *self.credential.write() = credential.clone();
        let credential = credential.clone();
        cx.run_on_main(move |cx| match credential {
            ProviderCredential::Credentials { api_key } => {
                cx.write_credentials(OPENAI_API_URL, "Bearer", api_key.as_bytes())
                    .log_err();
            }
            _ => {}
        })
        .await;
    }

    /// Removes the stored credential and clears the in-memory cache.
    async fn delete_credentials(&self, cx: &mut AppContext) {
        cx.run_on_main(move |cx| cx.delete_credentials(OPENAI_API_URL).log_err())
            .await;
        *self.credential.write() = ProviderCredential::NoCredentials;
    }
}
|
||||
|
||||
#[async_trait]
impl EmbeddingProvider for OpenAIEmbeddingProvider {
    // The language model used for token counting and truncation.
    fn base_model(&self) -> Box<dyn LanguageModel> {
        let model: Box<dyn LanguageModel> = Box::new(self.model.clone());
        model
    }

    // Upper bound on the combined token count of one `embed_batch` call.
    fn max_tokens_per_batch(&self) -> usize {
        50000
    }

    // When rate limited, the instant at which the limit is expected to lift.
    fn rate_limit_expiration(&self) -> Option<Instant> {
        *self.rate_limit_count_rx.borrow()
    }

    /// Embeds `spans`, retrying up to `MAX_RETRIES` times: growing timeouts
    /// on 408s and header-driven (or fixed) backoff on 429s. Any other
    /// non-OK status fails immediately.
    async fn embed_batch(&self, spans: Vec<String>) -> Result<Vec<Embedding>> {
        // Fallback backoff per attempt; indexed by (request_number - 1).
        const BACKOFF_SECONDS: [usize; 4] = [3, 5, 15, 45];
        const MAX_RETRIES: usize = 4;

        let api_key = self.get_api_key()?;

        let mut request_number = 0;
        let mut rate_limiting = false;
        let mut request_timeout: u64 = 15;
        let mut response: Response<AsyncBody>;
        while request_number < MAX_RETRIES {
            response = self
                .send_request(
                    &api_key,
                    spans.iter().map(|x| &**x).collect(),
                    request_timeout,
                )
                .await?;

            request_number += 1;

            match response.status() {
                StatusCode::REQUEST_TIMEOUT => {
                    // Give slow requests a little longer on the next attempt.
                    request_timeout += 5;
                }
                StatusCode::OK => {
                    let mut body = String::new();
                    response.body_mut().read_to_string(&mut body).await?;
                    let response: OpenAIEmbeddingResponse = serde_json::from_str(&body)?;

                    log::trace!(
                        "openai embedding completed. tokens: {:?}",
                        response.usage.total_tokens
                    );

                    // If we complete a request successfully that was previously rate_limited
                    // resolve the rate limit
                    if rate_limiting {
                        self.resolve_rate_limit()
                    }

                    return Ok(response
                        .data
                        .into_iter()
                        .map(|embedding| Embedding::from(embedding.embedding))
                        .collect());
                }
                StatusCode::TOO_MANY_REQUESTS => {
                    rate_limiting = true;
                    let mut body = String::new();
                    response.body_mut().read_to_string(&mut body).await?;

                    // Prefer the server-provided reset interval; fall back to
                    // the fixed backoff schedule when absent or unparsable.
                    let delay_duration = {
                        let delay = Duration::from_secs(BACKOFF_SECONDS[request_number - 1] as u64);
                        if let Some(time_to_reset) =
                            response.headers().get("x-ratelimit-reset-tokens")
                        {
                            if let Ok(time_str) = time_to_reset.to_str() {
                                parse(time_str).unwrap_or(delay)
                            } else {
                                delay
                            }
                        } else {
                            delay
                        }
                    };

                    // If we've previously rate limited, increment the duration but not the count
                    let reset_time = Instant::now().add(delay_duration);
                    self.update_reset_time(reset_time);

                    log::trace!(
                        "openai rate limiting: waiting {:?} until lifted",
                        &delay_duration
                    );

                    // Block this task until the backoff elapses, then retry.
                    self.executor.timer(delay_duration).await;
                }
                _ => {
                    let mut body = String::new();
                    response.body_mut().read_to_string(&mut body).await?;
                    return Err(anyhow!(
                        "open ai bad request: {:?} {:?}",
                        &response.status(),
                        body
                    ));
                }
            }
        }
        Err(anyhow!("openai max retries"))
    }
}
|
||||
9
crates/ai2/src/providers/open_ai/mod.rs
Normal file
9
crates/ai2/src/providers/open_ai/mod.rs
Normal file
@@ -0,0 +1,9 @@
|
||||
pub mod completion;
|
||||
pub mod embedding;
|
||||
pub mod model;
|
||||
|
||||
pub use completion::*;
|
||||
pub use embedding::*;
|
||||
pub use model::OpenAILanguageModel;
|
||||
|
||||
pub const OPENAI_API_URL: &'static str = "https://api.openai.com/v1";
|
||||
57
crates/ai2/src/providers/open_ai/model.rs
Normal file
57
crates/ai2/src/providers/open_ai/model.rs
Normal file
@@ -0,0 +1,57 @@
|
||||
use anyhow::anyhow;
|
||||
use tiktoken_rs::CoreBPE;
|
||||
use util::ResultExt;
|
||||
|
||||
use crate::models::{LanguageModel, TruncationDirection};
|
||||
|
||||
#[derive(Clone)]
|
||||
pub struct OpenAILanguageModel {
|
||||
name: String,
|
||||
bpe: Option<CoreBPE>,
|
||||
}
|
||||
|
||||
impl OpenAILanguageModel {
|
||||
pub fn load(model_name: &str) -> Self {
|
||||
let bpe = tiktoken_rs::get_bpe_from_model(model_name).log_err();
|
||||
OpenAILanguageModel {
|
||||
name: model_name.to_string(),
|
||||
bpe,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl LanguageModel for OpenAILanguageModel {
|
||||
fn name(&self) -> String {
|
||||
self.name.clone()
|
||||
}
|
||||
fn count_tokens(&self, content: &str) -> anyhow::Result<usize> {
|
||||
if let Some(bpe) = &self.bpe {
|
||||
anyhow::Ok(bpe.encode_with_special_tokens(content).len())
|
||||
} else {
|
||||
Err(anyhow!("bpe for open ai model was not retrieved"))
|
||||
}
|
||||
}
|
||||
fn truncate(
|
||||
&self,
|
||||
content: &str,
|
||||
length: usize,
|
||||
direction: TruncationDirection,
|
||||
) -> anyhow::Result<String> {
|
||||
if let Some(bpe) = &self.bpe {
|
||||
let tokens = bpe.encode_with_special_tokens(content);
|
||||
if tokens.len() > length {
|
||||
match direction {
|
||||
TruncationDirection::End => bpe.decode(tokens[..length].to_vec()),
|
||||
TruncationDirection::Start => bpe.decode(tokens[length..].to_vec()),
|
||||
}
|
||||
} else {
|
||||
bpe.decode(tokens)
|
||||
}
|
||||
} else {
|
||||
Err(anyhow!("bpe for open ai model was not retrieved"))
|
||||
}
|
||||
}
|
||||
fn capacity(&self) -> anyhow::Result<usize> {
|
||||
anyhow::Ok(tiktoken_rs::model::get_context_size(&self.name))
|
||||
}
|
||||
}
|
||||
11
crates/ai2/src/providers/open_ai/new.rs
Normal file
11
crates/ai2/src/providers/open_ai/new.rs
Normal file
@@ -0,0 +1,11 @@
|
||||
pub trait LanguageModel {
|
||||
fn name(&self) -> String;
|
||||
fn count_tokens(&self, content: &str) -> anyhow::Result<usize>;
|
||||
fn truncate(
|
||||
&self,
|
||||
content: &str,
|
||||
length: usize,
|
||||
direction: TruncationDirection,
|
||||
) -> anyhow::Result<String>;
|
||||
fn capacity(&self) -> anyhow::Result<usize>;
|
||||
}
|
||||
193
crates/ai2/src/test.rs
Normal file
193
crates/ai2/src/test.rs
Normal file
@@ -0,0 +1,193 @@
|
||||
use std::{
|
||||
sync::atomic::{self, AtomicUsize, Ordering},
|
||||
time::Instant,
|
||||
};
|
||||
|
||||
use async_trait::async_trait;
|
||||
use futures::{channel::mpsc, future::BoxFuture, stream::BoxStream, FutureExt, StreamExt};
|
||||
use gpui2::AppContext;
|
||||
use parking_lot::Mutex;
|
||||
|
||||
use crate::{
|
||||
auth::{CredentialProvider, ProviderCredential},
|
||||
completion::{CompletionProvider, CompletionRequest},
|
||||
embedding::{Embedding, EmbeddingProvider},
|
||||
models::{LanguageModel, TruncationDirection},
|
||||
};
|
||||
|
||||
#[derive(Clone)]
|
||||
pub struct FakeLanguageModel {
|
||||
pub capacity: usize,
|
||||
}
|
||||
|
||||
impl LanguageModel for FakeLanguageModel {
|
||||
fn name(&self) -> String {
|
||||
"dummy".to_string()
|
||||
}
|
||||
fn count_tokens(&self, content: &str) -> anyhow::Result<usize> {
|
||||
anyhow::Ok(content.chars().collect::<Vec<char>>().len())
|
||||
}
|
||||
fn truncate(
|
||||
&self,
|
||||
content: &str,
|
||||
length: usize,
|
||||
direction: TruncationDirection,
|
||||
) -> anyhow::Result<String> {
|
||||
println!("TRYING TO TRUNCATE: {:?}", length.clone());
|
||||
|
||||
if length > self.count_tokens(content)? {
|
||||
println!("NOT TRUNCATING");
|
||||
return anyhow::Ok(content.to_string());
|
||||
}
|
||||
|
||||
anyhow::Ok(match direction {
|
||||
TruncationDirection::End => content.chars().collect::<Vec<char>>()[..length]
|
||||
.into_iter()
|
||||
.collect::<String>(),
|
||||
TruncationDirection::Start => content.chars().collect::<Vec<char>>()[length..]
|
||||
.into_iter()
|
||||
.collect::<String>(),
|
||||
})
|
||||
}
|
||||
fn capacity(&self) -> anyhow::Result<usize> {
|
||||
anyhow::Ok(self.capacity)
|
||||
}
|
||||
}
|
||||
|
||||
pub struct FakeEmbeddingProvider {
|
||||
pub embedding_count: AtomicUsize,
|
||||
}
|
||||
|
||||
impl Clone for FakeEmbeddingProvider {
|
||||
fn clone(&self) -> Self {
|
||||
FakeEmbeddingProvider {
|
||||
embedding_count: AtomicUsize::new(self.embedding_count.load(Ordering::SeqCst)),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Default for FakeEmbeddingProvider {
|
||||
fn default() -> Self {
|
||||
FakeEmbeddingProvider {
|
||||
embedding_count: AtomicUsize::default(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl FakeEmbeddingProvider {
|
||||
pub fn embedding_count(&self) -> usize {
|
||||
self.embedding_count.load(atomic::Ordering::SeqCst)
|
||||
}
|
||||
|
||||
pub fn embed_sync(&self, span: &str) -> Embedding {
|
||||
let mut result = vec![1.0; 26];
|
||||
for letter in span.chars() {
|
||||
let letter = letter.to_ascii_lowercase();
|
||||
if letter as u32 >= 'a' as u32 {
|
||||
let ix = (letter as u32) - ('a' as u32);
|
||||
if ix < 26 {
|
||||
result[ix as usize] += 1.0;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
let norm = result.iter().map(|x| x * x).sum::<f32>().sqrt();
|
||||
for x in &mut result {
|
||||
*x /= norm;
|
||||
}
|
||||
|
||||
result.into()
|
||||
}
|
||||
}
|
||||
|
||||
#[async_trait]
|
||||
impl CredentialProvider for FakeEmbeddingProvider {
|
||||
fn has_credentials(&self) -> bool {
|
||||
true
|
||||
}
|
||||
async fn retrieve_credentials(&self, _cx: &mut AppContext) -> ProviderCredential {
|
||||
ProviderCredential::NotNeeded
|
||||
}
|
||||
async fn save_credentials(&self, _cx: &mut AppContext, _credential: ProviderCredential) {}
|
||||
async fn delete_credentials(&self, _cx: &mut AppContext) {}
|
||||
}
|
||||
|
||||
#[async_trait]
|
||||
impl EmbeddingProvider for FakeEmbeddingProvider {
|
||||
fn base_model(&self) -> Box<dyn LanguageModel> {
|
||||
Box::new(FakeLanguageModel { capacity: 1000 })
|
||||
}
|
||||
fn max_tokens_per_batch(&self) -> usize {
|
||||
1000
|
||||
}
|
||||
|
||||
fn rate_limit_expiration(&self) -> Option<Instant> {
|
||||
None
|
||||
}
|
||||
|
||||
async fn embed_batch(&self, spans: Vec<String>) -> anyhow::Result<Vec<Embedding>> {
|
||||
self.embedding_count
|
||||
.fetch_add(spans.len(), atomic::Ordering::SeqCst);
|
||||
|
||||
anyhow::Ok(spans.iter().map(|span| self.embed_sync(span)).collect())
|
||||
}
|
||||
}
|
||||
|
||||
pub struct FakeCompletionProvider {
|
||||
last_completion_tx: Mutex<Option<mpsc::Sender<String>>>,
|
||||
}
|
||||
|
||||
impl Clone for FakeCompletionProvider {
|
||||
fn clone(&self) -> Self {
|
||||
Self {
|
||||
last_completion_tx: Mutex::new(None),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl FakeCompletionProvider {
|
||||
pub fn new() -> Self {
|
||||
Self {
|
||||
last_completion_tx: Mutex::new(None),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn send_completion(&self, completion: impl Into<String>) {
|
||||
let mut tx = self.last_completion_tx.lock();
|
||||
tx.as_mut().unwrap().try_send(completion.into()).unwrap();
|
||||
}
|
||||
|
||||
pub fn finish_completion(&self) {
|
||||
self.last_completion_tx.lock().take().unwrap();
|
||||
}
|
||||
}
|
||||
|
||||
#[async_trait]
|
||||
impl CredentialProvider for FakeCompletionProvider {
|
||||
fn has_credentials(&self) -> bool {
|
||||
true
|
||||
}
|
||||
async fn retrieve_credentials(&self, _cx: &mut AppContext) -> ProviderCredential {
|
||||
ProviderCredential::NotNeeded
|
||||
}
|
||||
async fn save_credentials(&self, _cx: &mut AppContext, _credential: ProviderCredential) {}
|
||||
async fn delete_credentials(&self, _cx: &mut AppContext) {}
|
||||
}
|
||||
|
||||
impl CompletionProvider for FakeCompletionProvider {
|
||||
fn base_model(&self) -> Box<dyn LanguageModel> {
|
||||
let model: Box<dyn LanguageModel> = Box::new(FakeLanguageModel { capacity: 8190 });
|
||||
model
|
||||
}
|
||||
fn complete(
|
||||
&self,
|
||||
_prompt: Box<dyn CompletionRequest>,
|
||||
) -> BoxFuture<'static, anyhow::Result<BoxStream<'static, anyhow::Result<String>>>> {
|
||||
let (tx, rx) = mpsc::channel(1);
|
||||
*self.last_completion_tx.lock() = Some(tx);
|
||||
async move { Ok(rx.map(|rx| Ok(rx)).boxed()) }.boxed()
|
||||
}
|
||||
fn box_clone(&self) -> Box<dyn CompletionProvider> {
|
||||
Box::new((*self).clone())
|
||||
}
|
||||
}
|
||||
53
crates/assistant/Cargo.toml
Normal file
53
crates/assistant/Cargo.toml
Normal file
@@ -0,0 +1,53 @@
|
||||
[package]
|
||||
name = "assistant"
|
||||
version = "0.1.0"
|
||||
edition = "2021"
|
||||
publish = false
|
||||
|
||||
[lib]
|
||||
path = "src/assistant.rs"
|
||||
doctest = false
|
||||
|
||||
[dependencies]
|
||||
ai = { path = "../ai" }
|
||||
client = { path = "../client" }
|
||||
collections = { path = "../collections"}
|
||||
editor = { path = "../editor" }
|
||||
fs = { path = "../fs" }
|
||||
gpui = { path = "../gpui" }
|
||||
language = { path = "../language" }
|
||||
menu = { path = "../menu" }
|
||||
multi_buffer = { path = "../multi_buffer" }
|
||||
search = { path = "../search" }
|
||||
settings = { path = "../settings" }
|
||||
theme = { path = "../theme" }
|
||||
util = { path = "../util" }
|
||||
workspace = { path = "../workspace" }
|
||||
semantic_index = { path = "../semantic_index" }
|
||||
project = { path = "../project" }
|
||||
|
||||
uuid.workspace = true
|
||||
log.workspace = true
|
||||
anyhow.workspace = true
|
||||
chrono = { version = "0.4", features = ["serde"] }
|
||||
futures.workspace = true
|
||||
indoc.workspace = true
|
||||
isahc.workspace = true
|
||||
ordered-float.workspace = true
|
||||
parking_lot.workspace = true
|
||||
regex.workspace = true
|
||||
schemars.workspace = true
|
||||
serde.workspace = true
|
||||
serde_json.workspace = true
|
||||
smol.workspace = true
|
||||
tiktoken-rs = "0.5"
|
||||
|
||||
[dev-dependencies]
|
||||
editor = { path = "../editor", features = ["test-support"] }
|
||||
project = { path = "../project", features = ["test-support"] }
|
||||
ai = { path = "../ai", features = ["test-support"]}
|
||||
|
||||
ctor.workspace = true
|
||||
env_logger.workspace = true
|
||||
log.workspace = true
|
||||
rand.workspace = true
|
||||
113
crates/assistant/src/assistant.rs
Normal file
113
crates/assistant/src/assistant.rs
Normal file
@@ -0,0 +1,113 @@
|
||||
pub mod assistant_panel;
|
||||
mod assistant_settings;
|
||||
mod codegen;
|
||||
mod prompts;
|
||||
mod streaming_diff;
|
||||
|
||||
use ai::providers::open_ai::Role;
|
||||
use anyhow::Result;
|
||||
pub use assistant_panel::AssistantPanel;
|
||||
use assistant_settings::OpenAIModel;
|
||||
use chrono::{DateTime, Local};
|
||||
use collections::HashMap;
|
||||
use fs::Fs;
|
||||
use futures::StreamExt;
|
||||
use gpui::AppContext;
|
||||
use regex::Regex;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::{cmp::Reverse, ffi::OsStr, path::PathBuf, sync::Arc};
|
||||
use util::paths::CONVERSATIONS_DIR;
|
||||
|
||||
#[derive(
|
||||
Copy, Clone, Debug, Default, Eq, PartialEq, PartialOrd, Ord, Hash, Serialize, Deserialize,
|
||||
)]
|
||||
struct MessageId(usize);
|
||||
|
||||
#[derive(Clone, Debug, Serialize, Deserialize)]
|
||||
struct MessageMetadata {
|
||||
role: Role,
|
||||
sent_at: DateTime<Local>,
|
||||
status: MessageStatus,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, Serialize, Deserialize)]
|
||||
enum MessageStatus {
|
||||
Pending,
|
||||
Done,
|
||||
Error(Arc<str>),
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize)]
|
||||
struct SavedMessage {
|
||||
id: MessageId,
|
||||
start: usize,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize)]
|
||||
struct SavedConversation {
|
||||
id: Option<String>,
|
||||
zed: String,
|
||||
version: String,
|
||||
text: String,
|
||||
messages: Vec<SavedMessage>,
|
||||
message_metadata: HashMap<MessageId, MessageMetadata>,
|
||||
summary: String,
|
||||
model: OpenAIModel,
|
||||
}
|
||||
|
||||
impl SavedConversation {
|
||||
const VERSION: &'static str = "0.1.0";
|
||||
}
|
||||
|
||||
struct SavedConversationMetadata {
|
||||
title: String,
|
||||
path: PathBuf,
|
||||
mtime: chrono::DateTime<chrono::Local>,
|
||||
}
|
||||
|
||||
impl SavedConversationMetadata {
|
||||
pub async fn list(fs: Arc<dyn Fs>) -> Result<Vec<Self>> {
|
||||
fs.create_dir(&CONVERSATIONS_DIR).await?;
|
||||
|
||||
let mut paths = fs.read_dir(&CONVERSATIONS_DIR).await?;
|
||||
let mut conversations = Vec::<SavedConversationMetadata>::new();
|
||||
while let Some(path) = paths.next().await {
|
||||
let path = path?;
|
||||
if path.extension() != Some(OsStr::new("json")) {
|
||||
continue;
|
||||
}
|
||||
|
||||
let pattern = r" - \d+.zed.json$";
|
||||
let re = Regex::new(pattern).unwrap();
|
||||
|
||||
let metadata = fs.metadata(&path).await?;
|
||||
if let Some((file_name, metadata)) = path
|
||||
.file_name()
|
||||
.and_then(|name| name.to_str())
|
||||
.zip(metadata)
|
||||
{
|
||||
let title = re.replace(file_name, "");
|
||||
conversations.push(Self {
|
||||
title: title.into_owned(),
|
||||
path,
|
||||
mtime: metadata.mtime.into(),
|
||||
});
|
||||
}
|
||||
}
|
||||
conversations.sort_unstable_by_key(|conversation| Reverse(conversation.mtime));
|
||||
|
||||
Ok(conversations)
|
||||
}
|
||||
}
|
||||
|
||||
pub fn init(cx: &mut AppContext) {
|
||||
assistant_panel::init(cx);
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
#[ctor::ctor]
|
||||
fn init_logger() {
|
||||
if std::env::var("RUST_LOG").is_ok() {
|
||||
env_logger::init();
|
||||
}
|
||||
}
|
||||
File diff suppressed because it is too large
Load Diff
@@ -1,59 +1,13 @@
|
||||
use crate::{
|
||||
stream_completion,
|
||||
streaming_diff::{Hunk, StreamingDiff},
|
||||
OpenAIRequest,
|
||||
};
|
||||
use crate::streaming_diff::{Hunk, StreamingDiff};
|
||||
use ai::completion::{CompletionProvider, CompletionRequest};
|
||||
use anyhow::Result;
|
||||
use editor::{
|
||||
multi_buffer, Anchor, AnchorRangeExt, MultiBuffer, MultiBufferSnapshot, ToOffset, ToPoint,
|
||||
};
|
||||
use futures::{
|
||||
channel::mpsc, future::BoxFuture, stream::BoxStream, FutureExt, SinkExt, Stream, StreamExt,
|
||||
};
|
||||
use gpui::{executor::Background, Entity, ModelContext, ModelHandle, Task};
|
||||
use editor::{Anchor, MultiBuffer, MultiBufferSnapshot, ToOffset, ToPoint};
|
||||
use futures::{channel::mpsc, SinkExt, Stream, StreamExt};
|
||||
use gpui::{Entity, ModelContext, ModelHandle, Task};
|
||||
use language::{Rope, TransactionId};
|
||||
use multi_buffer;
|
||||
use std::{cmp, future, ops::Range, sync::Arc};
|
||||
|
||||
pub trait CompletionProvider {
|
||||
fn complete(
|
||||
&self,
|
||||
prompt: OpenAIRequest,
|
||||
) -> BoxFuture<'static, Result<BoxStream<'static, Result<String>>>>;
|
||||
}
|
||||
|
||||
pub struct OpenAICompletionProvider {
|
||||
api_key: String,
|
||||
executor: Arc<Background>,
|
||||
}
|
||||
|
||||
impl OpenAICompletionProvider {
|
||||
pub fn new(api_key: String, executor: Arc<Background>) -> Self {
|
||||
Self { api_key, executor }
|
||||
}
|
||||
}
|
||||
|
||||
impl CompletionProvider for OpenAICompletionProvider {
|
||||
fn complete(
|
||||
&self,
|
||||
prompt: OpenAIRequest,
|
||||
) -> BoxFuture<'static, Result<BoxStream<'static, Result<String>>>> {
|
||||
let request = stream_completion(self.api_key.clone(), self.executor.clone(), prompt);
|
||||
async move {
|
||||
let response = request.await?;
|
||||
let stream = response
|
||||
.filter_map(|response| async move {
|
||||
match response {
|
||||
Ok(mut response) => Some(Ok(response.choices.pop()?.delta.content?)),
|
||||
Err(error) => Some(Err(error)),
|
||||
}
|
||||
})
|
||||
.boxed();
|
||||
Ok(stream)
|
||||
}
|
||||
.boxed()
|
||||
}
|
||||
}
|
||||
|
||||
pub enum Event {
|
||||
Finished,
|
||||
Undone,
|
||||
@@ -85,26 +39,11 @@ impl Entity for Codegen {
|
||||
impl Codegen {
|
||||
pub fn new(
|
||||
buffer: ModelHandle<MultiBuffer>,
|
||||
mut kind: CodegenKind,
|
||||
kind: CodegenKind,
|
||||
provider: Arc<dyn CompletionProvider>,
|
||||
cx: &mut ModelContext<Self>,
|
||||
) -> Self {
|
||||
let snapshot = buffer.read(cx).snapshot(cx);
|
||||
match &mut kind {
|
||||
CodegenKind::Transform { range } => {
|
||||
let mut point_range = range.to_point(&snapshot);
|
||||
point_range.start.column = 0;
|
||||
if point_range.end.column > 0 || point_range.start.row == point_range.end.row {
|
||||
point_range.end.column = snapshot.line_len(point_range.end.row);
|
||||
}
|
||||
range.start = snapshot.anchor_before(point_range.start);
|
||||
range.end = snapshot.anchor_after(point_range.end);
|
||||
}
|
||||
CodegenKind::Generate { position } => {
|
||||
*position = position.bias_right(&snapshot);
|
||||
}
|
||||
}
|
||||
|
||||
Self {
|
||||
provider,
|
||||
buffer: buffer.clone(),
|
||||
@@ -157,7 +96,7 @@ impl Codegen {
|
||||
self.error.as_ref()
|
||||
}
|
||||
|
||||
pub fn start(&mut self, prompt: OpenAIRequest, cx: &mut ModelContext<Self>) {
|
||||
pub fn start(&mut self, prompt: Box<dyn CompletionRequest>, cx: &mut ModelContext<Self>) {
|
||||
let range = self.range();
|
||||
let snapshot = self.snapshot.clone();
|
||||
let selected_text = snapshot
|
||||
@@ -179,7 +118,7 @@ impl Codegen {
|
||||
|
||||
let (mut hunks_tx, mut hunks_rx) = mpsc::channel(1);
|
||||
let diff = cx.background().spawn(async move {
|
||||
let chunks = strip_markdown_codeblock(response.await?);
|
||||
let chunks = strip_invalid_spans_from_codeblock(response.await?);
|
||||
futures::pin_mut!(chunks);
|
||||
let mut diff = StreamingDiff::new(selected_text.to_string());
|
||||
|
||||
@@ -340,12 +279,13 @@ impl Codegen {
|
||||
}
|
||||
}
|
||||
|
||||
fn strip_markdown_codeblock(
|
||||
fn strip_invalid_spans_from_codeblock(
|
||||
stream: impl Stream<Item = Result<String>>,
|
||||
) -> impl Stream<Item = Result<String>> {
|
||||
let mut first_line = true;
|
||||
let mut buffer = String::new();
|
||||
let mut starts_with_fenced_code_block = false;
|
||||
let mut starts_with_markdown_codeblock = false;
|
||||
let mut includes_start_or_end_span = false;
|
||||
stream.filter_map(move |chunk| {
|
||||
let chunk = match chunk {
|
||||
Ok(chunk) => chunk,
|
||||
@@ -353,11 +293,31 @@ fn strip_markdown_codeblock(
|
||||
};
|
||||
buffer.push_str(&chunk);
|
||||
|
||||
if buffer.len() > "<|S|".len() && buffer.starts_with("<|S|") {
|
||||
includes_start_or_end_span = true;
|
||||
|
||||
buffer = buffer
|
||||
.strip_prefix("<|S|>")
|
||||
.or_else(|| buffer.strip_prefix("<|S|"))
|
||||
.unwrap_or(&buffer)
|
||||
.to_string();
|
||||
} else if buffer.ends_with("|E|>") {
|
||||
includes_start_or_end_span = true;
|
||||
} else if buffer.starts_with("<|")
|
||||
|| buffer.starts_with("<|S")
|
||||
|| buffer.starts_with("<|S|")
|
||||
|| buffer.ends_with("|")
|
||||
|| buffer.ends_with("|E")
|
||||
|| buffer.ends_with("|E|")
|
||||
{
|
||||
return future::ready(None);
|
||||
}
|
||||
|
||||
if first_line {
|
||||
if buffer == "" || buffer == "`" || buffer == "``" {
|
||||
return future::ready(None);
|
||||
} else if buffer.starts_with("```") {
|
||||
starts_with_fenced_code_block = true;
|
||||
starts_with_markdown_codeblock = true;
|
||||
if let Some(newline_ix) = buffer.find('\n') {
|
||||
buffer.replace_range(..newline_ix + 1, "");
|
||||
first_line = false;
|
||||
@@ -367,16 +327,26 @@ fn strip_markdown_codeblock(
|
||||
}
|
||||
}
|
||||
|
||||
let text = if starts_with_fenced_code_block {
|
||||
buffer
|
||||
let mut text = buffer.to_string();
|
||||
if starts_with_markdown_codeblock {
|
||||
text = text
|
||||
.strip_suffix("\n```\n")
|
||||
.or_else(|| buffer.strip_suffix("\n```"))
|
||||
.or_else(|| buffer.strip_suffix("\n``"))
|
||||
.or_else(|| buffer.strip_suffix("\n`"))
|
||||
.or_else(|| buffer.strip_suffix('\n'))
|
||||
.unwrap_or(&buffer)
|
||||
} else {
|
||||
&buffer
|
||||
.or_else(|| text.strip_suffix("\n```"))
|
||||
.or_else(|| text.strip_suffix("\n``"))
|
||||
.or_else(|| text.strip_suffix("\n`"))
|
||||
.or_else(|| text.strip_suffix('\n'))
|
||||
.unwrap_or(&text)
|
||||
.to_string();
|
||||
}
|
||||
|
||||
if includes_start_or_end_span {
|
||||
text = text
|
||||
.strip_suffix("|E|>")
|
||||
.or_else(|| text.strip_suffix("E|>"))
|
||||
.or_else(|| text.strip_prefix("|>"))
|
||||
.or_else(|| text.strip_prefix(">"))
|
||||
.unwrap_or(&text)
|
||||
.to_string();
|
||||
};
|
||||
|
||||
if text.contains('\n') {
|
||||
@@ -389,6 +359,7 @@ fn strip_markdown_codeblock(
|
||||
} else {
|
||||
Some(Ok(buffer.clone()))
|
||||
};
|
||||
|
||||
buffer = remainder;
|
||||
future::ready(result)
|
||||
})
|
||||
@@ -397,14 +368,26 @@ fn strip_markdown_codeblock(
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
use futures::stream;
|
||||
use ai::test::FakeCompletionProvider;
|
||||
use futures::stream::{self};
|
||||
use gpui::{executor::Deterministic, TestAppContext};
|
||||
use indoc::indoc;
|
||||
use language::{language_settings, tree_sitter_rust, Buffer, Language, LanguageConfig, Point};
|
||||
use parking_lot::Mutex;
|
||||
use rand::prelude::*;
|
||||
use serde::Serialize;
|
||||
use settings::SettingsStore;
|
||||
|
||||
#[derive(Serialize)]
|
||||
pub struct DummyCompletionRequest {
|
||||
pub name: String,
|
||||
}
|
||||
|
||||
impl CompletionRequest for DummyCompletionRequest {
|
||||
fn data(&self) -> serde_json::Result<String> {
|
||||
serde_json::to_string(self)
|
||||
}
|
||||
}
|
||||
|
||||
#[gpui::test(iterations = 10)]
|
||||
async fn test_transform_autoindent(
|
||||
cx: &mut TestAppContext,
|
||||
@@ -427,9 +410,9 @@ mod tests {
|
||||
let buffer = cx.add_model(|cx| MultiBuffer::singleton(buffer, cx));
|
||||
let range = buffer.read_with(cx, |buffer, cx| {
|
||||
let snapshot = buffer.snapshot(cx);
|
||||
snapshot.anchor_before(Point::new(1, 4))..snapshot.anchor_after(Point::new(4, 4))
|
||||
snapshot.anchor_before(Point::new(1, 0))..snapshot.anchor_after(Point::new(4, 5))
|
||||
});
|
||||
let provider = Arc::new(TestCompletionProvider::new());
|
||||
let provider = Arc::new(FakeCompletionProvider::new());
|
||||
let codegen = cx.add_model(|cx| {
|
||||
Codegen::new(
|
||||
buffer.clone(),
|
||||
@@ -438,7 +421,11 @@ mod tests {
|
||||
cx,
|
||||
)
|
||||
});
|
||||
codegen.update(cx, |codegen, cx| codegen.start(Default::default(), cx));
|
||||
|
||||
let request = Box::new(DummyCompletionRequest {
|
||||
name: "test".to_string(),
|
||||
});
|
||||
codegen.update(cx, |codegen, cx| codegen.start(request, cx));
|
||||
|
||||
let mut new_text = concat!(
|
||||
" let mut x = 0;\n",
|
||||
@@ -491,7 +478,7 @@ mod tests {
|
||||
let snapshot = buffer.snapshot(cx);
|
||||
snapshot.anchor_before(Point::new(1, 6))
|
||||
});
|
||||
let provider = Arc::new(TestCompletionProvider::new());
|
||||
let provider = Arc::new(FakeCompletionProvider::new());
|
||||
let codegen = cx.add_model(|cx| {
|
||||
Codegen::new(
|
||||
buffer.clone(),
|
||||
@@ -500,7 +487,11 @@ mod tests {
|
||||
cx,
|
||||
)
|
||||
});
|
||||
codegen.update(cx, |codegen, cx| codegen.start(Default::default(), cx));
|
||||
|
||||
let request = Box::new(DummyCompletionRequest {
|
||||
name: "test".to_string(),
|
||||
});
|
||||
codegen.update(cx, |codegen, cx| codegen.start(request, cx));
|
||||
|
||||
let mut new_text = concat!(
|
||||
"t mut x = 0;\n",
|
||||
@@ -553,7 +544,7 @@ mod tests {
|
||||
let snapshot = buffer.snapshot(cx);
|
||||
snapshot.anchor_before(Point::new(1, 2))
|
||||
});
|
||||
let provider = Arc::new(TestCompletionProvider::new());
|
||||
let provider = Arc::new(FakeCompletionProvider::new());
|
||||
let codegen = cx.add_model(|cx| {
|
||||
Codegen::new(
|
||||
buffer.clone(),
|
||||
@@ -562,7 +553,11 @@ mod tests {
|
||||
cx,
|
||||
)
|
||||
});
|
||||
codegen.update(cx, |codegen, cx| codegen.start(Default::default(), cx));
|
||||
|
||||
let request = Box::new(DummyCompletionRequest {
|
||||
name: "test".to_string(),
|
||||
});
|
||||
codegen.update(cx, |codegen, cx| codegen.start(request, cx));
|
||||
|
||||
let mut new_text = concat!(
|
||||
"let mut x = 0;\n",
|
||||
@@ -595,50 +590,82 @@ mod tests {
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_strip_markdown_codeblock() {
|
||||
async fn test_strip_invalid_spans_from_codeblock() {
|
||||
assert_eq!(
|
||||
strip_markdown_codeblock(chunks("Lorem ipsum dolor", 2))
|
||||
strip_invalid_spans_from_codeblock(chunks("Lorem ipsum dolor", 2))
|
||||
.map(|chunk| chunk.unwrap())
|
||||
.collect::<String>()
|
||||
.await,
|
||||
"Lorem ipsum dolor"
|
||||
);
|
||||
assert_eq!(
|
||||
strip_markdown_codeblock(chunks("```\nLorem ipsum dolor", 2))
|
||||
strip_invalid_spans_from_codeblock(chunks("```\nLorem ipsum dolor", 2))
|
||||
.map(|chunk| chunk.unwrap())
|
||||
.collect::<String>()
|
||||
.await,
|
||||
"Lorem ipsum dolor"
|
||||
);
|
||||
assert_eq!(
|
||||
strip_markdown_codeblock(chunks("```\nLorem ipsum dolor\n```", 2))
|
||||
strip_invalid_spans_from_codeblock(chunks("```\nLorem ipsum dolor\n```", 2))
|
||||
.map(|chunk| chunk.unwrap())
|
||||
.collect::<String>()
|
||||
.await,
|
||||
"Lorem ipsum dolor"
|
||||
);
|
||||
assert_eq!(
|
||||
strip_markdown_codeblock(chunks("```\nLorem ipsum dolor\n```\n", 2))
|
||||
strip_invalid_spans_from_codeblock(chunks("```\nLorem ipsum dolor\n```\n", 2))
|
||||
.map(|chunk| chunk.unwrap())
|
||||
.collect::<String>()
|
||||
.await,
|
||||
"Lorem ipsum dolor"
|
||||
);
|
||||
assert_eq!(
|
||||
strip_markdown_codeblock(chunks("```html\n```js\nLorem ipsum dolor\n```\n```", 2))
|
||||
.map(|chunk| chunk.unwrap())
|
||||
.collect::<String>()
|
||||
.await,
|
||||
strip_invalid_spans_from_codeblock(chunks(
|
||||
"```html\n```js\nLorem ipsum dolor\n```\n```",
|
||||
2
|
||||
))
|
||||
.map(|chunk| chunk.unwrap())
|
||||
.collect::<String>()
|
||||
.await,
|
||||
"```js\nLorem ipsum dolor\n```"
|
||||
);
|
||||
assert_eq!(
|
||||
strip_markdown_codeblock(chunks("``\nLorem ipsum dolor\n```", 2))
|
||||
strip_invalid_spans_from_codeblock(chunks("``\nLorem ipsum dolor\n```", 2))
|
||||
.map(|chunk| chunk.unwrap())
|
||||
.collect::<String>()
|
||||
.await,
|
||||
"``\nLorem ipsum dolor\n```"
|
||||
);
|
||||
assert_eq!(
|
||||
strip_invalid_spans_from_codeblock(chunks("<|S|Lorem ipsum|E|>", 2))
|
||||
.map(|chunk| chunk.unwrap())
|
||||
.collect::<String>()
|
||||
.await,
|
||||
"Lorem ipsum"
|
||||
);
|
||||
|
||||
assert_eq!(
|
||||
strip_invalid_spans_from_codeblock(chunks("<|S|>Lorem ipsum", 2))
|
||||
.map(|chunk| chunk.unwrap())
|
||||
.collect::<String>()
|
||||
.await,
|
||||
"Lorem ipsum"
|
||||
);
|
||||
|
||||
assert_eq!(
|
||||
strip_invalid_spans_from_codeblock(chunks("```\n<|S|>Lorem ipsum\n```", 2))
|
||||
.map(|chunk| chunk.unwrap())
|
||||
.collect::<String>()
|
||||
.await,
|
||||
"Lorem ipsum"
|
||||
);
|
||||
assert_eq!(
|
||||
strip_invalid_spans_from_codeblock(chunks("```\n<|S|Lorem ipsum|E|>\n```", 2))
|
||||
.map(|chunk| chunk.unwrap())
|
||||
.collect::<String>()
|
||||
.await,
|
||||
"Lorem ipsum"
|
||||
);
|
||||
fn chunks(text: &str, size: usize) -> impl Stream<Item = Result<String>> {
|
||||
stream::iter(
|
||||
text.chars()
|
||||
@@ -650,38 +677,6 @@ mod tests {
|
||||
}
|
||||
}
|
||||
|
||||
struct TestCompletionProvider {
|
||||
last_completion_tx: Mutex<Option<mpsc::Sender<String>>>,
|
||||
}
|
||||
|
||||
impl TestCompletionProvider {
|
||||
fn new() -> Self {
|
||||
Self {
|
||||
last_completion_tx: Mutex::new(None),
|
||||
}
|
||||
}
|
||||
|
||||
fn send_completion(&self, completion: impl Into<String>) {
|
||||
let mut tx = self.last_completion_tx.lock();
|
||||
tx.as_mut().unwrap().try_send(completion.into()).unwrap();
|
||||
}
|
||||
|
||||
fn finish_completion(&self) {
|
||||
self.last_completion_tx.lock().take().unwrap();
|
||||
}
|
||||
}
|
||||
|
||||
impl CompletionProvider for TestCompletionProvider {
|
||||
fn complete(
|
||||
&self,
|
||||
_prompt: OpenAIRequest,
|
||||
) -> BoxFuture<'static, Result<BoxStream<'static, Result<String>>>> {
|
||||
let (tx, rx) = mpsc::channel(1);
|
||||
*self.last_completion_tx.lock() = Some(tx);
|
||||
async move { Ok(rx.map(|rx| Ok(rx)).boxed()) }.boxed()
|
||||
}
|
||||
}
|
||||
|
||||
fn rust_lang() -> Language {
|
||||
Language::new(
|
||||
LanguageConfig {
|
||||
388
crates/assistant/src/prompts.rs
Normal file
388
crates/assistant/src/prompts.rs
Normal file
@@ -0,0 +1,388 @@
|
||||
use ai::models::LanguageModel;
|
||||
use ai::prompts::base::{PromptArguments, PromptChain, PromptPriority, PromptTemplate};
|
||||
use ai::prompts::file_context::FileContext;
|
||||
use ai::prompts::generate::GenerateInlineContent;
|
||||
use ai::prompts::preamble::EngineerPreamble;
|
||||
use ai::prompts::repository_context::{PromptCodeSnippet, RepositoryContext};
|
||||
use ai::providers::open_ai::OpenAILanguageModel;
|
||||
use language::{BufferSnapshot, OffsetRangeExt, ToOffset};
|
||||
use std::cmp::{self, Reverse};
|
||||
use std::ops::Range;
|
||||
use std::sync::Arc;
|
||||
|
||||
#[allow(dead_code)]
|
||||
fn summarize(buffer: &BufferSnapshot, selected_range: Range<impl ToOffset>) -> String {
|
||||
#[derive(Debug)]
|
||||
struct Match {
|
||||
collapse: Range<usize>,
|
||||
keep: Vec<Range<usize>>,
|
||||
}
|
||||
|
||||
let selected_range = selected_range.to_offset(buffer);
|
||||
let mut ts_matches = buffer.matches(0..buffer.len(), |grammar| {
|
||||
Some(&grammar.embedding_config.as_ref()?.query)
|
||||
});
|
||||
let configs = ts_matches
|
||||
.grammars()
|
||||
.iter()
|
||||
.map(|g| g.embedding_config.as_ref().unwrap())
|
||||
.collect::<Vec<_>>();
|
||||
let mut matches = Vec::new();
|
||||
while let Some(mat) = ts_matches.peek() {
|
||||
let config = &configs[mat.grammar_index];
|
||||
if let Some(collapse) = mat.captures.iter().find_map(|cap| {
|
||||
if Some(cap.index) == config.collapse_capture_ix {
|
||||
Some(cap.node.byte_range())
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}) {
|
||||
let mut keep = Vec::new();
|
||||
for capture in mat.captures.iter() {
|
||||
if Some(capture.index) == config.keep_capture_ix {
|
||||
keep.push(capture.node.byte_range());
|
||||
} else {
|
||||
continue;
|
||||
}
|
||||
}
|
||||
ts_matches.advance();
|
||||
matches.push(Match { collapse, keep });
|
||||
} else {
|
||||
ts_matches.advance();
|
||||
}
|
||||
}
|
||||
matches.sort_unstable_by_key(|mat| (mat.collapse.start, Reverse(mat.collapse.end)));
|
||||
let mut matches = matches.into_iter().peekable();
|
||||
|
||||
let mut summary = String::new();
|
||||
let mut offset = 0;
|
||||
let mut flushed_selection = false;
|
||||
while let Some(mat) = matches.next() {
|
||||
// Keep extending the collapsed range if the next match surrounds
|
||||
// the current one.
|
||||
while let Some(next_mat) = matches.peek() {
|
||||
if mat.collapse.start <= next_mat.collapse.start
|
||||
&& mat.collapse.end >= next_mat.collapse.end
|
||||
{
|
||||
matches.next().unwrap();
|
||||
} else {
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
if offset > mat.collapse.start {
|
||||
// Skip collapsed nodes that have already been summarized.
|
||||
offset = cmp::max(offset, mat.collapse.end);
|
||||
continue;
|
||||
}
|
||||
|
||||
if offset <= selected_range.start && selected_range.start <= mat.collapse.end {
|
||||
if !flushed_selection {
|
||||
// The collapsed node ends after the selection starts, so we'll flush the selection first.
|
||||
summary.extend(buffer.text_for_range(offset..selected_range.start));
|
||||
summary.push_str("<|S|");
|
||||
if selected_range.end == selected_range.start {
|
||||
summary.push_str(">");
|
||||
} else {
|
||||
summary.extend(buffer.text_for_range(selected_range.clone()));
|
||||
summary.push_str("|E|>");
|
||||
}
|
||||
offset = selected_range.end;
|
||||
flushed_selection = true;
|
||||
}
|
||||
|
||||
// If the selection intersects the collapsed node, we won't collapse it.
|
||||
if selected_range.end >= mat.collapse.start {
|
||||
continue;
|
||||
}
|
||||
}
|
||||
|
||||
summary.extend(buffer.text_for_range(offset..mat.collapse.start));
|
||||
for keep in mat.keep {
|
||||
summary.extend(buffer.text_for_range(keep));
|
||||
}
|
||||
offset = mat.collapse.end;
|
||||
}
|
||||
|
||||
// Flush selection if we haven't already done so.
|
||||
if !flushed_selection && offset <= selected_range.start {
|
||||
summary.extend(buffer.text_for_range(offset..selected_range.start));
|
||||
summary.push_str("<|S|");
|
||||
if selected_range.end == selected_range.start {
|
||||
summary.push_str(">");
|
||||
} else {
|
||||
summary.extend(buffer.text_for_range(selected_range.clone()));
|
||||
summary.push_str("|E|>");
|
||||
}
|
||||
offset = selected_range.end;
|
||||
}
|
||||
|
||||
summary.extend(buffer.text_for_range(offset..buffer.len()));
|
||||
summary
|
||||
}
|
||||
|
||||
pub fn generate_content_prompt(
|
||||
user_prompt: String,
|
||||
language_name: Option<&str>,
|
||||
buffer: BufferSnapshot,
|
||||
range: Range<usize>,
|
||||
search_results: Vec<PromptCodeSnippet>,
|
||||
model: &str,
|
||||
project_name: Option<String>,
|
||||
) -> anyhow::Result<String> {
|
||||
// Using new Prompt Templates
|
||||
let openai_model: Arc<dyn LanguageModel> = Arc::new(OpenAILanguageModel::load(model));
|
||||
let lang_name = if let Some(language_name) = language_name {
|
||||
Some(language_name.to_string())
|
||||
} else {
|
||||
None
|
||||
};
|
||||
|
||||
let args = PromptArguments {
|
||||
model: openai_model,
|
||||
language_name: lang_name.clone(),
|
||||
project_name,
|
||||
snippets: search_results.clone(),
|
||||
reserved_tokens: 1000,
|
||||
buffer: Some(buffer),
|
||||
selected_range: Some(range),
|
||||
user_prompt: Some(user_prompt.clone()),
|
||||
};
|
||||
|
||||
let templates: Vec<(PromptPriority, Box<dyn PromptTemplate>)> = vec![
|
||||
(PromptPriority::Mandatory, Box::new(EngineerPreamble {})),
|
||||
(
|
||||
PromptPriority::Ordered { order: 1 },
|
||||
Box::new(RepositoryContext {}),
|
||||
),
|
||||
(
|
||||
PromptPriority::Ordered { order: 0 },
|
||||
Box::new(FileContext {}),
|
||||
),
|
||||
(
|
||||
PromptPriority::Mandatory,
|
||||
Box::new(GenerateInlineContent {}),
|
||||
),
|
||||
];
|
||||
let chain = PromptChain::new(args, templates);
|
||||
let (prompt, _) = chain.generate(true)?;
|
||||
|
||||
anyhow::Ok(prompt)
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
pub(crate) mod tests {
|
||||
|
||||
use super::*;
|
||||
use std::sync::Arc;
|
||||
|
||||
use gpui::AppContext;
|
||||
use indoc::indoc;
|
||||
use language::{language_settings, tree_sitter_rust, Buffer, Language, LanguageConfig, Point};
|
||||
use settings::SettingsStore;
|
||||
|
||||
pub(crate) fn rust_lang() -> Language {
|
||||
Language::new(
|
||||
LanguageConfig {
|
||||
name: "Rust".into(),
|
||||
path_suffixes: vec!["rs".to_string()],
|
||||
..Default::default()
|
||||
},
|
||||
Some(tree_sitter_rust::language()),
|
||||
)
|
||||
.with_embedding_query(
|
||||
r#"
|
||||
(
|
||||
[(line_comment) (attribute_item)]* @context
|
||||
.
|
||||
[
|
||||
(struct_item
|
||||
name: (_) @name)
|
||||
|
||||
(enum_item
|
||||
name: (_) @name)
|
||||
|
||||
(impl_item
|
||||
trait: (_)? @name
|
||||
"for"? @name
|
||||
type: (_) @name)
|
||||
|
||||
(trait_item
|
||||
name: (_) @name)
|
||||
|
||||
(function_item
|
||||
name: (_) @name
|
||||
body: (block
|
||||
"{" @keep
|
||||
"}" @keep) @collapse)
|
||||
|
||||
(macro_definition
|
||||
name: (_) @name)
|
||||
] @item
|
||||
)
|
||||
"#,
|
||||
)
|
||||
.unwrap()
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
fn test_outline_for_prompt(cx: &mut AppContext) {
|
||||
cx.set_global(SettingsStore::test(cx));
|
||||
language_settings::init(cx);
|
||||
let text = indoc! {"
|
||||
struct X {
|
||||
a: usize,
|
||||
b: usize,
|
||||
}
|
||||
|
||||
impl X {
|
||||
|
||||
fn new() -> Self {
|
||||
let a = 1;
|
||||
let b = 2;
|
||||
Self { a, b }
|
||||
}
|
||||
|
||||
pub fn a(&self, param: bool) -> usize {
|
||||
self.a
|
||||
}
|
||||
|
||||
pub fn b(&self) -> usize {
|
||||
self.b
|
||||
}
|
||||
}
|
||||
"};
|
||||
let buffer =
|
||||
cx.add_model(|cx| Buffer::new(0, 0, text).with_language(Arc::new(rust_lang()), cx));
|
||||
let snapshot = buffer.read(cx).snapshot();
|
||||
|
||||
assert_eq!(
|
||||
summarize(&snapshot, Point::new(1, 4)..Point::new(1, 4)),
|
||||
indoc! {"
|
||||
struct X {
|
||||
<|S|>a: usize,
|
||||
b: usize,
|
||||
}
|
||||
|
||||
impl X {
|
||||
|
||||
fn new() -> Self {}
|
||||
|
||||
pub fn a(&self, param: bool) -> usize {}
|
||||
|
||||
pub fn b(&self) -> usize {}
|
||||
}
|
||||
"}
|
||||
);
|
||||
|
||||
assert_eq!(
|
||||
summarize(&snapshot, Point::new(8, 12)..Point::new(8, 14)),
|
||||
indoc! {"
|
||||
struct X {
|
||||
a: usize,
|
||||
b: usize,
|
||||
}
|
||||
|
||||
impl X {
|
||||
|
||||
fn new() -> Self {
|
||||
let <|S|a |E|>= 1;
|
||||
let b = 2;
|
||||
Self { a, b }
|
||||
}
|
||||
|
||||
pub fn a(&self, param: bool) -> usize {}
|
||||
|
||||
pub fn b(&self) -> usize {}
|
||||
}
|
||||
"}
|
||||
);
|
||||
|
||||
assert_eq!(
|
||||
summarize(&snapshot, Point::new(6, 0)..Point::new(6, 0)),
|
||||
indoc! {"
|
||||
struct X {
|
||||
a: usize,
|
||||
b: usize,
|
||||
}
|
||||
|
||||
impl X {
|
||||
<|S|>
|
||||
fn new() -> Self {}
|
||||
|
||||
pub fn a(&self, param: bool) -> usize {}
|
||||
|
||||
pub fn b(&self) -> usize {}
|
||||
}
|
||||
"}
|
||||
);
|
||||
|
||||
assert_eq!(
|
||||
summarize(&snapshot, Point::new(21, 0)..Point::new(21, 0)),
|
||||
indoc! {"
|
||||
struct X {
|
||||
a: usize,
|
||||
b: usize,
|
||||
}
|
||||
|
||||
impl X {
|
||||
|
||||
fn new() -> Self {}
|
||||
|
||||
pub fn a(&self, param: bool) -> usize {}
|
||||
|
||||
pub fn b(&self) -> usize {}
|
||||
}
|
||||
<|S|>"}
|
||||
);
|
||||
|
||||
// Ensure nested functions get collapsed properly.
|
||||
let text = indoc! {"
|
||||
struct X {
|
||||
a: usize,
|
||||
b: usize,
|
||||
}
|
||||
|
||||
impl X {
|
||||
|
||||
fn new() -> Self {
|
||||
let a = 1;
|
||||
let b = 2;
|
||||
Self { a, b }
|
||||
}
|
||||
|
||||
pub fn a(&self, param: bool) -> usize {
|
||||
let a = 30;
|
||||
fn nested() -> usize {
|
||||
3
|
||||
}
|
||||
self.a + nested()
|
||||
}
|
||||
|
||||
pub fn b(&self) -> usize {
|
||||
self.b
|
||||
}
|
||||
}
|
||||
"};
|
||||
buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
|
||||
let snapshot = buffer.read(cx).snapshot();
|
||||
assert_eq!(
|
||||
summarize(&snapshot, Point::new(0, 0)..Point::new(0, 0)),
|
||||
indoc! {"
|
||||
<|S|>struct X {
|
||||
a: usize,
|
||||
b: usize,
|
||||
}
|
||||
|
||||
impl X {
|
||||
|
||||
fn new() -> Self {}
|
||||
|
||||
pub fn a(&self, param: bool) -> usize {}
|
||||
|
||||
pub fn b(&self) -> usize {}
|
||||
}
|
||||
"}
|
||||
);
|
||||
}
|
||||
}
|
||||
24
crates/audio2/Cargo.toml
Normal file
24
crates/audio2/Cargo.toml
Normal file
@@ -0,0 +1,24 @@
|
||||
[package]
|
||||
name = "audio2"
|
||||
version = "0.1.0"
|
||||
edition = "2021"
|
||||
publish = false
|
||||
|
||||
[lib]
|
||||
path = "src/audio2.rs"
|
||||
doctest = false
|
||||
|
||||
[dependencies]
|
||||
gpui2 = { path = "../gpui2" }
|
||||
collections = { path = "../collections" }
|
||||
util = { path = "../util" }
|
||||
|
||||
|
||||
rodio ={version = "0.17.1", default-features=false, features = ["wav"]}
|
||||
|
||||
log.workspace = true
|
||||
futures.workspace = true
|
||||
anyhow.workspace = true
|
||||
parking_lot.workspace = true
|
||||
|
||||
[dev-dependencies]
|
||||
23
crates/audio2/audio/Cargo.toml
Normal file
23
crates/audio2/audio/Cargo.toml
Normal file
@@ -0,0 +1,23 @@
|
||||
[package]
|
||||
name = "audio"
|
||||
version = "0.1.0"
|
||||
edition = "2021"
|
||||
publish = false
|
||||
|
||||
[lib]
|
||||
path = "src/audio.rs"
|
||||
doctest = false
|
||||
|
||||
[dependencies]
|
||||
gpui = { path = "../gpui" }
|
||||
collections = { path = "../collections" }
|
||||
util = { path = "../util" }
|
||||
|
||||
rodio ={version = "0.17.1", default-features=false, features = ["wav"]}
|
||||
|
||||
log.workspace = true
|
||||
|
||||
anyhow.workspace = true
|
||||
parking_lot.workspace = true
|
||||
|
||||
[dev-dependencies]
|
||||
44
crates/audio2/audio/src/assets.rs
Normal file
44
crates/audio2/audio/src/assets.rs
Normal file
@@ -0,0 +1,44 @@
|
||||
use std::{io::Cursor, sync::Arc};
|
||||
|
||||
use anyhow::Result;
|
||||
use collections::HashMap;
|
||||
use gpui::{AppContext, AssetSource};
|
||||
use rodio::{
|
||||
source::{Buffered, SamplesConverter},
|
||||
Decoder, Source,
|
||||
};
|
||||
|
||||
type Sound = Buffered<SamplesConverter<Decoder<Cursor<Vec<u8>>>, f32>>;
|
||||
|
||||
pub struct SoundRegistry {
|
||||
cache: Arc<parking_lot::Mutex<HashMap<String, Sound>>>,
|
||||
assets: Box<dyn AssetSource>,
|
||||
}
|
||||
|
||||
impl SoundRegistry {
|
||||
pub fn new(source: impl AssetSource) -> Arc<Self> {
|
||||
Arc::new(Self {
|
||||
cache: Default::default(),
|
||||
assets: Box::new(source),
|
||||
})
|
||||
}
|
||||
|
||||
pub fn global(cx: &AppContext) -> Arc<Self> {
|
||||
cx.global::<Arc<Self>>().clone()
|
||||
}
|
||||
|
||||
pub fn get(&self, name: &str) -> Result<impl Source<Item = f32>> {
|
||||
if let Some(wav) = self.cache.lock().get(name) {
|
||||
return Ok(wav.clone());
|
||||
}
|
||||
|
||||
let path = format!("sounds/{}.wav", name);
|
||||
let bytes = self.assets.load(&path)?.into_owned();
|
||||
let cursor = Cursor::new(bytes);
|
||||
let source = Decoder::new(cursor)?.convert_samples::<f32>().buffered();
|
||||
|
||||
self.cache.lock().insert(name.to_string(), source.clone());
|
||||
|
||||
Ok(source)
|
||||
}
|
||||
}
|
||||
81
crates/audio2/audio/src/audio.rs
Normal file
81
crates/audio2/audio/src/audio.rs
Normal file
@@ -0,0 +1,81 @@
|
||||
use assets::SoundRegistry;
|
||||
use gpui::{AppContext, AssetSource};
|
||||
use rodio::{OutputStream, OutputStreamHandle};
|
||||
use util::ResultExt;
|
||||
|
||||
mod assets;
|
||||
|
||||
pub fn init(source: impl AssetSource, cx: &mut AppContext) {
|
||||
cx.set_global(SoundRegistry::new(source));
|
||||
cx.set_global(Audio::new());
|
||||
}
|
||||
|
||||
pub enum Sound {
|
||||
Joined,
|
||||
Leave,
|
||||
Mute,
|
||||
Unmute,
|
||||
StartScreenshare,
|
||||
StopScreenshare,
|
||||
}
|
||||
|
||||
impl Sound {
|
||||
fn file(&self) -> &'static str {
|
||||
match self {
|
||||
Self::Joined => "joined_call",
|
||||
Self::Leave => "leave_call",
|
||||
Self::Mute => "mute",
|
||||
Self::Unmute => "unmute",
|
||||
Self::StartScreenshare => "start_screenshare",
|
||||
Self::StopScreenshare => "stop_screenshare",
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub struct Audio {
|
||||
_output_stream: Option<OutputStream>,
|
||||
output_handle: Option<OutputStreamHandle>,
|
||||
}
|
||||
|
||||
impl Audio {
|
||||
pub fn new() -> Self {
|
||||
Self {
|
||||
_output_stream: None,
|
||||
output_handle: None,
|
||||
}
|
||||
}
|
||||
|
||||
fn ensure_output_exists(&mut self) -> Option<&OutputStreamHandle> {
|
||||
if self.output_handle.is_none() {
|
||||
let (_output_stream, output_handle) = OutputStream::try_default().log_err().unzip();
|
||||
self.output_handle = output_handle;
|
||||
self._output_stream = _output_stream;
|
||||
}
|
||||
|
||||
self.output_handle.as_ref()
|
||||
}
|
||||
|
||||
pub fn play_sound(sound: Sound, cx: &mut AppContext) {
|
||||
if !cx.has_global::<Self>() {
|
||||
return;
|
||||
}
|
||||
|
||||
cx.update_global::<Self, _, _>(|this, cx| {
|
||||
let output_handle = this.ensure_output_exists()?;
|
||||
let source = SoundRegistry::global(cx).get(sound.file()).log_err()?;
|
||||
output_handle.play_raw(source).log_err()?;
|
||||
Some(())
|
||||
});
|
||||
}
|
||||
|
||||
pub fn end_call(cx: &mut AppContext) {
|
||||
if !cx.has_global::<Self>() {
|
||||
return;
|
||||
}
|
||||
|
||||
cx.update_global::<Self, _, _>(|this, _| {
|
||||
this._output_stream.take();
|
||||
this.output_handle.take();
|
||||
});
|
||||
}
|
||||
}
|
||||
44
crates/audio2/src/assets.rs
Normal file
44
crates/audio2/src/assets.rs
Normal file
@@ -0,0 +1,44 @@
|
||||
use std::{io::Cursor, sync::Arc};
|
||||
|
||||
use anyhow::Result;
|
||||
use collections::HashMap;
|
||||
use gpui2::{AppContext, AssetSource};
|
||||
use rodio::{
|
||||
source::{Buffered, SamplesConverter},
|
||||
Decoder, Source,
|
||||
};
|
||||
|
||||
type Sound = Buffered<SamplesConverter<Decoder<Cursor<Vec<u8>>>, f32>>;
|
||||
|
||||
pub struct SoundRegistry {
|
||||
cache: Arc<parking_lot::Mutex<HashMap<String, Sound>>>,
|
||||
assets: Box<dyn AssetSource>,
|
||||
}
|
||||
|
||||
impl SoundRegistry {
|
||||
pub fn new(source: impl AssetSource) -> Arc<Self> {
|
||||
Arc::new(Self {
|
||||
cache: Default::default(),
|
||||
assets: Box::new(source),
|
||||
})
|
||||
}
|
||||
|
||||
pub fn global(cx: &AppContext) -> Arc<Self> {
|
||||
cx.global::<Arc<Self>>().clone()
|
||||
}
|
||||
|
||||
pub fn get(&self, name: &str) -> Result<impl Source<Item = f32>> {
|
||||
if let Some(wav) = self.cache.lock().get(name) {
|
||||
return Ok(wav.clone());
|
||||
}
|
||||
|
||||
let path = format!("sounds/{}.wav", name);
|
||||
let bytes = self.assets.load(&path)?.into_owned();
|
||||
let cursor = Cursor::new(bytes);
|
||||
let source = Decoder::new(cursor)?.convert_samples::<f32>().buffered();
|
||||
|
||||
self.cache.lock().insert(name.to_string(), source.clone());
|
||||
|
||||
Ok(source)
|
||||
}
|
||||
}
|
||||
111
crates/audio2/src/audio2.rs
Normal file
111
crates/audio2/src/audio2.rs
Normal file
@@ -0,0 +1,111 @@
|
||||
use assets::SoundRegistry;
|
||||
use futures::{channel::mpsc, StreamExt};
|
||||
use gpui2::{AppContext, AssetSource, Executor};
|
||||
use rodio::{OutputStream, OutputStreamHandle};
|
||||
use util::ResultExt;
|
||||
|
||||
mod assets;
|
||||
|
||||
pub fn init(source: impl AssetSource, cx: &mut AppContext) {
|
||||
cx.set_global(Audio::new(cx.executor()));
|
||||
cx.set_global(SoundRegistry::new(source));
|
||||
}
|
||||
|
||||
pub enum Sound {
|
||||
Joined,
|
||||
Leave,
|
||||
Mute,
|
||||
Unmute,
|
||||
StartScreenshare,
|
||||
StopScreenshare,
|
||||
}
|
||||
|
||||
impl Sound {
|
||||
fn file(&self) -> &'static str {
|
||||
match self {
|
||||
Self::Joined => "joined_call",
|
||||
Self::Leave => "leave_call",
|
||||
Self::Mute => "mute",
|
||||
Self::Unmute => "unmute",
|
||||
Self::StartScreenshare => "start_screenshare",
|
||||
Self::StopScreenshare => "stop_screenshare",
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub struct Audio {
|
||||
tx: mpsc::UnboundedSender<Box<dyn FnOnce(&mut AudioState) + Send>>,
|
||||
}
|
||||
|
||||
struct AudioState {
|
||||
_output_stream: Option<OutputStream>,
|
||||
output_handle: Option<OutputStreamHandle>,
|
||||
}
|
||||
|
||||
impl AudioState {
|
||||
fn ensure_output_exists(&mut self) -> Option<&OutputStreamHandle> {
|
||||
if self.output_handle.is_none() {
|
||||
let (_output_stream, output_handle) = OutputStream::try_default().log_err().unzip();
|
||||
self.output_handle = output_handle;
|
||||
self._output_stream = _output_stream;
|
||||
}
|
||||
|
||||
self.output_handle.as_ref()
|
||||
}
|
||||
|
||||
fn take(&mut self) {
|
||||
self._output_stream.take();
|
||||
self.output_handle.take();
|
||||
}
|
||||
}
|
||||
|
||||
impl Audio {
|
||||
pub fn new(executor: &Executor) -> Self {
|
||||
let (tx, mut rx) = mpsc::unbounded::<Box<dyn FnOnce(&mut AudioState) + Send>>();
|
||||
executor
|
||||
.spawn_on_main(|| async move {
|
||||
let mut audio = AudioState {
|
||||
_output_stream: None,
|
||||
output_handle: None,
|
||||
};
|
||||
|
||||
while let Some(f) = rx.next().await {
|
||||
(f)(&mut audio);
|
||||
}
|
||||
})
|
||||
.detach();
|
||||
|
||||
Self { tx }
|
||||
}
|
||||
|
||||
pub fn play_sound(sound: Sound, cx: &mut AppContext) {
|
||||
if !cx.has_global::<Self>() {
|
||||
return;
|
||||
}
|
||||
|
||||
let Some(source) = SoundRegistry::global(cx).get(sound.file()).log_err() else {
|
||||
return;
|
||||
};
|
||||
|
||||
let this = cx.global::<Self>();
|
||||
this.tx
|
||||
.unbounded_send(Box::new(move |state| {
|
||||
if let Some(output_handle) = state.ensure_output_exists() {
|
||||
output_handle.play_raw(source).log_err();
|
||||
}
|
||||
}))
|
||||
.ok();
|
||||
}
|
||||
|
||||
pub fn end_call(cx: &AppContext) {
|
||||
if !cx.has_global::<Self>() {
|
||||
return;
|
||||
}
|
||||
|
||||
let this = cx.global::<Self>();
|
||||
|
||||
this.tx
|
||||
.unbounded_send(Box::new(move |state| state.take()))
|
||||
.ok();
|
||||
}
|
||||
}
|
||||
@@ -115,13 +115,15 @@ pub fn check(_: &Check, cx: &mut AppContext) {
|
||||
|
||||
fn view_release_notes(_: &ViewReleaseNotes, cx: &mut AppContext) {
|
||||
if let Some(auto_updater) = AutoUpdater::get(cx) {
|
||||
let server_url = &auto_updater.read(cx).server_url;
|
||||
let auto_updater = auto_updater.read(cx);
|
||||
let server_url = &auto_updater.server_url;
|
||||
let current_version = auto_updater.current_version;
|
||||
let latest_release_url = if cx.has_global::<ReleaseChannel>()
|
||||
&& *cx.global::<ReleaseChannel>() == ReleaseChannel::Preview
|
||||
{
|
||||
format!("{server_url}/releases/preview/latest")
|
||||
format!("{server_url}/releases/preview/{current_version}")
|
||||
} else {
|
||||
format!("{server_url}/releases/stable/latest")
|
||||
format!("{server_url}/releases/stable/{current_version}")
|
||||
};
|
||||
cx.platform().open_url(&latest_release_url);
|
||||
}
|
||||
|
||||
@@ -20,7 +20,6 @@ test-support = [
|
||||
|
||||
[dependencies]
|
||||
audio = { path = "../audio" }
|
||||
channel = { path = "../channel" }
|
||||
client = { path = "../client" }
|
||||
collections = { path = "../collections" }
|
||||
gpui = { path = "../gpui" }
|
||||
|
||||
@@ -2,22 +2,22 @@ pub mod call_settings;
|
||||
pub mod participant;
|
||||
pub mod room;
|
||||
|
||||
use std::sync::Arc;
|
||||
|
||||
use anyhow::{anyhow, Result};
|
||||
use audio::Audio;
|
||||
use call_settings::CallSettings;
|
||||
use channel::ChannelId;
|
||||
use client::{proto, ClickhouseEvent, Client, TelemetrySettings, TypedEnvelope, User, UserStore};
|
||||
use client::{
|
||||
proto, ClickhouseEvent, Client, TelemetrySettings, TypedEnvelope, User, UserStore,
|
||||
ZED_ALWAYS_ACTIVE,
|
||||
};
|
||||
use collections::HashSet;
|
||||
use futures::{future::Shared, FutureExt};
|
||||
use postage::watch;
|
||||
|
||||
use futures::{channel::oneshot, future::Shared, Future, FutureExt};
|
||||
use gpui::{
|
||||
AppContext, AsyncAppContext, Entity, ModelContext, ModelHandle, Subscription, Task,
|
||||
WeakModelHandle,
|
||||
};
|
||||
use postage::watch;
|
||||
use project::Project;
|
||||
use std::sync::Arc;
|
||||
|
||||
pub use participant::ParticipantLocation;
|
||||
pub use room::Room;
|
||||
@@ -37,10 +37,42 @@ pub struct IncomingCall {
|
||||
pub initial_project: Option<proto::ParticipantProject>,
|
||||
}
|
||||
|
||||
pub struct OneAtATime {
|
||||
cancel: Option<oneshot::Sender<()>>,
|
||||
}
|
||||
|
||||
impl OneAtATime {
|
||||
/// spawn a task in the given context.
|
||||
/// if another task is spawned before that resolves, or if the OneAtATime itself is dropped, the first task will be cancelled and return Ok(None)
|
||||
/// otherwise you'll see the result of the task.
|
||||
fn spawn<F, Fut, R>(&mut self, cx: &mut AppContext, f: F) -> Task<Result<Option<R>>>
|
||||
where
|
||||
F: 'static + FnOnce(AsyncAppContext) -> Fut,
|
||||
Fut: Future<Output = Result<R>>,
|
||||
R: 'static,
|
||||
{
|
||||
let (tx, rx) = oneshot::channel();
|
||||
self.cancel.replace(tx);
|
||||
cx.spawn(|cx| async move {
|
||||
futures::select_biased! {
|
||||
_ = rx.fuse() => Ok(None),
|
||||
result = f(cx).fuse() => result.map(Some),
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
fn running(&self) -> bool {
|
||||
self.cancel
|
||||
.as_ref()
|
||||
.is_some_and(|cancel| !cancel.is_canceled())
|
||||
}
|
||||
}
|
||||
|
||||
/// Singleton global maintaining the user's participation in a room across workspaces.
|
||||
pub struct ActiveCall {
|
||||
room: Option<(ModelHandle<Room>, Vec<Subscription>)>,
|
||||
pending_room_creation: Option<Shared<Task<Result<ModelHandle<Room>, Arc<anyhow::Error>>>>>,
|
||||
_join_debouncer: OneAtATime,
|
||||
location: Option<WeakModelHandle<Project>>,
|
||||
pending_invites: HashSet<u64>,
|
||||
incoming_call: (
|
||||
@@ -68,6 +100,8 @@ impl ActiveCall {
|
||||
location: None,
|
||||
pending_invites: Default::default(),
|
||||
incoming_call: watch::channel(),
|
||||
|
||||
_join_debouncer: OneAtATime { cancel: None },
|
||||
_subscriptions: vec![
|
||||
client.add_request_handler(cx.handle(), Self::handle_incoming_call),
|
||||
client.add_message_handler(cx.handle(), Self::handle_call_canceled),
|
||||
@@ -77,7 +111,7 @@ impl ActiveCall {
|
||||
}
|
||||
}
|
||||
|
||||
pub fn channel_id(&self, cx: &AppContext) -> Option<ChannelId> {
|
||||
pub fn channel_id(&self, cx: &AppContext) -> Option<u64> {
|
||||
self.room()?.read(cx).channel_id()
|
||||
}
|
||||
|
||||
@@ -142,6 +176,10 @@ impl ActiveCall {
|
||||
}
|
||||
cx.notify();
|
||||
|
||||
if self._join_debouncer.running() {
|
||||
return Task::ready(Ok(()));
|
||||
}
|
||||
|
||||
let room = if let Some(room) = self.room().cloned() {
|
||||
Some(Task::ready(Ok(room)).shared())
|
||||
} else {
|
||||
@@ -206,9 +244,14 @@ impl ActiveCall {
|
||||
|
||||
cx.spawn(|this, mut cx| async move {
|
||||
let result = invite.await;
|
||||
if result.is_ok() {
|
||||
this.update(&mut cx, |this, cx| this.report_call_event("invite", cx));
|
||||
} else {
|
||||
// TODO: Resport collaboration error
|
||||
}
|
||||
|
||||
this.update(&mut cx, |this, cx| {
|
||||
this.pending_invites.remove(&called_user_id);
|
||||
this.report_call_event("invite", cx);
|
||||
cx.notify();
|
||||
});
|
||||
result
|
||||
@@ -253,11 +296,20 @@ impl ActiveCall {
|
||||
return Task::ready(Err(anyhow!("no incoming call")));
|
||||
};
|
||||
|
||||
let join = Room::join(&call, self.client.clone(), self.user_store.clone(), cx);
|
||||
if self.pending_room_creation.is_some() {
|
||||
return Task::ready(Ok(()));
|
||||
}
|
||||
|
||||
let room_id = call.room_id.clone();
|
||||
let client = self.client.clone();
|
||||
let user_store = self.user_store.clone();
|
||||
let join = self
|
||||
._join_debouncer
|
||||
.spawn(cx, move |cx| Room::join(room_id, client, user_store, cx));
|
||||
|
||||
cx.spawn(|this, mut cx| async move {
|
||||
let room = join.await?;
|
||||
this.update(&mut cx, |this, cx| this.set_room(Some(room.clone()), cx))
|
||||
this.update(&mut cx, |this, cx| this.set_room(room.clone(), cx))
|
||||
.await?;
|
||||
this.update(&mut cx, |this, cx| {
|
||||
this.report_call_event("accept incoming", cx)
|
||||
@@ -273,13 +325,7 @@ impl ActiveCall {
|
||||
.borrow_mut()
|
||||
.take()
|
||||
.ok_or_else(|| anyhow!("no incoming call"))?;
|
||||
Self::report_call_event_for_room(
|
||||
"decline incoming",
|
||||
Some(call.room_id),
|
||||
None,
|
||||
&self.client,
|
||||
cx,
|
||||
);
|
||||
report_call_event_for_room("decline incoming", call.room_id, None, &self.client, cx);
|
||||
self.client.send(proto::DeclineCall {
|
||||
room_id: call.room_id,
|
||||
})?;
|
||||
@@ -290,25 +336,33 @@ impl ActiveCall {
|
||||
&mut self,
|
||||
channel_id: u64,
|
||||
cx: &mut ModelContext<Self>,
|
||||
) -> Task<Result<()>> {
|
||||
) -> Task<Result<Option<ModelHandle<Room>>>> {
|
||||
if let Some(room) = self.room().cloned() {
|
||||
if room.read(cx).channel_id() == Some(channel_id) {
|
||||
return Task::ready(Ok(()));
|
||||
return Task::ready(Ok(Some(room)));
|
||||
} else {
|
||||
room.update(cx, |room, cx| room.clear_state(cx));
|
||||
}
|
||||
}
|
||||
|
||||
let join = Room::join_channel(channel_id, self.client.clone(), self.user_store.clone(), cx);
|
||||
if self.pending_room_creation.is_some() {
|
||||
return Task::ready(Ok(None));
|
||||
}
|
||||
|
||||
cx.spawn(|this, mut cx| async move {
|
||||
let client = self.client.clone();
|
||||
let user_store = self.user_store.clone();
|
||||
let join = self._join_debouncer.spawn(cx, move |cx| async move {
|
||||
Room::join_channel(channel_id, client, user_store, cx).await
|
||||
});
|
||||
|
||||
cx.spawn(move |this, mut cx| async move {
|
||||
let room = join.await?;
|
||||
this.update(&mut cx, |this, cx| this.set_room(Some(room.clone()), cx))
|
||||
this.update(&mut cx, |this, cx| this.set_room(room.clone(), cx))
|
||||
.await?;
|
||||
this.update(&mut cx, |this, cx| {
|
||||
this.report_call_event("join channel", cx)
|
||||
});
|
||||
Ok(())
|
||||
Ok(room)
|
||||
})
|
||||
}
|
||||
|
||||
@@ -349,17 +403,22 @@ impl ActiveCall {
|
||||
}
|
||||
}
|
||||
|
||||
pub fn location(&self) -> Option<&WeakModelHandle<Project>> {
|
||||
self.location.as_ref()
|
||||
}
|
||||
|
||||
pub fn set_location(
|
||||
&mut self,
|
||||
project: Option<&ModelHandle<Project>>,
|
||||
cx: &mut ModelContext<Self>,
|
||||
) -> Task<Result<()>> {
|
||||
self.location = project.map(|project| project.downgrade());
|
||||
if let Some((room, _)) = self.room.as_ref() {
|
||||
room.update(cx, |room, cx| room.set_location(project, cx))
|
||||
} else {
|
||||
Task::ready(Ok(()))
|
||||
if project.is_some() || !*ZED_ALWAYS_ACTIVE {
|
||||
self.location = project.map(|project| project.downgrade());
|
||||
if let Some((room, _)) = self.room.as_ref() {
|
||||
return room.update(cx, |room, cx| room.set_location(project, cx));
|
||||
}
|
||||
}
|
||||
Task::ready(Ok(()))
|
||||
}
|
||||
|
||||
fn set_room(
|
||||
@@ -409,31 +468,83 @@ impl ActiveCall {
|
||||
&self.pending_invites
|
||||
}
|
||||
|
||||
fn report_call_event(&self, operation: &'static str, cx: &AppContext) {
|
||||
let (room_id, channel_id) = match self.room() {
|
||||
Some(room) => {
|
||||
let room = room.read(cx);
|
||||
(Some(room.id()), room.channel_id())
|
||||
}
|
||||
None => (None, None),
|
||||
};
|
||||
Self::report_call_event_for_room(operation, room_id, channel_id, &self.client, cx)
|
||||
}
|
||||
|
||||
pub fn report_call_event_for_room(
|
||||
operation: &'static str,
|
||||
room_id: Option<u64>,
|
||||
channel_id: Option<u64>,
|
||||
client: &Arc<Client>,
|
||||
cx: &AppContext,
|
||||
) {
|
||||
let telemetry = client.telemetry();
|
||||
let telemetry_settings = *settings::get::<TelemetrySettings>(cx);
|
||||
let event = ClickhouseEvent::Call {
|
||||
operation,
|
||||
room_id,
|
||||
channel_id,
|
||||
};
|
||||
telemetry.report_clickhouse_event(event, telemetry_settings);
|
||||
pub fn report_call_event(&self, operation: &'static str, cx: &AppContext) {
|
||||
if let Some(room) = self.room() {
|
||||
let room = room.read(cx);
|
||||
report_call_event_for_room(operation, room.id(), room.channel_id(), &self.client, cx);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub fn report_call_event_for_room(
|
||||
operation: &'static str,
|
||||
room_id: u64,
|
||||
channel_id: Option<u64>,
|
||||
client: &Arc<Client>,
|
||||
cx: &AppContext,
|
||||
) {
|
||||
let telemetry = client.telemetry();
|
||||
let telemetry_settings = *settings::get::<TelemetrySettings>(cx);
|
||||
let event = ClickhouseEvent::Call {
|
||||
operation,
|
||||
room_id: Some(room_id),
|
||||
channel_id,
|
||||
};
|
||||
telemetry.report_clickhouse_event(event, telemetry_settings);
|
||||
}
|
||||
|
||||
pub fn report_call_event_for_channel(
|
||||
operation: &'static str,
|
||||
channel_id: u64,
|
||||
client: &Arc<Client>,
|
||||
cx: &AppContext,
|
||||
) {
|
||||
let room = ActiveCall::global(cx).read(cx).room();
|
||||
|
||||
let telemetry = client.telemetry();
|
||||
let telemetry_settings = *settings::get::<TelemetrySettings>(cx);
|
||||
|
||||
let event = ClickhouseEvent::Call {
|
||||
operation,
|
||||
room_id: room.map(|r| r.read(cx).id()),
|
||||
channel_id: Some(channel_id),
|
||||
};
|
||||
telemetry.report_clickhouse_event(event, telemetry_settings);
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod test {
|
||||
use gpui::TestAppContext;
|
||||
|
||||
use crate::OneAtATime;
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_one_at_a_time(cx: &mut TestAppContext) {
|
||||
let mut one_at_a_time = OneAtATime { cancel: None };
|
||||
|
||||
assert_eq!(
|
||||
cx.update(|cx| one_at_a_time.spawn(cx, |_| async { Ok(1) }))
|
||||
.await
|
||||
.unwrap(),
|
||||
Some(1)
|
||||
);
|
||||
|
||||
let (a, b) = cx.update(|cx| {
|
||||
(
|
||||
one_at_a_time.spawn(cx, |_| async {
|
||||
assert!(false);
|
||||
Ok(2)
|
||||
}),
|
||||
one_at_a_time.spawn(cx, |_| async { Ok(3) }),
|
||||
)
|
||||
});
|
||||
|
||||
assert_eq!(a.await.unwrap(), None);
|
||||
assert_eq!(b.await.unwrap(), Some(3));
|
||||
|
||||
let promise = cx.update(|cx| one_at_a_time.spawn(cx, |_| async { Ok(4) }));
|
||||
drop(one_at_a_time);
|
||||
|
||||
assert_eq!(promise.await.unwrap(), None);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,4 +1,5 @@
|
||||
use anyhow::{anyhow, Result};
|
||||
use client::ParticipantIndex;
|
||||
use client::{proto, User};
|
||||
use collections::HashMap;
|
||||
use gpui::WeakModelHandle;
|
||||
@@ -43,6 +44,7 @@ pub struct RemoteParticipant {
|
||||
pub peer_id: proto::PeerId,
|
||||
pub projects: Vec<proto::ParticipantProject>,
|
||||
pub location: ParticipantLocation,
|
||||
pub participant_index: ParticipantIndex,
|
||||
pub muted: bool,
|
||||
pub speaking: bool,
|
||||
pub video_tracks: HashMap<live_kit_client::Sid, Arc<RemoteVideoTrack>>,
|
||||
|
||||
@@ -1,13 +1,12 @@
|
||||
use crate::{
|
||||
call_settings::CallSettings,
|
||||
participant::{LocalParticipant, ParticipantLocation, RemoteParticipant, RemoteVideoTrack},
|
||||
IncomingCall,
|
||||
};
|
||||
use anyhow::{anyhow, Result};
|
||||
use audio::{Audio, Sound};
|
||||
use client::{
|
||||
proto::{self, PeerId},
|
||||
Client, TypedEnvelope, User, UserStore,
|
||||
Client, ParticipantIndex, TypedEnvelope, User, UserStore,
|
||||
};
|
||||
use collections::{BTreeMap, HashMap, HashSet};
|
||||
use fs::Fs;
|
||||
@@ -18,7 +17,7 @@ use live_kit_client::{
|
||||
LocalAudioTrack, LocalTrackPublication, LocalVideoTrack, RemoteAudioTrackUpdate,
|
||||
RemoteVideoTrackUpdate,
|
||||
};
|
||||
use postage::stream::Stream;
|
||||
use postage::{sink::Sink, stream::Stream, watch};
|
||||
use project::Project;
|
||||
use std::{future::Future, mem, pin::Pin, sync::Arc, time::Duration};
|
||||
use util::{post_inc, ResultExt, TryFutureExt};
|
||||
@@ -44,12 +43,18 @@ pub enum Event {
|
||||
RemoteProjectUnshared {
|
||||
project_id: u64,
|
||||
},
|
||||
RemoteProjectJoined {
|
||||
project_id: u64,
|
||||
},
|
||||
RemoteProjectInvitationDiscarded {
|
||||
project_id: u64,
|
||||
},
|
||||
Left,
|
||||
}
|
||||
|
||||
pub struct Room {
|
||||
id: u64,
|
||||
channel_id: Option<u64>,
|
||||
pub channel_id: Option<u64>,
|
||||
live_kit: Option<LiveKitRoom>,
|
||||
status: RoomStatus,
|
||||
shared_projects: HashSet<WeakModelHandle<Project>>,
|
||||
@@ -64,6 +69,8 @@ pub struct Room {
|
||||
user_store: ModelHandle<UserStore>,
|
||||
follows_by_leader_id_project_id: HashMap<(PeerId, u64), Vec<PeerId>>,
|
||||
subscriptions: Vec<client::Subscription>,
|
||||
room_update_completed_tx: watch::Sender<Option<()>>,
|
||||
room_update_completed_rx: watch::Receiver<Option<()>>,
|
||||
pending_room_update: Option<Task<()>>,
|
||||
maintain_connection: Option<Task<Option<()>>>,
|
||||
}
|
||||
@@ -98,6 +105,10 @@ impl Room {
|
||||
self.channel_id
|
||||
}
|
||||
|
||||
pub fn is_sharing_project(&self) -> bool {
|
||||
!self.shared_projects.is_empty()
|
||||
}
|
||||
|
||||
#[cfg(any(test, feature = "test-support"))]
|
||||
pub fn is_connected(&self) -> bool {
|
||||
if let Some(live_kit) = self.live_kit.as_ref() {
|
||||
@@ -110,6 +121,10 @@ impl Room {
|
||||
}
|
||||
}
|
||||
|
||||
pub fn can_publish(&self) -> bool {
|
||||
self.live_kit.as_ref().is_some_and(|room| room.can_publish)
|
||||
}
|
||||
|
||||
fn new(
|
||||
id: u64,
|
||||
channel_id: Option<u64>,
|
||||
@@ -169,20 +184,23 @@ impl Room {
|
||||
});
|
||||
|
||||
let connect = room.connect(&connection_info.server_url, &connection_info.token);
|
||||
cx.spawn(|this, mut cx| async move {
|
||||
connect.await?;
|
||||
if connection_info.can_publish {
|
||||
cx.spawn(|this, mut cx| async move {
|
||||
connect.await?;
|
||||
|
||||
if !cx.read(Self::mute_on_join) {
|
||||
this.update(&mut cx, |this, cx| this.share_microphone(cx))
|
||||
.await?;
|
||||
}
|
||||
if !cx.read(Self::mute_on_join) {
|
||||
this.update(&mut cx, |this, cx| this.share_microphone(cx))
|
||||
.await?;
|
||||
}
|
||||
|
||||
anyhow::Ok(())
|
||||
})
|
||||
.detach_and_log_err(cx);
|
||||
anyhow::Ok(())
|
||||
})
|
||||
.detach_and_log_err(cx);
|
||||
}
|
||||
|
||||
Some(LiveKitRoom {
|
||||
room,
|
||||
can_publish: connection_info.can_publish,
|
||||
screen_track: LocalTrack::None,
|
||||
microphone_track: LocalTrack::None,
|
||||
next_publish_id: 0,
|
||||
@@ -201,6 +219,8 @@ impl Room {
|
||||
|
||||
Audio::play_sound(Sound::Joined, cx);
|
||||
|
||||
let (room_update_completed_tx, room_update_completed_rx) = watch::channel();
|
||||
|
||||
Self {
|
||||
id,
|
||||
channel_id,
|
||||
@@ -220,6 +240,8 @@ impl Room {
|
||||
user_store,
|
||||
follows_by_leader_id_project_id: Default::default(),
|
||||
maintain_connection: Some(maintain_connection),
|
||||
room_update_completed_tx,
|
||||
room_update_completed_rx,
|
||||
}
|
||||
}
|
||||
|
||||
@@ -268,37 +290,32 @@ impl Room {
|
||||
})
|
||||
}
|
||||
|
||||
pub(crate) fn join_channel(
|
||||
pub(crate) async fn join_channel(
|
||||
channel_id: u64,
|
||||
client: Arc<Client>,
|
||||
user_store: ModelHandle<UserStore>,
|
||||
cx: &mut AppContext,
|
||||
) -> Task<Result<ModelHandle<Self>>> {
|
||||
cx.spawn(|cx| async move {
|
||||
Self::from_join_response(
|
||||
client.request(proto::JoinChannel { channel_id }).await?,
|
||||
client,
|
||||
user_store,
|
||||
cx,
|
||||
)
|
||||
})
|
||||
cx: AsyncAppContext,
|
||||
) -> Result<ModelHandle<Self>> {
|
||||
Self::from_join_response(
|
||||
client.request(proto::JoinChannel { channel_id }).await?,
|
||||
client,
|
||||
user_store,
|
||||
cx,
|
||||
)
|
||||
}
|
||||
|
||||
pub(crate) fn join(
|
||||
call: &IncomingCall,
|
||||
pub(crate) async fn join(
|
||||
room_id: u64,
|
||||
client: Arc<Client>,
|
||||
user_store: ModelHandle<UserStore>,
|
||||
cx: &mut AppContext,
|
||||
) -> Task<Result<ModelHandle<Self>>> {
|
||||
let id = call.room_id;
|
||||
cx.spawn(|cx| async move {
|
||||
Self::from_join_response(
|
||||
client.request(proto::JoinRoom { id }).await?,
|
||||
client,
|
||||
user_store,
|
||||
cx,
|
||||
)
|
||||
})
|
||||
cx: AsyncAppContext,
|
||||
) -> Result<ModelHandle<Self>> {
|
||||
Self::from_join_response(
|
||||
client.request(proto::JoinRoom { id: room_id }).await?,
|
||||
client,
|
||||
user_store,
|
||||
cx,
|
||||
)
|
||||
}
|
||||
|
||||
pub fn mute_on_join(cx: &AppContext) -> bool {
|
||||
@@ -588,6 +605,43 @@ impl Room {
|
||||
.map_or(&[], |v| v.as_slice())
|
||||
}
|
||||
|
||||
/// Returns the most 'active' projects, defined as most people in the project
|
||||
pub fn most_active_project(&self, cx: &AppContext) -> Option<(u64, u64)> {
|
||||
let mut project_hosts_and_guest_counts = HashMap::<u64, (Option<u64>, u32)>::default();
|
||||
for participant in self.remote_participants.values() {
|
||||
match participant.location {
|
||||
ParticipantLocation::SharedProject { project_id } => {
|
||||
project_hosts_and_guest_counts
|
||||
.entry(project_id)
|
||||
.or_default()
|
||||
.1 += 1;
|
||||
}
|
||||
ParticipantLocation::External | ParticipantLocation::UnsharedProject => {}
|
||||
}
|
||||
for project in &participant.projects {
|
||||
project_hosts_and_guest_counts
|
||||
.entry(project.id)
|
||||
.or_default()
|
||||
.0 = Some(participant.user.id);
|
||||
}
|
||||
}
|
||||
|
||||
if let Some(user) = self.user_store.read(cx).current_user() {
|
||||
for project in &self.local_participant.projects {
|
||||
project_hosts_and_guest_counts
|
||||
.entry(project.id)
|
||||
.or_default()
|
||||
.0 = Some(user.id);
|
||||
}
|
||||
}
|
||||
|
||||
project_hosts_and_guest_counts
|
||||
.into_iter()
|
||||
.filter_map(|(id, (host, guest_count))| Some((id, host?, guest_count)))
|
||||
.max_by_key(|(_, _, guest_count)| *guest_count)
|
||||
.map(|(id, host, _)| (id, host))
|
||||
}
|
||||
|
||||
async fn handle_room_updated(
|
||||
this: ModelHandle<Self>,
|
||||
envelope: TypedEnvelope<proto::RoomUpdated>,
|
||||
@@ -651,6 +705,7 @@ impl Room {
|
||||
let Some(peer_id) = participant.peer_id else {
|
||||
continue;
|
||||
};
|
||||
let participant_index = ParticipantIndex(participant.participant_index);
|
||||
this.participant_user_ids.insert(participant.user_id);
|
||||
|
||||
let old_projects = this
|
||||
@@ -701,8 +756,9 @@ impl Room {
|
||||
if let Some(remote_participant) =
|
||||
this.remote_participants.get_mut(&participant.user_id)
|
||||
{
|
||||
remote_participant.projects = participant.projects;
|
||||
remote_participant.peer_id = peer_id;
|
||||
remote_participant.projects = participant.projects;
|
||||
remote_participant.participant_index = participant_index;
|
||||
if location != remote_participant.location {
|
||||
remote_participant.location = location;
|
||||
cx.emit(Event::ParticipantLocationChanged {
|
||||
@@ -714,6 +770,7 @@ impl Room {
|
||||
participant.user_id,
|
||||
RemoteParticipant {
|
||||
user: user.clone(),
|
||||
participant_index,
|
||||
peer_id,
|
||||
projects: participant.projects,
|
||||
location,
|
||||
@@ -807,7 +864,17 @@ impl Room {
|
||||
let _ = this.leave(cx);
|
||||
}
|
||||
|
||||
this.user_store.update(cx, |user_store, cx| {
|
||||
let participant_indices_by_user_id = this
|
||||
.remote_participants
|
||||
.iter()
|
||||
.map(|(user_id, participant)| (*user_id, participant.participant_index))
|
||||
.collect();
|
||||
user_store.set_participant_indices(participant_indices_by_user_id, cx);
|
||||
});
|
||||
|
||||
this.check_invariants();
|
||||
this.room_update_completed_tx.try_send(Some(())).ok();
|
||||
cx.notify();
|
||||
});
|
||||
}));
|
||||
@@ -816,6 +883,17 @@ impl Room {
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub fn room_update_completed(&mut self) -> impl Future<Output = ()> {
|
||||
let mut done_rx = self.room_update_completed_rx.clone();
|
||||
async move {
|
||||
while let Some(result) = done_rx.next().await {
|
||||
if result.is_some() {
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn remote_video_track_updated(
|
||||
&mut self,
|
||||
change: RemoteVideoTrackUpdate,
|
||||
@@ -1003,6 +1081,7 @@ impl Room {
|
||||
) -> Task<Result<ModelHandle<Project>>> {
|
||||
let client = self.client.clone();
|
||||
let user_store = self.user_store.clone();
|
||||
cx.emit(Event::RemoteProjectJoined { project_id: id });
|
||||
cx.spawn(|this, mut cx| async move {
|
||||
let project =
|
||||
Project::remote(id, client, user_store, language_registry, fs, cx.clone()).await?;
|
||||
@@ -1173,7 +1252,7 @@ impl Room {
|
||||
.read_with(&cx, |this, _| {
|
||||
this.live_kit
|
||||
.as_ref()
|
||||
.map(|live_kit| live_kit.room.publish_audio_track(&track))
|
||||
.map(|live_kit| live_kit.room.publish_audio_track(track))
|
||||
})
|
||||
.ok_or_else(|| anyhow!("live-kit was not initialized"))?
|
||||
.await
|
||||
@@ -1259,7 +1338,7 @@ impl Room {
|
||||
.read_with(&cx, |this, _| {
|
||||
this.live_kit
|
||||
.as_ref()
|
||||
.map(|live_kit| live_kit.room.publish_video_track(&track))
|
||||
.map(|live_kit| live_kit.room.publish_video_track(track))
|
||||
})
|
||||
.ok_or_else(|| anyhow!("live-kit was not initialized"))?
|
||||
.await
|
||||
@@ -1420,6 +1499,7 @@ struct LiveKitRoom {
|
||||
deafened: bool,
|
||||
speaking: bool,
|
||||
next_publish_id: usize,
|
||||
can_publish: bool,
|
||||
_maintain_room: Task<()>,
|
||||
_maintain_tracks: [Task<()>; 2],
|
||||
}
|
||||
|
||||
52
crates/call2/Cargo.toml
Normal file
52
crates/call2/Cargo.toml
Normal file
@@ -0,0 +1,52 @@
|
||||
[package]
|
||||
name = "call2"
|
||||
version = "0.1.0"
|
||||
edition = "2021"
|
||||
publish = false
|
||||
|
||||
[lib]
|
||||
path = "src/call2.rs"
|
||||
doctest = false
|
||||
|
||||
[features]
|
||||
test-support = [
|
||||
"client2/test-support",
|
||||
"collections/test-support",
|
||||
"gpui2/test-support",
|
||||
"live_kit_client2/test-support",
|
||||
"project2/test-support",
|
||||
"util/test-support"
|
||||
]
|
||||
|
||||
[dependencies]
|
||||
audio2 = { path = "../audio2" }
|
||||
client2 = { path = "../client2" }
|
||||
collections = { path = "../collections" }
|
||||
gpui2 = { path = "../gpui2" }
|
||||
log.workspace = true
|
||||
live_kit_client2 = { path = "../live_kit_client2" }
|
||||
fs2 = { path = "../fs2" }
|
||||
language2 = { path = "../language2" }
|
||||
media = { path = "../media" }
|
||||
project2 = { path = "../project2" }
|
||||
settings2 = { path = "../settings2" }
|
||||
util = { path = "../util" }
|
||||
|
||||
anyhow.workspace = true
|
||||
async-broadcast = "0.4"
|
||||
futures.workspace = true
|
||||
postage.workspace = true
|
||||
schemars.workspace = true
|
||||
serde.workspace = true
|
||||
serde_json.workspace = true
|
||||
serde_derive.workspace = true
|
||||
|
||||
[dev-dependencies]
|
||||
client2 = { path = "../client2", features = ["test-support"] }
|
||||
fs2 = { path = "../fs2", features = ["test-support"] }
|
||||
language2 = { path = "../language2", features = ["test-support"] }
|
||||
collections = { path = "../collections", features = ["test-support"] }
|
||||
gpui2 = { path = "../gpui2", features = ["test-support"] }
|
||||
live_kit_client2 = { path = "../live_kit_client2", features = ["test-support"] }
|
||||
project2 = { path = "../project2", features = ["test-support"] }
|
||||
util = { path = "../util", features = ["test-support"] }
|
||||
461
crates/call2/src/call2.rs
Normal file
461
crates/call2/src/call2.rs
Normal file
@@ -0,0 +1,461 @@
|
||||
pub mod call_settings;
|
||||
pub mod participant;
|
||||
pub mod room;
|
||||
|
||||
use anyhow::{anyhow, Result};
|
||||
use audio2::Audio;
|
||||
use call_settings::CallSettings;
|
||||
use client2::{
|
||||
proto, ClickhouseEvent, Client, TelemetrySettings, TypedEnvelope, User, UserStore,
|
||||
ZED_ALWAYS_ACTIVE,
|
||||
};
|
||||
use collections::HashSet;
|
||||
use futures::{future::Shared, FutureExt};
|
||||
use gpui2::{
|
||||
AppContext, AsyncAppContext, Context, EventEmitter, Model, ModelContext, Subscription, Task,
|
||||
WeakModel,
|
||||
};
|
||||
use postage::watch;
|
||||
use project2::Project;
|
||||
use settings2::Settings;
|
||||
use std::sync::Arc;
|
||||
|
||||
pub use participant::ParticipantLocation;
|
||||
pub use room::Room;
|
||||
|
||||
pub fn init(client: Arc<Client>, user_store: Model<UserStore>, cx: &mut AppContext) {
|
||||
CallSettings::register(cx);
|
||||
|
||||
let active_call = cx.build_model(|cx| ActiveCall::new(client, user_store, cx));
|
||||
cx.set_global(active_call);
|
||||
}
|
||||
|
||||
#[derive(Clone)]
|
||||
pub struct IncomingCall {
|
||||
pub room_id: u64,
|
||||
pub calling_user: Arc<User>,
|
||||
pub participants: Vec<Arc<User>>,
|
||||
pub initial_project: Option<proto::ParticipantProject>,
|
||||
}
|
||||
|
||||
/// Singleton global maintaining the user's participation in a room across workspaces.
|
||||
pub struct ActiveCall {
|
||||
room: Option<(Model<Room>, Vec<Subscription>)>,
|
||||
pending_room_creation: Option<Shared<Task<Result<Model<Room>, Arc<anyhow::Error>>>>>,
|
||||
location: Option<WeakModel<Project>>,
|
||||
pending_invites: HashSet<u64>,
|
||||
incoming_call: (
|
||||
watch::Sender<Option<IncomingCall>>,
|
||||
watch::Receiver<Option<IncomingCall>>,
|
||||
),
|
||||
client: Arc<Client>,
|
||||
user_store: Model<UserStore>,
|
||||
_subscriptions: Vec<client2::Subscription>,
|
||||
}
|
||||
|
||||
impl EventEmitter for ActiveCall {
|
||||
type Event = room::Event;
|
||||
}
|
||||
|
||||
impl ActiveCall {
|
||||
fn new(client: Arc<Client>, user_store: Model<UserStore>, cx: &mut ModelContext<Self>) -> Self {
|
||||
Self {
|
||||
room: None,
|
||||
pending_room_creation: None,
|
||||
location: None,
|
||||
pending_invites: Default::default(),
|
||||
incoming_call: watch::channel(),
|
||||
|
||||
_subscriptions: vec![
|
||||
client.add_request_handler(cx.weak_model(), Self::handle_incoming_call),
|
||||
client.add_message_handler(cx.weak_model(), Self::handle_call_canceled),
|
||||
],
|
||||
client,
|
||||
user_store,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn channel_id(&self, cx: &AppContext) -> Option<u64> {
|
||||
self.room()?.read(cx).channel_id()
|
||||
}
|
||||
|
||||
async fn handle_incoming_call(
|
||||
this: Model<Self>,
|
||||
envelope: TypedEnvelope<proto::IncomingCall>,
|
||||
_: Arc<Client>,
|
||||
mut cx: AsyncAppContext,
|
||||
) -> Result<proto::Ack> {
|
||||
let user_store = this.update(&mut cx, |this, _| this.user_store.clone())?;
|
||||
let call = IncomingCall {
|
||||
room_id: envelope.payload.room_id,
|
||||
participants: user_store
|
||||
.update(&mut cx, |user_store, cx| {
|
||||
user_store.get_users(envelope.payload.participant_user_ids, cx)
|
||||
})?
|
||||
.await?,
|
||||
calling_user: user_store
|
||||
.update(&mut cx, |user_store, cx| {
|
||||
user_store.get_user(envelope.payload.calling_user_id, cx)
|
||||
})?
|
||||
.await?,
|
||||
initial_project: envelope.payload.initial_project,
|
||||
};
|
||||
this.update(&mut cx, |this, _| {
|
||||
*this.incoming_call.0.borrow_mut() = Some(call);
|
||||
})?;
|
||||
|
||||
Ok(proto::Ack {})
|
||||
}
|
||||
|
||||
async fn handle_call_canceled(
|
||||
this: Model<Self>,
|
||||
envelope: TypedEnvelope<proto::CallCanceled>,
|
||||
_: Arc<Client>,
|
||||
mut cx: AsyncAppContext,
|
||||
) -> Result<()> {
|
||||
this.update(&mut cx, |this, _| {
|
||||
let mut incoming_call = this.incoming_call.0.borrow_mut();
|
||||
if incoming_call
|
||||
.as_ref()
|
||||
.map_or(false, |call| call.room_id == envelope.payload.room_id)
|
||||
{
|
||||
incoming_call.take();
|
||||
}
|
||||
})?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub fn global(cx: &AppContext) -> Model<Self> {
|
||||
cx.global::<Model<Self>>().clone()
|
||||
}
|
||||
|
||||
pub fn invite(
|
||||
&mut self,
|
||||
called_user_id: u64,
|
||||
initial_project: Option<Model<Project>>,
|
||||
cx: &mut ModelContext<Self>,
|
||||
) -> Task<Result<()>> {
|
||||
if !self.pending_invites.insert(called_user_id) {
|
||||
return Task::ready(Err(anyhow!("user was already invited")));
|
||||
}
|
||||
cx.notify();
|
||||
|
||||
let room = if let Some(room) = self.room().cloned() {
|
||||
Some(Task::ready(Ok(room)).shared())
|
||||
} else {
|
||||
self.pending_room_creation.clone()
|
||||
};
|
||||
|
||||
let invite = if let Some(room) = room {
|
||||
cx.spawn(move |_, mut cx| async move {
|
||||
let room = room.await.map_err(|err| anyhow!("{:?}", err))?;
|
||||
|
||||
let initial_project_id = if let Some(initial_project) = initial_project {
|
||||
Some(
|
||||
room.update(&mut cx, |room, cx| room.share_project(initial_project, cx))?
|
||||
.await?,
|
||||
)
|
||||
} else {
|
||||
None
|
||||
};
|
||||
|
||||
room.update(&mut cx, move |room, cx| {
|
||||
room.call(called_user_id, initial_project_id, cx)
|
||||
})?
|
||||
.await?;
|
||||
|
||||
anyhow::Ok(())
|
||||
})
|
||||
} else {
|
||||
let client = self.client.clone();
|
||||
let user_store = self.user_store.clone();
|
||||
let room = cx
|
||||
.spawn(move |this, mut cx| async move {
|
||||
let create_room = async {
|
||||
let room = cx
|
||||
.update(|cx| {
|
||||
Room::create(
|
||||
called_user_id,
|
||||
initial_project,
|
||||
client,
|
||||
user_store,
|
||||
cx,
|
||||
)
|
||||
})?
|
||||
.await?;
|
||||
|
||||
this.update(&mut cx, |this, cx| this.set_room(Some(room.clone()), cx))?
|
||||
.await?;
|
||||
|
||||
anyhow::Ok(room)
|
||||
};
|
||||
|
||||
let room = create_room.await;
|
||||
this.update(&mut cx, |this, _| this.pending_room_creation = None)?;
|
||||
room.map_err(Arc::new)
|
||||
})
|
||||
.shared();
|
||||
self.pending_room_creation = Some(room.clone());
|
||||
cx.executor().spawn(async move {
|
||||
room.await.map_err(|err| anyhow!("{:?}", err))?;
|
||||
anyhow::Ok(())
|
||||
})
|
||||
};
|
||||
|
||||
cx.spawn(move |this, mut cx| async move {
|
||||
let result = invite.await;
|
||||
if result.is_ok() {
|
||||
this.update(&mut cx, |this, cx| this.report_call_event("invite", cx))?;
|
||||
} else {
|
||||
// TODO: Resport collaboration error
|
||||
}
|
||||
|
||||
this.update(&mut cx, |this, cx| {
|
||||
this.pending_invites.remove(&called_user_id);
|
||||
cx.notify();
|
||||
})?;
|
||||
result
|
||||
})
|
||||
}
|
||||
|
||||
pub fn cancel_invite(
|
||||
&mut self,
|
||||
called_user_id: u64,
|
||||
cx: &mut ModelContext<Self>,
|
||||
) -> Task<Result<()>> {
|
||||
let room_id = if let Some(room) = self.room() {
|
||||
room.read(cx).id()
|
||||
} else {
|
||||
return Task::ready(Err(anyhow!("no active call")));
|
||||
};
|
||||
|
||||
let client = self.client.clone();
|
||||
cx.executor().spawn(async move {
|
||||
client
|
||||
.request(proto::CancelCall {
|
||||
room_id,
|
||||
called_user_id,
|
||||
})
|
||||
.await?;
|
||||
anyhow::Ok(())
|
||||
})
|
||||
}
|
||||
|
||||
pub fn incoming(&self) -> watch::Receiver<Option<IncomingCall>> {
|
||||
self.incoming_call.1.clone()
|
||||
}
|
||||
|
||||
pub fn accept_incoming(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
|
||||
if self.room.is_some() {
|
||||
return Task::ready(Err(anyhow!("cannot join while on another call")));
|
||||
}
|
||||
|
||||
let call = if let Some(call) = self.incoming_call.1.borrow().clone() {
|
||||
call
|
||||
} else {
|
||||
return Task::ready(Err(anyhow!("no incoming call")));
|
||||
};
|
||||
|
||||
let join = Room::join(&call, self.client.clone(), self.user_store.clone(), cx);
|
||||
|
||||
cx.spawn(|this, mut cx| async move {
|
||||
let room = join.await?;
|
||||
this.update(&mut cx, |this, cx| this.set_room(Some(room.clone()), cx))?
|
||||
.await?;
|
||||
this.update(&mut cx, |this, cx| {
|
||||
this.report_call_event("accept incoming", cx)
|
||||
})?;
|
||||
Ok(())
|
||||
})
|
||||
}
|
||||
|
||||
pub fn decline_incoming(&mut self, cx: &mut ModelContext<Self>) -> Result<()> {
|
||||
let call = self
|
||||
.incoming_call
|
||||
.0
|
||||
.borrow_mut()
|
||||
.take()
|
||||
.ok_or_else(|| anyhow!("no incoming call"))?;
|
||||
report_call_event_for_room("decline incoming", call.room_id, None, &self.client, cx);
|
||||
self.client.send(proto::DeclineCall {
|
||||
room_id: call.room_id,
|
||||
})?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub fn join_channel(
|
||||
&mut self,
|
||||
channel_id: u64,
|
||||
cx: &mut ModelContext<Self>,
|
||||
) -> Task<Result<Model<Room>>> {
|
||||
if let Some(room) = self.room().cloned() {
|
||||
if room.read(cx).channel_id() == Some(channel_id) {
|
||||
return Task::ready(Ok(room));
|
||||
} else {
|
||||
room.update(cx, |room, cx| room.clear_state(cx));
|
||||
}
|
||||
}
|
||||
|
||||
let join = Room::join_channel(channel_id, self.client.clone(), self.user_store.clone(), cx);
|
||||
|
||||
cx.spawn(|this, mut cx| async move {
|
||||
let room = join.await?;
|
||||
this.update(&mut cx, |this, cx| this.set_room(Some(room.clone()), cx))?
|
||||
.await?;
|
||||
this.update(&mut cx, |this, cx| {
|
||||
this.report_call_event("join channel", cx)
|
||||
})?;
|
||||
Ok(room)
|
||||
})
|
||||
}
|
||||
|
||||
pub fn hang_up(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
|
||||
cx.notify();
|
||||
self.report_call_event("hang up", cx);
|
||||
|
||||
Audio::end_call(cx);
|
||||
if let Some((room, _)) = self.room.take() {
|
||||
room.update(cx, |room, cx| room.leave(cx))
|
||||
} else {
|
||||
Task::ready(Ok(()))
|
||||
}
|
||||
}
|
||||
|
||||
pub fn share_project(
|
||||
&mut self,
|
||||
project: Model<Project>,
|
||||
cx: &mut ModelContext<Self>,
|
||||
) -> Task<Result<u64>> {
|
||||
if let Some((room, _)) = self.room.as_ref() {
|
||||
self.report_call_event("share project", cx);
|
||||
room.update(cx, |room, cx| room.share_project(project, cx))
|
||||
} else {
|
||||
Task::ready(Err(anyhow!("no active call")))
|
||||
}
|
||||
}
|
||||
|
||||
pub fn unshare_project(
|
||||
&mut self,
|
||||
project: Model<Project>,
|
||||
cx: &mut ModelContext<Self>,
|
||||
) -> Result<()> {
|
||||
if let Some((room, _)) = self.room.as_ref() {
|
||||
self.report_call_event("unshare project", cx);
|
||||
room.update(cx, |room, cx| room.unshare_project(project, cx))
|
||||
} else {
|
||||
Err(anyhow!("no active call"))
|
||||
}
|
||||
}
|
||||
|
||||
pub fn location(&self) -> Option<&WeakModel<Project>> {
|
||||
self.location.as_ref()
|
||||
}
|
||||
|
||||
pub fn set_location(
|
||||
&mut self,
|
||||
project: Option<&Model<Project>>,
|
||||
cx: &mut ModelContext<Self>,
|
||||
) -> Task<Result<()>> {
|
||||
if project.is_some() || !*ZED_ALWAYS_ACTIVE {
|
||||
self.location = project.map(|project| project.downgrade());
|
||||
if let Some((room, _)) = self.room.as_ref() {
|
||||
return room.update(cx, |room, cx| room.set_location(project, cx));
|
||||
}
|
||||
}
|
||||
Task::ready(Ok(()))
|
||||
}
|
||||
|
||||
fn set_room(
|
||||
&mut self,
|
||||
room: Option<Model<Room>>,
|
||||
cx: &mut ModelContext<Self>,
|
||||
) -> Task<Result<()>> {
|
||||
if room.as_ref() != self.room.as_ref().map(|room| &room.0) {
|
||||
cx.notify();
|
||||
if let Some(room) = room {
|
||||
if room.read(cx).status().is_offline() {
|
||||
self.room = None;
|
||||
Task::ready(Ok(()))
|
||||
} else {
|
||||
let subscriptions = vec![
|
||||
cx.observe(&room, |this, room, cx| {
|
||||
if room.read(cx).status().is_offline() {
|
||||
this.set_room(None, cx).detach_and_log_err(cx);
|
||||
}
|
||||
|
||||
cx.notify();
|
||||
}),
|
||||
cx.subscribe(&room, |_, _, event, cx| cx.emit(event.clone())),
|
||||
];
|
||||
self.room = Some((room.clone(), subscriptions));
|
||||
let location = self
|
||||
.location
|
||||
.as_ref()
|
||||
.and_then(|location| location.upgrade());
|
||||
room.update(cx, |room, cx| room.set_location(location.as_ref(), cx))
|
||||
}
|
||||
} else {
|
||||
self.room = None;
|
||||
Task::ready(Ok(()))
|
||||
}
|
||||
} else {
|
||||
Task::ready(Ok(()))
|
||||
}
|
||||
}
|
||||
|
||||
pub fn room(&self) -> Option<&Model<Room>> {
|
||||
self.room.as_ref().map(|(room, _)| room)
|
||||
}
|
||||
|
||||
pub fn client(&self) -> Arc<Client> {
|
||||
self.client.clone()
|
||||
}
|
||||
|
||||
pub fn pending_invites(&self) -> &HashSet<u64> {
|
||||
&self.pending_invites
|
||||
}
|
||||
|
||||
pub fn report_call_event(&self, operation: &'static str, cx: &AppContext) {
|
||||
if let Some(room) = self.room() {
|
||||
let room = room.read(cx);
|
||||
report_call_event_for_room(operation, room.id(), room.channel_id(), &self.client, cx);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub fn report_call_event_for_room(
|
||||
operation: &'static str,
|
||||
room_id: u64,
|
||||
channel_id: Option<u64>,
|
||||
client: &Arc<Client>,
|
||||
cx: &AppContext,
|
||||
) {
|
||||
let telemetry = client.telemetry();
|
||||
let telemetry_settings = *TelemetrySettings::get_global(cx);
|
||||
let event = ClickhouseEvent::Call {
|
||||
operation,
|
||||
room_id: Some(room_id),
|
||||
channel_id,
|
||||
};
|
||||
telemetry.report_clickhouse_event(event, telemetry_settings);
|
||||
}
|
||||
|
||||
pub fn report_call_event_for_channel(
|
||||
operation: &'static str,
|
||||
channel_id: u64,
|
||||
client: &Arc<Client>,
|
||||
cx: &AppContext,
|
||||
) {
|
||||
let room = ActiveCall::global(cx).read(cx).room();
|
||||
|
||||
let telemetry = client.telemetry();
|
||||
|
||||
let telemetry_settings = *TelemetrySettings::get_global(cx);
|
||||
|
||||
let event = ClickhouseEvent::Call {
|
||||
operation,
|
||||
room_id: room.map(|r| r.read(cx).id()),
|
||||
channel_id: Some(channel_id),
|
||||
};
|
||||
telemetry.report_clickhouse_event(event, telemetry_settings);
|
||||
}
|
||||
32
crates/call2/src/call_settings.rs
Normal file
32
crates/call2/src/call_settings.rs
Normal file
@@ -0,0 +1,32 @@
|
||||
use anyhow::Result;
|
||||
use gpui2::AppContext;
|
||||
use schemars::JsonSchema;
|
||||
use serde_derive::{Deserialize, Serialize};
|
||||
use settings2::Settings;
|
||||
|
||||
#[derive(Deserialize, Debug)]
|
||||
pub struct CallSettings {
|
||||
pub mute_on_join: bool,
|
||||
}
|
||||
|
||||
#[derive(Clone, Default, Serialize, Deserialize, JsonSchema, Debug)]
|
||||
pub struct CallSettingsContent {
|
||||
pub mute_on_join: Option<bool>,
|
||||
}
|
||||
|
||||
impl Settings for CallSettings {
|
||||
const KEY: Option<&'static str> = Some("calls");
|
||||
|
||||
type FileContent = CallSettingsContent;
|
||||
|
||||
fn load(
|
||||
default_value: &Self::FileContent,
|
||||
user_values: &[&Self::FileContent],
|
||||
_cx: &mut AppContext,
|
||||
) -> Result<Self>
|
||||
where
|
||||
Self: Sized,
|
||||
{
|
||||
Self::load_via_json_merge(default_value, user_values)
|
||||
}
|
||||
}
|
||||
52
crates/call2/src/participant.rs
Normal file
52
crates/call2/src/participant.rs
Normal file
@@ -0,0 +1,52 @@
|
||||
use anyhow::{anyhow, Result};
|
||||
use client2::ParticipantIndex;
|
||||
use client2::{proto, User};
|
||||
use collections::HashMap;
|
||||
use gpui2::WeakModel;
|
||||
pub use live_kit_client2::Frame;
|
||||
use live_kit_client2::{RemoteAudioTrack, RemoteVideoTrack};
|
||||
use project2::Project;
|
||||
use std::sync::Arc;
|
||||
|
||||
#[derive(Copy, Clone, Debug, Eq, PartialEq)]
|
||||
pub enum ParticipantLocation {
|
||||
SharedProject { project_id: u64 },
|
||||
UnsharedProject,
|
||||
External,
|
||||
}
|
||||
|
||||
impl ParticipantLocation {
|
||||
pub fn from_proto(location: Option<proto::ParticipantLocation>) -> Result<Self> {
|
||||
match location.and_then(|l| l.variant) {
|
||||
Some(proto::participant_location::Variant::SharedProject(project)) => {
|
||||
Ok(Self::SharedProject {
|
||||
project_id: project.id,
|
||||
})
|
||||
}
|
||||
Some(proto::participant_location::Variant::UnsharedProject(_)) => {
|
||||
Ok(Self::UnsharedProject)
|
||||
}
|
||||
Some(proto::participant_location::Variant::External(_)) => Ok(Self::External),
|
||||
None => Err(anyhow!("participant location was not provided")),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Default)]
|
||||
pub struct LocalParticipant {
|
||||
pub projects: Vec<proto::ParticipantProject>,
|
||||
pub active_project: Option<WeakModel<Project>>,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug)]
|
||||
pub struct RemoteParticipant {
|
||||
pub user: Arc<User>,
|
||||
pub peer_id: proto::PeerId,
|
||||
pub projects: Vec<proto::ParticipantProject>,
|
||||
pub location: ParticipantLocation,
|
||||
pub participant_index: ParticipantIndex,
|
||||
pub muted: bool,
|
||||
pub speaking: bool,
|
||||
pub video_tracks: HashMap<live_kit_client2::Sid, Arc<RemoteVideoTrack>>,
|
||||
pub audio_tracks: HashMap<live_kit_client2::Sid, Arc<RemoteAudioTrack>>,
|
||||
}
|
||||
1605
crates/call2/src/room.rs
Normal file
1605
crates/call2/src/room.rs
Normal file
File diff suppressed because it is too large
Load Diff
@@ -23,6 +23,7 @@ language = { path = "../language" }
|
||||
settings = { path = "../settings" }
|
||||
feature_flags = { path = "../feature_flags" }
|
||||
sum_tree = { path = "../sum_tree" }
|
||||
clock = { path = "../clock" }
|
||||
|
||||
anyhow.workspace = true
|
||||
futures.workspace = true
|
||||
@@ -38,7 +39,7 @@ smol.workspace = true
|
||||
thiserror.workspace = true
|
||||
time.workspace = true
|
||||
tiny_http = "0.8"
|
||||
uuid = { version = "1.1.2", features = ["v4"] }
|
||||
uuid.workspace = true
|
||||
url = "2.2"
|
||||
serde.workspace = true
|
||||
serde_derive.workspace = true
|
||||
|
||||
@@ -2,19 +2,22 @@ mod channel_buffer;
|
||||
mod channel_chat;
|
||||
mod channel_store;
|
||||
|
||||
pub use channel_buffer::{ChannelBuffer, ChannelBufferEvent};
|
||||
pub use channel_chat::{ChannelChat, ChannelChatEvent, ChannelMessage, ChannelMessageId};
|
||||
pub use channel_store::{
|
||||
Channel, ChannelData, ChannelEvent, ChannelId, ChannelMembership, ChannelPath, ChannelStore,
|
||||
};
|
||||
|
||||
use client::Client;
|
||||
use client::{Client, UserStore};
|
||||
use gpui::{AppContext, ModelHandle};
|
||||
use std::sync::Arc;
|
||||
|
||||
pub use channel_buffer::{ChannelBuffer, ChannelBufferEvent, ACKNOWLEDGE_DEBOUNCE_INTERVAL};
|
||||
pub use channel_chat::{
|
||||
mentions_to_proto, ChannelChat, ChannelChatEvent, ChannelMessage, ChannelMessageId,
|
||||
MessageParams,
|
||||
};
|
||||
pub use channel_store::{Channel, ChannelEvent, ChannelId, ChannelMembership, ChannelStore};
|
||||
|
||||
#[cfg(test)]
|
||||
mod channel_store_tests;
|
||||
|
||||
pub fn init(client: &Arc<Client>) {
|
||||
pub fn init(client: &Arc<Client>, user_store: ModelHandle<UserStore>, cx: &mut AppContext) {
|
||||
channel_store::init(client, user_store, cx);
|
||||
channel_buffer::init(client);
|
||||
channel_chat::init(client);
|
||||
}
|
||||
|
||||
@@ -1,31 +1,41 @@
|
||||
use crate::Channel;
|
||||
use crate::{Channel, ChannelId, ChannelStore};
|
||||
use anyhow::Result;
|
||||
use client::Client;
|
||||
use gpui::{AppContext, AsyncAppContext, Entity, ModelContext, ModelHandle};
|
||||
use rpc::{proto, TypedEnvelope};
|
||||
use std::sync::Arc;
|
||||
use client::{Client, Collaborator, UserStore};
|
||||
use collections::HashMap;
|
||||
use gpui::{AppContext, AsyncAppContext, Entity, ModelContext, ModelHandle, Task};
|
||||
use language::proto::serialize_version;
|
||||
use rpc::{
|
||||
proto::{self, PeerId},
|
||||
TypedEnvelope,
|
||||
};
|
||||
use std::{sync::Arc, time::Duration};
|
||||
use util::ResultExt;
|
||||
|
||||
pub const ACKNOWLEDGE_DEBOUNCE_INTERVAL: Duration = Duration::from_millis(250);
|
||||
|
||||
pub(crate) fn init(client: &Arc<Client>) {
|
||||
client.add_model_message_handler(ChannelBuffer::handle_update_channel_buffer);
|
||||
client.add_model_message_handler(ChannelBuffer::handle_add_channel_buffer_collaborator);
|
||||
client.add_model_message_handler(ChannelBuffer::handle_remove_channel_buffer_collaborator);
|
||||
client.add_model_message_handler(ChannelBuffer::handle_update_channel_buffer_collaborator);
|
||||
client.add_model_message_handler(ChannelBuffer::handle_update_channel_buffer_collaborators);
|
||||
}
|
||||
|
||||
pub struct ChannelBuffer {
|
||||
pub(crate) channel: Arc<Channel>,
|
||||
pub channel_id: ChannelId,
|
||||
connected: bool,
|
||||
collaborators: Vec<proto::Collaborator>,
|
||||
collaborators: HashMap<PeerId, Collaborator>,
|
||||
user_store: ModelHandle<UserStore>,
|
||||
channel_store: ModelHandle<ChannelStore>,
|
||||
buffer: ModelHandle<language::Buffer>,
|
||||
buffer_epoch: u64,
|
||||
client: Arc<Client>,
|
||||
subscription: Option<client::Subscription>,
|
||||
acknowledge_task: Option<Task<Result<()>>>,
|
||||
}
|
||||
|
||||
pub enum ChannelBufferEvent {
|
||||
CollaboratorsChanged,
|
||||
Disconnected,
|
||||
BufferEdited,
|
||||
ChannelChanged,
|
||||
}
|
||||
|
||||
impl Entity for ChannelBuffer {
|
||||
@@ -33,9 +43,12 @@ impl Entity for ChannelBuffer {
|
||||
|
||||
fn release(&mut self, _: &mut AppContext) {
|
||||
if self.connected {
|
||||
if let Some(task) = self.acknowledge_task.take() {
|
||||
task.detach();
|
||||
}
|
||||
self.client
|
||||
.send(proto::LeaveChannelBuffer {
|
||||
channel_id: self.channel.id,
|
||||
channel_id: self.channel_id,
|
||||
})
|
||||
.log_err();
|
||||
}
|
||||
@@ -46,6 +59,8 @@ impl ChannelBuffer {
|
||||
pub(crate) async fn new(
|
||||
channel: Arc<Channel>,
|
||||
client: Arc<Client>,
|
||||
user_store: ModelHandle<UserStore>,
|
||||
channel_store: ModelHandle<ChannelStore>,
|
||||
mut cx: AsyncAppContext,
|
||||
) -> Result<ModelHandle<Self>> {
|
||||
let response = client
|
||||
@@ -61,8 +76,6 @@ impl ChannelBuffer {
|
||||
.map(language::proto::deserialize_operation)
|
||||
.collect::<Result<Vec<_>, _>>()?;
|
||||
|
||||
let collaborators = response.collaborators;
|
||||
|
||||
let buffer = cx.add_model(|_| {
|
||||
language::Buffer::remote(response.buffer_id, response.replica_id as u16, base_text)
|
||||
});
|
||||
@@ -73,34 +86,51 @@ impl ChannelBuffer {
|
||||
anyhow::Ok(cx.add_model(|cx| {
|
||||
cx.subscribe(&buffer, Self::on_buffer_update).detach();
|
||||
|
||||
Self {
|
||||
let mut this = Self {
|
||||
buffer,
|
||||
buffer_epoch: response.epoch,
|
||||
client,
|
||||
connected: true,
|
||||
collaborators,
|
||||
channel,
|
||||
collaborators: Default::default(),
|
||||
acknowledge_task: None,
|
||||
channel_id: channel.id,
|
||||
subscription: Some(subscription.set_model(&cx.handle(), &mut cx.to_async())),
|
||||
}
|
||||
user_store,
|
||||
channel_store,
|
||||
};
|
||||
this.replace_collaborators(response.collaborators, cx);
|
||||
this
|
||||
}))
|
||||
}
|
||||
|
||||
pub fn remote_id(&self, cx: &AppContext) -> u64 {
|
||||
self.buffer.read(cx).remote_id()
|
||||
}
|
||||
|
||||
pub fn user_store(&self) -> &ModelHandle<UserStore> {
|
||||
&self.user_store
|
||||
}
|
||||
|
||||
pub(crate) fn replace_collaborators(
|
||||
&mut self,
|
||||
collaborators: Vec<proto::Collaborator>,
|
||||
cx: &mut ModelContext<Self>,
|
||||
) {
|
||||
for old_collaborator in &self.collaborators {
|
||||
if collaborators
|
||||
.iter()
|
||||
.any(|c| c.replica_id == old_collaborator.replica_id)
|
||||
{
|
||||
let mut new_collaborators = HashMap::default();
|
||||
for collaborator in collaborators {
|
||||
if let Ok(collaborator) = Collaborator::from_proto(collaborator) {
|
||||
new_collaborators.insert(collaborator.peer_id, collaborator);
|
||||
}
|
||||
}
|
||||
|
||||
for (_, old_collaborator) in &self.collaborators {
|
||||
if !new_collaborators.contains_key(&old_collaborator.peer_id) {
|
||||
self.buffer.update(cx, |buffer, cx| {
|
||||
buffer.remove_peer(old_collaborator.replica_id as u16, cx)
|
||||
});
|
||||
}
|
||||
}
|
||||
self.collaborators = collaborators;
|
||||
self.collaborators = new_collaborators;
|
||||
cx.emit(ChannelBufferEvent::CollaboratorsChanged);
|
||||
cx.notify();
|
||||
}
|
||||
@@ -127,64 +157,14 @@ impl ChannelBuffer {
|
||||
Ok(())
|
||||
}
|
||||
|
||||
async fn handle_add_channel_buffer_collaborator(
|
||||
async fn handle_update_channel_buffer_collaborators(
|
||||
this: ModelHandle<Self>,
|
||||
envelope: TypedEnvelope<proto::AddChannelBufferCollaborator>,
|
||||
_: Arc<Client>,
|
||||
mut cx: AsyncAppContext,
|
||||
) -> Result<()> {
|
||||
let collaborator = envelope.payload.collaborator.ok_or_else(|| {
|
||||
anyhow::anyhow!(
|
||||
"Should have gotten a collaborator in the AddChannelBufferCollaborator message"
|
||||
)
|
||||
})?;
|
||||
|
||||
this.update(&mut cx, |this, cx| {
|
||||
this.collaborators.push(collaborator);
|
||||
cx.emit(ChannelBufferEvent::CollaboratorsChanged);
|
||||
cx.notify();
|
||||
});
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
async fn handle_remove_channel_buffer_collaborator(
|
||||
this: ModelHandle<Self>,
|
||||
message: TypedEnvelope<proto::RemoveChannelBufferCollaborator>,
|
||||
message: TypedEnvelope<proto::UpdateChannelBufferCollaborators>,
|
||||
_: Arc<Client>,
|
||||
mut cx: AsyncAppContext,
|
||||
) -> Result<()> {
|
||||
this.update(&mut cx, |this, cx| {
|
||||
this.collaborators.retain(|collaborator| {
|
||||
if collaborator.peer_id == message.payload.peer_id {
|
||||
this.buffer.update(cx, |buffer, cx| {
|
||||
buffer.remove_peer(collaborator.replica_id as u16, cx)
|
||||
});
|
||||
false
|
||||
} else {
|
||||
true
|
||||
}
|
||||
});
|
||||
cx.emit(ChannelBufferEvent::CollaboratorsChanged);
|
||||
cx.notify();
|
||||
});
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
async fn handle_update_channel_buffer_collaborator(
|
||||
this: ModelHandle<Self>,
|
||||
message: TypedEnvelope<proto::UpdateChannelBufferCollaborator>,
|
||||
_: Arc<Client>,
|
||||
mut cx: AsyncAppContext,
|
||||
) -> Result<()> {
|
||||
this.update(&mut cx, |this, cx| {
|
||||
for collaborator in &mut this.collaborators {
|
||||
if collaborator.peer_id == message.payload.old_peer_id {
|
||||
collaborator.peer_id = message.payload.new_peer_id;
|
||||
break;
|
||||
}
|
||||
}
|
||||
this.replace_collaborators(message.payload.collaborators, cx);
|
||||
cx.emit(ChannelBufferEvent::CollaboratorsChanged);
|
||||
cx.notify();
|
||||
});
|
||||
@@ -196,19 +176,45 @@ impl ChannelBuffer {
|
||||
&mut self,
|
||||
_: ModelHandle<language::Buffer>,
|
||||
event: &language::Event,
|
||||
_: &mut ModelContext<Self>,
|
||||
cx: &mut ModelContext<Self>,
|
||||
) {
|
||||
if let language::Event::Operation(operation) = event {
|
||||
let operation = language::proto::serialize_operation(operation);
|
||||
self.client
|
||||
.send(proto::UpdateChannelBuffer {
|
||||
channel_id: self.channel.id,
|
||||
operations: vec![operation],
|
||||
})
|
||||
.log_err();
|
||||
match event {
|
||||
language::Event::Operation(operation) => {
|
||||
let operation = language::proto::serialize_operation(operation);
|
||||
self.client
|
||||
.send(proto::UpdateChannelBuffer {
|
||||
channel_id: self.channel_id,
|
||||
operations: vec![operation],
|
||||
})
|
||||
.log_err();
|
||||
}
|
||||
language::Event::Edited => {
|
||||
cx.emit(ChannelBufferEvent::BufferEdited);
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
|
||||
pub fn acknowledge_buffer_version(&mut self, cx: &mut ModelContext<'_, ChannelBuffer>) {
|
||||
let buffer = self.buffer.read(cx);
|
||||
let version = buffer.version();
|
||||
let buffer_id = buffer.remote_id();
|
||||
let client = self.client.clone();
|
||||
let epoch = self.epoch();
|
||||
|
||||
self.acknowledge_task = Some(cx.spawn_weak(|_, cx| async move {
|
||||
cx.background().timer(ACKNOWLEDGE_DEBOUNCE_INTERVAL).await;
|
||||
client
|
||||
.send(proto::AckBufferOperation {
|
||||
buffer_id,
|
||||
epoch,
|
||||
version: serialize_version(&version),
|
||||
})
|
||||
.ok();
|
||||
Ok(())
|
||||
}));
|
||||
}
|
||||
|
||||
pub fn epoch(&self) -> u64 {
|
||||
self.buffer_epoch
|
||||
}
|
||||
@@ -217,16 +223,19 @@ impl ChannelBuffer {
|
||||
self.buffer.clone()
|
||||
}
|
||||
|
||||
pub fn collaborators(&self) -> &[proto::Collaborator] {
|
||||
pub fn collaborators(&self) -> &HashMap<PeerId, Collaborator> {
|
||||
&self.collaborators
|
||||
}
|
||||
|
||||
pub fn channel(&self) -> Arc<Channel> {
|
||||
self.channel.clone()
|
||||
pub fn channel(&self, cx: &AppContext) -> Option<Arc<Channel>> {
|
||||
self.channel_store
|
||||
.read(cx)
|
||||
.channel_for_id(self.channel_id)
|
||||
.cloned()
|
||||
}
|
||||
|
||||
pub(crate) fn disconnect(&mut self, cx: &mut ModelContext<Self>) {
|
||||
log::info!("channel buffer {} disconnected", self.channel.id);
|
||||
log::info!("channel buffer {} disconnected", self.channel_id);
|
||||
if self.connected {
|
||||
self.connected = false;
|
||||
self.subscription.take();
|
||||
@@ -235,6 +244,11 @@ impl ChannelBuffer {
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) fn channel_changed(&mut self, cx: &mut ModelContext<Self>) {
|
||||
cx.emit(ChannelBufferEvent::ChannelChanged);
|
||||
cx.notify()
|
||||
}
|
||||
|
||||
pub fn is_connected(&self) -> bool {
|
||||
self.connected
|
||||
}
|
||||
|
||||
@@ -1,22 +1,30 @@
|
||||
use crate::Channel;
|
||||
use crate::{Channel, ChannelId, ChannelStore};
|
||||
use anyhow::{anyhow, Result};
|
||||
use client::{
|
||||
proto,
|
||||
user::{User, UserStore},
|
||||
Client, Subscription, TypedEnvelope,
|
||||
Client, Subscription, TypedEnvelope, UserId,
|
||||
};
|
||||
use futures::lock::Mutex;
|
||||
use gpui::{AppContext, AsyncAppContext, Entity, ModelContext, ModelHandle, Task};
|
||||
use rand::prelude::*;
|
||||
use std::{collections::HashSet, mem, ops::Range, sync::Arc};
|
||||
use std::{
|
||||
collections::HashSet,
|
||||
mem,
|
||||
ops::{ControlFlow, Range},
|
||||
sync::Arc,
|
||||
};
|
||||
use sum_tree::{Bias, SumTree};
|
||||
use time::OffsetDateTime;
|
||||
use util::{post_inc, ResultExt as _, TryFutureExt};
|
||||
|
||||
pub struct ChannelChat {
|
||||
channel: Arc<Channel>,
|
||||
pub channel_id: ChannelId,
|
||||
messages: SumTree<ChannelMessage>,
|
||||
acknowledged_message_ids: HashSet<u64>,
|
||||
channel_store: ModelHandle<ChannelStore>,
|
||||
loaded_all_messages: bool,
|
||||
last_acknowledged_id: Option<u64>,
|
||||
next_pending_message_id: usize,
|
||||
user_store: ModelHandle<UserStore>,
|
||||
rpc: Arc<Client>,
|
||||
@@ -25,6 +33,12 @@ pub struct ChannelChat {
|
||||
_subscription: Subscription,
|
||||
}
|
||||
|
||||
#[derive(Debug, PartialEq, Eq)]
|
||||
pub struct MessageParams {
|
||||
pub text: String,
|
||||
pub mentions: Vec<(Range<usize>, UserId)>,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug)]
|
||||
pub struct ChannelMessage {
|
||||
pub id: ChannelMessageId,
|
||||
@@ -32,9 +46,10 @@ pub struct ChannelMessage {
|
||||
pub timestamp: OffsetDateTime,
|
||||
pub sender: Arc<User>,
|
||||
pub nonce: u128,
|
||||
pub mentions: Vec<(Range<usize>, UserId)>,
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone, Debug, PartialEq, Eq, PartialOrd, Ord)]
|
||||
#[derive(Copy, Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
|
||||
pub enum ChannelMessageId {
|
||||
Saved(u64),
|
||||
Pending(usize),
|
||||
@@ -55,6 +70,10 @@ pub enum ChannelChatEvent {
|
||||
old_range: Range<usize>,
|
||||
new_count: usize,
|
||||
},
|
||||
NewMessage {
|
||||
channel_id: ChannelId,
|
||||
message_id: u64,
|
||||
},
|
||||
}
|
||||
|
||||
pub fn init(client: &Arc<Client>) {
|
||||
@@ -68,7 +87,7 @@ impl Entity for ChannelChat {
|
||||
fn release(&mut self, _: &mut AppContext) {
|
||||
self.rpc
|
||||
.send(proto::LeaveChannelChat {
|
||||
channel_id: self.channel.id,
|
||||
channel_id: self.channel_id,
|
||||
})
|
||||
.log_err();
|
||||
}
|
||||
@@ -77,6 +96,7 @@ impl Entity for ChannelChat {
|
||||
impl ChannelChat {
|
||||
pub async fn new(
|
||||
channel: Arc<Channel>,
|
||||
channel_store: ModelHandle<ChannelStore>,
|
||||
user_store: ModelHandle<UserStore>,
|
||||
client: Arc<Client>,
|
||||
mut cx: AsyncAppContext,
|
||||
@@ -92,13 +112,16 @@ impl ChannelChat {
|
||||
|
||||
Ok(cx.add_model(|cx| {
|
||||
let mut this = Self {
|
||||
channel,
|
||||
channel_id: channel.id,
|
||||
user_store,
|
||||
channel_store,
|
||||
rpc: client,
|
||||
outgoing_messages_lock: Default::default(),
|
||||
messages: Default::default(),
|
||||
acknowledged_message_ids: Default::default(),
|
||||
loaded_all_messages,
|
||||
next_pending_message_id: 0,
|
||||
last_acknowledged_id: None,
|
||||
rng: StdRng::from_entropy(),
|
||||
_subscription: subscription.set_model(&cx.handle(), &mut cx.to_async()),
|
||||
};
|
||||
@@ -107,16 +130,23 @@ impl ChannelChat {
|
||||
}))
|
||||
}
|
||||
|
||||
pub fn channel(&self) -> &Arc<Channel> {
|
||||
&self.channel
|
||||
pub fn channel(&self, cx: &AppContext) -> Option<Arc<Channel>> {
|
||||
self.channel_store
|
||||
.read(cx)
|
||||
.channel_for_id(self.channel_id)
|
||||
.cloned()
|
||||
}
|
||||
|
||||
pub fn client(&self) -> &Arc<Client> {
|
||||
&self.rpc
|
||||
}
|
||||
|
||||
pub fn send_message(
|
||||
&mut self,
|
||||
body: String,
|
||||
message: MessageParams,
|
||||
cx: &mut ModelContext<Self>,
|
||||
) -> Result<Task<Result<()>>> {
|
||||
if body.is_empty() {
|
||||
) -> Result<Task<Result<u64>>> {
|
||||
if message.text.is_empty() {
|
||||
Err(anyhow!("message body can't be empty"))?;
|
||||
}
|
||||
|
||||
@@ -126,16 +156,17 @@ impl ChannelChat {
|
||||
.current_user()
|
||||
.ok_or_else(|| anyhow!("current_user is not present"))?;
|
||||
|
||||
let channel_id = self.channel.id;
|
||||
let channel_id = self.channel_id;
|
||||
let pending_id = ChannelMessageId::Pending(post_inc(&mut self.next_pending_message_id));
|
||||
let nonce = self.rng.gen();
|
||||
self.insert_messages(
|
||||
SumTree::from_item(
|
||||
ChannelMessage {
|
||||
id: pending_id,
|
||||
body: body.clone(),
|
||||
body: message.text.clone(),
|
||||
sender: current_user,
|
||||
timestamp: OffsetDateTime::now_utc(),
|
||||
mentions: message.mentions.clone(),
|
||||
nonce,
|
||||
},
|
||||
&(),
|
||||
@@ -149,27 +180,25 @@ impl ChannelChat {
|
||||
let outgoing_message_guard = outgoing_messages_lock.lock().await;
|
||||
let request = rpc.request(proto::SendChannelMessage {
|
||||
channel_id,
|
||||
body,
|
||||
body: message.text,
|
||||
nonce: Some(nonce.into()),
|
||||
mentions: mentions_to_proto(&message.mentions),
|
||||
});
|
||||
let response = request.await?;
|
||||
drop(outgoing_message_guard);
|
||||
let message = ChannelMessage::from_proto(
|
||||
response.message.ok_or_else(|| anyhow!("invalid message"))?,
|
||||
&user_store,
|
||||
&mut cx,
|
||||
)
|
||||
.await?;
|
||||
let response = response.message.ok_or_else(|| anyhow!("invalid message"))?;
|
||||
let id = response.id;
|
||||
let message = ChannelMessage::from_proto(response, &user_store, &mut cx).await?;
|
||||
this.update(&mut cx, |this, cx| {
|
||||
this.insert_messages(SumTree::from_item(message, &()), cx);
|
||||
Ok(())
|
||||
Ok(id)
|
||||
})
|
||||
}))
|
||||
}
|
||||
|
||||
pub fn remove_message(&mut self, id: u64, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
|
||||
let response = self.rpc.request(proto::RemoveChannelMessage {
|
||||
channel_id: self.channel.id,
|
||||
channel_id: self.channel_id,
|
||||
message_id: id,
|
||||
});
|
||||
cx.spawn(|this, mut cx| async move {
|
||||
@@ -182,47 +211,102 @@ impl ChannelChat {
|
||||
})
|
||||
}
|
||||
|
||||
pub fn load_more_messages(&mut self, cx: &mut ModelContext<Self>) -> bool {
|
||||
if !self.loaded_all_messages {
|
||||
let rpc = self.rpc.clone();
|
||||
let user_store = self.user_store.clone();
|
||||
let channel_id = self.channel.id;
|
||||
if let Some(before_message_id) =
|
||||
self.messages.first().and_then(|message| match message.id {
|
||||
ChannelMessageId::Saved(id) => Some(id),
|
||||
ChannelMessageId::Pending(_) => None,
|
||||
})
|
||||
{
|
||||
cx.spawn(|this, mut cx| {
|
||||
async move {
|
||||
let response = rpc
|
||||
.request(proto::GetChannelMessages {
|
||||
channel_id,
|
||||
before_message_id,
|
||||
})
|
||||
.await?;
|
||||
let loaded_all_messages = response.done;
|
||||
let messages =
|
||||
messages_from_proto(response.messages, &user_store, &mut cx).await?;
|
||||
this.update(&mut cx, |this, cx| {
|
||||
this.loaded_all_messages = loaded_all_messages;
|
||||
this.insert_messages(messages, cx);
|
||||
});
|
||||
anyhow::Ok(())
|
||||
pub fn load_more_messages(&mut self, cx: &mut ModelContext<Self>) -> Option<Task<Option<()>>> {
|
||||
if self.loaded_all_messages {
|
||||
return None;
|
||||
}
|
||||
|
||||
let rpc = self.rpc.clone();
|
||||
let user_store = self.user_store.clone();
|
||||
let channel_id = self.channel_id;
|
||||
let before_message_id = self.first_loaded_message_id()?;
|
||||
Some(cx.spawn(|this, mut cx| {
|
||||
async move {
|
||||
let response = rpc
|
||||
.request(proto::GetChannelMessages {
|
||||
channel_id,
|
||||
before_message_id,
|
||||
})
|
||||
.await?;
|
||||
let loaded_all_messages = response.done;
|
||||
let messages = messages_from_proto(response.messages, &user_store, &mut cx).await?;
|
||||
this.update(&mut cx, |this, cx| {
|
||||
this.loaded_all_messages = loaded_all_messages;
|
||||
this.insert_messages(messages, cx);
|
||||
});
|
||||
anyhow::Ok(())
|
||||
}
|
||||
.log_err()
|
||||
}))
|
||||
}
|
||||
|
||||
pub fn first_loaded_message_id(&mut self) -> Option<u64> {
|
||||
self.messages.first().and_then(|message| match message.id {
|
||||
ChannelMessageId::Saved(id) => Some(id),
|
||||
ChannelMessageId::Pending(_) => None,
|
||||
})
|
||||
}
|
||||
|
||||
/// Load all of the chat messages since a certain message id.
|
||||
///
|
||||
/// For now, we always maintain a suffix of the channel's messages.
|
||||
pub async fn load_history_since_message(
|
||||
chat: ModelHandle<Self>,
|
||||
message_id: u64,
|
||||
mut cx: AsyncAppContext,
|
||||
) -> Option<usize> {
|
||||
loop {
|
||||
let step = chat.update(&mut cx, |chat, cx| {
|
||||
if let Some(first_id) = chat.first_loaded_message_id() {
|
||||
if first_id <= message_id {
|
||||
let mut cursor = chat.messages.cursor::<(ChannelMessageId, Count)>();
|
||||
let message_id = ChannelMessageId::Saved(message_id);
|
||||
cursor.seek(&message_id, Bias::Left, &());
|
||||
return ControlFlow::Break(
|
||||
if cursor
|
||||
.item()
|
||||
.map_or(false, |message| message.id == message_id)
|
||||
{
|
||||
Some(cursor.start().1 .0)
|
||||
} else {
|
||||
None
|
||||
},
|
||||
);
|
||||
}
|
||||
.log_err()
|
||||
})
|
||||
.detach();
|
||||
return true;
|
||||
}
|
||||
ControlFlow::Continue(chat.load_more_messages(cx))
|
||||
});
|
||||
match step {
|
||||
ControlFlow::Break(ix) => return ix,
|
||||
ControlFlow::Continue(task) => task?.await?,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub fn acknowledge_last_message(&mut self, cx: &mut ModelContext<Self>) {
|
||||
if let ChannelMessageId::Saved(latest_message_id) = self.messages.summary().max_id {
|
||||
if self
|
||||
.last_acknowledged_id
|
||||
.map_or(true, |acknowledged_id| acknowledged_id < latest_message_id)
|
||||
{
|
||||
self.rpc
|
||||
.send(proto::AckChannelMessage {
|
||||
channel_id: self.channel_id,
|
||||
message_id: latest_message_id,
|
||||
})
|
||||
.ok();
|
||||
self.last_acknowledged_id = Some(latest_message_id);
|
||||
self.channel_store.update(cx, |store, cx| {
|
||||
store.acknowledge_message_id(self.channel_id, latest_message_id, cx);
|
||||
});
|
||||
}
|
||||
}
|
||||
false
|
||||
}
|
||||
|
||||
pub fn rejoin(&mut self, cx: &mut ModelContext<Self>) {
|
||||
let user_store = self.user_store.clone();
|
||||
let rpc = self.rpc.clone();
|
||||
let channel_id = self.channel.id;
|
||||
let channel_id = self.channel_id;
|
||||
cx.spawn(|this, mut cx| {
|
||||
async move {
|
||||
let response = rpc.request(proto::JoinChannelChat { channel_id }).await?;
|
||||
@@ -255,6 +339,7 @@ impl ChannelChat {
|
||||
let request = rpc.request(proto::SendChannelMessage {
|
||||
channel_id,
|
||||
body: pending_message.body,
|
||||
mentions: mentions_to_proto(&pending_message.mentions),
|
||||
nonce: Some(pending_message.nonce.into()),
|
||||
});
|
||||
let response = request.await?;
|
||||
@@ -290,6 +375,17 @@ impl ChannelChat {
|
||||
cursor.item().unwrap()
|
||||
}
|
||||
|
||||
pub fn acknowledge_message(&mut self, id: u64) {
|
||||
if self.acknowledged_message_ids.insert(id) {
|
||||
self.rpc
|
||||
.send(proto::AckChannelMessage {
|
||||
channel_id: self.channel_id,
|
||||
message_id: id,
|
||||
})
|
||||
.ok();
|
||||
}
|
||||
}
|
||||
|
||||
pub fn messages_in_range(&self, range: Range<usize>) -> impl Iterator<Item = &ChannelMessage> {
|
||||
let mut cursor = self.messages.cursor::<Count>();
|
||||
cursor.seek(&Count(range.start), Bias::Right, &());
|
||||
@@ -313,10 +409,15 @@ impl ChannelChat {
|
||||
.payload
|
||||
.message
|
||||
.ok_or_else(|| anyhow!("empty message"))?;
|
||||
let message_id = message.id;
|
||||
|
||||
let message = ChannelMessage::from_proto(message, &user_store, &mut cx).await?;
|
||||
this.update(&mut cx, |this, cx| {
|
||||
this.insert_messages(SumTree::from_item(message, &()), cx)
|
||||
this.insert_messages(SumTree::from_item(message, &()), cx);
|
||||
cx.emit(ChannelChatEvent::NewMessage {
|
||||
channel_id: this.channel_id,
|
||||
message_id,
|
||||
})
|
||||
});
|
||||
|
||||
Ok(())
|
||||
@@ -388,6 +489,7 @@ impl ChannelChat {
|
||||
old_range: start_ix..end_ix,
|
||||
new_count,
|
||||
});
|
||||
|
||||
cx.notify();
|
||||
}
|
||||
}
|
||||
@@ -416,22 +518,7 @@ async fn messages_from_proto(
|
||||
user_store: &ModelHandle<UserStore>,
|
||||
cx: &mut AsyncAppContext,
|
||||
) -> Result<SumTree<ChannelMessage>> {
|
||||
let unique_user_ids = proto_messages
|
||||
.iter()
|
||||
.map(|m| m.sender_id)
|
||||
.collect::<HashSet<_>>()
|
||||
.into_iter()
|
||||
.collect();
|
||||
user_store
|
||||
.update(cx, |user_store, cx| {
|
||||
user_store.get_users(unique_user_ids, cx)
|
||||
})
|
||||
.await?;
|
||||
|
||||
let mut messages = Vec::with_capacity(proto_messages.len());
|
||||
for message in proto_messages {
|
||||
messages.push(ChannelMessage::from_proto(message, user_store, cx).await?);
|
||||
}
|
||||
let messages = ChannelMessage::from_proto_vec(proto_messages, user_store, cx).await?;
|
||||
let mut result = SumTree::new();
|
||||
result.extend(messages, &());
|
||||
Ok(result)
|
||||
@@ -451,6 +538,14 @@ impl ChannelMessage {
|
||||
Ok(ChannelMessage {
|
||||
id: ChannelMessageId::Saved(message.id),
|
||||
body: message.body,
|
||||
mentions: message
|
||||
.mentions
|
||||
.into_iter()
|
||||
.filter_map(|mention| {
|
||||
let range = mention.range?;
|
||||
Some((range.start as usize..range.end as usize, mention.user_id))
|
||||
})
|
||||
.collect(),
|
||||
timestamp: OffsetDateTime::from_unix_timestamp(message.timestamp as i64)?,
|
||||
sender,
|
||||
nonce: message
|
||||
@@ -463,6 +558,43 @@ impl ChannelMessage {
|
||||
pub fn is_pending(&self) -> bool {
|
||||
matches!(self.id, ChannelMessageId::Pending(_))
|
||||
}
|
||||
|
||||
pub async fn from_proto_vec(
|
||||
proto_messages: Vec<proto::ChannelMessage>,
|
||||
user_store: &ModelHandle<UserStore>,
|
||||
cx: &mut AsyncAppContext,
|
||||
) -> Result<Vec<Self>> {
|
||||
let unique_user_ids = proto_messages
|
||||
.iter()
|
||||
.map(|m| m.sender_id)
|
||||
.collect::<HashSet<_>>()
|
||||
.into_iter()
|
||||
.collect();
|
||||
user_store
|
||||
.update(cx, |user_store, cx| {
|
||||
user_store.get_users(unique_user_ids, cx)
|
||||
})
|
||||
.await?;
|
||||
|
||||
let mut messages = Vec::with_capacity(proto_messages.len());
|
||||
for message in proto_messages {
|
||||
messages.push(ChannelMessage::from_proto(message, user_store, cx).await?);
|
||||
}
|
||||
Ok(messages)
|
||||
}
|
||||
}
|
||||
|
||||
pub fn mentions_to_proto(mentions: &[(Range<usize>, UserId)]) -> Vec<proto::ChatMention> {
|
||||
mentions
|
||||
.iter()
|
||||
.map(|(range, user_id)| proto::ChatMention {
|
||||
range: Some(proto::Range {
|
||||
start: range.start as u64,
|
||||
end: range.end as u64,
|
||||
}),
|
||||
user_id: *user_id as u64,
|
||||
})
|
||||
.collect()
|
||||
}
|
||||
|
||||
impl sum_tree::Item for ChannelMessage {
|
||||
@@ -503,3 +635,12 @@ impl<'a> sum_tree::Dimension<'a, ChannelMessageSummary> for Count {
|
||||
self.0 += summary.count;
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> From<&'a str> for MessageParams {
|
||||
fn from(value: &'a str) -> Self {
|
||||
Self {
|
||||
text: value.into(),
|
||||
mentions: Vec::new(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,30 +1,34 @@
|
||||
mod channel_index;
|
||||
|
||||
use crate::{channel_buffer::ChannelBuffer, channel_chat::ChannelChat};
|
||||
use crate::{channel_buffer::ChannelBuffer, channel_chat::ChannelChat, ChannelMessage};
|
||||
use anyhow::{anyhow, Result};
|
||||
use channel_index::ChannelIndex;
|
||||
use client::{Client, Subscription, User, UserId, UserStore};
|
||||
use collections::{hash_map, HashMap, HashSet};
|
||||
use db::RELEASE_CHANNEL;
|
||||
use futures::{channel::mpsc, future::Shared, Future, FutureExt, StreamExt};
|
||||
use gpui::{AppContext, AsyncAppContext, Entity, ModelContext, ModelHandle, Task, WeakModelHandle};
|
||||
use rpc::{
|
||||
proto::{self, ChannelEdge, ChannelPermission},
|
||||
proto::{self, ChannelVisibility},
|
||||
TypedEnvelope,
|
||||
};
|
||||
use serde_derive::{Deserialize, Serialize};
|
||||
use std::{borrow::Cow, hash::Hash, mem, ops::Deref, sync::Arc, time::Duration};
|
||||
use std::{mem, sync::Arc, time::Duration};
|
||||
use util::ResultExt;
|
||||
|
||||
use self::channel_index::ChannelIndex;
|
||||
pub fn init(client: &Arc<Client>, user_store: ModelHandle<UserStore>, cx: &mut AppContext) {
|
||||
let channel_store =
|
||||
cx.add_model(|cx| ChannelStore::new(client.clone(), user_store.clone(), cx));
|
||||
cx.set_global(channel_store);
|
||||
}
|
||||
|
||||
pub const RECONNECT_TIMEOUT: Duration = Duration::from_secs(30);
|
||||
|
||||
pub type ChannelId = u64;
|
||||
|
||||
pub struct ChannelStore {
|
||||
channel_index: ChannelIndex,
|
||||
pub channel_index: ChannelIndex,
|
||||
channel_invitations: Vec<Arc<Channel>>,
|
||||
channel_participants: HashMap<ChannelId, Vec<Arc<User>>>,
|
||||
channels_with_admin_privileges: HashSet<ChannelId>,
|
||||
outgoing_invites: HashSet<(ChannelId, UserId)>,
|
||||
update_channels_tx: mpsc::UnboundedSender<proto::UpdateChannels>,
|
||||
opened_buffers: HashMap<ChannelId, OpenedModelHandle<ChannelBuffer>>,
|
||||
@@ -37,21 +41,70 @@ pub struct ChannelStore {
|
||||
_update_channels: Task<()>,
|
||||
}
|
||||
|
||||
pub type ChannelData = (Channel, ChannelPath);
|
||||
|
||||
#[derive(Clone, Debug, PartialEq)]
|
||||
pub struct Channel {
|
||||
pub id: ChannelId,
|
||||
pub name: String,
|
||||
pub visibility: proto::ChannelVisibility,
|
||||
pub role: proto::ChannelRole,
|
||||
pub unseen_note_version: Option<(u64, clock::Global)>,
|
||||
pub unseen_message_id: Option<u64>,
|
||||
pub parent_path: Vec<u64>,
|
||||
}
|
||||
|
||||
#[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Clone, Serialize, Deserialize)]
|
||||
pub struct ChannelPath(Arc<[ChannelId]>);
|
||||
impl Channel {
|
||||
pub fn link(&self) -> String {
|
||||
RELEASE_CHANNEL.link_prefix().to_owned()
|
||||
+ "channel/"
|
||||
+ &self.slug()
|
||||
+ "-"
|
||||
+ &self.id.to_string()
|
||||
}
|
||||
|
||||
pub fn slug(&self) -> String {
|
||||
let slug: String = self
|
||||
.name
|
||||
.chars()
|
||||
.map(|c| if c.is_alphanumeric() { c } else { '-' })
|
||||
.collect();
|
||||
|
||||
slug.trim_matches(|c| c == '-').to_string()
|
||||
}
|
||||
|
||||
pub fn can_edit_notes(&self) -> bool {
|
||||
self.role == proto::ChannelRole::Member || self.role == proto::ChannelRole::Admin
|
||||
}
|
||||
}
|
||||
|
||||
pub struct ChannelMembership {
|
||||
pub user: Arc<User>,
|
||||
pub kind: proto::channel_member::Kind,
|
||||
pub admin: bool,
|
||||
pub role: proto::ChannelRole,
|
||||
}
|
||||
impl ChannelMembership {
|
||||
pub fn sort_key(&self) -> MembershipSortKey {
|
||||
MembershipSortKey {
|
||||
role_order: match self.role {
|
||||
proto::ChannelRole::Admin => 0,
|
||||
proto::ChannelRole::Member => 1,
|
||||
proto::ChannelRole::Banned => 2,
|
||||
proto::ChannelRole::Guest => 3,
|
||||
},
|
||||
kind_order: match self.kind {
|
||||
proto::channel_member::Kind::Member => 0,
|
||||
proto::channel_member::Kind::AncestorMember => 1,
|
||||
proto::channel_member::Kind::Invitee => 2,
|
||||
},
|
||||
username_order: self.user.github_login.as_str(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(PartialOrd, Ord, PartialEq, Eq)]
|
||||
pub struct MembershipSortKey<'a> {
|
||||
role_order: u8,
|
||||
kind_order: u8,
|
||||
username_order: &'a str,
|
||||
}
|
||||
|
||||
pub enum ChannelEvent {
|
||||
@@ -69,6 +122,10 @@ enum OpenedModelHandle<E: Entity> {
|
||||
}
|
||||
|
||||
impl ChannelStore {
|
||||
pub fn global(cx: &AppContext) -> ModelHandle<Self> {
|
||||
cx.global::<ModelHandle<Self>>().clone()
|
||||
}
|
||||
|
||||
pub fn new(
|
||||
client: Arc<Client>,
|
||||
user_store: ModelHandle<UserStore>,
|
||||
@@ -82,12 +139,18 @@ impl ChannelStore {
|
||||
let watch_connection_status = cx.spawn_weak(|this, mut cx| async move {
|
||||
while let Some(status) = connection_status.next().await {
|
||||
let this = this.upgrade(&cx)?;
|
||||
if status.is_connected() {
|
||||
this.update(&mut cx, |this, cx| this.handle_connect(cx))
|
||||
.await
|
||||
.log_err()?;
|
||||
} else {
|
||||
this.update(&mut cx, |this, cx| this.handle_disconnect(cx));
|
||||
match status {
|
||||
client::Status::Connected { .. } => {
|
||||
this.update(&mut cx, |this, cx| this.handle_connect(cx))
|
||||
.await
|
||||
.log_err()?;
|
||||
}
|
||||
client::Status::SignedOut | client::Status::UpgradeRequired => {
|
||||
this.update(&mut cx, |this, cx| this.handle_disconnect(false, cx));
|
||||
}
|
||||
_ => {
|
||||
this.update(&mut cx, |this, cx| this.handle_disconnect(true, cx));
|
||||
}
|
||||
}
|
||||
}
|
||||
Some(())
|
||||
@@ -97,7 +160,6 @@ impl ChannelStore {
|
||||
channel_invitations: Vec::default(),
|
||||
channel_index: ChannelIndex::default(),
|
||||
channel_participants: Default::default(),
|
||||
channels_with_admin_privileges: Default::default(),
|
||||
outgoing_invites: Default::default(),
|
||||
opened_buffers: Default::default(),
|
||||
opened_chats: Default::default(),
|
||||
@@ -126,16 +188,6 @@ impl ChannelStore {
|
||||
self.client.clone()
|
||||
}
|
||||
|
||||
pub fn has_children(&self, channel_id: ChannelId) -> bool {
|
||||
self.channel_index.iter().any(|path| {
|
||||
if let Some(ix) = path.iter().position(|id| *id == channel_id) {
|
||||
path.len() > ix + 1
|
||||
} else {
|
||||
false
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
/// Returns the number of unique channels in the store
|
||||
pub fn channel_count(&self) -> usize {
|
||||
self.channel_index.by_id().len()
|
||||
@@ -155,26 +207,31 @@ impl ChannelStore {
|
||||
}
|
||||
|
||||
/// Iterate over all entries in the channel DAG
|
||||
pub fn channel_dag_entries(&self) -> impl '_ + Iterator<Item = (usize, &Arc<Channel>)> {
|
||||
self.channel_index.iter().map(move |path| {
|
||||
let id = path.last().unwrap();
|
||||
let channel = self.channel_for_id(*id).unwrap();
|
||||
(path.len() - 1, channel)
|
||||
})
|
||||
pub fn ordered_channels(&self) -> impl '_ + Iterator<Item = (usize, &Arc<Channel>)> {
|
||||
self.channel_index
|
||||
.ordered_channels()
|
||||
.iter()
|
||||
.filter_map(move |id| {
|
||||
let channel = self.channel_index.by_id().get(id)?;
|
||||
Some((channel.parent_path.len(), channel))
|
||||
})
|
||||
}
|
||||
|
||||
pub fn channel_dag_entry_at(&self, ix: usize) -> Option<(&Arc<Channel>, &ChannelPath)> {
|
||||
let path = self.channel_index.get(ix)?;
|
||||
let id = path.last().unwrap();
|
||||
let channel = self.channel_for_id(*id).unwrap();
|
||||
|
||||
Some((channel, path))
|
||||
pub fn channel_at_index(&self, ix: usize) -> Option<&Arc<Channel>> {
|
||||
let channel_id = self.channel_index.ordered_channels().get(ix)?;
|
||||
self.channel_index.by_id().get(channel_id)
|
||||
}
|
||||
|
||||
pub fn channel_at(&self, ix: usize) -> Option<&Arc<Channel>> {
|
||||
self.channel_index.by_id().values().nth(ix)
|
||||
}
|
||||
|
||||
pub fn has_channel_invitation(&self, channel_id: ChannelId) -> bool {
|
||||
self.channel_invitations
|
||||
.iter()
|
||||
.any(|channel| channel.id == channel_id)
|
||||
}
|
||||
|
||||
pub fn channel_invitations(&self) -> &[Arc<Channel>] {
|
||||
&self.channel_invitations
|
||||
}
|
||||
@@ -198,14 +255,101 @@ impl ChannelStore {
|
||||
cx: &mut ModelContext<Self>,
|
||||
) -> Task<Result<ModelHandle<ChannelBuffer>>> {
|
||||
let client = self.client.clone();
|
||||
let user_store = self.user_store.clone();
|
||||
let channel_store = cx.handle();
|
||||
self.open_channel_resource(
|
||||
channel_id,
|
||||
|this| &mut this.opened_buffers,
|
||||
|channel, cx| ChannelBuffer::new(channel, client, cx),
|
||||
|channel, cx| ChannelBuffer::new(channel, client, user_store, channel_store, cx),
|
||||
cx,
|
||||
)
|
||||
}
|
||||
|
||||
pub fn fetch_channel_messages(
|
||||
&self,
|
||||
message_ids: Vec<u64>,
|
||||
cx: &mut ModelContext<Self>,
|
||||
) -> Task<Result<Vec<ChannelMessage>>> {
|
||||
let request = if message_ids.is_empty() {
|
||||
None
|
||||
} else {
|
||||
Some(
|
||||
self.client
|
||||
.request(proto::GetChannelMessagesById { message_ids }),
|
||||
)
|
||||
};
|
||||
cx.spawn_weak(|this, mut cx| async move {
|
||||
if let Some(request) = request {
|
||||
let response = request.await?;
|
||||
let this = this
|
||||
.upgrade(&cx)
|
||||
.ok_or_else(|| anyhow!("channel store dropped"))?;
|
||||
let user_store = this.read_with(&cx, |this, _| this.user_store.clone());
|
||||
ChannelMessage::from_proto_vec(response.messages, &user_store, &mut cx).await
|
||||
} else {
|
||||
Ok(Vec::new())
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
pub fn has_channel_buffer_changed(&self, channel_id: ChannelId) -> Option<bool> {
|
||||
self.channel_index
|
||||
.by_id()
|
||||
.get(&channel_id)
|
||||
.map(|channel| channel.unseen_note_version.is_some())
|
||||
}
|
||||
|
||||
pub fn has_new_messages(&self, channel_id: ChannelId) -> Option<bool> {
|
||||
self.channel_index
|
||||
.by_id()
|
||||
.get(&channel_id)
|
||||
.map(|channel| channel.unseen_message_id.is_some())
|
||||
}
|
||||
|
||||
pub fn notes_changed(
|
||||
&mut self,
|
||||
channel_id: ChannelId,
|
||||
epoch: u64,
|
||||
version: &clock::Global,
|
||||
cx: &mut ModelContext<Self>,
|
||||
) {
|
||||
self.channel_index.note_changed(channel_id, epoch, version);
|
||||
cx.notify();
|
||||
}
|
||||
|
||||
pub fn new_message(
|
||||
&mut self,
|
||||
channel_id: ChannelId,
|
||||
message_id: u64,
|
||||
cx: &mut ModelContext<Self>,
|
||||
) {
|
||||
self.channel_index.new_message(channel_id, message_id);
|
||||
cx.notify();
|
||||
}
|
||||
|
||||
pub fn acknowledge_message_id(
|
||||
&mut self,
|
||||
channel_id: ChannelId,
|
||||
message_id: u64,
|
||||
cx: &mut ModelContext<Self>,
|
||||
) {
|
||||
self.channel_index
|
||||
.acknowledge_message_id(channel_id, message_id);
|
||||
cx.notify();
|
||||
}
|
||||
|
||||
pub fn acknowledge_notes_version(
|
||||
&mut self,
|
||||
channel_id: ChannelId,
|
||||
epoch: u64,
|
||||
version: &clock::Global,
|
||||
cx: &mut ModelContext<Self>,
|
||||
) {
|
||||
self.channel_index
|
||||
.acknowledge_note_version(channel_id, epoch, version);
|
||||
cx.notify();
|
||||
}
|
||||
|
||||
pub fn open_channel_chat(
|
||||
&mut self,
|
||||
channel_id: ChannelId,
|
||||
@@ -213,10 +357,11 @@ impl ChannelStore {
|
||||
) -> Task<Result<ModelHandle<ChannelChat>>> {
|
||||
let client = self.client.clone();
|
||||
let user_store = self.user_store.clone();
|
||||
let this = cx.handle();
|
||||
self.open_channel_resource(
|
||||
channel_id,
|
||||
|this| &mut this.opened_chats,
|
||||
|channel, cx| ChannelChat::new(channel, user_store, client, cx),
|
||||
|channel, cx| ChannelChat::new(channel, this, user_store, client, cx),
|
||||
cx,
|
||||
)
|
||||
}
|
||||
@@ -292,16 +437,11 @@ impl ChannelStore {
|
||||
.spawn(async move { task.await.map_err(|error| anyhow!("{}", error)) })
|
||||
}
|
||||
|
||||
pub fn is_user_admin(&self, channel_id: ChannelId) -> bool {
|
||||
self.channel_index.iter().any(|path| {
|
||||
if let Some(ix) = path.iter().position(|id| *id == channel_id) {
|
||||
path[..=ix]
|
||||
.iter()
|
||||
.any(|id| self.channels_with_admin_privileges.contains(id))
|
||||
} else {
|
||||
false
|
||||
}
|
||||
})
|
||||
pub fn is_channel_admin(&self, channel_id: ChannelId) -> bool {
|
||||
let Some(channel) = self.channel_for_id(channel_id) else {
|
||||
return false;
|
||||
};
|
||||
channel.role == proto::ChannelRole::Admin
|
||||
}
|
||||
|
||||
pub fn channel_participants(&self, channel_id: ChannelId) -> &[Arc<User>] {
|
||||
@@ -328,24 +468,19 @@ impl ChannelStore {
|
||||
.ok_or_else(|| anyhow!("missing channel in response"))?;
|
||||
let channel_id = channel.id;
|
||||
|
||||
let parent_edge = if let Some(parent_id) = parent_id {
|
||||
vec![ChannelEdge {
|
||||
channel_id: channel.id,
|
||||
parent_id,
|
||||
}]
|
||||
} else {
|
||||
vec![]
|
||||
};
|
||||
// let parent_edge = if let Some(parent_id) = parent_id {
|
||||
// vec![ChannelEdge {
|
||||
// channel_id: channel.id,
|
||||
// parent_id,
|
||||
// }]
|
||||
// } else {
|
||||
// vec![]
|
||||
// };
|
||||
|
||||
this.update(&mut cx, |this, cx| {
|
||||
let task = this.update_channels(
|
||||
proto::UpdateChannels {
|
||||
channels: vec![channel],
|
||||
insert_edge: parent_edge,
|
||||
channel_permissions: vec![ChannelPermission {
|
||||
channel_id,
|
||||
is_admin: true,
|
||||
}],
|
||||
..Default::default()
|
||||
},
|
||||
cx,
|
||||
@@ -363,52 +498,34 @@ impl ChannelStore {
|
||||
})
|
||||
}
|
||||
|
||||
pub fn link_channel(
|
||||
&mut self,
|
||||
channel_id: ChannelId,
|
||||
to: ChannelId,
|
||||
cx: &mut ModelContext<Self>,
|
||||
) -> Task<Result<()>> {
|
||||
let client = self.client.clone();
|
||||
cx.spawn(|_, _| async move {
|
||||
let _ = client
|
||||
.request(proto::LinkChannel { channel_id, to })
|
||||
.await?;
|
||||
|
||||
Ok(())
|
||||
})
|
||||
}
|
||||
|
||||
pub fn unlink_channel(
|
||||
&mut self,
|
||||
channel_id: ChannelId,
|
||||
from: ChannelId,
|
||||
cx: &mut ModelContext<Self>,
|
||||
) -> Task<Result<()>> {
|
||||
let client = self.client.clone();
|
||||
cx.spawn(|_, _| async move {
|
||||
let _ = client
|
||||
.request(proto::UnlinkChannel { channel_id, from })
|
||||
.await?;
|
||||
|
||||
Ok(())
|
||||
})
|
||||
}
|
||||
|
||||
pub fn move_channel(
|
||||
&mut self,
|
||||
channel_id: ChannelId,
|
||||
from: ChannelId,
|
||||
to: ChannelId,
|
||||
to: Option<ChannelId>,
|
||||
cx: &mut ModelContext<Self>,
|
||||
) -> Task<Result<()>> {
|
||||
let client = self.client.clone();
|
||||
cx.spawn(|_, _| async move {
|
||||
let _ = client
|
||||
.request(proto::MoveChannel {
|
||||
.request(proto::MoveChannel { channel_id, to })
|
||||
.await?;
|
||||
|
||||
Ok(())
|
||||
})
|
||||
}
|
||||
|
||||
pub fn set_channel_visibility(
|
||||
&mut self,
|
||||
channel_id: ChannelId,
|
||||
visibility: ChannelVisibility,
|
||||
cx: &mut ModelContext<Self>,
|
||||
) -> Task<Result<()>> {
|
||||
let client = self.client.clone();
|
||||
cx.spawn(|_, _| async move {
|
||||
let _ = client
|
||||
.request(proto::SetChannelVisibility {
|
||||
channel_id,
|
||||
from,
|
||||
to,
|
||||
visibility: visibility.into(),
|
||||
})
|
||||
.await?;
|
||||
|
||||
@@ -420,7 +537,7 @@ impl ChannelStore {
|
||||
&mut self,
|
||||
channel_id: ChannelId,
|
||||
user_id: UserId,
|
||||
admin: bool,
|
||||
role: proto::ChannelRole,
|
||||
cx: &mut ModelContext<Self>,
|
||||
) -> Task<Result<()>> {
|
||||
if !self.outgoing_invites.insert((channel_id, user_id)) {
|
||||
@@ -434,7 +551,7 @@ impl ChannelStore {
|
||||
.request(proto::InviteChannelMember {
|
||||
channel_id,
|
||||
user_id,
|
||||
admin,
|
||||
role: role.into(),
|
||||
})
|
||||
.await;
|
||||
|
||||
@@ -478,11 +595,11 @@ impl ChannelStore {
|
||||
})
|
||||
}
|
||||
|
||||
pub fn set_member_admin(
|
||||
pub fn set_member_role(
|
||||
&mut self,
|
||||
channel_id: ChannelId,
|
||||
user_id: UserId,
|
||||
admin: bool,
|
||||
role: proto::ChannelRole,
|
||||
cx: &mut ModelContext<Self>,
|
||||
) -> Task<Result<()>> {
|
||||
if !self.outgoing_invites.insert((channel_id, user_id)) {
|
||||
@@ -493,10 +610,10 @@ impl ChannelStore {
|
||||
let client = self.client.clone();
|
||||
cx.spawn(|this, mut cx| async move {
|
||||
let result = client
|
||||
.request(proto::SetChannelMemberAdmin {
|
||||
.request(proto::SetChannelMemberRole {
|
||||
channel_id,
|
||||
user_id,
|
||||
admin,
|
||||
role: role.into(),
|
||||
})
|
||||
.await;
|
||||
|
||||
@@ -548,14 +665,15 @@ impl ChannelStore {
|
||||
&mut self,
|
||||
channel_id: ChannelId,
|
||||
accept: bool,
|
||||
) -> impl Future<Output = Result<()>> {
|
||||
cx: &mut ModelContext<Self>,
|
||||
) -> Task<Result<()>> {
|
||||
let client = self.client.clone();
|
||||
async move {
|
||||
cx.background().spawn(async move {
|
||||
client
|
||||
.request(proto::RespondToChannelInvite { channel_id, accept })
|
||||
.await?;
|
||||
Ok(())
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
pub fn get_channel_member_details(
|
||||
@@ -584,8 +702,8 @@ impl ChannelStore {
|
||||
.filter_map(|(user, member)| {
|
||||
Some(ChannelMembership {
|
||||
user,
|
||||
admin: member.admin,
|
||||
kind: proto::channel_member::Kind::from_i32(member.kind)?,
|
||||
role: member.role(),
|
||||
kind: member.kind(),
|
||||
})
|
||||
})
|
||||
.collect())
|
||||
@@ -623,6 +741,11 @@ impl ChannelStore {
|
||||
}
|
||||
|
||||
fn handle_connect(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
|
||||
self.channel_index.clear();
|
||||
self.channel_invitations.clear();
|
||||
self.channel_participants.clear();
|
||||
self.channel_index.clear();
|
||||
self.outgoing_invites.clear();
|
||||
self.disconnect_channel_buffers_task.take();
|
||||
|
||||
for chat in self.opened_chats.values() {
|
||||
@@ -642,7 +765,7 @@ impl ChannelStore {
|
||||
let channel_buffer = buffer.read(cx);
|
||||
let buffer = channel_buffer.buffer().read(cx);
|
||||
buffer_versions.push(proto::ChannelBufferVersion {
|
||||
channel_id: channel_buffer.channel().id,
|
||||
channel_id: channel_buffer.channel_id,
|
||||
epoch: channel_buffer.epoch(),
|
||||
version: language::proto::serialize_version(&buffer.version()),
|
||||
});
|
||||
@@ -669,13 +792,13 @@ impl ChannelStore {
|
||||
};
|
||||
|
||||
channel_buffer.update(cx, |channel_buffer, cx| {
|
||||
let channel_id = channel_buffer.channel().id;
|
||||
let channel_id = channel_buffer.channel_id;
|
||||
if let Some(remote_buffer) = response
|
||||
.buffers
|
||||
.iter_mut()
|
||||
.find(|buffer| buffer.channel_id == channel_id)
|
||||
{
|
||||
let channel_id = channel_buffer.channel().id;
|
||||
let channel_id = channel_buffer.channel_id;
|
||||
let remote_version =
|
||||
language::proto::deserialize_version(&remote_buffer.version);
|
||||
|
||||
@@ -731,18 +854,15 @@ impl ChannelStore {
|
||||
})
|
||||
}
|
||||
|
||||
fn handle_disconnect(&mut self, cx: &mut ModelContext<Self>) {
|
||||
self.channel_index.clear();
|
||||
self.channel_invitations.clear();
|
||||
self.channel_participants.clear();
|
||||
self.channels_with_admin_privileges.clear();
|
||||
self.channel_index.clear();
|
||||
self.outgoing_invites.clear();
|
||||
fn handle_disconnect(&mut self, wait_for_reconnect: bool, cx: &mut ModelContext<Self>) {
|
||||
cx.notify();
|
||||
|
||||
self.disconnect_channel_buffers_task.get_or_insert_with(|| {
|
||||
cx.spawn_weak(|this, mut cx| async move {
|
||||
cx.background().timer(RECONNECT_TIMEOUT).await;
|
||||
if wait_for_reconnect {
|
||||
cx.background().timer(RECONNECT_TIMEOUT).await;
|
||||
}
|
||||
|
||||
if let Some(this) = this.upgrade(&cx) {
|
||||
this.update(&mut cx, |this, cx| {
|
||||
for (_, buffer) in this.opened_buffers.drain() {
|
||||
@@ -777,7 +897,12 @@ impl ChannelStore {
|
||||
ix,
|
||||
Arc::new(Channel {
|
||||
id: channel.id,
|
||||
visibility: channel.visibility(),
|
||||
role: channel.role(),
|
||||
name: channel.name,
|
||||
unseen_note_version: None,
|
||||
unseen_message_id: None,
|
||||
parent_path: channel.parent_path,
|
||||
}),
|
||||
),
|
||||
}
|
||||
@@ -785,19 +910,24 @@ impl ChannelStore {
|
||||
|
||||
let channels_changed = !payload.channels.is_empty()
|
||||
|| !payload.delete_channels.is_empty()
|
||||
|| !payload.insert_edge.is_empty()
|
||||
|| !payload.delete_edge.is_empty();
|
||||
|| !payload.unseen_channel_messages.is_empty()
|
||||
|| !payload.unseen_channel_buffer_changes.is_empty();
|
||||
|
||||
if channels_changed {
|
||||
if !payload.delete_channels.is_empty() {
|
||||
self.channel_index.delete_channels(&payload.delete_channels);
|
||||
self.channel_participants
|
||||
.retain(|channel_id, _| !payload.delete_channels.contains(channel_id));
|
||||
self.channels_with_admin_privileges
|
||||
.retain(|channel_id| !payload.delete_channels.contains(channel_id));
|
||||
.retain(|channel_id, _| !&payload.delete_channels.contains(channel_id));
|
||||
|
||||
for channel_id in &payload.delete_channels {
|
||||
let channel_id = *channel_id;
|
||||
if payload
|
||||
.channels
|
||||
.iter()
|
||||
.any(|channel| channel.id == channel_id)
|
||||
{
|
||||
continue;
|
||||
}
|
||||
if let Some(OpenedModelHandle::Open(buffer)) =
|
||||
self.opened_buffers.remove(&channel_id)
|
||||
{
|
||||
@@ -810,25 +940,32 @@ impl ChannelStore {
|
||||
|
||||
let mut index = self.channel_index.bulk_insert();
|
||||
for channel in payload.channels {
|
||||
index.insert(channel)
|
||||
let id = channel.id;
|
||||
let channel_changed = index.insert(channel);
|
||||
|
||||
if channel_changed {
|
||||
if let Some(OpenedModelHandle::Open(buffer)) = self.opened_buffers.get(&id) {
|
||||
if let Some(buffer) = buffer.upgrade(cx) {
|
||||
buffer.update(cx, ChannelBuffer::channel_changed);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
for edge in payload.insert_edge {
|
||||
index.insert_edge(edge.channel_id, edge.parent_id);
|
||||
for unseen_buffer_change in payload.unseen_channel_buffer_changes {
|
||||
let version = language::proto::deserialize_version(&unseen_buffer_change.version);
|
||||
index.note_changed(
|
||||
unseen_buffer_change.channel_id,
|
||||
unseen_buffer_change.epoch,
|
||||
&version,
|
||||
);
|
||||
}
|
||||
|
||||
for edge in payload.delete_edge {
|
||||
index.delete_edge(edge.parent_id, edge.channel_id);
|
||||
}
|
||||
}
|
||||
|
||||
for permission in payload.channel_permissions {
|
||||
if permission.is_admin {
|
||||
self.channels_with_admin_privileges
|
||||
.insert(permission.channel_id);
|
||||
} else {
|
||||
self.channels_with_admin_privileges
|
||||
.remove(&permission.channel_id);
|
||||
for unseen_channel_message in payload.unseen_channel_messages {
|
||||
index.new_messages(
|
||||
unseen_channel_message.channel_id,
|
||||
unseen_channel_message.message_id,
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -878,44 +1015,3 @@ impl ChannelStore {
|
||||
}))
|
||||
}
|
||||
}
|
||||
|
||||
impl Deref for ChannelPath {
|
||||
type Target = [ChannelId];
|
||||
|
||||
fn deref(&self) -> &Self::Target {
|
||||
&self.0
|
||||
}
|
||||
}
|
||||
|
||||
impl ChannelPath {
|
||||
pub fn new(path: Arc<[ChannelId]>) -> Self {
|
||||
debug_assert!(path.len() >= 1);
|
||||
Self(path)
|
||||
}
|
||||
|
||||
pub fn parent_id(&self) -> Option<ChannelId> {
|
||||
self.0.len().checked_sub(2).map(|i| self.0[i])
|
||||
}
|
||||
|
||||
pub fn channel_id(&self) -> ChannelId {
|
||||
self.0[self.0.len() - 1]
|
||||
}
|
||||
}
|
||||
|
||||
impl From<ChannelPath> for Cow<'static, ChannelPath> {
|
||||
fn from(value: ChannelPath) -> Self {
|
||||
Cow::Owned(value)
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> From<&'a ChannelPath> for Cow<'a, ChannelPath> {
|
||||
fn from(value: &'a ChannelPath) -> Self {
|
||||
Cow::Borrowed(value)
|
||||
}
|
||||
}
|
||||
|
||||
impl Default for ChannelPath {
|
||||
fn default() -> Self {
|
||||
ChannelPath(Arc::from([]))
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,14 +1,11 @@
|
||||
use std::{ops::Deref, sync::Arc};
|
||||
|
||||
use crate::{Channel, ChannelId};
|
||||
use collections::BTreeMap;
|
||||
use rpc::proto;
|
||||
|
||||
use super::ChannelPath;
|
||||
use std::sync::Arc;
|
||||
|
||||
#[derive(Default, Debug)]
|
||||
pub struct ChannelIndex {
|
||||
paths: Vec<ChannelPath>,
|
||||
channels_ordered: Vec<ChannelId>,
|
||||
channels_by_id: BTreeMap<ChannelId, Arc<Channel>>,
|
||||
}
|
||||
|
||||
@@ -17,8 +14,12 @@ impl ChannelIndex {
|
||||
&self.channels_by_id
|
||||
}
|
||||
|
||||
pub fn ordered_channels(&self) -> &[ChannelId] {
|
||||
&self.channels_ordered
|
||||
}
|
||||
|
||||
pub fn clear(&mut self) {
|
||||
self.paths.clear();
|
||||
self.channels_ordered.clear();
|
||||
self.channels_by_id.clear();
|
||||
}
|
||||
|
||||
@@ -26,25 +27,52 @@ impl ChannelIndex {
|
||||
pub fn delete_channels(&mut self, channels: &[ChannelId]) {
|
||||
self.channels_by_id
|
||||
.retain(|channel_id, _| !channels.contains(channel_id));
|
||||
self.paths.retain(|path| {
|
||||
path.iter()
|
||||
.all(|channel_id| self.channels_by_id.contains_key(channel_id))
|
||||
});
|
||||
self.channels_ordered
|
||||
.retain(|channel_id| !channels.contains(channel_id));
|
||||
}
|
||||
|
||||
pub fn bulk_insert(&mut self) -> ChannelPathsInsertGuard {
|
||||
ChannelPathsInsertGuard {
|
||||
paths: &mut self.paths,
|
||||
channels_ordered: &mut self.channels_ordered,
|
||||
channels_by_id: &mut self.channels_by_id,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Deref for ChannelIndex {
|
||||
type Target = [ChannelPath];
|
||||
pub fn acknowledge_note_version(
|
||||
&mut self,
|
||||
channel_id: ChannelId,
|
||||
epoch: u64,
|
||||
version: &clock::Global,
|
||||
) {
|
||||
if let Some(channel) = self.channels_by_id.get_mut(&channel_id) {
|
||||
let channel = Arc::make_mut(channel);
|
||||
if let Some((unseen_epoch, unseen_version)) = &channel.unseen_note_version {
|
||||
if epoch > *unseen_epoch
|
||||
|| epoch == *unseen_epoch && version.observed_all(unseen_version)
|
||||
{
|
||||
channel.unseen_note_version = None;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn deref(&self) -> &Self::Target {
|
||||
&self.paths
|
||||
pub fn acknowledge_message_id(&mut self, channel_id: ChannelId, message_id: u64) {
|
||||
if let Some(channel) = self.channels_by_id.get_mut(&channel_id) {
|
||||
let channel = Arc::make_mut(channel);
|
||||
if let Some(unseen_message_id) = channel.unseen_message_id {
|
||||
if message_id >= unseen_message_id {
|
||||
channel.unseen_message_id = None;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub fn note_changed(&mut self, channel_id: ChannelId, epoch: u64, version: &clock::Global) {
|
||||
insert_note_changed(&mut self.channels_by_id, channel_id, epoch, version);
|
||||
}
|
||||
|
||||
pub fn new_message(&mut self, channel_id: ChannelId, message_id: u64) {
|
||||
insert_new_message(&mut self.channels_by_id, channel_id, message_id)
|
||||
}
|
||||
}
|
||||
|
||||
@@ -52,111 +80,105 @@ impl Deref for ChannelIndex {
|
||||
/// invariants after a series of insertions
|
||||
#[derive(Debug)]
|
||||
pub struct ChannelPathsInsertGuard<'a> {
|
||||
paths: &'a mut Vec<ChannelPath>,
|
||||
channels_ordered: &'a mut Vec<ChannelId>,
|
||||
channels_by_id: &'a mut BTreeMap<ChannelId, Arc<Channel>>,
|
||||
}
|
||||
|
||||
impl<'a> ChannelPathsInsertGuard<'a> {
|
||||
/// Remove the given edge from this index. This will not remove the channel.
|
||||
/// If this operation would result in a dangling edge, re-insert it.
|
||||
pub fn delete_edge(&mut self, parent_id: ChannelId, channel_id: ChannelId) {
|
||||
self.paths.retain(|path| {
|
||||
!path
|
||||
.windows(2)
|
||||
.any(|window| window == [parent_id, channel_id])
|
||||
});
|
||||
|
||||
// Ensure that there is at least one channel path in the index
|
||||
if !self
|
||||
.paths
|
||||
.iter()
|
||||
.any(|path| path.iter().any(|id| id == &channel_id))
|
||||
{
|
||||
self.insert_root(channel_id);
|
||||
}
|
||||
pub fn note_changed(&mut self, channel_id: ChannelId, epoch: u64, version: &clock::Global) {
|
||||
insert_note_changed(&mut self.channels_by_id, channel_id, epoch, &version);
|
||||
}
|
||||
|
||||
pub fn insert(&mut self, channel_proto: proto::Channel) {
|
||||
pub fn new_messages(&mut self, channel_id: ChannelId, message_id: u64) {
|
||||
insert_new_message(&mut self.channels_by_id, channel_id, message_id)
|
||||
}
|
||||
|
||||
pub fn insert(&mut self, channel_proto: proto::Channel) -> bool {
|
||||
let mut ret = false;
|
||||
if let Some(existing_channel) = self.channels_by_id.get_mut(&channel_proto.id) {
|
||||
Arc::make_mut(existing_channel).name = channel_proto.name;
|
||||
let existing_channel = Arc::make_mut(existing_channel);
|
||||
|
||||
ret = existing_channel.visibility != channel_proto.visibility()
|
||||
|| existing_channel.role != channel_proto.role()
|
||||
|| existing_channel.name != channel_proto.name;
|
||||
|
||||
existing_channel.visibility = channel_proto.visibility();
|
||||
existing_channel.role = channel_proto.role();
|
||||
existing_channel.name = channel_proto.name;
|
||||
} else {
|
||||
self.channels_by_id.insert(
|
||||
channel_proto.id,
|
||||
Arc::new(Channel {
|
||||
id: channel_proto.id,
|
||||
visibility: channel_proto.visibility(),
|
||||
role: channel_proto.role(),
|
||||
name: channel_proto.name,
|
||||
unseen_note_version: None,
|
||||
unseen_message_id: None,
|
||||
parent_path: channel_proto.parent_path,
|
||||
}),
|
||||
);
|
||||
self.insert_root(channel_proto.id);
|
||||
}
|
||||
}
|
||||
|
||||
pub fn insert_edge(&mut self, channel_id: ChannelId, parent_id: ChannelId) {
|
||||
let mut parents = Vec::new();
|
||||
let mut descendants = Vec::new();
|
||||
let mut ixs_to_remove = Vec::new();
|
||||
|
||||
for (ix, path) in self.paths.iter().enumerate() {
|
||||
if path
|
||||
.windows(2)
|
||||
.any(|window| window[0] == parent_id && window[1] == channel_id)
|
||||
{
|
||||
// We already have this edge in the index
|
||||
return;
|
||||
}
|
||||
if path.ends_with(&[parent_id]) {
|
||||
parents.push(path);
|
||||
} else if let Some(position) = path.iter().position(|id| id == &channel_id) {
|
||||
if position == 0 {
|
||||
ixs_to_remove.push(ix);
|
||||
}
|
||||
descendants.push(path.split_at(position).1);
|
||||
}
|
||||
}
|
||||
|
||||
let mut new_paths = Vec::new();
|
||||
for parent in parents.iter() {
|
||||
if descendants.is_empty() {
|
||||
let mut new_path = Vec::with_capacity(parent.len() + 1);
|
||||
new_path.extend_from_slice(parent);
|
||||
new_path.push(channel_id);
|
||||
new_paths.push(ChannelPath::new(new_path.into()));
|
||||
} else {
|
||||
for descendant in descendants.iter() {
|
||||
let mut new_path = Vec::with_capacity(parent.len() + descendant.len());
|
||||
new_path.extend_from_slice(parent);
|
||||
new_path.extend_from_slice(descendant);
|
||||
new_paths.push(ChannelPath::new(new_path.into()));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
for ix in ixs_to_remove.into_iter().rev() {
|
||||
self.paths.swap_remove(ix);
|
||||
}
|
||||
self.paths.extend(new_paths)
|
||||
ret
|
||||
}
|
||||
|
||||
fn insert_root(&mut self, channel_id: ChannelId) {
|
||||
self.paths.push(ChannelPath::new(Arc::from([channel_id])));
|
||||
self.channels_ordered.push(channel_id);
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> Drop for ChannelPathsInsertGuard<'a> {
|
||||
fn drop(&mut self) {
|
||||
self.paths.sort_by(|a, b| {
|
||||
let a = channel_path_sorting_key(a, &self.channels_by_id);
|
||||
let b = channel_path_sorting_key(b, &self.channels_by_id);
|
||||
self.channels_ordered.sort_by(|a, b| {
|
||||
let a = channel_path_sorting_key(*a, &self.channels_by_id);
|
||||
let b = channel_path_sorting_key(*b, &self.channels_by_id);
|
||||
a.cmp(b)
|
||||
});
|
||||
self.paths.dedup();
|
||||
self.channels_ordered.dedup();
|
||||
}
|
||||
}
|
||||
|
||||
fn channel_path_sorting_key<'a>(
|
||||
path: &'a [ChannelId],
|
||||
id: ChannelId,
|
||||
channels_by_id: &'a BTreeMap<ChannelId, Arc<Channel>>,
|
||||
) -> impl 'a + Iterator<Item = Option<&'a str>> {
|
||||
path.iter()
|
||||
.map(|id| Some(channels_by_id.get(id)?.name.as_str()))
|
||||
) -> impl Iterator<Item = &str> {
|
||||
let (parent_path, name) = channels_by_id
|
||||
.get(&id)
|
||||
.map_or((&[] as &[_], None), |channel| {
|
||||
(channel.parent_path.as_slice(), Some(channel.name.as_str()))
|
||||
});
|
||||
parent_path
|
||||
.iter()
|
||||
.filter_map(|id| Some(channels_by_id.get(id)?.name.as_str()))
|
||||
.chain(name)
|
||||
}
|
||||
|
||||
fn insert_note_changed(
|
||||
channels_by_id: &mut BTreeMap<ChannelId, Arc<Channel>>,
|
||||
channel_id: u64,
|
||||
epoch: u64,
|
||||
version: &clock::Global,
|
||||
) {
|
||||
if let Some(channel) = channels_by_id.get_mut(&channel_id) {
|
||||
let unseen_version = Arc::make_mut(channel)
|
||||
.unseen_note_version
|
||||
.get_or_insert((0, clock::Global::new()));
|
||||
if epoch > unseen_version.0 {
|
||||
*unseen_version = (epoch, version.clone());
|
||||
} else {
|
||||
unseen_version.1.join(&version);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn insert_new_message(
|
||||
channels_by_id: &mut BTreeMap<ChannelId, Arc<Channel>>,
|
||||
channel_id: u64,
|
||||
message_id: u64,
|
||||
) {
|
||||
if let Some(channel) = channels_by_id.get_mut(&channel_id) {
|
||||
let unseen_message_id = Arc::make_mut(channel).unseen_message_id.get_or_insert(0);
|
||||
*unseen_message_id = message_id.max(*unseen_message_id);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -3,7 +3,7 @@ use crate::channel_chat::ChannelChatEvent;
|
||||
use super::*;
|
||||
use client::{test::FakeServer, Client, UserStore};
|
||||
use gpui::{AppContext, ModelHandle, TestAppContext};
|
||||
use rpc::proto;
|
||||
use rpc::proto::{self};
|
||||
use settings::SettingsStore;
|
||||
use util::http::FakeHttpClient;
|
||||
|
||||
@@ -18,16 +18,18 @@ fn test_update_channels(cx: &mut AppContext) {
|
||||
proto::Channel {
|
||||
id: 1,
|
||||
name: "b".to_string(),
|
||||
visibility: proto::ChannelVisibility::Members as i32,
|
||||
role: proto::ChannelRole::Admin.into(),
|
||||
parent_path: Vec::new(),
|
||||
},
|
||||
proto::Channel {
|
||||
id: 2,
|
||||
name: "a".to_string(),
|
||||
visibility: proto::ChannelVisibility::Members as i32,
|
||||
role: proto::ChannelRole::Member.into(),
|
||||
parent_path: Vec::new(),
|
||||
},
|
||||
],
|
||||
channel_permissions: vec![proto::ChannelPermission {
|
||||
channel_id: 1,
|
||||
is_admin: true,
|
||||
}],
|
||||
..Default::default()
|
||||
},
|
||||
cx,
|
||||
@@ -36,8 +38,8 @@ fn test_update_channels(cx: &mut AppContext) {
|
||||
&channel_store,
|
||||
&[
|
||||
//
|
||||
(0, "a".to_string(), false),
|
||||
(0, "b".to_string(), true),
|
||||
(0, "a".to_string(), proto::ChannelRole::Member),
|
||||
(0, "b".to_string(), proto::ChannelRole::Admin),
|
||||
],
|
||||
cx,
|
||||
);
|
||||
@@ -49,20 +51,16 @@ fn test_update_channels(cx: &mut AppContext) {
|
||||
proto::Channel {
|
||||
id: 3,
|
||||
name: "x".to_string(),
|
||||
visibility: proto::ChannelVisibility::Members as i32,
|
||||
role: proto::ChannelRole::Admin.into(),
|
||||
parent_path: vec![1],
|
||||
},
|
||||
proto::Channel {
|
||||
id: 4,
|
||||
name: "y".to_string(),
|
||||
},
|
||||
],
|
||||
insert_edge: vec![
|
||||
proto::ChannelEdge {
|
||||
parent_id: 1,
|
||||
channel_id: 3,
|
||||
},
|
||||
proto::ChannelEdge {
|
||||
parent_id: 2,
|
||||
channel_id: 4,
|
||||
visibility: proto::ChannelVisibility::Members as i32,
|
||||
role: proto::ChannelRole::Member.into(),
|
||||
parent_path: vec![2],
|
||||
},
|
||||
],
|
||||
..Default::default()
|
||||
@@ -72,10 +70,10 @@ fn test_update_channels(cx: &mut AppContext) {
|
||||
assert_channels(
|
||||
&channel_store,
|
||||
&[
|
||||
(0, "a".to_string(), false),
|
||||
(1, "y".to_string(), false),
|
||||
(0, "b".to_string(), true),
|
||||
(1, "x".to_string(), true),
|
||||
(0, "a".to_string(), proto::ChannelRole::Member),
|
||||
(1, "y".to_string(), proto::ChannelRole::Member),
|
||||
(0, "b".to_string(), proto::ChannelRole::Admin),
|
||||
(1, "x".to_string(), proto::ChannelRole::Admin),
|
||||
],
|
||||
cx,
|
||||
);
|
||||
@@ -92,30 +90,25 @@ fn test_dangling_channel_paths(cx: &mut AppContext) {
|
||||
proto::Channel {
|
||||
id: 0,
|
||||
name: "a".to_string(),
|
||||
visibility: proto::ChannelVisibility::Members as i32,
|
||||
role: proto::ChannelRole::Admin.into(),
|
||||
parent_path: vec![],
|
||||
},
|
||||
proto::Channel {
|
||||
id: 1,
|
||||
name: "b".to_string(),
|
||||
visibility: proto::ChannelVisibility::Members as i32,
|
||||
role: proto::ChannelRole::Admin.into(),
|
||||
parent_path: vec![0],
|
||||
},
|
||||
proto::Channel {
|
||||
id: 2,
|
||||
name: "c".to_string(),
|
||||
visibility: proto::ChannelVisibility::Members as i32,
|
||||
role: proto::ChannelRole::Admin.into(),
|
||||
parent_path: vec![0, 1],
|
||||
},
|
||||
],
|
||||
insert_edge: vec![
|
||||
proto::ChannelEdge {
|
||||
parent_id: 0,
|
||||
channel_id: 1,
|
||||
},
|
||||
proto::ChannelEdge {
|
||||
parent_id: 1,
|
||||
channel_id: 2,
|
||||
},
|
||||
],
|
||||
channel_permissions: vec![proto::ChannelPermission {
|
||||
channel_id: 0,
|
||||
is_admin: true,
|
||||
}],
|
||||
..Default::default()
|
||||
},
|
||||
cx,
|
||||
@@ -125,9 +118,9 @@ fn test_dangling_channel_paths(cx: &mut AppContext) {
|
||||
&channel_store,
|
||||
&[
|
||||
//
|
||||
(0, "a".to_string(), true),
|
||||
(1, "b".to_string(), true),
|
||||
(2, "c".to_string(), true),
|
||||
(0, "a".to_string(), proto::ChannelRole::Admin),
|
||||
(1, "b".to_string(), proto::ChannelRole::Admin),
|
||||
(2, "c".to_string(), proto::ChannelRole::Admin),
|
||||
],
|
||||
cx,
|
||||
);
|
||||
@@ -142,7 +135,11 @@ fn test_dangling_channel_paths(cx: &mut AppContext) {
|
||||
);
|
||||
|
||||
// Make sure that the 1/2/3 path is gone
|
||||
assert_channels(&channel_store, &[(0, "a".to_string(), true)], cx);
|
||||
assert_channels(
|
||||
&channel_store,
|
||||
&[(0, "a".to_string(), proto::ChannelRole::Admin)],
|
||||
cx,
|
||||
);
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
@@ -158,12 +155,19 @@ async fn test_channel_messages(cx: &mut TestAppContext) {
|
||||
channels: vec![proto::Channel {
|
||||
id: channel_id,
|
||||
name: "the-channel".to_string(),
|
||||
visibility: proto::ChannelVisibility::Members as i32,
|
||||
role: proto::ChannelRole::Member.into(),
|
||||
parent_path: vec![],
|
||||
}],
|
||||
..Default::default()
|
||||
});
|
||||
cx.foreground().run_until_parked();
|
||||
cx.read(|cx| {
|
||||
assert_channels(&channel_store, &[(0, "the-channel".to_string(), false)], cx);
|
||||
assert_channels(
|
||||
&channel_store,
|
||||
&[(0, "the-channel".to_string(), proto::ChannelRole::Member)],
|
||||
cx,
|
||||
);
|
||||
});
|
||||
|
||||
let get_users = server.receive::<proto::GetUsers>().await.unwrap();
|
||||
@@ -181,7 +185,7 @@ async fn test_channel_messages(cx: &mut TestAppContext) {
|
||||
|
||||
// Join a channel and populate its existing messages.
|
||||
let channel = channel_store.update(cx, |store, cx| {
|
||||
let channel_id = store.channel_dag_entries().next().unwrap().1.id;
|
||||
let channel_id = store.ordered_channels().next().unwrap().1.id;
|
||||
store.open_channel_chat(channel_id, cx)
|
||||
});
|
||||
let join_channel = server.receive::<proto::JoinChannelChat>().await.unwrap();
|
||||
@@ -194,6 +198,7 @@ async fn test_channel_messages(cx: &mut TestAppContext) {
|
||||
body: "a".into(),
|
||||
timestamp: 1000,
|
||||
sender_id: 5,
|
||||
mentions: vec![],
|
||||
nonce: Some(1.into()),
|
||||
},
|
||||
proto::ChannelMessage {
|
||||
@@ -201,6 +206,7 @@ async fn test_channel_messages(cx: &mut TestAppContext) {
|
||||
body: "b".into(),
|
||||
timestamp: 1001,
|
||||
sender_id: 6,
|
||||
mentions: vec![],
|
||||
nonce: Some(2.into()),
|
||||
},
|
||||
],
|
||||
@@ -247,6 +253,7 @@ async fn test_channel_messages(cx: &mut TestAppContext) {
|
||||
body: "c".into(),
|
||||
timestamp: 1002,
|
||||
sender_id: 7,
|
||||
mentions: vec![],
|
||||
nonce: Some(3.into()),
|
||||
}),
|
||||
});
|
||||
@@ -284,7 +291,7 @@ async fn test_channel_messages(cx: &mut TestAppContext) {
|
||||
|
||||
// Scroll up to view older messages.
|
||||
channel.update(cx, |channel, cx| {
|
||||
assert!(channel.load_more_messages(cx));
|
||||
channel.load_more_messages(cx).unwrap().detach();
|
||||
});
|
||||
let get_messages = server.receive::<proto::GetChannelMessages>().await.unwrap();
|
||||
assert_eq!(get_messages.payload.channel_id, 5);
|
||||
@@ -300,6 +307,7 @@ async fn test_channel_messages(cx: &mut TestAppContext) {
|
||||
timestamp: 998,
|
||||
sender_id: 5,
|
||||
nonce: Some(4.into()),
|
||||
mentions: vec![],
|
||||
},
|
||||
proto::ChannelMessage {
|
||||
id: 9,
|
||||
@@ -307,6 +315,7 @@ async fn test_channel_messages(cx: &mut TestAppContext) {
|
||||
timestamp: 999,
|
||||
sender_id: 6,
|
||||
nonce: Some(5.into()),
|
||||
mentions: vec![],
|
||||
},
|
||||
],
|
||||
},
|
||||
@@ -340,10 +349,10 @@ fn init_test(cx: &mut AppContext) -> ModelHandle<ChannelStore> {
|
||||
|
||||
cx.foreground().forbid_parking();
|
||||
cx.set_global(SettingsStore::test(cx));
|
||||
crate::init(&client);
|
||||
client::init(&client, cx);
|
||||
crate::init(&client, user_store, cx);
|
||||
|
||||
cx.add_model(|cx| ChannelStore::new(client, user_store, cx))
|
||||
ChannelStore::global(cx)
|
||||
}
|
||||
|
||||
fn update_channels(
|
||||
@@ -358,19 +367,13 @@ fn update_channels(
|
||||
#[track_caller]
|
||||
fn assert_channels(
|
||||
channel_store: &ModelHandle<ChannelStore>,
|
||||
expected_channels: &[(usize, String, bool)],
|
||||
expected_channels: &[(usize, String, proto::ChannelRole)],
|
||||
cx: &AppContext,
|
||||
) {
|
||||
let actual = channel_store.read_with(cx, |store, _| {
|
||||
store
|
||||
.channel_dag_entries()
|
||||
.map(|(depth, channel)| {
|
||||
(
|
||||
depth,
|
||||
channel.name.to_string(),
|
||||
store.is_user_admin(channel.id),
|
||||
)
|
||||
})
|
||||
.ordered_channels()
|
||||
.map(|(depth, channel)| (depth, channel.name.to_string(), channel.role))
|
||||
.collect::<Vec<_>>()
|
||||
});
|
||||
assert_eq!(actual, expected_channels);
|
||||
|
||||
@@ -182,6 +182,7 @@ impl Bundle {
|
||||
kCFStringEncodingUTF8,
|
||||
ptr::null(),
|
||||
));
|
||||
// equivalent to: open zed-cli:... -a /Applications/Zed\ Preview.app
|
||||
let urls_to_open = CFArray::from_copyable(&[url_to_open.as_concrete_TypeRef()]);
|
||||
LSOpenFromURLSpec(
|
||||
&LSLaunchURLSpec {
|
||||
|
||||
@@ -33,15 +33,16 @@ parking_lot.workspace = true
|
||||
postage.workspace = true
|
||||
rand.workspace = true
|
||||
schemars.workspace = true
|
||||
serde.workspace = true
|
||||
serde_derive.workspace = true
|
||||
smol.workspace = true
|
||||
sysinfo.workspace = true
|
||||
tempfile = "3"
|
||||
thiserror.workspace = true
|
||||
time.workspace = true
|
||||
tiny_http = "0.8"
|
||||
uuid = { version = "1.1.2", features = ["v4"] }
|
||||
uuid.workspace = true
|
||||
url = "2.2"
|
||||
serde.workspace = true
|
||||
serde_derive.workspace = true
|
||||
tempfile = "3"
|
||||
|
||||
[dev-dependencies]
|
||||
collections = { path = "../collections", features = ["test-support"] }
|
||||
|
||||
@@ -34,7 +34,7 @@ use std::{
|
||||
future::Future,
|
||||
marker::PhantomData,
|
||||
path::PathBuf,
|
||||
sync::{Arc, Weak},
|
||||
sync::{atomic::AtomicU64, Arc, Weak},
|
||||
time::{Duration, Instant},
|
||||
};
|
||||
use telemetry::Telemetry;
|
||||
@@ -62,13 +62,15 @@ lazy_static! {
|
||||
.and_then(|v| v.parse().ok());
|
||||
pub static ref ZED_APP_PATH: Option<PathBuf> =
|
||||
std::env::var("ZED_APP_PATH").ok().map(PathBuf::from);
|
||||
pub static ref ZED_ALWAYS_ACTIVE: bool =
|
||||
std::env::var("ZED_ALWAYS_ACTIVE").map_or(false, |e| e.len() > 0);
|
||||
}
|
||||
|
||||
pub const ZED_SECRET_CLIENT_TOKEN: &str = "618033988749894";
|
||||
pub const INITIAL_RECONNECTION_DELAY: Duration = Duration::from_millis(100);
|
||||
pub const CONNECTION_TIMEOUT: Duration = Duration::from_secs(5);
|
||||
|
||||
actions!(client, [SignIn, SignOut]);
|
||||
actions!(client, [SignIn, SignOut, Reconnect]);
|
||||
|
||||
pub fn init_settings(cx: &mut AppContext) {
|
||||
settings::register::<TelemetrySettings>(cx);
|
||||
@@ -100,10 +102,21 @@ pub fn init(client: &Arc<Client>, cx: &mut AppContext) {
|
||||
}
|
||||
}
|
||||
});
|
||||
cx.add_global_action({
|
||||
let client = client.clone();
|
||||
move |_: &Reconnect, cx| {
|
||||
if let Some(client) = client.upgrade() {
|
||||
cx.spawn(|cx| async move {
|
||||
client.reconnect(&cx);
|
||||
})
|
||||
.detach();
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
pub struct Client {
|
||||
id: usize,
|
||||
id: AtomicU64,
|
||||
peer: Arc<Peer>,
|
||||
http: Arc<dyn HttpClient>,
|
||||
telemetry: Arc<Telemetry>,
|
||||
@@ -372,7 +385,7 @@ impl settings::Setting for TelemetrySettings {
|
||||
impl Client {
|
||||
pub fn new(http: Arc<dyn HttpClient>, cx: &AppContext) -> Arc<Self> {
|
||||
Arc::new(Self {
|
||||
id: 0,
|
||||
id: AtomicU64::new(0),
|
||||
peer: Peer::new(0),
|
||||
telemetry: Telemetry::new(http.clone(), cx),
|
||||
http,
|
||||
@@ -385,17 +398,16 @@ impl Client {
|
||||
})
|
||||
}
|
||||
|
||||
pub fn id(&self) -> usize {
|
||||
self.id
|
||||
pub fn id(&self) -> u64 {
|
||||
self.id.load(std::sync::atomic::Ordering::SeqCst)
|
||||
}
|
||||
|
||||
pub fn http_client(&self) -> Arc<dyn HttpClient> {
|
||||
self.http.clone()
|
||||
}
|
||||
|
||||
#[cfg(any(test, feature = "test-support"))]
|
||||
pub fn set_id(&mut self, id: usize) -> &Self {
|
||||
self.id = id;
|
||||
pub fn set_id(&self, id: u64) -> &Self {
|
||||
self.id.store(id, std::sync::atomic::Ordering::SeqCst);
|
||||
self
|
||||
}
|
||||
|
||||
@@ -452,7 +464,7 @@ impl Client {
|
||||
}
|
||||
|
||||
fn set_status(self: &Arc<Self>, status: Status, cx: &AsyncAppContext) {
|
||||
log::info!("set status on client {}: {:?}", self.id, status);
|
||||
log::info!("set status on client {}: {:?}", self.id(), status);
|
||||
let mut state = self.state.write();
|
||||
*state.status.0.borrow_mut() = status;
|
||||
|
||||
@@ -803,6 +815,7 @@ impl Client {
|
||||
}
|
||||
}
|
||||
let credentials = credentials.unwrap();
|
||||
self.set_id(credentials.user_id);
|
||||
|
||||
if was_disconnected {
|
||||
self.set_status(Status::Connecting, cx);
|
||||
@@ -1210,6 +1223,11 @@ impl Client {
|
||||
self.set_status(Status::SignedOut, cx);
|
||||
}
|
||||
|
||||
pub fn reconnect(self: &Arc<Self>, cx: &AsyncAppContext) {
|
||||
self.peer.teardown();
|
||||
self.set_status(Status::ConnectionLost, cx);
|
||||
}
|
||||
|
||||
fn connection_id(&self) -> Result<ConnectionId> {
|
||||
if let Status::Connected { connection_id, .. } = *self.status().borrow() {
|
||||
Ok(connection_id)
|
||||
@@ -1219,7 +1237,7 @@ impl Client {
|
||||
}
|
||||
|
||||
pub fn send<T: EnvelopedMessage>(&self, message: T) -> Result<()> {
|
||||
log::debug!("rpc send. client_id:{}, name:{}", self.id, T::NAME);
|
||||
log::debug!("rpc send. client_id:{}, name:{}", self.id(), T::NAME);
|
||||
self.peer.send(self.connection_id()?, message)
|
||||
}
|
||||
|
||||
@@ -1235,7 +1253,7 @@ impl Client {
|
||||
&self,
|
||||
request: T,
|
||||
) -> impl Future<Output = Result<TypedEnvelope<T::Response>>> {
|
||||
let client_id = self.id;
|
||||
let client_id = self.id();
|
||||
log::debug!(
|
||||
"rpc request start. client_id:{}. name:{}",
|
||||
client_id,
|
||||
@@ -1256,7 +1274,7 @@ impl Client {
|
||||
}
|
||||
|
||||
fn respond<T: RequestMessage>(&self, receipt: Receipt<T>, response: T::Response) -> Result<()> {
|
||||
log::debug!("rpc respond. client_id:{}. name:{}", self.id, T::NAME);
|
||||
log::debug!("rpc respond. client_id:{}. name:{}", self.id(), T::NAME);
|
||||
self.peer.respond(receipt, response)
|
||||
}
|
||||
|
||||
@@ -1265,7 +1283,7 @@ impl Client {
|
||||
receipt: Receipt<T>,
|
||||
error: proto::Error,
|
||||
) -> Result<()> {
|
||||
log::debug!("rpc respond. client_id:{}. name:{}", self.id, T::NAME);
|
||||
log::debug!("rpc respond. client_id:{}. name:{}", self.id(), T::NAME);
|
||||
self.peer.respond_with_error(receipt, error)
|
||||
}
|
||||
|
||||
@@ -1334,7 +1352,7 @@ impl Client {
|
||||
|
||||
if let Some(handler) = handler {
|
||||
let future = handler(subscriber, message, &self, cx.clone());
|
||||
let client_id = self.id;
|
||||
let client_id = self.id();
|
||||
log::debug!(
|
||||
"rpc message received. client_id:{}, sender_id:{:?}, type:{}",
|
||||
client_id,
|
||||
|
||||
@@ -4,6 +4,9 @@ use lazy_static::lazy_static;
|
||||
use parking_lot::Mutex;
|
||||
use serde::Serialize;
|
||||
use std::{env, io::Write, mem, path::PathBuf, sync::Arc, time::Duration};
|
||||
use sysinfo::{
|
||||
CpuRefreshKind, Pid, PidExt, ProcessExt, ProcessRefreshKind, RefreshKind, System, SystemExt,
|
||||
};
|
||||
use tempfile::NamedTempFile;
|
||||
use util::http::HttpClient;
|
||||
use util::{channel::ReleaseChannel, TryFutureExt};
|
||||
@@ -17,7 +20,8 @@ pub struct Telemetry {
|
||||
#[derive(Default)]
|
||||
struct TelemetryState {
|
||||
metrics_id: Option<Arc<str>>, // Per logged-in user
|
||||
installation_id: Option<Arc<str>>, // Per app installation
|
||||
installation_id: Option<Arc<str>>, // Per app installation (different for dev, preview, and stable)
|
||||
session_id: Option<Arc<str>>, // Per app launch
|
||||
app_version: Option<Arc<str>>,
|
||||
release_channel: Option<&'static str>,
|
||||
os_name: &'static str,
|
||||
@@ -40,6 +44,7 @@ lazy_static! {
|
||||
struct ClickhouseEventRequestBody {
|
||||
token: &'static str,
|
||||
installation_id: Option<Arc<str>>,
|
||||
session_id: Option<Arc<str>>,
|
||||
is_staff: Option<bool>,
|
||||
app_version: Option<Arc<str>>,
|
||||
os_name: &'static str,
|
||||
@@ -88,6 +93,14 @@ pub enum ClickhouseEvent {
|
||||
kind: AssistantKind,
|
||||
model: &'static str,
|
||||
},
|
||||
Cpu {
|
||||
usage_as_percentage: f32,
|
||||
core_count: u32,
|
||||
},
|
||||
Memory {
|
||||
memory_in_bytes: u64,
|
||||
virtual_memory_in_bytes: u64,
|
||||
},
|
||||
}
|
||||
|
||||
#[cfg(debug_assertions)]
|
||||
@@ -122,6 +135,7 @@ impl Telemetry {
|
||||
release_channel,
|
||||
installation_id: None,
|
||||
metrics_id: None,
|
||||
session_id: None,
|
||||
clickhouse_events_queue: Default::default(),
|
||||
flush_clickhouse_events_task: Default::default(),
|
||||
log_file: None,
|
||||
@@ -136,15 +150,68 @@ impl Telemetry {
|
||||
Some(self.state.lock().log_file.as_ref()?.path().to_path_buf())
|
||||
}
|
||||
|
||||
pub fn start(self: &Arc<Self>, installation_id: Option<String>) {
|
||||
pub fn start(
|
||||
self: &Arc<Self>,
|
||||
installation_id: Option<String>,
|
||||
session_id: String,
|
||||
cx: &mut AppContext,
|
||||
) {
|
||||
let mut state = self.state.lock();
|
||||
state.installation_id = installation_id.map(|id| id.into());
|
||||
state.session_id = Some(session_id.into());
|
||||
let has_clickhouse_events = !state.clickhouse_events_queue.is_empty();
|
||||
drop(state);
|
||||
|
||||
if has_clickhouse_events {
|
||||
self.flush_clickhouse_events();
|
||||
}
|
||||
|
||||
let this = self.clone();
|
||||
cx.spawn(|mut cx| async move {
|
||||
// Avoiding calling `System::new_all()`, as there have been crashes related to it
|
||||
let refresh_kind = RefreshKind::new()
|
||||
.with_memory() // For memory usage
|
||||
.with_processes(ProcessRefreshKind::everything()) // For process usage
|
||||
.with_cpu(CpuRefreshKind::everything()); // For core count
|
||||
|
||||
let mut system = System::new_with_specifics(refresh_kind);
|
||||
|
||||
// Avoiding calling `refresh_all()`, just update what we need
|
||||
system.refresh_specifics(refresh_kind);
|
||||
|
||||
loop {
|
||||
// Waiting some amount of time before the first query is important to get a reasonable value
|
||||
// https://docs.rs/sysinfo/0.29.10/sysinfo/trait.ProcessExt.html#tymethod.cpu_usage
|
||||
const DURATION_BETWEEN_SYSTEM_EVENTS: Duration = Duration::from_secs(60);
|
||||
smol::Timer::after(DURATION_BETWEEN_SYSTEM_EVENTS).await;
|
||||
|
||||
system.refresh_specifics(refresh_kind);
|
||||
|
||||
let current_process = Pid::from_u32(std::process::id());
|
||||
let Some(process) = system.processes().get(¤t_process) else {
|
||||
let process = current_process;
|
||||
log::error!("Failed to find own process {process:?} in system process table");
|
||||
// TODO: Fire an error telemetry event
|
||||
return;
|
||||
};
|
||||
|
||||
let memory_event = ClickhouseEvent::Memory {
|
||||
memory_in_bytes: process.memory(),
|
||||
virtual_memory_in_bytes: process.virtual_memory(),
|
||||
};
|
||||
|
||||
let cpu_event = ClickhouseEvent::Cpu {
|
||||
usage_as_percentage: process.cpu_usage(),
|
||||
core_count: system.cpus().len() as u32,
|
||||
};
|
||||
|
||||
let telemetry_settings = cx.update(|cx| *settings::get::<TelemetrySettings>(cx));
|
||||
|
||||
this.report_clickhouse_event(memory_event, telemetry_settings);
|
||||
this.report_clickhouse_event(cpu_event, telemetry_settings);
|
||||
}
|
||||
})
|
||||
.detach();
|
||||
}
|
||||
|
||||
pub fn set_authenticated_user_info(
|
||||
@@ -230,22 +297,21 @@ impl Telemetry {
|
||||
|
||||
{
|
||||
let state = this.state.lock();
|
||||
json_bytes.clear();
|
||||
serde_json::to_writer(
|
||||
&mut json_bytes,
|
||||
&ClickhouseEventRequestBody {
|
||||
token: ZED_SECRET_CLIENT_TOKEN,
|
||||
installation_id: state.installation_id.clone(),
|
||||
is_staff: state.is_staff.clone(),
|
||||
app_version: state.app_version.clone(),
|
||||
os_name: state.os_name,
|
||||
os_version: state.os_version.clone(),
|
||||
architecture: state.architecture,
|
||||
let request_body = ClickhouseEventRequestBody {
|
||||
token: ZED_SECRET_CLIENT_TOKEN,
|
||||
installation_id: state.installation_id.clone(),
|
||||
session_id: state.session_id.clone(),
|
||||
is_staff: state.is_staff.clone(),
|
||||
app_version: state.app_version.clone(),
|
||||
os_name: state.os_name,
|
||||
os_version: state.os_version.clone(),
|
||||
architecture: state.architecture,
|
||||
|
||||
release_channel: state.release_channel,
|
||||
events,
|
||||
},
|
||||
)?;
|
||||
release_channel: state.release_channel,
|
||||
events,
|
||||
};
|
||||
json_bytes.clear();
|
||||
serde_json::to_writer(&mut json_bytes, &request_body)?;
|
||||
}
|
||||
|
||||
this.http_client
|
||||
|
||||
@@ -7,11 +7,15 @@ use gpui::{AsyncAppContext, Entity, ImageData, ModelContext, ModelHandle, Task};
|
||||
use postage::{sink::Sink, watch};
|
||||
use rpc::proto::{RequestMessage, UsersResponse};
|
||||
use std::sync::{Arc, Weak};
|
||||
use text::ReplicaId;
|
||||
use util::http::HttpClient;
|
||||
use util::TryFutureExt as _;
|
||||
|
||||
pub type UserId = u64;
|
||||
|
||||
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
|
||||
pub struct ParticipantIndex(pub u32);
|
||||
|
||||
#[derive(Default, Debug)]
|
||||
pub struct User {
|
||||
pub id: UserId,
|
||||
@@ -19,6 +23,13 @@ pub struct User {
|
||||
pub avatar: Option<Arc<ImageData>>,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, PartialEq, Eq)]
|
||||
pub struct Collaborator {
|
||||
pub peer_id: proto::PeerId,
|
||||
pub replica_id: ReplicaId,
|
||||
pub user_id: UserId,
|
||||
}
|
||||
|
||||
impl PartialOrd for User {
|
||||
fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> {
|
||||
Some(self.cmp(other))
|
||||
@@ -56,6 +67,7 @@ pub enum ContactRequestStatus {
|
||||
|
||||
pub struct UserStore {
|
||||
users: HashMap<u64, Arc<User>>,
|
||||
participant_indices: HashMap<u64, ParticipantIndex>,
|
||||
update_contacts_tx: mpsc::UnboundedSender<UpdateContacts>,
|
||||
current_user: watch::Receiver<Option<Arc<User>>>,
|
||||
contacts: Vec<Arc<Contact>>,
|
||||
@@ -81,6 +93,7 @@ pub enum Event {
|
||||
kind: ContactEventKind,
|
||||
},
|
||||
ShowContacts,
|
||||
ParticipantIndicesChanged,
|
||||
}
|
||||
|
||||
#[derive(Clone, Copy)]
|
||||
@@ -118,6 +131,7 @@ impl UserStore {
|
||||
current_user: current_user_rx,
|
||||
contacts: Default::default(),
|
||||
incoming_contact_requests: Default::default(),
|
||||
participant_indices: Default::default(),
|
||||
outgoing_contact_requests: Default::default(),
|
||||
invite_info: None,
|
||||
client: Arc::downgrade(&client),
|
||||
@@ -279,21 +293,19 @@ impl UserStore {
|
||||
// No need to paralellize here
|
||||
let mut updated_contacts = Vec::new();
|
||||
for contact in message.contacts {
|
||||
let should_notify = contact.should_notify;
|
||||
updated_contacts.push((
|
||||
Arc::new(Contact::from_proto(contact, &this, &mut cx).await?),
|
||||
should_notify,
|
||||
updated_contacts.push(Arc::new(
|
||||
Contact::from_proto(contact, &this, &mut cx).await?,
|
||||
));
|
||||
}
|
||||
|
||||
let mut incoming_requests = Vec::new();
|
||||
for request in message.incoming_requests {
|
||||
incoming_requests.push({
|
||||
let user = this
|
||||
.update(&mut cx, |this, cx| this.get_user(request.requester_id, cx))
|
||||
.await?;
|
||||
(user, request.should_notify)
|
||||
});
|
||||
incoming_requests.push(
|
||||
this.update(&mut cx, |this, cx| {
|
||||
this.get_user(request.requester_id, cx)
|
||||
})
|
||||
.await?,
|
||||
);
|
||||
}
|
||||
|
||||
let mut outgoing_requests = Vec::new();
|
||||
@@ -316,13 +328,7 @@ impl UserStore {
|
||||
this.contacts
|
||||
.retain(|contact| !removed_contacts.contains(&contact.user.id));
|
||||
// Update existing contacts and insert new ones
|
||||
for (updated_contact, should_notify) in updated_contacts {
|
||||
if should_notify {
|
||||
cx.emit(Event::Contact {
|
||||
user: updated_contact.user.clone(),
|
||||
kind: ContactEventKind::Accepted,
|
||||
});
|
||||
}
|
||||
for updated_contact in updated_contacts {
|
||||
match this.contacts.binary_search_by_key(
|
||||
&&updated_contact.user.github_login,
|
||||
|contact| &contact.user.github_login,
|
||||
@@ -345,14 +351,7 @@ impl UserStore {
|
||||
}
|
||||
});
|
||||
// Update existing incoming requests and insert new ones
|
||||
for (user, should_notify) in incoming_requests {
|
||||
if should_notify {
|
||||
cx.emit(Event::Contact {
|
||||
user: user.clone(),
|
||||
kind: ContactEventKind::Requested,
|
||||
});
|
||||
}
|
||||
|
||||
for user in incoming_requests {
|
||||
match this
|
||||
.incoming_contact_requests
|
||||
.binary_search_by_key(&&user.github_login, |contact| {
|
||||
@@ -401,6 +400,12 @@ impl UserStore {
|
||||
&self.incoming_contact_requests
|
||||
}
|
||||
|
||||
pub fn has_incoming_contact_request(&self, user_id: u64) -> bool {
|
||||
self.incoming_contact_requests
|
||||
.iter()
|
||||
.any(|user| user.id == user_id)
|
||||
}
|
||||
|
||||
pub fn outgoing_contact_requests(&self) -> &[Arc<User>] {
|
||||
&self.outgoing_contact_requests
|
||||
}
|
||||
@@ -581,6 +586,10 @@ impl UserStore {
|
||||
self.load_users(proto::FuzzySearchUsers { query }, cx)
|
||||
}
|
||||
|
||||
pub fn get_cached_user(&self, user_id: u64) -> Option<Arc<User>> {
|
||||
self.users.get(&user_id).cloned()
|
||||
}
|
||||
|
||||
pub fn get_user(
|
||||
&mut self,
|
||||
user_id: u64,
|
||||
@@ -641,6 +650,21 @@ impl UserStore {
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
pub fn set_participant_indices(
|
||||
&mut self,
|
||||
participant_indices: HashMap<u64, ParticipantIndex>,
|
||||
cx: &mut ModelContext<Self>,
|
||||
) {
|
||||
if participant_indices != self.participant_indices {
|
||||
self.participant_indices = participant_indices;
|
||||
cx.emit(Event::ParticipantIndicesChanged);
|
||||
}
|
||||
}
|
||||
|
||||
pub fn participant_indices(&self) -> &HashMap<u64, ParticipantIndex> {
|
||||
&self.participant_indices
|
||||
}
|
||||
}
|
||||
|
||||
impl User {
|
||||
@@ -672,6 +696,16 @@ impl Contact {
|
||||
}
|
||||
}
|
||||
|
||||
impl Collaborator {
|
||||
pub fn from_proto(message: proto::Collaborator) -> Result<Self> {
|
||||
Ok(Self {
|
||||
peer_id: message.peer_id.ok_or_else(|| anyhow!("invalid peer id"))?,
|
||||
replica_id: message.replica_id as ReplicaId,
|
||||
user_id: message.user_id as UserId,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
async fn fetch_avatar(http: &dyn HttpClient, url: &str) -> Result<Arc<ImageData>> {
|
||||
let mut response = http
|
||||
.get(url, Default::default(), true)
|
||||
|
||||
52
crates/client2/Cargo.toml
Normal file
52
crates/client2/Cargo.toml
Normal file
@@ -0,0 +1,52 @@
|
||||
[package]
|
||||
name = "client2"
|
||||
version = "0.1.0"
|
||||
edition = "2021"
|
||||
publish = false
|
||||
|
||||
[lib]
|
||||
path = "src/client2.rs"
|
||||
doctest = false
|
||||
|
||||
[features]
|
||||
test-support = ["collections/test-support", "gpui2/test-support", "rpc2/test-support"]
|
||||
|
||||
[dependencies]
|
||||
collections = { path = "../collections" }
|
||||
db2 = { path = "../db2" }
|
||||
gpui2 = { path = "../gpui2" }
|
||||
util = { path = "../util" }
|
||||
rpc2 = { path = "../rpc2" }
|
||||
text = { path = "../text" }
|
||||
settings2 = { path = "../settings2" }
|
||||
feature_flags2 = { path = "../feature_flags2" }
|
||||
sum_tree = { path = "../sum_tree" }
|
||||
|
||||
anyhow.workspace = true
|
||||
async-recursion = "0.3"
|
||||
async-tungstenite = { version = "0.16", features = ["async-tls"] }
|
||||
futures.workspace = true
|
||||
image = "0.23"
|
||||
lazy_static.workspace = true
|
||||
log.workspace = true
|
||||
parking_lot.workspace = true
|
||||
postage.workspace = true
|
||||
rand.workspace = true
|
||||
schemars.workspace = true
|
||||
serde.workspace = true
|
||||
serde_derive.workspace = true
|
||||
smol.workspace = true
|
||||
sysinfo.workspace = true
|
||||
tempfile = "3"
|
||||
thiserror.workspace = true
|
||||
time.workspace = true
|
||||
tiny_http = "0.8"
|
||||
uuid.workspace = true
|
||||
url = "2.2"
|
||||
|
||||
[dev-dependencies]
|
||||
collections = { path = "../collections", features = ["test-support"] }
|
||||
gpui2 = { path = "../gpui2", features = ["test-support"] }
|
||||
rpc2 = { path = "../rpc2", features = ["test-support"] }
|
||||
settings = { path = "../settings", features = ["test-support"] }
|
||||
util = { path = "../util", features = ["test-support"] }
|
||||
1651
crates/client2/src/client2.rs
Normal file
1651
crates/client2/src/client2.rs
Normal file
File diff suppressed because it is too large
Load Diff
333
crates/client2/src/telemetry.rs
Normal file
333
crates/client2/src/telemetry.rs
Normal file
@@ -0,0 +1,333 @@
|
||||
use crate::{TelemetrySettings, ZED_SECRET_CLIENT_TOKEN, ZED_SERVER_URL};
|
||||
use gpui2::{serde_json, AppContext, AppMetadata, Executor, Task};
|
||||
use lazy_static::lazy_static;
|
||||
use parking_lot::Mutex;
|
||||
use serde::Serialize;
|
||||
use settings2::Settings;
|
||||
use std::{env, io::Write, mem, path::PathBuf, sync::Arc, time::Duration};
|
||||
use sysinfo::{
|
||||
CpuRefreshKind, Pid, PidExt, ProcessExt, ProcessRefreshKind, RefreshKind, System, SystemExt,
|
||||
};
|
||||
use tempfile::NamedTempFile;
|
||||
use util::http::HttpClient;
|
||||
use util::{channel::ReleaseChannel, TryFutureExt};
|
||||
|
||||
pub struct Telemetry {
|
||||
http_client: Arc<dyn HttpClient>,
|
||||
executor: Executor,
|
||||
state: Mutex<TelemetryState>,
|
||||
}
|
||||
|
||||
struct TelemetryState {
|
||||
metrics_id: Option<Arc<str>>, // Per logged-in user
|
||||
installation_id: Option<Arc<str>>, // Per app installation (different for dev, preview, and stable)
|
||||
session_id: Option<Arc<str>>, // Per app launch
|
||||
release_channel: Option<&'static str>,
|
||||
app_metadata: AppMetadata,
|
||||
architecture: &'static str,
|
||||
clickhouse_events_queue: Vec<ClickhouseEventWrapper>,
|
||||
flush_clickhouse_events_task: Option<Task<()>>,
|
||||
log_file: Option<NamedTempFile>,
|
||||
is_staff: Option<bool>,
|
||||
}
|
||||
|
||||
const CLICKHOUSE_EVENTS_URL_PATH: &'static str = "/api/events";
|
||||
|
||||
lazy_static! {
|
||||
static ref CLICKHOUSE_EVENTS_URL: String =
|
||||
format!("{}{}", *ZED_SERVER_URL, CLICKHOUSE_EVENTS_URL_PATH);
|
||||
}
|
||||
|
||||
#[derive(Serialize, Debug)]
|
||||
struct ClickhouseEventRequestBody {
|
||||
token: &'static str,
|
||||
installation_id: Option<Arc<str>>,
|
||||
session_id: Option<Arc<str>>,
|
||||
is_staff: Option<bool>,
|
||||
app_version: Option<String>,
|
||||
os_name: &'static str,
|
||||
os_version: Option<String>,
|
||||
architecture: &'static str,
|
||||
release_channel: Option<&'static str>,
|
||||
events: Vec<ClickhouseEventWrapper>,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Debug)]
|
||||
struct ClickhouseEventWrapper {
|
||||
signed_in: bool,
|
||||
#[serde(flatten)]
|
||||
event: ClickhouseEvent,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Debug)]
|
||||
#[serde(rename_all = "snake_case")]
|
||||
pub enum AssistantKind {
|
||||
Panel,
|
||||
Inline,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Debug)]
|
||||
#[serde(tag = "type")]
|
||||
pub enum ClickhouseEvent {
|
||||
Editor {
|
||||
operation: &'static str,
|
||||
file_extension: Option<String>,
|
||||
vim_mode: bool,
|
||||
copilot_enabled: bool,
|
||||
copilot_enabled_for_language: bool,
|
||||
},
|
||||
Copilot {
|
||||
suggestion_id: Option<String>,
|
||||
suggestion_accepted: bool,
|
||||
file_extension: Option<String>,
|
||||
},
|
||||
Call {
|
||||
operation: &'static str,
|
||||
room_id: Option<u64>,
|
||||
channel_id: Option<u64>,
|
||||
},
|
||||
Assistant {
|
||||
conversation_id: Option<String>,
|
||||
kind: AssistantKind,
|
||||
model: &'static str,
|
||||
},
|
||||
Cpu {
|
||||
usage_as_percentage: f32,
|
||||
core_count: u32,
|
||||
},
|
||||
Memory {
|
||||
memory_in_bytes: u64,
|
||||
virtual_memory_in_bytes: u64,
|
||||
},
|
||||
}
|
||||
|
||||
#[cfg(debug_assertions)]
|
||||
const MAX_QUEUE_LEN: usize = 1;
|
||||
|
||||
#[cfg(not(debug_assertions))]
|
||||
const MAX_QUEUE_LEN: usize = 10;
|
||||
|
||||
#[cfg(debug_assertions)]
|
||||
const DEBOUNCE_INTERVAL: Duration = Duration::from_secs(1);
|
||||
|
||||
#[cfg(not(debug_assertions))]
|
||||
const DEBOUNCE_INTERVAL: Duration = Duration::from_secs(30);
|
||||
|
||||
impl Telemetry {
|
||||
pub fn new(client: Arc<dyn HttpClient>, cx: &AppContext) -> Arc<Self> {
|
||||
let release_channel = if cx.has_global::<ReleaseChannel>() {
|
||||
Some(cx.global::<ReleaseChannel>().display_name())
|
||||
} else {
|
||||
None
|
||||
};
|
||||
// TODO: Replace all hardware stuff with nested SystemSpecs json
|
||||
let this = Arc::new(Self {
|
||||
http_client: client,
|
||||
executor: cx.executor().clone(),
|
||||
state: Mutex::new(TelemetryState {
|
||||
app_metadata: cx.app_metadata(),
|
||||
architecture: env::consts::ARCH,
|
||||
release_channel,
|
||||
installation_id: None,
|
||||
metrics_id: None,
|
||||
session_id: None,
|
||||
clickhouse_events_queue: Default::default(),
|
||||
flush_clickhouse_events_task: Default::default(),
|
||||
log_file: None,
|
||||
is_staff: None,
|
||||
}),
|
||||
});
|
||||
|
||||
this
|
||||
}
|
||||
|
||||
pub fn log_file_path(&self) -> Option<PathBuf> {
|
||||
Some(self.state.lock().log_file.as_ref()?.path().to_path_buf())
|
||||
}
|
||||
|
||||
pub fn start(
|
||||
self: &Arc<Self>,
|
||||
installation_id: Option<String>,
|
||||
session_id: String,
|
||||
cx: &mut AppContext,
|
||||
) {
|
||||
let mut state = self.state.lock();
|
||||
state.installation_id = installation_id.map(|id| id.into());
|
||||
state.session_id = Some(session_id.into());
|
||||
let has_clickhouse_events = !state.clickhouse_events_queue.is_empty();
|
||||
drop(state);
|
||||
|
||||
if has_clickhouse_events {
|
||||
self.flush_clickhouse_events();
|
||||
}
|
||||
|
||||
let this = self.clone();
|
||||
cx.spawn(|cx| async move {
|
||||
// Avoiding calling `System::new_all()`, as there have been crashes related to it
|
||||
let refresh_kind = RefreshKind::new()
|
||||
.with_memory() // For memory usage
|
||||
.with_processes(ProcessRefreshKind::everything()) // For process usage
|
||||
.with_cpu(CpuRefreshKind::everything()); // For core count
|
||||
|
||||
let mut system = System::new_with_specifics(refresh_kind);
|
||||
|
||||
// Avoiding calling `refresh_all()`, just update what we need
|
||||
system.refresh_specifics(refresh_kind);
|
||||
|
||||
loop {
|
||||
// Waiting some amount of time before the first query is important to get a reasonable value
|
||||
// https://docs.rs/sysinfo/0.29.10/sysinfo/trait.ProcessExt.html#tymethod.cpu_usage
|
||||
const DURATION_BETWEEN_SYSTEM_EVENTS: Duration = Duration::from_secs(60);
|
||||
smol::Timer::after(DURATION_BETWEEN_SYSTEM_EVENTS).await;
|
||||
|
||||
system.refresh_specifics(refresh_kind);
|
||||
|
||||
let current_process = Pid::from_u32(std::process::id());
|
||||
let Some(process) = system.processes().get(¤t_process) else {
|
||||
let process = current_process;
|
||||
log::error!("Failed to find own process {process:?} in system process table");
|
||||
// TODO: Fire an error telemetry event
|
||||
return;
|
||||
};
|
||||
|
||||
let memory_event = ClickhouseEvent::Memory {
|
||||
memory_in_bytes: process.memory(),
|
||||
virtual_memory_in_bytes: process.virtual_memory(),
|
||||
};
|
||||
|
||||
let cpu_event = ClickhouseEvent::Cpu {
|
||||
usage_as_percentage: process.cpu_usage(),
|
||||
core_count: system.cpus().len() as u32,
|
||||
};
|
||||
|
||||
let telemetry_settings = if let Ok(telemetry_settings) =
|
||||
cx.update(|cx| *TelemetrySettings::get_global(cx))
|
||||
{
|
||||
telemetry_settings
|
||||
} else {
|
||||
break;
|
||||
};
|
||||
|
||||
this.report_clickhouse_event(memory_event, telemetry_settings);
|
||||
this.report_clickhouse_event(cpu_event, telemetry_settings);
|
||||
}
|
||||
})
|
||||
.detach();
|
||||
}
|
||||
|
||||
pub fn set_authenticated_user_info(
|
||||
self: &Arc<Self>,
|
||||
metrics_id: Option<String>,
|
||||
is_staff: bool,
|
||||
cx: &AppContext,
|
||||
) {
|
||||
if !TelemetrySettings::get_global(cx).metrics {
|
||||
return;
|
||||
}
|
||||
|
||||
let mut state = self.state.lock();
|
||||
let metrics_id: Option<Arc<str>> = metrics_id.map(|id| id.into());
|
||||
state.metrics_id = metrics_id.clone();
|
||||
state.is_staff = Some(is_staff);
|
||||
drop(state);
|
||||
}
|
||||
|
||||
pub fn report_clickhouse_event(
|
||||
self: &Arc<Self>,
|
||||
event: ClickhouseEvent,
|
||||
telemetry_settings: TelemetrySettings,
|
||||
) {
|
||||
if !telemetry_settings.metrics {
|
||||
return;
|
||||
}
|
||||
|
||||
let mut state = self.state.lock();
|
||||
let signed_in = state.metrics_id.is_some();
|
||||
state
|
||||
.clickhouse_events_queue
|
||||
.push(ClickhouseEventWrapper { signed_in, event });
|
||||
|
||||
if state.installation_id.is_some() {
|
||||
if state.clickhouse_events_queue.len() >= MAX_QUEUE_LEN {
|
||||
drop(state);
|
||||
self.flush_clickhouse_events();
|
||||
} else {
|
||||
let this = self.clone();
|
||||
let executor = self.executor.clone();
|
||||
state.flush_clickhouse_events_task = Some(self.executor.spawn(async move {
|
||||
executor.timer(DEBOUNCE_INTERVAL).await;
|
||||
this.flush_clickhouse_events();
|
||||
}));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub fn metrics_id(self: &Arc<Self>) -> Option<Arc<str>> {
|
||||
self.state.lock().metrics_id.clone()
|
||||
}
|
||||
|
||||
pub fn installation_id(self: &Arc<Self>) -> Option<Arc<str>> {
|
||||
self.state.lock().installation_id.clone()
|
||||
}
|
||||
|
||||
pub fn is_staff(self: &Arc<Self>) -> Option<bool> {
|
||||
self.state.lock().is_staff
|
||||
}
|
||||
|
||||
fn flush_clickhouse_events(self: &Arc<Self>) {
|
||||
let mut state = self.state.lock();
|
||||
let mut events = mem::take(&mut state.clickhouse_events_queue);
|
||||
state.flush_clickhouse_events_task.take();
|
||||
drop(state);
|
||||
|
||||
let this = self.clone();
|
||||
self.executor
|
||||
.spawn(
|
||||
async move {
|
||||
let mut json_bytes = Vec::new();
|
||||
|
||||
if let Some(file) = &mut this.state.lock().log_file {
|
||||
let file = file.as_file_mut();
|
||||
for event in &mut events {
|
||||
json_bytes.clear();
|
||||
serde_json::to_writer(&mut json_bytes, event)?;
|
||||
file.write_all(&json_bytes)?;
|
||||
file.write(b"\n")?;
|
||||
}
|
||||
}
|
||||
|
||||
{
|
||||
let state = this.state.lock();
|
||||
let request_body = ClickhouseEventRequestBody {
|
||||
token: ZED_SECRET_CLIENT_TOKEN,
|
||||
installation_id: state.installation_id.clone(),
|
||||
session_id: state.session_id.clone(),
|
||||
is_staff: state.is_staff.clone(),
|
||||
app_version: state
|
||||
.app_metadata
|
||||
.app_version
|
||||
.map(|version| version.to_string()),
|
||||
os_name: state.app_metadata.os_name,
|
||||
os_version: state
|
||||
.app_metadata
|
||||
.os_version
|
||||
.map(|version| version.to_string()),
|
||||
architecture: state.architecture,
|
||||
|
||||
release_channel: state.release_channel,
|
||||
events,
|
||||
};
|
||||
json_bytes.clear();
|
||||
serde_json::to_writer(&mut json_bytes, &request_body)?;
|
||||
}
|
||||
|
||||
this.http_client
|
||||
.post_json(CLICKHOUSE_EVENTS_URL.as_str(), json_bytes.into())
|
||||
.await?;
|
||||
anyhow::Ok(())
|
||||
}
|
||||
.log_err(),
|
||||
)
|
||||
.detach();
|
||||
}
|
||||
}
|
||||
216
crates/client2/src/test.rs
Normal file
216
crates/client2/src/test.rs
Normal file
@@ -0,0 +1,216 @@
|
||||
use crate::{Client, Connection, Credentials, EstablishConnectionError, UserStore};
|
||||
use anyhow::{anyhow, Result};
|
||||
use futures::{stream::BoxStream, StreamExt};
|
||||
use gpui2::{Context, Executor, Model, TestAppContext};
|
||||
use parking_lot::Mutex;
|
||||
use rpc2::{
|
||||
proto::{self, GetPrivateUserInfo, GetPrivateUserInfoResponse},
|
||||
ConnectionId, Peer, Receipt, TypedEnvelope,
|
||||
};
|
||||
use std::sync::Arc;
|
||||
use util::http::FakeHttpClient;
|
||||
|
||||
/// An in-process stand-in for the collab server, used by client tests.
/// Connections are established over in-memory channels via `Peer`.
pub struct FakeServer {
    // RPC peer that owns the server side of every test connection.
    peer: Arc<Peer>,
    // Mutable server state, shared weakly with the client's auth /
    // connection-override closures.
    state: Arc<Mutex<FakeServerState>>,
    // The single user id this fake server authenticates.
    user_id: u64,
    executor: Executor,
}
|
||||
|
||||
#[derive(Default)]
struct FakeServerState {
    // Stream of messages received from the connected client, if any.
    incoming: Option<BoxStream<'static, Box<dyn proto::AnyTypedEnvelope>>>,
    // Id of the current connection; `None` when disconnected.
    connection_id: Option<ConnectionId>,
    // When true, connection attempts fail with an error.
    forbid_connections: bool,
    // Number of authentication attempts observed.
    auth_count: usize,
    // Current access token; incremented by `roll_access_token` to
    // invalidate previously issued credentials.
    access_token: usize,
}
|
||||
|
||||
impl FakeServer {
    /// Creates a fake server, installs authentication and connection
    /// overrides on `client` (holding only weak references so dropping the
    /// server invalidates them), and connects the client to it.
    pub async fn for_client(
        client_user_id: u64,
        client: &Arc<Client>,
        cx: &TestAppContext,
    ) -> Self {
        let server = Self {
            peer: Peer::new(0),
            state: Default::default(),
            user_id: client_user_id,
            executor: cx.executor().clone(),
        };

        client
            .override_authenticate({
                // Weak reference: authentication fails once the server is dropped.
                let state = Arc::downgrade(&server.state);
                move |cx| {
                    let state = state.clone();
                    cx.spawn(move |_| async move {
                        let state = state.upgrade().ok_or_else(|| anyhow!("server dropped"))?;
                        let mut state = state.lock();
                        state.auth_count += 1;
                        // Issue credentials carrying the *current* token.
                        let access_token = state.access_token.to_string();
                        Ok(Credentials {
                            user_id: client_user_id,
                            access_token,
                        })
                    })
                }
            })
            .override_establish_connection({
                let peer = Arc::downgrade(&server.peer);
                let state = Arc::downgrade(&server.state);
                move |credentials, cx| {
                    let peer = peer.clone();
                    let state = state.clone();
                    let credentials = credentials.clone();
                    cx.spawn(move |cx| async move {
                        let state = state.upgrade().ok_or_else(|| anyhow!("server dropped"))?;
                        let peer = peer.upgrade().ok_or_else(|| anyhow!("server dropped"))?;
                        // Simulated outage: reject while forbidden.
                        if state.lock().forbid_connections {
                            Err(EstablishConnectionError::Other(anyhow!(
                                "server is forbidding connections"
                            )))?
                        }

                        assert_eq!(credentials.user_id, client_user_id);

                        // Reject stale credentials (e.g. after roll_access_token).
                        if credentials.access_token != state.lock().access_token.to_string() {
                            Err(EstablishConnectionError::Unauthorized)?
                        }

                        // Wire up an in-memory connection pair and pump its IO
                        // on the test executor.
                        let (client_conn, server_conn, _) =
                            Connection::in_memory(cx.executor().clone());
                        let (connection_id, io, incoming) =
                            peer.add_test_connection(server_conn, cx.executor().clone());
                        cx.executor().spawn(io).detach();
                        {
                            let mut state = state.lock();
                            state.connection_id = Some(connection_id);
                            state.incoming = Some(incoming);
                        }
                        // Greet the client, as the real server does.
                        peer.send(
                            connection_id,
                            proto::Hello {
                                peer_id: Some(connection_id.into()),
                            },
                        )
                        .unwrap();

                        Ok(client_conn)
                    })
                }
            });

        client
            .authenticate_and_connect(false, &cx.to_async())
            .await
            .unwrap();

        server
    }

    /// Tears down the current connection, if any, and clears connection state.
    pub fn disconnect(&self) {
        if self.state.lock().connection_id.is_some() {
            self.peer.disconnect(self.connection_id());
            let mut state = self.state.lock();
            state.connection_id.take();
            state.incoming.take();
        }
    }

    /// Number of authentication attempts the server has observed.
    pub fn auth_count(&self) -> usize {
        self.state.lock().auth_count
    }

    /// Invalidates previously issued credentials by bumping the access token.
    pub fn roll_access_token(&self) {
        self.state.lock().access_token += 1;
    }

    /// Makes subsequent connection attempts fail.
    pub fn forbid_connections(&self) {
        self.state.lock().forbid_connections = true;
    }

    /// Allows connection attempts again.
    pub fn allow_connections(&self) {
        self.state.lock().forbid_connections = false;
    }

    /// Sends a message to the connected client. Panics if not connected.
    pub fn send<T: proto::EnvelopedMessage>(&self, message: T) {
        self.peer.send(self.connection_id(), message).unwrap();
    }

    /// Waits for the next incoming message of type `M`, transparently
    /// answering any interleaved `GetPrivateUserInfo` requests. Panics on
    /// any other unexpected message type.
    #[allow(clippy::await_holding_lock)]
    pub async fn receive<M: proto::EnvelopedMessage>(&self) -> Result<TypedEnvelope<M>> {
        // Tell the test executor we're blocked on external input.
        self.executor.start_waiting();

        loop {
            let message = self
                .state
                .lock()
                .incoming
                .as_mut()
                .expect("not connected")
                .next()
                .await
                .ok_or_else(|| anyhow!("other half hung up"))?;
            self.executor.finish_waiting();
            let type_name = message.payload_type_name();
            let message = message.into_any();

            if message.is::<TypedEnvelope<M>>() {
                return Ok(*message.downcast().unwrap());
            }

            // The client routinely asks for private user info after
            // connecting; answer with canned data and keep waiting for `M`.
            if message.is::<TypedEnvelope<GetPrivateUserInfo>>() {
                self.respond(
                    message
                        .downcast::<TypedEnvelope<GetPrivateUserInfo>>()
                        .unwrap()
                        .receipt(),
                    GetPrivateUserInfoResponse {
                        metrics_id: "the-metrics-id".into(),
                        staff: false,
                        flags: Default::default(),
                    },
                );
                continue;
            }

            panic!(
                "fake server received unexpected message type: {:?}",
                type_name
            );
        }
    }

    /// Responds to a previously received request.
    pub fn respond<T: proto::RequestMessage>(&self, receipt: Receipt<T>, response: T::Response) {
        self.peer.respond(receipt, response).unwrap()
    }

    // Current connection id; panics if not connected.
    fn connection_id(&self) -> ConnectionId {
        self.state.lock().connection_id.expect("not connected")
    }

    /// Builds a `UserStore` wired to `client` and consumes the initial
    /// `GetUsers` request it sends, asserting it asks for this server's user.
    pub async fn build_user_store(
        &self,
        client: Arc<Client>,
        cx: &mut TestAppContext,
    ) -> Model<UserStore> {
        let http_client = FakeHttpClient::with_404_response();
        let user_store = cx.build_model(|cx| UserStore::new(client, http_client, cx));
        assert_eq!(
            self.receive::<proto::GetUsers>()
                .await
                .unwrap()
                .payload
                .user_ids,
            &[self.user_id]
        );
        user_store
    }
}
|
||||
|
||||
impl Drop for FakeServer {
    // Ensure the peer connection is torn down when the server goes away.
    fn drop(&mut self) {
        self.disconnect();
    }
}
|
||||
739
crates/client2/src/user.rs
Normal file
739
crates/client2/src/user.rs
Normal file
@@ -0,0 +1,739 @@
|
||||
use super::{proto, Client, Status, TypedEnvelope};
|
||||
use anyhow::{anyhow, Context, Result};
|
||||
use collections::{hash_map::Entry, HashMap, HashSet};
|
||||
use feature_flags2::FeatureFlagAppExt;
|
||||
use futures::{channel::mpsc, future, AsyncReadExt, Future, StreamExt};
|
||||
use gpui2::{AsyncAppContext, EventEmitter, ImageData, Model, ModelContext, Task};
|
||||
use postage::{sink::Sink, watch};
|
||||
use rpc2::proto::{RequestMessage, UsersResponse};
|
||||
use std::sync::{Arc, Weak};
|
||||
use text::ReplicaId;
|
||||
use util::http::HttpClient;
|
||||
use util::TryFutureExt as _;
|
||||
|
||||
/// Server-side identifier for a user.
pub type UserId = u64;

/// Index assigned to a participant in a call; stored per user id in
/// `UserStore::participant_indices`.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub struct ParticipantIndex(pub u32);

/// A user known to the client, with an optionally fetched avatar image.
#[derive(Default, Debug)]
pub struct User {
    pub id: UserId,
    pub github_login: String,
    // Fetched lazily from the user's avatar URL; `None` if unavailable.
    pub avatar: Option<Arc<ImageData>>,
}

/// A remote collaborator on a shared project.
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct Collaborator {
    pub peer_id: proto::PeerId,
    pub replica_id: ReplicaId,
    pub user_id: UserId,
}

impl PartialOrd for User {
    fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> {
        Some(self.cmp(other))
    }
}

impl Ord for User {
    // Users sort by GitHub login, matching the sorted lists in `UserStore`.
    fn cmp(&self, other: &Self) -> std::cmp::Ordering {
        self.github_login.cmp(&other.github_login)
    }
}

impl PartialEq for User {
    // Equality intentionally ignores the avatar.
    fn eq(&self, other: &Self) -> bool {
        self.id == other.id && self.github_login == other.github_login
    }
}

impl Eq for User {}

/// A contact relationship plus the contact's current presence.
#[derive(Debug, PartialEq)]
pub struct Contact {
    pub user: Arc<User>,
    pub online: bool,
    pub busy: bool,
}

/// Where a given user stands relative to the current user's contact list.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum ContactRequestStatus {
    None,
    RequestSent,
    RequestReceived,
    RequestAccepted,
}
|
||||
|
||||
/// Caches known users and maintains the contact list, contact requests, and
/// the currently authenticated user by listening to RPC messages.
pub struct UserStore {
    // Cache of fetched users by id.
    users: HashMap<u64, Arc<User>>,
    participant_indices: HashMap<u64, ParticipantIndex>,
    // Feeds the `_maintain_contacts` task.
    update_contacts_tx: mpsc::UnboundedSender<UpdateContacts>,
    current_user: watch::Receiver<Option<Arc<User>>>,
    // The following three lists are kept sorted by github_login
    // (binary_search_by_key is used for insertion and lookup).
    contacts: Vec<Arc<Contact>>,
    incoming_contact_requests: Vec<Arc<User>>,
    outgoing_contact_requests: Vec<Arc<User>>,
    // user_id -> number of in-flight contact-related requests.
    pending_contact_requests: HashMap<u64, usize>,
    invite_info: Option<InviteInfo>,
    client: Weak<Client>,
    http: Arc<dyn HttpClient>,
    // Background task applying `UpdateContacts` messages in order.
    _maintain_contacts: Task<()>,
    // Background task tracking connection status and the current user.
    _maintain_current_user: Task<Result<()>>,
}
|
||||
|
||||
/// Invite information pushed by the server (see `handle_update_invite_info`).
#[derive(Clone)]
pub struct InviteInfo {
    pub count: u32,
    pub url: Arc<str>,
}

/// Events emitted by `UserStore`.
pub enum Event {
    Contact {
        user: Arc<User>,
        kind: ContactEventKind,
    },
    ShowContacts,
    ParticipantIndicesChanged,
}

/// The kind of contact change that occurred for an `Event::Contact`.
#[derive(Clone, Copy)]
pub enum ContactEventKind {
    Requested,
    Accepted,
    Cancelled,
}

impl EventEmitter for UserStore {
    type Event = Event;
}

// Messages processed serially by the `_maintain_contacts` task. `Wait` and
// `Clear` carry a barrier sender that is dropped when the message is
// handled, letting callers await completion.
enum UpdateContacts {
    Update(proto::UpdateContacts),
    Wait(postage::barrier::Sender),
    Clear(postage::barrier::Sender),
}
|
||||
|
||||
impl UserStore {
    /// Creates a `UserStore` subscribed to the client's contact / invite /
    /// show-contacts messages, and spawns the two maintenance tasks.
    pub fn new(
        client: Arc<Client>,
        http: Arc<dyn HttpClient>,
        cx: &mut ModelContext<Self>,
    ) -> Self {
        let (mut current_user_tx, current_user_rx) = watch::channel();
        let (update_contacts_tx, mut update_contacts_rx) = mpsc::unbounded();
        let rpc_subscriptions = vec![
            client.add_message_handler(cx.weak_model(), Self::handle_update_contacts),
            client.add_message_handler(cx.weak_model(), Self::handle_update_invite_info),
            client.add_message_handler(cx.weak_model(), Self::handle_show_contacts),
        ];
        Self {
            users: Default::default(),
            current_user: current_user_rx,
            contacts: Default::default(),
            incoming_contact_requests: Default::default(),
            participant_indices: Default::default(),
            outgoing_contact_requests: Default::default(),
            invite_info: None,
            client: Arc::downgrade(&client),
            update_contacts_tx,
            http,
            // Applies queued UpdateContacts messages one at a time, keeping
            // the RPC subscriptions alive for the store's lifetime.
            _maintain_contacts: cx.spawn(|this, mut cx| async move {
                let _subscriptions = rpc_subscriptions;
                while let Some(message) = update_contacts_rx.next().await {
                    if let Ok(task) =
                        this.update(&mut cx, |this, cx| this.update_contacts(message, cx))
                    {
                        task.log_err().await;
                    } else {
                        // The store was dropped.
                        break;
                    }
                }
            }),
            // Tracks connection status: on connect, fetches the current user
            // and private info; on sign-out / connection loss, clears state.
            _maintain_current_user: cx.spawn(|this, mut cx| async move {
                let mut status = client.status();
                while let Some(status) = status.next().await {
                    match status {
                        Status::Connected { .. } => {
                            if let Some(user_id) = client.user_id() {
                                let fetch_user = if let Ok(fetch_user) = this
                                    .update(&mut cx, |this, cx| {
                                        this.get_user(user_id, cx).log_err()
                                    }) {
                                    fetch_user
                                } else {
                                    break;
                                };
                                let fetch_metrics_id =
                                    client.request(proto::GetPrivateUserInfo {}).log_err();
                                // Fetch the user and private info concurrently.
                                let (user, info) = futures::join!(fetch_user, fetch_metrics_id);

                                cx.update(|cx| {
                                    if let Some(info) = info {
                                        cx.update_flags(info.staff, info.flags);
                                        client.telemetry.set_authenticated_user_info(
                                            Some(info.metrics_id.clone()),
                                            info.staff,
                                            cx,
                                        )
                                    }
                                })?;

                                current_user_tx.send(user).await.ok();

                                this.update(&mut cx, |_, cx| cx.notify())?;
                            }
                        }
                        Status::SignedOut => {
                            current_user_tx.send(None).await.ok();
                            this.update(&mut cx, |this, cx| {
                                cx.notify();
                                this.clear_contacts()
                            })?
                            .await;
                        }
                        Status::ConnectionLost => {
                            this.update(&mut cx, |this, cx| {
                                cx.notify();
                                this.clear_contacts()
                            })?
                            .await;
                        }
                        _ => {}
                    }
                }
                Ok(())
            }),
            pending_contact_requests: Default::default(),
        }
    }

    /// Drops all cached users (test-only).
    #[cfg(feature = "test-support")]
    pub fn clear_cache(&mut self) {
        self.users.clear();
    }

    /// Stores invite info pushed by the server and notifies observers.
    async fn handle_update_invite_info(
        this: Model<Self>,
        message: TypedEnvelope<proto::UpdateInviteInfo>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<()> {
        this.update(&mut cx, |this, cx| {
            this.invite_info = Some(InviteInfo {
                url: Arc::from(message.payload.url),
                count: message.payload.count,
            });
            cx.notify();
        })?;
        Ok(())
    }

    /// Re-emits the server's request to show the contacts UI as an event.
    async fn handle_show_contacts(
        this: Model<Self>,
        _: TypedEnvelope<proto::ShowContacts>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<()> {
        this.update(&mut cx, |_, cx| cx.emit(Event::ShowContacts))?;
        Ok(())
    }

    pub fn invite_info(&self) -> Option<&InviteInfo> {
        self.invite_info.as_ref()
    }

    /// Queues a contact update to be applied in order by the
    /// `_maintain_contacts` task.
    async fn handle_update_contacts(
        this: Model<Self>,
        message: TypedEnvelope<proto::UpdateContacts>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<()> {
        this.update(&mut cx, |this, _| {
            this.update_contacts_tx
                .unbounded_send(UpdateContacts::Update(message.payload))
                .unwrap();
        })?;
        Ok(())
    }

    /// Applies one `UpdateContacts` message: `Wait` is a barrier no-op,
    /// `Clear` empties all contact lists, and `Update` merges the server's
    /// additions/removals into the sorted lists, emitting events for changes
    /// that should notify the user.
    fn update_contacts(
        &mut self,
        message: UpdateContacts,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<()>> {
        match message {
            UpdateContacts::Wait(barrier) => {
                // Dropping the sender releases anyone awaiting the barrier.
                drop(barrier);
                Task::ready(Ok(()))
            }
            UpdateContacts::Clear(barrier) => {
                self.contacts.clear();
                self.incoming_contact_requests.clear();
                self.outgoing_contact_requests.clear();
                drop(barrier);
                Task::ready(Ok(()))
            }
            UpdateContacts::Update(message) => {
                // Collect every user id referenced by the update so they can
                // be fetched (and cached) in one batch.
                let mut user_ids = HashSet::default();
                for contact in &message.contacts {
                    user_ids.insert(contact.user_id);
                }
                user_ids.extend(message.incoming_requests.iter().map(|req| req.requester_id));
                user_ids.extend(message.outgoing_requests.iter());

                let load_users = self.get_users(user_ids.into_iter().collect(), cx);
                cx.spawn(|this, mut cx| async move {
                    load_users.await?;

                    // Users are fetched in parallel above and cached in call to get_users
                    // No need to paralellize here
                    let mut updated_contacts = Vec::new();
                    let this = this
                        .upgrade()
                        .ok_or_else(|| anyhow!("can't upgrade user store handle"))?;
                    for contact in message.contacts {
                        let should_notify = contact.should_notify;
                        updated_contacts.push((
                            Arc::new(Contact::from_proto(contact, &this, &mut cx).await?),
                            should_notify,
                        ));
                    }

                    let mut incoming_requests = Vec::new();
                    for request in message.incoming_requests {
                        incoming_requests.push({
                            let user = this
                                .update(&mut cx, |this, cx| {
                                    this.get_user(request.requester_id, cx)
                                })?
                                .await?;
                            (user, request.should_notify)
                        });
                    }

                    let mut outgoing_requests = Vec::new();
                    for requested_user_id in message.outgoing_requests {
                        outgoing_requests.push(
                            this.update(&mut cx, |this, cx| this.get_user(requested_user_id, cx))?
                                .await?,
                        );
                    }

                    let removed_contacts =
                        HashSet::<u64>::from_iter(message.remove_contacts.iter().copied());
                    let removed_incoming_requests =
                        HashSet::<u64>::from_iter(message.remove_incoming_requests.iter().copied());
                    let removed_outgoing_requests =
                        HashSet::<u64>::from_iter(message.remove_outgoing_requests.iter().copied());

                    this.update(&mut cx, |this, cx| {
                        // Remove contacts
                        this.contacts
                            .retain(|contact| !removed_contacts.contains(&contact.user.id));
                        // Update existing contacts and insert new ones,
                        // keeping the list sorted by github_login.
                        for (updated_contact, should_notify) in updated_contacts {
                            if should_notify {
                                cx.emit(Event::Contact {
                                    user: updated_contact.user.clone(),
                                    kind: ContactEventKind::Accepted,
                                });
                            }
                            match this.contacts.binary_search_by_key(
                                &&updated_contact.user.github_login,
                                |contact| &contact.user.github_login,
                            ) {
                                Ok(ix) => this.contacts[ix] = updated_contact,
                                Err(ix) => this.contacts.insert(ix, updated_contact),
                            }
                        }

                        // Remove incoming contact requests, notifying for
                        // each one that was cancelled.
                        this.incoming_contact_requests.retain(|user| {
                            if removed_incoming_requests.contains(&user.id) {
                                cx.emit(Event::Contact {
                                    user: user.clone(),
                                    kind: ContactEventKind::Cancelled,
                                });
                                false
                            } else {
                                true
                            }
                        });
                        // Update existing incoming requests and insert new ones
                        for (user, should_notify) in incoming_requests {
                            if should_notify {
                                cx.emit(Event::Contact {
                                    user: user.clone(),
                                    kind: ContactEventKind::Requested,
                                });
                            }

                            match this
                                .incoming_contact_requests
                                .binary_search_by_key(&&user.github_login, |contact| {
                                    &contact.github_login
                                }) {
                                Ok(ix) => this.incoming_contact_requests[ix] = user,
                                Err(ix) => this.incoming_contact_requests.insert(ix, user),
                            }
                        }

                        // Remove outgoing contact requests
                        this.outgoing_contact_requests
                            .retain(|user| !removed_outgoing_requests.contains(&user.id));
                        // Update existing outgoing requests and insert new ones
                        for request in outgoing_requests {
                            match this
                                .outgoing_contact_requests
                                .binary_search_by_key(&&request.github_login, |contact| {
                                    &contact.github_login
                                }) {
                                Ok(ix) => this.outgoing_contact_requests[ix] = request,
                                Err(ix) => this.outgoing_contact_requests.insert(ix, request),
                            }
                        }

                        cx.notify();
                    })?;

                    Ok(())
                })
            }
        }
    }

    pub fn contacts(&self) -> &[Arc<Contact>] {
        &self.contacts
    }

    /// Whether `user` is an accepted contact (lookup by github_login in the
    /// sorted contacts list).
    pub fn has_contact(&self, user: &Arc<User>) -> bool {
        self.contacts
            .binary_search_by_key(&&user.github_login, |contact| &contact.user.github_login)
            .is_ok()
    }

    pub fn incoming_contact_requests(&self) -> &[Arc<User>] {
        &self.incoming_contact_requests
    }

    pub fn outgoing_contact_requests(&self) -> &[Arc<User>] {
        &self.outgoing_contact_requests
    }

    /// Whether a contact-related request for this user is currently in flight.
    pub fn is_contact_request_pending(&self, user: &User) -> bool {
        self.pending_contact_requests.contains_key(&user.id)
    }

    /// Classifies `user` relative to the current user's contact lists.
    pub fn contact_request_status(&self, user: &User) -> ContactRequestStatus {
        if self
            .contacts
            .binary_search_by_key(&&user.github_login, |contact| &contact.user.github_login)
            .is_ok()
        {
            ContactRequestStatus::RequestAccepted
        } else if self
            .outgoing_contact_requests
            .binary_search_by_key(&&user.github_login, |user| &user.github_login)
            .is_ok()
        {
            ContactRequestStatus::RequestSent
        } else if self
            .incoming_contact_requests
            .binary_search_by_key(&&user.github_login, |user| &user.github_login)
            .is_ok()
        {
            ContactRequestStatus::RequestReceived
        } else {
            ContactRequestStatus::None
        }
    }

    /// Sends a contact request to `responder_id`.
    pub fn request_contact(
        &mut self,
        responder_id: u64,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<()>> {
        self.perform_contact_request(responder_id, proto::RequestContact { responder_id }, cx)
    }

    /// Removes `user_id` from the current user's contacts.
    pub fn remove_contact(
        &mut self,
        user_id: u64,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<()>> {
        self.perform_contact_request(user_id, proto::RemoveContact { user_id }, cx)
    }

    /// Accepts or declines an incoming contact request.
    pub fn respond_to_contact_request(
        &mut self,
        requester_id: u64,
        accept: bool,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<()>> {
        self.perform_contact_request(
            requester_id,
            proto::RespondToContactRequest {
                requester_id,
                response: if accept {
                    proto::ContactRequestResponse::Accept
                } else {
                    proto::ContactRequestResponse::Decline
                } as i32,
            },
            cx,
        )
    }

    /// Dismisses an incoming contact request without accepting or declining.
    /// Unlike `perform_contact_request`, this does not track a pending count.
    pub fn dismiss_contact_request(
        &mut self,
        requester_id: u64,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<()>> {
        let client = self.client.upgrade();
        cx.spawn(move |_, _| async move {
            client
                .ok_or_else(|| anyhow!("can't upgrade client reference"))?
                .request(proto::RespondToContactRequest {
                    requester_id,
                    response: proto::ContactRequestResponse::Dismiss as i32,
                })
                .await?;
            Ok(())
        })
    }

    /// Sends `request`, bracketing it with increment/decrement of the
    /// per-user pending-request count so the UI can show progress.
    fn perform_contact_request<T: RequestMessage>(
        &mut self,
        user_id: u64,
        request: T,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<()>> {
        let client = self.client.upgrade();
        *self.pending_contact_requests.entry(user_id).or_insert(0) += 1;
        cx.notify();

        cx.spawn(move |this, mut cx| async move {
            let response = client
                .ok_or_else(|| anyhow!("can't upgrade client reference"))?
                .request(request)
                .await;
            // Decrement the pending counter (even on failure), removing the
            // entry when it reaches zero.
            this.update(&mut cx, |this, cx| {
                if let Entry::Occupied(mut request_count) =
                    this.pending_contact_requests.entry(user_id)
                {
                    *request_count.get_mut() -= 1;
                    if *request_count.get() == 0 {
                        request_count.remove();
                    }
                }
                cx.notify();
            })?;
            response?;
            Ok(())
        })
    }

    /// Queues a clear of all contact state; the returned future resolves
    /// once the maintenance task has processed it.
    pub fn clear_contacts(&mut self) -> impl Future<Output = ()> {
        let (tx, mut rx) = postage::barrier::channel();
        self.update_contacts_tx
            .unbounded_send(UpdateContacts::Clear(tx))
            .unwrap();
        async move {
            rx.next().await;
        }
    }

    /// Returns a future that resolves once all previously queued contact
    /// updates have been applied.
    pub fn contact_updates_done(&mut self) -> impl Future<Output = ()> {
        let (tx, mut rx) = postage::barrier::channel();
        self.update_contacts_tx
            .unbounded_send(UpdateContacts::Wait(tx))
            .unwrap();
        async move {
            rx.next().await;
        }
    }

    /// Returns the requested users in order, fetching (and caching) any
    /// that aren't already cached.
    pub fn get_users(
        &mut self,
        user_ids: Vec<u64>,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<Vec<Arc<User>>>> {
        let mut user_ids_to_fetch = user_ids.clone();
        user_ids_to_fetch.retain(|id| !self.users.contains_key(id));

        cx.spawn(|this, mut cx| async move {
            if !user_ids_to_fetch.is_empty() {
                this.update(&mut cx, |this, cx| {
                    this.load_users(
                        proto::GetUsers {
                            user_ids: user_ids_to_fetch,
                        },
                        cx,
                    )
                })?
                .await?;
            }

            this.update(&mut cx, |this, _| {
                user_ids
                    .iter()
                    .map(|user_id| {
                        this.users
                            .get(user_id)
                            .cloned()
                            .ok_or_else(|| anyhow!("user {} not found", user_id))
                    })
                    .collect()
            })?
        })
    }

    /// Searches users on the server by fuzzy-matching `query`.
    pub fn fuzzy_search_users(
        &mut self,
        query: String,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<Vec<Arc<User>>>> {
        self.load_users(proto::FuzzySearchUsers { query }, cx)
    }

    pub fn get_cached_user(&self, user_id: u64) -> Option<Arc<User>> {
        self.users.get(&user_id).cloned()
    }

    /// Returns a single user, from cache when possible, otherwise fetching
    /// from the server.
    pub fn get_user(
        &mut self,
        user_id: u64,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<Arc<User>>> {
        if let Some(user) = self.users.get(&user_id).cloned() {
            return Task::ready(Ok(user));
        }

        let load_users = self.get_users(vec![user_id], cx);
        cx.spawn(move |this, mut cx| async move {
            load_users.await?;
            this.update(&mut cx, |this, _| {
                this.users
                    .get(&user_id)
                    .cloned()
                    .ok_or_else(|| anyhow!("server responded with no users"))
            })?
        })
    }

    pub fn current_user(&self) -> Option<Arc<User>> {
        self.current_user.borrow().clone()
    }

    pub fn watch_current_user(&self) -> watch::Receiver<Option<Arc<User>>> {
        self.current_user.clone()
    }

    /// Sends a users request, builds `User` values (fetching avatars
    /// concurrently), and caches the results. Returns an empty list when
    /// the client has been dropped.
    fn load_users(
        &mut self,
        request: impl RequestMessage<Response = UsersResponse>,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<Vec<Arc<User>>>> {
        let client = self.client.clone();
        let http = self.http.clone();
        cx.spawn(|this, mut cx| async move {
            if let Some(rpc) = client.upgrade() {
                let response = rpc.request(request).await.context("error loading users")?;
                let users = future::join_all(
                    response
                        .users
                        .into_iter()
                        .map(|user| User::new(user, http.as_ref())),
                )
                .await;

                this.update(&mut cx, |this, _| {
                    for user in &users {
                        this.users.insert(user.id, user.clone());
                    }
                })
                .ok();

                Ok(users)
            } else {
                Ok(Vec::new())
            }
        })
    }

    /// Replaces the participant index map, emitting an event only when the
    /// map actually changed.
    pub fn set_participant_indices(
        &mut self,
        participant_indices: HashMap<u64, ParticipantIndex>,
        cx: &mut ModelContext<Self>,
    ) {
        if participant_indices != self.participant_indices {
            self.participant_indices = participant_indices;
            cx.emit(Event::ParticipantIndicesChanged);
        }
    }

    pub fn participant_indices(&self) -> &HashMap<u64, ParticipantIndex> {
        &self.participant_indices
    }
}
|
||||
|
||||
impl User {
    /// Builds a `User` from its wire representation, fetching the avatar
    /// image over HTTP (failures are logged and leave `avatar` as `None`).
    async fn new(message: proto::User, http: &dyn HttpClient) -> Arc<Self> {
        Arc::new(User {
            id: message.id,
            github_login: message.github_login,
            avatar: fetch_avatar(http, &message.avatar_url).warn_on_err().await,
        })
    }
}
|
||||
|
||||
impl Contact {
    /// Builds a `Contact` from its wire representation, resolving the user
    /// through the store (cached or fetched).
    async fn from_proto(
        contact: proto::Contact,
        user_store: &Model<UserStore>,
        cx: &mut AsyncAppContext,
    ) -> Result<Self> {
        let user = user_store
            .update(cx, |user_store, cx| {
                user_store.get_user(contact.user_id, cx)
            })?
            .await?;
        Ok(Self {
            user,
            online: contact.online,
            busy: contact.busy,
        })
    }
}
|
||||
|
||||
impl Collaborator {
|
||||
pub fn from_proto(message: proto::Collaborator) -> Result<Self> {
|
||||
Ok(Self {
|
||||
peer_id: message.peer_id.ok_or_else(|| anyhow!("invalid peer id"))?,
|
||||
replica_id: message.replica_id as ReplicaId,
|
||||
user_id: message.user_id as UserId,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
// todo!("we probably don't need this now that we fetch")
/// Downloads a user's avatar and decodes it into BGRA image data.
///
/// Fails when the request errors, the response status is non-success, the
/// body can't be read, or the bytes aren't a recognizable image format.
async fn fetch_avatar(http: &dyn HttpClient, url: &str) -> Result<Arc<ImageData>> {
    let mut response = http
        .get(url, Default::default(), true)
        .await
        .map_err(|e| anyhow!("failed to send user avatar request: {}", e))?;

    if !response.status().is_success() {
        return Err(anyhow!("avatar request failed {:?}", response.status()));
    }

    let mut body = Vec::new();
    response
        .body_mut()
        .read_to_end(&mut body)
        .await
        .map_err(|e| anyhow!("failed to read user avatar response body: {}", e))?;
    // Sniff the format from the bytes rather than trusting the URL extension.
    let format = image::guess_format(&body)?;
    let image = image::load_from_memory_with_format(&body, format)?.into_bgra8();
    Ok(Arc::new(ImageData::new(image)))
}
|
||||
@@ -3,7 +3,7 @@ authors = ["Nathan Sobo <nathan@zed.dev>"]
|
||||
default-run = "collab"
|
||||
edition = "2021"
|
||||
name = "collab"
|
||||
version = "0.21.0"
|
||||
version = "0.28.0"
|
||||
publish = false
|
||||
|
||||
[[bin]]
|
||||
@@ -42,14 +42,12 @@ rand.workspace = true
|
||||
reqwest = { version = "0.11", features = ["json"], optional = true }
|
||||
scrypt = "0.7"
|
||||
smallvec.workspace = true
|
||||
# Remove fork dependency when a version with https://github.com/SeaQL/sea-orm/pull/1283 is released.
|
||||
sea-orm = { git = "https://github.com/zed-industries/sea-orm", rev = "18f4c691085712ad014a51792af75a9044bacee6", features = ["sqlx-postgres", "postgres-array", "runtime-tokio-rustls"] }
|
||||
sea-query = "0.27"
|
||||
sea-orm = { version = "0.12.x", features = ["sqlx-postgres", "postgres-array", "runtime-tokio-rustls", "with-uuid"] }
|
||||
serde.workspace = true
|
||||
serde_derive.workspace = true
|
||||
serde_json.workspace = true
|
||||
sha-1 = "0.9"
|
||||
sqlx = { version = "0.6", features = ["runtime-tokio-rustls", "postgres", "json", "time", "uuid", "any"] }
|
||||
sqlx = { version = "0.7", features = ["runtime-tokio-rustls", "postgres", "json", "time", "uuid", "any"] }
|
||||
time.workspace = true
|
||||
tokio = { version = "1", features = ["full"] }
|
||||
tokio-tungstenite = "0.17"
|
||||
@@ -59,6 +57,7 @@ toml.workspace = true
|
||||
tracing = "0.1.34"
|
||||
tracing-log = "0.1.3"
|
||||
tracing-subscriber = { version = "0.3.11", features = ["env-filter", "json"] }
|
||||
uuid.workspace = true
|
||||
|
||||
[dev-dependencies]
|
||||
audio = { path = "../audio" }
|
||||
@@ -73,6 +72,8 @@ fs = { path = "../fs", features = ["test-support"] }
|
||||
git = { path = "../git", features = ["test-support"] }
|
||||
live_kit_client = { path = "../live_kit_client", features = ["test-support"] }
|
||||
lsp = { path = "../lsp", features = ["test-support"] }
|
||||
node_runtime = { path = "../node_runtime" }
|
||||
notifications = { path = "../notifications", features = ["test-support"] }
|
||||
project = { path = "../project", features = ["test-support"] }
|
||||
rpc = { path = "../rpc", features = ["test-support"] }
|
||||
settings = { path = "../settings", features = ["test-support"] }
|
||||
@@ -87,9 +88,9 @@ env_logger.workspace = true
|
||||
indoc.workspace = true
|
||||
util = { path = "../util" }
|
||||
lazy_static.workspace = true
|
||||
sea-orm = { git = "https://github.com/zed-industries/sea-orm", rev = "18f4c691085712ad014a51792af75a9044bacee6", features = ["sqlx-sqlite"] }
|
||||
sea-orm = { version = "0.12.x", features = ["sqlx-sqlite"] }
|
||||
serde_json.workspace = true
|
||||
sqlx = { version = "0.6", features = ["sqlite"] }
|
||||
sqlx = { version = "0.7", features = ["sqlite"] }
|
||||
unindent.workspace = true
|
||||
|
||||
[features]
|
||||
|
||||
@@ -37,12 +37,14 @@ CREATE INDEX "index_contacts_user_id_b" ON "contacts" ("user_id_b");
|
||||
CREATE TABLE "rooms" (
|
||||
"id" INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
"live_kit_room" VARCHAR NOT NULL,
|
||||
"enviroment" VARCHAR,
|
||||
"channel_id" INTEGER REFERENCES channels (id) ON DELETE CASCADE
|
||||
);
|
||||
CREATE UNIQUE INDEX "index_rooms_on_channel_id" ON "rooms" ("channel_id");
|
||||
|
||||
CREATE TABLE "projects" (
|
||||
"id" INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
"room_id" INTEGER REFERENCES rooms (id) NOT NULL,
|
||||
"room_id" INTEGER REFERENCES rooms (id) ON DELETE CASCADE NOT NULL,
|
||||
"host_user_id" INTEGER REFERENCES users (id) NOT NULL,
|
||||
"host_connection_id" INTEGER,
|
||||
"host_connection_server_id" INTEGER REFERENCES servers (id) ON DELETE CASCADE,
|
||||
@@ -158,7 +160,8 @@ CREATE TABLE "room_participants" (
|
||||
"initial_project_id" INTEGER,
|
||||
"calling_user_id" INTEGER NOT NULL REFERENCES users (id),
|
||||
"calling_connection_id" INTEGER NOT NULL,
|
||||
"calling_connection_server_id" INTEGER REFERENCES servers (id) ON DELETE SET NULL
|
||||
"calling_connection_server_id" INTEGER REFERENCES servers (id) ON DELETE SET NULL,
|
||||
"participant_index" INTEGER
|
||||
);
|
||||
CREATE UNIQUE INDEX "index_room_participants_on_user_id" ON "room_participants" ("user_id");
|
||||
CREATE INDEX "index_room_participants_on_room_id" ON "room_participants" ("room_id");
|
||||
@@ -189,9 +192,13 @@ CREATE INDEX "index_followers_on_room_id" ON "followers" ("room_id");
|
||||
CREATE TABLE "channels" (
|
||||
"id" INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
"name" VARCHAR NOT NULL,
|
||||
"created_at" TIMESTAMP NOT NULL DEFAULT now
|
||||
"created_at" TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
"visibility" VARCHAR NOT NULL,
|
||||
"parent_path" TEXT
|
||||
);
|
||||
|
||||
CREATE INDEX "index_channels_on_parent_path" ON "channels" ("parent_path");
|
||||
|
||||
CREATE TABLE IF NOT EXISTS "channel_chat_participants" (
|
||||
"id" INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
"user_id" INTEGER NOT NULL REFERENCES users (id),
|
||||
@@ -210,19 +217,22 @@ CREATE TABLE IF NOT EXISTS "channel_messages" (
|
||||
"nonce" BLOB NOT NULL
|
||||
);
|
||||
CREATE INDEX "index_channel_messages_on_channel_id" ON "channel_messages" ("channel_id");
|
||||
CREATE UNIQUE INDEX "index_channel_messages_on_nonce" ON "channel_messages" ("nonce");
|
||||
CREATE UNIQUE INDEX "index_channel_messages_on_sender_id_nonce" ON "channel_messages" ("sender_id", "nonce");
|
||||
|
||||
CREATE TABLE "channel_paths" (
|
||||
"id_path" TEXT NOT NULL PRIMARY KEY,
|
||||
"channel_id" INTEGER NOT NULL REFERENCES channels (id) ON DELETE CASCADE
|
||||
CREATE TABLE "channel_message_mentions" (
|
||||
"message_id" INTEGER NOT NULL REFERENCES channel_messages (id) ON DELETE CASCADE,
|
||||
"start_offset" INTEGER NOT NULL,
|
||||
"end_offset" INTEGER NOT NULL,
|
||||
"user_id" INTEGER NOT NULL REFERENCES users (id) ON DELETE CASCADE,
|
||||
PRIMARY KEY(message_id, start_offset)
|
||||
);
|
||||
CREATE INDEX "index_channel_paths_on_channel_id" ON "channel_paths" ("channel_id");
|
||||
|
||||
CREATE TABLE "channel_members" (
|
||||
"id" INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
"channel_id" INTEGER NOT NULL REFERENCES channels (id) ON DELETE CASCADE,
|
||||
"user_id" INTEGER NOT NULL REFERENCES users (id) ON DELETE CASCADE,
|
||||
"admin" BOOLEAN NOT NULL DEFAULT false,
|
||||
"role" VARCHAR,
|
||||
"accepted" BOOLEAN NOT NULL DEFAULT false,
|
||||
"updated_at" TIMESTAMP NOT NULL DEFAULT now
|
||||
);
|
||||
@@ -288,3 +298,47 @@ CREATE TABLE "user_features" (
|
||||
CREATE UNIQUE INDEX "index_user_features_user_id_and_feature_id" ON "user_features" ("user_id", "feature_id");
|
||||
CREATE INDEX "index_user_features_on_user_id" ON "user_features" ("user_id");
|
||||
CREATE INDEX "index_user_features_on_feature_id" ON "user_features" ("feature_id");
|
||||
|
||||
|
||||
CREATE TABLE "observed_buffer_edits" (
|
||||
"user_id" INTEGER NOT NULL REFERENCES users (id) ON DELETE CASCADE,
|
||||
"buffer_id" INTEGER NOT NULL REFERENCES buffers (id) ON DELETE CASCADE,
|
||||
"epoch" INTEGER NOT NULL,
|
||||
"lamport_timestamp" INTEGER NOT NULL,
|
||||
"replica_id" INTEGER NOT NULL,
|
||||
PRIMARY KEY (user_id, buffer_id)
|
||||
);
|
||||
|
||||
CREATE UNIQUE INDEX "index_observed_buffers_user_and_buffer_id" ON "observed_buffer_edits" ("user_id", "buffer_id");
|
||||
|
||||
CREATE TABLE IF NOT EXISTS "observed_channel_messages" (
|
||||
"user_id" INTEGER NOT NULL REFERENCES users (id) ON DELETE CASCADE,
|
||||
"channel_id" INTEGER NOT NULL REFERENCES channels (id) ON DELETE CASCADE,
|
||||
"channel_message_id" INTEGER NOT NULL,
|
||||
PRIMARY KEY (user_id, channel_id)
|
||||
);
|
||||
|
||||
CREATE UNIQUE INDEX "index_observed_channel_messages_user_and_channel_id" ON "observed_channel_messages" ("user_id", "channel_id");
|
||||
|
||||
CREATE TABLE "notification_kinds" (
|
||||
"id" INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
"name" VARCHAR NOT NULL
|
||||
);
|
||||
|
||||
CREATE UNIQUE INDEX "index_notification_kinds_on_name" ON "notification_kinds" ("name");
|
||||
|
||||
CREATE TABLE "notifications" (
|
||||
"id" INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
"created_at" TIMESTAMP NOT NULL default CURRENT_TIMESTAMP,
|
||||
"recipient_id" INTEGER NOT NULL REFERENCES users (id) ON DELETE CASCADE,
|
||||
"kind" INTEGER NOT NULL REFERENCES notification_kinds (id),
|
||||
"entity_id" INTEGER,
|
||||
"content" TEXT,
|
||||
"is_read" BOOLEAN NOT NULL DEFAULT FALSE,
|
||||
"response" BOOLEAN
|
||||
);
|
||||
|
||||
CREATE INDEX
|
||||
"index_notifications_on_recipient_id_is_read_kind_entity_id"
|
||||
ON "notifications"
|
||||
("recipient_id", "is_read", "kind", "entity_id");
|
||||
|
||||
@@ -0,0 +1,19 @@
|
||||
CREATE TABLE IF NOT EXISTS "observed_buffer_edits" (
|
||||
"user_id" INTEGER NOT NULL REFERENCES users (id) ON DELETE CASCADE,
|
||||
"buffer_id" INTEGER NOT NULL REFERENCES buffers (id) ON DELETE CASCADE,
|
||||
"epoch" INTEGER NOT NULL,
|
||||
"lamport_timestamp" INTEGER NOT NULL,
|
||||
"replica_id" INTEGER NOT NULL,
|
||||
PRIMARY KEY (user_id, buffer_id)
|
||||
);
|
||||
|
||||
CREATE UNIQUE INDEX "index_observed_buffer_user_and_buffer_id" ON "observed_buffer_edits" ("user_id", "buffer_id");
|
||||
|
||||
CREATE TABLE IF NOT EXISTS "observed_channel_messages" (
|
||||
"user_id" INTEGER NOT NULL REFERENCES users (id) ON DELETE CASCADE,
|
||||
"channel_id" INTEGER NOT NULL REFERENCES channels (id) ON DELETE CASCADE,
|
||||
"channel_message_id" INTEGER NOT NULL,
|
||||
PRIMARY KEY (user_id, channel_id)
|
||||
);
|
||||
|
||||
CREATE UNIQUE INDEX "index_observed_channel_messages_user_and_channel_id" ON "observed_channel_messages" ("user_id", "channel_id");
|
||||
@@ -0,0 +1 @@
|
||||
ALTER TABLE room_participants ADD COLUMN participant_index INTEGER;
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user