Compare commits
340 Commits
@@ -8,4 +8,4 @@ crates/collab/static/styles.css
vendor/bin
assets/themes/*.json
assets/themes/internal/*.json
assets/themes/experiments/*.json
assets/themes/staff/*.json
.github/workflows/ci.yml (vendored), 12 changes

@@ -4,7 +4,7 @@ on:
push:
branches:
- main
- "v*"
- "v[0-9]+.[0-9]+.x"
tags:
- "v*"
pull_request:

@@ -41,16 +41,19 @@ jobs:
with:
clean: false
submodules: 'recursive'

- name: Run tests
run: cargo test --workspace --no-fail-fast

- name: Build collab
run: cargo build -p collab

- name: Build other binaries
run: cargo build --workspace --bins --all-features

- name: Generate license file
run: script/generate-licenses

bundle:
name: Bundle app
runs-on:

@@ -109,6 +112,9 @@ jobs:
exit 1
fi

- name: Generate license file
run: script/generate-licenses

- name: Create app bundle
run: script/bundle
.github/workflows/release_actions.yml (vendored), 22 changes

@@ -13,14 +13,28 @@ jobs:
webhook-url: ${{ secrets.DISCORD_WEBHOOK_URL }}
content: |
📣 Zed ${{ github.event.release.tag_name }} was just released!

Restart your Zed or head to https://zed.dev/releases/latest to grab it.

```md
# Changelog

${{ github.event.release.body }}
```
discourse_release:
if: ${{ ! github.event.release.prerelease }}
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- name: Install Node
uses: actions/setup-node@v2
with:
node-version: "19"
- run: >
node "./script/discourse_release"
${{ secrets.DISCOURSE_RELEASES_API_KEY }}
${{ github.event.release.tag_name }}
${{ github.event.release.body }}
mixpanel_release:
runs-on: ubuntu-latest
steps:

@@ -31,7 +45,7 @@ jobs:
architecture: "x64"
cache: "pip"
- run: pip install -r script/mixpanel_release/requirements.txt
- run: >
- run: >
python script/mixpanel_release/main.py
${{ github.event.release.tag_name }}
${{ secrets.MIXPANEL_PROJECT_ID }}
.gitignore (vendored), 4 changes

@@ -7,8 +7,8 @@
/crates/collab/static/styles.css
/vendor/bin
/assets/themes/*.json
/assets/themes/Internal/*.json
/assets/themes/Experiments/*.json
/assets/*licenses.md
/assets/themes/staff/*.json
**/venv
.build
Packages
Cargo.lock (generated), 62 changes

@@ -739,8 +739,7 @@ dependencies = [
[[package]]
name = "bromberg_sl2"
version = "0.6.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2ed88064f69518b7e3ea50ecfc1b61d43f19248618a377b95ae5c8b611134d4d"
source = "git+https://github.com/zed-industries/bromberg_sl2?rev=dac565a90e8f9245f48ff46225c915dc50f76920#dac565a90e8f9245f48ff46225c915dc50f76920"
dependencies = [
"digest 0.9.0",
"lazy_static",

@@ -820,8 +819,10 @@ dependencies = [
"async-broadcast",
"client",
"collections",
"fs",
"futures 0.3.25",
"gpui",
"language",
"live_kit_client",
"log",
"media",

@@ -1131,7 +1132,7 @@ dependencies = [

[[package]]
name = "collab"
version = "0.4.2"
version = "0.5.4"
dependencies = [
"anyhow",
"async-tungstenite",

@@ -1274,6 +1275,7 @@ source = "git+https://github.com/servo/core-foundation-rs?rev=079665882507dd5e2f
dependencies = [
"core-foundation-sys",
"libc",
"uuid 0.5.1",
]

[[package]]

@@ -2020,6 +2022,33 @@ dependencies = [
"instant",
]

[[package]]
name = "feedback"
version = "0.1.0"
dependencies = [
"anyhow",
"client",
"editor",
"futures 0.3.25",
"gpui",
"human_bytes",
"isahc",
"language",
"lazy_static",
"log",
"postage",
"project",
"search",
"serde",
"settings",
"sysinfo",
"theme",
"tree-sitter-markdown",
"urlencoding",
"util",
"workspace",
]

[[package]]
name = "file-per-thread-logger"
version = "0.1.5"

@@ -2561,9 +2590,9 @@ dependencies = [
"sum_tree",
"time 0.3.17",
"tiny-skia",
"tree-sitter",
"usvg",
"util",
"uuid 1.2.2",
"waker-fn",
]

@@ -3153,10 +3182,12 @@ dependencies = [
"tree-sitter-html",
"tree-sitter-javascript",
"tree-sitter-json 0.19.0",
"tree-sitter-markdown",
"tree-sitter-python",
"tree-sitter-ruby",
"tree-sitter-rust",
"tree-sitter-typescript",
"unicase",
"unindent",
"util",
]

@@ -4450,6 +4481,7 @@ dependencies = [
"aho-corasick",
"anyhow",
"async-trait",
"backtrace",
"client",
"clock",
"collections",

@@ -5509,6 +5541,7 @@ dependencies = [
"anyhow",
"collections",
"editor",
"futures 0.3.25",
"gpui",
"language",
"log",

@@ -5519,6 +5552,7 @@ dependencies = [
"serde_json",
"settings",
"smallvec",
"smol",
"theme",
"unindent",
"util",

@@ -5981,7 +6015,9 @@ dependencies = [
"libsqlite3-sys",
"parking_lot 0.11.2",
"smol",
"sqlez_macros",
"thread_local",
"uuid 1.2.2",
]

[[package]]

@@ -6236,9 +6272,9 @@ dependencies = [

[[package]]
name = "sysinfo"
version = "0.27.1"
version = "0.27.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ccb297c0afb439440834b4bcf02c5c9da8ec2e808e70f36b0d8e815ff403bd24"
checksum = "1620f9573034c573376acc550f3b9a2be96daeb08abb3c12c8523e1cee06e80f"
dependencies = [
"cfg-if 1.0.0",
"core-foundation-sys",

@@ -6431,6 +6467,7 @@ dependencies = [
"settings",
"smol",
"theme",
"util",
"workspace",
]

@@ -7292,6 +7329,12 @@ dependencies = [
"tempdir",
]

[[package]]
name = "uuid"
version = "0.5.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bcc7e3b898aa6f6c08e5295b6c89258d1331e9ac578cc992fb818759951bdc22"

[[package]]
name = "uuid"
version = "0.8.2"

@@ -8137,6 +8180,7 @@ dependencies = [
"smallvec",
"theme",
"util",
"uuid 1.2.2",
]

[[package]]

@@ -8184,7 +8228,7 @@ checksum = "09041cd90cf85f7f8b2df60c646f853b7f535ce68f85244eb6731cf89fa498ec"

[[package]]
name = "zed"
version = "0.68.0"
version = "0.72.2"
dependencies = [
"activity_indicator",
"anyhow",

@@ -8209,6 +8253,7 @@ dependencies = [
"easy-parallel",
"editor",
"env_logger",
"feedback",
"file_finder",
"fs",
"fsevent",

@@ -8216,7 +8261,6 @@ dependencies = [
"fuzzy",
"go_to_line",
"gpui",
"human_bytes",
"ignore",
"image",
"indexmap",

@@ -8250,7 +8294,6 @@ dependencies = [
"smallvec",
"smol",
"sum_tree",
"sysinfo",
"tempdir",
"terminal_view",
"text",

@@ -8281,6 +8324,7 @@ dependencies = [
"url",
"urlencoding",
"util",
"uuid 1.2.2",
"vim",
"workspace",
]
@@ -17,6 +17,7 @@ members = [
"crates/diagnostics",
"crates/drag_and_drop",
"crates/editor",
"crates/feedback",
"crates/file_finder",
"crates/fs",
"crates/fsevent",

@@ -83,5 +84,3 @@ split-debuginfo = "unpacked"

[profile.release]
debug = true
@@ -5,6 +5,7 @@ WORKDIR app
COPY . .

# Compile collab server
ARG CARGO_PROFILE_RELEASE_PANIC=abort
RUN --mount=type=cache,target=./script/node_modules \
--mount=type=cache,target=/usr/local/cargo/registry \
--mount=type=cache,target=./target \
README.md, 79 changes

@@ -49,30 +49,14 @@ script/zed-with-local-servers --release

If you trigger `cmd-alt-i`, Zed will copy a JSON representation of the current window contents to the clipboard. You can paste this in a tool like [DJSON](https://chrome.google.com/webstore/detail/djson-json-viewer-formatt/chaeijjekipecdajnijdldjjipaegdjc?hl=en) to navigate the state of on-screen elements in a structured way.

### Staff Only Features
### Licensing

Many features (e.g. the terminal) take significant time and effort before they are polished enough to be released to even Alpha users. But Zed's team workflow relies on fast, daily PRs and there can be large merge conflicts for feature branches that diverge for a few days. To bridge this gap, there is a `staff_mode` field in the Settings that staff can set to enable these unpolished or incomplete features. Note that this setting isn't leaked via autocompletion, but there is no mechanism to stop users from setting this anyway. As initialization of Zed components is only done once, on startup, setting `staff_mode` may require a restart to take effect. You can set staff only key bindings in the `assets/keymaps/internal.json` file, and add staff only themes in the `styles/src/themes/internal` directory.
We use [`cargo-about`](https://github.com/EmbarkStudios/cargo-about) to automatically comply with open source licenses. If CI is failing, check the following:

### Experimental Features
- Is it showing a `no license specified` error for a crate you've created? If so, add `publish = false` under `[package]` in your crate's Cargo.toml.
- Is the error `failed to satisfy license requirements` for a dependency? If so, first determine what license the project has and whether this system is sufficient to comply with this license's requirements. If you're unsure, ask a lawyer. Once you've verified that this system is acceptable, add the license's SPDX identifier to the `accepted` array in `script/licenses/zed-licenses.toml`.
- Is `cargo-about` unable to find the license for a dependency? If so, add a clarification field at the end of `script/licenses/zed-licenses.toml`, as specified in the [cargo-about book](https://embarkstudios.github.io/cargo-about/cli/generate/config.html#crate-configuration).

A user facing feature flag can be added to Zed by:

* Adding a setting to the crates/settings/src/settings.rs FeatureFlags struct (see the sketch after this section). Use a boolean for a simple on/off, or use a struct to experiment with different configuration options.
* If the feature needs keybindings, add a file to the `assets/keymaps/experiments/` folder, then update the `FeatureFlags::keymap_files()` method to check for your feature's flag and add its keybindings' path to the method's list.
* If you want to add an experimental theme, add it to the `styles/src/themes/experiments` folder

The Settings global should be initialized with the user's feature flags by the time the feature's `init(cx)` equivalent is called.

To promote an experimental feature to a full feature:

* If this is an experimental theme, move the theme file from the `styles/src/themes/experiments` folder to the `styles/src/themes/` folder
* Take the feature's settings (if any) and add them under a new variable in the Settings struct. Don't forget to add a `merge()` call in `set_user_settings()`!
* Take the feature's keybindings and add them to the default.json (or equivalent) file
* Remove the file from the `FeatureFlags::keymap_files()` method
* Remove the conditional in the feature's `init(cx)` equivalent.

That's it 😸
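The feature-flag workflow above can be captured in a minimal sketch. This is illustrative only: the `FeatureFlags` struct and the `keymap_files()` method are the names the README itself uses, but the field name and keymap path below are hypothetical.

```rust
// Hypothetical sketch of an addition to crates/settings/src/settings.rs; names are illustrative.
#[derive(Clone, Debug, Default)]
pub struct FeatureFlags {
    // A simple on/off experiment flag (hypothetical name).
    pub experimental_themes: bool,
}

impl FeatureFlags {
    // Returns the extra keymap files to load for the experiments that are enabled.
    pub fn keymap_files(&self) -> Vec<&'static str> {
        let mut files = Vec::new();
        if self.experimental_themes {
            // Hypothetical path under assets/keymaps/experiments/.
            files.push("keymaps/experiments/experimental_themes.json");
        }
        files
    }
}
```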
### Wasm Plugins

@@ -83,56 +67,3 @@ rustup target add wasm32-wasi
```

Plugins can be found in the `plugins` folder in the root. For more information about how plugins work, check the [Plugin Guide](./crates/plugin_runtime/README.md) in `crates/plugin_runtime/README.md`.

## Roadmap

We will organize our efforts around the following major milestones. We'll create tracking issues for each of these milestones to detail the individual tasks that comprise them.

### Minimal text editor

[Tracking issue](https://github.com/zed-industries/zed/issues/2)

Ship a minimal text editor to investors and other insiders. It should be extremely fast and stable, but all it can do is open, edit, and save text files, making it potentially useful for basic editing but not for real coding.

Establish basic infrastructure for building the app bundle and uploading an artifact. Once this is released, we should regularly distribute updates as features land.

### Collaborative code editor for internal use

[Tracking issue](https://github.com/zed-industries/zed/issues/6)

Turn the minimal text editor into a collaborative _code_ editor. This will include the minimal features that the Zed team needs to collaborate in Zed to build Zed without net loss in developer productivity. This includes productivity-critical features such as:

- Syntax highlighting and syntax-aware editing and navigation
- The ability to see and edit non-local working copies of a repository
- Language server support for Rust code navigation, refactoring, diagnostics, etc.
- Project browsing and project-wide search and replace

We want to tackle collaboration fairly early so that the rest of the design of the product can flow around that assumption. We could probably produce a single-player code editor more quickly, but at the risk of having collaboration feel more "bolted on" when we eventually add it.

### Private alpha for Rust teams on macOS

The "minimal" milestones were about getting Zed to a point where the Zed team could use Zed productively to build Zed. What features are required for someone outside the company to use Zed to productively work on another project that is also written in Rust?

This includes infrastructure like auto-updates, error reporting, and metrics collection. It also includes some amount of polish to make the tool more discoverable for someone that didn't write it, such as a UI for updating settings and key bindings. We may also need to enhance the server to support user authentication and related concerns.

The initial target audience is like us: a small team working in Rust that's potentially interested in collaborating. As the alpha proceeds, we can work with teams of different sizes.

### Private beta for Rust teams on macOS

Once we're getting sufficiently positive feedback from our initial alpha users, we widen the audience by letting people share invites. Now may be a good time to get Zed running on the web, so that it's extremely easy for a Zed user to share a link and be collaborating in seconds. Once someone is using Zed on the Web, we'll let them register for the private beta and download the native binary if they're on macOS.

### Expand to other languages

Depending on how the Rust beta is going, focus hard on dominating another niche language such as Elixir, or getting a foothold within a niche of a larger language, such as React/TypeScript. Alternatively, go wide at this point and add decent support for several widely-used languages such as Python, Ruby, TypeScript, etc. This would entail taking 1-2 weeks per language and making sure we ship a solid experience based on a publicly-available language server. Each language has slightly different development practices, so we need to make sure Zed's UX meshes well with those practices.

### Future directions

Each of these sections could probably be broken into multiple milestones, but this part of the roadmap is too far in the future to go into that level of detail at this point.

#### Expand to other platforms

Support Linux and Windows. We'll probably want to hire at least one person that prefers to work on each respective platform and have them spearhead the effort to port Zed to that platform. Once they've done so, they can join the general development effort while ensuring the user experience stays good on that platform.

#### Expand on collaboration

To start with, we'll focus on synchronous collaboration because that's where we're most differentiated, but there's no reason we have to limit ourselves to that. How can our tool facilitate collaboration generally, whether it's sync or async? What would it take for a team to go 100% Zed and collaborate fully within the tool? If we haven't added it already, basic Git support would be nice.
@@ -38,7 +38,7 @@
"cmd-n": "workspace::NewFile",
"cmd-shift-n": "workspace::NewWindow",
"cmd-o": "workspace::Open",
"alt-cmd-o": "recent_projects::Toggle",
"alt-cmd-o": "projects::OpenRecent",
"ctrl-`": "workspace::NewTerminal"
}
},

@@ -186,10 +186,10 @@
}
},
{
"context": "BufferSearchBar",
"context": "BufferSearchBar > Editor",
"bindings": {
"escape": "buffer_search::Dismiss",
"cmd-f": "buffer_search::FocusEditor",
"tab": "buffer_search::FocusEditor",
"enter": "search::SelectNextMatch",
"shift-enter": "search::SelectPrevMatch"
}
@@ -1 +0,0 @@
[]

@@ -1,6 +1,6 @@
[
{
"context": "Editor && VimControl",
"context": "Editor && VimControl && !VimWaiting",
"bindings": {
"g": [
"vim::PushOperator",

@@ -53,6 +53,42 @@
}
],
"%": "vim::Matching",
"ctrl-y": [
"vim::Scroll",
"LineUp"
],
"f": [
"vim::PushOperator",
{
"FindForward": {
"before": false
}
}
],
"t": [
"vim::PushOperator",
{
"FindForward": {
"before": true
}
}
],
"shift-f": [
"vim::PushOperator",
{
"FindBackward": {
"after": false
}
}
],
"shift-t": [
"vim::PushOperator",
{
"FindBackward": {
"after": true
}
}
],
"escape": "editor::Cancel",
"0": "vim::StartOfLine", // When no number operator present, use start of line motion
"1": [

@@ -94,7 +130,7 @@
}
},
{
"context": "Editor && vim_mode == normal && vim_operator == none",
"context": "Editor && vim_mode == normal && vim_operator == none && !VimWaiting",
"bindings": {
"c": [
"vim::PushOperator",

@@ -174,9 +210,9 @@
"vim::Scroll",
"LineDown"
],
"ctrl-y": [
"vim::Scroll",
"LineUp"
"r": [
"vim::PushOperator",
"Replace"
]
}
},

@@ -255,14 +291,18 @@
}
},
{
"context": "Editor && vim_mode == visual",
"context": "Editor && vim_mode == visual && !VimWaiting",
"bindings": {
"u": "editor::Undo",
"c": "vim::VisualChange",
"d": "vim::VisualDelete",
"x": "vim::VisualDelete",
"y": "vim::VisualYank",
"p": "vim::VisualPaste"
"p": "vim::VisualPaste",
"r": [
"vim::PushOperator",
"Replace"
]
}
},
{

@@ -271,5 +311,11 @@
"escape": "vim::NormalBefore",
"ctrl-c": "vim::NormalBefore"
}
},
{
"context": "Editor && VimWaiting",
"bindings": {
"*": "gpui::KeyPressed"
}
}
]
@@ -13,6 +13,8 @@
// Whether to show the informational hover box when moving the mouse
// over symbols in the editor.
"hover_popover_enabled": true,
// Whether to confirm before quitting Zed.
"confirm_quit": false,
// Whether the cursor blinks in the editor.
"cursor_blink": true,
// Whether to pop the completions menu while typing in an editor without

@@ -79,6 +81,13 @@
"hard_tabs": false,
// How many columns a tab should occupy.
"tab_size": 4,
// Control what info Zed sends to our servers
"telemetry": {
// Send debug info like crash reports.
"diagnostics": true,
// Send anonymized usage data like what languages you're using Zed with.
"metrics": true
},
// Git gutter behavior configuration.
"git": {
// Control whether the git gutter is shown. May take 2 values:
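On the Rust side, the `telemetry` block above is consumed through a `TelemetrySettings` value: the client and telemetry changes later in this diff import `settings::TelemetrySettings` and read it via `cx.global::<Settings>().telemetry()`. A minimal sketch of how such a block could be deserialized, assuming serde; the actual struct in `crates/settings` is not shown in this diff and may differ:

```rust
use serde::Deserialize;

// Hedged sketch: mirrors the "telemetry" block in the default settings above,
// not the real TelemetrySettings definition from crates/settings.
#[derive(Clone, Copy, Debug, Deserialize)]
pub struct TelemetrySettingsContent {
    // Send debug info like crash reports.
    #[serde(default = "default_true")]
    pub diagnostics: bool,
    // Send anonymized usage data.
    #[serde(default = "default_true")]
    pub metrics: bool,
}

fn default_true() -> bool {
    true
}
```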
@@ -2,6 +2,7 @@
name = "activity_indicator"
version = "0.1.0"
edition = "2021"
publish = false

[lib]
path = "src/activity_indicator.rs"
@@ -2,6 +2,7 @@
name = "assets"
version = "0.1.0"
edition = "2021"
publish = false

[lib]
path = "src/assets.rs"
@@ -2,6 +2,7 @@
name = "auto_update"
version = "0.1.0"
edition = "2021"
publish = false

[lib]
path = "src/auto_update.rs"
@@ -2,15 +2,15 @@ mod update_notification;

use anyhow::{anyhow, Context, Result};
use client::{http::HttpClient, ZED_SECRET_CLIENT_TOKEN};
use client::{ZED_APP_PATH, ZED_APP_VERSION};
use db::kvp::KEY_VALUE_STORE;
use gpui::{
actions, platform::AppVersion, AppContext, AsyncAppContext, Entity, ModelContext, ModelHandle,
MutableAppContext, Task, WeakViewHandle,
};
use lazy_static::lazy_static;
use serde::Deserialize;
use smol::{fs::File, io::AsyncReadExt, process::Command};
use std::{env, ffi::OsString, path::PathBuf, sync::Arc, time::Duration};
use std::{ffi::OsString, sync::Arc, time::Duration};
use update_notification::UpdateNotification;
use util::channel::ReleaseChannel;
use workspace::Workspace;

@@ -18,13 +18,6 @@ use workspace::Workspace;
const SHOULD_SHOW_UPDATE_NOTIFICATION_KEY: &str = "auto-updater-should-show-updated-notification";
const POLL_INTERVAL: Duration = Duration::from_secs(60 * 60);

lazy_static! {
pub static ref ZED_APP_VERSION: Option<AppVersion> = env::var("ZED_APP_VERSION")
.ok()
.and_then(|v| v.parse().ok());
pub static ref ZED_APP_PATH: Option<PathBuf> = env::var("ZED_APP_PATH").ok().map(PathBuf::from);
}

actions!(auto_update, [Check, DismissErrorMessage, ViewReleaseNotes]);

#[derive(Clone, Copy, PartialEq, Eq)]
@@ -2,6 +2,7 @@
name = "breadcrumbs"
version = "0.1.0"
edition = "2021"
publish = false

[lib]
path = "src/breadcrumbs.rs"
@@ -2,6 +2,7 @@
name = "call"
version = "0.1.0"
edition = "2021"
publish = false

[lib]
path = "src/call.rs"

@@ -23,6 +24,8 @@ collections = { path = "../collections" }
gpui = { path = "../gpui" }
log = "0.4"
live_kit_client = { path = "../live_kit_client" }
fs = { path = "../fs" }
language = { path = "../language" }
media = { path = "../media" }
project = { path = "../project" }
util = { path = "../util" }

@@ -34,6 +37,8 @@ postage = { version = "0.4.1", features = ["futures-traits"] }

[dev-dependencies]
client = { path = "../client", features = ["test-support"] }
fs = { path = "../fs", features = ["test-support"] }
language = { path = "../language", features = ["test-support"] }
collections = { path = "../collections", features = ["test-support"] }
gpui = { path = "../gpui", features = ["test-support"] }
live_kit_client = { path = "../live_kit_client", features = ["test-support"] }
|
||||
@@ -7,11 +7,13 @@ use client::{
|
||||
proto::{self, PeerId},
|
||||
Client, TypedEnvelope, User, UserStore,
|
||||
};
|
||||
use collections::{BTreeMap, HashSet};
|
||||
use collections::{BTreeMap, HashMap, HashSet};
|
||||
use fs::Fs;
|
||||
use futures::{FutureExt, StreamExt};
|
||||
use gpui::{
|
||||
AsyncAppContext, Entity, ModelContext, ModelHandle, MutableAppContext, Task, WeakModelHandle,
|
||||
};
|
||||
use language::LanguageRegistry;
|
||||
use live_kit_client::{LocalTrackPublication, LocalVideoTrack, RemoteVideoTrackUpdate};
|
||||
use postage::stream::Stream;
|
||||
use project::Project;
|
||||
@@ -43,6 +45,8 @@ pub struct Room {
|
||||
id: u64,
|
||||
live_kit: Option<LiveKitRoom>,
|
||||
status: RoomStatus,
|
||||
shared_projects: HashSet<WeakModelHandle<Project>>,
|
||||
joined_projects: HashSet<WeakModelHandle<Project>>,
|
||||
local_participant: LocalParticipant,
|
||||
remote_participants: BTreeMap<u64, RemoteParticipant>,
|
||||
pending_participants: Vec<Arc<User>>,
|
||||
@@ -62,7 +66,7 @@ impl Entity for Room {
|
||||
fn release(&mut self, _: &mut MutableAppContext) {
|
||||
if self.status.is_online() {
|
||||
log::info!("room was released, sending leave message");
|
||||
self.client.send(proto::LeaveRoom {}).log_err();
|
||||
let _ = self.client.send(proto::LeaveRoom {});
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -132,6 +136,8 @@ impl Room {
|
||||
id,
|
||||
live_kit: live_kit_room,
|
||||
status: RoomStatus::Online,
|
||||
shared_projects: Default::default(),
|
||||
joined_projects: Default::default(),
|
||||
participant_user_ids: Default::default(),
|
||||
local_participant: Default::default(),
|
||||
remote_participants: Default::default(),
|
||||
@@ -234,6 +240,22 @@ impl Room {
|
||||
cx.notify();
|
||||
cx.emit(Event::Left);
|
||||
log::info!("leaving room");
|
||||
|
||||
for project in self.shared_projects.drain() {
|
||||
if let Some(project) = project.upgrade(cx) {
|
||||
project.update(cx, |project, cx| {
|
||||
project.unshare(cx).log_err();
|
||||
});
|
||||
}
|
||||
}
|
||||
for project in self.joined_projects.drain() {
|
||||
if let Some(project) = project.upgrade(cx) {
|
||||
project.update(cx, |project, cx| {
|
||||
project.disconnected_from_host(cx);
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
self.status = RoomStatus::Offline;
|
||||
self.remote_participants.clear();
|
||||
self.pending_participants.clear();
|
||||
@@ -257,16 +279,15 @@ impl Room {
|
||||
.next()
|
||||
.await
|
||||
.map_or(false, |s| s.is_connected());
|
||||
|
||||
// Even if we're initially connected, any future change of the status means we momentarily disconnected.
|
||||
if !is_connected || client_status.next().await.is_some() {
|
||||
log::info!("detected client disconnection");
|
||||
let room_id = this
|
||||
.upgrade(&cx)
|
||||
this.upgrade(&cx)
|
||||
.ok_or_else(|| anyhow!("room was dropped"))?
|
||||
.update(&mut cx, |this, cx| {
|
||||
this.status = RoomStatus::Rejoining;
|
||||
cx.notify();
|
||||
this.id
|
||||
});
|
||||
|
||||
// Wait for client to re-establish a connection to the server.
|
||||
@@ -279,31 +300,21 @@ impl Room {
|
||||
"waiting for client status change, remaining attempts {}",
|
||||
remaining_attempts
|
||||
);
|
||||
if let Some(status) = client_status.next().await {
|
||||
if status.is_connected() {
|
||||
log::info!("client reconnected, attempting to rejoin room");
|
||||
let rejoin_room = async {
|
||||
let response =
|
||||
client.request(proto::JoinRoom { id: room_id }).await?;
|
||||
let room_proto =
|
||||
response.room.ok_or_else(|| anyhow!("invalid room"))?;
|
||||
this.upgrade(&cx)
|
||||
.ok_or_else(|| anyhow!("room was dropped"))?
|
||||
.update(&mut cx, |this, cx| {
|
||||
this.status = RoomStatus::Online;
|
||||
this.apply_room_update(room_proto, cx)
|
||||
})?;
|
||||
anyhow::Ok(())
|
||||
};
|
||||
let Some(status) = client_status.next().await else { break };
|
||||
if status.is_connected() {
|
||||
log::info!("client reconnected, attempting to rejoin room");
|
||||
|
||||
if rejoin_room.await.log_err().is_some() {
|
||||
return true;
|
||||
} else {
|
||||
remaining_attempts -= 1;
|
||||
}
|
||||
let Some(this) = this.upgrade(&cx) else { break };
|
||||
if this
|
||||
.update(&mut cx, |this, cx| this.rejoin(cx))
|
||||
.await
|
||||
.log_err()
|
||||
.is_some()
|
||||
{
|
||||
return true;
|
||||
} else {
|
||||
remaining_attempts -= 1;
|
||||
}
|
||||
} else {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
false
|
||||
@@ -340,6 +351,82 @@ impl Room {
|
||||
}
|
||||
}
|
||||
|
||||
fn rejoin(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
|
||||
let mut projects = HashMap::default();
|
||||
let mut reshared_projects = Vec::new();
|
||||
let mut rejoined_projects = Vec::new();
|
||||
self.shared_projects.retain(|project| {
|
||||
if let Some(handle) = project.upgrade(cx) {
|
||||
let project = handle.read(cx);
|
||||
if let Some(project_id) = project.remote_id() {
|
||||
projects.insert(project_id, handle.clone());
|
||||
reshared_projects.push(proto::UpdateProject {
|
||||
project_id,
|
||||
worktrees: project.worktree_metadata_protos(cx),
|
||||
});
|
||||
return true;
|
||||
}
|
||||
}
|
||||
false
|
||||
});
|
||||
self.joined_projects.retain(|project| {
|
||||
if let Some(handle) = project.upgrade(cx) {
|
||||
let project = handle.read(cx);
|
||||
if let Some(project_id) = project.remote_id() {
|
||||
projects.insert(project_id, handle.clone());
|
||||
rejoined_projects.push(proto::RejoinProject {
|
||||
id: project_id,
|
||||
worktrees: project
|
||||
.worktrees(cx)
|
||||
.map(|worktree| {
|
||||
let worktree = worktree.read(cx);
|
||||
proto::RejoinWorktree {
|
||||
id: worktree.id().to_proto(),
|
||||
scan_id: worktree.completed_scan_id() as u64,
|
||||
}
|
||||
})
|
||||
.collect(),
|
||||
});
|
||||
}
|
||||
return true;
|
||||
}
|
||||
false
|
||||
});
|
||||
|
||||
let response = self.client.request(proto::RejoinRoom {
|
||||
id: self.id,
|
||||
reshared_projects,
|
||||
rejoined_projects,
|
||||
});
|
||||
|
||||
cx.spawn(|this, mut cx| async move {
|
||||
let response = response.await?;
|
||||
let room_proto = response.room.ok_or_else(|| anyhow!("invalid room"))?;
|
||||
this.update(&mut cx, |this, cx| {
|
||||
this.status = RoomStatus::Online;
|
||||
this.apply_room_update(room_proto, cx)?;
|
||||
|
||||
for reshared_project in response.reshared_projects {
|
||||
if let Some(project) = projects.get(&reshared_project.id) {
|
||||
project.update(cx, |project, cx| {
|
||||
project.reshared(reshared_project, cx).log_err();
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
for rejoined_project in response.rejoined_projects {
|
||||
if let Some(project) = projects.get(&rejoined_project.id) {
|
||||
project.update(cx, |project, cx| {
|
||||
project.rejoined(rejoined_project, cx).log_err();
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
anyhow::Ok(())
|
||||
})
|
||||
})
|
||||
}
|
||||
|
||||
pub fn id(&self) -> u64 {
|
||||
self.id
|
||||
}
|
||||
@@ -454,6 +541,20 @@ impl Room {
|
||||
}
|
||||
|
||||
for unshared_project_id in old_projects.difference(&new_projects) {
|
||||
this.joined_projects.retain(|project| {
|
||||
if let Some(project) = project.upgrade(cx) {
|
||||
project.update(cx, |project, cx| {
|
||||
if project.remote_id() == Some(*unshared_project_id) {
|
||||
project.disconnected_from_host(cx);
|
||||
false
|
||||
} else {
|
||||
true
|
||||
}
|
||||
})
|
||||
} else {
|
||||
false
|
||||
}
|
||||
});
|
||||
cx.emit(Event::RemoteProjectUnshared {
|
||||
project_id: *unshared_project_id,
|
||||
});
|
||||
@@ -630,6 +731,32 @@ impl Room {
|
||||
})
|
||||
}
|
||||
|
||||
pub fn join_project(
|
||||
&mut self,
|
||||
id: u64,
|
||||
language_registry: Arc<LanguageRegistry>,
|
||||
fs: Arc<dyn Fs>,
|
||||
cx: &mut ModelContext<Self>,
|
||||
) -> Task<Result<ModelHandle<Project>>> {
|
||||
let client = self.client.clone();
|
||||
let user_store = self.user_store.clone();
|
||||
cx.spawn(|this, mut cx| async move {
|
||||
let project =
|
||||
Project::remote(id, client, user_store, language_registry, fs, cx.clone()).await?;
|
||||
this.update(&mut cx, |this, cx| {
|
||||
this.joined_projects.retain(|project| {
|
||||
if let Some(project) = project.upgrade(cx) {
|
||||
!project.read(cx).is_read_only()
|
||||
} else {
|
||||
false
|
||||
}
|
||||
});
|
||||
this.joined_projects.insert(project.downgrade());
|
||||
});
|
||||
Ok(project)
|
||||
})
|
||||
}
|
||||
|
||||
pub(crate) fn share_project(
|
||||
&mut self,
|
||||
project: ModelHandle<Project>,
|
||||
@@ -641,31 +768,18 @@ impl Room {
|
||||
|
||||
let request = self.client.request(proto::ShareProject {
|
||||
room_id: self.id(),
|
||||
worktrees: project
|
||||
.read(cx)
|
||||
.worktrees(cx)
|
||||
.map(|worktree| {
|
||||
let worktree = worktree.read(cx);
|
||||
proto::WorktreeMetadata {
|
||||
id: worktree.id().to_proto(),
|
||||
root_name: worktree.root_name().into(),
|
||||
visible: worktree.is_visible(),
|
||||
abs_path: worktree.abs_path().to_string_lossy().into(),
|
||||
}
|
||||
})
|
||||
.collect(),
|
||||
worktrees: project.read(cx).worktree_metadata_protos(cx),
|
||||
});
|
||||
cx.spawn(|this, mut cx| async move {
|
||||
let response = request.await?;
|
||||
|
||||
project.update(&mut cx, |project, cx| {
|
||||
project
|
||||
.shared(response.project_id, cx)
|
||||
.detach_and_log_err(cx)
|
||||
});
|
||||
project.shared(response.project_id, cx)
|
||||
})?;
|
||||
|
||||
// If the user's location is in this project, it changes from UnsharedProject to SharedProject.
|
||||
this.update(&mut cx, |this, cx| {
|
||||
this.shared_projects.insert(project.downgrade());
|
||||
let active_project = this.local_participant.active_project.as_ref();
|
||||
if active_project.map_or(false, |location| *location == project) {
|
||||
this.set_location(Some(&project), cx)
|
||||
|
||||
@@ -2,6 +2,7 @@
name = "cli"
version = "0.1.0"
edition = "2021"
publish = false

[lib]
path = "src/cli.rs"
@@ -9,7 +9,13 @@ use core_foundation::{
use core_services::{kLSLaunchDefaults, LSLaunchURLSpec, LSOpenFromURLSpec, TCFType};
use ipc_channel::ipc::{IpcOneShotServer, IpcReceiver, IpcSender};
use serde::Deserialize;
use std::{ffi::OsStr, fs, path::PathBuf, ptr};
use std::{
ffi::OsStr,
fs::{self, OpenOptions},
io,
path::{Path, PathBuf},
ptr,
};

#[derive(Parser)]
#[clap(name = "zed", global_setting(clap::AppSettings::NoAutoVersion))]

@@ -54,6 +60,12 @@ fn main() -> Result<()> {
return Ok(());
}

for path in args.paths.iter() {
if !path.exists() {
touch(path.as_path())?;
}
}

let (tx, rx) = launch_app(bundle_path)?;

tx.send(CliRequest::Open {

@@ -77,6 +89,13 @@ fn main() -> Result<()> {
Ok(())
}

fn touch(path: &Path) -> io::Result<()> {
match OpenOptions::new().create(true).write(true).open(path) {
Ok(_) => Ok(()),
Err(e) => Err(e),
}
}

fn locate_bundle() -> Result<PathBuf> {
let cli_path = std::env::current_exe()?.canonicalize()?;
let mut app_path = cli_path.clone();
@@ -2,6 +2,7 @@
name = "client"
version = "0.1.0"
edition = "2021"
publish = false

[lib]
path = "src/client.rs"
@@ -15,7 +15,7 @@ use futures::{future::LocalBoxFuture, AsyncReadExt, FutureExt, SinkExt, StreamEx
use gpui::{
actions,
serde_json::{self, Value},
AnyModelHandle, AnyViewHandle, AnyWeakModelHandle, AnyWeakViewHandle, AppContext,
AnyModelHandle, AnyViewHandle, AnyWeakModelHandle, AnyWeakViewHandle, AppContext, AppVersion,
AsyncAppContext, Entity, ModelHandle, MutableAppContext, Task, View, ViewContext, ViewHandle,
};
use http::HttpClient;

@@ -25,6 +25,7 @@ use postage::watch;
use rand::prelude::*;
use rpc::proto::{AnyTypedEnvelope, EntityMessage, EnvelopedMessage, PeerId, RequestMessage};
use serde::Deserialize;
use settings::{Settings, TelemetrySettings};
use std::{
any::TypeId,
collections::HashMap,

@@ -54,6 +55,11 @@ lazy_static! {
pub static ref ADMIN_API_TOKEN: Option<String> = std::env::var("ZED_ADMIN_API_TOKEN")
.ok()
.and_then(|s| if s.is_empty() { None } else { Some(s) });
pub static ref ZED_APP_VERSION: Option<AppVersion> = std::env::var("ZED_APP_VERSION")
.ok()
.and_then(|v| v.parse().ok());
pub static ref ZED_APP_PATH: Option<PathBuf> =
std::env::var("ZED_APP_PATH").ok().map(PathBuf::from);
}

pub const ZED_SECRET_CLIENT_TOKEN: &str = "618033988749894";

@@ -423,7 +429,9 @@ impl Client {
}));
}
Status::SignedOut | Status::UpgradeRequired => {
self.telemetry.set_authenticated_user_info(None, false);
let telemetry_settings = cx.read(|cx| cx.global::<Settings>().telemetry());
self.telemetry
.set_authenticated_user_info(None, false, telemetry_settings);
state._reconnect_task.take();
}
_ => {}

@@ -706,7 +714,13 @@ impl Client {
credentials = read_credentials_from_keychain(cx);
read_from_keychain = credentials.is_some();
if read_from_keychain {
self.report_event("read credentials from keychain", Default::default());
cx.read(|cx| {
self.report_event(
"read credentials from keychain",
Default::default(),
cx.global::<Settings>().telemetry(),
);
});
}
}
if credentials.is_none() {

@@ -997,6 +1011,8 @@ impl Client {
let executor = cx.background();
let telemetry = self.telemetry.clone();
let http = self.http.clone();
let metrics_enabled = cx.read(|cx| cx.global::<Settings>().telemetry());

executor.clone().spawn(async move {
// Generate a pair of asymmetric encryption keys. The public key will be used by the
// zed server to encrypt the user's access token, so that it can't be intercepted by

@@ -1079,7 +1095,11 @@ impl Client {
.context("failed to decrypt access token")?;
platform.activate(true);

telemetry.report_event("authenticate with browser", Default::default());
telemetry.report_event(
"authenticate with browser",
Default::default(),
metrics_enabled,
);

Ok(Credentials {
user_id: user_id.parse()?,

@@ -1235,6 +1255,7 @@ impl Client {
subscriber
} else {
log::info!("unhandled message {}", type_name);
self.peer.respond_with_unhandled_message(message).log_err();
return;
};

@@ -1278,6 +1299,7 @@ impl Client {
.detach();
} else {
log::info!("unhandled message {}", type_name);
self.peer.respond_with_unhandled_message(message).log_err();
}
}

@@ -1285,13 +1307,23 @@ impl Client {
self.telemetry.start();
}

pub fn report_event(&self, kind: &str, properties: Value) {
self.telemetry.report_event(kind, properties.clone());
pub fn report_event(
&self,
kind: &str,
properties: Value,
telemetry_settings: TelemetrySettings,
) {
self.telemetry
.report_event(kind, properties.clone(), telemetry_settings);
}

pub fn telemetry_log_file_path(&self) -> Option<PathBuf> {
self.telemetry.log_file_path()
}

pub fn metrics_id(&self) -> Option<Arc<str>> {
self.telemetry.metrics_id()
}
}

impl WeakSubscriber {
@@ -10,6 +10,7 @@ use lazy_static::lazy_static;
use parking_lot::Mutex;
use serde::Serialize;
use serde_json::json;
use settings::TelemetrySettings;
use std::{
io::Write,
mem,

@@ -184,11 +185,18 @@ impl Telemetry {
.detach();
}

/// This method takes the entire TelemetrySettings struct in order to force client code
/// to pull the struct out of the settings global. Do not remove!
pub fn set_authenticated_user_info(
self: &Arc<Self>,
metrics_id: Option<String>,
is_staff: bool,
telemetry_settings: TelemetrySettings,
) {
if !telemetry_settings.metrics() {
return;
}

let this = self.clone();
let mut state = self.state.lock();
let device_id = state.device_id.clone();

@@ -221,7 +229,16 @@ impl Telemetry {
}
}

pub fn report_event(self: &Arc<Self>, kind: &str, properties: Value) {
pub fn report_event(
self: &Arc<Self>,
kind: &str,
properties: Value,
telemetry_settings: TelemetrySettings,
) {
if !telemetry_settings.metrics() {
return;
}

let mut state = self.state.lock();
let event = MixpanelEvent {
event: kind.to_string(),

@@ -261,6 +278,10 @@ impl Telemetry {
}
}

pub fn metrics_id(self: &Arc<Self>) -> Option<Arc<str>> {
self.state.lock().metrics_id.clone()
}

fn flush(self: &Arc<Self>) {
let mut state = self.state.lock();
let mut events = mem::take(&mut state.queue);
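With this change, `Telemetry::report_event` no longer fires unconditionally: callers fetch `TelemetrySettings` from the `Settings` global and pass it through, and the method returns early when metrics are disabled. A hedged call-site sketch follows; the import paths are assumptions, only the signature comes from the diff above:

```rust
use std::sync::Arc;

use serde_json::json;
// Assumed module paths; the diff shows these types but not their exact locations.
use client::telemetry::Telemetry;
use settings::TelemetrySettings;

// Reporting becomes a no-op when the user has turned metrics off in settings.
fn report_example(telemetry: &Arc<Telemetry>, telemetry_settings: TelemetrySettings) {
    telemetry.report_event("example event", json!({}), telemetry_settings);
}
```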
@@ -5,8 +5,9 @@ use futures::{channel::mpsc, future, AsyncReadExt, Future, StreamExt};
use gpui::{AsyncAppContext, Entity, ImageData, ModelContext, ModelHandle, Task};
use postage::{sink::Sink, watch};
use rpc::proto::{RequestMessage, UsersResponse};
use settings::Settings;
use std::sync::{Arc, Weak};
use util::TryFutureExt as _;
use util::{StaffMode, TryFutureExt as _};

#[derive(Default, Debug)]
pub struct User {

@@ -141,14 +142,24 @@ impl UserStore {
let fetch_metrics_id =
client.request(proto::GetPrivateUserInfo {}).log_err();
let (user, info) = futures::join!(fetch_user, fetch_metrics_id);
if let Some(info) = info {
client.telemetry.set_authenticated_user_info(
Some(info.metrics_id.clone()),
info.staff,
);
} else {
client.telemetry.set_authenticated_user_info(None, false);
}
client.telemetry.set_authenticated_user_info(
info.as_ref().map(|info| info.metrics_id.clone()),
info.as_ref().map(|info| info.staff).unwrap_or(false),
cx.read(|cx| cx.global::<Settings>().telemetry()),
);

cx.update(|cx| {
cx.update_default_global(|staff_mode: &mut StaffMode, _| {
if !staff_mode.0 {
*staff_mode = StaffMode(
info.as_ref()
.map(|info| info.staff)
.unwrap_or_default(),
)
}
()
});
});

current_user_tx.send(user).await.ok();
}
@@ -2,6 +2,7 @@
name = "clock"
version = "0.1.0"
edition = "2021"
publish = false

[lib]
path = "src/clock.rs"
@@ -3,7 +3,8 @@ authors = ["Nathan Sobo <nathan@zed.dev>"]
default-run = "collab"
edition = "2021"
name = "collab"
version = "0.4.2"
version = "0.5.4"
publish = false

[[bin]]
name = "collab"
@@ -57,7 +57,8 @@ CREATE TABLE "worktrees" (
"abs_path" VARCHAR NOT NULL,
"visible" BOOL NOT NULL,
"scan_id" INTEGER NOT NULL,
"is_complete" BOOL NOT NULL,
"is_complete" BOOL NOT NULL DEFAULT FALSE,
"completed_scan_id" INTEGER NOT NULL,
PRIMARY KEY(project_id, id)
);
CREATE INDEX "index_worktrees_on_project_id" ON "worktrees" ("project_id");

@@ -65,6 +66,7 @@ CREATE INDEX "index_worktrees_on_project_id" ON "worktrees" ("project_id");
CREATE TABLE "worktree_entries" (
"project_id" INTEGER NOT NULL,
"worktree_id" INTEGER NOT NULL,
"scan_id" INTEGER NOT NULL,
"id" INTEGER NOT NULL,
"is_dir" BOOL NOT NULL,
"path" VARCHAR NOT NULL,

@@ -73,6 +75,7 @@ CREATE TABLE "worktree_entries" (
"mtime_nanos" INTEGER NOT NULL,
"is_symlink" BOOL NOT NULL,
"is_ignored" BOOL NOT NULL,
"is_deleted" BOOL NOT NULL,
PRIMARY KEY(project_id, worktree_id, id),
FOREIGN KEY(project_id, worktree_id) REFERENCES worktrees (project_id, id) ON DELETE CASCADE
);

@@ -0,0 +1,3 @@
ALTER TABLE "worktree_entries"
ADD COLUMN "scan_id" INT8,
ADD COLUMN "is_deleted" BOOL;

@@ -0,0 +1,3 @@
ALTER TABLE worktrees
ALTER COLUMN is_complete SET DEFAULT FALSE,
ADD COLUMN completed_scan_id INT8;
@@ -353,6 +353,8 @@ pub struct CreateInviteFromCodeParams {
invite_code: String,
email_address: String,
device_id: Option<String>,
#[serde(default)]
added_to_mailing_list: bool,
}

async fn create_invite_from_code(

@@ -365,6 +367,7 @@ async fn create_invite_from_code(
&params.invite_code,
&params.email_address,
params.device_id.as_deref(),
params.added_to_mailing_list,
)
.await?,
))
|
||||
@@ -123,34 +123,6 @@ impl Database {
|
||||
.await
|
||||
}
|
||||
|
||||
pub async fn delete_stale_projects(
|
||||
&self,
|
||||
environment: &str,
|
||||
new_server_id: ServerId,
|
||||
) -> Result<()> {
|
||||
self.transaction(|tx| async move {
|
||||
let stale_server_epochs = self
|
||||
.stale_server_ids(environment, new_server_id, &tx)
|
||||
.await?;
|
||||
project_collaborator::Entity::delete_many()
|
||||
.filter(
|
||||
project_collaborator::Column::ConnectionServerId
|
||||
.is_in(stale_server_epochs.iter().copied()),
|
||||
)
|
||||
.exec(&*tx)
|
||||
.await?;
|
||||
project::Entity::delete_many()
|
||||
.filter(
|
||||
project::Column::HostConnectionServerId
|
||||
.is_in(stale_server_epochs.iter().copied()),
|
||||
)
|
||||
.exec(&*tx)
|
||||
.await?;
|
||||
Ok(())
|
||||
})
|
||||
.await
|
||||
}
|
||||
|
||||
pub async fn stale_room_ids(
|
||||
&self,
|
||||
environment: &str,
|
||||
@@ -235,8 +207,8 @@ impl Database {
|
||||
|
||||
pub async fn delete_stale_servers(
|
||||
&self,
|
||||
new_server_id: ServerId,
|
||||
environment: &str,
|
||||
new_server_id: ServerId,
|
||||
) -> Result<()> {
|
||||
self.transaction(|tx| async move {
|
||||
server::Entity::delete_many()
|
||||
@@ -623,7 +595,16 @@ impl Database {
|
||||
.await
|
||||
}
|
||||
|
||||
pub async fn remove_contact(&self, requester_id: UserId, responder_id: UserId) -> Result<()> {
|
||||
/// Returns a bool indicating whether the removed contact had originally accepted or not
|
||||
///
|
||||
/// Deletes the contact identified by the requester and responder ids, and then returns
|
||||
/// whether the deleted contact had originally accepted or was a pending contact request.
|
||||
///
|
||||
/// # Arguments
|
||||
///
|
||||
/// * `requester_id` - The user that initiates this request
|
||||
/// * `responder_id` - The user that will be removed
|
||||
pub async fn remove_contact(&self, requester_id: UserId, responder_id: UserId) -> Result<bool> {
|
||||
self.transaction(|tx| async move {
|
||||
let (id_a, id_b) = if responder_id < requester_id {
|
||||
(responder_id, requester_id)
|
||||
@@ -631,20 +612,18 @@ impl Database {
|
||||
(requester_id, responder_id)
|
||||
};
|
||||
|
||||
let result = contact::Entity::delete_many()
|
||||
let contact = contact::Entity::find()
|
||||
.filter(
|
||||
contact::Column::UserIdA
|
||||
.eq(id_a)
|
||||
.and(contact::Column::UserIdB.eq(id_b)),
|
||||
)
|
||||
.exec(&*tx)
|
||||
.await?;
|
||||
.one(&*tx)
|
||||
.await?
|
||||
.ok_or_else(|| anyhow!("no such contact"))?;
|
||||
|
||||
if result.rows_affected == 1 {
|
||||
Ok(())
|
||||
} else {
|
||||
Err(anyhow!("no such contact"))?
|
||||
}
|
||||
contact::Entity::delete_by_id(contact.id).exec(&*tx).await?;
|
||||
Ok(contact.accepted)
|
||||
})
|
||||
.await
|
||||
}
|
||||
@@ -910,6 +889,7 @@ impl Database {
|
||||
code: &str,
|
||||
email_address: &str,
|
||||
device_id: Option<&str>,
|
||||
added_to_mailing_list: bool,
|
||||
) -> Result<Invite> {
|
||||
self.transaction(|tx| async move {
|
||||
let existing_user = user::Entity::find()
|
||||
@@ -961,6 +941,7 @@ impl Database {
|
||||
platform_windows: ActiveValue::set(false),
|
||||
platform_unknown: ActiveValue::set(true),
|
||||
device_id: ActiveValue::set(device_id.map(|device_id| device_id.into())),
|
||||
added_to_mailing_list: ActiveValue::set(added_to_mailing_list),
|
||||
..Default::default()
|
||||
})
|
||||
.on_conflict(
|
||||
@@ -1319,15 +1300,7 @@ impl Database {
|
||||
Condition::all()
|
||||
.add(room_participant::Column::RoomId.eq(room_id))
|
||||
.add(room_participant::Column::UserId.eq(user_id))
|
||||
.add(
|
||||
Condition::any()
|
||||
.add(room_participant::Column::AnsweringConnectionId.is_null())
|
||||
.add(room_participant::Column::AnsweringConnectionLost.eq(true))
|
||||
.add(
|
||||
room_participant::Column::AnsweringConnectionServerId
|
||||
.ne(connection.owner_id as i32),
|
||||
),
|
||||
),
|
||||
.add(room_participant::Column::AnsweringConnectionId.is_null()),
|
||||
)
|
||||
.set(room_participant::ActiveModel {
|
||||
answering_connection_id: ActiveValue::set(Some(connection.id as i32)),
|
||||
@@ -1349,6 +1322,245 @@ impl Database {
|
||||
.await
|
||||
}
|
||||
|
||||
pub async fn rejoin_room(
|
||||
&self,
|
||||
rejoin_room: proto::RejoinRoom,
|
||||
user_id: UserId,
|
||||
connection: ConnectionId,
|
||||
) -> Result<RoomGuard<RejoinedRoom>> {
|
||||
self.room_transaction(|tx| async {
|
||||
let tx = tx;
|
||||
let room_id = RoomId::from_proto(rejoin_room.id);
|
||||
let participant_update = room_participant::Entity::update_many()
|
||||
.filter(
|
||||
Condition::all()
|
||||
.add(room_participant::Column::RoomId.eq(room_id))
|
||||
.add(room_participant::Column::UserId.eq(user_id))
|
||||
.add(room_participant::Column::AnsweringConnectionId.is_not_null())
|
||||
.add(
|
||||
Condition::any()
|
||||
.add(room_participant::Column::AnsweringConnectionLost.eq(true))
|
||||
.add(
|
||||
room_participant::Column::AnsweringConnectionServerId
|
||||
.ne(connection.owner_id as i32),
|
||||
),
|
||||
),
|
||||
)
|
||||
.set(room_participant::ActiveModel {
|
||||
answering_connection_id: ActiveValue::set(Some(connection.id as i32)),
|
||||
answering_connection_server_id: ActiveValue::set(Some(ServerId(
|
||||
connection.owner_id as i32,
|
||||
))),
|
||||
answering_connection_lost: ActiveValue::set(false),
|
||||
..Default::default()
|
||||
})
|
||||
.exec(&*tx)
|
||||
.await?;
|
||||
if participant_update.rows_affected == 0 {
|
||||
return Err(anyhow!("room does not exist or was already joined"))?;
|
||||
}
|
||||
|
||||
let mut reshared_projects = Vec::new();
|
||||
for reshared_project in &rejoin_room.reshared_projects {
|
||||
let project_id = ProjectId::from_proto(reshared_project.project_id);
|
||||
let project = project::Entity::find_by_id(project_id)
|
||||
.one(&*tx)
|
||||
.await?
|
||||
.ok_or_else(|| anyhow!("project does not exist"))?;
|
||||
if project.host_user_id != user_id {
|
||||
return Err(anyhow!("no such project"))?;
|
||||
}
|
||||
|
||||
let mut collaborators = project
|
||||
.find_related(project_collaborator::Entity)
|
||||
.all(&*tx)
|
||||
.await?;
|
||||
let host_ix = collaborators
|
||||
.iter()
|
||||
.position(|collaborator| {
|
||||
collaborator.user_id == user_id && collaborator.is_host
|
||||
})
|
||||
.ok_or_else(|| anyhow!("host not found among collaborators"))?;
|
||||
let host = collaborators.swap_remove(host_ix);
|
||||
let old_connection_id = host.connection();
|
||||
|
||||
project::Entity::update(project::ActiveModel {
|
||||
host_connection_id: ActiveValue::set(Some(connection.id as i32)),
|
||||
host_connection_server_id: ActiveValue::set(Some(ServerId(
|
||||
connection.owner_id as i32,
|
||||
))),
|
||||
..project.into_active_model()
|
||||
})
|
||||
.exec(&*tx)
|
||||
.await?;
|
||||
project_collaborator::Entity::update(project_collaborator::ActiveModel {
|
||||
connection_id: ActiveValue::set(connection.id as i32),
|
||||
connection_server_id: ActiveValue::set(ServerId(connection.owner_id as i32)),
|
||||
..host.into_active_model()
|
||||
})
|
||||
.exec(&*tx)
|
||||
.await?;
|
||||
|
||||
self.update_project_worktrees(project_id, &reshared_project.worktrees, &tx)
|
||||
.await?;
|
||||
|
||||
reshared_projects.push(ResharedProject {
|
||||
id: project_id,
|
||||
old_connection_id,
|
||||
collaborators: collaborators
|
||||
.iter()
|
||||
.map(|collaborator| ProjectCollaborator {
|
||||
connection_id: collaborator.connection(),
|
||||
user_id: collaborator.user_id,
|
||||
replica_id: collaborator.replica_id,
|
||||
is_host: collaborator.is_host,
|
||||
})
|
||||
.collect(),
|
||||
worktrees: reshared_project.worktrees.clone(),
|
||||
});
|
||||
}
|
||||
|
||||
project::Entity::delete_many()
|
||||
.filter(
|
||||
Condition::all()
|
||||
.add(project::Column::RoomId.eq(room_id))
|
||||
.add(project::Column::HostUserId.eq(user_id))
|
||||
.add(
|
||||
project::Column::Id
|
||||
.is_not_in(reshared_projects.iter().map(|project| project.id)),
|
||||
),
|
||||
)
|
||||
.exec(&*tx)
|
||||
.await?;
|
||||
|
||||
let mut rejoined_projects = Vec::new();
|
||||
for rejoined_project in &rejoin_room.rejoined_projects {
|
||||
let project_id = ProjectId::from_proto(rejoined_project.id);
|
||||
let Some(project) = project::Entity::find_by_id(project_id)
|
||||
.one(&*tx)
|
||||
.await? else { continue };
|
||||
|
||||
let mut worktrees = Vec::new();
|
||||
let db_worktrees = project.find_related(worktree::Entity).all(&*tx).await?;
|
||||
for db_worktree in db_worktrees {
|
||||
let mut worktree = RejoinedWorktree {
|
||||
id: db_worktree.id as u64,
|
||||
abs_path: db_worktree.abs_path,
|
||||
root_name: db_worktree.root_name,
|
||||
visible: db_worktree.visible,
|
||||
updated_entries: Default::default(),
|
||||
removed_entries: Default::default(),
|
||||
diagnostic_summaries: Default::default(),
|
||||
scan_id: db_worktree.scan_id as u64,
|
||||
completed_scan_id: db_worktree.completed_scan_id as u64,
|
||||
};
|
||||
|
||||
let rejoined_worktree = rejoined_project
|
||||
.worktrees
|
||||
.iter()
|
||||
.find(|worktree| worktree.id == db_worktree.id as u64);
|
||||
let entry_filter = if let Some(rejoined_worktree) = rejoined_worktree {
|
||||
worktree_entry::Column::ScanId.gt(rejoined_worktree.scan_id)
|
||||
} else {
|
||||
worktree_entry::Column::IsDeleted.eq(false)
|
||||
};
|
||||
|
||||
let mut db_entries = worktree_entry::Entity::find()
|
||||
.filter(
|
||||
Condition::all()
|
||||
.add(worktree_entry::Column::WorktreeId.eq(worktree.id))
|
||||
.add(entry_filter),
|
||||
)
|
||||
.stream(&*tx)
|
||||
.await?;
|
||||
|
||||
while let Some(db_entry) = db_entries.next().await {
|
||||
let db_entry = db_entry?;
|
||||
if db_entry.is_deleted {
|
||||
worktree.removed_entries.push(db_entry.id as u64);
|
||||
} else {
|
||||
worktree.updated_entries.push(proto::Entry {
|
||||
id: db_entry.id as u64,
|
||||
is_dir: db_entry.is_dir,
|
||||
path: db_entry.path,
|
||||
inode: db_entry.inode as u64,
|
||||
mtime: Some(proto::Timestamp {
|
||||
seconds: db_entry.mtime_seconds as u64,
|
||||
nanos: db_entry.mtime_nanos as u32,
|
||||
}),
|
||||
is_symlink: db_entry.is_symlink,
|
||||
is_ignored: db_entry.is_ignored,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
worktrees.push(worktree);
|
||||
}
|
||||
|
||||
let language_servers = project
|
||||
.find_related(language_server::Entity)
|
||||
.all(&*tx)
|
||||
.await?
|
||||
.into_iter()
|
||||
.map(|language_server| proto::LanguageServer {
|
||||
id: language_server.id as u64,
|
||||
name: language_server.name,
|
||||
})
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
let mut collaborators = project
|
||||
.find_related(project_collaborator::Entity)
|
||||
.all(&*tx)
|
||||
.await?;
|
||||
let self_collaborator = if let Some(self_collaborator_ix) = collaborators
|
||||
.iter()
|
||||
.position(|collaborator| collaborator.user_id == user_id)
|
||||
{
|
||||
collaborators.swap_remove(self_collaborator_ix)
|
||||
} else {
|
||||
continue;
|
||||
};
|
||||
let old_connection_id = self_collaborator.connection();
|
||||
project_collaborator::Entity::update(project_collaborator::ActiveModel {
|
||||
connection_id: ActiveValue::set(connection.id as i32),
|
||||
connection_server_id: ActiveValue::set(ServerId(connection.owner_id as i32)),
|
||||
..self_collaborator.into_active_model()
|
||||
})
|
||||
.exec(&*tx)
|
||||
.await?;
|
||||
|
||||
let collaborators = collaborators
|
||||
.into_iter()
|
||||
.map(|collaborator| ProjectCollaborator {
|
||||
connection_id: collaborator.connection(),
|
||||
user_id: collaborator.user_id,
|
||||
replica_id: collaborator.replica_id,
|
||||
is_host: collaborator.is_host,
|
||||
})
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
rejoined_projects.push(RejoinedProject {
|
||||
id: project_id,
|
||||
old_connection_id,
|
||||
collaborators,
|
||||
worktrees,
|
||||
language_servers,
|
||||
});
|
||||
}
|
||||
|
||||
let room = self.get_room(room_id, &tx).await?;
|
||||
Ok((
|
||||
room_id,
|
||||
RejoinedRoom {
|
||||
room,
|
||||
rejoined_projects,
|
||||
reshared_projects,
|
||||
},
|
||||
))
|
||||
})
|
||||
.await
|
||||
}
|
||||
|
||||
pub async fn leave_room(
|
||||
&self,
|
||||
connection: ConnectionId,
|
||||
@@ -1381,12 +1593,8 @@ impl Database {
|
||||
.filter(
|
||||
Condition::all()
|
||||
.add(
|
||||
room_participant::Column::CallingConnectionId
|
||||
.eq(connection.id as i32),
|
||||
)
|
||||
.add(
|
||||
room_participant::Column::CallingConnectionServerId
|
||||
.eq(connection.owner_id as i32),
|
||||
room_participant::Column::CallingUserId
|
||||
.eq(leaving_participant.user_id),
|
||||
)
|
||||
.add(room_participant::Column::AnsweringConnectionId.is_null()),
|
||||
)
|
||||
@@ -1445,10 +1653,7 @@ impl Database {
|
||||
host_connection_id: Default::default(),
|
||||
});
|
||||
|
||||
let collaborator_connection_id = ConnectionId {
|
||||
owner_id: collaborator.connection_server_id.0 as u32,
|
||||
id: collaborator.connection_id as u32,
|
||||
};
|
||||
let collaborator_connection_id = collaborator.connection();
|
||||
if collaborator_connection_id != connection {
|
||||
left_project.connection_ids.push(collaborator_connection_id);
|
||||
}
|
||||
@@ -1572,11 +1777,8 @@ impl Database {
|
||||
.await
|
||||
}
|
||||
|
||||
pub async fn connection_lost(
|
||||
&self,
|
||||
connection: ConnectionId,
|
||||
) -> Result<RoomGuard<Vec<LeftProject>>> {
|
||||
self.room_transaction(|tx| async move {
|
||||
pub async fn connection_lost(&self, connection: ConnectionId) -> Result<()> {
|
||||
self.transaction(|tx| async move {
|
||||
let participant = room_participant::Entity::find()
|
||||
.filter(
|
||||
Condition::all()
|
||||
@@ -1592,7 +1794,6 @@ impl Database {
|
||||
.one(&*tx)
|
||||
.await?
|
||||
.ok_or_else(|| anyhow!("not a participant in any room"))?;
|
||||
let room_id = participant.room_id;
|
||||
|
||||
room_participant::Entity::update(room_participant::ActiveModel {
|
||||
answering_connection_lost: ActiveValue::set(true),
|
||||
@@ -1601,66 +1802,7 @@ impl Database {
|
||||
.exec(&*tx)
|
||||
.await?;
|
||||
|
||||
let collaborator_on_projects = project_collaborator::Entity::find()
|
||||
.find_also_related(project::Entity)
|
||||
.filter(
|
||||
Condition::all()
|
||||
.add(project_collaborator::Column::ConnectionId.eq(connection.id as i32))
|
||||
.add(
|
||||
project_collaborator::Column::ConnectionServerId
|
||||
.eq(connection.owner_id as i32),
|
||||
),
|
||||
)
|
||||
.all(&*tx)
|
||||
.await?;
|
||||
project_collaborator::Entity::delete_many()
|
||||
.filter(
|
||||
Condition::all()
|
||||
.add(project_collaborator::Column::ConnectionId.eq(connection.id as i32))
|
||||
.add(
|
||||
project_collaborator::Column::ConnectionServerId
|
||||
.eq(connection.owner_id as i32),
|
||||
),
|
||||
)
|
||||
.exec(&*tx)
|
||||
.await?;
|
||||
|
||||
let mut left_projects = Vec::new();
|
||||
for (_, project) in collaborator_on_projects {
|
||||
if let Some(project) = project {
|
||||
let collaborators = project
|
||||
.find_related(project_collaborator::Entity)
|
||||
.all(&*tx)
|
||||
.await?;
|
||||
let connection_ids = collaborators
|
||||
.into_iter()
|
||||
.map(|collaborator| ConnectionId {
|
||||
id: collaborator.connection_id as u32,
|
||||
owner_id: collaborator.connection_server_id.0 as u32,
|
||||
})
|
||||
.collect();
|
||||
|
||||
left_projects.push(LeftProject {
|
||||
id: project.id,
|
||||
host_user_id: project.host_user_id,
|
||||
host_connection_id: project.host_connection()?,
|
||||
connection_ids,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
project::Entity::delete_many()
|
||||
.filter(
|
||||
Condition::all()
|
||||
.add(project::Column::HostConnectionId.eq(connection.id as i32))
|
||||
.add(
|
||||
project::Column::HostConnectionServerId.eq(connection.owner_id as i32),
|
||||
),
|
||||
)
|
||||
.exec(&*tx)
|
||||
.await?;
|
||||
|
||||
Ok((room_id, left_projects))
|
||||
Ok(())
|
||||
})
|
||||
.await
|
||||
}
|
||||
@@ -1778,7 +1920,9 @@ impl Database {
|
||||
};
|
||||
|
||||
if let Some(db_worktree) = db_worktree {
|
||||
project.worktree_root_names.push(db_worktree.root_name);
|
||||
if db_worktree.visible {
|
||||
project.worktree_root_names.push(db_worktree.root_name);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1860,7 +2004,7 @@ impl Database {
|
||||
root_name: ActiveValue::set(worktree.root_name.clone()),
|
||||
visible: ActiveValue::set(worktree.visible),
|
||||
scan_id: ActiveValue::set(0),
|
||||
is_complete: ActiveValue::set(false),
|
||||
completed_scan_id: ActiveValue::set(0),
|
||||
}
|
||||
}))
|
||||
.exec(&*tx)
|
||||
@@ -1930,35 +2074,7 @@ impl Database {
|
||||
.await?
|
||||
.ok_or_else(|| anyhow!("no such project"))?;
|
||||
|
||||
if !worktrees.is_empty() {
|
||||
worktree::Entity::insert_many(worktrees.iter().map(|worktree| {
|
||||
worktree::ActiveModel {
|
||||
id: ActiveValue::set(worktree.id as i64),
|
||||
project_id: ActiveValue::set(project.id),
|
||||
abs_path: ActiveValue::set(worktree.abs_path.clone()),
|
||||
root_name: ActiveValue::set(worktree.root_name.clone()),
|
||||
visible: ActiveValue::set(worktree.visible),
|
||||
scan_id: ActiveValue::set(0),
|
||||
is_complete: ActiveValue::set(false),
|
||||
}
|
||||
}))
|
||||
.on_conflict(
|
||||
OnConflict::columns([worktree::Column::ProjectId, worktree::Column::Id])
|
||||
.update_column(worktree::Column::RootName)
|
||||
.to_owned(),
|
||||
)
|
||||
.exec(&*tx)
|
||||
.await?;
|
||||
}
|
||||
|
||||
worktree::Entity::delete_many()
|
||||
.filter(
|
||||
worktree::Column::ProjectId.eq(project.id).and(
|
||||
worktree::Column::Id
|
||||
.is_not_in(worktrees.iter().map(|worktree| worktree.id as i64)),
|
||||
),
|
||||
)
|
||||
.exec(&*tx)
|
||||
self.update_project_worktrees(project.id, worktrees, &tx)
|
||||
.await?;
|
||||
|
||||
let guest_connection_ids = self.project_guest_connection_ids(project.id, &tx).await?;
|
||||
@@ -1968,6 +2084,41 @@ impl Database {
|
||||
.await
|
||||
}
|
||||
|
||||
    async fn update_project_worktrees(
        &self,
        project_id: ProjectId,
        worktrees: &[proto::WorktreeMetadata],
        tx: &DatabaseTransaction,
    ) -> Result<()> {
        if !worktrees.is_empty() {
            worktree::Entity::insert_many(worktrees.iter().map(|worktree| worktree::ActiveModel {
                id: ActiveValue::set(worktree.id as i64),
                project_id: ActiveValue::set(project_id),
                abs_path: ActiveValue::set(worktree.abs_path.clone()),
                root_name: ActiveValue::set(worktree.root_name.clone()),
                visible: ActiveValue::set(worktree.visible),
                scan_id: ActiveValue::set(0),
                completed_scan_id: ActiveValue::set(0),
            }))
            .on_conflict(
                OnConflict::columns([worktree::Column::ProjectId, worktree::Column::Id])
                    .update_column(worktree::Column::RootName)
                    .to_owned(),
            )
            .exec(&*tx)
            .await?;
        }

        worktree::Entity::delete_many()
            .filter(worktree::Column::ProjectId.eq(project_id).and(
                worktree::Column::Id.is_not_in(worktrees.iter().map(|worktree| worktree.id as i64)),
            ))
            .exec(&*tx)
            .await?;

        Ok(())
    }
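
// For illustration: this helper is now the single place that upserts the reported
// worktrees and prunes the ones that are gone, and it is called from both paths
// that appear elsewhere in this diff, e.g.
//
//   self.update_project_worktrees(project.id, worktrees, &tx).await?;                    // update_project
//   self.update_project_worktrees(project_id, &reshared_project.worktrees, &tx).await?;  // rejoin_room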
|
||||
|
||||
pub async fn update_worktree(
|
||||
&self,
|
||||
update: &proto::UpdateWorktree,
|
||||
@@ -1997,7 +2148,11 @@ impl Database {
|
||||
project_id: ActiveValue::set(project_id),
|
||||
root_name: ActiveValue::set(update.root_name.clone()),
|
||||
scan_id: ActiveValue::set(update.scan_id as i64),
|
||||
is_complete: ActiveValue::set(update.is_last_update),
|
||||
completed_scan_id: if update.is_last_update {
|
||||
ActiveValue::set(update.scan_id as i64)
|
||||
} else {
|
||||
ActiveValue::default()
|
||||
},
|
||||
abs_path: ActiveValue::set(update.abs_path.clone()),
|
||||
..Default::default()
|
||||
})
|
||||
@@ -2018,6 +2173,8 @@ impl Database {
|
||||
mtime_nanos: ActiveValue::set(mtime.nanos as i32),
|
||||
is_symlink: ActiveValue::set(entry.is_symlink),
|
||||
is_ignored: ActiveValue::set(entry.is_ignored),
|
||||
is_deleted: ActiveValue::set(false),
|
||||
scan_id: ActiveValue::set(update.scan_id as i64),
|
||||
}
|
||||
}))
|
||||
.on_conflict(
|
||||
@@ -2034,6 +2191,7 @@ impl Database {
|
||||
worktree_entry::Column::MtimeNanos,
|
||||
worktree_entry::Column::IsSymlink,
|
||||
worktree_entry::Column::IsIgnored,
|
||||
worktree_entry::Column::ScanId,
|
||||
])
|
||||
.to_owned(),
|
||||
)
|
||||
@@ -2042,7 +2200,7 @@ impl Database {
|
||||
}
|
||||
|
||||
if !update.removed_entries.is_empty() {
|
||||
worktree_entry::Entity::delete_many()
|
||||
worktree_entry::Entity::update_many()
|
||||
.filter(
|
||||
worktree_entry::Column::ProjectId
|
||||
.eq(project_id)
|
||||
@@ -2052,6 +2210,11 @@ impl Database {
|
||||
.is_in(update.removed_entries.iter().map(|id| *id as i64)),
|
||||
),
|
||||
)
|
||||
.set(worktree_entry::ActiveModel {
|
||||
is_deleted: ActiveValue::Set(true),
|
||||
scan_id: ActiveValue::Set(update.scan_id as i64),
|
||||
..Default::default()
|
||||
})
|
||||
.exec(&*tx)
|
||||
.await?;
|
||||
}
|
||||
@@ -2230,7 +2393,7 @@ impl Database {
|
||||
entries: Default::default(),
|
||||
diagnostic_summaries: Default::default(),
|
||||
scan_id: db_worktree.scan_id as u64,
|
||||
is_complete: db_worktree.is_complete,
|
||||
completed_scan_id: db_worktree.completed_scan_id as u64,
|
||||
},
|
||||
)
|
||||
})
|
||||
@@ -2239,7 +2402,11 @@ impl Database {
|
||||
// Populate worktree entries.
|
||||
{
|
||||
let mut db_entries = worktree_entry::Entity::find()
|
||||
.filter(worktree_entry::Column::ProjectId.eq(project_id))
|
||||
.filter(
|
||||
Condition::all()
|
||||
.add(worktree_entry::Column::ProjectId.eq(project_id))
|
||||
.add(worktree_entry::Column::IsDeleted.eq(false)),
|
||||
)
|
||||
.stream(&*tx)
|
||||
.await?;
|
||||
while let Some(db_entry) = db_entries.next().await {
|
||||
@@ -2290,7 +2457,15 @@ impl Database {
|
||||
|
||||
let room_id = project.room_id;
|
||||
let project = Project {
|
||||
collaborators,
|
||||
collaborators: collaborators
|
||||
.into_iter()
|
||||
.map(|collaborator| ProjectCollaborator {
|
||||
connection_id: collaborator.connection(),
|
||||
user_id: collaborator.user_id,
|
||||
replica_id: collaborator.replica_id,
|
||||
is_host: collaborator.is_host,
|
||||
})
|
||||
.collect(),
|
||||
worktrees,
|
||||
language_servers: language_servers
|
||||
.into_iter()
|
||||
@@ -2337,10 +2512,7 @@ impl Database {
|
||||
.await?;
|
||||
let connection_ids = collaborators
|
||||
.into_iter()
|
||||
.map(|collaborator| ConnectionId {
|
||||
owner_id: collaborator.connection_server_id.0 as u32,
|
||||
id: collaborator.connection_id as u32,
|
||||
})
|
||||
.map(|collaborator| collaborator.connection())
|
||||
.collect();
|
||||
|
||||
let left_project = LeftProject {
|
||||
@@ -2357,8 +2529,8 @@ impl Database {
|
||||
pub async fn project_collaborators(
|
||||
&self,
|
||||
project_id: ProjectId,
|
||||
connection: ConnectionId,
|
||||
) -> Result<RoomGuard<Vec<project_collaborator::Model>>> {
|
||||
connection_id: ConnectionId,
|
||||
) -> Result<RoomGuard<Vec<ProjectCollaborator>>> {
|
||||
self.room_transaction(|tx| async move {
|
||||
let project = project::Entity::find_by_id(project_id)
|
||||
.one(&*tx)
|
||||
@@ -2367,15 +2539,20 @@ impl Database {
|
||||
let collaborators = project_collaborator::Entity::find()
|
||||
.filter(project_collaborator::Column::ProjectId.eq(project_id))
|
||||
.all(&*tx)
|
||||
.await?;
|
||||
.await?
|
||||
.into_iter()
|
||||
.map(|collaborator| ProjectCollaborator {
|
||||
connection_id: collaborator.connection(),
|
||||
user_id: collaborator.user_id,
|
||||
replica_id: collaborator.replica_id,
|
||||
is_host: collaborator.is_host,
|
||||
})
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
if collaborators.iter().any(|collaborator| {
|
||||
let collaborator_connection = ConnectionId {
|
||||
owner_id: collaborator.connection_server_id.0 as u32,
|
||||
id: collaborator.connection_id as u32,
|
||||
};
|
||||
collaborator_connection == connection
|
||||
}) {
|
||||
if collaborators
|
||||
.iter()
|
||||
.any(|collaborator| collaborator.connection_id == connection_id)
|
||||
{
|
||||
Ok((project.room_id, collaborators))
|
||||
} else {
|
||||
Err(anyhow!("no such project"))?
|
||||
@@ -2394,18 +2571,15 @@ impl Database {
|
||||
.one(&*tx)
|
||||
.await?
|
||||
.ok_or_else(|| anyhow!("no such project"))?;
|
||||
let mut participants = project_collaborator::Entity::find()
|
||||
let mut collaborators = project_collaborator::Entity::find()
|
||||
.filter(project_collaborator::Column::ProjectId.eq(project_id))
|
||||
.stream(&*tx)
|
||||
.await?;
|
||||
|
||||
let mut connection_ids = HashSet::default();
|
||||
while let Some(participant) = participants.next().await {
|
||||
let participant = participant?;
|
||||
connection_ids.insert(ConnectionId {
|
||||
owner_id: participant.connection_server_id.0 as u32,
|
||||
id: participant.connection_id as u32,
|
||||
});
|
||||
while let Some(collaborator) = collaborators.next().await {
|
||||
let collaborator = collaborator?;
|
||||
connection_ids.insert(collaborator.connection());
|
||||
}
|
||||
|
||||
if connection_ids.contains(&connection_id) {
|
||||
@@ -2422,7 +2596,7 @@ impl Database {
|
||||
project_id: ProjectId,
|
||||
tx: &DatabaseTransaction,
|
||||
) -> Result<Vec<ConnectionId>> {
|
||||
let mut participants = project_collaborator::Entity::find()
|
||||
let mut collaborators = project_collaborator::Entity::find()
|
||||
.filter(
|
||||
project_collaborator::Column::ProjectId
|
||||
.eq(project_id)
|
||||
@@ -2432,12 +2606,9 @@ impl Database {
|
||||
.await?;
|
||||
|
||||
let mut guest_connection_ids = Vec::new();
|
||||
while let Some(participant) = participants.next().await {
|
||||
let participant = participant?;
|
||||
guest_connection_ids.push(ConnectionId {
|
||||
owner_id: participant.connection_server_id.0 as u32,
|
||||
id: participant.connection_id as u32,
|
||||
});
|
||||
while let Some(collaborator) = collaborators.next().await {
|
||||
let collaborator = collaborator?;
|
||||
guest_connection_ids.push(collaborator.connection());
|
||||
}
|
||||
Ok(guest_connection_ids)
|
||||
}
|
||||
@@ -2849,6 +3020,40 @@ id_type!(ServerId);
|
||||
id_type!(SignupId);
|
||||
id_type!(UserId);
|
||||
|
||||
pub struct RejoinedRoom {
    pub room: proto::Room,
    pub rejoined_projects: Vec<RejoinedProject>,
    pub reshared_projects: Vec<ResharedProject>,
}

pub struct ResharedProject {
    pub id: ProjectId,
    pub old_connection_id: ConnectionId,
    pub collaborators: Vec<ProjectCollaborator>,
    pub worktrees: Vec<proto::WorktreeMetadata>,
}

pub struct RejoinedProject {
    pub id: ProjectId,
    pub old_connection_id: ConnectionId,
    pub collaborators: Vec<ProjectCollaborator>,
    pub worktrees: Vec<RejoinedWorktree>,
    pub language_servers: Vec<proto::LanguageServer>,
}

#[derive(Debug)]
pub struct RejoinedWorktree {
    pub id: u64,
    pub abs_path: String,
    pub root_name: String,
    pub visible: bool,
    pub updated_entries: Vec<proto::Entry>,
    pub removed_entries: Vec<u64>,
    pub diagnostic_summaries: Vec<proto::DiagnosticSummary>,
    pub scan_id: u64,
    pub completed_scan_id: u64,
}
|
||||
|
||||
pub struct LeftRoom {
|
||||
pub room: proto::Room,
|
||||
pub left_projects: HashMap<ProjectId, LeftProject>,
|
||||
@@ -2862,11 +3067,29 @@ pub struct RefreshedRoom {
|
||||
}
|
||||
|
||||
pub struct Project {
-   pub collaborators: Vec<project_collaborator::Model>,
+   pub collaborators: Vec<ProjectCollaborator>,
    pub worktrees: BTreeMap<u64, Worktree>,
    pub language_servers: Vec<proto::LanguageServer>,
}

pub struct ProjectCollaborator {
    pub connection_id: ConnectionId,
    pub user_id: UserId,
    pub replica_id: ReplicaId,
    pub is_host: bool,
}

impl ProjectCollaborator {
    pub fn to_proto(&self) -> proto::Collaborator {
        proto::Collaborator {
            peer_id: Some(self.connection_id.into()),
            replica_id: self.replica_id.0 as u32,
            user_id: self.user_id.to_proto(),
        }
    }
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct LeftProject {
|
||||
pub id: ProjectId,
|
||||
pub host_user_id: UserId,
|
||||
@@ -2882,7 +3105,7 @@ pub struct Worktree {
    pub entries: Vec<proto::Entry>,
    pub diagnostic_summaries: Vec<proto::DiagnosticSummary>,
    pub scan_id: u64,
-   pub is_complete: bool,
+   pub completed_scan_id: u64,
}
|
||||
|
||||
#[cfg(test)]
|
||||
|
||||
@@ -1,4 +1,5 @@
|
||||
use super::{ProjectCollaboratorId, ProjectId, ReplicaId, ServerId, UserId};
|
||||
use rpc::ConnectionId;
|
||||
use sea_orm::entity::prelude::*;
|
||||
|
||||
#[derive(Clone, Debug, PartialEq, Eq, DeriveEntityModel)]
|
||||
@@ -14,6 +15,15 @@ pub struct Model {
|
||||
pub is_host: bool,
|
||||
}
|
||||
|
||||
impl Model {
    pub fn connection(&self) -> ConnectionId {
        ConnectionId {
            owner_id: self.connection_server_id.0 as u32,
            id: self.connection_id as u32,
        }
    }
}
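
// Hypothetical usage sketch: the `connection()` helper above replaces the manual
// reconstruction of a `ConnectionId` from the two stored columns that older call
// sites in this diff performed by hand.
fn example_connection_id(collaborator: &Model) -> ConnectionId {
    // Equivalent to:
    // ConnectionId {
    //     owner_id: collaborator.connection_server_id.0 as u32,
    //     id: collaborator.connection_id as u32,
    // }
    collaborator.connection()
}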
|
||||
|
||||
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
|
||||
pub enum Relation {
|
||||
#[sea_orm(
|
||||
|
||||
@@ -567,7 +567,12 @@ async fn test_invite_codes() {
|
||||
|
||||
// User 2 redeems the invite code and becomes a contact of user 1.
|
||||
let user2_invite = db
|
||||
.create_invite_from_code(&invite_code, "user2@example.com", Some("user-2-device-id"))
|
||||
.create_invite_from_code(
|
||||
&invite_code,
|
||||
"user2@example.com",
|
||||
Some("user-2-device-id"),
|
||||
true,
|
||||
)
|
||||
.await
|
||||
.unwrap();
|
||||
let NewUserResult {
|
||||
@@ -617,7 +622,7 @@ async fn test_invite_codes() {
|
||||
|
||||
// User 3 redeems the invite code and becomes a contact of user 1.
|
||||
let user3_invite = db
|
||||
.create_invite_from_code(&invite_code, "user3@example.com", None)
|
||||
.create_invite_from_code(&invite_code, "user3@example.com", None, true)
|
||||
.await
|
||||
.unwrap();
|
||||
let NewUserResult {
|
||||
@@ -672,9 +677,14 @@ async fn test_invite_codes() {
|
||||
);
|
||||
|
||||
// Trying to redeem the code for the third time results in an error.
|
||||
db.create_invite_from_code(&invite_code, "user4@example.com", Some("user-4-device-id"))
|
||||
.await
|
||||
.unwrap_err();
|
||||
db.create_invite_from_code(
|
||||
&invite_code,
|
||||
"user4@example.com",
|
||||
Some("user-4-device-id"),
|
||||
true,
|
||||
)
|
||||
.await
|
||||
.unwrap_err();
|
||||
|
||||
// Invite count can be updated after the code has been created.
|
||||
db.set_invite_count_for_user(user1, 2).await.unwrap();
|
||||
@@ -684,7 +694,12 @@ async fn test_invite_codes() {
|
||||
|
||||
// User 4 can now redeem the invite code and becomes a contact of user 1.
|
||||
let user4_invite = db
|
||||
.create_invite_from_code(&invite_code, "user4@example.com", Some("user-4-device-id"))
|
||||
.create_invite_from_code(
|
||||
&invite_code,
|
||||
"user4@example.com",
|
||||
Some("user-4-device-id"),
|
||||
true,
|
||||
)
|
||||
.await
|
||||
.unwrap();
|
||||
let user4 = db
|
||||
@@ -739,9 +754,14 @@ async fn test_invite_codes() {
|
||||
);
|
||||
|
||||
// An existing user cannot redeem invite codes.
|
||||
db.create_invite_from_code(&invite_code, "user2@example.com", Some("user-2-device-id"))
|
||||
.await
|
||||
.unwrap_err();
|
||||
db.create_invite_from_code(
|
||||
&invite_code,
|
||||
"user2@example.com",
|
||||
Some("user-2-device-id"),
|
||||
true,
|
||||
)
|
||||
.await
|
||||
.unwrap_err();
|
||||
let (_, invite_count) = db.get_invite_code_for_user(user1).await.unwrap().unwrap();
|
||||
assert_eq!(invite_count, 1);
|
||||
|
||||
@@ -763,7 +783,7 @@ async fn test_invite_codes() {
|
||||
db.set_invite_count_for_user(user5, 5).await.unwrap();
|
||||
let (user5_invite_code, _) = db.get_invite_code_for_user(user5).await.unwrap().unwrap();
|
||||
let user5_invite_to_user1 = db
|
||||
.create_invite_from_code(&user5_invite_code, "user1@different.com", None)
|
||||
.create_invite_from_code(&user5_invite_code, "user1@different.com", None, true)
|
||||
.await
|
||||
.unwrap();
|
||||
let user1_2 = db
|
||||
|
||||
@@ -11,8 +11,10 @@ pub struct Model {
|
||||
pub abs_path: String,
|
||||
pub root_name: String,
|
||||
pub visible: bool,
|
||||
+   /// The last scan for which we've observed entries. It may be in progress.
    pub scan_id: i64,
-   pub is_complete: bool,
+   /// The last scan that fully completed.
+   pub completed_scan_id: i64,
}
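
// Hypothetical helper, for illustration only: with `is_complete` removed, whether a
// worktree update is final is derived from the two scan ids, matching the
// `scan_id == completed_scan_id` comparisons in the rpc code elsewhere in this diff.
fn example_is_last_update(scan_id: i64, completed_scan_id: i64) -> bool {
    scan_id == completed_scan_id
}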
|
||||
|
||||
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
|
||||
|
||||
@@ -17,6 +17,8 @@ pub struct Model {
|
||||
pub mtime_nanos: i32,
|
||||
pub is_symlink: bool,
|
||||
pub is_ignored: bool,
|
||||
pub is_deleted: bool,
|
||||
pub scan_id: i64,
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
|
||||
|
||||
@@ -33,4 +33,12 @@ impl Executor {
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
    pub fn record_backtrace(&self) {
        match self {
            Executor::Production => {}
            #[cfg(test)]
            Executor::Deterministic(background) => background.record_backtrace(),
        }
    }
}
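
// Hypothetical handler sketch: message handlers call this at the top so the
// deterministic test executor can record where a message was being processed
// (see `session.executor.record_backtrace();` in the rpc handlers below); it is a
// no-op on the production executor.
fn example_handler(session: &Session) {
    session.executor.record_backtrace();
}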
|
||||
|
||||
@@ -3,10 +3,11 @@ pub mod auth;
|
||||
pub mod db;
|
||||
pub mod env;
|
||||
pub mod executor;
|
||||
#[cfg(test)]
|
||||
mod integration_tests;
|
||||
pub mod rpc;
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests;
|
||||
|
||||
use axum::{http::StatusCode, response::IntoResponse};
|
||||
use db::Database;
|
||||
use serde::Deserialize;
|
||||
|
||||
@@ -95,6 +95,7 @@ struct Session {
|
||||
peer: Arc<Peer>,
|
||||
connection_pool: Arc<parking_lot::Mutex<ConnectionPool>>,
|
||||
live_kit_client: Option<Arc<dyn live_kit_server::api::Client>>,
|
||||
executor: Executor,
|
||||
}
|
||||
|
||||
impl Session {
|
||||
@@ -184,6 +185,7 @@ impl Server {
|
||||
.add_request_handler(ping)
|
||||
.add_request_handler(create_room)
|
||||
.add_request_handler(join_room)
|
||||
.add_request_handler(rejoin_room)
|
||||
.add_message_handler(leave_room)
|
||||
.add_request_handler(call)
|
||||
.add_request_handler(cancel_call)
|
||||
@@ -215,6 +217,7 @@ impl Server {
|
||||
.add_request_handler(forward_project_request::<proto::PrepareRename>)
|
||||
.add_request_handler(forward_project_request::<proto::PerformRename>)
|
||||
.add_request_handler(forward_project_request::<proto::ReloadBuffers>)
|
||||
.add_request_handler(forward_project_request::<proto::SynchronizeBuffers>)
|
||||
.add_request_handler(forward_project_request::<proto::FormatBuffers>)
|
||||
.add_request_handler(forward_project_request::<proto::CreateProjectEntry>)
|
||||
.add_request_handler(forward_project_request::<proto::RenameProjectEntry>)
|
||||
@@ -249,16 +252,6 @@ impl Server {
|
||||
let live_kit_client = self.app_state.live_kit_client.clone();
|
||||
|
||||
let span = info_span!("start server");
|
||||
let span_enter = span.enter();
|
||||
|
||||
tracing::info!("begin deleting stale projects");
|
||||
app_state
|
||||
.db
|
||||
.delete_stale_projects(&app_state.config.zed_environment, server_id)
|
||||
.await?;
|
||||
tracing::info!("finish deleting stale projects");
|
||||
|
||||
drop(span_enter);
|
||||
self.executor.spawn_detached(
|
||||
async move {
|
||||
tracing::info!("waiting for cleanup timeout");
|
||||
@@ -354,7 +347,7 @@ impl Server {
|
||||
|
||||
app_state
|
||||
.db
|
||||
.delete_stale_servers(server_id, &app_state.config.zed_environment)
|
||||
.delete_stale_servers(&app_state.config.zed_environment, server_id)
|
||||
.await
|
||||
.trace_err();
|
||||
}
|
||||
@@ -529,7 +522,8 @@ impl Server {
|
||||
db: Arc::new(tokio::sync::Mutex::new(DbHandle(this.app_state.db.clone()))),
|
||||
peer: this.peer.clone(),
|
||||
connection_pool: this.connection_pool.clone(),
|
||||
live_kit_client: this.app_state.live_kit_client.clone()
|
||||
live_kit_client: this.app_state.live_kit_client.clone(),
|
||||
executor: executor.clone(),
|
||||
};
|
||||
update_user_contacts(user_id, &session).await?;
|
||||
|
||||
@@ -586,7 +580,7 @@ impl Server {
|
||||
|
||||
drop(foreground_message_handlers);
|
||||
tracing::info!(%user_id, %login, %connection_id, %address, "signing out");
|
||||
if let Err(error) = sign_out(session, teardown, executor).await {
|
||||
if let Err(error) = connection_lost(session, teardown, executor).await {
|
||||
tracing::error!(%user_id, %login, %connection_id, %address, ?error, "error signing out");
|
||||
}
|
||||
|
||||
@@ -678,15 +672,17 @@ impl<'a> Drop for ConnectionPoolGuard<'a> {
|
||||
}
|
||||
|
||||
fn broadcast<F>(
-   sender_id: ConnectionId,
+   sender_id: Option<ConnectionId>,
    receiver_ids: impl IntoIterator<Item = ConnectionId>,
    mut f: F,
) where
    F: FnMut(ConnectionId) -> anyhow::Result<()>,
{
    for receiver_id in receiver_ids {
-       if receiver_id != sender_id {
-           f(receiver_id).trace_err();
+       if Some(receiver_id) != sender_id {
+           if let Err(error) = f(receiver_id) {
+               tracing::error!("failed to send to {:?} {}", receiver_id, error);
+           }
        }
    }
}
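
// Illustrative call shapes, taken from call sites later in this diff: the sender is
// now optional, so room updates can fan out to every participant.
//
//   // Skip echoing back to the sender:
//   broadcast(Some(session.connection_id), guest_connection_ids.iter().copied(), |connection_id| {
//       session.peer.forward_send(session.connection_id, connection_id, message.clone())
//   });
//
//   // No sender to exclude (e.g. room_updated):
//   broadcast(None, peer_ids, |peer_id| peer.send(peer_id, update.clone()));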
|
||||
@@ -787,7 +783,7 @@ pub async fn handle_metrics(Extension(server): Extension<Arc<Server>>) -> Result
|
||||
}
|
||||
|
||||
#[instrument(err, skip(executor))]
|
||||
async fn sign_out(
|
||||
async fn connection_lost(
|
||||
session: Session,
|
||||
mut teardown: watch::Receiver<()>,
|
||||
executor: Executor,
|
||||
@@ -798,17 +794,12 @@ async fn sign_out(
|
||||
.await
|
||||
.remove_connection(session.connection_id)?;
|
||||
|
||||
if let Some(mut left_projects) = session
|
||||
session
|
||||
.db()
|
||||
.await
|
||||
.connection_lost(session.connection_id)
|
||||
.await
|
||||
.trace_err()
|
||||
{
|
||||
for left_project in mem::take(&mut *left_projects) {
|
||||
project_left(&left_project, &session);
|
||||
}
|
||||
}
|
||||
.trace_err();
|
||||
|
||||
futures::select_biased! {
|
||||
_ = executor.sleep(RECONNECT_TIMEOUT).fuse() => {
|
||||
@@ -941,6 +932,164 @@ async fn join_room(
|
||||
Ok(())
|
||||
}
|
||||
|
||||
async fn rejoin_room(
|
||||
request: proto::RejoinRoom,
|
||||
response: Response<proto::RejoinRoom>,
|
||||
session: Session,
|
||||
) -> Result<()> {
|
||||
{
|
||||
let mut rejoined_room = session
|
||||
.db()
|
||||
.await
|
||||
.rejoin_room(request, session.user_id, session.connection_id)
|
||||
.await?;
|
||||
|
||||
response.send(proto::RejoinRoomResponse {
|
||||
room: Some(rejoined_room.room.clone()),
|
||||
reshared_projects: rejoined_room
|
||||
.reshared_projects
|
||||
.iter()
|
||||
.map(|project| proto::ResharedProject {
|
||||
id: project.id.to_proto(),
|
||||
collaborators: project
|
||||
.collaborators
|
||||
.iter()
|
||||
.map(|collaborator| collaborator.to_proto())
|
||||
.collect(),
|
||||
})
|
||||
.collect(),
|
||||
rejoined_projects: rejoined_room
|
||||
.rejoined_projects
|
||||
.iter()
|
||||
.map(|rejoined_project| proto::RejoinedProject {
|
||||
id: rejoined_project.id.to_proto(),
|
||||
worktrees: rejoined_project
|
||||
.worktrees
|
||||
.iter()
|
||||
.map(|worktree| proto::WorktreeMetadata {
|
||||
id: worktree.id,
|
||||
root_name: worktree.root_name.clone(),
|
||||
visible: worktree.visible,
|
||||
abs_path: worktree.abs_path.clone(),
|
||||
})
|
||||
.collect(),
|
||||
collaborators: rejoined_project
|
||||
.collaborators
|
||||
.iter()
|
||||
.map(|collaborator| collaborator.to_proto())
|
||||
.collect(),
|
||||
language_servers: rejoined_project.language_servers.clone(),
|
||||
})
|
||||
.collect(),
|
||||
})?;
|
||||
room_updated(&rejoined_room.room, &session.peer);
|
||||
|
||||
for project in &rejoined_room.reshared_projects {
|
||||
for collaborator in &project.collaborators {
|
||||
session
|
||||
.peer
|
||||
.send(
|
||||
collaborator.connection_id,
|
||||
proto::UpdateProjectCollaborator {
|
||||
project_id: project.id.to_proto(),
|
||||
old_peer_id: Some(project.old_connection_id.into()),
|
||||
new_peer_id: Some(session.connection_id.into()),
|
||||
},
|
||||
)
|
||||
.trace_err();
|
||||
}
|
||||
|
||||
broadcast(
|
||||
Some(session.connection_id),
|
||||
project
|
||||
.collaborators
|
||||
.iter()
|
||||
.map(|collaborator| collaborator.connection_id),
|
||||
|connection_id| {
|
||||
session.peer.forward_send(
|
||||
session.connection_id,
|
||||
connection_id,
|
||||
proto::UpdateProject {
|
||||
project_id: project.id.to_proto(),
|
||||
worktrees: project.worktrees.clone(),
|
||||
},
|
||||
)
|
||||
},
|
||||
);
|
||||
}
|
||||
|
||||
for project in &rejoined_room.rejoined_projects {
|
||||
for collaborator in &project.collaborators {
|
||||
session
|
||||
.peer
|
||||
.send(
|
||||
collaborator.connection_id,
|
||||
proto::UpdateProjectCollaborator {
|
||||
project_id: project.id.to_proto(),
|
||||
old_peer_id: Some(project.old_connection_id.into()),
|
||||
new_peer_id: Some(session.connection_id.into()),
|
||||
},
|
||||
)
|
||||
.trace_err();
|
||||
}
|
||||
}
|
||||
|
||||
for project in &mut rejoined_room.rejoined_projects {
|
||||
for worktree in mem::take(&mut project.worktrees) {
|
||||
#[cfg(any(test, feature = "test-support"))]
|
||||
const MAX_CHUNK_SIZE: usize = 2;
|
||||
#[cfg(not(any(test, feature = "test-support")))]
|
||||
const MAX_CHUNK_SIZE: usize = 256;
|
||||
|
||||
// Stream this worktree's entries.
|
||||
let message = proto::UpdateWorktree {
|
||||
project_id: project.id.to_proto(),
|
||||
worktree_id: worktree.id,
|
||||
abs_path: worktree.abs_path.clone(),
|
||||
root_name: worktree.root_name,
|
||||
updated_entries: worktree.updated_entries,
|
||||
removed_entries: worktree.removed_entries,
|
||||
scan_id: worktree.scan_id,
|
||||
is_last_update: worktree.completed_scan_id == worktree.scan_id,
|
||||
};
|
||||
for update in proto::split_worktree_update(message, MAX_CHUNK_SIZE) {
|
||||
session.peer.send(session.connection_id, update.clone())?;
|
||||
}
|
||||
|
||||
// Stream this worktree's diagnostics.
|
||||
for summary in worktree.diagnostic_summaries {
|
||||
session.peer.send(
|
||||
session.connection_id,
|
||||
proto::UpdateDiagnosticSummary {
|
||||
project_id: project.id.to_proto(),
|
||||
worktree_id: worktree.id,
|
||||
summary: Some(summary),
|
||||
},
|
||||
)?;
|
||||
}
|
||||
}
|
||||
|
||||
for language_server in &project.language_servers {
|
||||
session.peer.send(
|
||||
session.connection_id,
|
||||
proto::UpdateLanguageServer {
|
||||
project_id: project.id.to_proto(),
|
||||
language_server_id: language_server.id,
|
||||
variant: Some(
|
||||
proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(
|
||||
proto::LspDiskBasedDiagnosticsUpdated {},
|
||||
),
|
||||
),
|
||||
},
|
||||
)?;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
update_user_contacts(session.user_id, &session).await?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
async fn leave_room(_message: proto::LeaveRoom, session: Session) -> Result<()> {
|
||||
leave_room_for_session(&session).await
|
||||
}
|
||||
@@ -1132,7 +1281,7 @@ async fn unshare_project(message: proto::UnshareProject, session: Session) -> Re
|
||||
.await?;
|
||||
|
||||
broadcast(
|
||||
session.connection_id,
|
||||
Some(session.connection_id),
|
||||
guest_connection_ids.iter().copied(),
|
||||
|conn_id| session.peer.send(conn_id, message.clone()),
|
||||
);
|
||||
@@ -1160,18 +1309,8 @@ async fn join_project(
|
||||
let collaborators = project
|
||||
.collaborators
|
||||
.iter()
|
||||
.map(|collaborator| {
|
||||
let peer_id = proto::PeerId {
|
||||
owner_id: collaborator.connection_server_id.0 as u32,
|
||||
id: collaborator.connection_id as u32,
|
||||
};
|
||||
proto::Collaborator {
|
||||
peer_id: Some(peer_id),
|
||||
replica_id: collaborator.replica_id.0 as u32,
|
||||
user_id: collaborator.user_id.to_proto(),
|
||||
}
|
||||
})
|
||||
.filter(|collaborator| collaborator.peer_id != Some(session.connection_id.into()))
|
||||
.filter(|collaborator| collaborator.connection_id != session.connection_id)
|
||||
.map(|collaborator| collaborator.to_proto())
|
||||
.collect::<Vec<_>>();
|
||||
let worktrees = project
|
||||
.worktrees
|
||||
@@ -1224,7 +1363,7 @@ async fn join_project(
|
||||
updated_entries: worktree.entries,
|
||||
removed_entries: Default::default(),
|
||||
scan_id: worktree.scan_id,
|
||||
is_last_update: worktree.is_complete,
|
||||
is_last_update: worktree.scan_id == worktree.completed_scan_id,
|
||||
};
|
||||
for update in proto::split_worktree_update(message, MAX_CHUNK_SIZE) {
|
||||
session.peer.send(session.connection_id, update.clone())?;
|
||||
@@ -1293,7 +1432,7 @@ async fn update_project(
|
||||
.update_project(project_id, session.connection_id, &request.worktrees)
|
||||
.await?;
|
||||
broadcast(
|
||||
session.connection_id,
|
||||
Some(session.connection_id),
|
||||
guest_connection_ids.iter().copied(),
|
||||
|connection_id| {
|
||||
session
|
||||
@@ -1319,7 +1458,7 @@ async fn update_worktree(
|
||||
.await?;
|
||||
|
||||
broadcast(
|
||||
session.connection_id,
|
||||
Some(session.connection_id),
|
||||
guest_connection_ids.iter().copied(),
|
||||
|connection_id| {
|
||||
session
|
||||
@@ -1342,7 +1481,7 @@ async fn update_diagnostic_summary(
|
||||
.await?;
|
||||
|
||||
broadcast(
|
||||
session.connection_id,
|
||||
Some(session.connection_id),
|
||||
guest_connection_ids.iter().copied(),
|
||||
|connection_id| {
|
||||
session
|
||||
@@ -1365,7 +1504,7 @@ async fn start_language_server(
|
||||
.await?;
|
||||
|
||||
broadcast(
|
||||
session.connection_id,
|
||||
Some(session.connection_id),
|
||||
guest_connection_ids.iter().copied(),
|
||||
|connection_id| {
|
||||
session
|
||||
@@ -1380,6 +1519,7 @@ async fn update_language_server(
|
||||
request: proto::UpdateLanguageServer,
|
||||
session: Session,
|
||||
) -> Result<()> {
|
||||
session.executor.record_backtrace();
|
||||
let project_id = ProjectId::from_proto(request.project_id);
|
||||
let project_connection_ids = session
|
||||
.db()
|
||||
@@ -1387,7 +1527,7 @@ async fn update_language_server(
|
||||
.project_connection_ids(project_id, session.connection_id)
|
||||
.await?;
|
||||
broadcast(
|
||||
session.connection_id,
|
||||
Some(session.connection_id),
|
||||
project_connection_ids.iter().copied(),
|
||||
|connection_id| {
|
||||
session
|
||||
@@ -1406,6 +1546,7 @@ async fn forward_project_request<T>(
|
||||
where
|
||||
T: EntityMessage + RequestMessage,
|
||||
{
|
||||
session.executor.record_backtrace();
|
||||
let project_id = ProjectId::from_proto(request.remote_entity_id());
|
||||
let host_connection_id = {
|
||||
let collaborators = session
|
||||
@@ -1413,14 +1554,11 @@ where
|
||||
.await
|
||||
.project_collaborators(project_id, session.connection_id)
|
||||
.await?;
|
||||
let host = collaborators
|
||||
collaborators
|
||||
.iter()
|
||||
.find(|collaborator| collaborator.is_host)
|
||||
.ok_or_else(|| anyhow!("host not found"))?;
|
||||
ConnectionId {
|
||||
owner_id: host.connection_server_id.0 as u32,
|
||||
id: host.connection_id as u32,
|
||||
}
|
||||
.ok_or_else(|| anyhow!("host not found"))?
|
||||
.connection_id
|
||||
};
|
||||
|
||||
let payload = session
|
||||
@@ -1444,14 +1582,11 @@ async fn save_buffer(
|
||||
.await
|
||||
.project_collaborators(project_id, session.connection_id)
|
||||
.await?;
|
||||
let host = collaborators
|
||||
collaborators
|
||||
.iter()
|
||||
.find(|collaborator| collaborator.is_host)
|
||||
.ok_or_else(|| anyhow!("host not found"))?;
|
||||
ConnectionId {
|
||||
owner_id: host.connection_server_id.0 as u32,
|
||||
id: host.connection_id as u32,
|
||||
}
|
||||
.ok_or_else(|| anyhow!("host not found"))?
|
||||
.connection_id
|
||||
};
|
||||
let response_payload = session
|
||||
.peer
|
||||
@@ -1463,22 +1598,19 @@ async fn save_buffer(
|
||||
.await
|
||||
.project_collaborators(project_id, session.connection_id)
|
||||
.await?;
|
||||
collaborators.retain(|collaborator| {
|
||||
let collaborator_connection = ConnectionId {
|
||||
owner_id: collaborator.connection_server_id.0 as u32,
|
||||
id: collaborator.connection_id as u32,
|
||||
};
|
||||
collaborator_connection != session.connection_id
|
||||
});
|
||||
let project_connection_ids = collaborators.iter().map(|collaborator| ConnectionId {
|
||||
owner_id: collaborator.connection_server_id.0 as u32,
|
||||
id: collaborator.connection_id as u32,
|
||||
});
|
||||
broadcast(host_connection_id, project_connection_ids, |conn_id| {
|
||||
session
|
||||
.peer
|
||||
.forward_send(host_connection_id, conn_id, response_payload.clone())
|
||||
});
|
||||
collaborators.retain(|collaborator| collaborator.connection_id != session.connection_id);
|
||||
let project_connection_ids = collaborators
|
||||
.iter()
|
||||
.map(|collaborator| collaborator.connection_id);
|
||||
broadcast(
|
||||
Some(host_connection_id),
|
||||
project_connection_ids,
|
||||
|conn_id| {
|
||||
session
|
||||
.peer
|
||||
.forward_send(host_connection_id, conn_id, response_payload.clone())
|
||||
},
|
||||
);
|
||||
response.send(response_payload)?;
|
||||
Ok(())
|
||||
}
|
||||
@@ -1487,6 +1619,7 @@ async fn create_buffer_for_peer(
|
||||
request: proto::CreateBufferForPeer,
|
||||
session: Session,
|
||||
) -> Result<()> {
|
||||
session.executor.record_backtrace();
|
||||
let peer_id = request.peer_id.ok_or_else(|| anyhow!("invalid peer id"))?;
|
||||
session
|
||||
.peer
|
||||
@@ -1499,6 +1632,7 @@ async fn update_buffer(
|
||||
response: Response<proto::UpdateBuffer>,
|
||||
session: Session,
|
||||
) -> Result<()> {
|
||||
session.executor.record_backtrace();
|
||||
let project_id = ProjectId::from_proto(request.project_id);
|
||||
let project_connection_ids = session
|
||||
.db()
|
||||
@@ -1506,8 +1640,10 @@ async fn update_buffer(
|
||||
.project_connection_ids(project_id, session.connection_id)
|
||||
.await?;
|
||||
|
||||
session.executor.record_backtrace();
|
||||
|
||||
broadcast(
|
||||
session.connection_id,
|
||||
Some(session.connection_id),
|
||||
project_connection_ids.iter().copied(),
|
||||
|connection_id| {
|
||||
session
|
||||
@@ -1528,7 +1664,7 @@ async fn update_buffer_file(request: proto::UpdateBufferFile, session: Session)
|
||||
.await?;
|
||||
|
||||
broadcast(
|
||||
session.connection_id,
|
||||
Some(session.connection_id),
|
||||
project_connection_ids.iter().copied(),
|
||||
|connection_id| {
|
||||
session
|
||||
@@ -1547,7 +1683,7 @@ async fn buffer_reloaded(request: proto::BufferReloaded, session: Session) -> Re
|
||||
.project_connection_ids(project_id, session.connection_id)
|
||||
.await?;
|
||||
broadcast(
|
||||
session.connection_id,
|
||||
Some(session.connection_id),
|
||||
project_connection_ids.iter().copied(),
|
||||
|connection_id| {
|
||||
session
|
||||
@@ -1566,7 +1702,7 @@ async fn buffer_saved(request: proto::BufferSaved, session: Session) -> Result<(
|
||||
.project_connection_ids(project_id, session.connection_id)
|
||||
.await?;
|
||||
broadcast(
|
||||
session.connection_id,
|
||||
Some(session.connection_id),
|
||||
project_connection_ids.iter().copied(),
|
||||
|connection_id| {
|
||||
session
|
||||
@@ -1825,23 +1961,31 @@ async fn remove_contact(
|
||||
let requester_id = session.user_id;
|
||||
let responder_id = UserId::from_proto(request.user_id);
|
||||
let db = session.db().await;
|
||||
db.remove_contact(requester_id, responder_id).await?;
|
||||
let contact_accepted = db.remove_contact(requester_id, responder_id).await?;
|
||||
|
||||
let pool = session.connection_pool().await;
|
||||
// Update outgoing contact requests of requester
|
||||
let mut update = proto::UpdateContacts::default();
|
||||
update
|
||||
.remove_outgoing_requests
|
||||
.push(responder_id.to_proto());
|
||||
if contact_accepted {
|
||||
update.remove_contacts.push(responder_id.to_proto());
|
||||
} else {
|
||||
update
|
||||
.remove_outgoing_requests
|
||||
.push(responder_id.to_proto());
|
||||
}
|
||||
for connection_id in pool.user_connection_ids(requester_id) {
|
||||
session.peer.send(connection_id, update.clone())?;
|
||||
}
|
||||
|
||||
// Update incoming contact requests of responder
|
||||
let mut update = proto::UpdateContacts::default();
|
||||
update
|
||||
.remove_incoming_requests
|
||||
.push(requester_id.to_proto());
|
||||
if contact_accepted {
|
||||
update.remove_contacts.push(requester_id.to_proto());
|
||||
} else {
|
||||
update
|
||||
.remove_incoming_requests
|
||||
.push(requester_id.to_proto());
|
||||
}
|
||||
for connection_id in pool.user_connection_ids(responder_id) {
|
||||
session.peer.send(connection_id, update.clone())?;
|
||||
}
|
||||
@@ -1858,7 +2002,7 @@ async fn update_diff_base(request: proto::UpdateDiffBase, session: Session) -> R
|
||||
.project_connection_ids(project_id, session.connection_id)
|
||||
.await?;
|
||||
broadcast(
|
||||
session.connection_id,
|
||||
Some(session.connection_id),
|
||||
project_connection_ids.iter().copied(),
|
||||
|connection_id| {
|
||||
session
|
||||
@@ -1968,21 +2112,20 @@ fn contact_for_user(
|
||||
}
|
||||
|
||||
fn room_updated(room: &proto::Room, peer: &Peer) {
|
||||
for participant in &room.participants {
|
||||
if let Some(peer_id) = participant
|
||||
.peer_id
|
||||
.ok_or_else(|| anyhow!("invalid participant peer id"))
|
||||
.trace_err()
|
||||
{
|
||||
broadcast(
|
||||
None,
|
||||
room.participants
|
||||
.iter()
|
||||
.filter_map(|participant| Some(participant.peer_id?.into())),
|
||||
|peer_id| {
|
||||
peer.send(
|
||||
peer_id.into(),
|
||||
proto::RoomUpdated {
|
||||
room: Some(room.clone()),
|
||||
},
|
||||
)
|
||||
.trace_err();
|
||||
}
|
||||
}
|
||||
},
|
||||
);
|
||||
}
|
||||
|
||||
async fn update_user_contacts(user_id: UserId, session: &Session) -> Result<()> {
|
||||
@@ -2103,16 +2246,6 @@ fn project_left(project: &db::LeftProject, session: &Session) {
|
||||
.trace_err();
|
||||
}
|
||||
}
|
||||
|
||||
session
|
||||
.peer
|
||||
.send(
|
||||
session.connection_id,
|
||||
proto::UnshareProject {
|
||||
project_id: project.id.to_proto(),
|
||||
},
|
||||
)
|
||||
.trace_err();
|
||||
}
|
||||
|
||||
pub trait ResultExt {
|
||||
|
||||
crates/collab/src/tests.rs (new file, 462 lines)
@@ -0,0 +1,462 @@
|
||||
use crate::{
|
||||
db::{NewUserParams, TestDb, UserId},
|
||||
executor::Executor,
|
||||
rpc::{Server, CLEANUP_TIMEOUT},
|
||||
AppState,
|
||||
};
|
||||
use anyhow::anyhow;
|
||||
use call::ActiveCall;
|
||||
use client::{
|
||||
self, proto::PeerId, test::FakeHttpClient, Client, Connection, Credentials,
|
||||
EstablishConnectionError, UserStore,
|
||||
};
|
||||
use collections::{HashMap, HashSet};
|
||||
use fs::FakeFs;
|
||||
use futures::{channel::oneshot, StreamExt as _};
|
||||
use gpui::{
|
||||
executor::Deterministic, test::EmptyView, ModelHandle, Task, TestAppContext, ViewHandle,
|
||||
};
|
||||
use language::LanguageRegistry;
|
||||
use parking_lot::Mutex;
|
||||
use project::{Project, WorktreeId};
|
||||
use settings::Settings;
|
||||
use std::{
|
||||
env,
|
||||
ops::Deref,
|
||||
path::{Path, PathBuf},
|
||||
sync::{
|
||||
atomic::{AtomicBool, AtomicUsize, Ordering::SeqCst},
|
||||
Arc,
|
||||
},
|
||||
};
|
||||
use theme::ThemeRegistry;
|
||||
use workspace::Workspace;
|
||||
|
||||
mod integration_tests;
|
||||
mod randomized_integration_tests;
|
||||
|
||||
struct TestServer {
|
||||
app_state: Arc<AppState>,
|
||||
server: Arc<Server>,
|
||||
connection_killers: Arc<Mutex<HashMap<PeerId, Arc<AtomicBool>>>>,
|
||||
forbid_connections: Arc<AtomicBool>,
|
||||
_test_db: TestDb,
|
||||
test_live_kit_server: Arc<live_kit_client::TestServer>,
|
||||
}
|
||||
|
||||
impl TestServer {
|
||||
async fn start(deterministic: &Arc<Deterministic>) -> Self {
|
||||
static NEXT_LIVE_KIT_SERVER_ID: AtomicUsize = AtomicUsize::new(0);
|
||||
|
||||
let use_postgres = env::var("USE_POSTGRES").ok();
|
||||
let use_postgres = use_postgres.as_deref();
|
||||
let test_db = if use_postgres == Some("true") || use_postgres == Some("1") {
|
||||
TestDb::postgres(deterministic.build_background())
|
||||
} else {
|
||||
TestDb::sqlite(deterministic.build_background())
|
||||
};
|
||||
let live_kit_server_id = NEXT_LIVE_KIT_SERVER_ID.fetch_add(1, SeqCst);
|
||||
let live_kit_server = live_kit_client::TestServer::create(
|
||||
format!("http://livekit.{}.test", live_kit_server_id),
|
||||
format!("devkey-{}", live_kit_server_id),
|
||||
format!("secret-{}", live_kit_server_id),
|
||||
deterministic.build_background(),
|
||||
)
|
||||
.unwrap();
|
||||
let app_state = Self::build_app_state(&test_db, &live_kit_server).await;
|
||||
let epoch = app_state
|
||||
.db
|
||||
.create_server(&app_state.config.zed_environment)
|
||||
.await
|
||||
.unwrap();
|
||||
let server = Server::new(
|
||||
epoch,
|
||||
app_state.clone(),
|
||||
Executor::Deterministic(deterministic.build_background()),
|
||||
);
|
||||
server.start().await.unwrap();
|
||||
// Advance clock to ensure the server's cleanup task is finished.
|
||||
deterministic.advance_clock(CLEANUP_TIMEOUT);
|
||||
Self {
|
||||
app_state,
|
||||
server,
|
||||
connection_killers: Default::default(),
|
||||
forbid_connections: Default::default(),
|
||||
_test_db: test_db,
|
||||
test_live_kit_server: live_kit_server,
|
||||
}
|
||||
}
|
||||
|
||||
async fn reset(&self) {
|
||||
self.app_state.db.reset();
|
||||
let epoch = self
|
||||
.app_state
|
||||
.db
|
||||
.create_server(&self.app_state.config.zed_environment)
|
||||
.await
|
||||
.unwrap();
|
||||
self.server.reset(epoch);
|
||||
}
|
||||
|
||||
async fn create_client(&mut self, cx: &mut TestAppContext, name: &str) -> TestClient {
|
||||
cx.update(|cx| {
|
||||
cx.set_global(Settings::test(cx));
|
||||
});
|
||||
|
||||
let http = FakeHttpClient::with_404_response();
|
||||
let user_id = if let Ok(Some(user)) = self
|
||||
.app_state
|
||||
.db
|
||||
.get_user_by_github_account(name, None)
|
||||
.await
|
||||
{
|
||||
user.id
|
||||
} else {
|
||||
self.app_state
|
||||
.db
|
||||
.create_user(
|
||||
&format!("{name}@example.com"),
|
||||
false,
|
||||
NewUserParams {
|
||||
github_login: name.into(),
|
||||
github_user_id: 0,
|
||||
invite_count: 0,
|
||||
},
|
||||
)
|
||||
.await
|
||||
.expect("creating user failed")
|
||||
.user_id
|
||||
};
|
||||
let client_name = name.to_string();
|
||||
let mut client = cx.read(|cx| Client::new(http.clone(), cx));
|
||||
let server = self.server.clone();
|
||||
let db = self.app_state.db.clone();
|
||||
let connection_killers = self.connection_killers.clone();
|
||||
let forbid_connections = self.forbid_connections.clone();
|
||||
|
||||
Arc::get_mut(&mut client)
|
||||
.unwrap()
|
||||
.set_id(user_id.0 as usize)
|
||||
.override_authenticate(move |cx| {
|
||||
cx.spawn(|_| async move {
|
||||
let access_token = "the-token".to_string();
|
||||
Ok(Credentials {
|
||||
user_id: user_id.0 as u64,
|
||||
access_token,
|
||||
})
|
||||
})
|
||||
})
|
||||
.override_establish_connection(move |credentials, cx| {
|
||||
assert_eq!(credentials.user_id, user_id.0 as u64);
|
||||
assert_eq!(credentials.access_token, "the-token");
|
||||
|
||||
let server = server.clone();
|
||||
let db = db.clone();
|
||||
let connection_killers = connection_killers.clone();
|
||||
let forbid_connections = forbid_connections.clone();
|
||||
let client_name = client_name.clone();
|
||||
cx.spawn(move |cx| async move {
|
||||
if forbid_connections.load(SeqCst) {
|
||||
Err(EstablishConnectionError::other(anyhow!(
|
||||
"server is forbidding connections"
|
||||
)))
|
||||
} else {
|
||||
let (client_conn, server_conn, killed) =
|
||||
Connection::in_memory(cx.background());
|
||||
let (connection_id_tx, connection_id_rx) = oneshot::channel();
|
||||
let user = db
|
||||
.get_user_by_id(user_id)
|
||||
.await
|
||||
.expect("retrieving user failed")
|
||||
.unwrap();
|
||||
cx.background()
|
||||
.spawn(server.handle_connection(
|
||||
server_conn,
|
||||
client_name,
|
||||
user,
|
||||
Some(connection_id_tx),
|
||||
Executor::Deterministic(cx.background()),
|
||||
))
|
||||
.detach();
|
||||
let connection_id = connection_id_rx.await.unwrap();
|
||||
connection_killers
|
||||
.lock()
|
||||
.insert(connection_id.into(), killed);
|
||||
Ok(client_conn)
|
||||
}
|
||||
})
|
||||
});
|
||||
|
||||
let fs = FakeFs::new(cx.background());
|
||||
let user_store = cx.add_model(|cx| UserStore::new(client.clone(), http, cx));
|
||||
let app_state = Arc::new(workspace::AppState {
|
||||
client: client.clone(),
|
||||
user_store: user_store.clone(),
|
||||
languages: Arc::new(LanguageRegistry::new(Task::ready(()))),
|
||||
themes: ThemeRegistry::new((), cx.font_cache()),
|
||||
fs: fs.clone(),
|
||||
build_window_options: |_, _, _| Default::default(),
|
||||
initialize_workspace: |_, _, _| unimplemented!(),
|
||||
dock_default_item_factory: |_, _| unimplemented!(),
|
||||
});
|
||||
|
||||
Project::init(&client);
|
||||
cx.update(|cx| {
|
||||
workspace::init(app_state.clone(), cx);
|
||||
call::init(client.clone(), user_store.clone(), cx);
|
||||
});
|
||||
|
||||
client
|
||||
.authenticate_and_connect(false, &cx.to_async())
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
let client = TestClient {
|
||||
client,
|
||||
username: name.to_string(),
|
||||
local_projects: Default::default(),
|
||||
remote_projects: Default::default(),
|
||||
next_root_dir_id: 0,
|
||||
user_store,
|
||||
fs,
|
||||
language_registry: Arc::new(LanguageRegistry::test()),
|
||||
buffers: Default::default(),
|
||||
};
|
||||
client.wait_for_current_user(cx).await;
|
||||
client
|
||||
}
|
||||
|
||||
fn disconnect_client(&self, peer_id: PeerId) {
|
||||
self.connection_killers
|
||||
.lock()
|
||||
.remove(&peer_id)
|
||||
.unwrap()
|
||||
.store(true, SeqCst);
|
||||
}
|
||||
|
||||
fn forbid_connections(&self) {
|
||||
self.forbid_connections.store(true, SeqCst);
|
||||
}
|
||||
|
||||
fn allow_connections(&self) {
|
||||
self.forbid_connections.store(false, SeqCst);
|
||||
}
|
||||
|
||||
async fn make_contacts(&self, clients: &mut [(&TestClient, &mut TestAppContext)]) {
|
||||
for ix in 1..clients.len() {
|
||||
let (left, right) = clients.split_at_mut(ix);
|
||||
let (client_a, cx_a) = left.last_mut().unwrap();
|
||||
for (client_b, cx_b) in right {
|
||||
client_a
|
||||
.user_store
|
||||
.update(*cx_a, |store, cx| {
|
||||
store.request_contact(client_b.user_id().unwrap(), cx)
|
||||
})
|
||||
.await
|
||||
.unwrap();
|
||||
cx_a.foreground().run_until_parked();
|
||||
client_b
|
||||
.user_store
|
||||
.update(*cx_b, |store, cx| {
|
||||
store.respond_to_contact_request(client_a.user_id().unwrap(), true, cx)
|
||||
})
|
||||
.await
|
||||
.unwrap();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
async fn create_room(&self, clients: &mut [(&TestClient, &mut TestAppContext)]) {
|
||||
self.make_contacts(clients).await;
|
||||
|
||||
let (left, right) = clients.split_at_mut(1);
|
||||
let (_client_a, cx_a) = &mut left[0];
|
||||
let active_call_a = cx_a.read(ActiveCall::global);
|
||||
|
||||
for (client_b, cx_b) in right {
|
||||
let user_id_b = client_b.current_user_id(*cx_b).to_proto();
|
||||
active_call_a
|
||||
.update(*cx_a, |call, cx| call.invite(user_id_b, None, cx))
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
cx_b.foreground().run_until_parked();
|
||||
let active_call_b = cx_b.read(ActiveCall::global);
|
||||
active_call_b
|
||||
.update(*cx_b, |call, cx| call.accept_incoming(cx))
|
||||
.await
|
||||
.unwrap();
|
||||
}
|
||||
}
|
||||
|
||||
async fn build_app_state(
|
||||
test_db: &TestDb,
|
||||
fake_server: &live_kit_client::TestServer,
|
||||
) -> Arc<AppState> {
|
||||
Arc::new(AppState {
|
||||
db: test_db.db().clone(),
|
||||
live_kit_client: Some(Arc::new(fake_server.create_api_client())),
|
||||
config: Default::default(),
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
impl Deref for TestServer {
|
||||
type Target = Server;
|
||||
|
||||
fn deref(&self) -> &Self::Target {
|
||||
&self.server
|
||||
}
|
||||
}
|
||||
|
||||
impl Drop for TestServer {
|
||||
fn drop(&mut self) {
|
||||
self.server.teardown();
|
||||
self.test_live_kit_server.teardown().unwrap();
|
||||
}
|
||||
}
|
||||
|
||||
struct TestClient {
|
||||
client: Arc<Client>,
|
||||
username: String,
|
||||
local_projects: Vec<ModelHandle<Project>>,
|
||||
remote_projects: Vec<ModelHandle<Project>>,
|
||||
next_root_dir_id: usize,
|
||||
pub user_store: ModelHandle<UserStore>,
|
||||
language_registry: Arc<LanguageRegistry>,
|
||||
fs: Arc<FakeFs>,
|
||||
buffers: HashMap<ModelHandle<Project>, HashSet<ModelHandle<language::Buffer>>>,
|
||||
}
|
||||
|
||||
impl Deref for TestClient {
|
||||
type Target = Arc<Client>;
|
||||
|
||||
fn deref(&self) -> &Self::Target {
|
||||
&self.client
|
||||
}
|
||||
}
|
||||
|
||||
struct ContactsSummary {
|
||||
pub current: Vec<String>,
|
||||
pub outgoing_requests: Vec<String>,
|
||||
pub incoming_requests: Vec<String>,
|
||||
}
|
||||
|
||||
impl TestClient {
|
||||
pub fn current_user_id(&self, cx: &TestAppContext) -> UserId {
|
||||
UserId::from_proto(
|
||||
self.user_store
|
||||
.read_with(cx, |user_store, _| user_store.current_user().unwrap().id),
|
||||
)
|
||||
}
|
||||
|
||||
async fn wait_for_current_user(&self, cx: &TestAppContext) {
|
||||
let mut authed_user = self
|
||||
.user_store
|
||||
.read_with(cx, |user_store, _| user_store.watch_current_user());
|
||||
while authed_user.next().await.unwrap().is_none() {}
|
||||
}
|
||||
|
||||
async fn clear_contacts(&self, cx: &mut TestAppContext) {
|
||||
self.user_store
|
||||
.update(cx, |store, _| store.clear_contacts())
|
||||
.await;
|
||||
}
|
||||
|
||||
fn summarize_contacts(&self, cx: &TestAppContext) -> ContactsSummary {
|
||||
self.user_store.read_with(cx, |store, _| ContactsSummary {
|
||||
current: store
|
||||
.contacts()
|
||||
.iter()
|
||||
.map(|contact| contact.user.github_login.clone())
|
||||
.collect(),
|
||||
outgoing_requests: store
|
||||
.outgoing_contact_requests()
|
||||
.iter()
|
||||
.map(|user| user.github_login.clone())
|
||||
.collect(),
|
||||
incoming_requests: store
|
||||
.incoming_contact_requests()
|
||||
.iter()
|
||||
.map(|user| user.github_login.clone())
|
||||
.collect(),
|
||||
})
|
||||
}
|
||||
|
||||
async fn build_local_project(
|
||||
&self,
|
||||
root_path: impl AsRef<Path>,
|
||||
cx: &mut TestAppContext,
|
||||
) -> (ModelHandle<Project>, WorktreeId) {
|
||||
let project = cx.update(|cx| {
|
||||
Project::local(
|
||||
self.client.clone(),
|
||||
self.user_store.clone(),
|
||||
self.language_registry.clone(),
|
||||
self.fs.clone(),
|
||||
cx,
|
||||
)
|
||||
});
|
||||
let (worktree, _) = project
|
||||
.update(cx, |p, cx| {
|
||||
p.find_or_create_local_worktree(root_path, true, cx)
|
||||
})
|
||||
.await
|
||||
.unwrap();
|
||||
worktree
|
||||
.read_with(cx, |tree, _| tree.as_local().unwrap().scan_complete())
|
||||
.await;
|
||||
(project, worktree.read_with(cx, |tree, _| tree.id()))
|
||||
}
|
||||
|
||||
async fn build_remote_project(
|
||||
&self,
|
||||
host_project_id: u64,
|
||||
guest_cx: &mut TestAppContext,
|
||||
) -> ModelHandle<Project> {
|
||||
let active_call = guest_cx.read(ActiveCall::global);
|
||||
let room = active_call.read_with(guest_cx, |call, _| call.room().unwrap().clone());
|
||||
room.update(guest_cx, |room, cx| {
|
||||
room.join_project(
|
||||
host_project_id,
|
||||
self.language_registry.clone(),
|
||||
self.fs.clone(),
|
||||
cx,
|
||||
)
|
||||
})
|
||||
.await
|
||||
.unwrap()
|
||||
}
|
||||
|
||||
fn build_workspace(
|
||||
&self,
|
||||
project: &ModelHandle<Project>,
|
||||
cx: &mut TestAppContext,
|
||||
) -> ViewHandle<Workspace> {
|
||||
let (_, root_view) = cx.add_window(|_| EmptyView);
|
||||
cx.add_view(&root_view, |cx| {
|
||||
Workspace::new(
|
||||
Default::default(),
|
||||
0,
|
||||
project.clone(),
|
||||
|_, _| unimplemented!(),
|
||||
cx,
|
||||
)
|
||||
})
|
||||
}
|
||||
|
||||
fn create_new_root_dir(&mut self) -> PathBuf {
|
||||
format!(
|
||||
"/{}-root-{}",
|
||||
self.username,
|
||||
util::post_inc(&mut self.next_root_dir_id)
|
||||
)
|
||||
.into()
|
||||
}
|
||||
}
|
||||
|
||||
impl Drop for TestClient {
|
||||
fn drop(&mut self) {
|
||||
self.client.teardown();
|
||||
}
|
||||
}
|
||||
File diff suppressed because it is too large
Load Diff
1228
crates/collab/src/tests/randomized_integration_tests.rs
Normal file
1228
crates/collab/src/tests/randomized_integration_tests.rs
Normal file
File diff suppressed because it is too large
Load Diff
@@ -2,6 +2,7 @@
|
||||
name = "collab_ui"
|
||||
version = "0.1.0"
|
||||
edition = "2021"
|
||||
publish = false
|
||||
|
||||
[lib]
|
||||
path = "src/collab_ui.rs"
|
||||
|
||||
@@ -7,10 +7,10 @@ mod incoming_call_notification;
|
||||
mod notifications;
|
||||
mod project_shared_notification;
|
||||
|
||||
use anyhow::anyhow;
|
||||
use call::ActiveCall;
|
||||
pub use collab_titlebar_item::{CollabTitlebarItem, ToggleCollaborationMenu};
|
||||
use gpui::MutableAppContext;
|
||||
use project::Project;
|
||||
use std::sync::Arc;
|
||||
use workspace::{AppState, JoinProject, ToggleFollow, Workspace};
|
||||
|
||||
@@ -39,27 +39,35 @@ pub fn init(app_state: Arc<AppState>, cx: &mut MutableAppContext) {
|
||||
let workspace = if let Some(existing_workspace) = existing_workspace {
|
||||
existing_workspace
|
||||
} else {
|
||||
let project = Project::remote(
|
||||
project_id,
|
||||
app_state.client.clone(),
|
||||
app_state.user_store.clone(),
|
||||
app_state.languages.clone(),
|
||||
app_state.fs.clone(),
|
||||
cx.clone(),
|
||||
)
|
||||
.await?;
|
||||
let active_call = cx.read(ActiveCall::global);
|
||||
let room = active_call
|
||||
.read_with(&cx, |call, _| call.room().cloned())
|
||||
.ok_or_else(|| anyhow!("not in a call"))?;
|
||||
let project = room
|
||||
.update(&mut cx, |room, cx| {
|
||||
room.join_project(
|
||||
project_id,
|
||||
app_state.languages.clone(),
|
||||
app_state.fs.clone(),
|
||||
cx,
|
||||
)
|
||||
})
|
||||
.await?;
|
||||
|
||||
let (_, workspace) = cx.add_window((app_state.build_window_options)(), |cx| {
|
||||
let mut workspace = Workspace::new(
|
||||
Default::default(),
|
||||
0,
|
||||
project,
|
||||
app_state.dock_default_item_factory,
|
||||
cx,
|
||||
);
|
||||
(app_state.initialize_workspace)(&mut workspace, &app_state, cx);
|
||||
workspace
|
||||
});
|
||||
let (_, workspace) = cx.add_window(
|
||||
(app_state.build_window_options)(None, None, cx.platform().as_ref()),
|
||||
|cx| {
|
||||
let mut workspace = Workspace::new(
|
||||
Default::default(),
|
||||
0,
|
||||
project,
|
||||
app_state.dock_default_item_factory,
|
||||
cx,
|
||||
);
|
||||
(app_state.initialize_workspace)(&mut workspace, &app_state, cx);
|
||||
workspace
|
||||
},
|
||||
);
|
||||
workspace
|
||||
};
|
||||
|
||||
|
||||
@@ -1,20 +1,22 @@
|
||||
use std::{mem, sync::Arc};
|
||||
|
||||
use crate::contacts_popover;
|
||||
use call::ActiveCall;
|
||||
use client::{proto::PeerId, Contact, User, UserStore};
|
||||
use editor::{Cancel, Editor};
|
||||
use futures::StreamExt;
|
||||
use fuzzy::{match_strings, StringMatchCandidate};
|
||||
use gpui::{
|
||||
elements::*,
|
||||
geometry::{rect::RectF, vector::vec2f},
|
||||
impl_actions, impl_internal_actions, keymap, AppContext, CursorStyle, Entity, ModelHandle,
|
||||
MouseButton, MutableAppContext, RenderContext, Subscription, View, ViewContext, ViewHandle,
|
||||
impl_actions, impl_internal_actions,
|
||||
keymap_matcher::KeymapContext,
|
||||
AppContext, CursorStyle, Entity, ModelHandle, MouseButton, MutableAppContext, PromptLevel,
|
||||
RenderContext, Subscription, View, ViewContext, ViewHandle,
|
||||
};
|
||||
use menu::{Confirm, SelectNext, SelectPrev};
|
||||
use project::Project;
|
||||
use serde::Deserialize;
|
||||
use settings::Settings;
|
||||
use std::{mem, sync::Arc};
|
||||
use theme::IconButton;
|
||||
use util::ResultExt;
|
||||
use workspace::{JoinProject, OpenSharedScreen};
|
||||
@@ -297,9 +299,19 @@ impl ContactList {
|
||||
}
|
||||
|
||||
fn remove_contact(&mut self, request: &RemoveContact, cx: &mut ViewContext<Self>) {
|
||||
self.user_store
|
||||
.update(cx, |store, cx| store.remove_contact(request.0, cx))
|
||||
.detach();
|
||||
let user_id = request.0;
|
||||
let user_store = self.user_store.clone();
|
||||
let prompt_message = "Are you sure you want to remove this contact?";
|
||||
let mut answer = cx.prompt(PromptLevel::Warning, prompt_message, &["Remove", "Cancel"]);
|
||||
cx.spawn(|_, mut cx| async move {
|
||||
if answer.next().await == Some(0) {
|
||||
user_store
|
||||
.update(&mut cx, |store, cx| store.remove_contact(user_id, cx))
|
||||
.await
|
||||
.unwrap();
|
||||
}
|
||||
})
|
||||
.detach();
|
||||
}
|
||||
|
||||
fn respond_to_contact_request(
|
||||
@@ -1049,7 +1061,7 @@ impl ContactList {
|
||||
let user_id = contact.user.id;
|
||||
let initial_project = project.clone();
|
||||
let mut element =
|
||||
MouseEventHandler::<Contact>::new(contact.user.id as usize, cx, |_, _| {
|
||||
MouseEventHandler::<Contact>::new(contact.user.id as usize, cx, |_, cx| {
|
||||
Flex::row()
|
||||
.with_children(contact.user.avatar.clone().map(|avatar| {
|
||||
let status_badge = if contact.online {
|
||||
@@ -1091,6 +1103,27 @@ impl ContactList {
|
||||
.flex(1., true)
|
||||
.boxed(),
|
||||
)
|
||||
.with_child(
|
||||
MouseEventHandler::<Cancel>::new(
|
||||
contact.user.id as usize,
|
||||
cx,
|
||||
|mouse_state, _| {
|
||||
let button_style =
|
||||
theme.contact_button.style_for(mouse_state, false);
|
||||
render_icon_button(button_style, "icons/x_mark_8.svg")
|
||||
.aligned()
|
||||
.flex_float()
|
||||
.boxed()
|
||||
},
|
||||
)
|
||||
.with_padding(Padding::uniform(2.))
|
||||
.with_cursor_style(CursorStyle::PointingHand)
|
||||
.on_click(MouseButton::Left, move |_, cx| {
|
||||
cx.dispatch_action(RemoveContact(user_id))
|
||||
})
|
||||
.flex_float()
|
||||
.boxed(),
|
||||
)
|
||||
.with_children(if calling {
|
||||
Some(
|
||||
Label::new("Calling".to_string(), theme.calling_indicator.text.clone())
|
||||
@@ -1267,7 +1300,7 @@ impl View for ContactList {
|
||||
"ContactList"
|
||||
}
|
||||
|
||||
fn keymap_context(&self, _: &AppContext) -> keymap::Context {
|
||||
fn keymap_context(&self, _: &AppContext) -> KeymapContext {
|
||||
let mut cx = Self::default_keymap_context();
|
||||
cx.set.insert("menu".into());
|
||||
cx
|
||||
|
||||
@@ -48,7 +48,7 @@ impl View for ContactNotification {
|
||||
ContactEventKind::Requested => render_user_notification(
|
||||
self.user.clone(),
|
||||
"wants to add you as a contact",
|
||||
Some("They won't know if you decline."),
|
||||
Some("They won't be alerted if you decline."),
|
||||
Dismiss(self.user.id),
|
||||
vec![
|
||||
(
|
||||
|
||||
@@ -32,11 +32,12 @@ pub fn init(cx: &mut MutableAppContext) {
|
||||
});
|
||||
|
||||
for screen in cx.platform().screens() {
|
||||
let screen_size = screen.size();
|
||||
let screen_bounds = screen.bounds();
|
||||
let (window_id, _) = cx.add_window(
|
||||
WindowOptions {
|
||||
bounds: WindowBounds::Fixed(RectF::new(
|
||||
vec2f(screen_size.x() - window_size.x() - PADDING, PADDING),
|
||||
screen_bounds.upper_right()
|
||||
- vec2f(PADDING + window_size.x(), PADDING),
|
||||
window_size,
|
||||
)),
|
||||
titlebar: None,
|
||||
@@ -48,6 +49,7 @@ pub fn init(cx: &mut MutableAppContext) {
|
||||
},
|
||||
|_| IncomingCallNotification::new(incoming_call.clone()),
|
||||
);
|
||||
|
||||
notification_windows.push(window_id);
|
||||
}
|
||||
}
|
||||
@@ -225,6 +227,7 @@ impl View for IncomingCallNotification {
|
||||
.theme
|
||||
.incoming_call_notification
|
||||
.background;
|
||||
|
||||
Flex::row()
|
||||
.with_child(self.render_caller(cx))
|
||||
.with_child(self.render_buttons(cx))
|
||||
|
||||
@@ -31,11 +31,11 @@ pub fn init(cx: &mut MutableAppContext) {
|
||||
let window_size = vec2f(theme.window_width, theme.window_height);
|
||||
|
||||
for screen in cx.platform().screens() {
|
||||
let screen_size = screen.size();
|
||||
let screen_bounds = screen.bounds();
|
||||
let (window_id, _) = cx.add_window(
|
||||
WindowOptions {
|
||||
bounds: WindowBounds::Fixed(RectF::new(
|
||||
vec2f(screen_size.x() - window_size.x() - PADDING, PADDING),
|
||||
screen_bounds.upper_right() - vec2f(PADDING + window_size.x(), PADDING),
|
||||
window_size,
|
||||
)),
|
||||
titlebar: None,
|
||||
|
||||
@@ -2,6 +2,7 @@
|
||||
name = "collections"
|
||||
version = "0.1.0"
|
||||
edition = "2021"
|
||||
publish = false
|
||||
|
||||
[lib]
|
||||
path = "src/collections.rs"
|
||||
|
||||
@@ -2,6 +2,7 @@
|
||||
name = "command_palette"
|
||||
version = "0.1.0"
|
||||
edition = "2021"
|
||||
publish = false
|
||||
|
||||
[lib]
|
||||
path = "src/command_palette.rs"
|
||||
|
||||
@@ -3,7 +3,7 @@ use fuzzy::{StringMatch, StringMatchCandidate};
|
||||
use gpui::{
|
||||
actions,
|
||||
elements::{ChildView, Flex, Label, ParentElement},
|
||||
keymap::Keystroke,
|
||||
keymap_matcher::Keystroke,
|
||||
Action, AnyViewHandle, Element, Entity, MouseState, MutableAppContext, RenderContext, View,
|
||||
ViewContext, ViewHandle,
|
||||
};
|
||||
@@ -64,8 +64,10 @@ impl CommandPalette {
|
||||
name: humanize_action_name(name),
|
||||
action,
|
||||
keystrokes: bindings
|
||||
.iter()
|
||||
.filter_map(|binding| binding.keystrokes())
|
||||
.last()
|
||||
.map_or(Vec::new(), |binding| binding.keystrokes().to_vec()),
|
||||
.map_or(Vec::new(), |keystrokes| keystrokes.to_vec()),
|
||||
})
|
||||
})
|
||||
.collect();
|
||||
|
||||
@@ -2,6 +2,7 @@
|
||||
name = "context_menu"
|
||||
version = "0.1.0"
|
||||
edition = "2021"
|
||||
publish = false
|
||||
|
||||
[lib]
|
||||
path = "src/context_menu.rs"
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
use gpui::{
|
||||
elements::*, geometry::vector::Vector2F, impl_internal_actions, keymap, platform::CursorStyle,
|
||||
Action, AnyViewHandle, AppContext, Axis, Entity, MouseButton, MutableAppContext, RenderContext,
|
||||
SizeConstraint, Subscription, View, ViewContext,
|
||||
elements::*, geometry::vector::Vector2F, impl_internal_actions, keymap_matcher::KeymapContext,
|
||||
platform::CursorStyle, Action, AnyViewHandle, AppContext, Axis, Entity, MouseButton,
|
||||
MutableAppContext, RenderContext, SizeConstraint, Subscription, View, ViewContext,
|
||||
};
|
||||
use menu::*;
|
||||
use settings::Settings;
|
||||
@@ -75,7 +75,7 @@ impl View for ContextMenu {
|
||||
"ContextMenu"
|
||||
}
|
||||
|
||||
fn keymap_context(&self, _: &AppContext) -> keymap::Context {
|
||||
fn keymap_context(&self, _: &AppContext) -> KeymapContext {
|
||||
let mut cx = Self::default_keymap_context();
|
||||
cx.set.insert("menu".into());
|
||||
cx
|
||||
|
||||
@@ -2,6 +2,7 @@
|
||||
name = "db"
|
||||
version = "0.1.0"
|
||||
edition = "2021"
|
||||
publish = false
|
||||
|
||||
[lib]
|
||||
path = "src/db.rs"
|
||||
|
||||
@@ -327,8 +327,6 @@ mod tests {
|
||||
.path();
|
||||
corrupted_backup_dir.push(DB_FILE_NAME);
|
||||
|
||||
dbg!(&corrupted_backup_dir);
|
||||
|
||||
let backup = Connection::open_file(&corrupted_backup_dir.to_string_lossy());
|
||||
assert!(backup.select_row::<usize>("SELECT * FROM test").unwrap()()
|
||||
.unwrap()
|
||||
|
||||
@@ -2,6 +2,7 @@
|
||||
name = "diagnostics"
|
||||
version = "0.1.0"
|
||||
edition = "2021"
|
||||
publish = false
|
||||
|
||||
[lib]
|
||||
path = "src/diagnostics.rs"
|
||||
|
||||
@@ -21,7 +21,6 @@ use language::{
|
||||
use project::{DiagnosticSummary, Project, ProjectPath};
|
||||
use serde_json::json;
|
||||
use settings::Settings;
|
||||
use smallvec::SmallVec;
|
||||
use std::{
|
||||
any::{Any, TypeId},
|
||||
cmp::Ordering,
|
||||
@@ -521,12 +520,8 @@ impl Item for ProjectDiagnosticsEditor {
|
||||
)
|
||||
}
|
||||
|
||||
fn project_path(&self, _: &AppContext) -> Option<project::ProjectPath> {
|
||||
None
|
||||
}
|
||||
|
||||
fn project_entry_ids(&self, cx: &AppContext) -> SmallVec<[project::ProjectEntryId; 3]> {
|
||||
self.editor.project_entry_ids(cx)
|
||||
fn for_each_project_item(&self, cx: &AppContext, f: &mut dyn FnMut(usize, &dyn project::Item)) {
|
||||
self.editor.for_each_project_item(cx, f)
|
||||
}
|
||||
|
||||
fn is_singleton(&self, _: &AppContext) -> bool {
|
||||
|
||||
@@ -2,6 +2,7 @@
|
||||
name = "drag_and_drop"
|
||||
version = "0.1.0"
|
||||
edition = "2021"
|
||||
publish = false
|
||||
|
||||
[lib]
|
||||
path = "src/drag_and_drop.rs"
|
||||
|
||||
@@ -2,6 +2,7 @@
|
||||
name = "editor"
|
||||
version = "0.1.0"
|
||||
edition = "2021"
|
||||
publish = false
|
||||
|
||||
[lib]
|
||||
path = "src/editor.rs"
|
||||
|
||||
@@ -36,6 +36,7 @@ use gpui::{
|
||||
fonts::{self, HighlightStyle, TextStyle},
|
||||
geometry::vector::Vector2F,
|
||||
impl_actions, impl_internal_actions,
|
||||
keymap_matcher::KeymapContext,
|
||||
platform::CursorStyle,
|
||||
serde_json::json,
|
||||
AnyViewHandle, AppContext, AsyncAppContext, ClipboardItem, Element, ElementBox, Entity,
|
||||
@@ -43,7 +44,7 @@ use gpui::{
|
||||
ViewContext, ViewHandle, WeakViewHandle,
|
||||
};
|
||||
use highlight_matching_bracket::refresh_matching_bracket_highlights;
|
||||
use hover_popover::{hide_hover, HoverState};
|
||||
use hover_popover::{hide_hover, HideHover, HoverState};
|
||||
pub use items::MAX_TAB_TITLE_LEN;
|
||||
use itertools::Itertools;
|
||||
pub use language::{char_kind, CharKind};
|
||||
@@ -61,7 +62,7 @@ pub use multi_buffer::{
|
||||
};
|
||||
use multi_buffer::{MultiBufferChunks, ToOffsetUtf16};
|
||||
use ordered_float::OrderedFloat;
|
||||
use project::{FormatTrigger, LocationLink, Project, ProjectPath, ProjectTransaction};
|
||||
use project::{FormatTrigger, Location, LocationLink, Project, ProjectPath, ProjectTransaction};
|
||||
use scroll::{
|
||||
autoscroll::Autoscroll, OngoingScroll, ScrollAnchor, ScrollManager, ScrollbarAutoHide,
|
||||
};
|
||||
@@ -464,7 +465,7 @@ pub struct Editor {
|
||||
searchable: bool,
|
||||
cursor_shape: CursorShape,
|
||||
workspace_id: Option<WorkspaceId>,
|
||||
keymap_context_layers: BTreeMap<TypeId, gpui::keymap::Context>,
|
||||
keymap_context_layers: BTreeMap<TypeId, KeymapContext>,
|
||||
input_enabled: bool,
|
||||
leader_replica_id: Option<u16>,
|
||||
remote_id: Option<ViewId>,
|
||||
@@ -827,6 +828,23 @@ impl CompletionsMenu {
|
||||
})
|
||||
.collect()
|
||||
};
|
||||
|
||||
//Remove all candidates where the query's start does not match the start of any word in the candidate
|
||||
if let Some(query) = query {
|
||||
if let Some(query_start) = query.chars().next() {
|
||||
matches.retain(|string_match| {
|
||||
split_words(&string_match.string).any(|word| {
|
||||
//Check that the first codepoint of the word as lowercase matches the first
|
||||
//codepoint of the query as lowercase
|
||||
word.chars()
|
||||
.flat_map(|codepoint| codepoint.to_lowercase())
|
||||
.zip(query_start.to_lowercase())
|
||||
.all(|(word_cp, query_cp)| word_cp == query_cp)
|
||||
})
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
matches.sort_unstable_by_key(|mat| {
|
||||
let completion = &self.completions[mat.candidate_id];
|
||||
(
|
||||
@@ -990,6 +1008,15 @@ impl Editor {
|
||||
Self::new(EditorMode::SingleLine, buffer, None, field_editor_style, cx)
|
||||
}
|
||||
|
||||
pub fn multi_line(
|
||||
field_editor_style: Option<Arc<GetFieldEditorTheme>>,
|
||||
cx: &mut ViewContext<Self>,
|
||||
) -> Self {
|
||||
let buffer = cx.add_model(|cx| Buffer::new(0, String::new(), cx));
|
||||
let buffer = cx.add_model(|cx| MultiBuffer::singleton(buffer, cx));
|
||||
Self::new(EditorMode::Full, buffer, None, field_editor_style, cx)
|
||||
}
|
||||
|
||||
pub fn auto_height(
|
||||
max_lines: usize,
|
||||
field_editor_style: Option<Arc<GetFieldEditorTheme>>,
|
||||
@@ -1068,6 +1095,8 @@ impl Editor {
|
||||
|
||||
let blink_manager = cx.add_model(|cx| BlinkManager::new(CURSOR_BLINK_INTERVAL, cx));
|
||||
|
||||
let soft_wrap_mode_override =
|
||||
(mode == EditorMode::SingleLine).then(|| settings::SoftWrap::None);
|
||||
let mut this = Self {
|
||||
handle: cx.weak_handle(),
|
||||
buffer: buffer.clone(),
|
||||
@@ -1083,7 +1112,7 @@ impl Editor {
|
||||
select_larger_syntax_node_stack: Vec::new(),
|
||||
ime_transaction: Default::default(),
|
||||
active_diagnostics: None,
|
||||
soft_wrap_mode_override: None,
|
||||
soft_wrap_mode_override,
|
||||
get_field_editor_theme,
|
||||
project,
|
||||
focused: false,
|
||||
@@ -1225,7 +1254,7 @@ impl Editor {
|
||||
}
|
||||
}
|
||||
|
||||
pub fn set_keymap_context_layer<Tag: 'static>(&mut self, context: gpui::keymap::Context) {
|
||||
pub fn set_keymap_context_layer<Tag: 'static>(&mut self, context: KeymapContext) {
|
||||
self.keymap_context_layers
|
||||
.insert(TypeId::of::<Tag>(), context);
|
||||
}
|
||||
@@ -1301,7 +1330,7 @@ impl Editor {
|
||||
}
|
||||
}
|
||||
|
||||
hide_hover(self, cx);
|
||||
hide_hover(self, &HideHover, cx);
|
||||
|
||||
if old_cursor_position.to_display_point(&display_map).row()
|
||||
!= new_cursor_position.to_display_point(&display_map).row()
|
||||
@@ -1676,7 +1705,7 @@ impl Editor {
|
||||
return;
|
||||
}
|
||||
|
||||
if hide_hover(self, cx) {
|
||||
if hide_hover(self, &HideHover, cx) {
|
||||
return;
|
||||
}
|
||||
|
||||
@@ -1717,7 +1746,7 @@ impl Editor {
|
||||
for (selection, autoclose_region) in
|
||||
self.selections_with_autoclose_regions(selections, &snapshot)
|
||||
{
|
||||
if let Some(language) = snapshot.language_at(selection.head()) {
|
||||
if let Some(language) = snapshot.language_scope_at(selection.head()) {
|
||||
// Determine if the inserted text matches the opening or closing
|
||||
// bracket of any of this language's bracket pairs.
|
||||
let mut bracket_pair = None;
|
||||
@@ -1878,7 +1907,7 @@ impl Editor {
|
||||
let end = selection.end;
|
||||
|
||||
let mut insert_extra_newline = false;
|
||||
if let Some(language) = buffer.language_at(start) {
|
||||
if let Some(language) = buffer.language_scope_at(start) {
|
||||
let leading_whitespace_len = buffer
|
||||
.reversed_chars_at(start)
|
||||
.take_while(|c| c.is_whitespace() && *c != '\n')
|
||||
@@ -2002,7 +2031,9 @@ impl Editor {
|
||||
old_selections
|
||||
.iter()
|
||||
.map(|s| (s.start..s.end, text.clone())),
|
||||
Some(AutoindentMode::EachLine),
|
||||
Some(AutoindentMode::Block {
|
||||
original_indent_columns: Vec::new(),
|
||||
}),
|
||||
cx,
|
||||
);
|
||||
anchors
|
||||
@@ -4511,7 +4542,10 @@ impl Editor {
|
||||
|
||||
// TODO: Handle selections that cross excerpts
|
||||
for selection in &mut selections {
|
||||
let language = if let Some(language) = snapshot.language_at(selection.start) {
|
||||
let start_column = snapshot.indent_size_for_line(selection.start.row).len;
|
||||
let language = if let Some(language) =
|
||||
snapshot.language_scope_at(Point::new(selection.start.row, start_column))
|
||||
{
|
||||
language
|
||||
} else {
|
||||
continue;
|
||||
@@ -4781,7 +4815,7 @@ impl Editor {
|
||||
if let Some(popover) = self.hover_state.diagnostic_popover.as_ref() {
|
||||
let (group_id, jump_to) = popover.activation_info();
|
||||
if self.activate_diagnostics(group_id, cx) {
|
||||
self.change_selections(Some(Autoscroll::center()), cx, |s| {
|
||||
self.change_selections(Some(Autoscroll::fit()), cx, |s| {
|
||||
let mut new_selection = s.newest_anchor().clone();
|
||||
new_selection.collapse_to(jump_to, SelectionGoal::None);
|
||||
s.select_anchors(vec![new_selection.clone()]);
|
||||
@@ -4827,7 +4861,7 @@ impl Editor {
|
||||
|
||||
if let Some((primary_range, group_id)) = group {
|
||||
if self.activate_diagnostics(group_id, cx) {
|
||||
self.change_selections(Some(Autoscroll::center()), cx, |s| {
|
||||
self.change_selections(Some(Autoscroll::fit()), cx, |s| {
|
||||
s.select(vec![Selection {
|
||||
id: selection.id,
|
||||
start: primary_range.start,
|
||||
@@ -4902,7 +4936,7 @@ impl Editor {
|
||||
.dedup();
|
||||
|
||||
if let Some(hunk) = hunks.next() {
|
||||
this.change_selections(Some(Autoscroll::center()), cx, |s| {
|
||||
this.change_selections(Some(Autoscroll::fit()), cx, |s| {
|
||||
let row = hunk.start_display_row();
|
||||
let point = DisplayPoint::new(row, 0);
|
||||
s.select_display_ranges([point..point]);
|
||||
@@ -4987,25 +5021,49 @@ impl Editor {
|
||||
cx: &mut ViewContext<Workspace>,
|
||||
) {
|
||||
let pane = workspace.active_pane().clone();
|
||||
for definition in definitions {
|
||||
// If there is one definition, just open it directly
|
||||
if let [definition] = definitions.as_slice() {
|
||||
let range = definition
|
||||
.target
|
||||
.range
|
||||
.to_offset(definition.target.buffer.read(cx));
|
||||
|
||||
let target_editor_handle = workspace.open_project_item(definition.target.buffer, cx);
|
||||
let target_editor_handle =
|
||||
workspace.open_project_item(definition.target.buffer.clone(), cx);
|
||||
target_editor_handle.update(cx, |target_editor, cx| {
|
||||
// When selecting a definition in a different buffer, disable the nav history
|
||||
// to avoid creating a history entry at the previous cursor location.
|
||||
if editor_handle != target_editor_handle {
|
||||
pane.update(cx, |pane, _| pane.disable_history());
|
||||
}
|
||||
target_editor.change_selections(Some(Autoscroll::center()), cx, |s| {
|
||||
target_editor.change_selections(Some(Autoscroll::fit()), cx, |s| {
|
||||
s.select_ranges([range]);
|
||||
});
|
||||
|
||||
pane.update(cx, |pane, _| pane.enable_history());
|
||||
});
|
||||
} else if !definitions.is_empty() {
|
||||
let replica_id = editor_handle.read(cx).replica_id(cx);
|
||||
let title = definitions
|
||||
.iter()
|
||||
.find(|definition| definition.origin.is_some())
|
||||
.and_then(|definition| {
|
||||
definition.origin.as_ref().map(|origin| {
|
||||
let buffer = origin.buffer.read(cx);
|
||||
format!(
|
||||
"Definitions for {}",
|
||||
buffer
|
||||
.text_for_range(origin.range.clone())
|
||||
.collect::<String>()
|
||||
)
|
||||
})
|
||||
})
|
||||
.unwrap_or("Definitions".to_owned());
|
||||
let locations = definitions
|
||||
.into_iter()
|
||||
.map(|definition| definition.target)
|
||||
.collect();
|
||||
Self::open_locations_in_multibuffer(workspace, locations, replica_id, title, cx)
|
||||
}
|
||||
}
|
||||
|
||||
@@ -5026,64 +5084,87 @@ impl Editor {
|
||||
let project = workspace.project().clone();
|
||||
let references = project.update(cx, |project, cx| project.references(&buffer, head, cx));
|
||||
Some(cx.spawn(|workspace, mut cx| async move {
|
||||
let mut locations = references.await?;
|
||||
let locations = references.await?;
|
||||
if locations.is_empty() {
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
locations.sort_by_key(|location| location.buffer.id());
|
||||
let mut locations = locations.into_iter().peekable();
|
||||
let mut ranges_to_highlight = Vec::new();
|
||||
|
||||
let excerpt_buffer = cx.add_model(|cx| {
|
||||
let mut symbol_name = None;
|
||||
let mut multibuffer = MultiBuffer::new(replica_id);
|
||||
while let Some(location) = locations.next() {
|
||||
let buffer = location.buffer.read(cx);
|
||||
let mut ranges_for_buffer = Vec::new();
|
||||
let range = location.range.to_offset(buffer);
|
||||
ranges_for_buffer.push(range.clone());
|
||||
if symbol_name.is_none() {
|
||||
symbol_name = Some(buffer.text_for_range(range).collect::<String>());
|
||||
}
|
||||
|
||||
while let Some(next_location) = locations.peek() {
|
||||
if next_location.buffer == location.buffer {
|
||||
ranges_for_buffer.push(next_location.range.to_offset(buffer));
|
||||
locations.next();
|
||||
} else {
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
ranges_for_buffer.sort_by_key(|range| (range.start, Reverse(range.end)));
|
||||
ranges_to_highlight.extend(multibuffer.push_excerpts_with_context_lines(
|
||||
location.buffer.clone(),
|
||||
ranges_for_buffer,
|
||||
1,
|
||||
cx,
|
||||
));
|
||||
}
|
||||
multibuffer.with_title(format!("References to `{}`", symbol_name.unwrap()))
|
||||
});
|
||||
|
||||
workspace.update(&mut cx, |workspace, cx| {
|
||||
let editor =
|
||||
cx.add_view(|cx| Editor::for_multibuffer(excerpt_buffer, Some(project), cx));
|
||||
editor.update(cx, |editor, cx| {
|
||||
editor.highlight_background::<Self>(
|
||||
ranges_to_highlight,
|
||||
|theme| theme.editor.highlighted_line_background,
|
||||
cx,
|
||||
);
|
||||
});
|
||||
workspace.add_item(Box::new(editor), cx);
|
||||
let title = locations
|
||||
.first()
|
||||
.as_ref()
|
||||
.map(|location| {
|
||||
let buffer = location.buffer.read(cx);
|
||||
format!(
|
||||
"References to `{}`",
|
||||
buffer
|
||||
.text_for_range(location.range.clone())
|
||||
.collect::<String>()
|
||||
)
|
||||
})
|
||||
.unwrap();
|
||||
Self::open_locations_in_multibuffer(workspace, locations, replica_id, title, cx);
|
||||
});
|
||||
|
||||
Ok(())
|
||||
}))
|
||||
}
|
||||
|
||||
/// Opens a multibuffer with the given project locations in it
|
||||
pub fn open_locations_in_multibuffer(
|
||||
workspace: &mut Workspace,
|
||||
mut locations: Vec<Location>,
|
||||
replica_id: ReplicaId,
|
||||
title: String,
|
||||
cx: &mut ViewContext<Workspace>,
|
||||
) {
|
||||
// If there are multiple definitions, open them in a multibuffer
|
||||
locations.sort_by_key(|location| location.buffer.id());
|
||||
let mut locations = locations.into_iter().peekable();
|
||||
let mut ranges_to_highlight = Vec::new();
|
||||
|
||||
let excerpt_buffer = cx.add_model(|cx| {
|
||||
let mut multibuffer = MultiBuffer::new(replica_id);
|
||||
while let Some(location) = locations.next() {
|
||||
let buffer = location.buffer.read(cx);
|
||||
let mut ranges_for_buffer = Vec::new();
|
||||
let range = location.range.to_offset(buffer);
|
||||
ranges_for_buffer.push(range.clone());
|
||||
|
||||
while let Some(next_location) = locations.peek() {
|
||||
if next_location.buffer == location.buffer {
|
||||
ranges_for_buffer.push(next_location.range.to_offset(buffer));
|
||||
locations.next();
|
||||
} else {
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
ranges_for_buffer.sort_by_key(|range| (range.start, Reverse(range.end)));
|
||||
ranges_to_highlight.extend(multibuffer.push_excerpts_with_context_lines(
|
||||
location.buffer.clone(),
|
||||
ranges_for_buffer,
|
||||
1,
|
||||
cx,
|
||||
))
|
||||
}
|
||||
|
||||
multibuffer.with_title(title)
|
||||
});
|
||||
|
||||
let editor = cx.add_view(|cx| {
|
||||
Editor::for_multibuffer(excerpt_buffer, Some(workspace.project().clone()), cx)
|
||||
});
|
||||
editor.update(cx, |editor, cx| {
|
||||
editor.highlight_background::<Self>(
|
||||
ranges_to_highlight,
|
||||
|theme| theme.editor.highlighted_line_background,
|
||||
cx,
|
||||
);
|
||||
});
|
||||
workspace.add_item(Box::new(editor), cx);
|
||||
}
|
||||
|
||||
pub fn rename(&mut self, _: &Rename, cx: &mut ViewContext<Self>) -> Option<Task<Result<()>>> {
|
||||
use language::ToOffset as _;
|
||||
|
||||
@@ -6069,10 +6150,11 @@ impl Editor {
|
||||
let extension = Path::new(file.file_name(cx))
|
||||
.extension()
|
||||
.and_then(|e| e.to_str());
|
||||
project
|
||||
.read(cx)
|
||||
.client()
|
||||
.report_event(name, json!({ "File Extension": extension }));
|
||||
project.read(cx).client().report_event(
|
||||
name,
|
||||
json!({ "File Extension": extension }),
|
||||
cx.global::<Settings>().telemetry(),
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -6153,7 +6235,7 @@ impl View for Editor {
|
||||
cx.defer(move |cx| {
|
||||
if let Some(editor) = handle.upgrade(cx) {
|
||||
editor.update(cx, |editor, cx| {
|
||||
hide_hover(editor, cx);
|
||||
hide_hover(editor, &HideHover, cx);
|
||||
hide_link_definition(editor, cx);
|
||||
})
|
||||
}
|
||||
@@ -6202,7 +6284,7 @@ impl View for Editor {
|
||||
self.buffer
|
||||
.update(cx, |buffer, cx| buffer.remove_active_selections(cx));
|
||||
self.hide_context_menu(cx);
|
||||
hide_hover(self, cx);
|
||||
hide_hover(self, &HideHover, cx);
|
||||
cx.emit(Event::Blurred);
|
||||
cx.notify();
|
||||
}
|
||||
@@ -6245,7 +6327,7 @@ impl View for Editor {
|
||||
false
|
||||
}
|
||||
|
||||
fn keymap_context(&self, _: &AppContext) -> gpui::keymap::Context {
|
||||
fn keymap_context(&self, _: &AppContext) -> KeymapContext {
|
||||
let mut context = Self::default_keymap_context();
|
||||
let mode = match self.mode {
|
||||
EditorMode::SingleLine => "single_line",
|
||||
@@ -6799,6 +6881,34 @@ pub fn styled_runs_for_code_label<'a>(
|
||||
})
|
||||
}
|
||||
|
||||
pub fn split_words<'a>(text: &'a str) -> impl std::iter::Iterator<Item = &'a str> + 'a {
|
||||
let mut index = 0;
|
||||
let mut codepoints = text.char_indices().peekable();
|
||||
|
||||
std::iter::from_fn(move || {
|
||||
let start_index = index;
|
||||
while let Some((new_index, codepoint)) = codepoints.next() {
|
||||
index = new_index + codepoint.len_utf8();
|
||||
let current_upper = codepoint.is_uppercase();
|
||||
let next_upper = codepoints
|
||||
.peek()
|
||||
.map(|(_, c)| c.is_uppercase())
|
||||
.unwrap_or(false);
|
||||
|
||||
if !current_upper && next_upper {
|
||||
return Some(&text[start_index..index]);
|
||||
}
|
||||
}
|
||||
|
||||
index = text.len();
|
||||
if start_index < text.len() {
|
||||
return Some(&text[start_index..]);
|
||||
}
|
||||
None
|
||||
})
|
||||
.flat_map(|word| word.split_inclusive('_'))
|
||||
}
|
||||
|
||||
trait RangeExt<T> {
|
||||
fn sorted(&self) -> Range<T>;
|
||||
fn to_inclusive(&self) -> RangeInclusive<T>;
|
||||
|
||||
@@ -29,7 +29,11 @@ use workspace::{
|
||||
#[gpui::test]
|
||||
fn test_edit_events(cx: &mut MutableAppContext) {
|
||||
cx.set_global(Settings::test(cx));
|
||||
let buffer = cx.add_model(|cx| language::Buffer::new(0, "123456", cx));
|
||||
let buffer = cx.add_model(|cx| {
|
||||
let mut buffer = language::Buffer::new(0, "123456", cx);
|
||||
buffer.set_group_interval(Duration::from_secs(1));
|
||||
buffer
|
||||
});
|
||||
|
||||
let events = Rc::new(RefCell::new(Vec::new()));
|
||||
let (_, editor1) = cx.add_window(Default::default(), {
|
||||
@@ -3502,6 +3506,8 @@ async fn test_surround_with_pair(cx: &mut gpui::TestAppContext) {
|
||||
]
|
||||
);
|
||||
|
||||
view.undo(&Undo, cx);
|
||||
view.undo(&Undo, cx);
|
||||
view.undo(&Undo, cx);
|
||||
assert_eq!(
|
||||
view.text(cx),
|
||||
@@ -5439,6 +5445,20 @@ async fn go_to_hunk(deterministic: Arc<Deterministic>, cx: &mut gpui::TestAppCon
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_split_words() {
|
||||
fn split<'a>(text: &'a str) -> Vec<&'a str> {
|
||||
split_words(text).collect()
|
||||
}
|
||||
|
||||
assert_eq!(split("HelloWorld"), &["Hello", "World"]);
|
||||
assert_eq!(split("hello_world"), &["hello_", "world"]);
|
||||
assert_eq!(split("_hello_world_"), &["_", "hello_", "world_"]);
|
||||
assert_eq!(split("Hello_World"), &["Hello_", "World"]);
|
||||
assert_eq!(split("helloWOrld"), &["hello", "WOrld"]);
|
||||
assert_eq!(split("helloworld"), &["helloworld"]);
|
||||
}
|
||||
|
||||
fn empty_range(row: usize, column: usize) -> Range<DisplayPoint> {
|
||||
let point = DisplayPoint::new(row as u32, column as u32);
|
||||
point..point
|
||||
|
||||
@@ -7,7 +7,7 @@ use crate::{
|
||||
display_map::{BlockStyle, DisplaySnapshot, TransformBlock},
|
||||
git::{diff_hunk_to_display, DisplayDiffHunk},
|
||||
hover_popover::{
|
||||
HoverAt, HOVER_POPOVER_GAP, MIN_POPOVER_CHARACTER_WIDTH, MIN_POPOVER_LINE_HEIGHT,
|
||||
HideHover, HoverAt, HOVER_POPOVER_GAP, MIN_POPOVER_CHARACTER_WIDTH, MIN_POPOVER_LINE_HEIGHT,
|
||||
},
|
||||
link_go_to_definition::{
|
||||
GoToFetchedDefinition, GoToFetchedTypeDefinition, UpdateGoToDefinitionLink,
|
||||
@@ -114,6 +114,7 @@ impl EditorElement {
|
||||
fn attach_mouse_handlers(
|
||||
view: &WeakViewHandle<Editor>,
|
||||
position_map: &Arc<PositionMap>,
|
||||
has_popovers: bool,
|
||||
visible_bounds: RectF,
|
||||
text_bounds: RectF,
|
||||
gutter_bounds: RectF,
|
||||
@@ -190,6 +191,11 @@ impl EditorElement {
|
||||
}
|
||||
}
|
||||
})
|
||||
.on_move_out(move |_, cx| {
|
||||
if has_popovers {
|
||||
cx.dispatch_action(HideHover);
|
||||
}
|
||||
})
|
||||
.on_scroll({
|
||||
let position_map = position_map.clone();
|
||||
move |e, cx| {
|
||||
@@ -1870,6 +1876,7 @@ impl Element for EditorElement {
|
||||
Self::attach_mouse_handlers(
|
||||
&self.view,
|
||||
&layout.position_map,
|
||||
layout.hover_popovers.is_some(),
|
||||
visible_bounds,
|
||||
text_bounds,
|
||||
gutter_bounds,
|
||||
|
||||
@@ -29,12 +29,16 @@ pub struct HoverAt {
|
||||
pub point: Option<DisplayPoint>,
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone, PartialEq)]
|
||||
pub struct HideHover;
|
||||
|
||||
actions!(editor, [Hover]);
|
||||
impl_internal_actions!(editor, [HoverAt]);
|
||||
impl_internal_actions!(editor, [HoverAt, HideHover]);
|
||||
|
||||
pub fn init(cx: &mut MutableAppContext) {
|
||||
cx.add_action(hover);
|
||||
cx.add_action(hover_at);
|
||||
cx.add_action(hide_hover);
|
||||
}
|
||||
|
||||
/// Bindable action which uses the most recent selection head to trigger a hover
|
||||
@@ -50,7 +54,7 @@ pub fn hover_at(editor: &mut Editor, action: &HoverAt, cx: &mut ViewContext<Edit
|
||||
if let Some(point) = action.point {
|
||||
show_hover(editor, point, false, cx);
|
||||
} else {
|
||||
hide_hover(editor, cx);
|
||||
hide_hover(editor, &HideHover, cx);
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -58,7 +62,7 @@ pub fn hover_at(editor: &mut Editor, action: &HoverAt, cx: &mut ViewContext<Edit
|
||||
/// Hides the type information popup.
|
||||
/// Triggered by the `Hover` action when the cursor is not over a symbol or when the
|
||||
/// selections changed.
|
||||
pub fn hide_hover(editor: &mut Editor, cx: &mut ViewContext<Editor>) -> bool {
|
||||
pub fn hide_hover(editor: &mut Editor, _: &HideHover, cx: &mut ViewContext<Editor>) -> bool {
|
||||
let did_hide = editor.hover_state.info_popover.take().is_some()
|
||||
| editor.hover_state.diagnostic_popover.take().is_some();
|
||||
|
||||
@@ -67,6 +71,10 @@ pub fn hide_hover(editor: &mut Editor, cx: &mut ViewContext<Editor>) -> bool {
|
||||
|
||||
editor.clear_background_highlights::<HoverState>(cx);
|
||||
|
||||
if did_hide {
|
||||
cx.notify();
|
||||
}
|
||||
|
||||
did_hide
|
||||
}
|
||||
|
||||
@@ -121,7 +129,7 @@ fn show_hover(
|
||||
// Hover triggered from same location as last time. Don't show again.
|
||||
return;
|
||||
} else {
|
||||
hide_hover(editor, cx);
|
||||
hide_hover(editor, &HideHover, cx);
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -323,7 +331,7 @@ impl InfoPopover {
|
||||
if let Some(language) = content
|
||||
.language
|
||||
.clone()
|
||||
.and_then(|language| project.languages().get_language(&language))
|
||||
.and_then(|language| project.languages().language_for_name(&language))
|
||||
{
|
||||
let runs = language
|
||||
.highlight_text(&content.text.as_str().into(), 0..content.text.len());
|
||||
|
||||
@@ -12,12 +12,13 @@ use gpui::{
|
||||
elements::*, geometry::vector::vec2f, AppContext, Entity, ModelHandle, MutableAppContext,
|
||||
RenderContext, Subscription, Task, View, ViewContext, ViewHandle, WeakViewHandle,
|
||||
};
|
||||
use language::proto::serialize_anchor as serialize_text_anchor;
|
||||
use language::{Bias, Buffer, File as _, OffsetRangeExt, Point, SelectionGoal};
|
||||
use project::{File, FormatTrigger, Project, ProjectEntryId, ProjectPath};
|
||||
use language::{
|
||||
proto::serialize_anchor as serialize_text_anchor, Bias, Buffer, OffsetRangeExt, Point,
|
||||
SelectionGoal,
|
||||
};
|
||||
use project::{FormatTrigger, Item as _, Project, ProjectPath};
|
||||
use rpc::proto::{self, update_view};
|
||||
use settings::Settings;
|
||||
use smallvec::SmallVec;
|
||||
use std::{
|
||||
borrow::Cow,
|
||||
cmp::{self, Ordering},
|
||||
@@ -554,22 +555,10 @@ impl Item for Editor {
|
||||
.boxed()
|
||||
}
|
||||
|
||||
fn project_path(&self, cx: &AppContext) -> Option<ProjectPath> {
|
||||
let buffer = self.buffer.read(cx).as_singleton()?;
|
||||
let file = buffer.read(cx).file();
|
||||
File::from_dyn(file).map(|file| ProjectPath {
|
||||
worktree_id: file.worktree_id(cx),
|
||||
path: file.path().clone(),
|
||||
})
|
||||
}
|
||||
|
||||
fn project_entry_ids(&self, cx: &AppContext) -> SmallVec<[ProjectEntryId; 3]> {
|
||||
fn for_each_project_item(&self, cx: &AppContext, f: &mut dyn FnMut(usize, &dyn project::Item)) {
|
||||
self.buffer
|
||||
.read(cx)
|
||||
.files(cx)
|
||||
.into_iter()
|
||||
.filter_map(|file| File::from_dyn(Some(file))?.project_entry_id(cx))
|
||||
.collect()
|
||||
.for_each_buffer(|buffer| f(buffer.id(), buffer.read(cx)));
|
||||
}
|
||||
|
||||
fn is_singleton(&self, cx: &AppContext) -> bool {
|
||||
@@ -606,7 +595,12 @@ impl Item for Editor {
|
||||
}
|
||||
|
||||
fn can_save(&self, cx: &AppContext) -> bool {
|
||||
!self.buffer().read(cx).is_singleton() || self.project_path(cx).is_some()
|
||||
let buffer = &self.buffer().read(cx);
|
||||
if let Some(buffer) = buffer.as_singleton() {
|
||||
buffer.read(cx).project_path(cx).is_some()
|
||||
} else {
|
||||
true
|
||||
}
|
||||
}
|
||||
|
||||
fn save(
|
||||
@@ -765,6 +759,7 @@ impl Item for Editor {
|
||||
fn added_to_workspace(&mut self, workspace: &mut Workspace, cx: &mut ViewContext<Self>) {
|
||||
let workspace_id = workspace.database_id();
|
||||
let item_id = cx.view_id();
|
||||
self.workspace_id = Some(workspace_id);
|
||||
|
||||
fn serialize(
|
||||
buffer: ModelHandle<Buffer>,
|
||||
@@ -836,7 +831,11 @@ impl Item for Editor {
|
||||
.context("Project item at stored path was not a buffer")?;
|
||||
|
||||
Ok(cx.update(|cx| {
|
||||
cx.add_view(pane, |cx| Editor::for_buffer(buffer, Some(project), cx))
|
||||
cx.add_view(pane, |cx| {
|
||||
let mut editor = Editor::for_buffer(buffer, Some(project), cx);
|
||||
editor.read_scroll_position_from_db(item_id, workspace_id, cx);
|
||||
editor
|
||||
})
|
||||
}))
|
||||
})
|
||||
})
|
||||
@@ -1095,7 +1094,7 @@ impl StatusItemView for CursorPosition {
|
||||
active_pane_item: Option<&dyn ItemHandle>,
|
||||
cx: &mut ViewContext<Self>,
|
||||
) {
|
||||
if let Some(editor) = active_pane_item.and_then(|item| item.downcast::<Editor>()) {
|
||||
if let Some(editor) = active_pane_item.and_then(|item| item.act_as::<Editor>(cx)) {
|
||||
self._observe_active_editor = Some(cx.observe(&editor, Self::update_position));
|
||||
self.update_position(editor, cx);
|
||||
} else {
|
||||
@@ -1114,7 +1113,7 @@ fn path_for_buffer<'a>(
|
||||
cx: &'a AppContext,
|
||||
) -> Option<Cow<'a, Path>> {
|
||||
let file = buffer.read(cx).as_singleton()?.read(cx).file()?;
|
||||
path_for_file(file, height, include_filename, cx)
|
||||
path_for_file(file.as_ref(), height, include_filename, cx)
|
||||
}
|
||||
|
||||
fn path_for_file<'a>(
|
||||
@@ -1159,9 +1158,11 @@ fn path_for_file<'a>(
|
||||
mod tests {
|
||||
use super::*;
|
||||
use gpui::MutableAppContext;
|
||||
use language::RopeFingerprint;
|
||||
use std::{
|
||||
path::{Path, PathBuf},
|
||||
sync::Arc,
|
||||
time::SystemTime,
|
||||
};
|
||||
|
||||
#[gpui::test]
|
||||
@@ -1191,7 +1192,7 @@ mod tests {
|
||||
todo!()
|
||||
}
|
||||
|
||||
fn mtime(&self) -> std::time::SystemTime {
|
||||
fn mtime(&self) -> SystemTime {
|
||||
todo!()
|
||||
}
|
||||
|
||||
@@ -1210,7 +1211,7 @@ mod tests {
|
||||
_: clock::Global,
|
||||
_: project::LineEnding,
|
||||
_: &mut MutableAppContext,
|
||||
) -> gpui::Task<anyhow::Result<(clock::Global, String, std::time::SystemTime)>> {
|
||||
) -> gpui::Task<anyhow::Result<(clock::Global, RopeFingerprint, SystemTime)>> {
|
||||
todo!()
|
||||
}
|
||||
|
||||
|
||||
@@ -352,6 +352,29 @@ pub fn surrounding_word(map: &DisplaySnapshot, position: DisplayPoint) -> Range<
|
||||
start..end
|
||||
}
|
||||
|
||||
pub fn split_display_range_by_lines(
|
||||
map: &DisplaySnapshot,
|
||||
range: Range<DisplayPoint>,
|
||||
) -> Vec<Range<DisplayPoint>> {
|
||||
let mut result = Vec::new();
|
||||
|
||||
let mut start = range.start;
|
||||
// Loop over all the covered rows until the one containing the range end
|
||||
for row in range.start.row()..range.end.row() {
|
||||
let row_end_column = map.line_len(row);
|
||||
let end = map.clip_point(DisplayPoint::new(row, row_end_column), Bias::Left);
|
||||
if start != end {
|
||||
result.push(start..end);
|
||||
}
|
||||
start = map.clip_point(DisplayPoint::new(row + 1, 0), Bias::Left);
|
||||
}
|
||||
|
||||
// Add the final range from the start of the last end to the original range end.
|
||||
result.push(start..range.end);
|
||||
|
||||
result
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
|
||||
@@ -4,16 +4,16 @@ pub use anchor::{Anchor, AnchorRangeExt};
|
||||
use anyhow::Result;
|
||||
use clock::ReplicaId;
|
||||
use collections::{BTreeMap, Bound, HashMap, HashSet};
|
||||
use futures::{channel::mpsc, SinkExt};
|
||||
use git::diff::DiffHunk;
|
||||
use gpui::{AppContext, Entity, ModelContext, ModelHandle, Task};
|
||||
pub use language::Completion;
|
||||
use language::{
|
||||
char_kind, AutoindentMode, Buffer, BufferChunks, BufferSnapshot, CharKind, Chunk, CursorShape,
|
||||
DiagnosticEntry, File, IndentSize, Language, OffsetRangeExt, OffsetUtf16, Outline, OutlineItem,
|
||||
Point, PointUtf16, Selection, TextDimension, ToOffset as _, ToOffsetUtf16 as _, ToPoint as _,
|
||||
ToPointUtf16 as _, TransactionId, Unclipped,
|
||||
DiagnosticEntry, IndentSize, Language, LanguageScope, OffsetRangeExt, OffsetUtf16, Outline,
|
||||
OutlineItem, Point, PointUtf16, Selection, TextDimension, ToOffset as _, ToOffsetUtf16 as _,
|
||||
ToPoint as _, ToPointUtf16 as _, TransactionId, Unclipped,
|
||||
};
|
||||
use smallvec::SmallVec;
|
||||
use std::{
|
||||
borrow::Cow,
|
||||
cell::{Ref, RefCell},
|
||||
@@ -764,6 +764,63 @@ impl MultiBuffer {
|
||||
None
|
||||
}
|
||||
|
||||
pub fn stream_excerpts_with_context_lines(
|
||||
&mut self,
|
||||
excerpts: Vec<(ModelHandle<Buffer>, Vec<Range<text::Anchor>>)>,
|
||||
context_line_count: u32,
|
||||
cx: &mut ModelContext<Self>,
|
||||
) -> (Task<()>, mpsc::Receiver<Range<Anchor>>) {
|
||||
let (mut tx, rx) = mpsc::channel(256);
|
||||
let task = cx.spawn(|this, mut cx| async move {
|
||||
for (buffer, ranges) in excerpts {
|
||||
let buffer_id = buffer.id();
|
||||
let buffer_snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot());
|
||||
|
||||
let mut excerpt_ranges = Vec::new();
|
||||
let mut range_counts = Vec::new();
|
||||
cx.background()
|
||||
.scoped(|scope| {
|
||||
scope.spawn(async {
|
||||
let (ranges, counts) =
|
||||
build_excerpt_ranges(&buffer_snapshot, &ranges, context_line_count);
|
||||
excerpt_ranges = ranges;
|
||||
range_counts = counts;
|
||||
});
|
||||
})
|
||||
.await;
|
||||
|
||||
let mut ranges = ranges.into_iter();
|
||||
let mut range_counts = range_counts.into_iter();
|
||||
for excerpt_ranges in excerpt_ranges.chunks(100) {
|
||||
let excerpt_ids = this.update(&mut cx, |this, cx| {
|
||||
this.push_excerpts(buffer.clone(), excerpt_ranges.iter().cloned(), cx)
|
||||
});
|
||||
|
||||
for (excerpt_id, range_count) in
|
||||
excerpt_ids.into_iter().zip(range_counts.by_ref())
|
||||
{
|
||||
for range in ranges.by_ref().take(range_count) {
|
||||
let start = Anchor {
|
||||
buffer_id: Some(buffer_id),
|
||||
excerpt_id: excerpt_id.clone(),
|
||||
text_anchor: range.start,
|
||||
};
|
||||
let end = Anchor {
|
||||
buffer_id: Some(buffer_id),
|
||||
excerpt_id: excerpt_id.clone(),
|
||||
text_anchor: range.end,
|
||||
};
|
||||
if tx.send(start..end).await.is_err() {
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
});
|
||||
(task, rx)
|
||||
}
|
||||
|
||||
pub fn push_excerpts<O>(
|
||||
&mut self,
|
||||
buffer: ModelHandle<Buffer>,
|
||||
@@ -788,39 +845,8 @@ impl MultiBuffer {
|
||||
{
|
||||
let buffer_id = buffer.id();
|
||||
let buffer_snapshot = buffer.read(cx).snapshot();
|
||||
let max_point = buffer_snapshot.max_point();
|
||||
|
||||
let mut range_counts = Vec::new();
|
||||
let mut excerpt_ranges = Vec::new();
|
||||
let mut range_iter = ranges
|
||||
.iter()
|
||||
.map(|range| {
|
||||
range.start.to_point(&buffer_snapshot)..range.end.to_point(&buffer_snapshot)
|
||||
})
|
||||
.peekable();
|
||||
while let Some(range) = range_iter.next() {
|
||||
let excerpt_start = Point::new(range.start.row.saturating_sub(context_line_count), 0);
|
||||
let mut excerpt_end =
|
||||
Point::new(range.end.row + 1 + context_line_count, 0).min(max_point);
|
||||
let mut ranges_in_excerpt = 1;
|
||||
|
||||
while let Some(next_range) = range_iter.peek() {
|
||||
if next_range.start.row <= excerpt_end.row + context_line_count {
|
||||
excerpt_end =
|
||||
Point::new(next_range.end.row + 1 + context_line_count, 0).min(max_point);
|
||||
ranges_in_excerpt += 1;
|
||||
range_iter.next();
|
||||
} else {
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
excerpt_ranges.push(ExcerptRange {
|
||||
context: excerpt_start..excerpt_end,
|
||||
primary: Some(range),
|
||||
});
|
||||
range_counts.push(ranges_in_excerpt);
|
||||
}
|
||||
let (excerpt_ranges, range_counts) =
|
||||
build_excerpt_ranges(&buffer_snapshot, &ranges, context_line_count);
|
||||
|
||||
let excerpt_ids = self.push_excerpts(buffer, excerpt_ranges, cx);
|
||||
|
||||
@@ -1311,12 +1337,11 @@ impl MultiBuffer {
|
||||
.and_then(|(buffer, offset)| buffer.read(cx).language_at(offset))
|
||||
}
|
||||
|
||||
pub fn files<'a>(&'a self, cx: &'a AppContext) -> SmallVec<[&'a dyn File; 2]> {
|
||||
let buffers = self.buffers.borrow();
|
||||
buffers
|
||||
pub fn for_each_buffer(&self, mut f: impl FnMut(&ModelHandle<Buffer>)) {
|
||||
self.buffers
|
||||
.borrow()
|
||||
.values()
|
||||
.filter_map(|buffer| buffer.buffer.read(cx).file())
|
||||
.collect()
|
||||
.for_each(|state| f(&state.buffer))
|
||||
}
|
||||
|
||||
pub fn title<'a>(&'a self, cx: &'a AppContext) -> Cow<'a, str> {
|
||||
@@ -2666,6 +2691,11 @@ impl MultiBufferSnapshot {
|
||||
.and_then(|(buffer, offset)| buffer.language_at(offset))
|
||||
}
|
||||
|
||||
pub fn language_scope_at<'a, T: ToOffset>(&'a self, point: T) -> Option<LanguageScope> {
|
||||
self.point_to_buffer_offset(point)
|
||||
.and_then(|(buffer, offset)| buffer.language_scope_at(offset))
|
||||
}
|
||||
|
||||
pub fn is_dirty(&self) -> bool {
|
||||
self.is_dirty
|
||||
}
|
||||
@@ -3605,9 +3635,51 @@ impl ToPointUtf16 for PointUtf16 {
|
||||
}
|
||||
}
|
||||
|
||||
fn build_excerpt_ranges<T>(
|
||||
buffer: &BufferSnapshot,
|
||||
ranges: &[Range<T>],
|
||||
context_line_count: u32,
|
||||
) -> (Vec<ExcerptRange<Point>>, Vec<usize>)
|
||||
where
|
||||
T: text::ToPoint,
|
||||
{
|
||||
let max_point = buffer.max_point();
|
||||
let mut range_counts = Vec::new();
|
||||
let mut excerpt_ranges = Vec::new();
|
||||
let mut range_iter = ranges
|
||||
.iter()
|
||||
.map(|range| range.start.to_point(buffer)..range.end.to_point(buffer))
|
||||
.peekable();
|
||||
while let Some(range) = range_iter.next() {
|
||||
let excerpt_start = Point::new(range.start.row.saturating_sub(context_line_count), 0);
|
||||
let mut excerpt_end = Point::new(range.end.row + 1 + context_line_count, 0).min(max_point);
|
||||
let mut ranges_in_excerpt = 1;
|
||||
|
||||
while let Some(next_range) = range_iter.peek() {
|
||||
if next_range.start.row <= excerpt_end.row + context_line_count {
|
||||
excerpt_end =
|
||||
Point::new(next_range.end.row + 1 + context_line_count, 0).min(max_point);
|
||||
ranges_in_excerpt += 1;
|
||||
range_iter.next();
|
||||
} else {
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
excerpt_ranges.push(ExcerptRange {
|
||||
context: excerpt_start..excerpt_end,
|
||||
primary: Some(range),
|
||||
});
|
||||
range_counts.push(ranges_in_excerpt);
|
||||
}
|
||||
|
||||
(excerpt_ranges, range_counts)
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
use futures::StreamExt;
|
||||
use gpui::{MutableAppContext, TestAppContext};
|
||||
use language::{Buffer, Rope};
|
||||
use rand::prelude::*;
|
||||
@@ -3651,7 +3723,7 @@ mod tests {
|
||||
let state = host_buffer.read(cx).to_proto();
|
||||
let ops = cx
|
||||
.background()
|
||||
.block(host_buffer.read(cx).serialize_ops(cx));
|
||||
.block(host_buffer.read(cx).serialize_ops(None, cx));
|
||||
let mut buffer = Buffer::from_proto(1, state, None).unwrap();
|
||||
buffer
|
||||
.apply_ops(
|
||||
@@ -4012,6 +4084,44 @@ mod tests {
|
||||
);
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_stream_excerpts_with_context_lines(cx: &mut TestAppContext) {
|
||||
let buffer = cx.add_model(|cx| Buffer::new(0, sample_text(20, 3, 'a'), cx));
|
||||
let multibuffer = cx.add_model(|_| MultiBuffer::new(0));
|
||||
let (task, anchor_ranges) = multibuffer.update(cx, |multibuffer, cx| {
|
||||
let snapshot = buffer.read(cx);
|
||||
let ranges = vec![
|
||||
snapshot.anchor_before(Point::new(3, 2))..snapshot.anchor_before(Point::new(4, 2)),
|
||||
snapshot.anchor_before(Point::new(7, 1))..snapshot.anchor_before(Point::new(7, 3)),
|
||||
snapshot.anchor_before(Point::new(15, 0))
|
||||
..snapshot.anchor_before(Point::new(15, 0)),
|
||||
];
|
||||
multibuffer.stream_excerpts_with_context_lines(vec![(buffer.clone(), ranges)], 2, cx)
|
||||
});
|
||||
|
||||
let anchor_ranges = anchor_ranges.collect::<Vec<_>>().await;
|
||||
// Ensure task is finished when stream completes.
|
||||
task.await;
|
||||
|
||||
let snapshot = multibuffer.read_with(cx, |multibuffer, cx| multibuffer.snapshot(cx));
|
||||
assert_eq!(
|
||||
snapshot.text(),
|
||||
"bbb\nccc\nddd\neee\nfff\nggg\nhhh\niii\njjj\n\nnnn\nooo\nppp\nqqq\nrrr\n"
|
||||
);
|
||||
|
||||
assert_eq!(
|
||||
anchor_ranges
|
||||
.iter()
|
||||
.map(|range| range.to_point(&snapshot))
|
||||
.collect::<Vec<_>>(),
|
||||
vec![
|
||||
Point::new(2, 2)..Point::new(3, 2),
|
||||
Point::new(6, 1)..Point::new(6, 3),
|
||||
Point::new(12, 0)..Point::new(12, 0)
|
||||
]
|
||||
);
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
fn test_empty_multibuffer(cx: &mut MutableAppContext) {
|
||||
let multibuffer = cx.add_model(|_| MultiBuffer::new(0));
|
||||
|
||||
@@ -2,9 +2,19 @@ use std::path::PathBuf;
|
||||
|
||||
use db::sqlez_macros::sql;
|
||||
use db::{define_connection, query};
|
||||
|
||||
use workspace::{ItemId, WorkspaceDb, WorkspaceId};
|
||||
|
||||
define_connection!(
|
||||
// Current schema shape using pseudo-rust syntax:
|
||||
// editors(
|
||||
// item_id: usize,
|
||||
// workspace_id: usize,
|
||||
// path: PathBuf,
|
||||
// scroll_top_row: usize,
|
||||
// scroll_vertical_offset: f32,
|
||||
// scroll_horizontal_offset: f32,
|
||||
// )
|
||||
pub static ref DB: EditorDb<WorkspaceDb> =
|
||||
&[sql! (
|
||||
CREATE TABLE editors(
|
||||
@@ -15,8 +25,13 @@ define_connection!(
|
||||
FOREIGN KEY(workspace_id) REFERENCES workspaces(workspace_id)
|
||||
ON DELETE CASCADE
|
||||
ON UPDATE CASCADE
|
||||
) STRICT;
|
||||
)];
|
||||
) STRICT;
|
||||
),
|
||||
sql! (
|
||||
ALTER TABLE editors ADD COLUMN scroll_top_row INTEGER NOT NULL DEFAULT 0;
|
||||
ALTER TABLE editors ADD COLUMN scroll_horizontal_offset REAL NOT NULL DEFAULT 0;
|
||||
ALTER TABLE editors ADD COLUMN scroll_vertical_offset REAL NOT NULL DEFAULT 0;
|
||||
)];
|
||||
);
|
||||
|
||||
impl EditorDb {
|
||||
@@ -29,8 +44,40 @@ impl EditorDb {
|
||||
|
||||
query! {
|
||||
pub async fn save_path(item_id: ItemId, workspace_id: WorkspaceId, path: PathBuf) -> Result<()> {
|
||||
INSERT OR REPLACE INTO editors(item_id, workspace_id, path)
|
||||
VALUES (?, ?, ?)
|
||||
INSERT INTO editors
|
||||
(item_id, workspace_id, path)
|
||||
VALUES
|
||||
(?1, ?2, ?3)
|
||||
ON CONFLICT DO UPDATE SET
|
||||
item_id = ?1,
|
||||
workspace_id = ?2,
|
||||
path = ?3
|
||||
}
|
||||
}
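The hunk above replaces `INSERT OR REPLACE` with an explicit upsert. In SQLite, `INSERT OR REPLACE` resolves a uniqueness conflict by deleting the old row and inserting a new one, so any column not listed in the insert (such as the scroll columns added by the migration above) falls back to its default, whereas `INSERT ... ON CONFLICT DO UPDATE` edits the existing row in place. A small illustration of that difference, written against the `rusqlite` crate and a toy `editors` table with an assumed composite primary key rather than the repo's `sqlez`/`query!` machinery:

```rust
use rusqlite::{params, Connection, Result};

fn main() -> Result<()> {
    let conn = Connection::open_in_memory()?;
    // Toy schema: composite primary key and a scroll column with a default,
    // loosely modeled on the editors table above (key columns are assumed).
    conn.execute_batch(
        "CREATE TABLE editors(
            item_id INTEGER,
            workspace_id INTEGER,
            path TEXT,
            scroll_top_row INTEGER NOT NULL DEFAULT 0,
            PRIMARY KEY(item_id, workspace_id)
        );",
    )?;

    conn.execute(
        "INSERT INTO editors(item_id, workspace_id, path) VALUES (?1, ?2, ?3)",
        params![1, 1, "a.rs"],
    )?;
    conn.execute("UPDATE editors SET scroll_top_row = 42 WHERE item_id = 1", [])?;

    // INSERT OR REPLACE deletes the conflicting row and re-inserts it,
    // so the scroll column silently resets to its DEFAULT.
    conn.execute(
        "INSERT OR REPLACE INTO editors(item_id, workspace_id, path) VALUES (?1, ?2, ?3)",
        params![1, 1, "a.rs"],
    )?;
    let top: i64 =
        conn.query_row("SELECT scroll_top_row FROM editors WHERE item_id = 1", [], |r| r.get(0))?;
    assert_eq!(top, 0);

    // The upsert form updates the existing row in place and keeps the
    // columns it does not mention.
    conn.execute("UPDATE editors SET scroll_top_row = 42 WHERE item_id = 1", [])?;
    conn.execute(
        "INSERT INTO editors(item_id, workspace_id, path) VALUES (?1, ?2, ?3)
         ON CONFLICT(item_id, workspace_id) DO UPDATE SET path = excluded.path",
        params![1, 1, "a.rs"],
    )?;
    let top: i64 =
        conn.query_row("SELECT scroll_top_row FROM editors WHERE item_id = 1", [], |r| r.get(0))?;
    assert_eq!(top, 42);
    Ok(())
}
```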
|
||||
|
||||
// Returns the scroll top row and the horizontal/vertical offsets
|
||||
query! {
|
||||
pub fn get_scroll_position(item_id: ItemId, workspace_id: WorkspaceId) -> Result<Option<(u32, f32, f32)>> {
|
||||
SELECT scroll_top_row, scroll_horizontal_offset, scroll_vertical_offset
|
||||
FROM editors
|
||||
WHERE item_id = ? AND workspace_id = ?
|
||||
}
|
||||
}
|
||||
|
||||
query! {
|
||||
pub async fn save_scroll_position(
|
||||
item_id: ItemId,
|
||||
workspace_id: WorkspaceId,
|
||||
top_row: u32,
|
||||
vertical_offset: f32,
|
||||
horizontal_offset: f32
|
||||
) -> Result<()> {
|
||||
UPDATE OR IGNORE editors
|
||||
SET
|
||||
scroll_top_row = ?3,
|
||||
scroll_horizontal_offset = ?4,
|
||||
scroll_vertical_offset = ?5
|
||||
WHERE item_id = ?1 AND workspace_id = ?2
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -11,11 +11,14 @@ use gpui::{
|
||||
geometry::vector::{vec2f, Vector2F},
|
||||
Axis, MutableAppContext, Task, ViewContext,
|
||||
};
|
||||
use language::Bias;
|
||||
use language::{Bias, Point};
|
||||
use util::ResultExt;
|
||||
use workspace::WorkspaceId;
|
||||
|
||||
use crate::{
|
||||
display_map::{DisplaySnapshot, ToDisplayPoint},
|
||||
hover_popover::hide_hover,
|
||||
hover_popover::{hide_hover, HideHover},
|
||||
persistence::DB,
|
||||
Anchor, DisplayPoint, Editor, EditorMode, Event, MultiBufferSnapshot, ToPoint,
|
||||
};
|
||||
|
||||
@@ -170,37 +173,68 @@ impl ScrollManager {
|
||||
scroll_position: Vector2F,
|
||||
map: &DisplaySnapshot,
|
||||
local: bool,
|
||||
workspace_id: Option<i64>,
|
||||
cx: &mut ViewContext<Editor>,
|
||||
) {
|
||||
let new_anchor = if scroll_position.y() <= 0. {
|
||||
ScrollAnchor {
|
||||
top_anchor: Anchor::min(),
|
||||
offset: scroll_position.max(vec2f(0., 0.)),
|
||||
}
|
||||
let (new_anchor, top_row) = if scroll_position.y() <= 0. {
|
||||
(
|
||||
ScrollAnchor {
|
||||
top_anchor: Anchor::min(),
|
||||
offset: scroll_position.max(vec2f(0., 0.)),
|
||||
},
|
||||
0,
|
||||
)
|
||||
} else {
|
||||
let scroll_top_buffer_offset =
|
||||
DisplayPoint::new(scroll_position.y() as u32, 0).to_offset(&map, Bias::Right);
|
||||
let scroll_top_buffer_point =
|
||||
DisplayPoint::new(scroll_position.y() as u32, 0).to_point(&map);
|
||||
let top_anchor = map
|
||||
.buffer_snapshot
|
||||
.anchor_at(scroll_top_buffer_offset, Bias::Right);
|
||||
.anchor_at(scroll_top_buffer_point, Bias::Right);
|
||||
|
||||
ScrollAnchor {
|
||||
top_anchor,
|
||||
offset: vec2f(
|
||||
scroll_position.x(),
|
||||
scroll_position.y() - top_anchor.to_display_point(&map).row() as f32,
|
||||
),
|
||||
}
|
||||
(
|
||||
ScrollAnchor {
|
||||
top_anchor,
|
||||
offset: vec2f(
|
||||
scroll_position.x(),
|
||||
scroll_position.y() - top_anchor.to_display_point(&map).row() as f32,
|
||||
),
|
||||
},
|
||||
scroll_top_buffer_point.row,
|
||||
)
|
||||
};
|
||||
|
||||
self.set_anchor(new_anchor, local, cx);
|
||||
self.set_anchor(new_anchor, top_row, local, workspace_id, cx);
|
||||
}
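Ignoring folds and soft wraps (so display rows and buffer rows coincide), the branch above splits the scroll position's y component into the row the top anchor is placed on plus the leftover fraction that is stored in the anchor's offset; non-positive positions clamp to the buffer start. A reduced sketch of that split:

```rust
/// Reduced sketch of the y-axis decomposition in `set_scroll_position` above,
/// assuming display rows equal buffer rows (no folds or soft wraps) and
/// ignoring the x component.
fn split_scroll_y(scroll_y: f32) -> (u32, f32) {
    if scroll_y <= 0.0 {
        // Anchor::min() case: anchor the top of the buffer, clamp the offset.
        (0, scroll_y.max(0.0))
    } else {
        let top_row = scroll_y as u32; // row the top anchor lands on
        (top_row, scroll_y - top_row as f32) // fractional remainder kept as offset
    }
}

fn main() {
    assert_eq!(split_scroll_y(-3.0), (0, 0.0));
    assert_eq!(split_scroll_y(12.25), (12, 0.25));
}
```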
|
||||
|
||||
fn set_anchor(&mut self, anchor: ScrollAnchor, local: bool, cx: &mut ViewContext<Editor>) {
|
||||
fn set_anchor(
|
||||
&mut self,
|
||||
anchor: ScrollAnchor,
|
||||
top_row: u32,
|
||||
local: bool,
|
||||
workspace_id: Option<i64>,
|
||||
cx: &mut ViewContext<Editor>,
|
||||
) {
|
||||
self.anchor = anchor;
|
||||
cx.emit(Event::ScrollPositionChanged { local });
|
||||
self.show_scrollbar(cx);
|
||||
self.autoscroll_request.take();
|
||||
if let Some(workspace_id) = workspace_id {
|
||||
let item_id = cx.view_id();
|
||||
|
||||
cx.background()
|
||||
.spawn(async move {
|
||||
DB.save_scroll_position(
|
||||
item_id,
|
||||
workspace_id,
|
||||
top_row,
|
||||
anchor.offset.x(),
|
||||
anchor.offset.y(),
|
||||
)
|
||||
.await
|
||||
.log_err()
|
||||
})
|
||||
.detach()
|
||||
}
|
||||
cx.notify();
|
||||
}
|
||||
|
||||
@@ -273,9 +307,14 @@ impl Editor {
|
||||
) {
|
||||
let map = self.display_map.update(cx, |map, cx| map.snapshot(cx));
|
||||
|
||||
hide_hover(self, cx);
|
||||
self.scroll_manager
|
||||
.set_scroll_position(scroll_position, &map, local, cx);
|
||||
hide_hover(self, &HideHover, cx);
|
||||
self.scroll_manager.set_scroll_position(
|
||||
scroll_position,
|
||||
&map,
|
||||
local,
|
||||
self.workspace_id,
|
||||
cx,
|
||||
);
|
||||
}
|
||||
|
||||
pub fn scroll_position(&self, cx: &mut ViewContext<Self>) -> Vector2F {
|
||||
@@ -284,8 +323,13 @@ impl Editor {
|
||||
}
|
||||
|
||||
pub fn set_scroll_anchor(&mut self, scroll_anchor: ScrollAnchor, cx: &mut ViewContext<Self>) {
|
||||
hide_hover(self, cx);
|
||||
self.scroll_manager.set_anchor(scroll_anchor, true, cx);
|
||||
hide_hover(self, &HideHover, cx);
|
||||
let top_row = scroll_anchor
|
||||
.top_anchor
|
||||
.to_point(&self.buffer().read(cx).snapshot(cx))
|
||||
.row;
|
||||
self.scroll_manager
|
||||
.set_anchor(scroll_anchor, top_row, true, self.workspace_id, cx);
|
||||
}
|
||||
|
||||
pub(crate) fn set_scroll_anchor_remote(
|
||||
@@ -293,8 +337,13 @@ impl Editor {
|
||||
scroll_anchor: ScrollAnchor,
|
||||
cx: &mut ViewContext<Self>,
|
||||
) {
|
||||
hide_hover(self, cx);
|
||||
self.scroll_manager.set_anchor(scroll_anchor, false, cx);
|
||||
hide_hover(self, &HideHover, cx);
|
||||
let top_row = scroll_anchor
|
||||
.top_anchor
|
||||
.to_point(&self.buffer().read(cx).snapshot(cx))
|
||||
.row;
|
||||
self.scroll_manager
|
||||
.set_anchor(scroll_anchor, top_row, false, self.workspace_id, cx);
|
||||
}
|
||||
|
||||
pub fn scroll_screen(&mut self, amount: &ScrollAmount, cx: &mut ViewContext<Self>) {
|
||||
@@ -345,4 +394,25 @@ impl Editor {
|
||||
|
||||
Ordering::Greater
|
||||
}
|
||||
|
||||
pub fn read_scroll_position_from_db(
|
||||
&mut self,
|
||||
item_id: usize,
|
||||
workspace_id: WorkspaceId,
|
||||
cx: &mut ViewContext<Editor>,
|
||||
) {
|
||||
let scroll_position = DB.get_scroll_position(item_id, workspace_id);
|
||||
if let Ok(Some((top_row, x, y))) = scroll_position {
|
||||
let top_anchor = self
|
||||
.buffer()
|
||||
.read(cx)
|
||||
.snapshot(cx)
|
||||
.anchor_at(Point::new(top_row as u32, 0), Bias::Left);
|
||||
let scroll_anchor = ScrollAnchor {
|
||||
offset: Vector2F::new(x, y),
|
||||
top_anchor,
|
||||
};
|
||||
self.set_scroll_anchor(scroll_anchor, cx);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -9,7 +9,9 @@ use indoc::indoc;
|
||||
use crate::{
|
||||
display_map::ToDisplayPoint, AnchorRangeExt, Autoscroll, DisplayPoint, Editor, MultiBuffer,
|
||||
};
|
||||
use gpui::{keymap::Keystroke, AppContext, ContextHandle, ModelContext, ViewContext, ViewHandle};
|
||||
use gpui::{
|
||||
keymap_matcher::Keystroke, AppContext, ContextHandle, ModelContext, ViewContext, ViewHandle,
|
||||
};
|
||||
use language::{Buffer, BufferSnapshot};
|
||||
use settings::Settings;
|
||||
use util::{
|
||||
|
||||
crates/feedback/Cargo.toml (new file, 34 lines)
@@ -0,0 +1,34 @@
|
||||
[package]
|
||||
name = "feedback"
|
||||
version = "0.1.0"
|
||||
edition = "2021"
|
||||
publish = false
|
||||
|
||||
[lib]
|
||||
path = "src/feedback.rs"
|
||||
|
||||
[features]
|
||||
test-support = []
|
||||
|
||||
[dependencies]
|
||||
anyhow = "1.0.38"
|
||||
client = { path = "../client" }
|
||||
editor = { path = "../editor" }
|
||||
language = { path = "../language" }
|
||||
log = "0.4"
|
||||
futures = "0.3"
|
||||
gpui = { path = "../gpui" }
|
||||
human_bytes = "0.4.1"
|
||||
isahc = "1.7"
|
||||
lazy_static = "1.4.0"
|
||||
postage = { version = "0.4", features = ["futures-traits"] }
|
||||
project = { path = "../project" }
|
||||
search = { path = "../search" }
|
||||
serde = { version = "1.0", features = ["derive", "rc"] }
|
||||
settings = { path = "../settings" }
|
||||
sysinfo = "0.27.1"
|
||||
theme = { path = "../theme" }
|
||||
tree-sitter-markdown = { git = "https://github.com/MDeiml/tree-sitter-markdown", rev = "330ecab87a3e3a7211ac69bbadc19eabecdb1cca" }
|
||||
urlencoding = "2.1.2"
|
||||
util = { path = "../util" }
|
||||
workspace = { path = "../workspace" }
|
||||
crates/feedback/src/feedback.rs (new file, 65 lines)
@@ -0,0 +1,65 @@
|
||||
use std::sync::Arc;
|
||||
|
||||
pub mod feedback_editor;
|
||||
mod system_specs;
|
||||
use gpui::{actions, impl_actions, ClipboardItem, MutableAppContext, PromptLevel, ViewContext};
|
||||
use serde::Deserialize;
|
||||
use system_specs::SystemSpecs;
|
||||
use workspace::{AppState, Workspace};
|
||||
|
||||
#[derive(Deserialize, Clone, PartialEq)]
|
||||
pub struct OpenBrowser {
|
||||
pub url: Arc<str>,
|
||||
}
|
||||
|
||||
impl_actions!(zed, [OpenBrowser]);
|
||||
|
||||
actions!(
|
||||
zed,
|
||||
[CopySystemSpecsIntoClipboard, FileBugReport, RequestFeature]
|
||||
);
|
||||
|
||||
pub fn init(app_state: Arc<AppState>, cx: &mut MutableAppContext) {
|
||||
let system_specs = SystemSpecs::new(&cx);
|
||||
let system_specs_text = system_specs.to_string();
|
||||
|
||||
feedback_editor::init(system_specs, app_state, cx);
|
||||
|
||||
cx.add_global_action(move |action: &OpenBrowser, cx| cx.platform().open_url(&action.url));
|
||||
|
||||
let url = format!(
|
||||
"https://github.com/zed-industries/feedback/issues/new?assignees=&labels=defect%2Ctriage&template=2_bug_report.yml&environment={}",
|
||||
urlencoding::encode(&system_specs_text)
|
||||
);
|
||||
|
||||
cx.add_action(
|
||||
move |_: &mut Workspace,
|
||||
_: &CopySystemSpecsIntoClipboard,
|
||||
cx: &mut ViewContext<Workspace>| {
|
||||
cx.prompt(
|
||||
PromptLevel::Info,
|
||||
&format!("Copied into clipboard:\n\n{system_specs_text}"),
|
||||
&["OK"],
|
||||
);
|
||||
let item = ClipboardItem::new(system_specs_text.clone());
|
||||
cx.write_to_clipboard(item);
|
||||
},
|
||||
);
|
||||
|
||||
cx.add_action(
|
||||
|_: &mut Workspace, _: &RequestFeature, cx: &mut ViewContext<Workspace>| {
|
||||
let url = "https://github.com/zed-industries/feedback/issues/new?assignees=&labels=enhancement%2Ctriage&template=0_feature_request.yml";
|
||||
cx.dispatch_action(OpenBrowser {
|
||||
url: url.into(),
|
||||
});
|
||||
},
|
||||
);
|
||||
|
||||
cx.add_action(
|
||||
move |_: &mut Workspace, _: &FileBugReport, cx: &mut ViewContext<Workspace>| {
|
||||
cx.dispatch_action(OpenBrowser {
|
||||
url: url.clone().into(),
|
||||
});
|
||||
},
|
||||
);
|
||||
}
|
||||
crates/feedback/src/feedback_editor.rs (new file, 437 lines)
@@ -0,0 +1,437 @@
|
||||
use std::{
|
||||
any::TypeId,
|
||||
ops::{Range, RangeInclusive},
|
||||
sync::Arc,
|
||||
};
|
||||
|
||||
use anyhow::bail;
|
||||
use client::{Client, ZED_SECRET_CLIENT_TOKEN, ZED_SERVER_URL};
|
||||
use editor::{Anchor, Editor};
|
||||
use futures::AsyncReadExt;
|
||||
use gpui::{
|
||||
actions,
|
||||
elements::{ChildView, Flex, Label, MouseEventHandler, ParentElement, Stack, Text},
|
||||
serde_json, AnyViewHandle, AppContext, CursorStyle, Element, ElementBox, Entity, ModelHandle,
|
||||
MouseButton, MutableAppContext, PromptLevel, RenderContext, Task, View, ViewContext,
|
||||
ViewHandle, WeakViewHandle,
|
||||
};
|
||||
use isahc::Request;
|
||||
use language::Buffer;
|
||||
use postage::prelude::Stream;
|
||||
|
||||
use project::Project;
|
||||
use serde::Serialize;
|
||||
use settings::Settings;
|
||||
use workspace::{
|
||||
item::{Item, ItemHandle},
|
||||
searchable::{SearchableItem, SearchableItemHandle},
|
||||
AppState, StatusItemView, Workspace,
|
||||
};
|
||||
|
||||
use crate::system_specs::SystemSpecs;
|
||||
|
||||
const FEEDBACK_CHAR_LIMIT: RangeInclusive<usize> = 10..=5000;
|
||||
const FEEDBACK_PLACEHOLDER_TEXT: &str = "Save to submit feedback as Markdown.";
|
||||
const FEEDBACK_SUBMISSION_ERROR_TEXT: &str =
|
||||
"Feedback failed to submit, see error log for details.";
|
||||
|
||||
actions!(feedback, [SubmitFeedback, GiveFeedback, DeployFeedback]);
|
||||
|
||||
pub fn init(system_specs: SystemSpecs, app_state: Arc<AppState>, cx: &mut MutableAppContext) {
|
||||
cx.add_action({
|
||||
move |workspace: &mut Workspace, _: &GiveFeedback, cx: &mut ViewContext<Workspace>| {
|
||||
FeedbackEditor::deploy(system_specs.clone(), workspace, app_state.clone(), cx);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
pub struct FeedbackButton;
|
||||
|
||||
impl Entity for FeedbackButton {
|
||||
type Event = ();
|
||||
}
|
||||
|
||||
impl View for FeedbackButton {
|
||||
fn ui_name() -> &'static str {
|
||||
"FeedbackButton"
|
||||
}
|
||||
|
||||
fn render(&mut self, cx: &mut RenderContext<'_, Self>) -> ElementBox {
|
||||
Stack::new()
|
||||
.with_child(
|
||||
MouseEventHandler::<Self>::new(0, cx, |state, cx| {
|
||||
let theme = &cx.global::<Settings>().theme;
|
||||
let theme = &theme.workspace.status_bar.feedback;
|
||||
|
||||
Text::new(
|
||||
"Give Feedback".to_string(),
|
||||
theme.style_for(state, true).clone(),
|
||||
)
|
||||
.boxed()
|
||||
})
|
||||
.with_cursor_style(CursorStyle::PointingHand)
|
||||
.on_click(MouseButton::Left, |_, cx| cx.dispatch_action(GiveFeedback))
|
||||
.boxed(),
|
||||
)
|
||||
.boxed()
|
||||
}
|
||||
}
|
||||
|
||||
impl StatusItemView for FeedbackButton {
|
||||
fn set_active_pane_item(&mut self, _: Option<&dyn ItemHandle>, _: &mut ViewContext<Self>) {}
|
||||
}
|
||||
|
||||
#[derive(Serialize)]
|
||||
struct FeedbackRequestBody<'a> {
|
||||
feedback_text: &'a str,
|
||||
metrics_id: Option<Arc<str>>,
|
||||
system_specs: SystemSpecs,
|
||||
token: &'a str,
|
||||
}
|
||||
|
||||
#[derive(Clone)]
|
||||
struct FeedbackEditor {
|
||||
system_specs: SystemSpecs,
|
||||
editor: ViewHandle<Editor>,
|
||||
project: ModelHandle<Project>,
|
||||
}
|
||||
|
||||
impl FeedbackEditor {
|
||||
fn new(
|
||||
system_specs: SystemSpecs,
|
||||
project: ModelHandle<Project>,
|
||||
buffer: ModelHandle<Buffer>,
|
||||
cx: &mut ViewContext<Self>,
|
||||
) -> Self {
|
||||
let editor = cx.add_view(|cx| {
|
||||
let mut editor = Editor::for_buffer(buffer, Some(project.clone()), cx);
|
||||
editor.set_vertical_scroll_margin(5, cx);
|
||||
editor.set_placeholder_text(FEEDBACK_PLACEHOLDER_TEXT, cx);
|
||||
editor
|
||||
});
|
||||
|
||||
cx.subscribe(&editor, |_, _, e, cx| cx.emit(e.clone()))
|
||||
.detach();
|
||||
|
||||
Self {
|
||||
system_specs: system_specs.clone(),
|
||||
editor,
|
||||
project,
|
||||
}
|
||||
}
|
||||
|
||||
fn handle_save(
|
||||
&mut self,
|
||||
_: ModelHandle<Project>,
|
||||
cx: &mut ViewContext<Self>,
|
||||
) -> Task<anyhow::Result<()>> {
|
||||
let feedback_text = self.editor.read(cx).text(cx);
|
||||
let feedback_char_count = feedback_text.chars().count();
|
||||
let feedback_text = feedback_text.trim().to_string();
|
||||
|
||||
let error = if feedback_char_count < *FEEDBACK_CHAR_LIMIT.start() {
|
||||
Some(format!(
|
||||
"Feedback can't be shorter than {} characters.",
|
||||
FEEDBACK_CHAR_LIMIT.start()
|
||||
))
|
||||
} else if feedback_char_count > *FEEDBACK_CHAR_LIMIT.end() {
|
||||
Some(format!(
|
||||
"Feedback can't be longer than {} characters.",
|
||||
FEEDBACK_CHAR_LIMIT.end()
|
||||
))
|
||||
} else {
|
||||
None
|
||||
};
|
||||
|
||||
if let Some(error) = error {
|
||||
cx.prompt(PromptLevel::Critical, &error, &["OK"]);
|
||||
return Task::ready(Ok(()));
|
||||
}
|
||||
|
||||
let mut answer = cx.prompt(
|
||||
PromptLevel::Info,
|
||||
"Ready to submit your feedback?",
|
||||
&["Yes, Submit!", "No"],
|
||||
);
|
||||
|
||||
let this = cx.handle();
|
||||
let client = cx.global::<Arc<Client>>().clone();
|
||||
let specs = self.system_specs.clone();
|
||||
|
||||
cx.spawn(|_, mut cx| async move {
|
||||
let answer = answer.recv().await;
|
||||
|
||||
if answer == Some(0) {
|
||||
match FeedbackEditor::submit_feedback(&feedback_text, client, specs).await {
|
||||
Ok(_) => {
|
||||
cx.update(|cx| {
|
||||
this.update(cx, |_, cx| {
|
||||
cx.dispatch_action(workspace::CloseActiveItem);
|
||||
})
|
||||
});
|
||||
}
|
||||
Err(error) => {
|
||||
log::error!("{}", error);
|
||||
|
||||
cx.update(|cx| {
|
||||
this.update(cx, |_, cx| {
|
||||
cx.prompt(
|
||||
PromptLevel::Critical,
|
||||
FEEDBACK_SUBMISSION_ERROR_TEXT,
|
||||
&["OK"],
|
||||
);
|
||||
})
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
})
|
||||
.detach();
|
||||
|
||||
Task::ready(Ok(()))
|
||||
}
|
||||
|
||||
async fn submit_feedback(
|
||||
feedback_text: &str,
|
||||
zed_client: Arc<Client>,
|
||||
system_specs: SystemSpecs,
|
||||
) -> anyhow::Result<()> {
|
||||
let feedback_endpoint = format!("{}/api/feedback", *ZED_SERVER_URL);
|
||||
|
||||
let metrics_id = zed_client.metrics_id();
|
||||
let http_client = zed_client.http_client();
|
||||
|
||||
let request = FeedbackRequestBody {
|
||||
feedback_text: &feedback_text,
|
||||
metrics_id,
|
||||
system_specs,
|
||||
token: ZED_SECRET_CLIENT_TOKEN,
|
||||
};
|
||||
|
||||
let json_bytes = serde_json::to_vec(&request)?;
|
||||
|
||||
let request = Request::post(feedback_endpoint)
|
||||
.header("content-type", "application/json")
|
||||
.body(json_bytes.into())?;
|
||||
|
||||
let mut response = http_client.send(request).await?;
|
||||
let mut body = String::new();
|
||||
response.body_mut().read_to_string(&mut body).await?;
|
||||
|
||||
let response_status = response.status();
|
||||
|
||||
if !response_status.is_success() {
|
||||
bail!("Feedback API failed with error: {}", response_status)
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
impl FeedbackEditor {
|
||||
pub fn deploy(
|
||||
system_specs: SystemSpecs,
|
||||
workspace: &mut Workspace,
|
||||
app_state: Arc<AppState>,
|
||||
cx: &mut ViewContext<Workspace>,
|
||||
) {
|
||||
workspace
|
||||
.with_local_workspace(&app_state, cx, |workspace, cx| {
|
||||
let project = workspace.project().clone();
|
||||
let markdown_language = project.read(cx).languages().language_for_name("Markdown");
|
||||
let buffer = project
|
||||
.update(cx, |project, cx| {
|
||||
project.create_buffer("", markdown_language, cx)
|
||||
})
|
||||
.expect("creating buffers on a local workspace always succeeds");
|
||||
let feedback_editor =
|
||||
cx.add_view(|cx| FeedbackEditor::new(system_specs, project, buffer, cx));
|
||||
workspace.add_item(Box::new(feedback_editor), cx);
|
||||
})
|
||||
.detach();
|
||||
}
|
||||
}
|
||||
|
||||
impl View for FeedbackEditor {
|
||||
fn ui_name() -> &'static str {
|
||||
"FeedbackEditor"
|
||||
}
|
||||
|
||||
fn render(&mut self, cx: &mut RenderContext<Self>) -> ElementBox {
|
||||
ChildView::new(&self.editor, cx).boxed()
|
||||
}
|
||||
|
||||
fn focus_in(&mut self, _: AnyViewHandle, cx: &mut ViewContext<Self>) {
|
||||
if cx.is_self_focused() {
|
||||
cx.focus(&self.editor);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Entity for FeedbackEditor {
|
||||
type Event = editor::Event;
|
||||
}
|
||||
|
||||
impl Item for FeedbackEditor {
|
||||
fn tab_content(&self, _: Option<usize>, style: &theme::Tab, _: &AppContext) -> ElementBox {
|
||||
Flex::row()
|
||||
.with_child(
|
||||
Label::new("Feedback".to_string(), style.label.clone())
|
||||
.aligned()
|
||||
.contained()
|
||||
.boxed(),
|
||||
)
|
||||
.boxed()
|
||||
}
|
||||
|
||||
fn for_each_project_item(&self, cx: &AppContext, f: &mut dyn FnMut(usize, &dyn project::Item)) {
|
||||
self.editor.for_each_project_item(cx, f)
|
||||
}
|
||||
|
||||
fn to_item_events(_: &Self::Event) -> Vec<workspace::item::ItemEvent> {
|
||||
Vec::new()
|
||||
}
|
||||
|
||||
fn is_singleton(&self, _: &AppContext) -> bool {
|
||||
true
|
||||
}
|
||||
|
||||
fn set_nav_history(&mut self, _: workspace::ItemNavHistory, _: &mut ViewContext<Self>) {}
|
||||
|
||||
fn can_save(&self, _: &AppContext) -> bool {
|
||||
true
|
||||
}
|
||||
|
||||
fn save(
|
||||
&mut self,
|
||||
project: ModelHandle<Project>,
|
||||
cx: &mut ViewContext<Self>,
|
||||
) -> Task<anyhow::Result<()>> {
|
||||
self.handle_save(project, cx)
|
||||
}
|
||||
|
||||
fn save_as(
|
||||
&mut self,
|
||||
project: ModelHandle<Project>,
|
||||
_: std::path::PathBuf,
|
||||
cx: &mut ViewContext<Self>,
|
||||
) -> Task<anyhow::Result<()>> {
|
||||
self.handle_save(project, cx)
|
||||
}
|
||||
|
||||
fn reload(
|
||||
&mut self,
|
||||
_: ModelHandle<Project>,
|
||||
_: &mut ViewContext<Self>,
|
||||
) -> Task<anyhow::Result<()>> {
|
||||
unreachable!("reload should not have been called")
|
||||
}
|
||||
|
||||
fn clone_on_split(
|
||||
&self,
|
||||
_workspace_id: workspace::WorkspaceId,
|
||||
cx: &mut ViewContext<Self>,
|
||||
) -> Option<Self>
|
||||
where
|
||||
Self: Sized,
|
||||
{
|
||||
let buffer = self
|
||||
.editor
|
||||
.read(cx)
|
||||
.buffer()
|
||||
.read(cx)
|
||||
.as_singleton()
|
||||
.expect("Feedback buffer is only ever singleton");
|
||||
|
||||
Some(Self::new(
|
||||
self.system_specs.clone(),
|
||||
self.project.clone(),
|
||||
buffer.clone(),
|
||||
cx,
|
||||
))
|
||||
}
|
||||
|
||||
fn serialized_item_kind() -> Option<&'static str> {
|
||||
None
|
||||
}
|
||||
|
||||
fn deserialize(
|
||||
_: ModelHandle<Project>,
|
||||
_: WeakViewHandle<Workspace>,
|
||||
_: workspace::WorkspaceId,
|
||||
_: workspace::ItemId,
|
||||
_: &mut ViewContext<workspace::Pane>,
|
||||
) -> Task<anyhow::Result<ViewHandle<Self>>> {
|
||||
unreachable!()
|
||||
}
|
||||
|
||||
fn as_searchable(&self, handle: &ViewHandle<Self>) -> Option<Box<dyn SearchableItemHandle>> {
|
||||
Some(Box::new(handle.clone()))
|
||||
}
|
||||
|
||||
fn act_as_type(
|
||||
&self,
|
||||
type_id: TypeId,
|
||||
self_handle: &ViewHandle<Self>,
|
||||
_: &AppContext,
|
||||
) -> Option<AnyViewHandle> {
|
||||
if type_id == TypeId::of::<Self>() {
|
||||
Some(self_handle.into())
|
||||
} else if type_id == TypeId::of::<Editor>() {
|
||||
Some((&self.editor).into())
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl SearchableItem for FeedbackEditor {
|
||||
type Match = Range<Anchor>;
|
||||
|
||||
fn to_search_event(event: &Self::Event) -> Option<workspace::searchable::SearchEvent> {
|
||||
Editor::to_search_event(event)
|
||||
}
|
||||
|
||||
fn clear_matches(&mut self, cx: &mut ViewContext<Self>) {
|
||||
self.editor
|
||||
.update(cx, |editor, cx| editor.clear_matches(cx))
|
||||
}
|
||||
|
||||
fn update_matches(&mut self, matches: Vec<Self::Match>, cx: &mut ViewContext<Self>) {
|
||||
self.editor
|
||||
.update(cx, |editor, cx| editor.update_matches(matches, cx))
|
||||
}
|
||||
|
||||
fn query_suggestion(&mut self, cx: &mut ViewContext<Self>) -> String {
|
||||
self.editor
|
||||
.update(cx, |editor, cx| editor.query_suggestion(cx))
|
||||
}
|
||||
|
||||
fn activate_match(
|
||||
&mut self,
|
||||
index: usize,
|
||||
matches: Vec<Self::Match>,
|
||||
cx: &mut ViewContext<Self>,
|
||||
) {
|
||||
self.editor
|
||||
.update(cx, |editor, cx| editor.activate_match(index, matches, cx))
|
||||
}
|
||||
|
||||
fn find_matches(
|
||||
&mut self,
|
||||
query: project::search::SearchQuery,
|
||||
cx: &mut ViewContext<Self>,
|
||||
) -> Task<Vec<Self::Match>> {
|
||||
self.editor
|
||||
.update(cx, |editor, cx| editor.find_matches(query, cx))
|
||||
}
|
||||
|
||||
fn active_match_index(
|
||||
&mut self,
|
||||
matches: Vec<Self::Match>,
|
||||
cx: &mut ViewContext<Self>,
|
||||
) -> Option<usize> {
|
||||
self.editor
|
||||
.update(cx, |editor, cx| editor.active_match_index(matches, cx))
|
||||
}
|
||||
}
|
||||
crates/feedback/src/system_specs.rs (new file, 75 lines)
@@ -0,0 +1,75 @@
|
||||
use client::ZED_APP_VERSION;
|
||||
use gpui::{AppContext, AppVersion};
|
||||
use human_bytes::human_bytes;
|
||||
use serde::Serialize;
|
||||
use std::{env, fmt::Display};
|
||||
use sysinfo::{System, SystemExt};
|
||||
use util::channel::ReleaseChannel;
|
||||
|
||||
#[derive(Clone, Debug, Serialize)]
|
||||
pub struct SystemSpecs {
|
||||
#[serde(serialize_with = "serialize_app_version")]
|
||||
app_version: Option<AppVersion>,
|
||||
release_channel: &'static str,
|
||||
os_name: &'static str,
|
||||
os_version: Option<String>,
|
||||
memory: u64,
|
||||
architecture: &'static str,
|
||||
}
|
||||
|
||||
impl SystemSpecs {
|
||||
pub fn new(cx: &AppContext) -> Self {
|
||||
let platform = cx.platform();
|
||||
let app_version = ZED_APP_VERSION.or_else(|| platform.app_version().ok());
|
||||
let release_channel = cx.global::<ReleaseChannel>().dev_name();
|
||||
let os_name = platform.os_name();
|
||||
let system = System::new_all();
|
||||
let memory = system.total_memory();
|
||||
let architecture = env::consts::ARCH;
|
||||
let os_version = platform
|
||||
.os_version()
|
||||
.ok()
|
||||
.map(|os_version| os_version.to_string());
|
||||
|
||||
SystemSpecs {
|
||||
app_version,
|
||||
release_channel,
|
||||
os_name,
|
||||
os_version,
|
||||
memory,
|
||||
architecture,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Display for SystemSpecs {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
let os_information = match &self.os_version {
|
||||
Some(os_version) => format!("OS: {} {}", self.os_name, os_version),
|
||||
None => format!("OS: {}", self.os_name),
|
||||
};
|
||||
let app_version_information = self
|
||||
.app_version
|
||||
.as_ref()
|
||||
.map(|app_version| format!("Zed: v{} ({})", app_version, self.release_channel));
|
||||
let system_specs = [
|
||||
app_version_information,
|
||||
Some(os_information),
|
||||
Some(format!("Memory: {}", human_bytes(self.memory as f64))),
|
||||
Some(format!("Architecture: {}", self.architecture)),
|
||||
]
|
||||
.into_iter()
|
||||
.flatten()
|
||||
.collect::<Vec<String>>()
|
||||
.join("\n");
|
||||
|
||||
write!(f, "{system_specs}")
|
||||
}
|
||||
}
|
||||
|
||||
fn serialize_app_version<S>(version: &Option<AppVersion>, serializer: S) -> Result<S::Ok, S::Error>
|
||||
where
|
||||
S: serde::Serializer,
|
||||
{
|
||||
version.map(|v| v.to_string()).serialize(serializer)
|
||||
}
|
||||
@@ -2,6 +2,7 @@
|
||||
name = "file_finder"
|
||||
version = "0.1.0"
|
||||
edition = "2021"
|
||||
publish = false
|
||||
|
||||
[lib]
|
||||
path = "src/file_finder.rs"
|
||||
|
||||
@@ -2,6 +2,7 @@
|
||||
name = "fs"
|
||||
version = "0.1.0"
|
||||
edition = "2021"
|
||||
publish = false
|
||||
|
||||
[lib]
|
||||
path = "src/fs.rs"
|
||||
|
||||
@@ -13,7 +13,6 @@ use smol::io::{AsyncReadExt, AsyncWriteExt};
|
||||
use std::borrow::Cow;
|
||||
use std::cmp;
|
||||
use std::io::Write;
|
||||
use std::ops::Deref;
|
||||
use std::sync::Arc;
|
||||
use std::{
|
||||
io,
|
||||
@@ -94,16 +93,6 @@ impl LineEnding {
|
||||
}
|
||||
}
|
||||
|
||||
pub struct HomeDir(pub PathBuf);
|
||||
|
||||
impl Deref for HomeDir {
|
||||
type Target = PathBuf;
|
||||
|
||||
fn deref(&self) -> &Self::Target {
|
||||
&self.0
|
||||
}
|
||||
}
|
||||
|
||||
#[async_trait::async_trait]
|
||||
pub trait Fs: Send + Sync {
|
||||
async fn create_dir(&self, path: &Path) -> Result<()>;
|
||||
@@ -389,6 +378,7 @@ pub struct FakeFs {
|
||||
struct FakeFsState {
|
||||
root: Arc<Mutex<FakeFsEntry>>,
|
||||
next_inode: u64,
|
||||
next_mtime: SystemTime,
|
||||
event_txs: Vec<smol::channel::Sender<Vec<fsevent::Event>>>,
|
||||
}
|
||||
|
||||
@@ -517,10 +507,11 @@ impl FakeFs {
|
||||
state: Mutex::new(FakeFsState {
|
||||
root: Arc::new(Mutex::new(FakeFsEntry::Dir {
|
||||
inode: 0,
|
||||
mtime: SystemTime::now(),
|
||||
mtime: SystemTime::UNIX_EPOCH,
|
||||
entries: Default::default(),
|
||||
git_repo_state: None,
|
||||
})),
|
||||
next_mtime: SystemTime::UNIX_EPOCH,
|
||||
next_inode: 1,
|
||||
event_txs: Default::default(),
|
||||
}),
|
||||
@@ -531,10 +522,12 @@ impl FakeFs {
|
||||
let mut state = self.state.lock().await;
|
||||
let path = path.as_ref();
|
||||
let inode = state.next_inode;
|
||||
let mtime = state.next_mtime;
|
||||
state.next_inode += 1;
|
||||
state.next_mtime += Duration::from_nanos(1);
|
||||
let file = Arc::new(Mutex::new(FakeFsEntry::File {
|
||||
inode,
|
||||
mtime: SystemTime::now(),
|
||||
mtime,
|
||||
content,
|
||||
}));
|
||||
state
|
||||
@@ -631,6 +624,21 @@ impl FakeFs {
|
||||
}
|
||||
}
|
||||
|
||||
pub async fn paths(&self) -> Vec<PathBuf> {
|
||||
let mut result = Vec::new();
|
||||
let mut queue = collections::VecDeque::new();
|
||||
queue.push_back((PathBuf::from("/"), self.state.lock().await.root.clone()));
|
||||
while let Some((path, entry)) = queue.pop_front() {
|
||||
if let FakeFsEntry::Dir { entries, .. } = &*entry.lock().await {
|
||||
for (name, entry) in entries {
|
||||
queue.push_back((path.join(name), entry.clone()));
|
||||
}
|
||||
}
|
||||
result.push(path);
|
||||
}
|
||||
result
|
||||
}
|
||||
|
||||
pub async fn directories(&self) -> Vec<PathBuf> {
|
||||
let mut result = Vec::new();
|
||||
let mut queue = collections::VecDeque::new();
|
||||
@@ -726,6 +734,8 @@ impl Fs for FakeFs {
|
||||
}
|
||||
|
||||
let inode = state.next_inode;
|
||||
let mtime = state.next_mtime;
|
||||
state.next_mtime += Duration::from_nanos(1);
|
||||
state.next_inode += 1;
|
||||
state
|
||||
.write_path(&cur_path, |entry| {
|
||||
@@ -733,7 +743,7 @@ impl Fs for FakeFs {
|
||||
created_dirs.push(cur_path.clone());
|
||||
Arc::new(Mutex::new(FakeFsEntry::Dir {
|
||||
inode,
|
||||
mtime: SystemTime::now(),
|
||||
mtime,
|
||||
entries: Default::default(),
|
||||
git_repo_state: None,
|
||||
}))
|
||||
@@ -751,10 +761,12 @@ impl Fs for FakeFs {
|
||||
self.simulate_random_delay().await;
|
||||
let mut state = self.state.lock().await;
|
||||
let inode = state.next_inode;
|
||||
let mtime = state.next_mtime;
|
||||
state.next_mtime += Duration::from_nanos(1);
|
||||
state.next_inode += 1;
|
||||
let file = Arc::new(Mutex::new(FakeFsEntry::File {
|
||||
inode,
|
||||
mtime: SystemTime::now(),
|
||||
mtime,
|
||||
content: String::new(),
|
||||
}));
|
||||
state
|
||||
@@ -816,6 +828,9 @@ impl Fs for FakeFs {
|
||||
let source = normalize_path(source);
|
||||
let target = normalize_path(target);
|
||||
let mut state = self.state.lock().await;
|
||||
let mtime = state.next_mtime;
|
||||
let inode = util::post_inc(&mut state.next_inode);
|
||||
state.next_mtime += Duration::from_nanos(1);
|
||||
let source_entry = state.read_path(&source).await?;
|
||||
let content = source_entry.lock().await.file_content(&source)?.clone();
|
||||
let entry = state
|
||||
@@ -831,8 +846,8 @@ impl Fs for FakeFs {
|
||||
}
|
||||
btree_map::Entry::Vacant(e) => Ok(Some(
|
||||
e.insert(Arc::new(Mutex::new(FakeFsEntry::File {
|
||||
inode: 0,
|
||||
mtime: SystemTime::now(),
|
||||
inode,
|
||||
mtime,
|
||||
content: String::new(),
|
||||
})))
|
||||
.clone(),
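The `FakeFs` hunks above stop stamping entries with `SystemTime::now()` and instead hand out mtimes from the `next_mtime` counter, which starts at `UNIX_EPOCH` and advances by one nanosecond per created or copied entry, giving tests stable, strictly ordered timestamps. A standalone sketch of that counter pattern (the `MtimeCounter` name is illustrative, not from the repo):

```rust
use std::time::{Duration, SystemTime};

/// Deterministic mtime source mirroring the FakeFs change above: each new
/// entry takes the current counter value, then the counter advances by 1 ns.
struct MtimeCounter {
    next_mtime: SystemTime,
}

impl MtimeCounter {
    fn new() -> Self {
        Self { next_mtime: SystemTime::UNIX_EPOCH }
    }

    fn next(&mut self) -> SystemTime {
        let mtime = self.next_mtime;
        self.next_mtime += Duration::from_nanos(1);
        mtime
    }
}

fn main() {
    let mut clock = MtimeCounter::new();
    let first = clock.next();
    let second = clock.next();
    assert_eq!(first, SystemTime::UNIX_EPOCH); // reproducible starting point
    assert!(second > first); // unique, strictly increasing timestamps
}
```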
|
||||
|
||||
@@ -3,6 +3,7 @@ name = "fsevent"
|
||||
version = "2.0.2"
|
||||
license = "MIT"
|
||||
edition = "2021"
|
||||
publish = false
|
||||
|
||||
[lib]
|
||||
path = "src/fsevent.rs"
|
||||
|
||||
@@ -2,6 +2,7 @@
|
||||
name = "fuzzy"
|
||||
version = "0.1.0"
|
||||
edition = "2021"
|
||||
publish = false
|
||||
|
||||
[lib]
|
||||
path = "src/fuzzy.rs"
|
||||
|
||||
@@ -2,6 +2,7 @@
|
||||
name = "git"
|
||||
version = "0.1.0"
|
||||
edition = "2021"
|
||||
publish = false
|
||||
|
||||
[lib]
|
||||
path = "src/git.rs"
|
||||
|
||||
@@ -2,6 +2,7 @@
|
||||
name = "go_to_line"
|
||||
version = "0.1.0"
|
||||
edition = "2021"
|
||||
publish = false
|
||||
|
||||
[lib]
|
||||
path = "src/go_to_line.rs"
|
||||
|
||||
@@ -4,6 +4,7 @@ edition = "2021"
|
||||
name = "gpui"
|
||||
version = "0.1.0"
|
||||
description = "A GPU-accelerated UI framework"
|
||||
publish = false
|
||||
|
||||
[lib]
|
||||
path = "src/gpui.rs"
|
||||
@@ -45,8 +46,8 @@ smallvec = { version = "1.6", features = ["union"] }
|
||||
smol = "1.2"
|
||||
time = { version = "0.3", features = ["serde", "serde-well-known"] }
|
||||
tiny-skia = "0.5"
|
||||
tree-sitter = "0.20"
|
||||
usvg = "0.14"
|
||||
uuid = { version = "1.1.2", features = ["v4"] }
|
||||
waker-fn = "1.1.0"
|
||||
|
||||
[build-dependencies]
|
||||
@@ -66,7 +67,7 @@ media = { path = "../media" }
|
||||
anyhow = "1"
|
||||
block = "0.1"
|
||||
cocoa = "0.24"
|
||||
core-foundation = "0.9.3"
|
||||
core-foundation = { version = "0.9.3", features = ["with-uuid"] }
|
||||
core-graphics = "0.22.3"
|
||||
core-text = "19.2"
|
||||
font-kit = { git = "https://github.com/zed-industries/font-kit", rev = "8eaf7a918eafa28b0a37dc759e2e0e7683fa24f1" }
|
||||
|
||||
@@ -6,7 +6,6 @@ use std::{
|
||||
|
||||
fn main() {
|
||||
generate_dispatch_bindings();
|
||||
compile_context_predicate_parser();
|
||||
compile_metal_shaders();
|
||||
generate_shader_bindings();
|
||||
}
|
||||
@@ -30,17 +29,6 @@ fn generate_dispatch_bindings() {
|
||||
.expect("couldn't write dispatch bindings");
|
||||
}
|
||||
|
||||
fn compile_context_predicate_parser() {
|
||||
let dir = PathBuf::from("./grammars/context-predicate/src");
|
||||
let parser_c = dir.join("parser.c");
|
||||
|
||||
println!("cargo:rerun-if-changed={}", &parser_c.to_str().unwrap());
|
||||
cc::Build::new()
|
||||
.include(&dir)
|
||||
.file(parser_c)
|
||||
.compile("tree_sitter_context_predicate");
|
||||
}
|
||||
|
||||
const SHADER_HEADER_PATH: &str = "./src/platform/mac/shaders/shaders.h";
|
||||
|
||||
fn compile_metal_shaders() {
|
||||
@@ -52,7 +40,7 @@ fn compile_metal_shaders() {
|
||||
println!("cargo:rerun-if-changed={}", shader_path);
|
||||
|
||||
let output = Command::new("xcrun")
|
||||
.args(&[
|
||||
.args([
|
||||
"-sdk",
|
||||
"macosx",
|
||||
"metal",
|
||||
@@ -76,7 +64,7 @@ fn compile_metal_shaders() {
|
||||
}
|
||||
|
||||
let output = Command::new("xcrun")
|
||||
.args(&["-sdk", "macosx", "metallib"])
|
||||
.args(["-sdk", "macosx", "metallib"])
|
||||
.arg(air_output_path)
|
||||
.arg("-o")
|
||||
.arg(metallib_output_path)
|
||||
|
||||
@@ -1,2 +0,0 @@
|
||||
/node_modules
|
||||
/build
|
||||
@@ -1,20 +0,0 @@
|
||||
[package]
|
||||
name = "tree-sitter-context-predicate"
|
||||
description = "context-predicate grammar for the tree-sitter parsing library"
|
||||
version = "0.0.1"
|
||||
keywords = ["incremental", "parsing", "context-predicate"]
|
||||
categories = ["parsing", "text-editors"]
|
||||
repository = "https://github.com/tree-sitter/tree-sitter-javascript"
|
||||
edition = "2021"
|
||||
license = "MIT"
|
||||
build = "bindings/rust/build.rs"
|
||||
include = ["bindings/rust/*", "grammar.js", "queries/*", "src/*"]
|
||||
|
||||
[lib]
|
||||
path = "bindings/rust/lib.rs"
|
||||
|
||||
[dependencies]
|
||||
tree-sitter = "0.20"
|
||||
|
||||
[build-dependencies]
|
||||
cc = "1.0"
|
||||
@@ -1,18 +0,0 @@
|
||||
{
|
||||
"targets": [
|
||||
{
|
||||
"target_name": "tree_sitter_context_predicate_binding",
|
||||
"include_dirs": [
|
||||
"<!(node -e \"require('nan')\")",
|
||||
"src"
|
||||
],
|
||||
"sources": [
|
||||
"src/parser.c",
|
||||
"bindings/node/binding.cc"
|
||||
],
|
||||
"cflags_c": [
|
||||
"-std=c99",
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
@@ -1,30 +0,0 @@
|
||||
#include "nan.h"
|
||||
#include "tree_sitter/parser.h"
|
||||
#include <node.h>
|
||||
|
||||
using namespace v8;
|
||||
|
||||
extern "C" TSLanguage *tree_sitter_context_predicate();
|
||||
|
||||
namespace {
|
||||
|
||||
NAN_METHOD(New) {}
|
||||
|
||||
void Init(Local<Object> exports, Local<Object> module) {
|
||||
Local<FunctionTemplate> tpl = Nan::New<FunctionTemplate>(New);
|
||||
tpl->SetClassName(Nan::New("Language").ToLocalChecked());
|
||||
tpl->InstanceTemplate()->SetInternalFieldCount(1);
|
||||
|
||||
Local<Function> constructor = Nan::GetFunction(tpl).ToLocalChecked();
|
||||
Local<Object> instance =
|
||||
constructor->NewInstance(Nan::GetCurrentContext()).ToLocalChecked();
|
||||
Nan::SetInternalFieldPointer(instance, 0, tree_sitter_context_predicate());
|
||||
|
||||
Nan::Set(instance, Nan::New("name").ToLocalChecked(),
|
||||
Nan::New("context_predicate").ToLocalChecked());
|
||||
Nan::Set(module, Nan::New("exports").ToLocalChecked(), instance);
|
||||
}
|
||||
|
||||
NODE_MODULE(tree_sitter_context_predicate_binding, Init)
|
||||
|
||||
} // namespace
|
||||
@@ -1,19 +0,0 @@
|
||||
try {
|
||||
module.exports = require("../../build/Release/tree_sitter_context_predicate_binding");
|
||||
} catch (error1) {
|
||||
if (error1.code !== 'MODULE_NOT_FOUND') {
|
||||
throw error1;
|
||||
}
|
||||
try {
|
||||
module.exports = require("../../build/Debug/tree_sitter_context_predicate_binding");
|
||||
} catch (error2) {
|
||||
if (error2.code !== 'MODULE_NOT_FOUND') {
|
||||
throw error2;
|
||||
}
|
||||
throw error1
|
||||
}
|
||||
}
|
||||
|
||||
try {
|
||||
module.exports.nodeTypeInfo = require("../../src/node-types.json");
|
||||
} catch (_) {}
|
||||
@@ -1,40 +0,0 @@
|
||||
fn main() {
|
||||
let src_dir = std::path::Path::new("src");
|
||||
|
||||
let mut c_config = cc::Build::new();
|
||||
c_config.include(&src_dir);
|
||||
c_config
|
||||
.flag_if_supported("-Wno-unused-parameter")
|
||||
.flag_if_supported("-Wno-unused-but-set-variable")
|
||||
.flag_if_supported("-Wno-trigraphs");
|
||||
let parser_path = src_dir.join("parser.c");
|
||||
c_config.file(&parser_path);
|
||||
|
||||
// If your language uses an external scanner written in C,
|
||||
// then include this block of code:
|
||||
|
||||
/*
|
||||
let scanner_path = src_dir.join("scanner.c");
|
||||
c_config.file(&scanner_path);
|
||||
println!("cargo:rerun-if-changed={}", scanner_path.to_str().unwrap());
|
||||
*/
|
||||
|
||||
c_config.compile("parser");
|
||||
println!("cargo:rerun-if-changed={}", parser_path.to_str().unwrap());
|
||||
|
||||
// If your language uses an external scanner written in C++,
|
||||
// then include this block of code:
|
||||
|
||||
/*
|
||||
let mut cpp_config = cc::Build::new();
|
||||
cpp_config.cpp(true);
|
||||
cpp_config.include(&src_dir);
|
||||
cpp_config
|
||||
.flag_if_supported("-Wno-unused-parameter")
|
||||
.flag_if_supported("-Wno-unused-but-set-variable");
|
||||
let scanner_path = src_dir.join("scanner.cc");
|
||||
cpp_config.file(&scanner_path);
|
||||
cpp_config.compile("scanner");
|
||||
println!("cargo:rerun-if-changed={}", scanner_path.to_str().unwrap());
|
||||
*/
|
||||
}
|
||||
@@ -1,52 +0,0 @@
|
||||
//! This crate provides context_predicate language support for the [tree-sitter][] parsing library.
|
||||
//!
|
||||
//! Typically, you will use the [language][language func] function to add this language to a
|
||||
//! tree-sitter [Parser][], and then use the parser to parse some code:
|
||||
//!
|
||||
//! ```
|
||||
//! let code = "";
|
||||
//! let mut parser = tree_sitter::Parser::new();
|
||||
//! parser.set_language(tree_sitter_context_predicate::language()).expect("Error loading context_predicate grammar");
|
||||
//! let tree = parser.parse(code, None).unwrap();
|
||||
//! ```
|
||||
//!
|
||||
//! [Language]: https://docs.rs/tree-sitter/*/tree_sitter/struct.Language.html
|
||||
//! [language func]: fn.language.html
|
||||
//! [Parser]: https://docs.rs/tree-sitter/*/tree_sitter/struct.Parser.html
|
||||
//! [tree-sitter]: https://tree-sitter.github.io/
|
||||
|
||||
use tree_sitter::Language;
|
||||
|
||||
extern "C" {
|
||||
fn tree_sitter_context_predicate() -> Language;
|
||||
}
|
||||
|
||||
/// Get the tree-sitter [Language][] for this grammar.
|
||||
///
|
||||
/// [Language]: https://docs.rs/tree-sitter/*/tree_sitter/struct.Language.html
|
||||
pub fn language() -> Language {
|
||||
unsafe { tree_sitter_context_predicate() }
|
||||
}
|
||||
|
||||
/// The content of the [`node-types.json`][] file for this grammar.
|
||||
///
|
||||
/// [`node-types.json`]: https://tree-sitter.github.io/tree-sitter/using-parsers#static-node-types
|
||||
pub const NODE_TYPES: &'static str = include_str!("../../src/node-types.json");
|
||||
|
||||
// Uncomment these to include any queries that this grammar contains
|
||||
|
||||
// pub const HIGHLIGHTS_QUERY: &'static str = include_str!("../../queries/highlights.scm");
|
||||
// pub const INJECTIONS_QUERY: &'static str = include_str!("../../queries/injections.scm");
|
||||
// pub const LOCALS_QUERY: &'static str = include_str!("../../queries/locals.scm");
|
||||
// pub const TAGS_QUERY: &'static str = include_str!("../../queries/tags.scm");
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
#[test]
|
||||
fn test_can_load_grammar() {
|
||||
let mut parser = tree_sitter::Parser::new();
|
||||
parser
|
||||
.set_language(super::language())
|
||||
.expect("Error loading context_predicate language");
|
||||
}
|
||||
}
|
||||
@@ -1,49 +0,0 @@
|
||||
==================
|
||||
Identifiers
|
||||
==================
|
||||
|
||||
abc12
|
||||
|
||||
---
|
||||
|
||||
(source (identifier))
|
||||
|
||||
==================
|
||||
Negation
|
||||
==================
|
||||
|
||||
!abc
|
||||
|
||||
---
|
||||
|
||||
(source (not (identifier)))
|
||||
|
||||
==================
|
||||
And/Or
|
||||
==================
|
||||
|
||||
a || b && c && d
|
||||
|
||||
---
|
||||
|
||||
(source
|
||||
(or
|
||||
(identifier)
|
||||
(and
|
||||
(and (identifier) (identifier))
|
||||
(identifier))))
|
||||
|
||||
==================
|
||||
Expressions
|
||||
==================
|
||||
|
||||
a && (b == c || d != e)
|
||||
|
||||
---
|
||||
|
||||
(source
|
||||
(and
|
||||
(identifier)
|
||||
(parenthesized (or
|
||||
(equal (identifier) (identifier))
|
||||
(not_equal (identifier) (identifier))))))
|
||||
@@ -1,31 +0,0 @@
|
||||
module.exports = grammar({
|
||||
name: 'context_predicate',
|
||||
|
||||
rules: {
|
||||
source: $ => $._expression,
|
||||
|
||||
_expression: $ => choice(
|
||||
$.identifier,
|
||||
$.not,
|
||||
$.and,
|
||||
$.or,
|
||||
$.equal,
|
||||
$.not_equal,
|
||||
$.parenthesized,
|
||||
),
|
||||
|
||||
identifier: $ => /[A-Za-z0-9_-]+/,
|
||||
|
||||
not: $ => prec(3, seq("!", field("expression", $._expression))),
|
||||
|
||||
and: $ => prec.left(2, seq(field("left", $._expression), "&&", field("right", $._expression))),
|
||||
|
||||
or: $ => prec.left(1, seq(field("left", $._expression), "||", field("right", $._expression))),
|
||||
|
||||
equal: $ => seq(field("left", $.identifier), "==", field("right", $.identifier)),
|
||||
|
||||
not_equal: $ => seq(field("left", $.identifier), "!=", field("right", $.identifier)),
|
||||
|
||||
parenthesized: $ => seq("(", field("expression", $._expression), ")"),
|
||||
}
|
||||
});
|
||||
@@ -1,44 +0,0 @@
|
||||
{
|
||||
"name": "tree-sitter-context-predicate",
|
||||
"lockfileVersion": 2,
|
||||
"requires": true,
|
||||
"packages": {
|
||||
"": {
|
||||
"name": "tree-sitter-context-predicate",
|
||||
"dependencies": {
|
||||
"nan": "^2.14.0"
|
||||
},
|
||||
"devDependencies": {
|
||||
"tree-sitter-cli": "^0.19.5"
|
||||
}
|
||||
},
|
||||
"node_modules/nan": {
|
||||
"version": "2.14.2",
|
||||
"resolved": "https://registry.npmjs.org/nan/-/nan-2.14.2.tgz",
|
||||
"integrity": "sha512-M2ufzIiINKCuDfBSAUr1vWQ+vuVcA9kqx8JJUsbQi6yf1uGRyb7HfpdfUr5qLXf3B/t8dPvcjhKMmlfnP47EzQ=="
|
||||
},
|
||||
"node_modules/tree-sitter-cli": {
|
||||
"version": "0.19.5",
|
||||
"resolved": "https://registry.npmjs.org/tree-sitter-cli/-/tree-sitter-cli-0.19.5.tgz",
|
||||
"integrity": "sha512-kRzKrUAwpDN9AjA3b0tPBwT1hd8N2oQvvvHup2OEsX6mdsSMLmAvR+NSqK9fe05JrRbVvG8mbteNUQsxlMQohQ==",
|
||||
"dev": true,
|
||||
"hasInstallScript": true,
|
||||
"bin": {
|
||||
"tree-sitter": "cli.js"
|
||||
}
|
||||
}
|
||||
},
|
||||
"dependencies": {
|
||||
"nan": {
|
||||
"version": "2.14.2",
|
||||
"resolved": "https://registry.npmjs.org/nan/-/nan-2.14.2.tgz",
|
||||
"integrity": "sha512-M2ufzIiINKCuDfBSAUr1vWQ+vuVcA9kqx8JJUsbQi6yf1uGRyb7HfpdfUr5qLXf3B/t8dPvcjhKMmlfnP47EzQ=="
|
||||
},
|
||||
"tree-sitter-cli": {
|
||||
"version": "0.19.5",
|
||||
"resolved": "https://registry.npmjs.org/tree-sitter-cli/-/tree-sitter-cli-0.19.5.tgz",
|
||||
"integrity": "sha512-kRzKrUAwpDN9AjA3b0tPBwT1hd8N2oQvvvHup2OEsX6mdsSMLmAvR+NSqK9fe05JrRbVvG8mbteNUQsxlMQohQ==",
|
||||
"dev": true
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1,10 +0,0 @@
|
||||
{
|
||||
"name": "tree-sitter-context-predicate",
|
||||
"main": "bindings/node",
|
||||
"devDependencies": {
|
||||
"tree-sitter-cli": "^0.19.5"
|
||||
},
|
||||
"dependencies": {
|
||||
"nan": "^2.14.0"
|
||||
}
|
||||
}
|
||||
@@ -1,208 +0,0 @@
|
||||
{
|
||||
"name": "context_predicate",
|
||||
"rules": {
|
||||
"source": {
|
||||
"type": "SYMBOL",
|
||||
"name": "_expression"
|
||||
},
|
||||
"_expression": {
|
||||
"type": "CHOICE",
|
||||
"members": [
|
||||
{
|
||||
"type": "SYMBOL",
|
||||
"name": "identifier"
|
||||
},
|
||||
{
|
||||
"type": "SYMBOL",
|
||||
"name": "not"
|
||||
},
|
||||
{
|
||||
"type": "SYMBOL",
|
||||
"name": "and"
|
||||
},
|
||||
{
|
||||
"type": "SYMBOL",
|
||||
"name": "or"
|
||||
},
|
||||
{
|
||||
"type": "SYMBOL",
|
||||
"name": "equal"
|
||||
},
|
||||
{
|
||||
"type": "SYMBOL",
|
||||
"name": "not_equal"
|
||||
},
|
||||
{
|
||||
"type": "SYMBOL",
|
||||
"name": "parenthesized"
|
||||
}
|
||||
]
|
||||
},
|
||||
"identifier": {
|
||||
"type": "PATTERN",
|
||||
"value": "[A-Za-z0-9_-]+"
|
||||
},
|
||||
"not": {
|
||||
"type": "PREC",
|
||||
"value": 3,
|
||||
"content": {
|
||||
"type": "SEQ",
|
||||
"members": [
|
||||
{
|
||||
"type": "STRING",
|
||||
"value": "!"
|
||||
},
|
||||
{
|
||||
"type": "FIELD",
|
||||
"name": "expression",
|
||||
"content": {
|
||||
"type": "SYMBOL",
|
||||
"name": "_expression"
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
"and": {
|
||||
"type": "PREC_LEFT",
|
||||
"value": 2,
|
||||
"content": {
|
||||
"type": "SEQ",
|
||||
"members": [
|
||||
{
|
||||
"type": "FIELD",
|
||||
"name": "left",
|
||||
"content": {
|
||||
"type": "SYMBOL",
|
||||
"name": "_expression"
|
||||
}
|
||||
},
|
||||
{
|
||||
"type": "STRING",
|
||||
"value": "&&"
|
||||
},
|
||||
{
|
||||
"type": "FIELD",
|
||||
"name": "right",
|
||||
"content": {
|
||||
"type": "SYMBOL",
|
||||
"name": "_expression"
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
"or": {
|
||||
"type": "PREC_LEFT",
|
||||
"value": 1,
|
||||
"content": {
|
||||
"type": "SEQ",
|
||||
"members": [
|
||||
{
|
||||
"type": "FIELD",
|
||||
"name": "left",
|
||||
"content": {
|
||||
"type": "SYMBOL",
|
||||
"name": "_expression"
|
||||
}
|
||||
},
|
||||
{
|
||||
"type": "STRING",
|
||||
"value": "||"
|
||||
},
|
||||
{
|
||||
"type": "FIELD",
|
||||
"name": "right",
|
||||
"content": {
|
||||
"type": "SYMBOL",
|
||||
"name": "_expression"
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
"equal": {
|
||||
"type": "SEQ",
|
||||
"members": [
|
||||
{
|
||||
"type": "FIELD",
|
||||
"name": "left",
|
||||
"content": {
|
||||
"type": "SYMBOL",
|
||||
"name": "identifier"
|
||||
}
|
||||
},
|
||||
{
|
||||
"type": "STRING",
|
||||
"value": "=="
|
||||
},
|
||||
{
|
||||
"type": "FIELD",
|
||||
"name": "right",
|
||||
"content": {
|
||||
"type": "SYMBOL",
|
||||
"name": "identifier"
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
"not_equal": {
|
||||
"type": "SEQ",
|
||||
"members": [
|
||||
{
|
||||
"type": "FIELD",
|
||||
"name": "left",
|
||||
"content": {
|
||||
"type": "SYMBOL",
|
||||
"name": "identifier"
|
||||
}
|
||||
},
|
||||
{
|
||||
"type": "STRING",
|
||||
"value": "!="
|
||||
},
|
||||
{
|
||||
"type": "FIELD",
|
||||
"name": "right",
|
||||
"content": {
|
||||
"type": "SYMBOL",
|
||||
"name": "identifier"
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
"parenthesized": {
|
||||
"type": "SEQ",
|
||||
"members": [
|
||||
{
|
||||
"type": "STRING",
|
||||
"value": "("
|
||||
},
|
||||
{
|
||||
"type": "FIELD",
|
||||
"name": "expression",
|
||||
"content": {
|
||||
"type": "SYMBOL",
|
||||
"name": "_expression"
|
||||
}
|
||||
},
|
||||
{
|
||||
"type": "STRING",
|
||||
"value": ")"
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
"extras": [
|
||||
{
|
||||
"type": "PATTERN",
|
||||
"value": "\\s"
|
||||
}
|
||||
],
|
||||
"conflicts": [],
|
||||
"precedences": [],
|
||||
"externals": [],
|
||||
"inline": [],
|
||||
"supertypes": []
|
||||
}
|
||||
|
||||
@@ -1,353 +0,0 @@
|
||||
[
|
||||
{
|
||||
"type": "and",
|
||||
"named": true,
|
||||
"fields": {
|
||||
"left": {
|
||||
"multiple": false,
|
||||
"required": true,
|
||||
"types": [
|
||||
{
|
||||
"type": "and",
|
||||
"named": true
|
||||
},
|
||||
{
|
||||
"type": "equal",
|
||||
"named": true
|
||||
},
|
||||
{
|
||||
"type": "identifier",
|
||||
"named": true
|
||||
},
|
||||
{
|
||||
"type": "not",
|
||||
"named": true
|
||||
},
|
||||
{
|
||||
"type": "not_equal",
|
||||
"named": true
|
||||
},
|
||||
{
|
||||
"type": "or",
|
||||
"named": true
|
||||
},
|
||||
{
|
||||
"type": "parenthesized",
|
||||
"named": true
|
||||
}
|
||||
]
|
||||
},
|
||||
"right": {
|
||||
"multiple": false,
|
||||
"required": true,
|
||||
"types": [
|
||||
{
|
||||
"type": "and",
|
||||
"named": true
|
||||
},
|
||||
{
|
||||
"type": "equal",
|
||||
"named": true
|
||||
},
|
||||
{
|
||||
"type": "identifier",
|
||||
"named": true
|
||||
},
|
||||
{
|
||||
"type": "not",
|
||||
"named": true
|
||||
},
|
||||
{
|
||||
"type": "not_equal",
|
||||
"named": true
|
||||
},
|
||||
{
|
||||
"type": "or",
|
||||
"named": true
|
||||
},
|
||||
{
|
||||
"type": "parenthesized",
|
||||
"named": true
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
},
|
||||
{
|
||||
"type": "equal",
|
||||
"named": true,
|
||||
"fields": {
|
||||
"left": {
|
||||
"multiple": false,
|
||||
"required": true,
|
||||
"types": [
|
||||
{
|
||||
"type": "identifier",
|
||||
"named": true
|
||||
}
|
||||
]
|
||||
},
|
||||
"right": {
|
||||
"multiple": false,
|
||||
"required": true,
|
||||
"types": [
|
||||
{
|
||||
"type": "identifier",
|
||||
"named": true
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
},
|
||||
{
|
||||
"type": "not",
|
||||
"named": true,
|
||||
"fields": {
|
||||
"expression": {
|
||||
"multiple": false,
|
||||
"required": true,
|
||||
"types": [
|
||||
{
|
||||
"type": "and",
|
||||
"named": true
|
||||
},
|
||||
{
|
||||
"type": "equal",
|
||||
"named": true
|
||||
},
|
||||
{
|
||||
"type": "identifier",
|
||||
"named": true
|
||||
},
|
||||
{
|
||||
"type": "not",
|
||||
"named": true
|
||||
},
|
||||
{
|
||||
"type": "not_equal",
|
||||
"named": true
|
||||
},
|
||||
{
|
||||
"type": "or",
|
||||
"named": true
|
||||
},
|
||||
{
|
||||
"type": "parenthesized",
|
||||
"named": true
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
},
|
||||
{
|
||||
"type": "not_equal",
|
||||
"named": true,
|
||||
"fields": {
|
||||
"left": {
|
||||
"multiple": false,
|
||||
"required": true,
|
||||
"types": [
|
||||
{
|
||||
"type": "identifier",
|
||||
"named": true
|
||||
}
|
||||
]
|
||||
},
|
||||
"right": {
|
||||
"multiple": false,
|
||||
"required": true,
|
||||
"types": [
|
||||
{
|
||||
"type": "identifier",
|
||||
"named": true
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
},
|
||||
{
|
||||
"type": "or",
|
||||
"named": true,
|
||||
"fields": {
|
||||
"left": {
|
||||
"multiple": false,
|
||||
"required": true,
|
||||
"types": [
|
||||
{
|
||||
"type": "and",
|
||||
"named": true
|
||||
},
|
||||
{
|
||||
"type": "equal",
|
||||
"named": true
|
||||
},
|
||||
{
|
||||
"type": "identifier",
|
||||
"named": true
|
||||
},
|
||||
{
|
||||
"type": "not",
|
||||
"named": true
|
||||
},
|
||||
{
|
||||
"type": "not_equal",
|
||||
"named": true
|
||||
},
|
||||
{
|
||||
"type": "or",
|
||||
"named": true
|
||||
},
|
||||
{
|
||||
"type": "parenthesized",
|
||||
"named": true
|
||||
}
|
||||
]
|
||||
},
|
||||
"right": {
|
||||
"multiple": false,
|
||||
"required": true,
|
||||
"types": [
|
||||
{
|
||||
"type": "and",
|
||||
"named": true
|
||||
},
|
||||
{
|
||||
"type": "equal",
|
||||
"named": true
|
||||
},
|
||||
{
|
||||
"type": "identifier",
|
||||
"named": true
|
||||
},
|
||||
{
|
||||
"type": "not",
|
||||
"named": true
|
||||
},
|
||||
{
|
||||
"type": "not_equal",
|
||||
"named": true
|
||||
},
|
||||
{
|
||||
"type": "or",
|
||||
"named": true
|
||||
},
|
||||
{
|
||||
"type": "parenthesized",
|
||||
"named": true
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
},
|
||||
{
|
||||
"type": "parenthesized",
|
||||
"named": true,
|
||||
"fields": {
|
||||
"expression": {
|
||||
"multiple": false,
|
||||
"required": true,
|
||||
"types": [
|
||||
{
|
||||
"type": "and",
|
||||
"named": true
|
||||
},
|
||||
{
|
||||
"type": "equal",
|
||||
"named": true
|
||||
},
|
||||
{
|
||||
"type": "identifier",
|
||||
"named": true
|
||||
},
|
||||
{
|
||||
"type": "not",
|
||||
"named": true
|
||||
},
|
||||
{
|
||||
"type": "not_equal",
|
||||
"named": true
|
||||
},
|
||||
{
|
||||
"type": "or",
|
||||
"named": true
|
||||
},
|
||||
{
|
||||
"type": "parenthesized",
|
||||
"named": true
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
},
|
||||
{
|
||||
"type": "source",
|
||||
"named": true,
|
||||
"fields": {},
|
||||
"children": {
|
||||
"multiple": false,
|
||||
"required": true,
|
||||
"types": [
|
||||
{
|
||||
"type": "and",
|
||||
"named": true
|
||||
},
|
||||
{
|
||||
"type": "equal",
|
||||
"named": true
|
||||
},
|
||||
{
|
||||
"type": "identifier",
|
||||
"named": true
|
||||
},
|
||||
{
|
||||
"type": "not",
|
||||
"named": true
|
||||
},
|
||||
{
|
||||
"type": "not_equal",
|
||||
"named": true
|
||||
},
|
||||
{
|
||||
"type": "or",
|
||||
"named": true
|
||||
},
|
||||
{
|
||||
"type": "parenthesized",
|
||||
"named": true
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
{
|
||||
"type": "!",
|
||||
"named": false
|
||||
},
|
||||
{
|
||||
"type": "!=",
|
||||
"named": false
|
||||
},
|
||||
{
|
||||
"type": "&&",
|
||||
"named": false
|
||||
},
|
||||
{
|
||||
"type": "(",
|
||||
"named": false
|
||||
},
|
||||
{
|
||||
"type": ")",
|
||||
"named": false
|
||||
},
|
||||
{
|
||||
"type": "==",
|
||||
"named": false
|
||||
},
|
||||
{
|
||||
"type": "identifier",
|
||||
"named": true
|
||||
},
|
||||
{
|
||||
"type": "||",
|
||||
"named": false
|
||||
}
|
||||
]
|
||||
@@ -1,584 +0,0 @@
#include <tree_sitter/parser.h>

#if defined(__GNUC__) || defined(__clang__)
#pragma GCC diagnostic push
#pragma GCC diagnostic ignored "-Wmissing-field-initializers"
#endif

#define LANGUAGE_VERSION 13
#define STATE_COUNT 18
#define LARGE_STATE_COUNT 6
#define SYMBOL_COUNT 17
#define ALIAS_COUNT 0
#define TOKEN_COUNT 9
#define EXTERNAL_TOKEN_COUNT 0
#define FIELD_COUNT 3
#define MAX_ALIAS_SEQUENCE_LENGTH 3
#define PRODUCTION_ID_COUNT 3

enum {
sym_identifier = 1,
anon_sym_BANG = 2,
anon_sym_AMP_AMP = 3,
anon_sym_PIPE_PIPE = 4,
anon_sym_EQ_EQ = 5,
anon_sym_BANG_EQ = 6,
anon_sym_LPAREN = 7,
anon_sym_RPAREN = 8,
sym_source = 9,
sym__expression = 10,
sym_not = 11,
sym_and = 12,
sym_or = 13,
sym_equal = 14,
sym_not_equal = 15,
sym_parenthesized = 16,
};

static const char *const ts_symbol_names[] = {
[ts_builtin_sym_end] = "end",
[sym_identifier] = "identifier",
[anon_sym_BANG] = "!",
[anon_sym_AMP_AMP] = "&&",
[anon_sym_PIPE_PIPE] = "||",
[anon_sym_EQ_EQ] = "==",
[anon_sym_BANG_EQ] = "!=",
[anon_sym_LPAREN] = "(",
[anon_sym_RPAREN] = ")",
[sym_source] = "source",
[sym__expression] = "_expression",
[sym_not] = "not",
[sym_and] = "and",
[sym_or] = "or",
[sym_equal] = "equal",
[sym_not_equal] = "not_equal",
[sym_parenthesized] = "parenthesized",
};

static const TSSymbol ts_symbol_map[] = {
[ts_builtin_sym_end] = ts_builtin_sym_end,
[sym_identifier] = sym_identifier,
[anon_sym_BANG] = anon_sym_BANG,
[anon_sym_AMP_AMP] = anon_sym_AMP_AMP,
[anon_sym_PIPE_PIPE] = anon_sym_PIPE_PIPE,
[anon_sym_EQ_EQ] = anon_sym_EQ_EQ,
[anon_sym_BANG_EQ] = anon_sym_BANG_EQ,
[anon_sym_LPAREN] = anon_sym_LPAREN,
[anon_sym_RPAREN] = anon_sym_RPAREN,
[sym_source] = sym_source,
[sym__expression] = sym__expression,
[sym_not] = sym_not,
[sym_and] = sym_and,
[sym_or] = sym_or,
[sym_equal] = sym_equal,
[sym_not_equal] = sym_not_equal,
[sym_parenthesized] = sym_parenthesized,
};

static const TSSymbolMetadata ts_symbol_metadata[] = {
[ts_builtin_sym_end] =
{
.visible = false,
.named = true,
},
[sym_identifier] =
{
.visible = true,
.named = true,
},
[anon_sym_BANG] =
{
.visible = true,
.named = false,
},
[anon_sym_AMP_AMP] =
{
.visible = true,
.named = false,
},
[anon_sym_PIPE_PIPE] =
{
.visible = true,
.named = false,
},
[anon_sym_EQ_EQ] =
{
.visible = true,
.named = false,
},
[anon_sym_BANG_EQ] =
{
.visible = true,
.named = false,
},
[anon_sym_LPAREN] =
{
.visible = true,
.named = false,
},
[anon_sym_RPAREN] =
{
.visible = true,
.named = false,
},
[sym_source] =
{
.visible = true,
.named = true,
},
[sym__expression] =
{
.visible = false,
.named = true,
},
[sym_not] =
{
.visible = true,
.named = true,
},
[sym_and] =
{
.visible = true,
.named = true,
},
[sym_or] =
{
.visible = true,
.named = true,
},
[sym_equal] =
{
.visible = true,
.named = true,
},
[sym_not_equal] =
{
.visible = true,
.named = true,
},
[sym_parenthesized] =
{
.visible = true,
.named = true,
},
};

enum {
field_expression = 1,
field_left = 2,
field_right = 3,
};

static const char *const ts_field_names[] = {
[0] = NULL,
[field_expression] = "expression",
[field_left] = "left",
[field_right] = "right",
};

static const TSFieldMapSlice ts_field_map_slices[PRODUCTION_ID_COUNT] = {
[1] = {.index = 0, .length = 1},
[2] = {.index = 1, .length = 2},
};

static const TSFieldMapEntry ts_field_map_entries[] = {
[0] = {field_expression, 1},
[1] = {field_left, 0},
{field_right, 2},
};

static const TSSymbol ts_alias_sequences[PRODUCTION_ID_COUNT]
[MAX_ALIAS_SEQUENCE_LENGTH] = {
[0] = {0},
};

static const uint16_t ts_non_terminal_alias_map[] = {
0,
};

static bool ts_lex(TSLexer *lexer, TSStateId state) {
START_LEXER();
eof = lexer->eof(lexer);
switch (state) {
case 0:
if (eof)
ADVANCE(7);
if (lookahead == '!')
ADVANCE(10);
if (lookahead == '&')
ADVANCE(2);
if (lookahead == '(')
ADVANCE(15);
if (lookahead == ')')
ADVANCE(16);
if (lookahead == '=')
ADVANCE(4);
if (lookahead == '|')
ADVANCE(5);
if (lookahead == '\t' || lookahead == '\n' || lookahead == '\r' ||
lookahead == ' ')
SKIP(0)
if (lookahead == '-' || ('0' <= lookahead && lookahead <= '9') ||
('A' <= lookahead && lookahead <= 'Z') || lookahead == '_' ||
('a' <= lookahead && lookahead <= 'z'))
ADVANCE(8);
END_STATE();
case 1:
if (lookahead == '!')
ADVANCE(9);
if (lookahead == '(')
ADVANCE(15);
if (lookahead == '\t' || lookahead == '\n' || lookahead == '\r' ||
lookahead == ' ')
SKIP(1)
if (lookahead == '-' || ('0' <= lookahead && lookahead <= '9') ||
('A' <= lookahead && lookahead <= 'Z') || lookahead == '_' ||
('a' <= lookahead && lookahead <= 'z'))
ADVANCE(8);
END_STATE();
case 2:
if (lookahead == '&')
ADVANCE(11);
END_STATE();
case 3:
if (lookahead == '=')
ADVANCE(14);
END_STATE();
case 4:
if (lookahead == '=')
ADVANCE(13);
END_STATE();
case 5:
if (lookahead == '|')
ADVANCE(12);
END_STATE();
case 6:
if (eof)
ADVANCE(7);
if (lookahead == '!')
ADVANCE(3);
if (lookahead == '&')
ADVANCE(2);
if (lookahead == ')')
ADVANCE(16);
if (lookahead == '=')
ADVANCE(4);
if (lookahead == '|')
ADVANCE(5);
if (lookahead == '\t' || lookahead == '\n' || lookahead == '\r' ||
lookahead == ' ')
SKIP(6)
END_STATE();
case 7:
ACCEPT_TOKEN(ts_builtin_sym_end);
END_STATE();
case 8:
ACCEPT_TOKEN(sym_identifier);
if (lookahead == '-' || ('0' <= lookahead && lookahead <= '9') ||
('A' <= lookahead && lookahead <= 'Z') || lookahead == '_' ||
('a' <= lookahead && lookahead <= 'z'))
ADVANCE(8);
END_STATE();
case 9:
ACCEPT_TOKEN(anon_sym_BANG);
END_STATE();
case 10:
ACCEPT_TOKEN(anon_sym_BANG);
if (lookahead == '=')
ADVANCE(14);
END_STATE();
case 11:
ACCEPT_TOKEN(anon_sym_AMP_AMP);
END_STATE();
case 12:
ACCEPT_TOKEN(anon_sym_PIPE_PIPE);
END_STATE();
case 13:
ACCEPT_TOKEN(anon_sym_EQ_EQ);
END_STATE();
case 14:
ACCEPT_TOKEN(anon_sym_BANG_EQ);
END_STATE();
case 15:
ACCEPT_TOKEN(anon_sym_LPAREN);
END_STATE();
case 16:
ACCEPT_TOKEN(anon_sym_RPAREN);
END_STATE();
default:
return false;
}
}

static const TSLexMode ts_lex_modes[STATE_COUNT] = {
[0] = {.lex_state = 0}, [1] = {.lex_state = 1}, [2] = {.lex_state = 1},
[3] = {.lex_state = 1}, [4] = {.lex_state = 1}, [5] = {.lex_state = 1},
[6] = {.lex_state = 6}, [7] = {.lex_state = 0}, [8] = {.lex_state = 0},
[9] = {.lex_state = 0}, [10] = {.lex_state = 0}, [11] = {.lex_state = 0},
[12] = {.lex_state = 0}, [13] = {.lex_state = 0}, [14] = {.lex_state = 0},
[15] = {.lex_state = 0}, [16] = {.lex_state = 0}, [17] = {.lex_state = 0},
};

static const uint16_t ts_parse_table[LARGE_STATE_COUNT][SYMBOL_COUNT] = {
[0] =
{
[ts_builtin_sym_end] = ACTIONS(1),
[sym_identifier] = ACTIONS(1),
[anon_sym_BANG] = ACTIONS(1),
[anon_sym_AMP_AMP] = ACTIONS(1),
[anon_sym_PIPE_PIPE] = ACTIONS(1),
[anon_sym_EQ_EQ] = ACTIONS(1),
[anon_sym_BANG_EQ] = ACTIONS(1),
[anon_sym_LPAREN] = ACTIONS(1),
[anon_sym_RPAREN] = ACTIONS(1),
},
[1] =
{
[sym_source] = STATE(15),
[sym__expression] = STATE(13),
[sym_not] = STATE(13),
[sym_and] = STATE(13),
[sym_or] = STATE(13),
[sym_equal] = STATE(13),
[sym_not_equal] = STATE(13),
[sym_parenthesized] = STATE(13),
[sym_identifier] = ACTIONS(3),
[anon_sym_BANG] = ACTIONS(5),
[anon_sym_LPAREN] = ACTIONS(7),
},
[2] =
{
[sym__expression] = STATE(7),
[sym_not] = STATE(7),
[sym_and] = STATE(7),
[sym_or] = STATE(7),
[sym_equal] = STATE(7),
[sym_not_equal] = STATE(7),
[sym_parenthesized] = STATE(7),
[sym_identifier] = ACTIONS(3),
[anon_sym_BANG] = ACTIONS(5),
[anon_sym_LPAREN] = ACTIONS(7),
},
[3] =
{
[sym__expression] = STATE(14),
[sym_not] = STATE(14),
[sym_and] = STATE(14),
[sym_or] = STATE(14),
[sym_equal] = STATE(14),
[sym_not_equal] = STATE(14),
[sym_parenthesized] = STATE(14),
[sym_identifier] = ACTIONS(3),
[anon_sym_BANG] = ACTIONS(5),
[anon_sym_LPAREN] = ACTIONS(7),
},
[4] =
{
[sym__expression] = STATE(11),
[sym_not] = STATE(11),
[sym_and] = STATE(11),
[sym_or] = STATE(11),
[sym_equal] = STATE(11),
[sym_not_equal] = STATE(11),
[sym_parenthesized] = STATE(11),
[sym_identifier] = ACTIONS(3),
[anon_sym_BANG] = ACTIONS(5),
[anon_sym_LPAREN] = ACTIONS(7),
},
[5] =
{
[sym__expression] = STATE(12),
[sym_not] = STATE(12),
[sym_and] = STATE(12),
[sym_or] = STATE(12),
[sym_equal] = STATE(12),
[sym_not_equal] = STATE(12),
[sym_parenthesized] = STATE(12),
[sym_identifier] = ACTIONS(3),
[anon_sym_BANG] = ACTIONS(5),
[anon_sym_LPAREN] = ACTIONS(7),
},
};

static const uint16_t ts_small_parse_table[] = {
[0] = 3,
ACTIONS(11),
1,
anon_sym_EQ_EQ,
ACTIONS(13),
1,
anon_sym_BANG_EQ,
ACTIONS(9),
4,
ts_builtin_sym_end,
anon_sym_AMP_AMP,
anon_sym_PIPE_PIPE,
anon_sym_RPAREN,
[13] = 1,
ACTIONS(15),
4,
ts_builtin_sym_end,
anon_sym_AMP_AMP,
anon_sym_PIPE_PIPE,
anon_sym_RPAREN,
[20] = 1,
ACTIONS(17),
4,
ts_builtin_sym_end,
anon_sym_AMP_AMP,
anon_sym_PIPE_PIPE,
anon_sym_RPAREN,
[27] = 1,
ACTIONS(19),
4,
ts_builtin_sym_end,
anon_sym_AMP_AMP,
anon_sym_PIPE_PIPE,
anon_sym_RPAREN,
[34] = 1,
ACTIONS(21),
4,
ts_builtin_sym_end,
anon_sym_AMP_AMP,
anon_sym_PIPE_PIPE,
anon_sym_RPAREN,
[41] = 1,
ACTIONS(23),
4,
ts_builtin_sym_end,
anon_sym_AMP_AMP,
anon_sym_PIPE_PIPE,
anon_sym_RPAREN,
[48] = 2,
ACTIONS(27),
1,
anon_sym_AMP_AMP,
ACTIONS(25),
3,
ts_builtin_sym_end,
anon_sym_PIPE_PIPE,
anon_sym_RPAREN,
[57] = 3,
ACTIONS(27),
1,
anon_sym_AMP_AMP,
ACTIONS(29),
1,
ts_builtin_sym_end,
ACTIONS(31),
1,
anon_sym_PIPE_PIPE,
[67] = 3,
ACTIONS(27),
1,
anon_sym_AMP_AMP,
ACTIONS(31),
1,
anon_sym_PIPE_PIPE,
ACTIONS(33),
1,
anon_sym_RPAREN,
[77] = 1,
ACTIONS(35),
1,
ts_builtin_sym_end,
[81] = 1,
ACTIONS(37),
1,
sym_identifier,
[85] = 1,
ACTIONS(39),
1,
sym_identifier,
};

static const uint32_t ts_small_parse_table_map[] = {
[SMALL_STATE(6)] = 0, [SMALL_STATE(7)] = 13, [SMALL_STATE(8)] = 20,
[SMALL_STATE(9)] = 27, [SMALL_STATE(10)] = 34, [SMALL_STATE(11)] = 41,
[SMALL_STATE(12)] = 48, [SMALL_STATE(13)] = 57, [SMALL_STATE(14)] = 67,
[SMALL_STATE(15)] = 77, [SMALL_STATE(16)] = 81, [SMALL_STATE(17)] = 85,
};

static const TSParseActionEntry ts_parse_actions[] = {
[0] = {.entry = {.count = 0, .reusable = false}},
[1] = {.entry = {.count = 1, .reusable = false}},
RECOVER(),
[3] = {.entry = {.count = 1, .reusable = true}},
SHIFT(6),
[5] = {.entry = {.count = 1, .reusable = true}},
SHIFT(2),
[7] = {.entry = {.count = 1, .reusable = true}},
SHIFT(3),
[9] = {.entry = {.count = 1, .reusable = true}},
REDUCE(sym__expression, 1),
[11] = {.entry = {.count = 1, .reusable = true}},
SHIFT(16),
[13] = {.entry = {.count = 1, .reusable = true}},
SHIFT(17),
[15] = {.entry = {.count = 1, .reusable = true}},
REDUCE(sym_not, 2, .production_id = 1),
[17] = {.entry = {.count = 1, .reusable = true}},
REDUCE(sym_equal, 3, .production_id = 2),
[19] = {.entry = {.count = 1, .reusable = true}},
REDUCE(sym_not_equal, 3, .production_id = 2),
[21] = {.entry = {.count = 1, .reusable = true}},
REDUCE(sym_parenthesized, 3, .production_id = 1),
[23] = {.entry = {.count = 1, .reusable = true}},
REDUCE(sym_and, 3, .production_id = 2),
[25] = {.entry = {.count = 1, .reusable = true}},
REDUCE(sym_or, 3, .production_id = 2),
[27] = {.entry = {.count = 1, .reusable = true}},
SHIFT(4),
[29] = {.entry = {.count = 1, .reusable = true}},
REDUCE(sym_source, 1),
[31] = {.entry = {.count = 1, .reusable = true}},
SHIFT(5),
[33] = {.entry = {.count = 1, .reusable = true}},
SHIFT(10),
[35] = {.entry = {.count = 1, .reusable = true}},
ACCEPT_INPUT(),
[37] = {.entry = {.count = 1, .reusable = true}},
SHIFT(8),
[39] = {.entry = {.count = 1, .reusable = true}},
SHIFT(9),
};

#ifdef __cplusplus
extern "C" {
#endif
#ifdef _WIN32
#define extern __declspec(dllexport)
#endif

extern const TSLanguage *tree_sitter_context_predicate(void) {
static const TSLanguage language = {
.version = LANGUAGE_VERSION,
.symbol_count = SYMBOL_COUNT,
.alias_count = ALIAS_COUNT,
.token_count = TOKEN_COUNT,
.external_token_count = EXTERNAL_TOKEN_COUNT,
.state_count = STATE_COUNT,
.large_state_count = LARGE_STATE_COUNT,
.production_id_count = PRODUCTION_ID_COUNT,
.field_count = FIELD_COUNT,
.max_alias_sequence_length = MAX_ALIAS_SEQUENCE_LENGTH,
.parse_table = &ts_parse_table[0][0],
.small_parse_table = ts_small_parse_table,
.small_parse_table_map = ts_small_parse_table_map,
.parse_actions = ts_parse_actions,
.symbol_names = ts_symbol_names,
.field_names = ts_field_names,
.field_map_slices = ts_field_map_slices,
.field_map_entries = ts_field_map_entries,
.symbol_metadata = ts_symbol_metadata,
.public_symbol_map = ts_symbol_map,
.alias_map = ts_non_terminal_alias_map,
.alias_sequences = &ts_alias_sequences[0][0],
.lex_modes = ts_lex_modes,
.lex_fn = ts_lex,
};
return &language;
}
#ifdef __cplusplus
}
#endif
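The deleted parser.c above is the generated LR parser for the `context_predicate` grammar; its only public entry point is `tree_sitter_context_predicate()`. For context, a generated parser like this is normally consumed from Rust roughly as sketched below. This is an illustration only, assuming the C sources are compiled and linked (for example via a `cc`-based build script) and the 0.20-era `tree_sitter` crate API; none of it comes from this diff.

```rust
// Sketch only: loading the exported language and parsing a keymap-style
// context predicate expression.
use tree_sitter::{Language, Parser};

extern "C" {
    // Exported by the generated parser.c shown above.
    fn tree_sitter_context_predicate() -> Language;
}

fn main() {
    let language = unsafe { tree_sitter_context_predicate() };
    let mut parser = Parser::new();
    parser
        .set_language(language)
        .expect("language version mismatch");

    // Parse an expression and print its syntax tree. The node kinds in the
    // output (source, and, equal, not, identifier, ...) are the ones listed
    // in node-types.json above.
    let tree = parser
        .parse("Editor && mode == full && !read_only", None)
        .unwrap();
    println!("{}", tree.root_node().to_sexp());
}
```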
@@ -1,223 +0,0 @@
#ifndef TREE_SITTER_PARSER_H_
#define TREE_SITTER_PARSER_H_

#ifdef __cplusplus
extern "C" {
#endif

#include <stdbool.h>
#include <stdint.h>
#include <stdlib.h>

#define ts_builtin_sym_error ((TSSymbol)-1)
#define ts_builtin_sym_end 0
#define TREE_SITTER_SERIALIZATION_BUFFER_SIZE 1024

typedef uint16_t TSStateId;

#ifndef TREE_SITTER_API_H_
typedef uint16_t TSSymbol;
typedef uint16_t TSFieldId;
typedef struct TSLanguage TSLanguage;
#endif

typedef struct {
TSFieldId field_id;
uint8_t child_index;
bool inherited;
} TSFieldMapEntry;

typedef struct {
uint16_t index;
uint16_t length;
} TSFieldMapSlice;

typedef struct {
bool visible;
bool named;
bool supertype;
} TSSymbolMetadata;

typedef struct TSLexer TSLexer;

struct TSLexer {
int32_t lookahead;
TSSymbol result_symbol;
void (*advance)(TSLexer *, bool);
void (*mark_end)(TSLexer *);
uint32_t (*get_column)(TSLexer *);
bool (*is_at_included_range_start)(const TSLexer *);
bool (*eof)(const TSLexer *);
};

typedef enum {
TSParseActionTypeShift,
TSParseActionTypeReduce,
TSParseActionTypeAccept,
TSParseActionTypeRecover,
} TSParseActionType;

typedef union {
struct {
uint8_t type;
TSStateId state;
bool extra;
bool repetition;
} shift;
struct {
uint8_t type;
uint8_t child_count;
TSSymbol symbol;
int16_t dynamic_precedence;
uint16_t production_id;
} reduce;
uint8_t type;
} TSParseAction;

typedef struct {
uint16_t lex_state;
uint16_t external_lex_state;
} TSLexMode;

typedef union {
TSParseAction action;
struct {
uint8_t count;
bool reusable;
} entry;
} TSParseActionEntry;

struct TSLanguage {
uint32_t version;
uint32_t symbol_count;
uint32_t alias_count;
uint32_t token_count;
uint32_t external_token_count;
uint32_t state_count;
uint32_t large_state_count;
uint32_t production_id_count;
uint32_t field_count;
uint16_t max_alias_sequence_length;
const uint16_t *parse_table;
const uint16_t *small_parse_table;
const uint32_t *small_parse_table_map;
const TSParseActionEntry *parse_actions;
const char * const *symbol_names;
const char * const *field_names;
const TSFieldMapSlice *field_map_slices;
const TSFieldMapEntry *field_map_entries;
const TSSymbolMetadata *symbol_metadata;
const TSSymbol *public_symbol_map;
const uint16_t *alias_map;
const TSSymbol *alias_sequences;
const TSLexMode *lex_modes;
bool (*lex_fn)(TSLexer *, TSStateId);
bool (*keyword_lex_fn)(TSLexer *, TSStateId);
TSSymbol keyword_capture_token;
struct {
const bool *states;
const TSSymbol *symbol_map;
void *(*create)(void);
void (*destroy)(void *);
bool (*scan)(void *, TSLexer *, const bool *symbol_whitelist);
unsigned (*serialize)(void *, char *);
void (*deserialize)(void *, const char *, unsigned);
} external_scanner;
};

/*
* Lexer Macros
*/

#define START_LEXER() \
bool result = false; \
bool skip = false; \
bool eof = false; \
int32_t lookahead; \
goto start; \
next_state: \
lexer->advance(lexer, skip); \
start: \
skip = false; \
lookahead = lexer->lookahead;

#define ADVANCE(state_value) \
{ \
state = state_value; \
goto next_state; \
}

#define SKIP(state_value) \
{ \
skip = true; \
state = state_value; \
goto next_state; \
}

#define ACCEPT_TOKEN(symbol_value) \
result = true; \
lexer->result_symbol = symbol_value; \
lexer->mark_end(lexer);

#define END_STATE() return result;

/*
* Parse Table Macros
*/

#define SMALL_STATE(id) id - LARGE_STATE_COUNT

#define STATE(id) id

#define ACTIONS(id) id

#define SHIFT(state_value) \
{{ \
.shift = { \
.type = TSParseActionTypeShift, \
.state = state_value \
} \
}}

#define SHIFT_REPEAT(state_value) \
{{ \
.shift = { \
.type = TSParseActionTypeShift, \
.state = state_value, \
.repetition = true \
} \
}}

#define SHIFT_EXTRA() \
{{ \
.shift = { \
.type = TSParseActionTypeShift, \
.extra = true \
} \
}}

#define REDUCE(symbol_val, child_count_val, ...) \
{{ \
.reduce = { \
.type = TSParseActionTypeReduce, \
.symbol = symbol_val, \
.child_count = child_count_val, \
__VA_ARGS__ \
}, \
}}

#define RECOVER() \
{{ \
.type = TSParseActionTypeRecover \
}}

#define ACCEPT_INPUT() \
{{ \
.type = TSParseActionTypeAccept \
}}

#ifdef __cplusplus
}
#endif

#endif // TREE_SITTER_PARSER_H_
File diff suppressed because it is too large
@@ -1,19 +1,44 @@
use crate::MutableAppContext;
use collections::{BTreeMap, HashMap, HashSet};
use parking_lot::Mutex;
use std::sync::Arc;
use std::{hash::Hash, sync::Weak};

use parking_lot::Mutex;

use collections::{btree_map, BTreeMap, HashMap};

use crate::MutableAppContext;

pub type Mapping<K, F> = Mutex<HashMap<K, BTreeMap<usize, Option<F>>>>;

pub struct CallbackCollection<K: Hash + Eq, F> {
internal: Arc<Mapping<K, F>>,
pub struct CallbackCollection<K: Clone + Hash + Eq, F> {
internal: Arc<Mutex<Mapping<K, F>>>,
}

impl<K: Hash + Eq, F> Clone for CallbackCollection<K, F> {
pub struct Subscription<K: Clone + Hash + Eq, F> {
key: K,
id: usize,
mapping: Option<Weak<Mutex<Mapping<K, F>>>>,
}

struct Mapping<K, F> {
callbacks: HashMap<K, BTreeMap<usize, F>>,
dropped_subscriptions: HashMap<K, HashSet<usize>>,
}

impl<K: Hash + Eq, F> Mapping<K, F> {
fn clear_dropped_state(&mut self, key: &K, subscription_id: usize) -> bool {
if let Some(subscriptions) = self.dropped_subscriptions.get_mut(&key) {
subscriptions.remove(&subscription_id)
} else {
false
}
}
}

impl<K, F> Default for Mapping<K, F> {
fn default() -> Self {
Self {
callbacks: Default::default(),
dropped_subscriptions: Default::default(),
}
}
}

impl<K: Clone + Hash + Eq, F> Clone for CallbackCollection<K, F> {
fn clone(&self) -> Self {
Self {
internal: self.internal.clone(),
@@ -21,7 +46,7 @@ impl<K: Hash + Eq, F> Clone for CallbackCollection<K, F> {
}
}

impl<K: Hash + Eq + Copy, F> Default for CallbackCollection<K, F> {
impl<K: Clone + Hash + Eq + Copy, F> Default for CallbackCollection<K, F> {
fn default() -> Self {
CallbackCollection {
internal: Arc::new(Mutex::new(Default::default())),
@@ -29,78 +54,114 @@ impl<K: Hash + Eq + Copy, F> Default for CallbackCollection<K, F> {
}
}

impl<K: Hash + Eq + Copy, F> CallbackCollection<K, F> {
pub fn downgrade(&self) -> Weak<Mapping<K, F>> {
Arc::downgrade(&self.internal)
}

impl<K: Clone + Hash + Eq + Copy, F> CallbackCollection<K, F> {
#[cfg(test)]
pub fn is_empty(&self) -> bool {
self.internal.lock().is_empty()
self.internal.lock().callbacks.is_empty()
}

pub fn add_callback(&mut self, id: K, subscription_id: usize, callback: F) {
self.internal
.lock()
.entry(id)
.or_default()
.insert(subscription_id, Some(callback));
}

pub fn remove(&mut self, id: K) {
self.internal.lock().remove(&id);
}

pub fn add_or_remove_callback(&mut self, id: K, subscription_id: usize, callback: F) {
match self
.internal
.lock()
.entry(id)
.or_default()
.entry(subscription_id)
{
btree_map::Entry::Vacant(entry) => {
entry.insert(Some(callback));
}

btree_map::Entry::Occupied(entry) => {
// TODO: This seems like it should never be called because no code
// should ever attempt to remove an existing callback
debug_assert!(entry.get().is_none());
entry.remove();
}
pub fn subscribe(&mut self, key: K, subscription_id: usize) -> Subscription<K, F> {
Subscription {
key,
id: subscription_id,
mapping: Some(Arc::downgrade(&self.internal)),
}
}

pub fn emit_and_cleanup<C: FnMut(&mut F, &mut MutableAppContext) -> bool>(
pub fn add_callback(&mut self, key: K, subscription_id: usize, callback: F) {
let mut this = self.internal.lock();

// If this callback's subscription was dropped before the callback was
// added, then just drop the callback.
if this.clear_dropped_state(&key, subscription_id) {
return;
}

this.callbacks
.entry(key)
.or_default()
.insert(subscription_id, callback);
}

pub fn remove(&mut self, key: K) {
// Drop these callbacks after releasing the lock, in case one of them
// owns a subscription to this callback collection.
let mut this = self.internal.lock();
let callbacks = this.callbacks.remove(&key);
this.dropped_subscriptions.remove(&key);
drop(this);
drop(callbacks);
}

pub fn emit<C: FnMut(&mut F, &mut MutableAppContext) -> bool>(
&mut self,
id: K,
key: K,
cx: &mut MutableAppContext,
mut call_callback: C,
) {
let callbacks = self.internal.lock().remove(&id);
let callbacks = self.internal.lock().callbacks.remove(&key);
if let Some(callbacks) = callbacks {
for (subscription_id, callback) in callbacks {
if let Some(mut callback) = callback {
let alive = call_callback(&mut callback, cx);
if alive {
match self
.internal
.lock()
.entry(id)
.or_default()
.entry(subscription_id)
{
btree_map::Entry::Vacant(entry) => {
entry.insert(Some(callback));
}
btree_map::Entry::Occupied(entry) => {
entry.remove();
}
}
}
for (subscription_id, mut callback) in callbacks {
// If this callback's subscription was dropped while invoking an
// earlier callback, then just drop the callback.
let mut this = self.internal.lock();
if this.clear_dropped_state(&key, subscription_id) {
continue;
}

drop(this);
let alive = call_callback(&mut callback, cx);

// If this callback's subscription was dropped while invoking the callback
// itself, or if the callback returns false, then just drop the callback.
let mut this = self.internal.lock();
if this.clear_dropped_state(&key, subscription_id) || !alive {
continue;
}

this.callbacks
.entry(key)
.or_default()
.insert(subscription_id, callback);
}
}
}
}

impl<K: Clone + Hash + Eq, F> Subscription<K, F> {
pub fn id(&self) -> usize {
self.id
}

pub fn detach(&mut self) {
self.mapping.take();
}
}

impl<K: Clone + Hash + Eq, F> Drop for Subscription<K, F> {
fn drop(&mut self) {
if let Some(mapping) = self.mapping.as_ref().and_then(|mapping| mapping.upgrade()) {
let mut mapping = mapping.lock();

// If the callback is present in the mapping, then just remove it.
if let Some(callbacks) = mapping.callbacks.get_mut(&self.key) {
let callback = callbacks.remove(&self.id);
if callback.is_some() {
drop(mapping);
drop(callback);
return;
}
}

// If this subscription's callback is not present, then either it has been
// temporarily removed during emit, or it has not yet been added. Record
// that this subscription has been dropped so that the callback can be
// removed later.
mapping
.dropped_subscriptions
.entry(self.key.clone())
.or_default()
.insert(self.id);
}
}
}
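The rewritten callback collection above replaces the old `Option<F>` tombstone entries with an explicit `dropped_subscriptions` set: a `Subscription` that is dropped before its callback is registered, or while `emit` has temporarily taken the callbacks out of the map, records its id so the callback is discarded instead of re-inserted. The standalone sketch below illustrates that bookkeeping with plain `FnMut() -> bool` closures and a single key space; it is a simplified model of the idea, not the gpui API.

```rust
// Simplified illustration of the dropped-subscription bookkeeping.
use std::collections::{BTreeMap, HashSet};

struct Callbacks {
    callbacks: BTreeMap<usize, Box<dyn FnMut() -> bool>>,
    dropped_subscriptions: HashSet<usize>,
}

impl Callbacks {
    fn new() -> Self {
        Self { callbacks: BTreeMap::new(), dropped_subscriptions: HashSet::new() }
    }

    // Mirrors `add_callback`: if the subscription was already dropped,
    // discard the callback instead of inserting it.
    fn add_callback(&mut self, id: usize, callback: Box<dyn FnMut() -> bool>) {
        if self.dropped_subscriptions.remove(&id) {
            return;
        }
        self.callbacks.insert(id, callback);
    }

    // Mirrors `Subscription::drop`: remove the callback if present, otherwise
    // remember the id so a later add or emit can discard it.
    fn drop_subscription(&mut self, id: usize) {
        if self.callbacks.remove(&id).is_none() {
            self.dropped_subscriptions.insert(id);
        }
    }

    // Mirrors `emit`: take the callbacks out, invoke each one, and re-insert
    // it only if it is still alive and was not dropped in the meantime.
    fn emit(&mut self) {
        let callbacks = std::mem::take(&mut self.callbacks);
        for (id, mut callback) in callbacks {
            if self.dropped_subscriptions.remove(&id) {
                continue;
            }
            let alive = callback();
            let dropped_during_call = self.dropped_subscriptions.remove(&id);
            if alive && !dropped_during_call {
                self.callbacks.insert(id, callback);
            }
        }
    }
}

fn main() {
    let mut collection = Callbacks::new();
    collection.drop_subscription(7); // dropped before its callback exists
    collection.add_callback(7, Box::new(|| true)); // discarded, never stored
    collection.add_callback(8, Box::new(|| { println!("called"); false }));
    collection.emit(); // prints "called" once, then discards the callback
    assert!(collection.callbacks.is_empty());
}
```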
@@ -17,11 +17,11 @@ use parking_lot::{Mutex, RwLock};
use smol::stream::StreamExt;

use crate::{
executor, geometry::vector::Vector2F, keymap::Keystroke, platform, Action, AnyViewHandle,
AppContext, Appearance, Entity, Event, FontCache, InputHandler, KeyDownEvent, LeakDetector,
ModelContext, ModelHandle, MutableAppContext, Platform, ReadModelWith, ReadViewWith,
RenderContext, Task, UpdateModel, UpdateView, View, ViewContext, ViewHandle, WeakHandle,
WindowInputHandler,
executor, geometry::vector::Vector2F, keymap_matcher::Keystroke, platform, Action,
AnyViewHandle, AppContext, Appearance, Entity, Event, FontCache, InputHandler, KeyDownEvent,
LeakDetector, ModelContext, ModelHandle, MutableAppContext, Platform, ReadModelWith,
ReadViewWith, RenderContext, Task, UpdateModel, UpdateView, View, ViewContext, ViewHandle,
WeakHandle, WindowInputHandler,
};
use collections::BTreeMap;
Some files were not shown because too many files have changed in this diff