Compare commits

353 Commits · dynamic-ru… ... click-scro…

Commit SHA1s:
b420aea6bb 88e33a1dbe 139bb3275a 7f0e373358 6286d86262 fc9f84446a 109482761b 572ba3f93b
427d66990c ac4bbb6135 754df9356d 3c28282108 80b80dfa78 47afc70979 646f69583a e792c1a5c5
9a2dceeea1 fb83cf2042 a105b5f215 aeb2a98058 34f09bae4f c09fe1ce8a 64219ba49f 05dfe96f0c
89c67fb1ab 41d8ba12ec e7289c385d ece0fb532d 8c87b349dc 39a0841ea8 409aa513d4 d5796dc5eb
9f396948bb e103607134 0a341261d0 d362588055 e5bd9f184b 98cf494057 3be1402a3d 44adb0a316
14a0d8039b c0b1f74794 02dcdd0228 25c471f9e4 fe70a2646f b2981f4baa f2aa183512 dfcc143ead
2b67bb27cf 91a0923fc4 b4ddc83e85 3bd9d14420 95b311cb90 8eea281288 373a4e7614 f9f9f0670f
20d5f5e8da f066dd268f 0be20d0817 a04932c4eb ceadb39c38 2244419dfd a8fa1f7363 eb5e18c66d
830e107921 45c4d35da8 298314d526 2f6c78b0c0 4700d33728 9afd78b35e 9ff3cff6f8 d66f8f99bd
269848775c 39bd12a557 e1f8a1e8b2 41dc5fc412 f4a86e6fea 597465b0f5 ccc939124f a03fecafbb
ca696fd5f6 456efb53ad d4ec78f328 efe5203a09 146971fb02 347178039c 6a7a3b257a 8a6264d933
5abcc1c3c5 977af37cfe 1756c1fc1e be953b78ef 51ebe0eb01 a550b9cecf bf295eac90 fa5dfe19f8
7b73e2824b 1081ba7a62 ed8aa6d200 af564242e1 aa7be4b5d8 866d791760 f67abd2943 d247086b21
467a179837 b50f86735f e85d484952 2d83580df4 a90a667fd0 35c7b5d7dd ffebe2e4a6 e85f190128
284a57d4d1 9068911eb4 27518f4280 86748a09e7 b5370cd15a 85e6bc94e9 105e654dce 6b8984279f
bc7fb9f253 d450fde1ed 4c9c9df730 3a9ec906af 01fe3eec4d 0a07746381 026cdc617c 4238793d16
1a9387035d 4f53e6e9a0 75a42c27db 4d2156e2ad 9481b346e2 8a92d28663 8be4b4d75d c0edb5bd6c
c8e03ce42a 74e7611ceb 0b87be71e6 ae5ec9224c ca37d39109 675ae24964 e273198ada af87fb98d0
effc317a06 6bbd09e28e 06035dadea 8357039419 59faef5800 a0fac3866a 8352f39ff9 850ddddcac
563c4db350 8eb0239d5a deb86a1ffc b622dcbc64 2f15676b7c 4a60326c1c 567fee4219 6036830049
c8383e3b18 334c3c670b e3a7192c11 6327f3ced8 c58422cb3f 6d53846824 01e5e4224a 4de80688b9
ce6bde5a24 35c516fda9 bca98caa07 b68a277b5e 703c9655a0 addfcdea8d d112bcfadf 9e66d48ccd
d98b61e3d6 ad0c5731e5 cfffa29f9a 9a2ed4bf1a b6af393e6d a25edcc5a8 22fe03913c 52f750b216
36c4831806 7c9f680b1b 2b8b913b6b d286c56ebb 0b34b1de7b 537d92533c fae5e83d93 6a268e959f
4b2e774594 e0c66b30c8 27c5343707 4167c66b86 d223fe446d b742db65fe f53823c840 2201b9b116
b2f18cfe71 95e532c56d d7b5c883fe 78fa596839 1dd4c1b057 9ea50ed649 9c6a0d98ed 09760340ca
12980dd88f a860530a2e 996f1036fc 33ef5b7731 16be391211 94593dca4b 98a1e87fbe 6121c286b7
c91969d828 538298378a 3a184bbadd 38f106cde3 20acc123af ff65008316 1442fcb497 49f378ead3
0717d30389 36d9b3d483 a5eab29662 6a6dbe3aa1 37ffa86043 9095a6b04e 69e0474ebb a88df2c103
53630dc74c 1fa9496334 08f9c3f568 56f0418c93 2964a01d73 83f6a1ea49 6685d3ac97 bfc648553f
5fad319cb5 fe04f69caf 20d133322a a6dbaac653 33790b81fc 373e18bc88 191fcf67d1 2f876471a1
659974411d 6a9e8faad2 ea68f86476 d19957b705 328c8a94b3 3ab16d8012 ca9d5a2f6b bd00aed7db
4097e8870b 008c5053e6 2bfc646f33 3e287911c3 503bebaacc f79f56f8b4 a17c207217 fc8e515fe8
eaf2fbb21b 8bc35c33c5 52052f342b d7962aa2d3 9735912965 5935681c5c 12440d5e0d 4b81b15cad
87efb75e53 bf666af3a2 c9a509c805 c19587d4e4 0ac203bde0 5de7492146 f9dc871422 6fcd57ac53
0903062933 e5e6c7f09d ca2cda8d2a 5c2bd816ae 26fdd149e1 486f0ae454 cfe90c37fc e3608af992
8c3ae8b264 b2cc617886 a84a3c0ebe 03f18053bb 26103e8bb9 b7784d414a 268fa1cbaf f3f2225a8e
242f032d74 5523a510c5 3efb871cd4 37f7957826 400fb12f7e 64460e492a cdf702aeff 91d1146d97
9723ca95e3 aeed9d0d8b d6492d091d 34de33ef72 7f5aa1fca6 0d0ce95eae eb1ab69606 7c1ef966f3
4cc4f08a53 dab886f479 cbcd011a36 b3b94e64ba db9cc42245 faa6f979be dc7befb884 519655297a
47bcb305af 953bc5eee2 c94852b843 81886a9baf 225dd0f9a0 778b6fb27b b7429bf29d 9bd5ebb74b
ac30ded80e 7f954cbbb8 c07237df33 387c161d8c b76e0d997e 198dfe0097 16eb17e2f8 014e6f66bb
9e4b3ce94c 4a4ca2c3b8 495de89747 6a3ac94eea 6e04c1f924 57f5f128f3 7efa8d079d d0ffd51bb1
7aba9eb4b7 517ea734ee a52177fd39 893e55ff96 f8959834c4 2e516261fe ca092fb694 96d9df073e
9f7e625d37
@@ -10,3 +10,6 @@
# in one spot, that's going to trigger a rebuild of all of the artifacts. Using ci-config.toml we can define these overrides for CI in one spot and not worry about it.
[build]
rustflags = ["-D", "warnings"]

[alias]
xtask = "run --package xtask --"
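Nothing in this compare view shows the step that activates ci-config.toml; as a hedged sketch only, one way a CI job could apply the override (both the path and the copy-over approach are assumptions, not something this diff confirms) would be:

    # Hypothetical CI step: swap the CI-only cargo settings in before building,
    # so the "-D warnings" rustflags apply to every job without editing the
    # developer config that ships in the repository.
    cp .cargo/ci-config.toml .cargo/config.toml
    cargo build --workspace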
@@ -1,3 +1,6 @@
[build]
# v0 mangling scheme provides more detailed backtraces around closures
rustflags = ["-C", "symbol-mangling-version=v0"]
rustflags = ["-C", "symbol-mangling-version=v0", "--cfg", "tokio_unstable"]

[alias]
xtask = "run --package xtask --"
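The [alias] entry above is what the `cargo xtask clippy` commands in the workflow changes below rely on; by cargo's alias rules it is plain shorthand:

    # With xtask = "run --package xtask --" in [alias], this invocation:
    cargo xtask clippy
    # expands to an ordinary run of the workspace's xtask helper crate:
    cargo run --package xtask -- clippy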
6  .github/ISSUE_TEMPLATE/1_bug_report.yml  vendored
@@ -32,9 +32,9 @@ body:
required: false
- type: textarea
attributes:
label: |
If applicable, attach your `~/Library/Logs/Zed/Zed.log` file to this issue.
label: If applicable, attach your `~/Library/Logs/Zed/Zed.log` file to this issue.
description: |
Drag Zed.log into the text input below.
If you only need the most recent lines, you can run the `zed: open log` command palette action to see the last 1000.
description: Drag Zed.log into the text input below
validations:
required: false
6  .github/ISSUE_TEMPLATE/2_crash_report.yml  vendored
@@ -31,9 +31,9 @@ body:
required: false
- type: textarea
attributes:
label: |
If applicable, attach your `~/Library/Logs/Zed/Zed.log` file to this issue.
label: If applicable, attach your `~/Library/Logs/Zed/Zed.log` file to this issue.
description: |
Drag Zed.log into the text input below.
If you only need the most recent lines, you can run the `zed: open log` command palette action to see the last 1000.
description: Drag Zed.log into the text input below
validations:
required: false
31  .github/ISSUE_TEMPLATE/config.yml  vendored
@@ -1,16 +1,17 @@
blank_issues_enabled: false
contact_links:
- name: Language Request
url: https://github.com/zed-industries/extensions/issues/new?assignees=&labels=language&projects=&template=1_language_request.yml&title=%3Cname_of_language%3E
about: Request a language in the extensions repository
- name: Theme Request
url: https://github.com/zed-industries/extensions/issues/new?assignees=&labels=theme&projects=&template=0_theme_request.yml&title=%3Cname_of_theme%3E+theme
about: Request a theme in the extensions repository
- name: Top-Ranking Issues
url: https://github.com/zed-industries/zed/issues/5393
about: See an overview of the most popular Zed issues
- name: Platform Support
url: https://github.com/zed-industries/zed/issues/5391
about: A quick note on platform support
- name: Positive Feedback
url: https://github.com/zed-industries/zed/discussions/5397
about: A central location for kind words about Zed
- name: Language Request
url: https://github.com/zed-industries/extensions/issues/new?assignees=&labels=language&projects=&template=1_language_request.yml&title=%3Cname_of_language%3E
about: Request a language in the extensions repository
- name: Theme Request
url: https://github.com/zed-industries/extensions/issues/new?assignees=&labels=theme&projects=&template=0_theme_request.yml&title=%3Cname_of_theme%3E+theme
about: Request a theme in the extensions repository
- name: Top-Ranking Issues
url: https://github.com/zed-industries/zed/issues/5393
about: See an overview of the most popular Zed issues
- name: Platform Support
url: https://github.com/zed-industries/zed/issues/5391
about: A quick note on platform support
- name: Positive Feedback
url: https://github.com/zed-industries/zed/discussions/5397
about: A central location for kind words about Zed
2  .github/cherry-pick-bot.yml  vendored  Normal file
@@ -0,0 +1,2 @@
enabled: true
preservePullRequestTitle: true
105  .github/workflows/ci.yml  vendored
@@ -87,8 +87,7 @@ jobs:
submodules: "recursive"

- name: cargo clippy
shell: bash -euxo pipefail {0}
run: script/clippy
run: cargo xtask clippy

- name: Run tests
uses: ./.github/actions/run_tests
@@ -99,7 +98,7 @@
- name: Build other binaries and features
run: cargo build --workspace --bins --all-features; cargo check -p gpui --features "macos-blade"

# todo!(linux): Actually run the tests
# todo(linux): Actually run the tests
linux_tests:
name: (Linux) Run Clippy and tests
runs-on: ubuntu-latest
@@ -120,13 +119,12 @@
run: script/linux

- name: cargo clippy
shell: bash -euxo pipefail {0}
run: script/clippy
run: cargo xtask clippy

- name: Build Zed
run: cargo build -p zed

# todo!(windows): Actually run the tests
# todo(windows): Actually run the tests
windows_tests:
name: (Windows) Run Clippy and tests
runs-on: windows-latest
@@ -143,18 +141,17 @@
save-if: ${{ github.ref == 'refs/heads/main' }}

- name: cargo clippy
shell: bash -euxo pipefail {0}
run: script/clippy
run: cargo xtask clippy

- name: Build Zed
run: cargo build -p zed

bundle:
name: Bundle macOS app
bundle-mac:
name: Create a macOS bundle
runs-on:
- self-hosted
- bundle
if: ${{ startsWith(github.ref, 'refs/tags/v') || contains(github.event.pull_request.labels.*.name, 'run-build-dmg') }}
if: ${{ startsWith(github.ref, 'refs/tags/v') || contains(github.event.pull_request.labels.*.name, 'run-bundling') }}
needs: [macos_tests]
env:
MACOS_CERTIFICATE: ${{ secrets.MACOS_CERTIFICATE }}
@@ -209,12 +206,12 @@
- name: Generate license file
run: script/generate-licenses

- name: Create app bundle
run: script/bundle
- name: Create macOS app bundle
run: script/bundle-mac

- name: Upload app bundle to workflow run if main branch or specific label
uses: actions/upload-artifact@v3
if: ${{ github.ref == 'refs/heads/main' }} || contains(github.event.pull_request.labels.*.name, 'run-build-dmg') }}
uses: actions/upload-artifact@v4
if: ${{ github.ref == 'refs/heads/main' }} || contains(github.event.pull_request.labels.*.name, 'run-bundling') }}
with:
name: Zed_${{ github.event.pull_request.head.sha || github.sha }}.dmg
path: target/release/Zed.dmg
@@ -229,3 +226,81 @@
body: ""
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}

bundle-deb:
name: Create a *.deb Linux bundle
runs-on: ubuntu-22.04 # keep the version fixed to avoid libc and dynamic linked library issues
if: ${{ startsWith(github.ref, 'refs/tags/v') || contains(github.event.pull_request.labels.*.name, 'run-bundling') }}
needs: [linux_tests]
env:
ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }}
DIGITALOCEAN_SPACES_ACCESS_KEY: ${{ secrets.DIGITALOCEAN_SPACES_ACCESS_KEY }}
DIGITALOCEAN_SPACES_SECRET_KEY: ${{ secrets.DIGITALOCEAN_SPACES_SECRET_KEY }}
steps:
- name: Checkout repo
uses: actions/checkout@v4
with:
clean: false
submodules: "recursive"

- name: Cache dependencies
uses: swatinem/rust-cache@v2
with:
save-if: ${{ github.ref == 'refs/heads/main' }}

- name: Configure linux
shell: bash -euxo pipefail {0}
run: script/linux

- name: Determine version and release channel
if: ${{ startsWith(github.ref, 'refs/tags/v') }}
run: |
set -eu

version=$(script/get-crate-version zed)
channel=$(cat crates/zed/RELEASE_CHANNEL)
echo "Publishing version: ${version} on release channel ${channel}"
echo "RELEASE_CHANNEL=${channel}" >> $GITHUB_ENV

expected_tag_name=""
case ${channel} in
stable)
expected_tag_name="v${version}";;
preview)
expected_tag_name="v${version}-pre";;
nightly)
expected_tag_name="v${version}-nightly";;
*)
echo "can't publish a release on channel ${channel}"
exit 1;;
esac
if [[ $GITHUB_REF_NAME != $expected_tag_name ]]; then
echo "invalid release tag ${GITHUB_REF_NAME}. expected ${expected_tag_name}"
exit 1
fi

# TODO linux : Find a way to add licenses to the final bundle
# - name: Generate license file
# run: script/generate-licenses

- name: Create Linux *.deb bundle
run: script/bundle-linux

- name: Upload app bundle to workflow run if main branch or specific label
uses: actions/upload-artifact@v4
if: ${{ github.ref == 'refs/heads/main' }} || contains(github.event.pull_request.labels.*.name, 'run-bundling') }}
with:
name: Zed_${{ github.event.pull_request.head.sha || github.sha }}.deb
path: target/release/*.deb

# TODO linux : make it stable enough to be uploaded as a release
# - uses: softprops/action-gh-release@v1
# name: Upload app bundle to release
# if: ${{ env.RELEASE_CHANNEL == 'preview' || env.RELEASE_CHANNEL == 'stable' }}
# with:
# draft: true
# prerelease: ${{ env.RELEASE_CHANNEL == 'preview' }}
# files: target/release/Zed.dmg
# body: ""
# env:
# GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
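As a worked example of the tag check in the bundle-deb job above (the version number here is illustrative, not taken from this diff): if crates/zed/RELEASE_CHANNEL contains "preview" and script/get-crate-version reports 0.123.0, the job proceeds only when the pushed ref is the tag v0.123.0-pre; a stable build would require v0.123.0, a nightly build v0.123.0-nightly, and any other channel fails with "can't publish a release on channel ...".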
9  .github/workflows/deploy_collab.yml  vendored
@@ -28,8 +28,7 @@ jobs:
uses: ./.github/actions/check_style

- name: Run clippy
shell: bash -euxo pipefail {0}
run: script/clippy
run: cargo xtask clippy

tests:
name: Run tests
@@ -105,8 +104,12 @@
set -eu
if [[ $GITHUB_REF_NAME = "collab-production" ]]; then
export ZED_KUBE_NAMESPACE=production
export ZED_COLLAB_LOAD_BALANCER_SIZE_UNIT=10
export ZED_API_LOAD_BALANCER_SIZE_UNIT=2
elif [[ $GITHUB_REF_NAME = "collab-staging" ]]; then
export ZED_KUBE_NAMESPACE=staging
export ZED_COLLAB_LOAD_BALANCER_SIZE_UNIT=1
export ZED_API_LOAD_BALANCER_SIZE_UNIT=1
else
echo "cowardly refusing to deploy from an unknown branch"
exit 1
@@ -121,11 +124,13 @@
export ZED_IMAGE_ID="registry.digitalocean.com/zed/collab:${GITHUB_SHA}"

export ZED_SERVICE_NAME=collab
export ZED_LOAD_BALANCER_SIZE_UNIT=$ZED_COLLAB_LOAD_BALANCER_SIZE_UNIT
envsubst < crates/collab/k8s/collab.template.yml | kubectl apply -f -
kubectl -n "$ZED_KUBE_NAMESPACE" rollout status deployment/$ZED_SERVICE_NAME --watch
echo "deployed ${ZED_SERVICE_NAME} to ${ZED_KUBE_NAMESPACE}"

export ZED_SERVICE_NAME=api
export ZED_LOAD_BALANCER_SIZE_UNIT=$ZED_API_LOAD_BALANCER_SIZE_UNIT
envsubst < crates/collab/k8s/collab.template.yml | kubectl apply -f -
kubectl -n "$ZED_KUBE_NAMESPACE" rollout status deployment/$ZED_SERVICE_NAME --watch
echo "deployed ${ZED_SERVICE_NAME} to ${ZED_KUBE_NAMESPACE}"
58  .github/workflows/release_nightly.yml  vendored
@@ -32,8 +32,7 @@ jobs:
uses: ./.github/actions/check_style

- name: Run clippy
shell: bash -euxo pipefail {0}
run: script/clippy
run: cargo xtask clippy
tests:
name: Run tests
if: github.repository_owner == 'zed-industries'
@@ -51,8 +50,8 @@
- name: Run tests
uses: ./.github/actions/run_tests

bundle:
name: Bundle app
bundle-mac:
name: Create a macOS bundle
if: github.repository_owner == 'zed-industries'
runs-on:
- self-hosted
@@ -78,9 +77,6 @@
clean: false
submodules: "recursive"

- name: Limit target directory size
run: script/clear-target-dir-if-larger-than 100

- name: Set release channel to nightly
run: |
set -eu
@@ -91,8 +87,50 @@
- name: Generate license file
run: script/generate-licenses

- name: Create app bundle
run: script/bundle
- name: Create macOS app bundle
run: script/bundle-mac

- name: Upload Zed Nightly
run: script/upload-nightly
run: script/upload-nightly macos

bundle-deb:
name: Create a *.deb Linux bundle
if: github.repository_owner == 'zed-industries'
runs-on: ubuntu-22.04 # keep the version fixed to avoid libc and dynamic linked library issues
needs: tests
env:
DIGITALOCEAN_SPACES_ACCESS_KEY: ${{ secrets.DIGITALOCEAN_SPACES_ACCESS_KEY }}
DIGITALOCEAN_SPACES_SECRET_KEY: ${{ secrets.DIGITALOCEAN_SPACES_SECRET_KEY }}
ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }}
steps:
- name: Checkout repo
uses: actions/checkout@v4
with:
clean: false
submodules: "recursive"

- name: Cache dependencies
uses: swatinem/rust-cache@v2
with:
save-if: ${{ github.ref == 'refs/heads/main' }}

- name: Configure linux
shell: bash -euxo pipefail {0}
run: script/linux

- name: Set release channel to nightly
run: |
set -euo pipefail
version=$(git rev-parse --short HEAD)
echo "Publishing version: ${version} on release channel nightly"
echo "nightly" > crates/zed/RELEASE_CHANNEL

# TODO linux : find a way to add licenses to the final bundle
# - name: Generate license file
# run: script/generate-licenses

- name: Create Linux *.deb bundle
run: script/bundle-linux

- name: Upload Zed Nightly
run: script/upload-nightly linux-deb
2  .gitignore  vendored
@@ -10,6 +10,7 @@
/assets/*licenses.md
**/venv
.build
*.wasm
Packages
*.xcodeproj
xcuserdata/
@@ -22,3 +23,4 @@ DerivedData/
.pytest_cache
.venv
.blob_store
.vscode
1884  Cargo.lock  generated
116  Cargo.toml
@@ -23,6 +23,7 @@ members = [
"crates/diagnostics",
"crates/editor",
"crates/extension",
"crates/extension_api",
"crates/extensions_ui",
"crates/feature_flags",
"crates/feedback",
@@ -53,7 +54,6 @@ members = [
"crates/picker",
"crates/prettier",
"crates/project",
"crates/project_core",
"crates/project_panel",
"crates/project_symbols",
"crates/quick_action_bar",
@@ -89,8 +89,14 @@ members = [
"crates/vim",
"crates/welcome",
"crates/workspace",
"crates/worktree",
"crates/zed",
"crates/zed_actions",

"extensions/gleam",
"extensions/uiua",

"tooling/xtask",
]
default-members = ["crates/zed"]
resolver = "2"
@@ -102,6 +108,7 @@ assets = { path = "crates/assets" }
assistant = { path = "crates/assistant" }
audio = { path = "crates/audio" }
auto_update = { path = "crates/auto_update" }
base64 = "0.13"
breadcrumbs = { path = "crates/breadcrumbs" }
call = { path = "crates/call" }
channel = { path = "crates/channel" }
@@ -152,7 +159,7 @@ plugin = { path = "crates/plugin" }
plugin_macros = { path = "crates/plugin_macros" }
prettier = { path = "crates/prettier" }
project = { path = "crates/project" }
project_core = { path = "crates/project_core" }
worktree = { path = "crates/worktree" }
project_panel = { path = "crates/project_panel" }
project_symbols = { path = "crates/project_symbols" }
quick_action_bar = { path = "crates/quick_action_bar" }
@@ -191,33 +198,43 @@ zed_actions = { path = "crates/zed_actions" }

anyhow = "1.0.57"
async-compression = { version = "0.4", features = ["gzip", "futures-io"] }
async-fs = "1.6"
async-recursion = "1.0.0"
async-tar = "0.4.2"
async-trait = "0.1"
blade-graphics = { git = "https://github.com/kvark/blade", rev = "e9d93a4d41f3946a03ffb76136290d6ccf7f2b80" }
blade-macros = { git = "https://github.com/kvark/blade", rev = "e9d93a4d41f3946a03ffb76136290d6ccf7f2b80" }
bitflags = "2.4.2"
blade-graphics = { git = "https://github.com/kvark/blade", rev = "43721bf42d298b7cbee2195ee66f73a5f1c7b2fc" }
blade-macros = { git = "https://github.com/kvark/blade", rev = "43721bf42d298b7cbee2195ee66f73a5f1c7b2fc" }
blade-rwh = { package = "raw-window-handle", version = "0.5" }
cap-std = "2.0"
chrono = { version = "0.4", features = ["serde"] }
clap = "4.4"
clickhouse = { version = "0.11.6" }
ctor = "0.2.6"
core-foundation = { version = "0.9.3" }
core-foundation-sys = "0.8.6"
derive_more = "0.99.17"
emojis = "0.6.1"
env_logger = "0.9"
futures = "0.3"
futures-lite = "1.13"
git2 = { version = "0.15", default-features = false }
globset = "0.4"
hex = "0.4.3"
ignore = "0.4.22"
indoc = "1"
# We explicitly disable http2 support in isahc.
isahc = { version = "1.7.2", default-features = false, features = ["static-curl", "text-decoding"] }
isahc = { version = "1.7.2", default-features = false, features = [
"static-curl",
"text-decoding",
] }
itertools = "0.11.0"
lazy_static = "1.4.0"
linkify = "0.10.0"
log = { version = "0.4.16", features = ["kv_unstable_serde"] }
ordered-float = "2.1.1"
palette = { version = "0.7.5", default-features = false, features = ["std"] }
parking_lot = "0.11.1"
parking_lot = "0.12.1"
profiling = "1"
postage = { version = "0.5", features = ["futures-traits"] }
pretty_assertions = "1.3.0"
@@ -233,18 +250,26 @@ semver = "1.0"
serde = { version = "1.0", features = ["derive", "rc"] }
serde_derive = { version = "1.0", features = ["deserialize_in_place"] }
serde_json = { version = "1.0", features = ["preserve_order", "raw_value"] }
serde_json_lenient = { version = "0.1", features = ["preserve_order", "raw_value"] }
serde_json_lenient = { version = "0.1", features = [
"preserve_order",
"raw_value",
] }
serde_repr = "0.1"
sha2 = "0.10"
shellexpand = "2.1.0"
smallvec = { version = "1.6", features = ["union"] }
smol = "1.2"
strum = { version = "0.25.0", features = ["derive"] }
sysinfo = "0.29.10"
subtle = "2.5.0"
sysinfo = "0.30.7"
tempfile = "3.9.0"
thiserror = "1.0.29"
tiktoken-rs = "0.5.7"
time = { version = "0.3", features = ["serde", "serde-well-known", "formatting"] }
time = { version = "0.3", features = [
"serde",
"serde-well-known",
"formatting",
] }
toml = "0.8"
tower-http = "0.4.4"
tree-sitter = { version = "0.20", features = ["wasm"] }
@@ -261,7 +286,6 @@ tree-sitter-elixir = { git = "https://github.com/elixir-lang/tree-sitter-elixir"
tree-sitter-elm = { git = "https://github.com/elm-tooling/tree-sitter-elm", rev = "692c50c0b961364c40299e73c1306aecb5d20f40" }
tree-sitter-embedded-template = "0.20.0"
tree-sitter-erlang = "0.4.0"
tree-sitter-gitcommit = { git = "https://github.com/gbprod/tree-sitter-gitcommit" }
tree-sitter-gleam = { git = "https://github.com/gleam-lang/tree-sitter-gleam", rev = "58b7cac8fc14c92b0677c542610d8738c373fa81" }
tree-sitter-glsl = { git = "https://github.com/theHamsta/tree-sitter-glsl", rev = "2a56fb7bc8bb03a1892b4741279dd0a8758b7fb3" }
tree-sitter-go = { git = "https://github.com/tree-sitter/tree-sitter-go", rev = "aeb2f33b366fd78d5789ff104956ce23508b85db" }
@@ -269,6 +293,7 @@ tree-sitter-gomod = { git = "https://github.com/camdencheek/tree-sitter-go-mod"
tree-sitter-gowork = { git = "https://github.com/d1y/tree-sitter-go-work" }
tree-sitter-haskell = { git = "https://github.com/tree-sitter/tree-sitter-haskell", rev = "8a99848fc734f9c4ea523b3f2a07df133cbbcec2" }
tree-sitter-hcl = { git = "https://github.com/MichaHoffmann/tree-sitter-hcl", rev = "v1.1.0" }
rustc-demangle = "0.1.23"
tree-sitter-heex = { git = "https://github.com/phoenixframework/tree-sitter-heex", rev = "2e1348c3cf2c9323e87c2744796cf3f3868aa82a" }
tree-sitter-html = "0.19.0"
tree-sitter-json = { git = "https://github.com/tree-sitter/tree-sitter-json", rev = "40a81c01a40ac48744e0c8ccabbaba1920441199" }
@@ -289,17 +314,45 @@ tree-sitter-scheme = { git = "https://github.com/6cdh/tree-sitter-scheme", rev =
tree-sitter-svelte = { git = "https://github.com/Himujjal/tree-sitter-svelte", rev = "bd60db7d3d06f89b6ec3b287c9a6e9190b5564bd" }
tree-sitter-toml = { git = "https://github.com/tree-sitter/tree-sitter-toml", rev = "342d9be207c2dba869b9967124c679b5e6fd0ebe" }
tree-sitter-typescript = { git = "https://github.com/tree-sitter/tree-sitter-typescript", rev = "5d20856f34315b068c41edaee2ac8a100081d259" }
tree-sitter-uiua = { git = "https://github.com/shnarazk/tree-sitter-uiua", rev = "21dc2db39494585bf29a3f86d5add6e9d11a22ba" }
tree-sitter-vue = { git = "https://github.com/zed-industries/tree-sitter-vue", rev = "6608d9d60c386f19d80af7d8132322fa11199c42" }
tree-sitter-yaml = { git = "https://github.com/zed-industries/tree-sitter-yaml", rev = "f545a41f57502e1b5ddf2a6668896c1b0620f930" }
tree-sitter-zig = { git = "https://github.com/maxxnino/tree-sitter-zig", rev = "0d08703e4c3f426ec61695d7617415fff97029bd" }
unindent = "0.1.7"
unicase = "2.6"
url = "2.2"
uuid = { version = "1.1.2", features = ["v4"] }
wasmtime = "18.0"
wasmparser = "0.121"
wasm-encoder = "0.41"
wasmtime = { version = "18.0", default-features = false, features = ["async", "demangle", "runtime", "cranelift", "component-model"] }
wasmtime-wasi = "18.0"
which = "6.0.0"
wit-component = "0.20"
sys-locale = "0.3.1"

[workspace.dependencies.windows]
version = "0.53.0"
features = [
"implement",
"Wdk_System_SystemServices",
"Win32_Graphics_Gdi",
"Win32_Graphics_DirectComposition",
"Win32_UI_Controls",
"Win32_UI_WindowsAndMessaging",
"Win32_UI_Input_KeyboardAndMouse",
"Win32_UI_Shell",
"Win32_System_Com",
"Win32_System_SystemInformation",
"Win32_System_SystemServices",
"Win32_System_Time",
"Win32_Security",
"Win32_System_Com",
"Win32_System_Com_StructuredStorage",
"Win32_System_Threading",
"Win32_System_DataExchange",
"Win32_System_Ole",
"Win32_System_Com",
]

[patch.crates-io]
tree-sitter = { git = "https://github.com/tree-sitter/tree-sitter", rev = "e4a23971ec3071a09c1e84816954c98f96e98e52" }
# Workaround for a broken nightly build of gpui: See #7644 and revisit once 0.5.3 is released.
@@ -309,7 +362,7 @@ pathfinder_simd = { git = "https://github.com/servo/pathfinder.git", rev = "e4fc
split-debuginfo = "unpacked"
debug = "limited"

# todo!(linux) - Remove this
# todo(linux) - Remove this
[profile.dev.package.blade-graphics]
split-debuginfo = "off"
debug = "full"
@@ -317,6 +370,8 @@ debug = "full"
[profile.dev.package]
taffy = { opt-level = 3 }
cranelift-codegen = { opt-level = 3 }
rustybuzz = { opt-level = 3 }
ttf-parser = { opt-level = 3 }
wasmtime-cranelift = { opt-level = 3 }

[profile.release]
@@ -324,5 +379,40 @@ debug = "limited"
lto = "thin"
codegen-units = 1

[workspace.lints.clippy]
dbg_macro = "deny"
todo = "deny"

# These are all of the rules that currently have violations in the Zed
# codebase.
#
# We'll want to drive this list down by either:
# 1. fixing violations of the rule and begin enforcing it
# 2. deciding we want to allow the rule permanently, at which point
# we should codify that separately above.
#
# This list shouldn't be added to; it should only get shorter.
# =============================================================================

# There are a bunch of rules currently failing in the `style` group, so
# allow all of those, for now.
style = "allow"

# Individual rules that have violations in the codebase:
almost_complete_range = "allow"
arc_with_non_send_sync = "allow"
await_holding_lock = "allow"
borrowed_box = "allow"
derive_ord_xor_partial_ord = "allow"
eq_op = "allow"
let_underscore_future = "allow"
map_entry = "allow"
never_loop = "allow"
non_canonical_clone_impl = "allow"
non_canonical_partial_ord_impl = "allow"
reversed_empty_ranges = "allow"
single_range_in_vec_init = "allow"
type_complexity = "allow"

[workspace.metadata.cargo-machete]
ignored = ["bindgen", "cbindgen", "prost_build", "serde"]
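The [workspace.lints.clippy] block above spells out the clean-up policy: the allow list should only shrink. A minimal way to work an entry off the list, assuming the usual cargo workflow (the specific lint named here is just an example taken from the list), would be:

    # Delete the lint's `= "allow"` line from [workspace.lints.clippy], then
    # re-run the same check CI uses and see whether the workspace is clean:
    cargo xtask clippy
    # or, without the xtask wrapper:
    cargo clippy --workspace --all-targets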
@@ -1,6 +1,6 @@
# syntax = docker/dockerfile:1.2

FROM rust:1.76-bullseye as builder
FROM rust:1.76-bookworm as builder
WORKDIR app
COPY . .

@@ -20,9 +20,10 @@ RUN --mount=type=cache,target=./target \
cp /app/target/release/collab /app/collab

# Copy collab server binary to the runtime image
FROM debian:bullseye-slim as runtime
FROM debian:bookworm-slim as runtime
RUN apt-get update; \
apt-get install -y --no-install-recommends libcurl4-openssl-dev ca-certificates
apt-get install -y --no-install-recommends libcurl4-openssl-dev ca-certificates \
linux-perf binutils
WORKDIR app
COPY --from=builder /app/collab /app/collab
COPY --from=builder /app/crates/collab/migrations /app/migrations
2  Procfile
@@ -1,3 +1,3 @@
collab: RUST_LOG=${RUST_LOG:-warn,tower_http=info,collab=info} cargo run --package=collab serve
livekit: livekit-server --dev
blob_store: MINIO_ROOT_USER=the-blob-store-access-key MINIO_ROOT_PASSWORD=the-blob-store-secret-key minio server .blob_store
blob_store: ./script/run-local-minio
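The Procfile now delegates the blob store to script/run-local-minio, which is not included in this compare view; judging from the line it replaces, a minimal sketch of what such a script would wrap looks like this (the env values are the same placeholders the old Procfile line used):

    #!/usr/bin/env bash
    # Hypothetical sketch of script/run-local-minio (the actual script is not
    # shown in this diff): start a local MinIO server backed by .blob_store.
    set -euo pipefail
    export MINIO_ROOT_USER=the-blob-store-access-key
    export MINIO_ROOT_PASSWORD=the-blob-store-secret-key
    exec minio server .blob_store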
@@ -10,7 +10,7 @@ You can [download](https://zed.dev/download) Zed today for macOS (v10.15+).

Support for additional platforms is on our [roadmap](https://zed.dev/roadmap):

- Linux ([tracking issue](https://github.com/zed-industries/zed/issues/5395))
- Linux ([tracking issue](https://github.com/zed-industries/zed/issues/7015))
- Windows ([tracking issue](https://github.com/zed-industries/zed/issues/5394))
- Web ([tracking issue](https://github.com/zed-industries/zed/issues/5396))

@@ -23,6 +23,7 @@ brew install zed

- [Building Zed for macOS](./docs/src/developing_zed__building_zed_macos.md)
- [Building Zed for Linux](./docs/src/developing_zed__building_zed_linux.md)
- [Building Zed for Windows](./docs/src/developing_zed__building_zed_windows.md)
- [Running Collaboration Locally](./docs/src/developing_zed__local_collaboration.md)

## Contributing
@@ -1,3 +1,6 @@
[SVG icon markup replaced with a 14 px × 14 px version; path data omitted here.]
Before: 1.7 KiB · After: 3.3 KiB
6  assets/icons/file_icons/coffeescript.svg  Normal file
After: 8.9 KiB
@@ -1,4 +1,6 @@
[SVG icon markup replaced with a 14 px × 14 px version; path data omitted here.]
Before: 482 B · After: 730 B
@@ -1 +1,12 @@
[SVG icon markup replaced with a 14 px × 14 px version; path data omitted here.]
Before: 1.1 KiB · After: 1.6 KiB
@@ -27,6 +27,7 @@
"css": "css",
"csv": "storage",
"cts": "typescript",
"coffee": "coffeescript",
"dart": "dart",
"dat": "storage",
"db": "storage",
@@ -48,6 +49,7 @@
"fmp": "storage",
"fp7": "storage",
"frm": "storage",
"fs": "fsharp",
"gdb": "storage",
"gif": "image",
"gitattributes": "vcs",
@@ -104,6 +106,7 @@
"myd": "storage",
"myi": "storage",
"nu": "terminal",
"nim": "nim",
"odp": "document",
"ods": "document",
"odt": "document",
@@ -138,6 +141,9 @@
"sqlite": "storage",
"svelte": "template",
"svg": "image",
"sc": "scala",
"scala": "scala",
"sql": "storage",
"swift": "swift",
"tf": "terraform",
"tfvars": "terraform",
@@ -148,6 +154,7 @@
"ttf": "font",
"tsx": "code",
"txt": "document",
"tcl": "tcl",
"vue": "vue",
"wav": "audio",
"webm": "video",
@@ -189,6 +196,9 @@
"css": {
"icon": "icons/file_icons/css.svg"
},
"coffeescript": {
"icon": "icons/file_icons/coffeescript.svg"
},
"dart": {
"icon": "icons/file_icons/dart.svg"
},
@@ -222,6 +232,9 @@
"font": {
"icon": "icons/file_icons/font.svg"
},
"fsharp": {
"icon": "icons/file_icons/fsharp.svg"
},
"haskell": {
"icon": "icons/file_icons/haskell.svg"
},
@@ -258,6 +271,9 @@
"ocaml": {
"icon": "icons/file_icons/ocaml.svg"
},
"nim": {
"icon": "icons/file_icons/nim.svg"
},
"phoenix": {
"icon": "icons/file_icons/phoenix.svg"
},
@@ -288,6 +304,9 @@
"storage": {
"icon": "icons/file_icons/database.svg"
},
"scala": {
"icon": "icons/file_icons/scala.svg"
},
"swift": {
"icon": "icons/file_icons/swift.svg"
},
@@ -306,6 +325,9 @@
"typescript": {
"icon": "icons/file_icons/typescript.svg"
},
"tcl": {
"icon": "icons/file_icons/tcl.svg"
},
"vcs": {
"icon": "icons/file_icons/git.svg"
},
8  assets/icons/file_icons/fsharp.svg  Normal file
@@ -0,0 +1,8 @@
[New 14 px × 14 px SVG icon; path data omitted here.]
After: 2.0 KiB
@@ -1,13 +1,6 @@
[SVG icon markup replaced with a 14 px × 14 px version; path data omitted here.]
Before: 1.4 KiB · After: 1.5 KiB
6  assets/icons/file_icons/nim.svg  Normal file
@@ -0,0 +1,6 @@
[New 14 px × 14 px SVG icon; path data omitted here.]
After: 1.5 KiB
@@ -1 +1,6 @@
|
||||
<svg version="1.1" xmlns="http://www.w3.org/2000/svg" width="512" height="512"><path d="M170.322 349.808c-2.4-15.66-9-28.38-25.020-34.531-6.27-2.4-11.7-6.78-17.88-9.54-7.020-3.15-14.16-6.15-21.57-8.1-5.61-1.5-10.83 1.020-14.16 5.94-3.15 4.62-0.87 8.97 1.77 12.84 2.97 4.35 6.27 8.49 9.6 12.57 5.52 6.78 11.37 13.29 16.74 20.161 5.13 6.57 9.51 13.86 8.76 22.56-1.65 19.080-10.29 34.891-24.21 47.76-1.53 1.38-4.23 2.37-6.21 2.19-8.88-0.96-16.95-4.32-23.46-10.53-7.47-7.11-6.33-15.48 2.61-20.67 2.13-1.23 4.35-2.37 6.3-3.87 5.46-4.11 7.29-11.13 4.32-17.22-1.41-2.94-3-6.12-5.34-8.25-11.43-10.41-22.651-21.151-34.891-30.63-29.671-23.041-44.91-53.52-47.251-90.421-2.64-40.981 6.87-79.231 28.5-114.242 8.19-13.29 17.73-25.951 32.37-32.52 9.96-4.47 20.88-6.99 31.531-9.78 29.311-7.71 58.89-13.5 89.401-8.34 26.28 4.41 45.511 17.94 54.331 43.77 5.79 16.89 7.17 34.35 5.37 52.231-3.54 35.131-29.49 66.541-63.331 75.841-14.67 4.020-22.68 1.77-31.5-10.44-6.33-8.79-11.58-18.36-17.25-27.631-0.84-1.38-1.44-2.97-2.16-4.44-0.69-1.47-1.44-2.88-2.16-4.35 2.13 15.24 5.67 29.911 13.98 42.99 4.5 7.11 10.5 12.36 19.29 13.14 32.34 2.91 59.641-7.71 79.021-33.721 21.69-29.101 26.461-62.581 20.19-97.831-1.23-6.96-3.3-13.77-4.77-20.7-0.99-4.47 0.78-7.77 5.19-9.33 2.040-0.69 4.14-1.26 6.18-1.68 26.461-5.7 53.221-7.59 80.191-4.86 30.601 3.060 59.551 11.46 85.441 28.471 40.531 26.67 65.641 64.621 79.291 110.522 1.98 6.66 2.28 13.95 2.46 20.971 0.12 4.68-2.88 5.91-6.45 2.97-3.93-3.21-7.53-6.87-10.92-10.65-3.15-3.57-5.67-7.65-8.73-11.4-2.37-2.94-4.44-2.49-5.58 1.17-0.72 2.22-1.35 4.41-1.98 6.63-7.080 25.26-18.24 48.3-36.33 67.711-2.52 2.73-4.77 6.78-5.070 10.38-0.78 9.96-1.35 20.13-0.39 30.060 1.98 21.331 5.070 42.57 7.47 63.871 1.35 12.030-2.52 19.11-13.83 23.281-7.95 2.91-16.47 5.040-24.87 5.64-13.38 0.93-26.88 0.27-40.32 0.27-0.36-15 0.93-29.731-13.17-37.771 2.73-11.13 5.88-21.69 7.77-32.49 1.56-8.97 0.24-17.79-6.060-25.14-5.91-6.93-13.32-8.82-20.101-4.86-20.43 11.91-41.671 11.97-63.301 4.17-9.93-3.6-16.86-1.56-22.351 7.5-5.91 9.75-8.4 20.7-7.74 31.771 0.84 13.95 3.27 27.75 5.13 41.64 1.020 7.77 0.15 9.78-7.56 11.76-17.13 4.35-34.56 4.83-52.081 3.42-0.93-0.090-1.86-0.48-2.46-0.63-0.87-14.55 0.66-29.671-16.68-37.411 7.68-16.29 6.63-33.18 3.99-50.070l-0.060-0.15zM66.761 292.718c2.55-2.4 4.59-6.15 5.31-9.6 1.8-8.64-4.68-20.22-12.18-23.43-3.99-1.74-7.47-1.11-10.29 2.070-6.87 7.77-13.65 15.63-20.401 23.521-1.14 1.35-2.16 2.94-2.97 4.53-2.7 5.19-1.11 8.97 4.65 10.38 3.48 0.87 7.080 1.050 10.65 1.56 9.3-0.9 18.3-2.46 25.23-9v-0.030zM67.541 206.347c-0.030-6.18-5.19-11.34-11.28-11.37-6.27-0.030-11.67 5.58-11.46 11.76 0.27 6.21 5.43 11.19 11.61 11.070 6.24-0.090 11.22-5.19 11.16-11.43l-0.030-0.030z"></path></svg>
<?xml version="1.0" encoding="UTF-8"?>
<svg xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" width="14px" height="14px" viewBox="0 0 14 14" version="1.1">
<g id="surface1">
<path style=" stroke:none;fill-rule:nonzero;fill:rgb(0%,0%,0%);fill-opacity:1;" d="M 4.65625 9.566406 C 4.589844 9.136719 4.410156 8.789062 3.972656 8.621094 C 3.800781 8.554688 3.652344 8.433594 3.484375 8.359375 C 3.292969 8.273438 3.097656 8.191406 2.894531 8.136719 C 2.742188 8.097656 2.597656 8.167969 2.507812 8.300781 C 2.421875 8.425781 2.484375 8.546875 2.554688 8.652344 C 2.636719 8.769531 2.726562 8.882812 2.816406 8.996094 C 2.96875 9.179688 3.128906 9.359375 3.277344 9.546875 C 3.417969 9.726562 3.535156 9.925781 3.515625 10.164062 C 3.46875 10.6875 3.234375 11.117188 2.851562 11.46875 C 2.8125 11.507812 2.738281 11.535156 2.683594 11.53125 C 2.441406 11.503906 2.21875 11.410156 2.042969 11.242188 C 1.835938 11.046875 1.867188 10.820312 2.113281 10.675781 C 2.171875 10.644531 2.230469 10.613281 2.285156 10.570312 C 2.433594 10.457031 2.484375 10.265625 2.402344 10.101562 C 2.367188 10.019531 2.320312 9.933594 2.257812 9.875 C 1.945312 9.589844 1.636719 9.296875 1.304688 9.035156 C 0.492188 8.40625 0.0742188 7.574219 0.0117188 6.5625 C -0.0585938 5.445312 0.199219 4.398438 0.792969 3.441406 C 1.015625 3.078125 1.277344 2.730469 1.675781 2.550781 C 1.949219 2.429688 2.246094 2.359375 2.539062 2.285156 C 3.339844 2.074219 4.148438 1.914062 4.984375 2.054688 C 5.703125 2.175781 6.226562 2.546875 6.46875 3.253906 C 6.625 3.714844 6.664062 4.191406 6.617188 4.679688 C 6.519531 5.640625 5.808594 6.5 4.882812 6.753906 C 4.484375 6.863281 4.261719 6.804688 4.023438 6.46875 C 3.847656 6.230469 3.707031 5.96875 3.550781 5.714844 C 3.527344 5.675781 3.511719 5.632812 3.492188 5.59375 C 3.472656 5.550781 3.453125 5.511719 3.433594 5.472656 C 3.492188 5.890625 3.585938 6.292969 3.816406 6.648438 C 3.9375 6.84375 4.101562 6.988281 4.34375 7.007812 C 5.226562 7.085938 5.972656 6.796875 6.503906 6.085938 C 7.097656 5.289062 7.226562 4.375 7.054688 3.410156 C 7.019531 3.21875 6.964844 3.035156 6.925781 2.84375 C 6.898438 2.722656 6.945312 2.632812 7.066406 2.589844 C 7.121094 2.570312 7.179688 2.554688 7.234375 2.542969 C 7.960938 2.386719 8.691406 2.335938 9.429688 2.410156 C 10.265625 2.496094 11.054688 2.722656 11.765625 3.191406 C 12.871094 3.917969 13.558594 4.957031 13.933594 6.210938 C 13.988281 6.394531 13.996094 6.59375 14 6.785156 C 14.003906 6.914062 13.921875 6.945312 13.824219 6.867188 C 13.714844 6.777344 13.617188 6.679688 13.523438 6.574219 C 13.4375 6.476562 13.371094 6.367188 13.285156 6.261719 C 13.222656 6.183594 13.164062 6.195312 13.132812 6.296875 C 13.113281 6.355469 13.097656 6.414062 13.078125 6.476562 C 12.886719 7.167969 12.582031 7.796875 12.085938 8.328125 C 12.015625 8.402344 11.957031 8.511719 11.949219 8.613281 C 11.925781 8.882812 11.910156 9.164062 11.9375 9.433594 C 11.992188 10.015625 12.074219 10.597656 12.140625 11.179688 C 12.179688 11.507812 12.070312 11.703125 11.761719 11.816406 C 11.546875 11.894531 11.3125 11.953125 11.082031 11.972656 C 10.71875 11.996094 10.347656 11.976562 9.980469 11.976562 C 9.96875 11.566406 10.003906 11.164062 9.621094 10.945312 C 9.695312 10.640625 9.78125 10.351562 9.832031 10.058594 C 9.875 9.8125 9.839844 9.570312 9.667969 9.371094 C 9.503906 9.179688 9.304688 9.128906 9.117188 9.238281 C 8.558594 9.5625 7.976562 9.5625 7.386719 9.351562 C 7.113281 9.253906 6.925781 9.308594 6.773438 9.554688 C 6.613281 9.824219 6.546875 10.121094 6.5625 10.425781 C 6.585938 10.804688 6.652344 11.183594 6.703125 11.5625 C 6.730469 11.777344 6.707031 11.832031 6.496094 11.886719 C 6.027344 12.003906 5.550781 12.015625 5.074219 11.976562 C 
5.046875 11.976562 5.023438 11.964844 5.007812 11.960938 C 4.980469 11.5625 5.023438 11.148438 4.550781 10.9375 C 4.761719 10.492188 4.730469 10.03125 4.660156 9.570312 Z M 1.824219 8.003906 C 1.894531 7.9375 1.949219 7.835938 1.96875 7.742188 C 2.019531 7.503906 1.84375 7.1875 1.636719 7.101562 C 1.527344 7.054688 1.433594 7.070312 1.355469 7.15625 C 1.167969 7.371094 0.984375 7.585938 0.796875 7.800781 C 0.765625 7.835938 0.738281 7.882812 0.71875 7.925781 C 0.644531 8.066406 0.6875 8.167969 0.84375 8.207031 C 0.941406 8.230469 1.039062 8.238281 1.136719 8.25 C 1.390625 8.226562 1.636719 8.183594 1.824219 8.003906 Z M 1.847656 5.640625 C 1.847656 5.472656 1.703125 5.332031 1.539062 5.332031 C 1.367188 5.332031 1.21875 5.484375 1.226562 5.652344 C 1.230469 5.824219 1.375 5.957031 1.542969 5.957031 C 1.714844 5.953125 1.847656 5.8125 1.847656 5.644531 Z M 1.847656 5.640625 "/>
</g>
</svg>
Before Width: | Height: | Size: 2.6 KiB After Width: | Height: | Size: 4.5 KiB |
6
assets/icons/file_icons/scala.svg
Normal file
@@ -0,0 +1,6 @@
<?xml version="1.0" encoding="UTF-8"?>
<svg xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" width="14px" height="14px" viewBox="0 0 14 14" version="1.1">
<g id="surface1">
<path style=" stroke:none;fill-rule:nonzero;fill:rgb(0%,0%,0%);fill-opacity:1;" d="M 11.214844 0.015625 C 11.214844 1.125 5.859375 1.976562 2.785156 2.179688 L 2.785156 5.335938 C 4.082031 5.421875 5.789062 5.628906 7.324219 5.917969 C 5.789062 6.207031 4.082031 6.410156 2.785156 6.492188 L 2.785156 9.648438 C 4.082031 9.734375 5.785156 9.9375 7.320312 10.226562 C 5.785156 10.515625 4.082031 10.71875 2.785156 10.804688 L 2.785156 13.988281 L 4.308594 13.765625 C 7.152344 13.339844 10.285156 12.53125 11.214844 11.820312 L 11.214844 8.632812 C 11.214844 8.433594 10.851562 8.21875 10.265625 8 C 10.675781 7.832031 11.003906 7.667969 11.214844 7.507812 L 11.214844 4.324219 C 11.214844 4.125 10.855469 3.90625 10.277344 3.6875 C 10.683594 3.523438 11.003906 3.355469 11.214844 3.195312 Z M 10.269531 8.339844 C 10.6875 8.503906 10.839844 8.617188 10.890625 8.664062 C 10.855469 8.734375 10.726562 8.871094 10.359375 9.050781 C 10.300781 9.078125 10.238281 9.105469 10.183594 9.128906 C 9.550781 9.402344 8.570312 9.667969 7.320312 9.90625 C 6.707031 9.792969 6.046875 9.6875 5.382812 9.597656 C 7.25 9.261719 9.09375 8.796875 10.269531 8.339844 Z M 10.277344 4.027344 C 10.691406 4.195312 10.839844 4.308594 10.890625 4.355469 C 10.863281 4.40625 10.78125 4.5 10.578125 4.625 L 10.574219 4.625 C 10.535156 4.648438 10.488281 4.675781 10.441406 4.699219 C 9.839844 5.011719 8.765625 5.324219 7.335938 5.59375 C 6.71875 5.480469 6.058594 5.375 5.394531 5.285156 C 7.269531 4.953125 9.097656 4.492188 10.277344 4.027344 Z M 10.277344 4.027344 "/>
</g>
</svg>
After Width: | Height: | Size: 1.7 KiB |
8
assets/icons/file_icons/tcl.svg
Normal file
@@ -0,0 +1,8 @@
<?xml version="1.0" encoding="UTF-8"?>
<svg xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" width="14px" height="14px" viewBox="0 0 14 14" version="1.1">
<g id="surface1">
<path style="fill-rule:nonzero;fill:rgb(100%,100%,100%);fill-opacity:1;stroke-width:1;stroke-linecap:butt;stroke-linejoin:miter;stroke:rgb(0%,0%,0%);stroke-opacity:1;stroke-miterlimit:4;" d="M 21.946429 2.875 C 21.982143 5.348214 21.910714 7.785714 19.776786 10.107143 L 19.696429 10.196429 L 19.8125 10.196429 L 20.6875 10.205357 C 19.267857 13.160714 18.348214 16.098214 16.303571 19.035714 L 16.232143 19.142857 L 16.357143 19.125 L 17.4375 18.919643 C 16.883929 20.598214 15.607143 21.946429 13.955357 22.571429 C 13.5625 17.116071 16.285714 12.303571 18.598214 7.5 L 18.607143 7.491071 L 18.517857 7.428571 C 14.732143 11.660714 13.026786 17.625 12.383929 22.553571 C 11.285714 21.901786 10.5 20.821429 10.241071 19.5625 L 11.133929 19.946429 L 11.232143 19.982143 L 11.214286 19.883929 C 10.526786 16.857143 11.589286 14.678571 12.607143 11.830357 L 13.348214 12.321429 L 13.4375 12.383929 L 13.4375 12.276786 C 13.375 9.964286 14.9375 7.633929 17.008929 5.553571 L 17.294643 6.321429 L 17.339286 6.419643 L 17.392857 6.321429 L 18.026786 5.267857 C 18.973214 3.991071 20.375 3.133929 21.946429 2.875 Z M 21.946429 2.875 " transform="matrix(0.4375,0,0,0.4375,0,0)"/>
<path style="fill-rule:nonzero;fill:rgb(100%,100%,100%);fill-opacity:1;stroke-width:1;stroke-linecap:butt;stroke-linejoin:miter;stroke:rgb(100%,100%,100%);stroke-opacity:1;stroke-miterlimit:4;" d="M 21.946429 2.875 C 20.375 3.133929 18.973214 3.991071 18.017857 5.258929 L 18.017857 5.267857 L 17.392857 6.321429 L 17.339286 6.419643 L 17.294643 6.321429 L 17 5.544643 C 14.928571 7.625 13.366071 9.955357 13.419643 12.267857 L 13.419643 12.375 L 13.339286 12.3125 L 12.598214 11.821429 C 11.571429 14.669643 10.517857 16.848214 11.196429 19.875 L 11.223214 19.973214 L 11.125 19.9375 L 10.241071 19.5625 C 10.241071 19.580357 10.25 19.598214 10.25 19.616071 C 10.517857 20.839286 11.294643 21.901786 12.375 22.544643 C 12.428571 22.160714 12.482143 21.776786 12.544643 21.383929 C 11 17.767857 12.348214 15.107143 12.955357 12.723214 L 13.892857 13.258929 C 13.758929 11.026786 15.080357 8.607143 16.785714 6.508929 L 17.285714 7.375 C 18.553571 4.767857 19.5625 3.723214 21.946429 2.875 Z M 21.946429 2.875 " transform="matrix(0.4375,0,0,0.4375,0,0)"/>
<path style="fill-rule:nonzero;fill:rgb(0%,0%,0%);fill-opacity:1;stroke-width:1;stroke-linecap:butt;stroke-linejoin:miter;stroke:rgb(0%,0%,0%);stroke-opacity:1;stroke-miterlimit:4;" d="M 22.517857 2 L 22.464286 2.008929 C 20.383929 2.375 18.339286 3.133929 17.446429 4.973214 L 17.071429 4.3125 L 17.035714 4.25 L 16.991071 4.303571 C 15.883929 5.357143 14.892857 6.526786 14.044643 7.803571 C 13.339286 8.723214 12.919643 9.839286 12.839286 11 L 12.303571 10.339286 L 12.25 10.267857 L 12.214286 10.348214 C 11.508929 11.857143 10.9375 13.428571 10.517857 15.044643 C 10.116071 16.25 10.0625 17.535714 10.366071 18.767857 L 9.482143 18.258929 L 9.410714 18.214286 L 9.401786 18.294643 C 9.223214 20.151786 9.982143 21.973214 11.419643 23.142857 L 10.455357 23.383929 L 10.25 23.428571 L 10.455357 23.482143 C 10.973214 23.598214 11.464286 23.794643 11.901786 24.089286 C 12.3125 24.383929 12.508929 24.892857 12.419643 25.383929 L 12.419643 28.098214 L 12.428571 28.116071 L 13.651786 29.857143 L 13.75 30 L 13.75 25.723214 C 13.839286 25.178571 14.053571 24.678571 14.366071 24.232143 C 14.660714 23.910714 15.071429 23.723214 15.5 23.696429 L 15.678571 23.678571 L 15.517857 23.598214 L 14.875 23.303571 C 16.714286 22.035714 18.035714 20.142857 18.571429 17.982143 L 18.589286 17.892857 L 18.508929 17.919643 L 17.714286 18.133929 C 18.607143 17.098214 19.3125 15.910714 19.803571 14.633929 C 20.508929 13 21.178571 11.169643 21.732143 9.696429 L 21.758929 9.625 L 21.678571 9.625 L 21.0625 9.669643 C 21.857143 8.651786 22.339286 7.428571 22.446429 6.142857 C 22.633929 4.785714 22.660714 3.419643 22.526786 2.053571 Z M 21.946429 2.875 C 21.982143 5.348214 21.910714 7.785714 19.776786 10.107143 L 19.696429 10.196429 L 19.8125 10.196429 L 20.6875 10.205357 C 19.267857 13.160714 18.348214 16.098214 16.303571 19.035714 L 16.232143 19.142857 L 16.357143 19.125 L 17.4375 18.919643 C 16.883929 20.598214 15.607143 21.946429 13.955357 22.571429 C 13.5625 17.116071 16.285714 12.303571 18.598214 7.5 L 18.607143 7.491071 L 18.517857 7.428571 C 14.732143 11.660714 13.026786 17.625 12.383929 22.553571 C 11.285714 21.901786 10.508929 20.821429 10.241071 19.5625 L 11.142857 19.946429 L 11.232143 19.982143 L 11.214286 19.883929 C 10.526786 16.857143 11.589286 14.678571 12.616071 11.830357 L 13.348214 12.321429 L 13.4375 12.383929 L 13.4375 12.276786 C 13.375 9.964286 14.9375 7.633929 17.008929 5.553571 L 17.303571 6.321429 L 17.339286 6.419643 L 17.392857 6.321429 L 18.026786 5.267857 C 18.973214 3.991071 20.375 3.133929 21.946429 2.875 Z M 21.946429 2.875 " transform="matrix(0.4375,0,0,0.4375,0,0)"/>
</g>
</svg>
After Width: | Height: | Size: 4.9 KiB |
@@ -1,6 +1,9 @@
|
||||
<svg width="32" height="32" viewBox="0 0 32 32" fill="none" xmlns="http://www.w3.org/2000/svg">
|
||||
<path fill-rule="evenodd" clip-rule="evenodd" d="M12.5066 8.01531L19.2375 12.1073V20.2894L12.5066 16.1992V8.01531Z" fill="black"/>
|
||||
<path fill-rule="evenodd" clip-rule="evenodd" d="M20.0294 12.1073V20.2894L27.1563 16.1992V8.01531L20.0294 12.1073Z" fill="black"/>
|
||||
<path fill-rule="evenodd" clip-rule="evenodd" d="M4.58781 3.66V11.5787L11.7147 15.5381V7.61937L4.58781 3.66Z" fill="black"/>
|
||||
<path fill-rule="evenodd" clip-rule="evenodd" d="M12.5066 25.04L19.2375 29V21.1348V21.0818L12.5066 17.1219V25.04Z" fill="black"/>
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<svg xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" width="14px" height="14px" viewBox="0 0 14 14" version="1.1">
|
||||
<g id="surface1">
|
||||
<path style=" stroke:none;fill-rule:evenodd;fill:rgb(0%,0%,0%);fill-opacity:1;" d="M 5.472656 3.507812 L 8.417969 5.296875 L 8.417969 8.875 L 5.472656 7.085938 Z M 5.472656 3.507812 "/>
|
||||
<path style=" stroke:none;fill-rule:evenodd;fill:rgb(0%,0%,0%);fill-opacity:1;" d="M 8.761719 5.296875 L 8.761719 8.875 L 11.882812 7.085938 L 11.882812 3.507812 Z M 8.761719 5.296875 "/>
|
||||
<path style=" stroke:none;fill-rule:evenodd;fill:rgb(0%,0%,0%);fill-opacity:1;" d="M 2.007812 1.601562 L 2.007812 5.066406 L 5.125 6.796875 L 5.125 3.332031 Z M 2.007812 1.601562 "/>
|
||||
<path style=" stroke:none;fill-rule:evenodd;fill:rgb(0%,0%,0%);fill-opacity:1;" d="M 5.472656 10.953125 L 8.417969 12.6875 L 8.417969 9.222656 L 5.472656 7.492188 Z M 5.472656 10.953125 "/>
|
||||
</g>
|
||||
</svg>
Before Width: | Height: | Size: 620 B After Width: | Height: | Size: 961 B |
4
assets/icons/reply_arrow.svg
Normal file
@@ -0,0 +1,4 @@
<?xml version="1.0" encoding="utf-8"?><!-- Uploaded to: SVG Repo, www.svgrepo.com, Generator: SVG Repo Mixer Tools -->
<svg width="800px" height="800px" viewBox="0 0 24 24" fill="none" xmlns="http://www.w3.org/2000/svg">
<path d="M20 17V15.8C20 14.1198 20 13.2798 19.673 12.638C19.3854 12.0735 18.9265 11.6146 18.362 11.327C17.7202 11 16.8802 11 15.2 11H4M4 11L8 7M4 11L8 15" stroke="#000000" stroke-width="2" stroke-linecap="round" stroke-linejoin="round"/>
</svg>
After Width: | Height: | Size: 468 B |
@@ -16,6 +16,7 @@
"ctrl-enter": "menu::SecondaryConfirm",
"escape": "menu::Cancel",
"ctrl-c": "menu::Cancel",
"shift-enter": "menu::UseSelectedQuery",
"ctrl-shift-w": "workspace::CloseWindow",
"shift-escape": "workspace::ToggleZoom",
"ctrl-o": "workspace::Open",
@@ -72,7 +73,7 @@
"ctrl-n": "editor::MoveDown",
"ctrl-b": "editor::MoveLeft",
"ctrl-f": "editor::MoveRight",
"ctrl-shift-l": "editor::NextScreen", // todo!(linux): What is this
"ctrl-shift-l": "editor::NextScreen", // todo(linux): What is this
"alt-left": "editor::MoveToPreviousWordStart",
"alt-b": "editor::MoveToPreviousWordStart",
"alt-right": "editor::MoveToNextWordEnd",
@@ -116,7 +117,9 @@
{
"stop_at_soft_wraps": true
}
]
],
"ctrl-;": "editor::ToggleLineNumbers",
"ctrl-alt-z": "editor::RevertSelectedHunks"
}
},
{
@@ -133,10 +136,21 @@
"focus": true
}
],
"alt-\\": "copilot::Suggest",
"ctrl->": "assistant::QuoteSelection"
}
},
{
"context": "Editor && mode == full && copilot_suggestion",
"bindings": {
"alt-]": "copilot::NextSuggestion",
"alt-[": "copilot::PreviousSuggestion",
"ctrl->": "assistant::QuoteSelection"
"alt-right": "editor::AcceptPartialCopilotSuggestion"
}
},
{
"context": "Editor && !copilot_suggestion",
"bindings": {
"alt-\\": "copilot::Suggest"
}
},
{
@@ -341,10 +355,18 @@
{
"context": "Workspace",
"bindings": {
// Change the default action on `menu::Confirm` by setting the parameter
// "alt-cmd-o": [
// "projects::OpenRecent",
// {
// "create_new_window": true
// }
// ]
"ctrl-alt-o": "projects::OpenRecent",
"ctrl-alt-b": "branches::OpenRecent",
"ctrl-~": "workspace::NewTerminal",
"ctrl-s": "workspace::Save",
"ctrl-k s": "workspace::SaveWithoutFormat",
"ctrl-shift-s": "workspace::SaveAs",
"ctrl-n": "workspace::NewFile",
"ctrl-shift-n": "workspace::NewWindow",
@@ -387,7 +409,7 @@
}
},
// Bindings from Sublime Text
// todo!(linux) make sure these match linux bindings or remove above comment?
// todo(linux) make sure these match linux bindings or remove above comment?
{
"context": "Editor",
"bindings": {
@@ -411,7 +433,7 @@
}
},
// Bindings from Atom
// todo!(linux) make sure these match linux bindings or remove above comment?
// todo(linux) make sure these match linux bindings or remove above comment?
{
"context": "Pane",
"bindings": {
@@ -466,6 +488,7 @@
"context": "Editor && mode == full",
"bindings": {
"alt-enter": "editor::OpenExcerpts",
"ctrl-k enter": "editor::OpenExcerptsSplit",
"ctrl-f8": "editor::GoToHunk",
"ctrl-shift-f8": "editor::GoToPrevHunk",
"ctrl-enter": "assistant::InlineAssist"
@@ -17,6 +17,7 @@
|
||||
"cmd-enter": "menu::SecondaryConfirm",
|
||||
"escape": "menu::Cancel",
|
||||
"ctrl-c": "menu::Cancel",
|
||||
"shift-enter": "menu::UseSelectedQuery",
|
||||
"cmd-shift-w": "workspace::CloseWindow",
|
||||
"shift-escape": "workspace::ToggleZoom",
|
||||
"cmd-o": "workspace::Open",
|
||||
@@ -48,6 +49,7 @@
|
||||
"cmd-backspace": "editor::DeleteToBeginningOfLine",
|
||||
"cmd-delete": "editor::DeleteToEndOfLine",
|
||||
"alt-backspace": "editor::DeleteToPreviousWordStart",
|
||||
"ctrl-w": "editor::DeleteToPreviousWordStart",
|
||||
"alt-delete": "editor::DeleteToNextWordEnd",
|
||||
"alt-h": "editor::DeleteToPreviousWordStart",
|
||||
"alt-d": "editor::DeleteToNextWordEnd",
|
||||
@@ -150,7 +152,9 @@
|
||||
"center_cursor": true
|
||||
}
|
||||
],
|
||||
"ctrl-cmd-space": "editor::ShowCharacterPalette"
|
||||
"ctrl-cmd-space": "editor::ShowCharacterPalette",
|
||||
"cmd-;": "editor::ToggleLineNumbers",
|
||||
"cmd-alt-z": "editor::RevertSelectedHunks"
|
||||
}
|
||||
},
|
||||
{
|
||||
@@ -173,10 +177,21 @@
|
||||
"focus": false
|
||||
}
|
||||
],
|
||||
"alt-\\": "copilot::Suggest",
|
||||
"cmd->": "assistant::QuoteSelection"
|
||||
}
|
||||
},
|
||||
{
|
||||
"context": "Editor && mode == full && copilot_suggestion",
|
||||
"bindings": {
|
||||
"alt-]": "copilot::NextSuggestion",
|
||||
"alt-[": "copilot::PreviousSuggestion",
|
||||
"cmd->": "assistant::QuoteSelection"
|
||||
"alt-right": "editor::AcceptPartialCopilotSuggestion"
|
||||
}
|
||||
},
|
||||
{
|
||||
"context": "Editor && !copilot_suggestion",
|
||||
"bindings": {
|
||||
"alt-\\": "copilot::Suggest"
|
||||
}
|
||||
},
|
||||
{
|
||||
@@ -301,6 +316,18 @@
|
||||
"cmd-ctrl-p": "editor::AddSelectionAbove",
|
||||
"cmd-alt-down": "editor::AddSelectionBelow",
|
||||
"cmd-ctrl-n": "editor::AddSelectionBelow",
|
||||
"cmd-shift-k": "editor::DeleteLine",
|
||||
"alt-up": "editor::MoveLineUp",
|
||||
"alt-down": "editor::MoveLineDown",
|
||||
"alt-shift-up": [
|
||||
"editor::DuplicateLine",
|
||||
{
|
||||
"move_upwards": true
|
||||
}
|
||||
],
|
||||
"alt-shift-down": "editor::DuplicateLine",
|
||||
"ctrl-shift-right": "editor::SelectLargerSyntaxNode",
|
||||
"ctrl-shift-left": "editor::SelectSmallerSyntaxNode",
|
||||
"cmd-d": [
|
||||
"editor::SelectNext",
|
||||
{
|
||||
@@ -333,8 +360,6 @@
|
||||
"advance_downwards": false
|
||||
}
|
||||
],
|
||||
"alt-up": "editor::SelectLargerSyntaxNode",
|
||||
"alt-down": "editor::SelectSmallerSyntaxNode",
|
||||
"cmd-u": "editor::UndoSelection",
|
||||
"cmd-shift-u": "editor::RedoSelection",
|
||||
"f8": "editor::GoToDiagnostic",
|
||||
@@ -383,10 +408,18 @@
|
||||
{
|
||||
"context": "Workspace",
|
||||
"bindings": {
|
||||
// Change the default action on `menu::Confirm` by setting the parameter
|
||||
// "alt-cmd-o": [
|
||||
// "projects::OpenRecent",
|
||||
// {
|
||||
// "create_new_window": true
|
||||
// }
|
||||
// ]
|
||||
"alt-cmd-o": "projects::OpenRecent",
|
||||
"alt-cmd-b": "branches::OpenRecent",
|
||||
"ctrl-~": "workspace::NewTerminal",
|
||||
"cmd-s": "workspace::Save",
|
||||
"cmd-k s": "workspace::SaveWithoutFormat",
|
||||
"cmd-shift-s": "workspace::SaveAs",
|
||||
"cmd-n": "workspace::NewFile",
|
||||
"cmd-shift-n": "workspace::NewWindow",
|
||||
@@ -405,8 +438,8 @@
|
||||
"cmd-j": "workspace::ToggleBottomDock",
|
||||
"alt-cmd-y": "workspace::CloseAllDocks",
|
||||
"cmd-shift-f": "pane::DeploySearch",
|
||||
"cmd-k cmd-t": "theme_selector::Toggle",
|
||||
"cmd-k cmd-s": "zed::OpenKeymap",
|
||||
"cmd-k cmd-t": "theme_selector::Toggle",
|
||||
"cmd-t": "project_symbols::Toggle",
|
||||
"cmd-p": "file_finder::Toggle",
|
||||
"cmd-shift-p": "command_palette::Toggle",
|
||||
@@ -432,11 +465,7 @@
|
||||
{
|
||||
"context": "Editor",
|
||||
"bindings": {
|
||||
"ctrl-shift-k": "editor::DeleteLine",
|
||||
"cmd-shift-d": "editor::DuplicateLine",
|
||||
"ctrl-j": "editor::JoinLines",
|
||||
"ctrl-cmd-up": "editor::MoveLineUp",
|
||||
"ctrl-cmd-down": "editor::MoveLineDown",
|
||||
"ctrl-alt-backspace": "editor::DeleteToPreviousSubwordStart",
|
||||
"ctrl-alt-h": "editor::DeleteToPreviousSubwordStart",
|
||||
"ctrl-alt-delete": "editor::DeleteToNextSubwordEnd",
|
||||
@@ -506,6 +535,7 @@
|
||||
"context": "Editor && mode == full",
|
||||
"bindings": {
|
||||
"alt-enter": "editor::OpenExcerpts",
|
||||
"cmd-k enter": "editor::OpenExcerptsSplit",
|
||||
"cmd-f8": "editor::GoToHunk",
|
||||
"cmd-shift-f8": "editor::GoToPrevHunk",
|
||||
"ctrl-enter": "assistant::InlineAssist"
|
||||
|
||||
@@ -39,6 +39,8 @@
|
||||
"advance_downwards": true
|
||||
}
|
||||
],
|
||||
"alt-up": "editor::SelectLargerSyntaxNode",
|
||||
"alt-down": "editor::SelectSmallerSyntaxNode",
|
||||
"shift-alt-up": "editor::MoveLineUp",
|
||||
"shift-alt-down": "editor::MoveLineDown",
|
||||
"cmd-alt-l": "editor::Format",
|
||||
|
||||
@@ -37,30 +37,42 @@
|
||||
"_": "vim::StartOfLineDownward",
|
||||
"g _": "vim::EndOfLineDownward",
|
||||
"shift-g": "vim::EndOfDocument",
|
||||
"w": "vim::NextWordStart",
|
||||
"{": "vim::StartOfParagraph",
|
||||
"}": "vim::EndOfParagraph",
|
||||
"|": "vim::GoToColumn",
|
||||
|
||||
// Word motions
|
||||
"w": "vim::NextWordStart",
|
||||
"e": "vim::NextWordEnd",
|
||||
"b": "vim::PreviousWordStart",
|
||||
"g e": "vim::PreviousWordEnd",
|
||||
|
||||
// Subword motions
|
||||
// "w": "vim::NextSubwordStart",
|
||||
// "b": "vim::PreviousSubwordStart",
|
||||
// "e": "vim::NextSubwordEnd",
|
||||
// "g e": "vim::PreviousSubwordEnd",
|
||||
|
||||
"shift-w": [
|
||||
"vim::NextWordStart",
|
||||
{
|
||||
"ignorePunctuation": true
|
||||
}
|
||||
],
|
||||
"e": "vim::NextWordEnd",
|
||||
"shift-e": [
|
||||
"vim::NextWordEnd",
|
||||
{
|
||||
"ignorePunctuation": true
|
||||
}
|
||||
],
|
||||
"b": "vim::PreviousWordStart",
|
||||
"shift-b": [
|
||||
"vim::PreviousWordStart",
|
||||
{
|
||||
"ignorePunctuation": true
|
||||
}
|
||||
],
|
||||
"g shift-e": ["vim::PreviousWordEnd", { "ignorePunctuation": true }],
|
||||
|
||||
"n": "search::SelectNextMatch",
|
||||
"shift-n": "search::SelectPrevMatch",
|
||||
"%": "vim::Matching",
|
||||
@@ -117,8 +129,6 @@
|
||||
"ctrl-e": "vim::LineDown",
|
||||
"ctrl-y": "vim::LineUp",
|
||||
// "g" commands
|
||||
"g e": "vim::PreviousWordEnd",
|
||||
"g shift-e": ["vim::PreviousWordEnd", { "ignorePunctuation": true }],
|
||||
"g g": "vim::StartOfDocument",
|
||||
"g h": "editor::Hover",
|
||||
"g t": "pane::ActivateNextItem",
|
||||
@@ -218,6 +228,7 @@
|
||||
// z commands
|
||||
"z t": "editor::ScrollCursorTop",
|
||||
"z z": "editor::ScrollCursorCenter",
|
||||
"z .": ["workspace::SendKeystrokes", "z z ^"],
|
||||
"z b": "editor::ScrollCursorBottom",
|
||||
"z c": "editor::Fold",
|
||||
"z o": "editor::UnfoldLines",
|
||||
@@ -288,6 +299,13 @@
|
||||
"ctrl-w ctrl-o": "workspace::CloseInactiveTabsAndPanes",
|
||||
"ctrl-w n": ["workspace::NewFileInDirection", "Up"],
|
||||
"ctrl-w ctrl-n": ["workspace::NewFileInDirection", "Up"],
|
||||
|
||||
"ctrl-w d": "editor::GoToDefinitionSplit",
|
||||
"ctrl-w g d": "editor::GoToDefinitionSplit",
|
||||
"ctrl-w shift-d": "editor::GoToTypeDefinitionSplit",
|
||||
"ctrl-w g shift-d": "editor::GoToTypeDefinitionSplit",
|
||||
"ctrl-w space": "editor::OpenExcerptsSplit",
|
||||
"ctrl-w g space": "editor::OpenExcerptsSplit",
|
||||
"-": "pane::RevealInProjectPanel"
|
||||
}
|
||||
},
|
||||
@@ -345,7 +363,9 @@
|
||||
"> >": "vim::Indent",
|
||||
"< <": "vim::Outdent",
|
||||
"ctrl-pagedown": "pane::ActivateNextItem",
|
||||
"ctrl-pageup": "pane::ActivatePrevItem"
|
||||
"ctrl-pageup": "pane::ActivatePrevItem",
|
||||
"[ x": "editor::SelectLargerSyntaxNode",
|
||||
"] x": "editor::SelectSmallerSyntaxNode"
|
||||
}
|
||||
},
|
||||
{
|
||||
@@ -385,6 +405,7 @@
|
||||
],
|
||||
"t": "vim::Tag",
|
||||
"s": "vim::Sentence",
|
||||
"p": "vim::Paragraph",
|
||||
"'": "vim::Quotes",
|
||||
"`": "vim::BackQuotes",
|
||||
"\"": "vim::DoubleQuotes",
|
||||
|
||||
@@ -140,6 +140,15 @@
|
||||
// Whether to show diagnostic indicators in the scrollbar.
|
||||
"diagnostics": true
|
||||
},
|
||||
// What to do when multibuffer is double clicked in some of its excerpts
|
||||
// (parts of singleton buffers).
|
||||
// May take 2 values:
|
||||
// 1. Behave as a regular buffer and select the whole word (default).
|
||||
// "double_click_in_multibuffer": "select"
|
||||
// 2. Open the excerpt clicked as a new buffer in the new tab.
|
||||
// "double_click_in_multibuffer": "open",
|
||||
// For the case of "open", regular selection behavior can be achieved by holding `alt` when double clicking.
|
||||
"double_click_in_multibuffer": "select",
|
||||
"gutter": {
|
||||
// Whether to show line numbers in the gutter.
|
||||
"line_numbers": true,
|
||||
@@ -148,7 +157,7 @@
|
||||
// Whether to show fold buttons in the gutter.
|
||||
"folds": true
|
||||
},
|
||||
// The number of lines to keep above/below the cursor when scrolling.
|
||||
// The number of lines to keep above/below the cursor when moving the cursor.
|
||||
"vertical_scroll_margin": 3,
|
||||
"relative_line_numbers": false,
|
||||
// When to populate a new search's query based on the text under the cursor.
|
||||
@@ -180,7 +189,7 @@
|
||||
"project_panel": {
|
||||
// Default width of the project panel.
|
||||
"default_width": 240,
|
||||
// Where to dock project panel. Can be 'left' or 'right'.
|
||||
// Where to dock the project panel. Can be 'left' or 'right'.
|
||||
"dock": "left",
|
||||
// Whether to show file icons in the project panel.
|
||||
"file_icons": true,
|
||||
@@ -198,45 +207,64 @@
|
||||
"collaboration_panel": {
|
||||
// Whether to show the collaboration panel button in the status bar.
|
||||
"button": true,
|
||||
// Where to dock channels panel. Can be 'left' or 'right'.
|
||||
// Where to dock the collaboration panel. Can be 'left' or 'right'.
|
||||
"dock": "left",
|
||||
// Default width of the channels panel.
|
||||
// Default width of the collaboration panel.
|
||||
"default_width": 240
|
||||
},
|
||||
"chat_panel": {
|
||||
// Whether to show the collaboration panel button in the status bar.
|
||||
// Whether to show the chat panel button in the status bar.
|
||||
"button": true,
|
||||
// Where to dock channels panel. Can be 'left' or 'right'.
|
||||
// Where to dock the chat panel. Can be 'left' or 'right'.
|
||||
"dock": "right",
|
||||
// Default width of the channels panel.
|
||||
// Default width of the chat panel.
|
||||
"default_width": 240
|
||||
},
|
||||
"message_editor": {
|
||||
// Whether to automatically replace emoji shortcodes with emoji characters.
|
||||
// For example: typing `:wave:` gets replaced with `👋`.
|
||||
"auto_replace_emoji_shortcode": true
|
||||
},
|
||||
"notification_panel": {
|
||||
// Whether to show the collaboration panel button in the status bar.
|
||||
// Whether to show the notification panel button in the status bar.
|
||||
"button": true,
|
||||
// Where to dock channels panel. Can be 'left' or 'right'.
|
||||
// Where to dock the notification panel. Can be 'left' or 'right'.
|
||||
"dock": "right",
|
||||
// Default width of the channels panel.
|
||||
// Default width of the notification panel.
|
||||
"default_width": 380
|
||||
},
|
||||
"assistant": {
|
||||
// Whether to show the assistant panel button in the status bar.
|
||||
"button": true,
|
||||
// Where to dock the assistant. Can be 'left', 'right' or 'bottom'.
|
||||
// Where to dock the assistant panel. Can be 'left', 'right' or 'bottom'.
|
||||
"dock": "right",
|
||||
// Default width when the assistant is docked to the left or right.
|
||||
"default_width": 640,
|
||||
// Default height when the assistant is docked to the bottom.
|
||||
"default_height": 320,
|
||||
// Deprecated: Please use `provider.api_url` instead.
|
||||
// The default OpenAI API endpoint to use when starting new conversations.
|
||||
"openai_api_url": "https://api.openai.com/v1",
|
||||
// Deprecated: Please use `provider.default_model` instead.
|
||||
// The default OpenAI model to use when starting new conversations. This
|
||||
// setting can take three values:
|
||||
//
|
||||
// 1. "gpt-3.5-turbo-0613""
|
||||
// 2. "gpt-4-0613""
|
||||
// 3. "gpt-4-1106-preview"
|
||||
"default_open_ai_model": "gpt-4-1106-preview"
|
||||
"default_open_ai_model": "gpt-4-1106-preview",
|
||||
"provider": {
|
||||
"type": "openai",
|
||||
// The default OpenAI API endpoint to use when starting new conversations.
|
||||
"api_url": "https://api.openai.com/v1",
|
||||
// The default OpenAI model to use when starting new conversations. This
|
||||
// setting can take three values:
|
||||
//
|
||||
// 1. "gpt-3.5-turbo-0613""
|
||||
// 2. "gpt-4-0613""
|
||||
// 3. "gpt-4-1106-preview"
|
||||
"default_model": "gpt-4-1106-preview"
|
||||
}
|
||||
},
|
||||
// Whether the screen sharing icon is shown in the os status bar.
|
||||
"show_call_status_icon": true,
|
||||
@@ -458,6 +486,10 @@
|
||||
// Can also be 'csh', 'fish', and `nushell`
|
||||
"activate_script": "default"
|
||||
}
|
||||
},
|
||||
"toolbar": {
|
||||
// Whether to display the terminal title in its toolbar.
|
||||
"title": true
|
||||
}
|
||||
// Set the terminal's font size. If this option is not included,
|
||||
// the terminal will default to matching the buffer's font size.
|
||||
@@ -572,7 +604,8 @@
|
||||
// Vim settings
|
||||
"vim": {
|
||||
"use_system_clipboard": "always",
|
||||
"use_multiline_find": false
|
||||
"use_multiline_find": false,
|
||||
"use_smartcase_find": false
|
||||
},
|
||||
// The server to connect to. If the environment variable
|
||||
// ZED_SERVER_URL is set, it will override this setting.
@@ -4,9 +4,7 @@
[
{
"label": "Example task",
"command": "bash",
// rest of the parameters are optional
"args": ["-c", "for i in {1..5}; do echo \"Hello $i/5\"; sleep 1; done"],
"command": "for i in {1..5}; do echo \"Hello $i/5\"; sleep 1; done",
// Env overrides for the command, will be appended to the terminal's environment from the settings.
"env": { "foo": "bar" },
// Current working directory to spawn the command into, defaults to current project root.
@@ -77,7 +77,7 @@
|
||||
"terminal.ansi.green": "#5cc1a3ff",
|
||||
"terminal.ansi.bright_green": "#31614fff",
|
||||
"terminal.ansi.dim_green": "#b3e1d1ff",
|
||||
"terminal.ansi.yellow": "#f5c177ff",
|
||||
"terminal.ansi.yellow": "#f6c177ff",
|
||||
"terminal.ansi.bright_yellow": "#8a643aff",
|
||||
"terminal.ansi.dim_yellow": "#fedfbbff",
|
||||
"terminal.ansi.blue": "#9bced6ff",
|
||||
@@ -93,7 +93,7 @@
|
||||
"terminal.ansi.bright_white": "#e0def4ff",
|
||||
"terminal.ansi.dim_white": "#514e68ff",
|
||||
"link_text.hover": "#9bced6ff",
|
||||
"conflict": "#f5c177ff",
|
||||
"conflict": "#f6c177ff",
|
||||
"conflict.background": "#50331aff",
|
||||
"conflict.border": "#6d4d2bff",
|
||||
"created": "#5cc1a3ff",
|
||||
@@ -117,7 +117,7 @@
|
||||
"info": "#9bced6ff",
|
||||
"info.background": "#2f3639ff",
|
||||
"info.border": "#435255ff",
|
||||
"modified": "#f5c177ff",
|
||||
"modified": "#f6c177ff",
|
||||
"modified.background": "#50331aff",
|
||||
"modified.border": "#6d4d2bff",
|
||||
"predictive": "#556b81ff",
|
||||
@@ -132,7 +132,7 @@
|
||||
"unreachable": "#74708dff",
|
||||
"unreachable.background": "#292738ff",
|
||||
"unreachable.border": "#423f55ff",
|
||||
"warning": "#f5c177ff",
|
||||
"warning": "#f6c177ff",
|
||||
"warning.background": "#50331aff",
|
||||
"warning.border": "#6d4d2bff",
|
||||
"players": [
|
||||
@@ -167,9 +167,9 @@
|
||||
"selection": "#ea6e923d"
|
||||
},
|
||||
{
|
||||
"cursor": "#f5c177ff",
|
||||
"background": "#f5c177ff",
|
||||
"selection": "#f5c1773d"
|
||||
"cursor": "#f6c177ff",
|
||||
"background": "#f6c177ff",
|
||||
"selection": "#f6c1773d"
|
||||
},
|
||||
{
|
||||
"cursor": "#5cc1a3ff",
|
||||
@@ -319,7 +319,7 @@
|
||||
"font_weight": null
|
||||
},
|
||||
"string": {
|
||||
"color": "#f5c177ff",
|
||||
"color": "#f6c177ff",
|
||||
"font_style": null,
|
||||
"font_weight": null
|
||||
},
|
||||
@@ -354,7 +354,7 @@
|
||||
"font_weight": null
|
||||
},
|
||||
"title": {
|
||||
"color": "#f5c177ff",
|
||||
"color": "#f6c177ff",
|
||||
"font_style": null,
|
||||
"font_weight": 700
|
||||
},
|
||||
@@ -835,7 +835,7 @@
|
||||
"terminal.ansi.green": "#5cc1a3ff",
|
||||
"terminal.ansi.bright_green": "#31614fff",
|
||||
"terminal.ansi.dim_green": "#b3e1d1ff",
|
||||
"terminal.ansi.yellow": "#f5c177ff",
|
||||
"terminal.ansi.yellow": "#f6c177ff",
|
||||
"terminal.ansi.bright_yellow": "#8a643aff",
|
||||
"terminal.ansi.dim_yellow": "#fedfbbff",
|
||||
"terminal.ansi.blue": "#9bced6ff",
|
||||
@@ -851,7 +851,7 @@
|
||||
"terminal.ansi.bright_white": "#e0def4ff",
|
||||
"terminal.ansi.dim_white": "#74708dff",
|
||||
"link_text.hover": "#9bced6ff",
|
||||
"conflict": "#f5c177ff",
|
||||
"conflict": "#f6c177ff",
|
||||
"conflict.background": "#50331aff",
|
||||
"conflict.border": "#6d4d2bff",
|
||||
"created": "#5cc1a3ff",
|
||||
@@ -875,7 +875,7 @@
|
||||
"info": "#9bced6ff",
|
||||
"info.background": "#2f3639ff",
|
||||
"info.border": "#435255ff",
|
||||
"modified": "#f5c177ff",
|
||||
"modified": "#f6c177ff",
|
||||
"modified.background": "#50331aff",
|
||||
"modified.border": "#6d4d2bff",
|
||||
"predictive": "#516b83ff",
|
||||
@@ -890,7 +890,7 @@
|
||||
"unreachable": "#85819eff",
|
||||
"unreachable.background": "#38354eff",
|
||||
"unreachable.border": "#504c68ff",
|
||||
"warning": "#f5c177ff",
|
||||
"warning": "#f6c177ff",
|
||||
"warning.background": "#50331aff",
|
||||
"warning.border": "#6d4d2bff",
|
||||
"players": [
|
||||
@@ -925,9 +925,9 @@
|
||||
"selection": "#ea6e923d"
|
||||
},
|
||||
{
|
||||
"cursor": "#f5c177ff",
|
||||
"background": "#f5c177ff",
|
||||
"selection": "#f5c1773d"
|
||||
"cursor": "#f6c177ff",
|
||||
"background": "#f6c177ff",
|
||||
"selection": "#f6c1773d"
|
||||
},
|
||||
{
|
||||
"cursor": "#5cc1a3ff",
|
||||
@@ -1077,7 +1077,7 @@
|
||||
"font_weight": null
|
||||
},
|
||||
"string": {
|
||||
"color": "#f5c177ff",
|
||||
"color": "#f6c177ff",
|
||||
"font_style": null,
|
||||
"font_weight": null
|
||||
},
|
||||
@@ -1112,7 +1112,7 @@
|
||||
"font_weight": null
|
||||
},
|
||||
"title": {
|
||||
"color": "#f5c177ff",
|
||||
"color": "#f6c177ff",
|
||||
"font_style": null,
|
||||
"font_weight": 700
|
||||
},
@@ -5,6 +5,9 @@ edition = "2021"
publish = false
license = "GPL-3.0-or-later"

[lints]
workspace = true

[lib]
path = "src/activity_indicator.rs"
doctest = false
@@ -6,7 +6,7 @@ use gpui::{
|
||||
ParentElement as _, Render, SharedString, StatefulInteractiveElement, Styled, View,
|
||||
ViewContext, VisualContext as _,
|
||||
};
|
||||
use language::{LanguageRegistry, LanguageServerBinaryStatus};
|
||||
use language::{LanguageRegistry, LanguageServerBinaryStatus, LanguageServerName};
|
||||
use project::{LanguageServerProgress, Project};
|
||||
use smallvec::SmallVec;
|
||||
use std::{cmp::Reverse, fmt::Write, sync::Arc};
|
||||
@@ -30,7 +30,7 @@ pub struct ActivityIndicator {
|
||||
}
|
||||
|
||||
struct LspStatus {
|
||||
name: Arc<str>,
|
||||
name: LanguageServerName,
|
||||
status: LanguageServerBinaryStatus,
|
||||
}
|
||||
|
||||
@@ -58,13 +58,10 @@ impl ActivityIndicator {
|
||||
let this = cx.new_view(|cx: &mut ViewContext<Self>| {
|
||||
let mut status_events = languages.language_server_binary_statuses();
|
||||
cx.spawn(|this, mut cx| async move {
|
||||
while let Some((language, event)) = status_events.next().await {
|
||||
while let Some((name, status)) = status_events.next().await {
|
||||
this.update(&mut cx, |this, cx| {
|
||||
this.statuses.retain(|s| s.name != language.name());
|
||||
this.statuses.push(LspStatus {
|
||||
name: language.name(),
|
||||
status: event,
|
||||
});
|
||||
this.statuses.retain(|s| s.name != name);
|
||||
this.statuses.push(LspStatus { name, status });
|
||||
cx.notify();
|
||||
})?;
|
||||
}
|
||||
@@ -97,7 +94,7 @@ impl ActivityIndicator {
|
||||
cx,
|
||||
);
|
||||
});
|
||||
workspace.add_item(
|
||||
workspace.add_item_to_active_pane(
|
||||
Box::new(
|
||||
cx.new_view(|cx| Editor::for_buffer(buffer, Some(project.clone()), cx)),
|
||||
),
|
||||
@@ -114,7 +111,7 @@ impl ActivityIndicator {
|
||||
self.statuses.retain(|status| {
|
||||
if let LanguageServerBinaryStatus::Failed { error } = &status.status {
|
||||
cx.emit(Event::ShowError {
|
||||
lsp_name: status.name.clone(),
|
||||
lsp_name: status.name.0.clone(),
|
||||
error: error.clone(),
|
||||
});
|
||||
false
|
||||
@@ -202,11 +199,12 @@ impl ActivityIndicator {
|
||||
let mut checking_for_update = SmallVec::<[_; 3]>::new();
|
||||
let mut failed = SmallVec::<[_; 3]>::new();
|
||||
for status in &self.statuses {
|
||||
let name = status.name.clone();
|
||||
match status.status {
|
||||
LanguageServerBinaryStatus::CheckingForUpdate => checking_for_update.push(name),
|
||||
LanguageServerBinaryStatus::Downloading => downloading.push(name),
|
||||
LanguageServerBinaryStatus::Failed { .. } => failed.push(name),
|
||||
LanguageServerBinaryStatus::CheckingForUpdate => {
|
||||
checking_for_update.push(status.name.0.as_ref())
|
||||
}
|
||||
LanguageServerBinaryStatus::Downloading => downloading.push(status.name.0.as_ref()),
|
||||
LanguageServerBinaryStatus::Failed { .. } => failed.push(status.name.0.as_ref()),
|
||||
LanguageServerBinaryStatus::Downloaded | LanguageServerBinaryStatus::Cached => {}
|
||||
}
|
||||
}
|
||||
@@ -214,34 +212,28 @@ impl ActivityIndicator {
|
||||
if !downloading.is_empty() {
|
||||
return Content {
|
||||
icon: Some(DOWNLOAD_ICON),
|
||||
message: format!(
|
||||
"Downloading {} language server{}...",
|
||||
downloading.join(", "),
|
||||
if downloading.len() > 1 { "s" } else { "" }
|
||||
),
|
||||
message: format!("Downloading {}...", downloading.join(", "),),
|
||||
on_click: None,
|
||||
};
|
||||
} else if !checking_for_update.is_empty() {
|
||||
}
|
||||
|
||||
if !checking_for_update.is_empty() {
|
||||
return Content {
|
||||
icon: Some(DOWNLOAD_ICON),
|
||||
message: format!(
|
||||
"Checking for updates to {} language server{}...",
|
||||
"Checking for updates to {}...",
|
||||
checking_for_update.join(", "),
|
||||
if checking_for_update.len() > 1 {
|
||||
"s"
|
||||
} else {
|
||||
""
|
||||
}
|
||||
),
|
||||
on_click: None,
|
||||
};
|
||||
} else if !failed.is_empty() {
|
||||
}
|
||||
|
||||
if !failed.is_empty() {
|
||||
return Content {
|
||||
icon: Some(WARNING_ICON),
|
||||
message: format!(
|
||||
"Failed to download {} language server{}. Click to show error.",
|
||||
"Failed to download {}. Click to show error.",
|
||||
failed.join(", "),
|
||||
if failed.len() > 1 { "s" } else { "" }
|
||||
),
|
||||
on_click: Some(Arc::new(|this, cx| {
|
||||
this.show_error_message(&Default::default(), cx)
|
||||
@@ -249,6 +241,17 @@ impl ActivityIndicator {
|
||||
};
|
||||
}
|
||||
|
||||
// Show any formatting failure
|
||||
if let Some(failure) = self.project.read(cx).last_formatting_failure() {
|
||||
return Content {
|
||||
icon: Some(WARNING_ICON),
|
||||
message: format!("Formatting failed: {}. Click to see logs.", failure),
|
||||
on_click: Some(Arc::new(|_, cx| {
|
||||
cx.dispatch_action(Box::new(workspace::OpenLog));
|
||||
})),
|
||||
};
|
||||
}
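// Illustration, not part of the diff above: the user-facing text produced by the
// new formatting-failure branch comes straight from the `format!` call; the
// failure string below is a made-up example value.
fn example_formatting_failure_message() -> String {
    let failure = "prettier exited with status 1";
    format!("Formatting failed: {}. Click to see logs.", failure)
    // -> "Formatting failed: prettier exited with status 1. Click to see logs."
}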
|
||||
|
||||
// Show any application auto-update info.
|
||||
if let Some(updater) = &self.auto_updater {
|
||||
return match &updater.read(cx).status() {
|
||||
|
||||
@@ -5,6 +5,9 @@ edition = "2021"
|
||||
publish = false
|
||||
license = "GPL-3.0-or-later"
|
||||
|
||||
[lints]
|
||||
workspace = true
|
||||
|
||||
[lib]
|
||||
path = "src/ai.rs"
|
||||
doctest = false
|
||||
@@ -20,7 +23,6 @@ futures.workspace = true
|
||||
gpui.workspace = true
|
||||
isahc.workspace = true
|
||||
language.workspace = true
|
||||
lazy_static.workspace = true
|
||||
log.workspace = true
|
||||
matrixmultiply = "0.3.7"
|
||||
ordered-float.workspace = true
|
||||
@@ -29,6 +31,7 @@ parse_duration = "2.1.1"
|
||||
postage.workspace = true
|
||||
rand.workspace = true
|
||||
rusqlite = { version = "0.29.0", features = ["blob", "array", "modern_sqlite"] }
|
||||
schemars.workspace = true
|
||||
serde.workspace = true
|
||||
serde_json.workspace = true
|
||||
tiktoken-rs.workspace = true
|
||||
|
||||
@@ -19,11 +19,9 @@ pub struct Embedding(pub Vec<f32>);
|
||||
impl FromSql for Embedding {
|
||||
fn column_result(value: ValueRef) -> FromSqlResult<Self> {
|
||||
let bytes = value.as_blob()?;
|
||||
let embedding: Result<Vec<f32>, Box<bincode::ErrorKind>> = bincode::deserialize(bytes);
|
||||
if embedding.is_err() {
|
||||
return Err(rusqlite::types::FromSqlError::Other(embedding.unwrap_err()));
|
||||
}
|
||||
Ok(Embedding(embedding.unwrap()))
|
||||
let embedding =
|
||||
bincode::deserialize(bytes).map_err(|err| rusqlite::types::FromSqlError::Other(err))?;
|
||||
Ok(Embedding(embedding))
|
||||
}
|
||||
}
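// Illustration, not part of the diff above: the refactor replaces the manual
// `is_err()` / `unwrap()` handling with a single `map_err(..)?`, letting `?`
// propagate the bincode error through `FromSqlError::Other`. A minimal
// standalone sketch of the same pattern (the function name is a placeholder):
fn decode_embedding(bytes: &[u8]) -> rusqlite::types::FromSqlResult<Vec<f32>> {
    let values: Vec<f32> =
        bincode::deserialize(bytes).map_err(|err| rusqlite::types::FromSqlError::Other(err))?;
    Ok(values)
}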
|
||||
|
||||
@@ -112,7 +110,7 @@ mod tests {
|
||||
}
|
||||
|
||||
fn round_to_decimals(n: OrderedFloat<f32>, decimal_places: i32) -> f32 {
|
||||
let factor = (10.0 as f32).powi(decimal_places);
|
||||
let factor = 10.0_f32.powi(decimal_places);
|
||||
(n * factor).round() / factor
|
||||
}
|
||||
|
||||
|
||||
@@ -30,7 +30,7 @@ impl PromptArguments {
|
||||
if self
|
||||
.language_name
|
||||
.as_ref()
|
||||
.and_then(|name| Some(!["Markdown", "Plain Text"].contains(&name.as_str())))
|
||||
.map(|name| !["Markdown", "Plain Text"].contains(&name.as_str()))
|
||||
.unwrap_or(true)
|
||||
{
|
||||
PromptFileType::Code
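// Illustration, not part of the diff above: `opt.and_then(|x| Some(f(x)))` is
// equivalent to `opt.map(|x| f(x))`; the hunks in this file only swap the former
// for the simpler `.map` spelling, with no behavior change.
fn map_vs_and_then_example() {
    let name: Option<&str> = Some("Rust");
    assert_eq!(
        name.and_then(|n| Some(n.len())), // old style
        name.map(|n| n.len())             // new style
    );
}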
|
||||
@@ -51,8 +51,10 @@ pub trait PromptTemplate {
|
||||
#[repr(i8)]
|
||||
#[derive(PartialEq, Eq, Ord)]
|
||||
pub enum PromptPriority {
|
||||
Mandatory, // Ignores truncation
|
||||
Ordered { order: usize }, // Truncates based on priority
|
||||
/// Ignores truncation.
|
||||
Mandatory,
|
||||
/// Truncates based on priority.
|
||||
Ordered { order: usize },
|
||||
}
|
||||
|
||||
impl PartialOrd for PromptPriority {
|
||||
@@ -86,7 +88,6 @@ impl PromptChain {
|
||||
let mut sorted_indices = (0..self.templates.len()).collect::<Vec<_>>();
|
||||
sorted_indices.sort_by_key(|&i| Reverse(&self.templates[i].0));
|
||||
|
||||
// If Truncate
|
||||
let mut tokens_outstanding = if truncate {
|
||||
Some(self.args.model.capacity()? - self.args.reserved_tokens)
|
||||
} else {
|
||||
|
||||
@@ -24,11 +24,9 @@ impl PromptCodeSnippet {
|
||||
|
||||
let language_name = buffer
|
||||
.language()
|
||||
.and_then(|language| Some(language.name().to_string().to_lowercase()));
|
||||
.map(|language| language.name().to_string().to_lowercase());
|
||||
|
||||
let file_path = buffer
|
||||
.file()
|
||||
.and_then(|file| Some(file.path().to_path_buf()));
|
||||
let file_path = buffer.file().map(|file| file.path().to_path_buf());
|
||||
|
||||
(content, language_name, file_path)
|
||||
})?;
|
||||
@@ -46,7 +44,7 @@ impl ToString for PromptCodeSnippet {
|
||||
let path = self
|
||||
.path
|
||||
.as_ref()
|
||||
.and_then(|path| Some(path.to_string_lossy().to_string()))
|
||||
.map(|path| path.to_string_lossy().to_string())
|
||||
.unwrap_or("".to_string());
|
||||
let language_name = self.language_name.clone().unwrap_or("".to_string());
|
||||
let content = self.content.clone();
|
||||
@@ -67,7 +65,7 @@ impl PromptTemplate for RepositoryContext {
|
||||
let template = "You are working inside a large repository, here are a few code snippets that may be useful.";
|
||||
let mut prompt = String::new();
|
||||
|
||||
let mut remaining_tokens = max_token_length.clone();
|
||||
let mut remaining_tokens = max_token_length;
|
||||
let separator_token_length = args.model.count_tokens("\n")?;
|
||||
for snippet in &args.snippets {
|
||||
let mut snippet_prompt = template.to_string();
|
||||
|
||||
@@ -6,4 +6,4 @@ pub use completion::*;
|
||||
pub use embedding::*;
|
||||
pub use model::OpenAiLanguageModel;
|
||||
|
||||
pub const OPEN_AI_API_URL: &'static str = "https://api.openai.com/v1";
|
||||
pub const OPEN_AI_API_URL: &str = "https://api.openai.com/v1";
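// Note, not part of the diff above: `&'static str` and `&str` are the same type
// in a `const` item, since the `'static` lifetime is implied there; the change
// only drops the redundant annotation (clippy's `redundant_static_lifetimes`).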
|
||||
|
||||
@@ -1,3 +1,10 @@
|
||||
use std::{
|
||||
env,
|
||||
fmt::{self, Display},
|
||||
io,
|
||||
sync::Arc,
|
||||
};
|
||||
|
||||
use anyhow::{anyhow, Result};
|
||||
use futures::{
|
||||
future::BoxFuture, io::BufReader, stream::BoxStream, AsyncBufReadExt, AsyncReadExt, FutureExt,
|
||||
@@ -6,23 +13,17 @@ use futures::{
|
||||
use gpui::{AppContext, BackgroundExecutor};
|
||||
use isahc::{http::StatusCode, Request, RequestExt};
|
||||
use parking_lot::RwLock;
|
||||
use schemars::JsonSchema;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::{
|
||||
env,
|
||||
fmt::{self, Display},
|
||||
io,
|
||||
sync::Arc,
|
||||
};
|
||||
use util::ResultExt;
|
||||
|
||||
use crate::providers::open_ai::{OpenAiLanguageModel, OPEN_AI_API_URL};
|
||||
use crate::{
|
||||
auth::{CredentialProvider, ProviderCredential},
|
||||
completion::{CompletionProvider, CompletionRequest},
|
||||
models::LanguageModel,
|
||||
};
|
||||
|
||||
use crate::providers::open_ai::{OpenAiLanguageModel, OPEN_AI_API_URL};
|
||||
|
||||
#[derive(Clone, Copy, Serialize, Deserialize, Debug, Eq, PartialEq)]
|
||||
#[serde(rename_all = "lowercase")]
|
||||
pub enum Role {
|
||||
@@ -102,8 +103,9 @@ pub struct OpenAiResponseStreamEvent {
|
||||
pub usage: Option<OpenAiUsage>,
|
||||
}
|
||||
|
||||
pub async fn stream_completion(
|
||||
async fn stream_completion(
|
||||
api_url: String,
|
||||
kind: OpenAiCompletionProviderKind,
|
||||
credential: ProviderCredential,
|
||||
executor: BackgroundExecutor,
|
||||
request: Box<dyn CompletionRequest>,
|
||||
@@ -117,10 +119,11 @@ pub async fn stream_completion(
|
||||
|
||||
let (tx, rx) = futures::channel::mpsc::unbounded::<Result<OpenAiResponseStreamEvent>>();
|
||||
|
||||
let (auth_header_name, auth_header_value) = kind.auth_header(api_key);
|
||||
let json_data = request.data()?;
|
||||
let mut response = Request::post(format!("{api_url}/chat/completions"))
|
||||
let mut response = Request::post(kind.completions_endpoint_url(&api_url))
|
||||
.header("Content-Type", "application/json")
|
||||
.header("Authorization", format!("Bearer {}", api_key))
|
||||
.header(auth_header_name, auth_header_value)
|
||||
.body(json_data)?
|
||||
.send_async()
|
||||
.await?;
|
||||
@@ -194,22 +197,109 @@ pub async fn stream_completion(
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Copy, Serialize, Deserialize, JsonSchema)]
|
||||
pub enum AzureOpenAiApiVersion {
|
||||
/// Retiring April 2, 2024.
|
||||
#[serde(rename = "2023-03-15-preview")]
|
||||
V2023_03_15Preview,
|
||||
#[serde(rename = "2023-05-15")]
|
||||
V2023_05_15,
|
||||
/// Retiring April 2, 2024.
|
||||
#[serde(rename = "2023-06-01-preview")]
|
||||
V2023_06_01Preview,
|
||||
/// Retiring April 2, 2024.
|
||||
#[serde(rename = "2023-07-01-preview")]
|
||||
V2023_07_01Preview,
|
||||
/// Retiring April 2, 2024.
|
||||
#[serde(rename = "2023-08-01-preview")]
|
||||
V2023_08_01Preview,
|
||||
/// Retiring April 2, 2024.
|
||||
#[serde(rename = "2023-09-01-preview")]
|
||||
V2023_09_01Preview,
|
||||
#[serde(rename = "2023-12-01-preview")]
|
||||
V2023_12_01Preview,
|
||||
#[serde(rename = "2024-02-15-preview")]
|
||||
V2024_02_15Preview,
|
||||
}
|
||||
|
||||
impl fmt::Display for AzureOpenAiApiVersion {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
write!(
|
||||
f,
|
||||
"{}",
|
||||
match self {
|
||||
Self::V2023_03_15Preview => "2023-03-15-preview",
|
||||
Self::V2023_05_15 => "2023-05-15",
|
||||
Self::V2023_06_01Preview => "2023-06-01-preview",
|
||||
Self::V2023_07_01Preview => "2023-07-01-preview",
|
||||
Self::V2023_08_01Preview => "2023-08-01-preview",
|
||||
Self::V2023_09_01Preview => "2023-09-01-preview",
|
||||
Self::V2023_12_01Preview => "2023-12-01-preview",
|
||||
Self::V2024_02_15Preview => "2024-02-15-preview",
|
||||
}
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone)]
|
||||
pub enum OpenAiCompletionProviderKind {
|
||||
OpenAi,
|
||||
AzureOpenAi {
|
||||
deployment_id: String,
|
||||
api_version: AzureOpenAiApiVersion,
|
||||
},
|
||||
}
|
||||
|
||||
impl OpenAiCompletionProviderKind {
|
||||
/// Returns the chat completion endpoint URL for this [`OpenAiCompletionProviderKind`].
|
||||
fn completions_endpoint_url(&self, api_url: &str) -> String {
|
||||
match self {
|
||||
Self::OpenAi => {
|
||||
// https://platform.openai.com/docs/api-reference/chat/create
|
||||
format!("{api_url}/chat/completions")
|
||||
}
|
||||
Self::AzureOpenAi {
|
||||
deployment_id,
|
||||
api_version,
|
||||
} => {
|
||||
// https://learn.microsoft.com/en-us/azure/ai-services/openai/reference#chat-completions
|
||||
format!("{api_url}/openai/deployments/{deployment_id}/chat/completions?api-version={api_version}")
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Returns the authentication header for this [`OpenAiCompletionProviderKind`].
|
||||
fn auth_header(&self, api_key: String) -> (&'static str, String) {
|
||||
match self {
|
||||
Self::OpenAi => ("Authorization", format!("Bearer {api_key}")),
|
||||
Self::AzureOpenAi { .. } => ("Api-Key", api_key),
|
||||
}
|
||||
}
|
||||
}
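// Illustration, not part of the diff above: how the new provider kind might be
// constructed and what it resolves to. Both helpers are private, so this sketch
// assumes module-internal code; the deployment id is a made-up example value and
// the URL shapes are the ones documented in the code above.
fn endpoint_examples() {
    let openai = OpenAiCompletionProviderKind::OpenAi;
    // -> "https://api.openai.com/v1/chat/completions"
    let _ = openai.completions_endpoint_url("https://api.openai.com/v1");

    let azure = OpenAiCompletionProviderKind::AzureOpenAi {
        deployment_id: "my-gpt4-deployment".to_string(),
        api_version: AzureOpenAiApiVersion::V2023_05_15,
    };
    // -> ".../openai/deployments/my-gpt4-deployment/chat/completions?api-version=2023-05-15"
    let _ = azure.completions_endpoint_url("https://example.openai.azure.com");
    // The auth header also differs: "Authorization: Bearer <key>" for OpenAI,
    // "Api-Key: <key>" for Azure OpenAI, as returned by `auth_header`.
}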
|
||||
|
||||
#[derive(Clone)]
|
||||
pub struct OpenAiCompletionProvider {
|
||||
api_url: String,
|
||||
kind: OpenAiCompletionProviderKind,
|
||||
model: OpenAiLanguageModel,
|
||||
credential: Arc<RwLock<ProviderCredential>>,
|
||||
executor: BackgroundExecutor,
|
||||
}
|
||||
|
||||
impl OpenAiCompletionProvider {
|
||||
pub async fn new(api_url: String, model_name: String, executor: BackgroundExecutor) -> Self {
|
||||
pub async fn new(
|
||||
api_url: String,
|
||||
kind: OpenAiCompletionProviderKind,
|
||||
model_name: String,
|
||||
executor: BackgroundExecutor,
|
||||
) -> Self {
|
||||
let model = executor
|
||||
.spawn(async move { OpenAiLanguageModel::load(&model_name) })
|
||||
.await;
|
||||
let credential = Arc::new(RwLock::new(ProviderCredential::NoCredentials));
|
||||
Self {
|
||||
api_url,
|
||||
kind,
|
||||
model,
|
||||
credential,
|
||||
executor,
|
||||
@@ -297,6 +387,7 @@ impl CompletionProvider for OpenAiCompletionProvider {
|
||||
let model: Box<dyn LanguageModel> = Box::new(self.model.clone());
|
||||
model
|
||||
}
|
||||
|
||||
fn complete(
|
||||
&self,
|
||||
prompt: Box<dyn CompletionRequest>,
|
||||
@@ -307,7 +398,8 @@ impl CompletionProvider for OpenAiCompletionProvider {
|
||||
// At some point in the future we should rectify this.
|
||||
let credential = self.credential.read().clone();
|
||||
let api_url = self.api_url.clone();
|
||||
let request = stream_completion(api_url, credential, self.executor.clone(), prompt);
|
||||
let kind = self.kind.clone();
|
||||
let request = stream_completion(api_url, kind, credential, self.executor.clone(), prompt);
|
||||
async move {
|
||||
let response = request.await?;
|
||||
let stream = response
|
||||
@@ -322,6 +414,7 @@ impl CompletionProvider for OpenAiCompletionProvider {
|
||||
}
|
||||
.boxed()
|
||||
}
|
||||
|
||||
fn box_clone(&self) -> Box<dyn CompletionProvider> {
|
||||
Box::new((*self).clone())
|
||||
}
|
||||
|
||||
@@ -8,7 +8,6 @@ use gpui::BackgroundExecutor;
|
||||
use isahc::http::StatusCode;
|
||||
use isahc::prelude::Configurable;
|
||||
use isahc::{AsyncBody, Response};
|
||||
use lazy_static::lazy_static;
|
||||
use parking_lot::{Mutex, RwLock};
|
||||
use parse_duration::parse;
|
||||
use postage::watch;
|
||||
@@ -16,7 +15,7 @@ use serde::{Deserialize, Serialize};
|
||||
use serde_json;
|
||||
use std::env;
|
||||
use std::ops::Add;
|
||||
use std::sync::Arc;
|
||||
use std::sync::{Arc, OnceLock};
|
||||
use std::time::{Duration, Instant};
|
||||
use tiktoken_rs::{cl100k_base, CoreBPE};
|
||||
use util::http::{HttpClient, Request};
|
||||
@@ -29,8 +28,9 @@ use crate::providers::open_ai::OpenAiLanguageModel;
|
||||
|
||||
use crate::providers::open_ai::OPEN_AI_API_URL;
|
||||
|
||||
lazy_static! {
|
||||
pub(crate) static ref OPEN_AI_BPE_TOKENIZER: CoreBPE = cl100k_base().unwrap();
|
||||
pub(crate) fn open_ai_bpe_tokenizer() -> &'static CoreBPE {
|
||||
static OPEN_AI_BPE_TOKENIZER: OnceLock<CoreBPE> = OnceLock::new();
|
||||
OPEN_AI_BPE_TOKENIZER.get_or_init(|| cl100k_base().unwrap())
|
||||
}
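// Illustration, not part of the diff above: the general shape of the
// lazy_static -> OnceLock migration used here. The value is computed on the
// first call and a `'static` reference is handed out on every call after that.
fn shared_greeting() -> &'static String {
    static GREETING: std::sync::OnceLock<String> = std::sync::OnceLock::new();
    GREETING.get_or_init(|| "computed once".to_string())
}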
|
||||
|
||||
#[derive(Clone)]
|
||||
|
||||
@@ -3,7 +3,7 @@ use tiktoken_rs::CoreBPE;
|
||||
|
||||
use crate::models::{LanguageModel, TruncationDirection};
|
||||
|
||||
use super::OPEN_AI_BPE_TOKENIZER;
|
||||
use super::open_ai_bpe_tokenizer;
|
||||
|
||||
#[derive(Clone)]
|
||||
pub struct OpenAiLanguageModel {
|
||||
@@ -13,8 +13,8 @@ pub struct OpenAiLanguageModel {
|
||||
|
||||
impl OpenAiLanguageModel {
|
||||
pub fn load(model_name: &str) -> Self {
|
||||
let bpe =
|
||||
tiktoken_rs::get_bpe_from_model(model_name).unwrap_or(OPEN_AI_BPE_TOKENIZER.to_owned());
|
||||
let bpe = tiktoken_rs::get_bpe_from_model(model_name)
|
||||
.unwrap_or(open_ai_bpe_tokenizer().to_owned());
|
||||
OpenAiLanguageModel {
|
||||
name: model_name.to_string(),
|
||||
bpe: Some(bpe),
|
||||
|
||||
@@ -54,6 +54,7 @@ impl LanguageModel for FakeLanguageModel {
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Default)]
|
||||
pub struct FakeEmbeddingProvider {
|
||||
pub embedding_count: AtomicUsize,
|
||||
}
|
||||
@@ -66,14 +67,6 @@ impl Clone for FakeEmbeddingProvider {
|
||||
}
|
||||
}
|
||||
|
||||
impl Default for FakeEmbeddingProvider {
|
||||
fn default() -> Self {
|
||||
FakeEmbeddingProvider {
|
||||
embedding_count: AtomicUsize::default(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl FakeEmbeddingProvider {
|
||||
pub fn embedding_count(&self) -> usize {
|
||||
self.embedding_count.load(atomic::Ordering::SeqCst)
|
||||
|
||||
@@ -5,6 +5,9 @@ edition = "2021"
|
||||
publish = false
|
||||
license = "GPL-3.0-or-later"
|
||||
|
||||
[lints]
|
||||
workspace = true
|
||||
|
||||
[dependencies]
|
||||
anyhow.workspace = true
|
||||
gpui.workspace = true
|
||||
|
||||
@@ -5,6 +5,9 @@ edition = "2021"
|
||||
publish = false
|
||||
license = "GPL-3.0-or-later"
|
||||
|
||||
[lints]
|
||||
workspace = true
|
||||
|
||||
[lib]
|
||||
path = "src/assistant.rs"
|
||||
doctest = false
|
||||
|
||||
@@ -7,11 +7,13 @@ use crate::{
|
||||
SavedMessage, Split, ToggleFocus, ToggleIncludeConversation, ToggleRetrieveContext,
|
||||
};
|
||||
use ai::prompts::repository_context::PromptCodeSnippet;
|
||||
use ai::providers::open_ai::OPEN_AI_API_URL;
|
||||
use ai::{
|
||||
auth::ProviderCredential,
|
||||
completion::{CompletionProvider, CompletionRequest},
|
||||
providers::open_ai::{OpenAiCompletionProvider, OpenAiRequest, RequestMessage},
|
||||
providers::open_ai::{
|
||||
OpenAiCompletionProvider, OpenAiCompletionProviderKind, OpenAiRequest, RequestMessage,
|
||||
OPEN_AI_API_URL,
|
||||
},
|
||||
};
|
||||
use anyhow::{anyhow, Result};
|
||||
use chrono::{DateTime, Local};
|
||||
@@ -29,9 +31,9 @@ use fs::Fs;
|
||||
use futures::StreamExt;
|
||||
use gpui::{
|
||||
canvas, div, point, relative, rems, uniform_list, Action, AnyElement, AppContext,
|
||||
AsyncAppContext, AsyncWindowContext, AvailableSpace, ClipboardItem, Context, EventEmitter,
|
||||
FocusHandle, FocusableView, FontStyle, FontWeight, HighlightStyle, InteractiveElement,
|
||||
IntoElement, Model, ModelContext, ParentElement, Pixels, PromptLevel, Render, SharedString,
|
||||
AsyncAppContext, AsyncWindowContext, ClipboardItem, Context, EventEmitter, FocusHandle,
|
||||
FocusableView, FontStyle, FontWeight, HighlightStyle, InteractiveElement, IntoElement, Model,
|
||||
ModelContext, ParentElement, Pixels, PromptLevel, Render, SharedString,
|
||||
StatefulInteractiveElement, Styled, Subscription, Task, TextStyle, UniformListScrollHandle,
|
||||
View, ViewContext, VisualContext, WeakModel, WeakView, WhiteSpace, WindowContext,
|
||||
};
|
||||
@@ -122,15 +124,18 @@ impl AssistantPanel {
|
||||
.await
|
||||
.log_err()
|
||||
.unwrap_or_default();
|
||||
let (api_url, model_name) = cx.update(|cx| {
|
||||
let (provider_kind, api_url, model_name) = cx.update(|cx| {
|
||||
let settings = AssistantSettings::get_global(cx);
|
||||
(
|
||||
settings.openai_api_url.clone(),
|
||||
settings.default_open_ai_model.full_name().to_string(),
|
||||
)
|
||||
})?;
|
||||
anyhow::Ok((
|
||||
settings.provider_kind()?,
|
||||
settings.provider_api_url()?,
|
||||
settings.provider_model_name()?,
|
||||
))
|
||||
})??;
|
||||
|
||||
let completion_provider = OpenAiCompletionProvider::new(
|
||||
api_url,
|
||||
provider_kind,
|
||||
model_name,
|
||||
cx.background_executor().clone(),
|
||||
)
|
||||
@@ -647,7 +652,7 @@ impl AssistantPanel {
|
||||
// If Markdown or No Language is Known, increase the randomness for more creative output
|
||||
// If Code, decrease temperature to get more deterministic outputs
|
||||
let temperature = if let Some(language) = language_name.clone() {
|
||||
if language.to_string() != "Markdown".to_string() {
|
||||
if *language != *"Markdown" {
|
||||
0.5
|
||||
} else {
|
||||
1.0
|
||||
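A compact sketch of the temperature heuristic the comments above describe; the function name and the fallback for the unknown-language case are assumptions, not Zed's actual code:

fn temperature_for(language_name: Option<&str>) -> f32 {
    match language_name {
        // Markdown or unknown language: higher temperature for more creative output.
        Some("Markdown") | None => 1.0,
        // Code: lower temperature for more deterministic output.
        Some(_) => 0.5,
    }
}

fn main() {
    assert_eq!(temperature_for(Some("Rust")), 0.5);
    assert_eq!(temperature_for(None), 1.0);
}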
@@ -690,24 +695,29 @@ impl AssistantPanel {
|
||||
Task::ready(Ok(Vec::new()))
|
||||
};
|
||||
|
||||
let mut model = AssistantSettings::get_global(cx)
|
||||
.default_open_ai_model
|
||||
.clone();
|
||||
let model_name = model.full_name();
|
||||
let Some(mut model_name) = AssistantSettings::get_global(cx)
|
||||
.provider_model_name()
|
||||
.log_err()
|
||||
else {
|
||||
return;
|
||||
};
|
||||
|
||||
let prompt = cx.background_executor().spawn(async move {
|
||||
let snippets = snippets.await?;
|
||||
let prompt = cx.background_executor().spawn({
|
||||
let model_name = model_name.clone();
|
||||
async move {
|
||||
let snippets = snippets.await?;
|
||||
|
||||
let language_name = language_name.as_deref();
|
||||
generate_content_prompt(
|
||||
user_prompt,
|
||||
language_name,
|
||||
buffer,
|
||||
range,
|
||||
snippets,
|
||||
model_name,
|
||||
project_name,
|
||||
)
|
||||
let language_name = language_name.as_deref();
|
||||
generate_content_prompt(
|
||||
user_prompt,
|
||||
language_name,
|
||||
buffer,
|
||||
range,
|
||||
snippets,
|
||||
&model_name,
|
||||
project_name,
|
||||
)
|
||||
}
|
||||
});
|
||||
|
||||
let mut messages = Vec::new();
|
||||
@@ -719,7 +729,7 @@ impl AssistantPanel {
|
||||
.messages(cx)
|
||||
.map(|message| message.to_open_ai_message(buffer)),
|
||||
);
|
||||
model = conversation.model.clone();
|
||||
model_name = conversation.model.full_name().to_string();
|
||||
}
|
||||
|
||||
cx.spawn(|_, mut cx| async move {
|
||||
@@ -732,7 +742,7 @@ impl AssistantPanel {
|
||||
});
|
||||
|
||||
let request = Box::new(OpenAiRequest {
|
||||
model: model.full_name().into(),
|
||||
model: model_name,
|
||||
messages,
|
||||
stream: true,
|
||||
stop: vec!["|END|>".to_string()],
|
||||
@@ -771,7 +781,7 @@ impl AssistantPanel {
|
||||
} else {
|
||||
editor.highlight_background::<PendingInlineAssist>(
|
||||
background_ranges,
|
||||
|theme| theme.editor_active_line_background, // todo!("use the appropriate color")
|
||||
|theme| theme.editor_active_line_background, // todo("use the appropriate color")
|
||||
cx,
|
||||
);
|
||||
}
|
||||
@@ -969,7 +979,7 @@ impl AssistantPanel {
|
||||
font_size: rems(0.875).into(),
|
||||
font_weight: FontWeight::NORMAL,
|
||||
font_style: FontStyle::Normal,
|
||||
line_height: relative(1.3).into(),
|
||||
line_height: relative(1.3),
|
||||
background_color: None,
|
||||
underline: None,
|
||||
strikethrough: None,
|
||||
@@ -1274,25 +1284,25 @@ impl Render for AssistantPanel {
|
||||
let view = cx.view().clone();
|
||||
let scroll_handle = self.saved_conversations_scroll_handle.clone();
|
||||
let conversation_count = self.saved_conversations.len();
|
||||
canvas(move |bounds, cx| {
|
||||
uniform_list(
|
||||
view,
|
||||
"saved_conversations",
|
||||
conversation_count,
|
||||
|this, range, cx| {
|
||||
range
|
||||
.map(|ix| this.render_saved_conversation(ix, cx))
|
||||
.collect()
|
||||
},
|
||||
)
|
||||
.track_scroll(scroll_handle)
|
||||
.into_any_element()
|
||||
.draw(
|
||||
bounds.origin,
|
||||
bounds.size.map(AvailableSpace::Definite),
|
||||
cx,
|
||||
);
|
||||
})
|
||||
canvas(
|
||||
move |bounds, cx| {
|
||||
let mut list = uniform_list(
|
||||
view,
|
||||
"saved_conversations",
|
||||
conversation_count,
|
||||
|this, range, cx| {
|
||||
range
|
||||
.map(|ix| this.render_saved_conversation(ix, cx))
|
||||
.collect()
|
||||
},
|
||||
)
|
||||
.track_scroll(scroll_handle)
|
||||
.into_any_element();
|
||||
list.layout(bounds.origin, bounds.size.into(), cx);
|
||||
list
|
||||
},
|
||||
|_bounds, mut list, cx| list.paint(cx),
|
||||
)
|
||||
.size_full()
|
||||
.into_any_element()
|
||||
}),
|
||||
@@ -1451,8 +1461,14 @@ impl Conversation {
|
||||
});
|
||||
|
||||
let settings = AssistantSettings::get_global(cx);
|
||||
let model = settings.default_open_ai_model.clone();
|
||||
let api_url = settings.openai_api_url.clone();
|
||||
let model = settings
|
||||
.provider_model()
|
||||
.log_err()
|
||||
.unwrap_or(OpenAiModel::FourTurbo);
|
||||
let api_url = settings
|
||||
.provider_api_url()
|
||||
.log_err()
|
||||
.unwrap_or_else(|| OPEN_AI_API_URL.to_string());
|
||||
|
||||
let mut this = Self {
|
||||
id: Some(Uuid::new_v4().to_string()),
|
||||
@@ -1467,7 +1483,7 @@ impl Conversation {
|
||||
max_token_count: tiktoken_rs::model::get_context_size(&model.full_name()),
|
||||
pending_token_count: Task::ready(None),
|
||||
api_url: Some(api_url),
|
||||
model: model.clone(),
|
||||
model,
|
||||
_subscriptions: vec![cx.subscribe(&buffer, Self::handle_buffer_event)],
|
||||
pending_save: Task::ready(Ok(())),
|
||||
path: None,
|
||||
@@ -1511,7 +1527,7 @@ impl Conversation {
|
||||
.as_ref()
|
||||
.map(|summary| summary.text.clone())
|
||||
.unwrap_or_default(),
|
||||
model: self.model.clone(),
|
||||
model: self.model,
|
||||
api_url: self.api_url.clone(),
|
||||
}
|
||||
}
|
||||
@@ -1533,6 +1549,7 @@ impl Conversation {
|
||||
api_url
|
||||
.clone()
|
||||
.unwrap_or_else(|| OPEN_AI_API_URL.to_string()),
|
||||
OpenAiCompletionProviderKind::OpenAi,
|
||||
model.full_name().into(),
|
||||
cx.background_executor().clone(),
|
||||
)
|
||||
@@ -1616,26 +1633,23 @@ impl Conversation {
|
||||
fn count_remaining_tokens(&mut self, cx: &mut ModelContext<Self>) {
|
||||
let messages = self
|
||||
.messages(cx)
|
||||
.into_iter()
|
||||
.filter_map(|message| {
|
||||
Some(tiktoken_rs::ChatCompletionRequestMessage {
|
||||
role: match message.role {
|
||||
Role::User => "user".into(),
|
||||
Role::Assistant => "assistant".into(),
|
||||
Role::System => "system".into(),
|
||||
},
|
||||
content: Some(
|
||||
self.buffer
|
||||
.read(cx)
|
||||
.text_for_range(message.offset_range)
|
||||
.collect(),
|
||||
),
|
||||
name: None,
|
||||
function_call: None,
|
||||
})
|
||||
.map(|message| tiktoken_rs::ChatCompletionRequestMessage {
|
||||
role: match message.role {
|
||||
Role::User => "user".into(),
|
||||
Role::Assistant => "assistant".into(),
|
||||
Role::System => "system".into(),
|
||||
},
|
||||
content: Some(
|
||||
self.buffer
|
||||
.read(cx)
|
||||
.text_for_range(message.offset_range)
|
||||
.collect(),
|
||||
),
|
||||
name: None,
|
||||
function_call: None,
|
||||
})
|
||||
.collect::<Vec<_>>();
|
||||
let model = self.model.clone();
|
||||
let model = self.model;
|
||||
self.pending_token_count = cx.spawn(|this, mut cx| {
|
||||
async move {
|
||||
cx.background_executor()
|
||||
@@ -2399,7 +2413,9 @@ impl ConversationEditor {
|
||||
.read(cx)
|
||||
.messages(cx)
|
||||
.map(|message| BlockProperties {
|
||||
position: buffer.anchor_in_excerpt(excerpt_id, message.anchor),
|
||||
position: buffer
|
||||
.anchor_in_excerpt(excerpt_id, message.anchor)
|
||||
.unwrap(),
|
||||
height: 2,
|
||||
style: BlockStyle::Sticky,
|
||||
render: Arc::new({
|
||||
@@ -2818,6 +2834,7 @@ impl FocusableView for InlineAssistant {
|
||||
}
|
||||
|
||||
impl InlineAssistant {
|
||||
#[allow(clippy::too_many_arguments)]
|
||||
fn new(
|
||||
id: usize,
|
||||
measurements: Rc<Cell<BlockMeasurements>>,
|
||||
@@ -3183,7 +3200,7 @@ impl InlineAssistant {
|
||||
font_size: rems(0.875).into(),
|
||||
font_weight: FontWeight::NORMAL,
|
||||
font_style: FontStyle::Normal,
|
||||
line_height: relative(1.3).into(),
|
||||
line_height: relative(1.3),
|
||||
background_color: None,
|
||||
underline: None,
|
||||
strikethrough: None,
|
||||
@@ -3651,9 +3668,9 @@ fn report_assistant_event(
|
||||
let client = workspace.read(cx).project().read(cx).client();
|
||||
let telemetry = client.telemetry();
|
||||
|
||||
let model = AssistantSettings::get_global(cx)
|
||||
.default_open_ai_model
|
||||
.clone();
|
||||
let Ok(model_name) = AssistantSettings::get_global(cx).provider_model_name() else {
|
||||
return;
|
||||
};
|
||||
|
||||
telemetry.report_assistant_event(conversation_id, assistant_kind, model.full_name())
|
||||
telemetry.report_assistant_event(conversation_id, assistant_kind, &model_name)
|
||||
}
|
||||
|
||||
@@ -1,10 +1,14 @@
|
||||
use anyhow;
|
||||
use ai::providers::open_ai::{
|
||||
AzureOpenAiApiVersion, OpenAiCompletionProviderKind, OPEN_AI_API_URL,
|
||||
};
|
||||
use anyhow::anyhow;
|
||||
use gpui::Pixels;
|
||||
use schemars::JsonSchema;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use settings::Settings;
|
||||
|
||||
#[derive(Clone, Debug, Serialize, Deserialize, JsonSchema, PartialEq)]
|
||||
#[derive(Clone, Copy, Debug, Serialize, Deserialize, JsonSchema, PartialEq)]
|
||||
#[serde(rename_all = "snake_case")]
|
||||
pub enum OpenAiModel {
|
||||
#[serde(rename = "gpt-3.5-turbo-0613")]
|
||||
ThreePointFiveTurbo,
|
||||
@@ -17,25 +21,25 @@ pub enum OpenAiModel {
|
||||
impl OpenAiModel {
|
||||
pub fn full_name(&self) -> &'static str {
|
||||
match self {
|
||||
OpenAiModel::ThreePointFiveTurbo => "gpt-3.5-turbo-0613",
|
||||
OpenAiModel::Four => "gpt-4-0613",
|
||||
OpenAiModel::FourTurbo => "gpt-4-1106-preview",
|
||||
Self::ThreePointFiveTurbo => "gpt-3.5-turbo-0613",
|
||||
Self::Four => "gpt-4-0613",
|
||||
Self::FourTurbo => "gpt-4-1106-preview",
|
||||
}
|
||||
}
|
||||
|
||||
pub fn short_name(&self) -> &'static str {
|
||||
match self {
|
||||
OpenAiModel::ThreePointFiveTurbo => "gpt-3.5-turbo",
|
||||
OpenAiModel::Four => "gpt-4",
|
||||
OpenAiModel::FourTurbo => "gpt-4-turbo",
|
||||
Self::ThreePointFiveTurbo => "gpt-3.5-turbo",
|
||||
Self::Four => "gpt-4",
|
||||
Self::FourTurbo => "gpt-4-turbo",
|
||||
}
|
||||
}
|
||||
|
||||
pub fn cycle(&self) -> Self {
|
||||
match self {
|
||||
OpenAiModel::ThreePointFiveTurbo => OpenAiModel::Four,
|
||||
OpenAiModel::Four => OpenAiModel::FourTurbo,
|
||||
OpenAiModel::FourTurbo => OpenAiModel::ThreePointFiveTurbo,
|
||||
Self::ThreePointFiveTurbo => Self::Four,
|
||||
Self::Four => Self::FourTurbo,
|
||||
Self::FourTurbo => Self::ThreePointFiveTurbo,
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -48,14 +52,113 @@ pub enum AssistantDockPosition {
|
||||
Bottom,
|
||||
}
|
||||
|
||||
#[derive(Deserialize, Debug)]
|
||||
#[derive(Debug, Deserialize)]
|
||||
pub struct AssistantSettings {
|
||||
/// Whether to show the assistant panel button in the status bar.
|
||||
pub button: bool,
|
||||
/// Where to dock the assistant.
|
||||
pub dock: AssistantDockPosition,
|
||||
/// Default width in pixels when the assistant is docked to the left or right.
|
||||
pub default_width: Pixels,
|
||||
/// Default height in pixels when the assistant is docked to the bottom.
|
||||
pub default_height: Pixels,
|
||||
/// The default OpenAI model to use when starting new conversations.
|
||||
#[deprecated = "Please use `provider.default_model` instead."]
|
||||
pub default_open_ai_model: OpenAiModel,
|
||||
/// OpenAI API base URL to use when starting new conversations.
|
||||
#[deprecated = "Please use `provider.api_url` instead."]
|
||||
pub openai_api_url: String,
|
||||
/// The settings for the AI provider.
|
||||
pub provider: AiProviderSettings,
|
||||
}
|
||||
|
||||
impl AssistantSettings {
|
||||
pub fn provider_kind(&self) -> anyhow::Result<OpenAiCompletionProviderKind> {
|
||||
match &self.provider {
|
||||
AiProviderSettings::OpenAi(_) => Ok(OpenAiCompletionProviderKind::OpenAi),
|
||||
AiProviderSettings::AzureOpenAi(settings) => {
|
||||
let deployment_id = settings
|
||||
.deployment_id
|
||||
.clone()
|
||||
.ok_or_else(|| anyhow!("no Azure OpenAI deployment ID"))?;
|
||||
let api_version = settings
|
||||
.api_version
|
||||
.ok_or_else(|| anyhow!("no Azure OpenAI API version"))?;
|
||||
|
||||
Ok(OpenAiCompletionProviderKind::AzureOpenAi {
|
||||
deployment_id,
|
||||
api_version,
|
||||
})
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub fn provider_api_url(&self) -> anyhow::Result<String> {
|
||||
match &self.provider {
|
||||
AiProviderSettings::OpenAi(settings) => Ok(settings
|
||||
.api_url
|
||||
.clone()
|
||||
.unwrap_or_else(|| OPEN_AI_API_URL.to_string())),
|
||||
AiProviderSettings::AzureOpenAi(settings) => settings
|
||||
.api_url
|
||||
.clone()
|
||||
.ok_or_else(|| anyhow!("no Azure OpenAI API URL")),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn provider_model(&self) -> anyhow::Result<OpenAiModel> {
|
||||
match &self.provider {
|
||||
AiProviderSettings::OpenAi(settings) => {
|
||||
Ok(settings.default_model.unwrap_or(OpenAiModel::FourTurbo))
|
||||
}
|
||||
AiProviderSettings::AzureOpenAi(settings) => {
|
||||
let deployment_id = settings
|
||||
.deployment_id
|
||||
.as_deref()
|
||||
.ok_or_else(|| anyhow!("no Azure OpenAI deployment ID"))?;
|
||||
|
||||
match deployment_id {
|
||||
// https://learn.microsoft.com/en-us/azure/ai-services/openai/concepts/models#gpt-4-and-gpt-4-turbo-preview
|
||||
"gpt-4" | "gpt-4-32k" => Ok(OpenAiModel::Four),
|
||||
// https://learn.microsoft.com/en-us/azure/ai-services/openai/concepts/models#gpt-35
|
||||
"gpt-35-turbo" | "gpt-35-turbo-16k" | "gpt-35-turbo-instruct" => {
|
||||
Ok(OpenAiModel::ThreePointFiveTurbo)
|
||||
}
|
||||
_ => Err(anyhow!(
|
||||
"no matching OpenAI model found for deployment ID: '{deployment_id}'"
|
||||
)),
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub fn provider_model_name(&self) -> anyhow::Result<String> {
|
||||
match &self.provider {
|
||||
AiProviderSettings::OpenAi(settings) => Ok(settings
|
||||
.default_model
|
||||
.unwrap_or(OpenAiModel::FourTurbo)
|
||||
.full_name()
|
||||
.to_string()),
|
||||
AiProviderSettings::AzureOpenAi(settings) => settings
|
||||
.deployment_id
|
||||
.clone()
|
||||
.ok_or_else(|| anyhow!("no Azure OpenAI deployment ID")),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Settings for AssistantSettings {
|
||||
const KEY: Option<&'static str> = Some("assistant");
|
||||
|
||||
type FileContent = AssistantSettingsContent;
|
||||
|
||||
fn load(
|
||||
default_value: &Self::FileContent,
|
||||
user_values: &[&Self::FileContent],
|
||||
_: &mut gpui::AppContext,
|
||||
) -> anyhow::Result<Self> {
|
||||
Self::load_via_json_merge(default_value, user_values)
|
||||
}
|
||||
}
|
||||
|
||||
/// Assistant panel settings
|
||||
@@ -77,26 +180,88 @@ pub struct AssistantSettingsContent {
|
||||
///
|
||||
/// Default: 320
|
||||
pub default_height: Option<f32>,
|
||||
/// Deprecated: Please use `provider.default_model` instead.
|
||||
/// The default OpenAI model to use when starting new conversations.
|
||||
///
|
||||
/// Default: gpt-4-1106-preview
|
||||
#[deprecated = "Please use `provider.default_model` instead."]
|
||||
pub default_open_ai_model: Option<OpenAiModel>,
|
||||
/// Deprecated: Please use `provider.api_url` instead.
|
||||
/// OpenAI API base URL to use when starting new conversations.
|
||||
///
|
||||
/// Default: https://api.openai.com/v1
|
||||
#[deprecated = "Please use `provider.api_url` instead."]
|
||||
pub openai_api_url: Option<String>,
|
||||
/// The settings for the AI provider.
|
||||
#[serde(default)]
|
||||
pub provider: AiProviderSettingsContent,
|
||||
}
|
||||
|
||||
impl Settings for AssistantSettings {
|
||||
const KEY: Option<&'static str> = Some("assistant");
|
||||
#[derive(Debug, Clone, Deserialize)]
|
||||
#[serde(tag = "type", rename_all = "snake_case")]
|
||||
pub enum AiProviderSettings {
|
||||
/// The settings for the OpenAI provider.
|
||||
#[serde(rename = "openai")]
|
||||
OpenAi(OpenAiProviderSettings),
|
||||
/// The settings for the Azure OpenAI provider.
|
||||
#[serde(rename = "azure_openai")]
|
||||
AzureOpenAi(AzureOpenAiProviderSettings),
|
||||
}
|
||||
|
||||
type FileContent = AssistantSettingsContent;
|
||||
/// The settings for the AI provider used by the Zed Assistant.
|
||||
#[derive(Debug, Clone, Serialize, Deserialize, JsonSchema)]
|
||||
#[serde(tag = "type", rename_all = "snake_case")]
|
||||
pub enum AiProviderSettingsContent {
|
||||
/// The settings for the OpenAI provider.
|
||||
#[serde(rename = "openai")]
|
||||
OpenAi(OpenAiProviderSettingsContent),
|
||||
/// The settings for the Azure OpenAI provider.
|
||||
#[serde(rename = "azure_openai")]
|
||||
AzureOpenAi(AzureOpenAiProviderSettingsContent),
|
||||
}
|
||||
|
||||
fn load(
|
||||
default_value: &Self::FileContent,
|
||||
user_values: &[&Self::FileContent],
|
||||
_: &mut gpui::AppContext,
|
||||
) -> anyhow::Result<Self> {
|
||||
Self::load_via_json_merge(default_value, user_values)
|
||||
impl Default for AiProviderSettingsContent {
|
||||
fn default() -> Self {
|
||||
Self::OpenAi(OpenAiProviderSettingsContent::default())
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Deserialize)]
|
||||
pub struct OpenAiProviderSettings {
|
||||
/// The OpenAI API base URL to use when starting new conversations.
|
||||
pub api_url: Option<String>,
|
||||
/// The default OpenAI model to use when starting new conversations.
|
||||
pub default_model: Option<OpenAiModel>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Default, Clone, Serialize, Deserialize, JsonSchema)]
|
||||
pub struct OpenAiProviderSettingsContent {
|
||||
/// The OpenAI API base URL to use when starting new conversations.
|
||||
///
|
||||
/// Default: https://api.openai.com/v1
|
||||
pub api_url: Option<String>,
|
||||
/// The default OpenAI model to use when starting new conversations.
|
||||
///
|
||||
/// Default: gpt-4-1106-preview
|
||||
pub default_model: Option<OpenAiModel>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Deserialize)]
|
||||
pub struct AzureOpenAiProviderSettings {
|
||||
/// The Azure OpenAI API base URL to use when starting new conversations.
|
||||
pub api_url: Option<String>,
|
||||
/// The Azure OpenAI API version.
|
||||
pub api_version: Option<AzureOpenAiApiVersion>,
|
||||
/// The Azure OpenAI API deployment ID.
|
||||
pub deployment_id: Option<String>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Default, Clone, Serialize, Deserialize, JsonSchema)]
|
||||
pub struct AzureOpenAiProviderSettingsContent {
|
||||
/// The Azure OpenAI API base URL to use when starting new conversations.
|
||||
pub api_url: Option<String>,
|
||||
/// The Azure OpenAI API version.
|
||||
pub api_version: Option<AzureOpenAiApiVersion>,
|
||||
/// The Azure OpenAI deployment ID.
|
||||
pub deployment_id: Option<String>,
|
||||
}
|
||||
|
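A rough sketch of how the tagged `provider` setting above could deserialize, with simplified types (plain Strings instead of OpenAiModel / AzureOpenAiApiVersion, struct variants instead of the nested settings structs) and an illustrative API version string; it assumes serde and serde_json and is not the actual Zed code:

use serde::Deserialize;

#[derive(Debug, Deserialize)]
#[serde(tag = "type", rename_all = "snake_case")]
enum ProviderSettings {
    #[serde(rename = "openai")]
    OpenAi {
        api_url: Option<String>,
        default_model: Option<String>,
    },
    #[serde(rename = "azure_openai")]
    AzureOpenAi {
        api_url: Option<String>,
        api_version: Option<String>,
        deployment_id: Option<String>,
    },
}

fn main() -> serde_json::Result<()> {
    // OpenAI provider: every field is optional; defaults are filled in elsewhere.
    let open_ai: ProviderSettings = serde_json::from_str(
        r#"{ "type": "openai", "default_model": "gpt-4-1106-preview" }"#,
    )?;
    // Azure OpenAI provider: the deployment ID stands in for the model name.
    let azure: ProviderSettings = serde_json::from_str(
        r#"{ "type": "azure_openai", "api_url": "https://example.openai.azure.com", "api_version": "2023-05-15", "deployment_id": "gpt-4" }"#,
    )?;
    println!("{open_ai:?}\n{azure:?}");
    Ok(())
}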
||||
@@ -297,7 +297,7 @@ fn strip_invalid_spans_from_codeblock(
|
||||
} else if buffer.starts_with("<|")
|
||||
|| buffer.starts_with("<|S")
|
||||
|| buffer.starts_with("<|S|")
|
||||
|| buffer.ends_with("|")
|
||||
|| buffer.ends_with('|')
|
||||
|| buffer.ends_with("|E")
|
||||
|| buffer.ends_with("|E|")
|
||||
{
|
||||
@@ -335,7 +335,7 @@ fn strip_invalid_spans_from_codeblock(
|
||||
.strip_suffix("|E|>")
|
||||
.or_else(|| text.strip_suffix("E|>"))
|
||||
.or_else(|| text.strip_prefix("|>"))
|
||||
.or_else(|| text.strip_prefix(">"))
|
||||
.or_else(|| text.strip_prefix('>'))
|
||||
.unwrap_or(&text)
|
||||
.to_string();
|
||||
};
|
||||
|
||||
@@ -5,6 +5,9 @@ edition = "2021"
|
||||
publish = false
|
||||
license = "GPL-3.0-or-later"
|
||||
|
||||
[lints]
|
||||
workspace = true
|
||||
|
||||
[lib]
|
||||
path = "src/audio.rs"
|
||||
doctest = false
|
||||
|
||||
@@ -5,6 +5,9 @@ edition = "2021"
|
||||
publish = false
|
||||
license = "GPL-3.0-or-later"
|
||||
|
||||
[lints]
|
||||
workspace = true
|
||||
|
||||
[lib]
|
||||
path = "src/auto_update.rs"
|
||||
doctest = false
|
||||
|
||||
@@ -20,7 +20,7 @@ use smol::io::AsyncReadExt;
|
||||
use settings::{Settings, SettingsStore};
|
||||
use smol::{fs::File, process::Command};
|
||||
|
||||
use release_channel::{AppCommitSha, ReleaseChannel};
|
||||
use release_channel::{AppCommitSha, AppVersion, ReleaseChannel};
|
||||
use std::{
|
||||
env::consts::{ARCH, OS},
|
||||
ffi::OsString,
|
||||
@@ -190,7 +190,7 @@ pub fn view_release_notes(_: &ViewReleaseNotes, cx: &mut AppContext) -> Option<(
|
||||
|
||||
fn view_release_notes_locally(workspace: &mut Workspace, cx: &mut ViewContext<Workspace>) {
|
||||
let release_channel = ReleaseChannel::global(cx);
|
||||
let version = env!("CARGO_PKG_VERSION");
|
||||
let version = AppVersion::global(cx).to_string();
|
||||
|
||||
let client = client::Client::global(cx).http_client();
|
||||
let url = client.build_url(&format!(
|
||||
@@ -229,6 +229,7 @@ fn view_release_notes_locally(workspace: &mut Workspace, cx: &mut ViewContext<Wo
|
||||
buffer.update(cx, |buffer, cx| {
|
||||
buffer.edit([(0..0, body.release_notes)], None, cx)
|
||||
});
|
||||
let language_registry = project.read(cx).languages().clone();
|
||||
|
||||
let buffer = cx.new_model(|cx| MultiBuffer::singleton(buffer, cx));
|
||||
|
||||
@@ -240,9 +241,10 @@ fn view_release_notes_locally(workspace: &mut Workspace, cx: &mut ViewContext<Wo
|
||||
editor,
|
||||
workspace_handle,
|
||||
Some(tab_description),
|
||||
language_registry,
|
||||
cx,
|
||||
);
|
||||
workspace.add_item(Box::new(view.clone()), cx);
|
||||
workspace.add_item_to_active_pane(Box::new(view.clone()), cx);
|
||||
cx.notify();
|
||||
})
|
||||
.log_err();
|
||||
@@ -343,8 +345,7 @@ impl AutoUpdater {
|
||||
));
|
||||
cx.update(|cx| {
|
||||
if let Some(param) = ReleaseChannel::try_global(cx)
|
||||
.map(|release_channel| release_channel.release_query_param())
|
||||
.flatten()
|
||||
.and_then(|release_channel| release_channel.release_query_param())
|
||||
{
|
||||
url_string += "&";
|
||||
url_string += param;
|
||||
|
||||
@@ -44,7 +44,7 @@ impl Render for UpdateNotification {
|
||||
crate::view_release_notes(&Default::default(), cx);
|
||||
this.dismiss(&menu::Cancel, cx)
|
||||
})),
|
||||
);
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -5,6 +5,9 @@ edition = "2021"
|
||||
publish = false
|
||||
license = "GPL-3.0-or-later"
|
||||
|
||||
[lints]
|
||||
workspace = true
|
||||
|
||||
[lib]
|
||||
path = "src/breadcrumbs.rs"
|
||||
doctest = false
|
||||
|
||||
@@ -4,10 +4,11 @@ use gpui::{
|
||||
ViewContext,
|
||||
};
|
||||
use itertools::Itertools;
|
||||
use std::cmp;
|
||||
use theme::ActiveTheme;
|
||||
use ui::{prelude::*, ButtonLike, ButtonStyle, Label, Tooltip};
|
||||
use workspace::{
|
||||
item::{ItemEvent, ItemHandle},
|
||||
item::{BreadcrumbText, ItemEvent, ItemHandle},
|
||||
ToolbarItemEvent, ToolbarItemLocation, ToolbarItemView,
|
||||
};
|
||||
|
||||
@@ -31,14 +32,30 @@ impl EventEmitter<ToolbarItemEvent> for Breadcrumbs {}
|
||||
|
||||
impl Render for Breadcrumbs {
|
||||
fn render(&mut self, cx: &mut ViewContext<Self>) -> impl IntoElement {
|
||||
const MAX_SEGMENTS: usize = 12;
|
||||
let element = h_flex().text_ui();
|
||||
let Some(active_item) = self.active_item.as_ref() else {
|
||||
return element;
|
||||
};
|
||||
let Some(segments) = active_item.breadcrumbs(cx.theme(), cx) else {
|
||||
let Some(mut segments) = active_item.breadcrumbs(cx.theme(), cx) else {
|
||||
return element;
|
||||
};
|
||||
|
||||
let prefix_end_ix = cmp::min(segments.len(), MAX_SEGMENTS / 2);
|
||||
let suffix_start_ix = cmp::max(
|
||||
prefix_end_ix,
|
||||
segments.len().saturating_sub(MAX_SEGMENTS / 2),
|
||||
);
|
||||
if suffix_start_ix > prefix_end_ix {
|
||||
segments.splice(
|
||||
prefix_end_ix..suffix_start_ix,
|
||||
Some(BreadcrumbText {
|
||||
text: "⋯".into(),
|
||||
highlights: None,
|
||||
}),
|
||||
);
|
||||
}
|
||||
|
||||
let highlighted_segments = segments.into_iter().map(|segment| {
|
||||
let mut text_style = cx.text_style();
|
||||
text_style.color = Color::Muted.color(cx);
|
||||
|
||||
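A standalone sketch of the middle-truncation logic above, operating on a plain Vec<String> instead of BreadcrumbText (the helper name and test values are assumptions):

fn truncate_middle(mut segments: Vec<String>, max_segments: usize) -> Vec<String> {
    // Keep the first and last max_segments / 2 entries and splice a single
    // ellipsis entry over everything in between.
    let prefix_end_ix = segments.len().min(max_segments / 2);
    let suffix_start_ix =
        prefix_end_ix.max(segments.len().saturating_sub(max_segments / 2));
    if suffix_start_ix > prefix_end_ix {
        segments.splice(prefix_end_ix..suffix_start_ix, Some("⋯".to_string()));
    }
    segments
}

fn main() {
    let segments: Vec<String> = (1..=15).map(|i| format!("s{i}")).collect();
    // With MAX_SEGMENTS = 12: the first 6 entries, one "⋯", then the last 6.
    assert_eq!(truncate_middle(segments, 12).len(), 13);
}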
@@ -5,6 +5,9 @@ edition = "2021"
|
||||
publish = false
|
||||
license = "GPL-3.0-or-later"
|
||||
|
||||
[lints]
|
||||
workspace = true
|
||||
|
||||
[lib]
|
||||
path = "src/call.rs"
|
||||
doctest = false
|
||||
|
||||
@@ -302,7 +302,7 @@ impl ActiveCall {
|
||||
return Task::ready(Ok(()));
|
||||
}
|
||||
|
||||
let room_id = call.room_id.clone();
|
||||
let room_id = call.room_id;
|
||||
let client = self.client.clone();
|
||||
let user_store = self.user_store.clone();
|
||||
let join = self
|
||||
|
||||
@@ -1182,19 +1182,10 @@ impl Room {
|
||||
) -> Task<Result<Model<Project>>> {
|
||||
let client = self.client.clone();
|
||||
let user_store = self.user_store.clone();
|
||||
let role = self.local_participant.role;
|
||||
cx.emit(Event::RemoteProjectJoined { project_id: id });
|
||||
cx.spawn(move |this, mut cx| async move {
|
||||
let project = Project::remote(
|
||||
id,
|
||||
client,
|
||||
user_store,
|
||||
language_registry,
|
||||
fs,
|
||||
role,
|
||||
cx.clone(),
|
||||
)
|
||||
.await?;
|
||||
let project =
|
||||
Project::remote(id, client, user_store, language_registry, fs, cx.clone()).await?;
|
||||
|
||||
this.update(&mut cx, |this, cx| {
|
||||
this.joined_projects.retain(|project| {
|
||||
|
||||
@@ -5,6 +5,9 @@ edition = "2021"
|
||||
publish = false
|
||||
license = "GPL-3.0-or-later"
|
||||
|
||||
[lints]
|
||||
workspace = true
|
||||
|
||||
[lib]
|
||||
path = "src/channel.rs"
|
||||
doctest = false
|
||||
|
||||
@@ -11,7 +11,7 @@ pub use channel_chat::{
|
||||
mentions_to_proto, ChannelChat, ChannelChatEvent, ChannelMessage, ChannelMessageId,
|
||||
MessageParams,
|
||||
};
|
||||
pub use channel_store::{Channel, ChannelEvent, ChannelMembership, ChannelStore, HostedProjectId};
|
||||
pub use channel_store::{Channel, ChannelEvent, ChannelMembership, ChannelStore};
|
||||
|
||||
#[cfg(test)]
|
||||
mod channel_store_tests;
|
||||
|
||||
@@ -126,7 +126,7 @@ impl ChannelBuffer {
|
||||
for (_, old_collaborator) in &self.collaborators {
|
||||
if !new_collaborators.contains_key(&old_collaborator.peer_id) {
|
||||
self.buffer.update(cx, |buffer, cx| {
|
||||
buffer.remove_peer(old_collaborator.replica_id as u16, cx)
|
||||
buffer.remove_peer(old_collaborator.replica_id, cx)
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
@@ -681,7 +681,7 @@ pub fn mentions_to_proto(mentions: &[(Range<usize>, UserId)]) -> Vec<proto::Chat
|
||||
start: range.start as u64,
|
||||
end: range.end as u64,
|
||||
}),
|
||||
user_id: *user_id as u64,
|
||||
user_id: *user_id,
|
||||
})
|
||||
.collect()
|
||||
}
|
||||
|
||||
@@ -3,7 +3,7 @@ mod channel_index;
|
||||
use crate::{channel_buffer::ChannelBuffer, channel_chat::ChannelChat, ChannelMessage};
|
||||
use anyhow::{anyhow, Result};
|
||||
use channel_index::ChannelIndex;
|
||||
use client::{ChannelId, Client, Subscription, User, UserId, UserStore};
|
||||
use client::{ChannelId, Client, ClientSettings, ProjectId, Subscription, User, UserId, UserStore};
|
||||
use collections::{hash_map, HashMap, HashSet};
|
||||
use futures::{channel::mpsc, future::Shared, Future, FutureExt, StreamExt};
|
||||
use gpui::{
|
||||
@@ -11,11 +11,11 @@ use gpui::{
|
||||
Task, WeakModel,
|
||||
};
|
||||
use language::Capability;
|
||||
use release_channel::RELEASE_CHANNEL;
|
||||
use rpc::{
|
||||
proto::{self, ChannelRole, ChannelVisibility},
|
||||
TypedEnvelope,
|
||||
};
|
||||
use settings::Settings;
|
||||
use std::{mem, sync::Arc, time::Duration};
|
||||
use util::{async_maybe, maybe, ResultExt};
|
||||
|
||||
@@ -27,10 +27,7 @@ pub fn init(client: &Arc<Client>, user_store: Model<UserStore>, cx: &mut AppCont
|
||||
cx.set_global(GlobalChannelStore(channel_store));
|
||||
}
|
||||
|
||||
#[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Clone, Copy)]
|
||||
pub struct HostedProjectId(pub u64);
|
||||
|
||||
#[derive(Debug, Clone, Default)]
|
||||
#[derive(Debug, Clone, Default, PartialEq)]
|
||||
struct NotesVersion {
|
||||
epoch: u64,
|
||||
version: clock::Global,
|
||||
@@ -38,7 +35,7 @@ struct NotesVersion {
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct HostedProject {
|
||||
id: HostedProjectId,
|
||||
project_id: ProjectId,
|
||||
channel_id: ChannelId,
|
||||
name: SharedString,
|
||||
_visibility: proto::ChannelVisibility,
|
||||
@@ -47,7 +44,7 @@ pub struct HostedProject {
|
||||
impl From<proto::HostedProject> for HostedProject {
|
||||
fn from(project: proto::HostedProject) -> Self {
|
||||
Self {
|
||||
id: HostedProjectId(project.id),
|
||||
project_id: ProjectId(project.project_id),
|
||||
channel_id: ChannelId(project.channel_id),
|
||||
_visibility: project.visibility(),
|
||||
name: project.name.into(),
|
||||
@@ -60,7 +57,7 @@ pub struct ChannelStore {
|
||||
channel_invitations: Vec<Arc<Channel>>,
|
||||
channel_participants: HashMap<ChannelId, Vec<Arc<User>>>,
|
||||
channel_states: HashMap<ChannelId, ChannelState>,
|
||||
hosted_projects: HashMap<HostedProjectId, HostedProject>,
|
||||
hosted_projects: HashMap<ProjectId, HostedProject>,
|
||||
|
||||
outgoing_invites: HashSet<(ChannelId, UserId)>,
|
||||
update_channels_tx: mpsc::UnboundedSender<proto::UpdateChannels>,
|
||||
@@ -82,27 +79,28 @@ pub struct Channel {
|
||||
pub parent_path: Vec<ChannelId>,
|
||||
}
|
||||
|
||||
#[derive(Default)]
|
||||
#[derive(Default, Debug)]
|
||||
pub struct ChannelState {
|
||||
latest_chat_message: Option<u64>,
|
||||
latest_notes_versions: Option<NotesVersion>,
|
||||
latest_notes_version: NotesVersion,
|
||||
observed_notes_version: NotesVersion,
|
||||
observed_chat_message: Option<u64>,
|
||||
observed_notes_versions: Option<NotesVersion>,
|
||||
role: Option<ChannelRole>,
|
||||
projects: HashSet<HostedProjectId>,
|
||||
projects: HashSet<ProjectId>,
|
||||
}
|
||||
|
||||
impl Channel {
|
||||
pub fn link(&self) -> String {
|
||||
RELEASE_CHANNEL.link_prefix().to_owned()
|
||||
+ "channel/"
|
||||
+ &Self::slug(&self.name)
|
||||
+ "-"
|
||||
+ &self.id.to_string()
|
||||
pub fn link(&self, cx: &AppContext) -> String {
|
||||
format!(
|
||||
"{}/channel/{}-{}",
|
||||
ClientSettings::get_global(cx).server_url,
|
||||
Self::slug(&self.name),
|
||||
self.id
|
||||
)
|
||||
}
|
||||
|
||||
pub fn notes_link(&self, heading: Option<String>) -> String {
|
||||
self.link()
|
||||
pub fn notes_link(&self, heading: Option<String>, cx: &AppContext) -> String {
|
||||
self.link(cx)
|
||||
+ "/notes"
|
||||
+ &heading
|
||||
.map(|h| format!("#{}", Self::slug(&h)))
|
||||
@@ -305,8 +303,8 @@ impl ChannelStore {
|
||||
self.channel_index.by_id().get(&channel_id)
|
||||
}
|
||||
|
||||
pub fn projects_for_id(&self, channel_id: ChannelId) -> Vec<(SharedString, HostedProjectId)> {
|
||||
let mut projects: Vec<(SharedString, HostedProjectId)> = self
|
||||
pub fn projects_for_id(&self, channel_id: ChannelId) -> Vec<(SharedString, ProjectId)> {
|
||||
let mut projects: Vec<(SharedString, ProjectId)> = self
|
||||
.channel_states
|
||||
.get(&channel_id)
|
||||
.map(|state| state.projects.clone())
|
||||
@@ -592,7 +590,7 @@ impl ChannelStore {
|
||||
cx: &mut ModelContext<Self>,
|
||||
) -> Task<Result<ChannelId>> {
|
||||
let client = self.client.clone();
|
||||
let name = name.trim_start_matches("#").to_owned();
|
||||
let name = name.trim_start_matches('#').to_owned();
|
||||
cx.spawn(move |this, mut cx| async move {
|
||||
let response = client
|
||||
.request(proto::CreateChannel {
|
||||
@@ -839,12 +837,10 @@ impl ChannelStore {
|
||||
Ok(users
|
||||
.into_iter()
|
||||
.zip(response.members)
|
||||
.filter_map(|(user, member)| {
|
||||
Some(ChannelMembership {
|
||||
user,
|
||||
role: member.role(),
|
||||
kind: member.kind(),
|
||||
})
|
||||
.map(|(user, member)| ChannelMembership {
|
||||
user,
|
||||
role: member.role(),
|
||||
kind: member.kind(),
|
||||
})
|
||||
.collect())
|
||||
})
|
||||
@@ -1161,27 +1157,27 @@ impl ChannelStore {
|
||||
let hosted_project: HostedProject = hosted_project.into();
|
||||
if let Some(old_project) = self
|
||||
.hosted_projects
|
||||
.insert(hosted_project.id, hosted_project.clone())
|
||||
.insert(hosted_project.project_id, hosted_project.clone())
|
||||
{
|
||||
self.channel_states
|
||||
.entry(old_project.channel_id)
|
||||
.or_default()
|
||||
.remove_hosted_project(old_project.id);
|
||||
.remove_hosted_project(old_project.project_id);
|
||||
}
|
||||
self.channel_states
|
||||
.entry(hosted_project.channel_id)
|
||||
.or_default()
|
||||
.add_hosted_project(hosted_project.id);
|
||||
.add_hosted_project(hosted_project.project_id);
|
||||
}
|
||||
|
||||
for hosted_project_id in payload.deleted_hosted_projects {
|
||||
let hosted_project_id = HostedProjectId(hosted_project_id);
|
||||
let hosted_project_id = ProjectId(hosted_project_id);
|
||||
|
||||
if let Some(old_project) = self.hosted_projects.remove(&hosted_project_id) {
|
||||
self.channel_states
|
||||
.entry(old_project.channel_id)
|
||||
.or_default()
|
||||
.remove_hosted_project(old_project.id);
|
||||
.remove_hosted_project(old_project.project_id);
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1238,19 +1234,12 @@ impl ChannelState {
|
||||
}
|
||||
|
||||
fn has_channel_buffer_changed(&self) -> bool {
|
||||
if let Some(latest_version) = &self.latest_notes_versions {
|
||||
if let Some(observed_version) = &self.observed_notes_versions {
|
||||
latest_version.epoch > observed_version.epoch
|
||||
|| (latest_version.epoch == observed_version.epoch
|
||||
&& latest_version
|
||||
.version
|
||||
.changed_since(&observed_version.version))
|
||||
} else {
|
||||
true
|
||||
}
|
||||
} else {
|
||||
false
|
||||
}
|
||||
self.latest_notes_version.epoch > self.observed_notes_version.epoch
|
||||
|| (self.latest_notes_version.epoch == self.observed_notes_version.epoch
|
||||
&& self
|
||||
.latest_notes_version
|
||||
.version
|
||||
.changed_since(&self.observed_notes_version.version))
|
||||
}
|
||||
|
||||
fn has_new_messages(&self) -> bool {
|
||||
@@ -1277,36 +1266,32 @@ impl ChannelState {
|
||||
}
|
||||
|
||||
fn acknowledge_notes_version(&mut self, epoch: u64, version: &clock::Global) {
|
||||
if let Some(existing) = &mut self.observed_notes_versions {
|
||||
if existing.epoch == epoch {
|
||||
existing.version.join(version);
|
||||
return;
|
||||
}
|
||||
if self.observed_notes_version.epoch == epoch {
|
||||
self.observed_notes_version.version.join(version);
|
||||
} else {
|
||||
self.observed_notes_version = NotesVersion {
|
||||
epoch,
|
||||
version: version.clone(),
|
||||
};
|
||||
}
|
||||
self.observed_notes_versions = Some(NotesVersion {
|
||||
epoch,
|
||||
version: version.clone(),
|
||||
});
|
||||
}
|
||||
|
||||
fn update_latest_notes_version(&mut self, epoch: u64, version: &clock::Global) {
|
||||
if let Some(existing) = &mut self.latest_notes_versions {
|
||||
if existing.epoch == epoch {
|
||||
existing.version.join(version);
|
||||
return;
|
||||
}
|
||||
if self.latest_notes_version.epoch == epoch {
|
||||
self.latest_notes_version.version.join(version);
|
||||
} else {
|
||||
self.latest_notes_version = NotesVersion {
|
||||
epoch,
|
||||
version: version.clone(),
|
||||
};
|
||||
}
|
||||
self.latest_notes_versions = Some(NotesVersion {
|
||||
epoch,
|
||||
version: version.clone(),
|
||||
});
|
||||
}
|
||||
|
||||
fn add_hosted_project(&mut self, project_id: HostedProjectId) {
|
||||
fn add_hosted_project(&mut self, project_id: ProjectId) {
|
||||
self.projects.insert(project_id);
|
||||
}
|
||||
|
||||
fn remove_hosted_project(&mut self, project_id: HostedProjectId) {
|
||||
fn remove_hosted_project(&mut self, project_id: ProjectId) {
|
||||
self.projects.remove(&project_id);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -97,9 +97,9 @@ impl<'a> Drop for ChannelPathsInsertGuard<'a> {
|
||||
}
|
||||
}
|
||||
|
||||
fn channel_path_sorting_key<'a>(
|
||||
fn channel_path_sorting_key(
|
||||
id: ChannelId,
|
||||
channels_by_id: &'a BTreeMap<ChannelId, Arc<Channel>>,
|
||||
channels_by_id: &BTreeMap<ChannelId, Arc<Channel>>,
|
||||
) -> impl Iterator<Item = (&str, ChannelId)> {
|
||||
let (parent_path, name) = channels_by_id
|
||||
.get(&id)
|
||||
|
||||
@@ -5,6 +5,9 @@ edition = "2021"
|
||||
publish = false
|
||||
license = "GPL-3.0-or-later"
|
||||
|
||||
[lints]
|
||||
workspace = true
|
||||
|
||||
[lib]
|
||||
path = "src/cli.rs"
|
||||
doctest = false
|
||||
@@ -15,6 +18,7 @@ path = "src/main.rs"
|
||||
|
||||
[dependencies]
|
||||
anyhow.workspace = true
|
||||
# TODO: Use workspace version of `clap`.
|
||||
clap = { version = "3.1", features = ["derive"] }
|
||||
ipc-channel = "0.16"
|
||||
serde.workspace = true
|
||||
|
||||
@@ -9,12 +9,11 @@ pub struct IpcHandshake {
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize)]
|
||||
pub enum CliRequest {
|
||||
// The field is named `path` for compatibility, but now the CLI can request
|
||||
// opening a path at a certain row and/or column: `some/path:123` and `some/path:123:456`.
|
||||
//
|
||||
// Since the Zed CLI has to be installed separately, there can be situations when an old CLI is
|
||||
// querying a new Zed editor, so support both formats by using `String` here and parsing it on the Zed side later.
|
||||
Open { paths: Vec<String>, wait: bool },
|
||||
Open {
|
||||
paths: Vec<String>,
|
||||
wait: bool,
|
||||
open_new_workspace: Option<bool>,
|
||||
},
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize)]
|
||||
|
||||
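A rough sketch (an assumed helper, not Zed's actual parser) of how the receiving side could split the `path[:row[:column]]` strings described in the comment above; a plain path with no numeric suffix is returned unchanged:

fn parse_path_with_position(input: &str) -> (String, Option<u32>, Option<u32>) {
    let mut path = input;
    let mut row = None;
    let mut column = None;
    // Peel numeric suffixes off the right; anything non-numeric stays in the path,
    // so plain old-style `path` requests still work.
    if let Some((rest, last)) = path.rsplit_once(':') {
        if let Ok(last_number) = last.parse::<u32>() {
            match rest.rsplit_once(':') {
                Some((rest2, middle)) if middle.parse::<u32>().is_ok() => {
                    // `path:row:column`
                    path = rest2;
                    row = middle.parse().ok();
                    column = Some(last_number);
                }
                _ => {
                    // `path:row`
                    path = rest;
                    row = Some(last_number);
                }
            }
        }
    }
    (path.to_string(), row, column)
}

fn main() {
    assert_eq!(
        parse_path_with_position("src/lib.rs:10:4"),
        ("src/lib.rs".to_string(), Some(10), Some(4))
    );
    assert_eq!(
        parse_path_with_position("src/lib.rs"),
        ("src/lib.rs".to_string(), None, None)
    );
}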
@@ -5,19 +5,25 @@ use clap::Parser;
|
||||
use cli::{CliRequest, CliResponse};
|
||||
use serde::Deserialize;
|
||||
use std::{
|
||||
env,
|
||||
ffi::OsStr,
|
||||
fs::{self, OpenOptions},
|
||||
io,
|
||||
fs::{self},
|
||||
path::{Path, PathBuf},
|
||||
};
|
||||
use util::paths::PathLikeWithPosition;
|
||||
|
||||
#[derive(Parser)]
|
||||
#[derive(Parser, Debug)]
|
||||
#[clap(name = "zed", global_setting(clap::AppSettings::NoAutoVersion))]
|
||||
struct Args {
|
||||
/// Wait for all of the given paths to be opened/closed before exiting.
|
||||
#[clap(short, long)]
|
||||
wait: bool,
|
||||
/// Add files to the currently open workspace
|
||||
#[clap(short, long, overrides_with = "new")]
|
||||
add: bool,
|
||||
/// Create a new workspace
|
||||
#[clap(short, long, overrides_with = "add")]
|
||||
new: bool,
|
||||
/// A sequence of space-separated paths that you want to open.
|
||||
///
|
||||
/// Use `path:line:row` syntax to open a file at a specific location.
|
||||
@@ -56,31 +62,41 @@ fn main() -> Result<()> {
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
for path in args
|
||||
.paths_with_position
|
||||
.iter()
|
||||
.map(|path_with_position| &path_with_position.path_like)
|
||||
{
|
||||
if !path.exists() {
|
||||
touch(path.as_path())?;
|
||||
}
|
||||
let curdir = env::current_dir()?;
|
||||
let mut paths = vec![];
|
||||
for path in args.paths_with_position {
|
||||
let canonicalized = path.map_path_like(|path| match fs::canonicalize(&path) {
|
||||
Ok(path) => Ok(path),
|
||||
Err(e) => {
|
||||
if let Some(mut parent) = path.parent() {
|
||||
if parent == Path::new("") {
|
||||
parent = &curdir;
|
||||
}
|
||||
match fs::canonicalize(parent) {
|
||||
Ok(parent) => Ok(parent.join(path.file_name().unwrap())),
|
||||
Err(_) => Err(e),
|
||||
}
|
||||
} else {
|
||||
Err(e)
|
||||
}
|
||||
}
|
||||
})?;
|
||||
paths.push(canonicalized.to_string(|path| path.display().to_string()))
|
||||
}
|
||||
|
||||
let (tx, rx) = bundle.launch()?;
|
||||
let open_new_workspace = if args.new {
|
||||
Some(true)
|
||||
} else if args.add {
|
||||
Some(false)
|
||||
} else {
|
||||
None
|
||||
};
|
||||
|
||||
tx.send(CliRequest::Open {
|
||||
paths: args
|
||||
.paths_with_position
|
||||
.into_iter()
|
||||
.map(|path_with_position| {
|
||||
let path_with_position = path_with_position.map_path_like(|path| {
|
||||
fs::canonicalize(&path)
|
||||
.with_context(|| format!("path {path:?} canonicalization"))
|
||||
})?;
|
||||
Ok(path_with_position.to_string(|path| path.display().to_string()))
|
||||
})
|
||||
.collect::<Result<_>>()?,
|
||||
paths,
|
||||
wait: args.wait,
|
||||
open_new_workspace,
|
||||
})?;
|
||||
|
||||
while let Ok(response) = rx.recv() {
|
||||
@@ -106,13 +122,6 @@ enum Bundle {
|
||||
},
|
||||
}
|
||||
|
||||
fn touch(path: &Path) -> io::Result<()> {
|
||||
match OpenOptions::new().create(true).write(true).open(path) {
|
||||
Ok(_) => Ok(()),
|
||||
Err(e) => Err(e),
|
||||
}
|
||||
}
|
||||
|
||||
fn locate_bundle() -> Result<PathBuf> {
|
||||
let cli_path = std::env::current_exe()?.canonicalize()?;
|
||||
let mut app_path = cli_path.clone();
|
||||
@@ -156,7 +165,7 @@ mod linux {
|
||||
}
|
||||
}
|
||||
|
||||
// todo!("windows")
|
||||
// todo("windows")
|
||||
#[cfg(target_os = "windows")]
|
||||
mod windows {
|
||||
use std::path::Path;
|
||||
|
||||
@@ -5,6 +5,9 @@ edition = "2021"
|
||||
publish = false
|
||||
license = "GPL-3.0-or-later"
|
||||
|
||||
[lints]
|
||||
workspace = true
|
||||
|
||||
[lib]
|
||||
path = "src/client.rs"
|
||||
doctest = false
|
||||
|
||||
@@ -27,7 +27,7 @@ use release_channel::{AppVersion, ReleaseChannel};
|
||||
use rpc::proto::{AnyTypedEnvelope, EntityMessage, EnvelopedMessage, PeerId, RequestMessage};
|
||||
use schemars::JsonSchema;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use serde_json;
|
||||
|
||||
use settings::{Settings, SettingsStore};
|
||||
use std::{
|
||||
any::TypeId,
|
||||
@@ -61,7 +61,7 @@ lazy_static! {
|
||||
pub static ref ZED_APP_PATH: Option<PathBuf> =
|
||||
std::env::var("ZED_APP_PATH").ok().map(PathBuf::from);
|
||||
pub static ref ZED_ALWAYS_ACTIVE: bool =
|
||||
std::env::var("ZED_ALWAYS_ACTIVE").map_or(false, |e| e.len() > 0);
|
||||
std::env::var("ZED_ALWAYS_ACTIVE").map_or(false, |e| !e.is_empty());
|
||||
}
|
||||
|
||||
pub const INITIAL_RECONNECTION_DELAY: Duration = Duration::from_millis(100);
|
||||
@@ -427,7 +427,7 @@ impl Client {
|
||||
http: Arc<HttpClientWithUrl>,
|
||||
cx: &mut AppContext,
|
||||
) -> Arc<Self> {
|
||||
let client = Arc::new(Self {
|
||||
Arc::new(Self {
|
||||
id: AtomicU64::new(0),
|
||||
peer: Peer::new(0),
|
||||
telemetry: Telemetry::new(clock, http.clone(), cx),
|
||||
@@ -438,9 +438,7 @@ impl Client {
|
||||
authenticate: Default::default(),
|
||||
#[cfg(any(test, feature = "test-support"))]
|
||||
establish_connection: Default::default(),
|
||||
});
|
||||
|
||||
client
|
||||
})
|
||||
}
|
||||
|
||||
pub fn id(&self) -> u64 {
|
||||
@@ -573,17 +571,18 @@ impl Client {
|
||||
let mut state = self.state.write();
|
||||
if state.entities_by_type_and_remote_id.contains_key(&id) {
|
||||
return Err(anyhow!("already subscribed to entity"));
|
||||
} else {
|
||||
state
|
||||
.entities_by_type_and_remote_id
|
||||
.insert(id, WeakSubscriber::Pending(Default::default()));
|
||||
Ok(PendingEntitySubscription {
|
||||
client: self.clone(),
|
||||
remote_id,
|
||||
consumed: false,
|
||||
_entity_type: PhantomData,
|
||||
})
|
||||
}
|
||||
|
||||
state
|
||||
.entities_by_type_and_remote_id
|
||||
.insert(id, WeakSubscriber::Pending(Default::default()));
|
||||
|
||||
Ok(PendingEntitySubscription {
|
||||
client: self.clone(),
|
||||
remote_id,
|
||||
consumed: false,
|
||||
_entity_type: PhantomData,
|
||||
})
|
||||
}
|
||||
|
||||
#[track_caller]
|
||||
@@ -926,7 +925,7 @@ impl Client {
|
||||
move |cx| async move {
|
||||
match handle_io.await {
|
||||
Ok(()) => {
|
||||
if this.status().borrow().clone()
|
||||
if *this.status().borrow()
|
||||
== (Status::Connected {
|
||||
connection_id,
|
||||
peer_id,
|
||||
@@ -1335,7 +1334,7 @@ impl Client {
|
||||
pending.push(message);
|
||||
return;
|
||||
}
|
||||
Some(weak_subscriber @ _) => match weak_subscriber {
|
||||
Some(weak_subscriber) => match weak_subscriber {
|
||||
WeakSubscriber::Entity { handle } => {
|
||||
subscriber = handle.upgrade();
|
||||
}
|
||||
@@ -1438,21 +1437,29 @@ async fn delete_credentials_from_keychain(cx: &AsyncAppContext) -> Result<()> {
|
||||
.await
|
||||
}
|
||||
|
||||
const WORKTREE_URL_PREFIX: &str = "zed://worktrees/";
|
||||
/// prefix for the zed:// url scheme
|
||||
pub static ZED_URL_SCHEME: &str = "zed";
|
||||
|
||||
pub fn encode_worktree_url(id: u64, access_token: &str) -> String {
|
||||
format!("{}{}/{}", WORKTREE_URL_PREFIX, id, access_token)
|
||||
}
|
||||
|
||||
pub fn decode_worktree_url(url: &str) -> Option<(u64, String)> {
|
||||
let path = url.trim().strip_prefix(WORKTREE_URL_PREFIX)?;
|
||||
let mut parts = path.split('/');
|
||||
let id = parts.next()?.parse::<u64>().ok()?;
|
||||
let access_token = parts.next()?;
|
||||
if access_token.is_empty() {
|
||||
return None;
|
||||
/// Parses the given link into a Zed link.
|
||||
///
|
||||
/// Returns a [`Some`] containing the unprefixed link if the link is a Zed link.
|
||||
/// Returns [`None`] otherwise.
|
||||
pub fn parse_zed_link<'a>(link: &'a str, cx: &AppContext) -> Option<&'a str> {
|
||||
let server_url = &ClientSettings::get_global(cx).server_url;
|
||||
if let Some(stripped) = link
|
||||
.strip_prefix(server_url)
|
||||
.and_then(|result| result.strip_prefix('/'))
|
||||
{
|
||||
return Some(stripped);
|
||||
}
|
||||
Some((id, access_token.to_string()))
|
||||
if let Some(stripped) = link
|
||||
.strip_prefix(ZED_URL_SCHEME)
|
||||
.and_then(|result| result.strip_prefix("://"))
|
||||
{
|
||||
return Some(stripped);
|
||||
}
|
||||
|
||||
None
|
||||
}
|
||||
|
||||
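A minimal sketch of the link parsing shown above, with the server URL passed in as a parameter instead of being read from ClientSettings:

fn parse_zed_link<'a>(link: &'a str, server_url: &str) -> Option<&'a str> {
    // Either "<server_url>/rest-of-link" or "zed://rest-of-link" yields the unprefixed link.
    if let Some(stripped) = link
        .strip_prefix(server_url)
        .and_then(|rest| rest.strip_prefix('/'))
    {
        return Some(stripped);
    }
    link.strip_prefix("zed").and_then(|rest| rest.strip_prefix("://"))
}

fn main() {
    assert_eq!(
        parse_zed_link("https://zed.dev/channel/foo-1", "https://zed.dev"),
        Some("channel/foo-1")
    );
    assert_eq!(
        parse_zed_link("zed://channel/foo-1", "https://zed.dev"),
        Some("channel/foo-1")
    );
    assert_eq!(parse_zed_link("https://example.com/x", "https://zed.dev"), None);
}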
#[cfg(test)]
|
||||
@@ -1630,17 +1637,6 @@ mod tests {
|
||||
assert_eq!(*dropped_auth_count.lock(), 1);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_encode_and_decode_worktree_url() {
|
||||
let url = encode_worktree_url(5, "deadbeef");
|
||||
assert_eq!(decode_worktree_url(&url), Some((5, "deadbeef".to_string())));
|
||||
assert_eq!(
|
||||
decode_worktree_url(&format!("\n {}\t", url)),
|
||||
Some((5, "deadbeef".to_string()))
|
||||
);
|
||||
assert_eq!(decode_worktree_url("not://the-right-format"), None);
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_subscribing_to_entity(cx: &mut TestAppContext) {
|
||||
init_test(cx);
|
||||
|
||||
@@ -12,9 +12,7 @@ use settings::{Settings, SettingsStore};
|
||||
use sha2::{Digest, Sha256};
|
||||
use std::io::Write;
|
||||
use std::{env, mem, path::PathBuf, sync::Arc, time::Duration};
|
||||
use sysinfo::{
|
||||
CpuRefreshKind, Pid, PidExt, ProcessExt, ProcessRefreshKind, RefreshKind, System, SystemExt,
|
||||
};
|
||||
use sysinfo::{CpuRefreshKind, MemoryRefreshKind, Pid, ProcessRefreshKind, RefreshKind, System};
|
||||
use telemetry_events::{
|
||||
ActionEvent, AppEvent, AssistantEvent, AssistantKind, CallEvent, CopilotEvent, CpuEvent,
|
||||
EditEvent, EditorEvent, Event, EventRequestBody, EventWrapper, MemoryEvent, SettingEvent,
|
||||
@@ -84,7 +82,7 @@ impl Telemetry {
|
||||
TelemetrySettings::register(cx);
|
||||
|
||||
let state = Arc::new(Mutex::new(TelemetryState {
|
||||
settings: TelemetrySettings::get_global(cx).clone(),
|
||||
settings: *TelemetrySettings::get_global(cx),
|
||||
app_metadata: cx.app_metadata(),
|
||||
architecture: env::consts::ARCH,
|
||||
release_channel,
|
||||
@@ -119,7 +117,7 @@ impl Telemetry {
|
||||
|
||||
move |cx| {
|
||||
let mut state = state.lock();
|
||||
state.settings = TelemetrySettings::get_global(cx).clone();
|
||||
state.settings = *TelemetrySettings::get_global(cx);
|
||||
}
|
||||
})
|
||||
.detach();
|
||||
@@ -168,14 +166,14 @@ impl Telemetry {
|
||||
) {
|
||||
let mut state = self.state.lock();
|
||||
state.installation_id = installation_id.map(|id| id.into());
|
||||
state.session_id = Some(session_id.into());
|
||||
state.session_id = Some(session_id);
|
||||
drop(state);
|
||||
|
||||
let this = self.clone();
|
||||
cx.spawn(|_| async move {
|
||||
// Avoid calling `System::new_all()`, as there have been crashes related to it
|
||||
let refresh_kind = RefreshKind::new()
|
||||
.with_memory() // For memory usage
|
||||
.with_memory(MemoryRefreshKind::everything()) // For memory usage
|
||||
.with_processes(ProcessRefreshKind::everything()) // For process usage
|
||||
.with_cpu(CpuRefreshKind::everything()); // For core count
|
||||
|
||||
@@ -263,7 +261,7 @@ impl Telemetry {
|
||||
self: &Arc<Self>,
|
||||
conversation_id: Option<String>,
|
||||
kind: AssistantKind,
|
||||
model: &'static str,
|
||||
model: &str,
|
||||
) {
|
||||
let event = Event::Assistant(AssistantEvent {
|
||||
conversation_id,
|
||||
@@ -387,11 +385,9 @@ impl Telemetry {
|
||||
event,
|
||||
});
|
||||
|
||||
if state.installation_id.is_some() {
|
||||
if state.events_queue.len() >= state.max_queue_size {
|
||||
drop(state);
|
||||
self.flush_events();
|
||||
}
|
||||
if state.installation_id.is_some() && state.events_queue.len() >= state.max_queue_size {
|
||||
drop(state);
|
||||
self.flush_events();
|
||||
}
|
||||
}
|
||||
|
||||
@@ -433,7 +429,7 @@ impl Telemetry {
|
||||
json_bytes.clear();
|
||||
serde_json::to_writer(&mut json_bytes, event)?;
|
||||
file.write_all(&json_bytes)?;
|
||||
file.write(b"\n")?;
|
||||
file.write_all(b"\n")?;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -442,7 +438,7 @@ impl Telemetry {
|
||||
let request_body = EventRequestBody {
|
||||
installation_id: state.installation_id.as_deref().map(Into::into),
|
||||
session_id: state.session_id.clone(),
|
||||
is_staff: state.is_staff.clone(),
|
||||
is_staff: state.is_staff,
|
||||
app_version: state
|
||||
.app_metadata
|
||||
.app_version
|
||||
|
||||
@@ -24,6 +24,9 @@ impl std::fmt::Display for ChannelId {
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Clone, Copy)]
|
||||
pub struct ProjectId(pub u64);
|
||||
|
||||
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
|
||||
pub struct ParticipantIndex(pub u32);
|
||||
|
||||
@@ -653,7 +656,7 @@ impl UserStore {
|
||||
let users = response
|
||||
.users
|
||||
.into_iter()
|
||||
.map(|user| User::new(user))
|
||||
.map(User::new)
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
this.update(&mut cx, |this, _| {
|
||||
|
||||
@@ -5,6 +5,9 @@ edition = "2021"
|
||||
publish = false
|
||||
license = "GPL-3.0-or-later"
|
||||
|
||||
[lints]
|
||||
workspace = true
|
||||
|
||||
[lib]
|
||||
path = "src/clock.rs"
|
||||
doctest = false
|
||||
|
||||
@@ -19,5 +19,7 @@ ZED_CLIENT_CHECKSUM_SEED = "development-checksum-seed"
|
||||
# CLICKHOUSE_PASSWORD = ""
|
||||
# CLICKHOUSE_DATABASE = "default"
|
||||
|
||||
# SLACK_PANICS_WEBHOOK = ""
|
||||
|
||||
# RUST_LOG=info
|
||||
# LOG_JSON=true
|
||||
|
||||
@@ -7,23 +7,23 @@ version = "0.44.0"
|
||||
publish = false
|
||||
license = "AGPL-3.0-or-later"
|
||||
|
||||
[features]
|
||||
seed-support = ["reqwest"]
|
||||
[lints]
|
||||
workspace = true
|
||||
|
||||
[[bin]]
|
||||
name = "collab"
|
||||
|
||||
[[bin]]
|
||||
name = "seed"
|
||||
required-features = ["seed-support"]
|
||||
|
||||
[dependencies]
|
||||
anyhow.workspace = true
|
||||
async-tungstenite = "0.16"
|
||||
aws-config = { version = "1.1.5" }
|
||||
aws-sdk-s3 = { version = "1.15.0" }
|
||||
axum = { version = "0.5", features = ["json", "headers", "ws"] }
|
||||
axum-extra = { version = "0.3", features = ["erased-json"] }
|
||||
axum = { version = "0.6", features = ["json", "headers", "ws"] }
|
||||
axum-extra = { version = "0.4", features = ["erased-json"] }
|
||||
base64.workspace = true
|
||||
chrono.workspace = true
|
||||
clock.workspace = true
|
||||
clickhouse.workspace = true
|
||||
@@ -32,8 +32,6 @@ dashmap = "5.4"
|
||||
envy = "0.4.2"
|
||||
futures.workspace = true
|
||||
hex.workspace = true
|
||||
hyper = "0.14"
|
||||
lazy_static.workspace = true
|
||||
live_kit_server.workspace = true
|
||||
log.workspace = true
|
||||
nanoid = "0.4"
|
||||
@@ -41,7 +39,7 @@ parking_lot.workspace = true
|
||||
prometheus = "0.13"
|
||||
prost.workspace = true
|
||||
rand.workspace = true
|
||||
reqwest = { version = "0.11", features = ["json"], optional = true }
|
||||
reqwest = { version = "0.11", features = ["json"] }
|
||||
rpc.workspace = true
|
||||
scrypt = "0.7"
|
||||
sea-orm = { version = "0.12.x", features = ["sqlx-postgres", "postgres-array", "runtime-tokio-rustls", "with-uuid"] }
|
||||
@@ -51,6 +49,8 @@ serde_derive.workspace = true
|
||||
serde_json.workspace = true
|
||||
sha2.workspace = true
|
||||
sqlx = { version = "0.7", features = ["runtime-tokio-rustls", "postgres", "json", "time", "uuid", "any"] }
|
||||
subtle.workspace = true
|
||||
rustc-demangle.workspace = true
|
||||
telemetry_events.workspace = true
|
||||
text.workspace = true
|
||||
time.workspace = true
|
||||
@@ -59,8 +59,7 @@ toml.workspace = true
|
||||
tower = "0.4"
|
||||
tower-http = { workspace = true, features = ["trace"] }
|
||||
tracing = "0.1.34"
|
||||
tracing-log = "0.1.3"
|
||||
tracing-subscriber = { version = "0.3.11", features = ["env-filter", "json"] }
|
||||
tracing-subscriber = { version = "0.3.11", features = ["env-filter", "json", "registry", "tracing-log"] }
|
||||
util.workspace = true
|
||||
uuid.workspace = true
|
||||
|
||||
|
||||
@@ -11,6 +11,8 @@ metadata:
|
||||
namespace: ${ZED_KUBE_NAMESPACE}
|
||||
name: ${ZED_SERVICE_NAME}
|
||||
annotations:
|
||||
service.beta.kubernetes.io/do-loadbalancer-name: "${ZED_SERVICE_NAME}-${ZED_KUBE_NAMESPACE}"
|
||||
service.beta.kubernetes.io/do-loadbalancer-size-unit: "${ZED_LOAD_BALANCER_SIZE_UNIT}"
|
||||
service.beta.kubernetes.io/do-loadbalancer-tls-ports: "443"
|
||||
service.beta.kubernetes.io/do-loadbalancer-certificate-id: ${ZED_DO_CERTIFICATE_ID}
|
||||
service.beta.kubernetes.io/do-loadbalancer-disable-lets-encrypt-dns-records: "true"
|
||||
@@ -33,6 +35,11 @@ metadata:
|
||||
|
||||
spec:
|
||||
replicas: 1
|
||||
strategy:
|
||||
type: RollingUpdate
|
||||
rollingUpdate:
|
||||
maxSurge: 1
|
||||
maxUnavailable: 0
|
||||
selector:
|
||||
matchLabels:
|
||||
app: ${ZED_SERVICE_NAME}
|
||||
@@ -76,6 +83,13 @@ spec:
|
||||
port: 8080
|
||||
initialDelaySeconds: 1
|
||||
periodSeconds: 1
|
||||
startupProbe:
|
||||
httpGet:
|
||||
path: /
|
||||
port: 8080
|
||||
initialDelaySeconds: 1
|
||||
periodSeconds: 1
|
||||
failureThreshold: 15
|
||||
env:
|
||||
- name: HTTP_PORT
|
||||
value: "8080"
|
||||
@@ -156,6 +170,11 @@ spec:
|
||||
secretKeyRef:
|
||||
name: clickhouse
|
||||
key: database
|
||||
- name: SLACK_PANICS_WEBHOOK
|
||||
valueFrom:
|
||||
secretKeyRef:
|
||||
name: slack
|
||||
key: panics_webhook
|
||||
- name: INVITE_LINK_PREFIX
|
||||
value: ${INVITE_LINK_PREFIX}
|
||||
- name: RUST_BACKTRACE
|
||||
@@ -171,3 +190,4 @@ spec:
|
||||
# FIXME - Switch to the more restrictive `PERFMON` capability.
|
||||
# This capability isn't yet available in a stable version of Debian.
|
||||
add: ["SYS_ADMIN"]
|
||||
terminationGracePeriodSeconds: 10
|
||||
|
||||
@@ -5,6 +5,7 @@ metadata:
|
||||
namespace: ${ZED_KUBE_NAMESPACE}
|
||||
name: postgrest
|
||||
annotations:
|
||||
service.beta.kubernetes.io/do-loadbalancer-name: "postgrest-${ZED_KUBE_NAMESPACE}"
|
||||
service.beta.kubernetes.io/do-loadbalancer-tls-ports: "443"
|
||||
service.beta.kubernetes.io/do-loadbalancer-certificate-id: ${ZED_DO_CERTIFICATE_ID}
|
||||
service.beta.kubernetes.io/do-loadbalancer-disable-lets-encrypt-dns-records: "true"
|
||||
|
||||
@@ -46,10 +46,11 @@ CREATE UNIQUE INDEX "index_rooms_on_channel_id" ON "rooms" ("channel_id");
|
||||
CREATE TABLE "projects" (
|
||||
"id" INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
"room_id" INTEGER REFERENCES rooms (id) ON DELETE CASCADE NOT NULL,
|
||||
"host_user_id" INTEGER REFERENCES users (id) NOT NULL,
|
||||
"host_user_id" INTEGER REFERENCES users (id),
|
||||
"host_connection_id" INTEGER,
|
||||
"host_connection_server_id" INTEGER REFERENCES servers (id) ON DELETE CASCADE,
|
||||
"unregistered" BOOLEAN NOT NULL DEFAULT FALSE
|
||||
"unregistered" BOOLEAN NOT NULL DEFAULT FALSE,
|
||||
"hosted_project_id" INTEGER REFERENCES hosted_projects (id)
|
||||
);
|
||||
CREATE INDEX "index_projects_on_host_connection_server_id" ON "projects" ("host_connection_server_id");
|
||||
CREATE INDEX "index_projects_on_host_connection_id_and_host_connection_server_id" ON "projects" ("host_connection_id", "host_connection_server_id");
|
||||
@@ -247,7 +248,10 @@ CREATE UNIQUE INDEX "index_channel_members_on_channel_id_and_user_id" ON "channe
|
||||
CREATE TABLE "buffers" (
|
||||
"id" INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
"channel_id" INTEGER NOT NULL REFERENCES channels (id) ON DELETE CASCADE,
|
||||
"epoch" INTEGER NOT NULL DEFAULT 0
|
||||
"epoch" INTEGER NOT NULL DEFAULT 0,
|
||||
"latest_operation_epoch" INTEGER,
|
||||
"latest_operation_replica_id" INTEGER,
|
||||
"latest_operation_lamport_timestamp" INTEGER
|
||||
);
|
||||
|
||||
CREATE INDEX "index_buffers_on_channel_id" ON "buffers" ("channel_id");
|
||||
|
||||
@@ -0,0 +1,3 @@
|
||||
-- Add migration script here
|
||||
ALTER TABLE projects ALTER COLUMN host_user_id DROP NOT NULL;
|
||||
ALTER TABLE projects ADD COLUMN hosted_project_id INTEGER REFERENCES hosted_projects(id) UNIQUE NULL;
|
||||
@@ -0,0 +1,17 @@
|
||||
-- Add migration script here
|
||||
|
||||
ALTER TABLE buffers ADD COLUMN latest_operation_epoch INTEGER;
|
||||
ALTER TABLE buffers ADD COLUMN latest_operation_lamport_timestamp INTEGER;
|
||||
ALTER TABLE buffers ADD COLUMN latest_operation_replica_id INTEGER;
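-- Backfill the new columns from the most recent operation per buffer:
-- DISTINCT ON (buffer_id) keeps a single row per buffer, and the ORDER BY makes it the
-- row with the highest (epoch, lamport_timestamp, replica_id).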
|
||||
|
||||
WITH ops AS (
|
||||
SELECT DISTINCT ON (buffer_id) buffer_id, epoch, lamport_timestamp, replica_id
|
||||
FROM buffer_operations
|
||||
ORDER BY buffer_id, epoch DESC, lamport_timestamp DESC, replica_id DESC
|
||||
)
|
||||
UPDATE buffers
|
||||
SET latest_operation_epoch = ops.epoch,
|
||||
latest_operation_lamport_timestamp = ops.lamport_timestamp,
|
||||
latest_operation_replica_id = ops.replica_id
|
||||
FROM ops
|
||||
WHERE buffers.id = ops.buffer_id;
|
||||
@@ -1,5 +1,7 @@
|
||||
pub mod events;
|
||||
pub mod extensions;
|
||||
pub mod ips_file;
|
||||
pub mod slack;
|
||||
|
||||
use crate::{
|
||||
auth,
|
||||
@@ -9,7 +11,7 @@ use crate::{
|
||||
use anyhow::anyhow;
|
||||
use axum::{
|
||||
body::Body,
|
||||
extract::{Path, Query},
|
||||
extract::{self, Path, Query},
|
||||
http::{self, Request, StatusCode},
|
||||
middleware::{self, Next},
|
||||
response::IntoResponse,
|
||||
@@ -21,15 +23,13 @@ use chrono::SecondsFormat;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::sync::Arc;
|
||||
use tower::ServiceBuilder;
|
||||
use tracing::instrument;
|
||||
|
||||
pub use extensions::fetch_extensions_from_blob_store_periodically;
|
||||
|
||||
pub fn routes(rpc_server: Option<Arc<rpc::Server>>, state: Arc<AppState>) -> Router<Body> {
|
||||
pub fn routes(rpc_server: Option<Arc<rpc::Server>>, state: Arc<AppState>) -> Router<(), Body> {
|
||||
Router::new()
|
||||
.route("/user", get(get_authenticated_user))
|
||||
.route("/users/:id/access_tokens", post(create_access_token))
|
||||
.route("/panic", post(trace_panic))
|
||||
.route("/rpc_server_snapshot", get(get_rpc_server_snapshot))
|
||||
.route("/contributors", get(get_contributors).post(add_contributor))
|
||||
.route("/contributor", get(check_is_contributor))
|
||||
@@ -120,20 +120,6 @@ struct CreateUserResponse {
|
||||
metrics_id: String,
|
||||
}
|
||||
|
||||
#[derive(Debug, Deserialize)]
|
||||
struct Panic {
|
||||
version: String,
|
||||
release_channel: String,
|
||||
backtrace_hash: String,
|
||||
text: String,
|
||||
}
|
||||
|
||||
#[instrument(skip(panic))]
|
||||
async fn trace_panic(panic: Json<Panic>) -> Result<()> {
|
||||
tracing::error!(version = %panic.version, release_channel = %panic.release_channel, backtrace_hash = %panic.backtrace_hash, text = %panic.text, "panic report");
|
||||
Ok(())
|
||||
}
|
||||
|
||||
async fn get_rpc_server_snapshot(
|
||||
Extension(rpc_server): Extension<Option<Arc<rpc::Server>>>,
|
||||
) -> Result<ErasedJson> {
|
||||
@@ -190,17 +176,16 @@ async fn check_is_contributor(
|
||||
}
|
||||
|
||||
async fn add_contributor(
|
||||
Json(params): Json<AuthenticatedUserParams>,
|
||||
Extension(app): Extension<Arc<AppState>>,
|
||||
extract::Json(params): extract::Json<AuthenticatedUserParams>,
|
||||
) -> Result<()> {
|
||||
Ok(app
|
||||
.db
|
||||
app.db
|
||||
.add_contributor(
|
||||
¶ms.github_login,
|
||||
params.github_user_id,
|
||||
params.github_email.as_deref(),
|
||||
)
|
||||
.await?)
|
||||
.await
|
||||
}
|
||||
|
||||
#[derive(Deserialize)]
|
||||
|
||||
@@ -1,34 +1,38 @@
|
||||
use std::sync::Arc;
|
||||
use std::sync::{Arc, OnceLock};
|
||||
|
||||
use anyhow::{anyhow, Context};
|
||||
use aws_sdk_s3::primitives::ByteStream;
|
||||
use axum::{
|
||||
body::Bytes, headers::Header, http::HeaderName, routing::post, Extension, Router, TypedHeader,
|
||||
body::Bytes,
|
||||
headers::Header,
|
||||
http::{HeaderMap, HeaderName, StatusCode},
|
||||
routing::post,
|
||||
Extension, Router, TypedHeader,
|
||||
};
|
||||
use hyper::StatusCode;
|
||||
use lazy_static::lazy_static;
|
||||
use serde::{Serialize, Serializer};
|
||||
use sha2::{Digest, Sha256};
|
||||
use telemetry_events::{
|
||||
ActionEvent, AppEvent, AssistantEvent, CallEvent, CopilotEvent, CpuEvent, EditEvent,
|
||||
EditorEvent, Event, EventRequestBody, EventWrapper, MemoryEvent, SettingEvent,
|
||||
};
|
||||
use util::SemanticVersion;
|
||||
|
||||
use crate::{AppState, Error, Result};
|
||||
use crate::{api::slack, AppState, Error, Result};
|
||||
|
||||
use super::ips_file::IpsFile;
|
||||
|
||||
pub fn router() -> Router {
|
||||
Router::new().route("/telemetry/events", post(post_events))
|
||||
}
|
||||
|
||||
lazy_static! {
|
||||
static ref ZED_CHECKSUM_HEADER: HeaderName = HeaderName::from_static("x-zed-checksum");
|
||||
static ref CLOUDFLARE_IP_COUNTRY_HEADER: HeaderName = HeaderName::from_static("cf-ipcountry");
|
||||
Router::new()
|
||||
.route("/telemetry/events", post(post_events))
|
||||
.route("/telemetry/crashes", post(post_crash))
|
||||
}
|
||||
|
||||
pub struct ZedChecksumHeader(Vec<u8>);
|
||||
|
||||
impl Header for ZedChecksumHeader {
|
||||
fn name() -> &'static HeaderName {
|
||||
&ZED_CHECKSUM_HEADER
|
||||
static ZED_CHECKSUM_HEADER: OnceLock<HeaderName> = OnceLock::new();
|
||||
ZED_CHECKSUM_HEADER.get_or_init(|| HeaderName::from_static("x-zed-checksum"))
|
||||
}
|
||||
|
||||
fn decode<'i, I>(values: &mut I) -> Result<Self, axum::headers::Error>
|
||||
@@ -55,7 +59,8 @@ pub struct CloudflareIpCountryHeader(String);
|
||||
|
||||
impl Header for CloudflareIpCountryHeader {
|
||||
fn name() -> &'static HeaderName {
|
||||
&CLOUDFLARE_IP_COUNTRY_HEADER
|
||||
static CLOUDFLARE_IP_COUNTRY_HEADER: OnceLock<HeaderName> = OnceLock::new();
|
||||
CLOUDFLARE_IP_COUNTRY_HEADER.get_or_init(|| HeaderName::from_static("cf-ipcountry"))
|
||||
}
|
||||
|
||||
fn decode<'i, I>(values: &mut I) -> Result<Self, axum::headers::Error>
|
||||
@@ -77,6 +82,140 @@ impl Header for CloudflareIpCountryHeader {
|
||||
}
|
||||
}
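This hunk replaces the lazy_static! header names with OnceLock statics. A minimal sketch of
the pattern, using the HeaderName type imported above (the helper name is hypothetical):

use std::sync::OnceLock;
use axum::http::HeaderName;

fn zed_checksum_header_name() -> &'static HeaderName {
    // The static starts empty; the header name is built exactly once, on first
    // access, and every later call returns the same 'static reference.
    static HEADER: OnceLock<HeaderName> = OnceLock::new();
    HEADER.get_or_init(|| HeaderName::from_static("x-zed-checksum"))
}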
|
||||
|
||||
pub async fn post_crash(
|
||||
Extension(app): Extension<Arc<AppState>>,
|
||||
headers: HeaderMap,
|
||||
body: Bytes,
|
||||
) -> Result<()> {
|
||||
static CRASH_REPORTS_BUCKET: &str = "zed-crash-reports";
|
||||
|
||||
let report = IpsFile::parse(&body)?;
|
||||
let version_threshold = SemanticVersion::new(0, 123, 0);
|
||||
|
||||
let bundle_id = &report.header.bundle_id;
|
||||
let app_version = &report.app_version();
|
||||
|
||||
if bundle_id == "dev.zed.Zed-Dev" {
|
||||
log::error!("Crash uploads from {} are ignored.", bundle_id);
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
if app_version.is_none() || app_version.unwrap() < version_threshold {
|
||||
log::error!(
|
||||
"Crash uploads from {} are ignored.",
|
||||
report.header.app_version
|
||||
);
|
||||
return Ok(());
|
||||
}
|
||||
let app_version = app_version.unwrap();
|
||||
|
||||
if let Some(blob_store_client) = app.blob_store_client.as_ref() {
|
||||
let response = blob_store_client
|
||||
.head_object()
|
||||
.bucket(CRASH_REPORTS_BUCKET)
|
||||
.key(report.header.incident_id.clone() + ".ips")
|
||||
.send()
|
||||
.await;
|
||||
|
||||
if response.is_ok() {
|
||||
log::info!("We've already uploaded this crash");
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
blob_store_client
|
||||
.put_object()
|
||||
.bucket(CRASH_REPORTS_BUCKET)
|
||||
.key(report.header.incident_id.clone() + ".ips")
|
||||
.acl(aws_sdk_s3::types::ObjectCannedAcl::PublicRead)
|
||||
.body(ByteStream::from(body.to_vec()))
|
||||
.send()
|
||||
.await
|
||||
.map_err(|e| log::error!("Failed to upload crash: {}", e))
|
||||
.ok();
|
||||
}
|
||||
|
||||
let recent_panic_on: Option<i64> = headers
|
||||
.get("x-zed-panicked-on")
|
||||
.and_then(|h| h.to_str().ok())
|
||||
.and_then(|s| s.parse().ok());
|
||||
let mut recent_panic = None;
|
||||
|
||||
if let Some(recent_panic_on) = recent_panic_on {
|
||||
let crashed_at = match report.timestamp() {
|
||||
Ok(t) => Some(t),
|
||||
Err(e) => {
|
||||
log::error!("Can't parse {}: {}", report.header.timestamp, e);
|
||||
None
|
||||
}
|
||||
};
|
||||
if crashed_at.is_some_and(|t| (t.timestamp_millis() - recent_panic_on).abs() <= 30000) {
|
||||
recent_panic = headers.get("x-zed-panic").and_then(|h| h.to_str().ok());
|
||||
}
|
||||
}
|
||||
|
||||
let description = report.description(recent_panic);
|
||||
let summary = report.backtrace_summary();
|
||||
|
||||
tracing::error!(
|
||||
service = "client",
|
||||
version = %report.header.app_version,
|
||||
os_version = %report.header.os_version,
|
||||
bundle_id = %report.header.bundle_id,
|
||||
incident_id = %report.header.incident_id,
|
||||
description = %description,
|
||||
backtrace = %summary,
|
||||
"crash report");
|
||||
|
||||
if let Some(slack_panics_webhook) = app.config.slack_panics_webhook.clone() {
|
||||
let payload = slack::WebhookBody::new(|w| {
|
||||
w.add_section(|s| s.text(slack::Text::markdown(description)))
|
||||
.add_section(|s| {
|
||||
s.add_field(slack::Text::markdown(format!(
|
||||
"*Version:*\n{} ({})",
|
||||
bundle_id, app_version
|
||||
)))
|
||||
.add_field({
|
||||
let hostname = app.config.blob_store_url.clone().unwrap_or_default();
|
||||
let hostname = hostname.strip_prefix("https://").unwrap_or_else(|| {
|
||||
hostname.strip_prefix("http://").unwrap_or_default()
|
||||
});
|
||||
|
||||
slack::Text::markdown(format!(
|
||||
"*Incident:*\n<https://{}.{}/{}.ips|{}…>",
|
||||
CRASH_REPORTS_BUCKET,
|
||||
hostname,
|
||||
report.header.incident_id,
|
||||
report
|
||||
.header
|
||||
.incident_id
|
||||
.chars()
|
||||
.take(8)
|
||||
.collect::<String>(),
|
||||
))
|
||||
})
|
||||
})
|
||||
.add_rich_text(|r| r.add_preformatted(|p| p.add_text(summary)))
|
||||
});
|
||||
let payload_json = serde_json::to_string(&payload).map_err(|err| {
|
||||
log::error!("Failed to serialize payload to JSON: {err}");
|
||||
Error::Internal(anyhow!(err))
|
||||
})?;
|
||||
|
||||
reqwest::Client::new()
|
||||
.post(slack_panics_webhook)
|
||||
.header("Content-Type", "application/json")
|
||||
.body(payload_json)
|
||||
.send()
|
||||
.await
|
||||
.map_err(|err| {
|
||||
log::error!("Failed to send payload to Slack: {err}");
|
||||
Error::Internal(anyhow!(err))
|
||||
})?;
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub async fn post_events(
|
||||
Extension(app): Extension<Arc<AppState>>,
|
||||
TypedHeader(ZedChecksumHeader(checksum)): TypedHeader<ZedChecksumHeader>,
|
||||
@@ -102,7 +241,7 @@ pub async fn post_events(
|
||||
summer.update(&body);
|
||||
summer.update(checksum_seed);
|
||||
|
||||
if &checksum[..] != &summer.finalize()[..] {
|
||||
if &checksum != &summer.finalize()[..] {
|
||||
return Err(Error::Http(
|
||||
StatusCode::BAD_REQUEST,
|
||||
"invalid checksum".into(),
|
||||
@@ -219,40 +358,68 @@ struct ToUpload {
|
||||
|
||||
impl ToUpload {
|
||||
pub async fn upload(&self, clickhouse_client: &clickhouse::Client) -> anyhow::Result<()> {
|
||||
Self::upload_to_table("editor_events", &self.editor_events, clickhouse_client)
|
||||
const EDITOR_EVENTS_TABLE: &str = "editor_events";
|
||||
Self::upload_to_table(EDITOR_EVENTS_TABLE, &self.editor_events, clickhouse_client)
|
||||
.await
|
||||
.with_context(|| format!("failed to upload to table 'editor_events'"))?;
|
||||
Self::upload_to_table("copilot_events", &self.copilot_events, clickhouse_client)
|
||||
.await
|
||||
.with_context(|| format!("failed to upload to table 'copilot_events'"))?;
|
||||
.with_context(|| format!("failed to upload to table '{EDITOR_EVENTS_TABLE}'"))?;
|
||||
|
||||
const COPILOT_EVENTS_TABLE: &str = "copilot_events";
|
||||
Self::upload_to_table(
|
||||
"assistant_events",
|
||||
COPILOT_EVENTS_TABLE,
|
||||
&self.copilot_events,
|
||||
clickhouse_client,
|
||||
)
|
||||
.await
|
||||
.with_context(|| format!("failed to upload to table '{COPILOT_EVENTS_TABLE}'"))?;
|
||||
|
||||
const ASSISTANT_EVENTS_TABLE: &str = "assistant_events";
|
||||
Self::upload_to_table(
|
||||
ASSISTANT_EVENTS_TABLE,
|
||||
&self.assistant_events,
|
||||
clickhouse_client,
|
||||
)
|
||||
.await
|
||||
.with_context(|| format!("failed to upload to table 'assistant_events'"))?;
|
||||
Self::upload_to_table("call_events", &self.call_events, clickhouse_client)
|
||||
.with_context(|| format!("failed to upload to table '{ASSISTANT_EVENTS_TABLE}'"))?;
|
||||
|
||||
const CALL_EVENTS_TABLE: &str = "call_events";
|
||||
Self::upload_to_table(CALL_EVENTS_TABLE, &self.call_events, clickhouse_client)
|
||||
.await
|
||||
.with_context(|| format!("failed to upload to table 'call_events'"))?;
|
||||
Self::upload_to_table("cpu_events", &self.cpu_events, clickhouse_client)
|
||||
.with_context(|| format!("failed to upload to table '{CALL_EVENTS_TABLE}'"))?;
|
||||
|
||||
const CPU_EVENTS_TABLE: &str = "cpu_events";
|
||||
Self::upload_to_table(CPU_EVENTS_TABLE, &self.cpu_events, clickhouse_client)
|
||||
.await
|
||||
.with_context(|| format!("failed to upload to table 'cpu_events'"))?;
|
||||
Self::upload_to_table("memory_events", &self.memory_events, clickhouse_client)
|
||||
.with_context(|| format!("failed to upload to table '{CPU_EVENTS_TABLE}'"))?;
|
||||
|
||||
const MEMORY_EVENTS_TABLE: &str = "memory_events";
|
||||
Self::upload_to_table(MEMORY_EVENTS_TABLE, &self.memory_events, clickhouse_client)
|
||||
.await
|
||||
.with_context(|| format!("failed to upload to table 'memory_events'"))?;
|
||||
Self::upload_to_table("app_events", &self.app_events, clickhouse_client)
|
||||
.with_context(|| format!("failed to upload to table '{MEMORY_EVENTS_TABLE}'"))?;
|
||||
|
||||
const APP_EVENTS_TABLE: &str = "app_events";
|
||||
Self::upload_to_table(APP_EVENTS_TABLE, &self.app_events, clickhouse_client)
|
||||
.await
|
||||
.with_context(|| format!("failed to upload to table 'app_events'"))?;
|
||||
Self::upload_to_table("setting_events", &self.setting_events, clickhouse_client)
|
||||
.with_context(|| format!("failed to upload to table '{APP_EVENTS_TABLE}'"))?;
|
||||
|
||||
const SETTING_EVENTS_TABLE: &str = "setting_events";
|
||||
Self::upload_to_table(
|
||||
SETTING_EVENTS_TABLE,
|
||||
&self.setting_events,
|
||||
clickhouse_client,
|
||||
)
|
||||
.await
|
||||
.with_context(|| format!("failed to upload to table '{SETTING_EVENTS_TABLE}'"))?;
|
||||
|
||||
const EDIT_EVENTS_TABLE: &str = "edit_events";
|
||||
Self::upload_to_table(EDIT_EVENTS_TABLE, &self.edit_events, clickhouse_client)
|
||||
.await
|
||||
.with_context(|| format!("failed to upload to table 'setting_events'"))?;
|
||||
Self::upload_to_table("edit_events", &self.edit_events, clickhouse_client)
|
||||
.with_context(|| format!("failed to upload to table '{EDIT_EVENTS_TABLE}'"))?;
|
||||
|
||||
const ACTION_EVENTS_TABLE: &str = "action_events";
|
||||
Self::upload_to_table(ACTION_EVENTS_TABLE, &self.action_events, clickhouse_client)
|
||||
.await
|
||||
.with_context(|| format!("failed to upload to table 'edit_events'"))?;
|
||||
Self::upload_to_table("action_events", &self.action_events, clickhouse_client)
|
||||
.await
|
||||
.with_context(|| format!("failed to upload to table 'action_events'"))?;
|
||||
.with_context(|| format!("failed to upload to table '{ACTION_EVENTS_TABLE}'"))?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
|
||||
@@ -7,12 +7,12 @@ use anyhow::{anyhow, Context as _};
|
||||
use aws_sdk_s3::presigning::PresigningConfig;
|
||||
use axum::{
|
||||
extract::{Path, Query},
|
||||
http::StatusCode,
|
||||
response::Redirect,
|
||||
routing::get,
|
||||
Extension, Json, Router,
|
||||
};
|
||||
use collections::HashMap;
|
||||
use hyper::StatusCode;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::{sync::Arc, time::Duration};
|
||||
use time::PrimitiveDateTime;
|
||||
@@ -140,6 +140,8 @@ async fn fetch_extensions_from_blob_store(
|
||||
blob_store_bucket: &String,
|
||||
app_state: &Arc<AppState>,
|
||||
) -> anyhow::Result<()> {
|
||||
log::info!("fetching extensions from blob store");
|
||||
|
||||
let list = blob_store_client
|
||||
.list_objects()
|
||||
.bucket(blob_store_bucket)
|
||||
@@ -147,9 +149,7 @@ async fn fetch_extensions_from_blob_store(
|
||||
.send()
|
||||
.await?;
|
||||
|
||||
let objects = list
|
||||
.contents
|
||||
.ok_or_else(|| anyhow!("missing bucket contents"))?;
|
||||
let objects = list.contents.unwrap_or_default();
|
||||
|
||||
let mut published_versions = HashMap::<&str, Vec<&str>>::default();
|
||||
for object in &objects {
|
||||
@@ -166,10 +166,12 @@ async fn fetch_extensions_from_blob_store(
|
||||
let Some(version) = parts.next() else {
|
||||
continue;
|
||||
};
|
||||
published_versions
|
||||
.entry(extension_id)
|
||||
.or_default()
|
||||
.push(version);
|
||||
if parts.next() == Some("manifest.json") {
|
||||
published_versions
|
||||
.entry(extension_id)
|
||||
.or_default()
|
||||
.push(version);
|
||||
}
|
||||
}
|
||||
|
||||
let known_versions = app_state.db.get_known_extension_versions().await?;
|
||||
@@ -184,46 +186,20 @@ async fn fetch_extensions_from_blob_store(
|
||||
.binary_search_by_key(&published_version, String::as_str)
|
||||
.is_err()
|
||||
{
|
||||
let object = blob_store_client
|
||||
.get_object()
|
||||
.bucket(blob_store_bucket)
|
||||
.key(format!(
|
||||
"extensions/{extension_id}/{published_version}/manifest.json"
|
||||
))
|
||||
.send()
|
||||
.await?;
|
||||
let manifest_bytes = object
|
||||
.body
|
||||
.collect()
|
||||
.await
|
||||
.map(|data| data.into_bytes())
|
||||
.with_context(|| format!("failed to download manifest for extension {extension_id} version {published_version}"))?
|
||||
.to_vec();
|
||||
let manifest = serde_json::from_slice::<ExtensionManifest>(&manifest_bytes)
|
||||
.with_context(|| format!("invalid manifest for extension {extension_id} version {published_version}: {}", String::from_utf8_lossy(&manifest_bytes)))?;
|
||||
|
||||
let published_at = object.last_modified.ok_or_else(|| anyhow!("missing last modified timestamp for extension {extension_id} version {published_version}"))?;
|
||||
let published_at =
|
||||
time::OffsetDateTime::from_unix_timestamp_nanos(published_at.as_nanos())?;
|
||||
let published_at = PrimitiveDateTime::new(published_at.date(), published_at.time());
|
||||
|
||||
let version = semver::Version::parse(&manifest.version).with_context(|| {
|
||||
format!(
|
||||
"invalid version for extension {extension_id} version {published_version}"
|
||||
)
|
||||
})?;
|
||||
|
||||
new_versions
|
||||
.entry(extension_id)
|
||||
.or_default()
|
||||
.push(NewExtensionVersion {
|
||||
name: manifest.name,
|
||||
version,
|
||||
description: manifest.description.unwrap_or_default(),
|
||||
authors: manifest.authors,
|
||||
repository: manifest.repository,
|
||||
published_at,
|
||||
});
|
||||
if let Some(extension) = fetch_extension_manifest(
|
||||
blob_store_client,
|
||||
blob_store_bucket,
|
||||
extension_id,
|
||||
published_version,
|
||||
)
|
||||
.await
|
||||
.log_err()
|
||||
{
|
||||
new_versions
|
||||
.entry(extension_id)
|
||||
.or_default()
|
||||
.push(extension);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -233,5 +209,56 @@ async fn fetch_extensions_from_blob_store(
|
||||
.insert_extension_versions(&new_versions)
|
||||
.await?;
|
||||
|
||||
log::info!(
|
||||
"fetched {} new extensions from blob store",
|
||||
new_versions.values().map(|v| v.len()).sum::<usize>()
|
||||
);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
async fn fetch_extension_manifest(
|
||||
blob_store_client: &aws_sdk_s3::Client,
|
||||
blob_store_bucket: &String,
|
||||
extension_id: &str,
|
||||
version: &str,
|
||||
) -> Result<NewExtensionVersion, anyhow::Error> {
|
||||
let object = blob_store_client
|
||||
.get_object()
|
||||
.bucket(blob_store_bucket)
|
||||
.key(format!("extensions/{extension_id}/{version}/manifest.json"))
|
||||
.send()
|
||||
.await?;
|
||||
let manifest_bytes = object
|
||||
.body
|
||||
.collect()
|
||||
.await
|
||||
.map(|data| data.into_bytes())
|
||||
.with_context(|| {
|
||||
format!("failed to download manifest for extension {extension_id} version {version}")
|
||||
})?
|
||||
.to_vec();
|
||||
let manifest =
|
||||
serde_json::from_slice::<ExtensionManifest>(&manifest_bytes).with_context(|| {
|
||||
format!(
|
||||
"invalid manifest for extension {extension_id} version {version}: {}",
|
||||
String::from_utf8_lossy(&manifest_bytes)
|
||||
)
|
||||
})?;
|
||||
let published_at = object.last_modified.ok_or_else(|| {
|
||||
anyhow!("missing last modified timestamp for extension {extension_id} version {version}")
|
||||
})?;
|
||||
let published_at = time::OffsetDateTime::from_unix_timestamp_nanos(published_at.as_nanos())?;
|
||||
let published_at = PrimitiveDateTime::new(published_at.date(), published_at.time());
|
||||
let version = semver::Version::parse(&manifest.version).with_context(|| {
|
||||
format!("invalid version for extension {extension_id} version {version}")
|
||||
})?;
|
||||
Ok(NewExtensionVersion {
|
||||
name: manifest.name,
|
||||
version,
|
||||
description: manifest.description.unwrap_or_default(),
|
||||
authors: manifest.authors,
|
||||
repository: manifest.repository,
|
||||
published_at,
|
||||
})
|
||||
}
|
||||
|
||||
crates/collab/src/api/ips_file.rs (new file, 352 lines)
@@ -0,0 +1,352 @@
|
||||
use collections::HashMap;
|
||||
|
||||
use serde_derive::Deserialize;
|
||||
use serde_derive::Serialize;
|
||||
use serde_json::Value;
|
||||
use util::SemanticVersion;
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct IpsFile {
|
||||
pub header: Header,
|
||||
pub body: Body,
|
||||
}
|
||||
|
||||
impl IpsFile {
|
||||
pub fn parse(bytes: &[u8]) -> anyhow::Result<IpsFile> {
|
||||
let mut split = bytes.splitn(2, |&b| b == b'\n');
|
||||
let header_bytes = split
|
||||
.next()
|
||||
.ok_or_else(|| anyhow::anyhow!("No header found"))?;
|
||||
let header: Header = serde_json::from_slice(header_bytes)
|
||||
.map_err(|e| anyhow::anyhow!("Failed to parse header: {}", e))?;
|
||||
|
||||
let body_bytes = split
|
||||
.next()
|
||||
.ok_or_else(|| anyhow::anyhow!("No body found"))?;
|
||||
|
||||
let body: Body = serde_json::from_slice(body_bytes)
|
||||
.map_err(|e| anyhow::anyhow!("Failed to parse body: {}", e))?;
|
||||
Ok(IpsFile { header, body })
|
||||
}
|
||||
|
||||
pub fn faulting_thread(&self) -> Option<&Thread> {
|
||||
self.body.threads.get(self.body.faulting_thread? as usize)
|
||||
}
|
||||
|
||||
pub fn app_version(&self) -> Option<SemanticVersion> {
|
||||
self.header.app_version.parse().ok()
|
||||
}
|
||||
|
||||
pub fn timestamp(&self) -> anyhow::Result<chrono::DateTime<chrono::FixedOffset>> {
|
||||
chrono::DateTime::parse_from_str(&self.header.timestamp, "%Y-%m-%d %H:%M:%S%.f %#z")
|
||||
.map_err(|e| anyhow::anyhow!(e))
|
||||
}
|
||||
|
||||
pub fn description(&self, panic: Option<&str>) -> String {
|
||||
let mut desc = if self.body.termination.indicator == "Abort trap: 6" {
|
||||
match panic {
|
||||
Some(panic_message) => format!("Panic `{}`", panic_message),
|
||||
None => "Crash `Abort trap: 6` (possible panic)".into(),
|
||||
}
|
||||
} else if let Some(msg) = &self.body.exception.message {
|
||||
format!("Exception `{}`", msg)
|
||||
} else {
|
||||
format!("Crash `{}`", self.body.termination.indicator)
|
||||
};
|
||||
if let Some(thread) = self.faulting_thread() {
|
||||
if let Some(queue) = thread.queue.as_ref() {
|
||||
desc += &format!(
|
||||
" on thread {} ({})",
|
||||
self.body.faulting_thread.unwrap_or_default(),
|
||||
queue
|
||||
);
|
||||
} else {
|
||||
desc += &format!(
|
||||
" on thread {} ({})",
|
||||
self.body.faulting_thread.unwrap_or_default(),
|
||||
thread.name.clone().unwrap_or_default()
|
||||
);
|
||||
}
|
||||
}
|
||||
desc
|
||||
}
|
||||
|
||||
pub fn backtrace_summary(&self) -> String {
|
||||
if let Some(thread) = self.faulting_thread() {
|
||||
let mut frames = thread
|
||||
.frames
|
||||
.iter()
|
||||
.filter_map(|frame| {
|
||||
if let Some(name) = &frame.symbol {
|
||||
if self.is_ignorable_frame(name) {
|
||||
return None;
|
||||
}
|
||||
Some(format!("{:#}", rustc_demangle::demangle(name)))
|
||||
} else if let Some(image) = self.body.used_images.get(frame.image_index) {
|
||||
Some(image.name.clone().unwrap_or("<unknown-image>".into()))
|
||||
} else {
|
||||
Some("<unknown>".into())
|
||||
}
|
||||
})
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
let total = frames.len();
|
||||
if total > 21 {
|
||||
frames = frames.into_iter().take(20).collect();
|
||||
frames.push(format!(" and {} more...", total - 20))
|
||||
}
|
||||
frames.join("\n")
|
||||
} else {
|
||||
"<no backtrace available>".into()
|
||||
}
|
||||
}
|
||||
|
||||
fn is_ignorable_frame(&self, symbol: &String) -> bool {
|
||||
[
|
||||
"pthread_kill",
|
||||
"panic",
|
||||
"backtrace",
|
||||
"rust_begin_unwind",
|
||||
"abort",
|
||||
]
|
||||
.iter()
|
||||
.any(|s| symbol.contains(s))
|
||||
}
|
||||
}
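A minimal usage sketch for the parser above (the function name is hypothetical; it only
calls methods defined in this impl):

fn summarize_crash(bytes: &[u8]) -> anyhow::Result<String> {
    // An .ips report is a JSON header line followed by a JSON body.
    let report = IpsFile::parse(bytes)?;
    Ok(format!(
        "{}\n{}",
        report.description(None),   // e.g. "Crash `...` on thread 0 (main)"
        report.backtrace_summary(), // demangled frames from the faulting thread
    ))
}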
|
||||
|
||||
#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
|
||||
#[serde(default)]
|
||||
pub struct Header {
|
||||
pub app_name: String,
|
||||
pub timestamp: String,
|
||||
pub app_version: String,
|
||||
pub slice_uuid: String,
|
||||
pub build_version: String,
|
||||
pub platform: i64,
|
||||
#[serde(rename = "bundleID", default)]
|
||||
pub bundle_id: String,
|
||||
pub share_with_app_devs: i64,
|
||||
pub is_first_party: i64,
|
||||
pub bug_type: String,
|
||||
pub os_version: String,
|
||||
pub roots_installed: i64,
|
||||
pub name: String,
|
||||
pub incident_id: String,
|
||||
}
|
||||
#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
|
||||
#[serde(rename_all = "camelCase", default)]
|
||||
pub struct Body {
|
||||
pub uptime: i64,
|
||||
pub proc_role: String,
|
||||
pub version: i64,
|
||||
#[serde(rename = "userID")]
|
||||
pub user_id: i64,
|
||||
pub deploy_version: i64,
|
||||
pub model_code: String,
|
||||
#[serde(rename = "coalitionID")]
|
||||
pub coalition_id: i64,
|
||||
pub os_version: OsVersion,
|
||||
pub capture_time: String,
|
||||
pub code_signing_monitor: i64,
|
||||
pub incident: String,
|
||||
pub pid: i64,
|
||||
pub translated: bool,
|
||||
pub cpu_type: String,
|
||||
#[serde(rename = "roots_installed")]
|
||||
pub roots_installed: i64,
|
||||
#[serde(rename = "bug_type")]
|
||||
pub bug_type: String,
|
||||
pub proc_launch: String,
|
||||
pub proc_start_abs_time: i64,
|
||||
pub proc_exit_abs_time: i64,
|
||||
pub proc_name: String,
|
||||
pub proc_path: String,
|
||||
pub bundle_info: BundleInfo,
|
||||
pub store_info: StoreInfo,
|
||||
pub parent_proc: String,
|
||||
pub parent_pid: i64,
|
||||
pub coalition_name: String,
|
||||
pub crash_reporter_key: String,
|
||||
#[serde(rename = "codeSigningID")]
|
||||
pub code_signing_id: String,
|
||||
#[serde(rename = "codeSigningTeamID")]
|
||||
pub code_signing_team_id: String,
|
||||
pub code_signing_flags: i64,
|
||||
pub code_signing_validation_category: i64,
|
||||
pub code_signing_trust_level: i64,
|
||||
pub instruction_byte_stream: InstructionByteStream,
|
||||
pub sip: String,
|
||||
pub exception: Exception,
|
||||
pub termination: Termination,
|
||||
pub asi: Asi,
|
||||
pub ext_mods: ExtMods,
|
||||
pub faulting_thread: Option<i64>,
|
||||
pub threads: Vec<Thread>,
|
||||
pub used_images: Vec<UsedImage>,
|
||||
pub shared_cache: SharedCache,
|
||||
pub vm_summary: String,
|
||||
pub legacy_info: LegacyInfo,
|
||||
pub log_writing_signature: String,
|
||||
pub trial_info: TrialInfo,
|
||||
}
|
||||
|
||||
#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
|
||||
#[serde(rename_all = "camelCase", default)]
|
||||
pub struct OsVersion {
|
||||
pub train: String,
|
||||
pub build: String,
|
||||
pub release_type: String,
|
||||
}
|
||||
|
||||
#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
|
||||
#[serde(rename_all = "camelCase", default)]
|
||||
pub struct BundleInfo {
|
||||
#[serde(rename = "CFBundleShortVersionString")]
|
||||
pub cfbundle_short_version_string: String,
|
||||
#[serde(rename = "CFBundleVersion")]
|
||||
pub cfbundle_version: String,
|
||||
#[serde(rename = "CFBundleIdentifier")]
|
||||
pub cfbundle_identifier: String,
|
||||
}
|
||||
|
||||
#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
|
||||
#[serde(rename_all = "camelCase", default)]
|
||||
pub struct StoreInfo {
|
||||
pub device_identifier_for_vendor: String,
|
||||
pub third_party: bool,
|
||||
}
|
||||
|
||||
#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
|
||||
#[serde(rename_all = "camelCase", default)]
|
||||
pub struct InstructionByteStream {
|
||||
#[serde(rename = "beforePC")]
|
||||
pub before_pc: String,
|
||||
#[serde(rename = "atPC")]
|
||||
pub at_pc: String,
|
||||
}
|
||||
|
||||
#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
|
||||
#[serde(rename_all = "camelCase", default)]
|
||||
pub struct Exception {
|
||||
pub codes: String,
|
||||
pub raw_codes: Vec<i64>,
|
||||
#[serde(rename = "type")]
|
||||
pub type_field: String,
|
||||
pub subtype: Option<String>,
|
||||
pub signal: String,
|
||||
pub port: Option<i64>,
|
||||
pub guard_id: Option<i64>,
|
||||
pub message: Option<String>,
|
||||
}
|
||||
|
||||
#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
|
||||
#[serde(rename_all = "camelCase", default)]
|
||||
pub struct Termination {
|
||||
pub flags: i64,
|
||||
pub code: i64,
|
||||
pub namespace: String,
|
||||
pub indicator: String,
|
||||
pub by_proc: String,
|
||||
pub by_pid: i64,
|
||||
}
|
||||
|
||||
#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
|
||||
#[serde(rename_all = "camelCase", default)]
|
||||
pub struct Asi {
|
||||
#[serde(rename = "libsystem_c.dylib")]
|
||||
pub libsystem_c_dylib: Vec<String>,
|
||||
}
|
||||
|
||||
#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
|
||||
#[serde(rename_all = "camelCase", default)]
|
||||
pub struct ExtMods {
|
||||
pub caller: ExtMod,
|
||||
pub system: ExtMod,
|
||||
pub targeted: ExtMod,
|
||||
pub warnings: i64,
|
||||
}
|
||||
|
||||
#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
|
||||
#[serde(rename_all = "camelCase", default)]
|
||||
pub struct ExtMod {
|
||||
#[serde(rename = "thread_create")]
|
||||
pub thread_create: i64,
|
||||
#[serde(rename = "thread_set_state")]
|
||||
pub thread_set_state: i64,
|
||||
#[serde(rename = "task_for_pid")]
|
||||
pub task_for_pid: i64,
|
||||
}
|
||||
|
||||
#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
|
||||
#[serde(rename_all = "camelCase", default)]
|
||||
pub struct Thread {
|
||||
pub thread_state: HashMap<String, Value>,
|
||||
pub id: i64,
|
||||
pub triggered: Option<bool>,
|
||||
pub name: Option<String>,
|
||||
pub queue: Option<String>,
|
||||
pub frames: Vec<Frame>,
|
||||
}
|
||||
|
||||
#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
|
||||
#[serde(rename_all = "camelCase", default)]
|
||||
pub struct Frame {
|
||||
pub image_offset: i64,
|
||||
pub symbol: Option<String>,
|
||||
pub symbol_location: Option<i64>,
|
||||
pub image_index: usize,
|
||||
}
|
||||
|
||||
#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
|
||||
#[serde(rename_all = "camelCase", default)]
|
||||
pub struct UsedImage {
|
||||
pub source: String,
|
||||
pub arch: Option<String>,
|
||||
pub base: i64,
|
||||
#[serde(rename = "CFBundleShortVersionString")]
|
||||
pub cfbundle_short_version_string: Option<String>,
|
||||
#[serde(rename = "CFBundleIdentifier")]
|
||||
pub cfbundle_identifier: Option<String>,
|
||||
pub size: i64,
|
||||
pub uuid: String,
|
||||
pub path: Option<String>,
|
||||
pub name: Option<String>,
|
||||
#[serde(rename = "CFBundleVersion")]
|
||||
pub cfbundle_version: Option<String>,
|
||||
}
|
||||
|
||||
#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
|
||||
#[serde(rename_all = "camelCase", default)]
|
||||
pub struct SharedCache {
|
||||
pub base: i64,
|
||||
pub size: i64,
|
||||
pub uuid: String,
|
||||
}
|
||||
|
||||
#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
|
||||
#[serde(rename_all = "camelCase", default)]
|
||||
pub struct LegacyInfo {
|
||||
pub thread_triggered: ThreadTriggered,
|
||||
}
|
||||
|
||||
#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
|
||||
#[serde(rename_all = "camelCase", default)]
|
||||
pub struct ThreadTriggered {
|
||||
pub name: String,
|
||||
pub queue: String,
|
||||
}
|
||||
|
||||
#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
|
||||
#[serde(rename_all = "camelCase", default)]
|
||||
pub struct TrialInfo {
|
||||
pub rollouts: Vec<Rollout>,
|
||||
pub experiments: Vec<Value>,
|
||||
}
|
||||
|
||||
#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
|
||||
#[serde(rename_all = "camelCase", default)]
|
||||
pub struct Rollout {
|
||||
pub rollout_id: String,
|
||||
pub factor_pack_ids: HashMap<String, Value>,
|
||||
pub deployment_id: i64,
|
||||
}
|
||||
crates/collab/src/api/slack.rs (new file, 144 lines)
@@ -0,0 +1,144 @@
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
/// https://api.slack.com/reference/messaging/payload
|
||||
#[derive(Default, Clone, Serialize, Deserialize)]
|
||||
pub struct WebhookBody {
|
||||
text: String,
|
||||
#[serde(skip_serializing_if = "Vec::is_empty")]
|
||||
blocks: Vec<Block>,
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
thread_ts: Option<String>,
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
mrkdwn: Option<bool>,
|
||||
}
|
||||
|
||||
impl WebhookBody {
|
||||
pub fn new(f: impl FnOnce(Self) -> Self) -> Self {
|
||||
f(Self::default())
|
||||
}
|
||||
|
||||
pub fn add_section(mut self, build: impl FnOnce(Section) -> Section) -> Self {
|
||||
self.blocks.push(Block::Section(build(Section::default())));
|
||||
self
|
||||
}
|
||||
|
||||
pub fn add_rich_text(mut self, build: impl FnOnce(RichText) -> RichText) -> Self {
|
||||
self.blocks
|
||||
.push(Block::RichText(build(RichText::default())));
|
||||
self
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Serialize, Deserialize)]
|
||||
#[serde(tag = "type")]
|
||||
/// https://api.slack.com/reference/block-kit/blocks
|
||||
pub enum Block {
|
||||
#[serde(rename = "section")]
|
||||
Section(Section),
|
||||
#[serde(rename = "rich_text")]
|
||||
RichText(RichText),
|
||||
// .... etc.
|
||||
}
|
||||
|
||||
/// https://api.slack.com/reference/block-kit/blocks#section
|
||||
#[derive(Default, Clone, Serialize, Deserialize)]
|
||||
pub struct Section {
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
text: Option<Text>,
|
||||
#[serde(skip_serializing_if = "Vec::is_empty")]
|
||||
fields: Vec<Text>,
|
||||
// fields, accessories...
|
||||
}
|
||||
|
||||
impl Section {
|
||||
pub fn text(mut self, text: Text) -> Self {
|
||||
self.text = Some(text);
|
||||
self
|
||||
}
|
||||
|
||||
pub fn add_field(mut self, field: Text) -> Self {
|
||||
self.fields.push(field);
|
||||
self
|
||||
}
|
||||
}
|
||||
|
||||
/// https://api.slack.com/reference/block-kit/composition-objects#text
|
||||
#[derive(Clone, Serialize, Deserialize)]
|
||||
#[serde(tag = "type")]
|
||||
pub enum Text {
|
||||
#[serde(rename = "plain_text")]
|
||||
PlainText { text: String, emoji: bool },
|
||||
#[serde(rename = "mrkdwn")]
|
||||
Markdown { text: String, verbatim: bool },
|
||||
}
|
||||
|
||||
impl Text {
|
||||
pub fn plain(s: String) -> Self {
|
||||
Self::PlainText {
|
||||
text: s,
|
||||
emoji: true,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn markdown(s: String) -> Self {
|
||||
Self::Markdown {
|
||||
text: s,
|
||||
verbatim: false,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Default, Clone, Serialize, Deserialize)]
|
||||
pub struct RichText {
|
||||
elements: Vec<RichTextObject>,
|
||||
}
|
||||
|
||||
impl RichText {
|
||||
pub fn new(f: impl FnOnce(Self) -> Self) -> Self {
|
||||
f(Self::default())
|
||||
}
|
||||
|
||||
pub fn add_preformatted(
|
||||
mut self,
|
||||
build: impl FnOnce(RichTextPreformatted) -> RichTextPreformatted,
|
||||
) -> Self {
|
||||
self.elements.push(RichTextObject::Preformatted(build(
|
||||
RichTextPreformatted::default(),
|
||||
)));
|
||||
self
|
||||
}
|
||||
}
|
||||
|
||||
/// https://api.slack.com/reference/block-kit/blocks#rich_text
|
||||
#[derive(Clone, Serialize, Deserialize)]
|
||||
#[serde(tag = "type")]
|
||||
pub enum RichTextObject {
|
||||
#[serde(rename = "rich_text_preformatted")]
|
||||
Preformatted(RichTextPreformatted),
|
||||
// etc.
|
||||
}
|
||||
|
||||
/// https://api.slack.com/reference/block-kit/blocks#rich_text_preformatted
|
||||
#[derive(Clone, Default, Serialize, Deserialize)]
|
||||
pub struct RichTextPreformatted {
|
||||
#[serde(skip_serializing_if = "Vec::is_empty")]
|
||||
elements: Vec<RichTextElement>,
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
border: Option<u8>,
|
||||
}
|
||||
|
||||
impl RichTextPreformatted {
|
||||
pub fn add_text(mut self, text: String) -> Self {
|
||||
self.elements.push(RichTextElement::Text { text });
|
||||
self
|
||||
}
|
||||
}
|
||||
|
||||
/// https://api.slack.com/reference/block-kit/blocks#element-types
|
||||
#[derive(Clone, Serialize, Deserialize)]
|
||||
#[serde(tag = "type")]
|
||||
pub enum RichTextElement {
|
||||
#[serde(rename = "text")]
|
||||
Text { text: String },
|
||||
// etc.
|
||||
}
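A minimal sketch of how the builder above is driven (mirroring its use in post_crash; the
strings are placeholders):

let body = WebhookBody::new(|w| {
    w.add_section(|s| s.text(Text::markdown("Panic `example`".to_string())))
        .add_rich_text(|r| {
            r.add_preformatted(|p| p.add_text("frame 0\nframe 1".to_string()))
        })
});
// WebhookBody serializes into the JSON payload the Slack webhook expects.
let payload_json = serde_json::to_string(&body)?;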
|
||||
@@ -8,24 +8,16 @@ use axum::{
|
||||
middleware::Next,
|
||||
response::IntoResponse,
|
||||
};
|
||||
use lazy_static::lazy_static;
|
||||
use prometheus::{exponential_buckets, register_histogram, Histogram};
|
||||
use rand::thread_rng;
|
||||
use scrypt::{
|
||||
password_hash::{PasswordHash, PasswordHasher, PasswordVerifier, SaltString},
|
||||
password_hash::{PasswordHash, PasswordVerifier},
|
||||
Scrypt,
|
||||
};
|
||||
use serde::{Deserialize, Serialize};
|
||||
use sha2::Digest;
|
||||
use std::sync::OnceLock;
|
||||
use std::{sync::Arc, time::Instant};
|
||||
|
||||
lazy_static! {
|
||||
static ref METRIC_ACCESS_TOKEN_HASHING_TIME: Histogram = register_histogram!(
|
||||
"access_token_hashing_time",
|
||||
"time spent hashing access tokens",
|
||||
exponential_buckets(10.0, 2.0, 10).unwrap(),
|
||||
)
|
||||
.unwrap();
|
||||
}
|
||||
use subtle::ConstantTimeEq;
|
||||
|
||||
#[derive(Clone, Debug, Default, PartialEq, Eq)]
|
||||
pub struct Impersonator(pub Option<db::User>);
|
||||
@@ -124,8 +116,7 @@ pub async fn create_access_token(
|
||||
) -> Result<String> {
|
||||
const VERSION: usize = 1;
|
||||
let access_token = rpc::auth::random_token();
|
||||
let access_token_hash =
|
||||
hash_access_token(&access_token).context("failed to hash access token")?;
|
||||
let access_token_hash = hash_access_token(&access_token);
|
||||
let id = db
|
||||
.create_access_token(
|
||||
user_id,
|
||||
@@ -141,23 +132,15 @@ pub async fn create_access_token(
|
||||
})?)
|
||||
}
|
||||
|
||||
fn hash_access_token(token: &str) -> Result<String> {
|
||||
// Avoid slow hashing in debug mode.
|
||||
let params = if cfg!(debug_assertions) {
|
||||
scrypt::Params::new(1, 1, 1).unwrap()
|
||||
} else {
|
||||
scrypt::Params::new(14, 8, 1).unwrap()
|
||||
};
|
||||
|
||||
Ok(Scrypt
|
||||
.hash_password(
|
||||
token.as_bytes(),
|
||||
None,
|
||||
params,
|
||||
&SaltString::generate(thread_rng()),
|
||||
)
|
||||
.map_err(anyhow::Error::new)?
|
||||
.to_string())
|
||||
/// Hashing prevents anyone with access to the database from being able to log in.
|
||||
/// As the token is randomly generated, we don't need to worry about scrypt-style
|
||||
/// protection.
|
||||
fn hash_access_token(token: &str) -> String {
|
||||
let digest = sha2::Sha256::digest(token);
|
||||
format!(
|
||||
"$sha256${}",
|
||||
base64::encode_config(digest, base64::URL_SAFE)
|
||||
)
|
||||
}
|
||||
|
||||
/// Encrypts the given access token with the given public key to avoid leaking it on the way
|
||||
@@ -182,6 +165,16 @@ pub async fn verify_access_token(
|
||||
user_id: UserId,
|
||||
db: &Arc<Database>,
|
||||
) -> Result<VerifyAccessTokenResult> {
|
||||
static METRIC_ACCESS_TOKEN_HASHING_TIME: OnceLock<Histogram> = OnceLock::new();
|
||||
let metric_access_token_hashing_time = METRIC_ACCESS_TOKEN_HASHING_TIME.get_or_init(|| {
|
||||
register_histogram!(
|
||||
"access_token_hashing_time",
|
||||
"time spent hashing access tokens",
|
||||
exponential_buckets(10.0, 2.0, 10).unwrap(),
|
||||
)
|
||||
.unwrap()
|
||||
});
|
||||
|
||||
let token: AccessTokenJson = serde_json::from_str(&token)?;
|
||||
|
||||
let db_token = db.get_access_token(token.id).await?;
|
||||
@@ -189,15 +182,27 @@ pub async fn verify_access_token(
|
||||
if token_user_id != user_id {
|
||||
return Err(anyhow!("no such access token"))?;
|
||||
}
|
||||
|
||||
let db_hash = PasswordHash::new(&db_token.hash).map_err(anyhow::Error::new)?;
|
||||
let t0 = Instant::now();
|
||||
let is_valid = Scrypt
|
||||
.verify_password(token.token.as_bytes(), &db_hash)
|
||||
.is_ok();
|
||||
|
||||
let is_valid = if db_token.hash.starts_with("$scrypt$") {
|
||||
let db_hash = PasswordHash::new(&db_token.hash).map_err(anyhow::Error::new)?;
|
||||
Scrypt
|
||||
.verify_password(token.token.as_bytes(), &db_hash)
|
||||
.is_ok()
|
||||
} else {
|
||||
let token_hash = hash_access_token(&token.token);
|
||||
db_token.hash.as_bytes().ct_eq(token_hash.as_ref()).into()
|
||||
};
|
||||
|
||||
let duration = t0.elapsed();
|
||||
log::info!("hashed access token in {:?}", duration);
|
||||
METRIC_ACCESS_TOKEN_HASHING_TIME.observe(duration.as_millis() as f64);
|
||||
metric_access_token_hashing_time.observe(duration.as_millis() as f64);
|
||||
|
||||
if is_valid && db_token.hash.starts_with("$scrypt$") {
|
||||
let new_hash = hash_access_token(&token.token);
|
||||
db.update_access_token_hash(db_token.id, &new_hash).await?;
|
||||
}
|
||||
|
||||
Ok(VerifyAccessTokenResult {
|
||||
is_valid,
|
||||
impersonator_id: if db_token.impersonated_user_id.is_some() {
|
||||
@@ -207,3 +212,145 @@ pub async fn verify_access_token(
|
||||
},
|
||||
})
|
||||
}
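The non-scrypt branch above compares hashes with subtle's constant-time equality. A minimal
sketch of that check (the helper name is hypothetical):

use subtle::ConstantTimeEq;

fn hashes_match(stored: &str, computed: &str) -> bool {
    // ct_eq inspects every byte regardless of where the first mismatch occurs,
    // so the timing does not reveal how much of the hash was correct.
    stored.as_bytes().ct_eq(computed.as_bytes()).into()
}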
|
||||
|
||||
#[cfg(test)]
|
||||
mod test {
|
||||
use rand::thread_rng;
|
||||
use scrypt::password_hash::{PasswordHasher, SaltString};
|
||||
use sea_orm::EntityTrait;
|
||||
|
||||
use super::*;
|
||||
use crate::db::{access_token, NewUserParams};
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_verify_access_token(cx: &mut gpui::TestAppContext) {
|
||||
let test_db = crate::db::TestDb::postgres(cx.executor().clone());
|
||||
let db = test_db.db();
|
||||
|
||||
let user = db
|
||||
.create_user(
|
||||
"example@example.com",
|
||||
false,
|
||||
NewUserParams {
|
||||
github_login: "example".into(),
|
||||
github_user_id: 1,
|
||||
},
|
||||
)
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
let token = create_access_token(&db, user.user_id, None).await.unwrap();
|
||||
assert!(matches!(
|
||||
verify_access_token(&token, user.user_id, &db)
|
||||
.await
|
||||
.unwrap(),
|
||||
VerifyAccessTokenResult {
|
||||
is_valid: true,
|
||||
impersonator_id: None,
|
||||
}
|
||||
));
|
||||
|
||||
let old_token = create_previous_access_token(user.user_id, None, &db)
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
let old_token_id = serde_json::from_str::<AccessTokenJson>(&old_token)
|
||||
.unwrap()
|
||||
.id;
|
||||
|
||||
let hash = db
|
||||
.transaction(|tx| async move {
|
||||
Ok(access_token::Entity::find_by_id(old_token_id)
|
||||
.one(&*tx)
|
||||
.await?)
|
||||
})
|
||||
.await
|
||||
.unwrap()
|
||||
.unwrap()
|
||||
.hash;
|
||||
assert!(hash.starts_with("$scrypt$"));
|
||||
|
||||
assert!(matches!(
|
||||
verify_access_token(&old_token, user.user_id, &db)
|
||||
.await
|
||||
.unwrap(),
|
||||
VerifyAccessTokenResult {
|
||||
is_valid: true,
|
||||
impersonator_id: None,
|
||||
}
|
||||
));
|
||||
|
||||
let hash = db
|
||||
.transaction(|tx| async move {
|
||||
Ok(access_token::Entity::find_by_id(old_token_id)
|
||||
.one(&*tx)
|
||||
.await?)
|
||||
})
|
||||
.await
|
||||
.unwrap()
|
||||
.unwrap()
|
||||
.hash;
|
||||
assert!(hash.starts_with("$sha256$"));
|
||||
|
||||
assert!(matches!(
|
||||
verify_access_token(&old_token, user.user_id, &db)
|
||||
.await
|
||||
.unwrap(),
|
||||
VerifyAccessTokenResult {
|
||||
is_valid: true,
|
||||
impersonator_id: None,
|
||||
}
|
||||
));
|
||||
|
||||
assert!(matches!(
|
||||
verify_access_token(&token, user.user_id, &db)
|
||||
.await
|
||||
.unwrap(),
|
||||
VerifyAccessTokenResult {
|
||||
is_valid: true,
|
||||
impersonator_id: None,
|
||||
}
|
||||
));
|
||||
}
|
||||
|
||||
async fn create_previous_access_token(
|
||||
user_id: UserId,
|
||||
impersonated_user_id: Option<UserId>,
|
||||
db: &Database,
|
||||
) -> Result<String> {
|
||||
let access_token = rpc::auth::random_token();
|
||||
let access_token_hash = previous_hash_access_token(&access_token)?;
|
||||
let id = db
|
||||
.create_access_token(
|
||||
user_id,
|
||||
impersonated_user_id,
|
||||
&access_token_hash,
|
||||
MAX_ACCESS_TOKENS_TO_STORE,
|
||||
)
|
||||
.await?;
|
||||
Ok(serde_json::to_string(&AccessTokenJson {
|
||||
version: 1,
|
||||
id,
|
||||
token: access_token,
|
||||
})?)
|
||||
}
|
||||
|
||||
fn previous_hash_access_token(token: &str) -> Result<String> {
|
||||
// Avoid slow hashing in debug mode.
|
||||
let params = if cfg!(debug_assertions) {
|
||||
scrypt::Params::new(1, 1, 1).unwrap()
|
||||
} else {
|
||||
scrypt::Params::new(14, 8, 1).unwrap()
|
||||
};
|
||||
|
||||
Ok(Scrypt
|
||||
.hash_password(
|
||||
token.as_bytes(),
|
||||
None,
|
||||
params,
|
||||
&SaltString::generate(thread_rng()),
|
||||
)
|
||||
.map_err(anyhow::Error::new)?
|
||||
.to_string())
|
||||
}
|
||||
}
|
||||
|
||||
@@ -88,9 +88,9 @@ async fn fetch_github<T: DeserializeOwned>(client: &reqwest::Client, url: &str)
|
||||
.header("user-agent", "zed")
|
||||
.send()
|
||||
.await
|
||||
.expect(&format!("failed to fetch '{}'", url));
|
||||
.unwrap_or_else(|_| panic!("failed to fetch '{}'", url));
|
||||
response
|
||||
.json()
|
||||
.await
|
||||
.expect(&format!("failed to deserialize github user from '{}'", url))
|
||||
.unwrap_or_else(|_| panic!("failed to deserialize github user from '{}'", url))
|
||||
}
|
||||
|
||||
@@ -359,7 +359,7 @@ impl Database {
|
||||
const SLEEPS: [f32; 10] = [10., 20., 40., 80., 160., 320., 640., 1280., 2560., 5120.];
|
||||
if is_serialization_error(error) && prev_attempt_count < SLEEPS.len() {
|
||||
let base_delay = SLEEPS[prev_attempt_count];
|
||||
let randomized_delay = base_delay as f32 * self.rng.lock().await.gen_range(0.5..=2.0);
|
||||
let randomized_delay = base_delay * self.rng.lock().await.gen_range(0.5..=2.0);
|
||||
log::info!(
|
||||
"retrying transaction after serialization error. delay: {} ms.",
|
||||
randomized_delay
|
||||
@@ -375,7 +375,7 @@ impl Database {
|
||||
}
|
||||
|
||||
fn is_serialization_error(error: &Error) -> bool {
|
||||
const SERIALIZATION_FAILURE_CODE: &'static str = "40001";
|
||||
const SERIALIZATION_FAILURE_CODE: &str = "40001";
|
||||
match error {
|
||||
Error::Database(
|
||||
DbErr::Exec(sea_orm::RuntimeErr::SqlxError(error))
|
||||
@@ -670,6 +670,8 @@ pub struct RefreshedChannelBuffer {
|
||||
}
|
||||
|
||||
pub struct Project {
|
||||
pub id: ProjectId,
|
||||
pub role: ChannelRole,
|
||||
pub collaborators: Vec<ProjectCollaborator>,
|
||||
pub worktrees: BTreeMap<u64, Worktree>,
|
||||
pub language_servers: Vec<proto::LanguageServer>,
|
||||
@@ -695,7 +697,7 @@ impl ProjectCollaborator {
|
||||
#[derive(Debug)]
|
||||
pub struct LeftProject {
|
||||
pub id: ProjectId,
|
||||
pub host_user_id: UserId,
|
||||
pub host_user_id: Option<UserId>,
|
||||
pub host_connection_id: Option<ConnectionId>,
|
||||
pub connection_ids: Vec<ConnectionId>,
|
||||
}
|
||||
|
||||
@@ -55,4 +55,22 @@ impl Database {
|
||||
})
|
||||
.await
|
||||
}
|
||||
|
||||
/// Updates the stored hash of the access token with the given ID.
|
||||
pub async fn update_access_token_hash(
|
||||
&self,
|
||||
id: AccessTokenId,
|
||||
new_hash: &str,
|
||||
) -> Result<access_token::Model> {
|
||||
self.transaction(|tx| async move {
|
||||
Ok(access_token::Entity::update(access_token::ActiveModel {
|
||||
id: ActiveValue::unchanged(id),
|
||||
hash: ActiveValue::set(new_hash.into()),
|
||||
..Default::default()
|
||||
})
|
||||
.exec(&*tx)
|
||||
.await?)
|
||||
})
|
||||
.await
|
||||
}
|
||||
}
|
||||
|
||||
@@ -18,7 +18,7 @@ impl Database {
|
||||
connection: ConnectionId,
|
||||
) -> Result<proto::JoinChannelBufferResponse> {
|
||||
self.transaction(|tx| async move {
|
||||
let channel = self.get_channel_internal(channel_id, &*tx).await?;
|
||||
let channel = self.get_channel_internal(channel_id, &tx).await?;
|
||||
self.check_user_is_channel_participant(&channel, user_id, &tx)
|
||||
.await?;
|
||||
|
||||
@@ -134,10 +134,10 @@ impl Database {
|
||||
let mut results = Vec::new();
|
||||
for client_buffer in buffers {
|
||||
let channel = self
|
||||
.get_channel_internal(ChannelId::from_proto(client_buffer.channel_id), &*tx)
|
||||
.get_channel_internal(ChannelId::from_proto(client_buffer.channel_id), &tx)
|
||||
.await?;
|
||||
if self
|
||||
.check_user_is_channel_participant(&channel, user_id, &*tx)
|
||||
.check_user_is_channel_participant(&channel, user_id, &tx)
|
||||
.await
|
||||
.is_err()
|
||||
{
|
||||
@@ -145,7 +145,7 @@ impl Database {
|
||||
continue;
|
||||
}
|
||||
|
||||
let buffer = self.get_channel_buffer(channel.id, &*tx).await?;
|
||||
let buffer = self.get_channel_buffer(channel.id, &tx).await?;
|
||||
let mut collaborators = channel_buffer_collaborator::Entity::find()
|
||||
.filter(channel_buffer_collaborator::Column::ChannelId.eq(channel.id))
|
||||
.all(&*tx)
|
||||
@@ -161,11 +161,9 @@ impl Database {
|
||||
|
||||
// Find the collaborator record for this user's previous lost
|
||||
// connection. Update it with the new connection id.
|
||||
let server_id = ServerId(connection_id.owner_id as i32);
|
||||
let Some(self_collaborator) = collaborators.iter_mut().find(|c| {
|
||||
c.user_id == user_id
|
||||
&& (c.connection_lost || c.connection_server_id != server_id)
|
||||
}) else {
|
||||
let Some(self_collaborator) =
|
||||
collaborators.iter_mut().find(|c| c.user_id == user_id)
|
||||
else {
|
||||
log::info!("can't rejoin buffer, no previous collaborator found");
|
||||
continue;
|
||||
};
|
||||
@@ -182,7 +180,7 @@ impl Database {
|
||||
|
||||
let client_version = version_from_wire(&client_buffer.version);
|
||||
let serialization_version = self
|
||||
.get_buffer_operation_serialization_version(buffer.id, buffer.epoch, &*tx)
|
||||
.get_buffer_operation_serialization_version(buffer.id, buffer.epoch, &tx)
|
||||
.await?;
|
||||
|
||||
let mut rows = buffer_operation::Entity::find()
|
||||
@@ -285,7 +283,7 @@ impl Database {
|
||||
connection: ConnectionId,
|
||||
) -> Result<LeftChannelBuffer> {
|
||||
self.transaction(|tx| async move {
|
||||
self.leave_channel_buffer_internal(channel_id, connection, &*tx)
|
||||
self.leave_channel_buffer_internal(channel_id, connection, &tx)
|
||||
.await
|
||||
})
|
||||
.await
|
||||
@@ -310,7 +308,7 @@ impl Database {
|
||||
connection_lost: ActiveValue::set(true),
|
||||
..Default::default()
|
||||
})
|
||||
.exec(&*tx)
|
||||
.exec(tx)
|
||||
.await?;
|
||||
Ok(())
|
||||
}
|
||||
@@ -339,7 +337,7 @@ impl Database {
|
||||
let mut result = Vec::new();
|
||||
for channel_id in channel_ids {
|
||||
let left_channel_buffer = self
|
||||
.leave_channel_buffer_internal(channel_id, connection, &*tx)
|
||||
.leave_channel_buffer_internal(channel_id, connection, &tx)
|
||||
.await?;
|
||||
result.push(left_channel_buffer);
|
||||
}
|
||||
@@ -365,7 +363,7 @@ impl Database {
|
||||
.eq(connection.owner_id as i32),
|
||||
),
|
||||
)
|
||||
.exec(&*tx)
|
||||
.exec(tx)
|
||||
.await?;
|
||||
if result.rows_affected == 0 {
|
||||
Err(anyhow!("not a collaborator on this project"))?;
|
||||
@@ -377,7 +375,7 @@ impl Database {
|
||||
.filter(
|
||||
Condition::all().add(channel_buffer_collaborator::Column::ChannelId.eq(channel_id)),
|
||||
)
|
||||
.stream(&*tx)
|
||||
.stream(tx)
|
||||
.await?;
|
||||
while let Some(row) = rows.next().await {
|
||||
let row = row?;
|
||||
@@ -408,7 +406,7 @@ impl Database {
|
||||
channel_id: ChannelId,
|
||||
) -> Result<Vec<UserId>> {
|
||||
self.transaction(|tx| async move {
|
||||
self.get_channel_buffer_collaborators_internal(channel_id, &*tx)
|
||||
self.get_channel_buffer_collaborators_internal(channel_id, &tx)
|
||||
.await
|
||||
})
|
||||
.await
|
||||
@@ -431,7 +429,7 @@ impl Database {
|
||||
Condition::all().add(channel_buffer_collaborator::Column::ChannelId.eq(channel_id)),
|
||||
)
|
||||
.into_values::<_, QueryUserIds>()
|
||||
.all(&*tx)
|
||||
.all(tx)
|
||||
.await?;
|
||||
|
||||
Ok(users)
|
||||
@@ -449,7 +447,7 @@ impl Database {
|
||||
Vec<proto::VectorClockEntry>,
|
||||
)> {
|
||||
self.transaction(move |tx| async move {
|
||||
let channel = self.get_channel_internal(channel_id, &*tx).await?;
|
||||
let channel = self.get_channel_internal(channel_id, &tx).await?;
|
||||
|
||||
let mut requires_write_permission = false;
|
||||
for op in operations.iter() {
|
||||
@@ -459,10 +457,10 @@ impl Database {
|
||||
}
|
||||
}
|
||||
if requires_write_permission {
|
||||
self.check_user_is_channel_member(&channel, user, &*tx)
|
||||
self.check_user_is_channel_member(&channel, user, &tx)
|
||||
.await?;
|
||||
} else {
|
||||
self.check_user_is_channel_participant(&channel, user, &*tx)
|
||||
self.check_user_is_channel_participant(&channel, user, &tx)
|
||||
.await?;
|
||||
}
|
||||
|
||||
@@ -473,7 +471,7 @@ impl Database {
|
||||
.ok_or_else(|| anyhow!("no such buffer"))?;
|
||||
|
||||
let serialization_version = self
|
||||
.get_buffer_operation_serialization_version(buffer.id, buffer.epoch, &*tx)
|
||||
.get_buffer_operation_serialization_version(buffer.id, buffer.epoch, &tx)
|
||||
.await?;
|
||||
|
||||
let operations = operations
|
||||
@@ -502,13 +500,13 @@ impl Database {
|
||||
buffer.epoch,
|
||||
*max_operation.replica_id.as_ref(),
|
||||
*max_operation.lamport_timestamp.as_ref(),
|
||||
&*tx,
|
||||
&tx,
|
||||
)
|
||||
.await?;
|
||||
|
||||
channel_members = self.get_channel_participants(&channel, &*tx).await?;
|
||||
channel_members = self.get_channel_participants(&channel, &tx).await?;
|
||||
let collaborators = self
|
||||
.get_channel_buffer_collaborators_internal(channel_id, &*tx)
|
||||
.get_channel_buffer_collaborators_internal(channel_id, &tx)
|
||||
.await?;
|
||||
channel_members.retain(|member| !collaborators.contains(member));
|
||||
|
||||
@@ -560,6 +558,17 @@ impl Database {
|
||||
lamport_timestamp: i32,
|
||||
tx: &DatabaseTransaction,
|
||||
) -> Result<()> {
|
||||
buffer::Entity::update(buffer::ActiveModel {
|
||||
id: ActiveValue::Unchanged(buffer_id),
|
||||
epoch: ActiveValue::Unchanged(epoch),
|
||||
latest_operation_epoch: ActiveValue::Set(Some(epoch)),
|
||||
latest_operation_replica_id: ActiveValue::Set(Some(replica_id)),
|
||||
latest_operation_lamport_timestamp: ActiveValue::Set(Some(lamport_timestamp)),
|
||||
channel_id: ActiveValue::NotSet,
|
||||
})
|
||||
.exec(tx)
|
||||
.await?;
|
||||
|
||||
use observed_buffer_edits::Column;
|
||||
observed_buffer_edits::Entity::insert(observed_buffer_edits::ActiveModel {
|
||||
user_id: ActiveValue::Set(user_id),
|
||||
@@ -604,7 +613,7 @@ impl Database {
|
||||
.select_only()
|
||||
.column(buffer_snapshot::Column::OperationSerializationVersion)
|
||||
.into_values::<_, QueryOperationSerializationVersion>()
|
||||
.one(&*tx)
|
||||
.one(tx)
|
||||
.await?
|
||||
.ok_or_else(|| anyhow!("missing buffer snapshot"))?)
|
||||
}
|
||||
@@ -619,7 +628,7 @@ impl Database {
|
||||
..Default::default()
|
||||
}
|
||||
.find_related(buffer::Entity)
|
||||
.one(&*tx)
|
||||
.one(tx)
|
||||
.await?
|
||||
.ok_or_else(|| anyhow!("no such buffer"))?)
|
||||
}
|
||||
@@ -641,7 +650,7 @@ impl Database {
|
||||
.eq(id)
|
||||
.and(buffer_snapshot::Column::Epoch.eq(buffer.epoch)),
|
||||
)
|
||||
.one(&*tx)
|
||||
.one(tx)
|
||||
.await?
|
||||
.ok_or_else(|| anyhow!("no such snapshot"))?;
|
||||
|
||||
@@ -659,7 +668,7 @@ impl Database {
|
||||
)
|
||||
.order_by_asc(buffer_operation::Column::LamportTimestamp)
|
||||
.order_by_asc(buffer_operation::Column::ReplicaId)
|
||||
.stream(&*tx)
|
||||
.stream(tx)
|
||||
.await?;
|
||||
|
||||
let mut operations = Vec::new();
|
||||
@@ -713,7 +722,10 @@ impl Database {
buffer::ActiveModel {
id: ActiveValue::Unchanged(buffer.id),
epoch: ActiveValue::Set(epoch),
..Default::default()
latest_operation_epoch: ActiveValue::NotSet,
latest_operation_replica_id: ActiveValue::NotSet,
latest_operation_lamport_timestamp: ActiveValue::NotSet,
channel_id: ActiveValue::NotSet,
}
.save(tx)
.await?;
@@ -739,7 +751,7 @@ impl Database {
|
||||
epoch,
|
||||
component.replica_id as i32,
|
||||
component.timestamp as i32,
|
||||
&*tx,
|
||||
&tx,
|
||||
)
|
||||
.await?;
|
||||
Ok(())
|
||||
@@ -747,30 +759,6 @@ impl Database {
.await
}

pub async fn latest_channel_buffer_changes(
&self,
channel_ids_by_buffer_id: &HashMap<BufferId, ChannelId>,
tx: &DatabaseTransaction,
) -> Result<Vec<proto::ChannelBufferVersion>> {
let latest_operations = self
.get_latest_operations_for_buffers(channel_ids_by_buffer_id.keys().copied(), &*tx)
.await?;

Ok(latest_operations
.iter()
.flat_map(|op| {
Some(proto::ChannelBufferVersion {
channel_id: channel_ids_by_buffer_id.get(&op.buffer_id)?.to_proto(),
epoch: op.epoch as u64,
version: vec![proto::VectorClockEntry {
replica_id: op.replica_id as u32,
timestamp: op.lamport_timestamp as u32,
}],
})
})
.collect())
}

pub async fn observed_channel_buffer_changes(
&self,
channel_ids_by_buffer_id: &HashMap<BufferId, ChannelId>,
@@ -783,7 +771,7 @@ impl Database {
observed_buffer_edits::Column::BufferId
.is_in(channel_ids_by_buffer_id.keys().copied()),
)
.all(&*tx)
.all(tx)
.await?;

Ok(observed_operations
@@ -800,55 +788,6 @@ impl Database {
})
.collect())
}

/// Returns the latest operations for the buffers with the specified IDs.
pub async fn get_latest_operations_for_buffers(
&self,
buffer_ids: impl IntoIterator<Item = BufferId>,
tx: &DatabaseTransaction,
) -> Result<Vec<buffer_operation::Model>> {
let mut values = String::new();
for id in buffer_ids {
if !values.is_empty() {
values.push_str(", ");
}
write!(&mut values, "({})", id).unwrap();
}

if values.is_empty() {
return Ok(Vec::default());
}

let sql = format!(
r#"
SELECT
*
FROM
(
SELECT
*,
row_number() OVER (
PARTITION BY buffer_id
ORDER BY
epoch DESC,
lamport_timestamp DESC,
replica_id DESC
) as row_number
FROM buffer_operations
WHERE
buffer_id in ({values})
) AS last_operations
WHERE
row_number = 1
"#,
);

let stmt = Statement::from_string(self.pool.get_database_backend(), sql);
Ok(buffer_operation::Entity::find()
.from_raw_sql(stmt)
.all(&*tx)
.await?)
}
}

fn operation_to_storage(

@@ -1,5 +1,8 @@
use super::*;
use rpc::{proto::channel_member::Kind, ErrorCode, ErrorCodeExt};
use rpc::{
proto::{channel_member::Kind, ChannelBufferVersion, VectorClockEntry},
ErrorCode, ErrorCodeExt,
};
use sea_orm::TryGetableMany;

impl Database {
@@ -53,8 +56,8 @@ impl Database {
|
||||
let mut membership = None;
|
||||
|
||||
if let Some(parent_channel_id) = parent_channel_id {
|
||||
let parent_channel = self.get_channel_internal(parent_channel_id, &*tx).await?;
|
||||
self.check_user_is_channel_admin(&parent_channel, admin_id, &*tx)
|
||||
let parent_channel = self.get_channel_internal(parent_channel_id, &tx).await?;
|
||||
self.check_user_is_channel_admin(&parent_channel, admin_id, &tx)
|
||||
.await?;
|
||||
parent = Some(parent_channel);
|
||||
}
|
||||
@@ -105,14 +108,14 @@ impl Database {
|
||||
connection: ConnectionId,
|
||||
) -> Result<(JoinRoom, Option<MembershipUpdated>, ChannelRole)> {
|
||||
self.transaction(move |tx| async move {
|
||||
let channel = self.get_channel_internal(channel_id, &*tx).await?;
|
||||
let mut role = self.channel_role_for_user(&channel, user_id, &*tx).await?;
|
||||
let channel = self.get_channel_internal(channel_id, &tx).await?;
|
||||
let mut role = self.channel_role_for_user(&channel, user_id, &tx).await?;
|
||||
|
||||
let mut accept_invite_result = None;
|
||||
|
||||
if role.is_none() {
|
||||
if let Some(invitation) = self
|
||||
.pending_invite_for_channel(&channel, user_id, &*tx)
|
||||
.pending_invite_for_channel(&channel, user_id, &tx)
|
||||
.await?
|
||||
{
|
||||
// note, this may be a parent channel
|
||||
@@ -125,12 +128,12 @@ impl Database {
|
||||
.await?;
|
||||
|
||||
accept_invite_result = Some(
|
||||
self.calculate_membership_updated(&channel, user_id, &*tx)
|
||||
self.calculate_membership_updated(&channel, user_id, &tx)
|
||||
.await?,
|
||||
);
|
||||
|
||||
debug_assert!(
|
||||
self.channel_role_for_user(&channel, user_id, &*tx).await? == role
|
||||
self.channel_role_for_user(&channel, user_id, &tx).await? == role
|
||||
);
|
||||
} else if channel.visibility == ChannelVisibility::Public {
|
||||
role = Some(ChannelRole::Guest);
|
||||
@@ -145,12 +148,12 @@ impl Database {
|
||||
.await?;
|
||||
|
||||
accept_invite_result = Some(
|
||||
self.calculate_membership_updated(&channel, user_id, &*tx)
|
||||
self.calculate_membership_updated(&channel, user_id, &tx)
|
||||
.await?,
|
||||
);
|
||||
|
||||
debug_assert!(
|
||||
self.channel_role_for_user(&channel, user_id, &*tx).await? == role
|
||||
self.channel_role_for_user(&channel, user_id, &tx).await? == role
|
||||
);
|
||||
}
|
||||
}
|
||||
@@ -162,10 +165,10 @@ impl Database {
|
||||
|
||||
let live_kit_room = format!("channel-{}", nanoid::nanoid!(30));
|
||||
let room_id = self
|
||||
.get_or_create_channel_room(channel_id, &live_kit_room, &*tx)
|
||||
.get_or_create_channel_room(channel_id, &live_kit_room, &tx)
|
||||
.await?;
|
||||
|
||||
self.join_channel_room_internal(room_id, user_id, connection, role, &*tx)
|
||||
self.join_channel_room_internal(room_id, user_id, connection, role, &tx)
|
||||
.await
|
||||
.map(|jr| (jr, accept_invite_result, role))
|
||||
})
|
||||
@@ -180,13 +183,13 @@ impl Database {
|
||||
admin_id: UserId,
|
||||
) -> Result<(Channel, Vec<channel_member::Model>)> {
|
||||
self.transaction(move |tx| async move {
|
||||
let channel = self.get_channel_internal(channel_id, &*tx).await?;
|
||||
self.check_user_is_channel_admin(&channel, admin_id, &*tx)
|
||||
let channel = self.get_channel_internal(channel_id, &tx).await?;
|
||||
self.check_user_is_channel_admin(&channel, admin_id, &tx)
|
||||
.await?;
|
||||
|
||||
if visibility == ChannelVisibility::Public {
|
||||
if let Some(parent_id) = channel.parent_id() {
|
||||
let parent = self.get_channel_internal(parent_id, &*tx).await?;
|
||||
let parent = self.get_channel_internal(parent_id, &tx).await?;
|
||||
|
||||
if parent.visibility != ChannelVisibility::Public {
|
||||
Err(ErrorCode::BadPublicNesting
|
||||
@@ -196,7 +199,7 @@ impl Database {
|
||||
}
|
||||
} else if visibility == ChannelVisibility::Members {
|
||||
if self
|
||||
.get_channel_descendants_excluding_self([&channel], &*tx)
|
||||
.get_channel_descendants_excluding_self([&channel], &tx)
|
||||
.await?
|
||||
.into_iter()
|
||||
.any(|channel| channel.visibility == ChannelVisibility::Public)
|
||||
@@ -228,7 +231,7 @@ impl Database {
|
||||
requires_zed_cla: bool,
|
||||
) -> Result<()> {
|
||||
self.transaction(move |tx| async move {
|
||||
let channel = self.get_channel_internal(channel_id, &*tx).await?;
|
||||
let channel = self.get_channel_internal(channel_id, &tx).await?;
|
||||
let mut model = channel.into_active_model();
|
||||
model.requires_zed_cla = ActiveValue::Set(requires_zed_cla);
|
||||
model.update(&*tx).await?;
|
||||
@@ -244,8 +247,8 @@ impl Database {
|
||||
user_id: UserId,
|
||||
) -> Result<(Vec<ChannelId>, Vec<UserId>)> {
|
||||
self.transaction(move |tx| async move {
|
||||
let channel = self.get_channel_internal(channel_id, &*tx).await?;
|
||||
self.check_user_is_channel_admin(&channel, user_id, &*tx)
|
||||
let channel = self.get_channel_internal(channel_id, &tx).await?;
|
||||
self.check_user_is_channel_admin(&channel, user_id, &tx)
|
||||
.await?;
|
||||
|
||||
let members_to_notify: Vec<UserId> = channel_member::Entity::find()
|
||||
@@ -258,7 +261,7 @@ impl Database {
|
||||
.await?;
|
||||
|
||||
let channels_to_remove = self
|
||||
.get_channel_descendants_excluding_self([&channel], &*tx)
|
||||
.get_channel_descendants_excluding_self([&channel], &tx)
|
||||
.await?
|
||||
.into_iter()
|
||||
.map(|channel| channel.id)
|
||||
@@ -284,8 +287,8 @@ impl Database {
|
||||
role: ChannelRole,
|
||||
) -> Result<InviteMemberResult> {
|
||||
self.transaction(move |tx| async move {
|
||||
let channel = self.get_channel_internal(channel_id, &*tx).await?;
|
||||
self.check_user_is_channel_admin(&channel, inviter_id, &*tx)
|
||||
let channel = self.get_channel_internal(channel_id, &tx).await?;
|
||||
self.check_user_is_channel_admin(&channel, inviter_id, &tx)
|
||||
.await?;
|
||||
if !channel.is_root() {
|
||||
Err(ErrorCode::NotARootChannel.anyhow())?
|
||||
@@ -312,7 +315,7 @@ impl Database {
|
||||
inviter_id: inviter_id.to_proto(),
|
||||
},
|
||||
true,
|
||||
&*tx,
|
||||
&tx,
|
||||
)
|
||||
.await?
|
||||
.into_iter()
|
||||
@@ -344,8 +347,8 @@ impl Database {
|
||||
self.transaction(move |tx| async move {
|
||||
let new_name = Self::sanitize_channel_name(new_name)?.to_string();
|
||||
|
||||
let channel = self.get_channel_internal(channel_id, &*tx).await?;
|
||||
self.check_user_is_channel_admin(&channel, admin_id, &*tx)
|
||||
let channel = self.get_channel_internal(channel_id, &tx).await?;
|
||||
self.check_user_is_channel_admin(&channel, admin_id, &tx)
|
||||
.await?;
|
||||
|
||||
let mut model = channel.into_active_model();
|
||||
@@ -370,7 +373,7 @@ impl Database {
|
||||
accept: bool,
|
||||
) -> Result<RespondToChannelInvite> {
|
||||
self.transaction(move |tx| async move {
|
||||
let channel = self.get_channel_internal(channel_id, &*tx).await?;
|
||||
let channel = self.get_channel_internal(channel_id, &tx).await?;
|
||||
|
||||
let membership_update = if accept {
|
||||
let rows_affected = channel_member::Entity::update_many()
|
||||
@@ -393,7 +396,7 @@ impl Database {
|
||||
}
|
||||
|
||||
Some(
|
||||
self.calculate_membership_updated(&channel, user_id, &*tx)
|
||||
self.calculate_membership_updated(&channel, user_id, &tx)
|
||||
.await?,
|
||||
)
|
||||
} else {
|
||||
@@ -425,7 +428,7 @@ impl Database {
|
||||
inviter_id: Default::default(),
|
||||
},
|
||||
accept,
|
||||
&*tx,
|
||||
&tx,
|
||||
)
|
||||
.await?
|
||||
.into_iter()
|
||||
@@ -441,9 +444,9 @@ impl Database {
|
||||
user_id: UserId,
|
||||
tx: &DatabaseTransaction,
|
||||
) -> Result<MembershipUpdated> {
|
||||
let new_channels = self.get_user_channels(user_id, Some(channel), &*tx).await?;
|
||||
let new_channels = self.get_user_channels(user_id, Some(channel), tx).await?;
|
||||
let removed_channels = self
|
||||
.get_channel_descendants_excluding_self([channel], &*tx)
|
||||
.get_channel_descendants_excluding_self([channel], tx)
|
||||
.await?
|
||||
.into_iter()
|
||||
.map(|channel| channel.id)
|
||||
@@ -466,10 +469,10 @@ impl Database {
|
||||
admin_id: UserId,
|
||||
) -> Result<RemoveChannelMemberResult> {
|
||||
self.transaction(|tx| async move {
|
||||
let channel = self.get_channel_internal(channel_id, &*tx).await?;
|
||||
let channel = self.get_channel_internal(channel_id, &tx).await?;
|
||||
|
||||
if member_id != admin_id {
|
||||
self.check_user_is_channel_admin(&channel, admin_id, &*tx)
|
||||
self.check_user_is_channel_admin(&channel, admin_id, &tx)
|
||||
.await?;
|
||||
}
|
||||
|
||||
@@ -488,7 +491,7 @@ impl Database {
|
||||
|
||||
Ok(RemoveChannelMemberResult {
|
||||
membership_update: self
|
||||
.calculate_membership_updated(&channel, member_id, &*tx)
|
||||
.calculate_membership_updated(&channel, member_id, &tx)
|
||||
.await?,
|
||||
notification_id: self
|
||||
.remove_notification(
|
||||
@@ -498,7 +501,7 @@ impl Database {
|
||||
channel_name: Default::default(),
|
||||
inviter_id: Default::default(),
|
||||
},
|
||||
&*tx,
|
||||
&tx,
|
||||
)
|
||||
.await?,
|
||||
})
|
||||
@@ -529,10 +532,7 @@ impl Database {
|
||||
.all(&*tx)
|
||||
.await?;
|
||||
|
||||
let channels = channels
|
||||
.into_iter()
|
||||
.filter_map(|channel| Some(Channel::from_model(channel)))
|
||||
.collect();
|
||||
let channels = channels.into_iter().map(Channel::from_model).collect();
|
||||
|
||||
Ok(channels)
|
||||
})
|
||||
@@ -567,16 +567,16 @@ impl Database {
|
||||
|
||||
let channel_memberships = channel_member::Entity::find()
|
||||
.filter(filter)
|
||||
.all(&*tx)
|
||||
.all(tx)
|
||||
.await?;
|
||||
|
||||
let channels = channel::Entity::find()
|
||||
.filter(channel::Column::Id.is_in(channel_memberships.iter().map(|m| m.channel_id)))
|
||||
.all(&*tx)
|
||||
.all(tx)
|
||||
.await?;
|
||||
|
||||
let mut descendants = self
|
||||
.get_channel_descendants_excluding_self(channels.iter(), &*tx)
|
||||
.get_channel_descendants_excluding_self(channels.iter(), tx)
|
||||
.await?;
|
||||
|
||||
for channel in channels {
|
||||
@@ -617,7 +617,7 @@ impl Database {
|
||||
.column(room::Column::ChannelId)
|
||||
.column(room_participant::Column::UserId)
|
||||
.into_values::<_, QueryUserIdsAndChannelIds>()
|
||||
.stream(&*tx)
|
||||
.stream(tx)
|
||||
.await?;
|
||||
while let Some(row) = rows.next().await {
|
||||
let row: (ChannelId, UserId) = row?;
|
||||
@@ -628,32 +628,44 @@ impl Database {
let channel_ids = channels.iter().map(|c| c.id).collect::<Vec<_>>();

let mut channel_ids_by_buffer_id = HashMap::default();
let mut latest_buffer_versions: Vec<ChannelBufferVersion> = vec![];
let mut rows = buffer::Entity::find()
.filter(buffer::Column::ChannelId.is_in(channel_ids.iter().copied()))
.stream(&*tx)
.stream(tx)
.await?;
while let Some(row) = rows.next().await {
let row = row?;
channel_ids_by_buffer_id.insert(row.id, row.channel_id);
latest_buffer_versions.push(ChannelBufferVersion {
channel_id: row.channel_id.0 as u64,
epoch: row.latest_operation_epoch.unwrap_or_default() as u64,
version: if let Some((latest_lamport_timestamp, latest_replica_id)) = row
.latest_operation_lamport_timestamp
.zip(row.latest_operation_replica_id)
{
vec![VectorClockEntry {
timestamp: latest_lamport_timestamp as u32,
replica_id: latest_replica_id as u32,
}]
} else {
vec![]
},
});
}
drop(rows);

let latest_buffer_versions = self
.latest_channel_buffer_changes(&channel_ids_by_buffer_id, &*tx)
.await?;

let latest_channel_messages = self.latest_channel_messages(&channel_ids, &*tx).await?;
let latest_channel_messages = self.latest_channel_messages(&channel_ids, tx).await?;

let observed_buffer_versions = self
.observed_channel_buffer_changes(&channel_ids_by_buffer_id, user_id, &*tx)
.observed_channel_buffer_changes(&channel_ids_by_buffer_id, user_id, tx)
.await?;

let observed_channel_messages = self
.observed_channel_messages(&channel_ids, user_id, &*tx)
.observed_channel_messages(&channel_ids, user_id, tx)
.await?;

let hosted_projects = self
.get_hosted_projects(&channel_ids, &roles_by_channel_id, &*tx)
.get_hosted_projects(&channel_ids, &roles_by_channel_id, tx)
.await?;

Ok(ChannelsForUser {
@@ -677,8 +689,8 @@ impl Database {
|
||||
role: ChannelRole,
|
||||
) -> Result<SetMemberRoleResult> {
|
||||
self.transaction(|tx| async move {
|
||||
let channel = self.get_channel_internal(channel_id, &*tx).await?;
|
||||
self.check_user_is_channel_admin(&channel, admin_id, &*tx)
|
||||
let channel = self.get_channel_internal(channel_id, &tx).await?;
|
||||
self.check_user_is_channel_admin(&channel, admin_id, &tx)
|
||||
.await?;
|
||||
|
||||
let membership = channel_member::Entity::find()
|
||||
@@ -700,7 +712,7 @@ impl Database {
|
||||
|
||||
if updated.accepted {
|
||||
Ok(SetMemberRoleResult::MembershipUpdated(
|
||||
self.calculate_membership_updated(&channel, for_user, &*tx)
|
||||
self.calculate_membership_updated(&channel, for_user, &tx)
|
||||
.await?,
|
||||
))
|
||||
} else {
|
||||
@@ -720,13 +732,13 @@ impl Database {
|
||||
) -> Result<Vec<proto::ChannelMember>> {
|
||||
let (role, members) = self
|
||||
.transaction(move |tx| async move {
|
||||
let channel = self.get_channel_internal(channel_id, &*tx).await?;
|
||||
let channel = self.get_channel_internal(channel_id, &tx).await?;
|
||||
let role = self
|
||||
.check_user_is_channel_participant(&channel, user_id, &*tx)
|
||||
.check_user_is_channel_participant(&channel, user_id, &tx)
|
||||
.await?;
|
||||
Ok((
|
||||
role,
|
||||
self.get_channel_participant_details_internal(&channel, &*tx)
|
||||
self.get_channel_participant_details_internal(&channel, &tx)
|
||||
.await?,
|
||||
))
|
||||
})
|
||||
@@ -781,7 +793,7 @@ impl Database {
|
||||
tx: &DatabaseTransaction,
|
||||
) -> Result<Vec<UserId>> {
|
||||
let participants = self
|
||||
.get_channel_participant_details_internal(channel, &*tx)
|
||||
.get_channel_participant_details_internal(channel, tx)
|
||||
.await?;
|
||||
Ok(participants
|
||||
.into_iter()
|
||||
@@ -858,7 +870,7 @@ impl Database {
|
||||
.filter(channel_member::Column::ChannelId.eq(channel.root_id()))
|
||||
.filter(channel_member::Column::UserId.eq(user_id))
|
||||
.filter(channel_member::Column::Accepted.eq(false))
|
||||
.one(&*tx)
|
||||
.one(tx)
|
||||
.await?;
|
||||
|
||||
Ok(row)
|
||||
@@ -878,7 +890,7 @@ impl Database {
|
||||
.and(channel_member::Column::UserId.eq(user_id))
|
||||
.and(channel_member::Column::Accepted.eq(true)),
|
||||
)
|
||||
.one(&*tx)
|
||||
.one(tx)
|
||||
.await?;
|
||||
|
||||
let Some(membership) = membership else {
|
||||
@@ -918,8 +930,8 @@ impl Database {
|
||||
/// Returns the channel with the given ID.
|
||||
pub async fn get_channel(&self, channel_id: ChannelId, user_id: UserId) -> Result<Channel> {
|
||||
self.transaction(|tx| async move {
|
||||
let channel = self.get_channel_internal(channel_id, &*tx).await?;
|
||||
self.check_user_is_channel_participant(&channel, user_id, &*tx)
|
||||
let channel = self.get_channel_internal(channel_id, &tx).await?;
|
||||
self.check_user_is_channel_participant(&channel, user_id, &tx)
|
||||
.await?;
|
||||
|
||||
Ok(Channel::from_model(channel))
|
||||
@@ -933,7 +945,7 @@ impl Database {
|
||||
tx: &DatabaseTransaction,
|
||||
) -> Result<channel::Model> {
|
||||
Ok(channel::Entity::find_by_id(channel_id)
|
||||
.one(&*tx)
|
||||
.one(tx)
|
||||
.await?
|
||||
.ok_or_else(|| proto::ErrorCode::NoSuchChannel.anyhow())?)
|
||||
}
|
||||
@@ -946,7 +958,7 @@ impl Database {
|
||||
) -> Result<RoomId> {
|
||||
let room = room::Entity::find()
|
||||
.filter(room::Column::ChannelId.eq(channel_id))
|
||||
.one(&*tx)
|
||||
.one(tx)
|
||||
.await?;
|
||||
|
||||
let room_id = if let Some(room) = room {
|
||||
@@ -957,7 +969,7 @@ impl Database {
|
||||
live_kit_room: ActiveValue::Set(live_kit_room.to_string()),
|
||||
..Default::default()
|
||||
})
|
||||
.exec(&*tx)
|
||||
.exec(tx)
|
||||
.await?;
|
||||
|
||||
result.last_insert_id
|
||||
@@ -974,10 +986,10 @@ impl Database {
|
||||
admin_id: UserId,
|
||||
) -> Result<(Vec<Channel>, Vec<channel_member::Model>)> {
|
||||
self.transaction(|tx| async move {
|
||||
let channel = self.get_channel_internal(channel_id, &*tx).await?;
|
||||
self.check_user_is_channel_admin(&channel, admin_id, &*tx)
|
||||
let channel = self.get_channel_internal(channel_id, &tx).await?;
|
||||
self.check_user_is_channel_admin(&channel, admin_id, &tx)
|
||||
.await?;
|
||||
let new_parent = self.get_channel_internal(new_parent_id, &*tx).await?;
|
||||
let new_parent = self.get_channel_internal(new_parent_id, &tx).await?;
|
||||
|
||||
if new_parent.root_id() != channel.root_id() {
|
||||
Err(anyhow!(ErrorCode::WrongMoveTarget))?;
|
||||
|
||||
@@ -177,7 +177,7 @@ impl Database {
|
||||
sender_id: sender_id.to_proto(),
|
||||
},
|
||||
true,
|
||||
&*tx,
|
||||
&tx,
|
||||
)
|
||||
.await?
|
||||
.into_iter()
|
||||
@@ -227,7 +227,7 @@ impl Database {
|
||||
rpc::Notification::ContactRequest {
|
||||
sender_id: requester_id.to_proto(),
|
||||
},
|
||||
&*tx,
|
||||
&tx,
|
||||
)
|
||||
.await?;
|
||||
}
|
||||
@@ -335,7 +335,7 @@ impl Database {
|
||||
sender_id: requester_id.to_proto(),
|
||||
},
|
||||
accept,
|
||||
&*tx,
|
||||
&tx,
|
||||
)
|
||||
.await?,
|
||||
);
|
||||
@@ -348,7 +348,7 @@ impl Database {
|
||||
responder_id: responder_id.to_proto(),
|
||||
},
|
||||
true,
|
||||
&*tx,
|
||||
&tx,
|
||||
)
|
||||
.await?,
|
||||
);
|
||||
|
||||
@@ -72,7 +72,7 @@ impl Database {
|
||||
github_login,
|
||||
github_user_id,
|
||||
github_email,
|
||||
&*tx,
|
||||
&tx,
|
||||
)
|
||||
.await?;
|
||||
|
||||
|
||||
@@ -1,4 +1,4 @@
use rpc::proto;
use rpc::{proto, ErrorCode};

use super::*;

@@ -9,20 +9,21 @@ impl Database {
roles: &HashMap<ChannelId, ChannelRole>,
tx: &DatabaseTransaction,
) -> Result<Vec<proto::HostedProject>> {
Ok(hosted_project::Entity::find()
let projects = hosted_project::Entity::find()
.find_also_related(project::Entity)
.filter(hosted_project::Column::ChannelId.is_in(channel_ids.iter().map(|id| id.0)))
.all(&*tx)
.all(tx)
.await?
.into_iter()
.flat_map(|project| {
if project.deleted_at.is_some() {
.flat_map(|(hosted_project, project)| {
if hosted_project.deleted_at.is_some() {
return None;
}
match project.visibility {
match hosted_project.visibility {
ChannelVisibility::Public => {}
ChannelVisibility::Members => {
let is_visible = roles
.get(&project.channel_id)
.get(&hosted_project.channel_id)
.map(|role| role.can_see_all_descendants())
.unwrap_or(false);
if !is_visible {
@@ -31,12 +32,54 @@ impl Database {
}
};
Some(proto::HostedProject {
id: project.id.to_proto(),
channel_id: project.channel_id.to_proto(),
name: project.name.clone(),
visibility: project.visibility.into(),
project_id: project?.id.to_proto(),
channel_id: hosted_project.channel_id.to_proto(),
name: hosted_project.name.clone(),
visibility: hosted_project.visibility.into(),
})
})
.collect())
.collect();

Ok(projects)
}

pub async fn get_hosted_project(
&self,
hosted_project_id: HostedProjectId,
user_id: UserId,
tx: &DatabaseTransaction,
) -> Result<(hosted_project::Model, ChannelRole)> {
let project = hosted_project::Entity::find_by_id(hosted_project_id)
.one(tx)
.await?
.ok_or_else(|| anyhow!(ErrorCode::NoSuchProject))?;
let channel = channel::Entity::find_by_id(project.channel_id)
.one(tx)
.await?
.ok_or_else(|| anyhow!(ErrorCode::NoSuchChannel))?;

let role = match project.visibility {
ChannelVisibility::Public => {
self.check_user_is_channel_participant(&channel, user_id, tx)
.await?
}
ChannelVisibility::Members => {
self.check_user_is_channel_member(&channel, user_id, tx)
.await?
}
};

Ok((project, role))
}

pub async fn is_hosted_project(&self, project_id: ProjectId) -> Result<bool> {
self.transaction(|tx| async move {
Ok(project::Entity::find_by_id(project_id)
.one(&*tx)
.await?
.map(|project| project.hosted_project_id.is_some())
.ok_or_else(|| anyhow!(ErrorCode::NoSuchProject))?)
})
.await
}
}

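Aside: get_hosted_projects now destructures each row as (hosted_project, project) because SeaORM's find_also_related loads every hosted_project together with an optional related project, yielding a tuple per row rather than a single model. A minimal sketch of that shape, using made-up row types that stand in for the generated entities:

// HostedProjectRow and ProjectRow are illustrative stand-ins, not real entities.
struct HostedProjectRow {
    deleted_at: Option<String>,
    name: String,
}
struct ProjectRow {
    id: u64,
}

fn live_project_names(rows: Vec<(HostedProjectRow, Option<ProjectRow>)>) -> Vec<(u64, String)> {
    rows.into_iter()
        .flat_map(|(hosted_project, project)| {
            if hosted_project.deleted_at.is_some() {
                return None;
            }
            // `project?` drops rows whose related project row is missing.
            Some((project?.id, hosted_project.name))
        })
        .collect()
}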
@@ -12,8 +12,8 @@ impl Database {
|
||||
user_id: UserId,
|
||||
) -> Result<()> {
|
||||
self.transaction(|tx| async move {
|
||||
let channel = self.get_channel_internal(channel_id, &*tx).await?;
|
||||
self.check_user_is_channel_participant(&channel, user_id, &*tx)
|
||||
let channel = self.get_channel_internal(channel_id, &tx).await?;
|
||||
self.check_user_is_channel_participant(&channel, user_id, &tx)
|
||||
.await?;
|
||||
channel_chat_participant::ActiveModel {
|
||||
id: ActiveValue::NotSet,
|
||||
@@ -87,8 +87,8 @@ impl Database {
|
||||
before_message_id: Option<MessageId>,
|
||||
) -> Result<Vec<proto::ChannelMessage>> {
|
||||
self.transaction(|tx| async move {
|
||||
let channel = self.get_channel_internal(channel_id, &*tx).await?;
|
||||
self.check_user_is_channel_participant(&channel, user_id, &*tx)
|
||||
let channel = self.get_channel_internal(channel_id, &tx).await?;
|
||||
self.check_user_is_channel_participant(&channel, user_id, &tx)
|
||||
.await?;
|
||||
|
||||
let mut condition =
|
||||
@@ -105,7 +105,7 @@ impl Database {
|
||||
.all(&*tx)
|
||||
.await?;
|
||||
|
||||
self.load_channel_messages(rows, &*tx).await
|
||||
self.load_channel_messages(rows, &tx).await
|
||||
})
|
||||
.await
|
||||
}
|
||||
@@ -127,16 +127,16 @@ impl Database {
|
||||
for row in &rows {
|
||||
channels.insert(
|
||||
row.channel_id,
|
||||
self.get_channel_internal(row.channel_id, &*tx).await?,
|
||||
self.get_channel_internal(row.channel_id, &tx).await?,
|
||||
);
|
||||
}
|
||||
|
||||
for (_, channel) in channels {
|
||||
self.check_user_is_channel_participant(&channel, user_id, &*tx)
|
||||
self.check_user_is_channel_participant(&channel, user_id, &tx)
|
||||
.await?;
|
||||
}
|
||||
|
||||
let messages = self.load_channel_messages(rows, &*tx).await?;
|
||||
let messages = self.load_channel_messages(rows, &tx).await?;
|
||||
Ok(messages)
|
||||
})
|
||||
.await
|
||||
@@ -171,7 +171,7 @@ impl Database {
|
||||
.filter(channel_message_mention::Column::MessageId.is_in(messages.iter().map(|m| m.id)))
|
||||
.order_by_asc(channel_message_mention::Column::MessageId)
|
||||
.order_by_asc(channel_message_mention::Column::StartOffset)
|
||||
.stream(&*tx)
|
||||
.stream(tx)
|
||||
.await?;
|
||||
|
||||
let mut message_ix = 0;
|
||||
@@ -200,6 +200,7 @@ impl Database {
|
||||
}
|
||||
|
||||
/// Creates a new channel message.
|
||||
#[allow(clippy::too_many_arguments)]
|
||||
pub async fn create_channel_message(
|
||||
&self,
|
||||
channel_id: ChannelId,
|
||||
@@ -211,8 +212,8 @@ impl Database {
|
||||
reply_to_message_id: Option<MessageId>,
|
||||
) -> Result<CreatedChannelMessage> {
|
||||
self.transaction(|tx| async move {
|
||||
let channel = self.get_channel_internal(channel_id, &*tx).await?;
|
||||
self.check_user_is_channel_participant(&channel, user_id, &*tx)
|
||||
let channel = self.get_channel_internal(channel_id, &tx).await?;
|
||||
self.check_user_is_channel_participant(&channel, user_id, &tx)
|
||||
.await?;
|
||||
|
||||
let mut rows = channel_chat_participant::Entity::find()
|
||||
@@ -302,13 +303,13 @@ impl Database {
|
||||
channel_id: channel_id.to_proto(),
|
||||
},
|
||||
false,
|
||||
&*tx,
|
||||
&tx,
|
||||
)
|
||||
.await?,
|
||||
);
|
||||
}
|
||||
|
||||
self.observe_channel_message_internal(channel_id, user_id, message_id, &*tx)
|
||||
self.observe_channel_message_internal(channel_id, user_id, message_id, &tx)
|
||||
.await?;
|
||||
}
|
||||
_ => {
|
||||
@@ -321,7 +322,7 @@ impl Database {
|
||||
}
|
||||
}
|
||||
|
||||
let mut channel_members = self.get_channel_participants(&channel, &*tx).await?;
|
||||
let mut channel_members = self.get_channel_participants(&channel, &tx).await?;
|
||||
channel_members.retain(|member| !participant_user_ids.contains(member));
|
||||
|
||||
Ok(CreatedChannelMessage {
|
||||
@@ -341,7 +342,7 @@ impl Database {
|
||||
message_id: MessageId,
|
||||
) -> Result<NotificationBatch> {
|
||||
self.transaction(|tx| async move {
|
||||
self.observe_channel_message_internal(channel_id, user_id, message_id, &*tx)
|
||||
self.observe_channel_message_internal(channel_id, user_id, message_id, &tx)
|
||||
.await?;
|
||||
let mut batch = NotificationBatch::default();
|
||||
batch.extend(
|
||||
@@ -352,7 +353,7 @@ impl Database {
|
||||
sender_id: Default::default(),
|
||||
channel_id: Default::default(),
|
||||
},
|
||||
&*tx,
|
||||
&tx,
|
||||
)
|
||||
.await?,
|
||||
);
|
||||
@@ -383,7 +384,7 @@ impl Database {
|
||||
.to_owned(),
|
||||
)
|
||||
// TODO: Try to upgrade SeaORM so we don't have to do this hack around their bug
|
||||
.exec_without_returning(&*tx)
|
||||
.exec_without_returning(tx)
|
||||
.await?;
|
||||
Ok(())
|
||||
}
|
||||
@@ -400,7 +401,7 @@ impl Database {
|
||||
observed_channel_messages::Column::ChannelId
|
||||
.is_in(channel_ids.iter().map(|id| id.0)),
|
||||
)
|
||||
.all(&*tx)
|
||||
.all(tx)
|
||||
.await?;
|
||||
|
||||
Ok(rows
|
||||
@@ -451,7 +452,7 @@ impl Database {
|
||||
|
||||
let stmt = Statement::from_string(self.pool.get_database_backend(), sql);
|
||||
let mut last_messages = channel_message::Model::find_by_statement(stmt)
|
||||
.stream(&*tx)
|
||||
.stream(tx)
|
||||
.await?;
|
||||
|
||||
let mut results = Vec::new();
|
||||
@@ -500,9 +501,9 @@ impl Database {
|
||||
.await?;
|
||||
|
||||
if result.rows_affected == 0 {
|
||||
let channel = self.get_channel_internal(channel_id, &*tx).await?;
|
||||
let channel = self.get_channel_internal(channel_id, &tx).await?;
|
||||
if self
|
||||
.check_user_is_channel_admin(&channel, user_id, &*tx)
|
||||
.check_user_is_channel_admin(&channel, user_id, &tx)
|
||||
.await
|
||||
.is_ok()
|
||||
{
|
||||
|
||||
@@ -95,7 +95,7 @@ impl Database {
|
||||
content: ActiveValue::Set(proto.content.clone()),
|
||||
..Default::default()
|
||||
}
|
||||
.save(&*tx)
|
||||
.save(tx)
|
||||
.await?;
|
||||
|
||||
Ok(Some((
|
||||
@@ -184,7 +184,7 @@ impl Database {
|
||||
tx: &DatabaseTransaction,
|
||||
) -> Result<Option<(UserId, proto::Notification)>> {
|
||||
if let Some(id) = self
|
||||
.find_notification(recipient_id, notification, &*tx)
|
||||
.find_notification(recipient_id, notification, tx)
|
||||
.await?
|
||||
{
|
||||
let row = notification::Entity::update(notification::ActiveModel {
|
||||
@@ -236,7 +236,7 @@ impl Database {
|
||||
}),
|
||||
)
|
||||
.into_values::<_, QueryIds>()
|
||||
.one(&*tx)
|
||||
.one(tx)
|
||||
.await?)
|
||||
}
|
||||
}
|
||||
|
||||
@@ -57,13 +57,14 @@ impl Database {
|
||||
}
|
||||
|
||||
let project = project::ActiveModel {
|
||||
room_id: ActiveValue::set(participant.room_id),
|
||||
host_user_id: ActiveValue::set(participant.user_id),
|
||||
room_id: ActiveValue::set(Some(participant.room_id)),
|
||||
host_user_id: ActiveValue::set(Some(participant.user_id)),
|
||||
host_connection_id: ActiveValue::set(Some(connection.id as i32)),
|
||||
host_connection_server_id: ActiveValue::set(Some(ServerId(
|
||||
connection.owner_id as i32,
|
||||
))),
|
||||
..Default::default()
|
||||
id: ActiveValue::NotSet,
|
||||
hosted_project_id: ActiveValue::Set(None),
|
||||
}
|
||||
.insert(&*tx)
|
||||
.await?;
|
||||
@@ -153,8 +154,12 @@ impl Database {
|
||||
self.update_project_worktrees(project.id, worktrees, &tx)
|
||||
.await?;
|
||||
|
||||
let room_id = project
|
||||
.room_id
|
||||
.ok_or_else(|| anyhow!("project not in a room"))?;
|
||||
|
||||
let guest_connection_ids = self.project_guest_connection_ids(project.id, &tx).await?;
|
||||
let room = self.get_room(project.room_id, &tx).await?;
|
||||
let room = self.get_room(room_id, &tx).await?;
|
||||
Ok((room, guest_connection_ids))
|
||||
})
|
||||
.await
|
||||
@@ -181,7 +186,7 @@ impl Database {
|
||||
.update_column(worktree::Column::RootName)
|
||||
.to_owned(),
|
||||
)
|
||||
.exec(&*tx)
|
||||
.exec(tx)
|
||||
.await?;
|
||||
}
|
||||
|
||||
@@ -189,7 +194,7 @@ impl Database {
|
||||
.filter(worktree::Column::ProjectId.eq(project_id).and(
|
||||
worktree::Column::Id.is_not_in(worktrees.iter().map(|worktree| worktree.id as i64)),
|
||||
))
|
||||
.exec(&*tx)
|
||||
.exec(tx)
|
||||
.await?;
|
||||
|
||||
Ok(())
|
||||
@@ -382,7 +387,6 @@ impl Database {
|
||||
language_server_id: ActiveValue::set(summary.language_server_id as i64),
|
||||
error_count: ActiveValue::set(summary.error_count as i32),
|
||||
warning_count: ActiveValue::set(summary.warning_count as i32),
|
||||
..Default::default()
|
||||
})
|
||||
.on_conflict(
|
||||
OnConflict::columns([
|
||||
@@ -434,7 +438,6 @@ impl Database {
|
||||
project_id: ActiveValue::set(project_id),
|
||||
id: ActiveValue::set(server.id as i64),
|
||||
name: ActiveValue::set(server.name.clone()),
|
||||
..Default::default()
|
||||
})
|
||||
.on_conflict(
|
||||
OnConflict::columns([
|
||||
@@ -506,8 +509,42 @@ impl Database {
|
||||
.await
|
||||
}
|
||||
|
||||
/// Adds the given connection to the specified project.
pub async fn join_project(
/// Adds the given connection to the specified hosted project
pub async fn join_hosted_project(
&self,
id: ProjectId,
user_id: UserId,
connection: ConnectionId,
) -> Result<(Project, ReplicaId)> {
self.transaction(|tx| async move {
let (project, hosted_project) = project::Entity::find_by_id(id)
.find_also_related(hosted_project::Entity)
.one(&*tx)
.await?
.ok_or_else(|| anyhow!("hosted project is no longer shared"))?;

let Some(hosted_project) = hosted_project else {
return Err(anyhow!("project is not hosted"))?;
};

let channel = channel::Entity::find_by_id(hosted_project.channel_id)
.one(&*tx)
.await?
.ok_or_else(|| anyhow!("no such channel"))?;

let role = self
.check_user_is_channel_participant(&channel, user_id, &tx)
.await?;

self.join_project_internal(project, user_id, connection, role, &tx)
.await
})
.await
}

/// Adds the given connection to the specified project
|
||||
/// in the current room.
|
||||
pub async fn join_project_in_room(
|
||||
&self,
|
||||
project_id: ProjectId,
|
||||
connection: ConnectionId,
|
||||
@@ -534,180 +571,240 @@ impl Database {
|
||||
.one(&*tx)
|
||||
.await?
|
||||
.ok_or_else(|| anyhow!("no such project"))?;
|
||||
if project.room_id != participant.room_id {
|
||||
if project.room_id != Some(participant.room_id) {
|
||||
return Err(anyhow!("no such project"))?;
|
||||
}
|
||||
self.join_project_internal(
|
||||
project,
|
||||
participant.user_id,
|
||||
connection,
|
||||
participant.role.unwrap_or(ChannelRole::Member),
|
||||
&tx,
|
||||
)
|
||||
.await
|
||||
})
|
||||
.await
|
||||
}
|
||||
|
||||
let mut collaborators = project
|
||||
async fn join_project_internal(
|
||||
&self,
|
||||
project: project::Model,
|
||||
user_id: UserId,
|
||||
connection: ConnectionId,
|
||||
role: ChannelRole,
|
||||
tx: &DatabaseTransaction,
|
||||
) -> Result<(Project, ReplicaId)> {
|
||||
let mut collaborators = project
|
||||
.find_related(project_collaborator::Entity)
|
||||
.all(tx)
|
||||
.await?;
|
||||
let replica_ids = collaborators
|
||||
.iter()
|
||||
.map(|c| c.replica_id)
|
||||
.collect::<HashSet<_>>();
|
||||
let mut replica_id = ReplicaId(1);
|
||||
while replica_ids.contains(&replica_id) {
|
||||
replica_id.0 += 1;
|
||||
}
|
||||
let new_collaborator = project_collaborator::ActiveModel {
|
||||
project_id: ActiveValue::set(project.id),
|
||||
connection_id: ActiveValue::set(connection.id as i32),
|
||||
connection_server_id: ActiveValue::set(ServerId(connection.owner_id as i32)),
|
||||
user_id: ActiveValue::set(user_id),
|
||||
replica_id: ActiveValue::set(replica_id),
|
||||
is_host: ActiveValue::set(false),
|
||||
..Default::default()
|
||||
}
|
||||
.insert(tx)
|
||||
.await?;
|
||||
collaborators.push(new_collaborator);
|
||||
|
||||
let db_worktrees = project.find_related(worktree::Entity).all(tx).await?;
|
||||
let mut worktrees = db_worktrees
|
||||
.into_iter()
|
||||
.map(|db_worktree| {
|
||||
(
|
||||
db_worktree.id as u64,
|
||||
Worktree {
|
||||
id: db_worktree.id as u64,
|
||||
abs_path: db_worktree.abs_path,
|
||||
root_name: db_worktree.root_name,
|
||||
visible: db_worktree.visible,
|
||||
entries: Default::default(),
|
||||
repository_entries: Default::default(),
|
||||
diagnostic_summaries: Default::default(),
|
||||
settings_files: Default::default(),
|
||||
scan_id: db_worktree.scan_id as u64,
|
||||
completed_scan_id: db_worktree.completed_scan_id as u64,
|
||||
},
|
||||
)
|
||||
})
|
||||
.collect::<BTreeMap<_, _>>();
|
||||
|
||||
// Populate worktree entries.
|
||||
{
|
||||
let mut db_entries = worktree_entry::Entity::find()
|
||||
.filter(
|
||||
Condition::all()
|
||||
.add(worktree_entry::Column::ProjectId.eq(project.id))
|
||||
.add(worktree_entry::Column::IsDeleted.eq(false)),
|
||||
)
|
||||
.stream(tx)
|
||||
.await?;
|
||||
while let Some(db_entry) = db_entries.next().await {
|
||||
let db_entry = db_entry?;
|
||||
if let Some(worktree) = worktrees.get_mut(&(db_entry.worktree_id as u64)) {
|
||||
worktree.entries.push(proto::Entry {
|
||||
id: db_entry.id as u64,
|
||||
is_dir: db_entry.is_dir,
|
||||
path: db_entry.path,
|
||||
inode: db_entry.inode as u64,
|
||||
mtime: Some(proto::Timestamp {
|
||||
seconds: db_entry.mtime_seconds as u64,
|
||||
nanos: db_entry.mtime_nanos as u32,
|
||||
}),
|
||||
is_symlink: db_entry.is_symlink,
|
||||
is_ignored: db_entry.is_ignored,
|
||||
is_external: db_entry.is_external,
|
||||
git_status: db_entry.git_status.map(|status| status as i32),
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Populate repository entries.
|
||||
{
|
||||
let mut db_repository_entries = worktree_repository::Entity::find()
|
||||
.filter(
|
||||
Condition::all()
|
||||
.add(worktree_repository::Column::ProjectId.eq(project.id))
|
||||
.add(worktree_repository::Column::IsDeleted.eq(false)),
|
||||
)
|
||||
.stream(tx)
|
||||
.await?;
|
||||
while let Some(db_repository_entry) = db_repository_entries.next().await {
|
||||
let db_repository_entry = db_repository_entry?;
|
||||
if let Some(worktree) = worktrees.get_mut(&(db_repository_entry.worktree_id as u64))
|
||||
{
|
||||
worktree.repository_entries.insert(
|
||||
db_repository_entry.work_directory_id as u64,
|
||||
proto::RepositoryEntry {
|
||||
work_directory_id: db_repository_entry.work_directory_id as u64,
|
||||
branch: db_repository_entry.branch,
|
||||
},
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Populate worktree diagnostic summaries.
|
||||
{
|
||||
let mut db_summaries = worktree_diagnostic_summary::Entity::find()
|
||||
.filter(worktree_diagnostic_summary::Column::ProjectId.eq(project.id))
|
||||
.stream(tx)
|
||||
.await?;
|
||||
while let Some(db_summary) = db_summaries.next().await {
|
||||
let db_summary = db_summary?;
|
||||
if let Some(worktree) = worktrees.get_mut(&(db_summary.worktree_id as u64)) {
|
||||
worktree
|
||||
.diagnostic_summaries
|
||||
.push(proto::DiagnosticSummary {
|
||||
path: db_summary.path,
|
||||
language_server_id: db_summary.language_server_id as u64,
|
||||
error_count: db_summary.error_count as u32,
|
||||
warning_count: db_summary.warning_count as u32,
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Populate worktree settings files
|
||||
{
|
||||
let mut db_settings_files = worktree_settings_file::Entity::find()
|
||||
.filter(worktree_settings_file::Column::ProjectId.eq(project.id))
|
||||
.stream(tx)
|
||||
.await?;
|
||||
while let Some(db_settings_file) = db_settings_files.next().await {
|
||||
let db_settings_file = db_settings_file?;
|
||||
if let Some(worktree) = worktrees.get_mut(&(db_settings_file.worktree_id as u64)) {
|
||||
worktree.settings_files.push(WorktreeSettingsFile {
|
||||
path: db_settings_file.path,
|
||||
content: db_settings_file.content,
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Populate language servers.
|
||||
let language_servers = project
|
||||
.find_related(language_server::Entity)
|
||||
.all(tx)
|
||||
.await?;
|
||||
|
||||
let project = Project {
|
||||
id: project.id,
|
||||
role,
|
||||
collaborators: collaborators
|
||||
.into_iter()
|
||||
.map(|collaborator| ProjectCollaborator {
|
||||
connection_id: collaborator.connection(),
|
||||
user_id: collaborator.user_id,
|
||||
replica_id: collaborator.replica_id,
|
||||
is_host: collaborator.is_host,
|
||||
})
|
||||
.collect(),
|
||||
worktrees,
|
||||
language_servers: language_servers
|
||||
.into_iter()
|
||||
.map(|language_server| proto::LanguageServer {
|
||||
id: language_server.id as u64,
|
||||
name: language_server.name,
|
||||
})
|
||||
.collect(),
|
||||
};
|
||||
Ok((project, replica_id as ReplicaId))
|
||||
}
|
||||
|
||||
pub async fn leave_hosted_project(
|
||||
&self,
|
||||
project_id: ProjectId,
|
||||
connection: ConnectionId,
|
||||
) -> Result<LeftProject> {
|
||||
self.transaction(|tx| async move {
|
||||
let result = project_collaborator::Entity::delete_many()
|
||||
.filter(
|
||||
Condition::all()
|
||||
.add(project_collaborator::Column::ProjectId.eq(project_id))
|
||||
.add(project_collaborator::Column::ConnectionId.eq(connection.id as i32))
|
||||
.add(
|
||||
project_collaborator::Column::ConnectionServerId
|
||||
.eq(connection.owner_id as i32),
|
||||
),
|
||||
)
|
||||
.exec(&*tx)
|
||||
.await?;
|
||||
if result.rows_affected == 0 {
|
||||
return Err(anyhow!("not in the project"))?;
|
||||
}
|
||||
|
||||
let project = project::Entity::find_by_id(project_id)
|
||||
.one(&*tx)
|
||||
.await?
|
||||
.ok_or_else(|| anyhow!("no such project"))?;
|
||||
let collaborators = project
|
||||
.find_related(project_collaborator::Entity)
|
||||
.all(&*tx)
|
||||
.await?;
|
||||
let replica_ids = collaborators
|
||||
.iter()
|
||||
.map(|c| c.replica_id)
|
||||
.collect::<HashSet<_>>();
|
||||
let mut replica_id = ReplicaId(1);
|
||||
while replica_ids.contains(&replica_id) {
|
||||
replica_id.0 += 1;
|
||||
}
|
||||
let new_collaborator = project_collaborator::ActiveModel {
|
||||
project_id: ActiveValue::set(project_id),
|
||||
connection_id: ActiveValue::set(connection.id as i32),
|
||||
connection_server_id: ActiveValue::set(ServerId(connection.owner_id as i32)),
|
||||
user_id: ActiveValue::set(participant.user_id),
|
||||
replica_id: ActiveValue::set(replica_id),
|
||||
is_host: ActiveValue::set(false),
|
||||
..Default::default()
|
||||
}
|
||||
.insert(&*tx)
|
||||
.await?;
|
||||
collaborators.push(new_collaborator);
|
||||
|
||||
let db_worktrees = project.find_related(worktree::Entity).all(&*tx).await?;
|
||||
let mut worktrees = db_worktrees
|
||||
let connection_ids = collaborators
|
||||
.into_iter()
|
||||
.map(|db_worktree| {
|
||||
(
|
||||
db_worktree.id as u64,
|
||||
Worktree {
|
||||
id: db_worktree.id as u64,
|
||||
abs_path: db_worktree.abs_path,
|
||||
root_name: db_worktree.root_name,
|
||||
visible: db_worktree.visible,
|
||||
entries: Default::default(),
|
||||
repository_entries: Default::default(),
|
||||
diagnostic_summaries: Default::default(),
|
||||
settings_files: Default::default(),
|
||||
scan_id: db_worktree.scan_id as u64,
|
||||
completed_scan_id: db_worktree.completed_scan_id as u64,
|
||||
},
|
||||
)
|
||||
})
|
||||
.collect::<BTreeMap<_, _>>();
|
||||
|
||||
// Populate worktree entries.
|
||||
{
|
||||
let mut db_entries = worktree_entry::Entity::find()
|
||||
.filter(
|
||||
Condition::all()
|
||||
.add(worktree_entry::Column::ProjectId.eq(project_id))
|
||||
.add(worktree_entry::Column::IsDeleted.eq(false)),
|
||||
)
|
||||
.stream(&*tx)
|
||||
.await?;
|
||||
while let Some(db_entry) = db_entries.next().await {
|
||||
let db_entry = db_entry?;
|
||||
if let Some(worktree) = worktrees.get_mut(&(db_entry.worktree_id as u64)) {
|
||||
worktree.entries.push(proto::Entry {
|
||||
id: db_entry.id as u64,
|
||||
is_dir: db_entry.is_dir,
|
||||
path: db_entry.path,
|
||||
inode: db_entry.inode as u64,
|
||||
mtime: Some(proto::Timestamp {
|
||||
seconds: db_entry.mtime_seconds as u64,
|
||||
nanos: db_entry.mtime_nanos as u32,
|
||||
}),
|
||||
is_symlink: db_entry.is_symlink,
|
||||
is_ignored: db_entry.is_ignored,
|
||||
is_external: db_entry.is_external,
|
||||
git_status: db_entry.git_status.map(|status| status as i32),
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Populate repository entries.
|
||||
{
|
||||
let mut db_repository_entries = worktree_repository::Entity::find()
|
||||
.filter(
|
||||
Condition::all()
|
||||
.add(worktree_repository::Column::ProjectId.eq(project_id))
|
||||
.add(worktree_repository::Column::IsDeleted.eq(false)),
|
||||
)
|
||||
.stream(&*tx)
|
||||
.await?;
|
||||
while let Some(db_repository_entry) = db_repository_entries.next().await {
|
||||
let db_repository_entry = db_repository_entry?;
|
||||
if let Some(worktree) =
|
||||
worktrees.get_mut(&(db_repository_entry.worktree_id as u64))
|
||||
{
|
||||
worktree.repository_entries.insert(
|
||||
db_repository_entry.work_directory_id as u64,
|
||||
proto::RepositoryEntry {
|
||||
work_directory_id: db_repository_entry.work_directory_id as u64,
|
||||
branch: db_repository_entry.branch,
|
||||
},
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Populate worktree diagnostic summaries.
|
||||
{
|
||||
let mut db_summaries = worktree_diagnostic_summary::Entity::find()
|
||||
.filter(worktree_diagnostic_summary::Column::ProjectId.eq(project_id))
|
||||
.stream(&*tx)
|
||||
.await?;
|
||||
while let Some(db_summary) = db_summaries.next().await {
|
||||
let db_summary = db_summary?;
|
||||
if let Some(worktree) = worktrees.get_mut(&(db_summary.worktree_id as u64)) {
|
||||
worktree
|
||||
.diagnostic_summaries
|
||||
.push(proto::DiagnosticSummary {
|
||||
path: db_summary.path,
|
||||
language_server_id: db_summary.language_server_id as u64,
|
||||
error_count: db_summary.error_count as u32,
|
||||
warning_count: db_summary.warning_count as u32,
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Populate worktree settings files
|
||||
{
|
||||
let mut db_settings_files = worktree_settings_file::Entity::find()
|
||||
.filter(worktree_settings_file::Column::ProjectId.eq(project_id))
|
||||
.stream(&*tx)
|
||||
.await?;
|
||||
while let Some(db_settings_file) = db_settings_files.next().await {
|
||||
let db_settings_file = db_settings_file?;
|
||||
if let Some(worktree) =
|
||||
worktrees.get_mut(&(db_settings_file.worktree_id as u64))
|
||||
{
|
||||
worktree.settings_files.push(WorktreeSettingsFile {
|
||||
path: db_settings_file.path,
|
||||
content: db_settings_file.content,
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Populate language servers.
|
||||
let language_servers = project
|
||||
.find_related(language_server::Entity)
|
||||
.all(&*tx)
|
||||
.await?;
|
||||
|
||||
let project = Project {
|
||||
collaborators: collaborators
|
||||
.into_iter()
|
||||
.map(|collaborator| ProjectCollaborator {
|
||||
connection_id: collaborator.connection(),
|
||||
user_id: collaborator.user_id,
|
||||
replica_id: collaborator.replica_id,
|
||||
is_host: collaborator.is_host,
|
||||
})
|
||||
.collect(),
|
||||
worktrees,
|
||||
language_servers: language_servers
|
||||
.into_iter()
|
||||
.map(|language_server| proto::LanguageServer {
|
||||
id: language_server.id as u64,
|
||||
name: language_server.name,
|
||||
})
|
||||
.collect(),
|
||||
};
|
||||
Ok((project, replica_id as ReplicaId))
|
||||
.map(|collaborator| collaborator.connection())
|
||||
.collect();
|
||||
Ok(LeftProject {
|
||||
id: project.id,
|
||||
connection_ids,
|
||||
host_user_id: None,
|
||||
host_connection_id: None,
|
||||
})
|
||||
})
|
||||
.await
|
||||
}
|
||||
@@ -774,7 +871,7 @@ impl Database {
|
||||
.exec(&*tx)
|
||||
.await?;
|
||||
|
||||
let room = self.get_room(project.room_id, &tx).await?;
|
||||
let room = self.get_room(room_id, &tx).await?;
|
||||
let left_project = LeftProject {
|
||||
id: project_id,
|
||||
host_user_id: project.host_user_id,
|
||||
@@ -998,7 +1095,9 @@ impl Database {
|
||||
.one(&*tx)
|
||||
.await?
|
||||
.ok_or_else(|| anyhow!("project {} not found", project_id))?;
|
||||
Ok(project.room_id)
|
||||
Ok(project
|
||||
.room_id
|
||||
.ok_or_else(|| anyhow!("project not in room"))?)
|
||||
})
|
||||
.await
|
||||
}
|
||||
@@ -1061,7 +1160,7 @@ impl Database {
|
||||
.insert(&*tx)
|
||||
.await?;
|
||||
|
||||
let room = self.get_room(room_id, &*tx).await?;
|
||||
let room = self.get_room(room_id, &tx).await?;
|
||||
Ok(room)
|
||||
})
|
||||
.await
|
||||
@@ -1095,7 +1194,7 @@ impl Database {
|
||||
.exec(&*tx)
|
||||
.await?;
|
||||
|
||||
let room = self.get_room(room_id, &*tx).await?;
|
||||
let room = self.get_room(room_id, &tx).await?;
|
||||
Ok(room)
|
||||
})
|
||||
.await
|
||||
|
||||
@@ -321,7 +321,7 @@ impl Database {
|
||||
}
|
||||
|
||||
let participant_index = self
|
||||
.get_next_participant_index_internal(room_id, &*tx)
|
||||
.get_next_participant_index_internal(room_id, &tx)
|
||||
.await?;
|
||||
|
||||
let result = room_participant::Entity::update_many()
|
||||
@@ -374,7 +374,7 @@ impl Database {
|
||||
.select_only()
|
||||
.column(room_participant::Column::ParticipantIndex)
|
||||
.into_values::<_, QueryParticipantIndices>()
|
||||
.all(&*tx)
|
||||
.all(tx)
|
||||
.await?;
|
||||
|
||||
let mut participant_index = 0;
|
||||
@@ -407,7 +407,7 @@ impl Database {
|
||||
tx: &DatabaseTransaction,
|
||||
) -> Result<JoinRoom> {
|
||||
let participant_index = self
|
||||
.get_next_participant_index_internal(room_id, &*tx)
|
||||
.get_next_participant_index_internal(room_id, tx)
|
||||
.await?;
|
||||
|
||||
room_participant::Entity::insert_many([room_participant::ActiveModel {
|
||||
@@ -441,12 +441,12 @@ impl Database {
|
||||
])
|
||||
.to_owned(),
|
||||
)
|
||||
.exec(&*tx)
|
||||
.exec(tx)
|
||||
.await?;
|
||||
|
||||
let (channel, room) = self.get_channel_room(room_id, &tx).await?;
|
||||
let channel = channel.ok_or_else(|| anyhow!("no channel for room"))?;
|
||||
let channel_members = self.get_channel_participants(&channel, &*tx).await?;
|
||||
let channel_members = self.get_channel_participants(&channel, tx).await?;
|
||||
Ok(JoinRoom {
|
||||
room,
|
||||
channel_id: Some(channel.id),
|
||||
@@ -468,15 +468,7 @@ impl Database {
|
||||
Condition::all()
|
||||
.add(room_participant::Column::RoomId.eq(room_id))
|
||||
.add(room_participant::Column::UserId.eq(user_id))
|
||||
.add(room_participant::Column::AnsweringConnectionId.is_not_null())
|
||||
.add(
|
||||
Condition::any()
|
||||
.add(room_participant::Column::AnsweringConnectionLost.eq(true))
|
||||
.add(
|
||||
room_participant::Column::AnsweringConnectionServerId
|
||||
.ne(connection.owner_id as i32),
|
||||
),
|
||||
),
|
||||
.add(room_participant::Column::AnsweringConnectionId.is_not_null()),
|
||||
)
|
||||
.set(room_participant::ActiveModel {
|
||||
answering_connection_id: ActiveValue::set(Some(connection.id as i32)),
|
||||
@@ -499,7 +491,7 @@ impl Database {
|
||||
.one(&*tx)
|
||||
.await?
|
||||
.ok_or_else(|| anyhow!("project does not exist"))?;
|
||||
if project.host_user_id != user_id {
|
||||
if project.host_user_id != Some(user_id) {
|
||||
return Err(anyhow!("no such project"))?;
|
||||
}
|
||||
|
||||
@@ -859,7 +851,7 @@ impl Database {
|
||||
}
|
||||
|
||||
if collaborator.is_host {
|
||||
left_project.host_user_id = collaborator.user_id;
|
||||
left_project.host_user_id = Some(collaborator.user_id);
|
||||
left_project.host_connection_id = Some(collaborator_connection_id);
|
||||
}
|
||||
}
|
||||
@@ -1018,7 +1010,7 @@ impl Database {
|
||||
.ok_or_else(|| anyhow!("only admins can set participant role"))?;
|
||||
|
||||
if role.requires_cla() {
|
||||
self.check_user_has_signed_cla(user_id, room_id, &*tx)
|
||||
self.check_user_has_signed_cla(user_id, room_id, &tx)
|
||||
.await?;
|
||||
}
|
||||
|
||||
@@ -1029,7 +1021,7 @@ impl Database {
|
||||
.add(room_participant::Column::UserId.eq(user_id)),
|
||||
)
|
||||
.set(room_participant::ActiveModel {
|
||||
role: ActiveValue::set(Some(ChannelRole::from(role))),
|
||||
role: ActiveValue::set(Some(role)),
|
||||
..Default::default()
|
||||
})
|
||||
.exec(&*tx)
|
||||
@@ -1038,7 +1030,7 @@ impl Database {
|
||||
if result.rows_affected != 1 {
|
||||
Err(anyhow!("could not update room participant role"))?;
|
||||
}
|
||||
Ok(self.get_room(room_id, &tx).await?)
|
||||
self.get_room(room_id, &tx).await
|
||||
})
|
||||
.await
|
||||
}
|
||||
@@ -1050,11 +1042,11 @@ impl Database {
|
||||
tx: &DatabaseTransaction,
|
||||
) -> Result<()> {
|
||||
let channel = room::Entity::find_by_id(room_id)
|
||||
.one(&*tx)
|
||||
.one(tx)
|
||||
.await?
|
||||
.ok_or_else(|| anyhow!("could not find room"))?
|
||||
.find_related(channel::Entity)
|
||||
.one(&*tx)
|
||||
.one(tx)
|
||||
.await?;
|
||||
|
||||
if let Some(channel) = channel {
|
||||
@@ -1065,13 +1057,13 @@ impl Database {
|
||||
.is_in(channel.ancestors())
|
||||
.and(channel::Column::RequiresZedCla.eq(true)),
|
||||
)
|
||||
.count(&*tx)
|
||||
.count(tx)
|
||||
.await?
|
||||
> 0;
|
||||
if requires_zed_cla {
|
||||
if contributor::Entity::find()
|
||||
.filter(contributor::Column::UserId.eq(user_id))
|
||||
.one(&*tx)
|
||||
.one(tx)
|
||||
.await?
|
||||
.is_none()
|
||||
{
|
||||
@@ -1084,10 +1076,9 @@ impl Database {
|
||||
|
||||
pub async fn connection_lost(&self, connection: ConnectionId) -> Result<()> {
|
||||
self.transaction(|tx| async move {
|
||||
self.room_connection_lost(connection, &*tx).await?;
|
||||
self.channel_buffer_connection_lost(connection, &*tx)
|
||||
.await?;
|
||||
self.channel_chat_connection_lost(connection, &*tx).await?;
|
||||
self.room_connection_lost(connection, &tx).await?;
|
||||
self.channel_buffer_connection_lost(connection, &tx).await?;
|
||||
self.channel_chat_connection_lost(connection, &tx).await?;
|
||||
Ok(())
|
||||
})
|
||||
.await
|
||||
@@ -1107,7 +1098,7 @@ impl Database {
|
||||
.eq(connection.owner_id as i32),
|
||||
),
|
||||
)
|
||||
.one(&*tx)
|
||||
.one(tx)
|
||||
.await?;
|
||||
|
||||
if let Some(participant) = participant {
|
||||
@@ -1115,7 +1106,7 @@ impl Database {
|
||||
answering_connection_lost: ActiveValue::set(true),
|
||||
..participant.into_active_model()
|
||||
})
|
||||
.exec(&*tx)
|
||||
.exec(tx)
|
||||
.await?;
|
||||
}
|
||||
Ok(())
|
||||
@@ -1304,7 +1295,7 @@ impl Database {
|
||||
drop(db_followers);
|
||||
|
||||
let channel = if let Some(channel_id) = db_room.channel_id {
|
||||
Some(self.get_channel_internal(channel_id, &*tx).await?)
|
||||
Some(self.get_channel_internal(channel_id, tx).await?)
|
||||
} else {
|
||||
None
|
||||
};
|
||||
|
||||