Compare commits
43 Commits
blade-maco...access-kit
| Author | SHA1 | Date |
|---|---|---|
|  | 63e9a7069f |  |
|  | 1c361ac579 |  |
|  | bea36918f4 |  |
|  | 43e8fdbe82 |  |
|  | 5df1318e75 |  |
|  | 577b244b03 |  |
|  | 2e0d18ee76 |  |
|  | c5a23faf7c |  |
|  | 86f81c4db3 |  |
|  | 07501d9cfa |  |
|  | 07c7778cff |  |
|  | aa6926e57a |  |
|  | ae577c9d5c |  |
|  | f4f72a1136 |  |
|  | 4310b0b8de |  |
|  | a161a7d0c9 |  |
|  | ab6b9196e1 |  |
|  | ef551cedef |  |
|  | 9ef83a2557 |  |
|  | 32fdff0285 |  |
|  | 4094562321 |  |
|  | 6869b62af3 |  |
|  | 21a7421ee0 |  |
|  | 96dcc385dd |  |
|  | e6766e102e |  |
|  | 694e18417e |  |
|  | 94426c4393 |  |
|  | bf1bcd027c |  |
|  | 23132b5ab1 |  |
|  | a8d5864524 |  |
|  | ea322e1d1c |  |
|  | e1ae0d46da |  |
|  | bdc2558eac |  |
|  | a41fb29e01 |  |
|  | aa319ccfd0 |  |
|  | f01763a1fa |  |
|  | 2dffc5f6e1 |  |
|  | ed791c4fc1 |  |
|  | 7c6b34cb73 |  |
|  | 3921259b6c |  |
|  | e93dca5ec3 |  |
|  | c6626627c2 |  |
|  | db86f4006e |  |
38  .github/ISSUE_TEMPLATE/0_feature_request.yml (vendored)
@@ -2,23 +2,23 @@ name: Feature Request
description: "Tip: open this issue template from within Zed with the `request feature` command palette action"
labels: ["admin read", "triage", "enhancement"]
body:
- type: checkboxes
attributes:
label: Check for existing issues
description: Check the backlog of issues to reduce the chances of creating duplicates; if an issue already exists, place a `+1` (👍) on it.
options:
- label: Completed
required: true
- type: textarea
attributes:
label: Describe the feature
description: A clear and concise description of what you want to happen.
validations:
required: true
- type: textarea
attributes:
label: |
If applicable, add mockups / screenshots to help present your vision of the feature
description: Drag images into the text input below
validations:
required: false
82  .github/ISSUE_TEMPLATE/1_language_support.yml (vendored)
@@ -2,46 +2,46 @@ name: Language Support
description: Request language support
title: "<name_of_language> support"
labels:
[
"admin read",
"triage",
"enhancement",
"language",
"unsupported language",
"potential plugin",
]
[
"admin read",
"triage",
"enhancement",
"language",
"unsupported language",
"potential extension",
]
body:
- type: checkboxes
attributes:
label: Check for existing issues
description: Check the backlog of issues to reduce the chances of creating duplicates; if an issue already exists, place a `+1` (👍) on it.
options:
- label: Completed
required: true
- type: input
attributes:
label: Language
description: What language do you want support for?
placeholder: HTML
validations:
required: true
- type: input
attributes:
label: Tree Sitter parser link
description: If applicable, provide a link to the appropriate tree sitter parser. Look here first - https://tree-sitter.github.io/tree-sitter/#available-parsers
placeholder: https://github.com/tree-sitter/tree-sitter-html
validations:
required: false
- type: input
attributes:
label: Language server link
description: If applicable, provide a link to the appropriate language server. Look here first - https://microsoft.github.io/language-server-protocol/implementors/servers/
placeholder: https://github.com/Microsoft/vscode/tree/main/extensions/html-language-features/server
validations:
required: false
- type: textarea
attributes:
label: Misc notes
description: Provide any additional things the team should consider when adding support for this language
validations:
required: false
66  .github/ISSUE_TEMPLATE/2_bug_report.yml (vendored)
@@ -2,37 +2,37 @@ name: Bug Report
description: "Tip: open this issue template from within Zed with the `file bug report` command palette action"
labels: ["admin read", "triage", "defect"]
body:
- type: checkboxes
attributes:
label: Check for existing issues
description: Check the backlog of issues to reduce the chances of creating duplicates; if an issue already exists, place a `+1` (👍) on it.
options:
- label: Completed
required: true
- type: textarea
attributes:
label: Describe the bug / provide steps to reproduce it
description: A clear and concise description of what the bug is.
validations:
required: true
- type: textarea
id: environment
attributes:
label: Environment
description: Run the `copy system specs into clipboard` command palette action and paste the output in the field below.
validations:
required: true
- type: textarea
attributes:
label: If applicable, add mockups / screenshots to help explain present your vision of the feature
description: Drag issues into the text input below
validations:
required: false
- type: textarea
attributes:
label: |
If applicable, attach your `~/Library/Logs/Zed/Zed.log` file to this issue.
If you only need the most recent lines, you can run the `zed: open log` command palette action to see the last 1000.
description: Drag Zed.log into the text input below
validations:
required: false
||||
21
.github/ISSUE_TEMPLATE/config.yml
vendored
21
.github/ISSUE_TEMPLATE/config.yml
vendored
@@ -1,10 +1,13 @@
|
||||
contact_links:
|
||||
- name: Top-Ranking Issues
|
||||
url: https://github.com/zed-industries/zed/issues/5393
|
||||
about: See an overview of the most popular Zed issues
|
||||
- name: Platform Support
|
||||
url: https://github.com/zed-industries/zed/issues/5391
|
||||
about: A quick note on platform support
|
||||
- name: Positive Feedback
|
||||
url: https://github.com/zed-industries/zed/discussions/5397
|
||||
about: A central location for kind words about Zed
|
||||
- name: Theme Request
|
||||
url: https://github.com/zed-industries/extensions/issues/new/choose
|
||||
about: Request a theme in the extensions repository
|
||||
- name: Top-Ranking Issues
|
||||
url: https://github.com/zed-industries/zed/issues/5393
|
||||
about: See an overview of the most popular Zed issues
|
||||
- name: Platform Support
|
||||
url: https://github.com/zed-industries/zed/issues/5391
|
||||
about: A quick note on platform support
|
||||
- name: Positive Feedback
|
||||
url: https://github.com/zed-industries/zed/discussions/5397
|
||||
about: A central location for kind words about Zed
|
||||
|
||||
20  .github/actions/check_style/action.yml (vendored)
@@ -2,14 +2,14 @@ name: "Check formatting"
description: "Checks code formatting use cargo fmt"

runs:
using: "composite"
steps:
- name: cargo fmt
shell: bash -euxo pipefail {0}
run: cargo fmt --all -- --check

- name: Find modified migrations
shell: bash -euxo pipefail {0}
run: |
export SQUAWK_GITHUB_TOKEN=${{ github.token }}
. ./script/squawk
32  .github/actions/run_tests/action.yml (vendored)
@@ -2,22 +2,22 @@ name: "Run tests"
description: "Runs the tests"

runs:
using: "composite"
steps:
- name: Install Rust
shell: bash -euxo pipefail {0}
run: |
cargo install cargo-nextest

- name: Install Node
uses: actions/setup-node@v4
with:
node-version: "18"

- name: Limit target directory size
shell: bash -euxo pipefail {0}
run: script/clear-target-dir-if-larger-than 100

- name: Run tests
shell: bash -euxo pipefail {0}
run: cargo nextest run --workspace --no-fail-fast
367  .github/workflows/ci.yml (vendored)
@@ -1,210 +1,209 @@
name: CI

on:
push:
branches:
- main
- "v[0-9]+.[0-9]+.x"
tags:
- "v*"
pull_request:
branches:
- "**"

concurrency:
# Allow only one workflow per any non-`main` branch.
group: ${{ github.workflow }}-${{ github.ref_name }}-${{ github.ref_name == 'main' && github.sha || 'anysha' }}
cancel-in-progress: true

env:
CARGO_TERM_COLOR: always
CARGO_INCREMENTAL: 0
RUST_BACKTRACE: 1

jobs:
style:
name: Check formatting and spelling
runs-on:
- self-hosted
- test
steps:
- name: Checkout repo
uses: actions/checkout@v4
with:
clean: false
submodules: "recursive"
fetch-depth: 0

- name: Set up default .cargo/config.toml
run: cp ./.cargo/ci-config.toml ~/.cargo/config.toml
- name: Remove untracked files
run: git clean -df

- name: Check spelling
run: |
if ! which typos > /dev/null; then
cargo install typos-cli
fi
typos

- name: Run style checks
uses: ./.github/actions/check_style

- name: Ensure fresh merge
shell: bash -euxo pipefail {0}
run: |
if [ -z "$GITHUB_BASE_REF" ];
then
echo "BUF_BASE_BRANCH=$(git merge-base origin/main HEAD)" >> $GITHUB_ENV
else
git checkout -B temp
git merge -q origin/$GITHUB_BASE_REF -m "merge main into temp"
echo "BUF_BASE_BRANCH=$GITHUB_BASE_REF" >> $GITHUB_ENV
fi

- uses: bufbuild/buf-setup-action@v1
- uses: bufbuild/buf-breaking-action@v1
with:
input: "crates/rpc/proto/"
against: "https://github.com/${GITHUB_REPOSITORY}.git#branch=${BUF_BASE_BRANCH},subdir=crates/rpc/proto/"

macos_tests:
name: (macOS) Run Clippy and tests
runs-on:
- self-hosted
- test
steps:
- name: Checkout repo
uses: actions/checkout@v4
with:
clean: false
submodules: "recursive"

- name: cargo clippy
shell: bash -euxo pipefail {0}
run: script/clippy

- name: Run tests
uses: ./.github/actions/run_tests

- name: Build collab
run: cargo build -p collab

- name: Build other binaries
run: cargo build --workspace --bins --all-features

- name: Build Blade GPUI
run: cargo check --features "macos-blade"
working-directory: "crates/gpui"

# todo!(linux): Actually run the tests
linux_tests:
name: (Linux) Run Clippy and tests
runs-on: ubuntu-latest
steps:
- name: Checkout repo
uses: actions/checkout@v4
with:
clean: false
submodules: "recursive"

- name: Restore from cache
uses: actions/cache@v4
with:
path: |
~/.cargo/bin/
~/.cargo/registry/index/
~/.cargo/registry/cache/
~/.cargo/git/db/
target/
key: ${{ runner.os }}-cargo-${{ hashFiles('**/rust-toolchain.toml') }}-${{ hashFiles('**/Cargo.lock') }}
restore-keys: ${{ runner.os }}-cargo-${{ hashFiles('**/rust-toolchain.toml') }}-

- name: configure linux
shell: bash -euxo pipefail {0}
run: script/linux

- name: cargo clippy
shell: bash -euxo pipefail {0}
run: script/clippy

- name: Build Zed
run: cargo build -p zed

bundle:
name: Bundle app
runs-on:
- self-hosted
- bundle
if: ${{ startsWith(github.ref, 'refs/tags/v') || contains(github.event.pull_request.labels.*.name, 'run-build-dmg') }}
needs: [macos_tests, linux_tests]
bundle:
name: Bundle macOS app
runs-on:
- self-hosted
- bundle
if: ${{ startsWith(github.ref, 'refs/tags/v') || contains(github.event.pull_request.labels.*.name, 'run-build-dmg') }}
needs: [macos_tests]
env:
MACOS_CERTIFICATE: ${{ secrets.MACOS_CERTIFICATE }}
MACOS_CERTIFICATE_PASSWORD: ${{ secrets.MACOS_CERTIFICATE_PASSWORD }}
APPLE_NOTARIZATION_USERNAME: ${{ secrets.APPLE_NOTARIZATION_USERNAME }}
APPLE_NOTARIZATION_PASSWORD: ${{ secrets.APPLE_NOTARIZATION_PASSWORD }}
ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }}
DIGITALOCEAN_SPACES_ACCESS_KEY: ${{ secrets.DIGITALOCEAN_SPACES_ACCESS_KEY }}
DIGITALOCEAN_SPACES_SECRET_KEY: ${{ secrets.DIGITALOCEAN_SPACES_SECRET_KEY }}
steps:
- name: Install Node
uses: actions/setup-node@v4
with:
node-version: "18"

- name: Checkout repo
uses: actions/checkout@v4
with:
clean: false
submodules: "recursive"

- name: Limit target directory size
run: script/clear-target-dir-if-larger-than 100

- name: Determine version and release channel
if: ${{ startsWith(github.ref, 'refs/tags/v') }}
run: |
set -eu

version=$(script/get-crate-version zed)
channel=$(cat crates/zed/RELEASE_CHANNEL)
echo "Publishing version: ${version} on release channel ${channel}"
echo "RELEASE_CHANNEL=${channel}" >> $GITHUB_ENV

expected_tag_name=""
case ${channel} in
stable)
expected_tag_name="v${version}";;
preview)
expected_tag_name="v${version}-pre";;
nightly)
expected_tag_name="v${version}-nightly";;
*)
echo "can't publish a release on channel ${channel}"
exit 1;;
esac
if [[ $GITHUB_REF_NAME != $expected_tag_name ]]; then
echo "invalid release tag ${GITHUB_REF_NAME}. expected ${expected_tag_name}"
exit 1
fi

- name: Generate license file
run: script/generate-licenses

- name: Create app bundle
run: script/bundle

- name: Upload app bundle to workflow run if main branch or specific label
uses: actions/upload-artifact@v3
if: ${{ github.ref == 'refs/heads/main' }} || contains(github.event.pull_request.labels.*.name, 'run-build-dmg') }}
with:
name: Zed_${{ github.event.pull_request.head.sha || github.sha }}.dmg
path: target/release/Zed.dmg

- uses: softprops/action-gh-release@v1
name: Upload app bundle to release
if: ${{ env.RELEASE_CHANNEL == 'preview' || env.RELEASE_CHANNEL == 'stable' }}
with:
draft: true
prerelease: ${{ env.RELEASE_CHANNEL == 'preview' }}
files: target/release/Zed.dmg
body: ""
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
50  .github/workflows/danger.yml (vendored)
@@ -1,35 +1,35 @@
name: Danger

on:
pull_request:
branches: [main]
types:
- opened
- synchronize
- reopened
- edited

jobs:
danger:
runs-on: ubuntu-latest

steps:
- uses: actions/checkout@v4

- uses: pnpm/action-setup@v3
with:
version: 8

- name: Setup Node
uses: actions/setup-node@v4
with:
node-version: "20"
cache: "pnpm"
cache-dependency-path: "script/danger/pnpm-lock.yaml"

- run: pnpm install --dir script/danger

- name: Run Danger
run: pnpm run --dir script/danger danger ci
env:
GITHUB_TOKEN: ${{ github.token }}
34  .github/workflows/deploy_collab.yml (vendored)
@@ -45,8 +45,18 @@ jobs:
submodules: "recursive"
fetch-depth: 0

- name: Install cargo nextest
shell: bash -euxo pipefail {0}
run: |
cargo install cargo-nextest

- name: Limit target directory size
shell: bash -euxo pipefail {0}
run: script/clear-target-dir-if-larger-than 100

- name: Run tests
uses: ./.github/actions/run_tests
shell: bash -euxo pipefail {0}
run: cargo nextest run --package collab --no-fail-fast

publish:
name: Publish collab server image
@@ -90,22 +100,26 @@ jobs:
- name: Sign into Kubernetes
run: doctl kubernetes cluster kubeconfig save --expiry-seconds 600 ${{ secrets.CLUSTER_NAME }}

- name: Determine namespace
- name: Start rollout
run: |
set -eu
if [[ $GITHUB_REF_NAME = "collab-production" ]]; then
echo "Deploying collab:$GITHUB_SHA to production"
echo "KUBE_NAMESPACE=production" >> $GITHUB_ENV
export ZED_KUBE_NAMESPACE=production
elif [[ $GITHUB_REF_NAME = "collab-staging" ]]; then
echo "Deploying collab:$GITHUB_SHA to staging"
echo "KUBE_NAMESPACE=staging" >> $GITHUB_ENV
export ZED_KUBE_NAMESPACE=staging
else
echo "cowardly refusing to deploy from an unknown branch"
exit 1
fi

- name: Start rollout
run: kubectl -n "$KUBE_NAMESPACE" set image deployment/collab collab=registry.digitalocean.com/zed/collab:${GITHUB_SHA}
echo "Deploying collab:$GITHUB_SHA to $ZED_KUBE_NAMESPACE"

- name: Wait for rollout to finish
run: kubectl -n "$KUBE_NAMESPACE" rollout status deployment/collab
source script/lib/deploy-helpers.sh
export_vars_for_environment $ZED_KUBE_NAMESPACE

export ZED_DO_CERTIFICATE_ID=$(doctl compute certificate list --format ID --no-header)
export ZED_IMAGE_ID="registry.digitalocean.com/zed/collab:${GITHUB_SHA}"

envsubst < crates/collab/k8s/collab.template.yml | kubectl apply -f -
kubectl -n "$ZED_KUBE_NAMESPACE" rollout status deployment/collab --watch
echo "deployed collab.template.yml to ${ZED_KUBE_NAMESPACE}"
52  .github/workflows/randomized_tests.yml (vendored)
@@ -3,35 +3,35 @@ name: Randomized Tests
concurrency: randomized-tests

on:
push:
branches:
- randomized-tests-runner
# schedule:
# - cron: '0 * * * *'

env:
CARGO_TERM_COLOR: always
CARGO_INCREMENTAL: 0
RUST_BACKTRACE: 1
ZED_SERVER_URL: https://zed.dev

jobs:
tests:
name: Run randomized tests
runs-on:
- self-hosted
- randomized-tests
steps:
- name: Install Node
uses: actions/setup-node@v4
with:
node-version: "18"

- name: Checkout repo
uses: actions/checkout@v4
with:
clean: false
submodules: "recursive"

- name: Run randomized tests
run: script/randomized-test-ci
162  .github/workflows/release_nightly.yml (vendored)
@@ -1,98 +1,98 @@
name: Release Nightly

on:
schedule:
# Fire every day at 7:00am UTC (Roughly before EU workday and after US workday)
- cron: "0 7 * * *"
push:
tags:
- "nightly"

env:
CARGO_TERM_COLOR: always
CARGO_INCREMENTAL: 0
RUST_BACKTRACE: 1

jobs:
style:
name: Check formatting and Clippy lints
if: github.repository_owner == 'zed-industries'
runs-on:
- self-hosted
- test
steps:
- name: Checkout repo
uses: actions/checkout@v4
with:
clean: false
submodules: "recursive"
fetch-depth: 0

- name: Run style checks
uses: ./.github/actions/check_style

- name: Run clippy
shell: bash -euxo pipefail {0}
run: script/clippy

tests:
name: Run tests
if: github.repository_owner == 'zed-industries'
runs-on:
- self-hosted
- test
needs: style
steps:
- name: Checkout repo
uses: actions/checkout@v4
with:
clean: false
submodules: "recursive"

- name: Run tests
uses: ./.github/actions/run_tests

bundle:
name: Bundle app
if: github.repository_owner == 'zed-industries'
runs-on:
- self-hosted
- bundle
needs: tests
env:
MACOS_CERTIFICATE: ${{ secrets.MACOS_CERTIFICATE }}
MACOS_CERTIFICATE_PASSWORD: ${{ secrets.MACOS_CERTIFICATE_PASSWORD }}
APPLE_NOTARIZATION_USERNAME: ${{ secrets.APPLE_NOTARIZATION_USERNAME }}
APPLE_NOTARIZATION_PASSWORD: ${{ secrets.APPLE_NOTARIZATION_PASSWORD }}
DIGITALOCEAN_SPACES_ACCESS_KEY: ${{ secrets.DIGITALOCEAN_SPACES_ACCESS_KEY }}
DIGITALOCEAN_SPACES_SECRET_KEY: ${{ secrets.DIGITALOCEAN_SPACES_SECRET_KEY }}
ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }}
steps:
- name: Install Node
uses: actions/setup-node@v4
with:
node-version: "18"

- name: Checkout repo
uses: actions/checkout@v4
with:
clean: false
submodules: "recursive"

- name: Limit target directory size
run: script/clear-target-dir-if-larger-than 100

- name: Set release channel to nightly
run: |
set -eu
version=$(git rev-parse --short HEAD)
echo "Publishing version: ${version} on release channel nightly"
echo "nightly" > crates/zed/RELEASE_CHANNEL

- name: Generate license file
run: script/generate-licenses

- name: Create app bundle
run: script/bundle

- name: Upload Zed Nightly
run: script/upload-nightly
@@ -1,18 +1,18 @@
on:
schedule:
- cron: "0 */12 * * *"
workflow_dispatch:

jobs:
update_top_ranking_issues:
runs-on: ubuntu-latest
if: github.repository_owner == 'zed-industries'
steps:
- uses: actions/checkout@v4
- uses: actions/setup-python@v4
with:
python-version: "3.10.5"
architecture: "x64"
cache: "pip"
- run: pip install -r script/update_top_ranking_issues/requirements.txt
- run: python script/update_top_ranking_issues/main.py 5393 --github-token ${{ secrets.GITHUB_TOKEN }} --prod
@@ -1,18 +1,18 @@
on:
schedule:
- cron: "0 15 * * *"
workflow_dispatch:

jobs:
update_top_ranking_issues:
runs-on: ubuntu-latest
if: github.repository_owner == 'zed-industries'
steps:
- uses: actions/checkout@v4
- uses: actions/setup-python@v4
with:
python-version: "3.10.5"
architecture: "x64"
cache: "pip"
- run: pip install -r script/update_top_ranking_issues/requirements.txt
- run: python script/update_top_ranking_issues/main.py 6952 --github-token ${{ secrets.GITHUB_TOKEN }} --prod --query-day-interval 7
5  .gitignore (vendored)
@@ -5,12 +5,8 @@
.DS_Store
/plugins/bin
/script/node_modules
/styles/node_modules
/styles/src/types/zed.ts
/crates/theme/schemas/theme.json
/crates/collab/static/styles.css
/crates/collab/.admins.json
/vendor/bin
/assets/*licenses.md
**/venv
.build
@@ -25,3 +21,4 @@ DerivedData/
**/*.db
.pytest_cache
.venv
.blob_store
2  .mailmap
@@ -11,6 +11,8 @@
Antonio Scandurra <me@as-cii.com>
Antonio Scandurra <me@as-cii.com> <antonio@zed.dev>
Christian Bergschneider <christian.bergschneider@gmx.de>
Christian Bergschneider <christian.bergschneider@gmx.de> <magiclake@gmx.de>
Conrad Irwin <conrad@zed.dev>
Conrad Irwin <conrad@zed.dev> <conrad.irwin@gmail.com>
Greg Morenz <greg-morenz@droid.cafe>
@@ -1,6 +1,12 @@
{
"JSON": {
"tab_size": 4
"languages": {
"TOML": {
"formatter": "prettier",
"format_on_save": "off"
},
"YAML": {
"formatter": "prettier"
}
},
"formatter": "auto"
}
1293  Cargo.lock (generated)
File diff suppressed because it is too large
16  Cargo.toml
@@ -181,9 +181,6 @@ anyhow = "1.0.57"
async-compression = { version = "0.4", features = ["gzip", "futures-io"] }
async-tar = "0.4.2"
async-trait = "0.1"
blade-graphics = { git = "https://github.com/kvark/blade", rev = "e9d93a4d41f3946a03ffb76136290d6ccf7f2b80" }
blade-macros = { git = "https://github.com/kvark/blade", rev = "e9d93a4d41f3946a03ffb76136290d6ccf7f2b80" }
blade-rwh = { package = "raw-window-handle", version = "0.5" }
chrono = { version = "0.4", features = ["serde"] }
ctor = "0.2.6"
derive_more = "0.99.17"
@@ -193,10 +190,7 @@ git2 = { version = "0.15", default-features = false }
globset = "0.4"
indoc = "1"
# We explicitly disable a http2 support in isahc.
isahc = { version = "1.7.2", default-features = false, features = [
"static-curl",
"text-decoding",
] }
isahc = { version = "1.7.2", default-features = false, features = ["static-curl", "text-decoding"] }
lazy_static = "1.4.0"
log = { version = "0.4.16", features = ["kv_unstable_serde"] }
ordered-float = "2.1.1"
@@ -211,13 +205,11 @@ regex = "1.5"
rusqlite = { version = "0.29.0", features = ["blob", "array", "modern_sqlite"] }
rust-embed = { version = "8.0", features = ["include-exclude"] }
schemars = "0.8"
semver = "1.0"
serde = { version = "1.0", features = ["derive", "rc"] }
serde_derive = { version = "1.0", features = ["deserialize_in_place"] }
serde_json = { version = "1.0", features = ["preserve_order", "raw_value"] }
serde_json_lenient = { version = "0.1", features = [
"preserve_order",
"raw_value",
] }
serde_json_lenient = { version = "0.1", features = ["preserve_order", "raw_value"] }
serde_repr = "0.1"
smallvec = { version = "1.6", features = ["union"] }
smol = "1.2"
@@ -226,7 +218,7 @@ sysinfo = "0.29.10"
tempfile = "3.9.0"
thiserror = "1.0.29"
tiktoken-rs = "0.5.7"
time = { version = "0.3", features = ["serde", "serde-well-known"] }
time = { version = "0.3", features = ["serde", "serde-well-known", "formatting"] }
toml = "0.5"
tree-sitter = { version = "0.20", features = ["wasm"] }
tree-sitter-astro = { git = "https://github.com/virchau13/tree-sitter-astro.git", rev = "e924787e12e8a03194f36a113290ac11d6dc10f3" }
3  Procfile
@@ -1,2 +1,3 @@
collab: cd crates/collab && RUST_LOG=${RUST_LOG:-warn,collab=info} cargo run serve
collab: RUST_LOG=${RUST_LOG:-warn,collab=info} cargo run --package=collab serve
livekit: livekit-server --dev
blob_store: MINIO_ROOT_USER=the-blob-store-access-key MINIO_ROOT_PASSWORD=the-blob-store-secret-key minio server .blob_store
@@ -16,10 +16,12 @@
"bmp": "image",
"c": "code",
"cc": "code",
"cjs": "code",
"conf": "settings",
"cpp": "code",
"css": "css",
"csv": "storage",
"cts": "typescript",
"dat": "storage",
"db": "storage",
"dbf": "storage",
@@ -80,12 +82,14 @@
"mdf": "storage",
"mdx": "document",
"mkv": "video",
"mjs": "code",
"mka": "audio",
"ml": "ocaml",
"mli": "ocaml",
"mov": "video",
"mp3": "audio",
"mp4": "video",
"mts": "typescript",
"myd": "storage",
"myi": "storage",
"odp": "document",
@@ -117,6 +117,8 @@
"ctrl-e": "vim::LineDown",
"ctrl-y": "vim::LineUp",
// "g" commands
"g e": "vim::PreviousWordEnd",
"g shift-e": ["vim::PreviousWordEnd", { "ignorePunctuation": true }],
"g g": "vim::StartOfDocument",
"g h": "editor::Hover",
"g t": "pane::ActivateNextItem",
@@ -482,6 +482,7 @@
"deno": {
"enable": false
},
"code_actions_on_format": {},
// Different settings for specific languages.
"languages": {
"Plain Text": {
@@ -492,7 +493,10 @@
},
"Go": {
"tab_size": 4,
"hard_tabs": true
"hard_tabs": true,
"code_actions_on_format": {
"source.organizeImports": true
}
},
"Markdown": {
"soft_wrap": "preferred_line_length"
@@ -7,6 +7,11 @@ ZED_ENVIRONMENT = "development"
LIVE_KIT_SERVER = "http://localhost:7880"
LIVE_KIT_KEY = "devkey"
LIVE_KIT_SECRET = "secret"
BLOB_STORE_ACCESS_KEY = "the-blob-store-access-key"
BLOB_STORE_SECRET_KEY = "the-blob-store-secret-key"
BLOB_STORE_BUCKET = "the-extensions-bucket"
BLOB_STORE_URL = "http://127.0.0.1:9000"
BLOB_STORE_REGION = "the-region"

# RUST_LOG=info
# LOG_JSON=true
@@ -17,6 +17,8 @@ required-features = ["seed-support"]
[dependencies]
anyhow.workspace = true
async-tungstenite = "0.16"
aws-config = { version = "1.1.5" }
aws-sdk-s3 = { version = "1.15.0" }
axum = { version = "0.5", features = ["json", "headers", "ws"] }
axum-extra = { version = "0.3", features = ["erased-json"] }
base64 = "0.13"
@@ -41,6 +43,7 @@ reqwest = { version = "0.11", features = ["json"], optional = true }
rpc.workspace = true
scrypt = "0.7"
sea-orm = { version = "0.12.x", features = ["sqlx-postgres", "postgres-array", "runtime-tokio-rustls", "with-uuid"] }
semver.workspace = true
serde.workspace = true
serde_derive.workspace = true
serde_json.workspace = true
@@ -61,7 +64,6 @@ util.workspace = true
uuid.workspace = true

[dev-dependencies]
release_channel.workspace = true
async-trait.workspace = true
audio.workspace = true
call = { workspace = true, features = ["test-support"] }
@@ -86,6 +88,7 @@ node_runtime.workspace = true
notifications = { workspace = true, features = ["test-support"] }
pretty_assertions.workspace = true
project = { workspace = true, features = ["test-support"] }
release_channel.workspace = true
rpc = { workspace = true, features = ["test-support"] }
sea-orm = { version = "0.12.x", features = ["sqlx-sqlite"] }
serde_json.workspace = true
@@ -105,6 +105,31 @@ spec:
secretKeyRef:
name: livekit
key: secret
- name: BLOB_STORE_ACCESS_KEY
valueFrom:
secretKeyRef:
name: blob-store
key: access_key
- name: BLOB_STORE_SECRET_KEY
valueFrom:
secretKeyRef:
name: blob-store
key: secret_key
- name: BLOB_STORE_URL
valueFrom:
secretKeyRef:
name: blob-store
key: url
- name: BLOB_STORE_REGION
valueFrom:
secretKeyRef:
name: blob-store
key: region
- name: BLOB_STORE_BUCKET
valueFrom:
secretKeyRef:
name: blob-store
key: bucket
- name: INVITE_LINK_PREFIX
value: ${INVITE_LINK_PREFIX}
- name: RUST_BACKTRACE
@@ -353,3 +353,25 @@ CREATE TABLE contributors (
signed_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
PRIMARY KEY (user_id)
);

CREATE TABLE extensions (
id INTEGER PRIMARY KEY AUTOINCREMENT,
external_id TEXT NOT NULL,
name TEXT NOT NULL,
latest_version TEXT NOT NULL,
total_download_count INTEGER NOT NULL DEFAULT 0
);

CREATE TABLE extension_versions (
extension_id INTEGER REFERENCES extensions(id),
version TEXT NOT NULL,
published_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
authors TEXT NOT NULL,
repository TEXT NOT NULL,
description TEXT NOT NULL,
download_count INTEGER NOT NULL DEFAULT 0,
PRIMARY KEY (extension_id, version)
);

CREATE UNIQUE INDEX "index_extensions_external_id" ON "extensions" ("external_id");
CREATE INDEX "index_extensions_total_download_count" ON "extensions" ("total_download_count");
22  crates/collab/migrations/20240214102900_add_extensions.sql (Normal file)
@@ -0,0 +1,22 @@
CREATE TABLE IF NOT EXISTS extensions (
id SERIAL PRIMARY KEY,
name TEXT NOT NULL,
external_id TEXT NOT NULL,
latest_version TEXT NOT NULL,
total_download_count BIGINT NOT NULL DEFAULT 0
);

CREATE TABLE IF NOT EXISTS extension_versions (
extension_id INTEGER REFERENCES extensions(id),
version TEXT NOT NULL,
published_at TIMESTAMP NOT NULL DEFAULT now(),
authors TEXT NOT NULL,
repository TEXT NOT NULL,
description TEXT NOT NULL,
download_count BIGINT NOT NULL DEFAULT 0,
PRIMARY KEY(extension_id, version)
);

CREATE UNIQUE INDEX "index_extensions_external_id" ON "extensions" ("external_id");
CREATE INDEX "trigram_index_extensions_name" ON "extensions" USING GIN(name gin_trgm_ops);
CREATE INDEX "index_extensions_total_download_count" ON "extensions" ("total_download_count");
@@ -1,3 +1,5 @@
mod extensions;

use crate::{
auth,
db::{ContributorSelector, User, UserId},
@@ -20,6 +22,8 @@ use std::sync::Arc;
use tower::ServiceBuilder;
use tracing::instrument;

pub use extensions::fetch_extensions_from_blob_store_periodically;

pub fn routes(rpc_server: Arc<rpc::Server>, state: Arc<AppState>) -> Router<Body> {
Router::new()
.route("/user", get(get_authenticated_user))
@@ -28,6 +32,7 @@ pub fn routes(rpc_server: Arc<rpc::Server>, state: Arc<AppState>) -> Router<Body
.route("/rpc_server_snapshot", get(get_rpc_server_snapshot))
.route("/contributors", get(get_contributors).post(add_contributor))
.route("/contributor", get(check_is_contributor))
.merge(extensions::router())
.layer(
ServiceBuilder::new()
.layer(Extension(state))
237  crates/collab/src/api/extensions.rs (Normal file)
@@ -0,0 +1,237 @@
use crate::{
db::{ExtensionMetadata, NewExtensionVersion},
executor::Executor,
AppState, Error, Result,
};
use anyhow::{anyhow, Context as _};
use aws_sdk_s3::presigning::PresigningConfig;
use axum::{
extract::{Path, Query},
response::Redirect,
routing::get,
Extension, Json, Router,
};
use collections::HashMap;
use hyper::StatusCode;
use serde::{Deserialize, Serialize};
use std::{sync::Arc, time::Duration};
use time::PrimitiveDateTime;
use util::ResultExt;

pub fn router() -> Router {
Router::new()
.route("/extensions", get(get_extensions))
.route(
"/extensions/:extension_id/:version/download",
get(download_extension),
)
}

#[derive(Debug, Deserialize)]
struct GetExtensionsParams {
filter: Option<String>,
}

#[derive(Debug, Deserialize)]
struct DownloadExtensionParams {
extension_id: String,
version: String,
}

#[derive(Debug, Serialize)]
struct GetExtensionsResponse {
pub data: Vec<ExtensionMetadata>,
}

#[derive(Deserialize)]
struct ExtensionManifest {
name: String,
version: String,
description: Option<String>,
authors: Vec<String>,
repository: String,
}

async fn get_extensions(
Extension(app): Extension<Arc<AppState>>,
Query(params): Query<GetExtensionsParams>,
) -> Result<Json<GetExtensionsResponse>> {
let extensions = app.db.get_extensions(params.filter.as_deref(), 30).await?;
Ok(Json(GetExtensionsResponse { data: extensions }))
}

async fn download_extension(
Extension(app): Extension<Arc<AppState>>,
Path(params): Path<DownloadExtensionParams>,
) -> Result<Redirect> {
let Some((blob_store_client, bucket)) = app
.blob_store_client
.clone()
.zip(app.config.blob_store_bucket.clone())
else {
Err(Error::Http(
StatusCode::NOT_IMPLEMENTED,
"not supported".into(),
))?
};

let DownloadExtensionParams {
extension_id,
version,
} = params;

let version_exists = app
.db
.record_extension_download(&extension_id, &version)
.await?;

if !version_exists {
Err(Error::Http(
StatusCode::NOT_FOUND,
"unknown extension version".into(),
))?;
}

let url = blob_store_client
.get_object()
.bucket(bucket)
.key(format!(
"extensions/{extension_id}/{version}/archive.tar.gz"
))
.presigned(PresigningConfig::expires_in(EXTENSION_DOWNLOAD_URL_LIFETIME).unwrap())
.await
.map_err(|e| anyhow!("failed to create presigned extension download url {e}"))?;

Ok(Redirect::temporary(url.uri()))
}

const EXTENSION_FETCH_INTERVAL: Duration = Duration::from_secs(5 * 60);
const EXTENSION_DOWNLOAD_URL_LIFETIME: Duration = Duration::from_secs(3 * 60);

pub fn fetch_extensions_from_blob_store_periodically(app_state: Arc<AppState>, executor: Executor) {
let Some(blob_store_client) = app_state.blob_store_client.clone() else {
log::info!("no blob store client");
return;
};
let Some(blob_store_bucket) = app_state.config.blob_store_bucket.clone() else {
log::info!("no blob store bucket");
return;
};

executor.spawn_detached({
let executor = executor.clone();
async move {
loop {
fetch_extensions_from_blob_store(
&blob_store_client,
&blob_store_bucket,
&app_state,
)
.await
.log_err();
executor.sleep(EXTENSION_FETCH_INTERVAL).await;
}
}
});
}

async fn fetch_extensions_from_blob_store(
blob_store_client: &aws_sdk_s3::Client,
blob_store_bucket: &String,
app_state: &Arc<AppState>,
) -> anyhow::Result<()> {
let list = blob_store_client
.list_objects()
.bucket(blob_store_bucket)
.prefix("extensions/")
.send()
.await?;

let objects = list
.contents
.ok_or_else(|| anyhow!("missing bucket contents"))?;

let mut published_versions = HashMap::<&str, Vec<&str>>::default();
for object in &objects {
let Some(key) = object.key.as_ref() else {
continue;
};
let mut parts = key.split('/');
let Some(_) = parts.next().filter(|part| *part == "extensions") else {
continue;
};
let Some(extension_id) = parts.next() else {
continue;
};
let Some(version) = parts.next() else {
continue;
};
published_versions
.entry(extension_id)
.or_default()
.push(version);
}

let known_versions = app_state.db.get_known_extension_versions().await?;

let mut new_versions = HashMap::<&str, Vec<NewExtensionVersion>>::default();
let empty = Vec::new();
for (extension_id, published_versions) in published_versions {
let known_versions = known_versions.get(extension_id).unwrap_or(&empty);

for published_version in published_versions {
if known_versions
.binary_search_by_key(&published_version, String::as_str)
.is_err()
{
let object = blob_store_client
.get_object()
.bucket(blob_store_bucket)
.key(format!(
"extensions/{extension_id}/{published_version}/manifest.json"
))
.send()
.await?;
let manifest_bytes = object
.body
.collect()
.await
.map(|data| data.into_bytes())
.with_context(|| format!("failed to download manifest for extension {extension_id} version {published_version}"))?
.to_vec();
let manifest = serde_json::from_slice::<ExtensionManifest>(&manifest_bytes)
.with_context(|| format!("invalid manifest for extension {extension_id} version {published_version}: {}", String::from_utf8_lossy(&manifest_bytes)))?;

let published_at = object.last_modified.ok_or_else(|| anyhow!("missing last modified timestamp for extension {extension_id} version {published_version}"))?;
let published_at =
time::OffsetDateTime::from_unix_timestamp_nanos(published_at.as_nanos())?;
let published_at = PrimitiveDateTime::new(published_at.date(), published_at.time());

let version = semver::Version::parse(&manifest.version).with_context(|| {
format!(
"invalid version for extension {extension_id} version {published_version}"
)
})?;

new_versions
.entry(extension_id)
.or_default()
.push(NewExtensionVersion {
name: manifest.name,
version,
description: manifest.description.unwrap_or_default(),
authors: manifest.authors,
repository: manifest.repository,
published_at,
});
}
}
}

app_state
.db
.insert_extension_versions(&new_versions)
.await?;

Ok(())
}
@@ -1,12 +1,8 @@
|
||||
#[cfg(test)]
|
||||
pub mod tests;
|
||||
|
||||
#[cfg(test)]
|
||||
pub use tests::TestDb;
|
||||
|
||||
mod ids;
|
||||
mod queries;
|
||||
mod tables;
|
||||
#[cfg(test)]
|
||||
pub mod tests;
|
||||
|
||||
use crate::{executor::Executor, Error, Result};
|
||||
use anyhow::anyhow;
|
||||
@@ -25,7 +21,7 @@ use sea_orm::{
|
||||
FromQueryResult, IntoActiveModel, IsolationLevel, JoinType, QueryOrder, QuerySelect, Statement,
|
||||
TransactionTrait,
|
||||
};
|
||||
use serde::{Deserialize, Serialize};
|
||||
use serde::{ser::Error as _, Deserialize, Serialize, Serializer};
|
||||
use sqlx::{
|
||||
migrate::{Migrate, Migration, MigrationSource},
|
||||
Connection,
|
||||
@@ -40,13 +36,17 @@ use std::{
|
||||
sync::Arc,
|
||||
time::Duration,
|
||||
};
|
||||
pub use tables::*;
|
||||
use time::{format_description::well_known::iso8601, PrimitiveDateTime};
|
||||
use tokio::sync::{Mutex, OwnedMutexGuard};
|
||||
|
||||
#[cfg(test)]
|
||||
pub use tests::TestDb;
|
||||
|
||||
pub use ids::*;
|
||||
pub use queries::contributors::ContributorSelector;
|
||||
pub use sea_orm::ConnectOptions;
|
||||
pub use tables::user::Model as User;
|
||||
pub use tables::*;
|
||||
|
||||
/// Database gives you a handle that lets you access the database.
|
||||
/// It handles pooling internally.
|
||||
@@ -717,3 +717,43 @@ pub struct WorktreeSettingsFile {
|
||||
pub path: String,
|
||||
pub content: String,
|
||||
}

pub struct NewExtensionVersion {
    pub name: String,
    pub version: semver::Version,
    pub description: String,
    pub authors: Vec<String>,
    pub repository: String,
    pub published_at: PrimitiveDateTime,
}

#[derive(Debug, Serialize, PartialEq)]
pub struct ExtensionMetadata {
    pub id: String,
    pub name: String,
    pub version: String,
    pub authors: Vec<String>,
    pub description: String,
    pub repository: String,
    #[serde(serialize_with = "serialize_iso8601")]
    pub published_at: PrimitiveDateTime,
    pub download_count: u64,
}

pub fn serialize_iso8601<S: Serializer>(
    datetime: &PrimitiveDateTime,
    serializer: S,
) -> Result<S::Ok, S::Error> {
    const SERDE_CONFIG: iso8601::EncodedConfig = iso8601::Config::DEFAULT
        .set_year_is_six_digits(false)
        .set_time_precision(iso8601::TimePrecision::Second {
            decimal_digits: None,
        })
        .encode();

    datetime
        .assume_utc()
        .format(&time::format_description::well_known::Iso8601::<SERDE_CONFIG>)
        .map_err(S::Error::custom)?
        .serialize(serializer)
}
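For context on the serializer above, a small sketch of how it would be driven through serde. The `Example` wrapper is hypothetical; it assumes it sits next to the definition above (so `serialize_iso8601` resolves) and uses only the `serde_json` and `time` crates collab already depends on:

use serde::Serialize;
use time::{OffsetDateTime, PrimitiveDateTime};

// Hypothetical wrapper struct, only for illustration.
#[derive(Serialize)]
struct Example {
    #[serde(serialize_with = "serialize_iso8601")]
    published_at: PrimitiveDateTime,
}

fn example() -> serde_json::Result<String> {
    let t = OffsetDateTime::from_unix_timestamp(0).unwrap();
    let value = Example {
        published_at: PrimitiveDateTime::new(t.date(), t.time()),
    };
    // With the config above, the timestamp is emitted at second precision
    // (no subsecond digits) as an ISO-8601 string in UTC.
    serde_json::to_string(&value)
}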
@@ -85,6 +85,7 @@ id_type!(SignupId);
|
||||
id_type!(UserId);
|
||||
id_type!(ChannelBufferCollaboratorId);
|
||||
id_type!(FlagId);
|
||||
id_type!(ExtensionId);
|
||||
id_type!(NotificationId);
|
||||
id_type!(NotificationKindId);
|
||||
|
||||
|
||||
@@ -5,6 +5,7 @@ pub mod buffers;
|
||||
pub mod channels;
|
||||
pub mod contacts;
|
||||
pub mod contributors;
|
||||
pub mod extensions;
|
||||
pub mod messages;
|
||||
pub mod notifications;
|
||||
pub mod projects;
|
||||
|
||||
206
crates/collab/src/db/queries/extensions.rs
Normal file
@@ -0,0 +1,206 @@
|
||||
use super::*;
|
||||
|
||||
impl Database {
|
||||
pub async fn get_extensions(
|
||||
&self,
|
||||
filter: Option<&str>,
|
||||
limit: usize,
|
||||
) -> Result<Vec<ExtensionMetadata>> {
|
||||
self.transaction(|tx| async move {
|
||||
let mut condition = Condition::all();
|
||||
if let Some(filter) = filter {
|
||||
let fuzzy_name_filter = Self::fuzzy_like_string(filter);
|
||||
condition = condition.add(Expr::cust_with_expr("name ILIKE $1", fuzzy_name_filter));
|
||||
}
|
||||
|
||||
let extensions = extension::Entity::find()
|
||||
.filter(condition)
|
||||
.order_by_desc(extension::Column::TotalDownloadCount)
|
||||
.order_by_asc(extension::Column::Name)
|
||||
.limit(Some(limit as u64))
|
||||
.filter(
|
||||
extension::Column::LatestVersion
|
||||
.into_expr()
|
||||
.eq(extension_version::Column::Version.into_expr()),
|
||||
)
|
||||
.inner_join(extension_version::Entity)
|
||||
.select_also(extension_version::Entity)
|
||||
.all(&*tx)
|
||||
.await?;
|
||||
|
||||
Ok(extensions
|
||||
.into_iter()
|
||||
.filter_map(|(extension, latest_version)| {
|
||||
let version = latest_version?;
|
||||
Some(ExtensionMetadata {
|
||||
id: extension.external_id,
|
||||
name: extension.name,
|
||||
version: version.version,
|
||||
authors: version
|
||||
.authors
|
||||
.split(',')
|
||||
.map(|author| author.trim().to_string())
|
||||
.collect::<Vec<_>>(),
|
||||
description: version.description,
|
||||
repository: version.repository,
|
||||
published_at: version.published_at,
|
||||
download_count: extension.total_download_count as u64,
|
||||
})
|
||||
})
|
||||
.collect())
|
||||
})
|
||||
.await
|
||||
}
|
||||
|
||||
pub async fn get_known_extension_versions<'a>(&self) -> Result<HashMap<String, Vec<String>>> {
|
||||
self.transaction(|tx| async move {
|
||||
let mut extension_external_ids_by_id = HashMap::default();
|
||||
|
||||
let mut rows = extension::Entity::find().stream(&*tx).await?;
|
||||
while let Some(row) = rows.next().await {
|
||||
let row = row?;
|
||||
extension_external_ids_by_id.insert(row.id, row.external_id);
|
||||
}
|
||||
drop(rows);
|
||||
|
||||
let mut known_versions_by_extension_id: HashMap<String, Vec<String>> =
|
||||
HashMap::default();
|
||||
let mut rows = extension_version::Entity::find().stream(&*tx).await?;
|
||||
while let Some(row) = rows.next().await {
|
||||
let row = row?;
|
||||
|
||||
let Some(extension_id) = extension_external_ids_by_id.get(&row.extension_id) else {
|
||||
continue;
|
||||
};
|
||||
|
||||
let versions = known_versions_by_extension_id
|
||||
.entry(extension_id.clone())
|
||||
.or_default();
|
||||
if let Err(ix) = versions.binary_search(&row.version) {
|
||||
versions.insert(ix, row.version);
|
||||
}
|
||||
}
|
||||
drop(rows);
|
||||
|
||||
Ok(known_versions_by_extension_id)
|
||||
})
|
||||
.await
|
||||
}
|
||||
|
||||
pub async fn insert_extension_versions(
|
||||
&self,
|
||||
versions_by_extension_id: &HashMap<&str, Vec<NewExtensionVersion>>,
|
||||
) -> Result<()> {
|
||||
self.transaction(|tx| async move {
|
||||
for (external_id, versions) in versions_by_extension_id {
|
||||
if versions.is_empty() {
|
||||
continue;
|
||||
}
|
||||
|
||||
let latest_version = versions
|
||||
.iter()
|
||||
.max_by_key(|version| &version.version)
|
||||
.unwrap();
|
||||
|
||||
let insert = extension::Entity::insert(extension::ActiveModel {
|
||||
name: ActiveValue::Set(latest_version.name.clone()),
|
||||
external_id: ActiveValue::Set(external_id.to_string()),
|
||||
id: ActiveValue::NotSet,
|
||||
latest_version: ActiveValue::Set(latest_version.version.to_string()),
|
||||
total_download_count: ActiveValue::NotSet,
|
||||
})
|
||||
.on_conflict(
|
||||
OnConflict::columns([extension::Column::ExternalId])
|
||||
.update_column(extension::Column::ExternalId)
|
||||
.to_owned(),
|
||||
);
|
||||
|
||||
let extension = if tx.support_returning() {
|
||||
insert.exec_with_returning(&*tx).await?
|
||||
} else {
|
||||
// Sqlite
|
||||
insert.exec_without_returning(&*tx).await?;
|
||||
extension::Entity::find()
|
||||
.filter(extension::Column::ExternalId.eq(*external_id))
|
||||
.one(&*tx)
|
||||
.await?
|
||||
.ok_or_else(|| anyhow!("failed to insert extension"))?
|
||||
};
|
||||
|
||||
extension_version::Entity::insert_many(versions.iter().map(|version| {
|
||||
extension_version::ActiveModel {
|
||||
extension_id: ActiveValue::Set(extension.id),
|
||||
published_at: ActiveValue::Set(version.published_at),
|
||||
version: ActiveValue::Set(version.version.to_string()),
|
||||
authors: ActiveValue::Set(version.authors.join(", ")),
|
||||
repository: ActiveValue::Set(version.repository.clone()),
|
||||
description: ActiveValue::Set(version.description.clone()),
|
||||
download_count: ActiveValue::NotSet,
|
||||
}
|
||||
}))
|
||||
.on_conflict(OnConflict::new().do_nothing().to_owned())
|
||||
.exec_without_returning(&*tx)
|
||||
.await?;
|
||||
|
||||
if let Ok(db_version) = semver::Version::parse(&extension.latest_version) {
|
||||
if db_version >= latest_version.version {
|
||||
continue;
|
||||
}
|
||||
}
|
||||
|
||||
let mut extension = extension.into_active_model();
|
||||
extension.latest_version = ActiveValue::Set(latest_version.version.to_string());
|
||||
extension.name = ActiveValue::set(latest_version.name.clone());
|
||||
extension::Entity::update(extension).exec(&*tx).await?;
|
||||
}
|
||||
|
||||
Ok(())
|
||||
})
|
||||
.await
|
||||
}
|
||||
|
||||
pub async fn record_extension_download(&self, extension: &str, version: &str) -> Result<bool> {
|
||||
self.transaction(|tx| async move {
|
||||
#[derive(Copy, Clone, Debug, EnumIter, DeriveColumn)]
|
||||
enum QueryId {
|
||||
Id,
|
||||
}
|
||||
|
||||
let extension_id: Option<ExtensionId> = extension::Entity::find()
|
||||
.filter(extension::Column::ExternalId.eq(extension))
|
||||
.select_only()
|
||||
.column(extension::Column::Id)
|
||||
.into_values::<_, QueryId>()
|
||||
.one(&*tx)
|
||||
.await?;
|
||||
let Some(extension_id) = extension_id else {
|
||||
return Ok(false);
|
||||
};
|
||||
|
||||
extension_version::Entity::update_many()
|
||||
.col_expr(
|
||||
extension_version::Column::DownloadCount,
|
||||
extension_version::Column::DownloadCount.into_expr().add(1),
|
||||
)
|
||||
.filter(
|
||||
extension_version::Column::ExtensionId
|
||||
.eq(extension_id)
|
||||
.and(extension_version::Column::Version.eq(version)),
|
||||
)
|
||||
.exec(&*tx)
|
||||
.await?;
|
||||
|
||||
extension::Entity::update_many()
|
||||
.col_expr(
|
||||
extension::Column::TotalDownloadCount,
|
||||
extension::Column::TotalDownloadCount.into_expr().add(1),
|
||||
)
|
||||
.filter(extension::Column::Id.eq(extension_id))
|
||||
.exec(&*tx)
|
||||
.await?;
|
||||
|
||||
Ok(true)
|
||||
})
|
||||
.await
|
||||
}
|
||||
}
|
||||
@@ -10,6 +10,8 @@ pub mod channel_message;
|
||||
pub mod channel_message_mention;
|
||||
pub mod contact;
|
||||
pub mod contributor;
|
||||
pub mod extension;
|
||||
pub mod extension_version;
|
||||
pub mod feature_flag;
|
||||
pub mod follower;
|
||||
pub mod language_server;
|
||||
|
||||
27
crates/collab/src/db/tables/extension.rs
Normal file
@@ -0,0 +1,27 @@
use crate::db::ExtensionId;
use sea_orm::entity::prelude::*;

#[derive(Clone, Debug, PartialEq, Eq, DeriveEntityModel)]
#[sea_orm(table_name = "extensions")]
pub struct Model {
    #[sea_orm(primary_key)]
    pub id: ExtensionId,
    pub external_id: String,
    pub name: String,
    pub latest_version: String,
    pub total_download_count: i64,
}

#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
pub enum Relation {
    #[sea_orm(has_one = "super::extension_version::Entity")]
    LatestVersion,
}

impl Related<super::extension_version::Entity> for Entity {
    fn to() -> RelationDef {
        Relation::LatestVersion.def()
    }
}

impl ActiveModelBehavior for ActiveModel {}
36
crates/collab/src/db/tables/extension_version.rs
Normal file
@@ -0,0 +1,36 @@
use crate::db::ExtensionId;
use sea_orm::entity::prelude::*;
use time::PrimitiveDateTime;

#[derive(Clone, Debug, PartialEq, Eq, DeriveEntityModel)]
#[sea_orm(table_name = "extension_versions")]
pub struct Model {
    #[sea_orm(primary_key)]
    pub extension_id: ExtensionId,
    #[sea_orm(primary_key)]
    pub version: String,
    pub published_at: PrimitiveDateTime,
    pub authors: String,
    pub repository: String,
    pub description: String,
    pub download_count: i64,
}

#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
pub enum Relation {
    #[sea_orm(
        belongs_to = "super::extension::Entity",
        from = "Column::ExtensionId",
        to = "super::extension::Column::Id",
        on_condition = r#"super::extension::Column::LatestVersion.into_expr().eq(Column::Version.into_expr())"#
    )]
    Extension,
}

impl Related<super::extension::Entity> for Entity {
    fn to() -> RelationDef {
        Relation::Extension.def()
    }
}

impl ActiveModelBehavior for ActiveModel {}
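A short sketch of how the two entities are meant to be queried together, mirroring the join in `get_extensions` above; the method name is invented, and the `use super::*;` context is the same one the queries module already uses:

use super::*;

impl Database {
    // Sketch only: fetch each extension row paired with the row for its latest
    // version, relying on the version-equality condition defined on the relation.
    pub async fn get_extensions_with_latest_versions(
        &self,
    ) -> Result<Vec<(extension::Model, Option<extension_version::Model>)>> {
        self.transaction(|tx| async move {
            Ok(extension::Entity::find()
                .filter(
                    extension::Column::LatestVersion
                        .into_expr()
                        .eq(extension_version::Column::Version.into_expr()),
                )
                .inner_join(extension_version::Entity)
                .select_also(extension_version::Entity)
                .all(&*tx)
                .await?)
        })
        .await
    }
}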
@@ -2,6 +2,7 @@ mod buffer_tests;
|
||||
mod channel_tests;
|
||||
mod contributor_tests;
|
||||
mod db_tests;
|
||||
mod extension_tests;
|
||||
mod feature_flag_tests;
|
||||
mod message_tests;
|
||||
|
||||
|
||||
225
crates/collab/src/db/tests/extension_tests.rs
Normal file
@@ -0,0 +1,225 @@
|
||||
use super::Database;
|
||||
use crate::{
|
||||
db::{ExtensionMetadata, NewExtensionVersion},
|
||||
test_both_dbs,
|
||||
};
|
||||
use std::sync::Arc;
|
||||
use time::{OffsetDateTime, PrimitiveDateTime};
|
||||
|
||||
test_both_dbs!(
|
||||
test_extensions,
|
||||
test_extensions_postgres,
|
||||
test_extensions_sqlite
|
||||
);
|
||||
|
||||
async fn test_extensions(db: &Arc<Database>) {
|
||||
let versions = db.get_known_extension_versions().await.unwrap();
|
||||
assert!(versions.is_empty());
|
||||
|
||||
let extensions = db.get_extensions(None, 5).await.unwrap();
|
||||
assert!(extensions.is_empty());
|
||||
|
||||
let t0 = OffsetDateTime::from_unix_timestamp_nanos(0).unwrap();
|
||||
let t0 = PrimitiveDateTime::new(t0.date(), t0.time());
|
||||
|
||||
db.insert_extension_versions(
|
||||
&[
|
||||
(
|
||||
"ext1",
|
||||
vec![
|
||||
NewExtensionVersion {
|
||||
name: "Extension 1".into(),
|
||||
version: semver::Version::parse("0.0.1").unwrap(),
|
||||
description: "an extension".into(),
|
||||
authors: vec!["max".into()],
|
||||
repository: "ext1/repo".into(),
|
||||
published_at: t0,
|
||||
},
|
||||
NewExtensionVersion {
|
||||
name: "Extension One".into(),
|
||||
version: semver::Version::parse("0.0.2").unwrap(),
|
||||
description: "a good extension".into(),
|
||||
authors: vec!["max".into(), "marshall".into()],
|
||||
repository: "ext1/repo".into(),
|
||||
published_at: t0,
|
||||
},
|
||||
],
|
||||
),
|
||||
(
|
||||
"ext2",
|
||||
vec![NewExtensionVersion {
|
||||
name: "Extension Two".into(),
|
||||
version: semver::Version::parse("0.2.0").unwrap(),
|
||||
description: "a great extension".into(),
|
||||
authors: vec!["marshall".into()],
|
||||
repository: "ext2/repo".into(),
|
||||
published_at: t0,
|
||||
}],
|
||||
),
|
||||
]
|
||||
.into_iter()
|
||||
.collect(),
|
||||
)
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
let versions = db.get_known_extension_versions().await.unwrap();
|
||||
assert_eq!(
|
||||
versions,
|
||||
[
|
||||
("ext1".into(), vec!["0.0.1".into(), "0.0.2".into()]),
|
||||
("ext2".into(), vec!["0.2.0".into()])
|
||||
]
|
||||
.into_iter()
|
||||
.collect()
|
||||
);
|
||||
|
||||
// The latest version of each extension is returned.
|
||||
let extensions = db.get_extensions(None, 5).await.unwrap();
|
||||
assert_eq!(
|
||||
extensions,
|
||||
&[
|
||||
ExtensionMetadata {
|
||||
id: "ext1".into(),
|
||||
name: "Extension One".into(),
|
||||
version: "0.0.2".into(),
|
||||
authors: vec!["max".into(), "marshall".into()],
|
||||
description: "a good extension".into(),
|
||||
repository: "ext1/repo".into(),
|
||||
published_at: t0,
|
||||
download_count: 0,
|
||||
},
|
||||
ExtensionMetadata {
|
||||
id: "ext2".into(),
|
||||
name: "Extension Two".into(),
|
||||
version: "0.2.0".into(),
|
||||
authors: vec!["marshall".into()],
|
||||
description: "a great extension".into(),
|
||||
repository: "ext2/repo".into(),
|
||||
published_at: t0,
|
||||
download_count: 0
|
||||
},
|
||||
]
|
||||
);
|
||||
|
||||
// Record extensions being downloaded.
|
||||
for _ in 0..7 {
|
||||
assert!(db.record_extension_download("ext2", "0.0.2").await.unwrap());
|
||||
}
|
||||
|
||||
for _ in 0..3 {
|
||||
assert!(db.record_extension_download("ext1", "0.0.1").await.unwrap());
|
||||
}
|
||||
|
||||
for _ in 0..2 {
|
||||
assert!(db.record_extension_download("ext1", "0.0.2").await.unwrap());
|
||||
}
|
||||
|
||||
// Record download returns false if the extension does not exist.
|
||||
assert!(!db
|
||||
.record_extension_download("no-such-extension", "0.0.2")
|
||||
.await
|
||||
.unwrap());
|
||||
|
||||
// Extensions are returned in descending order of total downloads.
|
||||
let extensions = db.get_extensions(None, 5).await.unwrap();
|
||||
assert_eq!(
|
||||
extensions,
|
||||
&[
|
||||
ExtensionMetadata {
|
||||
id: "ext2".into(),
|
||||
name: "Extension Two".into(),
|
||||
version: "0.2.0".into(),
|
||||
authors: vec!["marshall".into()],
|
||||
description: "a great extension".into(),
|
||||
repository: "ext2/repo".into(),
|
||||
published_at: t0,
|
||||
download_count: 7
|
||||
},
|
||||
ExtensionMetadata {
|
||||
id: "ext1".into(),
|
||||
name: "Extension One".into(),
|
||||
version: "0.0.2".into(),
|
||||
authors: vec!["max".into(), "marshall".into()],
|
||||
description: "a good extension".into(),
|
||||
repository: "ext1/repo".into(),
|
||||
published_at: t0,
|
||||
download_count: 5,
|
||||
},
|
||||
]
|
||||
);
|
||||
|
||||
// Add more extensions, including a new version of `ext1`, and backfilling
|
||||
// an older version of `ext2`.
|
||||
db.insert_extension_versions(
|
||||
&[
|
||||
(
|
||||
"ext1",
|
||||
vec![NewExtensionVersion {
|
||||
name: "Extension One".into(),
|
||||
version: semver::Version::parse("0.0.3").unwrap(),
|
||||
description: "a real good extension".into(),
|
||||
authors: vec!["max".into(), "marshall".into()],
|
||||
repository: "ext1/repo".into(),
|
||||
published_at: t0,
|
||||
}],
|
||||
),
|
||||
(
|
||||
"ext2",
|
||||
vec![NewExtensionVersion {
|
||||
name: "Extension Two".into(),
|
||||
version: semver::Version::parse("0.1.0").unwrap(),
|
||||
description: "an old extension".into(),
|
||||
authors: vec!["marshall".into()],
|
||||
repository: "ext2/repo".into(),
|
||||
published_at: t0,
|
||||
}],
|
||||
),
|
||||
]
|
||||
.into_iter()
|
||||
.collect(),
|
||||
)
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
let versions = db.get_known_extension_versions().await.unwrap();
|
||||
assert_eq!(
|
||||
versions,
|
||||
[
|
||||
(
|
||||
"ext1".into(),
|
||||
vec!["0.0.1".into(), "0.0.2".into(), "0.0.3".into()]
|
||||
),
|
||||
("ext2".into(), vec!["0.1.0".into(), "0.2.0".into()])
|
||||
]
|
||||
.into_iter()
|
||||
.collect()
|
||||
);
|
||||
|
||||
let extensions = db.get_extensions(None, 5).await.unwrap();
|
||||
assert_eq!(
|
||||
extensions,
|
||||
&[
|
||||
ExtensionMetadata {
|
||||
id: "ext2".into(),
|
||||
name: "Extension Two".into(),
|
||||
version: "0.2.0".into(),
|
||||
authors: vec!["marshall".into()],
|
||||
description: "a great extension".into(),
|
||||
repository: "ext2/repo".into(),
|
||||
published_at: t0,
|
||||
download_count: 7
|
||||
},
|
||||
ExtensionMetadata {
|
||||
id: "ext1".into(),
|
||||
name: "Extension One".into(),
|
||||
version: "0.0.3".into(),
|
||||
authors: vec!["max".into(), "marshall".into()],
|
||||
description: "a real good extension".into(),
|
||||
repository: "ext1/repo".into(),
|
||||
published_at: t0,
|
||||
download_count: 5,
|
||||
},
|
||||
]
|
||||
);
|
||||
}
|
||||
@@ -3,7 +3,8 @@ use std::fs;
|
||||
|
||||
pub fn load_dotenv() -> anyhow::Result<()> {
|
||||
let env: toml::map::Map<String, toml::Value> = toml::de::from_str(
|
||||
&fs::read_to_string("./.env.toml").map_err(|_| anyhow!("no .env.toml file found"))?,
|
||||
&fs::read_to_string("./crates/collab/.env.toml")
|
||||
.map_err(|_| anyhow!("no .env.toml file found"))?,
|
||||
)?;
|
||||
|
||||
for (key, value) in env {
|
||||
|
||||
@@ -8,11 +8,14 @@ pub mod rpc;
|
||||
#[cfg(test)]
|
||||
mod tests;
|
||||
|
||||
use anyhow::anyhow;
|
||||
use aws_config::{BehaviorVersion, Region};
|
||||
use axum::{http::StatusCode, response::IntoResponse};
|
||||
use db::Database;
|
||||
use executor::Executor;
|
||||
use serde::Deserialize;
|
||||
use std::{path::PathBuf, sync::Arc};
|
||||
use util::ResultExt;
|
||||
|
||||
pub type Result<T, E = Error> = std::result::Result<T, E>;
|
||||
|
||||
@@ -100,6 +103,11 @@ pub struct Config {
|
||||
pub live_kit_secret: Option<String>,
|
||||
pub rust_log: Option<String>,
|
||||
pub log_json: Option<bool>,
|
||||
pub blob_store_url: Option<String>,
|
||||
pub blob_store_region: Option<String>,
|
||||
pub blob_store_access_key: Option<String>,
|
||||
pub blob_store_secret_key: Option<String>,
|
||||
pub blob_store_bucket: Option<String>,
|
||||
pub zed_environment: Arc<str>,
|
||||
}
|
||||
|
||||
@@ -118,6 +126,7 @@ pub struct MigrateConfig {
|
||||
pub struct AppState {
|
||||
pub db: Arc<Database>,
|
||||
pub live_kit_client: Option<Arc<dyn live_kit_server::api::Client>>,
|
||||
pub blob_store_client: Option<aws_sdk_s3::Client>,
|
||||
pub config: Config,
|
||||
}
|
||||
|
||||
@@ -146,8 +155,44 @@ impl AppState {
|
||||
let this = Self {
|
||||
db: Arc::new(db),
|
||||
live_kit_client,
|
||||
blob_store_client: build_blob_store_client(&config).await.log_err(),
|
||||
config,
|
||||
};
|
||||
Ok(Arc::new(this))
|
||||
}
|
||||
}
|
||||
|
||||
async fn build_blob_store_client(config: &Config) -> anyhow::Result<aws_sdk_s3::Client> {
|
||||
let keys = aws_sdk_s3::config::Credentials::new(
|
||||
config
|
||||
.blob_store_access_key
|
||||
.clone()
|
||||
.ok_or_else(|| anyhow!("missing blob_store_access_key"))?,
|
||||
config
|
||||
.blob_store_secret_key
|
||||
.clone()
|
||||
.ok_or_else(|| anyhow!("missing blob_store_secret_key"))?,
|
||||
None,
|
||||
None,
|
||||
"env",
|
||||
);
|
||||
|
||||
let s3_config = aws_config::defaults(BehaviorVersion::latest())
|
||||
.endpoint_url(
|
||||
config
|
||||
.blob_store_url
|
||||
.as_ref()
|
||||
.ok_or_else(|| anyhow!("missing blob_store_url"))?,
|
||||
)
|
||||
.region(Region::new(
|
||||
config
|
||||
.blob_store_region
|
||||
.clone()
|
||||
.ok_or_else(|| anyhow!("missing blob_store_region"))?,
|
||||
))
|
||||
.credentials_provider(keys)
|
||||
.load()
|
||||
.await;
|
||||
|
||||
Ok(aws_sdk_s3::Client::new(&s3_config))
|
||||
}
|
||||
|
||||
@@ -1,6 +1,9 @@
|
||||
use anyhow::anyhow;
|
||||
use axum::{routing::get, Extension, Router};
|
||||
use collab::{db, env, executor::Executor, AppState, Config, MigrateConfig, Result};
|
||||
use collab::{
|
||||
api::fetch_extensions_from_blob_store_periodically, db, env, executor::Executor, AppState,
|
||||
Config, MigrateConfig, Result,
|
||||
};
|
||||
use db::Database;
|
||||
use std::{
|
||||
env::args,
|
||||
@@ -50,6 +53,8 @@ async fn main() -> Result<()> {
|
||||
let rpc_server = collab::rpc::Server::new(epoch, state.clone(), Executor::Production);
|
||||
rpc_server.start().await?;
|
||||
|
||||
fetch_extensions_from_blob_store_periodically(state.clone(), Executor::Production);
|
||||
|
||||
let app = collab::api::routes(rpc_server.clone(), state.clone())
|
||||
.merge(collab::rpc::routes(rpc_server.clone()))
|
||||
.merge(
|
||||
|
||||
@@ -2077,3 +2077,66 @@ async fn test_following_to_channel_notes_other_workspace(
|
||||
assert_eq!(editor.tab_description(0, cx).unwrap(), "1.txt");
|
||||
});
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_following_while_deactivated(cx_a: &mut TestAppContext, cx_b: &mut TestAppContext) {
|
||||
let (_, client_a, client_b, channel) = TestServer::start2(cx_a, cx_b).await;
|
||||
|
||||
let mut cx_a2 = cx_a.clone();
|
||||
let (workspace_a, cx_a) = client_a.build_test_workspace(cx_a).await;
|
||||
join_channel(channel, &client_a, cx_a).await.unwrap();
|
||||
share_workspace(&workspace_a, cx_a).await.unwrap();
|
||||
|
||||
// a opens 1.txt
|
||||
cx_a.simulate_keystrokes("cmd-p 1 enter");
|
||||
cx_a.run_until_parked();
|
||||
workspace_a.update(cx_a, |workspace, cx| {
|
||||
let editor = workspace.active_item(cx).unwrap();
|
||||
assert_eq!(editor.tab_description(0, cx).unwrap(), "1.txt");
|
||||
});
|
||||
|
||||
// b joins channel and is following a
|
||||
join_channel(channel, &client_b, cx_b).await.unwrap();
|
||||
cx_b.run_until_parked();
|
||||
let (workspace_b, cx_b) = client_b.active_workspace(cx_b);
|
||||
workspace_b.update(cx_b, |workspace, cx| {
|
||||
let editor = workspace.active_item(cx).unwrap();
|
||||
assert_eq!(editor.tab_description(0, cx).unwrap(), "1.txt");
|
||||
});
|
||||
|
||||
// stop following
|
||||
cx_b.simulate_keystrokes("down");
|
||||
|
||||
// a opens a different file while not followed
|
||||
cx_a.simulate_keystrokes("cmd-p 2 enter");
|
||||
|
||||
workspace_b.update(cx_b, |workspace, cx| {
|
||||
let editor = workspace.active_item_as::<Editor>(cx).unwrap();
|
||||
assert_eq!(editor.tab_description(0, cx).unwrap(), "1.txt");
|
||||
});
|
||||
|
||||
// a opens a file in a new window
|
||||
let (_, cx_a2) = client_a.build_test_workspace(&mut cx_a2).await;
|
||||
cx_a2.update(|cx| cx.activate_window());
|
||||
cx_a2.simulate_keystrokes("cmd-p 3 enter");
|
||||
cx_a2.run_until_parked();
|
||||
|
||||
// b starts following a again
|
||||
cx_b.simulate_keystrokes("cmd-ctrl-alt-f");
|
||||
cx_a.run_until_parked();
|
||||
|
||||
// a returns to the shared project
|
||||
cx_a.update(|cx| cx.activate_window());
|
||||
cx_a.run_until_parked();
|
||||
|
||||
workspace_a.update(cx_a, |workspace, cx| {
|
||||
let editor = workspace.active_item(cx).unwrap();
|
||||
assert_eq!(editor.tab_description(0, cx).unwrap(), "2.js");
|
||||
});
|
||||
|
||||
// b should follow a back
|
||||
workspace_b.update(cx_b, |workspace, cx| {
|
||||
let editor = workspace.active_item_as::<Editor>(cx).unwrap();
|
||||
assert_eq!(editor.tab_description(0, cx).unwrap(), "2.js");
|
||||
});
|
||||
}
|
||||
|
||||
@@ -479,6 +479,7 @@ impl TestServer {
|
||||
Arc::new(AppState {
|
||||
db: test_db.db().clone(),
|
||||
live_kit_client: Some(Arc::new(fake_server.create_api_client())),
|
||||
blob_store_client: None,
|
||||
config: Config {
|
||||
http_port: 0,
|
||||
database_url: "".into(),
|
||||
@@ -491,6 +492,11 @@ impl TestServer {
|
||||
rust_log: None,
|
||||
log_json: None,
|
||||
zed_environment: "test".into(),
|
||||
blob_store_url: None,
|
||||
blob_store_region: None,
|
||||
blob_store_access_key: None,
|
||||
blob_store_secret_key: None,
|
||||
blob_store_bucket: None,
|
||||
},
|
||||
})
|
||||
}
|
||||
|
||||
@@ -733,7 +733,7 @@ impl Render for ChatPanel {
|
||||
v_flex()
|
||||
.key_context("ChatPanel")
|
||||
.track_focus(&self.focus_handle)
|
||||
.full()
|
||||
.size_full()
|
||||
.on_action(cx.listener(Self::send))
|
||||
.child(
|
||||
h_flex().z_index(1).child(
|
||||
@@ -755,11 +755,11 @@ impl Render for ChatPanel {
|
||||
)
|
||||
.child(div().flex_grow().px_2().map(|this| {
|
||||
if self.active_chat.is_some() {
|
||||
this.child(list(self.message_list.clone()).full())
|
||||
this.child(list(self.message_list.clone()).size_full())
|
||||
} else {
|
||||
this.child(
|
||||
div()
|
||||
.full()
|
||||
.size_full()
|
||||
.p_4()
|
||||
.child(
|
||||
Label::new("Select a channel to chat in.")
|
||||
|
||||
@@ -2036,7 +2036,7 @@ impl CollabPanel {
|
||||
fn render_signed_in(&mut self, cx: &mut ViewContext<Self>) -> Div {
|
||||
v_flex()
|
||||
.size_full()
|
||||
.child(list(self.list_state.clone()).full())
|
||||
.child(list(self.list_state.clone()).size_full())
|
||||
.child(
|
||||
v_flex()
|
||||
.child(div().mx_2().border_primary(cx).border_t())
|
||||
|
||||
@@ -11,7 +11,7 @@ use gpui::{
|
||||
};
|
||||
use picker::{Picker, PickerDelegate};
|
||||
use std::sync::Arc;
|
||||
use ui::{prelude::*, Avatar, Checkbox, ContextMenu, ListItem, ListItemSpacing};
|
||||
use ui::{prelude::*, Avatar, CheckboxWithLabel, ContextMenu, ListItem, ListItemSpacing};
|
||||
use util::TryFutureExt;
|
||||
use workspace::{notifications::DetachAndPromptErr, ModalView};
|
||||
|
||||
@@ -177,22 +177,16 @@ impl Render for ChannelModal {
|
||||
.h(rems(22. / 16.))
|
||||
.justify_between()
|
||||
.line_height(rems(1.25))
|
||||
.child(
|
||||
h_flex()
|
||||
.gap_2()
|
||||
.child(
|
||||
Checkbox::new(
|
||||
"is-public",
|
||||
if visibility == ChannelVisibility::Public {
|
||||
ui::Selection::Selected
|
||||
} else {
|
||||
ui::Selection::Unselected
|
||||
},
|
||||
)
|
||||
.on_click(cx.listener(Self::set_channel_visibility)),
|
||||
)
|
||||
.child(Label::new("Public").size(LabelSize::Small)),
|
||||
)
|
||||
.child(CheckboxWithLabel::new(
|
||||
"is-public",
|
||||
Label::new("Public").size(LabelSize::Small),
|
||||
if visibility == ChannelVisibility::Public {
|
||||
ui::Selection::Selected
|
||||
} else {
|
||||
ui::Selection::Unselected
|
||||
},
|
||||
cx.listener(Self::set_channel_visibility),
|
||||
))
|
||||
.children(
|
||||
Some(
|
||||
Button::new("copy-link", "Copy Link")
|
||||
|
||||
@@ -566,10 +566,7 @@ impl CollabTitlebarItem {
|
||||
ActiveCall::global(cx)
|
||||
.update(cx, |call, cx| call.set_location(Some(&self.project), cx))
|
||||
.detach_and_log_err(cx);
|
||||
return;
|
||||
}
|
||||
|
||||
if cx.active_window().is_none() {
|
||||
} else if cx.active_window().is_none() {
|
||||
ActiveCall::global(cx)
|
||||
.update(cx, |call, cx| call.set_location(None, cx))
|
||||
.detach_and_log_err(cx);
|
||||
|
||||
@@ -317,8 +317,8 @@ impl PickerDelegate for CommandPaletteDelegate {
|
||||
});
|
||||
let action = command.action;
|
||||
cx.focus(&self.previous_focus_handle);
|
||||
cx.dispatch_action(action);
|
||||
self.dismissed(cx);
|
||||
cx.dispatch_action(action);
|
||||
}
|
||||
|
||||
fn render_match(
|
||||
|
||||
@@ -116,6 +116,11 @@ impl CopilotCodeVerification {
|
||||
.full_width()
|
||||
.style(ButtonStyle::Filled),
|
||||
)
|
||||
.child(
|
||||
Button::new("copilot-enable-cancel-button", "Cancel")
|
||||
.full_width()
|
||||
.on_click(cx.listener(|_, _, cx| cx.emit(DismissEvent))),
|
||||
)
|
||||
}
|
||||
fn render_enabled_modal(cx: &mut ViewContext<Self>) -> impl Element {
|
||||
v_flex()
|
||||
@@ -131,7 +136,7 @@ impl CopilotCodeVerification {
|
||||
)
|
||||
}
|
||||
|
||||
fn render_unauthorized_modal() -> impl Element {
|
||||
fn render_unauthorized_modal(cx: &mut ViewContext<Self>) -> impl Element {
|
||||
v_flex()
|
||||
.child(Headline::new("You must have an active GitHub Copilot subscription.").size(HeadlineSize::Large))
|
||||
|
||||
@@ -143,6 +148,11 @@ impl CopilotCodeVerification {
|
||||
.full_width()
|
||||
.on_click(|_, cx| cx.open_url(COPILOT_SIGN_UP_URL)),
|
||||
)
|
||||
.child(
|
||||
Button::new("copilot-subscribe-cancel-button", "Cancel")
|
||||
.full_width()
|
||||
.on_click(cx.listener(|_, _, cx| cx.emit(DismissEvent))),
|
||||
)
|
||||
}
|
||||
|
||||
fn render_disabled_modal() -> impl Element {
|
||||
@@ -160,7 +170,7 @@ impl Render for CopilotCodeVerification {
|
||||
} => Self::render_prompting_modal(self.connect_clicked, &prompt, cx).into_any_element(),
|
||||
Status::Unauthorized => {
|
||||
self.connect_clicked = false;
|
||||
Self::render_unauthorized_modal().into_any_element()
|
||||
Self::render_unauthorized_modal(cx).into_any_element()
|
||||
}
|
||||
Status::Authorized => {
|
||||
self.connect_clicked = false;
|
||||
|
||||
@@ -289,6 +289,7 @@ fn show_hover(
|
||||
})?;
|
||||
|
||||
let hover_result = hover_request.await.ok().flatten();
|
||||
let snapshot = this.update(&mut cx, |this, cx| this.snapshot(cx))?;
|
||||
let hover_popover = match hover_result {
|
||||
Some(hover_result) if !hover_result.is_empty() => {
|
||||
// Create symbol range of anchors for highlighting and filtering of future requests.
|
||||
|
||||
@@ -704,10 +704,12 @@ impl Item for Editor {
|
||||
|
||||
fn save(&mut self, project: Model<Project>, cx: &mut ViewContext<Self>) -> Task<Result<()>> {
|
||||
self.report_editor_event("save", None, cx);
|
||||
let format = self.perform_format(project.clone(), FormatTrigger::Save, cx);
|
||||
let buffers = self.buffer().clone().read(cx).all_buffers();
|
||||
cx.spawn(|_, mut cx| async move {
|
||||
format.await?;
|
||||
cx.spawn(|this, mut cx| async move {
|
||||
this.update(&mut cx, |this, cx| {
|
||||
this.perform_format(project.clone(), FormatTrigger::Save, cx)
|
||||
})?
|
||||
.await?;
|
||||
|
||||
if buffers.len() == 1 {
|
||||
project
|
||||
|
||||
@@ -5,6 +5,7 @@ use super::{Bias, DisplayPoint, DisplaySnapshot, SelectionGoal, ToDisplayPoint};
|
||||
use crate::{char_kind, scroll::ScrollAnchor, CharKind, EditorStyle, ToOffset, ToPoint};
|
||||
use gpui::{px, Pixels, WindowTextSystem};
|
||||
use language::Point;
|
||||
use multi_buffer::MultiBufferSnapshot;
|
||||
|
||||
use std::{ops::Range, sync::Arc};
|
||||
|
||||
@@ -254,7 +255,7 @@ pub fn previous_word_start(map: &DisplaySnapshot, point: DisplayPoint) -> Displa
|
||||
let raw_point = point.to_point(map);
|
||||
let scope = map.buffer_snapshot.language_scope_at(raw_point);
|
||||
|
||||
find_preceding_boundary(map, point, FindRange::MultiLine, |left, right| {
|
||||
find_preceding_boundary_display_point(map, point, FindRange::MultiLine, |left, right| {
|
||||
(char_kind(&scope, left) != char_kind(&scope, right) && !right.is_whitespace())
|
||||
|| left == '\n'
|
||||
})
|
||||
@@ -267,7 +268,7 @@ pub fn previous_subword_start(map: &DisplaySnapshot, point: DisplayPoint) -> Dis
|
||||
let raw_point = point.to_point(map);
|
||||
let scope = map.buffer_snapshot.language_scope_at(raw_point);
|
||||
|
||||
find_preceding_boundary(map, point, FindRange::MultiLine, |left, right| {
|
||||
find_preceding_boundary_display_point(map, point, FindRange::MultiLine, |left, right| {
|
||||
let is_word_start =
|
||||
char_kind(&scope, left) != char_kind(&scope, right) && !right.is_whitespace();
|
||||
let is_subword_start =
|
||||
@@ -366,16 +367,16 @@ pub fn end_of_paragraph(
|
||||
/// indicated by the given predicate returning true.
|
||||
/// The predicate is called with the character to the left and right of the candidate boundary location.
|
||||
/// If FindRange::SingleLine is specified and no boundary is found before the start of the current line, the start of the current line will be returned.
|
||||
pub fn find_preceding_boundary(
|
||||
map: &DisplaySnapshot,
|
||||
from: DisplayPoint,
|
||||
pub fn find_preceding_boundary_point(
|
||||
buffer_snapshot: &MultiBufferSnapshot,
|
||||
from: Point,
|
||||
find_range: FindRange,
|
||||
mut is_boundary: impl FnMut(char, char) -> bool,
|
||||
) -> DisplayPoint {
|
||||
) -> Point {
|
||||
let mut prev_ch = None;
|
||||
let mut offset = from.to_point(map).to_offset(&map.buffer_snapshot);
|
||||
let mut offset = from.to_offset(&buffer_snapshot);
|
||||
|
||||
for ch in map.buffer_snapshot.reversed_chars_at(offset) {
|
||||
for ch in buffer_snapshot.reversed_chars_at(offset) {
|
||||
if find_range == FindRange::SingleLine && ch == '\n' {
|
||||
break;
|
||||
}
|
||||
@@ -389,7 +390,26 @@ pub fn find_preceding_boundary(
|
||||
prev_ch = Some(ch);
|
||||
}
|
||||
|
||||
map.clip_point(offset.to_display_point(map), Bias::Left)
|
||||
offset.to_point(&buffer_snapshot)
|
||||
}
|
||||
|
||||
/// Scans for a boundary preceding the given start point `from` until a boundary is found,
/// indicated by the given predicate returning true.
/// The predicate is called with the character to the left and right of the candidate boundary location.
/// If FindRange::SingleLine is specified and no boundary is found before the start of the current line, the start of the current line will be returned.
pub fn find_preceding_boundary_display_point(
    map: &DisplaySnapshot,
    from: DisplayPoint,
    find_range: FindRange,
    is_boundary: impl FnMut(char, char) -> bool,
) -> DisplayPoint {
    let result = find_preceding_boundary_point(
        &map.buffer_snapshot,
        from.to_point(map),
        find_range,
        is_boundary,
    );
    map.clip_point(result.to_display_point(map), Bias::Left)
}
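As a small illustration of the `(char, char) -> bool` predicate both boundary helpers accept, the function below (ours, not part of the change) reports a boundary whenever scanning moves from whitespace into a word:

// A simple boundary predicate: true when the left character is whitespace and the
// right character is not, matching the (left, right) convention documented above.
fn starts_word(left: char, right: char) -> bool {
    left.is_whitespace() && !right.is_whitespace()
}

Such a function would be passed as the `is_boundary` argument of `find_preceding_boundary_point` or `find_preceding_boundary_display_point`.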
|
||||
/// Scans for a boundary following the given start point until a boundary is found, indicated by the
|
||||
@@ -626,7 +646,7 @@ mod tests {
|
||||
) {
|
||||
let (snapshot, display_points) = marked_display_snapshot(marked_text, cx);
|
||||
assert_eq!(
|
||||
find_preceding_boundary(
|
||||
find_preceding_boundary_display_point(
|
||||
&snapshot,
|
||||
display_points[1],
|
||||
FindRange::MultiLine,
|
||||
@@ -700,7 +720,7 @@ mod tests {
|
||||
});
|
||||
|
||||
assert_eq!(
|
||||
find_preceding_boundary(
|
||||
find_preceding_boundary_display_point(
|
||||
&snapshot,
|
||||
buffer_snapshot.len().to_display_point(&snapshot),
|
||||
FindRange::MultiLine,
|
||||
|
||||
@@ -42,6 +42,7 @@ pub struct Extension {
|
||||
pub description: Option<String>,
|
||||
pub authors: Vec<String>,
|
||||
pub repository: String,
|
||||
pub download_count: usize,
|
||||
}
|
||||
|
||||
#[derive(Clone)]
|
||||
|
||||
@@ -1,12 +1,11 @@
|
||||
use client::telemetry::Telemetry;
|
||||
use editor::{Editor, EditorElement, EditorStyle};
|
||||
use extension::{Extension, ExtensionStatus, ExtensionStore};
|
||||
use fs::Fs;
|
||||
use gpui::{
|
||||
actions, canvas, uniform_list, AnyElement, AppContext, AvailableSpace, EventEmitter,
|
||||
FocusableView, FontStyle, FontWeight, InteractiveElement, KeyContext, ParentElement, Render,
|
||||
Styled, Task, TextStyle, UniformListScrollHandle, View, ViewContext, VisualContext, WeakView,
|
||||
WhiteSpace, WindowContext,
|
||||
Styled, Task, TextStyle, UniformListScrollHandle, View, ViewContext, VisualContext, WhiteSpace,
|
||||
WindowContext,
|
||||
};
|
||||
use settings::Settings;
|
||||
use std::time::Duration;
|
||||
@@ -32,8 +31,6 @@ pub fn init(cx: &mut AppContext) {
|
||||
}
|
||||
|
||||
pub struct ExtensionsPage {
|
||||
workspace: WeakView<Workspace>,
|
||||
fs: Arc<dyn Fs>,
|
||||
list: UniformListScrollHandle,
|
||||
telemetry: Arc<Telemetry>,
|
||||
is_fetching_extensions: bool,
|
||||
@@ -46,7 +43,7 @@ pub struct ExtensionsPage {
|
||||
|
||||
impl ExtensionsPage {
|
||||
pub fn new(workspace: &Workspace, cx: &mut ViewContext<Workspace>) -> View<Self> {
|
||||
let extensions_panel = cx.new_view(|cx: &mut ViewContext<Self>| {
|
||||
cx.new_view(|cx: &mut ViewContext<Self>| {
|
||||
let store = ExtensionStore::global(cx);
|
||||
let subscription = cx.observe(&store, |_, _, cx| cx.notify());
|
||||
|
||||
@@ -54,8 +51,6 @@ impl ExtensionsPage {
|
||||
cx.subscribe(&query_editor, Self::on_query_change).detach();
|
||||
|
||||
let mut this = Self {
|
||||
fs: workspace.project().read(cx).fs().clone(),
|
||||
workspace: workspace.weak_handle(),
|
||||
list: UniformListScrollHandle::new(),
|
||||
telemetry: workspace.client().telemetry().clone(),
|
||||
is_fetching_extensions: false,
|
||||
@@ -67,8 +62,7 @@ impl ExtensionsPage {
|
||||
};
|
||||
this.fetch_extensions(None, cx);
|
||||
this
|
||||
});
|
||||
extensions_panel
|
||||
})
|
||||
}
|
||||
|
||||
fn install_extension(
|
||||
@@ -200,6 +194,8 @@ impl ExtensionsPage {
|
||||
}
|
||||
.color(Color::Accent);
|
||||
|
||||
let repository_url = extension.repository.clone();
|
||||
|
||||
div().w_full().child(
|
||||
v_flex()
|
||||
.w_full()
|
||||
@@ -236,18 +232,24 @@ impl ExtensionsPage {
|
||||
),
|
||||
)
|
||||
.child(
|
||||
h_flex().justify_between().child(
|
||||
Label::new(format!(
|
||||
"{}: {}",
|
||||
if extension.authors.len() > 1 {
|
||||
"Authors"
|
||||
} else {
|
||||
"Author"
|
||||
},
|
||||
extension.authors.join(", ")
|
||||
))
|
||||
.size(LabelSize::Small),
|
||||
),
|
||||
h_flex()
|
||||
.justify_between()
|
||||
.child(
|
||||
Label::new(format!(
|
||||
"{}: {}",
|
||||
if extension.authors.len() > 1 {
|
||||
"Authors"
|
||||
} else {
|
||||
"Author"
|
||||
},
|
||||
extension.authors.join(", ")
|
||||
))
|
||||
.size(LabelSize::Small),
|
||||
)
|
||||
.child(
|
||||
Label::new(format!("Downloads: {}", extension.download_count))
|
||||
.size(LabelSize::Small),
|
||||
),
|
||||
)
|
||||
.child(
|
||||
h_flex()
|
||||
@@ -256,7 +258,19 @@ impl ExtensionsPage {
|
||||
Label::new(description.clone())
|
||||
.size(LabelSize::Small)
|
||||
.color(Color::Default)
|
||||
})),
|
||||
}))
|
||||
.child(
|
||||
IconButton::new(
|
||||
SharedString::from(format!("repository-{}", extension.id)),
|
||||
IconName::Github,
|
||||
)
|
||||
.icon_color(Color::Accent)
|
||||
.icon_size(IconSize::Small)
|
||||
.style(ButtonStyle::Filled)
|
||||
.on_click(cx.listener(move |_, _, cx| {
|
||||
cx.open_url(&repository_url);
|
||||
})),
|
||||
),
|
||||
),
|
||||
)
|
||||
}
|
||||
@@ -453,25 +467,9 @@ impl Item for ExtensionsPage {
|
||||
fn clone_on_split(
|
||||
&self,
|
||||
_workspace_id: WorkspaceId,
|
||||
cx: &mut ViewContext<Self>,
|
||||
_: &mut ViewContext<Self>,
|
||||
) -> Option<View<Self>> {
|
||||
Some(cx.new_view(|cx| {
|
||||
let store = ExtensionStore::global(cx);
|
||||
let subscription = cx.observe(&store, |_, _, cx| cx.notify());
|
||||
|
||||
ExtensionsPage {
|
||||
fs: self.fs.clone(),
|
||||
workspace: self.workspace.clone(),
|
||||
list: UniformListScrollHandle::new(),
|
||||
telemetry: self.telemetry.clone(),
|
||||
is_fetching_extensions: false,
|
||||
extensions_entries: Default::default(),
|
||||
query_editor: self.query_editor.clone(),
|
||||
_subscription: subscription,
|
||||
query_contains_error: false,
|
||||
extension_fetch_task: None,
|
||||
}
|
||||
}))
|
||||
None
|
||||
}
|
||||
|
||||
fn to_item_events(event: &Self::Event, mut f: impl FnMut(workspace::item::ItemEvent)) {
|
||||
|
||||
@@ -16,7 +16,6 @@ test-support = [
|
||||
"util/test-support",
|
||||
]
|
||||
runtime_shaders = []
|
||||
macos-blade = ["blade-graphics", "blade-macros", "blade-rwh", "bytemuck"]
|
||||
|
||||
[lib]
|
||||
path = "src/gpui.rs"
|
||||
@@ -27,10 +26,6 @@ anyhow.workspace = true
|
||||
async-task = "4.7"
|
||||
backtrace = { version = "0.3", optional = true }
|
||||
bitflags = "2.4.0"
|
||||
blade-graphics = { workspace = true, optional = true }
|
||||
blade-macros = { workspace = true, optional = true }
|
||||
blade-rwh = { workspace = true, optional = true }
|
||||
bytemuck = { version = "1", optional = true }
|
||||
collections.workspace = true
|
||||
ctor.workspace = true
|
||||
derive_more.workspace = true
|
||||
@@ -38,7 +33,7 @@ dhat = { version = "0.3", optional = true }
|
||||
env_logger = { version = "0.9", optional = true }
|
||||
etagere = "0.2"
|
||||
futures.workspace = true
|
||||
font-kit = { git = "https://github.com/zed-industries/font-kit", rev = "5a5c4d4" }
|
||||
font-kit = { git = "https://github.com/zed-industries/font-kit", rev = "d97147f" }
|
||||
gpui_macros.workspace = true
|
||||
image = "0.23"
|
||||
itertools = "0.10"
|
||||
@@ -53,6 +48,7 @@ pathfinder_geometry = "0.5"
|
||||
postage.workspace = true
|
||||
rand.workspace = true
|
||||
raw-window-handle = "0.6"
|
||||
blade-rwh = { package = "raw-window-handle", version = "0.5" }
|
||||
refineable.workspace = true
|
||||
resvg = "0.14"
|
||||
schemars.workspace = true
|
||||
@@ -72,6 +68,7 @@ usvg = { version = "0.14", features = [] }
|
||||
util.workspace = true
|
||||
uuid = { version = "1.1.2", features = ["v4"] }
|
||||
waker-fn = "1.1.0"
|
||||
accesskit = { version = "0.12" }
|
||||
|
||||
[dev-dependencies]
|
||||
backtrace = "0.3"
|
||||
@@ -90,24 +87,28 @@ cbindgen = "0.26.0"
|
||||
block = "0.1"
|
||||
cocoa = "0.25"
|
||||
core-foundation = { version = "0.9.3", features = ["with-uuid"] }
|
||||
core-graphics = "0.23"
|
||||
core-text = "20.1"
|
||||
foreign-types = "0.5"
|
||||
core-graphics = "0.22.3"
|
||||
core-text = "19.2"
|
||||
foreign-types = "0.3"
|
||||
log.workspace = true
|
||||
media.workspace = true
|
||||
metal = "0.25"
|
||||
metal = "0.21.0"
|
||||
objc = "0.2"
|
||||
accesskit_macos = "0.11.0"
|
||||
|
||||
[target.'cfg(target_os = "linux")'.dependencies]
|
||||
flume = "0.11"
|
||||
xcb = { version = "1.3", features = ["as-raw-xcb-connection", "present", "randr"] }
|
||||
open = "5.0.1"
|
||||
ashpd = "0.7.0"
|
||||
# todo!(linux) - Technically do not use `randr`, but it doesn't compile otherwise
|
||||
xcb = { version = "1.3", features = ["as-raw-xcb-connection", "present", "randr", "xkb"] }
|
||||
wayland-client= { version = "0.31.2" }
|
||||
wayland-protocols = { version = "0.31.2", features = ["client"] }
|
||||
wayland-backend = { version = "0.3.3", features = ["client_system"] }
|
||||
as-raw-xcb-connection = "1"
|
||||
#TODO: use these on all platforms
|
||||
blade-graphics.workspace = true
|
||||
blade-macros.workspace = true
|
||||
blade-rwh.workspace = true
|
||||
blade-graphics = { git = "https://github.com/kvark/blade", rev = "c4f951a88b345724cb952e920ad30e39851f7760" }
|
||||
blade-macros = { git = "https://github.com/kvark/blade", rev = "c4f951a88b345724cb952e920ad30e39851f7760" }
|
||||
bytemuck = "1"
|
||||
cosmic-text = "0.10.0"
|
||||
xkbcommon = { version = "0.7", features = ["x11"] }
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
#![cfg_attr(any(not(target_os = "macos"), feature = "macos-blade"), allow(unused))]
|
||||
#![cfg_attr(not(target_os = "macos"), allow(unused))]
|
||||
|
||||
use std::{
|
||||
env,
|
||||
@@ -7,18 +7,15 @@ use std::{
|
||||
|
||||
use cbindgen::Config;
|
||||
|
||||
//TODO: consider generating shader code for WGSL
|
||||
//TODO: deprecate "runtime-shaders" and "macos-blade"
|
||||
|
||||
fn main() {
|
||||
#[cfg(target_os = "macos")]
|
||||
generate_dispatch_bindings();
|
||||
#[cfg(all(target_os = "macos", not(feature = "macos-blade")))]
|
||||
#[cfg(target_os = "macos")]
|
||||
let header_path = generate_shader_bindings();
|
||||
#[cfg(all(target_os = "macos", not(feature = "macos-blade")))]
|
||||
#[cfg(target_os = "macos")]
|
||||
#[cfg(feature = "runtime_shaders")]
|
||||
emit_stitched_shaders(&header_path);
|
||||
#[cfg(all(target_os = "macos", not(feature = "macos-blade")))]
|
||||
#[cfg(target_os = "macos")]
|
||||
#[cfg(not(feature = "runtime_shaders"))]
|
||||
compile_metal_shaders(&header_path);
|
||||
}
|
||||
|
||||
58
crates/gpui/src/access_kit.rs
Normal file
@@ -0,0 +1,58 @@
use std::hash::{Hash, Hasher};

use collections::{hash_map::Entry, HashMap};

use crate::{BorrowWindow, ElementContext, ElementId, GlobalElementId, WindowContext};

pub type AccessKitState = HashMap<accesskit::NodeId, accesskit::NodeBuilder>;

impl From<&GlobalElementId> for accesskit::NodeId {
    fn from(value: &GlobalElementId) -> Self {
        let mut hasher = std::hash::DefaultHasher::new();
        value.0.hash(&mut hasher);
        accesskit::NodeId(hasher.finish())
    }
}

impl<'a> ElementContext<'a> {

    // TODO: What's a good, useful signature for this? Need to expose this from the div as well.
    fn accesskit_action(&mut self, id: impl Into<ElementId>, action: accesskit::Action, f: impl FnOnce(accesskit::ActionRequest)) {
        self.with_element_id(Some(id), |cx| {
            // Get the access kit actions from somewhere
            // call f with the action request and cx
            // egui impl:
            // let accesskit_id = id.accesskit_id();
            // self.events.iter().filter_map(move |event| {
            //     if let Event::AccessKitActionRequest(request) = event {
            //         if request.target == accesskit_id && request.action == action {
            //             return Some(request);
            //         }
            //     }
            //     None
            // })

        })
    }

    // TODO: Expose this through the div API
    fn with_accesskit_node(&mut self, id: impl Into<ElementId>, f: impl FnOnce(&mut accesskit::NodeBuilder)) {
        let id = id.into();
        let window = self.window_mut();
        let parent_id: accesskit::NodeId = (&window.element_id_stack).into();
        self.with_element_id(Some(id), |cx| {
            let window = cx.window_mut();
            let this_id: accesskit::NodeId = (&window.element_id_stack).into();

            window.next_frame.accesskit.as_mut().map(|nodes| {
                if let Entry::Vacant(entry) = nodes.entry(this_id) {
                    entry.insert(Default::default());
                    let parent = nodes.get_mut(&parent_id).unwrap();
                    parent.push_child(this_id);
                }

                f(nodes.get_mut(&this_id).unwrap());
            })
        });
    }
}
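Since both methods above are still TODO stubs, here is one hypothetical shape a caller could take once `with_accesskit_node` is exposed; the element id, role, and label are invented for illustration, and the accesskit 0.12 `NodeBuilder` setters are assumed:

// Hypothetical caller, sketched against the ElementContext methods above.
fn paint_accessible_button(cx: &mut ElementContext) {
    cx.with_accesskit_node("submit-button", |node| {
        // Describe this element to assistive technology.
        node.set_role(accesskit::Role::Button);
        node.set_name("Submit");
    });
}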
@@ -241,6 +241,7 @@ pub struct AppContext {
|
||||
pub(crate) quit_observers: SubscriberSet<(), QuitHandler>,
|
||||
pub(crate) layout_id_buffer: Vec<LayoutId>, // We recycle this memory across layout requests.
|
||||
pub(crate) propagate_event: bool,
|
||||
pub(crate) screen_reader_enabled: bool,
|
||||
}
|
||||
|
||||
impl AppContext {
|
||||
@@ -299,6 +300,7 @@ impl AppContext {
|
||||
quit_observers: SubscriberSet::new(),
|
||||
layout_id_buffer: Default::default(),
|
||||
propagate_event: true,
|
||||
screen_reader_enabled: false,
|
||||
}),
|
||||
});
|
||||
|
||||
@@ -314,6 +316,7 @@ impl AppContext {
|
||||
app
|
||||
}
|
||||
|
||||
|
||||
/// Quit the application gracefully. Handlers registered with [`ModelContext::on_app_quit`]
|
||||
/// will be given 100ms to complete before exiting.
|
||||
pub fn shutdown(&mut self) {
|
||||
|
||||
@@ -38,9 +38,10 @@ use crate::{
|
||||
util::FluentBuilder, ArenaBox, AvailableSpace, Bounds, ElementContext, ElementId, LayoutId,
|
||||
Pixels, Point, Size, ViewContext, WindowContext, ELEMENT_ARENA,
|
||||
};
|
||||
use collections::FxHashSet;
|
||||
use derive_more::{Deref, DerefMut};
|
||||
pub(crate) use smallvec::SmallVec;
|
||||
use std::{any::Any, fmt::Debug, ops::DerefMut};
|
||||
use std::{any::Any, fmt::Debug, hash::{Hash, Hasher, SipHasher}, ops::DerefMut};
|
||||
|
||||
/// Implemented by types that participate in laying out and painting the contents of a window.
|
||||
/// Elements form a tree and are laid out according to web-based layout rules, as implemented by Taffy.
|
||||
@@ -222,7 +223,8 @@ impl<C: RenderOnce> IntoElement for Component<C> {
|
||||
|
||||
/// A globally unique identifier for an element, used to track state across frames.
|
||||
#[derive(Deref, DerefMut, Default, Clone, Debug, Eq, PartialEq, Hash)]
|
||||
pub(crate) struct GlobalElementId(SmallVec<[ElementId; 32]>);
|
||||
pub(crate) struct GlobalElementId(pub(crate) SmallVec<[ElementId; 32]>);
|
||||
|
||||
|
||||
trait ElementObject {
|
||||
fn element_id(&self) -> Option<ElementId>;
|
||||
|
||||
@@ -67,6 +67,7 @@
|
||||
mod action;
|
||||
mod app;
|
||||
|
||||
mod access_kit;
|
||||
mod arena;
|
||||
mod assets;
|
||||
mod color;
|
||||
|
||||
@@ -3,16 +3,10 @@
|
||||
|
||||
mod app_menu;
|
||||
mod keystroke;
|
||||
|
||||
#[cfg(target_os = "linux")]
|
||||
mod linux;
|
||||
|
||||
#[cfg(target_os = "macos")]
|
||||
mod mac;
|
||||
|
||||
#[cfg(any(target_os = "linux", feature = "macos-blade"))]
|
||||
mod blade;
|
||||
|
||||
#[cfg(any(test, feature = "test-support"))]
|
||||
mod test;
|
||||
|
||||
|
||||
@@ -1,8 +0,0 @@
|
||||
mod blade_atlas;
|
||||
mod blade_belt;
|
||||
mod blade_renderer;
|
||||
|
||||
pub(crate) use blade_atlas::*;
|
||||
pub(crate) use blade_renderer::*;
|
||||
|
||||
use blade_belt::*;
|
||||
@@ -1,6 +1,6 @@
|
||||
//todo!(linux): remove this
|
||||
#![allow(unused_variables)]
|
||||
|
||||
mod blade_atlas;
|
||||
mod blade_belt;
|
||||
mod blade_renderer;
|
||||
mod client;
|
||||
mod client_dispatcher;
|
||||
mod dispatcher;
|
||||
@@ -9,7 +9,10 @@ mod text_system;
|
||||
mod wayland;
|
||||
mod x11;
|
||||
|
||||
pub(crate) use blade_atlas::*;
|
||||
pub(crate) use dispatcher::*;
|
||||
pub(crate) use platform::*;
|
||||
pub(crate) use text_system::*;
|
||||
pub(crate) use x11::*;
|
||||
|
||||
use blade_belt::*;
|
||||
|
||||
@@ -200,7 +200,7 @@ impl BladeAtlasState {
|
||||
}
|
||||
|
||||
fn upload_texture(&mut self, id: AtlasTextureId, bounds: Bounds<DevicePixels>, bytes: &[u8]) {
|
||||
let data = self.upload_belt.alloc_data(bytes, &self.gpu);
|
||||
let data = unsafe { self.upload_belt.alloc_data(bytes, &self.gpu) };
|
||||
self.uploads.push(PendingUpload { id, bounds, data });
|
||||
}
|
||||
|
||||
@@ -75,8 +75,8 @@ impl BladeBelt {
|
||||
chunk.into()
|
||||
}
|
||||
|
||||
//todo!(linux): enforce T: bytemuck::Zeroable
|
||||
pub fn alloc_data<T>(&mut self, data: &[T], gpu: &gpu::Context) -> gpu::BufferPiece {
|
||||
// SAFETY: T should be zeroable and ordinary data, no references, pointers, cells or other complicated data type.
|
||||
pub unsafe fn alloc_data<T>(&mut self, data: &[T], gpu: &gpu::Context) -> gpu::BufferPiece {
|
||||
assert!(!data.is_empty());
|
||||
let type_alignment = mem::align_of::<T>() as u64;
|
||||
debug_assert_eq!(
|
||||
@@ -1,18 +1,14 @@
|
||||
// Doing `if let` gives you nice scoping with passes/encoders
|
||||
#![allow(irrefutable_let_patterns)]
|
||||
|
||||
use super::{BladeAtlas, BladeBelt, BladeBeltDescriptor, PATH_TEXTURE_FORMAT};
|
||||
use super::{BladeBelt, BladeBeltDescriptor};
|
||||
use crate::{
|
||||
AtlasTextureKind, AtlasTile, Bounds, ContentMask, Hsla, MonochromeSprite, Path, PathId,
|
||||
PathVertex, PolychromeSprite, PrimitiveBatch, Quad, ScaledPixels, Scene, Shadow, Size,
|
||||
Underline,
|
||||
AtlasTextureKind, AtlasTile, BladeAtlas, Bounds, ContentMask, Hsla, MonochromeSprite, Path,
|
||||
PathId, PathVertex, PolychromeSprite, PrimitiveBatch, Quad, ScaledPixels, Scene, Shadow,
|
||||
Underline, PATH_TEXTURE_FORMAT,
|
||||
};
|
||||
use bytemuck::{Pod, Zeroable};
|
||||
use collections::HashMap;
|
||||
#[cfg(target_os = "macos")]
|
||||
use media::core_video::CVMetalTextureCache;
|
||||
#[cfg(target_os = "macos")]
|
||||
use std::ffi::c_void;
|
||||
|
||||
use blade_graphics as gpu;
|
||||
use std::{mem, sync::Arc};
|
||||
@@ -20,61 +16,6 @@ use std::{mem, sync::Arc};
|
||||
const SURFACE_FRAME_COUNT: u32 = 3;
|
||||
const MAX_FRAME_TIME_MS: u32 = 1000;
|
||||
|
||||
pub type Context = ();
|
||||
pub type Renderer = BladeRenderer;
|
||||
|
||||
#[cfg(target_os = "macos")]
|
||||
pub unsafe fn new_renderer(
|
||||
_context: self::Context,
|
||||
native_window: *mut c_void,
|
||||
native_view: *mut c_void,
|
||||
bounds: crate::Size<f32>,
|
||||
) -> Renderer {
|
||||
struct RawWindow {
|
||||
window: *mut c_void,
|
||||
view: *mut c_void,
|
||||
}
|
||||
|
||||
unsafe impl blade_rwh::HasRawWindowHandle for RawWindow {
|
||||
fn raw_window_handle(&self) -> blade_rwh::RawWindowHandle {
|
||||
let mut wh = blade_rwh::AppKitWindowHandle::empty();
|
||||
wh.ns_window = self.window;
|
||||
wh.ns_view = self.view;
|
||||
wh.into()
|
||||
}
|
||||
}
|
||||
|
||||
unsafe impl blade_rwh::HasRawDisplayHandle for RawWindow {
|
||||
fn raw_display_handle(&self) -> blade_rwh::RawDisplayHandle {
|
||||
let dh = blade_rwh::AppKitDisplayHandle::empty();
|
||||
dh.into()
|
||||
}
|
||||
}
|
||||
|
||||
let gpu = Arc::new(
|
||||
gpu::Context::init_windowed(
|
||||
&RawWindow {
|
||||
window: native_window as *mut _,
|
||||
view: native_view as *mut _,
|
||||
},
|
||||
gpu::ContextDesc {
|
||||
validation: cfg!(debug_assertions),
|
||||
capture: false,
|
||||
},
|
||||
)
|
||||
.unwrap(),
|
||||
);
|
||||
|
||||
BladeRenderer::new(
|
||||
gpu,
|
||||
gpu::Extent {
|
||||
width: bounds.width as u32,
|
||||
height: bounds.height as u32,
|
||||
depth: 1,
|
||||
},
|
||||
)
|
||||
}
|
||||
|
||||
#[repr(C)]
|
||||
#[derive(Clone, Copy, Pod, Zeroable)]
|
||||
struct GlobalParams {
|
||||
@@ -82,31 +23,6 @@ struct GlobalParams {
|
||||
pad: [u32; 2],
|
||||
}
|
||||
|
||||
//Note: we can't use `Bounds` directly here because
|
||||
// it doesn't implement Pod + Zeroable
|
||||
#[repr(C)]
|
||||
#[derive(Clone, Copy, Pod, Zeroable)]
|
||||
struct PodBounds {
|
||||
origin: [f32; 2],
|
||||
size: [f32; 2],
|
||||
}
|
||||
|
||||
impl From<Bounds<ScaledPixels>> for PodBounds {
|
||||
fn from(bounds: Bounds<ScaledPixels>) -> Self {
|
||||
Self {
|
||||
origin: [bounds.origin.x.0, bounds.origin.y.0],
|
||||
size: [bounds.size.width.0, bounds.size.height.0],
|
||||
}
|
||||
}
|
||||
}
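The `From` impl above is what lets a layout rectangle be handed to the shader as plain bytes; a short hedged sketch of the round trip (the helper name is made up, the `bytemuck` call is the standard one):

```rust
use bytemuck::bytes_of;

// Hypothetical helper: turn a layout rectangle into the bytes the shader expects.
fn bounds_bytes(bounds: Bounds<ScaledPixels>) -> Vec<u8> {
    let pod: PodBounds = bounds.into();
    // PodBounds is #[repr(C)] + Pod, so it can be viewed as raw bytes and
    // copied straight into a uniform or instance buffer.
    bytes_of(&pod).to_vec()
}
```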
|
||||
|
||||
#[repr(C)]
|
||||
#[derive(Clone, Copy, Pod, Zeroable)]
|
||||
struct SurfaceParams {
|
||||
bounds: PodBounds,
|
||||
content_mask: PodBounds,
|
||||
}
|
||||
|
||||
#[derive(blade_macros::ShaderData)]
|
||||
struct ShaderQuadsData {
|
||||
globals: GlobalParams,
|
||||
@@ -155,15 +71,6 @@ struct ShaderPolySpritesData {
|
||||
b_poly_sprites: gpu::BufferPiece,
|
||||
}
|
||||
|
||||
#[derive(blade_macros::ShaderData)]
|
||||
struct ShaderSurfacesData {
|
||||
globals: GlobalParams,
|
||||
surface_locals: SurfaceParams,
|
||||
t_y: gpu::TextureView,
|
||||
t_cb_cr: gpu::TextureView,
|
||||
s_surface: gpu::Sampler,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, Eq, PartialEq)]
|
||||
#[repr(C)]
|
||||
struct PathSprite {
|
||||
@@ -180,7 +87,6 @@ struct BladePipelines {
|
||||
underlines: gpu::RenderPipeline,
|
||||
mono_sprites: gpu::RenderPipeline,
|
||||
poly_sprites: gpu::RenderPipeline,
|
||||
surfaces: gpu::RenderPipeline,
|
||||
}
|
||||
|
||||
impl BladePipelines {
|
||||
@@ -190,8 +96,6 @@ impl BladePipelines {
|
||||
let shader = gpu.create_shader(gpu::ShaderDesc {
|
||||
source: include_str!("shaders.wgsl"),
|
||||
});
|
||||
shader.check_struct_size::<GlobalParams>();
|
||||
shader.check_struct_size::<SurfaceParams>();
|
||||
shader.check_struct_size::<Quad>();
|
||||
shader.check_struct_size::<Shadow>();
|
||||
assert_eq!(
|
||||
@@ -316,22 +220,6 @@ impl BladePipelines {
|
||||
write_mask: gpu::ColorWrites::default(),
|
||||
}],
|
||||
}),
|
||||
surfaces: gpu.create_render_pipeline(gpu::RenderPipelineDesc {
|
||||
name: "surfaces",
|
||||
data_layouts: &[&ShaderSurfacesData::layout()],
|
||||
vertex: shader.at("vs_surface"),
|
||||
primitive: gpu::PrimitiveState {
|
||||
topology: gpu::PrimitiveTopology::TriangleStrip,
|
||||
..Default::default()
|
||||
},
|
||||
depth_stencil: None,
|
||||
fragment: shader.at("fs_surface"),
|
||||
color_targets: &[gpu::ColorTargetState {
|
||||
format: surface_format,
|
||||
blend: Some(gpu::BlendState::ALPHA_BLENDING),
|
||||
write_mask: gpu::ColorWrites::default(),
|
||||
}],
|
||||
}),
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -346,8 +234,6 @@ pub struct BladeRenderer {
|
||||
path_tiles: HashMap<PathId, AtlasTile>,
|
||||
atlas: Arc<BladeAtlas>,
|
||||
atlas_sampler: gpu::Sampler,
|
||||
#[cfg(target_os = "macos")]
|
||||
core_video_texture_cache: CVMetalTextureCache,
|
||||
}
|
||||
|
||||
impl BladeRenderer {
|
||||
@@ -382,12 +268,6 @@ impl BladeRenderer {
|
||||
..Default::default()
|
||||
});
|
||||
|
||||
#[cfg(target_os = "macos")]
|
||||
let core_video_texture_cache = unsafe {
|
||||
use foreign_types::ForeignType as _;
|
||||
CVMetalTextureCache::new(gpu.metal_device().as_ptr()).unwrap()
|
||||
};
|
||||
|
||||
Self {
|
||||
gpu,
|
||||
command_encoder,
|
||||
@@ -398,8 +278,6 @@ impl BladeRenderer {
|
||||
path_tiles: HashMap::default(),
|
||||
atlas,
|
||||
atlas_sampler,
|
||||
#[cfg(target_os = "macos")]
|
||||
core_video_texture_cache,
|
||||
}
|
||||
}
|
||||
|
||||
@@ -411,39 +289,27 @@ impl BladeRenderer {
|
||||
}
|
||||
}
|
||||
|
||||
pub fn update_drawable_size(&mut self, size: Size<f64>) {
|
||||
let gpu_size = gpu::Extent {
|
||||
width: size.width as u32,
|
||||
height: size.height as u32,
|
||||
depth: 1,
|
||||
};
|
||||
pub fn destroy(&mut self) {
|
||||
self.wait_for_gpu();
|
||||
self.atlas.destroy();
|
||||
self.instance_belt.destroy(&self.gpu);
|
||||
self.gpu.destroy_command_encoder(&mut self.command_encoder);
|
||||
}
|
||||
|
||||
if gpu_size != self.viewport_size() {
|
||||
self.wait_for_gpu();
|
||||
self.gpu.resize(Self::make_surface_config(gpu_size));
|
||||
self.viewport_size = gpu_size;
|
||||
}
|
||||
pub fn resize(&mut self, size: gpu::Extent) {
|
||||
self.wait_for_gpu();
|
||||
self.gpu.resize(Self::make_surface_config(size));
|
||||
self.viewport_size = size;
|
||||
}
|
||||
|
||||
pub fn viewport_size(&self) -> gpu::Extent {
|
||||
self.viewport_size
|
||||
}
|
||||
|
||||
pub fn sprite_atlas(&self) -> &Arc<BladeAtlas> {
|
||||
pub fn atlas(&self) -> &Arc<BladeAtlas> {
|
||||
&self.atlas
|
||||
}
|
||||
|
||||
#[cfg(target_os = "macos")]
|
||||
pub fn layer(&self) -> metal::MetalLayer {
|
||||
self.gpu.metal_layer().unwrap()
|
||||
}
|
||||
|
||||
#[cfg(target_os = "macos")]
|
||||
pub fn layer_ptr(&self) -> *mut metal::CAMetalLayer {
|
||||
use metal::foreign_types::ForeignType as _;
|
||||
self.gpu.metal_layer().unwrap().as_ptr()
|
||||
}
|
||||
|
||||
fn rasterize_paths(&mut self, paths: &[Path<ScaledPixels>]) {
|
||||
self.path_tiles.clear();
|
||||
let mut vertices_by_texture_id = HashMap::default();
|
||||
@@ -474,7 +340,7 @@ impl BladeRenderer {
|
||||
pad: [0; 2],
|
||||
};
|
||||
|
||||
let vertex_buf = self.instance_belt.alloc_data(&vertices, &self.gpu);
|
||||
let vertex_buf = unsafe { self.instance_belt.alloc_data(&vertices, &self.gpu) };
|
||||
let mut pass = self.command_encoder.render(gpu::RenderTargetSet {
|
||||
colors: &[gpu::RenderTarget {
|
||||
view: tex_info.raw_view,
|
||||
@@ -496,13 +362,6 @@ impl BladeRenderer {
|
||||
}
|
||||
}
|
||||
|
||||
pub fn destroy(&mut self) {
|
||||
self.wait_for_gpu();
|
||||
self.atlas.destroy();
|
||||
self.instance_belt.destroy(&self.gpu);
|
||||
self.gpu.destroy_command_encoder(&mut self.command_encoder);
|
||||
}
|
||||
|
||||
pub fn draw(&mut self, scene: &Scene) {
|
||||
let frame = self.gpu.acquire_frame();
|
||||
self.command_encoder.start();
|
||||
@@ -530,7 +389,8 @@ impl BladeRenderer {
|
||||
for batch in scene.batches() {
|
||||
match batch {
|
||||
PrimitiveBatch::Quads(quads) => {
|
||||
let instance_buf = self.instance_belt.alloc_data(quads, &self.gpu);
|
||||
let instance_buf =
|
||||
unsafe { self.instance_belt.alloc_data(quads, &self.gpu) };
|
||||
let mut encoder = pass.with(&self.pipelines.quads);
|
||||
encoder.bind(
|
||||
0,
|
||||
@@ -542,7 +402,8 @@ impl BladeRenderer {
|
||||
encoder.draw(0, 4, 0, quads.len() as u32);
|
||||
}
|
||||
PrimitiveBatch::Shadows(shadows) => {
|
||||
let instance_buf = self.instance_belt.alloc_data(shadows, &self.gpu);
|
||||
let instance_buf =
|
||||
unsafe { self.instance_belt.alloc_data(shadows, &self.gpu) };
|
||||
let mut encoder = pass.with(&self.pipelines.shadows);
|
||||
encoder.bind(
|
||||
0,
|
||||
@@ -569,7 +430,8 @@ impl BladeRenderer {
|
||||
tile: (*tile).clone(),
|
||||
}];
|
||||
|
||||
let instance_buf = self.instance_belt.alloc_data(&sprites, &self.gpu);
|
||||
let instance_buf =
|
||||
unsafe { self.instance_belt.alloc_data(&sprites, &self.gpu) };
|
||||
encoder.bind(
|
||||
0,
|
||||
&ShaderPathsData {
|
||||
@@ -583,7 +445,8 @@ impl BladeRenderer {
|
||||
}
|
||||
}
|
||||
PrimitiveBatch::Underlines(underlines) => {
|
||||
let instance_buf = self.instance_belt.alloc_data(underlines, &self.gpu);
|
||||
let instance_buf =
|
||||
unsafe { self.instance_belt.alloc_data(underlines, &self.gpu) };
|
||||
let mut encoder = pass.with(&self.pipelines.underlines);
|
||||
encoder.bind(
|
||||
0,
|
||||
@@ -599,7 +462,8 @@ impl BladeRenderer {
|
||||
sprites,
|
||||
} => {
|
||||
let tex_info = self.atlas.get_texture_info(texture_id);
|
||||
let instance_buf = self.instance_belt.alloc_data(&sprites, &self.gpu);
|
||||
let instance_buf =
|
||||
unsafe { self.instance_belt.alloc_data(sprites, &self.gpu) };
|
||||
let mut encoder = pass.with(&self.pipelines.mono_sprites);
|
||||
encoder.bind(
|
||||
0,
|
||||
@@ -617,7 +481,8 @@ impl BladeRenderer {
|
||||
sprites,
|
||||
} => {
|
||||
let tex_info = self.atlas.get_texture_info(texture_id);
|
||||
let instance_buf = self.instance_belt.alloc_data(&sprites, &self.gpu);
|
||||
let instance_buf =
|
||||
unsafe { self.instance_belt.alloc_data(sprites, &self.gpu) };
|
||||
let mut encoder = pass.with(&self.pipelines.poly_sprites);
|
||||
encoder.bind(
|
||||
0,
|
||||
@@ -630,78 +495,8 @@ impl BladeRenderer {
|
||||
);
|
||||
encoder.draw(0, 4, 0, sprites.len() as u32);
|
||||
}
|
||||
PrimitiveBatch::Surfaces(surfaces) => {
|
||||
let mut encoder = pass.with(&self.pipelines.surfaces);
|
||||
|
||||
for surface in surfaces {
|
||||
#[cfg(not(target_os = "macos"))]
|
||||
let (t_y, t_cb_cr) = {
|
||||
let _ = surface;
|
||||
continue;
|
||||
};
|
||||
#[cfg(target_os = "macos")]
|
||||
let (t_y, t_cb_cr) = {
|
||||
use core_foundation::base::TCFType as _;
|
||||
use std::ptr;
|
||||
|
||||
assert_eq!(
|
||||
surface.image_buffer.pixel_format_type(),
|
||||
media::core_video::kCVPixelFormatType_420YpCbCr8BiPlanarFullRange
|
||||
);
|
||||
|
||||
let y_texture = unsafe {
|
||||
self.core_video_texture_cache
|
||||
.create_texture_from_image(
|
||||
surface.image_buffer.as_concrete_TypeRef(),
|
||||
ptr::null(),
|
||||
metal::MTLPixelFormat::R8Unorm,
|
||||
surface.image_buffer.plane_width(0),
|
||||
surface.image_buffer.plane_height(0),
|
||||
0,
|
||||
)
|
||||
.unwrap()
|
||||
};
|
||||
let cb_cr_texture = unsafe {
|
||||
self.core_video_texture_cache
|
||||
.create_texture_from_image(
|
||||
surface.image_buffer.as_concrete_TypeRef(),
|
||||
ptr::null(),
|
||||
metal::MTLPixelFormat::RG8Unorm,
|
||||
surface.image_buffer.plane_width(1),
|
||||
surface.image_buffer.plane_height(1),
|
||||
1,
|
||||
)
|
||||
.unwrap()
|
||||
};
|
||||
(
|
||||
gpu::TextureView::from_metal_texture(
|
||||
y_texture.as_texture_ref(),
|
||||
),
|
||||
gpu::TextureView::from_metal_texture(
|
||||
cb_cr_texture.as_texture_ref(),
|
||||
),
|
||||
)
|
||||
};
|
||||
|
||||
#[cfg_attr(
|
||||
any(not(target_os = "macos"), feature = "macos-blade"),
|
||||
allow(unreachable_code)
|
||||
)]
|
||||
encoder.bind(
|
||||
0,
|
||||
&ShaderSurfacesData {
|
||||
globals,
|
||||
surface_locals: SurfaceParams {
|
||||
bounds: surface.bounds.into(),
|
||||
content_mask: surface.content_mask.bounds.into(),
|
||||
},
|
||||
t_y,
|
||||
t_cb_cr,
|
||||
s_surface: self.atlas_sampler,
|
||||
},
|
||||
);
|
||||
encoder.draw(0, 4, 0, 1);
|
||||
}
|
||||
PrimitiveBatch::Surfaces { .. } => {
|
||||
unimplemented!()
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -717,4 +512,4 @@ impl BladeRenderer {
|
||||
self.wait_for_gpu();
|
||||
self.last_sync_point = Some(sync_point);
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -8,6 +8,7 @@ use std::{
|
||||
time::Duration,
|
||||
};
|
||||
|
||||
use ashpd::desktop::file_chooser::{OpenFileRequest, SaveFileRequest};
|
||||
use async_task::Runnable;
|
||||
use flume::{Receiver, Sender};
|
||||
use futures::channel::oneshot;
|
||||
@@ -49,8 +50,8 @@ pub(crate) struct LinuxPlatformInner {
|
||||
}
|
||||
|
||||
pub(crate) struct LinuxPlatform {
|
||||
client: Arc<dyn Client>,
|
||||
inner: Arc<LinuxPlatformInner>,
|
||||
client: Rc<dyn Client>,
|
||||
inner: Rc<LinuxPlatformInner>,
|
||||
}
|
||||
|
||||
pub(crate) struct LinuxPlatformState {
|
||||
@@ -93,7 +94,7 @@ impl LinuxPlatform {
|
||||
let client_dispatcher: Arc<dyn ClientDispatcher + Send + Sync> =
|
||||
Arc::new(WaylandClientDispatcher::new(&conn));
|
||||
let dispatcher = Arc::new(LinuxDispatcher::new(main_sender, &client_dispatcher));
|
||||
let inner = Arc::new(LinuxPlatformInner {
|
||||
let inner = Rc::new(LinuxPlatformInner {
|
||||
background_executor: BackgroundExecutor::new(dispatcher.clone()),
|
||||
foreground_executor: ForegroundExecutor::new(dispatcher.clone()),
|
||||
main_receiver,
|
||||
@@ -101,10 +102,10 @@ impl LinuxPlatform {
|
||||
callbacks,
|
||||
state,
|
||||
});
|
||||
let client = Arc::new(WaylandClient::new(Arc::clone(&inner), Arc::clone(&conn)));
|
||||
let client = Rc::new(WaylandClient::new(Rc::clone(&inner), Arc::clone(&conn)));
|
||||
Self {
|
||||
client,
|
||||
inner: Arc::clone(&inner),
|
||||
inner: Rc::clone(&inner),
|
||||
}
|
||||
}
|
||||
|
||||
@@ -115,15 +116,27 @@ impl LinuxPlatform {
|
||||
callbacks: Mutex<Callbacks>,
|
||||
state: Mutex<LinuxPlatformState>,
|
||||
) -> Self {
|
||||
let (xcb_connection, x_root_index) =
|
||||
xcb::Connection::connect_with_extensions(None, &[xcb::Extension::Present], &[])
|
||||
.unwrap();
|
||||
let (xcb_connection, x_root_index) = xcb::Connection::connect_with_extensions(
|
||||
None,
|
||||
&[xcb::Extension::Present, xcb::Extension::Xkb],
|
||||
&[],
|
||||
)
|
||||
.unwrap();
|
||||
|
||||
let xkb_ver = xcb_connection
|
||||
.wait_for_reply(xcb_connection.send_request(&xcb::xkb::UseExtension {
|
||||
wanted_major: xcb::xkb::MAJOR_VERSION as u16,
|
||||
wanted_minor: xcb::xkb::MINOR_VERSION as u16,
|
||||
}))
|
||||
.unwrap();
|
||||
assert!(xkb_ver.supported());
|
||||
|
||||
let atoms = XcbAtoms::intern_all(&xcb_connection).unwrap();
|
||||
let xcb_connection = Arc::new(xcb_connection);
|
||||
let client_dispatcher: Arc<dyn ClientDispatcher + Send + Sync> =
|
||||
Arc::new(X11ClientDispatcher::new(&xcb_connection, x_root_index));
|
||||
let dispatcher = Arc::new(LinuxDispatcher::new(main_sender, &client_dispatcher));
|
||||
let inner = Arc::new(LinuxPlatformInner {
|
||||
let inner = Rc::new(LinuxPlatformInner {
|
||||
background_executor: BackgroundExecutor::new(dispatcher.clone()),
|
||||
foreground_executor: ForegroundExecutor::new(dispatcher.clone()),
|
||||
main_receiver,
|
||||
@@ -131,15 +144,15 @@ impl LinuxPlatform {
|
||||
callbacks,
|
||||
state,
|
||||
});
|
||||
let client = Arc::new(X11Client::new(
|
||||
Arc::clone(&inner),
|
||||
let client = Rc::new(X11Client::new(
|
||||
Rc::clone(&inner),
|
||||
xcb_connection,
|
||||
x_root_index,
|
||||
atoms,
|
||||
));
|
||||
Self {
|
||||
client,
|
||||
inner: Arc::clone(&inner),
|
||||
inner: Rc::clone(&inner),
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -202,7 +215,7 @@ impl Platform for LinuxPlatform {
|
||||
}
|
||||
|
||||
fn open_url(&self, url: &str) {
|
||||
unimplemented!()
|
||||
open::that(url);
|
||||
}
|
||||
|
||||
fn on_open_urls(&self, callback: Box<dyn FnMut(Vec<String>)>) {
|
||||
@@ -213,15 +226,75 @@ impl Platform for LinuxPlatform {
|
||||
&self,
|
||||
options: PathPromptOptions,
|
||||
) -> oneshot::Receiver<Option<Vec<PathBuf>>> {
|
||||
unimplemented!()
|
||||
let (done_tx, done_rx) = oneshot::channel();
|
||||
self.foreground_executor()
|
||||
.spawn(async move {
|
||||
let title = if options.multiple {
    if options.files {
        "Open files"
    } else {
        "Open folders"
    }
} else if options.files {
    "Open file"
} else {
    "Open folder"
};
|
||||
|
||||
let result = OpenFileRequest::default()
|
||||
.modal(true)
|
||||
.title(title)
|
||||
.accept_label("Select")
|
||||
.multiple(options.multiple)
|
||||
.directory(options.directories)
|
||||
.send()
|
||||
.await
|
||||
.ok()
|
||||
.and_then(|request| request.response().ok())
|
||||
.and_then(|response| {
|
||||
response
|
||||
.uris()
|
||||
.iter()
|
||||
.map(|uri| uri.to_file_path().ok())
|
||||
.collect()
|
||||
});
|
||||
|
||||
done_tx.send(result);
|
||||
})
|
||||
.detach();
|
||||
done_rx
|
||||
}
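On the calling side the returned receiver is simply awaited; a hedged in-crate sketch (the `pick_files` helper is illustrative, and the `Platform` trait must be in scope for the method call):

```rust
use std::path::PathBuf;

// Hypothetical caller: request the portal dialog, then await the oneshot
// receiver; a cancelled dialog surfaces as Ok(None) or a Canceled error.
async fn pick_files(platform: &LinuxPlatform) -> Option<Vec<PathBuf>> {
    let rx = platform.prompt_for_paths(PathPromptOptions {
        files: true,
        directories: false,
        multiple: true,
    });
    rx.await.ok().flatten()
}
```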
|
||||
|
||||
fn prompt_for_new_path(&self, directory: &Path) -> oneshot::Receiver<Option<PathBuf>> {
|
||||
unimplemented!()
|
||||
let (done_tx, done_rx) = oneshot::channel();
|
||||
let directory = directory.to_owned();
|
||||
self.foreground_executor()
|
||||
.spawn(async move {
|
||||
let result = SaveFileRequest::default()
|
||||
.modal(true)
|
||||
.title("Select new path")
|
||||
.accept_label("Accept")
|
||||
.send()
|
||||
.await
|
||||
.ok()
|
||||
.and_then(|request| request.response().ok())
|
||||
.and_then(|response| {
|
||||
response
|
||||
.uris()
|
||||
.first()
|
||||
.and_then(|uri| uri.to_file_path().ok())
|
||||
});
|
||||
|
||||
done_tx.send(result);
|
||||
})
|
||||
.detach();
|
||||
done_rx
|
||||
}
|
||||
|
||||
fn reveal_path(&self, path: &Path) {
|
||||
unimplemented!()
|
||||
open::that(path);
|
||||
}
|
||||
|
||||
fn on_become_active(&self, callback: Box<dyn FnMut()>) {
|
||||
|
||||
@@ -1,9 +1,9 @@
|
||||
struct GlobalParams {
|
||||
struct Globals {
|
||||
viewport_size: vec2<f32>,
|
||||
pad: vec2<u32>,
|
||||
}
|
||||
|
||||
var<uniform> globals: GlobalParams;
|
||||
var<uniform> globals: Globals;
|
||||
var t_sprite: texture_2d<f32>;
|
||||
var s_sprite: sampler;
|
||||
|
||||
@@ -563,56 +563,7 @@ fn fs_poly_sprite(input: PolySpriteVarying) -> @location(0) vec4<f32> {
|
||||
color = vec4<f32>(vec3<f32>(grayscale), sample.a);
|
||||
}
|
||||
color.a *= saturate(0.5 - distance);
|
||||
return color;
|
||||
return color;
|
||||
}
|
||||
|
||||
// --- surfaces --- //
|
||||
|
||||
struct SurfaceParams {
|
||||
bounds: Bounds,
|
||||
content_mask: Bounds,
|
||||
}
|
||||
|
||||
var<uniform> surface_locals: SurfaceParams;
|
||||
var t_y: texture_2d<f32>;
|
||||
var t_cb_cr: texture_2d<f32>;
|
||||
var s_surface: sampler;
|
||||
|
||||
const ycbcr_to_RGB = mat4x4<f32>(
|
||||
vec4<f32>( 1.0000f, 1.0000f, 1.0000f, 0.0),
|
||||
vec4<f32>( 0.0000f, -0.3441f, 1.7720f, 0.0),
|
||||
vec4<f32>( 1.4020f, -0.7141f, 0.0000f, 0.0),
|
||||
vec4<f32>(-0.7010f, 0.5291f, -0.8860f, 1.0),
|
||||
);
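For reference, the matrix above is the standard full-range BT.601 YCbCr-to-RGB transform; the fourth column just folds the 0.5 chroma offset into constants, so expanded per channel it reads:

$$
\begin{aligned}
R &= Y + 1.402\,(C_r - 0.5) \\
G &= Y - 0.3441\,(C_b - 0.5) - 0.7141\,(C_r - 0.5) \\
B &= Y + 1.772\,(C_b - 0.5)
\end{aligned}
$$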
|
||||
|
||||
struct SurfaceVarying {
|
||||
@builtin(position) position: vec4<f32>,
|
||||
@location(0) texture_position: vec2<f32>,
|
||||
@location(3) clip_distances: vec4<f32>,
|
||||
}
|
||||
|
||||
@vertex
|
||||
fn vs_surface(@builtin(vertex_index) vertex_id: u32) -> SurfaceVarying {
|
||||
let unit_vertex = vec2<f32>(f32(vertex_id & 1u), 0.5 * f32(vertex_id & 2u));
|
||||
|
||||
var out = SurfaceVarying();
|
||||
out.position = to_device_position(unit_vertex, surface_locals.bounds);
|
||||
out.texture_position = unit_vertex;
|
||||
out.clip_distances = distance_from_clip_rect(unit_vertex, surface_locals.bounds, surface_locals.content_mask);
|
||||
return out;
|
||||
}
|
||||
|
||||
@fragment
|
||||
fn fs_surface(input: SurfaceVarying) -> @location(0) vec4<f32> {
|
||||
// Alpha clip after using the derivatives.
|
||||
if (any(input.clip_distances < vec4<f32>(0.0))) {
|
||||
return vec4<f32>(0.0);
|
||||
}
|
||||
|
||||
let y_cb_cr = vec4<f32>(
|
||||
textureSampleLevel(t_y, s_surface, input.texture_position, 0.0).r,
|
||||
textureSampleLevel(t_cb_cr, s_surface, input.texture_position, 0.0).rg,
|
||||
1.0);
|
||||
|
||||
return ycbcr_to_RGB * y_cb_cr;
|
||||
}
|
||||
// --- surface sprites --- //
|
||||
@@ -1,3 +1,6 @@
|
||||
//todo!(linux): remove this once the relevant functionality has been implemented
|
||||
#![allow(unused_variables)]
|
||||
|
||||
pub(crate) use client::*;
|
||||
pub(crate) use client_dispatcher::*;
|
||||
|
||||
|
||||
@@ -26,12 +26,12 @@ pub(crate) struct WaylandClientState {
|
||||
compositor: Option<wl_compositor::WlCompositor>,
|
||||
buffer: Option<wl_buffer::WlBuffer>,
|
||||
wm_base: Option<xdg_wm_base::XdgWmBase>,
|
||||
windows: Vec<(xdg_surface::XdgSurface, Arc<WaylandWindowState>)>,
|
||||
platform_inner: Arc<LinuxPlatformInner>,
|
||||
windows: Vec<(xdg_surface::XdgSurface, Rc<WaylandWindowState>)>,
|
||||
platform_inner: Rc<LinuxPlatformInner>,
|
||||
}
|
||||
|
||||
pub(crate) struct WaylandClient {
|
||||
platform_inner: Arc<LinuxPlatformInner>,
|
||||
platform_inner: Rc<LinuxPlatformInner>,
|
||||
conn: Arc<Connection>,
|
||||
state: Mutex<WaylandClientState>,
|
||||
event_queue: Mutex<EventQueue<WaylandClientState>>,
|
||||
@@ -39,16 +39,13 @@ pub(crate) struct WaylandClient {
|
||||
}
|
||||
|
||||
impl WaylandClient {
|
||||
pub(crate) fn new(
|
||||
linux_platform_inner: Arc<LinuxPlatformInner>,
|
||||
conn: Arc<Connection>,
|
||||
) -> Self {
|
||||
pub(crate) fn new(linux_platform_inner: Rc<LinuxPlatformInner>, conn: Arc<Connection>) -> Self {
|
||||
let state = WaylandClientState {
|
||||
compositor: None,
|
||||
buffer: None,
|
||||
wm_base: None,
|
||||
windows: Vec::new(),
|
||||
platform_inner: Arc::clone(&linux_platform_inner),
|
||||
platform_inner: Rc::clone(&linux_platform_inner),
|
||||
};
|
||||
let event_queue: EventQueue<WaylandClientState> = conn.new_event_queue();
|
||||
let qh = event_queue.handle();
|
||||
@@ -109,14 +106,14 @@ impl Client for WaylandClient {
|
||||
wl_surface.frame(&self.qh, wl_surface.clone());
|
||||
wl_surface.commit();
|
||||
|
||||
let window_state: Arc<WaylandWindowState> = Arc::new(WaylandWindowState::new(
|
||||
let window_state = Rc::new(WaylandWindowState::new(
|
||||
&self.conn,
|
||||
wl_surface.clone(),
|
||||
Arc::new(toplevel),
|
||||
options,
|
||||
));
|
||||
|
||||
state.windows.push((xdg_surface, Arc::clone(&window_state)));
|
||||
state.windows.push((xdg_surface, Rc::clone(&window_state)));
|
||||
Box::new(WaylandWindow(window_state))
|
||||
}
|
||||
}
|
||||
|
||||
@@ -10,12 +10,12 @@ pub(crate) struct WaylandDisplay {}
|
||||
impl PlatformDisplay for WaylandDisplay {
|
||||
// todo!(linux)
|
||||
fn id(&self) -> DisplayId {
|
||||
return DisplayId(123); // return some fake data so it doesn't panic
|
||||
DisplayId(123) // return some fake data so it doesn't panic
|
||||
}
|
||||
|
||||
// todo!(linux)
|
||||
fn uuid(&self) -> anyhow::Result<Uuid> {
|
||||
return Ok(Uuid::from_bytes([0; 16])); // return some fake data so it doesn't panic
|
||||
Ok(Uuid::from_bytes([0; 16])) // return some fake data so it doesn't panic
|
||||
}
|
||||
|
||||
// todo!(linux)
|
||||
|
||||
@@ -13,12 +13,13 @@ use raw_window_handle::{
|
||||
use wayland_client::{protocol::wl_surface, Proxy};
|
||||
use wayland_protocols::xdg::shell::client::xdg_toplevel;
|
||||
|
||||
use crate::platform::linux::blade_renderer::BladeRenderer;
|
||||
use crate::platform::linux::wayland::display::WaylandDisplay;
|
||||
use crate::platform::{PlatformAtlas, PlatformInputHandler, PlatformWindow};
|
||||
use crate::scene::Scene;
|
||||
use crate::{
|
||||
px, BladeRenderer, Bounds, Modifiers, Pixels, PlatformDisplay, PlatformInput, Point,
|
||||
PromptLevel, Size, WindowAppearance, WindowBounds, WindowOptions,
|
||||
px, Bounds, Modifiers, Pixels, PlatformDisplay, PlatformInput, Point, PromptLevel, Size,
|
||||
WindowAppearance, WindowBounds, WindowOptions,
|
||||
};
|
||||
|
||||
#[derive(Default)]
|
||||
@@ -180,7 +181,7 @@ impl WaylandWindowState {
|
||||
}
|
||||
|
||||
#[derive(Clone)]
|
||||
pub(crate) struct WaylandWindow(pub(crate) Arc<WaylandWindowState>);
|
||||
pub(crate) struct WaylandWindow(pub(crate) Rc<WaylandWindowState>);
|
||||
|
||||
impl HasWindowHandle for WaylandWindow {
|
||||
fn window_handle(&self) -> Result<WindowHandle<'_>, HandleError> {
|
||||
@@ -211,7 +212,7 @@ impl PlatformWindow for WaylandWindow {
|
||||
|
||||
// todo!(linux)
|
||||
fn scale_factor(&self) -> f32 {
|
||||
return 1f32;
|
||||
1f32
|
||||
}
|
||||
|
||||
//todo!(linux)
|
||||
@@ -340,7 +341,7 @@ impl PlatformWindow for WaylandWindow {
|
||||
|
||||
fn sprite_atlas(&self) -> Arc<dyn PlatformAtlas> {
|
||||
let inner = self.0.inner.lock();
|
||||
inner.renderer.sprite_atlas().clone()
|
||||
inner.renderer.atlas().clone()
|
||||
}
|
||||
|
||||
fn set_graphics_profiler_enabled(&self, enabled: bool) {
|
||||
|
||||
@@ -1,9 +1,11 @@
|
||||
mod client;
|
||||
mod client_dispatcher;
|
||||
pub mod display;
|
||||
mod display;
|
||||
mod event;
|
||||
mod window;
|
||||
|
||||
pub(crate) use client::*;
|
||||
pub(crate) use client_dispatcher::*;
|
||||
pub(crate) use display::*;
|
||||
pub(crate) use event::*;
|
||||
pub(crate) use window::*;
|
||||
|
||||
@@ -1,8 +1,8 @@
|
||||
use std::rc::Rc;
|
||||
use std::sync::Arc;
|
||||
use std::{rc::Rc, sync::Arc};
|
||||
|
||||
use parking_lot::Mutex;
|
||||
use xcb::{x, Xid};
|
||||
use xcb::{x, Xid as _};
|
||||
use xkbcommon::xkb;
|
||||
|
||||
use collections::HashMap;
|
||||
|
||||
@@ -10,14 +10,17 @@ use crate::platform::linux::client::Client;
|
||||
use crate::platform::{
|
||||
LinuxPlatformInner, PlatformWindow, X11Display, X11Window, X11WindowState, XcbAtoms,
|
||||
};
|
||||
use crate::{AnyWindowHandle, Bounds, DisplayId, PlatformDisplay, Point, Size, WindowOptions};
|
||||
use crate::{
|
||||
AnyWindowHandle, Bounds, DisplayId, PlatformDisplay, PlatformInput, Point, Size, WindowOptions,
|
||||
};
|
||||
|
||||
pub(crate) struct X11ClientState {
|
||||
pub(crate) windows: HashMap<x::Window, Rc<X11WindowState>>,
|
||||
xkb: xkbcommon::xkb::State,
|
||||
}
|
||||
|
||||
pub(crate) struct X11Client {
|
||||
platform_inner: Arc<LinuxPlatformInner>,
|
||||
platform_inner: Rc<LinuxPlatformInner>,
|
||||
xcb_connection: Arc<xcb::Connection>,
|
||||
x_root_index: i32,
|
||||
atoms: XcbAtoms,
|
||||
@@ -26,11 +29,22 @@ pub(crate) struct X11Client {
|
||||
|
||||
impl X11Client {
|
||||
pub(crate) fn new(
|
||||
inner: Arc<LinuxPlatformInner>,
|
||||
inner: Rc<LinuxPlatformInner>,
|
||||
xcb_connection: Arc<xcb::Connection>,
|
||||
x_root_index: i32,
|
||||
atoms: XcbAtoms,
|
||||
) -> Self {
|
||||
let xkb_context = xkb::Context::new(xkb::CONTEXT_NO_FLAGS);
|
||||
let xkb_device_id = xkb::x11::get_core_keyboard_device_id(&xcb_connection);
|
||||
let xkb_keymap = xkb::x11::keymap_new_from_device(
|
||||
&xkb_context,
|
||||
&xcb_connection,
|
||||
xkb_device_id,
|
||||
xkb::KEYMAP_COMPILE_NO_FLAGS,
|
||||
);
|
||||
let xkb_state =
|
||||
xkb::x11::state_new_from_device(&xkb_keymap, &xcb_connection, xkb_device_id);
|
||||
|
||||
Self {
|
||||
platform_inner: inner,
|
||||
xcb_connection,
|
||||
@@ -38,6 +52,7 @@ impl X11Client {
|
||||
atoms,
|
||||
state: Mutex::new(X11ClientState {
|
||||
windows: HashMap::default(),
|
||||
xkb: xkb_state,
|
||||
}),
|
||||
}
|
||||
}
|
||||
@@ -91,6 +106,97 @@ impl Client for X11Client {
|
||||
window.request_refresh();
|
||||
}
|
||||
xcb::Event::Present(xcb::present::Event::IdleNotify(_ev)) => {}
|
||||
xcb::Event::X(x::Event::KeyPress(ev)) => {
|
||||
let window = self.get_window(ev.event());
|
||||
let modifiers = super::modifiers_from_state(ev.state());
|
||||
let key = {
|
||||
let code = ev.detail().into();
|
||||
let mut state = self.state.lock();
|
||||
let key = state.xkb.key_get_utf8(code);
|
||||
state.xkb.update_key(code, xkb::KeyDirection::Down);
|
||||
key
|
||||
};
|
||||
window.handle_input(PlatformInput::KeyDown(crate::KeyDownEvent {
|
||||
keystroke: crate::Keystroke {
|
||||
modifiers,
|
||||
key,
|
||||
ime_key: None,
|
||||
},
|
||||
is_held: false,
|
||||
}));
|
||||
}
|
||||
xcb::Event::X(x::Event::KeyRelease(ev)) => {
|
||||
let window = self.get_window(ev.event());
|
||||
let modifiers = super::modifiers_from_state(ev.state());
|
||||
let key = {
|
||||
let code = ev.detail().into();
|
||||
let mut state = self.state.lock();
|
||||
let key = state.xkb.key_get_utf8(code);
|
||||
state.xkb.update_key(code, xkb::KeyDirection::Up);
|
||||
key
|
||||
};
|
||||
window.handle_input(PlatformInput::KeyUp(crate::KeyUpEvent {
|
||||
keystroke: crate::Keystroke {
|
||||
modifiers,
|
||||
key,
|
||||
ime_key: None,
|
||||
},
|
||||
}));
|
||||
}
|
||||
xcb::Event::X(x::Event::ButtonPress(ev)) => {
|
||||
let window = self.get_window(ev.event());
|
||||
let modifiers = super::modifiers_from_state(ev.state());
|
||||
let position =
|
||||
Point::new((ev.event_x() as f32).into(), (ev.event_y() as f32).into());
|
||||
if let Some(button) = super::button_of_key(ev.detail()) {
|
||||
window.handle_input(PlatformInput::MouseDown(crate::MouseDownEvent {
|
||||
button,
|
||||
position,
|
||||
modifiers,
|
||||
click_count: 1,
|
||||
}));
|
||||
} else {
|
||||
log::warn!("Unknown button press: {ev:?}");
|
||||
}
|
||||
}
|
||||
xcb::Event::X(x::Event::ButtonRelease(ev)) => {
|
||||
let window = self.get_window(ev.event());
|
||||
let modifiers = super::modifiers_from_state(ev.state());
|
||||
let position =
|
||||
Point::new((ev.event_x() as f32).into(), (ev.event_y() as f32).into());
|
||||
if let Some(button) = super::button_of_key(ev.detail()) {
|
||||
window.handle_input(PlatformInput::MouseUp(crate::MouseUpEvent {
|
||||
button,
|
||||
position,
|
||||
modifiers,
|
||||
click_count: 1,
|
||||
}));
|
||||
}
|
||||
}
|
||||
xcb::Event::X(x::Event::MotionNotify(ev)) => {
|
||||
let window = self.get_window(ev.event());
|
||||
let pressed_button = super::button_from_state(ev.state());
|
||||
let position =
|
||||
Point::new((ev.event_x() as f32).into(), (ev.event_y() as f32).into());
|
||||
let modifiers = super::modifiers_from_state(ev.state());
|
||||
window.handle_input(PlatformInput::MouseMove(crate::MouseMoveEvent {
|
||||
pressed_button,
|
||||
position,
|
||||
modifiers,
|
||||
}));
|
||||
}
|
||||
xcb::Event::X(x::Event::LeaveNotify(ev)) => {
|
||||
let window = self.get_window(ev.event());
|
||||
let pressed_button = super::button_from_state(ev.state());
|
||||
let position =
|
||||
Point::new((ev.event_x() as f32).into(), (ev.event_y() as f32).into());
|
||||
let modifiers = super::modifiers_from_state(ev.state());
|
||||
window.handle_input(PlatformInput::MouseExited(crate::MouseExitEvent {
|
||||
pressed_button,
|
||||
position,
|
||||
modifiers,
|
||||
}));
|
||||
}
|
||||
_ => {}
|
||||
}
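One caveat about the key handling above: `key_get_utf8` only produces text for printable keys, so named keys (Return, Backspace, arrows) come back empty. A hedged sketch of how a keysym lookup could cover them (the helper is illustrative; `key_get_one_sym` and `keysym_get_name` are the standard xkbcommon calls):

```rust
use xkbcommon::xkb;

// Hypothetical helper: prefer the UTF-8 text, fall back to the keysym name
// (e.g. "Return", "BackSpace") for non-printable keys.
fn key_label(state: &xkb::State, code: xkb::Keycode) -> String {
    let text = state.key_get_utf8(code);
    if !text.is_empty() {
        return text;
    }
    let sym = state.key_get_one_sym(code);
    xkb::keysym_get_name(sym)
}
```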
|
||||
|
||||
|
||||
34
crates/gpui/src/platform/linux/x11/event.rs
Normal file
@@ -0,0 +1,34 @@
|
||||
use xcb::x;
|
||||
|
||||
use crate::{Modifiers, MouseButton};
|
||||
|
||||
pub(crate) fn button_of_key(detail: x::Button) -> Option<MouseButton> {
|
||||
Some(match detail {
|
||||
1 => MouseButton::Left,
|
||||
2 => MouseButton::Middle,
|
||||
3 => MouseButton::Right,
|
||||
_ => return None,
|
||||
})
|
||||
}
|
||||
|
||||
pub(crate) fn modifiers_from_state(state: x::KeyButMask) -> Modifiers {
|
||||
Modifiers {
|
||||
control: state.contains(x::KeyButMask::CONTROL),
|
||||
alt: state.contains(x::KeyButMask::MOD1),
|
||||
shift: state.contains(x::KeyButMask::SHIFT),
|
||||
command: state.contains(x::KeyButMask::MOD4),
|
||||
function: false,
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) fn button_from_state(state: x::KeyButMask) -> Option<MouseButton> {
|
||||
Some(if state.contains(x::KeyButMask::BUTTON1) {
|
||||
MouseButton::Left
|
||||
} else if state.contains(x::KeyButMask::BUTTON2) {
|
||||
MouseButton::Middle
|
||||
} else if state.contains(x::KeyButMask::BUTTON3) {
|
||||
MouseButton::Right
|
||||
} else {
|
||||
return None;
|
||||
})
|
||||
}
|
||||
@@ -1,19 +1,6 @@
|
||||
//todo!(linux): remove
|
||||
#![allow(unused)]
|
||||
|
||||
use crate::{
|
||||
BladeRenderer, Bounds, GlobalPixels, Pixels, PlatformDisplay, PlatformInputHandler,
|
||||
PlatformWindow, Point, Size, WindowAppearance, WindowBounds, WindowOptions, X11Display,
|
||||
};
|
||||
use blade_graphics as gpu;
|
||||
use parking_lot::Mutex;
|
||||
use raw_window_handle as rwh;
|
||||
|
||||
use xcb::{
|
||||
x::{self, StackMode},
|
||||
Xid as _,
|
||||
};
|
||||
|
||||
use std::{
|
||||
ffi::c_void,
|
||||
mem,
|
||||
@@ -23,6 +10,17 @@ use std::{
|
||||
sync::{self, Arc},
|
||||
};
|
||||
|
||||
use blade_graphics as gpu;
|
||||
use parking_lot::Mutex;
|
||||
use raw_window_handle as rwh;
|
||||
use xcb::{x, Xid as _};
|
||||
|
||||
use crate::platform::linux::blade_renderer::BladeRenderer;
|
||||
use crate::{
|
||||
Bounds, GlobalPixels, Pixels, PlatformDisplay, PlatformInput, PlatformInputHandler,
|
||||
PlatformWindow, Point, Size, WindowAppearance, WindowBounds, WindowOptions, X11Display,
|
||||
};
|
||||
|
||||
#[derive(Default)]
|
||||
struct Callbacks {
|
||||
request_frame: Option<Box<dyn FnMut()>>,
|
||||
@@ -51,6 +49,7 @@ struct LinuxWindowInner {
|
||||
bounds: Bounds<i32>,
|
||||
scale_factor: f32,
|
||||
renderer: BladeRenderer,
|
||||
input_handler: Option<PlatformInputHandler>,
|
||||
}
|
||||
|
||||
impl LinuxWindowInner {
|
||||
@@ -151,7 +150,19 @@ impl X11WindowState {
|
||||
let xcb_values = [
|
||||
x::Cw::BackPixel(screen.white_pixel()),
|
||||
x::Cw::EventMask(
|
||||
x::EventMask::EXPOSURE | x::EventMask::STRUCTURE_NOTIFY | x::EventMask::KEY_PRESS,
|
||||
x::EventMask::EXPOSURE
|
||||
| x::EventMask::STRUCTURE_NOTIFY
|
||||
| x::EventMask::KEY_PRESS
|
||||
| x::EventMask::KEY_RELEASE
|
||||
| x::EventMask::BUTTON_PRESS
|
||||
| x::EventMask::BUTTON_RELEASE
|
||||
| x::EventMask::POINTER_MOTION
|
||||
| x::EventMask::BUTTON1_MOTION
|
||||
| x::EventMask::BUTTON2_MOTION
|
||||
| x::EventMask::BUTTON3_MOTION
|
||||
| x::EventMask::BUTTON4_MOTION
|
||||
| x::EventMask::BUTTON5_MOTION
|
||||
| x::EventMask::BUTTON_MOTION,
|
||||
),
|
||||
];
|
||||
|
||||
@@ -237,7 +248,7 @@ impl X11WindowState {
|
||||
|
||||
// Note: this has to be done after the GPU init, or otherwise
|
||||
// the sizes are immediately invalidated.
|
||||
let gpu_extent = query_render_extent(&xcb_connection, x_window);
|
||||
let gpu_extent = query_render_extent(xcb_connection, x_window);
|
||||
|
||||
Self {
|
||||
xcb_connection: Arc::clone(xcb_connection),
|
||||
@@ -249,6 +260,7 @@ impl X11WindowState {
|
||||
bounds,
|
||||
scale_factor: 1.0,
|
||||
renderer: BladeRenderer::new(gpu, gpu_extent),
|
||||
input_handler: None,
|
||||
}),
|
||||
}
|
||||
}
|
||||
@@ -312,6 +324,20 @@ impl X11WindowState {
|
||||
})
|
||||
.unwrap();
|
||||
}
|
||||
|
||||
pub fn handle_input(&self, input: PlatformInput) {
|
||||
if let Some(ref mut fun) = self.callbacks.lock().input {
|
||||
if fun(input.clone()) {
|
||||
return;
|
||||
}
|
||||
}
|
||||
if let PlatformInput::KeyDown(event) = input {
|
||||
let mut inner = self.inner.lock();
|
||||
if let Some(ref mut input_handler) = inner.input_handler {
|
||||
input_handler.replace_text_in_range(None, &event.keystroke.key);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
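The flow in `handle_input` is: the window's registered input callback gets first refusal, and only when it returns `false` does the event fall through to the IME-style `input_handler`. A hedged sketch of wiring such a callback, assuming the usual `on_input`-style registration on the window (the exact registration method and key name are assumptions):

```rust
// Hypothetical wiring: swallow Escape at the window level, let everything
// else reach the input handler / editor. Returning true marks it handled.
window.on_input(Box::new(|input: PlatformInput| -> bool {
    matches!(
        &input,
        PlatformInput::KeyDown(event) if event.keystroke.key == "escape"
    )
}));
```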
|
||||
|
||||
impl PlatformWindow for X11Window {
|
||||
@@ -355,12 +381,12 @@ impl PlatformWindow for X11Window {
|
||||
self
|
||||
}
|
||||
|
||||
//todo!(linux)
|
||||
fn set_input_handler(&mut self, input_handler: PlatformInputHandler) {}
|
||||
fn set_input_handler(&mut self, input_handler: PlatformInputHandler) {
|
||||
self.0.inner.lock().input_handler = Some(input_handler);
|
||||
}
|
||||
|
||||
//todo!(linux)
|
||||
fn take_input_handler(&mut self) -> Option<PlatformInputHandler> {
|
||||
None
|
||||
self.0.inner.lock().input_handler.take()
|
||||
}
|
||||
|
||||
//todo!(linux)
|
||||
@@ -377,7 +403,7 @@ impl PlatformWindow for X11Window {
|
||||
fn activate(&self) {
|
||||
self.0.xcb_connection.send_request(&x::ConfigureWindow {
|
||||
window: self.0.x_window,
|
||||
value_list: &[x::ConfigWindow::StackMode(StackMode::Above)],
|
||||
value_list: &[x::ConfigWindow::StackMode(x::StackMode::Above)],
|
||||
});
|
||||
}
|
||||
|
||||
@@ -467,7 +493,7 @@ impl PlatformWindow for X11Window {
|
||||
|
||||
fn sprite_atlas(&self) -> sync::Arc<dyn crate::PlatformAtlas> {
|
||||
let inner = self.0.inner.lock();
|
||||
inner.renderer.sprite_atlas().clone()
|
||||
inner.renderer.atlas().clone()
|
||||
}
|
||||
|
||||
fn set_graphics_profiler_enabled(&self, enabled: bool) {
|
||||
|
||||
@@ -4,18 +4,8 @@ mod dispatcher;
|
||||
mod display;
|
||||
mod display_link;
|
||||
mod events;
|
||||
|
||||
#[cfg(not(feature = "macos-blade"))]
|
||||
mod metal_atlas;
|
||||
#[cfg(not(feature = "macos-blade"))]
|
||||
pub mod metal_renderer;
|
||||
|
||||
#[cfg(not(feature = "macos-blade"))]
|
||||
use metal_renderer as renderer;
|
||||
|
||||
#[cfg(feature = "macos-blade")]
|
||||
use crate::platform::blade as renderer;
|
||||
|
||||
mod metal_renderer;
|
||||
mod open_type;
|
||||
mod platform;
|
||||
mod text_system;
|
||||
@@ -27,13 +17,14 @@ use cocoa::{
|
||||
base::{id, nil},
|
||||
foundation::{NSAutoreleasePool, NSNotFound, NSRect, NSSize, NSString, NSUInteger},
|
||||
};
|
||||
|
||||
use metal_renderer::*;
|
||||
use objc::runtime::{BOOL, NO, YES};
|
||||
use std::ops::Range;
|
||||
|
||||
pub(crate) use dispatcher::*;
|
||||
pub(crate) use display::*;
|
||||
pub(crate) use display_link::*;
|
||||
pub(crate) use metal_atlas::*;
|
||||
pub(crate) use platform::*;
|
||||
pub(crate) use text_system::*;
|
||||
pub(crate) use window::*;
|
||||
|
||||
@@ -111,11 +111,11 @@ mod sys {
|
||||
pub enum CVDisplayLink {}
|
||||
|
||||
foreign_type! {
|
||||
pub unsafe type DisplayLink {
|
||||
type CType = CVDisplayLink;
|
||||
fn drop = CVDisplayLinkRelease;
|
||||
fn clone = CVDisplayLinkRetain;
|
||||
}
|
||||
type CType = CVDisplayLink;
|
||||
fn drop = CVDisplayLinkRelease;
|
||||
fn clone = CVDisplayLinkRetain;
|
||||
pub struct DisplayLink;
|
||||
pub struct DisplayLinkRef;
|
||||
}
|
||||
|
||||
impl Debug for DisplayLink {
|
||||
|
||||
@@ -13,7 +13,7 @@ use core_graphics::{
|
||||
event_source::{CGEventSource, CGEventSourceStateID},
|
||||
};
|
||||
use ctor::ctor;
|
||||
use metal::foreign_types::ForeignType as _;
|
||||
use foreign_types::ForeignType;
|
||||
use objc::{class, msg_send, sel, sel_impl};
|
||||
use std::{borrow::Cow, ffi::CStr, mem, os::raw::c_char, ptr};
|
||||
|
||||
|
||||
@@ -1,9 +1,8 @@
|
||||
use crate::{
|
||||
platform::mac::ns_string, point, size, AtlasTextureId, AtlasTextureKind, AtlasTile, Bounds,
|
||||
ContentMask, DevicePixels, Hsla, MonochromeSprite, Path, PathId, PathVertex,
|
||||
ContentMask, DevicePixels, Hsla, MetalAtlas, MonochromeSprite, Path, PathId, PathVertex,
|
||||
PolychromeSprite, PrimitiveBatch, Quad, ScaledPixels, Scene, Shadow, Size, Surface, Underline,
|
||||
};
|
||||
use super::metal_atlas::MetalAtlas;
|
||||
use block::ConcreteBlock;
|
||||
use cocoa::{
|
||||
base::{nil, NO, YES},
|
||||
@@ -14,7 +13,7 @@ use collections::HashMap;
|
||||
use core_foundation::base::TCFType;
|
||||
use foreign_types::ForeignType;
|
||||
use media::core_video::CVMetalTextureCache;
|
||||
use metal::{CAMetalLayer, CommandQueue, MTLPixelFormat, MTLResourceOptions, NSRange};
|
||||
use metal::{CommandQueue, MTLPixelFormat, MTLResourceOptions, NSRange};
|
||||
use objc::{self, msg_send, sel, sel_impl};
|
||||
use parking_lot::Mutex;
|
||||
use smallvec::SmallVec;
|
||||
@@ -30,18 +29,6 @@ const SHADERS_SOURCE_FILE: &'static str =
|
||||
include_str!(concat!(env!("OUT_DIR"), "/stitched_shaders.metal"));
|
||||
const INSTANCE_BUFFER_SIZE: usize = 2 * 1024 * 1024; // This is an arbitrary decision. There's probably a more optimal value (maybe even we could adjust dynamically...)
|
||||
|
||||
pub type Context = Arc<Mutex<Vec<metal::Buffer>>>;
|
||||
pub type Renderer = MetalRenderer;
|
||||
|
||||
pub unsafe fn new_renderer(
|
||||
context: self::Context,
|
||||
_native_window: *mut c_void,
|
||||
_native_view: *mut c_void,
|
||||
_bounds: crate::Size<f32>,
|
||||
) -> Renderer {
|
||||
MetalRenderer::new(context)
|
||||
}
|
||||
|
||||
pub(crate) struct MetalRenderer {
|
||||
device: metal::Device,
|
||||
layer: metal::MetalLayer,
|
||||
@@ -209,33 +196,33 @@ impl MetalRenderer {
|
||||
&self.layer
|
||||
}
|
||||
|
||||
pub fn layer_ptr(&self) -> *mut CAMetalLayer {
|
||||
self.layer.as_ptr()
|
||||
}
|
||||
|
||||
pub fn sprite_atlas(&self) -> &Arc<MetalAtlas> {
|
||||
&self.sprite_atlas
|
||||
}
|
||||
|
||||
/// Enables or disables the Metal HUD for debugging purposes. Note that this only works
|
||||
/// when the app is bundled and it has the `MetalHudEnabled` key set to true in Info.plist.
|
||||
pub fn set_hud_enabled(&mut self, enabled: bool) {
|
||||
unsafe {
|
||||
if enabled {
|
||||
let hud_properties = NSDictionary::dictionaryWithObject_forKey_(
|
||||
nil,
|
||||
ns_string("default"),
|
||||
ns_string("mode"),
|
||||
);
|
||||
let _: () = msg_send![&*self.layer, setDeveloperHUDProperties: hud_properties];
|
||||
} else {
|
||||
let _: () = msg_send![&*self.layer, setDeveloperHUDProperties: NSDictionary::dictionary(nil)];
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub fn set_presents_with_transaction(&mut self, presents_with_transaction: bool) {
|
||||
self.presents_with_transaction = presents_with_transaction;
|
||||
self.layer
|
||||
.set_presents_with_transaction(presents_with_transaction);
|
||||
}
|
||||
|
||||
pub fn update_drawable_size(&mut self, size: Size<f64>) {
|
||||
unsafe {
|
||||
let _: () = msg_send![
|
||||
self.layer(),
|
||||
setDrawableSize: size
|
||||
];
|
||||
}
|
||||
}
|
||||
|
||||
pub fn destroy(&mut self) {
|
||||
// nothing to do
|
||||
}
|
||||
|
||||
pub fn draw(&mut self, scene: &Scene) {
|
||||
let layer = self.layer.clone();
|
||||
let viewport_size = layer.drawable_size();
|
||||
|
||||
@@ -52,8 +52,6 @@ use std::{
|
||||
};
|
||||
use time::UtcOffset;
|
||||
|
||||
use super::renderer;
|
||||
|
||||
#[allow(non_upper_case_globals)]
|
||||
const NSUTF8StringEncoding: NSUInteger = 4;
|
||||
|
||||
@@ -147,7 +145,7 @@ pub(crate) struct MacPlatformState {
|
||||
background_executor: BackgroundExecutor,
|
||||
foreground_executor: ForegroundExecutor,
|
||||
text_system: Arc<MacTextSystem>,
|
||||
renderer_context: renderer::Context,
|
||||
instance_buffer_pool: Arc<Mutex<Vec<metal::Buffer>>>,
|
||||
pasteboard: id,
|
||||
text_hash_pasteboard_type: id,
|
||||
metadata_pasteboard_type: id,
|
||||
@@ -177,7 +175,7 @@ impl MacPlatform {
|
||||
background_executor: BackgroundExecutor::new(dispatcher.clone()),
|
||||
foreground_executor: ForegroundExecutor::new(dispatcher),
|
||||
text_system: Arc::new(MacTextSystem::new()),
|
||||
renderer_context: renderer::Context::default(),
|
||||
instance_buffer_pool: Arc::default(),
|
||||
pasteboard: unsafe { NSPasteboard::generalPasteboard(nil) },
|
||||
text_hash_pasteboard_type: unsafe { ns_string("zed-text-hash") },
|
||||
metadata_pasteboard_type: unsafe { ns_string("zed-metadata") },
|
||||
@@ -496,11 +494,12 @@ impl Platform for MacPlatform {
|
||||
handle: AnyWindowHandle,
|
||||
options: WindowOptions,
|
||||
) -> Box<dyn PlatformWindow> {
|
||||
let instance_buffer_pool = self.0.lock().instance_buffer_pool.clone();
|
||||
Box::new(MacWindow::open(
|
||||
handle,
|
||||
options,
|
||||
self.foreground_executor(),
|
||||
self.0.lock().renderer_context.clone(),
|
||||
instance_buffer_pool,
|
||||
))
|
||||
}
|
||||
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
use super::{global_bounds_from_ns_rect, ns_string, MacDisplay, NSRange, renderer};
|
||||
use super::{global_bounds_from_ns_rect, ns_string, MacDisplay, MetalRenderer, NSRange};
|
||||
use crate::{
|
||||
global_bounds_to_ns_rect, platform::PlatformInputHandler, point, px, size, AnyWindowHandle,
|
||||
Bounds, DisplayLink, ExternalPaths, FileDropEvent, ForegroundExecutor, GlobalPixels,
|
||||
@@ -23,6 +23,7 @@ use cocoa::{
|
||||
};
|
||||
use core_graphics::display::{CGDirectDisplayID, CGRect};
|
||||
use ctor::ctor;
|
||||
use foreign_types::ForeignTypeRef;
|
||||
use futures::channel::oneshot;
|
||||
use objc::{
|
||||
class,
|
||||
@@ -221,6 +222,22 @@ unsafe fn build_classes() {
|
||||
accepts_first_mouse as extern "C" fn(&Object, Sel, id) -> BOOL,
|
||||
);
|
||||
|
||||
// AccessKit integration points
|
||||
decl.add_method(
|
||||
sel!(accessibilityChildren),
|
||||
accessibility_children as extern "C" fn(&mut Object, Sel) -> id,
|
||||
);
|
||||
|
||||
decl.add_method(
|
||||
sel!(accessibilityFocusedUIElement),
|
||||
accessibility_focused as extern "C" fn(&mut Object, Sel) -> id,
|
||||
);
|
||||
|
||||
decl.add_method(
|
||||
sel!(accessibilityHitTest:),
|
||||
accessibility_hit_test as extern "C" fn(&mut Object, Sel, NSPoint) -> id,
|
||||
);
|
||||
|
||||
decl.register()
|
||||
};
|
||||
}
|
||||
@@ -321,7 +338,7 @@ struct MacWindowState {
|
||||
native_window: id,
|
||||
native_view: NonNull<Object>,
|
||||
display_link: Option<DisplayLink>,
|
||||
renderer: renderer::Renderer,
|
||||
renderer: MetalRenderer,
|
||||
kind: WindowKind,
|
||||
request_frame_callback: Option<Box<dyn FnMut()>>,
|
||||
event_callback: Option<Box<dyn FnMut(PlatformInput) -> bool>>,
|
||||
@@ -342,6 +359,7 @@ struct MacWindowState {
|
||||
input_during_keydown: Option<SmallVec<[ImeInput; 1]>>,
|
||||
previous_keydown_inserted_text: Option<String>,
|
||||
external_files_dragged: bool,
|
||||
accesskit: Option<accesskit_macos::Adapter>,
|
||||
}
|
||||
|
||||
impl MacWindowState {
|
||||
@@ -449,13 +467,6 @@ impl MacWindowState {
|
||||
get_scale_factor(self.native_window)
|
||||
}
|
||||
|
||||
fn update_drawable_size(&mut self, drawable_size: NSSize) {
|
||||
self.renderer.update_drawable_size(Size {
|
||||
width: drawable_size.width,
|
||||
height: drawable_size.height,
|
||||
})
|
||||
}
|
||||
|
||||
fn titlebar_height(&self) -> Pixels {
|
||||
unsafe {
|
||||
let frame = NSWindow::frame(self.native_window);
|
||||
@@ -473,11 +484,34 @@ impl MacWindowState {
|
||||
msg_send![self.native_window, convertPointToScreen: point]
|
||||
}
|
||||
}
|
||||
|
||||
fn get_accesskit_adapter(&mut self) -> &accesskit_macos::Adapter {
|
||||
self.accesskit.get_or_insert_with(|| {
|
||||
fn handler() -> (accesskit::TreeUpdate, bool) {
|
||||
todo!()
|
||||
}
|
||||
|
||||
let (initial_state, focused) = handler(); // self.handler.accesskit_tree();
|
||||
|
||||
struct Handler {}
|
||||
impl accesskit::ActionHandler for Handler {
|
||||
fn do_action(&mut self, _action: accesskit::ActionRequest) {
|
||||
todo!();
|
||||
}
|
||||
}
|
||||
|
||||
let action_handler = Box::new(Handler {});
|
||||
|
||||
// SAFETY: The view pointer is based on a valid borrowed reference
|
||||
// to the view.
|
||||
unsafe { accesskit_macos::Adapter::new(self.native_view.as_ptr() as *mut _, initial_state, self.is_active, action_handler) }
|
||||
})
|
||||
}
|
||||
|
||||
}
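The `handler()` placeholder above still hits `todo!()`; for context, the smallest tree the adapter will accept is essentially an empty update with a focus target. A hedged sketch mirroring the commented-out construction further down in this file (field shapes and the `NodeId` representation depend on the accesskit version pinned in Cargo.toml):

```rust
// Illustrative only: an empty initial tree plus a root focus, matching the
// commented-out accesskit::TreeUpdate construction in MacWindow::open.
fn handler() -> (accesskit::TreeUpdate, bool) {
    let update = accesskit::TreeUpdate {
        nodes: Default::default(),
        tree: Default::default(),
        focus: accesskit::NodeId(0),
    };
    let window_is_focused = false;
    (update, window_is_focused)
}
```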
|
||||
|
||||
unsafe impl Send for MacWindowState {}
|
||||
|
||||
|
||||
pub(crate) struct MacWindow(Arc<Mutex<MacWindowState>>);
|
||||
|
||||
impl MacWindow {
|
||||
@@ -485,7 +519,7 @@ impl MacWindow {
|
||||
handle: AnyWindowHandle,
|
||||
options: WindowOptions,
|
||||
executor: ForegroundExecutor,
|
||||
renderer_context: renderer::Context,
|
||||
instance_buffer_pool: Arc<Mutex<Vec<metal::Buffer>>>,
|
||||
) -> Self {
|
||||
unsafe {
|
||||
let pool = NSAutoreleasePool::new(nil);
|
||||
@@ -548,16 +582,7 @@ impl MacWindow {
|
||||
let native_view = NSView::init(native_view);
|
||||
assert!(!native_view.is_null());
|
||||
|
||||
let window_size = {
|
||||
let bounds = match options.bounds {
|
||||
WindowBounds::Fullscreen | WindowBounds::Maximized => {
|
||||
native_window.screen().visibleFrame()
|
||||
}
|
||||
WindowBounds::Fixed(bounds) => global_bounds_to_ns_rect(bounds),
|
||||
};
|
||||
let scale = get_scale_factor(native_window);
|
||||
size(bounds.size.width as f32 * scale, bounds.size.height as f32 * scale)
|
||||
};
|
||||
// let accesskit_adapter = accesskit_macos::Adapter::new(native_view, accesskit::TreeUpdate { nodes: Default::default(), tree: Default::default(), focus: accesskit::NodeId(0) }, false, action_handler);
|
||||
|
||||
let window = Self(Arc::new(Mutex::new(MacWindowState {
|
||||
handle,
|
||||
@@ -565,7 +590,7 @@ impl MacWindow {
|
||||
native_window,
|
||||
native_view: NonNull::new_unchecked(native_view),
|
||||
display_link: None,
|
||||
renderer: renderer::new_renderer(renderer_context, native_window as *mut _, native_view as *mut _, window_size),
|
||||
renderer: MetalRenderer::new(instance_buffer_pool),
|
||||
kind: options.kind,
|
||||
request_frame_callback: None,
|
||||
event_callback: None,
|
||||
@@ -588,6 +613,7 @@ impl MacWindow {
|
||||
input_during_keydown: None,
|
||||
previous_keydown_inserted_text: None,
|
||||
external_files_dragged: false,
|
||||
accesskit: None,
|
||||
})));
|
||||
|
||||
(*native_window).set_ivar(
|
||||
@@ -722,7 +748,6 @@ impl MacWindow {
|
||||
impl Drop for MacWindow {
|
||||
fn drop(&mut self) {
|
||||
let mut this = self.0.lock();
|
||||
this.renderer.destroy();
|
||||
let window = this.native_window;
|
||||
this.display_link.take();
|
||||
this.executor
|
||||
@@ -1050,22 +1075,7 @@ impl PlatformWindow for MacWindow {
|
||||
/// Enables or disables the Metal HUD for debugging purposes. Note that this only works
|
||||
/// when the app is bundled and it has the `MetalHudEnabled` key set to true in Info.plist.
|
||||
fn set_graphics_profiler_enabled(&self, enabled: bool) {
|
||||
let this_lock = self.0.lock();
|
||||
let layer = this_lock.renderer.layer();
|
||||
|
||||
unsafe {
|
||||
if enabled {
|
||||
let hud_properties = NSDictionary::dictionaryWithObject_forKey_(
|
||||
nil,
|
||||
ns_string("default"),
|
||||
ns_string("mode"),
|
||||
);
|
||||
let _: () = msg_send![layer, setDeveloperHUDProperties: hud_properties];
|
||||
} else {
|
||||
let _: () = msg_send![layer, setDeveloperHUDProperties: NSDictionary::dictionary(nil)];
|
||||
}
|
||||
}
|
||||
|
||||
self.0.lock().renderer.set_hud_enabled(enabled);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1521,28 +1531,31 @@ extern "C" fn close_window(this: &Object, _: Sel) {
|
||||
extern "C" fn make_backing_layer(this: &Object, _: Sel) -> id {
|
||||
let window_state = unsafe { get_window_state(this) };
|
||||
let window_state = window_state.as_ref().lock();
|
||||
window_state.renderer.layer_ptr() as id
|
||||
window_state.renderer.layer().as_ptr() as id
|
||||
}
|
||||
|
||||
extern "C" fn view_did_change_backing_properties(this: &Object, _: Sel) {
|
||||
let window_state = unsafe { get_window_state(this) };
|
||||
let mut lock = window_state.as_ref().lock();
|
||||
|
||||
let scale_factor = lock.scale_factor() as f64;
|
||||
let size = lock.content_size();
|
||||
let drawable_size: NSSize = NSSize {
|
||||
width: f64::from(size.width) * scale_factor,
|
||||
height: f64::from(size.height) * scale_factor,
|
||||
};
|
||||
unsafe {
|
||||
let scale_factor = lock.scale_factor() as f64;
|
||||
let size = lock.content_size();
|
||||
let drawable_size: NSSize = NSSize {
|
||||
width: f64::from(size.width) * scale_factor,
|
||||
height: f64::from(size.height) * scale_factor,
|
||||
};
|
||||
|
||||
let _: () = msg_send![
|
||||
lock.renderer.layer(),
|
||||
setContentsScale: scale_factor
|
||||
];
|
||||
let _: () = msg_send![
|
||||
lock.renderer.layer(),
|
||||
setDrawableSize: drawable_size
|
||||
];
|
||||
}
|
||||
|
||||
lock.update_drawable_size(drawable_size);
|
||||
|
||||
if let Some(mut callback) = lock.resize_callback.take() {
|
||||
let content_size = lock.content_size();
|
||||
let scale_factor = lock.scale_factor();
|
||||
@@ -1554,7 +1567,7 @@ extern "C" fn view_did_change_backing_properties(this: &Object, _: Sel) {
|
||||
|
||||
extern "C" fn set_frame_size(this: &Object, _: Sel, size: NSSize) {
|
||||
let window_state = unsafe { get_window_state(this) };
|
||||
let mut lock = window_state.as_ref().lock();
|
||||
let lock = window_state.as_ref().lock();
|
||||
|
||||
if lock.content_size() == size.into() {
|
||||
return;
|
||||
@@ -1570,7 +1583,12 @@ extern "C" fn set_frame_size(this: &Object, _: Sel, size: NSSize) {
|
||||
height: size.height * scale_factor,
|
||||
};
|
||||
|
||||
lock.update_drawable_size(drawable_size);
|
||||
unsafe {
|
||||
let _: () = msg_send![
|
||||
lock.renderer.layer(),
|
||||
setDrawableSize: drawable_size
|
||||
];
|
||||
}
|
||||
|
||||
drop(lock);
|
||||
let mut lock = window_state.lock();
|
||||
@@ -1587,7 +1605,6 @@ extern "C" fn display_layer(this: &Object, _: Sel, _: id) {
|
||||
let window_state = unsafe { get_window_state(this) };
|
||||
let mut lock = window_state.lock();
|
||||
if let Some(mut callback) = lock.request_frame_callback.take() {
|
||||
#[cfg(not(feature = "macos-blade"))]
|
||||
lock.renderer.set_presents_with_transaction(true);
|
||||
lock.stop_display_link();
|
||||
drop(lock);
|
||||
@@ -1595,7 +1612,6 @@ extern "C" fn display_layer(this: &Object, _: Sel, _: id) {
|
||||
|
||||
let mut lock = window_state.lock();
|
||||
lock.request_frame_callback = Some(callback);
|
||||
#[cfg(not(feature = "macos-blade"))]
|
||||
lock.renderer.set_presents_with_transaction(false);
|
||||
lock.start_display_link();
|
||||
}
|
||||
@@ -1765,6 +1781,36 @@ extern "C" fn accepts_first_mouse(this: &Object, _: Sel, _: id) -> BOOL {
|
||||
}
|
||||
}
|
||||
|
||||
extern "C" fn accessibility_children(this: &mut Object, _: Sel) -> id {
|
||||
unsafe {
|
||||
let state = get_window_state(this);
|
||||
let mut lock = state.as_ref().lock();
|
||||
let adapter = lock.get_accesskit_adapter();
|
||||
adapter.view_children() as *mut _
|
||||
}
|
||||
}
|
||||
extern "C" fn accessibility_focused(this: &mut Object, _: Sel) -> id {
|
||||
unsafe {
|
||||
let state = get_window_state(this);
|
||||
let mut lock = state.as_ref().lock();
|
||||
let adapter = lock.get_accesskit_adapter();
|
||||
adapter.focus() as *mut _
|
||||
}
|
||||
}
|
||||
extern "C" fn accessibility_hit_test(this: &mut Object, _: Sel, point: NSPoint) -> id {
|
||||
unsafe {
|
||||
let state = get_window_state(this);
|
||||
let mut lock = state.as_ref().lock();
|
||||
let adapter = lock.get_accesskit_adapter();
|
||||
|
||||
let point = accesskit_macos::NSPoint {
|
||||
x: point.x,
|
||||
y: point.y,
|
||||
};
|
||||
adapter.hit_test(point) as *mut _
|
||||
}
|
||||
}
|
||||
|
||||
extern "C" fn dragging_entered(this: &Object, _: Sel, dragging_info: id) -> NSDragOperation {
|
||||
let window_state = unsafe { get_window_state(this) };
|
||||
if send_new_event(&window_state, {
|
||||
|
||||
@@ -21,13 +21,6 @@ pub trait Styled: Sized {
|
||||
self
|
||||
}
|
||||
|
||||
/// Sets the size of the element to the full width and height.
|
||||
fn full(mut self) -> Self {
|
||||
self.style().size.width = Some(relative(1.).into());
|
||||
self.style().size.height = Some(relative(1.).into());
|
||||
self
|
||||
}
|
||||
|
||||
/// Sets the position of the element to `relative`.
|
||||
/// [Docs](https://tailwindcss.com/docs/position)
|
||||
fn relative(mut self) -> Self {
|
||||
|
||||
@@ -282,7 +282,10 @@ impl Element for AnyView {
|
||||
cx: &mut ElementContext,
|
||||
) -> (LayoutId, Self::State) {
|
||||
cx.with_view_id(self.entity_id(), |cx| {
|
||||
if self.cache {
|
||||
if self.cache
|
||||
&& !cx.window.dirty_views.contains(&self.entity_id())
|
||||
&& !cx.window.refreshing
|
||||
{
|
||||
if let Some(state) = state {
|
||||
let layout_id = cx.request_layout(&state.root_style, None);
|
||||
return (layout_id, state);
|
||||
@@ -313,8 +316,6 @@ impl Element for AnyView {
|
||||
&& cache_key.content_mask == cx.content_mask()
|
||||
&& cache_key.stacking_order == *cx.stacking_order()
|
||||
&& cache_key.text_style == cx.text_style()
|
||||
&& !cx.window.dirty_views.contains(&self.entity_id())
|
||||
&& !cx.window.refreshing
|
||||
{
|
||||
cx.reuse_view(state.next_stacking_order_id);
|
||||
return;
|
||||
|
||||
@@ -1,13 +1,5 @@
|
||||
use crate::{
|
||||
px, size, transparent_black, Action, AnyDrag, AnyView, AppContext, Arena, AsyncWindowContext,
|
||||
AvailableSpace, Bounds, Context, Corners, CursorStyle, DispatchActionListener, DispatchNodeId,
|
||||
DispatchTree, DisplayId, Edges, Effect, Entity, EntityId, EventEmitter, FileDropEvent, Flatten,
|
||||
Global, GlobalElementId, Hsla, KeyBinding, KeyContext, KeyDownEvent, KeyMatch, KeymatchResult,
|
||||
Keystroke, KeystrokeEvent, Model, ModelContext, Modifiers, MouseButton, MouseMoveEvent,
|
||||
MouseUpEvent, Pixels, PlatformAtlas, PlatformDisplay, PlatformInput, PlatformWindow, Point,
|
||||
PromptLevel, Render, ScaledPixels, SharedString, Size, SubscriberSet, Subscription,
|
||||
TaffyLayoutEngine, Task, View, VisualContext, WeakView, WindowAppearance, WindowBounds,
|
||||
WindowOptions, WindowTextSystem,
|
||||
    px, size, transparent_black, Action, AnyDrag, AnyView, AppContext, Arena, AsyncWindowContext,
    AvailableSpace, Bounds, Context, Corners, CursorStyle, DispatchActionListener, DispatchNodeId,
    DispatchTree, DisplayId, Edges, Effect, Entity, EntityId, EventEmitter, FileDropEvent, Flatten,
    Global, GlobalElementId, Hsla, IntoElement, KeyBinding, KeyContext, KeyDownEvent, KeyMatch,
    KeymatchResult, Keystroke, KeystrokeEvent, Model, ModelContext, Modifiers, MouseButton,
    MouseMoveEvent, MouseUpEvent, Pixels, PlatformAtlas, PlatformDisplay, PlatformInput,
    PlatformWindow, Point, PromptLevel, Render, ScaledPixels, SharedString, Size, SubscriberSet,
    Subscription, TaffyLayoutEngine, Task, View, VisualContext, WeakView, WindowAppearance,
    WindowBounds, WindowOptions, WindowTextSystem,
|
||||
};
|
||||
use anyhow::{anyhow, Context as _, Result};
|
||||
use collections::FxHashSet;
|
||||
@@ -17,20 +9,10 @@ use parking_lot::RwLock;
|
||||
use slotmap::SlotMap;
|
||||
use smallvec::SmallVec;
|
||||
use std::{
|
||||
any::{Any, TypeId},
|
||||
borrow::{Borrow, BorrowMut},
|
||||
cell::{Cell, RefCell},
|
||||
fmt::{Debug, Display},
|
||||
future::Future,
|
||||
hash::{Hash, Hasher},
|
||||
marker::PhantomData,
|
||||
mem,
|
||||
rc::Rc,
|
||||
sync::{
|
||||
    any::{Any, TypeId},
    borrow::{Borrow, BorrowMut},
    cell::{Cell, RefCell},
    collections::HashMap,
    fmt::{Debug, Display},
    future::Future,
    hash::{Hash, Hasher},
    marker::PhantomData,
    mem,
    rc::Rc,
    sync::{
|
||||
atomic::{AtomicUsize, Ordering::SeqCst},
|
||||
Arc,
|
||||
},
|
||||
time::{Duration, Instant},
|
||||
    },
    time::{Duration, Instant},
|
||||
};
|
||||
use util::{measure, ResultExt};
|
||||
|
||||
@@ -950,6 +932,7 @@ impl<'a> WindowContext<'a> {
|
||||
}
|
||||
|
||||
let root_view = self.window.root_view.take().unwrap();
|
||||
|
||||
self.with_element_context(|cx| {
|
||||
cx.with_z_index(0, |cx| {
|
||||
cx.with_key_dispatch(Some(KeyContext::default()), None, |_, cx| {
|
||||
|
||||
@@ -29,14 +29,7 @@ use smallvec::SmallVec;
|
||||
use util::post_inc;
|
||||
|
||||
use crate::{
|
||||
prelude::*, size, AnyTooltip, AppContext, AvailableSpace, Bounds, BoxShadow, ContentMask,
|
||||
Corners, CursorStyle, DevicePixels, DispatchPhase, DispatchTree, ElementId, ElementStateBox,
|
||||
EntityId, FocusHandle, FocusId, FontId, GlobalElementId, GlyphId, Hsla, ImageData,
|
||||
InputHandler, IsZero, KeyContext, KeyEvent, LayoutId, MonochromeSprite, MouseEvent, PaintQuad,
|
||||
Path, Pixels, PlatformInputHandler, Point, PolychromeSprite, Quad, RenderGlyphParams,
|
||||
RenderImageParams, RenderSvgParams, Scene, Shadow, SharedString, Size, StackingContext,
|
||||
StackingOrder, StrikethroughStyle, Style, TextStyleRefinement, Underline, UnderlineStyle,
|
||||
Window, WindowContext, SUBPIXEL_VARIANTS,
|
||||
access_kit::AccessKitState, prelude::*, size, AnyTooltip, AppContext, AvailableSpace, Bounds, BoxShadow, ContentMask, Corners, CursorStyle, DevicePixels, DispatchPhase, DispatchTree, ElementId, ElementStateBox, EntityId, FocusHandle, FocusId, FontId, GlobalElementId, GlyphId, Hsla, ImageData, InputHandler, IsZero, KeyContext, KeyEvent, LayoutId, MonochromeSprite, MouseEvent, PaintQuad, Path, Pixels, PlatformInputHandler, Point, PolychromeSprite, Quad, RenderGlyphParams, RenderImageParams, RenderSvgParams, Scene, Shadow, SharedString, Size, StackingContext, StackingOrder, StrikethroughStyle, Style, TextStyleRefinement, Underline, UnderlineStyle, Window, WindowContext, SUBPIXEL_VARIANTS
|
||||
};
|
||||
|
||||
type AnyMouseListener = Box<dyn FnMut(&dyn Any, DispatchPhase, &mut ElementContext) + 'static>;
|
||||
@@ -70,6 +63,7 @@ pub(crate) struct Frame {
|
||||
pub(crate) requested_cursor_style: Option<CursorStyle>,
|
||||
pub(crate) view_stack: Vec<EntityId>,
|
||||
pub(crate) reused_views: FxHashSet<EntityId>,
|
||||
pub(crate) accesskit: Option<AccessKitState>,
|
||||
|
||||
#[cfg(any(test, feature = "test-support"))]
|
||||
pub(crate) debug_bounds: FxHashMap<String, Bounds<Pixels>>,
|
||||
@@ -96,6 +90,7 @@ impl Frame {
|
||||
requested_cursor_style: None,
|
||||
view_stack: Vec::new(),
|
||||
reused_views: FxHashSet::default(),
|
||||
accesskit: None,
|
||||
|
||||
#[cfg(any(test, feature = "test-support"))]
|
||||
debug_bounds: FxHashMap::default(),
|
||||
@@ -386,6 +381,7 @@ impl<'a> ElementContext<'a> {
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
/// Updates the cursor style at the platform level.
|
||||
pub fn set_cursor_style(&mut self, style: CursorStyle) {
|
||||
let view_id = self.parent_view_id();
|
||||
|
||||
@@ -93,6 +93,8 @@ pub struct LanguageSettings {
|
||||
pub inlay_hints: InlayHintSettings,
|
||||
/// Whether to automatically close brackets.
|
||||
pub use_autoclose: bool,
|
||||
/// Which code actions to run on save
|
||||
pub code_actions_on_format: HashMap<String, bool>,
|
||||
}
|
||||
|
||||
/// The settings for [GitHub Copilot](https://github.com/features/copilot).
|
||||
@@ -215,6 +217,11 @@ pub struct LanguageSettingsContent {
|
||||
///
|
||||
/// Default: true
|
||||
pub use_autoclose: Option<bool>,
|
||||
|
||||
/// Which code actions to run on save
///
/// Default: {} (or {"source.organizeImports": true} for Go).
pub code_actions_on_format: Option<HashMap<String, bool>>,
}
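The setting is a map from LSP code-action kind to an enabled flag. As a hedged illustration (the helper name below is invented for the example), the documented Go default corresponds to this in-memory map:

```rust
use std::collections::HashMap;

// Illustrative only: the map that `code_actions_on_format` holds after
// settings are merged, matching the documented Go default of
// {"source.organizeImports": true}.
fn go_default_code_actions() -> HashMap<String, bool> {
    HashMap::from([("source.organizeImports".to_string(), true)])
}

fn main() {
    assert_eq!(
        go_default_code_actions().get("source.organizeImports"),
        Some(&true)
    );
}
```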
|
||||
|
||||
/// The contents of the GitHub Copilot settings.
|
||||
@@ -550,6 +557,10 @@ fn merge_settings(settings: &mut LanguageSettings, src: &LanguageSettingsContent
|
||||
merge(&mut settings.use_autoclose, src.use_autoclose);
|
||||
merge(&mut settings.show_wrap_guides, src.show_wrap_guides);
|
||||
merge(&mut settings.wrap_guides, src.wrap_guides.clone());
|
||||
merge(
|
||||
&mut settings.code_actions_on_format,
|
||||
src.code_actions_on_format.clone(),
|
||||
);
|
||||
|
||||
merge(
|
||||
&mut settings.preferred_line_length,
|
||||
|
||||
@@ -23,7 +23,7 @@ use std::{
|
||||
path::PathBuf,
|
||||
str::{self, FromStr as _},
|
||||
sync::{
|
||||
atomic::{AtomicUsize, Ordering::SeqCst},
|
||||
atomic::{AtomicI32, Ordering::SeqCst},
|
||||
Arc, Weak,
|
||||
},
|
||||
time::{Duration, Instant},
|
||||
@@ -36,7 +36,7 @@ const JSON_RPC_VERSION: &str = "2.0";
|
||||
const CONTENT_LEN_HEADER: &str = "Content-Length: ";
|
||||
const LSP_REQUEST_TIMEOUT: Duration = Duration::from_secs(60 * 2);
|
||||
|
||||
type NotificationHandler = Box<dyn Send + FnMut(Option<usize>, &str, AsyncAppContext)>;
|
||||
type NotificationHandler = Box<dyn Send + FnMut(Option<RequestId>, &str, AsyncAppContext)>;
|
||||
type ResponseHandler = Box<dyn Send + FnOnce(Result<String, Error>)>;
|
||||
type IoHandler = Box<dyn Send + FnMut(IoKind, &str)>;
|
||||
|
||||
@@ -59,14 +59,14 @@ pub struct LanguageServerBinary {
|
||||
/// A running language server process.
|
||||
pub struct LanguageServer {
|
||||
server_id: LanguageServerId,
|
||||
next_id: AtomicUsize,
|
||||
next_id: AtomicI32,
|
||||
outbound_tx: channel::Sender<String>,
|
||||
name: String,
|
||||
capabilities: ServerCapabilities,
|
||||
code_action_kinds: Option<Vec<CodeActionKind>>,
|
||||
notification_handlers: Arc<Mutex<HashMap<&'static str, NotificationHandler>>>,
|
||||
response_handlers: Arc<Mutex<Option<HashMap<usize, ResponseHandler>>>>,
|
||||
io_handlers: Arc<Mutex<HashMap<usize, IoHandler>>>,
|
||||
response_handlers: Arc<Mutex<Option<HashMap<RequestId, ResponseHandler>>>>,
|
||||
io_handlers: Arc<Mutex<HashMap<i32, IoHandler>>>,
|
||||
executor: BackgroundExecutor,
|
||||
#[allow(clippy::type_complexity)]
|
||||
io_tasks: Mutex<Option<(Task<Option<()>>, Task<Option<()>>)>>,
|
||||
@@ -87,18 +87,28 @@ pub enum Subscription {
|
||||
notification_handlers: Option<Arc<Mutex<HashMap<&'static str, NotificationHandler>>>>,
|
||||
},
|
||||
Io {
|
||||
id: usize,
|
||||
io_handlers: Option<Weak<Mutex<HashMap<usize, IoHandler>>>>,
|
||||
id: i32,
|
||||
io_handlers: Option<Weak<Mutex<HashMap<i32, IoHandler>>>>,
|
||||
},
|
||||
}
|
||||
|
||||
/// Language server protocol RPC request message ID.
///
/// [LSP Specification](https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification/#requestMessage)
#[derive(Debug, Clone, Eq, PartialEq, Hash, Serialize, Deserialize)]
#[serde(untagged)]
pub enum RequestId {
    Int(i32),
    Str(String),
}
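With `#[serde(untagged)]`, serde tries each variant's shape in order, so a bare JSON number deserializes as `Int` and a JSON string as `Str`, which is exactly what the tests further down exercise. A minimal, self-contained round-trip sketch (only `serde`/`serde_json` assumed; the enum is copied from above):

```rust
use serde::{Deserialize, Serialize};

#[derive(Debug, Clone, Eq, PartialEq, Hash, Serialize, Deserialize)]
#[serde(untagged)]
pub enum RequestId {
    Int(i32),
    Str(String),
}

fn main() {
    // A numeric id matches the Int variant, a quoted id matches Str.
    let int_id: RequestId = serde_json::from_str("2").unwrap();
    let str_id: RequestId = serde_json::from_str("\"2\"").unwrap();
    assert_eq!(int_id, RequestId::Int(2));
    assert_eq!(str_id, RequestId::Str("2".into()));

    // Serialization is symmetric: no tag or wrapper object is emitted.
    assert_eq!(serde_json::to_string(&int_id).unwrap(), "2");
    assert_eq!(serde_json::to_string(&str_id).unwrap(), "\"2\"");
}
```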
|
||||
|
||||
/// Language server protocol RPC request message.
|
||||
///
|
||||
/// [LSP Specification](https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification/#requestMessage)
|
||||
#[derive(Serialize, Deserialize)]
|
||||
pub struct Request<'a, T> {
|
||||
jsonrpc: &'static str,
|
||||
id: usize,
|
||||
id: RequestId,
|
||||
method: &'a str,
|
||||
params: T,
|
||||
}
|
||||
@@ -107,7 +117,7 @@ pub struct Request<'a, T> {
|
||||
#[derive(Serialize, Deserialize)]
|
||||
struct AnyResponse<'a> {
|
||||
jsonrpc: &'a str,
|
||||
id: usize,
|
||||
id: RequestId,
|
||||
#[serde(default)]
|
||||
error: Option<Error>,
|
||||
#[serde(borrow)]
|
||||
@@ -120,7 +130,7 @@ struct AnyResponse<'a> {
|
||||
#[derive(Serialize)]
|
||||
struct Response<T> {
|
||||
jsonrpc: &'static str,
|
||||
id: usize,
|
||||
id: RequestId,
|
||||
result: Option<T>,
|
||||
error: Option<Error>,
|
||||
}
|
||||
@@ -140,7 +150,7 @@ struct Notification<'a, T> {
|
||||
#[derive(Debug, Clone, Deserialize)]
|
||||
struct AnyNotification<'a> {
|
||||
#[serde(default)]
|
||||
id: Option<usize>,
|
||||
id: Option<RequestId>,
|
||||
#[serde(borrow)]
|
||||
method: &'a str,
|
||||
#[serde(borrow, default)]
|
||||
@@ -305,8 +315,8 @@ impl LanguageServer {
|
||||
stdout: Stdout,
|
||||
mut on_unhandled_notification: F,
|
||||
notification_handlers: Arc<Mutex<HashMap<&'static str, NotificationHandler>>>,
|
||||
response_handlers: Arc<Mutex<Option<HashMap<usize, ResponseHandler>>>>,
|
||||
io_handlers: Arc<Mutex<HashMap<usize, IoHandler>>>,
|
||||
response_handlers: Arc<Mutex<Option<HashMap<RequestId, ResponseHandler>>>>,
|
||||
io_handlers: Arc<Mutex<HashMap<i32, IoHandler>>>,
|
||||
cx: AsyncAppContext,
|
||||
) -> anyhow::Result<()>
|
||||
where
|
||||
@@ -387,7 +397,7 @@ impl LanguageServer {
|
||||
|
||||
async fn handle_stderr<Stderr>(
|
||||
stderr: Stderr,
|
||||
io_handlers: Arc<Mutex<HashMap<usize, IoHandler>>>,
|
||||
io_handlers: Arc<Mutex<HashMap<i32, IoHandler>>>,
|
||||
stderr_capture: Arc<Mutex<Option<String>>>,
|
||||
) -> anyhow::Result<()>
|
||||
where
|
||||
@@ -424,8 +434,8 @@ impl LanguageServer {
|
||||
stdin: Stdin,
|
||||
outbound_rx: channel::Receiver<String>,
|
||||
output_done_tx: barrier::Sender,
|
||||
response_handlers: Arc<Mutex<Option<HashMap<usize, ResponseHandler>>>>,
|
||||
io_handlers: Arc<Mutex<HashMap<usize, IoHandler>>>,
|
||||
response_handlers: Arc<Mutex<Option<HashMap<RequestId, ResponseHandler>>>>,
|
||||
io_handlers: Arc<Mutex<HashMap<i32, IoHandler>>>,
|
||||
) -> anyhow::Result<()>
|
||||
where
|
||||
Stdin: AsyncWrite + Unpin + Send + 'static,
|
||||
@@ -621,7 +631,7 @@ impl LanguageServer {
|
||||
pub fn shutdown(&self) -> Option<impl 'static + Send + Future<Output = Option<()>>> {
|
||||
if let Some(tasks) = self.io_tasks.lock().take() {
|
||||
let response_handlers = self.response_handlers.clone();
|
||||
let next_id = AtomicUsize::new(self.next_id.load(SeqCst));
|
||||
let next_id = AtomicI32::new(self.next_id.load(SeqCst));
|
||||
let outbound_tx = self.outbound_tx.clone();
|
||||
let executor = self.executor.clone();
|
||||
let mut output_done = self.output_done_rx.lock().take().unwrap();
|
||||
@@ -850,8 +860,8 @@ impl LanguageServer {
|
||||
}
|
||||
|
||||
fn request_internal<T: request::Request>(
|
||||
next_id: &AtomicUsize,
|
||||
response_handlers: &Mutex<Option<HashMap<usize, ResponseHandler>>>,
|
||||
next_id: &AtomicI32,
|
||||
response_handlers: &Mutex<Option<HashMap<RequestId, ResponseHandler>>>,
|
||||
outbound_tx: &channel::Sender<String>,
|
||||
executor: &BackgroundExecutor,
|
||||
params: T::Params,
|
||||
@@ -862,7 +872,7 @@ impl LanguageServer {
|
||||
let id = next_id.fetch_add(1, SeqCst);
|
||||
let message = serde_json::to_string(&Request {
|
||||
jsonrpc: JSON_RPC_VERSION,
|
||||
id,
|
||||
id: RequestId::Int(id),
|
||||
method: T::METHOD,
|
||||
params,
|
||||
})
|
||||
@@ -876,7 +886,7 @@ impl LanguageServer {
|
||||
.map(|handlers| {
|
||||
let executor = executor.clone();
|
||||
handlers.insert(
|
||||
id,
|
||||
RequestId::Int(id),
|
||||
Box::new(move |result| {
|
||||
executor
|
||||
.spawn(async move {
|
||||
@@ -1340,4 +1350,31 @@ mod tests {
|
||||
b"Content-Length: 1235\r\nContent-Type: application/vscode-jsonrpc\r\n\r\n"
|
||||
);
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
fn test_deserialize_string_digit_id() {
|
||||
let json = r#"{"jsonrpc":"2.0","id":"2","method":"workspace/configuration","params":{"items":[{"scopeUri":"file:///Users/mph/Devel/personal/hello-scala/","section":"metals"}]}}"#;
|
||||
let notification = serde_json::from_str::<AnyNotification>(json)
|
||||
.expect("message with string id should be parsed");
|
||||
let expected_id = RequestId::Str("2".to_string());
|
||||
assert_eq!(notification.id, Some(expected_id));
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
fn test_deserialize_string_id() {
|
||||
let json = r#"{"jsonrpc":"2.0","id":"anythingAtAll","method":"workspace/configuration","params":{"items":[{"scopeUri":"file:///Users/mph/Devel/personal/hello-scala/","section":"metals"}]}}"#;
|
||||
let notification = serde_json::from_str::<AnyNotification>(json)
|
||||
.expect("message with string id should be parsed");
|
||||
let expected_id = RequestId::Str("anythingAtAll".to_string());
|
||||
assert_eq!(notification.id, Some(expected_id));
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
fn test_deserialize_int_id() {
|
||||
let json = r#"{"jsonrpc":"2.0","id":2,"method":"workspace/configuration","params":{"items":[{"scopeUri":"file:///Users/mph/Devel/personal/hello-scala/","section":"metals"}]}}"#;
|
||||
let notification = serde_json::from_str::<AnyNotification>(json)
|
||||
.expect("message with int id should be parsed");
|
||||
let expected_id = RequestId::Int(2);
|
||||
assert_eq!(notification.id, Some(expected_id));
|
||||
}
|
||||
}
|
||||
|
||||
@@ -209,13 +209,13 @@ impl Render for MarkdownPreviewView {
|
||||
.id("MarkdownPreview")
|
||||
.key_context("MarkdownPreview")
|
||||
.track_focus(&self.focus_handle)
|
||||
.full()
|
||||
.size_full()
|
||||
.bg(cx.theme().colors().editor_background)
|
||||
.p_4()
|
||||
.child(
|
||||
div()
|
||||
.flex_grow()
|
||||
.map(|this| this.child(list(self.list_state.clone()).full())),
|
||||
.map(|this| this.child(list(self.list_state.clone()).size_full())),
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
@@ -16,8 +16,8 @@ bytes = "1.2"
|
||||
|
||||
[target.'cfg(target_os = "macos")'.dependencies]
|
||||
core-foundation = "0.9.3"
|
||||
foreign-types = "0.5"
|
||||
metal = "0.25"
|
||||
foreign-types = "0.3"
|
||||
metal = "0.21.0"
|
||||
objc = "0.2"
|
||||
|
||||
[build-dependencies]
|
||||
|
||||
@@ -123,6 +123,7 @@ pub(crate) struct GetCompletions {
|
||||
|
||||
pub(crate) struct GetCodeActions {
|
||||
pub range: Range<Anchor>,
|
||||
pub kinds: Option<Vec<lsp::CodeActionKind>>,
|
||||
}
|
||||
|
||||
pub(crate) struct OnTypeFormatting {
|
||||
@@ -1603,7 +1604,10 @@ impl LspCommand for GetCodeActions {
|
||||
partial_result_params: Default::default(),
|
||||
context: lsp::CodeActionContext {
|
||||
diagnostics: relevant_diagnostics,
|
||||
only: language_server.code_action_kinds(),
|
||||
only: self
|
||||
.kinds
|
||||
.clone()
|
||||
.or_else(|| language_server.code_action_kinds()),
|
||||
..lsp::CodeActionContext::default()
|
||||
},
|
||||
}
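The `or_else` chain above means kinds supplied on the `GetCodeActions` request take precedence over the kinds the language server advertises; the server's list is only a fallback. A simplified standalone sketch of that precedence (a string stand-in replaces `lsp::CodeActionKind`):

```rust
// String stand-in for lsp::CodeActionKind, just for illustration.
type CodeActionKind = String;

// Request-specified kinds win; otherwise fall back to whatever the
// language server advertises (the lazy `or_else` in the diff is the
// same idea, only evaluated when `requested` is None).
fn effective_kinds(
    requested: Option<Vec<CodeActionKind>>,
    server_advertised: Option<Vec<CodeActionKind>>,
) -> Option<Vec<CodeActionKind>> {
    requested.or(server_advertised)
}

fn main() {
    let requested = Some(vec!["source.organizeImports".to_string()]);
    let advertised = Some(vec!["quickfix".to_string()]);
    assert_eq!(
        effective_kinds(requested.clone(), advertised.clone()),
        requested
    );
    assert_eq!(effective_kinds(None, advertised.clone()), advertised);
}
```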
|
||||
@@ -1664,7 +1668,10 @@ impl LspCommand for GetCodeActions {
|
||||
})?
|
||||
.await?;
|
||||
|
||||
Ok(Self { range: start..end })
|
||||
Ok(Self {
|
||||
range: start..end,
|
||||
kinds: None,
|
||||
})
|
||||
}
|
||||
|
||||
fn response_to_proto(
|
||||
|
||||
@@ -4150,10 +4150,11 @@ impl Project {
|
||||
let buffer = buffer_handle.read(cx);
|
||||
let file = File::from_dyn(buffer.file())?;
|
||||
let buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
|
||||
let server = self
|
||||
let (adapter, server) = self
|
||||
.primary_language_server_for_buffer(buffer, cx)
|
||||
.map(|s| s.1.clone());
|
||||
Some((buffer_handle, buffer_abs_path, server))
|
||||
.map(|(a, s)| (Some(a.clone()), Some(s.clone())))
|
||||
.unwrap_or((None, None));
|
||||
Some((buffer_handle, buffer_abs_path, adapter, server))
|
||||
})
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
@@ -4161,7 +4162,7 @@ impl Project {
|
||||
// Do not allow multiple concurrent formatting requests for the
|
||||
// same buffer.
|
||||
project.update(&mut cx, |this, cx| {
|
||||
buffers_with_paths_and_servers.retain(|(buffer, _, _)| {
|
||||
buffers_with_paths_and_servers.retain(|(buffer, _, _, _)| {
|
||||
this.buffers_being_formatted
|
||||
.insert(buffer.read(cx).remote_id())
|
||||
});
|
||||
@@ -4173,7 +4174,7 @@ impl Project {
|
||||
let buffers = &buffers_with_paths_and_servers;
|
||||
move || {
|
||||
this.update(&mut cx, |this, cx| {
|
||||
for (buffer, _, _) in buffers {
|
||||
for (buffer, _, _, _) in buffers {
|
||||
this.buffers_being_formatted
|
||||
.remove(&buffer.read(cx).remote_id());
|
||||
}
|
||||
@@ -4183,7 +4184,9 @@ impl Project {
|
||||
});
|
||||
|
||||
let mut project_transaction = ProjectTransaction::default();
|
||||
for (buffer, buffer_abs_path, language_server) in &buffers_with_paths_and_servers {
|
||||
for (buffer, buffer_abs_path, lsp_adapter, language_server) in
|
||||
&buffers_with_paths_and_servers
|
||||
{
|
||||
let settings = buffer.update(&mut cx, |buffer, cx| {
|
||||
language_settings(buffer.language(), buffer.file(), cx).clone()
|
||||
})?;
|
||||
@@ -4214,6 +4217,88 @@ impl Project {
|
||||
buffer.end_transaction(cx)
|
||||
})?;
|
||||
|
||||
if let (Some(lsp_adapter), Some(language_server)) =
|
||||
(lsp_adapter, language_server)
|
||||
{
|
||||
// Apply the code actions on
|
||||
let code_actions: Vec<lsp::CodeActionKind> = settings
|
||||
.code_actions_on_format
|
||||
.iter()
|
||||
.flat_map(|(kind, enabled)| {
|
||||
if *enabled {
|
||||
Some(kind.clone().into())
|
||||
} else {
|
||||
None
|
||||
}
|
||||
})
|
||||
.collect();
|
||||
|
||||
if !code_actions.is_empty()
|
||||
&& !(trigger == FormatTrigger::Save
|
||||
&& settings.format_on_save == FormatOnSave::Off)
|
||||
{
|
||||
let actions = project
|
||||
.update(&mut cx, |this, cx| {
|
||||
this.request_lsp(
|
||||
buffer.clone(),
|
||||
LanguageServerToQuery::Other(language_server.server_id()),
|
||||
GetCodeActions {
|
||||
range: text::Anchor::MIN..text::Anchor::MAX,
|
||||
kinds: Some(code_actions),
|
||||
},
|
||||
cx,
|
||||
)
|
||||
})?
|
||||
.await?;
|
||||
|
||||
for action in actions {
|
||||
if let Some(edit) = action.lsp_action.edit {
|
||||
if edit.changes.is_none() && edit.document_changes.is_none() {
|
||||
continue;
|
||||
}
|
||||
let new = Self::deserialize_workspace_edit(
|
||||
project
|
||||
.upgrade()
|
||||
.ok_or_else(|| anyhow!("project dropped"))?,
|
||||
edit,
|
||||
push_to_history,
|
||||
lsp_adapter.clone(),
|
||||
language_server.clone(),
|
||||
&mut cx,
|
||||
)
|
||||
.await?;
|
||||
project_transaction.0.extend(new.0);
|
||||
}
|
||||
|
||||
if let Some(command) = action.lsp_action.command {
|
||||
project.update(&mut cx, |this, _| {
|
||||
this.last_workspace_edits_by_language_server
|
||||
.remove(&language_server.server_id());
|
||||
})?;
|
||||
|
||||
language_server
|
||||
.request::<lsp::request::ExecuteCommand>(
|
||||
lsp::ExecuteCommandParams {
|
||||
command: command.command,
|
||||
arguments: command.arguments.unwrap_or_default(),
|
||||
..Default::default()
|
||||
},
|
||||
)
|
||||
.await?;
|
||||
|
||||
project.update(&mut cx, |this, _| {
|
||||
project_transaction.0.extend(
|
||||
this.last_workspace_edits_by_language_server
|
||||
.remove(&language_server.server_id())
|
||||
.unwrap_or_default()
|
||||
.0,
|
||||
)
|
||||
})?;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Apply language-specific formatting using either a language server
|
||||
// or external command.
|
||||
let mut format_operation = None;
|
||||
@@ -4323,6 +4408,8 @@ impl Project {
|
||||
|
||||
if let Some(transaction_id) = whitespace_transaction_id {
|
||||
b.group_until_transaction(transaction_id);
|
||||
} else if let Some(transaction) = project_transaction.0.get(buffer) {
|
||||
b.group_until_transaction(transaction.id)
|
||||
}
|
||||
}
|
||||
|
||||
@@ -5162,7 +5249,7 @@ impl Project {
|
||||
self.request_lsp(
|
||||
buffer_handle.clone(),
|
||||
LanguageServerToQuery::Primary,
|
||||
GetCodeActions { range },
|
||||
GetCodeActions { range, kinds: None },
|
||||
cx,
|
||||
)
|
||||
}
|
||||
@@ -5178,6 +5265,103 @@ impl Project {
|
||||
self.code_actions_impl(buffer_handle, range, cx)
|
||||
}
|
||||
|
||||
pub fn apply_code_actions_on_save(
|
||||
&self,
|
||||
buffers: HashSet<Model<Buffer>>,
|
||||
cx: &mut ModelContext<Self>,
|
||||
) -> Task<Result<ProjectTransaction>> {
|
||||
if !self.is_local() {
|
||||
return Task::ready(Ok(Default::default()));
|
||||
}
|
||||
|
||||
let buffers_with_adapters_and_servers = buffers
|
||||
.into_iter()
|
||||
.filter_map(|buffer_handle| {
|
||||
let buffer = buffer_handle.read(cx);
|
||||
self.primary_language_server_for_buffer(buffer, cx)
|
||||
.map(|(a, s)| (buffer_handle, a.clone(), s.clone()))
|
||||
})
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
cx.spawn(move |this, mut cx| async move {
|
||||
for (buffer_handle, lsp_adapter, lang_server) in buffers_with_adapters_and_servers {
|
||||
let actions = this
|
||||
.update(&mut cx, |this, cx| {
|
||||
let buffer = buffer_handle.read(cx);
|
||||
let kinds: Vec<lsp::CodeActionKind> =
|
||||
language_settings(buffer.language(), buffer.file(), cx)
|
||||
.code_actions_on_format
|
||||
.iter()
|
||||
.flat_map(|(kind, enabled)| {
|
||||
if *enabled {
|
||||
Some(kind.clone().into())
|
||||
} else {
|
||||
None
|
||||
}
|
||||
})
|
||||
.collect();
|
||||
if kinds.is_empty() {
|
||||
return Task::ready(Ok(vec![]));
|
||||
}
|
||||
|
||||
this.request_lsp(
|
||||
buffer_handle.clone(),
|
||||
LanguageServerToQuery::Other(lang_server.server_id()),
|
||||
GetCodeActions {
|
||||
range: text::Anchor::MIN..text::Anchor::MAX,
|
||||
kinds: Some(kinds),
|
||||
},
|
||||
cx,
|
||||
)
|
||||
})?
|
||||
.await?;
|
||||
|
||||
for action in actions {
|
||||
if let Some(edit) = action.lsp_action.edit {
|
||||
if edit.changes.is_some() || edit.document_changes.is_some() {
|
||||
return Self::deserialize_workspace_edit(
|
||||
this.upgrade().ok_or_else(|| anyhow!("no app present"))?,
|
||||
edit,
|
||||
true,
|
||||
lsp_adapter.clone(),
|
||||
lang_server.clone(),
|
||||
&mut cx,
|
||||
)
|
||||
.await;
|
||||
}
|
||||
}
|
||||
|
||||
if let Some(command) = action.lsp_action.command {
|
||||
this.update(&mut cx, |this, _| {
|
||||
this.last_workspace_edits_by_language_server
|
||||
.remove(&lang_server.server_id());
|
||||
})?;
|
||||
|
||||
let result = lang_server
|
||||
.request::<lsp::request::ExecuteCommand>(lsp::ExecuteCommandParams {
|
||||
command: command.command,
|
||||
arguments: command.arguments.unwrap_or_default(),
|
||||
..Default::default()
|
||||
})
|
||||
.await;
|
||||
|
||||
if let Err(err) = result {
|
||||
// TODO: LSP ERROR
|
||||
return Err(err);
|
||||
}
|
||||
|
||||
return Ok(this.update(&mut cx, |this, _| {
|
||||
this.last_workspace_edits_by_language_server
|
||||
.remove(&lang_server.server_id())
|
||||
.unwrap_or_default()
|
||||
})?);
|
||||
}
|
||||
}
|
||||
}
|
||||
Ok(ProjectTransaction::default())
|
||||
})
|
||||
}
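In short, the save path collects the kinds enabled in `code_actions_on_format`, asks the buffer's primary language server for code actions restricted to those kinds over the whole buffer, and then either applies the returned workspace edit or executes the returned command. The kind-filtering step, extracted as a small sketch (a stand-in type replaces `lsp::CodeActionKind`):

```rust
use std::collections::HashMap;

// Stand-in for lsp::CodeActionKind; the real type wraps a string.
type CodeActionKind = String;

// Mirrors the flat_map in `apply_code_actions_on_save`: keep only the
// kinds whose flag is set to true in the settings map.
fn enabled_kinds(code_actions_on_format: &HashMap<String, bool>) -> Vec<CodeActionKind> {
    code_actions_on_format
        .iter()
        .filter_map(|(kind, enabled)| if *enabled { Some(kind.clone()) } else { None })
        .collect()
}

fn main() {
    let mut settings = HashMap::new();
    settings.insert("source.organizeImports".to_string(), true);
    settings.insert("source.fixAll".to_string(), false);
    assert_eq!(
        enabled_kinds(&settings),
        vec!["source.organizeImports".to_string()]
    );
}
```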
|
||||
|
||||
pub fn apply_code_action(
|
||||
&self,
|
||||
buffer_handle: Model<Buffer>,
|
||||
|
||||
@@ -93,6 +93,7 @@ pub struct LocalWorktree {
|
||||
diagnostic_summaries: HashMap<Arc<Path>, HashMap<LanguageServerId, DiagnosticSummary>>,
|
||||
client: Arc<Client>,
|
||||
fs: Arc<dyn Fs>,
|
||||
fs_case_sensitive: bool,
|
||||
visible: bool,
|
||||
}
|
||||
|
||||
@@ -314,6 +315,13 @@ impl Worktree {
|
||||
.await
|
||||
.context("failed to stat worktree path")?;
|
||||
|
||||
let fs_case_sensitive = fs.is_case_sensitive().await.unwrap_or_else(|e| {
|
||||
log::error!(
|
||||
"Failed to determine whether filesystem is case sensitive (falling back to true) due to error: {e:#}"
|
||||
);
|
||||
true
|
||||
});
|
||||
|
||||
let closure_fs = Arc::clone(&fs);
|
||||
let closure_next_entry_id = Arc::clone(&next_entry_id);
|
||||
let closure_abs_path = abs_path.to_path_buf();
|
||||
@@ -435,6 +443,7 @@ impl Worktree {
|
||||
diagnostic_summaries: Default::default(),
|
||||
client,
|
||||
fs,
|
||||
fs_case_sensitive,
|
||||
visible,
|
||||
})
|
||||
})
|
||||
@@ -1301,9 +1310,29 @@ impl LocalWorktree {
|
||||
let abs_old_path = self.absolutize(&old_path);
|
||||
let abs_new_path = self.absolutize(&new_path);
|
||||
let fs = self.fs.clone();
|
||||
let case_sensitive = self.fs_case_sensitive;
|
||||
let rename = cx.background_executor().spawn(async move {
|
||||
fs.rename(&abs_old_path?, &abs_new_path?, Default::default())
|
||||
.await
|
||||
let abs_old_path = abs_old_path?;
|
||||
let abs_new_path = abs_new_path?;
|
||||
|
||||
let abs_old_path_lower = abs_old_path.to_str().map(|p| p.to_lowercase());
|
||||
let abs_new_path_lower = abs_new_path.to_str().map(|p| p.to_lowercase());
|
||||
|
||||
// If we're on a case-insensitive FS and we're doing a case-only rename (i.e. `foobar` to `FOOBAR`)
|
||||
// we want to overwrite, because otherwise we run into a file-already-exists error.
|
||||
let overwrite = !case_sensitive
|
||||
&& abs_old_path != abs_new_path
|
||||
&& abs_old_path_lower == abs_new_path_lower;
|
||||
|
||||
fs.rename(
|
||||
&abs_old_path,
|
||||
&abs_new_path,
|
||||
fs::RenameOptions {
|
||||
overwrite,
|
||||
..Default::default()
|
||||
},
|
||||
)
|
||||
.await
|
||||
});
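The `overwrite` flag computed above only trips for a case-only rename on a case-insensitive filesystem; a rename to a genuinely different name still refuses to clobber an existing file. Extracted into a pure function purely for illustration (not code from the commit):

```rust
use std::path::Path;

// True when renaming `old` to `new` would collide with itself on a
// case-insensitive filesystem (e.g. `foobar` -> `FOOBAR`), so the rename
// has to be allowed to overwrite its "existing" destination.
fn needs_overwrite(fs_case_sensitive: bool, old: &Path, new: &Path) -> bool {
    let lower = |p: &Path| p.to_str().map(str::to_lowercase);
    !fs_case_sensitive && old != new && lower(old) == lower(new)
}

fn main() {
    assert!(needs_overwrite(false, Path::new("foobar"), Path::new("FOOBAR")));
    assert!(!needs_overwrite(true, Path::new("foobar"), Path::new("FOOBAR")));
    assert!(!needs_overwrite(false, Path::new("old.rs"), Path::new("new.rs")));
}
```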
|
||||
|
||||
cx.spawn(|this, mut cx| async move {
|
||||
|
||||
@@ -687,7 +687,6 @@ impl BufferSearchBar {
|
||||
});
|
||||
});
|
||||
self.search_options = options;
|
||||
self.query_contains_error = false;
|
||||
self.clear_matches(cx);
|
||||
cx.notify();
|
||||
}
|
||||
@@ -804,7 +803,6 @@ impl BufferSearchBar {
|
||||
editor::EditorEvent::Focused => self.query_editor_focused = true,
|
||||
editor::EditorEvent::Blurred => self.query_editor_focused = false,
|
||||
editor::EditorEvent::Edited => {
|
||||
self.query_contains_error = false;
|
||||
self.clear_matches(cx);
|
||||
let search = self.update_matches(cx);
|
||||
cx.spawn(|this, mut cx| async move {
|
||||
@@ -869,6 +867,7 @@ impl BufferSearchBar {
|
||||
self.pending_search.take();
|
||||
|
||||
if let Some(active_searchable_item) = self.active_searchable_item.as_ref() {
|
||||
self.query_contains_error = false;
|
||||
if query.is_empty() {
|
||||
self.active_match_index.take();
|
||||
active_searchable_item.clear_matches(cx);
|
||||
|
||||
@@ -118,30 +118,34 @@ impl ThemeRegistry {
|
||||
AppearanceContent::Dark => PlayerColors::dark(),
|
||||
};
|
||||
if !user_theme.style.players.is_empty() {
|
||||
player_colors = PlayerColors(
|
||||
user_theme
|
||||
.style
|
||||
.players
|
||||
.into_iter()
|
||||
.map(|player| PlayerColor {
|
||||
cursor: player
|
||||
.cursor
|
||||
.as_ref()
|
||||
.and_then(|color| try_parse_color(&color).ok())
|
||||
.unwrap_or_default(),
|
||||
background: player
|
||||
.background
|
||||
.as_ref()
|
||||
.and_then(|color| try_parse_color(&color).ok())
|
||||
.unwrap_or_default(),
|
||||
selection: player
|
||||
.selection
|
||||
.as_ref()
|
||||
.and_then(|color| try_parse_color(&color).ok())
|
||||
.unwrap_or_default(),
|
||||
})
|
||||
.collect(),
|
||||
);
|
||||
for (idx, player) in user_theme.style.players.clone().into_iter().enumerate() {
|
||||
let cursor = player
|
||||
.cursor
|
||||
.as_ref()
|
||||
.and_then(|color| try_parse_color(&color).ok());
|
||||
let background = player
|
||||
.background
|
||||
.as_ref()
|
||||
.and_then(|color| try_parse_color(&color).ok());
|
||||
let selection = player
|
||||
.selection
|
||||
.as_ref()
|
||||
.and_then(|color| try_parse_color(&color).ok());
|
||||
|
||||
if let Some(player_color) = player_colors.0.get_mut(idx) {
|
||||
*player_color = PlayerColor {
|
||||
cursor: cursor.unwrap_or(player_color.cursor),
|
||||
background: background.unwrap_or(player_color.background),
|
||||
selection: selection.unwrap_or(player_color.selection),
|
||||
};
|
||||
} else {
|
||||
player_colors.0.push(PlayerColor {
|
||||
cursor: cursor.unwrap_or_default(),
|
||||
background: background.unwrap_or_default(),
|
||||
selection: selection.unwrap_or_default(),
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
let mut syntax_colors = match user_theme.appearance {
|
||||
|
||||
@@ -40,9 +40,20 @@ impl ThemeSettings {
|
||||
/// taking into account the current [`SystemAppearance`].
|
||||
pub fn reload_current_theme(cx: &mut AppContext) {
|
||||
let mut theme_settings = ThemeSettings::get_global(cx).clone();
|
||||
let system_appearance = SystemAppearance::global(cx);
|
||||
|
||||
if let Some(theme_selection) = theme_settings.theme_selection.clone() {
|
||||
let theme_name = theme_selection.theme(*SystemAppearance::global(cx));
|
||||
let mut theme_name = theme_selection.theme(*system_appearance);
|
||||
|
||||
// If the selected theme doesn't exist, fall back to a default theme
|
||||
// based on the system appearance.
|
||||
let theme_registry = ThemeRegistry::global(cx);
|
||||
if theme_registry.get(&theme_name).ok().is_none() {
|
||||
theme_name = match *system_appearance {
|
||||
Appearance::Light => "One Light",
|
||||
Appearance::Dark => "One Dark",
|
||||
};
|
||||
};
|
||||
|
||||
if let Some(_theme) = theme_settings.switch_theme(&theme_name, cx) {
|
||||
ThemeSettings::override_global(theme_settings, cx);
|
||||
|
||||
@@ -1,149 +1,5 @@
|
||||
use gpui::{div, prelude::*, ElementId, IntoElement, Styled, WindowContext};
|
||||
mod checkbox;
|
||||
mod checkbox_with_label;
|
||||
|
||||
use crate::prelude::*;
|
||||
use crate::{Color, Icon, IconName, Selection};
|
||||
|
||||
pub type CheckHandler = Box<dyn Fn(&Selection, &mut WindowContext) + 'static>;
|
||||
|
||||
/// # Checkbox
|
||||
///
|
||||
/// Checkboxes are used for multiple choices, not for mutually exclusive choices.
|
||||
/// Each checkbox works independently from other checkboxes in the list,
|
||||
/// therefore checking an additional box does not affect any other selections.
|
||||
#[derive(IntoElement)]
|
||||
pub struct Checkbox {
|
||||
id: ElementId,
|
||||
checked: Selection,
|
||||
disabled: bool,
|
||||
on_click: Option<CheckHandler>,
|
||||
}
|
||||
|
||||
impl Checkbox {
|
||||
pub fn new(id: impl Into<ElementId>, checked: Selection) -> Self {
|
||||
Self {
|
||||
id: id.into(),
|
||||
checked,
|
||||
disabled: false,
|
||||
on_click: None,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn disabled(mut self, disabled: bool) -> Self {
|
||||
self.disabled = disabled;
|
||||
self
|
||||
}
|
||||
|
||||
pub fn on_click(
|
||||
mut self,
|
||||
handler: impl 'static + Fn(&Selection, &mut WindowContext) + Send + Sync,
|
||||
) -> Self {
|
||||
self.on_click = Some(Box::new(handler));
|
||||
self
|
||||
}
|
||||
}
|
||||
|
||||
impl RenderOnce for Checkbox {
|
||||
fn render(self, cx: &mut WindowContext) -> impl IntoElement {
|
||||
let group_id = format!("checkbox_group_{:?}", self.id);
|
||||
|
||||
let icon = match self.checked {
|
||||
Selection::Selected => Some(Icon::new(IconName::Check).size(IconSize::Small).color(
|
||||
if self.disabled {
|
||||
Color::Disabled
|
||||
} else {
|
||||
Color::Selected
|
||||
},
|
||||
)),
|
||||
Selection::Indeterminate => Some(
|
||||
Icon::new(IconName::Dash)
|
||||
.size(IconSize::Small)
|
||||
.color(if self.disabled {
|
||||
Color::Disabled
|
||||
} else {
|
||||
Color::Selected
|
||||
}),
|
||||
),
|
||||
Selection::Unselected => None,
|
||||
};
|
||||
|
||||
// A checkbox could be in an indeterminate state,
|
||||
// for example the indeterminate state could represent:
|
||||
// - a group of options of which only some are selected
|
||||
// - an enabled option that is no longer available
|
||||
// - a previously agreed to license that has been updated
|
||||
//
|
||||
// For the sake of styles we treat the indeterminate state as selected,
|
||||
// but its icon will be different.
|
||||
let selected =
|
||||
self.checked == Selection::Selected || self.checked == Selection::Indeterminate;
|
||||
|
||||
// We could use something like this to make the checkbox background when selected:
|
||||
//
|
||||
// ```rs
|
||||
// ...
|
||||
// .when(selected, |this| {
|
||||
// this.bg(cx.theme().colors().element_selected)
|
||||
// })
|
||||
// ```
|
||||
//
|
||||
// But we use a match instead here because the checkbox might be disabled,
|
||||
// and it could be disabled _while_ it is selected, as well as while it is not selected.
|
||||
let (bg_color, border_color) = match (self.disabled, selected) {
|
||||
(true, _) => (
|
||||
cx.theme().colors().ghost_element_disabled,
|
||||
cx.theme().colors().border_disabled,
|
||||
),
|
||||
(false, true) => (
|
||||
cx.theme().colors().element_selected,
|
||||
cx.theme().colors().border,
|
||||
),
|
||||
(false, false) => (
|
||||
cx.theme().colors().element_background,
|
||||
cx.theme().colors().border,
|
||||
),
|
||||
};
|
||||
|
||||
h_flex()
|
||||
.id(self.id)
|
||||
// Rather than adding `px_1()` to add some space around the checkbox,
|
||||
// we use a larger parent element to create a slightly larger
|
||||
// click area for the checkbox.
|
||||
.size_5()
|
||||
// Because we've enlarged the click area, we need to create a
|
||||
// `group` to pass down interactivity events to the checkbox.
|
||||
.group(group_id.clone())
|
||||
.child(
|
||||
div()
|
||||
.flex()
|
||||
// This prevent the flex element from growing
|
||||
// or shrinking in response to any size changes
|
||||
.flex_none()
|
||||
// The combo of `justify_center()` and `items_center()`
|
||||
// is used frequently to center elements in a flex container.
|
||||
//
|
||||
// We use this to center the icon in the checkbox.
|
||||
.justify_center()
|
||||
.items_center()
|
||||
.m_1()
|
||||
.size_4()
|
||||
.rounded_sm()
|
||||
.bg(bg_color)
|
||||
.border()
|
||||
.border_color(border_color)
|
||||
// We only want the interactivity states to fire when we
|
||||
// are in a checkbox that isn't disabled.
|
||||
.when(!self.disabled, |this| {
|
||||
// Here instead of `hover()` we use `group_hover()`
|
||||
// to pass it the group id.
|
||||
this.group_hover(group_id.clone(), |el| {
|
||||
el.bg(cx.theme().colors().element_hover)
|
||||
})
|
||||
})
|
||||
.children(icon),
|
||||
)
|
||||
.when_some(
|
||||
self.on_click.filter(|_| !self.disabled),
|
||||
|this, on_click| this.on_click(move |_, cx| on_click(&self.checked.inverse(), cx)),
|
||||
)
|
||||
}
|
||||
}
|
||||
pub use checkbox::*;
|
||||
pub use checkbox_with_label::*;
|
||||
|
||||
144
crates/ui/src/components/checkbox/checkbox.rs
Normal file
@@ -0,0 +1,144 @@
|
||||
use gpui::{div, prelude::*, ElementId, IntoElement, Styled, WindowContext};
|
||||
|
||||
use crate::prelude::*;
|
||||
use crate::{Color, Icon, IconName, Selection};
|
||||
|
||||
/// # Checkbox
|
||||
///
|
||||
/// Checkboxes are used for multiple choices, not for mutually exclusive choices.
|
||||
/// Each checkbox works independently from other checkboxes in the list,
|
||||
/// therefore checking an additional box does not affect any other selections.
|
||||
#[derive(IntoElement)]
|
||||
pub struct Checkbox {
|
||||
id: ElementId,
|
||||
checked: Selection,
|
||||
disabled: bool,
|
||||
on_click: Option<Box<dyn Fn(&Selection, &mut WindowContext) + 'static>>,
|
||||
}
|
||||
|
||||
impl Checkbox {
|
||||
pub fn new(id: impl Into<ElementId>, checked: Selection) -> Self {
|
||||
Self {
|
||||
id: id.into(),
|
||||
checked,
|
||||
disabled: false,
|
||||
on_click: None,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn disabled(mut self, disabled: bool) -> Self {
|
||||
self.disabled = disabled;
|
||||
self
|
||||
}
|
||||
|
||||
pub fn on_click(mut self, handler: impl Fn(&Selection, &mut WindowContext) + 'static) -> Self {
|
||||
self.on_click = Some(Box::new(handler));
|
||||
self
|
||||
}
|
||||
}
|
||||
|
||||
impl RenderOnce for Checkbox {
|
||||
fn render(self, cx: &mut WindowContext) -> impl IntoElement {
|
||||
let group_id = format!("checkbox_group_{:?}", self.id);
|
||||
|
||||
let icon = match self.checked {
|
||||
Selection::Selected => Some(Icon::new(IconName::Check).size(IconSize::Small).color(
|
||||
if self.disabled {
|
||||
Color::Disabled
|
||||
} else {
|
||||
Color::Selected
|
||||
},
|
||||
)),
|
||||
Selection::Indeterminate => Some(
|
||||
Icon::new(IconName::Dash)
|
||||
.size(IconSize::Small)
|
||||
.color(if self.disabled {
|
||||
Color::Disabled
|
||||
} else {
|
||||
Color::Selected
|
||||
}),
|
||||
),
|
||||
Selection::Unselected => None,
|
||||
};
|
||||
|
||||
// A checkbox could be in an indeterminate state,
|
||||
// for example the indeterminate state could represent:
|
||||
// - a group of options of which only some are selected
|
||||
// - an enabled option that is no longer available
|
||||
// - a previously agreed to license that has been updated
|
||||
//
|
||||
// For the sake of styles we treat the indeterminate state as selected,
|
||||
// but its icon will be different.
|
||||
let selected =
|
||||
self.checked == Selection::Selected || self.checked == Selection::Indeterminate;
|
||||
|
||||
// We could use something like this to make the checkbox background when selected:
|
||||
//
|
||||
// ```rs
|
||||
// ...
|
||||
// .when(selected, |this| {
|
||||
// this.bg(cx.theme().colors().element_selected)
|
||||
// })
|
||||
// ```
|
||||
//
|
||||
// But we use a match instead here because the checkbox might be disabled,
|
||||
// and it could be disabled _while_ it is selected, as well as while it is not selected.
|
||||
let (bg_color, border_color) = match (self.disabled, selected) {
|
||||
(true, _) => (
|
||||
cx.theme().colors().ghost_element_disabled,
|
||||
cx.theme().colors().border_disabled,
|
||||
),
|
||||
(false, true) => (
|
||||
cx.theme().colors().element_selected,
|
||||
cx.theme().colors().border,
|
||||
),
|
||||
(false, false) => (
|
||||
cx.theme().colors().element_background,
|
||||
cx.theme().colors().border,
|
||||
),
|
||||
};
|
||||
|
||||
h_flex()
|
||||
.id(self.id)
|
||||
// Rather than adding `px_1()` to add some space around the checkbox,
|
||||
// we use a larger parent element to create a slightly larger
|
||||
// click area for the checkbox.
|
||||
.size_5()
|
||||
// Because we've enlarged the click area, we need to create a
|
||||
// `group` to pass down interactivity events to the checkbox.
|
||||
.group(group_id.clone())
|
||||
.child(
|
||||
div()
|
||||
.flex()
|
||||
// This prevent the flex element from growing
|
||||
// or shrinking in response to any size changes
|
||||
.flex_none()
|
||||
// The combo of `justify_center()` and `items_center()`
|
||||
// is used frequently to center elements in a flex container.
|
||||
//
|
||||
// We use this to center the icon in the checkbox.
|
||||
.justify_center()
|
||||
.items_center()
|
||||
.m_1()
|
||||
.size_4()
|
||||
.rounded_sm()
|
||||
.bg(bg_color)
|
||||
.border()
|
||||
.border_color(border_color)
|
||||
// We only want the interactivity states to fire when we
|
||||
// are in a checkbox that isn't disabled.
|
||||
.when(!self.disabled, |this| {
|
||||
// Here instead of `hover()` we use `group_hover()`
|
||||
// to pass it the group id.
|
||||
this.group_hover(group_id.clone(), |el| {
|
||||
el.bg(cx.theme().colors().element_hover)
|
||||
})
|
||||
})
|
||||
.children(icon),
|
||||
)
|
||||
.when_some(
|
||||
self.on_click.filter(|_| !self.disabled),
|
||||
|this, on_click| this.on_click(move |_, cx| on_click(&self.checked.inverse(), cx)),
|
||||
)
|
||||
}
|
||||
}
|
||||
49
crates/ui/src/components/checkbox/checkbox_with_label.rs
Normal file
@@ -0,0 +1,49 @@
|
||||
use std::sync::Arc;
|
||||
|
||||
use crate::{prelude::*, Checkbox};
|
||||
|
||||
/// A [`Checkbox`] that has a [`Label`].
|
||||
#[derive(IntoElement)]
|
||||
pub struct CheckboxWithLabel {
|
||||
id: ElementId,
|
||||
label: Label,
|
||||
checked: Selection,
|
||||
on_click: Arc<dyn Fn(&Selection, &mut WindowContext) + 'static>,
|
||||
}
|
||||
|
||||
impl CheckboxWithLabel {
|
||||
pub fn new(
|
||||
id: impl Into<ElementId>,
|
||||
label: Label,
|
||||
checked: Selection,
|
||||
on_click: impl Fn(&Selection, &mut WindowContext) + 'static,
|
||||
) -> Self {
|
||||
Self {
|
||||
id: id.into(),
|
||||
label,
|
||||
checked,
|
||||
on_click: Arc::new(on_click),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl RenderOnce for CheckboxWithLabel {
|
||||
fn render(self, _cx: &mut WindowContext) -> impl IntoElement {
|
||||
h_flex()
|
||||
.gap_2()
|
||||
.child(Checkbox::new(self.id.clone(), self.checked).on_click({
|
||||
let on_click = self.on_click.clone();
|
||||
move |checked, cx| {
|
||||
(on_click)(checked, cx);
|
||||
}
|
||||
}))
|
||||
.child(
|
||||
div()
|
||||
.id(SharedString::from(format!("{}-label", self.id)))
|
||||
.on_click(move |_event, cx| {
|
||||
(self.on_click)(&self.checked.inverse(), cx);
|
||||
})
|
||||
.child(self.label),
|
||||
)
|
||||
}
|
||||
}
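And the matching sketch for the labelled variant above; the four-argument `CheckboxWithLabel::new` signature comes from the new file, while `Label::new` and the import paths are assumptions:

```rust
use gpui::{IntoElement, WindowContext};
use ui::{CheckboxWithLabel, Label, Selection};

// Clicking either the box or the label flips the selection through the
// same handler, mirroring the render impl above.
fn example_labelled_checkbox(checked: Selection) -> impl IntoElement {
    CheckboxWithLabel::new(
        "remember-me",
        Label::new("Remember me"),
        checked,
        |new_state: &Selection, _cx: &mut WindowContext| {
            let _ = new_state; // the parent view would persist this
        },
    )
}
```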
|
||||
@@ -224,8 +224,8 @@ impl ContextMenu {
|
||||
.timer(Duration::from_millis(50))
|
||||
.await;
|
||||
this.update(&mut cx, |this, cx| {
|
||||
this.cancel(&menu::Cancel, cx);
|
||||
cx.dispatch_action(action);
|
||||
this.cancel(&menu::Cancel, cx)
|
||||
})
|
||||
})
|
||||
.detach_and_log_err(cx);
|
||||
|
||||
Some files were not shown because too many files have changed in this diff.