Compare commits

9 Commits
v0.159.8...faster_mac

| Author | SHA1 | Date |
|---|---|---|
| | ec3e1ad498 | |
| | e03fd56b77 | |
| | f43b15ef84 | |
| | 8ad9743cd5 | |
| | 28aa34d80c | |
| | 7097861be5 | |
| | 660e3006b4 | |
| | 0ed0bf5f2f | |
| | 8a6ea2d390 | |
2  .github/workflows/bump_patch_version.yml (vendored)

@@ -43,8 +43,6 @@ jobs:
          esac
          which cargo-set-version > /dev/null || cargo install cargo-edit
          output=$(cargo set-version -p zed --bump patch 2>&1 | sed 's/.* //')
          export GIT_COMMITTER_NAME="Zed Bot"
          export GIT_COMMITTER_EMAIL="hi@zed.dev"
          git commit -am "Bump to $output for @$GITHUB_ACTOR" --author "Zed Bot <hi@zed.dev>"
          git tag v${output}${tag_suffix}
          git push origin HEAD v${output}${tag_suffix}
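The `sed 's/.* //'` in the bump step keeps only the text after the last space on each output line of `cargo set-version`, i.e. the new version number. A minimal Rust sketch of the same extraction (illustrative only — the sample output line is an assumption; the workflow itself does this in shell):

```rust
/// Mirrors `sed 's/.* //'` from the workflow step above: keep only the text
/// after the last space on the line.
fn last_token(line: &str) -> &str {
    line.rsplit(' ').next().unwrap_or(line).trim()
}

fn main() {
    // Hypothetical cargo-set-version output line, for illustration only.
    let output = "Upgrading zed from 0.159.8 to 0.160.0";
    assert_eq!(last_token(output), "0.160.0");
    println!("new version: {}", last_token(output));
}
```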
282  .github/workflows/ci.yml (vendored)

@@ -25,194 +25,8 @@ env:
  CARGO_TERM_COLOR: always
  CARGO_INCREMENTAL: 0
  RUST_BACKTRACE: 1
  RUSTFLAGS: "-D warnings"

jobs:
  migration_checks:
    name: Check Postgres and Protobuf migrations, mergability
    if: github.repository_owner == 'zed-industries'
    timeout-minutes: 60
    runs-on:
      - self-hosted
      - test
    steps:
      - name: Checkout repo
        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
        with:
          clean: false
          fetch-depth: 0 # fetch full history

      - name: Remove untracked files
        run: git clean -df

      - name: Find modified migrations
        shell: bash -euxo pipefail {0}
        run: |
          export SQUAWK_GITHUB_TOKEN=${{ github.token }}
          . ./script/squawk

      - name: Ensure fresh merge
        shell: bash -euxo pipefail {0}
        run: |
          if [ -z "$GITHUB_BASE_REF" ];
          then
            echo "BUF_BASE_BRANCH=$(git merge-base origin/main HEAD)" >> $GITHUB_ENV
          else
            git checkout -B temp
            git merge -q origin/$GITHUB_BASE_REF -m "merge main into temp"
            echo "BUF_BASE_BRANCH=$GITHUB_BASE_REF" >> $GITHUB_ENV
          fi

      - uses: bufbuild/buf-setup-action@v1
        with:
          version: v1.29.0
      - uses: bufbuild/buf-breaking-action@v1
        with:
          input: "crates/proto/proto/"
          against: "https://github.com/${GITHUB_REPOSITORY}.git#branch=${BUF_BASE_BRANCH},subdir=crates/proto/proto/"

  style:
    timeout-minutes: 60
    name: Check formatting and spelling
    if: github.repository_owner == 'zed-industries'
    runs-on:
      - buildjet-8vcpu-ubuntu-2204
    steps:
      - name: Checkout repo
        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4

      - name: Run style checks
        uses: ./.github/actions/check_style

      - name: Check for typos
        uses: crate-ci/typos@8e6a4285bcbde632c5d79900a7779746e8b7ea3f # v1.24.6
        with:
          config: ./typos.toml

  macos_tests:
    timeout-minutes: 60
    name: (macOS) Run Clippy and tests
    if: github.repository_owner == 'zed-industries'
    runs-on:
      - self-hosted
      - test
    steps:
      - name: Checkout repo
        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
        with:
          clean: false

      - name: cargo clippy
        run: ./script/clippy

      - name: Check unused dependencies
        uses: bnjbvr/cargo-machete@main

      - name: Check licenses
        run: |
          script/check-licenses
          script/generate-licenses /tmp/zed_licenses_output

      - name: Run tests
        uses: ./.github/actions/run_tests

      - name: Build collab
        run: cargo build -p collab

      - name: Build other binaries and features
        run: |
          cargo build --workspace --bins --all-features
          cargo check -p gpui --features "macos-blade"
          cargo build -p remote_server

  linux_tests:
    timeout-minutes: 60
    name: (Linux) Run Clippy and tests
    if: github.repository_owner == 'zed-industries'
    runs-on:
      - buildjet-16vcpu-ubuntu-2204
    steps:
      - name: Add Rust to the PATH
        run: echo "$HOME/.cargo/bin" >> $GITHUB_PATH

      - name: Checkout repo
        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
        with:
          clean: false

      - name: Cache dependencies
        uses: swatinem/rust-cache@82a92a6e8fbeee089604da2575dc567ae9ddeaab # v2
        with:
          save-if: ${{ github.ref == 'refs/heads/main' }}
          cache-provider: "buildjet"

      - name: Install Linux dependencies
        run: ./script/linux

      - name: cargo clippy
        run: ./script/clippy

      - name: Run tests
        uses: ./.github/actions/run_tests

      - name: Build Zed
        run: cargo build -p zed

  build_remote_server:
    timeout-minutes: 60
    name: (Linux) Build Remote Server
    if: github.repository_owner == 'zed-industries'
    runs-on:
      - buildjet-16vcpu-ubuntu-2204
    steps:
      - name: Add Rust to the PATH
        run: echo "$HOME/.cargo/bin" >> $GITHUB_PATH

      - name: Checkout repo
        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
        with:
          clean: false

      - name: Cache dependencies
        uses: swatinem/rust-cache@82a92a6e8fbeee089604da2575dc567ae9ddeaab # v2
        with:
          save-if: ${{ github.ref == 'refs/heads/main' }}
          cache-provider: "buildjet"

      - name: Install Clang & Mold
        run: ./script/remote-server && ./script/install-mold 2.34.0

      - name: Build Remote Server
        run: cargo build -p remote_server

  # todo(windows): Actually run the tests
  windows_tests:
    timeout-minutes: 60
    name: (Windows) Run Clippy and tests
    if: github.repository_owner == 'zed-industries'
    runs-on: hosted-windows-1
    steps:
      # more info here:- https://github.com/rust-lang/cargo/issues/13020
      - name: Enable longer pathnames for git
        run: git config --system core.longpaths true
      - name: Checkout repo
        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
        with:
          clean: false

      - name: Cache dependencies
        uses: swatinem/rust-cache@82a92a6e8fbeee089604da2575dc567ae9ddeaab # v2
        with:
          save-if: ${{ github.ref == 'refs/heads/main' }}
          cache-provider: "github"

      - name: cargo clippy
        # Windows can't run shell scripts, so we need to use `cargo xtask`.
        run: cargo xtask clippy

      - name: Build Zed
        run: cargo build

  bundle-mac:
    timeout-minutes: 60
    name: Create a macOS bundle
@@ -220,7 +34,6 @@ jobs:
      - self-hosted
      - bundle
    if: ${{ startsWith(github.ref, 'refs/tags/v') || contains(github.event.pull_request.labels.*.name, 'run-bundling') }}
    needs: [macos_tests]
    env:
      MACOS_CERTIFICATE: ${{ secrets.MACOS_CERTIFICATE }}
      MACOS_CERTIFICATE_PASSWORD: ${{ secrets.MACOS_CERTIFICATE_PASSWORD }}
@@ -296,7 +109,6 @@ jobs:

      - uses: softprops/action-gh-release@de2c0eb89ae2a093876385947365aca7b0e5f844 # v1
        name: Upload app bundle to release
        if: ${{ env.RELEASE_CHANNEL == 'preview' || env.RELEASE_CHANNEL == 'stable' }}
        with:
          draft: true
          prerelease: ${{ env.RELEASE_CHANNEL == 'preview' }}
@@ -309,97 +121,3 @@ jobs:
          body_path: target/release-notes.md
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}

  bundle-linux:
    timeout-minutes: 60
    name: Create a Linux bundle
    runs-on:
      - buildjet-16vcpu-ubuntu-2004
    if: ${{ startsWith(github.ref, 'refs/tags/v') || contains(github.event.pull_request.labels.*.name, 'run-bundling') }}
    needs: [linux_tests]
    env:
      ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }}
      ZED_CLOUD_PROVIDER_ADDITIONAL_MODELS_JSON: ${{ secrets.ZED_CLOUD_PROVIDER_ADDITIONAL_MODELS_JSON }}
    steps:
      - name: Checkout repo
        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
        with:
          clean: false

      - name: Install Linux dependencies
        run: ./script/linux && ./script/install-mold 2.34.0

      - name: Determine version and release channel
        if: ${{ startsWith(github.ref, 'refs/tags/v') }}
        run: |
          # This exports RELEASE_CHANNEL into env (GITHUB_ENV)
          script/determine-release-channel

      - name: Create Linux .tar.gz bundle
        run: script/bundle-linux

      - name: Upload Linux bundle to workflow run if main branch or specific label
        uses: actions/upload-artifact@b4b15b8c7c6ac21ea08fcf65892d2ee8f75cf882 # v4
        if: ${{ github.ref == 'refs/heads/main' }} || contains(github.event.pull_request.labels.*.name, 'run-bundling') }}
        with:
          name: zed-${{ github.event.pull_request.head.sha || github.sha }}-x86_64-unknown-linux-gnu.tar.gz
          path: target/release/zed-*.tar.gz

      - name: Upload app bundle to release
        uses: softprops/action-gh-release@de2c0eb89ae2a093876385947365aca7b0e5f844 # v1
        with:
          draft: true
          prerelease: ${{ env.RELEASE_CHANNEL == 'preview' }}
          files: |
            target/zed-remote-server-linux-x86_64.gz
            target/release/zed-linux-x86_64.tar.gz
          body: ""
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}

  bundle-linux-aarch64: # this runs on ubuntu22.04
    timeout-minutes: 60
    name: Create arm64 Linux bundle
    runs-on:
      - buildjet-16vcpu-ubuntu-2204-arm
    if: ${{ startsWith(github.ref, 'refs/tags/v') || contains(github.event.pull_request.labels.*.name, 'run-bundling') }}
    needs: [linux_tests]
    env:
      ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }}
      ZED_CLOUD_PROVIDER_ADDITIONAL_MODELS_JSON: ${{ secrets.ZED_CLOUD_PROVIDER_ADDITIONAL_MODELS_JSON }}
    steps:
      - name: Checkout repo
        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
        with:
          clean: false

      - name: Install Linux dependencies
        run: ./script/linux

      - name: Determine version and release channel
        if: ${{ startsWith(github.ref, 'refs/tags/v') }}
        run: |
          # This exports RELEASE_CHANNEL into env (GITHUB_ENV)
          script/determine-release-channel

      - name: Create and upload Linux .tar.gz bundle
        run: script/bundle-linux

      - name: Upload Linux bundle to workflow run if main branch or specific label
        uses: actions/upload-artifact@b4b15b8c7c6ac21ea08fcf65892d2ee8f75cf882 # v4
        if: ${{ github.ref == 'refs/heads/main' }} || contains(github.event.pull_request.labels.*.name, 'run-bundling') }}
        with:
          name: zed-${{ github.event.pull_request.head.sha || github.sha }}-aarch64-unknown-linux-gnu.tar.gz
          path: target/release/zed-*.tar.gz

      - name: Upload app bundle to release
        uses: softprops/action-gh-release@de2c0eb89ae2a093876385947365aca7b0e5f844 # v1
        with:
          draft: true
          prerelease: ${{ env.RELEASE_CHANNEL == 'preview' }}
          files: |
            target/zed-remote-server-linux-aarch64.gz
            target/release/zed-linux-aarch64.tar.gz
          body: ""
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
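The Windows job above runs `cargo xtask clippy` because the other platforms' `./script/clippy` shell script does not run there. A hedged sketch of what such an xtask-style dispatcher can look like — this is an illustrative stand-alone binary, not zed's actual `xtask` crate, and the clippy flags are an assumption:

```rust
use std::env;
use std::process::{exit, Command};

// Illustrative xtask-style dispatcher: `cargo xtask clippy` shells out to
// clippy across the workspace with warnings denied (flags are assumed, not
// copied from zed's real script).
fn main() {
    let task = env::args().nth(1).unwrap_or_default();
    match task.as_str() {
        "clippy" => {
            let status = Command::new("cargo")
                .args([
                    "clippy", "--workspace", "--all-targets", "--all-features",
                    "--", "-D", "warnings",
                ])
                .status()
                .expect("failed to run cargo clippy");
            exit(status.code().unwrap_or(1));
        }
        other => {
            eprintln!("unknown task: {other:?}");
            exit(2);
        }
    }
}
```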
542  Cargo.lock (generated)

The generated lockfile hunks (not reproduced line by line) amount to the following changes between v0.159.8 and faster_mac:

- Roughly two dozen packages' dependency lists change `"env_logger 0.11.5"` to plain `"env_logger"`, and the separate `[[package]]` entry for env_logger 0.10.2 is removed.
- References to `"hyper 0.14.31"` become `"hyper 0.14.30"` throughout, and the hyper 0.14 `[[package]]` entry itself moves from 0.14.31 to 0.14.30 (checksum updated), with its socket2 dependency going from 0.5.7 to 0.4.10.
- Removed `[[package]]` entries: arraydeque 0.5.1, hashlink 0.8.4, msvc_spectre_libs 0.1.2, toolchain_selector 0.1.0, yaml-rust2 0.8.1, and the entire pet family pinned to the python-environment-tools git revision ffcbf3f28c46633abd5448a52b1f396c322e0d6c (pet, pet-conda, pet-core, pet-env-var-path, pet-fs, pet-global-virtualenvs, pet-homebrew, pet-jsonrpc, pet-linux-global-python, pet-mac-commandlinetools, pet-mac-python-org, pet-mac-xcode, pet-pipenv, pet-poetry, pet-pyenv, pet-python-utils, pet-reporter, pet-telemetry, pet-venv, pet-virtualenv, pet-virtualenvwrapper, pet-windows-registry, pet-windows-store). References to these (pet-*, toolchain_selector, `"hashlink 0.9.1"` → `"hashlink"`) disappear from other packages' dependency lists.
- Other version pairs: bindgen's itertools 0.12.1 → 0.10.5, a windows-targets 0.52.6 → 0.48.5 reference, a windows-sys 0.59.0 → 0.48.0 reference, runtimelib 0.15.1 → 0.15.0 (checksum updated), `"unicode-segmentation"` added to one package's dependency list, and a handful of single dependency lines dropped from individual packages.
- zed 0.159.8 → 0.160.0; zed_dart 0.1.2 → 0.1.1.
Cargo.toml

@@ -117,7 +117,6 @@ members = [
    "crates/theme_selector",
    "crates/time_format",
    "crates/title_bar",
    "crates/toolchain_selector",
    "crates/ui",
    "crates/ui_input",
    "crates/ui_macros",
@@ -291,7 +290,6 @@ theme_importer = { path = "crates/theme_importer" }
theme_selector = { path = "crates/theme_selector" }
time_format = { path = "crates/time_format" }
title_bar = { path = "crates/title_bar" }
toolchain_selector = { path = "crates/toolchain_selector" }
ui = { path = "crates/ui" }
ui_input = { path = "crates/ui_input" }
ui_macros = { path = "crates/ui_macros" }
@@ -378,11 +376,6 @@ ordered-float = "2.1.1"
palette = { version = "0.7.5", default-features = false, features = ["std"] }
parking_lot = "0.12.1"
pathdiff = "0.2"
pet = { git = "https://github.com/microsoft/python-environment-tools.git", rev = "ffcbf3f28c46633abd5448a52b1f396c322e0d6c" }
pet-conda = { git = "https://github.com/microsoft/python-environment-tools.git", rev = "ffcbf3f28c46633abd5448a52b1f396c322e0d6c" }
pet-core = { git = "https://github.com/microsoft/python-environment-tools.git", rev = "ffcbf3f28c46633abd5448a52b1f396c322e0d6c" }
pet-poetry = { git = "https://github.com/microsoft/python-environment-tools.git", rev = "ffcbf3f28c46633abd5448a52b1f396c322e0d6c" }
pet-reporter = { git = "https://github.com/microsoft/python-environment-tools.git", rev = "ffcbf3f28c46633abd5448a52b1f396c322e0d6c" }
postage = { version = "0.5", features = ["futures-traits"] }
pretty_assertions = "1.3.0"
profiling = "1"
@@ -471,7 +464,7 @@ tree-sitter-typescript = "0.23"
tree-sitter-yaml = { git = "https://github.com/zed-industries/tree-sitter-yaml", rev = "baff0b51c64ef6a1fb1f8390f3ad6015b83ec13a" }
unicase = "2.6"
unindent = "0.1.7"
unicode-segmentation = "1.10"
unicode-segmentation = "1.11"
url = "2.2"
uuid = { version = "1.1.2", features = ["v4", "v5", "serde"] }
wasmparser = "0.215"
@@ -58,7 +58,6 @@
    "gitignore": "vcs",
    "gitkeep": "vcs",
    "gitmodules": "vcs",
    "gleam": "gleam",
    "go": "go",
    "gql": "graphql",
    "graphql": "graphql",
@@ -84,7 +83,6 @@
    "j2k": "image",
    "java": "java",
    "jfif": "image",
    "jl": "julia",
    "jp2": "image",
    "jpeg": "image",
    "jpg": "image",
@@ -92,6 +90,7 @@
    "json": "storage",
    "jsonc": "storage",
    "jsx": "react",
    "julia": "julia",
    "jxl": "image",
    "kt": "kotlin",
    "ldf": "storage",
@@ -265,9 +264,6 @@
    "fsharp": {
      "icon": "icons/file_icons/fsharp.svg"
    },
    "gleam": {
      "icon": "icons/file_icons/gleam.svg"
    },
    "go": {
      "icon": "icons/file_icons/go.svg"
    },
@@ -1,6 +0,0 @@
(deleted 6-line SVG icon — the <svg> markup and three <path> elements of the icon referenced as icons/file_icons/gleam.svg above are removed)

Before: Width | Height | Size: 3.5 KiB
@@ -313,15 +313,6 @@
      "ctrl-k ctrl-l": "editor::ToggleFold",
      "ctrl-k ctrl-[": "editor::FoldRecursive",
      "ctrl-k ctrl-]": "editor::UnfoldRecursive",
      "ctrl-k ctrl-1": ["editor::FoldAtLevel", { "level": 1 }],
      "ctrl-k ctrl-2": ["editor::FoldAtLevel", { "level": 2 }],
      "ctrl-k ctrl-3": ["editor::FoldAtLevel", { "level": 3 }],
      "ctrl-k ctrl-4": ["editor::FoldAtLevel", { "level": 4 }],
      "ctrl-k ctrl-5": ["editor::FoldAtLevel", { "level": 5 }],
      "ctrl-k ctrl-6": ["editor::FoldAtLevel", { "level": 6 }],
      "ctrl-k ctrl-7": ["editor::FoldAtLevel", { "level": 7 }],
      "ctrl-k ctrl-8": ["editor::FoldAtLevel", { "level": 8 }],
      "ctrl-k ctrl-9": ["editor::FoldAtLevel", { "level": 9 }],
      "ctrl-k ctrl-0": "editor::FoldAll",
      "ctrl-k ctrl-j": "editor::UnfoldAll",
      "ctrl-space": "editor::ShowCompletions",
@@ -514,13 +505,6 @@
      "ctrl-enter": "assistant::InlineAssist"
    }
  },
  {
    "context": "ProposedChangesEditor",
    "bindings": {
      "ctrl-shift-y": "editor::ApplyDiffHunk",
      "ctrl-alt-a": "editor::ApplyAllDiffHunks"
    }
  },
  {
    "context": "Editor && jupyter && !ContextEditor",
    "bindings": {
@@ -350,15 +350,6 @@
      "cmd-k cmd-l": "editor::ToggleFold",
      "cmd-k cmd-[": "editor::FoldRecursive",
      "cmd-k cmd-]": "editor::UnfoldRecursive",
      "cmd-k cmd-1": ["editor::FoldAtLevel", { "level": 1 }],
      "cmd-k cmd-2": ["editor::FoldAtLevel", { "level": 2 }],
      "cmd-k cmd-3": ["editor::FoldAtLevel", { "level": 3 }],
      "cmd-k cmd-4": ["editor::FoldAtLevel", { "level": 4 }],
      "cmd-k cmd-5": ["editor::FoldAtLevel", { "level": 5 }],
      "cmd-k cmd-6": ["editor::FoldAtLevel", { "level": 6 }],
      "cmd-k cmd-7": ["editor::FoldAtLevel", { "level": 7 }],
      "cmd-k cmd-8": ["editor::FoldAtLevel", { "level": 8 }],
      "cmd-k cmd-9": ["editor::FoldAtLevel", { "level": 9 }],
      "cmd-k cmd-0": "editor::FoldAll",
      "cmd-k cmd-j": "editor::UnfoldAll",
      "ctrl-space": "editor::ShowCompletions",
@@ -547,13 +538,6 @@
      "ctrl-enter": "assistant::InlineAssist"
    }
  },
  {
    "context": "ProposedChangesEditor",
    "bindings": {
      "cmd-shift-y": "editor::ApplyDiffHunk",
      "cmd-shift-a": "editor::ApplyAllDiffHunks"
    }
  },
  {
    "context": "PromptEditor",
    "bindings": {
@@ -88,6 +88,7 @@ origin: (f64, f64),

<edit>
<path>src/shapes/rectangle.rs</path>
<description>Update the Rectangle's new function to take an origin parameter</description>
<operation>update</operation>
<old_text>
fn new(width: f64, height: f64) -> Self {
@@ -116,6 +117,7 @@ pub struct Circle {

<edit>
<path>src/shapes/circle.rs</path>
<description>Update the Circle's new function to take an origin parameter</description>
<operation>update</operation>
<old_text>
fn new(radius: f64) -> Self {
@@ -132,6 +134,7 @@ fn new(origin: (f64, f64), radius: f64) -> Self {

<edit>
<path>src/shapes/rectangle.rs</path>
<description>Add an import for the std::fmt module</description>
<operation>insert_before</operation>
<old_text>
struct Rectangle {
@@ -144,10 +147,7 @@ use std::fmt;

<edit>
<path>src/shapes/rectangle.rs</path>
<description>
Add a manual Display implementation for Rectangle.
Currently, this is the same as a derived Display implementation.
</description>
<description>Add a Display implementation for Rectangle</description>
<operation>insert_after</operation>
<old_text>
Rectangle { width, height }
@@ -169,6 +169,7 @@ impl fmt::Display for Rectangle {

<edit>
<path>src/shapes/circle.rs</path>
<description>Add an import for the `std::fmt` module</description>
<operation>insert_before</operation>
<old_text>
struct Circle {
@@ -180,6 +181,7 @@ use std::fmt;

<edit>
<path>src/shapes/circle.rs</path>
<description>Add a Display implementation for Circle</description>
<operation>insert_after</operation>
<old_text>
Circle { radius }
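These hunks edit an example transcript in which each `<edit>` block carries a path, a human-readable description, an operation (`update`, `insert_before`, `insert_after`), and the `old_text` to anchor on. A hedged sketch of how such a block could be modeled in Rust — the type and field names here are illustrative, not zed's actual parser types:

```rust
// Illustrative model of the <edit> blocks shown above; not zed's parser.
#[derive(Debug, PartialEq)]
enum Operation {
    Update,
    InsertBefore,
    InsertAfter,
}

#[derive(Debug)]
#[allow(dead_code)] // sketch: fields are only read via Debug formatting
struct Edit {
    path: String,
    description: String,
    operation: Operation,
    old_text: String,
}

fn main() {
    // One of the edits from the example transcript, expressed as data.
    let edit = Edit {
        path: "src/shapes/circle.rs".into(),
        description: "Add a Display implementation for Circle".into(),
        operation: Operation::InsertAfter,
        old_text: "Circle { radius }".into(),
    };
    assert_eq!(edit.operation, Operation::InsertAfter);
    println!("{edit:?}");
}
```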
@@ -346,6 +346,8 @@
    "git_status": true,
    // Amount of indentation for nested items.
    "indent_size": 20,
    // Whether to show indent guides in the project panel.
    "indent_guides": true,
    // Whether to reveal it in the project panel automatically,
    // when a corresponding project entry becomes active.
    // Gitignored entries are never auto revealed.
@@ -369,17 +371,6 @@
      /// 5. Never show the scrollbar:
      /// "never"
      "show": null
    },
    // Settings related to indent guides in the project panel.
    "indent_guides": {
      // When to show indent guides in the project panel.
      // This setting can take two values:
      //
      // 1. Always show indent guides:
      // "always"
      // 2. Never show indent guides:
      // "never"
      "show": "always"
    }
  },
  "outline_panel": {
@@ -403,18 +394,7 @@
    "auto_reveal_entries": true,
    /// Whether to fold directories automatically
    /// when a directory has only one directory inside.
    "auto_fold_dirs": true,
    // Settings related to indent guides in the outline panel.
    "indent_guides": {
      // When to show indent guides in the outline panel.
      // This setting can take two values:
      //
      // 1. Always show indent guides:
      // "always"
      // 2. Never show indent guides:
      // "never"
      "show": "always"
    }
    "auto_fold_dirs": true
  },
  "collaboration_panel": {
    // Whether to show the collaboration panel button in the status bar.
@@ -797,7 +777,6 @@
    "tasks": {
      "variables": {}
    },
    "toolchain": { "name": "default", "path": "default" },
    // An object whose keys are language names, and whose values
    // are arrays of filenames or extensions of files that should
    // use those languages.
@@ -23,7 +23,6 @@ language.workspace = true
project.workspace = true
smallvec.workspace = true
ui.workspace = true
util.workspace = true
workspace.workspace = true

[dev-dependencies]
@@ -13,8 +13,7 @@ use language::{
use project::{EnvironmentErrorMessage, LanguageServerProgress, Project, WorktreeId};
use smallvec::SmallVec;
use std::{cmp::Reverse, fmt::Write, sync::Arc, time::Duration};
use ui::{prelude::*, ButtonLike, ContextMenu, PopoverMenu, PopoverMenuHandle, Tooltip};
use util::truncate_and_trailoff;
use ui::{prelude::*, ButtonLike, ContextMenu, PopoverMenu, PopoverMenuHandle};
use workspace::{item::ItemHandle, StatusItemView, Workspace};

actions!(activity_indicator, [ShowErrorMessage]);
@@ -447,8 +446,6 @@ impl ActivityIndicator {

impl EventEmitter<Event> for ActivityIndicator {}

const MAX_MESSAGE_LEN: usize = 50;

impl Render for ActivityIndicator {
    fn render(&mut self, cx: &mut ViewContext<Self>) -> impl IntoElement {
        let result = h_flex()
@@ -459,7 +456,6 @@ impl Render for ActivityIndicator {
            return result;
        };
        let this = cx.view().downgrade();
        let truncate_content = content.message.len() > MAX_MESSAGE_LEN;
        result.gap_2().child(
            PopoverMenu::new("activity-indicator-popover")
                .trigger(
@@ -468,21 +464,7 @@ impl Render for ActivityIndicator {
                        .id("activity-indicator-status")
                        .gap_2()
                        .children(content.icon)
                        .map(|button| {
                            if truncate_content {
                                button
                                    .child(
                                        Label::new(truncate_and_trailoff(
                                            &content.message,
                                            MAX_MESSAGE_LEN,
                                        ))
                                        .size(LabelSize::Small),
                                    )
                                    .tooltip(move |cx| Tooltip::text(&content.message, cx))
                            } else {
                                button.child(Label::new(content.message).size(LabelSize::Small))
                            }
                        })
                        .child(Label::new(content.message).size(LabelSize::Small))
                        .when_some(content.on_click, |this, handler| {
                            this.on_click(cx.listener(move |this, _, cx| {
                                handler(this, cx);
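One side of the hunk above truncates long status messages to 50 characters and falls back to a tooltip for the full text, while the other shows the message untruncated. A minimal, self-contained sketch of that truncate-with-ellipsis behaviour, assuming the 50-character cap shown in the diff (`util::truncate_and_trailoff` in the real crate does roughly this):

```rust
const MAX_MESSAGE_LEN: usize = 50;

/// Roughly what `util::truncate_and_trailoff` does in the hunk above:
/// keep at most `max_len` characters and append an ellipsis when clipped.
fn truncate_and_trailoff(s: &str, max_len: usize) -> String {
    if s.chars().count() <= max_len {
        s.to_string()
    } else {
        let truncated: String = s.chars().take(max_len).collect();
        format!("{truncated}…")
    }
}

fn main() {
    let long = "Downloading language server for Rust, this can take a while";
    let short = "Indexing";
    // Long messages are clipped for the status bar; short ones pass through.
    assert_eq!(truncate_and_trailoff(short, MAX_MESSAGE_LEN), short);
    assert!(truncate_and_trailoff(long, MAX_MESSAGE_LEN).ends_with('…'));
    println!("{}", truncate_and_trailoff(long, MAX_MESSAGE_LEN));
}
```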
@@ -298,64 +298,25 @@ fn register_context_server_handlers(cx: &mut AppContext) {
                        return;
                    };

                    if protocol.capable(context_servers::protocol::ServerCapability::Prompts) {
                        if let Some(prompts) = protocol.list_prompts().await.log_err() {
                            for prompt in prompts
                                .into_iter()
                                .filter(context_server_command::acceptable_prompt)
                            {
                                log::info!(
                                    "registering context server command: {:?}",
                                    prompt.name
                                );
                                context_server_registry.register_command(
                                    server.id.clone(),
                                    prompt.name.as_str(),
                                );
                                slash_command_registry.register_command(
                                    context_server_command::ContextServerSlashCommand::new(
                                        &server, prompt,
                                    ),
                                    true,
                                );
                            }
                        }
                    }
                })
                .detach();
            }
        },
    );

    cx.update_model(
        &manager,
        |manager: &mut context_servers::manager::ContextServerManager, cx| {
            let tool_registry = ToolRegistry::global(cx);
            let context_server_registry = ContextServerRegistry::global(cx);
            if let Some(server) = manager.get_server(server_id) {
                cx.spawn(|_, _| async move {
                    let Some(protocol) = server.client.read().clone() else {
                        return;
                    };

                    if protocol.capable(context_servers::protocol::ServerCapability::Tools) {
                        if let Some(tools) = protocol.list_tools().await.log_err() {
                            for tool in tools.tools {
                                log::info!(
                                    "registering context server tool: {:?}",
                                    tool.name
                                );
                                context_server_registry.register_tool(
                                    server.id.clone(),
                                    tool.name.as_str(),
                                );
                                tool_registry.register_tool(
                                    tools::context_server_tool::ContextServerTool::new(
                                        server.id.clone(),
                                        tool
                                    ),
                                );
                            }
                    if let Some(prompts) = protocol.list_prompts().await.log_err() {
                        for prompt in prompts
                            .into_iter()
                            .filter(context_server_command::acceptable_prompt)
                        {
                            log::info!(
                                "registering context server command: {:?}",
                                prompt.name
                            );
                            context_server_registry.register_command(
                                server.id.clone(),
                                prompt.name.as_str(),
                            );
                            slash_command_registry.register_command(
                                context_server_command::ContextServerSlashCommand::new(
                                    &server, prompt,
                                ),
                                true,
                            );
                        }
                    }
                })
@@ -373,14 +334,6 @@ fn register_context_server_handlers(cx: &mut AppContext) {
                    context_server_registry.unregister_command(&server_id, &command_name);
                }
            }

            if let Some(tools) = context_server_registry.get_tools(server_id) {
                let tool_registry = ToolRegistry::global(cx);
                for tool_name in tools {
                    tool_registry.unregister_tool_by_name(&tool_name);
                    context_server_registry.unregister_tool(&server_id, &tool_name);
                }
            }
        }
    },
)
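The hunks above gate registration on the context server's advertised capabilities and fan each discovered prompt or tool into the relevant registries. A self-contained sketch of that pattern with stand-in types (`Capability`, `Protocol`, and `Registry` are hypothetical simplifications, not the zed `context_servers` API):

```rust
use std::collections::HashSet;

// Hypothetical stand-ins for the capability/protocol/registry types in the diff.
#[derive(Clone, Copy, PartialEq, Eq, Hash)]
enum Capability { Prompts, Tools }

struct Protocol {
    capabilities: HashSet<Capability>,
    prompts: Vec<String>,
    tools: Vec<String>,
}

impl Protocol {
    fn capable(&self, c: Capability) -> bool { self.capabilities.contains(&c) }
    fn list_prompts(&self) -> Vec<String> { self.prompts.clone() }
    fn list_tools(&self) -> Vec<String> { self.tools.clone() }
}

#[derive(Default)]
struct Registry { commands: Vec<String>, tools: Vec<String> }

// Mirrors the shape of the registration loop above: only query what the
// server says it supports, then record every discovered prompt/tool.
fn register(protocol: &Protocol, registry: &mut Registry) {
    if protocol.capable(Capability::Prompts) {
        for prompt in protocol.list_prompts() {
            registry.commands.push(prompt);
        }
    }
    if protocol.capable(Capability::Tools) {
        for tool in protocol.list_tools() {
            registry.tools.push(tool);
        }
    }
}

fn main() {
    let protocol = Protocol {
        capabilities: [Capability::Prompts].into_iter().collect(),
        prompts: vec!["summarize".into()],
        tools: vec!["search".into()],
    };
    let mut registry = Registry::default();
    register(&protocol, &mut registry);
    // Tools are skipped because the server did not advertise that capability.
    assert_eq!(registry.commands, vec!["summarize".to_string()]);
    assert!(registry.tools.is_empty());
}
```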
@@ -4707,7 +4707,7 @@ impl Render for ConfigurationView {

        let mut element = v_flex()
            .id("assistant-configuration-view")
            .track_focus(&self.focus_handle(cx))
            .track_focus(&self.focus_handle)
            .bg(cx.theme().colors().editor_background)
            .size_full()
            .overflow_y_scroll()
@@ -636,7 +636,7 @@ async fn test_workflow_step_parsing(cx: &mut TestAppContext) {
kind: AssistantEditKind::InsertAfter {
old_text: "fn one".into(),
new_text: "fn two() {}".into(),
description: Some("add a `two` function".into()),
description: "add a `two` function".into(),
},
}]],
cx,
@@ -690,7 +690,7 @@ async fn test_workflow_step_parsing(cx: &mut TestAppContext) {
kind: AssistantEditKind::InsertAfter {
old_text: "fn zero".into(),
new_text: "fn two() {}".into(),
description: Some("add a `two` function".into()),
description: "add a `two` function".into(),
},
}]],
cx,
@@ -754,7 +754,7 @@ async fn test_workflow_step_parsing(cx: &mut TestAppContext) {
kind: AssistantEditKind::InsertAfter {
old_text: "fn zero".into(),
new_text: "fn two() {}".into(),
description: Some("add a `two` function".into()),
description: "add a `two` function".into(),
},
}]],
cx,
@@ -798,7 +798,7 @@ async fn test_workflow_step_parsing(cx: &mut TestAppContext) {
kind: AssistantEditKind::InsertAfter {
old_text: "fn zero".into(),
new_text: "fn two() {}".into(),
description: Some("add a `two` function".into()),
description: "add a `two` function".into(),
},
}]],
cx,

@@ -54,7 +54,7 @@ use telemetry_events::{AssistantEvent, AssistantKind, AssistantPhase};
use terminal_view::terminal_panel::TerminalPanel;
use text::{OffsetRangeExt, ToPoint as _};
use theme::ThemeSettings;
use ui::{prelude::*, text_for_action, CheckboxWithLabel, IconButtonShape, Popover, Tooltip};
use ui::{prelude::*, CheckboxWithLabel, IconButtonShape, Popover, Tooltip};
use util::{RangeExt, ResultExt};
use workspace::{notifications::NotificationId, ItemHandle, Toast, Workspace};

@@ -1596,7 +1596,7 @@ impl PromptEditor {
// always show the cursor (even when it isn't focused) because
// typing in one will make what you typed appear in all of them.
editor.set_show_cursor_when_unfocused(true, cx);
editor.set_placeholder_text(Self::placeholder_text(codegen.read(cx), cx), cx);
editor.set_placeholder_text("Add a prompt…", cx);
editor
});

@@ -1653,7 +1653,6 @@ impl PromptEditor {
self.editor = cx.new_view(|cx| {
let mut editor = Editor::auto_height(Self::MAX_LINES as usize, cx);
editor.set_soft_wrap_mode(language::language_settings::SoftWrap::EditorWidth, cx);
editor.set_placeholder_text(Self::placeholder_text(self.codegen.read(cx), cx), cx);
editor.set_placeholder_text("Add a prompt…", cx);
editor.set_text(prompt, cx);
if focus {
@@ -1664,20 +1663,6 @@ impl PromptEditor {
self.subscribe_to_editor(cx);
}

fn placeholder_text(codegen: &Codegen, cx: &WindowContext) -> String {
let context_keybinding = text_for_action(&crate::ToggleFocus, cx)
.map(|keybinding| format!(" • {keybinding} for context"))
.unwrap_or_default();

let action = if codegen.is_insertion {
"Generate"
} else {
"Transform"
};

format!("{action}…{context_keybinding} • ↓↑ for history")
}

fn prompt(&self, cx: &AppContext) -> String {
self.editor.read(cx).text(cx)
}
@@ -2275,7 +2260,6 @@ pub struct Codegen {
initial_transaction_id: Option<TransactionId>,
telemetry: Option<Arc<Telemetry>>,
builder: Arc<PromptBuilder>,
is_insertion: bool,
}

impl Codegen {
@@ -2298,7 +2282,6 @@ impl Codegen {
)
});
let mut this = Self {
is_insertion: range.to_offset(&buffer.read(cx).snapshot(cx)).is_empty(),
alternatives: vec![codegen],
active_alternative: 0,
seen_alternatives: HashSet::default(),
@@ -2700,7 +2683,7 @@ impl CodegenAlternative {

let prompt = self
.builder
.generate_inline_transformation_prompt(user_prompt, language_name, buffer, range)
.generate_content_prompt(user_prompt, language_name, buffer, range)
.map_err(|e| anyhow::anyhow!("Failed to generate content prompt: {}", e))?;

let mut messages = Vec::new();

@@ -33,21 +33,21 @@ pub enum AssistantEditKind {
Update {
old_text: String,
new_text: String,
description: Option<String>,
description: String,
},
Create {
new_text: String,
description: Option<String>,
description: String,
},
InsertBefore {
old_text: String,
new_text: String,
description: Option<String>,
description: String,
},
InsertAfter {
old_text: String,
new_text: String,
description: Option<String>,
description: String,
},
Delete {
old_text: String,
@@ -86,37 +86,19 @@ enum SearchDirection {
Diagonal,
}

// A measure of the current quality of an in-progress fuzzy search.
//
// Uses 60 bits to store a numeric cost, and 4 bits to store the preceding
// operation in the search.
#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord)]
struct SearchState {
cost: u32,
score: u32,
direction: SearchDirection,
}

impl SearchState {
fn new(cost: u32, direction: SearchDirection) -> Self {
Self { cost, direction }
}
}

struct SearchMatrix {
cols: usize,
data: Vec<SearchState>,
}

impl SearchMatrix {
fn new(rows: usize, cols: usize) -> Self {
SearchMatrix {
cols,
data: vec![SearchState::new(0, SearchDirection::Diagonal); rows * cols],
}
}

fn get(&self, row: usize, col: usize) -> SearchState {
self.data[row * self.cols + col]
}

fn set(&mut self, row: usize, col: usize, cost: SearchState) {
self.data[row * self.cols + col] = cost;
fn new(score: u32, direction: SearchDirection) -> Self {
Self { score, direction }
}
}

@@ -205,23 +187,23 @@ impl AssistantEdit {
"update" => AssistantEditKind::Update {
old_text: old_text.ok_or_else(|| anyhow!("missing old_text"))?,
new_text: new_text.ok_or_else(|| anyhow!("missing new_text"))?,
description,
description: description.ok_or_else(|| anyhow!("missing description"))?,
},
"insert_before" => AssistantEditKind::InsertBefore {
old_text: old_text.ok_or_else(|| anyhow!("missing old_text"))?,
new_text: new_text.ok_or_else(|| anyhow!("missing new_text"))?,
description,
description: description.ok_or_else(|| anyhow!("missing description"))?,
},
"insert_after" => AssistantEditKind::InsertAfter {
old_text: old_text.ok_or_else(|| anyhow!("missing old_text"))?,
new_text: new_text.ok_or_else(|| anyhow!("missing new_text"))?,
description,
description: description.ok_or_else(|| anyhow!("missing description"))?,
},
"delete" => AssistantEditKind::Delete {
old_text: old_text.ok_or_else(|| anyhow!("missing old_text"))?,
},
"create" => AssistantEditKind::Create {
description,
description: description.ok_or_else(|| anyhow!("missing description"))?,
new_text: new_text.ok_or_else(|| anyhow!("missing new_text"))?,
},
_ => Err(anyhow!("unknown operation {operation:?}"))?,
@@ -282,7 +264,7 @@ impl AssistantEditKind {
ResolvedEdit {
range,
new_text,
description,
description: Some(description),
}
}
Self::Create {
@@ -290,7 +272,7 @@ impl AssistantEditKind {
description,
} => ResolvedEdit {
range: text::Anchor::MIN..text::Anchor::MAX,
description,
description: Some(description),
new_text,
},
Self::InsertBefore {
@@ -303,7 +285,7 @@ impl AssistantEditKind {
ResolvedEdit {
range: range.start..range.start,
new_text,
description,
description: Some(description),
}
}
Self::InsertAfter {
@@ -316,7 +298,7 @@ impl AssistantEditKind {
ResolvedEdit {
range: range.end..range.end,
new_text,
description,
description: Some(description),
}
}
Self::Delete { old_text } => {
@@ -332,29 +314,44 @@ impl AssistantEditKind {

fn resolve_location(buffer: &text::BufferSnapshot, search_query: &str) -> Range<text::Anchor> {
const INSERTION_COST: u32 = 3;
const DELETION_COST: u32 = 10;
const WHITESPACE_INSERTION_COST: u32 = 1;
const DELETION_COST: u32 = 3;
const WHITESPACE_DELETION_COST: u32 = 1;
const EQUALITY_BONUS: u32 = 5;

struct Matrix {
cols: usize,
data: Vec<SearchState>,
}

impl Matrix {
fn new(rows: usize, cols: usize) -> Self {
Matrix {
cols,
data: vec![SearchState::new(0, SearchDirection::Diagonal); rows * cols],
}
}

fn get(&self, row: usize, col: usize) -> SearchState {
self.data[row * self.cols + col]
}

fn set(&mut self, row: usize, col: usize, cost: SearchState) {
self.data[row * self.cols + col] = cost;
}
}

let buffer_len = buffer.len();
let query_len = search_query.len();
let mut matrix = SearchMatrix::new(query_len + 1, buffer_len + 1);
let mut leading_deletion_cost = 0_u32;
let mut matrix = Matrix::new(query_len + 1, buffer_len + 1);

for (row, query_byte) in search_query.bytes().enumerate() {
let deletion_cost = if query_byte.is_ascii_whitespace() {
WHITESPACE_DELETION_COST
} else {
DELETION_COST
};

leading_deletion_cost = leading_deletion_cost.saturating_add(deletion_cost);
matrix.set(
row + 1,
0,
SearchState::new(leading_deletion_cost, SearchDirection::Diagonal),
);

for (col, buffer_byte) in buffer.bytes_in_range(0..buffer.len()).flatten().enumerate() {
let deletion_cost = if query_byte.is_ascii_whitespace() {
WHITESPACE_DELETION_COST
} else {
DELETION_COST
};
let insertion_cost = if buffer_byte.is_ascii_whitespace() {
WHITESPACE_INSERTION_COST
} else {
@@ -362,35 +359,38 @@ impl AssistantEditKind {
};

let up = SearchState::new(
matrix.get(row, col + 1).cost.saturating_add(deletion_cost),
matrix.get(row, col + 1).score.saturating_sub(deletion_cost),
SearchDirection::Up,
);
let left = SearchState::new(
matrix.get(row + 1, col).cost.saturating_add(insertion_cost),
matrix
.get(row + 1, col)
.score
.saturating_sub(insertion_cost),
SearchDirection::Left,
);
let diagonal = SearchState::new(
if query_byte == *buffer_byte {
matrix.get(row, col).cost
matrix.get(row, col).score.saturating_add(EQUALITY_BONUS)
} else {
matrix
.get(row, col)
.cost
.saturating_add(deletion_cost + insertion_cost)
.score
.saturating_sub(deletion_cost + insertion_cost)
},
SearchDirection::Diagonal,
);
matrix.set(row + 1, col + 1, up.min(left).min(diagonal));
matrix.set(row + 1, col + 1, up.max(left).max(diagonal));
}
}

// Traceback to find the best match
let mut best_buffer_end = buffer_len;
let mut best_cost = u32::MAX;
let mut best_score = 0;
for col in 1..=buffer_len {
let cost = matrix.get(query_len, col).cost;
if cost < best_cost {
best_cost = cost;
let score = matrix.get(query_len, col).score;
if score > best_score {
best_score = score;
best_buffer_end = col;
}
}
@@ -560,84 +560,89 @@ mod tests {
|
||||
language_settings::AllLanguageSettings, Language, LanguageConfig, LanguageMatcher,
|
||||
};
|
||||
use settings::SettingsStore;
|
||||
use text::{OffsetRangeExt, Point};
|
||||
use ui::BorrowAppContext;
|
||||
use unindent::Unindent as _;
|
||||
use util::test::{generate_marked_text, marked_text_ranges};
|
||||
|
||||
#[gpui::test]
|
||||
fn test_resolve_location(cx: &mut AppContext) {
|
||||
assert_location_resolution(
|
||||
concat!(
|
||||
" Lorem\n",
|
||||
"« ipsum\n",
|
||||
" dolor sit amet»\n",
|
||||
" consecteur",
|
||||
),
|
||||
"ipsum\ndolor",
|
||||
cx,
|
||||
);
|
||||
{
|
||||
let buffer = cx.new_model(|cx| {
|
||||
Buffer::local(
|
||||
concat!(
|
||||
" Lorem\n",
|
||||
" ipsum\n",
|
||||
" dolor sit amet\n",
|
||||
" consecteur",
|
||||
),
|
||||
cx,
|
||||
)
|
||||
});
|
||||
let snapshot = buffer.read(cx).snapshot();
|
||||
assert_eq!(
|
||||
AssistantEditKind::resolve_location(&snapshot, "ipsum\ndolor").to_point(&snapshot),
|
||||
Point::new(1, 0)..Point::new(2, 18)
|
||||
);
|
||||
}
|
||||
|
||||
assert_location_resolution(
|
||||
&"
|
||||
«fn foo1(a: usize) -> usize {
|
||||
40
|
||||
}»
|
||||
{
|
||||
let buffer = cx.new_model(|cx| {
|
||||
Buffer::local(
|
||||
concat!(
|
||||
"fn foo1(a: usize) -> usize {\n",
|
||||
" 40\n",
|
||||
"}\n",
|
||||
"\n",
|
||||
"fn foo2(b: usize) -> usize {\n",
|
||||
" 42\n",
|
||||
"}\n",
|
||||
),
|
||||
cx,
|
||||
)
|
||||
});
|
||||
let snapshot = buffer.read(cx).snapshot();
|
||||
assert_eq!(
|
||||
AssistantEditKind::resolve_location(&snapshot, "fn foo1(b: usize) {\n40\n}")
|
||||
.to_point(&snapshot),
|
||||
Point::new(0, 0)..Point::new(2, 1)
|
||||
);
|
||||
}
|
||||
|
||||
fn foo2(b: usize) -> usize {
|
||||
42
|
||||
}
|
||||
"
|
||||
.unindent(),
|
||||
"fn foo1(b: usize) {\n40\n}",
|
||||
cx,
|
||||
);
|
||||
|
||||
assert_location_resolution(
|
||||
&"
|
||||
fn main() {
|
||||
« Foo
|
||||
.bar()
|
||||
.baz()
|
||||
.qux()»
|
||||
}
|
||||
|
||||
fn foo2(b: usize) -> usize {
|
||||
42
|
||||
}
|
||||
"
|
||||
.unindent(),
|
||||
"Foo.bar.baz.qux()",
|
||||
cx,
|
||||
);
|
||||
|
||||
assert_location_resolution(
|
||||
&"
|
||||
class Something {
|
||||
one() { return 1; }
|
||||
« two() { return 2222; }
|
||||
three() { return 333; }
|
||||
four() { return 4444; }
|
||||
five() { return 5555; }
|
||||
six() { return 6666; }
|
||||
» seven() { return 7; }
|
||||
eight() { return 8; }
|
||||
}
|
||||
"
|
||||
.unindent(),
|
||||
&"
|
||||
two() { return 2222; }
|
||||
four() { return 4444; }
|
||||
five() { return 5555; }
|
||||
six() { return 6666; }
|
||||
"
|
||||
.unindent(),
|
||||
cx,
|
||||
);
|
||||
{
|
||||
let buffer = cx.new_model(|cx| {
|
||||
Buffer::local(
|
||||
concat!(
|
||||
"fn main() {\n",
|
||||
" Foo\n",
|
||||
" .bar()\n",
|
||||
" .baz()\n",
|
||||
" .qux()\n",
|
||||
"}\n",
|
||||
"\n",
|
||||
"fn foo2(b: usize) -> usize {\n",
|
||||
" 42\n",
|
||||
"}\n",
|
||||
),
|
||||
cx,
|
||||
)
|
||||
});
|
||||
let snapshot = buffer.read(cx).snapshot();
|
||||
assert_eq!(
|
||||
AssistantEditKind::resolve_location(&snapshot, "Foo.bar.baz.qux()")
|
||||
.to_point(&snapshot),
|
||||
Point::new(1, 0)..Point::new(4, 14)
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
fn test_resolve_edits(cx: &mut AppContext) {
|
||||
init_test(cx);
|
||||
let settings_store = SettingsStore::test(cx);
|
||||
cx.set_global(settings_store);
|
||||
language::init(cx);
|
||||
cx.update_global::<SettingsStore, _>(|settings, cx| {
|
||||
settings.update_user_settings::<AllLanguageSettings>(cx, |_| {});
|
||||
});
|
||||
|
||||
assert_edits(
|
||||
"
|
||||
@@ -670,7 +675,7 @@ mod tests {
|
||||
last_name: String,
|
||||
"
|
||||
.unindent(),
|
||||
description: None,
|
||||
description: "".into(),
|
||||
},
|
||||
AssistantEditKind::Update {
|
||||
old_text: "
|
||||
@@ -685,7 +690,7 @@ mod tests {
|
||||
}
|
||||
"
|
||||
.unindent(),
|
||||
description: None,
|
||||
description: "".into(),
|
||||
},
|
||||
],
|
||||
"
|
||||
@@ -729,7 +734,7 @@ mod tests {
|
||||
qux();
|
||||
}"
|
||||
.unindent(),
|
||||
description: Some("implement bar".into()),
|
||||
description: "implement bar".into(),
|
||||
},
|
||||
AssistantEditKind::Update {
|
||||
old_text: "
|
||||
@@ -742,7 +747,7 @@ mod tests {
|
||||
bar();
|
||||
}"
|
||||
.unindent(),
|
||||
description: Some("call bar in foo".into()),
|
||||
description: "call bar in foo".into(),
|
||||
},
|
||||
AssistantEditKind::InsertAfter {
|
||||
old_text: "
|
||||
@@ -757,7 +762,7 @@ mod tests {
|
||||
}
|
||||
"
|
||||
.unindent(),
|
||||
description: Some("implement qux".into()),
|
||||
description: "implement qux".into(),
|
||||
},
|
||||
],
|
||||
"
|
||||
@@ -809,7 +814,7 @@ mod tests {
|
||||
}
|
||||
"
|
||||
.unindent(),
|
||||
description: None,
|
||||
description: "pick better number".into(),
|
||||
},
|
||||
AssistantEditKind::Update {
|
||||
old_text: "
|
||||
@@ -824,7 +829,7 @@ mod tests {
|
||||
}
|
||||
"
|
||||
.unindent(),
|
||||
description: None,
|
||||
description: "pick better number".into(),
|
||||
},
|
||||
AssistantEditKind::Update {
|
||||
old_text: "
|
||||
@@ -839,7 +844,7 @@ mod tests {
|
||||
}
|
||||
"
|
||||
.unindent(),
|
||||
description: None,
|
||||
description: "pick better number".into(),
|
||||
},
|
||||
],
|
||||
"
|
||||
@@ -860,69 +865,6 @@ mod tests {
|
||||
.unindent(),
|
||||
cx,
|
||||
);
|
||||
|
||||
assert_edits(
|
||||
"
|
||||
impl Person {
|
||||
fn set_name(&mut self, name: String) {
|
||||
self.name = name;
|
||||
}
|
||||
|
||||
fn name(&self) -> String {
|
||||
return self.name;
|
||||
}
|
||||
}
|
||||
"
|
||||
.unindent(),
|
||||
vec![
|
||||
AssistantEditKind::Update {
|
||||
old_text: "self.name = name;".unindent(),
|
||||
new_text: "self._name = name;".unindent(),
|
||||
description: None,
|
||||
},
|
||||
AssistantEditKind::Update {
|
||||
old_text: "return self.name;\n".unindent(),
|
||||
new_text: "return self._name;\n".unindent(),
|
||||
description: None,
|
||||
},
|
||||
],
|
||||
"
|
||||
impl Person {
|
||||
fn set_name(&mut self, name: String) {
|
||||
self._name = name;
|
||||
}
|
||||
|
||||
fn name(&self) -> String {
|
||||
return self._name;
|
||||
}
|
||||
}
|
||||
"
|
||||
.unindent(),
|
||||
cx,
|
||||
);
|
||||
}
|
||||
|
||||
fn init_test(cx: &mut AppContext) {
|
||||
let settings_store = SettingsStore::test(cx);
|
||||
cx.set_global(settings_store);
|
||||
language::init(cx);
|
||||
cx.update_global::<SettingsStore, _>(|settings, cx| {
|
||||
settings.update_user_settings::<AllLanguageSettings>(cx, |_| {});
|
||||
});
|
||||
}
|
||||
|
||||
#[track_caller]
|
||||
fn assert_location_resolution(
|
||||
text_with_expected_range: &str,
|
||||
query: &str,
|
||||
cx: &mut AppContext,
|
||||
) {
|
||||
let (text, _) = marked_text_ranges(text_with_expected_range, false);
|
||||
let buffer = cx.new_model(|cx| Buffer::local(text.clone(), cx));
|
||||
let snapshot = buffer.read(cx).snapshot();
|
||||
let range = AssistantEditKind::resolve_location(&snapshot, query).to_offset(&snapshot);
|
||||
let text_with_actual_range = generate_marked_text(&text, &[range], false);
|
||||
pretty_assertions::assert_eq!(text_with_actual_range, text_with_expected_range);
|
||||
}
|
||||
|
||||
#[track_caller]
|
||||
|
||||
@@ -204,7 +204,7 @@ impl PromptBuilder {
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub fn generate_inline_transformation_prompt(
|
||||
pub fn generate_content_prompt(
|
||||
&self,
|
||||
user_prompt: String,
|
||||
language_name: Option<&LanguageName>,
|
||||
|
||||
@@ -1,2 +1 @@
|
||||
pub mod context_server_tool;
|
||||
pub mod now_tool;
|
||||
|
||||
@@ -1,82 +0,0 @@
|
||||
use anyhow::{anyhow, bail};
|
||||
use assistant_tool::Tool;
|
||||
use context_servers::manager::ContextServerManager;
|
||||
use context_servers::types;
|
||||
use gpui::Task;
|
||||
|
||||
pub struct ContextServerTool {
|
||||
server_id: String,
|
||||
tool: types::Tool,
|
||||
}
|
||||
|
||||
impl ContextServerTool {
|
||||
pub fn new(server_id: impl Into<String>, tool: types::Tool) -> Self {
|
||||
Self {
|
||||
server_id: server_id.into(),
|
||||
tool,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Tool for ContextServerTool {
|
||||
fn name(&self) -> String {
|
||||
self.tool.name.clone()
|
||||
}
|
||||
|
||||
fn description(&self) -> String {
|
||||
self.tool.description.clone().unwrap_or_default()
|
||||
}
|
||||
|
||||
fn input_schema(&self) -> serde_json::Value {
|
||||
match &self.tool.input_schema {
|
||||
serde_json::Value::Null => {
|
||||
serde_json::json!({ "type": "object", "properties": [] })
|
||||
}
|
||||
serde_json::Value::Object(map) if map.is_empty() => {
|
||||
serde_json::json!({ "type": "object", "properties": [] })
|
||||
}
|
||||
_ => self.tool.input_schema.clone(),
|
||||
}
|
||||
}
|
||||
|
||||
fn run(
|
||||
self: std::sync::Arc<Self>,
|
||||
input: serde_json::Value,
|
||||
_workspace: gpui::WeakView<workspace::Workspace>,
|
||||
cx: &mut ui::WindowContext,
|
||||
) -> gpui::Task<gpui::Result<String>> {
|
||||
let manager = ContextServerManager::global(cx);
|
||||
let manager = manager.read(cx);
|
||||
if let Some(server) = manager.get_server(&self.server_id) {
|
||||
cx.foreground_executor().spawn({
|
||||
let tool_name = self.tool.name.clone();
|
||||
async move {
|
||||
let Some(protocol) = server.client.read().clone() else {
|
||||
bail!("Context server not initialized");
|
||||
};
|
||||
|
||||
let arguments = if let serde_json::Value::Object(map) = input {
|
||||
Some(map.into_iter().collect())
|
||||
} else {
|
||||
None
|
||||
};
|
||||
|
||||
log::trace!(
|
||||
"Running tool: {} with arguments: {:?}",
|
||||
tool_name,
|
||||
arguments
|
||||
);
|
||||
let response = protocol.run_tool(tool_name, arguments).await?;
|
||||
|
||||
let tool_result = match response.tool_result {
|
||||
serde_json::Value::String(s) => s,
|
||||
_ => serde_json::to_string(&response.tool_result)?,
|
||||
};
|
||||
Ok(tool_result)
|
||||
}
|
||||
})
|
||||
} else {
|
||||
Task::ready(Err(anyhow!("Context server not found")))
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -84,9 +84,9 @@ pub struct AutoUpdater {
|
||||
}
|
||||
|
||||
#[derive(Deserialize)]
|
||||
pub struct JsonRelease {
|
||||
pub version: String,
|
||||
pub url: String,
|
||||
struct JsonRelease {
|
||||
version: String,
|
||||
url: String,
|
||||
}
|
||||
|
||||
struct MacOsUnmounter {
|
||||
@@ -482,7 +482,7 @@ impl AutoUpdater {
|
||||
release_channel: ReleaseChannel,
|
||||
version: Option<SemanticVersion>,
|
||||
cx: &mut AsyncAppContext,
|
||||
) -> Result<(JsonRelease, String)> {
|
||||
) -> Result<(String, String)> {
|
||||
let this = cx.update(|cx| {
|
||||
cx.default_global::<GlobalAutoUpdate>()
|
||||
.0
|
||||
@@ -504,7 +504,7 @@ impl AutoUpdater {
|
||||
let update_request_body = build_remote_server_update_request_body(cx)?;
|
||||
let body = serde_json::to_string(&update_request_body)?;
|
||||
|
||||
Ok((release, body))
|
||||
Ok((release.url, body))
|
||||
}
|
||||
|
||||
async fn get_release(
|
||||
@@ -686,12 +686,6 @@ async fn download_remote_server_binary(
|
||||
let request_body = AsyncBody::from(serde_json::to_string(&update_request_body)?);
|
||||
|
||||
let mut response = client.get(&release.url, request_body, true).await?;
|
||||
if !response.status().is_success() {
|
||||
return Err(anyhow!(
|
||||
"failed to download remote server release: {:?}",
|
||||
response.status()
|
||||
));
|
||||
}
|
||||
smol::io::copy(response.body_mut(), &mut temp_file).await?;
|
||||
smol::fs::rename(&temp, &target_path).await?;
|
||||
|
||||
|
||||
@@ -3,7 +3,7 @@ mod channel_index;
|
||||
use crate::{channel_buffer::ChannelBuffer, channel_chat::ChannelChat, ChannelMessage};
|
||||
use anyhow::{anyhow, Result};
|
||||
use channel_index::ChannelIndex;
|
||||
use client::{ChannelId, Client, ClientSettings, Subscription, User, UserId, UserStore};
|
||||
use client::{ChannelId, Client, ClientSettings, ProjectId, Subscription, User, UserId, UserStore};
|
||||
use collections::{hash_map, HashMap, HashSet};
|
||||
use futures::{channel::mpsc, future::Shared, Future, FutureExt, StreamExt};
|
||||
use gpui::{
|
||||
@@ -33,11 +33,30 @@ struct NotesVersion {
|
||||
version: clock::Global,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct HostedProject {
|
||||
project_id: ProjectId,
|
||||
channel_id: ChannelId,
|
||||
name: SharedString,
|
||||
_visibility: proto::ChannelVisibility,
|
||||
}
|
||||
impl From<proto::HostedProject> for HostedProject {
|
||||
fn from(project: proto::HostedProject) -> Self {
|
||||
Self {
|
||||
project_id: ProjectId(project.project_id),
|
||||
channel_id: ChannelId(project.channel_id),
|
||||
_visibility: project.visibility(),
|
||||
name: project.name.into(),
|
||||
}
|
||||
}
|
||||
}
|
||||
pub struct ChannelStore {
|
||||
pub channel_index: ChannelIndex,
|
||||
channel_invitations: Vec<Arc<Channel>>,
|
||||
channel_participants: HashMap<ChannelId, Vec<Arc<User>>>,
|
||||
channel_states: HashMap<ChannelId, ChannelState>,
|
||||
hosted_projects: HashMap<ProjectId, HostedProject>,
|
||||
|
||||
outgoing_invites: HashSet<(ChannelId, UserId)>,
|
||||
update_channels_tx: mpsc::UnboundedSender<proto::UpdateChannels>,
|
||||
opened_buffers: HashMap<ChannelId, OpenedModelHandle<ChannelBuffer>>,
|
||||
@@ -66,6 +85,7 @@ pub struct ChannelState {
|
||||
observed_notes_version: NotesVersion,
|
||||
observed_chat_message: Option<u64>,
|
||||
role: Option<ChannelRole>,
|
||||
projects: HashSet<ProjectId>,
|
||||
}
|
||||
|
||||
impl Channel {
|
||||
@@ -196,6 +216,7 @@ impl ChannelStore {
|
||||
channel_invitations: Vec::default(),
|
||||
channel_index: ChannelIndex::default(),
|
||||
channel_participants: Default::default(),
|
||||
hosted_projects: Default::default(),
|
||||
outgoing_invites: Default::default(),
|
||||
opened_buffers: Default::default(),
|
||||
opened_chats: Default::default(),
|
||||
@@ -295,6 +316,19 @@ impl ChannelStore {
|
||||
self.channel_index.by_id().get(&channel_id)
|
||||
}
|
||||
|
||||
pub fn projects_for_id(&self, channel_id: ChannelId) -> Vec<(SharedString, ProjectId)> {
|
||||
let mut projects: Vec<(SharedString, ProjectId)> = self
|
||||
.channel_states
|
||||
.get(&channel_id)
|
||||
.map(|state| state.projects.clone())
|
||||
.unwrap_or_default()
|
||||
.into_iter()
|
||||
.flat_map(|id| Some((self.hosted_projects.get(&id)?.name.clone(), id)))
|
||||
.collect();
|
||||
projects.sort();
|
||||
projects
|
||||
}
|
||||
|
||||
pub fn has_open_channel_buffer(&self, channel_id: ChannelId, _cx: &AppContext) -> bool {
|
||||
if let Some(buffer) = self.opened_buffers.get(&channel_id) {
|
||||
if let OpenedModelHandle::Open(buffer) = buffer {
|
||||
@@ -1068,7 +1102,9 @@ impl ChannelStore {
|
||||
let channels_changed = !payload.channels.is_empty()
|
||||
|| !payload.delete_channels.is_empty()
|
||||
|| !payload.latest_channel_message_ids.is_empty()
|
||||
|| !payload.latest_channel_buffer_versions.is_empty();
|
||||
|| !payload.latest_channel_buffer_versions.is_empty()
|
||||
|| !payload.hosted_projects.is_empty()
|
||||
|| !payload.deleted_hosted_projects.is_empty();
|
||||
|
||||
if channels_changed {
|
||||
if !payload.delete_channels.is_empty() {
|
||||
@@ -1125,6 +1161,34 @@ impl ChannelStore {
|
||||
.or_default()
|
||||
.update_latest_message_id(latest_channel_message.message_id);
|
||||
}
|
||||
|
||||
for hosted_project in payload.hosted_projects {
|
||||
let hosted_project: HostedProject = hosted_project.into();
|
||||
if let Some(old_project) = self
|
||||
.hosted_projects
|
||||
.insert(hosted_project.project_id, hosted_project.clone())
|
||||
{
|
||||
self.channel_states
|
||||
.entry(old_project.channel_id)
|
||||
.or_default()
|
||||
.remove_hosted_project(old_project.project_id);
|
||||
}
|
||||
self.channel_states
|
||||
.entry(hosted_project.channel_id)
|
||||
.or_default()
|
||||
.add_hosted_project(hosted_project.project_id);
|
||||
}
|
||||
|
||||
for hosted_project_id in payload.deleted_hosted_projects {
|
||||
let hosted_project_id = ProjectId(hosted_project_id);
|
||||
|
||||
if let Some(old_project) = self.hosted_projects.remove(&hosted_project_id) {
|
||||
self.channel_states
|
||||
.entry(old_project.channel_id)
|
||||
.or_default()
|
||||
.remove_hosted_project(old_project.project_id);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
cx.notify();
|
||||
@@ -1231,4 +1295,12 @@ impl ChannelState {
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
fn add_hosted_project(&mut self, project_id: ProjectId) {
|
||||
self.projects.insert(project_id);
|
||||
}
|
||||
|
||||
fn remove_hosted_project(&mut self, project_id: ProjectId) {
|
||||
self.projects.remove(&project_id);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -48,7 +48,6 @@ pub struct Collaborator {
|
||||
pub peer_id: proto::PeerId,
|
||||
pub replica_id: ReplicaId,
|
||||
pub user_id: UserId,
|
||||
pub is_host: bool,
|
||||
}
|
||||
|
||||
impl PartialOrd for User {
|
||||
@@ -825,7 +824,6 @@ impl Collaborator {
|
||||
peer_id: message.peer_id.ok_or_else(|| anyhow!("invalid peer id"))?,
|
||||
replica_id: message.replica_id as ReplicaId,
|
||||
user_id: message.user_id as UserId,
|
||||
is_host: message.is_host,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
@@ -252,10 +252,7 @@ async fn create_billing_subscription(
|
||||
|
||||
let default_model = llm_db.model(rpc::LanguageModelProvider::Anthropic, "claude-3-5-sonnet")?;
|
||||
let stripe_model = stripe_billing.register_model(default_model).await?;
|
||||
let success_url = format!(
|
||||
"{}/account?checkout_complete=1",
|
||||
app.config.zed_dot_dev_url()
|
||||
);
|
||||
let success_url = format!("{}/account", app.config.zed_dot_dev_url());
|
||||
let checkout_session_url = stripe_billing
|
||||
.checkout(customer_id, &user.github_login, &stripe_model, &success_url)
|
||||
.await?;
|
||||
|
||||
@@ -617,6 +617,7 @@ pub struct ChannelsForUser {
|
||||
pub channels: Vec<Channel>,
|
||||
pub channel_memberships: Vec<channel_member::Model>,
|
||||
pub channel_participants: HashMap<ChannelId, Vec<UserId>>,
|
||||
pub hosted_projects: Vec<proto::HostedProject>,
|
||||
pub invited_channels: Vec<Channel>,
|
||||
|
||||
pub observed_buffer_versions: Vec<proto::ChannelBufferVersion>,
|
||||
@@ -740,7 +741,6 @@ impl ProjectCollaborator {
|
||||
peer_id: Some(self.connection_id.into()),
|
||||
replica_id: self.replica_id.0 as u32,
|
||||
user_id: self.user_id.to_proto(),
|
||||
is_host: self.is_host,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -10,6 +10,7 @@ pub mod contacts;
|
||||
pub mod contributors;
|
||||
pub mod embeddings;
|
||||
pub mod extensions;
|
||||
pub mod hosted_projects;
|
||||
pub mod messages;
|
||||
pub mod notifications;
|
||||
pub mod processed_stripe_events;
|
||||
|
||||
@@ -116,7 +116,6 @@ impl Database {
|
||||
peer_id: Some(collaborator.connection().into()),
|
||||
user_id: collaborator.user_id.to_proto(),
|
||||
replica_id: collaborator.replica_id.0 as u32,
|
||||
is_host: false,
|
||||
})
|
||||
.collect(),
|
||||
})
|
||||
@@ -223,7 +222,6 @@ impl Database {
|
||||
peer_id: Some(collaborator.connection().into()),
|
||||
user_id: collaborator.user_id.to_proto(),
|
||||
replica_id: collaborator.replica_id.0 as u32,
|
||||
is_host: false,
|
||||
})
|
||||
.collect(),
|
||||
},
|
||||
@@ -259,7 +257,6 @@ impl Database {
|
||||
peer_id: Some(db_collaborator.connection().into()),
|
||||
replica_id: db_collaborator.replica_id.0 as u32,
|
||||
user_id: db_collaborator.user_id.to_proto(),
|
||||
is_host: false,
|
||||
})
|
||||
} else {
|
||||
collaborator_ids_to_remove.push(db_collaborator.id);
|
||||
@@ -388,7 +385,6 @@ impl Database {
|
||||
peer_id: Some(connection.into()),
|
||||
replica_id: row.replica_id.0 as u32,
|
||||
user_id: row.user_id.to_proto(),
|
||||
is_host: false,
|
||||
});
|
||||
}
|
||||
|
||||
|
||||
@@ -615,10 +615,15 @@ impl Database {
|
||||
.observed_channel_messages(&channel_ids, user_id, tx)
|
||||
.await?;
|
||||
|
||||
let hosted_projects = self
|
||||
.get_hosted_projects(&channel_ids, &roles_by_channel_id, tx)
|
||||
.await?;
|
||||
|
||||
Ok(ChannelsForUser {
|
||||
channel_memberships,
|
||||
channels,
|
||||
invited_channels,
|
||||
hosted_projects,
|
||||
channel_participants,
|
||||
latest_buffer_versions,
|
||||
latest_channel_messages,
|
||||
|
||||
85
crates/collab/src/db/queries/hosted_projects.rs
Normal file
@@ -0,0 +1,85 @@
|
||||
use rpc::{proto, ErrorCode};
|
||||
|
||||
use super::*;
|
||||
|
||||
impl Database {
|
||||
pub async fn get_hosted_projects(
|
||||
&self,
|
||||
channel_ids: &[ChannelId],
|
||||
roles: &HashMap<ChannelId, ChannelRole>,
|
||||
tx: &DatabaseTransaction,
|
||||
) -> Result<Vec<proto::HostedProject>> {
|
||||
let projects = hosted_project::Entity::find()
|
||||
.find_also_related(project::Entity)
|
||||
.filter(hosted_project::Column::ChannelId.is_in(channel_ids.iter().map(|id| id.0)))
|
||||
.all(tx)
|
||||
.await?
|
||||
.into_iter()
|
||||
.flat_map(|(hosted_project, project)| {
|
||||
if hosted_project.deleted_at.is_some() {
|
||||
return None;
|
||||
}
|
||||
match hosted_project.visibility {
|
||||
ChannelVisibility::Public => {}
|
||||
ChannelVisibility::Members => {
|
||||
let is_visible = roles
|
||||
.get(&hosted_project.channel_id)
|
||||
.map(|role| role.can_see_all_descendants())
|
||||
.unwrap_or(false);
|
||||
if !is_visible {
|
||||
return None;
|
||||
}
|
||||
}
|
||||
};
|
||||
Some(proto::HostedProject {
|
||||
project_id: project?.id.to_proto(),
|
||||
channel_id: hosted_project.channel_id.to_proto(),
|
||||
name: hosted_project.name.clone(),
|
||||
visibility: hosted_project.visibility.into(),
|
||||
})
|
||||
})
|
||||
.collect();
|
||||
|
||||
Ok(projects)
|
||||
}
|
||||
|
||||
pub async fn get_hosted_project(
|
||||
&self,
|
||||
hosted_project_id: HostedProjectId,
|
||||
user_id: UserId,
|
||||
tx: &DatabaseTransaction,
|
||||
) -> Result<(hosted_project::Model, ChannelRole)> {
|
||||
let project = hosted_project::Entity::find_by_id(hosted_project_id)
|
||||
.one(tx)
|
||||
.await?
|
||||
.ok_or_else(|| anyhow!(ErrorCode::NoSuchProject))?;
|
||||
let channel = channel::Entity::find_by_id(project.channel_id)
|
||||
.one(tx)
|
||||
.await?
|
||||
.ok_or_else(|| anyhow!(ErrorCode::NoSuchChannel))?;
|
||||
|
||||
let role = match project.visibility {
|
||||
ChannelVisibility::Public => {
|
||||
self.check_user_is_channel_participant(&channel, user_id, tx)
|
||||
.await?
|
||||
}
|
||||
ChannelVisibility::Members => {
|
||||
self.check_user_is_channel_member(&channel, user_id, tx)
|
||||
.await?
|
||||
}
|
||||
};
|
||||
|
||||
Ok((project, role))
|
||||
}
|
||||
|
||||
pub async fn is_hosted_project(&self, project_id: ProjectId) -> Result<bool> {
|
||||
self.transaction(|tx| async move {
|
||||
Ok(project::Entity::find_by_id(project_id)
|
||||
.one(&*tx)
|
||||
.await?
|
||||
.map(|project| project.hosted_project_id.is_some())
|
||||
.ok_or_else(|| anyhow!(ErrorCode::NoSuchProject))?)
|
||||
})
|
||||
.await
|
||||
}
|
||||
}
|
||||
@@ -68,6 +68,7 @@ impl Database {
|
||||
connection.owner_id as i32,
|
||||
))),
|
||||
id: ActiveValue::NotSet,
|
||||
hosted_project_id: ActiveValue::Set(None),
|
||||
}
|
||||
.insert(&*tx)
|
||||
.await?;
|
||||
@@ -535,6 +536,39 @@ impl Database {
|
||||
.await
|
||||
}
|
||||
|
||||
/// Adds the given connection to the specified hosted project
|
||||
pub async fn join_hosted_project(
|
||||
&self,
|
||||
id: ProjectId,
|
||||
user_id: UserId,
|
||||
connection: ConnectionId,
|
||||
) -> Result<(Project, ReplicaId)> {
|
||||
self.transaction(|tx| async move {
|
||||
let (project, hosted_project) = project::Entity::find_by_id(id)
|
||||
.find_also_related(hosted_project::Entity)
|
||||
.one(&*tx)
|
||||
.await?
|
||||
.ok_or_else(|| anyhow!("hosted project is no longer shared"))?;
|
||||
|
||||
let Some(hosted_project) = hosted_project else {
|
||||
return Err(anyhow!("project is not hosted"))?;
|
||||
};
|
||||
|
||||
let channel = channel::Entity::find_by_id(hosted_project.channel_id)
|
||||
.one(&*tx)
|
||||
.await?
|
||||
.ok_or_else(|| anyhow!("no such channel"))?;
|
||||
|
||||
let role = self
|
||||
.check_user_is_channel_participant(&channel, user_id, &tx)
|
||||
.await?;
|
||||
|
||||
self.join_project_internal(project, user_id, connection, role, &tx)
|
||||
.await
|
||||
})
|
||||
.await
|
||||
}
|
||||
|
||||
pub async fn get_project(&self, id: ProjectId) -> Result<project::Model> {
|
||||
self.transaction(|tx| async move {
|
||||
Ok(project::Entity::find_by_id(id)
|
||||
|
||||
@@ -18,6 +18,7 @@ pub mod extension;
|
||||
pub mod extension_version;
|
||||
pub mod feature_flag;
|
||||
pub mod follower;
|
||||
pub mod hosted_project;
|
||||
pub mod language_server;
|
||||
pub mod notification;
|
||||
pub mod notification_kind;
|
||||
|
||||
27
crates/collab/src/db/tables/hosted_project.rs
Normal file
@@ -0,0 +1,27 @@
|
||||
use crate::db::{ChannelId, ChannelVisibility, HostedProjectId};
|
||||
use sea_orm::entity::prelude::*;
|
||||
|
||||
#[derive(Clone, Debug, PartialEq, Eq, DeriveEntityModel)]
|
||||
#[sea_orm(table_name = "hosted_projects")]
|
||||
pub struct Model {
|
||||
#[sea_orm(primary_key)]
|
||||
pub id: HostedProjectId,
|
||||
pub channel_id: ChannelId,
|
||||
pub name: String,
|
||||
pub visibility: ChannelVisibility,
|
||||
pub deleted_at: Option<DateTime>,
|
||||
}
|
||||
|
||||
impl ActiveModelBehavior for ActiveModel {}
|
||||
|
||||
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
|
||||
pub enum Relation {
|
||||
#[sea_orm(has_one = "super::project::Entity")]
|
||||
Project,
|
||||
}
|
||||
|
||||
impl Related<super::project::Entity> for Entity {
|
||||
fn to() -> RelationDef {
|
||||
Relation::Project.def()
|
||||
}
|
||||
}
|
||||
@@ -1,4 +1,4 @@
|
||||
use crate::db::{ProjectId, Result, RoomId, ServerId, UserId};
|
||||
use crate::db::{HostedProjectId, ProjectId, Result, RoomId, ServerId, UserId};
|
||||
use anyhow::anyhow;
|
||||
use rpc::ConnectionId;
|
||||
use sea_orm::entity::prelude::*;
|
||||
@@ -12,6 +12,7 @@ pub struct Model {
|
||||
pub host_user_id: Option<UserId>,
|
||||
pub host_connection_id: Option<i32>,
|
||||
pub host_connection_server_id: Option<ServerId>,
|
||||
pub hosted_project_id: Option<HostedProjectId>,
|
||||
}
|
||||
|
||||
impl Model {
|
||||
@@ -49,6 +50,12 @@ pub enum Relation {
|
||||
Collaborators,
|
||||
#[sea_orm(has_many = "super::language_server::Entity")]
|
||||
LanguageServers,
|
||||
#[sea_orm(
|
||||
belongs_to = "super::hosted_project::Entity",
|
||||
from = "Column::HostedProjectId",
|
||||
to = "super::hosted_project::Column::Id"
|
||||
)]
|
||||
HostedProject,
|
||||
}
|
||||
|
||||
impl Related<super::user::Entity> for Entity {
|
||||
@@ -81,4 +88,10 @@ impl Related<super::language_server::Entity> for Entity {
|
||||
}
|
||||
}
|
||||
|
||||
impl Related<super::hosted_project::Entity> for Entity {
|
||||
fn to() -> RelationDef {
|
||||
Relation::HostedProject.def()
|
||||
}
|
||||
}
|
||||
|
||||
impl ActiveModelBehavior for ActiveModel {}
|
||||
|
||||
@@ -121,13 +121,11 @@ async fn test_channel_buffers(db: &Arc<Database>) {
|
||||
user_id: a_id.to_proto(),
|
||||
peer_id: Some(rpc::proto::PeerId { id: 1, owner_id }),
|
||||
replica_id: 0,
|
||||
is_host: false,
|
||||
},
|
||||
rpc::proto::Collaborator {
|
||||
user_id: b_id.to_proto(),
|
||||
peer_id: Some(rpc::proto::PeerId { id: 2, owner_id }),
|
||||
replica_id: 1,
|
||||
is_host: false,
|
||||
}
|
||||
]
|
||||
);
|
||||
|
||||
@@ -449,10 +449,6 @@ async fn check_usage_limit(
|
||||
model_name: &str,
|
||||
claims: &LlmTokenClaims,
|
||||
) -> Result<()> {
|
||||
if claims.is_staff {
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
let model = state.db.model(provider, model_name)?;
|
||||
let usage = state
|
||||
.db
|
||||
@@ -517,6 +513,11 @@ async fn check_usage_limit(
|
||||
];
|
||||
|
||||
for (used, limit, usage_measure) in checks {
|
||||
// Temporarily bypass rate-limiting for staff members.
|
||||
if claims.is_staff {
|
||||
continue;
|
||||
}
|
||||
|
||||
if used > limit {
|
||||
let resource = match usage_measure {
|
||||
UsageMeasure::RequestsPerMinute => "requests_per_minute",
|
||||
|
||||
@@ -287,6 +287,7 @@ impl Server {
|
||||
.add_request_handler(share_project)
|
||||
.add_message_handler(unshare_project)
|
||||
.add_request_handler(join_project)
|
||||
.add_request_handler(join_hosted_project)
|
||||
.add_message_handler(leave_project)
|
||||
.add_request_handler(update_project)
|
||||
.add_request_handler(update_worktree)
|
||||
@@ -307,8 +308,6 @@ impl Server {
|
||||
.add_request_handler(forward_read_only_project_request::<proto::InlayHints>)
|
||||
.add_request_handler(forward_read_only_project_request::<proto::ResolveInlayHint>)
|
||||
.add_request_handler(forward_read_only_project_request::<proto::OpenBufferByPath>)
|
||||
.add_request_handler(forward_read_only_project_request::<proto::GitBranches>)
|
||||
.add_request_handler(forward_mutating_project_request::<proto::UpdateGitBranch>)
|
||||
.add_request_handler(forward_mutating_project_request::<proto::GetCompletions>)
|
||||
.add_request_handler(
|
||||
forward_mutating_project_request::<proto::ApplyCompletionAdditionalEdits>,
|
||||
@@ -1794,6 +1793,11 @@ impl JoinProjectInternalResponse for Response<proto::JoinProject> {
|
||||
Response::<proto::JoinProject>::send(self, result)
|
||||
}
|
||||
}
|
||||
impl JoinProjectInternalResponse for Response<proto::JoinHostedProject> {
|
||||
fn send(self, result: proto::JoinProjectResponse) -> Result<()> {
|
||||
Response::<proto::JoinHostedProject>::send(self, result)
|
||||
}
|
||||
}
|
||||
|
||||
fn join_project_internal(
|
||||
response: impl JoinProjectInternalResponse,
|
||||
@@ -1827,7 +1831,6 @@ fn join_project_internal(
|
||||
peer_id: Some(session.connection_id.into()),
|
||||
replica_id: replica_id.0 as u32,
|
||||
user_id: guest_user_id.to_proto(),
|
||||
is_host: false,
|
||||
}),
|
||||
};
|
||||
|
||||
@@ -1918,6 +1921,11 @@ async fn leave_project(request: proto::LeaveProject, session: Session) -> Result
|
||||
let sender_id = session.connection_id;
|
||||
let project_id = ProjectId::from_proto(request.project_id);
|
||||
let db = session.db().await;
|
||||
if db.is_hosted_project(project_id).await? {
|
||||
let project = db.leave_hosted_project(project_id, sender_id).await?;
|
||||
project_left(&project, &session);
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
let (room, project) = &*db.leave_project(project_id, sender_id).await?;
|
||||
tracing::info!(
|
||||
@@ -1933,6 +1941,24 @@ async fn leave_project(request: proto::LeaveProject, session: Session) -> Result
|
||||
Ok(())
|
||||
}
|
||||
|
||||
async fn join_hosted_project(
|
||||
request: proto::JoinHostedProject,
|
||||
response: Response<proto::JoinHostedProject>,
|
||||
session: Session,
|
||||
) -> Result<()> {
|
||||
let (mut project, replica_id) = session
|
||||
.db()
|
||||
.await
|
||||
.join_hosted_project(
|
||||
ProjectId(request.project_id as i32),
|
||||
session.user_id(),
|
||||
session.connection_id,
|
||||
)
|
||||
.await?;
|
||||
|
||||
join_project_internal(response, session, &mut project, &replica_id)
|
||||
}
|
||||
|
||||
/// Updates other participants with changes to the project
|
||||
async fn update_project(
|
||||
request: proto::UpdateProject,
|
||||
@@ -4174,6 +4200,7 @@ fn build_channels_update(channels: ChannelsForUser) -> proto::UpdateChannels {
|
||||
update.channel_invitations.push(channel.to_proto());
|
||||
}
|
||||
|
||||
update.hosted_projects = channels.hosted_projects;
|
||||
update
|
||||
}
|
||||
|
||||
|
||||
@@ -21,8 +21,8 @@ use language::{
|
||||
language_settings::{
|
||||
AllLanguageSettings, Formatter, FormatterList, PrettierSettings, SelectedFormatter,
|
||||
},
|
||||
tree_sitter_rust, tree_sitter_typescript, Diagnostic, DiagnosticEntry, FakeLspAdapter,
|
||||
Language, LanguageConfig, LanguageMatcher, LineEnding, OffsetRangeExt, Point, Rope,
|
||||
tree_sitter_rust, Diagnostic, DiagnosticEntry, FakeLspAdapter, Language, LanguageConfig,
|
||||
LanguageMatcher, LineEnding, OffsetRangeExt, Point, Rope,
|
||||
};
|
||||
use live_kit_client::MacOSDisplay;
|
||||
use lsp::LanguageServerId;
|
||||
@@ -4461,7 +4461,7 @@ async fn test_prettier_formatting_buffer(
|
||||
},
|
||||
..Default::default()
|
||||
},
|
||||
Some(tree_sitter_typescript::LANGUAGE_TYPESCRIPT.into()),
|
||||
Some(tree_sitter_rust::LANGUAGE.into()),
|
||||
)));
|
||||
let mut fake_language_servers = client_a.language_registry().register_fake_lsp(
|
||||
"TypeScript",
|
||||
@@ -6575,95 +6575,3 @@ async fn test_context_collaboration_with_reconnect(
|
||||
assert!(context.buffer().read(cx).read_only());
|
||||
});
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_remote_git_branches(
|
||||
executor: BackgroundExecutor,
|
||||
cx_a: &mut TestAppContext,
|
||||
cx_b: &mut TestAppContext,
|
||||
) {
|
||||
let mut server = TestServer::start(executor.clone()).await;
|
||||
let client_a = server.create_client(cx_a, "user_a").await;
|
||||
let client_b = server.create_client(cx_b, "user_b").await;
|
||||
server
|
||||
.create_room(&mut [(&client_a, cx_a), (&client_b, cx_b)])
|
||||
.await;
|
||||
let active_call_a = cx_a.read(ActiveCall::global);
|
||||
|
||||
client_a
|
||||
.fs()
|
||||
.insert_tree("/project", serde_json::json!({ ".git":{} }))
|
||||
.await;
|
||||
let branches = ["main", "dev", "feature-1"];
|
||||
client_a
|
||||
.fs()
|
||||
.insert_branches(Path::new("/project/.git"), &branches);
|
||||
|
||||
let (project_a, worktree_id) = client_a.build_local_project("/project", cx_a).await;
|
||||
let project_id = active_call_a
|
||||
.update(cx_a, |call, cx| call.share_project(project_a.clone(), cx))
|
||||
.await
|
||||
.unwrap();
|
||||
let project_b = client_b.join_remote_project(project_id, cx_b).await;
|
||||
|
||||
let root_path = ProjectPath::root_path(worktree_id);
|
||||
// Client A sees that a guest has joined.
|
||||
executor.run_until_parked();
|
||||
|
||||
let branches_b = cx_b
|
||||
.update(|cx| project_b.update(cx, |project, cx| project.branches(root_path.clone(), cx)))
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
let new_branch = branches[2];
|
||||
|
||||
let branches_b = branches_b
|
||||
.into_iter()
|
||||
.map(|branch| branch.name)
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
assert_eq!(&branches_b, &branches);
|
||||
|
||||
cx_b.update(|cx| {
|
||||
project_b.update(cx, |project, cx| {
|
||||
project.update_or_create_branch(root_path.clone(), new_branch.to_string(), cx)
|
||||
})
|
||||
})
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
executor.run_until_parked();
|
||||
|
||||
let host_branch = cx_a.update(|cx| {
|
||||
project_a.update(cx, |project, cx| {
|
||||
project.worktree_store().update(cx, |worktree_store, cx| {
|
||||
worktree_store
|
||||
.current_branch(root_path.clone(), cx)
|
||||
.unwrap()
|
||||
})
|
||||
})
|
||||
});
|
||||
|
||||
assert_eq!(host_branch.as_ref(), branches[2]);
|
||||
|
||||
// Also try creating a new branch
|
||||
cx_b.update(|cx| {
|
||||
project_b.update(cx, |project, cx| {
|
||||
project.update_or_create_branch(root_path.clone(), "totally-new-branch".to_string(), cx)
|
||||
})
|
||||
})
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
executor.run_until_parked();
|
||||
|
||||
let host_branch = cx_a.update(|cx| {
|
||||
project_a.update(cx, |project, cx| {
|
||||
project.worktree_store().update(cx, |worktree_store, cx| {
|
||||
worktree_store.current_branch(root_path, cx).unwrap()
|
||||
})
|
||||
})
|
||||
});
|
||||
|
||||
assert_eq!(host_branch.as_ref(), "totally-new-branch");
|
||||
}
|
||||
|
||||
@@ -1,27 +1,14 @@
|
||||
use crate::tests::TestServer;
|
||||
use call::ActiveCall;
|
||||
use collections::HashSet;
|
||||
use fs::{FakeFs, Fs as _};
|
||||
use futures::StreamExt as _;
|
||||
use gpui::{BackgroundExecutor, Context as _, TestAppContext, UpdateGlobal as _};
|
||||
use gpui::{Context as _, TestAppContext};
|
||||
use http_client::BlockedHttpClient;
|
||||
use language::{
|
||||
language_settings::{
|
||||
language_settings, AllLanguageSettings, Formatter, FormatterList, PrettierSettings,
|
||||
SelectedFormatter,
|
||||
},
|
||||
tree_sitter_typescript, FakeLspAdapter, Language, LanguageConfig, LanguageMatcher,
|
||||
LanguageRegistry,
|
||||
};
|
||||
use language::{language_settings::language_settings, LanguageRegistry};
|
||||
use node_runtime::NodeRuntime;
|
||||
use project::{
|
||||
lsp_store::{FormatTarget, FormatTrigger},
|
||||
ProjectPath,
|
||||
};
|
||||
use project::ProjectPath;
|
||||
use remote::SshRemoteClient;
|
||||
use remote_server::{HeadlessAppState, HeadlessProject};
|
||||
use serde_json::json;
|
||||
use settings::SettingsStore;
|
||||
use std::{path::Path, sync::Arc};
|
||||
|
||||
#[gpui::test(iterations = 10)]
|
||||
@@ -187,311 +174,3 @@ async fn test_sharing_an_ssh_remote_project(
|
||||
);
|
||||
});
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_ssh_collaboration_git_branches(
|
||||
executor: BackgroundExecutor,
|
||||
cx_a: &mut TestAppContext,
|
||||
cx_b: &mut TestAppContext,
|
||||
server_cx: &mut TestAppContext,
|
||||
) {
|
||||
cx_a.set_name("a");
|
||||
cx_b.set_name("b");
|
||||
server_cx.set_name("server");
|
||||
|
||||
let mut server = TestServer::start(executor.clone()).await;
|
||||
let client_a = server.create_client(cx_a, "user_a").await;
|
||||
let client_b = server.create_client(cx_b, "user_b").await;
|
||||
server
|
||||
.create_room(&mut [(&client_a, cx_a), (&client_b, cx_b)])
|
||||
.await;
|
||||
|
||||
// Set up project on remote FS
|
||||
let (opts, server_ssh) = SshRemoteClient::fake_server(cx_a, server_cx);
|
||||
let remote_fs = FakeFs::new(server_cx.executor());
|
||||
remote_fs
|
||||
.insert_tree("/project", serde_json::json!({ ".git":{} }))
|
||||
.await;
|
||||
|
||||
let branches = ["main", "dev", "feature-1"];
|
||||
remote_fs.insert_branches(Path::new("/project/.git"), &branches);
|
||||
|
||||
// User A connects to the remote project via SSH.
|
||||
server_cx.update(HeadlessProject::init);
|
||||
let remote_http_client = Arc::new(BlockedHttpClient);
|
||||
let node = NodeRuntime::unavailable();
|
||||
let languages = Arc::new(LanguageRegistry::new(server_cx.executor()));
|
||||
let headless_project = server_cx.new_model(|cx| {
|
||||
client::init_settings(cx);
|
||||
HeadlessProject::new(
|
||||
HeadlessAppState {
|
||||
session: server_ssh,
|
||||
fs: remote_fs.clone(),
|
||||
http_client: remote_http_client,
|
||||
node_runtime: node,
|
||||
languages,
|
||||
},
|
||||
cx,
|
||||
)
|
||||
});
|
||||
|
||||
let client_ssh = SshRemoteClient::fake_client(opts, cx_a).await;
|
||||
let (project_a, worktree_id) = client_a
|
||||
.build_ssh_project("/project", client_ssh, cx_a)
|
||||
.await;
|
||||
|
||||
// While the SSH worktree is being scanned, user A shares the remote project.
|
||||
let active_call_a = cx_a.read(ActiveCall::global);
|
||||
let project_id = active_call_a
|
||||
.update(cx_a, |call, cx| call.share_project(project_a.clone(), cx))
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
// User B joins the project.
|
||||
let project_b = client_b.join_remote_project(project_id, cx_b).await;
|
||||
|
||||
// Give client A sometime to see that B has joined, and that the headless server
|
||||
// has some git repositories
|
||||
executor.run_until_parked();
|
||||
|
||||
let root_path = ProjectPath::root_path(worktree_id);
|
||||
|
||||
let branches_b = cx_b
|
||||
.update(|cx| project_b.update(cx, |project, cx| project.branches(root_path.clone(), cx)))
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
let new_branch = branches[2];
|
||||
|
||||
let branches_b = branches_b
|
||||
.into_iter()
|
||||
.map(|branch| branch.name)
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
assert_eq!(&branches_b, &branches);
|
||||
|
||||
cx_b.update(|cx| {
|
||||
project_b.update(cx, |project, cx| {
|
||||
project.update_or_create_branch(root_path.clone(), new_branch.to_string(), cx)
|
||||
})
|
||||
})
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
executor.run_until_parked();
|
||||
|
||||
let server_branch = server_cx.update(|cx| {
|
||||
headless_project.update(cx, |headless_project, cx| {
|
||||
headless_project
|
||||
.worktree_store
|
||||
.update(cx, |worktree_store, cx| {
|
||||
worktree_store
|
||||
.current_branch(root_path.clone(), cx)
|
||||
.unwrap()
|
||||
})
|
||||
})
|
||||
});
|
||||
|
||||
assert_eq!(server_branch.as_ref(), branches[2]);
|
||||
|
||||
// Also try creating a new branch
|
||||
cx_b.update(|cx| {
|
||||
project_b.update(cx, |project, cx| {
|
||||
project.update_or_create_branch(root_path.clone(), "totally-new-branch".to_string(), cx)
|
||||
})
|
||||
})
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
executor.run_until_parked();
|
||||
|
||||
let server_branch = server_cx.update(|cx| {
|
||||
headless_project.update(cx, |headless_project, cx| {
|
            headless_project
                .worktree_store
                .update(cx, |worktree_store, cx| {
                    worktree_store.current_branch(root_path, cx).unwrap()
                })
        })
    });

    assert_eq!(server_branch.as_ref(), "totally-new-branch");
}

#[gpui::test]
async fn test_ssh_collaboration_formatting_with_prettier(
    executor: BackgroundExecutor,
    cx_a: &mut TestAppContext,
    cx_b: &mut TestAppContext,
    server_cx: &mut TestAppContext,
) {
    cx_a.set_name("a");
    cx_b.set_name("b");
    server_cx.set_name("server");

    let mut server = TestServer::start(executor.clone()).await;
    let client_a = server.create_client(cx_a, "user_a").await;
    let client_b = server.create_client(cx_b, "user_b").await;
    server
        .create_room(&mut [(&client_a, cx_a), (&client_b, cx_b)])
        .await;

    let (opts, server_ssh) = SshRemoteClient::fake_server(cx_a, server_cx);
    let remote_fs = FakeFs::new(server_cx.executor());
    let buffer_text = "let one = \"two\"";
    let prettier_format_suffix = project::TEST_PRETTIER_FORMAT_SUFFIX;
    remote_fs
        .insert_tree("/project", serde_json::json!({ "a.ts": buffer_text }))
        .await;

    let test_plugin = "test_plugin";
    let ts_lang = Arc::new(Language::new(
        LanguageConfig {
            name: "TypeScript".into(),
            matcher: LanguageMatcher {
                path_suffixes: vec!["ts".to_string()],
                ..LanguageMatcher::default()
            },
            ..LanguageConfig::default()
        },
        Some(tree_sitter_typescript::LANGUAGE_TYPESCRIPT.into()),
    ));
    client_a.language_registry().add(ts_lang.clone());
    client_b.language_registry().add(ts_lang.clone());

    let languages = Arc::new(LanguageRegistry::new(server_cx.executor()));
    let mut fake_language_servers = languages.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            prettier_plugins: vec![test_plugin],
            ..Default::default()
        },
    );

    // User A connects to the remote project via SSH.
    server_cx.update(HeadlessProject::init);
    let remote_http_client = Arc::new(BlockedHttpClient);
    let _headless_project = server_cx.new_model(|cx| {
        client::init_settings(cx);
        HeadlessProject::new(
            HeadlessAppState {
                session: server_ssh,
                fs: remote_fs.clone(),
                http_client: remote_http_client,
                node_runtime: NodeRuntime::unavailable(),
                languages,
            },
            cx,
        )
    });

    let client_ssh = SshRemoteClient::fake_client(opts, cx_a).await;
    let (project_a, worktree_id) = client_a
        .build_ssh_project("/project", client_ssh, cx_a)
        .await;

    // While the SSH worktree is being scanned, user A shares the remote project.
    let active_call_a = cx_a.read(ActiveCall::global);
    let project_id = active_call_a
        .update(cx_a, |call, cx| call.share_project(project_a.clone(), cx))
        .await
        .unwrap();

    // User B joins the project.
    let project_b = client_b.join_remote_project(project_id, cx_b).await;
    executor.run_until_parked();

    // Opens the buffer and formats it
    let buffer_b = project_b
        .update(cx_b, |p, cx| p.open_buffer((worktree_id, "a.ts"), cx))
        .await
        .expect("user B opens buffer for formatting");

    cx_a.update(|cx| {
        SettingsStore::update_global(cx, |store, cx| {
            store.update_user_settings::<AllLanguageSettings>(cx, |file| {
                file.defaults.formatter = Some(SelectedFormatter::Auto);
                file.defaults.prettier = Some(PrettierSettings {
                    allowed: true,
                    ..PrettierSettings::default()
                });
            });
        });
    });
    cx_b.update(|cx| {
        SettingsStore::update_global(cx, |store, cx| {
            store.update_user_settings::<AllLanguageSettings>(cx, |file| {
                file.defaults.formatter = Some(SelectedFormatter::List(FormatterList(
                    vec![Formatter::LanguageServer { name: None }].into(),
                )));
                file.defaults.prettier = Some(PrettierSettings {
                    allowed: true,
                    ..PrettierSettings::default()
                });
            });
        });
    });
    let fake_language_server = fake_language_servers.next().await.unwrap();
    fake_language_server.handle_request::<lsp::request::Formatting, _, _>(|_, _| async move {
        panic!(
            "Unexpected: prettier should be preferred since it's enabled and language supports it"
        )
    });

    project_b
        .update(cx_b, |project, cx| {
            project.format(
                HashSet::from_iter([buffer_b.clone()]),
                true,
                FormatTrigger::Save,
                FormatTarget::Buffer,
                cx,
            )
        })
        .await
        .unwrap();

    executor.run_until_parked();
    assert_eq!(
        buffer_b.read_with(cx_b, |buffer, _| buffer.text()),
        buffer_text.to_string() + "\n" + prettier_format_suffix,
        "Prettier formatting was not applied to client buffer after client's request"
    );

    // User A opens and formats the same buffer too
    let buffer_a = project_a
        .update(cx_a, |p, cx| p.open_buffer((worktree_id, "a.ts"), cx))
        .await
        .expect("user A opens buffer for formatting");

    cx_a.update(|cx| {
        SettingsStore::update_global(cx, |store, cx| {
            store.update_user_settings::<AllLanguageSettings>(cx, |file| {
                file.defaults.formatter = Some(SelectedFormatter::Auto);
                file.defaults.prettier = Some(PrettierSettings {
                    allowed: true,
                    ..PrettierSettings::default()
                });
            });
        });
    });
    project_a
        .update(cx_a, |project, cx| {
            project.format(
                HashSet::from_iter([buffer_a.clone()]),
                true,
                FormatTrigger::Manual,
                FormatTarget::Buffer,
                cx,
            )
        })
        .await
        .unwrap();

    executor.run_until_parked();
    assert_eq!(
        buffer_b.read_with(cx_b, |buffer, _| buffer.text()),
        buffer_text.to_string() + "\n" + prettier_format_suffix + "\n" + prettier_format_suffix,
        "Prettier formatting was not applied to client buffer after host's request"
    );
}

@@ -5,7 +5,7 @@ use self::channel_modal::ChannelModal;
|
||||
use crate::{channel_view::ChannelView, chat_panel::ChatPanel, CollaborationPanelSettings};
|
||||
use call::ActiveCall;
|
||||
use channel::{Channel, ChannelEvent, ChannelStore};
|
||||
-use client::{ChannelId, Client, Contact, User, UserStore};
+use client::{ChannelId, Client, Contact, ProjectId, User, UserStore};
|
||||
use contact_finder::ContactFinder;
|
||||
use db::kvp::KEY_VALUE_STORE;
|
||||
use editor::{Editor, EditorElement, EditorStyle};
|
||||
@@ -182,6 +182,10 @@ enum ListEntry {
|
||||
ChannelEditor {
|
||||
depth: usize,
|
||||
},
|
||||
HostedProject {
|
||||
id: ProjectId,
|
||||
name: SharedString,
|
||||
},
|
||||
Contact {
|
||||
contact: Arc<Contact>,
|
||||
calling: bool,
|
||||
@@ -562,6 +566,7 @@ impl CollabPanel {
|
||||
}
|
||||
}
|
||||
|
||||
let hosted_projects = channel_store.projects_for_id(channel.id);
|
||||
let has_children = channel_store
|
||||
.channel_at_index(mat.candidate_id + 1)
|
||||
.map_or(false, |next_channel| {
|
||||
@@ -595,6 +600,10 @@ impl CollabPanel {
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
for (name, id) in hosted_projects {
|
||||
self.entries.push(ListEntry::HostedProject { id, name });
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1020,6 +1029,40 @@ impl CollabPanel {
|
||||
.tooltip(move |cx| Tooltip::text("Open Chat", cx))
|
||||
}
|
||||
|
||||
fn render_channel_project(
|
||||
&self,
|
||||
id: ProjectId,
|
||||
name: &SharedString,
|
||||
is_selected: bool,
|
||||
cx: &mut ViewContext<Self>,
|
||||
) -> impl IntoElement {
|
||||
ListItem::new(ElementId::NamedInteger(
|
||||
"channel-project".into(),
|
||||
id.0 as usize,
|
||||
))
|
||||
.indent_level(2)
|
||||
.indent_step_size(px(20.))
|
||||
.selected(is_selected)
|
||||
.on_click(cx.listener(move |this, _, cx| {
|
||||
if let Some(workspace) = this.workspace.upgrade() {
|
||||
let app_state = workspace.read(cx).app_state().clone();
|
||||
workspace::join_hosted_project(id, app_state, cx).detach_and_prompt_err(
|
||||
"Failed to open project",
|
||||
cx,
|
||||
|_, _| None,
|
||||
)
|
||||
}
|
||||
}))
|
||||
.start_slot(
|
||||
h_flex()
|
||||
.relative()
|
||||
.gap_1()
|
||||
.child(IconButton::new(0, IconName::FileTree)),
|
||||
)
|
||||
.child(Label::new(name.clone()))
|
||||
.tooltip(move |cx| Tooltip::text("Open Project", cx))
|
||||
}
|
||||
|
||||
fn has_subchannels(&self, ix: usize) -> bool {
|
||||
self.entries.get(ix).map_or(false, |entry| {
|
||||
if let ListEntry::Channel { has_children, .. } = entry {
|
||||
@@ -1495,6 +1538,12 @@ impl CollabPanel {
|
||||
ListEntry::ChannelChat { channel_id } => {
|
||||
self.join_channel_chat(*channel_id, cx)
|
||||
}
|
||||
ListEntry::HostedProject {
|
||||
id: _id,
|
||||
name: _name,
|
||||
} => {
|
||||
// todo()
|
||||
}
|
||||
ListEntry::OutgoingRequest(_) => {}
|
||||
ListEntry::ChannelEditor { .. } => {}
|
||||
}
|
||||
@@ -2108,6 +2157,10 @@ impl CollabPanel {
|
||||
ListEntry::ChannelChat { channel_id } => self
|
||||
.render_channel_chat(*channel_id, is_selected, cx)
|
||||
.into_any_element(),
|
||||
|
||||
ListEntry::HostedProject { id, name } => self
|
||||
.render_channel_project(*id, name, is_selected, cx)
|
||||
.into_any_element(),
|
||||
}
|
||||
}
|
||||
|
||||
@@ -2726,7 +2779,7 @@ impl Render for CollabPanel {
|
||||
.on_action(cx.listener(CollabPanel::collapse_selected_channel))
|
||||
.on_action(cx.listener(CollabPanel::expand_selected_channel))
|
||||
.on_action(cx.listener(CollabPanel::start_move_selected_channel))
|
||||
-            .track_focus(&self.focus_handle(cx))
+            .track_focus(&self.focus_handle)
|
||||
.size_full()
|
||||
.child(if self.user_store.read(cx).current_user().is_none() {
|
||||
self.render_signed_out(cx)
|
||||
@@ -2845,6 +2898,11 @@ impl PartialEq for ListEntry {
|
||||
return channel_1.id == channel_2.id;
|
||||
}
|
||||
}
|
||||
ListEntry::HostedProject { id, .. } => {
|
||||
if let ListEntry::HostedProject { id: other_id, .. } = other {
|
||||
return id == other_id;
|
||||
}
|
||||
}
|
||||
ListEntry::ChannelNotes { channel_id } => {
|
||||
if let ListEntry::ChannelNotes {
|
||||
channel_id: other_id,
|
||||
|
||||
@@ -180,39 +180,6 @@ impl InitializedContextServerProtocol {

        Ok(completion)
    }
-
-    /// List MCP tools.
-    pub async fn list_tools(&self) -> Result<types::ListToolsResponse> {
-        self.check_capability(ServerCapability::Tools)?;
-
-        let response = self
-            .inner
-            .request::<types::ListToolsResponse>(types::RequestType::ListTools.as_str(), ())
-            .await?;
-
-        Ok(response)
-    }
-
-    /// Executes a tool with the given arguments
-    pub async fn run_tool<P: AsRef<str>>(
-        &self,
-        tool: P,
-        arguments: Option<HashMap<String, serde_json::Value>>,
-    ) -> Result<types::CallToolResponse> {
-        self.check_capability(ServerCapability::Tools)?;
-
-        let params = types::CallToolParams {
-            name: tool.as_ref().to_string(),
-            arguments,
-        };
-
-        let response: types::CallToolResponse = self
-            .inner
-            .request(types::RequestType::CallTool.as_str(), params)
-            .await?;
-
-        Ok(response)
-    }
}

impl InitializedContextServerProtocol {

@@ -9,8 +9,7 @@ struct GlobalContextServerRegistry(Arc<ContextServerRegistry>);
impl Global for GlobalContextServerRegistry {}

pub struct ContextServerRegistry {
-    command_registry: RwLock<HashMap<String, Vec<Arc<str>>>>,
-    tool_registry: RwLock<HashMap<String, Vec<Arc<str>>>>,
+    registry: RwLock<HashMap<String, Vec<Arc<str>>>>,
}

impl ContextServerRegistry {
@@ -21,14 +20,13 @@ impl ContextServerRegistry {
    pub fn register(cx: &mut AppContext) {
        cx.set_global(GlobalContextServerRegistry(Arc::new(
            ContextServerRegistry {
-                command_registry: RwLock::new(HashMap::default()),
-                tool_registry: RwLock::new(HashMap::default()),
+                registry: RwLock::new(HashMap::default()),
            },
        )))
    }

    pub fn register_command(&self, server_id: String, command_name: &str) {
-        let mut registry = self.command_registry.write();
+        let mut registry = self.registry.write();
        registry
            .entry(server_id)
            .or_default()
@@ -36,34 +34,14 @@
    }

    pub fn unregister_command(&self, server_id: &str, command_name: &str) {
-        let mut registry = self.command_registry.write();
+        let mut registry = self.registry.write();
        if let Some(commands) = registry.get_mut(server_id) {
            commands.retain(|name| name.as_ref() != command_name);
        }
    }

    pub fn get_commands(&self, server_id: &str) -> Option<Vec<Arc<str>>> {
-        let registry = self.command_registry.read();
-        registry.get(server_id).cloned()
-    }
-
-    pub fn register_tool(&self, server_id: String, tool_name: &str) {
-        let mut registry = self.tool_registry.write();
-        registry
-            .entry(server_id)
-            .or_default()
-            .push(tool_name.into());
-    }
-
-    pub fn unregister_tool(&self, server_id: &str, tool_name: &str) {
-        let mut registry = self.tool_registry.write();
-        if let Some(tools) = registry.get_mut(server_id) {
-            tools.retain(|name| name.as_ref() != tool_name);
-        }
-    }
-
-    pub fn get_tools(&self, server_id: &str) -> Option<Vec<Arc<str>>> {
-        let registry = self.tool_registry.read();
+        let registry = self.registry.read();
        registry.get(server_id).cloned()
    }
}

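The net effect of the hunk above is that the registry keeps a single `RwLock`-guarded map from server id to registered command names, and the tool-specific methods disappear along with the protocol's `list_tools`/`run_tool` calls. The following standalone sketch shows that end-state shape only; it uses `std::sync::RwLock` and invented names for illustration and is not the crate's actual module.

```rust
use std::collections::HashMap;
use std::sync::{Arc, RwLock};

// Minimal stand-in for the consolidated registry: one map, commands only.
struct Registry {
    registry: RwLock<HashMap<String, Vec<Arc<str>>>>,
}

impl Registry {
    fn new() -> Self {
        Self {
            registry: RwLock::new(HashMap::new()),
        }
    }

    fn register_command(&self, server_id: String, command_name: &str) {
        self.registry
            .write()
            .unwrap()
            .entry(server_id)
            .or_default()
            .push(command_name.into());
    }

    fn get_commands(&self, server_id: &str) -> Option<Vec<Arc<str>>> {
        self.registry.read().unwrap().get(server_id).cloned()
    }
}

fn main() {
    let registry = Registry::new();
    registry.register_command("server-1".into(), "summarize");
    assert_eq!(
        registry.get_commands("server-1"),
        Some(vec![Arc::<str>::from("summarize")])
    );
}
```
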
@@ -16,8 +16,6 @@ pub enum RequestType {
    PromptsList,
    CompletionComplete,
    Ping,
-    ListTools,
-    ListResourceTemplates,
}

impl RequestType {
@@ -34,8 +32,6 @@ impl RequestType {
            RequestType::PromptsList => "prompts/list",
            RequestType::CompletionComplete => "completion/complete",
            RequestType::Ping => "ping",
-            RequestType::ListTools => "tools/list",
-            RequestType::ListResourceTemplates => "resources/templates/list",
        }
    }
}
@@ -406,17 +402,3 @@ pub struct Completion {
    pub values: Vec<String>,
    pub total: CompletionTotal,
}
-
-#[derive(Debug, Deserialize)]
-#[serde(rename_all = "camelCase")]
-pub struct CallToolResponse {
-    pub tool_result: serde_json::Value,
-}
-
-#[derive(Debug, Deserialize)]
-#[serde(rename_all = "camelCase")]
-pub struct ListToolsResponse {
-    pub tools: Vec<Tool>,
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub next_cursor: Option<String>,
-}

@@ -185,7 +185,7 @@ impl Render for CopilotCodeVerification {

        v_flex()
            .id("copilot code verification")
-            .track_focus(&self.focus_handle(cx))
+            .track_focus(&self.focus_handle)
            .elevation_3(cx)
            .w_96()
            .items_center()

@@ -101,7 +101,7 @@ impl Render for ProjectDiagnosticsEditor {
        };

        div()
-            .track_focus(&self.focus_handle(cx))
+            .track_focus(&self.focus_handle)
            .when(self.path_states.is_empty(), |el| {
                el.key_context("EmptyPane")
            })

@@ -81,6 +81,7 @@ ui.workspace = true
url.workspace = true
util.workspace = true
workspace.workspace = true
+unicode-segmentation.workspace = true

[dev-dependencies]
ctor.workspace = true

@@ -153,10 +153,6 @@ pub struct DeleteToPreviousWordStart {
    pub ignore_newlines: bool,
}

-#[derive(PartialEq, Clone, Deserialize, Default)]
-pub struct FoldAtLevel {
-    pub level: u32,
-}
impl_actions!(
    editor,
    [
@@ -186,7 +182,6 @@ impl_actions!(
        ToggleCodeActions,
        ToggleComments,
        UnfoldAt,
-        FoldAtLevel
    ]
);

@@ -198,7 +193,6 @@ gpui::actions!(
        AcceptPartialInlineCompletion,
        AddSelectionAbove,
        AddSelectionBelow,
        ApplyAllDiffHunks,
        ApplyDiffHunk,
        Backspace,
        Cancel,

@@ -8,7 +8,7 @@
//! of several smaller structures that form a hierarchy (starting at the bottom):
//! - [`InlayMap`] that decides where the [`Inlay`]s should be displayed.
//! - [`FoldMap`] that decides where the fold indicators should be; it also tracks parts of a source file that are currently folded.
-//! - [`TabMap`] that keeps track of hard tabs in a buffer.
+//! - [`CharMap`] that replaces tabs and non-printable characters
//! - [`WrapMap`] that handles soft wrapping.
//! - [`BlockMap`] that tracks custom blocks such as diagnostics that should be displayed within buffer.
//! - [`DisplayMap`] that adds background highlights to the regions of text.
@@ -18,10 +18,11 @@
//! [EditorElement]: crate::element::EditorElement

mod block_map;
+mod char_map;
mod crease_map;
mod fold_map;
mod inlay_map;
-mod tab_map;
+mod invisibles;
mod wrap_map;

use crate::{
@@ -32,6 +33,7 @@ pub use block_map::{
    BlockPlacement, BlockPoint, BlockProperties, BlockStyle, CustomBlockId, RenderBlock,
};
use block_map::{BlockRow, BlockSnapshot};
+use char_map::{CharMap, CharSnapshot};
use collections::{HashMap, HashSet};
pub use crease_map::*;
pub use fold_map::{Fold, FoldId, FoldPlaceholder, FoldPoint};
@@ -42,6 +44,7 @@ use gpui::{
pub(crate) use inlay_map::Inlay;
use inlay_map::{InlayMap, InlaySnapshot};
pub use inlay_map::{InlayOffset, InlayPoint};
+pub use invisibles::is_invisible;
use language::{
    language_settings::language_settings, ChunkRenderer, OffsetUtf16, Point,
    Subscription as BufferSubscription,
@@ -61,9 +64,9 @@ use std::{
    sync::Arc,
};
use sum_tree::{Bias, TreeMap};
-use tab_map::{TabMap, TabSnapshot};
use text::LineIndent;
-use ui::WindowContext;
+use ui::{px, WindowContext};
+use unicode_segmentation::UnicodeSegmentation;
use wrap_map::{WrapMap, WrapSnapshot};

#[derive(Copy, Clone, Debug, PartialEq, Eq)]
@@ -94,7 +97,7 @@ pub struct DisplayMap {
|
||||
/// Decides where the fold indicators should be and tracks parts of a source file that are currently folded.
|
||||
fold_map: FoldMap,
|
||||
/// Keeps track of hard tabs in a buffer.
|
||||
tab_map: TabMap,
|
||||
char_map: CharMap,
|
||||
/// Handles soft wrapping.
|
||||
wrap_map: Model<WrapMap>,
|
||||
/// Tracks custom blocks such as diagnostics that should be displayed within buffer.
|
||||
@@ -131,7 +134,7 @@ impl DisplayMap {
|
||||
let crease_map = CreaseMap::new(&buffer_snapshot);
|
||||
let (inlay_map, snapshot) = InlayMap::new(buffer_snapshot);
|
||||
let (fold_map, snapshot) = FoldMap::new(snapshot);
|
||||
let (tab_map, snapshot) = TabMap::new(snapshot, tab_size);
|
||||
let (char_map, snapshot) = CharMap::new(snapshot, tab_size);
|
||||
let (wrap_map, snapshot) = WrapMap::new(snapshot, font, font_size, wrap_width, cx);
|
||||
let block_map = BlockMap::new(
|
||||
snapshot,
|
||||
@@ -148,7 +151,7 @@ impl DisplayMap {
|
||||
buffer_subscription,
|
||||
fold_map,
|
||||
inlay_map,
|
||||
tab_map,
|
||||
char_map,
|
||||
wrap_map,
|
||||
block_map,
|
||||
crease_map,
|
||||
@@ -166,17 +169,17 @@ impl DisplayMap {
|
||||
let (inlay_snapshot, edits) = self.inlay_map.sync(buffer_snapshot, edits);
|
||||
let (fold_snapshot, edits) = self.fold_map.read(inlay_snapshot.clone(), edits);
|
||||
let tab_size = Self::tab_size(&self.buffer, cx);
|
||||
let (tab_snapshot, edits) = self.tab_map.sync(fold_snapshot.clone(), edits, tab_size);
|
||||
let (char_snapshot, edits) = self.char_map.sync(fold_snapshot.clone(), edits, tab_size);
|
||||
let (wrap_snapshot, edits) = self
|
||||
.wrap_map
|
||||
.update(cx, |map, cx| map.sync(tab_snapshot.clone(), edits, cx));
|
||||
.update(cx, |map, cx| map.sync(char_snapshot.clone(), edits, cx));
|
||||
let block_snapshot = self.block_map.read(wrap_snapshot.clone(), edits).snapshot;
|
||||
|
||||
DisplaySnapshot {
|
||||
buffer_snapshot: self.buffer.read(cx).snapshot(cx),
|
||||
fold_snapshot,
|
||||
inlay_snapshot,
|
||||
tab_snapshot,
|
||||
char_snapshot,
|
||||
wrap_snapshot,
|
||||
block_snapshot,
|
||||
crease_snapshot: self.crease_map.snapshot(),
|
||||
@@ -212,13 +215,13 @@ impl DisplayMap {
|
||||
let tab_size = Self::tab_size(&self.buffer, cx);
|
||||
let (snapshot, edits) = self.inlay_map.sync(snapshot, edits);
|
||||
let (mut fold_map, snapshot, edits) = self.fold_map.write(snapshot, edits);
|
||||
let (snapshot, edits) = self.tab_map.sync(snapshot, edits, tab_size);
|
||||
let (snapshot, edits) = self.char_map.sync(snapshot, edits, tab_size);
|
||||
let (snapshot, edits) = self
|
||||
.wrap_map
|
||||
.update(cx, |map, cx| map.sync(snapshot, edits, cx));
|
||||
self.block_map.read(snapshot, edits);
|
||||
let (snapshot, edits) = fold_map.fold(ranges);
|
||||
let (snapshot, edits) = self.tab_map.sync(snapshot, edits, tab_size);
|
||||
let (snapshot, edits) = self.char_map.sync(snapshot, edits, tab_size);
|
||||
let (snapshot, edits) = self
|
||||
.wrap_map
|
||||
.update(cx, |map, cx| map.sync(snapshot, edits, cx));
|
||||
@@ -236,13 +239,13 @@ impl DisplayMap {
|
||||
let tab_size = Self::tab_size(&self.buffer, cx);
|
||||
let (snapshot, edits) = self.inlay_map.sync(snapshot, edits);
|
||||
let (mut fold_map, snapshot, edits) = self.fold_map.write(snapshot, edits);
|
||||
let (snapshot, edits) = self.tab_map.sync(snapshot, edits, tab_size);
|
||||
let (snapshot, edits) = self.char_map.sync(snapshot, edits, tab_size);
|
||||
let (snapshot, edits) = self
|
||||
.wrap_map
|
||||
.update(cx, |map, cx| map.sync(snapshot, edits, cx));
|
||||
self.block_map.read(snapshot, edits);
|
||||
let (snapshot, edits) = fold_map.unfold(ranges, inclusive);
|
||||
let (snapshot, edits) = self.tab_map.sync(snapshot, edits, tab_size);
|
||||
let (snapshot, edits) = self.char_map.sync(snapshot, edits, tab_size);
|
||||
let (snapshot, edits) = self
|
||||
.wrap_map
|
||||
.update(cx, |map, cx| map.sync(snapshot, edits, cx));
|
||||
@@ -277,7 +280,7 @@ impl DisplayMap {
|
||||
let tab_size = Self::tab_size(&self.buffer, cx);
|
||||
let (snapshot, edits) = self.inlay_map.sync(snapshot, edits);
|
||||
let (snapshot, edits) = self.fold_map.read(snapshot, edits);
|
||||
let (snapshot, edits) = self.tab_map.sync(snapshot, edits, tab_size);
|
||||
let (snapshot, edits) = self.char_map.sync(snapshot, edits, tab_size);
|
||||
let (snapshot, edits) = self
|
||||
.wrap_map
|
||||
.update(cx, |map, cx| map.sync(snapshot, edits, cx));
|
||||
@@ -295,7 +298,7 @@ impl DisplayMap {
|
||||
let tab_size = Self::tab_size(&self.buffer, cx);
|
||||
let (snapshot, edits) = self.inlay_map.sync(snapshot, edits);
|
||||
let (snapshot, edits) = self.fold_map.read(snapshot, edits);
|
||||
let (snapshot, edits) = self.tab_map.sync(snapshot, edits, tab_size);
|
||||
let (snapshot, edits) = self.char_map.sync(snapshot, edits, tab_size);
|
||||
let (snapshot, edits) = self
|
||||
.wrap_map
|
||||
.update(cx, |map, cx| map.sync(snapshot, edits, cx));
|
||||
@@ -313,7 +316,7 @@ impl DisplayMap {
|
||||
let tab_size = Self::tab_size(&self.buffer, cx);
|
||||
let (snapshot, edits) = self.inlay_map.sync(snapshot, edits);
|
||||
let (snapshot, edits) = self.fold_map.read(snapshot, edits);
|
||||
let (snapshot, edits) = self.tab_map.sync(snapshot, edits, tab_size);
|
||||
let (snapshot, edits) = self.char_map.sync(snapshot, edits, tab_size);
|
||||
let (snapshot, edits) = self
|
||||
.wrap_map
|
||||
.update(cx, |map, cx| map.sync(snapshot, edits, cx));
|
||||
@@ -331,7 +334,7 @@ impl DisplayMap {
|
||||
let tab_size = Self::tab_size(&self.buffer, cx);
|
||||
let (snapshot, edits) = self.inlay_map.sync(snapshot, edits);
|
||||
let (snapshot, edits) = self.fold_map.read(snapshot, edits);
|
||||
let (snapshot, edits) = self.tab_map.sync(snapshot, edits, tab_size);
|
||||
let (snapshot, edits) = self.char_map.sync(snapshot, edits, tab_size);
|
||||
let (snapshot, edits) = self
|
||||
.wrap_map
|
||||
.update(cx, |map, cx| map.sync(snapshot, edits, cx));
|
||||
@@ -407,7 +410,7 @@ impl DisplayMap {
|
||||
let (snapshot, edits) = self.inlay_map.sync(buffer_snapshot, edits);
|
||||
let (snapshot, edits) = self.fold_map.read(snapshot, edits);
|
||||
let tab_size = Self::tab_size(&self.buffer, cx);
|
||||
let (snapshot, edits) = self.tab_map.sync(snapshot, edits, tab_size);
|
||||
let (snapshot, edits) = self.char_map.sync(snapshot, edits, tab_size);
|
||||
let (snapshot, edits) = self
|
||||
.wrap_map
|
||||
.update(cx, |map, cx| map.sync(snapshot, edits, cx));
|
||||
@@ -415,7 +418,7 @@ impl DisplayMap {
|
||||
|
||||
let (snapshot, edits) = self.inlay_map.splice(to_remove, to_insert);
|
||||
let (snapshot, edits) = self.fold_map.read(snapshot, edits);
|
||||
let (snapshot, edits) = self.tab_map.sync(snapshot, edits, tab_size);
|
||||
let (snapshot, edits) = self.char_map.sync(snapshot, edits, tab_size);
|
||||
let (snapshot, edits) = self
|
||||
.wrap_map
|
||||
.update(cx, |map, cx| map.sync(snapshot, edits, cx));
|
||||
@@ -467,7 +470,7 @@ pub struct DisplaySnapshot {
|
||||
pub fold_snapshot: FoldSnapshot,
|
||||
pub crease_snapshot: CreaseSnapshot,
|
||||
inlay_snapshot: InlaySnapshot,
|
||||
tab_snapshot: TabSnapshot,
|
||||
char_snapshot: CharSnapshot,
|
||||
wrap_snapshot: WrapSnapshot,
|
||||
block_snapshot: BlockSnapshot,
|
||||
text_highlights: TextHighlights,
|
||||
@@ -567,8 +570,8 @@ impl DisplaySnapshot {
|
||||
fn point_to_display_point(&self, point: MultiBufferPoint, bias: Bias) -> DisplayPoint {
|
||||
let inlay_point = self.inlay_snapshot.to_inlay_point(point);
|
||||
let fold_point = self.fold_snapshot.to_fold_point(inlay_point, bias);
|
||||
let tab_point = self.tab_snapshot.to_tab_point(fold_point);
|
||||
let wrap_point = self.wrap_snapshot.tab_point_to_wrap_point(tab_point);
|
||||
let char_point = self.char_snapshot.to_char_point(fold_point);
|
||||
let wrap_point = self.wrap_snapshot.char_point_to_wrap_point(char_point);
|
||||
let block_point = self.block_snapshot.to_block_point(wrap_point);
|
||||
DisplayPoint(block_point)
|
||||
}
|
||||
@@ -596,21 +599,21 @@ impl DisplaySnapshot {
|
||||
fn display_point_to_inlay_point(&self, point: DisplayPoint, bias: Bias) -> InlayPoint {
|
||||
let block_point = point.0;
|
||||
let wrap_point = self.block_snapshot.to_wrap_point(block_point);
|
||||
let tab_point = self.wrap_snapshot.to_tab_point(wrap_point);
|
||||
let fold_point = self.tab_snapshot.to_fold_point(tab_point, bias).0;
|
||||
let char_point = self.wrap_snapshot.to_char_point(wrap_point);
|
||||
let fold_point = self.char_snapshot.to_fold_point(char_point, bias).0;
|
||||
fold_point.to_inlay_point(&self.fold_snapshot)
|
||||
}
|
||||
|
||||
pub fn display_point_to_fold_point(&self, point: DisplayPoint, bias: Bias) -> FoldPoint {
|
||||
let block_point = point.0;
|
||||
let wrap_point = self.block_snapshot.to_wrap_point(block_point);
|
||||
let tab_point = self.wrap_snapshot.to_tab_point(wrap_point);
|
||||
self.tab_snapshot.to_fold_point(tab_point, bias).0
|
||||
let char_point = self.wrap_snapshot.to_char_point(wrap_point);
|
||||
self.char_snapshot.to_fold_point(char_point, bias).0
|
||||
}
|
||||
|
||||
pub fn fold_point_to_display_point(&self, fold_point: FoldPoint) -> DisplayPoint {
|
||||
let tab_point = self.tab_snapshot.to_tab_point(fold_point);
|
||||
let wrap_point = self.wrap_snapshot.tab_point_to_wrap_point(tab_point);
|
||||
let char_point = self.char_snapshot.to_char_point(fold_point);
|
||||
let wrap_point = self.wrap_snapshot.char_point_to_wrap_point(char_point);
|
||||
let block_point = self.block_snapshot.to_block_point(wrap_point);
|
||||
DisplayPoint(block_point)
|
||||
}
|
||||
@@ -688,6 +691,23 @@ impl DisplaySnapshot {
|
||||
}
|
||||
}
|
||||
|
||||
if chunk.is_invisible {
|
||||
let invisible_highlight = HighlightStyle {
|
||||
background_color: Some(editor_style.status.hint_background),
|
||||
underline: Some(UnderlineStyle {
|
||||
color: Some(editor_style.status.hint),
|
||||
thickness: px(1.),
|
||||
wavy: false,
|
||||
}),
|
||||
..Default::default()
|
||||
};
|
||||
if let Some(highlight_style) = highlight_style.as_mut() {
|
||||
highlight_style.highlight(invisible_highlight);
|
||||
} else {
|
||||
highlight_style = Some(invisible_highlight);
|
||||
}
|
||||
}
|
||||
|
||||
let mut diagnostic_highlight = HighlightStyle::default();
|
||||
|
||||
if chunk.is_unnecessary {
|
||||
@@ -784,12 +804,11 @@ impl DisplaySnapshot {
|
||||
layout_line.closest_index_for_x(x) as u32
|
||||
}
|
||||
|
||||
pub fn display_chars_at(
|
||||
&self,
|
||||
mut point: DisplayPoint,
|
||||
) -> impl Iterator<Item = (char, DisplayPoint)> + '_ {
|
||||
pub fn grapheme_at(&self, mut point: DisplayPoint) -> Option<String> {
|
||||
point = DisplayPoint(self.block_snapshot.clip_point(point.0, Bias::Left));
|
||||
self.text_chunks(point.row())
|
||||
|
||||
let chars = self
|
||||
.text_chunks(point.row())
|
||||
.flat_map(str::chars)
|
||||
.skip_while({
|
||||
let mut column = 0;
|
||||
@@ -799,16 +818,21 @@ impl DisplaySnapshot {
|
||||
!at_point
|
||||
}
|
||||
})
|
||||
.map(move |ch| {
|
||||
let result = (ch, point);
|
||||
if ch == '\n' {
|
||||
*point.row_mut() += 1;
|
||||
*point.column_mut() = 0;
|
||||
} else {
|
||||
*point.column_mut() += ch.len_utf8() as u32;
|
||||
.take_while({
|
||||
let mut prev = false;
|
||||
move |char| {
|
||||
let now = char.is_ascii();
|
||||
let end = char.is_ascii() && (char.is_ascii_whitespace() || prev);
|
||||
prev = now;
|
||||
!end
|
||||
}
|
||||
result
|
||||
})
|
||||
});
|
||||
|
||||
chars
|
||||
.collect::<String>()
|
||||
.graphemes(true)
|
||||
.next()
|
||||
.map(|s| s.to_owned())
|
||||
}
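The hunk above swaps the old `display_chars_at` iterator for a `grapheme_at` helper that collects the characters at the display point into a `String` and returns its first grapheme cluster, using the `unicode-segmentation` crate added in a Cargo.toml hunk earlier in this diff. A small standalone example of why that matters (one on-screen character can span several `char`s); it assumes only the `unicode-segmentation` dependency:

```rust
use unicode_segmentation::UnicodeSegmentation;

fn main() {
    // "é" written as 'e' plus a combining acute accent, followed by 'x'.
    let text = "e\u{301}x";
    let first_grapheme = text.graphemes(true).next().map(|s| s.to_owned());
    assert_eq!(first_grapheme.as_deref(), Some("e\u{301}"));
    // chars() alone would split the cluster and yield just 'e'.
    assert_eq!(text.chars().next(), Some('e'));
}
```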
|
||||
|
||||
pub fn buffer_chars_at(&self, mut offset: usize) -> impl Iterator<Item = (char, usize)> + '_ {
|
||||
@@ -1120,8 +1144,8 @@ impl DisplayPoint {
|
||||
|
||||
pub fn to_offset(self, map: &DisplaySnapshot, bias: Bias) -> usize {
|
||||
let wrap_point = map.block_snapshot.to_wrap_point(self.0);
|
||||
let tab_point = map.wrap_snapshot.to_tab_point(wrap_point);
|
||||
let fold_point = map.tab_snapshot.to_fold_point(tab_point, bias).0;
|
||||
let char_point = map.wrap_snapshot.to_char_point(wrap_point);
|
||||
let fold_point = map.char_snapshot.to_fold_point(char_point, bias).0;
|
||||
let inlay_point = fold_point.to_inlay_point(&map.fold_snapshot);
|
||||
map.inlay_snapshot
|
||||
.to_buffer_offset(map.inlay_snapshot.to_offset(inlay_point))
|
||||
@@ -1157,23 +1181,17 @@ pub mod tests {
|
||||
use super::*;
|
||||
use crate::{movement, test::marked_display_snapshot};
|
||||
use block_map::BlockPlacement;
|
||||
use gpui::{
|
||||
div, font, observe, px, AppContext, BorrowAppContext, Context, Element, Hsla, Rgba,
|
||||
};
|
||||
use gpui::{div, font, observe, px, AppContext, BorrowAppContext, Context, Element, Hsla};
|
||||
use language::{
|
||||
language_settings::{AllLanguageSettings, AllLanguageSettingsContent},
|
||||
Buffer, Diagnostic, DiagnosticEntry, DiagnosticSet, Language, LanguageConfig,
|
||||
LanguageMatcher,
|
||||
Buffer, Language, LanguageConfig, LanguageMatcher,
|
||||
};
|
||||
use lsp::LanguageServerId;
|
||||
use project::Project;
|
||||
use rand::{prelude::*, Rng};
|
||||
use settings::SettingsStore;
|
||||
use smol::stream::StreamExt;
|
||||
use std::{env, sync::Arc};
|
||||
use text::PointUtf16;
|
||||
use theme::{LoadThemes, SyntaxTheme};
|
||||
use unindent::Unindent as _;
|
||||
use util::test::{marked_text_ranges, sample_text};
|
||||
use Bias::*;
|
||||
|
||||
@@ -1235,7 +1253,7 @@ pub mod tests {
|
||||
let snapshot = map.update(cx, |map, cx| map.snapshot(cx));
|
||||
log::info!("buffer text: {:?}", snapshot.buffer_snapshot.text());
|
||||
log::info!("fold text: {:?}", snapshot.fold_snapshot.text());
|
||||
log::info!("tab text: {:?}", snapshot.tab_snapshot.text());
|
||||
log::info!("char text: {:?}", snapshot.char_snapshot.text());
|
||||
log::info!("wrap text: {:?}", snapshot.wrap_snapshot.text());
|
||||
log::info!("block text: {:?}", snapshot.block_snapshot.text());
|
||||
log::info!("display text: {:?}", snapshot.text());
|
||||
@@ -1350,7 +1368,7 @@ pub mod tests {
|
||||
fold_count = snapshot.fold_count();
|
||||
log::info!("buffer text: {:?}", snapshot.buffer_snapshot.text());
|
||||
log::info!("fold text: {:?}", snapshot.fold_snapshot.text());
|
||||
log::info!("tab text: {:?}", snapshot.tab_snapshot.text());
|
||||
log::info!("char text: {:?}", snapshot.char_snapshot.text());
|
||||
log::info!("wrap text: {:?}", snapshot.wrap_snapshot.text());
|
||||
log::info!("block text: {:?}", snapshot.block_snapshot.text());
|
||||
log::info!("display text: {:?}", snapshot.text());
|
||||
@@ -1630,6 +1648,8 @@ pub mod tests {
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_chunks(cx: &mut gpui::TestAppContext) {
|
||||
use unindent::Unindent as _;
|
||||
|
||||
let text = r#"
|
||||
fn outer() {}
|
||||
|
||||
@@ -1726,229 +1746,12 @@ pub mod tests {
|
||||
);
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_chunks_with_syntax_highlighting_across_blocks(cx: &mut gpui::TestAppContext) {
|
||||
cx.background_executor
|
||||
.set_block_on_ticks(usize::MAX..=usize::MAX);
|
||||
|
||||
let text = r#"
|
||||
const A: &str = "
|
||||
one
|
||||
two
|
||||
three
|
||||
";
|
||||
const B: &str = "four";
|
||||
"#
|
||||
.unindent();
|
||||
|
||||
let theme = SyntaxTheme::new_test(vec![
|
||||
("string", Hsla::red()),
|
||||
("punctuation", Hsla::blue()),
|
||||
("keyword", Hsla::green()),
|
||||
]);
|
||||
let language = Arc::new(
|
||||
Language::new(
|
||||
LanguageConfig {
|
||||
name: "Rust".into(),
|
||||
..Default::default()
|
||||
},
|
||||
Some(tree_sitter_rust::LANGUAGE.into()),
|
||||
)
|
||||
.with_highlights_query(
|
||||
r#"
|
||||
(string_literal) @string
|
||||
"const" @keyword
|
||||
[":" ";"] @punctuation
|
||||
"#,
|
||||
)
|
||||
.unwrap(),
|
||||
);
|
||||
language.set_theme(&theme);
|
||||
|
||||
cx.update(|cx| init_test(cx, |_| {}));
|
||||
|
||||
let buffer = cx.new_model(|cx| Buffer::local(text, cx).with_language(language, cx));
|
||||
cx.condition(&buffer, |buf, _| !buf.is_parsing()).await;
|
||||
let buffer = cx.new_model(|cx| MultiBuffer::singleton(buffer, cx));
|
||||
let buffer_snapshot = buffer.read_with(cx, |buffer, cx| buffer.snapshot(cx));
|
||||
|
||||
let map = cx.new_model(|cx| {
|
||||
DisplayMap::new(
|
||||
buffer,
|
||||
font("Courier"),
|
||||
px(16.0),
|
||||
None,
|
||||
true,
|
||||
1,
|
||||
1,
|
||||
0,
|
||||
FoldPlaceholder::test(),
|
||||
cx,
|
||||
)
|
||||
});
|
||||
|
||||
// Insert a block in the middle of a multi-line string literal
|
||||
map.update(cx, |map, cx| {
|
||||
map.insert_blocks(
|
||||
[BlockProperties {
|
||||
placement: BlockPlacement::Below(
|
||||
buffer_snapshot.anchor_before(Point::new(1, 0)),
|
||||
),
|
||||
height: 1,
|
||||
style: BlockStyle::Sticky,
|
||||
render: Box::new(|_| div().into_any()),
|
||||
priority: 0,
|
||||
}],
|
||||
cx,
|
||||
)
|
||||
});
|
||||
|
||||
pretty_assertions::assert_eq!(
|
||||
cx.update(|cx| syntax_chunks(DisplayRow(0)..DisplayRow(7), &map, &theme, cx)),
|
||||
[
|
||||
("const".into(), Some(Hsla::green())),
|
||||
(" A".into(), None),
|
||||
(":".into(), Some(Hsla::blue())),
|
||||
(" &str = ".into(), None),
|
||||
("\"\n one\n".into(), Some(Hsla::red())),
|
||||
("\n".into(), None),
|
||||
(" two\n three\n\"".into(), Some(Hsla::red())),
|
||||
(";".into(), Some(Hsla::blue())),
|
||||
("\n".into(), None),
|
||||
("const".into(), Some(Hsla::green())),
|
||||
(" B".into(), None),
|
||||
(":".into(), Some(Hsla::blue())),
|
||||
(" &str = ".into(), None),
|
||||
("\"four\"".into(), Some(Hsla::red())),
|
||||
(";".into(), Some(Hsla::blue())),
|
||||
("\n".into(), None),
|
||||
]
|
||||
);
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_chunks_with_diagnostics_across_blocks(cx: &mut gpui::TestAppContext) {
|
||||
cx.background_executor
|
||||
.set_block_on_ticks(usize::MAX..=usize::MAX);
|
||||
|
||||
let text = r#"
|
||||
struct A {
|
||||
b: usize;
|
||||
}
|
||||
const c: usize = 1;
|
||||
"#
|
||||
.unindent();
|
||||
|
||||
cx.update(|cx| init_test(cx, |_| {}));
|
||||
|
||||
let buffer = cx.new_model(|cx| Buffer::local(text, cx));
|
||||
|
||||
buffer.update(cx, |buffer, cx| {
|
||||
buffer.update_diagnostics(
|
||||
LanguageServerId(0),
|
||||
DiagnosticSet::new(
|
||||
[DiagnosticEntry {
|
||||
range: PointUtf16::new(0, 0)..PointUtf16::new(2, 1),
|
||||
diagnostic: Diagnostic {
|
||||
severity: DiagnosticSeverity::ERROR,
|
||||
group_id: 1,
|
||||
message: "hi".into(),
|
||||
..Default::default()
|
||||
},
|
||||
}],
|
||||
buffer,
|
||||
),
|
||||
cx,
|
||||
)
|
||||
});
|
||||
|
||||
let buffer = cx.new_model(|cx| MultiBuffer::singleton(buffer, cx));
|
||||
let buffer_snapshot = buffer.read_with(cx, |buffer, cx| buffer.snapshot(cx));
|
||||
|
||||
let map = cx.new_model(|cx| {
|
||||
DisplayMap::new(
|
||||
buffer,
|
||||
font("Courier"),
|
||||
px(16.0),
|
||||
None,
|
||||
true,
|
||||
1,
|
||||
1,
|
||||
0,
|
||||
FoldPlaceholder::test(),
|
||||
cx,
|
||||
)
|
||||
});
|
||||
|
||||
let black = gpui::black().to_rgb();
|
||||
let red = gpui::red().to_rgb();
|
||||
|
||||
// Insert a block in the middle of a multi-line diagnostic.
|
||||
map.update(cx, |map, cx| {
|
||||
map.highlight_text(
|
||||
TypeId::of::<usize>(),
|
||||
vec![
|
||||
buffer_snapshot.anchor_before(Point::new(3, 9))
|
||||
..buffer_snapshot.anchor_after(Point::new(3, 14)),
|
||||
buffer_snapshot.anchor_before(Point::new(3, 17))
|
||||
..buffer_snapshot.anchor_after(Point::new(3, 18)),
|
||||
],
|
||||
red.into(),
|
||||
);
|
||||
map.insert_blocks(
|
||||
[BlockProperties {
|
||||
placement: BlockPlacement::Below(
|
||||
buffer_snapshot.anchor_before(Point::new(1, 0)),
|
||||
),
|
||||
height: 1,
|
||||
style: BlockStyle::Sticky,
|
||||
render: Box::new(|_| div().into_any()),
|
||||
priority: 0,
|
||||
}],
|
||||
cx,
|
||||
)
|
||||
});
|
||||
|
||||
let snapshot = map.update(cx, |map, cx| map.snapshot(cx));
|
||||
let mut chunks = Vec::<(String, Option<DiagnosticSeverity>, Rgba)>::new();
|
||||
for chunk in snapshot.chunks(DisplayRow(0)..DisplayRow(5), true, Default::default()) {
|
||||
let color = chunk
|
||||
.highlight_style
|
||||
.and_then(|style| style.color)
|
||||
.map_or(black, |color| color.to_rgb());
|
||||
if let Some((last_chunk, last_severity, last_color)) = chunks.last_mut() {
|
||||
if *last_severity == chunk.diagnostic_severity && *last_color == color {
|
||||
last_chunk.push_str(chunk.text);
|
||||
continue;
|
||||
}
|
||||
}
|
||||
|
||||
chunks.push((chunk.text.to_string(), chunk.diagnostic_severity, color));
|
||||
}
|
||||
|
||||
assert_eq!(
|
||||
chunks,
|
||||
[
|
||||
(
|
||||
"struct A {\n b: usize;\n".into(),
|
||||
Some(DiagnosticSeverity::ERROR),
|
||||
black
|
||||
),
|
||||
("\n".into(), None, black),
|
||||
("}".into(), Some(DiagnosticSeverity::ERROR), black),
|
||||
("\nconst c: ".into(), None, black),
|
||||
("usize".into(), None, red),
|
||||
(" = ".into(), None, black),
|
||||
("1".into(), None, red),
|
||||
(";\n".into(), None, black),
|
||||
]
|
||||
);
|
||||
}
|
||||
|
||||
// todo(linux) fails due to pixel differences in text rendering
|
||||
#[cfg(target_os = "macos")]
|
||||
#[gpui::test]
|
||||
async fn test_chunks_with_soft_wrapping(cx: &mut gpui::TestAppContext) {
|
||||
use unindent::Unindent as _;
|
||||
|
||||
cx.background_executor
|
||||
.set_block_on_ticks(usize::MAX..=usize::MAX);
|
||||
|
||||
|
||||
@@ -1666,7 +1666,7 @@ fn offset_for_row(s: &str, target: u32) -> (u32, usize) {
|
||||
mod tests {
|
||||
use super::*;
|
||||
use crate::display_map::{
|
||||
fold_map::FoldMap, inlay_map::InlayMap, tab_map::TabMap, wrap_map::WrapMap,
|
||||
char_map::CharMap, fold_map::FoldMap, inlay_map::InlayMap, wrap_map::WrapMap,
|
||||
};
|
||||
use gpui::{div, font, px, AppContext, Context as _, Element};
|
||||
use language::{Buffer, Capability};
|
||||
@@ -1701,9 +1701,9 @@ mod tests {
|
||||
let subscription = buffer.update(cx, |buffer, _| buffer.subscribe());
|
||||
let (mut inlay_map, inlay_snapshot) = InlayMap::new(buffer_snapshot.clone());
|
||||
let (mut fold_map, fold_snapshot) = FoldMap::new(inlay_snapshot);
|
||||
let (mut tab_map, tab_snapshot) = TabMap::new(fold_snapshot, 1.try_into().unwrap());
|
||||
let (mut char_map, char_snapshot) = CharMap::new(fold_snapshot, 1.try_into().unwrap());
|
||||
let (wrap_map, wraps_snapshot) =
|
||||
cx.update(|cx| WrapMap::new(tab_snapshot, font("Helvetica"), px(14.0), None, cx));
|
||||
cx.update(|cx| WrapMap::new(char_snapshot, font("Helvetica"), px(14.0), None, cx));
|
||||
let mut block_map = BlockMap::new(wraps_snapshot.clone(), true, 1, 1, 1);
|
||||
|
||||
let mut writer = block_map.write(wraps_snapshot.clone(), Default::default());
|
||||
@@ -1851,10 +1851,10 @@ mod tests {
|
||||
let (inlay_snapshot, inlay_edits) =
|
||||
inlay_map.sync(buffer_snapshot, subscription.consume().into_inner());
|
||||
let (fold_snapshot, fold_edits) = fold_map.read(inlay_snapshot, inlay_edits);
|
||||
let (tab_snapshot, tab_edits) =
|
||||
tab_map.sync(fold_snapshot, fold_edits, 4.try_into().unwrap());
|
||||
let (char_snapshot, tab_edits) =
|
||||
char_map.sync(fold_snapshot, fold_edits, 4.try_into().unwrap());
|
||||
let (wraps_snapshot, wrap_edits) = wrap_map.update(cx, |wrap_map, cx| {
|
||||
wrap_map.sync(tab_snapshot, tab_edits, cx)
|
||||
wrap_map.sync(char_snapshot, tab_edits, cx)
|
||||
});
|
||||
let snapshot = block_map.read(wraps_snapshot, wrap_edits);
|
||||
assert_eq!(snapshot.text(), "aaa\n\nb!!!\n\n\nbb\nccc\nddd\n\n\n");
|
||||
@@ -1914,8 +1914,9 @@ mod tests {
|
||||
let multi_buffer_snapshot = multi_buffer.read(cx).snapshot(cx);
|
||||
let (_, inlay_snapshot) = InlayMap::new(multi_buffer_snapshot.clone());
|
||||
let (_, fold_snapshot) = FoldMap::new(inlay_snapshot);
|
||||
let (_, tab_snapshot) = TabMap::new(fold_snapshot, 4.try_into().unwrap());
|
||||
let (_, wraps_snapshot) = WrapMap::new(tab_snapshot, font, font_size, Some(wrap_width), cx);
|
||||
let (_, char_snapshot) = CharMap::new(fold_snapshot, 4.try_into().unwrap());
|
||||
let (_, wraps_snapshot) =
|
||||
WrapMap::new(char_snapshot, font, font_size, Some(wrap_width), cx);
|
||||
|
||||
let block_map = BlockMap::new(wraps_snapshot.clone(), true, 1, 1, 1);
|
||||
let snapshot = block_map.read(wraps_snapshot, Default::default());
|
||||
@@ -1952,9 +1953,9 @@ mod tests {
|
||||
let _subscription = buffer.update(cx, |buffer, _| buffer.subscribe());
|
||||
let (_inlay_map, inlay_snapshot) = InlayMap::new(buffer_snapshot.clone());
|
||||
let (_fold_map, fold_snapshot) = FoldMap::new(inlay_snapshot);
|
||||
let (_tab_map, tab_snapshot) = TabMap::new(fold_snapshot, 1.try_into().unwrap());
|
||||
let (_char_map, char_snapshot) = CharMap::new(fold_snapshot, 1.try_into().unwrap());
|
||||
let (_wrap_map, wraps_snapshot) =
|
||||
cx.update(|cx| WrapMap::new(tab_snapshot, font("Helvetica"), px(14.0), None, cx));
|
||||
cx.update(|cx| WrapMap::new(char_snapshot, font("Helvetica"), px(14.0), None, cx));
|
||||
let mut block_map = BlockMap::new(wraps_snapshot.clone(), false, 1, 1, 0);
|
||||
|
||||
let mut writer = block_map.write(wraps_snapshot.clone(), Default::default());
|
||||
@@ -2054,9 +2055,15 @@ mod tests {
|
||||
let buffer_snapshot = cx.update(|cx| buffer.read(cx).snapshot(cx));
|
||||
let (_, inlay_snapshot) = InlayMap::new(buffer_snapshot.clone());
|
||||
let (_, fold_snapshot) = FoldMap::new(inlay_snapshot);
|
||||
let (_, tab_snapshot) = TabMap::new(fold_snapshot, 4.try_into().unwrap());
|
||||
let (_, char_snapshot) = CharMap::new(fold_snapshot, 4.try_into().unwrap());
|
||||
let (_, wraps_snapshot) = cx.update(|cx| {
|
||||
WrapMap::new(tab_snapshot, font("Helvetica"), px(14.0), Some(px(60.)), cx)
|
||||
WrapMap::new(
|
||||
char_snapshot,
|
||||
font("Helvetica"),
|
||||
px(14.0),
|
||||
Some(px(60.)),
|
||||
cx,
|
||||
)
|
||||
});
|
||||
let mut block_map = BlockMap::new(wraps_snapshot.clone(), true, 1, 1, 0);
|
||||
|
||||
@@ -2099,7 +2106,7 @@ mod tests {
|
||||
let (mut inlay_map, inlay_snapshot) = InlayMap::new(buffer_snapshot.clone());
|
||||
let (mut fold_map, fold_snapshot) = FoldMap::new(inlay_snapshot);
|
||||
let tab_size = 1.try_into().unwrap();
|
||||
let (mut tab_map, tab_snapshot) = TabMap::new(fold_snapshot, tab_size);
|
||||
let (mut tab_map, tab_snapshot) = CharMap::new(fold_snapshot, tab_size);
|
||||
let (wrap_map, wraps_snapshot) =
|
||||
cx.update(|cx| WrapMap::new(tab_snapshot, font("Helvetica"), px(14.0), None, cx));
|
||||
let mut block_map = BlockMap::new(wraps_snapshot.clone(), false, 1, 1, 0);
|
||||
@@ -2250,9 +2257,9 @@ mod tests {
|
||||
let mut buffer_snapshot = cx.update(|cx| buffer.read(cx).snapshot(cx));
|
||||
let (mut inlay_map, inlay_snapshot) = InlayMap::new(buffer_snapshot.clone());
|
||||
let (mut fold_map, fold_snapshot) = FoldMap::new(inlay_snapshot);
|
||||
let (mut tab_map, tab_snapshot) = TabMap::new(fold_snapshot, 4.try_into().unwrap());
|
||||
let (mut char_map, char_snapshot) = CharMap::new(fold_snapshot, 4.try_into().unwrap());
|
||||
let (wrap_map, wraps_snapshot) = cx
|
||||
.update(|cx| WrapMap::new(tab_snapshot, font("Helvetica"), font_size, wrap_width, cx));
|
||||
.update(|cx| WrapMap::new(char_snapshot, font("Helvetica"), font_size, wrap_width, cx));
|
||||
let mut block_map = BlockMap::new(
|
||||
wraps_snapshot,
|
||||
true,
|
||||
@@ -2314,10 +2321,10 @@ mod tests {
|
||||
let (inlay_snapshot, inlay_edits) =
|
||||
inlay_map.sync(buffer_snapshot.clone(), vec![]);
|
||||
let (fold_snapshot, fold_edits) = fold_map.read(inlay_snapshot, inlay_edits);
|
||||
let (tab_snapshot, tab_edits) =
|
||||
tab_map.sync(fold_snapshot, fold_edits, tab_size);
|
||||
let (char_snapshot, tab_edits) =
|
||||
char_map.sync(fold_snapshot, fold_edits, tab_size);
|
||||
let (wraps_snapshot, wrap_edits) = wrap_map.update(cx, |wrap_map, cx| {
|
||||
wrap_map.sync(tab_snapshot, tab_edits, cx)
|
||||
wrap_map.sync(char_snapshot, tab_edits, cx)
|
||||
});
|
||||
let mut block_map = block_map.write(wraps_snapshot, wrap_edits);
|
||||
block_map.insert(block_properties.iter().map(|props| BlockProperties {
|
||||
@@ -2339,10 +2346,10 @@ mod tests {
|
||||
let (inlay_snapshot, inlay_edits) =
|
||||
inlay_map.sync(buffer_snapshot.clone(), vec![]);
|
||||
let (fold_snapshot, fold_edits) = fold_map.read(inlay_snapshot, inlay_edits);
|
||||
let (tab_snapshot, tab_edits) =
|
||||
tab_map.sync(fold_snapshot, fold_edits, tab_size);
|
||||
let (char_snapshot, tab_edits) =
|
||||
char_map.sync(fold_snapshot, fold_edits, tab_size);
|
||||
let (wraps_snapshot, wrap_edits) = wrap_map.update(cx, |wrap_map, cx| {
|
||||
wrap_map.sync(tab_snapshot, tab_edits, cx)
|
||||
wrap_map.sync(char_snapshot, tab_edits, cx)
|
||||
});
|
||||
let mut block_map = block_map.write(wraps_snapshot, wrap_edits);
|
||||
block_map.remove(block_ids_to_remove);
|
||||
@@ -2362,9 +2369,9 @@ mod tests {
|
||||
let (inlay_snapshot, inlay_edits) =
|
||||
inlay_map.sync(buffer_snapshot.clone(), buffer_edits);
|
||||
let (fold_snapshot, fold_edits) = fold_map.read(inlay_snapshot, inlay_edits);
|
||||
let (tab_snapshot, tab_edits) = tab_map.sync(fold_snapshot, fold_edits, tab_size);
|
||||
let (char_snapshot, tab_edits) = char_map.sync(fold_snapshot, fold_edits, tab_size);
|
||||
let (wraps_snapshot, wrap_edits) = wrap_map.update(cx, |wrap_map, cx| {
|
||||
wrap_map.sync(tab_snapshot, tab_edits, cx)
|
||||
wrap_map.sync(char_snapshot, tab_edits, cx)
|
||||
});
|
||||
let blocks_snapshot = block_map.read(wraps_snapshot.clone(), wrap_edits);
|
||||
assert_eq!(
|
||||
@@ -2479,7 +2486,7 @@ mod tests {
|
||||
.row as usize];
|
||||
|
||||
let soft_wrapped = wraps_snapshot
|
||||
.to_tab_point(WrapPoint::new(wrap_row, 0))
|
||||
.to_char_point(WrapPoint::new(wrap_row, 0))
|
||||
.column()
|
||||
> 0;
|
||||
expected_buffer_rows.push(if soft_wrapped { None } else { buffer_row });
|
||||
|
||||
@@ -1,5 +1,6 @@
use super::{
    fold_map::{self, FoldChunks, FoldEdit, FoldPoint, FoldSnapshot},
+    invisibles::{is_invisible, replacement},
    Highlights,
};
use language::{Chunk, Point};
@@ -9,14 +10,14 @@ use sum_tree::Bias;

const MAX_EXPANSION_COLUMN: u32 = 256;

-/// Keeps track of hard tabs in a text buffer.
+/// Keeps track of hard tabs and non-printable characters in a text buffer.
///
/// See the [`display_map` module documentation](crate::display_map) for more information.
-pub struct TabMap(TabSnapshot);
+pub struct CharMap(CharSnapshot);

-impl TabMap {
-    pub fn new(fold_snapshot: FoldSnapshot, tab_size: NonZeroU32) -> (Self, TabSnapshot) {
-        let snapshot = TabSnapshot {
+impl CharMap {
+    pub fn new(fold_snapshot: FoldSnapshot, tab_size: NonZeroU32) -> (Self, CharSnapshot) {
+        let snapshot = CharSnapshot {
            fold_snapshot,
            tab_size,
            max_expansion_column: MAX_EXPANSION_COLUMN,
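The renamed `CharMap` doc comment above covers two behaviors: expanding hard tabs to the next tab stop and swapping non-printable characters for visible replacement strings. The same column arithmetic appears in the `expand_tabs` hunk further down. A self-contained sketch of that logic, with invented names that are not the crate's API:

```rust
// Expand tabs to the next multiple of `tab_size` and substitute a visible
// replacement for characters the caller classifies as non-printable.
fn expand_line(
    line: &str,
    tab_size: u32,
    replacement_for: impl Fn(char) -> Option<&'static str>,
) -> String {
    let mut out = String::new();
    let mut column = 0u32;
    for c in line.chars() {
        if c == '\t' {
            // A hard tab advances to the next tab stop.
            let tab_len = tab_size - column % tab_size;
            for _ in 0..tab_len {
                out.push(' ');
            }
            column += tab_len;
        } else if let Some(replacement) = replacement_for(c) {
            // Non-printable characters are drawn as a visible replacement.
            out.push_str(replacement);
            column += replacement.chars().count() as u32;
        } else {
            out.push(c);
            column += 1;
        }
    }
    out
}

fn main() {
    // U+00A0 (no-break space) stands in for an "invisible" character here.
    let expanded = expand_line("a\tb\u{a0}c", 4, |c| (c == '\u{a0}').then_some("␣"));
    assert_eq!(expanded, "a   b␣c");
}
```
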
@@ -26,7 +27,7 @@ impl TabMap {
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
pub fn set_max_expansion_column(&mut self, column: u32) -> TabSnapshot {
|
||||
pub fn set_max_expansion_column(&mut self, column: u32) -> CharSnapshot {
|
||||
self.0.max_expansion_column = column;
|
||||
self.0.clone()
|
||||
}
|
||||
@@ -36,9 +37,9 @@ impl TabMap {
|
||||
fold_snapshot: FoldSnapshot,
|
||||
mut fold_edits: Vec<FoldEdit>,
|
||||
tab_size: NonZeroU32,
|
||||
) -> (TabSnapshot, Vec<TabEdit>) {
|
||||
) -> (CharSnapshot, Vec<TabEdit>) {
|
||||
let old_snapshot = &mut self.0;
|
||||
let mut new_snapshot = TabSnapshot {
|
||||
let mut new_snapshot = CharSnapshot {
|
||||
fold_snapshot,
|
||||
tab_size,
|
||||
max_expansion_column: old_snapshot.max_expansion_column,
|
||||
@@ -137,15 +138,15 @@ impl TabMap {
|
||||
let new_start = fold_edit.new.start.to_point(&new_snapshot.fold_snapshot);
|
||||
let new_end = fold_edit.new.end.to_point(&new_snapshot.fold_snapshot);
|
||||
tab_edits.push(TabEdit {
|
||||
old: old_snapshot.to_tab_point(old_start)..old_snapshot.to_tab_point(old_end),
|
||||
new: new_snapshot.to_tab_point(new_start)..new_snapshot.to_tab_point(new_end),
|
||||
old: old_snapshot.to_char_point(old_start)..old_snapshot.to_char_point(old_end),
|
||||
new: new_snapshot.to_char_point(new_start)..new_snapshot.to_char_point(new_end),
|
||||
});
|
||||
}
|
||||
} else {
|
||||
new_snapshot.version += 1;
|
||||
tab_edits.push(TabEdit {
|
||||
old: TabPoint::zero()..old_snapshot.max_point(),
|
||||
new: TabPoint::zero()..new_snapshot.max_point(),
|
||||
old: CharPoint::zero()..old_snapshot.max_point(),
|
||||
new: CharPoint::zero()..new_snapshot.max_point(),
|
||||
});
|
||||
}
|
||||
|
||||
@@ -155,14 +156,14 @@ impl TabMap {
|
||||
}
|
||||
|
||||
#[derive(Clone)]
|
||||
pub struct TabSnapshot {
|
||||
pub struct CharSnapshot {
|
||||
pub fold_snapshot: FoldSnapshot,
|
||||
pub tab_size: NonZeroU32,
|
||||
pub max_expansion_column: u32,
|
||||
pub version: usize,
|
||||
}
|
||||
|
||||
impl TabSnapshot {
|
||||
impl CharSnapshot {
|
||||
pub fn buffer_snapshot(&self) -> &MultiBufferSnapshot {
|
||||
&self.fold_snapshot.inlay_snapshot.buffer
|
||||
}
|
||||
@@ -170,7 +171,7 @@ impl TabSnapshot {
|
||||
pub fn line_len(&self, row: u32) -> u32 {
|
||||
let max_point = self.max_point();
|
||||
if row < max_point.row() {
|
||||
self.to_tab_point(FoldPoint::new(row, self.fold_snapshot.line_len(row)))
|
||||
self.to_char_point(FoldPoint::new(row, self.fold_snapshot.line_len(row)))
|
||||
.0
|
||||
.column
|
||||
} else {
|
||||
@@ -179,10 +180,10 @@ impl TabSnapshot {
|
||||
}
|
||||
|
||||
pub fn text_summary(&self) -> TextSummary {
|
||||
self.text_summary_for_range(TabPoint::zero()..self.max_point())
|
||||
self.text_summary_for_range(CharPoint::zero()..self.max_point())
|
||||
}
|
||||
|
||||
pub fn text_summary_for_range(&self, range: Range<TabPoint>) -> TextSummary {
|
||||
pub fn text_summary_for_range(&self, range: Range<CharPoint>) -> TextSummary {
|
||||
let input_start = self.to_fold_point(range.start, Bias::Left).0;
|
||||
let input_end = self.to_fold_point(range.end, Bias::Right).0;
|
||||
let input_summary = self
|
||||
@@ -211,7 +212,7 @@ impl TabSnapshot {
|
||||
} else {
|
||||
for _ in self
|
||||
.chunks(
|
||||
TabPoint::new(range.end.row(), 0)..range.end,
|
||||
CharPoint::new(range.end.row(), 0)..range.end,
|
||||
false,
|
||||
Highlights::default(),
|
||||
)
|
||||
@@ -232,7 +233,7 @@ impl TabSnapshot {
|
||||
|
||||
pub fn chunks<'a>(
|
||||
&'a self,
|
||||
range: Range<TabPoint>,
|
||||
range: Range<CharPoint>,
|
||||
language_aware: bool,
|
||||
highlights: Highlights<'a>,
|
||||
) -> TabChunks<'a> {
|
||||
@@ -279,7 +280,7 @@ impl TabSnapshot {
|
||||
#[cfg(test)]
|
||||
pub fn text(&self) -> String {
|
||||
self.chunks(
|
||||
TabPoint::zero()..self.max_point(),
|
||||
CharPoint::zero()..self.max_point(),
|
||||
false,
|
||||
Highlights::default(),
|
||||
)
|
||||
@@ -287,24 +288,24 @@ impl TabSnapshot {
|
||||
.collect()
|
||||
}
|
||||
|
||||
pub fn max_point(&self) -> TabPoint {
|
||||
self.to_tab_point(self.fold_snapshot.max_point())
|
||||
pub fn max_point(&self) -> CharPoint {
|
||||
self.to_char_point(self.fold_snapshot.max_point())
|
||||
}
|
||||
|
||||
pub fn clip_point(&self, point: TabPoint, bias: Bias) -> TabPoint {
|
||||
self.to_tab_point(
|
||||
pub fn clip_point(&self, point: CharPoint, bias: Bias) -> CharPoint {
|
||||
self.to_char_point(
|
||||
self.fold_snapshot
|
||||
.clip_point(self.to_fold_point(point, bias).0, bias),
|
||||
)
|
||||
}
|
||||
|
||||
pub fn to_tab_point(&self, input: FoldPoint) -> TabPoint {
|
||||
pub fn to_char_point(&self, input: FoldPoint) -> CharPoint {
|
||||
let chars = self.fold_snapshot.chars_at(FoldPoint::new(input.row(), 0));
|
||||
let expanded = self.expand_tabs(chars, input.column());
|
||||
TabPoint::new(input.row(), expanded)
|
||||
CharPoint::new(input.row(), expanded)
|
||||
}
|
||||
|
||||
pub fn to_fold_point(&self, output: TabPoint, bias: Bias) -> (FoldPoint, u32, u32) {
|
||||
pub fn to_fold_point(&self, output: CharPoint, bias: Bias) -> (FoldPoint, u32, u32) {
|
||||
let chars = self.fold_snapshot.chars_at(FoldPoint::new(output.row(), 0));
|
||||
let expanded = output.column();
|
||||
let (collapsed, expanded_char_column, to_next_stop) =
|
||||
@@ -316,13 +317,13 @@ impl TabSnapshot {
|
||||
)
|
||||
}
|
||||
|
||||
pub fn make_tab_point(&self, point: Point, bias: Bias) -> TabPoint {
|
||||
pub fn make_char_point(&self, point: Point, bias: Bias) -> CharPoint {
|
||||
let inlay_point = self.fold_snapshot.inlay_snapshot.to_inlay_point(point);
|
||||
let fold_point = self.fold_snapshot.to_fold_point(inlay_point, bias);
|
||||
self.to_tab_point(fold_point)
|
||||
self.to_char_point(fold_point)
|
||||
}
|
||||
|
||||
pub fn to_point(&self, point: TabPoint, bias: Bias) -> Point {
|
||||
pub fn to_point(&self, point: CharPoint, bias: Bias) -> Point {
|
||||
let fold_point = self.to_fold_point(point, bias).0;
|
||||
let inlay_point = fold_point.to_inlay_point(&self.fold_snapshot);
|
||||
self.fold_snapshot
|
||||
@@ -345,6 +346,9 @@ impl TabSnapshot {
|
||||
let tab_len = tab_size - expanded_chars % tab_size;
|
||||
expanded_bytes += tab_len;
|
||||
expanded_chars += tab_len;
|
||||
} else if let Some(replacement) = replacement(c) {
|
||||
expanded_chars += replacement.chars().count() as u32;
|
||||
expanded_bytes += replacement.len() as u32;
|
||||
} else {
|
||||
expanded_bytes += c.len_utf8() as u32;
|
||||
expanded_chars += 1;
|
||||
@@ -384,6 +388,9 @@ impl TabSnapshot {
|
||||
Bias::Right => (collapsed_bytes + 1, expanded_chars, 0),
|
||||
};
|
||||
}
|
||||
} else if let Some(replacement) = replacement(c) {
|
||||
expanded_chars += replacement.chars().count() as u32;
|
||||
expanded_bytes += replacement.len() as u32;
|
||||
} else {
|
||||
expanded_chars += 1;
|
||||
expanded_bytes += c.len_utf8() as u32;
|
||||
@@ -405,9 +412,9 @@ impl TabSnapshot {
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone, Debug, Default, Eq, Ord, PartialOrd, PartialEq)]
|
||||
pub struct TabPoint(pub Point);
|
||||
pub struct CharPoint(pub Point);
|
||||
|
||||
impl TabPoint {
|
||||
impl CharPoint {
|
||||
pub fn new(row: u32, column: u32) -> Self {
|
||||
Self(Point::new(row, column))
|
||||
}
|
||||
@@ -425,13 +432,13 @@ impl TabPoint {
|
||||
}
|
||||
}
|
||||
|
||||
impl From<Point> for TabPoint {
|
||||
impl From<Point> for CharPoint {
|
||||
fn from(point: Point) -> Self {
|
||||
Self(point)
|
||||
}
|
||||
}
|
||||
|
||||
pub type TabEdit = text::Edit<TabPoint>;
|
||||
pub type TabEdit = text::Edit<CharPoint>;
|
||||
|
||||
#[derive(Clone, Debug, Default, Eq, PartialEq)]
|
||||
pub struct TextSummary {
|
||||
@@ -486,7 +493,7 @@ impl<'a> std::ops::AddAssign<&'a Self> for TextSummary {
|
||||
const SPACES: &str = " ";
|
||||
|
||||
pub struct TabChunks<'a> {
|
||||
snapshot: &'a TabSnapshot,
|
||||
snapshot: &'a CharSnapshot,
|
||||
fold_chunks: FoldChunks<'a>,
|
||||
chunk: Chunk<'a>,
|
||||
column: u32,
|
||||
@@ -499,7 +506,7 @@ pub struct TabChunks<'a> {
|
||||
}
|
||||
|
||||
impl<'a> TabChunks<'a> {
|
||||
pub(crate) fn seek(&mut self, range: Range<TabPoint>) {
|
||||
pub(crate) fn seek(&mut self, range: Range<CharPoint>) {
|
||||
let (input_start, expanded_char_column, to_next_stop) =
|
||||
self.snapshot.to_fold_point(range.start, Bias::Left);
|
||||
let input_column = input_start.column();
|
||||
@@ -584,6 +591,37 @@ impl<'a> Iterator for TabChunks<'a> {
|
||||
self.input_column = 0;
|
||||
self.output_position += Point::new(1, 0);
|
||||
}
|
||||
_ if is_invisible(c) => {
|
||||
if ix > 0 {
|
||||
let (prefix, suffix) = self.chunk.text.split_at(ix);
|
||||
self.chunk.text = suffix;
|
||||
return Some(Chunk {
|
||||
text: prefix,
|
||||
is_invisible: false,
|
||||
..self.chunk.clone()
|
||||
});
|
||||
}
|
||||
let c_len = c.len_utf8();
|
||||
let replacement = replacement(c).unwrap_or(&self.chunk.text[..c_len]);
|
||||
if self.chunk.text.len() >= c_len {
|
||||
self.chunk.text = &self.chunk.text[c_len..];
|
||||
} else {
|
||||
self.chunk.text = "";
|
||||
}
|
||||
let len = replacement.chars().count() as u32;
|
||||
let next_output_position = cmp::min(
|
||||
self.output_position + Point::new(0, len),
|
||||
self.max_output_position,
|
||||
);
|
||||
self.column += len;
|
||||
self.input_column += 1;
|
||||
self.output_position = next_output_position;
|
||||
return Some(Chunk {
|
||||
text: replacement,
|
||||
is_invisible: true,
|
||||
..self.chunk.clone()
|
||||
});
|
||||
}
|
||||
_ => {
|
||||
self.column += 1;
|
||||
if !self.inside_leading_tab {
|
||||
@@ -613,11 +651,11 @@ mod tests {
|
||||
let buffer_snapshot = buffer.read(cx).snapshot(cx);
|
||||
let (_, inlay_snapshot) = InlayMap::new(buffer_snapshot.clone());
|
||||
let (_, fold_snapshot) = FoldMap::new(inlay_snapshot);
|
||||
let (_, tab_snapshot) = TabMap::new(fold_snapshot, 4.try_into().unwrap());
|
||||
let (_, char_snapshot) = CharMap::new(fold_snapshot, 4.try_into().unwrap());
|
||||
|
||||
assert_eq!(tab_snapshot.expand_tabs("\t".chars(), 0), 0);
|
||||
assert_eq!(tab_snapshot.expand_tabs("\t".chars(), 1), 4);
|
||||
assert_eq!(tab_snapshot.expand_tabs("\ta".chars(), 2), 5);
|
||||
assert_eq!(char_snapshot.expand_tabs("\t".chars(), 0), 0);
|
||||
assert_eq!(char_snapshot.expand_tabs("\t".chars(), 1), 4);
|
||||
assert_eq!(char_snapshot.expand_tabs("\ta".chars(), 2), 5);
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
@@ -630,16 +668,16 @@ mod tests {
|
||||
let buffer_snapshot = buffer.read(cx).snapshot(cx);
|
||||
let (_, inlay_snapshot) = InlayMap::new(buffer_snapshot.clone());
|
||||
let (_, fold_snapshot) = FoldMap::new(inlay_snapshot);
|
||||
let (_, mut tab_snapshot) = TabMap::new(fold_snapshot, 4.try_into().unwrap());
|
||||
let (_, mut char_snapshot) = CharMap::new(fold_snapshot, 4.try_into().unwrap());
|
||||
|
||||
tab_snapshot.max_expansion_column = max_expansion_column;
|
||||
assert_eq!(tab_snapshot.text(), output);
|
||||
char_snapshot.max_expansion_column = max_expansion_column;
|
||||
assert_eq!(char_snapshot.text(), output);
|
||||
|
||||
for (ix, c) in input.char_indices() {
|
||||
assert_eq!(
|
||||
tab_snapshot
|
||||
char_snapshot
|
||||
.chunks(
|
||||
TabPoint::new(0, ix as u32)..tab_snapshot.max_point(),
|
||||
CharPoint::new(0, ix as u32)..char_snapshot.max_point(),
|
||||
false,
|
||||
Highlights::default(),
|
||||
)
|
||||
@@ -653,13 +691,13 @@ mod tests {
|
||||
let input_point = Point::new(0, ix as u32);
|
||||
let output_point = Point::new(0, output.find(c).unwrap() as u32);
|
||||
assert_eq!(
|
||||
tab_snapshot.to_tab_point(FoldPoint(input_point)),
|
||||
TabPoint(output_point),
|
||||
"to_tab_point({input_point:?})"
|
||||
char_snapshot.to_char_point(FoldPoint(input_point)),
|
||||
CharPoint(output_point),
|
||||
"to_char_point({input_point:?})"
|
||||
);
|
||||
assert_eq!(
|
||||
tab_snapshot
|
||||
.to_fold_point(TabPoint(output_point), Bias::Left)
|
||||
char_snapshot
|
||||
.to_fold_point(CharPoint(output_point), Bias::Left)
|
||||
.0,
|
||||
FoldPoint(input_point),
|
||||
"to_fold_point({output_point:?})"
|
||||
@@ -677,10 +715,10 @@ mod tests {
|
||||
let buffer_snapshot = buffer.read(cx).snapshot(cx);
|
||||
let (_, inlay_snapshot) = InlayMap::new(buffer_snapshot.clone());
|
||||
let (_, fold_snapshot) = FoldMap::new(inlay_snapshot);
|
||||
let (_, mut tab_snapshot) = TabMap::new(fold_snapshot, 4.try_into().unwrap());
|
||||
let (_, mut char_snapshot) = CharMap::new(fold_snapshot, 4.try_into().unwrap());
|
||||
|
||||
tab_snapshot.max_expansion_column = max_expansion_column;
|
||||
assert_eq!(tab_snapshot.text(), input);
|
||||
char_snapshot.max_expansion_column = max_expansion_column;
|
||||
assert_eq!(char_snapshot.text(), input);
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
@@ -691,10 +729,10 @@ mod tests {
|
||||
let buffer_snapshot = buffer.read(cx).snapshot(cx);
|
||||
let (_, inlay_snapshot) = InlayMap::new(buffer_snapshot.clone());
|
||||
let (_, fold_snapshot) = FoldMap::new(inlay_snapshot);
|
||||
let (_, tab_snapshot) = TabMap::new(fold_snapshot, 4.try_into().unwrap());
|
||||
let (_, char_snapshot) = CharMap::new(fold_snapshot, 4.try_into().unwrap());
|
||||
|
||||
assert_eq!(
|
||||
chunks(&tab_snapshot, TabPoint::zero()),
|
||||
chunks(&char_snapshot, CharPoint::zero()),
|
||||
vec![
|
||||
(" ".to_string(), true),
|
||||
(" ".to_string(), false),
|
||||
@@ -703,7 +741,7 @@ mod tests {
|
||||
]
|
||||
);
|
||||
assert_eq!(
|
||||
chunks(&tab_snapshot, TabPoint::new(0, 2)),
|
||||
chunks(&char_snapshot, CharPoint::new(0, 2)),
|
||||
vec![
|
||||
(" ".to_string(), true),
|
||||
(" ".to_string(), false),
|
||||
@@ -712,7 +750,7 @@ mod tests {
|
||||
]
|
||||
);
|
||||
|
||||
fn chunks(snapshot: &TabSnapshot, start: TabPoint) -> Vec<(String, bool)> {
|
||||
fn chunks(snapshot: &CharSnapshot, start: CharPoint) -> Vec<(String, bool)> {
|
||||
let mut chunks = Vec::new();
|
||||
let mut was_tab = false;
|
||||
let mut text = String::new();
|
||||
@@ -758,12 +796,12 @@ mod tests {
|
||||
let (inlay_snapshot, _) = inlay_map.randomly_mutate(&mut 0, &mut rng);
|
||||
log::info!("InlayMap text: {:?}", inlay_snapshot.text());
|
||||
|
||||
let (mut tab_map, _) = TabMap::new(fold_snapshot.clone(), tab_size);
|
||||
let tabs_snapshot = tab_map.set_max_expansion_column(32);
|
||||
let (mut char_map, _) = CharMap::new(fold_snapshot.clone(), tab_size);
|
||||
let tabs_snapshot = char_map.set_max_expansion_column(32);
|
||||
|
||||
let text = text::Rope::from(tabs_snapshot.text().as_str());
|
||||
log::info!(
|
||||
"TabMap text (tab size: {}): {:?}",
|
||||
"CharMap text (tab size: {}): {:?}",
|
||||
tab_size,
|
||||
tabs_snapshot.text(),
|
||||
);
|
||||
@@ -771,11 +809,11 @@ mod tests {
|
||||
for _ in 0..5 {
|
||||
let end_row = rng.gen_range(0..=text.max_point().row);
|
||||
let end_column = rng.gen_range(0..=text.line_len(end_row));
|
||||
let mut end = TabPoint(text.clip_point(Point::new(end_row, end_column), Bias::Right));
|
||||
let mut end = CharPoint(text.clip_point(Point::new(end_row, end_column), Bias::Right));
|
||||
let start_row = rng.gen_range(0..=text.max_point().row);
|
||||
let start_column = rng.gen_range(0..=text.line_len(start_row));
|
||||
let mut start =
|
||||
TabPoint(text.clip_point(Point::new(start_row, start_column), Bias::Left));
|
||||
CharPoint(text.clip_point(Point::new(start_row, start_column), Bias::Left));
|
||||
if start > end {
|
||||
mem::swap(&mut start, &mut end);
|
||||
}
|
||||
@@ -255,22 +255,6 @@ impl<'a> InlayChunks<'a> {
|
||||
self.buffer_chunk = None;
|
||||
self.output_offset = new_range.start;
|
||||
self.max_output_offset = new_range.end;
|
||||
|
||||
let mut highlight_endpoints = Vec::new();
|
||||
if let Some(text_highlights) = self.highlights.text_highlights {
|
||||
if !text_highlights.is_empty() {
|
||||
self.snapshot.apply_text_highlights(
|
||||
&mut self.transforms,
|
||||
&new_range,
|
||||
text_highlights,
|
||||
&mut highlight_endpoints,
|
||||
);
|
||||
self.transforms.seek(&new_range.start, Bias::Right, &());
|
||||
highlight_endpoints.sort();
|
||||
}
|
||||
}
|
||||
self.highlight_endpoints = highlight_endpoints.into_iter().peekable();
|
||||
self.active_highlights.clear();
|
||||
}
|
||||
|
||||
pub fn offset(&self) -> InlayOffset {
157 crates/editor/src/display_map/invisibles.rs (new file)
@@ -0,0 +1,157 @@
use std::sync::LazyLock;

use collections::HashMap;

// Invisibility in a Unicode context is not well defined, so we have to guess.
//
// We highlight all ASCII control codes, and Unicode whitespace, because they are likely
// to be confused with a normal space (U+0020).
//
// We also highlight the handful of blank non-space characters:
// U+2800 BRAILLE PATTERN BLANK - Category: So
// U+115F HANGUL CHOSEONG FILLER - Category: Lo
// U+1160 HANGUL JUNGSEONG FILLER - Category: Lo
// U+3164 HANGUL FILLER - Category: Lo
// U+FFA0 HALFWIDTH HANGUL FILLER - Category: Lo
// U+FFFC OBJECT REPLACEMENT CHARACTER - Category: So
//
// For the rest of Unicode, invisibility happens for two reasons:
// * A Format character (like a byte order mark or right-to-left override)
// * An invisible Nonspacing Mark character (like U+034F, or variation selectors)
//
// We don't consider unassigned codepoints invisible, as the font renderer already shows
// a replacement character in that case (and there are a *lot* of them).
//
// Control characters are mostly fine to highlight, except:
// * U+E0020..=U+E007F are used in emoji flags. We don't highlight them right now, but we could if we tightened our heuristics.
// * U+200D is used to join characters. We highlight this but don't replace it. As our font system ignores mid-glyph highlights, this mostly works to highlight unexpected uses.
//
// Nonspacing marks are handled like U+200D. This means that mid-glyph we ignore them, but
// this probably causes issues with end-of-glyph usage.
//
// ref: https://invisible-characters.com
// ref: https://www.compart.com/en/unicode/category/Cf
// ref: https://gist.github.com/ConradIrwin/f759e1fc29267143c4c7895aa495dca5?h=1
// ref: https://unicode.org/Public/emoji/13.0/emoji-test.txt
// ref: https://github.com/bits/UTF-8-Unicode-Test-Documents/blob/master/UTF-8_sequence_separated/utf8_sequence_0-0x10ffff_assigned_including-unprintable-asis.txt
pub fn is_invisible(c: char) -> bool {
    if c <= '\u{1f}' {
        c != '\t' && c != '\n' && c != '\r'
    } else if c >= '\u{7f}' {
        c <= '\u{9f}' || c.is_whitespace() || contains(c, &FORMAT) || contains(c, &OTHER)
    } else {
        false
    }
}

pub(crate) fn replacement(c: char) -> Option<&'static str> {
    if !is_invisible(c) {
        return None;
    }
    if c <= '\x7f' {
        REPLACEMENTS.get(&c).copied()
    } else if contains(c, &PRESERVE) {
        None
    } else {
        Some(" ")
    }
}

const REPLACEMENTS: LazyLock<HashMap<char, &'static str>> = LazyLock::new(|| {
    [
        ('\x00', "␀"),
        ('\x01', "␁"),
        ('\x02', "␂"),
        ('\x03', "␃"),
        ('\x04', "␄"),
        ('\x05', "␅"),
        ('\x06', "␆"),
        ('\x07', "␇"),
        ('\x08', "␈"),
        ('\x0B', "␋"),
        ('\x0C', "␌"),
        ('\x0D', "␍"),
        ('\x0E', "␎"),
        ('\x0F', "␏"),
        ('\x10', "␐"),
        ('\x11', "␑"),
        ('\x12', "␒"),
        ('\x13', "␓"),
        ('\x14', "␔"),
        ('\x15', "␕"),
        ('\x16', "␖"),
        ('\x17', "␗"),
        ('\x18', "␘"),
        ('\x19', "␙"),
        ('\x1A', "␚"),
        ('\x1B', "␛"),
        ('\x1C', "␜"),
        ('\x1D', "␝"),
        ('\x1E', "␞"),
        ('\x1F', "␟"),
        ('\u{007F}', "␡"),
    ]
    .into_iter()
    .collect()
});

// generated using ucd-generate: ucd-generate general-category --include Format --chars ucd-16.0.0
pub const FORMAT: &'static [(char, char)] = &[
    ('\u{ad}', '\u{ad}'),
    ('\u{600}', '\u{605}'),
    ('\u{61c}', '\u{61c}'),
    ('\u{6dd}', '\u{6dd}'),
    ('\u{70f}', '\u{70f}'),
    ('\u{890}', '\u{891}'),
    ('\u{8e2}', '\u{8e2}'),
    ('\u{180e}', '\u{180e}'),
    ('\u{200b}', '\u{200f}'),
    ('\u{202a}', '\u{202e}'),
    ('\u{2060}', '\u{2064}'),
    ('\u{2066}', '\u{206f}'),
    ('\u{feff}', '\u{feff}'),
    ('\u{fff9}', '\u{fffb}'),
    ('\u{110bd}', '\u{110bd}'),
    ('\u{110cd}', '\u{110cd}'),
    ('\u{13430}', '\u{1343f}'),
    ('\u{1bca0}', '\u{1bca3}'),
    ('\u{1d173}', '\u{1d17a}'),
    ('\u{e0001}', '\u{e0001}'),
    ('\u{e0020}', '\u{e007f}'),
];

// hand-made, based on https://invisible-characters.com (excluding Cf)
pub const OTHER: &'static [(char, char)] = &[
    ('\u{034f}', '\u{034f}'),
    ('\u{115F}', '\u{1160}'),
    ('\u{17b4}', '\u{17b5}'),
    ('\u{180b}', '\u{180d}'),
    ('\u{2800}', '\u{2800}'),
    ('\u{3164}', '\u{3164}'),
    ('\u{fe00}', '\u{fe0d}'),
    ('\u{ffa0}', '\u{ffa0}'),
    ('\u{fffc}', '\u{fffc}'),
    ('\u{e0100}', '\u{e01ef}'),
];

// a subset of FORMAT/OTHER that may appear within glyphs
const PRESERVE: &'static [(char, char)] = &[
    ('\u{034f}', '\u{034f}'),
    ('\u{200d}', '\u{200d}'),
    ('\u{17b4}', '\u{17b5}'),
    ('\u{180b}', '\u{180d}'),
    ('\u{e0061}', '\u{e007a}'),
    ('\u{e007f}', '\u{e007f}'),
];

fn contains(c: char, list: &[(char, char)]) -> bool {
    for (start, end) in list {
        if c < *start {
            return false;
        }
        if c <= *end {
            return true;
        }
    }
    false
}
|
||||
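To make the classification above concrete, here is a small usage sketch. It assumes the module's items are in scope inside the editor crate (`replacement` is `pub(crate)`): control codes get a picture glyph, Format characters are swapped for a blank, and the characters in `PRESERVE` are highlighted but never replaced.

```rust
// Assumes: use of is_invisible/replacement from the new invisibles module above.
fn describe(c: char) {
    // is_invisible drives highlighting; replacement drives what is drawn.
    println!(
        "U+{:04X} invisible={} replacement={:?}",
        c as u32,
        is_invisible(c),
        replacement(c)
    );
}

fn main() {
    describe('\t');       // tab: handled by tab expansion, not treated as invisible
    describe('\x07');     // BEL: invisible, drawn as "␇"
    describe('\u{00AD}'); // soft hyphen: Format character, replaced with a blank
    describe('\u{200D}'); // zero-width joiner: invisible but preserved mid-glyph
    describe('a');        // ordinary letter: not invisible
}
```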
@@ -1,6 +1,6 @@
|
||||
use super::{
|
||||
char_map::{self, CharPoint, CharSnapshot, TabEdit},
|
||||
fold_map::FoldBufferRows,
|
||||
tab_map::{self, TabEdit, TabPoint, TabSnapshot},
|
||||
Highlights,
|
||||
};
|
||||
use gpui::{AppContext, Context, Font, LineWrapper, Model, ModelContext, Pixels, Task};
|
||||
@@ -12,7 +12,7 @@ use std::{cmp, collections::VecDeque, mem, ops::Range, time::Duration};
|
||||
use sum_tree::{Bias, Cursor, SumTree};
|
||||
use text::Patch;
|
||||
|
||||
pub use super::tab_map::TextSummary;
|
||||
pub use super::char_map::TextSummary;
|
||||
pub type WrapEdit = text::Edit<u32>;
|
||||
|
||||
/// Handles soft wrapping of text.
|
||||
@@ -20,7 +20,7 @@ pub type WrapEdit = text::Edit<u32>;
|
||||
/// See the [`display_map` module documentation](crate::display_map) for more information.
|
||||
pub struct WrapMap {
|
||||
snapshot: WrapSnapshot,
|
||||
pending_edits: VecDeque<(TabSnapshot, Vec<TabEdit>)>,
|
||||
pending_edits: VecDeque<(CharSnapshot, Vec<TabEdit>)>,
|
||||
interpolated_edits: Patch<u32>,
|
||||
edits_since_sync: Patch<u32>,
|
||||
wrap_width: Option<Pixels>,
|
||||
@@ -30,7 +30,7 @@ pub struct WrapMap {
|
||||
|
||||
#[derive(Clone)]
|
||||
pub struct WrapSnapshot {
|
||||
tab_snapshot: TabSnapshot,
|
||||
char_snapshot: CharSnapshot,
|
||||
transforms: SumTree<Transform>,
|
||||
interpolated: bool,
|
||||
}
|
||||
@@ -51,11 +51,11 @@ struct TransformSummary {
|
||||
pub struct WrapPoint(pub Point);
|
||||
|
||||
pub struct WrapChunks<'a> {
|
||||
input_chunks: tab_map::TabChunks<'a>,
|
||||
input_chunks: char_map::TabChunks<'a>,
|
||||
input_chunk: Chunk<'a>,
|
||||
output_position: WrapPoint,
|
||||
max_output_row: u32,
|
||||
transforms: Cursor<'a, Transform, (WrapPoint, TabPoint)>,
|
||||
transforms: Cursor<'a, Transform, (WrapPoint, CharPoint)>,
|
||||
snapshot: &'a WrapSnapshot,
|
||||
}
|
||||
|
||||
@@ -66,7 +66,7 @@ pub struct WrapBufferRows<'a> {
|
||||
output_row: u32,
|
||||
soft_wrapped: bool,
|
||||
max_output_row: u32,
|
||||
transforms: Cursor<'a, Transform, (WrapPoint, TabPoint)>,
|
||||
transforms: Cursor<'a, Transform, (WrapPoint, CharPoint)>,
|
||||
}
|
||||
|
||||
impl<'a> WrapBufferRows<'a> {
|
||||
@@ -86,7 +86,7 @@ impl<'a> WrapBufferRows<'a> {
|
||||
|
||||
impl WrapMap {
|
||||
pub fn new(
|
||||
tab_snapshot: TabSnapshot,
|
||||
char_snapshot: CharSnapshot,
|
||||
font: Font,
|
||||
font_size: Pixels,
|
||||
wrap_width: Option<Pixels>,
|
||||
@@ -99,7 +99,7 @@ impl WrapMap {
|
||||
pending_edits: Default::default(),
|
||||
interpolated_edits: Default::default(),
|
||||
edits_since_sync: Default::default(),
|
||||
snapshot: WrapSnapshot::new(tab_snapshot),
|
||||
snapshot: WrapSnapshot::new(char_snapshot),
|
||||
background_task: None,
|
||||
};
|
||||
this.set_wrap_width(wrap_width, cx);
|
||||
@@ -117,17 +117,17 @@ impl WrapMap {
|
||||
|
||||
pub fn sync(
|
||||
&mut self,
|
||||
tab_snapshot: TabSnapshot,
|
||||
char_snapshot: CharSnapshot,
|
||||
edits: Vec<TabEdit>,
|
||||
cx: &mut ModelContext<Self>,
|
||||
) -> (WrapSnapshot, Patch<u32>) {
|
||||
if self.wrap_width.is_some() {
|
||||
self.pending_edits.push_back((tab_snapshot, edits));
|
||||
self.pending_edits.push_back((char_snapshot, edits));
|
||||
self.flush_edits(cx);
|
||||
} else {
|
||||
self.edits_since_sync = self
|
||||
.edits_since_sync
|
||||
.compose(self.snapshot.interpolate(tab_snapshot, &edits));
|
||||
.compose(self.snapshot.interpolate(char_snapshot, &edits));
|
||||
self.snapshot.interpolated = false;
|
||||
}
|
||||
|
||||
@@ -177,11 +177,11 @@ impl WrapMap {
|
||||
let (font, font_size) = self.font_with_size.clone();
|
||||
let task = cx.background_executor().spawn(async move {
|
||||
let mut line_wrapper = text_system.line_wrapper(font, font_size);
|
||||
let tab_snapshot = new_snapshot.tab_snapshot.clone();
|
||||
let range = TabPoint::zero()..tab_snapshot.max_point();
|
||||
let char_snapshot = new_snapshot.char_snapshot.clone();
|
||||
let range = CharPoint::zero()..char_snapshot.max_point();
|
||||
let edits = new_snapshot
|
||||
.update(
|
||||
tab_snapshot,
|
||||
char_snapshot,
|
||||
&[TabEdit {
|
||||
old: range.clone(),
|
||||
new: range.clone(),
|
||||
@@ -221,7 +221,7 @@ impl WrapMap {
|
||||
} else {
|
||||
let old_rows = self.snapshot.transforms.summary().output.lines.row + 1;
|
||||
self.snapshot.transforms = SumTree::default();
|
||||
let summary = self.snapshot.tab_snapshot.text_summary();
|
||||
let summary = self.snapshot.char_snapshot.text_summary();
|
||||
if !summary.lines.is_zero() {
|
||||
self.snapshot
|
||||
.transforms
|
||||
@@ -239,8 +239,8 @@ impl WrapMap {
|
||||
fn flush_edits(&mut self, cx: &mut ModelContext<Self>) {
|
||||
if !self.snapshot.interpolated {
|
||||
let mut to_remove_len = 0;
|
||||
for (tab_snapshot, _) in &self.pending_edits {
|
||||
if tab_snapshot.version <= self.snapshot.tab_snapshot.version {
|
||||
for (char_snapshot, _) in &self.pending_edits {
|
||||
if char_snapshot.version <= self.snapshot.char_snapshot.version {
|
||||
to_remove_len += 1;
|
||||
} else {
|
||||
break;
|
||||
@@ -262,9 +262,9 @@ impl WrapMap {
|
||||
let update_task = cx.background_executor().spawn(async move {
|
||||
let mut edits = Patch::default();
|
||||
let mut line_wrapper = text_system.line_wrapper(font, font_size);
|
||||
for (tab_snapshot, tab_edits) in pending_edits {
|
||||
for (char_snapshot, tab_edits) in pending_edits {
|
||||
let wrap_edits = snapshot
|
||||
.update(tab_snapshot, &tab_edits, wrap_width, &mut line_wrapper)
|
||||
.update(char_snapshot, &tab_edits, wrap_width, &mut line_wrapper)
|
||||
.await;
|
||||
edits = edits.compose(&wrap_edits);
|
||||
}
|
||||
@@ -301,11 +301,11 @@ impl WrapMap {
|
||||
|
||||
let was_interpolated = self.snapshot.interpolated;
|
||||
let mut to_remove_len = 0;
|
||||
for (tab_snapshot, edits) in &self.pending_edits {
|
||||
if tab_snapshot.version <= self.snapshot.tab_snapshot.version {
|
||||
for (char_snapshot, edits) in &self.pending_edits {
|
||||
if char_snapshot.version <= self.snapshot.char_snapshot.version {
|
||||
to_remove_len += 1;
|
||||
} else {
|
||||
let interpolated_edits = self.snapshot.interpolate(tab_snapshot.clone(), edits);
|
||||
let interpolated_edits = self.snapshot.interpolate(char_snapshot.clone(), edits);
|
||||
self.edits_since_sync = self.edits_since_sync.compose(&interpolated_edits);
|
||||
self.interpolated_edits = self.interpolated_edits.compose(&interpolated_edits);
|
||||
}
|
||||
@@ -318,45 +318,49 @@ impl WrapMap {
|
||||
}
|
||||
|
||||
impl WrapSnapshot {
|
||||
fn new(tab_snapshot: TabSnapshot) -> Self {
|
||||
fn new(char_snapshot: CharSnapshot) -> Self {
|
||||
let mut transforms = SumTree::default();
|
||||
let extent = tab_snapshot.text_summary();
|
||||
let extent = char_snapshot.text_summary();
|
||||
if !extent.lines.is_zero() {
|
||||
transforms.push(Transform::isomorphic(extent), &());
|
||||
}
|
||||
Self {
|
||||
transforms,
|
||||
tab_snapshot,
|
||||
char_snapshot,
|
||||
interpolated: true,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn buffer_snapshot(&self) -> &MultiBufferSnapshot {
|
||||
self.tab_snapshot.buffer_snapshot()
|
||||
self.char_snapshot.buffer_snapshot()
|
||||
}
|
||||
|
||||
fn interpolate(&mut self, new_tab_snapshot: TabSnapshot, tab_edits: &[TabEdit]) -> Patch<u32> {
|
||||
fn interpolate(
|
||||
&mut self,
|
||||
new_char_snapshot: CharSnapshot,
|
||||
tab_edits: &[TabEdit],
|
||||
) -> Patch<u32> {
|
||||
let mut new_transforms;
|
||||
if tab_edits.is_empty() {
|
||||
new_transforms = self.transforms.clone();
|
||||
} else {
|
||||
let mut old_cursor = self.transforms.cursor::<TabPoint>(&());
|
||||
let mut old_cursor = self.transforms.cursor::<CharPoint>(&());
|
||||
|
||||
let mut tab_edits_iter = tab_edits.iter().peekable();
|
||||
new_transforms =
|
||||
old_cursor.slice(&tab_edits_iter.peek().unwrap().old.start, Bias::Right, &());
|
||||
|
||||
while let Some(edit) = tab_edits_iter.next() {
|
||||
if edit.new.start > TabPoint::from(new_transforms.summary().input.lines) {
|
||||
let summary = new_tab_snapshot.text_summary_for_range(
|
||||
TabPoint::from(new_transforms.summary().input.lines)..edit.new.start,
|
||||
if edit.new.start > CharPoint::from(new_transforms.summary().input.lines) {
|
||||
let summary = new_char_snapshot.text_summary_for_range(
|
||||
CharPoint::from(new_transforms.summary().input.lines)..edit.new.start,
|
||||
);
|
||||
new_transforms.push_or_extend(Transform::isomorphic(summary));
|
||||
}
|
||||
|
||||
if !edit.new.is_empty() {
|
||||
new_transforms.push_or_extend(Transform::isomorphic(
|
||||
new_tab_snapshot.text_summary_for_range(edit.new.clone()),
|
||||
new_char_snapshot.text_summary_for_range(edit.new.clone()),
|
||||
));
|
||||
}
|
||||
|
||||
@@ -365,7 +369,7 @@ impl WrapSnapshot {
|
||||
if next_edit.old.start > old_cursor.end(&()) {
|
||||
if old_cursor.end(&()) > edit.old.end {
|
||||
let summary = self
|
||||
.tab_snapshot
|
||||
.char_snapshot
|
||||
.text_summary_for_range(edit.old.end..old_cursor.end(&()));
|
||||
new_transforms.push_or_extend(Transform::isomorphic(summary));
|
||||
}
|
||||
@@ -379,7 +383,7 @@ impl WrapSnapshot {
|
||||
} else {
|
||||
if old_cursor.end(&()) > edit.old.end {
|
||||
let summary = self
|
||||
.tab_snapshot
|
||||
.char_snapshot
|
||||
.text_summary_for_range(edit.old.end..old_cursor.end(&()));
|
||||
new_transforms.push_or_extend(Transform::isomorphic(summary));
|
||||
}
|
||||
@@ -392,7 +396,7 @@ impl WrapSnapshot {
|
||||
let old_snapshot = mem::replace(
|
||||
self,
|
||||
WrapSnapshot {
|
||||
tab_snapshot: new_tab_snapshot,
|
||||
char_snapshot: new_char_snapshot,
|
||||
transforms: new_transforms,
|
||||
interpolated: true,
|
||||
},
|
||||
@@ -403,7 +407,7 @@ impl WrapSnapshot {
|
||||
|
||||
async fn update(
|
||||
&mut self,
|
||||
new_tab_snapshot: TabSnapshot,
|
||||
new_char_snapshot: CharSnapshot,
|
||||
tab_edits: &[TabEdit],
|
||||
wrap_width: Pixels,
|
||||
line_wrapper: &mut LineWrapper,
|
||||
@@ -440,27 +444,27 @@ impl WrapSnapshot {
|
||||
new_transforms = self.transforms.clone();
|
||||
} else {
|
||||
let mut row_edits = row_edits.into_iter().peekable();
|
||||
let mut old_cursor = self.transforms.cursor::<TabPoint>(&());
|
||||
let mut old_cursor = self.transforms.cursor::<CharPoint>(&());
|
||||
|
||||
new_transforms = old_cursor.slice(
|
||||
&TabPoint::new(row_edits.peek().unwrap().old_rows.start, 0),
|
||||
&CharPoint::new(row_edits.peek().unwrap().old_rows.start, 0),
|
||||
Bias::Right,
|
||||
&(),
|
||||
);
|
||||
|
||||
while let Some(edit) = row_edits.next() {
|
||||
if edit.new_rows.start > new_transforms.summary().input.lines.row {
|
||||
let summary = new_tab_snapshot.text_summary_for_range(
|
||||
TabPoint(new_transforms.summary().input.lines)
|
||||
..TabPoint::new(edit.new_rows.start, 0),
|
||||
let summary = new_char_snapshot.text_summary_for_range(
|
||||
CharPoint(new_transforms.summary().input.lines)
|
||||
..CharPoint::new(edit.new_rows.start, 0),
|
||||
);
|
||||
new_transforms.push_or_extend(Transform::isomorphic(summary));
|
||||
}
|
||||
|
||||
let mut line = String::new();
|
||||
let mut remaining = None;
|
||||
let mut chunks = new_tab_snapshot.chunks(
|
||||
TabPoint::new(edit.new_rows.start, 0)..new_tab_snapshot.max_point(),
|
||||
let mut chunks = new_char_snapshot.chunks(
|
||||
CharPoint::new(edit.new_rows.start, 0)..new_char_snapshot.max_point(),
|
||||
false,
|
||||
Highlights::default(),
|
||||
);
|
||||
@@ -507,19 +511,19 @@ impl WrapSnapshot {
|
||||
}
|
||||
new_transforms.extend(edit_transforms, &());
|
||||
|
||||
old_cursor.seek_forward(&TabPoint::new(edit.old_rows.end, 0), Bias::Right, &());
|
||||
old_cursor.seek_forward(&CharPoint::new(edit.old_rows.end, 0), Bias::Right, &());
|
||||
if let Some(next_edit) = row_edits.peek() {
|
||||
if next_edit.old_rows.start > old_cursor.end(&()).row() {
|
||||
if old_cursor.end(&()) > TabPoint::new(edit.old_rows.end, 0) {
|
||||
let summary = self.tab_snapshot.text_summary_for_range(
|
||||
TabPoint::new(edit.old_rows.end, 0)..old_cursor.end(&()),
|
||||
if old_cursor.end(&()) > CharPoint::new(edit.old_rows.end, 0) {
|
||||
let summary = self.char_snapshot.text_summary_for_range(
|
||||
CharPoint::new(edit.old_rows.end, 0)..old_cursor.end(&()),
|
||||
);
|
||||
new_transforms.push_or_extend(Transform::isomorphic(summary));
|
||||
}
|
||||
old_cursor.next(&());
|
||||
new_transforms.append(
|
||||
old_cursor.slice(
|
||||
&TabPoint::new(next_edit.old_rows.start, 0),
|
||||
&CharPoint::new(next_edit.old_rows.start, 0),
|
||||
Bias::Right,
|
||||
&(),
|
||||
),
|
||||
@@ -527,9 +531,9 @@ impl WrapSnapshot {
|
||||
);
|
||||
}
|
||||
} else {
|
||||
if old_cursor.end(&()) > TabPoint::new(edit.old_rows.end, 0) {
|
||||
let summary = self.tab_snapshot.text_summary_for_range(
|
||||
TabPoint::new(edit.old_rows.end, 0)..old_cursor.end(&()),
|
||||
if old_cursor.end(&()) > CharPoint::new(edit.old_rows.end, 0) {
|
||||
let summary = self.char_snapshot.text_summary_for_range(
|
||||
CharPoint::new(edit.old_rows.end, 0)..old_cursor.end(&()),
|
||||
);
|
||||
new_transforms.push_or_extend(Transform::isomorphic(summary));
|
||||
}
|
||||
@@ -542,7 +546,7 @@ impl WrapSnapshot {
|
||||
let old_snapshot = mem::replace(
|
||||
self,
|
||||
WrapSnapshot {
|
||||
tab_snapshot: new_tab_snapshot,
|
||||
char_snapshot: new_char_snapshot,
|
||||
transforms: new_transforms,
|
||||
interpolated: false,
|
||||
},
|
||||
@@ -595,17 +599,17 @@ impl WrapSnapshot {
|
||||
) -> WrapChunks<'a> {
|
||||
let output_start = WrapPoint::new(rows.start, 0);
|
||||
let output_end = WrapPoint::new(rows.end, 0);
|
||||
let mut transforms = self.transforms.cursor::<(WrapPoint, TabPoint)>(&());
|
||||
let mut transforms = self.transforms.cursor::<(WrapPoint, CharPoint)>(&());
|
||||
transforms.seek(&output_start, Bias::Right, &());
|
||||
let mut input_start = TabPoint(transforms.start().1 .0);
|
||||
let mut input_start = CharPoint(transforms.start().1 .0);
|
||||
if transforms.item().map_or(false, |t| t.is_isomorphic()) {
|
||||
input_start.0 += output_start.0 - transforms.start().0 .0;
|
||||
}
|
||||
let input_end = self
|
||||
.to_tab_point(output_end)
|
||||
.min(self.tab_snapshot.max_point());
|
||||
.to_char_point(output_end)
|
||||
.min(self.char_snapshot.max_point());
|
||||
WrapChunks {
|
||||
input_chunks: self.tab_snapshot.chunks(
|
||||
input_chunks: self.char_snapshot.chunks(
|
||||
input_start..input_end,
|
||||
language_aware,
|
||||
highlights,
|
||||
@@ -623,7 +627,7 @@ impl WrapSnapshot {
|
||||
}
|
||||
|
||||
pub fn line_len(&self, row: u32) -> u32 {
|
||||
let mut cursor = self.transforms.cursor::<(WrapPoint, TabPoint)>(&());
|
||||
let mut cursor = self.transforms.cursor::<(WrapPoint, CharPoint)>(&());
|
||||
cursor.seek(&WrapPoint::new(row + 1, 0), Bias::Left, &());
|
||||
if cursor
|
||||
.item()
|
||||
@@ -631,7 +635,7 @@ impl WrapSnapshot {
|
||||
{
|
||||
let overshoot = row - cursor.start().0.row();
|
||||
let tab_row = cursor.start().1.row() + overshoot;
|
||||
let tab_line_len = self.tab_snapshot.line_len(tab_row);
|
||||
let tab_line_len = self.char_snapshot.line_len(tab_row);
|
||||
if overshoot == 0 {
|
||||
cursor.start().0.column() + (tab_line_len - cursor.start().1.column())
|
||||
} else {
|
||||
@@ -648,15 +652,17 @@ impl WrapSnapshot {
|
||||
let start = WrapPoint::new(rows.start, 0);
|
||||
let end = WrapPoint::new(rows.end, 0);
|
||||
|
||||
let mut cursor = self.transforms.cursor::<(WrapPoint, TabPoint)>(&());
|
||||
let mut cursor = self.transforms.cursor::<(WrapPoint, CharPoint)>(&());
|
||||
cursor.seek(&start, Bias::Right, &());
|
||||
if let Some(transform) = cursor.item() {
|
||||
let start_in_transform = start.0 - cursor.start().0 .0;
|
||||
let end_in_transform = cmp::min(end, cursor.end(&()).0).0 - cursor.start().0 .0;
|
||||
if transform.is_isomorphic() {
|
||||
let tab_start = TabPoint(cursor.start().1 .0 + start_in_transform);
|
||||
let tab_end = TabPoint(cursor.start().1 .0 + end_in_transform);
|
||||
summary += &self.tab_snapshot.text_summary_for_range(tab_start..tab_end);
|
||||
let char_start = CharPoint(cursor.start().1 .0 + start_in_transform);
|
||||
let char_end = CharPoint(cursor.start().1 .0 + end_in_transform);
|
||||
summary += &self
|
||||
.char_snapshot
|
||||
.text_summary_for_range(char_start..char_end);
|
||||
} else {
|
||||
debug_assert_eq!(start_in_transform.row, end_in_transform.row);
|
||||
let indent_len = end_in_transform.column - start_in_transform.column;
|
||||
@@ -681,9 +687,9 @@ impl WrapSnapshot {
|
||||
let end_in_transform = end.0 - cursor.start().0 .0;
|
||||
if transform.is_isomorphic() {
|
||||
let char_start = cursor.start().1;
|
||||
let char_end = TabPoint(char_start.0 + end_in_transform);
|
||||
let char_end = CharPoint(char_start.0 + end_in_transform);
|
||||
summary += &self
|
||||
.tab_snapshot
|
||||
.char_snapshot
|
||||
.text_summary_for_range(char_start..char_end);
|
||||
} else {
|
||||
debug_assert_eq!(end_in_transform, Point::new(1, 0));
|
||||
@@ -718,14 +724,14 @@ impl WrapSnapshot {
|
||||
}
|
||||
|
||||
pub fn buffer_rows(&self, start_row: u32) -> WrapBufferRows {
|
||||
let mut transforms = self.transforms.cursor::<(WrapPoint, TabPoint)>(&());
|
||||
let mut transforms = self.transforms.cursor::<(WrapPoint, CharPoint)>(&());
|
||||
transforms.seek(&WrapPoint::new(start_row, 0), Bias::Left, &());
|
||||
let mut input_row = transforms.start().1.row();
|
||||
if transforms.item().map_or(false, |t| t.is_isomorphic()) {
|
||||
input_row += start_row - transforms.start().0.row();
|
||||
}
|
||||
let soft_wrapped = transforms.item().map_or(false, |t| !t.is_isomorphic());
|
||||
let mut input_buffer_rows = self.tab_snapshot.buffer_rows(input_row);
|
||||
let mut input_buffer_rows = self.char_snapshot.buffer_rows(input_row);
|
||||
let input_buffer_row = input_buffer_rows.next().unwrap();
|
||||
WrapBufferRows {
|
||||
transforms,
|
||||
@@ -737,26 +743,26 @@ impl WrapSnapshot {
|
||||
}
|
||||
}
|
||||
|
||||
pub fn to_tab_point(&self, point: WrapPoint) -> TabPoint {
|
||||
let mut cursor = self.transforms.cursor::<(WrapPoint, TabPoint)>(&());
|
||||
pub fn to_char_point(&self, point: WrapPoint) -> CharPoint {
|
||||
let mut cursor = self.transforms.cursor::<(WrapPoint, CharPoint)>(&());
|
||||
cursor.seek(&point, Bias::Right, &());
|
||||
let mut tab_point = cursor.start().1 .0;
|
||||
let mut char_point = cursor.start().1 .0;
|
||||
if cursor.item().map_or(false, |t| t.is_isomorphic()) {
|
||||
tab_point += point.0 - cursor.start().0 .0;
|
||||
char_point += point.0 - cursor.start().0 .0;
|
||||
}
|
||||
TabPoint(tab_point)
|
||||
CharPoint(char_point)
|
||||
}
|
||||
|
||||
pub fn to_point(&self, point: WrapPoint, bias: Bias) -> Point {
|
||||
self.tab_snapshot.to_point(self.to_tab_point(point), bias)
|
||||
self.char_snapshot.to_point(self.to_char_point(point), bias)
|
||||
}
|
||||
|
||||
pub fn make_wrap_point(&self, point: Point, bias: Bias) -> WrapPoint {
|
||||
self.tab_point_to_wrap_point(self.tab_snapshot.make_tab_point(point, bias))
|
||||
self.char_point_to_wrap_point(self.char_snapshot.make_char_point(point, bias))
|
||||
}
|
||||
|
||||
pub fn tab_point_to_wrap_point(&self, point: TabPoint) -> WrapPoint {
|
||||
let mut cursor = self.transforms.cursor::<(TabPoint, WrapPoint)>(&());
|
||||
pub fn char_point_to_wrap_point(&self, point: CharPoint) -> WrapPoint {
|
||||
let mut cursor = self.transforms.cursor::<(CharPoint, WrapPoint)>(&());
|
||||
cursor.seek(&point, Bias::Right, &());
|
||||
WrapPoint(cursor.start().1 .0 + (point.0 - cursor.start().0 .0))
|
||||
}
|
||||
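The conversions above all have the same shape: seek a cursor over the transform tree to the segment containing the point, start from that segment's position in the other coordinate space, and carry the overshoot within the segment across (for the wrap-to-char direction, only when the segment is isomorphic). A flattened sketch over a plain `Vec` of segments, ignoring columns and the sum tree entirely (the `Segment` type is hypothetical, not the editor's `Transform`):

```rust
#[derive(Clone, Copy)]
struct Segment {
    input_rows: u32,  // rows consumed from the char map
    output_rows: u32, // rows produced in the wrap map
    isomorphic: bool, // true when input and output rows correspond 1:1
}

// Map an output (wrap) row back to an input (char) row.
fn to_input_row(segments: &[Segment], mut output_row: u32) -> u32 {
    let mut input_row = 0;
    for segment in segments {
        if output_row < segment.output_rows {
            // Inside this segment: only isomorphic segments carry the overshoot across.
            return input_row + if segment.isomorphic { output_row } else { 0 };
        }
        output_row -= segment.output_rows;
        input_row += segment.input_rows;
    }
    input_row
}
```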
@@ -771,7 +777,10 @@ impl WrapSnapshot {
|
||||
}
|
||||
}
|
||||
|
||||
self.tab_point_to_wrap_point(self.tab_snapshot.clip_point(self.to_tab_point(point), bias))
|
||||
self.char_point_to_wrap_point(
|
||||
self.char_snapshot
|
||||
.clip_point(self.to_char_point(point), bias),
|
||||
)
|
||||
}
|
||||
|
||||
pub fn prev_row_boundary(&self, mut point: WrapPoint) -> u32 {
|
||||
@@ -781,7 +790,7 @@ impl WrapSnapshot {
|
||||
|
||||
*point.column_mut() = 0;
|
||||
|
||||
let mut cursor = self.transforms.cursor::<(WrapPoint, TabPoint)>(&());
|
||||
let mut cursor = self.transforms.cursor::<(WrapPoint, CharPoint)>(&());
|
||||
cursor.seek(&point, Bias::Right, &());
|
||||
if cursor.item().is_none() {
|
||||
cursor.prev(&());
|
||||
@@ -801,7 +810,7 @@ impl WrapSnapshot {
|
||||
pub fn next_row_boundary(&self, mut point: WrapPoint) -> Option<u32> {
|
||||
point.0 += Point::new(1, 0);
|
||||
|
||||
let mut cursor = self.transforms.cursor::<(WrapPoint, TabPoint)>(&());
|
||||
let mut cursor = self.transforms.cursor::<(WrapPoint, CharPoint)>(&());
|
||||
cursor.seek(&point, Bias::Right, &());
|
||||
while let Some(transform) = cursor.item() {
|
||||
if transform.is_isomorphic() && cursor.start().1.column() == 0 {
|
||||
@@ -833,8 +842,8 @@ impl WrapSnapshot {
|
||||
#[cfg(test)]
|
||||
{
|
||||
assert_eq!(
|
||||
TabPoint::from(self.transforms.summary().input.lines),
|
||||
self.tab_snapshot.max_point()
|
||||
CharPoint::from(self.transforms.summary().input.lines),
|
||||
self.char_snapshot.max_point()
|
||||
);
|
||||
|
||||
{
|
||||
@@ -847,18 +856,18 @@ impl WrapSnapshot {
|
||||
}
|
||||
|
||||
let text = language::Rope::from(self.text().as_str());
|
||||
let mut input_buffer_rows = self.tab_snapshot.buffer_rows(0);
|
||||
let mut input_buffer_rows = self.char_snapshot.buffer_rows(0);
|
||||
let mut expected_buffer_rows = Vec::new();
|
||||
let mut prev_tab_row = 0;
|
||||
for display_row in 0..=self.max_point().row() {
|
||||
let tab_point = self.to_tab_point(WrapPoint::new(display_row, 0));
|
||||
if tab_point.row() == prev_tab_row && display_row != 0 {
|
||||
let char_point = self.to_char_point(WrapPoint::new(display_row, 0));
|
||||
if char_point.row() == prev_tab_row && display_row != 0 {
|
||||
expected_buffer_rows.push(None);
|
||||
} else {
|
||||
expected_buffer_rows.push(input_buffer_rows.next().unwrap());
|
||||
}
|
||||
|
||||
prev_tab_row = tab_point.row();
|
||||
prev_tab_row = char_point.row();
|
||||
assert_eq!(self.line_len(display_row), text.line_len(display_row));
|
||||
}
|
||||
|
||||
@@ -880,14 +889,14 @@ impl<'a> WrapChunks<'a> {
|
||||
let output_start = WrapPoint::new(rows.start, 0);
|
||||
let output_end = WrapPoint::new(rows.end, 0);
|
||||
self.transforms.seek(&output_start, Bias::Right, &());
|
||||
let mut input_start = TabPoint(self.transforms.start().1 .0);
|
||||
let mut input_start = CharPoint(self.transforms.start().1 .0);
|
||||
if self.transforms.item().map_or(false, |t| t.is_isomorphic()) {
|
||||
input_start.0 += output_start.0 - self.transforms.start().0 .0;
|
||||
}
|
||||
let input_end = self
|
||||
.snapshot
|
||||
.to_tab_point(output_end)
|
||||
.min(self.snapshot.tab_snapshot.max_point());
|
||||
.to_char_point(output_end)
|
||||
.min(self.snapshot.char_snapshot.max_point());
|
||||
self.input_chunks.seek(input_start..input_end);
|
||||
self.input_chunk = Chunk::default();
|
||||
self.output_position = output_start;
|
||||
@@ -942,13 +951,11 @@ impl<'a> Iterator for WrapChunks<'a> {
|
||||
} else {
|
||||
*self.output_position.column_mut() += char_len as u32;
|
||||
}
|
||||
|
||||
if self.output_position >= transform_end {
|
||||
self.transforms.next(&());
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
let (prefix, suffix) = self.input_chunk.text.split_at(input_len);
|
||||
self.input_chunk.text = suffix;
|
||||
Some(Chunk {
|
||||
@@ -1103,7 +1110,7 @@ impl sum_tree::Summary for TransformSummary {
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> sum_tree::Dimension<'a, TransformSummary> for TabPoint {
|
||||
impl<'a> sum_tree::Dimension<'a, TransformSummary> for CharPoint {
|
||||
fn zero(_cx: &()) -> Self {
|
||||
Default::default()
|
||||
}
|
||||
@@ -1113,7 +1120,7 @@ impl<'a> sum_tree::Dimension<'a, TransformSummary> for TabPoint {
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> sum_tree::SeekTarget<'a, TransformSummary, TransformSummary> for TabPoint {
|
||||
impl<'a> sum_tree::SeekTarget<'a, TransformSummary, TransformSummary> for CharPoint {
|
||||
fn cmp(&self, cursor_location: &TransformSummary, _: &()) -> std::cmp::Ordering {
|
||||
Ord::cmp(&self.0, &cursor_location.input.lines)
|
||||
}
|
||||
@@ -1161,7 +1168,7 @@ fn consolidate_wrap_edits(edits: Vec<WrapEdit>) -> Vec<WrapEdit> {
|
||||
mod tests {
|
||||
use super::*;
|
||||
use crate::{
|
||||
display_map::{fold_map::FoldMap, inlay_map::InlayMap, tab_map::TabMap},
|
||||
display_map::{char_map::CharMap, fold_map::FoldMap, inlay_map::InlayMap},
|
||||
MultiBuffer,
|
||||
};
|
||||
use gpui::{font, px, test::observe};
|
||||
@@ -1213,9 +1220,9 @@ mod tests {
|
||||
log::info!("InlayMap text: {:?}", inlay_snapshot.text());
|
||||
let (mut fold_map, fold_snapshot) = FoldMap::new(inlay_snapshot.clone());
|
||||
log::info!("FoldMap text: {:?}", fold_snapshot.text());
|
||||
let (mut tab_map, _) = TabMap::new(fold_snapshot.clone(), tab_size);
|
||||
let tabs_snapshot = tab_map.set_max_expansion_column(32);
|
||||
log::info!("TabMap text: {:?}", tabs_snapshot.text());
|
||||
let (mut char_map, _) = CharMap::new(fold_snapshot.clone(), tab_size);
|
||||
let tabs_snapshot = char_map.set_max_expansion_column(32);
|
||||
log::info!("CharMap text: {:?}", tabs_snapshot.text());
|
||||
|
||||
let mut line_wrapper = text_system.line_wrapper(font.clone(), font_size);
|
||||
let unwrapped_text = tabs_snapshot.text();
|
||||
@@ -1261,7 +1268,7 @@ mod tests {
|
||||
20..=39 => {
|
||||
for (fold_snapshot, fold_edits) in fold_map.randomly_mutate(&mut rng) {
|
||||
let (tabs_snapshot, tab_edits) =
|
||||
tab_map.sync(fold_snapshot, fold_edits, tab_size);
|
||||
char_map.sync(fold_snapshot, fold_edits, tab_size);
|
||||
let (mut snapshot, wrap_edits) =
|
||||
wrap_map.update(cx, |map, cx| map.sync(tabs_snapshot, tab_edits, cx));
|
||||
snapshot.check_invariants();
|
||||
@@ -1274,7 +1281,7 @@ mod tests {
|
||||
inlay_map.randomly_mutate(&mut next_inlay_id, &mut rng);
|
||||
let (fold_snapshot, fold_edits) = fold_map.read(inlay_snapshot, inlay_edits);
|
||||
let (tabs_snapshot, tab_edits) =
|
||||
tab_map.sync(fold_snapshot, fold_edits, tab_size);
|
||||
char_map.sync(fold_snapshot, fold_edits, tab_size);
|
||||
let (mut snapshot, wrap_edits) =
|
||||
wrap_map.update(cx, |map, cx| map.sync(tabs_snapshot, tab_edits, cx));
|
||||
snapshot.check_invariants();
|
||||
@@ -1298,8 +1305,8 @@ mod tests {
|
||||
log::info!("InlayMap text: {:?}", inlay_snapshot.text());
|
||||
let (fold_snapshot, fold_edits) = fold_map.read(inlay_snapshot, inlay_edits);
|
||||
log::info!("FoldMap text: {:?}", fold_snapshot.text());
|
||||
let (tabs_snapshot, tab_edits) = tab_map.sync(fold_snapshot, fold_edits, tab_size);
|
||||
log::info!("TabMap text: {:?}", tabs_snapshot.text());
|
||||
let (tabs_snapshot, tab_edits) = char_map.sync(fold_snapshot, fold_edits, tab_size);
|
||||
log::info!("CharMap text: {:?}", tabs_snapshot.text());
|
||||
|
||||
let unwrapped_text = tabs_snapshot.text();
|
||||
let expected_text = wrap_text(&unwrapped_text, wrap_width, &mut line_wrapper);
|
||||
@@ -1345,7 +1352,7 @@ mod tests {
|
||||
|
||||
if tab_size.get() == 1
|
||||
|| !wrapped_snapshot
|
||||
.tab_snapshot
|
||||
.char_snapshot
|
||||
.fold_snapshot
|
||||
.text()
|
||||
.contains('\t')
|
||||
|
||||
@@ -3244,21 +3244,9 @@ impl Editor {
|
||||
}
|
||||
|
||||
if enabled && pair.start.ends_with(text.as_ref()) {
|
||||
let prefix_len = pair.start.len() - text.len();
|
||||
let preceding_text_matches_prefix = prefix_len == 0
|
||||
|| (selection.start.column >= (prefix_len as u32)
|
||||
&& snapshot.contains_str_at(
|
||||
Point::new(
|
||||
selection.start.row,
|
||||
selection.start.column - (prefix_len as u32),
|
||||
),
|
||||
&pair.start[..prefix_len],
|
||||
));
|
||||
if preceding_text_matches_prefix {
|
||||
bracket_pair = Some(pair.clone());
|
||||
is_bracket_pair_start = true;
|
||||
break;
|
||||
}
|
||||
bracket_pair = Some(pair.clone());
|
||||
is_bracket_pair_start = true;
|
||||
break;
|
||||
}
|
||||
if pair.end.as_str() == text.as_ref() {
|
||||
bracket_pair = Some(pair.clone());
|
||||
@@ -3275,6 +3263,8 @@ impl Editor {
|
||||
self.use_auto_surround && snapshot_settings.use_auto_surround;
|
||||
if selection.is_empty() {
|
||||
if is_bracket_pair_start {
|
||||
let prefix_len = bracket_pair.start.len() - text.len();
|
||||
|
||||
// If the inserted text is a suffix of an opening bracket and the
|
||||
// selection is preceded by the rest of the opening bracket, then
|
||||
// insert the closing bracket.
|
||||
@@ -3282,25 +3272,20 @@ impl Editor {
|
||||
.chars_at(selection.start)
|
||||
.next()
|
||||
.map_or(true, |c| scope.should_autoclose_before(c));
|
||||
|
||||
let is_closing_quote = if bracket_pair.end == bracket_pair.start
|
||||
&& bracket_pair.start.len() == 1
|
||||
{
|
||||
let target = bracket_pair.start.chars().next().unwrap();
|
||||
let current_line_count = snapshot
|
||||
.reversed_chars_at(selection.start)
|
||||
.take_while(|&c| c != '\n')
|
||||
.filter(|&c| c == target)
|
||||
.count();
|
||||
current_line_count % 2 == 1
|
||||
} else {
|
||||
false
|
||||
};
|
||||
let preceding_text_matches_prefix = prefix_len == 0
|
||||
|| (selection.start.column >= (prefix_len as u32)
|
||||
&& snapshot.contains_str_at(
|
||||
Point::new(
|
||||
selection.start.row,
|
||||
selection.start.column - (prefix_len as u32),
|
||||
),
|
||||
&bracket_pair.start[..prefix_len],
|
||||
));
|
||||
|
||||
if autoclose
|
||||
&& bracket_pair.close
|
||||
&& following_text_allows_autoclose
|
||||
&& !is_closing_quote
|
||||
&& preceding_text_matches_prefix
|
||||
{
|
||||
let anchor = snapshot.anchor_before(selection.end);
|
||||
new_selections.push((selection.map(|_| anchor), text.len()));
|
||||
@@ -10460,7 +10445,7 @@ impl Editor {
|
||||
|
||||
fn cancel_language_server_work(
|
||||
&mut self,
|
||||
_: &actions::CancelLanguageServerWork,
|
||||
_: &CancelLanguageServerWork,
|
||||
cx: &mut ViewContext<Self>,
|
||||
) {
|
||||
if let Some(project) = self.project.clone() {
|
||||
@@ -10743,42 +10728,15 @@ impl Editor {
|
||||
self.fold_ranges(fold_ranges, true, cx);
|
||||
}
|
||||
|
||||
fn fold_at_level(&mut self, fold_at: &FoldAtLevel, cx: &mut ViewContext<Self>) {
|
||||
let fold_at_level = fold_at.level;
|
||||
let snapshot = self.buffer.read(cx).snapshot(cx);
|
||||
let mut fold_ranges = Vec::new();
|
||||
let mut stack = vec![(0, snapshot.max_buffer_row().0, 1)];
|
||||
|
||||
while let Some((mut start_row, end_row, current_level)) = stack.pop() {
|
||||
while start_row < end_row {
|
||||
match self.snapshot(cx).foldable_range(MultiBufferRow(start_row)) {
|
||||
Some(foldable_range) => {
|
||||
let nested_start_row = foldable_range.0.start.row + 1;
|
||||
let nested_end_row = foldable_range.0.end.row;
|
||||
|
||||
if current_level < fold_at_level {
|
||||
stack.push((nested_start_row, nested_end_row, current_level + 1));
|
||||
} else if current_level == fold_at_level {
|
||||
fold_ranges.push(foldable_range);
|
||||
}
|
||||
|
||||
start_row = nested_end_row + 1;
|
||||
}
|
||||
None => start_row += 1,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
self.fold_ranges(fold_ranges, true, cx);
|
||||
}
|
||||
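The function above avoids recursion by keeping an explicit stack of `(start_row, end_row, level)` frames: ranges shallower than the target level are pushed for later expansion, ranges at the target level are collected, and deeper ranges are never visited. A simplified sketch of that pattern over a generic tree (the `Node` type is hypothetical, not the editor's foldable ranges):

```rust
struct Node {
    children: Vec<Node>,
}

// Collect all nodes at exactly `target_level` (a root's children are level 1),
// using an explicit stack instead of recursion.
fn nodes_at_level(root: &Node, target_level: usize) -> Vec<&Node> {
    let mut found = Vec::new();
    let mut stack: Vec<(&Node, usize)> = root.children.iter().map(|c| (c, 1)).collect();
    while let Some((node, level)) = stack.pop() {
        if level < target_level {
            stack.extend(node.children.iter().map(|c| (c, level + 1)));
        } else if level == target_level {
            found.push(node);
        }
        // Nodes deeper than target_level are never pushed, mirroring fold_at_level.
    }
    found
}
```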
|
||||
pub fn fold_all(&mut self, _: &actions::FoldAll, cx: &mut ViewContext<Self>) {
|
||||
let mut fold_ranges = Vec::new();
|
||||
let snapshot = self.buffer.read(cx).snapshot(cx);
|
||||
let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx));
|
||||
|
||||
for row in 0..snapshot.max_buffer_row().0 {
|
||||
if let Some(foldable_range) = self.snapshot(cx).foldable_range(MultiBufferRow(row)) {
|
||||
fold_ranges.push(foldable_range);
|
||||
for row in 0..display_map.max_buffer_row().0 {
|
||||
if let Some((foldable_range, fold_text)) =
|
||||
display_map.foldable_range(MultiBufferRow(row))
|
||||
{
|
||||
fold_ranges.push((foldable_range, fold_text));
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -1080,112 +1080,6 @@ fn test_fold_action_multiple_line_breaks(cx: &mut TestAppContext) {
|
||||
});
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
fn test_fold_at_level(cx: &mut TestAppContext) {
|
||||
init_test(cx, |_| {});
|
||||
|
||||
let view = cx.add_window(|cx| {
|
||||
let buffer = MultiBuffer::build_simple(
|
||||
&"
|
||||
class Foo:
|
||||
# Hello!
|
||||
|
||||
def a():
|
||||
print(1)
|
||||
|
||||
def b():
|
||||
print(2)
|
||||
|
||||
|
||||
class Bar:
|
||||
# World!
|
||||
|
||||
def a():
|
||||
print(1)
|
||||
|
||||
def b():
|
||||
print(2)
|
||||
|
||||
|
||||
"
|
||||
.unindent(),
|
||||
cx,
|
||||
);
|
||||
build_editor(buffer.clone(), cx)
|
||||
});
|
||||
|
||||
_ = view.update(cx, |view, cx| {
|
||||
view.fold_at_level(&FoldAtLevel { level: 2 }, cx);
|
||||
assert_eq!(
|
||||
view.display_text(cx),
|
||||
"
|
||||
class Foo:
|
||||
# Hello!
|
||||
|
||||
def a():⋯
|
||||
|
||||
def b():⋯
|
||||
|
||||
|
||||
class Bar:
|
||||
# World!
|
||||
|
||||
def a():⋯
|
||||
|
||||
def b():⋯
|
||||
|
||||
|
||||
"
|
||||
.unindent(),
|
||||
);
|
||||
|
||||
view.fold_at_level(&FoldAtLevel { level: 1 }, cx);
|
||||
assert_eq!(
|
||||
view.display_text(cx),
|
||||
"
|
||||
class Foo:⋯
|
||||
|
||||
|
||||
class Bar:⋯
|
||||
|
||||
|
||||
"
|
||||
.unindent(),
|
||||
);
|
||||
|
||||
view.unfold_all(&UnfoldAll, cx);
|
||||
view.fold_at_level(&FoldAtLevel { level: 0 }, cx);
|
||||
assert_eq!(
|
||||
view.display_text(cx),
|
||||
"
|
||||
class Foo:
|
||||
# Hello!
|
||||
|
||||
def a():
|
||||
print(1)
|
||||
|
||||
def b():
|
||||
print(2)
|
||||
|
||||
|
||||
class Bar:
|
||||
# World!
|
||||
|
||||
def a():
|
||||
print(1)
|
||||
|
||||
def b():
|
||||
print(2)
|
||||
|
||||
|
||||
"
|
||||
.unindent(),
|
||||
);
|
||||
|
||||
assert_eq!(view.display_text(cx), view.buffer.read(cx).read(cx).text());
|
||||
});
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
fn test_move_cursor(cx: &mut TestAppContext) {
|
||||
init_test(cx, |_| {});
|
||||
|
||||
@@ -68,6 +68,7 @@ use sum_tree::Bias;
|
||||
use theme::{ActiveTheme, Appearance, PlayerColor};
|
||||
use ui::prelude::*;
|
||||
use ui::{h_flex, ButtonLike, ButtonStyle, ContextMenu, Tooltip};
|
||||
use unicode_segmentation::UnicodeSegmentation;
|
||||
use util::RangeExt;
|
||||
use util::ResultExt;
|
||||
use workspace::{item::Item, Workspace};
|
||||
@@ -336,7 +337,6 @@ impl EditorElement {
|
||||
register_action(view, cx, Editor::open_url);
|
||||
register_action(view, cx, Editor::open_file);
|
||||
register_action(view, cx, Editor::fold);
|
||||
register_action(view, cx, Editor::fold_at_level);
|
||||
register_action(view, cx, Editor::fold_all);
|
||||
register_action(view, cx, Editor::fold_at);
|
||||
register_action(view, cx, Editor::fold_recursive);
|
||||
@@ -445,7 +445,6 @@ impl EditorElement {
|
||||
register_action(view, cx, Editor::accept_inline_completion);
|
||||
register_action(view, cx, Editor::revert_file);
|
||||
register_action(view, cx, Editor::revert_selected_hunks);
|
||||
register_action(view, cx, Editor::apply_all_diff_hunks);
|
||||
register_action(view, cx, Editor::apply_selected_diff_hunks);
|
||||
register_action(view, cx, Editor::open_active_item_in_terminal);
|
||||
register_action(view, cx, Editor::reload_file)
|
||||
@@ -1027,23 +1026,21 @@ impl EditorElement {
|
||||
}
|
||||
let block_text = if let CursorShape::Block = selection.cursor_shape {
|
||||
snapshot
|
||||
.display_chars_at(cursor_position)
|
||||
.next()
|
||||
.grapheme_at(cursor_position)
|
||||
.or_else(|| {
|
||||
if cursor_column == 0 {
|
||||
snapshot
|
||||
.placeholder_text()
|
||||
.and_then(|s| s.chars().next())
|
||||
.map(|c| (c, cursor_position))
|
||||
snapshot.placeholder_text().and_then(|s| {
|
||||
s.graphemes(true).next().map(|s| s.to_owned())
|
||||
})
|
||||
} else {
|
||||
None
|
||||
}
|
||||
})
|
||||
.and_then(|(character, _)| {
|
||||
let text = if character == '\n' {
|
||||
.and_then(|grapheme| {
|
||||
let text = if grapheme == "\n" {
|
||||
SharedString::from(" ")
|
||||
} else {
|
||||
SharedString::from(character.to_string())
|
||||
SharedString::from(grapheme)
|
||||
};
|
||||
let len = text.len();
|
||||
|
||||
|
||||
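The change above switches the block cursor from `chars().next()` to grapheme clusters via `unicode_segmentation` (imported earlier in this diff), so that multi-codepoint glyphs render under a single block cursor. A small sketch of the difference:

```rust
use unicode_segmentation::UnicodeSegmentation;

fn main() {
    let text = "e\u{301}xample"; // 'e' + combining acute accent, then "xample"
    // chars() splits the accented letter into two code points...
    assert_eq!(text.chars().next(), Some('e'));
    // ...while graphemes() keeps the user-perceived character together.
    assert_eq!(text.graphemes(true).next(), Some("e\u{301}"));
}
```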
@@ -368,15 +368,12 @@ impl GitBlame {
|
||||
.spawn({
|
||||
let snapshot = snapshot.clone();
|
||||
async move {
|
||||
let Some(Blame {
|
||||
let Blame {
|
||||
entries,
|
||||
permalinks,
|
||||
messages,
|
||||
remote_url,
|
||||
}) = blame.await?
|
||||
else {
|
||||
return Ok(None);
|
||||
};
|
||||
} = blame.await?;
|
||||
|
||||
let entries = build_blame_entry_sum_tree(entries, snapshot.max_point().row);
|
||||
let commit_details = parse_commit_messages(
|
||||
@@ -388,16 +385,13 @@ impl GitBlame {
|
||||
)
|
||||
.await;
|
||||
|
||||
anyhow::Ok(Some((entries, commit_details)))
|
||||
anyhow::Ok((entries, commit_details))
|
||||
}
|
||||
})
|
||||
.await;
|
||||
|
||||
this.update(&mut cx, |this, cx| match result {
|
||||
Ok(None) => {
|
||||
// Nothing to do, e.g. no repository found
|
||||
}
|
||||
Ok(Some((entries, commit_details))) => {
|
||||
Ok((entries, commit_details)) => {
|
||||
this.buffer_edits = buffer_edits;
|
||||
this.buffer_snapshot = snapshot;
|
||||
this.entries = entries;
|
||||
@@ -416,7 +410,11 @@ impl GitBlame {
|
||||
} else {
|
||||
// If we weren't triggered by a user, we just log errors in the background, instead of sending
|
||||
// notifications.
|
||||
log::error!("failed to get git blame data: {error:?}");
|
||||
// Except for `NoRepositoryError`, which can happen often if a user has inline-blame turned on
|
||||
// and opens a non-git file.
|
||||
if error.downcast_ref::<project::NoRepositoryError>().is_none() {
|
||||
log::error!("failed to get git blame data: {error:?}");
|
||||
}
|
||||
}
|
||||
}),
|
||||
})
|
||||
|
||||
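The error handling above keeps logging real failures while staying quiet for the common "no repository" case by downcasting the `anyhow::Error`. A self-contained sketch of that pattern, with a stand-in error type rather than the project's actual `NoRepositoryError`:

```rust
use std::fmt;

#[derive(Debug)]
struct NoRepositoryError;

impl fmt::Display for NoRepositoryError {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "no git repository for this file")
    }
}

impl std::error::Error for NoRepositoryError {}

fn log_blame_error(error: &anyhow::Error) {
    // Only log when the failure is not the expected "no repository" case.
    if error.downcast_ref::<NoRepositoryError>().is_none() {
        eprintln!("failed to get git blame data: {error:?}");
    }
}

fn main() {
    log_blame_error(&anyhow::Error::new(NoRepositoryError));   // silent
    log_blame_error(&anyhow::anyhow!("git executable not found")); // logged
}
```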
@@ -706,11 +706,10 @@ pub(crate) async fn find_file(
|
||||
) -> Option<ResolvedPath> {
|
||||
project
|
||||
.update(cx, |project, cx| {
|
||||
project.resolve_path_in_buffer(&candidate_file_path, buffer, cx)
|
||||
project.resolve_existing_file_path(&candidate_file_path, buffer, cx)
|
||||
})
|
||||
.ok()?
|
||||
.await
|
||||
.filter(|s| s.is_file())
|
||||
}
|
||||
|
||||
if let Some(existing_path) = check_path(&candidate_file_path, &project, buffer, cx).await {
|
||||
@@ -1613,46 +1612,4 @@ mod tests {
|
||||
assert_eq!(file_path.to_str().unwrap(), "/root/dir/file2.rs");
|
||||
});
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_hover_directories(cx: &mut gpui::TestAppContext) {
|
||||
init_test(cx, |_| {});
|
||||
let mut cx = EditorLspTestContext::new_rust(
|
||||
lsp::ServerCapabilities {
|
||||
..Default::default()
|
||||
},
|
||||
cx,
|
||||
)
|
||||
.await;
|
||||
|
||||
// Insert a new file
|
||||
let fs = cx.update_workspace(|workspace, cx| workspace.project().read(cx).fs().clone());
|
||||
fs.as_fake()
|
||||
.insert_file("/root/dir/file2.rs", "This is file2.rs".as_bytes().to_vec())
|
||||
.await;
|
||||
|
||||
cx.set_state(indoc! {"
|
||||
You can't open ../diˇr because it's a directory.
|
||||
"});
|
||||
|
||||
// File does not exist
|
||||
let screen_coord = cx.pixel_position(indoc! {"
|
||||
You can't open ../diˇr because it's a directory.
|
||||
"});
|
||||
cx.simulate_mouse_move(screen_coord, None, Modifiers::secondary_key());
|
||||
|
||||
// No highlight
|
||||
cx.update_editor(|editor, cx| {
|
||||
assert!(editor
|
||||
.snapshot(cx)
|
||||
.text_highlight_ranges::<HoveredLinkState>()
|
||||
.unwrap_or_default()
|
||||
.1
|
||||
.is_empty());
|
||||
});
|
||||
|
||||
// Does not open the directory
|
||||
cx.simulate_click(screen_coord, Modifiers::secondary_key());
|
||||
cx.update_workspace(|workspace, cx| assert_eq!(workspace.items(cx).count(), 1));
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
use crate::{
|
||||
display_map::{InlayOffset, ToDisplayPoint},
|
||||
hover_links::{InlayHighlight, RangeInEditor},
|
||||
is_invisible,
|
||||
scroll::ScrollAmount,
|
||||
Anchor, AnchorRangeExt, DisplayPoint, DisplayRow, Editor, EditorSettings, EditorSnapshot,
|
||||
Hover, RangeToAnchorExt,
|
||||
@@ -11,7 +12,7 @@ use gpui::{
|
||||
StyleRefinement, Styled, Task, TextStyleRefinement, View, ViewContext,
|
||||
};
|
||||
use itertools::Itertools;
|
||||
use language::{DiagnosticEntry, Language, LanguageRegistry};
|
||||
use language::{Diagnostic, DiagnosticEntry, Language, LanguageRegistry};
|
||||
use lsp::DiagnosticSeverity;
|
||||
use markdown::{Markdown, MarkdownStyle};
|
||||
use multi_buffer::ToOffset;
|
||||
@@ -199,7 +200,6 @@ fn show_hover(
|
||||
if editor.pending_rename.is_some() {
|
||||
return None;
|
||||
}
|
||||
|
||||
let snapshot = editor.snapshot(cx);
|
||||
|
||||
let (buffer, buffer_position) = editor
|
||||
@@ -259,7 +259,7 @@ fn show_hover(
|
||||
}
|
||||
|
||||
// If there's a diagnostic, assign it on the hover state and notify
|
||||
let local_diagnostic = snapshot
|
||||
let mut local_diagnostic = snapshot
|
||||
.buffer_snapshot
|
||||
.diagnostics_in_range::<_, usize>(anchor..anchor, false)
|
||||
// Find the entry with the most specific range
|
||||
@@ -281,6 +281,42 @@ fn show_hover(
|
||||
})
|
||||
});
|
||||
|
||||
if let Some(invisible) = snapshot
|
||||
.buffer_snapshot
|
||||
.chars_at(anchor)
|
||||
.next()
|
||||
.filter(|&c| is_invisible(c))
|
||||
{
|
||||
let after = snapshot.buffer_snapshot.anchor_after(
|
||||
anchor.to_offset(&snapshot.buffer_snapshot) + invisible.len_utf8(),
|
||||
);
|
||||
local_diagnostic = Some(DiagnosticEntry {
|
||||
diagnostic: Diagnostic {
|
||||
severity: DiagnosticSeverity::HINT,
|
||||
message: format!("Unicode character U+{:02X}", invisible as u32),
|
||||
..Default::default()
|
||||
},
|
||||
range: anchor..after,
|
||||
})
|
||||
} else if let Some(invisible) = snapshot
|
||||
.buffer_snapshot
|
||||
.reversed_chars_at(anchor)
|
||||
.next()
|
||||
.filter(|&c| is_invisible(c))
|
||||
{
|
||||
let before = snapshot.buffer_snapshot.anchor_before(
|
||||
anchor.to_offset(&snapshot.buffer_snapshot) - invisible.len_utf8(),
|
||||
);
|
||||
local_diagnostic = Some(DiagnosticEntry {
|
||||
diagnostic: Diagnostic {
|
||||
severity: DiagnosticSeverity::HINT,
|
||||
message: format!("Unicode character U+{:02X}", invisible as u32),
|
||||
..Default::default()
|
||||
},
|
||||
range: before..anchor,
|
||||
})
|
||||
}
|
||||
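The hover change above first looks at the character immediately after the hover anchor, then the one immediately before it, and synthesizes a hint diagnostic naming the codepoint. Reduced to a plain string and a byte offset (no editor snapshots or anchors, and assuming `is_invisible` from the new invisibles module is in scope):

```rust
use std::ops::Range;

// Find an invisible character adjacent to `offset` (which must be a char boundary),
// preferring the one after it, and return the hint text plus the byte range it covers.
fn invisible_hint(text: &str, offset: usize) -> Option<(String, Range<usize>)> {
    if let Some(c) = text[offset..].chars().next().filter(|&c| is_invisible(c)) {
        let message = format!("Unicode character U+{:02X}", c as u32);
        return Some((message, offset..offset + c.len_utf8()));
    }
    if let Some(c) = text[..offset].chars().next_back().filter(|&c| is_invisible(c)) {
        let message = format!("Unicode character U+{:02X}", c as u32);
        return Some((message, offset - c.len_utf8()..offset));
    }
    None
}
```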
|
||||
let diagnostic_popover = if let Some(local_diagnostic) = local_diagnostic {
|
||||
let text = match local_diagnostic.diagnostic.source {
|
||||
Some(ref source) => {
|
||||
@@ -288,7 +324,6 @@ fn show_hover(
|
||||
}
|
||||
None => local_diagnostic.diagnostic.message.clone(),
|
||||
};
|
||||
|
||||
let mut border_color: Option<Hsla> = None;
|
||||
let mut background_color: Option<Hsla> = None;
|
||||
|
||||
@@ -344,7 +379,6 @@ fn show_hover(
|
||||
Markdown::new_text(text, markdown_style.clone(), None, cx, None)
|
||||
})
|
||||
.ok();
|
||||
|
||||
Some(DiagnosticPopover {
|
||||
local_diagnostic,
|
||||
primary_diagnostic,
|
||||
@@ -432,7 +466,6 @@ fn show_hover(
|
||||
cx.notify();
|
||||
cx.refresh();
|
||||
})?;
|
||||
|
||||
anyhow::Ok(())
|
||||
}
|
||||
.log_err()
|
||||
|
||||
@@ -16,10 +16,10 @@ use util::RangeExt;
|
||||
use workspace::Item;
|
||||
|
||||
use crate::{
|
||||
editor_settings::CurrentLineHighlight, hunk_status, hunks_for_selections, ApplyAllDiffHunks,
|
||||
ApplyDiffHunk, BlockPlacement, BlockProperties, BlockStyle, CustomBlockId, DiffRowHighlight,
|
||||
DisplayRow, DisplaySnapshot, Editor, EditorElement, ExpandAllHunkDiffs, GoToHunk, GoToPrevHunk,
|
||||
RevertFile, RevertSelectedHunks, ToDisplayPoint, ToggleHunkDiff,
|
||||
editor_settings::CurrentLineHighlight, hunk_status, hunks_for_selections, ApplyDiffHunk,
|
||||
BlockPlacement, BlockProperties, BlockStyle, CustomBlockId, DiffRowHighlight, DisplayRow,
|
||||
DisplaySnapshot, Editor, EditorElement, ExpandAllHunkDiffs, GoToHunk, GoToPrevHunk, RevertFile,
|
||||
RevertSelectedHunks, ToDisplayPoint, ToggleHunkDiff,
|
||||
};
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
@@ -352,11 +352,7 @@ impl Editor {
|
||||
None
|
||||
}
|
||||
|
||||
pub(crate) fn apply_all_diff_hunks(
|
||||
&mut self,
|
||||
_: &ApplyAllDiffHunks,
|
||||
cx: &mut ViewContext<Self>,
|
||||
) {
|
||||
pub(crate) fn apply_all_diff_hunks(&mut self, cx: &mut ViewContext<Self>) {
|
||||
let buffers = self.buffer.read(cx).all_buffers();
|
||||
for branch_buffer in buffers {
|
||||
branch_buffer.update(cx, |branch_buffer, cx| {
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
use crate::{ApplyAllDiffHunks, Editor, EditorEvent, SemanticsProvider};
|
||||
use crate::{Editor, EditorEvent, SemanticsProvider};
|
||||
use collections::HashSet;
|
||||
use futures::{channel::mpsc, future::join_all};
|
||||
use gpui::{AppContext, EventEmitter, FocusableView, Model, Render, Subscription, Task, View};
|
||||
@@ -8,7 +8,7 @@ use project::Project;
|
||||
use smol::stream::StreamExt;
|
||||
use std::{any::TypeId, ops::Range, rc::Rc, time::Duration};
|
||||
use text::ToOffset;
|
||||
use ui::{prelude::*, ButtonLike, KeyBinding};
|
||||
use ui::prelude::*;
|
||||
use workspace::{
|
||||
searchable::SearchableItemHandle, Item, ItemHandle as _, ToolbarItemEvent, ToolbarItemLocation,
|
||||
ToolbarItemView, Workspace,
|
||||
@@ -232,10 +232,7 @@ impl ProposedChangesEditor {
|
||||
|
||||
impl Render for ProposedChangesEditor {
|
||||
fn render(&mut self, _cx: &mut ViewContext<Self>) -> impl IntoElement {
|
||||
div()
|
||||
.size_full()
|
||||
.key_context("ProposedChangesEditor")
|
||||
.child(self.editor.clone())
|
||||
self.editor.clone()
|
||||
}
|
||||
}
|
||||
|
||||
@@ -334,21 +331,17 @@ impl ProposedChangesEditorToolbar {
|
||||
}
|
||||
|
||||
impl Render for ProposedChangesEditorToolbar {
|
||||
fn render(&mut self, cx: &mut ViewContext<Self>) -> impl IntoElement {
|
||||
let button_like = ButtonLike::new("apply-changes").child(Label::new("Apply All"));
|
||||
|
||||
match &self.current_editor {
|
||||
Some(editor) => {
|
||||
let focus_handle = editor.focus_handle(cx);
|
||||
let keybinding = KeyBinding::for_action_in(&ApplyAllDiffHunks, &focus_handle, cx)
|
||||
.map(|binding| binding.into_any_element());
|
||||
|
||||
button_like.children(keybinding).on_click({
|
||||
move |_event, cx| focus_handle.dispatch_action(&ApplyAllDiffHunks, cx)
|
||||
})
|
||||
fn render(&mut self, _cx: &mut ViewContext<Self>) -> impl IntoElement {
|
||||
let editor = self.current_editor.clone();
|
||||
Button::new("apply-changes", "Apply All").on_click(move |_, cx| {
|
||||
if let Some(editor) = &editor {
|
||||
editor.update(cx, |editor, cx| {
|
||||
editor.editor.update(cx, |editor, cx| {
|
||||
editor.apply_all_diff_hunks(cx);
|
||||
})
|
||||
});
|
||||
}
|
||||
None => button_like.disabled(true),
|
||||
}
|
||||
})
|
||||
}
|
||||
}
@@ -1,7 +1,6 @@
|
||||
use std::{
|
||||
borrow::Cow,
|
||||
ops::{Deref, DerefMut, Range},
|
||||
path::Path,
|
||||
sync::Arc,
|
||||
};
|
||||
|
||||
@@ -67,12 +66,10 @@ impl EditorLspTestContext {
|
||||
);
|
||||
language_registry.add(Arc::new(language));
|
||||
|
||||
let root = Self::root_path();
|
||||
|
||||
app_state
|
||||
.fs
|
||||
.as_fake()
|
||||
.insert_tree(root, json!({ "dir": { file_name.clone(): "" }}))
|
||||
.insert_tree("/root", json!({ "dir": { file_name.clone(): "" }}))
|
||||
.await;
|
||||
|
||||
let window = cx.add_window(|cx| Workspace::test_new(project.clone(), cx));
|
||||
@@ -82,7 +79,7 @@ impl EditorLspTestContext {
|
||||
let mut cx = VisualTestContext::from_window(*window.deref(), cx);
|
||||
project
|
||||
.update(&mut cx, |project, cx| {
|
||||
project.find_or_create_worktree(root, true, cx)
|
||||
project.find_or_create_worktree("/root", true, cx)
|
||||
})
|
||||
.await
|
||||
.unwrap();
|
||||
@@ -111,7 +108,7 @@ impl EditorLspTestContext {
|
||||
},
|
||||
lsp,
|
||||
workspace,
|
||||
buffer_lsp_url: lsp::Url::from_file_path(root.join("dir").join(file_name)).unwrap(),
|
||||
buffer_lsp_url: lsp::Url::from_file_path(format!("/root/dir/{file_name}")).unwrap(),
|
||||
}
|
||||
}
|
||||
|
||||
@@ -126,7 +123,6 @@ impl EditorLspTestContext {
|
||||
path_suffixes: vec!["rs".to_string()],
|
||||
..Default::default()
|
||||
},
|
||||
line_comments: vec!["// ".into(), "/// ".into(), "//! ".into()],
|
||||
..Default::default()
|
||||
},
|
||||
Some(tree_sitter_rust::LANGUAGE.into()),
|
||||
@@ -313,16 +309,6 @@ impl EditorLspTestContext {
|
||||
pub fn notify<T: notification::Notification>(&self, params: T::Params) {
|
||||
self.lsp.notify::<T>(params);
|
||||
}
|
||||
|
||||
#[cfg(target_os = "windows")]
|
||||
fn root_path() -> &'static Path {
|
||||
Path::new("C:\\root")
|
||||
}
|
||||
|
||||
#[cfg(not(target_os = "windows"))]
|
||||
fn root_path() -> &'static Path {
|
||||
Path::new("/root")
|
||||
}
|
||||
}
|
||||
|
||||
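One side of the test-context hunks above picks the fake worktree root per platform instead of hard-coding "/root", using cfg attributes so Windows gets a drive-prefixed path. A minimal standalone sketch of the same pattern (not wired into the zed test crates):

use std::path::Path;

// On Windows, absolute paths need a drive prefix; elsewhere "/" is enough.
#[cfg(target_os = "windows")]
fn root_path() -> &'static Path {
    Path::new("C:\\root")
}

#[cfg(not(target_os = "windows"))]
fn root_path() -> &'static Path {
    Path::new("/root")
}

fn main() {
    // Build test file paths off the platform-appropriate root.
    let file = root_path().join("dir").join("file.rs");
    println!("{}", file.display());
}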
impl Deref for EditorLspTestContext {
@@ -17,7 +17,6 @@ use project::{FakeFs, Project};
|
||||
use std::{
|
||||
any::TypeId,
|
||||
ops::{Deref, DerefMut, Range},
|
||||
path::Path,
|
||||
sync::{
|
||||
atomic::{AtomicUsize, Ordering},
|
||||
Arc,
|
||||
@@ -43,18 +42,17 @@ impl EditorTestContext {
|
||||
pub async fn new(cx: &mut gpui::TestAppContext) -> EditorTestContext {
|
||||
let fs = FakeFs::new(cx.executor());
|
||||
// fs.insert_file("/file", "".to_owned()).await;
|
||||
let root = Self::root_path();
|
||||
fs.insert_tree(
|
||||
root,
|
||||
"/root",
|
||||
serde_json::json!({
|
||||
"file": "",
|
||||
}),
|
||||
)
|
||||
.await;
|
||||
let project = Project::test(fs, [root], cx).await;
|
||||
let project = Project::test(fs, ["/root".as_ref()], cx).await;
|
||||
let buffer = project
|
||||
.update(cx, |project, cx| {
|
||||
project.open_local_buffer(root.join("file"), cx)
|
||||
project.open_local_buffer("/root/file", cx)
|
||||
})
|
||||
.await
|
||||
.unwrap();
|
||||
@@ -73,16 +71,6 @@ impl EditorTestContext {
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(target_os = "windows")]
|
||||
fn root_path() -> &'static Path {
|
||||
Path::new("C:\\root")
|
||||
}
|
||||
|
||||
#[cfg(not(target_os = "windows"))]
|
||||
fn root_path() -> &'static Path {
|
||||
Path::new("/root")
|
||||
}
|
||||
|
||||
pub async fn for_editor(editor: WindowHandle<Editor>, cx: &mut gpui::TestAppContext) -> Self {
|
||||
let editor_view = editor.root_view(cx).unwrap();
|
||||
Self {
@@ -8,8 +8,7 @@ use collections::HashMap;
|
||||
use futures::{Future, FutureExt};
|
||||
use gpui::AsyncAppContext;
|
||||
use language::{
|
||||
CodeLabel, HighlightId, Language, LanguageServerName, LanguageToolchainStore, LspAdapter,
|
||||
LspAdapterDelegate,
|
||||
CodeLabel, HighlightId, Language, LanguageServerName, LspAdapter, LspAdapterDelegate,
|
||||
};
|
||||
use lsp::{CodeActionKind, LanguageServerBinary, LanguageServerBinaryOptions};
|
||||
use serde::Serialize;
|
||||
@@ -195,7 +194,6 @@ impl LspAdapter for ExtensionLspAdapter {
|
||||
async fn workspace_configuration(
|
||||
self: Arc<Self>,
|
||||
delegate: &Arc<dyn LspAdapterDelegate>,
|
||||
_: Arc<dyn LanguageToolchainStore>,
|
||||
_cx: &mut AsyncAppContext,
|
||||
) -> Result<Value> {
|
||||
let delegate = delegate.clone();
@@ -37,7 +37,7 @@ use http_client::{AsyncBody, HttpClient, HttpClientWithUrl};
|
||||
use indexed_docs::{IndexedDocsRegistry, ProviderId};
|
||||
use language::{
|
||||
LanguageConfig, LanguageMatcher, LanguageName, LanguageQueries, LanguageRegistry,
|
||||
LoadedLanguage, QUERY_FILENAME_PREFIXES,
|
||||
QUERY_FILENAME_PREFIXES,
|
||||
};
|
||||
use node_runtime::NodeRuntime;
|
||||
use project::ContextProviderWithTasks;
|
||||
@@ -1102,21 +1102,14 @@ impl ExtensionStore {
|
||||
let config = std::fs::read_to_string(language_path.join("config.toml"))?;
|
||||
let config: LanguageConfig = ::toml::from_str(&config)?;
|
||||
let queries = load_plugin_queries(&language_path);
|
||||
let context_provider =
|
||||
std::fs::read_to_string(language_path.join("tasks.json"))
|
||||
.ok()
|
||||
.and_then(|contents| {
|
||||
let definitions =
|
||||
serde_json_lenient::from_str(&contents).log_err()?;
|
||||
Some(Arc::new(ContextProviderWithTasks::new(definitions)) as Arc<_>)
|
||||
});
|
||||
let tasks = std::fs::read_to_string(language_path.join("tasks.json"))
|
||||
.ok()
|
||||
.and_then(|contents| {
|
||||
let definitions = serde_json_lenient::from_str(&contents).log_err()?;
|
||||
Some(Arc::new(ContextProviderWithTasks::new(definitions)) as Arc<_>)
|
||||
});
|
||||
|
||||
Ok(LoadedLanguage {
|
||||
config,
|
||||
queries,
|
||||
context_provider,
|
||||
toolchain_provider: None,
|
||||
})
|
||||
Ok((config, queries, tasks))
|
||||
},
|
||||
);
|
||||
}
@@ -790,9 +790,9 @@ impl FileFinderDelegate {
|
||||
let mut path_matches = Vec::new();
|
||||
|
||||
let abs_file_exists = if let Ok(task) = project.update(&mut cx, |this, cx| {
|
||||
this.resolve_abs_file_path(query.path_query(), cx)
|
||||
this.abs_file_path_exists(query.path_query(), cx)
|
||||
}) {
|
||||
task.await.is_some()
|
||||
task.await
|
||||
} else {
|
||||
false
|
||||
};
@@ -4,7 +4,7 @@ use gpui::{HighlightStyle, Model, StyledText};
|
||||
use picker::{Picker, PickerDelegate};
|
||||
use project::{Entry, PathMatchCandidateSet, Project, ProjectPath, WorktreeId};
|
||||
use std::{
|
||||
path::{Path, PathBuf},
|
||||
path::PathBuf,
|
||||
sync::{
|
||||
atomic::{self, AtomicBool},
|
||||
Arc,
|
||||
@@ -254,7 +254,6 @@ impl PickerDelegate for NewPathDelegate {
|
||||
.trim()
|
||||
.trim_start_matches("./")
|
||||
.trim_start_matches('/');
|
||||
|
||||
let (dir, suffix) = if let Some(index) = query.rfind('/') {
|
||||
let suffix = if index + 1 < query.len() {
|
||||
Some(query[index + 1..].to_string())
|
||||
@@ -318,14 +317,6 @@ impl PickerDelegate for NewPathDelegate {
|
||||
})
|
||||
}
|
||||
|
||||
fn confirm_completion(
|
||||
&mut self,
|
||||
_: String,
|
||||
cx: &mut ViewContext<Picker<Self>>,
|
||||
) -> Option<String> {
|
||||
self.confirm_update_query(cx)
|
||||
}
|
||||
|
||||
fn confirm_update_query(&mut self, cx: &mut ViewContext<Picker<Self>>) -> Option<String> {
|
||||
let m = self.matches.get(self.selected_index)?;
|
||||
if m.is_dir(self.project.read(cx), cx) {
|
||||
@@ -431,32 +422,7 @@ impl NewPathDelegate {
|
||||
) {
|
||||
cx.notify();
|
||||
if query.is_empty() {
|
||||
self.matches = self
|
||||
.project
|
||||
.read(cx)
|
||||
.worktrees(cx)
|
||||
.flat_map(|worktree| {
|
||||
let worktree_id = worktree.read(cx).id();
|
||||
worktree
|
||||
.read(cx)
|
||||
.child_entries(Path::new(""))
|
||||
.filter_map(move |entry| {
|
||||
entry.is_dir().then(|| Match {
|
||||
path_match: Some(PathMatch {
|
||||
score: 1.0,
|
||||
positions: Default::default(),
|
||||
worktree_id: worktree_id.to_usize(),
|
||||
path: entry.path.clone(),
|
||||
path_prefix: "".into(),
|
||||
is_dir: entry.is_dir(),
|
||||
distance_to_relative_ancestor: 0,
|
||||
}),
|
||||
suffix: None,
|
||||
})
|
||||
})
|
||||
})
|
||||
.collect();
|
||||
|
||||
self.matches = vec![];
|
||||
return;
|
||||
}
@@ -220,11 +220,7 @@ impl PickerDelegate for OpenPathDelegate {
|
||||
})
|
||||
}
|
||||
|
||||
fn confirm_completion(
|
||||
&mut self,
|
||||
query: String,
|
||||
_: &mut ViewContext<Picker<Self>>,
|
||||
) -> Option<String> {
|
||||
fn confirm_completion(&self, query: String) -> Option<String> {
|
||||
Some(
|
||||
maybe!({
|
||||
let m = self.matches.get(self.selected_index)?;
@@ -813,7 +813,6 @@ struct FakeFsState {
|
||||
root: Arc<Mutex<FakeFsEntry>>,
|
||||
next_inode: u64,
|
||||
next_mtime: SystemTime,
|
||||
git_event_tx: smol::channel::Sender<PathBuf>,
|
||||
event_txs: Vec<smol::channel::Sender<Vec<PathEvent>>>,
|
||||
events_paused: bool,
|
||||
buffered_events: Vec<PathEvent>,
|
||||
@@ -866,22 +865,14 @@ impl FakeFsState {
|
||||
let mut entry_stack = Vec::new();
|
||||
'outer: loop {
|
||||
let mut path_components = path.components().peekable();
|
||||
let mut prefix = None;
|
||||
while let Some(component) = path_components.next() {
|
||||
match component {
|
||||
Component::Prefix(prefix_component) => prefix = Some(prefix_component),
|
||||
Component::Prefix(_) => panic!("prefix paths aren't supported"),
|
||||
Component::RootDir => {
|
||||
entry_stack.clear();
|
||||
entry_stack.push(self.root.clone());
|
||||
canonical_path.clear();
|
||||
match prefix {
|
||||
Some(prefix_component) => {
|
||||
canonical_path = PathBuf::from(prefix_component.as_os_str());
|
||||
// Prefixes like `C:\\` are represented without their trailing slash, so we have to re-add it.
|
||||
canonical_path.push(std::path::MAIN_SEPARATOR_STR);
|
||||
}
|
||||
None => canonical_path = PathBuf::from(std::path::MAIN_SEPARATOR_STR),
|
||||
}
|
||||
canonical_path.push("/");
|
||||
}
|
||||
Component::CurDir => {}
|
||||
Component::ParentDir => {
|
||||
@@ -903,7 +894,7 @@ impl FakeFsState {
|
||||
}
|
||||
}
|
||||
entry_stack.push(entry.clone());
|
||||
canonical_path = canonical_path.join(name);
|
||||
canonical_path.push(name);
|
||||
} else {
|
||||
return None;
|
||||
}
|
||||
@@ -965,15 +956,9 @@ pub static FS_DOT_GIT: std::sync::LazyLock<&'static OsStr> =
|
||||
|
||||
#[cfg(any(test, feature = "test-support"))]
|
||||
impl FakeFs {
|
||||
/// We need to use something large enough for Windows and Unix to consider this a new file.
|
||||
/// https://doc.rust-lang.org/nightly/std/time/struct.SystemTime.html#platform-specific-behavior
|
||||
const SYSTEMTIME_INTERVAL: u64 = 100;
|
||||
|
||||
pub fn new(executor: gpui::BackgroundExecutor) -> Arc<Self> {
|
||||
let (tx, mut rx) = smol::channel::bounded::<PathBuf>(10);
|
||||
|
||||
let this = Arc::new(Self {
|
||||
executor: executor.clone(),
|
||||
Arc::new(Self {
|
||||
executor,
|
||||
state: Mutex::new(FakeFsState {
|
||||
root: Arc::new(Mutex::new(FakeFsEntry::Dir {
|
||||
inode: 0,
|
||||
@@ -982,7 +967,6 @@ impl FakeFs {
|
||||
entries: Default::default(),
|
||||
git_repo_state: None,
|
||||
})),
|
||||
git_event_tx: tx,
|
||||
next_mtime: SystemTime::UNIX_EPOCH,
|
||||
next_inode: 1,
|
||||
event_txs: Default::default(),
|
||||
@@ -991,22 +975,7 @@ impl FakeFs {
|
||||
read_dir_call_count: 0,
|
||||
metadata_call_count: 0,
|
||||
}),
|
||||
});
|
||||
|
||||
executor.spawn({
|
||||
let this = this.clone();
|
||||
async move {
|
||||
while let Some(git_event) = rx.next().await {
|
||||
if let Some(mut state) = this.state.try_lock() {
|
||||
state.emit_event([(git_event, None)]);
|
||||
} else {
|
||||
panic!("Failed to lock file system state, this execution would have caused a test hang");
|
||||
}
|
||||
}
|
||||
}
|
||||
}).detach();
|
||||
|
||||
this
|
||||
})
|
||||
}
|
||||
|
||||
pub fn set_next_mtime(&self, next_mtime: SystemTime) {
|
||||
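In the FakeFs hunks above, one side advances the fake filesystem's synthetic mtime by a named 100ns SYSTEMTIME_INTERVAL per operation rather than 1ns, so consecutive writes register as distinct modification times on every platform (the doc comment points at SystemTime's platform-specific granularity). A rough standalone sketch of the idea, not using the zed FakeFs types:

use std::time::{Duration, SystemTime};

/// Large enough that both Windows and Unix treat successive values as distinct.
const SYSTEMTIME_INTERVAL: u64 = 100;

struct FakeClock {
    next_mtime: SystemTime,
}

impl FakeClock {
    fn new() -> Self {
        Self { next_mtime: SystemTime::UNIX_EPOCH }
    }

    /// Hand out a fresh mtime and advance by a coarse interval so the next
    /// write never collides with this one.
    fn next(&mut self) -> SystemTime {
        let mtime = self.next_mtime;
        self.next_mtime += Duration::from_nanos(SYSTEMTIME_INTERVAL);
        mtime
    }
}

fn main() {
    let mut clock = FakeClock::new();
    let a = clock.next();
    let b = clock.next();
    assert!(b > a);
}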
@@ -1020,7 +989,7 @@ impl FakeFs {
|
||||
let new_mtime = state.next_mtime;
|
||||
let new_inode = state.next_inode;
|
||||
state.next_inode += 1;
|
||||
state.next_mtime += Duration::from_nanos(Self::SYSTEMTIME_INTERVAL);
|
||||
state.next_mtime += Duration::from_nanos(1);
|
||||
state
|
||||
.write_path(path, move |entry| {
|
||||
match entry {
|
||||
@@ -1073,7 +1042,7 @@ impl FakeFs {
|
||||
let inode = state.next_inode;
|
||||
let mtime = state.next_mtime;
|
||||
state.next_inode += 1;
|
||||
state.next_mtime += Duration::from_nanos(Self::SYSTEMTIME_INTERVAL);
|
||||
state.next_mtime += Duration::from_nanos(1);
|
||||
let file = Arc::new(Mutex::new(FakeFsEntry::File {
|
||||
inode,
|
||||
mtime,
|
||||
@@ -1200,12 +1169,7 @@ impl FakeFs {
|
||||
let mut entry = entry.lock();
|
||||
|
||||
if let FakeFsEntry::Dir { git_repo_state, .. } = &mut *entry {
|
||||
let repo_state = git_repo_state.get_or_insert_with(|| {
|
||||
Arc::new(Mutex::new(FakeGitRepositoryState::new(
|
||||
dot_git.to_path_buf(),
|
||||
state.git_event_tx.clone(),
|
||||
)))
|
||||
});
|
||||
let repo_state = git_repo_state.get_or_insert_with(Default::default);
|
||||
let mut repo_state = repo_state.lock();
|
||||
|
||||
f(&mut repo_state);
|
||||
@@ -1220,22 +1184,7 @@ impl FakeFs {
|
||||
|
||||
pub fn set_branch_name(&self, dot_git: &Path, branch: Option<impl Into<String>>) {
|
||||
self.with_git_state(dot_git, true, |state| {
|
||||
let branch = branch.map(Into::into);
|
||||
state.branches.extend(branch.clone());
|
||||
state.current_branch_name = branch.map(Into::into)
|
||||
})
|
||||
}
|
||||
|
||||
pub fn insert_branches(&self, dot_git: &Path, branches: &[&str]) {
|
||||
self.with_git_state(dot_git, true, |state| {
|
||||
if let Some(first) = branches.first() {
|
||||
if state.current_branch_name.is_none() {
|
||||
state.current_branch_name = Some(first.to_string())
|
||||
}
|
||||
}
|
||||
state
|
||||
.branches
|
||||
.extend(branches.iter().map(ToString::to_string));
|
||||
state.branch_name = branch.map(Into::into)
|
||||
})
|
||||
}
|
||||
|
||||
@@ -1435,16 +1384,15 @@ impl Fs for FakeFs {
|
||||
let mut created_dirs = Vec::new();
|
||||
let mut cur_path = PathBuf::new();
|
||||
for component in path.components() {
|
||||
let should_skip = matches!(component, Component::Prefix(..) | Component::RootDir);
|
||||
let mut state = self.state.lock();
|
||||
cur_path.push(component);
|
||||
if should_skip {
|
||||
if cur_path == Path::new("/") {
|
||||
continue;
|
||||
}
|
||||
let mut state = self.state.lock();
|
||||
|
||||
let inode = state.next_inode;
|
||||
let mtime = state.next_mtime;
|
||||
state.next_mtime += Duration::from_nanos(Self::SYSTEMTIME_INTERVAL);
|
||||
state.next_mtime += Duration::from_nanos(1);
|
||||
state.next_inode += 1;
|
||||
state.write_path(&cur_path, |entry| {
|
||||
entry.or_insert_with(|| {
|
||||
@@ -1470,7 +1418,7 @@ impl Fs for FakeFs {
|
||||
let mut state = self.state.lock();
|
||||
let inode = state.next_inode;
|
||||
let mtime = state.next_mtime;
|
||||
state.next_mtime += Duration::from_nanos(Self::SYSTEMTIME_INTERVAL);
|
||||
state.next_mtime += Duration::from_nanos(1);
|
||||
state.next_inode += 1;
|
||||
let file = Arc::new(Mutex::new(FakeFsEntry::File {
|
||||
inode,
|
||||
@@ -1605,7 +1553,7 @@ impl Fs for FakeFs {
|
||||
let mut state = self.state.lock();
|
||||
let mtime = state.next_mtime;
|
||||
let inode = util::post_inc(&mut state.next_inode);
|
||||
state.next_mtime += Duration::from_nanos(Self::SYSTEMTIME_INTERVAL);
|
||||
state.next_mtime += Duration::from_nanos(1);
|
||||
let source_entry = state.read_path(&source)?;
|
||||
let content = source_entry.lock().file_content(&source)?.clone();
|
||||
let mut kind = Some(PathEventKind::Created);
|
||||
@@ -1875,12 +1823,7 @@ impl Fs for FakeFs {
|
||||
let mut entry = entry.lock();
|
||||
if let FakeFsEntry::Dir { git_repo_state, .. } = &mut *entry {
|
||||
let state = git_repo_state
|
||||
.get_or_insert_with(|| {
|
||||
Arc::new(Mutex::new(FakeGitRepositoryState::new(
|
||||
abs_dot_git.to_path_buf(),
|
||||
state.git_event_tx.clone(),
|
||||
)))
|
||||
})
|
||||
.get_or_insert_with(|| Arc::new(Mutex::new(FakeGitRepositoryState::default())))
|
||||
.clone();
|
||||
Some(git::repository::FakeGitRepository::open(state))
|
||||
} else {
@@ -1,9 +1,8 @@
|
||||
use crate::GitHostingProviderRegistry;
|
||||
use crate::{blame::Blame, status::GitStatus};
|
||||
use anyhow::{Context, Result};
|
||||
use collections::{HashMap, HashSet};
|
||||
use collections::HashMap;
|
||||
use git2::BranchType;
|
||||
use gpui::SharedString;
|
||||
use parking_lot::Mutex;
|
||||
use rope::Rope;
|
||||
use serde::{Deserialize, Serialize};
|
||||
@@ -18,7 +17,7 @@ use util::ResultExt;
|
||||
#[derive(Clone, Debug, Hash, PartialEq)]
|
||||
pub struct Branch {
|
||||
pub is_head: bool,
|
||||
pub name: SharedString,
|
||||
pub name: Box<str>,
|
||||
/// Timestamp of most recent commit, normalized to Unix Epoch format.
|
||||
pub unix_timestamp: Option<i64>,
|
||||
}
|
||||
@@ -42,7 +41,6 @@ pub trait GitRepository: Send + Sync {
|
||||
fn branches(&self) -> Result<Vec<Branch>>;
|
||||
fn change_branch(&self, _: &str) -> Result<()>;
|
||||
fn create_branch(&self, _: &str) -> Result<()>;
|
||||
fn branch_exits(&self, _: &str) -> Result<bool>;
|
||||
|
||||
fn blame(&self, path: &Path, content: Rope) -> Result<crate::blame::Blame>;
|
||||
}
|
||||
@@ -134,18 +132,6 @@ impl GitRepository for RealGitRepository {
|
||||
GitStatus::new(&self.git_binary_path, &working_directory, path_prefixes)
|
||||
}
|
||||
|
||||
fn branch_exits(&self, name: &str) -> Result<bool> {
|
||||
let repo = self.repository.lock();
|
||||
let branch = repo.find_branch(name, BranchType::Local);
|
||||
match branch {
|
||||
Ok(_) => Ok(true),
|
||||
Err(e) => match e.code() {
|
||||
git2::ErrorCode::NotFound => Ok(false),
|
||||
_ => Err(anyhow::anyhow!(e)),
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
fn branches(&self) -> Result<Vec<Branch>> {
|
||||
let repo = self.repository.lock();
|
||||
let local_branches = repo.branches(Some(BranchType::Local))?;
|
||||
@@ -153,11 +139,7 @@ impl GitRepository for RealGitRepository {
|
||||
.filter_map(|branch| {
|
||||
branch.ok().and_then(|(branch, _)| {
|
||||
let is_head = branch.is_head();
|
||||
let name = branch
|
||||
.name()
|
||||
.ok()
|
||||
.flatten()
|
||||
.map(|name| name.to_string().into())?;
|
||||
let name = branch.name().ok().flatten().map(Box::from)?;
|
||||
let timestamp = branch.get().peel_to_commit().ok()?.time();
|
||||
let unix_timestamp = timestamp.seconds();
|
||||
let timezone_offset = timestamp.offset_minutes();
|
||||
@@ -219,20 +201,17 @@ impl GitRepository for RealGitRepository {
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
#[derive(Debug, Clone, Default)]
|
||||
pub struct FakeGitRepository {
|
||||
state: Arc<Mutex<FakeGitRepositoryState>>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
#[derive(Debug, Clone, Default)]
|
||||
pub struct FakeGitRepositoryState {
|
||||
pub path: PathBuf,
|
||||
pub event_emitter: smol::channel::Sender<PathBuf>,
|
||||
pub index_contents: HashMap<PathBuf, String>,
|
||||
pub blames: HashMap<PathBuf, Blame>,
|
||||
pub worktree_statuses: HashMap<RepoPath, GitFileStatus>,
|
||||
pub current_branch_name: Option<String>,
|
||||
pub branches: HashSet<String>,
|
||||
pub branch_name: Option<String>,
|
||||
}
|
||||
|
||||
impl FakeGitRepository {
|
||||
@@ -241,20 +220,6 @@ impl FakeGitRepository {
|
||||
}
|
||||
}
|
||||
|
||||
impl FakeGitRepositoryState {
|
||||
pub fn new(path: PathBuf, event_emitter: smol::channel::Sender<PathBuf>) -> Self {
|
||||
FakeGitRepositoryState {
|
||||
path,
|
||||
event_emitter,
|
||||
index_contents: Default::default(),
|
||||
blames: Default::default(),
|
||||
worktree_statuses: Default::default(),
|
||||
current_branch_name: Default::default(),
|
||||
branches: Default::default(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl GitRepository for FakeGitRepository {
|
||||
fn reload_index(&self) {}
|
||||
|
||||
@@ -269,7 +234,7 @@ impl GitRepository for FakeGitRepository {
|
||||
|
||||
fn branch_name(&self) -> Option<String> {
|
||||
let state = self.state.lock();
|
||||
state.current_branch_name.clone()
|
||||
state.branch_name.clone()
|
||||
}
|
||||
|
||||
fn head_sha(&self) -> Option<String> {
|
||||
@@ -299,41 +264,18 @@ impl GitRepository for FakeGitRepository {
|
||||
}
|
||||
|
||||
fn branches(&self) -> Result<Vec<Branch>> {
|
||||
let state = self.state.lock();
|
||||
let current_branch = &state.current_branch_name;
|
||||
Ok(state
|
||||
.branches
|
||||
.iter()
|
||||
.map(|branch_name| Branch {
|
||||
is_head: Some(branch_name) == current_branch.as_ref(),
|
||||
name: branch_name.into(),
|
||||
unix_timestamp: None,
|
||||
})
|
||||
.collect())
|
||||
}
|
||||
|
||||
fn branch_exits(&self, name: &str) -> Result<bool> {
|
||||
let state = self.state.lock();
|
||||
Ok(state.branches.contains(name))
|
||||
Ok(vec![])
|
||||
}
|
||||
|
||||
fn change_branch(&self, name: &str) -> Result<()> {
|
||||
let mut state = self.state.lock();
|
||||
state.current_branch_name = Some(name.to_owned());
|
||||
state
|
||||
.event_emitter
|
||||
.try_send(state.path.clone())
|
||||
.expect("Dropped repo change event");
|
||||
state.branch_name = Some(name.to_owned());
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn create_branch(&self, name: &str) -> Result<()> {
|
||||
let mut state = self.state.lock();
|
||||
state.branches.insert(name.to_owned());
|
||||
state
|
||||
.event_emitter
|
||||
.try_send(state.path.clone())
|
||||
.expect("Dropped repo change event");
|
||||
state.branch_name = Some(name.to_owned());
|
||||
Ok(())
|
||||
}
|
||||
|
||||
|
||||
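Both with_git_state call sites above create the per-directory git repository state lazily, on first touch, via Option::get_or_insert_with; one side also threads the dot-git path and an event channel into that state. A simplified sketch of that lazy-initialization shape with stand-in types (the names here are illustrative, not the zed structs):

use std::path::PathBuf;
use std::sync::{Arc, Mutex};

#[derive(Default, Debug)]
struct RepoState {
    path: PathBuf,
    branches: Vec<String>,
}

struct DirEntry {
    // Created on demand the first time git state is needed for this directory.
    git_repo_state: Option<Arc<Mutex<RepoState>>>,
}

impl DirEntry {
    fn with_git_state<R>(&mut self, dot_git: PathBuf, f: impl FnOnce(&mut RepoState) -> R) -> R {
        let state = self.git_repo_state.get_or_insert_with(|| {
            Arc::new(Mutex::new(RepoState { path: dot_git, branches: Vec::new() }))
        });
        f(&mut state.lock().unwrap())
    }
}

fn main() {
    let mut entry = DirEntry { git_repo_state: None };
    entry.with_git_state(PathBuf::from("/root/.git"), |s| s.branches.push("main".into()));
    // The second path is ignored: the state already exists, so the closure never runs.
    entry.with_git_state(PathBuf::from("/ignored/.git"), |s| assert_eq!(s.branches.len(), 1));
}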
@@ -485,7 +485,7 @@ impl Render for TextInput {
|
||||
div()
|
||||
.flex()
|
||||
.key_context("TextInput")
|
||||
.track_focus(&self.focus_handle(cx))
|
||||
.track_focus(&self.focus_handle)
|
||||
.cursor(CursorStyle::IBeam)
|
||||
.on_action(cx.listener(Self::backspace))
|
||||
.on_action(cx.listener(Self::delete))
|
||||
@@ -549,7 +549,7 @@ impl Render for InputExample {
|
||||
let num_keystrokes = self.recent_keystrokes.len();
|
||||
div()
|
||||
.bg(rgb(0xaaaaaa))
|
||||
.track_focus(&self.focus_handle(cx))
|
||||
.track_focus(&self.focus_handle)
|
||||
.flex()
|
||||
.flex_col()
|
||||
.size_full()
|
||||
|
||||
@@ -217,7 +217,6 @@ pub(crate) type KeystrokeObserver =
|
||||
type QuitHandler = Box<dyn FnOnce(&mut AppContext) -> LocalBoxFuture<'static, ()> + 'static>;
|
||||
type ReleaseListener = Box<dyn FnOnce(&mut dyn Any, &mut AppContext) + 'static>;
|
||||
type NewViewListener = Box<dyn FnMut(AnyView, &mut WindowContext) + 'static>;
|
||||
type NewModelListener = Box<dyn FnMut(AnyModel, &mut AppContext) + 'static>;
|
||||
|
||||
/// Contains the state of the full application, and passed as a reference to a variety of callbacks.
|
||||
/// Other contexts such as [ModelContext], [WindowContext], and [ViewContext] deref to this type, making it the most general context type.
|
||||
@@ -238,7 +237,6 @@ pub struct AppContext {
|
||||
http_client: Arc<dyn HttpClient>,
|
||||
pub(crate) globals_by_type: FxHashMap<TypeId, Box<dyn Any>>,
|
||||
pub(crate) entities: EntityMap,
|
||||
pub(crate) new_model_observers: SubscriberSet<TypeId, NewModelListener>,
|
||||
pub(crate) new_view_observers: SubscriberSet<TypeId, NewViewListener>,
|
||||
pub(crate) windows: SlotMap<WindowId, Option<Window>>,
|
||||
pub(crate) window_handles: FxHashMap<WindowId, AnyWindowHandle>,
|
||||
@@ -258,9 +256,6 @@ pub struct AppContext {
|
||||
pub(crate) layout_id_buffer: Vec<LayoutId>, // We recycle this memory across layout requests.
|
||||
pub(crate) propagate_event: bool,
|
||||
pub(crate) prompt_builder: Option<PromptBuilder>,
|
||||
|
||||
#[cfg(any(test, feature = "test-support", debug_assertions))]
|
||||
pub(crate) name: Option<&'static str>,
|
||||
}
|
||||
|
||||
impl AppContext {
|
||||
@@ -298,7 +293,6 @@ impl AppContext {
|
||||
globals_by_type: FxHashMap::default(),
|
||||
entities,
|
||||
new_view_observers: SubscriberSet::new(),
|
||||
new_model_observers: SubscriberSet::new(),
|
||||
window_handles: FxHashMap::default(),
|
||||
windows: SlotMap::with_key(),
|
||||
keymap: Rc::new(RefCell::new(Keymap::default())),
|
||||
@@ -315,9 +309,6 @@ impl AppContext {
|
||||
layout_id_buffer: Default::default(),
|
||||
propagate_event: true,
|
||||
prompt_builder: Some(PromptBuilder::Default),
|
||||
|
||||
#[cfg(any(test, feature = "test-support", debug_assertions))]
|
||||
name: None,
|
||||
}),
|
||||
});
|
||||
|
||||
@@ -997,7 +988,6 @@ impl AppContext {
|
||||
}
|
||||
|
||||
/// Move the global of the given type to the stack.
|
||||
#[track_caller]
|
||||
pub(crate) fn lease_global<G: Global>(&mut self) -> GlobalLease<G> {
|
||||
GlobalLease::new(
|
||||
self.globals_by_type
|
||||
@@ -1019,7 +1009,6 @@ impl AppContext {
|
||||
activate();
|
||||
subscription
|
||||
}
|
||||
|
||||
/// Arrange for the given function to be invoked whenever a view of the specified type is created.
|
||||
/// The function will be passed a mutable reference to the view along with an appropriate context.
|
||||
pub fn observe_new_views<V: 'static>(
|
||||
@@ -1039,31 +1028,6 @@ impl AppContext {
|
||||
)
|
||||
}
|
||||
|
||||
pub(crate) fn new_model_observer(&self, key: TypeId, value: NewModelListener) -> Subscription {
|
||||
let (subscription, activate) = self.new_model_observers.insert(key, value);
|
||||
activate();
|
||||
subscription
|
||||
}
|
||||
|
||||
/// Arrange for the given function to be invoked whenever a view of the specified type is created.
|
||||
/// The function will be passed a mutable reference to the view along with an appropriate context.
|
||||
pub fn observe_new_models<T: 'static>(
|
||||
&self,
|
||||
on_new: impl 'static + Fn(&mut T, &mut ModelContext<T>),
|
||||
) -> Subscription {
|
||||
self.new_model_observer(
|
||||
TypeId::of::<T>(),
|
||||
Box::new(move |any_model: AnyModel, cx: &mut AppContext| {
|
||||
any_model
|
||||
.downcast::<T>()
|
||||
.unwrap()
|
||||
.update(cx, |model_state, cx| {
|
||||
on_new(model_state, cx);
|
||||
})
|
||||
}),
|
||||
)
|
||||
}
|
||||
|
||||
/// Observe the release of a model or view. The callback is invoked after the model or view
|
||||
/// has no more strong references but before it has been dropped.
|
||||
pub fn observe_release<E, T>(
|
||||
@@ -1355,12 +1319,6 @@ impl AppContext {
|
||||
|
||||
(task, is_first)
|
||||
}
|
||||
|
||||
/// Get the name for this App.
|
||||
#[cfg(any(test, feature = "test-support", debug_assertions))]
|
||||
pub fn get_name(&self) -> &'static str {
|
||||
self.name.as_ref().unwrap()
|
||||
}
|
||||
}
|
||||
|
||||
impl Context for AppContext {
|
||||
@@ -1375,21 +1333,8 @@ impl Context for AppContext {
|
||||
) -> Model<T> {
|
||||
self.update(|cx| {
|
||||
let slot = cx.entities.reserve();
|
||||
let model = slot.clone();
|
||||
let entity = build_model(&mut ModelContext::new(cx, slot.downgrade()));
|
||||
cx.entities.insert(slot, entity);
|
||||
|
||||
// Non-generic part to avoid leaking SubscriberSet to invokers of `new_view`.
|
||||
fn notify_observers(cx: &mut AppContext, tid: TypeId, model: AnyModel) {
|
||||
cx.new_model_observers.clone().retain(&tid, |observer| {
|
||||
let any_model = model.clone();
|
||||
(observer)(any_model, cx);
|
||||
true
|
||||
});
|
||||
}
|
||||
notify_observers(cx, TypeId::of::<T>(), AnyModel::from(model.clone()));
|
||||
|
||||
model
|
||||
cx.entities.insert(slot, entity)
|
||||
})
|
||||
}
|
||||
|
||||
|
||||
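One side of the new_model hunk above keeps the observer notification in a non-generic inner fn (notify_observers), so the SubscriberSet bookkeeping is compiled once rather than per entity type and is not exposed to callers of the generic method. A minimal standalone sketch of that outlining pattern, with illustrative types only:

use std::any::{Any, TypeId};

// Generic front door: callers pick T, but the shared bookkeeping below is
// compiled exactly once.
fn register<T: Any>(value: T) -> Box<dyn Any> {
    let boxed: Box<dyn Any> = Box::new(value);

    // Non-generic part: avoids monomorphizing the notification logic for every T.
    fn notify_observers(type_id: TypeId) {
        println!("registered entity of type {type_id:?}");
    }
    notify_observers(TypeId::of::<T>());

    boxed
}

fn main() {
    let _a = register(42u32);
    let _b = register(String::from("hello"));
}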
@@ -536,15 +536,6 @@ impl AnyWeakModel {
|
||||
}
|
||||
}
|
||||
|
||||
impl std::fmt::Debug for AnyWeakModel {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
f.debug_struct(type_name::<Self>())
|
||||
.field("entity_id", &self.entity_id)
|
||||
.field("entity_type", &self.entity_type)
|
||||
.finish()
|
||||
}
|
||||
}
|
||||
|
||||
impl<T> From<WeakModel<T>> for AnyWeakModel {
|
||||
fn from(model: WeakModel<T>) -> Self {
|
||||
model.any_model
@@ -478,12 +478,6 @@ impl TestAppContext {
|
||||
.await
|
||||
.unwrap();
|
||||
}
|
||||
|
||||
/// Set a name for this App.
|
||||
#[cfg(any(test, feature = "test-support"))]
|
||||
pub fn set_name(&mut self, name: &'static str) {
|
||||
self.update(|cx| cx.name = Some(name))
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: 'static> Model<T> {
|
||||
|
||||
@@ -340,7 +340,6 @@ impl Element for UniformList {
|
||||
visible_range.clone(),
|
||||
bounds,
|
||||
item_height,
|
||||
self.item_count,
|
||||
cx,
|
||||
);
|
||||
let available_space = size(
|
||||
@@ -397,7 +396,6 @@ pub trait UniformListDecoration {
|
||||
visible_range: Range<usize>,
|
||||
bounds: Bounds<Pixels>,
|
||||
item_height: Pixels,
|
||||
item_count: usize,
|
||||
cx: &mut WindowContext,
|
||||
) -> AnyElement;
|
||||
}
|
||||
|
||||
@@ -57,7 +57,6 @@ pub trait UpdateGlobal {
|
||||
}
|
||||
|
||||
impl<T: Global> UpdateGlobal for T {
|
||||
#[track_caller]
|
||||
fn update_global<C, F, R>(cx: &mut C, update: F) -> R
|
||||
where
|
||||
C: BorrowAppContext,
|
||||
|
||||
@@ -306,7 +306,6 @@ where
|
||||
self.borrow_mut().set_global(global)
|
||||
}
|
||||
|
||||
#[track_caller]
|
||||
fn update_global<G, R>(&mut self, f: impl FnOnce(&mut G, &mut Self) -> R) -> R
|
||||
where
|
||||
G: Global,
|
||||
|
||||
@@ -16,7 +16,7 @@
|
||||
/// impl Render for Editor {
|
||||
/// fn render(&mut self, cx: &mut ViewContext<Self>) -> impl IntoElement {
|
||||
/// div()
|
||||
/// .track_focus(&self.focus_handle(cx))
|
||||
/// .track_focus(&self.focus_handle)
|
||||
/// .keymap_context("Editor")
|
||||
/// .on_action(cx.listener(Editor::undo))
|
||||
/// .on_action(cx.listener(Editor::redo))
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
use crate::{
|
||||
black, fill, point, px, size, Bounds, Hsla, LineLayout, Pixels, Point, Result, SharedString,
|
||||
StrikethroughStyle, UnderlineStyle, WindowContext, WrapBoundary, WrappedLineLayout,
|
||||
black, fill, point, px, size, Bounds, Half, Hsla, LineLayout, Pixels, Point, Result,
|
||||
SharedString, StrikethroughStyle, UnderlineStyle, WindowContext, WrapBoundary,
|
||||
WrappedLineLayout,
|
||||
};
|
||||
use derive_more::{Deref, DerefMut};
|
||||
use smallvec::SmallVec;
|
||||
@@ -129,8 +130,9 @@ fn paint_line(
|
||||
let text_system = cx.text_system().clone();
|
||||
let mut glyph_origin = origin;
|
||||
let mut prev_glyph_position = Point::default();
|
||||
let mut max_glyph_size = size(px(0.), px(0.));
|
||||
for (run_ix, run) in layout.runs.iter().enumerate() {
|
||||
let max_glyph_size = text_system.bounding_box(run.font_id, layout.font_size).size;
|
||||
max_glyph_size = text_system.bounding_box(run.font_id, layout.font_size).size;
|
||||
|
||||
for (glyph_ix, glyph) in run.glyphs.iter().enumerate() {
|
||||
glyph_origin.x += glyph.position.x - prev_glyph_position.x;
|
||||
@@ -139,6 +141,9 @@ fn paint_line(
|
||||
wraps.next();
|
||||
if let Some((background_origin, background_color)) = current_background.as_mut()
|
||||
{
|
||||
if glyph_origin.x == background_origin.x {
|
||||
background_origin.x -= max_glyph_size.width.half()
|
||||
}
|
||||
cx.paint_quad(fill(
|
||||
Bounds {
|
||||
origin: *background_origin,
|
||||
@@ -150,6 +155,9 @@ fn paint_line(
|
||||
background_origin.y += line_height;
|
||||
}
|
||||
if let Some((underline_origin, underline_style)) = current_underline.as_mut() {
|
||||
if glyph_origin.x == underline_origin.x {
|
||||
underline_origin.x -= max_glyph_size.width.half();
|
||||
};
|
||||
cx.paint_underline(
|
||||
*underline_origin,
|
||||
glyph_origin.x - underline_origin.x,
|
||||
@@ -161,6 +169,9 @@ fn paint_line(
|
||||
if let Some((strikethrough_origin, strikethrough_style)) =
|
||||
current_strikethrough.as_mut()
|
||||
{
|
||||
if glyph_origin.x == strikethrough_origin.x {
|
||||
strikethrough_origin.x -= max_glyph_size.width.half();
|
||||
};
|
||||
cx.paint_strikethrough(
|
||||
*strikethrough_origin,
|
||||
glyph_origin.x - strikethrough_origin.x,
|
||||
@@ -179,7 +190,18 @@ fn paint_line(
|
||||
let mut finished_underline: Option<(Point<Pixels>, UnderlineStyle)> = None;
|
||||
let mut finished_strikethrough: Option<(Point<Pixels>, StrikethroughStyle)> = None;
|
||||
if glyph.index >= run_end {
|
||||
if let Some(style_run) = decoration_runs.next() {
|
||||
let mut style_run = decoration_runs.next();
|
||||
|
||||
// ignore style runs that apply to a partial glyph
|
||||
while let Some(run) = style_run {
|
||||
if glyph.index < run_end + (run.len as usize) {
|
||||
break;
|
||||
}
|
||||
run_end += run.len as usize;
|
||||
style_run = decoration_runs.next();
|
||||
}
|
||||
|
||||
if let Some(style_run) = style_run {
|
||||
if let Some((_, background_color)) = &mut current_background {
|
||||
if style_run.background_color.as_ref() != Some(background_color) {
|
||||
finished_background = current_background.take();
|
||||
@@ -240,10 +262,14 @@ fn paint_line(
|
||||
}
|
||||
|
||||
if let Some((background_origin, background_color)) = finished_background {
|
||||
let mut width = glyph_origin.x - background_origin.x;
|
||||
if width == px(0.) {
|
||||
width = px(5.)
|
||||
};
|
||||
cx.paint_quad(fill(
|
||||
Bounds {
|
||||
origin: background_origin,
|
||||
size: size(glyph_origin.x - background_origin.x, line_height),
|
||||
size: size(width, line_height),
|
||||
},
|
||||
background_color,
|
||||
));
|
||||
@@ -299,7 +325,10 @@ fn paint_line(
|
||||
last_line_end_x -= glyph.position.x;
|
||||
}
|
||||
|
||||
if let Some((background_origin, background_color)) = current_background.take() {
|
||||
if let Some((mut background_origin, background_color)) = current_background.take() {
|
||||
if last_line_end_x == background_origin.x {
|
||||
background_origin.x -= max_glyph_size.width.half()
|
||||
};
|
||||
cx.paint_quad(fill(
|
||||
Bounds {
|
||||
origin: background_origin,
|
||||
@@ -309,7 +338,10 @@ fn paint_line(
|
||||
));
|
||||
}
|
||||
|
||||
if let Some((underline_start, underline_style)) = current_underline.take() {
|
||||
if let Some((mut underline_start, underline_style)) = current_underline.take() {
|
||||
if last_line_end_x == underline_start.x {
|
||||
underline_start.x -= max_glyph_size.width.half()
|
||||
};
|
||||
cx.paint_underline(
|
||||
underline_start,
|
||||
last_line_end_x - underline_start.x,
|
||||
@@ -317,7 +349,10 @@ fn paint_line(
|
||||
);
|
||||
}
|
||||
|
||||
if let Some((strikethrough_start, strikethrough_style)) = current_strikethrough.take() {
|
||||
if let Some((mut strikethrough_start, strikethrough_style)) = current_strikethrough.take() {
|
||||
if last_line_end_x == strikethrough_start.x {
|
||||
strikethrough_start.x -= max_glyph_size.width.half()
|
||||
};
|
||||
cx.paint_strikethrough(
|
||||
strikethrough_start,
|
||||
last_line_end_x - strikethrough_start.x,
|
||||
|
||||
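The paint_line hunks above adjust decoration painting: a maximum glyph size is tracked so wrapped backgrounds, underlines, and strikethroughs can be nudged back by half a glyph width, and decoration runs that end before the current glyph are skipped rather than taken unconditionally. A rough standalone sketch of the run-skipping part, with simplified stand-in types (not the gpui structs):

#[derive(Clone, Copy, Debug, PartialEq)]
struct StyleRun {
    len: usize,
    bold: bool,
}

/// Advance through decoration runs until one actually covers `glyph_index`,
/// ignoring runs that apply only to glyphs already painted.
fn style_for_glyph(
    runs: &mut impl Iterator<Item = StyleRun>,
    run_end: &mut usize,
    glyph_index: usize,
) -> Option<StyleRun> {
    let mut style_run = runs.next();
    while let Some(run) = style_run {
        if glyph_index < *run_end + run.len {
            break;
        }
        *run_end += run.len;
        style_run = runs.next();
    }
    style_run
}

fn main() {
    let runs = vec![
        StyleRun { len: 2, bold: false },
        StyleRun { len: 1, bold: true },
        StyleRun { len: 4, bold: false },
    ];
    let mut iter = runs.into_iter();
    let mut run_end = 0;
    // Glyph 3 falls past the first two runs (covering indices 0..3), so both are skipped.
    let run = style_for_glyph(&mut iter, &mut run_end, 3).unwrap();
    assert_eq!(run, StyleRun { len: 4, bold: false });
    assert_eq!(run_end, 3);
}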
@@ -271,7 +271,7 @@ impl Render for ImageView {
|
||||
.left_0();
|
||||
|
||||
div()
|
||||
.track_focus(&self.focus_handle(cx))
|
||||
.track_focus(&self.focus_handle)
|
||||
.size_full()
|
||||
.child(checkered_background)
|
||||
.child(
@@ -501,6 +501,8 @@ pub struct Chunk<'a> {
pub is_unnecessary: bool,
/// Whether this chunk of text was originally a tab character.
pub is_tab: bool,
/// Whether this chunk of text is an invisible character.
pub is_invisible: bool,
/// An optional recipe for how the chunk should be presented.
pub renderer: Option<ChunkRenderer>,
}
@@ -1967,27 +1969,18 @@ impl Buffer {
|
||||
let new_text_length = new_text.len();
|
||||
let old_start = range.start.to_point(&before_edit);
|
||||
let new_start = (delta + range.start as isize) as usize;
|
||||
let range_len = range.end - range.start;
|
||||
delta += new_text_length as isize - range_len as isize;
|
||||
delta += new_text_length as isize - (range.end as isize - range.start as isize);
|
||||
|
||||
// Decide what range of the insertion to auto-indent, and whether
|
||||
// the first line of the insertion should be considered a newly-inserted line
|
||||
// or an edit to an existing line.
|
||||
let mut range_of_insertion_to_indent = 0..new_text_length;
|
||||
let mut first_line_is_new = true;
|
||||
let mut first_line_is_new = false;
|
||||
let mut original_indent_column = None;
|
||||
|
||||
let old_line_start = before_edit.indent_size_for_line(old_start.row).len;
|
||||
let old_line_end = before_edit.line_len(old_start.row);
|
||||
|
||||
if old_start.column > old_line_start {
|
||||
first_line_is_new = false;
|
||||
}
|
||||
|
||||
if !new_text.contains('\n')
|
||||
&& (old_start.column + (range_len as u32) < old_line_end
|
||||
|| old_line_end == old_line_start)
|
||||
// When inserting an entire line at the beginning of an existing line,
|
||||
// treat the insertion as new.
|
||||
if new_text.contains('\n')
|
||||
&& old_start.column <= before_edit.indent_size_for_line(old_start.row).len
|
||||
{
|
||||
first_line_is_new = false;
|
||||
first_line_is_new = true;
|
||||
}
|
||||
|
||||
// When inserting text starting with a newline, avoid auto-indenting the
|
||||
@@ -1997,7 +1990,7 @@ impl Buffer {
|
||||
first_line_is_new = true;
|
||||
}
|
||||
|
||||
let mut original_indent_column = None;
|
||||
// Avoid auto-indenting after the insertion.
|
||||
if let AutoindentMode::Block {
|
||||
original_indent_columns,
|
||||
} = &mode
|
||||
@@ -2009,8 +2002,6 @@ impl Buffer {
|
||||
)
|
||||
.len
|
||||
}));
|
||||
|
||||
// Avoid auto-indenting the line after the edit.
|
||||
if new_text[range_of_insertion_to_indent.clone()].ends_with('\n') {
|
||||
range_of_insertion_to_indent.end -= 1;
|
||||
}
|
||||
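The autoindent hunks above change how first_line_is_new is decided: on one side it defaults to true and carves out exceptions, on the other it starts from false and marks the first inserted line as new only when a multi-line insertion lands at or before the existing line's indentation (a later hunk also handles insertions that begin with a newline). A condensed sketch of that decision, detached from the Buffer type and using illustrative parameter names:

/// Decide whether the first line of an insertion should be auto-indented as a
/// brand-new line, or treated as an edit to the line it lands on.
fn first_line_is_new(new_text: &str, insert_column: u32, old_line_indent_len: u32) -> bool {
    // When inserting an entire line at the beginning of an existing line,
    // treat the insertion as new.
    new_text.contains('\n') && insert_column <= old_line_indent_len
}

fn main() {
    assert!(first_line_is_new("fn a() {\n", 0, 4)); // whole line inserted at column 0
    assert!(!first_line_is_new("foo", 7, 4)); // plain in-line edit
}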
@@ -4046,7 +4037,7 @@ impl<'a> BufferChunks<'a> {
|
||||
let old_range = std::mem::replace(&mut self.range, range.clone());
|
||||
self.chunks.set_range(self.range.clone());
|
||||
if let Some(highlights) = self.highlights.as_mut() {
|
||||
if old_range.start <= self.range.start && old_range.end >= self.range.end {
|
||||
if old_range.start >= self.range.start && old_range.end <= self.range.end {
|
||||
// Reuse existing highlights stack, as the new range is a subrange of the old one.
|
||||
highlights
|
||||
.stack
|
||||
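The seek hunk above only reuses the existing highlight stack when, per the comment, the new range is a subrange of the previously chunked range; otherwise the stack must be rebuilt. The containment test itself reduces to:

use std::ops::Range;

/// True if `new` lies entirely within `old`, so cached per-range state
/// (like a highlight stack) can be reused instead of recomputed.
fn is_subrange(new: &Range<usize>, old: &Range<usize>) -> bool {
    old.start <= new.start && new.end <= old.end
}

fn main() {
    assert!(is_subrange(&(5..10), &(0..20)));
    assert!(!is_subrange(&(5..25), &(0..20)));
}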
@@ -4103,10 +4094,6 @@ impl<'a> BufferChunks<'a> {
|
||||
diagnostic_endpoints
|
||||
.sort_unstable_by_key(|endpoint| (endpoint.offset, !endpoint.is_start));
|
||||
*diagnostics = diagnostic_endpoints.into_iter().peekable();
|
||||
self.hint_depth = 0;
|
||||
self.error_depth = 0;
|
||||
self.warning_depth = 0;
|
||||
self.information_depth = 0;
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -4226,7 +4213,6 @@ impl<'a> Iterator for BufferChunks<'a> {
|
||||
if self.range.start == self.chunks.offset() + chunk.len() {
|
||||
self.chunks.next().unwrap();
|
||||
}
|
||||
|
||||
Some(Chunk {
|
||||
text: slice,
|
||||
syntax_highlight_id: highlight_id,
|
||||
|
||||
@@ -1241,6 +1241,7 @@ fn test_autoindent_does_not_adjust_lines_with_unchanged_suggestion(cx: &mut AppC
|
||||
Some(AutoindentMode::EachLine),
|
||||
cx,
|
||||
);
|
||||
|
||||
assert_eq!(
|
||||
buffer.text(),
|
||||
"
|
||||
@@ -1255,74 +1256,6 @@ fn test_autoindent_does_not_adjust_lines_with_unchanged_suggestion(cx: &mut AppC
|
||||
"
|
||||
.unindent()
|
||||
);
|
||||
|
||||
// Insert a newline after the open brace. It is auto-indented
|
||||
buffer.edit_via_marked_text(
|
||||
&"
|
||||
fn a() {«
|
||||
»
|
||||
c
|
||||
.f
|
||||
.g();
|
||||
d
|
||||
.f
|
||||
.g();
|
||||
}
|
||||
"
|
||||
.unindent(),
|
||||
Some(AutoindentMode::EachLine),
|
||||
cx,
|
||||
);
|
||||
assert_eq!(
|
||||
buffer.text(),
|
||||
"
|
||||
fn a() {
|
||||
ˇ
|
||||
c
|
||||
.f
|
||||
.g();
|
||||
d
|
||||
.f
|
||||
.g();
|
||||
}
|
||||
"
|
||||
.unindent()
|
||||
.replace("ˇ", "")
|
||||
);
|
||||
|
||||
// Manually outdent the line. It stays outdented.
|
||||
buffer.edit_via_marked_text(
|
||||
&"
|
||||
fn a() {
|
||||
«»
|
||||
c
|
||||
.f
|
||||
.g();
|
||||
d
|
||||
.f
|
||||
.g();
|
||||
}
|
||||
"
|
||||
.unindent(),
|
||||
Some(AutoindentMode::EachLine),
|
||||
cx,
|
||||
);
|
||||
assert_eq!(
|
||||
buffer.text(),
|
||||
"
|
||||
fn a() {
|
||||
|
||||
c
|
||||
.f
|
||||
.g();
|
||||
d
|
||||
.f
|
||||
.g();
|
||||
}
|
||||
"
|
||||
.unindent()
|
||||
);
|
||||
|
||||
buffer
|
||||
});
@@ -15,7 +15,6 @@ mod outline;
|
||||
pub mod proto;
|
||||
mod syntax_map;
|
||||
mod task_context;
|
||||
mod toolchain;
|
||||
|
||||
#[cfg(test)]
|
||||
pub mod buffer_tests;
|
||||
@@ -29,7 +28,7 @@ use futures::Future;
|
||||
use gpui::{AppContext, AsyncAppContext, Model, SharedString, Task};
|
||||
pub use highlight_map::HighlightMap;
|
||||
use http_client::HttpClient;
|
||||
pub use language_registry::{LanguageName, LoadedLanguage};
|
||||
pub use language_registry::LanguageName;
|
||||
use lsp::{CodeActionKind, LanguageServerBinary, LanguageServerBinaryOptions};
|
||||
use parking_lot::Mutex;
|
||||
use regex::Regex;
|
||||
@@ -62,7 +61,6 @@ use syntax_map::{QueryCursorHandle, SyntaxSnapshot};
|
||||
use task::RunnableTag;
|
||||
pub use task_context::{ContextProvider, RunnableRange};
|
||||
use theme::SyntaxTheme;
|
||||
pub use toolchain::{LanguageToolchainStore, Toolchain, ToolchainList, ToolchainLister};
|
||||
use tree_sitter::{self, wasmtime, Query, QueryCursor, WasmStore};
|
||||
use util::serde::default_true;
|
||||
|
||||
@@ -504,7 +502,6 @@ pub trait LspAdapter: 'static + Send + Sync {
|
||||
async fn workspace_configuration(
|
||||
self: Arc<Self>,
|
||||
_: &Arc<dyn LspAdapterDelegate>,
|
||||
_: Arc<dyn LanguageToolchainStore>,
|
||||
_cx: &mut AsyncAppContext,
|
||||
) -> Result<Value> {
|
||||
Ok(serde_json::json!({}))
|
||||
@@ -858,7 +855,6 @@ pub struct Language {
|
||||
pub(crate) config: LanguageConfig,
|
||||
pub(crate) grammar: Option<Arc<Grammar>>,
|
||||
pub(crate) context_provider: Option<Arc<dyn ContextProvider>>,
|
||||
pub(crate) toolchain: Option<Arc<dyn ToolchainLister>>,
|
||||
}
|
||||
|
||||
#[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Clone, Copy)]
|
||||
@@ -987,7 +983,6 @@ impl Language {
|
||||
})
|
||||
}),
|
||||
context_provider: None,
|
||||
toolchain: None,
|
||||
}
|
||||
}
|
||||
|
||||
@@ -996,11 +991,6 @@ impl Language {
|
||||
self
|
||||
}
|
||||
|
||||
pub fn with_toolchain_lister(mut self, provider: Option<Arc<dyn ToolchainLister>>) -> Self {
|
||||
self.toolchain = provider;
|
||||
self
|
||||
}
|
||||
|
||||
pub fn with_queries(mut self, queries: LanguageQueries) -> Result<Self> {
|
||||
if let Some(query) = queries.highlights {
|
||||
self = self
|
||||
@@ -1371,10 +1361,6 @@ impl Language {
|
||||
self.context_provider.clone()
|
||||
}
|
||||
|
||||
pub fn toolchain_lister(&self) -> Option<Arc<dyn ToolchainLister>> {
|
||||
self.toolchain.clone()
|
||||
}
|
||||
|
||||
pub fn highlight_text<'a>(
|
||||
self: &'a Arc<Self>,
|
||||
text: &'a Rope,
|
||||
|
||||
@@ -4,7 +4,7 @@ use crate::{
|
||||
},
|
||||
task_context::ContextProvider,
|
||||
with_parser, CachedLspAdapter, File, Language, LanguageConfig, LanguageId, LanguageMatcher,
|
||||
LanguageServerName, LspAdapter, ToolchainLister, PLAIN_TEXT,
|
||||
LanguageServerName, LspAdapter, PLAIN_TEXT,
|
||||
};
|
||||
use anyhow::{anyhow, Context, Result};
|
||||
use collections::{hash_map, HashMap, HashSet};
|
||||
@@ -75,13 +75,6 @@ impl<'a> From<&'a str> for LanguageName {
|
||||
}
|
||||
}
|
||||
|
||||
impl From<LanguageName> for String {
|
||||
fn from(value: LanguageName) -> Self {
|
||||
let value: &str = &value.0;
|
||||
Self::from(value)
|
||||
}
|
||||
}
|
||||
|
||||
pub struct LanguageRegistry {
|
||||
state: RwLock<LanguageRegistryState>,
|
||||
language_server_download_dir: Option<Arc<Path>>,
|
||||
@@ -130,7 +123,16 @@ pub struct AvailableLanguage {
|
||||
name: LanguageName,
|
||||
grammar: Option<Arc<str>>,
|
||||
matcher: LanguageMatcher,
|
||||
load: Arc<dyn Fn() -> Result<LoadedLanguage> + 'static + Send + Sync>,
|
||||
load: Arc<
|
||||
dyn Fn() -> Result<(
|
||||
LanguageConfig,
|
||||
LanguageQueries,
|
||||
Option<Arc<dyn ContextProvider>>,
|
||||
)>
|
||||
+ 'static
|
||||
+ Send
|
||||
+ Sync,
|
||||
>,
|
||||
loaded: bool,
|
||||
}
|
||||
|
||||
@@ -198,13 +200,6 @@ struct LspBinaryStatusSender {
|
||||
txs: Arc<Mutex<Vec<mpsc::UnboundedSender<(LanguageServerName, LanguageServerBinaryStatus)>>>>,
|
||||
}
|
||||
|
||||
pub struct LoadedLanguage {
|
||||
pub config: LanguageConfig,
|
||||
pub queries: LanguageQueries,
|
||||
pub context_provider: Option<Arc<dyn ContextProvider>>,
|
||||
pub toolchain_provider: Option<Arc<dyn ToolchainLister>>,
|
||||
}
|
||||
|
||||
impl LanguageRegistry {
|
||||
pub fn new(executor: BackgroundExecutor) -> Self {
|
||||
let this = Self {
|
||||
@@ -288,14 +283,7 @@ impl LanguageRegistry {
|
||||
config.name.clone(),
|
||||
config.grammar.clone(),
|
||||
config.matcher.clone(),
|
||||
move || {
|
||||
Ok(LoadedLanguage {
|
||||
config: config.clone(),
|
||||
queries: Default::default(),
|
||||
toolchain_provider: None,
|
||||
context_provider: None,
|
||||
})
|
||||
},
|
||||
move || Ok((config.clone(), Default::default(), None)),
|
||||
)
|
||||
}
|
||||
|
||||
@@ -436,7 +424,14 @@ impl LanguageRegistry {
|
||||
name: LanguageName,
|
||||
grammar_name: Option<Arc<str>>,
|
||||
matcher: LanguageMatcher,
|
||||
load: impl Fn() -> Result<LoadedLanguage> + 'static + Send + Sync,
|
||||
load: impl Fn() -> Result<(
|
||||
LanguageConfig,
|
||||
LanguageQueries,
|
||||
Option<Arc<dyn ContextProvider>>,
|
||||
)>
|
||||
+ 'static
|
||||
+ Send
|
||||
+ Sync,
|
||||
) {
|
||||
let load = Arc::new(load);
|
||||
let state = &mut *self.state.write();
|
||||
@@ -731,18 +726,16 @@ impl LanguageRegistry {
|
||||
self.executor
|
||||
.spawn(async move {
|
||||
let language = async {
|
||||
let loaded_language = (language_load)()?;
|
||||
if let Some(grammar) = loaded_language.config.grammar.clone() {
|
||||
let grammar = Some(this.get_or_load_grammar(grammar).await?);
|
||||
let (config, queries, provider) = (language_load)()?;
|
||||
|
||||
Language::new_with_id(id, loaded_language.config, grammar)
|
||||
.with_context_provider(loaded_language.context_provider)
|
||||
.with_toolchain_lister(loaded_language.toolchain_provider)
|
||||
.with_queries(loaded_language.queries)
|
||||
if let Some(grammar) = config.grammar.clone() {
|
||||
let grammar = Some(this.get_or_load_grammar(grammar).await?);
|
||||
Language::new_with_id(id, config, grammar)
|
||||
.with_context_provider(provider)
|
||||
.with_queries(queries)
|
||||
} else {
|
||||
Ok(Language::new_with_id(id, loaded_language.config, None)
|
||||
.with_context_provider(loaded_language.context_provider)
|
||||
.with_toolchain_lister(loaded_language.toolchain_provider))
|
||||
Ok(Language::new_with_id(id, config, None)
|
||||
.with_context_provider(provider))
|
||||
}
|
||||
}
|
||||
.await;
|
||||
|
||||
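register_language above stores a load closure per language and only runs it (and fetches the grammar) when the language is first requested, so startup does not pay for every bundled language. A stripped-down sketch of that shape with placeholder config and query types, not the real LanguageRegistry API:

use std::collections::HashMap;
use std::sync::Arc;

#[derive(Clone, Debug)]
struct LanguageConfig {
    name: String,
}

#[derive(Default, Debug)]
struct LanguageQueries;

type LoadFn = Arc<dyn Fn() -> Result<(LanguageConfig, LanguageQueries), String> + Send + Sync>;

#[derive(Default)]
struct Registry {
    available: HashMap<String, LoadFn>,
}

impl Registry {
    /// Register a language lazily: nothing is parsed until `load` runs.
    fn register(
        &mut self,
        name: &str,
        load: impl Fn() -> Result<(LanguageConfig, LanguageQueries), String> + Send + Sync + 'static,
    ) {
        self.available.insert(name.to_string(), Arc::new(load));
    }

    /// Load on first use (a real registry would cache the result).
    fn load(&self, name: &str) -> Option<(LanguageConfig, LanguageQueries)> {
        let load = Arc::clone(self.available.get(name)?);
        (load)().ok()
    }
}

fn main() {
    let mut registry = Registry::default();
    registry.register("Rust", || {
        Ok((LanguageConfig { name: "Rust".into() }, LanguageQueries::default()))
    });
    let (config, _queries) = registry.load("Rust").unwrap();
    assert_eq!(config.name, "Rust");
}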
@@ -1,65 +0,0 @@
|
||||
//! Provides support for language toolchains.
|
||||
//!
|
||||
//! A language can have associated toolchains,
|
||||
//! which is a set of tools used to interact with the projects written in said language.
|
||||
//! For example, a Python project can have an associated virtual environment; a Rust project can have a toolchain override.
|
||||
|
||||
use std::{path::PathBuf, sync::Arc};
|
||||
|
||||
use async_trait::async_trait;
|
||||
use gpui::{AsyncAppContext, SharedString};
|
||||
use settings::WorktreeId;
|
||||
|
||||
use crate::LanguageName;
|
||||
|
||||
/// Represents a single toolchain.
|
||||
#[derive(Clone, Debug, PartialEq)]
|
||||
pub struct Toolchain {
|
||||
/// User-facing label
|
||||
pub name: SharedString,
|
||||
pub path: SharedString,
|
||||
pub language_name: LanguageName,
|
||||
}
|
||||
|
||||
#[async_trait(?Send)]
|
||||
pub trait ToolchainLister: Send + Sync {
|
||||
async fn list(&self, _: PathBuf) -> ToolchainList;
|
||||
}
|
||||
|
||||
#[async_trait(?Send)]
|
||||
pub trait LanguageToolchainStore {
|
||||
async fn active_toolchain(
|
||||
self: Arc<Self>,
|
||||
worktree_id: WorktreeId,
|
||||
language_name: LanguageName,
|
||||
cx: &mut AsyncAppContext,
|
||||
) -> Option<Toolchain>;
|
||||
}
|
||||
|
||||
type DefaultIndex = usize;
|
||||
#[derive(Default, Clone)]
|
||||
pub struct ToolchainList {
|
||||
pub toolchains: Vec<Toolchain>,
|
||||
pub default: Option<DefaultIndex>,
|
||||
pub groups: Box<[(usize, SharedString)]>,
|
||||
}
|
||||
|
||||
impl ToolchainList {
|
||||
pub fn toolchains(&self) -> &[Toolchain] {
|
||||
&self.toolchains
|
||||
}
|
||||
pub fn default_toolchain(&self) -> Option<Toolchain> {
|
||||
self.default.and_then(|ix| self.toolchains.get(ix)).cloned()
|
||||
}
|
||||
pub fn group_for_index(&self, index: usize) -> Option<(usize, SharedString)> {
|
||||
if index >= self.toolchains.len() {
|
||||
return None;
|
||||
}
|
||||
let first_equal_or_greater = self
|
||||
.groups
|
||||
.partition_point(|(group_lower_bound, _)| group_lower_bound <= &index);
|
||||
self.groups
|
||||
.get(first_equal_or_greater.checked_sub(1)?)
|
||||
.cloned()
|
||||
}
|
||||
}
|
||||
@@ -38,7 +38,7 @@ menu.workspace = true
|
||||
ollama = { workspace = true, features = ["schemars"] }
|
||||
open_ai = { workspace = true, features = ["schemars"] }
|
||||
parking_lot.workspace = true
|
||||
proto.workspace = true
|
||||
proto = { workspace = true, features = ["test-support"] }
|
||||
project.workspace = true
|
||||
schemars.workspace = true
|
||||
serde.workspace = true
|
||||
@@ -62,7 +62,6 @@ env_logger.workspace = true
|
||||
language = { workspace = true, features = ["test-support"] }
|
||||
log.workspace = true
|
||||
project = { workspace = true, features = ["test-support"] }
|
||||
proto = { workspace = true, features = ["test-support"] }
|
||||
rand.workspace = true
|
||||
text = { workspace = true, features = ["test-support"] }
|
||||
unindent.workspace = true
@@ -505,14 +505,10 @@ pub fn map_to_language_model_completion_events(
|
||||
LanguageModelToolUse {
|
||||
id: tool_use.id,
|
||||
name: tool_use.name,
|
||||
input: if tool_use.input_json.is_empty() {
|
||||
serde_json::Value::Null
|
||||
} else {
|
||||
serde_json::Value::from_str(
|
||||
&tool_use.input_json,
|
||||
)
|
||||
.map_err(|err| anyhow!(err))?
|
||||
},
|
||||
input: serde_json::Value::from_str(
|
||||
&tool_use.input_json,
|
||||
)
|
||||
.map_err(|err| anyhow!(err))?,
|
||||
},
|
||||
))
|
||||
})),
|
||||
|
||||
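One side of the hunk above guards the tool-use input parse: a streamed tool call can finish with an empty input_json, and feeding an empty string to the JSON parser is an error, so the empty case is mapped to Value::Null instead. A small sketch of the same guard:

use serde_json::Value;

/// Treat an empty accumulated JSON string as "no input" rather than a parse error.
fn parse_tool_input(input_json: &str) -> Result<Value, serde_json::Error> {
    if input_json.is_empty() {
        Ok(Value::Null)
    } else {
        serde_json::from_str(input_json)
    }
}

fn main() {
    assert_eq!(parse_tool_input("").unwrap(), Value::Null);
    assert_eq!(parse_tool_input(r#"{"path": "src/main.rs"}"#).unwrap()["path"], "src/main.rs");
}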
@@ -54,7 +54,6 @@ pub struct OllamaLanguageModelProvider {
|
||||
pub struct State {
|
||||
http_client: Arc<dyn HttpClient>,
|
||||
available_models: Vec<ollama::Model>,
|
||||
fetch_model_task: Option<Task<Result<()>>>,
|
||||
_subscription: Subscription,
|
||||
}
|
||||
|
||||
@@ -90,11 +89,6 @@ impl State {
|
||||
})
|
||||
}
|
||||
|
||||
fn restart_fetch_models_task(&mut self, cx: &mut ModelContext<Self>) {
|
||||
let task = self.fetch_models(cx);
|
||||
self.fetch_model_task.replace(task);
|
||||
}
|
||||
|
||||
fn authenticate(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
|
||||
if self.is_authenticated() {
|
||||
Task::ready(Ok(()))
|
||||
@@ -108,29 +102,17 @@ impl OllamaLanguageModelProvider {
|
||||
pub fn new(http_client: Arc<dyn HttpClient>, cx: &mut AppContext) -> Self {
|
||||
let this = Self {
|
||||
http_client: http_client.clone(),
|
||||
state: cx.new_model(|cx| {
|
||||
let subscription = cx.observe_global::<SettingsStore>({
|
||||
let mut settings = AllLanguageModelSettings::get_global(cx).ollama.clone();
|
||||
move |this: &mut State, cx| {
|
||||
let new_settings = &AllLanguageModelSettings::get_global(cx).ollama;
|
||||
if &settings != new_settings {
|
||||
settings = new_settings.clone();
|
||||
this.restart_fetch_models_task(cx);
|
||||
cx.notify();
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
State {
|
||||
http_client,
|
||||
available_models: Default::default(),
|
||||
fetch_model_task: None,
|
||||
_subscription: subscription,
|
||||
}
|
||||
state: cx.new_model(|cx| State {
|
||||
http_client,
|
||||
available_models: Default::default(),
|
||||
_subscription: cx.observe_global::<SettingsStore>(|this: &mut State, cx| {
|
||||
this.fetch_models(cx).detach();
|
||||
cx.notify();
|
||||
}),
|
||||
}),
|
||||
};
|
||||
this.state
|
||||
.update(cx, |state, cx| state.restart_fetch_models_task(cx));
|
||||
.update(cx, |state, cx| state.fetch_models(cx).detach());
|
||||
this
|
||||
}
|
||||
}
|
||||
|
||||
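The Ollama provider hunk above is about when to refetch models: one side refetches on every SettingsStore notification, the other caches the last-seen Ollama settings and restarts the fetch task only when they actually differ. A standalone sketch of that change-detection pattern using plain closures rather than gpui observers:

#[derive(Clone, PartialEq, Debug)]
struct OllamaSettings {
    api_url: String,
}

/// Build an observer that reacts only when the settings it cares about change,
/// instead of on every global settings notification.
fn settings_observer(
    mut current: OllamaSettings,
    mut on_change: impl FnMut(&OllamaSettings),
) -> impl FnMut(OllamaSettings) {
    move |new_settings| {
        if current != new_settings {
            current = new_settings;
            on_change(&current);
        }
    }
}

fn main() {
    let mut refetches = 0;
    {
        let mut observer = settings_observer(
            OllamaSettings { api_url: "http://localhost:11434".into() },
            |_| refetches += 1,
        );
        // Unrelated settings churn produces identical Ollama settings: no refetch.
        observer(OllamaSettings { api_url: "http://localhost:11434".into() });
        // An actual change triggers exactly one refetch.
        observer(OllamaSettings { api_url: "http://otherhost:11434".into() });
    }
    assert_eq!(refetches, 1);
}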
@@ -1237,22 +1237,6 @@ impl Render for LspLogToolbarItemView {
|
||||
view.show_rpc_trace_for_server(row.server_id, cx);
|
||||
}),
|
||||
);
|
||||
if server_selected && row.selected_entry == LogKind::Rpc {
|
||||
let selected_ix = menu.select_last();
|
||||
// Each language server has:
|
||||
// 1. A title.
|
||||
// 2. Server logs.
|
||||
// 3. Server trace.
|
||||
// 4. RPC messages.
|
||||
// 5. Server capabilities
|
||||
// Thus, if nth server's RPC is selected, the index of selected entry should match this formula
|
||||
let _expected_index = ix * 5 + 3;
|
||||
debug_assert_eq!(
|
||||
Some(_expected_index),
|
||||
selected_ix,
|
||||
"Could not scroll to a just added LSP menu item"
|
||||
);
|
||||
}
|
||||
menu = menu.entry(
|
||||
SERVER_CAPABILITIES,
|
||||
None,
|
||||
@@ -1260,6 +1244,14 @@ impl Render for LspLogToolbarItemView {
|
||||
view.show_capabilities_for_server(row.server_id, cx);
|
||||
}),
|
||||
);
|
||||
if server_selected && row.selected_entry == LogKind::Rpc {
|
||||
let selected_ix = menu.select_last();
|
||||
debug_assert_eq!(
|
||||
Some(ix * 4 + 3),
|
||||
selected_ix,
|
||||
"Could not scroll to a just added LSP menu item"
|
||||
);
|
||||
}
|
||||
}
|
||||
menu
|
||||
})
|
||||
|
||||
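The two debug assertions above encode the toolbar menu layout on either side of this change: when each language server contributes five rows (title, logs, trace, RPC, capabilities), the RPC row of the nth server sits at n * 5 + 3; with four rows it is n * 4 + 3. A tiny check of the five-row arithmetic:

/// Rows contributed per language server: title, logs, trace, RPC, capabilities.
const ROWS_PER_SERVER: usize = 5;

/// Index of the RPC row for the `server_ix`-th server (RPC is the fourth row, offset 3).
fn rpc_row_index(server_ix: usize) -> usize {
    server_ix * ROWS_PER_SERVER + 3
}

fn main() {
    assert_eq!(rpc_row_index(0), 3);
    assert_eq!(rpc_row_index(2), 13);
}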
@@ -10,7 +10,7 @@ workspace = true

[features]
test-support = [
"load-grammars"
"tree-sitter"
]
load-grammars = [
"tree-sitter-bash",
@@ -47,11 +47,6 @@ log.workspace = true
|
||||
lsp.workspace = true
|
||||
node_runtime.workspace = true
|
||||
paths.workspace = true
|
||||
pet.workspace = true
|
||||
pet-core.workspace = true
|
||||
pet-conda.workspace = true
|
||||
pet-poetry.workspace = true
|
||||
pet-reporter.workspace = true
|
||||
project.workspace = true
|
||||
regex.workspace = true
|
||||
rope.workspace = true
|
||||
@@ -87,8 +82,3 @@ text.workspace = true
|
||||
theme = { workspace = true, features = ["test-support"] }
|
||||
unindent.workspace = true
|
||||
workspace = { workspace = true, features = ["test-support"] }
|
||||
tree-sitter-typescript.workspace = true
|
||||
tree-sitter-python.workspace = true
|
||||
tree-sitter-go.workspace = true
|
||||
tree-sitter-c.workspace = true
|
||||
tree-sitter-css.workspace = true
@@ -7,9 +7,7 @@ use feature_flags::FeatureFlagAppExt;
|
||||
use futures::StreamExt;
|
||||
use gpui::{AppContext, AsyncAppContext};
|
||||
use http_client::github::{latest_github_release, GitHubLspBinaryVersion};
|
||||
use language::{
|
||||
LanguageRegistry, LanguageServerName, LanguageToolchainStore, LspAdapter, LspAdapterDelegate,
|
||||
};
|
||||
use language::{LanguageRegistry, LanguageServerName, LspAdapter, LspAdapterDelegate};
|
||||
use lsp::LanguageServerBinary;
|
||||
use node_runtime::NodeRuntime;
|
||||
use project::ContextProviderWithTasks;
|
||||
@@ -200,7 +198,6 @@ impl LspAdapter for JsonLspAdapter {
|
||||
async fn workspace_configuration(
|
||||
self: Arc<Self>,
|
||||
_: &Arc<dyn LspAdapterDelegate>,
|
||||
_: Arc<dyn LanguageToolchainStore>,
|
||||
cx: &mut AsyncAppContext,
|
||||
) -> Result<Value> {
|
||||
cx.update(|cx| {
@@ -3,7 +3,7 @@ use gpui::{AppContext, UpdateGlobal};
use json::json_task_context;
pub use language::*;
use node_runtime::NodeRuntime;
use python::{PythonContextProvider, PythonToolchainProvider};
use python::PythonContextProvider;
use rust_embed::RustEmbed;
use settings::SettingsStore;
use smol::stream::StreamExt;

@@ -61,14 +61,7 @@ pub fn init(languages: Arc<LanguageRegistry>, node_runtime: NodeRuntime, cx: &mu
config.name.clone(),
config.grammar.clone(),
config.matcher.clone(),
move || {
    Ok(LoadedLanguage {
        config: config.clone(),
        queries: load_queries($name),
        context_provider: None,
        toolchain_provider: None,
    })
},
move || Ok((config.clone(), load_queries($name), None)),
);
};
($name:literal, $adapters:expr) => {

@@ -82,14 +75,7 @@ pub fn init(languages: Arc<LanguageRegistry>, node_runtime: NodeRuntime, cx: &mu
config.name.clone(),
config.grammar.clone(),
config.matcher.clone(),
move || {
    Ok(LoadedLanguage {
        config: config.clone(),
        queries: load_queries($name),
        context_provider: None,
        toolchain_provider: None,
    })
},
move || Ok((config.clone(), load_queries($name), None)),
);
};
($name:literal, $adapters:expr, $context_provider:expr) => {

@@ -104,33 +90,11 @@ pub fn init(languages: Arc<LanguageRegistry>, node_runtime: NodeRuntime, cx: &mu
config.grammar.clone(),
config.matcher.clone(),
move || {
    Ok(LoadedLanguage {
        config: config.clone(),
        queries: load_queries($name),
        context_provider: Some(Arc::new($context_provider)),
        toolchain_provider: None,
    })
},
);
};
($name:literal, $adapters:expr, $context_provider:expr, $toolchain_provider:expr) => {
    let config = load_config($name);
    // typeck helper
    let adapters: Vec<Arc<dyn LspAdapter>> = $adapters;
    for adapter in adapters {
        languages.register_lsp_adapter(config.name.clone(), adapter);
    }
    languages.register_language(
        config.name.clone(),
        config.grammar.clone(),
        config.matcher.clone(),
        move || {
            Ok(LoadedLanguage {
                config: config.clone(),
                queries: load_queries($name),
                context_provider: Some(Arc::new($context_provider)),
                toolchain_provider: Some($toolchain_provider),
            })
            Ok((
                config.clone(),
                load_queries($name),
                Some(Arc::new($context_provider)),
            ))
        },
    );
};

@@ -177,8 +141,7 @@ pub fn init(languages: Arc<LanguageRegistry>, node_runtime: NodeRuntime, cx: &mu
vec![Arc::new(python::PythonLspAdapter::new(
    node_runtime.clone(),
))],
PythonContextProvider,
Arc::new(PythonToolchainProvider::default()) as Arc<dyn ToolchainLister>
PythonContextProvider
);
language!(
    "rust",
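The language! macro above gains a fourth arm that also threads a toolchain provider through to LoadedLanguage. A much-reduced sketch of the same arity-based overloading with placeholder types (not the actual macro from the languages crate):

// Placeholder registry to keep the sketch self-contained.
#[derive(Default)]
struct Registry {
    entries: Vec<(String, bool, bool)>,
}

impl Registry {
    fn register(&mut self, name: &str, has_context: bool, has_toolchain: bool) {
        self.entries.push((name.to_string(), has_context, has_toolchain));
    }
}

// Each extra macro argument switches on one more capability, mirroring how the
// real macro's extra arms fill in context_provider and toolchain_provider.
macro_rules! language {
    ($reg:expr, $name:literal) => {
        $reg.register($name, false, false)
    };
    ($reg:expr, $name:literal, $context:expr) => {
        $reg.register($name, $context, false)
    };
    ($reg:expr, $name:literal, $context:expr, $toolchain:expr) => {
        $reg.register($name, $context, $toolchain)
    };
}

fn main() {
    let mut reg = Registry::default();
    language!(reg, "json");
    language!(reg, "rust", true);
    language!(reg, "python", true, true); // context provider + toolchain provider
    assert_eq!(reg.entries.len(), 3);
}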
@@ -3,16 +3,9 @@ use async_trait::async_trait;
use collections::HashMap;
use gpui::AppContext;
use gpui::AsyncAppContext;
use language::LanguageName;
use language::LanguageToolchainStore;
use language::Toolchain;
use language::ToolchainList;
use language::ToolchainLister;
use language::{ContextProvider, LanguageServerName, LspAdapter, LspAdapterDelegate};
use lsp::LanguageServerBinary;
use node_runtime::NodeRuntime;
use pet_core::python_environment::PythonEnvironmentKind;
use pet_core::Configuration;
use project::lsp_store::language_server_settings;
use serde_json::Value;
@@ -207,35 +200,12 @@ impl LspAdapter for PythonLspAdapter {
async fn workspace_configuration(
    self: Arc<Self>,
    adapter: &Arc<dyn LspAdapterDelegate>,
    toolchains: Arc<dyn LanguageToolchainStore>,
    cx: &mut AsyncAppContext,
) -> Result<Value> {
    let toolchain = toolchains
        .active_toolchain(adapter.worktree_id(), LanguageName::new("Python"), cx)
        .await;
    cx.update(move |cx| {
        let mut user_settings =
            language_server_settings(adapter.as_ref(), &Self::SERVER_NAME, cx)
                .and_then(|s| s.settings.clone())
                .unwrap_or_default();

        // If python.pythonPath is not set in user config, do so using our toolchain picker.
        if let Some(toolchain) = toolchain {
            if user_settings.is_null() {
                user_settings = Value::Object(serde_json::Map::default());
            }
            let object = user_settings.as_object_mut().unwrap();
            if let Some(python) = object
                .entry("python")
                .or_insert(Value::Object(serde_json::Map::default()))
                .as_object_mut()
            {
                python
                    .entry("pythonPath")
                    .or_insert(Value::String(toolchain.path.into()));
            }
        }
        user_settings
    cx.update(|cx| {
        language_server_settings(adapter.as_ref(), &Self::SERVER_NAME, cx)
            .and_then(|s| s.settings.clone())
            .unwrap_or_default()
    })
}
}
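The removed branch above only fills in python.pythonPath when the user has not set it themselves. A standalone sketch of that merge logic against plain serde_json values (no Zed types; assumes serde_json as a dependency):

use serde_json::{json, Map, Value};

// Insert python.pythonPath into the settings object unless the user already set it.
fn inject_python_path(mut user_settings: Value, toolchain_path: &str) -> Value {
    if user_settings.is_null() {
        user_settings = Value::Object(Map::default());
    }
    if let Some(object) = user_settings.as_object_mut() {
        if let Some(python) = object
            .entry("python")
            .or_insert(Value::Object(Map::default()))
            .as_object_mut()
        {
            python
                .entry("pythonPath")
                .or_insert(Value::String(toolchain_path.to_owned()));
        }
    }
    user_settings
}

fn main() {
    // Empty settings: the toolchain path gets filled in.
    let merged = inject_python_path(Value::Null, "/usr/bin/python3");
    assert_eq!(merged["python"]["pythonPath"], "/usr/bin/python3");

    // An explicit user value wins over the toolchain picker.
    let explicit = json!({ "python": { "pythonPath": "/opt/py/bin/python" } });
    let merged = inject_python_path(explicit, "/usr/bin/python3");
    assert_eq!(merged["python"]["pythonPath"], "/opt/py/bin/python");
}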
@@ -350,83 +320,6 @@ fn python_module_name_from_relative_path(relative_path: &str) -> String {
    .to_string()
}

#[derive(Default)]
pub(crate) struct PythonToolchainProvider {}

static ENV_PRIORITY_LIST: &'static [PythonEnvironmentKind] = &[
    // Prioritize non-Conda environments.
    PythonEnvironmentKind::Poetry,
    PythonEnvironmentKind::Pipenv,
    PythonEnvironmentKind::VirtualEnvWrapper,
    PythonEnvironmentKind::Venv,
    PythonEnvironmentKind::VirtualEnv,
    PythonEnvironmentKind::Conda,
    PythonEnvironmentKind::Pyenv,
    PythonEnvironmentKind::GlobalPaths,
    PythonEnvironmentKind::Homebrew,
];

fn env_priority(kind: Option<PythonEnvironmentKind>) -> usize {
    if let Some(kind) = kind {
        ENV_PRIORITY_LIST
            .iter()
            .position(|blessed_env| blessed_env == &kind)
            .unwrap_or(ENV_PRIORITY_LIST.len())
    } else {
        // Unknown toolchains are less useful than non-blessed ones.
        ENV_PRIORITY_LIST.len() + 1
    }
}
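The priority list is a plain "smaller index wins" ranking: kinds missing from the list sort after it, and unknown kinds sort last of all. A self-contained sketch of the same ordering trick (PythonEnvironmentKind belongs to the pet-core crate, so the sketch substitutes its own enum):

// Stand-in for pet_core's PythonEnvironmentKind; the real list in the diff is longer.
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
enum EnvKind {
    Poetry,
    Venv,
    Conda,
    Homebrew,
}

static ENV_PRIORITY_LIST: &[EnvKind] = &[EnvKind::Poetry, EnvKind::Venv, EnvKind::Conda, EnvKind::Homebrew];

// Lower return value sorts first; unlisted kinds sort after listed ones,
// and unknown (None) kinds sort last of all.
fn env_priority(kind: Option<EnvKind>) -> usize {
    match kind {
        Some(kind) => ENV_PRIORITY_LIST
            .iter()
            .position(|blessed| blessed == &kind)
            .unwrap_or(ENV_PRIORITY_LIST.len()),
        None => ENV_PRIORITY_LIST.len() + 1,
    }
}

fn main() {
    let mut envs = vec![
        (Some(EnvKind::Conda), "conda-base"),
        (None, "mystery"),
        (Some(EnvKind::Poetry), "project-venv"),
    ];
    // Same shape as the sort in the diff: priority first, then a stable tie-breaker.
    envs.sort_by(|lhs, rhs| env_priority(lhs.0).cmp(&env_priority(rhs.0)).then_with(|| lhs.1.cmp(rhs.1)));
    assert_eq!(envs[0].1, "project-venv");
    assert_eq!(envs.last().unwrap().1, "mystery");
}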
#[async_trait(?Send)]
impl ToolchainLister for PythonToolchainProvider {
    async fn list(&self, worktree_root: PathBuf) -> ToolchainList {
        let environment = pet_core::os_environment::EnvironmentApi::new();
        let locators = pet::locators::create_locators(
            Arc::new(pet_conda::Conda::from(&environment)),
            Arc::new(pet_poetry::Poetry::from(&environment)),
            &environment,
        );
        let mut config = Configuration::default();
        config.workspace_directories = Some(vec![worktree_root]);
        let reporter = pet_reporter::collect::create_reporter();
        pet::find::find_and_report_envs(&reporter, config, &locators, &environment, None);

        let mut toolchains = reporter
            .environments
            .lock()
            .ok()
            .map_or(Vec::new(), |mut guard| std::mem::take(&mut guard));
        toolchains.sort_by(|lhs, rhs| {
            env_priority(lhs.kind)
                .cmp(&env_priority(rhs.kind))
                .then_with(|| lhs.executable.cmp(&rhs.executable))
        });
        let mut toolchains: Vec<_> = toolchains
            .into_iter()
            .filter_map(|toolchain| {
                let name = if let Some(version) = &toolchain.version {
                    format!("Python {version} ({:?})", toolchain.kind?)
                } else {
                    format!("{:?}", toolchain.kind?)
                }
                .into();
                Some(Toolchain {
                    name,
                    path: toolchain.executable?.to_str()?.to_owned().into(),
                    language_name: LanguageName::new("Python"),
                })
            })
            .collect();
        toolchains.dedup();
        ToolchainList {
            toolchains,
            default: None,
            groups: Default::default(),
        }
    }
}

#[cfg(test)]
mod tests {
    use gpui::{BorrowAppContext, Context, ModelContext, TestAppContext};
@@ -5,9 +5,6 @@ line_comments = ["// ", "/// ", "//! "]
autoclose_before = ";:.,=}])>"
brackets = [
    { start = "{", end = "}", close = true, newline = true },
    { start = "r#\"", end = "\"#", close = true, newline = true, not_in = ["string", "comment"] },
    { start = "r##\"", end = "\"##", close = true, newline = true, not_in = ["string", "comment"] },
    { start = "r###\"", end = "\"###", close = true, newline = true, not_in = ["string", "comment"] },
    { start = "[", end = "]", close = true, newline = true },
    { start = "(", end = ")", close = true, newline = true },
    { start = "<", end = ">", close = false, newline = true, not_in = ["string", "comment"] },
@@ -3,7 +3,7 @@ use async_trait::async_trait;
use collections::HashMap;
use futures::StreamExt;
use gpui::AsyncAppContext;
use language::{LanguageServerName, LanguageToolchainStore, LspAdapter, LspAdapterDelegate};
use language::{LanguageServerName, LspAdapter, LspAdapterDelegate};
use lsp::LanguageServerBinary;
use node_runtime::NodeRuntime;
use project::lsp_store::language_server_settings;

@@ -111,7 +111,6 @@ impl LspAdapter for TailwindLspAdapter {
async fn workspace_configuration(
    self: Arc<Self>,
    delegate: &Arc<dyn LspAdapterDelegate>,
    _: Arc<dyn LanguageToolchainStore>,
    cx: &mut AsyncAppContext,
) -> Result<Value> {
    let tailwind_user_settings = cx.update(|cx| {
@@ -5,7 +5,7 @@ use async_trait::async_trait;
use collections::HashMap;
use gpui::AsyncAppContext;
use http_client::github::{build_asset_url, AssetKind, GitHubLspBinaryVersion};
use language::{LanguageServerName, LanguageToolchainStore, LspAdapter, LspAdapterDelegate};
use language::{LanguageServerName, LspAdapter, LspAdapterDelegate};
use lsp::{CodeActionKind, LanguageServerBinary};
use node_runtime::NodeRuntime;
use project::lsp_store::language_server_settings;

@@ -230,7 +230,6 @@ impl LspAdapter for TypeScriptLspAdapter {
async fn workspace_configuration(
    self: Arc<Self>,
    delegate: &Arc<dyn LspAdapterDelegate>,
    _: Arc<dyn LanguageToolchainStore>,
    cx: &mut AsyncAppContext,
) -> Result<Value> {
    let override_options = cx.update(|cx| {

@@ -326,7 +325,6 @@ impl LspAdapter for EsLintLspAdapter {
async fn workspace_configuration(
    self: Arc<Self>,
    delegate: &Arc<dyn LspAdapterDelegate>,
    _: Arc<dyn LanguageToolchainStore>,
    cx: &mut AsyncAppContext,
) -> Result<Value> {
    let workspace_root = delegate.worktree_root_path();
@@ -2,7 +2,7 @@ use anyhow::{anyhow, Result};
use async_trait::async_trait;
use collections::HashMap;
use gpui::AsyncAppContext;
use language::{LanguageServerName, LanguageToolchainStore, LspAdapter, LspAdapterDelegate};
use language::{LanguageServerName, LspAdapter, LspAdapterDelegate};
use lsp::{CodeActionKind, LanguageServerBinary};
use node_runtime::NodeRuntime;
use project::lsp_store::language_server_settings;

@@ -183,7 +183,6 @@ impl LspAdapter for VtslsLspAdapter {
async fn workspace_configuration(
    self: Arc<Self>,
    delegate: &Arc<dyn LspAdapterDelegate>,
    _: Arc<dyn LanguageToolchainStore>,
    cx: &mut AsyncAppContext,
) -> Result<Value> {
    let tsdk_path = Self::tsdk_path(delegate).await;
@@ -3,8 +3,7 @@ use async_trait::async_trait;
use futures::StreamExt;
use gpui::AsyncAppContext;
use language::{
    language_settings::AllLanguageSettings, LanguageServerName, LanguageToolchainStore, LspAdapter,
    LspAdapterDelegate,
    language_settings::AllLanguageSettings, LanguageServerName, LspAdapter, LspAdapterDelegate,
};
use lsp::LanguageServerBinary;
use node_runtime::NodeRuntime;

@@ -93,7 +92,6 @@ impl LspAdapter for YamlLspAdapter {
async fn workspace_configuration(
    self: Arc<Self>,
    delegate: &Arc<dyn LspAdapterDelegate>,
    _: Arc<dyn LanguageToolchainStore>,
    cx: &mut AsyncAppContext,
) -> Result<Value> {
    let location = SettingsLocation {
@@ -1177,8 +1177,6 @@ impl FakeLanguageServer {
let (stdout_writer, stdout_reader) = async_pipe::pipe();
let (notifications_tx, notifications_rx) = channel::unbounded();

let root = Self::root_path();

let mut server = LanguageServer::new_internal(
    server_id,
    stdin_writer,

@@ -1186,8 +1184,8 @@ impl FakeLanguageServer {
    None::<async_pipe::PipeReader>,
    Arc::new(Mutex::new(None)),
    None,
    root,
    root,
    Path::new("/"),
    Path::new("/"),
    None,
    cx.clone(),
    |_| {},

@@ -1203,8 +1201,8 @@ impl FakeLanguageServer {
    None::<async_pipe::PipeReader>,
    Arc::new(Mutex::new(None)),
    None,
    root,
    root,
    Path::new("/"),
    Path::new("/"),
    None,
    cx,
    move |msg| {

@@ -1240,16 +1238,6 @@ impl FakeLanguageServer {

(server, fake)
}

#[cfg(target_os = "windows")]
fn root_path() -> &'static Path {
    Path::new("C:\\")
}

#[cfg(not(target_os = "windows"))]
fn root_path() -> &'static Path {
    Path::new("/")
}
}

#[cfg(any(test, feature = "test-support"))]
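The root_path() helper in this hunk is the usual cfg-gated pattern for picking a platform-specific constant at compile time. A minimal standalone sketch of the same idea, detached from Zed's LanguageServer:

use std::path::Path;

// Compile-time selection: only one of these two functions exists per target.
#[cfg(target_os = "windows")]
fn fake_server_root() -> &'static Path {
    Path::new("C:\\")
}

#[cfg(not(target_os = "windows"))]
fn fake_server_root() -> &'static Path {
    Path::new("/")
}

fn main() {
    // Prints "/" on Unix-like targets and "C:\" on Windows.
    println!("fake LSP root: {}", fake_server_root().display());
}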
@@ -234,10 +234,6 @@ impl<'a> MarkdownParser<'a> {
    text.push('\n');
}

// We want to ignore any inline HTML tags in the text but keep
// the text between them
Event::InlineHtml(_) => {}

Event::Text(t) => {
    text.push_str(t.as_ref());
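That match arm drops inline HTML events while keeping the text they wrap, which is why "<sometag>tag</sometag>" collapses to just "tag" in the tests further down. A rough standalone sketch of the same filtering, assuming pulldown-cmark 0.10 or newer (where inline HTML is a distinct Event::InlineHtml variant):

use pulldown_cmark::{Event, Parser};

// Collect the plain text of a markdown snippet, skipping inline HTML tags
// but keeping the text they wrap.
fn text_without_inline_html(source: &str) -> String {
    let mut text = String::new();
    for event in Parser::new(source) {
        match event {
            Event::InlineHtml(_) => {} // drop the tag itself
            Event::Text(t) => text.push_str(t.as_ref()),
            _ => {}
        }
    }
    text
}

fn main() {
    let out = text_without_inline_html("This is a paragraph with an inline HTML <sometag>tag</sometag>.");
    assert_eq!(out, "This is a paragraph with an inline HTML tag.");
}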
@@ -630,8 +626,6 @@ impl<'a> MarkdownParser<'a> {
// Otherwise we need to insert the block after all the nested items
// that have been parsed so far
items.extend(block);
} else {
    self.cursor += 1;
}
}
}

@@ -853,16 +847,6 @@ mod tests {
);
}

#[gpui::test]
async fn test_text_with_inline_html() {
    let parsed = parse("This is a paragraph with an inline HTML <sometag>tag</sometag>.").await;

    assert_eq!(
        parsed.children,
        vec![p("This is a paragraph with an inline HTML tag.", 0..63),],
    );
}

#[gpui::test]
async fn test_raw_links_detection() {
    let parsed = parse("Checkout this https://zed.dev link").await;

@@ -1106,26 +1090,6 @@ Some other content
);
}

#[gpui::test]
async fn test_list_item_with_inline_html() {
    let parsed = parse(
        "\
* This is a list item with an inline HTML <sometag>tag</sometag>.
",
    )
    .await;

    assert_eq!(
        parsed.children,
        vec![list_item(
            0..67,
            1,
            Unordered,
            vec![p("This is a list item with an inline HTML tag.", 4..44),],
        ),],
    );
}

#[gpui::test]
async fn test_nested_list_with_paragraph_inside() {
    let parsed = parse(
@@ -479,7 +479,7 @@ impl Render for MarkdownPreviewView {
v_flex()
    .id("MarkdownPreview")
    .key_context("MarkdownPreview")
    .track_focus(&self.focus_handle(cx))
    .track_focus(&self.focus_handle)
    .size_full()
    .bg(cx.theme().colors().editor_background)
    .p_4()
Some files were not shown because too many files have changed in this diff.