Compare commits

9 Commits: expand-sel ... faster_mac

| Author | SHA1 | Date |
|---|---|---|
|  | ec3e1ad498 |  |
|  | e03fd56b77 |  |
|  | f43b15ef84 |  |
|  | 8ad9743cd5 |  |
|  | 28aa34d80c |  |
|  | 7097861be5 |  |
|  | 660e3006b4 |  |
|  | 0ed0bf5f2f |  |
|  | 8a6ea2d390 |  |
282  .github/workflows/ci.yml  vendored
@@ -25,194 +25,8 @@ env:
  CARGO_TERM_COLOR: always
  CARGO_INCREMENTAL: 0
  RUST_BACKTRACE: 1
  RUSTFLAGS: "-D warnings"

jobs:
  migration_checks:
    name: Check Postgres and Protobuf migrations, mergability
    if: github.repository_owner == 'zed-industries'
    timeout-minutes: 60
    runs-on:
      - self-hosted
      - test
    steps:
      - name: Checkout repo
        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
        with:
          clean: false
          fetch-depth: 0 # fetch full history

      - name: Remove untracked files
        run: git clean -df

      - name: Find modified migrations
        shell: bash -euxo pipefail {0}
        run: |
          export SQUAWK_GITHUB_TOKEN=${{ github.token }}
          . ./script/squawk

      - name: Ensure fresh merge
        shell: bash -euxo pipefail {0}
        run: |
          if [ -z "$GITHUB_BASE_REF" ];
          then
            echo "BUF_BASE_BRANCH=$(git merge-base origin/main HEAD)" >> $GITHUB_ENV
          else
            git checkout -B temp
            git merge -q origin/$GITHUB_BASE_REF -m "merge main into temp"
            echo "BUF_BASE_BRANCH=$GITHUB_BASE_REF" >> $GITHUB_ENV
          fi

      - uses: bufbuild/buf-setup-action@v1
        with:
          version: v1.29.0
      - uses: bufbuild/buf-breaking-action@v1
        with:
          input: "crates/proto/proto/"
          against: "https://github.com/${GITHUB_REPOSITORY}.git#branch=${BUF_BASE_BRANCH},subdir=crates/proto/proto/"

  style:
    timeout-minutes: 60
    name: Check formatting and spelling
    if: github.repository_owner == 'zed-industries'
    runs-on:
      - buildjet-8vcpu-ubuntu-2204
    steps:
      - name: Checkout repo
        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4

      - name: Run style checks
        uses: ./.github/actions/check_style

      - name: Check for typos
        uses: crate-ci/typos@8e6a4285bcbde632c5d79900a7779746e8b7ea3f # v1.24.6
        with:
          config: ./typos.toml

  macos_tests:
    timeout-minutes: 60
    name: (macOS) Run Clippy and tests
    if: github.repository_owner == 'zed-industries'
    runs-on:
      - self-hosted
      - test
    steps:
      - name: Checkout repo
        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
        with:
          clean: false

      - name: cargo clippy
        run: ./script/clippy

      - name: Check unused dependencies
        uses: bnjbvr/cargo-machete@main

      - name: Check licenses
        run: |
          script/check-licenses
          script/generate-licenses /tmp/zed_licenses_output

      - name: Run tests
        uses: ./.github/actions/run_tests

      - name: Build collab
        run: cargo build -p collab

      - name: Build other binaries and features
        run: |
          cargo build --workspace --bins --all-features
          cargo check -p gpui --features "macos-blade"
          cargo build -p remote_server

  linux_tests:
    timeout-minutes: 60
    name: (Linux) Run Clippy and tests
    if: github.repository_owner == 'zed-industries'
    runs-on:
      - buildjet-16vcpu-ubuntu-2204
    steps:
      - name: Add Rust to the PATH
        run: echo "$HOME/.cargo/bin" >> $GITHUB_PATH

      - name: Checkout repo
        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
        with:
          clean: false

      - name: Cache dependencies
        uses: swatinem/rust-cache@82a92a6e8fbeee089604da2575dc567ae9ddeaab # v2
        with:
          save-if: ${{ github.ref == 'refs/heads/main' }}
          cache-provider: "buildjet"

      - name: Install Linux dependencies
        run: ./script/linux

      - name: cargo clippy
        run: ./script/clippy

      - name: Run tests
        uses: ./.github/actions/run_tests

      - name: Build Zed
        run: cargo build -p zed

  build_remote_server:
    timeout-minutes: 60
    name: (Linux) Build Remote Server
    if: github.repository_owner == 'zed-industries'
    runs-on:
      - buildjet-16vcpu-ubuntu-2204
    steps:
      - name: Add Rust to the PATH
        run: echo "$HOME/.cargo/bin" >> $GITHUB_PATH

      - name: Checkout repo
        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
        with:
          clean: false

      - name: Cache dependencies
        uses: swatinem/rust-cache@82a92a6e8fbeee089604da2575dc567ae9ddeaab # v2
        with:
          save-if: ${{ github.ref == 'refs/heads/main' }}
          cache-provider: "buildjet"

      - name: Install Clang & Mold
        run: ./script/remote-server && ./script/install-mold 2.34.0

      - name: Build Remote Server
        run: cargo build -p remote_server

  # todo(windows): Actually run the tests
  windows_tests:
    timeout-minutes: 60
    name: (Windows) Run Clippy and tests
    if: github.repository_owner == 'zed-industries'
    runs-on: hosted-windows-1
    steps:
      # more info here: https://github.com/rust-lang/cargo/issues/13020
      - name: Enable longer pathnames for git
        run: git config --system core.longpaths true
      - name: Checkout repo
        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
        with:
          clean: false

      - name: Cache dependencies
        uses: swatinem/rust-cache@82a92a6e8fbeee089604da2575dc567ae9ddeaab # v2
        with:
          save-if: ${{ github.ref == 'refs/heads/main' }}
          cache-provider: "github"

      - name: cargo clippy
        # Windows can't run shell scripts, so we need to use `cargo xtask`.
        run: cargo xtask clippy

      - name: Build Zed
        run: cargo build

  bundle-mac:
    timeout-minutes: 60
    name: Create a macOS bundle
@@ -220,7 +34,6 @@ jobs:
      - self-hosted
      - bundle
    if: ${{ startsWith(github.ref, 'refs/tags/v') || contains(github.event.pull_request.labels.*.name, 'run-bundling') }}
    needs: [macos_tests]
    env:
      MACOS_CERTIFICATE: ${{ secrets.MACOS_CERTIFICATE }}
      MACOS_CERTIFICATE_PASSWORD: ${{ secrets.MACOS_CERTIFICATE_PASSWORD }}
@@ -296,7 +109,6 @@ jobs:

      - uses: softprops/action-gh-release@de2c0eb89ae2a093876385947365aca7b0e5f844 # v1
        name: Upload app bundle to release
        if: ${{ env.RELEASE_CHANNEL == 'preview' || env.RELEASE_CHANNEL == 'stable' }}
        with:
          draft: true
          prerelease: ${{ env.RELEASE_CHANNEL == 'preview' }}
@@ -309,97 +121,3 @@ jobs:
          body_path: target/release-notes.md
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}

  bundle-linux:
    timeout-minutes: 60
    name: Create a Linux bundle
    runs-on:
      - buildjet-16vcpu-ubuntu-2004
    if: ${{ startsWith(github.ref, 'refs/tags/v') || contains(github.event.pull_request.labels.*.name, 'run-bundling') }}
    needs: [linux_tests]
    env:
      ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }}
      ZED_CLOUD_PROVIDER_ADDITIONAL_MODELS_JSON: ${{ secrets.ZED_CLOUD_PROVIDER_ADDITIONAL_MODELS_JSON }}
    steps:
      - name: Checkout repo
        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
        with:
          clean: false

      - name: Install Linux dependencies
        run: ./script/linux && ./script/install-mold 2.34.0

      - name: Determine version and release channel
        if: ${{ startsWith(github.ref, 'refs/tags/v') }}
        run: |
          # This exports RELEASE_CHANNEL into env (GITHUB_ENV)
          script/determine-release-channel

      - name: Create Linux .tar.gz bundle
        run: script/bundle-linux

      - name: Upload Linux bundle to workflow run if main branch or specific label
        uses: actions/upload-artifact@b4b15b8c7c6ac21ea08fcf65892d2ee8f75cf882 # v4
        if: ${{ github.ref == 'refs/heads/main' || contains(github.event.pull_request.labels.*.name, 'run-bundling') }}
        with:
          name: zed-${{ github.event.pull_request.head.sha || github.sha }}-x86_64-unknown-linux-gnu.tar.gz
          path: target/release/zed-*.tar.gz

      - name: Upload app bundle to release
        uses: softprops/action-gh-release@de2c0eb89ae2a093876385947365aca7b0e5f844 # v1
        with:
          draft: true
          prerelease: ${{ env.RELEASE_CHANNEL == 'preview' }}
          files: |
            target/zed-remote-server-linux-x86_64.gz
            target/release/zed-linux-x86_64.tar.gz
          body: ""
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}

  bundle-linux-aarch64: # this runs on ubuntu22.04
    timeout-minutes: 60
    name: Create arm64 Linux bundle
    runs-on:
      - buildjet-16vcpu-ubuntu-2204-arm
    if: ${{ startsWith(github.ref, 'refs/tags/v') || contains(github.event.pull_request.labels.*.name, 'run-bundling') }}
    needs: [linux_tests]
    env:
      ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }}
      ZED_CLOUD_PROVIDER_ADDITIONAL_MODELS_JSON: ${{ secrets.ZED_CLOUD_PROVIDER_ADDITIONAL_MODELS_JSON }}
    steps:
      - name: Checkout repo
        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
        with:
          clean: false

      - name: Install Linux dependencies
        run: ./script/linux

      - name: Determine version and release channel
        if: ${{ startsWith(github.ref, 'refs/tags/v') }}
        run: |
          # This exports RELEASE_CHANNEL into env (GITHUB_ENV)
          script/determine-release-channel

      - name: Create and upload Linux .tar.gz bundle
        run: script/bundle-linux

      - name: Upload Linux bundle to workflow run if main branch or specific label
        uses: actions/upload-artifact@b4b15b8c7c6ac21ea08fcf65892d2ee8f75cf882 # v4
        if: ${{ github.ref == 'refs/heads/main' || contains(github.event.pull_request.labels.*.name, 'run-bundling') }}
        with:
          name: zed-${{ github.event.pull_request.head.sha || github.sha }}-aarch64-unknown-linux-gnu.tar.gz
          path: target/release/zed-*.tar.gz

      - name: Upload app bundle to release
        uses: softprops/action-gh-release@de2c0eb89ae2a093876385947365aca7b0e5f844 # v1
        with:
          draft: true
          prerelease: ${{ env.RELEASE_CHANNEL == 'preview' }}
          files: |
            target/zed-remote-server-linux-aarch64.gz
            target/release/zed-linux-aarch64.tar.gz
          body: ""
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}

@@ -1,3 +1,3 @@
# Code of Conduct

The Code of Conduct for this repository can be found online at [zed.dev/code-of-conduct](https://zed.dev/code-of-conduct).
The Code of Conduct for this repository can be found online at [zed.dev/docs/code-of-conduct](https://zed.dev/docs/code-of-conduct).

@@ -2,7 +2,7 @@

Thanks for your interest in contributing to Zed, the collaborative platform that is also a code editor!

All activity in Zed forums is subject to our [Code of Conduct](https://zed.dev/code-of-conduct). Additionally, contributors must sign our [Contributor License Agreement](https://zed.dev/cla) before their contributions can be merged.
All activity in Zed forums is subject to our [Code of Conduct](https://zed.dev/docs/code-of-conduct). Additionally, contributors must sign our [Contributor License Agreement](https://zed.dev/cla) before their contributions can be merged.

## Contribution ideas

554  Cargo.lock  generated
@@ -16,7 +16,6 @@ dependencies = [
|
||||
"project",
|
||||
"smallvec",
|
||||
"ui",
|
||||
"util",
|
||||
"workspace",
|
||||
]
|
||||
|
||||
@@ -292,12 +291,6 @@ dependencies = [
|
||||
"syn 2.0.76",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "arraydeque"
|
||||
version = "0.5.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "7d902e3d592a523def97af8f317b08ce16b7ab854c1985a0c671e6f15cebc236"
|
||||
|
||||
[[package]]
|
||||
name = "arrayref"
|
||||
version = "0.3.8"
|
||||
@@ -392,7 +385,7 @@ dependencies = [
|
||||
"ctor",
|
||||
"db",
|
||||
"editor",
|
||||
"env_logger 0.11.5",
|
||||
"env_logger",
|
||||
"feature_flags",
|
||||
"fs",
|
||||
"futures 0.3.30",
|
||||
@@ -1587,7 +1580,7 @@ dependencies = [
|
||||
"bitflags 2.6.0",
|
||||
"cexpr",
|
||||
"clang-sys",
|
||||
"itertools 0.12.1",
|
||||
"itertools 0.10.5",
|
||||
"lazy_static",
|
||||
"lazycell",
|
||||
"proc-macro2",
|
||||
@@ -2558,7 +2551,7 @@ dependencies = [
|
||||
"dashmap 6.0.1",
|
||||
"derive_more",
|
||||
"editor",
|
||||
"env_logger 0.11.5",
|
||||
"env_logger",
|
||||
"envy",
|
||||
"file_finder",
|
||||
"fs",
|
||||
@@ -2713,7 +2706,7 @@ dependencies = [
|
||||
"command_palette_hooks",
|
||||
"ctor",
|
||||
"editor",
|
||||
"env_logger 0.11.5",
|
||||
"env_logger",
|
||||
"fuzzy",
|
||||
"go_to_line",
|
||||
"gpui",
|
||||
@@ -3490,7 +3483,7 @@ dependencies = [
|
||||
"collections",
|
||||
"ctor",
|
||||
"editor",
|
||||
"env_logger 0.11.5",
|
||||
"env_logger",
|
||||
"futures 0.3.30",
|
||||
"gpui",
|
||||
"language",
|
||||
@@ -3678,7 +3671,7 @@ dependencies = [
|
||||
"ctor",
|
||||
"db",
|
||||
"emojis",
|
||||
"env_logger 0.11.5",
|
||||
"env_logger",
|
||||
"file_icons",
|
||||
"futures 0.3.30",
|
||||
"fuzzy",
|
||||
@@ -3885,19 +3878,6 @@ dependencies = [
|
||||
"regex",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "env_logger"
|
||||
version = "0.10.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "4cd405aab171cb85d6735e5c8d9db038c17d3ca007a4d2c25f337935c3d90580"
|
||||
dependencies = [
|
||||
"humantime",
|
||||
"is-terminal",
|
||||
"log",
|
||||
"regex",
|
||||
"termcolor",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "env_logger"
|
||||
version = "0.11.5"
|
||||
@@ -4006,7 +3986,7 @@ dependencies = [
|
||||
"client",
|
||||
"clock",
|
||||
"collections",
|
||||
"env_logger 0.11.5",
|
||||
"env_logger",
|
||||
"feature_flags",
|
||||
"fs",
|
||||
"git",
|
||||
@@ -4101,7 +4081,7 @@ dependencies = [
|
||||
"client",
|
||||
"collections",
|
||||
"ctor",
|
||||
"env_logger 0.11.5",
|
||||
"env_logger",
|
||||
"fs",
|
||||
"futures 0.3.30",
|
||||
"gpui",
|
||||
@@ -4143,7 +4123,7 @@ version = "0.1.0"
|
||||
dependencies = [
|
||||
"anyhow",
|
||||
"clap",
|
||||
"env_logger 0.11.5",
|
||||
"env_logger",
|
||||
"extension",
|
||||
"fs",
|
||||
"language",
|
||||
@@ -4302,7 +4282,7 @@ dependencies = [
|
||||
"collections",
|
||||
"ctor",
|
||||
"editor",
|
||||
"env_logger 0.11.5",
|
||||
"env_logger",
|
||||
"file_icons",
|
||||
"futures 0.3.30",
|
||||
"fuzzy",
|
||||
@@ -4910,13 +4890,12 @@ dependencies = [
|
||||
"git",
|
||||
"gpui",
|
||||
"http_client",
|
||||
"indoc",
|
||||
"pretty_assertions",
|
||||
"regex",
|
||||
"serde",
|
||||
"serde_json",
|
||||
"unindent",
|
||||
"url",
|
||||
"util",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -5058,7 +5037,7 @@ dependencies = [
|
||||
"ctor",
|
||||
"derive_more",
|
||||
"embed-resource",
|
||||
"env_logger 0.11.5",
|
||||
"env_logger",
|
||||
"etagere",
|
||||
"filedescriptor",
|
||||
"flume",
|
||||
@@ -5248,15 +5227,6 @@ dependencies = [
|
||||
"serde",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "hashlink"
|
||||
version = "0.8.4"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "e8094feaf31ff591f651a2664fb9cfd92bba7a60ce3197265e9482ebe753c8f7"
|
||||
dependencies = [
|
||||
"hashbrown 0.14.5",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "hashlink"
|
||||
version = "0.9.1"
|
||||
@@ -5585,7 +5555,7 @@ dependencies = [
|
||||
"httpdate",
|
||||
"itoa",
|
||||
"pin-project-lite",
|
||||
"socket2 0.5.7",
|
||||
"socket2 0.4.10",
|
||||
"tokio",
|
||||
"tower-service",
|
||||
"tracing",
|
||||
@@ -6155,20 +6125,6 @@ dependencies = [
|
||||
"simple_asn1",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "jupyter-serde"
|
||||
version = "0.2.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "0a444fb3f87ee6885eb316028cc998c7d84811663ef95d78c419419423d5a054"
|
||||
dependencies = [
|
||||
"anyhow",
|
||||
"chrono",
|
||||
"serde",
|
||||
"serde_json",
|
||||
"thiserror",
|
||||
"uuid",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "khronos-egl"
|
||||
version = "6.0.0"
|
||||
@@ -6229,7 +6185,7 @@ dependencies = [
|
||||
"collections",
|
||||
"ctor",
|
||||
"ec4rs",
|
||||
"env_logger 0.11.5",
|
||||
"env_logger",
|
||||
"futures 0.3.30",
|
||||
"fuzzy",
|
||||
"git",
|
||||
@@ -6286,7 +6242,7 @@ dependencies = [
|
||||
"copilot",
|
||||
"ctor",
|
||||
"editor",
|
||||
"env_logger 0.11.5",
|
||||
"env_logger",
|
||||
"feature_flags",
|
||||
"futures 0.3.30",
|
||||
"google_ai",
|
||||
@@ -6343,7 +6299,7 @@ dependencies = [
|
||||
"collections",
|
||||
"copilot",
|
||||
"editor",
|
||||
"env_logger 0.11.5",
|
||||
"env_logger",
|
||||
"futures 0.3.30",
|
||||
"gpui",
|
||||
"language",
|
||||
@@ -6377,11 +6333,6 @@ dependencies = [
|
||||
"lsp",
|
||||
"node_runtime",
|
||||
"paths",
|
||||
"pet",
|
||||
"pet-conda",
|
||||
"pet-core",
|
||||
"pet-poetry",
|
||||
"pet-reporter",
|
||||
"project",
|
||||
"regex",
|
||||
"rope",
|
||||
@@ -6489,7 +6440,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "4979f22fdb869068da03c9f7528f8297c6fd2606bc3a4affe42e6a823fdb8da4"
|
||||
dependencies = [
|
||||
"cfg-if",
|
||||
"windows-targets 0.52.6",
|
||||
"windows-targets 0.48.5",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -6678,7 +6629,7 @@ dependencies = [
|
||||
"async-pipe",
|
||||
"collections",
|
||||
"ctor",
|
||||
"env_logger 0.11.5",
|
||||
"env_logger",
|
||||
"futures 0.3.30",
|
||||
"gpui",
|
||||
"log",
|
||||
@@ -6761,7 +6712,7 @@ version = "0.1.0"
|
||||
dependencies = [
|
||||
"anyhow",
|
||||
"assets",
|
||||
"env_logger 0.11.5",
|
||||
"env_logger",
|
||||
"futures 0.3.30",
|
||||
"gpui",
|
||||
"language",
|
||||
@@ -6874,7 +6825,7 @@ dependencies = [
|
||||
"clap",
|
||||
"clap_complete",
|
||||
"elasticlunr-rs",
|
||||
"env_logger 0.11.5",
|
||||
"env_logger",
|
||||
"futures-util",
|
||||
"handlebars 5.1.2",
|
||||
"ignore",
|
||||
@@ -7056,15 +7007,6 @@ dependencies = [
|
||||
"windows-sys 0.48.0",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "msvc_spectre_libs"
|
||||
version = "0.1.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "8661ace213a0a130c7c5b9542df5023aedf092a02008ccf477b39ff108990305"
|
||||
dependencies = [
|
||||
"cc",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "multi_buffer"
|
||||
version = "0.1.0"
|
||||
@@ -7073,7 +7015,7 @@ dependencies = [
|
||||
"clock",
|
||||
"collections",
|
||||
"ctor",
|
||||
"env_logger 0.11.5",
|
||||
"env_logger",
|
||||
"futures 0.3.30",
|
||||
"gpui",
|
||||
"itertools 0.13.0",
|
||||
@@ -7152,21 +7094,6 @@ dependencies = [
|
||||
"tempfile",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "nbformat"
|
||||
version = "0.3.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "146074ad45cab20f5d98ccded164826158471f21d04f96e40b9872529e10979d"
|
||||
dependencies = [
|
||||
"anyhow",
|
||||
"chrono",
|
||||
"jupyter-serde",
|
||||
"serde",
|
||||
"serde_json",
|
||||
"thiserror",
|
||||
"uuid",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "ndk"
|
||||
version = "0.8.0"
|
||||
@@ -7802,10 +7729,8 @@ dependencies = [
|
||||
"serde",
|
||||
"serde_json",
|
||||
"settings",
|
||||
"smallvec",
|
||||
"smol",
|
||||
"theme",
|
||||
"ui",
|
||||
"util",
|
||||
"workspace",
|
||||
"worktree",
|
||||
@@ -8048,366 +7973,6 @@ dependencies = [
|
||||
"sha2",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "pet"
|
||||
version = "0.1.0"
|
||||
source = "git+https://github.com/microsoft/python-environment-tools.git?rev=ffcbf3f28c46633abd5448a52b1f396c322e0d6c#ffcbf3f28c46633abd5448a52b1f396c322e0d6c"
|
||||
dependencies = [
|
||||
"clap",
|
||||
"env_logger 0.10.2",
|
||||
"lazy_static",
|
||||
"log",
|
||||
"msvc_spectre_libs",
|
||||
"pet-conda",
|
||||
"pet-core",
|
||||
"pet-env-var-path",
|
||||
"pet-fs",
|
||||
"pet-global-virtualenvs",
|
||||
"pet-homebrew",
|
||||
"pet-jsonrpc",
|
||||
"pet-linux-global-python",
|
||||
"pet-mac-commandlinetools",
|
||||
"pet-mac-python-org",
|
||||
"pet-mac-xcode",
|
||||
"pet-pipenv",
|
||||
"pet-poetry",
|
||||
"pet-pyenv",
|
||||
"pet-python-utils",
|
||||
"pet-reporter",
|
||||
"pet-telemetry",
|
||||
"pet-venv",
|
||||
"pet-virtualenv",
|
||||
"pet-virtualenvwrapper",
|
||||
"pet-windows-registry",
|
||||
"pet-windows-store",
|
||||
"serde",
|
||||
"serde_json",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "pet-conda"
|
||||
version = "0.1.0"
|
||||
source = "git+https://github.com/microsoft/python-environment-tools.git?rev=ffcbf3f28c46633abd5448a52b1f396c322e0d6c#ffcbf3f28c46633abd5448a52b1f396c322e0d6c"
|
||||
dependencies = [
|
||||
"env_logger 0.10.2",
|
||||
"lazy_static",
|
||||
"log",
|
||||
"msvc_spectre_libs",
|
||||
"pet-core",
|
||||
"pet-fs",
|
||||
"pet-python-utils",
|
||||
"pet-reporter",
|
||||
"regex",
|
||||
"serde",
|
||||
"serde_json",
|
||||
"yaml-rust2",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "pet-core"
|
||||
version = "0.1.0"
|
||||
source = "git+https://github.com/microsoft/python-environment-tools.git?rev=ffcbf3f28c46633abd5448a52b1f396c322e0d6c#ffcbf3f28c46633abd5448a52b1f396c322e0d6c"
|
||||
dependencies = [
|
||||
"clap",
|
||||
"lazy_static",
|
||||
"log",
|
||||
"msvc_spectre_libs",
|
||||
"pet-fs",
|
||||
"regex",
|
||||
"serde",
|
||||
"serde_json",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "pet-env-var-path"
|
||||
version = "0.1.0"
|
||||
source = "git+https://github.com/microsoft/python-environment-tools.git?rev=ffcbf3f28c46633abd5448a52b1f396c322e0d6c#ffcbf3f28c46633abd5448a52b1f396c322e0d6c"
|
||||
dependencies = [
|
||||
"lazy_static",
|
||||
"log",
|
||||
"msvc_spectre_libs",
|
||||
"pet-conda",
|
||||
"pet-core",
|
||||
"pet-fs",
|
||||
"pet-python-utils",
|
||||
"pet-virtualenv",
|
||||
"regex",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "pet-fs"
|
||||
version = "0.1.0"
|
||||
source = "git+https://github.com/microsoft/python-environment-tools.git?rev=ffcbf3f28c46633abd5448a52b1f396c322e0d6c#ffcbf3f28c46633abd5448a52b1f396c322e0d6c"
|
||||
dependencies = [
|
||||
"log",
|
||||
"msvc_spectre_libs",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "pet-global-virtualenvs"
|
||||
version = "0.1.0"
|
||||
source = "git+https://github.com/microsoft/python-environment-tools.git?rev=ffcbf3f28c46633abd5448a52b1f396c322e0d6c#ffcbf3f28c46633abd5448a52b1f396c322e0d6c"
|
||||
dependencies = [
|
||||
"log",
|
||||
"msvc_spectre_libs",
|
||||
"pet-conda",
|
||||
"pet-core",
|
||||
"pet-fs",
|
||||
"pet-virtualenv",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "pet-homebrew"
|
||||
version = "0.1.0"
|
||||
source = "git+https://github.com/microsoft/python-environment-tools.git?rev=ffcbf3f28c46633abd5448a52b1f396c322e0d6c#ffcbf3f28c46633abd5448a52b1f396c322e0d6c"
|
||||
dependencies = [
|
||||
"lazy_static",
|
||||
"log",
|
||||
"msvc_spectre_libs",
|
||||
"pet-conda",
|
||||
"pet-core",
|
||||
"pet-fs",
|
||||
"pet-python-utils",
|
||||
"pet-virtualenv",
|
||||
"regex",
|
||||
"serde",
|
||||
"serde_json",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "pet-jsonrpc"
|
||||
version = "0.1.0"
|
||||
source = "git+https://github.com/microsoft/python-environment-tools.git?rev=ffcbf3f28c46633abd5448a52b1f396c322e0d6c#ffcbf3f28c46633abd5448a52b1f396c322e0d6c"
|
||||
dependencies = [
|
||||
"env_logger 0.10.2",
|
||||
"log",
|
||||
"msvc_spectre_libs",
|
||||
"pet-core",
|
||||
"serde",
|
||||
"serde_json",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "pet-linux-global-python"
|
||||
version = "0.1.0"
|
||||
source = "git+https://github.com/microsoft/python-environment-tools.git?rev=ffcbf3f28c46633abd5448a52b1f396c322e0d6c#ffcbf3f28c46633abd5448a52b1f396c322e0d6c"
|
||||
dependencies = [
|
||||
"log",
|
||||
"msvc_spectre_libs",
|
||||
"pet-core",
|
||||
"pet-fs",
|
||||
"pet-python-utils",
|
||||
"pet-virtualenv",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "pet-mac-commandlinetools"
|
||||
version = "0.1.0"
|
||||
source = "git+https://github.com/microsoft/python-environment-tools.git?rev=ffcbf3f28c46633abd5448a52b1f396c322e0d6c#ffcbf3f28c46633abd5448a52b1f396c322e0d6c"
|
||||
dependencies = [
|
||||
"log",
|
||||
"msvc_spectre_libs",
|
||||
"pet-core",
|
||||
"pet-fs",
|
||||
"pet-python-utils",
|
||||
"pet-virtualenv",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "pet-mac-python-org"
|
||||
version = "0.1.0"
|
||||
source = "git+https://github.com/microsoft/python-environment-tools.git?rev=ffcbf3f28c46633abd5448a52b1f396c322e0d6c#ffcbf3f28c46633abd5448a52b1f396c322e0d6c"
|
||||
dependencies = [
|
||||
"log",
|
||||
"msvc_spectre_libs",
|
||||
"pet-core",
|
||||
"pet-fs",
|
||||
"pet-python-utils",
|
||||
"pet-virtualenv",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "pet-mac-xcode"
|
||||
version = "0.1.0"
|
||||
source = "git+https://github.com/microsoft/python-environment-tools.git?rev=ffcbf3f28c46633abd5448a52b1f396c322e0d6c#ffcbf3f28c46633abd5448a52b1f396c322e0d6c"
|
||||
dependencies = [
|
||||
"log",
|
||||
"msvc_spectre_libs",
|
||||
"pet-core",
|
||||
"pet-fs",
|
||||
"pet-python-utils",
|
||||
"pet-virtualenv",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "pet-pipenv"
|
||||
version = "0.1.0"
|
||||
source = "git+https://github.com/microsoft/python-environment-tools.git?rev=ffcbf3f28c46633abd5448a52b1f396c322e0d6c#ffcbf3f28c46633abd5448a52b1f396c322e0d6c"
|
||||
dependencies = [
|
||||
"log",
|
||||
"msvc_spectre_libs",
|
||||
"pet-core",
|
||||
"pet-fs",
|
||||
"pet-python-utils",
|
||||
"pet-virtualenv",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "pet-poetry"
|
||||
version = "0.1.0"
|
||||
source = "git+https://github.com/microsoft/python-environment-tools.git?rev=ffcbf3f28c46633abd5448a52b1f396c322e0d6c#ffcbf3f28c46633abd5448a52b1f396c322e0d6c"
|
||||
dependencies = [
|
||||
"base64 0.22.1",
|
||||
"lazy_static",
|
||||
"log",
|
||||
"msvc_spectre_libs",
|
||||
"pet-core",
|
||||
"pet-fs",
|
||||
"pet-python-utils",
|
||||
"pet-reporter",
|
||||
"pet-virtualenv",
|
||||
"regex",
|
||||
"serde",
|
||||
"serde_json",
|
||||
"sha2",
|
||||
"toml 0.8.19",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "pet-pyenv"
|
||||
version = "0.1.0"
|
||||
source = "git+https://github.com/microsoft/python-environment-tools.git?rev=ffcbf3f28c46633abd5448a52b1f396c322e0d6c#ffcbf3f28c46633abd5448a52b1f396c322e0d6c"
|
||||
dependencies = [
|
||||
"lazy_static",
|
||||
"log",
|
||||
"msvc_spectre_libs",
|
||||
"pet-conda",
|
||||
"pet-core",
|
||||
"pet-fs",
|
||||
"pet-python-utils",
|
||||
"pet-reporter",
|
||||
"regex",
|
||||
"serde",
|
||||
"serde_json",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "pet-python-utils"
|
||||
version = "0.1.0"
|
||||
source = "git+https://github.com/microsoft/python-environment-tools.git?rev=ffcbf3f28c46633abd5448a52b1f396c322e0d6c#ffcbf3f28c46633abd5448a52b1f396c322e0d6c"
|
||||
dependencies = [
|
||||
"env_logger 0.10.2",
|
||||
"lazy_static",
|
||||
"log",
|
||||
"msvc_spectre_libs",
|
||||
"pet-core",
|
||||
"pet-fs",
|
||||
"regex",
|
||||
"serde",
|
||||
"serde_json",
|
||||
"sha2",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "pet-reporter"
|
||||
version = "0.1.0"
|
||||
source = "git+https://github.com/microsoft/python-environment-tools.git?rev=ffcbf3f28c46633abd5448a52b1f396c322e0d6c#ffcbf3f28c46633abd5448a52b1f396c322e0d6c"
|
||||
dependencies = [
|
||||
"env_logger 0.10.2",
|
||||
"log",
|
||||
"msvc_spectre_libs",
|
||||
"pet-core",
|
||||
"pet-jsonrpc",
|
||||
"serde",
|
||||
"serde_json",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "pet-telemetry"
|
||||
version = "0.1.0"
|
||||
source = "git+https://github.com/microsoft/python-environment-tools.git?rev=ffcbf3f28c46633abd5448a52b1f396c322e0d6c#ffcbf3f28c46633abd5448a52b1f396c322e0d6c"
|
||||
dependencies = [
|
||||
"env_logger 0.10.2",
|
||||
"lazy_static",
|
||||
"log",
|
||||
"msvc_spectre_libs",
|
||||
"pet-core",
|
||||
"pet-fs",
|
||||
"pet-python-utils",
|
||||
"regex",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "pet-venv"
|
||||
version = "0.1.0"
|
||||
source = "git+https://github.com/microsoft/python-environment-tools.git?rev=ffcbf3f28c46633abd5448a52b1f396c322e0d6c#ffcbf3f28c46633abd5448a52b1f396c322e0d6c"
|
||||
dependencies = [
|
||||
"log",
|
||||
"msvc_spectre_libs",
|
||||
"pet-core",
|
||||
"pet-python-utils",
|
||||
"pet-virtualenv",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "pet-virtualenv"
|
||||
version = "0.1.0"
|
||||
source = "git+https://github.com/microsoft/python-environment-tools.git?rev=ffcbf3f28c46633abd5448a52b1f396c322e0d6c#ffcbf3f28c46633abd5448a52b1f396c322e0d6c"
|
||||
dependencies = [
|
||||
"log",
|
||||
"msvc_spectre_libs",
|
||||
"pet-core",
|
||||
"pet-fs",
|
||||
"pet-python-utils",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "pet-virtualenvwrapper"
|
||||
version = "0.1.0"
|
||||
source = "git+https://github.com/microsoft/python-environment-tools.git?rev=ffcbf3f28c46633abd5448a52b1f396c322e0d6c#ffcbf3f28c46633abd5448a52b1f396c322e0d6c"
|
||||
dependencies = [
|
||||
"log",
|
||||
"msvc_spectre_libs",
|
||||
"pet-core",
|
||||
"pet-fs",
|
||||
"pet-python-utils",
|
||||
"pet-virtualenv",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "pet-windows-registry"
|
||||
version = "0.1.0"
|
||||
source = "git+https://github.com/microsoft/python-environment-tools.git?rev=ffcbf3f28c46633abd5448a52b1f396c322e0d6c#ffcbf3f28c46633abd5448a52b1f396c322e0d6c"
|
||||
dependencies = [
|
||||
"lazy_static",
|
||||
"log",
|
||||
"msvc_spectre_libs",
|
||||
"pet-conda",
|
||||
"pet-core",
|
||||
"pet-fs",
|
||||
"pet-python-utils",
|
||||
"pet-virtualenv",
|
||||
"pet-windows-store",
|
||||
"regex",
|
||||
"winreg 0.52.0",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "pet-windows-store"
|
||||
version = "0.1.0"
|
||||
source = "git+https://github.com/microsoft/python-environment-tools.git?rev=ffcbf3f28c46633abd5448a52b1f396c322e0d6c#ffcbf3f28c46633abd5448a52b1f396c322e0d6c"
|
||||
dependencies = [
|
||||
"lazy_static",
|
||||
"log",
|
||||
"msvc_spectre_libs",
|
||||
"pet-core",
|
||||
"pet-fs",
|
||||
"pet-python-utils",
|
||||
"pet-virtualenv",
|
||||
"regex",
|
||||
"winreg 0.52.0",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "petgraph"
|
||||
version = "0.6.5"
|
||||
@@ -8496,7 +8061,7 @@ dependencies = [
|
||||
"anyhow",
|
||||
"ctor",
|
||||
"editor",
|
||||
"env_logger 0.11.5",
|
||||
"env_logger",
|
||||
"gpui",
|
||||
"menu",
|
||||
"serde",
|
||||
@@ -8842,7 +8407,7 @@ dependencies = [
|
||||
"client",
|
||||
"clock",
|
||||
"collections",
|
||||
"env_logger 0.11.5",
|
||||
"env_logger",
|
||||
"fs",
|
||||
"futures 0.3.30",
|
||||
"fuzzy",
|
||||
@@ -9554,11 +9119,10 @@ dependencies = [
|
||||
"async-watch",
|
||||
"backtrace",
|
||||
"cargo_toml",
|
||||
"chrono",
|
||||
"clap",
|
||||
"client",
|
||||
"clock",
|
||||
"env_logger 0.11.5",
|
||||
"env_logger",
|
||||
"fork",
|
||||
"fs",
|
||||
"futures 0.3.30",
|
||||
@@ -9574,8 +9138,6 @@ dependencies = [
|
||||
"node_runtime",
|
||||
"paths",
|
||||
"project",
|
||||
"proto",
|
||||
"release_channel",
|
||||
"remote",
|
||||
"reqwest_client",
|
||||
"rpc",
|
||||
@@ -9585,7 +9147,6 @@ dependencies = [
|
||||
"settings",
|
||||
"shellexpand 2.1.2",
|
||||
"smol",
|
||||
"telemetry_events",
|
||||
"toml 0.8.19",
|
||||
"util",
|
||||
"worktree",
|
||||
@@ -9612,8 +9173,7 @@ dependencies = [
|
||||
"collections",
|
||||
"command_palette_hooks",
|
||||
"editor",
|
||||
"env_logger 0.11.5",
|
||||
"feature_flags",
|
||||
"env_logger",
|
||||
"futures 0.3.30",
|
||||
"gpui",
|
||||
"http_client",
|
||||
@@ -9623,9 +9183,7 @@ dependencies = [
|
||||
"languages",
|
||||
"log",
|
||||
"markdown_preview",
|
||||
"menu",
|
||||
"multi_buffer",
|
||||
"nbformat",
|
||||
"project",
|
||||
"runtimelib",
|
||||
"schemars",
|
||||
@@ -9895,11 +9453,10 @@ dependencies = [
|
||||
"arrayvec",
|
||||
"criterion",
|
||||
"ctor",
|
||||
"env_logger 0.11.5",
|
||||
"env_logger",
|
||||
"gpui",
|
||||
"log",
|
||||
"rand 0.8.5",
|
||||
"rayon",
|
||||
"smallvec",
|
||||
"sum_tree",
|
||||
"unicode-segmentation",
|
||||
@@ -9927,7 +9484,7 @@ dependencies = [
|
||||
"base64 0.22.1",
|
||||
"chrono",
|
||||
"collections",
|
||||
"env_logger 0.11.5",
|
||||
"env_logger",
|
||||
"futures 0.3.30",
|
||||
"gpui",
|
||||
"parking_lot",
|
||||
@@ -9965,9 +9522,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "runtimelib"
|
||||
version = "0.16.0"
|
||||
version = "0.15.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "263588fe9593333c4bfde258c9021fc64e766ea434e070c6b67c7100536d6499"
|
||||
checksum = "a7d76d28b882a7b889ebb04e79bc2b160b3061821ea596ff0f4a838fc7a76db0"
|
||||
dependencies = [
|
||||
"anyhow",
|
||||
"async-dispatcher",
|
||||
@@ -9979,7 +9536,6 @@ dependencies = [
|
||||
"dirs 5.0.1",
|
||||
"futures 0.3.30",
|
||||
"glob",
|
||||
"jupyter-serde",
|
||||
"rand 0.8.5",
|
||||
"ring 0.17.8",
|
||||
"serde",
|
||||
@@ -10517,7 +10073,7 @@ dependencies = [
|
||||
"client",
|
||||
"clock",
|
||||
"collections",
|
||||
"env_logger 0.11.5",
|
||||
"env_logger",
|
||||
"feature_flags",
|
||||
"fs",
|
||||
"futures 0.3.30",
|
||||
@@ -11210,7 +10766,7 @@ dependencies = [
|
||||
"futures-io",
|
||||
"futures-util",
|
||||
"hashbrown 0.14.5",
|
||||
"hashlink 0.9.1",
|
||||
"hashlink",
|
||||
"hex",
|
||||
"indexmap 2.4.0",
|
||||
"log",
|
||||
@@ -11534,7 +11090,7 @@ version = "0.1.0"
|
||||
dependencies = [
|
||||
"arrayvec",
|
||||
"ctor",
|
||||
"env_logger 0.11.5",
|
||||
"env_logger",
|
||||
"log",
|
||||
"rand 0.8.5",
|
||||
"rayon",
|
||||
@@ -11548,7 +11104,7 @@ dependencies = [
|
||||
"client",
|
||||
"collections",
|
||||
"editor",
|
||||
"env_logger 0.11.5",
|
||||
"env_logger",
|
||||
"futures 0.3.30",
|
||||
"gpui",
|
||||
"http_client",
|
||||
@@ -11847,7 +11403,7 @@ dependencies = [
|
||||
"collections",
|
||||
"ctor",
|
||||
"editor",
|
||||
"env_logger 0.11.5",
|
||||
"env_logger",
|
||||
"gpui",
|
||||
"language",
|
||||
"menu",
|
||||
@@ -12054,7 +11610,7 @@ dependencies = [
|
||||
"clock",
|
||||
"collections",
|
||||
"ctor",
|
||||
"env_logger 0.11.5",
|
||||
"env_logger",
|
||||
"gpui",
|
||||
"http_client",
|
||||
"log",
|
||||
@@ -12543,21 +12099,6 @@ dependencies = [
|
||||
"winnow 0.6.18",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "toolchain_selector"
|
||||
version = "0.1.0"
|
||||
dependencies = [
|
||||
"editor",
|
||||
"fuzzy",
|
||||
"gpui",
|
||||
"language",
|
||||
"picker",
|
||||
"project",
|
||||
"ui",
|
||||
"util",
|
||||
"workspace",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "topological-sort"
|
||||
version = "0.2.2"
|
||||
@@ -13303,7 +12844,6 @@ dependencies = [
|
||||
"git",
|
||||
"gpui",
|
||||
"picker",
|
||||
"project",
|
||||
"ui",
|
||||
"util",
|
||||
"workspace",
|
||||
@@ -14165,7 +13705,7 @@ version = "0.1.9"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "cf221c93e13a30d793f7645a0e7762c55d169dbb0a49671918a2319d289b10bb"
|
||||
dependencies = [
|
||||
"windows-sys 0.59.0",
|
||||
"windows-sys 0.48.0",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -14727,7 +14267,7 @@ dependencies = [
|
||||
"collections",
|
||||
"db",
|
||||
"derive_more",
|
||||
"env_logger 0.11.5",
|
||||
"env_logger",
|
||||
"fs",
|
||||
"futures 0.3.30",
|
||||
"git",
|
||||
@@ -14764,13 +14304,12 @@ dependencies = [
|
||||
"anyhow",
|
||||
"clock",
|
||||
"collections",
|
||||
"env_logger 0.11.5",
|
||||
"env_logger",
|
||||
"fs",
|
||||
"futures 0.3.30",
|
||||
"fuzzy",
|
||||
"git",
|
||||
"git2",
|
||||
"git_hosting_providers",
|
||||
"gpui",
|
||||
"http_client",
|
||||
"ignore",
|
||||
@@ -14935,17 +14474,6 @@ dependencies = [
|
||||
"clap",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "yaml-rust2"
|
||||
version = "0.8.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "8902160c4e6f2fb145dbe9d6760a75e3c9522d8bf796ed7047c85919ac7115f8"
|
||||
dependencies = [
|
||||
"arraydeque",
|
||||
"encoding_rs",
|
||||
"hashlink 0.8.4",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "yansi"
|
||||
version = "1.0.1"
|
||||
@@ -15059,7 +14587,7 @@ dependencies = [
|
||||
"db",
|
||||
"diagnostics",
|
||||
"editor",
|
||||
"env_logger 0.11.5",
|
||||
"env_logger",
|
||||
"extension",
|
||||
"extensions_ui",
|
||||
"feature_flags",
|
||||
@@ -15098,7 +14626,6 @@ dependencies = [
|
||||
"project",
|
||||
"project_panel",
|
||||
"project_symbols",
|
||||
"proto",
|
||||
"quick_action_bar",
|
||||
"recent_projects",
|
||||
"release_channel",
|
||||
@@ -15127,7 +14654,6 @@ dependencies = [
|
||||
"theme",
|
||||
"theme_selector",
|
||||
"time",
|
||||
"toolchain_selector",
|
||||
"tree-sitter-md",
|
||||
"tree-sitter-rust",
|
||||
"ui",
|
||||
@@ -15175,7 +14701,7 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "zed_dart"
|
||||
version = "0.1.2"
|
||||
version = "0.1.1"
|
||||
dependencies = [
|
||||
"zed_extension_api 0.1.0",
|
||||
]
|
||||
|
||||
13  Cargo.toml
@@ -117,7 +117,6 @@ members = [
|
||||
"crates/theme_selector",
|
||||
"crates/time_format",
|
||||
"crates/title_bar",
|
||||
"crates/toolchain_selector",
|
||||
"crates/ui",
|
||||
"crates/ui_input",
|
||||
"crates/ui_macros",
|
||||
@@ -291,7 +290,6 @@ theme_importer = { path = "crates/theme_importer" }
|
||||
theme_selector = { path = "crates/theme_selector" }
|
||||
time_format = { path = "crates/time_format" }
|
||||
title_bar = { path = "crates/title_bar" }
|
||||
toolchain_selector = { path = "crates/toolchain_selector" }
|
||||
ui = { path = "crates/ui" }
|
||||
ui_input = { path = "crates/ui_input" }
|
||||
ui_macros = { path = "crates/ui_macros" }
|
||||
@@ -371,7 +369,6 @@ linkify = "0.10.0"
|
||||
log = { version = "0.4.16", features = ["kv_unstable_serde", "serde"] }
|
||||
markup5ever_rcdom = "0.3.0"
|
||||
nanoid = "0.4"
|
||||
nbformat = "0.3.1"
|
||||
nix = "0.29"
|
||||
num-format = "0.4.4"
|
||||
once_cell = "1.19.0"
|
||||
@@ -379,11 +376,6 @@ ordered-float = "2.1.1"
|
||||
palette = { version = "0.7.5", default-features = false, features = ["std"] }
|
||||
parking_lot = "0.12.1"
|
||||
pathdiff = "0.2"
|
||||
pet = { git = "https://github.com/microsoft/python-environment-tools.git", rev = "ffcbf3f28c46633abd5448a52b1f396c322e0d6c" }
|
||||
pet-conda = { git = "https://github.com/microsoft/python-environment-tools.git", rev = "ffcbf3f28c46633abd5448a52b1f396c322e0d6c" }
|
||||
pet-core = { git = "https://github.com/microsoft/python-environment-tools.git", rev = "ffcbf3f28c46633abd5448a52b1f396c322e0d6c" }
|
||||
pet-poetry = { git = "https://github.com/microsoft/python-environment-tools.git", rev = "ffcbf3f28c46633abd5448a52b1f396c322e0d6c" }
|
||||
pet-reporter = { git = "https://github.com/microsoft/python-environment-tools.git", rev = "ffcbf3f28c46633abd5448a52b1f396c322e0d6c" }
|
||||
postage = { version = "0.5", features = ["futures-traits"] }
|
||||
pretty_assertions = "1.3.0"
|
||||
profiling = "1"
|
||||
@@ -392,7 +384,6 @@ prost-build = "0.9"
|
||||
prost-types = "0.9"
|
||||
pulldown-cmark = { version = "0.12.0", default-features = false }
|
||||
rand = "0.8.5"
|
||||
rayon = "1.8"
|
||||
regex = "1.5"
|
||||
repair_json = "0.1.0"
|
||||
reqwest = { git = "https://github.com/zed-industries/reqwest.git", rev = "fd110f6998da16bbca97b6dddda9be7827c50e29", default-features = false, features = [
|
||||
@@ -404,7 +395,7 @@ reqwest = { git = "https://github.com/zed-industries/reqwest.git", rev = "fd110f
|
||||
"stream",
|
||||
] }
|
||||
rsa = "0.9.6"
|
||||
runtimelib = { version = "0.16.0", default-features = false, features = [
|
||||
runtimelib = { version = "0.15", default-features = false, features = [
|
||||
"async-dispatcher-runtime",
|
||||
] }
|
||||
rustc-demangle = "0.1.23"
|
||||
@@ -473,7 +464,7 @@ tree-sitter-typescript = "0.23"
|
||||
tree-sitter-yaml = { git = "https://github.com/zed-industries/tree-sitter-yaml", rev = "baff0b51c64ef6a1fb1f8390f3ad6015b83ec13a" }
|
||||
unicase = "2.6"
|
||||
unindent = "0.1.7"
|
||||
unicode-segmentation = "1.10"
|
||||
unicode-segmentation = "1.11"
|
||||
url = "2.2"
|
||||
uuid = { version = "1.1.2", features = ["v4", "v5", "serde"] }
|
||||
wasmparser = "0.215"
|
||||
|
||||
@@ -58,7 +58,6 @@
|
||||
"gitignore": "vcs",
|
||||
"gitkeep": "vcs",
|
||||
"gitmodules": "vcs",
|
||||
"gleam": "gleam",
|
||||
"go": "go",
|
||||
"gql": "graphql",
|
||||
"graphql": "graphql",
|
||||
@@ -84,7 +83,6 @@
|
||||
"j2k": "image",
|
||||
"java": "java",
|
||||
"jfif": "image",
|
||||
"jl": "julia",
|
||||
"jp2": "image",
|
||||
"jpeg": "image",
|
||||
"jpg": "image",
|
||||
@@ -92,6 +90,7 @@
|
||||
"json": "storage",
|
||||
"jsonc": "storage",
|
||||
"jsx": "react",
|
||||
"julia": "julia",
|
||||
"jxl": "image",
|
||||
"kt": "kotlin",
|
||||
"ldf": "storage",
|
||||
@@ -265,9 +264,6 @@
|
||||
"fsharp": {
|
||||
"icon": "icons/file_icons/fsharp.svg"
|
||||
},
|
||||
"gleam": {
|
||||
"icon": "icons/file_icons/gleam.svg"
|
||||
},
|
||||
"go": {
|
||||
"icon": "icons/file_icons/go.svg"
|
||||
},
|
||||
|
||||
@@ -1,6 +0,0 @@
|
||||
<svg xmlns="http://www.w3.org/2000/svg" width="16" height="16" viewBox="0 0 16 16" fill="none">
|
||||
<path fill-rule="evenodd" fill="black" d="M 3.828125 14.601562 C 3.894531 15.726562 5.183594 16.375 6.132812 15.785156 L 6.136719 15.785156 L 8.988281 13.824219 C 8.996094 13.816406 9.007812 13.8125 9.015625 13.804688 C 9.203125 13.675781 9.4375 13.636719 9.65625 13.691406 L 12.988281 14.550781 C 14.105469 14.839844 15.140625 13.769531 14.8125 12.667969 L 13.832031 9.386719 C 13.769531 9.167969 13.800781 8.9375 13.921875 8.75 C 13.921875 8.746094 13.925781 8.746094 13.925781 8.746094 L 15.777344 5.863281 L 15.777344 5.859375 C 15.78125 5.851562 15.785156 5.84375 15.789062 5.835938 L 15.792969 5.835938 C 16.382812 4.871094 15.6875 3.582031 14.542969 3.554688 L 11.109375 3.472656 C 10.878906 3.464844 10.664062 3.359375 10.519531 3.183594 L 8.339844 0.542969 C 8.019531 0.152344 7.550781 -0.015625 7.105469 0.0078125 L 7.101562 0.0078125 C 7.039062 0.0117188 6.976562 0.0195312 6.914062 0.0273438 C 6.414062 0.117188 5.945312 0.453125 5.75 1 L 4.609375 4.222656 C 4.535156 4.4375 4.367188 4.613281 4.152344 4.695312 L 0.957031 5.945312 C -0.121094 6.363281 -0.328125 7.835938 0.589844 8.535156 L 3.316406 10.609375 C 3.5 10.75 3.609375 10.960938 3.625 11.191406 Z M 7.515625 1.847656 C 7.421875 1.730469 7.296875 1.695312 7.183594 1.714844 C 7.066406 1.734375 6.960938 1.8125 6.914062 1.953125 L 5.867188 4.902344 C 5.699219 5.382812 5.328125 5.765625 4.851562 5.949219 L 1.925781 7.09375 C 1.785156 7.148438 1.710938 7.253906 1.695312 7.371094 C 1.679688 7.484375 1.71875 7.605469 1.839844 7.695312 L 4.335938 9.597656 C 4.742188 9.90625 4.992188 10.375 5.023438 10.882812 L 5.207031 14.003906 C 5.214844 14.152344 5.296875 14.253906 5.398438 14.304688 C 5.503906 14.355469 5.632812 14.355469 5.757812 14.269531 L 8.347656 12.492188 C 8.765625 12.207031 9.292969 12.113281 9.785156 12.242188 L 12.824219 13.027344 C 12.972656 13.066406 13.09375 13.023438 13.175781 12.9375 C 13.257812 12.855469 13.296875 12.734375 13.253906 12.589844 L 12.355469 9.589844 C 12.210938 9.105469 12.285156 8.578125 12.558594 8.148438 L 14.253906 5.511719 C 14.335938 5.386719 14.332031 5.257812 14.277344 5.15625 C 14.222656 5.054688 14.117188 4.980469 13.964844 4.976562 L 10.824219 4.902344 C 10.316406 4.886719 9.835938 4.65625 9.511719 4.261719 Z M 7.515625 1.847656 "/>
|
||||
<path fill="black" d="M 5.71875 7.257812 C 5.671875 7.25 5.628906 7.246094 5.582031 7.246094 C 5.09375 7.246094 4.695312 7.644531 4.695312 8.128906 C 4.695312 8.613281 5.09375 9.011719 5.582031 9.011719 C 6.070312 9.011719 6.46875 8.613281 6.46875 8.128906 C 6.46875 7.6875 6.140625 7.320312 5.71875 7.257812 Z M 5.71875 7.257812 "/>
|
||||
<path fill="black" d="M 11.019531 7.953125 C 10.976562 7.957031 10.929688 7.960938 10.886719 7.960938 C 10.398438 7.960938 10 7.5625 10 7.078125 C 10 6.59375 10.398438 6.195312 10.886719 6.195312 C 11.371094 6.195312 11.773438 6.59375 11.773438 7.078125 C 11.773438 7.519531 11.445312 7.886719 11.019531 7.953125 Z M 11.019531 7.953125 "/>
|
||||
<path fill="black" d="M 7.269531 9.089844 C 7.53125 8.988281 7.828125 9.113281 7.933594 9.375 C 8.125 9.859375 8.503906 9.996094 8.796875 9.949219 C 9.082031 9.898438 9.378906 9.664062 9.378906 9.136719 C 9.378906 8.855469 9.605469 8.628906 9.886719 8.628906 C 10.167969 8.628906 10.398438 8.855469 10.398438 9.136719 C 10.398438 10.140625 9.757812 10.816406 8.96875 10.949219 C 8.1875 11.078125 7.351562 10.664062 6.988281 9.75 C 6.882812 9.488281 7.011719 9.195312 7.269531 9.089844 Z M 7.269531 9.089844 "/>
|
||||
</svg>
|
||||
|
@@ -1,7 +0,0 @@
|
||||
<svg width="16" height="16" viewBox="0 0 16 16" fill="none" xmlns="http://www.w3.org/2000/svg">
|
||||
<path d="M8.33333 8H3" stroke="#FBF1C7" stroke-linecap="round" stroke-linejoin="round"/>
|
||||
<path d="M11.6667 4H3" stroke="#FBF1C7" stroke-linecap="round" stroke-linejoin="round"/>
|
||||
<path d="M11.6667 12H3" stroke="#FBF1C7" stroke-linecap="round" stroke-linejoin="round"/>
|
||||
<path d="M13.6667 6.66663L11 9.33329" stroke="#FBF1C7" stroke-linecap="round" stroke-linejoin="round"/>
|
||||
<path d="M11 6.66663L13.6667 9.33329" stroke="#FBF1C7" stroke-linecap="round" stroke-linejoin="round"/>
|
||||
</svg>
|
||||
|
@@ -313,15 +313,6 @@
|
||||
"ctrl-k ctrl-l": "editor::ToggleFold",
|
||||
"ctrl-k ctrl-[": "editor::FoldRecursive",
|
||||
"ctrl-k ctrl-]": "editor::UnfoldRecursive",
|
||||
"ctrl-k ctrl-1": ["editor::FoldAtLevel", { "level": 1 }],
|
||||
"ctrl-k ctrl-2": ["editor::FoldAtLevel", { "level": 2 }],
|
||||
"ctrl-k ctrl-3": ["editor::FoldAtLevel", { "level": 3 }],
|
||||
"ctrl-k ctrl-4": ["editor::FoldAtLevel", { "level": 4 }],
|
||||
"ctrl-k ctrl-5": ["editor::FoldAtLevel", { "level": 5 }],
|
||||
"ctrl-k ctrl-6": ["editor::FoldAtLevel", { "level": 6 }],
|
||||
"ctrl-k ctrl-7": ["editor::FoldAtLevel", { "level": 7 }],
|
||||
"ctrl-k ctrl-8": ["editor::FoldAtLevel", { "level": 8 }],
|
||||
"ctrl-k ctrl-9": ["editor::FoldAtLevel", { "level": 9 }],
|
||||
"ctrl-k ctrl-0": "editor::FoldAll",
|
||||
"ctrl-k ctrl-j": "editor::UnfoldAll",
|
||||
"ctrl-space": "editor::ShowCompletions",
|
||||
@@ -514,13 +505,6 @@
|
||||
"ctrl-enter": "assistant::InlineAssist"
|
||||
}
|
||||
},
|
||||
{
|
||||
"context": "ProposedChangesEditor",
|
||||
"bindings": {
|
||||
"ctrl-shift-y": "editor::ApplyDiffHunk",
|
||||
"ctrl-alt-a": "editor::ApplyAllDiffHunks"
|
||||
}
|
||||
},
|
||||
{
|
||||
"context": "Editor && jupyter && !ContextEditor",
|
||||
"bindings": {
|
||||
@@ -532,7 +516,6 @@
|
||||
"context": "ContextEditor > Editor",
|
||||
"bindings": {
|
||||
"ctrl-enter": "assistant::Assist",
|
||||
"ctrl-shift-enter": "assistant::Edit",
|
||||
"ctrl-s": "workspace::Save",
|
||||
"ctrl->": "assistant::QuoteSelection",
|
||||
"ctrl-<": "assistant::InsertIntoEditor",
|
||||
|
||||
@@ -201,7 +201,6 @@
|
||||
"context": "ContextEditor > Editor",
|
||||
"bindings": {
|
||||
"cmd-enter": "assistant::Assist",
|
||||
"cmd-shift-enter": "assistant::Edit",
|
||||
"cmd-s": "workspace::Save",
|
||||
"cmd->": "assistant::QuoteSelection",
|
||||
"cmd-<": "assistant::InsertIntoEditor",
|
||||
@@ -351,15 +350,6 @@
|
||||
"cmd-k cmd-l": "editor::ToggleFold",
|
||||
"cmd-k cmd-[": "editor::FoldRecursive",
|
||||
"cmd-k cmd-]": "editor::UnfoldRecursive",
|
||||
"cmd-k cmd-1": ["editor::FoldAtLevel", { "level": 1 }],
|
||||
"cmd-k cmd-2": ["editor::FoldAtLevel", { "level": 2 }],
|
||||
"cmd-k cmd-3": ["editor::FoldAtLevel", { "level": 3 }],
|
||||
"cmd-k cmd-4": ["editor::FoldAtLevel", { "level": 4 }],
|
||||
"cmd-k cmd-5": ["editor::FoldAtLevel", { "level": 5 }],
|
||||
"cmd-k cmd-6": ["editor::FoldAtLevel", { "level": 6 }],
|
||||
"cmd-k cmd-7": ["editor::FoldAtLevel", { "level": 7 }],
|
||||
"cmd-k cmd-8": ["editor::FoldAtLevel", { "level": 8 }],
|
||||
"cmd-k cmd-9": ["editor::FoldAtLevel", { "level": 9 }],
|
||||
"cmd-k cmd-0": "editor::FoldAll",
|
||||
"cmd-k cmd-j": "editor::UnfoldAll",
|
||||
"ctrl-space": "editor::ShowCompletions",
|
||||
@@ -548,13 +538,6 @@
|
||||
"ctrl-enter": "assistant::InlineAssist"
|
||||
}
|
||||
},
|
||||
{
|
||||
"context": "ProposedChangesEditor",
|
||||
"bindings": {
|
||||
"cmd-shift-y": "editor::ApplyDiffHunk",
|
||||
"cmd-shift-a": "editor::ApplyAllDiffHunks"
|
||||
}
|
||||
},
|
||||
{
|
||||
"context": "PromptEditor",
|
||||
"bindings": {
|
||||
|
||||
@@ -88,6 +88,7 @@ origin: (f64, f64),
|
||||
|
||||
<edit>
|
||||
<path>src/shapes/rectangle.rs</path>
|
||||
<description>Update the Rectangle's new function to take an origin parameter</description>
|
||||
<operation>update</operation>
|
||||
<old_text>
|
||||
fn new(width: f64, height: f64) -> Self {
|
||||
@@ -116,6 +117,7 @@ pub struct Circle {
|
||||
|
||||
<edit>
|
||||
<path>src/shapes/circle.rs</path>
|
||||
<description>Update the Circle's new function to take an origin parameter</description>
|
||||
<operation>update</operation>
|
||||
<old_text>
|
||||
fn new(radius: f64) -> Self {
|
||||
@@ -132,6 +134,7 @@ fn new(origin: (f64, f64), radius: f64) -> Self {
|
||||
|
||||
<edit>
|
||||
<path>src/shapes/rectangle.rs</path>
|
||||
<description>Add an import for the std::fmt module</description>
|
||||
<operation>insert_before</operation>
|
||||
<old_text>
|
||||
struct Rectangle {
|
||||
@@ -144,10 +147,7 @@ use std::fmt;
|
||||
|
||||
<edit>
|
||||
<path>src/shapes/rectangle.rs</path>
|
||||
<description>
|
||||
Add a manual Display implementation for Rectangle.
|
||||
Currently, this is the same as a derived Display implementation.
|
||||
</description>
|
||||
<description>Add a Display implementation for Rectangle</description>
|
||||
<operation>insert_after</operation>
|
||||
<old_text>
|
||||
Rectangle { width, height }
|
||||
@@ -169,6 +169,7 @@ impl fmt::Display for Rectangle {
|
||||
|
||||
<edit>
|
||||
<path>src/shapes/circle.rs</path>
|
||||
<description>Add an import for the `std::fmt` module</description>
|
||||
<operation>insert_before</operation>
|
||||
<old_text>
|
||||
struct Circle {
|
||||
@@ -180,6 +181,7 @@ use std::fmt;
|
||||
|
||||
<edit>
|
||||
<path>src/shapes/circle.rs</path>
|
||||
<description>Add a Display implementation for Circle</description>
|
||||
<operation>insert_after</operation>
|
||||
<old_text>
|
||||
Circle { radius }
|
||||
|
||||
@@ -346,6 +346,8 @@
|
||||
"git_status": true,
|
||||
// Amount of indentation for nested items.
|
||||
"indent_size": 20,
|
||||
// Whether to show indent guides in the project panel.
|
||||
"indent_guides": true,
|
||||
// Whether to reveal it in the project panel automatically,
|
||||
// when a corresponding project entry becomes active.
|
||||
// Gitignored entries are never auto revealed.
|
||||
@@ -369,17 +371,6 @@
|
||||
/// 5. Never show the scrollbar:
|
||||
/// "never"
|
||||
"show": null
|
||||
},
|
||||
// Settings related to indent guides in the project panel.
|
||||
"indent_guides": {
|
||||
// When to show indent guides in the project panel.
|
||||
// This setting can take two values:
|
||||
//
|
||||
// 1. Always show indent guides:
|
||||
// "always"
|
||||
// 2. Never show indent guides:
|
||||
// "never"
|
||||
"show": "always"
|
||||
}
|
||||
},
|
||||
"outline_panel": {
|
||||
@@ -403,18 +394,7 @@
|
||||
"auto_reveal_entries": true,
|
||||
/// Whether to fold directories automatically
|
||||
/// when a directory has only one directory inside.
|
||||
"auto_fold_dirs": true,
|
||||
// Settings related to indent guides in the outline panel.
|
||||
"indent_guides": {
|
||||
// When to show indent guides in the outline panel.
|
||||
// This setting can take two values:
|
||||
//
|
||||
// 1. Always show indent guides:
|
||||
// "always"
|
||||
// 2. Never show indent guides:
|
||||
// "never"
|
||||
"show": "always"
|
||||
}
|
||||
"auto_fold_dirs": true
|
||||
},
|
||||
"collaboration_panel": {
|
||||
// Whether to show the collaboration panel button in the status bar.
|
||||
@@ -797,7 +777,6 @@
|
||||
"tasks": {
|
||||
"variables": {}
|
||||
},
|
||||
"toolchain": { "name": "default", "path": "default" },
|
||||
// An object whose keys are language names, and whose values
|
||||
// are arrays of filenames or extensions of files that should
|
||||
// use those languages.
|
||||
|
||||
@@ -23,7 +23,6 @@ language.workspace = true
|
||||
project.workspace = true
|
||||
smallvec.workspace = true
|
||||
ui.workspace = true
|
||||
util.workspace = true
|
||||
workspace.workspace = true
|
||||
|
||||
[dev-dependencies]
|
||||
|
||||
@@ -13,8 +13,7 @@ use language::{
|
||||
use project::{EnvironmentErrorMessage, LanguageServerProgress, Project, WorktreeId};
|
||||
use smallvec::SmallVec;
|
||||
use std::{cmp::Reverse, fmt::Write, sync::Arc, time::Duration};
|
||||
use ui::{prelude::*, ButtonLike, ContextMenu, PopoverMenu, PopoverMenuHandle, Tooltip};
|
||||
use util::truncate_and_trailoff;
|
||||
use ui::{prelude::*, ButtonLike, ContextMenu, PopoverMenu, PopoverMenuHandle};
|
||||
use workspace::{item::ItemHandle, StatusItemView, Workspace};
|
||||
|
||||
actions!(activity_indicator, [ShowErrorMessage]);
|
||||
@@ -447,8 +446,6 @@ impl ActivityIndicator {
|
||||
|
||||
impl EventEmitter<Event> for ActivityIndicator {}
|
||||
|
||||
const MAX_MESSAGE_LEN: usize = 50;
|
||||
|
||||
impl Render for ActivityIndicator {
|
||||
fn render(&mut self, cx: &mut ViewContext<Self>) -> impl IntoElement {
|
||||
let result = h_flex()
|
||||
@@ -459,7 +456,6 @@ impl Render for ActivityIndicator {
|
||||
return result;
|
||||
};
|
||||
let this = cx.view().downgrade();
|
||||
let truncate_content = content.message.len() > MAX_MESSAGE_LEN;
|
||||
result.gap_2().child(
|
||||
PopoverMenu::new("activity-indicator-popover")
|
||||
.trigger(
|
||||
@@ -468,21 +464,7 @@ impl Render for ActivityIndicator {
|
||||
.id("activity-indicator-status")
|
||||
.gap_2()
|
||||
.children(content.icon)
|
||||
.map(|button| {
|
||||
if truncate_content {
|
||||
button
|
||||
.child(
|
||||
Label::new(truncate_and_trailoff(
|
||||
&content.message,
|
||||
MAX_MESSAGE_LEN,
|
||||
))
|
||||
.size(LabelSize::Small),
|
||||
)
|
||||
.tooltip(move |cx| Tooltip::text(&content.message, cx))
|
||||
} else {
|
||||
button.child(Label::new(content.message).size(LabelSize::Small))
|
||||
}
|
||||
})
|
||||
.child(Label::new(content.message).size(LabelSize::Small))
|
||||
.when_some(content.on_click, |this, handler| {
|
||||
this.on_click(cx.listener(move |this, _, cx| {
|
||||
handler(this, cx);
|
||||
|
||||
@@ -41,10 +41,12 @@ use prompts::PromptLoadingParams;
|
||||
use semantic_index::{CloudEmbeddingProvider, SemanticDb};
|
||||
use serde::{Deserialize, Serialize};
|
||||
use settings::{update_settings_file, Settings, SettingsStore};
|
||||
use slash_command::workflow_command::WorkflowSlashCommand;
|
||||
use slash_command::{
|
||||
auto_command, cargo_workspace_command, context_server_command, default_command, delta_command,
|
||||
diagnostics_command, docs_command, fetch_command, file_command, now_command, project_command,
|
||||
prompt_command, search_command, symbols_command, tab_command, terminal_command,
|
||||
workflow_command,
|
||||
};
|
||||
use std::path::PathBuf;
|
||||
use std::sync::Arc;
|
||||
@@ -57,7 +59,6 @@ actions!(
|
||||
assistant,
|
||||
[
|
||||
Assist,
|
||||
Edit,
|
||||
Split,
|
||||
CopyCode,
|
||||
CycleMessageRole,
|
||||
@@ -297,64 +298,25 @@ fn register_context_server_handlers(cx: &mut AppContext) {
|
||||
return;
|
||||
};
|
||||
|
||||
if protocol.capable(context_servers::protocol::ServerCapability::Prompts) {
|
||||
if let Some(prompts) = protocol.list_prompts().await.log_err() {
|
||||
for prompt in prompts
|
||||
.into_iter()
|
||||
.filter(context_server_command::acceptable_prompt)
|
||||
{
|
||||
log::info!(
|
||||
"registering context server command: {:?}",
|
||||
prompt.name
|
||||
);
|
||||
context_server_registry.register_command(
|
||||
server.id.clone(),
|
||||
prompt.name.as_str(),
|
||||
);
|
||||
slash_command_registry.register_command(
|
||||
context_server_command::ContextServerSlashCommand::new(
|
||||
&server, prompt,
|
||||
),
|
||||
true,
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
})
|
||||
.detach();
|
||||
}
|
||||
},
|
||||
);
|
||||
|
||||
cx.update_model(
|
||||
&manager,
|
||||
|manager: &mut context_servers::manager::ContextServerManager, cx| {
|
||||
let tool_registry = ToolRegistry::global(cx);
|
||||
let context_server_registry = ContextServerRegistry::global(cx);
|
||||
if let Some(server) = manager.get_server(server_id) {
|
||||
cx.spawn(|_, _| async move {
|
||||
let Some(protocol) = server.client.read().clone() else {
|
||||
return;
|
||||
};
|
||||
|
||||
if protocol.capable(context_servers::protocol::ServerCapability::Tools) {
|
||||
if let Some(tools) = protocol.list_tools().await.log_err() {
|
||||
for tool in tools.tools {
|
||||
log::info!(
|
||||
"registering context server tool: {:?}",
|
||||
tool.name
|
||||
);
|
||||
context_server_registry.register_tool(
|
||||
server.id.clone(),
|
||||
tool.name.as_str(),
|
||||
);
|
||||
tool_registry.register_tool(
|
||||
tools::context_server_tool::ContextServerTool::new(
|
||||
server.id.clone(),
|
||||
tool
|
||||
),
|
||||
);
|
||||
}
|
||||
if let Some(prompts) = protocol.list_prompts().await.log_err() {
|
||||
for prompt in prompts
|
||||
.into_iter()
|
||||
.filter(context_server_command::acceptable_prompt)
|
||||
{
|
||||
log::info!(
|
||||
"registering context server command: {:?}",
|
||||
prompt.name
|
||||
);
|
||||
context_server_registry.register_command(
|
||||
server.id.clone(),
|
||||
prompt.name.as_str(),
|
||||
);
|
||||
slash_command_registry.register_command(
|
||||
context_server_command::ContextServerSlashCommand::new(
|
||||
&server, prompt,
|
||||
),
|
||||
true,
|
||||
);
|
||||
}
|
||||
}
|
||||
})
|
||||
@@ -372,14 +334,6 @@ fn register_context_server_handlers(cx: &mut AppContext) {
|
||||
context_server_registry.unregister_command(&server_id, &command_name);
|
||||
}
|
||||
}
|
||||
|
||||
if let Some(tools) = context_server_registry.get_tools(server_id) {
|
||||
let tool_registry = ToolRegistry::global(cx);
|
||||
for tool_name in tools {
|
||||
tool_registry.unregister_tool_by_name(&tool_name);
|
||||
context_server_registry.unregister_tool(&server_id, &tool_name);
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
)
|
||||
@@ -443,6 +397,22 @@ fn register_slash_commands(prompt_builder: Option<Arc<PromptBuilder>>, cx: &mut
|
||||
slash_command_registry.register_command(fetch_command::FetchSlashCommand, false);
|
||||
|
||||
if let Some(prompt_builder) = prompt_builder {
|
||||
cx.observe_global::<SettingsStore>({
|
||||
let slash_command_registry = slash_command_registry.clone();
|
||||
let prompt_builder = prompt_builder.clone();
|
||||
move |cx| {
|
||||
if AssistantSettings::get_global(cx).are_live_diffs_enabled(cx) {
|
||||
slash_command_registry.register_command(
|
||||
workflow_command::WorkflowSlashCommand::new(prompt_builder.clone()),
|
||||
true,
|
||||
);
|
||||
} else {
|
||||
slash_command_registry.unregister_command_by_name(WorkflowSlashCommand::NAME);
|
||||
}
|
||||
}
|
||||
})
|
||||
.detach();
|
||||
|
||||
cx.observe_flag::<project_command::ProjectSlashCommandFeatureFlag, _>({
|
||||
let slash_command_registry = slash_command_registry.clone();
|
||||
move |is_enabled, _cx| {
|
||||
|
||||
@@ -13,11 +13,10 @@ use crate::{
|
||||
terminal_inline_assistant::TerminalInlineAssistant,
|
||||
Assist, AssistantPatch, AssistantPatchStatus, CacheStatus, ConfirmCommand, Content, Context,
|
||||
ContextEvent, ContextId, ContextStore, ContextStoreEvent, CopyCode, CycleMessageRole,
|
||||
DeployHistory, DeployPromptLibrary, Edit, InlineAssistant, InsertDraggedFiles,
|
||||
InsertIntoEditor, Message, MessageId, MessageMetadata, MessageStatus, ModelPickerDelegate,
|
||||
ModelSelector, NewContext, PendingSlashCommand, PendingSlashCommandStatus, QuoteSelection,
|
||||
RemoteContextMetadata, RequestType, SavedContextMetadata, Split, ToggleFocus,
|
||||
ToggleModelSelector,
|
||||
DeployHistory, DeployPromptLibrary, InlineAssistant, InsertDraggedFiles, InsertIntoEditor,
|
||||
Message, MessageId, MessageMetadata, MessageStatus, ModelPickerDelegate, ModelSelector,
|
||||
NewContext, PendingSlashCommand, PendingSlashCommandStatus, QuoteSelection,
|
||||
RemoteContextMetadata, SavedContextMetadata, Split, ToggleFocus, ToggleModelSelector,
|
||||
};
|
||||
use anyhow::Result;
|
||||
use assistant_slash_command::{SlashCommand, SlashCommandOutputSection};
|
||||
@@ -1447,8 +1446,8 @@ struct ScrollPosition {
|
||||
}
|
||||
|
||||
struct PatchViewState {
|
||||
block_id: CustomBlockId,
|
||||
// crease_id: CreaseId,
|
||||
footer_block_id: CustomBlockId,
|
||||
crease_id: CreaseId,
|
||||
editor: Option<PatchEditorState>,
|
||||
update_task: Option<Task<()>>,
|
||||
}
|
||||
@@ -1589,11 +1588,23 @@ impl ContextEditor {
|
||||
}
|
||||
|
||||
fn assist(&mut self, _: &Assist, cx: &mut ViewContext<Self>) {
|
||||
self.send_to_model(RequestType::Chat, cx);
|
||||
}
|
||||
let provider = LanguageModelRegistry::read_global(cx).active_provider();
|
||||
if provider
|
||||
.as_ref()
|
||||
.map_or(false, |provider| provider.must_accept_terms(cx))
|
||||
{
|
||||
self.show_accept_terms = true;
|
||||
cx.notify();
|
||||
return;
|
||||
}
|
||||
|
||||
fn edit(&mut self, _: &Edit, cx: &mut ViewContext<Self>) {
|
||||
self.send_to_model(RequestType::SuggestEdits, cx);
|
||||
if self.focus_active_patch(cx) {
|
||||
return;
|
||||
}
|
||||
|
||||
self.last_error = None;
|
||||
self.send_to_model(cx);
|
||||
cx.notify();
|
||||
}
|
||||
|
||||
fn focus_active_patch(&mut self, cx: &mut ViewContext<Self>) -> bool {
|
||||
@@ -1611,27 +1622,8 @@ impl ContextEditor {
|
||||
false
|
||||
}
|
||||
|
||||
fn send_to_model(&mut self, request_type: RequestType, cx: &mut ViewContext<Self>) {
|
||||
let provider = LanguageModelRegistry::read_global(cx).active_provider();
|
||||
if provider
|
||||
.as_ref()
|
||||
.map_or(false, |provider| provider.must_accept_terms(cx))
|
||||
{
|
||||
self.show_accept_terms = true;
|
||||
cx.notify();
|
||||
return;
|
||||
}
|
||||
|
||||
if self.focus_active_patch(cx) {
|
||||
return;
|
||||
}
|
||||
|
||||
self.last_error = None;
|
||||
|
||||
if let Some(user_message) = self
|
||||
.context
|
||||
.update(cx, |context, cx| context.assist(request_type, cx))
|
||||
{
|
||||
fn send_to_model(&mut self, cx: &mut ViewContext<Self>) {
|
||||
if let Some(user_message) = self.context.update(cx, |context, cx| context.assist(cx)) {
|
||||
let new_selection = {
|
||||
let cursor = user_message
|
||||
.start
|
||||
@@ -1648,8 +1640,6 @@ impl ContextEditor {
|
||||
// Avoid scrolling to the new cursor position so the assistant's output is stable.
|
||||
cx.defer(|this, _| this.scroll_position = None);
|
||||
}
|
||||
|
||||
cx.notify();
|
||||
}
|
||||
|
||||
fn cancel(&mut self, _: &editor::actions::Cancel, cx: &mut ViewContext<Self>) {
|
||||
@@ -1677,10 +1667,8 @@ impl ContextEditor {
|
||||
});
|
||||
}
|
||||
|
||||
fn cursors(&self, cx: &mut WindowContext) -> Vec<usize> {
|
||||
let selections = self
|
||||
.editor
|
||||
.update(cx, |editor, cx| editor.selections.all::<usize>(cx));
|
||||
fn cursors(&self, cx: &AppContext) -> Vec<usize> {
|
||||
let selections = self.editor.read(cx).selections.all::<usize>(cx);
|
||||
selections
|
||||
.into_iter()
|
||||
.map(|selection| selection.head())
|
||||
@@ -1894,7 +1882,7 @@ impl ContextEditor {
|
||||
);
|
||||
});
|
||||
|
||||
Crease::inline(
|
||||
Crease::new(
|
||||
start..end,
|
||||
placeholder,
|
||||
fold_toggle("tool-use"),
|
||||
@@ -2005,7 +1993,7 @@ impl ContextEditor {
|
||||
let end = buffer
|
||||
.anchor_in_excerpt(excerpt_id, command.source_range.end)
|
||||
.unwrap();
|
||||
Crease::inline(start..end, placeholder, render_toggle, render_trailer)
|
||||
Crease::new(start..end, placeholder, render_toggle, render_trailer)
|
||||
}),
|
||||
cx,
|
||||
);
|
||||
@@ -2133,7 +2121,7 @@ impl ContextEditor {
|
||||
|
||||
let buffer_row = MultiBufferRow(start.to_point(&buffer).row);
|
||||
|
||||
let crease = Crease::inline(
|
||||
let crease = Crease::new(
|
||||
start..end,
|
||||
placeholder,
|
||||
fold_toggle("tool-use"),
|
||||
@@ -2170,8 +2158,8 @@ impl ContextEditor {
|
||||
for range in removed {
|
||||
if let Some(state) = self.patches.remove(range) {
|
||||
editors_to_close.extend(state.editor.and_then(|state| state.editor.upgrade()));
|
||||
removed_block_ids.insert(state.block_id);
|
||||
// removed_crease_ids.push(state.crease_id);
|
||||
removed_block_ids.insert(state.footer_block_id);
|
||||
removed_crease_ids.push(state.crease_id);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -2200,14 +2188,12 @@ impl ContextEditor {
|
||||
let max_width = cx.max_width;
|
||||
let gutter_width = cx.gutter_dimensions.full_width();
|
||||
let block_id = cx.block_id;
|
||||
let selected = cx.selected;
|
||||
this.update(&mut **cx, |this, cx| {
|
||||
this.render_patch(
|
||||
this.render_patch_footer(
|
||||
patch_range.clone(),
|
||||
max_width,
|
||||
gutter_width,
|
||||
block_id,
|
||||
selected,
|
||||
cx,
|
||||
)
|
||||
})
|
||||
@@ -2217,9 +2203,26 @@ impl ContextEditor {
|
||||
}
|
||||
});
|
||||
|
||||
let header_placeholder = FoldPlaceholder {
|
||||
render: {
|
||||
let this = this.clone();
|
||||
let patch_range = range.clone();
|
||||
Arc::new(move |fold_id, _range, cx| {
|
||||
this.update(cx, |this, cx| {
|
||||
this.render_patch_header(patch_range.clone(), fold_id, cx)
|
||||
})
|
||||
.ok()
|
||||
.flatten()
|
||||
.unwrap_or_else(|| Empty.into_any())
|
||||
})
|
||||
},
|
||||
constrain_width: false,
|
||||
merge_adjacent: false,
|
||||
};
|
||||
|
||||
let should_refold;
|
||||
if let Some(state) = self.patches.get_mut(&range) {
|
||||
replaced_blocks.insert(state.block_id, render_block);
|
||||
replaced_blocks.insert(state.footer_block_id, render_block);
|
||||
if let Some(editor_state) = &state.editor {
|
||||
if editor_state.opened_patch != patch {
|
||||
state.update_task = Some({
|
||||
@@ -2239,30 +2242,30 @@ impl ContextEditor {
|
||||
let block_ids = editor.insert_blocks(
|
||||
[BlockProperties {
|
||||
height: path_count as u32 + 1,
|
||||
style: BlockStyle::Fixed,
|
||||
style: BlockStyle::Flex,
|
||||
render: render_block,
|
||||
placement: BlockPlacement::Replace(patch_start..patch_end),
|
||||
placement: BlockPlacement::Below(patch_start),
|
||||
priority: 0,
|
||||
}],
|
||||
None,
|
||||
cx,
|
||||
);
|
||||
|
||||
// let new_crease_ids = editor.insert_creases(
|
||||
// [Crease::new(
|
||||
// patch_start..patch_end,
|
||||
// header_placeholder.clone(),
|
||||
// fold_toggle("patch-header"),
|
||||
// |_, _, _| Empty.into_any_element(),
|
||||
// )],
|
||||
// cx,
|
||||
// );
|
||||
let new_crease_ids = editor.insert_creases(
|
||||
[Crease::new(
|
||||
patch_start..patch_end,
|
||||
header_placeholder.clone(),
|
||||
fold_toggle("patch-header"),
|
||||
|_, _, _| Empty.into_any_element(),
|
||||
)],
|
||||
cx,
|
||||
);
|
||||
|
||||
self.patches.insert(
|
||||
range.clone(),
|
||||
PatchViewState {
|
||||
block_id: block_ids[0],
|
||||
// crease_id: new_crease_ids[0],
|
||||
footer_block_id: block_ids[0],
|
||||
crease_id: new_crease_ids[0],
|
||||
editor: None,
|
||||
update_task: None,
|
||||
},
|
||||
@@ -2272,8 +2275,8 @@ impl ContextEditor {
|
||||
}
|
||||
|
||||
if should_refold {
|
||||
// editor.unfold_ranges([patch_start..patch_end], true, false, cx);
|
||||
// editor.fold_ranges([(patch_start..patch_end, header_placeholder)], false, cx);
|
||||
editor.unfold_ranges([patch_start..patch_end], true, false, cx);
|
||||
editor.fold_ranges([(patch_start..patch_end, header_placeholder)], false, cx);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -2310,7 +2313,7 @@ impl ContextEditor {
|
||||
let buffer_row = MultiBufferRow(start.to_point(&buffer).row);
|
||||
buffer_rows_to_fold.insert(buffer_row);
|
||||
creases.push(
|
||||
Crease::inline(
|
||||
Crease::new(
|
||||
start..end,
|
||||
FoldPlaceholder {
|
||||
render: render_fold_icon_button(
|
||||
@@ -2372,9 +2375,7 @@ impl ContextEditor {
|
||||
}
|
||||
|
||||
fn update_active_patch(&mut self, cx: &mut ViewContext<Self>) {
|
||||
let newest_cursor = self.editor.update(cx, |editor, cx| {
|
||||
editor.selections.newest::<Point>(cx).head()
|
||||
});
|
||||
let newest_cursor = self.editor.read(cx).selections.newest::<Point>(cx).head();
|
||||
let context = self.context.read(cx);
|
||||
|
||||
let new_patch = context.patch_containing(newest_cursor, cx).cloned();
|
||||
@@ -2781,40 +2782,39 @@ impl ContextEditor {
|
||||
) -> Option<(String, bool)> {
|
||||
const CODE_FENCE_DELIMITER: &'static str = "```";
|
||||
|
||||
let context_editor = context_editor_view.read(cx).editor.clone();
|
||||
context_editor.update(cx, |context_editor, cx| {
|
||||
if context_editor.selections.newest::<Point>(cx).is_empty() {
|
||||
let snapshot = context_editor.buffer().read(cx).snapshot(cx);
|
||||
let (_, _, snapshot) = snapshot.as_singleton()?;
|
||||
let context_editor = context_editor_view.read(cx).editor.read(cx);
|
||||
|
||||
let head = context_editor.selections.newest::<Point>(cx).head();
|
||||
let offset = snapshot.point_to_offset(head);
|
||||
if context_editor.selections.newest::<Point>(cx).is_empty() {
|
||||
let snapshot = context_editor.buffer().read(cx).snapshot(cx);
|
||||
let (_, _, snapshot) = snapshot.as_singleton()?;
|
||||
|
||||
let surrounding_code_block_range = find_surrounding_code_block(snapshot, offset)?;
|
||||
let mut text = snapshot
|
||||
.text_for_range(surrounding_code_block_range)
|
||||
.collect::<String>();
|
||||
let head = context_editor.selections.newest::<Point>(cx).head();
|
||||
let offset = snapshot.point_to_offset(head);
|
||||
|
||||
// If there is no newline trailing the closing three-backticks, then
|
||||
// tree-sitter-md extends the range of the content node to include
|
||||
// the backticks.
|
||||
if text.ends_with(CODE_FENCE_DELIMITER) {
|
||||
text.drain((text.len() - CODE_FENCE_DELIMITER.len())..);
|
||||
}
|
||||
let surrounding_code_block_range = find_surrounding_code_block(snapshot, offset)?;
|
||||
let mut text = snapshot
|
||||
.text_for_range(surrounding_code_block_range)
|
||||
.collect::<String>();
|
||||
|
||||
(!text.is_empty()).then_some((text, true))
|
||||
} else {
|
||||
let anchor = context_editor.selections.newest_anchor();
|
||||
let text = context_editor
|
||||
.buffer()
|
||||
.read(cx)
|
||||
.read(cx)
|
||||
.text_for_range(anchor.range())
|
||||
.collect::<String>();
|
||||
|
||||
(!text.is_empty()).then_some((text, false))
|
||||
// If there is no newline trailing the closing three-backticks, then
|
||||
// tree-sitter-md extends the range of the content node to include
|
||||
// the backticks.
|
||||
if text.ends_with(CODE_FENCE_DELIMITER) {
|
||||
text.drain((text.len() - CODE_FENCE_DELIMITER.len())..);
|
||||
}
|
||||
})
|
||||
|
||||
(!text.is_empty()).then_some((text, true))
|
||||
} else {
|
||||
let anchor = context_editor.selections.newest_anchor();
|
||||
let text = context_editor
|
||||
.buffer()
|
||||
.read(cx)
|
||||
.read(cx)
|
||||
.text_for_range(anchor.range())
|
||||
.collect::<String>();
|
||||
|
||||
(!text.is_empty()).then_some((text, false))
|
||||
}
|
||||
}
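The comment above notes that tree-sitter-md can extend the content node to include the closing three backticks when no newline follows them, so the copied text has to be trimmed. A minimal standalone sketch of that trimming step (only CODE_FENCE_DELIMITER is taken from the code above; the rest is illustrative):

const CODE_FENCE_DELIMITER: &str = "```";

// Drop a trailing fence that tree-sitter-md may have folded into the content node.
fn trim_trailing_fence(mut text: String) -> String {
    if text.ends_with(CODE_FENCE_DELIMITER) {
        text.truncate(text.len() - CODE_FENCE_DELIMITER.len());
    }
    text
}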
|
||||
|
||||
fn insert_selection(
|
||||
@@ -3085,7 +3085,7 @@ impl ContextEditor {
|
||||
crease_title,
|
||||
cx.view().downgrade(),
|
||||
);
|
||||
let crease = Crease::inline(
|
||||
let crease = Crease::new(
|
||||
anchor_before..anchor_after,
|
||||
fold_placeholder,
|
||||
render_quote_selection_output_toggle,
|
||||
@@ -3280,7 +3280,7 @@ impl ContextEditor {
|
||||
|
||||
let buffer_row = MultiBufferRow(start.to_point(&buffer).row);
|
||||
buffer_rows_to_fold.insert(buffer_row);
|
||||
Crease::inline(
|
||||
Crease::new(
|
||||
start..end,
|
||||
FoldPlaceholder {
|
||||
constrain_width: false,
|
||||
@@ -3431,13 +3431,33 @@ impl ContextEditor {
|
||||
.unwrap_or_else(|| Cow::Borrowed(DEFAULT_TAB_TITLE))
|
||||
}
|
||||
|
||||
fn render_patch(
|
||||
fn render_patch_header(
|
||||
&self,
|
||||
range: Range<text::Anchor>,
|
||||
_id: FoldId,
|
||||
cx: &mut ViewContext<Self>,
|
||||
) -> Option<AnyElement> {
|
||||
let patch = self.context.read(cx).patch_for_range(&range, cx)?;
|
||||
let theme = cx.theme().clone();
|
||||
Some(
|
||||
h_flex()
|
||||
.px_1()
|
||||
.py_0p5()
|
||||
.border_b_1()
|
||||
.border_color(theme.status().info_border)
|
||||
.gap_1()
|
||||
.child(Icon::new(IconName::Diff).size(IconSize::Small))
|
||||
.child(Label::new(patch.title.clone()).size(LabelSize::Small))
|
||||
.into_any(),
|
||||
)
|
||||
}
|
||||
|
||||
fn render_patch_footer(
|
||||
&mut self,
|
||||
range: Range<text::Anchor>,
|
||||
max_width: Pixels,
|
||||
gutter_width: Pixels,
|
||||
id: BlockId,
|
||||
selected: bool,
|
||||
cx: &mut ViewContext<Self>,
|
||||
) -> Option<AnyElement> {
|
||||
let snapshot = self.editor.update(cx, |editor, cx| editor.snapshot(cx));
|
||||
@@ -3448,6 +3468,10 @@ impl ContextEditor {
|
||||
.anchor_in_excerpt(excerpt_id, range.start)
|
||||
.unwrap();
|
||||
|
||||
if !snapshot.intersects_fold(anchor) {
|
||||
return None;
|
||||
}
|
||||
|
||||
let patch = self.context.read(cx).patch_for_range(&range, cx)?;
|
||||
let paths = patch
|
||||
.paths()
|
||||
@@ -3456,18 +3480,10 @@ impl ContextEditor {
|
||||
|
||||
Some(
|
||||
v_flex()
|
||||
.bg(cx.theme().colors().editor_background)
|
||||
.border_1()
|
||||
.border_color(if selected {
|
||||
cx.theme().colors().border_focused
|
||||
} else {
|
||||
cx.theme().colors().border
|
||||
})
|
||||
.id(id)
|
||||
.ml(gutter_width)
|
||||
.p_2()
|
||||
.rounded_md()
|
||||
.min_h(cx.line_height() * 3.)
|
||||
.pl(gutter_width)
|
||||
.w(max_width)
|
||||
.py_2()
|
||||
.cursor(CursorStyle::PointingHand)
|
||||
.on_click(cx.listener(move |this, _, cx| {
|
||||
this.editor.update(cx, |editor, cx| {
|
||||
@@ -3477,7 +3493,6 @@ impl ContextEditor {
|
||||
});
|
||||
this.focus_active_patch(cx);
|
||||
}))
|
||||
.child(Label::new(patch.title.clone()))
|
||||
.children(paths.into_iter().map(|path| {
|
||||
h_flex()
|
||||
.pl_1()
|
||||
@@ -3629,13 +3644,7 @@ impl ContextEditor {
|
||||
button.tooltip(move |_| tooltip.clone())
|
||||
})
|
||||
.layer(ElevationIndex::ModalSurface)
|
||||
.child(Label::new(
|
||||
if AssistantSettings::get_global(cx).are_live_diffs_enabled(cx) {
|
||||
"Chat"
|
||||
} else {
|
||||
"Send"
|
||||
},
|
||||
))
|
||||
.child(Label::new("Send"))
|
||||
.children(
|
||||
KeyBinding::for_action_in(&Assist, &focus_handle, cx)
|
||||
.map(|binding| binding.into_any_element()),
|
||||
@@ -3645,57 +3654,6 @@ impl ContextEditor {
|
||||
})
|
||||
}
|
||||
|
||||
fn render_edit_button(&self, cx: &mut ViewContext<Self>) -> impl IntoElement {
|
||||
let focus_handle = self.focus_handle(cx).clone();
|
||||
|
||||
let (style, tooltip) = match token_state(&self.context, cx) {
|
||||
Some(TokenState::NoTokensLeft { .. }) => (
|
||||
ButtonStyle::Tinted(TintColor::Negative),
|
||||
Some(Tooltip::text("Token limit reached", cx)),
|
||||
),
|
||||
Some(TokenState::HasMoreTokens {
|
||||
over_warn_threshold,
|
||||
..
|
||||
}) => {
|
||||
let (style, tooltip) = if over_warn_threshold {
|
||||
(
|
||||
ButtonStyle::Tinted(TintColor::Warning),
|
||||
Some(Tooltip::text("Token limit is close to exhaustion", cx)),
|
||||
)
|
||||
} else {
|
||||
(ButtonStyle::Filled, None)
|
||||
};
|
||||
(style, tooltip)
|
||||
}
|
||||
None => (ButtonStyle::Filled, None),
|
||||
};
|
||||
|
||||
let provider = LanguageModelRegistry::read_global(cx).active_provider();
|
||||
|
||||
let has_configuration_error = configuration_error(cx).is_some();
|
||||
let needs_to_accept_terms = self.show_accept_terms
|
||||
&& provider
|
||||
.as_ref()
|
||||
.map_or(false, |provider| provider.must_accept_terms(cx));
|
||||
let disabled = has_configuration_error || needs_to_accept_terms;
|
||||
|
||||
ButtonLike::new("edit_button")
|
||||
.disabled(disabled)
|
||||
.style(style)
|
||||
.when_some(tooltip, |button, tooltip| {
|
||||
button.tooltip(move |_| tooltip.clone())
|
||||
})
|
||||
.layer(ElevationIndex::ModalSurface)
|
||||
.child(Label::new("Suggest Edits"))
|
||||
.children(
|
||||
KeyBinding::for_action_in(&Edit, &focus_handle, cx)
|
||||
.map(|binding| binding.into_any_element()),
|
||||
)
|
||||
.on_click(move |_event, cx| {
|
||||
focus_handle.dispatch_action(&Edit, cx);
|
||||
})
|
||||
}
|
||||
|
||||
fn render_last_error(&self, cx: &mut ViewContext<Self>) -> Option<AnyElement> {
|
||||
let last_error = self.last_error.as_ref()?;
|
||||
|
||||
@@ -3952,7 +3910,6 @@ impl Render for ContextEditor {
|
||||
.capture_action(cx.listener(ContextEditor::paste))
|
||||
.capture_action(cx.listener(ContextEditor::cycle_message_role))
|
||||
.capture_action(cx.listener(ContextEditor::confirm_command))
|
||||
.on_action(cx.listener(ContextEditor::edit))
|
||||
.on_action(cx.listener(ContextEditor::assist))
|
||||
.on_action(cx.listener(ContextEditor::split))
|
||||
.size_full()
|
||||
@@ -4017,21 +3974,7 @@ impl Render for ContextEditor {
|
||||
h_flex()
|
||||
.w_full()
|
||||
.justify_end()
|
||||
.when(
|
||||
AssistantSettings::get_global(cx).are_live_diffs_enabled(cx),
|
||||
|buttons| {
|
||||
buttons
|
||||
.items_center()
|
||||
.gap_1p5()
|
||||
.child(self.render_edit_button(cx))
|
||||
.child(
|
||||
Label::new("or")
|
||||
.size(LabelSize::Small)
|
||||
.color(Color::Muted),
|
||||
)
|
||||
},
|
||||
)
|
||||
.child(self.render_send_button(cx)),
|
||||
.child(div().child(self.render_send_button(cx))),
|
||||
),
|
||||
),
|
||||
)
|
||||
@@ -4764,7 +4707,7 @@ impl Render for ConfigurationView {
|
||||
|
||||
let mut element = v_flex()
|
||||
.id("assistant-configuration-view")
|
||||
.track_focus(&self.focus_handle(cx))
|
||||
.track_focus(&self.focus_handle)
|
||||
.bg(cx.theme().colors().editor_background)
|
||||
.size_full()
|
||||
.overflow_y_scroll()
|
||||
|
||||
@@ -66,14 +66,6 @@ impl ContextId {
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Copy, Debug)]
|
||||
pub enum RequestType {
|
||||
/// Request a normal chat response from the model.
|
||||
Chat,
|
||||
/// Add a preamble to the message, which tells the model to return a structured response that suggests edits.
|
||||
SuggestEdits,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug)]
|
||||
pub enum ContextOperation {
|
||||
InsertMessage {
|
||||
@@ -1036,7 +1028,7 @@ impl Context {
|
||||
}
|
||||
|
||||
pub(crate) fn count_remaining_tokens(&mut self, cx: &mut ModelContext<Self>) {
|
||||
let request = self.to_completion_request(RequestType::SuggestEdits, cx); // Conservatively assume SuggestEdits, since it takes more tokens.
|
||||
let request = self.to_completion_request(cx);
|
||||
let Some(model) = LanguageModelRegistry::read_global(cx).active_model() else {
|
||||
return;
|
||||
};
|
||||
@@ -1179,7 +1171,7 @@ impl Context {
|
||||
}
|
||||
|
||||
let request = {
|
||||
let mut req = self.to_completion_request(RequestType::Chat, cx);
|
||||
let mut req = self.to_completion_request(cx);
|
||||
// Skip the last message because it's likely to change and
|
||||
// therefore would be a waste to cache.
|
||||
req.messages.pop();
|
||||
@@ -1867,11 +1859,7 @@ impl Context {
|
||||
})
|
||||
}
|
||||
|
||||
pub fn assist(
|
||||
&mut self,
|
||||
request_type: RequestType,
|
||||
cx: &mut ModelContext<Self>,
|
||||
) -> Option<MessageAnchor> {
|
||||
pub fn assist(&mut self, cx: &mut ModelContext<Self>) -> Option<MessageAnchor> {
|
||||
let model_registry = LanguageModelRegistry::read_global(cx);
|
||||
let provider = model_registry.active_provider()?;
|
||||
let model = model_registry.active_model()?;
|
||||
@@ -1884,7 +1872,7 @@ impl Context {
|
||||
// Compute which messages to cache, including the last one.
|
||||
self.mark_cache_anchors(&model.cache_configuration(), false, cx);
|
||||
|
||||
let mut request = self.to_completion_request(request_type, cx);
|
||||
let mut request = self.to_completion_request(cx);
|
||||
|
||||
if cx.has_flag::<ToolUseFeatureFlag>() {
|
||||
let tool_registry = ToolRegistry::global(cx);
|
||||
@@ -2086,11 +2074,7 @@ impl Context {
|
||||
Some(user_message)
|
||||
}
|
||||
|
||||
pub fn to_completion_request(
|
||||
&self,
|
||||
request_type: RequestType,
|
||||
cx: &AppContext,
|
||||
) -> LanguageModelRequest {
|
||||
pub fn to_completion_request(&self, cx: &AppContext) -> LanguageModelRequest {
|
||||
let buffer = self.buffer.read(cx);
|
||||
|
||||
let mut contents = self.contents(cx).peekable();
|
||||
@@ -2179,25 +2163,6 @@ impl Context {
|
||||
completion_request.messages.push(request_message);
|
||||
}
|
||||
|
||||
if let RequestType::SuggestEdits = request_type {
|
||||
if let Ok(preamble) = self.prompt_builder.generate_workflow_prompt() {
|
||||
let last_elem_index = completion_request.messages.len();
|
||||
|
||||
completion_request
|
||||
.messages
|
||||
.push(LanguageModelRequestMessage {
|
||||
role: Role::User,
|
||||
content: vec![MessageContent::Text(preamble)],
|
||||
cache: false,
|
||||
});
|
||||
|
||||
// The preamble message should be sent right before the last actual user message.
|
||||
completion_request
|
||||
.messages
|
||||
.swap(last_elem_index, last_elem_index.saturating_sub(1));
|
||||
}
|
||||
}
|
||||
|
||||
completion_request
|
||||
}
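The push-then-swap above amounts to "insert the workflow preamble immediately before the last user message". A self-contained sketch of that step, with a plain Vec<String> standing in for completion_request.messages:

fn main() {
    // Hypothetical message list; only the push/swap pattern comes from the hunk above.
    let mut messages = vec!["system prompt".to_string(), "latest user message".to_string()];
    let last_elem_index = messages.len();
    messages.push("workflow preamble".to_string());
    // The preamble ends up right before the last actual user message.
    messages.swap(last_elem_index, last_elem_index.saturating_sub(1));
    assert_eq!(messages, ["system prompt", "workflow preamble", "latest user message"]);
}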
|
||||
|
||||
@@ -2512,7 +2477,7 @@ impl Context {
|
||||
return;
|
||||
}
|
||||
|
||||
let mut request = self.to_completion_request(RequestType::Chat, cx);
|
||||
let mut request = self.to_completion_request(cx);
|
||||
request.messages.push(LanguageModelRequestMessage {
|
||||
role: Role::User,
|
||||
content: vec![
|
||||
|
||||
@@ -636,7 +636,7 @@ async fn test_workflow_step_parsing(cx: &mut TestAppContext) {
|
||||
kind: AssistantEditKind::InsertAfter {
|
||||
old_text: "fn one".into(),
|
||||
new_text: "fn two() {}".into(),
|
||||
description: Some("add a `two` function".into()),
|
||||
description: "add a `two` function".into(),
|
||||
},
|
||||
}]],
|
||||
cx,
|
||||
@@ -690,7 +690,7 @@ async fn test_workflow_step_parsing(cx: &mut TestAppContext) {
|
||||
kind: AssistantEditKind::InsertAfter {
|
||||
old_text: "fn zero".into(),
|
||||
new_text: "fn two() {}".into(),
|
||||
description: Some("add a `two` function".into()),
|
||||
description: "add a `two` function".into(),
|
||||
},
|
||||
}]],
|
||||
cx,
|
||||
@@ -754,7 +754,7 @@ async fn test_workflow_step_parsing(cx: &mut TestAppContext) {
|
||||
kind: AssistantEditKind::InsertAfter {
|
||||
old_text: "fn zero".into(),
|
||||
new_text: "fn two() {}".into(),
|
||||
description: Some("add a `two` function".into()),
|
||||
description: "add a `two` function".into(),
|
||||
},
|
||||
}]],
|
||||
cx,
|
||||
@@ -798,7 +798,7 @@ async fn test_workflow_step_parsing(cx: &mut TestAppContext) {
|
||||
kind: AssistantEditKind::InsertAfter {
|
||||
old_text: "fn zero".into(),
|
||||
new_text: "fn two() {}".into(),
|
||||
description: Some("add a `two` function".into()),
|
||||
description: "add a `two` function".into(),
|
||||
},
|
||||
}]],
|
||||
cx,
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
use crate::{
|
||||
assistant_settings::AssistantSettings, humanize_token_count, prompts::PromptBuilder,
|
||||
AssistantPanel, AssistantPanelEvent, CharOperation, CycleNextInlineAssist,
|
||||
CyclePreviousInlineAssist, LineDiff, LineOperation, ModelSelector, RequestType, StreamingDiff,
|
||||
CyclePreviousInlineAssist, LineDiff, LineOperation, ModelSelector, StreamingDiff,
|
||||
};
|
||||
use anyhow::{anyhow, Context as _, Result};
|
||||
use client::{telemetry::Telemetry, ErrorExt};
|
||||
@@ -54,7 +54,7 @@ use telemetry_events::{AssistantEvent, AssistantKind, AssistantPhase};
|
||||
use terminal_view::terminal_panel::TerminalPanel;
|
||||
use text::{OffsetRangeExt, ToPoint as _};
|
||||
use theme::ThemeSettings;
|
||||
use ui::{prelude::*, text_for_action, CheckboxWithLabel, IconButtonShape, Popover, Tooltip};
|
||||
use ui::{prelude::*, CheckboxWithLabel, IconButtonShape, Popover, Tooltip};
|
||||
use util::{RangeExt, ResultExt};
|
||||
use workspace::{notifications::NotificationId, ItemHandle, Toast, Workspace};
|
||||
|
||||
@@ -189,16 +189,11 @@ impl InlineAssistant {
|
||||
initial_prompt: Option<String>,
|
||||
cx: &mut WindowContext,
|
||||
) {
|
||||
let (snapshot, initial_selections) = editor.update(cx, |editor, cx| {
|
||||
(
|
||||
editor.buffer().read(cx).snapshot(cx),
|
||||
editor.selections.all::<Point>(cx),
|
||||
)
|
||||
});
|
||||
let snapshot = editor.read(cx).buffer().read(cx).snapshot(cx);
|
||||
|
||||
let mut selections = Vec::<Selection<Point>>::new();
|
||||
let mut newest_selection = None;
|
||||
for mut selection in initial_selections {
|
||||
for mut selection in editor.read(cx).selections.all::<Point>(cx) {
|
||||
if selection.end > selection.start {
|
||||
selection.start.column = 0;
|
||||
// If the selection ends at the start of the line, we don't want to include it.
|
||||
@@ -571,13 +566,10 @@ impl InlineAssistant {
|
||||
return;
|
||||
};
|
||||
|
||||
if editor.read(cx).selections.count() == 1 {
|
||||
let (selection, buffer) = editor.update(cx, |editor, cx| {
|
||||
(
|
||||
editor.selections.newest::<usize>(cx),
|
||||
editor.buffer().read(cx).snapshot(cx),
|
||||
)
|
||||
});
|
||||
let editor = editor.read(cx);
|
||||
if editor.selections.count() == 1 {
|
||||
let selection = editor.selections.newest::<usize>(cx);
|
||||
let buffer = editor.buffer().read(cx).snapshot(cx);
|
||||
for assist_id in &editor_assists.assist_ids {
|
||||
let assist = &self.assists[assist_id];
|
||||
let assist_range = assist.range.to_offset(&buffer);
|
||||
@@ -602,13 +594,10 @@ impl InlineAssistant {
|
||||
return;
|
||||
};
|
||||
|
||||
if editor.read(cx).selections.count() == 1 {
|
||||
let (selection, buffer) = editor.update(cx, |editor, cx| {
|
||||
(
|
||||
editor.selections.newest::<usize>(cx),
|
||||
editor.buffer().read(cx).snapshot(cx),
|
||||
)
|
||||
});
|
||||
let editor = editor.read(cx);
|
||||
if editor.selections.count() == 1 {
|
||||
let selection = editor.selections.newest::<usize>(cx);
|
||||
let buffer = editor.buffer().read(cx).snapshot(cx);
|
||||
let mut closest_assist_fallback = None;
|
||||
for assist_id in &editor_assists.assist_ids {
|
||||
let assist = &self.assists[assist_id];
|
||||
@@ -1607,7 +1596,7 @@ impl PromptEditor {
|
||||
// always show the cursor (even when it isn't focused) because
|
||||
// typing in one will make what you typed appear in all of them.
|
||||
editor.set_show_cursor_when_unfocused(true, cx);
|
||||
editor.set_placeholder_text(Self::placeholder_text(codegen.read(cx), cx), cx);
|
||||
editor.set_placeholder_text("Add a prompt…", cx);
|
||||
editor
|
||||
});
|
||||
|
||||
@@ -1664,7 +1653,6 @@ impl PromptEditor {
|
||||
self.editor = cx.new_view(|cx| {
|
||||
let mut editor = Editor::auto_height(Self::MAX_LINES as usize, cx);
|
||||
editor.set_soft_wrap_mode(language::language_settings::SoftWrap::EditorWidth, cx);
|
||||
editor.set_placeholder_text(Self::placeholder_text(self.codegen.read(cx), cx), cx);
|
||||
editor.set_placeholder_text("Add a prompt…", cx);
|
||||
editor.set_text(prompt, cx);
|
||||
if focus {
|
||||
@@ -1675,20 +1663,6 @@ impl PromptEditor {
|
||||
self.subscribe_to_editor(cx);
|
||||
}
|
||||
|
||||
fn placeholder_text(codegen: &Codegen, cx: &WindowContext) -> String {
|
||||
let context_keybinding = text_for_action(&crate::ToggleFocus, cx)
|
||||
.map(|keybinding| format!(" • {keybinding} for context"))
|
||||
.unwrap_or_default();
|
||||
|
||||
let action = if codegen.is_insertion {
|
||||
"Generate"
|
||||
} else {
|
||||
"Transform"
|
||||
};
|
||||
|
||||
format!("{action}…{context_keybinding} • ↓↑ for history")
|
||||
}
|
||||
|
||||
fn prompt(&self, cx: &AppContext) -> String {
|
||||
self.editor.read(cx).text(cx)
|
||||
}
|
||||
@@ -2245,7 +2219,7 @@ impl InlineAssist {
|
||||
.read(cx)
|
||||
.active_context(cx)?
|
||||
.read(cx)
|
||||
.to_completion_request(RequestType::Chat, cx),
|
||||
.to_completion_request(cx),
|
||||
)
|
||||
} else {
|
||||
None
|
||||
@@ -2286,7 +2260,6 @@ pub struct Codegen {
|
||||
initial_transaction_id: Option<TransactionId>,
|
||||
telemetry: Option<Arc<Telemetry>>,
|
||||
builder: Arc<PromptBuilder>,
|
||||
is_insertion: bool,
|
||||
}
|
||||
|
||||
impl Codegen {
|
||||
@@ -2309,7 +2282,6 @@ impl Codegen {
|
||||
)
|
||||
});
|
||||
let mut this = Self {
|
||||
is_insertion: range.to_offset(&buffer.read(cx).snapshot(cx)).is_empty(),
|
||||
alternatives: vec![codegen],
|
||||
active_alternative: 0,
|
||||
seen_alternatives: HashSet::default(),
|
||||
@@ -2711,7 +2683,7 @@ impl CodegenAlternative {
|
||||
|
||||
let prompt = self
|
||||
.builder
|
||||
.generate_inline_transformation_prompt(user_prompt, language_name, buffer, range)
|
||||
.generate_content_prompt(user_prompt, language_name, buffer, range)
|
||||
.map_err(|e| anyhow::anyhow!("Failed to generate content prompt: {}", e))?;
|
||||
|
||||
let mut messages = Vec::new();
|
||||
|
||||
@@ -33,21 +33,21 @@ pub enum AssistantEditKind {
|
||||
Update {
|
||||
old_text: String,
|
||||
new_text: String,
|
||||
description: Option<String>,
|
||||
description: String,
|
||||
},
|
||||
Create {
|
||||
new_text: String,
|
||||
description: Option<String>,
|
||||
description: String,
|
||||
},
|
||||
InsertBefore {
|
||||
old_text: String,
|
||||
new_text: String,
|
||||
description: Option<String>,
|
||||
description: String,
|
||||
},
|
||||
InsertAfter {
|
||||
old_text: String,
|
||||
new_text: String,
|
||||
description: Option<String>,
|
||||
description: String,
|
||||
},
|
||||
Delete {
|
||||
old_text: String,
|
||||
@@ -86,37 +86,19 @@ enum SearchDirection {
|
||||
Diagonal,
|
||||
}
|
||||
|
||||
// A measure of the current quality of an in-progress fuzzy search.
|
||||
//
|
||||
// Uses 60 bits to store a numeric cost, and 4 bits to store the preceding
|
||||
// operation in the search.
|
||||
#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord)]
|
||||
struct SearchState {
|
||||
cost: u32,
|
||||
score: u32,
|
||||
direction: SearchDirection,
|
||||
}
|
||||
|
||||
impl SearchState {
|
||||
fn new(cost: u32, direction: SearchDirection) -> Self {
|
||||
Self { cost, direction }
|
||||
}
|
||||
}
|
||||
|
||||
struct SearchMatrix {
|
||||
cols: usize,
|
||||
data: Vec<SearchState>,
|
||||
}
|
||||
|
||||
impl SearchMatrix {
|
||||
fn new(rows: usize, cols: usize) -> Self {
|
||||
SearchMatrix {
|
||||
cols,
|
||||
data: vec![SearchState::new(0, SearchDirection::Diagonal); rows * cols],
|
||||
}
|
||||
}
|
||||
|
||||
fn get(&self, row: usize, col: usize) -> SearchState {
|
||||
self.data[row * self.cols + col]
|
||||
}
|
||||
|
||||
fn set(&mut self, row: usize, col: usize, cost: SearchState) {
|
||||
self.data[row * self.cols + col] = cost;
|
||||
fn new(score: u32, direction: SearchDirection) -> Self {
|
||||
Self { score, direction }
|
||||
}
|
||||
}
|
||||
|
||||
@@ -205,23 +187,23 @@ impl AssistantEdit {
|
||||
"update" => AssistantEditKind::Update {
|
||||
old_text: old_text.ok_or_else(|| anyhow!("missing old_text"))?,
|
||||
new_text: new_text.ok_or_else(|| anyhow!("missing new_text"))?,
|
||||
description,
|
||||
description: description.ok_or_else(|| anyhow!("missing description"))?,
|
||||
},
|
||||
"insert_before" => AssistantEditKind::InsertBefore {
|
||||
old_text: old_text.ok_or_else(|| anyhow!("missing old_text"))?,
|
||||
new_text: new_text.ok_or_else(|| anyhow!("missing new_text"))?,
|
||||
description,
|
||||
description: description.ok_or_else(|| anyhow!("missing description"))?,
|
||||
},
|
||||
"insert_after" => AssistantEditKind::InsertAfter {
|
||||
old_text: old_text.ok_or_else(|| anyhow!("missing old_text"))?,
|
||||
new_text: new_text.ok_or_else(|| anyhow!("missing new_text"))?,
|
||||
description,
|
||||
description: description.ok_or_else(|| anyhow!("missing description"))?,
|
||||
},
|
||||
"delete" => AssistantEditKind::Delete {
|
||||
old_text: old_text.ok_or_else(|| anyhow!("missing old_text"))?,
|
||||
},
|
||||
"create" => AssistantEditKind::Create {
|
||||
description,
|
||||
description: description.ok_or_else(|| anyhow!("missing description"))?,
|
||||
new_text: new_text.ok_or_else(|| anyhow!("missing new_text"))?,
|
||||
},
|
||||
_ => Err(anyhow!("unknown operation {operation:?}"))?,
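Each branch above turns a missing required field into an error with the same Option-to-anyhow conversion; a minimal illustration of that pattern in isolation:

use anyhow::{anyhow, Result};

// Sketch of the ok_or_else conversion used for the now-required `description` field.
fn require_field(value: Option<String>, name: &str) -> Result<String> {
    value.ok_or_else(|| anyhow!("missing {name}"))
}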
|
||||
@@ -282,7 +264,7 @@ impl AssistantEditKind {
|
||||
ResolvedEdit {
|
||||
range,
|
||||
new_text,
|
||||
description,
|
||||
description: Some(description),
|
||||
}
|
||||
}
|
||||
Self::Create {
|
||||
@@ -290,7 +272,7 @@ impl AssistantEditKind {
|
||||
description,
|
||||
} => ResolvedEdit {
|
||||
range: text::Anchor::MIN..text::Anchor::MAX,
|
||||
description,
|
||||
description: Some(description),
|
||||
new_text,
|
||||
},
|
||||
Self::InsertBefore {
|
||||
@@ -303,7 +285,7 @@ impl AssistantEditKind {
|
||||
ResolvedEdit {
|
||||
range: range.start..range.start,
|
||||
new_text,
|
||||
description,
|
||||
description: Some(description),
|
||||
}
|
||||
}
|
||||
Self::InsertAfter {
|
||||
@@ -316,7 +298,7 @@ impl AssistantEditKind {
|
||||
ResolvedEdit {
|
||||
range: range.end..range.end,
|
||||
new_text,
|
||||
description,
|
||||
description: Some(description),
|
||||
}
|
||||
}
|
||||
Self::Delete { old_text } => {
|
||||
@@ -332,29 +314,44 @@ impl AssistantEditKind {
|
||||
|
||||
fn resolve_location(buffer: &text::BufferSnapshot, search_query: &str) -> Range<text::Anchor> {
|
||||
const INSERTION_COST: u32 = 3;
|
||||
const DELETION_COST: u32 = 10;
|
||||
const WHITESPACE_INSERTION_COST: u32 = 1;
|
||||
const DELETION_COST: u32 = 3;
|
||||
const WHITESPACE_DELETION_COST: u32 = 1;
|
||||
const EQUALITY_BONUS: u32 = 5;
|
||||
|
||||
struct Matrix {
|
||||
cols: usize,
|
||||
data: Vec<SearchState>,
|
||||
}
|
||||
|
||||
impl Matrix {
|
||||
fn new(rows: usize, cols: usize) -> Self {
|
||||
Matrix {
|
||||
cols,
|
||||
data: vec![SearchState::new(0, SearchDirection::Diagonal); rows * cols],
|
||||
}
|
||||
}
|
||||
|
||||
fn get(&self, row: usize, col: usize) -> SearchState {
|
||||
self.data[row * self.cols + col]
|
||||
}
|
||||
|
||||
fn set(&mut self, row: usize, col: usize, cost: SearchState) {
|
||||
self.data[row * self.cols + col] = cost;
|
||||
}
|
||||
}
|
||||
|
||||
let buffer_len = buffer.len();
|
||||
let query_len = search_query.len();
|
||||
let mut matrix = SearchMatrix::new(query_len + 1, buffer_len + 1);
|
||||
let mut leading_deletion_cost = 0_u32;
|
||||
let mut matrix = Matrix::new(query_len + 1, buffer_len + 1);
|
||||
|
||||
for (row, query_byte) in search_query.bytes().enumerate() {
|
||||
let deletion_cost = if query_byte.is_ascii_whitespace() {
|
||||
WHITESPACE_DELETION_COST
|
||||
} else {
|
||||
DELETION_COST
|
||||
};
|
||||
|
||||
leading_deletion_cost = leading_deletion_cost.saturating_add(deletion_cost);
|
||||
matrix.set(
|
||||
row + 1,
|
||||
0,
|
||||
SearchState::new(leading_deletion_cost, SearchDirection::Diagonal),
|
||||
);
|
||||
|
||||
for (col, buffer_byte) in buffer.bytes_in_range(0..buffer.len()).flatten().enumerate() {
|
||||
let deletion_cost = if query_byte.is_ascii_whitespace() {
|
||||
WHITESPACE_DELETION_COST
|
||||
} else {
|
||||
DELETION_COST
|
||||
};
|
||||
let insertion_cost = if buffer_byte.is_ascii_whitespace() {
|
||||
WHITESPACE_INSERTION_COST
|
||||
} else {
|
||||
@@ -362,35 +359,38 @@ impl AssistantEditKind {
|
||||
};
|
||||
|
||||
let up = SearchState::new(
|
||||
matrix.get(row, col + 1).cost.saturating_add(deletion_cost),
|
||||
matrix.get(row, col + 1).score.saturating_sub(deletion_cost),
|
||||
SearchDirection::Up,
|
||||
);
|
||||
let left = SearchState::new(
|
||||
matrix.get(row + 1, col).cost.saturating_add(insertion_cost),
|
||||
matrix
|
||||
.get(row + 1, col)
|
||||
.score
|
||||
.saturating_sub(insertion_cost),
|
||||
SearchDirection::Left,
|
||||
);
|
||||
let diagonal = SearchState::new(
|
||||
if query_byte == *buffer_byte {
|
||||
matrix.get(row, col).cost
|
||||
matrix.get(row, col).score.saturating_add(EQUALITY_BONUS)
|
||||
} else {
|
||||
matrix
|
||||
.get(row, col)
|
||||
.cost
|
||||
.saturating_add(deletion_cost + insertion_cost)
|
||||
.score
|
||||
.saturating_sub(deletion_cost + insertion_cost)
|
||||
},
|
||||
SearchDirection::Diagonal,
|
||||
);
|
||||
matrix.set(row + 1, col + 1, up.min(left).min(diagonal));
|
||||
matrix.set(row + 1, col + 1, up.max(left).max(diagonal));
|
||||
}
|
||||
}
|
||||
|
||||
// Traceback to find the best match
|
||||
let mut best_buffer_end = buffer_len;
|
||||
let mut best_cost = u32::MAX;
|
||||
let mut best_score = 0;
|
||||
for col in 1..=buffer_len {
|
||||
let cost = matrix.get(query_len, col).cost;
|
||||
if cost < best_cost {
|
||||
best_cost = cost;
|
||||
let score = matrix.get(query_len, col).score;
|
||||
if score > best_score {
|
||||
best_score = score;
|
||||
best_buffer_end = col;
|
||||
}
|
||||
}
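The switch from a minimized cost to a maximized score turns resolve_location into a local-alignment style search: equal bytes earn a bonus, insertions and deletions subtract (cheaper for whitespace), values saturate at zero, and the best-scoring column in the last row marks the end of the match. A simplified, self-contained sketch of that scoring pass over plain strings (constants copied from the hunk's new values; the leading-deletion penalty and the traceback to the match start are omitted):

const INSERTION_COST: u32 = 3;
const WHITESPACE_INSERTION_COST: u32 = 1;
const DELETION_COST: u32 = 3;
const WHITESPACE_DELETION_COST: u32 = 1;
const EQUALITY_BONUS: u32 = 5;

// Returns the byte offset in `text` where the best fuzzy match for `query` ends.
fn best_match_end(query: &str, text: &str) -> usize {
    let cols = text.len() + 1;
    // Flat (query.len() + 1) x cols score matrix, like the Matrix helper above.
    let mut matrix = vec![0u32; (query.len() + 1) * cols];
    let get = |m: &[u32], row: usize, col: usize| m[row * cols + col];

    for (row, query_byte) in query.bytes().enumerate() {
        let deletion_cost = if query_byte.is_ascii_whitespace() {
            WHITESPACE_DELETION_COST
        } else {
            DELETION_COST
        };
        for (col, buffer_byte) in text.bytes().enumerate() {
            let insertion_cost = if buffer_byte.is_ascii_whitespace() {
                WHITESPACE_INSERTION_COST
            } else {
                INSERTION_COST
            };
            let up = get(&matrix, row, col + 1).saturating_sub(deletion_cost);
            let left = get(&matrix, row + 1, col).saturating_sub(insertion_cost);
            let diagonal = if query_byte == buffer_byte {
                get(&matrix, row, col).saturating_add(EQUALITY_BONUS)
            } else {
                get(&matrix, row, col).saturating_sub(deletion_cost + insertion_cost)
            };
            matrix[(row + 1) * cols + (col + 1)] = up.max(left).max(diagonal);
        }
    }

    // The column with the best score in the last row is where traceback would begin.
    (1..cols)
        .max_by_key(|&col| get(&matrix, query.len(), col))
        .unwrap_or(text.len())
}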
|
||||
@@ -560,84 +560,89 @@ mod tests {
|
||||
language_settings::AllLanguageSettings, Language, LanguageConfig, LanguageMatcher,
|
||||
};
|
||||
use settings::SettingsStore;
|
||||
use text::{OffsetRangeExt, Point};
|
||||
use ui::BorrowAppContext;
|
||||
use unindent::Unindent as _;
|
||||
use util::test::{generate_marked_text, marked_text_ranges};
|
||||
|
||||
#[gpui::test]
|
||||
fn test_resolve_location(cx: &mut AppContext) {
|
||||
assert_location_resolution(
|
||||
concat!(
|
||||
" Lorem\n",
|
||||
"« ipsum\n",
|
||||
" dolor sit amet»\n",
|
||||
" consecteur",
|
||||
),
|
||||
"ipsum\ndolor",
|
||||
cx,
|
||||
);
|
||||
{
|
||||
let buffer = cx.new_model(|cx| {
|
||||
Buffer::local(
|
||||
concat!(
|
||||
" Lorem\n",
|
||||
" ipsum\n",
|
||||
" dolor sit amet\n",
|
||||
" consecteur",
|
||||
),
|
||||
cx,
|
||||
)
|
||||
});
|
||||
let snapshot = buffer.read(cx).snapshot();
|
||||
assert_eq!(
|
||||
AssistantEditKind::resolve_location(&snapshot, "ipsum\ndolor").to_point(&snapshot),
|
||||
Point::new(1, 0)..Point::new(2, 18)
|
||||
);
|
||||
}
|
||||
|
||||
assert_location_resolution(
|
||||
&"
|
||||
«fn foo1(a: usize) -> usize {
|
||||
40
|
||||
}»
|
||||
{
|
||||
let buffer = cx.new_model(|cx| {
|
||||
Buffer::local(
|
||||
concat!(
|
||||
"fn foo1(a: usize) -> usize {\n",
|
||||
" 40\n",
|
||||
"}\n",
|
||||
"\n",
|
||||
"fn foo2(b: usize) -> usize {\n",
|
||||
" 42\n",
|
||||
"}\n",
|
||||
),
|
||||
cx,
|
||||
)
|
||||
});
|
||||
let snapshot = buffer.read(cx).snapshot();
|
||||
assert_eq!(
|
||||
AssistantEditKind::resolve_location(&snapshot, "fn foo1(b: usize) {\n40\n}")
|
||||
.to_point(&snapshot),
|
||||
Point::new(0, 0)..Point::new(2, 1)
|
||||
);
|
||||
}
|
||||
|
||||
fn foo2(b: usize) -> usize {
|
||||
42
|
||||
}
|
||||
"
|
||||
.unindent(),
|
||||
"fn foo1(b: usize) {\n40\n}",
|
||||
cx,
|
||||
);
|
||||
|
||||
assert_location_resolution(
|
||||
&"
|
||||
fn main() {
|
||||
« Foo
|
||||
.bar()
|
||||
.baz()
|
||||
.qux()»
|
||||
}
|
||||
|
||||
fn foo2(b: usize) -> usize {
|
||||
42
|
||||
}
|
||||
"
|
||||
.unindent(),
|
||||
"Foo.bar.baz.qux()",
|
||||
cx,
|
||||
);
|
||||
|
||||
assert_location_resolution(
|
||||
&"
|
||||
class Something {
|
||||
one() { return 1; }
|
||||
« two() { return 2222; }
|
||||
three() { return 333; }
|
||||
four() { return 4444; }
|
||||
five() { return 5555; }
|
||||
six() { return 6666; }
|
||||
» seven() { return 7; }
|
||||
eight() { return 8; }
|
||||
}
|
||||
"
|
||||
.unindent(),
|
||||
&"
|
||||
two() { return 2222; }
|
||||
four() { return 4444; }
|
||||
five() { return 5555; }
|
||||
six() { return 6666; }
|
||||
"
|
||||
.unindent(),
|
||||
cx,
|
||||
);
|
||||
{
|
||||
let buffer = cx.new_model(|cx| {
|
||||
Buffer::local(
|
||||
concat!(
|
||||
"fn main() {\n",
|
||||
" Foo\n",
|
||||
" .bar()\n",
|
||||
" .baz()\n",
|
||||
" .qux()\n",
|
||||
"}\n",
|
||||
"\n",
|
||||
"fn foo2(b: usize) -> usize {\n",
|
||||
" 42\n",
|
||||
"}\n",
|
||||
),
|
||||
cx,
|
||||
)
|
||||
});
|
||||
let snapshot = buffer.read(cx).snapshot();
|
||||
assert_eq!(
|
||||
AssistantEditKind::resolve_location(&snapshot, "Foo.bar.baz.qux()")
|
||||
.to_point(&snapshot),
|
||||
Point::new(1, 0)..Point::new(4, 14)
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
fn test_resolve_edits(cx: &mut AppContext) {
|
||||
init_test(cx);
|
||||
let settings_store = SettingsStore::test(cx);
|
||||
cx.set_global(settings_store);
|
||||
language::init(cx);
|
||||
cx.update_global::<SettingsStore, _>(|settings, cx| {
|
||||
settings.update_user_settings::<AllLanguageSettings>(cx, |_| {});
|
||||
});
|
||||
|
||||
assert_edits(
|
||||
"
|
||||
@@ -670,7 +675,7 @@ mod tests {
|
||||
last_name: String,
|
||||
"
|
||||
.unindent(),
|
||||
description: None,
|
||||
description: "".into(),
|
||||
},
|
||||
AssistantEditKind::Update {
|
||||
old_text: "
|
||||
@@ -685,7 +690,7 @@ mod tests {
|
||||
}
|
||||
"
|
||||
.unindent(),
|
||||
description: None,
|
||||
description: "".into(),
|
||||
},
|
||||
],
|
||||
"
|
||||
@@ -729,7 +734,7 @@ mod tests {
|
||||
qux();
|
||||
}"
|
||||
.unindent(),
|
||||
description: Some("implement bar".into()),
|
||||
description: "implement bar".into(),
|
||||
},
|
||||
AssistantEditKind::Update {
|
||||
old_text: "
|
||||
@@ -742,7 +747,7 @@ mod tests {
|
||||
bar();
|
||||
}"
|
||||
.unindent(),
|
||||
description: Some("call bar in foo".into()),
|
||||
description: "call bar in foo".into(),
|
||||
},
|
||||
AssistantEditKind::InsertAfter {
|
||||
old_text: "
|
||||
@@ -757,7 +762,7 @@ mod tests {
|
||||
}
|
||||
"
|
||||
.unindent(),
|
||||
description: Some("implement qux".into()),
|
||||
description: "implement qux".into(),
|
||||
},
|
||||
],
|
||||
"
|
||||
@@ -809,7 +814,7 @@ mod tests {
|
||||
}
|
||||
"
|
||||
.unindent(),
|
||||
description: None,
|
||||
description: "pick better number".into(),
|
||||
},
|
||||
AssistantEditKind::Update {
|
||||
old_text: "
|
||||
@@ -824,7 +829,7 @@ mod tests {
|
||||
}
|
||||
"
|
||||
.unindent(),
|
||||
description: None,
|
||||
description: "pick better number".into(),
|
||||
},
|
||||
AssistantEditKind::Update {
|
||||
old_text: "
|
||||
@@ -839,7 +844,7 @@ mod tests {
|
||||
}
|
||||
"
|
||||
.unindent(),
|
||||
description: None,
|
||||
description: "pick better number".into(),
|
||||
},
|
||||
],
|
||||
"
|
||||
@@ -860,69 +865,6 @@ mod tests {
|
||||
.unindent(),
|
||||
cx,
|
||||
);
|
||||
|
||||
assert_edits(
|
||||
"
|
||||
impl Person {
|
||||
fn set_name(&mut self, name: String) {
|
||||
self.name = name;
|
||||
}
|
||||
|
||||
fn name(&self) -> String {
|
||||
return self.name;
|
||||
}
|
||||
}
|
||||
"
|
||||
.unindent(),
|
||||
vec![
|
||||
AssistantEditKind::Update {
|
||||
old_text: "self.name = name;".unindent(),
|
||||
new_text: "self._name = name;".unindent(),
|
||||
description: None,
|
||||
},
|
||||
AssistantEditKind::Update {
|
||||
old_text: "return self.name;\n".unindent(),
|
||||
new_text: "return self._name;\n".unindent(),
|
||||
description: None,
|
||||
},
|
||||
],
|
||||
"
|
||||
impl Person {
|
||||
fn set_name(&mut self, name: String) {
|
||||
self._name = name;
|
||||
}
|
||||
|
||||
fn name(&self) -> String {
|
||||
return self._name;
|
||||
}
|
||||
}
|
||||
"
|
||||
.unindent(),
|
||||
cx,
|
||||
);
|
||||
}
|
||||
|
||||
fn init_test(cx: &mut AppContext) {
|
||||
let settings_store = SettingsStore::test(cx);
|
||||
cx.set_global(settings_store);
|
||||
language::init(cx);
|
||||
cx.update_global::<SettingsStore, _>(|settings, cx| {
|
||||
settings.update_user_settings::<AllLanguageSettings>(cx, |_| {});
|
||||
});
|
||||
}
|
||||
|
||||
#[track_caller]
|
||||
fn assert_location_resolution(
|
||||
text_with_expected_range: &str,
|
||||
query: &str,
|
||||
cx: &mut AppContext,
|
||||
) {
|
||||
let (text, _) = marked_text_ranges(text_with_expected_range, false);
|
||||
let buffer = cx.new_model(|cx| Buffer::local(text.clone(), cx));
|
||||
let snapshot = buffer.read(cx).snapshot();
|
||||
let range = AssistantEditKind::resolve_location(&snapshot, query).to_offset(&snapshot);
|
||||
let text_with_actual_range = generate_marked_text(&text, &[range], false);
|
||||
pretty_assertions::assert_eq!(text_with_actual_range, text_with_expected_range);
|
||||
}
|
||||
|
||||
#[track_caller]
|
||||
|
||||
@@ -204,7 +204,7 @@ impl PromptBuilder {
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub fn generate_inline_transformation_prompt(
|
||||
pub fn generate_content_prompt(
|
||||
&self,
|
||||
user_prompt: String,
|
||||
language_name: Option<&LanguageName>,
|
||||
|
||||
@@ -34,6 +34,7 @@ pub mod search_command;
|
||||
pub mod symbols_command;
|
||||
pub mod tab_command;
|
||||
pub mod terminal_command;
|
||||
pub mod workflow_command;
|
||||
|
||||
pub(crate) struct SlashCommandCompletionProvider {
|
||||
cancel_flag: Mutex<Arc<AtomicBool>>,
|
||||
|
||||
82
crates/assistant/src/slash_command/workflow_command.rs
Normal file
@@ -0,0 +1,82 @@
|
||||
use std::sync::atomic::AtomicBool;
|
||||
use std::sync::Arc;
|
||||
|
||||
use anyhow::Result;
|
||||
use assistant_slash_command::{
|
||||
ArgumentCompletion, SlashCommand, SlashCommandOutput, SlashCommandOutputSection,
|
||||
SlashCommandResult,
|
||||
};
|
||||
use gpui::{Task, WeakView};
|
||||
use language::{BufferSnapshot, LspAdapterDelegate};
|
||||
use ui::prelude::*;
|
||||
use workspace::Workspace;
|
||||
|
||||
use crate::prompts::PromptBuilder;
|
||||
|
||||
pub(crate) struct WorkflowSlashCommand {
|
||||
prompt_builder: Arc<PromptBuilder>,
|
||||
}
|
||||
|
||||
impl WorkflowSlashCommand {
|
||||
pub const NAME: &'static str = "workflow";
|
||||
|
||||
pub fn new(prompt_builder: Arc<PromptBuilder>) -> Self {
|
||||
Self { prompt_builder }
|
||||
}
|
||||
}
|
||||
|
||||
impl SlashCommand for WorkflowSlashCommand {
|
||||
fn name(&self) -> String {
|
||||
Self::NAME.into()
|
||||
}
|
||||
|
||||
fn description(&self) -> String {
|
||||
"Insert prompt to opt into the edit workflow".into()
|
||||
}
|
||||
|
||||
fn menu_text(&self) -> String {
|
||||
self.description()
|
||||
}
|
||||
|
||||
fn requires_argument(&self) -> bool {
|
||||
false
|
||||
}
|
||||
|
||||
fn complete_argument(
|
||||
self: Arc<Self>,
|
||||
_arguments: &[String],
|
||||
_cancel: Arc<AtomicBool>,
|
||||
_workspace: Option<WeakView<Workspace>>,
|
||||
_cx: &mut WindowContext,
|
||||
) -> Task<Result<Vec<ArgumentCompletion>>> {
|
||||
Task::ready(Ok(Vec::new()))
|
||||
}
|
||||
|
||||
fn run(
|
||||
self: Arc<Self>,
|
||||
_arguments: &[String],
|
||||
_context_slash_command_output_sections: &[SlashCommandOutputSection<language::Anchor>],
|
||||
_context_buffer: BufferSnapshot,
|
||||
_workspace: WeakView<Workspace>,
|
||||
_delegate: Option<Arc<dyn LspAdapterDelegate>>,
|
||||
cx: &mut WindowContext,
|
||||
) -> Task<SlashCommandResult> {
|
||||
let prompt_builder = self.prompt_builder.clone();
|
||||
cx.spawn(|_cx| async move {
|
||||
let text = prompt_builder.generate_workflow_prompt()?;
|
||||
let range = 0..text.len();
|
||||
|
||||
Ok(SlashCommandOutput {
|
||||
text,
|
||||
sections: vec![SlashCommandOutputSection {
|
||||
range,
|
||||
icon: IconName::Route,
|
||||
label: "Workflow".into(),
|
||||
metadata: None,
|
||||
}],
|
||||
run_commands_in_text: false,
|
||||
}
|
||||
.to_event_stream())
|
||||
})
|
||||
}
|
||||
}
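For context, this new command is only exposed while live diffs are enabled; condensed from the register_slash_commands hunk earlier in this diff (no new API here), the registration is:

if AssistantSettings::get_global(cx).are_live_diffs_enabled(cx) {
    slash_command_registry.register_command(
        WorkflowSlashCommand::new(prompt_builder.clone()),
        true,
    );
} else {
    slash_command_registry.unregister_command_by_name(WorkflowSlashCommand::NAME);
}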
|
||||
@@ -1,6 +1,6 @@
|
||||
use crate::{
|
||||
humanize_token_count, prompts::PromptBuilder, AssistantPanel, AssistantPanelEvent,
|
||||
ModelSelector, RequestType, DEFAULT_CONTEXT_LINES,
|
||||
ModelSelector, DEFAULT_CONTEXT_LINES,
|
||||
};
|
||||
use anyhow::{Context as _, Result};
|
||||
use client::telemetry::Telemetry;
|
||||
@@ -251,7 +251,7 @@ impl TerminalInlineAssistant {
|
||||
.read(cx)
|
||||
.active_context(cx)?
|
||||
.read(cx)
|
||||
.to_completion_request(RequestType::Chat, cx),
|
||||
.to_completion_request(cx),
|
||||
)
|
||||
})
|
||||
} else {
|
||||
|
||||
@@ -1,2 +1 @@
|
||||
pub mod context_server_tool;
|
||||
pub mod now_tool;
|
||||
|
||||
@@ -1,82 +0,0 @@
|
||||
use anyhow::{anyhow, bail};
|
||||
use assistant_tool::Tool;
|
||||
use context_servers::manager::ContextServerManager;
|
||||
use context_servers::types;
|
||||
use gpui::Task;
|
||||
|
||||
pub struct ContextServerTool {
|
||||
server_id: String,
|
||||
tool: types::Tool,
|
||||
}
|
||||
|
||||
impl ContextServerTool {
|
||||
pub fn new(server_id: impl Into<String>, tool: types::Tool) -> Self {
|
||||
Self {
|
||||
server_id: server_id.into(),
|
||||
tool,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Tool for ContextServerTool {
|
||||
fn name(&self) -> String {
|
||||
self.tool.name.clone()
|
||||
}
|
||||
|
||||
fn description(&self) -> String {
|
||||
self.tool.description.clone().unwrap_or_default()
|
||||
}
|
||||
|
||||
fn input_schema(&self) -> serde_json::Value {
|
||||
match &self.tool.input_schema {
|
||||
serde_json::Value::Null => {
|
||||
serde_json::json!({ "type": "object", "properties": [] })
|
||||
}
|
||||
serde_json::Value::Object(map) if map.is_empty() => {
|
||||
serde_json::json!({ "type": "object", "properties": [] })
|
||||
}
|
||||
_ => self.tool.input_schema.clone(),
|
||||
}
|
||||
}
|
||||
|
||||
fn run(
|
||||
self: std::sync::Arc<Self>,
|
||||
input: serde_json::Value,
|
||||
_workspace: gpui::WeakView<workspace::Workspace>,
|
||||
cx: &mut ui::WindowContext,
|
||||
) -> gpui::Task<gpui::Result<String>> {
|
||||
let manager = ContextServerManager::global(cx);
|
||||
let manager = manager.read(cx);
|
||||
if let Some(server) = manager.get_server(&self.server_id) {
|
||||
cx.foreground_executor().spawn({
|
||||
let tool_name = self.tool.name.clone();
|
||||
async move {
|
||||
let Some(protocol) = server.client.read().clone() else {
|
||||
bail!("Context server not initialized");
|
||||
};
|
||||
|
||||
let arguments = if let serde_json::Value::Object(map) = input {
|
||||
Some(map.into_iter().collect())
|
||||
} else {
|
||||
None
|
||||
};
|
||||
|
||||
log::trace!(
|
||||
"Running tool: {} with arguments: {:?}",
|
||||
tool_name,
|
||||
arguments
|
||||
);
|
||||
let response = protocol.run_tool(tool_name, arguments).await?;
|
||||
|
||||
let tool_result = match response.tool_result {
|
||||
serde_json::Value::String(s) => s,
|
||||
_ => serde_json::to_string(&response.tool_result)?,
|
||||
};
|
||||
Ok(tool_result)
|
||||
}
|
||||
})
|
||||
} else {
|
||||
Task::ready(Err(anyhow!("Context server not found")))
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -84,9 +84,9 @@ pub struct AutoUpdater {
|
||||
}
|
||||
|
||||
#[derive(Deserialize)]
|
||||
pub struct JsonRelease {
|
||||
pub version: String,
|
||||
pub url: String,
|
||||
struct JsonRelease {
|
||||
version: String,
|
||||
url: String,
|
||||
}
|
||||
|
||||
struct MacOsUnmounter {
|
||||
@@ -482,7 +482,7 @@ impl AutoUpdater {
|
||||
release_channel: ReleaseChannel,
|
||||
version: Option<SemanticVersion>,
|
||||
cx: &mut AsyncAppContext,
|
||||
) -> Result<(JsonRelease, String)> {
|
||||
) -> Result<(String, String)> {
|
||||
let this = cx.update(|cx| {
|
||||
cx.default_global::<GlobalAutoUpdate>()
|
||||
.0
|
||||
@@ -504,7 +504,7 @@ impl AutoUpdater {
|
||||
let update_request_body = build_remote_server_update_request_body(cx)?;
|
||||
let body = serde_json::to_string(&update_request_body)?;
|
||||
|
||||
Ok((release, body))
|
||||
Ok((release.url, body))
|
||||
}
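The JsonRelease struct above is simply the deserialized shape of the release endpoint's response; a standalone sketch of parsing it with serde_json (the JSON literal is purely illustrative, not a real release):

use serde::Deserialize;

#[derive(Deserialize)]
struct JsonRelease {
    version: String,
    url: String,
}

fn main() -> Result<(), serde_json::Error> {
    // Illustrative payload only.
    let release: JsonRelease =
        serde_json::from_str(r#"{"version":"1.2.3","url":"https://example.invalid/zed.gz"}"#)?;
    assert_eq!(release.version, "1.2.3");
    println!("would fetch {}", release.url);
    Ok(())
}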
|
||||
|
||||
async fn get_release(
|
||||
|
||||
@@ -3,7 +3,7 @@ mod channel_index;
|
||||
use crate::{channel_buffer::ChannelBuffer, channel_chat::ChannelChat, ChannelMessage};
|
||||
use anyhow::{anyhow, Result};
|
||||
use channel_index::ChannelIndex;
|
||||
use client::{ChannelId, Client, ClientSettings, Subscription, User, UserId, UserStore};
|
||||
use client::{ChannelId, Client, ClientSettings, ProjectId, Subscription, User, UserId, UserStore};
|
||||
use collections::{hash_map, HashMap, HashSet};
|
||||
use futures::{channel::mpsc, future::Shared, Future, FutureExt, StreamExt};
|
||||
use gpui::{
|
||||
@@ -33,11 +33,30 @@ struct NotesVersion {
|
||||
version: clock::Global,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct HostedProject {
|
||||
project_id: ProjectId,
|
||||
channel_id: ChannelId,
|
||||
name: SharedString,
|
||||
_visibility: proto::ChannelVisibility,
|
||||
}
|
||||
impl From<proto::HostedProject> for HostedProject {
|
||||
fn from(project: proto::HostedProject) -> Self {
|
||||
Self {
|
||||
project_id: ProjectId(project.project_id),
|
||||
channel_id: ChannelId(project.channel_id),
|
||||
_visibility: project.visibility(),
|
||||
name: project.name.into(),
|
||||
}
|
||||
}
|
||||
}
|
||||
pub struct ChannelStore {
|
||||
pub channel_index: ChannelIndex,
|
||||
channel_invitations: Vec<Arc<Channel>>,
|
||||
channel_participants: HashMap<ChannelId, Vec<Arc<User>>>,
|
||||
channel_states: HashMap<ChannelId, ChannelState>,
|
||||
hosted_projects: HashMap<ProjectId, HostedProject>,
|
||||
|
||||
outgoing_invites: HashSet<(ChannelId, UserId)>,
|
||||
update_channels_tx: mpsc::UnboundedSender<proto::UpdateChannels>,
|
||||
opened_buffers: HashMap<ChannelId, OpenedModelHandle<ChannelBuffer>>,
|
||||
@@ -66,6 +85,7 @@ pub struct ChannelState {
|
||||
observed_notes_version: NotesVersion,
|
||||
observed_chat_message: Option<u64>,
|
||||
role: Option<ChannelRole>,
|
||||
projects: HashSet<ProjectId>,
|
||||
}
|
||||
|
||||
impl Channel {
|
||||
@@ -196,6 +216,7 @@ impl ChannelStore {
|
||||
channel_invitations: Vec::default(),
|
||||
channel_index: ChannelIndex::default(),
|
||||
channel_participants: Default::default(),
|
||||
hosted_projects: Default::default(),
|
||||
outgoing_invites: Default::default(),
|
||||
opened_buffers: Default::default(),
|
||||
opened_chats: Default::default(),
|
||||
@@ -295,6 +316,19 @@ impl ChannelStore {
|
||||
self.channel_index.by_id().get(&channel_id)
|
||||
}
|
||||
|
||||
pub fn projects_for_id(&self, channel_id: ChannelId) -> Vec<(SharedString, ProjectId)> {
|
||||
let mut projects: Vec<(SharedString, ProjectId)> = self
|
||||
.channel_states
|
||||
.get(&channel_id)
|
||||
.map(|state| state.projects.clone())
|
||||
.unwrap_or_default()
|
||||
.into_iter()
|
||||
.flat_map(|id| Some((self.hosted_projects.get(&id)?.name.clone(), id)))
|
||||
.collect();
|
||||
projects.sort();
|
||||
projects
|
||||
}
|
||||
|
||||
pub fn has_open_channel_buffer(&self, channel_id: ChannelId, _cx: &AppContext) -> bool {
|
||||
if let Some(buffer) = self.opened_buffers.get(&channel_id) {
|
||||
if let OpenedModelHandle::Open(buffer) = buffer {
|
||||
@@ -1068,7 +1102,9 @@ impl ChannelStore {
|
||||
let channels_changed = !payload.channels.is_empty()
|
||||
|| !payload.delete_channels.is_empty()
|
||||
|| !payload.latest_channel_message_ids.is_empty()
|
||||
|| !payload.latest_channel_buffer_versions.is_empty();
|
||||
|| !payload.latest_channel_buffer_versions.is_empty()
|
||||
|| !payload.hosted_projects.is_empty()
|
||||
|| !payload.deleted_hosted_projects.is_empty();
|
||||
|
||||
if channels_changed {
|
||||
if !payload.delete_channels.is_empty() {
|
||||
@@ -1125,6 +1161,34 @@ impl ChannelStore {
|
||||
.or_default()
|
||||
.update_latest_message_id(latest_channel_message.message_id);
|
||||
}
|
||||
|
||||
for hosted_project in payload.hosted_projects {
|
||||
let hosted_project: HostedProject = hosted_project.into();
|
||||
if let Some(old_project) = self
|
||||
.hosted_projects
|
||||
.insert(hosted_project.project_id, hosted_project.clone())
|
||||
{
|
||||
self.channel_states
|
||||
.entry(old_project.channel_id)
|
||||
.or_default()
|
||||
.remove_hosted_project(old_project.project_id);
|
||||
}
|
||||
self.channel_states
|
||||
.entry(hosted_project.channel_id)
|
||||
.or_default()
|
||||
.add_hosted_project(hosted_project.project_id);
|
||||
}
|
||||
|
||||
for hosted_project_id in payload.deleted_hosted_projects {
|
||||
let hosted_project_id = ProjectId(hosted_project_id);
|
||||
|
||||
if let Some(old_project) = self.hosted_projects.remove(&hosted_project_id) {
|
||||
self.channel_states
|
||||
.entry(old_project.channel_id)
|
||||
.or_default()
|
||||
.remove_hosted_project(old_project.project_id);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
cx.notify();
|
||||
@@ -1231,4 +1295,12 @@ impl ChannelState {
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
fn add_hosted_project(&mut self, project_id: ProjectId) {
|
||||
self.projects.insert(project_id);
|
||||
}
|
||||
|
||||
fn remove_hosted_project(&mut self, project_id: ProjectId) {
|
||||
self.projects.remove(&project_id);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -48,7 +48,6 @@ pub struct Collaborator {
|
||||
pub peer_id: proto::PeerId,
|
||||
pub replica_id: ReplicaId,
|
||||
pub user_id: UserId,
|
||||
pub is_host: bool,
|
||||
}
|
||||
|
||||
impl PartialOrd for User {
|
||||
@@ -825,7 +824,6 @@ impl Collaborator {
|
||||
peer_id: message.peer_id.ok_or_else(|| anyhow!("invalid peer id"))?,
|
||||
replica_id: message.replica_id as ReplicaId,
|
||||
user_id: message.user_id as UserId,
|
||||
is_host: message.is_host,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
@@ -252,10 +252,7 @@ async fn create_billing_subscription(
|
||||
|
||||
let default_model = llm_db.model(rpc::LanguageModelProvider::Anthropic, "claude-3-5-sonnet")?;
|
||||
let stripe_model = stripe_billing.register_model(default_model).await?;
|
||||
let success_url = format!(
|
||||
"{}/account?checkout_complete=1",
|
||||
app.config.zed_dot_dev_url()
|
||||
);
|
||||
let success_url = format!("{}/account", app.config.zed_dot_dev_url());
|
||||
let checkout_session_url = stripe_billing
|
||||
.checkout(customer_id, &user.github_login, &stripe_model, &success_url)
|
||||
.await?;
|
||||
|
||||
@@ -617,6 +617,7 @@ pub struct ChannelsForUser {
|
||||
pub channels: Vec<Channel>,
|
||||
pub channel_memberships: Vec<channel_member::Model>,
|
||||
pub channel_participants: HashMap<ChannelId, Vec<UserId>>,
|
||||
pub hosted_projects: Vec<proto::HostedProject>,
|
||||
pub invited_channels: Vec<Channel>,
|
||||
|
||||
pub observed_buffer_versions: Vec<proto::ChannelBufferVersion>,
|
||||
@@ -740,7 +741,6 @@ impl ProjectCollaborator {
|
||||
peer_id: Some(self.connection_id.into()),
|
||||
replica_id: self.replica_id.0 as u32,
|
||||
user_id: self.user_id.to_proto(),
|
||||
is_host: self.is_host,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -10,6 +10,7 @@ pub mod contacts;
|
||||
pub mod contributors;
|
||||
pub mod embeddings;
|
||||
pub mod extensions;
|
||||
pub mod hosted_projects;
|
||||
pub mod messages;
|
||||
pub mod notifications;
|
||||
pub mod processed_stripe_events;
|
||||
|
||||
@@ -116,7 +116,6 @@ impl Database {
|
||||
peer_id: Some(collaborator.connection().into()),
|
||||
user_id: collaborator.user_id.to_proto(),
|
||||
replica_id: collaborator.replica_id.0 as u32,
|
||||
is_host: false,
|
||||
})
|
||||
.collect(),
|
||||
})
|
||||
@@ -223,7 +222,6 @@ impl Database {
|
||||
peer_id: Some(collaborator.connection().into()),
|
||||
user_id: collaborator.user_id.to_proto(),
|
||||
replica_id: collaborator.replica_id.0 as u32,
|
||||
is_host: false,
|
||||
})
|
||||
.collect(),
|
||||
},
|
||||
@@ -259,7 +257,6 @@ impl Database {
|
||||
peer_id: Some(db_collaborator.connection().into()),
|
||||
replica_id: db_collaborator.replica_id.0 as u32,
|
||||
user_id: db_collaborator.user_id.to_proto(),
|
||||
is_host: false,
|
||||
})
|
||||
} else {
|
||||
collaborator_ids_to_remove.push(db_collaborator.id);
|
||||
@@ -388,7 +385,6 @@ impl Database {
|
||||
peer_id: Some(connection.into()),
|
||||
replica_id: row.replica_id.0 as u32,
|
||||
user_id: row.user_id.to_proto(),
|
||||
is_host: false,
|
||||
});
|
||||
}
|
||||
|
||||
|
||||
@@ -615,10 +615,15 @@ impl Database {
|
||||
.observed_channel_messages(&channel_ids, user_id, tx)
|
||||
.await?;
|
||||
|
||||
let hosted_projects = self
|
||||
.get_hosted_projects(&channel_ids, &roles_by_channel_id, tx)
|
||||
.await?;
|
||||
|
||||
Ok(ChannelsForUser {
|
||||
channel_memberships,
|
||||
channels,
|
||||
invited_channels,
|
||||
hosted_projects,
|
||||
channel_participants,
|
||||
latest_buffer_versions,
|
||||
latest_channel_messages,
|
||||
|
||||
85
crates/collab/src/db/queries/hosted_projects.rs
Normal file
@@ -0,0 +1,85 @@
use rpc::{proto, ErrorCode};

use super::*;

impl Database {
    pub async fn get_hosted_projects(
        &self,
        channel_ids: &[ChannelId],
        roles: &HashMap<ChannelId, ChannelRole>,
        tx: &DatabaseTransaction,
    ) -> Result<Vec<proto::HostedProject>> {
        let projects = hosted_project::Entity::find()
            .find_also_related(project::Entity)
            .filter(hosted_project::Column::ChannelId.is_in(channel_ids.iter().map(|id| id.0)))
            .all(tx)
            .await?
            .into_iter()
            .flat_map(|(hosted_project, project)| {
                if hosted_project.deleted_at.is_some() {
                    return None;
                }
                match hosted_project.visibility {
                    ChannelVisibility::Public => {}
                    ChannelVisibility::Members => {
                        let is_visible = roles
                            .get(&hosted_project.channel_id)
                            .map(|role| role.can_see_all_descendants())
                            .unwrap_or(false);
                        if !is_visible {
                            return None;
                        }
                    }
                };
                Some(proto::HostedProject {
                    project_id: project?.id.to_proto(),
                    channel_id: hosted_project.channel_id.to_proto(),
                    name: hosted_project.name.clone(),
                    visibility: hosted_project.visibility.into(),
                })
            })
            .collect();

        Ok(projects)
    }

    pub async fn get_hosted_project(
        &self,
        hosted_project_id: HostedProjectId,
        user_id: UserId,
        tx: &DatabaseTransaction,
    ) -> Result<(hosted_project::Model, ChannelRole)> {
        let project = hosted_project::Entity::find_by_id(hosted_project_id)
            .one(tx)
            .await?
            .ok_or_else(|| anyhow!(ErrorCode::NoSuchProject))?;
        let channel = channel::Entity::find_by_id(project.channel_id)
            .one(tx)
            .await?
            .ok_or_else(|| anyhow!(ErrorCode::NoSuchChannel))?;

        let role = match project.visibility {
            ChannelVisibility::Public => {
                self.check_user_is_channel_participant(&channel, user_id, tx)
                    .await?
            }
            ChannelVisibility::Members => {
                self.check_user_is_channel_member(&channel, user_id, tx)
                    .await?
            }
        };

        Ok((project, role))
    }

    pub async fn is_hosted_project(&self, project_id: ProjectId) -> Result<bool> {
        self.transaction(|tx| async move {
            Ok(project::Entity::find_by_id(project_id)
                .one(&*tx)
                .await?
                .map(|project| project.hosted_project_id.is_some())
                .ok_or_else(|| anyhow!(ErrorCode::NoSuchProject))?)
        })
        .await
    }
}
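The filtering in get_hosted_projects combines a soft-delete check with a per-channel visibility rule. Below is a standalone sketch of that rule, using only types that appear in this file; the helper function itself is illustrative, not part of the diff:

// Mirrors the match above: deleted projects are skipped earlier, public
// projects are always listed, and members-only projects require a role that
// can see all of the channel's descendants.
fn hosted_project_is_visible(
    visibility: ChannelVisibility,
    role: Option<&ChannelRole>,
) -> bool {
    match visibility {
        ChannelVisibility::Public => true,
        ChannelVisibility::Members => role
            .map(|role| role.can_see_all_descendants())
            .unwrap_or(false),
    }
}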
|
||||
@@ -68,6 +68,7 @@ impl Database {
|
||||
connection.owner_id as i32,
|
||||
))),
|
||||
id: ActiveValue::NotSet,
|
||||
hosted_project_id: ActiveValue::Set(None),
|
||||
}
|
||||
.insert(&*tx)
|
||||
.await?;
|
||||
@@ -535,6 +536,39 @@ impl Database {
|
||||
.await
|
||||
}
|
||||
|
||||
/// Adds the given connection to the specified hosted project
|
||||
pub async fn join_hosted_project(
|
||||
&self,
|
||||
id: ProjectId,
|
||||
user_id: UserId,
|
||||
connection: ConnectionId,
|
||||
) -> Result<(Project, ReplicaId)> {
|
||||
self.transaction(|tx| async move {
|
||||
let (project, hosted_project) = project::Entity::find_by_id(id)
|
||||
.find_also_related(hosted_project::Entity)
|
||||
.one(&*tx)
|
||||
.await?
|
||||
.ok_or_else(|| anyhow!("hosted project is no longer shared"))?;
|
||||
|
||||
let Some(hosted_project) = hosted_project else {
|
||||
return Err(anyhow!("project is not hosted"))?;
|
||||
};
|
||||
|
||||
let channel = channel::Entity::find_by_id(hosted_project.channel_id)
|
||||
.one(&*tx)
|
||||
.await?
|
||||
.ok_or_else(|| anyhow!("no such channel"))?;
|
||||
|
||||
let role = self
|
||||
.check_user_is_channel_participant(&channel, user_id, &tx)
|
||||
.await?;
|
||||
|
||||
self.join_project_internal(project, user_id, connection, role, &tx)
|
||||
.await
|
||||
})
|
||||
.await
|
||||
}
|
||||
|
||||
pub async fn get_project(&self, id: ProjectId) -> Result<project::Model> {
|
||||
self.transaction(|tx| async move {
|
||||
Ok(project::Entity::find_by_id(id)
|
||||
|
||||
@@ -18,6 +18,7 @@ pub mod extension;
|
||||
pub mod extension_version;
|
||||
pub mod feature_flag;
|
||||
pub mod follower;
|
||||
pub mod hosted_project;
|
||||
pub mod language_server;
|
||||
pub mod notification;
|
||||
pub mod notification_kind;
|
||||
|
||||
27
crates/collab/src/db/tables/hosted_project.rs
Normal file
@@ -0,0 +1,27 @@
use crate::db::{ChannelId, ChannelVisibility, HostedProjectId};
use sea_orm::entity::prelude::*;

#[derive(Clone, Debug, PartialEq, Eq, DeriveEntityModel)]
#[sea_orm(table_name = "hosted_projects")]
pub struct Model {
    #[sea_orm(primary_key)]
    pub id: HostedProjectId,
    pub channel_id: ChannelId,
    pub name: String,
    pub visibility: ChannelVisibility,
    pub deleted_at: Option<DateTime>,
}

impl ActiveModelBehavior for ActiveModel {}

#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
pub enum Relation {
    #[sea_orm(has_one = "super::project::Entity")]
    Project,
}

impl Related<super::project::Entity> for Entity {
    fn to() -> RelationDef {
        Relation::Project.def()
    }
}
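A sketch of how this entity might be queried with SeaORM, mirroring the filters used in get_hosted_projects above; the helper name and its tx/channel_id parameters are assumptions, not code from the diff:

use sea_orm::{ColumnTrait, DatabaseTransaction, EntityTrait, QueryFilter};

// Hypothetical helper: list the non-deleted hosted projects for one channel.
async fn live_hosted_projects(
    tx: &DatabaseTransaction,
    channel_id: ChannelId,
) -> Result<Vec<Model>, sea_orm::DbErr> {
    Entity::find()
        .filter(Column::ChannelId.eq(channel_id.0))
        .filter(Column::DeletedAt.is_null())
        .all(tx)
        .await
}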
|
||||
@@ -1,4 +1,4 @@
|
||||
use crate::db::{ProjectId, Result, RoomId, ServerId, UserId};
|
||||
use crate::db::{HostedProjectId, ProjectId, Result, RoomId, ServerId, UserId};
|
||||
use anyhow::anyhow;
|
||||
use rpc::ConnectionId;
|
||||
use sea_orm::entity::prelude::*;
|
||||
@@ -12,6 +12,7 @@ pub struct Model {
|
||||
pub host_user_id: Option<UserId>,
|
||||
pub host_connection_id: Option<i32>,
|
||||
pub host_connection_server_id: Option<ServerId>,
|
||||
pub hosted_project_id: Option<HostedProjectId>,
|
||||
}
|
||||
|
||||
impl Model {
|
||||
@@ -49,6 +50,12 @@ pub enum Relation {
|
||||
Collaborators,
|
||||
#[sea_orm(has_many = "super::language_server::Entity")]
|
||||
LanguageServers,
|
||||
#[sea_orm(
|
||||
belongs_to = "super::hosted_project::Entity",
|
||||
from = "Column::HostedProjectId",
|
||||
to = "super::hosted_project::Column::Id"
|
||||
)]
|
||||
HostedProject,
|
||||
}
|
||||
|
||||
impl Related<super::user::Entity> for Entity {
|
||||
@@ -81,4 +88,10 @@ impl Related<super::language_server::Entity> for Entity {
|
||||
}
|
||||
}
|
||||
|
||||
impl Related<super::hosted_project::Entity> for Entity {
|
||||
fn to() -> RelationDef {
|
||||
Relation::HostedProject.def()
|
||||
}
|
||||
}
|
||||
|
||||
impl ActiveModelBehavior for ActiveModel {}
|
||||
|
||||
@@ -121,13 +121,11 @@ async fn test_channel_buffers(db: &Arc<Database>) {
|
||||
user_id: a_id.to_proto(),
|
||||
peer_id: Some(rpc::proto::PeerId { id: 1, owner_id }),
|
||||
replica_id: 0,
|
||||
is_host: false,
|
||||
},
|
||||
rpc::proto::Collaborator {
|
||||
user_id: b_id.to_proto(),
|
||||
peer_id: Some(rpc::proto::PeerId { id: 2, owner_id }),
|
||||
replica_id: 1,
|
||||
is_host: false,
|
||||
}
|
||||
]
|
||||
);
|
||||
|
||||
@@ -449,10 +449,6 @@ async fn check_usage_limit(
|
||||
model_name: &str,
|
||||
claims: &LlmTokenClaims,
|
||||
) -> Result<()> {
|
||||
if claims.is_staff {
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
let model = state.db.model(provider, model_name)?;
|
||||
let usage = state
|
||||
.db
|
||||
@@ -517,6 +513,11 @@ async fn check_usage_limit(
|
||||
];
|
||||
|
||||
for (used, limit, usage_measure) in checks {
|
||||
// Temporarily bypass rate-limiting for staff members.
|
||||
if claims.is_staff {
|
||||
continue;
|
||||
}
|
||||
|
||||
if used > limit {
|
||||
let resource = match usage_measure {
|
||||
UsageMeasure::RequestsPerMinute => "requests_per_minute",
|
||||
|
||||
@@ -287,6 +287,7 @@ impl Server {
|
||||
.add_request_handler(share_project)
|
||||
.add_message_handler(unshare_project)
|
||||
.add_request_handler(join_project)
|
||||
.add_request_handler(join_hosted_project)
|
||||
.add_message_handler(leave_project)
|
||||
.add_request_handler(update_project)
|
||||
.add_request_handler(update_worktree)
|
||||
@@ -307,8 +308,6 @@ impl Server {
|
||||
.add_request_handler(forward_read_only_project_request::<proto::InlayHints>)
|
||||
.add_request_handler(forward_read_only_project_request::<proto::ResolveInlayHint>)
|
||||
.add_request_handler(forward_read_only_project_request::<proto::OpenBufferByPath>)
|
||||
.add_request_handler(forward_read_only_project_request::<proto::GitBranches>)
|
||||
.add_request_handler(forward_mutating_project_request::<proto::UpdateGitBranch>)
|
||||
.add_request_handler(forward_mutating_project_request::<proto::GetCompletions>)
|
||||
.add_request_handler(
|
||||
forward_mutating_project_request::<proto::ApplyCompletionAdditionalEdits>,
|
||||
@@ -1794,6 +1793,11 @@ impl JoinProjectInternalResponse for Response<proto::JoinProject> {
|
||||
Response::<proto::JoinProject>::send(self, result)
|
||||
}
|
||||
}
|
||||
impl JoinProjectInternalResponse for Response<proto::JoinHostedProject> {
|
||||
fn send(self, result: proto::JoinProjectResponse) -> Result<()> {
|
||||
Response::<proto::JoinHostedProject>::send(self, result)
|
||||
}
|
||||
}
|
||||
|
||||
fn join_project_internal(
|
||||
response: impl JoinProjectInternalResponse,
|
||||
@@ -1827,7 +1831,6 @@ fn join_project_internal(
|
||||
peer_id: Some(session.connection_id.into()),
|
||||
replica_id: replica_id.0 as u32,
|
||||
user_id: guest_user_id.to_proto(),
|
||||
is_host: false,
|
||||
}),
|
||||
};
|
||||
|
||||
@@ -1918,6 +1921,11 @@ async fn leave_project(request: proto::LeaveProject, session: Session) -> Result
|
||||
let sender_id = session.connection_id;
|
||||
let project_id = ProjectId::from_proto(request.project_id);
|
||||
let db = session.db().await;
|
||||
if db.is_hosted_project(project_id).await? {
|
||||
let project = db.leave_hosted_project(project_id, sender_id).await?;
|
||||
project_left(&project, &session);
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
let (room, project) = &*db.leave_project(project_id, sender_id).await?;
|
||||
tracing::info!(
|
||||
@@ -1933,6 +1941,24 @@ async fn leave_project(request: proto::LeaveProject, session: Session) -> Result
|
||||
Ok(())
|
||||
}
|
||||
|
||||
async fn join_hosted_project(
|
||||
request: proto::JoinHostedProject,
|
||||
response: Response<proto::JoinHostedProject>,
|
||||
session: Session,
|
||||
) -> Result<()> {
|
||||
let (mut project, replica_id) = session
|
||||
.db()
|
||||
.await
|
||||
.join_hosted_project(
|
||||
ProjectId(request.project_id as i32),
|
||||
session.user_id(),
|
||||
session.connection_id,
|
||||
)
|
||||
.await?;
|
||||
|
||||
join_project_internal(response, session, &mut project, &replica_id)
|
||||
}
|
||||
|
||||
/// Updates other participants with changes to the project
|
||||
async fn update_project(
|
||||
request: proto::UpdateProject,
|
||||
@@ -4174,6 +4200,7 @@ fn build_channels_update(channels: ChannelsForUser) -> proto::UpdateChannels {
|
||||
update.channel_invitations.push(channel.to_proto());
|
||||
}
|
||||
|
||||
update.hosted_projects = channels.hosted_projects;
|
||||
update
|
||||
}
|
||||
|
||||
|
||||
@@ -1978,7 +1978,6 @@ async fn test_git_blame_is_forwarded(cx_a: &mut TestAppContext, cx_b: &mut TestA
|
||||
enabled: false,
|
||||
delay_ms: None,
|
||||
min_column: None,
|
||||
show_commit_summary: false,
|
||||
});
|
||||
cx_a.update(|cx| {
|
||||
SettingsStore::update_global(cx, |store, cx| {
|
||||
|
||||
@@ -1957,10 +1957,9 @@ async fn test_following_to_channel_notes_without_a_shared_project(
|
||||
});
|
||||
channel_notes_1_b.update(cx_b, |notes, cx| {
|
||||
assert_eq!(notes.channel(cx).unwrap().name, "channel-1");
|
||||
notes.editor.update(cx, |editor, cx| {
|
||||
assert_eq!(editor.text(cx), "Hello from A.");
|
||||
assert_eq!(editor.selections.ranges::<usize>(cx), &[3..4]);
|
||||
})
|
||||
let editor = notes.editor.read(cx);
|
||||
assert_eq!(editor.text(cx), "Hello from A.");
|
||||
assert_eq!(editor.selections.ranges::<usize>(cx), &[3..4]);
|
||||
});
|
||||
|
||||
// Client A opens the notes for channel 2.
|
||||
|
||||
@@ -21,8 +21,8 @@ use language::{
|
||||
language_settings::{
|
||||
AllLanguageSettings, Formatter, FormatterList, PrettierSettings, SelectedFormatter,
|
||||
},
|
||||
tree_sitter_rust, tree_sitter_typescript, Diagnostic, DiagnosticEntry, FakeLspAdapter,
|
||||
Language, LanguageConfig, LanguageMatcher, LineEnding, OffsetRangeExt, Point, Rope,
|
||||
tree_sitter_rust, Diagnostic, DiagnosticEntry, FakeLspAdapter, Language, LanguageConfig,
|
||||
LanguageMatcher, LineEnding, OffsetRangeExt, Point, Rope,
|
||||
};
|
||||
use live_kit_client::MacOSDisplay;
|
||||
use lsp::LanguageServerId;
|
||||
@@ -4461,7 +4461,7 @@ async fn test_prettier_formatting_buffer(
|
||||
},
|
||||
..Default::default()
|
||||
},
|
||||
Some(tree_sitter_typescript::LANGUAGE_TYPESCRIPT.into()),
|
||||
Some(tree_sitter_rust::LANGUAGE.into()),
|
||||
)));
|
||||
let mut fake_language_servers = client_a.language_registry().register_fake_lsp(
|
||||
"TypeScript",
|
||||
@@ -6575,95 +6575,3 @@ async fn test_context_collaboration_with_reconnect(
|
||||
assert!(context.buffer().read(cx).read_only());
|
||||
});
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_remote_git_branches(
|
||||
executor: BackgroundExecutor,
|
||||
cx_a: &mut TestAppContext,
|
||||
cx_b: &mut TestAppContext,
|
||||
) {
|
||||
let mut server = TestServer::start(executor.clone()).await;
|
||||
let client_a = server.create_client(cx_a, "user_a").await;
|
||||
let client_b = server.create_client(cx_b, "user_b").await;
|
||||
server
|
||||
.create_room(&mut [(&client_a, cx_a), (&client_b, cx_b)])
|
||||
.await;
|
||||
let active_call_a = cx_a.read(ActiveCall::global);
|
||||
|
||||
client_a
|
||||
.fs()
|
||||
.insert_tree("/project", serde_json::json!({ ".git":{} }))
|
||||
.await;
|
||||
let branches = ["main", "dev", "feature-1"];
|
||||
client_a
|
||||
.fs()
|
||||
.insert_branches(Path::new("/project/.git"), &branches);
|
||||
|
||||
let (project_a, worktree_id) = client_a.build_local_project("/project", cx_a).await;
|
||||
let project_id = active_call_a
|
||||
.update(cx_a, |call, cx| call.share_project(project_a.clone(), cx))
|
||||
.await
|
||||
.unwrap();
|
||||
let project_b = client_b.join_remote_project(project_id, cx_b).await;
|
||||
|
||||
let root_path = ProjectPath::root_path(worktree_id);
|
||||
// Client A sees that a guest has joined.
|
||||
executor.run_until_parked();
|
||||
|
||||
let branches_b = cx_b
|
||||
.update(|cx| project_b.update(cx, |project, cx| project.branches(root_path.clone(), cx)))
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
let new_branch = branches[2];
|
||||
|
||||
let branches_b = branches_b
|
||||
.into_iter()
|
||||
.map(|branch| branch.name)
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
assert_eq!(&branches_b, &branches);
|
||||
|
||||
cx_b.update(|cx| {
|
||||
project_b.update(cx, |project, cx| {
|
||||
project.update_or_create_branch(root_path.clone(), new_branch.to_string(), cx)
|
||||
})
|
||||
})
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
executor.run_until_parked();
|
||||
|
||||
let host_branch = cx_a.update(|cx| {
|
||||
project_a.update(cx, |project, cx| {
|
||||
project.worktree_store().update(cx, |worktree_store, cx| {
|
||||
worktree_store
|
||||
.current_branch(root_path.clone(), cx)
|
||||
.unwrap()
|
||||
})
|
||||
})
|
||||
});
|
||||
|
||||
assert_eq!(host_branch.as_ref(), branches[2]);
|
||||
|
||||
// Also try creating a new branch
|
||||
cx_b.update(|cx| {
|
||||
project_b.update(cx, |project, cx| {
|
||||
project.update_or_create_branch(root_path.clone(), "totally-new-branch".to_string(), cx)
|
||||
})
|
||||
})
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
executor.run_until_parked();
|
||||
|
||||
let host_branch = cx_a.update(|cx| {
|
||||
project_a.update(cx, |project, cx| {
|
||||
project.worktree_store().update(cx, |worktree_store, cx| {
|
||||
worktree_store.current_branch(root_path, cx).unwrap()
|
||||
})
|
||||
})
|
||||
});
|
||||
|
||||
assert_eq!(host_branch.as_ref(), "totally-new-branch");
|
||||
}
|
||||
|
||||
@@ -1,27 +1,14 @@
|
||||
use crate::tests::TestServer;
|
||||
use call::ActiveCall;
|
||||
use collections::HashSet;
|
||||
use fs::{FakeFs, Fs as _};
|
||||
use futures::StreamExt as _;
|
||||
use gpui::{BackgroundExecutor, Context as _, TestAppContext, UpdateGlobal as _};
|
||||
use gpui::{Context as _, TestAppContext};
|
||||
use http_client::BlockedHttpClient;
|
||||
use language::{
|
||||
language_settings::{
|
||||
language_settings, AllLanguageSettings, Formatter, FormatterList, PrettierSettings,
|
||||
SelectedFormatter,
|
||||
},
|
||||
tree_sitter_typescript, FakeLspAdapter, Language, LanguageConfig, LanguageMatcher,
|
||||
LanguageRegistry,
|
||||
};
|
||||
use language::{language_settings::language_settings, LanguageRegistry};
|
||||
use node_runtime::NodeRuntime;
|
||||
use project::{
|
||||
lsp_store::{FormatTarget, FormatTrigger},
|
||||
ProjectPath,
|
||||
};
|
||||
use project::ProjectPath;
|
||||
use remote::SshRemoteClient;
|
||||
use remote_server::{HeadlessAppState, HeadlessProject};
|
||||
use serde_json::json;
|
||||
use settings::SettingsStore;
|
||||
use std::{path::Path, sync::Arc};
|
||||
|
||||
#[gpui::test(iterations = 10)]
|
||||
@@ -187,311 +174,3 @@ async fn test_sharing_an_ssh_remote_project(
|
||||
);
|
||||
});
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_ssh_collaboration_git_branches(
|
||||
executor: BackgroundExecutor,
|
||||
cx_a: &mut TestAppContext,
|
||||
cx_b: &mut TestAppContext,
|
||||
server_cx: &mut TestAppContext,
|
||||
) {
|
||||
cx_a.set_name("a");
|
||||
cx_b.set_name("b");
|
||||
server_cx.set_name("server");
|
||||
|
||||
let mut server = TestServer::start(executor.clone()).await;
|
||||
let client_a = server.create_client(cx_a, "user_a").await;
|
||||
let client_b = server.create_client(cx_b, "user_b").await;
|
||||
server
|
||||
.create_room(&mut [(&client_a, cx_a), (&client_b, cx_b)])
|
||||
.await;
|
||||
|
||||
// Set up project on remote FS
|
||||
let (opts, server_ssh) = SshRemoteClient::fake_server(cx_a, server_cx);
|
||||
let remote_fs = FakeFs::new(server_cx.executor());
|
||||
remote_fs
|
||||
.insert_tree("/project", serde_json::json!({ ".git":{} }))
|
||||
.await;
|
||||
|
||||
let branches = ["main", "dev", "feature-1"];
|
||||
remote_fs.insert_branches(Path::new("/project/.git"), &branches);
|
||||
|
||||
// User A connects to the remote project via SSH.
|
||||
server_cx.update(HeadlessProject::init);
|
||||
let remote_http_client = Arc::new(BlockedHttpClient);
|
||||
let node = NodeRuntime::unavailable();
|
||||
let languages = Arc::new(LanguageRegistry::new(server_cx.executor()));
|
||||
let headless_project = server_cx.new_model(|cx| {
|
||||
client::init_settings(cx);
|
||||
HeadlessProject::new(
|
||||
HeadlessAppState {
|
||||
session: server_ssh,
|
||||
fs: remote_fs.clone(),
|
||||
http_client: remote_http_client,
|
||||
node_runtime: node,
|
||||
languages,
|
||||
},
|
||||
cx,
|
||||
)
|
||||
});
|
||||
|
||||
let client_ssh = SshRemoteClient::fake_client(opts, cx_a).await;
|
||||
let (project_a, worktree_id) = client_a
|
||||
.build_ssh_project("/project", client_ssh, cx_a)
|
||||
.await;
|
||||
|
||||
// While the SSH worktree is being scanned, user A shares the remote project.
|
||||
let active_call_a = cx_a.read(ActiveCall::global);
|
||||
let project_id = active_call_a
|
||||
.update(cx_a, |call, cx| call.share_project(project_a.clone(), cx))
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
// User B joins the project.
|
||||
let project_b = client_b.join_remote_project(project_id, cx_b).await;
|
||||
|
||||
// Give client A sometime to see that B has joined, and that the headless server
|
||||
// has some git repositories
|
||||
executor.run_until_parked();
|
||||
|
||||
let root_path = ProjectPath::root_path(worktree_id);
|
||||
|
||||
let branches_b = cx_b
|
||||
.update(|cx| project_b.update(cx, |project, cx| project.branches(root_path.clone(), cx)))
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
let new_branch = branches[2];
|
||||
|
||||
let branches_b = branches_b
|
||||
.into_iter()
|
||||
.map(|branch| branch.name)
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
assert_eq!(&branches_b, &branches);
|
||||
|
||||
cx_b.update(|cx| {
|
||||
project_b.update(cx, |project, cx| {
|
||||
project.update_or_create_branch(root_path.clone(), new_branch.to_string(), cx)
|
||||
})
|
||||
})
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
executor.run_until_parked();
|
||||
|
||||
let server_branch = server_cx.update(|cx| {
|
||||
headless_project.update(cx, |headless_project, cx| {
|
||||
headless_project
|
||||
.worktree_store
|
||||
.update(cx, |worktree_store, cx| {
|
||||
worktree_store
|
||||
.current_branch(root_path.clone(), cx)
|
||||
.unwrap()
|
||||
})
|
||||
})
|
||||
});
|
||||
|
||||
assert_eq!(server_branch.as_ref(), branches[2]);
|
||||
|
||||
// Also try creating a new branch
|
||||
cx_b.update(|cx| {
|
||||
project_b.update(cx, |project, cx| {
|
||||
project.update_or_create_branch(root_path.clone(), "totally-new-branch".to_string(), cx)
|
||||
})
|
||||
})
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
executor.run_until_parked();
|
||||
|
||||
let server_branch = server_cx.update(|cx| {
|
||||
headless_project.update(cx, |headless_project, cx| {
|
||||
headless_project
|
||||
.worktree_store
|
||||
.update(cx, |worktree_store, cx| {
|
||||
worktree_store.current_branch(root_path, cx).unwrap()
|
||||
})
|
||||
})
|
||||
});
|
||||
|
||||
assert_eq!(server_branch.as_ref(), "totally-new-branch");
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_ssh_collaboration_formatting_with_prettier(
|
||||
executor: BackgroundExecutor,
|
||||
cx_a: &mut TestAppContext,
|
||||
cx_b: &mut TestAppContext,
|
||||
server_cx: &mut TestAppContext,
|
||||
) {
|
||||
cx_a.set_name("a");
|
||||
cx_b.set_name("b");
|
||||
server_cx.set_name("server");
|
||||
|
||||
let mut server = TestServer::start(executor.clone()).await;
|
||||
let client_a = server.create_client(cx_a, "user_a").await;
|
||||
let client_b = server.create_client(cx_b, "user_b").await;
|
||||
server
|
||||
.create_room(&mut [(&client_a, cx_a), (&client_b, cx_b)])
|
||||
.await;
|
||||
|
||||
let (opts, server_ssh) = SshRemoteClient::fake_server(cx_a, server_cx);
|
||||
let remote_fs = FakeFs::new(server_cx.executor());
|
||||
let buffer_text = "let one = \"two\"";
|
||||
let prettier_format_suffix = project::TEST_PRETTIER_FORMAT_SUFFIX;
|
||||
remote_fs
|
||||
.insert_tree("/project", serde_json::json!({ "a.ts": buffer_text }))
|
||||
.await;
|
||||
|
||||
let test_plugin = "test_plugin";
|
||||
let ts_lang = Arc::new(Language::new(
|
||||
LanguageConfig {
|
||||
name: "TypeScript".into(),
|
||||
matcher: LanguageMatcher {
|
||||
path_suffixes: vec!["ts".to_string()],
|
||||
..LanguageMatcher::default()
|
||||
},
|
||||
..LanguageConfig::default()
|
||||
},
|
||||
Some(tree_sitter_typescript::LANGUAGE_TYPESCRIPT.into()),
|
||||
));
|
||||
client_a.language_registry().add(ts_lang.clone());
|
||||
client_b.language_registry().add(ts_lang.clone());
|
||||
|
||||
let languages = Arc::new(LanguageRegistry::new(server_cx.executor()));
|
||||
let mut fake_language_servers = languages.register_fake_lsp(
|
||||
"TypeScript",
|
||||
FakeLspAdapter {
|
||||
prettier_plugins: vec![test_plugin],
|
||||
..Default::default()
|
||||
},
|
||||
);
|
||||
|
||||
// User A connects to the remote project via SSH.
|
||||
server_cx.update(HeadlessProject::init);
|
||||
let remote_http_client = Arc::new(BlockedHttpClient);
|
||||
let _headless_project = server_cx.new_model(|cx| {
|
||||
client::init_settings(cx);
|
||||
HeadlessProject::new(
|
||||
HeadlessAppState {
|
||||
session: server_ssh,
|
||||
fs: remote_fs.clone(),
|
||||
http_client: remote_http_client,
|
||||
node_runtime: NodeRuntime::unavailable(),
|
||||
languages,
|
||||
},
|
||||
cx,
|
||||
)
|
||||
});
|
||||
|
||||
let client_ssh = SshRemoteClient::fake_client(opts, cx_a).await;
|
||||
let (project_a, worktree_id) = client_a
|
||||
.build_ssh_project("/project", client_ssh, cx_a)
|
||||
.await;
|
||||
|
||||
// While the SSH worktree is being scanned, user A shares the remote project.
|
||||
let active_call_a = cx_a.read(ActiveCall::global);
|
||||
let project_id = active_call_a
|
||||
.update(cx_a, |call, cx| call.share_project(project_a.clone(), cx))
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
// User B joins the project.
|
||||
let project_b = client_b.join_remote_project(project_id, cx_b).await;
|
||||
executor.run_until_parked();
|
||||
|
||||
// Opens the buffer and formats it
|
||||
let buffer_b = project_b
|
||||
.update(cx_b, |p, cx| p.open_buffer((worktree_id, "a.ts"), cx))
|
||||
.await
|
||||
.expect("user B opens buffer for formatting");
|
||||
|
||||
cx_a.update(|cx| {
|
||||
SettingsStore::update_global(cx, |store, cx| {
|
||||
store.update_user_settings::<AllLanguageSettings>(cx, |file| {
|
||||
file.defaults.formatter = Some(SelectedFormatter::Auto);
|
||||
file.defaults.prettier = Some(PrettierSettings {
|
||||
allowed: true,
|
||||
..PrettierSettings::default()
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
cx_b.update(|cx| {
|
||||
SettingsStore::update_global(cx, |store, cx| {
|
||||
store.update_user_settings::<AllLanguageSettings>(cx, |file| {
|
||||
file.defaults.formatter = Some(SelectedFormatter::List(FormatterList(
|
||||
vec![Formatter::LanguageServer { name: None }].into(),
|
||||
)));
|
||||
file.defaults.prettier = Some(PrettierSettings {
|
||||
allowed: true,
|
||||
..PrettierSettings::default()
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
let fake_language_server = fake_language_servers.next().await.unwrap();
|
||||
fake_language_server.handle_request::<lsp::request::Formatting, _, _>(|_, _| async move {
|
||||
panic!(
|
||||
"Unexpected: prettier should be preferred since it's enabled and language supports it"
|
||||
)
|
||||
});
|
||||
|
||||
project_b
|
||||
.update(cx_b, |project, cx| {
|
||||
project.format(
|
||||
HashSet::from_iter([buffer_b.clone()]),
|
||||
true,
|
||||
FormatTrigger::Save,
|
||||
FormatTarget::Buffer,
|
||||
cx,
|
||||
)
|
||||
})
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
executor.run_until_parked();
|
||||
assert_eq!(
|
||||
buffer_b.read_with(cx_b, |buffer, _| buffer.text()),
|
||||
buffer_text.to_string() + "\n" + prettier_format_suffix,
|
||||
"Prettier formatting was not applied to client buffer after client's request"
|
||||
);
|
||||
|
||||
// User A opens and formats the same buffer too
|
||||
let buffer_a = project_a
|
||||
.update(cx_a, |p, cx| p.open_buffer((worktree_id, "a.ts"), cx))
|
||||
.await
|
||||
.expect("user A opens buffer for formatting");
|
||||
|
||||
cx_a.update(|cx| {
|
||||
SettingsStore::update_global(cx, |store, cx| {
|
||||
store.update_user_settings::<AllLanguageSettings>(cx, |file| {
|
||||
file.defaults.formatter = Some(SelectedFormatter::Auto);
|
||||
file.defaults.prettier = Some(PrettierSettings {
|
||||
allowed: true,
|
||||
..PrettierSettings::default()
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
project_a
|
||||
.update(cx_a, |project, cx| {
|
||||
project.format(
|
||||
HashSet::from_iter([buffer_a.clone()]),
|
||||
true,
|
||||
FormatTrigger::Manual,
|
||||
FormatTarget::Buffer,
|
||||
cx,
|
||||
)
|
||||
})
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
executor.run_until_parked();
|
||||
assert_eq!(
|
||||
buffer_b.read_with(cx_b, |buffer, _| buffer.text()),
|
||||
buffer_text.to_string() + "\n" + prettier_format_suffix + "\n" + prettier_format_suffix,
|
||||
"Prettier formatting was not applied to client buffer after host's request"
|
||||
);
|
||||
}
|
||||
|
||||
@@ -5,7 +5,7 @@ use self::channel_modal::ChannelModal;
|
||||
use crate::{channel_view::ChannelView, chat_panel::ChatPanel, CollaborationPanelSettings};
|
||||
use call::ActiveCall;
|
||||
use channel::{Channel, ChannelEvent, ChannelStore};
|
||||
use client::{ChannelId, Client, Contact, User, UserStore};
|
||||
use client::{ChannelId, Client, Contact, ProjectId, User, UserStore};
|
||||
use contact_finder::ContactFinder;
|
||||
use db::kvp::KEY_VALUE_STORE;
|
||||
use editor::{Editor, EditorElement, EditorStyle};
|
||||
@@ -182,6 +182,10 @@ enum ListEntry {
|
||||
ChannelEditor {
|
||||
depth: usize,
|
||||
},
|
||||
HostedProject {
|
||||
id: ProjectId,
|
||||
name: SharedString,
|
||||
},
|
||||
Contact {
|
||||
contact: Arc<Contact>,
|
||||
calling: bool,
|
||||
@@ -562,6 +566,7 @@ impl CollabPanel {
|
||||
}
|
||||
}
|
||||
|
||||
let hosted_projects = channel_store.projects_for_id(channel.id);
|
||||
let has_children = channel_store
|
||||
.channel_at_index(mat.candidate_id + 1)
|
||||
.map_or(false, |next_channel| {
|
||||
@@ -595,6 +600,10 @@ impl CollabPanel {
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
for (name, id) in hosted_projects {
|
||||
self.entries.push(ListEntry::HostedProject { id, name });
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1020,6 +1029,40 @@ impl CollabPanel {
|
||||
.tooltip(move |cx| Tooltip::text("Open Chat", cx))
|
||||
}
|
||||
|
||||
fn render_channel_project(
|
||||
&self,
|
||||
id: ProjectId,
|
||||
name: &SharedString,
|
||||
is_selected: bool,
|
||||
cx: &mut ViewContext<Self>,
|
||||
) -> impl IntoElement {
|
||||
ListItem::new(ElementId::NamedInteger(
|
||||
"channel-project".into(),
|
||||
id.0 as usize,
|
||||
))
|
||||
.indent_level(2)
|
||||
.indent_step_size(px(20.))
|
||||
.selected(is_selected)
|
||||
.on_click(cx.listener(move |this, _, cx| {
|
||||
if let Some(workspace) = this.workspace.upgrade() {
|
||||
let app_state = workspace.read(cx).app_state().clone();
|
||||
workspace::join_hosted_project(id, app_state, cx).detach_and_prompt_err(
|
||||
"Failed to open project",
|
||||
cx,
|
||||
|_, _| None,
|
||||
)
|
||||
}
|
||||
}))
|
||||
.start_slot(
|
||||
h_flex()
|
||||
.relative()
|
||||
.gap_1()
|
||||
.child(IconButton::new(0, IconName::FileTree)),
|
||||
)
|
||||
.child(Label::new(name.clone()))
|
||||
.tooltip(move |cx| Tooltip::text("Open Project", cx))
|
||||
}
|
||||
|
||||
fn has_subchannels(&self, ix: usize) -> bool {
|
||||
self.entries.get(ix).map_or(false, |entry| {
|
||||
if let ListEntry::Channel { has_children, .. } = entry {
|
||||
@@ -1495,6 +1538,12 @@ impl CollabPanel {
|
||||
ListEntry::ChannelChat { channel_id } => {
|
||||
self.join_channel_chat(*channel_id, cx)
|
||||
}
|
||||
ListEntry::HostedProject {
|
||||
id: _id,
|
||||
name: _name,
|
||||
} => {
|
||||
// todo()
|
||||
}
|
||||
ListEntry::OutgoingRequest(_) => {}
|
||||
ListEntry::ChannelEditor { .. } => {}
|
||||
}
|
||||
@@ -2108,6 +2157,10 @@ impl CollabPanel {
|
||||
ListEntry::ChannelChat { channel_id } => self
|
||||
.render_channel_chat(*channel_id, is_selected, cx)
|
||||
.into_any_element(),
|
||||
|
||||
ListEntry::HostedProject { id, name } => self
|
||||
.render_channel_project(*id, name, is_selected, cx)
|
||||
.into_any_element(),
|
||||
}
|
||||
}
|
||||
|
||||
@@ -2726,7 +2779,7 @@ impl Render for CollabPanel {
|
||||
.on_action(cx.listener(CollabPanel::collapse_selected_channel))
|
||||
.on_action(cx.listener(CollabPanel::expand_selected_channel))
|
||||
.on_action(cx.listener(CollabPanel::start_move_selected_channel))
|
||||
.track_focus(&self.focus_handle(cx))
|
||||
.track_focus(&self.focus_handle)
|
||||
.size_full()
|
||||
.child(if self.user_store.read(cx).current_user().is_none() {
|
||||
self.render_signed_out(cx)
|
||||
@@ -2845,6 +2898,11 @@ impl PartialEq for ListEntry {
|
||||
return channel_1.id == channel_2.id;
|
||||
}
|
||||
}
|
||||
ListEntry::HostedProject { id, .. } => {
|
||||
if let ListEntry::HostedProject { id: other_id, .. } = other {
|
||||
return id == other_id;
|
||||
}
|
||||
}
|
||||
ListEntry::ChannelNotes { channel_id } => {
|
||||
if let ListEntry::ChannelNotes {
|
||||
channel_id: other_id,
|
||||
|
||||
@@ -180,39 +180,6 @@ impl InitializedContextServerProtocol {
|
||||
|
||||
Ok(completion)
|
||||
}
|
||||
|
||||
/// List MCP tools.
|
||||
pub async fn list_tools(&self) -> Result<types::ListToolsResponse> {
|
||||
self.check_capability(ServerCapability::Tools)?;
|
||||
|
||||
let response = self
|
||||
.inner
|
||||
.request::<types::ListToolsResponse>(types::RequestType::ListTools.as_str(), ())
|
||||
.await?;
|
||||
|
||||
Ok(response)
|
||||
}
|
||||
|
||||
/// Executes a tool with the given arguments
|
||||
pub async fn run_tool<P: AsRef<str>>(
|
||||
&self,
|
||||
tool: P,
|
||||
arguments: Option<HashMap<String, serde_json::Value>>,
|
||||
) -> Result<types::CallToolResponse> {
|
||||
self.check_capability(ServerCapability::Tools)?;
|
||||
|
||||
let params = types::CallToolParams {
|
||||
name: tool.as_ref().to_string(),
|
||||
arguments,
|
||||
};
|
||||
|
||||
let response: types::CallToolResponse = self
|
||||
.inner
|
||||
.request(types::RequestType::CallTool.as_str(), params)
|
||||
.await?;
|
||||
|
||||
Ok(response)
|
||||
}
|
||||
}
|
||||
|
||||
impl InitializedContextServerProtocol {
|
||||
|
||||
@@ -9,8 +9,7 @@ struct GlobalContextServerRegistry(Arc<ContextServerRegistry>);
|
||||
impl Global for GlobalContextServerRegistry {}
|
||||
|
||||
pub struct ContextServerRegistry {
|
||||
command_registry: RwLock<HashMap<String, Vec<Arc<str>>>>,
|
||||
tool_registry: RwLock<HashMap<String, Vec<Arc<str>>>>,
|
||||
registry: RwLock<HashMap<String, Vec<Arc<str>>>>,
|
||||
}
|
||||
|
||||
impl ContextServerRegistry {
|
||||
@@ -21,14 +20,13 @@ impl ContextServerRegistry {
|
||||
pub fn register(cx: &mut AppContext) {
|
||||
cx.set_global(GlobalContextServerRegistry(Arc::new(
|
||||
ContextServerRegistry {
|
||||
command_registry: RwLock::new(HashMap::default()),
|
||||
tool_registry: RwLock::new(HashMap::default()),
|
||||
registry: RwLock::new(HashMap::default()),
|
||||
},
|
||||
)))
|
||||
}
|
||||
|
||||
pub fn register_command(&self, server_id: String, command_name: &str) {
|
||||
let mut registry = self.command_registry.write();
|
||||
let mut registry = self.registry.write();
|
||||
registry
|
||||
.entry(server_id)
|
||||
.or_default()
|
||||
@@ -36,34 +34,14 @@ impl ContextServerRegistry {
|
||||
}
|
||||
|
||||
pub fn unregister_command(&self, server_id: &str, command_name: &str) {
|
||||
let mut registry = self.command_registry.write();
|
||||
let mut registry = self.registry.write();
|
||||
if let Some(commands) = registry.get_mut(server_id) {
|
||||
commands.retain(|name| name.as_ref() != command_name);
|
||||
}
|
||||
}
|
||||
|
||||
pub fn get_commands(&self, server_id: &str) -> Option<Vec<Arc<str>>> {
|
||||
let registry = self.command_registry.read();
|
||||
registry.get(server_id).cloned()
|
||||
}
|
||||
|
||||
pub fn register_tool(&self, server_id: String, tool_name: &str) {
|
||||
let mut registry = self.tool_registry.write();
|
||||
registry
|
||||
.entry(server_id)
|
||||
.or_default()
|
||||
.push(tool_name.into());
|
||||
}
|
||||
|
||||
pub fn unregister_tool(&self, server_id: &str, tool_name: &str) {
|
||||
let mut registry = self.tool_registry.write();
|
||||
if let Some(tools) = registry.get_mut(server_id) {
|
||||
tools.retain(|name| name.as_ref() != tool_name);
|
||||
}
|
||||
}
|
||||
|
||||
pub fn get_tools(&self, server_id: &str) -> Option<Vec<Arc<str>>> {
|
||||
let registry = self.tool_registry.read();
|
||||
let registry = self.registry.read();
|
||||
registry.get(server_id).cloned()
|
||||
}
|
||||
}
|
||||
|
||||
@@ -16,8 +16,6 @@ pub enum RequestType {
|
||||
PromptsList,
|
||||
CompletionComplete,
|
||||
Ping,
|
||||
ListTools,
|
||||
ListResourceTemplates,
|
||||
}
|
||||
|
||||
impl RequestType {
|
||||
@@ -34,8 +32,6 @@ impl RequestType {
|
||||
RequestType::PromptsList => "prompts/list",
|
||||
RequestType::CompletionComplete => "completion/complete",
|
||||
RequestType::Ping => "ping",
|
||||
RequestType::ListTools => "tools/list",
|
||||
RequestType::ListResourceTemplates => "resources/templates/list",
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -406,17 +402,3 @@ pub struct Completion {
    pub values: Vec<String>,
    pub total: CompletionTotal,
}

#[derive(Debug, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct CallToolResponse {
    pub tool_result: serde_json::Value,
}

#[derive(Debug, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct ListToolsResponse {
    pub tools: Vec<Tool>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub next_cursor: Option<String>,
}
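These response types use serde's camelCase renaming, so the wire field toolResult maps onto tool_result. A small illustrative round trip; the JSON payload is made up:

// Deserializing a minimal tool call response; assumes only serde_json.
let raw = r#"{ "toolResult": "3 files matched" }"#;
let response: CallToolResponse = serde_json::from_str(raw)?;
assert!(matches!(response.tool_result, serde_json::Value::String(_)));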
|
||||
|
||||
@@ -185,7 +185,7 @@ impl Render for CopilotCodeVerification {
|
||||
|
||||
v_flex()
|
||||
.id("copilot code verification")
|
||||
.track_focus(&self.focus_handle(cx))
|
||||
.track_focus(&self.focus_handle)
|
||||
.elevation_3(cx)
|
||||
.w_96()
|
||||
.items_center()
|
||||
|
||||
@@ -101,7 +101,7 @@ impl Render for ProjectDiagnosticsEditor {
|
||||
};
|
||||
|
||||
div()
|
||||
.track_focus(&self.focus_handle(cx))
|
||||
.track_focus(&self.focus_handle)
|
||||
.when(self.path_states.is_empty(), |el| {
|
||||
el.key_context("EmptyPane")
|
||||
})
|
||||
|
||||
@@ -986,7 +986,6 @@ fn editor_blocks(
|
||||
em_width: px(0.),
|
||||
max_width: px(0.),
|
||||
block_id,
|
||||
selected: false,
|
||||
editor_style: &editor::EditorStyle::default(),
|
||||
});
|
||||
let element = element.downcast_mut::<Stateful<Div>>().unwrap();
|
||||
|
||||
@@ -136,12 +136,11 @@ impl DiagnosticIndicator {
|
||||
}
|
||||
|
||||
fn update(&mut self, editor: View<Editor>, cx: &mut ViewContext<Self>) {
|
||||
let (buffer, cursor_position) = editor.update(cx, |editor, cx| {
|
||||
let buffer = editor.buffer().read(cx).snapshot(cx);
|
||||
let cursor_position = editor.selections.newest::<usize>(cx).head();
|
||||
(buffer, cursor_position)
|
||||
});
|
||||
let editor = editor.read(cx);
|
||||
let buffer = editor.buffer().read(cx);
|
||||
let cursor_position = editor.selections.newest::<usize>(cx).head();
|
||||
let new_diagnostic = buffer
|
||||
.snapshot(cx)
|
||||
.diagnostics_in_range::<_, usize>(cursor_position..cursor_position, false)
|
||||
.filter(|entry| !entry.range.is_empty())
|
||||
.min_by_key(|entry| (entry.diagnostic.severity, entry.range.len()))
|
||||
|
||||
@@ -76,12 +76,12 @@ theme.workspace = true
|
||||
tree-sitter-html = { workspace = true, optional = true }
|
||||
tree-sitter-rust = { workspace = true, optional = true }
|
||||
tree-sitter-typescript = { workspace = true, optional = true }
|
||||
unicode-segmentation.workspace = true
|
||||
unindent = { workspace = true, optional = true }
|
||||
ui.workspace = true
|
||||
url.workspace = true
|
||||
util.workspace = true
|
||||
workspace.workspace = true
|
||||
unicode-segmentation.workspace = true
|
||||
|
||||
[dev-dependencies]
|
||||
ctor.workspace = true
|
||||
|
||||
@@ -153,10 +153,6 @@ pub struct DeleteToPreviousWordStart {
|
||||
pub ignore_newlines: bool,
|
||||
}
|
||||
|
||||
#[derive(PartialEq, Clone, Deserialize, Default)]
|
||||
pub struct FoldAtLevel {
|
||||
pub level: u32,
|
||||
}
|
||||
impl_actions!(
|
||||
editor,
|
||||
[
|
||||
@@ -186,7 +182,6 @@ impl_actions!(
|
||||
ToggleCodeActions,
|
||||
ToggleComments,
|
||||
UnfoldAt,
|
||||
FoldAtLevel
|
||||
]
|
||||
);
|
||||
|
||||
@@ -198,7 +193,6 @@ gpui::actions!(
|
||||
AcceptPartialInlineCompletion,
|
||||
AddSelectionAbove,
|
||||
AddSelectionBelow,
|
||||
ApplyAllDiffHunks,
|
||||
ApplyDiffHunk,
|
||||
Backspace,
|
||||
Cancel,
|
||||
|
||||
@@ -8,7 +8,7 @@
|
||||
//! of several smaller structures that form a hierarchy (starting at the bottom):
|
||||
//! - [`InlayMap`] that decides where the [`Inlay`]s should be displayed.
|
||||
//! - [`FoldMap`] that decides where the fold indicators should be; it also tracks parts of a source file that are currently folded.
|
||||
//! - [`TabMap`] that keeps track of hard tabs in a buffer.
|
||||
//! - [`CharMap`] that replaces tabs and non-printable characters
|
||||
//! - [`WrapMap`] that handles soft wrapping.
|
||||
//! - [`BlockMap`] that tracks custom blocks such as diagnostics that should be displayed within buffer.
|
||||
//! - [`DisplayMap`] that adds background highlights to the regions of text.
|
||||
@@ -18,11 +18,11 @@
|
||||
//! [EditorElement]: crate::element::EditorElement
|
||||
|
||||
mod block_map;
|
||||
mod char_map;
|
||||
mod crease_map;
|
||||
mod fold_map;
|
||||
mod inlay_map;
|
||||
pub(crate) mod invisibles;
|
||||
mod tab_map;
|
||||
mod invisibles;
|
||||
mod wrap_map;
|
||||
|
||||
use crate::{
|
||||
@@ -33,6 +33,7 @@ pub use block_map::{
|
||||
BlockPlacement, BlockPoint, BlockProperties, BlockStyle, CustomBlockId, RenderBlock,
|
||||
};
|
||||
use block_map::{BlockRow, BlockSnapshot};
|
||||
use char_map::{CharMap, CharSnapshot};
|
||||
use collections::{HashMap, HashSet};
|
||||
pub use crease_map::*;
|
||||
pub use fold_map::{Fold, FoldId, FoldPlaceholder, FoldPoint};
|
||||
@@ -43,7 +44,7 @@ use gpui::{
|
||||
pub(crate) use inlay_map::Inlay;
|
||||
use inlay_map::{InlayMap, InlaySnapshot};
|
||||
pub use inlay_map::{InlayOffset, InlayPoint};
|
||||
use invisibles::{is_invisible, replacement};
|
||||
pub use invisibles::is_invisible;
|
||||
use language::{
|
||||
language_settings::language_settings, ChunkRenderer, OffsetUtf16, Point,
|
||||
Subscription as BufferSubscription,
|
||||
@@ -58,15 +59,13 @@ use std::{
|
||||
any::TypeId,
|
||||
borrow::Cow,
|
||||
fmt::Debug,
|
||||
iter,
|
||||
num::NonZeroU32,
|
||||
ops::{Add, Range, Sub},
|
||||
sync::Arc,
|
||||
};
|
||||
use sum_tree::{Bias, TreeMap};
|
||||
use tab_map::{TabMap, TabSnapshot};
|
||||
use text::LineIndent;
|
||||
use ui::{div, px, IntoElement, ParentElement, SharedString, Styled, WindowContext};
|
||||
use ui::{px, WindowContext};
|
||||
use unicode_segmentation::UnicodeSegmentation;
|
||||
use wrap_map::{WrapMap, WrapSnapshot};
|
||||
|
||||
@@ -98,7 +97,7 @@ pub struct DisplayMap {
|
||||
/// Decides where the fold indicators should be and tracks parts of a source file that are currently folded.
|
||||
fold_map: FoldMap,
|
||||
/// Keeps track of hard tabs in a buffer.
|
||||
tab_map: TabMap,
|
||||
char_map: CharMap,
|
||||
/// Handles soft wrapping.
|
||||
wrap_map: Model<WrapMap>,
|
||||
/// Tracks custom blocks such as diagnostics that should be displayed within buffer.
|
||||
@@ -135,7 +134,7 @@ impl DisplayMap {
|
||||
let crease_map = CreaseMap::new(&buffer_snapshot);
|
||||
let (inlay_map, snapshot) = InlayMap::new(buffer_snapshot);
|
||||
let (fold_map, snapshot) = FoldMap::new(snapshot);
|
||||
let (tab_map, snapshot) = TabMap::new(snapshot, tab_size);
|
||||
let (char_map, snapshot) = CharMap::new(snapshot, tab_size);
|
||||
let (wrap_map, snapshot) = WrapMap::new(snapshot, font, font_size, wrap_width, cx);
|
||||
let block_map = BlockMap::new(
|
||||
snapshot,
|
||||
@@ -152,7 +151,7 @@ impl DisplayMap {
|
||||
buffer_subscription,
|
||||
fold_map,
|
||||
inlay_map,
|
||||
tab_map,
|
||||
char_map,
|
||||
wrap_map,
|
||||
block_map,
|
||||
crease_map,
|
||||
@@ -170,17 +169,17 @@ impl DisplayMap {
|
||||
let (inlay_snapshot, edits) = self.inlay_map.sync(buffer_snapshot, edits);
|
||||
let (fold_snapshot, edits) = self.fold_map.read(inlay_snapshot.clone(), edits);
|
||||
let tab_size = Self::tab_size(&self.buffer, cx);
|
||||
let (tab_snapshot, edits) = self.tab_map.sync(fold_snapshot.clone(), edits, tab_size);
|
||||
let (char_snapshot, edits) = self.char_map.sync(fold_snapshot.clone(), edits, tab_size);
|
||||
let (wrap_snapshot, edits) = self
|
||||
.wrap_map
|
||||
.update(cx, |map, cx| map.sync(tab_snapshot.clone(), edits, cx));
|
||||
.update(cx, |map, cx| map.sync(char_snapshot.clone(), edits, cx));
|
||||
let block_snapshot = self.block_map.read(wrap_snapshot.clone(), edits).snapshot;
|
||||
|
||||
DisplaySnapshot {
|
||||
buffer_snapshot: self.buffer.read(cx).snapshot(cx),
|
||||
fold_snapshot,
|
||||
inlay_snapshot,
|
||||
tab_snapshot,
|
||||
char_snapshot,
|
||||
wrap_snapshot,
|
||||
block_snapshot,
|
||||
crease_snapshot: self.crease_map.snapshot(),
|
||||
@@ -216,13 +215,13 @@ impl DisplayMap {
|
||||
let tab_size = Self::tab_size(&self.buffer, cx);
|
||||
let (snapshot, edits) = self.inlay_map.sync(snapshot, edits);
|
||||
let (mut fold_map, snapshot, edits) = self.fold_map.write(snapshot, edits);
|
||||
let (snapshot, edits) = self.tab_map.sync(snapshot, edits, tab_size);
|
||||
let (snapshot, edits) = self.char_map.sync(snapshot, edits, tab_size);
|
||||
let (snapshot, edits) = self
|
||||
.wrap_map
|
||||
.update(cx, |map, cx| map.sync(snapshot, edits, cx));
|
||||
self.block_map.read(snapshot, edits);
|
||||
let (snapshot, edits) = fold_map.fold(ranges);
|
||||
let (snapshot, edits) = self.tab_map.sync(snapshot, edits, tab_size);
|
||||
let (snapshot, edits) = self.char_map.sync(snapshot, edits, tab_size);
|
||||
let (snapshot, edits) = self
|
||||
.wrap_map
|
||||
.update(cx, |map, cx| map.sync(snapshot, edits, cx));
|
||||
@@ -240,13 +239,13 @@ impl DisplayMap {
|
||||
let tab_size = Self::tab_size(&self.buffer, cx);
|
||||
let (snapshot, edits) = self.inlay_map.sync(snapshot, edits);
|
||||
let (mut fold_map, snapshot, edits) = self.fold_map.write(snapshot, edits);
|
||||
let (snapshot, edits) = self.tab_map.sync(snapshot, edits, tab_size);
|
||||
let (snapshot, edits) = self.char_map.sync(snapshot, edits, tab_size);
|
||||
let (snapshot, edits) = self
|
||||
.wrap_map
|
||||
.update(cx, |map, cx| map.sync(snapshot, edits, cx));
|
||||
self.block_map.read(snapshot, edits);
|
||||
let (snapshot, edits) = fold_map.unfold(ranges, inclusive);
|
||||
let (snapshot, edits) = self.tab_map.sync(snapshot, edits, tab_size);
|
||||
let (snapshot, edits) = self.char_map.sync(snapshot, edits, tab_size);
|
||||
let (snapshot, edits) = self
|
||||
.wrap_map
|
||||
.update(cx, |map, cx| map.sync(snapshot, edits, cx));
|
||||
@@ -255,7 +254,7 @@ impl DisplayMap {
|
||||
|
||||
pub fn insert_creases(
|
||||
&mut self,
|
||||
creases: impl IntoIterator<Item = Crease<Anchor>>,
|
||||
creases: impl IntoIterator<Item = Crease>,
|
||||
cx: &mut ModelContext<Self>,
|
||||
) -> Vec<CreaseId> {
|
||||
let snapshot = self.buffer.read(cx).snapshot(cx);
|
||||
@@ -281,7 +280,7 @@ impl DisplayMap {
|
||||
let tab_size = Self::tab_size(&self.buffer, cx);
|
||||
let (snapshot, edits) = self.inlay_map.sync(snapshot, edits);
|
||||
let (snapshot, edits) = self.fold_map.read(snapshot, edits);
|
||||
let (snapshot, edits) = self.tab_map.sync(snapshot, edits, tab_size);
|
||||
let (snapshot, edits) = self.char_map.sync(snapshot, edits, tab_size);
|
||||
let (snapshot, edits) = self
|
||||
.wrap_map
|
||||
.update(cx, |map, cx| map.sync(snapshot, edits, cx));
|
||||
@@ -299,7 +298,7 @@ impl DisplayMap {
|
||||
let tab_size = Self::tab_size(&self.buffer, cx);
|
||||
let (snapshot, edits) = self.inlay_map.sync(snapshot, edits);
|
||||
let (snapshot, edits) = self.fold_map.read(snapshot, edits);
|
||||
let (snapshot, edits) = self.tab_map.sync(snapshot, edits, tab_size);
|
||||
let (snapshot, edits) = self.char_map.sync(snapshot, edits, tab_size);
|
||||
let (snapshot, edits) = self
|
||||
.wrap_map
|
||||
.update(cx, |map, cx| map.sync(snapshot, edits, cx));
|
||||
@@ -317,7 +316,7 @@ impl DisplayMap {
|
||||
let tab_size = Self::tab_size(&self.buffer, cx);
|
||||
let (snapshot, edits) = self.inlay_map.sync(snapshot, edits);
|
||||
let (snapshot, edits) = self.fold_map.read(snapshot, edits);
|
||||
let (snapshot, edits) = self.tab_map.sync(snapshot, edits, tab_size);
|
||||
let (snapshot, edits) = self.char_map.sync(snapshot, edits, tab_size);
|
||||
let (snapshot, edits) = self
|
||||
.wrap_map
|
||||
.update(cx, |map, cx| map.sync(snapshot, edits, cx));
|
||||
@@ -335,7 +334,7 @@ impl DisplayMap {
|
||||
let tab_size = Self::tab_size(&self.buffer, cx);
|
||||
let (snapshot, edits) = self.inlay_map.sync(snapshot, edits);
|
||||
let (snapshot, edits) = self.fold_map.read(snapshot, edits);
|
||||
let (snapshot, edits) = self.tab_map.sync(snapshot, edits, tab_size);
|
||||
let (snapshot, edits) = self.char_map.sync(snapshot, edits, tab_size);
|
||||
let (snapshot, edits) = self
|
||||
.wrap_map
|
||||
.update(cx, |map, cx| map.sync(snapshot, edits, cx));
|
||||
@@ -411,7 +410,7 @@ impl DisplayMap {
|
||||
let (snapshot, edits) = self.inlay_map.sync(buffer_snapshot, edits);
|
||||
let (snapshot, edits) = self.fold_map.read(snapshot, edits);
|
||||
let tab_size = Self::tab_size(&self.buffer, cx);
|
||||
let (snapshot, edits) = self.tab_map.sync(snapshot, edits, tab_size);
|
||||
let (snapshot, edits) = self.char_map.sync(snapshot, edits, tab_size);
|
||||
let (snapshot, edits) = self
|
||||
.wrap_map
|
||||
.update(cx, |map, cx| map.sync(snapshot, edits, cx));
|
||||
@@ -419,7 +418,7 @@ impl DisplayMap {
|
||||
|
||||
let (snapshot, edits) = self.inlay_map.splice(to_remove, to_insert);
|
||||
let (snapshot, edits) = self.fold_map.read(snapshot, edits);
|
||||
let (snapshot, edits) = self.tab_map.sync(snapshot, edits, tab_size);
|
||||
let (snapshot, edits) = self.char_map.sync(snapshot, edits, tab_size);
|
||||
let (snapshot, edits) = self
|
||||
.wrap_map
|
||||
.update(cx, |map, cx| map.sync(snapshot, edits, cx));
|
||||
@@ -465,105 +464,13 @@ pub struct HighlightedChunk<'a> {
    pub renderer: Option<ChunkRenderer>,
}

impl<'a> HighlightedChunk<'a> {
    fn highlight_invisibles(
        self,
        editor_style: &'a EditorStyle,
    ) -> impl Iterator<Item = Self> + 'a {
        let mut chars = self.text.chars().peekable();
        let mut text = self.text;
        let style = self.style;
        let is_tab = self.is_tab;
        let renderer = self.renderer;
        iter::from_fn(move || {
            let mut prefix_len = 0;
            while let Some(&ch) = chars.peek() {
                if !is_invisible(ch) {
                    prefix_len += ch.len_utf8();
                    chars.next();
                    continue;
                }
                if prefix_len > 0 {
                    let (prefix, suffix) = text.split_at(prefix_len);
                    text = suffix;
                    return Some(HighlightedChunk {
                        text: prefix,
                        style,
                        is_tab,
                        renderer: renderer.clone(),
                    });
                }
                chars.next();
                let (prefix, suffix) = text.split_at(ch.len_utf8());
                text = suffix;
                if let Some(replacement) = replacement(ch) {
                    let background = editor_style.status.hint_background;
                    let underline = editor_style.status.hint;
                    return Some(HighlightedChunk {
                        text: prefix,
                        style: None,
                        is_tab: false,
                        renderer: Some(ChunkRenderer {
                            render: Arc::new(move |_| {
                                div()
                                    .child(replacement)
                                    .bg(background)
                                    .text_decoration_1()
                                    .text_decoration_color(underline)
                                    .into_any_element()
                            }),
                            constrain_width: false,
                        }),
                    });
                } else {
                    let invisible_highlight = HighlightStyle {
                        background_color: Some(editor_style.status.hint_background),
                        underline: Some(UnderlineStyle {
                            color: Some(editor_style.status.hint),
                            thickness: px(1.),
                            wavy: false,
                        }),
                        ..Default::default()
                    };
                    let invisible_style = if let Some(mut style) = style {
                        style.highlight(invisible_highlight);
                        style
                    } else {
                        invisible_highlight
                    };

                    return Some(HighlightedChunk {
                        text: prefix,
                        style: Some(invisible_style),
                        is_tab: false,
                        renderer: renderer.clone(),
                    });
                }
            }

            if !text.is_empty() {
                let remainder = text;
                text = "";
                Some(HighlightedChunk {
                    text: remainder,
                    style,
                    is_tab,
                    renderer: renderer.clone(),
                })
            } else {
                None
            }
        })
    }
}

#[derive(Clone)]
pub struct DisplaySnapshot {
    pub buffer_snapshot: MultiBufferSnapshot,
    pub fold_snapshot: FoldSnapshot,
    pub crease_snapshot: CreaseSnapshot,
    inlay_snapshot: InlaySnapshot,
    tab_snapshot: TabSnapshot,
    char_snapshot: CharSnapshot,
    wrap_snapshot: WrapSnapshot,
    block_snapshot: BlockSnapshot,
    text_highlights: TextHighlights,
@@ -660,16 +567,16 @@ impl DisplaySnapshot {
        new_start..new_end
    }

    pub fn point_to_display_point(&self, point: MultiBufferPoint, bias: Bias) -> DisplayPoint {
    fn point_to_display_point(&self, point: MultiBufferPoint, bias: Bias) -> DisplayPoint {
        let inlay_point = self.inlay_snapshot.to_inlay_point(point);
        let fold_point = self.fold_snapshot.to_fold_point(inlay_point, bias);
        let tab_point = self.tab_snapshot.to_tab_point(fold_point);
        let wrap_point = self.wrap_snapshot.tab_point_to_wrap_point(tab_point);
        let char_point = self.char_snapshot.to_char_point(fold_point);
        let wrap_point = self.wrap_snapshot.char_point_to_wrap_point(char_point);
        let block_point = self.block_snapshot.to_block_point(wrap_point);
        DisplayPoint(block_point)
    }

    pub fn display_point_to_point(&self, point: DisplayPoint, bias: Bias) -> Point {
    fn display_point_to_point(&self, point: DisplayPoint, bias: Bias) -> Point {
        self.inlay_snapshot
            .to_buffer_point(self.display_point_to_inlay_point(point, bias))
    }
@@ -691,22 +598,22 @@ impl DisplaySnapshot {

    fn display_point_to_inlay_point(&self, point: DisplayPoint, bias: Bias) -> InlayPoint {
        let block_point = point.0;
        let wrap_point = self.block_snapshot.to_wrap_point(block_point, bias);
        let tab_point = self.wrap_snapshot.to_tab_point(wrap_point);
        let fold_point = self.tab_snapshot.to_fold_point(tab_point, bias).0;
        let wrap_point = self.block_snapshot.to_wrap_point(block_point);
        let char_point = self.wrap_snapshot.to_char_point(wrap_point);
        let fold_point = self.char_snapshot.to_fold_point(char_point, bias).0;
        fold_point.to_inlay_point(&self.fold_snapshot)
    }

    pub fn display_point_to_fold_point(&self, point: DisplayPoint, bias: Bias) -> FoldPoint {
        let block_point = point.0;
        let wrap_point = self.block_snapshot.to_wrap_point(block_point, bias);
        let tab_point = self.wrap_snapshot.to_tab_point(wrap_point);
        self.tab_snapshot.to_fold_point(tab_point, bias).0
        let wrap_point = self.block_snapshot.to_wrap_point(block_point);
        let char_point = self.wrap_snapshot.to_char_point(wrap_point);
        self.char_snapshot.to_fold_point(char_point, bias).0
    }

    pub fn fold_point_to_display_point(&self, fold_point: FoldPoint) -> DisplayPoint {
        let tab_point = self.tab_snapshot.to_tab_point(fold_point);
        let wrap_point = self.wrap_snapshot.tab_point_to_wrap_point(tab_point);
        let char_point = self.char_snapshot.to_char_point(fold_point);
        let wrap_point = self.wrap_snapshot.char_point_to_wrap_point(char_point);
        let block_point = self.block_snapshot.to_block_point(wrap_point);
        DisplayPoint(block_point)
    }
@@ -771,7 +678,7 @@ impl DisplaySnapshot {
|
||||
suggestion: Some(editor_style.suggestions_style),
|
||||
},
|
||||
)
|
||||
.flat_map(|chunk| {
|
||||
.map(|chunk| {
|
||||
let mut highlight_style = chunk
|
||||
.syntax_highlight_id
|
||||
.and_then(|id| id.style(&editor_style.syntax));
|
||||
@@ -784,6 +691,23 @@ impl DisplaySnapshot {
|
||||
}
|
||||
}
|
||||
|
||||
if chunk.is_invisible {
|
||||
let invisible_highlight = HighlightStyle {
|
||||
background_color: Some(editor_style.status.hint_background),
|
||||
underline: Some(UnderlineStyle {
|
||||
color: Some(editor_style.status.hint),
|
||||
thickness: px(1.),
|
||||
wavy: false,
|
||||
}),
|
||||
..Default::default()
|
||||
};
|
||||
if let Some(highlight_style) = highlight_style.as_mut() {
|
||||
highlight_style.highlight(invisible_highlight);
|
||||
} else {
|
||||
highlight_style = Some(invisible_highlight);
|
||||
}
|
||||
}
|
||||
|
||||
let mut diagnostic_highlight = HighlightStyle::default();
|
||||
|
||||
if chunk.is_unnecessary {
|
||||
@@ -814,7 +738,6 @@ impl DisplaySnapshot {
|
||||
is_tab: chunk.is_tab,
|
||||
renderer: chunk.renderer,
|
||||
}
|
||||
.highlight_invisibles(editor_style)
|
||||
})
|
||||
}
|
||||
|
||||
@@ -881,8 +804,9 @@ impl DisplaySnapshot {
|
||||
layout_line.closest_index_for_x(x) as u32
|
||||
}
|
||||
|
||||
pub fn grapheme_at(&self, mut point: DisplayPoint) -> Option<SharedString> {
|
||||
pub fn grapheme_at(&self, mut point: DisplayPoint) -> Option<String> {
|
||||
point = DisplayPoint(self.block_snapshot.clip_point(point.0, Bias::Left));
|
||||
|
||||
let chars = self
|
||||
.text_chunks(point.row())
|
||||
.flat_map(str::chars)
|
||||
@@ -903,15 +827,12 @@ impl DisplaySnapshot {
|
||||
!end
|
||||
}
|
||||
});
|
||||
chars.collect::<String>().graphemes(true).next().map(|s| {
|
||||
if let Some(invisible) = s.chars().next().filter(|&c| is_invisible(c)) {
|
||||
replacement(invisible).unwrap_or(s).to_owned().into()
|
||||
} else if s == "\n" {
|
||||
" ".into()
|
||||
} else {
|
||||
s.to_owned().into()
|
||||
}
|
||||
})
|
||||
|
||||
chars
|
||||
.collect::<String>()
|
||||
.graphemes(true)
|
||||
.next()
|
||||
.map(|s| s.to_owned())
|
||||
}
|
||||
|
||||
pub fn buffer_chars_at(&self, mut offset: usize) -> impl Iterator<Item = (char, usize)> + '_ {
|
||||
@@ -990,7 +911,7 @@ impl DisplaySnapshot {
|
||||
pub fn soft_wrap_indent(&self, display_row: DisplayRow) -> Option<u32> {
|
||||
let wrap_row = self
|
||||
.block_snapshot
|
||||
.to_wrap_point(BlockPoint::new(display_row.0, 0), Bias::Left)
|
||||
.to_wrap_point(BlockPoint::new(display_row.0, 0))
|
||||
.row();
|
||||
self.wrap_snapshot.soft_wrap_indent(wrap_row)
|
||||
}
|
||||
@@ -1054,27 +975,19 @@ impl DisplaySnapshot {
            .unwrap_or(false)
    }

    pub fn crease_for_buffer_row(&self, buffer_row: MultiBufferRow) -> Option<Crease<Point>> {
    pub fn foldable_range(
        &self,
        buffer_row: MultiBufferRow,
    ) -> Option<(Range<Point>, FoldPlaceholder)> {
        let start = MultiBufferPoint::new(buffer_row.0, self.buffer_snapshot.line_len(buffer_row));
        if let Some(crease) = self
            .crease_snapshot
            .query_row(buffer_row, &self.buffer_snapshot)
        {
            match crease {
                Crease::Inline {
                    range,
                    placeholder,
                    render_toggle,
                    render_trailer,
                    metadata,
                } => Some(Crease::Inline {
                    range: range.to_point(&self.buffer_snapshot),
                    placeholder: placeholder.clone(),
                    render_toggle: render_toggle.clone(),
                    render_trailer: render_trailer.clone(),
                    metadata: metadata.clone(),
                }),
            }
            Some((
                crease.range.to_point(&self.buffer_snapshot),
                crease.placeholder.clone(),
            ))
        } else if self.starts_indent(MultiBufferRow(start.row))
            && !self.is_line_folded(MultiBufferRow(start.row))
        {
@@ -1111,13 +1024,7 @@ impl DisplaySnapshot {
                .line_len(MultiBufferRow(row_before_line_breaks.row)),
            );

            Some(Crease::Inline {
                range: start..row_before_line_breaks,
                placeholder: self.fold_placeholder,
                render_toggle: None,
                render_trailer: None,
                metadata: None,
            })
            Some((start..row_before_line_breaks, self.fold_placeholder.clone()))
        } else {
            None
        }
@@ -1236,9 +1143,9 @@ impl DisplayPoint {
    }

    pub fn to_offset(self, map: &DisplaySnapshot, bias: Bias) -> usize {
        let wrap_point = map.block_snapshot.to_wrap_point(self.0, bias);
        let tab_point = map.wrap_snapshot.to_tab_point(wrap_point);
        let fold_point = map.tab_snapshot.to_fold_point(tab_point, bias).0;
        let wrap_point = map.block_snapshot.to_wrap_point(self.0);
        let char_point = map.wrap_snapshot.to_char_point(wrap_point);
        let fold_point = map.char_snapshot.to_fold_point(char_point, bias).0;
        let inlay_point = fold_point.to_inlay_point(&map.fold_snapshot);
        map.inlay_snapshot
            .to_buffer_offset(map.inlay_snapshot.to_offset(inlay_point))
@@ -1274,23 +1181,17 @@ pub mod tests {
|
||||
use super::*;
|
||||
use crate::{movement, test::marked_display_snapshot};
|
||||
use block_map::BlockPlacement;
|
||||
use gpui::{
|
||||
div, font, observe, px, AppContext, BorrowAppContext, Context, Element, Hsla, Rgba,
|
||||
};
|
||||
use gpui::{div, font, observe, px, AppContext, BorrowAppContext, Context, Element, Hsla};
|
||||
use language::{
|
||||
language_settings::{AllLanguageSettings, AllLanguageSettingsContent},
|
||||
Buffer, Diagnostic, DiagnosticEntry, DiagnosticSet, Language, LanguageConfig,
|
||||
LanguageMatcher,
|
||||
Buffer, Language, LanguageConfig, LanguageMatcher,
|
||||
};
|
||||
use lsp::LanguageServerId;
|
||||
use project::Project;
|
||||
use rand::{prelude::*, Rng};
|
||||
use settings::SettingsStore;
|
||||
use smol::stream::StreamExt;
|
||||
use std::{env, sync::Arc};
|
||||
use text::PointUtf16;
|
||||
use theme::{LoadThemes, SyntaxTheme};
|
||||
use unindent::Unindent as _;
|
||||
use util::test::{marked_text_ranges, sample_text};
|
||||
use Bias::*;
|
||||
|
||||
@@ -1352,7 +1253,7 @@ pub mod tests {
|
||||
let snapshot = map.update(cx, |map, cx| map.snapshot(cx));
|
||||
log::info!("buffer text: {:?}", snapshot.buffer_snapshot.text());
|
||||
log::info!("fold text: {:?}", snapshot.fold_snapshot.text());
|
||||
log::info!("tab text: {:?}", snapshot.tab_snapshot.text());
|
||||
log::info!("char text: {:?}", snapshot.char_snapshot.text());
|
||||
log::info!("wrap text: {:?}", snapshot.wrap_snapshot.text());
|
||||
log::info!("block text: {:?}", snapshot.block_snapshot.text());
|
||||
log::info!("display text: {:?}", snapshot.text());
|
||||
@@ -1467,7 +1368,7 @@ pub mod tests {
|
||||
fold_count = snapshot.fold_count();
|
||||
log::info!("buffer text: {:?}", snapshot.buffer_snapshot.text());
|
||||
log::info!("fold text: {:?}", snapshot.fold_snapshot.text());
|
||||
log::info!("tab text: {:?}", snapshot.tab_snapshot.text());
|
||||
log::info!("char text: {:?}", snapshot.char_snapshot.text());
|
||||
log::info!("wrap text: {:?}", snapshot.wrap_snapshot.text());
|
||||
log::info!("block text: {:?}", snapshot.block_snapshot.text());
|
||||
log::info!("display text: {:?}", snapshot.text());
|
||||
@@ -1747,6 +1648,8 @@ pub mod tests {
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_chunks(cx: &mut gpui::TestAppContext) {
|
||||
use unindent::Unindent as _;
|
||||
|
||||
let text = r#"
|
||||
fn outer() {}
|
||||
|
||||
@@ -1843,335 +1746,12 @@ pub mod tests {
|
||||
);
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_chunks_with_syntax_highlighting_across_blocks(cx: &mut gpui::TestAppContext) {
|
||||
cx.background_executor
|
||||
.set_block_on_ticks(usize::MAX..=usize::MAX);
|
||||
|
||||
let text = r#"
|
||||
const A: &str = "
|
||||
one
|
||||
two
|
||||
three
|
||||
";
|
||||
const B: &str = "four";
|
||||
"#
|
||||
.unindent();
|
||||
|
||||
let theme = SyntaxTheme::new_test(vec![
|
||||
("string", Hsla::red()),
|
||||
("punctuation", Hsla::blue()),
|
||||
("keyword", Hsla::green()),
|
||||
]);
|
||||
let language = Arc::new(
|
||||
Language::new(
|
||||
LanguageConfig {
|
||||
name: "Rust".into(),
|
||||
..Default::default()
|
||||
},
|
||||
Some(tree_sitter_rust::LANGUAGE.into()),
|
||||
)
|
||||
.with_highlights_query(
|
||||
r#"
|
||||
(string_literal) @string
|
||||
"const" @keyword
|
||||
[":" ";"] @punctuation
|
||||
"#,
|
||||
)
|
||||
.unwrap(),
|
||||
);
|
||||
language.set_theme(&theme);
|
||||
|
||||
cx.update(|cx| init_test(cx, |_| {}));
|
||||
|
||||
let buffer = cx.new_model(|cx| Buffer::local(text, cx).with_language(language, cx));
|
||||
cx.condition(&buffer, |buf, _| !buf.is_parsing()).await;
|
||||
let buffer = cx.new_model(|cx| MultiBuffer::singleton(buffer, cx));
|
||||
let buffer_snapshot = buffer.read_with(cx, |buffer, cx| buffer.snapshot(cx));
|
||||
|
||||
let map = cx.new_model(|cx| {
|
||||
DisplayMap::new(
|
||||
buffer,
|
||||
font("Courier"),
|
||||
px(16.0),
|
||||
None,
|
||||
true,
|
||||
1,
|
||||
1,
|
||||
0,
|
||||
FoldPlaceholder::test(),
|
||||
cx,
|
||||
)
|
||||
});
|
||||
|
||||
// Insert a block in the middle of a multi-line string literal
|
||||
map.update(cx, |map, cx| {
|
||||
map.insert_blocks(
|
||||
[BlockProperties {
|
||||
placement: BlockPlacement::Below(
|
||||
buffer_snapshot.anchor_before(Point::new(1, 0)),
|
||||
),
|
||||
height: 1,
|
||||
style: BlockStyle::Sticky,
|
||||
render: Box::new(|_| div().into_any()),
|
||||
priority: 0,
|
||||
}],
|
||||
cx,
|
||||
)
|
||||
});
|
||||
|
||||
pretty_assertions::assert_eq!(
|
||||
cx.update(|cx| syntax_chunks(DisplayRow(0)..DisplayRow(7), &map, &theme, cx)),
|
||||
[
|
||||
("const".into(), Some(Hsla::green())),
|
||||
(" A".into(), None),
|
||||
(":".into(), Some(Hsla::blue())),
|
||||
(" &str = ".into(), None),
|
||||
("\"\n one\n".into(), Some(Hsla::red())),
|
||||
("\n".into(), None),
|
||||
(" two\n three\n\"".into(), Some(Hsla::red())),
|
||||
(";".into(), Some(Hsla::blue())),
|
||||
("\n".into(), None),
|
||||
("const".into(), Some(Hsla::green())),
|
||||
(" B".into(), None),
|
||||
(":".into(), Some(Hsla::blue())),
|
||||
(" &str = ".into(), None),
|
||||
("\"four\"".into(), Some(Hsla::red())),
|
||||
(";".into(), Some(Hsla::blue())),
|
||||
("\n".into(), None),
|
||||
]
|
||||
);
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_chunks_with_diagnostics_across_blocks(cx: &mut gpui::TestAppContext) {
|
||||
cx.background_executor
|
||||
.set_block_on_ticks(usize::MAX..=usize::MAX);
|
||||
|
||||
let text = r#"
|
||||
struct A {
|
||||
b: usize;
|
||||
}
|
||||
const c: usize = 1;
|
||||
"#
|
||||
.unindent();
|
||||
|
||||
cx.update(|cx| init_test(cx, |_| {}));
|
||||
|
||||
let buffer = cx.new_model(|cx| Buffer::local(text, cx));
|
||||
|
||||
buffer.update(cx, |buffer, cx| {
|
||||
buffer.update_diagnostics(
|
||||
LanguageServerId(0),
|
||||
DiagnosticSet::new(
|
||||
[DiagnosticEntry {
|
||||
range: PointUtf16::new(0, 0)..PointUtf16::new(2, 1),
|
||||
diagnostic: Diagnostic {
|
||||
severity: DiagnosticSeverity::ERROR,
|
||||
group_id: 1,
|
||||
message: "hi".into(),
|
||||
..Default::default()
|
||||
},
|
||||
}],
|
||||
buffer,
|
||||
),
|
||||
cx,
|
||||
)
|
||||
});
|
||||
|
||||
let buffer = cx.new_model(|cx| MultiBuffer::singleton(buffer, cx));
|
||||
let buffer_snapshot = buffer.read_with(cx, |buffer, cx| buffer.snapshot(cx));
|
||||
|
||||
let map = cx.new_model(|cx| {
|
||||
DisplayMap::new(
|
||||
buffer,
|
||||
font("Courier"),
|
||||
px(16.0),
|
||||
None,
|
||||
true,
|
||||
1,
|
||||
1,
|
||||
0,
|
||||
FoldPlaceholder::test(),
|
||||
cx,
|
||||
)
|
||||
});
|
||||
|
||||
let black = gpui::black().to_rgb();
|
||||
let red = gpui::red().to_rgb();
|
||||
|
||||
// Insert a block in the middle of a multi-line diagnostic.
|
||||
map.update(cx, |map, cx| {
|
||||
map.highlight_text(
|
||||
TypeId::of::<usize>(),
|
||||
vec![
|
||||
buffer_snapshot.anchor_before(Point::new(3, 9))
|
||||
..buffer_snapshot.anchor_after(Point::new(3, 14)),
|
||||
buffer_snapshot.anchor_before(Point::new(3, 17))
|
||||
..buffer_snapshot.anchor_after(Point::new(3, 18)),
|
||||
],
|
||||
red.into(),
|
||||
);
|
||||
map.insert_blocks(
|
||||
[BlockProperties {
|
||||
placement: BlockPlacement::Below(
|
||||
buffer_snapshot.anchor_before(Point::new(1, 0)),
|
||||
),
|
||||
height: 1,
|
||||
style: BlockStyle::Sticky,
|
||||
render: Box::new(|_| div().into_any()),
|
||||
priority: 0,
|
||||
}],
|
||||
cx,
|
||||
)
|
||||
});
|
||||
|
||||
let snapshot = map.update(cx, |map, cx| map.snapshot(cx));
|
||||
let mut chunks = Vec::<(String, Option<DiagnosticSeverity>, Rgba)>::new();
|
||||
for chunk in snapshot.chunks(DisplayRow(0)..DisplayRow(5), true, Default::default()) {
|
||||
let color = chunk
|
||||
.highlight_style
|
||||
.and_then(|style| style.color)
|
||||
.map_or(black, |color| color.to_rgb());
|
||||
if let Some((last_chunk, last_severity, last_color)) = chunks.last_mut() {
|
||||
if *last_severity == chunk.diagnostic_severity && *last_color == color {
|
||||
last_chunk.push_str(chunk.text);
|
||||
continue;
|
||||
}
|
||||
}
|
||||
|
||||
chunks.push((chunk.text.to_string(), chunk.diagnostic_severity, color));
|
||||
}
|
||||
|
||||
assert_eq!(
|
||||
chunks,
|
||||
[
|
||||
(
|
||||
"struct A {\n b: usize;\n".into(),
|
||||
Some(DiagnosticSeverity::ERROR),
|
||||
black
|
||||
),
|
||||
("\n".into(), None, black),
|
||||
("}".into(), Some(DiagnosticSeverity::ERROR), black),
|
||||
("\nconst c: ".into(), None, black),
|
||||
("usize".into(), None, red),
|
||||
(" = ".into(), None, black),
|
||||
("1".into(), None, red),
|
||||
(";\n".into(), None, black),
|
||||
]
|
||||
);
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_point_translation_with_replace_blocks(cx: &mut gpui::TestAppContext) {
|
||||
cx.background_executor
|
||||
.set_block_on_ticks(usize::MAX..=usize::MAX);
|
||||
|
||||
cx.update(|cx| init_test(cx, |_| {}));
|
||||
|
||||
let buffer = cx.update(|cx| MultiBuffer::build_simple("abcde\nfghij\nklmno\npqrst", cx));
|
||||
let buffer_snapshot = buffer.read_with(cx, |buffer, cx| buffer.snapshot(cx));
|
||||
let map = cx.new_model(|cx| {
|
||||
DisplayMap::new(
|
||||
buffer.clone(),
|
||||
font("Courier"),
|
||||
px(16.0),
|
||||
None,
|
||||
true,
|
||||
1,
|
||||
1,
|
||||
0,
|
||||
FoldPlaceholder::test(),
|
||||
cx,
|
||||
)
|
||||
});
|
||||
|
||||
let snapshot = map.update(cx, |map, cx| {
|
||||
map.insert_blocks(
|
||||
[BlockProperties {
|
||||
placement: BlockPlacement::Replace(
|
||||
buffer_snapshot.anchor_before(Point::new(1, 2))
|
||||
..buffer_snapshot.anchor_after(Point::new(2, 3)),
|
||||
),
|
||||
height: 4,
|
||||
style: BlockStyle::Fixed,
|
||||
render: Box::new(|_| div().into_any()),
|
||||
priority: 0,
|
||||
}],
|
||||
cx,
|
||||
);
|
||||
map.snapshot(cx)
|
||||
});
|
||||
|
||||
assert_eq!(snapshot.text(), "abcde\n\n\n\n\npqrst");
|
||||
|
||||
let point_to_display_points = [
|
||||
(Point::new(1, 0), DisplayPoint::new(DisplayRow(1), 0)),
|
||||
(Point::new(2, 0), DisplayPoint::new(DisplayRow(1), 0)),
|
||||
(Point::new(3, 0), DisplayPoint::new(DisplayRow(5), 0)),
|
||||
];
|
||||
for (buffer_point, display_point) in point_to_display_points {
|
||||
assert_eq!(
|
||||
snapshot.point_to_display_point(buffer_point, Bias::Left),
|
||||
display_point,
|
||||
"point_to_display_point({:?}, Bias::Left)",
|
||||
buffer_point
|
||||
);
|
||||
assert_eq!(
|
||||
snapshot.point_to_display_point(buffer_point, Bias::Right),
|
||||
display_point,
|
||||
"point_to_display_point({:?}, Bias::Right)",
|
||||
buffer_point
|
||||
);
|
||||
}
|
||||
|
||||
let display_points_to_points = [
|
||||
(
|
||||
DisplayPoint::new(DisplayRow(1), 0),
|
||||
Point::new(1, 0),
|
||||
Point::new(2, 5),
|
||||
),
|
||||
(
|
||||
DisplayPoint::new(DisplayRow(2), 0),
|
||||
Point::new(1, 0),
|
||||
Point::new(2, 5),
|
||||
),
|
||||
(
|
||||
DisplayPoint::new(DisplayRow(3), 0),
|
||||
Point::new(1, 0),
|
||||
Point::new(2, 5),
|
||||
),
|
||||
(
|
||||
DisplayPoint::new(DisplayRow(4), 0),
|
||||
Point::new(1, 0),
|
||||
Point::new(2, 5),
|
||||
),
|
||||
(
|
||||
DisplayPoint::new(DisplayRow(5), 0),
|
||||
Point::new(3, 0),
|
||||
Point::new(3, 0),
|
||||
),
|
||||
];
|
||||
for (display_point, left_buffer_point, right_buffer_point) in display_points_to_points {
|
||||
assert_eq!(
|
||||
snapshot.display_point_to_point(display_point, Bias::Left),
|
||||
left_buffer_point,
|
||||
"display_point_to_point({:?}, Bias::Left)",
|
||||
display_point
|
||||
);
|
||||
assert_eq!(
|
||||
snapshot.display_point_to_point(display_point, Bias::Right),
|
||||
right_buffer_point,
|
||||
"display_point_to_point({:?}, Bias::Right)",
|
||||
display_point
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
// todo(linux) fails due to pixel differences in text rendering
|
||||
#[cfg(target_os = "macos")]
|
||||
#[gpui::test]
|
||||
async fn test_chunks_with_soft_wrapping(cx: &mut gpui::TestAppContext) {
|
||||
use unindent::Unindent as _;
|
||||
|
||||
cx.background_executor
|
||||
.set_block_on_ticks(usize::MAX..=usize::MAX);
|
||||
|
||||
@@ -2443,7 +2023,7 @@ pub mod tests {
|
||||
snapshot.anchor_before(Point::new(2, 0))..snapshot.anchor_after(Point::new(3, 3));
|
||||
|
||||
map.crease_map.insert(
|
||||
[Crease::inline(
|
||||
[Crease::new(
|
||||
range,
|
||||
FoldPlaceholder::test(),
|
||||
|_row, _status, _toggle, _cx| div(),
|
||||
|
||||
@@ -265,7 +265,6 @@ pub struct BlockContext<'a, 'b> {
|
||||
pub em_width: Pixels,
|
||||
pub line_height: Pixels,
|
||||
pub block_id: BlockId,
|
||||
pub selected: bool,
|
||||
pub editor_style: &'b EditorStyle,
|
||||
}
|
||||
|
||||
@@ -1312,6 +1311,7 @@ impl BlockSnapshot {
|
||||
let (output_start_row, input_start_row) = cursor.start();
|
||||
let (output_end_row, input_end_row) = cursor.end(&());
|
||||
let output_start = Point::new(output_start_row.0, 0);
|
||||
let output_end = Point::new(output_end_row.0, 0);
|
||||
let input_start = Point::new(input_start_row.0, 0);
|
||||
let input_end = Point::new(input_end_row.0, 0);
|
||||
|
||||
@@ -1319,10 +1319,10 @@ impl BlockSnapshot {
|
||||
Some(Block::Custom(block))
|
||||
if matches!(block.placement, BlockPlacement::Replace(_)) =>
|
||||
{
|
||||
if ((bias == Bias::Left || search_left) && output_start <= point.0)
|
||||
|| (!search_left && output_start >= point.0)
|
||||
{
|
||||
if bias == Bias::Left {
|
||||
return BlockPoint(output_start);
|
||||
} else {
|
||||
return BlockPoint(Point::new(output_end.row - 1, 0));
|
||||
}
|
||||
}
|
||||
None => {
|
||||
@@ -1364,7 +1364,12 @@ impl BlockSnapshot {
|
||||
cursor.seek(&WrapRow(wrap_point.row()), Bias::Right, &());
|
||||
if let Some(transform) = cursor.item() {
|
||||
if transform.block.is_some() {
|
||||
BlockPoint::new(cursor.start().1 .0, 0)
|
||||
let wrap_start = WrapPoint::new(cursor.start().0 .0, 0);
|
||||
if wrap_start == wrap_point {
|
||||
BlockPoint::new(cursor.start().1 .0, 0)
|
||||
} else {
|
||||
BlockPoint::new(cursor.end(&()).1 .0 - 1, 0)
|
||||
}
|
||||
} else {
|
||||
let (input_start_row, output_start_row) = cursor.start();
|
||||
let input_start = Point::new(input_start_row.0, 0);
|
||||
@@ -1377,7 +1382,7 @@ impl BlockSnapshot {
|
||||
}
|
||||
}
|
||||
|
||||
pub fn to_wrap_point(&self, block_point: BlockPoint, bias: Bias) -> WrapPoint {
|
||||
pub fn to_wrap_point(&self, block_point: BlockPoint) -> WrapPoint {
|
||||
let mut cursor = self.transforms.cursor::<(BlockRow, WrapRow)>(&());
|
||||
cursor.seek(&BlockRow(block_point.row), Bias::Right, &());
|
||||
if let Some(transform) = cursor.item() {
|
||||
@@ -1386,9 +1391,7 @@ impl BlockSnapshot {
|
||||
if block.place_below() {
|
||||
let wrap_row = cursor.start().1 .0 - 1;
|
||||
WrapPoint::new(wrap_row, self.wrap_snapshot.line_len(wrap_row))
|
||||
} else if block.place_above() {
|
||||
WrapPoint::new(cursor.start().1 .0, 0)
|
||||
} else if bias == Bias::Left {
|
||||
} else if block.place_above() || block_point.row == cursor.start().0 .0 {
|
||||
WrapPoint::new(cursor.start().1 .0, 0)
|
||||
} else {
|
||||
let wrap_row = cursor.end(&()).1 .0 - 1;
|
||||
@@ -1663,7 +1666,7 @@ fn offset_for_row(s: &str, target: u32) -> (u32, usize) {
|
||||
mod tests {
|
||||
use super::*;
|
||||
use crate::display_map::{
|
||||
fold_map::FoldMap, inlay_map::InlayMap, tab_map::TabMap, wrap_map::WrapMap,
|
||||
char_map::CharMap, fold_map::FoldMap, inlay_map::InlayMap, wrap_map::WrapMap,
|
||||
};
|
||||
use gpui::{div, font, px, AppContext, Context as _, Element};
|
||||
use language::{Buffer, Capability};
|
||||
@@ -1698,9 +1701,9 @@ mod tests {
|
||||
let subscription = buffer.update(cx, |buffer, _| buffer.subscribe());
|
||||
let (mut inlay_map, inlay_snapshot) = InlayMap::new(buffer_snapshot.clone());
|
||||
let (mut fold_map, fold_snapshot) = FoldMap::new(inlay_snapshot);
|
||||
let (mut tab_map, tab_snapshot) = TabMap::new(fold_snapshot, 1.try_into().unwrap());
|
||||
let (mut char_map, char_snapshot) = CharMap::new(fold_snapshot, 1.try_into().unwrap());
|
||||
let (wrap_map, wraps_snapshot) =
|
||||
cx.update(|cx| WrapMap::new(tab_snapshot, font("Helvetica"), px(14.0), None, cx));
|
||||
cx.update(|cx| WrapMap::new(char_snapshot, font("Helvetica"), px(14.0), None, cx));
|
||||
let mut block_map = BlockMap::new(wraps_snapshot.clone(), true, 1, 1, 1);
|
||||
|
||||
let mut writer = block_map.write(wraps_snapshot.clone(), Default::default());
|
||||
@@ -1763,19 +1766,19 @@ mod tests {
|
||||
);
|
||||
|
||||
assert_eq!(
|
||||
snapshot.to_wrap_point(BlockPoint::new(0, 3), Bias::Left),
|
||||
snapshot.to_wrap_point(BlockPoint::new(0, 3)),
|
||||
WrapPoint::new(0, 3)
|
||||
);
|
||||
assert_eq!(
|
||||
snapshot.to_wrap_point(BlockPoint::new(1, 0), Bias::Left),
|
||||
snapshot.to_wrap_point(BlockPoint::new(1, 0)),
|
||||
WrapPoint::new(1, 0)
|
||||
);
|
||||
assert_eq!(
|
||||
snapshot.to_wrap_point(BlockPoint::new(3, 0), Bias::Left),
|
||||
snapshot.to_wrap_point(BlockPoint::new(3, 0)),
|
||||
WrapPoint::new(1, 0)
|
||||
);
|
||||
assert_eq!(
|
||||
snapshot.to_wrap_point(BlockPoint::new(7, 0), Bias::Left),
|
||||
snapshot.to_wrap_point(BlockPoint::new(7, 0)),
|
||||
WrapPoint::new(3, 3)
|
||||
);
|
||||
|
||||
@@ -1848,10 +1851,10 @@ mod tests {
|
||||
let (inlay_snapshot, inlay_edits) =
|
||||
inlay_map.sync(buffer_snapshot, subscription.consume().into_inner());
|
||||
let (fold_snapshot, fold_edits) = fold_map.read(inlay_snapshot, inlay_edits);
|
||||
let (tab_snapshot, tab_edits) =
|
||||
tab_map.sync(fold_snapshot, fold_edits, 4.try_into().unwrap());
|
||||
let (char_snapshot, tab_edits) =
|
||||
char_map.sync(fold_snapshot, fold_edits, 4.try_into().unwrap());
|
||||
let (wraps_snapshot, wrap_edits) = wrap_map.update(cx, |wrap_map, cx| {
|
||||
wrap_map.sync(tab_snapshot, tab_edits, cx)
|
||||
wrap_map.sync(char_snapshot, tab_edits, cx)
|
||||
});
|
||||
let snapshot = block_map.read(wraps_snapshot, wrap_edits);
|
||||
assert_eq!(snapshot.text(), "aaa\n\nb!!!\n\n\nbb\nccc\nddd\n\n\n");
|
||||
@@ -1911,8 +1914,9 @@ mod tests {
|
||||
let multi_buffer_snapshot = multi_buffer.read(cx).snapshot(cx);
|
||||
let (_, inlay_snapshot) = InlayMap::new(multi_buffer_snapshot.clone());
|
||||
let (_, fold_snapshot) = FoldMap::new(inlay_snapshot);
|
||||
let (_, tab_snapshot) = TabMap::new(fold_snapshot, 4.try_into().unwrap());
|
||||
let (_, wraps_snapshot) = WrapMap::new(tab_snapshot, font, font_size, Some(wrap_width), cx);
|
||||
let (_, char_snapshot) = CharMap::new(fold_snapshot, 4.try_into().unwrap());
|
||||
let (_, wraps_snapshot) =
|
||||
WrapMap::new(char_snapshot, font, font_size, Some(wrap_width), cx);
|
||||
|
||||
let block_map = BlockMap::new(wraps_snapshot.clone(), true, 1, 1, 1);
|
||||
let snapshot = block_map.read(wraps_snapshot, Default::default());
|
||||
@@ -1949,9 +1953,9 @@ mod tests {
|
||||
let _subscription = buffer.update(cx, |buffer, _| buffer.subscribe());
|
||||
let (_inlay_map, inlay_snapshot) = InlayMap::new(buffer_snapshot.clone());
|
||||
let (_fold_map, fold_snapshot) = FoldMap::new(inlay_snapshot);
|
||||
let (_tab_map, tab_snapshot) = TabMap::new(fold_snapshot, 1.try_into().unwrap());
|
||||
let (_char_map, char_snapshot) = CharMap::new(fold_snapshot, 1.try_into().unwrap());
|
||||
let (_wrap_map, wraps_snapshot) =
|
||||
cx.update(|cx| WrapMap::new(tab_snapshot, font("Helvetica"), px(14.0), None, cx));
|
||||
cx.update(|cx| WrapMap::new(char_snapshot, font("Helvetica"), px(14.0), None, cx));
|
||||
let mut block_map = BlockMap::new(wraps_snapshot.clone(), false, 1, 1, 0);
|
||||
|
||||
let mut writer = block_map.write(wraps_snapshot.clone(), Default::default());
|
||||
@@ -2051,9 +2055,15 @@ mod tests {
|
||||
let buffer_snapshot = cx.update(|cx| buffer.read(cx).snapshot(cx));
|
||||
let (_, inlay_snapshot) = InlayMap::new(buffer_snapshot.clone());
|
||||
let (_, fold_snapshot) = FoldMap::new(inlay_snapshot);
|
||||
let (_, tab_snapshot) = TabMap::new(fold_snapshot, 4.try_into().unwrap());
|
||||
let (_, char_snapshot) = CharMap::new(fold_snapshot, 4.try_into().unwrap());
|
||||
let (_, wraps_snapshot) = cx.update(|cx| {
|
||||
WrapMap::new(tab_snapshot, font("Helvetica"), px(14.0), Some(px(60.)), cx)
|
||||
WrapMap::new(
|
||||
char_snapshot,
|
||||
font("Helvetica"),
|
||||
px(14.0),
|
||||
Some(px(60.)),
|
||||
cx,
|
||||
)
|
||||
});
|
||||
let mut block_map = BlockMap::new(wraps_snapshot.clone(), true, 1, 1, 0);
|
||||
|
||||
@@ -2096,7 +2106,7 @@ mod tests {
|
||||
let (mut inlay_map, inlay_snapshot) = InlayMap::new(buffer_snapshot.clone());
|
||||
let (mut fold_map, fold_snapshot) = FoldMap::new(inlay_snapshot);
|
||||
let tab_size = 1.try_into().unwrap();
|
||||
let (mut tab_map, tab_snapshot) = TabMap::new(fold_snapshot, tab_size);
|
||||
let (mut tab_map, tab_snapshot) = CharMap::new(fold_snapshot, tab_size);
|
||||
let (wrap_map, wraps_snapshot) =
|
||||
cx.update(|cx| WrapMap::new(tab_snapshot, font("Helvetica"), px(14.0), None, cx));
|
||||
let mut block_map = BlockMap::new(wraps_snapshot.clone(), false, 1, 1, 0);
|
||||
@@ -2247,9 +2257,9 @@ mod tests {
|
||||
let mut buffer_snapshot = cx.update(|cx| buffer.read(cx).snapshot(cx));
|
||||
let (mut inlay_map, inlay_snapshot) = InlayMap::new(buffer_snapshot.clone());
|
||||
let (mut fold_map, fold_snapshot) = FoldMap::new(inlay_snapshot);
|
||||
let (mut tab_map, tab_snapshot) = TabMap::new(fold_snapshot, 4.try_into().unwrap());
|
||||
let (mut char_map, char_snapshot) = CharMap::new(fold_snapshot, 4.try_into().unwrap());
|
||||
let (wrap_map, wraps_snapshot) = cx
|
||||
.update(|cx| WrapMap::new(tab_snapshot, font("Helvetica"), font_size, wrap_width, cx));
|
||||
.update(|cx| WrapMap::new(char_snapshot, font("Helvetica"), font_size, wrap_width, cx));
|
||||
let mut block_map = BlockMap::new(
|
||||
wraps_snapshot,
|
||||
true,
|
||||
@@ -2311,10 +2321,10 @@ mod tests {
|
||||
let (inlay_snapshot, inlay_edits) =
|
||||
inlay_map.sync(buffer_snapshot.clone(), vec![]);
|
||||
let (fold_snapshot, fold_edits) = fold_map.read(inlay_snapshot, inlay_edits);
|
||||
let (tab_snapshot, tab_edits) =
|
||||
tab_map.sync(fold_snapshot, fold_edits, tab_size);
|
||||
let (char_snapshot, tab_edits) =
|
||||
char_map.sync(fold_snapshot, fold_edits, tab_size);
|
||||
let (wraps_snapshot, wrap_edits) = wrap_map.update(cx, |wrap_map, cx| {
|
||||
wrap_map.sync(tab_snapshot, tab_edits, cx)
|
||||
wrap_map.sync(char_snapshot, tab_edits, cx)
|
||||
});
|
||||
let mut block_map = block_map.write(wraps_snapshot, wrap_edits);
|
||||
block_map.insert(block_properties.iter().map(|props| BlockProperties {
|
||||
@@ -2336,10 +2346,10 @@ mod tests {
|
||||
let (inlay_snapshot, inlay_edits) =
|
||||
inlay_map.sync(buffer_snapshot.clone(), vec![]);
|
||||
let (fold_snapshot, fold_edits) = fold_map.read(inlay_snapshot, inlay_edits);
|
||||
let (tab_snapshot, tab_edits) =
|
||||
tab_map.sync(fold_snapshot, fold_edits, tab_size);
|
||||
let (char_snapshot, tab_edits) =
|
||||
char_map.sync(fold_snapshot, fold_edits, tab_size);
|
||||
let (wraps_snapshot, wrap_edits) = wrap_map.update(cx, |wrap_map, cx| {
|
||||
wrap_map.sync(tab_snapshot, tab_edits, cx)
|
||||
wrap_map.sync(char_snapshot, tab_edits, cx)
|
||||
});
|
||||
let mut block_map = block_map.write(wraps_snapshot, wrap_edits);
|
||||
block_map.remove(block_ids_to_remove);
|
||||
@@ -2359,9 +2369,9 @@ mod tests {
|
||||
let (inlay_snapshot, inlay_edits) =
|
||||
inlay_map.sync(buffer_snapshot.clone(), buffer_edits);
|
||||
let (fold_snapshot, fold_edits) = fold_map.read(inlay_snapshot, inlay_edits);
|
||||
let (tab_snapshot, tab_edits) = tab_map.sync(fold_snapshot, fold_edits, tab_size);
|
||||
let (char_snapshot, tab_edits) = char_map.sync(fold_snapshot, fold_edits, tab_size);
|
||||
let (wraps_snapshot, wrap_edits) = wrap_map.update(cx, |wrap_map, cx| {
|
||||
wrap_map.sync(tab_snapshot, tab_edits, cx)
|
||||
wrap_map.sync(char_snapshot, tab_edits, cx)
|
||||
});
|
||||
let blocks_snapshot = block_map.read(wraps_snapshot.clone(), wrap_edits);
|
||||
assert_eq!(
|
||||
@@ -2476,7 +2486,7 @@ mod tests {
|
||||
.row as usize];
|
||||
|
||||
let soft_wrapped = wraps_snapshot
|
||||
.to_tab_point(WrapPoint::new(wrap_row, 0))
|
||||
.to_char_point(WrapPoint::new(wrap_row, 0))
|
||||
.column()
|
||||
> 0;
|
||||
expected_buffer_rows.push(if soft_wrapped { None } else { buffer_row });
|
||||
@@ -2613,15 +2623,10 @@ mod tests {
|
||||
|
||||
// Ensure that conversion between block points and wrap points is stable.
|
||||
for row in 0..=blocks_snapshot.wrap_snapshot.max_point().row() {
|
||||
let wrap_point = WrapPoint::new(row, 0);
|
||||
let block_point = blocks_snapshot.to_block_point(wrap_point);
|
||||
let left_wrap_point = blocks_snapshot.to_wrap_point(block_point, Bias::Left);
|
||||
let right_wrap_point = blocks_snapshot.to_wrap_point(block_point, Bias::Right);
|
||||
assert_eq!(blocks_snapshot.to_block_point(left_wrap_point), block_point);
|
||||
assert_eq!(
|
||||
blocks_snapshot.to_block_point(right_wrap_point),
|
||||
block_point
|
||||
);
|
||||
let original_wrap_point = WrapPoint::new(row, 0);
|
||||
let block_point = blocks_snapshot.to_block_point(original_wrap_point);
|
||||
let wrap_point = blocks_snapshot.to_wrap_point(block_point);
|
||||
assert_eq!(blocks_snapshot.to_block_point(wrap_point), block_point);
|
||||
}
|
||||
|
||||
let mut block_point = BlockPoint::new(0, 0);
|
||||
@@ -2629,12 +2634,10 @@ mod tests {
|
||||
let left_point = blocks_snapshot.clip_point(block_point, Bias::Left);
|
||||
let left_buffer_point = blocks_snapshot.to_point(left_point, Bias::Left);
|
||||
assert_eq!(
|
||||
blocks_snapshot
|
||||
.to_block_point(blocks_snapshot.to_wrap_point(left_point, Bias::Left)),
|
||||
blocks_snapshot.to_block_point(blocks_snapshot.to_wrap_point(left_point)),
|
||||
left_point,
|
||||
"block point: {:?}, wrap point: {:?}",
|
||||
block_point,
|
||||
blocks_snapshot.to_wrap_point(left_point, Bias::Left)
|
||||
"wrap point: {:?}",
|
||||
blocks_snapshot.to_wrap_point(left_point)
|
||||
);
|
||||
assert_eq!(
|
||||
left_buffer_point,
|
||||
@@ -2646,12 +2649,10 @@ mod tests {
|
||||
let right_point = blocks_snapshot.clip_point(block_point, Bias::Right);
|
||||
let right_buffer_point = blocks_snapshot.to_point(right_point, Bias::Right);
|
||||
assert_eq!(
|
||||
blocks_snapshot
|
||||
.to_block_point(blocks_snapshot.to_wrap_point(right_point, Bias::Right)),
|
||||
blocks_snapshot.to_block_point(blocks_snapshot.to_wrap_point(right_point)),
|
||||
right_point,
|
||||
"block point: {:?}, wrap point: {:?}",
|
||||
block_point,
|
||||
blocks_snapshot.to_wrap_point(right_point, Bias::Right)
|
||||
"wrap point: {:?}",
|
||||
blocks_snapshot.to_wrap_point(right_point)
|
||||
);
|
||||
assert_eq!(
|
||||
right_buffer_point,
|
||||
@@ -2687,8 +2688,7 @@ mod tests {
|
||||
|
||||
impl BlockSnapshot {
|
||||
fn to_point(&self, point: BlockPoint, bias: Bias) -> Point {
|
||||
self.wrap_snapshot
|
||||
.to_point(self.to_wrap_point(point, bias), bias)
|
||||
self.wrap_snapshot.to_point(self.to_wrap_point(point), bias)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,5 +1,6 @@
use super::{
    fold_map::{self, FoldChunks, FoldEdit, FoldPoint, FoldSnapshot},
    invisibles::{is_invisible, replacement},
    Highlights,
};
use language::{Chunk, Point};
@@ -9,14 +10,14 @@ use sum_tree::Bias;

const MAX_EXPANSION_COLUMN: u32 = 256;

/// Keeps track of hard tabs in a text buffer.
/// Keeps track of hard tabs and non-printable characters in a text buffer.
///
/// See the [`display_map` module documentation](crate::display_map) for more information.
pub struct TabMap(TabSnapshot);
pub struct CharMap(CharSnapshot);

impl TabMap {
    pub fn new(fold_snapshot: FoldSnapshot, tab_size: NonZeroU32) -> (Self, TabSnapshot) {
        let snapshot = TabSnapshot {
impl CharMap {
    pub fn new(fold_snapshot: FoldSnapshot, tab_size: NonZeroU32) -> (Self, CharSnapshot) {
        let snapshot = CharSnapshot {
            fold_snapshot,
            tab_size,
            max_expansion_column: MAX_EXPANSION_COLUMN,
@@ -26,7 +27,7 @@ impl TabMap {
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
pub fn set_max_expansion_column(&mut self, column: u32) -> TabSnapshot {
|
||||
pub fn set_max_expansion_column(&mut self, column: u32) -> CharSnapshot {
|
||||
self.0.max_expansion_column = column;
|
||||
self.0.clone()
|
||||
}
|
||||
@@ -36,9 +37,9 @@ impl TabMap {
|
||||
fold_snapshot: FoldSnapshot,
|
||||
mut fold_edits: Vec<FoldEdit>,
|
||||
tab_size: NonZeroU32,
|
||||
) -> (TabSnapshot, Vec<TabEdit>) {
|
||||
) -> (CharSnapshot, Vec<TabEdit>) {
|
||||
let old_snapshot = &mut self.0;
|
||||
let mut new_snapshot = TabSnapshot {
|
||||
let mut new_snapshot = CharSnapshot {
|
||||
fold_snapshot,
|
||||
tab_size,
|
||||
max_expansion_column: old_snapshot.max_expansion_column,
|
||||
@@ -137,15 +138,15 @@ impl TabMap {
|
||||
let new_start = fold_edit.new.start.to_point(&new_snapshot.fold_snapshot);
|
||||
let new_end = fold_edit.new.end.to_point(&new_snapshot.fold_snapshot);
|
||||
tab_edits.push(TabEdit {
|
||||
old: old_snapshot.to_tab_point(old_start)..old_snapshot.to_tab_point(old_end),
|
||||
new: new_snapshot.to_tab_point(new_start)..new_snapshot.to_tab_point(new_end),
|
||||
old: old_snapshot.to_char_point(old_start)..old_snapshot.to_char_point(old_end),
|
||||
new: new_snapshot.to_char_point(new_start)..new_snapshot.to_char_point(new_end),
|
||||
});
|
||||
}
|
||||
} else {
|
||||
new_snapshot.version += 1;
|
||||
tab_edits.push(TabEdit {
|
||||
old: TabPoint::zero()..old_snapshot.max_point(),
|
||||
new: TabPoint::zero()..new_snapshot.max_point(),
|
||||
old: CharPoint::zero()..old_snapshot.max_point(),
|
||||
new: CharPoint::zero()..new_snapshot.max_point(),
|
||||
});
|
||||
}
|
||||
|
||||
@@ -155,14 +156,14 @@ impl TabMap {
|
||||
}
|
||||
|
||||
#[derive(Clone)]
|
||||
pub struct TabSnapshot {
|
||||
pub struct CharSnapshot {
|
||||
pub fold_snapshot: FoldSnapshot,
|
||||
pub tab_size: NonZeroU32,
|
||||
pub max_expansion_column: u32,
|
||||
pub version: usize,
|
||||
}
|
||||
|
||||
impl TabSnapshot {
|
||||
impl CharSnapshot {
|
||||
pub fn buffer_snapshot(&self) -> &MultiBufferSnapshot {
|
||||
&self.fold_snapshot.inlay_snapshot.buffer
|
||||
}
|
||||
@@ -170,7 +171,7 @@ impl TabSnapshot {
|
||||
pub fn line_len(&self, row: u32) -> u32 {
|
||||
let max_point = self.max_point();
|
||||
if row < max_point.row() {
|
||||
self.to_tab_point(FoldPoint::new(row, self.fold_snapshot.line_len(row)))
|
||||
self.to_char_point(FoldPoint::new(row, self.fold_snapshot.line_len(row)))
|
||||
.0
|
||||
.column
|
||||
} else {
|
||||
@@ -179,10 +180,10 @@ impl TabSnapshot {
|
||||
}
|
||||
|
||||
pub fn text_summary(&self) -> TextSummary {
|
||||
self.text_summary_for_range(TabPoint::zero()..self.max_point())
|
||||
self.text_summary_for_range(CharPoint::zero()..self.max_point())
|
||||
}
|
||||
|
||||
pub fn text_summary_for_range(&self, range: Range<TabPoint>) -> TextSummary {
|
||||
pub fn text_summary_for_range(&self, range: Range<CharPoint>) -> TextSummary {
|
||||
let input_start = self.to_fold_point(range.start, Bias::Left).0;
|
||||
let input_end = self.to_fold_point(range.end, Bias::Right).0;
|
||||
let input_summary = self
|
||||
@@ -211,7 +212,7 @@ impl TabSnapshot {
|
||||
} else {
|
||||
for _ in self
|
||||
.chunks(
|
||||
TabPoint::new(range.end.row(), 0)..range.end,
|
||||
CharPoint::new(range.end.row(), 0)..range.end,
|
||||
false,
|
||||
Highlights::default(),
|
||||
)
|
||||
@@ -232,7 +233,7 @@ impl TabSnapshot {
|
||||
|
||||
pub fn chunks<'a>(
|
||||
&'a self,
|
||||
range: Range<TabPoint>,
|
||||
range: Range<CharPoint>,
|
||||
language_aware: bool,
|
||||
highlights: Highlights<'a>,
|
||||
) -> TabChunks<'a> {
|
||||
@@ -279,7 +280,7 @@ impl TabSnapshot {
|
||||
#[cfg(test)]
|
||||
pub fn text(&self) -> String {
|
||||
self.chunks(
|
||||
TabPoint::zero()..self.max_point(),
|
||||
CharPoint::zero()..self.max_point(),
|
||||
false,
|
||||
Highlights::default(),
|
||||
)
|
||||
@@ -287,24 +288,24 @@ impl TabSnapshot {
|
||||
.collect()
|
||||
}
|
||||
|
||||
pub fn max_point(&self) -> TabPoint {
|
||||
self.to_tab_point(self.fold_snapshot.max_point())
|
||||
pub fn max_point(&self) -> CharPoint {
|
||||
self.to_char_point(self.fold_snapshot.max_point())
|
||||
}
|
||||
|
||||
pub fn clip_point(&self, point: TabPoint, bias: Bias) -> TabPoint {
|
||||
self.to_tab_point(
|
||||
pub fn clip_point(&self, point: CharPoint, bias: Bias) -> CharPoint {
|
||||
self.to_char_point(
|
||||
self.fold_snapshot
|
||||
.clip_point(self.to_fold_point(point, bias).0, bias),
|
||||
)
|
||||
}
|
||||
|
||||
pub fn to_tab_point(&self, input: FoldPoint) -> TabPoint {
|
||||
pub fn to_char_point(&self, input: FoldPoint) -> CharPoint {
|
||||
let chars = self.fold_snapshot.chars_at(FoldPoint::new(input.row(), 0));
|
||||
let expanded = self.expand_tabs(chars, input.column());
|
||||
TabPoint::new(input.row(), expanded)
|
||||
CharPoint::new(input.row(), expanded)
|
||||
}
|
||||
|
||||
pub fn to_fold_point(&self, output: TabPoint, bias: Bias) -> (FoldPoint, u32, u32) {
|
||||
pub fn to_fold_point(&self, output: CharPoint, bias: Bias) -> (FoldPoint, u32, u32) {
|
||||
let chars = self.fold_snapshot.chars_at(FoldPoint::new(output.row(), 0));
|
||||
let expanded = output.column();
|
||||
let (collapsed, expanded_char_column, to_next_stop) =
|
||||
@@ -316,13 +317,13 @@ impl TabSnapshot {
|
||||
)
|
||||
}
|
||||
|
||||
pub fn make_tab_point(&self, point: Point, bias: Bias) -> TabPoint {
|
||||
pub fn make_char_point(&self, point: Point, bias: Bias) -> CharPoint {
|
||||
let inlay_point = self.fold_snapshot.inlay_snapshot.to_inlay_point(point);
|
||||
let fold_point = self.fold_snapshot.to_fold_point(inlay_point, bias);
|
||||
self.to_tab_point(fold_point)
|
||||
self.to_char_point(fold_point)
|
||||
}
|
||||
|
||||
pub fn to_point(&self, point: TabPoint, bias: Bias) -> Point {
|
||||
pub fn to_point(&self, point: CharPoint, bias: Bias) -> Point {
|
||||
let fold_point = self.to_fold_point(point, bias).0;
|
||||
let inlay_point = fold_point.to_inlay_point(&self.fold_snapshot);
|
||||
self.fold_snapshot
|
||||
@@ -345,6 +346,9 @@ impl TabSnapshot {
|
||||
let tab_len = tab_size - expanded_chars % tab_size;
|
||||
expanded_bytes += tab_len;
|
||||
expanded_chars += tab_len;
|
||||
} else if let Some(replacement) = replacement(c) {
|
||||
expanded_chars += replacement.chars().count() as u32;
|
||||
expanded_bytes += replacement.len() as u32;
|
||||
} else {
|
||||
expanded_bytes += c.len_utf8() as u32;
|
||||
expanded_chars += 1;
|
||||
@@ -384,6 +388,9 @@ impl TabSnapshot {
|
||||
Bias::Right => (collapsed_bytes + 1, expanded_chars, 0),
|
||||
};
|
||||
}
|
||||
} else if let Some(replacement) = replacement(c) {
|
||||
expanded_chars += replacement.chars().count() as u32;
|
||||
expanded_bytes += replacement.len() as u32;
|
||||
} else {
|
||||
expanded_chars += 1;
|
||||
expanded_bytes += c.len_utf8() as u32;
|
||||
@@ -405,9 +412,9 @@ impl TabSnapshot {
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone, Debug, Default, Eq, Ord, PartialOrd, PartialEq)]
|
||||
pub struct TabPoint(pub Point);
|
||||
pub struct CharPoint(pub Point);
|
||||
|
||||
impl TabPoint {
|
||||
impl CharPoint {
|
||||
pub fn new(row: u32, column: u32) -> Self {
|
||||
Self(Point::new(row, column))
|
||||
}
|
||||
@@ -425,13 +432,13 @@ impl TabPoint {
|
||||
}
|
||||
}
|
||||
|
||||
impl From<Point> for TabPoint {
|
||||
impl From<Point> for CharPoint {
|
||||
fn from(point: Point) -> Self {
|
||||
Self(point)
|
||||
}
|
||||
}
|
||||
|
||||
pub type TabEdit = text::Edit<TabPoint>;
|
||||
pub type TabEdit = text::Edit<CharPoint>;
|
||||
|
||||
#[derive(Clone, Debug, Default, Eq, PartialEq)]
|
||||
pub struct TextSummary {
|
||||
@@ -486,7 +493,7 @@ impl<'a> std::ops::AddAssign<&'a Self> for TextSummary {
|
||||
const SPACES: &str = " ";
|
||||
|
||||
pub struct TabChunks<'a> {
|
||||
snapshot: &'a TabSnapshot,
|
||||
snapshot: &'a CharSnapshot,
|
||||
fold_chunks: FoldChunks<'a>,
|
||||
chunk: Chunk<'a>,
|
||||
column: u32,
|
||||
@@ -499,7 +506,7 @@ pub struct TabChunks<'a> {
|
||||
}
|
||||
|
||||
impl<'a> TabChunks<'a> {
|
||||
pub(crate) fn seek(&mut self, range: Range<TabPoint>) {
|
||||
pub(crate) fn seek(&mut self, range: Range<CharPoint>) {
|
||||
let (input_start, expanded_char_column, to_next_stop) =
|
||||
self.snapshot.to_fold_point(range.start, Bias::Left);
|
||||
let input_column = input_start.column();
|
||||
@@ -584,6 +591,37 @@ impl<'a> Iterator for TabChunks<'a> {
                        self.input_column = 0;
                        self.output_position += Point::new(1, 0);
                    }
                    _ if is_invisible(c) => {
                        if ix > 0 {
                            let (prefix, suffix) = self.chunk.text.split_at(ix);
                            self.chunk.text = suffix;
                            return Some(Chunk {
                                text: prefix,
                                is_invisible: false,
                                ..self.chunk.clone()
                            });
                        }
                        let c_len = c.len_utf8();
                        let replacement = replacement(c).unwrap_or(&self.chunk.text[..c_len]);
                        if self.chunk.text.len() >= c_len {
                            self.chunk.text = &self.chunk.text[c_len..];
                        } else {
                            self.chunk.text = "";
                        }
                        let len = replacement.chars().count() as u32;
                        let next_output_position = cmp::min(
                            self.output_position + Point::new(0, len),
                            self.max_output_position,
                        );
                        self.column += len;
                        self.input_column += 1;
                        self.output_position = next_output_position;
                        return Some(Chunk {
                            text: replacement,
                            is_invisible: true,
                            ..self.chunk.clone()
                        });
                    }
                    _ => {
                        self.column += 1;
                        if !self.inside_leading_tab {
@@ -613,11 +651,11 @@ mod tests {
|
||||
let buffer_snapshot = buffer.read(cx).snapshot(cx);
|
||||
let (_, inlay_snapshot) = InlayMap::new(buffer_snapshot.clone());
|
||||
let (_, fold_snapshot) = FoldMap::new(inlay_snapshot);
|
||||
let (_, tab_snapshot) = TabMap::new(fold_snapshot, 4.try_into().unwrap());
|
||||
let (_, char_snapshot) = CharMap::new(fold_snapshot, 4.try_into().unwrap());
|
||||
|
||||
assert_eq!(tab_snapshot.expand_tabs("\t".chars(), 0), 0);
|
||||
assert_eq!(tab_snapshot.expand_tabs("\t".chars(), 1), 4);
|
||||
assert_eq!(tab_snapshot.expand_tabs("\ta".chars(), 2), 5);
|
||||
assert_eq!(char_snapshot.expand_tabs("\t".chars(), 0), 0);
|
||||
assert_eq!(char_snapshot.expand_tabs("\t".chars(), 1), 4);
|
||||
assert_eq!(char_snapshot.expand_tabs("\ta".chars(), 2), 5);
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
@@ -630,16 +668,16 @@ mod tests {
|
||||
let buffer_snapshot = buffer.read(cx).snapshot(cx);
|
||||
let (_, inlay_snapshot) = InlayMap::new(buffer_snapshot.clone());
|
||||
let (_, fold_snapshot) = FoldMap::new(inlay_snapshot);
|
||||
let (_, mut tab_snapshot) = TabMap::new(fold_snapshot, 4.try_into().unwrap());
|
||||
let (_, mut char_snapshot) = CharMap::new(fold_snapshot, 4.try_into().unwrap());
|
||||
|
||||
tab_snapshot.max_expansion_column = max_expansion_column;
|
||||
assert_eq!(tab_snapshot.text(), output);
|
||||
char_snapshot.max_expansion_column = max_expansion_column;
|
||||
assert_eq!(char_snapshot.text(), output);
|
||||
|
||||
for (ix, c) in input.char_indices() {
|
||||
assert_eq!(
|
||||
tab_snapshot
|
||||
char_snapshot
|
||||
.chunks(
|
||||
TabPoint::new(0, ix as u32)..tab_snapshot.max_point(),
|
||||
CharPoint::new(0, ix as u32)..char_snapshot.max_point(),
|
||||
false,
|
||||
Highlights::default(),
|
||||
)
|
||||
@@ -653,13 +691,13 @@ mod tests {
|
||||
let input_point = Point::new(0, ix as u32);
|
||||
let output_point = Point::new(0, output.find(c).unwrap() as u32);
|
||||
assert_eq!(
|
||||
tab_snapshot.to_tab_point(FoldPoint(input_point)),
|
||||
TabPoint(output_point),
|
||||
"to_tab_point({input_point:?})"
|
||||
char_snapshot.to_char_point(FoldPoint(input_point)),
|
||||
CharPoint(output_point),
|
||||
"to_char_point({input_point:?})"
|
||||
);
|
||||
assert_eq!(
|
||||
tab_snapshot
|
||||
.to_fold_point(TabPoint(output_point), Bias::Left)
|
||||
char_snapshot
|
||||
.to_fold_point(CharPoint(output_point), Bias::Left)
|
||||
.0,
|
||||
FoldPoint(input_point),
|
||||
"to_fold_point({output_point:?})"
|
||||
@@ -677,10 +715,10 @@ mod tests {
|
||||
let buffer_snapshot = buffer.read(cx).snapshot(cx);
|
||||
let (_, inlay_snapshot) = InlayMap::new(buffer_snapshot.clone());
|
||||
let (_, fold_snapshot) = FoldMap::new(inlay_snapshot);
|
||||
let (_, mut tab_snapshot) = TabMap::new(fold_snapshot, 4.try_into().unwrap());
|
||||
let (_, mut char_snapshot) = CharMap::new(fold_snapshot, 4.try_into().unwrap());
|
||||
|
||||
tab_snapshot.max_expansion_column = max_expansion_column;
|
||||
assert_eq!(tab_snapshot.text(), input);
|
||||
char_snapshot.max_expansion_column = max_expansion_column;
|
||||
assert_eq!(char_snapshot.text(), input);
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
@@ -691,10 +729,10 @@ mod tests {
|
||||
let buffer_snapshot = buffer.read(cx).snapshot(cx);
|
||||
let (_, inlay_snapshot) = InlayMap::new(buffer_snapshot.clone());
|
||||
let (_, fold_snapshot) = FoldMap::new(inlay_snapshot);
|
||||
let (_, tab_snapshot) = TabMap::new(fold_snapshot, 4.try_into().unwrap());
|
||||
let (_, char_snapshot) = CharMap::new(fold_snapshot, 4.try_into().unwrap());
|
||||
|
||||
assert_eq!(
|
||||
chunks(&tab_snapshot, TabPoint::zero()),
|
||||
chunks(&char_snapshot, CharPoint::zero()),
|
||||
vec![
|
||||
(" ".to_string(), true),
|
||||
(" ".to_string(), false),
|
||||
@@ -703,7 +741,7 @@ mod tests {
|
||||
]
|
||||
);
|
||||
assert_eq!(
|
||||
chunks(&tab_snapshot, TabPoint::new(0, 2)),
|
||||
chunks(&char_snapshot, CharPoint::new(0, 2)),
|
||||
vec![
|
||||
(" ".to_string(), true),
|
||||
(" ".to_string(), false),
|
||||
@@ -712,7 +750,7 @@ mod tests {
|
||||
]
|
||||
);
|
||||
|
||||
fn chunks(snapshot: &TabSnapshot, start: TabPoint) -> Vec<(String, bool)> {
|
||||
fn chunks(snapshot: &CharSnapshot, start: CharPoint) -> Vec<(String, bool)> {
|
||||
let mut chunks = Vec::new();
|
||||
let mut was_tab = false;
|
||||
let mut text = String::new();
|
||||
@@ -758,12 +796,12 @@ mod tests {
|
||||
let (inlay_snapshot, _) = inlay_map.randomly_mutate(&mut 0, &mut rng);
|
||||
log::info!("InlayMap text: {:?}", inlay_snapshot.text());
|
||||
|
||||
let (mut tab_map, _) = TabMap::new(fold_snapshot.clone(), tab_size);
|
||||
let tabs_snapshot = tab_map.set_max_expansion_column(32);
|
||||
let (mut char_map, _) = CharMap::new(fold_snapshot.clone(), tab_size);
|
||||
let tabs_snapshot = char_map.set_max_expansion_column(32);
|
||||
|
||||
let text = text::Rope::from(tabs_snapshot.text().as_str());
|
||||
log::info!(
|
||||
"TabMap text (tab size: {}): {:?}",
|
||||
"CharMap text (tab size: {}): {:?}",
|
||||
tab_size,
|
||||
tabs_snapshot.text(),
|
||||
);
|
||||
@@ -771,11 +809,11 @@ mod tests {
|
||||
for _ in 0..5 {
|
||||
let end_row = rng.gen_range(0..=text.max_point().row);
|
||||
let end_column = rng.gen_range(0..=text.line_len(end_row));
|
||||
let mut end = TabPoint(text.clip_point(Point::new(end_row, end_column), Bias::Right));
|
||||
let mut end = CharPoint(text.clip_point(Point::new(end_row, end_column), Bias::Right));
|
||||
let start_row = rng.gen_range(0..=text.max_point().row);
|
||||
let start_column = rng.gen_range(0..=text.line_len(start_row));
|
||||
let mut start =
|
||||
TabPoint(text.clip_point(Point::new(start_row, start_column), Bias::Left));
|
||||
CharPoint(text.clip_point(Point::new(start_row, start_column), Bias::Left));
|
||||
if start > end {
|
||||
mem::swap(&mut start, &mut end);
|
||||
}
|
||||
@@ -2,7 +2,7 @@ use collections::HashMap;
|
||||
use gpui::{AnyElement, IntoElement};
|
||||
use multi_buffer::{Anchor, AnchorRangeExt, MultiBufferRow, MultiBufferSnapshot, ToPoint};
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::{cmp::Ordering, fmt::Debug, ops::Range, sync::Arc};
|
||||
use std::{cmp::Ordering, ops::Range, sync::Arc};
|
||||
use sum_tree::{Bias, SeekTarget, SumTree};
|
||||
use text::Point;
|
||||
use ui::{IconName, SharedString, WindowContext};
|
||||
@@ -45,15 +45,15 @@ impl CreaseSnapshot {
|
||||
&'a self,
|
||||
row: MultiBufferRow,
|
||||
snapshot: &'a MultiBufferSnapshot,
|
||||
) -> Option<&'a Crease<Anchor>> {
|
||||
) -> Option<&'a Crease> {
|
||||
let start = snapshot.anchor_before(Point::new(row.0, 0));
|
||||
let mut cursor = self.creases.cursor::<ItemSummary>(snapshot);
|
||||
cursor.seek(&start, Bias::Left, snapshot);
|
||||
while let Some(item) = cursor.item() {
|
||||
match Ord::cmp(&item.crease.range().start.to_point(snapshot).row, &row.0) {
|
||||
match Ord::cmp(&item.crease.range.start.to_point(snapshot).row, &row.0) {
|
||||
Ordering::Less => cursor.next(snapshot),
|
||||
Ordering::Equal => {
|
||||
if item.crease.range().start.is_valid(snapshot) {
|
||||
if item.crease.range.start.is_valid(snapshot) {
|
||||
return Some(&item.crease);
|
||||
} else {
|
||||
cursor.next(snapshot);
|
||||
@@ -69,7 +69,7 @@ impl CreaseSnapshot {
|
||||
&'a self,
|
||||
range: Range<MultiBufferRow>,
|
||||
snapshot: &'a MultiBufferSnapshot,
|
||||
) -> impl 'a + Iterator<Item = &'a Crease<Anchor>> {
|
||||
) -> impl 'a + Iterator<Item = &'a Crease> {
|
||||
let start = snapshot.anchor_before(Point::new(range.start.0, 0));
|
||||
let mut cursor = self.creases.cursor::<ItemSummary>(snapshot);
|
||||
cursor.seek(&start, Bias::Left, snapshot);
|
||||
@@ -77,9 +77,8 @@ impl CreaseSnapshot {
|
||||
std::iter::from_fn(move || {
|
||||
while let Some(item) = cursor.item() {
|
||||
cursor.next(snapshot);
|
||||
let crease_range = item.crease.range();
|
||||
let crease_start = crease_range.start.to_point(snapshot);
|
||||
let crease_end = crease_range.end.to_point(snapshot);
|
||||
let crease_start = item.crease.range.start.to_point(snapshot);
|
||||
let crease_end = item.crease.range.end.to_point(snapshot);
|
||||
if crease_end.row > range.end.0 {
|
||||
continue;
|
||||
}
|
||||
@@ -100,9 +99,8 @@ impl CreaseSnapshot {
|
||||
|
||||
cursor.next(snapshot);
|
||||
while let Some(item) = cursor.item() {
|
||||
let crease_range = item.crease.range();
|
||||
let start_point = crease_range.start.to_point(snapshot);
|
||||
let end_point = crease_range.end.to_point(snapshot);
|
||||
let start_point = item.crease.range.start.to_point(snapshot);
|
||||
let end_point = item.crease.range.end.to_point(snapshot);
|
||||
results.push((item.id, start_point..end_point));
|
||||
cursor.next(snapshot);
|
||||
}
|
||||
@@ -125,14 +123,12 @@ type RenderTrailerFn =
    Arc<dyn Send + Sync + Fn(MultiBufferRow, bool, &mut WindowContext) -> AnyElement>;

#[derive(Clone)]
pub enum Crease<T> {
    Inline {
        range: Range<T>,
        placeholder: FoldPlaceholder,
        render_toggle: RenderToggleFn,
        render_trailer: RenderTrailerFn,
        metadata: Option<CreaseMetadata>,
    },
pub struct Crease {
    pub range: Range<Anchor>,
    pub placeholder: FoldPlaceholder,
    pub render_toggle: RenderToggleFn,
    pub render_trailer: RenderTrailerFn,
    pub metadata: Option<CreaseMetadata>,
}

/// Metadata about a [`Crease`], that is used for serialization.
|
||||
@@ -142,9 +138,9 @@ pub struct CreaseMetadata {
|
||||
pub label: SharedString,
|
||||
}
|
||||
|
||||
impl<T> Crease<T> {
|
||||
pub fn inline<RenderToggle, ToggleElement, RenderTrailer, TrailerElement>(
|
||||
range: Range<T>,
|
||||
impl Crease {
|
||||
pub fn new<RenderToggle, ToggleElement, RenderTrailer, TrailerElement>(
|
||||
range: Range<Anchor>,
|
||||
placeholder: FoldPlaceholder,
|
||||
render_toggle: RenderToggle,
|
||||
render_trailer: RenderTrailer,
|
||||
@@ -168,7 +164,7 @@ impl<T> Crease<T> {
|
||||
+ 'static,
|
||||
TrailerElement: IntoElement,
|
||||
{
|
||||
Crease::Inline {
|
||||
Crease {
|
||||
range,
|
||||
placeholder,
|
||||
render_toggle: Arc::new(move |row, folded, toggle, cx| {
|
||||
@@ -181,52 +177,24 @@ impl<T> Crease<T> {
|
||||
}
|
||||
}
|
||||
|
||||
pub fn with_metadata(self, metadata: CreaseMetadata) -> Self {
|
||||
match self {
|
||||
Crease::Inline {
|
||||
range,
|
||||
placeholder,
|
||||
render_toggle,
|
||||
render_trailer,
|
||||
..
|
||||
} => Crease::Inline {
|
||||
range,
|
||||
placeholder,
|
||||
render_toggle,
|
||||
render_trailer,
|
||||
metadata: Some(metadata),
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
pub fn range(&self) -> &Range<T> {
|
||||
match self {
|
||||
Crease::Inline { range, .. } => range,
|
||||
}
|
||||
pub fn with_metadata(mut self, metadata: CreaseMetadata) -> Self {
|
||||
self.metadata = Some(metadata);
|
||||
self
|
||||
}
|
||||
}
|
||||
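A minimal sketch, separate from the diff above: the two with_metadata bodies it shows differ only in shape, since the enum version destructures Crease::Inline and rebuilds the variant while the struct version mutates in place and returns self. The consuming-builder form looks like this, with a hypothetical stand-in struct rather than the real Crease:

use std::ops::Range;

#[derive(Debug)]
struct CreaseSketch {
    range: Range<u32>,
    metadata: Option<String>,
}

impl CreaseSketch {
    // Consuming builder: take self by value, set the field, hand self back.
    fn with_metadata(mut self, metadata: String) -> Self {
        self.metadata = Some(metadata);
        self
    }
}

fn main() {
    let crease = CreaseSketch { range: 1..5, metadata: None }.with_metadata("fn a()".into());
    assert_eq!(crease.range, 1..5);
    assert_eq!(crease.metadata.as_deref(), Some("fn a()"));
}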
|
||||
impl<T> std::fmt::Debug for Crease<T>
|
||||
where
|
||||
T: Debug,
|
||||
{
|
||||
impl std::fmt::Debug for Crease {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
match self {
|
||||
Crease::Inline {
|
||||
range, metadata, ..
|
||||
} => f
|
||||
.debug_struct("Crease::Inline")
|
||||
.field("range", range)
|
||||
.field("metadata", metadata)
|
||||
.finish_non_exhaustive(),
|
||||
}
|
||||
f.debug_struct("Crease")
|
||||
.field("range", &self.range)
|
||||
.finish()
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug)]
|
||||
struct CreaseItem {
|
||||
id: CreaseId,
|
||||
crease: Crease<Anchor>,
|
||||
crease: Crease,
|
||||
}
|
||||
|
||||
impl CreaseMap {
|
||||
@@ -236,7 +204,7 @@ impl CreaseMap {
|
||||
|
||||
pub fn insert(
|
||||
&mut self,
|
||||
creases: impl IntoIterator<Item = Crease<Anchor>>,
|
||||
creases: impl IntoIterator<Item = Crease>,
|
||||
snapshot: &MultiBufferSnapshot,
|
||||
) -> Vec<CreaseId> {
|
||||
let mut new_ids = Vec::new();
|
||||
@@ -244,12 +212,11 @@ impl CreaseMap {
|
||||
let mut new_creases = SumTree::new(snapshot);
|
||||
let mut cursor = self.snapshot.creases.cursor::<ItemSummary>(snapshot);
|
||||
for crease in creases {
|
||||
let crease_range = crease.range().clone();
|
||||
new_creases.append(cursor.slice(&crease_range, Bias::Left, snapshot), snapshot);
|
||||
new_creases.append(cursor.slice(&crease.range, Bias::Left, snapshot), snapshot);
|
||||
|
||||
let id = self.next_id;
|
||||
self.next_id.0 += 1;
|
||||
self.id_to_range.insert(id, crease_range);
|
||||
self.id_to_range.insert(id, crease.range.clone());
|
||||
new_creases.push(CreaseItem { crease, id }, snapshot);
|
||||
new_ids.push(id);
|
||||
}
|
||||
@@ -326,7 +293,7 @@ impl sum_tree::Item for CreaseItem {
|
||||
|
||||
fn summary(&self, _cx: &MultiBufferSnapshot) -> Self::Summary {
|
||||
ItemSummary {
|
||||
range: self.crease.range().clone(),
|
||||
range: self.crease.range.clone(),
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -359,13 +326,13 @@ mod test {
|
||||
|
||||
// Insert creases
|
||||
let creases = [
|
||||
Crease::inline(
|
||||
Crease::new(
|
||||
snapshot.anchor_before(Point::new(1, 0))..snapshot.anchor_after(Point::new(1, 5)),
|
||||
FoldPlaceholder::test(),
|
||||
|_row, _folded, _toggle, _cx| div(),
|
||||
|_row, _folded, _cx| div(),
|
||||
),
|
||||
Crease::inline(
|
||||
Crease::new(
|
||||
snapshot.anchor_before(Point::new(3, 0))..snapshot.anchor_after(Point::new(3, 5)),
|
||||
FoldPlaceholder::test(),
|
||||
|_row, _folded, _toggle, _cx| div(),
|
||||
@@ -405,19 +372,19 @@ mod test {
|
||||
let mut crease_map = CreaseMap::new(&snapshot);
|
||||
|
||||
let creases = [
|
||||
Crease::inline(
|
||||
Crease::new(
|
||||
snapshot.anchor_before(Point::new(1, 0))..snapshot.anchor_after(Point::new(1, 5)),
|
||||
FoldPlaceholder::test(),
|
||||
|_row, _folded, _toggle, _cx| div(),
|
||||
|_row, _folded, _cx| div(),
|
||||
),
|
||||
Crease::inline(
|
||||
Crease::new(
|
||||
snapshot.anchor_before(Point::new(3, 0))..snapshot.anchor_after(Point::new(3, 5)),
|
||||
FoldPlaceholder::test(),
|
||||
|_row, _folded, _toggle, _cx| div(),
|
||||
|_row, _folded, _cx| div(),
|
||||
),
|
||||
Crease::inline(
|
||||
Crease::new(
|
||||
snapshot.anchor_before(Point::new(5, 0))..snapshot.anchor_after(Point::new(5, 5)),
|
||||
FoldPlaceholder::test(),
|
||||
|_row, _folded, _toggle, _cx| div(),
|
||||
@@ -435,12 +402,12 @@ mod test {
|
||||
let range = MultiBufferRow(2)..MultiBufferRow(5);
|
||||
let creases: Vec<_> = crease_snapshot.creases_in_range(range, &snapshot).collect();
|
||||
assert_eq!(creases.len(), 1);
|
||||
assert_eq!(creases[0].range().start.to_point(&snapshot).row, 3);
|
||||
assert_eq!(creases[0].range.start.to_point(&snapshot).row, 3);
|
||||
|
||||
let range = MultiBufferRow(0)..MultiBufferRow(2);
|
||||
let creases: Vec<_> = crease_snapshot.creases_in_range(range, &snapshot).collect();
|
||||
assert_eq!(creases.len(), 1);
|
||||
assert_eq!(creases[0].range().start.to_point(&snapshot).row, 1);
|
||||
assert_eq!(creases[0].range.start.to_point(&snapshot).row, 1);
|
||||
|
||||
let range = MultiBufferRow(6)..MultiBufferRow(7);
|
||||
let creases: Vec<_> = crease_snapshot.creases_in_range(range, &snapshot).collect();
|
||||
|
||||
@@ -255,22 +255,6 @@ impl<'a> InlayChunks<'a> {
self.buffer_chunk = None;
self.output_offset = new_range.start;
self.max_output_offset = new_range.end;

let mut highlight_endpoints = Vec::new();
if let Some(text_highlights) = self.highlights.text_highlights {
if !text_highlights.is_empty() {
self.snapshot.apply_text_highlights(
&mut self.transforms,
&new_range,
text_highlights,
&mut highlight_endpoints,
);
self.transforms.seek(&new_range.start, Bias::Right, &());
highlight_endpoints.sort();
}
}
self.highlight_endpoints = highlight_endpoints.into_iter().peekable();
self.active_highlights.clear();
}

pub fn offset(&self) -> InlayOffset {

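The seek logic above collects highlight endpoints, sorts them, and then drains them through a peekable iterator as chunks stream out. A tiny self-contained illustration of that sweep pattern, with plain integers standing in for inlay offsets (names here are illustrative only):

// Gather range endpoints, sort them, then walk them lazily through a peekable
// iterator while visiting output positions in order.
fn main() {
    let highlight_ranges = [(2usize, 5usize), (4, 9), (12, 14)];
    let mut highlight_endpoints = Vec::new();
    for (start, end) in highlight_ranges {
        highlight_endpoints.push(start);
        highlight_endpoints.push(end);
    }
    highlight_endpoints.sort();
    let mut highlight_endpoints = highlight_endpoints.into_iter().peekable();
    for offset in 0..16 {
        while highlight_endpoints.peek() == Some(&offset) {
            println!("highlight boundary at offset {offset}");
            highlight_endpoints.next();
        }
    }
}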
@@ -1,7 +1,11 @@
use std::sync::LazyLock;

use collections::HashMap;

// Invisibility in a Unicode context is not well defined, so we have to guess.
//
// We highlight all ASCII control codes, and unicode whitespace because they are likely
// confused with an ASCII space in a programming context (U+0020).
// confused with a normal space (U+0020).
//
// We also highlight the handful of blank non-space characters:
// U+2800 BRAILLE PATTERN BLANK - Category: So
@@ -34,38 +38,62 @@ pub fn is_invisible(c: char) -> bool {
if c <= '\u{1f}' {
c != '\t' && c != '\n' && c != '\r'
} else if c >= '\u{7f}' {
c <= '\u{9f}'
|| (c.is_whitespace() && c != IDEOGRAPHIC_SPACE)
|| contains(c, &FORMAT)
|| contains(c, &OTHER)
c <= '\u{9f}' || c.is_whitespace() || contains(c, &FORMAT) || contains(c, &OTHER)
} else {
false
}
}
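A minimal, self-contained sketch of the control-character branches visible in the hunk above, assuming only what the hunk shows (C0 controls other than tab, newline, and carriage return; DEL through U+009F; Unicode whitespace). The contains(c, &FORMAT) / contains(c, &OTHER) table lookups and the IDEOGRAPHIC_SPACE carve-out from one side of the diff are deliberately omitted:

// Sketch only: mirrors the branches shown above, with the table lookups stubbed out.
fn is_invisible_sketch(c: char) -> bool {
    if c <= '\u{1f}' {
        // C0 controls, except the whitespace we render normally.
        c != '\t' && c != '\n' && c != '\r'
    } else if c >= '\u{7f}' {
        // DEL through U+009F, plus Unicode whitespace such as NO-BREAK SPACE.
        c <= '\u{9f}' || c.is_whitespace()
    } else {
        false
    }
}

fn main() {
    assert!(is_invisible_sketch('\u{0007}')); // BEL
    assert!(is_invisible_sketch('\u{00a0}')); // NO-BREAK SPACE
    assert!(!is_invisible_sketch('a'));
    assert!(!is_invisible_sketch('\t'));
}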
// ASCII control characters have fancy unicode glyphs, everything else
// is replaced by a space - unless it is used in combining characters in
// which case we need to leave it in the string.

pub(crate) fn replacement(c: char) -> Option<&'static str> {
if c <= '\x1f' {
Some(C0_SYMBOLS[c as usize])
} else if c == '\x7f' {
Some(DEL)
if !is_invisible(c) {
return None;
}
if c <= '\x7f' {
REPLACEMENTS.get(&c).copied()
} else if contains(c, &PRESERVE) {
None
} else {
Some("\u{2007}") // fixed width space
Some(" ")
}
}
// IDEOGRAPHIC SPACE is common alongside Chinese and other wide character sets.
// We don't highlight this for now (as it already shows up wide in the editor),
// but could if we tracked state in the classifier.
const IDEOGRAPHIC_SPACE: char = '\u{3000}';

const C0_SYMBOLS: &'static [&'static str] = &[
"␀", "␁", "␂", "␃", "␄", "␅", "␆", "␇", "␈", "␉", "␊", "␋", "␌", "␍", "␎", "␏", "␐", "␑", "␒",
"␓", "␔", "␕", "␖", "␗", "␘", "␙", "␚", "␛", "␜", "␝", "␞", "␟",
];
const DEL: &'static str = "␡";
const REPLACEMENTS: LazyLock<HashMap<char, &'static str>> = LazyLock::new(|| {
[
('\x00', "␀"),
('\x01', "␁"),
('\x02', "␂"),
('\x03', "␃"),
('\x04', "␄"),
('\x05', "␅"),
('\x06', "␆"),
('\x07', "␇"),
('\x08', "␈"),
('\x0B', "␋"),
('\x0C', "␌"),
('\x0D', "␍"),
('\x0E', "␎"),
('\x0F', "␏"),
('\x10', "␐"),
('\x11', "␑"),
('\x12', "␒"),
('\x13', "␓"),
('\x14', "␔"),
('\x15', "␕"),
('\x16', "␖"),
('\x17', "␗"),
('\x18', "␘"),
('\x19', "␙"),
('\x1A', "␚"),
('\x1B', "␛"),
('\x1C', "␜"),
('\x1D', "␝"),
('\x1E', "␞"),
('\x1F', "␟"),
('\u{007F}', "␡"),
]
.into_iter()
.collect()
});

// generated using ucd-generate: ucd-generate general-category --include Format --chars ucd-16.0.0
pub const FORMAT: &'static [(char, char)] = &[

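The hunks above trade the indexed C0_SYMBOLS array and DEL constant for a single char-to-glyph map consulted by replacement(). A small sketch of that lookup shape, using std's HashMap and LazyLock rather than the crate's own collections type, declared static here so the table is built once, and with only a tiny subset of entries for illustration:

use std::{collections::HashMap, sync::LazyLock};

// Sketch only: a tiny subset of the table shown in the hunk above, keyed by char.
static REPLACEMENTS_SKETCH: LazyLock<HashMap<char, &'static str>> = LazyLock::new(|| {
    [('\x00', "␀"), ('\x07', "␇"), ('\x1b', "␛"), ('\u{7f}', "␡")]
        .into_iter()
        .collect()
});

// Picture glyph for a known control character, plain space for anything else
// the caller has already classified as invisible.
fn replacement_sketch(c: char) -> &'static str {
    REPLACEMENTS_SKETCH.get(&c).copied().unwrap_or(" ")
}

fn main() {
    assert_eq!(replacement_sketch('\x07'), "␇");
    assert_eq!(replacement_sketch('\u{00a0}'), " ");
}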
@@ -1,6 +1,6 @@
use super::{
char_map::{self, CharPoint, CharSnapshot, TabEdit},
fold_map::FoldBufferRows,
tab_map::{self, TabEdit, TabPoint, TabSnapshot},
Highlights,
};
use gpui::{AppContext, Context, Font, LineWrapper, Model, ModelContext, Pixels, Task};
@@ -12,7 +12,7 @@ use std::{cmp, collections::VecDeque, mem, ops::Range, time::Duration};
use sum_tree::{Bias, Cursor, SumTree};
use text::Patch;

pub use super::tab_map::TextSummary;
pub use super::char_map::TextSummary;
pub type WrapEdit = text::Edit<u32>;

/// Handles soft wrapping of text.
@@ -20,7 +20,7 @@ pub type WrapEdit = text::Edit<u32>;
/// See the [`display_map` module documentation](crate::display_map) for more information.
pub struct WrapMap {
snapshot: WrapSnapshot,
pending_edits: VecDeque<(TabSnapshot, Vec<TabEdit>)>,
pending_edits: VecDeque<(CharSnapshot, Vec<TabEdit>)>,
interpolated_edits: Patch<u32>,
edits_since_sync: Patch<u32>,
wrap_width: Option<Pixels>,
@@ -30,7 +30,7 @@ pub struct WrapMap {

#[derive(Clone)]
pub struct WrapSnapshot {
tab_snapshot: TabSnapshot,
char_snapshot: CharSnapshot,
transforms: SumTree<Transform>,
interpolated: bool,
}
@@ -51,11 +51,11 @@ struct TransformSummary {
|
||||
pub struct WrapPoint(pub Point);
|
||||
|
||||
pub struct WrapChunks<'a> {
|
||||
input_chunks: tab_map::TabChunks<'a>,
|
||||
input_chunks: char_map::TabChunks<'a>,
|
||||
input_chunk: Chunk<'a>,
|
||||
output_position: WrapPoint,
|
||||
max_output_row: u32,
|
||||
transforms: Cursor<'a, Transform, (WrapPoint, TabPoint)>,
|
||||
transforms: Cursor<'a, Transform, (WrapPoint, CharPoint)>,
|
||||
snapshot: &'a WrapSnapshot,
|
||||
}
|
||||
|
||||
@@ -66,7 +66,7 @@ pub struct WrapBufferRows<'a> {
|
||||
output_row: u32,
|
||||
soft_wrapped: bool,
|
||||
max_output_row: u32,
|
||||
transforms: Cursor<'a, Transform, (WrapPoint, TabPoint)>,
|
||||
transforms: Cursor<'a, Transform, (WrapPoint, CharPoint)>,
|
||||
}
|
||||
|
||||
impl<'a> WrapBufferRows<'a> {
|
||||
@@ -86,7 +86,7 @@ impl<'a> WrapBufferRows<'a> {
|
||||
|
||||
impl WrapMap {
|
||||
pub fn new(
|
||||
tab_snapshot: TabSnapshot,
|
||||
char_snapshot: CharSnapshot,
|
||||
font: Font,
|
||||
font_size: Pixels,
|
||||
wrap_width: Option<Pixels>,
|
||||
@@ -99,7 +99,7 @@ impl WrapMap {
|
||||
pending_edits: Default::default(),
|
||||
interpolated_edits: Default::default(),
|
||||
edits_since_sync: Default::default(),
|
||||
snapshot: WrapSnapshot::new(tab_snapshot),
|
||||
snapshot: WrapSnapshot::new(char_snapshot),
|
||||
background_task: None,
|
||||
};
|
||||
this.set_wrap_width(wrap_width, cx);
|
||||
@@ -117,17 +117,17 @@ impl WrapMap {
|
||||
|
||||
pub fn sync(
|
||||
&mut self,
|
||||
tab_snapshot: TabSnapshot,
|
||||
char_snapshot: CharSnapshot,
|
||||
edits: Vec<TabEdit>,
|
||||
cx: &mut ModelContext<Self>,
|
||||
) -> (WrapSnapshot, Patch<u32>) {
|
||||
if self.wrap_width.is_some() {
|
||||
self.pending_edits.push_back((tab_snapshot, edits));
|
||||
self.pending_edits.push_back((char_snapshot, edits));
|
||||
self.flush_edits(cx);
|
||||
} else {
|
||||
self.edits_since_sync = self
|
||||
.edits_since_sync
|
||||
.compose(self.snapshot.interpolate(tab_snapshot, &edits));
|
||||
.compose(self.snapshot.interpolate(char_snapshot, &edits));
|
||||
self.snapshot.interpolated = false;
|
||||
}
|
||||
|
||||
@@ -177,11 +177,11 @@ impl WrapMap {
|
||||
let (font, font_size) = self.font_with_size.clone();
|
||||
let task = cx.background_executor().spawn(async move {
|
||||
let mut line_wrapper = text_system.line_wrapper(font, font_size);
|
||||
let tab_snapshot = new_snapshot.tab_snapshot.clone();
|
||||
let range = TabPoint::zero()..tab_snapshot.max_point();
|
||||
let char_snapshot = new_snapshot.char_snapshot.clone();
|
||||
let range = CharPoint::zero()..char_snapshot.max_point();
|
||||
let edits = new_snapshot
|
||||
.update(
|
||||
tab_snapshot,
|
||||
char_snapshot,
|
||||
&[TabEdit {
|
||||
old: range.clone(),
|
||||
new: range.clone(),
|
||||
@@ -221,7 +221,7 @@ impl WrapMap {
|
||||
} else {
|
||||
let old_rows = self.snapshot.transforms.summary().output.lines.row + 1;
|
||||
self.snapshot.transforms = SumTree::default();
|
||||
let summary = self.snapshot.tab_snapshot.text_summary();
|
||||
let summary = self.snapshot.char_snapshot.text_summary();
|
||||
if !summary.lines.is_zero() {
|
||||
self.snapshot
|
||||
.transforms
|
||||
@@ -239,8 +239,8 @@ impl WrapMap {
|
||||
fn flush_edits(&mut self, cx: &mut ModelContext<Self>) {
|
||||
if !self.snapshot.interpolated {
|
||||
let mut to_remove_len = 0;
|
||||
for (tab_snapshot, _) in &self.pending_edits {
|
||||
if tab_snapshot.version <= self.snapshot.tab_snapshot.version {
|
||||
for (char_snapshot, _) in &self.pending_edits {
|
||||
if char_snapshot.version <= self.snapshot.char_snapshot.version {
|
||||
to_remove_len += 1;
|
||||
} else {
|
||||
break;
|
||||
@@ -262,9 +262,9 @@ impl WrapMap {
|
||||
let update_task = cx.background_executor().spawn(async move {
|
||||
let mut edits = Patch::default();
|
||||
let mut line_wrapper = text_system.line_wrapper(font, font_size);
|
||||
for (tab_snapshot, tab_edits) in pending_edits {
|
||||
for (char_snapshot, tab_edits) in pending_edits {
|
||||
let wrap_edits = snapshot
|
||||
.update(tab_snapshot, &tab_edits, wrap_width, &mut line_wrapper)
|
||||
.update(char_snapshot, &tab_edits, wrap_width, &mut line_wrapper)
|
||||
.await;
|
||||
edits = edits.compose(&wrap_edits);
|
||||
}
|
||||
@@ -301,11 +301,11 @@ impl WrapMap {
|
||||
|
||||
let was_interpolated = self.snapshot.interpolated;
|
||||
let mut to_remove_len = 0;
|
||||
for (tab_snapshot, edits) in &self.pending_edits {
|
||||
if tab_snapshot.version <= self.snapshot.tab_snapshot.version {
|
||||
for (char_snapshot, edits) in &self.pending_edits {
|
||||
if char_snapshot.version <= self.snapshot.char_snapshot.version {
|
||||
to_remove_len += 1;
|
||||
} else {
|
||||
let interpolated_edits = self.snapshot.interpolate(tab_snapshot.clone(), edits);
|
||||
let interpolated_edits = self.snapshot.interpolate(char_snapshot.clone(), edits);
|
||||
self.edits_since_sync = self.edits_since_sync.compose(&interpolated_edits);
|
||||
self.interpolated_edits = self.interpolated_edits.compose(&interpolated_edits);
|
||||
}
|
||||
@@ -318,45 +318,49 @@ impl WrapMap {
|
||||
}
|
||||
|
||||
impl WrapSnapshot {
|
||||
fn new(tab_snapshot: TabSnapshot) -> Self {
|
||||
fn new(char_snapshot: CharSnapshot) -> Self {
|
||||
let mut transforms = SumTree::default();
|
||||
let extent = tab_snapshot.text_summary();
|
||||
let extent = char_snapshot.text_summary();
|
||||
if !extent.lines.is_zero() {
|
||||
transforms.push(Transform::isomorphic(extent), &());
|
||||
}
|
||||
Self {
|
||||
transforms,
|
||||
tab_snapshot,
|
||||
char_snapshot,
|
||||
interpolated: true,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn buffer_snapshot(&self) -> &MultiBufferSnapshot {
|
||||
self.tab_snapshot.buffer_snapshot()
|
||||
self.char_snapshot.buffer_snapshot()
|
||||
}
|
||||
|
||||
fn interpolate(&mut self, new_tab_snapshot: TabSnapshot, tab_edits: &[TabEdit]) -> Patch<u32> {
|
||||
fn interpolate(
|
||||
&mut self,
|
||||
new_char_snapshot: CharSnapshot,
|
||||
tab_edits: &[TabEdit],
|
||||
) -> Patch<u32> {
|
||||
let mut new_transforms;
|
||||
if tab_edits.is_empty() {
|
||||
new_transforms = self.transforms.clone();
|
||||
} else {
|
||||
let mut old_cursor = self.transforms.cursor::<TabPoint>(&());
|
||||
let mut old_cursor = self.transforms.cursor::<CharPoint>(&());
|
||||
|
||||
let mut tab_edits_iter = tab_edits.iter().peekable();
|
||||
new_transforms =
|
||||
old_cursor.slice(&tab_edits_iter.peek().unwrap().old.start, Bias::Right, &());
|
||||
|
||||
while let Some(edit) = tab_edits_iter.next() {
|
||||
if edit.new.start > TabPoint::from(new_transforms.summary().input.lines) {
|
||||
let summary = new_tab_snapshot.text_summary_for_range(
|
||||
TabPoint::from(new_transforms.summary().input.lines)..edit.new.start,
|
||||
if edit.new.start > CharPoint::from(new_transforms.summary().input.lines) {
|
||||
let summary = new_char_snapshot.text_summary_for_range(
|
||||
CharPoint::from(new_transforms.summary().input.lines)..edit.new.start,
|
||||
);
|
||||
new_transforms.push_or_extend(Transform::isomorphic(summary));
|
||||
}
|
||||
|
||||
if !edit.new.is_empty() {
|
||||
new_transforms.push_or_extend(Transform::isomorphic(
|
||||
new_tab_snapshot.text_summary_for_range(edit.new.clone()),
|
||||
new_char_snapshot.text_summary_for_range(edit.new.clone()),
|
||||
));
|
||||
}
|
||||
|
||||
@@ -365,7 +369,7 @@ impl WrapSnapshot {
|
||||
if next_edit.old.start > old_cursor.end(&()) {
|
||||
if old_cursor.end(&()) > edit.old.end {
|
||||
let summary = self
|
||||
.tab_snapshot
|
||||
.char_snapshot
|
||||
.text_summary_for_range(edit.old.end..old_cursor.end(&()));
|
||||
new_transforms.push_or_extend(Transform::isomorphic(summary));
|
||||
}
|
||||
@@ -379,7 +383,7 @@ impl WrapSnapshot {
|
||||
} else {
|
||||
if old_cursor.end(&()) > edit.old.end {
|
||||
let summary = self
|
||||
.tab_snapshot
|
||||
.char_snapshot
|
||||
.text_summary_for_range(edit.old.end..old_cursor.end(&()));
|
||||
new_transforms.push_or_extend(Transform::isomorphic(summary));
|
||||
}
|
||||
@@ -392,7 +396,7 @@ impl WrapSnapshot {
|
||||
let old_snapshot = mem::replace(
|
||||
self,
|
||||
WrapSnapshot {
|
||||
tab_snapshot: new_tab_snapshot,
|
||||
char_snapshot: new_char_snapshot,
|
||||
transforms: new_transforms,
|
||||
interpolated: true,
|
||||
},
|
||||
@@ -403,7 +407,7 @@ impl WrapSnapshot {
|
||||
|
||||
async fn update(
|
||||
&mut self,
|
||||
new_tab_snapshot: TabSnapshot,
|
||||
new_char_snapshot: CharSnapshot,
|
||||
tab_edits: &[TabEdit],
|
||||
wrap_width: Pixels,
|
||||
line_wrapper: &mut LineWrapper,
|
||||
@@ -440,27 +444,27 @@ impl WrapSnapshot {
|
||||
new_transforms = self.transforms.clone();
|
||||
} else {
|
||||
let mut row_edits = row_edits.into_iter().peekable();
|
||||
let mut old_cursor = self.transforms.cursor::<TabPoint>(&());
|
||||
let mut old_cursor = self.transforms.cursor::<CharPoint>(&());
|
||||
|
||||
new_transforms = old_cursor.slice(
|
||||
&TabPoint::new(row_edits.peek().unwrap().old_rows.start, 0),
|
||||
&CharPoint::new(row_edits.peek().unwrap().old_rows.start, 0),
|
||||
Bias::Right,
|
||||
&(),
|
||||
);
|
||||
|
||||
while let Some(edit) = row_edits.next() {
|
||||
if edit.new_rows.start > new_transforms.summary().input.lines.row {
|
||||
let summary = new_tab_snapshot.text_summary_for_range(
|
||||
TabPoint(new_transforms.summary().input.lines)
|
||||
..TabPoint::new(edit.new_rows.start, 0),
|
||||
let summary = new_char_snapshot.text_summary_for_range(
|
||||
CharPoint(new_transforms.summary().input.lines)
|
||||
..CharPoint::new(edit.new_rows.start, 0),
|
||||
);
|
||||
new_transforms.push_or_extend(Transform::isomorphic(summary));
|
||||
}
|
||||
|
||||
let mut line = String::new();
|
||||
let mut remaining = None;
|
||||
let mut chunks = new_tab_snapshot.chunks(
|
||||
TabPoint::new(edit.new_rows.start, 0)..new_tab_snapshot.max_point(),
|
||||
let mut chunks = new_char_snapshot.chunks(
|
||||
CharPoint::new(edit.new_rows.start, 0)..new_char_snapshot.max_point(),
|
||||
false,
|
||||
Highlights::default(),
|
||||
);
|
||||
@@ -507,19 +511,19 @@ impl WrapSnapshot {
|
||||
}
|
||||
new_transforms.extend(edit_transforms, &());
|
||||
|
||||
old_cursor.seek_forward(&TabPoint::new(edit.old_rows.end, 0), Bias::Right, &());
|
||||
old_cursor.seek_forward(&CharPoint::new(edit.old_rows.end, 0), Bias::Right, &());
|
||||
if let Some(next_edit) = row_edits.peek() {
|
||||
if next_edit.old_rows.start > old_cursor.end(&()).row() {
|
||||
if old_cursor.end(&()) > TabPoint::new(edit.old_rows.end, 0) {
|
||||
let summary = self.tab_snapshot.text_summary_for_range(
|
||||
TabPoint::new(edit.old_rows.end, 0)..old_cursor.end(&()),
|
||||
if old_cursor.end(&()) > CharPoint::new(edit.old_rows.end, 0) {
|
||||
let summary = self.char_snapshot.text_summary_for_range(
|
||||
CharPoint::new(edit.old_rows.end, 0)..old_cursor.end(&()),
|
||||
);
|
||||
new_transforms.push_or_extend(Transform::isomorphic(summary));
|
||||
}
|
||||
old_cursor.next(&());
|
||||
new_transforms.append(
|
||||
old_cursor.slice(
|
||||
&TabPoint::new(next_edit.old_rows.start, 0),
|
||||
&CharPoint::new(next_edit.old_rows.start, 0),
|
||||
Bias::Right,
|
||||
&(),
|
||||
),
|
||||
@@ -527,9 +531,9 @@ impl WrapSnapshot {
|
||||
);
|
||||
}
|
||||
} else {
|
||||
if old_cursor.end(&()) > TabPoint::new(edit.old_rows.end, 0) {
|
||||
let summary = self.tab_snapshot.text_summary_for_range(
|
||||
TabPoint::new(edit.old_rows.end, 0)..old_cursor.end(&()),
|
||||
if old_cursor.end(&()) > CharPoint::new(edit.old_rows.end, 0) {
|
||||
let summary = self.char_snapshot.text_summary_for_range(
|
||||
CharPoint::new(edit.old_rows.end, 0)..old_cursor.end(&()),
|
||||
);
|
||||
new_transforms.push_or_extend(Transform::isomorphic(summary));
|
||||
}
|
||||
@@ -542,7 +546,7 @@ impl WrapSnapshot {
|
||||
let old_snapshot = mem::replace(
|
||||
self,
|
||||
WrapSnapshot {
|
||||
tab_snapshot: new_tab_snapshot,
|
||||
char_snapshot: new_char_snapshot,
|
||||
transforms: new_transforms,
|
||||
interpolated: false,
|
||||
},
|
||||
@@ -595,17 +599,17 @@ impl WrapSnapshot {
|
||||
) -> WrapChunks<'a> {
|
||||
let output_start = WrapPoint::new(rows.start, 0);
|
||||
let output_end = WrapPoint::new(rows.end, 0);
|
||||
let mut transforms = self.transforms.cursor::<(WrapPoint, TabPoint)>(&());
|
||||
let mut transforms = self.transforms.cursor::<(WrapPoint, CharPoint)>(&());
|
||||
transforms.seek(&output_start, Bias::Right, &());
|
||||
let mut input_start = TabPoint(transforms.start().1 .0);
|
||||
let mut input_start = CharPoint(transforms.start().1 .0);
|
||||
if transforms.item().map_or(false, |t| t.is_isomorphic()) {
|
||||
input_start.0 += output_start.0 - transforms.start().0 .0;
|
||||
}
|
||||
let input_end = self
|
||||
.to_tab_point(output_end)
|
||||
.min(self.tab_snapshot.max_point());
|
||||
.to_char_point(output_end)
|
||||
.min(self.char_snapshot.max_point());
|
||||
WrapChunks {
|
||||
input_chunks: self.tab_snapshot.chunks(
|
||||
input_chunks: self.char_snapshot.chunks(
|
||||
input_start..input_end,
|
||||
language_aware,
|
||||
highlights,
|
||||
@@ -623,7 +627,7 @@ impl WrapSnapshot {
|
||||
}
|
||||
|
||||
pub fn line_len(&self, row: u32) -> u32 {
|
||||
let mut cursor = self.transforms.cursor::<(WrapPoint, TabPoint)>(&());
|
||||
let mut cursor = self.transforms.cursor::<(WrapPoint, CharPoint)>(&());
|
||||
cursor.seek(&WrapPoint::new(row + 1, 0), Bias::Left, &());
|
||||
if cursor
|
||||
.item()
|
||||
@@ -631,7 +635,7 @@ impl WrapSnapshot {
|
||||
{
|
||||
let overshoot = row - cursor.start().0.row();
|
||||
let tab_row = cursor.start().1.row() + overshoot;
|
||||
let tab_line_len = self.tab_snapshot.line_len(tab_row);
|
||||
let tab_line_len = self.char_snapshot.line_len(tab_row);
|
||||
if overshoot == 0 {
|
||||
cursor.start().0.column() + (tab_line_len - cursor.start().1.column())
|
||||
} else {
|
||||
@@ -648,15 +652,17 @@ impl WrapSnapshot {
|
||||
let start = WrapPoint::new(rows.start, 0);
|
||||
let end = WrapPoint::new(rows.end, 0);
|
||||
|
||||
let mut cursor = self.transforms.cursor::<(WrapPoint, TabPoint)>(&());
|
||||
let mut cursor = self.transforms.cursor::<(WrapPoint, CharPoint)>(&());
|
||||
cursor.seek(&start, Bias::Right, &());
|
||||
if let Some(transform) = cursor.item() {
|
||||
let start_in_transform = start.0 - cursor.start().0 .0;
|
||||
let end_in_transform = cmp::min(end, cursor.end(&()).0).0 - cursor.start().0 .0;
|
||||
if transform.is_isomorphic() {
|
||||
let tab_start = TabPoint(cursor.start().1 .0 + start_in_transform);
|
||||
let tab_end = TabPoint(cursor.start().1 .0 + end_in_transform);
|
||||
summary += &self.tab_snapshot.text_summary_for_range(tab_start..tab_end);
|
||||
let char_start = CharPoint(cursor.start().1 .0 + start_in_transform);
|
||||
let char_end = CharPoint(cursor.start().1 .0 + end_in_transform);
|
||||
summary += &self
|
||||
.char_snapshot
|
||||
.text_summary_for_range(char_start..char_end);
|
||||
} else {
|
||||
debug_assert_eq!(start_in_transform.row, end_in_transform.row);
|
||||
let indent_len = end_in_transform.column - start_in_transform.column;
|
||||
@@ -681,9 +687,9 @@ impl WrapSnapshot {
|
||||
let end_in_transform = end.0 - cursor.start().0 .0;
|
||||
if transform.is_isomorphic() {
|
||||
let char_start = cursor.start().1;
|
||||
let char_end = TabPoint(char_start.0 + end_in_transform);
|
||||
let char_end = CharPoint(char_start.0 + end_in_transform);
|
||||
summary += &self
|
||||
.tab_snapshot
|
||||
.char_snapshot
|
||||
.text_summary_for_range(char_start..char_end);
|
||||
} else {
|
||||
debug_assert_eq!(end_in_transform, Point::new(1, 0));
|
||||
@@ -718,14 +724,14 @@ impl WrapSnapshot {
|
||||
}
|
||||
|
||||
pub fn buffer_rows(&self, start_row: u32) -> WrapBufferRows {
|
||||
let mut transforms = self.transforms.cursor::<(WrapPoint, TabPoint)>(&());
|
||||
let mut transforms = self.transforms.cursor::<(WrapPoint, CharPoint)>(&());
|
||||
transforms.seek(&WrapPoint::new(start_row, 0), Bias::Left, &());
|
||||
let mut input_row = transforms.start().1.row();
|
||||
if transforms.item().map_or(false, |t| t.is_isomorphic()) {
|
||||
input_row += start_row - transforms.start().0.row();
|
||||
}
|
||||
let soft_wrapped = transforms.item().map_or(false, |t| !t.is_isomorphic());
|
||||
let mut input_buffer_rows = self.tab_snapshot.buffer_rows(input_row);
|
||||
let mut input_buffer_rows = self.char_snapshot.buffer_rows(input_row);
|
||||
let input_buffer_row = input_buffer_rows.next().unwrap();
|
||||
WrapBufferRows {
|
||||
transforms,
|
||||
@@ -737,26 +743,26 @@ impl WrapSnapshot {
|
||||
}
|
||||
}
|
||||
|
||||
pub fn to_tab_point(&self, point: WrapPoint) -> TabPoint {
|
||||
let mut cursor = self.transforms.cursor::<(WrapPoint, TabPoint)>(&());
|
||||
pub fn to_char_point(&self, point: WrapPoint) -> CharPoint {
|
||||
let mut cursor = self.transforms.cursor::<(WrapPoint, CharPoint)>(&());
|
||||
cursor.seek(&point, Bias::Right, &());
|
||||
let mut tab_point = cursor.start().1 .0;
|
||||
let mut char_point = cursor.start().1 .0;
|
||||
if cursor.item().map_or(false, |t| t.is_isomorphic()) {
|
||||
tab_point += point.0 - cursor.start().0 .0;
|
||||
char_point += point.0 - cursor.start().0 .0;
|
||||
}
|
||||
TabPoint(tab_point)
|
||||
CharPoint(char_point)
|
||||
}
|
||||
|
||||
pub fn to_point(&self, point: WrapPoint, bias: Bias) -> Point {
|
||||
self.tab_snapshot.to_point(self.to_tab_point(point), bias)
|
||||
self.char_snapshot.to_point(self.to_char_point(point), bias)
|
||||
}
|
||||
|
||||
pub fn make_wrap_point(&self, point: Point, bias: Bias) -> WrapPoint {
|
||||
self.tab_point_to_wrap_point(self.tab_snapshot.make_tab_point(point, bias))
|
||||
self.char_point_to_wrap_point(self.char_snapshot.make_char_point(point, bias))
|
||||
}
|
||||
|
||||
pub fn tab_point_to_wrap_point(&self, point: TabPoint) -> WrapPoint {
|
||||
let mut cursor = self.transforms.cursor::<(TabPoint, WrapPoint)>(&());
|
||||
pub fn char_point_to_wrap_point(&self, point: CharPoint) -> WrapPoint {
|
||||
let mut cursor = self.transforms.cursor::<(CharPoint, WrapPoint)>(&());
|
||||
cursor.seek(&point, Bias::Right, &());
|
||||
WrapPoint(cursor.start().1 .0 + (point.0 - cursor.start().0 .0))
|
||||
}
|
||||
@@ -771,7 +777,10 @@ impl WrapSnapshot {
|
||||
}
|
||||
}
|
||||
|
||||
self.tab_point_to_wrap_point(self.tab_snapshot.clip_point(self.to_tab_point(point), bias))
|
||||
self.char_point_to_wrap_point(
|
||||
self.char_snapshot
|
||||
.clip_point(self.to_char_point(point), bias),
|
||||
)
|
||||
}
|
||||
|
||||
pub fn prev_row_boundary(&self, mut point: WrapPoint) -> u32 {
|
||||
@@ -781,7 +790,7 @@ impl WrapSnapshot {
|
||||
|
||||
*point.column_mut() = 0;
|
||||
|
||||
let mut cursor = self.transforms.cursor::<(WrapPoint, TabPoint)>(&());
|
||||
let mut cursor = self.transforms.cursor::<(WrapPoint, CharPoint)>(&());
|
||||
cursor.seek(&point, Bias::Right, &());
|
||||
if cursor.item().is_none() {
|
||||
cursor.prev(&());
|
||||
@@ -801,7 +810,7 @@ impl WrapSnapshot {
|
||||
pub fn next_row_boundary(&self, mut point: WrapPoint) -> Option<u32> {
|
||||
point.0 += Point::new(1, 0);
|
||||
|
||||
let mut cursor = self.transforms.cursor::<(WrapPoint, TabPoint)>(&());
|
||||
let mut cursor = self.transforms.cursor::<(WrapPoint, CharPoint)>(&());
|
||||
cursor.seek(&point, Bias::Right, &());
|
||||
while let Some(transform) = cursor.item() {
|
||||
if transform.is_isomorphic() && cursor.start().1.column() == 0 {
|
||||
@@ -833,8 +842,8 @@ impl WrapSnapshot {
|
||||
#[cfg(test)]
|
||||
{
|
||||
assert_eq!(
|
||||
TabPoint::from(self.transforms.summary().input.lines),
|
||||
self.tab_snapshot.max_point()
|
||||
CharPoint::from(self.transforms.summary().input.lines),
|
||||
self.char_snapshot.max_point()
|
||||
);
|
||||
|
||||
{
|
||||
@@ -847,18 +856,18 @@ impl WrapSnapshot {
|
||||
}
|
||||
|
||||
let text = language::Rope::from(self.text().as_str());
|
||||
let mut input_buffer_rows = self.tab_snapshot.buffer_rows(0);
|
||||
let mut input_buffer_rows = self.char_snapshot.buffer_rows(0);
|
||||
let mut expected_buffer_rows = Vec::new();
|
||||
let mut prev_tab_row = 0;
|
||||
for display_row in 0..=self.max_point().row() {
|
||||
let tab_point = self.to_tab_point(WrapPoint::new(display_row, 0));
|
||||
if tab_point.row() == prev_tab_row && display_row != 0 {
|
||||
let char_point = self.to_char_point(WrapPoint::new(display_row, 0));
|
||||
if char_point.row() == prev_tab_row && display_row != 0 {
|
||||
expected_buffer_rows.push(None);
|
||||
} else {
|
||||
expected_buffer_rows.push(input_buffer_rows.next().unwrap());
|
||||
}
|
||||
|
||||
prev_tab_row = tab_point.row();
|
||||
prev_tab_row = char_point.row();
|
||||
assert_eq!(self.line_len(display_row), text.line_len(display_row));
|
||||
}
|
||||
|
||||
@@ -880,14 +889,14 @@ impl<'a> WrapChunks<'a> {
|
||||
let output_start = WrapPoint::new(rows.start, 0);
|
||||
let output_end = WrapPoint::new(rows.end, 0);
|
||||
self.transforms.seek(&output_start, Bias::Right, &());
|
||||
let mut input_start = TabPoint(self.transforms.start().1 .0);
|
||||
let mut input_start = CharPoint(self.transforms.start().1 .0);
|
||||
if self.transforms.item().map_or(false, |t| t.is_isomorphic()) {
|
||||
input_start.0 += output_start.0 - self.transforms.start().0 .0;
|
||||
}
|
||||
let input_end = self
|
||||
.snapshot
|
||||
.to_tab_point(output_end)
|
||||
.min(self.snapshot.tab_snapshot.max_point());
|
||||
.to_char_point(output_end)
|
||||
.min(self.snapshot.char_snapshot.max_point());
|
||||
self.input_chunks.seek(input_start..input_end);
|
||||
self.input_chunk = Chunk::default();
|
||||
self.output_position = output_start;
|
||||
@@ -942,13 +951,11 @@ impl<'a> Iterator for WrapChunks<'a> {
|
||||
} else {
|
||||
*self.output_position.column_mut() += char_len as u32;
|
||||
}
|
||||
|
||||
if self.output_position >= transform_end {
|
||||
self.transforms.next(&());
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
let (prefix, suffix) = self.input_chunk.text.split_at(input_len);
|
||||
self.input_chunk.text = suffix;
|
||||
Some(Chunk {
|
||||
@@ -1103,7 +1110,7 @@ impl sum_tree::Summary for TransformSummary {
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> sum_tree::Dimension<'a, TransformSummary> for TabPoint {
|
||||
impl<'a> sum_tree::Dimension<'a, TransformSummary> for CharPoint {
|
||||
fn zero(_cx: &()) -> Self {
|
||||
Default::default()
|
||||
}
|
||||
@@ -1113,7 +1120,7 @@ impl<'a> sum_tree::Dimension<'a, TransformSummary> for TabPoint {
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> sum_tree::SeekTarget<'a, TransformSummary, TransformSummary> for TabPoint {
|
||||
impl<'a> sum_tree::SeekTarget<'a, TransformSummary, TransformSummary> for CharPoint {
|
||||
fn cmp(&self, cursor_location: &TransformSummary, _: &()) -> std::cmp::Ordering {
|
||||
Ord::cmp(&self.0, &cursor_location.input.lines)
|
||||
}
|
||||
@@ -1161,7 +1168,7 @@ fn consolidate_wrap_edits(edits: Vec<WrapEdit>) -> Vec<WrapEdit> {
|
||||
mod tests {
|
||||
use super::*;
|
||||
use crate::{
|
||||
display_map::{fold_map::FoldMap, inlay_map::InlayMap, tab_map::TabMap},
|
||||
display_map::{char_map::CharMap, fold_map::FoldMap, inlay_map::InlayMap},
|
||||
MultiBuffer,
|
||||
};
|
||||
use gpui::{font, px, test::observe};
|
||||
@@ -1213,9 +1220,9 @@ mod tests {
|
||||
log::info!("InlayMap text: {:?}", inlay_snapshot.text());
|
||||
let (mut fold_map, fold_snapshot) = FoldMap::new(inlay_snapshot.clone());
|
||||
log::info!("FoldMap text: {:?}", fold_snapshot.text());
|
||||
let (mut tab_map, _) = TabMap::new(fold_snapshot.clone(), tab_size);
|
||||
let tabs_snapshot = tab_map.set_max_expansion_column(32);
|
||||
log::info!("TabMap text: {:?}", tabs_snapshot.text());
|
||||
let (mut char_map, _) = CharMap::new(fold_snapshot.clone(), tab_size);
|
||||
let tabs_snapshot = char_map.set_max_expansion_column(32);
|
||||
log::info!("CharMap text: {:?}", tabs_snapshot.text());
|
||||
|
||||
let mut line_wrapper = text_system.line_wrapper(font.clone(), font_size);
|
||||
let unwrapped_text = tabs_snapshot.text();
|
||||
@@ -1261,7 +1268,7 @@ mod tests {
|
||||
20..=39 => {
|
||||
for (fold_snapshot, fold_edits) in fold_map.randomly_mutate(&mut rng) {
|
||||
let (tabs_snapshot, tab_edits) =
|
||||
tab_map.sync(fold_snapshot, fold_edits, tab_size);
|
||||
char_map.sync(fold_snapshot, fold_edits, tab_size);
|
||||
let (mut snapshot, wrap_edits) =
|
||||
wrap_map.update(cx, |map, cx| map.sync(tabs_snapshot, tab_edits, cx));
|
||||
snapshot.check_invariants();
|
||||
@@ -1274,7 +1281,7 @@ mod tests {
|
||||
inlay_map.randomly_mutate(&mut next_inlay_id, &mut rng);
|
||||
let (fold_snapshot, fold_edits) = fold_map.read(inlay_snapshot, inlay_edits);
|
||||
let (tabs_snapshot, tab_edits) =
|
||||
tab_map.sync(fold_snapshot, fold_edits, tab_size);
|
||||
char_map.sync(fold_snapshot, fold_edits, tab_size);
|
||||
let (mut snapshot, wrap_edits) =
|
||||
wrap_map.update(cx, |map, cx| map.sync(tabs_snapshot, tab_edits, cx));
|
||||
snapshot.check_invariants();
|
||||
@@ -1298,8 +1305,8 @@ mod tests {
|
||||
log::info!("InlayMap text: {:?}", inlay_snapshot.text());
|
||||
let (fold_snapshot, fold_edits) = fold_map.read(inlay_snapshot, inlay_edits);
|
||||
log::info!("FoldMap text: {:?}", fold_snapshot.text());
|
||||
let (tabs_snapshot, tab_edits) = tab_map.sync(fold_snapshot, fold_edits, tab_size);
|
||||
log::info!("TabMap text: {:?}", tabs_snapshot.text());
|
||||
let (tabs_snapshot, tab_edits) = char_map.sync(fold_snapshot, fold_edits, tab_size);
|
||||
log::info!("CharMap text: {:?}", tabs_snapshot.text());
|
||||
|
||||
let unwrapped_text = tabs_snapshot.text();
|
||||
let expected_text = wrap_text(&unwrapped_text, wrap_width, &mut line_wrapper);
|
||||
@@ -1345,7 +1352,7 @@ mod tests {
|
||||
|
||||
if tab_size.get() == 1
|
||||
|| !wrapped_snapshot
|
||||
.tab_snapshot
|
||||
.char_snapshot
|
||||
.fold_snapshot
|
||||
.text()
|
||||
.contains('\t')
|
||||
|
||||
@@ -131,9 +131,7 @@ use project::{
|
||||
use rand::prelude::*;
|
||||
use rpc::{proto::*, ErrorExt};
|
||||
use scroll::{Autoscroll, OngoingScroll, ScrollAnchor, ScrollManager, ScrollbarAutoHide};
|
||||
use selections_collection::{
|
||||
resolve_selections, MutableSelectionsCollection, SelectionsCollection,
|
||||
};
|
||||
use selections_collection::{resolve_multiple, MutableSelectionsCollection, SelectionsCollection};
|
||||
use serde::{Deserialize, Serialize};
|
||||
use settings::{update_settings_file, Settings, SettingsLocation, SettingsStore};
|
||||
use smallvec::SmallVec;
|
||||
@@ -3246,21 +3244,9 @@ impl Editor {
|
||||
}
|
||||
|
||||
if enabled && pair.start.ends_with(text.as_ref()) {
|
||||
let prefix_len = pair.start.len() - text.len();
|
||||
let preceding_text_matches_prefix = prefix_len == 0
|
||||
|| (selection.start.column >= (prefix_len as u32)
|
||||
&& snapshot.contains_str_at(
|
||||
Point::new(
|
||||
selection.start.row,
|
||||
selection.start.column - (prefix_len as u32),
|
||||
),
|
||||
&pair.start[..prefix_len],
|
||||
));
|
||||
if preceding_text_matches_prefix {
|
||||
bracket_pair = Some(pair.clone());
|
||||
is_bracket_pair_start = true;
|
||||
break;
|
||||
}
|
||||
bracket_pair = Some(pair.clone());
|
||||
is_bracket_pair_start = true;
|
||||
break;
|
||||
}
|
||||
if pair.end.as_str() == text.as_ref() {
|
||||
bracket_pair = Some(pair.clone());
|
||||
@@ -3277,6 +3263,8 @@ impl Editor {
|
||||
self.use_auto_surround && snapshot_settings.use_auto_surround;
|
||||
if selection.is_empty() {
|
||||
if is_bracket_pair_start {
|
||||
let prefix_len = bracket_pair.start.len() - text.len();
|
||||
|
||||
// If the inserted text is a suffix of an opening bracket and the
|
||||
// selection is preceded by the rest of the opening bracket, then
|
||||
// insert the closing bracket.
|
||||
@@ -3284,25 +3272,20 @@ impl Editor {
|
||||
.chars_at(selection.start)
|
||||
.next()
|
||||
.map_or(true, |c| scope.should_autoclose_before(c));
|
||||
|
||||
let is_closing_quote = if bracket_pair.end == bracket_pair.start
|
||||
&& bracket_pair.start.len() == 1
|
||||
{
|
||||
let target = bracket_pair.start.chars().next().unwrap();
|
||||
let current_line_count = snapshot
|
||||
.reversed_chars_at(selection.start)
|
||||
.take_while(|&c| c != '\n')
|
||||
.filter(|&c| c == target)
|
||||
.count();
|
||||
current_line_count % 2 == 1
|
||||
} else {
|
||||
false
|
||||
};
|
||||
let preceding_text_matches_prefix = prefix_len == 0
|
||||
|| (selection.start.column >= (prefix_len as u32)
|
||||
&& snapshot.contains_str_at(
|
||||
Point::new(
|
||||
selection.start.row,
|
||||
selection.start.column - (prefix_len as u32),
|
||||
),
|
||||
&bracket_pair.start[..prefix_len],
|
||||
));
|
||||
|
||||
if autoclose
|
||||
&& bracket_pair.close
|
||||
&& following_text_allows_autoclose
|
||||
&& !is_closing_quote
|
||||
&& preceding_text_matches_prefix
|
||||
{
|
||||
let anchor = snapshot.anchor_before(selection.end);
|
||||
new_selections.push((selection.map(|_| anchor), text.len()));
|
||||
@@ -3473,8 +3456,8 @@ impl Editor {
|
||||
}
|
||||
let new_anchor_selections = new_selections.iter().map(|e| &e.0);
|
||||
let new_selection_deltas = new_selections.iter().map(|e| e.1);
|
||||
let map = this.display_map.update(cx, |map, cx| map.snapshot(cx));
|
||||
let new_selections = resolve_selections::<usize, _>(new_anchor_selections, &map)
|
||||
let snapshot = this.buffer.read(cx).read(cx);
|
||||
let new_selections = resolve_multiple::<usize, _>(new_anchor_selections, &snapshot)
|
||||
.zip(new_selection_deltas)
|
||||
.map(|(selection, delta)| Selection {
|
||||
id: selection.id,
|
||||
@@ -3487,20 +3470,18 @@ impl Editor {
|
||||
|
||||
let mut i = 0;
|
||||
for (position, delta, selection_id, pair) in new_autoclose_regions {
|
||||
let position = position.to_offset(&map.buffer_snapshot) + delta;
|
||||
let start = map.buffer_snapshot.anchor_before(position);
|
||||
let end = map.buffer_snapshot.anchor_after(position);
|
||||
let position = position.to_offset(&snapshot) + delta;
|
||||
let start = snapshot.anchor_before(position);
|
||||
let end = snapshot.anchor_after(position);
|
||||
while let Some(existing_state) = this.autoclose_regions.get(i) {
|
||||
match existing_state.range.start.cmp(&start, &map.buffer_snapshot) {
|
||||
match existing_state.range.start.cmp(&start, &snapshot) {
|
||||
Ordering::Less => i += 1,
|
||||
Ordering::Greater => break,
|
||||
Ordering::Equal => {
|
||||
match end.cmp(&existing_state.range.end, &map.buffer_snapshot) {
|
||||
Ordering::Less => i += 1,
|
||||
Ordering::Equal => break,
|
||||
Ordering::Greater => break,
|
||||
}
|
||||
}
|
||||
Ordering::Equal => match end.cmp(&existing_state.range.end, &snapshot) {
|
||||
Ordering::Less => i += 1,
|
||||
Ordering::Equal => break,
|
||||
Ordering::Greater => break,
|
||||
},
|
||||
}
|
||||
}
|
||||
this.autoclose_regions.insert(
|
||||
@@ -3513,6 +3494,7 @@ impl Editor {
|
||||
);
|
||||
}
|
||||
|
||||
drop(snapshot);
|
||||
let had_active_inline_completion = this.has_active_inline_completion(cx);
|
||||
this.change_selections_inner(Some(Autoscroll::fit()), false, cx, |s| {
|
||||
s.select(new_selections)
|
||||
@@ -4028,7 +4010,7 @@ impl Editor {
|
||||
}
|
||||
}
|
||||
|
||||
(selection, enclosing)
|
||||
(selection.clone(), enclosing)
|
||||
})
|
||||
}
|
||||
|
||||
@@ -6706,7 +6688,7 @@ impl Editor {
|
||||
buffer.edit([(range, text)], None, cx);
|
||||
}
|
||||
});
|
||||
this.fold_creases(refold_ranges, true, cx);
|
||||
this.fold_ranges(refold_ranges, true, cx);
|
||||
this.change_selections(Some(Autoscroll::fit()), cx, |s| {
|
||||
s.select(new_selections);
|
||||
})
|
||||
@@ -6800,7 +6782,7 @@ impl Editor {
|
||||
buffer.edit([(range, text)], None, cx);
|
||||
}
|
||||
});
|
||||
this.fold_creases(refold_ranges, true, cx);
|
||||
this.fold_ranges(refold_ranges, true, cx);
|
||||
this.change_selections(Some(Autoscroll::fit()), cx, |s| s.select(new_selections));
|
||||
});
|
||||
}
|
||||
@@ -9632,8 +9614,8 @@ impl Editor {
|
||||
let Some(provider) = self.semantics_provider.clone() else {
|
||||
return Task::ready(Ok(Navigated::No));
|
||||
};
|
||||
let head = self.selections.newest::<usize>(cx).head();
|
||||
let buffer = self.buffer.read(cx);
|
||||
let head = self.selections.newest::<usize>(cx).head();
|
||||
let (buffer, head) = if let Some(text_anchor) = buffer.text_anchor_for_position(head, cx) {
|
||||
text_anchor
|
||||
} else {
|
||||
@@ -9940,8 +9922,8 @@ impl Editor {
|
||||
_: &FindAllReferences,
|
||||
cx: &mut ViewContext<Self>,
|
||||
) -> Option<Task<Result<Navigated>>> {
|
||||
let selection = self.selections.newest::<usize>(cx);
|
||||
let multi_buffer = self.buffer.read(cx);
|
||||
let selection = self.selections.newest::<usize>(cx);
|
||||
let head = selection.head();
|
||||
|
||||
let multi_buffer_snapshot = multi_buffer.snapshot(cx);
|
||||
@@ -10348,9 +10330,8 @@ impl Editor {
|
||||
self.show_local_selections = true;
|
||||
|
||||
if moving_cursor {
|
||||
let cursor_in_rename_editor = rename.editor.update(cx, |editor, cx| {
|
||||
editor.selections.newest::<usize>(cx).head()
|
||||
});
|
||||
let rename_editor = rename.editor.read(cx);
|
||||
let cursor_in_rename_editor = rename_editor.selections.newest::<usize>(cx).head();
|
||||
|
||||
// Update the selection to match the position of the selection inside
|
||||
// the rename editor.
|
||||
@@ -10464,7 +10445,7 @@ impl Editor {
|
||||
|
||||
fn cancel_language_server_work(
|
||||
&mut self,
|
||||
_: &actions::CancelLanguageServerWork,
|
||||
_: &CancelLanguageServerWork,
|
||||
cx: &mut ViewContext<Self>,
|
||||
) {
|
||||
if let Some(project) = self.project.clone() {
|
||||
@@ -10703,7 +10684,7 @@ impl Editor {
|
||||
}
|
||||
|
||||
pub fn fold(&mut self, _: &actions::Fold, cx: &mut ViewContext<Self>) {
|
||||
let mut to_fold = Vec::new();
|
||||
let mut fold_ranges = Vec::new();
|
||||
let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx));
|
||||
let selections = self.selections.all_adjusted(cx);
|
||||
|
||||
@@ -10715,10 +10696,12 @@ impl Editor {
|
||||
let mut found = false;
|
||||
let mut row = range.start.row;
|
||||
while row <= range.end.row {
|
||||
if let Some(crease) = display_map.crease_for_buffer_row(MultiBufferRow(row)) {
|
||||
if let Some((foldable_range, fold_text)) =
|
||||
{ display_map.foldable_range(MultiBufferRow(row)) }
|
||||
{
|
||||
found = true;
|
||||
row = crease.range().end.row + 1;
|
||||
to_fold.push(crease);
|
||||
row = foldable_range.end.row + 1;
|
||||
fold_ranges.push((foldable_range, fold_text));
|
||||
} else {
|
||||
row += 1
|
||||
}
|
||||
@@ -10729,9 +10712,11 @@ impl Editor {
|
||||
}
|
||||
|
||||
for row in (0..=range.start.row).rev() {
|
||||
if let Some(crease) = display_map.crease_for_buffer_row(MultiBufferRow(row)) {
|
||||
if crease.range().end.row >= buffer_start_row {
|
||||
to_fold.push(crease);
|
||||
if let Some((foldable_range, fold_text)) =
|
||||
display_map.foldable_range(MultiBufferRow(row))
|
||||
{
|
||||
if foldable_range.end.row >= buffer_start_row {
|
||||
fold_ranges.push((foldable_range, fold_text));
|
||||
if row <= range.start.row {
|
||||
break;
|
||||
}
|
||||
@@ -10740,58 +10725,26 @@ impl Editor {
|
||||
}
|
||||
}
|
||||
|
||||
self.fold_creases(to_fold, true, cx);
|
||||
}
|
||||
|
||||
fn fold_at_level(&mut self, fold_at: &FoldAtLevel, cx: &mut ViewContext<Self>) {
|
||||
let fold_at_level = fold_at.level;
|
||||
let snapshot = self.buffer.read(cx).snapshot(cx);
|
||||
let mut to_fold = Vec::new();
|
||||
let mut stack = vec![(0, snapshot.max_buffer_row().0, 1)];
|
||||
|
||||
while let Some((mut start_row, end_row, current_level)) = stack.pop() {
|
||||
while start_row < end_row {
|
||||
match self
|
||||
.snapshot(cx)
|
||||
.crease_for_buffer_row(MultiBufferRow(start_row))
|
||||
{
|
||||
Some(crease) => {
|
||||
let nested_start_row = crease.range().start.row + 1;
|
||||
let nested_end_row = crease.range().end.row;
|
||||
|
||||
if current_level < fold_at_level {
|
||||
stack.push((nested_start_row, nested_end_row, current_level + 1));
|
||||
} else if current_level == fold_at_level {
|
||||
to_fold.push(crease);
|
||||
}
|
||||
|
||||
start_row = nested_end_row + 1;
|
||||
}
|
||||
None => start_row += 1,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
self.fold_creases(to_fold, true, cx);
|
||||
self.fold_ranges(fold_ranges, true, cx);
|
||||
}
|
||||
|
||||
pub fn fold_all(&mut self, _: &actions::FoldAll, cx: &mut ViewContext<Self>) {
|
||||
let mut fold_ranges = Vec::new();
|
||||
let snapshot = self.buffer.read(cx).snapshot(cx);
|
||||
let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx));
|
||||
|
||||
for row in 0..snapshot.max_buffer_row().0 {
|
||||
if let Some(foldable_range) =
|
||||
self.snapshot(cx).crease_for_buffer_row(MultiBufferRow(row))
|
||||
for row in 0..display_map.max_buffer_row().0 {
|
||||
if let Some((foldable_range, fold_text)) =
|
||||
display_map.foldable_range(MultiBufferRow(row))
|
||||
{
|
||||
fold_ranges.push(foldable_range);
|
||||
fold_ranges.push((foldable_range, fold_text));
|
||||
}
|
||||
}
|
||||
|
||||
self.fold_creases(fold_ranges, true, cx);
|
||||
self.fold_ranges(fold_ranges, true, cx);
|
||||
}
|
||||
|
||||
pub fn fold_recursive(&mut self, _: &actions::FoldRecursive, cx: &mut ViewContext<Self>) {
|
||||
let mut to_fold = Vec::new();
|
||||
let mut fold_ranges = Vec::new();
|
||||
let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx));
|
||||
let selections = self.selections.all_adjusted(cx);
|
||||
|
||||
@@ -10802,9 +10755,11 @@ impl Editor {
|
||||
if range.start.row != range.end.row {
|
||||
let mut found = false;
|
||||
for row in range.start.row..=range.end.row {
|
||||
if let Some(crease) = display_map.crease_for_buffer_row(MultiBufferRow(row)) {
|
||||
if let Some((foldable_range, fold_text)) =
|
||||
{ display_map.foldable_range(MultiBufferRow(row)) }
|
||||
{
|
||||
found = true;
|
||||
to_fold.push(crease);
|
||||
fold_ranges.push((foldable_range, fold_text));
|
||||
}
|
||||
}
|
||||
if found {
|
||||
@@ -10813,9 +10768,11 @@ impl Editor {
|
||||
}
|
||||
|
||||
for row in (0..=range.start.row).rev() {
|
||||
if let Some(crease) = display_map.crease_for_buffer_row(MultiBufferRow(row)) {
|
||||
if crease.range().end.row >= buffer_start_row {
|
||||
to_fold.push(crease);
|
||||
if let Some((foldable_range, fold_text)) =
|
||||
display_map.foldable_range(MultiBufferRow(row))
|
||||
{
|
||||
if foldable_range.end.row >= buffer_start_row {
|
||||
fold_ranges.push((foldable_range, fold_text));
|
||||
} else {
|
||||
break;
|
||||
}
|
||||
@@ -10823,21 +10780,21 @@ impl Editor {
|
||||
}
|
||||
}
|
||||
|
||||
self.fold_creases(to_fold, true, cx);
|
||||
self.fold_ranges(fold_ranges, true, cx);
|
||||
}
|
||||
|
||||
pub fn fold_at(&mut self, fold_at: &FoldAt, cx: &mut ViewContext<Self>) {
|
||||
let buffer_row = fold_at.buffer_row;
|
||||
let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx));
|
||||
|
||||
if let Some(crease) = display_map.crease_for_buffer_row(buffer_row) {
|
||||
if let Some((fold_range, placeholder)) = display_map.foldable_range(buffer_row) {
|
||||
let autoscroll = self
|
||||
.selections
|
||||
.all::<Point>(cx)
|
||||
.iter()
|
||||
.any(|selection| crease.range().overlaps(&selection.range()));
|
||||
.any(|selection| fold_range.overlaps(&selection.range()));
|
||||
|
||||
self.fold_creases([crease], autoscroll, cx);
|
||||
self.fold_ranges([(fold_range, placeholder)], autoscroll, cx);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -10924,12 +10881,12 @@ impl Editor {
|
||||
(s.start..s.end, display_map.fold_placeholder.clone())
|
||||
}
|
||||
});
|
||||
self.fold_creases(ranges, true, cx);
|
||||
self.fold_ranges(ranges, true, cx);
|
||||
}
|
||||
|
||||
pub fn fold_creases<T: ToOffset + Clone>(
|
||||
pub fn fold_ranges<T: ToOffset + Clone>(
|
||||
&mut self,
|
||||
ranges: impl IntoIterator<Item = Crease<T>>,
|
||||
ranges: impl IntoIterator<Item = (Range<T>, FoldPlaceholder)>,
|
||||
auto_scroll: bool,
|
||||
cx: &mut ViewContext<Self>,
|
||||
) {
|
||||
@@ -11099,7 +11056,7 @@ impl Editor {
|
||||
|
||||
pub fn insert_creases(
|
||||
&mut self,
|
||||
creases: impl IntoIterator<Item = Crease<Anchor>>,
|
||||
creases: impl IntoIterator<Item = Crease>,
|
||||
cx: &mut ViewContext<Self>,
|
||||
) -> Vec<CreaseId> {
|
||||
self.display_map
|
||||
@@ -11593,9 +11550,9 @@ impl Editor {
|
||||
}
|
||||
|
||||
pub fn copy_file_location(&mut self, _: &CopyFileLocation, cx: &mut ViewContext<Self>) {
|
||||
let selection = self.selections.newest::<Point>(cx).start.row + 1;
|
||||
if let Some(file) = self.target_file(cx) {
|
||||
if let Some(path) = file.path().to_str() {
|
||||
let selection = self.selections.newest::<Point>(cx).start.row + 1;
|
||||
cx.write_to_clipboard(ClipboardItem::new_string(format!("{path}:{selection}")));
|
||||
}
|
||||
}
|
||||
@@ -12371,10 +12328,9 @@ impl Editor {
|
||||
return;
|
||||
};
|
||||
|
||||
let selections = self.selections.all::<usize>(cx);
|
||||
let buffer = self.buffer.read(cx);
|
||||
let mut new_selections_by_buffer = HashMap::default();
|
||||
for selection in selections {
|
||||
for selection in self.selections.all::<usize>(cx) {
|
||||
for (buffer, range, _) in
|
||||
buffer.range_to_buffer_ranges(selection.start..selection.end, cx)
|
||||
{
|
||||
@@ -12419,7 +12375,6 @@ impl Editor {
|
||||
}
|
||||
|
||||
fn open_excerpts_common(&mut self, split: bool, cx: &mut ViewContext<Self>) {
|
||||
let selections = self.selections.all::<usize>(cx);
|
||||
let buffer = self.buffer.read(cx);
|
||||
if buffer.is_singleton() {
|
||||
cx.propagate();
|
||||
@@ -12432,7 +12387,7 @@ impl Editor {
|
||||
};
|
||||
|
||||
let mut new_selections_by_buffer = HashMap::default();
|
||||
for selection in selections {
|
||||
for selection in self.selections.all::<usize>(cx) {
|
||||
for (mut buffer_handle, mut range, _) in
|
||||
buffer.range_to_buffer_ranges(selection.range(), cx)
|
||||
{
|
||||
@@ -12548,7 +12503,7 @@ impl Editor {
|
||||
fn selection_replacement_ranges(
|
||||
&self,
|
||||
range: Range<OffsetUtf16>,
|
||||
cx: &mut AppContext,
|
||||
cx: &AppContext,
|
||||
) -> Vec<Range<OffsetUtf16>> {
|
||||
let selections = self.selections.all::<OffsetUtf16>(cx);
|
||||
let newest_selection = selections
|
||||
@@ -13583,22 +13538,24 @@ impl EditorSnapshot {
|
||||
.crease_snapshot
|
||||
.query_row(buffer_row, &self.buffer_snapshot)
|
||||
{
|
||||
match crease {
|
||||
Crease::Inline { render_toggle, .. } => {
|
||||
let toggle_callback = Arc::new(move |folded, cx: &mut WindowContext| {
|
||||
if folded {
|
||||
editor.update(cx, |editor, cx| {
|
||||
editor.fold_at(&crate::FoldAt { buffer_row }, cx)
|
||||
});
|
||||
} else {
|
||||
editor.update(cx, |editor, cx| {
|
||||
editor.unfold_at(&crate::UnfoldAt { buffer_row }, cx)
|
||||
});
|
||||
}
|
||||
let toggle_callback = Arc::new(move |folded, cx: &mut WindowContext| {
|
||||
if folded {
|
||||
editor.update(cx, |editor, cx| {
|
||||
editor.fold_at(&crate::FoldAt { buffer_row }, cx)
|
||||
});
|
||||
} else {
|
||||
editor.update(cx, |editor, cx| {
|
||||
editor.unfold_at(&crate::UnfoldAt { buffer_row }, cx)
|
||||
});
|
||||
Some((render_toggle)(buffer_row, folded, toggle_callback, cx))
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
Some((crease.render_toggle)(
|
||||
buffer_row,
|
||||
folded,
|
||||
toggle_callback,
|
||||
cx,
|
||||
))
|
||||
} else if folded
|
||||
|| (self.starts_indent(buffer_row) && (row_contains_cursor || self.gutter_hovered))
|
||||
{
|
||||
@@ -13625,10 +13582,10 @@ impl EditorSnapshot {
|
||||
cx: &mut WindowContext,
|
||||
) -> Option<AnyElement> {
|
||||
let folded = self.is_line_folded(buffer_row);
|
||||
let Crease::Inline { render_trailer, .. } = self
|
||||
let crease = self
|
||||
.crease_snapshot
|
||||
.query_row(buffer_row, &self.buffer_snapshot)?;
|
||||
Some((render_trailer)(buffer_row, folded, cx))
|
||||
Some((crease.render_trailer)(buffer_row, folded, cx))
|
||||
}
|
||||
}
|
||||
|
||||
@@ -14189,7 +14146,7 @@ pub fn diagnostic_block_renderer(
|
||||
.relative()
|
||||
.size_full()
|
||||
.pl(cx.gutter_dimensions.width)
|
||||
.w(cx.max_width - cx.gutter_dimensions.full_width())
|
||||
.w(cx.max_width + cx.gutter_dimensions.width)
|
||||
.child(
|
||||
div()
|
||||
.flex()
|
||||
|
||||
@@ -592,7 +592,7 @@ fn test_clone(cx: &mut TestAppContext) {
|
||||
|
||||
_ = editor.update(cx, |editor, cx| {
|
||||
editor.change_selections(None, cx, |s| s.select_ranges(selection_ranges.clone()));
|
||||
editor.fold_creases(
|
||||
editor.fold_ranges(
|
||||
[
|
||||
(Point::new(1, 0)..Point::new(2, 0), FoldPlaceholder::test()),
|
||||
(Point::new(3, 0)..Point::new(4, 0), FoldPlaceholder::test()),
|
||||
@@ -1080,112 +1080,6 @@ fn test_fold_action_multiple_line_breaks(cx: &mut TestAppContext) {
|
||||
});
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
fn test_fold_at_level(cx: &mut TestAppContext) {
|
||||
init_test(cx, |_| {});
|
||||
|
||||
let view = cx.add_window(|cx| {
|
||||
let buffer = MultiBuffer::build_simple(
|
||||
&"
|
||||
class Foo:
|
||||
# Hello!
|
||||
|
||||
def a():
|
||||
print(1)
|
||||
|
||||
def b():
|
||||
print(2)
|
||||
|
||||
|
||||
class Bar:
|
||||
# World!
|
||||
|
||||
def a():
|
||||
print(1)
|
||||
|
||||
def b():
|
||||
print(2)
|
||||
|
||||
|
||||
"
|
||||
.unindent(),
|
||||
cx,
|
||||
);
|
||||
build_editor(buffer.clone(), cx)
|
||||
});
|
||||
|
||||
_ = view.update(cx, |view, cx| {
|
||||
view.fold_at_level(&FoldAtLevel { level: 2 }, cx);
|
||||
assert_eq!(
|
||||
view.display_text(cx),
|
||||
"
|
||||
class Foo:
|
||||
# Hello!
|
||||
|
||||
def a():⋯
|
||||
|
||||
def b():⋯
|
||||
|
||||
|
||||
class Bar:
|
||||
# World!
|
||||
|
||||
def a():⋯
|
||||
|
||||
def b():⋯
|
||||
|
||||
|
||||
"
|
||||
.unindent(),
|
||||
);
|
||||
|
||||
view.fold_at_level(&FoldAtLevel { level: 1 }, cx);
|
||||
assert_eq!(
|
||||
view.display_text(cx),
|
||||
"
|
||||
class Foo:⋯
|
||||
|
||||
|
||||
class Bar:⋯
|
||||
|
||||
|
||||
"
|
||||
.unindent(),
|
||||
);
|
||||
|
||||
view.unfold_all(&UnfoldAll, cx);
|
||||
view.fold_at_level(&FoldAtLevel { level: 0 }, cx);
|
||||
assert_eq!(
|
||||
view.display_text(cx),
|
||||
"
|
||||
class Foo:
|
||||
# Hello!
|
||||
|
||||
def a():
|
||||
print(1)
|
||||
|
||||
def b():
|
||||
print(2)
|
||||
|
||||
|
||||
class Bar:
|
||||
# World!
|
||||
|
||||
def a():
|
||||
print(1)
|
||||
|
||||
def b():
|
||||
print(2)
|
||||
|
||||
|
||||
"
|
||||
.unindent(),
|
||||
);
|
||||
|
||||
assert_eq!(view.display_text(cx), view.buffer.read(cx).read(cx).text());
|
||||
});
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
fn test_move_cursor(cx: &mut TestAppContext) {
|
||||
init_test(cx, |_| {});
|
||||
@@ -1279,7 +1173,7 @@ fn test_move_cursor_multibyte(cx: &mut TestAppContext) {
|
||||
assert_eq!('α'.len_utf8(), 2);
|
||||
|
||||
_ = view.update(cx, |view, cx| {
|
||||
view.fold_creases(
|
||||
view.fold_ranges(
|
||||
vec![
|
||||
(Point::new(0, 6)..Point::new(0, 12), FoldPlaceholder::test()),
|
||||
(Point::new(1, 2)..Point::new(1, 4), FoldPlaceholder::test()),
|
||||
@@ -3871,7 +3765,7 @@ fn test_move_line_up_down(cx: &mut TestAppContext) {
|
||||
build_editor(buffer, cx)
|
||||
});
|
||||
_ = view.update(cx, |view, cx| {
|
||||
view.fold_creases(
|
||||
view.fold_ranges(
|
||||
vec![
|
||||
(Point::new(0, 2)..Point::new(1, 2), FoldPlaceholder::test()),
|
||||
(Point::new(2, 3)..Point::new(4, 1), FoldPlaceholder::test()),
|
||||
@@ -3989,76 +3883,6 @@ fn test_move_line_up_down_with_blocks(cx: &mut TestAppContext) {
|
||||
});
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_selections_and_replace_blocks(cx: &mut TestAppContext) {
|
||||
init_test(cx, |_| {});
|
||||
|
||||
let mut cx = EditorTestContext::new(cx).await;
|
||||
cx.set_state(
|
||||
&"
|
||||
ˇzero
|
||||
one
|
||||
two
|
||||
three
|
||||
four
|
||||
five
|
||||
"
|
||||
.unindent(),
|
||||
);
|
||||
|
||||
// Create a four-line block that replaces three lines of text.
|
||||
cx.update_editor(|editor, cx| {
|
||||
let snapshot = editor.snapshot(cx);
|
||||
let snapshot = &snapshot.buffer_snapshot;
|
||||
let placement = BlockPlacement::Replace(
|
||||
snapshot.anchor_after(Point::new(1, 0))..snapshot.anchor_after(Point::new(3, 0)),
|
||||
);
|
||||
editor.insert_blocks(
|
||||
[BlockProperties {
|
||||
placement,
|
||||
height: 4,
|
||||
style: BlockStyle::Sticky,
|
||||
render: Box::new(|_| gpui::div().into_any_element()),
|
||||
priority: 0,
|
||||
}],
|
||||
None,
|
||||
cx,
|
||||
);
|
||||
});
|
||||
|
||||
// Move down so that the cursor touches the block.
|
||||
cx.update_editor(|editor, cx| {
|
||||
editor.move_down(&Default::default(), cx);
|
||||
});
|
||||
cx.assert_editor_state(
|
||||
&"
|
||||
zero
|
||||
«one
|
||||
two
|
||||
threeˇ»
|
||||
four
|
||||
five
|
||||
"
|
||||
.unindent(),
|
||||
);
|
||||
|
||||
// Move down past the block.
|
||||
cx.update_editor(|editor, cx| {
|
||||
editor.move_down(&Default::default(), cx);
|
||||
});
|
||||
cx.assert_editor_state(
|
||||
&"
|
||||
zero
|
||||
one
|
||||
two
|
||||
three
|
||||
ˇfour
|
||||
five
|
||||
"
|
||||
.unindent(),
|
||||
);
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
fn test_transpose(cx: &mut TestAppContext) {
|
||||
init_test(cx, |_| {});
|
||||
@@ -4770,7 +4594,7 @@ fn test_split_selection_into_lines(cx: &mut TestAppContext) {
|
||||
build_editor(buffer, cx)
|
||||
});
|
||||
_ = view.update(cx, |view, cx| {
|
||||
view.fold_creases(
|
||||
view.fold_ranges(
|
||||
vec![
|
||||
(Point::new(0, 2)..Point::new(1, 2), FoldPlaceholder::test()),
|
||||
(Point::new(2, 3)..Point::new(4, 1), FoldPlaceholder::test()),
|
||||
@@ -5451,7 +5275,7 @@ async fn test_select_larger_smaller_syntax_node(cx: &mut gpui::TestAppContext) {
|
||||
// Ensure that we keep expanding the selection if the larger selection starts or ends within
|
||||
// a fold.
|
||||
editor.update(cx, |view, cx| {
|
||||
view.fold_creases(
|
||||
view.fold_ranges(
|
||||
vec![
|
||||
(
|
||||
Point::new(0, 21)..Point::new(0, 24),
|
||||
@@ -12998,7 +12822,7 @@ fn test_crease_insertion_and_rendering(cx: &mut TestAppContext) {
|
||||
callback: Arc<dyn Fn(bool, &mut WindowContext) + Send + Sync>,
|
||||
}
|
||||
|
||||
let crease = Crease::inline(
|
||||
let crease = Crease::new(
|
||||
range,
|
||||
FoldPlaceholder::test(),
|
||||
{
|
||||
|
||||
@@ -25,7 +25,7 @@ use crate::{
|
||||
MULTI_BUFFER_EXCERPT_HEADER_HEIGHT,
|
||||
};
|
||||
use client::ParticipantIndex;
|
||||
use collections::{BTreeMap, HashMap, HashSet};
|
||||
use collections::{BTreeMap, HashMap};
|
||||
use git::{blame::BlameEntry, diff::DiffHunkStatus, Oid};
|
||||
use gpui::Subscription;
|
||||
use gpui::{
|
||||
@@ -337,7 +337,6 @@ impl EditorElement {
|
||||
register_action(view, cx, Editor::open_url);
|
||||
register_action(view, cx, Editor::open_file);
|
||||
register_action(view, cx, Editor::fold);
|
||||
register_action(view, cx, Editor::fold_at_level);
|
||||
register_action(view, cx, Editor::fold_all);
|
||||
register_action(view, cx, Editor::fold_at);
|
||||
register_action(view, cx, Editor::fold_recursive);
|
||||
@@ -446,7 +445,6 @@ impl EditorElement {
|
||||
register_action(view, cx, Editor::accept_inline_completion);
|
||||
register_action(view, cx, Editor::revert_file);
|
||||
register_action(view, cx, Editor::revert_selected_hunks);
|
||||
register_action(view, cx, Editor::apply_all_diff_hunks);
|
||||
register_action(view, cx, Editor::apply_selected_diff_hunks);
|
||||
register_action(view, cx, Editor::open_active_item_in_terminal);
|
||||
register_action(view, cx, Editor::reload_file)
|
||||
@@ -812,7 +810,6 @@ impl EditorElement {
|
||||
&self,
|
||||
start_anchor: Anchor,
|
||||
end_anchor: Anchor,
|
||||
local_selections: &[Selection<Point>],
|
||||
snapshot: &EditorSnapshot,
|
||||
start_row: DisplayRow,
|
||||
end_row: DisplayRow,
|
||||
@@ -825,127 +822,129 @@ impl EditorElement {
|
||||
let mut selections: Vec<(PlayerColor, Vec<SelectionLayout>)> = Vec::new();
|
||||
let mut active_rows = BTreeMap::new();
|
||||
let mut newest_selection_head = None;
|
||||
self.editor.update(cx, |editor, cx| {
|
||||
if editor.show_local_selections {
|
||||
let mut layouts = Vec::new();
|
||||
let newest = editor.selections.newest(cx);
|
||||
for selection in local_selections.iter().cloned() {
|
||||
let is_empty = selection.start == selection.end;
|
||||
let is_newest = selection == newest;
|
||||
let editor = self.editor.read(cx);
|
||||
|
||||
let layout = SelectionLayout::new(
|
||||
selection,
|
||||
editor.selections.line_mode,
|
||||
editor.cursor_shape,
|
||||
&snapshot.display_snapshot,
|
||||
is_newest,
|
||||
editor.leader_peer_id.is_none(),
|
||||
None,
|
||||
);
|
||||
if is_newest {
|
||||
newest_selection_head = Some(layout.head);
|
||||
}
|
||||
if editor.show_local_selections {
|
||||
let mut local_selections: Vec<Selection<Point>> = editor
|
||||
.selections
|
||||
.disjoint_in_range(start_anchor..end_anchor, cx);
|
||||
local_selections.extend(editor.selections.pending(cx));
|
||||
let mut layouts = Vec::new();
|
||||
let newest = editor.selections.newest(cx);
|
||||
for selection in local_selections.drain(..) {
|
||||
let is_empty = selection.start == selection.end;
|
||||
let is_newest = selection == newest;
|
||||
|
||||
for row in cmp::max(layout.active_rows.start.0, start_row.0)
|
||||
..=cmp::min(layout.active_rows.end.0, end_row.0)
|
||||
{
|
||||
let contains_non_empty_selection =
|
||||
active_rows.entry(DisplayRow(row)).or_insert(!is_empty);
|
||||
*contains_non_empty_selection |= !is_empty;
|
||||
}
|
||||
layouts.push(layout);
|
||||
let layout = SelectionLayout::new(
|
||||
selection,
|
||||
editor.selections.line_mode,
|
||||
editor.cursor_shape,
|
||||
&snapshot.display_snapshot,
|
||||
is_newest,
|
||||
editor.leader_peer_id.is_none(),
|
||||
None,
|
||||
);
|
||||
if is_newest {
|
||||
newest_selection_head = Some(layout.head);
|
||||
}
|
||||
|
||||
let player = if editor.read_only(cx) {
|
||||
cx.theme().players().read_only()
|
||||
} else {
|
||||
self.style.local_player
|
||||
};
|
||||
|
||||
selections.push((player, layouts));
|
||||
for row in cmp::max(layout.active_rows.start.0, start_row.0)
|
||||
..=cmp::min(layout.active_rows.end.0, end_row.0)
|
||||
{
|
||||
let contains_non_empty_selection =
|
||||
active_rows.entry(DisplayRow(row)).or_insert(!is_empty);
|
||||
*contains_non_empty_selection |= !is_empty;
|
||||
}
|
||||
layouts.push(layout);
|
||||
}
|
||||
|
||||
if let Some(collaboration_hub) = &editor.collaboration_hub {
|
||||
// When following someone, render the local selections in their color.
|
||||
if let Some(leader_id) = editor.leader_peer_id {
|
||||
if let Some(collaborator) = collaboration_hub.collaborators(cx).get(&leader_id)
|
||||
let player = if editor.read_only(cx) {
|
||||
cx.theme().players().read_only()
|
||||
} else {
|
||||
self.style.local_player
|
||||
};
|
||||
|
||||
selections.push((player, layouts));
|
||||
}
|
||||
|
||||
if let Some(collaboration_hub) = &editor.collaboration_hub {
|
||||
// When following someone, render the local selections in their color.
|
||||
if let Some(leader_id) = editor.leader_peer_id {
|
||||
if let Some(collaborator) = collaboration_hub.collaborators(cx).get(&leader_id) {
|
||||
if let Some(participant_index) = collaboration_hub
|
||||
.user_participant_indices(cx)
|
||||
.get(&collaborator.user_id)
|
||||
{
|
||||
if let Some(participant_index) = collaboration_hub
|
||||
.user_participant_indices(cx)
|
||||
.get(&collaborator.user_id)
|
||||
{
|
||||
if let Some((local_selection_style, _)) = selections.first_mut() {
|
||||
*local_selection_style = cx
|
||||
.theme()
|
||||
.players()
|
||||
.color_for_participant(participant_index.0);
|
||||
}
|
||||
if let Some((local_selection_style, _)) = selections.first_mut() {
|
||||
*local_selection_style = cx
|
||||
.theme()
|
||||
.players()
|
||||
.color_for_participant(participant_index.0);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
let mut remote_selections = HashMap::default();
|
||||
for selection in snapshot.remote_selections_in_range(
|
||||
&(start_anchor..end_anchor),
|
||||
collaboration_hub.as_ref(),
|
||||
cx,
|
||||
) {
|
||||
let selection_style =
|
||||
Self::get_participant_color(selection.participant_index, cx);
|
||||
|
||||
// Don't re-render the leader's selections, since the local selections
|
||||
// match theirs.
|
||||
if Some(selection.peer_id) == editor.leader_peer_id {
|
||||
continue;
|
||||
}
|
||||
let key = HoveredCursor {
|
||||
replica_id: selection.replica_id,
|
||||
selection_id: selection.selection.id,
|
||||
};
|
||||
|
||||
let is_shown =
|
||||
editor.show_cursor_names || editor.hovered_cursors.contains_key(&key);
|
||||
|
||||
remote_selections
|
||||
.entry(selection.replica_id)
|
||||
.or_insert((selection_style, Vec::new()))
|
||||
.1
|
||||
.push(SelectionLayout::new(
|
||||
selection.selection,
|
||||
selection.line_mode,
|
||||
selection.cursor_shape,
|
||||
&snapshot.display_snapshot,
|
||||
false,
|
||||
false,
|
||||
if is_shown { selection.user_name } else { None },
|
||||
));
|
||||
}
|
||||
|
||||
selections.extend(remote_selections.into_values());
|
||||
} else if !editor.is_focused(cx) && editor.show_cursor_when_unfocused {
|
||||
let player = if editor.read_only(cx) {
|
||||
cx.theme().players().read_only()
|
||||
} else {
|
||||
self.style.local_player
|
||||
};
|
||||
let layouts = snapshot
|
||||
.buffer_snapshot
|
||||
.selections_in_range(&(start_anchor..end_anchor), true)
|
||||
.map(move |(_, line_mode, cursor_shape, selection)| {
|
||||
SelectionLayout::new(
|
||||
selection,
|
||||
line_mode,
|
||||
cursor_shape,
|
||||
&snapshot.display_snapshot,
|
||||
false,
|
||||
false,
|
||||
None,
|
||||
)
|
||||
})
|
||||
.collect::<Vec<_>>();
|
||||
selections.push((player, layouts));
|
||||
}
|
||||
});
|
||||
|
||||
let mut remote_selections = HashMap::default();
|
||||
for selection in snapshot.remote_selections_in_range(
|
||||
&(start_anchor..end_anchor),
|
||||
collaboration_hub.as_ref(),
|
||||
cx,
|
||||
) {
|
||||
let selection_style = Self::get_participant_color(selection.participant_index, cx);
|
||||
|
||||
// Don't re-render the leader's selections, since the local selections
|
||||
// match theirs.
|
||||
if Some(selection.peer_id) == editor.leader_peer_id {
|
||||
continue;
|
||||
}
|
||||
let key = HoveredCursor {
|
||||
replica_id: selection.replica_id,
|
||||
selection_id: selection.selection.id,
|
||||
};
|
||||
|
||||
let is_shown =
|
||||
editor.show_cursor_names || editor.hovered_cursors.contains_key(&key);
|
||||
|
||||
remote_selections
|
||||
.entry(selection.replica_id)
|
||||
.or_insert((selection_style, Vec::new()))
|
||||
.1
|
||||
.push(SelectionLayout::new(
|
||||
selection.selection,
|
||||
selection.line_mode,
|
||||
selection.cursor_shape,
|
||||
&snapshot.display_snapshot,
|
||||
false,
|
||||
false,
|
||||
if is_shown { selection.user_name } else { None },
|
||||
));
|
||||
}
|
||||
|
||||
selections.extend(remote_selections.into_values());
|
||||
} else if !editor.is_focused(cx) && editor.show_cursor_when_unfocused {
|
||||
let player = if editor.read_only(cx) {
|
||||
cx.theme().players().read_only()
|
||||
} else {
|
||||
self.style.local_player
|
||||
};
|
||||
let layouts = snapshot
|
||||
.buffer_snapshot
|
||||
.selections_in_range(&(start_anchor..end_anchor), true)
|
||||
.map(move |(_, line_mode, cursor_shape, selection)| {
|
||||
SelectionLayout::new(
|
||||
selection,
|
||||
line_mode,
|
||||
cursor_shape,
|
||||
&snapshot.display_snapshot,
|
||||
false,
|
||||
false,
|
||||
None,
|
||||
)
|
||||
})
|
||||
.collect::<Vec<_>>();
|
||||
selections.push((player, layouts));
|
||||
}
|
||||
(selections, active_rows, newest_selection_head)
|
||||
}
|
||||
|
||||
@@ -992,7 +991,6 @@ impl EditorElement {
|
||||
&self,
|
||||
snapshot: &EditorSnapshot,
|
||||
selections: &[(PlayerColor, Vec<SelectionLayout>)],
|
||||
block_start_rows: &HashSet<DisplayRow>,
|
||||
visible_display_row_range: Range<DisplayRow>,
|
||||
line_layouts: &[LineWithInvisibles],
|
||||
text_hitbox: &Hitbox,
|
||||
@@ -1012,10 +1010,7 @@ impl EditorElement {
|
||||
let cursor_position = selection.head;
|
||||
|
||||
let in_range = visible_display_row_range.contains(&cursor_position.row());
|
||||
if (selection.is_local && !editor.show_local_cursors(cx))
|
||||
|| !in_range
|
||||
|| block_start_rows.contains(&cursor_position.row())
|
||||
{
|
||||
if (selection.is_local && !editor.show_local_cursors(cx)) || !in_range {
|
||||
continue;
|
||||
}
|
||||
|
||||
@@ -1035,13 +1030,18 @@ impl EditorElement {
|
||||
.or_else(|| {
|
||||
if cursor_column == 0 {
|
||||
snapshot.placeholder_text().and_then(|s| {
|
||||
s.graphemes(true).next().map(|s| s.to_string().into())
|
||||
s.graphemes(true).next().map(|s| s.to_owned())
|
||||
})
|
||||
} else {
|
||||
None
|
||||
}
|
||||
})
|
||||
.and_then(|text| {
|
||||
.and_then(|grapheme| {
|
||||
let text = if grapheme == "\n" {
|
||||
SharedString::from(" ")
|
||||
} else {
|
||||
SharedString::from(grapheme)
|
||||
};
|
||||
let len = text.len();
|
||||
|
||||
let font = cursor_row_layout
|
||||
@@ -1851,25 +1851,23 @@ impl EditorElement {
|
||||
return Vec::new();
|
||||
}
|
||||
|
||||
let (newest_selection_head, is_relative) = self.editor.update(cx, |editor, cx| {
|
||||
let newest_selection_head = newest_selection_head.unwrap_or_else(|| {
|
||||
let newest = editor.selections.newest::<Point>(cx);
|
||||
SelectionLayout::new(
|
||||
newest,
|
||||
editor.selections.line_mode,
|
||||
editor.cursor_shape,
|
||||
&snapshot.display_snapshot,
|
||||
true,
|
||||
true,
|
||||
None,
|
||||
)
|
||||
.head
|
||||
});
|
||||
let is_relative = editor.should_use_relative_line_numbers(cx);
|
||||
(newest_selection_head, is_relative)
|
||||
let editor = self.editor.read(cx);
|
||||
let newest_selection_head = newest_selection_head.unwrap_or_else(|| {
|
||||
let newest = editor.selections.newest::<Point>(cx);
|
||||
SelectionLayout::new(
|
||||
newest,
|
||||
editor.selections.line_mode,
|
||||
editor.cursor_shape,
|
||||
&snapshot.display_snapshot,
|
||||
true,
|
||||
true,
|
||||
None,
|
||||
)
|
||||
.head
|
||||
});
|
||||
let font_size = self.style.text.font_size.to_pixels(cx.rem_size());
|
||||
|
||||
let is_relative = editor.should_use_relative_line_numbers(cx);
|
||||
let relative_to = if is_relative {
|
||||
Some(newest_selection_head.row())
|
||||
} else {
|
||||
@@ -2068,14 +2066,14 @@ impl EditorElement {
|
||||
editor_width: Pixels,
|
||||
scroll_width: &mut Pixels,
|
||||
resized_blocks: &mut HashMap<CustomBlockId, u32>,
|
||||
selections: &[Selection<Point>],
|
||||
cx: &mut WindowContext,
|
||||
) -> (AnyElement, Size<Pixels>) {
|
||||
let mut element = match block {
|
||||
Block::Custom(block) => {
|
||||
let block_start = block.start().to_point(&snapshot.buffer_snapshot);
|
||||
let block_end = block.end().to_point(&snapshot.buffer_snapshot);
|
||||
let align_to = block_start.to_display_point(snapshot);
|
||||
let align_to = block
|
||||
.start()
|
||||
.to_point(&snapshot.buffer_snapshot)
|
||||
.to_display_point(snapshot);
|
||||
let anchor_x = text_x
|
||||
+ if rows.contains(&align_to.row()) {
|
||||
line_layouts[align_to.row().minus(rows.start) as usize]
|
||||
@@ -2085,18 +2083,6 @@ impl EditorElement {
|
||||
.x_for_index(align_to.column() as usize)
|
||||
};
|
||||
|
||||
let selected = selections
|
||||
.binary_search_by(|selection| {
|
||||
if selection.end <= block_start {
|
||||
Ordering::Less
|
||||
} else if selection.start >= block_end {
|
||||
Ordering::Greater
|
||||
} else {
|
||||
Ordering::Equal
|
||||
}
|
||||
})
|
||||
.is_ok();
|
||||
|
||||
div()
|
||||
.size_full()
|
||||
.child(block.render(&mut BlockContext {
|
||||
@@ -2106,7 +2092,6 @@ impl EditorElement {
|
||||
line_height,
|
||||
em_width,
|
||||
block_id,
|
||||
selected,
|
||||
max_width: text_hitbox.size.width.max(*scroll_width),
|
||||
editor_style: &self.style,
|
||||
}))
|
||||
@@ -2444,7 +2429,6 @@ impl EditorElement {
|
||||
text_x: Pixels,
|
||||
line_height: Pixels,
|
||||
line_layouts: &[LineWithInvisibles],
|
||||
selections: &[Selection<Point>],
|
||||
cx: &mut WindowContext,
|
||||
) -> Result<Vec<BlockLayout>, HashMap<CustomBlockId, u32>> {
|
||||
let (fixed_blocks, non_fixed_blocks) = snapshot
|
||||
@@ -2481,7 +2465,6 @@ impl EditorElement {
|
||||
editor_width,
|
||||
scroll_width,
|
||||
&mut resized_blocks,
|
||||
selections,
|
||||
cx,
|
||||
);
|
||||
fixed_block_max_width = fixed_block_max_width.max(element_size.width + em_width);
|
||||
@@ -2526,7 +2509,6 @@ impl EditorElement {
|
||||
editor_width,
|
||||
scroll_width,
|
||||
&mut resized_blocks,
|
||||
selections,
|
||||
cx,
|
||||
);
|
||||
|
||||
@@ -2572,7 +2554,6 @@ impl EditorElement {
|
||||
editor_width,
|
||||
scroll_width,
|
||||
&mut resized_blocks,
|
||||
selections,
|
||||
cx,
|
||||
);
|
||||
|
||||
@@ -2601,7 +2582,6 @@ impl EditorElement {
|
||||
fn layout_blocks(
|
||||
&self,
|
||||
blocks: &mut Vec<BlockLayout>,
|
||||
block_starts: &mut HashSet<DisplayRow>,
|
||||
hitbox: &Hitbox,
|
||||
line_height: Pixels,
|
||||
scroll_pixel_position: gpui::Point<Pixels>,
|
||||
@@ -2609,7 +2589,6 @@ impl EditorElement {
|
||||
) {
|
||||
for block in blocks {
|
||||
let mut origin = if let Some(row) = block.row {
|
||||
block_starts.insert(row);
|
||||
hitbox.origin
|
||||
+ point(
|
||||
Pixels::ZERO,
|
||||
@@ -4177,16 +4156,7 @@ fn render_inline_blame_entry(
|
||||
let relative_timestamp = blame_entry_relative_timestamp(&blame_entry);
|
||||
|
||||
let author = blame_entry.author.as_deref().unwrap_or_default();
|
||||
let summary_enabled = ProjectSettings::get_global(cx)
|
||||
.git
|
||||
.show_inline_commit_summary();
|
||||
|
||||
let text = match blame_entry.summary.as_ref() {
|
||||
Some(summary) if summary_enabled => {
|
||||
format!("{}, {} - {}", author, relative_timestamp, summary)
|
||||
}
|
||||
_ => format!("{}, {}", author, relative_timestamp),
|
||||
};
|
||||
let text = format!("{}, {}", author, relative_timestamp);
|
||||
|
||||
let details = blame.read(cx).details_for_entry(&blame_entry);
|
||||
|
||||
@@ -5120,19 +5090,9 @@ impl Element for EditorElement {
|
||||
cx,
|
||||
);
|
||||
|
||||
let local_selections: Vec<Selection<Point>> =
|
||||
self.editor.update(cx, |editor, cx| {
|
||||
let mut selections = editor
|
||||
.selections
|
||||
.disjoint_in_range(start_anchor..end_anchor, cx);
|
||||
selections.extend(editor.selections.pending(cx));
|
||||
selections
|
||||
});
|
||||
|
||||
let (selections, active_rows, newest_selection_head) = self.layout_selections(
|
||||
start_anchor,
|
||||
end_anchor,
|
||||
&local_selections,
|
||||
&snapshot,
|
||||
start_row,
|
||||
end_row,
|
||||
@@ -5205,7 +5165,6 @@ impl Element for EditorElement {
|
||||
gutter_dimensions.full_width(),
|
||||
line_height,
|
||||
&line_layouts,
|
||||
&local_selections,
|
||||
cx,
|
||||
)
|
||||
});
|
||||
@@ -5345,11 +5304,9 @@ impl Element for EditorElement {
|
||||
cx,
|
||||
);
|
||||
|
||||
let mut block_start_rows = HashSet::default();
|
||||
cx.with_element_namespace("blocks", |cx| {
|
||||
self.layout_blocks(
|
||||
&mut blocks,
|
||||
&mut block_start_rows,
|
||||
&hitbox,
|
||||
line_height,
|
||||
scroll_pixel_position,
|
||||
@@ -5366,7 +5323,6 @@ impl Element for EditorElement {
|
||||
let visible_cursors = self.layout_visible_cursors(
|
||||
&snapshot,
|
||||
&selections,
|
||||
&block_start_rows,
|
||||
start_row..end_row,
|
||||
&line_layouts,
|
||||
&text_hitbox,
|
||||
|
||||
@@ -368,15 +368,12 @@ impl GitBlame {
|
||||
.spawn({
|
||||
let snapshot = snapshot.clone();
|
||||
async move {
|
||||
let Some(Blame {
|
||||
let Blame {
|
||||
entries,
|
||||
permalinks,
|
||||
messages,
|
||||
remote_url,
|
||||
}) = blame.await?
|
||||
else {
|
||||
return Ok(None);
|
||||
};
|
||||
} = blame.await?;
|
||||
|
||||
let entries = build_blame_entry_sum_tree(entries, snapshot.max_point().row);
|
||||
let commit_details = parse_commit_messages(
|
||||
@@ -388,16 +385,13 @@ impl GitBlame {
|
||||
)
|
||||
.await;
|
||||
|
||||
anyhow::Ok(Some((entries, commit_details)))
|
||||
anyhow::Ok((entries, commit_details))
|
||||
}
|
||||
})
|
||||
.await;
|
||||
|
||||
this.update(&mut cx, |this, cx| match result {
|
||||
Ok(None) => {
|
||||
// Nothing to do, e.g. no repository found
|
||||
}
|
||||
Ok(Some((entries, commit_details))) => {
|
||||
Ok((entries, commit_details)) => {
|
||||
this.buffer_edits = buffer_edits;
|
||||
this.buffer_snapshot = snapshot;
|
||||
this.entries = entries;
|
||||
@@ -416,7 +410,11 @@ impl GitBlame {
|
||||
} else {
|
||||
// If we weren't triggered by a user, we just log errors in the background, instead of sending
|
||||
// notifications.
|
||||
log::error!("failed to get git blame data: {error:?}");
|
||||
// Except for `NoRepositoryError`, which can happen often if a user has inline-blame turned on
|
||||
// and opens a non-git file.
|
||||
if error.downcast_ref::<project::NoRepositoryError>().is_none() {
|
||||
log::error!("failed to get git blame data: {error:?}");
|
||||
}
|
||||
}
|
||||
}),
|
||||
})
|
||||
|
||||
@@ -706,11 +706,10 @@ pub(crate) async fn find_file(
|
||||
) -> Option<ResolvedPath> {
|
||||
project
|
||||
.update(cx, |project, cx| {
|
||||
project.resolve_path_in_buffer(&candidate_file_path, buffer, cx)
|
||||
project.resolve_existing_file_path(&candidate_file_path, buffer, cx)
|
||||
})
|
||||
.ok()?
|
||||
.await
|
||||
.filter(|s| s.is_file())
|
||||
}
|
||||
|
||||
if let Some(existing_path) = check_path(&candidate_file_path, &project, buffer, cx).await {
|
||||
@@ -1613,46 +1612,4 @@ mod tests {
|
||||
assert_eq!(file_path.to_str().unwrap(), "/root/dir/file2.rs");
|
||||
});
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_hover_directories(cx: &mut gpui::TestAppContext) {
|
||||
init_test(cx, |_| {});
|
||||
let mut cx = EditorLspTestContext::new_rust(
|
||||
lsp::ServerCapabilities {
|
||||
..Default::default()
|
||||
},
|
||||
cx,
|
||||
)
|
||||
.await;
|
||||
|
||||
// Insert a new file
|
||||
let fs = cx.update_workspace(|workspace, cx| workspace.project().read(cx).fs().clone());
|
||||
fs.as_fake()
|
||||
.insert_file("/root/dir/file2.rs", "This is file2.rs".as_bytes().to_vec())
|
||||
.await;
|
||||
|
||||
cx.set_state(indoc! {"
|
||||
You can't open ../diˇr because it's a directory.
|
||||
"});
|
||||
|
||||
// File does not exist
|
||||
let screen_coord = cx.pixel_position(indoc! {"
|
||||
You can't open ../diˇr because it's a directory.
|
||||
"});
|
||||
cx.simulate_mouse_move(screen_coord, None, Modifiers::secondary_key());
|
||||
|
||||
// No highlight
|
||||
cx.update_editor(|editor, cx| {
|
||||
assert!(editor
|
||||
.snapshot(cx)
|
||||
.text_highlight_ranges::<HoveredLinkState>()
|
||||
.unwrap_or_default()
|
||||
.1
|
||||
.is_empty());
|
||||
});
|
||||
|
||||
// Does not open the directory
|
||||
cx.simulate_click(screen_coord, Modifiers::secondary_key());
|
||||
cx.update_workspace(|workspace, cx| assert_eq!(workspace.items(cx).count(), 1));
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
use crate::{
|
||||
display_map::{invisibles::is_invisible, InlayOffset, ToDisplayPoint},
|
||||
display_map::{InlayOffset, ToDisplayPoint},
|
||||
hover_links::{InlayHighlight, RangeInEditor},
|
||||
is_invisible,
|
||||
scroll::ScrollAmount,
|
||||
Anchor, AnchorRangeExt, DisplayPoint, DisplayRow, Editor, EditorSettings, EditorSnapshot,
|
||||
Hover, RangeToAnchorExt,
|
||||
@@ -199,7 +200,6 @@ fn show_hover(
|
||||
if editor.pending_rename.is_some() {
|
||||
return None;
|
||||
}
|
||||
|
||||
let snapshot = editor.snapshot(cx);
|
||||
|
||||
let (buffer, buffer_position) = editor
|
||||
@@ -280,6 +280,7 @@ fn show_hover(
|
||||
range: entry.range.to_anchors(&snapshot.buffer_snapshot),
|
||||
})
|
||||
});
|
||||
|
||||
if let Some(invisible) = snapshot
|
||||
.buffer_snapshot
|
||||
.chars_at(anchor)
|
||||
@@ -323,7 +324,6 @@ fn show_hover(
|
||||
}
|
||||
None => local_diagnostic.diagnostic.message.clone(),
|
||||
};
|
||||
|
||||
let mut border_color: Option<Hsla> = None;
|
||||
let mut background_color: Option<Hsla> = None;
|
||||
|
||||
@@ -379,7 +379,6 @@ fn show_hover(
|
||||
Markdown::new_text(text, markdown_style.clone(), None, cx, None)
|
||||
})
|
||||
.ok();
|
||||
|
||||
Some(DiagnosticPopover {
|
||||
local_diagnostic,
|
||||
primary_diagnostic,
|
||||
@@ -467,7 +466,6 @@ fn show_hover(
|
||||
cx.notify();
|
||||
cx.refresh();
|
||||
})?;
|
||||
|
||||
anyhow::Ok(())
|
||||
}
|
||||
.log_err()
|
||||
|
||||
@@ -16,10 +16,10 @@ use util::RangeExt;
|
||||
use workspace::Item;
|
||||
|
||||
use crate::{
|
||||
editor_settings::CurrentLineHighlight, hunk_status, hunks_for_selections, ApplyAllDiffHunks,
|
||||
ApplyDiffHunk, BlockPlacement, BlockProperties, BlockStyle, CustomBlockId, DiffRowHighlight,
|
||||
DisplayRow, DisplaySnapshot, Editor, EditorElement, ExpandAllHunkDiffs, GoToHunk, GoToPrevHunk,
|
||||
RevertFile, RevertSelectedHunks, ToDisplayPoint, ToggleHunkDiff,
|
||||
editor_settings::CurrentLineHighlight, hunk_status, hunks_for_selections, ApplyDiffHunk,
|
||||
BlockPlacement, BlockProperties, BlockStyle, CustomBlockId, DiffRowHighlight, DisplayRow,
|
||||
DisplaySnapshot, Editor, EditorElement, ExpandAllHunkDiffs, GoToHunk, GoToPrevHunk, RevertFile,
|
||||
RevertSelectedHunks, ToDisplayPoint, ToggleHunkDiff,
|
||||
};
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
@@ -352,11 +352,7 @@ impl Editor {
|
||||
None
|
||||
}
|
||||
|
||||
pub(crate) fn apply_all_diff_hunks(
|
||||
&mut self,
|
||||
_: &ApplyAllDiffHunks,
|
||||
cx: &mut ViewContext<Self>,
|
||||
) {
|
||||
pub(crate) fn apply_all_diff_hunks(&mut self, cx: &mut ViewContext<Self>) {
|
||||
let buffers = self.buffer.read(cx).all_buffers();
|
||||
for branch_buffer in buffers {
|
||||
branch_buffer.update(cx, |branch_buffer, cx| {
|
||||
|
||||
@@ -41,9 +41,9 @@ pub(super) fn refresh_linked_ranges(this: &mut Editor, cx: &mut ViewContext<Edit
|
||||
return None;
|
||||
}
|
||||
let project = this.project.clone()?;
|
||||
let selections = this.selections.all::<usize>(cx);
|
||||
let buffer = this.buffer.read(cx);
|
||||
let mut applicable_selections = vec![];
|
||||
let selections = this.selections.all::<usize>(cx);
|
||||
let snapshot = buffer.snapshot(cx);
|
||||
for selection in selections {
|
||||
let cursor_position = selection.head();
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
use crate::{ApplyAllDiffHunks, Editor, EditorEvent, SemanticsProvider};
|
||||
use crate::{Editor, EditorEvent, SemanticsProvider};
|
||||
use collections::HashSet;
|
||||
use futures::{channel::mpsc, future::join_all};
|
||||
use gpui::{AppContext, EventEmitter, FocusableView, Model, Render, Subscription, Task, View};
|
||||
@@ -8,7 +8,7 @@ use project::Project;
|
||||
use smol::stream::StreamExt;
|
||||
use std::{any::TypeId, ops::Range, rc::Rc, time::Duration};
|
||||
use text::ToOffset;
|
||||
use ui::{prelude::*, ButtonLike, KeyBinding};
|
||||
use ui::prelude::*;
|
||||
use workspace::{
|
||||
searchable::SearchableItemHandle, Item, ItemHandle as _, ToolbarItemEvent, ToolbarItemLocation,
|
||||
ToolbarItemView, Workspace,
|
||||
@@ -232,10 +232,7 @@ impl ProposedChangesEditor {
|
||||
|
||||
impl Render for ProposedChangesEditor {
|
||||
fn render(&mut self, _cx: &mut ViewContext<Self>) -> impl IntoElement {
|
||||
div()
|
||||
.size_full()
|
||||
.key_context("ProposedChangesEditor")
|
||||
.child(self.editor.clone())
|
||||
self.editor.clone()
|
||||
}
|
||||
}
|
||||
|
||||
@@ -334,21 +331,17 @@ impl ProposedChangesEditorToolbar {
|
||||
}
|
||||
|
||||
impl Render for ProposedChangesEditorToolbar {
|
||||
fn render(&mut self, cx: &mut ViewContext<Self>) -> impl IntoElement {
|
||||
let button_like = ButtonLike::new("apply-changes").child(Label::new("Apply All"));
|
||||
|
||||
match &self.current_editor {
|
||||
Some(editor) => {
|
||||
let focus_handle = editor.focus_handle(cx);
|
||||
let keybinding = KeyBinding::for_action_in(&ApplyAllDiffHunks, &focus_handle, cx)
|
||||
.map(|binding| binding.into_any_element());
|
||||
|
||||
button_like.children(keybinding).on_click({
|
||||
move |_event, cx| focus_handle.dispatch_action(&ApplyAllDiffHunks, cx)
|
||||
})
|
||||
fn render(&mut self, _cx: &mut ViewContext<Self>) -> impl IntoElement {
|
||||
let editor = self.current_editor.clone();
|
||||
Button::new("apply-changes", "Apply All").on_click(move |_, cx| {
|
||||
if let Some(editor) = &editor {
|
||||
editor.update(cx, |editor, cx| {
|
||||
editor.editor.update(cx, |editor, cx| {
|
||||
editor.apply_all_diff_hunks(cx);
|
||||
})
|
||||
});
|
||||
}
|
||||
None => button_like.disabled(true),
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
use std::{
|
||||
cell::Ref,
|
||||
cmp, iter, mem,
|
||||
iter, mem,
|
||||
ops::{Deref, DerefMut, Range, Sub},
|
||||
sync::Arc,
|
||||
};
|
||||
@@ -8,14 +8,14 @@ use std::{
|
||||
use collections::HashMap;
|
||||
use gpui::{AppContext, Model, Pixels};
|
||||
use itertools::Itertools;
|
||||
use language::{Bias, Point, Selection, SelectionGoal, TextDimension};
|
||||
use language::{Bias, Point, Selection, SelectionGoal, TextDimension, ToPoint};
|
||||
use util::post_inc;
|
||||
|
||||
use crate::{
|
||||
display_map::{DisplayMap, DisplaySnapshot, ToDisplayPoint},
|
||||
movement::TextLayoutDetails,
|
||||
Anchor, DisplayPoint, DisplayRow, ExcerptId, MultiBuffer, MultiBufferSnapshot, SelectMode,
|
||||
ToOffset, ToPoint,
|
||||
ToOffset,
|
||||
};
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
@@ -96,25 +96,27 @@ impl SelectionsCollection {
|
||||
|
||||
pub fn pending<D: TextDimension + Ord + Sub<D, Output = D>>(
|
||||
&self,
|
||||
cx: &mut AppContext,
|
||||
cx: &AppContext,
|
||||
) -> Option<Selection<D>> {
|
||||
let map = self.display_map(cx);
|
||||
let selection = resolve_selections(self.pending_anchor().as_ref(), &map).next();
|
||||
selection
|
||||
self.pending_anchor()
|
||||
.as_ref()
|
||||
.map(|pending| pending.map(|p| p.summary::<D>(&self.buffer(cx))))
|
||||
}
|
||||
|
||||
pub(crate) fn pending_mode(&self) -> Option<SelectMode> {
|
||||
self.pending.as_ref().map(|pending| pending.mode.clone())
|
||||
}
|
||||
|
||||
pub fn all<'a, D>(&self, cx: &mut AppContext) -> Vec<Selection<D>>
|
||||
pub fn all<'a, D>(&self, cx: &AppContext) -> Vec<Selection<D>>
|
||||
where
|
||||
D: 'a + TextDimension + Ord + Sub<D, Output = D>,
|
||||
{
|
||||
let map = self.display_map(cx);
|
||||
let disjoint_anchors = &self.disjoint;
|
||||
let mut disjoint = resolve_selections::<D, _>(disjoint_anchors.iter(), &map).peekable();
|
||||
let mut disjoint =
|
||||
resolve_multiple::<D, _>(disjoint_anchors.iter(), &self.buffer(cx)).peekable();
|
||||
|
||||
let mut pending_opt = self.pending::<D>(cx);
|
||||
|
||||
iter::from_fn(move || {
|
||||
if let Some(pending) = pending_opt.as_mut() {
|
||||
while let Some(next_selection) = disjoint.peek() {
|
||||
@@ -192,62 +194,39 @@ impl SelectionsCollection {
|
||||
pub fn disjoint_in_range<'a, D>(
|
||||
&self,
|
||||
range: Range<Anchor>,
|
||||
cx: &mut AppContext,
|
||||
cx: &AppContext,
|
||||
) -> Vec<Selection<D>>
|
||||
where
|
||||
D: 'a + TextDimension + Ord + Sub<D, Output = D> + std::fmt::Debug,
|
||||
{
|
||||
let map = self.display_map(cx);
|
||||
let buffer = self.buffer(cx);
|
||||
let start_ix = match self
|
||||
.disjoint
|
||||
.binary_search_by(|probe| probe.end.cmp(&range.start, &map.buffer_snapshot))
|
||||
.binary_search_by(|probe| probe.end.cmp(&range.start, &buffer))
|
||||
{
|
||||
Ok(ix) | Err(ix) => ix,
|
||||
};
|
||||
let end_ix = match self
|
||||
.disjoint
|
||||
.binary_search_by(|probe| probe.start.cmp(&range.end, &map.buffer_snapshot))
|
||||
.binary_search_by(|probe| probe.start.cmp(&range.end, &buffer))
|
||||
{
|
||||
Ok(ix) => ix + 1,
|
||||
Err(ix) => ix,
|
||||
};
|
||||
resolve_selections(&self.disjoint[start_ix..end_ix], &map).collect()
|
||||
resolve_multiple(&self.disjoint[start_ix..end_ix], &buffer).collect()
|
||||
}
|
||||
|
||||
pub fn all_display(
|
||||
&self,
|
||||
cx: &mut AppContext,
|
||||
) -> (DisplaySnapshot, Vec<Selection<DisplayPoint>>) {
|
||||
let map = self.display_map(cx);
|
||||
let disjoint_anchors = &self.disjoint;
|
||||
let mut disjoint = resolve_selections_display(disjoint_anchors.iter(), &map).peekable();
|
||||
let mut pending_opt =
|
||||
resolve_selections_display(self.pending_anchor().as_ref(), &map).next();
|
||||
let selections = iter::from_fn(move || {
|
||||
if let Some(pending) = pending_opt.as_mut() {
|
||||
while let Some(next_selection) = disjoint.peek() {
|
||||
if pending.start <= next_selection.end && pending.end >= next_selection.start {
|
||||
let next_selection = disjoint.next().unwrap();
|
||||
if next_selection.start < pending.start {
|
||||
pending.start = next_selection.start;
|
||||
}
|
||||
if next_selection.end > pending.end {
|
||||
pending.end = next_selection.end;
|
||||
}
|
||||
} else if next_selection.end < pending.start {
|
||||
return disjoint.next();
|
||||
} else {
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
pending_opt.take()
|
||||
} else {
|
||||
disjoint.next()
|
||||
}
|
||||
})
|
||||
.collect();
|
||||
(map, selections)
|
||||
let display_map = self.display_map(cx);
|
||||
let selections = self
|
||||
.all::<Point>(cx)
|
||||
.into_iter()
|
||||
.map(|selection| selection.map(|point| point.to_display_point(&display_map)))
|
||||
.collect();
|
||||
(display_map, selections)
|
||||
}
|
||||
|
||||
pub fn newest_anchor(&self) -> &Selection<Anchor> {
|
||||
@@ -260,20 +239,16 @@ impl SelectionsCollection {
|
||||
|
||||
pub fn newest<D: TextDimension + Ord + Sub<D, Output = D>>(
|
||||
&self,
|
||||
cx: &mut AppContext,
|
||||
cx: &AppContext,
|
||||
) -> Selection<D> {
|
||||
let map = self.display_map(cx);
|
||||
let selection = resolve_selections([self.newest_anchor()], &map)
|
||||
.next()
|
||||
.unwrap();
|
||||
selection
|
||||
resolve(self.newest_anchor(), &self.buffer(cx))
|
||||
}
|
||||
|
||||
pub fn newest_display(&self, cx: &mut AppContext) -> Selection<DisplayPoint> {
|
||||
let map = self.display_map(cx);
|
||||
let selection = resolve_selections_display([self.newest_anchor()], &map)
|
||||
.next()
|
||||
.unwrap();
|
||||
let display_map = self.display_map(cx);
|
||||
let selection = self
|
||||
.newest_anchor()
|
||||
.map(|point| point.to_display_point(&display_map));
|
||||
selection
|
||||
}
|
||||
|
||||
@@ -287,13 +262,9 @@ impl SelectionsCollection {
|
||||
|
||||
pub fn oldest<D: TextDimension + Ord + Sub<D, Output = D>>(
|
||||
&self,
|
||||
cx: &mut AppContext,
|
||||
cx: &AppContext,
|
||||
) -> Selection<D> {
|
||||
let map = self.display_map(cx);
|
||||
let selection = resolve_selections([self.oldest_anchor()], &map)
|
||||
.next()
|
||||
.unwrap();
|
||||
selection
|
||||
resolve(self.oldest_anchor(), &self.buffer(cx))
|
||||
}
|
||||
|
||||
pub fn first_anchor(&self) -> Selection<Anchor> {
|
||||
@@ -305,14 +276,14 @@ impl SelectionsCollection {
|
||||
|
||||
pub fn first<D: TextDimension + Ord + Sub<D, Output = D>>(
|
||||
&self,
|
||||
cx: &mut AppContext,
|
||||
cx: &AppContext,
|
||||
) -> Selection<D> {
|
||||
self.all(cx).first().unwrap().clone()
|
||||
}
|
||||
|
||||
pub fn last<D: TextDimension + Ord + Sub<D, Output = D>>(
|
||||
&self,
|
||||
cx: &mut AppContext,
|
||||
cx: &AppContext,
|
||||
) -> Selection<D> {
|
||||
self.all(cx).last().unwrap().clone()
|
||||
}
|
||||
@@ -327,7 +298,7 @@ impl SelectionsCollection {
|
||||
#[cfg(any(test, feature = "test-support"))]
|
||||
pub fn ranges<D: TextDimension + Ord + Sub<D, Output = D> + std::fmt::Debug>(
|
||||
&self,
|
||||
cx: &mut AppContext,
|
||||
cx: &AppContext,
|
||||
) -> Vec<Range<D>> {
|
||||
self.all::<D>(cx)
|
||||
.iter()
|
||||
@@ -343,14 +314,15 @@ impl SelectionsCollection {
|
||||
|
||||
#[cfg(any(test, feature = "test-support"))]
|
||||
pub fn display_ranges(&self, cx: &mut AppContext) -> Vec<Range<DisplayPoint>> {
|
||||
self.all_display(cx)
|
||||
.1
|
||||
.into_iter()
|
||||
.map(|selection| {
|
||||
if selection.reversed {
|
||||
selection.end..selection.start
|
||||
let display_map = self.display_map(cx);
|
||||
self.disjoint_anchors()
|
||||
.iter()
|
||||
.chain(self.pending_anchor().as_ref())
|
||||
.map(|s| {
|
||||
if s.reversed {
|
||||
s.end.to_display_point(&display_map)..s.start.to_display_point(&display_map)
|
||||
} else {
|
||||
selection.start..selection.end
|
||||
s.start.to_display_point(&display_map)..s.end.to_display_point(&display_map)
|
||||
}
|
||||
})
|
||||
.collect()
|
||||
@@ -503,7 +475,7 @@ impl<'a> MutableSelectionsCollection<'a> {
|
||||
where
|
||||
T: 'a + ToOffset + ToPoint + TextDimension + Ord + Sub<T, Output = T> + std::marker::Copy,
|
||||
{
|
||||
let mut selections = self.collection.all(self.cx);
|
||||
let mut selections = self.all(self.cx);
|
||||
let mut start = range.start.to_offset(&self.buffer());
|
||||
let mut end = range.end.to_offset(&self.buffer());
|
||||
let reversed = if start > end {
|
||||
@@ -564,9 +536,9 @@ impl<'a> MutableSelectionsCollection<'a> {
|
||||
}
|
||||
|
||||
pub fn select_anchors(&mut self, selections: Vec<Selection<Anchor>>) {
|
||||
let map = self.display_map();
|
||||
let buffer = self.buffer.read(self.cx).snapshot(self.cx);
|
||||
let resolved_selections =
|
||||
resolve_selections::<usize, _>(&selections, &map).collect::<Vec<_>>();
|
||||
resolve_multiple::<usize, _>(&selections, &buffer).collect::<Vec<_>>();
|
||||
self.select(resolved_selections);
|
||||
}
|
||||
|
||||
@@ -676,16 +648,19 @@ impl<'a> MutableSelectionsCollection<'a> {
|
||||
) {
|
||||
let mut changed = false;
|
||||
let display_map = self.display_map();
|
||||
let (_, selections) = self.collection.all_display(self.cx);
|
||||
let selections = selections
|
||||
let selections = self
|
||||
.all::<Point>(self.cx)
|
||||
.into_iter()
|
||||
.map(|selection| {
|
||||
let mut moved_selection = selection.clone();
|
||||
let mut moved_selection =
|
||||
selection.map(|point| point.to_display_point(&display_map));
|
||||
move_selection(&display_map, &mut moved_selection);
|
||||
let moved_selection =
|
||||
moved_selection.map(|display_point| display_point.to_point(&display_map));
|
||||
if selection != moved_selection {
|
||||
changed = true;
|
||||
}
|
||||
moved_selection.map(|display_point| display_point.to_point(&display_map))
|
||||
moved_selection
|
||||
})
|
||||
.collect();
|
||||
|
||||
@@ -701,7 +676,6 @@ impl<'a> MutableSelectionsCollection<'a> {
|
||||
let mut changed = false;
|
||||
let snapshot = self.buffer().clone();
|
||||
let selections = self
|
||||
.collection
|
||||
.all::<usize>(self.cx)
|
||||
.into_iter()
|
||||
.map(|selection| {
|
||||
@@ -826,8 +800,8 @@ impl<'a> MutableSelectionsCollection<'a> {
|
||||
.collect();
|
||||
|
||||
if !adjusted_disjoint.is_empty() {
|
||||
let map = self.display_map();
|
||||
let resolved_selections = resolve_selections(adjusted_disjoint.iter(), &map).collect();
|
||||
let resolved_selections =
|
||||
resolve_multiple(adjusted_disjoint.iter(), &self.buffer()).collect();
|
||||
self.select::<usize>(resolved_selections);
|
||||
}
|
||||
|
||||
@@ -871,77 +845,34 @@ impl<'a> DerefMut for MutableSelectionsCollection<'a> {
|
||||
}
|
||||
|
||||
// Panics if passed selections are not in order
|
||||
fn resolve_selections_display<'a>(
|
||||
selections: impl 'a + IntoIterator<Item = &'a Selection<Anchor>>,
|
||||
map: &'a DisplaySnapshot,
|
||||
) -> impl 'a + Iterator<Item = Selection<DisplayPoint>> {
|
||||
let (to_summarize, selections) = selections.into_iter().tee();
|
||||
let mut summaries = map
|
||||
.buffer_snapshot
|
||||
.summaries_for_anchors::<Point, _>(to_summarize.flat_map(|s| [&s.start, &s.end]))
|
||||
.into_iter();
|
||||
let mut selections = selections
|
||||
.map(move |s| {
|
||||
let start = summaries.next().unwrap();
|
||||
let end = summaries.next().unwrap();
|
||||
|
||||
let display_start = map.point_to_display_point(start, Bias::Left);
|
||||
let display_end = if start == end {
|
||||
map.point_to_display_point(end, Bias::Right)
|
||||
} else {
|
||||
map.point_to_display_point(end, Bias::Left)
|
||||
};
|
||||
|
||||
Selection {
|
||||
id: s.id,
|
||||
start: display_start,
|
||||
end: display_end,
|
||||
reversed: s.reversed,
|
||||
goal: s.goal,
|
||||
}
|
||||
})
|
||||
.peekable();
|
||||
iter::from_fn(move || {
|
||||
let mut selection = selections.next()?;
|
||||
while let Some(next_selection) = selections.peek() {
|
||||
if selection.end >= next_selection.start {
|
||||
selection.end = cmp::max(selection.end, next_selection.end.clone());
|
||||
selections.next();
|
||||
} else {
|
||||
break;
|
||||
}
|
||||
}
|
||||
Some(selection)
|
||||
})
|
||||
}
|
||||
|
||||
// Panics if passed selections are not in order
|
||||
pub(crate) fn resolve_selections<'a, D, I>(
|
||||
pub(crate) fn resolve_multiple<'a, D, I>(
|
||||
selections: I,
|
||||
map: &'a DisplaySnapshot,
|
||||
snapshot: &MultiBufferSnapshot,
|
||||
) -> impl 'a + Iterator<Item = Selection<D>>
|
||||
where
|
||||
D: TextDimension + Clone + Ord + Sub<D, Output = D>,
|
||||
D: TextDimension + Ord + Sub<D, Output = D>,
|
||||
I: 'a + IntoIterator<Item = &'a Selection<Anchor>>,
|
||||
{
|
||||
let (to_convert, selections) = resolve_selections_display(selections, map).tee();
|
||||
let mut converted_endpoints = map
|
||||
.buffer_snapshot
|
||||
.dimensions_from_points::<D>(to_convert.flat_map(|s| {
|
||||
let start = map.display_point_to_point(s.start, Bias::Left);
|
||||
let end = map.display_point_to_point(s.end, Bias::Right);
|
||||
[start, end]
|
||||
}))
|
||||
let (to_summarize, selections) = selections.into_iter().tee();
|
||||
let mut summaries = snapshot
|
||||
.summaries_for_anchors::<D, _>(
|
||||
to_summarize
|
||||
.flat_map(|s| [&s.start, &s.end])
|
||||
.collect::<Vec<_>>(),
|
||||
)
|
||||
.into_iter();
|
||||
selections.map(move |s| {
|
||||
let start = converted_endpoints.next().unwrap();
|
||||
let end = converted_endpoints.next().unwrap();
|
||||
Selection {
|
||||
id: s.id,
|
||||
start,
|
||||
end,
|
||||
reversed: s.reversed,
|
||||
goal: s.goal,
|
||||
}
|
||||
selections.map(move |s| Selection {
|
||||
id: s.id,
|
||||
start: summaries.next().unwrap(),
|
||||
end: summaries.next().unwrap(),
|
||||
reversed: s.reversed,
|
||||
goal: s.goal,
|
||||
})
|
||||
}
|
||||
|
||||
fn resolve<D: TextDimension + Ord + Sub<D, Output = D>>(
|
||||
selection: &Selection<Anchor>,
|
||||
buffer: &MultiBufferSnapshot,
|
||||
) -> Selection<D> {
|
||||
selection.map(|p| p.summary::<D>(buffer))
|
||||
}
|
||||
|
||||
@@ -1,7 +1,6 @@
|
||||
use std::{
|
||||
borrow::Cow,
|
||||
ops::{Deref, DerefMut, Range},
|
||||
path::Path,
|
||||
sync::Arc,
|
||||
};
|
||||
|
||||
@@ -67,12 +66,10 @@ impl EditorLspTestContext {
|
||||
);
|
||||
language_registry.add(Arc::new(language));
|
||||
|
||||
let root = Self::root_path();
|
||||
|
||||
app_state
|
||||
.fs
|
||||
.as_fake()
|
||||
.insert_tree(root, json!({ "dir": { file_name.clone(): "" }}))
|
||||
.insert_tree("/root", json!({ "dir": { file_name.clone(): "" }}))
|
||||
.await;
|
||||
|
||||
let window = cx.add_window(|cx| Workspace::test_new(project.clone(), cx));
|
||||
@@ -82,7 +79,7 @@ impl EditorLspTestContext {
|
||||
let mut cx = VisualTestContext::from_window(*window.deref(), cx);
|
||||
project
|
||||
.update(&mut cx, |project, cx| {
|
||||
project.find_or_create_worktree(root, true, cx)
|
||||
project.find_or_create_worktree("/root", true, cx)
|
||||
})
|
||||
.await
|
||||
.unwrap();
|
||||
@@ -111,7 +108,7 @@ impl EditorLspTestContext {
|
||||
},
|
||||
lsp,
|
||||
workspace,
|
||||
buffer_lsp_url: lsp::Url::from_file_path(root.join("dir").join(file_name)).unwrap(),
|
||||
buffer_lsp_url: lsp::Url::from_file_path(format!("/root/dir/{file_name}")).unwrap(),
|
||||
}
|
||||
}
|
||||
|
||||
@@ -126,7 +123,6 @@ impl EditorLspTestContext {
|
||||
path_suffixes: vec!["rs".to_string()],
|
||||
..Default::default()
|
||||
},
|
||||
line_comments: vec!["// ".into(), "/// ".into(), "//! ".into()],
|
||||
..Default::default()
|
||||
},
|
||||
Some(tree_sitter_rust::LANGUAGE.into()),
|
||||
@@ -313,16 +309,6 @@ impl EditorLspTestContext {
|
||||
pub fn notify<T: notification::Notification>(&self, params: T::Params) {
|
||||
self.lsp.notify::<T>(params);
|
||||
}
|
||||
|
||||
#[cfg(target_os = "windows")]
|
||||
fn root_path() -> &'static Path {
|
||||
Path::new("C:\\root")
|
||||
}
|
||||
|
||||
#[cfg(not(target_os = "windows"))]
|
||||
fn root_path() -> &'static Path {
|
||||
Path::new("/root")
|
||||
}
|
||||
}
|
||||
|
||||
impl Deref for EditorLspTestContext {
|
||||
|
||||
@@ -17,7 +17,6 @@ use project::{FakeFs, Project};
|
||||
use std::{
|
||||
any::TypeId,
|
||||
ops::{Deref, DerefMut, Range},
|
||||
path::Path,
|
||||
sync::{
|
||||
atomic::{AtomicUsize, Ordering},
|
||||
Arc,
|
||||
@@ -43,18 +42,17 @@ impl EditorTestContext {
|
||||
pub async fn new(cx: &mut gpui::TestAppContext) -> EditorTestContext {
|
||||
let fs = FakeFs::new(cx.executor());
|
||||
// fs.insert_file("/file", "".to_owned()).await;
|
||||
let root = Self::root_path();
|
||||
fs.insert_tree(
|
||||
root,
|
||||
"/root",
|
||||
serde_json::json!({
|
||||
"file": "",
|
||||
}),
|
||||
)
|
||||
.await;
|
||||
let project = Project::test(fs, [root], cx).await;
|
||||
let project = Project::test(fs, ["/root".as_ref()], cx).await;
|
||||
let buffer = project
|
||||
.update(cx, |project, cx| {
|
||||
project.open_local_buffer(root.join("file"), cx)
|
||||
project.open_local_buffer("/root/file", cx)
|
||||
})
|
||||
.await
|
||||
.unwrap();
|
||||
@@ -73,16 +71,6 @@ impl EditorTestContext {
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(target_os = "windows")]
|
||||
fn root_path() -> &'static Path {
|
||||
Path::new("C:\\root")
|
||||
}
|
||||
|
||||
#[cfg(not(target_os = "windows"))]
|
||||
fn root_path() -> &'static Path {
|
||||
Path::new("/root")
|
||||
}
|
||||
|
||||
pub async fn for_editor(editor: WindowHandle<Editor>, cx: &mut gpui::TestAppContext) -> Self {
|
||||
let editor_view = editor.root_view(cx).unwrap();
|
||||
Self {
|
||||
|
||||
@@ -8,8 +8,7 @@ use collections::HashMap;
|
||||
use futures::{Future, FutureExt};
|
||||
use gpui::AsyncAppContext;
|
||||
use language::{
|
||||
CodeLabel, HighlightId, Language, LanguageServerName, LanguageToolchainStore, LspAdapter,
|
||||
LspAdapterDelegate,
|
||||
CodeLabel, HighlightId, Language, LanguageServerName, LspAdapter, LspAdapterDelegate,
|
||||
};
|
||||
use lsp::{CodeActionKind, LanguageServerBinary, LanguageServerBinaryOptions};
|
||||
use serde::Serialize;
|
||||
@@ -195,7 +194,6 @@ impl LspAdapter for ExtensionLspAdapter {
|
||||
async fn workspace_configuration(
|
||||
self: Arc<Self>,
|
||||
delegate: &Arc<dyn LspAdapterDelegate>,
|
||||
_: Arc<dyn LanguageToolchainStore>,
|
||||
_cx: &mut AsyncAppContext,
|
||||
) -> Result<Value> {
|
||||
let delegate = delegate.clone();
|
||||
|
||||
@@ -37,7 +37,7 @@ use http_client::{AsyncBody, HttpClient, HttpClientWithUrl};
|
||||
use indexed_docs::{IndexedDocsRegistry, ProviderId};
|
||||
use language::{
|
||||
LanguageConfig, LanguageMatcher, LanguageName, LanguageQueries, LanguageRegistry,
|
||||
LoadedLanguage, QUERY_FILENAME_PREFIXES,
|
||||
QUERY_FILENAME_PREFIXES,
|
||||
};
|
||||
use node_runtime::NodeRuntime;
|
||||
use project::ContextProviderWithTasks;
|
||||
@@ -1102,21 +1102,14 @@ impl ExtensionStore {
|
||||
let config = std::fs::read_to_string(language_path.join("config.toml"))?;
|
||||
let config: LanguageConfig = ::toml::from_str(&config)?;
|
||||
let queries = load_plugin_queries(&language_path);
|
||||
let context_provider =
|
||||
std::fs::read_to_string(language_path.join("tasks.json"))
|
||||
.ok()
|
||||
.and_then(|contents| {
|
||||
let definitions =
|
||||
serde_json_lenient::from_str(&contents).log_err()?;
|
||||
Some(Arc::new(ContextProviderWithTasks::new(definitions)) as Arc<_>)
|
||||
});
|
||||
let tasks = std::fs::read_to_string(language_path.join("tasks.json"))
|
||||
.ok()
|
||||
.and_then(|contents| {
|
||||
let definitions = serde_json_lenient::from_str(&contents).log_err()?;
|
||||
Some(Arc::new(ContextProviderWithTasks::new(definitions)) as Arc<_>)
|
||||
});
|
||||
|
||||
Ok(LoadedLanguage {
|
||||
config,
|
||||
queries,
|
||||
context_provider,
|
||||
toolchain_provider: None,
|
||||
})
|
||||
Ok((config, queries, tasks))
|
||||
},
|
||||
);
|
||||
}
|
||||
|
||||
@@ -59,12 +59,6 @@ impl FeatureFlag for ZedPro {
|
||||
const NAME: &'static str = "zed-pro";
|
||||
}
|
||||
|
||||
pub struct NotebookFeatureFlag;
|
||||
|
||||
impl FeatureFlag for NotebookFeatureFlag {
|
||||
const NAME: &'static str = "notebooks";
|
||||
}
|
||||
|
||||
pub struct AutoCommand {}
|
||||
impl FeatureFlag for AutoCommand {
|
||||
const NAME: &'static str = "auto-command";
|
||||
|
||||
@@ -790,9 +790,9 @@ impl FileFinderDelegate {
|
||||
let mut path_matches = Vec::new();
|
||||
|
||||
let abs_file_exists = if let Ok(task) = project.update(&mut cx, |this, cx| {
|
||||
this.resolve_abs_file_path(query.path_query(), cx)
|
||||
this.abs_file_path_exists(query.path_query(), cx)
|
||||
}) {
|
||||
task.await.is_some()
|
||||
task.await
|
||||
} else {
|
||||
false
|
||||
};
|
||||
|
||||
@@ -4,7 +4,7 @@ use gpui::{HighlightStyle, Model, StyledText};
|
||||
use picker::{Picker, PickerDelegate};
|
||||
use project::{Entry, PathMatchCandidateSet, Project, ProjectPath, WorktreeId};
|
||||
use std::{
|
||||
path::{Path, PathBuf},
|
||||
path::PathBuf,
|
||||
sync::{
|
||||
atomic::{self, AtomicBool},
|
||||
Arc,
|
||||
@@ -254,7 +254,6 @@ impl PickerDelegate for NewPathDelegate {
|
||||
.trim()
|
||||
.trim_start_matches("./")
|
||||
.trim_start_matches('/');
|
||||
|
||||
let (dir, suffix) = if let Some(index) = query.rfind('/') {
|
||||
let suffix = if index + 1 < query.len() {
|
||||
Some(query[index + 1..].to_string())
|
||||
@@ -318,14 +317,6 @@ impl PickerDelegate for NewPathDelegate {
|
||||
})
|
||||
}
|
||||
|
||||
fn confirm_completion(
|
||||
&mut self,
|
||||
_: String,
|
||||
cx: &mut ViewContext<Picker<Self>>,
|
||||
) -> Option<String> {
|
||||
self.confirm_update_query(cx)
|
||||
}
|
||||
|
||||
fn confirm_update_query(&mut self, cx: &mut ViewContext<Picker<Self>>) -> Option<String> {
|
||||
let m = self.matches.get(self.selected_index)?;
|
||||
if m.is_dir(self.project.read(cx), cx) {
|
||||
@@ -431,32 +422,7 @@ impl NewPathDelegate {
|
||||
) {
|
||||
cx.notify();
|
||||
if query.is_empty() {
|
||||
self.matches = self
|
||||
.project
|
||||
.read(cx)
|
||||
.worktrees(cx)
|
||||
.flat_map(|worktree| {
|
||||
let worktree_id = worktree.read(cx).id();
|
||||
worktree
|
||||
.read(cx)
|
||||
.child_entries(Path::new(""))
|
||||
.filter_map(move |entry| {
|
||||
entry.is_dir().then(|| Match {
|
||||
path_match: Some(PathMatch {
|
||||
score: 1.0,
|
||||
positions: Default::default(),
|
||||
worktree_id: worktree_id.to_usize(),
|
||||
path: entry.path.clone(),
|
||||
path_prefix: "".into(),
|
||||
is_dir: entry.is_dir(),
|
||||
distance_to_relative_ancestor: 0,
|
||||
}),
|
||||
suffix: None,
|
||||
})
|
||||
})
|
||||
})
|
||||
.collect();
|
||||
|
||||
self.matches = vec![];
|
||||
return;
|
||||
}
|
||||
|
||||
|
||||
@@ -220,11 +220,7 @@ impl PickerDelegate for OpenPathDelegate {
|
||||
})
|
||||
}
|
||||
|
||||
fn confirm_completion(
|
||||
&mut self,
|
||||
query: String,
|
||||
_: &mut ViewContext<Picker<Self>>,
|
||||
) -> Option<String> {
|
||||
fn confirm_completion(&self, query: String) -> Option<String> {
|
||||
Some(
|
||||
maybe!({
|
||||
let m = self.matches.get(self.selected_index)?;
|
||||
|
||||
@@ -813,7 +813,6 @@ struct FakeFsState {
|
||||
root: Arc<Mutex<FakeFsEntry>>,
|
||||
next_inode: u64,
|
||||
next_mtime: SystemTime,
|
||||
git_event_tx: smol::channel::Sender<PathBuf>,
|
||||
event_txs: Vec<smol::channel::Sender<Vec<PathEvent>>>,
|
||||
events_paused: bool,
|
||||
buffered_events: Vec<PathEvent>,
|
||||
@@ -866,22 +865,14 @@ impl FakeFsState {
|
||||
let mut entry_stack = Vec::new();
|
||||
'outer: loop {
|
||||
let mut path_components = path.components().peekable();
|
||||
let mut prefix = None;
|
||||
while let Some(component) = path_components.next() {
|
||||
match component {
|
||||
Component::Prefix(prefix_component) => prefix = Some(prefix_component),
|
||||
Component::Prefix(_) => panic!("prefix paths aren't supported"),
|
||||
Component::RootDir => {
|
||||
entry_stack.clear();
|
||||
entry_stack.push(self.root.clone());
|
||||
canonical_path.clear();
|
||||
match prefix {
|
||||
Some(prefix_component) => {
|
||||
canonical_path = PathBuf::from(prefix_component.as_os_str());
|
||||
// Prefixes like `C:\\` are represented without their trailing slash, so we have to re-add it.
|
||||
canonical_path.push(std::path::MAIN_SEPARATOR_STR);
|
||||
}
|
||||
None => canonical_path = PathBuf::from(std::path::MAIN_SEPARATOR_STR),
|
||||
}
|
||||
canonical_path.push("/");
|
||||
}
|
||||
Component::CurDir => {}
|
||||
Component::ParentDir => {
|
||||
@@ -903,7 +894,7 @@ impl FakeFsState {
|
||||
}
|
||||
}
|
||||
entry_stack.push(entry.clone());
|
||||
canonical_path = canonical_path.join(name);
|
||||
canonical_path.push(name);
|
||||
} else {
|
||||
return None;
|
||||
}
|
||||
@@ -965,15 +956,9 @@ pub static FS_DOT_GIT: std::sync::LazyLock<&'static OsStr> =

#[cfg(any(test, feature = "test-support"))]
impl FakeFs {
    /// We need to use something large enough for Windows and Unix to consider this a new file.
    /// https://doc.rust-lang.org/nightly/std/time/struct.SystemTime.html#platform-specific-behavior
    const SYSTEMTIME_INTERVAL: u64 = 100;

    pub fn new(executor: gpui::BackgroundExecutor) -> Arc<Self> {
        let (tx, mut rx) = smol::channel::bounded::<PathBuf>(10);

        let this = Arc::new(Self {
            executor: executor.clone(),
        Arc::new(Self {
            executor,
            state: Mutex::new(FakeFsState {
                root: Arc::new(Mutex::new(FakeFsEntry::Dir {
                    inode: 0,
@@ -982,7 +967,6 @@ impl FakeFs {
                    entries: Default::default(),
                    git_repo_state: None,
                })),
                git_event_tx: tx,
                next_mtime: SystemTime::UNIX_EPOCH,
                next_inode: 1,
                event_txs: Default::default(),
@@ -991,22 +975,7 @@ impl FakeFs {
                read_dir_call_count: 0,
                metadata_call_count: 0,
            }),
        });

        executor.spawn({
            let this = this.clone();
            async move {
                while let Some(git_event) = rx.next().await {
                    if let Some(mut state) = this.state.try_lock() {
                        state.emit_event([(git_event, None)]);
                    } else {
                        panic!("Failed to lock file system state, this execution would have caused a test hang");
                    }
                }
            }
        }).detach();

        this
        })
    }
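Per the doc comment above, the fake clock advances by `SYSTEMTIME_INTERVAL` (100 ns) rather than 1 ns so that successive mtimes remain distinct on platforms whose file-time resolution is coarser. A minimal std-only sketch of that invariant (the helper name is illustrative; only the constant mirrors the diff):

```rust
use std::time::{Duration, SystemTime};

// Large enough that successive fake mtimes stay distinct even where file-time
// resolution is coarser than a nanosecond.
const SYSTEMTIME_INTERVAL: u64 = 100;

// Illustrative helper: hand out a strictly increasing mtime per operation.
fn bump_mtime(next_mtime: &mut SystemTime) -> SystemTime {
    let mtime = *next_mtime;
    *next_mtime += Duration::from_nanos(SYSTEMTIME_INTERVAL);
    mtime
}

fn main() {
    let mut clock = SystemTime::UNIX_EPOCH;
    let first = bump_mtime(&mut clock);
    let second = bump_mtime(&mut clock);
    assert!(second > first);
}
```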
    pub fn set_next_mtime(&self, next_mtime: SystemTime) {
@@ -1020,7 +989,7 @@ impl FakeFs {
        let new_mtime = state.next_mtime;
        let new_inode = state.next_inode;
        state.next_inode += 1;
        state.next_mtime += Duration::from_nanos(Self::SYSTEMTIME_INTERVAL);
        state.next_mtime += Duration::from_nanos(1);
        state
            .write_path(path, move |entry| {
                match entry {
@@ -1073,7 +1042,7 @@ impl FakeFs {
        let inode = state.next_inode;
        let mtime = state.next_mtime;
        state.next_inode += 1;
        state.next_mtime += Duration::from_nanos(Self::SYSTEMTIME_INTERVAL);
        state.next_mtime += Duration::from_nanos(1);
        let file = Arc::new(Mutex::new(FakeFsEntry::File {
            inode,
            mtime,
@@ -1200,12 +1169,7 @@ impl FakeFs {
        let mut entry = entry.lock();

        if let FakeFsEntry::Dir { git_repo_state, .. } = &mut *entry {
            let repo_state = git_repo_state.get_or_insert_with(|| {
                Arc::new(Mutex::new(FakeGitRepositoryState::new(
                    dot_git.to_path_buf(),
                    state.git_event_tx.clone(),
                )))
            });
            let repo_state = git_repo_state.get_or_insert_with(Default::default);
            let mut repo_state = repo_state.lock();

            f(&mut repo_state);
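The `get_or_insert_with` call above lazily creates the fake repository state the first time a `.git` directory is touched; one side of the diff threads the event channel through a closure, the other can fall back to `Default::default`. A std-only sketch of the `Option` idiom itself:

```rust
// `Option::get_or_insert_with` initializes the value on first use and then
// hands back a mutable reference to the same value on every later call.
fn main() {
    let mut repo_state: Option<Vec<String>> = None;

    let state = repo_state.get_or_insert_with(Vec::new);
    state.push("main".to_string());

    // Second access reuses the value created above instead of rebuilding it.
    let state = repo_state.get_or_insert_with(|| unreachable!());
    assert_eq!(state.len(), 1);
}
```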
@@ -1220,22 +1184,7 @@ impl FakeFs {

    pub fn set_branch_name(&self, dot_git: &Path, branch: Option<impl Into<String>>) {
        self.with_git_state(dot_git, true, |state| {
            let branch = branch.map(Into::into);
            state.branches.extend(branch.clone());
            state.current_branch_name = branch.map(Into::into)
        })
    }

    pub fn insert_branches(&self, dot_git: &Path, branches: &[&str]) {
        self.with_git_state(dot_git, true, |state| {
            if let Some(first) = branches.first() {
                if state.current_branch_name.is_none() {
                    state.current_branch_name = Some(first.to_string())
                }
            }
            state
                .branches
                .extend(branches.iter().map(ToString::to_string));
            state.branch_name = branch.map(Into::into)
        })
    }

@@ -1435,16 +1384,15 @@ impl Fs for FakeFs {
        let mut created_dirs = Vec::new();
        let mut cur_path = PathBuf::new();
        for component in path.components() {
            let should_skip = matches!(component, Component::Prefix(..) | Component::RootDir);
            let mut state = self.state.lock();
            cur_path.push(component);
            if should_skip {
            if cur_path == Path::new("/") {
                continue;
            }
            let mut state = self.state.lock();

            let inode = state.next_inode;
            let mtime = state.next_mtime;
            state.next_mtime += Duration::from_nanos(Self::SYSTEMTIME_INTERVAL);
            state.next_mtime += Duration::from_nanos(1);
            state.next_inode += 1;
            state.write_path(&cur_path, |entry| {
                entry.or_insert_with(|| {
@@ -1470,7 +1418,7 @@ impl Fs for FakeFs {
        let mut state = self.state.lock();
        let inode = state.next_inode;
        let mtime = state.next_mtime;
        state.next_mtime += Duration::from_nanos(Self::SYSTEMTIME_INTERVAL);
        state.next_mtime += Duration::from_nanos(1);
        state.next_inode += 1;
        let file = Arc::new(Mutex::new(FakeFsEntry::File {
            inode,
@@ -1605,7 +1553,7 @@ impl Fs for FakeFs {
        let mut state = self.state.lock();
        let mtime = state.next_mtime;
        let inode = util::post_inc(&mut state.next_inode);
        state.next_mtime += Duration::from_nanos(Self::SYSTEMTIME_INTERVAL);
        state.next_mtime += Duration::from_nanos(1);
        let source_entry = state.read_path(&source)?;
        let content = source_entry.lock().file_content(&source)?.clone();
        let mut kind = Some(PathEventKind::Created);
@@ -1875,12 +1823,7 @@ impl Fs for FakeFs {
        let mut entry = entry.lock();
        if let FakeFsEntry::Dir { git_repo_state, .. } = &mut *entry {
            let state = git_repo_state
                .get_or_insert_with(|| {
                    Arc::new(Mutex::new(FakeGitRepositoryState::new(
                        abs_dot_git.to_path_buf(),
                        state.git_event_tx.clone(),
                    )))
                })
                .get_or_insert_with(|| Arc::new(Mutex::new(FakeGitRepositoryState::default())))
                .clone();
            Some(git::repository::FakeGitRepository::open(state))
        } else {

@@ -1,10 +1,4 @@
pub mod blame;
pub mod commit;
pub mod diff;
mod hosting_provider;
mod remote;
pub mod repository;
pub mod status;

use anyhow::{anyhow, Context, Result};
use serde::{Deserialize, Serialize};
@@ -13,10 +7,16 @@ use std::fmt;
use std::str::FromStr;
use std::sync::LazyLock;

pub use crate::hosting_provider::*;
pub use crate::remote::*;
pub use git2 as libgit;

pub use crate::hosting_provider::*;

pub mod blame;
pub mod commit;
pub mod diff;
pub mod repository;
pub mod status;

pub static DOT_GIT: LazyLock<&'static OsStr> = LazyLock::new(|| OsStr::new(".git"));
pub static COOKIES: LazyLock<&'static OsStr> = LazyLock::new(|| OsStr::new("cookies"));
pub static FSMONITOR_DAEMON: LazyLock<&'static OsStr> =

@@ -69,7 +69,7 @@ pub trait GitHostingProvider {
    /// Returns a formatted range of line numbers to be placed in a permalink URL.
    fn format_line_numbers(&self, start_line: u32, end_line: u32) -> String;

    fn parse_remote_url(&self, url: &str) -> Option<ParsedGitRemote>;
    fn parse_remote_url<'a>(&self, url: &'a str) -> Option<ParsedGitRemote<'a>>;

    fn extract_pull_request(
        &self,
@@ -111,12 +111,6 @@ impl GitHostingProviderRegistry {
        cx.global::<GlobalGitHostingProviderRegistry>().0.clone()
    }

    /// Returns the global [`GitHostingProviderRegistry`], if one is set.
    pub fn try_global(cx: &AppContext) -> Option<Arc<Self>> {
        cx.try_global::<GlobalGitHostingProviderRegistry>()
            .map(|registry| registry.0.clone())
    }

    /// Returns the global [`GitHostingProviderRegistry`].
    ///
    /// Inserts a default [`GitHostingProviderRegistry`] if one does not yet exist.
@@ -159,10 +153,10 @@ impl GitHostingProviderRegistry {
    }
}

#[derive(Debug, PartialEq)]
pub struct ParsedGitRemote {
    pub owner: Arc<str>,
    pub repo: Arc<str>,
#[derive(Debug)]
pub struct ParsedGitRemote<'a> {
    pub owner: &'a str,
    pub repo: &'a str,
}

pub fn parse_git_remote_url(
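The hunk above swaps `ParsedGitRemote` between owned `Arc<str>` fields and fields borrowed from the URL with a `'a` lifetime; the borrowed form avoids allocation but ties the result to the input string. A std-only sketch of the borrowed variant (the parsing here is deliberately simplified and the names are illustrative):

```rust
// Borrowed form of a parsed remote: `owner` and `repo` are slices of the
// original URL string rather than owned Arc<str> allocations.
#[derive(Debug, PartialEq)]
struct ParsedRemote<'a> {
    owner: &'a str,
    repo: &'a str,
}

// Simplified parser for the HTTPS form only, to show the lifetime flow.
fn parse_https(url: &str) -> Option<ParsedRemote<'_>> {
    let rest = url.strip_prefix("https://github.com/")?;
    let (owner, repo) = rest.trim_end_matches(".git").split_once('/')?;
    Some(ParsedRemote { owner, repo })
}

fn main() {
    let parsed = parse_https("https://github.com/octocat/zed.git").unwrap();
    assert_eq!(parsed, ParsedRemote { owner: "octocat", repo: "zed" });
}
```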
@@ -1,85 +0,0 @@
use derive_more::Deref;
use url::Url;

/// The URL to a Git remote.
#[derive(Debug, PartialEq, Eq, Clone, Deref)]
pub struct RemoteUrl(Url);

impl std::str::FromStr for RemoteUrl {
    type Err = url::ParseError;

    fn from_str(input: &str) -> Result<Self, Self::Err> {
        if input.starts_with("git@") {
            // Rewrite remote URLs like `git@github.com:user/repo.git` to `ssh://git@github.com/user/repo.git`
            let ssh_url = input.replacen(':', "/", 1).replace("git@", "ssh://git@");
            Ok(RemoteUrl(Url::parse(&ssh_url)?))
        } else {
            Ok(RemoteUrl(Url::parse(input)?))
        }
    }
}

#[cfg(test)]
mod tests {
    use pretty_assertions::assert_eq;

    use super::*;

    #[test]
    fn test_parsing_valid_remote_urls() {
        let valid_urls = vec![
            (
                "https://github.com/octocat/zed.git",
                "https",
                "github.com",
                "/octocat/zed.git",
            ),
            (
                "git@github.com:octocat/zed.git",
                "ssh",
                "github.com",
                "/octocat/zed.git",
            ),
            (
                "ssh://git@github.com/octocat/zed.git",
                "ssh",
                "github.com",
                "/octocat/zed.git",
            ),
            (
                "file:///path/to/local/zed",
                "file",
                "",
                "/path/to/local/zed",
            ),
        ];

        for (input, expected_scheme, expected_host, expected_path) in valid_urls {
            let parsed = input.parse::<RemoteUrl>().expect("failed to parse URL");
            let url = parsed.0;
            assert_eq!(
                url.scheme(),
                expected_scheme,
                "unexpected scheme for {input:?}",
            );
            assert_eq!(
                url.host_str().unwrap_or(""),
                expected_host,
                "unexpected host for {input:?}",
            );
            assert_eq!(url.path(), expected_path, "unexpected path for {input:?}");
        }
    }

    #[test]
    fn test_parsing_invalid_remote_urls() {
        let invalid_urls = vec!["not_a_url", "http://"];

        for url in invalid_urls {
            assert!(
                url.parse::<RemoteUrl>().is_err(),
                "expected \"{url}\" to not parse as a Git remote URL",
            );
        }
    }
}
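The key trick in the `RemoteUrl` wrapper above is the scp-style rewrite: `git@host:owner/repo.git` is not a valid URL for the `url` crate, so the first `:` is swapped for `/` and an `ssh://` scheme is prepended before parsing. The string rewrite in isolation, as a std-only sketch:

```rust
// Scp-style remotes become parseable URLs after this rewrite; the `url` crate
// (not used in this sketch) can then extract scheme, host, and path as in the
// tests above.
fn to_ssh_url(input: &str) -> String {
    input.replacen(':', "/", 1).replace("git@", "ssh://git@")
}

fn main() {
    assert_eq!(
        to_ssh_url("git@github.com:octocat/zed.git"),
        "ssh://git@github.com/octocat/zed.git"
    );
}
```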
@@ -1,9 +1,8 @@
use crate::GitHostingProviderRegistry;
use crate::{blame::Blame, status::GitStatus};
use anyhow::{Context, Result};
use collections::{HashMap, HashSet};
use collections::HashMap;
use git2::BranchType;
use gpui::SharedString;
use parking_lot::Mutex;
use rope::Rope;
use serde::{Deserialize, Serialize};
@@ -18,7 +17,7 @@ use util::ResultExt;
#[derive(Clone, Debug, Hash, PartialEq)]
pub struct Branch {
    pub is_head: bool,
    pub name: SharedString,
    pub name: Box<str>,
    /// Timestamp of most recent commit, normalized to Unix Epoch format.
    pub unix_timestamp: Option<i64>,
}
@@ -42,7 +41,6 @@ pub trait GitRepository: Send + Sync {
|
||||
fn branches(&self) -> Result<Vec<Branch>>;
|
||||
fn change_branch(&self, _: &str) -> Result<()>;
|
||||
fn create_branch(&self, _: &str) -> Result<()>;
|
||||
fn branch_exits(&self, _: &str) -> Result<bool>;
|
||||
|
||||
fn blame(&self, path: &Path, content: Rope) -> Result<crate::blame::Blame>;
|
||||
}
|
||||
@@ -134,18 +132,6 @@ impl GitRepository for RealGitRepository {
|
||||
GitStatus::new(&self.git_binary_path, &working_directory, path_prefixes)
|
||||
}
|
||||
|
||||
fn branch_exits(&self, name: &str) -> Result<bool> {
|
||||
let repo = self.repository.lock();
|
||||
let branch = repo.find_branch(name, BranchType::Local);
|
||||
match branch {
|
||||
Ok(_) => Ok(true),
|
||||
Err(e) => match e.code() {
|
||||
git2::ErrorCode::NotFound => Ok(false),
|
||||
_ => Err(anyhow::anyhow!(e)),
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
fn branches(&self) -> Result<Vec<Branch>> {
|
||||
let repo = self.repository.lock();
|
||||
let local_branches = repo.branches(Some(BranchType::Local))?;
|
||||
@@ -153,11 +139,7 @@ impl GitRepository for RealGitRepository {
|
||||
.filter_map(|branch| {
|
||||
branch.ok().and_then(|(branch, _)| {
|
||||
let is_head = branch.is_head();
|
||||
let name = branch
|
||||
.name()
|
||||
.ok()
|
||||
.flatten()
|
||||
.map(|name| name.to_string().into())?;
|
||||
let name = branch.name().ok().flatten().map(Box::from)?;
|
||||
let timestamp = branch.get().peel_to_commit().ok()?.time();
|
||||
let unix_timestamp = timestamp.seconds();
|
||||
let timezone_offset = timestamp.offset_minutes();
|
||||
@@ -219,20 +201,17 @@ impl GitRepository for RealGitRepository {
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
#[derive(Debug, Clone, Default)]
|
||||
pub struct FakeGitRepository {
|
||||
state: Arc<Mutex<FakeGitRepositoryState>>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
#[derive(Debug, Clone, Default)]
|
||||
pub struct FakeGitRepositoryState {
|
||||
pub path: PathBuf,
|
||||
pub event_emitter: smol::channel::Sender<PathBuf>,
|
||||
pub index_contents: HashMap<PathBuf, String>,
|
||||
pub blames: HashMap<PathBuf, Blame>,
|
||||
pub worktree_statuses: HashMap<RepoPath, GitFileStatus>,
|
||||
pub current_branch_name: Option<String>,
|
||||
pub branches: HashSet<String>,
|
||||
pub branch_name: Option<String>,
|
||||
}
|
||||
|
||||
impl FakeGitRepository {
|
||||
@@ -241,20 +220,6 @@ impl FakeGitRepository {
|
||||
}
|
||||
}
|
||||
|
||||
impl FakeGitRepositoryState {
|
||||
pub fn new(path: PathBuf, event_emitter: smol::channel::Sender<PathBuf>) -> Self {
|
||||
FakeGitRepositoryState {
|
||||
path,
|
||||
event_emitter,
|
||||
index_contents: Default::default(),
|
||||
blames: Default::default(),
|
||||
worktree_statuses: Default::default(),
|
||||
current_branch_name: Default::default(),
|
||||
branches: Default::default(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl GitRepository for FakeGitRepository {
|
||||
fn reload_index(&self) {}
|
||||
|
||||
@@ -269,7 +234,7 @@ impl GitRepository for FakeGitRepository {
|
||||
|
||||
fn branch_name(&self) -> Option<String> {
|
||||
let state = self.state.lock();
|
||||
state.current_branch_name.clone()
|
||||
state.branch_name.clone()
|
||||
}
|
||||
|
||||
fn head_sha(&self) -> Option<String> {
|
||||
@@ -299,41 +264,18 @@ impl GitRepository for FakeGitRepository {
|
||||
}
|
||||
|
||||
fn branches(&self) -> Result<Vec<Branch>> {
|
||||
let state = self.state.lock();
|
||||
let current_branch = &state.current_branch_name;
|
||||
Ok(state
|
||||
.branches
|
||||
.iter()
|
||||
.map(|branch_name| Branch {
|
||||
is_head: Some(branch_name) == current_branch.as_ref(),
|
||||
name: branch_name.into(),
|
||||
unix_timestamp: None,
|
||||
})
|
||||
.collect())
|
||||
}
|
||||
|
||||
fn branch_exits(&self, name: &str) -> Result<bool> {
|
||||
let state = self.state.lock();
|
||||
Ok(state.branches.contains(name))
|
||||
Ok(vec![])
|
||||
}
|
||||
|
||||
fn change_branch(&self, name: &str) -> Result<()> {
|
||||
let mut state = self.state.lock();
|
||||
state.current_branch_name = Some(name.to_owned());
|
||||
state
|
||||
.event_emitter
|
||||
.try_send(state.path.clone())
|
||||
.expect("Dropped repo change event");
|
||||
state.branch_name = Some(name.to_owned());
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn create_branch(&self, name: &str) -> Result<()> {
|
||||
let mut state = self.state.lock();
|
||||
state.branches.insert(name.to_owned());
|
||||
state
|
||||
.event_emitter
|
||||
.try_send(state.path.clone())
|
||||
.expect("Dropped repo change event");
|
||||
state.branch_name = Some(name.to_owned());
|
||||
Ok(())
|
||||
}
|
||||
|
||||
|
||||
@@ -22,9 +22,8 @@ regex.workspace = true
serde.workspace = true
serde_json.workspace = true
url.workspace = true
util.workspace = true

[dev-dependencies]
indoc.workspace = true
unindent.workspace = true
serde_json.workspace = true
pretty_assertions.workspace = true

@@ -2,7 +2,6 @@ mod providers;

use std::sync::Arc;

use git::repository::GitRepository;
use git::GitHostingProviderRegistry;
use gpui::AppContext;

@@ -11,27 +10,17 @@ pub use crate::providers::*;
/// Initializes the Git hosting providers.
pub fn init(cx: &AppContext) {
    let provider_registry = GitHostingProviderRegistry::global(cx);
    provider_registry.register_hosting_provider(Arc::new(Bitbucket));
    provider_registry.register_hosting_provider(Arc::new(Codeberg));
    provider_registry.register_hosting_provider(Arc::new(Gitee));

    // The providers are stored in a `BTreeMap`, so insertion order matters.
    // GitHub comes first.
    provider_registry.register_hosting_provider(Arc::new(Github));
    provider_registry.register_hosting_provider(Arc::new(Gitlab::new()));

    // Then GitLab.
    provider_registry.register_hosting_provider(Arc::new(Gitlab));

    // Then the other providers, in the order they were added.
    provider_registry.register_hosting_provider(Arc::new(Gitee));
    provider_registry.register_hosting_provider(Arc::new(Bitbucket));
    provider_registry.register_hosting_provider(Arc::new(Sourcehut));
}

/// Registers additional Git hosting providers.
///
/// These require information from the Git repository to construct, so their
/// registration is deferred until we have a Git repository initialized.
pub fn register_additional_providers(
    provider_registry: Arc<GitHostingProviderRegistry>,
    repository: Arc<dyn GitRepository>,
) {
    let Some(origin_url) = repository.remote_url("origin") else {
        return;
    };

    if let Ok(gitlab_self_hosted) = Gitlab::from_remote_url(&origin_url) {
        provider_registry.register_hosting_provider(Arc::new(gitlab_self_hosted));
    }
    provider_registry.register_hosting_provider(Arc::new(Codeberg));
}
|
||||
|
||||
@@ -1,11 +1,6 @@
|
||||
use std::str::FromStr;
|
||||
|
||||
use url::Url;
|
||||
|
||||
use git::{
|
||||
BuildCommitPermalinkParams, BuildPermalinkParams, GitHostingProvider, ParsedGitRemote,
|
||||
RemoteUrl,
|
||||
};
|
||||
use git::{BuildCommitPermalinkParams, BuildPermalinkParams, GitHostingProvider, ParsedGitRemote};
|
||||
|
||||
pub struct Bitbucket;
|
||||
|
||||
@@ -30,22 +25,18 @@ impl GitHostingProvider for Bitbucket {
|
||||
format!("lines-{start_line}:{end_line}")
|
||||
}
|
||||
|
||||
fn parse_remote_url(&self, url: &str) -> Option<ParsedGitRemote> {
|
||||
let url = RemoteUrl::from_str(url).ok()?;
|
||||
fn parse_remote_url<'a>(&self, url: &'a str) -> Option<ParsedGitRemote<'a>> {
|
||||
if url.contains("bitbucket.org") {
|
||||
let (_, repo_with_owner) = url.trim_end_matches(".git").split_once("bitbucket.org")?;
|
||||
let (owner, repo) = repo_with_owner
|
||||
.trim_start_matches('/')
|
||||
.trim_start_matches(':')
|
||||
.split_once('/')?;
|
||||
|
||||
let host = url.host_str()?;
|
||||
if host != "bitbucket.org" {
|
||||
return None;
|
||||
return Some(ParsedGitRemote { owner, repo });
|
||||
}
|
||||
|
||||
let mut path_segments = url.path_segments()?;
|
||||
let owner = path_segments.next()?;
|
||||
let repo = path_segments.next()?.trim_end_matches(".git");
|
||||
|
||||
Some(ParsedGitRemote {
|
||||
owner: owner.into(),
|
||||
repo: repo.into(),
|
||||
})
|
||||
None
|
||||
}
|
||||
|
||||
fn build_commit_permalink(
|
||||
@@ -84,62 +75,53 @@ impl GitHostingProvider for Bitbucket {
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use pretty_assertions::assert_eq;
|
||||
use std::sync::Arc;
|
||||
|
||||
use git::{parse_git_remote_url, GitHostingProviderRegistry};
|
||||
|
||||
use super::*;
|
||||
|
||||
#[test]
|
||||
fn test_parse_remote_url_given_ssh_url() {
|
||||
let parsed_remote = Bitbucket
|
||||
.parse_remote_url("git@bitbucket.org:zed-industries/zed.git")
|
||||
.unwrap();
|
||||
|
||||
assert_eq!(
|
||||
parsed_remote,
|
||||
ParsedGitRemote {
|
||||
owner: "zed-industries".into(),
|
||||
repo: "zed".into(),
|
||||
}
|
||||
);
|
||||
fn test_parse_git_remote_url_bitbucket_https_with_username() {
|
||||
let provider_registry = Arc::new(GitHostingProviderRegistry::new());
|
||||
provider_registry.register_hosting_provider(Arc::new(Bitbucket));
|
||||
let url = "https://thorstenballzed@bitbucket.org/thorstenzed/testingrepo.git";
|
||||
let (provider, parsed) = parse_git_remote_url(provider_registry, url).unwrap();
|
||||
assert_eq!(provider.name(), "Bitbucket");
|
||||
assert_eq!(parsed.owner, "thorstenzed");
|
||||
assert_eq!(parsed.repo, "testingrepo");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_parse_remote_url_given_https_url() {
|
||||
let parsed_remote = Bitbucket
|
||||
.parse_remote_url("https://bitbucket.org/zed-industries/zed.git")
|
||||
.unwrap();
|
||||
|
||||
assert_eq!(
|
||||
parsed_remote,
|
||||
ParsedGitRemote {
|
||||
owner: "zed-industries".into(),
|
||||
repo: "zed".into(),
|
||||
}
|
||||
);
|
||||
fn test_parse_git_remote_url_bitbucket_https_without_username() {
|
||||
let provider_registry = Arc::new(GitHostingProviderRegistry::new());
|
||||
provider_registry.register_hosting_provider(Arc::new(Bitbucket));
|
||||
let url = "https://bitbucket.org/thorstenzed/testingrepo.git";
|
||||
let (provider, parsed) = parse_git_remote_url(provider_registry, url).unwrap();
|
||||
assert_eq!(provider.name(), "Bitbucket");
|
||||
assert_eq!(parsed.owner, "thorstenzed");
|
||||
assert_eq!(parsed.repo, "testingrepo");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_parse_remote_url_given_https_url_with_username() {
|
||||
let parsed_remote = Bitbucket
|
||||
.parse_remote_url("https://thorstenballzed@bitbucket.org/zed-industries/zed.git")
|
||||
.unwrap();
|
||||
|
||||
assert_eq!(
|
||||
parsed_remote,
|
||||
ParsedGitRemote {
|
||||
owner: "zed-industries".into(),
|
||||
repo: "zed".into(),
|
||||
}
|
||||
);
|
||||
fn test_parse_git_remote_url_bitbucket_git() {
|
||||
let provider_registry = Arc::new(GitHostingProviderRegistry::new());
|
||||
provider_registry.register_hosting_provider(Arc::new(Bitbucket));
|
||||
let url = "git@bitbucket.org:thorstenzed/testingrepo.git";
|
||||
let (provider, parsed) = parse_git_remote_url(provider_registry, url).unwrap();
|
||||
assert_eq!(provider.name(), "Bitbucket");
|
||||
assert_eq!(parsed.owner, "thorstenzed");
|
||||
assert_eq!(parsed.repo, "testingrepo");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_build_bitbucket_permalink() {
|
||||
fn test_build_bitbucket_permalink_from_ssh_url() {
|
||||
let remote = ParsedGitRemote {
|
||||
owner: "thorstenzed",
|
||||
repo: "testingrepo",
|
||||
};
|
||||
let permalink = Bitbucket.build_permalink(
|
||||
ParsedGitRemote {
|
||||
owner: "zed-industries".into(),
|
||||
repo: "zed".into(),
|
||||
},
|
||||
remote,
|
||||
BuildPermalinkParams {
|
||||
sha: "f00b4r",
|
||||
path: "main.rs",
|
||||
@@ -147,17 +129,18 @@ mod tests {
|
||||
},
|
||||
);
|
||||
|
||||
let expected_url = "https://bitbucket.org/zed-industries/zed/src/f00b4r/main.rs";
|
||||
let expected_url = "https://bitbucket.org/thorstenzed/testingrepo/src/f00b4r/main.rs";
|
||||
assert_eq!(permalink.to_string(), expected_url.to_string())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_build_bitbucket_permalink_with_single_line_selection() {
|
||||
fn test_build_bitbucket_permalink_from_ssh_url_single_line_selection() {
|
||||
let remote = ParsedGitRemote {
|
||||
owner: "thorstenzed",
|
||||
repo: "testingrepo",
|
||||
};
|
||||
let permalink = Bitbucket.build_permalink(
|
||||
ParsedGitRemote {
|
||||
owner: "zed-industries".into(),
|
||||
repo: "zed".into(),
|
||||
},
|
||||
remote,
|
||||
BuildPermalinkParams {
|
||||
sha: "f00b4r",
|
||||
path: "main.rs",
|
||||
@@ -165,17 +148,19 @@ mod tests {
|
||||
},
|
||||
);
|
||||
|
||||
let expected_url = "https://bitbucket.org/zed-industries/zed/src/f00b4r/main.rs#lines-7";
|
||||
let expected_url =
|
||||
"https://bitbucket.org/thorstenzed/testingrepo/src/f00b4r/main.rs#lines-7";
|
||||
assert_eq!(permalink.to_string(), expected_url.to_string())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_build_bitbucket_permalink_with_multi_line_selection() {
|
||||
fn test_build_bitbucket_permalink_from_ssh_url_multi_line_selection() {
|
||||
let remote = ParsedGitRemote {
|
||||
owner: "thorstenzed",
|
||||
repo: "testingrepo",
|
||||
};
|
||||
let permalink = Bitbucket.build_permalink(
|
||||
ParsedGitRemote {
|
||||
owner: "zed-industries".into(),
|
||||
repo: "zed".into(),
|
||||
},
|
||||
remote,
|
||||
BuildPermalinkParams {
|
||||
sha: "f00b4r",
|
||||
path: "main.rs",
|
||||
@@ -184,7 +169,7 @@ mod tests {
|
||||
);
|
||||
|
||||
let expected_url =
|
||||
"https://bitbucket.org/zed-industries/zed/src/f00b4r/main.rs#lines-24:48";
|
||||
"https://bitbucket.org/thorstenzed/testingrepo/src/f00b4r/main.rs#lines-24:48";
|
||||
assert_eq!(permalink.to_string(), expected_url.to_string())
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,4 +1,3 @@
|
||||
use std::str::FromStr;
|
||||
use std::sync::Arc;
|
||||
|
||||
use anyhow::{bail, Context, Result};
|
||||
@@ -10,7 +9,6 @@ use url::Url;
|
||||
|
||||
use git::{
|
||||
BuildCommitPermalinkParams, BuildPermalinkParams, GitHostingProvider, Oid, ParsedGitRemote,
|
||||
RemoteUrl,
|
||||
};
|
||||
|
||||
#[derive(Debug, Deserialize)]
|
||||
@@ -105,22 +103,19 @@ impl GitHostingProvider for Codeberg {
|
||||
format!("L{start_line}-L{end_line}")
|
||||
}
|
||||
|
||||
fn parse_remote_url(&self, url: &str) -> Option<ParsedGitRemote> {
|
||||
let url = RemoteUrl::from_str(url).ok()?;
|
||||
fn parse_remote_url<'a>(&self, url: &'a str) -> Option<ParsedGitRemote<'a>> {
|
||||
if url.starts_with("git@codeberg.org:") || url.starts_with("https://codeberg.org/") {
|
||||
let repo_with_owner = url
|
||||
.trim_start_matches("git@codeberg.org:")
|
||||
.trim_start_matches("https://codeberg.org/")
|
||||
.trim_end_matches(".git");
|
||||
|
||||
let host = url.host_str()?;
|
||||
if host != "codeberg.org" {
|
||||
return None;
|
||||
let (owner, repo) = repo_with_owner.split_once('/')?;
|
||||
|
||||
return Some(ParsedGitRemote { owner, repo });
|
||||
}
|
||||
|
||||
let mut path_segments = url.path_segments()?;
|
||||
let owner = path_segments.next()?;
|
||||
let repo = path_segments.next()?.trim_end_matches(".git");
|
||||
|
||||
Some(ParsedGitRemote {
|
||||
owner: owner.into(),
|
||||
repo: repo.into(),
|
||||
})
|
||||
None
|
||||
}
|
||||
|
||||
fn build_commit_permalink(
|
||||
@@ -175,47 +170,16 @@ impl GitHostingProvider for Codeberg {
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use pretty_assertions::assert_eq;
|
||||
|
||||
use super::*;
|
||||
|
||||
#[test]
|
||||
fn test_parse_remote_url_given_ssh_url() {
|
||||
let parsed_remote = Codeberg
|
||||
.parse_remote_url("git@codeberg.org:zed-industries/zed.git")
|
||||
.unwrap();
|
||||
|
||||
assert_eq!(
|
||||
parsed_remote,
|
||||
ParsedGitRemote {
|
||||
owner: "zed-industries".into(),
|
||||
repo: "zed".into(),
|
||||
}
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_parse_remote_url_given_https_url() {
|
||||
let parsed_remote = Codeberg
|
||||
.parse_remote_url("https://codeberg.org/zed-industries/zed.git")
|
||||
.unwrap();
|
||||
|
||||
assert_eq!(
|
||||
parsed_remote,
|
||||
ParsedGitRemote {
|
||||
owner: "zed-industries".into(),
|
||||
repo: "zed".into(),
|
||||
}
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_build_codeberg_permalink() {
|
||||
fn test_build_codeberg_permalink_from_ssh_url() {
|
||||
let remote = ParsedGitRemote {
|
||||
owner: "rajveermalviya",
|
||||
repo: "zed",
|
||||
};
|
||||
let permalink = Codeberg.build_permalink(
|
||||
ParsedGitRemote {
|
||||
owner: "zed-industries".into(),
|
||||
repo: "zed".into(),
|
||||
},
|
||||
remote,
|
||||
BuildPermalinkParams {
|
||||
sha: "faa6f979be417239b2e070dbbf6392b909224e0b",
|
||||
path: "crates/editor/src/git/permalink.rs",
|
||||
@@ -223,17 +187,18 @@ mod tests {
|
||||
},
|
||||
);
|
||||
|
||||
let expected_url = "https://codeberg.org/zed-industries/zed/src/commit/faa6f979be417239b2e070dbbf6392b909224e0b/crates/editor/src/git/permalink.rs";
|
||||
let expected_url = "https://codeberg.org/rajveermalviya/zed/src/commit/faa6f979be417239b2e070dbbf6392b909224e0b/crates/editor/src/git/permalink.rs";
|
||||
assert_eq!(permalink.to_string(), expected_url.to_string())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_build_codeberg_permalink_with_single_line_selection() {
|
||||
fn test_build_codeberg_permalink_from_ssh_url_single_line_selection() {
|
||||
let remote = ParsedGitRemote {
|
||||
owner: "rajveermalviya",
|
||||
repo: "zed",
|
||||
};
|
||||
let permalink = Codeberg.build_permalink(
|
||||
ParsedGitRemote {
|
||||
owner: "zed-industries".into(),
|
||||
repo: "zed".into(),
|
||||
},
|
||||
remote,
|
||||
BuildPermalinkParams {
|
||||
sha: "faa6f979be417239b2e070dbbf6392b909224e0b",
|
||||
path: "crates/editor/src/git/permalink.rs",
|
||||
@@ -241,17 +206,18 @@ mod tests {
|
||||
},
|
||||
);
|
||||
|
||||
let expected_url = "https://codeberg.org/zed-industries/zed/src/commit/faa6f979be417239b2e070dbbf6392b909224e0b/crates/editor/src/git/permalink.rs#L7";
|
||||
let expected_url = "https://codeberg.org/rajveermalviya/zed/src/commit/faa6f979be417239b2e070dbbf6392b909224e0b/crates/editor/src/git/permalink.rs#L7";
|
||||
assert_eq!(permalink.to_string(), expected_url.to_string())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_build_codeberg_permalink_with_multi_line_selection() {
|
||||
fn test_build_codeberg_permalink_from_ssh_url_multi_line_selection() {
|
||||
let remote = ParsedGitRemote {
|
||||
owner: "rajveermalviya",
|
||||
repo: "zed",
|
||||
};
|
||||
let permalink = Codeberg.build_permalink(
|
||||
ParsedGitRemote {
|
||||
owner: "zed-industries".into(),
|
||||
repo: "zed".into(),
|
||||
},
|
||||
remote,
|
||||
BuildPermalinkParams {
|
||||
sha: "faa6f979be417239b2e070dbbf6392b909224e0b",
|
||||
path: "crates/editor/src/git/permalink.rs",
|
||||
@@ -259,7 +225,64 @@ mod tests {
|
||||
},
|
||||
);
|
||||
|
||||
let expected_url = "https://codeberg.org/zed-industries/zed/src/commit/faa6f979be417239b2e070dbbf6392b909224e0b/crates/editor/src/git/permalink.rs#L24-L48";
|
||||
let expected_url = "https://codeberg.org/rajveermalviya/zed/src/commit/faa6f979be417239b2e070dbbf6392b909224e0b/crates/editor/src/git/permalink.rs#L24-L48";
|
||||
assert_eq!(permalink.to_string(), expected_url.to_string())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_build_codeberg_permalink_from_https_url() {
|
||||
let remote = ParsedGitRemote {
|
||||
owner: "rajveermalviya",
|
||||
repo: "zed",
|
||||
};
|
||||
let permalink = Codeberg.build_permalink(
|
||||
remote,
|
||||
BuildPermalinkParams {
|
||||
sha: "faa6f979be417239b2e070dbbf6392b909224e0b",
|
||||
path: "crates/zed/src/main.rs",
|
||||
selection: None,
|
||||
},
|
||||
);
|
||||
|
||||
let expected_url = "https://codeberg.org/rajveermalviya/zed/src/commit/faa6f979be417239b2e070dbbf6392b909224e0b/crates/zed/src/main.rs";
|
||||
assert_eq!(permalink.to_string(), expected_url.to_string())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_build_codeberg_permalink_from_https_url_single_line_selection() {
|
||||
let remote = ParsedGitRemote {
|
||||
owner: "rajveermalviya",
|
||||
repo: "zed",
|
||||
};
|
||||
let permalink = Codeberg.build_permalink(
|
||||
remote,
|
||||
BuildPermalinkParams {
|
||||
sha: "faa6f979be417239b2e070dbbf6392b909224e0b",
|
||||
path: "crates/zed/src/main.rs",
|
||||
selection: Some(6..6),
|
||||
},
|
||||
);
|
||||
|
||||
let expected_url = "https://codeberg.org/rajveermalviya/zed/src/commit/faa6f979be417239b2e070dbbf6392b909224e0b/crates/zed/src/main.rs#L7";
|
||||
assert_eq!(permalink.to_string(), expected_url.to_string())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_build_codeberg_permalink_from_https_url_multi_line_selection() {
|
||||
let remote = ParsedGitRemote {
|
||||
owner: "rajveermalviya",
|
||||
repo: "zed",
|
||||
};
|
||||
let permalink = Codeberg.build_permalink(
|
||||
remote,
|
||||
BuildPermalinkParams {
|
||||
sha: "faa6f979be417239b2e070dbbf6392b909224e0b",
|
||||
path: "crates/zed/src/main.rs",
|
||||
selection: Some(23..47),
|
||||
},
|
||||
);
|
||||
|
||||
let expected_url = "https://codeberg.org/rajveermalviya/zed/src/commit/faa6f979be417239b2e070dbbf6392b909224e0b/crates/zed/src/main.rs#L24-L48";
|
||||
assert_eq!(permalink.to_string(), expected_url.to_string())
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,11 +1,6 @@
|
||||
use std::str::FromStr;
|
||||
|
||||
use url::Url;
|
||||
|
||||
use git::{
|
||||
BuildCommitPermalinkParams, BuildPermalinkParams, GitHostingProvider, ParsedGitRemote,
|
||||
RemoteUrl,
|
||||
};
|
||||
use git::{BuildCommitPermalinkParams, BuildPermalinkParams, GitHostingProvider, ParsedGitRemote};
|
||||
|
||||
pub struct Gitee;
|
||||
|
||||
@@ -30,22 +25,19 @@ impl GitHostingProvider for Gitee {
|
||||
format!("L{start_line}-{end_line}")
|
||||
}
|
||||
|
||||
fn parse_remote_url(&self, url: &str) -> Option<ParsedGitRemote> {
|
||||
let url = RemoteUrl::from_str(url).ok()?;
|
||||
fn parse_remote_url<'a>(&self, url: &'a str) -> Option<ParsedGitRemote<'a>> {
|
||||
if url.starts_with("git@gitee.com:") || url.starts_with("https://gitee.com/") {
|
||||
let repo_with_owner = url
|
||||
.trim_start_matches("git@gitee.com:")
|
||||
.trim_start_matches("https://gitee.com/")
|
||||
.trim_end_matches(".git");
|
||||
|
||||
let host = url.host_str()?;
|
||||
if host != "gitee.com" {
|
||||
return None;
|
||||
let (owner, repo) = repo_with_owner.split_once('/')?;
|
||||
|
||||
return Some(ParsedGitRemote { owner, repo });
|
||||
}
|
||||
|
||||
let mut path_segments = url.path_segments()?;
|
||||
let owner = path_segments.next()?;
|
||||
let repo = path_segments.next()?.trim_end_matches(".git");
|
||||
|
||||
Some(ParsedGitRemote {
|
||||
owner: owner.into(),
|
||||
repo: repo.into(),
|
||||
})
|
||||
None
|
||||
}
|
||||
|
||||
fn build_commit_permalink(
|
||||
@@ -84,47 +76,16 @@ impl GitHostingProvider for Gitee {
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use pretty_assertions::assert_eq;
|
||||
|
||||
use super::*;
|
||||
|
||||
#[test]
|
||||
fn test_parse_remote_url_given_ssh_url() {
|
||||
let parsed_remote = Gitee
|
||||
.parse_remote_url("git@gitee.com:zed-industries/zed.git")
|
||||
.unwrap();
|
||||
|
||||
assert_eq!(
|
||||
parsed_remote,
|
||||
ParsedGitRemote {
|
||||
owner: "zed-industries".into(),
|
||||
repo: "zed".into(),
|
||||
}
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_parse_remote_url_given_https_url() {
|
||||
let parsed_remote = Gitee
|
||||
.parse_remote_url("https://gitee.com/zed-industries/zed.git")
|
||||
.unwrap();
|
||||
|
||||
assert_eq!(
|
||||
parsed_remote,
|
||||
ParsedGitRemote {
|
||||
owner: "zed-industries".into(),
|
||||
repo: "zed".into(),
|
||||
}
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_build_gitee_permalink() {
|
||||
fn test_build_gitee_permalink_from_ssh_url() {
|
||||
let remote = ParsedGitRemote {
|
||||
owner: "libkitten",
|
||||
repo: "zed",
|
||||
};
|
||||
let permalink = Gitee.build_permalink(
|
||||
ParsedGitRemote {
|
||||
owner: "zed-industries".into(),
|
||||
repo: "zed".into(),
|
||||
},
|
||||
remote,
|
||||
BuildPermalinkParams {
|
||||
sha: "e5fe811d7ad0fc26934edd76f891d20bdc3bb194",
|
||||
path: "crates/editor/src/git/permalink.rs",
|
||||
@@ -132,17 +93,18 @@ mod tests {
|
||||
},
|
||||
);
|
||||
|
||||
let expected_url = "https://gitee.com/zed-industries/zed/blob/e5fe811d7ad0fc26934edd76f891d20bdc3bb194/crates/editor/src/git/permalink.rs";
|
||||
let expected_url = "https://gitee.com/libkitten/zed/blob/e5fe811d7ad0fc26934edd76f891d20bdc3bb194/crates/editor/src/git/permalink.rs";
|
||||
assert_eq!(permalink.to_string(), expected_url.to_string())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_build_gitee_permalink_with_single_line_selection() {
|
||||
fn test_build_gitee_permalink_from_ssh_url_single_line_selection() {
|
||||
let remote = ParsedGitRemote {
|
||||
owner: "libkitten",
|
||||
repo: "zed",
|
||||
};
|
||||
let permalink = Gitee.build_permalink(
|
||||
ParsedGitRemote {
|
||||
owner: "zed-industries".into(),
|
||||
repo: "zed".into(),
|
||||
},
|
||||
remote,
|
||||
BuildPermalinkParams {
|
||||
sha: "e5fe811d7ad0fc26934edd76f891d20bdc3bb194",
|
||||
path: "crates/editor/src/git/permalink.rs",
|
||||
@@ -150,17 +112,18 @@ mod tests {
|
||||
},
|
||||
);
|
||||
|
||||
let expected_url = "https://gitee.com/zed-industries/zed/blob/e5fe811d7ad0fc26934edd76f891d20bdc3bb194/crates/editor/src/git/permalink.rs#L7";
|
||||
let expected_url = "https://gitee.com/libkitten/zed/blob/e5fe811d7ad0fc26934edd76f891d20bdc3bb194/crates/editor/src/git/permalink.rs#L7";
|
||||
assert_eq!(permalink.to_string(), expected_url.to_string())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_build_gitee_permalink_with_multi_line_selection() {
|
||||
fn test_build_gitee_permalink_from_ssh_url_multi_line_selection() {
|
||||
let remote = ParsedGitRemote {
|
||||
owner: "libkitten",
|
||||
repo: "zed",
|
||||
};
|
||||
let permalink = Gitee.build_permalink(
|
||||
ParsedGitRemote {
|
||||
owner: "zed-industries".into(),
|
||||
repo: "zed".into(),
|
||||
},
|
||||
remote,
|
||||
BuildPermalinkParams {
|
||||
sha: "e5fe811d7ad0fc26934edd76f891d20bdc3bb194",
|
||||
path: "crates/editor/src/git/permalink.rs",
|
||||
@@ -168,7 +131,64 @@ mod tests {
|
||||
},
|
||||
);
|
||||
|
||||
let expected_url = "https://gitee.com/zed-industries/zed/blob/e5fe811d7ad0fc26934edd76f891d20bdc3bb194/crates/editor/src/git/permalink.rs#L24-48";
|
||||
let expected_url = "https://gitee.com/libkitten/zed/blob/e5fe811d7ad0fc26934edd76f891d20bdc3bb194/crates/editor/src/git/permalink.rs#L24-48";
|
||||
assert_eq!(permalink.to_string(), expected_url.to_string())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_build_gitee_permalink_from_https_url() {
|
||||
let remote = ParsedGitRemote {
|
||||
owner: "libkitten",
|
||||
repo: "zed",
|
||||
};
|
||||
let permalink = Gitee.build_permalink(
|
||||
remote,
|
||||
BuildPermalinkParams {
|
||||
sha: "e5fe811d7ad0fc26934edd76f891d20bdc3bb194",
|
||||
path: "crates/zed/src/main.rs",
|
||||
selection: None,
|
||||
},
|
||||
);
|
||||
|
||||
let expected_url = "https://gitee.com/libkitten/zed/blob/e5fe811d7ad0fc26934edd76f891d20bdc3bb194/crates/zed/src/main.rs";
|
||||
assert_eq!(permalink.to_string(), expected_url.to_string())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_build_gitee_permalink_from_https_url_single_line_selection() {
|
||||
let remote = ParsedGitRemote {
|
||||
owner: "libkitten",
|
||||
repo: "zed",
|
||||
};
|
||||
let permalink = Gitee.build_permalink(
|
||||
remote,
|
||||
BuildPermalinkParams {
|
||||
sha: "e5fe811d7ad0fc26934edd76f891d20bdc3bb194",
|
||||
path: "crates/zed/src/main.rs",
|
||||
selection: Some(6..6),
|
||||
},
|
||||
);
|
||||
|
||||
let expected_url = "https://gitee.com/libkitten/zed/blob/e5fe811d7ad0fc26934edd76f891d20bdc3bb194/crates/zed/src/main.rs#L7";
|
||||
assert_eq!(permalink.to_string(), expected_url.to_string())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_build_gitee_permalink_from_https_url_multi_line_selection() {
|
||||
let remote = ParsedGitRemote {
|
||||
owner: "libkitten",
|
||||
repo: "zed",
|
||||
};
|
||||
let permalink = Gitee.build_permalink(
|
||||
remote,
|
||||
BuildPermalinkParams {
|
||||
sha: "e5fe811d7ad0fc26934edd76f891d20bdc3bb194",
|
||||
path: "crates/zed/src/main.rs",
|
||||
selection: Some(23..47),
|
||||
},
|
||||
);
|
||||
|
||||
let expected_url = "https://gitee.com/libkitten/zed/blob/e5fe811d7ad0fc26934edd76f891d20bdc3bb194/crates/zed/src/main.rs#L24-48";
|
||||
assert_eq!(permalink.to_string(), expected_url.to_string())
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,4 +1,3 @@
|
||||
use std::str::FromStr;
|
||||
use std::sync::{Arc, OnceLock};
|
||||
|
||||
use anyhow::{bail, Context, Result};
|
||||
@@ -11,7 +10,7 @@ use url::Url;
|
||||
|
||||
use git::{
|
||||
BuildCommitPermalinkParams, BuildPermalinkParams, GitHostingProvider, Oid, ParsedGitRemote,
|
||||
PullRequest, RemoteUrl,
|
||||
PullRequest,
|
||||
};
|
||||
|
||||
fn pull_request_number_regex() -> &'static Regex {
|
||||
@@ -108,22 +107,19 @@ impl GitHostingProvider for Github {
|
||||
format!("L{start_line}-L{end_line}")
|
||||
}
|
||||
|
||||
fn parse_remote_url(&self, url: &str) -> Option<ParsedGitRemote> {
|
||||
let url = RemoteUrl::from_str(url).ok()?;
|
||||
fn parse_remote_url<'a>(&self, url: &'a str) -> Option<ParsedGitRemote<'a>> {
|
||||
if url.starts_with("git@github.com:") || url.starts_with("https://github.com/") {
|
||||
let repo_with_owner = url
|
||||
.trim_start_matches("git@github.com:")
|
||||
.trim_start_matches("https://github.com/")
|
||||
.trim_end_matches(".git");
|
||||
|
||||
let host = url.host_str()?;
|
||||
if host != "github.com" {
|
||||
return None;
|
||||
let (owner, repo) = repo_with_owner.split_once('/')?;
|
||||
|
||||
return Some(ParsedGitRemote { owner, repo });
|
||||
}
|
||||
|
||||
let mut path_segments = url.path_segments()?;
|
||||
let owner = path_segments.next()?;
|
||||
let repo = path_segments.next()?.trim_end_matches(".git");
|
||||
|
||||
Some(ParsedGitRemote {
|
||||
owner: owner.into(),
|
||||
repo: repo.into(),
|
||||
})
|
||||
None
|
||||
}
|
||||
|
||||
fn build_commit_permalink(
|
||||
@@ -197,61 +193,16 @@ impl GitHostingProvider for Github {
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use indoc::indoc;
|
||||
use pretty_assertions::assert_eq;
|
||||
// TODO: Replace with `indoc`.
|
||||
use unindent::Unindent;
|
||||
|
||||
use super::*;
|
||||
|
||||
#[test]
|
||||
fn test_parse_remote_url_given_ssh_url() {
|
||||
let parsed_remote = Github
|
||||
.parse_remote_url("git@github.com:zed-industries/zed.git")
|
||||
.unwrap();
|
||||
|
||||
assert_eq!(
|
||||
parsed_remote,
|
||||
ParsedGitRemote {
|
||||
owner: "zed-industries".into(),
|
||||
repo: "zed".into(),
|
||||
}
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_parse_remote_url_given_https_url() {
|
||||
let parsed_remote = Github
|
||||
.parse_remote_url("https://github.com/zed-industries/zed.git")
|
||||
.unwrap();
|
||||
|
||||
assert_eq!(
|
||||
parsed_remote,
|
||||
ParsedGitRemote {
|
||||
owner: "zed-industries".into(),
|
||||
repo: "zed".into(),
|
||||
}
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_parse_remote_url_given_https_url_with_username() {
|
||||
let parsed_remote = Github
|
||||
.parse_remote_url("https://jlannister@github.com/some-org/some-repo.git")
|
||||
.unwrap();
|
||||
|
||||
assert_eq!(
|
||||
parsed_remote,
|
||||
ParsedGitRemote {
|
||||
owner: "some-org".into(),
|
||||
repo: "some-repo".into(),
|
||||
}
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_build_github_permalink_from_ssh_url() {
|
||||
let remote = ParsedGitRemote {
|
||||
owner: "zed-industries".into(),
|
||||
repo: "zed".into(),
|
||||
owner: "zed-industries",
|
||||
repo: "zed",
|
||||
};
|
||||
let permalink = Github.build_permalink(
|
||||
remote,
|
||||
@@ -267,30 +218,13 @@ mod tests {
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_build_github_permalink() {
|
||||
fn test_build_github_permalink_from_ssh_url_single_line_selection() {
|
||||
let remote = ParsedGitRemote {
|
||||
owner: "zed-industries",
|
||||
repo: "zed",
|
||||
};
|
||||
let permalink = Github.build_permalink(
|
||||
ParsedGitRemote {
|
||||
owner: "zed-industries".into(),
|
||||
repo: "zed".into(),
|
||||
},
|
||||
BuildPermalinkParams {
|
||||
sha: "b2efec9824c45fcc90c9a7eb107a50d1772a60aa",
|
||||
path: "crates/zed/src/main.rs",
|
||||
selection: None,
|
||||
},
|
||||
);
|
||||
|
||||
let expected_url = "https://github.com/zed-industries/zed/blob/b2efec9824c45fcc90c9a7eb107a50d1772a60aa/crates/zed/src/main.rs";
|
||||
assert_eq!(permalink.to_string(), expected_url.to_string())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_build_github_permalink_with_single_line_selection() {
|
||||
let permalink = Github.build_permalink(
|
||||
ParsedGitRemote {
|
||||
owner: "zed-industries".into(),
|
||||
repo: "zed".into(),
|
||||
},
|
||||
remote,
|
||||
BuildPermalinkParams {
|
||||
sha: "e6ebe7974deb6bb6cc0e2595c8ec31f0c71084b7",
|
||||
path: "crates/editor/src/git/permalink.rs",
|
||||
@@ -303,12 +237,13 @@ mod tests {
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_build_github_permalink_with_multi_line_selection() {
|
||||
fn test_build_github_permalink_from_ssh_url_multi_line_selection() {
|
||||
let remote = ParsedGitRemote {
|
||||
owner: "zed-industries",
|
||||
repo: "zed",
|
||||
};
|
||||
let permalink = Github.build_permalink(
|
||||
ParsedGitRemote {
|
||||
owner: "zed-industries".into(),
|
||||
repo: "zed".into(),
|
||||
},
|
||||
remote,
|
||||
BuildPermalinkParams {
|
||||
sha: "e6ebe7974deb6bb6cc0e2595c8ec31f0c71084b7",
|
||||
path: "crates/editor/src/git/permalink.rs",
|
||||
@@ -320,18 +255,75 @@ mod tests {
|
||||
assert_eq!(permalink.to_string(), expected_url.to_string())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_build_github_permalink_from_https_url() {
|
||||
let remote = ParsedGitRemote {
|
||||
owner: "zed-industries",
|
||||
repo: "zed",
|
||||
};
|
||||
let permalink = Github.build_permalink(
|
||||
remote,
|
||||
BuildPermalinkParams {
|
||||
sha: "b2efec9824c45fcc90c9a7eb107a50d1772a60aa",
|
||||
path: "crates/zed/src/main.rs",
|
||||
selection: None,
|
||||
},
|
||||
);
|
||||
|
||||
let expected_url = "https://github.com/zed-industries/zed/blob/b2efec9824c45fcc90c9a7eb107a50d1772a60aa/crates/zed/src/main.rs";
|
||||
assert_eq!(permalink.to_string(), expected_url.to_string())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_build_github_permalink_from_https_url_single_line_selection() {
|
||||
let remote = ParsedGitRemote {
|
||||
owner: "zed-industries",
|
||||
repo: "zed",
|
||||
};
|
||||
let permalink = Github.build_permalink(
|
||||
remote,
|
||||
BuildPermalinkParams {
|
||||
sha: "b2efec9824c45fcc90c9a7eb107a50d1772a60aa",
|
||||
path: "crates/zed/src/main.rs",
|
||||
selection: Some(6..6),
|
||||
},
|
||||
);
|
||||
|
||||
let expected_url = "https://github.com/zed-industries/zed/blob/b2efec9824c45fcc90c9a7eb107a50d1772a60aa/crates/zed/src/main.rs#L7";
|
||||
assert_eq!(permalink.to_string(), expected_url.to_string())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_build_github_permalink_from_https_url_multi_line_selection() {
|
||||
let remote = ParsedGitRemote {
|
||||
owner: "zed-industries",
|
||||
repo: "zed",
|
||||
};
|
||||
let permalink = Github.build_permalink(
|
||||
remote,
|
||||
BuildPermalinkParams {
|
||||
sha: "b2efec9824c45fcc90c9a7eb107a50d1772a60aa",
|
||||
path: "crates/zed/src/main.rs",
|
||||
selection: Some(23..47),
|
||||
},
|
||||
);
|
||||
|
||||
let expected_url = "https://github.com/zed-industries/zed/blob/b2efec9824c45fcc90c9a7eb107a50d1772a60aa/crates/zed/src/main.rs#L24-L48";
|
||||
assert_eq!(permalink.to_string(), expected_url.to_string())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_github_pull_requests() {
|
||||
let remote = ParsedGitRemote {
|
||||
owner: "zed-industries".into(),
|
||||
repo: "zed".into(),
|
||||
owner: "zed-industries",
|
||||
repo: "zed",
|
||||
};
|
||||
|
||||
let message = "This does not contain a pull request";
|
||||
assert!(Github.extract_pull_request(&remote, message).is_none());
|
||||
|
||||
// Pull request number at end of first line
|
||||
let message = indoc! {r#"
|
||||
let message = r#"
|
||||
project panel: do not expand collapsed worktrees on "collapse all entries" (#10687)
|
||||
|
||||
Fixes #10597
|
||||
@@ -340,7 +332,7 @@ mod tests {
|
||||
|
||||
- Fixed "project panel: collapse all entries" expanding collapsed worktrees.
|
||||
"#
|
||||
};
|
||||
.unindent();
|
||||
|
||||
assert_eq!(
|
||||
Github
|
||||
@@ -352,12 +344,12 @@ mod tests {
|
||||
);
|
||||
|
||||
// Pull request number in middle of line, which we want to ignore
|
||||
let message = indoc! {r#"
|
||||
let message = r#"
|
||||
Follow-up to #10687 to fix problems
|
||||
|
||||
See the original PR, this is a fix.
|
||||
"#
|
||||
};
|
||||
.unindent();
|
||||
assert_eq!(Github.extract_pull_request(&remote, &message), None);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,60 +1,16 @@
use std::str::FromStr;

use anyhow::{anyhow, bail, Result};
use url::Url;
use util::maybe;

use git::{
    BuildCommitPermalinkParams, BuildPermalinkParams, GitHostingProvider, ParsedGitRemote,
    RemoteUrl,
};
use git::{BuildCommitPermalinkParams, BuildPermalinkParams, GitHostingProvider, ParsedGitRemote};

#[derive(Debug)]
pub struct Gitlab {
    name: String,
    base_url: Url,
}

impl Gitlab {
    pub fn new() -> Self {
        Self {
            name: "GitLab".to_string(),
            base_url: Url::parse("https://gitlab.com").unwrap(),
        }
    }

    pub fn from_remote_url(remote_url: &str) -> Result<Self> {
        let host = maybe!({
            if let Some(remote_url) = remote_url.strip_prefix("git@") {
                if let Some((host, _)) = remote_url.trim_start_matches("git@").split_once(':') {
                    return Some(host.to_string());
                }
            }

            Url::parse(&remote_url)
                .ok()
                .and_then(|remote_url| remote_url.host_str().map(|host| host.to_string()))
        })
        .ok_or_else(|| anyhow!("URL has no host"))?;

        if !host.contains("gitlab") {
            bail!("not a GitLab URL");
        }

        Ok(Self {
            name: "GitLab Self-Hosted".to_string(),
            base_url: Url::parse(&format!("https://{}", host))?,
        })
    }
}
pub struct Gitlab;

impl GitHostingProvider for Gitlab {
    fn name(&self) -> String {
        self.name.clone()
        "GitLab".to_string()
    }

    fn base_url(&self) -> Url {
        self.base_url.clone()
        Url::parse("https://gitlab.com").unwrap()
    }

    fn supports_avatars(&self) -> bool {
|
||||
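In `Gitlab::from_remote_url` above, the host is pulled out of either an scp-style `git@host:…` remote or a regular URL before being checked for `gitlab`. A simplified, std-only sketch of that host extraction (it ignores userinfo and the other edge cases the `url` crate handles):

```rust
// Extract the host from a Git remote: scp-style `git@host:owner/repo` first,
// then a `scheme://host/...` URL.
fn remote_host(remote_url: &str) -> Option<&str> {
    if let Some(rest) = remote_url.strip_prefix("git@") {
        return rest.split(':').next();
    }
    remote_url.split("://").nth(1)?.split('/').next()
}

fn main() {
    assert_eq!(
        remote_host("git@gitlab.my-enterprise.com:zed-industries/zed.git"),
        Some("gitlab.my-enterprise.com")
    );
    assert_eq!(
        remote_host("https://gitlab-instance.big-co.com/zed-industries/zed.git"),
        Some("gitlab-instance.big-co.com")
    );
}
```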
@@ -69,22 +25,19 @@ impl GitHostingProvider for Gitlab {
|
||||
format!("L{start_line}-{end_line}")
|
||||
}
|
||||
|
||||
fn parse_remote_url(&self, url: &str) -> Option<ParsedGitRemote> {
|
||||
let url = RemoteUrl::from_str(url).ok()?;
|
||||
fn parse_remote_url<'a>(&self, url: &'a str) -> Option<ParsedGitRemote<'a>> {
|
||||
if url.starts_with("git@gitlab.com:") || url.starts_with("https://gitlab.com/") {
|
||||
let repo_with_owner = url
|
||||
.trim_start_matches("git@gitlab.com:")
|
||||
.trim_start_matches("https://gitlab.com/")
|
||||
.trim_end_matches(".git");
|
||||
|
||||
let host = url.host_str()?;
|
||||
if host != self.base_url.host_str()? {
|
||||
return None;
|
||||
let (owner, repo) = repo_with_owner.split_once('/')?;
|
||||
|
||||
return Some(ParsedGitRemote { owner, repo });
|
||||
}
|
||||
|
||||
let mut path_segments = url.path_segments()?;
|
||||
let owner = path_segments.next()?;
|
||||
let repo = path_segments.next()?.trim_end_matches(".git");
|
||||
|
||||
Some(ParsedGitRemote {
|
||||
owner: owner.into(),
|
||||
repo: repo.into(),
|
||||
})
|
||||
None
|
||||
}
|
||||
|
||||
fn build_commit_permalink(
|
||||
@@ -126,65 +79,16 @@ impl GitHostingProvider for Gitlab {
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use pretty_assertions::assert_eq;
|
||||
|
||||
use super::*;
|
||||
|
||||
#[test]
|
||||
fn test_parse_remote_url_given_ssh_url() {
|
||||
let parsed_remote = Gitlab::new()
|
||||
.parse_remote_url("git@gitlab.com:zed-industries/zed.git")
|
||||
.unwrap();
|
||||
|
||||
assert_eq!(
|
||||
parsed_remote,
|
||||
ParsedGitRemote {
|
||||
owner: "zed-industries".into(),
|
||||
repo: "zed".into(),
|
||||
}
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_parse_remote_url_given_https_url() {
|
||||
let parsed_remote = Gitlab::new()
|
||||
.parse_remote_url("https://gitlab.com/zed-industries/zed.git")
|
||||
.unwrap();
|
||||
|
||||
assert_eq!(
|
||||
parsed_remote,
|
||||
ParsedGitRemote {
|
||||
owner: "zed-industries".into(),
|
||||
repo: "zed".into(),
|
||||
}
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_parse_remote_url_given_self_hosted_ssh_url() {
|
||||
let remote_url = "git@gitlab.my-enterprise.com:zed-industries/zed.git";
|
||||
|
||||
let parsed_remote = Gitlab::from_remote_url(remote_url)
|
||||
.unwrap()
|
||||
.parse_remote_url(remote_url)
|
||||
.unwrap();
|
||||
|
||||
assert_eq!(
|
||||
parsed_remote,
|
||||
ParsedGitRemote {
|
||||
owner: "zed-industries".into(),
|
||||
repo: "zed".into(),
|
||||
}
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_build_gitlab_permalink() {
|
||||
let permalink = Gitlab::new().build_permalink(
|
||||
ParsedGitRemote {
|
||||
owner: "zed-industries".into(),
|
||||
repo: "zed".into(),
|
||||
},
|
||||
fn test_build_gitlab_permalink_from_ssh_url() {
|
||||
let remote = ParsedGitRemote {
|
||||
owner: "zed-industries",
|
||||
repo: "zed",
|
||||
};
|
||||
let permalink = Gitlab.build_permalink(
|
||||
remote,
|
||||
BuildPermalinkParams {
|
||||
sha: "e6ebe7974deb6bb6cc0e2595c8ec31f0c71084b7",
|
||||
path: "crates/editor/src/git/permalink.rs",
|
||||
@@ -197,12 +101,13 @@ mod tests {
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_build_gitlab_permalink_with_single_line_selection() {
|
||||
let permalink = Gitlab::new().build_permalink(
|
||||
ParsedGitRemote {
|
||||
owner: "zed-industries".into(),
|
||||
repo: "zed".into(),
|
||||
},
|
||||
fn test_build_gitlab_permalink_from_ssh_url_single_line_selection() {
|
||||
let remote = ParsedGitRemote {
|
||||
owner: "zed-industries",
|
||||
repo: "zed",
|
||||
};
|
||||
let permalink = Gitlab.build_permalink(
|
||||
remote,
|
||||
BuildPermalinkParams {
|
||||
sha: "e6ebe7974deb6bb6cc0e2595c8ec31f0c71084b7",
|
||||
path: "crates/editor/src/git/permalink.rs",
|
||||
@@ -215,12 +120,13 @@ mod tests {
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_build_gitlab_permalink_with_multi_line_selection() {
|
||||
let permalink = Gitlab::new().build_permalink(
ParsedGitRemote {
owner: "zed-industries".into(),
repo: "zed".into(),
},
fn test_build_gitlab_permalink_from_ssh_url_multi_line_selection() {
let remote = ParsedGitRemote {
owner: "zed-industries",
repo: "zed",
};
let permalink = Gitlab.build_permalink(
remote,
BuildPermalinkParams {
sha: "e6ebe7974deb6bb6cc0e2595c8ec31f0c71084b7",
path: "crates/editor/src/git/permalink.rs",
@@ -233,36 +139,13 @@ mod tests {
}

#[test]
fn test_build_gitlab_self_hosted_permalink_from_ssh_url() {
let gitlab =
Gitlab::from_remote_url("git@gitlab.some-enterprise.com:zed-industries/zed.git")
.unwrap();
let permalink = gitlab.build_permalink(
ParsedGitRemote {
owner: "zed-industries".into(),
repo: "zed".into(),
},
BuildPermalinkParams {
sha: "e6ebe7974deb6bb6cc0e2595c8ec31f0c71084b7",
path: "crates/editor/src/git/permalink.rs",
selection: None,
},
);

let expected_url = "https://gitlab.some-enterprise.com/zed-industries/zed/-/blob/e6ebe7974deb6bb6cc0e2595c8ec31f0c71084b7/crates/editor/src/git/permalink.rs";
assert_eq!(permalink.to_string(), expected_url.to_string())
}

#[test]
fn test_build_gitlab_self_hosted_permalink_from_https_url() {
let gitlab =
Gitlab::from_remote_url("https://gitlab-instance.big-co.com/zed-industries/zed.git")
.unwrap();
let permalink = gitlab.build_permalink(
ParsedGitRemote {
owner: "zed-industries".into(),
repo: "zed".into(),
},
fn test_build_gitlab_permalink_from_https_url() {
let remote = ParsedGitRemote {
owner: "zed-industries",
repo: "zed",
};
let permalink = Gitlab.build_permalink(
remote,
BuildPermalinkParams {
sha: "b2efec9824c45fcc90c9a7eb107a50d1772a60aa",
path: "crates/zed/src/main.rs",
@@ -270,7 +153,45 @@ mod tests {
},
);

let expected_url = "https://gitlab-instance.big-co.com/zed-industries/zed/-/blob/b2efec9824c45fcc90c9a7eb107a50d1772a60aa/crates/zed/src/main.rs";
let expected_url = "https://gitlab.com/zed-industries/zed/-/blob/b2efec9824c45fcc90c9a7eb107a50d1772a60aa/crates/zed/src/main.rs";
assert_eq!(permalink.to_string(), expected_url.to_string())
}

#[test]
fn test_build_gitlab_permalink_from_https_url_single_line_selection() {
let remote = ParsedGitRemote {
owner: "zed-industries",
repo: "zed",
};
let permalink = Gitlab.build_permalink(
remote,
BuildPermalinkParams {
sha: "b2efec9824c45fcc90c9a7eb107a50d1772a60aa",
path: "crates/zed/src/main.rs",
selection: Some(6..6),
},
);

let expected_url = "https://gitlab.com/zed-industries/zed/-/blob/b2efec9824c45fcc90c9a7eb107a50d1772a60aa/crates/zed/src/main.rs#L7";
assert_eq!(permalink.to_string(), expected_url.to_string())
}

#[test]
fn test_build_gitlab_permalink_from_https_url_multi_line_selection() {
let remote = ParsedGitRemote {
owner: "zed-industries",
repo: "zed",
};
let permalink = Gitlab.build_permalink(
remote,
BuildPermalinkParams {
sha: "b2efec9824c45fcc90c9a7eb107a50d1772a60aa",
path: "crates/zed/src/main.rs",
selection: Some(23..47),
},
);

let expected_url = "https://gitlab.com/zed-industries/zed/-/blob/b2efec9824c45fcc90c9a7eb107a50d1772a60aa/crates/zed/src/main.rs#L24-48";
assert_eq!(permalink.to_string(), expected_url.to_string())
}
}

@@ -1,11 +1,6 @@
use std::str::FromStr;

use url::Url;

use git::{
BuildCommitPermalinkParams, BuildPermalinkParams, GitHostingProvider, ParsedGitRemote,
RemoteUrl,
};
use git::{BuildCommitPermalinkParams, BuildPermalinkParams, GitHostingProvider, ParsedGitRemote};

pub struct Sourcehut;

@@ -30,27 +25,21 @@ impl GitHostingProvider for Sourcehut {
format!("L{start_line}-{end_line}")
}

fn parse_remote_url(&self, url: &str) -> Option<ParsedGitRemote> {
let url = RemoteUrl::from_str(url).ok()?;
fn parse_remote_url<'a>(&self, url: &'a str) -> Option<ParsedGitRemote<'a>> {
if url.starts_with("git@git.sr.ht:") || url.starts_with("https://git.sr.ht/") {
// sourcehut indicates a repo with '.git' suffix as a separate repo.
// For example, "git@git.sr.ht:~username/repo" and "git@git.sr.ht:~username/repo.git"
// are two distinct repositories.
let repo_with_owner = url
.trim_start_matches("git@git.sr.ht:~")
.trim_start_matches("https://git.sr.ht/~");

let host = url.host_str()?;
if host != "git.sr.ht" {
return None;
let (owner, repo) = repo_with_owner.split_once('/')?;

return Some(ParsedGitRemote { owner, repo });
}

let mut path_segments = url.path_segments()?;
let owner = path_segments.next()?.trim_start_matches('~');
// We don't trim the `.git` suffix here like we do elsewhere, as
// sourcehut treats a repo with `.git` suffix as a separate repo.
//
// For example, `git@git.sr.ht:~username/repo` and `git@git.sr.ht:~username/repo.git`
// are two distinct repositories.
let repo = path_segments.next()?;

Some(ParsedGitRemote {
owner: owner.into(),
repo: repo.into(),
})
None
}

fn build_commit_permalink(
@@ -89,62 +78,16 @@ impl GitHostingProvider for Sourcehut {

#[cfg(test)]
mod tests {
use pretty_assertions::assert_eq;

use super::*;

#[test]
fn test_parse_remote_url_given_ssh_url() {
let parsed_remote = Sourcehut
.parse_remote_url("git@git.sr.ht:~zed-industries/zed")
.unwrap();

assert_eq!(
parsed_remote,
ParsedGitRemote {
owner: "zed-industries".into(),
repo: "zed".into(),
}
);
}

#[test]
fn test_parse_remote_url_given_ssh_url_with_git_suffix() {
let parsed_remote = Sourcehut
.parse_remote_url("git@git.sr.ht:~zed-industries/zed.git")
.unwrap();

assert_eq!(
parsed_remote,
ParsedGitRemote {
owner: "zed-industries".into(),
repo: "zed.git".into(),
}
);
}

#[test]
fn test_parse_remote_url_given_https_url() {
let parsed_remote = Sourcehut
.parse_remote_url("https://git.sr.ht/~zed-industries/zed")
.unwrap();

assert_eq!(
parsed_remote,
ParsedGitRemote {
owner: "zed-industries".into(),
repo: "zed".into(),
}
);
}

#[test]
fn test_build_sourcehut_permalink() {
fn test_build_sourcehut_permalink_from_ssh_url() {
let remote = ParsedGitRemote {
owner: "rajveermalviya",
repo: "zed",
};
let permalink = Sourcehut.build_permalink(
ParsedGitRemote {
owner: "zed-industries".into(),
repo: "zed".into(),
},
remote,
BuildPermalinkParams {
sha: "faa6f979be417239b2e070dbbf6392b909224e0b",
path: "crates/editor/src/git/permalink.rs",
@@ -152,17 +95,18 @@ mod tests {
},
);

let expected_url = "https://git.sr.ht/~zed-industries/zed/tree/faa6f979be417239b2e070dbbf6392b909224e0b/item/crates/editor/src/git/permalink.rs";
let expected_url = "https://git.sr.ht/~rajveermalviya/zed/tree/faa6f979be417239b2e070dbbf6392b909224e0b/item/crates/editor/src/git/permalink.rs";
assert_eq!(permalink.to_string(), expected_url.to_string())
}

#[test]
fn test_build_sourcehut_permalink_with_git_suffix() {
fn test_build_sourcehut_permalink_from_ssh_url_with_git_prefix() {
let remote = ParsedGitRemote {
owner: "rajveermalviya",
repo: "zed.git",
};
let permalink = Sourcehut.build_permalink(
ParsedGitRemote {
owner: "zed-industries".into(),
repo: "zed.git".into(),
},
remote,
BuildPermalinkParams {
sha: "faa6f979be417239b2e070dbbf6392b909224e0b",
path: "crates/editor/src/git/permalink.rs",
@@ -170,17 +114,18 @@ mod tests {
},
);

let expected_url = "https://git.sr.ht/~zed-industries/zed.git/tree/faa6f979be417239b2e070dbbf6392b909224e0b/item/crates/editor/src/git/permalink.rs";
let expected_url = "https://git.sr.ht/~rajveermalviya/zed.git/tree/faa6f979be417239b2e070dbbf6392b909224e0b/item/crates/editor/src/git/permalink.rs";
assert_eq!(permalink.to_string(), expected_url.to_string())
}

#[test]
fn test_build_sourcehut_permalink_with_single_line_selection() {
fn test_build_sourcehut_permalink_from_ssh_url_single_line_selection() {
let remote = ParsedGitRemote {
owner: "rajveermalviya",
repo: "zed",
};
let permalink = Sourcehut.build_permalink(
ParsedGitRemote {
owner: "zed-industries".into(),
repo: "zed".into(),
},
remote,
BuildPermalinkParams {
sha: "faa6f979be417239b2e070dbbf6392b909224e0b",
path: "crates/editor/src/git/permalink.rs",
@@ -188,17 +133,18 @@ mod tests {
},
);

let expected_url = "https://git.sr.ht/~zed-industries/zed/tree/faa6f979be417239b2e070dbbf6392b909224e0b/item/crates/editor/src/git/permalink.rs#L7";
let expected_url = "https://git.sr.ht/~rajveermalviya/zed/tree/faa6f979be417239b2e070dbbf6392b909224e0b/item/crates/editor/src/git/permalink.rs#L7";
assert_eq!(permalink.to_string(), expected_url.to_string())
}

#[test]
fn test_build_sourcehut_permalink_with_multi_line_selection() {
fn test_build_sourcehut_permalink_from_ssh_url_multi_line_selection() {
let remote = ParsedGitRemote {
owner: "rajveermalviya",
repo: "zed",
};
let permalink = Sourcehut.build_permalink(
ParsedGitRemote {
owner: "zed-industries".into(),
repo: "zed".into(),
},
remote,
BuildPermalinkParams {
sha: "faa6f979be417239b2e070dbbf6392b909224e0b",
path: "crates/editor/src/git/permalink.rs",
@@ -206,7 +152,64 @@ mod tests {
},
);

let expected_url = "https://git.sr.ht/~zed-industries/zed/tree/faa6f979be417239b2e070dbbf6392b909224e0b/item/crates/editor/src/git/permalink.rs#L24-48";
let expected_url = "https://git.sr.ht/~rajveermalviya/zed/tree/faa6f979be417239b2e070dbbf6392b909224e0b/item/crates/editor/src/git/permalink.rs#L24-48";
assert_eq!(permalink.to_string(), expected_url.to_string())
}

#[test]
fn test_build_sourcehut_permalink_from_https_url() {
let remote = ParsedGitRemote {
owner: "rajveermalviya",
repo: "zed",
};
let permalink = Sourcehut.build_permalink(
remote,
BuildPermalinkParams {
sha: "faa6f979be417239b2e070dbbf6392b909224e0b",
path: "crates/zed/src/main.rs",
selection: None,
},
);

let expected_url = "https://git.sr.ht/~rajveermalviya/zed/tree/faa6f979be417239b2e070dbbf6392b909224e0b/item/crates/zed/src/main.rs";
assert_eq!(permalink.to_string(), expected_url.to_string())
}

#[test]
fn test_build_sourcehut_permalink_from_https_url_single_line_selection() {
let remote = ParsedGitRemote {
owner: "rajveermalviya",
repo: "zed",
};
let permalink = Sourcehut.build_permalink(
remote,
BuildPermalinkParams {
sha: "faa6f979be417239b2e070dbbf6392b909224e0b",
path: "crates/zed/src/main.rs",
selection: Some(6..6),
},
);

let expected_url = "https://git.sr.ht/~rajveermalviya/zed/tree/faa6f979be417239b2e070dbbf6392b909224e0b/item/crates/zed/src/main.rs#L7";
assert_eq!(permalink.to_string(), expected_url.to_string())
}

#[test]
fn test_build_sourcehut_permalink_from_https_url_multi_line_selection() {
let remote = ParsedGitRemote {
owner: "rajveermalviya",
repo: "zed",
};
let permalink = Sourcehut.build_permalink(
remote,
BuildPermalinkParams {
sha: "faa6f979be417239b2e070dbbf6392b909224e0b",
path: "crates/zed/src/main.rs",
selection: Some(23..47),
},
);

let expected_url = "https://git.sr.ht/~rajveermalviya/zed/tree/faa6f979be417239b2e070dbbf6392b909224e0b/item/crates/zed/src/main.rs#L24-48";
assert_eq!(permalink.to_string(), expected_url.to_string())
}
}

@@ -37,34 +37,34 @@ impl CursorPosition {
}

fn update_position(&mut self, editor: View<Editor>, cx: &mut ViewContext<Self>) {
editor.update(cx, |editor, cx| {
let buffer = editor.buffer().read(cx).snapshot(cx);
let editor = editor.read(cx);
let buffer = editor.buffer().read(cx).snapshot(cx);

self.selected_count = Default::default();
self.selected_count.selections = editor.selections.count();
let mut last_selection: Option<Selection<usize>> = None;
for selection in editor.selections.all::<usize>(cx) {
self.selected_count.characters += buffer
.text_for_range(selection.start..selection.end)
.map(|t| t.chars().count())
.sum::<usize>();
if last_selection
.as_ref()
.map_or(true, |last_selection| selection.id > last_selection.id)
{
last_selection = Some(selection);
self.selected_count = Default::default();
self.selected_count.selections = editor.selections.count();
let mut last_selection: Option<Selection<usize>> = None;
for selection in editor.selections.all::<usize>(cx) {
self.selected_count.characters += buffer
.text_for_range(selection.start..selection.end)
.map(|t| t.chars().count())
.sum::<usize>();
if last_selection
.as_ref()
.map_or(true, |last_selection| selection.id > last_selection.id)
{
last_selection = Some(selection);
}
}
for selection in editor.selections.all::<Point>(cx) {
if selection.end != selection.start {
self.selected_count.lines += (selection.end.row - selection.start.row) as usize;
if selection.end.column != 0 {
self.selected_count.lines += 1;
}
}
for selection in editor.selections.all::<Point>(cx) {
if selection.end != selection.start {
self.selected_count.lines += (selection.end.row - selection.start.row) as usize;
if selection.end.column != 0 {
self.selected_count.lines += 1;
}
}
}
self.position = last_selection.map(|s| s.head().to_point(&buffer));
});
}
self.position = last_selection.map(|s| s.head().to_point(&buffer));

cx.notify();
}

@@ -56,8 +56,8 @@ impl GoToLine {
}

pub fn new(active_editor: View<Editor>, cx: &mut ViewContext<Self>) -> Self {
let cursor =
active_editor.update(cx, |editor, cx| editor.selections.last::<Point>(cx).head());
let editor = active_editor.read(cx);
let cursor = editor.selections.last::<Point>(cx).head();

let line = cursor.row + 1;
let column = cursor.column + 1;

@@ -485,7 +485,7 @@ impl Render for TextInput {
div()
.flex()
.key_context("TextInput")
.track_focus(&self.focus_handle(cx))
.track_focus(&self.focus_handle)
.cursor(CursorStyle::IBeam)
.on_action(cx.listener(Self::backspace))
.on_action(cx.listener(Self::delete))
@@ -549,7 +549,7 @@ impl Render for InputExample {
let num_keystrokes = self.recent_keystrokes.len();
div()
.bg(rgb(0xaaaaaa))
.track_focus(&self.focus_handle(cx))
.track_focus(&self.focus_handle)
.flex()
.flex_col()
.size_full()

@@ -217,7 +217,6 @@ pub(crate) type KeystrokeObserver =
type QuitHandler = Box<dyn FnOnce(&mut AppContext) -> LocalBoxFuture<'static, ()> + 'static>;
type ReleaseListener = Box<dyn FnOnce(&mut dyn Any, &mut AppContext) + 'static>;
type NewViewListener = Box<dyn FnMut(AnyView, &mut WindowContext) + 'static>;
type NewModelListener = Box<dyn FnMut(AnyModel, &mut AppContext) + 'static>;

/// Contains the state of the full application, and passed as a reference to a variety of callbacks.
/// Other contexts such as [ModelContext], [WindowContext], and [ViewContext] deref to this type, making it the most general context type.
@@ -238,7 +237,6 @@ pub struct AppContext {
http_client: Arc<dyn HttpClient>,
pub(crate) globals_by_type: FxHashMap<TypeId, Box<dyn Any>>,
pub(crate) entities: EntityMap,
pub(crate) new_model_observers: SubscriberSet<TypeId, NewModelListener>,
pub(crate) new_view_observers: SubscriberSet<TypeId, NewViewListener>,
pub(crate) windows: SlotMap<WindowId, Option<Window>>,
pub(crate) window_handles: FxHashMap<WindowId, AnyWindowHandle>,
@@ -258,9 +256,6 @@ pub struct AppContext {
pub(crate) layout_id_buffer: Vec<LayoutId>, // We recycle this memory across layout requests.
pub(crate) propagate_event: bool,
pub(crate) prompt_builder: Option<PromptBuilder>,

#[cfg(any(test, feature = "test-support", debug_assertions))]
pub(crate) name: Option<&'static str>,
}

impl AppContext {
@@ -298,7 +293,6 @@ impl AppContext {
globals_by_type: FxHashMap::default(),
entities,
new_view_observers: SubscriberSet::new(),
new_model_observers: SubscriberSet::new(),
window_handles: FxHashMap::default(),
windows: SlotMap::with_key(),
keymap: Rc::new(RefCell::new(Keymap::default())),
@@ -315,9 +309,6 @@ impl AppContext {
layout_id_buffer: Default::default(),
propagate_event: true,
prompt_builder: Some(PromptBuilder::Default),

#[cfg(any(test, feature = "test-support", debug_assertions))]
name: None,
}),
});

@@ -997,7 +988,6 @@ impl AppContext {
}

/// Move the global of the given type to the stack.
#[track_caller]
pub(crate) fn lease_global<G: Global>(&mut self) -> GlobalLease<G> {
GlobalLease::new(
self.globals_by_type
@@ -1019,7 +1009,6 @@ impl AppContext {
activate();
subscription
}

/// Arrange for the given function to be invoked whenever a view of the specified type is created.
/// The function will be passed a mutable reference to the view along with an appropriate context.
pub fn observe_new_views<V: 'static>(
@@ -1039,31 +1028,6 @@ impl AppContext {
)
}

pub(crate) fn new_model_observer(&self, key: TypeId, value: NewModelListener) -> Subscription {
let (subscription, activate) = self.new_model_observers.insert(key, value);
activate();
subscription
}

/// Arrange for the given function to be invoked whenever a view of the specified type is created.
/// The function will be passed a mutable reference to the view along with an appropriate context.
pub fn observe_new_models<T: 'static>(
&self,
on_new: impl 'static + Fn(&mut T, &mut ModelContext<T>),
) -> Subscription {
self.new_model_observer(
TypeId::of::<T>(),
Box::new(move |any_model: AnyModel, cx: &mut AppContext| {
any_model
.downcast::<T>()
.unwrap()
.update(cx, |model_state, cx| {
on_new(model_state, cx);
})
}),
)
}

/// Observe the release of a model or view. The callback is invoked after the model or view
/// has no more strong references but before it has been dropped.
pub fn observe_release<E, T>(
@@ -1355,12 +1319,6 @@ impl AppContext {

(task, is_first)
}

/// Get the name for this App.
#[cfg(any(test, feature = "test-support", debug_assertions))]
pub fn get_name(&self) -> &'static str {
self.name.as_ref().unwrap()
}
}

impl Context for AppContext {
@@ -1375,21 +1333,8 @@ impl Context for AppContext {
) -> Model<T> {
self.update(|cx| {
let slot = cx.entities.reserve();
let model = slot.clone();
let entity = build_model(&mut ModelContext::new(cx, slot.downgrade()));
cx.entities.insert(slot, entity);

// Non-generic part to avoid leaking SubscriberSet to invokers of `new_view`.
fn notify_observers(cx: &mut AppContext, tid: TypeId, model: AnyModel) {
cx.new_model_observers.clone().retain(&tid, |observer| {
let any_model = model.clone();
(observer)(any_model, cx);
true
});
}
notify_observers(cx, TypeId::of::<T>(), AnyModel::from(model.clone()));

model
cx.entities.insert(slot, entity)
})
}

@@ -536,15 +536,6 @@ impl AnyWeakModel {
}
}

impl std::fmt::Debug for AnyWeakModel {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.debug_struct(type_name::<Self>())
.field("entity_id", &self.entity_id)
.field("entity_type", &self.entity_type)
.finish()
}
}

impl<T> From<WeakModel<T>> for AnyWeakModel {
fn from(model: WeakModel<T>) -> Self {
model.any_model

@@ -478,12 +478,6 @@ impl TestAppContext {
.await
.unwrap();
}

/// Set a name for this App.
#[cfg(any(test, feature = "test-support"))]
pub fn set_name(&mut self, name: &'static str) {
self.update(|cx| cx.name = Some(name))
}
}

impl<T: 'static> Model<T> {

Some files were not shown because too many files have changed in this diff