Compare commits

1 Commit

Author       | SHA1       | Message    | Date
Conrad Irwin | d8d8f90893 | vim-global | 2025-01-31 12:59:24 -07:00
478 changed files with 11810 additions and 22148 deletions

View File

@@ -1,26 +0,0 @@
name: "Run tests on Windows"
description: "Runs the tests on Windows"
inputs:
working-directory:
description: "The working directory"
required: true
default: "."
runs:
using: "composite"
steps:
- name: Install Rust
shell: pwsh
working-directory: ${{ inputs.working-directory }}
run: cargo install cargo-nextest --locked
- name: Install Node
uses: actions/setup-node@1d0ff469b7ec7b3cb9d8673fde0c81c44821de2a # v4
with:
node-version: "18"
- name: Run tests
shell: pwsh
working-directory: ${{ inputs.working-directory }}
run: cargo nextest run --workspace --no-fail-fast

View File

@@ -135,7 +135,6 @@ jobs:
cargo check -p gpui --features "macos-blade"
cargo check -p workspace
cargo build -p remote_server
cargo check -p gpui --examples
script/check-rust-livekit-macos
# Since the macOS runners are stateful, we need to remove the config file to prevent potential bugs.
@@ -182,7 +181,6 @@ jobs:
run: |
cargo build -p zed
cargo check -p workspace
cargo check -p gpui --examples
# Since the Linux runner is not stateful, in theory there is no need to do this cleanup.
# But, to avoid potential issues in the future if we choose to use a stateful Linux runner and forget to add code
@@ -228,6 +226,7 @@ jobs:
if: always()
run: rm -rf ./../.cargo
# todo(windows): Actually run the tests
windows_tests:
timeout-minutes: 60
name: (Windows) Run Clippy and tests
@@ -237,55 +236,33 @@ jobs:
# more info here: https://github.com/rust-lang/cargo/issues/13020
- name: Enable longer pathnames for git
run: git config --system core.longpaths true
- name: Checkout repo
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
with:
clean: false
- name: Create Dev Drive using ReFS
run: ./script/setup-dev-driver.ps1
# actions/checkout does not let us clone anywhere outside ${{ github.workspace }}, so we have to copy the clone...
- name: Copy Git Repo to Dev Drive
run: |
Copy-Item -Path "${{ github.workspace }}" -Destination "${{ env.ZED_WORKSPACE }}" -Recurse
- name: Cache dependencies
uses: swatinem/rust-cache@f0deed1e0edfc6a9be95417288c0e1099b1eeec3 # v2
with:
save-if: ${{ github.ref == 'refs/heads/main' }}
workspaces: ${{ env.ZED_WORKSPACE }}
cache-provider: "github"
- name: Configure CI
run: |
mkdir -p ${{ env.CARGO_HOME }} -ErrorAction Ignore
cp ./.cargo/ci-config.toml ${{ env.CARGO_HOME }}/config.toml
mkdir -p ./../.cargo
cp ./.cargo/ci-config.toml ./../.cargo/config.toml
- name: cargo clippy
working-directory: ${{ env.ZED_WORKSPACE }}
# Windows can't run shell scripts, so we need to use `cargo xtask`.
run: cargo xtask clippy
- name: Run tests
uses: ./.github/actions/run_tests_windows
with:
working-directory: ${{ env.ZED_WORKSPACE }}
- name: Build Zed
working-directory: ${{ env.ZED_WORKSPACE }}
run: cargo build
- name: Check dev drive space
working-directory: ${{ env.ZED_WORKSPACE }}
# `setup-dev-driver.ps1` creates a 100GB drive, with CI taking up ~45GB of the drive.
run: ./script/exit-ci-if-dev-drive-is-full.ps1 95
# Since the Windows runners are stateful, we need to remove the config file to prevent potential bugs.
- name: Clean CI config file
if: always()
run: Remove-Item -Path "${{ env.CARGO_HOME }}/config.toml" -Force
run: Remove-Item -Path "./../.cargo" -Recurse -Force
bundle-mac:
timeout-minutes: 120

View File

@@ -63,10 +63,3 @@ jobs:
apiToken: ${{ secrets.CLOUDFLARE_API_TOKEN }}
accountId: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }}
command: deploy .cloudflare/docs-proxy/src/worker.js
- name: Preserve Wrangler logs
uses: actions/upload-artifact@65c4c4a1ddee5b72f698fdd19549f0f0fb45cf08 # v4
if: always()
with:
name: wrangler_logs
path: /home/runner/.config/.wrangler/logs/

View File

@@ -24,13 +24,11 @@ jobs:
- name: Prettier Check on /docs
working-directory: ./docs
run: |
pnpm dlx prettier@${PRETTIER_VERSION} . --check || {
pnpm dlx prettier . --check || {
echo "To fix, run from the root of the zed repo:"
echo " cd docs && pnpm dlx prettier@${PRETTIER_VERSION} . --write && cd .."
echo " cd docs && pnpm dlx prettier . --write && cd .."
false
}
env:
PRETTIER_VERSION: 3.5.0
- name: Check for Typos with Typos-CLI
uses: crate-ci/typos@8e6a4285bcbde632c5d79900a7779746e8b7ea3f # v1.24.6

.gitignore
View File

@@ -1,35 +1,35 @@
**/*.db
**/cargo-target
**/target
**/venv
*.wasm
*.xcodeproj
.DS_Store
.blob_store
.build
.envrc
.flatpak-builder
.idea
.netrc
.pytest_cache
.swiftpm
.swiftpm/config/registries.json
.swiftpm/xcode/package.xcworkspace/contents.xcworkspacedata
.venv
.vscode
.wrangler
/.direnv
/assets/*licenses.*
/crates/collab/seed.json
/crates/theme/schemas/theme.json
/crates/zed/resources/flatpak/flatpak-cargo-sources.json
/dev.zed.Zed*.json
.envrc
.idea
**/target
**/cargo-target
/zed.xcworkspace
.DS_Store
/plugins/bin
/script/node_modules
/zed.xcworkspace
DerivedData/
/crates/theme/schemas/theme.json
/crates/collab/seed.json
/crates/zed/resources/flatpak/flatpak-cargo-sources.json
/dev.zed.Zed*.json
/assets/*licenses.*
**/venv
.build
*.wasm
Packages
*.xcodeproj
xcuserdata/
DerivedData/
.swiftpm/config/registries.json
.swiftpm/xcode/package.xcworkspace/contents.xcworkspacedata
.netrc
.swiftpm
**/*.db
.pytest_cache
.venv
.blob_store
.vscode
.wrangler
.flatpak-builder
# Don't commit any secrets to the repo.
.env.secret.toml

View File

@@ -52,9 +52,3 @@ Zed is made up of several smaller crates - let's go over those you're most likel
- [`rpc`](/crates/rpc) defines messages to be exchanged with the collaboration server.
- [`theme`](/crates/theme) defines the theme system and provides a default theme.
- [`ui`](/crates/ui) is a collection of UI components and common patterns used throughout Zed.
- [`cli`](/crates/cli) is the CLI crate which invokes the Zed binary.
- [`zed`](/crates/zed) is where all things come together, and the `main` entry point for Zed.
## Packaging Zed
Check our [notes for packaging Zed](https://zed.dev/docs/development/linux#notes-for-packaging-zed).

Cargo.lock (generated)

File diff suppressed because it is too large.

View File

@@ -26,15 +26,12 @@ members = [
"crates/collections",
"crates/command_palette",
"crates/command_palette_hooks",
"crates/component",
"crates/component_preview",
"crates/context_server",
"crates/context_server_settings",
"crates/copilot",
"crates/db",
"crates/deepseek",
"crates/diagnostics",
"crates/diff",
"crates/docs_preprocessor",
"crates/editor",
"crates/evals",
@@ -83,7 +80,6 @@ members = [
"crates/markdown_preview",
"crates/media",
"crates/menu",
"crates/migrator",
"crates/multi_buffer",
"crates/node_runtime",
"crates/notifications",
@@ -91,7 +87,6 @@ members = [
"crates/open_ai",
"crates/outline",
"crates/outline_panel",
"crates/panel",
"crates/paths",
"crates/picker",
"crates/prettier",
@@ -108,6 +103,7 @@ members = [
"crates/remote_server",
"crates/repl",
"crates/reqwest_client",
"crates/reqwest_client",
"crates/rich_text",
"crates/rope",
"crates/rpc",
@@ -148,7 +144,7 @@ members = [
"crates/ui_input",
"crates/ui_macros",
"crates/util",
"crates/util_macros",
"crates/vcs_menu",
"crates/vim",
"crates/vim_mode_setting",
"crates/welcome",
@@ -156,6 +152,7 @@ members = [
"crates/worktree",
"crates/zed",
"crates/zed_actions",
"crates/zed_predict_onboarding",
"crates/zeta",
#
@@ -228,15 +225,12 @@ collab_ui = { path = "crates/collab_ui" }
collections = { path = "crates/collections" }
command_palette = { path = "crates/command_palette" }
command_palette_hooks = { path = "crates/command_palette_hooks" }
component = { path = "crates/component" }
component_preview = { path = "crates/component_preview" }
context_server = { path = "crates/context_server" }
context_server_settings = { path = "crates/context_server_settings" }
copilot = { path = "crates/copilot" }
db = { path = "crates/db" }
deepseek = { path = "crates/deepseek" }
diagnostics = { path = "crates/diagnostics" }
diff = { path = "crates/diff" }
editor = { path = "crates/editor" }
extension = { path = "crates/extension" }
extension_host = { path = "crates/extension_host" }
@@ -249,8 +243,8 @@ fs = { path = "crates/fs" }
fsevent = { path = "crates/fsevent" }
fuzzy = { path = "crates/fuzzy" }
git = { path = "crates/git" }
git_hosting_providers = { path = "crates/git_hosting_providers" }
git_ui = { path = "crates/git_ui" }
git_hosting_providers = { path = "crates/git_hosting_providers" }
go_to_line = { path = "crates/go_to_line" }
google_ai = { path = "crates/google_ai" }
gpui = { path = "crates/gpui", default-features = false, features = [
@@ -283,7 +277,6 @@ markdown = { path = "crates/markdown" }
markdown_preview = { path = "crates/markdown_preview" }
media = { path = "crates/media" }
menu = { path = "crates/menu" }
migrator = { path = "crates/migrator" }
multi_buffer = { path = "crates/multi_buffer" }
node_runtime = { path = "crates/node_runtime" }
notifications = { path = "crates/notifications" }
@@ -292,7 +285,6 @@ open_ai = { path = "crates/open_ai" }
outline = { path = "crates/outline" }
outline_panel = { path = "crates/outline_panel" }
paths = { path = "crates/paths" }
panel = { path = "crates/panel" }
picker = { path = "crates/picker" }
plugin = { path = "crates/plugin" }
plugin_macros = { path = "crates/plugin_macros" }
@@ -348,7 +340,7 @@ ui = { path = "crates/ui" }
ui_input = { path = "crates/ui_input" }
ui_macros = { path = "crates/ui_macros" }
util = { path = "crates/util" }
util_macros = { path = "crates/util_macros" }
vcs_menu = { path = "crates/vcs_menu" }
vim = { path = "crates/vim" }
vim_mode_setting = { path = "crates/vim_mode_setting" }
welcome = { path = "crates/welcome" }
@@ -356,6 +348,7 @@ workspace = { path = "crates/workspace" }
worktree = { path = "crates/worktree" }
zed = { path = "crates/zed" }
zed_actions = { path = "crates/zed_actions" }
zed_predict_onboarding = { path = "crates/zed_predict_onboarding" }
zeta = { path = "crates/zeta" }
#
@@ -368,7 +361,7 @@ alacritty_terminal = { git = "https://github.com/alacritty/alacritty.git", rev =
any_vec = "0.14"
anyhow = "1.0.86"
arrayvec = { version = "0.7.4", features = ["serde"] }
ashpd = { version = "0.10", default-features = false, features = ["async-std"] }
ashpd = { version = "0.10", default-features = false, features = ["async-std"]}
async-compat = "0.2.1"
async-compression = { version = "0.4", features = ["gzip", "futures-io"] }
async-dispatcher = "0.1"
@@ -430,12 +423,7 @@ jupyter-websocket-client = { version = "0.9.0" }
libc = "0.2"
libsqlite3-sys = { version = "0.30.1", features = ["bundled"] }
linkify = "0.10.0"
linkme = "0.3.31"
livekit = { git = "https://github.com/zed-industries/livekit-rust-sdks", rev = "811ceae29fabee455f110c56cd66b3f49a7e5003", features = [
"dispatcher",
"services-dispatcher",
"rustls-tls-native-roots",
], default-features = false }
livekit = { git = "https://github.com/zed-industries/livekit-rust-sdks", rev="060964da10574cd9bf06463a53bf6e0769c5c45e", features = ["dispatcher", "services-dispatcher", "rustls-tls-native-roots"], default-features = false }
log = { version = "0.4.16", features = ["kv_unstable_serde", "serde"] }
markup5ever_rcdom = "0.3.0"
nanoid = "0.4"
@@ -455,13 +443,11 @@ pet-poetry = { git = "https://github.com/microsoft/python-environment-tools.git"
pet-reporter = { git = "https://github.com/microsoft/python-environment-tools.git", rev = "1abe5cec5ebfbe97ca71746a4cfc7fe89bddf8e0" }
postage = { version = "0.5", features = ["futures-traits"] }
pretty_assertions = { version = "1.3.0", features = ["unstable"] }
proc-macro2 = "1.0.93"
profiling = "1"
prost = "0.9"
prost-build = "0.9"
prost-types = "0.9"
pulldown-cmark = { version = "0.12.0", default-features = false }
quote = "1.0.9"
rand = "0.8.5"
rayon = "1.8"
regex = "1.5"
@@ -481,7 +467,7 @@ runtimelib = { version = "0.25.0", default-features = false, features = [
rustc-demangle = "0.1.23"
rust-embed = { version = "8.4", features = ["include-exclude"] }
rustc-hash = "2.1.0"
rustls = { version = "0.23.22" }
rustls = "0.21.12"
rustls-native-certs = "0.8.0"
schemars = { version = "0.8", features = ["impl_json_schema", "indexmap2"] }
semver = "1.0"
@@ -505,7 +491,6 @@ sqlformat = "0.2"
strsim = "0.11"
strum = { version = "0.26.0", features = ["derive"] }
subtle = "2.5.0"
syn = { version = "1.0.72", features = ["full", "extra-traits"] }
sys-locale = "0.3.1"
sysinfo = "0.31.0"
take-until = "0.2.0"
@@ -530,7 +515,6 @@ tree-sitter-cpp = "0.23"
tree-sitter-css = "0.23"
tree-sitter-elixir = "0.3"
tree-sitter-embedded-template = "0.23.0"
tree-sitter-gitcommit = {git = "https://github.com/zed-industries/tree-sitter-git-commit", rev = "88309716a69dd13ab83443721ba6e0b491d37ee9"}
tree-sitter-go = "0.23"
tree-sitter-go-mod = { git = "https://github.com/camdencheek/tree-sitter-go-mod", rev = "6efb59652d30e0e9cd5f3b3a669afd6f1a926d3c", package = "tree-sitter-gomod" }
tree-sitter-gowork = { git = "https://github.com/zed-industries/tree-sitter-go-work", rev = "acb0617bf7f4fda02c6217676cc64acb89536dc7" }
@@ -564,7 +548,6 @@ wasmtime = { version = "24", default-features = false, features = [
wasmtime-wasi = "24"
which = "6.0.0"
wit-component = "0.201"
zed_llm_client = "0.4"
zstd = "0.11"
metal = "0.31"
@@ -625,7 +608,6 @@ features = [
# TODO livekit https://github.com/RustAudio/cpal/pull/891
[patch.crates-io]
cpal = { git = "https://github.com/zed-industries/cpal", rev = "fd8bc2fd39f1f5fdee5a0690656caff9a26d9d50" }
real-async-tls = { git = "https://github.com/zed-industries/async-tls", rev = "1e759a4b5e370f87dc15e40756ac4f8815b61d9d", package = "async-tls"}
[profile.dev]
split-debuginfo = "unpacked"
@@ -679,6 +661,7 @@ telemetry_events = { codegen-units = 1 }
theme_selector = { codegen-units = 1 }
time_format = { codegen-units = 1 }
ui_input = { codegen-units = 1 }
vcs_menu = { codegen-units = 1 }
zed_actions = { codegen-units = 1 }
[profile.release]

View File

@@ -1,4 +1,4 @@
Copyright 2022 - 2025 Zed Industries, Inc.
Copyright 2022 - 2024 Zed Industries, Inc.

View File

@@ -1,4 +1,4 @@
Copyright 2022 - 2025 Zed Industries, Inc.
Copyright 2022 - 2024 Zed Industries, Inc.
Licensed under the Apache License, Version 2.0 (the "License");

View File

@@ -1 +0,0 @@
<svg width="15" height="15" viewBox="0 0 15 15" fill="none" xmlns="http://www.w3.org/2000/svg"><circle cx="7.25" cy="7.25" r="3" fill="currentColor"></circle></svg>

View File

@@ -86,8 +86,8 @@
"hpp": "cpp",
"hrl": "erlang",
"hs": "haskell",
"htm": "html",
"html": "html",
"htm": "template",
"html": "template",
"hxx": "cpp",
"ib": "storage",
"ico": "image",
@@ -101,7 +101,7 @@
"jpeg": "image",
"jpg": "image",
"js": "javascript",
"json": "json",
"json": "storage",
"jsonc": "storage",
"jsx": "react",
"jxl": "image",
@@ -150,19 +150,8 @@
"postcss": "css",
"ppt": "document",
"pptx": "document",
"prettier.config.cjs": "prettier",
"prettier.config.js": "prettier",
"prettier.config.mjs": "prettier",
"prettierignore": "prettier",
"prettierrc": "prettier",
"prettierrc.cjs": "prettier",
"prettierrc.js": "prettier",
"prettierrc.json": "prettier",
"prettierrc.json5": "prettier",
"prettierrc.mjs": "prettier",
"prettierrc.toml": "prettier",
"prettierrc.yaml": "prettier",
"prettierrc.yml": "prettier",
"prisma": "prisma",
"profile": "terminal",
"ps1": "terminal",

View File

@@ -1,6 +0,0 @@
<svg width="16" height="16" viewBox="0 0 16 16" fill="none" xmlns="http://www.w3.org/2000/svg">
<path d="M5 5C5 3.89543 5.89543 3 7 3H9C10.1046 3 11 3.89543 11 5V6H5V5Z" stroke="black" stroke-width="1.5"/>
<path d="M8 9V11" stroke="black" stroke-width="1.5" stroke-linecap="round"/>
<circle cx="8" cy="9" r="1" fill="black"/>
<rect x="3.75" y="5.75" width="8.5" height="7.5" rx="1.25" stroke="black" stroke-width="1.5" stroke-linejoin="round"/>
</svg>

View File

@@ -1,4 +1,4 @@
<svg width="440" height="128" xmlns="http://www.w3.org/2000/svg">
<svg width="420" height="128" xmlns="http://www.w3.org/2000/svg">
<defs>
<pattern id="tilePattern" width="22" height="22" patternUnits="userSpaceOnUse">
<svg width="16" height="16" viewBox="0 0 16 16" fill="none" xmlns="http://www.w3.org/2000/svg">

View File

@@ -1,6 +0,0 @@
<svg width="16" height="16" viewBox="0 0 16 16" fill="none" xmlns="http://www.w3.org/2000/svg">
<path opacity="0.6" fill-rule="evenodd" clip-rule="evenodd" d="M6.75 9.31247L8.25 10.5576V11.75H1.75V10.0803L4.49751 7.44273L5.65909 8.40693L3.73923 10.25H6.75V9.31247ZM8.25 5.85739V4.25H6.31358L8.25 5.85739ZM1.75 5.16209V7.1H3.25V6.4072L1.75 5.16209Z" fill="black"/>
<path opacity="0.6" fill-rule="evenodd" clip-rule="evenodd" d="M10.9624 9.40853L11.9014 8L10.6241 6.08397L9.37598 6.91603L10.0986 8L9.80184 8.44518L10.9624 9.40853Z" fill="black"/>
<path opacity="0.6" fill-rule="evenodd" clip-rule="evenodd" d="M12.8936 11.0116L14.9014 8L12.6241 4.58397L11.376 5.41603L13.0986 8L11.7331 10.0483L12.8936 11.0116Z" fill="black"/>
<path fill-rule="evenodd" clip-rule="evenodd" d="M14.1225 13.809C14.0341 13.9146 13.877 13.9289 13.7711 13.8409L1.19311 3.40021C1.08659 3.31178 1.07221 3.15362 1.16104 3.04743L1.87752 2.19101C1.96588 2.0854 2.123 2.07112 2.22895 2.15906L14.8069 12.5998C14.9134 12.6882 14.9278 12.8464 14.839 12.9526L14.1225 13.809Z" fill="black"/>
</svg>

View File

@@ -32,7 +32,7 @@
"ctrl-q": "zed::Quit",
"f11": "zed::ToggleFullScreen",
"ctrl-alt-z": "zeta::RateCompletions",
"ctrl-shift-i": "edit_prediction::ToggleMenu"
"ctrl-shift-i": "inline_completion::ToggleMenu"
}
},
{
@@ -122,8 +122,7 @@
"ctrl-i": "editor::ShowSignatureHelp",
"alt-g b": "editor::ToggleGitBlame",
"menu": "editor::OpenContextMenu",
"shift-f10": "editor::OpenContextMenu",
"ctrl-shift-e": "editor::ToggleEditPrediction"
"shift-f10": "editor::OpenContextMenu"
}
},
{
@@ -146,17 +145,17 @@
}
},
{
"context": "Editor && mode == full && edit_prediction",
"context": "Editor && mode == full && inline_completion",
"bindings": {
"alt-]": "editor::NextEditPrediction",
"alt-[": "editor::PreviousEditPrediction",
"alt-right": "editor::AcceptPartialEditPrediction"
"alt-]": "editor::NextInlineCompletion",
"alt-[": "editor::PreviousInlineCompletion",
"alt-right": "editor::AcceptPartialInlineCompletion"
}
},
{
"context": "Editor && !edit_prediction",
"context": "Editor && !inline_completion",
"bindings": {
"alt-\\": "editor::ShowEditPrediction"
"alt-\\": "editor::ShowInlineCompletion"
}
},
{
@@ -204,8 +203,8 @@
"enter": "search::SelectNextMatch",
"shift-enter": "search::SelectPrevMatch",
"alt-enter": "search::SelectAllMatches",
"find": "search::FocusSearch",
"ctrl-f": "search::FocusSearch",
"find": "search::FocusSearch",
"ctrl-h": "search::ToggleReplace",
"ctrl-l": "search::ToggleSelection"
}
@@ -275,8 +274,8 @@
"ctrl-pagedown": "pane::ActivateNextItem",
"ctrl-shift-pageup": "pane::SwapItemLeft",
"ctrl-shift-pagedown": "pane::SwapItemRight",
"ctrl-f4": ["pane::CloseActiveItem", { "close_pinned": false }],
"ctrl-w": ["pane::CloseActiveItem", { "close_pinned": false }],
"ctrl-f4": "pane::CloseActiveItem",
"ctrl-w": "pane::CloseActiveItem",
"alt-ctrl-t": ["pane::CloseInactiveItems", { "close_pinned": false }],
"alt-ctrl-shift-w": "workspace::CloseInactiveTabsAndPanes",
"ctrl-k e": ["pane::CloseItemsToTheLeft", { "close_pinned": false }],
@@ -291,15 +290,15 @@
"f3": "search::SelectNextMatch",
"ctrl-alt-shift-g": "search::SelectPrevMatch",
"shift-f3": "search::SelectPrevMatch",
"shift-find": "project_search::ToggleFocus",
"ctrl-shift-f": "project_search::ToggleFocus",
"shift-find": "project_search::ToggleFocus",
"ctrl-alt-shift-h": "search::ToggleReplace",
"ctrl-alt-shift-l": "search::ToggleSelection",
"alt-enter": "search::SelectAllMatches",
"alt-c": "search::ToggleCaseSensitive",
"alt-w": "search::ToggleWholeWord",
"alt-find": "project_search::ToggleFilters",
"alt-ctrl-f": "project_search::ToggleFilters",
"alt-find": "project_search::ToggleFilters",
"ctrl-alt-shift-r": "search::ToggleRegex",
"ctrl-alt-shift-x": "search::ToggleRegex",
"alt-r": "search::ToggleRegex",
@@ -349,15 +348,15 @@
"ctrl-k ctrl-l": "editor::ToggleFold",
"ctrl-k ctrl-[": "editor::FoldRecursive",
"ctrl-k ctrl-]": "editor::UnfoldRecursive",
"ctrl-k ctrl-1": ["editor::FoldAtLevel", 1],
"ctrl-k ctrl-2": ["editor::FoldAtLevel", 2],
"ctrl-k ctrl-3": ["editor::FoldAtLevel", 3],
"ctrl-k ctrl-4": ["editor::FoldAtLevel", 4],
"ctrl-k ctrl-5": ["editor::FoldAtLevel", 5],
"ctrl-k ctrl-6": ["editor::FoldAtLevel", 6],
"ctrl-k ctrl-7": ["editor::FoldAtLevel", 7],
"ctrl-k ctrl-8": ["editor::FoldAtLevel", 8],
"ctrl-k ctrl-9": ["editor::FoldAtLevel", 9],
"ctrl-k ctrl-1": ["editor::FoldAtLevel", { "level": 1 }],
"ctrl-k ctrl-2": ["editor::FoldAtLevel", { "level": 2 }],
"ctrl-k ctrl-3": ["editor::FoldAtLevel", { "level": 3 }],
"ctrl-k ctrl-4": ["editor::FoldAtLevel", { "level": 4 }],
"ctrl-k ctrl-5": ["editor::FoldAtLevel", { "level": 5 }],
"ctrl-k ctrl-6": ["editor::FoldAtLevel", { "level": 6 }],
"ctrl-k ctrl-7": ["editor::FoldAtLevel", { "level": 7 }],
"ctrl-k ctrl-8": ["editor::FoldAtLevel", { "level": 8 }],
"ctrl-k ctrl-9": ["editor::FoldAtLevel", { "level": 9 }],
"ctrl-k ctrl-0": "editor::FoldAll",
"ctrl-k ctrl-j": "editor::UnfoldAll",
"ctrl-space": "editor::ShowCompletions",
@@ -433,14 +432,14 @@
"ctrl-alt-s": "workspace::SaveAll",
"ctrl-k m": "language_selector::Toggle",
"escape": "workspace::Unfollow",
"ctrl-k ctrl-left": "workspace::ActivatePaneLeft",
"ctrl-k ctrl-right": "workspace::ActivatePaneRight",
"ctrl-k ctrl-up": "workspace::ActivatePaneUp",
"ctrl-k ctrl-down": "workspace::ActivatePaneDown",
"ctrl-k shift-left": "workspace::SwapPaneLeft",
"ctrl-k shift-right": "workspace::SwapPaneRight",
"ctrl-k shift-up": "workspace::SwapPaneUp",
"ctrl-k shift-down": "workspace::SwapPaneDown",
"ctrl-k ctrl-left": ["workspace::ActivatePaneInDirection", "Left"],
"ctrl-k ctrl-right": ["workspace::ActivatePaneInDirection", "Right"],
"ctrl-k ctrl-up": ["workspace::ActivatePaneInDirection", "Up"],
"ctrl-k ctrl-down": ["workspace::ActivatePaneInDirection", "Down"],
"ctrl-k shift-left": ["workspace::SwapPaneInDirection", "Left"],
"ctrl-k shift-right": ["workspace::SwapPaneInDirection", "Right"],
"ctrl-k shift-up": ["workspace::SwapPaneInDirection", "Up"],
"ctrl-k shift-down": ["workspace::SwapPaneInDirection", "Down"],
"ctrl-shift-x": "zed::Extensions",
"ctrl-shift-r": "task::Rerun",
"ctrl-alt-r": "task::Rerun",
@@ -454,8 +453,8 @@
{
"context": "ApplicationMenu",
"bindings": {
"left": "app_menu::ActivateMenuLeft",
"right": "app_menu::ActivateMenuRight"
"left": ["app_menu::NavigateApplicationMenuInDirection", "Left"],
"right": ["app_menu::NavigateApplicationMenuInDirection", "Right"]
}
},
// Bindings from Sublime Text
@@ -497,22 +496,17 @@
},
{
"context": "Editor && showing_completions",
"use_key_equivalents": true,
"bindings": {
"enter": "editor::ConfirmCompletion",
"tab": "editor::ComposeCompletion"
}
},
{
"context": "Editor && edit_prediction",
"context": "Editor && inline_completion && !showing_completions",
"use_key_equivalents": true,
"bindings": {
// Changing the modifier currently breaks accepting while you also have an LSP completions menu open
"alt-enter": "editor::AcceptEditPrediction"
}
},
{
"context": "Editor && edit_prediction && !edit_prediction_requires_modifier",
"bindings": {
"tab": "editor::AcceptEditPrediction"
"tab": "editor::AcceptInlineCompletion"
}
},
{
@@ -536,7 +530,8 @@
{
"bindings": {
"ctrl-alt-shift-f": "workspace::FollowNextCollaborator",
"ctrl-alt-i": "zed::DebugElements"
"ctrl-alt-i": "zed::DebugElements",
"ctrl-:": "editor::ToggleInlayHints"
}
},
{
@@ -554,8 +549,7 @@
"ctrl-shift-e": "pane::RevealInProjectPanel",
"ctrl-f8": "editor::GoToHunk",
"ctrl-shift-f8": "editor::GoToPrevHunk",
"ctrl-enter": "assistant::InlineAssist",
"ctrl-:": "editor::ToggleInlayHints"
"ctrl-enter": "assistant::InlineAssist"
}
},
{
@@ -601,12 +595,14 @@
},
{
"context": "MessageEditor > Editor",
"use_key_equivalents": true,
"bindings": {
"enter": "assistant2::Chat"
}
},
{
"context": "ContextStrip",
"use_key_equivalents": true,
"bindings": {
"up": "assistant2::FocusUp",
"right": "assistant2::FocusRight",
@@ -684,8 +680,8 @@
"ctrl-delete": ["project_panel::Delete", { "skip_prompt": false }],
"alt-ctrl-r": "project_panel::RevealInFileManager",
"ctrl-shift-enter": "project_panel::OpenWithSystem",
"shift-find": "project_panel::NewSearchInDirectory",
"ctrl-shift-f": "project_panel::NewSearchInDirectory",
"shift-find": "project_panel::NewSearchInDirectory",
"shift-down": "menu::SelectNext",
"shift-up": "menu::SelectPrev",
"escape": "menu::Cancel"
@@ -699,32 +695,30 @@
},
{
"context": "GitPanel && !CommitEditor",
"use_key_equivalents": true,
"bindings": {
"escape": "git_panel::Close"
}
},
{
"context": "GitPanel && ChangesList",
"use_key_equivalents": true,
"bindings": {
"up": "menu::SelectPrev",
"down": "menu::SelectNext",
"enter": "menu::Confirm",
"space": "git::ToggleStaged",
"ctrl-space": "git::StageAll",
"ctrl-shift-space": "git::UnstageAll",
"tab": "git_panel::FocusEditor",
"shift-tab": "git_panel::FocusEditor",
"escape": "git_panel::ToggleFocus"
"ctrl-shift-space": "git::UnstageAll"
}
},
{
"context": "GitPanel > Editor",
"context": "GitPanel && CommitEditor > Editor",
"use_key_equivalents": true,
"bindings": {
"escape": "git_panel::FocusChanges",
"ctrl-enter": "git::Commit",
"tab": "git_panel::FocusChanges",
"shift-tab": "git_panel::FocusChanges",
"alt-up": "git_panel::FocusChanges"
"ctrl-enter": "git::CommitChanges",
"ctrl-shift-enter": "git::CommitAllChanges"
}
},
{
@@ -832,6 +826,7 @@
},
{
"context": "ZedPredictModal",
"use_key_equivalents": true,
"bindings": {
"escape": "menu::Cancel"
}
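
The hunks above switch the `editor::FoldAtLevel` bindings between two argument shapes: a bare integer and a `{ "level": N }` object. A minimal user keymap override using the bare-integer shape might look like the sketch below; the `"Editor"` context string is an assumption, and which shape a given Zed build accepts depends on the version.

```jsonc
// Hypothetical user keymap fragment; the action names and the bare-integer
// argument shape are copied from one side of the hunk above.
[
  {
    "context": "Editor",
    "bindings": {
      "ctrl-k ctrl-2": ["editor::FoldAtLevel", 2],
      "ctrl-k ctrl-0": "editor::FoldAll"
      // The other side of the hunk spells the same binding as:
      // "ctrl-k ctrl-2": ["editor::FoldAtLevel", { "level": 2 }]
    }
  }
]
```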

View File

@@ -39,8 +39,8 @@
"cmd-m": "zed::Minimize",
"fn-f": "zed::ToggleFullScreen",
"ctrl-cmd-f": "zed::ToggleFullScreen",
"ctrl-cmd-z": "zeta::RateCompletions",
"ctrl-cmd-i": "edit_prediction::ToggleMenu"
"ctrl-shift-z": "zeta::RateCompletions",
"ctrl-shift-i": "inline_completion::ToggleMenu"
}
},
{
@@ -132,8 +132,7 @@
"cmd-alt-g b": "editor::ToggleGitBlame",
"cmd-i": "editor::ShowSignatureHelp",
"ctrl-f12": "editor::GoToDeclaration",
"alt-ctrl-f12": "editor::GoToDeclarationSplit",
"ctrl-cmd-e": "editor::ToggleEditPrediction"
"alt-ctrl-f12": "editor::GoToDeclarationSplit"
}
},
{
@@ -156,19 +155,19 @@
}
},
{
"context": "Editor && mode == full && edit_prediction",
"context": "Editor && mode == full && inline_completion",
"use_key_equivalents": true,
"bindings": {
"alt-tab": "editor::NextEditPrediction",
"alt-shift-tab": "editor::PreviousEditPrediction",
"ctrl-cmd-right": "editor::AcceptPartialEditPrediction"
"alt-tab": "editor::NextInlineCompletion",
"alt-shift-tab": "editor::PreviousInlineCompletion",
"ctrl-cmd-right": "editor::AcceptPartialInlineCompletion"
}
},
{
"context": "Editor && !edit_prediction",
"context": "Editor && !inline_completion",
"use_key_equivalents": true,
"bindings": {
"alt-tab": "editor::ShowEditPrediction"
"alt-tab": "editor::ShowInlineCompletion"
}
},
{
@@ -350,7 +349,7 @@
"cmd-}": "pane::ActivateNextItem",
"ctrl-shift-pageup": "pane::SwapItemLeft",
"ctrl-shift-pagedown": "pane::SwapItemRight",
"cmd-w": ["pane::CloseActiveItem", { "close_pinned": false }],
"cmd-w": "pane::CloseActiveItem",
"alt-cmd-t": ["pane::CloseInactiveItems", { "close_pinned": false }],
"ctrl-alt-cmd-w": "workspace::CloseInactiveTabsAndPanes",
"cmd-k e": ["pane::CloseItemsToTheLeft", { "close_pinned": false }],
@@ -414,15 +413,15 @@
"cmd-k cmd-l": "editor::ToggleFold",
"cmd-k cmd-[": "editor::FoldRecursive",
"cmd-k cmd-]": "editor::UnfoldRecursive",
"cmd-k cmd-1": ["editor::FoldAtLevel", 1],
"cmd-k cmd-2": ["editor::FoldAtLevel", 2],
"cmd-k cmd-3": ["editor::FoldAtLevel", 3],
"cmd-k cmd-4": ["editor::FoldAtLevel", 4],
"cmd-k cmd-5": ["editor::FoldAtLevel", 5],
"cmd-k cmd-6": ["editor::FoldAtLevel", 6],
"cmd-k cmd-7": ["editor::FoldAtLevel", 7],
"cmd-k cmd-8": ["editor::FoldAtLevel", 8],
"cmd-k cmd-9": ["editor::FoldAtLevel", 9],
"cmd-k cmd-1": ["editor::FoldAtLevel", { "level": 1 }],
"cmd-k cmd-2": ["editor::FoldAtLevel", { "level": 2 }],
"cmd-k cmd-3": ["editor::FoldAtLevel", { "level": 3 }],
"cmd-k cmd-4": ["editor::FoldAtLevel", { "level": 4 }],
"cmd-k cmd-5": ["editor::FoldAtLevel", { "level": 5 }],
"cmd-k cmd-6": ["editor::FoldAtLevel", { "level": 6 }],
"cmd-k cmd-7": ["editor::FoldAtLevel", { "level": 7 }],
"cmd-k cmd-8": ["editor::FoldAtLevel", { "level": 8 }],
"cmd-k cmd-9": ["editor::FoldAtLevel", { "level": 9 }],
"cmd-k cmd-0": "editor::FoldAll",
"cmd-k cmd-j": "editor::UnfoldAll",
// Using `ctrl-space` in Zed requires disabling the macOS global shortcut.
@@ -510,14 +509,14 @@
"cmd-alt-s": "workspace::SaveAll",
"cmd-k m": "language_selector::Toggle",
"escape": "workspace::Unfollow",
"cmd-k cmd-left": "workspace::ActivatePaneLeft",
"cmd-k cmd-right": "workspace::ActivatePaneRight",
"cmd-k cmd-up": "workspace::ActivatePaneUp",
"cmd-k cmd-down": "workspace::ActivatePaneDown",
"cmd-k shift-left": "workspace::SwapPaneLeft",
"cmd-k shift-right": "workspace::SwapPaneRight",
"cmd-k shift-up": "workspace::SwapPaneUp",
"cmd-k shift-down": "workspace::SwapPaneDown",
"cmd-k cmd-left": ["workspace::ActivatePaneInDirection", "Left"],
"cmd-k cmd-right": ["workspace::ActivatePaneInDirection", "Right"],
"cmd-k cmd-up": ["workspace::ActivatePaneInDirection", "Up"],
"cmd-k cmd-down": ["workspace::ActivatePaneInDirection", "Down"],
"cmd-k shift-left": ["workspace::SwapPaneInDirection", "Left"],
"cmd-k shift-right": ["workspace::SwapPaneInDirection", "Right"],
"cmd-k shift-up": ["workspace::SwapPaneInDirection", "Up"],
"cmd-k shift-down": ["workspace::SwapPaneInDirection", "Down"],
"cmd-shift-x": "zed::Extensions"
}
},
@@ -581,17 +580,10 @@
}
},
{
"context": "Editor && edit_prediction",
"bindings": {
// Changing the modifier currently breaks accepting while you also have an LSP completions menu open
"alt-tab": "editor::AcceptEditPrediction"
}
},
{
"context": "Editor && edit_prediction && !edit_prediction_requires_modifier",
"context": "Editor && inline_completion && !showing_completions",
"use_key_equivalents": true,
"bindings": {
"tab": "editor::AcceptEditPrediction"
"tab": "editor::AcceptInlineCompletion"
}
},
{
@@ -620,7 +612,8 @@
"ctrl-alt-cmd-f": "workspace::FollowNextCollaborator",
// TODO: Move this to a dock open action
"cmd-shift-c": "collab_panel::ToggleFocus",
"cmd-alt-i": "zed::DebugElements"
"cmd-alt-i": "zed::DebugElements",
"ctrl-:": "editor::ToggleInlayHints"
}
},
{
@@ -633,8 +626,7 @@
"cmd-shift-e": "pane::RevealInProjectPanel",
"cmd-f8": "editor::GoToHunk",
"cmd-shift-f8": "editor::GoToPrevHunk",
"ctrl-enter": "assistant::InlineAssist",
"ctrl-:": "editor::ToggleInlayHints"
"ctrl-enter": "assistant::InlineAssist"
}
},
{
@@ -716,6 +708,13 @@
"space": "project_panel::Open"
}
},
{
"context": "GitPanel && !CommitEditor",
"use_key_equivalents": true,
"bindings": {
"escape": "git_panel::Close"
}
},
{
"context": "GitPanel && ChangesList",
"use_key_equivalents": true,
@@ -728,21 +727,17 @@
"space": "git::ToggleStaged",
"cmd-shift-space": "git::StageAll",
"ctrl-shift-space": "git::UnstageAll",
"alt-down": "git_panel::FocusEditor",
"tab": "git_panel::FocusEditor",
"shift-tab": "git_panel::FocusEditor",
"escape": "git_panel::ToggleFocus"
"alt-down": "git_panel::FocusEditor"
}
},
{
"context": "GitPanel > Editor",
"context": "GitPanel && CommitEditor > Editor",
"use_key_equivalents": true,
"bindings": {
"enter": "editor::Newline",
"cmd-enter": "git::Commit",
"tab": "git_panel::FocusChanges",
"shift-tab": "git_panel::FocusChanges",
"alt-up": "git_panel::FocusChanges"
"alt-up": "git_panel::FocusChanges",
"escape": "git_panel::FocusChanges",
"cmd-enter": "git::CommitChanges",
"cmd-alt-enter": "git::CommitAllChanges"
}
},
{

View File

@@ -2,8 +2,8 @@
{
"context": "VimControl && !menu",
"bindings": {
"i": ["vim::PushObject", { "around": false }],
"a": ["vim::PushObject", { "around": true }],
"i": ["vim::PushOperator", { "Object": { "around": false } }],
"a": ["vim::PushOperator", { "Object": { "around": true } }],
"left": "vim::Left",
"h": "vim::Left",
"backspace": "vim::Backspace",
@@ -54,10 +54,10 @@
// "b": "vim::PreviousSubwordStart",
// "e": "vim::NextSubwordEnd",
// "g e": "vim::PreviousSubwordEnd",
"shift-w": ["vim::NextWordStart", { "ignore_punctuation": true }],
"shift-e": ["vim::NextWordEnd", { "ignore_punctuation": true }],
"shift-b": ["vim::PreviousWordStart", { "ignore_punctuation": true }],
"g shift-e": ["vim::PreviousWordEnd", { "ignore_punctuation": true }],
"shift-w": ["vim::NextWordStart", { "ignorePunctuation": true }],
"shift-e": ["vim::NextWordEnd", { "ignorePunctuation": true }],
"shift-b": ["vim::PreviousWordStart", { "ignorePunctuation": true }],
"g shift-e": ["vim::PreviousWordEnd", { "ignorePunctuation": true }],
"/": "vim::Search",
"g /": "pane::DeploySearch",
"?": ["vim::Search", { "backwards": true }],
@@ -70,20 +70,20 @@
"[ {": ["vim::UnmatchedBackward", { "char": "{" }],
"] )": ["vim::UnmatchedForward", { "char": ")" }],
"[ (": ["vim::UnmatchedBackward", { "char": "(" }],
"f": ["vim::PushFindForward", { "before": false }],
"t": ["vim::PushFindForward", { "before": true }],
"shift-f": ["vim::PushFindBackward", { "after": false }],
"shift-t": ["vim::PushFindBackward", { "after": true }],
"m": "vim::PushMark",
"'": ["vim::PushJump", { "line": true }],
"`": ["vim::PushJump", { "line": false }],
"f": ["vim::PushOperator", { "FindForward": { "before": false } }],
"t": ["vim::PushOperator", { "FindForward": { "before": true } }],
"shift-f": ["vim::PushOperator", { "FindBackward": { "after": false } }],
"shift-t": ["vim::PushOperator", { "FindBackward": { "after": true } }],
"m": ["vim::PushOperator", "Mark"],
"'": ["vim::PushOperator", { "Jump": { "line": true } }],
"`": ["vim::PushOperator", { "Jump": { "line": false } }],
";": "vim::RepeatFind",
",": "vim::RepeatFindReversed",
"ctrl-o": "pane::GoBack",
"ctrl-i": "pane::GoForward",
"ctrl-]": "editor::GoToDefinition",
"escape": "vim::SwitchToNormalMode",
"ctrl-[": "vim::SwitchToNormalMode",
"escape": ["vim::SwitchMode", "Normal"],
"ctrl-[": ["vim::SwitchMode", "Normal"],
"v": "vim::ToggleVisual",
"shift-v": "vim::ToggleVisualLine",
"ctrl-g": "vim::ShowLocation",
@@ -102,7 +102,6 @@
"ctrl-e": "vim::LineDown",
"ctrl-y": "vim::LineUp",
// "g" commands
"g r": "vim::PushReplaceWithRegister",
"g g": "vim::StartOfDocument",
"g h": "editor::Hover",
"g t": "pane::ActivateNextItem",
@@ -125,17 +124,17 @@
"g .": "editor::ToggleCodeActions", // zed specific
"g shift-a": "editor::FindAllReferences", // zed specific
"g space": "editor::OpenExcerpts", // zed specific
"g *": ["vim::MoveToNext", { "partial_word": true }],
"g #": ["vim::MoveToPrev", { "partial_word": true }],
"g j": ["vim::Down", { "display_lines": true }],
"g down": ["vim::Down", { "display_lines": true }],
"g k": ["vim::Up", { "display_lines": true }],
"g up": ["vim::Up", { "display_lines": true }],
"g $": ["vim::EndOfLine", { "display_lines": true }],
"g end": ["vim::EndOfLine", { "display_lines": true }],
"g 0": ["vim::StartOfLine", { "display_lines": true }],
"g home": ["vim::StartOfLine", { "display_lines": true }],
"g ^": ["vim::FirstNonWhitespace", { "display_lines": true }],
"g *": ["vim::MoveToNext", { "partialWord": true }],
"g #": ["vim::MoveToPrev", { "partialWord": true }],
"g j": ["vim::Down", { "displayLines": true }],
"g down": ["vim::Down", { "displayLines": true }],
"g k": ["vim::Up", { "displayLines": true }],
"g up": ["vim::Up", { "displayLines": true }],
"g $": ["vim::EndOfLine", { "displayLines": true }],
"g end": ["vim::EndOfLine", { "displayLines": true }],
"g 0": ["vim::StartOfLine", { "displayLines": true }],
"g home": ["vim::StartOfLine", { "displayLines": true }],
"g ^": ["vim::FirstNonWhitespace", { "displayLines": true }],
"g v": "vim::RestoreVisualSelection",
"g ]": "editor::GoToDiagnostic",
"g [": "editor::GoToPrevDiagnostic",
@@ -147,7 +146,7 @@
"shift-l": "vim::WindowBottom",
"q": "vim::ToggleRecord",
"shift-q": "vim::ReplayLastRecording",
"@": "vim::PushReplayRegister",
"@": ["vim::PushOperator", "ReplayRegister"],
// z commands
"z enter": ["workspace::SendKeystrokes", "z t ^"],
"z -": ["workspace::SendKeystrokes", "z b ^"],
@@ -166,8 +165,8 @@
"z f": "editor::FoldSelectedRanges",
"z shift-m": "editor::FoldAll",
"z shift-r": "editor::UnfoldAll",
"shift-z shift-q": ["pane::CloseActiveItem", { "save_intent": "skip" }],
"shift-z shift-z": ["pane::CloseActiveItem", { "save_intent": "save_all" }],
"shift-z shift-q": ["pane::CloseActiveItem", { "saveIntent": "skip" }],
"shift-z shift-z": ["pane::CloseActiveItem", { "saveIntent": "saveAll" }],
// Count support
"1": ["vim::Number", 1],
"2": ["vim::Number", 2],
@@ -194,13 +193,13 @@
"escape": "editor::Cancel",
":": "command_palette::Toggle",
".": "vim::Repeat",
"c": "vim::PushChange",
"c": ["vim::PushOperator", "Change"],
"shift-c": "vim::ChangeToEndOfLine",
"d": "vim::PushDelete",
"d": ["vim::PushOperator", "Delete"],
"shift-d": "vim::DeleteToEndOfLine",
"shift-j": "vim::JoinLines",
"g shift-j": "vim::JoinLinesNoWhitespace",
"y": "vim::PushYank",
"y": ["vim::PushOperator", "Yank"],
"shift-y": "vim::YankLine",
"i": "vim::InsertBefore",
"shift-i": "vim::InsertFirstNonWhitespace",
@@ -217,19 +216,19 @@
"shift-p": ["vim::Paste", { "before": true }],
"u": "vim::Undo",
"ctrl-r": "vim::Redo",
"r": "vim::PushReplace",
"r": ["vim::PushOperator", "Replace"],
"s": "vim::Substitute",
"shift-s": "vim::SubstituteLine",
">": "vim::PushIndent",
"<": "vim::PushOutdent",
"=": "vim::PushAutoIndent",
"!": "vim::PushShellCommand",
"g u": "vim::PushLowercase",
"g shift-u": "vim::PushUppercase",
"g ~": "vim::PushOppositeCase",
"\"": "vim::PushRegister",
"g w": "vim::PushRewrap",
"g q": "vim::PushRewrap",
">": ["vim::PushOperator", "Indent"],
"<": ["vim::PushOperator", "Outdent"],
"=": ["vim::PushOperator", "AutoIndent"],
"!": ["vim::PushOperator", "ShellCommand"],
"g u": ["vim::PushOperator", "Lowercase"],
"g shift-u": ["vim::PushOperator", "Uppercase"],
"g ~": ["vim::PushOperator", "OppositeCase"],
"\"": ["vim::PushOperator", "Register"],
"g w": ["vim::PushOperator", "Rewrap"],
"g q": ["vim::PushOperator", "Rewrap"],
"ctrl-pagedown": "pane::ActivateNextItem",
"ctrl-pageup": "pane::ActivatePrevItem",
"insert": "vim::InsertBefore",
@@ -240,7 +239,7 @@
"[ d": "editor::GoToPrevDiagnostic",
"] c": "editor::GoToHunk",
"[ c": "editor::GoToPrevHunk",
"g c": "vim::PushToggleComments"
"g c": ["vim::PushOperator", "ToggleComments"]
}
},
{
@@ -265,14 +264,14 @@
"y": "vim::VisualYank",
"shift-y": "vim::VisualYankLine",
"p": "vim::Paste",
"shift-p": ["vim::Paste", { "preserve_clipboard": true }],
"shift-p": ["vim::Paste", { "preserveClipboard": true }],
"c": "vim::Substitute",
"s": "vim::Substitute",
"shift-r": "vim::SubstituteLine",
"shift-s": "vim::SubstituteLine",
"~": "vim::ChangeCase",
"*": ["vim::MoveToNext", { "partial_word": true }],
"#": ["vim::MoveToPrev", { "partial_word": true }],
"*": ["vim::MoveToNext", { "partialWord": true }],
"#": ["vim::MoveToPrev", { "partialWord": true }],
"ctrl-a": "vim::Increment",
"ctrl-x": "vim::Decrement",
"g ctrl-a": ["vim::Increment", { "step": true }],
@@ -283,19 +282,19 @@
"g shift-a": "vim::VisualInsertEndOfLine",
"shift-j": "vim::JoinLines",
"g shift-j": "vim::JoinLinesNoWhitespace",
"r": "vim::PushReplace",
"ctrl-c": "vim::SwitchToNormalMode",
"ctrl-[": "vim::SwitchToNormalMode",
"escape": "vim::SwitchToNormalMode",
"r": ["vim::PushOperator", "Replace"],
"ctrl-c": ["vim::SwitchMode", "Normal"],
"ctrl-[": ["vim::SwitchMode", "Normal"],
"escape": ["vim::SwitchMode", "Normal"],
">": "vim::Indent",
"<": "vim::Outdent",
"=": "vim::AutoIndent",
"!": "vim::ShellCommand",
"i": ["vim::PushObject", { "around": false }],
"a": ["vim::PushObject", { "around": true }],
"i": ["vim::PushOperator", { "Object": { "around": false } }],
"a": ["vim::PushOperator", { "Object": { "around": true } }],
"g c": "vim::ToggleComments",
"g q": "vim::Rewrap",
"\"": "vim::PushRegister",
"\"": ["vim::PushOperator", "Register"],
// tree-sitter related commands
"[ x": "editor::SelectLargerSyntaxNode",
"] x": "editor::SelectSmallerSyntaxNode"
@@ -310,19 +309,19 @@
"ctrl-x": null,
"ctrl-x ctrl-o": "editor::ShowCompletions",
"ctrl-x ctrl-a": "assistant::InlineAssist", // zed specific
"ctrl-x ctrl-c": "editor::ShowEditPrediction", // zed specific
"ctrl-x ctrl-c": "editor::ShowInlineCompletion", // zed specific
"ctrl-x ctrl-l": "editor::ToggleCodeActions", // zed specific
"ctrl-x ctrl-z": "editor::Cancel",
"ctrl-w": "editor::DeleteToPreviousWordStart",
"ctrl-u": "editor::DeleteToBeginningOfLine",
"ctrl-t": "vim::Indent",
"ctrl-d": "vim::Outdent",
"ctrl-k": ["vim::PushDigraph", {}],
"ctrl-v": ["vim::PushLiteral", {}],
"ctrl-k": ["vim::PushOperator", { "Digraph": {} }],
"ctrl-v": ["vim::PushOperator", { "Literal": {} }],
"ctrl-shift-v": "editor::Paste", // note: this is *very* similar to ctrl-v in vim, but ctrl-shift-v on linux is the typical shortcut for paste when ctrl-v is already in use.
"ctrl-q": ["vim::PushLiteral", {}],
"ctrl-shift-q": ["vim::PushLiteral", {}],
"ctrl-r": "vim::PushRegister",
"ctrl-q": ["vim::PushOperator", { "Literal": {} }],
"ctrl-shift-q": ["vim::PushOperator", { "Literal": {} }],
"ctrl-r": ["vim::PushOperator", "Register"],
"insert": "vim::ToggleReplace",
"ctrl-o": "vim::TemporaryNormal"
}
@@ -357,11 +356,11 @@
"ctrl-c": "vim::NormalBefore",
"ctrl-[": "vim::NormalBefore",
"escape": "vim::NormalBefore",
"ctrl-k": ["vim::PushDigraph", {}],
"ctrl-v": ["vim::PushLiteral", {}],
"ctrl-k": ["vim::PushOperator", { "Digraph": {} }],
"ctrl-v": ["vim::PushOperator", { "Literal": {} }],
"ctrl-shift-v": "editor::Paste", // note: this is *very* similar to ctrl-v in vim, but ctrl-shift-v on linux is the typical shortcut for paste when ctrl-v is already in use.
"ctrl-q": ["vim::PushLiteral", {}],
"ctrl-shift-q": ["vim::PushLiteral", {}],
"ctrl-q": ["vim::PushOperator", { "Literal": {} }],
"ctrl-shift-q": ["vim::PushOperator", { "Literal": {} }],
"backspace": "vim::UndoReplace",
"tab": "vim::Tab",
"enter": "vim::Enter",
@@ -376,15 +375,9 @@
"ctrl-c": "vim::ClearOperators",
"ctrl-[": "vim::ClearOperators",
"escape": "vim::ClearOperators",
"ctrl-k": ["vim::PushDigraph", {}],
"ctrl-v": ["vim::PushLiteral", {}],
"ctrl-q": ["vim::PushLiteral", {}]
}
},
{
"context": "Editor && vim_mode == waiting && (vim_operator == ys || vim_operator == cs)",
"bindings": {
"escape": "vim::SwitchToNormalMode"
"ctrl-k": ["vim::PushOperator", { "Digraph": {} }],
"ctrl-v": ["vim::PushOperator", { "Literal": {} }],
"ctrl-q": ["vim::PushOperator", { "Literal": {} }]
}
},
{
@@ -400,10 +393,10 @@
"context": "vim_operator == a || vim_operator == i || vim_operator == cs",
"bindings": {
"w": "vim::Word",
"shift-w": ["vim::Word", { "ignore_punctuation": true }],
"shift-w": ["vim::Word", { "ignorePunctuation": true }],
// Subword TextObject
// "w": "vim::Subword",
// "shift-w": ["vim::Subword", { "ignore_punctuation": true }],
// "shift-w": ["vim::Subword", { "ignorePunctuation": true }],
"t": "vim::Tag",
"s": "vim::Sentence",
"p": "vim::Paragraph",
@@ -426,10 +419,9 @@
">": "vim::AngleBrackets",
"a": "vim::Argument",
"i": "vim::IndentObj",
"shift-i": ["vim::IndentObj", { "include_below": true }],
"shift-i": ["vim::IndentObj", { "includeBelow": true }],
"f": "vim::Method",
"c": "vim::Class",
"e": "vim::EntireFile"
"c": "vim::Class"
}
},
{
@@ -437,14 +429,14 @@
"bindings": {
"c": "vim::CurrentLine",
"d": "editor::Rename", // zed specific
"s": ["vim::PushChangeSurrounds", {}]
"s": ["vim::PushOperator", { "ChangeSurrounds": {} }]
}
},
{
"context": "vim_operator == d",
"bindings": {
"d": "vim::CurrentLine",
"s": "vim::PushDeleteSurrounds",
"s": ["vim::PushOperator", "DeleteSurrounds"],
"o": "editor::ToggleSelectedDiffHunks", // "d o"
"p": "editor::RevertSelectedHunks" // "d p"
}
@@ -483,7 +475,7 @@
"context": "vim_operator == y",
"bindings": {
"y": "vim::CurrentLine",
"s": ["vim::PushAddSurrounds", {}]
"s": ["vim::PushOperator", { "AddSurrounds": {} }]
}
},
{
@@ -573,34 +565,34 @@
}
},
{
"context": "GitPanel || ProjectPanel || CollabPanel || OutlinePanel || ChatPanel || VimControl || EmptyPane || SharedScreen || MarkdownPreview || KeyContextView",
"context": "ProjectPanel || CollabPanel || OutlinePanel || ChatPanel || VimControl || EmptyPane || SharedScreen || MarkdownPreview || KeyContextView",
"bindings": {
// window related commands (ctrl-w X)
"ctrl-w": null,
"ctrl-w left": "workspace::ActivatePaneLeft",
"ctrl-w right": "workspace::ActivatePaneRight",
"ctrl-w up": "workspace::ActivatePaneUp",
"ctrl-w down": "workspace::ActivatePaneDown",
"ctrl-w ctrl-h": "workspace::ActivatePaneLeft",
"ctrl-w ctrl-l": "workspace::ActivatePaneRight",
"ctrl-w ctrl-k": "workspace::ActivatePaneUp",
"ctrl-w ctrl-j": "workspace::ActivatePaneDown",
"ctrl-w h": "workspace::ActivatePaneLeft",
"ctrl-w l": "workspace::ActivatePaneRight",
"ctrl-w k": "workspace::ActivatePaneUp",
"ctrl-w j": "workspace::ActivatePaneDown",
"ctrl-w shift-left": "workspace::SwapPaneLeft",
"ctrl-w shift-right": "workspace::SwapPaneRight",
"ctrl-w shift-up": "workspace::SwapPaneUp",
"ctrl-w shift-down": "workspace::SwapPaneDown",
"ctrl-w shift-h": "workspace::SwapPaneLeft",
"ctrl-w shift-l": "workspace::SwapPaneRight",
"ctrl-w shift-k": "workspace::SwapPaneUp",
"ctrl-w shift-j": "workspace::SwapPaneDown",
"ctrl-w >": "vim::ResizePaneRight",
"ctrl-w <": "vim::ResizePaneLeft",
"ctrl-w -": "vim::ResizePaneDown",
"ctrl-w +": "vim::ResizePaneUp",
"ctrl-w left": ["workspace::ActivatePaneInDirection", "Left"],
"ctrl-w right": ["workspace::ActivatePaneInDirection", "Right"],
"ctrl-w up": ["workspace::ActivatePaneInDirection", "Up"],
"ctrl-w down": ["workspace::ActivatePaneInDirection", "Down"],
"ctrl-w ctrl-h": ["workspace::ActivatePaneInDirection", "Left"],
"ctrl-w ctrl-l": ["workspace::ActivatePaneInDirection", "Right"],
"ctrl-w ctrl-k": ["workspace::ActivatePaneInDirection", "Up"],
"ctrl-w ctrl-j": ["workspace::ActivatePaneInDirection", "Down"],
"ctrl-w h": ["workspace::ActivatePaneInDirection", "Left"],
"ctrl-w l": ["workspace::ActivatePaneInDirection", "Right"],
"ctrl-w k": ["workspace::ActivatePaneInDirection", "Up"],
"ctrl-w j": ["workspace::ActivatePaneInDirection", "Down"],
"ctrl-w shift-left": ["workspace::SwapPaneInDirection", "Left"],
"ctrl-w shift-right": ["workspace::SwapPaneInDirection", "Right"],
"ctrl-w shift-up": ["workspace::SwapPaneInDirection", "Up"],
"ctrl-w shift-down": ["workspace::SwapPaneInDirection", "Down"],
"ctrl-w shift-h": ["workspace::SwapPaneInDirection", "Left"],
"ctrl-w shift-l": ["workspace::SwapPaneInDirection", "Right"],
"ctrl-w shift-k": ["workspace::SwapPaneInDirection", "Up"],
"ctrl-w shift-j": ["workspace::SwapPaneInDirection", "Down"],
"ctrl-w >": ["vim::ResizePane", "Widen"],
"ctrl-w <": ["vim::ResizePane", "Narrow"],
"ctrl-w -": ["vim::ResizePane", "Shorten"],
"ctrl-w +": ["vim::ResizePane", "Lengthen"],
"ctrl-w _": "vim::MaximizePane",
"ctrl-w =": "vim::ResetPaneSizes",
"ctrl-w g t": "pane::ActivateNextItem",
@@ -618,12 +610,10 @@
"ctrl-w shift-s": "pane::SplitHorizontal",
"ctrl-w ctrl-s": "pane::SplitHorizontal",
"ctrl-w s": "pane::SplitHorizontal",
"ctrl-w ctrl-c": "pane::CloseActiveItem",
"ctrl-w c": "pane::CloseActiveItem",
"ctrl-w ctrl-q": "pane::CloseActiveItem",
"ctrl-w q": "pane::CloseActiveItem",
"ctrl-w ctrl-a": "pane::CloseAllItems",
"ctrl-w a": "pane::CloseAllItems",
"ctrl-w ctrl-c": "pane::CloseAllItems",
"ctrl-w c": "pane::CloseAllItems",
"ctrl-w ctrl-q": "pane::CloseAllItems",
"ctrl-w q": "pane::CloseAllItems",
"ctrl-w ctrl-o": "workspace::CloseInactiveTabsAndPanes",
"ctrl-w o": "workspace::CloseInactiveTabsAndPanes",
"ctrl-w ctrl-n": "workspace::NewFileSplitHorizontal",
@@ -631,7 +621,7 @@
}
},
{
"context": "GitPanel || EmptyPane || SharedScreen || MarkdownPreview || KeyContextView || Welcome",
"context": "EmptyPane || SharedScreen || MarkdownPreview || KeyContextView || Welcome",
"bindings": {
":": "command_palette::Toggle",
"g /": "pane::DeploySearch"

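The vim keymap hunks above differ mainly in how operator-push actions are spelled: dedicated actions such as `vim::PushObject` and `vim::PushFindForward` on one side, and the `["vim::PushOperator", ...]` wrapper on the other. A minimal user override using the dedicated-action spelling could look like this sketch; which spelling a given Zed build accepts is an assumption.

```jsonc
// Hypothetical user keymap fragment; action names and argument shapes are
// copied verbatim from one side of the hunks above.
[
  {
    "context": "VimControl && !menu",
    "bindings": {
      "i": ["vim::PushObject", { "around": false }],
      "f": ["vim::PushFindForward", { "before": false }]
      // The other side of the hunk spells these with the wrapper form, e.g.:
      // "i": ["vim::PushOperator", { "Object": { "around": false } }]
    }
  }
]
```
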
View File

@@ -24,8 +24,8 @@
"base_keymap": "VSCode",
// Features that can be globally enabled or disabled
"features": {
// Which edit prediction provider to use.
"edit_prediction_provider": "copilot"
// Which inline completion provider to use.
"inline_completion_provider": "copilot"
},
// The name of a font to use for rendering text in the editor
"buffer_font_family": "Zed Plex Mono",
@@ -93,13 +93,6 @@
// workspace when the centered layout is used.
"right_padding": 0.2
},
// All settings related to the image viewer.
"image_viewer": {
// The unit for image file sizes.
// By default we're setting it to binary.
// The second option is decimal.
"unit": "binary"
},
// The key to use for adding multiple cursors
// Currently "alt" or "cmd_or_ctrl" (also aliased as
// "cmd" and "ctrl") are supported.
@@ -168,9 +161,9 @@
/// Whether to show the signature help after a completion or a bracket pair is inserted.
/// If `auto_signature_help` is enabled, this setting will also be treated as enabled.
"show_signature_help_after_edits": false,
/// Whether to show the edit predictions next to the completions provided by a language server.
/// Only has an effect if edit prediction provider supports it.
"show_edit_predictions_in_menu": true,
/// Whether to show the inline completions next to the completions provided by a language server.
/// Only has an effect if inline completion provider supports it.
"show_inline_completions_in_menu": true,
// Whether to show wrap guides (vertical rulers) in the editor.
// Setting this to true will show a guide at the 'preferred_line_length' value
// if 'soft_wrap' is set to 'preferred_line_length', and will show any
@@ -203,12 +196,12 @@
// Otherwise (when `true`), the closing characters are always skipped over and auto-removed
// no matter how they were inserted.
"always_treat_brackets_as_autoclosed": false,
// Controls whether edit predictions are shown immediately (true)
// or manually by triggering `editor::ShowEditPrediction` (false).
"show_edit_predictions": true,
// Controls whether edit predictions are shown in a given language scope.
// Controls whether inline completions are shown immediately (true)
// or manually by triggering `editor::ShowInlineCompletion` (false).
"show_inline_completions": true,
// Controls whether inline completions are shown in a given language scope.
// Example: ["string", "comment"]
"edit_predictions_disabled_in": [],
"inline_completions_disabled_in": [],
// Whether to show tabs and spaces in the editor.
// This setting can take four values:
//
@@ -781,25 +774,9 @@
// 2. Load direnv configuration through the shell hook, works for POSIX shells and fish.
// "load_direnv": "shell_hook"
"load_direnv": "direct",
"edit_predictions": {
// A list of globs representing files that edit predictions should be disabled for.
// There's a sensible default list of globs already included.
// Any addition to this list will be merged with the default list.
"disabled_globs": [
"**/.env*",
"**/*.pem",
"**/*.key",
"**/*.cert",
"**/*.crt",
"**/secrets.yml"
],
// When to show edit predictions previews in buffer.
// This setting takes two possible values:
// 1. Display inline when there are no language server completions available.
// "inline_preview": "auto"
// 2. Display inline when holding modifier key (alt by default).
// "inline_preview": "when_holding_modifier"
"inline_preview": "auto"
"inline_completions": {
// A list of globs representing files that inline completions should be disabled for.
"disabled_globs": [".env"]
},
// Settings specific to journaling
"journal": {

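The settings hunks above carry the same options under two key families (`edit_prediction_*` versus `inline_completion_*`). A minimal user `settings.json` override using the edit-prediction names from one side of the hunk might look like this sketch; which family a given Zed build accepts, and the example values, are assumptions drawn only from the comments shown above.

```jsonc
// Hypothetical user settings fragment; key names and values are taken from
// one side of the hunks above.
{
  "features": {
    "edit_prediction_provider": "copilot"
  },
  "show_edit_predictions": true,
  "edit_predictions_disabled_in": ["string", "comment"],
  "edit_predictions": {
    "disabled_globs": ["**/.env*", "**/secrets.yml"],
    "inline_preview": "auto"
  }
}
```
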
View File

@@ -81,7 +81,7 @@
"terminal.ansi.bright_green": "#4d6140ff",
"terminal.ansi.dim_green": "#d1e0bfff",
"terminal.ansi.yellow": "#dec184ff",
"terminal.ansi.bright_yellow": "#e5c07bff",
"terminal.ansi.bright_yellow": "#786441ff",
"terminal.ansi.dim_yellow": "#f1dfc1ff",
"terminal.ansi.blue": "#74ade8ff",
"terminal.ansi.bright_blue": "#385378ff",
@@ -457,7 +457,7 @@
"terminal.ansi.bright_green": "#b2cfa9ff",
"terminal.ansi.dim_green": "#354d2eff",
"terminal.ansi.yellow": "#dec184ff",
"terminal.ansi.bright_yellow": "#826221ff",
"terminal.ansi.bright_yellow": "#f1dfc1ff",
"terminal.ansi.dim_yellow": "#786441ff",
"terminal.ansi.blue": "#5c78e2ff",
"terminal.ansi.bright_blue": "#b5baf2ff",

View File

@@ -250,7 +250,7 @@ pub async fn stream_completion(
.map(|output| output.0)
}
/// <https://docs.anthropic.com/en/api/rate-limits#response-headers>
/// https://docs.anthropic.com/en/api/rate-limits#response-headers
#[derive(Debug)]
pub struct RateLimitInfo {
pub requests_limit: usize,
@@ -626,7 +626,7 @@ pub struct ApiError {
}
/// An Anthropic API error code.
/// <https://docs.anthropic.com/en/api/errors#http-errors>
/// https://docs.anthropic.com/en/api/errors#http-errors
#[derive(Debug, PartialEq, Eq, Clone, Copy, EnumString)]
#[strum(serialize_all = "snake_case")]
pub enum ApiErrorCode {

View File

@@ -3,7 +3,7 @@ use std::sync::LazyLock;
/// Returns whether the given country code is supported by Anthropic.
///
/// <https://www.anthropic.com/supported-countries>
/// https://www.anthropic.com/supported-countries
pub fn is_supported_country(country_code: &str) -> bool {
SUPPORTED_COUNTRIES.contains(&country_code)
}

View File

@@ -250,10 +250,10 @@ impl AssistantPanel {
)
.child(
PopoverMenu::new("assistant-panel-popover-menu")
.trigger_with_tooltip(
.trigger(
IconButton::new("menu", IconName::EllipsisVertical)
.icon_size(IconSize::Small),
Tooltip::text("Toggle Assistant Menu"),
.icon_size(IconSize::Small)
.tooltip(Tooltip::text("Toggle Assistant Menu")),
)
.menu(move |window, cx| {
let zoom_label = if _pane.read(cx).is_zoomed() {

View File

@@ -1595,22 +1595,22 @@ impl Render for PromptEditor {
IconButton::new("context", IconName::SettingsAlt)
.shape(IconButtonShape::Square)
.icon_size(IconSize::Small)
.icon_color(Color::Muted),
move |window, cx| {
Tooltip::with_meta(
format!(
"Using {}",
LanguageModelRegistry::read_global(cx)
.active_model()
.map(|model| model.name().0)
.unwrap_or_else(|| "No model selected".into()),
),
None,
"Change Model",
window,
cx,
)
},
.icon_color(Color::Muted)
.tooltip(move |window, cx| {
Tooltip::with_meta(
format!(
"Using {}",
LanguageModelRegistry::read_global(cx)
.active_model()
.map(|model| model.name().0)
.unwrap_or_else(|| "No model selected".into()),
),
None,
"Change Model",
window,
cx,
)
}),
))
.map(|el| {
let CodegenStatus::Error(error) = self.codegen.read(cx).status(cx) else {

View File

@@ -646,22 +646,22 @@ impl Render for PromptEditor {
IconButton::new("context", IconName::SettingsAlt)
.shape(IconButtonShape::Square)
.icon_size(IconSize::Small)
.icon_color(Color::Muted),
move |window, cx| {
Tooltip::with_meta(
format!(
"Using {}",
LanguageModelRegistry::read_global(cx)
.active_model()
.map(|model| model.name().0)
.unwrap_or_else(|| "No model selected".into()),
),
None,
"Change Model",
window,
cx,
)
},
.icon_color(Color::Muted)
.tooltip(move |window, cx| {
Tooltip::with_meta(
format!(
"Using {}",
LanguageModelRegistry::read_global(cx)
.active_model()
.map(|model| model.name().0)
.unwrap_or_else(|| "No model selected".into()),
),
None,
"Change Model",
window,
cx,
)
}),
))
.children(
if let CodegenStatus::Error(error) = &self.codegen.read(cx).status {

View File

@@ -74,16 +74,16 @@ impl Render for AssistantModelSelector {
.color(Color::Muted)
.size(IconSize::XSmall),
),
),
move |window, cx| {
Tooltip::for_action_in(
"Change Model",
&ToggleModelSelector,
&focus_handle,
window,
cx,
)
},
.tooltip(move |window, cx| {
Tooltip::for_action_in(
"Change Model",
&ToggleModelSelector,
&focus_handle,
window,
cx,
)
}),
)
.with_handle(self.menu_handle.clone())
}

View File

@@ -442,7 +442,7 @@ impl AssistantPanel {
fn handle_assistant_configuration_event(
&mut self,
_entity: &Entity<AssistantConfiguration>,
_model: &Entity<AssistantConfiguration>,
event: &AssistantConfigurationEvent,
window: &mut Window,
cx: &mut Context<Self>,
@@ -660,11 +660,11 @@ impl AssistantPanel {
.gap(DynamicSpacing::Base02.rems(cx))
.child(
PopoverMenu::new("assistant-toolbar-new-popover-menu")
.trigger_with_tooltip(
.trigger(
IconButton::new("new", IconName::Plus)
.icon_size(IconSize::Small)
.style(ButtonStyle::Subtle),
Tooltip::text("New…"),
.style(ButtonStyle::Subtle)
.tooltip(Tooltip::text("New…")),
)
.anchor(Corner::TopRight)
.with_handle(self.new_item_context_menu_handle.clone())
@@ -677,11 +677,11 @@ impl AssistantPanel {
)
.child(
PopoverMenu::new("assistant-toolbar-history-popover-menu")
.trigger_with_tooltip(
.trigger(
IconButton::new("open-history", IconName::HistoryRerun)
.icon_size(IconSize::Small)
.style(ButtonStyle::Subtle),
Tooltip::text("History…"),
.style(ButtonStyle::Subtle)
.tooltip(Tooltip::text("History…")),
)
.anchor(Corner::TopRight)
.with_handle(self.open_history_context_menu_handle.clone())

View File

@@ -79,8 +79,8 @@ impl ContextStore {
project.open_buffer(project_path.clone(), cx)
})?;
let buffer_entity = open_buffer_task.await?;
let buffer_id = this.update(&mut cx, |_, cx| buffer_entity.read(cx).remote_id())?;
let buffer_model = open_buffer_task.await?;
let buffer_id = this.update(&mut cx, |_, cx| buffer_model.read(cx).remote_id())?;
let already_included = this.update(&mut cx, |this, _cx| {
match this.will_include_buffer(buffer_id, &project_path.path) {
@@ -98,10 +98,10 @@ impl ContextStore {
}
let (buffer_info, text_task) = this.update(&mut cx, |_, cx| {
let buffer = buffer_entity.read(cx);
let buffer = buffer_model.read(cx);
collect_buffer_info_and_text(
project_path.path.clone(),
buffer_entity,
buffer_model,
buffer,
cx.to_async(),
)
@@ -119,18 +119,18 @@ impl ContextStore {
pub fn add_file_from_buffer(
&mut self,
buffer_entity: Entity<Buffer>,
buffer_model: Entity<Buffer>,
cx: &mut Context<Self>,
) -> Task<Result<()>> {
cx.spawn(|this, mut cx| async move {
let (buffer_info, text_task) = this.update(&mut cx, |_, cx| {
let buffer = buffer_entity.read(cx);
let buffer = buffer_model.read(cx);
let Some(file) = buffer.file() else {
return Err(anyhow!("Buffer has no path."));
};
Ok(collect_buffer_info_and_text(
file.path().clone(),
buffer_entity,
buffer_model,
buffer,
cx.to_async(),
))
@@ -207,11 +207,11 @@ impl ContextStore {
let mut buffer_infos = Vec::new();
let mut text_tasks = Vec::new();
this.update(&mut cx, |_, cx| {
for (path, buffer_entity) in files.into_iter().zip(buffers) {
let buffer_entity = buffer_entity?;
let buffer = buffer_entity.read(cx);
for (path, buffer_model) in files.into_iter().zip(buffers) {
let buffer_model = buffer_model?;
let buffer = buffer_model.read(cx);
let (buffer_info, text_task) =
collect_buffer_info_and_text(path, buffer_entity, buffer, cx.to_async());
collect_buffer_info_and_text(path, buffer_model, buffer, cx.to_async());
buffer_infos.push(buffer_info);
text_tasks.push(text_task);
}
@@ -429,7 +429,7 @@ pub enum FileInclusion {
// ContextBuffer without text.
struct BufferInfo {
buffer_entity: Entity<Buffer>,
buffer_model: Entity<Buffer>,
id: BufferId,
version: clock::Global,
}
@@ -437,7 +437,7 @@ struct BufferInfo {
fn make_context_buffer(info: BufferInfo, text: SharedString) -> ContextBuffer {
ContextBuffer {
id: info.id,
buffer: info.buffer_entity,
buffer: info.buffer_model,
version: info.version,
text,
}
@@ -445,13 +445,13 @@ fn make_context_buffer(info: BufferInfo, text: SharedString) -> ContextBuffer {
fn collect_buffer_info_and_text(
path: Arc<Path>,
buffer_entity: Entity<Buffer>,
buffer_model: Entity<Buffer>,
buffer: &Buffer,
cx: AsyncApp,
) -> (BufferInfo, Task<SharedString>) {
let buffer_info = BufferInfo {
id: buffer.remote_id(),
buffer_entity,
buffer_model,
version: buffer.version(),
};
// Important to collect version at the same time as content so that staleness logic is correct.

View File

@@ -92,8 +92,8 @@ impl ContextStrip {
let active_item = workspace.read(cx).active_item(cx)?;
let editor = active_item.to_any().downcast::<Editor>().ok()?.read(cx);
let active_buffer_entity = editor.buffer().read(cx).as_singleton()?;
let active_buffer = active_buffer_entity.read(cx);
let active_buffer_model = editor.buffer().read(cx).as_singleton()?;
let active_buffer = active_buffer_model.read(cx);
let path = active_buffer.file()?.path();
@@ -115,7 +115,7 @@ impl ContextStrip {
Some(SuggestedContext::File {
name,
buffer: active_buffer_entity.downgrade(),
buffer: active_buffer_model.downgrade(),
icon_path,
})
}
@@ -393,9 +393,9 @@ impl Render for ContextStrip {
.on_action(cx.listener(Self::remove_focused_context))
.on_action(cx.listener(Self::accept_suggested_context))
.on_children_prepainted({
let entity = cx.entity().downgrade();
let model = cx.entity().downgrade();
move |children_bounds, _window, cx| {
entity
model
.update(cx, |this, _| {
this.children_bounds = Some(children_bounds);
})
@@ -411,22 +411,22 @@ impl Render for ContextStrip {
Some(context_picker.clone())
})
.trigger_with_tooltip(
.trigger(
IconButton::new("add-context", IconName::Plus)
.icon_size(IconSize::Small)
.style(ui::ButtonStyle::Filled),
{
let focus_handle = focus_handle.clone();
move |window, cx| {
Tooltip::for_action_in(
"Add Context",
&ToggleContextPicker,
&focus_handle,
window,
cx,
)
}
},
.style(ui::ButtonStyle::Filled)
.tooltip({
let focus_handle = focus_handle.clone();
move |window, cx| {
Tooltip::for_action_in(
"Add Context",
&ToggleContextPicker,
&focus_handle,
window,
cx,
)
}
}),
)
.attach(gpui::Corner::TopLeft)
.anchor(gpui::Corner::BottomLeft)

View File

@@ -12,7 +12,6 @@ use language_model_selector::LanguageModelSelector;
use rope::Point;
use settings::Settings;
use std::time::Duration;
use text::Bias;
use theme::ThemeSettings;
use ui::{
prelude::*, ButtonLike, KeyBinding, PopoverMenu, PopoverMenuHandle, Switch, TintColor, Tooltip,
@@ -240,10 +239,7 @@ impl MessageEditor {
let snapshot = editor.buffer().read(cx).snapshot(cx);
let newest_cursor = editor.selections.newest::<Point>(cx).head();
if newest_cursor.column > 0 {
let behind_cursor = snapshot.clip_point(
Point::new(newest_cursor.row, newest_cursor.column - 1),
Bias::Left,
);
let behind_cursor = Point::new(newest_cursor.row, newest_cursor.column - 1);
let char_behind_cursor = snapshot.chars_at(behind_cursor).next();
if char_behind_cursor == Some('@') {
self.inline_context_picker_menu_handle.show(window, cx);
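A minimal, self-contained sketch of the check above, using a plain string in place of the editor snapshot (that substitution is an assumption for illustration only; one side of this hunk additionally clips the position with Bias::Left before reading the character):

// Look at the character just behind the cursor to decide whether the inline
// context picker should open.
fn char_behind_cursor(line: &str, cursor_col: usize) -> Option<char> {
    if cursor_col == 0 {
        return None;
    }
    // Walk chars rather than indexing bytes so the lookup stays on a character
    // boundary even when the line contains multi-byte text.
    line.chars().nth(cursor_col - 1)
}

fn main() {
    assert_eq!(char_behind_cursor("mention @", 9), Some('@'));
    assert_eq!(char_behind_cursor("", 0), None);
}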

View File

@@ -459,7 +459,7 @@ impl ContextEditor {
window: &mut Window,
cx: &mut Context<Self>,
) {
if self.editor.read(cx).has_visible_completions_menu() {
if self.editor.read(cx).has_active_completions_menu() {
return;
}
@@ -832,13 +832,12 @@ impl ContextEditor {
let render_block: RenderBlock = Arc::new({
let this = this.clone();
let patch_range = range.clone();
move |cx: &mut BlockContext| {
move |cx: &mut BlockContext<'_, '_>| {
let max_width = cx.max_width;
let gutter_width = cx.gutter_dimensions.full_width();
let block_id = cx.block_id;
let selected = cx.selected;
let window = &mut cx.window;
this.update(cx.app, |this, cx| {
this.update_in(cx, |this, window, cx| {
this.render_patch_block(
patch_range.clone(),
max_width,
@@ -2359,8 +2358,8 @@ impl ContextEditor {
.icon(IconName::Plus)
.icon_size(IconSize::Small)
.icon_color(Color::Muted)
.icon_position(IconPosition::Start),
Tooltip::text("Type / to insert via keyboard"),
.icon_position(IconPosition::Start)
.tooltip(Tooltip::text("Type / to insert via keyboard")),
)
}
@@ -3323,10 +3322,10 @@ impl Render for ContextEditorToolbarItem {
.color(Color::Muted)
.size(IconSize::XSmall),
),
),
move |window, cx| {
Tooltip::for_action("Change Model", &ToggleModelSelector, window, cx)
},
)
.tooltip(move |window, cx| {
Tooltip::for_action("Change Model", &ToggleModelSelector, window, cx)
}),
)
.with_handle(self.language_model_selector_menu_handle.clone()),
)

View File

@@ -31,11 +31,11 @@ use std::{
use util::{ResultExt, TryFutureExt};
pub(crate) fn init(client: &AnyProtoClient) {
client.add_entity_message_handler(ContextStore::handle_advertise_contexts);
client.add_entity_request_handler(ContextStore::handle_open_context);
client.add_entity_request_handler(ContextStore::handle_create_context);
client.add_entity_message_handler(ContextStore::handle_update_context);
client.add_entity_request_handler(ContextStore::handle_synchronize_contexts);
client.add_model_message_handler(ContextStore::handle_advertise_contexts);
client.add_model_request_handler(ContextStore::handle_open_context);
client.add_model_request_handler(ContextStore::handle_create_context);
client.add_model_message_handler(ContextStore::handle_update_context);
client.add_model_request_handler(ContextStore::handle_synchronize_contexts);
}
#[derive(Clone)]
@@ -144,9 +144,11 @@ impl ContextStore {
this.handle_project_changed(project.clone(), cx);
this.synchronize_contexts(cx);
this.register_context_server_handlers(cx);
this.reload(cx).detach_and_log_err(cx);
this
})?;
this.update(&mut cx, |this, cx| this.reload(cx))?
.await
.log_err();
Ok(this)
})
@@ -310,7 +312,7 @@ impl ContextStore {
.client
.subscribe_to_entity(remote_id)
.log_err()
.map(|subscription| subscription.set_entity(&cx.entity(), &mut cx.to_async()));
.map(|subscription| subscription.set_model(&cx.entity(), &mut cx.to_async()));
self.advertise_contexts(cx);
} else {
self.client_subscription = None;

View File

@@ -5,7 +5,7 @@ use assistant_slash_command::{AfterCompletion, SlashCommandLine, SlashCommandWor
use editor::{CompletionProvider, Editor};
use fuzzy::{match_strings, StringMatchCandidate};
use gpui::{App, Context, Entity, Task, WeakEntity, Window};
use language::{Anchor, Buffer, CompletionDocumentation, LanguageServerId, ToPoint};
use language::{Anchor, Buffer, Documentation, LanguageServerId, ToPoint};
use parking_lot::Mutex;
use project::CompletionIntent;
use rope::Point;
@@ -120,9 +120,7 @@ impl SlashCommandCompletionProvider {
});
Some(project::Completion {
old_range: name_range.clone(),
documentation: Some(CompletionDocumentation::SingleLine(
command.description(),
)),
documentation: Some(Documentation::SingleLine(command.description())),
new_text,
label: command.label(cx),
server_id: LanguageServerId(0),

View File

@@ -1,22 +1,17 @@
use std::sync::Arc;
use assistant_slash_command::SlashCommandWorkingSet;
use gpui::{AnyElement, AnyView, DismissEvent, SharedString, Task, WeakEntity};
use gpui::{AnyElement, DismissEvent, SharedString, Task, WeakEntity};
use picker::{Picker, PickerDelegate, PickerEditorPosition};
use ui::{prelude::*, ListItem, ListItemSpacing, PopoverMenu, PopoverTrigger, Tooltip};
use crate::context_editor::ContextEditor;
#[derive(IntoElement)]
pub(super) struct SlashCommandSelector<T, TT>
where
T: PopoverTrigger + ButtonCommon,
TT: Fn(&mut Window, &mut App) -> AnyView + 'static,
{
pub(super) struct SlashCommandSelector<T: PopoverTrigger> {
working_set: Arc<SlashCommandWorkingSet>,
active_context_editor: WeakEntity<ContextEditor>,
trigger: T,
tooltip: TT,
}
#[derive(Clone)]
@@ -53,22 +48,16 @@ pub(crate) struct SlashCommandDelegate {
selected_index: usize,
}
impl<T, TT> SlashCommandSelector<T, TT>
where
T: PopoverTrigger + ButtonCommon,
TT: Fn(&mut Window, &mut App) -> AnyView + 'static,
{
impl<T: PopoverTrigger> SlashCommandSelector<T> {
pub(crate) fn new(
working_set: Arc<SlashCommandWorkingSet>,
active_context_editor: WeakEntity<ContextEditor>,
trigger: T,
tooltip: TT,
) -> Self {
SlashCommandSelector {
working_set,
active_context_editor,
trigger,
tooltip,
}
}
}
@@ -252,11 +241,7 @@ impl PickerDelegate for SlashCommandDelegate {
}
}
impl<T, TT> RenderOnce for SlashCommandSelector<T, TT>
where
T: PopoverTrigger + ButtonCommon,
TT: Fn(&mut Window, &mut App) -> AnyView + 'static,
{
impl<T: PopoverTrigger> RenderOnce for SlashCommandSelector<T> {
fn render(self, window: &mut Window, cx: &mut App) -> impl IntoElement {
let all_models = self
.working_set
@@ -337,7 +322,7 @@ where
.ok();
PopoverMenu::new("model-switcher")
.menu(move |_window, _cx| Some(picker_view.clone()))
.trigger_with_tooltip(self.trigger, self.tooltip)
.trigger(self.trigger)
.attach(gpui::Corner::TopLeft)
.anchor(gpui::Corner::BottomLeft)
.offset(gpui::Point {

View File

@@ -434,7 +434,7 @@ pub struct LegacyAssistantSettingsContent {
pub default_open_ai_model: Option<OpenAiModel>,
/// OpenAI API base URL to use when creating new chats.
///
/// Default: <https://api.openai.com/v1>
/// Default: https://api.openai.com/v1
pub openai_api_url: Option<String>,
}

View File

@@ -323,14 +323,7 @@ fn collect_files(
)))?;
directory_stack.push(entry.path.clone());
} else {
// todo(windows)
// Potential bug: this assumes that the path separator is always `\` on Windows
let entry_name = format!(
"{}{}{}",
prefix_paths,
std::path::MAIN_SEPARATOR_STR,
&filename
);
let entry_name = format!("{}/{}", prefix_paths, &filename);
events_tx.unbounded_send(Ok(SlashCommandEvent::StartSection {
icon: IconName::Folder,
label: entry_name.clone().into(),
@@ -462,7 +455,6 @@ mod custom_path_matcher {
use std::{fmt::Debug as _, path::Path};
use globset::{Glob, GlobSet, GlobSetBuilder};
use util::paths::SanitizedPath;
#[derive(Clone, Debug, Default)]
pub struct PathMatcher {
@@ -489,7 +481,7 @@ mod custom_path_matcher {
pub fn new(globs: &[String]) -> Result<Self, globset::Error> {
let globs = globs
.into_iter()
.map(|glob| Glob::new(&SanitizedPath::from(glob).to_glob_string()))
.map(|glob| Glob::new(&glob))
.collect::<Result<Vec<_>, _>>()?;
let sources = globs.iter().map(|glob| glob.glob().to_owned()).collect();
let sources_with_trailing_slash = globs
@@ -515,9 +507,7 @@ mod custom_path_matcher {
.zip(self.sources_with_trailing_slash.iter())
.any(|(source, with_slash)| {
let as_bytes = other_path.as_os_str().as_encoded_bytes();
// todo(windows)
// Potential bug: this assumes that the path separator is always `\` on Windows
let with_slash = if source.ends_with(std::path::MAIN_SEPARATOR_STR) {
let with_slash = if source.ends_with("/") {
source.as_bytes()
} else {
with_slash.as_bytes()
@@ -579,7 +569,6 @@ mod test {
use serde_json::json;
use settings::SettingsStore;
use smol::stream::StreamExt;
use util::{path, separator};
use super::collect_files;
@@ -603,7 +592,7 @@ mod test {
let fs = FakeFs::new(cx.executor());
fs.insert_tree(
path!("/root"),
"/root",
json!({
"dir": {
"subdir": {
@@ -618,7 +607,7 @@ mod test {
)
.await;
let project = Project::test(fs, [path!("/root").as_ref()], cx).await;
let project = Project::test(fs, ["/root".as_ref()], cx).await;
let result_1 =
cx.update(|cx| collect_files(project.clone(), &["root/dir".to_string()], cx));
@@ -626,7 +615,7 @@ mod test {
.await
.unwrap();
assert!(result_1.text.starts_with(separator!("root/dir")));
assert!(result_1.text.starts_with("root/dir"));
// 4 files + 2 directories
assert_eq!(result_1.sections.len(), 6);
@@ -642,7 +631,7 @@ mod test {
cx.update(|cx| collect_files(project.clone(), &["root/dir*".to_string()], cx).boxed());
let result = SlashCommandOutput::from_event_stream(result).await.unwrap();
assert!(result.text.starts_with(separator!("root/dir")));
assert!(result.text.starts_with("root/dir"));
// 5 files + 2 directories
assert_eq!(result.sections.len(), 7);
@@ -656,7 +645,7 @@ mod test {
let fs = FakeFs::new(cx.executor());
fs.insert_tree(
path!("/zed"),
"/zed",
json!({
"assets": {
"dir1": {
@@ -681,7 +670,7 @@ mod test {
)
.await;
let project = Project::test(fs, [path!("/zed").as_ref()], cx).await;
let project = Project::test(fs, ["/zed".as_ref()], cx).await;
let result =
cx.update(|cx| collect_files(project.clone(), &["zed/assets/themes".to_string()], cx));
@@ -690,36 +679,27 @@ mod test {
.unwrap();
// Sanity check
assert!(result.text.starts_with(separator!("zed/assets/themes\n")));
assert!(result.text.starts_with("zed/assets/themes\n"));
assert_eq!(result.sections.len(), 7);
// Ensure that full file paths are included in the real output
assert!(result
.text
.contains(separator!("zed/assets/themes/andromeda/LICENSE")));
assert!(result
.text
.contains(separator!("zed/assets/themes/ayu/LICENSE")));
assert!(result
.text
.contains(separator!("zed/assets/themes/summercamp/LICENSE")));
assert!(result.text.contains("zed/assets/themes/andromeda/LICENSE"));
assert!(result.text.contains("zed/assets/themes/ayu/LICENSE"));
assert!(result.text.contains("zed/assets/themes/summercamp/LICENSE"));
assert_eq!(result.sections[5].label, "summercamp");
// Ensure that things are in descending order, with properly relativized paths
assert_eq!(
result.sections[0].label,
separator!("zed/assets/themes/andromeda/LICENSE")
"zed/assets/themes/andromeda/LICENSE"
);
assert_eq!(result.sections[1].label, "andromeda");
assert_eq!(
result.sections[2].label,
separator!("zed/assets/themes/ayu/LICENSE")
);
assert_eq!(result.sections[2].label, "zed/assets/themes/ayu/LICENSE");
assert_eq!(result.sections[3].label, "ayu");
assert_eq!(
result.sections[4].label,
separator!("zed/assets/themes/summercamp/LICENSE")
"zed/assets/themes/summercamp/LICENSE"
);
// Ensure that the project lasts until after the last await
@@ -732,7 +712,7 @@ mod test {
let fs = FakeFs::new(cx.executor());
fs.insert_tree(
path!("/zed"),
"/zed",
json!({
"assets": {
"themes": {
@@ -752,7 +732,7 @@ mod test {
)
.await;
let project = Project::test(fs, [path!("/zed").as_ref()], cx).await;
let project = Project::test(fs, ["/zed".as_ref()], cx).await;
let result =
cx.update(|cx| collect_files(project.clone(), &["zed/assets/themes".to_string()], cx));
@@ -760,29 +740,26 @@ mod test {
.await
.unwrap();
assert!(result.text.starts_with(separator!("zed/assets/themes\n")));
assert_eq!(
result.sections[0].label,
separator!("zed/assets/themes/LICENSE")
);
assert!(result.text.starts_with("zed/assets/themes\n"));
assert_eq!(result.sections[0].label, "zed/assets/themes/LICENSE");
assert_eq!(
result.sections[1].label,
separator!("zed/assets/themes/summercamp/LICENSE")
"zed/assets/themes/summercamp/LICENSE"
);
assert_eq!(
result.sections[2].label,
separator!("zed/assets/themes/summercamp/subdir/LICENSE")
"zed/assets/themes/summercamp/subdir/LICENSE"
);
assert_eq!(
result.sections[3].label,
separator!("zed/assets/themes/summercamp/subdir/subsubdir/LICENSE")
"zed/assets/themes/summercamp/subdir/subsubdir/LICENSE"
);
assert_eq!(result.sections[4].label, "subsubdir");
assert_eq!(result.sections[5].label, "subdir");
assert_eq!(result.sections[6].label, "summercamp");
assert_eq!(result.sections[7].label, separator!("zed/assets/themes"));
assert_eq!(result.sections[7].label, "zed/assets/themes");
assert_eq!(result.text, separator!("zed/assets/themes\n```zed/assets/themes/LICENSE\n1\n```\n\nsummercamp\n```zed/assets/themes/summercamp/LICENSE\n1\n```\n\nsubdir\n```zed/assets/themes/summercamp/subdir/LICENSE\n1\n```\n\nsubsubdir\n```zed/assets/themes/summercamp/subdir/subsubdir/LICENSE\n3\n```\n\n"));
assert_eq!(result.text, "zed/assets/themes\n```zed/assets/themes/LICENSE\n1\n```\n\nsummercamp\n```zed/assets/themes/summercamp/LICENSE\n1\n```\n\nsubdir\n```zed/assets/themes/summercamp/subdir/LICENSE\n1\n```\n\nsubsubdir\n```zed/assets/themes/summercamp/subdir/subsubdir/LICENSE\n3\n```\n\n");
// Ensure that the project lasts until after the last await
drop(project);
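The expected strings in these tests go through separator-aware helpers so they match what collect_files produces on Windows as well as Unix. A hypothetical stand-in for that helper (not the actual util::separator! macro, just the idea) looks like this:

use std::path::MAIN_SEPARATOR_STR;

// Rewrite forward slashes to the platform's path separator so expected
// strings compare equal on every OS.
fn sep(path: &str) -> String {
    path.replace('/', MAIN_SEPARATOR_STR)
}

fn main() {
    // No-op on Unix; yields "zed\assets\themes" on Windows.
    println!("{}", sep("zed/assets/themes"));
}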

View File

@@ -9,7 +9,7 @@ use release_channel::{AppVersion, ReleaseChannel};
use serde::Deserialize;
use smol::io::AsyncReadExt;
use util::ResultExt as _;
use workspace::notifications::{show_app_notification, NotificationId};
use workspace::notifications::NotificationId;
use workspace::Workspace;
use crate::update_notification::UpdateNotification;
@@ -17,7 +17,6 @@ use crate::update_notification::UpdateNotification;
actions!(auto_update, [ViewReleaseNotesLocally]);
pub fn init(cx: &mut App) {
notify_if_app_was_updated(cx);
cx.observe_new(|workspace: &mut Workspace, _window, _cx| {
workspace.register_action(|workspace, _: &ViewReleaseNotesLocally, window, cx| {
view_release_notes_locally(workspace, window, cx);
@@ -125,35 +124,31 @@ fn view_release_notes_locally(
.detach();
}
/// Shows a notification across all workspaces if an update was previously automatically installed
/// and this notification had not yet been shown.
pub fn notify_if_app_was_updated(cx: &mut App) {
let Some(updater) = AutoUpdater::get(cx) else {
return;
};
pub fn notify_of_any_new_update(window: &mut Window, cx: &mut Context<Workspace>) -> Option<()> {
let updater = AutoUpdater::get(cx)?;
let version = updater.read(cx).current_version();
let should_show_notification = updater.read(cx).should_show_update_notification(cx);
cx.spawn(|cx| async move {
cx.spawn_in(window, |workspace, mut cx| async move {
let should_show_notification = should_show_notification.await?;
if should_show_notification {
cx.update(|cx| {
show_app_notification(
workspace.update(&mut cx, |workspace, cx| {
let workspace_handle = workspace.weak_handle();
workspace.show_notification(
NotificationId::unique::<UpdateNotification>(),
cx,
move |cx| {
let workspace_handle = cx.entity().downgrade();
cx.new(|_| UpdateNotification::new(version, workspace_handle))
},
|cx| cx.new(|_| UpdateNotification::new(version, workspace_handle)),
);
updater.update(cx, |updater, cx| {
updater
.set_should_show_update_notification(false, cx)
.detach_and_log_err(cx);
})
});
})?;
}
anyhow::Ok(())
})
.detach();
None
}

View File

@@ -532,10 +532,6 @@ impl Room {
&self.local_participant
}
pub fn local_participant_user(&self, cx: &App) -> Option<Arc<User>> {
self.user_store.read(cx).current_user()
}
pub fn remote_participants(&self) -> &BTreeMap<u64, RemoteParticipant> {
&self.remote_participants
}

View File

@@ -588,10 +588,6 @@ impl Room {
&self.local_participant
}
pub fn local_participant_user(&self, cx: &App) -> Option<Arc<User>> {
self.user_store.read(cx).current_user()
}
pub fn remote_participants(&self) -> &BTreeMap<u64, RemoteParticipant> {
&self.remote_participants
}

View File

@@ -15,8 +15,8 @@ use util::ResultExt;
pub const ACKNOWLEDGE_DEBOUNCE_INTERVAL: Duration = Duration::from_millis(250);
pub(crate) fn init(client: &AnyProtoClient) {
client.add_entity_message_handler(ChannelBuffer::handle_update_channel_buffer);
client.add_entity_message_handler(ChannelBuffer::handle_update_channel_buffer_collaborators);
client.add_model_message_handler(ChannelBuffer::handle_update_channel_buffer);
client.add_model_message_handler(ChannelBuffer::handle_update_channel_buffer_collaborators);
}
pub struct ChannelBuffer {
@@ -81,7 +81,7 @@ impl ChannelBuffer {
collaborators: Default::default(),
acknowledge_task: None,
channel_id: channel.id,
subscription: Some(subscription.set_entity(&cx.entity(), &mut cx.to_async())),
subscription: Some(subscription.set_model(&cx.entity(), &mut cx.to_async())),
user_store,
channel_store,
};

View File

@@ -95,9 +95,9 @@ pub enum ChannelChatEvent {
impl EventEmitter<ChannelChatEvent> for ChannelChat {}
pub fn init(client: &AnyProtoClient) {
client.add_entity_message_handler(ChannelChat::handle_message_sent);
client.add_entity_message_handler(ChannelChat::handle_message_removed);
client.add_entity_message_handler(ChannelChat::handle_message_updated);
client.add_model_message_handler(ChannelChat::handle_message_sent);
client.add_model_message_handler(ChannelChat::handle_message_removed);
client.add_model_message_handler(ChannelChat::handle_message_updated);
}
impl ChannelChat {
@@ -132,7 +132,7 @@ impl ChannelChat {
last_acknowledged_id: None,
rng: StdRng::from_entropy(),
first_loaded_message_id: None,
_subscription: subscription.set_entity(&cx.entity(), &mut cx.to_async()),
_subscription: subscription.set_model(&cx.entity(), &mut cx.to_async()),
}
})?;
Self::handle_loaded_messages(

View File

@@ -39,8 +39,8 @@ pub struct ChannelStore {
channel_states: HashMap<ChannelId, ChannelState>,
outgoing_invites: HashSet<(ChannelId, UserId)>,
update_channels_tx: mpsc::UnboundedSender<proto::UpdateChannels>,
opened_buffers: HashMap<ChannelId, OpenEntityHandle<ChannelBuffer>>,
opened_chats: HashMap<ChannelId, OpenEntityHandle<ChannelChat>>,
opened_buffers: HashMap<ChannelId, OpenedModelHandle<ChannelBuffer>>,
opened_chats: HashMap<ChannelId, OpenedModelHandle<ChannelChat>>,
client: Arc<Client>,
did_subscribe: bool,
user_store: Entity<UserStore>,
@@ -142,7 +142,7 @@ pub enum ChannelEvent {
impl EventEmitter<ChannelEvent> for ChannelStore {}
enum OpenEntityHandle<E> {
enum OpenedModelHandle<E> {
Open(WeakEntity<E>),
Loading(Shared<Task<Result<Entity<E>, Arc<anyhow::Error>>>>),
}
@@ -292,7 +292,7 @@ impl ChannelStore {
pub fn has_open_channel_buffer(&self, channel_id: ChannelId, _cx: &App) -> bool {
if let Some(buffer) = self.opened_buffers.get(&channel_id) {
if let OpenEntityHandle::Open(buffer) = buffer {
if let OpenedModelHandle::Open(buffer) = buffer {
return buffer.upgrade().is_some();
}
}
@@ -453,7 +453,7 @@ impl ChannelStore {
fn open_channel_resource<T, F, Fut>(
&mut self,
channel_id: ChannelId,
get_map: fn(&mut Self) -> &mut HashMap<ChannelId, OpenEntityHandle<T>>,
get_map: fn(&mut Self) -> &mut HashMap<ChannelId, OpenedModelHandle<T>>,
load: F,
cx: &mut Context<Self>,
) -> Task<Result<Entity<T>>>
@@ -465,15 +465,15 @@ impl ChannelStore {
let task = loop {
match get_map(self).entry(channel_id) {
hash_map::Entry::Occupied(e) => match e.get() {
OpenEntityHandle::Open(entity) => {
if let Some(entity) = entity.upgrade() {
break Task::ready(Ok(entity)).shared();
OpenedModelHandle::Open(model) => {
if let Some(model) = model.upgrade() {
break Task::ready(Ok(model)).shared();
} else {
get_map(self).remove(&channel_id);
continue;
}
}
OpenEntityHandle::Loading(task) => {
OpenedModelHandle::Loading(task) => {
break task.clone();
}
},
@@ -490,7 +490,7 @@ impl ChannelStore {
})
.shared();
e.insert(OpenEntityHandle::Loading(task.clone()));
e.insert(OpenedModelHandle::Loading(task.clone()));
cx.spawn({
let task = task.clone();
move |this, mut cx| async move {
@@ -499,7 +499,7 @@ impl ChannelStore {
Ok(model) => {
get_map(this).insert(
channel_id,
OpenEntityHandle::Open(model.downgrade()),
OpenedModelHandle::Open(model.downgrade()),
);
}
Err(_) => {
@@ -900,7 +900,7 @@ impl ChannelStore {
self.disconnect_channel_buffers_task.take();
for chat in self.opened_chats.values() {
if let OpenEntityHandle::Open(chat) = chat {
if let OpenedModelHandle::Open(chat) = chat {
if let Some(chat) = chat.upgrade() {
chat.update(cx, |chat, cx| {
chat.rejoin(cx);
@@ -911,7 +911,7 @@ impl ChannelStore {
let mut buffer_versions = Vec::new();
for buffer in self.opened_buffers.values() {
if let OpenEntityHandle::Open(buffer) = buffer {
if let OpenedModelHandle::Open(buffer) = buffer {
if let Some(buffer) = buffer.upgrade() {
let channel_buffer = buffer.read(cx);
let buffer = channel_buffer.buffer().read(cx);
@@ -937,7 +937,7 @@ impl ChannelStore {
this.update(&mut cx, |this, cx| {
this.opened_buffers.retain(|_, buffer| match buffer {
OpenEntityHandle::Open(channel_buffer) => {
OpenedModelHandle::Open(channel_buffer) => {
let Some(channel_buffer) = channel_buffer.upgrade() else {
return false;
};
@@ -998,7 +998,7 @@ impl ChannelStore {
false
})
}
OpenEntityHandle::Loading(_) => true,
OpenedModelHandle::Loading(_) => true,
});
})
.ok();
@@ -1018,7 +1018,7 @@ impl ChannelStore {
if let Some(this) = this.upgrade() {
this.update(&mut cx, |this, cx| {
for (_, buffer) in this.opened_buffers.drain() {
if let OpenEntityHandle::Open(buffer) = buffer {
if let OpenedModelHandle::Open(buffer) = buffer {
if let Some(buffer) = buffer.upgrade() {
buffer.update(cx, |buffer, cx| buffer.disconnect(cx));
}
@@ -1082,7 +1082,7 @@ impl ChannelStore {
{
continue;
}
if let Some(OpenEntityHandle::Open(buffer)) =
if let Some(OpenedModelHandle::Open(buffer)) =
self.opened_buffers.remove(&channel_id)
{
if let Some(buffer) = buffer.upgrade() {
@@ -1098,7 +1098,7 @@ impl ChannelStore {
let channel_changed = index.insert(channel);
if channel_changed {
if let Some(OpenEntityHandle::Open(buffer)) = self.opened_buffers.get(&id) {
if let Some(OpenedModelHandle::Open(buffer)) = self.opened_buffers.get(&id) {
if let Some(buffer) = buffer.upgrade() {
buffer.update(cx, ChannelBuffer::channel_changed);
}
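The handle cache in this hunk stores either a weak reference to an already-open channel resource or a marker for a load that is still in flight. A stripped-down sketch of the lookup half, using std Arc/Weak in place of gpui entities and a plain marker in place of the shared task (both substitutions are assumptions for illustration):

use std::collections::HashMap;
use std::sync::{Arc, Weak};

// An entry is either a weak handle to an open resource or a note that a load
// is in flight (the real code stores a Shared<Task<...>> in the Loading arm).
enum OpenHandle<T> {
    Open(Weak<T>),
    Loading,
}

fn lookup<T>(map: &mut HashMap<u64, OpenHandle<T>>, id: u64) -> Option<Arc<T>> {
    let upgraded = match map.get(&id) {
        Some(OpenHandle::Open(weak)) => weak.upgrade(),
        // The real code awaits the shared task here instead of giving up.
        Some(OpenHandle::Loading) => return None,
        None => return None,
    };
    if upgraded.is_none() {
        // The resource was dropped; evict the stale entry so a fresh load can start.
        map.remove(&id);
    }
    upgraded
}

fn main() {
    let mut cache: HashMap<u64, OpenHandle<String>> = HashMap::new();
    let value = Arc::new("channel-buffer".to_string());
    cache.insert(1, OpenHandle::Open(Arc::downgrade(&value)));
    assert!(lookup(&mut cache, 1).is_some());
}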

View File

@@ -1,5 +1,3 @@
use std::process::Command;
fn main() {
if std::env::var("ZED_UPDATE_EXPLANATION").is_ok() {
println!(r#"cargo:rustc-cfg=feature="no-bundled-uninstall""#);
@@ -10,18 +8,4 @@ fn main() {
// Weakly link ScreenCaptureKit to ensure can be used on macOS 10.15+.
println!("cargo:rustc-link-arg=-Wl,-weak_framework,ScreenCaptureKit");
}
// Populate git sha environment variable if git is available
println!("cargo:rerun-if-changed=../../.git/logs/HEAD");
if let Some(output) = Command::new("git")
.args(["rev-parse", "HEAD"])
.output()
.ok()
.filter(|output| output.status.success())
{
let git_sha = String::from_utf8_lossy(&output.stdout);
let git_sha = git_sha.trim();
println!("cargo:rustc-env=ZED_COMMIT_SHA={git_sha}");
}
}
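The build-script block above injects the current commit hash into the compile-time environment; the CLI hunk further down reads it back with option_env!. A small sketch of the consumer side (assuming the build script ran in the same workspace):

// option_env! (rather than env!) keeps the build working when git was
// unavailable and ZED_COMMIT_SHA was never emitted.
fn commit_suffix() -> String {
    match option_env!("ZED_COMMIT_SHA") {
        Some(sha) => format!(" {sha}"),
        None => String::new(),
    }
}

fn main() {
    println!("version{}", commit_suffix());
}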

View File

@@ -33,19 +33,7 @@ trait InstalledApp {
#[command(
name = "zed",
disable_version_flag = true,
before_help = "The Zed CLI binary.
This CLI is a separate binary that invokes Zed.
Examples:
`zed`
Simply opens Zed
`zed --foreground`
Runs in foreground (shows all logs)
`zed path-to-your-project`
Open your project in Zed
`zed -n path-to-file `
Open file/folder in a new window",
after_help = "To read from stdin, append '-', e.g. 'ps axf | zed -'"
after_help = "To read from stdin, append '-' (e.g. 'ps axf | zed -')"
)]
struct Args {
/// Wait for all of the given paths to be opened/closed before exiting.
@@ -57,9 +45,10 @@ struct Args {
/// Create a new workspace
#[arg(short, long, overrides_with = "add")]
new: bool,
/// The paths to open in Zed (space-separated).
/// A sequence of space-separated paths that you want to open.
///
/// Use `path:line:column` syntax to open a file at the given line and column.
/// Use `path:line:row` syntax to open a file at a specific location.
/// Non-existing paths and directories will ignore `:line:row` suffix.
paths_with_position: Vec<String>,
/// Print Zed's version and the app path.
#[arg(short, long)]
@@ -339,17 +328,13 @@ mod linux {
impl InstalledApp for App {
fn zed_version_string(&self) -> String {
format!(
"Zed {}{}{} {}",
"Zed {}{} {}",
if *RELEASE_CHANNEL == "stable" {
"".to_string()
} else {
format!("{} ", *RELEASE_CHANNEL)
format!(" {} ", *RELEASE_CHANNEL)
},
option_env!("RELEASE_VERSION").unwrap_or_default(),
match option_env!("ZED_COMMIT_SHA") {
Some(commit_sha) => format!(" {commit_sha} "),
None => "".to_string(),
},
self.0.display(),
)
}

View File

@@ -146,8 +146,6 @@ pub fn init_settings(cx: &mut App) {
}
pub fn init(client: &Arc<Client>, cx: &mut App) {
let _ = rustls::crypto::aws_lc_rs::default_provider().install_default();
let client = Arc::downgrade(client);
cx.on_action({
let client = client.clone();
@@ -381,7 +379,7 @@ pub struct PendingEntitySubscription<T: 'static> {
}
impl<T: 'static> PendingEntitySubscription<T> {
pub fn set_entity(mut self, entity: &Entity<T>, cx: &AsyncApp) -> Subscription {
pub fn set_model(mut self, model: &Entity<T>, cx: &AsyncApp) -> Subscription {
self.consumed = true;
let mut handlers = self.client.handler_set.lock();
let id = (TypeId::of::<T>(), self.remote_id);
@@ -394,7 +392,7 @@ impl<T: 'static> PendingEntitySubscription<T> {
handlers.entities_by_type_and_remote_id.insert(
id,
EntityMessageSubscriber::Entity {
handle: entity.downgrade().into(),
handle: model.downgrade().into(),
},
);
drop(handlers);
@@ -688,8 +686,8 @@ impl Client {
H: 'static + Sync + Fn(Entity<E>, TypedEnvelope<M>, AsyncApp) -> F + Send + Sync,
F: 'static + Future<Output = Result<()>>,
{
self.add_message_handler_impl(entity, move |entity, message, _, cx| {
handler(entity, message, cx)
self.add_message_handler_impl(entity, move |model, message, _, cx| {
handler(model, message, cx)
})
}
@@ -711,7 +709,7 @@ impl Client {
let message_type_id = TypeId::of::<M>();
let mut state = self.handler_set.lock();
state
.entities_by_message_type
.models_by_message_type
.insert(message_type_id, entity.into());
let prev_handler = state.message_handlers.insert(
@@ -740,7 +738,7 @@ impl Client {
pub fn add_request_handler<M, E, H, F>(
self: &Arc<Self>,
entity: WeakEntity<E>,
model: WeakEntity<E>,
handler: H,
) -> Subscription
where
@@ -749,7 +747,7 @@ impl Client {
H: 'static + Sync + Fn(Entity<E>, TypedEnvelope<M>, AsyncApp) -> F + Send + Sync,
F: 'static + Future<Output = Result<M::Response>>,
{
self.add_message_handler_impl(entity, move |handle, envelope, this, cx| {
self.add_message_handler_impl(model, move |handle, envelope, this, cx| {
Self::respond_to_request(envelope.receipt(), handler(handle, envelope, cx), this)
})
}
@@ -1133,8 +1131,15 @@ impl Client {
for error in root_certs.errors {
log::warn!("error loading native certs: {:?}", error);
}
root_store.add_parsable_certificates(root_certs.certs);
root_store.add_parsable_certificates(
&root_certs
.certs
.into_iter()
.map(|cert| cert.as_ref().to_owned())
.collect::<Vec<_>>(),
);
rustls::ClientConfig::builder()
.with_safe_defaults()
.with_root_certificates(root_store)
.with_no_client_auth()
};
@@ -1943,9 +1948,9 @@ mod tests {
let (done_tx1, done_rx1) = smol::channel::unbounded();
let (done_tx2, done_rx2) = smol::channel::unbounded();
AnyProtoClient::from(client.clone()).add_entity_message_handler(
move |entity: Entity<TestEntity>, _: TypedEnvelope<proto::JoinProject>, mut cx| {
match entity.update(&mut cx, |entity, _| entity.id).unwrap() {
AnyProtoClient::from(client.clone()).add_model_message_handler(
move |model: Entity<TestModel>, _: TypedEnvelope<proto::JoinProject>, mut cx| {
match model.update(&mut cx, |model, _| model.id).unwrap() {
1 => done_tx1.try_send(()).unwrap(),
2 => done_tx2.try_send(()).unwrap(),
_ => unreachable!(),
@@ -1953,15 +1958,15 @@ mod tests {
async { Ok(()) }
},
);
let entity1 = cx.new(|_| TestEntity {
let model1 = cx.new(|_| TestModel {
id: 1,
subscription: None,
});
let entity2 = cx.new(|_| TestEntity {
let model2 = cx.new(|_| TestModel {
id: 2,
subscription: None,
});
let entity3 = cx.new(|_| TestEntity {
let model3 = cx.new(|_| TestModel {
id: 3,
subscription: None,
});
@@ -1969,17 +1974,17 @@ mod tests {
let _subscription1 = client
.subscribe_to_entity(1)
.unwrap()
.set_entity(&entity1, &mut cx.to_async());
.set_model(&model1, &mut cx.to_async());
let _subscription2 = client
.subscribe_to_entity(2)
.unwrap()
.set_entity(&entity2, &mut cx.to_async());
.set_model(&model2, &mut cx.to_async());
// Ensure dropping a subscription for the same entity type still allows receiving of
// messages for other entity IDs of the same type.
let subscription3 = client
.subscribe_to_entity(3)
.unwrap()
.set_entity(&entity3, &mut cx.to_async());
.set_model(&model3, &mut cx.to_async());
drop(subscription3);
server.send(proto::JoinProject { project_id: 1 });
@@ -2001,11 +2006,11 @@ mod tests {
});
let server = FakeServer::for_client(user_id, &client, cx).await;
let entity = cx.new(|_| TestEntity::default());
let model = cx.new(|_| TestModel::default());
let (done_tx1, _done_rx1) = smol::channel::unbounded();
let (done_tx2, done_rx2) = smol::channel::unbounded();
let subscription1 = client.add_message_handler(
entity.downgrade(),
model.downgrade(),
move |_, _: TypedEnvelope<proto::Ping>, _| {
done_tx1.try_send(()).unwrap();
async { Ok(()) }
@@ -2013,7 +2018,7 @@ mod tests {
);
drop(subscription1);
let _subscription2 = client.add_message_handler(
entity.downgrade(),
model.downgrade(),
move |_, _: TypedEnvelope<proto::Ping>, _| {
done_tx2.try_send(()).unwrap();
async { Ok(()) }
@@ -2036,27 +2041,27 @@ mod tests {
});
let server = FakeServer::for_client(user_id, &client, cx).await;
let entity = cx.new(|_| TestEntity::default());
let model = cx.new(|_| TestModel::default());
let (done_tx, done_rx) = smol::channel::unbounded();
let subscription = client.add_message_handler(
entity.clone().downgrade(),
move |entity: Entity<TestEntity>, _: TypedEnvelope<proto::Ping>, mut cx| {
entity
.update(&mut cx, |entity, _| entity.subscription.take())
model.clone().downgrade(),
move |model: Entity<TestModel>, _: TypedEnvelope<proto::Ping>, mut cx| {
model
.update(&mut cx, |model, _| model.subscription.take())
.unwrap();
done_tx.try_send(()).unwrap();
async { Ok(()) }
},
);
entity.update(cx, |entity, _| {
entity.subscription = Some(subscription);
model.update(cx, |model, _| {
model.subscription = Some(subscription);
});
server.send(proto::Ping {});
done_rx.recv().await.unwrap();
}
#[derive(Default)]
struct TestEntity {
struct TestModel {
id: usize,
subscription: Option<Subscription>,
}

View File

@@ -3,6 +3,7 @@ mod event_coalescer;
use crate::TelemetrySettings;
use anyhow::Result;
use clock::SystemClock;
use collections::{HashMap, HashSet};
use futures::channel::mpsc;
use futures::{Future, StreamExt};
use gpui::{App, BackgroundExecutor, Task};
@@ -11,13 +12,14 @@ use parking_lot::Mutex;
use release_channel::ReleaseChannel;
use settings::{Settings, SettingsStore};
use sha2::{Digest, Sha256};
use std::collections::{HashMap, HashSet};
use std::fs::File;
use std::io::Write;
use std::sync::LazyLock;
use std::time::Instant;
use std::{env, mem, path::PathBuf, sync::Arc, time::Duration};
use telemetry_events::{AssistantEvent, AssistantPhase, Event, EventRequestBody, EventWrapper};
use telemetry_events::{
AppEvent, AssistantEvent, AssistantPhase, EditEvent, Event, EventRequestBody, EventWrapper,
};
use util::{ResultExt, TryFutureExt};
use worktree::{UpdatedEntriesSet, WorktreeId};
@@ -283,7 +285,7 @@ impl Telemetry {
// TestAppContext ends up calling this function on shutdown and it panics when trying to find the TelemetrySettings
#[cfg(not(any(test, feature = "test-support")))]
fn shutdown_telemetry(self: &Arc<Self>) -> impl Future<Output = ()> {
telemetry::event!("App Closed");
self.report_app_event("close".to_string());
// TODO: close final edit period and make sure it's sent
Task::ready(())
}
@@ -353,23 +355,30 @@ impl Telemetry {
);
}
pub fn report_app_event(self: &Arc<Self>, operation: String) -> Event {
let event = Event::App(AppEvent { operation });
self.report_event(event.clone());
event
}
pub fn log_edit_event(self: &Arc<Self>, environment: &'static str, is_via_ssh: bool) {
let mut state = self.state.lock();
let period_data = state.event_coalescer.log_event(environment);
drop(state);
if let Some((start, end, environment)) = period_data {
let duration = end
.saturating_duration_since(start)
.min(Duration::from_secs(60 * 60 * 24))
.as_millis() as i64;
let event = Event::Edit(EditEvent {
duration: end
.saturating_duration_since(start)
.min(Duration::from_secs(60 * 60 * 24))
.as_millis() as i64,
environment: environment.to_string(),
is_via_ssh,
});
telemetry::event!(
"Editor Edited",
duration = duration,
environment = environment.to_string(),
is_via_ssh = is_via_ssh
);
self.report_event(event);
}
}
@@ -413,8 +422,9 @@ impl Telemetry {
.collect()
};
// Done on purpose to avoid calling `self.state.lock()` multiple times
for project_type_name in project_type_names {
telemetry::event!("Project Opened", project_type = project_type_name);
self.report_app_event(format!("open {} project", project_type_name));
}
}
@@ -580,7 +590,6 @@ mod tests {
use clock::FakeSystemClock;
use gpui::TestAppContext;
use http_client::FakeHttpClient;
use telemetry_events::FlexibleEvent;
#[gpui::test]
fn test_telemetry_flush_on_max_queue_size(cx: &mut TestAppContext) {
@@ -600,17 +609,15 @@ mod tests {
assert!(is_empty_state(&telemetry));
let first_date_time = clock.utc_now();
let event_properties = HashMap::from_iter([(
"test_key".to_string(),
serde_json::Value::String("test_value".to_string()),
)]);
let operation = "test".to_string();
let event = FlexibleEvent {
event_type: "test".to_string(),
event_properties,
};
telemetry.report_event(Event::Flexible(event.clone()));
let event = telemetry.report_app_event(operation.clone());
assert_eq!(
event,
Event::App(AppEvent {
operation: operation.clone(),
})
);
assert_eq!(telemetry.state.lock().events_queue.len(), 1);
assert!(telemetry.state.lock().flush_events_task.is_some());
assert_eq!(
@@ -620,7 +627,13 @@ mod tests {
clock.advance(Duration::from_millis(100));
telemetry.report_event(Event::Flexible(event.clone()));
let event = telemetry.report_app_event(operation.clone());
assert_eq!(
event,
Event::App(AppEvent {
operation: operation.clone(),
})
);
assert_eq!(telemetry.state.lock().events_queue.len(), 2);
assert!(telemetry.state.lock().flush_events_task.is_some());
assert_eq!(
@@ -630,7 +643,13 @@ mod tests {
clock.advance(Duration::from_millis(100));
telemetry.report_event(Event::Flexible(event.clone()));
let event = telemetry.report_app_event(operation.clone());
assert_eq!(
event,
Event::App(AppEvent {
operation: operation.clone(),
})
);
assert_eq!(telemetry.state.lock().events_queue.len(), 3);
assert!(telemetry.state.lock().flush_events_task.is_some());
assert_eq!(
@@ -641,7 +660,14 @@ mod tests {
clock.advance(Duration::from_millis(100));
// Adding a 4th event should cause a flush
telemetry.report_event(Event::Flexible(event));
let event = telemetry.report_app_event(operation.clone());
assert_eq!(
event,
Event::App(AppEvent {
operation: operation.clone(),
})
);
assert!(is_empty_state(&telemetry));
});
}
@@ -664,19 +690,17 @@ mod tests {
telemetry.start(system_id, installation_id, session_id, cx);
assert!(is_empty_state(&telemetry));
let first_date_time = clock.utc_now();
let operation = "test".to_string();
let event_properties = HashMap::from_iter([(
"test_key".to_string(),
serde_json::Value::String("test_value".to_string()),
)]);
let event = FlexibleEvent {
event_type: "test".to_string(),
event_properties,
};
telemetry.report_event(Event::Flexible(event));
let event = telemetry.report_app_event(operation.clone());
assert_eq!(
event,
Event::App(AppEvent {
operation: operation.clone(),
})
);
assert_eq!(telemetry.state.lock().events_queue.len(), 1);
assert!(telemetry.state.lock().flush_events_task.is_some());
assert_eq!(
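Both versions of the edit-period event above clamp the reported duration to at most 24 hours before converting it to whole milliseconds; a self-contained sketch of that clamp:

use std::time::{Duration, Instant};

// Saturate rather than panic on clock weirdness, cap at one day, then report
// milliseconds as a signed integer for the event payload.
fn edit_period_millis(start: Instant, end: Instant) -> i64 {
    end.saturating_duration_since(start)
        .min(Duration::from_secs(60 * 60 * 24))
        .as_millis() as i64
}

fn main() {
    let start = Instant::now();
    let end = start + Duration::from_secs(90);
    assert_eq!(edit_period_millis(start, end), 90_000);
}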

View File

@@ -201,8 +201,9 @@ impl UserStore {
cx.update(|cx| {
if let Some(info) = info {
let staff =
info.staff && !*feature_flags::ZED_DISABLE_STAFF;
let disable_staff = std::env::var("ZED_DISABLE_STAFF")
.map_or(false, |v| !v.is_empty() && v != "0");
let staff = info.staff && !disable_staff;
cx.update_flags(staff, info.flags);
client.telemetry.set_authenticated_user_info(
Some(info.metrics_id.clone()),
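The staff override above only treats the environment variable as set when it is non-empty and not "0"; the same check as a tiny standalone helper:

use std::env;

// Enabled only when the variable exists, is non-empty, and is not "0".
fn env_flag(name: &str) -> bool {
    env::var(name).map_or(false, |v| !v.is_empty() && v != "0")
}

fn main() {
    println!("disable staff: {}", env_flag("ZED_DISABLE_STAFF"));
}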

View File

@@ -33,7 +33,6 @@ clock.workspace = true
collections.workspace = true
dashmap.workspace = true
derive_more.workspace = true
diff.workspace = true
envy = "0.4.2"
futures.workspace = true
google_ai.workspace = true
@@ -131,7 +130,7 @@ worktree = { workspace = true, features = ["test-support"] }
livekit_client_macos = { workspace = true, features = ["test-support"] }
[target.'cfg(not(target_os = "macos"))'.dev-dependencies]
livekit_client = { workspace = true, features = ["test-support"] }
livekit_client = {workspace = true, features = ["test-support"] }
[package.metadata.cargo-machete]
ignored = ["async-stripe"]

View File

@@ -100,7 +100,6 @@ CREATE TABLE "worktree_repositories" (
"branch" VARCHAR,
"scan_id" INTEGER NOT NULL,
"is_deleted" BOOL NOT NULL,
"current_merge_conflicts" VARCHAR,
PRIMARY KEY(project_id, worktree_id, work_directory_id),
FOREIGN KEY(project_id, worktree_id) REFERENCES worktrees (project_id, id) ON DELETE CASCADE,
FOREIGN KEY(project_id, worktree_id, work_directory_id) REFERENCES worktree_entries (project_id, worktree_id, id) ON DELETE CASCADE
@@ -402,15 +401,6 @@ CREATE TABLE extension_versions (
schema_version INTEGER NOT NULL DEFAULT 0,
wasm_api_version TEXT,
download_count INTEGER NOT NULL DEFAULT 0,
provides_themes BOOLEAN NOT NULL DEFAULT FALSE,
provides_icon_themes BOOLEAN NOT NULL DEFAULT FALSE,
provides_languages BOOLEAN NOT NULL DEFAULT FALSE,
provides_grammars BOOLEAN NOT NULL DEFAULT FALSE,
provides_language_servers BOOLEAN NOT NULL DEFAULT FALSE,
provides_context_servers BOOLEAN NOT NULL DEFAULT FALSE,
provides_slash_commands BOOLEAN NOT NULL DEFAULT FALSE,
provides_indexed_docs_providers BOOLEAN NOT NULL DEFAULT FALSE,
provides_snippets BOOLEAN NOT NULL DEFAULT FALSE,
PRIMARY KEY (extension_id, version)
);
@@ -440,7 +430,6 @@ CREATE TABLE IF NOT EXISTS billing_customers (
id INTEGER PRIMARY KEY AUTOINCREMENT,
created_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
user_id INTEGER NOT NULL REFERENCES users(id),
has_overdue_invoices BOOLEAN NOT NULL DEFAULT FALSE,
stripe_customer_id TEXT NOT NULL
);

View File

@@ -1,2 +0,0 @@
alter table billing_customers
add column has_overdue_invoices bool not null default false;

View File

@@ -1,10 +0,0 @@
alter table extension_versions
add column provides_themes bool not null default false,
add column provides_icon_themes bool not null default false,
add column provides_languages bool not null default false,
add column provides_grammars bool not null default false,
add column provides_language_servers bool not null default false,
add column provides_context_servers bool not null default false,
add column provides_slash_commands bool not null default false,
add column provides_indexed_docs_providers bool not null default false,
add column provides_snippets bool not null default false;

View File

@@ -1,2 +0,0 @@
ALTER TABLE worktree_repositories
ADD COLUMN current_merge_conflicts VARCHAR NULL;

View File

@@ -249,31 +249,29 @@ async fn create_billing_subscription(
));
}
let existing_billing_customer = app.db.get_billing_customer_by_user_id(user.id).await?;
if let Some(existing_billing_customer) = &existing_billing_customer {
if existing_billing_customer.has_overdue_invoices {
return Err(Error::http(
StatusCode::PAYMENT_REQUIRED,
"user has overdue invoices".into(),
));
}
if app.db.has_overdue_billing_subscriptions(user.id).await? {
return Err(Error::http(
StatusCode::PAYMENT_REQUIRED,
"user has overdue billing subscriptions".into(),
));
}
let customer_id = if let Some(existing_customer) = existing_billing_customer {
CustomerId::from_str(&existing_customer.stripe_customer_id)
.context("failed to parse customer ID")?
} else {
let customer = Customer::create(
&stripe_client,
CreateCustomer {
email: user.email_address.as_deref(),
..Default::default()
},
)
.await?;
let customer_id =
if let Some(existing_customer) = app.db.get_billing_customer_by_user_id(user.id).await? {
CustomerId::from_str(&existing_customer.stripe_customer_id)
.context("failed to parse customer ID")?
} else {
let customer = Customer::create(
&stripe_client,
CreateCustomer {
email: user.email_address.as_deref(),
..Default::default()
},
)
.await?;
customer.id
};
customer.id
};
let default_model = llm_db.model(rpc::LanguageModelProvider::Anthropic, "claude-3-5-sonnet")?;
let stripe_model = stripe_billing.register_model(default_model).await?;
@@ -668,27 +666,6 @@ async fn handle_customer_subscription_event(
.await?
.ok_or_else(|| anyhow!("billing customer not found"))?;
let was_canceled_due_to_payment_failure = subscription.status == SubscriptionStatus::Canceled
&& subscription
.cancellation_details
.as_ref()
.and_then(|details| details.reason)
.map_or(false, |reason| {
reason == CancellationDetailsReason::PaymentFailed
});
if was_canceled_due_to_payment_failure {
app.db
.update_billing_customer(
billing_customer.id,
&UpdateBillingCustomerParams {
has_overdue_invoices: ActiveValue::set(true),
..Default::default()
},
)
.await?;
}
if let Some(existing_subscription) = app
.db
.get_billing_subscription_by_stripe_subscription_id(&subscription.id)

View File

@@ -495,10 +495,6 @@ fn for_snowflake(
body.events.into_iter().flat_map(move |event| {
let timestamp =
first_event_at + Duration::milliseconds(event.milliseconds_since_first_event);
// We will need to double check, but I believe all of the events that
// are being transformed here are now migrated over to use the
// telemetry::event! macro, as of this commit so this code can go away
// when we feel enough users have upgraded past this point.
let (event_type, mut event_properties) = match &event.event {
Event::Editor(e) => (
match e.operation.as_str() {
@@ -510,7 +506,7 @@ fn for_snowflake(
),
Event::InlineCompletion(e) => (
format!(
"Edit Prediction {}",
"Inline Completion {}",
if e.suggestion_accepted {
"Accepted"
} else {
@@ -520,7 +516,7 @@ fn for_snowflake(
serde_json::to_value(e).unwrap(),
),
Event::InlineCompletionRating(e) => (
"Edit Prediction Rated".to_string(),
"Inline Completion Rated".to_string(),
serde_json::to_value(e).unwrap(),
),
Event::Call(e) => {

View File

@@ -9,11 +9,10 @@ use axum::{
routing::get,
Extension, Json, Router,
};
use collections::{BTreeSet, HashMap};
use rpc::{ExtensionApiManifest, ExtensionProvides, GetExtensionsResponse};
use collections::HashMap;
use rpc::{ExtensionApiManifest, GetExtensionsResponse};
use semantic_version::SemanticVersion;
use serde::Deserialize;
use std::str::FromStr;
use std::{sync::Arc, time::Duration};
use time::PrimitiveDateTime;
use util::{maybe, ResultExt};
@@ -36,14 +35,6 @@ pub fn router() -> Router {
#[derive(Debug, Deserialize)]
struct GetExtensionsParams {
filter: Option<String>,
/// A comma-delimited list of features that the extension must provide.
///
/// For example:
/// - `themes`
/// - `themes,icon-themes`
/// - `languages,language-servers`
#[serde(default)]
provides: Option<String>,
#[serde(default)]
max_schema_version: i32,
}
@@ -52,22 +43,9 @@ async fn get_extensions(
Extension(app): Extension<Arc<AppState>>,
Query(params): Query<GetExtensionsParams>,
) -> Result<Json<GetExtensionsResponse>> {
let provides_filter = params.provides.map(|provides| {
provides
.split(',')
.map(|value| value.trim())
.filter_map(|value| ExtensionProvides::from_str(value).ok())
.collect::<BTreeSet<_>>()
});
let mut extensions = app
.db
.get_extensions(
params.filter.as_deref(),
provides_filter.as_ref(),
params.max_schema_version,
500,
)
.get_extensions(params.filter.as_deref(), params.max_schema_version, 500)
.await?;
if let Some(filter) = params.filter.as_deref() {
@@ -413,7 +391,6 @@ async fn fetch_extension_manifest(
repository: manifest.repository,
schema_version: manifest.schema_version.unwrap_or(0),
wasm_api_version: manifest.wasm_api_version,
provides: manifest.provides,
published_at,
})
}

View File

@@ -6,11 +6,10 @@ pub mod tests;
use crate::{executor::Executor, Error, Result};
use anyhow::anyhow;
use collections::{BTreeMap, BTreeSet, HashMap, HashSet};
use collections::{BTreeMap, HashMap, HashSet};
use dashmap::DashMap;
use futures::StreamExt;
use rand::{prelude::StdRng, Rng, SeedableRng};
use rpc::ExtensionProvides;
use rpc::{
proto::{self},
ConnectionId, ExtensionMetadata,
@@ -782,7 +781,6 @@ pub struct NewExtensionVersion {
pub repository: String,
pub schema_version: i32,
pub wasm_api_version: Option<String>,
pub provides: BTreeSet<ExtensionProvides>,
pub published_at: PrimitiveDateTime,
}

View File

@@ -10,7 +10,6 @@ pub struct CreateBillingCustomerParams {
pub struct UpdateBillingCustomerParams {
pub user_id: ActiveValue<UserId>,
pub stripe_customer_id: ActiveValue<String>,
pub has_overdue_invoices: ActiveValue<bool>,
}
impl Database {
@@ -44,7 +43,6 @@ impl Database {
id: ActiveValue::set(id),
user_id: params.user_id.clone(),
stripe_customer_id: params.stripe_customer_id.clone(),
has_overdue_invoices: params.has_overdue_invoices.clone(),
..Default::default()
})
.exec(&*tx)

View File

@@ -170,4 +170,40 @@ impl Database {
})
.await
}
/// Returns whether the user has any overdue billing subscriptions.
pub async fn has_overdue_billing_subscriptions(&self, user_id: UserId) -> Result<bool> {
Ok(self.count_overdue_billing_subscriptions(user_id).await? > 0)
}
/// Returns the count of the overdue billing subscriptions for the user with the specified ID.
///
/// This includes subscriptions:
/// - Whose status is `past_due`
/// - Whose status is `canceled` and the cancellation reason is `payment_failed`
pub async fn count_overdue_billing_subscriptions(&self, user_id: UserId) -> Result<usize> {
self.transaction(|tx| async move {
let past_due = billing_subscription::Column::StripeSubscriptionStatus
.eq(StripeSubscriptionStatus::PastDue);
let payment_failed = billing_subscription::Column::StripeSubscriptionStatus
.eq(StripeSubscriptionStatus::Canceled)
.and(
billing_subscription::Column::StripeCancellationReason
.eq(StripeCancellationReason::PaymentFailed),
);
let count = billing_subscription::Entity::find()
.inner_join(billing_customer::Entity)
.filter(
billing_customer::Column::UserId
.eq(user_id)
.and(past_due.or(payment_failed)),
)
.count(&*tx)
.await?;
Ok(count as usize)
})
.await
}
}
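A plain-Rust restatement of the condition built above: a subscription counts as overdue when it is past due, or when it was canceled specifically because payment failed.

#[derive(PartialEq)]
enum SubscriptionStatus {
    Active,
    PastDue,
    Canceled,
}

#[allow(dead_code)]
#[derive(PartialEq)]
enum CancellationReason {
    PaymentFailed,
    Other,
}

// past_due OR (canceled AND reason == payment_failed), mirroring the filter above.
fn is_overdue(status: SubscriptionStatus, reason: Option<CancellationReason>) -> bool {
    status == SubscriptionStatus::PastDue
        || (status == SubscriptionStatus::Canceled
            && reason == Some(CancellationReason::PaymentFailed))
}

fn main() {
    assert!(is_overdue(SubscriptionStatus::PastDue, None));
    assert!(is_overdue(
        SubscriptionStatus::Canceled,
        Some(CancellationReason::PaymentFailed)
    ));
    assert!(!is_overdue(SubscriptionStatus::Active, None));
}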

View File

@@ -10,7 +10,6 @@ impl Database {
pub async fn get_extensions(
&self,
filter: Option<&str>,
provides_filter: Option<&BTreeSet<ExtensionProvides>>,
max_schema_version: i32,
limit: usize,
) -> Result<Vec<ExtensionMetadata>> {
@@ -27,10 +26,6 @@ impl Database {
condition = condition.add(Expr::cust_with_expr("name ILIKE $1", fuzzy_name_filter));
}
if let Some(provides_filter) = provides_filter {
condition = apply_provides_filter(condition, provides_filter);
}
self.get_extensions_where(condition, Some(limit as u64), &tx)
.await
})
@@ -287,39 +282,6 @@ impl Database {
description: ActiveValue::Set(version.description.clone()),
schema_version: ActiveValue::Set(version.schema_version),
wasm_api_version: ActiveValue::Set(version.wasm_api_version.clone()),
provides_themes: ActiveValue::Set(
version.provides.contains(&ExtensionProvides::Themes),
),
provides_icon_themes: ActiveValue::Set(
version.provides.contains(&ExtensionProvides::IconThemes),
),
provides_languages: ActiveValue::Set(
version.provides.contains(&ExtensionProvides::Languages),
),
provides_grammars: ActiveValue::Set(
version.provides.contains(&ExtensionProvides::Grammars),
),
provides_language_servers: ActiveValue::Set(
version
.provides
.contains(&ExtensionProvides::LanguageServers),
),
provides_context_servers: ActiveValue::Set(
version
.provides
.contains(&ExtensionProvides::ContextServers),
),
provides_slash_commands: ActiveValue::Set(
version.provides.contains(&ExtensionProvides::SlashCommands),
),
provides_indexed_docs_providers: ActiveValue::Set(
version
.provides
.contains(&ExtensionProvides::IndexedDocsProviders),
),
provides_snippets: ActiveValue::Set(
version.provides.contains(&ExtensionProvides::Snippets),
),
download_count: ActiveValue::NotSet,
}
}))
@@ -390,55 +352,10 @@ impl Database {
}
}
fn apply_provides_filter(
mut condition: Condition,
provides_filter: &BTreeSet<ExtensionProvides>,
) -> Condition {
if provides_filter.contains(&ExtensionProvides::Themes) {
condition = condition.add(extension_version::Column::ProvidesThemes.eq(true));
}
if provides_filter.contains(&ExtensionProvides::IconThemes) {
condition = condition.add(extension_version::Column::ProvidesIconThemes.eq(true));
}
if provides_filter.contains(&ExtensionProvides::Languages) {
condition = condition.add(extension_version::Column::ProvidesLanguages.eq(true));
}
if provides_filter.contains(&ExtensionProvides::Grammars) {
condition = condition.add(extension_version::Column::ProvidesGrammars.eq(true));
}
if provides_filter.contains(&ExtensionProvides::LanguageServers) {
condition = condition.add(extension_version::Column::ProvidesLanguageServers.eq(true));
}
if provides_filter.contains(&ExtensionProvides::ContextServers) {
condition = condition.add(extension_version::Column::ProvidesContextServers.eq(true));
}
if provides_filter.contains(&ExtensionProvides::SlashCommands) {
condition = condition.add(extension_version::Column::ProvidesSlashCommands.eq(true));
}
if provides_filter.contains(&ExtensionProvides::IndexedDocsProviders) {
condition = condition.add(extension_version::Column::ProvidesIndexedDocsProviders.eq(true));
}
if provides_filter.contains(&ExtensionProvides::Snippets) {
condition = condition.add(extension_version::Column::ProvidesSnippets.eq(true));
}
condition
}
fn metadata_from_extension_and_version(
extension: extension::Model,
version: extension_version::Model,
) -> ExtensionMetadata {
let provides = version.provides();
ExtensionMetadata {
id: extension.external_id.into(),
manifest: rpc::ExtensionApiManifest {
@@ -453,7 +370,6 @@ fn metadata_from_extension_and_version(
repository: version.repository,
schema_version: Some(version.schema_version),
wasm_api_version: version.wasm_api_version,
provides,
},
published_at: convert_time_to_chrono(version.published_at),

View File

@@ -333,9 +333,6 @@ impl Database {
scan_id: ActiveValue::set(update.scan_id as i64),
branch: ActiveValue::set(repository.branch.clone()),
is_deleted: ActiveValue::set(false),
current_merge_conflicts: ActiveValue::Set(Some(
serde_json::to_string(&repository.current_merge_conflicts).unwrap(),
)),
},
))
.on_conflict(
@@ -772,13 +769,6 @@ impl Database {
updated_statuses.push(db_status_to_proto(status_entry)?);
}
let current_merge_conflicts = db_repository_entry
.current_merge_conflicts
.as_ref()
.map(|conflicts| serde_json::from_str(&conflicts))
.transpose()?
.unwrap_or_default();
worktree.repository_entries.insert(
db_repository_entry.work_directory_id as u64,
proto::RepositoryEntry {
@@ -786,7 +776,6 @@ impl Database {
branch: db_repository_entry.branch,
updated_statuses,
removed_statuses: Vec::new(),
current_merge_conflicts,
},
);
}
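The merge-conflict list travels through a nullable JSON text column here; the transpose() step is what lets a NULL column decode to an empty list instead of an error. A minimal sketch of that decode path (assuming serde_json is available and the column holds a JSON array of strings):

// Option<Result<T, E>> -> Result<Option<T>, E>, then default the None case.
fn decode_conflicts(column: Option<&str>) -> Result<Vec<String>, serde_json::Error> {
    column
        .map(|raw| serde_json::from_str::<Vec<String>>(raw))
        .transpose()
        .map(|parsed| parsed.unwrap_or_default())
}

fn main() {
    assert_eq!(decode_conflicts(None).unwrap(), Vec::<String>::new());
    assert_eq!(
        decode_conflicts(Some(r#"["src/main.rs"]"#)).unwrap(),
        vec!["src/main.rs".to_string()]
    );
}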

View File

@@ -736,19 +736,11 @@ impl Database {
}
}
let current_merge_conflicts = db_repository
.current_merge_conflicts
.as_ref()
.map(|conflicts| serde_json::from_str(&conflicts))
.transpose()?
.unwrap_or_default();
worktree.updated_repositories.push(proto::RepositoryEntry {
work_directory_id: db_repository.work_directory_id as u64,
branch: db_repository.branch,
updated_statuses,
removed_statuses,
current_merge_conflicts,
});
}
}

View File

@@ -9,7 +9,6 @@ pub struct Model {
pub id: BillingCustomerId,
pub user_id: UserId,
pub stripe_customer_id: String,
pub has_overdue_invoices: bool,
pub created_at: DateTime,
}

View File

@@ -1,6 +1,4 @@
use crate::db::ExtensionId;
use collections::BTreeSet;
use rpc::ExtensionProvides;
use sea_orm::entity::prelude::*;
use time::PrimitiveDateTime;
@@ -18,58 +16,6 @@ pub struct Model {
pub schema_version: i32,
pub wasm_api_version: Option<String>,
pub download_count: i64,
pub provides_themes: bool,
pub provides_icon_themes: bool,
pub provides_languages: bool,
pub provides_grammars: bool,
pub provides_language_servers: bool,
pub provides_context_servers: bool,
pub provides_slash_commands: bool,
pub provides_indexed_docs_providers: bool,
pub provides_snippets: bool,
}
impl Model {
pub fn provides(&self) -> BTreeSet<ExtensionProvides> {
let mut provides = BTreeSet::default();
if self.provides_themes {
provides.insert(ExtensionProvides::Themes);
}
if self.provides_icon_themes {
provides.insert(ExtensionProvides::IconThemes);
}
if self.provides_languages {
provides.insert(ExtensionProvides::Languages);
}
if self.provides_grammars {
provides.insert(ExtensionProvides::Grammars);
}
if self.provides_language_servers {
provides.insert(ExtensionProvides::LanguageServers);
}
if self.provides_context_servers {
provides.insert(ExtensionProvides::ContextServers);
}
if self.provides_slash_commands {
provides.insert(ExtensionProvides::SlashCommands);
}
if self.provides_indexed_docs_providers {
provides.insert(ExtensionProvides::IndexedDocsProviders);
}
if self.provides_snippets {
provides.insert(ExtensionProvides::Snippets);
}
provides
}
}
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]

View File

@@ -13,8 +13,6 @@ pub struct Model {
pub scan_id: i64,
pub branch: Option<String>,
pub is_deleted: bool,
// JSON array typed string
pub current_merge_conflicts: Option<String>,
}
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]

View File

@@ -1,6 +1,6 @@
use std::sync::Arc;
use crate::db::billing_subscription::StripeSubscriptionStatus;
use crate::db::billing_subscription::{StripeCancellationReason, StripeSubscriptionStatus};
use crate::db::tests::new_test_user;
use crate::db::{CreateBillingCustomerParams, CreateBillingSubscriptionParams};
use crate::test_both_dbs;
@@ -88,3 +88,113 @@ async fn test_get_active_billing_subscriptions(db: &Arc<Database>) {
assert_eq!(subscription_count, 0);
}
}
test_both_dbs!(
test_count_overdue_billing_subscriptions,
test_count_overdue_billing_subscriptions_postgres,
test_count_overdue_billing_subscriptions_sqlite
);
async fn test_count_overdue_billing_subscriptions(db: &Arc<Database>) {
// A user with no subscription has no overdue billing subscriptions.
{
let user_id = new_test_user(db, "no-subscription-user@example.com").await;
let subscription_count = db
.count_overdue_billing_subscriptions(user_id)
.await
.unwrap();
assert_eq!(subscription_count, 0);
}
// A user with a past-due subscription has an overdue billing subscription.
{
let user_id = new_test_user(db, "past-due-user@example.com").await;
let customer = db
.create_billing_customer(&CreateBillingCustomerParams {
user_id,
stripe_customer_id: "cus_past_due_user".into(),
})
.await
.unwrap();
assert_eq!(customer.stripe_customer_id, "cus_past_due_user".to_string());
db.create_billing_subscription(&CreateBillingSubscriptionParams {
billing_customer_id: customer.id,
stripe_subscription_id: "sub_past_due_user".into(),
stripe_subscription_status: StripeSubscriptionStatus::PastDue,
stripe_cancellation_reason: None,
})
.await
.unwrap();
let subscription_count = db
.count_overdue_billing_subscriptions(user_id)
.await
.unwrap();
assert_eq!(subscription_count, 1);
}
// A user with a canceled subscription with a reason of `payment_failed` has an overdue billing subscription.
{
let user_id =
new_test_user(db, "canceled-subscription-payment-failed-user@example.com").await;
let customer = db
.create_billing_customer(&CreateBillingCustomerParams {
user_id,
stripe_customer_id: "cus_canceled_subscription_payment_failed_user".into(),
})
.await
.unwrap();
assert_eq!(
customer.stripe_customer_id,
"cus_canceled_subscription_payment_failed_user".to_string()
);
db.create_billing_subscription(&CreateBillingSubscriptionParams {
billing_customer_id: customer.id,
stripe_subscription_id: "sub_canceled_subscription_payment_failed_user".into(),
stripe_subscription_status: StripeSubscriptionStatus::Canceled,
stripe_cancellation_reason: Some(StripeCancellationReason::PaymentFailed),
})
.await
.unwrap();
let subscription_count = db
.count_overdue_billing_subscriptions(user_id)
.await
.unwrap();
assert_eq!(subscription_count, 1);
}
// A user with a canceled subscription with a reason of `cancellation_requested` has no overdue billing subscriptions.
{
let user_id = new_test_user(db, "canceled-subscription-user@example.com").await;
let customer = db
.create_billing_customer(&CreateBillingCustomerParams {
user_id,
stripe_customer_id: "cus_canceled_subscription_user".into(),
})
.await
.unwrap();
assert_eq!(
customer.stripe_customer_id,
"cus_canceled_subscription_user".to_string()
);
db.create_billing_subscription(&CreateBillingSubscriptionParams {
billing_customer_id: customer.id,
stripe_subscription_id: "sub_canceled_subscription_user".into(),
stripe_subscription_status: StripeSubscriptionStatus::Canceled,
stripe_cancellation_reason: Some(StripeCancellationReason::CancellationRequested),
})
.await
.unwrap();
let subscription_count = db
.count_overdue_billing_subscriptions(user_id)
.await
.unwrap();
assert_eq!(subscription_count, 0);
}
}
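The tests above pin down the intended behaviour of `count_overdue_billing_subscriptions`: past-due subscriptions count, cancellations caused by payment failure count, and user-requested cancellations do not. A minimal sketch of such a query is shown below; the entity and column names (`BillingCustomerId`, `StripeSubscriptionStatus`, `StripeCancellationReason`) are assumptions based on the surrounding diff, so this is illustrative rather than the actual implementation.

// Illustrative sketch only; entity and column names are assumptions.
use sea_orm::{ColumnTrait, Condition, EntityTrait, PaginatorTrait, QueryFilter};

async fn count_overdue_billing_subscriptions(
    tx: &sea_orm::DatabaseTransaction,
    user_id: UserId,
) -> anyhow::Result<u64> {
    // Look up the Stripe customer associated with this user, if any.
    let Some(customer) = billing_customer::Entity::find()
        .filter(billing_customer::Column::UserId.eq(user_id))
        .one(tx)
        .await?
    else {
        return Ok(0);
    };

    // A subscription is "overdue" when it is past due, or when Stripe canceled
    // it because payment failed; cancellations requested by the user are not overdue.
    let count = billing_subscription::Entity::find()
        .filter(billing_subscription::Column::BillingCustomerId.eq(customer.id))
        .filter(
            Condition::any()
                .add(
                    billing_subscription::Column::StripeSubscriptionStatus
                        .eq(StripeSubscriptionStatus::PastDue),
                )
                .add(
                    Condition::all()
                        .add(
                            billing_subscription::Column::StripeSubscriptionStatus
                                .eq(StripeSubscriptionStatus::Canceled),
                        )
                        .add(
                            billing_subscription::Column::StripeCancellationReason
                                .eq(StripeCancellationReason::PaymentFailed),
                        ),
                ),
        )
        .count(tx)
        .await?;

    Ok(count)
}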

View File

@@ -1,14 +1,10 @@
use std::collections::BTreeSet;
use std::sync::Arc;
use rpc::ExtensionProvides;
use super::Database;
use crate::db::ExtensionVersionConstraints;
use crate::{
db::{queries::extensions::convert_time_to_chrono, ExtensionMetadata, NewExtensionVersion},
test_both_dbs,
};
use std::sync::Arc;
test_both_dbs!(
test_extensions,
@@ -20,7 +16,7 @@ async fn test_extensions(db: &Arc<Database>) {
let versions = db.get_known_extension_versions().await.unwrap();
assert!(versions.is_empty());
let extensions = db.get_extensions(None, None, 1, 5).await.unwrap();
let extensions = db.get_extensions(None, 1, 5).await.unwrap();
assert!(extensions.is_empty());
let t0 = time::OffsetDateTime::from_unix_timestamp_nanos(0).unwrap();
@@ -41,7 +37,6 @@ async fn test_extensions(db: &Arc<Database>) {
repository: "ext1/repo".into(),
schema_version: 1,
wasm_api_version: None,
provides: BTreeSet::default(),
published_at: t0,
},
NewExtensionVersion {
@@ -52,7 +47,6 @@ async fn test_extensions(db: &Arc<Database>) {
repository: "ext1/repo".into(),
schema_version: 1,
wasm_api_version: None,
provides: BTreeSet::default(),
published_at: t0,
},
],
@@ -67,7 +61,6 @@ async fn test_extensions(db: &Arc<Database>) {
repository: "ext2/repo".into(),
schema_version: 0,
wasm_api_version: None,
provides: BTreeSet::default(),
published_at: t0,
}],
),
@@ -90,7 +83,7 @@ async fn test_extensions(db: &Arc<Database>) {
);
// The latest version of each extension is returned.
let extensions = db.get_extensions(None, None, 1, 5).await.unwrap();
let extensions = db.get_extensions(None, 1, 5).await.unwrap();
assert_eq!(
extensions,
&[
@@ -104,7 +97,6 @@ async fn test_extensions(db: &Arc<Database>) {
repository: "ext1/repo".into(),
schema_version: Some(1),
wasm_api_version: None,
provides: BTreeSet::default(),
},
published_at: t0_chrono,
download_count: 0,
@@ -119,7 +111,6 @@ async fn test_extensions(db: &Arc<Database>) {
repository: "ext2/repo".into(),
schema_version: Some(0),
wasm_api_version: None,
provides: BTreeSet::default(),
},
published_at: t0_chrono,
download_count: 0
@@ -128,7 +119,7 @@ async fn test_extensions(db: &Arc<Database>) {
);
// Extensions with too new of a schema version are excluded.
let extensions = db.get_extensions(None, None, 0, 5).await.unwrap();
let extensions = db.get_extensions(None, 0, 5).await.unwrap();
assert_eq!(
extensions,
&[ExtensionMetadata {
@@ -141,7 +132,6 @@ async fn test_extensions(db: &Arc<Database>) {
repository: "ext2/repo".into(),
schema_version: Some(0),
wasm_api_version: None,
provides: BTreeSet::default(),
},
published_at: t0_chrono,
download_count: 0
@@ -168,7 +158,7 @@ async fn test_extensions(db: &Arc<Database>) {
.unwrap());
// Extensions are returned in descending order of total downloads.
let extensions = db.get_extensions(None, None, 1, 5).await.unwrap();
let extensions = db.get_extensions(None, 1, 5).await.unwrap();
assert_eq!(
extensions,
&[
@@ -182,7 +172,6 @@ async fn test_extensions(db: &Arc<Database>) {
repository: "ext2/repo".into(),
schema_version: Some(0),
wasm_api_version: None,
provides: BTreeSet::default(),
},
published_at: t0_chrono,
download_count: 7
@@ -197,7 +186,6 @@ async fn test_extensions(db: &Arc<Database>) {
repository: "ext1/repo".into(),
schema_version: Some(1),
wasm_api_version: None,
provides: BTreeSet::default(),
},
published_at: t0_chrono,
download_count: 5,
@@ -219,7 +207,6 @@ async fn test_extensions(db: &Arc<Database>) {
repository: "ext1/repo".into(),
schema_version: 1,
wasm_api_version: None,
provides: BTreeSet::default(),
published_at: t0,
}],
),
@@ -233,7 +220,6 @@ async fn test_extensions(db: &Arc<Database>) {
repository: "ext2/repo".into(),
schema_version: 0,
wasm_api_version: None,
provides: BTreeSet::default(),
published_at: t0,
}],
),
@@ -258,7 +244,7 @@ async fn test_extensions(db: &Arc<Database>) {
.collect()
);
let extensions = db.get_extensions(None, None, 1, 5).await.unwrap();
let extensions = db.get_extensions(None, 1, 5).await.unwrap();
assert_eq!(
extensions,
&[
@@ -272,7 +258,6 @@ async fn test_extensions(db: &Arc<Database>) {
repository: "ext2/repo".into(),
schema_version: Some(0),
wasm_api_version: None,
provides: BTreeSet::default(),
},
published_at: t0_chrono,
download_count: 7
@@ -287,7 +272,6 @@ async fn test_extensions(db: &Arc<Database>) {
repository: "ext1/repo".into(),
schema_version: Some(1),
wasm_api_version: None,
provides: BTreeSet::default(),
},
published_at: t0_chrono,
download_count: 5,
@@ -306,7 +290,7 @@ async fn test_extensions_by_id(db: &Arc<Database>) {
let versions = db.get_known_extension_versions().await.unwrap();
assert!(versions.is_empty());
let extensions = db.get_extensions(None, None, 1, 5).await.unwrap();
let extensions = db.get_extensions(None, 1, 5).await.unwrap();
assert!(extensions.is_empty());
let t0 = time::OffsetDateTime::from_unix_timestamp_nanos(0).unwrap();
@@ -327,10 +311,6 @@ async fn test_extensions_by_id(db: &Arc<Database>) {
repository: "ext1/repo".into(),
schema_version: 1,
wasm_api_version: Some("0.0.4".into()),
provides: BTreeSet::from_iter([
ExtensionProvides::Grammars,
ExtensionProvides::Languages,
]),
published_at: t0,
},
NewExtensionVersion {
@@ -341,11 +321,6 @@ async fn test_extensions_by_id(db: &Arc<Database>) {
repository: "ext1/repo".into(),
schema_version: 1,
wasm_api_version: Some("0.0.4".into()),
provides: BTreeSet::from_iter([
ExtensionProvides::Grammars,
ExtensionProvides::Languages,
ExtensionProvides::LanguageServers,
]),
published_at: t0,
},
NewExtensionVersion {
@@ -356,11 +331,6 @@ async fn test_extensions_by_id(db: &Arc<Database>) {
repository: "ext1/repo".into(),
schema_version: 1,
wasm_api_version: Some("0.0.5".into()),
provides: BTreeSet::from_iter([
ExtensionProvides::Grammars,
ExtensionProvides::Languages,
ExtensionProvides::LanguageServers,
]),
published_at: t0,
},
],
@@ -375,7 +345,6 @@ async fn test_extensions_by_id(db: &Arc<Database>) {
repository: "ext2/repo".into(),
schema_version: 0,
wasm_api_version: None,
provides: BTreeSet::default(),
published_at: t0,
}],
),
@@ -409,11 +378,6 @@ async fn test_extensions_by_id(db: &Arc<Database>) {
repository: "ext1/repo".into(),
schema_version: Some(1),
wasm_api_version: Some("0.0.4".into()),
provides: BTreeSet::from_iter([
ExtensionProvides::Grammars,
ExtensionProvides::Languages,
ExtensionProvides::LanguageServers,
]),
},
published_at: t0_chrono,
download_count: 0,

View File

@@ -309,8 +309,7 @@ impl Server {
.add_request_handler(forward_read_only_project_request::<proto::ResolveInlayHint>)
.add_request_handler(forward_read_only_project_request::<proto::OpenBufferByPath>)
.add_request_handler(forward_read_only_project_request::<proto::GitBranches>)
.add_request_handler(forward_read_only_project_request::<proto::OpenUnstagedDiff>)
.add_request_handler(forward_read_only_project_request::<proto::OpenUncommittedDiff>)
.add_request_handler(forward_read_only_project_request::<proto::GetStagedText>)
.add_request_handler(
forward_mutating_project_request::<proto::RegisterBufferWithLanguageServers>,
)
@@ -349,7 +348,7 @@ impl Server {
.add_message_handler(broadcast_project_message_from_host::<proto::UpdateBufferFile>)
.add_message_handler(broadcast_project_message_from_host::<proto::BufferReloaded>)
.add_message_handler(broadcast_project_message_from_host::<proto::BufferSaved>)
.add_message_handler(broadcast_project_message_from_host::<proto::UpdateDiffBases>)
.add_message_handler(broadcast_project_message_from_host::<proto::UpdateDiffBase>)
.add_request_handler(get_users)
.add_request_handler(fuzzy_search_users)
.add_request_handler(request_contact)
@@ -395,7 +394,6 @@ impl Server {
.add_request_handler(forward_mutating_project_request::<proto::Stage>)
.add_request_handler(forward_mutating_project_request::<proto::Unstage>)
.add_request_handler(forward_mutating_project_request::<proto::Commit>)
.add_request_handler(forward_mutating_project_request::<proto::OpenCommitMessageBuffer>)
.add_message_handler(broadcast_project_message_from_host::<proto::AdvertiseContexts>)
.add_message_handler(update_context)
.add_request_handler({

View File

@@ -342,7 +342,7 @@ async fn test_multiple_handles_to_channel_buffer(
future::try_join3(channel_buffer_1, channel_buffer_2, channel_buffer_3)
.await
.unwrap();
let channel_buffer_entity_id = channel_buffer.entity_id();
let channel_buffer_model_id = channel_buffer.entity_id();
assert_eq!(channel_buffer, channel_buffer_2);
assert_eq!(channel_buffer, channel_buffer_3);
@@ -366,7 +366,7 @@ async fn test_multiple_handles_to_channel_buffer(
.update(cx_a, |store, cx| store.open_channel_buffer(channel_id, cx))
.await
.unwrap();
assert_ne!(channel_buffer.entity_id(), channel_buffer_entity_id);
assert_ne!(channel_buffer.entity_id(), channel_buffer_model_id);
channel_buffer.update(cx_a, |buffer, cx| {
buffer.buffer().update(cx, |buffer, _| {
assert_eq!(buffer.text(), "hello");

View File

@@ -1991,9 +1991,10 @@ async fn test_git_blame_is_forwarded(cx_a: &mut TestAppContext, cx_b: &mut TestA
.collect(),
remote_url: Some("git@github.com:zed-industries/zed.git".to_string()),
};
client_a
.fs()
.set_blame_for_repo(Path::new("/my-repo/.git"), vec![("file.txt".into(), blame)]);
client_a.fs().set_blame_for_repo(
Path::new("/my-repo/.git"),
vec![(Path::new("file.txt"), blame)],
);
let (project_a, worktree_id) = client_a.build_local_project("/my-repo", cx_a).await;
let project_id = active_call_a

View File

@@ -2558,27 +2558,13 @@ async fn test_git_diff_base_change(
let project_remote = client_b.join_remote_project(project_id, cx_b).await;
let staged_text = "
let diff_base = "
one
three
"
.unindent();
let committed_text = "
one
TWO
three
"
.unindent();
let new_committed_text = "
one
TWO_HUNDRED
three
"
.unindent();
let new_staged_text = "
let new_diff_base = "
one
two
"
@@ -2586,11 +2572,7 @@ async fn test_git_diff_base_change(
client_a.fs().set_index_for_repo(
Path::new("/dir/.git"),
&[("a.txt".into(), staged_text.clone())],
);
client_a.fs().set_head_for_repo(
Path::new("/dir/.git"),
&[("a.txt".into(), committed_text.clone())],
&[(Path::new("a.txt"), diff_base.clone())],
);
// Create the buffer
@@ -2598,25 +2580,25 @@ async fn test_git_diff_base_change(
.update(cx_a, |p, cx| p.open_buffer((worktree_id, "a.txt"), cx))
.await
.unwrap();
let local_unstaged_diff_a = project_local
let change_set_local_a = project_local
.update(cx_a, |p, cx| {
p.open_unstaged_diff(buffer_local_a.clone(), cx)
p.open_unstaged_changes(buffer_local_a.clone(), cx)
})
.await
.unwrap();
// Wait for it to catch up to the new diff
executor.run_until_parked();
local_unstaged_diff_a.read_with(cx_a, |diff, cx| {
change_set_local_a.read_with(cx_a, |change_set, cx| {
let buffer = buffer_local_a.read(cx);
assert_eq!(
diff.base_text_string().as_deref(),
Some(staged_text.as_str())
change_set.base_text_string().as_deref(),
Some(diff_base.as_str())
);
diff::assert_hunks(
diff.snapshot.hunks_in_row_range(0..4, buffer),
git::diff::assert_hunks(
change_set.diff_to_buffer.hunks_in_row_range(0..4, buffer),
buffer,
&diff.base_text_string().unwrap(),
&diff_base,
&[(1..2, "", "two\n")],
);
});
@@ -2626,113 +2608,73 @@ async fn test_git_diff_base_change(
.update(cx_b, |p, cx| p.open_buffer((worktree_id, "a.txt"), cx))
.await
.unwrap();
let remote_unstaged_diff_a = project_remote
let change_set_remote_a = project_remote
.update(cx_b, |p, cx| {
p.open_unstaged_diff(buffer_remote_a.clone(), cx)
p.open_unstaged_changes(buffer_remote_a.clone(), cx)
})
.await
.unwrap();
// Wait for the remote buffer to catch up to the new diff
executor.run_until_parked();
remote_unstaged_diff_a.read_with(cx_b, |diff, cx| {
change_set_remote_a.read_with(cx_b, |change_set, cx| {
let buffer = buffer_remote_a.read(cx);
assert_eq!(
diff.base_text_string().as_deref(),
Some(staged_text.as_str())
change_set.base_text_string().as_deref(),
Some(diff_base.as_str())
);
diff::assert_hunks(
diff.snapshot.hunks_in_row_range(0..4, buffer),
git::diff::assert_hunks(
change_set.diff_to_buffer.hunks_in_row_range(0..4, buffer),
buffer,
&diff.base_text_string().unwrap(),
&diff_base,
&[(1..2, "", "two\n")],
);
});
// Open uncommitted changes on the guest, without opening them on the host first
let remote_uncommitted_diff_a = project_remote
.update(cx_b, |p, cx| {
p.open_uncommitted_diff(buffer_remote_a.clone(), cx)
})
.await
.unwrap();
executor.run_until_parked();
remote_uncommitted_diff_a.read_with(cx_b, |diff, cx| {
let buffer = buffer_remote_a.read(cx);
assert_eq!(
diff.base_text_string().as_deref(),
Some(committed_text.as_str())
);
diff::assert_hunks(
diff.snapshot.hunks_in_row_range(0..4, buffer),
buffer,
&diff.base_text_string().unwrap(),
&[(1..2, "TWO\n", "two\n")],
);
});
// Update the index text of the open buffer
// Update the staged text of the open buffer
client_a.fs().set_index_for_repo(
Path::new("/dir/.git"),
&[("a.txt".into(), new_staged_text.clone())],
);
client_a.fs().set_head_for_repo(
Path::new("/dir/.git"),
&[("a.txt".into(), new_committed_text.clone())],
&[(Path::new("a.txt"), new_diff_base.clone())],
);
// Wait for buffer_local_a to receive it
executor.run_until_parked();
local_unstaged_diff_a.read_with(cx_a, |diff, cx| {
change_set_local_a.read_with(cx_a, |change_set, cx| {
let buffer = buffer_local_a.read(cx);
assert_eq!(
diff.base_text_string().as_deref(),
Some(new_staged_text.as_str())
change_set.base_text_string().as_deref(),
Some(new_diff_base.as_str())
);
diff::assert_hunks(
diff.snapshot.hunks_in_row_range(0..4, buffer),
git::diff::assert_hunks(
change_set.diff_to_buffer.hunks_in_row_range(0..4, buffer),
buffer,
&diff.base_text_string().unwrap(),
&new_diff_base,
&[(2..3, "", "three\n")],
);
});
remote_unstaged_diff_a.read_with(cx_b, |diff, cx| {
change_set_remote_a.read_with(cx_b, |change_set, cx| {
let buffer = buffer_remote_a.read(cx);
assert_eq!(
diff.base_text_string().as_deref(),
Some(new_staged_text.as_str())
change_set.base_text_string().as_deref(),
Some(new_diff_base.as_str())
);
diff::assert_hunks(
diff.snapshot.hunks_in_row_range(0..4, buffer),
git::diff::assert_hunks(
change_set.diff_to_buffer.hunks_in_row_range(0..4, buffer),
buffer,
&diff.base_text_string().unwrap(),
&new_diff_base,
&[(2..3, "", "three\n")],
);
});
remote_uncommitted_diff_a.read_with(cx_b, |diff, cx| {
let buffer = buffer_remote_a.read(cx);
assert_eq!(
diff.base_text_string().as_deref(),
Some(new_committed_text.as_str())
);
diff::assert_hunks(
diff.snapshot.hunks_in_row_range(0..4, buffer),
buffer,
&diff.base_text_string().unwrap(),
&[(1..2, "TWO_HUNDRED\n", "two\n")],
);
});
// Nested git dir
let staged_text = "
let diff_base = "
one
three
"
.unindent();
let new_staged_text = "
let new_diff_base = "
one
two
"
@@ -2740,7 +2682,7 @@ async fn test_git_diff_base_change(
client_a.fs().set_index_for_repo(
Path::new("/dir/sub/.git"),
&[("b.txt".into(), staged_text.clone())],
&[(Path::new("b.txt"), diff_base.clone())],
);
// Create the buffer
@@ -2748,25 +2690,25 @@ async fn test_git_diff_base_change(
.update(cx_a, |p, cx| p.open_buffer((worktree_id, "sub/b.txt"), cx))
.await
.unwrap();
let local_unstaged_diff_b = project_local
let change_set_local_b = project_local
.update(cx_a, |p, cx| {
p.open_unstaged_diff(buffer_local_b.clone(), cx)
p.open_unstaged_changes(buffer_local_b.clone(), cx)
})
.await
.unwrap();
// Wait for it to catch up to the new diff
executor.run_until_parked();
local_unstaged_diff_b.read_with(cx_a, |diff, cx| {
change_set_local_b.read_with(cx_a, |change_set, cx| {
let buffer = buffer_local_b.read(cx);
assert_eq!(
diff.base_text_string().as_deref(),
Some(staged_text.as_str())
change_set.base_text_string().as_deref(),
Some(diff_base.as_str())
);
diff::assert_hunks(
diff.snapshot.hunks_in_row_range(0..4, buffer),
git::diff::assert_hunks(
change_set.diff_to_buffer.hunks_in_row_range(0..4, buffer),
buffer,
&diff.base_text_string().unwrap(),
&diff_base,
&[(1..2, "", "two\n")],
);
});
@@ -2776,60 +2718,60 @@ async fn test_git_diff_base_change(
.update(cx_b, |p, cx| p.open_buffer((worktree_id, "sub/b.txt"), cx))
.await
.unwrap();
let remote_unstaged_diff_b = project_remote
let change_set_remote_b = project_remote
.update(cx_b, |p, cx| {
p.open_unstaged_diff(buffer_remote_b.clone(), cx)
p.open_unstaged_changes(buffer_remote_b.clone(), cx)
})
.await
.unwrap();
executor.run_until_parked();
remote_unstaged_diff_b.read_with(cx_b, |diff, cx| {
change_set_remote_b.read_with(cx_b, |change_set, cx| {
let buffer = buffer_remote_b.read(cx);
assert_eq!(
diff.base_text_string().as_deref(),
Some(staged_text.as_str())
change_set.base_text_string().as_deref(),
Some(diff_base.as_str())
);
diff::assert_hunks(
diff.snapshot.hunks_in_row_range(0..4, buffer),
git::diff::assert_hunks(
change_set.diff_to_buffer.hunks_in_row_range(0..4, buffer),
buffer,
&staged_text,
&diff_base,
&[(1..2, "", "two\n")],
);
});
// Updatet the staged text
// Update the staged text
client_a.fs().set_index_for_repo(
Path::new("/dir/sub/.git"),
&[("b.txt".into(), new_staged_text.clone())],
&[(Path::new("b.txt"), new_diff_base.clone())],
);
// Wait for buffer_local_b to receive it
executor.run_until_parked();
local_unstaged_diff_b.read_with(cx_a, |diff, cx| {
change_set_local_b.read_with(cx_a, |change_set, cx| {
let buffer = buffer_local_b.read(cx);
assert_eq!(
diff.base_text_string().as_deref(),
Some(new_staged_text.as_str())
change_set.base_text_string().as_deref(),
Some(new_diff_base.as_str())
);
diff::assert_hunks(
diff.snapshot.hunks_in_row_range(0..4, buffer),
git::diff::assert_hunks(
change_set.diff_to_buffer.hunks_in_row_range(0..4, buffer),
buffer,
&new_staged_text,
&new_diff_base,
&[(2..3, "", "three\n")],
);
});
remote_unstaged_diff_b.read_with(cx_b, |diff, cx| {
change_set_remote_b.read_with(cx_b, |change_set, cx| {
let buffer = buffer_remote_b.read(cx);
assert_eq!(
diff.base_text_string().as_deref(),
Some(new_staged_text.as_str())
change_set.base_text_string().as_deref(),
Some(new_diff_base.as_str())
);
diff::assert_hunks(
diff.snapshot.hunks_in_row_range(0..4, buffer),
git::diff::assert_hunks(
change_set.diff_to_buffer.hunks_in_row_range(0..4, buffer),
buffer,
&new_staged_text,
&new_diff_base,
&[(2..3, "", "three\n")],
);
});

View File

@@ -953,8 +953,8 @@ impl RandomizedTest for ProjectCollaborationTest {
let dot_git_dir = repo_path.join(".git");
let contents = contents
.into_iter()
.map(|(path, contents)| (path.into(), contents))
.iter()
.map(|(path, contents)| (path.as_path(), contents.clone()))
.collect::<Vec<_>>();
if client.fs().metadata(&dot_git_dir).await?.is_none() {
client.fs().create_dir(&dot_git_dir).await?;
@@ -1339,7 +1339,7 @@ impl RandomizedTest for ProjectCollaborationTest {
project
.buffer_store()
.read(cx)
.get_unstaged_diff(host_buffer.read(cx).remote_id(), cx)
.get_unstaged_changes(host_buffer.read(cx).remote_id())
.unwrap()
.read(cx)
.base_text_string()
@@ -1348,7 +1348,7 @@ impl RandomizedTest for ProjectCollaborationTest {
project
.buffer_store()
.read(cx)
.get_unstaged_diff(guest_buffer.read(cx).remote_id(), cx)
.get_unstaged_changes(guest_buffer.read(cx).remote_id())
.unwrap()
.read(cx)
.base_text_string()

View File

@@ -849,10 +849,10 @@ impl TestClient {
) -> (Entity<Workspace>, &'a mut VisualTestContext) {
let window = cx.update(|cx| cx.active_window().unwrap().downcast::<Workspace>().unwrap());
let entity = window.root(cx).unwrap();
let model = window.root(cx).unwrap();
let cx = VisualTestContext::from_window(*window.deref(), cx).as_mut();
// it might be nice to try to clean these up at the end of each test.
(entity, cx)
(model, cx)
}
}
@@ -861,9 +861,9 @@ pub fn open_channel_notes(
cx: &mut VisualTestContext,
) -> Task<anyhow::Result<Entity<ChannelView>>> {
let window = cx.update(|_, cx| cx.active_window().unwrap().downcast::<Workspace>().unwrap());
let entity = window.root(cx).unwrap();
let model = window.root(cx).unwrap();
cx.update(|window, cx| ChannelView::open(channel_id, None, entity.clone(), window, cx))
cx.update(|window, cx| ChannelView::open(channel_id, None, model.clone(), window, cx))
}
impl Drop for TestClient {

View File

@@ -97,14 +97,14 @@ impl ChatPanel {
});
cx.new(|cx| {
let entity = cx.entity().downgrade();
let model = cx.entity().downgrade();
let message_list = ListState::new(
0,
gpui::ListAlignment::Bottom,
px(1000.),
move |ix, window, cx| {
if let Some(entity) = entity.upgrade() {
entity.update(cx, |this: &mut Self, cx| {
if let Some(model) = model.upgrade() {
model.update(cx, |this: &mut Self, cx| {
this.render_message(ix, window, cx).into_any_element()
})
} else {

View File

@@ -239,14 +239,14 @@ impl CollabPanel {
)
.detach();
let entity = cx.entity().downgrade();
let model = cx.entity().downgrade();
let list_state = ListState::new(
0,
gpui::ListAlignment::Top,
px(1000.),
move |ix, window, cx| {
if let Some(entity) = entity.upgrade() {
entity.update(cx, |this, cx| this.render_list_entry(ix, window, cx))
if let Some(model) = model.upgrade() {
model.update(cx, |this, cx| this.render_list_entry(ix, window, cx))
} else {
div().into_any()
}

View File

@@ -110,13 +110,13 @@ impl NotificationPanel {
})
.detach();
let entity = cx.entity().downgrade();
let model = cx.entity().downgrade();
let notification_list =
ListState::new(0, ListAlignment::Top, px(1000.), move |ix, window, cx| {
entity
model
.upgrade()
.and_then(|entity| {
entity.update(cx, |this, cx| this.render_notification(ix, window, cx))
.and_then(|model| {
model.update(cx, |this, cx| this.render_notification(ix, window, cx))
})
.unwrap_or_else(|| div().into_any())
});
@@ -323,9 +323,9 @@ impl NotificationPanel {
.justify_end()
.child(Button::new("decline", "Decline").on_click({
let notification = notification.clone();
let entity = cx.entity().clone();
let model = cx.entity().clone();
move |_, _, cx| {
entity.update(cx, |this, cx| {
model.update(cx, |this, cx| {
this.respond_to_notification(
notification.clone(),
false,
@@ -336,9 +336,9 @@ impl NotificationPanel {
}))
.child(Button::new("accept", "Accept").on_click({
let notification = notification.clone();
let entity = cx.entity().clone();
let model = cx.entity().clone();
move |_, _, cx| {
entity.update(cx, |this, cx| {
model.update(cx, |this, cx| {
this.respond_to_notification(
notification.clone(),
true,

View File

@@ -1,23 +0,0 @@
[package]
name = "component"
version = "0.1.0"
edition.workspace = true
publish.workspace = true
license = "GPL-3.0-or-later"
[lints]
workspace = true
[lib]
path = "src/component.rs"
[dependencies]
collections.workspace = true
gpui.workspace = true
linkme.workspace = true
once_cell = "1.20.3"
parking_lot.workspace = true
theme.workspace = true
[features]
default = []

View File

@@ -1,305 +0,0 @@
use std::ops::{Deref, DerefMut};
use collections::HashMap;
use gpui::{div, prelude::*, AnyElement, App, IntoElement, RenderOnce, SharedString, Window};
use linkme::distributed_slice;
use once_cell::sync::Lazy;
use parking_lot::RwLock;
use theme::ActiveTheme;
pub trait Component {
fn scope() -> Option<&'static str>;
fn name() -> &'static str {
std::any::type_name::<Self>()
}
fn description() -> Option<&'static str> {
None
}
}
pub trait ComponentPreview: Component {
fn preview(_window: &mut Window, _cx: &App) -> AnyElement;
}
#[distributed_slice]
pub static __ALL_COMPONENTS: [fn()] = [..];
#[distributed_slice]
pub static __ALL_PREVIEWS: [fn()] = [..];
pub static COMPONENT_DATA: Lazy<RwLock<ComponentRegistry>> =
Lazy::new(|| RwLock::new(ComponentRegistry::new()));
pub struct ComponentRegistry {
components: Vec<(Option<&'static str>, &'static str, Option<&'static str>)>,
previews: HashMap<&'static str, fn(&mut Window, &App) -> AnyElement>,
}
impl ComponentRegistry {
fn new() -> Self {
ComponentRegistry {
components: Vec::new(),
previews: HashMap::default(),
}
}
}
pub fn init() {
let component_fns: Vec<_> = __ALL_COMPONENTS.iter().cloned().collect();
let preview_fns: Vec<_> = __ALL_PREVIEWS.iter().cloned().collect();
for f in component_fns {
f();
}
for f in preview_fns {
f();
}
}
pub fn register_component<T: Component>() {
let component_data = (T::scope(), T::name(), T::description());
COMPONENT_DATA.write().components.push(component_data);
}
pub fn register_preview<T: ComponentPreview>() {
let preview_data = (T::name(), T::preview as fn(&mut Window, &App) -> AnyElement);
COMPONENT_DATA
.write()
.previews
.insert(preview_data.0, preview_data.1);
}
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct ComponentId(pub &'static str);
#[derive(Clone)]
pub struct ComponentMetadata {
name: SharedString,
scope: Option<SharedString>,
description: Option<SharedString>,
preview: Option<fn(&mut Window, &App) -> AnyElement>,
}
impl ComponentMetadata {
pub fn name(&self) -> SharedString {
self.name.clone()
}
pub fn scope(&self) -> Option<SharedString> {
self.scope.clone()
}
pub fn description(&self) -> Option<SharedString> {
self.description.clone()
}
pub fn preview(&self) -> Option<fn(&mut Window, &App) -> AnyElement> {
self.preview
}
}
pub struct AllComponents(pub HashMap<ComponentId, ComponentMetadata>);
impl AllComponents {
pub fn new() -> Self {
AllComponents(HashMap::default())
}
/// Returns all components with previews
pub fn all_previews(&self) -> Vec<&ComponentMetadata> {
self.0.values().filter(|c| c.preview.is_some()).collect()
}
/// Returns all components with previews sorted by name
pub fn all_previews_sorted(&self) -> Vec<ComponentMetadata> {
let mut previews: Vec<ComponentMetadata> =
self.all_previews().into_iter().cloned().collect();
previews.sort_by_key(|a| a.name());
previews
}
/// Returns all components
pub fn all(&self) -> Vec<&ComponentMetadata> {
self.0.values().collect()
}
/// Returns all components sorted by name
pub fn all_sorted(&self) -> Vec<ComponentMetadata> {
let mut components: Vec<ComponentMetadata> = self.all().into_iter().cloned().collect();
components.sort_by_key(|a| a.name());
components
}
}
impl Deref for AllComponents {
type Target = HashMap<ComponentId, ComponentMetadata>;
fn deref(&self) -> &Self::Target {
&self.0
}
}
impl DerefMut for AllComponents {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.0
}
}
pub fn components() -> AllComponents {
let data = COMPONENT_DATA.read();
let mut all_components = AllComponents::new();
for &(scope, name, description) in &data.components {
let scope = scope.map(Into::into);
let preview = data.previews.get(name).cloned();
all_components.insert(
ComponentId(name),
ComponentMetadata {
name: name.into(),
scope,
description: description.map(Into::into),
preview,
},
);
}
all_components
}
/// Which side of the preview to show labels on
#[derive(Default, Debug, Clone, Copy, PartialEq, Eq)]
pub enum ExampleLabelSide {
/// Left side
Left,
/// Right side
Right,
#[default]
/// Top side
Top,
/// Bottom side
Bottom,
}
/// A single example of a component.
#[derive(IntoElement)]
pub struct ComponentExample {
variant_name: SharedString,
element: AnyElement,
label_side: ExampleLabelSide,
grow: bool,
}
impl RenderOnce for ComponentExample {
fn render(self, _window: &mut Window, cx: &mut App) -> impl IntoElement {
let base = div().flex();
let base = match self.label_side {
ExampleLabelSide::Right => base.flex_row(),
ExampleLabelSide::Left => base.flex_row_reverse(),
ExampleLabelSide::Bottom => base.flex_col(),
ExampleLabelSide::Top => base.flex_col_reverse(),
};
base.gap_1()
.text_xs()
.text_color(cx.theme().colors().text_muted)
.when(self.grow, |this| this.flex_1())
.child(self.element)
.child(self.variant_name)
.into_any_element()
}
}
impl ComponentExample {
/// Create a new example with the given variant name and example value.
pub fn new(variant_name: impl Into<SharedString>, element: AnyElement) -> Self {
Self {
variant_name: variant_name.into(),
element,
label_side: ExampleLabelSide::default(),
grow: false,
}
}
/// Set the example to grow to fill the available horizontal space.
pub fn grow(mut self) -> Self {
self.grow = true;
self
}
}
/// A group of component examples.
#[derive(IntoElement)]
pub struct ComponentExampleGroup {
pub title: Option<SharedString>,
pub examples: Vec<ComponentExample>,
pub grow: bool,
}
impl RenderOnce for ComponentExampleGroup {
fn render(self, _window: &mut Window, cx: &mut App) -> impl IntoElement {
div()
.flex_col()
.text_sm()
.text_color(cx.theme().colors().text_muted)
.when(self.grow, |this| this.w_full().flex_1())
.when_some(self.title, |this, title| this.gap_4().child(title))
.child(
div()
.flex()
.items_start()
.w_full()
.gap_6()
.children(self.examples)
.into_any_element(),
)
.into_any_element()
}
}
impl ComponentExampleGroup {
/// Create a new group of examples without a title.
pub fn new(examples: Vec<ComponentExample>) -> Self {
Self {
title: None,
examples,
grow: false,
}
}
/// Create a new group of examples with the given title.
pub fn with_title(title: impl Into<SharedString>, examples: Vec<ComponentExample>) -> Self {
Self {
title: Some(title.into()),
examples,
grow: false,
}
}
/// Set the group to grow to fill the available horizontal space.
pub fn grow(mut self) -> Self {
self.grow = true;
self
}
}
/// Create a single example
pub fn single_example(
variant_name: impl Into<SharedString>,
example: AnyElement,
) -> ComponentExample {
ComponentExample::new(variant_name, example)
}
/// Create a group of examples without a title
pub fn example_group(examples: Vec<ComponentExample>) -> ComponentExampleGroup {
ComponentExampleGroup::new(examples)
}
/// Create a group of examples with a title
pub fn example_group_with_title(
title: impl Into<SharedString>,
examples: Vec<ComponentExample>,
) -> ComponentExampleGroup {
ComponentExampleGroup::with_title(title, examples)
}

View File

@@ -1,21 +0,0 @@
[package]
name = "component_preview"
version = "0.1.0"
edition.workspace = true
publish.workspace = true
license = "GPL-3.0-or-later"
[lints]
workspace = true
[lib]
path = "src/component_preview.rs"
[features]
default = []
[dependencies]
component.workspace = true
gpui.workspace = true
ui.workspace = true
workspace.workspace = true

View File

@@ -1,178 +0,0 @@
//! # Component Preview
//!
//! A view for exploring Zed components.
use component::{components, ComponentMetadata};
use gpui::{prelude::*, App, EventEmitter, FocusHandle, Focusable, Window};
use ui::prelude::*;
use workspace::{item::ItemEvent, Item, Workspace, WorkspaceId};
pub fn init(cx: &mut App) {
cx.observe_new(|workspace: &mut Workspace, _, _cx| {
workspace.register_action(
|workspace, _: &workspace::OpenComponentPreview, window, cx| {
let component_preview = cx.new(ComponentPreview::new);
workspace.add_item_to_active_pane(
Box::new(component_preview),
None,
true,
window,
cx,
)
},
);
})
.detach();
}
struct ComponentPreview {
focus_handle: FocusHandle,
}
impl ComponentPreview {
pub fn new(cx: &mut Context<Self>) -> Self {
Self {
focus_handle: cx.focus_handle(),
}
}
fn render_sidebar(&self, _window: &Window, _cx: &Context<Self>) -> impl IntoElement {
let components = components().all_sorted();
let sorted_components = components.clone();
v_flex().gap_px().p_1().children(
sorted_components
.into_iter()
.map(|component| self.render_sidebar_entry(&component, _cx)),
)
}
fn render_sidebar_entry(
&self,
component: &ComponentMetadata,
_cx: &Context<Self>,
) -> impl IntoElement {
h_flex()
.w_40()
.px_1p5()
.py_1()
.child(component.name().clone())
}
fn render_preview(
&self,
component: &ComponentMetadata,
window: &mut Window,
cx: &Context<Self>,
) -> impl IntoElement {
let name = component.name();
let scope = component.scope();
let description = component.description();
v_group()
.w_full()
.gap_4()
.p_8()
.rounded_md()
.child(
v_flex()
.gap_1()
.child(
h_flex()
.gap_1()
.text_xl()
.child(div().child(name))
.when_some(scope, |this, scope| {
this.child(div().opacity(0.5).child(format!("({})", scope)))
}),
)
.when_some(description, |this, description| {
this.child(
div()
.text_ui_sm(cx)
.text_color(cx.theme().colors().text_muted)
.max_w(px(600.0))
.child(description),
)
}),
)
.when_some(component.preview(), |this, preview| {
this.child(preview(window, cx))
})
.into_any_element()
}
fn render_previews(&self, window: &mut Window, cx: &Context<Self>) -> impl IntoElement {
v_flex()
.id("component-previews")
.size_full()
.overflow_y_scroll()
.p_4()
.gap_2()
.children(
components()
.all_previews_sorted()
.iter()
.map(|component| self.render_preview(component, window, cx)),
)
}
}
impl Render for ComponentPreview {
fn render(&mut self, window: &mut Window, cx: &mut Context<'_, Self>) -> impl IntoElement {
h_flex()
.id("component-preview")
.key_context("ComponentPreview")
.items_start()
.overflow_hidden()
.size_full()
.max_h_full()
.track_focus(&self.focus_handle)
.px_2()
.bg(cx.theme().colors().editor_background)
.child(self.render_sidebar(window, cx))
.child(self.render_previews(window, cx))
}
}
impl EventEmitter<ItemEvent> for ComponentPreview {}
impl Focusable for ComponentPreview {
fn focus_handle(&self, _: &App) -> gpui::FocusHandle {
self.focus_handle.clone()
}
}
impl Item for ComponentPreview {
type Event = ItemEvent;
fn tab_content_text(&self, _window: &Window, _cx: &App) -> Option<SharedString> {
Some("Component Preview".into())
}
fn telemetry_event_text(&self) -> Option<&'static str> {
None
}
fn show_toolbar(&self) -> bool {
false
}
fn clone_on_split(
&self,
_workspace_id: Option<WorkspaceId>,
_window: &mut Window,
cx: &mut Context<Self>,
) -> Option<gpui::Entity<Self>>
where
Self: Sized,
{
Some(cx.new(Self::new))
}
fn to_item_events(event: &Self::Event, mut f: impl FnMut(workspace::item::ItemEvent)) {
f(*event)
}
}

View File

@@ -59,20 +59,20 @@ workspace.workspace = true
async-std = { version = "1.12.0", features = ["unstable"] }
[dev-dependencies]
client = { workspace = true, features = ["test-support"] }
indoc.workspace = true
serde_json.workspace = true
clock = { workspace = true, features = ["test-support"] }
client = { workspace = true, features = ["test-support"] }
collections = { workspace = true, features = ["test-support"] }
editor = { workspace = true, features = ["test-support"] }
fs = { workspace = true, features = ["test-support"] }
gpui = { workspace = true, features = ["test-support"] }
http_client = { workspace = true, features = ["test-support"] }
indoc.workspace = true
language = { workspace = true, features = ["test-support"] }
lsp = { workspace = true, features = ["test-support"] }
node_runtime = { workspace = true, features = ["test-support"] }
project = { workspace = true, features = ["test-support"] }
rpc = { workspace = true, features = ["test-support"] }
serde_json.workspace = true
settings = { workspace = true, features = ["test-support"] }
theme = { workspace = true, features = ["test-support"] }
util = { workspace = true, features = ["test-support"] }

View File

@@ -17,7 +17,7 @@ use gpui::{
use http_client::github::get_release_by_tag_name;
use http_client::HttpClient;
use language::{
language_settings::{all_language_settings, language_settings, EditPredictionProvider},
language_settings::{all_language_settings, language_settings, InlineCompletionProvider},
point_from_lsp, point_to_lsp, Anchor, Bias, Buffer, BufferSnapshot, Language, PointUtf16,
ToPointUtf16,
};
@@ -368,8 +368,8 @@ impl Copilot {
let server_id = self.server_id;
let http = self.http.clone();
let node_runtime = self.node_runtime.clone();
if all_language_settings(None, cx).edit_predictions.provider
== EditPredictionProvider::Copilot
if all_language_settings(None, cx).inline_completions.provider
== InlineCompletionProvider::Copilot
{
if matches!(self.server, CopilotServer::Disabled) {
let start_task = cx
@@ -1061,7 +1061,6 @@ async fn get_copilot_lsp(http: Arc<dyn HttpClient>) -> anyhow::Result<PathBuf> {
mod tests {
use super::*;
use gpui::TestAppContext;
use util::path;
#[gpui::test(iterations = 10)]
async fn test_buffer_management(cx: &mut TestAppContext) {
@@ -1124,7 +1123,7 @@ mod tests {
buffer_1.update(cx, |buffer, cx| {
buffer.file_updated(
Arc::new(File {
abs_path: path!("/root/child/buffer-1").into(),
abs_path: "/root/child/buffer-1".into(),
path: Path::new("child/buffer-1").into(),
}),
cx,
@@ -1137,7 +1136,7 @@ mod tests {
text_document: lsp::TextDocumentIdentifier::new(buffer_1_uri),
}
);
let buffer_1_uri = lsp::Url::from_file_path(path!("/root/child/buffer-1")).unwrap();
let buffer_1_uri = lsp::Url::from_file_path("/root/child/buffer-1").unwrap();
assert_eq!(
lsp.receive_notification::<lsp::notification::DidOpenTextDocument>()
.await,

View File

@@ -36,8 +36,8 @@ pub enum Model {
Gpt3_5Turbo,
#[serde(alias = "o1", rename = "o1")]
O1,
#[serde(alias = "o1-mini", rename = "o3-mini")]
O3Mini,
#[serde(alias = "o1-mini", rename = "o1-mini")]
O1Mini,
#[serde(alias = "claude-3-5-sonnet", rename = "claude-3.5-sonnet")]
Claude3_5Sonnet,
}
@@ -46,7 +46,7 @@ impl Model {
pub fn uses_streaming(&self) -> bool {
match self {
Self::Gpt4o | Self::Gpt4 | Self::Gpt3_5Turbo | Self::Claude3_5Sonnet => true,
Self::O3Mini | Self::O1 => false,
Self::O1Mini | Self::O1 => false,
}
}
@@ -56,7 +56,7 @@ impl Model {
"gpt-4" => Ok(Self::Gpt4),
"gpt-3.5-turbo" => Ok(Self::Gpt3_5Turbo),
"o1" => Ok(Self::O1),
"o3-mini" => Ok(Self::O3Mini),
"o1-mini" => Ok(Self::O1Mini),
"claude-3-5-sonnet" => Ok(Self::Claude3_5Sonnet),
_ => Err(anyhow!("Invalid model id: {}", id)),
}
@@ -67,7 +67,7 @@ impl Model {
Self::Gpt3_5Turbo => "gpt-3.5-turbo",
Self::Gpt4 => "gpt-4",
Self::Gpt4o => "gpt-4o",
Self::O3Mini => "o3-mini",
Self::O1Mini => "o1-mini",
Self::O1 => "o1",
Self::Claude3_5Sonnet => "claude-3-5-sonnet",
}
@@ -78,7 +78,7 @@ impl Model {
Self::Gpt3_5Turbo => "GPT-3.5",
Self::Gpt4 => "GPT-4",
Self::Gpt4o => "GPT-4o",
Self::O3Mini => "o3-mini",
Self::O1Mini => "o1-mini",
Self::O1 => "o1",
Self::Claude3_5Sonnet => "Claude 3.5 Sonnet",
}
@@ -89,7 +89,7 @@ impl Model {
Self::Gpt4o => 64000,
Self::Gpt4 => 32768,
Self::Gpt3_5Turbo => 12288,
Self::O3Mini => 20000,
Self::O1Mini => 20000,
Self::O1 => 20000,
Self::Claude3_5Sonnet => 200_000,
}

View File

@@ -1,9 +1,11 @@
use crate::{Completion, Copilot};
use anyhow::Result;
use gpui::{App, Context, Entity, EntityId, Task};
use inline_completion::{Direction, EditPredictionProvider, InlineCompletion};
use language::{language_settings::AllLanguageSettings, Buffer, OffsetRangeExt, ToOffset};
use project::Project;
use inline_completion::{Direction, InlineCompletion, InlineCompletionProvider};
use language::{
language_settings::{all_language_settings, AllLanguageSettings},
Buffer, OffsetRangeExt, ToOffset,
};
use settings::Settings;
use std::{path::Path, time::Duration};
@@ -48,7 +50,7 @@ impl CopilotCompletionProvider {
}
}
impl EditPredictionProvider for CopilotCompletionProvider {
impl InlineCompletionProvider for CopilotCompletionProvider {
fn name() -> &'static str {
"copilot"
}
@@ -61,22 +63,33 @@ impl EditPredictionProvider for CopilotCompletionProvider {
false
}
fn show_completions_in_normal_mode() -> bool {
false
}
fn is_refreshing(&self) -> bool {
self.pending_refresh.is_some()
}
fn is_enabled(
&self,
_buffer: &Entity<Buffer>,
_cursor_position: language::Anchor,
buffer: &Entity<Buffer>,
cursor_position: language::Anchor,
cx: &App,
) -> bool {
self.copilot.read(cx).status().is_authorized()
if !self.copilot.read(cx).status().is_authorized() {
return false;
}
let buffer = buffer.read(cx);
let file = buffer.file();
let language = buffer.language_at(cursor_position);
let settings = all_language_settings(file, cx);
settings.inline_completions_enabled(language.as_ref(), file.map(|f| f.path().as_ref()), cx)
}
fn refresh(
&mut self,
_project: Option<Entity<Project>>,
buffer: Entity<Buffer>,
cursor_position: language::Anchor,
debounce: bool,
@@ -192,7 +205,7 @@ impl EditPredictionProvider for CopilotCompletionProvider {
fn discard(&mut self, cx: &mut Context<Self>) {
let settings = AllLanguageSettings::get_global(cx);
let copilot_enabled = settings.show_inline_completions(None, cx);
let copilot_enabled = settings.inline_completions_enabled(None, None, cx);
if !copilot_enabled {
return;
@@ -242,7 +255,6 @@ impl EditPredictionProvider for CopilotCompletionProvider {
} else {
let position = cursor_position.bias_right(buffer);
Some(InlineCompletion {
id: None,
edits: vec![(position..position, completion_text.into())],
edit_preview: None,
})
@@ -278,10 +290,7 @@ mod tests {
use serde_json::json;
use settings::SettingsStore;
use std::future::Future;
use util::{
path,
test::{marked_text_ranges_by, TextRangeMarker},
};
use util::test::{marked_text_ranges_by, TextRangeMarker};
#[gpui::test(iterations = 10)]
async fn test_copilot(executor: BackgroundExecutor, cx: &mut TestAppContext) {
@@ -302,7 +311,7 @@ mod tests {
.await;
let copilot_provider = cx.new(|_| CopilotCompletionProvider::new(copilot));
cx.update_editor(|editor, window, cx| {
editor.set_edit_prediction_provider(Some(copilot_provider), window, cx)
editor.set_inline_completion_provider(Some(copilot_provider), window, cx)
});
cx.set_state(indoc! {"
@@ -332,6 +341,7 @@ mod tests {
executor.advance_clock(COPILOT_DEBOUNCE_TIMEOUT);
cx.update_editor(|editor, window, cx| {
assert!(editor.context_menu_visible());
assert!(!editor.context_menu_contains_inline_completion());
assert!(!editor.has_active_inline_completion());
// Since we have both, the copilot suggestion is not shown inline
assert_eq!(editor.text(cx), "one.\ntwo\nthree\n");
@@ -384,11 +394,12 @@ mod tests {
assert_eq!(editor.text(cx), "one.\ntwo\nthree\n");
});
// Ensure existing edit prediction is interpolated when inserting again.
// Ensure existing inline completion is interpolated when inserting again.
cx.simulate_keystroke("c");
executor.run_until_parked();
cx.update_editor(|editor, _, cx| {
assert!(!editor.context_menu_visible());
assert!(!editor.context_menu_contains_inline_completion());
assert!(editor.has_active_inline_completion());
assert_eq!(editor.display_text(cx), "one.copilot1\ntwo\nthree\n");
assert_eq!(editor.text(cx), "one.c\ntwo\nthree\n");
@@ -408,6 +419,7 @@ mod tests {
cx.update_editor(|editor, window, cx| {
assert!(!editor.context_menu_visible());
assert!(editor.has_active_inline_completion());
assert!(!editor.context_menu_contains_inline_completion());
assert_eq!(editor.display_text(cx), "one.copilot2\ntwo\nthree\n");
assert_eq!(editor.text(cx), "one.c\ntwo\nthree\n");
@@ -437,8 +449,8 @@ mod tests {
assert_eq!(editor.display_text(cx), "one.copilot2\ntwo\nthree\n");
assert_eq!(editor.text(cx), "one.co\ntwo\nthree\n");
// AcceptEditPrediction when there is an active suggestion inserts it.
editor.accept_edit_prediction(&Default::default(), window, cx);
// AcceptInlineCompletion when there is an active suggestion inserts it.
editor.accept_inline_completion(&Default::default(), window, cx);
assert!(!editor.has_active_inline_completion());
assert_eq!(editor.display_text(cx), "one.copilot2\ntwo\nthree\n");
assert_eq!(editor.text(cx), "one.copilot2\ntwo\nthree\n");
@@ -483,7 +495,7 @@ mod tests {
);
cx.update_editor(|editor, window, cx| {
editor.next_edit_prediction(&Default::default(), window, cx)
editor.next_inline_completion(&Default::default(), window, cx)
});
executor.advance_clock(COPILOT_DEBOUNCE_TIMEOUT);
cx.update_editor(|editor, window, cx| {
@@ -497,8 +509,8 @@ mod tests {
assert_eq!(editor.text(cx), "fn foo() {\n \n}");
assert_eq!(editor.display_text(cx), "fn foo() {\n let x = 4;\n}");
// Using AcceptEditPrediction again accepts the suggestion.
editor.accept_edit_prediction(&Default::default(), window, cx);
// Using AcceptInlineCompletion again accepts the suggestion.
editor.accept_inline_completion(&Default::default(), window, cx);
assert!(!editor.has_active_inline_completion());
assert_eq!(editor.text(cx), "fn foo() {\n let x = 4;\n}");
assert_eq!(editor.display_text(cx), "fn foo() {\n let x = 4;\n}");
@@ -527,7 +539,7 @@ mod tests {
.await;
let copilot_provider = cx.new(|_| CopilotCompletionProvider::new(copilot));
cx.update_editor(|editor, window, cx| {
editor.set_edit_prediction_provider(Some(copilot_provider), window, cx)
editor.set_inline_completion_provider(Some(copilot_provider), window, cx)
});
// Setup the editor with a completion request.
@@ -651,7 +663,7 @@ mod tests {
.await;
let copilot_provider = cx.new(|_| CopilotCompletionProvider::new(copilot));
cx.update_editor(|editor, window, cx| {
editor.set_edit_prediction_provider(Some(copilot_provider), window, cx)
editor.set_inline_completion_provider(Some(copilot_provider), window, cx)
});
cx.set_state(indoc! {"
@@ -670,7 +682,7 @@ mod tests {
vec![],
);
cx.update_editor(|editor, window, cx| {
editor.next_edit_prediction(&Default::default(), window, cx)
editor.next_inline_completion(&Default::default(), window, cx)
});
executor.advance_clock(COPILOT_DEBOUNCE_TIMEOUT);
cx.update_editor(|editor, window, cx| {
@@ -741,7 +753,7 @@ mod tests {
let copilot_provider = cx.new(|_| CopilotCompletionProvider::new(copilot));
editor
.update(cx, |editor, window, cx| {
editor.set_edit_prediction_provider(Some(copilot_provider), window, cx)
editor.set_inline_completion_provider(Some(copilot_provider), window, cx)
})
.unwrap();
@@ -759,7 +771,7 @@ mod tests {
editor.change_selections(None, window, cx, |s| {
s.select_ranges([Point::new(1, 5)..Point::new(1, 5)])
});
editor.next_edit_prediction(&Default::default(), window, cx);
editor.next_inline_completion(&Default::default(), window, cx);
});
executor.advance_clock(COPILOT_DEBOUNCE_TIMEOUT);
_ = editor.update(cx, |editor, _, cx| {
@@ -835,7 +847,7 @@ mod tests {
.await;
let copilot_provider = cx.new(|_| CopilotCompletionProvider::new(copilot));
cx.update_editor(|editor, window, cx| {
editor.set_edit_prediction_provider(Some(copilot_provider), window, cx)
editor.set_inline_completion_provider(Some(copilot_provider), window, cx)
});
cx.set_state(indoc! {"
@@ -863,7 +875,7 @@ mod tests {
vec![],
);
cx.update_editor(|editor, window, cx| {
editor.next_edit_prediction(&Default::default(), window, cx)
editor.next_inline_completion(&Default::default(), window, cx)
});
executor.advance_clock(COPILOT_DEBOUNCE_TIMEOUT);
cx.update_editor(|editor, _, cx| {
@@ -922,6 +934,7 @@ mod tests {
executor.advance_clock(COPILOT_DEBOUNCE_TIMEOUT);
cx.update_editor(|editor, _, cx| {
assert!(editor.context_menu_visible());
assert!(!editor.context_menu_contains_inline_completion());
assert!(!editor.has_active_inline_completion(),);
assert_eq!(editor.text(cx), "one\ntwo.\nthree\n");
});
@@ -931,7 +944,7 @@ mod tests {
async fn test_copilot_disabled_globs(executor: BackgroundExecutor, cx: &mut TestAppContext) {
init_test(cx, |settings| {
settings
.edit_predictions
.inline_completions
.get_or_insert(Default::default())
.disabled_globs = Some(vec![".env*".to_string()]);
});
@@ -940,24 +953,24 @@ mod tests {
let fs = FakeFs::new(cx.executor());
fs.insert_tree(
path!("/test"),
"/test",
json!({
".env": "SECRET=something\n",
"README.md": "hello\nworld\nhow\nare\nyou\ntoday"
}),
)
.await;
let project = Project::test(fs, [path!("/test").as_ref()], cx).await;
let project = Project::test(fs, ["/test".as_ref()], cx).await;
let private_buffer = project
.update(cx, |project, cx| {
project.open_local_buffer(path!("/test/.env"), cx)
project.open_local_buffer("/test/.env", cx)
})
.await
.unwrap();
let public_buffer = project
.update(cx, |project, cx| {
project.open_local_buffer(path!("/test/README.md"), cx)
project.open_local_buffer("/test/README.md", cx)
})
.await
.unwrap();
@@ -993,7 +1006,7 @@ mod tests {
let copilot_provider = cx.new(|_| CopilotCompletionProvider::new(copilot));
editor
.update(cx, |editor, window, cx| {
editor.set_edit_prediction_provider(Some(copilot_provider), window, cx)
editor.set_inline_completion_provider(Some(copilot_provider), window, cx)
})
.unwrap();

View File

@@ -750,7 +750,7 @@ impl Item for ProjectDiagnosticsEditor {
}
fn telemetry_event_text(&self) -> Option<&'static str> {
Some("Project Diagnostics Opened")
Some("project diagnostics")
}
fn for_each_project_item(
@@ -933,7 +933,7 @@ fn diagnostic_header_renderer(diagnostic: Diagnostic) -> RenderBlock {
.when_some(diagnostic.code.as_ref(), |stack, code| {
stack.child(
div()
.child(SharedString::from(format!("({code:?})")))
.child(SharedString::from(format!("({code})")))
.text_color(color.text_muted),
)
}),

View File

@@ -18,7 +18,7 @@ use std::{
path::{Path, PathBuf},
};
use unindent::Unindent as _;
use util::{path, post_inc, RandomCharIter};
use util::{post_inc, RandomCharIter};
#[ctor::ctor]
fn init_logger() {
@@ -33,7 +33,7 @@ async fn test_diagnostics(cx: &mut TestAppContext) {
let fs = FakeFs::new(cx.executor());
fs.insert_tree(
path!("/test"),
"/test",
json!({
"consts.rs": "
const a: i32 = 'a';
@@ -59,7 +59,7 @@ async fn test_diagnostics(cx: &mut TestAppContext) {
.await;
let language_server_id = LanguageServerId(0);
let project = Project::test(fs.clone(), [path!("/test").as_ref()], cx).await;
let project = Project::test(fs.clone(), ["/test".as_ref()], cx).await;
let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
let window = cx.add_window(|window, cx| Workspace::test_new(project.clone(), window, cx));
let cx = &mut VisualTestContext::from_window(*window, cx);
@@ -70,7 +70,7 @@ async fn test_diagnostics(cx: &mut TestAppContext) {
lsp_store
.update_diagnostic_entries(
language_server_id,
PathBuf::from(path!("/test/main.rs")),
PathBuf::from("/test/main.rs"),
None,
vec![
DiagnosticEntry {
@@ -150,7 +150,7 @@ async fn test_diagnostics(cx: &mut TestAppContext) {
});
// Open the project diagnostics view while there are already diagnostics.
let diagnostics = window.build_entity(cx, |window, cx| {
let diagnostics = window.build_model(cx, |window, cx| {
ProjectDiagnosticsEditor::new_with_context(
1,
true,
@@ -234,7 +234,7 @@ async fn test_diagnostics(cx: &mut TestAppContext) {
lsp_store
.update_diagnostic_entries(
language_server_id,
PathBuf::from(path!("/test/consts.rs")),
PathBuf::from("/test/consts.rs"),
None,
vec![DiagnosticEntry {
range: Unclipped(PointUtf16::new(0, 15))..Unclipped(PointUtf16::new(0, 15)),
@@ -341,7 +341,7 @@ async fn test_diagnostics(cx: &mut TestAppContext) {
lsp_store
.update_diagnostic_entries(
language_server_id,
PathBuf::from(path!("/test/consts.rs")),
PathBuf::from("/test/consts.rs"),
None,
vec![
DiagnosticEntry {
@@ -464,7 +464,7 @@ async fn test_diagnostics_multiple_servers(cx: &mut TestAppContext) {
let fs = FakeFs::new(cx.executor());
fs.insert_tree(
path!("/test"),
"/test",
json!({
"main.js": "
a();
@@ -479,13 +479,13 @@ async fn test_diagnostics_multiple_servers(cx: &mut TestAppContext) {
let server_id_1 = LanguageServerId(100);
let server_id_2 = LanguageServerId(101);
let project = Project::test(fs.clone(), [path!("/test").as_ref()], cx).await;
let project = Project::test(fs.clone(), ["/test".as_ref()], cx).await;
let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
let window = cx.add_window(|window, cx| Workspace::test_new(project.clone(), window, cx));
let cx = &mut VisualTestContext::from_window(*window, cx);
let workspace = window.root(cx).unwrap();
let diagnostics = window.build_entity(cx, |window, cx| {
let diagnostics = window.build_model(cx, |window, cx| {
ProjectDiagnosticsEditor::new_with_context(
1,
true,
@@ -504,7 +504,7 @@ async fn test_diagnostics_multiple_servers(cx: &mut TestAppContext) {
lsp_store
.update_diagnostic_entries(
server_id_1,
PathBuf::from(path!("/test/main.js")),
PathBuf::from("/test/main.js"),
None,
vec![DiagnosticEntry {
range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 1)),
@@ -557,7 +557,7 @@ async fn test_diagnostics_multiple_servers(cx: &mut TestAppContext) {
lsp_store
.update_diagnostic_entries(
server_id_2,
PathBuf::from(path!("/test/main.js")),
PathBuf::from("/test/main.js"),
None,
vec![DiagnosticEntry {
range: Unclipped(PointUtf16::new(1, 0))..Unclipped(PointUtf16::new(1, 1)),
@@ -619,7 +619,7 @@ async fn test_diagnostics_multiple_servers(cx: &mut TestAppContext) {
lsp_store
.update_diagnostic_entries(
server_id_1,
PathBuf::from(path!("/test/main.js")),
PathBuf::from("/test/main.js"),
None,
vec![DiagnosticEntry {
range: Unclipped(PointUtf16::new(2, 0))..Unclipped(PointUtf16::new(2, 1)),
@@ -638,7 +638,7 @@ async fn test_diagnostics_multiple_servers(cx: &mut TestAppContext) {
lsp_store
.update_diagnostic_entries(
server_id_2,
PathBuf::from(path!("/test/main.rs")),
PathBuf::from("/test/main.rs"),
None,
vec![],
cx,
@@ -689,7 +689,7 @@ async fn test_diagnostics_multiple_servers(cx: &mut TestAppContext) {
lsp_store
.update_diagnostic_entries(
server_id_2,
PathBuf::from(path!("/test/main.js")),
PathBuf::from("/test/main.js"),
None,
vec![DiagnosticEntry {
range: Unclipped(PointUtf16::new(3, 0))..Unclipped(PointUtf16::new(3, 1)),
@@ -755,15 +755,15 @@ async fn test_random_diagnostics(cx: &mut TestAppContext, mut rng: StdRng) {
.unwrap_or(10);
let fs = FakeFs::new(cx.executor());
fs.insert_tree(path!("/test"), json!({})).await;
fs.insert_tree("/test", json!({})).await;
let project = Project::test(fs.clone(), [path!("/test").as_ref()], cx).await;
let project = Project::test(fs.clone(), ["/test".as_ref()], cx).await;
let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
let window = cx.add_window(|window, cx| Workspace::test_new(project.clone(), window, cx));
let cx = &mut VisualTestContext::from_window(*window, cx);
let workspace = window.root(cx).unwrap();
let mutated_diagnostics = window.build_entity(cx, |window, cx| {
let mutated_diagnostics = window.build_model(cx, |window, cx| {
ProjectDiagnosticsEditor::new_with_context(
1,
true,
@@ -817,7 +817,7 @@ async fn test_random_diagnostics(cx: &mut TestAppContext, mut rng: StdRng) {
// insert a set of diagnostics for a new path
_ => {
let path: PathBuf =
format!(path!("/test/{}.rs"), post_inc(&mut next_filename)).into();
format!("/test/{}.rs", post_inc(&mut next_filename)).into();
let len = rng.gen_range(128..256);
let content =
RandomCharIter::new(&mut rng).take(len).collect::<String>();
@@ -870,7 +870,7 @@ async fn test_random_diagnostics(cx: &mut TestAppContext, mut rng: StdRng) {
cx.run_until_parked();
log::info!("constructing reference diagnostics view");
let reference_diagnostics = window.build_entity(cx, |window, cx| {
let reference_diagnostics = window.build_model(cx, |window, cx| {
ProjectDiagnosticsEditor::new_with_context(
1,
true,
@@ -891,7 +891,7 @@ async fn test_random_diagnostics(cx: &mut TestAppContext, mut rng: StdRng) {
for diagnostic in diagnostics {
let found_excerpt = reference_excerpts.iter().any(|info| {
let row_range = info.range.context.start.row..info.range.context.end.row;
info.path == path.strip_prefix(path!("/test")).unwrap()
info.path == path.strip_prefix("/test").unwrap()
&& info.language_server == language_server_id
&& row_range.contains(&diagnostic.range.start.0.row)
});
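The hunks above strip the path!(...) wrapper from hard-coded absolute test paths such as "/test/main.rs". In Zed's test utilities that macro exists so POSIX-style paths also work on Windows runners; its real implementation is not part of this diff, so the helper below is only a hypothetical sketch of the idea (the test_path name is invented for illustration).

// Hypothetical stand-in for util's path!() macro, illustrative only:
// map a POSIX-style absolute test path onto something the host OS accepts.
fn test_path(posix: &str) -> String {
    if cfg!(windows) {
        // e.g. "/test/main.rs" -> "C:\test\main.rs"
        format!("C:{}", posix.replace('/', "\\"))
    } else {
        posix.to_owned()
    }
}

fn main() {
    let p = test_path("/test/main.rs");
    if cfg!(windows) {
        assert_eq!(p, r"C:\test\main.rs");
    } else {
        assert_eq!(p, "/test/main.rs");
    }
}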

View File

@@ -86,6 +86,9 @@ impl Render for DiagnosticIndicator {
h_flex()
.gap_2()
.pl_1()
.border_l_1()
.border_color(cx.theme().colors().border)
.child(
ButtonLike::new("diagnostic-indicator")
.child(diagnostic_indicator)
@@ -157,7 +160,7 @@ impl DiagnosticIndicator {
(buffer, cursor_position)
});
let new_diagnostic = buffer
.diagnostics_in_range::<usize>(cursor_position..cursor_position)
.diagnostics_in_range::<_, usize>(cursor_position..cursor_position)
.filter(|entry| !entry.range.is_empty())
.min_by_key(|entry| (entry.diagnostic.severity, entry.range.len()))
.map(|entry| entry.diagnostic);

View File

@@ -1,32 +0,0 @@
[package]
name = "diff"
version = "0.1.0"
edition.workspace = true
publish.workspace = true
license = "GPL-3.0-or-later"
[lints]
workspace = true
[lib]
path = "src/diff.rs"
[features]
test-support = []
[dependencies]
futures.workspace = true
git2.workspace = true
gpui.workspace = true
language.workspace = true
log.workspace = true
rope.workspace = true
sum_tree.workspace = true
text.workspace = true
util.workspace = true
[dev-dependencies]
pretty_assertions.workspace = true
serde_json.workspace = true
text = { workspace = true, features = ["test-support"] }
unindent.workspace = true

View File

@@ -1 +0,0 @@
../../LICENSE-GPL

View File

@@ -38,8 +38,8 @@ clock.workspace = true
collections.workspace = true
convert_case.workspace = true
db.workspace = true
diff.workspace = true
emojis.workspace = true
feature_flags.workspace = true
file_icons.workspace = true
futures.workspace = true
fuzzy.workspace = true
@@ -88,7 +88,7 @@ url.workspace = true
util.workspace = true
uuid.workspace = true
workspace.workspace = true
zed_actions.workspace = true
zed_predict_onboarding.workspace = true
[dev-dependencies]
ctor.workspace = true

View File

@@ -3,64 +3,56 @@ use super::*;
use gpui::{action_as, action_with_deprecated_aliases};
use schemars::JsonSchema;
use util::serde::default_true;
#[derive(PartialEq, Clone, Deserialize, Default, JsonSchema)]
#[serde(deny_unknown_fields)]
pub struct SelectNext {
#[serde(default)]
pub replace_newest: bool,
}
#[derive(PartialEq, Clone, Deserialize, Default, JsonSchema)]
#[serde(deny_unknown_fields)]
pub struct SelectPrevious {
#[serde(default)]
pub replace_newest: bool,
}
#[derive(PartialEq, Clone, Deserialize, Default, JsonSchema)]
#[serde(deny_unknown_fields)]
pub struct MoveToBeginningOfLine {
#[serde(default = "default_true")]
pub stop_at_soft_wraps: bool,
}
#[derive(PartialEq, Clone, Deserialize, Default, JsonSchema)]
#[serde(deny_unknown_fields)]
pub struct SelectToBeginningOfLine {
#[serde(default)]
pub(super) stop_at_soft_wraps: bool,
}
#[derive(PartialEq, Clone, Deserialize, Default, JsonSchema)]
#[serde(deny_unknown_fields)]
pub struct MovePageUp {
#[serde(default)]
pub(super) center_cursor: bool,
}
#[derive(PartialEq, Clone, Deserialize, Default, JsonSchema)]
#[serde(deny_unknown_fields)]
pub struct MovePageDown {
#[serde(default)]
pub(super) center_cursor: bool,
}
#[derive(PartialEq, Clone, Deserialize, Default, JsonSchema)]
#[serde(deny_unknown_fields)]
pub struct MoveToEndOfLine {
#[serde(default = "default_true")]
pub stop_at_soft_wraps: bool,
}
#[derive(PartialEq, Clone, Deserialize, Default, JsonSchema)]
#[serde(deny_unknown_fields)]
pub struct SelectToEndOfLine {
#[serde(default)]
pub(super) stop_at_soft_wraps: bool,
}
#[derive(PartialEq, Clone, Deserialize, Default, JsonSchema)]
#[serde(deny_unknown_fields)]
pub struct ToggleCodeActions {
// Display row from which the action was deployed.
#[serde(default)]
@@ -69,28 +61,24 @@ pub struct ToggleCodeActions {
}
#[derive(PartialEq, Clone, Deserialize, Default, JsonSchema)]
#[serde(deny_unknown_fields)]
pub struct ConfirmCompletion {
#[serde(default)]
pub item_ix: Option<usize>,
}
#[derive(PartialEq, Clone, Deserialize, Default, JsonSchema)]
#[serde(deny_unknown_fields)]
pub struct ComposeCompletion {
#[serde(default)]
pub item_ix: Option<usize>,
}
#[derive(PartialEq, Clone, Deserialize, Default, JsonSchema)]
#[serde(deny_unknown_fields)]
pub struct ConfirmCodeAction {
#[serde(default)]
pub item_ix: Option<usize>,
}
#[derive(PartialEq, Clone, Deserialize, Default, JsonSchema)]
#[serde(deny_unknown_fields)]
pub struct ToggleComments {
#[serde(default)]
pub advance_downwards: bool,
@@ -99,70 +87,60 @@ pub struct ToggleComments {
}
#[derive(PartialEq, Clone, Deserialize, Default, JsonSchema)]
#[serde(deny_unknown_fields)]
pub struct FoldAt {
#[serde(skip)]
pub buffer_row: MultiBufferRow,
}
#[derive(PartialEq, Clone, Deserialize, Default, JsonSchema)]
#[serde(deny_unknown_fields)]
pub struct UnfoldAt {
#[serde(skip)]
pub buffer_row: MultiBufferRow,
}
#[derive(PartialEq, Clone, Deserialize, Default, JsonSchema)]
#[serde(deny_unknown_fields)]
pub struct MoveUpByLines {
#[serde(default)]
pub(super) lines: u32,
}
#[derive(PartialEq, Clone, Deserialize, Default, JsonSchema)]
#[serde(deny_unknown_fields)]
pub struct MoveDownByLines {
#[serde(default)]
pub(super) lines: u32,
}
#[derive(PartialEq, Clone, Deserialize, Default, JsonSchema)]
#[serde(deny_unknown_fields)]
pub struct SelectUpByLines {
#[serde(default)]
pub(super) lines: u32,
}
#[derive(PartialEq, Clone, Deserialize, Default, JsonSchema)]
#[serde(deny_unknown_fields)]
pub struct SelectDownByLines {
#[serde(default)]
pub(super) lines: u32,
}
#[derive(PartialEq, Clone, Deserialize, Default, JsonSchema)]
#[serde(deny_unknown_fields)]
pub struct ExpandExcerpts {
#[serde(default)]
pub(super) lines: u32,
}
#[derive(PartialEq, Clone, Deserialize, Default, JsonSchema)]
#[serde(deny_unknown_fields)]
pub struct ExpandExcerptsUp {
#[serde(default)]
pub(super) lines: u32,
}
#[derive(PartialEq, Clone, Deserialize, Default, JsonSchema)]
#[serde(deny_unknown_fields)]
pub struct ExpandExcerptsDown {
#[serde(default)]
pub(super) lines: u32,
}
#[derive(PartialEq, Clone, Deserialize, Default, JsonSchema)]
#[serde(deny_unknown_fields)]
pub struct ShowCompletions {
#[serde(default)]
pub(super) trigger: Option<String>,
@@ -172,24 +150,23 @@ pub struct ShowCompletions {
pub struct HandleInput(pub String);
#[derive(PartialEq, Clone, Deserialize, Default, JsonSchema)]
#[serde(deny_unknown_fields)]
pub struct DeleteToNextWordEnd {
#[serde(default)]
pub ignore_newlines: bool,
}
#[derive(PartialEq, Clone, Deserialize, Default, JsonSchema)]
#[serde(deny_unknown_fields)]
pub struct DeleteToPreviousWordStart {
#[serde(default)]
pub ignore_newlines: bool,
}
#[derive(PartialEq, Clone, Deserialize, Default, JsonSchema)]
pub struct FoldAtLevel(pub u32);
pub struct FoldAtLevel {
pub level: u32,
}
#[derive(PartialEq, Clone, Deserialize, Default, JsonSchema)]
#[serde(deny_unknown_fields)]
pub struct SpawnNearestTask {
#[serde(default)]
pub reveal: task::RevealStrategy,
@@ -239,9 +216,9 @@ impl_actions!(
gpui::actions!(
editor,
[
AcceptEditPrediction,
AcceptInlineCompletion,
AcceptPartialCopilotSuggestion,
AcceptPartialEditPrediction,
AcceptPartialInlineCompletion,
AddSelectionAbove,
AddSelectionBelow,
ApplyAllDiffHunks,
@@ -333,7 +310,7 @@ gpui::actions!(
Newline,
NewlineAbove,
NewlineBelow,
NextEditPrediction,
NextInlineCompletion,
NextScreen,
OpenContextMenu,
OpenExcerpts,
@@ -348,7 +325,7 @@ gpui::actions!(
PageDown,
PageUp,
Paste,
PreviousEditPrediction,
PreviousInlineCompletion,
Redo,
RedoSelection,
Rename,
@@ -384,7 +361,7 @@ gpui::actions!(
SelectToStartOfParagraph,
SelectUp,
ShowCharacterPalette,
ShowEditPrediction,
ShowInlineCompletion,
ShowSignatureHelp,
ShuffleLines,
SortLinesCaseInsensitive,
@@ -398,7 +375,7 @@ gpui::actions!(
ToggleGitBlameInline,
ToggleIndentGuides,
ToggleInlayHints,
ToggleEditPrediction,
ToggleInlineCompletions,
ToggleLineNumbers,
SwapSelectionEnds,
SetMark,
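The action structs above gain #[serde(deny_unknown_fields)], and FoldAtLevel moves between a tuple struct and a named-field struct. A minimal standalone sketch of how such keymap arguments behave under serde (plain serde/serde_json, not Zed's actual action or keymap plumbing):

use serde::Deserialize;

fn default_true() -> bool {
    true
}

// Named-field form: bound in a keymap as {"level": 2}.
#[derive(Debug, Deserialize)]
#[serde(deny_unknown_fields)]
struct FoldAtLevel {
    level: u32,
}

// deny_unknown_fields turns a misspelled key into a hard error instead of
// silently ignoring it.
#[derive(Debug, Deserialize)]
#[serde(deny_unknown_fields)]
struct MoveToEndOfLine {
    #[serde(default = "default_true")]
    stop_at_soft_wraps: bool,
}

fn main() {
    let fold: FoldAtLevel = serde_json::from_str(r#"{ "level": 2 }"#).unwrap();
    assert_eq!(fold.level, 2);

    // An omitted field falls back to its serde default (true here).
    let eol: MoveToEndOfLine = serde_json::from_str("{}").unwrap();
    assert!(eol.stop_at_soft_wraps);

    // An unknown key is rejected.
    assert!(serde_json::from_str::<MoveToEndOfLine>(r#"{ "stop": true }"#).is_err());
}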

View File

@@ -1,16 +1,17 @@
use fuzzy::{StringMatch, StringMatchCandidate};
use gpui::{
div, px, uniform_list, AnyElement, BackgroundExecutor, Div, Entity, FontWeight,
ListSizingBehavior, ScrollStrategy, SharedString, Size, StrikethroughStyle, StyledText,
UniformListScrollHandle, WeakEntity,
div, pulsating_between, px, uniform_list, Animation, AnimationExt, AnyElement,
BackgroundExecutor, Div, Entity, FontWeight, ListSizingBehavior, ScrollStrategy, SharedString,
Size, StrikethroughStyle, StyledText, UniformListScrollHandle, WeakEntity,
};
use language::Buffer;
use language::{CodeLabel, CompletionDocumentation};
use language::{CodeLabel, Documentation};
use lsp::LanguageServerId;
use multi_buffer::{Anchor, ExcerptId};
use ordered_float::OrderedFloat;
use project::{CodeAction, Completion, TaskSourceKind};
use settings::Settings;
use std::time::Duration;
use std::{
cell::RefCell,
cmp::{min, Reverse},
@@ -25,9 +26,11 @@ use workspace::Workspace;
use crate::{
actions::{ConfirmCodeAction, ConfirmCompletion},
display_map::DisplayPoint,
render_parsed_markdown, split_words, styled_runs_for_code_label, CodeActionProvider,
CompletionId, CompletionProvider, DisplayRow, Editor, EditorStyle, ResolvedTasks,
};
use crate::{AcceptInlineCompletion, InlineCompletionMenuHint, InlineCompletionText};
pub const MENU_GAP: Pixels = px(4.);
pub const MENU_ASIDE_X_PADDING: Pixels = px(16.);
@@ -111,10 +114,10 @@ impl CodeContextMenu {
}
}
pub fn origin(&self) -> ContextMenuOrigin {
pub fn origin(&self, cursor_position: DisplayPoint) -> ContextMenuOrigin {
match self {
CodeContextMenu::Completions(menu) => menu.origin(),
CodeContextMenu::CodeActions(menu) => menu.origin(),
CodeContextMenu::Completions(menu) => menu.origin(cursor_position),
CodeContextMenu::CodeActions(menu) => menu.origin(cursor_position),
}
}
@@ -151,7 +154,7 @@ impl CodeContextMenu {
}
pub enum ContextMenuOrigin {
Cursor,
EditorPoint(DisplayPoint),
GutterIndicator(DisplayRow),
}
@@ -163,7 +166,7 @@ pub struct CompletionsMenu {
pub buffer: Entity<Buffer>,
pub completions: Rc<RefCell<Box<[Completion]>>>,
match_candidates: Rc<[StringMatchCandidate]>,
pub entries: Rc<RefCell<Vec<StringMatch>>>,
pub entries: Rc<RefCell<Vec<CompletionEntry>>>,
pub selected_item: usize,
scroll_handle: UniformListScrollHandle,
resolve_completions: bool,
@@ -171,6 +174,12 @@ pub struct CompletionsMenu {
last_rendered_range: Rc<RefCell<Option<Range<usize>>>>,
}
#[derive(Clone, Debug)]
pub(crate) enum CompletionEntry {
Match(StringMatch),
InlineCompletionHint(InlineCompletionMenuHint),
}
impl CompletionsMenu {
pub fn new(
id: CompletionId,
@@ -235,11 +244,13 @@ impl CompletionsMenu {
let entries = choices
.iter()
.enumerate()
.map(|(id, completion)| StringMatch {
candidate_id: id,
score: 1.,
positions: vec![],
string: completion.clone(),
.map(|(id, completion)| {
CompletionEntry::Match(StringMatch {
candidate_id: id,
score: 1.,
positions: vec![],
string: completion.clone(),
})
})
.collect::<Vec<_>>();
Self {
@@ -329,6 +340,24 @@ impl CompletionsMenu {
}
}
pub fn show_inline_completion_hint(&mut self, hint: InlineCompletionMenuHint) {
let hint = CompletionEntry::InlineCompletionHint(hint);
let mut entries = self.entries.borrow_mut();
match entries.first() {
Some(CompletionEntry::InlineCompletionHint { .. }) => {
entries[0] = hint;
}
_ => {
entries.insert(0, hint);
// When `y_flipped`, need to scroll to bring it into view.
if self.selected_item == 0 {
self.scroll_handle
.scroll_to_item(self.selected_item, ScrollStrategy::Top);
}
}
}
}
pub fn resolve_visible_completions(
&mut self,
provider: Option<&dyn CompletionProvider>,
@@ -377,15 +406,17 @@ impl CompletionsMenu {
// This filtering doesn't happen if the completions are currently being updated.
let completions = self.completions.borrow();
let candidate_ids = entry_indices
.map(|i| entries[i].candidate_id)
.flat_map(|i| Self::entry_candidate_id(&entries[i]))
.filter(|i| completions[*i].documentation.is_none());
// Current selection is always resolved even if it already has documentation, to handle
// out-of-spec language servers that return more results later.
let selected_candidate_id = entries[self.selected_item].candidate_id;
let candidate_ids = iter::once(selected_candidate_id)
.chain(candidate_ids.filter(|id| *id != selected_candidate_id))
.collect::<Vec<usize>>();
let candidate_ids = match Self::entry_candidate_id(&entries[self.selected_item]) {
None => candidate_ids.collect::<Vec<usize>>(),
Some(selected_candidate_id) => iter::once(selected_candidate_id)
.chain(candidate_ids.filter(|id| *id != selected_candidate_id))
.collect::<Vec<usize>>(),
};
drop(entries);
if candidate_ids.is_empty() {
@@ -407,12 +438,19 @@ impl CompletionsMenu {
.detach();
}
fn entry_candidate_id(entry: &CompletionEntry) -> Option<usize> {
match entry {
CompletionEntry::Match(entry) => Some(entry.candidate_id),
CompletionEntry::InlineCompletionHint { .. } => None,
}
}
pub fn visible(&self) -> bool {
!self.entries.borrow().is_empty()
}
fn origin(&self) -> ContextMenuOrigin {
ContextMenuOrigin::Cursor
fn origin(&self, cursor_position: DisplayPoint) -> ContextMenuOrigin {
ContextMenuOrigin::EditorPoint(cursor_position)
}
fn render(
@@ -430,18 +468,23 @@ impl CompletionsMenu {
.borrow()
.iter()
.enumerate()
.max_by_key(|(_, mat)| {
let completion = &completions[mat.candidate_id];
let documentation = &completion.documentation;
.max_by_key(|(_, mat)| match mat {
CompletionEntry::Match(mat) => {
let completion = &completions[mat.candidate_id];
let documentation = &completion.documentation;
let mut len = completion.label.text.chars().count();
if let Some(CompletionDocumentation::SingleLine(text)) = documentation {
if show_completion_documentation {
len += text.chars().count();
let mut len = completion.label.text.chars().count();
if let Some(Documentation::SingleLine(text)) = documentation {
if show_completion_documentation {
len += text.chars().count();
}
}
}
len
len
}
CompletionEntry::InlineCompletionHint(hint) => {
"Zed AI / ".chars().count() + hint.label().chars().count()
}
})
.map(|(ix, _)| ix);
drop(completions);
@@ -465,82 +508,177 @@ impl CompletionsMenu {
.enumerate()
.map(|(ix, mat)| {
let item_ix = start_ix + ix;
let completion = &completions_guard[mat.candidate_id];
let documentation = if show_completion_documentation {
&completion.documentation
} else {
&None
};
let buffer_font = theme::ThemeSettings::get_global(cx).buffer_font.clone();
let base_label = h_flex()
.gap_1()
.child(div().font(buffer_font.clone()).child("Zed AI"))
.child(div().px_0p5().child("/").opacity(0.2));
let filter_start = completion.label.filter_range.start;
let highlights = gpui::combine_highlights(
mat.ranges().map(|range| {
(
filter_start + range.start..filter_start + range.end,
FontWeight::BOLD.into(),
)
}),
styled_runs_for_code_label(&completion.label, &style.syntax).map(
|(range, mut highlight)| {
// Ignore font weight for syntax highlighting, as we'll use it
// for fuzzy matches.
highlight.font_weight = None;
if completion.lsp_completion.deprecated.unwrap_or(false) {
highlight.strikethrough = Some(StrikethroughStyle {
thickness: 1.0.into(),
..Default::default()
});
highlight.color = Some(cx.theme().colors().text_muted);
}
match mat {
CompletionEntry::Match(mat) => {
let candidate_id = mat.candidate_id;
let completion = &completions_guard[candidate_id];
(range, highlight)
},
),
);
let documentation = if show_completion_documentation {
&completion.documentation
} else {
&None
};
let completion_label = StyledText::new(completion.label.text.clone())
.with_highlights(&style.text, highlights);
let documentation_label = if let Some(
CompletionDocumentation::SingleLine(text),
) = documentation
{
if text.trim().is_empty() {
None
} else {
Some(
Label::new(text.clone())
.ml_4()
.size(LabelSize::Small)
.color(Color::Muted),
let filter_start = completion.label.filter_range.start;
let highlights = gpui::combine_highlights(
mat.ranges().map(|range| {
(
filter_start + range.start..filter_start + range.end,
FontWeight::BOLD.into(),
)
}),
styled_runs_for_code_label(&completion.label, &style.syntax)
.map(|(range, mut highlight)| {
// Ignore font weight for syntax highlighting, as we'll use it
// for fuzzy matches.
highlight.font_weight = None;
if completion.lsp_completion.deprecated.unwrap_or(false)
{
highlight.strikethrough =
Some(StrikethroughStyle {
thickness: 1.0.into(),
..Default::default()
});
highlight.color =
Some(cx.theme().colors().text_muted);
}
(range, highlight)
}),
);
let completion_label =
StyledText::new(completion.label.text.clone())
.with_highlights(&style.text, highlights);
let documentation_label =
if let Some(Documentation::SingleLine(text)) = documentation {
if text.trim().is_empty() {
None
} else {
Some(
Label::new(text.clone())
.ml_4()
.size(LabelSize::Small)
.color(Color::Muted),
)
}
} else {
None
};
let color_swatch = completion
.color()
.map(|color| div().size_4().bg(color).rounded_sm());
div().min_w(px(220.)).max_w(px(540.)).child(
ListItem::new(mat.candidate_id)
.inset(true)
.toggle_state(item_ix == selected_item)
.on_click(cx.listener(move |editor, _event, window, cx| {
cx.stop_propagation();
if let Some(task) = editor.confirm_completion(
&ConfirmCompletion {
item_ix: Some(item_ix),
},
window,
cx,
) {
task.detach_and_log_err(cx)
}
}))
.start_slot::<Div>(color_swatch)
.child(h_flex().overflow_hidden().child(completion_label))
.end_slot::<Label>(documentation_label),
)
}
} else {
None
};
let color_swatch = completion
.color()
.map(|color| div().size_4().bg(color).rounded_sm());
CompletionEntry::InlineCompletionHint(
hint @ InlineCompletionMenuHint::None,
) => div().min_w(px(250.)).max_w(px(500.)).child(
ListItem::new("inline-completion")
.inset(true)
.toggle_state(item_ix == selected_item)
.start_slot(Icon::new(IconName::ZedPredict))
.child(
base_label.child(
StyledText::new(hint.label())
.with_highlights(&style.text, None),
),
),
),
CompletionEntry::InlineCompletionHint(
hint @ InlineCompletionMenuHint::Loading,
) => div().min_w(px(250.)).max_w(px(500.)).child(
ListItem::new("inline-completion")
.inset(true)
.toggle_state(item_ix == selected_item)
.start_slot(Icon::new(IconName::ZedPredict))
.child(base_label.child({
let text_style = style.text.clone();
StyledText::new(hint.label())
.with_highlights(&text_style, None)
.with_animation(
"pulsating-label",
Animation::new(Duration::from_secs(1))
.repeat()
.with_easing(pulsating_between(0.4, 0.8)),
move |text, delta| {
let mut text_style = text_style.clone();
text_style.color =
text_style.color.opacity(delta);
text.with_highlights(&text_style, None)
},
)
})),
),
CompletionEntry::InlineCompletionHint(
hint @ InlineCompletionMenuHint::PendingTermsAcceptance,
) => div().min_w(px(250.)).max_w(px(500.)).child(
ListItem::new("inline-completion")
.inset(true)
.toggle_state(item_ix == selected_item)
.start_slot(Icon::new(IconName::ZedPredict))
.child(
base_label.child(
StyledText::new(hint.label())
.with_highlights(&style.text, None),
),
)
.on_click(cx.listener(move |editor, _event, window, cx| {
cx.stop_propagation();
editor.toggle_zed_predict_onboarding(window, cx);
})),
),
div().min_w(px(280.)).max_w(px(540.)).child(
ListItem::new(mat.candidate_id)
.inset(true)
.toggle_state(item_ix == selected_item)
.on_click(cx.listener(move |editor, _event, window, cx| {
cx.stop_propagation();
if let Some(task) = editor.confirm_completion(
&ConfirmCompletion {
item_ix: Some(item_ix),
},
window,
cx,
) {
task.detach_and_log_err(cx)
}
}))
.start_slot::<Div>(color_swatch)
.child(h_flex().overflow_hidden().child(completion_label))
.end_slot::<Label>(documentation_label),
)
CompletionEntry::InlineCompletionHint(
hint @ InlineCompletionMenuHint::Loaded { .. },
) => div().min_w(px(250.)).max_w(px(500.)).child(
ListItem::new("inline-completion")
.inset(true)
.toggle_state(item_ix == selected_item)
.start_slot(Icon::new(IconName::ZedPredict))
.child(
base_label.child(
StyledText::new(hint.label())
.with_highlights(&style.text, None),
),
)
.on_click(cx.listener(move |editor, _event, window, cx| {
cx.stop_propagation();
editor.accept_inline_completion(
&AcceptInlineCompletion {},
window,
cx,
);
})),
),
}
})
.collect()
},
@@ -566,25 +704,42 @@ impl CompletionsMenu {
return None;
}
let mat = &self.entries.borrow()[self.selected_item];
let multiline_docs = match self.completions.borrow_mut()[mat.candidate_id]
.documentation
.as_ref()?
{
CompletionDocumentation::MultiLinePlainText(text) => {
div().child(SharedString::from(text.clone()))
let multiline_docs = match &self.entries.borrow()[self.selected_item] {
CompletionEntry::Match(mat) => {
match self.completions.borrow_mut()[mat.candidate_id]
.documentation
.as_ref()?
{
Documentation::MultiLinePlainText(text) => {
div().child(SharedString::from(text.clone()))
}
Documentation::MultiLineMarkdown(parsed) if !parsed.text.is_empty() => div()
.child(render_parsed_markdown(
"completions_markdown",
parsed,
&style,
workspace,
cx,
)),
Documentation::MultiLineMarkdown(_) => return None,
Documentation::SingleLine(_) => return None,
Documentation::Undocumented => return None,
}
}
CompletionDocumentation::MultiLineMarkdown(parsed) if !parsed.text.is_empty() => div()
.child(render_parsed_markdown(
"completions_markdown",
parsed,
&style,
workspace,
cx,
)),
CompletionDocumentation::MultiLineMarkdown(_) => return None,
CompletionDocumentation::SingleLine(_) => return None,
CompletionDocumentation::Undocumented => return None,
CompletionEntry::InlineCompletionHint(InlineCompletionMenuHint::Loaded { text }) => {
match text {
InlineCompletionText::Edit(highlighted_edits) => div()
.mx_1()
.rounded_md()
.bg(cx.theme().colors().editor_background)
.child(
gpui::StyledText::new(highlighted_edits.text.clone())
.with_highlights(&style.text, highlighted_edits.highlights.clone()),
),
InlineCompletionText::Move(text) => div().child(text.clone()),
}
}
CompletionEntry::InlineCompletionHint(_) => return None,
};
Some(
@@ -603,6 +758,11 @@ impl CompletionsMenu {
}
pub async fn filter(&mut self, query: Option<&str>, executor: BackgroundExecutor) {
let inline_completion_was_selected = self.selected_item == 0
&& self.entries.borrow().first().map_or(false, |entry| {
matches!(entry, CompletionEntry::InlineCompletionHint(_))
});
let mut matches = if let Some(query) = query {
fuzzy::match_strings(
&self.match_candidates,
@@ -696,9 +856,23 @@ impl CompletionsMenu {
}
drop(completions);
*self.entries.borrow_mut() = matches;
self.selected_item = 0;
// This keeps the display consistent when y_flipped.
let mut entries = self.entries.borrow_mut();
let new_selection = if let Some(CompletionEntry::InlineCompletionHint(_)) = entries.first()
{
entries.truncate(1);
if inline_completion_was_selected || matches.is_empty() {
0
} else {
1
}
} else {
entries.truncate(0);
0
};
entries.extend(matches.into_iter().map(CompletionEntry::Match));
self.selected_item = new_selection;
// Scroll to 0 even if the LSP completion is the only one selected. This keeps the display
// consistent when y_flipped.
self.scroll_handle.scroll_to_item(0, ScrollStrategy::Top);
}
}
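The filter hunk above rebuilds the completion entries from fresh fuzzy matches while keeping an inline-completion hint pinned at index 0, and only leaves the hint selected if it was selected before the refilter (or no matches remain). A small standalone sketch of that bookkeeping on plain Vecs (types simplified, not the actual CompletionsMenu code):

#[derive(Debug, PartialEq)]
enum Entry {
    Hint(&'static str),
    Match(&'static str),
}

// Rebuild `entries` from fresh matches, keeping an existing hint pinned at
// index 0. Returns the index that should be selected afterwards.
fn refill(entries: &mut Vec<Entry>, matches: Vec<Entry>, hint_was_selected: bool) -> usize {
    let selection = if matches!(entries.first(), Some(Entry::Hint(_))) {
        // Keep only the pinned hint, then decide whether it stays selected.
        entries.truncate(1);
        if hint_was_selected || matches.is_empty() {
            0
        } else {
            1
        }
    } else {
        entries.clear();
        0
    };
    entries.extend(matches);
    selection
}

fn main() {
    let mut entries = vec![Entry::Hint("zed ai"), Entry::Match("old")];
    let selected = refill(
        &mut entries,
        vec![Entry::Match("new_a"), Entry::Match("new_b")],
        false,
    );
    assert_eq!(entries[0], Entry::Hint("zed ai")); // hint stays pinned
    assert_eq!(selected, 1); // first real match is selected
}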
@@ -898,11 +1072,11 @@ impl CodeActionsMenu {
!self.actions.is_empty()
}
fn origin(&self) -> ContextMenuOrigin {
fn origin(&self, cursor_position: DisplayPoint) -> ContextMenuOrigin {
if let Some(row) = self.deployed_from_indicator {
ContextMenuOrigin::GutterIndicator(row)
} else {
ContextMenuOrigin::Cursor
ContextMenuOrigin::EditorPoint(cursor_position)
}
}

View File

@@ -508,7 +508,7 @@ impl DisplayMap {
pub(crate) fn splice_inlays(
&mut self,
to_remove: &[InlayId],
to_remove: Vec<InlayId>,
to_insert: Vec<Inlay>,
cx: &mut Context<Self>,
) {
@@ -1142,7 +1142,12 @@ impl DisplaySnapshot {
}
pub fn line_indent_for_buffer_row(&self, buffer_row: MultiBufferRow) -> LineIndent {
self.buffer_snapshot.line_indent_for_row(buffer_row)
let (buffer, range) = self
.buffer_snapshot
.buffer_line_for_row(buffer_row)
.unwrap();
buffer.line_indent_for_row(range.start.row)
}
pub fn line_len(&self, row: DisplayRow) -> u32 {
@@ -1433,10 +1438,7 @@ impl ToDisplayPoint for Anchor {
#[cfg(test)]
pub mod tests {
use super::*;
use crate::{
movement,
test::{marked_display_snapshot, test_font},
};
use crate::{movement, test::marked_display_snapshot};
use block_map::BlockPlacement;
use gpui::{
div, font, observe, px, App, AppContext as _, BorrowAppContext, Element, Hsla, Rgba,
@@ -1495,11 +1497,10 @@ pub mod tests {
}
});
let font = test_font();
let map = cx.new(|cx| {
DisplayMap::new(
buffer.clone(),
font,
font("Helvetica"),
font_size,
wrap_width,
true,
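The line_indent_for_buffer_row hunk above changes how the row is resolved (via the underlying buffer rather than the multibuffer snapshot). Independent of that routing, a line indent is just the run of leading tabs and spaces on a row; a tiny sketch of that measurement with a simplified LineIndent (not the rope-based implementation):

// Simplified stand-in for the editor's LineIndent summary.
#[derive(Debug, PartialEq, Eq)]
struct LineIndent {
    tabs: u32,
    spaces: u32,
    is_line_empty: bool,
}

fn line_indent(line: &str) -> LineIndent {
    let mut tabs = 0;
    let mut spaces = 0;
    let mut rest_is_empty = true;
    for ch in line.chars() {
        match ch {
            '\t' if spaces == 0 => tabs += 1,
            ' ' => spaces += 1,
            _ => {
                rest_is_empty = false;
                break;
            }
        }
    }
    LineIndent { tabs, spaces, is_line_empty: rest_is_empty }
}

fn main() {
    assert_eq!(
        line_indent("    fn b() {"),
        LineIndent { tabs: 0, spaces: 4, is_line_empty: false }
    );
    assert_eq!(
        line_indent("\t\t"),
        LineIndent { tabs: 2, spaces: 0, is_line_empty: true }
    );
}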

View File

@@ -1992,9 +1992,8 @@ fn offset_for_row(s: &str, target: u32) -> (u32, usize) {
#[cfg(test)]
mod tests {
use super::*;
use crate::{
display_map::{fold_map::FoldMap, inlay_map::InlayMap, tab_map::TabMap, wrap_map::WrapMap},
test::test_font,
use crate::display_map::{
fold_map::FoldMap, inlay_map::InlayMap, tab_map::TabMap, wrap_map::WrapMap,
};
use gpui::{div, font, px, App, AppContext as _, Element};
use itertools::Itertools;
@@ -2228,7 +2227,7 @@ mod tests {
multi_buffer
});
let font = test_font();
let font = font("Helvetica");
let font_size = px(14.);
let font_id = cx.text_system().resolve_font(&font);
let mut wrap_width = px(0.);
@@ -3070,9 +3069,8 @@ mod tests {
let (mut inlay_map, inlay_snapshot) = InlayMap::new(buffer_snapshot.clone());
let (mut fold_map, fold_snapshot) = FoldMap::new(inlay_snapshot);
let (mut tab_map, tab_snapshot) = TabMap::new(fold_snapshot, 4.try_into().unwrap());
let font = test_font();
let (wrap_map, wraps_snapshot) =
cx.update(|cx| WrapMap::new(tab_snapshot, font, font_size, wrap_width, cx));
let (wrap_map, wraps_snapshot) = cx
.update(|cx| WrapMap::new(tab_snapshot, font("Helvetica"), font_size, wrap_width, cx));
let mut block_map = BlockMap::new(
wraps_snapshot,
true,

View File

@@ -545,7 +545,7 @@ impl InlayMap {
pub fn splice(
&mut self,
to_remove: &[InlayId],
to_remove: Vec<InlayId>,
to_insert: Vec<Inlay>,
) -> (InlaySnapshot, Vec<InlayEdit>) {
let snapshot = &mut self.snapshot;
@@ -653,7 +653,7 @@ impl InlayMap {
}
log::info!("removing inlays: {:?}", to_remove);
let (snapshot, edits) = self.splice(&to_remove, to_insert);
let (snapshot, edits) = self.splice(to_remove, to_insert);
(snapshot, edits)
}
}
@@ -1171,7 +1171,7 @@ mod tests {
let mut next_inlay_id = 0;
let (inlay_snapshot, _) = inlay_map.splice(
&[],
Vec::new(),
vec![Inlay {
id: InlayId::Hint(post_inc(&mut next_inlay_id)),
position: buffer.read(cx).snapshot(cx).anchor_after(3),
@@ -1247,7 +1247,7 @@ mod tests {
assert_eq!(inlay_snapshot.text(), "abxyDzefghi");
let (inlay_snapshot, _) = inlay_map.splice(
&[],
Vec::new(),
vec![
Inlay {
id: InlayId::Hint(post_inc(&mut next_inlay_id)),
@@ -1444,11 +1444,7 @@ mod tests {
// The inlays can be manually removed.
let (inlay_snapshot, _) = inlay_map.splice(
&inlay_map
.inlays
.iter()
.map(|inlay| inlay.id)
.collect::<Vec<InlayId>>(),
inlay_map.inlays.iter().map(|inlay| inlay.id).collect(),
Vec::new(),
);
assert_eq!(inlay_snapshot.text(), "abxJKLyDzefghi");
@@ -1462,7 +1458,7 @@ mod tests {
let mut next_inlay_id = 0;
let (inlay_snapshot, _) = inlay_map.splice(
&[],
Vec::new(),
vec![
Inlay {
id: InlayId::Hint(post_inc(&mut next_inlay_id)),

View File

@@ -979,7 +979,6 @@ impl<'a> Iterator for WrapRows<'a> {
Some(if soft_wrapped {
RowInfo {
buffer_id: None,
buffer_row: None,
multibuffer_row: None,
diff_status,
@@ -1169,10 +1168,9 @@ mod tests {
use super::*;
use crate::{
display_map::{fold_map::FoldMap, inlay_map::InlayMap, tab_map::TabMap},
test::test_font,
MultiBuffer,
};
use gpui::{px, test::observe};
use gpui::{font, px, test::observe};
use rand::prelude::*;
use settings::SettingsStore;
use smol::stream::StreamExt;
@@ -1197,8 +1195,7 @@ mod tests {
Some(px(rng.gen_range(0.0..=1000.0)))
};
let tab_size = NonZeroU32::new(rng.gen_range(1..=4)).unwrap();
let font = test_font();
let font = font("Helvetica");
let _font_id = text_system.font_id(&font);
let font_size = px(14.0);

File diff suppressed because it is too large.

View File

@@ -35,7 +35,7 @@ pub struct EditorSettings {
pub auto_signature_help: bool,
pub show_signature_help_after_edits: bool,
pub jupyter: Jupyter,
pub show_edit_predictions_in_menu: bool,
pub show_inline_completions_in_menu: bool,
}
#[derive(Copy, Clone, Debug, Serialize, Deserialize, PartialEq, Eq, JsonSchema)]
@@ -368,11 +368,11 @@ pub struct EditorSettingsContent {
/// Default: false
pub show_signature_help_after_edits: Option<bool>,
/// Whether to show the edit predictions next to the completions provided by a language server.
/// Only has an effect if the edit prediction provider supports it.
/// Whether to show the inline completions next to the completions provided by a language server.
/// Only has an effect if the inline completion provider supports it.
///
/// Default: true
pub show_edit_predictions_in_menu: Option<bool>,
pub show_inline_completions_in_menu: Option<bool>,
/// Jupyter REPL settings.
pub jupyter: Option<JupyterContent>,
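The settings hunk above follows the usual pattern in this crate: the user-facing EditorSettingsContent field is an Option<bool> so it can be omitted, while the resolved EditorSettings stores a concrete bool whose documented default is true. A bare-bones sketch of that resolve step (plain serde_json, not the real settings machinery):

use serde::Deserialize;

#[derive(Debug, Default, Deserialize)]
struct EditorSettingsContent {
    // None means "user did not set it"; the default is applied on resolve.
    show_edit_predictions_in_menu: Option<bool>,
}

#[derive(Debug)]
struct EditorSettings {
    show_edit_predictions_in_menu: bool,
}

fn resolve(content: &EditorSettingsContent) -> EditorSettings {
    EditorSettings {
        // Documented default: true.
        show_edit_predictions_in_menu: content.show_edit_predictions_in_menu.unwrap_or(true),
    }
}

fn main() {
    let user: EditorSettingsContent = serde_json::from_str("{}").unwrap();
    assert!(resolve(&user).show_edit_predictions_in_menu);

    let user: EditorSettingsContent =
        serde_json::from_str(r#"{ "show_edit_predictions_in_menu": false }"#).unwrap();
    assert!(!resolve(&user).show_edit_predictions_in_menu);
}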

View File

@@ -7,7 +7,6 @@ use crate::{
},
JoinLines,
};
use diff::{BufferDiff, DiffHunkStatus};
use futures::StreamExt;
use gpui::{
div, BackgroundExecutor, SemanticVersion, TestAppContext, UpdateGlobal, VisualTestContext,
@@ -27,7 +26,7 @@ use language_settings::{Formatter, FormatterList, IndentGuideSettings};
use multi_buffer::IndentGuide;
use parking_lot::Mutex;
use pretty_assertions::{assert_eq, assert_ne};
use project::FakeFs;
use project::{buffer_store::BufferChangeSet, FakeFs};
use project::{
lsp_command::SIGNATURE_HELP_HIGHLIGHT_CURRENT,
project_settings::{LspSettings, ProjectSettings},
@@ -41,9 +40,8 @@ use std::{
use test::{build_editor_with_project, editor_lsp_test_context::rust_lang};
use unindent::Unindent;
use util::{
assert_set_eq, path,
assert_set_eq,
test::{marked_text_ranges, marked_text_ranges_by, sample_text, TextRangeMarker},
uri,
};
use workspace::{
item::{FollowEvent, FollowableItem, Item, ItemHandle},
@@ -64,9 +62,9 @@ fn test_edit_events(cx: &mut TestAppContext) {
let editor1 = cx.add_window({
let events = events.clone();
|window, cx| {
let entity = cx.entity().clone();
let model = cx.entity().clone();
cx.subscribe_in(
&entity,
&model,
window,
move |_, _, event: &EditorEvent, _, _| match event {
EditorEvent::Edited { .. } => events.borrow_mut().push(("editor1", "edited")),
@@ -1159,7 +1157,7 @@ fn test_fold_at_level(cx: &mut TestAppContext) {
});
_ = editor.update(cx, |editor, window, cx| {
editor.fold_at_level(&FoldAtLevel(2), window, cx);
editor.fold_at_level(&FoldAtLevel { level: 2 }, window, cx);
assert_eq!(
editor.display_text(cx),
"
@@ -1183,7 +1181,7 @@ fn test_fold_at_level(cx: &mut TestAppContext) {
.unindent(),
);
editor.fold_at_level(&FoldAtLevel(1), window, cx);
editor.fold_at_level(&FoldAtLevel { level: 1 }, window, cx);
assert_eq!(
editor.display_text(cx),
"
@@ -1198,7 +1196,7 @@ fn test_fold_at_level(cx: &mut TestAppContext) {
);
editor.unfold_all(&UnfoldAll, window, cx);
editor.fold_at_level(&FoldAtLevel(0), window, cx);
editor.fold_at_level(&FoldAtLevel { level: 0 }, window, cx);
assert_eq!(
editor.display_text(cx),
"
@@ -5621,13 +5619,13 @@ async fn test_fold_function_bodies(cx: &mut gpui::TestAppContext) {
let base_text = r#"
impl A {
// this is an uncommitted comment
// this is an unstaged comment
fn b() {
c();
}
// this is another uncommitted comment
// this is another unstaged comment
fn d() {
// e
@@ -5670,13 +5668,13 @@ async fn test_fold_function_bodies(cx: &mut gpui::TestAppContext) {
cx.assert_state_with_diff(
"
ˇimpl A {
- // this is an uncommitted comment
- // this is an unstaged comment
fn b() {
c();
}
- // this is another uncommitted comment
- // this is another unstaged comment
-
fn d() {
// e
@@ -5693,13 +5691,13 @@ async fn test_fold_function_bodies(cx: &mut gpui::TestAppContext) {
let expected_display_text = "
impl A {
// this is an uncommitted comment
// this is an unstaged comment
fn b() {
}
// this is another uncommitted comment
// this is another unstaged comment
fn d() {
@@ -7076,9 +7074,9 @@ async fn test_document_format_during_save(cx: &mut gpui::TestAppContext) {
init_test(cx, |_| {});
let fs = FakeFs::new(cx.executor());
fs.insert_file(path!("/file.rs"), Default::default()).await;
fs.insert_file("/file.rs", Default::default()).await;
let project = Project::test(fs, [path!("/file.rs").as_ref()], cx).await;
let project = Project::test(fs, ["/file.rs".as_ref()], cx).await;
let language_registry = project.read_with(cx, |project, _| project.languages().clone());
language_registry.add(rust_lang());
@@ -7094,9 +7092,7 @@ async fn test_document_format_during_save(cx: &mut gpui::TestAppContext) {
);
let buffer = project
.update(cx, |project, cx| {
project.open_local_buffer(path!("/file.rs"), cx)
})
.update(cx, |project, cx| project.open_local_buffer("/file.rs", cx))
.await
.unwrap();
@@ -7121,7 +7117,7 @@ async fn test_document_format_during_save(cx: &mut gpui::TestAppContext) {
.handle_request::<lsp::request::Formatting, _, _>(move |params, _| async move {
assert_eq!(
params.text_document.uri,
lsp::Url::from_file_path(path!("/file.rs")).unwrap()
lsp::Url::from_file_path("/file.rs").unwrap()
);
assert_eq!(params.options.tab_size, 4);
Ok(Some(vec![lsp::TextEdit::new(
@@ -7149,7 +7145,7 @@ async fn test_document_format_during_save(cx: &mut gpui::TestAppContext) {
fake_server.handle_request::<lsp::request::Formatting, _, _>(move |params, _| async move {
assert_eq!(
params.text_document.uri,
lsp::Url::from_file_path(path!("/file.rs")).unwrap()
lsp::Url::from_file_path("/file.rs").unwrap()
);
futures::future::pending::<()>().await;
unreachable!()
@@ -7206,7 +7202,7 @@ async fn test_document_format_during_save(cx: &mut gpui::TestAppContext) {
.handle_request::<lsp::request::Formatting, _, _>(move |params, _| async move {
assert_eq!(
params.text_document.uri,
lsp::Url::from_file_path(path!("/file.rs")).unwrap()
lsp::Url::from_file_path("/file.rs").unwrap()
);
assert_eq!(params.options.tab_size, 8);
Ok(Some(vec![]))
@@ -7241,7 +7237,7 @@ async fn test_multibuffer_format_during_save(cx: &mut gpui::TestAppContext) {
let fs = FakeFs::new(cx.executor());
fs.insert_tree(
path!("/a"),
"/a",
json!({
"main.rs": sample_text_1,
"other.rs": sample_text_2,
@@ -7250,7 +7246,7 @@ async fn test_multibuffer_format_during_save(cx: &mut gpui::TestAppContext) {
)
.await;
let project = Project::test(fs, [path!("/a").as_ref()], cx).await;
let project = Project::test(fs, ["/a".as_ref()], cx).await;
let workspace = cx.add_window(|window, cx| Workspace::test_new(project.clone(), window, cx));
let cx = &mut VisualTestContext::from_window(*workspace.deref(), cx);
@@ -7425,20 +7421,20 @@ async fn test_multibuffer_format_during_save(cx: &mut gpui::TestAppContext) {
assert!(cx.read(|cx| !multi_buffer_editor.is_dirty(cx)));
assert_eq!(
multi_buffer_editor.update(cx, |editor, cx| editor.text(cx)),
uri!("a|o[file:///a/main.rs formatted]bbbb\ncccc\n\nffff\ngggg\n\njjjj\n\nlll[file:///a/other.rs formatted]mmmm\nnnnn|four|five|six|\nr\n\nuuuu\n\nvvvv\nwwww\nxxxx\n\n{{{{\n||||\n\n\u{7f}\u{7f}\u{7f}\u{7f}"),
"a|o[file:///a/main.rs formatted]bbbb\ncccc\n\nffff\ngggg\n\njjjj\n\nlll[file:///a/other.rs formatted]mmmm\nnnnn|four|five|six|\nr\n\nuuuu\n\nvvvv\nwwww\nxxxx\n\n{{{{\n||||\n\n\u{7f}\u{7f}\u{7f}\u{7f}",
);
buffer_1.update(cx, |buffer, _| {
assert!(!buffer.is_dirty());
assert_eq!(
buffer.text(),
uri!("a|o[file:///a/main.rs formatted]bbbb\ncccc\ndddd\neeee\nffff\ngggg\nhhhh\niiii\njjjj\n"),
"a|o[file:///a/main.rs formatted]bbbb\ncccc\ndddd\neeee\nffff\ngggg\nhhhh\niiii\njjjj\n",
)
});
buffer_2.update(cx, |buffer, _| {
assert!(!buffer.is_dirty());
assert_eq!(
buffer.text(),
uri!("lll[file:///a/other.rs formatted]mmmm\nnnnn|four|five|six|oooo\npppp\nr\nssss\ntttt\nuuuu\n"),
"lll[file:///a/other.rs formatted]mmmm\nnnnn|four|five|six|oooo\npppp\nr\nssss\ntttt\nuuuu\n",
)
});
buffer_3.update(cx, |buffer, _| {
@@ -7452,9 +7448,9 @@ async fn test_range_format_during_save(cx: &mut gpui::TestAppContext) {
init_test(cx, |_| {});
let fs = FakeFs::new(cx.executor());
fs.insert_file(path!("/file.rs"), Default::default()).await;
fs.insert_file("/file.rs", Default::default()).await;
let project = Project::test(fs, [path!("/").as_ref()], cx).await;
let project = Project::test(fs, ["/file.rs".as_ref()], cx).await;
let language_registry = project.read_with(cx, |project, _| project.languages().clone());
language_registry.add(rust_lang());
@@ -7470,9 +7466,7 @@ async fn test_range_format_during_save(cx: &mut gpui::TestAppContext) {
);
let buffer = project
.update(cx, |project, cx| {
project.open_local_buffer(path!("/file.rs"), cx)
})
.update(cx, |project, cx| project.open_local_buffer("/file.rs", cx))
.await
.unwrap();
@@ -7497,7 +7491,7 @@ async fn test_range_format_during_save(cx: &mut gpui::TestAppContext) {
.handle_request::<lsp::request::RangeFormatting, _, _>(move |params, _| async move {
assert_eq!(
params.text_document.uri,
lsp::Url::from_file_path(path!("/file.rs")).unwrap()
lsp::Url::from_file_path("/file.rs").unwrap()
);
assert_eq!(params.options.tab_size, 4);
Ok(Some(vec![lsp::TextEdit::new(
@@ -7525,7 +7519,7 @@ async fn test_range_format_during_save(cx: &mut gpui::TestAppContext) {
move |params, _| async move {
assert_eq!(
params.text_document.uri,
lsp::Url::from_file_path(path!("/file.rs")).unwrap()
lsp::Url::from_file_path("/file.rs").unwrap()
);
futures::future::pending::<()>().await;
unreachable!()
@@ -7583,7 +7577,7 @@ async fn test_range_format_during_save(cx: &mut gpui::TestAppContext) {
.handle_request::<lsp::request::RangeFormatting, _, _>(move |params, _| async move {
assert_eq!(
params.text_document.uri,
lsp::Url::from_file_path(path!("/file.rs")).unwrap()
lsp::Url::from_file_path("/file.rs").unwrap()
);
assert_eq!(params.options.tab_size, 8);
Ok(Some(vec![]))
@@ -7603,9 +7597,9 @@ async fn test_document_format_manual_trigger(cx: &mut gpui::TestAppContext) {
});
let fs = FakeFs::new(cx.executor());
fs.insert_file(path!("/file.rs"), Default::default()).await;
fs.insert_file("/file.rs", Default::default()).await;
let project = Project::test(fs, [path!("/").as_ref()], cx).await;
let project = Project::test(fs, ["/file.rs".as_ref()], cx).await;
let language_registry = project.read_with(cx, |project, _| project.languages().clone());
language_registry.add(Arc::new(Language::new(
@@ -7639,9 +7633,7 @@ async fn test_document_format_manual_trigger(cx: &mut gpui::TestAppContext) {
);
let buffer = project
.update(cx, |project, cx| {
project.open_local_buffer(path!("/file.rs"), cx)
})
.update(cx, |project, cx| project.open_local_buffer("/file.rs", cx))
.await
.unwrap();
@@ -7671,7 +7663,7 @@ async fn test_document_format_manual_trigger(cx: &mut gpui::TestAppContext) {
.handle_request::<lsp::request::Formatting, _, _>(move |params, _| async move {
assert_eq!(
params.text_document.uri,
lsp::Url::from_file_path(path!("/file.rs")).unwrap()
lsp::Url::from_file_path("/file.rs").unwrap()
);
assert_eq!(params.options.tab_size, 4);
Ok(Some(vec![lsp::TextEdit::new(
@@ -7695,7 +7687,7 @@ async fn test_document_format_manual_trigger(cx: &mut gpui::TestAppContext) {
fake_server.handle_request::<lsp::request::Formatting, _, _>(move |params, _| async move {
assert_eq!(
params.text_document.uri,
lsp::Url::from_file_path(path!("/file.rs")).unwrap()
lsp::Url::from_file_path("/file.rs").unwrap()
);
futures::future::pending::<()>().await;
unreachable!()
@@ -8735,14 +8727,14 @@ async fn test_multiline_completion(cx: &mut gpui::TestAppContext) {
let fs = FakeFs::new(cx.executor());
fs.insert_tree(
path!("/a"),
"/a",
json!({
"main.ts": "a",
}),
)
.await;
let project = Project::test(fs, [path!("/a").as_ref()], cx).await;
let project = Project::test(fs, ["/a".as_ref()], cx).await;
let language_registry = project.read_with(cx, |project, _| project.languages().clone());
let typescript_language = Arc::new(Language::new(
LanguageConfig {
@@ -8802,7 +8794,7 @@ async fn test_multiline_completion(cx: &mut gpui::TestAppContext) {
.unwrap();
let _buffer = project
.update(cx, |project, cx| {
project.open_local_buffer_with_lsp(path!("/a/main.ts"), cx)
project.open_local_buffer_with_lsp("/a/main.ts", cx)
})
.await
.unwrap();
@@ -10204,15 +10196,15 @@ async fn test_following(cx: &mut gpui::TestAppContext) {
let is_still_following = Rc::new(RefCell::new(true));
let follower_edit_event_count = Rc::new(RefCell::new(0));
let pending_update = Rc::new(RefCell::new(None));
let leader_entity = leader.root(cx).unwrap();
let follower_entity = follower.root(cx).unwrap();
let leader_model = leader.root(cx).unwrap();
let follower_model = follower.root(cx).unwrap();
_ = follower.update(cx, {
let update = pending_update.clone();
let is_still_following = is_still_following.clone();
let follower_edit_event_count = follower_edit_event_count.clone();
|_, window, cx| {
cx.subscribe_in(
&leader_entity,
&leader_model,
window,
move |_, leader, event, window, cx| {
leader.read(cx).add_event_to_update_proto(
@@ -10226,7 +10218,7 @@ async fn test_following(cx: &mut gpui::TestAppContext) {
.detach();
cx.subscribe_in(
&follower_entity,
&follower_model,
window,
move |_, _, event: &EditorEvent, _window, _cx| {
if matches!(Editor::to_follow_event(event), Some(FollowEvent::Unfollow)) {
@@ -10392,11 +10384,11 @@ async fn test_following_with_multiple_excerpts(cx: &mut gpui::TestAppContext) {
// Start following the editor when it has no excerpts.
let mut state_message =
leader.update_in(cx, |leader, window, cx| leader.to_state_proto(window, cx));
let workspace_entity = workspace.root(cx).unwrap();
let workspace_model = workspace.root(cx).unwrap();
let follower_1 = cx
.update_window(*workspace.deref(), |_, window, cx| {
Editor::from_state_proto(
workspace_entity,
workspace_model,
ViewId {
creator: Default::default(),
id: 0,
@@ -10494,11 +10486,11 @@ async fn test_following_with_multiple_excerpts(cx: &mut gpui::TestAppContext) {
// Start following separately after it already has excerpts.
let mut state_message =
leader.update_in(cx, |leader, window, cx| leader.to_state_proto(window, cx));
let workspace_entity = workspace.root(cx).unwrap();
let workspace_model = workspace.root(cx).unwrap();
let follower_2 = cx
.update_window(*workspace.deref(), |_, window, cx| {
Editor::from_state_proto(
workspace_entity,
workspace_model,
ViewId {
creator: Default::default(),
id: 0,
@@ -10578,7 +10570,7 @@ async fn go_to_prev_overlapping_diagnostic(
.update_diagnostics(
LanguageServerId(0),
lsp::PublishDiagnosticsParams {
uri: lsp::Url::from_file_path(path!("/root/file")).unwrap(),
uri: lsp::Url::from_file_path("/root/file").unwrap(),
version: None,
diagnostics: vec![
lsp::Diagnostic {
@@ -10653,176 +10645,6 @@ async fn go_to_prev_overlapping_diagnostic(
"});
}
#[gpui::test]
async fn cycle_through_same_place_diagnostics(
executor: BackgroundExecutor,
cx: &mut gpui::TestAppContext,
) {
init_test(cx, |_| {});
let mut cx = EditorTestContext::new(cx).await;
let lsp_store =
cx.update_editor(|editor, _, cx| editor.project.as_ref().unwrap().read(cx).lsp_store());
cx.set_state(indoc! {"
ˇfn func(abc def: i32) -> u32 {
}
"});
cx.update(|_, cx| {
lsp_store.update(cx, |lsp_store, cx| {
lsp_store
.update_diagnostics(
LanguageServerId(0),
lsp::PublishDiagnosticsParams {
uri: lsp::Url::from_file_path(path!("/root/file")).unwrap(),
version: None,
diagnostics: vec![
lsp::Diagnostic {
range: lsp::Range::new(
lsp::Position::new(0, 11),
lsp::Position::new(0, 12),
),
severity: Some(lsp::DiagnosticSeverity::ERROR),
..Default::default()
},
lsp::Diagnostic {
range: lsp::Range::new(
lsp::Position::new(0, 12),
lsp::Position::new(0, 15),
),
severity: Some(lsp::DiagnosticSeverity::ERROR),
..Default::default()
},
lsp::Diagnostic {
range: lsp::Range::new(
lsp::Position::new(0, 12),
lsp::Position::new(0, 15),
),
severity: Some(lsp::DiagnosticSeverity::ERROR),
..Default::default()
},
lsp::Diagnostic {
range: lsp::Range::new(
lsp::Position::new(0, 25),
lsp::Position::new(0, 28),
),
severity: Some(lsp::DiagnosticSeverity::ERROR),
..Default::default()
},
],
},
&[],
cx,
)
.unwrap()
});
});
executor.run_until_parked();
//// Backward
// Fourth diagnostic
cx.update_editor(|editor, window, cx| {
editor.go_to_prev_diagnostic(&GoToPrevDiagnostic, window, cx);
});
cx.assert_editor_state(indoc! {"
fn func(abc def: i32) -> ˇu32 {
}
"});
// Third diagnostic
cx.update_editor(|editor, window, cx| {
editor.go_to_prev_diagnostic(&GoToPrevDiagnostic, window, cx);
});
cx.assert_editor_state(indoc! {"
fn func(abc ˇdef: i32) -> u32 {
}
"});
// Second diagnostic, same place
cx.update_editor(|editor, window, cx| {
editor.go_to_prev_diagnostic(&GoToPrevDiagnostic, window, cx);
});
cx.assert_editor_state(indoc! {"
fn func(abc ˇdef: i32) -> u32 {
}
"});
// First diagnostic
cx.update_editor(|editor, window, cx| {
editor.go_to_prev_diagnostic(&GoToPrevDiagnostic, window, cx);
});
cx.assert_editor_state(indoc! {"
fn func(abcˇ def: i32) -> u32 {
}
"});
// Wrapped over, fourth diagnostic
cx.update_editor(|editor, window, cx| {
editor.go_to_prev_diagnostic(&GoToPrevDiagnostic, window, cx);
});
cx.assert_editor_state(indoc! {"
fn func(abc def: i32) -> ˇu32 {
}
"});
cx.update_editor(|editor, window, cx| {
editor.move_to_beginning(&MoveToBeginning, window, cx);
});
cx.assert_editor_state(indoc! {"
ˇfn func(abc def: i32) -> u32 {
}
"});
//// Forward
// First diagnostic
cx.update_editor(|editor, window, cx| {
editor.go_to_diagnostic(&GoToDiagnostic, window, cx);
});
cx.assert_editor_state(indoc! {"
fn func(abcˇ def: i32) -> u32 {
}
"});
// Second diagnostic
cx.update_editor(|editor, window, cx| {
editor.go_to_diagnostic(&GoToDiagnostic, window, cx);
});
cx.assert_editor_state(indoc! {"
fn func(abc ˇdef: i32) -> u32 {
}
"});
// Third diagnostic, same place
cx.update_editor(|editor, window, cx| {
editor.go_to_diagnostic(&GoToDiagnostic, window, cx);
});
cx.assert_editor_state(indoc! {"
fn func(abc ˇdef: i32) -> u32 {
}
"});
// Fourth diagnostic
cx.update_editor(|editor, window, cx| {
editor.go_to_diagnostic(&GoToDiagnostic, window, cx);
});
cx.assert_editor_state(indoc! {"
fn func(abc def: i32) -> ˇu32 {
}
"});
// Wrapped around, first diagnostic
cx.update_editor(|editor, window, cx| {
editor.go_to_diagnostic(&GoToDiagnostic, window, cx);
});
cx.assert_editor_state(indoc! {"
fn func(abcˇ def: i32) -> u32 {
}
"});
}
#[gpui::test]
async fn test_diagnostics_with_links(cx: &mut TestAppContext) {
init_test(cx, |_| {});
@@ -10841,7 +10663,7 @@ async fn test_diagnostics_with_links(cx: &mut TestAppContext) {
lsp_store.update_diagnostics(
LanguageServerId(0),
lsp::PublishDiagnosticsParams {
uri: lsp::Url::from_file_path(path!("/root/file")).unwrap(),
uri: lsp::Url::from_file_path("/root/file").unwrap(),
version: None,
diagnostics: vec![lsp::Diagnostic {
range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 12)),
@@ -11101,14 +10923,14 @@ async fn test_on_type_formatting_not_triggered(cx: &mut gpui::TestAppContext) {
let fs = FakeFs::new(cx.executor());
fs.insert_tree(
path!("/a"),
"/a",
json!({
"main.rs": "fn main() { let a = 5; }",
"other.rs": "// Test file",
}),
)
.await;
let project = Project::test(fs, [path!("/a").as_ref()], cx).await;
let project = Project::test(fs, ["/a".as_ref()], cx).await;
let language_registry = project.read_with(cx, |project, _| project.languages().clone());
language_registry.add(Arc::new(Language::new(
@@ -11160,7 +10982,7 @@ async fn test_on_type_formatting_not_triggered(cx: &mut gpui::TestAppContext) {
let buffer = project
.update(cx, |project, cx| {
project.open_local_buffer(path!("/a/main.rs"), cx)
project.open_local_buffer("/a/main.rs", cx)
})
.await
.unwrap();
@@ -11180,7 +11002,7 @@ async fn test_on_type_formatting_not_triggered(cx: &mut gpui::TestAppContext) {
fake_server.handle_request::<lsp::request::OnTypeFormatting, _, _>(|params, _| async move {
assert_eq!(
params.text_document_position.text_document.uri,
lsp::Url::from_file_path(path!("/a/main.rs")).unwrap(),
lsp::Url::from_file_path("/a/main.rs").unwrap(),
);
assert_eq!(
params.text_document_position.position,
@@ -11218,7 +11040,7 @@ async fn test_language_server_restart_due_to_settings_change(cx: &mut gpui::Test
let fs = FakeFs::new(cx.executor());
fs.insert_tree(
path!("/a"),
"/a",
json!({
"main.rs": "fn main() { let a = 5; }",
"other.rs": "// Test file",
@@ -11226,7 +11048,7 @@ async fn test_language_server_restart_due_to_settings_change(cx: &mut gpui::Test
)
.await;
let project = Project::test(fs, [path!("/a").as_ref()], cx).await;
let project = Project::test(fs, ["/a".as_ref()], cx).await;
let server_restarts = Arc::new(AtomicUsize::new(0));
let closure_restarts = Arc::clone(&server_restarts);
@@ -11266,7 +11088,7 @@ async fn test_language_server_restart_due_to_settings_change(cx: &mut gpui::Test
let _window = cx.add_window(|window, cx| Workspace::test_new(project.clone(), window, cx));
let _buffer = project
.update(cx, |project, cx| {
project.open_local_buffer_with_lsp(path!("/a/main.rs"), cx)
project.open_local_buffer_with_lsp("/a/main.rs", cx)
})
.await
.unwrap();
@@ -11885,7 +11707,10 @@ async fn test_completions_default_resolve_data_handling(cx: &mut gpui::TestAppCo
.entries
.borrow()
.iter()
.map(|mat| mat.string.clone())
.flat_map(|c| match c {
CompletionEntry::Match(mat) => Some(mat.string.clone()),
_ => None,
})
.collect::<Vec<String>>(),
items_out
.iter()
@@ -12027,7 +11852,13 @@ async fn test_completions_in_languages_with_extra_word_characters(cx: &mut gpui:
fn completion_menu_entries(menu: &CompletionsMenu) -> Vec<String> {
let entries = menu.entries.borrow();
entries.iter().map(|mat| mat.string.clone()).collect()
entries
.iter()
.flat_map(|e| match e {
CompletionEntry::Match(mat) => Some(mat.string.clone()),
_ => None,
})
.collect()
}
#[gpui::test]
@@ -12039,9 +11870,9 @@ async fn test_document_format_with_prettier(cx: &mut gpui::TestAppContext) {
});
let fs = FakeFs::new(cx.executor());
fs.insert_file(path!("/file.ts"), Default::default()).await;
fs.insert_file("/file.ts", Default::default()).await;
let project = Project::test(fs, [path!("/file.ts").as_ref()], cx).await;
let project = Project::test(fs, ["/file.ts".as_ref()], cx).await;
let language_registry = project.read_with(cx, |project, _| project.languages().clone());
language_registry.add(Arc::new(Language::new(
@@ -12073,9 +11904,7 @@ async fn test_document_format_with_prettier(cx: &mut gpui::TestAppContext) {
let prettier_format_suffix = project::TEST_PRETTIER_FORMAT_SUFFIX;
let buffer = project
.update(cx, |project, cx| {
project.open_local_buffer(path!("/file.ts"), cx)
})
.update(cx, |project, cx| project.open_local_buffer("/file.ts", cx))
.await
.unwrap();
@@ -12611,10 +12440,11 @@ async fn test_multibuffer_reverts(cx: &mut gpui::TestAppContext) {
(buffer_2.clone(), base_text_2),
(buffer_3.clone(), base_text_3),
] {
let diff = cx.new(|cx| BufferDiff::new_with_base_text(&diff_base, &buffer, cx));
let change_set = cx
.new(|cx| BufferChangeSet::new_with_base_text(diff_base.to_string(), &buffer, cx));
editor
.buffer
.update(cx, |buffer, cx| buffer.add_diff(diff, cx));
.update(cx, |buffer, cx| buffer.add_change_set(change_set, cx));
}
});
cx.executor().run_until_parked();
@@ -13304,10 +13134,12 @@ async fn test_toggle_diff_expand_in_multi_buffer(cx: &mut gpui::TestAppContext)
(buffer_2.clone(), file_2_old),
(buffer_3.clone(), file_3_old),
] {
let diff = cx.new(|cx| BufferDiff::new_with_base_text(&diff_base, &buffer, cx));
let change_set = cx.new(|cx| {
BufferChangeSet::new_with_base_text(diff_base.to_string(), &buffer, cx)
});
editor
.buffer
.update(cx, |buffer, cx| buffer.add_diff(diff, cx));
.update(cx, |buffer, cx| buffer.add_change_set(change_set, cx));
}
})
.unwrap();
@@ -13389,7 +13221,7 @@ async fn test_expand_diff_hunk_at_excerpt_boundary(cx: &mut gpui::TestAppContext
init_test(cx, |_| {});
let base = "aaa\nbbb\nccc\nddd\neee\nfff\nggg\n";
let text = "aaa\nBBB\nBB2\nccc\nDDD\nEEE\nfff\nggg\nhhh\niii\n";
let text = "aaa\nBBB\nBB2\nccc\nDDD\nEEE\nfff\nggg\n";
let buffer = cx.new(|cx| Buffer::local(text.to_string(), cx));
let multi_buffer = cx.new(|cx| {
@@ -13402,11 +13234,7 @@ async fn test_expand_diff_hunk_at_excerpt_boundary(cx: &mut gpui::TestAppContext
primary: None,
},
ExcerptRange {
context: Point::new(4, 0)..Point::new(7, 0),
primary: None,
},
ExcerptRange {
context: Point::new(9, 0)..Point::new(10, 0),
context: Point::new(5, 0)..Point::new(7, 0),
primary: None,
},
],
@@ -13420,10 +13248,11 @@ async fn test_expand_diff_hunk_at_excerpt_boundary(cx: &mut gpui::TestAppContext
});
editor
.update(cx, |editor, _window, cx| {
let diff = cx.new(|cx| BufferDiff::new_with_base_text(base, &buffer, cx));
let change_set =
cx.new(|cx| BufferChangeSet::new_with_base_text(base.to_string(), &buffer, cx));
editor
.buffer
.update(cx, |buffer, cx| buffer.add_diff(diff, cx))
.update(cx, |buffer, cx| buffer.add_change_set(change_set, cx))
})
.unwrap();
@@ -13435,22 +13264,14 @@ async fn test_expand_diff_hunk_at_excerpt_boundary(cx: &mut gpui::TestAppContext
});
cx.executor().run_until_parked();
// When the start of a hunk coincides with the start of its excerpt,
// the hunk is expanded. When the start of a hunk is earlier than
// the start of its excerpt, the hunk is not expanded.
cx.assert_state_with_diff(
"
ˇaaa
- bbb
+ BBB
- ddd
- eee
+ DDD
+ EEE
fff
iii
"
.unindent(),
);
@@ -13688,8 +13509,8 @@ async fn test_toggling_adjacent_diff_hunks(cx: &mut TestAppContext) {
cx.set_state(indoc! { "
one
ˇTWO
three
TWO
ˇthree
four
five
"});
@@ -13702,14 +13523,15 @@ async fn test_toggling_adjacent_diff_hunks(cx: &mut TestAppContext) {
indoc! { "
one
- two
+ ˇTWO
three
+ TWO
ˇthree
four
five
"}
.to_string(),
);
cx.update_editor(|editor, window, cx| {
editor.move_up(&Default::default(), window, cx);
editor.move_up(&Default::default(), window, cx);
editor.toggle_selected_diff_hunks(&Default::default(), window, cx);
});
@@ -14589,10 +14411,15 @@ async fn test_indent_guide_with_expanded_diff_hunks(cx: &mut gpui::TestAppContex
editor.buffer().update(cx, |multibuffer, cx| {
let buffer = multibuffer.as_singleton().unwrap();
let diff = cx.new(|cx| BufferDiff::new_with_base_text(base_text, &buffer, cx));
let change_set = cx.new(|cx| {
let mut change_set = BufferChangeSet::new(&buffer, cx);
let _ =
change_set.set_base_text(base_text.into(), buffer.read(cx).text_snapshot(), cx);
change_set
});
multibuffer.set_all_diff_hunks_expanded(cx);
multibuffer.add_diff(diff, cx);
multibuffer.add_change_set(change_set, cx);
buffer.read(cx).remote_id()
})
@@ -15045,7 +14872,7 @@ async fn test_multi_buffer_folding(cx: &mut gpui::TestAppContext) {
let fs = FakeFs::new(cx.executor());
fs.insert_tree(
path!("/a"),
"/a",
json!({
"first.rs": sample_text_1,
"second.rs": sample_text_2,
@@ -15053,7 +14880,7 @@ async fn test_multi_buffer_folding(cx: &mut gpui::TestAppContext) {
}),
)
.await;
let project = Project::test(fs, [path!("/a").as_ref()], cx).await;
let project = Project::test(fs, ["/a".as_ref()], cx).await;
let workspace = cx.add_window(|window, cx| Workspace::test_new(project.clone(), window, cx));
let cx = &mut VisualTestContext::from_window(*workspace.deref(), cx);
let worktree = project.update(cx, |project, cx| {
@@ -15229,7 +15056,7 @@ async fn test_multi_buffer_single_excerpts_folding(cx: &mut gpui::TestAppContext
let fs = FakeFs::new(cx.executor());
fs.insert_tree(
path!("/a"),
"/a",
json!({
"first.rs": sample_text_1,
"second.rs": sample_text_2,
@@ -15237,7 +15064,7 @@ async fn test_multi_buffer_single_excerpts_folding(cx: &mut gpui::TestAppContext
}),
)
.await;
let project = Project::test(fs, [path!("/a").as_ref()], cx).await;
let project = Project::test(fs, ["/a".as_ref()], cx).await;
let workspace = cx.add_window(|window, cx| Workspace::test_new(project.clone(), window, cx));
let cx = &mut VisualTestContext::from_window(*workspace.deref(), cx);
let worktree = project.update(cx, |project, cx| {
@@ -15376,13 +15203,13 @@ async fn test_multi_buffer_with_single_excerpt_folding(cx: &mut gpui::TestAppCon
let fs = FakeFs::new(cx.executor());
fs.insert_tree(
path!("/a"),
"/a",
json!({
"main.rs": sample_text,
}),
)
.await;
let project = Project::test(fs, [path!("/a").as_ref()], cx).await;
let project = Project::test(fs, ["/a".as_ref()], cx).await;
let workspace = cx.add_window(|window, cx| Workspace::test_new(project.clone(), window, cx));
let cx = &mut VisualTestContext::from_window(*workspace.deref(), cx);
let worktree = project.update(cx, |project, cx| {
@@ -15590,7 +15417,7 @@ async fn assert_highlighted_edits(
edits: Vec<(Range<Point>, String)>,
include_deletions: bool,
cx: &mut TestAppContext,
assertion_fn: impl Fn(HighlightedText, &App),
assertion_fn: impl Fn(HighlightedEdits, &App),
) {
let window = cx.add_window(|window, cx| {
let buffer = MultiBuffer::build_simple(text, cx);
@@ -15642,7 +15469,8 @@ async fn assert_highlighted_edits(
&edit_preview,
include_deletions,
cx,
);
)
.expect("Missing highlighted edits");
assertion_fn(highlighted_edits, cx)
});
}
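Several of the tests above build a diff against a base text (BufferDiff::new_with_base_text in the new code, BufferChangeSet in the old) and then assert which rows show up as hunks. As a very rough illustration of what "hunks against a base text" means, here is a deliberately naive sketch that finds the single changed line range between two texts by trimming the common prefix and suffix (nothing like the real anchored, rope-based diff):

// A changed region: `old_rows` in the base text were replaced by `new_rows`
// in the current text. Empty ranges model pure insertions or deletions.
#[derive(Debug, PartialEq)]
struct Hunk {
    old_rows: std::ops::Range<usize>,
    new_rows: std::ops::Range<usize>,
}

// Naive single-hunk diff: strip the common prefix and suffix of lines and
// report whatever is left in the middle. Returns None if the texts match.
fn single_hunk(base: &str, current: &str) -> Option<Hunk> {
    let old: Vec<&str> = base.lines().collect();
    let new: Vec<&str> = current.lines().collect();

    let mut start = 0;
    while start < old.len() && start < new.len() && old[start] == new[start] {
        start += 1;
    }

    let mut old_end = old.len();
    let mut new_end = new.len();
    while old_end > start && new_end > start && old[old_end - 1] == new[new_end - 1] {
        old_end -= 1;
        new_end -= 1;
    }

    if start == old_end && start == new_end {
        None
    } else {
        Some(Hunk { old_rows: start..old_end, new_rows: start..new_end })
    }
}

fn main() {
    let base = "one\ntwo\nthree\nfour\nfive\n";
    let current = "one\nTWO\nthree\nfour\nfive\n";
    assert_eq!(
        single_hunk(base, current),
        Some(Hunk { old_rows: 1..2, new_rows: 1..2 })
    );
    assert_eq!(single_hunk(base, base), None);
}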

File diff suppressed because it is too large.

View File

@@ -1 +1,2 @@
pub mod blame;
pub mod project_diff;

Some files were not shown because too many files have changed in this diff.