Compare commits


1 commit

Author: Conrad Irwin
SHA1: e1aeda24ba
Message: WIP connection pool
Date: 2024-10-23 16:42:09 -06:00
172 changed files with 8190 additions and 6670 deletions

View File

@@ -11,7 +11,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Checkout repository
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871 # v4
with:
fetch-depth: 0

View File

@@ -18,7 +18,7 @@ jobs:
- buildjet-16vcpu-ubuntu-2204
steps:
- name: Checkout code
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871 # v4
with:
ref: ${{ github.event.inputs.branch }}
ssh-key: ${{ secrets.ZED_BOT_DEPLOY_KEY }}

View File

@@ -36,7 +36,7 @@ jobs:
- test
steps:
- name: Checkout repo
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871 # v4
with:
clean: false
fetch-depth: 0 # fetch full history
@@ -78,26 +78,25 @@ jobs:
- buildjet-8vcpu-ubuntu-2204
steps:
- name: Checkout repo
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4
- name: Run style checks
uses: ./.github/actions/check_style
- name: Check for typos
uses: crate-ci/typos@8e6a4285bcbde632c5d79900a7779746e8b7ea3f # v1.24.6
uses: crate-ci/typos@v1.24.6
with:
config: ./typos.toml
macos_tests:
timeout-minutes: 60
name: (macOS) Run Clippy and tests
if: github.repository_owner == 'zed-industries'
runs-on:
- self-hosted
- test
steps:
- name: Checkout repo
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871 # v4
with:
clean: false
@@ -127,7 +126,6 @@ jobs:
linux_tests:
timeout-minutes: 60
name: (Linux) Run Clippy and tests
if: github.repository_owner == 'zed-industries'
runs-on:
- buildjet-16vcpu-ubuntu-2204
steps:
@@ -135,7 +133,7 @@ jobs:
run: echo "$HOME/.cargo/bin" >> $GITHUB_PATH
- name: Checkout repo
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871 # v4
with:
clean: false
@@ -160,7 +158,6 @@ jobs:
build_remote_server:
timeout-minutes: 60
name: (Linux) Build Remote Server
if: github.repository_owner == 'zed-industries'
runs-on:
- buildjet-16vcpu-ubuntu-2204
steps:
@@ -168,7 +165,7 @@ jobs:
run: echo "$HOME/.cargo/bin" >> $GITHUB_PATH
- name: Checkout repo
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871 # v4
with:
clean: false
@@ -188,11 +185,10 @@ jobs:
windows_tests:
timeout-minutes: 60
name: (Windows) Run Clippy and tests
if: github.repository_owner == 'zed-industries'
runs-on: hosted-windows-1
steps:
- name: Checkout repo
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871 # v4
with:
clean: false
@@ -233,7 +229,7 @@ jobs:
node-version: "18"
- name: Checkout repo
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871 # v4
with:
# We need to fetch more than one commit so that `script/draft-release-notes`
# is able to diff between the current and previous tag.
@@ -318,7 +314,7 @@ jobs:
ZED_CLOUD_PROVIDER_ADDITIONAL_MODELS_JSON: ${{ secrets.ZED_CLOUD_PROVIDER_ADDITIONAL_MODELS_JSON }}
steps:
- name: Checkout repo
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871 # v4
with:
clean: false
@@ -365,7 +361,7 @@ jobs:
ZED_CLOUD_PROVIDER_ADDITIONAL_MODELS_JSON: ${{ secrets.ZED_CLOUD_PROVIDER_ADDITIONAL_MODELS_JSON }}
steps:
- name: Checkout repo
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871 # v4
with:
clean: false

View File

@@ -10,7 +10,7 @@ jobs:
runs-on: ubuntu-latest
if: github.repository_owner == 'zed-industries'
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
- uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871 # v4
- name: Set up uv
uses: astral-sh/setup-uv@f3bcaebff5eace81a1c062af9f9011aae482ca9d # v3
with:

View File

@@ -10,7 +10,7 @@ jobs:
runs-on: ubuntu-latest
if: github.repository_owner == 'zed-industries'
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
- uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871 # v4
- name: Set up uv
uses: astral-sh/setup-uv@f3bcaebff5eace81a1c062af9f9011aae482ca9d # v3
with:

View File

@@ -14,7 +14,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
- uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871 # v4
- uses: pnpm/action-setup@fe02b34f77f8bc703788d5817da081398fad5dd2 # v4.0.0
with:

View File

@@ -13,7 +13,7 @@ jobs:
steps:
- name: Checkout repo
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871 # v4
with:
clean: false

View File

@@ -17,7 +17,7 @@ jobs:
- test
steps:
- name: Checkout repo
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871 # v4
with:
clean: false
fetch-depth: 0
@@ -36,7 +36,7 @@ jobs:
needs: style
steps:
- name: Checkout repo
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871 # v4
with:
clean: false
fetch-depth: 0
@@ -71,7 +71,7 @@ jobs:
run: doctl registry login
- name: Checkout repo
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871 # v4
with:
clean: false
@@ -97,7 +97,7 @@ jobs:
steps:
- name: Checkout repo
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871 # v4
with:
clean: false

View File

@@ -15,7 +15,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
- uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871 # v4
- uses: pnpm/action-setup@fe02b34f77f8bc703788d5817da081398fad5dd2 # v4.0.0
with:
@@ -31,7 +31,7 @@ jobs:
}
- name: Check for Typos with Typos-CLI
uses: crate-ci/typos@8e6a4285bcbde632c5d79900a7779746e8b7ea3f # v1.24.6
uses: crate-ci/typos@v1.24.6
with:
config: ./typos.toml
files: ./docs/

View File

@@ -16,7 +16,7 @@ jobs:
- ubuntu-latest
steps:
- name: Checkout repo
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871 # v4
with:
clean: false

View File

@@ -27,7 +27,7 @@ jobs:
node-version: "18"
- name: Checkout repo
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871 # v4
with:
clean: false

View File

@@ -23,7 +23,7 @@ jobs:
- test
steps:
- name: Checkout repo
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871 # v4
with:
clean: false
fetch-depth: 0
@@ -44,7 +44,7 @@ jobs:
needs: style
steps:
- name: Checkout repo
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871 # v4
with:
clean: false
@@ -75,7 +75,7 @@ jobs:
node-version: "18"
- name: Checkout repo
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871 # v4
with:
clean: false
@@ -109,7 +109,7 @@ jobs:
ZED_CLOUD_PROVIDER_ADDITIONAL_MODELS_JSON: ${{ secrets.ZED_CLOUD_PROVIDER_ADDITIONAL_MODELS_JSON }}
steps:
- name: Checkout repo
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871 # v4
with:
clean: false
@@ -149,7 +149,7 @@ jobs:
ZED_CLOUD_PROVIDER_ADDITIONAL_MODELS_JSON: ${{ secrets.ZED_CLOUD_PROVIDER_ADDITIONAL_MODELS_JSON }}
steps:
- name: Checkout repo
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871 # v4
with:
clean: false
@@ -182,7 +182,7 @@ jobs:
- bundle-linux-arm
steps:
- name: Checkout repo
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871 # v4
with:
fetch-depth: 0

Cargo.lock (generated)
View File

@@ -261,9 +261,9 @@ checksum = "34cd60c5e3152cef0a592f1b296f1cc93715d89d2551d85315828c3a09575ff4"
[[package]]
name = "anyhow"
version = "1.0.91"
version = "1.0.89"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c042108f3ed77fd83760a5fd79b53be043192bb3b9dba91d8c574c0ada7850c8"
checksum = "86fdf8605db99b54d3cd748a44c6d04df638eb5dafb219b135d0149bd0db01f6"
[[package]]
name = "approx"
@@ -453,11 +453,9 @@ dependencies = [
"anyhow",
"collections",
"derive_more",
"futures 0.3.30",
"gpui",
"language",
"parking_lot",
"pretty_assertions",
"serde",
"serde_json",
"workspace",
@@ -2550,6 +2548,7 @@ dependencies = [
"ctor",
"dashmap 6.0.1",
"derive_more",
"dev_server_projects",
"editor",
"env_logger",
"envy",
@@ -2560,6 +2559,7 @@ dependencies = [
"git_hosting_providers",
"google_ai",
"gpui",
"headless",
"hex",
"http_client",
"hyper 0.14.30",
@@ -3474,6 +3474,18 @@ dependencies = [
"syn 1.0.109",
]
[[package]]
name = "dev_server_projects"
version = "0.1.0"
dependencies = [
"anyhow",
"client",
"gpui",
"rpc",
"serde",
"serde_json",
]
[[package]]
name = "diagnostics"
version = "0.1.0"
@@ -3711,6 +3723,7 @@ dependencies = [
"tree-sitter-rust",
"tree-sitter-typescript",
"ui",
"unicode-segmentation",
"unindent",
"url",
"util",
@@ -5259,6 +5272,28 @@ dependencies = [
"http 0.2.12",
]
[[package]]
name = "headless"
version = "0.1.0"
dependencies = [
"anyhow",
"client",
"extension",
"fs",
"futures 0.3.30",
"gpui",
"language",
"log",
"node_runtime",
"postage",
"project",
"proto",
"settings",
"shellexpand 2.1.2",
"signal-hook",
"util",
]
[[package]]
name = "heck"
version = "0.3.3"
@@ -8406,6 +8441,7 @@ dependencies = [
"client",
"clock",
"collections",
"dev_server_projects",
"env_logger",
"fs",
"futures 0.3.30",
@@ -8477,7 +8513,6 @@ dependencies = [
"serde_derive",
"serde_json",
"settings",
"smallvec",
"theme",
"ui",
"util",
@@ -8943,6 +8978,8 @@ version = "0.1.0"
dependencies = [
"anyhow",
"auto_update",
"client",
"dev_server_projects",
"editor",
"file_finder",
"futures 0.3.30",
@@ -8959,12 +8996,14 @@ dependencies = [
"project",
"release_channel",
"remote",
"rpc",
"schemars",
"serde",
"serde_json",
"settings",
"smol",
"task",
"terminal_view",
"theme",
"ui",
"util",
@@ -11870,6 +11909,7 @@ dependencies = [
"client",
"collections",
"command_palette",
"dev_server_projects",
"editor",
"extensions_ui",
"feature_flags",
@@ -14266,6 +14306,7 @@ dependencies = [
"collections",
"db",
"derive_more",
"dev_server_projects",
"env_logger",
"fs",
"futures 0.3.30",
@@ -14584,6 +14625,7 @@ dependencies = [
"command_palette_hooks",
"copilot",
"db",
"dev_server_projects",
"diagnostics",
"editor",
"env_logger",
@@ -14599,6 +14641,7 @@ dependencies = [
"git_hosting_providers",
"go_to_line",
"gpui",
"headless",
"http_client",
"image_viewer",
"inline_completion_button",

View File

@@ -23,6 +23,7 @@ members = [
"crates/context_servers",
"crates/copilot",
"crates/db",
"crates/dev_server_projects",
"crates/diagnostics",
"crates/docs_preprocessor",
"crates/editor",
@@ -44,6 +45,7 @@ members = [
"crates/google_ai",
"crates/gpui",
"crates/gpui_macros",
"crates/headless",
"crates/html_to_markdown",
"crates/http_client",
"crates/image_viewer",
@@ -199,6 +201,7 @@ command_palette_hooks = { path = "crates/command_palette_hooks" }
context_servers = { path = "crates/context_servers" }
copilot = { path = "crates/copilot" }
db = { path = "crates/db" }
dev_server_projects = { path = "crates/dev_server_projects" }
diagnostics = { path = "crates/diagnostics" }
editor = { path = "crates/editor" }
extension = { path = "crates/extension" }
@@ -216,6 +219,7 @@ go_to_line = { path = "crates/go_to_line" }
google_ai = { path = "crates/google_ai" }
gpui = { path = "crates/gpui", default-features = false, features = ["http_client"]}
gpui_macros = { path = "crates/gpui_macros" }
headless = { path = "crates/headless" }
html_to_markdown = { path = "crates/html_to_markdown" }
http_client = { path = "crates/http_client" }
image_viewer = { path = "crates/image_viewer" }
@@ -464,7 +468,7 @@ tree-sitter-typescript = "0.23"
tree-sitter-yaml = { git = "https://github.com/zed-industries/tree-sitter-yaml", rev = "baff0b51c64ef6a1fb1f8390f3ad6015b83ec13a" }
unicase = "2.6"
unindent = "0.1.7"
unicode-segmentation = "1.10"
unicode-segmentation = "1.11"
url = "2.2"
uuid = { version = "1.1.2", features = ["v4", "v5", "serde"] }
wasmparser = "0.215"

View File

@@ -313,15 +313,6 @@
"ctrl-k ctrl-l": "editor::ToggleFold",
"ctrl-k ctrl-[": "editor::FoldRecursive",
"ctrl-k ctrl-]": "editor::UnfoldRecursive",
"ctrl-k ctrl-1": ["editor::FoldAtLevel", { "level": 1 }],
"ctrl-k ctrl-2": ["editor::FoldAtLevel", { "level": 2 }],
"ctrl-k ctrl-3": ["editor::FoldAtLevel", { "level": 3 }],
"ctrl-k ctrl-4": ["editor::FoldAtLevel", { "level": 4 }],
"ctrl-k ctrl-5": ["editor::FoldAtLevel", { "level": 5 }],
"ctrl-k ctrl-6": ["editor::FoldAtLevel", { "level": 6 }],
"ctrl-k ctrl-7": ["editor::FoldAtLevel", { "level": 7 }],
"ctrl-k ctrl-8": ["editor::FoldAtLevel", { "level": 8 }],
"ctrl-k ctrl-9": ["editor::FoldAtLevel", { "level": 9 }],
"ctrl-k ctrl-0": "editor::FoldAll",
"ctrl-k ctrl-j": "editor::UnfoldAll",
"ctrl-space": "editor::ShowCompletions",
@@ -514,13 +505,6 @@
"ctrl-enter": "assistant::InlineAssist"
}
},
{
"context": "ProposedChangesEditor",
"bindings": {
"ctrl-shift-y": "editor::ApplyDiffHunk",
"ctrl-alt-a": "editor::ApplyAllDiffHunks"
}
},
{
"context": "Editor && jupyter && !ContextEditor",
"bindings": {

View File

@@ -349,15 +349,7 @@
"alt-cmd-]": "editor::UnfoldLines",
"cmd-k cmd-l": "editor::ToggleFold",
"cmd-k cmd-[": "editor::FoldRecursive",
"cmd-k cmd-1": ["editor::FoldAtLevel", { "level": 1 }],
"cmd-k cmd-2": ["editor::FoldAtLevel", { "level": 2 }],
"cmd-k cmd-3": ["editor::FoldAtLevel", { "level": 3 }],
"cmd-k cmd-4": ["editor::FoldAtLevel", { "level": 4 }],
"cmd-k cmd-5": ["editor::FoldAtLevel", { "level": 5 }],
"cmd-k cmd-6": ["editor::FoldAtLevel", { "level": 6 }],
"cmd-k cmd-7": ["editor::FoldAtLevel", { "level": 7 }],
"cmd-k cmd-8": ["editor::FoldAtLevel", { "level": 8 }],
"cmd-k cmd-9": ["editor::FoldAtLevel", { "level": 9 }],
"cmd-k cmd-]": "editor::UnfoldRecursive",
"cmd-k cmd-0": "editor::FoldAll",
"cmd-k cmd-j": "editor::UnfoldAll",
"ctrl-space": "editor::ShowCompletions",
@@ -546,13 +538,6 @@
"ctrl-enter": "assistant::InlineAssist"
}
},
{
"context": "ProposedChangesEditor",
"bindings": {
"cmd-shift-y": "editor::ApplyDiffHunk",
"cmd-shift-a": "editor::ApplyAllDiffHunks"
}
},
{
"context": "PromptEditor",
"bindings": {

View File

@@ -88,6 +88,7 @@ origin: (f64, f64),
<edit>
<path>src/shapes/rectangle.rs</path>
<description>Update the Rectangle's new function to take an origin parameter</description>
<operation>update</operation>
<old_text>
fn new(width: f64, height: f64) -> Self {
@@ -116,6 +117,7 @@ pub struct Circle {
<edit>
<path>src/shapes/circle.rs</path>
<description>Update the Circle's new function to take an origin parameter</description>
<operation>update</operation>
<old_text>
fn new(radius: f64) -> Self {
@@ -132,6 +134,7 @@ fn new(origin: (f64, f64), radius: f64) -> Self {
<edit>
<path>src/shapes/rectangle.rs</path>
<description>Add an import for the std::fmt module</description>
<operation>insert_before</operation>
<old_text>
struct Rectangle {
@@ -144,10 +147,7 @@ use std::fmt;
<edit>
<path>src/shapes/rectangle.rs</path>
<description>
Add a manual Display implementation for Rectangle.
Currently, this is the same as a derived Display implementation.
</description>
<description>Add a Display implementation for Rectangle</description>
<operation>insert_after</operation>
<old_text>
Rectangle { width, height }
@@ -169,6 +169,7 @@ impl fmt::Display for Rectangle {
<edit>
<path>src/shapes/circle.rs</path>
<description>Add an import for the `std::fmt` module</description>
<operation>insert_before</operation>
<old_text>
struct Circle {
@@ -180,6 +181,7 @@ use std::fmt;
<edit>
<path>src/shapes/circle.rs</path>
<description>Add a Display implementation for Circle</description>
<operation>insert_after</operation>
<old_text>
Circle { radius }

View File

@@ -346,8 +346,6 @@
"git_status": true,
// Amount of indentation for nested items.
"indent_size": 20,
// Whether to show indent guides in the project panel.
"indent_guides": true,
// Whether to reveal it in the project panel automatically,
// when a corresponding project entry becomes active.
// Gitignored entries are never auto revealed.
@@ -1101,13 +1099,13 @@
// }
"command_aliases": {},
// ssh_connections is an array of ssh connections.
// By default this setting is null, which disables the direct ssh connection support.
// You can configure these from `project: Open Remote` in the command palette.
// Zed's ssh support will pull configuration from your ~/.ssh too.
// Examples:
// [
// {
// "host": "example-box",
// // "port": 22, "username": "test", "args": ["-i", "/home/user/.ssh/id_rsa"]
// "projects": [
// {
// "paths": ["/home/user/code/zed"]
@@ -1115,7 +1113,7 @@
// ]
// }
// ]
"ssh_connections": [],
"ssh_connections": null,
// Configures the Context Server Protocol binaries
//
// Examples:

View File

@@ -26,8 +26,8 @@ use collections::{BTreeSet, HashMap, HashSet};
use editor::{
actions::{FoldAt, MoveToEndOfLine, Newline, ShowCompletions, UnfoldAt},
display_map::{
BlockContext, BlockId, BlockPlacement, BlockProperties, BlockStyle, Crease, CreaseMetadata,
CustomBlockId, FoldId, RenderBlock, ToDisplayPoint,
BlockContext, BlockDisposition, BlockId, BlockProperties, BlockStyle, Crease,
CreaseMetadata, CustomBlockId, FoldId, RenderBlock, ToDisplayPoint,
},
scroll::{Autoscroll, AutoscrollStrategy},
Anchor, Editor, EditorEvent, ProposedChangeLocation, ProposedChangesEditor, RowExt,
@@ -963,7 +963,7 @@ impl AssistantPanel {
fn new_context(&mut self, cx: &mut ViewContext<Self>) -> Option<View<ContextEditor>> {
let project = self.project.read(cx);
if project.is_via_collab() {
if project.is_via_collab() && project.dev_server_project_id().is_none() {
let task = self
.context_store
.update(cx, |store, cx| store.create_remote_context(cx));
@@ -2009,12 +2009,13 @@ impl ContextEditor {
})
.map(|(command, error_message)| BlockProperties {
style: BlockStyle::Fixed,
height: 1,
placement: BlockPlacement::Below(Anchor {
position: Anchor {
buffer_id: Some(buffer_id),
excerpt_id,
text_anchor: command.source_range.start,
}),
},
height: 1,
disposition: BlockDisposition::Below,
render: slash_command_error_block_renderer(error_message),
priority: 0,
}),
@@ -2241,10 +2242,11 @@ impl ContextEditor {
} else {
let block_ids = editor.insert_blocks(
[BlockProperties {
position: patch_start,
height: path_count as u32 + 1,
style: BlockStyle::Flex,
render: render_block,
placement: BlockPlacement::Below(patch_start),
disposition: BlockDisposition::Below,
priority: 0,
}],
None,
@@ -2729,13 +2731,12 @@ impl ContextEditor {
})
};
let create_block_properties = |message: &Message| BlockProperties {
position: buffer
.anchor_in_excerpt(excerpt_id, message.anchor_range.start)
.unwrap(),
height: 2,
style: BlockStyle::Sticky,
placement: BlockPlacement::Above(
buffer
.anchor_in_excerpt(excerpt_id, message.anchor_range.start)
.unwrap(),
),
disposition: BlockDisposition::Above,
priority: usize::MAX,
render: render_block(MessageMetadata::from(message)),
};
@@ -3371,7 +3372,7 @@ impl ContextEditor {
let anchor = buffer.anchor_in_excerpt(excerpt_id, anchor).unwrap();
let image = render_image.clone();
anchor.is_valid(&buffer).then(|| BlockProperties {
placement: BlockPlacement::Above(anchor),
position: anchor,
height: MAX_HEIGHT_IN_LINES,
style: BlockStyle::Sticky,
render: Box::new(move |cx| {
@@ -3392,6 +3393,8 @@ impl ContextEditor {
)
.into_any_element()
}),
disposition: BlockDisposition::Above,
priority: 0,
})
})
@@ -3946,7 +3949,7 @@ impl Render for ContextEditor {
.bg(cx.theme().colors().editor_background)
.child(
h_flex()
.gap_1()
.gap_2()
.child(render_inject_context_menu(cx.view().downgrade(), cx))
.child(
IconButton::new("quote-button", IconName::Quote)
@@ -4246,11 +4249,11 @@ fn render_inject_context_menu(
slash_command_picker::SlashCommandSelector::new(
commands.clone(),
active_context_editor,
Button::new("trigger", "Add Context")
.icon(IconName::Plus)
IconButton::new("trigger", IconName::SlashSquare)
.icon_size(IconSize::Small)
.icon_position(IconPosition::Start)
.tooltip(|cx| Tooltip::text("Type / to insert via keyboard", cx)),
.tooltip(|cx| {
Tooltip::with_meta("Insert Context", None, "Type / to insert via keyboard", cx)
}),
)
}

View File

@@ -7,7 +7,7 @@ use crate::{
};
use anyhow::{anyhow, Context as _, Result};
use assistant_slash_command::{
SlashCommandOutput, SlashCommandOutputSection, SlashCommandRegistry, SlashCommandResult,
SlashCommandOutputSection, SlashCommandRegistry, SlashCommandResult,
};
use assistant_tool::ToolRegistry;
use client::{self, proto, telemetry::Telemetry};
@@ -1688,13 +1688,19 @@ impl Context {
let command_range = command_range.clone();
async move {
let output = output.await;
let output = match output {
Ok(output) => SlashCommandOutput::from_event_stream(output).await,
Err(err) => Err(err),
};
this.update(&mut cx, |this, cx| match output {
Ok(mut output) => {
output.ensure_valid_section_ranges();
// Ensure section ranges are valid.
for section in &mut output.sections {
section.range.start = section.range.start.min(output.text.len());
section.range.end = section.range.end.min(output.text.len());
while !output.text.is_char_boundary(section.range.start) {
section.range.start -= 1;
}
while !output.text.is_char_boundary(section.range.end) {
section.range.end += 1;
}
}
// Ensure there is a newline after the last section.
if ensure_trailing_newline {

View File

@@ -636,7 +636,7 @@ async fn test_workflow_step_parsing(cx: &mut TestAppContext) {
kind: AssistantEditKind::InsertAfter {
old_text: "fn one".into(),
new_text: "fn two() {}".into(),
description: Some("add a `two` function".into()),
description: "add a `two` function".into(),
},
}]],
cx,
@@ -690,7 +690,7 @@ async fn test_workflow_step_parsing(cx: &mut TestAppContext) {
kind: AssistantEditKind::InsertAfter {
old_text: "fn zero".into(),
new_text: "fn two() {}".into(),
description: Some("add a `two` function".into()),
description: "add a `two` function".into(),
},
}]],
cx,
@@ -754,7 +754,7 @@ async fn test_workflow_step_parsing(cx: &mut TestAppContext) {
kind: AssistantEditKind::InsertAfter {
old_text: "fn zero".into(),
new_text: "fn two() {}".into(),
description: Some("add a `two` function".into()),
description: "add a `two` function".into(),
},
}]],
cx,
@@ -798,7 +798,7 @@ async fn test_workflow_step_parsing(cx: &mut TestAppContext) {
kind: AssistantEditKind::InsertAfter {
old_text: "fn zero".into(),
new_text: "fn two() {}".into(),
description: Some("add a `two` function".into()),
description: "add a `two` function".into(),
},
}]],
cx,
@@ -1097,8 +1097,7 @@ async fn test_random_context_collaboration(cx: &mut TestAppContext, mut rng: Std
text: output_text,
sections,
run_commands_in_text: false,
}
.to_event_stream())),
})),
true,
false,
cx,
@@ -1422,7 +1421,6 @@ impl SlashCommand for FakeSlashCommand {
text: format!("Executed fake command: {}", self.0),
sections: vec![],
run_commands_in_text: false,
}
.to_event_stream()))
}))
}
}

View File

@@ -9,7 +9,7 @@ use collections::{hash_map, HashMap, HashSet, VecDeque};
use editor::{
actions::{MoveDown, MoveUp, SelectAll},
display_map::{
BlockContext, BlockPlacement, BlockProperties, BlockStyle, CustomBlockId, RenderBlock,
BlockContext, BlockDisposition, BlockProperties, BlockStyle, CustomBlockId, RenderBlock,
ToDisplayPoint,
},
Anchor, AnchorRangeExt, CodeActionProvider, Editor, EditorElement, EditorEvent, EditorMode,
@@ -54,7 +54,7 @@ use telemetry_events::{AssistantEvent, AssistantKind, AssistantPhase};
use terminal_view::terminal_panel::TerminalPanel;
use text::{OffsetRangeExt, ToPoint as _};
use theme::ThemeSettings;
use ui::{prelude::*, text_for_action, CheckboxWithLabel, IconButtonShape, Popover, Tooltip};
use ui::{prelude::*, CheckboxWithLabel, IconButtonShape, Popover, Tooltip};
use util::{RangeExt, ResultExt};
use workspace::{notifications::NotificationId, ItemHandle, Toast, Workspace};
@@ -446,14 +446,15 @@ impl InlineAssistant {
let assist_blocks = vec![
BlockProperties {
style: BlockStyle::Sticky,
placement: BlockPlacement::Above(range.start),
position: range.start,
height: prompt_editor_height,
render: build_assist_editor_renderer(prompt_editor),
disposition: BlockDisposition::Above,
priority: 0,
},
BlockProperties {
style: BlockStyle::Sticky,
placement: BlockPlacement::Below(range.end),
position: range.end,
height: 0,
render: Box::new(|cx| {
v_flex()
@@ -463,6 +464,7 @@ impl InlineAssistant {
.border_color(cx.theme().status().info_border)
.into_any_element()
}),
disposition: BlockDisposition::Below,
priority: 0,
},
];
@@ -1177,7 +1179,7 @@ impl InlineAssistant {
let height =
deleted_lines_editor.update(cx, |editor, cx| editor.max_point(cx).row().0 + 1);
new_blocks.push(BlockProperties {
placement: BlockPlacement::Above(new_row),
position: new_row,
height,
style: BlockStyle::Flex,
render: Box::new(move |cx| {
@@ -1189,6 +1191,7 @@ impl InlineAssistant {
.child(deleted_lines_editor.clone())
.into_any_element()
}),
disposition: BlockDisposition::Above,
priority: 0,
});
}
@@ -1596,7 +1599,7 @@ impl PromptEditor {
// always show the cursor (even when it isn't focused) because
// typing in one will make what you typed appear in all of them.
editor.set_show_cursor_when_unfocused(true, cx);
editor.set_placeholder_text(Self::placeholder_text(codegen.read(cx), cx), cx);
editor.set_placeholder_text("Add a prompt…", cx);
editor
});
@@ -1653,7 +1656,6 @@ impl PromptEditor {
self.editor = cx.new_view(|cx| {
let mut editor = Editor::auto_height(Self::MAX_LINES as usize, cx);
editor.set_soft_wrap_mode(language::language_settings::SoftWrap::EditorWidth, cx);
editor.set_placeholder_text(Self::placeholder_text(self.codegen.read(cx), cx), cx);
editor.set_placeholder_text("Add a prompt…", cx);
editor.set_text(prompt, cx);
if focus {
@@ -1664,20 +1666,6 @@ impl PromptEditor {
self.subscribe_to_editor(cx);
}
fn placeholder_text(codegen: &Codegen, cx: &WindowContext) -> String {
let context_keybinding = text_for_action(&crate::ToggleFocus, cx)
.map(|keybinding| format!("{keybinding} for context"))
.unwrap_or_default();
let action = if codegen.is_insertion {
"Generate"
} else {
"Transform"
};
format!("{action}{context_keybinding} • ↓↑ for history")
}
fn prompt(&self, cx: &AppContext) -> String {
self.editor.read(cx).text(cx)
}
@@ -2275,7 +2263,6 @@ pub struct Codegen {
initial_transaction_id: Option<TransactionId>,
telemetry: Option<Arc<Telemetry>>,
builder: Arc<PromptBuilder>,
is_insertion: bool,
}
impl Codegen {
@@ -2298,7 +2285,6 @@ impl Codegen {
)
});
let mut this = Self {
is_insertion: range.to_offset(&buffer.read(cx).snapshot(cx)).is_empty(),
alternatives: vec![codegen],
active_alternative: 0,
seen_alternatives: HashSet::default(),
@@ -2700,7 +2686,7 @@ impl CodegenAlternative {
let prompt = self
.builder
.generate_inline_transformation_prompt(user_prompt, language_name, buffer, range)
.generate_content_prompt(user_prompt, language_name, buffer, range)
.map_err(|e| anyhow::anyhow!("Failed to generate content prompt: {}", e))?;
let mut messages = Vec::new();

View File

@@ -158,34 +158,39 @@ impl PickerDelegate for ModelPickerDelegate {
.spacing(ListItemSpacing::Sparse)
.selected(selected)
.start_slot(
div().pr_0p5().child(
div().pr_1().child(
Icon::new(model_info.icon)
.color(Color::Muted)
.size(IconSize::Medium),
),
)
.child(
h_flex().w_full().justify_between().min_w(px(200.)).child(
h_flex()
.gap_1p5()
.child(Label::new(model_info.model.name().0.clone()))
.child(
Label::new(provider_name)
.size(LabelSize::XSmall)
.color(Color::Muted),
)
.children(match model_info.availability {
LanguageModelAvailability::Public => None,
LanguageModelAvailability::RequiresPlan(Plan::Free) => None,
LanguageModelAvailability::RequiresPlan(Plan::ZedPro) => {
show_badges.then(|| {
Label::new("Pro")
.size(LabelSize::XSmall)
.color(Color::Muted)
})
}
}),
),
h_flex()
.w_full()
.justify_between()
.font_buffer(cx)
.min_w(px(240.))
.child(
h_flex()
.gap_2()
.child(Label::new(model_info.model.name().0.clone()))
.child(
Label::new(provider_name)
.size(LabelSize::XSmall)
.color(Color::Muted),
)
.children(match model_info.availability {
LanguageModelAvailability::Public => None,
LanguageModelAvailability::RequiresPlan(Plan::Free) => None,
LanguageModelAvailability::RequiresPlan(Plan::ZedPro) => {
show_badges.then(|| {
Label::new("Pro")
.size(LabelSize::XSmall)
.color(Color::Muted)
})
}
}),
),
)
.end_slot(div().when(model_info.is_selected, |this| {
this.child(
@@ -207,7 +212,7 @@ impl PickerDelegate for ModelPickerDelegate {
h_flex()
.w_full()
.border_t_1()
.border_color(cx.theme().colors().border_variant)
.border_color(cx.theme().colors().border)
.p_1()
.gap_4()
.justify_between()

View File

@@ -33,21 +33,21 @@ pub enum AssistantEditKind {
Update {
old_text: String,
new_text: String,
description: Option<String>,
description: String,
},
Create {
new_text: String,
description: Option<String>,
description: String,
},
InsertBefore {
old_text: String,
new_text: String,
description: Option<String>,
description: String,
},
InsertAfter {
old_text: String,
new_text: String,
description: Option<String>,
description: String,
},
Delete {
old_text: String,
@@ -86,37 +86,19 @@ enum SearchDirection {
Diagonal,
}
// A measure of the currently quality of an in-progress fuzzy search.
//
// Uses 60 bits to store a numeric cost, and 4 bits to store the preceding
// operation in the search.
#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord)]
struct SearchState {
cost: u32,
score: u32,
direction: SearchDirection,
}
impl SearchState {
fn new(cost: u32, direction: SearchDirection) -> Self {
Self { cost, direction }
}
}
struct SearchMatrix {
cols: usize,
data: Vec<SearchState>,
}
impl SearchMatrix {
fn new(rows: usize, cols: usize) -> Self {
SearchMatrix {
cols,
data: vec![SearchState::new(0, SearchDirection::Diagonal); rows * cols],
}
}
fn get(&self, row: usize, col: usize) -> SearchState {
self.data[row * self.cols + col]
}
fn set(&mut self, row: usize, col: usize, cost: SearchState) {
self.data[row * self.cols + col] = cost;
fn new(score: u32, direction: SearchDirection) -> Self {
Self { score, direction }
}
}
@@ -205,23 +187,23 @@ impl AssistantEdit {
"update" => AssistantEditKind::Update {
old_text: old_text.ok_or_else(|| anyhow!("missing old_text"))?,
new_text: new_text.ok_or_else(|| anyhow!("missing new_text"))?,
description,
description: description.ok_or_else(|| anyhow!("missing description"))?,
},
"insert_before" => AssistantEditKind::InsertBefore {
old_text: old_text.ok_or_else(|| anyhow!("missing old_text"))?,
new_text: new_text.ok_or_else(|| anyhow!("missing new_text"))?,
description,
description: description.ok_or_else(|| anyhow!("missing description"))?,
},
"insert_after" => AssistantEditKind::InsertAfter {
old_text: old_text.ok_or_else(|| anyhow!("missing old_text"))?,
new_text: new_text.ok_or_else(|| anyhow!("missing new_text"))?,
description,
description: description.ok_or_else(|| anyhow!("missing description"))?,
},
"delete" => AssistantEditKind::Delete {
old_text: old_text.ok_or_else(|| anyhow!("missing old_text"))?,
},
"create" => AssistantEditKind::Create {
description,
description: description.ok_or_else(|| anyhow!("missing description"))?,
new_text: new_text.ok_or_else(|| anyhow!("missing new_text"))?,
},
_ => Err(anyhow!("unknown operation {operation:?}"))?,
@@ -282,7 +264,7 @@ impl AssistantEditKind {
ResolvedEdit {
range,
new_text,
description,
description: Some(description),
}
}
Self::Create {
@@ -290,7 +272,7 @@ impl AssistantEditKind {
description,
} => ResolvedEdit {
range: text::Anchor::MIN..text::Anchor::MAX,
description,
description: Some(description),
new_text,
},
Self::InsertBefore {
@@ -303,7 +285,7 @@ impl AssistantEditKind {
ResolvedEdit {
range: range.start..range.start,
new_text,
description,
description: Some(description),
}
}
Self::InsertAfter {
@@ -316,7 +298,7 @@ impl AssistantEditKind {
ResolvedEdit {
range: range.end..range.end,
new_text,
description,
description: Some(description),
}
}
Self::Delete { old_text } => {
@@ -332,29 +314,44 @@ impl AssistantEditKind {
fn resolve_location(buffer: &text::BufferSnapshot, search_query: &str) -> Range<text::Anchor> {
const INSERTION_COST: u32 = 3;
const DELETION_COST: u32 = 10;
const WHITESPACE_INSERTION_COST: u32 = 1;
const DELETION_COST: u32 = 3;
const WHITESPACE_DELETION_COST: u32 = 1;
const EQUALITY_BONUS: u32 = 5;
struct Matrix {
cols: usize,
data: Vec<SearchState>,
}
impl Matrix {
fn new(rows: usize, cols: usize) -> Self {
Matrix {
cols,
data: vec![SearchState::new(0, SearchDirection::Diagonal); rows * cols],
}
}
fn get(&self, row: usize, col: usize) -> SearchState {
self.data[row * self.cols + col]
}
fn set(&mut self, row: usize, col: usize, cost: SearchState) {
self.data[row * self.cols + col] = cost;
}
}
let buffer_len = buffer.len();
let query_len = search_query.len();
let mut matrix = SearchMatrix::new(query_len + 1, buffer_len + 1);
let mut leading_deletion_cost = 0_u32;
let mut matrix = Matrix::new(query_len + 1, buffer_len + 1);
for (row, query_byte) in search_query.bytes().enumerate() {
let deletion_cost = if query_byte.is_ascii_whitespace() {
WHITESPACE_DELETION_COST
} else {
DELETION_COST
};
leading_deletion_cost = leading_deletion_cost.saturating_add(deletion_cost);
matrix.set(
row + 1,
0,
SearchState::new(leading_deletion_cost, SearchDirection::Diagonal),
);
for (col, buffer_byte) in buffer.bytes_in_range(0..buffer.len()).flatten().enumerate() {
let deletion_cost = if query_byte.is_ascii_whitespace() {
WHITESPACE_DELETION_COST
} else {
DELETION_COST
};
let insertion_cost = if buffer_byte.is_ascii_whitespace() {
WHITESPACE_INSERTION_COST
} else {
@@ -362,35 +359,38 @@ impl AssistantEditKind {
};
let up = SearchState::new(
matrix.get(row, col + 1).cost.saturating_add(deletion_cost),
matrix.get(row, col + 1).score.saturating_sub(deletion_cost),
SearchDirection::Up,
);
let left = SearchState::new(
matrix.get(row + 1, col).cost.saturating_add(insertion_cost),
matrix
.get(row + 1, col)
.score
.saturating_sub(insertion_cost),
SearchDirection::Left,
);
let diagonal = SearchState::new(
if query_byte == *buffer_byte {
matrix.get(row, col).cost
matrix.get(row, col).score.saturating_add(EQUALITY_BONUS)
} else {
matrix
.get(row, col)
.cost
.saturating_add(deletion_cost + insertion_cost)
.score
.saturating_sub(deletion_cost + insertion_cost)
},
SearchDirection::Diagonal,
);
matrix.set(row + 1, col + 1, up.min(left).min(diagonal));
matrix.set(row + 1, col + 1, up.max(left).max(diagonal));
}
}
// Traceback to find the best match
let mut best_buffer_end = buffer_len;
let mut best_cost = u32::MAX;
let mut best_score = 0;
for col in 1..=buffer_len {
let cost = matrix.get(query_len, col).cost;
if cost < best_cost {
best_cost = cost;
let score = matrix.get(query_len, col).score;
if score > best_score {
best_score = score;
best_buffer_end = col;
}
}
@@ -560,84 +560,89 @@ mod tests {
language_settings::AllLanguageSettings, Language, LanguageConfig, LanguageMatcher,
};
use settings::SettingsStore;
use text::{OffsetRangeExt, Point};
use ui::BorrowAppContext;
use unindent::Unindent as _;
use util::test::{generate_marked_text, marked_text_ranges};
#[gpui::test]
fn test_resolve_location(cx: &mut AppContext) {
assert_location_resolution(
concat!(
" Lorem\n",
"« ipsum\n",
" dolor sit amet»\n",
" consecteur",
),
"ipsum\ndolor",
cx,
);
{
let buffer = cx.new_model(|cx| {
Buffer::local(
concat!(
" Lorem\n",
" ipsum\n",
" dolor sit amet\n",
" consecteur",
),
cx,
)
});
let snapshot = buffer.read(cx).snapshot();
assert_eq!(
AssistantEditKind::resolve_location(&snapshot, "ipsum\ndolor").to_point(&snapshot),
Point::new(1, 0)..Point::new(2, 18)
);
}
assert_location_resolution(
&"
«fn foo1(a: usize) -> usize {
40
{
let buffer = cx.new_model(|cx| {
Buffer::local(
concat!(
"fn foo1(a: usize) -> usize {\n",
" 40\n",
"}\n",
"\n",
"fn foo2(b: usize) -> usize {\n",
" 42\n",
"}\n",
),
cx,
)
});
let snapshot = buffer.read(cx).snapshot();
assert_eq!(
AssistantEditKind::resolve_location(&snapshot, "fn foo1(b: usize) {\n40\n}")
.to_point(&snapshot),
Point::new(0, 0)..Point::new(2, 1)
);
}
fn foo2(b: usize) -> usize {
42
}
"
.unindent(),
"fn foo1(b: usize) {\n40\n}",
cx,
);
assert_location_resolution(
&"
fn main() {
« Foo
.bar()
.baz()
.qux()»
}
fn foo2(b: usize) -> usize {
42
}
"
.unindent(),
"Foo.bar.baz.qux()",
cx,
);
assert_location_resolution(
&"
class Something {
one() { return 1; }
« two() { return 2222; }
three() { return 333; }
four() { return 4444; }
five() { return 5555; }
six() { return 6666; }
» seven() { return 7; }
eight() { return 8; }
}
"
.unindent(),
&"
two() { return 2222; }
four() { return 4444; }
five() { return 5555; }
six() { return 6666; }
"
.unindent(),
cx,
);
{
let buffer = cx.new_model(|cx| {
Buffer::local(
concat!(
"fn main() {\n",
" Foo\n",
" .bar()\n",
" .baz()\n",
" .qux()\n",
"}\n",
"\n",
"fn foo2(b: usize) -> usize {\n",
" 42\n",
"}\n",
),
cx,
)
});
let snapshot = buffer.read(cx).snapshot();
assert_eq!(
AssistantEditKind::resolve_location(&snapshot, "Foo.bar.baz.qux()")
.to_point(&snapshot),
Point::new(1, 0)..Point::new(4, 14)
);
}
}
#[gpui::test]
fn test_resolve_edits(cx: &mut AppContext) {
init_test(cx);
let settings_store = SettingsStore::test(cx);
cx.set_global(settings_store);
language::init(cx);
cx.update_global::<SettingsStore, _>(|settings, cx| {
settings.update_user_settings::<AllLanguageSettings>(cx, |_| {});
});
assert_edits(
"
@@ -670,7 +675,7 @@ mod tests {
last_name: String,
"
.unindent(),
description: None,
description: "".into(),
},
AssistantEditKind::Update {
old_text: "
@@ -685,7 +690,7 @@ mod tests {
}
"
.unindent(),
description: None,
description: "".into(),
},
],
"
@@ -729,7 +734,7 @@ mod tests {
qux();
}"
.unindent(),
description: Some("implement bar".into()),
description: "implement bar".into(),
},
AssistantEditKind::Update {
old_text: "
@@ -742,7 +747,7 @@ mod tests {
bar();
}"
.unindent(),
description: Some("call bar in foo".into()),
description: "call bar in foo".into(),
},
AssistantEditKind::InsertAfter {
old_text: "
@@ -757,7 +762,7 @@ mod tests {
}
"
.unindent(),
description: Some("implement qux".into()),
description: "implement qux".into(),
},
],
"
@@ -809,7 +814,7 @@ mod tests {
}
"
.unindent(),
description: None,
description: "pick better number".into(),
},
AssistantEditKind::Update {
old_text: "
@@ -824,7 +829,7 @@ mod tests {
}
"
.unindent(),
description: None,
description: "pick better number".into(),
},
AssistantEditKind::Update {
old_text: "
@@ -839,7 +844,7 @@ mod tests {
}
"
.unindent(),
description: None,
description: "pick better number".into(),
},
],
"
@@ -860,69 +865,6 @@ mod tests {
.unindent(),
cx,
);
assert_edits(
"
impl Person {
fn set_name(&mut self, name: String) {
self.name = name;
}
fn name(&self) -> String {
return self.name;
}
}
"
.unindent(),
vec![
AssistantEditKind::Update {
old_text: "self.name = name;".unindent(),
new_text: "self._name = name;".unindent(),
description: None,
},
AssistantEditKind::Update {
old_text: "return self.name;\n".unindent(),
new_text: "return self._name;\n".unindent(),
description: None,
},
],
"
impl Person {
fn set_name(&mut self, name: String) {
self._name = name;
}
fn name(&self) -> String {
return self._name;
}
}
"
.unindent(),
cx,
);
}
fn init_test(cx: &mut AppContext) {
let settings_store = SettingsStore::test(cx);
cx.set_global(settings_store);
language::init(cx);
cx.update_global::<SettingsStore, _>(|settings, cx| {
settings.update_user_settings::<AllLanguageSettings>(cx, |_| {});
});
}
#[track_caller]
fn assert_location_resolution(
text_with_expected_range: &str,
query: &str,
cx: &mut AppContext,
) {
let (text, _) = marked_text_ranges(text_with_expected_range, false);
let buffer = cx.new_model(|cx| Buffer::local(text.clone(), cx));
let snapshot = buffer.read(cx).snapshot();
let range = AssistantEditKind::resolve_location(&snapshot, query).to_offset(&snapshot);
let text_with_actual_range = generate_marked_text(&text, &[range], false);
pretty_assertions::assert_eq!(text_with_actual_range, text_with_expected_range);
}
#[track_caller]

View File

@@ -204,7 +204,7 @@ impl PromptBuilder {
Ok(())
}
pub fn generate_inline_transformation_prompt(
pub fn generate_content_prompt(
&self,
user_prompt: String,
language_name: Option<&LanguageName>,

View File

@@ -147,8 +147,7 @@ impl SlashCommand for AutoCommand {
text: prompt,
sections: Vec::new(),
run_commands_in_text: true,
}
.to_event_stream())
})
})
}
}

View File

@@ -147,8 +147,7 @@ impl SlashCommand for CargoWorkspaceSlashCommand {
metadata: None,
}],
run_commands_in_text: false,
}
.to_event_stream())
})
})
});
output.unwrap_or_else(|error| Task::ready(Err(error)))

View File

@@ -185,8 +185,7 @@ impl SlashCommand for ContextServerSlashCommand {
}],
text: prompt,
run_commands_in_text: false,
}
.to_event_stream())
})
})
} else {
Task::ready(Err(anyhow!("Context server not found")))

View File

@@ -78,8 +78,7 @@ impl SlashCommand for DefaultSlashCommand {
}],
text,
run_commands_in_text: true,
}
.to_event_stream())
})
})
}
}

View File

@@ -1,5 +1,5 @@
use crate::slash_command::file_command::{FileCommandMetadata, FileSlashCommand};
use anyhow::{anyhow, Result};
use anyhow::Result;
use assistant_slash_command::{
ArgumentCompletion, SlashCommand, SlashCommandOutput, SlashCommandOutputSection,
SlashCommandResult,
@@ -38,7 +38,7 @@ impl SlashCommand for DeltaSlashCommand {
_workspace: Option<WeakView<Workspace>>,
_cx: &mut WindowContext,
) -> Task<Result<Vec<ArgumentCompletion>>> {
Task::ready(Err(anyhow!("this command does not require argument")))
unimplemented!()
}
fn run(
@@ -86,28 +86,25 @@ impl SlashCommand for DeltaSlashCommand {
.zip(file_command_new_outputs)
{
if let Ok(new_output) = new_output {
if let Ok(new_output) = SlashCommandOutput::from_event_stream(new_output).await
{
if let Some(file_command_range) = new_output.sections.first() {
let new_text = &new_output.text[file_command_range.range.clone()];
if old_text.chars().ne(new_text.chars()) {
output.sections.extend(new_output.sections.into_iter().map(
|section| SlashCommandOutputSection {
range: output.text.len() + section.range.start
..output.text.len() + section.range.end,
icon: section.icon,
label: section.label,
metadata: section.metadata,
},
));
output.text.push_str(&new_output.text);
}
if let Some(file_command_range) = new_output.sections.first() {
let new_text = &new_output.text[file_command_range.range.clone()];
if old_text.chars().ne(new_text.chars()) {
output.sections.extend(new_output.sections.into_iter().map(
|section| SlashCommandOutputSection {
range: output.text.len() + section.range.start
..output.text.len() + section.range.end,
icon: section.icon,
label: section.label,
metadata: section.metadata,
},
));
output.text.push_str(&new_output.text);
}
}
}
}
Ok(output.to_event_stream())
Ok(output)
})
}
}

View File

@@ -180,11 +180,7 @@ impl SlashCommand for DiagnosticsSlashCommand {
let task = collect_diagnostics(workspace.read(cx).project().clone(), options, cx);
cx.spawn(move |_| async move {
task.await?
.map(|output| output.to_event_stream())
.ok_or_else(|| anyhow!("No diagnostics found"))
})
cx.spawn(move |_| async move { task.await?.ok_or_else(|| anyhow!("No diagnostics found")) })
}
}

View File

@@ -356,8 +356,7 @@ impl SlashCommand for DocsSlashCommand {
})
.collect(),
run_commands_in_text: false,
}
.to_event_stream())
})
})
}
}

View File

@@ -167,8 +167,7 @@ impl SlashCommand for FetchSlashCommand {
metadata: None,
}],
run_commands_in_text: false,
}
.to_event_stream())
})
})
}
}

View File

@@ -1,16 +1,13 @@
use anyhow::{anyhow, Context as _, Result};
use assistant_slash_command::{
AfterCompletion, ArgumentCompletion, SlashCommand, SlashCommandContent, SlashCommandEvent,
SlashCommandOutput, SlashCommandOutputSection, SlashCommandResult,
AfterCompletion, ArgumentCompletion, SlashCommand, SlashCommandOutput,
SlashCommandOutputSection, SlashCommandResult,
};
use futures::channel::mpsc;
use futures::Stream;
use fuzzy::PathMatch;
use gpui::{AppContext, Model, Task, View, WeakView};
use language::{BufferSnapshot, CodeLabel, HighlightId, LineEnding, LspAdapterDelegate};
use project::{PathMatchCandidateSet, Project};
use serde::{Deserialize, Serialize};
use smol::stream::StreamExt;
use std::{
fmt::Write,
ops::{Range, RangeInclusive},
@@ -197,12 +194,7 @@ impl SlashCommand for FileSlashCommand {
return Task::ready(Err(anyhow!("missing path")));
};
Task::ready(Ok(collect_files(
workspace.read(cx).project().clone(),
arguments,
cx,
)
.boxed()))
collect_files(workspace.read(cx).project().clone(), arguments, cx)
}
}
@@ -210,7 +202,7 @@ fn collect_files(
project: Model<Project>,
glob_inputs: &[String],
cx: &mut AppContext,
) -> impl Stream<Item = Result<SlashCommandEvent>> {
) -> Task<SlashCommandResult> {
let Ok(matchers) = glob_inputs
.into_iter()
.map(|glob_input| {
@@ -219,7 +211,7 @@ fn collect_files(
})
.collect::<anyhow::Result<Vec<custom_path_matcher::PathMatcher>>>()
else {
return futures::stream::once(async { Err(anyhow!("invalid path")) }).boxed();
return Task::ready(Err(anyhow!("invalid path")));
};
let project_handle = project.downgrade();
@@ -229,11 +221,11 @@ fn collect_files(
.map(|worktree| worktree.read(cx).snapshot())
.collect::<Vec<_>>();
let (events_tx, events_rx) = mpsc::unbounded();
cx.spawn(|mut cx| async move {
let mut output = SlashCommandOutput::default();
for snapshot in snapshots {
let worktree_id = snapshot.id();
let mut directory_stack: Vec<Arc<Path>> = Vec::new();
let mut directory_stack: Vec<(Arc<Path>, String, usize)> = Vec::new();
let mut folded_directory_names_stack = Vec::new();
let mut is_top_level_directory = true;
@@ -249,19 +241,17 @@ fn collect_files(
continue;
}
while let Some(dir) = directory_stack.last() {
while let Some((dir, _, _)) = directory_stack.last() {
if entry.path.starts_with(dir) {
break;
}
directory_stack.pop().unwrap();
events_tx
.unbounded_send(Ok(SlashCommandEvent::EndSection { metadata: None }))?;
events_tx.unbounded_send(Ok(SlashCommandEvent::Content(
SlashCommandContent::Text {
text: "\n".into(),
run_commands_in_text: false,
},
)))?;
let (_, entry_name, start) = directory_stack.pop().unwrap();
output.sections.push(build_entry_output_section(
start..output.text.len().saturating_sub(1),
Some(&PathBuf::from(entry_name)),
true,
None,
));
}
let filename = entry
@@ -293,46 +283,23 @@ fn collect_files(
continue;
}
let prefix_paths = folded_directory_names_stack.drain(..).as_slice().join("/");
let entry_start = output.text.len();
if prefix_paths.is_empty() {
let label = if is_top_level_directory {
if is_top_level_directory {
output
.text
.push_str(&path_including_worktree_name.to_string_lossy());
is_top_level_directory = false;
path_including_worktree_name.to_string_lossy().to_string()
} else {
filename
};
events_tx.unbounded_send(Ok(SlashCommandEvent::StartSection {
icon: IconName::Folder,
label: label.clone().into(),
metadata: None,
}))?;
events_tx.unbounded_send(Ok(SlashCommandEvent::Content(
SlashCommandContent::Text {
text: label,
run_commands_in_text: false,
},
)))?;
directory_stack.push(entry.path.clone());
output.text.push_str(&filename);
}
directory_stack.push((entry.path.clone(), filename, entry_start));
} else {
let entry_name = format!("{}/{}", prefix_paths, &filename);
events_tx.unbounded_send(Ok(SlashCommandEvent::StartSection {
icon: IconName::Folder,
label: entry_name.clone().into(),
metadata: None,
}))?;
events_tx.unbounded_send(Ok(SlashCommandEvent::Content(
SlashCommandContent::Text {
text: entry_name,
run_commands_in_text: false,
},
)))?;
directory_stack.push(entry.path.clone());
output.text.push_str(&entry_name);
directory_stack.push((entry.path.clone(), entry_name, entry_start));
}
events_tx.unbounded_send(Ok(SlashCommandEvent::Content(
SlashCommandContent::Text {
text: "\n".into(),
run_commands_in_text: false,
},
)))?;
output.text.push('\n');
} else if entry.is_file() {
let Some(open_buffer_task) = project_handle
.update(&mut cx, |project, cx| {
@@ -343,7 +310,6 @@ fn collect_files(
continue;
};
if let Some(buffer) = open_buffer_task.await.log_err() {
let mut output = SlashCommandOutput::default();
let snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot())?;
append_buffer_to_output(
&snapshot,
@@ -351,24 +317,33 @@ fn collect_files(
&mut output,
)
.log_err();
let mut buffer_events = output.to_event_stream();
while let Some(event) = buffer_events.next().await {
events_tx.unbounded_send(event)?;
}
}
}
}
while let Some(_) = directory_stack.pop() {
events_tx.unbounded_send(Ok(SlashCommandEvent::EndSection { metadata: None }))?;
while let Some((dir, entry, start)) = directory_stack.pop() {
if directory_stack.is_empty() {
let mut root_path = PathBuf::new();
root_path.push(snapshot.root_name());
root_path.push(&dir);
output.sections.push(build_entry_output_section(
start..output.text.len(),
Some(&root_path),
true,
None,
));
} else {
output.sections.push(build_entry_output_section(
start..output.text.len(),
Some(&PathBuf::from(entry.as_str())),
true,
None,
));
}
}
}
anyhow::Ok(())
Ok(output)
})
.detach_and_log_err(cx);
events_rx.boxed()
}
pub fn codeblock_fence_for_path(
@@ -553,14 +528,11 @@ pub fn append_buffer_to_output(
#[cfg(test)]
mod test {
use assistant_slash_command::SlashCommandOutput;
use fs::FakeFs;
use gpui::TestAppContext;
use pretty_assertions::assert_eq;
use project::Project;
use serde_json::json;
use settings::SettingsStore;
use smol::stream::StreamExt;
use crate::slash_command::file_command::collect_files;
@@ -601,9 +573,8 @@ mod test {
let project = Project::test(fs, ["/root".as_ref()], cx).await;
let result_1 =
cx.update(|cx| collect_files(project.clone(), &["root/dir".to_string()], cx));
let result_1 = SlashCommandOutput::from_event_stream(result_1.boxed())
let result_1 = cx
.update(|cx| collect_files(project.clone(), &["root/dir".to_string()], cx))
.await
.unwrap();
@@ -611,17 +582,17 @@ mod test {
// 4 files + 2 directories
assert_eq!(result_1.sections.len(), 6);
let result_2 =
cx.update(|cx| collect_files(project.clone(), &["root/dir/".to_string()], cx));
let result_2 = SlashCommandOutput::from_event_stream(result_2.boxed())
let result_2 = cx
.update(|cx| collect_files(project.clone(), &["root/dir/".to_string()], cx))
.await
.unwrap();
assert_eq!(result_1, result_2);
let result =
cx.update(|cx| collect_files(project.clone(), &["root/dir*".to_string()], cx).boxed());
let result = SlashCommandOutput::from_event_stream(result).await.unwrap();
let result = cx
.update(|cx| collect_files(project.clone(), &["root/dir*".to_string()], cx))
.await
.unwrap();
assert!(result.text.starts_with("root/dir"));
// 5 files + 2 directories
@@ -664,9 +635,8 @@ mod test {
let project = Project::test(fs, ["/zed".as_ref()], cx).await;
let result =
cx.update(|cx| collect_files(project.clone(), &["zed/assets/themes".to_string()], cx));
let result = SlashCommandOutput::from_event_stream(result.boxed())
let result = cx
.update(|cx| collect_files(project.clone(), &["zed/assets/themes".to_string()], cx))
.await
.unwrap();
@@ -726,9 +696,8 @@ mod test {
let project = Project::test(fs, ["/zed".as_ref()], cx).await;
let result =
cx.update(|cx| collect_files(project.clone(), &["zed/assets/themes".to_string()], cx));
let result = SlashCommandOutput::from_event_stream(result.boxed())
let result = cx
.update(|cx| collect_files(project.clone(), &["zed/assets/themes".to_string()], cx))
.await
.unwrap();
@@ -751,8 +720,6 @@ mod test {
assert_eq!(result.sections[6].label, "summercamp");
assert_eq!(result.sections[7].label, "zed/assets/themes");
assert_eq!(result.text, "zed/assets/themes\n```zed/assets/themes/LICENSE\n1\n```\n\nsummercamp\n```zed/assets/themes/summercamp/LICENSE\n1\n```\n\nsubdir\n```zed/assets/themes/summercamp/subdir/LICENSE\n1\n```\n\nsubsubdir\n```zed/assets/themes/summercamp/subdir/subsubdir/LICENSE\n3\n```\n\n");
// Ensure that the project lasts until after the last await
drop(project);
}

View File

@@ -63,7 +63,6 @@ impl SlashCommand for NowSlashCommand {
metadata: None,
}],
run_commands_in_text: false,
}
.to_event_stream()))
}))
}
}

View File

@@ -162,8 +162,7 @@ impl SlashCommand for ProjectSlashCommand {
text: output,
sections,
run_commands_in_text: true,
}
.to_event_stream())
})
})
.await
})

View File

@@ -102,8 +102,7 @@ impl SlashCommand for PromptSlashCommand {
metadata: None,
}],
run_commands_in_text: true,
}
.to_event_stream())
})
})
}
}

View File

@@ -130,7 +130,6 @@ impl SlashCommand for SearchSlashCommand {
sections,
run_commands_in_text: false,
}
.to_event_stream()
})
.await;

View File

@@ -85,8 +85,7 @@ impl SlashCommand for OutlineSlashCommand {
}],
text: outline_text,
run_commands_in_text: false,
}
.to_event_stream())
})
})
});

View File

@@ -150,7 +150,7 @@ impl SlashCommand for TabSlashCommand {
for (full_path, buffer, _) in tab_items_search.await? {
append_buffer_to_output(&buffer, full_path.as_deref(), &mut output).log_err();
}
Ok(output.to_event_stream())
Ok(output)
})
}
}

View File

@@ -97,8 +97,7 @@ impl SlashCommand for TerminalSlashCommand {
metadata: None,
}],
run_commands_in_text: false,
}
.to_event_stream()))
}))
}
}

View File

@@ -75,8 +75,7 @@ impl SlashCommand for WorkflowSlashCommand {
metadata: None,
}],
run_commands_in_text: false,
}
.to_event_stream())
})
})
}
}

View File

@@ -178,7 +178,7 @@ impl PickerDelegate for SlashCommandDelegate {
SlashCommandEntry::Info(info) => Some(
ListItem::new(ix)
.inset(true)
.spacing(ListItemSpacing::Dense)
.spacing(ListItemSpacing::Sparse)
.selected(selected)
.child(
h_flex()
@@ -224,7 +224,7 @@ impl PickerDelegate for SlashCommandDelegate {
SlashCommandEntry::Advert { renderer, .. } => Some(
ListItem::new(ix)
.inset(true)
.spacing(ListItemSpacing::Dense)
.spacing(ListItemSpacing::Sparse)
.selected(selected)
.child(renderer(cx)),
),

View File

@@ -15,15 +15,9 @@ path = "src/assistant_slash_command.rs"
anyhow.workspace = true
collections.workspace = true
derive_more.workspace = true
futures.workspace = true
gpui.workspace = true
language.workspace = true
parking_lot.workspace = true
serde.workspace = true
serde_json.workspace = true
workspace.workspace = true
[dev-dependencies]
gpui = { workspace = true, features = ["test-support"] }
pretty_assertions.workspace = true
workspace = { workspace = true, features = ["test-support"] }

View File

@@ -1,8 +1,6 @@
mod slash_command_registry;
use anyhow::Result;
use futures::stream::{self, BoxStream};
use futures::StreamExt;
use gpui::{AnyElement, AppContext, ElementId, SharedString, Task, WeakView, WindowContext};
use language::{BufferSnapshot, CodeLabel, LspAdapterDelegate, OffsetRangeExt};
use serde::{Deserialize, Serialize};
@@ -58,7 +56,7 @@ pub struct ArgumentCompletion {
pub replace_previous_arguments: bool,
}
pub type SlashCommandResult = Result<BoxStream<'static, Result<SlashCommandEvent>>>;
pub type SlashCommandResult = Result<SlashCommandOutput>;
pub trait SlashCommand: 'static + Send + Sync {
fn name(&self) -> String;
@@ -100,146 +98,13 @@ pub type RenderFoldPlaceholder = Arc<
+ Fn(ElementId, Arc<dyn Fn(&mut WindowContext)>, &mut WindowContext) -> AnyElement,
>;
#[derive(Debug, PartialEq, Eq)]
pub enum SlashCommandContent {
Text {
text: String,
run_commands_in_text: bool,
},
}
#[derive(Debug, PartialEq, Eq)]
pub enum SlashCommandEvent {
StartSection {
icon: IconName,
label: SharedString,
metadata: Option<serde_json::Value>,
},
Content(SlashCommandContent),
EndSection {
metadata: Option<serde_json::Value>,
},
}
#[derive(Debug, Default, PartialEq, Clone)]
#[derive(Debug, Default, PartialEq)]
pub struct SlashCommandOutput {
pub text: String,
pub sections: Vec<SlashCommandOutputSection<usize>>,
pub run_commands_in_text: bool,
}
impl SlashCommandOutput {
pub fn ensure_valid_section_ranges(&mut self) {
for section in &mut self.sections {
section.range.start = section.range.start.min(self.text.len());
section.range.end = section.range.end.min(self.text.len());
while !self.text.is_char_boundary(section.range.start) {
section.range.start -= 1;
}
while !self.text.is_char_boundary(section.range.end) {
section.range.end += 1;
}
}
}
/// Returns this [`SlashCommandOutput`] as a stream of [`SlashCommandEvent`]s.
pub fn to_event_stream(mut self) -> BoxStream<'static, Result<SlashCommandEvent>> {
self.ensure_valid_section_ranges();
let mut events = Vec::new();
let mut last_section_end = 0;
for section in self.sections {
if last_section_end < section.range.start {
events.push(Ok(SlashCommandEvent::Content(SlashCommandContent::Text {
text: self
.text
.get(last_section_end..section.range.start)
.unwrap_or_default()
.to_string(),
run_commands_in_text: self.run_commands_in_text,
})));
}
events.push(Ok(SlashCommandEvent::StartSection {
icon: section.icon,
label: section.label,
metadata: section.metadata.clone(),
}));
events.push(Ok(SlashCommandEvent::Content(SlashCommandContent::Text {
text: self
.text
.get(section.range.start..section.range.end)
.unwrap_or_default()
.to_string(),
run_commands_in_text: self.run_commands_in_text,
})));
events.push(Ok(SlashCommandEvent::EndSection {
metadata: section.metadata,
}));
last_section_end = section.range.end;
}
if last_section_end < self.text.len() {
events.push(Ok(SlashCommandEvent::Content(SlashCommandContent::Text {
text: self.text[last_section_end..].to_string(),
run_commands_in_text: self.run_commands_in_text,
})));
}
stream::iter(events).boxed()
}
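/// Collects a stream of [`SlashCommandEvent`]s back into a single
/// [`SlashCommandOutput`], rebuilding section ranges from the paired
/// StartSection/EndSection events.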
pub async fn from_event_stream(
mut events: BoxStream<'static, Result<SlashCommandEvent>>,
) -> Result<SlashCommandOutput> {
let mut output = SlashCommandOutput::default();
let mut section_stack = Vec::new();
while let Some(event) = events.next().await {
match event? {
SlashCommandEvent::StartSection {
icon,
label,
metadata,
} => {
let start = output.text.len();
section_stack.push(SlashCommandOutputSection {
range: start..start,
icon,
label,
metadata,
});
}
SlashCommandEvent::Content(SlashCommandContent::Text {
text,
run_commands_in_text,
}) => {
output.text.push_str(&text);
output.run_commands_in_text = run_commands_in_text;
if let Some(section) = section_stack.last_mut() {
section.range.end = output.text.len();
}
}
SlashCommandEvent::EndSection { metadata } => {
if let Some(mut section) = section_stack.pop() {
section.metadata = metadata;
output.sections.push(section);
}
}
}
}
while let Some(section) = section_stack.pop() {
output.sections.push(section);
}
Ok(output)
}
}
#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
pub struct SlashCommandOutputSection<T> {
pub range: Range<T>,
@@ -253,243 +118,3 @@ impl SlashCommandOutputSection<language::Anchor> {
self.range.start.is_valid(buffer) && !self.range.to_offset(buffer).is_empty()
}
}
#[cfg(test)]
mod tests {
use pretty_assertions::assert_eq;
use serde_json::json;
use super::*;
#[gpui::test]
async fn test_slash_command_output_to_events_round_trip() {
// Test basic output consisting of a single section.
{
let text = "Hello, world!".to_string();
let range = 0..text.len();
let output = SlashCommandOutput {
text,
sections: vec![SlashCommandOutputSection {
range,
icon: IconName::Code,
label: "Section 1".into(),
metadata: None,
}],
run_commands_in_text: false,
};
let events = output.clone().to_event_stream().collect::<Vec<_>>().await;
let events = events
.into_iter()
.filter_map(|event| event.ok())
.collect::<Vec<_>>();
assert_eq!(
events,
vec![
SlashCommandEvent::StartSection {
icon: IconName::Code,
label: "Section 1".into(),
metadata: None
},
SlashCommandEvent::Content(SlashCommandContent::Text {
text: "Hello, world!".into(),
run_commands_in_text: false
}),
SlashCommandEvent::EndSection { metadata: None }
]
);
let new_output =
SlashCommandOutput::from_event_stream(output.clone().to_event_stream())
.await
.unwrap();
assert_eq!(new_output, output);
}
// Test output where the sections do not comprise all of the text.
{
let text = "Apple\nCucumber\nBanana\n".to_string();
let output = SlashCommandOutput {
text,
sections: vec![
SlashCommandOutputSection {
range: 0..6,
icon: IconName::Check,
label: "Fruit".into(),
metadata: None,
},
SlashCommandOutputSection {
range: 15..22,
icon: IconName::Check,
label: "Fruit".into(),
metadata: None,
},
],
run_commands_in_text: false,
};
let events = output.clone().to_event_stream().collect::<Vec<_>>().await;
let events = events
.into_iter()
.filter_map(|event| event.ok())
.collect::<Vec<_>>();
assert_eq!(
events,
vec![
SlashCommandEvent::StartSection {
icon: IconName::Check,
label: "Fruit".into(),
metadata: None
},
SlashCommandEvent::Content(SlashCommandContent::Text {
text: "Apple\n".into(),
run_commands_in_text: false
}),
SlashCommandEvent::EndSection { metadata: None },
SlashCommandEvent::Content(SlashCommandContent::Text {
text: "Cucumber\n".into(),
run_commands_in_text: false
}),
SlashCommandEvent::StartSection {
icon: IconName::Check,
label: "Fruit".into(),
metadata: None
},
SlashCommandEvent::Content(SlashCommandContent::Text {
text: "Banana\n".into(),
run_commands_in_text: false
}),
SlashCommandEvent::EndSection { metadata: None }
]
);
let new_output =
SlashCommandOutput::from_event_stream(output.clone().to_event_stream())
.await
.unwrap();
assert_eq!(new_output, output);
}
// Test output consisting of multiple sections.
{
let text = "Line 1\nLine 2\nLine 3\nLine 4\n".to_string();
let output = SlashCommandOutput {
text,
sections: vec![
SlashCommandOutputSection {
range: 0..6,
icon: IconName::FileCode,
label: "Section 1".into(),
metadata: Some(json!({ "a": true })),
},
SlashCommandOutputSection {
range: 7..13,
icon: IconName::FileDoc,
label: "Section 2".into(),
metadata: Some(json!({ "b": true })),
},
SlashCommandOutputSection {
range: 14..20,
icon: IconName::FileGit,
label: "Section 3".into(),
metadata: Some(json!({ "c": true })),
},
SlashCommandOutputSection {
range: 21..27,
icon: IconName::FileToml,
label: "Section 4".into(),
metadata: Some(json!({ "d": true })),
},
],
run_commands_in_text: false,
};
let events = output.clone().to_event_stream().collect::<Vec<_>>().await;
let events = events
.into_iter()
.filter_map(|event| event.ok())
.collect::<Vec<_>>();
assert_eq!(
events,
vec![
SlashCommandEvent::StartSection {
icon: IconName::FileCode,
label: "Section 1".into(),
metadata: Some(json!({ "a": true }))
},
SlashCommandEvent::Content(SlashCommandContent::Text {
text: "Line 1".into(),
run_commands_in_text: false
}),
SlashCommandEvent::EndSection {
metadata: Some(json!({ "a": true }))
},
SlashCommandEvent::Content(SlashCommandContent::Text {
text: "\n".into(),
run_commands_in_text: false
}),
SlashCommandEvent::StartSection {
icon: IconName::FileDoc,
label: "Section 2".into(),
metadata: Some(json!({ "b": true }))
},
SlashCommandEvent::Content(SlashCommandContent::Text {
text: "Line 2".into(),
run_commands_in_text: false
}),
SlashCommandEvent::EndSection {
metadata: Some(json!({ "b": true }))
},
SlashCommandEvent::Content(SlashCommandContent::Text {
text: "\n".into(),
run_commands_in_text: false
}),
SlashCommandEvent::StartSection {
icon: IconName::FileGit,
label: "Section 3".into(),
metadata: Some(json!({ "c": true }))
},
SlashCommandEvent::Content(SlashCommandContent::Text {
text: "Line 3".into(),
run_commands_in_text: false
}),
SlashCommandEvent::EndSection {
metadata: Some(json!({ "c": true }))
},
SlashCommandEvent::Content(SlashCommandContent::Text {
text: "\n".into(),
run_commands_in_text: false
}),
SlashCommandEvent::StartSection {
icon: IconName::FileToml,
label: "Section 4".into(),
metadata: Some(json!({ "d": true }))
},
SlashCommandEvent::Content(SlashCommandContent::Text {
text: "Line 4".into(),
run_commands_in_text: false
}),
SlashCommandEvent::EndSection {
metadata: Some(json!({ "d": true }))
},
SlashCommandEvent::Content(SlashCommandContent::Text {
text: "\n".into(),
run_commands_in_text: false
}),
]
);
let new_output =
SlashCommandOutput::from_event_stream(output.clone().to_event_stream())
.await
.unwrap();
assert_eq!(new_output, output);
}
}
}

View File

@@ -11,7 +11,6 @@ use gpui::{
};
use markdown_preview::markdown_preview_view::{MarkdownPreviewMode, MarkdownPreviewView};
use paths::remote_servers_dir;
use schemars::JsonSchema;
use serde::Deserialize;
use serde_derive::Serialize;
@@ -432,11 +431,10 @@ impl AutoUpdater {
cx.notify();
}
pub async fn download_remote_server_release(
pub async fn get_latest_remote_server_release(
os: &str,
arch: &str,
release_channel: ReleaseChannel,
version: Option<SemanticVersion>,
mut release_channel: ReleaseChannel,
cx: &mut AsyncAppContext,
) -> Result<PathBuf> {
let this = cx.update(|cx| {
@@ -446,12 +444,15 @@ impl AutoUpdater {
.ok_or_else(|| anyhow!("auto-update not initialized"))
})??;
let release = Self::get_release(
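// Treat the dev channel as nightly when looking up remote server releases.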
if release_channel == ReleaseChannel::Dev {
release_channel = ReleaseChannel::Nightly;
}
let release = Self::get_latest_release(
&this,
"zed-remote-server",
os,
arch,
version,
Some(release_channel),
cx,
)
@@ -466,21 +467,17 @@ impl AutoUpdater {
let client = this.read_with(cx, |this, _| this.http_client.clone())?;
if smol::fs::metadata(&version_path).await.is_err() {
log::info!(
"downloading zed-remote-server {os} {arch} version {}",
release.version
);
log::info!("downloading zed-remote-server {os} {arch}");
download_remote_server_binary(&version_path, release, client, cx).await?;
}
Ok(version_path)
}
pub async fn get_remote_server_release_url(
pub async fn get_latest_remote_server_release_url(
os: &str,
arch: &str,
release_channel: ReleaseChannel,
version: Option<SemanticVersion>,
mut release_channel: ReleaseChannel,
cx: &mut AsyncAppContext,
) -> Result<(String, String)> {
let this = cx.update(|cx| {
@@ -490,12 +487,15 @@ impl AutoUpdater {
.ok_or_else(|| anyhow!("auto-update not initialized"))
})??;
let release = Self::get_release(
if release_channel == ReleaseChannel::Dev {
release_channel = ReleaseChannel::Nightly;
}
let release = Self::get_latest_release(
&this,
"zed-remote-server",
os,
arch,
version,
Some(release_channel),
cx,
)
@@ -507,56 +507,6 @@ impl AutoUpdater {
Ok((release.url, body))
}
async fn get_release(
this: &Model<Self>,
asset: &str,
os: &str,
arch: &str,
version: Option<SemanticVersion>,
release_channel: Option<ReleaseChannel>,
cx: &mut AsyncAppContext,
) -> Result<JsonRelease> {
let client = this.read_with(cx, |this, _| this.http_client.clone())?;
if let Some(version) = version {
let channel = release_channel.map(|c| c.dev_name()).unwrap_or("stable");
let url = format!("/api/releases/{channel}/{version}/{asset}-{os}-{arch}.gz?update=1",);
Ok(JsonRelease {
version: version.to_string(),
url: client.build_url(&url),
})
} else {
let mut url_string = client.build_url(&format!(
"/api/releases/latest?asset={}&os={}&arch={}",
asset, os, arch
));
if let Some(param) = release_channel.and_then(|c| c.release_query_param()) {
url_string += "&";
url_string += param;
}
let mut response = client.get(&url_string, Default::default(), true).await?;
let mut body = Vec::new();
response.body_mut().read_to_end(&mut body).await?;
if !response.status().is_success() {
return Err(anyhow!(
"failed to fetch release: {:?}",
String::from_utf8_lossy(&body),
));
}
serde_json::from_slice(body.as_slice()).with_context(|| {
format!(
"error deserializing release {:?}",
String::from_utf8_lossy(&body),
)
})
}
}
async fn get_latest_release(
this: &Model<Self>,
asset: &str,
@@ -565,7 +515,38 @@ impl AutoUpdater {
release_channel: Option<ReleaseChannel>,
cx: &mut AsyncAppContext,
) -> Result<JsonRelease> {
Self::get_release(this, asset, os, arch, None, release_channel, cx).await
let client = this.read_with(cx, |this, _| this.http_client.clone())?;
let mut url_string = client.build_url(&format!(
"/api/releases/latest?asset={}&os={}&arch={}",
asset, os, arch
));
if let Some(param) = release_channel.and_then(|c| c.release_query_param()) {
url_string += "&";
url_string += param;
}
let mut response = client.get(&url_string, Default::default(), true).await?;
let mut body = Vec::new();
response
.body_mut()
.read_to_end(&mut body)
.await
.context("error reading release")?;
if !response.status().is_success() {
Err(anyhow!(
"failed to fetch release: {:?}",
String::from_utf8_lossy(&body),
))?;
}
serde_json::from_slice(body.as_slice()).with_context(|| {
format!(
"error deserializing release {:?}",
String::from_utf8_lossy(&body),
)
})
}
async fn update(this: Model<Self>, mut cx: AsyncAppContext) -> Result<()> {
@@ -680,15 +661,12 @@ async fn download_remote_server_binary(
client: Arc<HttpClientWithUrl>,
cx: &AsyncAppContext,
) -> Result<()> {
let temp = tempfile::Builder::new().tempfile_in(remote_servers_dir())?;
let mut temp_file = File::create(&temp).await?;
let mut target_file = File::create(&target_path).await?;
let update_request_body = build_remote_server_update_request_body(cx)?;
let request_body = AsyncBody::from(serde_json::to_string(&update_request_body)?);
let mut response = client.get(&release.url, request_body, true).await?;
smol::io::copy(response.body_mut(), &mut temp_file).await?;
smol::fs::rename(&temp, &target_path).await?;
smol::io::copy(response.body_mut(), &mut target_file).await?;
Ok(())
}

View File

@@ -1194,15 +1194,26 @@ impl Room {
project: Model<Project>,
cx: &mut ModelContext<Self>,
) -> Task<Result<u64>> {
if let Some(project_id) = project.read(cx).remote_id() {
return Task::ready(Ok(project_id));
}
let request = if let Some(dev_server_project_id) = project.read(cx).dev_server_project_id()
{
self.client.request(proto::ShareProject {
room_id: self.id(),
worktrees: vec![],
dev_server_project_id: Some(dev_server_project_id.0),
is_ssh_project: false,
})
} else {
if let Some(project_id) = project.read(cx).remote_id() {
return Task::ready(Ok(project_id));
}
let request = self.client.request(proto::ShareProject {
room_id: self.id(),
worktrees: project.read(cx).worktree_metadata_protos(cx),
is_ssh_project: project.read(cx).is_via_ssh(),
});
self.client.request(proto::ShareProject {
room_id: self.id(),
worktrees: project.read(cx).worktree_metadata_protos(cx),
dev_server_project_id: None,
is_ssh_project: project.read(cx).is_via_ssh(),
})
};
cx.spawn(|this, mut cx| async move {
let response = request.await?;

View File

@@ -15,6 +15,7 @@ pub enum CliRequest {
urls: Vec<String>,
wait: bool,
open_new_workspace: Option<bool>,
dev_server_token: Option<String>,
env: Option<HashMap<String, String>>,
},
}

View File

@@ -151,12 +151,6 @@ fn main() -> Result<()> {
}
}
if let Some(_) = args.dev_server_token {
return Err(anyhow::anyhow!(
"Dev servers were removed in v0.157.x please upgrade to SSH remoting: https://zed.dev/docs/remote-development"
))?;
}
let sender: JoinHandle<anyhow::Result<()>> = thread::spawn({
let exit_status = exit_status.clone();
move || {
@@ -168,6 +162,7 @@ fn main() -> Result<()> {
urls,
wait: args.wait,
open_new_workspace,
dev_server_token: args.dev_server_token,
env,
})?;

View File

@@ -30,6 +30,7 @@ use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
use settings::{Settings, SettingsSources};
use socks::connect_socks_proxy_stream;
use std::fmt;
use std::pin::Pin;
use std::{
any::TypeId,
@@ -53,6 +54,15 @@ pub use rpc::*;
pub use telemetry_events::Event;
pub use user::*;
#[derive(Debug, Clone, Eq, PartialEq)]
pub struct DevServerToken(pub String);
impl fmt::Display for DevServerToken {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "{}", self.0)
}
}
static ZED_SERVER_URL: LazyLock<Option<String>> =
LazyLock::new(|| std::env::var("ZED_SERVER_URL").ok());
static ZED_RPC_URL: LazyLock<Option<String>> = LazyLock::new(|| std::env::var("ZED_RPC_URL").ok());
@@ -294,14 +304,20 @@ struct ClientState {
}
#[derive(Clone, Debug, Eq, PartialEq)]
pub struct Credentials {
pub user_id: u64,
pub access_token: String,
pub enum Credentials {
DevServer { token: DevServerToken },
User { user_id: u64, access_token: String },
}
impl Credentials {
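/// Formats the value used for the `Authorization` header: either
/// `dev-server-token <token>` for a dev server, or `<user_id> <access_token>`
/// for a user.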
pub fn authorization_header(&self) -> String {
format!("{} {}", self.user_id, self.access_token)
match self {
Credentials::DevServer { token } => format!("dev-server-token {}", token),
Credentials::User {
user_id,
access_token,
} => format!("{} {}", user_id, access_token),
}
}
}
@@ -584,11 +600,11 @@ impl Client {
}
pub fn user_id(&self) -> Option<u64> {
self.state
.read()
.credentials
.as_ref()
.map(|credentials| credentials.user_id)
if let Some(Credentials::User { user_id, .. }) = self.state.read().credentials.as_ref() {
Some(*user_id)
} else {
None
}
}
pub fn peer_id(&self) -> Option<PeerId> {
@@ -777,6 +793,11 @@ impl Client {
.is_some()
}
pub fn set_dev_server_token(&self, token: DevServerToken) -> &Self {
self.state.write().credentials = Some(Credentials::DevServer { token });
self
}
#[async_recursion(?Send)]
pub async fn authenticate_and_connect(
self: &Arc<Self>,
@@ -827,7 +848,9 @@ impl Client {
}
}
let credentials = credentials.unwrap();
self.set_id(credentials.user_id);
if let Credentials::User { user_id, .. } = &credentials {
self.set_id(*user_id);
}
if was_disconnected {
self.set_status(Status::Connecting, cx);
@@ -843,8 +866,9 @@ impl Client {
Ok(conn) => {
self.state.write().credentials = Some(credentials.clone());
if !read_from_provider && IMPERSONATE_LOGIN.is_none() {
self.credentials_provider.write_credentials(credentials.user_id, credentials.access_token, cx).await.log_err();
if let Credentials::User{user_id, access_token} = credentials {
self.credentials_provider.write_credentials(user_id, access_token, cx).await.log_err();
}
}
futures::select_biased! {
@@ -1277,7 +1301,7 @@ impl Client {
.decrypt_string(&access_token)
.context("failed to decrypt access token")?;
Ok(Credentials {
Ok(Credentials::User {
user_id: user_id.parse()?,
access_token,
})
@@ -1398,7 +1422,7 @@ impl Client {
// Use the admin API token to authenticate as the impersonated user.
api_token.insert_str(0, "ADMIN_TOKEN:");
Ok(Credentials {
Ok(Credentials::User {
user_id: response.user.id,
access_token: api_token,
})
@@ -1643,7 +1667,7 @@ impl CredentialsProvider for DevelopmentCredentialsProvider {
let credentials: DevelopmentCredentials = serde_json::from_slice(&json).log_err()?;
Some(Credentials {
Some(Credentials::User {
user_id: credentials.user_id,
access_token: credentials.access_token,
})
@@ -1697,7 +1721,7 @@ impl CredentialsProvider for KeychainCredentialsProvider {
.await
.log_err()??;
Some(Credentials {
Some(Credentials::User {
user_id: user_id.parse().ok()?,
access_token: String::from_utf8(access_token).ok()?,
})
@@ -1831,7 +1855,7 @@ mod tests {
// Time out when client tries to connect.
client.override_authenticate(move |cx| {
cx.background_executor().spawn(async move {
Ok(Credentials {
Ok(Credentials::User {
user_id,
access_token: "token".into(),
})

View File

@@ -49,7 +49,7 @@ impl FakeServer {
let mut state = state.lock();
state.auth_count += 1;
let access_token = state.access_token.to_string();
Ok(Credentials {
Ok(Credentials::User {
user_id: client_user_id,
access_token,
})
@@ -73,7 +73,7 @@ impl FakeServer {
}
if credentials
!= (Credentials {
!= (Credentials::User {
user_id: client_user_id,
access_token: state.lock().access_token.to_string(),
})

View File

@@ -28,6 +28,9 @@ impl std::fmt::Display for ChannelId {
#[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Clone, Copy)]
pub struct ProjectId(pub u64);
#[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Clone, Copy)]
pub struct DevServerId(pub u64);
#[derive(
Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Clone, Copy, serde::Serialize, serde::Deserialize,
)]

View File

@@ -86,6 +86,7 @@ client = { workspace = true, features = ["test-support"] }
collab_ui = { workspace = true, features = ["test-support"] }
collections = { workspace = true, features = ["test-support"] }
ctor.workspace = true
dev_server_projects.workspace = true
editor = { workspace = true, features = ["test-support"] }
env_logger.workspace = true
file_finder.workspace = true
@@ -93,6 +94,7 @@ fs = { workspace = true, features = ["test-support"] }
git = { workspace = true, features = ["test-support"] }
git_hosting_providers.workspace = true
gpui = { workspace = true, features = ["test-support"] }
headless.workspace = true
hyper.workspace = true
indoc.workspace = true
language = { workspace = true, features = ["test-support"] }

View File

@@ -252,10 +252,7 @@ async fn create_billing_subscription(
let default_model = llm_db.model(rpc::LanguageModelProvider::Anthropic, "claude-3-5-sonnet")?;
let stripe_model = stripe_billing.register_model(default_model).await?;
let success_url = format!(
"{}/account?checkout_complete=1",
app.config.zed_dot_dev_url()
);
let success_url = format!("{}/account", app.config.zed_dot_dev_url());
let checkout_session_url = stripe_billing
.checkout(customer_id, &user.github_login, &stripe_model, &success_url)
.await?;

View File

@@ -1,5 +1,5 @@
use crate::{
db::{self, AccessTokenId, Database, UserId},
db::{self, dev_server, AccessTokenId, Database, DevServerId, UserId},
rpc::Principal,
AppState, Error, Result,
};
@@ -44,10 +44,19 @@ pub async fn validate_header<B>(mut req: Request<B>, next: Next<B>) -> impl Into
let first = auth_header.next().unwrap_or("");
if first == "dev-server-token" {
Err(Error::http(
StatusCode::UNAUTHORIZED,
"Dev servers were removed in Zed 0.157 please upgrade to SSH remoting".to_string(),
))?;
let dev_server_token = auth_header.next().ok_or_else(|| {
Error::http(
StatusCode::BAD_REQUEST,
"missing dev-server-token token in authorization header".to_string(),
)
})?;
let dev_server = verify_dev_server_token(dev_server_token, &state.db)
.await
.map_err(|e| Error::http(StatusCode::UNAUTHORIZED, format!("{}", e)))?;
req.extensions_mut()
.insert(Principal::DevServer(dev_server));
return Ok::<_, Error>(next.run(req).await);
}
let user_id = UserId(first.parse().map_err(|_| {
@@ -231,6 +240,41 @@ pub async fn verify_access_token(
})
}
pub fn generate_dev_server_token(id: usize, access_token: String) -> String {
format!("{}.{}", id, access_token)
}
pub async fn verify_dev_server_token(
dev_server_token: &str,
db: &Arc<Database>,
) -> anyhow::Result<dev_server::Model> {
let (id, token) = split_dev_server_token(dev_server_token)?;
let token_hash = hash_access_token(token);
let server = db.get_dev_server(id).await?;
if server
.hashed_token
.as_bytes()
.ct_eq(token_hash.as_ref())
.into()
{
Ok(server)
} else {
Err(anyhow!("wrong token for dev server"))
}
}
// A dev_server_token has the format <id>.<base64>. This makes it
// relatively easy to copy/paste around.
pub fn split_dev_server_token(dev_server_token: &str) -> anyhow::Result<(DevServerId, &str)> {
let mut parts = dev_server_token.splitn(2, '.');
let id = DevServerId(parts.next().unwrap_or_default().parse()?);
let token = parts
.next()
.ok_or_else(|| anyhow!("invalid dev server token format"))?;
Ok((id, token))
}
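// Illustrative round trip (a sketch, not part of this change): a token built by
// `generate_dev_server_token(1, "abc123".to_string())` is "1.abc123", and
// `split_dev_server_token("1.abc123")` recovers `(DevServerId(1), "abc123")`.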
#[cfg(test)]
mod test {
use rand::thread_rng;

View File

@@ -726,6 +726,7 @@ pub struct Project {
pub collaborators: Vec<ProjectCollaborator>,
pub worktrees: BTreeMap<u64, Worktree>,
pub language_servers: Vec<proto::LanguageServer>,
pub dev_server_project_id: Option<DevServerProjectId>,
}
pub struct ProjectCollaborator {

View File

@@ -79,6 +79,7 @@ id_type!(ChannelChatParticipantId);
id_type!(ChannelId);
id_type!(ChannelMemberId);
id_type!(ContactId);
id_type!(DevServerId);
id_type!(ExtensionId);
id_type!(FlagId);
id_type!(FollowerId);
@@ -88,6 +89,7 @@ id_type!(NotificationId);
id_type!(NotificationKindId);
id_type!(ProjectCollaboratorId);
id_type!(ProjectId);
id_type!(DevServerProjectId);
id_type!(ReplicaId);
id_type!(RoomId);
id_type!(RoomParticipantId);
@@ -275,6 +277,12 @@ impl From<ChannelVisibility> for i32 {
}
}
#[derive(Copy, Clone, Debug, Serialize, PartialEq)]
pub enum PrincipalId {
UserId(UserId),
DevServerId(DevServerId),
}
/// Indicate whether a [Buffer] has permissions to edit.
#[derive(PartialEq, Clone, Copy, Debug)]
pub enum Capability {

View File

@@ -8,6 +8,8 @@ pub mod buffers;
pub mod channels;
pub mod contacts;
pub mod contributors;
pub mod dev_server_projects;
pub mod dev_servers;
pub mod embeddings;
pub mod extensions;
pub mod hosted_projects;

View File

@@ -1 +1,365 @@
use anyhow::anyhow;
use rpc::{
proto::{self},
ConnectionId,
};
use sea_orm::{
ActiveModelTrait, ActiveValue, ColumnTrait, Condition, DatabaseTransaction, EntityTrait,
IntoActiveModel, ModelTrait, QueryFilter,
};
use crate::db::ProjectId;
use super::{
dev_server, dev_server_project, project, project_collaborator, worktree, Database, DevServerId,
DevServerProjectId, RejoinedProject, ResharedProject, ServerId, UserId,
};
impl Database {
pub async fn get_dev_server_project(
&self,
dev_server_project_id: DevServerProjectId,
) -> crate::Result<dev_server_project::Model> {
self.transaction(|tx| async move {
Ok(
dev_server_project::Entity::find_by_id(dev_server_project_id)
.one(&*tx)
.await?
.ok_or_else(|| {
anyhow!("no dev server project with id {}", dev_server_project_id)
})?,
)
})
.await
}
pub async fn get_projects_for_dev_server(
&self,
dev_server_id: DevServerId,
) -> crate::Result<Vec<proto::DevServerProject>> {
self.transaction(|tx| async move {
self.get_projects_for_dev_server_internal(dev_server_id, &tx)
.await
})
.await
}
pub async fn get_projects_for_dev_server_internal(
&self,
dev_server_id: DevServerId,
tx: &DatabaseTransaction,
) -> crate::Result<Vec<proto::DevServerProject>> {
let servers = dev_server_project::Entity::find()
.filter(dev_server_project::Column::DevServerId.eq(dev_server_id))
.find_also_related(project::Entity)
.all(tx)
.await?;
Ok(servers
.into_iter()
.map(|(dev_server_project, project)| dev_server_project.to_proto(project))
.collect())
}
pub async fn dev_server_project_ids_for_user(
&self,
user_id: UserId,
tx: &DatabaseTransaction,
) -> crate::Result<Vec<DevServerProjectId>> {
let dev_servers = dev_server::Entity::find()
.filter(dev_server::Column::UserId.eq(user_id))
.find_with_related(dev_server_project::Entity)
.all(tx)
.await?;
Ok(dev_servers
.into_iter()
.flat_map(|(_, projects)| projects.into_iter().map(|p| p.id))
.collect())
}
pub async fn owner_for_dev_server_project(
&self,
dev_server_project_id: DevServerProjectId,
tx: &DatabaseTransaction,
) -> crate::Result<UserId> {
let dev_server = dev_server_project::Entity::find_by_id(dev_server_project_id)
.find_also_related(dev_server::Entity)
.one(tx)
.await?
.and_then(|(_, dev_server)| dev_server)
.ok_or_else(|| anyhow!("no dev server project"))?;
Ok(dev_server.user_id)
}
pub async fn get_stale_dev_server_projects(
&self,
connection: ConnectionId,
) -> crate::Result<Vec<ProjectId>> {
self.transaction(|tx| async move {
let projects = project::Entity::find()
.filter(
Condition::all()
.add(project::Column::HostConnectionId.eq(connection.id))
.add(project::Column::HostConnectionServerId.eq(connection.owner_id)),
)
.all(&*tx)
.await?;
Ok(projects.into_iter().map(|p| p.id).collect())
})
.await
}
pub async fn create_dev_server_project(
&self,
dev_server_id: DevServerId,
path: &str,
user_id: UserId,
) -> crate::Result<(dev_server_project::Model, proto::DevServerProjectsUpdate)> {
self.transaction(|tx| async move {
let dev_server = dev_server::Entity::find_by_id(dev_server_id)
.one(&*tx)
.await?
.ok_or_else(|| anyhow!("no dev server with id {}", dev_server_id))?;
if dev_server.user_id != user_id {
return Err(anyhow!("not your dev server"))?;
}
let project = dev_server_project::Entity::insert(dev_server_project::ActiveModel {
id: ActiveValue::NotSet,
dev_server_id: ActiveValue::Set(dev_server_id),
paths: ActiveValue::Set(dev_server_project::JSONPaths(vec![path.to_string()])),
})
.exec_with_returning(&*tx)
.await?;
let status = self
.dev_server_projects_update_internal(user_id, &tx)
.await?;
Ok((project, status))
})
.await
}
pub async fn update_dev_server_project(
&self,
id: DevServerProjectId,
paths: &[String],
user_id: UserId,
) -> crate::Result<(dev_server_project::Model, proto::DevServerProjectsUpdate)> {
self.transaction(move |tx| async move {
let paths = paths.to_owned();
let Some((project, Some(dev_server))) = dev_server_project::Entity::find_by_id(id)
.find_also_related(dev_server::Entity)
.one(&*tx)
.await?
else {
return Err(anyhow!("no such dev server project"))?;
};
if dev_server.user_id != user_id {
return Err(anyhow!("not your dev server"))?;
}
let mut project = project.into_active_model();
project.paths = ActiveValue::Set(dev_server_project::JSONPaths(paths));
let project = project.update(&*tx).await?;
let status = self
.dev_server_projects_update_internal(user_id, &tx)
.await?;
Ok((project, status))
})
.await
}
pub async fn delete_dev_server_project(
&self,
dev_server_project_id: DevServerProjectId,
dev_server_id: DevServerId,
user_id: UserId,
) -> crate::Result<(Vec<proto::DevServerProject>, proto::DevServerProjectsUpdate)> {
self.transaction(|tx| async move {
project::Entity::delete_many()
.filter(project::Column::DevServerProjectId.eq(dev_server_project_id))
.exec(&*tx)
.await?;
let result = dev_server_project::Entity::delete_by_id(dev_server_project_id)
.exec(&*tx)
.await?;
if result.rows_affected != 1 {
return Err(anyhow!(
"no dev server project with id {}",
dev_server_project_id
))?;
}
let status = self
.dev_server_projects_update_internal(user_id, &tx)
.await?;
let projects = self
.get_projects_for_dev_server_internal(dev_server_id, &tx)
.await?;
Ok((projects, status))
})
.await
}
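/// Registers a project row for a dev server project being shared from the
/// given dev server connection, inserts its worktrees, and returns the proto
/// representation along with an updated projects list for the owner.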
pub async fn share_dev_server_project(
&self,
dev_server_project_id: DevServerProjectId,
dev_server_id: DevServerId,
connection: ConnectionId,
worktrees: &[proto::WorktreeMetadata],
) -> crate::Result<(
proto::DevServerProject,
UserId,
proto::DevServerProjectsUpdate,
)> {
self.transaction(|tx| async move {
let dev_server = dev_server::Entity::find_by_id(dev_server_id)
.one(&*tx)
.await?
.ok_or_else(|| anyhow!("no dev server with id {}", dev_server_id))?;
let dev_server_project = dev_server_project::Entity::find_by_id(dev_server_project_id)
.one(&*tx)
.await?
.ok_or_else(|| {
anyhow!("no dev server project with id {}", dev_server_project_id)
})?;
if dev_server_project.dev_server_id != dev_server_id {
return Err(anyhow!("dev server project shared from wrong server"))?;
}
let project = project::ActiveModel {
room_id: ActiveValue::Set(None),
host_user_id: ActiveValue::Set(None),
host_connection_id: ActiveValue::set(Some(connection.id as i32)),
host_connection_server_id: ActiveValue::set(Some(ServerId(
connection.owner_id as i32,
))),
id: ActiveValue::NotSet,
hosted_project_id: ActiveValue::Set(None),
dev_server_project_id: ActiveValue::Set(Some(dev_server_project_id)),
}
.insert(&*tx)
.await?;
if !worktrees.is_empty() {
worktree::Entity::insert_many(worktrees.iter().map(|worktree| {
worktree::ActiveModel {
id: ActiveValue::set(worktree.id as i64),
project_id: ActiveValue::set(project.id),
abs_path: ActiveValue::set(worktree.abs_path.clone()),
root_name: ActiveValue::set(worktree.root_name.clone()),
visible: ActiveValue::set(worktree.visible),
scan_id: ActiveValue::set(0),
completed_scan_id: ActiveValue::set(0),
}
}))
.exec(&*tx)
.await?;
}
let status = self
.dev_server_projects_update_internal(dev_server.user_id, &tx)
.await?;
Ok((
dev_server_project.to_proto(Some(project)),
dev_server.user_id,
status,
))
})
.await
}
pub async fn reshare_dev_server_projects(
&self,
reshared_projects: &Vec<proto::UpdateProject>,
dev_server_id: DevServerId,
connection: ConnectionId,
) -> crate::Result<Vec<ResharedProject>> {
self.transaction(|tx| async move {
let mut ret = Vec::new();
for reshared_project in reshared_projects {
let project_id = ProjectId::from_proto(reshared_project.project_id);
let (project, dev_server_project) = project::Entity::find_by_id(project_id)
.find_also_related(dev_server_project::Entity)
.one(&*tx)
.await?
.ok_or_else(|| anyhow!("project does not exist"))?;
if dev_server_project.map(|rp| rp.dev_server_id) != Some(dev_server_id) {
return Err(anyhow!("dev server project reshared from wrong server"))?;
}
let Ok(old_connection_id) = project.host_connection() else {
return Err(anyhow!("dev server project was not shared"))?;
};
project::Entity::update(project::ActiveModel {
id: ActiveValue::set(project_id),
host_connection_id: ActiveValue::set(Some(connection.id as i32)),
host_connection_server_id: ActiveValue::set(Some(ServerId(
connection.owner_id as i32,
))),
..Default::default()
})
.exec(&*tx)
.await?;
let collaborators = project
.find_related(project_collaborator::Entity)
.all(&*tx)
.await?;
self.update_project_worktrees(project_id, &reshared_project.worktrees, &tx)
.await?;
ret.push(super::ResharedProject {
id: project_id,
old_connection_id,
collaborators: collaborators
.iter()
.map(|collaborator| super::ProjectCollaborator {
connection_id: collaborator.connection(),
user_id: collaborator.user_id,
replica_id: collaborator.replica_id,
is_host: collaborator.is_host,
})
.collect(),
worktrees: reshared_project.worktrees.clone(),
});
}
Ok(ret)
})
.await
}
pub async fn rejoin_dev_server_projects(
&self,
rejoined_projects: &Vec<proto::RejoinProject>,
user_id: UserId,
connection_id: ConnectionId,
) -> crate::Result<Vec<RejoinedProject>> {
self.transaction(|tx| async move {
let mut ret = Vec::new();
for rejoined_project in rejoined_projects {
if let Some(project) = self
.rejoin_project_internal(&tx, rejoined_project, user_id, connection_id)
.await?
{
ret.push(project);
}
}
Ok(ret)
})
.await
}
}

View File

@@ -1 +1,222 @@
use rpc::proto;
use sea_orm::{
ActiveValue, ColumnTrait, DatabaseTransaction, EntityTrait, IntoActiveModel, QueryFilter,
};
use super::{dev_server, dev_server_project, Database, DevServerId, UserId};
impl Database {
pub async fn get_dev_server(
&self,
dev_server_id: DevServerId,
) -> crate::Result<dev_server::Model> {
self.transaction(|tx| async move {
Ok(dev_server::Entity::find_by_id(dev_server_id)
.one(&*tx)
.await?
.ok_or_else(|| anyhow::anyhow!("no dev server with id {}", dev_server_id))?)
})
.await
}
pub async fn get_dev_server_for_user(
&self,
dev_server_id: DevServerId,
user_id: UserId,
) -> crate::Result<dev_server::Model> {
self.transaction(|tx| async move {
let server = dev_server::Entity::find_by_id(dev_server_id)
.one(&*tx)
.await?
.ok_or_else(|| anyhow::anyhow!("no dev server with id {}", dev_server_id))?;
if server.user_id != user_id {
return Err(anyhow::anyhow!(
"dev server {} is not owned by user {}",
dev_server_id,
user_id
))?;
}
Ok(server)
})
.await
}
pub async fn get_dev_servers(&self, user_id: UserId) -> crate::Result<Vec<dev_server::Model>> {
self.transaction(|tx| async move {
Ok(dev_server::Entity::find()
.filter(dev_server::Column::UserId.eq(user_id))
.all(&*tx)
.await?)
})
.await
}
pub async fn dev_server_projects_update(
&self,
user_id: UserId,
) -> crate::Result<proto::DevServerProjectsUpdate> {
self.transaction(|tx| async move {
self.dev_server_projects_update_internal(user_id, &tx).await
})
.await
}
pub async fn dev_server_projects_update_internal(
&self,
user_id: UserId,
tx: &DatabaseTransaction,
) -> crate::Result<proto::DevServerProjectsUpdate> {
let dev_servers = dev_server::Entity::find()
.filter(dev_server::Column::UserId.eq(user_id))
.all(tx)
.await?;
let dev_server_projects = dev_server_project::Entity::find()
.filter(
dev_server_project::Column::DevServerId
.is_in(dev_servers.iter().map(|d| d.id).collect::<Vec<_>>()),
)
.find_also_related(super::project::Entity)
.all(tx)
.await?;
Ok(proto::DevServerProjectsUpdate {
dev_servers: dev_servers
.into_iter()
.map(|d| d.to_proto(proto::DevServerStatus::Offline))
.collect(),
dev_server_projects: dev_server_projects
.into_iter()
.map(|(dev_server_project, project)| dev_server_project.to_proto(project))
.collect(),
})
}
pub async fn create_dev_server(
&self,
name: &str,
ssh_connection_string: Option<&str>,
hashed_access_token: &str,
user_id: UserId,
) -> crate::Result<(dev_server::Model, proto::DevServerProjectsUpdate)> {
self.transaction(|tx| async move {
if name.trim().is_empty() {
return Err(anyhow::anyhow!(proto::ErrorCode::Forbidden))?;
}
let dev_server = dev_server::Entity::insert(dev_server::ActiveModel {
id: ActiveValue::NotSet,
hashed_token: ActiveValue::Set(hashed_access_token.to_string()),
name: ActiveValue::Set(name.trim().to_string()),
user_id: ActiveValue::Set(user_id),
ssh_connection_string: ActiveValue::Set(
ssh_connection_string.map(ToOwned::to_owned),
),
})
.exec_with_returning(&*tx)
.await?;
let dev_server_projects = self
.dev_server_projects_update_internal(user_id, &tx)
.await?;
Ok((dev_server, dev_server_projects))
})
.await
}
pub async fn update_dev_server_token(
&self,
id: DevServerId,
hashed_token: &str,
user_id: UserId,
) -> crate::Result<proto::DevServerProjectsUpdate> {
self.transaction(|tx| async move {
let Some(dev_server) = dev_server::Entity::find_by_id(id).one(&*tx).await? else {
return Err(anyhow::anyhow!("no dev server with id {}", id))?;
};
if dev_server.user_id != user_id {
return Err(anyhow::anyhow!(proto::ErrorCode::Forbidden))?;
}
dev_server::Entity::update(dev_server::ActiveModel {
hashed_token: ActiveValue::Set(hashed_token.to_string()),
..dev_server.clone().into_active_model()
})
.exec(&*tx)
.await?;
let dev_server_projects = self
.dev_server_projects_update_internal(user_id, &tx)
.await?;
Ok(dev_server_projects)
})
.await
}
pub async fn rename_dev_server(
&self,
id: DevServerId,
name: &str,
ssh_connection_string: Option<&str>,
user_id: UserId,
) -> crate::Result<proto::DevServerProjectsUpdate> {
self.transaction(|tx| async move {
let Some(dev_server) = dev_server::Entity::find_by_id(id).one(&*tx).await? else {
return Err(anyhow::anyhow!("no dev server with id {}", id))?;
};
if dev_server.user_id != user_id || name.trim().is_empty() {
return Err(anyhow::anyhow!(proto::ErrorCode::Forbidden))?;
}
dev_server::Entity::update(dev_server::ActiveModel {
name: ActiveValue::Set(name.trim().to_string()),
ssh_connection_string: ActiveValue::Set(
ssh_connection_string.map(ToOwned::to_owned),
),
..dev_server.clone().into_active_model()
})
.exec(&*tx)
.await?;
let dev_server_projects = self
.dev_server_projects_update_internal(user_id, &tx)
.await?;
Ok(dev_server_projects)
})
.await
}
pub async fn delete_dev_server(
&self,
id: DevServerId,
user_id: UserId,
) -> crate::Result<proto::DevServerProjectsUpdate> {
self.transaction(|tx| async move {
let Some(dev_server) = dev_server::Entity::find_by_id(id).one(&*tx).await? else {
return Err(anyhow::anyhow!("no dev server with id {}", id))?;
};
if dev_server.user_id != user_id {
return Err(anyhow::anyhow!(proto::ErrorCode::Forbidden))?;
}
dev_server_project::Entity::delete_many()
.filter(dev_server_project::Column::DevServerId.eq(id))
.exec(&*tx)
.await?;
dev_server::Entity::delete(dev_server.into_active_model())
.exec(&*tx)
.await?;
let dev_server_projects = self
.dev_server_projects_update_internal(user_id, &tx)
.await?;
Ok(dev_server_projects)
})
.await
}
}

View File

@@ -32,6 +32,7 @@ impl Database {
connection: ConnectionId,
worktrees: &[proto::WorktreeMetadata],
is_ssh_project: bool,
dev_server_project_id: Option<DevServerProjectId>,
) -> Result<TransactionGuard<(ProjectId, proto::Room)>> {
self.room_transaction(room_id, |tx| async move {
let participant = room_participant::Entity::find()
@@ -60,6 +61,38 @@ impl Database {
return Err(anyhow!("guests cannot share projects"))?;
}
if let Some(dev_server_project_id) = dev_server_project_id {
let project = project::Entity::find()
.filter(project::Column::DevServerProjectId.eq(Some(dev_server_project_id)))
.one(&*tx)
.await?
.ok_or_else(|| anyhow!("no remote project"))?;
let (_, dev_server) = dev_server_project::Entity::find_by_id(dev_server_project_id)
.find_also_related(dev_server::Entity)
.one(&*tx)
.await?
.ok_or_else(|| anyhow!("no dev_server_project"))?;
if !dev_server.is_some_and(|dev_server| dev_server.user_id == participant.user_id) {
return Err(anyhow!("not your dev server"))?;
}
if project.room_id.is_some() {
return Err(anyhow!("project already shared"))?;
};
let project = project::Entity::update(project::ActiveModel {
room_id: ActiveValue::Set(Some(room_id)),
..project.into_active_model()
})
.exec(&*tx)
.await?;
let room = self.get_room(room_id, &tx).await?;
return Ok((project.id, room));
}
let project = project::ActiveModel {
room_id: ActiveValue::set(Some(participant.room_id)),
host_user_id: ActiveValue::set(Some(participant.user_id)),
@@ -69,6 +102,7 @@ impl Database {
))),
id: ActiveValue::NotSet,
hosted_project_id: ActiveValue::Set(None),
dev_server_project_id: ActiveValue::Set(None),
}
.insert(&*tx)
.await?;
@@ -122,6 +156,7 @@ impl Database {
&self,
project_id: ProjectId,
connection: ConnectionId,
user_id: Option<UserId>,
) -> Result<TransactionGuard<(bool, Option<proto::Room>, Vec<ConnectionId>)>> {
self.project_transaction(project_id, |tx| async move {
let guest_connection_ids = self.project_guest_connection_ids(project_id, &tx).await?;
@@ -137,6 +172,25 @@ impl Database {
if project.host_connection()? == connection {
return Ok((true, room, guest_connection_ids));
}
if let Some(dev_server_project_id) = project.dev_server_project_id {
if let Some(user_id) = user_id {
if user_id
!= self
.owner_for_dev_server_project(dev_server_project_id, &tx)
.await?
{
Err(anyhow!("cannot unshare a project hosted by another user"))?
}
project::Entity::update(project::ActiveModel {
room_id: ActiveValue::Set(None),
..project.into_active_model()
})
.exec(&*tx)
.await?;
return Ok((false, room, guest_connection_ids));
}
}
Err(anyhow!("cannot unshare a project hosted by another user"))?
})
.await
@@ -218,16 +272,6 @@ impl Database {
update: &proto::UpdateWorktree,
connection: ConnectionId,
) -> Result<TransactionGuard<Vec<ConnectionId>>> {
if update.removed_entries.len() > proto::MAX_WORKTREE_UPDATE_MAX_CHUNK_SIZE
|| update.updated_entries.len() > proto::MAX_WORKTREE_UPDATE_MAX_CHUNK_SIZE
{
return Err(anyhow!(
"invalid worktree update. removed entries: {}, updated entries: {}",
update.removed_entries.len(),
update.updated_entries.len()
))?;
}
let project_id = ProjectId::from_proto(update.project_id);
let worktree_id = update.worktree_id as i64;
self.project_transaction(project_id, |tx| async move {
@@ -579,6 +623,17 @@ impl Database {
.await
}
pub async fn find_dev_server_project(&self, id: DevServerProjectId) -> Result<project::Model> {
self.transaction(|tx| async move {
Ok(project::Entity::find()
.filter(project::Column::DevServerProjectId.eq(id))
.one(&*tx)
.await?
.ok_or_else(|| anyhow!("no such project"))?)
})
.await
}
/// Adds the given connection to the specified project
/// in the current room.
pub async fn join_project(
@@ -589,7 +644,13 @@ impl Database {
) -> Result<TransactionGuard<(Project, ReplicaId)>> {
self.project_transaction(project_id, |tx| async move {
let (project, role) = self
.access_project(project_id, connection, Capability::ReadOnly, &tx)
.access_project(
project_id,
connection,
PrincipalId::UserId(user_id),
Capability::ReadOnly,
&tx,
)
.await?;
self.join_project_internal(project, user_id, connection, role, &tx)
.await
@@ -780,6 +841,7 @@ impl Database {
worktree_id: None,
})
.collect(),
dev_server_project_id: project.dev_server_project_id,
};
Ok((project, replica_id as ReplicaId))
}
@@ -935,14 +997,29 @@ impl Database {
&self,
project_id: ProjectId,
connection_id: ConnectionId,
principal_id: PrincipalId,
capability: Capability,
tx: &DatabaseTransaction,
) -> Result<(project::Model, ChannelRole)> {
let project = project::Entity::find_by_id(project_id)
let (mut project, dev_server_project) = project::Entity::find_by_id(project_id)
.find_also_related(dev_server_project::Entity)
.one(tx)
.await?
.ok_or_else(|| anyhow!("no such project"))?;
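// Dev server principals may only access projects they themselves host; user
// principals fall through to the room and dev-server ownership checks below.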
let user_id = match principal_id {
PrincipalId::DevServerId(_) => {
if project
.host_connection()
.is_ok_and(|connection| connection == connection_id)
{
return Ok((project, ChannelRole::Admin));
}
return Err(anyhow!("not the project host"))?;
}
PrincipalId::UserId(user_id) => user_id,
};
let role_from_room = if let Some(room_id) = project.room_id {
room_participant::Entity::find()
.filter(room_participant::Column::RoomId.eq(room_id))
@@ -953,8 +1030,34 @@ impl Database {
} else {
None
};
let role_from_dev_server = if let Some(dev_server_project) = dev_server_project {
let dev_server = dev_server::Entity::find_by_id(dev_server_project.dev_server_id)
.one(tx)
.await?
.ok_or_else(|| anyhow!("no such channel"))?;
if user_id == dev_server.user_id {
// If the user left the room "uncleanly" they may rejoin the
// remote project before leave_room runs. In that case kick
// the project out of the room pre-emptively.
if role_from_room.is_none() {
project = project::Entity::update(project::ActiveModel {
room_id: ActiveValue::Set(None),
..project.into_active_model()
})
.exec(tx)
.await?;
}
Some(ChannelRole::Admin)
} else {
None
}
} else {
None
};
let role = role_from_room.unwrap_or(ChannelRole::Banned);
let role = role_from_dev_server
.or(role_from_room)
.unwrap_or(ChannelRole::Banned);
match capability {
Capability::ReadWrite => {
@@ -977,10 +1080,17 @@ impl Database {
&self,
project_id: ProjectId,
connection_id: ConnectionId,
user_id: UserId,
) -> Result<ConnectionId> {
self.project_transaction(project_id, |tx| async move {
let (project, _) = self
.access_project(project_id, connection_id, Capability::ReadOnly, &tx)
.access_project(
project_id,
connection_id,
PrincipalId::UserId(user_id),
Capability::ReadOnly,
&tx,
)
.await?;
project.host_connection()
})
@@ -993,10 +1103,17 @@ impl Database {
&self,
project_id: ProjectId,
connection_id: ConnectionId,
user_id: UserId,
) -> Result<ConnectionId> {
self.project_transaction(project_id, |tx| async move {
let (project, _) = self
.access_project(project_id, connection_id, Capability::ReadWrite, &tx)
.access_project(
project_id,
connection_id,
PrincipalId::UserId(user_id),
Capability::ReadWrite,
&tx,
)
.await?;
project.host_connection()
})
@@ -1004,16 +1121,47 @@ impl Database {
.map(|guard| guard.into_inner())
}
/// Returns the host connection for a request to join a shared project.
pub async fn host_for_owner_project_request(
&self,
project_id: ProjectId,
_connection_id: ConnectionId,
user_id: UserId,
) -> Result<ConnectionId> {
self.project_transaction(project_id, |tx| async move {
let (project, dev_server_project) = project::Entity::find_by_id(project_id)
.find_also_related(dev_server_project::Entity)
.one(&*tx)
.await?
.ok_or_else(|| anyhow!("no such project"))?;
let Some(dev_server_project) = dev_server_project else {
return Err(anyhow!("not a dev server project"))?;
};
let dev_server = dev_server::Entity::find_by_id(dev_server_project.dev_server_id)
.one(&*tx)
.await?
.ok_or_else(|| anyhow!("no such dev server"))?;
if dev_server.user_id != user_id {
return Err(anyhow!("not your project"))?;
}
project.host_connection()
})
.await
.map(|guard| guard.into_inner())
}
pub async fn connections_for_buffer_update(
&self,
project_id: ProjectId,
principal_id: PrincipalId,
connection_id: ConnectionId,
capability: Capability,
) -> Result<TransactionGuard<(ConnectionId, Vec<ConnectionId>)>> {
self.project_transaction(project_id, |tx| async move {
// Authorize
let (project, _) = self
.access_project(project_id, connection_id, capability, &tx)
.access_project(project_id, connection_id, principal_id, capability, &tx)
.await?;
let host_connection_id = project.host_connection()?;

View File

@@ -858,6 +858,25 @@ impl Database {
.all(&*tx)
.await?;
// Find any project in the room whose dev server project belongs to a dev server that this user owns, so it can be unshared.
let dev_server_projects_for_user = self
.dev_server_project_ids_for_user(leaving_participant.user_id, &tx)
.await?;
let dev_server_projects_to_unshare = project::Entity::find()
.filter(
Condition::all()
.add(project::Column::RoomId.eq(room_id))
.add(
project::Column::DevServerProjectId
.is_in(dev_server_projects_for_user.clone()),
),
)
.all(&*tx)
.await?
.into_iter()
.map(|project| project.id)
.collect::<HashSet<_>>();
let mut left_projects = HashMap::default();
let mut collaborators = project_collaborator::Entity::find()
.filter(project_collaborator::Column::ProjectId.is_in(project_ids))
@@ -880,7 +899,9 @@ impl Database {
left_project.connection_ids.push(collaborator_connection_id);
}
if collaborator.is_host && collaborator.connection() == connection {
if (collaborator.is_host && collaborator.connection() == connection)
|| dev_server_projects_to_unshare.contains(&collaborator.project_id)
{
left_project.should_unshare = true;
}
}
@@ -923,6 +944,17 @@ impl Database {
.exec(&*tx)
.await?;
if !dev_server_projects_to_unshare.is_empty() {
project::Entity::update_many()
.filter(project::Column::Id.is_in(dev_server_projects_to_unshare))
.set(project::ActiveModel {
room_id: ActiveValue::Set(None),
..Default::default()
})
.exec(&*tx)
.await?;
}
let (channel, room) = self.get_channel_room(room_id, &tx).await?;
let deleted = if room.participants.is_empty() {
let result = room::Entity::delete_by_id(room_id).exec(&*tx).await?;
@@ -1291,6 +1323,26 @@ impl Database {
project.worktree_root_names.push(db_worktree.root_name);
}
}
} else if let Some(dev_server_project_id) = db_project.dev_server_project_id {
let host = self
.owner_for_dev_server_project(dev_server_project_id, tx)
.await?;
if let Some((_, participant)) = participants
.iter_mut()
.find(|(_, v)| v.user_id == host.to_proto())
{
participant.projects.push(proto::ParticipantProject {
id: db_project.id.to_proto(),
worktree_root_names: Default::default(),
});
let project = participant.projects.last_mut().unwrap();
for db_worktree in db_worktrees {
if db_worktree.visible {
project.worktree_root_names.push(db_worktree.root_name);
}
}
}
}
}

View File

@@ -13,6 +13,8 @@ pub mod channel_message;
pub mod channel_message_mention;
pub mod contact;
pub mod contributor;
pub mod dev_server;
pub mod dev_server_project;
pub mod embedding;
pub mod extension;
pub mod extension_version;

View File

@@ -0,0 +1,39 @@
use crate::db::{DevServerId, UserId};
use rpc::proto;
use sea_orm::entity::prelude::*;
#[derive(Clone, Debug, PartialEq, Eq, DeriveEntityModel)]
#[sea_orm(table_name = "dev_servers")]
pub struct Model {
#[sea_orm(primary_key)]
pub id: DevServerId,
pub name: String,
pub user_id: UserId,
pub hashed_token: String,
pub ssh_connection_string: Option<String>,
}
impl ActiveModelBehavior for ActiveModel {}
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
pub enum Relation {
#[sea_orm(has_many = "super::dev_server_project::Entity")]
RemoteProject,
}
impl Related<super::dev_server_project::Entity> for Entity {
fn to() -> RelationDef {
Relation::RemoteProject.def()
}
}
impl Model {
pub fn to_proto(&self, status: proto::DevServerStatus) -> proto::DevServer {
proto::DevServer {
dev_server_id: self.id.to_proto(),
name: self.name.clone(),
status: status as i32,
ssh_connection_string: self.ssh_connection_string.clone(),
}
}
}

View File

@@ -0,0 +1,59 @@
use super::project;
use crate::db::{DevServerId, DevServerProjectId};
use rpc::proto;
use sea_orm::{entity::prelude::*, FromJsonQueryResult};
use serde::{Deserialize, Serialize};
#[derive(Clone, Debug, PartialEq, Eq, DeriveEntityModel)]
#[sea_orm(table_name = "dev_server_projects")]
pub struct Model {
#[sea_orm(primary_key)]
pub id: DevServerProjectId,
pub dev_server_id: DevServerId,
pub paths: JSONPaths,
}
#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize, FromJsonQueryResult)]
pub struct JSONPaths(pub Vec<String>);
impl ActiveModelBehavior for ActiveModel {}
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
pub enum Relation {
#[sea_orm(has_one = "super::project::Entity")]
Project,
#[sea_orm(
belongs_to = "super::dev_server::Entity",
from = "Column::DevServerId",
to = "super::dev_server::Column::Id"
)]
DevServer,
}
impl Related<super::project::Entity> for Entity {
fn to() -> RelationDef {
Relation::Project.def()
}
}
impl Related<super::dev_server::Entity> for Entity {
fn to() -> RelationDef {
Relation::DevServer.def()
}
}
impl Model {
pub fn to_proto(&self, project: Option<project::Model>) -> proto::DevServerProject {
proto::DevServerProject {
id: self.id.to_proto(),
project_id: project.map(|p| p.id.to_proto()),
dev_server_id: self.dev_server_id.to_proto(),
path: self.paths().first().cloned().unwrap_or_default(),
paths: self.paths().clone(),
}
}
pub fn paths(&self) -> &Vec<String> {
&self.paths.0
}
}

View File

@@ -1,4 +1,4 @@
use crate::db::{HostedProjectId, ProjectId, Result, RoomId, ServerId, UserId};
use crate::db::{DevServerProjectId, HostedProjectId, ProjectId, Result, RoomId, ServerId, UserId};
use anyhow::anyhow;
use rpc::ConnectionId;
use sea_orm::entity::prelude::*;
@@ -13,6 +13,7 @@ pub struct Model {
pub host_connection_id: Option<i32>,
pub host_connection_server_id: Option<ServerId>,
pub hosted_project_id: Option<HostedProjectId>,
pub dev_server_project_id: Option<DevServerProjectId>,
}
impl Model {
@@ -56,6 +57,12 @@ pub enum Relation {
to = "super::hosted_project::Column::Id"
)]
HostedProject,
#[sea_orm(
belongs_to = "super::dev_server_project::Entity",
from = "Column::DevServerProjectId",
to = "super::dev_server_project::Column::Id"
)]
RemoteProject,
}
impl Related<super::user::Entity> for Entity {
@@ -94,4 +101,10 @@ impl Related<super::hosted_project::Entity> for Entity {
}
}
impl Related<super::dev_server_project::Entity> for Entity {
fn to() -> RelationDef {
Relation::RemoteProject.def()
}
}
impl ActiveModelBehavior for ActiveModel {}

View File

@@ -540,18 +540,18 @@ async fn test_project_count(db: &Arc<Database>) {
.unwrap();
assert_eq!(db.project_count_excluding_admins().await.unwrap(), 0);
db.share_project(room_id, ConnectionId { owner_id, id: 1 }, &[], false)
db.share_project(room_id, ConnectionId { owner_id, id: 1 }, &[], false, None)
.await
.unwrap();
assert_eq!(db.project_count_excluding_admins().await.unwrap(), 1);
db.share_project(room_id, ConnectionId { owner_id, id: 1 }, &[], false)
db.share_project(room_id, ConnectionId { owner_id, id: 1 }, &[], false, None)
.await
.unwrap();
assert_eq!(db.project_count_excluding_admins().await.unwrap(), 2);
// Projects shared by admins aren't counted.
db.share_project(room_id, ConnectionId { owner_id, id: 0 }, &[], false)
db.share_project(room_id, ConnectionId { owner_id, id: 0 }, &[], false, None)
.await
.unwrap();
assert_eq!(db.project_count_excluding_admins().await.unwrap(), 2);

View File

@@ -84,8 +84,6 @@ async fn main() -> Result<()> {
let config = envy::from_env::<Config>().expect("error loading config");
init_tracing(&config);
init_panic_hook();
let mut app = Router::new()
.route("/", get(handle_root))
.route("/healthz", get(handle_liveness_probe))
@@ -380,20 +378,3 @@ pub fn init_tracing(config: &Config) -> Option<()> {
None
}
fn init_panic_hook() {
std::panic::set_hook(Box::new(move |panic_info| {
let panic_message = match panic_info.payload().downcast_ref::<&'static str>() {
Some(message) => *message,
None => match panic_info.payload().downcast_ref::<String>() {
Some(message) => message.as_str(),
None => "Box<Any>",
},
};
let backtrace = std::backtrace::Backtrace::force_capture();
let location = panic_info
.location()
.map(|loc| format!("{}:{}", loc.file(), loc.line()));
tracing::error!(panic = true, ?location, %panic_message, %backtrace, "Server Panic");
}));
}

File diff suppressed because it is too large

View File

@@ -1,7 +1,7 @@
use crate::db::{ChannelId, ChannelRole, UserId};
use crate::db::{ChannelId, ChannelRole, DevServerId, PrincipalId, UserId};
use anyhow::{anyhow, Result};
use collections::{BTreeMap, HashMap, HashSet};
use rpc::ConnectionId;
use rpc::{proto, ConnectionId};
use semantic_version::SemanticVersion;
use serde::Serialize;
use std::fmt;
@@ -11,7 +11,9 @@ use tracing::instrument;
pub struct ConnectionPool {
connections: BTreeMap<ConnectionId, Connection>,
connected_users: BTreeMap<UserId, ConnectedPrincipal>,
connected_dev_servers: BTreeMap<DevServerId, ConnectionId>,
channels: ChannelPool,
offline_dev_servers: HashSet<DevServerId>,
}
#[derive(Default, Serialize)]
@@ -30,13 +32,13 @@ impl fmt::Display for ZedVersion {
impl ZedVersion {
pub fn can_collaborate(&self) -> bool {
self.0 >= SemanticVersion::new(0, 157, 0)
self.0 >= SemanticVersion::new(0, 151, 0)
}
}
#[derive(Serialize)]
pub struct Connection {
pub user_id: UserId,
pub principal_id: PrincipalId,
pub admin: bool,
pub zed_version: ZedVersion,
}
@@ -45,6 +47,7 @@ impl ConnectionPool {
pub fn reset(&mut self) {
self.connections.clear();
self.connected_users.clear();
self.connected_dev_servers.clear();
self.channels.clear();
}
@@ -63,7 +66,7 @@ impl ConnectionPool {
self.connections.insert(
connection_id,
Connection {
user_id,
principal_id: PrincipalId::UserId(user_id),
admin,
zed_version,
},
@@ -72,6 +75,25 @@ impl ConnectionPool {
connected_user.connection_ids.insert(connection_id);
}
pub fn add_dev_server(
&mut self,
connection_id: ConnectionId,
dev_server_id: DevServerId,
zed_version: ZedVersion,
) {
self.connections.insert(
connection_id,
Connection {
principal_id: PrincipalId::DevServerId(dev_server_id),
admin: false,
zed_version,
},
);
self.connected_dev_servers
.insert(dev_server_id, connection_id);
}
#[instrument(skip(self))]
pub fn remove_connection(&mut self, connection_id: ConnectionId) -> Result<()> {
let connection = self
@@ -79,18 +101,28 @@ impl ConnectionPool {
.get_mut(&connection_id)
.ok_or_else(|| anyhow!("no such connection"))?;
let user_id = connection.user_id;
let connected_user = self.connected_users.get_mut(&user_id).unwrap();
connected_user.connection_ids.remove(&connection_id);
if connected_user.connection_ids.is_empty() {
self.connected_users.remove(&user_id);
self.channels.remove_user(&user_id);
};
match connection.principal_id {
PrincipalId::UserId(user_id) => {
let connected_user = self.connected_users.get_mut(&user_id).unwrap();
connected_user.connection_ids.remove(&connection_id);
if connected_user.connection_ids.is_empty() {
self.connected_users.remove(&user_id);
self.channels.remove_user(&user_id);
}
}
PrincipalId::DevServerId(dev_server_id) => {
self.connected_dev_servers.remove(&dev_server_id);
self.offline_dev_servers.remove(&dev_server_id);
}
}
self.connections.remove(&connection_id).unwrap();
Ok(())
}
pub fn set_dev_server_offline(&mut self, dev_server_id: DevServerId) {
self.offline_dev_servers.insert(dev_server_id);
}
pub fn connections(&self) -> impl Iterator<Item = &Connection> {
self.connections.values()
}
@@ -115,6 +147,42 @@ impl ConnectionPool {
.copied()
}
pub fn dev_server_status(&self, dev_server_id: DevServerId) -> proto::DevServerStatus {
if self.dev_server_connection_id(dev_server_id).is_some()
&& !self.offline_dev_servers.contains(&dev_server_id)
{
proto::DevServerStatus::Online
} else {
proto::DevServerStatus::Offline
}
}
pub fn dev_server_connection_id(&self, dev_server_id: DevServerId) -> Option<ConnectionId> {
self.connected_dev_servers.get(&dev_server_id).copied()
}
pub fn online_dev_server_connection_id(
&self,
dev_server_id: DevServerId,
) -> Result<ConnectionId> {
match self.connected_dev_servers.get(&dev_server_id) {
Some(cid) => Ok(*cid),
None => Err(anyhow!(proto::ErrorCode::DevServerOffline)),
}
}
pub fn dev_server_connection_id_supporting(
&self,
dev_server_id: DevServerId,
required: ZedVersion,
) -> Result<ConnectionId> {
match self.connected_dev_servers.get(&dev_server_id) {
Some(cid) if self.connections[cid].zed_version >= required => Ok(*cid),
Some(_) => Err(anyhow!(proto::ErrorCode::RemoteUpgradeRequired)),
None => Err(anyhow!(proto::ErrorCode::DevServerOffline)),
}
}
pub fn channel_user_ids(
&self,
channel_id: ChannelId,
@@ -159,22 +227,39 @@ impl ConnectionPool {
#[cfg(test)]
pub fn check_invariants(&self) {
for (connection_id, connection) in &self.connections {
assert!(self
.connected_users
.get(&connection.user_id)
.unwrap()
.connection_ids
.contains(connection_id));
match &connection.principal_id {
PrincipalId::UserId(user_id) => {
assert!(self
.connected_users
.get(user_id)
.unwrap()
.connection_ids
.contains(connection_id));
}
PrincipalId::DevServerId(dev_server_id) => {
assert_eq!(
self.connected_dev_servers.get(dev_server_id).unwrap(),
connection_id
);
}
}
}
for (user_id, state) in &self.connected_users {
for connection_id in &state.connection_ids {
assert_eq!(
self.connections.get(connection_id).unwrap().user_id,
*user_id
self.connections.get(connection_id).unwrap().principal_id,
PrincipalId::UserId(*user_id)
);
}
}
for (dev_server_id, connection_id) in &self.connected_dev_servers {
assert_eq!(
self.connections.get(connection_id).unwrap().principal_id,
PrincipalId::DevServerId(*dev_server_id)
);
}
}
}
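The hunks above match on `PrincipalId` without showing its definition; the sketch below is a guess at the shape the pool now assumes. The real enum and id newtypes live in `crate::db`, so the names here are placeholders that mirror the usage above, not the actual declarations.

// Hypothetical stand-ins for the db id newtypes referenced in the diff.
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
pub struct UserId(pub i32);
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
pub struct DevServerId(pub i32);

// A connection now belongs to either a user or a dev server, which is what
// `remove_connection` and `check_invariants` branch on above.
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
pub enum PrincipalId {
    UserId(UserId),
    DevServerId(DevServerId),
}

fn main() {
    let principal = PrincipalId::DevServerId(DevServerId(1));
    match principal {
        PrincipalId::UserId(user_id) => println!("user connection: {user_id:?}"),
        PrincipalId::DevServerId(dev_server_id) => {
            println!("dev server connection: {dev_server_id:?}")
        }
    }
}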

View File

@@ -8,6 +8,7 @@ mod channel_buffer_tests;
mod channel_guest_tests;
mod channel_message_tests;
mod channel_tests;
mod dev_server_tests;
mod editor_tests;
mod following_tests;
mod integration_tests;

View File

@@ -95,9 +95,7 @@ async fn test_channel_guest_promotion(cx_a: &mut TestAppContext, cx_b: &mut Test
let room_b = cx_b
.read(ActiveCall::global)
.update(cx_b, |call, _| call.room().unwrap().clone());
cx_b.simulate_keystrokes("cmd-p");
cx_a.run_until_parked();
cx_b.simulate_keystrokes("1 enter");
cx_b.simulate_keystrokes("cmd-p 1 enter");
let (project_b, editor_b) = workspace_b.update(cx_b, |workspace, cx| {
(

View File

@@ -0,0 +1,643 @@
use std::{path::Path, sync::Arc};
use call::ActiveCall;
use editor::Editor;
use fs::Fs;
use gpui::{TestAppContext, VisualTestContext, WindowHandle};
use rpc::{proto::DevServerStatus, ErrorCode, ErrorExt};
use serde_json::json;
use workspace::{AppState, Workspace};
use crate::tests::{following_tests::join_channel, TestServer};
use super::TestClient;
#[gpui::test]
async fn test_dev_server(cx: &mut gpui::TestAppContext, cx2: &mut gpui::TestAppContext) {
let (server, client) = TestServer::start1(cx).await;
let store = cx.update(|cx| dev_server_projects::Store::global(cx).clone());
let resp = store
.update(cx, |store, cx| {
store.create_dev_server("server-1".to_string(), None, cx)
})
.await
.unwrap();
store.update(cx, |store, _| {
assert_eq!(store.dev_servers().len(), 1);
assert_eq!(store.dev_servers()[0].name, "server-1");
assert_eq!(store.dev_servers()[0].status, DevServerStatus::Offline);
});
let dev_server = server.create_dev_server(resp.access_token, cx2).await;
cx.executor().run_until_parked();
store.update(cx, |store, _| {
assert_eq!(store.dev_servers()[0].status, DevServerStatus::Online);
});
dev_server
.fs()
.insert_tree(
"/remote",
json!({
"1.txt": "remote\nremote\nremote",
"2.js": "function two() { return 2; }",
"3.rs": "mod test",
}),
)
.await;
store
.update(cx, |store, cx| {
store.create_dev_server_project(
client::DevServerId(resp.dev_server_id),
"/remote".to_string(),
cx,
)
})
.await
.unwrap();
cx.executor().run_until_parked();
let remote_workspace = store
.update(cx, |store, cx| {
let projects = store.dev_server_projects();
assert_eq!(projects.len(), 1);
assert_eq!(projects[0].paths, vec!["/remote"]);
workspace::join_dev_server_project(
projects[0].id,
projects[0].project_id.unwrap(),
client.app_state.clone(),
None,
cx,
)
})
.await
.unwrap();
cx.executor().run_until_parked();
let cx = VisualTestContext::from_window(remote_workspace.into(), cx).as_mut();
cx.simulate_keystrokes("cmd-p 1 enter");
let editor = remote_workspace
.update(cx, |ws, cx| {
ws.active_item_as::<Editor>(cx).unwrap().clone()
})
.unwrap();
editor.update(cx, |ed, cx| {
assert_eq!(ed.text(cx).to_string(), "remote\nremote\nremote");
});
cx.simulate_input("wow!");
cx.simulate_keystrokes("cmd-s");
let content = dev_server
.fs()
.load(Path::new("/remote/1.txt"))
.await
.unwrap();
assert_eq!(content, "wow!remote\nremote\nremote\n");
}
#[gpui::test]
async fn test_dev_server_env_files(
cx1: &mut gpui::TestAppContext,
cx2: &mut gpui::TestAppContext,
cx3: &mut gpui::TestAppContext,
) {
let (server, client1, client2, channel_id) = TestServer::start2(cx1, cx2).await;
let (_dev_server, remote_workspace) =
create_dev_server_project(&server, client1.app_state.clone(), cx1, cx3).await;
cx1.executor().run_until_parked();
let cx1 = VisualTestContext::from_window(remote_workspace.into(), cx1).as_mut();
cx1.simulate_keystrokes("cmd-p . e enter");
let editor = remote_workspace
.update(cx1, |ws, cx| {
ws.active_item_as::<Editor>(cx).unwrap().clone()
})
.unwrap();
editor.update(cx1, |ed, cx| {
assert_eq!(ed.text(cx).to_string(), "SECRET");
});
cx1.update(|cx| {
workspace::join_channel(
channel_id,
client1.app_state.clone(),
Some(remote_workspace),
cx,
)
})
.await
.unwrap();
cx1.executor().run_until_parked();
remote_workspace
.update(cx1, |ws, cx| {
assert!(ws.project().read(cx).is_shared());
})
.unwrap();
join_channel(channel_id, &client2, cx2).await.unwrap();
cx2.executor().run_until_parked();
let (workspace2, cx2) = client2.active_workspace(cx2);
let editor = workspace2.update(cx2, |ws, cx| {
ws.active_item_as::<Editor>(cx).unwrap().clone()
});
// TODO: it'd be nice to hide .env files from other people
editor.update(cx2, |ed, cx| {
assert_eq!(ed.text(cx).to_string(), "SECRET");
});
}
async fn create_dev_server_project(
server: &TestServer,
client_app_state: Arc<AppState>,
cx: &mut TestAppContext,
cx_devserver: &mut TestAppContext,
) -> (TestClient, WindowHandle<Workspace>) {
let store = cx.update(|cx| dev_server_projects::Store::global(cx).clone());
let resp = store
.update(cx, |store, cx| {
store.create_dev_server("server-1".to_string(), None, cx)
})
.await
.unwrap();
let dev_server = server
.create_dev_server(resp.access_token, cx_devserver)
.await;
cx.executor().run_until_parked();
dev_server
.fs()
.insert_tree(
"/remote",
json!({
"1.txt": "remote\nremote\nremote",
".env": "SECRET",
}),
)
.await;
store
.update(cx, |store, cx| {
store.create_dev_server_project(
client::DevServerId(resp.dev_server_id),
"/remote".to_string(),
cx,
)
})
.await
.unwrap();
cx.executor().run_until_parked();
let workspace = store
.update(cx, |store, cx| {
let projects = store.dev_server_projects();
assert_eq!(projects.len(), 1);
assert_eq!(projects[0].paths, vec!["/remote"]);
workspace::join_dev_server_project(
projects[0].id,
projects[0].project_id.unwrap(),
client_app_state,
None,
cx,
)
})
.await
.unwrap();
cx.executor().run_until_parked();
(dev_server, workspace)
}
#[gpui::test]
async fn test_dev_server_leave_room(
cx1: &mut gpui::TestAppContext,
cx2: &mut gpui::TestAppContext,
cx3: &mut gpui::TestAppContext,
) {
let (server, client1, client2, channel_id) = TestServer::start2(cx1, cx2).await;
let (_dev_server, remote_workspace) =
create_dev_server_project(&server, client1.app_state.clone(), cx1, cx3).await;
cx1.update(|cx| {
workspace::join_channel(
channel_id,
client1.app_state.clone(),
Some(remote_workspace),
cx,
)
})
.await
.unwrap();
cx1.executor().run_until_parked();
remote_workspace
.update(cx1, |ws, cx| {
assert!(ws.project().read(cx).is_shared());
})
.unwrap();
join_channel(channel_id, &client2, cx2).await.unwrap();
cx2.executor().run_until_parked();
cx1.update(|cx| ActiveCall::global(cx).update(cx, |active_call, cx| active_call.hang_up(cx)))
.await
.unwrap();
cx1.executor().run_until_parked();
let (workspace, cx2) = client2.active_workspace(cx2);
cx2.update(|cx| assert!(workspace.read(cx).project().read(cx).is_disconnected(cx)));
}
#[gpui::test]
async fn test_dev_server_delete(
cx1: &mut gpui::TestAppContext,
cx2: &mut gpui::TestAppContext,
cx3: &mut gpui::TestAppContext,
) {
let (server, client1, client2, channel_id) = TestServer::start2(cx1, cx2).await;
let (_dev_server, remote_workspace) =
create_dev_server_project(&server, client1.app_state.clone(), cx1, cx3).await;
cx1.update(|cx| {
workspace::join_channel(
channel_id,
client1.app_state.clone(),
Some(remote_workspace),
cx,
)
})
.await
.unwrap();
cx1.executor().run_until_parked();
remote_workspace
.update(cx1, |ws, cx| {
assert!(ws.project().read(cx).is_shared());
})
.unwrap();
join_channel(channel_id, &client2, cx2).await.unwrap();
cx2.executor().run_until_parked();
cx1.update(|cx| {
dev_server_projects::Store::global(cx).update(cx, |store, cx| {
store.delete_dev_server_project(store.dev_server_projects().first().unwrap().id, cx)
})
})
.await
.unwrap();
cx1.executor().run_until_parked();
let (workspace, cx2) = client2.active_workspace(cx2);
cx2.update(|cx| assert!(workspace.read(cx).project().read(cx).is_disconnected(cx)));
cx1.update(|cx| {
dev_server_projects::Store::global(cx).update(cx, |store, _| {
assert_eq!(store.dev_server_projects().len(), 0);
})
})
}
#[gpui::test]
async fn test_dev_server_rename(
cx1: &mut gpui::TestAppContext,
cx2: &mut gpui::TestAppContext,
cx3: &mut gpui::TestAppContext,
) {
let (server, client1, client2, channel_id) = TestServer::start2(cx1, cx2).await;
let (_dev_server, remote_workspace) =
create_dev_server_project(&server, client1.app_state.clone(), cx1, cx3).await;
cx1.update(|cx| {
workspace::join_channel(
channel_id,
client1.app_state.clone(),
Some(remote_workspace),
cx,
)
})
.await
.unwrap();
cx1.executor().run_until_parked();
remote_workspace
.update(cx1, |ws, cx| {
assert!(ws.project().read(cx).is_shared());
})
.unwrap();
join_channel(channel_id, &client2, cx2).await.unwrap();
cx2.executor().run_until_parked();
cx1.update(|cx| {
dev_server_projects::Store::global(cx).update(cx, |store, cx| {
store.rename_dev_server(
store.dev_servers().first().unwrap().id,
"name-edited".to_string(),
None,
cx,
)
})
})
.await
.unwrap();
cx1.executor().run_until_parked();
cx1.update(|cx| {
dev_server_projects::Store::global(cx).update(cx, |store, _| {
assert_eq!(store.dev_servers().first().unwrap().name, "name-edited");
})
})
}
#[gpui::test]
async fn test_dev_server_refresh_access_token(
cx1: &mut gpui::TestAppContext,
cx2: &mut gpui::TestAppContext,
cx3: &mut gpui::TestAppContext,
cx4: &mut gpui::TestAppContext,
) {
let (server, client1, client2, channel_id) = TestServer::start2(cx1, cx2).await;
let (_dev_server, remote_workspace) =
create_dev_server_project(&server, client1.app_state.clone(), cx1, cx3).await;
cx1.update(|cx| {
workspace::join_channel(
channel_id,
client1.app_state.clone(),
Some(remote_workspace),
cx,
)
})
.await
.unwrap();
cx1.executor().run_until_parked();
remote_workspace
.update(cx1, |ws, cx| {
assert!(ws.project().read(cx).is_shared());
})
.unwrap();
join_channel(channel_id, &client2, cx2).await.unwrap();
cx2.executor().run_until_parked();
// Regenerate the access token
let new_token_response = cx1
.update(|cx| {
dev_server_projects::Store::global(cx).update(cx, |store, cx| {
store.regenerate_dev_server_token(store.dev_servers().first().unwrap().id, cx)
})
})
.await
.unwrap();
cx1.executor().run_until_parked();
// Assert that the other client was disconnected
let (workspace, cx2) = client2.active_workspace(cx2);
cx2.update(|cx| assert!(workspace.read(cx).project().read(cx).is_disconnected(cx)));
// Assert that the owner of the dev server does not see the dev server as online anymore
let (workspace, cx1) = client1.active_workspace(cx1);
cx1.update(|cx| {
assert!(workspace.read(cx).project().read(cx).is_disconnected(cx));
dev_server_projects::Store::global(cx).update(cx, |store, _| {
assert_eq!(
store.dev_servers().first().unwrap().status,
DevServerStatus::Offline
);
})
});
// Reconnect the dev server with the new token
let _dev_server = server
.create_dev_server(new_token_response.access_token, cx4)
.await;
cx1.executor().run_until_parked();
// Assert that the dev server is online again
cx1.update(|cx| {
dev_server_projects::Store::global(cx).update(cx, |store, _| {
assert_eq!(store.dev_servers().len(), 1);
assert_eq!(
store.dev_servers().first().unwrap().status,
DevServerStatus::Online
);
})
});
}
#[gpui::test]
async fn test_dev_server_reconnect(
cx1: &mut gpui::TestAppContext,
cx2: &mut gpui::TestAppContext,
cx3: &mut gpui::TestAppContext,
) {
let (mut server, client1) = TestServer::start1(cx1).await;
let channel_id = server
.make_channel("test", None, (&client1, cx1), &mut [])
.await;
let (_dev_server, remote_workspace) =
create_dev_server_project(&server, client1.app_state.clone(), cx1, cx3).await;
cx1.update(|cx| {
workspace::join_channel(
channel_id,
client1.app_state.clone(),
Some(remote_workspace),
cx,
)
})
.await
.unwrap();
cx1.executor().run_until_parked();
remote_workspace
.update(cx1, |ws, cx| {
assert!(ws.project().read(cx).is_shared());
})
.unwrap();
drop(client1);
let client2 = server.create_client(cx2, "user_a").await;
let store = cx2.update(|cx| dev_server_projects::Store::global(cx).clone());
store
.update(cx2, |store, cx| {
let projects = store.dev_server_projects();
workspace::join_dev_server_project(
projects[0].id,
projects[0].project_id.unwrap(),
client2.app_state.clone(),
None,
cx,
)
})
.await
.unwrap();
}
#[gpui::test]
async fn test_dev_server_restart(cx1: &mut gpui::TestAppContext, cx2: &mut gpui::TestAppContext) {
let (server, client1) = TestServer::start1(cx1).await;
let (_dev_server, remote_workspace) =
create_dev_server_project(&server, client1.app_state.clone(), cx1, cx2).await;
let cx = VisualTestContext::from_window(remote_workspace.into(), cx1).as_mut();
server.reset().await;
cx.run_until_parked();
cx.simulate_keystrokes("cmd-p 1 enter");
remote_workspace
.update(cx, |ws, cx| {
ws.active_item_as::<Editor>(cx)
.unwrap()
.update(cx, |ed, cx| {
assert_eq!(ed.text(cx).to_string(), "remote\nremote\nremote");
})
})
.unwrap();
}
#[gpui::test]
async fn test_create_dev_server_project_path_validation(
cx1: &mut gpui::TestAppContext,
cx2: &mut gpui::TestAppContext,
cx3: &mut gpui::TestAppContext,
) {
let (server, client1) = TestServer::start1(cx1).await;
let _channel_id = server
.make_channel("test", None, (&client1, cx1), &mut [])
.await;
// Creating a project with a path that does exist should not fail
let (_dev_server, _) =
create_dev_server_project(&server, client1.app_state.clone(), cx1, cx2).await;
cx1.executor().run_until_parked();
let store = cx1.update(|cx| dev_server_projects::Store::global(cx).clone());
let resp = store
.update(cx1, |store, cx| {
store.create_dev_server("server-2".to_string(), None, cx)
})
.await
.unwrap();
cx1.executor().run_until_parked();
let _dev_server = server.create_dev_server(resp.access_token, cx3).await;
cx1.executor().run_until_parked();
// Creating a remote project with a path that does not exist should fail
let result = store
.update(cx1, |store, cx| {
store.create_dev_server_project(
client::DevServerId(resp.dev_server_id),
"/notfound".to_string(),
cx,
)
})
.await;
cx1.executor().run_until_parked();
let error = result.unwrap_err();
assert!(matches!(
error.error_code(),
ErrorCode::DevServerProjectPathDoesNotExist
));
}
#[gpui::test]
async fn test_save_as_remote(cx1: &mut gpui::TestAppContext, cx2: &mut gpui::TestAppContext) {
let (server, client1) = TestServer::start1(cx1).await;
// Creating a project with a path that does exist should not fail
let (dev_server, remote_workspace) =
create_dev_server_project(&server, client1.app_state.clone(), cx1, cx2).await;
let mut cx = VisualTestContext::from_window(remote_workspace.into(), cx1);
cx.simulate_keystrokes("cmd-p 1 enter");
cx.simulate_keystrokes("cmd-shift-s");
cx.simulate_input("2.txt");
cx.simulate_keystrokes("enter");
cx.executor().run_until_parked();
let title = remote_workspace
.update(&mut cx, |ws, cx| {
let active_item = ws.active_item(cx).unwrap();
active_item.tab_description(0, cx).unwrap()
})
.unwrap();
assert_eq!(title, "2.txt");
let path = Path::new("/remote/2.txt");
assert_eq!(
dev_server.fs().load(path).await.unwrap(),
"remote\nremote\nremote"
);
}
#[gpui::test]
async fn test_new_file_remote(cx1: &mut gpui::TestAppContext, cx2: &mut gpui::TestAppContext) {
let (server, client1) = TestServer::start1(cx1).await;
// Creating a project with a path that does exist should not fail
let (dev_server, remote_workspace) =
create_dev_server_project(&server, client1.app_state.clone(), cx1, cx2).await;
let mut cx = VisualTestContext::from_window(remote_workspace.into(), cx1);
cx.simulate_keystrokes("cmd-n");
cx.simulate_input("new!");
cx.simulate_keystrokes("cmd-shift-s");
cx.simulate_input("2.txt");
cx.simulate_keystrokes("enter");
cx.executor().run_until_parked();
let title = remote_workspace
.update(&mut cx, |ws, cx| {
ws.active_item(cx).unwrap().tab_description(0, cx).unwrap()
})
.unwrap();
assert_eq!(title, "2.txt");
let path = Path::new("/remote/2.txt");
assert_eq!(dev_server.fs().load(path).await.unwrap(), "new!");
}

View File

@@ -1589,9 +1589,8 @@ async fn test_following_stops_on_unshare(cx_a: &mut TestAppContext, cx_b: &mut T
.await;
let (workspace_b, cx_b) = client_b.join_workspace(channel_id, cx_b).await;
cx_a.simulate_keystrokes("cmd-p");
cx_a.simulate_keystrokes("cmd-p 2 enter");
cx_a.run_until_parked();
cx_a.simulate_keystrokes("2 enter");
let editor_a = workspace_a.update(cx_a, |workspace, cx| {
workspace.active_item_as::<Editor>(cx).unwrap()
@@ -2042,9 +2041,7 @@ async fn test_following_to_channel_notes_other_workspace(
share_workspace(&workspace_a, cx_a).await.unwrap();
// a opens 1.txt
cx_a.simulate_keystrokes("cmd-p");
cx_a.run_until_parked();
cx_a.simulate_keystrokes("1 enter");
cx_a.simulate_keystrokes("cmd-p 1 enter");
cx_a.run_until_parked();
workspace_a.update(cx_a, |workspace, cx| {
let editor = workspace.active_item(cx).unwrap();
@@ -2101,9 +2098,7 @@ async fn test_following_while_deactivated(cx_a: &mut TestAppContext, cx_b: &mut
share_workspace(&workspace_a, cx_a).await.unwrap();
// a opens 1.txt
cx_a.simulate_keystrokes("cmd-p");
cx_a.run_until_parked();
cx_a.simulate_keystrokes("1 enter");
cx_a.simulate_keystrokes("cmd-p 1 enter");
cx_a.run_until_parked();
workspace_a.update(cx_a, |workspace, cx| {
let editor = workspace.active_item(cx).unwrap();
@@ -2123,9 +2118,7 @@ async fn test_following_while_deactivated(cx_a: &mut TestAppContext, cx_b: &mut
cx_b.simulate_keystrokes("down");
// a opens a different file while not followed
cx_a.simulate_keystrokes("cmd-p");
cx_a.run_until_parked();
cx_a.simulate_keystrokes("2 enter");
cx_a.simulate_keystrokes("cmd-p 2 enter");
workspace_b.update(cx_b, |workspace, cx| {
let editor = workspace.active_item_as::<Editor>(cx).unwrap();
@@ -2135,9 +2128,7 @@ async fn test_following_while_deactivated(cx_a: &mut TestAppContext, cx_b: &mut
// a opens a file in a new window
let (_, cx_a2) = client_a.build_test_workspace(&mut cx_a2).await;
cx_a2.update(|cx| cx.activate_window());
cx_a2.simulate_keystrokes("cmd-p");
cx_a2.run_until_parked();
cx_a2.simulate_keystrokes("3 enter");
cx_a2.simulate_keystrokes("cmd-p 3 enter");
cx_a2.run_until_parked();
// b starts following a again

View File

@@ -26,7 +26,7 @@ async fn test_sharing_an_ssh_remote_project(
.await;
// Set up project on remote FS
let (opts, server_ssh) = SshRemoteClient::fake_server(cx_a, server_cx);
let (port, server_ssh) = SshRemoteClient::fake_server(cx_a, server_cx);
let remote_fs = FakeFs::new(server_cx.executor());
remote_fs
.insert_tree(
@@ -67,7 +67,7 @@ async fn test_sharing_an_ssh_remote_project(
)
});
let client_ssh = SshRemoteClient::fake_client(opts, cx_a).await;
let client_ssh = SshRemoteClient::fake_client(port, cx_a).await;
let (project_a, worktree_id) = client_a
.build_ssh_project("/code/project1", client_ssh, cx_a)
.await;

View File

@@ -1,4 +1,5 @@
use crate::{
auth::split_dev_server_token,
db::{tests::TestDb, NewUserParams, UserId},
executor::Executor,
rpc::{Principal, Server, ZedVersion, CLEANUP_TIMEOUT, RECONNECT_TIMEOUT},
@@ -203,7 +204,7 @@ impl TestServer {
.override_authenticate(move |cx| {
cx.spawn(|_| async move {
let access_token = "the-token".to_string();
Ok(Credentials {
Ok(Credentials::User {
user_id: user_id.to_proto(),
access_token,
})
@@ -212,7 +213,7 @@ impl TestServer {
.override_establish_connection(move |credentials, cx| {
assert_eq!(
credentials,
&Credentials {
&Credentials::User {
user_id: user_id.0 as u64,
access_token: "the-token".into()
}
@@ -296,6 +297,7 @@ impl TestServer {
collab_ui::init(&app_state, cx);
file_finder::init(cx);
menu::init();
dev_server_projects::init(client.clone(), cx);
settings::KeymapFile::load_asset(os_keymap, cx).unwrap();
language_model::LanguageModelRegistry::test(cx);
assistant::context_store::init(&client.clone().into());
@@ -317,6 +319,135 @@ impl TestServer {
client
}
pub async fn create_dev_server(
&self,
access_token: String,
cx: &mut TestAppContext,
) -> TestClient {
cx.update(|cx| {
if cx.has_global::<SettingsStore>() {
panic!("Same cx used to create two test clients")
}
let settings = SettingsStore::test(cx);
cx.set_global(settings);
release_channel::init(SemanticVersion::default(), cx);
client::init_settings(cx);
});
let (dev_server_id, _) = split_dev_server_token(&access_token).unwrap();
let clock = Arc::new(FakeSystemClock::default());
let http = FakeHttpClient::with_404_response();
let mut client = cx.update(|cx| Client::new(clock, http.clone(), cx));
let server = self.server.clone();
let db = self.app_state.db.clone();
let connection_killers = self.connection_killers.clone();
let forbid_connections = self.forbid_connections.clone();
Arc::get_mut(&mut client)
.unwrap()
.set_id(1)
.set_dev_server_token(client::DevServerToken(access_token.clone()))
.override_establish_connection(move |credentials, cx| {
assert_eq!(
credentials,
&Credentials::DevServer {
token: client::DevServerToken(access_token.to_string())
}
);
let server = server.clone();
let db = db.clone();
let connection_killers = connection_killers.clone();
let forbid_connections = forbid_connections.clone();
cx.spawn(move |cx| async move {
if forbid_connections.load(SeqCst) {
Err(EstablishConnectionError::other(anyhow!(
"server is forbidding connections"
)))
} else {
let (client_conn, server_conn, killed) =
Connection::in_memory(cx.background_executor().clone());
let (connection_id_tx, connection_id_rx) = oneshot::channel();
let dev_server = db
.get_dev_server(dev_server_id)
.await
.expect("retrieving dev_server failed");
cx.background_executor()
.spawn(server.handle_connection(
server_conn,
"dev-server".to_string(),
Principal::DevServer(dev_server),
ZedVersion(SemanticVersion::new(1, 0, 0)),
None,
Some(connection_id_tx),
Executor::Deterministic(cx.background_executor().clone()),
))
.detach();
let connection_id = connection_id_rx.await.map_err(|e| {
EstablishConnectionError::Other(anyhow!(
"{} (is server shutting down?)",
e
))
})?;
connection_killers
.lock()
.insert(connection_id.into(), killed);
Ok(client_conn)
}
})
});
let fs = FakeFs::new(cx.executor());
let user_store = cx.new_model(|cx| UserStore::new(client.clone(), cx));
let workspace_store = cx.new_model(|cx| WorkspaceStore::new(client.clone(), cx));
let language_registry = Arc::new(LanguageRegistry::test(cx.executor()));
let session = cx.new_model(|cx| AppSession::new(Session::test(), cx));
let app_state = Arc::new(workspace::AppState {
client: client.clone(),
user_store: user_store.clone(),
workspace_store,
languages: language_registry,
fs: fs.clone(),
build_window_options: |_, _| Default::default(),
node_runtime: NodeRuntime::unavailable(),
session,
});
cx.update(|cx| {
theme::init(theme::LoadThemes::JustBase, cx);
Project::init(&client, cx);
client::init(&client, cx);
language::init(cx);
editor::init(cx);
workspace::init(app_state.clone(), cx);
call::init(client.clone(), user_store.clone(), cx);
channel::init(&client, user_store.clone(), cx);
notifications::init(client.clone(), user_store, cx);
collab_ui::init(&app_state, cx);
file_finder::init(cx);
menu::init();
headless::init(
client.clone(),
headless::AppState {
languages: app_state.languages.clone(),
user_store: app_state.user_store.clone(),
fs: fs.clone(),
node_runtime: app_state.node_runtime.clone(),
},
cx,
)
})
.await
.unwrap();
TestClient {
app_state,
username: "dev-server".to_string(),
channel_store: cx.read(ChannelStore::global).clone(),
notification_store: cx.read(NotificationStore::global).clone(),
state: Default::default(),
}
}
pub fn disconnect_client(&self, peer_id: PeerId) {
self.connection_killers
.lock()

View File

@@ -0,0 +1,23 @@
[package]
name = "dev_server_projects"
version = "0.1.0"
edition = "2021"
publish = false
license = "GPL-3.0-or-later"
[lints]
workspace = true
[lib]
path = "src/dev_server_projects.rs"
doctest = false
[dependencies]
anyhow.workspace = true
gpui.workspace = true
serde.workspace = true
client.workspace = true
rpc.workspace = true
[dev-dependencies]
serde_json.workspace = true

View File

@@ -0,0 +1 @@
../../LICENSE-GPL

View File

@@ -1 +1,249 @@
use anyhow::Result;
use gpui::{AppContext, AsyncAppContext, Context, Global, Model, ModelContext, SharedString, Task};
use rpc::{
proto::{self, DevServerStatus},
TypedEnvelope,
};
use std::{collections::HashMap, sync::Arc};
use client::{Client, ProjectId};
pub use client::{DevServerId, DevServerProjectId};
pub struct Store {
dev_server_projects: HashMap<DevServerProjectId, DevServerProject>,
dev_servers: HashMap<DevServerId, DevServer>,
_subscriptions: Vec<client::Subscription>,
client: Arc<Client>,
}
#[derive(Debug, Clone)]
pub struct DevServerProject {
pub id: DevServerProjectId,
pub project_id: Option<ProjectId>,
pub paths: Vec<SharedString>,
pub dev_server_id: DevServerId,
}
impl From<proto::DevServerProject> for DevServerProject {
fn from(project: proto::DevServerProject) -> Self {
Self {
id: DevServerProjectId(project.id),
project_id: project.project_id.map(ProjectId),
paths: project.paths.into_iter().map(|path| path.into()).collect(),
dev_server_id: DevServerId(project.dev_server_id),
}
}
}
#[derive(Debug, Clone)]
pub struct DevServer {
pub id: DevServerId,
pub name: SharedString,
pub ssh_connection_string: Option<SharedString>,
pub status: DevServerStatus,
}
impl From<proto::DevServer> for DevServer {
fn from(dev_server: proto::DevServer) -> Self {
Self {
id: DevServerId(dev_server.dev_server_id),
status: dev_server.status(),
name: dev_server.name.into(),
ssh_connection_string: dev_server.ssh_connection_string.map(|s| s.into()),
}
}
}
struct GlobalStore(Model<Store>);
impl Global for GlobalStore {}
pub fn init(client: Arc<Client>, cx: &mut AppContext) {
let store = cx.new_model(|cx| Store::new(client, cx));
cx.set_global(GlobalStore(store));
}
impl Store {
pub fn global(cx: &AppContext) -> Model<Store> {
cx.global::<GlobalStore>().0.clone()
}
pub fn new(client: Arc<Client>, cx: &ModelContext<Self>) -> Self {
Self {
dev_server_projects: Default::default(),
dev_servers: Default::default(),
_subscriptions: vec![client
.add_message_handler(cx.weak_model(), Self::handle_dev_server_projects_update)],
client,
}
}
pub fn projects_for_server(&self, id: DevServerId) -> Vec<DevServerProject> {
let mut projects: Vec<DevServerProject> = self
.dev_server_projects
.values()
.filter(|project| project.dev_server_id == id)
.cloned()
.collect();
projects.sort_by_key(|p| (p.paths.clone(), p.id));
projects
}
pub fn dev_servers(&self) -> Vec<DevServer> {
let mut dev_servers: Vec<DevServer> = self.dev_servers.values().cloned().collect();
dev_servers.sort_by_key(|d| (d.status == DevServerStatus::Offline, d.name.clone(), d.id));
dev_servers
}
pub fn dev_server(&self, id: DevServerId) -> Option<&DevServer> {
self.dev_servers.get(&id)
}
pub fn dev_server_status(&self, id: DevServerId) -> DevServerStatus {
self.dev_server(id)
.map(|server| server.status)
.unwrap_or(DevServerStatus::Offline)
}
pub fn dev_server_projects(&self) -> Vec<DevServerProject> {
let mut projects: Vec<DevServerProject> =
self.dev_server_projects.values().cloned().collect();
projects.sort_by_key(|p| (p.paths.clone(), p.id));
projects
}
pub fn dev_server_project(&self, id: DevServerProjectId) -> Option<&DevServerProject> {
self.dev_server_projects.get(&id)
}
pub fn dev_server_for_project(&self, id: DevServerProjectId) -> Option<&DevServer> {
self.dev_server_project(id)
.and_then(|project| self.dev_server(project.dev_server_id))
}
async fn handle_dev_server_projects_update(
this: Model<Self>,
envelope: TypedEnvelope<proto::DevServerProjectsUpdate>,
mut cx: AsyncAppContext,
) -> Result<()> {
this.update(&mut cx, |this, cx| {
this.dev_servers = envelope
.payload
.dev_servers
.into_iter()
.map(|dev_server| (DevServerId(dev_server.dev_server_id), dev_server.into()))
.collect();
this.dev_server_projects = envelope
.payload
.dev_server_projects
.into_iter()
.map(|project| (DevServerProjectId(project.id), project.into()))
.collect();
cx.notify();
})?;
Ok(())
}
pub fn create_dev_server_project(
&mut self,
dev_server_id: DevServerId,
path: String,
cx: &mut ModelContext<Self>,
) -> Task<Result<proto::CreateDevServerProjectResponse>> {
let client = self.client.clone();
cx.background_executor().spawn(async move {
client
.request(proto::CreateDevServerProject {
dev_server_id: dev_server_id.0,
path,
})
.await
})
}
pub fn create_dev_server(
&mut self,
name: String,
ssh_connection_string: Option<String>,
cx: &mut ModelContext<Self>,
) -> Task<Result<proto::CreateDevServerResponse>> {
let client = self.client.clone();
cx.background_executor().spawn(async move {
let result = client
.request(proto::CreateDevServer {
name,
ssh_connection_string,
})
.await?;
Ok(result)
})
}
pub fn rename_dev_server(
&mut self,
dev_server_id: DevServerId,
name: String,
ssh_connection_string: Option<String>,
cx: &mut ModelContext<Self>,
) -> Task<Result<()>> {
let client = self.client.clone();
cx.background_executor().spawn(async move {
client
.request(proto::RenameDevServer {
dev_server_id: dev_server_id.0,
name,
ssh_connection_string,
})
.await?;
Ok(())
})
}
pub fn regenerate_dev_server_token(
&mut self,
dev_server_id: DevServerId,
cx: &mut ModelContext<Self>,
) -> Task<Result<proto::RegenerateDevServerTokenResponse>> {
let client = self.client.clone();
cx.background_executor().spawn(async move {
client
.request(proto::RegenerateDevServerToken {
dev_server_id: dev_server_id.0,
})
.await
})
}
pub fn delete_dev_server(
&mut self,
id: DevServerId,
cx: &mut ModelContext<Self>,
) -> Task<Result<()>> {
let client = self.client.clone();
cx.background_executor().spawn(async move {
client
.request(proto::DeleteDevServer {
dev_server_id: id.0,
})
.await?;
Ok(())
})
}
pub fn delete_dev_server_project(
&mut self,
id: DevServerProjectId,
cx: &mut ModelContext<Self>,
) -> Task<Result<()>> {
let client = self.client.clone();
cx.background_executor().spawn(async move {
client
.request(proto::DeleteDevServerProject {
dev_server_project_id: id.0,
})
.await?;
Ok(())
})
}
}
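A hedged usage sketch of the new Store from calling code: it assumes an initialized gpui `AppContext` with `init` already run, and uses only methods shown above, mirroring how the dev server tests drive the API.

// Sketch only; not part of this diff.
fn list_online_dev_servers(cx: &gpui::AppContext) -> Vec<String> {
    let store = dev_server_projects::Store::global(cx);
    store
        .read(cx)
        .dev_servers()
        .into_iter()
        .filter(|server| server.status == rpc::proto::DevServerStatus::Online)
        .map(|server| server.name.to_string())
        .collect()
}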

View File

@@ -9,7 +9,7 @@ use anyhow::Result;
use collections::{BTreeSet, HashSet};
use editor::{
diagnostic_block_renderer,
display_map::{BlockPlacement, BlockProperties, BlockStyle, CustomBlockId, RenderBlock},
display_map::{BlockDisposition, BlockProperties, BlockStyle, CustomBlockId, RenderBlock},
highlight_diagnostic_message,
scroll::Autoscroll,
Editor, EditorEvent, ExcerptId, ExcerptRange, MultiBuffer, ToOffset,
@@ -439,10 +439,11 @@ impl ProjectDiagnosticsEditor {
primary.message.split('\n').next().unwrap().to_string();
group_state.block_count += 1;
blocks_to_add.push(BlockProperties {
placement: BlockPlacement::Above(header_position),
position: header_position,
height: 2,
style: BlockStyle::Sticky,
render: diagnostic_header_renderer(primary),
disposition: BlockDisposition::Above,
priority: 0,
});
}
@@ -458,15 +459,13 @@ impl ProjectDiagnosticsEditor {
if !diagnostic.message.is_empty() {
group_state.block_count += 1;
blocks_to_add.push(BlockProperties {
placement: BlockPlacement::Below((
excerpt_id,
entry.range.start,
)),
position: (excerpt_id, entry.range.start),
height: diagnostic.message.matches('\n').count() as u32 + 1,
style: BlockStyle::Fixed,
render: diagnostic_block_renderer(
diagnostic, None, true, true,
),
disposition: BlockDisposition::Below,
priority: 0,
});
}
@@ -499,24 +498,13 @@ impl ProjectDiagnosticsEditor {
editor.remove_blocks(blocks_to_remove, None, cx);
let block_ids = editor.insert_blocks(
blocks_to_add.into_iter().flat_map(|block| {
let placement = match block.placement {
BlockPlacement::Above((excerpt_id, text_anchor)) => BlockPlacement::Above(
excerpts_snapshot.anchor_in_excerpt(excerpt_id, text_anchor)?,
),
BlockPlacement::Below((excerpt_id, text_anchor)) => BlockPlacement::Below(
excerpts_snapshot.anchor_in_excerpt(excerpt_id, text_anchor)?,
),
BlockPlacement::Replace(_) => {
unreachable!(
"no Replace block should have been pushed to blocks_to_add"
)
}
};
let (excerpt_id, text_anchor) = block.position;
Some(BlockProperties {
placement,
position: excerpts_snapshot.anchor_in_excerpt(excerpt_id, text_anchor)?,
height: block.height,
style: block.style,
render: block.render,
disposition: block.disposition,
priority: 0,
})
}),
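The closure above relies on `?` inside a `flat_map` over `Option` to silently drop blocks whose excerpt anchors can no longer be resolved. A small self-contained illustration of that pattern (not editor code; the names are placeholders):

fn main() {
    // Stand-in for `anchor_in_excerpt`: `None` means the excerpt is gone.
    let anchors = vec![Some(1), None, Some(3)];
    let resolved: Vec<i32> = anchors
        .into_iter()
        .flat_map(|anchor| Some(anchor? * 10)) // `?` yields None; flat_map skips it
        .collect();
    assert_eq!(resolved, vec![10, 30]);
}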

View File

@@ -81,6 +81,7 @@ ui.workspace = true
url.workspace = true
util.workspace = true
workspace.workspace = true
unicode-segmentation.workspace = true
[dev-dependencies]
ctor.workspace = true
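The new `unicode-segmentation` dependency backs the `grapheme_at` change later in this diff. A quick standalone check (an assumption for illustration, not part of the diff) of why characters are collected and then re-segmented: a user-perceived character can span several `char`s.

use unicode_segmentation::UnicodeSegmentation;

fn main() {
    let text = "e\u{0301}xample"; // "é" written as 'e' plus a combining accent
    let first_char = text.chars().next().unwrap().to_string();
    let first_grapheme = text.graphemes(true).next().unwrap().to_string();
    assert_eq!(first_char, "e"); // chars() splits the accent off
    assert_eq!(first_grapheme, "e\u{0301}"); // graphemes(true) keeps it together
}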

View File

@@ -18,7 +18,7 @@ pub struct SelectPrevious {
#[derive(PartialEq, Clone, Deserialize, Default)]
pub struct MoveToBeginningOfLine {
#[serde(default = "default_true")]
pub stop_at_soft_wraps: bool,
pub(super) stop_at_soft_wraps: bool,
}
#[derive(PartialEq, Clone, Deserialize, Default)]
@@ -153,10 +153,6 @@ pub struct DeleteToPreviousWordStart {
pub ignore_newlines: bool,
}
#[derive(PartialEq, Clone, Deserialize, Default)]
pub struct FoldAtLevel {
pub level: u32,
}
impl_actions!(
editor,
[
@@ -186,7 +182,6 @@ impl_actions!(
ToggleCodeActions,
ToggleComments,
UnfoldAt,
FoldAtLevel
]
);
@@ -198,7 +193,6 @@ gpui::actions!(
AcceptPartialInlineCompletion,
AddSelectionAbove,
AddSelectionBelow,
ApplyAllDiffHunks,
ApplyDiffHunk,
Backspace,
Cancel,

View File

@@ -8,7 +8,7 @@
//! of several smaller structures that form a hierarchy (starting at the bottom):
//! - [`InlayMap`] that decides where the [`Inlay`]s should be displayed.
//! - [`FoldMap`] that decides where the fold indicators should be; it also tracks parts of a source file that are currently folded.
//! - [`TabMap`] that keeps track of hard tabs in a buffer.
//! - [`CharMap`] that expands hard tabs and substitutes visible replacements for non-printable characters.
//! - [`WrapMap`] that handles soft wrapping.
//! - [`BlockMap`] that tracks custom blocks such as diagnostics that should be displayed within buffer.
//! - [`DisplayMap`] that adds background highlights to the regions of text.
@@ -18,20 +18,22 @@
//! [EditorElement]: crate::element::EditorElement
mod block_map;
mod char_map;
mod crease_map;
mod fold_map;
mod inlay_map;
mod tab_map;
mod invisibles;
mod wrap_map;
use crate::{
hover_links::InlayHighlight, movement::TextLayoutDetails, EditorStyle, InlayId, RowExt,
};
pub use block_map::{
Block, BlockBufferRows, BlockChunks as DisplayChunks, BlockContext, BlockId, BlockMap,
BlockPlacement, BlockPoint, BlockProperties, BlockStyle, CustomBlockId, RenderBlock,
Block, BlockBufferRows, BlockChunks as DisplayChunks, BlockContext, BlockDisposition, BlockId,
BlockMap, BlockPoint, BlockProperties, BlockStyle, CustomBlockId, RenderBlock,
};
use block_map::{BlockRow, BlockSnapshot};
use char_map::{CharMap, CharSnapshot};
use collections::{HashMap, HashSet};
pub use crease_map::*;
pub use fold_map::{Fold, FoldId, FoldPlaceholder, FoldPoint};
@@ -42,6 +44,7 @@ use gpui::{
pub(crate) use inlay_map::Inlay;
use inlay_map::{InlayMap, InlaySnapshot};
pub use inlay_map::{InlayOffset, InlayPoint};
pub use invisibles::is_invisible;
use language::{
language_settings::language_settings, ChunkRenderer, OffsetUtf16, Point,
Subscription as BufferSubscription,
@@ -61,9 +64,9 @@ use std::{
sync::Arc,
};
use sum_tree::{Bias, TreeMap};
use tab_map::{TabMap, TabSnapshot};
use text::LineIndent;
use ui::WindowContext;
use ui::{px, WindowContext};
use unicode_segmentation::UnicodeSegmentation;
use wrap_map::{WrapMap, WrapSnapshot};
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
@@ -94,7 +97,7 @@ pub struct DisplayMap {
/// Decides where the fold indicators should be and tracks parts of a source file that are currently folded.
fold_map: FoldMap,
/// Keeps track of hard tabs in a buffer.
tab_map: TabMap,
char_map: CharMap,
/// Handles soft wrapping.
wrap_map: Model<WrapMap>,
/// Tracks custom blocks such as diagnostics that should be displayed within buffer.
@@ -131,7 +134,7 @@ impl DisplayMap {
let crease_map = CreaseMap::new(&buffer_snapshot);
let (inlay_map, snapshot) = InlayMap::new(buffer_snapshot);
let (fold_map, snapshot) = FoldMap::new(snapshot);
let (tab_map, snapshot) = TabMap::new(snapshot, tab_size);
let (char_map, snapshot) = CharMap::new(snapshot, tab_size);
let (wrap_map, snapshot) = WrapMap::new(snapshot, font, font_size, wrap_width, cx);
let block_map = BlockMap::new(
snapshot,
@@ -148,7 +151,7 @@ impl DisplayMap {
buffer_subscription,
fold_map,
inlay_map,
tab_map,
char_map,
wrap_map,
block_map,
crease_map,
@@ -166,17 +169,17 @@ impl DisplayMap {
let (inlay_snapshot, edits) = self.inlay_map.sync(buffer_snapshot, edits);
let (fold_snapshot, edits) = self.fold_map.read(inlay_snapshot.clone(), edits);
let tab_size = Self::tab_size(&self.buffer, cx);
let (tab_snapshot, edits) = self.tab_map.sync(fold_snapshot.clone(), edits, tab_size);
let (char_snapshot, edits) = self.char_map.sync(fold_snapshot.clone(), edits, tab_size);
let (wrap_snapshot, edits) = self
.wrap_map
.update(cx, |map, cx| map.sync(tab_snapshot.clone(), edits, cx));
.update(cx, |map, cx| map.sync(char_snapshot.clone(), edits, cx));
let block_snapshot = self.block_map.read(wrap_snapshot.clone(), edits).snapshot;
DisplaySnapshot {
buffer_snapshot: self.buffer.read(cx).snapshot(cx),
fold_snapshot,
inlay_snapshot,
tab_snapshot,
char_snapshot,
wrap_snapshot,
block_snapshot,
crease_snapshot: self.crease_map.snapshot(),
@@ -212,13 +215,13 @@ impl DisplayMap {
let tab_size = Self::tab_size(&self.buffer, cx);
let (snapshot, edits) = self.inlay_map.sync(snapshot, edits);
let (mut fold_map, snapshot, edits) = self.fold_map.write(snapshot, edits);
let (snapshot, edits) = self.tab_map.sync(snapshot, edits, tab_size);
let (snapshot, edits) = self.char_map.sync(snapshot, edits, tab_size);
let (snapshot, edits) = self
.wrap_map
.update(cx, |map, cx| map.sync(snapshot, edits, cx));
self.block_map.read(snapshot, edits);
let (snapshot, edits) = fold_map.fold(ranges);
let (snapshot, edits) = self.tab_map.sync(snapshot, edits, tab_size);
let (snapshot, edits) = self.char_map.sync(snapshot, edits, tab_size);
let (snapshot, edits) = self
.wrap_map
.update(cx, |map, cx| map.sync(snapshot, edits, cx));
@@ -236,13 +239,13 @@ impl DisplayMap {
let tab_size = Self::tab_size(&self.buffer, cx);
let (snapshot, edits) = self.inlay_map.sync(snapshot, edits);
let (mut fold_map, snapshot, edits) = self.fold_map.write(snapshot, edits);
let (snapshot, edits) = self.tab_map.sync(snapshot, edits, tab_size);
let (snapshot, edits) = self.char_map.sync(snapshot, edits, tab_size);
let (snapshot, edits) = self
.wrap_map
.update(cx, |map, cx| map.sync(snapshot, edits, cx));
self.block_map.read(snapshot, edits);
let (snapshot, edits) = fold_map.unfold(ranges, inclusive);
let (snapshot, edits) = self.tab_map.sync(snapshot, edits, tab_size);
let (snapshot, edits) = self.char_map.sync(snapshot, edits, tab_size);
let (snapshot, edits) = self
.wrap_map
.update(cx, |map, cx| map.sync(snapshot, edits, cx));
@@ -277,7 +280,7 @@ impl DisplayMap {
let tab_size = Self::tab_size(&self.buffer, cx);
let (snapshot, edits) = self.inlay_map.sync(snapshot, edits);
let (snapshot, edits) = self.fold_map.read(snapshot, edits);
let (snapshot, edits) = self.tab_map.sync(snapshot, edits, tab_size);
let (snapshot, edits) = self.char_map.sync(snapshot, edits, tab_size);
let (snapshot, edits) = self
.wrap_map
.update(cx, |map, cx| map.sync(snapshot, edits, cx));
@@ -295,7 +298,7 @@ impl DisplayMap {
let tab_size = Self::tab_size(&self.buffer, cx);
let (snapshot, edits) = self.inlay_map.sync(snapshot, edits);
let (snapshot, edits) = self.fold_map.read(snapshot, edits);
let (snapshot, edits) = self.tab_map.sync(snapshot, edits, tab_size);
let (snapshot, edits) = self.char_map.sync(snapshot, edits, tab_size);
let (snapshot, edits) = self
.wrap_map
.update(cx, |map, cx| map.sync(snapshot, edits, cx));
@@ -313,7 +316,7 @@ impl DisplayMap {
let tab_size = Self::tab_size(&self.buffer, cx);
let (snapshot, edits) = self.inlay_map.sync(snapshot, edits);
let (snapshot, edits) = self.fold_map.read(snapshot, edits);
let (snapshot, edits) = self.tab_map.sync(snapshot, edits, tab_size);
let (snapshot, edits) = self.char_map.sync(snapshot, edits, tab_size);
let (snapshot, edits) = self
.wrap_map
.update(cx, |map, cx| map.sync(snapshot, edits, cx));
@@ -331,7 +334,7 @@ impl DisplayMap {
let tab_size = Self::tab_size(&self.buffer, cx);
let (snapshot, edits) = self.inlay_map.sync(snapshot, edits);
let (snapshot, edits) = self.fold_map.read(snapshot, edits);
let (snapshot, edits) = self.tab_map.sync(snapshot, edits, tab_size);
let (snapshot, edits) = self.char_map.sync(snapshot, edits, tab_size);
let (snapshot, edits) = self
.wrap_map
.update(cx, |map, cx| map.sync(snapshot, edits, cx));
@@ -407,7 +410,7 @@ impl DisplayMap {
let (snapshot, edits) = self.inlay_map.sync(buffer_snapshot, edits);
let (snapshot, edits) = self.fold_map.read(snapshot, edits);
let tab_size = Self::tab_size(&self.buffer, cx);
let (snapshot, edits) = self.tab_map.sync(snapshot, edits, tab_size);
let (snapshot, edits) = self.char_map.sync(snapshot, edits, tab_size);
let (snapshot, edits) = self
.wrap_map
.update(cx, |map, cx| map.sync(snapshot, edits, cx));
@@ -415,7 +418,7 @@ impl DisplayMap {
let (snapshot, edits) = self.inlay_map.splice(to_remove, to_insert);
let (snapshot, edits) = self.fold_map.read(snapshot, edits);
let (snapshot, edits) = self.tab_map.sync(snapshot, edits, tab_size);
let (snapshot, edits) = self.char_map.sync(snapshot, edits, tab_size);
let (snapshot, edits) = self
.wrap_map
.update(cx, |map, cx| map.sync(snapshot, edits, cx));
@@ -467,7 +470,7 @@ pub struct DisplaySnapshot {
pub fold_snapshot: FoldSnapshot,
pub crease_snapshot: CreaseSnapshot,
inlay_snapshot: InlaySnapshot,
tab_snapshot: TabSnapshot,
char_snapshot: CharSnapshot,
wrap_snapshot: WrapSnapshot,
block_snapshot: BlockSnapshot,
text_highlights: TextHighlights,
@@ -567,8 +570,8 @@ impl DisplaySnapshot {
fn point_to_display_point(&self, point: MultiBufferPoint, bias: Bias) -> DisplayPoint {
let inlay_point = self.inlay_snapshot.to_inlay_point(point);
let fold_point = self.fold_snapshot.to_fold_point(inlay_point, bias);
let tab_point = self.tab_snapshot.to_tab_point(fold_point);
let wrap_point = self.wrap_snapshot.tab_point_to_wrap_point(tab_point);
let char_point = self.char_snapshot.to_char_point(fold_point);
let wrap_point = self.wrap_snapshot.char_point_to_wrap_point(char_point);
let block_point = self.block_snapshot.to_block_point(wrap_point);
DisplayPoint(block_point)
}
@@ -596,21 +599,21 @@ impl DisplaySnapshot {
fn display_point_to_inlay_point(&self, point: DisplayPoint, bias: Bias) -> InlayPoint {
let block_point = point.0;
let wrap_point = self.block_snapshot.to_wrap_point(block_point);
let tab_point = self.wrap_snapshot.to_tab_point(wrap_point);
let fold_point = self.tab_snapshot.to_fold_point(tab_point, bias).0;
let char_point = self.wrap_snapshot.to_char_point(wrap_point);
let fold_point = self.char_snapshot.to_fold_point(char_point, bias).0;
fold_point.to_inlay_point(&self.fold_snapshot)
}
pub fn display_point_to_fold_point(&self, point: DisplayPoint, bias: Bias) -> FoldPoint {
let block_point = point.0;
let wrap_point = self.block_snapshot.to_wrap_point(block_point);
let tab_point = self.wrap_snapshot.to_tab_point(wrap_point);
self.tab_snapshot.to_fold_point(tab_point, bias).0
let char_point = self.wrap_snapshot.to_char_point(wrap_point);
self.char_snapshot.to_fold_point(char_point, bias).0
}
pub fn fold_point_to_display_point(&self, fold_point: FoldPoint) -> DisplayPoint {
let tab_point = self.tab_snapshot.to_tab_point(fold_point);
let wrap_point = self.wrap_snapshot.tab_point_to_wrap_point(tab_point);
let char_point = self.char_snapshot.to_char_point(fold_point);
let wrap_point = self.wrap_snapshot.char_point_to_wrap_point(char_point);
let block_point = self.block_snapshot.to_block_point(wrap_point);
DisplayPoint(block_point)
}
@@ -688,6 +691,23 @@ impl DisplaySnapshot {
}
}
if chunk.is_invisible {
let invisible_highlight = HighlightStyle {
background_color: Some(editor_style.status.hint_background),
underline: Some(UnderlineStyle {
color: Some(editor_style.status.hint),
thickness: px(1.),
wavy: false,
}),
..Default::default()
};
if let Some(highlight_style) = highlight_style.as_mut() {
highlight_style.highlight(invisible_highlight);
} else {
highlight_style = Some(invisible_highlight);
}
}
let mut diagnostic_highlight = HighlightStyle::default();
if chunk.is_unnecessary {
@@ -784,12 +804,11 @@ impl DisplaySnapshot {
layout_line.closest_index_for_x(x) as u32
}
pub fn display_chars_at(
&self,
mut point: DisplayPoint,
) -> impl Iterator<Item = (char, DisplayPoint)> + '_ {
pub fn grapheme_at(&self, mut point: DisplayPoint) -> Option<String> {
point = DisplayPoint(self.block_snapshot.clip_point(point.0, Bias::Left));
self.text_chunks(point.row())
let chars = self
.text_chunks(point.row())
.flat_map(str::chars)
.skip_while({
let mut column = 0;
@@ -799,16 +818,21 @@ impl DisplaySnapshot {
!at_point
}
})
.map(move |ch| {
let result = (ch, point);
if ch == '\n' {
*point.row_mut() += 1;
*point.column_mut() = 0;
} else {
*point.column_mut() += ch.len_utf8() as u32;
.take_while({
let mut prev = false;
move |char| {
let now = char.is_ascii();
let end = char.is_ascii() && (char.is_ascii_whitespace() || prev);
prev = now;
!end
}
result
})
});
chars
.collect::<String>()
.graphemes(true)
.next()
.map(|s| s.to_owned())
}
pub fn buffer_chars_at(&self, mut offset: usize) -> impl Iterator<Item = (char, usize)> + '_ {
@@ -1120,8 +1144,8 @@ impl DisplayPoint {
pub fn to_offset(self, map: &DisplaySnapshot, bias: Bias) -> usize {
let wrap_point = map.block_snapshot.to_wrap_point(self.0);
let tab_point = map.wrap_snapshot.to_tab_point(wrap_point);
let fold_point = map.tab_snapshot.to_fold_point(tab_point, bias).0;
let char_point = map.wrap_snapshot.to_char_point(wrap_point);
let fold_point = map.char_snapshot.to_fold_point(char_point, bias).0;
let inlay_point = fold_point.to_inlay_point(&map.fold_snapshot);
map.inlay_snapshot
.to_buffer_offset(map.inlay_snapshot.to_offset(inlay_point))
@@ -1156,7 +1180,6 @@ impl ToDisplayPoint for Anchor {
pub mod tests {
use super::*;
use crate::{movement, test::marked_display_snapshot};
use block_map::BlockPlacement;
use gpui::{div, font, observe, px, AppContext, BorrowAppContext, Context, Element, Hsla};
use language::{
language_settings::{AllLanguageSettings, AllLanguageSettingsContent},
@@ -1168,7 +1191,6 @@ pub mod tests {
use smol::stream::StreamExt;
use std::{env, sync::Arc};
use theme::{LoadThemes, SyntaxTheme};
use unindent::Unindent as _;
use util::test::{marked_text_ranges, sample_text};
use Bias::*;
@@ -1230,7 +1252,7 @@ pub mod tests {
let snapshot = map.update(cx, |map, cx| map.snapshot(cx));
log::info!("buffer text: {:?}", snapshot.buffer_snapshot.text());
log::info!("fold text: {:?}", snapshot.fold_snapshot.text());
log::info!("tab text: {:?}", snapshot.tab_snapshot.text());
log::info!("char text: {:?}", snapshot.char_snapshot.text());
log::info!("wrap text: {:?}", snapshot.wrap_snapshot.text());
log::info!("block text: {:?}", snapshot.block_snapshot.text());
log::info!("display text: {:?}", snapshot.text());
@@ -1271,22 +1293,24 @@ pub mod tests {
Bias::Left,
));
let placement = if rng.gen() {
BlockPlacement::Above(position)
let disposition = if rng.gen() {
BlockDisposition::Above
} else {
BlockPlacement::Below(position)
BlockDisposition::Below
};
let height = rng.gen_range(1..5);
log::info!(
"inserting block {:?} with height {}",
placement.as_ref().map(|p| p.to_point(&buffer)),
"inserting block {:?} {:?} with height {}",
disposition,
position.to_point(&buffer),
height
);
let priority = rng.gen_range(1..100);
BlockProperties {
placement,
style: BlockStyle::Fixed,
position,
height,
disposition,
render: Box::new(|_| div().into_any()),
priority,
}
@@ -1345,7 +1369,7 @@ pub mod tests {
fold_count = snapshot.fold_count();
log::info!("buffer text: {:?}", snapshot.buffer_snapshot.text());
log::info!("fold text: {:?}", snapshot.fold_snapshot.text());
log::info!("tab text: {:?}", snapshot.tab_snapshot.text());
log::info!("char text: {:?}", snapshot.char_snapshot.text());
log::info!("wrap text: {:?}", snapshot.wrap_snapshot.text());
log::info!("block text: {:?}", snapshot.block_snapshot.text());
log::info!("display text: {:?}", snapshot.text());
@@ -1625,6 +1649,8 @@ pub mod tests {
#[gpui::test]
async fn test_chunks(cx: &mut gpui::TestAppContext) {
use unindent::Unindent as _;
let text = r#"
fn outer() {}
@@ -1721,110 +1747,12 @@ pub mod tests {
);
}
#[gpui::test]
async fn test_chunks_with_syntax_highlighting_across_blocks(cx: &mut gpui::TestAppContext) {
cx.background_executor
.set_block_on_ticks(usize::MAX..=usize::MAX);
let text = r#"
const A: &str = "
one
two
three
";
const B: &str = "four";
"#
.unindent();
let theme = SyntaxTheme::new_test(vec![
("string", Hsla::red()),
("punctuation", Hsla::blue()),
("keyword", Hsla::green()),
]);
let language = Arc::new(
Language::new(
LanguageConfig {
name: "Rust".into(),
..Default::default()
},
Some(tree_sitter_rust::LANGUAGE.into()),
)
.with_highlights_query(
r#"
(string_literal) @string
"const" @keyword
[":" ";"] @punctuation
"#,
)
.unwrap(),
);
language.set_theme(&theme);
cx.update(|cx| init_test(cx, |_| {}));
let buffer = cx.new_model(|cx| Buffer::local(text, cx).with_language(language, cx));
cx.condition(&buffer, |buf, _| !buf.is_parsing()).await;
let buffer = cx.new_model(|cx| MultiBuffer::singleton(buffer, cx));
let buffer_snapshot = buffer.read_with(cx, |buffer, cx| buffer.snapshot(cx));
let map = cx.new_model(|cx| {
DisplayMap::new(
buffer,
font("Courier"),
px(16.0),
None,
true,
1,
1,
0,
FoldPlaceholder::test(),
cx,
)
});
// Insert a block in the middle of a multi-line string literal
map.update(cx, |map, cx| {
map.insert_blocks(
[BlockProperties {
placement: BlockPlacement::Below(
buffer_snapshot.anchor_before(Point::new(1, 0)),
),
height: 1,
style: BlockStyle::Sticky,
render: Box::new(|_| div().into_any()),
priority: 0,
}],
cx,
)
});
pretty_assertions::assert_eq!(
cx.update(|cx| syntax_chunks(DisplayRow(0)..DisplayRow(7), &map, &theme, cx)),
[
("const".into(), Some(Hsla::green())),
(" A".into(), None),
(":".into(), Some(Hsla::blue())),
(" &str = ".into(), None),
("\"\n one\n".into(), Some(Hsla::red())),
("\n".into(), None),
(" two\n three\n\"".into(), Some(Hsla::red())),
(";".into(), Some(Hsla::blue())),
("\n".into(), None),
("const".into(), Some(Hsla::green())),
(" B".into(), None),
(":".into(), Some(Hsla::blue())),
(" &str = ".into(), None),
("\"four\"".into(), Some(Hsla::red())),
(";".into(), Some(Hsla::blue())),
("\n".into(), None),
]
);
}
// todo(linux) fails due to pixel differences in text rendering
#[cfg(target_os = "macos")]
#[gpui::test]
async fn test_chunks_with_soft_wrapping(cx: &mut gpui::TestAppContext) {
use unindent::Unindent as _;
cx.background_executor
.set_block_on_ticks(usize::MAX..=usize::MAX);

File diff suppressed because it is too large

View File

@@ -1,5 +1,6 @@
use super::{
fold_map::{self, FoldChunks, FoldEdit, FoldPoint, FoldSnapshot},
invisibles::{is_invisible, replacement},
Highlights,
};
use language::{Chunk, Point};
@@ -9,14 +10,14 @@ use sum_tree::Bias;
const MAX_EXPANSION_COLUMN: u32 = 256;
/// Keeps track of hard tabs in a text buffer.
/// Keeps track of hard tabs and non-printable characters in a text buffer.
///
/// See the [`display_map` module documentation](crate::display_map) for more information.
pub struct TabMap(TabSnapshot);
pub struct CharMap(CharSnapshot);
impl TabMap {
pub fn new(fold_snapshot: FoldSnapshot, tab_size: NonZeroU32) -> (Self, TabSnapshot) {
let snapshot = TabSnapshot {
impl CharMap {
pub fn new(fold_snapshot: FoldSnapshot, tab_size: NonZeroU32) -> (Self, CharSnapshot) {
let snapshot = CharSnapshot {
fold_snapshot,
tab_size,
max_expansion_column: MAX_EXPANSION_COLUMN,
@@ -26,7 +27,7 @@ impl TabMap {
}
#[cfg(test)]
pub fn set_max_expansion_column(&mut self, column: u32) -> TabSnapshot {
pub fn set_max_expansion_column(&mut self, column: u32) -> CharSnapshot {
self.0.max_expansion_column = column;
self.0.clone()
}
@@ -36,9 +37,9 @@ impl TabMap {
fold_snapshot: FoldSnapshot,
mut fold_edits: Vec<FoldEdit>,
tab_size: NonZeroU32,
) -> (TabSnapshot, Vec<TabEdit>) {
) -> (CharSnapshot, Vec<TabEdit>) {
let old_snapshot = &mut self.0;
let mut new_snapshot = TabSnapshot {
let mut new_snapshot = CharSnapshot {
fold_snapshot,
tab_size,
max_expansion_column: old_snapshot.max_expansion_column,
@@ -137,15 +138,15 @@ impl TabMap {
let new_start = fold_edit.new.start.to_point(&new_snapshot.fold_snapshot);
let new_end = fold_edit.new.end.to_point(&new_snapshot.fold_snapshot);
tab_edits.push(TabEdit {
old: old_snapshot.to_tab_point(old_start)..old_snapshot.to_tab_point(old_end),
new: new_snapshot.to_tab_point(new_start)..new_snapshot.to_tab_point(new_end),
old: old_snapshot.to_char_point(old_start)..old_snapshot.to_char_point(old_end),
new: new_snapshot.to_char_point(new_start)..new_snapshot.to_char_point(new_end),
});
}
} else {
new_snapshot.version += 1;
tab_edits.push(TabEdit {
old: TabPoint::zero()..old_snapshot.max_point(),
new: TabPoint::zero()..new_snapshot.max_point(),
old: CharPoint::zero()..old_snapshot.max_point(),
new: CharPoint::zero()..new_snapshot.max_point(),
});
}
@@ -155,14 +156,14 @@ impl TabMap {
}
#[derive(Clone)]
pub struct TabSnapshot {
pub struct CharSnapshot {
pub fold_snapshot: FoldSnapshot,
pub tab_size: NonZeroU32,
pub max_expansion_column: u32,
pub version: usize,
}
impl TabSnapshot {
impl CharSnapshot {
pub fn buffer_snapshot(&self) -> &MultiBufferSnapshot {
&self.fold_snapshot.inlay_snapshot.buffer
}
@@ -170,7 +171,7 @@ impl TabSnapshot {
pub fn line_len(&self, row: u32) -> u32 {
let max_point = self.max_point();
if row < max_point.row() {
self.to_tab_point(FoldPoint::new(row, self.fold_snapshot.line_len(row)))
self.to_char_point(FoldPoint::new(row, self.fold_snapshot.line_len(row)))
.0
.column
} else {
@@ -179,10 +180,10 @@ impl TabSnapshot {
}
pub fn text_summary(&self) -> TextSummary {
self.text_summary_for_range(TabPoint::zero()..self.max_point())
self.text_summary_for_range(CharPoint::zero()..self.max_point())
}
pub fn text_summary_for_range(&self, range: Range<TabPoint>) -> TextSummary {
pub fn text_summary_for_range(&self, range: Range<CharPoint>) -> TextSummary {
let input_start = self.to_fold_point(range.start, Bias::Left).0;
let input_end = self.to_fold_point(range.end, Bias::Right).0;
let input_summary = self
@@ -211,7 +212,7 @@ impl TabSnapshot {
} else {
for _ in self
.chunks(
TabPoint::new(range.end.row(), 0)..range.end,
CharPoint::new(range.end.row(), 0)..range.end,
false,
Highlights::default(),
)
@@ -232,7 +233,7 @@ impl TabSnapshot {
pub fn chunks<'a>(
&'a self,
range: Range<TabPoint>,
range: Range<CharPoint>,
language_aware: bool,
highlights: Highlights<'a>,
) -> TabChunks<'a> {
@@ -251,7 +252,6 @@ impl TabSnapshot {
};
TabChunks {
snapshot: self,
fold_chunks: self.fold_snapshot.chunks(
input_start..input_end,
language_aware,
@@ -279,7 +279,7 @@ impl TabSnapshot {
#[cfg(test)]
pub fn text(&self) -> String {
self.chunks(
TabPoint::zero()..self.max_point(),
CharPoint::zero()..self.max_point(),
false,
Highlights::default(),
)
@@ -287,24 +287,24 @@ impl TabSnapshot {
.collect()
}
pub fn max_point(&self) -> TabPoint {
self.to_tab_point(self.fold_snapshot.max_point())
pub fn max_point(&self) -> CharPoint {
self.to_char_point(self.fold_snapshot.max_point())
}
pub fn clip_point(&self, point: TabPoint, bias: Bias) -> TabPoint {
self.to_tab_point(
pub fn clip_point(&self, point: CharPoint, bias: Bias) -> CharPoint {
self.to_char_point(
self.fold_snapshot
.clip_point(self.to_fold_point(point, bias).0, bias),
)
}
pub fn to_tab_point(&self, input: FoldPoint) -> TabPoint {
pub fn to_char_point(&self, input: FoldPoint) -> CharPoint {
let chars = self.fold_snapshot.chars_at(FoldPoint::new(input.row(), 0));
let expanded = self.expand_tabs(chars, input.column());
TabPoint::new(input.row(), expanded)
CharPoint::new(input.row(), expanded)
}
pub fn to_fold_point(&self, output: TabPoint, bias: Bias) -> (FoldPoint, u32, u32) {
pub fn to_fold_point(&self, output: CharPoint, bias: Bias) -> (FoldPoint, u32, u32) {
let chars = self.fold_snapshot.chars_at(FoldPoint::new(output.row(), 0));
let expanded = output.column();
let (collapsed, expanded_char_column, to_next_stop) =
@@ -316,13 +316,13 @@ impl TabSnapshot {
)
}
pub fn make_tab_point(&self, point: Point, bias: Bias) -> TabPoint {
pub fn make_char_point(&self, point: Point, bias: Bias) -> CharPoint {
let inlay_point = self.fold_snapshot.inlay_snapshot.to_inlay_point(point);
let fold_point = self.fold_snapshot.to_fold_point(inlay_point, bias);
self.to_tab_point(fold_point)
self.to_char_point(fold_point)
}
pub fn to_point(&self, point: TabPoint, bias: Bias) -> Point {
pub fn to_point(&self, point: CharPoint, bias: Bias) -> Point {
let fold_point = self.to_fold_point(point, bias).0;
let inlay_point = fold_point.to_inlay_point(&self.fold_snapshot);
self.fold_snapshot
@@ -345,6 +345,9 @@ impl TabSnapshot {
let tab_len = tab_size - expanded_chars % tab_size;
expanded_bytes += tab_len;
expanded_chars += tab_len;
} else if let Some(replacement) = replacement(c) {
expanded_chars += replacement.chars().count() as u32;
expanded_bytes += replacement.len() as u32;
} else {
expanded_bytes += c.len_utf8() as u32;
expanded_chars += 1;
@@ -384,6 +387,9 @@ impl TabSnapshot {
Bias::Right => (collapsed_bytes + 1, expanded_chars, 0),
};
}
} else if let Some(replacement) = replacement(c) {
expanded_chars += replacement.chars().count() as u32;
expanded_bytes += replacement.len() as u32;
} else {
expanded_chars += 1;
expanded_bytes += c.len_utf8() as u32;
@@ -405,9 +411,9 @@ impl TabSnapshot {
}
#[derive(Copy, Clone, Debug, Default, Eq, Ord, PartialOrd, PartialEq)]
pub struct TabPoint(pub Point);
pub struct CharPoint(pub Point);
impl TabPoint {
impl CharPoint {
pub fn new(row: u32, column: u32) -> Self {
Self(Point::new(row, column))
}
@@ -425,13 +431,13 @@ impl TabPoint {
}
}
impl From<Point> for TabPoint {
impl From<Point> for CharPoint {
fn from(point: Point) -> Self {
Self(point)
}
}
pub type TabEdit = text::Edit<TabPoint>;
pub type TabEdit = text::Edit<CharPoint>;
#[derive(Clone, Debug, Default, Eq, PartialEq)]
pub struct TextSummary {
@@ -486,7 +492,6 @@ impl<'a> std::ops::AddAssign<&'a Self> for TextSummary {
const SPACES: &str = " ";
pub struct TabChunks<'a> {
snapshot: &'a TabSnapshot,
fold_chunks: FoldChunks<'a>,
chunk: Chunk<'a>,
column: u32,
@@ -498,37 +503,6 @@ pub struct TabChunks<'a> {
inside_leading_tab: bool,
}
impl<'a> TabChunks<'a> {
pub(crate) fn seek(&mut self, range: Range<TabPoint>) {
let (input_start, expanded_char_column, to_next_stop) =
self.snapshot.to_fold_point(range.start, Bias::Left);
let input_column = input_start.column();
let input_start = input_start.to_offset(&self.snapshot.fold_snapshot);
let input_end = self
.snapshot
.to_fold_point(range.end, Bias::Right)
.0
.to_offset(&self.snapshot.fold_snapshot);
let to_next_stop = if range.start.0 + Point::new(0, to_next_stop) > range.end.0 {
range.end.column() - range.start.column()
} else {
to_next_stop
};
self.fold_chunks.seek(input_start..input_end);
self.input_column = input_column;
self.column = expanded_char_column;
self.output_position = range.start.0;
self.max_output_position = range.end.0;
self.chunk = Chunk {
text: &SPACES[0..(to_next_stop as usize)],
is_tab: true,
..Default::default()
};
self.inside_leading_tab = to_next_stop > 0;
}
}
impl<'a> Iterator for TabChunks<'a> {
type Item = Chunk<'a>;
@@ -584,6 +558,37 @@ impl<'a> Iterator for TabChunks<'a> {
self.input_column = 0;
self.output_position += Point::new(1, 0);
}
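// An invisible character ends the current chunk: any text before it is flushed as a
// normal chunk first, then the character itself is emitted as its own chunk marked
// `is_invisible`, using a visible replacement string when one exists.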
_ if is_invisible(c) => {
if ix > 0 {
let (prefix, suffix) = self.chunk.text.split_at(ix);
self.chunk.text = suffix;
return Some(Chunk {
text: prefix,
is_invisible: false,
..self.chunk.clone()
});
}
let c_len = c.len_utf8();
let replacement = replacement(c).unwrap_or(&self.chunk.text[..c_len]);
if self.chunk.text.len() >= c_len {
self.chunk.text = &self.chunk.text[c_len..];
} else {
self.chunk.text = "";
}
let len = replacement.chars().count() as u32;
let next_output_position = cmp::min(
self.output_position + Point::new(0, len),
self.max_output_position,
);
self.column += len;
self.input_column += 1;
self.output_position = next_output_position;
return Some(Chunk {
text: replacement,
is_invisible: true,
..self.chunk.clone()
});
}
_ => {
self.column += 1;
if !self.inside_leading_tab {
@@ -613,11 +618,11 @@ mod tests {
let buffer_snapshot = buffer.read(cx).snapshot(cx);
let (_, inlay_snapshot) = InlayMap::new(buffer_snapshot.clone());
let (_, fold_snapshot) = FoldMap::new(inlay_snapshot);
let (_, tab_snapshot) = TabMap::new(fold_snapshot, 4.try_into().unwrap());
let (_, char_snapshot) = CharMap::new(fold_snapshot, 4.try_into().unwrap());
assert_eq!(tab_snapshot.expand_tabs("\t".chars(), 0), 0);
assert_eq!(tab_snapshot.expand_tabs("\t".chars(), 1), 4);
assert_eq!(tab_snapshot.expand_tabs("\ta".chars(), 2), 5);
assert_eq!(char_snapshot.expand_tabs("\t".chars(), 0), 0);
assert_eq!(char_snapshot.expand_tabs("\t".chars(), 1), 4);
assert_eq!(char_snapshot.expand_tabs("\ta".chars(), 2), 5);
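// With a tab size of 4, the tab at column 0 expands to the next stop: collapsed
// column 1 maps to expanded column 4, and the `a` that follows lands at column 5.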
}
#[gpui::test]
@@ -630,16 +635,16 @@ mod tests {
let buffer_snapshot = buffer.read(cx).snapshot(cx);
let (_, inlay_snapshot) = InlayMap::new(buffer_snapshot.clone());
let (_, fold_snapshot) = FoldMap::new(inlay_snapshot);
let (_, mut tab_snapshot) = TabMap::new(fold_snapshot, 4.try_into().unwrap());
let (_, mut char_snapshot) = CharMap::new(fold_snapshot, 4.try_into().unwrap());
tab_snapshot.max_expansion_column = max_expansion_column;
assert_eq!(tab_snapshot.text(), output);
char_snapshot.max_expansion_column = max_expansion_column;
assert_eq!(char_snapshot.text(), output);
for (ix, c) in input.char_indices() {
assert_eq!(
tab_snapshot
char_snapshot
.chunks(
TabPoint::new(0, ix as u32)..tab_snapshot.max_point(),
CharPoint::new(0, ix as u32)..char_snapshot.max_point(),
false,
Highlights::default(),
)
@@ -653,13 +658,13 @@ mod tests {
let input_point = Point::new(0, ix as u32);
let output_point = Point::new(0, output.find(c).unwrap() as u32);
assert_eq!(
tab_snapshot.to_tab_point(FoldPoint(input_point)),
TabPoint(output_point),
"to_tab_point({input_point:?})"
char_snapshot.to_char_point(FoldPoint(input_point)),
CharPoint(output_point),
"to_char_point({input_point:?})"
);
assert_eq!(
tab_snapshot
.to_fold_point(TabPoint(output_point), Bias::Left)
char_snapshot
.to_fold_point(CharPoint(output_point), Bias::Left)
.0,
FoldPoint(input_point),
"to_fold_point({output_point:?})"
@@ -677,10 +682,10 @@ mod tests {
let buffer_snapshot = buffer.read(cx).snapshot(cx);
let (_, inlay_snapshot) = InlayMap::new(buffer_snapshot.clone());
let (_, fold_snapshot) = FoldMap::new(inlay_snapshot);
let (_, mut tab_snapshot) = TabMap::new(fold_snapshot, 4.try_into().unwrap());
let (_, mut char_snapshot) = CharMap::new(fold_snapshot, 4.try_into().unwrap());
tab_snapshot.max_expansion_column = max_expansion_column;
assert_eq!(tab_snapshot.text(), input);
char_snapshot.max_expansion_column = max_expansion_column;
assert_eq!(char_snapshot.text(), input);
}
#[gpui::test]
@@ -691,10 +696,10 @@ mod tests {
let buffer_snapshot = buffer.read(cx).snapshot(cx);
let (_, inlay_snapshot) = InlayMap::new(buffer_snapshot.clone());
let (_, fold_snapshot) = FoldMap::new(inlay_snapshot);
let (_, tab_snapshot) = TabMap::new(fold_snapshot, 4.try_into().unwrap());
let (_, char_snapshot) = CharMap::new(fold_snapshot, 4.try_into().unwrap());
assert_eq!(
chunks(&tab_snapshot, TabPoint::zero()),
chunks(&char_snapshot, CharPoint::zero()),
vec![
(" ".to_string(), true),
(" ".to_string(), false),
@@ -703,7 +708,7 @@ mod tests {
]
);
assert_eq!(
chunks(&tab_snapshot, TabPoint::new(0, 2)),
chunks(&char_snapshot, CharPoint::new(0, 2)),
vec![
(" ".to_string(), true),
(" ".to_string(), false),
@@ -712,7 +717,7 @@ mod tests {
]
);
fn chunks(snapshot: &TabSnapshot, start: TabPoint) -> Vec<(String, bool)> {
fn chunks(snapshot: &CharSnapshot, start: CharPoint) -> Vec<(String, bool)> {
let mut chunks = Vec::new();
let mut was_tab = false;
let mut text = String::new();
@@ -758,12 +763,12 @@ mod tests {
let (inlay_snapshot, _) = inlay_map.randomly_mutate(&mut 0, &mut rng);
log::info!("InlayMap text: {:?}", inlay_snapshot.text());
let (mut tab_map, _) = TabMap::new(fold_snapshot.clone(), tab_size);
let tabs_snapshot = tab_map.set_max_expansion_column(32);
let (mut char_map, _) = CharMap::new(fold_snapshot.clone(), tab_size);
let tabs_snapshot = char_map.set_max_expansion_column(32);
let text = text::Rope::from(tabs_snapshot.text().as_str());
log::info!(
"TabMap text (tab size: {}): {:?}",
"CharMap text (tab size: {}): {:?}",
tab_size,
tabs_snapshot.text(),
);
@@ -771,11 +776,11 @@ mod tests {
for _ in 0..5 {
let end_row = rng.gen_range(0..=text.max_point().row);
let end_column = rng.gen_range(0..=text.line_len(end_row));
let mut end = TabPoint(text.clip_point(Point::new(end_row, end_column), Bias::Right));
let mut end = CharPoint(text.clip_point(Point::new(end_row, end_column), Bias::Right));
let start_row = rng.gen_range(0..=text.max_point().row);
let start_column = rng.gen_range(0..=text.line_len(start_row));
let mut start =
TabPoint(text.clip_point(Point::new(start_row, start_column), Bias::Left));
CharPoint(text.clip_point(Point::new(start_row, start_column), Bias::Left));
if start > end {
mem::swap(&mut start, &mut end);
}

View File

@@ -1100,17 +1100,6 @@ pub struct FoldBufferRows<'a> {
fold_point: FoldPoint,
}
impl<'a> FoldBufferRows<'a> {
pub(crate) fn seek(&mut self, row: u32) {
let fold_point = FoldPoint::new(row, 0);
self.cursor.seek(&fold_point, Bias::Left, &());
let overshoot = fold_point.0 - self.cursor.start().0 .0;
let inlay_point = InlayPoint(self.cursor.start().1 .0 + overshoot);
self.input_buffer_rows.seek(inlay_point.row());
self.fold_point = fold_point;
}
}
impl<'a> Iterator for FoldBufferRows<'a> {
type Item = Option<u32>;
@@ -1146,38 +1135,6 @@ pub struct FoldChunks<'a> {
max_output_offset: FoldOffset,
}
impl<'a> FoldChunks<'a> {
pub(crate) fn seek(&mut self, range: Range<FoldOffset>) {
self.transform_cursor.seek(&range.start, Bias::Right, &());
let inlay_start = {
let overshoot = range.start.0 - self.transform_cursor.start().0 .0;
self.transform_cursor.start().1 + InlayOffset(overshoot)
};
let transform_end = self.transform_cursor.end(&());
let inlay_end = if self
.transform_cursor
.item()
.map_or(true, |transform| transform.is_fold())
{
inlay_start
} else if range.end < transform_end.0 {
let overshoot = range.end.0 - self.transform_cursor.start().0 .0;
self.transform_cursor.start().1 + InlayOffset(overshoot)
} else {
transform_end.1
};
self.inlay_chunks.seek(inlay_start..inlay_end);
self.inlay_chunk = None;
self.inlay_offset = inlay_start;
self.output_offset = range.start;
self.max_output_offset = range.end;
}
}
impl<'a> Iterator for FoldChunks<'a> {
type Item = Chunk<'a>;

View File

@@ -0,0 +1,157 @@
use std::sync::LazyLock;
use collections::HashMap;
// Invisibility in a Unicode context is not well defined, so we have to guess.
//
// We highlight all ASCII control codes and Unicode whitespace because they are easily
// confused with a normal space (U+0020).
//
// We also highlight the handful of blank non-space characters:
// U+2800 BRAILLE PATTERN BLANK - Category: So
// U+115F HANGUL CHOSEONG FILLER - Category: Lo
// U+1160 HANGUL JUNGSEONG FILLER - Category: Lo
// U+3164 HANGUL FILLER - Category: Lo
// U+FFA0 HALFWIDTH HANGUL FILLER - Category: Lo
// U+FFFC OBJECT REPLACEMENT CHARACTER - Category: So
//
// For the rest of Unicode, invisibility happens for two reasons:
// * A Format character (like a byte order mark or right-to-left override)
// * An invisible Nonspacing Mark character (like U+034F, or variation selectors)
//
// We don't consider unassigned codepoints invisible as the font renderer already shows
// a replacement character in that case (and there are a *lot* of them)
//
// Control characters are mostly fine to highlight, except:
// * U+E0020..=U+E007F are used in emoji flags. We don't highlight them right now, but we could if we tightened our heuristics.
// * U+200D is used to join characters. We highlight this but don't replace it. As our font system ignores mid-glyph highlights, this mostly works to highlight unexpected uses.
//
// Nonspacing marks are handled like U+200D. This means that mid-glyph we ignore them, but
// this probably causes issues with end-of-glyph usage.
//
// ref: https://invisible-characters.com
// ref: https://www.compart.com/en/unicode/category/Cf
// ref: https://gist.github.com/ConradIrwin/f759e1fc29267143c4c7895aa495dca5?h=1
// ref: https://unicode.org/Public/emoji/13.0/emoji-test.txt
// https://github.com/bits/UTF-8-Unicode-Test-Documents/blob/master/UTF-8_sequence_separated/utf8_sequence_0-0x10ffff_assigned_including-unprintable-asis.txt
pub fn is_invisible(c: char) -> bool {
if c <= '\u{1f}' {
c != '\t' && c != '\n' && c != '\r'
} else if c >= '\u{7f}' {
c <= '\u{9f}' || c.is_whitespace() || contains(c, &FORMAT) || contains(c, &OTHER)
} else {
false
}
}
pub(crate) fn replacement(c: char) -> Option<&'static str> {
if !is_invisible(c) {
return None;
}
if c <= '\x7f' {
REPLACEMENTS.get(&c).copied()
} else if contains(c, &PRESERVE) {
None
} else {
Some(" ")
}
}
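// Illustrative sketch (not part of the original change): how the two helpers above
// behave under the tables defined below. The module and test names are made up for
// illustration only.
#[cfg(test)]
mod invisible_examples {
    use super::{is_invisible, replacement};

    #[test]
    fn classification_examples() {
        // ZERO WIDTH SPACE is a Format character: highlighted and drawn as a placeholder.
        assert!(is_invisible('\u{200B}'));
        assert!(replacement('\u{200B}').is_some());
        // COMBINING GRAPHEME JOINER is highlighted but preserved so glyph shaping still works.
        assert!(is_invisible('\u{034F}'));
        assert_eq!(replacement('\u{034F}'), None);
        // Ordinary visible characters pass through untouched.
        assert_eq!(replacement('a'), None);
    }
}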
static REPLACEMENTS: LazyLock<HashMap<char, &'static str>> = LazyLock::new(|| {
[
('\x00', "␀"),
('\x01', "␁"),
('\x02', "␂"),
('\x03', "␃"),
('\x04', "␄"),
('\x05', "␅"),
('\x06', "␆"),
('\x07', "␇"),
('\x08', "␈"),
('\x0B', "␋"),
('\x0C', "␌"),
('\x0D', "␍"),
('\x0E', "␎"),
('\x0F', "␏"),
('\x10', "␐"),
('\x11', "␑"),
('\x12', "␒"),
('\x13', "␓"),
('\x14', "␔"),
('\x15', "␕"),
('\x16', "␖"),
('\x17', "␗"),
('\x18', "␘"),
('\x19', "␙"),
('\x1A', "␚"),
('\x1B', "␛"),
('\x1C', "␜"),
('\x1D', "␝"),
('\x1E', "␞"),
('\x1F', "␟"),
('\u{007F}', "␡"),
]
.into_iter()
.collect()
});
// generated using ucd-generate: ucd-generate general-category --include Format --chars ucd-16.0.0
pub const FORMAT: &'static [(char, char)] = &[
('\u{ad}', '\u{ad}'),
('\u{600}', '\u{605}'),
('\u{61c}', '\u{61c}'),
('\u{6dd}', '\u{6dd}'),
('\u{70f}', '\u{70f}'),
('\u{890}', '\u{891}'),
('\u{8e2}', '\u{8e2}'),
('\u{180e}', '\u{180e}'),
('\u{200b}', '\u{200f}'),
('\u{202a}', '\u{202e}'),
('\u{2060}', '\u{2064}'),
('\u{2066}', '\u{206f}'),
('\u{feff}', '\u{feff}'),
('\u{fff9}', '\u{fffb}'),
('\u{110bd}', '\u{110bd}'),
('\u{110cd}', '\u{110cd}'),
('\u{13430}', '\u{1343f}'),
('\u{1bca0}', '\u{1bca3}'),
('\u{1d173}', '\u{1d17a}'),
('\u{e0001}', '\u{e0001}'),
('\u{e0020}', '\u{e007f}'),
];
// hand-made, based on https://invisible-characters.com (excluding Cf)
pub const OTHER: &'static [(char, char)] = &[
('\u{034f}', '\u{034f}'),
('\u{115F}', '\u{1160}'),
('\u{17b4}', '\u{17b5}'),
('\u{180b}', '\u{180d}'),
('\u{2800}', '\u{2800}'),
('\u{3164}', '\u{3164}'),
('\u{fe00}', '\u{fe0d}'),
('\u{ffa0}', '\u{ffa0}'),
('\u{fffc}', '\u{fffc}'),
('\u{e0100}', '\u{e01ef}'),
];
// a subset of FORMAT/OTHER that may appear within glyphs
const PRESERVE: &'static [(char, char)] = &[
('\u{034f}', '\u{034f}'),
('\u{17b4}', '\u{17b5}'),
('\u{180b}', '\u{180d}'),
('\u{200d}', '\u{200d}'),
('\u{e0061}', '\u{e007a}'),
('\u{e007f}', '\u{e007f}'),
];
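// Linear scan over inclusive ranges; the early return assumes each table is sorted by
// range start.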
fn contains(c: char, list: &[(char, char)]) -> bool {
for (start, end) in list {
if c < *start {
return false;
}
if c <= *end {
return true;
}
}
false
}

View File

@@ -1,6 +1,6 @@
use super::{
char_map::{self, CharPoint, CharSnapshot, TabEdit},
fold_map::FoldBufferRows,
tab_map::{self, TabEdit, TabPoint, TabSnapshot},
Highlights,
};
use gpui::{AppContext, Context, Font, LineWrapper, Model, ModelContext, Pixels, Task};
@@ -12,7 +12,7 @@ use std::{cmp, collections::VecDeque, mem, ops::Range, time::Duration};
use sum_tree::{Bias, Cursor, SumTree};
use text::Patch;
pub use super::tab_map::TextSummary;
pub use super::char_map::TextSummary;
pub type WrapEdit = text::Edit<u32>;
/// Handles soft wrapping of text.
@@ -20,7 +20,7 @@ pub type WrapEdit = text::Edit<u32>;
/// See the [`display_map` module documentation](crate::display_map) for more information.
pub struct WrapMap {
snapshot: WrapSnapshot,
pending_edits: VecDeque<(TabSnapshot, Vec<TabEdit>)>,
pending_edits: VecDeque<(CharSnapshot, Vec<TabEdit>)>,
interpolated_edits: Patch<u32>,
edits_since_sync: Patch<u32>,
wrap_width: Option<Pixels>,
@@ -30,7 +30,7 @@ pub struct WrapMap {
#[derive(Clone)]
pub struct WrapSnapshot {
tab_snapshot: TabSnapshot,
char_snapshot: CharSnapshot,
transforms: SumTree<Transform>,
interpolated: bool,
}
@@ -51,12 +51,11 @@ struct TransformSummary {
pub struct WrapPoint(pub Point);
pub struct WrapChunks<'a> {
input_chunks: tab_map::TabChunks<'a>,
input_chunks: char_map::TabChunks<'a>,
input_chunk: Chunk<'a>,
output_position: WrapPoint,
max_output_row: u32,
transforms: Cursor<'a, Transform, (WrapPoint, TabPoint)>,
snapshot: &'a WrapSnapshot,
transforms: Cursor<'a, Transform, (WrapPoint, CharPoint)>,
}
#[derive(Clone)]
@@ -66,27 +65,12 @@ pub struct WrapBufferRows<'a> {
output_row: u32,
soft_wrapped: bool,
max_output_row: u32,
transforms: Cursor<'a, Transform, (WrapPoint, TabPoint)>,
}
impl<'a> WrapBufferRows<'a> {
pub(crate) fn seek(&mut self, start_row: u32) {
self.transforms
.seek(&WrapPoint::new(start_row, 0), Bias::Left, &());
let mut input_row = self.transforms.start().1.row();
if self.transforms.item().map_or(false, |t| t.is_isomorphic()) {
input_row += start_row - self.transforms.start().0.row();
}
self.soft_wrapped = self.transforms.item().map_or(false, |t| !t.is_isomorphic());
self.input_buffer_rows.seek(input_row);
self.input_buffer_row = self.input_buffer_rows.next().unwrap();
self.output_row = start_row;
}
transforms: Cursor<'a, Transform, (WrapPoint, CharPoint)>,
}
impl WrapMap {
pub fn new(
tab_snapshot: TabSnapshot,
char_snapshot: CharSnapshot,
font: Font,
font_size: Pixels,
wrap_width: Option<Pixels>,
@@ -99,7 +83,7 @@ impl WrapMap {
pending_edits: Default::default(),
interpolated_edits: Default::default(),
edits_since_sync: Default::default(),
snapshot: WrapSnapshot::new(tab_snapshot),
snapshot: WrapSnapshot::new(char_snapshot),
background_task: None,
};
this.set_wrap_width(wrap_width, cx);
@@ -117,17 +101,17 @@ impl WrapMap {
pub fn sync(
&mut self,
tab_snapshot: TabSnapshot,
char_snapshot: CharSnapshot,
edits: Vec<TabEdit>,
cx: &mut ModelContext<Self>,
) -> (WrapSnapshot, Patch<u32>) {
if self.wrap_width.is_some() {
self.pending_edits.push_back((tab_snapshot, edits));
self.pending_edits.push_back((char_snapshot, edits));
self.flush_edits(cx);
} else {
self.edits_since_sync = self
.edits_since_sync
.compose(self.snapshot.interpolate(tab_snapshot, &edits));
.compose(self.snapshot.interpolate(char_snapshot, &edits));
self.snapshot.interpolated = false;
}
@@ -177,11 +161,11 @@ impl WrapMap {
let (font, font_size) = self.font_with_size.clone();
let task = cx.background_executor().spawn(async move {
let mut line_wrapper = text_system.line_wrapper(font, font_size);
let tab_snapshot = new_snapshot.tab_snapshot.clone();
let range = TabPoint::zero()..tab_snapshot.max_point();
let char_snapshot = new_snapshot.char_snapshot.clone();
let range = CharPoint::zero()..char_snapshot.max_point();
let edits = new_snapshot
.update(
tab_snapshot,
char_snapshot,
&[TabEdit {
old: range.clone(),
new: range.clone(),
@@ -221,7 +205,7 @@ impl WrapMap {
} else {
let old_rows = self.snapshot.transforms.summary().output.lines.row + 1;
self.snapshot.transforms = SumTree::default();
let summary = self.snapshot.tab_snapshot.text_summary();
let summary = self.snapshot.char_snapshot.text_summary();
if !summary.lines.is_zero() {
self.snapshot
.transforms
@@ -239,8 +223,8 @@ impl WrapMap {
fn flush_edits(&mut self, cx: &mut ModelContext<Self>) {
if !self.snapshot.interpolated {
let mut to_remove_len = 0;
for (tab_snapshot, _) in &self.pending_edits {
if tab_snapshot.version <= self.snapshot.tab_snapshot.version {
for (char_snapshot, _) in &self.pending_edits {
if char_snapshot.version <= self.snapshot.char_snapshot.version {
to_remove_len += 1;
} else {
break;
@@ -262,9 +246,9 @@ impl WrapMap {
let update_task = cx.background_executor().spawn(async move {
let mut edits = Patch::default();
let mut line_wrapper = text_system.line_wrapper(font, font_size);
for (tab_snapshot, tab_edits) in pending_edits {
for (char_snapshot, tab_edits) in pending_edits {
let wrap_edits = snapshot
.update(tab_snapshot, &tab_edits, wrap_width, &mut line_wrapper)
.update(char_snapshot, &tab_edits, wrap_width, &mut line_wrapper)
.await;
edits = edits.compose(&wrap_edits);
}
@@ -301,11 +285,11 @@ impl WrapMap {
let was_interpolated = self.snapshot.interpolated;
let mut to_remove_len = 0;
for (tab_snapshot, edits) in &self.pending_edits {
if tab_snapshot.version <= self.snapshot.tab_snapshot.version {
for (char_snapshot, edits) in &self.pending_edits {
if char_snapshot.version <= self.snapshot.char_snapshot.version {
to_remove_len += 1;
} else {
let interpolated_edits = self.snapshot.interpolate(tab_snapshot.clone(), edits);
let interpolated_edits = self.snapshot.interpolate(char_snapshot.clone(), edits);
self.edits_since_sync = self.edits_since_sync.compose(&interpolated_edits);
self.interpolated_edits = self.interpolated_edits.compose(&interpolated_edits);
}
@@ -318,45 +302,49 @@ impl WrapMap {
}
impl WrapSnapshot {
fn new(tab_snapshot: TabSnapshot) -> Self {
fn new(char_snapshot: CharSnapshot) -> Self {
let mut transforms = SumTree::default();
let extent = tab_snapshot.text_summary();
let extent = char_snapshot.text_summary();
if !extent.lines.is_zero() {
transforms.push(Transform::isomorphic(extent), &());
}
Self {
transforms,
tab_snapshot,
char_snapshot,
interpolated: true,
}
}
pub fn buffer_snapshot(&self) -> &MultiBufferSnapshot {
self.tab_snapshot.buffer_snapshot()
self.char_snapshot.buffer_snapshot()
}
fn interpolate(&mut self, new_tab_snapshot: TabSnapshot, tab_edits: &[TabEdit]) -> Patch<u32> {
fn interpolate(
&mut self,
new_char_snapshot: CharSnapshot,
tab_edits: &[TabEdit],
) -> Patch<u32> {
let mut new_transforms;
if tab_edits.is_empty() {
new_transforms = self.transforms.clone();
} else {
let mut old_cursor = self.transforms.cursor::<TabPoint>(&());
let mut old_cursor = self.transforms.cursor::<CharPoint>(&());
let mut tab_edits_iter = tab_edits.iter().peekable();
new_transforms =
old_cursor.slice(&tab_edits_iter.peek().unwrap().old.start, Bias::Right, &());
while let Some(edit) = tab_edits_iter.next() {
if edit.new.start > TabPoint::from(new_transforms.summary().input.lines) {
let summary = new_tab_snapshot.text_summary_for_range(
TabPoint::from(new_transforms.summary().input.lines)..edit.new.start,
if edit.new.start > CharPoint::from(new_transforms.summary().input.lines) {
let summary = new_char_snapshot.text_summary_for_range(
CharPoint::from(new_transforms.summary().input.lines)..edit.new.start,
);
new_transforms.push_or_extend(Transform::isomorphic(summary));
}
if !edit.new.is_empty() {
new_transforms.push_or_extend(Transform::isomorphic(
new_tab_snapshot.text_summary_for_range(edit.new.clone()),
new_char_snapshot.text_summary_for_range(edit.new.clone()),
));
}
@@ -365,7 +353,7 @@ impl WrapSnapshot {
if next_edit.old.start > old_cursor.end(&()) {
if old_cursor.end(&()) > edit.old.end {
let summary = self
.tab_snapshot
.char_snapshot
.text_summary_for_range(edit.old.end..old_cursor.end(&()));
new_transforms.push_or_extend(Transform::isomorphic(summary));
}
@@ -379,7 +367,7 @@ impl WrapSnapshot {
} else {
if old_cursor.end(&()) > edit.old.end {
let summary = self
.tab_snapshot
.char_snapshot
.text_summary_for_range(edit.old.end..old_cursor.end(&()));
new_transforms.push_or_extend(Transform::isomorphic(summary));
}
@@ -392,7 +380,7 @@ impl WrapSnapshot {
let old_snapshot = mem::replace(
self,
WrapSnapshot {
tab_snapshot: new_tab_snapshot,
char_snapshot: new_char_snapshot,
transforms: new_transforms,
interpolated: true,
},
@@ -403,7 +391,7 @@ impl WrapSnapshot {
async fn update(
&mut self,
new_tab_snapshot: TabSnapshot,
new_char_snapshot: CharSnapshot,
tab_edits: &[TabEdit],
wrap_width: Pixels,
line_wrapper: &mut LineWrapper,
@@ -440,27 +428,27 @@ impl WrapSnapshot {
new_transforms = self.transforms.clone();
} else {
let mut row_edits = row_edits.into_iter().peekable();
let mut old_cursor = self.transforms.cursor::<TabPoint>(&());
let mut old_cursor = self.transforms.cursor::<CharPoint>(&());
new_transforms = old_cursor.slice(
&TabPoint::new(row_edits.peek().unwrap().old_rows.start, 0),
&CharPoint::new(row_edits.peek().unwrap().old_rows.start, 0),
Bias::Right,
&(),
);
while let Some(edit) = row_edits.next() {
if edit.new_rows.start > new_transforms.summary().input.lines.row {
let summary = new_tab_snapshot.text_summary_for_range(
TabPoint(new_transforms.summary().input.lines)
..TabPoint::new(edit.new_rows.start, 0),
let summary = new_char_snapshot.text_summary_for_range(
CharPoint(new_transforms.summary().input.lines)
..CharPoint::new(edit.new_rows.start, 0),
);
new_transforms.push_or_extend(Transform::isomorphic(summary));
}
let mut line = String::new();
let mut remaining = None;
let mut chunks = new_tab_snapshot.chunks(
TabPoint::new(edit.new_rows.start, 0)..new_tab_snapshot.max_point(),
let mut chunks = new_char_snapshot.chunks(
CharPoint::new(edit.new_rows.start, 0)..new_char_snapshot.max_point(),
false,
Highlights::default(),
);
@@ -507,19 +495,19 @@ impl WrapSnapshot {
}
new_transforms.extend(edit_transforms, &());
old_cursor.seek_forward(&TabPoint::new(edit.old_rows.end, 0), Bias::Right, &());
old_cursor.seek_forward(&CharPoint::new(edit.old_rows.end, 0), Bias::Right, &());
if let Some(next_edit) = row_edits.peek() {
if next_edit.old_rows.start > old_cursor.end(&()).row() {
if old_cursor.end(&()) > TabPoint::new(edit.old_rows.end, 0) {
let summary = self.tab_snapshot.text_summary_for_range(
TabPoint::new(edit.old_rows.end, 0)..old_cursor.end(&()),
if old_cursor.end(&()) > CharPoint::new(edit.old_rows.end, 0) {
let summary = self.char_snapshot.text_summary_for_range(
CharPoint::new(edit.old_rows.end, 0)..old_cursor.end(&()),
);
new_transforms.push_or_extend(Transform::isomorphic(summary));
}
old_cursor.next(&());
new_transforms.append(
old_cursor.slice(
&TabPoint::new(next_edit.old_rows.start, 0),
&CharPoint::new(next_edit.old_rows.start, 0),
Bias::Right,
&(),
),
@@ -527,9 +515,9 @@ impl WrapSnapshot {
);
}
} else {
if old_cursor.end(&()) > TabPoint::new(edit.old_rows.end, 0) {
let summary = self.tab_snapshot.text_summary_for_range(
TabPoint::new(edit.old_rows.end, 0)..old_cursor.end(&()),
if old_cursor.end(&()) > CharPoint::new(edit.old_rows.end, 0) {
let summary = self.char_snapshot.text_summary_for_range(
CharPoint::new(edit.old_rows.end, 0)..old_cursor.end(&()),
);
new_transforms.push_or_extend(Transform::isomorphic(summary));
}
@@ -542,7 +530,7 @@ impl WrapSnapshot {
let old_snapshot = mem::replace(
self,
WrapSnapshot {
tab_snapshot: new_tab_snapshot,
char_snapshot: new_char_snapshot,
transforms: new_transforms,
interpolated: false,
},
@@ -595,17 +583,17 @@ impl WrapSnapshot {
) -> WrapChunks<'a> {
let output_start = WrapPoint::new(rows.start, 0);
let output_end = WrapPoint::new(rows.end, 0);
let mut transforms = self.transforms.cursor::<(WrapPoint, TabPoint)>(&());
let mut transforms = self.transforms.cursor::<(WrapPoint, CharPoint)>(&());
transforms.seek(&output_start, Bias::Right, &());
let mut input_start = TabPoint(transforms.start().1 .0);
let mut input_start = CharPoint(transforms.start().1 .0);
if transforms.item().map_or(false, |t| t.is_isomorphic()) {
input_start.0 += output_start.0 - transforms.start().0 .0;
}
let input_end = self
.to_tab_point(output_end)
.min(self.tab_snapshot.max_point());
.to_char_point(output_end)
.min(self.char_snapshot.max_point());
WrapChunks {
input_chunks: self.tab_snapshot.chunks(
input_chunks: self.char_snapshot.chunks(
input_start..input_end,
language_aware,
highlights,
@@ -614,7 +602,6 @@ impl WrapSnapshot {
output_position: output_start,
max_output_row: rows.end,
transforms,
snapshot: self,
}
}
@@ -623,7 +610,7 @@ impl WrapSnapshot {
}
pub fn line_len(&self, row: u32) -> u32 {
let mut cursor = self.transforms.cursor::<(WrapPoint, TabPoint)>(&());
let mut cursor = self.transforms.cursor::<(WrapPoint, CharPoint)>(&());
cursor.seek(&WrapPoint::new(row + 1, 0), Bias::Left, &());
if cursor
.item()
@@ -631,7 +618,7 @@ impl WrapSnapshot {
{
let overshoot = row - cursor.start().0.row();
let tab_row = cursor.start().1.row() + overshoot;
let tab_line_len = self.tab_snapshot.line_len(tab_row);
let tab_line_len = self.char_snapshot.line_len(tab_row);
if overshoot == 0 {
cursor.start().0.column() + (tab_line_len - cursor.start().1.column())
} else {
@@ -642,65 +629,6 @@ impl WrapSnapshot {
}
}
pub fn text_summary_for_range(&self, rows: Range<u32>) -> TextSummary {
let mut summary = TextSummary::default();
let start = WrapPoint::new(rows.start, 0);
let end = WrapPoint::new(rows.end, 0);
let mut cursor = self.transforms.cursor::<(WrapPoint, TabPoint)>(&());
cursor.seek(&start, Bias::Right, &());
if let Some(transform) = cursor.item() {
let start_in_transform = start.0 - cursor.start().0 .0;
let end_in_transform = cmp::min(end, cursor.end(&()).0).0 - cursor.start().0 .0;
if transform.is_isomorphic() {
let tab_start = TabPoint(cursor.start().1 .0 + start_in_transform);
let tab_end = TabPoint(cursor.start().1 .0 + end_in_transform);
summary += &self.tab_snapshot.text_summary_for_range(tab_start..tab_end);
} else {
debug_assert_eq!(start_in_transform.row, end_in_transform.row);
let indent_len = end_in_transform.column - start_in_transform.column;
summary += &TextSummary {
lines: Point::new(0, indent_len),
first_line_chars: indent_len,
last_line_chars: indent_len,
longest_row: 0,
longest_row_chars: indent_len,
};
}
cursor.next(&());
}
if rows.end > cursor.start().0.row() {
summary += &cursor
.summary::<_, TransformSummary>(&WrapPoint::new(rows.end, 0), Bias::Right, &())
.output;
if let Some(transform) = cursor.item() {
let end_in_transform = end.0 - cursor.start().0 .0;
if transform.is_isomorphic() {
let char_start = cursor.start().1;
let char_end = TabPoint(char_start.0 + end_in_transform);
summary += &self
.tab_snapshot
.text_summary_for_range(char_start..char_end);
} else {
debug_assert_eq!(end_in_transform, Point::new(1, 0));
summary += &TextSummary {
lines: Point::new(1, 0),
first_line_chars: 0,
last_line_chars: 0,
longest_row: 0,
longest_row_chars: 0,
};
}
}
}
summary
}
pub fn soft_wrap_indent(&self, row: u32) -> Option<u32> {
let mut cursor = self.transforms.cursor::<WrapPoint>(&());
cursor.seek(&WrapPoint::new(row + 1, 0), Bias::Right, &());
@@ -718,14 +646,14 @@ impl WrapSnapshot {
}
pub fn buffer_rows(&self, start_row: u32) -> WrapBufferRows {
let mut transforms = self.transforms.cursor::<(WrapPoint, TabPoint)>(&());
let mut transforms = self.transforms.cursor::<(WrapPoint, CharPoint)>(&());
transforms.seek(&WrapPoint::new(start_row, 0), Bias::Left, &());
let mut input_row = transforms.start().1.row();
if transforms.item().map_or(false, |t| t.is_isomorphic()) {
input_row += start_row - transforms.start().0.row();
}
let soft_wrapped = transforms.item().map_or(false, |t| !t.is_isomorphic());
let mut input_buffer_rows = self.tab_snapshot.buffer_rows(input_row);
let mut input_buffer_rows = self.char_snapshot.buffer_rows(input_row);
let input_buffer_row = input_buffer_rows.next().unwrap();
WrapBufferRows {
transforms,
@@ -737,26 +665,26 @@ impl WrapSnapshot {
}
}
pub fn to_tab_point(&self, point: WrapPoint) -> TabPoint {
let mut cursor = self.transforms.cursor::<(WrapPoint, TabPoint)>(&());
pub fn to_char_point(&self, point: WrapPoint) -> CharPoint {
let mut cursor = self.transforms.cursor::<(WrapPoint, CharPoint)>(&());
cursor.seek(&point, Bias::Right, &());
let mut tab_point = cursor.start().1 .0;
let mut char_point = cursor.start().1 .0;
if cursor.item().map_or(false, |t| t.is_isomorphic()) {
tab_point += point.0 - cursor.start().0 .0;
char_point += point.0 - cursor.start().0 .0;
}
TabPoint(tab_point)
CharPoint(char_point)
}
pub fn to_point(&self, point: WrapPoint, bias: Bias) -> Point {
self.tab_snapshot.to_point(self.to_tab_point(point), bias)
self.char_snapshot.to_point(self.to_char_point(point), bias)
}
pub fn make_wrap_point(&self, point: Point, bias: Bias) -> WrapPoint {
self.tab_point_to_wrap_point(self.tab_snapshot.make_tab_point(point, bias))
self.char_point_to_wrap_point(self.char_snapshot.make_char_point(point, bias))
}
pub fn tab_point_to_wrap_point(&self, point: TabPoint) -> WrapPoint {
let mut cursor = self.transforms.cursor::<(TabPoint, WrapPoint)>(&());
pub fn char_point_to_wrap_point(&self, point: CharPoint) -> WrapPoint {
let mut cursor = self.transforms.cursor::<(CharPoint, WrapPoint)>(&());
cursor.seek(&point, Bias::Right, &());
WrapPoint(cursor.start().1 .0 + (point.0 - cursor.start().0 .0))
}
@@ -771,7 +699,10 @@ impl WrapSnapshot {
}
}
self.tab_point_to_wrap_point(self.tab_snapshot.clip_point(self.to_tab_point(point), bias))
self.char_point_to_wrap_point(
self.char_snapshot
.clip_point(self.to_char_point(point), bias),
)
}
pub fn prev_row_boundary(&self, mut point: WrapPoint) -> u32 {
@@ -781,7 +712,7 @@ impl WrapSnapshot {
*point.column_mut() = 0;
let mut cursor = self.transforms.cursor::<(WrapPoint, TabPoint)>(&());
let mut cursor = self.transforms.cursor::<(WrapPoint, CharPoint)>(&());
cursor.seek(&point, Bias::Right, &());
if cursor.item().is_none() {
cursor.prev(&());
@@ -801,7 +732,7 @@ impl WrapSnapshot {
pub fn next_row_boundary(&self, mut point: WrapPoint) -> Option<u32> {
point.0 += Point::new(1, 0);
let mut cursor = self.transforms.cursor::<(WrapPoint, TabPoint)>(&());
let mut cursor = self.transforms.cursor::<(WrapPoint, CharPoint)>(&());
cursor.seek(&point, Bias::Right, &());
while let Some(transform) = cursor.item() {
if transform.is_isomorphic() && cursor.start().1.column() == 0 {
@@ -814,27 +745,12 @@ impl WrapSnapshot {
None
}
#[cfg(test)]
pub fn text(&self) -> String {
self.text_chunks(0).collect()
}
#[cfg(test)]
pub fn text_chunks(&self, wrap_row: u32) -> impl Iterator<Item = &str> {
self.chunks(
wrap_row..self.max_point().row() + 1,
false,
Highlights::default(),
)
.map(|h| h.text)
}
fn check_invariants(&self) {
#[cfg(test)]
{
assert_eq!(
TabPoint::from(self.transforms.summary().input.lines),
self.tab_snapshot.max_point()
CharPoint::from(self.transforms.summary().input.lines),
self.char_snapshot.max_point()
);
{
@@ -847,18 +763,18 @@ impl WrapSnapshot {
}
let text = language::Rope::from(self.text().as_str());
let mut input_buffer_rows = self.tab_snapshot.buffer_rows(0);
let mut input_buffer_rows = self.char_snapshot.buffer_rows(0);
let mut expected_buffer_rows = Vec::new();
let mut prev_tab_row = 0;
for display_row in 0..=self.max_point().row() {
let tab_point = self.to_tab_point(WrapPoint::new(display_row, 0));
if tab_point.row() == prev_tab_row && display_row != 0 {
let char_point = self.to_char_point(WrapPoint::new(display_row, 0));
if char_point.row() == prev_tab_row && display_row != 0 {
expected_buffer_rows.push(None);
} else {
expected_buffer_rows.push(input_buffer_rows.next().unwrap());
}
prev_tab_row = tab_point.row();
prev_tab_row = char_point.row();
assert_eq!(self.line_len(display_row), text.line_len(display_row));
}
@@ -875,26 +791,6 @@ impl WrapSnapshot {
}
}
impl<'a> WrapChunks<'a> {
pub(crate) fn seek(&mut self, rows: Range<u32>) {
let output_start = WrapPoint::new(rows.start, 0);
let output_end = WrapPoint::new(rows.end, 0);
self.transforms.seek(&output_start, Bias::Right, &());
let mut input_start = TabPoint(self.transforms.start().1 .0);
if self.transforms.item().map_or(false, |t| t.is_isomorphic()) {
input_start.0 += output_start.0 - self.transforms.start().0 .0;
}
let input_end = self
.snapshot
.to_tab_point(output_end)
.min(self.snapshot.tab_snapshot.max_point());
self.input_chunks.seek(input_start..input_end);
self.input_chunk = Chunk::default();
self.output_position = output_start;
self.max_output_row = rows.end;
}
}
impl<'a> Iterator for WrapChunks<'a> {
type Item = Chunk<'a>;
@@ -942,13 +838,11 @@ impl<'a> Iterator for WrapChunks<'a> {
} else {
*self.output_position.column_mut() += char_len as u32;
}
if self.output_position >= transform_end {
self.transforms.next(&());
break;
}
}
let (prefix, suffix) = self.input_chunk.text.split_at(input_len);
self.input_chunk.text = suffix;
Some(Chunk {
@@ -1103,7 +997,7 @@ impl sum_tree::Summary for TransformSummary {
}
}
impl<'a> sum_tree::Dimension<'a, TransformSummary> for TabPoint {
impl<'a> sum_tree::Dimension<'a, TransformSummary> for CharPoint {
fn zero(_cx: &()) -> Self {
Default::default()
}
@@ -1113,7 +1007,7 @@ impl<'a> sum_tree::Dimension<'a, TransformSummary> for TabPoint {
}
}
impl<'a> sum_tree::SeekTarget<'a, TransformSummary, TransformSummary> for TabPoint {
impl<'a> sum_tree::SeekTarget<'a, TransformSummary, TransformSummary> for CharPoint {
fn cmp(&self, cursor_location: &TransformSummary, _: &()) -> std::cmp::Ordering {
Ord::cmp(&self.0, &cursor_location.input.lines)
}
@@ -1161,7 +1055,7 @@ fn consolidate_wrap_edits(edits: Vec<WrapEdit>) -> Vec<WrapEdit> {
mod tests {
use super::*;
use crate::{
display_map::{fold_map::FoldMap, inlay_map::InlayMap, tab_map::TabMap},
display_map::{char_map::CharMap, fold_map::FoldMap, inlay_map::InlayMap},
MultiBuffer,
};
use gpui::{font, px, test::observe};
@@ -1213,9 +1107,9 @@ mod tests {
log::info!("InlayMap text: {:?}", inlay_snapshot.text());
let (mut fold_map, fold_snapshot) = FoldMap::new(inlay_snapshot.clone());
log::info!("FoldMap text: {:?}", fold_snapshot.text());
let (mut tab_map, _) = TabMap::new(fold_snapshot.clone(), tab_size);
let tabs_snapshot = tab_map.set_max_expansion_column(32);
log::info!("TabMap text: {:?}", tabs_snapshot.text());
let (mut char_map, _) = CharMap::new(fold_snapshot.clone(), tab_size);
let tabs_snapshot = char_map.set_max_expansion_column(32);
log::info!("CharMap text: {:?}", tabs_snapshot.text());
let mut line_wrapper = text_system.line_wrapper(font.clone(), font_size);
let unwrapped_text = tabs_snapshot.text();
@@ -1261,7 +1155,7 @@ mod tests {
20..=39 => {
for (fold_snapshot, fold_edits) in fold_map.randomly_mutate(&mut rng) {
let (tabs_snapshot, tab_edits) =
tab_map.sync(fold_snapshot, fold_edits, tab_size);
char_map.sync(fold_snapshot, fold_edits, tab_size);
let (mut snapshot, wrap_edits) =
wrap_map.update(cx, |map, cx| map.sync(tabs_snapshot, tab_edits, cx));
snapshot.check_invariants();
@@ -1274,7 +1168,7 @@ mod tests {
inlay_map.randomly_mutate(&mut next_inlay_id, &mut rng);
let (fold_snapshot, fold_edits) = fold_map.read(inlay_snapshot, inlay_edits);
let (tabs_snapshot, tab_edits) =
tab_map.sync(fold_snapshot, fold_edits, tab_size);
char_map.sync(fold_snapshot, fold_edits, tab_size);
let (mut snapshot, wrap_edits) =
wrap_map.update(cx, |map, cx| map.sync(tabs_snapshot, tab_edits, cx));
snapshot.check_invariants();
@@ -1298,8 +1192,8 @@ mod tests {
log::info!("InlayMap text: {:?}", inlay_snapshot.text());
let (fold_snapshot, fold_edits) = fold_map.read(inlay_snapshot, inlay_edits);
log::info!("FoldMap text: {:?}", fold_snapshot.text());
let (tabs_snapshot, tab_edits) = tab_map.sync(fold_snapshot, fold_edits, tab_size);
log::info!("TabMap text: {:?}", tabs_snapshot.text());
let (tabs_snapshot, tab_edits) = char_map.sync(fold_snapshot, fold_edits, tab_size);
log::info!("CharMap text: {:?}", tabs_snapshot.text());
let unwrapped_text = tabs_snapshot.text();
let expected_text = wrap_text(&unwrapped_text, wrap_width, &mut line_wrapper);
@@ -1345,7 +1239,7 @@ mod tests {
if tab_size.get() == 1
|| !wrapped_snapshot
.tab_snapshot
.char_snapshot
.fold_snapshot
.text()
.contains('\t')
@@ -1442,6 +1336,19 @@ mod tests {
}
impl WrapSnapshot {
pub fn text(&self) -> String {
self.text_chunks(0).collect()
}
pub fn text_chunks(&self, wrap_row: u32) -> impl Iterator<Item = &str> {
self.chunks(
wrap_row..self.max_point().row() + 1,
false,
Highlights::default(),
)
.map(|h| h.text)
}
fn verify_chunks(&mut self, rng: &mut impl Rng) {
for _ in 0..5 {
let mut end_row = rng.gen_range(0..=self.max_point().row());

View File

@@ -223,7 +223,6 @@ pub fn render_parsed_markdown(
}
}),
);
// hello
let mut links = Vec::new();
let mut link_ranges = Vec::new();
@@ -3283,25 +3282,10 @@ impl Editor {
&bracket_pair.start[..prefix_len],
));
let is_closing_quote = if bracket_pair.end == bracket_pair.start
&& bracket_pair.start.len() == 1
{
let target = bracket_pair.start.chars().next().unwrap();
let current_line_count = snapshot
.reversed_chars_at(selection.start)
.take_while(|&c| c != '\n')
.filter(|&c| c == target)
.count();
current_line_count % 2 == 1
} else {
false
};
if autoclose
&& bracket_pair.close
&& following_text_allows_autoclose
&& preceding_text_matches_prefix
&& !is_closing_quote
{
let anchor = snapshot.anchor_before(selection.end);
new_selections.push((selection.map(|_| anchor), text.len()));
@@ -3785,9 +3769,6 @@ impl Editor {
pub fn newline_below(&mut self, _: &NewlineBelow, cx: &mut ViewContext<Self>) {
let buffer = self.buffer.read(cx);
let snapshot = buffer.snapshot(cx);
//
//
//
let mut edits = Vec::new();
let mut rows = Vec::new();
@@ -10229,7 +10210,7 @@ impl Editor {
let block_id = this.insert_blocks(
[BlockProperties {
style: BlockStyle::Flex,
placement: BlockPlacement::Below(range.start),
position: range.start,
height: 1,
render: Box::new({
let rename_editor = rename_editor.clone();
@@ -10265,6 +10246,7 @@ impl Editor {
.into_any_element()
}
}),
disposition: BlockDisposition::Below,
priority: 0,
}],
Some(Autoscroll::fit()),
@@ -10549,11 +10531,10 @@ impl Editor {
let message_height = diagnostic.message.matches('\n').count() as u32 + 1;
BlockProperties {
style: BlockStyle::Fixed,
placement: BlockPlacement::Below(
buffer.anchor_after(entry.range.start),
),
position: buffer.anchor_after(entry.range.start),
height: message_height,
render: diagnostic_block_renderer(diagnostic, None, true, true),
disposition: BlockDisposition::Below,
priority: 0,
}
}),
@@ -10747,44 +10728,15 @@ impl Editor {
self.fold_ranges(fold_ranges, true, cx);
}
fn fold_at_level(&mut self, fold_at: &FoldAtLevel, cx: &mut ViewContext<Self>) {
let fold_at_level = fold_at.level;
let snapshot = self.buffer.read(cx).snapshot(cx);
let mut fold_ranges = Vec::new();
let mut stack = vec![(0, snapshot.max_buffer_row().0, 1)];
while let Some((mut start_row, end_row, current_level)) = stack.pop() {
while start_row < end_row {
match self.snapshot(cx).foldable_range(MultiBufferRow(start_row)) {
Some(foldable_range) => {
let nested_start_row = foldable_range.0.start.row + 1;
let nested_end_row = foldable_range.0.end.row;
if current_level == fold_at_level {
fold_ranges.push(foldable_range);
}
if current_level <= fold_at_level {
stack.push((nested_start_row, nested_end_row, current_level + 1));
}
start_row = nested_end_row + 1;
}
None => start_row += 1,
}
}
}
self.fold_ranges(fold_ranges, true, cx);
}
pub fn fold_all(&mut self, _: &actions::FoldAll, cx: &mut ViewContext<Self>) {
let mut fold_ranges = Vec::new();
let snapshot = self.buffer.read(cx).snapshot(cx);
let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx));
for row in 0..snapshot.max_buffer_row().0 {
if let Some(foldable_range) = self.snapshot(cx).foldable_range(MultiBufferRow(row)) {
fold_ranges.push(foldable_range);
for row in 0..display_map.max_buffer_row().0 {
if let Some((foldable_range, fold_text)) =
display_map.foldable_range(MultiBufferRow(row))
{
fold_ranges.push((foldable_range, fold_text));
}
}

View File

@@ -1080,112 +1080,6 @@ fn test_fold_action_multiple_line_breaks(cx: &mut TestAppContext) {
});
}
#[gpui::test]
fn test_fold_at_level(cx: &mut TestAppContext) {
init_test(cx, |_| {});
let view = cx.add_window(|cx| {
let buffer = MultiBuffer::build_simple(
&"
class Foo:
# Hello!
def a():
print(1)
def b():
print(2)
class Bar:
# World!
def a():
print(1)
def b():
print(2)
"
.unindent(),
cx,
);
build_editor(buffer.clone(), cx)
});
_ = view.update(cx, |view, cx| {
view.fold_at_level(&FoldAtLevel { level: 2 }, cx);
assert_eq!(
view.display_text(cx),
"
class Foo:
# Hello!
def a():⋯
def b():⋯
class Bar:
# World!
def a():⋯
def b():⋯
"
.unindent(),
);
view.fold_at_level(&FoldAtLevel { level: 1 }, cx);
assert_eq!(
view.display_text(cx),
"
class Foo:⋯
class Bar:⋯
"
.unindent(),
);
view.unfold_all(&UnfoldAll, cx);
view.fold_at_level(&FoldAtLevel { level: 0 }, cx);
assert_eq!(
view.display_text(cx),
"
class Foo:
# Hello!
def a():
print(1)
def b():
print(2)
class Bar:
# World!
def a():
print(1)
def b():
print(2)
"
.unindent(),
);
assert_eq!(view.display_text(cx), view.buffer.read(cx).read(cx).text());
});
}
#[gpui::test]
fn test_move_cursor(cx: &mut TestAppContext) {
init_test(cx, |_| {});
@@ -3974,7 +3868,8 @@ fn test_move_line_up_down_with_blocks(cx: &mut TestAppContext) {
editor.insert_blocks(
[BlockProperties {
style: BlockStyle::Fixed,
placement: BlockPlacement::Below(snapshot.anchor_after(Point::new(2, 0))),
position: snapshot.anchor_after(Point::new(2, 0)),
disposition: BlockDisposition::Below,
height: 1,
render: Box::new(|_| div().into_any()),
priority: 0,

View File

@@ -68,6 +68,7 @@ use sum_tree::Bias;
use theme::{ActiveTheme, Appearance, PlayerColor};
use ui::prelude::*;
use ui::{h_flex, ButtonLike, ButtonStyle, ContextMenu, Tooltip};
use unicode_segmentation::UnicodeSegmentation;
use util::RangeExt;
use util::ResultExt;
use workspace::{item::Item, Workspace};
@@ -336,7 +337,6 @@ impl EditorElement {
register_action(view, cx, Editor::open_url);
register_action(view, cx, Editor::open_file);
register_action(view, cx, Editor::fold);
register_action(view, cx, Editor::fold_at_level);
register_action(view, cx, Editor::fold_all);
register_action(view, cx, Editor::fold_at);
register_action(view, cx, Editor::fold_recursive);
@@ -445,7 +445,6 @@ impl EditorElement {
register_action(view, cx, Editor::accept_inline_completion);
register_action(view, cx, Editor::revert_file);
register_action(view, cx, Editor::revert_selected_hunks);
register_action(view, cx, Editor::apply_all_diff_hunks);
register_action(view, cx, Editor::apply_selected_diff_hunks);
register_action(view, cx, Editor::open_active_item_in_terminal);
register_action(view, cx, Editor::reload_file)
@@ -1027,23 +1026,21 @@ impl EditorElement {
}
let block_text = if let CursorShape::Block = selection.cursor_shape {
snapshot
.display_chars_at(cursor_position)
.next()
.grapheme_at(cursor_position)
.or_else(|| {
if cursor_column == 0 {
snapshot
.placeholder_text()
.and_then(|s| s.chars().next())
.map(|c| (c, cursor_position))
snapshot.placeholder_text().and_then(|s| {
s.graphemes(true).next().map(|s| s.to_owned())
})
} else {
None
}
})
.and_then(|(character, _)| {
let text = if character == '\n' {
.and_then(|grapheme| {
let text = if grapheme == "\n" {
SharedString::from(" ")
} else {
SharedString::from(character.to_string())
SharedString::from(grapheme)
};
let len = text.len();
@@ -2074,7 +2071,7 @@ impl EditorElement {
let mut element = match block {
Block::Custom(block) => {
let align_to = block
.start()
.position()
.to_point(&snapshot.buffer_snapshot)
.to_display_point(snapshot);
let anchor_x = text_x
@@ -6297,7 +6294,7 @@ fn compute_auto_height_layout(
mod tests {
use super::*;
use crate::{
display_map::{BlockPlacement, BlockProperties},
display_map::{BlockDisposition, BlockProperties},
editor_tests::{init_test, update_test_language_settings},
Editor, MultiBuffer,
};
@@ -6553,8 +6550,9 @@ mod tests {
editor.insert_blocks(
[BlockProperties {
style: BlockStyle::Fixed,
placement: BlockPlacement::Above(Anchor::min()),
disposition: BlockDisposition::Above,
height: 3,
position: Anchor::min(),
render: Box::new(|cx| div().h(3. * cx.line_height()).into_any()),
priority: 0,
}],

View File

@@ -1,6 +1,7 @@
use crate::{
display_map::{InlayOffset, ToDisplayPoint},
hover_links::{InlayHighlight, RangeInEditor},
is_invisible,
scroll::ScrollAmount,
Anchor, AnchorRangeExt, DisplayPoint, DisplayRow, Editor, EditorSettings, EditorSnapshot,
Hover, RangeToAnchorExt,
@@ -11,7 +12,7 @@ use gpui::{
StyleRefinement, Styled, Task, TextStyleRefinement, View, ViewContext,
};
use itertools::Itertools;
use language::{DiagnosticEntry, Language, LanguageRegistry};
use language::{Diagnostic, DiagnosticEntry, Language, LanguageRegistry};
use lsp::DiagnosticSeverity;
use markdown::{Markdown, MarkdownStyle};
use multi_buffer::ToOffset;
@@ -199,7 +200,6 @@ fn show_hover(
if editor.pending_rename.is_some() {
return None;
}
let snapshot = editor.snapshot(cx);
let (buffer, buffer_position) = editor
@@ -259,7 +259,7 @@ fn show_hover(
}
// If there's a diagnostic, assign it on the hover state and notify
let local_diagnostic = snapshot
let mut local_diagnostic = snapshot
.buffer_snapshot
.diagnostics_in_range::<_, usize>(anchor..anchor, false)
// Find the entry with the most specific range
@@ -281,6 +281,42 @@ fn show_hover(
})
});
if let Some(invisible) = snapshot
.buffer_snapshot
.chars_at(anchor)
.next()
.filter(|&c| is_invisible(c))
{
let after = snapshot.buffer_snapshot.anchor_after(
anchor.to_offset(&snapshot.buffer_snapshot) + invisible.len_utf8(),
);
local_diagnostic = Some(DiagnosticEntry {
diagnostic: Diagnostic {
severity: DiagnosticSeverity::HINT,
message: format!("Unicode character U+{:02X}", invisible as u32),
..Default::default()
},
range: anchor..after,
})
} else if let Some(invisible) = snapshot
.buffer_snapshot
.reversed_chars_at(anchor)
.next()
.filter(|&c| is_invisible(c))
{
let before = snapshot.buffer_snapshot.anchor_before(
anchor.to_offset(&snapshot.buffer_snapshot) - invisible.len_utf8(),
);
local_diagnostic = Some(DiagnosticEntry {
diagnostic: Diagnostic {
severity: DiagnosticSeverity::HINT,
message: format!("Unicode character U+{:02X}", invisible as u32),
..Default::default()
},
range: before..anchor,
})
}
let diagnostic_popover = if let Some(local_diagnostic) = local_diagnostic {
let text = match local_diagnostic.diagnostic.source {
Some(ref source) => {
@@ -288,7 +324,6 @@ fn show_hover(
}
None => local_diagnostic.diagnostic.message.clone(),
};
let mut border_color: Option<Hsla> = None;
let mut background_color: Option<Hsla> = None;
@@ -344,7 +379,6 @@ fn show_hover(
Markdown::new_text(text, markdown_style.clone(), None, cx, None)
})
.ok();
Some(DiagnosticPopover {
local_diagnostic,
primary_diagnostic,
@@ -432,7 +466,6 @@ fn show_hover(
cx.notify();
cx.refresh();
})?;
anyhow::Ok(())
}
.log_err()

View File

@@ -16,10 +16,10 @@ use util::RangeExt;
use workspace::Item;
use crate::{
editor_settings::CurrentLineHighlight, hunk_status, hunks_for_selections, ApplyAllDiffHunks,
ApplyDiffHunk, BlockPlacement, BlockProperties, BlockStyle, CustomBlockId, DiffRowHighlight,
DisplayRow, DisplaySnapshot, Editor, EditorElement, ExpandAllHunkDiffs, GoToHunk, GoToPrevHunk,
RevertFile, RevertSelectedHunks, ToDisplayPoint, ToggleHunkDiff,
editor_settings::CurrentLineHighlight, hunk_status, hunks_for_selections, ApplyDiffHunk,
BlockDisposition, BlockProperties, BlockStyle, CustomBlockId, DiffRowHighlight, DisplayRow,
DisplaySnapshot, Editor, EditorElement, ExpandAllHunkDiffs, GoToHunk, GoToPrevHunk, RevertFile,
RevertSelectedHunks, ToDisplayPoint, ToggleHunkDiff,
};
#[derive(Debug, Clone)]
@@ -352,11 +352,7 @@ impl Editor {
None
}
pub(crate) fn apply_all_diff_hunks(
&mut self,
_: &ApplyAllDiffHunks,
cx: &mut ViewContext<Self>,
) {
pub(crate) fn apply_all_diff_hunks(&mut self, cx: &mut ViewContext<Self>) {
let buffers = self.buffer.read(cx).all_buffers();
for branch_buffer in buffers {
branch_buffer.update(cx, |branch_buffer, cx| {
@@ -421,9 +417,10 @@ impl Editor {
};
BlockProperties {
placement: BlockPlacement::Above(hunk.multi_buffer_range.start),
position: hunk.multi_buffer_range.start,
height: 1,
style: BlockStyle::Sticky,
disposition: BlockDisposition::Above,
priority: 0,
render: Box::new({
let editor = cx.view().clone();
@@ -703,9 +700,10 @@ impl Editor {
let hunk = hunk.clone();
let height = editor_height.max(deleted_text_height);
BlockProperties {
placement: BlockPlacement::Above(hunk.multi_buffer_range.start),
position: hunk.multi_buffer_range.start,
height,
style: BlockStyle::Flex,
disposition: BlockDisposition::Above,
priority: 0,
render: Box::new(move |cx| {
let width = EditorElement::diff_hunk_strip_width(cx.line_height());

View File

@@ -1,4 +1,4 @@
use crate::{ApplyAllDiffHunks, Editor, EditorEvent, SemanticsProvider};
use crate::{Editor, EditorEvent, SemanticsProvider};
use collections::HashSet;
use futures::{channel::mpsc, future::join_all};
use gpui::{AppContext, EventEmitter, FocusableView, Model, Render, Subscription, Task, View};
@@ -8,7 +8,7 @@ use project::Project;
use smol::stream::StreamExt;
use std::{any::TypeId, ops::Range, rc::Rc, time::Duration};
use text::ToOffset;
use ui::{prelude::*, ButtonLike, KeyBinding};
use ui::prelude::*;
use workspace::{
searchable::SearchableItemHandle, Item, ItemHandle as _, ToolbarItemEvent, ToolbarItemLocation,
ToolbarItemView, Workspace,
@@ -232,10 +232,7 @@ impl ProposedChangesEditor {
impl Render for ProposedChangesEditor {
fn render(&mut self, _cx: &mut ViewContext<Self>) -> impl IntoElement {
div()
.size_full()
.key_context("ProposedChangesEditor")
.child(self.editor.clone())
self.editor.clone()
}
}
@@ -334,21 +331,17 @@ impl ProposedChangesEditorToolbar {
}
impl Render for ProposedChangesEditorToolbar {
fn render(&mut self, cx: &mut ViewContext<Self>) -> impl IntoElement {
let button_like = ButtonLike::new("apply-changes").child(Label::new("Apply All"));
match &self.current_editor {
Some(editor) => {
let focus_handle = editor.focus_handle(cx);
let keybinding = KeyBinding::for_action_in(&ApplyAllDiffHunks, &focus_handle, cx)
.map(|binding| binding.into_any_element());
button_like.children(keybinding).on_click({
move |_event, cx| focus_handle.dispatch_action(&ApplyAllDiffHunks, cx)
})
fn render(&mut self, _cx: &mut ViewContext<Self>) -> impl IntoElement {
let editor = self.current_editor.clone();
Button::new("apply-changes", "Apply All").on_click(move |_, cx| {
if let Some(editor) = &editor {
editor.update(cx, |editor, cx| {
editor.editor.update(cx, |editor, cx| {
editor.apply_all_diff_hunks(cx);
})
});
}
None => button_like.disabled(true),
}
})
}
}

View File

@@ -1,7 +1,6 @@
use std::{
borrow::Cow,
ops::{Deref, DerefMut, Range},
path::Path,
sync::Arc,
};
@@ -67,12 +66,10 @@ impl EditorLspTestContext {
);
language_registry.add(Arc::new(language));
let root = Self::root_path();
app_state
.fs
.as_fake()
.insert_tree(root, json!({ "dir": { file_name.clone(): "" }}))
.insert_tree("/root", json!({ "dir": { file_name.clone(): "" }}))
.await;
let window = cx.add_window(|cx| Workspace::test_new(project.clone(), cx));
@@ -82,7 +79,7 @@ impl EditorLspTestContext {
let mut cx = VisualTestContext::from_window(*window.deref(), cx);
project
.update(&mut cx, |project, cx| {
project.find_or_create_worktree(root, true, cx)
project.find_or_create_worktree("/root", true, cx)
})
.await
.unwrap();
@@ -111,7 +108,7 @@ impl EditorLspTestContext {
},
lsp,
workspace,
buffer_lsp_url: lsp::Url::from_file_path(root.join("dir").join(file_name)).unwrap(),
buffer_lsp_url: lsp::Url::from_file_path(format!("/root/dir/{file_name}")).unwrap(),
}
}
@@ -126,7 +123,6 @@ impl EditorLspTestContext {
path_suffixes: vec!["rs".to_string()],
..Default::default()
},
line_comments: vec!["// ".into(), "/// ".into(), "//! ".into()],
..Default::default()
},
Some(tree_sitter_rust::LANGUAGE.into()),
@@ -313,16 +309,6 @@ impl EditorLspTestContext {
pub fn notify<T: notification::Notification>(&self, params: T::Params) {
self.lsp.notify::<T>(params);
}
#[cfg(target_os = "windows")]
fn root_path() -> &'static Path {
Path::new("C:\\root")
}
#[cfg(not(target_os = "windows"))]
fn root_path() -> &'static Path {
Path::new("/root")
}
}
impl Deref for EditorLspTestContext {

View File

@@ -128,8 +128,7 @@ impl SlashCommand for ExtensionSlashCommand {
})
.collect(),
run_commands_in_text: false,
}
.to_event_stream())
})
})
}
}

View File

@@ -5,7 +5,6 @@ mod file_finder_settings;
mod new_path_prompt;
mod open_path_prompt;
use futures::future::join_all;
pub use open_path_prompt::OpenPathDelegate;
use collections::HashMap;
@@ -60,7 +59,7 @@ impl FileFinder {
fn register(workspace: &mut Workspace, _: &mut ViewContext<Workspace>) {
workspace.register_action(|workspace, action: &workspace::ToggleFileFinder, cx| {
let Some(file_finder) = workspace.active_modal::<Self>(cx) else {
Self::open(workspace, action.separate_history, cx).detach();
Self::open(workspace, action.separate_history, cx);
return;
};
@@ -73,13 +72,8 @@ impl FileFinder {
});
}
fn open(
workspace: &mut Workspace,
separate_history: bool,
cx: &mut ViewContext<Workspace>,
) -> Task<()> {
fn open(workspace: &mut Workspace, separate_history: bool, cx: &mut ViewContext<Workspace>) {
let project = workspace.project().read(cx);
let fs = project.fs();
let currently_opened_path = workspace
.active_item(cx)
@@ -94,51 +88,28 @@ impl FileFinder {
let history_items = workspace
.recent_navigation_history(Some(MAX_RECENT_SELECTIONS), cx)
.into_iter()
.filter_map(|(project_path, abs_path)| {
if project.entry_for_path(&project_path, cx).is_some() {
return Some(Task::ready(Some(FoundPath::new(project_path, abs_path))));
}
let abs_path = abs_path?;
if project.is_local() {
let fs = fs.clone();
Some(cx.background_executor().spawn(async move {
if fs.is_file(&abs_path).await {
Some(FoundPath::new(project_path, Some(abs_path)))
} else {
None
}
}))
} else {
Some(Task::ready(Some(FoundPath::new(
project_path,
Some(abs_path),
))))
}
.filter(|(_, history_abs_path)| match history_abs_path {
Some(abs_path) => history_file_exists(abs_path),
None => true,
})
.map(|(history_path, abs_path)| FoundPath::new(history_path, abs_path))
.collect::<Vec<_>>();
cx.spawn(move |workspace, mut cx| async move {
let history_items = join_all(history_items).await.into_iter().flatten();
workspace
.update(&mut cx, |workspace, cx| {
let project = workspace.project().clone();
let weak_workspace = cx.view().downgrade();
workspace.toggle_modal(cx, |cx| {
let delegate = FileFinderDelegate::new(
cx.view().downgrade(),
weak_workspace,
project,
currently_opened_path,
history_items.collect(),
separate_history,
cx,
);
let project = workspace.project().clone();
let weak_workspace = cx.view().downgrade();
workspace.toggle_modal(cx, |cx| {
let delegate = FileFinderDelegate::new(
cx.view().downgrade(),
weak_workspace,
project,
currently_opened_path,
history_items,
separate_history,
cx,
);
FileFinder::new(delegate, cx)
});
})
.ok();
})
FileFinder::new(delegate, cx)
});
}
fn new(delegate: FileFinderDelegate, cx: &mut ViewContext<Self>) -> Self {
@@ -485,6 +456,16 @@ impl FoundPath {
const MAX_RECENT_SELECTIONS: usize = 20;
#[cfg(not(test))]
fn history_file_exists(abs_path: &PathBuf) -> bool {
abs_path.exists()
}
#[cfg(test)]
fn history_file_exists(abs_path: &Path) -> bool {
!abs_path.ends_with("nonexistent.rs")
}
pub enum Event {
Selected(ProjectPath),
Dismissed,
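
This hunk replaces the per-entry async fs.is_file check (and the Task/spawn plumbing around toggle_modal) with a synchronous filter over the recent-history entries, backed by a history_file_exists helper that is stubbed under cfg(test). The stub treats any path ending in nonexistent.rs as missing, which is also why the test file below no longer needs the explicit app_state.fs.remove_file call. A self-contained sketch of the filter pattern follows; prune_history is a hypothetical wrapper standing in for the real history handling in FileFinder::open.

    use std::path::{Path, PathBuf};

    // Production: ask the real filesystem.
    #[cfg(not(test))]
    fn history_file_exists(abs_path: &Path) -> bool {
        abs_path.exists()
    }

    // Tests: no real files on disk, so answer deterministically.
    #[cfg(test)]
    fn history_file_exists(abs_path: &Path) -> bool {
        !abs_path.ends_with("nonexistent.rs")
    }

    /// Keep an entry when it has no absolute path (untitled/remote entries)
    /// or when its file still exists on disk.
    fn prune_history(history: Vec<(String, Option<PathBuf>)>) -> Vec<(String, Option<PathBuf>)> {
        history
            .into_iter()
            .filter(|(_, abs_path)| match abs_path {
                Some(path) => history_file_exists(path),
                None => true,
            })
            .collect()
    }

    fn main() {
        let kept = prune_history(vec![
            ("untitled".into(), None),
            ("gone.rs".into(), Some(PathBuf::from("/definitely/not/there/gone.rs"))),
        ]);
        // Only the pathless entry survives when the file is missing.
        assert_eq!(kept.len(), 1);
    }

The trade-off shown in the diff: the synchronous version blocks on a stat per history entry (bounded by MAX_RECENT_SELECTIONS), but open no longer has to return a Task or reacquire the workspace inside a spawn.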

View File

@@ -4,7 +4,7 @@ use super::*;
use editor::Editor;
use gpui::{Entity, TestAppContext, VisualTestContext};
use menu::{Confirm, SelectNext, SelectPrev};
use project::{RemoveOptions, FS_WATCH_LATENCY};
use project::FS_WATCH_LATENCY;
use serde_json::json;
use workspace::{AppState, ToggleFileFinder, Workspace};
@@ -1450,15 +1450,6 @@ async fn test_nonexistent_history_items_not_shown(cx: &mut gpui::TestAppContext)
open_close_queried_buffer("non", 1, "nonexistent.rs", &workspace, cx).await;
open_close_queried_buffer("thi", 1, "third.rs", &workspace, cx).await;
open_close_queried_buffer("fir", 1, "first.rs", &workspace, cx).await;
app_state
.fs
.remove_file(
Path::new("/src/test/nonexistent.rs"),
RemoveOptions::default(),
)
.await
.unwrap();
cx.run_until_parked();
let picker = open_file_picker(&workspace, cx);
cx.simulate_input("rs");

View File

@@ -865,20 +865,14 @@ impl FakeFsState {
let mut entry_stack = Vec::new();
'outer: loop {
let mut path_components = path.components().peekable();
let mut prefix = None;
while let Some(component) = path_components.next() {
match component {
Component::Prefix(prefix_component) => prefix = Some(prefix_component),
Component::Prefix(_) => panic!("prefix paths aren't supported"),
Component::RootDir => {
entry_stack.clear();
entry_stack.push(self.root.clone());
canonical_path.clear();
match prefix {
Some(prefix_component) => {
canonical_path.push(prefix_component.as_os_str());
}
None => canonical_path.push("/"),
}
canonical_path.push("/");
}
Component::CurDir => {}
Component::ParentDir => {
@@ -1390,12 +1384,11 @@ impl Fs for FakeFs {
let mut created_dirs = Vec::new();
let mut cur_path = PathBuf::new();
for component in path.components() {
let should_skip = matches!(component, Component::Prefix(..) | Component::RootDir);
let mut state = self.state.lock();
cur_path.push(component);
if should_skip {
if cur_path == Path::new("/") {
continue;
}
let mut state = self.state.lock();
let inode = state.next_inode;
let mtime = state.next_mtime;
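
In the fake filesystem, the removed branch carried Windows Component::Prefix values (e.g. C:) into the canonical path, whereas the added code panics on prefixes and always roots at "/"; create_dir likewise now skips components only until the accumulated path equals "/". A rough, dependency-free sketch of that component walk is below; normalize is a hypothetical simplification that ignores the entry stack and symlink resolution the real FakeFs performs.

    use std::path::{Component, Path, PathBuf};

    /// Walk the components, resetting at the root and collapsing `..`.
    /// Windows prefix components (`C:`) are what the removed branch handled;
    /// here they are rejected, as in the new code.
    fn normalize(path: &Path) -> PathBuf {
        let mut canonical = PathBuf::new();
        for component in path.components() {
            match component {
                Component::Prefix(_) => panic!("prefix paths aren't supported"),
                Component::RootDir => canonical = PathBuf::from("/"),
                Component::CurDir => {}
                Component::ParentDir => {
                    canonical.pop();
                }
                Component::Normal(name) => canonical.push(name),
            }
        }
        canonical
    }

    fn main() {
        assert_eq!(
            normalize(Path::new("/root/dir/../file.rs")),
            PathBuf::from("/root/file.rs")
        );
    }

The practical effect of the hunk is that FakeFs only models Unix-style absolute paths on this branch.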

View File

@@ -48,7 +48,6 @@ where
item_count,
item_to_measure_index: 0,
render_items: Box::new(render_range),
decorations: Vec::new(),
interactivity: Interactivity {
element_id: Some(id),
base_style: Box::new(base_style),
@@ -70,7 +69,6 @@ pub struct UniformList {
item_to_measure_index: usize,
render_items:
Box<dyn for<'a> Fn(Range<usize>, &'a mut WindowContext) -> SmallVec<[AnyElement; 64]>>,
decorations: Vec<Box<dyn UniformListDecoration>>,
interactivity: Interactivity,
scroll_handle: Option<UniformListScrollHandle>,
sizing_behavior: ListSizingBehavior,
@@ -80,7 +78,6 @@ pub struct UniformList {
/// Frame state used by the [UniformList].
pub struct UniformListFrameState {
items: SmallVec<[AnyElement; 32]>,
decorations: SmallVec<[AnyElement; 1]>,
}
/// A handle for controlling the scroll position of a uniform list.
@@ -188,7 +185,6 @@ impl Element for UniformList {
layout_id,
UniformListFrameState {
items: SmallVec::new(),
decorations: SmallVec::new(),
},
)
}
@@ -296,10 +292,9 @@ impl Element for UniformList {
..cmp::min(last_visible_element_ix, self.item_count);
let mut items = (self.render_items)(visible_range.clone(), cx);
let content_mask = ContentMask { bounds };
cx.with_content_mask(Some(content_mask), |cx| {
for (mut item, ix) in items.into_iter().zip(visible_range.clone()) {
for (mut item, ix) in items.into_iter().zip(visible_range) {
let item_origin = padded_bounds.origin
+ point(
if can_scroll_horizontally {
@@ -322,34 +317,6 @@ impl Element for UniformList {
item.prepaint_at(item_origin, cx);
frame_state.items.push(item);
}
let bounds = Bounds::new(
padded_bounds.origin
+ point(
if can_scroll_horizontally {
scroll_offset.x + padding.left
} else {
scroll_offset.x
},
scroll_offset.y + padding.top,
),
padded_bounds.size,
);
for decoration in &self.decorations {
let mut decoration = decoration.as_ref().compute(
visible_range.clone(),
bounds,
item_height,
cx,
);
let available_space = size(
AvailableSpace::Definite(bounds.size.width),
AvailableSpace::Definite(bounds.size.height),
);
decoration.layout_as_root(available_space, cx);
decoration.prepaint_at(bounds.origin, cx);
frame_state.decorations.push(decoration);
}
});
}
@@ -371,9 +338,6 @@ impl Element for UniformList {
for item in &mut request_layout.items {
item.paint(cx);
}
for decoration in &mut request_layout.decorations {
decoration.paint(cx);
}
})
}
}
@@ -386,20 +350,6 @@ impl IntoElement for UniformList {
}
}
/// A decoration for a [`UniformList`]. This can be used for various things,
/// such as rendering indent guides, or other visual effects.
pub trait UniformListDecoration {
/// Compute the decoration element, given the visible range of list items,
/// the bounds of the list, and the height of each item.
fn compute(
&self,
visible_range: Range<usize>,
bounds: Bounds<Pixels>,
item_height: Pixels,
cx: &mut WindowContext,
) -> AnyElement;
}
impl UniformList {
/// Selects a specific list item for measurement.
pub fn with_width_from_item(mut self, item_index: Option<usize>) -> Self {
@@ -432,12 +382,6 @@ impl UniformList {
self
}
/// Adds a decoration element to the list.
pub fn with_decoration(mut self, decoration: impl UniformListDecoration + 'static) -> Self {
self.decorations.push(Box::new(decoration));
self
}
fn measure_item(&self, list_width: Option<Pixels>, cx: &mut WindowContext) -> Size<Pixels> {
if self.item_count == 0 {
return Size::default();
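
This hunk removes UniformList's decoration hook end to end: the UniformListDecoration trait, the decorations fields on the element and its frame state, the with_decoration builder, and the prepaint/paint passes that drew decorations over the visible items. A dependency-free sketch of the pattern being removed follows; plain Strings stand in for gpui's AnyElement, and Bounds/Pixels are elided.

    use std::ops::Range;

    // Stand-in for gpui's AnyElement.
    type Element = String;

    /// The shape of the removed hook: given the visible rows and row height,
    /// produce one extra element (indent guides, drop markers, ...) drawn over the list.
    trait ListDecoration {
        fn compute(&self, visible_range: Range<usize>, item_height: f32) -> Element;
    }

    struct IndentGuides;

    impl ListDecoration for IndentGuides {
        fn compute(&self, visible_range: Range<usize>, item_height: f32) -> Element {
            format!("indent guides for rows {visible_range:?} at {item_height}px per row")
        }
    }

    #[derive(Default)]
    struct UniformList {
        decorations: Vec<Box<dyn ListDecoration>>,
    }

    impl UniformList {
        /// Mirrors the removed `with_decoration` builder method.
        fn with_decoration(mut self, decoration: impl ListDecoration + 'static) -> Self {
            self.decorations.push(Box::new(decoration));
            self
        }

        /// Prepaint: the visible items first, then every decoration over the same range.
        fn prepaint(&self, visible_range: Range<usize>, item_height: f32) -> Vec<Element> {
            let mut frame: Vec<Element> = visible_range
                .clone()
                .map(|ix| format!("item {ix}"))
                .collect();
            frame.extend(
                self.decorations
                    .iter()
                    .map(|d| d.compute(visible_range.clone(), item_height)),
            );
            frame
        }
    }

    fn main() {
        let list = UniformList::default().with_decoration(IndentGuides);
        let frame = list.prepaint(0..3, 24.0);
        assert_eq!(frame.len(), 4); // three items plus one decoration element
    }

Without the hook, anything like indent guides has to be baked into the per-item render closure instead of being layered on by the list itself.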

Some files were not shown because too many files have changed in this diff.