Compare commits

..

2 Commits

Author       SHA1        Message                        Date
Nate Butler  58a7a277df  wip                            2024-10-21 10:20:26 -04:00
Nate Butler  5e9f084f12  Start on quick commit UI PoC   2024-10-21 09:33:15 -04:00
209 changed files with 7210 additions and 5817 deletions

View File

@@ -11,7 +11,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Checkout repository
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871 # v4
with:
fetch-depth: 0

View File

@@ -18,7 +18,7 @@ jobs:
- buildjet-16vcpu-ubuntu-2204
steps:
- name: Checkout code
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871 # v4
with:
ref: ${{ github.event.inputs.branch }}
ssh-key: ${{ secrets.ZED_BOT_DEPLOY_KEY }}

View File

@@ -36,7 +36,7 @@ jobs:
- test
steps:
- name: Checkout repo
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871 # v4
with:
clean: false
fetch-depth: 0 # fetch full history
@@ -78,13 +78,13 @@ jobs:
- buildjet-8vcpu-ubuntu-2204
steps:
- name: Checkout repo
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4
- name: Run style checks
uses: ./.github/actions/check_style
- name: Check for typos
uses: crate-ci/typos@8e6a4285bcbde632c5d79900a7779746e8b7ea3f # v1.24.6
uses: crate-ci/typos@v1.24.6
with:
config: ./typos.toml
@@ -96,7 +96,7 @@ jobs:
- test
steps:
- name: Checkout repo
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871 # v4
with:
clean: false
@@ -133,7 +133,7 @@ jobs:
run: echo "$HOME/.cargo/bin" >> $GITHUB_PATH
- name: Checkout repo
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871 # v4
with:
clean: false
@@ -165,7 +165,7 @@ jobs:
run: echo "$HOME/.cargo/bin" >> $GITHUB_PATH
- name: Checkout repo
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871 # v4
with:
clean: false
@@ -188,7 +188,7 @@ jobs:
runs-on: hosted-windows-1
steps:
- name: Checkout repo
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871 # v4
with:
clean: false
@@ -229,7 +229,7 @@ jobs:
node-version: "18"
- name: Checkout repo
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871 # v4
with:
# We need to fetch more than one commit so that `script/draft-release-notes`
# is able to diff between the current and previous tag.
@@ -314,7 +314,7 @@ jobs:
ZED_CLOUD_PROVIDER_ADDITIONAL_MODELS_JSON: ${{ secrets.ZED_CLOUD_PROVIDER_ADDITIONAL_MODELS_JSON }}
steps:
- name: Checkout repo
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871 # v4
with:
clean: false
@@ -361,7 +361,7 @@ jobs:
ZED_CLOUD_PROVIDER_ADDITIONAL_MODELS_JSON: ${{ secrets.ZED_CLOUD_PROVIDER_ADDITIONAL_MODELS_JSON }}
steps:
- name: Checkout repo
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871 # v4
with:
clean: false
@@ -386,6 +386,7 @@ jobs:
- name: Upload app bundle to release
uses: softprops/action-gh-release@de2c0eb89ae2a093876385947365aca7b0e5f844 # v1
if: ${{ env.RELEASE_CHANNEL == 'preview' || env.RELEASE_CHANNEL == 'stable' }}
with:
draft: true
prerelease: ${{ env.RELEASE_CHANNEL == 'preview' }}

View File

@@ -14,10 +14,10 @@ jobs:
stale-issue-message: >
Hi there! 👋
We're working to clean up our issue tracker by closing older issues that might not be relevant anymore. If you are able to reproduce this issue in the latest version of Zed, please let us know by commenting on this issue, and we will keep it open. If you can't reproduce it, feel free to close the issue yourself. Otherwise, we'll close it in 7 days.
We're working to clean up our issue tracker by closing older issues that might not be relevant anymore. Are you able to reproduce this issue in the latest version of Zed? If so, please let us know by commenting on this issue and we will keep it open; otherwise, we'll close it in 7 days. Feel free to open a new issue if you're seeing this message after the issue has been closed.
Thanks for your help!
close-issue-message: "This issue was closed due to inactivity. If you're still experiencing this problem, please open a new issue with a link to this issue."
close-issue-message: "This issue was closed due to inactivity. If you're still experiencing this problem, please open a new issue with a link to this issue."
# We will increase `days-before-stale` to 365 on or after Jan 24th,
# 2024. This date marks one year since migrating issues from
# 'community' to 'zed' repository. The migration added activity to all

View File

@@ -10,7 +10,7 @@ jobs:
runs-on: ubuntu-latest
if: github.repository_owner == 'zed-industries'
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
- uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871 # v4
- name: Set up uv
uses: astral-sh/setup-uv@f3bcaebff5eace81a1c062af9f9011aae482ca9d # v3
with:

View File

@@ -10,7 +10,7 @@ jobs:
runs-on: ubuntu-latest
if: github.repository_owner == 'zed-industries'
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
- uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871 # v4
- name: Set up uv
uses: astral-sh/setup-uv@f3bcaebff5eace81a1c062af9f9011aae482ca9d # v3
with:

View File

@@ -14,7 +14,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
- uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871 # v4
- uses: pnpm/action-setup@fe02b34f77f8bc703788d5817da081398fad5dd2 # v4.0.0
with:

View File

@@ -13,7 +13,7 @@ jobs:
steps:
- name: Checkout repo
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871 # v4
with:
clean: false

View File

@@ -17,7 +17,7 @@ jobs:
- test
steps:
- name: Checkout repo
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871 # v4
with:
clean: false
fetch-depth: 0
@@ -36,7 +36,7 @@ jobs:
needs: style
steps:
- name: Checkout repo
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871 # v4
with:
clean: false
fetch-depth: 0
@@ -71,7 +71,7 @@ jobs:
run: doctl registry login
- name: Checkout repo
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871 # v4
with:
clean: false
@@ -97,7 +97,7 @@ jobs:
steps:
- name: Checkout repo
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871 # v4
with:
clean: false

View File

@@ -15,7 +15,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
- uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871 # v4
- uses: pnpm/action-setup@fe02b34f77f8bc703788d5817da081398fad5dd2 # v4.0.0
with:
@@ -31,7 +31,7 @@ jobs:
}
- name: Check for Typos with Typos-CLI
uses: crate-ci/typos@8e6a4285bcbde632c5d79900a7779746e8b7ea3f # v1.24.6
uses: crate-ci/typos@v1.24.6
with:
config: ./typos.toml
files: ./docs/

View File

@@ -16,7 +16,7 @@ jobs:
- ubuntu-latest
steps:
- name: Checkout repo
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871 # v4
with:
clean: false

View File

@@ -27,7 +27,7 @@ jobs:
node-version: "18"
- name: Checkout repo
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871 # v4
with:
clean: false

View File

@@ -23,7 +23,7 @@ jobs:
- test
steps:
- name: Checkout repo
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871 # v4
with:
clean: false
fetch-depth: 0
@@ -44,7 +44,7 @@ jobs:
needs: style
steps:
- name: Checkout repo
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871 # v4
with:
clean: false
@@ -75,7 +75,7 @@ jobs:
node-version: "18"
- name: Checkout repo
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871 # v4
with:
clean: false
@@ -109,7 +109,7 @@ jobs:
ZED_CLOUD_PROVIDER_ADDITIONAL_MODELS_JSON: ${{ secrets.ZED_CLOUD_PROVIDER_ADDITIONAL_MODELS_JSON }}
steps:
- name: Checkout repo
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871 # v4
with:
clean: false
@@ -149,7 +149,7 @@ jobs:
ZED_CLOUD_PROVIDER_ADDITIONAL_MODELS_JSON: ${{ secrets.ZED_CLOUD_PROVIDER_ADDITIONAL_MODELS_JSON }}
steps:
- name: Checkout repo
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871 # v4
with:
clean: false
@@ -182,7 +182,7 @@ jobs:
- bundle-linux-arm
steps:
- name: Checkout repo
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871 # v4
with:
fetch-depth: 0

Cargo.lock (generated)
View File

@@ -261,9 +261,9 @@ checksum = "34cd60c5e3152cef0a592f1b296f1cc93715d89d2551d85315828c3a09575ff4"
[[package]]
name = "anyhow"
version = "1.0.91"
version = "1.0.89"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c042108f3ed77fd83760a5fd79b53be043192bb3b9dba91d8c574c0ada7850c8"
checksum = "86fdf8605db99b54d3cd748a44c6d04df638eb5dafb219b135d0149bd0db01f6"
[[package]]
name = "approx"
@@ -453,11 +453,9 @@ dependencies = [
"anyhow",
"collections",
"derive_more",
"futures 0.3.30",
"gpui",
"language",
"parking_lot",
"pretty_assertions",
"serde",
"serde_json",
"workspace",
@@ -1011,7 +1009,6 @@ dependencies = [
"smol",
"tempfile",
"util",
"which 6.0.3",
"workspace",
]
@@ -1580,7 +1577,7 @@ dependencies = [
"bitflags 2.6.0",
"cexpr",
"clang-sys",
"itertools 0.10.5",
"itertools 0.12.1",
"lazy_static",
"lazycell",
"proc-macro2",
@@ -2550,6 +2547,7 @@ dependencies = [
"ctor",
"dashmap 6.0.1",
"derive_more",
"dev_server_projects",
"editor",
"env_logger",
"envy",
@@ -2560,6 +2558,7 @@ dependencies = [
"git_hosting_providers",
"google_ai",
"gpui",
"headless",
"hex",
"http_client",
"hyper 0.14.30",
@@ -3474,6 +3473,18 @@ dependencies = [
"syn 1.0.109",
]
[[package]]
name = "dev_server_projects"
version = "0.1.0"
dependencies = [
"anyhow",
"client",
"gpui",
"rpc",
"serde",
"serde_json",
]
[[package]]
name = "diagnostics"
version = "0.1.0"
@@ -3711,7 +3722,6 @@ dependencies = [
"tree-sitter-rust",
"tree-sitter-typescript",
"ui",
"unicode-segmentation",
"unindent",
"url",
"util",
@@ -5260,6 +5270,28 @@ dependencies = [
"http 0.2.12",
]
[[package]]
name = "headless"
version = "0.1.0"
dependencies = [
"anyhow",
"client",
"extension",
"fs",
"futures 0.3.30",
"gpui",
"language",
"log",
"node_runtime",
"postage",
"project",
"proto",
"settings",
"shellexpand 2.1.2",
"signal-hook",
"util",
]
[[package]]
name = "heck"
version = "0.3.3"
@@ -5555,7 +5587,7 @@ dependencies = [
"httpdate",
"itoa",
"pin-project-lite",
"socket2 0.4.10",
"socket2 0.5.7",
"tokio",
"tower-service",
"tracing",
@@ -6198,7 +6230,6 @@ dependencies = [
"lsp",
"parking_lot",
"postage",
"pretty_assertions",
"pulldown-cmark 0.12.1",
"rand 0.8.5",
"regex",
@@ -6265,7 +6296,6 @@ dependencies = [
"settings",
"smol",
"strum 0.25.0",
"telemetry_events",
"text",
"theme",
"thiserror",
@@ -6441,7 +6471,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4979f22fdb869068da03c9f7528f8297c6fd2606bc3a4affe42e6a823fdb8da4"
dependencies = [
"cfg-if",
"windows-targets 0.48.5",
"windows-targets 0.52.6",
]
[[package]]
@@ -8408,6 +8438,7 @@ dependencies = [
"client",
"clock",
"collections",
"dev_server_projects",
"env_logger",
"fs",
"futures 0.3.30",
@@ -8479,7 +8510,6 @@ dependencies = [
"serde_derive",
"serde_json",
"settings",
"smallvec",
"theme",
"ui",
"util",
@@ -8945,6 +8975,8 @@ version = "0.1.0"
dependencies = [
"anyhow",
"auto_update",
"client",
"dev_server_projects",
"editor",
"file_finder",
"futures 0.3.30",
@@ -8953,7 +8985,6 @@ dependencies = [
"itertools 0.13.0",
"language",
"log",
"markdown",
"menu",
"ordered-float 2.10.1",
"paths",
@@ -8961,13 +8992,14 @@ dependencies = [
"project",
"release_channel",
"remote",
"rpc",
"schemars",
"serde",
"serde_json",
"settings",
"smol",
"task",
"theme",
"terminal_view",
"ui",
"util",
"workspace",
@@ -9124,7 +9156,6 @@ dependencies = [
"client",
"clock",
"env_logger",
"fork",
"fs",
"futures 0.3.30",
"git",
@@ -9133,7 +9164,6 @@ dependencies = [
"http_client",
"language",
"languages",
"libc",
"log",
"lsp",
"node_runtime",
@@ -11872,6 +11902,7 @@ dependencies = [
"client",
"collections",
"command_palette",
"dev_server_projects",
"editor",
"extensions_ui",
"feature_flags",
@@ -12405,7 +12436,7 @@ checksum = "2545046bd1473dac6c626659cc2567c6c0ff302fc8b84a56c4243378276f7f57"
[[package]]
name = "tree-sitter-md"
version = "0.3.2"
source = "git+https://github.com/tree-sitter-grammars/tree-sitter-markdown?rev=9a23c1a96c0513d8fc6520972beedd419a973539#9a23c1a96c0513d8fc6520972beedd419a973539"
source = "git+https://github.com/zed-industries/tree-sitter-markdown?rev=4cfa6aad6b75052a5077c80fd934757d9267d81b#4cfa6aad6b75052a5077c80fd934757d9267d81b"
dependencies = [
"cc",
"tree-sitter-language",
@@ -12573,9 +12604,11 @@ version = "0.1.0"
dependencies = [
"editor",
"gpui",
"menu",
"settings",
"theme",
"ui",
"workspace",
]
[[package]]
@@ -13389,9 +13422,9 @@ dependencies = [
[[package]]
name = "wasmtime-wasi"
version = "24.0.1"
version = "24.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fda03f5bfd5c4cc09f75c7e44846663f25f2c48a2d688fbfb5c7a33af6cf34f5"
checksum = "545ae8298ffce025604f7480f9c7d6948c985bef7ce9aee249ef79307813e83c"
dependencies = [
"anyhow",
"async-trait",
@@ -13644,9 +13677,9 @@ dependencies = [
[[package]]
name = "wiggle"
version = "24.0.1"
version = "24.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2d3b31bd2b4d2d82a4b747b8dbc45f566214214a4ffdc5690429a73bc221dc8a"
checksum = "cc850ca3c02c5835934d23f28cec4c5a3fb66fe0b4ecd968bbb35609dda5ddc0"
dependencies = [
"anyhow",
"async-trait",
@@ -13659,9 +13692,9 @@ dependencies = [
[[package]]
name = "wiggle-generate"
version = "24.0.1"
version = "24.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e2c6136b195fc12067aa9d4e7a5baf118729394df7bc7cbf8c63119bc9f2a7cd"
checksum = "634b8804a67200bcb43ea8af5f7c53e862439a086b68b16fd333454bc74d5aab"
dependencies = [
"anyhow",
"heck 0.4.1",
@@ -13674,9 +13707,9 @@ dependencies = [
[[package]]
name = "wiggle-macro"
version = "24.0.1"
version = "24.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8a41eaceee468da976ac43b85c4eb82e482f828d5e8e56f49f90dfac2d9bc3b4"
checksum = "474b7cbdb942c74031e619d66c600bba7f73867c5800fc2c2306cf307649be2f"
dependencies = [
"proc-macro2",
"quote",
@@ -13706,7 +13739,7 @@ version = "0.1.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cf221c93e13a30d793f7645a0e7762c55d169dbb0a49671918a2319d289b10bb"
dependencies = [
"windows-sys 0.48.0",
"windows-sys 0.59.0",
]
[[package]]
@@ -14268,6 +14301,7 @@ dependencies = [
"collections",
"db",
"derive_more",
"dev_server_projects",
"env_logger",
"fs",
"futures 0.3.30",
@@ -14562,7 +14596,7 @@ dependencies = [
[[package]]
name = "zed"
version = "0.160.0"
version = "0.159.0"
dependencies = [
"activity_indicator",
"anyhow",
@@ -14586,6 +14620,7 @@ dependencies = [
"command_palette_hooks",
"copilot",
"db",
"dev_server_projects",
"diagnostics",
"editor",
"env_logger",
@@ -14601,6 +14636,7 @@ dependencies = [
"git_hosting_providers",
"go_to_line",
"gpui",
"headless",
"http_client",
"image_viewer",
"inline_completion_button",
@@ -14668,6 +14704,7 @@ dependencies = [
"winresource",
"workspace",
"zed_actions",
"zstd",
]
[[package]]
@@ -14799,7 +14836,7 @@ dependencies = [
[[package]]
name = "zed_php"
version = "0.2.2"
version = "0.2.1"
dependencies = [
"zed_extension_api 0.1.0",
]

View File

@@ -23,6 +23,7 @@ members = [
"crates/context_servers",
"crates/copilot",
"crates/db",
"crates/dev_server_projects",
"crates/diagnostics",
"crates/docs_preprocessor",
"crates/editor",
@@ -44,6 +45,7 @@ members = [
"crates/google_ai",
"crates/gpui",
"crates/gpui_macros",
"crates/headless",
"crates/html_to_markdown",
"crates/http_client",
"crates/image_viewer",
@@ -199,6 +201,7 @@ command_palette_hooks = { path = "crates/command_palette_hooks" }
context_servers = { path = "crates/context_servers" }
copilot = { path = "crates/copilot" }
db = { path = "crates/db" }
dev_server_projects = { path = "crates/dev_server_projects" }
diagnostics = { path = "crates/diagnostics" }
editor = { path = "crates/editor" }
extension = { path = "crates/extension" }
@@ -216,6 +219,7 @@ go_to_line = { path = "crates/go_to_line" }
google_ai = { path = "crates/google_ai" }
gpui = { path = "crates/gpui", default-features = false, features = ["http_client"]}
gpui_macros = { path = "crates/gpui_macros" }
headless = { path = "crates/headless" }
html_to_markdown = { path = "crates/html_to_markdown" }
http_client = { path = "crates/http_client" }
image_viewer = { path = "crates/image_viewer" }
@@ -455,7 +459,7 @@ tree-sitter-diff = "0.1.0"
tree-sitter-html = "0.20"
tree-sitter-jsdoc = "0.23"
tree-sitter-json = "0.23"
tree-sitter-md = { git = "https://github.com/tree-sitter-grammars/tree-sitter-markdown", rev = "9a23c1a96c0513d8fc6520972beedd419a973539" }
tree-sitter-md = { git = "https://github.com/zed-industries/tree-sitter-markdown", rev = "4cfa6aad6b75052a5077c80fd934757d9267d81b" }
tree-sitter-python = "0.23"
tree-sitter-regex = "0.23"
tree-sitter-ruby = "0.23"
@@ -464,7 +468,7 @@ tree-sitter-typescript = "0.23"
tree-sitter-yaml = { git = "https://github.com/zed-industries/tree-sitter-yaml", rev = "baff0b51c64ef6a1fb1f8390f3ad6015b83ec13a" }
unicase = "2.6"
unindent = "0.1.7"
unicode-segmentation = "1.11"
unicode-segmentation = "1.10"
url = "2.2"
uuid = { version = "1.1.2", features = ["v4", "v5", "serde"] }
wasmparser = "0.215"

View File

@@ -65,7 +65,6 @@
"h": "c",
"handlebars": "code",
"hbs": "template",
"hcl": "hcl",
"heex": "elixir",
"heic": "image",
"heif": "image",
@@ -90,7 +89,6 @@
"json": "storage",
"jsonc": "storage",
"jsx": "react",
"julia": "julia",
"jxl": "image",
"kt": "kotlin",
"ldf": "storage",
@@ -118,7 +116,6 @@
"myd": "storage",
"myi": "storage",
"nim": "nim",
"nix": "nix",
"nu": "terminal",
"odp": "document",
"ods": "document",
@@ -146,15 +143,12 @@
"rb": "ruby",
"rebar.config": "erlang",
"rkt": "code",
"roc": "roc",
"rs": "rust",
"rtf": "document",
"sass": "sass",
"sav": "storage",
"sc": "scala",
"scala": "scala",
"scm": "code",
"scss": "sass",
"sdf": "storage",
"sh": "terminal",
"sql": "storage",
@@ -188,7 +182,6 @@
"yaml": "settings",
"yml": "settings",
"yrl": "erlang",
"zig": "zig",
"zlogin": "terminal",
"zsh": "terminal",
"zsh_aliases": "terminal",
@@ -273,9 +266,6 @@
"haskell": {
"icon": "icons/file_icons/haskell.svg"
},
"hcl": {
"icon": "icons/file_icons/hcl.svg"
},
"heroku": {
"icon": "icons/file_icons/heroku.svg"
},
@@ -288,9 +278,6 @@
"javascript": {
"icon": "icons/file_icons/javascript.svg"
},
"julia": {
"icon": "icons/file_icons/julia.svg"
},
"kotlin": {
"icon": "icons/file_icons/kotlin.svg"
},
@@ -306,9 +293,6 @@
"nim": {
"icon": "icons/file_icons/nim.svg"
},
"nix": {
"icon": "icons/file_icons/nix.svg"
},
"ocaml": {
"icon": "icons/file_icons/ocaml.svg"
},
@@ -333,18 +317,12 @@
"react": {
"icon": "icons/file_icons/react.svg"
},
"roc": {
"icon": "icons/file_icons/roc.svg"
},
"ruby": {
"icon": "icons/file_icons/ruby.svg"
},
"rust": {
"icon": "icons/file_icons/rust.svg"
},
"sass": {
"icon": "icons/file_icons/sass.svg"
},
"scala": {
"icon": "icons/file_icons/scala.svg"
},
@@ -383,9 +361,6 @@
},
"vue": {
"icon": "icons/file_icons/vue.svg"
},
"zig": {
"icon": "icons/file_icons/zig.svg"
}
}
}

View File

@@ -1,3 +0,0 @@
<svg width="16" height="16" viewBox="0 0 16 16" fill="none" xmlns="http://www.w3.org/2000/svg">
<path fill-rule="evenodd" clip-rule="evenodd" d="M7.11466 3.11809C7.21859 3.37393 7.09545 3.66558 6.83961 3.76952L4.31181 4.79643C4.1233 4.87302 4 5.05619 4 5.25967V11.5C4 11.7761 3.77614 12 3.5 12H2.5C2.22386 12 2 11.7761 2 11.5V4.41827C2 3.90959 2.30825 3.45164 2.77953 3.26018L6.08686 1.91658C6.34269 1.81265 6.63434 1.93579 6.73828 2.19163L7.11466 3.11809ZM10.5 1.99999C10.7761 1.99999 11 2.22384 11 2.49999V10.5C11 10.7761 10.7761 11 10.5 11H9.5C9.22386 11 9 10.7761 9 10.5V9.49999C9 9.22384 8.77614 8.99999 8.5 8.99999H7.5C7.22386 8.99999 7 9.22384 7 9.49999V13.5C7 13.7761 6.77614 14 6.5 14H5.5C5.22386 14 5 13.7761 5 13.5V5.53124C5 5.25509 5.22386 5.03124 5.5 5.03124H6.5C6.77614 5.03124 7 5.25509 7 5.53124V6.49999C7 6.77613 7.22386 6.99999 7.5 6.99999H8.5C8.77614 6.99999 9 6.77613 9 6.49999V2.49999C9 2.22384 9.22386 1.99999 9.5 1.99999H10.5ZM13.5 4.03124C13.7761 4.03124 14 4.2551 14 4.53124L14 11.5847C14 12.0859 13.7006 12.5386 13.2394 12.7349L9.99399 14.1159C9.7399 14.224 9.44626 14.1057 9.33813 13.8516L8.94658 12.9315C8.83845 12.6774 8.95678 12.3837 9.21087 12.2756L11.6958 11.2182C11.8802 11.1397 12 10.9586 12 10.7581L12 4.53124C12 4.2551 12.2238 4.03124 12.5 4.03124L13.5 4.03124Z" fill="black"/>
</svg>

Before: 1.3 KiB

View File

@@ -1,5 +0,0 @@
<svg width="16" height="16" viewBox="0 0 16 16" fill="none" xmlns="http://www.w3.org/2000/svg">
<circle cx="8" cy="5" r="2.75" fill="black"/>
<circle cx="4.75" cy="11" r="2.75" fill="black" fill-opacity="0.5"/>
<circle cx="11.25" cy="11" r="2.75" fill="black" fill-opacity="0.75"/>
</svg>

Before: 289 B

View File

@@ -1,8 +0,0 @@
<svg width="16" height="16" viewBox="0 0 16 16" fill="none" xmlns="http://www.w3.org/2000/svg">
<path d="M6.00005 4.76556L4.76569 2.74996M6.00005 4.76556L3.75 4.76563M6.00005 4.76556L7.25006 4.7656" stroke="black" stroke-width="1.5" stroke-linecap="round"/>
<path d="M10.0232 11.2311L11.2675 13.2406M10.0232 11.2311L12.2732 11.2199M10.0232 11.2311L8.7732 11.2373" stroke="black" stroke-opacity="0.5" stroke-width="1.5" stroke-linecap="round"/>
<path d="M9.99025 4.91551L10.9985 2.77781M9.99025 4.91551L8.75599 3.03419M9.99025 4.91551L10.6759 5.9607" stroke="black" stroke-opacity="0.5" stroke-width="1.5" stroke-linecap="round"/>
<path d="M6.0323 11.1009L5.03465 13.2436M6.0323 11.1009L7.27585 12.9761M6.0323 11.1009L5.34151 10.0592" stroke="black" stroke-width="1.5" stroke-linecap="round"/>
<path d="M11.883 8.19023L14.2466 8.19287M11.883 8.19023L13.0602 6.27268M11.883 8.19023L11.229 9.25547" stroke="black" stroke-width="1.5" stroke-linecap="round"/>
<path d="M4.12354 7.8356L1.76002 7.84465M4.12354 7.8356L2.95585 9.75894M4.12354 7.8356L4.7723 6.76713" stroke="black" stroke-opacity="0.5" stroke-width="1.5" stroke-linecap="round"/>
</svg>

Before: 1.1 KiB

View File

@@ -1,7 +0,0 @@
<svg width="17" height="16" viewBox="0 0 17 16" fill="none" xmlns="http://www.w3.org/2000/svg">
<path d="M5.51497 2.02702L1.92042 1.95067C1.69543 1.94589 1.57917 2.21756 1.73796 2.37702L6.24865 6.9068C6.42388 7.08277 6.72071 6.92326 6.67067 6.68002L5.75454 2.22659C5.73103 2.11231 5.63161 2.02949 5.51497 2.02702Z" fill="black" fill-opacity="0.5"/>
<path d="M8.05816 7.38492L12.1366 8.02844C12.3704 8.06532 12.5198 7.78697 12.3599 7.61255L7.30439 2.09814C7.13336 1.91159 6.82522 2.06811 6.87499 2.31624L7.852 7.18714C7.87257 7.28971 7.95483 7.36862 8.05816 7.38492Z" fill="black"/>
<path d="M9.0952 10.9797L11.3824 9.35081C11.564 9.22151 11.4983 8.93722 11.2785 8.90058L8.496 8.43683C8.31974 8.40746 8.17047 8.56712 8.21162 8.74101L8.70689 10.8337C8.74777 11.0064 8.95062 11.0827 9.0952 10.9797Z" fill="black" fill-opacity="0.5"/>
<path d="M5.10282 13.9632L7.59108 12.4532C7.68331 12.3972 7.72923 12.2884 7.70498 12.1832L6.75736 8.07484C6.699 7.8218 6.34133 7.81448 6.27266 8.06491L4.73201 13.6834C4.67223 13.9014 4.90954 14.0805 5.10282 13.9632Z" fill="black"/>
<path d="M11.3183 4.89351L13.1588 7.03149L15.535 6.14302C15.7099 6.07761 15.754 5.85043 15.6161 5.72438L13.7222 3.99219L11.4546 4.48614C11.2695 4.52645 11.1947 4.74995 11.3183 4.89351Z" fill="black"/>
</svg>

Before: 1.2 KiB

View File

@@ -1,3 +0,0 @@
<svg width="16" height="16" viewBox="0 0 16 16" fill="none" xmlns="http://www.w3.org/2000/svg">
<path fill-rule="evenodd" clip-rule="evenodd" d="M6.92096 7.00668C7.87408 7.83549 10.0987 7.48203 10.9376 7.06254C12.8751 6.09381 13.9407 4.39379 12.6407 2.90629C11.0157 1.04692 6.24221 2.49998 4.89844 3.40625C3.55467 4.31252 2.67972 5.53126 2.89071 7.1719C3.1017 8.81254 4.68758 9.7422 6.03128 10.3203C5.38786 10.5616 3.8517 11.0388 3.3125 11.7188C2.71341 12.4742 3.04343 14 4.51577 14C7.15639 14 7.59539 11.1486 7.14847 10.4375C7.88773 10.1295 8.49597 9.96169 9.40138 9.77081C9.63831 9.72087 9.65457 9.46395 9.41295 9.44827C8.80252 9.40864 7.30567 9.8489 6.92096 9.97657C5.78909 9.35157 4.51016 7.93818 4.59378 6.87501C4.68676 5.6928 5.27676 5.07603 6.84508 4.21876C8.01705 3.57813 10.258 3.10695 11.25 3.62501C12.6563 4.35936 10.7875 5.75599 9.92969 6.32031C9.28179 6.74656 8.21971 6.77513 7.22979 6.61435C6.99371 6.576 6.74048 6.84974 6.92096 7.00668ZM5.6719 12.4643C6.35508 11.9894 6.45471 11.1076 6.29955 10.8844C5.76663 11.0874 4.36593 11.9102 4.75111 12.4643C4.90628 12.6875 5.31358 12.7134 5.6719 12.4643Z" fill="black"/>
</svg>

Before: 1.1 KiB

View File

@@ -1,5 +0,0 @@
<svg width="16" height="16" viewBox="0 0 16 16" fill="none" xmlns="http://www.w3.org/2000/svg">
<path d="M14.25 12H11C10.794 12 10.6764 11.7648 10.8 11.6L11.925 10.1C11.9722 10.037 12.0463 10 12.125 10H12.75C12.8881 10 13 9.88807 13 9.75V6.25C13 6.11193 12.8881 6 12.75 6H12.4045C12.2187 6 12.0978 5.80442 12.1809 5.6382L12.9309 4.1382C12.9732 4.0535 13.0598 4 13.1545 4H14.25C14.3881 4 14.5 4.11193 14.5 4.25V11.75C14.5 11.8881 14.3881 12 14.25 12Z" fill="black"/>
<path d="M1.75 4H5C5.20601 4 5.32361 4.23519 5.2 4.4L4.075 5.9C4.02779 5.96295 3.95369 6 3.875 6H3.25C3.11193 6 3 6.11193 3 6.25V9.75C3 9.88807 3.11193 10 3.25 10H3.59549C3.78134 10 3.90221 10.1956 3.8191 10.3618L3.0691 11.8618C3.02675 11.9465 2.94018 12 2.84549 12H1.75C1.61193 12 1.5 11.8881 1.5 11.75V4.25C1.5 4.11193 1.61193 4 1.75 4Z" fill="black"/>
<path d="M7.55748 6H5.95006C5.74177 6 5.62482 5.76022 5.75306 5.59609L6.92493 4.09609C6.97231 4.03544 7.04498 4 7.12194 4H9.93075C9.97607 4 10.0205 3.98769 10.0594 3.96437L11.6408 3.0155C11.8641 2.88154 12.1179 3.13555 11.9837 3.3587L8.22612 9.6083C8.12629 9.77433 8.24508 9.98591 8.43881 9.98712L10.0039 9.9969C10.2092 9.99818 10.3255 10.2327 10.2023 10.3969L9.075 11.9C9.02779 11.963 8.95369 12 8.875 12H6.55383C6.51835 12 6.48328 12.0076 6.45094 12.0222L4.32473 12.9824C4.10122 13.0833 3.88113 12.8356 4.00771 12.6255L7.77161 6.37903C7.87201 6.2124 7.75202 6 7.55748 6Z" fill="black"/>
</svg>

Before: 1.4 KiB

View File

@@ -346,8 +346,6 @@
"git_status": true,
// Amount of indentation for nested items.
"indent_size": 20,
// Whether to show indent guides in the project panel.
"indent_guides": true,
// Whether to reveal it in the project panel automatically,
// when a corresponding project entry becomes active.
// Gitignored entries are never auto revealed.
@@ -805,7 +803,7 @@
/// You can override this to use a version of node that is not in $PATH with:
/// {
/// "node": {
/// "path": "/path/to/node"
/// "node_path": "/path/to/node"
/// "npm_path": "/path/to/npm" (defaults to node_path/../npm)
/// }
/// }
@@ -1101,13 +1099,13 @@
// }
"command_aliases": {},
// ssh_connections is an array of ssh connections.
// By default this setting is null, which disables the direct ssh connection support.
// You can configure these from `project: Open Remote` in the command palette.
// Zed's ssh support will pull configuration from your ~/.ssh too.
// Examples:
// [
// {
// "host": "example-box",
// // "port": 22, "username": "test", "args": ["-i", "/home/user/.ssh/id_rsa"]
// "projects": [
// {
// "paths": ["/home/user/code/zed"]
@@ -1115,7 +1113,7 @@
// ]
// }
// ]
"ssh_connections": [],
"ssh_connections": null,
// Configures the Context Server Protocol binaries
//
// Examples:

View File

@@ -29,13 +29,13 @@ pub struct AnthropicModelCacheConfiguration {
#[derive(Clone, Debug, Default, Serialize, Deserialize, PartialEq, EnumIter)]
pub enum Model {
#[default]
#[serde(rename = "claude-3-5-sonnet", alias = "claude-3-5-sonnet-latest")]
#[serde(rename = "claude-3-5-sonnet", alias = "claude-3-5-sonnet-20240620")]
Claude3_5Sonnet,
#[serde(rename = "claude-3-opus", alias = "claude-3-opus-latest")]
#[serde(rename = "claude-3-opus", alias = "claude-3-opus-20240229")]
Claude3Opus,
#[serde(rename = "claude-3-sonnet", alias = "claude-3-sonnet-latest")]
#[serde(rename = "claude-3-sonnet", alias = "claude-3-sonnet-20240229")]
Claude3Sonnet,
#[serde(rename = "claude-3-haiku", alias = "claude-3-haiku-latest")]
#[serde(rename = "claude-3-haiku", alias = "claude-3-haiku-20240307")]
Claude3Haiku,
#[serde(rename = "custom")]
Custom {
@@ -69,10 +69,10 @@ impl Model {
pub fn id(&self) -> &str {
match self {
Model::Claude3_5Sonnet => "claude-3-5-sonnet-latest",
Model::Claude3Opus => "claude-3-opus-latest",
Model::Claude3Sonnet => "claude-3-sonnet-latest",
Model::Claude3Haiku => "claude-3-haiku-latest",
Model::Claude3_5Sonnet => "claude-3-5-sonnet-20240620",
Model::Claude3Opus => "claude-3-opus-20240229",
Model::Claude3Sonnet => "claude-3-sonnet-20240229",
Model::Claude3Haiku => "claude-3-haiku-20240307",
Self::Custom { name, .. } => name,
}
}
@@ -229,7 +229,7 @@ pub struct RateLimitInfo {
}
impl RateLimitInfo {
pub fn from_headers(headers: &HeaderMap<HeaderValue>) -> Result<Self> {
fn from_headers(headers: &HeaderMap<HeaderValue>) -> Result<Self> {
let tokens_limit = get_header("anthropic-ratelimit-tokens-limit", headers)?.parse()?;
let requests_limit = get_header("anthropic-ratelimit-requests-limit", headers)?.parse()?;
let tokens_remaining =

View File

@@ -356,10 +356,8 @@ impl AssistantPanel {
let project = workspace.project().clone();
pane.set_custom_drop_handle(cx, move |_, dropped_item, cx| {
let action = maybe!({
if project.read(cx).is_local() {
if let Some(paths) = dropped_item.downcast_ref::<ExternalPaths>() {
return Some(InsertDraggedFiles::ExternalFiles(paths.paths().to_vec()));
}
if let Some(paths) = dropped_item.downcast_ref::<ExternalPaths>() {
return Some(InsertDraggedFiles::ExternalFiles(paths.paths().to_vec()));
}
let project_paths = if let Some(tab) = dropped_item.downcast_ref::<DraggedTab>()
@@ -963,7 +961,7 @@ impl AssistantPanel {
fn new_context(&mut self, cx: &mut ViewContext<Self>) -> Option<View<ContextEditor>> {
let project = self.project.read(cx);
if project.is_via_collab() {
if project.is_via_collab() && project.dev_server_project_id().is_none() {
let task = self
.context_store
.update(cx, |store, cx| store.create_remote_context(cx));

View File

@@ -7,7 +7,7 @@ use crate::{
};
use anyhow::{anyhow, Context as _, Result};
use assistant_slash_command::{
SlashCommandOutput, SlashCommandOutputSection, SlashCommandRegistry, SlashCommandResult,
SlashCommandOutput, SlashCommandOutputSection, SlashCommandRegistry,
};
use assistant_tool::ToolRegistry;
use client::{self, proto, telemetry::Telemetry};
@@ -23,7 +23,6 @@ use gpui::{
use language::{AnchorRangeExt, Bias, Buffer, LanguageRegistry, OffsetRangeExt, Point, ToOffset};
use language_model::{
logging::report_assistant_event,
provider::cloud::{MaxMonthlySpendReachedError, PaymentRequiredError},
LanguageModel, LanguageModelCacheConfiguration, LanguageModelCompletionEvent,
LanguageModelImage, LanguageModelRegistry, LanguageModelRequest, LanguageModelRequestMessage,
@@ -1678,7 +1677,7 @@ impl Context {
pub fn insert_command_output(
&mut self,
command_range: Range<language::Anchor>,
output: Task<SlashCommandResult>,
output: Task<Result<SlashCommandOutput>>,
ensure_trailing_newline: bool,
expand_result: bool,
cx: &mut ModelContext<Self>,
@@ -1689,13 +1688,19 @@ impl Context {
let command_range = command_range.clone();
async move {
let output = output.await;
let output = match output {
Ok(output) => SlashCommandOutput::from_event_stream(output).await,
Err(err) => Err(err),
};
this.update(&mut cx, |this, cx| match output {
Ok(mut output) => {
output.ensure_valid_section_ranges();
// Ensure section ranges are valid.
for section in &mut output.sections {
section.range.start = section.range.start.min(output.text.len());
section.range.end = section.range.end.min(output.text.len());
while !output.text.is_char_boundary(section.range.start) {
section.range.start -= 1;
}
while !output.text.is_char_boundary(section.range.end) {
section.range.end += 1;
}
}
// Ensure there is a newline after the last section.
if ensure_trailing_newline {
@@ -1929,7 +1934,6 @@ impl Context {
});
match event {
LanguageModelCompletionEvent::StartMessage { .. } => {}
LanguageModelCompletionEvent::Stop(reason) => {
stop_reason = reason;
}
@@ -2035,28 +2039,23 @@ impl Context {
None
};
let language_name = this
.buffer
.read(cx)
.language()
.map(|language| language.name());
report_assistant_event(
AssistantEvent {
if let Some(telemetry) = this.telemetry.as_ref() {
let language_name = this
.buffer
.read(cx)
.language()
.map(|language| language.name());
telemetry.report_assistant_event(AssistantEvent {
conversation_id: Some(this.id.0.clone()),
kind: AssistantKind::Panel,
phase: AssistantPhase::Response,
message_id: None,
model: model.telemetry_id(),
model_provider: model.provider_id().to_string(),
response_latency,
error_message,
language_name: language_name.map(|name| name.to_proto()),
},
this.telemetry.clone(),
cx.http_client(),
model.api_key(cx),
cx.background_executor(),
);
});
}
if let Ok(stop_reason) = result {
match stop_reason {
@@ -2488,8 +2487,7 @@ impl Context {
request.messages.push(LanguageModelRequestMessage {
role: Role::User,
content: vec![
"Generate a concise 3-7 word title for this conversation, omitting punctuation"
.into(),
"Summarize the context into a short title without punctuation.".into(),
],
cache: false,
});
@@ -2500,7 +2498,7 @@ impl Context {
let mut messages = stream.await?;
let mut replaced = !replace_old;
while let Some(message) = messages.stream.next().await {
while let Some(message) = messages.next().await {
let text = message?;
let mut lines = text.lines();
this.update(&mut cx, |this, cx| {

View File

@@ -6,7 +6,7 @@ use crate::{
use anyhow::Result;
use assistant_slash_command::{
ArgumentCompletion, SlashCommand, SlashCommandOutput, SlashCommandOutputSection,
SlashCommandRegistry, SlashCommandResult,
SlashCommandRegistry,
};
use collections::HashSet;
use fs::FakeFs;
@@ -1097,8 +1097,7 @@ async fn test_random_context_collaboration(cx: &mut TestAppContext, mut rng: Std
text: output_text,
sections,
run_commands_in_text: false,
}
.to_event_stream())),
})),
true,
false,
cx,
@@ -1417,12 +1416,11 @@ impl SlashCommand for FakeSlashCommand {
_workspace: WeakView<Workspace>,
_delegate: Option<Arc<dyn LspAdapterDelegate>>,
_cx: &mut WindowContext,
) -> Task<SlashCommandResult> {
) -> Task<Result<SlashCommandOutput>> {
Task::ready(Ok(SlashCommandOutput {
text: format!("Executed fake command: {}", self.0),
sections: vec![],
run_commands_in_text: false,
}
.to_event_stream()))
}))
}
}

View File

@@ -21,7 +21,9 @@ use fs::Fs;
use futures::{
channel::mpsc,
future::{BoxFuture, LocalBoxFuture},
join, SinkExt, Stream, StreamExt,
join,
stream::{self, BoxStream},
SinkExt, Stream, StreamExt,
};
use gpui::{
anchored, deferred, point, AnyElement, AppContext, ClickEvent, EventEmitter, FocusHandle,
@@ -30,8 +32,7 @@ use gpui::{
};
use language::{Buffer, IndentKind, Point, Selection, TransactionId};
use language_model::{
logging::report_assistant_event, LanguageModel, LanguageModelRegistry, LanguageModelRequest,
LanguageModelRequestMessage, LanguageModelTextStream, Role,
LanguageModel, LanguageModelRegistry, LanguageModelRequest, LanguageModelRequestMessage, Role,
};
use multi_buffer::MultiBufferRow;
use parking_lot::Mutex;
@@ -235,13 +236,12 @@ impl InlineAssistant {
};
codegen_ranges.push(start..end);
if let Some(model) = LanguageModelRegistry::read_global(cx).active_model() {
if let Some(telemetry) = self.telemetry.as_ref() {
if let Some(telemetry) = self.telemetry.as_ref() {
if let Some(model) = LanguageModelRegistry::read_global(cx).active_model() {
telemetry.report_assistant_event(AssistantEvent {
conversation_id: None,
kind: AssistantKind::Inline,
phase: AssistantPhase::Invoked,
message_id: None,
model: model.telemetry_id(),
model_provider: model.provider_id().to_string(),
response_latency: None,
@@ -745,6 +745,33 @@ impl InlineAssistant {
pub fn finish_assist(&mut self, assist_id: InlineAssistId, undo: bool, cx: &mut WindowContext) {
if let Some(assist) = self.assists.get(&assist_id) {
if let Some(telemetry) = self.telemetry.as_ref() {
if let Some(model) = LanguageModelRegistry::read_global(cx).active_model() {
let language_name = assist.editor.upgrade().and_then(|editor| {
let multibuffer = editor.read(cx).buffer().read(cx);
let ranges = multibuffer.range_to_buffer_ranges(assist.range.clone(), cx);
ranges
.first()
.and_then(|(buffer, _, _)| buffer.read(cx).language())
.map(|language| language.name())
});
telemetry.report_assistant_event(AssistantEvent {
conversation_id: None,
kind: AssistantKind::Inline,
phase: if undo {
AssistantPhase::Rejected
} else {
AssistantPhase::Accepted
},
model: model.telemetry_id(),
model_provider: model.provider_id().to_string(),
response_latency: None,
error_message: None,
language_name: language_name.map(|name| name.to_proto()),
});
}
}
let assist_group_id = assist.group_id;
if self.assist_groups[&assist_group_id].linked {
for assist_id in self.unlink_assist_group(assist_group_id, cx) {
@@ -779,45 +806,12 @@ impl InlineAssistant {
}
}
let active_alternative = assist.codegen.read(cx).active_alternative().clone();
let message_id = active_alternative.read(cx).message_id.clone();
if let Some(model) = LanguageModelRegistry::read_global(cx).active_model() {
let language_name = assist.editor.upgrade().and_then(|editor| {
let multibuffer = editor.read(cx).buffer().read(cx);
let ranges = multibuffer.range_to_buffer_ranges(assist.range.clone(), cx);
ranges
.first()
.and_then(|(buffer, _, _)| buffer.read(cx).language())
.map(|language| language.name())
});
report_assistant_event(
AssistantEvent {
conversation_id: None,
kind: AssistantKind::Inline,
message_id,
phase: if undo {
AssistantPhase::Rejected
} else {
AssistantPhase::Accepted
},
model: model.telemetry_id(),
model_provider: model.provider_id().to_string(),
response_latency: None,
error_message: None,
language_name: language_name.map(|name| name.to_proto()),
},
self.telemetry.clone(),
cx.http_client(),
model.api_key(cx),
cx.background_executor(),
);
}
if undo {
assist.codegen.update(cx, |codegen, cx| codegen.undo(cx));
} else {
self.confirmed_assists.insert(assist_id, active_alternative);
let confirmed_alternative = assist.codegen.read(cx).active_alternative().clone();
self.confirmed_assists
.insert(assist_id, confirmed_alternative);
}
}
}
@@ -2262,7 +2256,6 @@ pub enum CodegenEvent {
pub struct Codegen {
alternatives: Vec<Model<CodegenAlternative>>,
active_alternative: usize,
seen_alternatives: HashSet<usize>,
subscriptions: Vec<Subscription>,
buffer: Model<MultiBuffer>,
range: Range<Anchor>,
@@ -2293,7 +2286,6 @@ impl Codegen {
let mut this = Self {
alternatives: vec![codegen],
active_alternative: 0,
seen_alternatives: HashSet::default(),
subscriptions: Vec::new(),
buffer,
range,
@@ -2346,7 +2338,6 @@ impl Codegen {
fn activate(&mut self, index: usize, cx: &mut ModelContext<Self>) {
self.active_alternative()
.update(cx, |codegen, cx| codegen.set_active(false, cx));
self.seen_alternatives.insert(index);
self.active_alternative = index;
self.active_alternative()
.update(cx, |codegen, cx| codegen.set_active(true, cx));
@@ -2476,9 +2467,6 @@ pub struct CodegenAlternative {
active: bool,
edits: Vec<(Range<Anchor>, String)>,
line_operations: Vec<LineOperation>,
request: Option<LanguageModelRequest>,
elapsed_time: Option<f64>,
message_id: Option<String>,
}
enum CodegenStatus {
@@ -2537,7 +2525,6 @@ impl CodegenAlternative {
buffer: buffer.clone(),
old_buffer,
edit_position: None,
message_id: None,
snapshot,
last_equal_ranges: Default::default(),
transformation_transaction_id: None,
@@ -2551,8 +2538,6 @@ impl CodegenAlternative {
edits: Vec::new(),
line_operations: Vec::new(),
range,
request: None,
elapsed_time: None,
}
}
@@ -2642,21 +2627,19 @@ impl CodegenAlternative {
self.edit_position = Some(self.range.start.bias_right(&self.snapshot));
let api_key = model.api_key(cx);
let telemetry_id = model.telemetry_id();
let provider_id = model.provider_id();
let stream: LocalBoxFuture<Result<LanguageModelTextStream>> =
let chunks: LocalBoxFuture<Result<BoxStream<Result<String>>>> =
if user_prompt.trim().to_lowercase() == "delete" {
async { Ok(LanguageModelTextStream::default()) }.boxed_local()
async { Ok(stream::empty().boxed()) }.boxed_local()
} else {
let request = self.build_request(user_prompt, assistant_panel_context, cx)?;
self.request = Some(request.clone());
let stream = cx
let chunks = cx
.spawn(|_, cx| async move { model.stream_completion_text(request, &cx).await });
async move { Ok(stream.await?) }.boxed_local()
async move { Ok(chunks.await?.boxed()) }.boxed_local()
};
self.handle_stream(telemetry_id, provider_id.to_string(), api_key, stream, cx);
self.handle_stream(telemetry_id, provider_id.to_string(), chunks, cx);
Ok(())
}
@@ -2721,11 +2704,9 @@ impl CodegenAlternative {
&mut self,
model_telemetry_id: String,
model_provider_id: String,
model_api_key: Option<String>,
stream: impl 'static + Future<Output = Result<LanguageModelTextStream>>,
stream: impl 'static + Future<Output = Result<BoxStream<'static, Result<String>>>>,
cx: &mut ModelContext<Self>,
) {
let start_time = Instant::now();
let snapshot = self.snapshot.clone();
let selected_text = snapshot
.text_for_range(self.range.start..self.range.end)
@@ -2752,7 +2733,6 @@ impl CodegenAlternative {
}
}
let http_client = cx.http_client().clone();
let telemetry = self.telemetry.clone();
let language_name = {
let multibuffer = self.buffer.read(cx);
@@ -2768,21 +2748,15 @@ impl CodegenAlternative {
let mut edit_start = self.range.start.to_offset(&snapshot);
self.generation = cx.spawn(|codegen, mut cx| {
async move {
let stream = stream.await;
let message_id = stream
.as_ref()
.ok()
.and_then(|stream| stream.message_id.clone());
let chunks = stream.await;
let generate = async {
let (mut diff_tx, mut diff_rx) = mpsc::channel(1);
let executor = cx.background_executor().clone();
let message_id = message_id.clone();
let line_based_stream_diff: Task<anyhow::Result<()>> =
cx.background_executor().spawn(async move {
let mut response_latency = None;
let request_start = Instant::now();
let diff = async {
let chunks = StripInvalidSpans::new(stream?.stream);
let chunks = StripInvalidSpans::new(chunks?);
futures::pin_mut!(chunks);
let mut diff = StreamingDiff::new(selected_text.to_string());
let mut line_diff = LineDiff::default();
@@ -2878,10 +2852,9 @@ impl CodegenAlternative {
let error_message =
result.as_ref().err().map(|error| error.to_string());
report_assistant_event(
AssistantEvent {
if let Some(telemetry) = telemetry {
telemetry.report_assistant_event(AssistantEvent {
conversation_id: None,
message_id,
kind: AssistantKind::Inline,
phase: AssistantPhase::Response,
model: model_telemetry_id,
@@ -2889,12 +2862,8 @@ impl CodegenAlternative {
response_latency,
error_message,
language_name: language_name.map(|name| name.to_proto()),
},
telemetry,
http_client,
model_api_key,
&executor,
);
});
}
result?;
Ok(())
@@ -2954,18 +2923,14 @@ impl CodegenAlternative {
};
let result = generate.await;
let elapsed_time = start_time.elapsed().as_secs_f64();
codegen
.update(&mut cx, |this, cx| {
this.message_id = message_id;
this.last_equal_ranges.clear();
if let Err(error) = result {
this.status = CodegenStatus::Error(error);
} else {
this.status = CodegenStatus::Done;
}
this.elapsed_time = Some(elapsed_time);
cx.emit(CodegenEvent::Finished);
cx.notify();
})
@@ -3312,10 +3277,6 @@ impl CodeActionProvider for AssistantCodeActionProvider {
range: Range<text::Anchor>,
cx: &mut WindowContext,
) -> Task<Result<Vec<CodeAction>>> {
if !AssistantSettings::get_global(cx).enabled {
return Task::ready(Ok(Vec::new()));
}
let snapshot = buffer.read(cx).snapshot();
let mut range = range.to_point(&snapshot);
@@ -3510,7 +3471,15 @@ mod tests {
)
});
let chunks_tx = simulate_response_stream(codegen, cx);
let (chunks_tx, chunks_rx) = mpsc::unbounded();
codegen.update(cx, |codegen, cx| {
codegen.handle_stream(
String::new(),
String::new(),
future::ready(Ok(chunks_rx.map(Ok).boxed())),
cx,
)
});
let mut new_text = concat!(
" let mut x = 0;\n",
@@ -3574,7 +3543,15 @@ mod tests {
)
});
let chunks_tx = simulate_response_stream(codegen, cx);
let (chunks_tx, chunks_rx) = mpsc::unbounded();
codegen.update(cx, |codegen, cx| {
codegen.handle_stream(
String::new(),
String::new(),
future::ready(Ok(chunks_rx.map(Ok).boxed())),
cx,
)
});
cx.background_executor.run_until_parked();
@@ -3641,7 +3618,15 @@ mod tests {
)
});
let chunks_tx = simulate_response_stream(codegen, cx);
let (chunks_tx, chunks_rx) = mpsc::unbounded();
codegen.update(cx, |codegen, cx| {
codegen.handle_stream(
String::new(),
String::new(),
future::ready(Ok(chunks_rx.map(Ok).boxed())),
cx,
)
});
cx.background_executor.run_until_parked();
@@ -3707,7 +3692,16 @@ mod tests {
)
});
let chunks_tx = simulate_response_stream(codegen, cx);
let (chunks_tx, chunks_rx) = mpsc::unbounded();
codegen.update(cx, |codegen, cx| {
codegen.handle_stream(
String::new(),
String::new(),
future::ready(Ok(chunks_rx.map(Ok).boxed())),
cx,
)
});
let new_text = concat!(
"func main() {\n",
"\tx := 0\n",
@@ -3762,7 +3756,16 @@ mod tests {
)
});
let chunks_tx = simulate_response_stream(codegen.clone(), cx);
let (chunks_tx, chunks_rx) = mpsc::unbounded();
codegen.update(cx, |codegen, cx| {
codegen.handle_stream(
String::new(),
String::new(),
future::ready(Ok(chunks_rx.map(Ok).boxed())),
cx,
)
});
chunks_tx
.unbounded_send("let mut x = 0;\nx += 1;".to_string())
.unwrap();
@@ -3836,26 +3839,6 @@ mod tests {
}
}
fn simulate_response_stream(
codegen: Model<CodegenAlternative>,
cx: &mut TestAppContext,
) -> mpsc::UnboundedSender<String> {
let (chunks_tx, chunks_rx) = mpsc::unbounded();
codegen.update(cx, |codegen, cx| {
codegen.handle_stream(
String::new(),
String::new(),
None,
future::ready(Ok(LanguageModelTextStream {
message_id: None,
stream: chunks_rx.map(Ok).boxed(),
})),
cx,
);
});
chunks_tx
}
fn rust_lang() -> Language {
Language::new(
LanguageConfig {

View File

@@ -717,6 +717,7 @@ mod tests {
);
// Ensure InsertBefore merges correctly with Update of the same text
assert_edits(
"
fn foo() {
@@ -781,90 +782,6 @@ mod tests {
.unindent(),
cx,
);
// Correctly indent new text when replacing multiple adjacent indented blocks.
assert_edits(
"
impl Numbers {
fn one() {
1
}
fn two() {
2
}
fn three() {
3
}
}
"
.unindent(),
vec![
AssistantEditKind::Update {
old_text: "
fn one() {
1
}
"
.unindent(),
new_text: "
fn one() {
101
}
"
.unindent(),
description: "pick better number".into(),
},
AssistantEditKind::Update {
old_text: "
fn two() {
2
}
"
.unindent(),
new_text: "
fn two() {
102
}
"
.unindent(),
description: "pick better number".into(),
},
AssistantEditKind::Update {
old_text: "
fn three() {
3
}
"
.unindent(),
new_text: "
fn three() {
103
}
"
.unindent(),
description: "pick better number".into(),
},
],
"
impl Numbers {
fn one() {
101
}
fn two() {
102
}
fn three() {
103
}
}
"
.unindent(),
cx,
);
}
#[track_caller]

View File

@@ -1,8 +1,7 @@
use super::create_label_for_command;
use super::{SlashCommand, SlashCommandOutput};
use anyhow::{anyhow, Result};
use assistant_slash_command::{
ArgumentCompletion, SlashCommand, SlashCommandOutput, SlashCommandOutputSection,
SlashCommandResult,
};
use assistant_slash_command::{ArgumentCompletion, SlashCommandOutputSection};
use feature_flags::FeatureFlag;
use futures::StreamExt;
use gpui::{AppContext, AsyncAppContext, Task, WeakView};
@@ -18,8 +17,6 @@ use ui::{BorrowAppContext, WindowContext};
use util::ResultExt;
use workspace::Workspace;
use crate::slash_command::create_label_for_command;
pub struct AutoSlashCommandFeatureFlag;
impl FeatureFlag for AutoSlashCommandFeatureFlag {
@@ -95,7 +92,7 @@ impl SlashCommand for AutoCommand {
workspace: WeakView<Workspace>,
_delegate: Option<Arc<dyn LspAdapterDelegate>>,
cx: &mut WindowContext,
) -> Task<SlashCommandResult> {
) -> Task<Result<SlashCommandOutput>> {
let Some(workspace) = workspace.upgrade() else {
return Task::ready(Err(anyhow::anyhow!("workspace was dropped")));
};
@@ -147,8 +144,7 @@ impl SlashCommand for AutoCommand {
text: prompt,
sections: Vec::new(),
run_commands_in_text: true,
}
.to_event_stream())
})
})
}
}

View File

@@ -1,8 +1,6 @@
use super::{SlashCommand, SlashCommandOutput};
use anyhow::{anyhow, Context, Result};
use assistant_slash_command::{
ArgumentCompletion, SlashCommand, SlashCommandOutput, SlashCommandOutputSection,
SlashCommandResult,
};
use assistant_slash_command::{ArgumentCompletion, SlashCommandOutputSection};
use fs::Fs;
use gpui::{AppContext, Model, Task, WeakView};
use language::{BufferSnapshot, LspAdapterDelegate};
@@ -125,7 +123,7 @@ impl SlashCommand for CargoWorkspaceSlashCommand {
workspace: WeakView<Workspace>,
_delegate: Option<Arc<dyn LspAdapterDelegate>>,
cx: &mut WindowContext,
) -> Task<SlashCommandResult> {
) -> Task<Result<SlashCommandOutput>> {
let output = workspace.update(cx, |workspace, cx| {
let project = workspace.project().clone();
let fs = workspace.project().read(cx).fs().clone();
@@ -147,8 +145,7 @@ impl SlashCommand for CargoWorkspaceSlashCommand {
metadata: None,
}],
run_commands_in_text: false,
}
.to_event_stream())
})
})
});
output.unwrap_or_else(|error| Task::ready(Err(error)))

View File

@@ -1,7 +1,8 @@
use super::create_label_for_command;
use anyhow::{anyhow, Result};
use assistant_slash_command::{
AfterCompletion, ArgumentCompletion, SlashCommand, SlashCommandOutput,
SlashCommandOutputSection, SlashCommandResult,
SlashCommandOutputSection,
};
use collections::HashMap;
use context_servers::{
@@ -16,8 +17,6 @@ use text::LineEnding;
use ui::{IconName, SharedString};
use workspace::Workspace;
use crate::slash_command::create_label_for_command;
pub struct ContextServerSlashCommand {
server_id: String,
prompt: Prompt,
@@ -129,7 +128,7 @@ impl SlashCommand for ContextServerSlashCommand {
_workspace: WeakView<Workspace>,
_delegate: Option<Arc<dyn LspAdapterDelegate>>,
cx: &mut WindowContext,
) -> Task<SlashCommandResult> {
) -> Task<Result<SlashCommandOutput>> {
let server_id = self.server_id.clone();
let prompt_name = self.prompt.name.clone();
@@ -146,28 +145,7 @@ impl SlashCommand for ContextServerSlashCommand {
return Err(anyhow!("Context server not initialized"));
};
let result = protocol.run_prompt(&prompt_name, prompt_args).await?;
// Check that there are only user roles
if result
.messages
.iter()
.any(|msg| !matches!(msg.role, context_servers::types::SamplingRole::User))
{
return Err(anyhow!(
"Prompt contains non-user roles, which is not supported"
));
}
// Extract text from user messages into a single prompt string
let mut prompt = result
.messages
.into_iter()
.filter_map(|msg| match msg.content {
context_servers::types::SamplingContent::Text { text } => Some(text),
_ => None,
})
.collect::<Vec<String>>()
.join("\n\n");
let mut prompt = result.prompt;
// We must normalize the line endings here, since servers might return CR characters.
LineEnding::normalize(&mut prompt);
@@ -185,8 +163,7 @@ impl SlashCommand for ContextServerSlashCommand {
}],
text: prompt,
run_commands_in_text: false,
}
.to_event_stream())
})
})
} else {
Task::ready(Err(anyhow!("Context server not found")))

View File

@@ -1,9 +1,7 @@
use super::{SlashCommand, SlashCommandOutput};
use crate::prompt_library::PromptStore;
use anyhow::{anyhow, Result};
use assistant_slash_command::{
ArgumentCompletion, SlashCommand, SlashCommandOutput, SlashCommandOutputSection,
SlashCommandResult,
};
use assistant_slash_command::{ArgumentCompletion, SlashCommandOutputSection};
use gpui::{Task, WeakView};
use language::{BufferSnapshot, LspAdapterDelegate};
use std::{
@@ -50,7 +48,7 @@ impl SlashCommand for DefaultSlashCommand {
_workspace: WeakView<Workspace>,
_delegate: Option<Arc<dyn LspAdapterDelegate>>,
cx: &mut WindowContext,
) -> Task<SlashCommandResult> {
) -> Task<Result<SlashCommandOutput>> {
let store = PromptStore::global(cx);
cx.background_executor().spawn(async move {
let store = store.await?;
@@ -78,8 +76,7 @@ impl SlashCommand for DefaultSlashCommand {
}],
text,
run_commands_in_text: true,
}
.to_event_stream())
})
})
}
}

View File

@@ -1,8 +1,7 @@
use crate::slash_command::file_command::{FileCommandMetadata, FileSlashCommand};
use anyhow::{anyhow, Result};
use anyhow::Result;
use assistant_slash_command::{
ArgumentCompletion, SlashCommand, SlashCommandOutput, SlashCommandOutputSection,
SlashCommandResult,
};
use collections::HashSet;
use futures::future;
@@ -38,7 +37,7 @@ impl SlashCommand for DeltaSlashCommand {
_workspace: Option<WeakView<Workspace>>,
_cx: &mut WindowContext,
) -> Task<Result<Vec<ArgumentCompletion>>> {
Task::ready(Err(anyhow!("this command does not require argument")))
unimplemented!()
}
fn run(
@@ -49,7 +48,7 @@ impl SlashCommand for DeltaSlashCommand {
workspace: WeakView<Workspace>,
delegate: Option<Arc<dyn LspAdapterDelegate>>,
cx: &mut WindowContext,
) -> Task<SlashCommandResult> {
) -> Task<Result<SlashCommandOutput>> {
let mut paths = HashSet::default();
let mut file_command_old_outputs = Vec::new();
let mut file_command_new_outputs = Vec::new();
@@ -86,28 +85,25 @@ impl SlashCommand for DeltaSlashCommand {
.zip(file_command_new_outputs)
{
if let Ok(new_output) = new_output {
if let Ok(new_output) = SlashCommandOutput::from_event_stream(new_output).await
{
if let Some(file_command_range) = new_output.sections.first() {
let new_text = &new_output.text[file_command_range.range.clone()];
if old_text.chars().ne(new_text.chars()) {
output.sections.extend(new_output.sections.into_iter().map(
|section| SlashCommandOutputSection {
range: output.text.len() + section.range.start
..output.text.len() + section.range.end,
icon: section.icon,
label: section.label,
metadata: section.metadata,
},
));
output.text.push_str(&new_output.text);
}
if let Some(file_command_range) = new_output.sections.first() {
let new_text = &new_output.text[file_command_range.range.clone()];
if old_text.chars().ne(new_text.chars()) {
output.sections.extend(new_output.sections.into_iter().map(
|section| SlashCommandOutputSection {
range: output.text.len() + section.range.start
..output.text.len() + section.range.end,
icon: section.icon,
label: section.label,
metadata: section.metadata,
},
));
output.text.push_str(&new_output.text);
}
}
}
}
Ok(output.to_event_stream())
Ok(output)
})
}
}

View File

@@ -1,8 +1,6 @@
use super::{create_label_for_command, SlashCommand, SlashCommandOutput};
use anyhow::{anyhow, Result};
use assistant_slash_command::{
ArgumentCompletion, SlashCommand, SlashCommandOutput, SlashCommandOutputSection,
SlashCommandResult,
};
use assistant_slash_command::{ArgumentCompletion, SlashCommandOutputSection};
use fuzzy::{PathMatch, StringMatchCandidate};
use gpui::{AppContext, Model, Task, View, WeakView};
use language::{
@@ -21,8 +19,6 @@ use util::paths::PathMatcher;
use util::ResultExt;
use workspace::Workspace;
use crate::slash_command::create_label_for_command;
pub(crate) struct DiagnosticsSlashCommand;
impl DiagnosticsSlashCommand {
@@ -171,7 +167,7 @@ impl SlashCommand for DiagnosticsSlashCommand {
workspace: WeakView<Workspace>,
_delegate: Option<Arc<dyn LspAdapterDelegate>>,
cx: &mut WindowContext,
) -> Task<SlashCommandResult> {
) -> Task<Result<SlashCommandOutput>> {
let Some(workspace) = workspace.upgrade() else {
return Task::ready(Err(anyhow!("workspace was dropped")));
};
@@ -180,11 +176,7 @@ impl SlashCommand for DiagnosticsSlashCommand {
let task = collect_diagnostics(workspace.read(cx).project().clone(), options, cx);
cx.spawn(move |_| async move {
task.await?
.map(|output| output.to_event_stream())
.ok_or_else(|| anyhow!("No diagnostics found"))
})
cx.spawn(move |_| async move { task.await?.ok_or_else(|| anyhow!("No diagnostics found")) })
}
}

View File

@@ -6,7 +6,6 @@ use std::time::Duration;
use anyhow::{anyhow, bail, Result};
use assistant_slash_command::{
ArgumentCompletion, SlashCommand, SlashCommandOutput, SlashCommandOutputSection,
SlashCommandResult,
};
use gpui::{AppContext, BackgroundExecutor, Model, Task, WeakView};
use indexed_docs::{
@@ -275,7 +274,7 @@ impl SlashCommand for DocsSlashCommand {
_workspace: WeakView<Workspace>,
_delegate: Option<Arc<dyn LspAdapterDelegate>>,
cx: &mut WindowContext,
) -> Task<SlashCommandResult> {
) -> Task<Result<SlashCommandOutput>> {
if arguments.is_empty() {
return Task::ready(Err(anyhow!("missing an argument")));
};
@@ -356,8 +355,7 @@ impl SlashCommand for DocsSlashCommand {
})
.collect(),
run_commands_in_text: false,
}
.to_event_stream())
})
})
}
}

View File

@@ -6,7 +6,6 @@ use std::sync::Arc;
use anyhow::{anyhow, bail, Context, Result};
use assistant_slash_command::{
ArgumentCompletion, SlashCommand, SlashCommandOutput, SlashCommandOutputSection,
SlashCommandResult,
};
use futures::AsyncReadExt;
use gpui::{Task, WeakView};
@@ -134,7 +133,7 @@ impl SlashCommand for FetchSlashCommand {
workspace: WeakView<Workspace>,
_delegate: Option<Arc<dyn LspAdapterDelegate>>,
cx: &mut WindowContext,
) -> Task<SlashCommandResult> {
) -> Task<Result<SlashCommandOutput>> {
let Some(argument) = arguments.first() else {
return Task::ready(Err(anyhow!("missing URL")));
};
@@ -167,8 +166,7 @@ impl SlashCommand for FetchSlashCommand {
metadata: None,
}],
run_commands_in_text: false,
}
.to_event_stream())
})
})
}
}

View File

@@ -1,15 +1,11 @@
use super::{diagnostics_command::collect_buffer_diagnostics, SlashCommand, SlashCommandOutput};
use anyhow::{anyhow, Context as _, Result};
use assistant_slash_command::{
AfterCompletion, ArgumentCompletion, SlashCommand, SlashCommandContent, SlashCommandEvent,
SlashCommandOutput, SlashCommandOutputSection, SlashCommandResult,
};
use futures::channel::mpsc;
use assistant_slash_command::{AfterCompletion, ArgumentCompletion, SlashCommandOutputSection};
use fuzzy::PathMatch;
use gpui::{AppContext, Model, Task, View, WeakView};
use language::{BufferSnapshot, CodeLabel, HighlightId, LineEnding, LspAdapterDelegate};
use project::{PathMatchCandidateSet, Project};
use serde::{Deserialize, Serialize};
use smol::stream::StreamExt;
use std::{
fmt::Write,
ops::{Range, RangeInclusive},
@@ -20,8 +16,6 @@ use ui::prelude::*;
use util::ResultExt;
use workspace::Workspace;
use crate::slash_command::diagnostics_command::collect_buffer_diagnostics;
pub(crate) struct FileSlashCommand;
impl FileSlashCommand {
@@ -187,7 +181,7 @@ impl SlashCommand for FileSlashCommand {
workspace: WeakView<Workspace>,
_delegate: Option<Arc<dyn LspAdapterDelegate>>,
cx: &mut WindowContext,
) -> Task<SlashCommandResult> {
) -> Task<Result<SlashCommandOutput>> {
let Some(workspace) = workspace.upgrade() else {
return Task::ready(Err(anyhow!("workspace was dropped")));
};
@@ -204,7 +198,7 @@ fn collect_files(
project: Model<Project>,
glob_inputs: &[String],
cx: &mut AppContext,
) -> Task<SlashCommandResult> {
) -> Task<Result<SlashCommandOutput>> {
let Ok(matchers) = glob_inputs
.into_iter()
.map(|glob_input| {
@@ -223,11 +217,11 @@ fn collect_files(
.map(|worktree| worktree.read(cx).snapshot())
.collect::<Vec<_>>();
let (events_tx, events_rx) = mpsc::unbounded();
cx.spawn(|mut cx| async move {
let mut output = SlashCommandOutput::default();
for snapshot in snapshots {
let worktree_id = snapshot.id();
let mut directory_stack: Vec<Arc<Path>> = Vec::new();
let mut directory_stack: Vec<(Arc<Path>, String, usize)> = Vec::new();
let mut folded_directory_names_stack = Vec::new();
let mut is_top_level_directory = true;
@@ -243,19 +237,17 @@ fn collect_files(
continue;
}
while let Some(dir) = directory_stack.last() {
while let Some((dir, _, _)) = directory_stack.last() {
if entry.path.starts_with(dir) {
break;
}
directory_stack.pop().unwrap();
events_tx
.unbounded_send(Ok(SlashCommandEvent::EndSection { metadata: None }))?;
events_tx.unbounded_send(Ok(SlashCommandEvent::Content(
SlashCommandContent::Text {
text: "\n".into(),
run_commands_in_text: false,
},
)))?;
let (_, entry_name, start) = directory_stack.pop().unwrap();
output.sections.push(build_entry_output_section(
start..output.text.len().saturating_sub(1),
Some(&PathBuf::from(entry_name)),
true,
None,
));
}
let filename = entry
@@ -287,46 +279,23 @@ fn collect_files(
continue;
}
let prefix_paths = folded_directory_names_stack.drain(..).as_slice().join("/");
let entry_start = output.text.len();
if prefix_paths.is_empty() {
let label = if is_top_level_directory {
if is_top_level_directory {
output
.text
.push_str(&path_including_worktree_name.to_string_lossy());
is_top_level_directory = false;
path_including_worktree_name.to_string_lossy().to_string()
} else {
filename
};
events_tx.unbounded_send(Ok(SlashCommandEvent::StartSection {
icon: IconName::Folder,
label: label.clone().into(),
metadata: None,
}))?;
events_tx.unbounded_send(Ok(SlashCommandEvent::Content(
SlashCommandContent::Text {
text: label,
run_commands_in_text: false,
},
)))?;
directory_stack.push(entry.path.clone());
output.text.push_str(&filename);
}
directory_stack.push((entry.path.clone(), filename, entry_start));
} else {
let entry_name = format!("{}/{}", prefix_paths, &filename);
events_tx.unbounded_send(Ok(SlashCommandEvent::StartSection {
icon: IconName::Folder,
label: entry_name.clone().into(),
metadata: None,
}))?;
events_tx.unbounded_send(Ok(SlashCommandEvent::Content(
SlashCommandContent::Text {
text: entry_name,
run_commands_in_text: false,
},
)))?;
directory_stack.push(entry.path.clone());
output.text.push_str(&entry_name);
directory_stack.push((entry.path.clone(), entry_name, entry_start));
}
events_tx.unbounded_send(Ok(SlashCommandEvent::Content(
SlashCommandContent::Text {
text: "\n".into(),
run_commands_in_text: false,
},
)))?;
output.text.push('\n');
} else if entry.is_file() {
let Some(open_buffer_task) = project_handle
.update(&mut cx, |project, cx| {
@@ -337,7 +306,6 @@ fn collect_files(
continue;
};
if let Some(buffer) = open_buffer_task.await.log_err() {
let mut output = SlashCommandOutput::default();
let snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot())?;
append_buffer_to_output(
&snapshot,
@@ -345,19 +313,32 @@ fn collect_files(
&mut output,
)
.log_err();
let mut buffer_events = output.to_event_stream();
while let Some(event) = buffer_events.next().await {
events_tx.unbounded_send(event)?;
}
}
}
}
while let Some(_) = directory_stack.pop() {
events_tx.unbounded_send(Ok(SlashCommandEvent::EndSection { metadata: None }))?;
while let Some((dir, entry, start)) = directory_stack.pop() {
if directory_stack.is_empty() {
let mut root_path = PathBuf::new();
root_path.push(snapshot.root_name());
root_path.push(&dir);
output.sections.push(build_entry_output_section(
start..output.text.len(),
Some(&root_path),
true,
None,
));
} else {
output.sections.push(build_entry_output_section(
start..output.text.len(),
Some(&PathBuf::from(entry.as_str())),
true,
None,
));
}
}
}
Ok(events_rx.boxed())
Ok(output)
})
}
@@ -543,10 +524,8 @@ pub fn append_buffer_to_output(
#[cfg(test)]
mod test {
use assistant_slash_command::SlashCommandOutput;
use fs::FakeFs;
use gpui::TestAppContext;
use pretty_assertions::assert_eq;
use project::Project;
use serde_json::json;
use settings::SettingsStore;
@@ -594,9 +573,6 @@ mod test {
.update(|cx| collect_files(project.clone(), &["root/dir".to_string()], cx))
.await
.unwrap();
let result_1 = SlashCommandOutput::from_event_stream(result_1)
.await
.unwrap();
assert!(result_1.text.starts_with("root/dir"));
// 4 files + 2 directories
@@ -606,9 +582,6 @@ mod test {
.update(|cx| collect_files(project.clone(), &["root/dir/".to_string()], cx))
.await
.unwrap();
let result_2 = SlashCommandOutput::from_event_stream(result_2)
.await
.unwrap();
assert_eq!(result_1, result_2);
@@ -616,7 +589,6 @@ mod test {
.update(|cx| collect_files(project.clone(), &["root/dir*".to_string()], cx))
.await
.unwrap();
let result = SlashCommandOutput::from_event_stream(result).await.unwrap();
assert!(result.text.starts_with("root/dir"));
// 5 files + 2 directories
@@ -663,7 +635,6 @@ mod test {
.update(|cx| collect_files(project.clone(), &["zed/assets/themes".to_string()], cx))
.await
.unwrap();
let result = SlashCommandOutput::from_event_stream(result).await.unwrap();
// Sanity check
assert!(result.text.starts_with("zed/assets/themes\n"));
@@ -725,7 +696,6 @@ mod test {
.update(|cx| collect_files(project.clone(), &["zed/assets/themes".to_string()], cx))
.await
.unwrap();
let result = SlashCommandOutput::from_event_stream(result).await.unwrap();
assert!(result.text.starts_with("zed/assets/themes\n"));
assert_eq!(result.sections[0].label, "zed/assets/themes/LICENSE");
@@ -746,8 +716,6 @@ mod test {
assert_eq!(result.sections[6].label, "summercamp");
assert_eq!(result.sections[7].label, "zed/assets/themes");
assert_eq!(result.text, "zed/assets/themes\n```zed/assets/themes/LICENSE\n1\n```\n\nsummercamp\n```zed/assets/themes/summercamp/LICENSE\n1\n```\n\nsubdir\n```zed/assets/themes/summercamp/subdir/LICENSE\n1\n```\n\nsubsubdir\n```zed/assets/themes/summercamp/subdir/subsubdir/LICENSE\n3\n```\n\n");
// Ensure that the project lasts until after the last await
drop(project);
}

View File

@@ -4,7 +4,6 @@ use std::sync::Arc;
use anyhow::Result;
use assistant_slash_command::{
ArgumentCompletion, SlashCommand, SlashCommandOutput, SlashCommandOutputSection,
SlashCommandResult,
};
use chrono::Local;
use gpui::{Task, WeakView};
@@ -49,7 +48,7 @@ impl SlashCommand for NowSlashCommand {
_workspace: WeakView<Workspace>,
_delegate: Option<Arc<dyn LspAdapterDelegate>>,
_cx: &mut WindowContext,
) -> Task<SlashCommandResult> {
) -> Task<Result<SlashCommandOutput>> {
let now = Local::now();
let text = format!("Today is {now}.", now = now.to_rfc2822());
let range = 0..text.len();
@@ -63,7 +62,6 @@ impl SlashCommand for NowSlashCommand {
metadata: None,
}],
run_commands_in_text: false,
}
.to_event_stream()))
}))
}
}

View File

@@ -4,7 +4,7 @@ use super::{
};
use crate::PromptBuilder;
use anyhow::{anyhow, Result};
use assistant_slash_command::{ArgumentCompletion, SlashCommandOutputSection, SlashCommandResult};
use assistant_slash_command::{ArgumentCompletion, SlashCommandOutputSection};
use feature_flags::FeatureFlag;
use gpui::{AppContext, Task, WeakView, WindowContext};
use language::{Anchor, CodeLabel, LspAdapterDelegate};
@@ -76,7 +76,7 @@ impl SlashCommand for ProjectSlashCommand {
workspace: WeakView<Workspace>,
_delegate: Option<Arc<dyn LspAdapterDelegate>>,
cx: &mut WindowContext,
) -> Task<SlashCommandResult> {
) -> Task<Result<SlashCommandOutput>> {
let model_registry = LanguageModelRegistry::read_global(cx);
let current_model = model_registry.active_model();
let prompt_builder = self.prompt_builder.clone();
@@ -162,8 +162,7 @@ impl SlashCommand for ProjectSlashCommand {
text: output,
sections,
run_commands_in_text: true,
}
.to_event_stream())
})
})
.await
})

View File

@@ -1,9 +1,7 @@
use super::{SlashCommand, SlashCommandOutput};
use crate::prompt_library::PromptStore;
use anyhow::{anyhow, Context, Result};
use assistant_slash_command::{
ArgumentCompletion, SlashCommand, SlashCommandOutput, SlashCommandOutputSection,
SlashCommandResult,
};
use assistant_slash_command::{ArgumentCompletion, SlashCommandOutputSection};
use gpui::{Task, WeakView};
use language::{BufferSnapshot, LspAdapterDelegate};
use std::sync::{atomic::AtomicBool, Arc};
@@ -63,7 +61,7 @@ impl SlashCommand for PromptSlashCommand {
_workspace: WeakView<Workspace>,
_delegate: Option<Arc<dyn LspAdapterDelegate>>,
cx: &mut WindowContext,
) -> Task<SlashCommandResult> {
) -> Task<Result<SlashCommandOutput>> {
let title = arguments.to_owned().join(" ");
if title.trim().is_empty() {
return Task::ready(Err(anyhow!("missing prompt name")));
@@ -102,8 +100,7 @@ impl SlashCommand for PromptSlashCommand {
metadata: None,
}],
run_commands_in_text: true,
}
.to_event_stream())
})
})
}
}

View File

@@ -1,8 +1,10 @@
use anyhow::Result;
use assistant_slash_command::{
ArgumentCompletion, SlashCommand, SlashCommandOutput, SlashCommandOutputSection,
SlashCommandResult,
use super::{
create_label_for_command,
file_command::{build_entry_output_section, codeblock_fence_for_path},
SlashCommand, SlashCommandOutput,
};
use anyhow::Result;
use assistant_slash_command::{ArgumentCompletion, SlashCommandOutputSection};
use feature_flags::FeatureFlag;
use gpui::{AppContext, Task, WeakView};
use language::{CodeLabel, LspAdapterDelegate};
@@ -14,9 +16,6 @@ use std::{
use ui::{prelude::*, IconName};
use workspace::Workspace;
use crate::slash_command::create_label_for_command;
use crate::slash_command::file_command::{build_entry_output_section, codeblock_fence_for_path};
pub(crate) struct SearchSlashCommandFeatureFlag;
impl FeatureFlag for SearchSlashCommandFeatureFlag {
@@ -64,7 +63,7 @@ impl SlashCommand for SearchSlashCommand {
workspace: WeakView<Workspace>,
_delegate: Option<Arc<dyn LspAdapterDelegate>>,
cx: &mut WindowContext,
) -> Task<SlashCommandResult> {
) -> Task<Result<SlashCommandOutput>> {
let Some(workspace) = workspace.upgrade() else {
return Task::ready(Err(anyhow::anyhow!("workspace was dropped")));
};
@@ -130,7 +129,6 @@ impl SlashCommand for SearchSlashCommand {
sections,
run_commands_in_text: false,
}
.to_event_stream()
})
.await;

View File

@@ -1,8 +1,6 @@
use super::{SlashCommand, SlashCommandOutput};
use anyhow::{anyhow, Context as _, Result};
use assistant_slash_command::{
ArgumentCompletion, SlashCommand, SlashCommandOutput, SlashCommandOutputSection,
SlashCommandResult,
};
use assistant_slash_command::{ArgumentCompletion, SlashCommandOutputSection};
use editor::Editor;
use gpui::{Task, WeakView};
use language::{BufferSnapshot, LspAdapterDelegate};
@@ -48,7 +46,7 @@ impl SlashCommand for OutlineSlashCommand {
workspace: WeakView<Workspace>,
_delegate: Option<Arc<dyn LspAdapterDelegate>>,
cx: &mut WindowContext,
) -> Task<SlashCommandResult> {
) -> Task<Result<SlashCommandOutput>> {
let output = workspace.update(cx, |workspace, cx| {
let Some(active_item) = workspace.active_item(cx) else {
return Task::ready(Err(anyhow!("no active tab")));
@@ -85,8 +83,7 @@ impl SlashCommand for OutlineSlashCommand {
}],
text: outline_text,
run_commands_in_text: false,
}
.to_event_stream())
})
})
});

View File

@@ -1,8 +1,6 @@
use super::{file_command::append_buffer_to_output, SlashCommand, SlashCommandOutput};
use anyhow::{Context, Result};
use assistant_slash_command::{
ArgumentCompletion, SlashCommand, SlashCommandOutput, SlashCommandOutputSection,
SlashCommandResult,
};
use assistant_slash_command::{ArgumentCompletion, SlashCommandOutputSection};
use collections::{HashMap, HashSet};
use editor::Editor;
use futures::future::join_all;
@@ -16,8 +14,6 @@ use ui::{ActiveTheme, WindowContext};
use util::ResultExt;
use workspace::Workspace;
use crate::slash_command::file_command::append_buffer_to_output;
pub(crate) struct TabSlashCommand;
const ALL_TABS_COMPLETION_ITEM: &str = "all";
@@ -136,7 +132,7 @@ impl SlashCommand for TabSlashCommand {
workspace: WeakView<Workspace>,
_delegate: Option<Arc<dyn LspAdapterDelegate>>,
cx: &mut WindowContext,
) -> Task<SlashCommandResult> {
) -> Task<Result<SlashCommandOutput>> {
let tab_items_search = tab_items_for_queries(
Some(workspace),
arguments,
@@ -150,7 +146,7 @@ impl SlashCommand for TabSlashCommand {
for (full_path, buffer, _) in tab_items_search.await? {
append_buffer_to_output(&buffer, full_path.as_deref(), &mut output).log_err();
}
Ok(output.to_event_stream())
Ok(output)
})
}
}

View File

@@ -4,7 +4,6 @@ use std::sync::Arc;
use anyhow::Result;
use assistant_slash_command::{
ArgumentCompletion, SlashCommand, SlashCommandOutput, SlashCommandOutputSection,
SlashCommandResult,
};
use gpui::{AppContext, Task, View, WeakView};
use language::{BufferSnapshot, CodeLabel, LspAdapterDelegate};
@@ -63,7 +62,7 @@ impl SlashCommand for TerminalSlashCommand {
workspace: WeakView<Workspace>,
_delegate: Option<Arc<dyn LspAdapterDelegate>>,
cx: &mut WindowContext,
) -> Task<SlashCommandResult> {
) -> Task<Result<SlashCommandOutput>> {
let Some(workspace) = workspace.upgrade() else {
return Task::ready(Err(anyhow::anyhow!("workspace was dropped")));
};
@@ -97,8 +96,7 @@ impl SlashCommand for TerminalSlashCommand {
metadata: None,
}],
run_commands_in_text: false,
}
.to_event_stream()))
}))
}
}

View File

@@ -1,17 +1,17 @@
use std::sync::atomic::AtomicBool;
use crate::prompts::PromptBuilder;
use std::sync::Arc;
use std::sync::atomic::AtomicBool;
use anyhow::Result;
use assistant_slash_command::{
ArgumentCompletion, SlashCommand, SlashCommandOutput, SlashCommandOutputSection,
SlashCommandResult,
};
use gpui::{Task, WeakView};
use language::{BufferSnapshot, LspAdapterDelegate};
use ui::prelude::*;
use workspace::Workspace;
use crate::prompts::PromptBuilder;
use workspace::Workspace;
pub(crate) struct WorkflowSlashCommand {
prompt_builder: Arc<PromptBuilder>,
@@ -60,7 +60,7 @@ impl SlashCommand for WorkflowSlashCommand {
_workspace: WeakView<Workspace>,
_delegate: Option<Arc<dyn LspAdapterDelegate>>,
cx: &mut WindowContext,
) -> Task<SlashCommandResult> {
) -> Task<Result<SlashCommandOutput>> {
let prompt_builder = self.prompt_builder.clone();
cx.spawn(|_cx| async move {
let text = prompt_builder.generate_workflow_prompt()?;
@@ -75,8 +75,7 @@ impl SlashCommand for WorkflowSlashCommand {
metadata: None,
}],
run_commands_in_text: false,
}
.to_event_stream())
})
})
}
}

View File

@@ -17,8 +17,7 @@ use gpui::{
};
use language::Buffer;
use language_model::{
logging::report_assistant_event, LanguageModelRegistry, LanguageModelRequest,
LanguageModelRequestMessage, Role,
LanguageModelRegistry, LanguageModelRequest, LanguageModelRequestMessage, Role,
};
use settings::Settings;
use std::{
@@ -307,33 +306,6 @@ impl TerminalInlineAssistant {
this.focus_handle(cx).focus(cx);
})
.log_err();
if let Some(model) = LanguageModelRegistry::read_global(cx).active_model() {
let codegen = assist.codegen.read(cx);
let executor = cx.background_executor().clone();
report_assistant_event(
AssistantEvent {
conversation_id: None,
kind: AssistantKind::InlineTerminal,
message_id: codegen.message_id.clone(),
phase: if undo {
AssistantPhase::Rejected
} else {
AssistantPhase::Accepted
},
model: model.telemetry_id(),
model_provider: model.provider_id().to_string(),
response_latency: None,
error_message: None,
language_name: None,
},
codegen.telemetry.clone(),
cx.http_client(),
model.api_key(cx),
&executor,
);
}
assist.codegen.update(cx, |codegen, cx| {
if undo {
codegen.undo(cx);
@@ -1044,7 +1016,6 @@ pub struct Codegen {
telemetry: Option<Arc<Telemetry>>,
terminal: Model<Terminal>,
generation: Task<()>,
message_id: Option<String>,
transaction: Option<TerminalTransaction>,
}
@@ -1055,7 +1026,6 @@ impl Codegen {
telemetry,
status: CodegenStatus::Idle,
generation: Task::ready(()),
message_id: None,
transaction: None,
}
}
@@ -1065,8 +1035,6 @@ impl Codegen {
return;
};
let model_api_key = model.api_key(cx);
let http_client = cx.http_client();
let telemetry = self.telemetry.clone();
self.status = CodegenStatus::Pending;
self.transaction = Some(TerminalTransaction::start(self.terminal.clone()));
@@ -1075,61 +1043,43 @@ impl Codegen {
let model_provider_id = model.provider_id();
let response = model.stream_completion_text(prompt, &cx).await;
let generate = async {
let message_id = response
.as_ref()
.ok()
.and_then(|response| response.message_id.clone());
let (mut hunks_tx, mut hunks_rx) = mpsc::channel(1);
let task = cx.background_executor().spawn({
let message_id = message_id.clone();
let executor = cx.background_executor().clone();
async move {
let mut response_latency = None;
let request_start = Instant::now();
let task = async {
let mut chunks = response?.stream;
while let Some(chunk) = chunks.next().await {
if response_latency.is_none() {
response_latency = Some(request_start.elapsed());
}
let chunk = chunk?;
hunks_tx.send(chunk).await?;
let task = cx.background_executor().spawn(async move {
let mut response_latency = None;
let request_start = Instant::now();
let task = async {
let mut chunks = response?;
while let Some(chunk) = chunks.next().await {
if response_latency.is_none() {
response_latency = Some(request_start.elapsed());
}
let chunk = chunk?;
hunks_tx.send(chunk).await?;
}
anyhow::Ok(())
};
let result = task.await;
let error_message = result.as_ref().err().map(|error| error.to_string());
report_assistant_event(
AssistantEvent {
conversation_id: None,
kind: AssistantKind::InlineTerminal,
message_id,
phase: AssistantPhase::Response,
model: model_telemetry_id,
model_provider: model_provider_id.to_string(),
response_latency,
error_message,
language_name: None,
},
telemetry,
http_client,
model_api_key,
&executor,
);
result?;
anyhow::Ok(())
}
});
};
this.update(&mut cx, |this, _| {
this.message_id = message_id;
})?;
let result = task.await;
let error_message = result.as_ref().err().map(|error| error.to_string());
if let Some(telemetry) = telemetry {
telemetry.report_assistant_event(AssistantEvent {
conversation_id: None,
kind: AssistantKind::Inline,
phase: AssistantPhase::Response,
model: model_telemetry_id,
model_provider: model_provider_id.to_string(),
response_latency,
error_message,
language_name: None,
});
}
result?;
anyhow::Ok(())
});
while let Some(hunk) = hunks_rx.next().await {
this.update(&mut cx, |this, cx| {

View File

@@ -15,15 +15,9 @@ path = "src/assistant_slash_command.rs"
anyhow.workspace = true
collections.workspace = true
derive_more.workspace = true
futures.workspace = true
gpui.workspace = true
language.workspace = true
parking_lot.workspace = true
serde.workspace = true
serde_json.workspace = true
workspace.workspace = true
[dev-dependencies]
gpui = { workspace = true, features = ["test-support"] }
pretty_assertions.workspace = true
workspace = { workspace = true, features = ["test-support"] }

View File

@@ -1,8 +1,6 @@
mod slash_command_registry;
use anyhow::Result;
use futures::stream::{self, BoxStream};
use futures::StreamExt;
use gpui::{AnyElement, AppContext, ElementId, SharedString, Task, WeakView, WindowContext};
use language::{BufferSnapshot, CodeLabel, LspAdapterDelegate, OffsetRangeExt};
use serde::{Deserialize, Serialize};
@@ -58,8 +56,6 @@ pub struct ArgumentCompletion {
pub replace_previous_arguments: bool,
}
pub type SlashCommandResult = Result<BoxStream<'static, Result<SlashCommandEvent>>>;
pub trait SlashCommand: 'static + Send + Sync {
fn name(&self) -> String;
fn label(&self, _cx: &AppContext) -> CodeLabel {
@@ -91,7 +87,7 @@ pub trait SlashCommand: 'static + Send + Sync {
// perhaps another kind of delegate is needed here.
delegate: Option<Arc<dyn LspAdapterDelegate>>,
cx: &mut WindowContext,
) -> Task<SlashCommandResult>;
) -> Task<Result<SlashCommandOutput>>;
}
pub type RenderFoldPlaceholder = Arc<
@@ -100,146 +96,13 @@ pub type RenderFoldPlaceholder = Arc<
+ Fn(ElementId, Arc<dyn Fn(&mut WindowContext)>, &mut WindowContext) -> AnyElement,
>;
#[derive(Debug, PartialEq, Eq)]
pub enum SlashCommandContent {
Text {
text: String,
run_commands_in_text: bool,
},
}
#[derive(Debug, PartialEq, Eq)]
pub enum SlashCommandEvent {
StartSection {
icon: IconName,
label: SharedString,
metadata: Option<serde_json::Value>,
},
Content(SlashCommandContent),
EndSection {
metadata: Option<serde_json::Value>,
},
}
#[derive(Debug, Default, PartialEq, Clone)]
#[derive(Debug, Default, PartialEq)]
pub struct SlashCommandOutput {
pub text: String,
pub sections: Vec<SlashCommandOutputSection<usize>>,
pub run_commands_in_text: bool,
}
impl SlashCommandOutput {
pub fn ensure_valid_section_ranges(&mut self) {
for section in &mut self.sections {
section.range.start = section.range.start.min(self.text.len());
section.range.end = section.range.end.min(self.text.len());
while !self.text.is_char_boundary(section.range.start) {
section.range.start -= 1;
}
while !self.text.is_char_boundary(section.range.end) {
section.range.end += 1;
}
}
}
/// Returns this [`SlashCommandOutput`] as a stream of [`SlashCommandEvent`]s.
pub fn to_event_stream(mut self) -> BoxStream<'static, Result<SlashCommandEvent>> {
self.ensure_valid_section_ranges();
let mut events = Vec::new();
let mut last_section_end = 0;
for section in self.sections {
if last_section_end < section.range.start {
events.push(Ok(SlashCommandEvent::Content(SlashCommandContent::Text {
text: self
.text
.get(last_section_end..section.range.start)
.unwrap_or_default()
.to_string(),
run_commands_in_text: self.run_commands_in_text,
})));
}
events.push(Ok(SlashCommandEvent::StartSection {
icon: section.icon,
label: section.label,
metadata: section.metadata.clone(),
}));
events.push(Ok(SlashCommandEvent::Content(SlashCommandContent::Text {
text: self
.text
.get(section.range.start..section.range.end)
.unwrap_or_default()
.to_string(),
run_commands_in_text: self.run_commands_in_text,
})));
events.push(Ok(SlashCommandEvent::EndSection {
metadata: section.metadata,
}));
last_section_end = section.range.end;
}
if last_section_end < self.text.len() {
events.push(Ok(SlashCommandEvent::Content(SlashCommandContent::Text {
text: self.text[last_section_end..].to_string(),
run_commands_in_text: self.run_commands_in_text,
})));
}
stream::iter(events).boxed()
}
pub async fn from_event_stream(
mut events: BoxStream<'static, Result<SlashCommandEvent>>,
) -> Result<SlashCommandOutput> {
let mut output = SlashCommandOutput::default();
let mut section_stack = Vec::new();
while let Some(event) = events.next().await {
match event? {
SlashCommandEvent::StartSection {
icon,
label,
metadata,
} => {
let start = output.text.len();
section_stack.push(SlashCommandOutputSection {
range: start..start,
icon,
label,
metadata,
});
}
SlashCommandEvent::Content(SlashCommandContent::Text {
text,
run_commands_in_text,
}) => {
output.text.push_str(&text);
output.run_commands_in_text = run_commands_in_text;
if let Some(section) = section_stack.last_mut() {
section.range.end = output.text.len();
}
}
SlashCommandEvent::EndSection { metadata } => {
if let Some(mut section) = section_stack.pop() {
section.metadata = metadata;
output.sections.push(section);
}
}
}
}
while let Some(section) = section_stack.pop() {
output.sections.push(section);
}
Ok(output)
}
}
#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
pub struct SlashCommandOutputSection<T> {
pub range: Range<T>,
@@ -253,243 +116,3 @@ impl SlashCommandOutputSection<language::Anchor> {
self.range.start.is_valid(buffer) && !self.range.to_offset(buffer).is_empty()
}
}
#[cfg(test)]
mod tests {
use pretty_assertions::assert_eq;
use serde_json::json;
use super::*;
#[gpui::test]
async fn test_slash_command_output_to_events_round_trip() {
// Test basic output consisting of a single section.
{
let text = "Hello, world!".to_string();
let range = 0..text.len();
let output = SlashCommandOutput {
text,
sections: vec![SlashCommandOutputSection {
range,
icon: IconName::Code,
label: "Section 1".into(),
metadata: None,
}],
run_commands_in_text: false,
};
let events = output.clone().to_event_stream().collect::<Vec<_>>().await;
let events = events
.into_iter()
.filter_map(|event| event.ok())
.collect::<Vec<_>>();
assert_eq!(
events,
vec![
SlashCommandEvent::StartSection {
icon: IconName::Code,
label: "Section 1".into(),
metadata: None
},
SlashCommandEvent::Content(SlashCommandContent::Text {
text: "Hello, world!".into(),
run_commands_in_text: false
}),
SlashCommandEvent::EndSection { metadata: None }
]
);
let new_output =
SlashCommandOutput::from_event_stream(output.clone().to_event_stream())
.await
.unwrap();
assert_eq!(new_output, output);
}
// Test output where the sections do not comprise all of the text.
{
let text = "Apple\nCucumber\nBanana\n".to_string();
let output = SlashCommandOutput {
text,
sections: vec![
SlashCommandOutputSection {
range: 0..6,
icon: IconName::Check,
label: "Fruit".into(),
metadata: None,
},
SlashCommandOutputSection {
range: 15..22,
icon: IconName::Check,
label: "Fruit".into(),
metadata: None,
},
],
run_commands_in_text: false,
};
let events = output.clone().to_event_stream().collect::<Vec<_>>().await;
let events = events
.into_iter()
.filter_map(|event| event.ok())
.collect::<Vec<_>>();
assert_eq!(
events,
vec![
SlashCommandEvent::StartSection {
icon: IconName::Check,
label: "Fruit".into(),
metadata: None
},
SlashCommandEvent::Content(SlashCommandContent::Text {
text: "Apple\n".into(),
run_commands_in_text: false
}),
SlashCommandEvent::EndSection { metadata: None },
SlashCommandEvent::Content(SlashCommandContent::Text {
text: "Cucumber\n".into(),
run_commands_in_text: false
}),
SlashCommandEvent::StartSection {
icon: IconName::Check,
label: "Fruit".into(),
metadata: None
},
SlashCommandEvent::Content(SlashCommandContent::Text {
text: "Banana\n".into(),
run_commands_in_text: false
}),
SlashCommandEvent::EndSection { metadata: None }
]
);
let new_output =
SlashCommandOutput::from_event_stream(output.clone().to_event_stream())
.await
.unwrap();
assert_eq!(new_output, output);
}
// Test output consisting of multiple sections.
{
let text = "Line 1\nLine 2\nLine 3\nLine 4\n".to_string();
let output = SlashCommandOutput {
text,
sections: vec![
SlashCommandOutputSection {
range: 0..6,
icon: IconName::FileCode,
label: "Section 1".into(),
metadata: Some(json!({ "a": true })),
},
SlashCommandOutputSection {
range: 7..13,
icon: IconName::FileDoc,
label: "Section 2".into(),
metadata: Some(json!({ "b": true })),
},
SlashCommandOutputSection {
range: 14..20,
icon: IconName::FileGit,
label: "Section 3".into(),
metadata: Some(json!({ "c": true })),
},
SlashCommandOutputSection {
range: 21..27,
icon: IconName::FileToml,
label: "Section 4".into(),
metadata: Some(json!({ "d": true })),
},
],
run_commands_in_text: false,
};
let events = output.clone().to_event_stream().collect::<Vec<_>>().await;
let events = events
.into_iter()
.filter_map(|event| event.ok())
.collect::<Vec<_>>();
assert_eq!(
events,
vec![
SlashCommandEvent::StartSection {
icon: IconName::FileCode,
label: "Section 1".into(),
metadata: Some(json!({ "a": true }))
},
SlashCommandEvent::Content(SlashCommandContent::Text {
text: "Line 1".into(),
run_commands_in_text: false
}),
SlashCommandEvent::EndSection {
metadata: Some(json!({ "a": true }))
},
SlashCommandEvent::Content(SlashCommandContent::Text {
text: "\n".into(),
run_commands_in_text: false
}),
SlashCommandEvent::StartSection {
icon: IconName::FileDoc,
label: "Section 2".into(),
metadata: Some(json!({ "b": true }))
},
SlashCommandEvent::Content(SlashCommandContent::Text {
text: "Line 2".into(),
run_commands_in_text: false
}),
SlashCommandEvent::EndSection {
metadata: Some(json!({ "b": true }))
},
SlashCommandEvent::Content(SlashCommandContent::Text {
text: "\n".into(),
run_commands_in_text: false
}),
SlashCommandEvent::StartSection {
icon: IconName::FileGit,
label: "Section 3".into(),
metadata: Some(json!({ "c": true }))
},
SlashCommandEvent::Content(SlashCommandContent::Text {
text: "Line 3".into(),
run_commands_in_text: false
}),
SlashCommandEvent::EndSection {
metadata: Some(json!({ "c": true }))
},
SlashCommandEvent::Content(SlashCommandContent::Text {
text: "\n".into(),
run_commands_in_text: false
}),
SlashCommandEvent::StartSection {
icon: IconName::FileToml,
label: "Section 4".into(),
metadata: Some(json!({ "d": true }))
},
SlashCommandEvent::Content(SlashCommandContent::Text {
text: "Line 4".into(),
run_commands_in_text: false
}),
SlashCommandEvent::EndSection {
metadata: Some(json!({ "d": true }))
},
SlashCommandEvent::Content(SlashCommandContent::Text {
text: "\n".into(),
run_commands_in_text: false
}),
]
);
let new_output =
SlashCommandOutput::from_event_stream(output.clone().to_event_stream())
.await
.unwrap();
assert_eq!(new_output, output);
}
}
}
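As a minimal sketch (assumed, not part of the diff) of how a command built against the event-based API above hands back its result, using only the types shown in this file:

// Hypothetical command body: build a SlashCommandOutput, then return its
// event stream, matching the Task<SlashCommandResult> signature above.
let output = SlashCommandOutput {
    text: "Hello, world!".into(),
    sections: vec![SlashCommandOutputSection {
        range: 0..13,
        icon: IconName::Code,
        label: "Greeting".into(),
        metadata: None,
    }],
    run_commands_in_text: false,
};
Task::ready(Ok(output.to_event_stream()))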

View File

@@ -32,5 +32,4 @@ settings.workspace = true
smol.workspace = true
tempfile.workspace = true
util.workspace = true
which.workspace = true
workspace.workspace = true

View File

@@ -33,7 +33,6 @@ use std::{
};
use update_notification::UpdateNotification;
use util::ResultExt;
use which::which;
use workspace::notifications::NotificationId;
use workspace::Workspace;
@@ -474,39 +473,6 @@ impl AutoUpdater {
Ok(version_path)
}
pub async fn get_latest_remote_server_release_url(
os: &str,
arch: &str,
mut release_channel: ReleaseChannel,
cx: &mut AsyncAppContext,
) -> Result<(String, String)> {
let this = cx.update(|cx| {
cx.default_global::<GlobalAutoUpdate>()
.0
.clone()
.ok_or_else(|| anyhow!("auto-update not initialized"))
})??;
if release_channel == ReleaseChannel::Dev {
release_channel = ReleaseChannel::Nightly;
}
let release = Self::get_latest_release(
&this,
"zed-remote-server",
os,
arch,
Some(release_channel),
cx,
)
.await?;
let update_request_body = build_remote_server_update_request_body(cx)?;
let body = serde_json::to_string(&update_request_body)?;
Ok((release.url, body))
}
async fn get_latest_release(
this: &Model<Self>,
asset: &str,
@@ -594,12 +560,6 @@ impl AutoUpdater {
"linux" => Ok("zed.tar.gz"),
_ => Err(anyhow!("not supported: {:?}", OS)),
}?;
anyhow::ensure!(
which("rsync").is_ok(),
"Aborting. Could not find rsync which is required for auto-updates."
);
let downloaded_asset = temp_dir.path().join(filename);
download_release(&downloaded_asset, release, client, &cx).await?;
@@ -662,15 +622,6 @@ async fn download_remote_server_binary(
cx: &AsyncAppContext,
) -> Result<()> {
let mut target_file = File::create(&target_path).await?;
let update_request_body = build_remote_server_update_request_body(cx)?;
let request_body = AsyncBody::from(serde_json::to_string(&update_request_body)?);
let mut response = client.get(&release.url, request_body, true).await?;
smol::io::copy(response.body_mut(), &mut target_file).await?;
Ok(())
}
fn build_remote_server_update_request_body(cx: &AsyncAppContext) -> Result<UpdateRequestBody> {
let (installation_id, release_channel, telemetry_enabled, is_staff) = cx.update(|cx| {
let telemetry = Client::global(cx).telemetry().clone();
let is_staff = telemetry.is_staff();
@@ -686,14 +637,17 @@ fn build_remote_server_update_request_body(cx: &AsyncAppContext) -> Result<Updat
is_staff,
)
})?;
Ok(UpdateRequestBody {
let request_body = AsyncBody::from(serde_json::to_string(&UpdateRequestBody {
installation_id,
release_channel,
telemetry: telemetry_enabled,
is_staff,
destination: "remote",
})
})?);
let mut response = client.get(&release.url, request_body, true).await?;
smol::io::copy(response.body_mut(), &mut target_file).await?;
Ok(())
}
async fn download_release(

View File

@@ -1194,15 +1194,26 @@ impl Room {
project: Model<Project>,
cx: &mut ModelContext<Self>,
) -> Task<Result<u64>> {
if let Some(project_id) = project.read(cx).remote_id() {
return Task::ready(Ok(project_id));
}
let request = if let Some(dev_server_project_id) = project.read(cx).dev_server_project_id()
{
self.client.request(proto::ShareProject {
room_id: self.id(),
worktrees: vec![],
dev_server_project_id: Some(dev_server_project_id.0),
is_ssh_project: false,
})
} else {
if let Some(project_id) = project.read(cx).remote_id() {
return Task::ready(Ok(project_id));
}
let request = self.client.request(proto::ShareProject {
room_id: self.id(),
worktrees: project.read(cx).worktree_metadata_protos(cx),
is_ssh_project: project.read(cx).is_via_ssh(),
});
self.client.request(proto::ShareProject {
room_id: self.id(),
worktrees: project.read(cx).worktree_metadata_protos(cx),
dev_server_project_id: None,
is_ssh_project: project.read(cx).is_via_ssh(),
})
};
cx.spawn(|this, mut cx| async move {
let response = request.await?;

View File

@@ -15,6 +15,7 @@ pub enum CliRequest {
urls: Vec<String>,
wait: bool,
open_new_workspace: Option<bool>,
dev_server_token: Option<String>,
env: Option<HashMap<String, String>>,
},
}

View File

@@ -151,12 +151,6 @@ fn main() -> Result<()> {
}
}
if let Some(_) = args.dev_server_token {
return Err(anyhow::anyhow!(
"Dev servers were removed in v0.157.x please upgrade to SSH remoting: https://zed.dev/docs/remote-development"
))?;
}
let sender: JoinHandle<anyhow::Result<()>> = thread::spawn({
let exit_status = exit_status.clone();
move || {
@@ -168,6 +162,7 @@ fn main() -> Result<()> {
urls,
wait: args.wait,
open_new_workspace,
dev_server_token: args.dev_server_token,
env,
})?;

View File

@@ -30,6 +30,7 @@ use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
use settings::{Settings, SettingsSources};
use socks::connect_socks_proxy_stream;
use std::fmt;
use std::pin::Pin;
use std::{
any::TypeId,
@@ -53,6 +54,15 @@ pub use rpc::*;
pub use telemetry_events::Event;
pub use user::*;
#[derive(Debug, Clone, Eq, PartialEq)]
pub struct DevServerToken(pub String);
impl fmt::Display for DevServerToken {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "{}", self.0)
}
}
static ZED_SERVER_URL: LazyLock<Option<String>> =
LazyLock::new(|| std::env::var("ZED_SERVER_URL").ok());
static ZED_RPC_URL: LazyLock<Option<String>> = LazyLock::new(|| std::env::var("ZED_RPC_URL").ok());
@@ -294,14 +304,20 @@ struct ClientState {
}
#[derive(Clone, Debug, Eq, PartialEq)]
pub struct Credentials {
pub user_id: u64,
pub access_token: String,
pub enum Credentials {
DevServer { token: DevServerToken },
User { user_id: u64, access_token: String },
}
impl Credentials {
pub fn authorization_header(&self) -> String {
format!("{} {}", self.user_id, self.access_token)
match self {
Credentials::DevServer { token } => format!("dev-server-token {}", token),
Credentials::User {
user_id,
access_token,
} => format!("{} {}", user_id, access_token),
}
}
}
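For illustration only (values made up, not part of the diff), the two Credentials variants above serialize into the Authorization header like this:

// Hypothetical values, showing the formats produced by authorization_header().
let dev = Credentials::DevServer {
    token: DevServerToken("abc123".to_string()),
};
assert_eq!(dev.authorization_header(), "dev-server-token abc123");

let user = Credentials::User {
    user_id: 42,
    access_token: "secret".to_string(),
};
assert_eq!(user.authorization_header(), "42 secret");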
@@ -584,11 +600,11 @@ impl Client {
}
pub fn user_id(&self) -> Option<u64> {
self.state
.read()
.credentials
.as_ref()
.map(|credentials| credentials.user_id)
if let Some(Credentials::User { user_id, .. }) = self.state.read().credentials.as_ref() {
Some(*user_id)
} else {
None
}
}
pub fn peer_id(&self) -> Option<PeerId> {
@@ -777,6 +793,11 @@ impl Client {
.is_some()
}
pub fn set_dev_server_token(&self, token: DevServerToken) -> &Self {
self.state.write().credentials = Some(Credentials::DevServer { token });
self
}
#[async_recursion(?Send)]
pub async fn authenticate_and_connect(
self: &Arc<Self>,
@@ -827,7 +848,9 @@ impl Client {
}
}
let credentials = credentials.unwrap();
self.set_id(credentials.user_id);
if let Credentials::User { user_id, .. } = &credentials {
self.set_id(*user_id);
}
if was_disconnected {
self.set_status(Status::Connecting, cx);
@@ -843,8 +866,9 @@ impl Client {
Ok(conn) => {
self.state.write().credentials = Some(credentials.clone());
if !read_from_provider && IMPERSONATE_LOGIN.is_none() {
self.credentials_provider.write_credentials(credentials.user_id, credentials.access_token, cx).await.log_err();
if let Credentials::User{user_id, access_token} = credentials {
self.credentials_provider.write_credentials(user_id, access_token, cx).await.log_err();
}
}
futures::select_biased! {
@@ -1277,7 +1301,7 @@ impl Client {
.decrypt_string(&access_token)
.context("failed to decrypt access token")?;
Ok(Credentials {
Ok(Credentials::User {
user_id: user_id.parse()?,
access_token,
})
@@ -1398,7 +1422,7 @@ impl Client {
// Use the admin API token to authenticate as the impersonated user.
api_token.insert_str(0, "ADMIN_TOKEN:");
Ok(Credentials {
Ok(Credentials::User {
user_id: response.user.id,
access_token: api_token,
})
@@ -1643,7 +1667,7 @@ impl CredentialsProvider for DevelopmentCredentialsProvider {
let credentials: DevelopmentCredentials = serde_json::from_slice(&json).log_err()?;
Some(Credentials {
Some(Credentials::User {
user_id: credentials.user_id,
access_token: credentials.access_token,
})
@@ -1697,7 +1721,7 @@ impl CredentialsProvider for KeychainCredentialsProvider {
.await
.log_err()??;
Some(Credentials {
Some(Credentials::User {
user_id: user_id.parse().ok()?,
access_token: String::from_utf8(access_token).ok()?,
})
@@ -1831,7 +1855,7 @@ mod tests {
// Time out when client tries to connect.
client.override_authenticate(move |cx| {
cx.background_executor().spawn(async move {
Ok(Credentials {
Ok(Credentials::User {
user_id,
access_token: "token".into(),
})

View File

@@ -49,7 +49,7 @@ impl FakeServer {
let mut state = state.lock();
state.auth_count += 1;
let access_token = state.access_token.to_string();
Ok(Credentials {
Ok(Credentials::User {
user_id: client_user_id,
access_token,
})
@@ -73,7 +73,7 @@ impl FakeServer {
}
if credentials
!= (Credentials {
!= (Credentials::User {
user_id: client_user_id,
access_token: state.lock().access_token.to_string(),
})

View File

@@ -28,6 +28,9 @@ impl std::fmt::Display for ChannelId {
#[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Clone, Copy)]
pub struct ProjectId(pub u64);
#[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Clone, Copy)]
pub struct DevServerId(pub u64);
#[derive(
Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Clone, Copy, serde::Serialize, serde::Deserialize,
)]

View File

@@ -86,6 +86,7 @@ client = { workspace = true, features = ["test-support"] }
collab_ui = { workspace = true, features = ["test-support"] }
collections = { workspace = true, features = ["test-support"] }
ctor.workspace = true
dev_server_projects.workspace = true
editor = { workspace = true, features = ["test-support"] }
env_logger.workspace = true
file_finder.workspace = true
@@ -93,6 +94,7 @@ fs = { workspace = true, features = ["test-support"] }
git = { workspace = true, features = ["test-support"] }
git_hosting_providers.workspace = true
gpui = { workspace = true, features = ["test-support"] }
headless.workspace = true
hyper.workspace = true
indoc.workspace = true
language = { workspace = true, features = ["test-support"] }

View File

@@ -11,8 +11,7 @@ CREATE TABLE "users" (
"metrics_id" TEXT,
"github_user_id" INTEGER NOT NULL,
"accepted_tos_at" TIMESTAMP WITHOUT TIME ZONE,
"github_user_created_at" TIMESTAMP WITHOUT TIME ZONE,
"custom_llm_monthly_allowance_in_cents" INTEGER
"github_user_created_at" TIMESTAMP WITHOUT TIME ZONE
);
CREATE UNIQUE INDEX "index_users_github_login" ON "users" ("github_login");
CREATE UNIQUE INDEX "index_invite_code_users" ON "users" ("invite_code");

View File

@@ -1 +0,0 @@
alter table users add column custom_llm_monthly_allowance_in_cents integer;

View File

@@ -34,7 +34,7 @@ use crate::{
db::{billing_subscription::StripeSubscriptionStatus, UserId},
llm::db::LlmDatabase,
};
use crate::{AppState, Cents, Error, Result};
use crate::{AppState, Error, Result};
pub fn router() -> Router {
Router::new()
@@ -226,13 +226,6 @@ async fn create_billing_subscription(
))?
};
if app.db.has_active_billing_subscription(user.id).await? {
return Err(Error::http(
StatusCode::CONFLICT,
"user already has an active subscription".into(),
));
}
let customer_id =
if let Some(existing_customer) = app.db.get_billing_customer_by_user_id(user.id).await? {
CustomerId::from_str(&existing_customer.stripe_customer_id)
@@ -662,33 +655,6 @@ async fn handle_customer_subscription_event(
)
.await?;
} else {
// If the user already has an active billing subscription, ignore the
// event and return an `Ok` to signal that it was processed
// successfully.
//
// There is the possibility that this could cause us to not create a
// subscription in the following scenario:
//
// 1. User has an active subscription A
// 2. User cancels subscription A
// 3. User creates a new subscription B
// 4. We process the new subscription B before the cancellation of subscription A
// 5. User ends up with no subscriptions
//
// In theory this situation shouldn't arise as we try to process the events in the order they occur.
if app
.db
.has_active_billing_subscription(billing_customer.user_id)
.await?
{
log::info!(
"user {user_id} already has an active subscription, skipping creation of subscription {subscription_id}",
user_id = billing_customer.user_id,
subscription_id = subscription.id
);
return Ok(());
}
app.db
.create_billing_subscription(&CreateBillingSubscriptionParams {
billing_customer_id: billing_customer.id,
@@ -714,9 +680,7 @@ struct GetMonthlySpendParams {
#[derive(Debug, Serialize)]
struct GetMonthlySpendResponse {
monthly_free_tier_spend_in_cents: u32,
monthly_free_tier_allowance_in_cents: u32,
monthly_spend_in_cents: u32,
monthly_spend_in_cents: i32,
}
async fn get_monthly_spend(
@@ -736,22 +700,13 @@ async fn get_monthly_spend(
));
};
let free_tier = user
.custom_llm_monthly_allowance_in_cents
.map(|allowance| Cents(allowance as u32))
.unwrap_or(FREE_TIER_MONTHLY_SPENDING_LIMIT);
let spending_for_month = llm_db
let monthly_spend = llm_db
.get_user_spending_for_month(user.id, Utc::now())
.await?;
let free_tier_spend = Cents::min(spending_for_month, free_tier);
let monthly_spend = spending_for_month.saturating_sub(free_tier);
.await?
.saturating_sub(FREE_TIER_MONTHLY_SPENDING_LIMIT);
Ok(Json(GetMonthlySpendResponse {
monthly_free_tier_spend_in_cents: free_tier_spend.0,
monthly_free_tier_allowance_in_cents: free_tier.0,
monthly_spend_in_cents: monthly_spend.0,
monthly_spend_in_cents: monthly_spend.0 as i32,
}))
}
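As a worked example with assumed numbers (not from the diff), for the variant above that computes both values: with a free-tier allowance of 1000 cents and 1500 cents of spending in the month, free_tier_spend is min(1500, 1000) = 1000 and monthly_spend is 1500 - 1000 = 500 cents; spending below the allowance yields a monthly_spend of 0 thanks to saturating_sub.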

View File

@@ -1,5 +1,5 @@
use crate::{
db::{self, AccessTokenId, Database, UserId},
db::{self, dev_server, AccessTokenId, Database, DevServerId, UserId},
rpc::Principal,
AppState, Error, Result,
};
@@ -44,10 +44,19 @@ pub async fn validate_header<B>(mut req: Request<B>, next: Next<B>) -> impl Into
let first = auth_header.next().unwrap_or("");
if first == "dev-server-token" {
Err(Error::http(
StatusCode::UNAUTHORIZED,
"Dev servers were removed in Zed 0.157 please upgrade to SSH remoting".to_string(),
))?;
let dev_server_token = auth_header.next().ok_or_else(|| {
Error::http(
StatusCode::BAD_REQUEST,
"missing dev-server-token token in authorization header".to_string(),
)
})?;
let dev_server = verify_dev_server_token(dev_server_token, &state.db)
.await
.map_err(|e| Error::http(StatusCode::UNAUTHORIZED, format!("{}", e)))?;
req.extensions_mut()
.insert(Principal::DevServer(dev_server));
return Ok::<_, Error>(next.run(req).await);
}
let user_id = UserId(first.parse().map_err(|_| {
@@ -231,6 +240,41 @@ pub async fn verify_access_token(
})
}
pub fn generate_dev_server_token(id: usize, access_token: String) -> String {
format!("{}.{}", id, access_token)
}
pub async fn verify_dev_server_token(
dev_server_token: &str,
db: &Arc<Database>,
) -> anyhow::Result<dev_server::Model> {
let (id, token) = split_dev_server_token(dev_server_token)?;
let token_hash = hash_access_token(token);
let server = db.get_dev_server(id).await?;
if server
.hashed_token
.as_bytes()
.ct_eq(token_hash.as_ref())
.into()
{
Ok(server)
} else {
Err(anyhow!("wrong token for dev server"))
}
}
// a dev_server_token has the format <id>.<base64>. This is to make them
// relatively easy to copy/paste around.
pub fn split_dev_server_token(dev_server_token: &str) -> anyhow::Result<(DevServerId, &str)> {
let mut parts = dev_server_token.splitn(2, '.');
let id = DevServerId(parts.next().unwrap_or_default().parse()?);
let token = parts
.next()
.ok_or_else(|| anyhow!("invalid dev server token format"))?;
Ok((id, token))
}
#[cfg(test)]
mod test {
use rand::thread_rng;
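As a rough sketch (assumed values, not part of the diff), generate_dev_server_token and split_dev_server_token round-trip like this:

// Hypothetical token; the numeric prefix becomes the DevServerId and the rest
// is the opaque secret that gets hashed and compared against hashed_token.
let token = generate_dev_server_token(42, "aBcDeF".to_string()); // "42.aBcDeF"
let (id, secret) = split_dev_server_token(&token)?;
assert_eq!(id, DevServerId(42));
assert_eq!(secret, "aBcDeF");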

View File

@@ -726,6 +726,7 @@ pub struct Project {
pub collaborators: Vec<ProjectCollaborator>,
pub worktrees: BTreeMap<u64, Worktree>,
pub language_servers: Vec<proto::LanguageServer>,
pub dev_server_project_id: Option<DevServerProjectId>,
}
pub struct ProjectCollaborator {

View File

@@ -79,6 +79,7 @@ id_type!(ChannelChatParticipantId);
id_type!(ChannelId);
id_type!(ChannelMemberId);
id_type!(ContactId);
id_type!(DevServerId);
id_type!(ExtensionId);
id_type!(FlagId);
id_type!(FollowerId);
@@ -88,6 +89,7 @@ id_type!(NotificationId);
id_type!(NotificationKindId);
id_type!(ProjectCollaboratorId);
id_type!(ProjectId);
id_type!(DevServerProjectId);
id_type!(ReplicaId);
id_type!(RoomId);
id_type!(RoomParticipantId);
@@ -275,6 +277,12 @@ impl From<ChannelVisibility> for i32 {
}
}
#[derive(Copy, Clone, Debug, Serialize, PartialEq)]
pub enum PrincipalId {
UserId(UserId),
DevServerId(DevServerId),
}
/// Indicate whether a [Buffer] has permissions to edit.
#[derive(PartialEq, Clone, Copy, Debug)]
pub enum Capability {

View File

@@ -8,6 +8,8 @@ pub mod buffers;
pub mod channels;
pub mod contacts;
pub mod contributors;
pub mod dev_server_projects;
pub mod dev_servers;
pub mod embeddings;
pub mod extensions;
pub mod hosted_projects;

View File

@@ -1 +1,365 @@
use anyhow::anyhow;
use rpc::{
proto::{self},
ConnectionId,
};
use sea_orm::{
ActiveModelTrait, ActiveValue, ColumnTrait, Condition, DatabaseTransaction, EntityTrait,
IntoActiveModel, ModelTrait, QueryFilter,
};
use crate::db::ProjectId;
use super::{
dev_server, dev_server_project, project, project_collaborator, worktree, Database, DevServerId,
DevServerProjectId, RejoinedProject, ResharedProject, ServerId, UserId,
};
impl Database {
pub async fn get_dev_server_project(
&self,
dev_server_project_id: DevServerProjectId,
) -> crate::Result<dev_server_project::Model> {
self.transaction(|tx| async move {
Ok(
dev_server_project::Entity::find_by_id(dev_server_project_id)
.one(&*tx)
.await?
.ok_or_else(|| {
anyhow!("no dev server project with id {}", dev_server_project_id)
})?,
)
})
.await
}
pub async fn get_projects_for_dev_server(
&self,
dev_server_id: DevServerId,
) -> crate::Result<Vec<proto::DevServerProject>> {
self.transaction(|tx| async move {
self.get_projects_for_dev_server_internal(dev_server_id, &tx)
.await
})
.await
}
pub async fn get_projects_for_dev_server_internal(
&self,
dev_server_id: DevServerId,
tx: &DatabaseTransaction,
) -> crate::Result<Vec<proto::DevServerProject>> {
let servers = dev_server_project::Entity::find()
.filter(dev_server_project::Column::DevServerId.eq(dev_server_id))
.find_also_related(project::Entity)
.all(tx)
.await?;
Ok(servers
.into_iter()
.map(|(dev_server_project, project)| dev_server_project.to_proto(project))
.collect())
}
pub async fn dev_server_project_ids_for_user(
&self,
user_id: UserId,
tx: &DatabaseTransaction,
) -> crate::Result<Vec<DevServerProjectId>> {
let dev_servers = dev_server::Entity::find()
.filter(dev_server::Column::UserId.eq(user_id))
.find_with_related(dev_server_project::Entity)
.all(tx)
.await?;
Ok(dev_servers
.into_iter()
.flat_map(|(_, projects)| projects.into_iter().map(|p| p.id))
.collect())
}
pub async fn owner_for_dev_server_project(
&self,
dev_server_project_id: DevServerProjectId,
tx: &DatabaseTransaction,
) -> crate::Result<UserId> {
let dev_server = dev_server_project::Entity::find_by_id(dev_server_project_id)
.find_also_related(dev_server::Entity)
.one(tx)
.await?
.and_then(|(_, dev_server)| dev_server)
.ok_or_else(|| anyhow!("no dev server project"))?;
Ok(dev_server.user_id)
}
pub async fn get_stale_dev_server_projects(
&self,
connection: ConnectionId,
) -> crate::Result<Vec<ProjectId>> {
self.transaction(|tx| async move {
let projects = project::Entity::find()
.filter(
Condition::all()
.add(project::Column::HostConnectionId.eq(connection.id))
.add(project::Column::HostConnectionServerId.eq(connection.owner_id)),
)
.all(&*tx)
.await?;
Ok(projects.into_iter().map(|p| p.id).collect())
})
.await
}
pub async fn create_dev_server_project(
&self,
dev_server_id: DevServerId,
path: &str,
user_id: UserId,
) -> crate::Result<(dev_server_project::Model, proto::DevServerProjectsUpdate)> {
self.transaction(|tx| async move {
let dev_server = dev_server::Entity::find_by_id(dev_server_id)
.one(&*tx)
.await?
.ok_or_else(|| anyhow!("no dev server with id {}", dev_server_id))?;
if dev_server.user_id != user_id {
return Err(anyhow!("not your dev server"))?;
}
let project = dev_server_project::Entity::insert(dev_server_project::ActiveModel {
id: ActiveValue::NotSet,
dev_server_id: ActiveValue::Set(dev_server_id),
paths: ActiveValue::Set(dev_server_project::JSONPaths(vec![path.to_string()])),
})
.exec_with_returning(&*tx)
.await?;
let status = self
.dev_server_projects_update_internal(user_id, &tx)
.await?;
Ok((project, status))
})
.await
}
pub async fn update_dev_server_project(
&self,
id: DevServerProjectId,
paths: &[String],
user_id: UserId,
) -> crate::Result<(dev_server_project::Model, proto::DevServerProjectsUpdate)> {
self.transaction(move |tx| async move {
let paths = paths.to_owned();
let Some((project, Some(dev_server))) = dev_server_project::Entity::find_by_id(id)
.find_also_related(dev_server::Entity)
.one(&*tx)
.await?
else {
return Err(anyhow!("no such dev server project"))?;
};
if dev_server.user_id != user_id {
return Err(anyhow!("not your dev server"))?;
}
let mut project = project.into_active_model();
project.paths = ActiveValue::Set(dev_server_project::JSONPaths(paths));
let project = project.update(&*tx).await?;
let status = self
.dev_server_projects_update_internal(user_id, &tx)
.await?;
Ok((project, status))
})
.await
}
pub async fn delete_dev_server_project(
&self,
dev_server_project_id: DevServerProjectId,
dev_server_id: DevServerId,
user_id: UserId,
) -> crate::Result<(Vec<proto::DevServerProject>, proto::DevServerProjectsUpdate)> {
self.transaction(|tx| async move {
project::Entity::delete_many()
.filter(project::Column::DevServerProjectId.eq(dev_server_project_id))
.exec(&*tx)
.await?;
let result = dev_server_project::Entity::delete_by_id(dev_server_project_id)
.exec(&*tx)
.await?;
if result.rows_affected != 1 {
return Err(anyhow!(
"no dev server project with id {}",
dev_server_project_id
))?;
}
let status = self
.dev_server_projects_update_internal(user_id, &tx)
.await?;
let projects = self
.get_projects_for_dev_server_internal(dev_server_id, &tx)
.await?;
Ok((projects, status))
})
.await
}
pub async fn share_dev_server_project(
&self,
dev_server_project_id: DevServerProjectId,
dev_server_id: DevServerId,
connection: ConnectionId,
worktrees: &[proto::WorktreeMetadata],
) -> crate::Result<(
proto::DevServerProject,
UserId,
proto::DevServerProjectsUpdate,
)> {
self.transaction(|tx| async move {
let dev_server = dev_server::Entity::find_by_id(dev_server_id)
.one(&*tx)
.await?
.ok_or_else(|| anyhow!("no dev server with id {}", dev_server_id))?;
let dev_server_project = dev_server_project::Entity::find_by_id(dev_server_project_id)
.one(&*tx)
.await?
.ok_or_else(|| {
anyhow!("no dev server project with id {}", dev_server_project_id)
})?;
if dev_server_project.dev_server_id != dev_server_id {
return Err(anyhow!("dev server project shared from wrong server"))?;
}
let project = project::ActiveModel {
room_id: ActiveValue::Set(None),
host_user_id: ActiveValue::Set(None),
host_connection_id: ActiveValue::set(Some(connection.id as i32)),
host_connection_server_id: ActiveValue::set(Some(ServerId(
connection.owner_id as i32,
))),
id: ActiveValue::NotSet,
hosted_project_id: ActiveValue::Set(None),
dev_server_project_id: ActiveValue::Set(Some(dev_server_project_id)),
}
.insert(&*tx)
.await?;
if !worktrees.is_empty() {
worktree::Entity::insert_many(worktrees.iter().map(|worktree| {
worktree::ActiveModel {
id: ActiveValue::set(worktree.id as i64),
project_id: ActiveValue::set(project.id),
abs_path: ActiveValue::set(worktree.abs_path.clone()),
root_name: ActiveValue::set(worktree.root_name.clone()),
visible: ActiveValue::set(worktree.visible),
scan_id: ActiveValue::set(0),
completed_scan_id: ActiveValue::set(0),
}
}))
.exec(&*tx)
.await?;
}
let status = self
.dev_server_projects_update_internal(dev_server.user_id, &tx)
.await?;
Ok((
dev_server_project.to_proto(Some(project)),
dev_server.user_id,
status,
))
})
.await
}
pub async fn reshare_dev_server_projects(
&self,
reshared_projects: &Vec<proto::UpdateProject>,
dev_server_id: DevServerId,
connection: ConnectionId,
) -> crate::Result<Vec<ResharedProject>> {
self.transaction(|tx| async move {
let mut ret = Vec::new();
for reshared_project in reshared_projects {
let project_id = ProjectId::from_proto(reshared_project.project_id);
let (project, dev_server_project) = project::Entity::find_by_id(project_id)
.find_also_related(dev_server_project::Entity)
.one(&*tx)
.await?
.ok_or_else(|| anyhow!("project does not exist"))?;
if dev_server_project.map(|rp| rp.dev_server_id) != Some(dev_server_id) {
return Err(anyhow!("dev server project reshared from wrong server"))?;
}
let Ok(old_connection_id) = project.host_connection() else {
return Err(anyhow!("dev server project was not shared"))?;
};
project::Entity::update(project::ActiveModel {
id: ActiveValue::set(project_id),
host_connection_id: ActiveValue::set(Some(connection.id as i32)),
host_connection_server_id: ActiveValue::set(Some(ServerId(
connection.owner_id as i32,
))),
..Default::default()
})
.exec(&*tx)
.await?;
let collaborators = project
.find_related(project_collaborator::Entity)
.all(&*tx)
.await?;
self.update_project_worktrees(project_id, &reshared_project.worktrees, &tx)
.await?;
ret.push(super::ResharedProject {
id: project_id,
old_connection_id,
collaborators: collaborators
.iter()
.map(|collaborator| super::ProjectCollaborator {
connection_id: collaborator.connection(),
user_id: collaborator.user_id,
replica_id: collaborator.replica_id,
is_host: collaborator.is_host,
})
.collect(),
worktrees: reshared_project.worktrees.clone(),
});
}
Ok(ret)
})
.await
}
pub async fn rejoin_dev_server_projects(
&self,
rejoined_projects: &Vec<proto::RejoinProject>,
user_id: UserId,
connection_id: ConnectionId,
) -> crate::Result<Vec<RejoinedProject>> {
self.transaction(|tx| async move {
let mut ret = Vec::new();
for rejoined_project in rejoined_projects {
if let Some(project) = self
.rejoin_project_internal(&tx, rejoined_project, user_id, connection_id)
.await?
{
ret.push(project);
}
}
Ok(ret)
})
.await
}
}
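For orientation, a rough usage sketch of the sharing call above; the database handle, ids, and connection values are placeholders for what the collab RPC layer actually supplies, not values from this diff.

// Hypothetical sketch: a dev server shares one of its projects over its RPC
// connection, reporting no worktrees yet. `db` is assumed to be a `Database`.
let (shared_project, owner_id, update) = db
    .share_dev_server_project(
        DevServerProjectId(1),
        DevServerId(1),
        ConnectionId { owner_id: 0, id: 3 },
        &[],
    )
    .await?;
// The returned proto message points back at the dev server that owns it.
assert_eq!(shared_project.dev_server_id, DevServerId(1).to_proto());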

View File

@@ -1 +1,222 @@
use rpc::proto;
use sea_orm::{
ActiveValue, ColumnTrait, DatabaseTransaction, EntityTrait, IntoActiveModel, QueryFilter,
};
use super::{dev_server, dev_server_project, Database, DevServerId, UserId};
impl Database {
pub async fn get_dev_server(
&self,
dev_server_id: DevServerId,
) -> crate::Result<dev_server::Model> {
self.transaction(|tx| async move {
Ok(dev_server::Entity::find_by_id(dev_server_id)
.one(&*tx)
.await?
.ok_or_else(|| anyhow::anyhow!("no dev server with id {}", dev_server_id))?)
})
.await
}
pub async fn get_dev_server_for_user(
&self,
dev_server_id: DevServerId,
user_id: UserId,
) -> crate::Result<dev_server::Model> {
self.transaction(|tx| async move {
let server = dev_server::Entity::find_by_id(dev_server_id)
.one(&*tx)
.await?
.ok_or_else(|| anyhow::anyhow!("no dev server with id {}", dev_server_id))?;
if server.user_id != user_id {
return Err(anyhow::anyhow!(
"dev server {} is not owned by user {}",
dev_server_id,
user_id
))?;
}
Ok(server)
})
.await
}
pub async fn get_dev_servers(&self, user_id: UserId) -> crate::Result<Vec<dev_server::Model>> {
self.transaction(|tx| async move {
Ok(dev_server::Entity::find()
.filter(dev_server::Column::UserId.eq(user_id))
.all(&*tx)
.await?)
})
.await
}
pub async fn dev_server_projects_update(
&self,
user_id: UserId,
) -> crate::Result<proto::DevServerProjectsUpdate> {
self.transaction(|tx| async move {
self.dev_server_projects_update_internal(user_id, &tx).await
})
.await
}
pub async fn dev_server_projects_update_internal(
&self,
user_id: UserId,
tx: &DatabaseTransaction,
) -> crate::Result<proto::DevServerProjectsUpdate> {
let dev_servers = dev_server::Entity::find()
.filter(dev_server::Column::UserId.eq(user_id))
.all(tx)
.await?;
let dev_server_projects = dev_server_project::Entity::find()
.filter(
dev_server_project::Column::DevServerId
.is_in(dev_servers.iter().map(|d| d.id).collect::<Vec<_>>()),
)
.find_also_related(super::project::Entity)
.all(tx)
.await?;
Ok(proto::DevServerProjectsUpdate {
dev_servers: dev_servers
.into_iter()
.map(|d| d.to_proto(proto::DevServerStatus::Offline))
.collect(),
dev_server_projects: dev_server_projects
.into_iter()
.map(|(dev_server_project, project)| dev_server_project.to_proto(project))
.collect(),
})
}
pub async fn create_dev_server(
&self,
name: &str,
ssh_connection_string: Option<&str>,
hashed_access_token: &str,
user_id: UserId,
) -> crate::Result<(dev_server::Model, proto::DevServerProjectsUpdate)> {
self.transaction(|tx| async move {
if name.trim().is_empty() {
return Err(anyhow::anyhow!(proto::ErrorCode::Forbidden))?;
}
let dev_server = dev_server::Entity::insert(dev_server::ActiveModel {
id: ActiveValue::NotSet,
hashed_token: ActiveValue::Set(hashed_access_token.to_string()),
name: ActiveValue::Set(name.trim().to_string()),
user_id: ActiveValue::Set(user_id),
ssh_connection_string: ActiveValue::Set(
ssh_connection_string.map(ToOwned::to_owned),
),
})
.exec_with_returning(&*tx)
.await?;
let dev_server_projects = self
.dev_server_projects_update_internal(user_id, &tx)
.await?;
Ok((dev_server, dev_server_projects))
})
.await
}
pub async fn update_dev_server_token(
&self,
id: DevServerId,
hashed_token: &str,
user_id: UserId,
) -> crate::Result<proto::DevServerProjectsUpdate> {
self.transaction(|tx| async move {
let Some(dev_server) = dev_server::Entity::find_by_id(id).one(&*tx).await? else {
return Err(anyhow::anyhow!("no dev server with id {}", id))?;
};
if dev_server.user_id != user_id {
return Err(anyhow::anyhow!(proto::ErrorCode::Forbidden))?;
}
dev_server::Entity::update(dev_server::ActiveModel {
hashed_token: ActiveValue::Set(hashed_token.to_string()),
..dev_server.clone().into_active_model()
})
.exec(&*tx)
.await?;
let dev_server_projects = self
.dev_server_projects_update_internal(user_id, &tx)
.await?;
Ok(dev_server_projects)
})
.await
}
pub async fn rename_dev_server(
&self,
id: DevServerId,
name: &str,
ssh_connection_string: Option<&str>,
user_id: UserId,
) -> crate::Result<proto::DevServerProjectsUpdate> {
self.transaction(|tx| async move {
let Some(dev_server) = dev_server::Entity::find_by_id(id).one(&*tx).await? else {
return Err(anyhow::anyhow!("no dev server with id {}", id))?;
};
if dev_server.user_id != user_id || name.trim().is_empty() {
return Err(anyhow::anyhow!(proto::ErrorCode::Forbidden))?;
}
dev_server::Entity::update(dev_server::ActiveModel {
name: ActiveValue::Set(name.trim().to_string()),
ssh_connection_string: ActiveValue::Set(
ssh_connection_string.map(ToOwned::to_owned),
),
..dev_server.clone().into_active_model()
})
.exec(&*tx)
.await?;
let dev_server_projects = self
.dev_server_projects_update_internal(user_id, &tx)
.await?;
Ok(dev_server_projects)
})
.await
}
pub async fn delete_dev_server(
&self,
id: DevServerId,
user_id: UserId,
) -> crate::Result<proto::DevServerProjectsUpdate> {
self.transaction(|tx| async move {
let Some(dev_server) = dev_server::Entity::find_by_id(id).one(&*tx).await? else {
return Err(anyhow::anyhow!("no dev server with id {}", id))?;
};
if dev_server.user_id != user_id {
return Err(anyhow::anyhow!(proto::ErrorCode::Forbidden))?;
}
dev_server_project::Entity::delete_many()
.filter(dev_server_project::Column::DevServerId.eq(id))
.exec(&*tx)
.await?;
dev_server::Entity::delete(dev_server.into_active_model())
.exec(&*tx)
.await?;
let dev_server_projects = self
.dev_server_projects_update_internal(user_id, &tx)
.await?;
Ok(dev_server_projects)
})
.await
}
}
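A minimal sketch of how a caller might exercise the methods above, assuming a `db: &Database` and `user_id: UserId` coming from the surrounding request handlers; the names and token string are placeholders.

// Hypothetical usage sketch (not part of the diff).
async fn dev_server_lifecycle(db: &Database, user_id: UserId) -> crate::Result<()> {
    // Create a server; the caller is responsible for hashing the access token.
    let (dev_server, _update) = db
        .create_dev_server("laptop", None, "hashed-token", user_id)
        .await?;
    // Rename it and attach an SSH connection string.
    db.rename_dev_server(dev_server.id, "laptop-2", Some("ssh user@host"), user_id)
        .await?;
    // Deleting it also removes the dev server projects that point at it.
    db.delete_dev_server(dev_server.id, user_id).await?;
    Ok(())
}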

View File

@@ -32,6 +32,7 @@ impl Database {
connection: ConnectionId,
worktrees: &[proto::WorktreeMetadata],
is_ssh_project: bool,
dev_server_project_id: Option<DevServerProjectId>,
) -> Result<TransactionGuard<(ProjectId, proto::Room)>> {
self.room_transaction(room_id, |tx| async move {
let participant = room_participant::Entity::find()
@@ -60,6 +61,38 @@ impl Database {
return Err(anyhow!("guests cannot share projects"))?;
}
if let Some(dev_server_project_id) = dev_server_project_id {
let project = project::Entity::find()
.filter(project::Column::DevServerProjectId.eq(Some(dev_server_project_id)))
.one(&*tx)
.await?
.ok_or_else(|| anyhow!("no remote project"))?;
let (_, dev_server) = dev_server_project::Entity::find_by_id(dev_server_project_id)
.find_also_related(dev_server::Entity)
.one(&*tx)
.await?
.ok_or_else(|| anyhow!("no dev_server_project"))?;
if !dev_server.is_some_and(|dev_server| dev_server.user_id == participant.user_id) {
return Err(anyhow!("not your dev server"))?;
}
if project.room_id.is_some() {
return Err(anyhow!("project already shared"))?;
};
let project = project::Entity::update(project::ActiveModel {
room_id: ActiveValue::Set(Some(room_id)),
..project.into_active_model()
})
.exec(&*tx)
.await?;
let room = self.get_room(room_id, &tx).await?;
return Ok((project.id, room));
}
let project = project::ActiveModel {
room_id: ActiveValue::set(Some(participant.room_id)),
host_user_id: ActiveValue::set(Some(participant.user_id)),
@@ -69,6 +102,7 @@ impl Database {
))),
id: ActiveValue::NotSet,
hosted_project_id: ActiveValue::Set(None),
dev_server_project_id: ActiveValue::Set(None),
}
.insert(&*tx)
.await?;
@@ -122,6 +156,7 @@ impl Database {
&self,
project_id: ProjectId,
connection: ConnectionId,
user_id: Option<UserId>,
) -> Result<TransactionGuard<(bool, Option<proto::Room>, Vec<ConnectionId>)>> {
self.project_transaction(project_id, |tx| async move {
let guest_connection_ids = self.project_guest_connection_ids(project_id, &tx).await?;
@@ -137,6 +172,25 @@ impl Database {
if project.host_connection()? == connection {
return Ok((true, room, guest_connection_ids));
}
if let Some(dev_server_project_id) = project.dev_server_project_id {
if let Some(user_id) = user_id {
if user_id
!= self
.owner_for_dev_server_project(dev_server_project_id, &tx)
.await?
{
Err(anyhow!("cannot unshare a project hosted by another user"))?
}
project::Entity::update(project::ActiveModel {
room_id: ActiveValue::Set(None),
..project.into_active_model()
})
.exec(&*tx)
.await?;
return Ok((false, room, guest_connection_ids));
}
}
Err(anyhow!("cannot unshare a project hosted by another user"))?
})
.await
@@ -218,16 +272,6 @@ impl Database {
update: &proto::UpdateWorktree,
connection: ConnectionId,
) -> Result<TransactionGuard<Vec<ConnectionId>>> {
if update.removed_entries.len() > proto::MAX_WORKTREE_UPDATE_MAX_CHUNK_SIZE
|| update.updated_entries.len() > proto::MAX_WORKTREE_UPDATE_MAX_CHUNK_SIZE
{
return Err(anyhow!(
"invalid worktree update. removed entries: {}, updated entries: {}",
update.removed_entries.len(),
update.updated_entries.len()
))?;
}
let project_id = ProjectId::from_proto(update.project_id);
let worktree_id = update.worktree_id as i64;
self.project_transaction(project_id, |tx| async move {
@@ -579,6 +623,17 @@ impl Database {
.await
}
pub async fn find_dev_server_project(&self, id: DevServerProjectId) -> Result<project::Model> {
self.transaction(|tx| async move {
Ok(project::Entity::find()
.filter(project::Column::DevServerProjectId.eq(id))
.one(&*tx)
.await?
.ok_or_else(|| anyhow!("no such project"))?)
})
.await
}
/// Adds the given connection to the specified project
/// in the current room.
pub async fn join_project(
@@ -589,7 +644,13 @@ impl Database {
) -> Result<TransactionGuard<(Project, ReplicaId)>> {
self.project_transaction(project_id, |tx| async move {
let (project, role) = self
.access_project(project_id, connection, Capability::ReadOnly, &tx)
.access_project(
project_id,
connection,
PrincipalId::UserId(user_id),
Capability::ReadOnly,
&tx,
)
.await?;
self.join_project_internal(project, user_id, connection, role, &tx)
.await
@@ -780,6 +841,7 @@ impl Database {
worktree_id: None,
})
.collect(),
dev_server_project_id: project.dev_server_project_id,
};
Ok((project, replica_id as ReplicaId))
}
@@ -935,14 +997,29 @@ impl Database {
&self,
project_id: ProjectId,
connection_id: ConnectionId,
principal_id: PrincipalId,
capability: Capability,
tx: &DatabaseTransaction,
) -> Result<(project::Model, ChannelRole)> {
let project = project::Entity::find_by_id(project_id)
let (mut project, dev_server_project) = project::Entity::find_by_id(project_id)
.find_also_related(dev_server_project::Entity)
.one(tx)
.await?
.ok_or_else(|| anyhow!("no such project"))?;
let user_id = match principal_id {
PrincipalId::DevServerId(_) => {
if project
.host_connection()
.is_ok_and(|connection| connection == connection_id)
{
return Ok((project, ChannelRole::Admin));
}
return Err(anyhow!("not the project host"))?;
}
PrincipalId::UserId(user_id) => user_id,
};
let role_from_room = if let Some(room_id) = project.room_id {
room_participant::Entity::find()
.filter(room_participant::Column::RoomId.eq(room_id))
@@ -953,8 +1030,34 @@ impl Database {
} else {
None
};
let role_from_dev_server = if let Some(dev_server_project) = dev_server_project {
let dev_server = dev_server::Entity::find_by_id(dev_server_project.dev_server_id)
.one(tx)
.await?
.ok_or_else(|| anyhow!("no such channel"))?;
if user_id == dev_server.user_id {
// If the user left the room "uncleanly" they may rejoin the
// remote project before leave_room runs. In that case kick
// the project out of the room pre-emptively.
if role_from_room.is_none() {
project = project::Entity::update(project::ActiveModel {
room_id: ActiveValue::Set(None),
..project.into_active_model()
})
.exec(tx)
.await?;
}
Some(ChannelRole::Admin)
} else {
None
}
} else {
None
};
let role = role_from_room.unwrap_or(ChannelRole::Banned);
let role = role_from_dev_server
.or(role_from_room)
.unwrap_or(ChannelRole::Banned);
match capability {
Capability::ReadWrite => {
@@ -977,10 +1080,17 @@ impl Database {
&self,
project_id: ProjectId,
connection_id: ConnectionId,
user_id: UserId,
) -> Result<ConnectionId> {
self.project_transaction(project_id, |tx| async move {
let (project, _) = self
.access_project(project_id, connection_id, Capability::ReadOnly, &tx)
.access_project(
project_id,
connection_id,
PrincipalId::UserId(user_id),
Capability::ReadOnly,
&tx,
)
.await?;
project.host_connection()
})
@@ -993,10 +1103,17 @@ impl Database {
&self,
project_id: ProjectId,
connection_id: ConnectionId,
user_id: UserId,
) -> Result<ConnectionId> {
self.project_transaction(project_id, |tx| async move {
let (project, _) = self
.access_project(project_id, connection_id, Capability::ReadWrite, &tx)
.access_project(
project_id,
connection_id,
PrincipalId::UserId(user_id),
Capability::ReadWrite,
&tx,
)
.await?;
project.host_connection()
})
@@ -1004,16 +1121,47 @@ impl Database {
.map(|guard| guard.into_inner())
}
/// Returns the host connection for a request to join a shared project.
pub async fn host_for_owner_project_request(
&self,
project_id: ProjectId,
_connection_id: ConnectionId,
user_id: UserId,
) -> Result<ConnectionId> {
self.project_transaction(project_id, |tx| async move {
let (project, dev_server_project) = project::Entity::find_by_id(project_id)
.find_also_related(dev_server_project::Entity)
.one(&*tx)
.await?
.ok_or_else(|| anyhow!("no such project"))?;
let Some(dev_server_project) = dev_server_project else {
return Err(anyhow!("not a dev server project"))?;
};
let dev_server = dev_server::Entity::find_by_id(dev_server_project.dev_server_id)
.one(&*tx)
.await?
.ok_or_else(|| anyhow!("no such dev server"))?;
if dev_server.user_id != user_id {
return Err(anyhow!("not your project"))?;
}
project.host_connection()
})
.await
.map(|guard| guard.into_inner())
}
pub async fn connections_for_buffer_update(
&self,
project_id: ProjectId,
principal_id: PrincipalId,
connection_id: ConnectionId,
capability: Capability,
) -> Result<TransactionGuard<(ConnectionId, Vec<ConnectionId>)>> {
self.project_transaction(project_id, |tx| async move {
// Authorize
let (project, _) = self
.access_project(project_id, connection_id, capability, &tx)
.access_project(project_id, connection_id, principal_id, capability, &tx)
.await?;
let host_connection_id = project.host_connection()?;

View File

@@ -858,6 +858,25 @@ impl Database {
.all(&*tx)
.await?;
// Find any projects in the room whose dev server project id belongs to a dev server owned by the leaving user.
let dev_server_projects_for_user = self
.dev_server_project_ids_for_user(leaving_participant.user_id, &tx)
.await?;
let dev_server_projects_to_unshare = project::Entity::find()
.filter(
Condition::all()
.add(project::Column::RoomId.eq(room_id))
.add(
project::Column::DevServerProjectId
.is_in(dev_server_projects_for_user.clone()),
),
)
.all(&*tx)
.await?
.into_iter()
.map(|project| project.id)
.collect::<HashSet<_>>();
let mut left_projects = HashMap::default();
let mut collaborators = project_collaborator::Entity::find()
.filter(project_collaborator::Column::ProjectId.is_in(project_ids))
@@ -880,7 +899,9 @@ impl Database {
left_project.connection_ids.push(collaborator_connection_id);
}
if collaborator.is_host && collaborator.connection() == connection {
if (collaborator.is_host && collaborator.connection() == connection)
|| dev_server_projects_to_unshare.contains(&collaborator.project_id)
{
left_project.should_unshare = true;
}
}
@@ -923,6 +944,17 @@ impl Database {
.exec(&*tx)
.await?;
if !dev_server_projects_to_unshare.is_empty() {
project::Entity::update_many()
.filter(project::Column::Id.is_in(dev_server_projects_to_unshare))
.set(project::ActiveModel {
room_id: ActiveValue::Set(None),
..Default::default()
})
.exec(&*tx)
.await?;
}
let (channel, room) = self.get_channel_room(room_id, &tx).await?;
let deleted = if room.participants.is_empty() {
let result = room::Entity::delete_by_id(room_id).exec(&*tx).await?;
@@ -1291,6 +1323,26 @@ impl Database {
project.worktree_root_names.push(db_worktree.root_name);
}
}
} else if let Some(dev_server_project_id) = db_project.dev_server_project_id {
let host = self
.owner_for_dev_server_project(dev_server_project_id, tx)
.await?;
if let Some((_, participant)) = participants
.iter_mut()
.find(|(_, v)| v.user_id == host.to_proto())
{
participant.projects.push(proto::ParticipantProject {
id: db_project.id.to_proto(),
worktree_root_names: Default::default(),
});
let project = participant.projects.last_mut().unwrap();
for db_worktree in db_worktrees {
if db_worktree.visible {
project.worktree_root_names.push(db_worktree.root_name);
}
}
}
}
}

View File

@@ -13,6 +13,8 @@ pub mod channel_message;
pub mod channel_message_mention;
pub mod contact;
pub mod contributor;
pub mod dev_server;
pub mod dev_server_project;
pub mod embedding;
pub mod extension;
pub mod extension_version;

View File

@@ -0,0 +1,39 @@
use crate::db::{DevServerId, UserId};
use rpc::proto;
use sea_orm::entity::prelude::*;
#[derive(Clone, Debug, PartialEq, Eq, DeriveEntityModel)]
#[sea_orm(table_name = "dev_servers")]
pub struct Model {
#[sea_orm(primary_key)]
pub id: DevServerId,
pub name: String,
pub user_id: UserId,
pub hashed_token: String,
pub ssh_connection_string: Option<String>,
}
impl ActiveModelBehavior for ActiveModel {}
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
pub enum Relation {
#[sea_orm(has_many = "super::dev_server_project::Entity")]
RemoteProject,
}
impl Related<super::dev_server_project::Entity> for Entity {
fn to() -> RelationDef {
Relation::RemoteProject.def()
}
}
impl Model {
pub fn to_proto(&self, status: proto::DevServerStatus) -> proto::DevServer {
proto::DevServer {
dev_server_id: self.id.to_proto(),
name: self.name.clone(),
status: status as i32,
ssh_connection_string: self.ssh_connection_string.clone(),
}
}
}

View File

@@ -0,0 +1,59 @@
use super::project;
use crate::db::{DevServerId, DevServerProjectId};
use rpc::proto;
use sea_orm::{entity::prelude::*, FromJsonQueryResult};
use serde::{Deserialize, Serialize};
#[derive(Clone, Debug, PartialEq, Eq, DeriveEntityModel)]
#[sea_orm(table_name = "dev_server_projects")]
pub struct Model {
#[sea_orm(primary_key)]
pub id: DevServerProjectId,
pub dev_server_id: DevServerId,
pub paths: JSONPaths,
}
#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize, FromJsonQueryResult)]
pub struct JSONPaths(pub Vec<String>);
impl ActiveModelBehavior for ActiveModel {}
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
pub enum Relation {
#[sea_orm(has_one = "super::project::Entity")]
Project,
#[sea_orm(
belongs_to = "super::dev_server::Entity",
from = "Column::DevServerId",
to = "super::dev_server::Column::Id"
)]
DevServer,
}
impl Related<super::project::Entity> for Entity {
fn to() -> RelationDef {
Relation::Project.def()
}
}
impl Related<super::dev_server::Entity> for Entity {
fn to() -> RelationDef {
Relation::DevServer.def()
}
}
impl Model {
pub fn to_proto(&self, project: Option<project::Model>) -> proto::DevServerProject {
proto::DevServerProject {
id: self.id.to_proto(),
project_id: project.map(|p| p.id.to_proto()),
dev_server_id: self.dev_server_id.to_proto(),
path: self.paths().first().cloned().unwrap_or_default(),
paths: self.paths().clone(),
}
}
pub fn paths(&self) -> &Vec<String> {
&self.paths.0
}
}
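As a hypothetical illustration of the proto mapping above (values invented for this sketch): `path` keeps only the first entry for backwards compatibility, `paths` carries the full list, and `project_id` is set only when a project row is passed in.

let model = dev_server_project::Model {
    id: DevServerProjectId(1),
    dev_server_id: DevServerId(1),
    paths: JSONPaths(vec!["/a".into(), "/b".into()]),
};
let message = model.to_proto(None);
assert_eq!(message.path, "/a");
assert_eq!(message.paths, vec!["/a", "/b"]);
assert_eq!(message.project_id, None);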

View File

@@ -1,4 +1,4 @@
use crate::db::{HostedProjectId, ProjectId, Result, RoomId, ServerId, UserId};
use crate::db::{DevServerProjectId, HostedProjectId, ProjectId, Result, RoomId, ServerId, UserId};
use anyhow::anyhow;
use rpc::ConnectionId;
use sea_orm::entity::prelude::*;
@@ -13,6 +13,7 @@ pub struct Model {
pub host_connection_id: Option<i32>,
pub host_connection_server_id: Option<ServerId>,
pub hosted_project_id: Option<HostedProjectId>,
pub dev_server_project_id: Option<DevServerProjectId>,
}
impl Model {
@@ -56,6 +57,12 @@ pub enum Relation {
to = "super::hosted_project::Column::Id"
)]
HostedProject,
#[sea_orm(
belongs_to = "super::dev_server_project::Entity",
from = "Column::DevServerProjectId",
to = "super::dev_server_project::Column::Id"
)]
RemoteProject,
}
impl Related<super::user::Entity> for Entity {
@@ -94,4 +101,10 @@ impl Related<super::hosted_project::Entity> for Entity {
}
}
impl Related<super::dev_server_project::Entity> for Entity {
fn to() -> RelationDef {
Relation::RemoteProject.def()
}
}
impl ActiveModelBehavior for ActiveModel {}

View File

@@ -21,7 +21,6 @@ pub struct Model {
pub metrics_id: Uuid,
pub created_at: NaiveDateTime,
pub accepted_tos_at: Option<NaiveDateTime>,
pub custom_llm_monthly_allowance_in_cents: Option<i32>,
}
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]

View File

@@ -540,18 +540,18 @@ async fn test_project_count(db: &Arc<Database>) {
.unwrap();
assert_eq!(db.project_count_excluding_admins().await.unwrap(), 0);
db.share_project(room_id, ConnectionId { owner_id, id: 1 }, &[], false)
db.share_project(room_id, ConnectionId { owner_id, id: 1 }, &[], false, None)
.await
.unwrap();
assert_eq!(db.project_count_excluding_admins().await.unwrap(), 1);
db.share_project(room_id, ConnectionId { owner_id, id: 1 }, &[], false)
db.share_project(room_id, ConnectionId { owner_id, id: 1 }, &[], false, None)
.await
.unwrap();
assert_eq!(db.project_count_excluding_admins().await.unwrap(), 2);
// Projects shared by admins aren't counted.
db.share_project(room_id, ConnectionId { owner_id, id: 0 }, &[], false)
db.share_project(room_id, ConnectionId { owner_id, id: 0 }, &[], false, None)
.await
.unwrap();
assert_eq!(db.project_count_excluding_admins().await.unwrap(), 2);

View File

@@ -459,9 +459,8 @@ async fn check_usage_limit(
Utc::now(),
)
.await?;
let free_tier = claims.free_tier_monthly_spending_limit();
if usage.spending_this_month >= free_tier {
if usage.spending_this_month >= FREE_TIER_MONTHLY_SPENDING_LIMIT {
if !claims.has_llm_subscription {
return Err(Error::http(
StatusCode::PAYMENT_REQUIRED,
@@ -469,7 +468,9 @@ async fn check_usage_limit(
));
}
if (usage.spending_this_month - free_tier) >= Cents(claims.max_monthly_spend_in_cents) {
if (usage.spending_this_month - FREE_TIER_MONTHLY_SPENDING_LIMIT)
>= Cents(claims.max_monthly_spend_in_cents)
{
return Err(Error::Http(
StatusCode::FORBIDDEN,
"Maximum spending limit reached for this month.".to_string(),
@@ -639,7 +640,6 @@ impl<S> Drop for TokenCountingStream<S> {
tokens,
claims.has_llm_subscription,
Cents(claims.max_monthly_spend_in_cents),
claims.free_tier_monthly_spending_limit(),
Utc::now(),
)
.await

View File

@@ -1,5 +1,5 @@
use crate::db::UserId;
use crate::llm::Cents;
use crate::{db::UserId, llm::FREE_TIER_MONTHLY_SPENDING_LIMIT};
use chrono::{Datelike, Duration};
use futures::StreamExt as _;
use rpc::LanguageModelProvider;
@@ -299,7 +299,6 @@ impl LlmDatabase {
tokens: TokenUsage,
has_llm_subscription: bool,
max_monthly_spend: Cents,
free_tier_monthly_spending_limit: Cents,
now: DateTimeUtc,
) -> Result<Usage> {
self.transaction(|tx| async move {
@@ -411,9 +410,9 @@ impl LlmDatabase {
);
if !is_staff
&& spending_this_month > free_tier_monthly_spending_limit
&& spending_this_month > FREE_TIER_MONTHLY_SPENDING_LIMIT
&& has_llm_subscription
&& (spending_this_month - free_tier_monthly_spending_limit) <= max_monthly_spend
&& (spending_this_month - FREE_TIER_MONTHLY_SPENDING_LIMIT) <= max_monthly_spend
{
billing_event::ActiveModel {
id: ActiveValue::not_set(),

View File

@@ -66,7 +66,6 @@ async fn test_billing_limit_exceeded(db: &mut LlmDatabase) {
usage,
true,
max_monthly_spend,
FREE_TIER_MONTHLY_SPENDING_LIMIT,
now,
)
.await
@@ -104,7 +103,6 @@ async fn test_billing_limit_exceeded(db: &mut LlmDatabase) {
usage_2,
true,
max_monthly_spend,
FREE_TIER_MONTHLY_SPENDING_LIMIT,
now,
)
.await
@@ -134,7 +132,6 @@ async fn test_billing_limit_exceeded(db: &mut LlmDatabase) {
model,
usage_exceeding,
true,
FREE_TIER_MONTHLY_SPENDING_LIMIT,
max_monthly_spend,
now,
)

View File

@@ -1,4 +1,3 @@
use crate::llm::FREE_TIER_MONTHLY_SPENDING_LIMIT;
use crate::{
db::UserId,
llm::db::{
@@ -50,7 +49,6 @@ async fn test_tracking_usage(db: &mut LlmDatabase) {
},
false,
Cents::ZERO,
FREE_TIER_MONTHLY_SPENDING_LIMIT,
now,
)
.await
@@ -70,7 +68,6 @@ async fn test_tracking_usage(db: &mut LlmDatabase) {
},
false,
Cents::ZERO,
FREE_TIER_MONTHLY_SPENDING_LIMIT,
now,
)
.await
@@ -127,7 +124,6 @@ async fn test_tracking_usage(db: &mut LlmDatabase) {
},
false,
Cents::ZERO,
FREE_TIER_MONTHLY_SPENDING_LIMIT,
now,
)
.await
@@ -184,7 +180,6 @@ async fn test_tracking_usage(db: &mut LlmDatabase) {
},
false,
Cents::ZERO,
FREE_TIER_MONTHLY_SPENDING_LIMIT,
now,
)
.await
@@ -227,7 +222,6 @@ async fn test_tracking_usage(db: &mut LlmDatabase) {
},
false,
Cents::ZERO,
FREE_TIER_MONTHLY_SPENDING_LIMIT,
now,
)
.await
@@ -265,7 +259,6 @@ async fn test_tracking_usage(db: &mut LlmDatabase) {
},
false,
Cents::ZERO,
FREE_TIER_MONTHLY_SPENDING_LIMIT,
now,
)
.await

View File

@@ -1,7 +1,8 @@
use crate::db::user;
use crate::llm::{DEFAULT_MAX_MONTHLY_SPEND, FREE_TIER_MONTHLY_SPENDING_LIMIT};
use crate::Cents;
use crate::{db::billing_preference, Config};
use crate::llm::DEFAULT_MAX_MONTHLY_SPEND;
use crate::{
db::{billing_preference, UserId},
Config,
};
use anyhow::{anyhow, Result};
use chrono::Utc;
use jsonwebtoken::{DecodingKey, EncodingKey, Header, Validation};
@@ -21,7 +22,6 @@ pub struct LlmTokenClaims {
pub has_llm_closed_beta_feature_flag: bool,
pub has_llm_subscription: bool,
pub max_monthly_spend_in_cents: u32,
pub custom_llm_monthly_allowance_in_cents: Option<u32>,
pub plan: rpc::proto::Plan,
}
@@ -30,7 +30,8 @@ const LLM_TOKEN_LIFETIME: Duration = Duration::from_secs(60 * 60);
impl LlmTokenClaims {
#[allow(clippy::too_many_arguments)]
pub fn create(
user: &user::Model,
user_id: UserId,
github_user_login: String,
is_staff: bool,
billing_preferences: Option<billing_preference::Model>,
has_llm_closed_beta_feature_flag: bool,
@@ -48,8 +49,8 @@ impl LlmTokenClaims {
iat: now.timestamp() as u64,
exp: (now + LLM_TOKEN_LIFETIME).timestamp() as u64,
jti: uuid::Uuid::new_v4().to_string(),
user_id: user.id.to_proto(),
github_user_login: user.github_login.clone(),
user_id: user_id.to_proto(),
github_user_login,
is_staff,
has_llm_closed_beta_feature_flag,
has_llm_subscription,
@@ -57,9 +58,6 @@ impl LlmTokenClaims {
.map_or(DEFAULT_MAX_MONTHLY_SPEND.0, |preferences| {
preferences.max_monthly_llm_usage_spending_in_cents as u32
}),
custom_llm_monthly_allowance_in_cents: user
.custom_llm_monthly_allowance_in_cents
.map(|allowance| allowance as u32),
plan,
};
@@ -91,12 +89,6 @@ impl LlmTokenClaims {
}
}
}
pub fn free_tier_monthly_spending_limit(&self) -> Cents {
self.custom_llm_monthly_allowance_in_cents
.map(Cents)
.unwrap_or(FREE_TIER_MONTHLY_SPENDING_LIMIT)
}
}
#[derive(Error, Debug)]

View File

@@ -84,8 +84,6 @@ async fn main() -> Result<()> {
let config = envy::from_env::<Config>().expect("error loading config");
init_tracing(&config);
init_panic_hook();
let mut app = Router::new()
.route("/", get(handle_root))
.route("/healthz", get(handle_liveness_probe))
@@ -380,20 +378,3 @@ pub fn init_tracing(config: &Config) -> Option<()> {
None
}
fn init_panic_hook() {
std::panic::set_hook(Box::new(move |panic_info| {
let panic_message = match panic_info.payload().downcast_ref::<&'static str>() {
Some(message) => *message,
None => match panic_info.payload().downcast_ref::<String>() {
Some(message) => message.as_str(),
None => "Box<Any>",
},
};
let backtrace = std::backtrace::Backtrace::force_capture();
let location = panic_info
.location()
.map(|loc| format!("{}:{}", loc.file(), loc.line()));
tracing::error!(panic = true, ?location, %panic_message, %backtrace, "Server Panic");
}));
}

File diff suppressed because it is too large

View File

@@ -1,7 +1,7 @@
use crate::db::{ChannelId, ChannelRole, UserId};
use crate::db::{ChannelId, ChannelRole, DevServerId, PrincipalId, UserId};
use anyhow::{anyhow, Result};
use collections::{BTreeMap, HashMap, HashSet};
use rpc::ConnectionId;
use rpc::{proto, ConnectionId};
use semantic_version::SemanticVersion;
use serde::Serialize;
use std::fmt;
@@ -11,7 +11,9 @@ use tracing::instrument;
pub struct ConnectionPool {
connections: BTreeMap<ConnectionId, Connection>,
connected_users: BTreeMap<UserId, ConnectedPrincipal>,
connected_dev_servers: BTreeMap<DevServerId, ConnectionId>,
channels: ChannelPool,
offline_dev_servers: HashSet<DevServerId>,
}
#[derive(Default, Serialize)]
@@ -30,13 +32,13 @@ impl fmt::Display for ZedVersion {
impl ZedVersion {
pub fn can_collaborate(&self) -> bool {
self.0 >= SemanticVersion::new(0, 157, 0)
self.0 >= SemanticVersion::new(0, 151, 0)
}
}
#[derive(Serialize)]
pub struct Connection {
pub user_id: UserId,
pub principal_id: PrincipalId,
pub admin: bool,
pub zed_version: ZedVersion,
}
@@ -45,6 +47,7 @@ impl ConnectionPool {
pub fn reset(&mut self) {
self.connections.clear();
self.connected_users.clear();
self.connected_dev_servers.clear();
self.channels.clear();
}
@@ -63,7 +66,7 @@ impl ConnectionPool {
self.connections.insert(
connection_id,
Connection {
user_id,
principal_id: PrincipalId::UserId(user_id),
admin,
zed_version,
},
@@ -72,6 +75,25 @@ impl ConnectionPool {
connected_user.connection_ids.insert(connection_id);
}
pub fn add_dev_server(
&mut self,
connection_id: ConnectionId,
dev_server_id: DevServerId,
zed_version: ZedVersion,
) {
self.connections.insert(
connection_id,
Connection {
principal_id: PrincipalId::DevServerId(dev_server_id),
admin: false,
zed_version,
},
);
self.connected_dev_servers
.insert(dev_server_id, connection_id);
}
#[instrument(skip(self))]
pub fn remove_connection(&mut self, connection_id: ConnectionId) -> Result<()> {
let connection = self
@@ -79,18 +101,28 @@ impl ConnectionPool {
.get_mut(&connection_id)
.ok_or_else(|| anyhow!("no such connection"))?;
let user_id = connection.user_id;
let connected_user = self.connected_users.get_mut(&user_id).unwrap();
connected_user.connection_ids.remove(&connection_id);
if connected_user.connection_ids.is_empty() {
self.connected_users.remove(&user_id);
self.channels.remove_user(&user_id);
};
match connection.principal_id {
PrincipalId::UserId(user_id) => {
let connected_user = self.connected_users.get_mut(&user_id).unwrap();
connected_user.connection_ids.remove(&connection_id);
if connected_user.connection_ids.is_empty() {
self.connected_users.remove(&user_id);
self.channels.remove_user(&user_id);
}
}
PrincipalId::DevServerId(dev_server_id) => {
self.connected_dev_servers.remove(&dev_server_id);
self.offline_dev_servers.remove(&dev_server_id);
}
}
self.connections.remove(&connection_id).unwrap();
Ok(())
}
pub fn set_dev_server_offline(&mut self, dev_server_id: DevServerId) {
self.offline_dev_servers.insert(dev_server_id);
}
pub fn connections(&self) -> impl Iterator<Item = &Connection> {
self.connections.values()
}
@@ -115,6 +147,42 @@ impl ConnectionPool {
.copied()
}
pub fn dev_server_status(&self, dev_server_id: DevServerId) -> proto::DevServerStatus {
if self.dev_server_connection_id(dev_server_id).is_some()
&& !self.offline_dev_servers.contains(&dev_server_id)
{
proto::DevServerStatus::Online
} else {
proto::DevServerStatus::Offline
}
}
pub fn dev_server_connection_id(&self, dev_server_id: DevServerId) -> Option<ConnectionId> {
self.connected_dev_servers.get(&dev_server_id).copied()
}
pub fn online_dev_server_connection_id(
&self,
dev_server_id: DevServerId,
) -> Result<ConnectionId> {
match self.connected_dev_servers.get(&dev_server_id) {
Some(cid) => Ok(*cid),
None => Err(anyhow!(proto::ErrorCode::DevServerOffline)),
}
}
pub fn dev_server_connection_id_supporting(
&self,
dev_server_id: DevServerId,
required: ZedVersion,
) -> Result<ConnectionId> {
match self.connected_dev_servers.get(&dev_server_id) {
Some(cid) if self.connections[cid].zed_version >= required => Ok(*cid),
Some(_) => Err(anyhow!(proto::ErrorCode::RemoteUpgradeRequired)),
None => Err(anyhow!(proto::ErrorCode::DevServerOffline)),
}
}
pub fn channel_user_ids(
&self,
channel_id: ChannelId,
@@ -159,22 +227,39 @@ impl ConnectionPool {
#[cfg(test)]
pub fn check_invariants(&self) {
for (connection_id, connection) in &self.connections {
assert!(self
.connected_users
.get(&connection.user_id)
.unwrap()
.connection_ids
.contains(connection_id));
match &connection.principal_id {
PrincipalId::UserId(user_id) => {
assert!(self
.connected_users
.get(user_id)
.unwrap()
.connection_ids
.contains(connection_id));
}
PrincipalId::DevServerId(dev_server_id) => {
assert_eq!(
self.connected_dev_servers.get(dev_server_id).unwrap(),
connection_id
);
}
}
}
for (user_id, state) in &self.connected_users {
for connection_id in &state.connection_ids {
assert_eq!(
self.connections.get(connection_id).unwrap().user_id,
*user_id
self.connections.get(connection_id).unwrap().principal_id,
PrincipalId::UserId(*user_id)
);
}
}
for (dev_server_id, connection_id) in &self.connected_dev_servers {
assert_eq!(
self.connections.get(connection_id).unwrap().principal_id,
PrincipalId::DevServerId(*dev_server_id)
);
}
}
}
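A hedged sketch of the dev server bookkeeping added to the pool above; the ids and version are invented for illustration and assume `ConnectionPool` still derives `Default`.

let mut pool = ConnectionPool::default();
let dev_server_id = DevServerId(1);
let connection_id = ConnectionId { owner_id: 0, id: 7 };
pool.add_dev_server(connection_id, dev_server_id, ZedVersion(SemanticVersion::new(1, 0, 0)));
assert_eq!(pool.dev_server_status(dev_server_id), proto::DevServerStatus::Online);
// Marking it offline keeps the connection but reports Offline until it reconnects.
pool.set_dev_server_offline(dev_server_id);
assert_eq!(pool.dev_server_status(dev_server_id), proto::DevServerStatus::Offline);
// Dropping the connection clears both the connection map and the dev server index.
pool.remove_connection(connection_id).unwrap();
assert!(pool.dev_server_connection_id(dev_server_id).is_none());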

View File

@@ -8,6 +8,7 @@ mod channel_buffer_tests;
mod channel_guest_tests;
mod channel_message_tests;
mod channel_tests;
mod dev_server_tests;
mod editor_tests;
mod following_tests;
mod integration_tests;

View File

@@ -95,9 +95,7 @@ async fn test_channel_guest_promotion(cx_a: &mut TestAppContext, cx_b: &mut Test
let room_b = cx_b
.read(ActiveCall::global)
.update(cx_b, |call, _| call.room().unwrap().clone());
cx_b.simulate_keystrokes("cmd-p");
cx_a.run_until_parked();
cx_b.simulate_keystrokes("1 enter");
cx_b.simulate_keystrokes("cmd-p 1 enter");
let (project_b, editor_b) = workspace_b.update(cx_b, |workspace, cx| {
(

View File

@@ -0,0 +1,643 @@
use std::{path::Path, sync::Arc};
use call::ActiveCall;
use editor::Editor;
use fs::Fs;
use gpui::{TestAppContext, VisualTestContext, WindowHandle};
use rpc::{proto::DevServerStatus, ErrorCode, ErrorExt};
use serde_json::json;
use workspace::{AppState, Workspace};
use crate::tests::{following_tests::join_channel, TestServer};
use super::TestClient;
#[gpui::test]
async fn test_dev_server(cx: &mut gpui::TestAppContext, cx2: &mut gpui::TestAppContext) {
let (server, client) = TestServer::start1(cx).await;
let store = cx.update(|cx| dev_server_projects::Store::global(cx).clone());
let resp = store
.update(cx, |store, cx| {
store.create_dev_server("server-1".to_string(), None, cx)
})
.await
.unwrap();
store.update(cx, |store, _| {
assert_eq!(store.dev_servers().len(), 1);
assert_eq!(store.dev_servers()[0].name, "server-1");
assert_eq!(store.dev_servers()[0].status, DevServerStatus::Offline);
});
let dev_server = server.create_dev_server(resp.access_token, cx2).await;
cx.executor().run_until_parked();
store.update(cx, |store, _| {
assert_eq!(store.dev_servers()[0].status, DevServerStatus::Online);
});
dev_server
.fs()
.insert_tree(
"/remote",
json!({
"1.txt": "remote\nremote\nremote",
"2.js": "function two() { return 2; }",
"3.rs": "mod test",
}),
)
.await;
store
.update(cx, |store, cx| {
store.create_dev_server_project(
client::DevServerId(resp.dev_server_id),
"/remote".to_string(),
cx,
)
})
.await
.unwrap();
cx.executor().run_until_parked();
let remote_workspace = store
.update(cx, |store, cx| {
let projects = store.dev_server_projects();
assert_eq!(projects.len(), 1);
assert_eq!(projects[0].paths, vec!["/remote"]);
workspace::join_dev_server_project(
projects[0].id,
projects[0].project_id.unwrap(),
client.app_state.clone(),
None,
cx,
)
})
.await
.unwrap();
cx.executor().run_until_parked();
let cx = VisualTestContext::from_window(remote_workspace.into(), cx).as_mut();
cx.simulate_keystrokes("cmd-p 1 enter");
let editor = remote_workspace
.update(cx, |ws, cx| {
ws.active_item_as::<Editor>(cx).unwrap().clone()
})
.unwrap();
editor.update(cx, |ed, cx| {
assert_eq!(ed.text(cx).to_string(), "remote\nremote\nremote");
});
cx.simulate_input("wow!");
cx.simulate_keystrokes("cmd-s");
let content = dev_server
.fs()
.load(Path::new("/remote/1.txt"))
.await
.unwrap();
assert_eq!(content, "wow!remote\nremote\nremote\n");
}
#[gpui::test]
async fn test_dev_server_env_files(
cx1: &mut gpui::TestAppContext,
cx2: &mut gpui::TestAppContext,
cx3: &mut gpui::TestAppContext,
) {
let (server, client1, client2, channel_id) = TestServer::start2(cx1, cx2).await;
let (_dev_server, remote_workspace) =
create_dev_server_project(&server, client1.app_state.clone(), cx1, cx3).await;
cx1.executor().run_until_parked();
let cx1 = VisualTestContext::from_window(remote_workspace.into(), cx1).as_mut();
cx1.simulate_keystrokes("cmd-p . e enter");
let editor = remote_workspace
.update(cx1, |ws, cx| {
ws.active_item_as::<Editor>(cx).unwrap().clone()
})
.unwrap();
editor.update(cx1, |ed, cx| {
assert_eq!(ed.text(cx).to_string(), "SECRET");
});
cx1.update(|cx| {
workspace::join_channel(
channel_id,
client1.app_state.clone(),
Some(remote_workspace),
cx,
)
})
.await
.unwrap();
cx1.executor().run_until_parked();
remote_workspace
.update(cx1, |ws, cx| {
assert!(ws.project().read(cx).is_shared());
})
.unwrap();
join_channel(channel_id, &client2, cx2).await.unwrap();
cx2.executor().run_until_parked();
let (workspace2, cx2) = client2.active_workspace(cx2);
let editor = workspace2.update(cx2, |ws, cx| {
ws.active_item_as::<Editor>(cx).unwrap().clone()
});
// TODO: it'd be nice to hide .env files from other people
editor.update(cx2, |ed, cx| {
assert_eq!(ed.text(cx).to_string(), "SECRET");
});
}
async fn create_dev_server_project(
server: &TestServer,
client_app_state: Arc<AppState>,
cx: &mut TestAppContext,
cx_devserver: &mut TestAppContext,
) -> (TestClient, WindowHandle<Workspace>) {
let store = cx.update(|cx| dev_server_projects::Store::global(cx).clone());
let resp = store
.update(cx, |store, cx| {
store.create_dev_server("server-1".to_string(), None, cx)
})
.await
.unwrap();
let dev_server = server
.create_dev_server(resp.access_token, cx_devserver)
.await;
cx.executor().run_until_parked();
dev_server
.fs()
.insert_tree(
"/remote",
json!({
"1.txt": "remote\nremote\nremote",
".env": "SECRET",
}),
)
.await;
store
.update(cx, |store, cx| {
store.create_dev_server_project(
client::DevServerId(resp.dev_server_id),
"/remote".to_string(),
cx,
)
})
.await
.unwrap();
cx.executor().run_until_parked();
let workspace = store
.update(cx, |store, cx| {
let projects = store.dev_server_projects();
assert_eq!(projects.len(), 1);
assert_eq!(projects[0].paths, vec!["/remote"]);
workspace::join_dev_server_project(
projects[0].id,
projects[0].project_id.unwrap(),
client_app_state,
None,
cx,
)
})
.await
.unwrap();
cx.executor().run_until_parked();
(dev_server, workspace)
}
#[gpui::test]
async fn test_dev_server_leave_room(
cx1: &mut gpui::TestAppContext,
cx2: &mut gpui::TestAppContext,
cx3: &mut gpui::TestAppContext,
) {
let (server, client1, client2, channel_id) = TestServer::start2(cx1, cx2).await;
let (_dev_server, remote_workspace) =
create_dev_server_project(&server, client1.app_state.clone(), cx1, cx3).await;
cx1.update(|cx| {
workspace::join_channel(
channel_id,
client1.app_state.clone(),
Some(remote_workspace),
cx,
)
})
.await
.unwrap();
cx1.executor().run_until_parked();
remote_workspace
.update(cx1, |ws, cx| {
assert!(ws.project().read(cx).is_shared());
})
.unwrap();
join_channel(channel_id, &client2, cx2).await.unwrap();
cx2.executor().run_until_parked();
cx1.update(|cx| ActiveCall::global(cx).update(cx, |active_call, cx| active_call.hang_up(cx)))
.await
.unwrap();
cx1.executor().run_until_parked();
let (workspace, cx2) = client2.active_workspace(cx2);
cx2.update(|cx| assert!(workspace.read(cx).project().read(cx).is_disconnected(cx)));
}
#[gpui::test]
async fn test_dev_server_delete(
cx1: &mut gpui::TestAppContext,
cx2: &mut gpui::TestAppContext,
cx3: &mut gpui::TestAppContext,
) {
let (server, client1, client2, channel_id) = TestServer::start2(cx1, cx2).await;
let (_dev_server, remote_workspace) =
create_dev_server_project(&server, client1.app_state.clone(), cx1, cx3).await;
cx1.update(|cx| {
workspace::join_channel(
channel_id,
client1.app_state.clone(),
Some(remote_workspace),
cx,
)
})
.await
.unwrap();
cx1.executor().run_until_parked();
remote_workspace
.update(cx1, |ws, cx| {
assert!(ws.project().read(cx).is_shared());
})
.unwrap();
join_channel(channel_id, &client2, cx2).await.unwrap();
cx2.executor().run_until_parked();
cx1.update(|cx| {
dev_server_projects::Store::global(cx).update(cx, |store, cx| {
store.delete_dev_server_project(store.dev_server_projects().first().unwrap().id, cx)
})
})
.await
.unwrap();
cx1.executor().run_until_parked();
let (workspace, cx2) = client2.active_workspace(cx2);
cx2.update(|cx| assert!(workspace.read(cx).project().read(cx).is_disconnected(cx)));
cx1.update(|cx| {
dev_server_projects::Store::global(cx).update(cx, |store, _| {
assert_eq!(store.dev_server_projects().len(), 0);
})
})
}
#[gpui::test]
async fn test_dev_server_rename(
cx1: &mut gpui::TestAppContext,
cx2: &mut gpui::TestAppContext,
cx3: &mut gpui::TestAppContext,
) {
let (server, client1, client2, channel_id) = TestServer::start2(cx1, cx2).await;
let (_dev_server, remote_workspace) =
create_dev_server_project(&server, client1.app_state.clone(), cx1, cx3).await;
cx1.update(|cx| {
workspace::join_channel(
channel_id,
client1.app_state.clone(),
Some(remote_workspace),
cx,
)
})
.await
.unwrap();
cx1.executor().run_until_parked();
remote_workspace
.update(cx1, |ws, cx| {
assert!(ws.project().read(cx).is_shared());
})
.unwrap();
join_channel(channel_id, &client2, cx2).await.unwrap();
cx2.executor().run_until_parked();
cx1.update(|cx| {
dev_server_projects::Store::global(cx).update(cx, |store, cx| {
store.rename_dev_server(
store.dev_servers().first().unwrap().id,
"name-edited".to_string(),
None,
cx,
)
})
})
.await
.unwrap();
cx1.executor().run_until_parked();
cx1.update(|cx| {
dev_server_projects::Store::global(cx).update(cx, |store, _| {
assert_eq!(store.dev_servers().first().unwrap().name, "name-edited");
})
})
}
#[gpui::test]
async fn test_dev_server_refresh_access_token(
cx1: &mut gpui::TestAppContext,
cx2: &mut gpui::TestAppContext,
cx3: &mut gpui::TestAppContext,
cx4: &mut gpui::TestAppContext,
) {
let (server, client1, client2, channel_id) = TestServer::start2(cx1, cx2).await;
let (_dev_server, remote_workspace) =
create_dev_server_project(&server, client1.app_state.clone(), cx1, cx3).await;
cx1.update(|cx| {
workspace::join_channel(
channel_id,
client1.app_state.clone(),
Some(remote_workspace),
cx,
)
})
.await
.unwrap();
cx1.executor().run_until_parked();
remote_workspace
.update(cx1, |ws, cx| {
assert!(ws.project().read(cx).is_shared());
})
.unwrap();
join_channel(channel_id, &client2, cx2).await.unwrap();
cx2.executor().run_until_parked();
// Regenerate the access token
let new_token_response = cx1
.update(|cx| {
dev_server_projects::Store::global(cx).update(cx, |store, cx| {
store.regenerate_dev_server_token(store.dev_servers().first().unwrap().id, cx)
})
})
.await
.unwrap();
cx1.executor().run_until_parked();
// Assert that the other client was disconnected
let (workspace, cx2) = client2.active_workspace(cx2);
cx2.update(|cx| assert!(workspace.read(cx).project().read(cx).is_disconnected(cx)));
// Assert that the owner of the dev server does not see the dev server as online anymore
let (workspace, cx1) = client1.active_workspace(cx1);
cx1.update(|cx| {
assert!(workspace.read(cx).project().read(cx).is_disconnected(cx));
dev_server_projects::Store::global(cx).update(cx, |store, _| {
assert_eq!(
store.dev_servers().first().unwrap().status,
DevServerStatus::Offline
);
})
});
// Reconnect the dev server with the new token
let _dev_server = server
.create_dev_server(new_token_response.access_token, cx4)
.await;
cx1.executor().run_until_parked();
// Assert that the dev server is online again
cx1.update(|cx| {
dev_server_projects::Store::global(cx).update(cx, |store, _| {
assert_eq!(store.dev_servers().len(), 1);
assert_eq!(
store.dev_servers().first().unwrap().status,
DevServerStatus::Online
);
})
});
}
#[gpui::test]
async fn test_dev_server_reconnect(
cx1: &mut gpui::TestAppContext,
cx2: &mut gpui::TestAppContext,
cx3: &mut gpui::TestAppContext,
) {
let (mut server, client1) = TestServer::start1(cx1).await;
let channel_id = server
.make_channel("test", None, (&client1, cx1), &mut [])
.await;
let (_dev_server, remote_workspace) =
create_dev_server_project(&server, client1.app_state.clone(), cx1, cx3).await;
cx1.update(|cx| {
workspace::join_channel(
channel_id,
client1.app_state.clone(),
Some(remote_workspace),
cx,
)
})
.await
.unwrap();
cx1.executor().run_until_parked();
remote_workspace
.update(cx1, |ws, cx| {
assert!(ws.project().read(cx).is_shared());
})
.unwrap();
drop(client1);
let client2 = server.create_client(cx2, "user_a").await;
let store = cx2.update(|cx| dev_server_projects::Store::global(cx).clone());
store
.update(cx2, |store, cx| {
let projects = store.dev_server_projects();
workspace::join_dev_server_project(
projects[0].id,
projects[0].project_id.unwrap(),
client2.app_state.clone(),
None,
cx,
)
})
.await
.unwrap();
}
#[gpui::test]
async fn test_dev_server_restart(cx1: &mut gpui::TestAppContext, cx2: &mut gpui::TestAppContext) {
let (server, client1) = TestServer::start1(cx1).await;
let (_dev_server, remote_workspace) =
create_dev_server_project(&server, client1.app_state.clone(), cx1, cx2).await;
let cx = VisualTestContext::from_window(remote_workspace.into(), cx1).as_mut();
server.reset().await;
cx.run_until_parked();
cx.simulate_keystrokes("cmd-p 1 enter");
remote_workspace
.update(cx, |ws, cx| {
ws.active_item_as::<Editor>(cx)
.unwrap()
.update(cx, |ed, cx| {
assert_eq!(ed.text(cx).to_string(), "remote\nremote\nremote");
})
})
.unwrap();
}
#[gpui::test]
async fn test_create_dev_server_project_path_validation(
cx1: &mut gpui::TestAppContext,
cx2: &mut gpui::TestAppContext,
cx3: &mut gpui::TestAppContext,
) {
let (server, client1) = TestServer::start1(cx1).await;
let _channel_id = server
.make_channel("test", None, (&client1, cx1), &mut [])
.await;
// Creating a project with a path that does exist should not fail
let (_dev_server, _) =
create_dev_server_project(&server, client1.app_state.clone(), cx1, cx2).await;
cx1.executor().run_until_parked();
let store = cx1.update(|cx| dev_server_projects::Store::global(cx).clone());
let resp = store
.update(cx1, |store, cx| {
store.create_dev_server("server-2".to_string(), None, cx)
})
.await
.unwrap();
cx1.executor().run_until_parked();
let _dev_server = server.create_dev_server(resp.access_token, cx3).await;
cx1.executor().run_until_parked();
// Creating a remote project with a path that does not exist should fail
let result = store
.update(cx1, |store, cx| {
store.create_dev_server_project(
client::DevServerId(resp.dev_server_id),
"/notfound".to_string(),
cx,
)
})
.await;
cx1.executor().run_until_parked();
let error = result.unwrap_err();
assert!(matches!(
error.error_code(),
ErrorCode::DevServerProjectPathDoesNotExist
));
}
#[gpui::test]
async fn test_save_as_remote(cx1: &mut gpui::TestAppContext, cx2: &mut gpui::TestAppContext) {
let (server, client1) = TestServer::start1(cx1).await;
// Set up a dev server project to save files into.
let (dev_server, remote_workspace) =
create_dev_server_project(&server, client1.app_state.clone(), cx1, cx2).await;
let mut cx = VisualTestContext::from_window(remote_workspace.into(), cx1);
cx.simulate_keystrokes("cmd-p 1 enter");
cx.simulate_keystrokes("cmd-shift-s");
cx.simulate_input("2.txt");
cx.simulate_keystrokes("enter");
cx.executor().run_until_parked();
let title = remote_workspace
.update(&mut cx, |ws, cx| {
let active_item = ws.active_item(cx).unwrap();
active_item.tab_description(0, cx).unwrap()
})
.unwrap();
assert_eq!(title, "2.txt");
let path = Path::new("/remote/2.txt");
assert_eq!(
dev_server.fs().load(path).await.unwrap(),
"remote\nremote\nremote"
);
}
#[gpui::test]
async fn test_new_file_remote(cx1: &mut gpui::TestAppContext, cx2: &mut gpui::TestAppContext) {
let (server, client1) = TestServer::start1(cx1).await;
// Set up a dev server project to create a new file in.
let (dev_server, remote_workspace) =
create_dev_server_project(&server, client1.app_state.clone(), cx1, cx2).await;
let mut cx = VisualTestContext::from_window(remote_workspace.into(), cx1);
cx.simulate_keystrokes("cmd-n");
cx.simulate_input("new!");
cx.simulate_keystrokes("cmd-shift-s");
cx.simulate_input("2.txt");
cx.simulate_keystrokes("enter");
cx.executor().run_until_parked();
let title = remote_workspace
.update(&mut cx, |ws, cx| {
ws.active_item(cx).unwrap().tab_description(0, cx).unwrap()
})
.unwrap();
assert_eq!(title, "2.txt");
let path = Path::new("/remote/2.txt");
assert_eq!(dev_server.fs().load(path).await.unwrap(), "new!");
}

View File

@@ -1589,9 +1589,8 @@ async fn test_following_stops_on_unshare(cx_a: &mut TestAppContext, cx_b: &mut T
.await;
let (workspace_b, cx_b) = client_b.join_workspace(channel_id, cx_b).await;
cx_a.simulate_keystrokes("cmd-p");
cx_a.simulate_keystrokes("cmd-p 2 enter");
cx_a.run_until_parked();
cx_a.simulate_keystrokes("2 enter");
let editor_a = workspace_a.update(cx_a, |workspace, cx| {
workspace.active_item_as::<Editor>(cx).unwrap()
@@ -2042,9 +2041,7 @@ async fn test_following_to_channel_notes_other_workspace(
share_workspace(&workspace_a, cx_a).await.unwrap();
// a opens 1.txt
cx_a.simulate_keystrokes("cmd-p");
cx_a.run_until_parked();
cx_a.simulate_keystrokes("1 enter");
cx_a.simulate_keystrokes("cmd-p 1 enter");
cx_a.run_until_parked();
workspace_a.update(cx_a, |workspace, cx| {
let editor = workspace.active_item(cx).unwrap();
@@ -2101,9 +2098,7 @@ async fn test_following_while_deactivated(cx_a: &mut TestAppContext, cx_b: &mut
share_workspace(&workspace_a, cx_a).await.unwrap();
// a opens 1.txt
cx_a.simulate_keystrokes("cmd-p");
cx_a.run_until_parked();
cx_a.simulate_keystrokes("1 enter");
cx_a.simulate_keystrokes("cmd-p 1 enter");
cx_a.run_until_parked();
workspace_a.update(cx_a, |workspace, cx| {
let editor = workspace.active_item(cx).unwrap();
@@ -2123,9 +2118,7 @@ async fn test_following_while_deactivated(cx_a: &mut TestAppContext, cx_b: &mut
cx_b.simulate_keystrokes("down");
// a opens a different file while not followed
cx_a.simulate_keystrokes("cmd-p");
cx_a.run_until_parked();
cx_a.simulate_keystrokes("2 enter");
cx_a.simulate_keystrokes("cmd-p 2 enter");
workspace_b.update(cx_b, |workspace, cx| {
let editor = workspace.active_item_as::<Editor>(cx).unwrap();
@@ -2135,9 +2128,7 @@ async fn test_following_while_deactivated(cx_a: &mut TestAppContext, cx_b: &mut
// a opens a file in a new window
let (_, cx_a2) = client_a.build_test_workspace(&mut cx_a2).await;
cx_a2.update(|cx| cx.activate_window());
cx_a2.simulate_keystrokes("cmd-p");
cx_a2.run_until_parked();
cx_a2.simulate_keystrokes("3 enter");
cx_a2.simulate_keystrokes("cmd-p 3 enter");
cx_a2.run_until_parked();
// b starts following a again

View File

@@ -1,4 +1,5 @@
use crate::{
auth::split_dev_server_token,
db::{tests::TestDb, NewUserParams, UserId},
executor::Executor,
rpc::{Principal, Server, ZedVersion, CLEANUP_TIMEOUT, RECONNECT_TIMEOUT},
@@ -203,7 +204,7 @@ impl TestServer {
.override_authenticate(move |cx| {
cx.spawn(|_| async move {
let access_token = "the-token".to_string();
Ok(Credentials {
Ok(Credentials::User {
user_id: user_id.to_proto(),
access_token,
})
@@ -212,7 +213,7 @@ impl TestServer {
.override_establish_connection(move |credentials, cx| {
assert_eq!(
credentials,
&Credentials {
&Credentials::User {
user_id: user_id.0 as u64,
access_token: "the-token".into()
}
@@ -296,6 +297,7 @@ impl TestServer {
collab_ui::init(&app_state, cx);
file_finder::init(cx);
menu::init();
dev_server_projects::init(client.clone(), cx);
settings::KeymapFile::load_asset(os_keymap, cx).unwrap();
language_model::LanguageModelRegistry::test(cx);
assistant::context_store::init(&client.clone().into());
@@ -317,6 +319,135 @@ impl TestServer {
client
}
pub async fn create_dev_server(
&self,
access_token: String,
cx: &mut TestAppContext,
) -> TestClient {
cx.update(|cx| {
if cx.has_global::<SettingsStore>() {
panic!("Same cx used to create two test clients")
}
let settings = SettingsStore::test(cx);
cx.set_global(settings);
release_channel::init(SemanticVersion::default(), cx);
client::init_settings(cx);
});
let (dev_server_id, _) = split_dev_server_token(&access_token).unwrap();
let clock = Arc::new(FakeSystemClock::default());
let http = FakeHttpClient::with_404_response();
let mut client = cx.update(|cx| Client::new(clock, http.clone(), cx));
let server = self.server.clone();
let db = self.app_state.db.clone();
let connection_killers = self.connection_killers.clone();
let forbid_connections = self.forbid_connections.clone();
Arc::get_mut(&mut client)
.unwrap()
.set_id(1)
.set_dev_server_token(client::DevServerToken(access_token.clone()))
.override_establish_connection(move |credentials, cx| {
assert_eq!(
credentials,
&Credentials::DevServer {
token: client::DevServerToken(access_token.to_string())
}
);
let server = server.clone();
let db = db.clone();
let connection_killers = connection_killers.clone();
let forbid_connections = forbid_connections.clone();
cx.spawn(move |cx| async move {
if forbid_connections.load(SeqCst) {
Err(EstablishConnectionError::other(anyhow!(
"server is forbidding connections"
)))
} else {
let (client_conn, server_conn, killed) =
Connection::in_memory(cx.background_executor().clone());
let (connection_id_tx, connection_id_rx) = oneshot::channel();
let dev_server = db
.get_dev_server(dev_server_id)
.await
.expect("retrieving dev_server failed");
cx.background_executor()
.spawn(server.handle_connection(
server_conn,
"dev-server".to_string(),
Principal::DevServer(dev_server),
ZedVersion(SemanticVersion::new(1, 0, 0)),
None,
Some(connection_id_tx),
Executor::Deterministic(cx.background_executor().clone()),
))
.detach();
let connection_id = connection_id_rx.await.map_err(|e| {
EstablishConnectionError::Other(anyhow!(
"{} (is server shutting down?)",
e
))
})?;
connection_killers
.lock()
.insert(connection_id.into(), killed);
Ok(client_conn)
}
})
});
let fs = FakeFs::new(cx.executor());
let user_store = cx.new_model(|cx| UserStore::new(client.clone(), cx));
let workspace_store = cx.new_model(|cx| WorkspaceStore::new(client.clone(), cx));
let language_registry = Arc::new(LanguageRegistry::test(cx.executor()));
let session = cx.new_model(|cx| AppSession::new(Session::test(), cx));
let app_state = Arc::new(workspace::AppState {
client: client.clone(),
user_store: user_store.clone(),
workspace_store,
languages: language_registry,
fs: fs.clone(),
build_window_options: |_, _| Default::default(),
node_runtime: NodeRuntime::unavailable(),
session,
});
cx.update(|cx| {
theme::init(theme::LoadThemes::JustBase, cx);
Project::init(&client, cx);
client::init(&client, cx);
language::init(cx);
editor::init(cx);
workspace::init(app_state.clone(), cx);
call::init(client.clone(), user_store.clone(), cx);
channel::init(&client, user_store.clone(), cx);
notifications::init(client.clone(), user_store, cx);
collab_ui::init(&app_state, cx);
file_finder::init(cx);
menu::init();
headless::init(
client.clone(),
headless::AppState {
languages: app_state.languages.clone(),
user_store: app_state.user_store.clone(),
fs: fs.clone(),
node_runtime: app_state.node_runtime.clone(),
},
cx,
)
})
.await
.unwrap();
TestClient {
app_state,
username: "dev-server".to_string(),
channel_store: cx.read(ChannelStore::global).clone(),
notification_store: cx.read(NotificationStore::global).clone(),
state: Default::default(),
}
}
pub fn disconnect_client(&self, peer_id: PeerId) {
self.connection_killers
.lock()

View File

@@ -11,7 +11,7 @@ use collections::HashMap;
use crate::client::Client;
use crate::types;
const PROTOCOL_VERSION: &str = "2024-10-07";
const PROTOCOL_VERSION: u32 = 1;
pub struct ModelContextProtocol {
inner: Client,
@@ -22,19 +22,12 @@ impl ModelContextProtocol {
Self { inner }
}
fn supported_protocols() -> Vec<types::ProtocolVersion> {
vec![
types::ProtocolVersion::VersionString(PROTOCOL_VERSION.to_string()),
types::ProtocolVersion::VersionNumber(1),
]
}
pub async fn initialize(
self,
client_info: types::Implementation,
) -> Result<InitializedContextServerProtocol> {
let params = types::InitializeParams {
protocol_version: types::ProtocolVersion::VersionString(PROTOCOL_VERSION.to_string()),
protocol_version: PROTOCOL_VERSION,
capabilities: types::ClientCapabilities {
experimental: None,
sampling: None,
@@ -47,13 +40,6 @@ impl ModelContextProtocol {
.request(types::RequestType::Initialize.as_str(), params)
.await?;
if !Self::supported_protocols().contains(&response.protocol_version) {
return Err(anyhow::anyhow!(
"Unsupported protocol version: {:?}",
response.protocol_version
));
}
log::trace!("mcp server info {:?}", response.server_info);
self.inner.notify(

View File

@@ -36,17 +36,10 @@ impl RequestType {
}
}
#[derive(Debug, PartialEq, Eq, Serialize, Deserialize)]
#[serde(untagged)]
pub enum ProtocolVersion {
VersionString(String),
VersionNumber(u32),
}
#[derive(Debug, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct InitializeParams {
pub protocol_version: ProtocolVersion,
pub protocol_version: u32,
pub capabilities: ClientCapabilities,
pub client_info: Implementation,
}
@@ -138,7 +131,7 @@ pub struct CompletionArgument {
#[derive(Debug, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct InitializeResponse {
pub protocol_version: ProtocolVersion,
pub protocol_version: u32,
pub capabilities: ServerCapabilities,
pub server_info: Implementation,
}
@@ -152,9 +145,10 @@ pub struct ResourcesReadResponse {
#[derive(Debug, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct ResourcesListResponse {
pub resources: Vec<Resource>,
#[serde(skip_serializing_if = "Option::is_none")]
pub next_cursor: Option<String>,
pub resource_templates: Option<Vec<ResourceTemplate>>,
#[serde(skip_serializing_if = "Option::is_none")]
pub resources: Option<Vec<Resource>>,
}
#[derive(Debug, Serialize, Deserialize)]
@@ -185,15 +179,13 @@ pub enum SamplingContent {
pub struct PromptsGetResponse {
#[serde(skip_serializing_if = "Option::is_none")]
pub description: Option<String>,
pub messages: Vec<SamplingMessage>,
pub prompt: String,
}
#[derive(Debug, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct PromptsListResponse {
pub prompts: Vec<Prompt>,
#[serde(skip_serializing_if = "Option::is_none")]
pub next_cursor: Option<String>,
}
#[derive(Debug, Deserialize)]

View File

@@ -0,0 +1,23 @@
[package]
name = "dev_server_projects"
version = "0.1.0"
edition = "2021"
publish = false
license = "GPL-3.0-or-later"
[lints]
workspace = true
[lib]
path = "src/dev_server_projects.rs"
doctest = false
[dependencies]
anyhow.workspace = true
gpui.workspace = true
serde.workspace = true
client.workspace = true
rpc.workspace = true
[dev-dependencies]
serde_json.workspace = true

View File

@@ -0,0 +1 @@
../../LICENSE-GPL

View File

@@ -1 +1,249 @@
use anyhow::Result;
use gpui::{AppContext, AsyncAppContext, Context, Global, Model, ModelContext, SharedString, Task};
use rpc::{
proto::{self, DevServerStatus},
TypedEnvelope,
};
use std::{collections::HashMap, sync::Arc};
use client::{Client, ProjectId};
pub use client::{DevServerId, DevServerProjectId};
pub struct Store {
dev_server_projects: HashMap<DevServerProjectId, DevServerProject>,
dev_servers: HashMap<DevServerId, DevServer>,
_subscriptions: Vec<client::Subscription>,
client: Arc<Client>,
}
#[derive(Debug, Clone)]
pub struct DevServerProject {
pub id: DevServerProjectId,
pub project_id: Option<ProjectId>,
pub paths: Vec<SharedString>,
pub dev_server_id: DevServerId,
}
impl From<proto::DevServerProject> for DevServerProject {
fn from(project: proto::DevServerProject) -> Self {
Self {
id: DevServerProjectId(project.id),
project_id: project.project_id.map(ProjectId),
paths: project.paths.into_iter().map(|path| path.into()).collect(),
dev_server_id: DevServerId(project.dev_server_id),
}
}
}
#[derive(Debug, Clone)]
pub struct DevServer {
pub id: DevServerId,
pub name: SharedString,
pub ssh_connection_string: Option<SharedString>,
pub status: DevServerStatus,
}
impl From<proto::DevServer> for DevServer {
fn from(dev_server: proto::DevServer) -> Self {
Self {
id: DevServerId(dev_server.dev_server_id),
status: dev_server.status(),
name: dev_server.name.into(),
ssh_connection_string: dev_server.ssh_connection_string.map(|s| s.into()),
}
}
}
struct GlobalStore(Model<Store>);
impl Global for GlobalStore {}
pub fn init(client: Arc<Client>, cx: &mut AppContext) {
let store = cx.new_model(|cx| Store::new(client, cx));
cx.set_global(GlobalStore(store));
}
impl Store {
pub fn global(cx: &AppContext) -> Model<Store> {
cx.global::<GlobalStore>().0.clone()
}
pub fn new(client: Arc<Client>, cx: &ModelContext<Self>) -> Self {
Self {
dev_server_projects: Default::default(),
dev_servers: Default::default(),
_subscriptions: vec![client
.add_message_handler(cx.weak_model(), Self::handle_dev_server_projects_update)],
client,
}
}
pub fn projects_for_server(&self, id: DevServerId) -> Vec<DevServerProject> {
let mut projects: Vec<DevServerProject> = self
.dev_server_projects
.values()
.filter(|project| project.dev_server_id == id)
.cloned()
.collect();
projects.sort_by_key(|p| (p.paths.clone(), p.id));
projects
}
pub fn dev_servers(&self) -> Vec<DevServer> {
let mut dev_servers: Vec<DevServer> = self.dev_servers.values().cloned().collect();
dev_servers.sort_by_key(|d| (d.status == DevServerStatus::Offline, d.name.clone(), d.id));
dev_servers
}
pub fn dev_server(&self, id: DevServerId) -> Option<&DevServer> {
self.dev_servers.get(&id)
}
pub fn dev_server_status(&self, id: DevServerId) -> DevServerStatus {
self.dev_server(id)
.map(|server| server.status)
.unwrap_or(DevServerStatus::Offline)
}
pub fn dev_server_projects(&self) -> Vec<DevServerProject> {
let mut projects: Vec<DevServerProject> =
self.dev_server_projects.values().cloned().collect();
projects.sort_by_key(|p| (p.paths.clone(), p.id));
projects
}
pub fn dev_server_project(&self, id: DevServerProjectId) -> Option<&DevServerProject> {
self.dev_server_projects.get(&id)
}
pub fn dev_server_for_project(&self, id: DevServerProjectId) -> Option<&DevServer> {
self.dev_server_project(id)
.and_then(|project| self.dev_server(project.dev_server_id))
}
async fn handle_dev_server_projects_update(
this: Model<Self>,
envelope: TypedEnvelope<proto::DevServerProjectsUpdate>,
mut cx: AsyncAppContext,
) -> Result<()> {
this.update(&mut cx, |this, cx| {
this.dev_servers = envelope
.payload
.dev_servers
.into_iter()
.map(|dev_server| (DevServerId(dev_server.dev_server_id), dev_server.into()))
.collect();
this.dev_server_projects = envelope
.payload
.dev_server_projects
.into_iter()
.map(|project| (DevServerProjectId(project.id), project.into()))
.collect();
cx.notify();
})?;
Ok(())
}
pub fn create_dev_server_project(
&mut self,
dev_server_id: DevServerId,
path: String,
cx: &mut ModelContext<Self>,
) -> Task<Result<proto::CreateDevServerProjectResponse>> {
let client = self.client.clone();
cx.background_executor().spawn(async move {
client
.request(proto::CreateDevServerProject {
dev_server_id: dev_server_id.0,
path,
})
.await
})
}
pub fn create_dev_server(
&mut self,
name: String,
ssh_connection_string: Option<String>,
cx: &mut ModelContext<Self>,
) -> Task<Result<proto::CreateDevServerResponse>> {
let client = self.client.clone();
cx.background_executor().spawn(async move {
let result = client
.request(proto::CreateDevServer {
name,
ssh_connection_string,
})
.await?;
Ok(result)
})
}
pub fn rename_dev_server(
&mut self,
dev_server_id: DevServerId,
name: String,
ssh_connection_string: Option<String>,
cx: &mut ModelContext<Self>,
) -> Task<Result<()>> {
let client = self.client.clone();
cx.background_executor().spawn(async move {
client
.request(proto::RenameDevServer {
dev_server_id: dev_server_id.0,
name,
ssh_connection_string,
})
.await?;
Ok(())
})
}
pub fn regenerate_dev_server_token(
&mut self,
dev_server_id: DevServerId,
cx: &mut ModelContext<Self>,
) -> Task<Result<proto::RegenerateDevServerTokenResponse>> {
let client = self.client.clone();
cx.background_executor().spawn(async move {
client
.request(proto::RegenerateDevServerToken {
dev_server_id: dev_server_id.0,
})
.await
})
}
pub fn delete_dev_server(
&mut self,
id: DevServerId,
cx: &mut ModelContext<Self>,
) -> Task<Result<()>> {
let client = self.client.clone();
cx.background_executor().spawn(async move {
client
.request(proto::DeleteDevServer {
dev_server_id: id.0,
})
.await?;
Ok(())
})
}
pub fn delete_dev_server_project(
&mut self,
id: DevServerProjectId,
cx: &mut ModelContext<Self>,
) -> Task<Result<()>> {
let client = self.client.clone();
cx.background_executor().spawn(async move {
client
.request(proto::DeleteDevServerProject {
dev_server_project_id: id.0,
})
.await?;
Ok(())
})
}
}
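
For orientation, a minimal read-only usage sketch of the store defined above, assuming `init` has already registered the global and that the proto enum exposes a `DevServerStatus::Online` variant:

    // Hypothetical helper: list the dev servers currently reported as online.
    fn online_dev_servers(cx: &AppContext) -> Vec<DevServer> {
        let store = Store::global(cx);
        store
            .read(cx)
            .dev_servers()
            .into_iter()
            .filter(|server| server.status == DevServerStatus::Online)
            .collect()
    }
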

View File

@@ -81,7 +81,6 @@ ui.workspace = true
url.workspace = true
util.workspace = true
workspace.workspace = true
unicode-segmentation.workspace = true
[dev-dependencies]
ctor.workspace = true

View File

@@ -18,7 +18,7 @@ pub struct SelectPrevious {
#[derive(PartialEq, Clone, Deserialize, Default)]
pub struct MoveToBeginningOfLine {
#[serde(default = "default_true")]
pub stop_at_soft_wraps: bool,
pub(super) stop_at_soft_wraps: bool,
}
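
As a standalone illustration of the `default_true` attribute on this action, here is a hedged sketch using `serde_json` outside the editor crate; the struct is redeclared locally purely for demonstration:

    use serde::Deserialize;

    fn default_true() -> bool {
        true
    }

    #[derive(Debug, Deserialize, Default, PartialEq)]
    struct MoveToBeginningOfLine {
        #[serde(default = "default_true")]
        stop_at_soft_wraps: bool,
    }

    fn main() {
        // A binding that passes no argument still stops at soft wraps,
        // because the serde default is `true`, not `bool::default()`.
        let action: MoveToBeginningOfLine = serde_json::from_str("{}").unwrap();
        assert!(action.stop_at_soft_wraps);
    }
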
#[derive(PartialEq, Clone, Deserialize, Default)]

View File

@@ -8,7 +8,7 @@
//! of several smaller structures that form a hierarchy (starting at the bottom):
//! - [`InlayMap`] that decides where the [`Inlay`]s should be displayed.
//! - [`FoldMap`] that decides where the fold indicators should be; it also tracks parts of a source file that are currently folded.
//! - [`CharMap`] that replaces tabs and non-printable characters
//! - [`TabMap`] that keeps track of hard tabs in a buffer.
//! - [`WrapMap`] that handles soft wrapping.
//! - [`BlockMap`] that tracks custom blocks such as diagnostics that should be displayed within buffer.
//! - [`DisplayMap`] that adds background highlights to the regions of text.
@@ -18,11 +18,10 @@
//! [EditorElement]: crate::element::EditorElement
mod block_map;
mod char_map;
mod crease_map;
mod fold_map;
mod inlay_map;
mod invisibles;
mod tab_map;
mod wrap_map;
use crate::{
@@ -33,7 +32,6 @@ pub use block_map::{
BlockMap, BlockPoint, BlockProperties, BlockStyle, CustomBlockId, RenderBlock,
};
use block_map::{BlockRow, BlockSnapshot};
use char_map::{CharMap, CharSnapshot};
use collections::{HashMap, HashSet};
pub use crease_map::*;
pub use fold_map::{Fold, FoldId, FoldPlaceholder, FoldPoint};
@@ -44,7 +42,6 @@ use gpui::{
pub(crate) use inlay_map::Inlay;
use inlay_map::{InlayMap, InlaySnapshot};
pub use inlay_map::{InlayOffset, InlayPoint};
pub use invisibles::is_invisible;
use language::{
language_settings::language_settings, ChunkRenderer, OffsetUtf16, Point,
Subscription as BufferSubscription,
@@ -64,9 +61,9 @@ use std::{
sync::Arc,
};
use sum_tree::{Bias, TreeMap};
use tab_map::{TabMap, TabSnapshot};
use text::LineIndent;
use ui::{px, WindowContext};
use unicode_segmentation::UnicodeSegmentation;
use ui::WindowContext;
use wrap_map::{WrapMap, WrapSnapshot};
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
@@ -97,7 +94,7 @@ pub struct DisplayMap {
/// Decides where the fold indicators should be and tracks parts of a source file that are currently folded.
fold_map: FoldMap,
/// Keeps track of hard tabs in a buffer.
char_map: CharMap,
tab_map: TabMap,
/// Handles soft wrapping.
wrap_map: Model<WrapMap>,
/// Tracks custom blocks such as diagnostics that should be displayed within buffer.
@@ -134,7 +131,7 @@ impl DisplayMap {
let crease_map = CreaseMap::new(&buffer_snapshot);
let (inlay_map, snapshot) = InlayMap::new(buffer_snapshot);
let (fold_map, snapshot) = FoldMap::new(snapshot);
let (char_map, snapshot) = CharMap::new(snapshot, tab_size);
let (tab_map, snapshot) = TabMap::new(snapshot, tab_size);
let (wrap_map, snapshot) = WrapMap::new(snapshot, font, font_size, wrap_width, cx);
let block_map = BlockMap::new(
snapshot,
@@ -151,7 +148,7 @@ impl DisplayMap {
buffer_subscription,
fold_map,
inlay_map,
char_map,
tab_map,
wrap_map,
block_map,
crease_map,
@@ -169,17 +166,17 @@ impl DisplayMap {
let (inlay_snapshot, edits) = self.inlay_map.sync(buffer_snapshot, edits);
let (fold_snapshot, edits) = self.fold_map.read(inlay_snapshot.clone(), edits);
let tab_size = Self::tab_size(&self.buffer, cx);
let (char_snapshot, edits) = self.char_map.sync(fold_snapshot.clone(), edits, tab_size);
let (tab_snapshot, edits) = self.tab_map.sync(fold_snapshot.clone(), edits, tab_size);
let (wrap_snapshot, edits) = self
.wrap_map
.update(cx, |map, cx| map.sync(char_snapshot.clone(), edits, cx));
.update(cx, |map, cx| map.sync(tab_snapshot.clone(), edits, cx));
let block_snapshot = self.block_map.read(wrap_snapshot.clone(), edits).snapshot;
DisplaySnapshot {
buffer_snapshot: self.buffer.read(cx).snapshot(cx),
fold_snapshot,
inlay_snapshot,
char_snapshot,
tab_snapshot,
wrap_snapshot,
block_snapshot,
crease_snapshot: self.crease_map.snapshot(),
@@ -215,13 +212,13 @@ impl DisplayMap {
let tab_size = Self::tab_size(&self.buffer, cx);
let (snapshot, edits) = self.inlay_map.sync(snapshot, edits);
let (mut fold_map, snapshot, edits) = self.fold_map.write(snapshot, edits);
let (snapshot, edits) = self.char_map.sync(snapshot, edits, tab_size);
let (snapshot, edits) = self.tab_map.sync(snapshot, edits, tab_size);
let (snapshot, edits) = self
.wrap_map
.update(cx, |map, cx| map.sync(snapshot, edits, cx));
self.block_map.read(snapshot, edits);
let (snapshot, edits) = fold_map.fold(ranges);
let (snapshot, edits) = self.char_map.sync(snapshot, edits, tab_size);
let (snapshot, edits) = self.tab_map.sync(snapshot, edits, tab_size);
let (snapshot, edits) = self
.wrap_map
.update(cx, |map, cx| map.sync(snapshot, edits, cx));
@@ -239,13 +236,13 @@ impl DisplayMap {
let tab_size = Self::tab_size(&self.buffer, cx);
let (snapshot, edits) = self.inlay_map.sync(snapshot, edits);
let (mut fold_map, snapshot, edits) = self.fold_map.write(snapshot, edits);
let (snapshot, edits) = self.char_map.sync(snapshot, edits, tab_size);
let (snapshot, edits) = self.tab_map.sync(snapshot, edits, tab_size);
let (snapshot, edits) = self
.wrap_map
.update(cx, |map, cx| map.sync(snapshot, edits, cx));
self.block_map.read(snapshot, edits);
let (snapshot, edits) = fold_map.unfold(ranges, inclusive);
let (snapshot, edits) = self.char_map.sync(snapshot, edits, tab_size);
let (snapshot, edits) = self.tab_map.sync(snapshot, edits, tab_size);
let (snapshot, edits) = self
.wrap_map
.update(cx, |map, cx| map.sync(snapshot, edits, cx));
@@ -280,7 +277,7 @@ impl DisplayMap {
let tab_size = Self::tab_size(&self.buffer, cx);
let (snapshot, edits) = self.inlay_map.sync(snapshot, edits);
let (snapshot, edits) = self.fold_map.read(snapshot, edits);
let (snapshot, edits) = self.char_map.sync(snapshot, edits, tab_size);
let (snapshot, edits) = self.tab_map.sync(snapshot, edits, tab_size);
let (snapshot, edits) = self
.wrap_map
.update(cx, |map, cx| map.sync(snapshot, edits, cx));
@@ -298,7 +295,7 @@ impl DisplayMap {
let tab_size = Self::tab_size(&self.buffer, cx);
let (snapshot, edits) = self.inlay_map.sync(snapshot, edits);
let (snapshot, edits) = self.fold_map.read(snapshot, edits);
let (snapshot, edits) = self.char_map.sync(snapshot, edits, tab_size);
let (snapshot, edits) = self.tab_map.sync(snapshot, edits, tab_size);
let (snapshot, edits) = self
.wrap_map
.update(cx, |map, cx| map.sync(snapshot, edits, cx));
@@ -316,7 +313,7 @@ impl DisplayMap {
let tab_size = Self::tab_size(&self.buffer, cx);
let (snapshot, edits) = self.inlay_map.sync(snapshot, edits);
let (snapshot, edits) = self.fold_map.read(snapshot, edits);
let (snapshot, edits) = self.char_map.sync(snapshot, edits, tab_size);
let (snapshot, edits) = self.tab_map.sync(snapshot, edits, tab_size);
let (snapshot, edits) = self
.wrap_map
.update(cx, |map, cx| map.sync(snapshot, edits, cx));
@@ -334,7 +331,7 @@ impl DisplayMap {
let tab_size = Self::tab_size(&self.buffer, cx);
let (snapshot, edits) = self.inlay_map.sync(snapshot, edits);
let (snapshot, edits) = self.fold_map.read(snapshot, edits);
let (snapshot, edits) = self.char_map.sync(snapshot, edits, tab_size);
let (snapshot, edits) = self.tab_map.sync(snapshot, edits, tab_size);
let (snapshot, edits) = self
.wrap_map
.update(cx, |map, cx| map.sync(snapshot, edits, cx));
@@ -410,7 +407,7 @@ impl DisplayMap {
let (snapshot, edits) = self.inlay_map.sync(buffer_snapshot, edits);
let (snapshot, edits) = self.fold_map.read(snapshot, edits);
let tab_size = Self::tab_size(&self.buffer, cx);
let (snapshot, edits) = self.char_map.sync(snapshot, edits, tab_size);
let (snapshot, edits) = self.tab_map.sync(snapshot, edits, tab_size);
let (snapshot, edits) = self
.wrap_map
.update(cx, |map, cx| map.sync(snapshot, edits, cx));
@@ -418,7 +415,7 @@ impl DisplayMap {
let (snapshot, edits) = self.inlay_map.splice(to_remove, to_insert);
let (snapshot, edits) = self.fold_map.read(snapshot, edits);
let (snapshot, edits) = self.char_map.sync(snapshot, edits, tab_size);
let (snapshot, edits) = self.tab_map.sync(snapshot, edits, tab_size);
let (snapshot, edits) = self
.wrap_map
.update(cx, |map, cx| map.sync(snapshot, edits, cx));
@@ -470,7 +467,7 @@ pub struct DisplaySnapshot {
pub fold_snapshot: FoldSnapshot,
pub crease_snapshot: CreaseSnapshot,
inlay_snapshot: InlaySnapshot,
char_snapshot: CharSnapshot,
tab_snapshot: TabSnapshot,
wrap_snapshot: WrapSnapshot,
block_snapshot: BlockSnapshot,
text_highlights: TextHighlights,
@@ -570,8 +567,8 @@ impl DisplaySnapshot {
fn point_to_display_point(&self, point: MultiBufferPoint, bias: Bias) -> DisplayPoint {
let inlay_point = self.inlay_snapshot.to_inlay_point(point);
let fold_point = self.fold_snapshot.to_fold_point(inlay_point, bias);
let char_point = self.char_snapshot.to_char_point(fold_point);
let wrap_point = self.wrap_snapshot.char_point_to_wrap_point(char_point);
let tab_point = self.tab_snapshot.to_tab_point(fold_point);
let wrap_point = self.wrap_snapshot.tab_point_to_wrap_point(tab_point);
let block_point = self.block_snapshot.to_block_point(wrap_point);
DisplayPoint(block_point)
}
@@ -599,21 +596,21 @@ impl DisplaySnapshot {
fn display_point_to_inlay_point(&self, point: DisplayPoint, bias: Bias) -> InlayPoint {
let block_point = point.0;
let wrap_point = self.block_snapshot.to_wrap_point(block_point);
let char_point = self.wrap_snapshot.to_char_point(wrap_point);
let fold_point = self.char_snapshot.to_fold_point(char_point, bias).0;
let tab_point = self.wrap_snapshot.to_tab_point(wrap_point);
let fold_point = self.tab_snapshot.to_fold_point(tab_point, bias).0;
fold_point.to_inlay_point(&self.fold_snapshot)
}
pub fn display_point_to_fold_point(&self, point: DisplayPoint, bias: Bias) -> FoldPoint {
let block_point = point.0;
let wrap_point = self.block_snapshot.to_wrap_point(block_point);
let char_point = self.wrap_snapshot.to_char_point(wrap_point);
self.char_snapshot.to_fold_point(char_point, bias).0
let tab_point = self.wrap_snapshot.to_tab_point(wrap_point);
self.tab_snapshot.to_fold_point(tab_point, bias).0
}
pub fn fold_point_to_display_point(&self, fold_point: FoldPoint) -> DisplayPoint {
let char_point = self.char_snapshot.to_char_point(fold_point);
let wrap_point = self.wrap_snapshot.char_point_to_wrap_point(char_point);
let tab_point = self.tab_snapshot.to_tab_point(fold_point);
let wrap_point = self.wrap_snapshot.tab_point_to_wrap_point(tab_point);
let block_point = self.block_snapshot.to_block_point(wrap_point);
DisplayPoint(block_point)
}
@@ -691,23 +688,6 @@ impl DisplaySnapshot {
}
}
if chunk.is_invisible {
let invisible_highlight = HighlightStyle {
background_color: Some(editor_style.status.hint_background),
underline: Some(UnderlineStyle {
color: Some(editor_style.status.hint),
thickness: px(1.),
wavy: false,
}),
..Default::default()
};
if let Some(highlight_style) = highlight_style.as_mut() {
highlight_style.highlight(invisible_highlight);
} else {
highlight_style = Some(invisible_highlight);
}
}
let mut diagnostic_highlight = HighlightStyle::default();
if chunk.is_unnecessary {
@@ -804,11 +784,12 @@ impl DisplaySnapshot {
layout_line.closest_index_for_x(x) as u32
}
pub fn grapheme_at(&self, mut point: DisplayPoint) -> Option<String> {
pub fn display_chars_at(
&self,
mut point: DisplayPoint,
) -> impl Iterator<Item = (char, DisplayPoint)> + '_ {
point = DisplayPoint(self.block_snapshot.clip_point(point.0, Bias::Left));
let chars = self
.text_chunks(point.row())
self.text_chunks(point.row())
.flat_map(str::chars)
.skip_while({
let mut column = 0;
@@ -818,21 +799,16 @@ impl DisplaySnapshot {
!at_point
}
})
.take_while({
let mut prev = false;
move |char| {
let now = char.is_ascii();
let end = char.is_ascii() && (char.is_ascii_whitespace() || prev);
prev = now;
!end
.map(move |ch| {
let result = (ch, point);
if ch == '\n' {
*point.row_mut() += 1;
*point.column_mut() = 0;
} else {
*point.column_mut() += ch.len_utf8() as u32;
}
});
chars
.collect::<String>()
.graphemes(true)
.next()
.map(|s| s.to_owned())
result
})
}
pub fn buffer_chars_at(&self, mut offset: usize) -> impl Iterator<Item = (char, usize)> + '_ {
@@ -1144,8 +1120,8 @@ impl DisplayPoint {
pub fn to_offset(self, map: &DisplaySnapshot, bias: Bias) -> usize {
let wrap_point = map.block_snapshot.to_wrap_point(self.0);
let char_point = map.wrap_snapshot.to_char_point(wrap_point);
let fold_point = map.char_snapshot.to_fold_point(char_point, bias).0;
let tab_point = map.wrap_snapshot.to_tab_point(wrap_point);
let fold_point = map.tab_snapshot.to_fold_point(tab_point, bias).0;
let inlay_point = fold_point.to_inlay_point(&map.fold_snapshot);
map.inlay_snapshot
.to_buffer_offset(map.inlay_snapshot.to_offset(inlay_point))
@@ -1252,7 +1228,7 @@ pub mod tests {
let snapshot = map.update(cx, |map, cx| map.snapshot(cx));
log::info!("buffer text: {:?}", snapshot.buffer_snapshot.text());
log::info!("fold text: {:?}", snapshot.fold_snapshot.text());
log::info!("char text: {:?}", snapshot.char_snapshot.text());
log::info!("tab text: {:?}", snapshot.tab_snapshot.text());
log::info!("wrap text: {:?}", snapshot.wrap_snapshot.text());
log::info!("block text: {:?}", snapshot.block_snapshot.text());
log::info!("display text: {:?}", snapshot.text());
@@ -1369,7 +1345,7 @@ pub mod tests {
fold_count = snapshot.fold_count();
log::info!("buffer text: {:?}", snapshot.buffer_snapshot.text());
log::info!("fold text: {:?}", snapshot.fold_snapshot.text());
log::info!("char text: {:?}", snapshot.char_snapshot.text());
log::info!("tab text: {:?}", snapshot.tab_snapshot.text());
log::info!("wrap text: {:?}", snapshot.wrap_snapshot.text());
log::info!("block text: {:?}", snapshot.block_snapshot.text());
log::info!("display text: {:?}", snapshot.text());

View File

@@ -1421,7 +1421,7 @@ fn offset_for_row(s: &str, target: u32) -> (u32, usize) {
mod tests {
use super::*;
use crate::display_map::{
char_map::CharMap, fold_map::FoldMap, inlay_map::InlayMap, wrap_map::WrapMap,
fold_map::FoldMap, inlay_map::InlayMap, tab_map::TabMap, wrap_map::WrapMap,
};
use gpui::{div, font, px, AppContext, Context as _, Element};
use language::{Buffer, Capability};
@@ -1456,9 +1456,9 @@ mod tests {
let subscription = buffer.update(cx, |buffer, _| buffer.subscribe());
let (mut inlay_map, inlay_snapshot) = InlayMap::new(buffer_snapshot.clone());
let (mut fold_map, fold_snapshot) = FoldMap::new(inlay_snapshot);
let (mut char_map, char_snapshot) = CharMap::new(fold_snapshot, 1.try_into().unwrap());
let (mut tab_map, tab_snapshot) = TabMap::new(fold_snapshot, 1.try_into().unwrap());
let (wrap_map, wraps_snapshot) =
cx.update(|cx| WrapMap::new(char_snapshot, font("Helvetica"), px(14.0), None, cx));
cx.update(|cx| WrapMap::new(tab_snapshot, font("Helvetica"), px(14.0), None, cx));
let mut block_map = BlockMap::new(wraps_snapshot.clone(), true, 1, 1, 1);
let mut writer = block_map.write(wraps_snapshot.clone(), Default::default());
@@ -1609,10 +1609,10 @@ mod tests {
let (inlay_snapshot, inlay_edits) =
inlay_map.sync(buffer_snapshot, subscription.consume().into_inner());
let (fold_snapshot, fold_edits) = fold_map.read(inlay_snapshot, inlay_edits);
let (char_snapshot, tab_edits) =
char_map.sync(fold_snapshot, fold_edits, 4.try_into().unwrap());
let (tab_snapshot, tab_edits) =
tab_map.sync(fold_snapshot, fold_edits, 4.try_into().unwrap());
let (wraps_snapshot, wrap_edits) = wrap_map.update(cx, |wrap_map, cx| {
wrap_map.sync(char_snapshot, tab_edits, cx)
wrap_map.sync(tab_snapshot, tab_edits, cx)
});
let snapshot = block_map.read(wraps_snapshot, wrap_edits);
assert_eq!(snapshot.text(), "aaa\n\nb!!!\n\n\nbb\nccc\nddd\n\n\n");
@@ -1672,9 +1672,8 @@ mod tests {
let multi_buffer_snapshot = multi_buffer.read(cx).snapshot(cx);
let (_, inlay_snapshot) = InlayMap::new(multi_buffer_snapshot.clone());
let (_, fold_snapshot) = FoldMap::new(inlay_snapshot);
let (_, char_snapshot) = CharMap::new(fold_snapshot, 4.try_into().unwrap());
let (_, wraps_snapshot) =
WrapMap::new(char_snapshot, font, font_size, Some(wrap_width), cx);
let (_, tab_snapshot) = TabMap::new(fold_snapshot, 4.try_into().unwrap());
let (_, wraps_snapshot) = WrapMap::new(tab_snapshot, font, font_size, Some(wrap_width), cx);
let block_map = BlockMap::new(wraps_snapshot.clone(), true, 1, 1, 1);
let snapshot = block_map.read(wraps_snapshot, Default::default());
@@ -1711,9 +1710,9 @@ mod tests {
let _subscription = buffer.update(cx, |buffer, _| buffer.subscribe());
let (_inlay_map, inlay_snapshot) = InlayMap::new(buffer_snapshot.clone());
let (_fold_map, fold_snapshot) = FoldMap::new(inlay_snapshot);
let (_char_map, char_snapshot) = CharMap::new(fold_snapshot, 1.try_into().unwrap());
let (_tab_map, tab_snapshot) = TabMap::new(fold_snapshot, 1.try_into().unwrap());
let (_wrap_map, wraps_snapshot) =
cx.update(|cx| WrapMap::new(char_snapshot, font("Helvetica"), px(14.0), None, cx));
cx.update(|cx| WrapMap::new(tab_snapshot, font("Helvetica"), px(14.0), None, cx));
let mut block_map = BlockMap::new(wraps_snapshot.clone(), false, 1, 1, 0);
let mut writer = block_map.write(wraps_snapshot.clone(), Default::default());
@@ -1816,15 +1815,9 @@ mod tests {
let buffer_snapshot = cx.update(|cx| buffer.read(cx).snapshot(cx));
let (_, inlay_snapshot) = InlayMap::new(buffer_snapshot.clone());
let (_, fold_snapshot) = FoldMap::new(inlay_snapshot);
let (_, char_snapshot) = CharMap::new(fold_snapshot, 4.try_into().unwrap());
let (_, tab_snapshot) = TabMap::new(fold_snapshot, 4.try_into().unwrap());
let (_, wraps_snapshot) = cx.update(|cx| {
WrapMap::new(
char_snapshot,
font("Helvetica"),
px(14.0),
Some(px(60.)),
cx,
)
WrapMap::new(tab_snapshot, font("Helvetica"), px(14.0), Some(px(60.)), cx)
});
let mut block_map = BlockMap::new(wraps_snapshot.clone(), true, 1, 1, 0);
@@ -1892,9 +1885,9 @@ mod tests {
let mut buffer_snapshot = cx.update(|cx| buffer.read(cx).snapshot(cx));
let (mut inlay_map, inlay_snapshot) = InlayMap::new(buffer_snapshot.clone());
let (mut fold_map, fold_snapshot) = FoldMap::new(inlay_snapshot);
let (mut char_map, char_snapshot) = CharMap::new(fold_snapshot, 4.try_into().unwrap());
let (mut tab_map, tab_snapshot) = TabMap::new(fold_snapshot, 4.try_into().unwrap());
let (wrap_map, wraps_snapshot) = cx
.update(|cx| WrapMap::new(char_snapshot, font("Helvetica"), font_size, wrap_width, cx));
.update(|cx| WrapMap::new(tab_snapshot, font("Helvetica"), font_size, wrap_width, cx));
let mut block_map = BlockMap::new(
wraps_snapshot,
true,
@@ -1951,10 +1944,10 @@ mod tests {
let (inlay_snapshot, inlay_edits) =
inlay_map.sync(buffer_snapshot.clone(), vec![]);
let (fold_snapshot, fold_edits) = fold_map.read(inlay_snapshot, inlay_edits);
let (char_snapshot, tab_edits) =
char_map.sync(fold_snapshot, fold_edits, tab_size);
let (tab_snapshot, tab_edits) =
tab_map.sync(fold_snapshot, fold_edits, tab_size);
let (wraps_snapshot, wrap_edits) = wrap_map.update(cx, |wrap_map, cx| {
wrap_map.sync(char_snapshot, tab_edits, cx)
wrap_map.sync(tab_snapshot, tab_edits, cx)
});
let mut block_map = block_map.write(wraps_snapshot, wrap_edits);
let block_ids =
@@ -1983,10 +1976,10 @@ mod tests {
let (inlay_snapshot, inlay_edits) =
inlay_map.sync(buffer_snapshot.clone(), vec![]);
let (fold_snapshot, fold_edits) = fold_map.read(inlay_snapshot, inlay_edits);
let (char_snapshot, tab_edits) =
char_map.sync(fold_snapshot, fold_edits, tab_size);
let (tab_snapshot, tab_edits) =
tab_map.sync(fold_snapshot, fold_edits, tab_size);
let (wraps_snapshot, wrap_edits) = wrap_map.update(cx, |wrap_map, cx| {
wrap_map.sync(char_snapshot, tab_edits, cx)
wrap_map.sync(tab_snapshot, tab_edits, cx)
});
let mut block_map = block_map.write(wraps_snapshot, wrap_edits);
block_map.remove(block_ids_to_remove);
@@ -2006,9 +1999,9 @@ mod tests {
let (inlay_snapshot, inlay_edits) =
inlay_map.sync(buffer_snapshot.clone(), buffer_edits);
let (fold_snapshot, fold_edits) = fold_map.read(inlay_snapshot, inlay_edits);
let (char_snapshot, tab_edits) = char_map.sync(fold_snapshot, fold_edits, tab_size);
let (tab_snapshot, tab_edits) = tab_map.sync(fold_snapshot, fold_edits, tab_size);
let (wraps_snapshot, wrap_edits) = wrap_map.update(cx, |wrap_map, cx| {
wrap_map.sync(char_snapshot, tab_edits, cx)
wrap_map.sync(tab_snapshot, tab_edits, cx)
});
let blocks_snapshot = block_map.read(wraps_snapshot.clone(), wrap_edits);
assert_eq!(
@@ -2091,10 +2084,7 @@ mod tests {
}
}
let soft_wrapped = wraps_snapshot
.to_char_point(WrapPoint::new(row, 0))
.column()
> 0;
let soft_wrapped = wraps_snapshot.to_tab_point(WrapPoint::new(row, 0)).column() > 0;
expected_buffer_rows.push(if soft_wrapped { None } else { buffer_row });
expected_text.push_str(input_line);

View File

@@ -1,157 +0,0 @@
use std::sync::LazyLock;
use collections::HashMap;
// Invisibility in a Unicode context is not well defined, so we have to guess.
//
// We highlight all ASCII control codes, and unicode whitespace because they are likely
// confused with a normal space (U+0020).
//
// We also highlight the handful of blank non-space characters:
// U+2800 BRAILLE PATTERN BLANK - Category: So
// U+115F HANGUL CHOSEONG FILLER - Category: Lo
// U+1160 HANGUL CHOSEONG FILLER - Category: Lo
// U+3164 HANGUL FILLER - Category: Lo
// U+FFA0 HALFWIDTH HANGUL FILLER - Category: Lo
// U+FFFC OBJECT REPLACEMENT CHARACTER - Category: So
//
// For the rest of Unicode, invisibility happens for two reasons:
// * A Format character (like a byte order mark or right-to-left override)
// * An invisible Nonspacing Mark character (like U+034F, or variation selectors)
//
// We don't consider unassigned codepoints invisible as the font renderer already shows
// a replacement character in that case (and there are a *lot* of them)
//
// Control characters are mostly fine to highlight; except:
// * U+E0020..=U+E007F are used in emoji flags. We don't highlight them right now, but we could if we tightened our heuristics.
// * U+200D is used to join characters. We highlight this but don't replace it. As our font system ignores mid-glyph highlights this mostly works to highlight unexpected uses.
//
// Nonspacing marks are handled like U+200D. This means that mid-glyph we ignore them, but
// probably causes issues with end-of-glyph usage.
//
// ref: https://invisible-characters.com
// ref: https://www.compart.com/en/unicode/category/Cf
// ref: https://gist.github.com/ConradIrwin/f759e1fc29267143c4c7895aa495dca5?h=1
// ref: https://unicode.org/Public/emoji/13.0/emoji-test.txt
// https://github.com/bits/UTF-8-Unicode-Test-Documents/blob/master/UTF-8_sequence_separated/utf8_sequence_0-0x10ffff_assigned_including-unprintable-asis.txt
pub fn is_invisible(c: char) -> bool {
if c <= '\u{1f}' {
c != '\t' && c != '\n' && c != '\r'
} else if c >= '\u{7f}' {
c <= '\u{9f}' || c.is_whitespace() || contains(c, &FORMAT) || contains(c, &OTHER)
} else {
false
}
}
pub(crate) fn replacement(c: char) -> Option<&'static str> {
if !is_invisible(c) {
return None;
}
if c <= '\x7f' {
REPLACEMENTS.get(&c).copied()
} else if contains(c, &PRESERVE) {
None
} else {
Some(" ")
}
}
const REPLACEMENTS: LazyLock<HashMap<char, &'static str>> = LazyLock::new(|| {
[
        ('\x00', "␀"),
        ('\x01', "␁"),
        ('\x02', "␂"),
        ('\x03', "␃"),
        ('\x04', "␄"),
        ('\x05', "␅"),
        ('\x06', "␆"),
        ('\x07', "␇"),
        ('\x08', "␈"),
        ('\x0B', "␋"),
        ('\x0C', "␌"),
        ('\x0D', "␍"),
        ('\x0E', "␎"),
        ('\x0F', "␏"),
        ('\x10', "␐"),
        ('\x11', "␑"),
        ('\x12', "␒"),
        ('\x13', "␓"),
        ('\x14', "␔"),
        ('\x15', "␕"),
        ('\x16', "␖"),
        ('\x17', "␗"),
        ('\x18', "␘"),
        ('\x19', "␙"),
        ('\x1A', "␚"),
        ('\x1B', "␛"),
        ('\x1C', "␜"),
        ('\x1D', "␝"),
        ('\x1E', "␞"),
        ('\x1F', "␟"),
        ('\u{007F}', "␡"),
]
.into_iter()
.collect()
});
// generated using ucd-generate: ucd-generate general-category --include Format --chars ucd-16.0.0
pub const FORMAT: &'static [(char, char)] = &[
('\u{ad}', '\u{ad}'),
('\u{600}', '\u{605}'),
('\u{61c}', '\u{61c}'),
('\u{6dd}', '\u{6dd}'),
('\u{70f}', '\u{70f}'),
('\u{890}', '\u{891}'),
('\u{8e2}', '\u{8e2}'),
('\u{180e}', '\u{180e}'),
('\u{200b}', '\u{200f}'),
('\u{202a}', '\u{202e}'),
('\u{2060}', '\u{2064}'),
('\u{2066}', '\u{206f}'),
('\u{feff}', '\u{feff}'),
('\u{fff9}', '\u{fffb}'),
('\u{110bd}', '\u{110bd}'),
('\u{110cd}', '\u{110cd}'),
('\u{13430}', '\u{1343f}'),
('\u{1bca0}', '\u{1bca3}'),
('\u{1d173}', '\u{1d17a}'),
('\u{e0001}', '\u{e0001}'),
('\u{e0020}', '\u{e007f}'),
];
// hand-made, based on https://invisible-characters.com (excluding Cf)
pub const OTHER: &'static [(char, char)] = &[
('\u{034f}', '\u{034f}'),
('\u{115F}', '\u{1160}'),
('\u{17b4}', '\u{17b5}'),
('\u{180b}', '\u{180d}'),
('\u{2800}', '\u{2800}'),
('\u{3164}', '\u{3164}'),
('\u{fe00}', '\u{fe0d}'),
('\u{ffa0}', '\u{ffa0}'),
('\u{fffc}', '\u{fffc}'),
('\u{e0100}', '\u{e01ef}'),
];
// a subset of FORMAT/OTHER that may appear within glyphs
const PRESERVE: &'static [(char, char)] = &[
('\u{034f}', '\u{034f}'),
('\u{200d}', '\u{200d}'),
('\u{17b4}', '\u{17b5}'),
('\u{180b}', '\u{180d}'),
('\u{e0061}', '\u{e007a}'),
('\u{e007f}', '\u{e007f}'),
];
fn contains(c: char, list: &[(char, char)]) -> bool {
for (start, end) in list {
if c < *start {
return false;
}
if c <= *end {
return true;
}
}
false
}
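
For reference, a few spot checks of how the removed predicate classifies characters, derived only from the ranges and exemptions above:

    // Sketch: expected classifications given the FORMAT/OTHER tables above.
    assert!(is_invisible('\u{200B}')); // zero-width space, inside the FORMAT ranges
    assert!(is_invisible('\u{00A0}')); // no-break space, caught by is_whitespace()
    assert!(is_invisible('\x07'));     // ASCII BEL, a control code below U+0020
    assert!(!is_invisible('\t'));      // tab, newline, and CR are explicitly exempted
    assert!(!is_invisible('a'));       // ordinary printable ASCII
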

View File

@@ -1,6 +1,5 @@
use super::{
fold_map::{self, FoldChunks, FoldEdit, FoldPoint, FoldSnapshot},
invisibles::{is_invisible, replacement},
Highlights,
};
use language::{Chunk, Point};
@@ -10,14 +9,14 @@ use sum_tree::Bias;
const MAX_EXPANSION_COLUMN: u32 = 256;
/// Keeps track of hard tabs and non-printable characters in a text buffer.
/// Keeps track of hard tabs in a text buffer.
///
/// See the [`display_map` module documentation](crate::display_map) for more information.
pub struct CharMap(CharSnapshot);
pub struct TabMap(TabSnapshot);
impl CharMap {
pub fn new(fold_snapshot: FoldSnapshot, tab_size: NonZeroU32) -> (Self, CharSnapshot) {
let snapshot = CharSnapshot {
impl TabMap {
pub fn new(fold_snapshot: FoldSnapshot, tab_size: NonZeroU32) -> (Self, TabSnapshot) {
let snapshot = TabSnapshot {
fold_snapshot,
tab_size,
max_expansion_column: MAX_EXPANSION_COLUMN,
@@ -27,7 +26,7 @@ impl CharMap {
}
#[cfg(test)]
pub fn set_max_expansion_column(&mut self, column: u32) -> CharSnapshot {
pub fn set_max_expansion_column(&mut self, column: u32) -> TabSnapshot {
self.0.max_expansion_column = column;
self.0.clone()
}
@@ -37,9 +36,9 @@ impl CharMap {
fold_snapshot: FoldSnapshot,
mut fold_edits: Vec<FoldEdit>,
tab_size: NonZeroU32,
) -> (CharSnapshot, Vec<TabEdit>) {
) -> (TabSnapshot, Vec<TabEdit>) {
let old_snapshot = &mut self.0;
let mut new_snapshot = CharSnapshot {
let mut new_snapshot = TabSnapshot {
fold_snapshot,
tab_size,
max_expansion_column: old_snapshot.max_expansion_column,
@@ -138,15 +137,15 @@ impl CharMap {
let new_start = fold_edit.new.start.to_point(&new_snapshot.fold_snapshot);
let new_end = fold_edit.new.end.to_point(&new_snapshot.fold_snapshot);
tab_edits.push(TabEdit {
old: old_snapshot.to_char_point(old_start)..old_snapshot.to_char_point(old_end),
new: new_snapshot.to_char_point(new_start)..new_snapshot.to_char_point(new_end),
old: old_snapshot.to_tab_point(old_start)..old_snapshot.to_tab_point(old_end),
new: new_snapshot.to_tab_point(new_start)..new_snapshot.to_tab_point(new_end),
});
}
} else {
new_snapshot.version += 1;
tab_edits.push(TabEdit {
old: CharPoint::zero()..old_snapshot.max_point(),
new: CharPoint::zero()..new_snapshot.max_point(),
old: TabPoint::zero()..old_snapshot.max_point(),
new: TabPoint::zero()..new_snapshot.max_point(),
});
}
@@ -156,14 +155,14 @@ impl CharMap {
}
#[derive(Clone)]
pub struct CharSnapshot {
pub struct TabSnapshot {
pub fold_snapshot: FoldSnapshot,
pub tab_size: NonZeroU32,
pub max_expansion_column: u32,
pub version: usize,
}
impl CharSnapshot {
impl TabSnapshot {
pub fn buffer_snapshot(&self) -> &MultiBufferSnapshot {
&self.fold_snapshot.inlay_snapshot.buffer
}
@@ -171,7 +170,7 @@ impl CharSnapshot {
pub fn line_len(&self, row: u32) -> u32 {
let max_point = self.max_point();
if row < max_point.row() {
self.to_char_point(FoldPoint::new(row, self.fold_snapshot.line_len(row)))
self.to_tab_point(FoldPoint::new(row, self.fold_snapshot.line_len(row)))
.0
.column
} else {
@@ -180,10 +179,10 @@ impl CharSnapshot {
}
pub fn text_summary(&self) -> TextSummary {
self.text_summary_for_range(CharPoint::zero()..self.max_point())
self.text_summary_for_range(TabPoint::zero()..self.max_point())
}
pub fn text_summary_for_range(&self, range: Range<CharPoint>) -> TextSummary {
pub fn text_summary_for_range(&self, range: Range<TabPoint>) -> TextSummary {
let input_start = self.to_fold_point(range.start, Bias::Left).0;
let input_end = self.to_fold_point(range.end, Bias::Right).0;
let input_summary = self
@@ -212,7 +211,7 @@ impl CharSnapshot {
} else {
for _ in self
.chunks(
CharPoint::new(range.end.row(), 0)..range.end,
TabPoint::new(range.end.row(), 0)..range.end,
false,
Highlights::default(),
)
@@ -233,7 +232,7 @@ impl CharSnapshot {
pub fn chunks<'a>(
&'a self,
range: Range<CharPoint>,
range: Range<TabPoint>,
language_aware: bool,
highlights: Highlights<'a>,
) -> TabChunks<'a> {
@@ -279,7 +278,7 @@ impl CharSnapshot {
#[cfg(test)]
pub fn text(&self) -> String {
self.chunks(
CharPoint::zero()..self.max_point(),
TabPoint::zero()..self.max_point(),
false,
Highlights::default(),
)
@@ -287,24 +286,24 @@ impl CharSnapshot {
.collect()
}
pub fn max_point(&self) -> CharPoint {
self.to_char_point(self.fold_snapshot.max_point())
pub fn max_point(&self) -> TabPoint {
self.to_tab_point(self.fold_snapshot.max_point())
}
pub fn clip_point(&self, point: CharPoint, bias: Bias) -> CharPoint {
self.to_char_point(
pub fn clip_point(&self, point: TabPoint, bias: Bias) -> TabPoint {
self.to_tab_point(
self.fold_snapshot
.clip_point(self.to_fold_point(point, bias).0, bias),
)
}
pub fn to_char_point(&self, input: FoldPoint) -> CharPoint {
pub fn to_tab_point(&self, input: FoldPoint) -> TabPoint {
let chars = self.fold_snapshot.chars_at(FoldPoint::new(input.row(), 0));
let expanded = self.expand_tabs(chars, input.column());
CharPoint::new(input.row(), expanded)
TabPoint::new(input.row(), expanded)
}
pub fn to_fold_point(&self, output: CharPoint, bias: Bias) -> (FoldPoint, u32, u32) {
pub fn to_fold_point(&self, output: TabPoint, bias: Bias) -> (FoldPoint, u32, u32) {
let chars = self.fold_snapshot.chars_at(FoldPoint::new(output.row(), 0));
let expanded = output.column();
let (collapsed, expanded_char_column, to_next_stop) =
@@ -316,13 +315,13 @@ impl CharSnapshot {
)
}
pub fn make_char_point(&self, point: Point, bias: Bias) -> CharPoint {
pub fn make_tab_point(&self, point: Point, bias: Bias) -> TabPoint {
let inlay_point = self.fold_snapshot.inlay_snapshot.to_inlay_point(point);
let fold_point = self.fold_snapshot.to_fold_point(inlay_point, bias);
self.to_char_point(fold_point)
self.to_tab_point(fold_point)
}
pub fn to_point(&self, point: CharPoint, bias: Bias) -> Point {
pub fn to_point(&self, point: TabPoint, bias: Bias) -> Point {
let fold_point = self.to_fold_point(point, bias).0;
let inlay_point = fold_point.to_inlay_point(&self.fold_snapshot);
self.fold_snapshot
@@ -345,9 +344,6 @@ impl CharSnapshot {
let tab_len = tab_size - expanded_chars % tab_size;
expanded_bytes += tab_len;
expanded_chars += tab_len;
} else if let Some(replacement) = replacement(c) {
expanded_chars += replacement.chars().count() as u32;
expanded_bytes += replacement.len() as u32;
} else {
expanded_bytes += c.len_utf8() as u32;
expanded_chars += 1;
@@ -387,9 +383,6 @@ impl CharSnapshot {
Bias::Right => (collapsed_bytes + 1, expanded_chars, 0),
};
}
} else if let Some(replacement) = replacement(c) {
expanded_chars += replacement.chars().count() as u32;
expanded_bytes += replacement.len() as u32;
} else {
expanded_chars += 1;
expanded_bytes += c.len_utf8() as u32;
@@ -411,9 +404,9 @@ impl CharSnapshot {
}
#[derive(Copy, Clone, Debug, Default, Eq, Ord, PartialOrd, PartialEq)]
pub struct CharPoint(pub Point);
pub struct TabPoint(pub Point);
impl CharPoint {
impl TabPoint {
pub fn new(row: u32, column: u32) -> Self {
Self(Point::new(row, column))
}
@@ -431,13 +424,13 @@ impl CharPoint {
}
}
impl From<Point> for CharPoint {
impl From<Point> for TabPoint {
fn from(point: Point) -> Self {
Self(point)
}
}
pub type TabEdit = text::Edit<CharPoint>;
pub type TabEdit = text::Edit<TabPoint>;
#[derive(Clone, Debug, Default, Eq, PartialEq)]
pub struct TextSummary {
@@ -558,37 +551,6 @@ impl<'a> Iterator for TabChunks<'a> {
self.input_column = 0;
self.output_position += Point::new(1, 0);
}
_ if is_invisible(c) => {
if ix > 0 {
let (prefix, suffix) = self.chunk.text.split_at(ix);
self.chunk.text = suffix;
return Some(Chunk {
text: prefix,
is_invisible: false,
..self.chunk.clone()
});
}
let c_len = c.len_utf8();
let replacement = replacement(c).unwrap_or(&self.chunk.text[..c_len]);
if self.chunk.text.len() >= c_len {
self.chunk.text = &self.chunk.text[c_len..];
} else {
self.chunk.text = "";
}
let len = replacement.chars().count() as u32;
let next_output_position = cmp::min(
self.output_position + Point::new(0, len),
self.max_output_position,
);
self.column += len;
self.input_column += 1;
self.output_position = next_output_position;
return Some(Chunk {
text: replacement,
is_invisible: true,
..self.chunk.clone()
});
}
_ => {
self.column += 1;
if !self.inside_leading_tab {
@@ -618,11 +580,11 @@ mod tests {
let buffer_snapshot = buffer.read(cx).snapshot(cx);
let (_, inlay_snapshot) = InlayMap::new(buffer_snapshot.clone());
let (_, fold_snapshot) = FoldMap::new(inlay_snapshot);
let (_, char_snapshot) = CharMap::new(fold_snapshot, 4.try_into().unwrap());
let (_, tab_snapshot) = TabMap::new(fold_snapshot, 4.try_into().unwrap());
assert_eq!(char_snapshot.expand_tabs("\t".chars(), 0), 0);
assert_eq!(char_snapshot.expand_tabs("\t".chars(), 1), 4);
assert_eq!(char_snapshot.expand_tabs("\ta".chars(), 2), 5);
assert_eq!(tab_snapshot.expand_tabs("\t".chars(), 0), 0);
assert_eq!(tab_snapshot.expand_tabs("\t".chars(), 1), 4);
assert_eq!(tab_snapshot.expand_tabs("\ta".chars(), 2), 5);
}
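
A worked example for the assertions above, assuming a tab size of 4 (each tab expands to the next multiple of four columns):

    // expand_tabs("\t", 0): nothing precedes column 0, so the result is 0.
    // expand_tabs("\t", 1): the tab at column 0 expands by 4 - (0 % 4) = 4 columns,
    //                        so fold column 1 lands at expanded column 4.
    // expand_tabs("\ta", 2): the tab contributes 4 columns and 'a' one more,
    //                        so fold column 2 lands at expanded column 5.
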
#[gpui::test]
@@ -635,16 +597,16 @@ mod tests {
let buffer_snapshot = buffer.read(cx).snapshot(cx);
let (_, inlay_snapshot) = InlayMap::new(buffer_snapshot.clone());
let (_, fold_snapshot) = FoldMap::new(inlay_snapshot);
let (_, mut char_snapshot) = CharMap::new(fold_snapshot, 4.try_into().unwrap());
let (_, mut tab_snapshot) = TabMap::new(fold_snapshot, 4.try_into().unwrap());
char_snapshot.max_expansion_column = max_expansion_column;
assert_eq!(char_snapshot.text(), output);
tab_snapshot.max_expansion_column = max_expansion_column;
assert_eq!(tab_snapshot.text(), output);
for (ix, c) in input.char_indices() {
assert_eq!(
char_snapshot
tab_snapshot
.chunks(
CharPoint::new(0, ix as u32)..char_snapshot.max_point(),
TabPoint::new(0, ix as u32)..tab_snapshot.max_point(),
false,
Highlights::default(),
)
@@ -658,13 +620,13 @@ mod tests {
let input_point = Point::new(0, ix as u32);
let output_point = Point::new(0, output.find(c).unwrap() as u32);
assert_eq!(
char_snapshot.to_char_point(FoldPoint(input_point)),
CharPoint(output_point),
"to_char_point({input_point:?})"
tab_snapshot.to_tab_point(FoldPoint(input_point)),
TabPoint(output_point),
"to_tab_point({input_point:?})"
);
assert_eq!(
char_snapshot
.to_fold_point(CharPoint(output_point), Bias::Left)
tab_snapshot
.to_fold_point(TabPoint(output_point), Bias::Left)
.0,
FoldPoint(input_point),
"to_fold_point({output_point:?})"
@@ -682,10 +644,10 @@ mod tests {
let buffer_snapshot = buffer.read(cx).snapshot(cx);
let (_, inlay_snapshot) = InlayMap::new(buffer_snapshot.clone());
let (_, fold_snapshot) = FoldMap::new(inlay_snapshot);
let (_, mut char_snapshot) = CharMap::new(fold_snapshot, 4.try_into().unwrap());
let (_, mut tab_snapshot) = TabMap::new(fold_snapshot, 4.try_into().unwrap());
char_snapshot.max_expansion_column = max_expansion_column;
assert_eq!(char_snapshot.text(), input);
tab_snapshot.max_expansion_column = max_expansion_column;
assert_eq!(tab_snapshot.text(), input);
}
#[gpui::test]
@@ -696,10 +658,10 @@ mod tests {
let buffer_snapshot = buffer.read(cx).snapshot(cx);
let (_, inlay_snapshot) = InlayMap::new(buffer_snapshot.clone());
let (_, fold_snapshot) = FoldMap::new(inlay_snapshot);
let (_, char_snapshot) = CharMap::new(fold_snapshot, 4.try_into().unwrap());
let (_, tab_snapshot) = TabMap::new(fold_snapshot, 4.try_into().unwrap());
assert_eq!(
chunks(&char_snapshot, CharPoint::zero()),
chunks(&tab_snapshot, TabPoint::zero()),
vec![
(" ".to_string(), true),
(" ".to_string(), false),
@@ -708,7 +670,7 @@ mod tests {
]
);
assert_eq!(
chunks(&char_snapshot, CharPoint::new(0, 2)),
chunks(&tab_snapshot, TabPoint::new(0, 2)),
vec![
(" ".to_string(), true),
(" ".to_string(), false),
@@ -717,7 +679,7 @@ mod tests {
]
);
fn chunks(snapshot: &CharSnapshot, start: CharPoint) -> Vec<(String, bool)> {
fn chunks(snapshot: &TabSnapshot, start: TabPoint) -> Vec<(String, bool)> {
let mut chunks = Vec::new();
let mut was_tab = false;
let mut text = String::new();
@@ -763,12 +725,12 @@ mod tests {
let (inlay_snapshot, _) = inlay_map.randomly_mutate(&mut 0, &mut rng);
log::info!("InlayMap text: {:?}", inlay_snapshot.text());
let (mut char_map, _) = CharMap::new(fold_snapshot.clone(), tab_size);
let tabs_snapshot = char_map.set_max_expansion_column(32);
let (mut tab_map, _) = TabMap::new(fold_snapshot.clone(), tab_size);
let tabs_snapshot = tab_map.set_max_expansion_column(32);
let text = text::Rope::from(tabs_snapshot.text().as_str());
log::info!(
"CharMap text (tab size: {}): {:?}",
"TabMap text (tab size: {}): {:?}",
tab_size,
tabs_snapshot.text(),
);
@@ -776,11 +738,11 @@ mod tests {
for _ in 0..5 {
let end_row = rng.gen_range(0..=text.max_point().row);
let end_column = rng.gen_range(0..=text.line_len(end_row));
let mut end = CharPoint(text.clip_point(Point::new(end_row, end_column), Bias::Right));
let mut end = TabPoint(text.clip_point(Point::new(end_row, end_column), Bias::Right));
let start_row = rng.gen_range(0..=text.max_point().row);
let start_column = rng.gen_range(0..=text.line_len(start_row));
let mut start =
CharPoint(text.clip_point(Point::new(start_row, start_column), Bias::Left));
TabPoint(text.clip_point(Point::new(start_row, start_column), Bias::Left));
if start > end {
mem::swap(&mut start, &mut end);
}

View File

@@ -1,6 +1,6 @@
use super::{
char_map::{self, CharPoint, CharSnapshot, TabEdit},
fold_map::FoldBufferRows,
tab_map::{self, TabEdit, TabPoint, TabSnapshot},
Highlights,
};
use gpui::{AppContext, Context, Font, LineWrapper, Model, ModelContext, Pixels, Task};
@@ -12,7 +12,7 @@ use std::{cmp, collections::VecDeque, mem, ops::Range, time::Duration};
use sum_tree::{Bias, Cursor, SumTree};
use text::Patch;
pub use super::char_map::TextSummary;
pub use super::tab_map::TextSummary;
pub type WrapEdit = text::Edit<u32>;
/// Handles soft wrapping of text.
@@ -20,7 +20,7 @@ pub type WrapEdit = text::Edit<u32>;
/// See the [`display_map` module documentation](crate::display_map) for more information.
pub struct WrapMap {
snapshot: WrapSnapshot,
pending_edits: VecDeque<(CharSnapshot, Vec<TabEdit>)>,
pending_edits: VecDeque<(TabSnapshot, Vec<TabEdit>)>,
interpolated_edits: Patch<u32>,
edits_since_sync: Patch<u32>,
wrap_width: Option<Pixels>,
@@ -30,7 +30,7 @@ pub struct WrapMap {
#[derive(Clone)]
pub struct WrapSnapshot {
char_snapshot: CharSnapshot,
tab_snapshot: TabSnapshot,
transforms: SumTree<Transform>,
interpolated: bool,
}
@@ -51,11 +51,11 @@ struct TransformSummary {
pub struct WrapPoint(pub Point);
pub struct WrapChunks<'a> {
input_chunks: char_map::TabChunks<'a>,
input_chunks: tab_map::TabChunks<'a>,
input_chunk: Chunk<'a>,
output_position: WrapPoint,
max_output_row: u32,
transforms: Cursor<'a, Transform, (WrapPoint, CharPoint)>,
transforms: Cursor<'a, Transform, (WrapPoint, TabPoint)>,
}
#[derive(Clone)]
@@ -65,12 +65,12 @@ pub struct WrapBufferRows<'a> {
output_row: u32,
soft_wrapped: bool,
max_output_row: u32,
transforms: Cursor<'a, Transform, (WrapPoint, CharPoint)>,
transforms: Cursor<'a, Transform, (WrapPoint, TabPoint)>,
}
impl WrapMap {
pub fn new(
char_snapshot: CharSnapshot,
tab_snapshot: TabSnapshot,
font: Font,
font_size: Pixels,
wrap_width: Option<Pixels>,
@@ -83,7 +83,7 @@ impl WrapMap {
pending_edits: Default::default(),
interpolated_edits: Default::default(),
edits_since_sync: Default::default(),
snapshot: WrapSnapshot::new(char_snapshot),
snapshot: WrapSnapshot::new(tab_snapshot),
background_task: None,
};
this.set_wrap_width(wrap_width, cx);
@@ -101,17 +101,17 @@ impl WrapMap {
pub fn sync(
&mut self,
char_snapshot: CharSnapshot,
tab_snapshot: TabSnapshot,
edits: Vec<TabEdit>,
cx: &mut ModelContext<Self>,
) -> (WrapSnapshot, Patch<u32>) {
if self.wrap_width.is_some() {
self.pending_edits.push_back((char_snapshot, edits));
self.pending_edits.push_back((tab_snapshot, edits));
self.flush_edits(cx);
} else {
self.edits_since_sync = self
.edits_since_sync
.compose(self.snapshot.interpolate(char_snapshot, &edits));
.compose(self.snapshot.interpolate(tab_snapshot, &edits));
self.snapshot.interpolated = false;
}
@@ -161,11 +161,11 @@ impl WrapMap {
let (font, font_size) = self.font_with_size.clone();
let task = cx.background_executor().spawn(async move {
let mut line_wrapper = text_system.line_wrapper(font, font_size);
let char_snapshot = new_snapshot.char_snapshot.clone();
let range = CharPoint::zero()..char_snapshot.max_point();
let tab_snapshot = new_snapshot.tab_snapshot.clone();
let range = TabPoint::zero()..tab_snapshot.max_point();
let edits = new_snapshot
.update(
char_snapshot,
tab_snapshot,
&[TabEdit {
old: range.clone(),
new: range.clone(),
@@ -205,7 +205,7 @@ impl WrapMap {
} else {
let old_rows = self.snapshot.transforms.summary().output.lines.row + 1;
self.snapshot.transforms = SumTree::default();
let summary = self.snapshot.char_snapshot.text_summary();
let summary = self.snapshot.tab_snapshot.text_summary();
if !summary.lines.is_zero() {
self.snapshot
.transforms
@@ -223,8 +223,8 @@ impl WrapMap {
fn flush_edits(&mut self, cx: &mut ModelContext<Self>) {
if !self.snapshot.interpolated {
let mut to_remove_len = 0;
for (char_snapshot, _) in &self.pending_edits {
if char_snapshot.version <= self.snapshot.char_snapshot.version {
for (tab_snapshot, _) in &self.pending_edits {
if tab_snapshot.version <= self.snapshot.tab_snapshot.version {
to_remove_len += 1;
} else {
break;
@@ -246,9 +246,9 @@ impl WrapMap {
let update_task = cx.background_executor().spawn(async move {
let mut edits = Patch::default();
let mut line_wrapper = text_system.line_wrapper(font, font_size);
for (char_snapshot, tab_edits) in pending_edits {
for (tab_snapshot, tab_edits) in pending_edits {
let wrap_edits = snapshot
.update(char_snapshot, &tab_edits, wrap_width, &mut line_wrapper)
.update(tab_snapshot, &tab_edits, wrap_width, &mut line_wrapper)
.await;
edits = edits.compose(&wrap_edits);
}
@@ -285,11 +285,11 @@ impl WrapMap {
let was_interpolated = self.snapshot.interpolated;
let mut to_remove_len = 0;
for (char_snapshot, edits) in &self.pending_edits {
if char_snapshot.version <= self.snapshot.char_snapshot.version {
for (tab_snapshot, edits) in &self.pending_edits {
if tab_snapshot.version <= self.snapshot.tab_snapshot.version {
to_remove_len += 1;
} else {
let interpolated_edits = self.snapshot.interpolate(char_snapshot.clone(), edits);
let interpolated_edits = self.snapshot.interpolate(tab_snapshot.clone(), edits);
self.edits_since_sync = self.edits_since_sync.compose(&interpolated_edits);
self.interpolated_edits = self.interpolated_edits.compose(&interpolated_edits);
}
@@ -302,49 +302,45 @@ impl WrapMap {
}
impl WrapSnapshot {
fn new(char_snapshot: CharSnapshot) -> Self {
fn new(tab_snapshot: TabSnapshot) -> Self {
let mut transforms = SumTree::default();
let extent = char_snapshot.text_summary();
let extent = tab_snapshot.text_summary();
if !extent.lines.is_zero() {
transforms.push(Transform::isomorphic(extent), &());
}
Self {
transforms,
char_snapshot,
tab_snapshot,
interpolated: true,
}
}
pub fn buffer_snapshot(&self) -> &MultiBufferSnapshot {
self.char_snapshot.buffer_snapshot()
self.tab_snapshot.buffer_snapshot()
}
fn interpolate(
&mut self,
new_char_snapshot: CharSnapshot,
tab_edits: &[TabEdit],
) -> Patch<u32> {
fn interpolate(&mut self, new_tab_snapshot: TabSnapshot, tab_edits: &[TabEdit]) -> Patch<u32> {
let mut new_transforms;
if tab_edits.is_empty() {
new_transforms = self.transforms.clone();
} else {
let mut old_cursor = self.transforms.cursor::<CharPoint>(&());
let mut old_cursor = self.transforms.cursor::<TabPoint>(&());
let mut tab_edits_iter = tab_edits.iter().peekable();
new_transforms =
old_cursor.slice(&tab_edits_iter.peek().unwrap().old.start, Bias::Right, &());
while let Some(edit) = tab_edits_iter.next() {
if edit.new.start > CharPoint::from(new_transforms.summary().input.lines) {
let summary = new_char_snapshot.text_summary_for_range(
CharPoint::from(new_transforms.summary().input.lines)..edit.new.start,
if edit.new.start > TabPoint::from(new_transforms.summary().input.lines) {
let summary = new_tab_snapshot.text_summary_for_range(
TabPoint::from(new_transforms.summary().input.lines)..edit.new.start,
);
new_transforms.push_or_extend(Transform::isomorphic(summary));
}
if !edit.new.is_empty() {
new_transforms.push_or_extend(Transform::isomorphic(
new_char_snapshot.text_summary_for_range(edit.new.clone()),
new_tab_snapshot.text_summary_for_range(edit.new.clone()),
));
}
@@ -353,7 +349,7 @@ impl WrapSnapshot {
if next_edit.old.start > old_cursor.end(&()) {
if old_cursor.end(&()) > edit.old.end {
let summary = self
.char_snapshot
.tab_snapshot
.text_summary_for_range(edit.old.end..old_cursor.end(&()));
new_transforms.push_or_extend(Transform::isomorphic(summary));
}
@@ -367,7 +363,7 @@ impl WrapSnapshot {
} else {
if old_cursor.end(&()) > edit.old.end {
let summary = self
.char_snapshot
.tab_snapshot
.text_summary_for_range(edit.old.end..old_cursor.end(&()));
new_transforms.push_or_extend(Transform::isomorphic(summary));
}
@@ -380,7 +376,7 @@ impl WrapSnapshot {
let old_snapshot = mem::replace(
self,
WrapSnapshot {
char_snapshot: new_char_snapshot,
tab_snapshot: new_tab_snapshot,
transforms: new_transforms,
interpolated: true,
},
@@ -391,7 +387,7 @@ impl WrapSnapshot {
async fn update(
&mut self,
new_char_snapshot: CharSnapshot,
new_tab_snapshot: TabSnapshot,
tab_edits: &[TabEdit],
wrap_width: Pixels,
line_wrapper: &mut LineWrapper,
@@ -428,27 +424,27 @@ impl WrapSnapshot {
new_transforms = self.transforms.clone();
} else {
let mut row_edits = row_edits.into_iter().peekable();
let mut old_cursor = self.transforms.cursor::<CharPoint>(&());
let mut old_cursor = self.transforms.cursor::<TabPoint>(&());
new_transforms = old_cursor.slice(
&CharPoint::new(row_edits.peek().unwrap().old_rows.start, 0),
&TabPoint::new(row_edits.peek().unwrap().old_rows.start, 0),
Bias::Right,
&(),
);
while let Some(edit) = row_edits.next() {
if edit.new_rows.start > new_transforms.summary().input.lines.row {
let summary = new_char_snapshot.text_summary_for_range(
CharPoint(new_transforms.summary().input.lines)
..CharPoint::new(edit.new_rows.start, 0),
let summary = new_tab_snapshot.text_summary_for_range(
TabPoint(new_transforms.summary().input.lines)
..TabPoint::new(edit.new_rows.start, 0),
);
new_transforms.push_or_extend(Transform::isomorphic(summary));
}
let mut line = String::new();
let mut remaining = None;
let mut chunks = new_char_snapshot.chunks(
CharPoint::new(edit.new_rows.start, 0)..new_char_snapshot.max_point(),
let mut chunks = new_tab_snapshot.chunks(
TabPoint::new(edit.new_rows.start, 0)..new_tab_snapshot.max_point(),
false,
Highlights::default(),
);
@@ -495,19 +491,19 @@ impl WrapSnapshot {
}
new_transforms.extend(edit_transforms, &());
old_cursor.seek_forward(&CharPoint::new(edit.old_rows.end, 0), Bias::Right, &());
old_cursor.seek_forward(&TabPoint::new(edit.old_rows.end, 0), Bias::Right, &());
if let Some(next_edit) = row_edits.peek() {
if next_edit.old_rows.start > old_cursor.end(&()).row() {
if old_cursor.end(&()) > CharPoint::new(edit.old_rows.end, 0) {
let summary = self.char_snapshot.text_summary_for_range(
CharPoint::new(edit.old_rows.end, 0)..old_cursor.end(&()),
if old_cursor.end(&()) > TabPoint::new(edit.old_rows.end, 0) {
let summary = self.tab_snapshot.text_summary_for_range(
TabPoint::new(edit.old_rows.end, 0)..old_cursor.end(&()),
);
new_transforms.push_or_extend(Transform::isomorphic(summary));
}
old_cursor.next(&());
new_transforms.append(
old_cursor.slice(
&CharPoint::new(next_edit.old_rows.start, 0),
&TabPoint::new(next_edit.old_rows.start, 0),
Bias::Right,
&(),
),
@@ -515,9 +511,9 @@ impl WrapSnapshot {
);
}
} else {
if old_cursor.end(&()) > CharPoint::new(edit.old_rows.end, 0) {
let summary = self.char_snapshot.text_summary_for_range(
CharPoint::new(edit.old_rows.end, 0)..old_cursor.end(&()),
if old_cursor.end(&()) > TabPoint::new(edit.old_rows.end, 0) {
let summary = self.tab_snapshot.text_summary_for_range(
TabPoint::new(edit.old_rows.end, 0)..old_cursor.end(&()),
);
new_transforms.push_or_extend(Transform::isomorphic(summary));
}
@@ -530,7 +526,7 @@ impl WrapSnapshot {
let old_snapshot = mem::replace(
self,
WrapSnapshot {
char_snapshot: new_char_snapshot,
tab_snapshot: new_tab_snapshot,
transforms: new_transforms,
interpolated: false,
},
@@ -583,17 +579,17 @@ impl WrapSnapshot {
) -> WrapChunks<'a> {
let output_start = WrapPoint::new(rows.start, 0);
let output_end = WrapPoint::new(rows.end, 0);
let mut transforms = self.transforms.cursor::<(WrapPoint, CharPoint)>(&());
let mut transforms = self.transforms.cursor::<(WrapPoint, TabPoint)>(&());
transforms.seek(&output_start, Bias::Right, &());
let mut input_start = CharPoint(transforms.start().1 .0);
let mut input_start = TabPoint(transforms.start().1 .0);
if transforms.item().map_or(false, |t| t.is_isomorphic()) {
input_start.0 += output_start.0 - transforms.start().0 .0;
}
let input_end = self
.to_char_point(output_end)
.min(self.char_snapshot.max_point());
.to_tab_point(output_end)
.min(self.tab_snapshot.max_point());
WrapChunks {
input_chunks: self.char_snapshot.chunks(
input_chunks: self.tab_snapshot.chunks(
input_start..input_end,
language_aware,
highlights,
@@ -610,7 +606,7 @@ impl WrapSnapshot {
}
pub fn line_len(&self, row: u32) -> u32 {
let mut cursor = self.transforms.cursor::<(WrapPoint, CharPoint)>(&());
let mut cursor = self.transforms.cursor::<(WrapPoint, TabPoint)>(&());
cursor.seek(&WrapPoint::new(row + 1, 0), Bias::Left, &());
if cursor
.item()
@@ -618,7 +614,7 @@ impl WrapSnapshot {
{
let overshoot = row - cursor.start().0.row();
let tab_row = cursor.start().1.row() + overshoot;
let tab_line_len = self.char_snapshot.line_len(tab_row);
let tab_line_len = self.tab_snapshot.line_len(tab_row);
if overshoot == 0 {
cursor.start().0.column() + (tab_line_len - cursor.start().1.column())
} else {
@@ -646,14 +642,14 @@ impl WrapSnapshot {
}
pub fn buffer_rows(&self, start_row: u32) -> WrapBufferRows {
let mut transforms = self.transforms.cursor::<(WrapPoint, CharPoint)>(&());
let mut transforms = self.transforms.cursor::<(WrapPoint, TabPoint)>(&());
transforms.seek(&WrapPoint::new(start_row, 0), Bias::Left, &());
let mut input_row = transforms.start().1.row();
if transforms.item().map_or(false, |t| t.is_isomorphic()) {
input_row += start_row - transforms.start().0.row();
}
let soft_wrapped = transforms.item().map_or(false, |t| !t.is_isomorphic());
let mut input_buffer_rows = self.char_snapshot.buffer_rows(input_row);
let mut input_buffer_rows = self.tab_snapshot.buffer_rows(input_row);
let input_buffer_row = input_buffer_rows.next().unwrap();
WrapBufferRows {
transforms,
@@ -665,26 +661,26 @@ impl WrapSnapshot {
}
}
pub fn to_char_point(&self, point: WrapPoint) -> CharPoint {
let mut cursor = self.transforms.cursor::<(WrapPoint, CharPoint)>(&());
pub fn to_tab_point(&self, point: WrapPoint) -> TabPoint {
let mut cursor = self.transforms.cursor::<(WrapPoint, TabPoint)>(&());
cursor.seek(&point, Bias::Right, &());
let mut char_point = cursor.start().1 .0;
let mut tab_point = cursor.start().1 .0;
if cursor.item().map_or(false, |t| t.is_isomorphic()) {
char_point += point.0 - cursor.start().0 .0;
tab_point += point.0 - cursor.start().0 .0;
}
CharPoint(char_point)
TabPoint(tab_point)
}
pub fn to_point(&self, point: WrapPoint, bias: Bias) -> Point {
self.char_snapshot.to_point(self.to_char_point(point), bias)
self.tab_snapshot.to_point(self.to_tab_point(point), bias)
}
pub fn make_wrap_point(&self, point: Point, bias: Bias) -> WrapPoint {
self.char_point_to_wrap_point(self.char_snapshot.make_char_point(point, bias))
self.tab_point_to_wrap_point(self.tab_snapshot.make_tab_point(point, bias))
}
pub fn char_point_to_wrap_point(&self, point: CharPoint) -> WrapPoint {
let mut cursor = self.transforms.cursor::<(CharPoint, WrapPoint)>(&());
pub fn tab_point_to_wrap_point(&self, point: TabPoint) -> WrapPoint {
let mut cursor = self.transforms.cursor::<(TabPoint, WrapPoint)>(&());
cursor.seek(&point, Bias::Right, &());
WrapPoint(cursor.start().1 .0 + (point.0 - cursor.start().0 .0))
}
@@ -699,10 +695,7 @@ impl WrapSnapshot {
}
}
self.char_point_to_wrap_point(
self.char_snapshot
.clip_point(self.to_char_point(point), bias),
)
self.tab_point_to_wrap_point(self.tab_snapshot.clip_point(self.to_tab_point(point), bias))
}
pub fn prev_row_boundary(&self, mut point: WrapPoint) -> u32 {
@@ -712,7 +705,7 @@ impl WrapSnapshot {
*point.column_mut() = 0;
let mut cursor = self.transforms.cursor::<(WrapPoint, CharPoint)>(&());
let mut cursor = self.transforms.cursor::<(WrapPoint, TabPoint)>(&());
cursor.seek(&point, Bias::Right, &());
if cursor.item().is_none() {
cursor.prev(&());
@@ -732,7 +725,7 @@ impl WrapSnapshot {
pub fn next_row_boundary(&self, mut point: WrapPoint) -> Option<u32> {
point.0 += Point::new(1, 0);
let mut cursor = self.transforms.cursor::<(WrapPoint, CharPoint)>(&());
let mut cursor = self.transforms.cursor::<(WrapPoint, TabPoint)>(&());
cursor.seek(&point, Bias::Right, &());
while let Some(transform) = cursor.item() {
if transform.is_isomorphic() && cursor.start().1.column() == 0 {
@@ -749,8 +742,8 @@ impl WrapSnapshot {
#[cfg(test)]
{
assert_eq!(
CharPoint::from(self.transforms.summary().input.lines),
self.char_snapshot.max_point()
TabPoint::from(self.transforms.summary().input.lines),
self.tab_snapshot.max_point()
);
{
@@ -763,18 +756,18 @@ impl WrapSnapshot {
}
let text = language::Rope::from(self.text().as_str());
let mut input_buffer_rows = self.char_snapshot.buffer_rows(0);
let mut input_buffer_rows = self.tab_snapshot.buffer_rows(0);
let mut expected_buffer_rows = Vec::new();
let mut prev_tab_row = 0;
for display_row in 0..=self.max_point().row() {
let char_point = self.to_char_point(WrapPoint::new(display_row, 0));
if char_point.row() == prev_tab_row && display_row != 0 {
let tab_point = self.to_tab_point(WrapPoint::new(display_row, 0));
if tab_point.row() == prev_tab_row && display_row != 0 {
expected_buffer_rows.push(None);
} else {
expected_buffer_rows.push(input_buffer_rows.next().unwrap());
}
prev_tab_row = char_point.row();
prev_tab_row = tab_point.row();
assert_eq!(self.line_len(display_row), text.line_len(display_row));
}
@@ -838,11 +831,13 @@ impl<'a> Iterator for WrapChunks<'a> {
} else {
*self.output_position.column_mut() += char_len as u32;
}
if self.output_position >= transform_end {
self.transforms.next(&());
break;
}
}
let (prefix, suffix) = self.input_chunk.text.split_at(input_len);
self.input_chunk.text = suffix;
Some(Chunk {
@@ -997,7 +992,7 @@ impl sum_tree::Summary for TransformSummary {
}
}
impl<'a> sum_tree::Dimension<'a, TransformSummary> for CharPoint {
impl<'a> sum_tree::Dimension<'a, TransformSummary> for TabPoint {
fn zero(_cx: &()) -> Self {
Default::default()
}
@@ -1007,7 +1002,7 @@ impl<'a> sum_tree::Dimension<'a, TransformSummary> for CharPoint {
}
}
impl<'a> sum_tree::SeekTarget<'a, TransformSummary, TransformSummary> for CharPoint {
impl<'a> sum_tree::SeekTarget<'a, TransformSummary, TransformSummary> for TabPoint {
fn cmp(&self, cursor_location: &TransformSummary, _: &()) -> std::cmp::Ordering {
Ord::cmp(&self.0, &cursor_location.input.lines)
}
@@ -1055,7 +1050,7 @@ fn consolidate_wrap_edits(edits: Vec<WrapEdit>) -> Vec<WrapEdit> {
mod tests {
use super::*;
use crate::{
display_map::{char_map::CharMap, fold_map::FoldMap, inlay_map::InlayMap},
display_map::{fold_map::FoldMap, inlay_map::InlayMap, tab_map::TabMap},
MultiBuffer,
};
use gpui::{font, px, test::observe};
@@ -1107,9 +1102,9 @@ mod tests {
log::info!("InlayMap text: {:?}", inlay_snapshot.text());
let (mut fold_map, fold_snapshot) = FoldMap::new(inlay_snapshot.clone());
log::info!("FoldMap text: {:?}", fold_snapshot.text());
let (mut char_map, _) = CharMap::new(fold_snapshot.clone(), tab_size);
let tabs_snapshot = char_map.set_max_expansion_column(32);
log::info!("CharMap text: {:?}", tabs_snapshot.text());
let (mut tab_map, _) = TabMap::new(fold_snapshot.clone(), tab_size);
let tabs_snapshot = tab_map.set_max_expansion_column(32);
log::info!("TabMap text: {:?}", tabs_snapshot.text());
let mut line_wrapper = text_system.line_wrapper(font.clone(), font_size);
let unwrapped_text = tabs_snapshot.text();
@@ -1155,7 +1150,7 @@ mod tests {
20..=39 => {
for (fold_snapshot, fold_edits) in fold_map.randomly_mutate(&mut rng) {
let (tabs_snapshot, tab_edits) =
char_map.sync(fold_snapshot, fold_edits, tab_size);
tab_map.sync(fold_snapshot, fold_edits, tab_size);
let (mut snapshot, wrap_edits) =
wrap_map.update(cx, |map, cx| map.sync(tabs_snapshot, tab_edits, cx));
snapshot.check_invariants();
@@ -1168,7 +1163,7 @@ mod tests {
inlay_map.randomly_mutate(&mut next_inlay_id, &mut rng);
let (fold_snapshot, fold_edits) = fold_map.read(inlay_snapshot, inlay_edits);
let (tabs_snapshot, tab_edits) =
char_map.sync(fold_snapshot, fold_edits, tab_size);
tab_map.sync(fold_snapshot, fold_edits, tab_size);
let (mut snapshot, wrap_edits) =
wrap_map.update(cx, |map, cx| map.sync(tabs_snapshot, tab_edits, cx));
snapshot.check_invariants();
@@ -1192,8 +1187,8 @@ mod tests {
log::info!("InlayMap text: {:?}", inlay_snapshot.text());
let (fold_snapshot, fold_edits) = fold_map.read(inlay_snapshot, inlay_edits);
log::info!("FoldMap text: {:?}", fold_snapshot.text());
let (tabs_snapshot, tab_edits) = char_map.sync(fold_snapshot, fold_edits, tab_size);
log::info!("CharMap text: {:?}", tabs_snapshot.text());
let (tabs_snapshot, tab_edits) = tab_map.sync(fold_snapshot, fold_edits, tab_size);
log::info!("TabMap text: {:?}", tabs_snapshot.text());
let unwrapped_text = tabs_snapshot.text();
let expected_text = wrap_text(&unwrapped_text, wrap_width, &mut line_wrapper);
@@ -1239,7 +1234,7 @@ mod tests {
if tab_size.get() == 1
|| !wrapped_snapshot
.char_snapshot
.tab_snapshot
.fold_snapshot
.text()
.contains('\t')

Some files were not shown because too many files have changed in this diff.