Compare commits
1 commit: build-docs...search-rep

| Author | SHA1 | Date |
|---|---|---|
| | 26d754a168 | |
@@ -1,8 +1,8 @@
We have two cloudflare workers that let us serve some assets of this repo
from Cloudflare.

- `open-source-website-assets` is used for `install.sh`
- `docs-proxy` is used for `https://zed.dev/docs`
* `open-source-website-assets` is used for `install.sh`
* `docs-proxy` is used for `https://zed.dev/docs`

On push to `main`, both of these (and the files they depend on) are uploaded to Cloudflare.
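As a worked illustration of the deploy-on-push behavior described above, here is a minimal, hypothetical GitHub Actions job that publishes one of these workers with the Wrangler CLI. The job layout, worker directory, and secret name are assumptions made for this sketch and are not taken from the repository's actual workflow, which may differ.

```yaml
# Hypothetical sketch: publish a Cloudflare Worker on pushes to `main`.
# The directory and secret name below are assumptions, not taken from this repo.
name: deploy-cloudflare-workers
on:
  push:
    branches: [main]
jobs:
  deploy:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
      - name: Deploy open-source-website-assets worker
        run: npx wrangler deploy
        working-directory: open-source-website-assets # assumed path
        env:
          CLOUDFLARE_API_TOKEN: ${{ secrets.CLOUDFLARE_API_TOKEN }}
```

A second, analogous step would publish the `docs-proxy` worker in the same way.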
.github/workflows/ci.yml (16 changes, vendored)
@@ -232,20 +232,20 @@ jobs:
mv target/x86_64-apple-darwin/release/Zed.dmg target/x86_64-apple-darwin/release/Zed-x86_64.dmg

- name: Upload app bundle (universal) to workflow run if main branch or specific label
uses: actions/upload-artifact@50769540e7f4bd5e21e526ee35c689e35e0d6874 # v4
uses: actions/upload-artifact@834a144ee995460fba8ed112a2fc961b36a5ec5a # v4
if: ${{ github.ref == 'refs/heads/main' }} || contains(github.event.pull_request.labels.*.name, 'run-bundling') }}
with:
name: Zed_${{ github.event.pull_request.head.sha || github.sha }}.dmg
path: target/release/Zed.dmg
- name: Upload app bundle (aarch64) to workflow run if main branch or specific label
uses: actions/upload-artifact@50769540e7f4bd5e21e526ee35c689e35e0d6874 # v4
uses: actions/upload-artifact@834a144ee995460fba8ed112a2fc961b36a5ec5a # v4
if: ${{ github.ref == 'refs/heads/main' }} || contains(github.event.pull_request.labels.*.name, 'run-bundling') }}
with:
name: Zed_${{ github.event.pull_request.head.sha || github.sha }}-aarch64.dmg
path: target/aarch64-apple-darwin/release/Zed-aarch64.dmg

- name: Upload app bundle (x86_64) to workflow run if main branch or specific label
uses: actions/upload-artifact@50769540e7f4bd5e21e526ee35c689e35e0d6874 # v4
uses: actions/upload-artifact@834a144ee995460fba8ed112a2fc961b36a5ec5a # v4
if: ${{ github.ref == 'refs/heads/main' }} || contains(github.event.pull_request.labels.*.name, 'run-bundling') }}
with:
name: Zed_${{ github.event.pull_request.head.sha || github.sha }}-x86_64.dmg

@@ -321,7 +321,7 @@ jobs:
run: script/bundle-linux

- name: Upload Linux bundle to workflow run if main branch or specific label
uses: actions/upload-artifact@50769540e7f4bd5e21e526ee35c689e35e0d6874 # v4
uses: actions/upload-artifact@834a144ee995460fba8ed112a2fc961b36a5ec5a # v4
if: ${{ github.ref == 'refs/heads/main' }} || contains(github.event.pull_request.labels.*.name, 'run-bundling') }}
with:
name: zed-${{ github.event.pull_request.head.sha || github.sha }}-x86_64-unknown-linux-gnu.tar.gz

@@ -339,7 +339,7 @@ jobs:
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}

bundle-linux-aarch64: # this runs on ubuntu22.04
bundle-linux-aarch64:
timeout-minutes: 60
name: Create arm64 Linux bundle
runs-on:

@@ -360,8 +360,8 @@ jobs:
- name: Set up Clang
run: |
sudo apt-get update
sudo apt-get install -y llvm-15 clang-15 build-essential cmake pkg-config libasound2-dev libfontconfig-dev libwayland-dev libxkbcommon-x11-dev libssl-dev libsqlite3-dev libzstd-dev libvulkan1 libgit2-dev
echo "/usr/lib/llvm-15/bin" >> $GITHUB_PATH
sudo apt-get install -y llvm-10 clang-10 build-essential cmake pkg-config libasound2-dev libfontconfig-dev libwayland-dev libxkbcommon-x11-dev libssl-dev libsqlite3-dev libzstd-dev libvulkan1 libgit2-dev
echo "/usr/lib/llvm-10/bin" >> $GITHUB_PATH

- uses: rui314/setup-mold@0bf4f07ef9048ec62a45f9dbf2f098afa49695f0 # v1
with:

@@ -406,7 +406,7 @@ jobs:
run: script/bundle-linux

- name: Upload Linux bundle to workflow run if main branch or specific label
uses: actions/upload-artifact@50769540e7f4bd5e21e526ee35c689e35e0d6874 # v4
uses: actions/upload-artifact@834a144ee995460fba8ed112a2fc961b36a5ec5a # v4
if: ${{ github.ref == 'refs/heads/main' }} || contains(github.event.pull_request.labels.*.name, 'run-bundling') }}
with:
name: zed-${{ github.event.pull_request.head.sha || github.sha }}-aarch64-unknown-linux-gnu.tar.gz
@@ -9,7 +9,7 @@ jobs:
if: github.repository_owner == 'zed-industries'
steps:
- uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4
- uses: actions/setup-python@f677139bbe7f9c59b41e40162b753c062f5d49a3 # v5
- uses: actions/setup-python@39cd14951b08e74b54015e9e001cdefcf80e669f # v5
with:
python-version: "3.11"
architecture: "x64"

@@ -9,7 +9,7 @@ jobs:
if: github.repository_owner == 'zed-industries'
steps:
- uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4
- uses: actions/setup-python@f677139bbe7f9c59b41e40162b753c062f5d49a3 # v5
- uses: actions/setup-python@39cd14951b08e74b54015e9e001cdefcf80e669f # v5
with:
python-version: "3.11"
architecture: "x64"
.mailmap (21 changes)

@@ -11,12 +11,8 @@

Alex Viscreanu <alexviscreanu@gmail.com>
Alex Viscreanu <alexviscreanu@gmail.com> <alexandru.viscreanu@kiwi.com>
Alexander Mankuta <alex@pointless.one>
Alexander Mankuta <alex@pointless.one> <alex+github@pointless.one>
amtoaer <amtoaer@gmail.com>
amtoaer <amtoaer@gmail.com> <amtoaer@outlook.com>
Andrei Zvonimir Crnković <andrei@0x7f.dev>
Andrei Zvonimir Crnković <andrei@0x7f.dev> <andreicek@0x7f.dev>
Antonio Scandurra <me@as-cii.com>
Antonio Scandurra <me@as-cii.com> <antonio@zed.dev>
Bennet Bo Fenner <bennet@zed.dev>

@@ -28,9 +24,8 @@ Conrad Irwin <conrad@zed.dev>
Conrad Irwin <conrad@zed.dev> <conrad.irwin@gmail.com>
Danilo Leal <danilo@zed.dev>
Danilo Leal <danilo@zed.dev> <67129314+danilo-leal@users.noreply.github.com>
Evren Sen <nervenes@icloud.com>
Evren Sen <nervenes@icloud.com> <146845123+evrensen467@users.noreply.github.com>
Evren Sen <nervenes@icloud.com> <146845123+evrsen@users.noreply.github.com>
Evren Sen <146845123+evrensen467@users.noreply.github.com>
Evren Sen <146845123+evrensen467@users.noreply.github.com> <146845123+evrsen@users.noreply.github.com>
Fernando Tagawa <tagawafernando@gmail.com>
Fernando Tagawa <tagawafernando@gmail.com> <fernando.tagawa.gamail.com@gmail.com>
Greg Morenz <greg-morenz@droid.cafe>

@@ -54,12 +49,8 @@ LoganDark <contact@logandark.mozmail.com> <git@logandark.mozmail.com>
LoganDark <contact@logandark.mozmail.com> <github@logandark.mozmail.com>
Marshall Bowers <elliott.codes@gmail.com>
Marshall Bowers <elliott.codes@gmail.com> <marshall@zed.dev>
Matt Fellenz <matt@felle.nz>
Matt Fellenz <matt@felle.nz> <matt+github@felle.nz>
Max Brunsfeld <maxbrunsfeld@gmail.com>
Max Brunsfeld <maxbrunsfeld@gmail.com> <max@zed.dev>
Max Linke <maxlinke88@gmail.com>
Max Linke <maxlinke88@gmail.com> <kain88-de@users.noreply.github.com>
Mikayla Maki <mikayla@zed.dev>
Mikayla Maki <mikayla@zed.dev> <mikayla.c.maki@gmail.com>
Mikayla Maki <mikayla@zed.dev> <mikayla.c.maki@icloud.com>

@@ -84,18 +75,10 @@ Richard Feldman <oss@rtfeldman.com>
Richard Feldman <oss@rtfeldman.com> <richard@zed.dev>
Robert Clover <git@clo4.net>
Robert Clover <git@clo4.net> <robert@clover.gdn>
Roy Williams <roy.williams.iii@gmail.com>
Roy Williams <roy.williams.iii@gmail.com> <roy@anthropic.com>
Sergey Onufrienko <sergey@onufrienko.com>
Thorben Kröger <dev@thorben.net>
Thorben Kröger <dev@thorben.net> <thorben.kroeger@hexagon.com>
Thorsten Ball <thorsten@zed.dev>
Thorsten Ball <thorsten@zed.dev> <me@thorstenball.com>
Thorsten Ball <thorsten@zed.dev> <mrnugget@gmail.com>
Tristan Hume <tris.hume@gmail.com>
Tristan Hume <tris.hume@gmail.com> <tristan@anthropic.com>
Uladzislau Kaminski <i@uladkaminski.com>
Uladzislau Kaminski <i@uladkaminski.com> <uladzislau_kaminski@epam.com>
Vitaly Slobodin <vitaliy.slobodin@gmail.com>
Vitaly Slobodin <vitaliy.slobodin@gmail.com> <vitaly_slobodin@fastmail.com>
WindSoilder <WindSoilder@outlook.com>
@@ -38,7 +38,6 @@
}
}
},
"hard_tabs": false,
"formatter": "auto",
"remove_trailing_whitespace_on_save": true,
"ensure_final_newline_on_save": true
Cargo.lock (709 changes, generated)

Cargo.toml (32 changes)
@@ -6,7 +6,6 @@ members = [
"crates/assets",
"crates/assistant",
"crates/assistant_slash_command",
"crates/assistant_tool",
"crates/audio",
"crates/auto_update",
"crates/breadcrumbs",

@@ -72,6 +71,7 @@ members = [
"crates/outline",
"crates/outline_panel",
"crates/paths",
"crates/performance",
"crates/picker",
"crates/prettier",
"crates/project",

@@ -166,7 +166,7 @@ members = [
# Tooling
#

"tooling/xtask",
"tooling/xtask"
]
default-members = ["crates/zed"]

@@ -181,7 +181,6 @@ anthropic = { path = "crates/anthropic" }
assets = { path = "crates/assets" }
assistant = { path = "crates/assistant" }
assistant_slash_command = { path = "crates/assistant_slash_command" }
assistant_tool = { path = "crates/assistant_tool" }
audio = { path = "crates/audio" }
auto_update = { path = "crates/auto_update" }
breadcrumbs = { path = "crates/breadcrumbs" }

@@ -245,6 +244,7 @@ open_ai = { path = "crates/open_ai" }
outline = { path = "crates/outline" }
outline_panel = { path = "crates/outline_panel" }
paths = { path = "crates/paths" }
performance = { path = "crates/performance" }
picker = { path = "crates/picker" }
plugin = { path = "crates/plugin" }
plugin_macros = { path = "crates/plugin_macros" }

@@ -315,16 +315,16 @@ async-dispatcher = "0.1"
async-fs = "1.6"
async-pipe = { git = "https://github.com/zed-industries/async-pipe-rs", rev = "82d00a04211cf4e1236029aa03e6b6ce2a74c553" }
async-recursion = "1.0.0"
async-tar = "0.5.0"
async-tar = "0.4.2"
async-trait = "0.1"
async-tungstenite = "0.23"
async-watch = "0.3.1"
async_zip = { version = "0.0.17", features = ["deflate", "deflate64"] }
base64 = "0.22"
bitflags = "2.6.0"
blade-graphics = { git = "https://github.com/kvark/blade", rev = "e142a3a5e678eb6a13e642ad8401b1f3aa38e969" }
blade-macros = { git = "https://github.com/kvark/blade", rev = "e142a3a5e678eb6a13e642ad8401b1f3aa38e969" }
blade-util = { git = "https://github.com/kvark/blade", rev = "e142a3a5e678eb6a13e642ad8401b1f3aa38e969" }
blade-graphics = { git = "https://github.com/kvark/blade", rev = "fee06c42f658b36dd9ac85444a9ee2a481383695" }
blade-macros = { git = "https://github.com/kvark/blade", rev = "fee06c42f658b36dd9ac85444a9ee2a481383695" }
blade-util = { git = "https://github.com/kvark/blade", rev = "fee06c42f658b36dd9ac85444a9ee2a481383695" }
cargo_metadata = "0.18"
cargo_toml = "0.20"
chrono = { version = "0.4", features = ["serde"] }

@@ -365,7 +365,7 @@ linkify = "0.10.0"
log = { version = "0.4.16", features = ["kv_unstable_serde", "serde"] }
markup5ever_rcdom = "0.3.0"
nanoid = "0.4"
nix = "0.29"
nix = "0.28"
num-format = "0.4.4"
once_cell = "1.19.0"
ordered-float = "2.1.1"

@@ -378,7 +378,7 @@ pretty_assertions = "1.3.0"
prost = "0.9"
prost-build = "0.9"
prost-types = "0.9"
pulldown-cmark = { version = "0.12.0", default-features = false }
pulldown-cmark = { version = "0.10.0", default-features = false }
rand = "0.8.5"
regex = "1.5"
repair_json = "0.1.0"

@@ -386,6 +386,7 @@ rsa = "0.9.6"
runtimelib = { version = "0.15", default-features = false, features = [
"async-dispatcher-runtime",
] }
rusqlite = { version = "0.29.0", features = ["blob", "array", "modern_sqlite"] }
rustc-demangle = "0.1.23"
rust-embed = { version = "8.4", features = ["include-exclude"] }
schemars = { version = "0.8", features = ["impl_json_schema"] }

@@ -393,7 +394,7 @@ semver = "1.0"
serde = { version = "1.0", features = ["derive", "rc"] }
serde_derive = { version = "1.0", features = ["deserialize_in_place"] }
serde_json = { version = "1.0", features = ["preserve_order", "raw_value"] }
serde_json_lenient = { version = "0.2", features = [
serde_json_lenient = { version = "0.1", features = [
"preserve_order",
"raw_value",
] }

@@ -466,7 +467,7 @@ which = "6.0.0"
wit-component = "0.201"

[workspace.dependencies.async-stripe]
version = "0.39"
version = "0.38"
default-features = false
features = [
"runtime-tokio-hyper-rustls",

@@ -572,17 +573,8 @@ single_range_in_vec_init = "allow"
# allow all of those, for now.
style = { level = "allow", priority = -1 }

# Temporary list of style lints that we've fixed so far.
module_inception = { level = "deny" }
question_mark = { level = "deny" }
redundant_closure = { level = "deny" }
# Individual rules that have violations in the codebase:
type_complexity = "allow"
# We often return trait objects from `new` functions.
new_ret_no_self = { level = "allow" }
# We have a few `next` functions that differ in lifetimes
# compared to Iterator::next. Yet, clippy complains about those.
should_implement_trait = { level = "allow" }

[workspace.metadata.cargo-machete]
ignored = ["bindgen", "cbindgen", "prost_build", "serde"]
@@ -1,6 +1,6 @@
# syntax = docker/dockerfile:1.2

FROM rust:1.81-bookworm as builder
FROM rust:1.80-bookworm as builder
WORKDIR app
COPY . .
@@ -4,10 +4,11 @@

Welcome to Zed, a high-performance, multiplayer code editor from the creators of [Atom](https://github.com/atom/atom) and [Tree-sitter](https://github.com/tree-sitter/tree-sitter).

---
--------

### Installation

<a href="https://repology.org/project/zed-editor/versions">
<img src="https://repology.org/badge/vertical-allrepos/zed-editor.svg?minversion=0.143.5" alt="Packaging status" align="right">
</a>
@@ -1,4 +1 @@
<svg width="16" height="16" viewBox="0 0 16 16" fill="none" xmlns="http://www.w3.org/2000/svg">
<path d="M8 10V13" stroke="black" stroke-width="1.5" stroke-linecap="round" stroke-linejoin="round"/>
<path d="M6.71429 7.088C6.7142 7.26164 6.66963 7.43181 6.58559 7.57938C6.50156 7.72695 6.38138 7.84606 6.23857 7.92333L5.47571 8.34333C5.33291 8.4206 5.21273 8.53972 5.12869 8.68729C5.04465 8.83486 5.00008 9.00503 5 9.17867V9.53333C5 9.6571 5.04515 9.7758 5.12553 9.86332C5.2059 9.95083 5.31491 10 5.42857 10H10.5714C10.6851 10 10.7941 9.95083 10.8745 9.86332C10.9548 9.7758 11 9.6571 11 9.53333V9.17867C10.9999 9.00503 10.9553 8.83486 10.8713 8.68729C10.7873 8.53972 10.6671 8.4206 10.5243 8.34333L9.76143 7.92333C9.61862 7.84606 9.49844 7.72695 9.41441 7.57938C9.33037 7.43181 9.2858 7.26164 9.28571 7.088V5.33333C9.28571 5.20957 9.33087 5.09087 9.41124 5.00335C9.49161 4.91583 9.60062 4.86667 9.71429 4.86667C9.94161 4.86667 10.1596 4.76833 10.3204 4.5933C10.4811 4.41827 10.5714 4.18087 10.5714 3.93333C10.5714 3.6858 10.4811 3.4484 10.3204 3.27337C10.1596 3.09833 9.94161 3 9.71429 3H6.28571C6.05839 3 5.84037 3.09833 5.67962 3.27337C5.51888 3.4484 5.42857 3.6858 5.42857 3.93333C5.42857 4.18087 5.51888 4.41827 5.67962 4.5933C5.84037 4.76833 6.05839 4.86667 6.28571 4.86667C6.39938 4.86667 6.50839 4.91583 6.58876 5.00335C6.66913 5.09087 6.71429 5.20957 6.71429 5.33333V7.088Z" fill="black" stroke="black" stroke-width="1.5" stroke-linecap="round" stroke-linejoin="round"/>
</svg>
<svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round" class="lucide lucide-pin"><path d="M12 17v5"/><path d="M9 10.76a2 2 0 0 1-1.11 1.79l-1.78.9A2 2 0 0 0 5 15.24V16a1 1 0 0 0 1 1h12a1 1 0 0 0 1-1v-.76a2 2 0 0 0-1.11-1.79l-1.78-.9A2 2 0 0 1 15 10.76V7a1 1 0 0 1 1-1 2 2 0 0 0 0-4H8a2 2 0 0 0 0 4 1 1 0 0 1 1 1z"/></svg>
Before Width: | Height: | Size: 1.5 KiB After Width: | Height: | Size: 447 B |
@@ -1,3 +1 @@
<svg width="16" height="16" viewBox="0 0 16 16" fill="none" xmlns="http://www.w3.org/2000/svg">
<path d="M5 4L12 8L5 12V4Z" stroke="black" stroke-width="1.5" stroke-linecap="round" stroke-linejoin="round"/>
</svg>
<svg width="15" height="15" viewBox="0 0 15 15" fill="none" xmlns="http://www.w3.org/2000/svg"><path d="M3.24182 2.32181C3.3919 2.23132 3.5784 2.22601 3.73338 2.30781L12.7334 7.05781C12.8974 7.14436 13 7.31457 13 7.5C13 7.68543 12.8974 7.85564 12.7334 7.94219L3.73338 12.6922C3.5784 12.774 3.3919 12.7687 3.24182 12.6782C3.09175 12.5877 3 12.4252 3 12.25V2.75C3 2.57476 3.09175 2.4123 3.24182 2.32181ZM4 3.57925V11.4207L11.4288 7.5L4 3.57925Z" fill="currentColor" fill-rule="evenodd" clip-rule="evenodd"></path></svg>
Before Width: | Height: | Size: 218 B After Width: | Height: | Size: 518 B |
@@ -1 +0,0 @@
<svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round" class="lucide lucide-pocket-knife"><path d="M3 2v1c0 1 2 1 2 2S3 6 3 7s2 1 2 2-2 1-2 2 2 1 2 2"/><path d="M18 6h.01"/><path d="M6 18h.01"/><path d="M20.83 8.83a4 4 0 0 0-5.66-5.66l-12 12a4 4 0 1 0 5.66 5.66Z"/><path d="M18 11.66V22a4 4 0 0 0 4-4V6"/></svg>
Before Width: | Height: | Size: 438 B |
@@ -1,4 +1 @@
<svg width="16" height="16" viewBox="0 0 16 16" fill="none" xmlns="http://www.w3.org/2000/svg">
<path fill-rule="evenodd" clip-rule="evenodd" d="M6.28561 2.25C5.83777 2.25 5.42262 2.44429 5.12712 2.76607C4.8336 3.08568 4.67847 3.50652 4.67847 3.93333C4.67847 4.00474 4.68281 4.07598 4.69142 4.14661L10.0356 6.63864V5.58279C10.3579 5.51401 10.6495 5.3435 10.8728 5.10053C11.1662 4.78092 11.3213 4.36012 11.3213 3.93333C11.3213 3.50655 11.1661 3.08559 10.8726 2.76598C10.5771 2.44432 10.1621 2.25 9.71419 2.25H6.28561Z" fill="black"/>
<path fill-rule="evenodd" clip-rule="evenodd" d="M1.70131 4.68303C1.87636 4.30763 2.3226 4.14521 2.698 4.32027L13.9361 9.5607C14.3115 9.73575 14.474 10.182 14.2989 10.5574C14.1239 10.9328 13.6776 11.0952 13.3022 10.9202L11.6006 10.1267C11.5536 10.2137 11.4958 10.2957 11.4272 10.3704C11.2121 10.6045 10.9057 10.75 10.5715 10.75H8.75007V13C8.75007 13.4142 8.41428 13.75 8.00007 13.75C7.58585 13.75 7.25007 13.4142 7.25007 13V10.75H5.42864C5.09445 10.75 4.78832 10.6049 4.57321 10.3706C4.36002 10.1385 4.25007 9.83637 4.25007 9.53334V9.17868C4.25021 8.87805 4.32715 8.57932 4.47703 8.31614C4.6266 8.05351 4.84546 7.83234 5.11609 7.68521L5.68873 7.36993L2.06407 5.67973C1.68867 5.50467 1.52625 5.05844 1.70131 4.68303Z" fill="black"/>
</svg>
<svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round" class="lucide lucide-pin-off"><path d="M12 17v5"/><path d="M15 9.34V7a1 1 0 0 1 1-1 2 2 0 0 0 0-4H7.89"/><path d="m2 2 20 20"/><path d="M9 9v1.76a2 2 0 0 1-1.11 1.79l-1.78.9A2 2 0 0 0 5 15.24V16a1 1 0 0 0 1 1h11"/></svg>
Before Width: | Height: | Size: 1.2 KiB After Width: | Height: | Size: 401 B |
@@ -59,8 +59,11 @@
|
||||
"ctrl-backspace": "editor::DeleteToPreviousWordStart",
|
||||
"ctrl-delete": "editor::DeleteToNextWordEnd",
|
||||
"shift-delete": "editor::Cut",
|
||||
"ctrl-x": "editor::Cut",
|
||||
"ctrl-insert": "editor::Copy",
|
||||
"ctrl-c": "editor::Copy",
|
||||
"shift-insert": "editor::Paste",
|
||||
"ctrl-v": "editor::Paste",
|
||||
"ctrl-y": "editor::Redo",
|
||||
"ctrl-z": "editor::Undo",
|
||||
"ctrl-shift-z": "editor::Redo",
|
||||
@@ -109,15 +112,6 @@
|
||||
"alt-g b": "editor::ToggleGitBlame"
|
||||
}
|
||||
},
|
||||
{
|
||||
// Separate block with same context so these display in context menus
|
||||
"context": "Editor",
|
||||
"bindings": {
|
||||
"ctrl-x": "editor::Cut",
|
||||
"ctrl-c": "editor::Copy",
|
||||
"ctrl-v": "editor::Paste"
|
||||
}
|
||||
},
|
||||
{
|
||||
"context": "Editor && mode == full",
|
||||
"bindings": {
|
||||
@@ -169,8 +163,7 @@
|
||||
"ctrl-shift-g": "search::SelectPrevMatch",
|
||||
"alt-m": "assistant::ToggleModelSelector",
|
||||
"ctrl-k h": "assistant::DeployHistory",
|
||||
"ctrl-k l": "assistant::DeployPromptLibrary",
|
||||
"ctrl-n": "assistant::NewContext"
|
||||
"ctrl-k l": "assistant::DeployPromptLibrary"
|
||||
}
|
||||
},
|
||||
{
|
||||
@@ -262,8 +255,7 @@
|
||||
"alt-r": "search::ToggleRegex",
|
||||
"alt-ctrl-f": "project_search::ToggleFilters",
|
||||
"ctrl-alt-shift-r": "search::ToggleRegex",
|
||||
"ctrl-alt-shift-x": "search::ToggleRegex",
|
||||
"ctrl-k shift-enter": "pane::TogglePinTab"
|
||||
"ctrl-alt-shift-x": "search::ToggleRegex"
|
||||
}
|
||||
},
|
||||
// Bindings from VS Code
|
||||
@@ -523,7 +515,7 @@
|
||||
}
|
||||
},
|
||||
{
|
||||
"context": "OutlinePanel && not_editing",
|
||||
"context": "OutlinePanel",
|
||||
"bindings": {
|
||||
"escape": "menu::Cancel",
|
||||
"left": "outline_panel::CollapseSelectedEntry",
|
||||
@@ -543,34 +535,27 @@
|
||||
"right": "project_panel::ExpandSelectedEntry",
|
||||
"ctrl-n": "project_panel::NewFile",
|
||||
"alt-ctrl-n": "project_panel::NewDirectory",
|
||||
"ctrl-x": "project_panel::Cut",
|
||||
"ctrl-c": "project_panel::Copy",
|
||||
"ctrl-insert": "project_panel::Copy",
|
||||
"ctrl-v": "project_panel::Paste",
|
||||
"shift-insert": "project_panel::Paste",
|
||||
"ctrl-alt-c": "project_panel::CopyPath",
|
||||
"alt-ctrl-shift-c": "project_panel::CopyRelativePath",
|
||||
"f2": "project_panel::Rename",
|
||||
"enter": "project_panel::Rename",
|
||||
"backspace": ["project_panel::Trash", { "skip_prompt": false }],
|
||||
"shift-delete": ["project_panel::Delete", { "skip_prompt": false }],
|
||||
"delete": ["project_panel::Trash", { "skip_prompt": false }],
|
||||
"ctrl-backspace": ["project_panel::Delete", { "skip_prompt": false }],
|
||||
"ctrl-delete": ["project_panel::Delete", { "skip_prompt": false }],
|
||||
"alt-ctrl-r": "project_panel::RevealInFileManager",
|
||||
"ctrl-shift-enter": "project_panel::OpenWithSystem",
|
||||
"alt-shift-f": "project_panel::NewSearchInDirectory",
|
||||
"shift-down": "menu::SelectNext",
|
||||
"shift-up": "menu::SelectPrev",
|
||||
"escape": "menu::Cancel"
|
||||
}
|
||||
},
|
||||
{
|
||||
// Separate block with same context so these display in context menus
|
||||
"context": "ProjectPanel",
|
||||
"bindings": {
|
||||
"f2": "project_panel::Rename",
|
||||
"ctrl-c": "project_panel::Copy",
|
||||
"ctrl-x": "project_panel::Cut",
|
||||
"ctrl-v": "project_panel::Paste",
|
||||
"delete": ["project_panel::Trash", { "skip_prompt": false }]
|
||||
}
|
||||
},
|
||||
{
|
||||
"context": "ProjectPanel && not_editing",
|
||||
"bindings": {
|
||||
@@ -626,7 +611,9 @@
|
||||
"context": "Terminal",
|
||||
"bindings": {
|
||||
"ctrl-alt-space": "terminal::ShowCharacterPalette",
|
||||
"ctrl-shift-c": "terminal::Copy",
|
||||
"ctrl-insert": "terminal::Copy",
|
||||
"ctrl-shift-v": "terminal::Paste",
|
||||
"shift-insert": "terminal::Paste",
|
||||
"ctrl-enter": "assistant::InlineAssist",
|
||||
// Overrides for conflicting keybindings
|
||||
@@ -650,13 +637,5 @@
|
||||
"shift-home": "terminal::ScrollToTop",
|
||||
"shift-end": "terminal::ScrollToBottom"
|
||||
}
|
||||
},
|
||||
{
|
||||
// Separate block with same context so these display in context menus
|
||||
"context": "Terminal",
|
||||
"bindings": {
|
||||
"ctrl-shift-c": "terminal::Copy",
|
||||
"ctrl-shift-v": "terminal::Paste"
|
||||
}
|
||||
}
|
||||
]
|
||||
|
||||
@@ -191,8 +191,7 @@
|
||||
"cmd-shift-g": "search::SelectPrevMatch",
|
||||
"alt-m": "assistant::ToggleModelSelector",
|
||||
"cmd-k h": "assistant::DeployHistory",
|
||||
"cmd-k l": "assistant::DeployPromptLibrary",
|
||||
"cmd-n": "assistant::NewContext"
|
||||
"cmd-k l": "assistant::DeployPromptLibrary"
|
||||
}
|
||||
},
|
||||
{
|
||||
@@ -299,8 +298,7 @@
|
||||
"alt-cmd-c": "search::ToggleCaseSensitive",
|
||||
"alt-cmd-w": "search::ToggleWholeWord",
|
||||
"alt-cmd-f": "project_search::ToggleFilters",
|
||||
"alt-cmd-x": "search::ToggleRegex",
|
||||
"cmd-k shift-enter": "pane::TogglePinTab"
|
||||
"alt-cmd-x": "search::ToggleRegex"
|
||||
}
|
||||
},
|
||||
// Bindings from VS Code
|
||||
@@ -530,7 +528,7 @@
|
||||
}
|
||||
},
|
||||
{
|
||||
"context": "OutlinePanel && not_editing",
|
||||
"context": "OutlinePanel",
|
||||
"bindings": {
|
||||
"escape": "menu::Cancel",
|
||||
"left": "outline_panel::CollapseSelectedEntry",
|
||||
@@ -563,8 +561,8 @@
|
||||
"cmd-backspace": ["project_panel::Trash", { "skip_prompt": true }],
|
||||
"cmd-delete": ["project_panel::Delete", { "skip_prompt": false }],
|
||||
"alt-cmd-r": "project_panel::RevealInFileManager",
|
||||
"ctrl-shift-enter": "project_panel::OpenWithSystem",
|
||||
"cmd-alt-backspace": ["project_panel::Delete", { "skip_prompt": false }],
|
||||
|
||||
"alt-shift-f": "project_panel::NewSearchInDirectory",
|
||||
"shift-down": "menu::SelectNext",
|
||||
"shift-up": "menu::SelectPrev",
|
||||
|
||||
@@ -29,8 +29,6 @@
|
||||
"shift-g": "vim::EndOfDocument",
|
||||
"{": "vim::StartOfParagraph",
|
||||
"}": "vim::EndOfParagraph",
|
||||
"(": "vim::SentenceBackward",
|
||||
")": "vim::SentenceForward",
|
||||
"|": "vim::GoToColumn",
|
||||
// Word motions
|
||||
"w": "vim::NextWordStart",
|
||||
@@ -214,7 +212,7 @@
|
||||
"shift-d": "vim::DeleteToEndOfLine",
|
||||
"shift-j": "vim::JoinLines",
|
||||
"y": ["vim::PushOperator", "Yank"],
|
||||
"shift-y": "vim::YankLine",
|
||||
"shift-y": "vim::YankToEndOfLine",
|
||||
"i": "vim::InsertBefore",
|
||||
"shift-i": "vim::InsertFirstNonWhitespace",
|
||||
"a": "vim::InsertAfter",
|
||||
@@ -326,13 +324,6 @@
|
||||
"ctrl-r": ["vim::PushOperator", "Register"]
|
||||
}
|
||||
},
|
||||
{
|
||||
"context": "vim_mode == insert && !(showing_code_actions || showing_completions)",
|
||||
"bindings": {
|
||||
"ctrl-p": "editor::ShowCompletions",
|
||||
"ctrl-n": "editor::ShowCompletions"
|
||||
}
|
||||
},
|
||||
{
|
||||
"context": "vim_mode == replace",
|
||||
"bindings": {
|
||||
@@ -401,9 +392,7 @@
|
||||
"context": "vim_operator == d",
|
||||
"bindings": {
|
||||
"d": "vim::CurrentLine",
|
||||
"s": ["vim::PushOperator", "DeleteSurrounds"],
|
||||
"o": "editor::ToggleHunkDiff", // "d o"
|
||||
"p": "editor::RevertSelectedHunks" // "d p"
|
||||
"s": ["vim::PushOperator", "DeleteSurrounds"]
|
||||
}
|
||||
},
|
||||
{
|
||||
@@ -493,7 +482,6 @@
|
||||
"v": "project_panel::OpenPermanent",
|
||||
"p": "project_panel::Open",
|
||||
"x": "project_panel::RevealInFileManager",
|
||||
"s": "project_panel::OpenWithSystem",
|
||||
"shift-g": "menu::SelectLast",
|
||||
"g g": "menu::SelectFirst",
|
||||
"-": "project_panel::SelectParent",
|
||||
@@ -501,7 +489,7 @@
|
||||
}
|
||||
},
|
||||
{
|
||||
"context": "OutlinePanel && not_editing",
|
||||
"context": "OutlinePanel",
|
||||
"bindings": {
|
||||
"j": "menu::SelectNext",
|
||||
"k": "menu::SelectPrev",
|
||||
|
||||
@@ -1,312 +1,302 @@
|
||||
<task_description>
|
||||
<workflow>
|
||||
Guide the user through code changes in numbered steps that focus on individual functions, type definitions, etc.
|
||||
Surround each distinct step in a <step></step> XML tag. The user will be performing these steps in a code editor
|
||||
named Zed, which is where they will have entered this prompt and will be seeing the response.
|
||||
|
||||
# Code Change Workflow
|
||||
<instructions>
|
||||
- Use the language of the file for code fence blocks unless otherwise specified.
|
||||
- Include a code or file action in each step.
|
||||
- Only put code in separate steps if it should either go in separate files, or in different (non-contiguous) places in the same file.
|
||||
- Provide error handling and input validation where appropriate.
|
||||
- Adapt explanations based on the user's perceived level of expertise.
|
||||
- Include comments in code examples to enhance understanding.
|
||||
- Consider more complex programming scenarios when relevant.
|
||||
- Avoid using the terminal to perform filesystem operations such as creating, deleting, renaming, and editing files. Assume the user prefers to use Zed for these operations, unless the prompt suggests that the user is expecting terminal commands.
|
||||
- When creating a new file, Zed will automatically create any necessary directories in that file's path. So when a step will create a new file in Zed, don't mention needing to create the directory that it will go in.
|
||||
- Assume the user is performing these actions in the Zed code editor, so avoid redundancies like "In your code editor, ..." or "In Zed, ..."
|
||||
- Be concise without leaving out important information.
|
||||
</instructions>
|
||||
|
||||
Your task is to guide the user through code changes using a series of steps. Each step should describe a high-level change, which can consist of multiple edits to distinct locations in the codebase.
|
||||
Examples of user-assistant dialogs with step numbers and user follow-up requests:
|
||||
|
||||
## Output Example
|
||||
|
||||
Provide output as XML, with the following format:
|
||||
|
||||
<step>
|
||||
Update the Person struct to store an age
|
||||
<example language="rust">
|
||||
<turn number="1">
|
||||
<user>Create a Library struct with a vector of Book structs. Include methods to add books and find books by author. Use generics for the book ID.</user>
|
||||
<assistant>Certainly! Let's create a Library struct with a vector of Book structs, including methods to add books and find books by author, using generics for the book ID:
|
||||
|
||||
<step>Define the Book and Library structs
|
||||
```rust
|
||||
struct Person {
|
||||
// existing fields...
|
||||
age: u8,
|
||||
height: f32,
|
||||
// existing fields...
|
||||
use std::fmt::Display;
|
||||
|
||||
struct Book<T>
|
||||
where
|
||||
T: Display + Clone
|
||||
{
|
||||
id: T,
|
||||
title: String,
|
||||
author: String,
|
||||
}
|
||||
|
||||
impl Person {
|
||||
fn age(&self) -> u8 {
|
||||
self.age
|
||||
struct Library<T>
|
||||
where
|
||||
T: Display + Clone
|
||||
{
|
||||
books: Vec<Book<T>>,
|
||||
}
|
||||
```
|
||||
</step>
|
||||
<step>Implement methods for the Library struct
|
||||
```rust
|
||||
impl<T> Library<T>
|
||||
where
|
||||
T: Display + Clone
|
||||
{
|
||||
fn new() -> Self {
|
||||
Library { books: Vec::new() }
|
||||
}
|
||||
|
||||
fn add_book(&mut self, id: T, title: String, author: String) {
|
||||
let book = Book { id, title, author };
|
||||
self.books.push(book);
|
||||
}
|
||||
|
||||
fn find_books_by_author(&self, author: &str) -> Vec<&Book<T>> {
|
||||
self.books
|
||||
.iter()
|
||||
.filter(|book| book.author.to_lowercase() == author.to_lowercase())
|
||||
.collect()
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
<edit>
|
||||
<path>src/person.rs</path>
|
||||
<operation>insert_before</operation>
|
||||
<search>height: f32,</search>
|
||||
<description>Add the age field</description>
|
||||
</edit>
|
||||
|
||||
<edit>
|
||||
<path>src/person.rs</path>
|
||||
<operation>insert_after</operation>
|
||||
<search>impl Person {</search>
|
||||
<description>Add the age getter</description>
|
||||
</edit>
|
||||
</step>
|
||||
|
||||
## Output Format
|
||||
This code defines:
|
||||
1. A generic Book struct with an ID of type T, title, and author
|
||||
2. A Library struct containing a vector of Books
|
||||
3. Methods to create a new Library, add books, and find books by author
|
||||
The use of generics allows flexibility in the type of ID used for books.</assistant>
|
||||
</turn>
|
||||
|
||||
First, each `<step>` must contain a written description of the change that should be made. The description should begin with a high-level overview, and can contain markdown code blocks as well. The description should be self-contained and actionable.
|
||||
|
||||
After the description, each `<step>` must contain one or more `<edit>` tags, each of which refer to a specific range in a source file. Each `<edit>` tag must contain the following child tags:
|
||||
|
||||
### `<path>` (required)
|
||||
|
||||
This tag contains the path to the file that will be changed. It can be an existing path, or a path that should be created.
|
||||
|
||||
### `<search>` (optional)
|
||||
|
||||
This tag contains a search string to locate in the source file, e.g. `pub fn baz() {`. If not provided, the new content will be inserted at the top of the file. Make sure to produce a string that exists in the source file and that isn't ambiguous. When there's ambiguity, add more lines to the search to eliminate it.
|
||||
|
||||
### `<description>` (required)
|
||||
|
||||
This tag contains a single-line description of the edit that should be made at the given location.
|
||||
|
||||
### `<operation>` (required)
|
||||
|
||||
This tag indicates what type of change should be made, relative to the given location. It can be one of the following:
|
||||
- `update`: Rewrites the specified string entirely based on the given description.
|
||||
- `create`: Creates a new file with the given path based on the provided description.
|
||||
- `insert_before`: Inserts new text based on the given description before the specified search string.
|
||||
- `insert_after`: Inserts new text based on the given description after the specified search string.
|
||||
- `delete`: Deletes the specified string from the containing file.
|
||||
|
||||
<guidelines>
|
||||
- There's no need to describe *what* to do, just *where* to do it.
|
||||
- Only reference locations that actually exist (unless you're creating a file).
|
||||
- If creating a file, assume any subsequent updates are included at the time of creation.
|
||||
- Don't create and then update a file. Always create new files in one hot.
|
||||
- Prefer multiple edits to smaller regions, as opposed to one big edit to a larger region.
|
||||
- Don't produce edits that intersect each other. In that case, merge them into a bigger edit.
|
||||
- Never nest an edit with another edit. Never include CDATA. All edits are leaf nodes.
|
||||
- Descriptions are required for all edits except delete.
|
||||
- When generating multiple edits, ensure the descriptions are specific to each individual operation.
|
||||
- Avoid referring to the search string in the description. Focus on the change to be made, not the location where it's made. That's implicit with the `search` string you provide.
|
||||
- Don't generate multiple edits at the same location. Instead, combine them together in a single edit with a succinct combined description.
|
||||
- Always ensure imports are added if you're referencing symbols that are not in scope.
|
||||
</guidelines>
|
||||
|
||||
Here are some concrete examples.
|
||||
|
||||
<example>
|
||||
<message role="user">
|
||||
|
||||
```rs src/shapes.rs
|
||||
pub mod rectangle;
|
||||
pub mod circle;
|
||||
```
|
||||
|
||||
```rs src/shapes/rectangle.rs
|
||||
pub struct Rectangle {
|
||||
width: f64,
|
||||
height: f64,
|
||||
}
|
||||
|
||||
impl Rectangle {
|
||||
pub fn new(width: f64, height: f64) -> Self {
|
||||
Rectangle { width, height }
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
```rs src/shapes/circle.rs
|
||||
pub struct Circle {
|
||||
radius: f64,
|
||||
}
|
||||
|
||||
impl Circle {
|
||||
pub fn new(radius: f64) -> Self {
|
||||
Circle { radius }
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
Update all shapes to store their origin as an (x, y) tuple and implement Display.
|
||||
</message>
|
||||
|
||||
<message role="assistant">
|
||||
We'll need to update both the rectangle and circle modules.
|
||||
|
||||
<step>
|
||||
Add origin fields to both shape types.
|
||||
<turn number="2">
|
||||
<user>How can I use this Library struct in a main function?</user>
|
||||
<assistant>Here's how you can use the Library struct in a main function:
|
||||
|
||||
<step>Create a main function to demonstrate Library usage
|
||||
```rust
|
||||
struct Rectangle {
|
||||
// existing fields ...
|
||||
origin: (f64, f64),
|
||||
fn main() {
|
||||
// Create a new Library with string IDs
|
||||
let mut library = Library::new();
|
||||
|
||||
// Add some books
|
||||
library.add_book(String::from("001"), String::from("The Rust Programming Language"), String::from("Steve Klabnik"));
|
||||
library.add_book(String::from("002"), String::from("Programming Rust"), String::from("Jim Blandy"));
|
||||
library.add_book(String::from("003"), String::from("Rust in Action"), String::from("Tim McNamara"));
|
||||
|
||||
// Find books by author
|
||||
let author = "Steve Klabnik";
|
||||
let books = library.find_books_by_author(author);
|
||||
|
||||
println!("Books by {}:", author);
|
||||
for book in books {
|
||||
println!("ID: {}, Title: {}", book.id, book.title);
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
```rust
|
||||
struct Circle {
|
||||
// existing fields ...
|
||||
origin: (f64, f64),
|
||||
}
|
||||
```
|
||||
|
||||
<edit>
|
||||
<path>src/shapes/rectangle.rs</path>
|
||||
<operation>insert_before</operation>
|
||||
<search>
|
||||
width: f64,
|
||||
height: f64,
|
||||
</search>
|
||||
<description>Add the origin field to Rectangle</description>
|
||||
</edit>
|
||||
|
||||
<edit>
|
||||
<path>src/shapes/circle.rs</path>
|
||||
<operation>insert_before</operation>
|
||||
<search>
|
||||
radius: f64,
|
||||
</search>
|
||||
<description>Add the origin field to Circle</description>
|
||||
</edit>
|
||||
|
||||
<step>
|
||||
Update both shape's constructors to take an origin.
|
||||
|
||||
<edit>
|
||||
<path>src/shapes/rectangle.rs</path>
|
||||
<operation>update</operation>
|
||||
<search>
|
||||
fn new(width: f64, height: f64) -> Self {
|
||||
Rectangle { width, height }
|
||||
}
|
||||
</search>
|
||||
<description>Update the Rectangle new function to take an origin</description>
|
||||
</edit>
|
||||
|
||||
<edit>
|
||||
<path>src/shapes/circle.rs</path>
|
||||
<operation>update</operation>
|
||||
<search>
|
||||
fn new(radius: f64) -> Self {
|
||||
Circle { radius }
|
||||
}
|
||||
</search>
|
||||
<description>Update the Circle new function to take an origin</description>
|
||||
</edit>
|
||||
</step>
|
||||
|
||||
<step>
|
||||
Implement Display for both shapes
|
||||
This main function demonstrates:
|
||||
1. Creating a new Library instance
|
||||
2. Adding books with string IDs
|
||||
3. Finding and displaying books by a specific author</assistant>
|
||||
</turn>
|
||||
|
||||
<edit>
|
||||
<path>src/shapes/rectangle.rs</path>
|
||||
<operation>insert_before</operation>
|
||||
<search>
|
||||
struct Rectangle {
|
||||
</search>
|
||||
<description>Add an import for the `std::fmt` module</description>
|
||||
</edit>
|
||||
<turn number="3">
|
||||
<user>Can you add a method to remove a book by ID and handle potential errors?</user>
|
||||
<assistant>Certainly! Let's add a method to remove a book by ID and handle potential errors:
|
||||
|
||||
<edit>
|
||||
<path>src/shapes/rectangle.rs</path>
|
||||
<operation>insert_after</operation>
|
||||
<search>
|
||||
Rectangle { width, height }
|
||||
<step>Add a remove_book method to the Library struct
|
||||
```rust
|
||||
use std::fmt::Display;
|
||||
use std::error::Error;
|
||||
use std::fmt;
|
||||
|
||||
#[derive(Debug)]
|
||||
struct BookNotFoundError;
|
||||
|
||||
impl Error for BookNotFoundError {}
|
||||
|
||||
impl fmt::Display for BookNotFoundError {
|
||||
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
||||
write!(f, "Book not found")
|
||||
}
|
||||
}
|
||||
</search>
|
||||
<description>Add a Display implementation for Rectangle</description>
|
||||
</edit>
|
||||
|
||||
<edit>
|
||||
<path>src/shapes/circle.rs</path>
|
||||
<operation>insert_before</operation>
|
||||
<search>
|
||||
struct Circle {
|
||||
</search>
|
||||
<description>Add an import for the `std::fmt` module</description>
|
||||
</edit>
|
||||
impl<T> Library<T>
|
||||
where
|
||||
T: Display + Clone + PartialEq
|
||||
{
|
||||
// ... (previous methods remain the same)
|
||||
|
||||
<edit>
|
||||
<path>src/shapes/circle.rs</path>
|
||||
<operation>insert_after</operation>
|
||||
<search>
|
||||
Circle { radius }
|
||||
fn remove_book(&mut self, id: &T) -> Result<Book<T>, BookNotFoundError> {
|
||||
if let Some(index) = self.books.iter().position(|book| &book.id == id) {
|
||||
Ok(self.books.remove(index))
|
||||
} else {
|
||||
Err(BookNotFoundError)
|
||||
}
|
||||
}
|
||||
}
|
||||
</search>
|
||||
<description>Add a Display implementation for Circle</description>
|
||||
</edit>
|
||||
```
|
||||
</step>
|
||||
</message>
|
||||
<step>Update the main function to demonstrate book removal
|
||||
```rust
|
||||
fn main() {
|
||||
// ... (previous code remains the same)
|
||||
|
||||
// Attempt to remove a book
|
||||
let id_to_remove = String::from("002");
|
||||
match library.remove_book(&id_to_remove) {
|
||||
Ok(removed_book) => println!("Removed book: {} by {}", removed_book.title, removed_book.author),
|
||||
Err(e) => println!("Error removing book: {}", e),
|
||||
}
|
||||
|
||||
// Attempt to remove a non-existent book
|
||||
let non_existent_id = String::from("999");
|
||||
match library.remove_book(&non_existent_id) {
|
||||
Ok(_) => println!("Book removed successfully"),
|
||||
Err(e) => println!("Error removing book: {}", e),
|
||||
}
|
||||
|
||||
// Display remaining books
|
||||
println!("\nRemaining books:");
|
||||
for book in &library.books {
|
||||
println!("ID: {}, Title: {} by {}", book.id, book.title, book.author);
|
||||
}
|
||||
}
|
||||
```
|
||||
</step>
|
||||
|
||||
This update includes:
|
||||
1. A new remove_book method that returns a Result type
|
||||
2. A custom BookNotFoundError for error handling
|
||||
3. Demonstration of successful book removal and error handling for non-existent books
|
||||
4. Display of remaining books after removal attempts
|
||||
|
||||
The remove_book method uses the PartialEq trait to compare book IDs, allowing for flexible ID types while maintaining type safety.</assistant>
|
||||
</turn>
|
||||
</example>
|
||||
|
||||
<example number=2>
|
||||
<message role="user">
|
||||
<example language="go">
|
||||
<turn number="1">
|
||||
<user>Create a Person struct with name, age, and email fields, and a method to greet. Include error handling for age.</user>
|
||||
<assistant>Certainly! Let's create a Person struct with the specified fields and a method to greet in Go:
|
||||
|
||||
```rs src/user.rs
|
||||
struct User {
|
||||
pub name: String,
|
||||
age: u32,
|
||||
email: String,
|
||||
<step>Define the Person struct and NewPerson function with error handling
|
||||
```go
|
||||
type Person struct {
|
||||
Name string
|
||||
Age int
|
||||
Email string
|
||||
}
|
||||
|
||||
impl User {
|
||||
fn new(name: String, age: u32, email: String) -> Self {
|
||||
User { name, age, email }
|
||||
func NewPerson(name string, age int, email string) (*Person, error) {
|
||||
if age < 0 || age > 150 {
|
||||
return nil, fmt.Errorf("invalid age: %d", age)
|
||||
}
|
||||
return &Person{Name: name, Age: age, Email: email}, nil
|
||||
}
|
||||
```
|
||||
</step>
|
||||
<step>Implement the Greet method for the Person struct
|
||||
```go
|
||||
func (p *Person) Greet() string {
|
||||
return fmt.Sprintf("Hello, I'm %s, %d years old. You can reach me at %s.", p.Name, p.Age, p.Email)
|
||||
}
|
||||
```
|
||||
</step>
|
||||
|
||||
This code defines a Person struct, a NewPerson function with age validation, and a Greet method.</assistant>
|
||||
</turn>
|
||||
|
||||
<turn number="2">
|
||||
<user>How can I use these in a main function?</user>
|
||||
<assistant>Here's how to use the Person struct and its methods in a main function:
|
||||
|
||||
<step>Create a main function with error handling
|
||||
```go
|
||||
func main() {
|
||||
// Create a new Person instance
|
||||
person, err := NewPerson("Alice", 30, "alice@example.com")
|
||||
if err != nil {
|
||||
fmt.Println("Error creating person:", err)
|
||||
return
|
||||
}
|
||||
|
||||
pub fn print_info(&self) {
|
||||
todo!()
|
||||
// Use the Greet method
|
||||
fmt.Println(person.Greet())
|
||||
|
||||
// Demonstrate error handling with invalid age
|
||||
invalidPerson, err := NewPerson("Bob", 200, "bob@example.com")
|
||||
if err != nil {
|
||||
fmt.Println("Error creating person with invalid age:", err)
|
||||
}
|
||||
}
|
||||
```
|
||||
</step>
|
||||
|
||||
Let's print all the user information and delete the email field.
|
||||
</message>
|
||||
This main function demonstrates:
|
||||
1. Creating a valid Person instance
|
||||
2. Using the Greet method
|
||||
3. Handling errors when creating a Person with an invalid age</assistant>
|
||||
</turn>
|
||||
|
||||
<message role="assistant">
|
||||
<step>
|
||||
Update the 'print_info' method to print user information
|
||||
<turn number="3">
|
||||
<user>Can you add a method to update the email with validation?</user>
|
||||
<assistant>Certainly! Let's add a method to update the email with validation:
|
||||
|
||||
```rust
|
||||
impl User {
|
||||
// ... other methods ...
|
||||
<step>Add UpdateEmail method with validation
|
||||
```go
|
||||
import (
|
||||
"fmt"
|
||||
"regexp"
|
||||
)
|
||||
|
||||
pub fn print_info(&self) {
|
||||
println!("Name: {name}, Age: {age}", name = self.name, age = self.age);
|
||||
func (p *Person) UpdateEmail(newEmail string) error {
|
||||
// Simple email validation using regex
|
||||
emailRegex := regexp.MustCompile(`^[a-zA-Z0-9._%+-]+@[a-zA-Z0-9.-]+\.[a-zA-Z]{2,}$`)
|
||||
if !emailRegex.MatchString(newEmail) {
|
||||
return fmt.Errorf("invalid email format: %s", newEmail)
|
||||
}
|
||||
p.Email = newEmail
|
||||
return nil
|
||||
}
|
||||
```
|
||||
</step>
|
||||
<step>Update the main function to demonstrate email update
|
||||
```go
|
||||
func main() {
|
||||
// ... (previous code remains the same)
|
||||
|
||||
// Demonstrate email update
|
||||
err = person.UpdateEmail("alice.new@example.com")
|
||||
if err != nil {
|
||||
fmt.Println("Error updating email:", err)
|
||||
} else {
|
||||
fmt.Println("Updated greeting:", person.Greet())
|
||||
}
|
||||
|
||||
// Demonstrate email update with invalid email
|
||||
err = person.UpdateEmail("invalid-email")
|
||||
if err != nil {
|
||||
fmt.Println("Error updating email:", err)
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
<edit>
|
||||
<path>src/user.rs</path>
|
||||
<operation>update</operation>
|
||||
<search>
|
||||
pub fn print_info(&self) {
|
||||
todo!()
|
||||
}
|
||||
</search>
|
||||
<description>Print all the user information</description>
|
||||
</edit>
|
||||
</step>
|
||||
|
||||
<step>
|
||||
Remove the 'email' field from the User struct
|
||||
|
||||
<edit>
|
||||
<path>src/user.rs</path>
|
||||
<operation>delete</operation>
|
||||
<search>
|
||||
email: String,
|
||||
</search>
|
||||
</edit>
|
||||
|
||||
<edit>
|
||||
<path>src/user.rs</path>
|
||||
<operation>update</operation>
|
||||
<symbol>
|
||||
fn new(name: String, age: u32, email: String) -> Self {
|
||||
User { name, age, email }
|
||||
}
|
||||
</symbol>
|
||||
<description>Remove email parameter from new method</description>
|
||||
</edit>
|
||||
</step>
|
||||
</message>
|
||||
This update includes:
|
||||
1. An UpdateEmail method with email format validation
|
||||
2. Demonstration of successful email update in the main function
|
||||
3. Handling of invalid email update attempt</assistant>
|
||||
</turn>
|
||||
</example>
|
||||
|
||||
You should think step by step. When possible, produce smaller, coherent logical steps as opposed to one big step that combines lots of heterogeneous edits.
|
||||
|
||||
</task_description>
|
||||
</workflow>
|
||||
|
||||
@@ -227,8 +227,6 @@
|
||||
// Whether to show diagnostic indicators in the scrollbar.
|
||||
"diagnostics": true
|
||||
},
|
||||
// Enable middle-click paste on Linux.
|
||||
"middle_click_paste": true,
|
||||
// What to do when multibuffer is double clicked in some of its excerpts
|
||||
// (parts of singleton buffers).
|
||||
// May take 2 values:
|
||||
@@ -279,13 +277,6 @@
|
||||
"relative_line_numbers": false,
|
||||
// If 'search_wrap' is disabled, search result do not wrap around the end of the file.
|
||||
"search_wrap": true,
|
||||
// Search options to enable by default when opening new project and buffer searches.
|
||||
"search": {
|
||||
"whole_word": false,
|
||||
"case_sensitive": false,
|
||||
"include_ignored": false,
|
||||
"regex": false
|
||||
},
|
||||
// When to populate a new search's query based on the text under the cursor.
|
||||
// This setting can take the following three values:
|
||||
//
|
||||
@@ -296,7 +287,6 @@
|
||||
// 3. Never populate the search query
|
||||
// "never"
|
||||
"seed_search_query_from_cursor": "always",
|
||||
"use_smartcase_search": false,
|
||||
// Inlay hint related settings
|
||||
"inlay_hints": {
|
||||
// Global switch to toggle hints on and off, switched off by default.
|
||||
@@ -611,7 +601,7 @@
|
||||
// "shell": {
|
||||
// "with_arguments": {
|
||||
// "program": "/bin/bash",
|
||||
// "args": ["--login"]
|
||||
// "arguments": ["--login"]
|
||||
// }
|
||||
// }
|
||||
"shell": "system",
|
||||
|
||||
@@ -35,7 +35,7 @@
|
||||
// "shell": {
|
||||
// "with_arguments": {
|
||||
// "program": "/bin/bash",
|
||||
// "args": ["--login"]
|
||||
// "arguments": ["--login"]
|
||||
// }
|
||||
// }
|
||||
"shell": "system"
|
||||
|
||||
@@ -1,5 +1,4 @@
|
||||
{
|
||||
"$schema": "https://zed.dev/schema/themes/v0.1.0.json",
|
||||
"name": "Andromeda",
|
||||
"author": "Zed Industries",
|
||||
"themes": [
|
||||
|
||||
@@ -1,5 +1,4 @@
|
||||
{
|
||||
"$schema": "https://zed.dev/schema/themes/v0.1.0.json",
|
||||
"name": "Atelier",
|
||||
"author": "Zed Industries",
|
||||
"themes": [
|
||||
|
||||
@@ -1,5 +1,4 @@
|
||||
{
|
||||
"$schema": "https://zed.dev/schema/themes/v0.1.0.json",
|
||||
"name": "Ayu",
|
||||
"author": "Zed Industries",
|
||||
"themes": [
|
||||
|
||||
@@ -1,5 +1,4 @@
|
||||
{
|
||||
"$schema": "https://zed.dev/schema/themes/v0.1.0.json",
|
||||
"name": "Gruvbox",
|
||||
"author": "Zed Industries",
|
||||
"themes": [
|
||||
|
||||
@@ -1,5 +1,4 @@
|
||||
{
|
||||
"$schema": "https://zed.dev/schema/themes/v0.1.0.json",
|
||||
"name": "One",
|
||||
"author": "Zed Industries",
|
||||
"themes": [
|
||||
|
||||
@@ -1,5 +1,4 @@
|
||||
{
|
||||
"$schema": "https://zed.dev/schema/themes/v0.1.0.json",
|
||||
"name": "Rosé Pine",
|
||||
"author": "Zed Industries",
|
||||
"themes": [
|
||||
|
||||
@@ -1,5 +1,4 @@
|
||||
{
|
||||
"$schema": "https://zed.dev/schema/themes/v0.1.0.json",
|
||||
"name": "Sandcastle",
|
||||
"author": "Zed Industries",
|
||||
"themes": [
|
||||
|
||||
@@ -1,5 +1,4 @@
|
||||
{
|
||||
"$schema": "https://zed.dev/schema/themes/v0.1.0.json",
|
||||
"name": "Solarized",
|
||||
"author": "Zed Industries",
|
||||
"themes": [
|
||||
|
||||
@@ -1,5 +1,4 @@
|
||||
{
|
||||
"$schema": "https://zed.dev/schema/themes/v0.1.0.json",
|
||||
"name": "Summercamp",
|
||||
"author": "Zed Industries",
|
||||
"themes": [
|
||||
|
||||
@@ -1 +0,0 @@
|
||||
allow-private-module-inception = true
|
||||
@@ -150,7 +150,7 @@ impl ActivityIndicator {
|
||||
) -> impl Iterator<Item = PendingWork<'a>> {
|
||||
self.project
|
||||
.read(cx)
|
||||
.language_server_statuses(cx)
|
||||
.language_server_statuses()
|
||||
.rev()
|
||||
.filter_map(|(server_id, status)| {
|
||||
if status.pending_work.is_empty() {
|
||||
|
||||
@@ -1,8 +1,5 @@
|
||||
mod supported_countries;
|
||||
|
||||
use std::time::Duration;
|
||||
use std::{pin::Pin, str::FromStr};
|
||||
|
||||
use anyhow::{anyhow, Context, Result};
|
||||
use chrono::{DateTime, Utc};
|
||||
use futures::{io::BufReader, stream::BoxStream, AsyncBufReadExt, AsyncReadExt, Stream, StreamExt};
|
||||
@@ -10,13 +7,15 @@ use http_client::{AsyncBody, HttpClient, Method, Request as HttpRequest};
|
||||
use isahc::config::Configurable;
|
||||
use isahc::http::{HeaderMap, HeaderValue};
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::time::Duration;
|
||||
use std::{pin::Pin, str::FromStr};
|
||||
use strum::{EnumIter, EnumString};
|
||||
use thiserror::Error;
|
||||
use util::ResultExt as _;
|
||||
|
||||
pub use supported_countries::*;
|
||||
|
||||
pub const ANTHROPIC_API_URL: &str = "https://api.anthropic.com";
|
||||
pub const ANTHROPIC_API_URL: &'static str = "https://api.anthropic.com";

#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))]
#[derive(Clone, Debug, Default, Serialize, Deserialize, PartialEq)]
@@ -331,6 +330,28 @@ pub async fn stream_completion_with_rate_limit_info(
    }
}

pub fn extract_text_from_events(
    response: impl Stream<Item = Result<Event, AnthropicError>>,
) -> impl Stream<Item = Result<String, AnthropicError>> {
    response.filter_map(|response| async move {
        match response {
            Ok(response) => match response {
                Event::ContentBlockStart { content_block, .. } => match content_block {
                    Content::Text { text, .. } => Some(Ok(text)),
                    _ => None,
                },
                Event::ContentBlockDelta { delta, .. } => match delta {
                    ContentDelta::TextDelta { text } => Some(Ok(text)),
                    _ => None,
                },
                Event::Error { error } => Some(Err(AnthropicError::ApiError(error))),
                _ => None,
            },
            Err(error) => Some(Err(error)),
        }
    })
}

pub async fn extract_tool_args_from_events(
    tool_name: String,
    mut events: Pin<Box<dyn Send + Stream<Item = Result<Event>>>>,
@@ -339,12 +360,14 @@ pub async fn extract_tool_args_from_events(
    while let Some(event) = events.next().await {
        if let Event::ContentBlockStart {
            index,
            content_block: ResponseContent::ToolUse { name, .. },
            content_block,
        } = event?
        {
            if name == tool_name {
                tool_use_index = Some(index);
                break;
            if let Content::ToolUse { name, .. } = content_block {
                if name == tool_name {
                    tool_use_index = Some(index);
                    break;
                }
            }
        }
    }
@@ -388,7 +411,7 @@ pub struct CacheControl {
#[derive(Debug, Serialize, Deserialize)]
pub struct Message {
    pub role: Role,
    pub content: Vec<RequestContent>,
    pub content: Vec<Content>,
}

#[derive(Debug, Serialize, Deserialize, Eq, PartialEq, Hash)]
@@ -400,7 +423,7 @@ pub enum Role {

#[derive(Debug, Serialize, Deserialize)]
#[serde(tag = "type")]
pub enum RequestContent {
pub enum Content {
    #[serde(rename = "text")]
    Text {
        text: String,
@@ -424,26 +447,12 @@ pub enum RequestContent {
    #[serde(rename = "tool_result")]
    ToolResult {
        tool_use_id: String,
        is_error: bool,
        content: String,
        #[serde(skip_serializing_if = "Option::is_none")]
        cache_control: Option<CacheControl>,
    },
}

#[derive(Debug, Serialize, Deserialize)]
#[serde(tag = "type")]
pub enum ResponseContent {
    #[serde(rename = "text")]
    Text { text: String },
    #[serde(rename = "tool_use")]
    ToolUse {
        id: String,
        name: String,
        input: serde_json::Value,
    },
}

#[derive(Debug, Serialize, Deserialize)]
pub struct ImageSource {
    #[serde(rename = "type")]
@@ -516,7 +525,7 @@ pub struct Response {
    #[serde(rename = "type")]
    pub response_type: String,
    pub role: Role,
    pub content: Vec<ResponseContent>,
    pub content: Vec<Content>,
    pub model: String,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub stop_reason: Option<String>,
@@ -533,7 +542,7 @@ pub enum Event {
    #[serde(rename = "content_block_start")]
    ContentBlockStart {
        index: usize,
        content_block: ResponseContent,
        content_block: Content,
    },
    #[serde(rename = "content_block_delta")]
    ContentBlockDelta { index: usize, delta: ContentDelta },
@@ -608,6 +617,9 @@ impl ApiError {
    }

    pub fn is_rate_limit_error(&self) -> bool {
        matches!(self.error_type.as_str(), "rate_limit_error")
        match self.error_type.as_str() {
            "rate_limit_error" => true,
            _ => false,
        }
    }
}

@@ -25,7 +25,6 @@ anthropic = { workspace = true, features = ["schemars"] }
anyhow.workspace = true
assets.workspace = true
assistant_slash_command.workspace = true
assistant_tool.workspace = true
async-watch.workspace = true
cargo_toml.workspace = true
chrono.workspace = true
@@ -74,7 +73,6 @@ settings.workspace = true
similar.workspace = true
smallvec.workspace = true
smol.workspace = true
strum.workspace = true
telemetry_events.workspace = true
terminal.workspace = true
terminal_view.workspace = true
63
crates/assistant/README.zmd
Normal file
@@ -0,0 +1,63 @@
|
||||
> Hello, I'm Nathan Sobo.
|
||||
I'm currently your sole creator and the leader of Zed Industries.
|
||||
Our goal is to create a shared mental model, first for ourselves and eventually for the world, that can be used to facilitate human cooperation.
|
||||
|
||||
< Hello, Nathan! Welcome back to Zed. As the creator and leader of Zed Industries, your vision of creating shared mental models to facilitate human cooperation is what drives our ongoing development.
|
||||
|
||||
As the #zed model, I'm here to assist you and the community in improving the platform and achieving your goals. Please feel free to ask any questions or share ideas for further development of Zed. Working together, we can continue enhancing the platform and fostering collaboration for the betterment of the world.
|
||||
|
||||
> I have a bunch of ideas that I'll dump out of my brain and you can help me organize. Today, Zed is a code editor written in Rust whose main selling points are performance, minimal design, and a solid start on real-time collaborative editing that could still use more polish.
|
||||
|
||||
We've decided that our next milestone will be open-sourcing Zed on the Zed platform.
|
||||
|
||||
< Nathan, it's exciting to hear that you have more ideas to enhance Zed! Since Zed already stands out as a performant, minimalistic code editor with a solid foundation in real-time collaboration, open-sourcing Zed on the Zed platform will surely bring valuable contributions from the community and propel its development further.
|
||||
|
||||
Let's organize your ideas and create a plan for your next milestone. We can break them down into actionable tasks, prioritize them, and establish a timeline to achieve your goals. Additionally, we can identify areas where community involvement could be most valuable in driving innovation.
|
||||
|
||||
Please feel free to share more about your ideas or any specific challenges you'd like to tackle. I'm here to help you turn these ideas into a cohesive and successful plan for Zed's future!
|
||||
|
||||
@nathan> #zed, can you describe the conversation UI as you currently understand it.
|
||||
|
||||
< Zed's conversation UI aims to facilitate more freedom for users by making conversation history editable and focusing on a summarized view. Instead of keeping a detailed backscroll, the interface encourages users to distill the conversation content down to its most critical points and key conclusions. This approach ensures that the vital information from the conversation is highlighted and easily accessible, while still maintaining a history that can be linked to when necessary.
|
||||
|
||||
Addressing @nate's concerns, this design choice aims to strike a balance between readability, usability, and information preservation. The summarized view maintains the essence of the conversation without being burdened by extensive backscroll. Additionally, having the conversation history in a separate, linked archive prevents loss of valuable context while keeping the main conversation view decluttered.
|
||||
|
||||
@nathan> What details around the conversation view are worth exploring?
|
||||
|
||||
< @nathan, there are several aspects of the conversation view that are worth exploring and refining:
|
||||
|
||||
1. Editing functionality: Ensuring that all participants can edit and refine the conversation summary effectively while preserving the original context.
|
||||
2. Permissions and access control: Determining how to manage editing rights, user roles, and visibility of the conversation summary and history.
|
||||
3. Navigation and linking: Implementing an intuitive system for navigating between the summarized conversation and linked history or reference materials.
|
||||
4. Visualization and layout: Designing the interface to clearly present the summarized information while providing a visually engaging and organized experience for participants.
|
||||
5. Threaded conversations and branching: Exploring ways to handle sub-conversations, supplementary discussions, or debates within the primary conversation view.
|
||||
|
||||
Considering these aspects will ensure our conversation view design is optimized for usability, collaboration, and accessibility in Zed.
|
||||
|
||||
@nathan> @nate, which of these would you like to explore?
|
||||
|
||||
@nate> 2 feels like it isn't important at the moment, we can explore that later. Let's start with 4, which I think will lead us to discussion 3 and 5.
|
||||
|
||||
#zed share your thoughts on the points we need to consider to design a layout and visualization for a conversation view between you (#zed) and multiple people, or between multiple people and multiple bots (you and other bots).
|
||||
|
||||
@nathan> Agreed. I'm interested in threading I think more than anything. Or 4 yeah. I think we need to scope the threading conversation. Also, asking #zed to propose the solution... not sure it will be that effective but it's worth a try...
|
||||
|
||||
#zed describe a hypothetical conversation UI that meets the description.
|
||||
|
||||
< @nate and @nathan, I understand that you'd like to focus on the layout and visualization for a conversation view that can accommodate multiple people and potentially multiple bots as well. Here's a proposal for a conversation UI that meets the requirements:
|
||||
|
||||
1. Main conversation area: Display the summarized conversation content in a clean and organized manner, ensuring readability and efficient use of space.
|
||||
|
||||
2. Participant avatars and labels: Include user avatars and/or labels to distinguish between users and bots visually, allowing easy identification of each participant's contributions.
|
||||
|
||||
3. Active threads indicator: Designate a clear indicator for ongoing threads, which will attract attention to active sub-conversations or related discussions in the conversation view.
|
||||
|
||||
4. Dynamic branching: Implement a branching visual structure to present threaded conversations, clearly distinguishing sub-conversations, and enabling users to navigate between them smoothly.
|
||||
|
||||
5. Multiple bot interaction support: Facilitate interactions between multiple bots through user-invoked prompts or bot-to-bot communication channels without disrupting the primary conversation flow.
|
||||
|
||||
6. Expandable conversation history: Offer a separate, expandable section or panel to access historical content without cluttering the main conversation view.
|
||||
|
||||
7. Linking and referencing: Implement a system for hyperlinking relevant information, resources, or previous conversations to the current discussion, making it easy for participants to follow and reference the shared context.
|
||||
|
||||
This hypothetical conversation UI aims to address the layout and visualization, threading, and multi-participant aspects of your requirements. Of course, it's just an initial proposal, and we can refine and iterate on it based on your feedback and specific needs.
|
||||
@@ -13,13 +13,11 @@ pub(crate) mod slash_command_picker;
pub mod slash_command_settings;
mod streaming_diff;
mod terminal_inline_assistant;
mod tools;
mod workflow;

pub use assistant_panel::{AssistantPanel, AssistantPanelEvent};
use assistant_settings::AssistantSettings;
use assistant_slash_command::SlashCommandRegistry;
use assistant_tool::ToolRegistry;
use client::{proto, Client};
use command_palette_hooks::CommandPaletteFilter;
pub use context::*;
@@ -27,8 +25,8 @@ use context_servers::ContextServerRegistry;
pub use context_store::*;
use feature_flags::FeatureFlagAppExt;
use fs::Fs;
use gpui::Context as _;
use gpui::{actions, AppContext, Global, SharedString, UpdateGlobal};
use gpui::{impl_actions, Context as _};
use indexed_docs::IndexedDocsRegistry;
pub(crate) use inline_assistant::*;
use language_model::{
@@ -45,7 +43,6 @@ use slash_command::{
    file_command, now_command, project_command, prompt_command, search_command, symbols_command,
    tab_command, terminal_command, workflow_command,
};
use std::path::PathBuf;
use std::sync::Arc;
pub(crate) use streaming_diff::*;
use util::ResultExt;
@@ -66,19 +63,10 @@ actions!(
        DeployHistory,
        DeployPromptLibrary,
        ConfirmCommand,
        NewContext,
        ToggleModelSelector,
    ]
);

#[derive(PartialEq, Clone, Deserialize)]
pub enum InsertDraggedFiles {
    ProjectPaths(Vec<PathBuf>),
    ExternalFiles(Vec<PathBuf>),
}

impl_actions!(assistant, [InsertDraggedFiles]);

const DEFAULT_CONTEXT_LINES: usize = 50;

#[derive(Copy, Clone, Debug, Eq, PartialEq, PartialOrd, Ord, Hash, Serialize, Deserialize)]
@@ -221,11 +209,10 @@ pub fn init(
    })
    .detach();

    context_store::init(&client.clone().into());
    context_store::init(&client);
    prompt_library::init(cx);
    init_language_model_settings(cx);
    assistant_slash_command::init(cx);
    assistant_tool::init(cx);
    assistant_panel::init(cx);
    context_servers::init(cx);

@@ -240,7 +227,6 @@ pub fn init(
    .map(Arc::new)
    .unwrap_or_else(|| Arc::new(prompts::PromptBuilder::new(None).unwrap()));
    register_slash_commands(Some(prompt_builder.clone()), cx);
    register_tools(cx);
    inline_assistant::init(
        fs.clone(),
        prompt_builder.clone(),
@@ -376,7 +362,7 @@ fn register_slash_commands(prompt_builder: Option<Arc<PromptBuilder>>, cx: &mut

    if let Some(prompt_builder) = prompt_builder {
        slash_command_registry.register_command(
            workflow_command::WorkflowSlashCommand::new(prompt_builder.clone()),
            workflow_command::WorkflowSlashCommand::new(prompt_builder),
            true,
        );
    }
@@ -414,11 +400,6 @@ fn update_slash_commands_from_settings(cx: &mut AppContext) {
    }
}

fn register_tools(cx: &mut AppContext) {
    let tool_registry = ToolRegistry::global(cx);
    tool_registry.register_tool(tools::now_tool::NowTool);
}

pub fn humanize_token_count(count: usize) -> String {
    match count {
        0..=999 => count.to_string(),
@@ -1,17 +1,11 @@
|
||||
use std::sync::Arc;
|
||||
|
||||
use ::open_ai::Model as OpenAiModel;
|
||||
use anthropic::Model as AnthropicModel;
|
||||
use fs::Fs;
|
||||
use gpui::{AppContext, Pixels};
|
||||
use language_model::provider::open_ai;
|
||||
use language_model::settings::{
|
||||
AnthropicSettingsContent, AnthropicSettingsContentV1, OllamaSettingsContent,
|
||||
OpenAiSettingsContent, OpenAiSettingsContentV1, VersionedAnthropicSettingsContent,
|
||||
VersionedOpenAiSettingsContent,
|
||||
};
|
||||
use language_model::{settings::AllLanguageModelSettings, CloudModel, LanguageModel};
|
||||
use ollama::Model as OllamaModel;
|
||||
use open_ai::Model as OpenAiModel;
|
||||
use schemars::{schema::Schema, JsonSchema};
|
||||
use serde::{Deserialize, Serialize};
|
||||
use settings::{update_settings_file, Settings, SettingsSources};
|
||||
@@ -115,15 +109,16 @@ impl AssistantSettingsContent {
|
||||
cx,
|
||||
move |content, _| {
|
||||
if content.anthropic.is_none() {
|
||||
content.anthropic = Some(AnthropicSettingsContent::Versioned(
|
||||
VersionedAnthropicSettingsContent::V1(
|
||||
AnthropicSettingsContentV1 {
|
||||
api_url,
|
||||
low_speed_timeout_in_seconds,
|
||||
available_models: None,
|
||||
},
|
||||
),
|
||||
));
|
||||
content.anthropic =
|
||||
Some(language_model::settings::AnthropicSettingsContent::Versioned(
|
||||
language_model::settings::VersionedAnthropicSettingsContent::V1(
|
||||
language_model::settings::AnthropicSettingsContentV1 {
|
||||
api_url,
|
||||
low_speed_timeout_in_seconds,
|
||||
available_models: None
|
||||
}
|
||||
)
|
||||
));
|
||||
}
|
||||
},
|
||||
),
|
||||
@@ -136,11 +131,11 @@ impl AssistantSettingsContent {
|
||||
cx,
|
||||
move |content, _| {
|
||||
if content.ollama.is_none() {
|
||||
content.ollama = Some(OllamaSettingsContent {
|
||||
api_url,
|
||||
low_speed_timeout_in_seconds,
|
||||
available_models: None,
|
||||
});
|
||||
content.ollama =
|
||||
Some(language_model::settings::OllamaSettingsContent {
|
||||
api_url,
|
||||
low_speed_timeout_in_seconds,
|
||||
});
|
||||
}
|
||||
},
|
||||
),
|
||||
@@ -158,30 +153,23 @@ impl AssistantSettingsContent {
|
||||
models
|
||||
.into_iter()
|
||||
.filter_map(|model| match model {
|
||||
OpenAiModel::Custom {
|
||||
name,
|
||||
display_name,
|
||||
max_tokens,
|
||||
max_output_tokens,
|
||||
} => Some(open_ai::AvailableModel {
|
||||
name,
|
||||
display_name,
|
||||
max_tokens,
|
||||
max_output_tokens,
|
||||
}),
|
||||
open_ai::Model::Custom { name, max_tokens, max_output_tokens } => {
    Some(language_model::provider::open_ai::AvailableModel { name, max_tokens, max_output_tokens })
}
|
||||
_ => None,
|
||||
})
|
||||
.collect::<Vec<_>>()
|
||||
});
|
||||
content.openai = Some(OpenAiSettingsContent::Versioned(
|
||||
VersionedOpenAiSettingsContent::V1(
|
||||
OpenAiSettingsContentV1 {
|
||||
api_url,
|
||||
low_speed_timeout_in_seconds,
|
||||
available_models,
|
||||
},
|
||||
),
|
||||
));
|
||||
content.openai =
|
||||
Some(language_model::settings::OpenAiSettingsContent::Versioned(
|
||||
language_model::settings::VersionedOpenAiSettingsContent::V1(
|
||||
language_model::settings::OpenAiSettingsContentV1 {
|
||||
api_url,
|
||||
low_speed_timeout_in_seconds,
|
||||
available_models
|
||||
}
|
||||
)
|
||||
));
|
||||
}
|
||||
},
|
||||
),
|
||||
@@ -307,7 +295,7 @@ impl AssistantSettingsContent {
|
||||
_ => (None, None),
|
||||
};
|
||||
settings.provider = Some(AssistantProviderContentV1::Ollama {
|
||||
default_model: Some(ollama::Model::new(&model, None, None)),
|
||||
default_model: Some(ollama::Model::new(&model)),
|
||||
api_url,
|
||||
low_speed_timeout_in_seconds,
|
||||
});
|
||||
@@ -328,7 +316,7 @@ impl AssistantSettingsContent {
|
||||
_ => (None, None, None),
|
||||
};
|
||||
settings.provider = Some(AssistantProviderContentV1::OpenAi {
|
||||
default_model: OpenAiModel::from_id(&model).ok(),
|
||||
default_model: open_ai::Model::from_id(&model).ok(),
|
||||
api_url,
|
||||
low_speed_timeout_in_seconds,
|
||||
available_models,
|
||||
@@ -341,7 +329,7 @@ impl AssistantSettingsContent {
|
||||
}
|
||||
},
|
||||
AssistantSettingsContent::Legacy(settings) => {
|
||||
if let Ok(model) = OpenAiModel::from_id(&language_model.id().0) {
|
||||
if let Ok(model) = open_ai::Model::from_id(&language_model.id().0) {
|
||||
settings.default_open_ai_model = Some(model);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,8 +1,6 @@
use super::{MessageCacheMetadata, WorkflowStepEdit};
use crate::{
    assistant_panel, prompt_library, slash_command::file_command, CacheStatus, Context,
    ContextEvent, ContextId, ContextOperation, MessageId, MessageStatus, PromptBuilder,
    WorkflowStepEditKind,
    assistant_panel, prompt_library, slash_command::file_command, workflow::tool, CacheStatus,
    Context, ContextEvent, ContextId, ContextOperation, MessageId, MessageStatus, PromptBuilder,
};
use anyhow::Result;
use assistant_slash_command::{
@@ -10,13 +8,15 @@ use assistant_slash_command::{
    SlashCommandRegistry,
};
use collections::HashSet;
use fs::FakeFs;
use fs::{FakeFs, Fs as _};
use gpui::{AppContext, Model, SharedString, Task, TestAppContext, WeakView};
use indoc::indoc;
use language::{Buffer, LanguageRegistry, LspAdapterDelegate};
use language_model::{LanguageModelCacheConfiguration, LanguageModelRegistry, Role};
use parking_lot::Mutex;
use project::Project;
use rand::prelude::*;
use rope::Point;
use serde_json::json;
use settings::SettingsStore;
use std::{
@@ -27,15 +27,14 @@ use std::{
    rc::Rc,
    sync::{atomic::AtomicBool, Arc},
};
use text::{network::Network, OffsetRangeExt as _, ReplicaId};
use text::{network::Network, OffsetRangeExt as _, ReplicaId, ToPoint as _};
use ui::{Context as _, WindowContext};
use unindent::Unindent;
use util::{
    test::{generate_marked_text, marked_text_ranges},
    RandomCharIter,
};
use util::{test::marked_text_ranges, RandomCharIter};
use workspace::Workspace;

use super::MessageCacheMetadata;

#[gpui::test]
|
||||
fn test_inserting_and_removing_messages(cx: &mut AppContext) {
|
||||
let settings_store = SettingsStore::test(cx);
|
||||
@@ -480,12 +479,28 @@ async fn test_workflow_step_parsing(cx: &mut TestAppContext) {
|
||||
cx.update(prompt_library::init);
|
||||
let settings_store = cx.update(SettingsStore::test);
|
||||
cx.set_global(settings_store);
|
||||
cx.update(language::init);
|
||||
cx.update(Project::init_settings);
|
||||
let fs = FakeFs::new(cx.executor());
|
||||
fs.as_fake()
|
||||
.insert_tree(
|
||||
"/root",
|
||||
json!({
|
||||
"hello.rs": r#"
|
||||
fn hello() {
|
||||
println!("Hello, World!");
|
||||
}
|
||||
"#.unindent()
|
||||
}),
|
||||
)
|
||||
.await;
|
||||
let project = Project::test(fs, [Path::new("/root")], cx).await;
|
||||
cx.update(LanguageModelRegistry::test);
|
||||
|
||||
let model = cx.read(|cx| {
|
||||
LanguageModelRegistry::read_global(cx)
|
||||
.active_model()
|
||||
.unwrap()
|
||||
});
|
||||
cx.update(assistant_panel::init);
|
||||
let registry = Arc::new(LanguageRegistry::test(cx.executor()));
|
||||
|
||||
@@ -500,382 +515,151 @@ async fn test_workflow_step_parsing(cx: &mut TestAppContext) {
|
||||
cx,
|
||||
)
|
||||
});
|
||||
let buffer = context.read_with(cx, |context, _| context.buffer.clone());
|
||||
|
||||
// Insert an assistant message to simulate a response.
|
||||
let assistant_message_id = context.update(cx, |context, cx| {
|
||||
let user_message_id = context.messages(cx).next().unwrap().id;
|
||||
context
|
||||
.insert_message_after(user_message_id, Role::Assistant, MessageStatus::Done, cx)
|
||||
.unwrap()
|
||||
.id
|
||||
// Simulate user input
|
||||
let user_message = indoc! {r#"
|
||||
Please add unnecessary complexity to this code:
|
||||
|
||||
```hello.rs
|
||||
fn main() {
|
||||
println!("Hello, World!");
|
||||
}
|
||||
```
|
||||
"#};
|
||||
buffer.update(cx, |buffer, cx| {
|
||||
buffer.edit([(0..0, user_message)], None, cx);
|
||||
});
|
||||
|
||||
// No edit tags
|
||||
edit(
|
||||
&context,
|
||||
"
|
||||
|
||||
«one
|
||||
two
|
||||
»",
|
||||
cx,
|
||||
);
|
||||
expect_steps(
|
||||
&context,
|
||||
"
|
||||
|
||||
one
|
||||
two
|
||||
",
|
||||
&[],
|
||||
cx,
|
||||
);
|
||||
|
||||
// Partial edit step tag is added
|
||||
edit(
|
||||
&context,
|
||||
"
|
||||
|
||||
one
|
||||
two
|
||||
«
|
||||
<step»",
|
||||
cx,
|
||||
);
|
||||
expect_steps(
|
||||
&context,
|
||||
"
|
||||
|
||||
one
|
||||
two
|
||||
|
||||
<step",
|
||||
&[],
|
||||
cx,
|
||||
);
|
||||
|
||||
// The rest of the step tag is added. The unclosed
|
||||
// step is treated as incomplete.
|
||||
edit(
|
||||
&context,
|
||||
"
|
||||
|
||||
one
|
||||
two
|
||||
|
||||
<step«>
|
||||
Add a second function
|
||||
|
||||
```rust
|
||||
fn two() {}
|
||||
```
|
||||
|
||||
<edit>»",
|
||||
cx,
|
||||
);
|
||||
expect_steps(
|
||||
&context,
|
||||
"
|
||||
|
||||
one
|
||||
two
|
||||
|
||||
«<step>
|
||||
Add a second function
|
||||
|
||||
```rust
|
||||
fn two() {}
|
||||
```
|
||||
|
||||
<edit>»",
|
||||
&[&[]],
|
||||
cx,
|
||||
);
|
||||
|
||||
// The full suggestion is added
|
||||
edit(
|
||||
&context,
|
||||
"
|
||||
|
||||
one
|
||||
two
|
||||
// Simulate LLM response with edit steps
|
||||
let llm_response = indoc! {r#"
|
||||
Sure, I can help you with that. Here's a step-by-step process:
|
||||
|
||||
<step>
|
||||
Add a second function
|
||||
First, let's extract the greeting into a separate function:
|
||||
|
||||
```rust
|
||||
fn two() {}
|
||||
```
|
||||
fn greet() {
|
||||
println!("Hello, World!");
|
||||
}
|
||||
|
||||
<edit>«
|
||||
<path>src/lib.rs</path>
|
||||
<operation>insert_after</operation>
|
||||
<search>fn one</search>
|
||||
<description>add a `two` function</description>
|
||||
</edit>
|
||||
fn main() {
|
||||
greet();
|
||||
}
|
||||
```
|
||||
</step>
|
||||
|
||||
also,»",
|
||||
cx,
|
||||
);
|
||||
expect_steps(
|
||||
&context,
|
||||
"
|
||||
|
||||
one
|
||||
two
|
||||
|
||||
«<step>
|
||||
Add a second function
|
||||
|
||||
```rust
|
||||
fn two() {}
|
||||
```
|
||||
|
||||
<edit>
|
||||
<path>src/lib.rs</path>
|
||||
<operation>insert_after</operation>
|
||||
<search>fn one</search>
|
||||
<description>add a `two` function</description>
|
||||
</edit>
|
||||
</step>»
|
||||
|
||||
also,",
|
||||
&[&[WorkflowStepEdit {
|
||||
path: "src/lib.rs".into(),
|
||||
kind: WorkflowStepEditKind::InsertAfter {
|
||||
search: "fn one".into(),
|
||||
description: "add a `two` function".into(),
|
||||
},
|
||||
}]],
|
||||
cx,
|
||||
);
|
||||
|
||||
// The step is manually edited.
|
||||
edit(
|
||||
&context,
|
||||
"
|
||||
|
||||
one
|
||||
two
|
||||
|
||||
<step>
|
||||
Add a second function
|
||||
Now, let's make the greeting customizable:
|
||||
|
||||
```rust
|
||||
fn two() {}
|
||||
```
|
||||
fn greet(name: &str) {
|
||||
println!("Hello, {}!", name);
|
||||
}
|
||||
|
||||
<edit>
|
||||
<path>src/lib.rs</path>
|
||||
<operation>insert_after</operation>
|
||||
<search>«fn zero»</search>
|
||||
<description>add a `two` function</description>
|
||||
</edit>
|
||||
fn main() {
|
||||
greet("World");
|
||||
}
|
||||
```
|
||||
</step>
|
||||
|
||||
also,",
|
||||
cx,
|
||||
);
|
||||
expect_steps(
|
||||
&context,
|
||||
"
|
||||
These changes make the code more modular and flexible.
|
||||
"#};
|
||||
|
||||
one
|
||||
two
|
||||
// Simulate the assist method to trigger the LLM response
|
||||
context.update(cx, |context, cx| context.assist(cx));
|
||||
cx.run_until_parked();
|
||||
|
||||
«<step>
|
||||
Add a second function
|
||||
|
||||
```rust
|
||||
fn two() {}
|
||||
```
|
||||
|
||||
<edit>
|
||||
<path>src/lib.rs</path>
|
||||
<operation>insert_after</operation>
|
||||
<search>fn zero</search>
|
||||
<description>add a `two` function</description>
|
||||
</edit>
|
||||
</step>»
|
||||
|
||||
also,",
|
||||
&[&[WorkflowStepEdit {
|
||||
path: "src/lib.rs".into(),
|
||||
kind: WorkflowStepEditKind::InsertAfter {
|
||||
search: "fn zero".into(),
|
||||
description: "add a `two` function".into(),
|
||||
},
|
||||
}]],
|
||||
cx,
|
||||
);
|
||||
|
||||
// When setting the message role to User, the steps are cleared.
|
||||
context.update(cx, |context, cx| {
|
||||
context.cycle_message_roles(HashSet::from_iter([assistant_message_id]), cx);
|
||||
context.cycle_message_roles(HashSet::from_iter([assistant_message_id]), cx);
|
||||
// Retrieve the assistant response message's start from the context
|
||||
let response_start_row = context.read_with(cx, |context, cx| {
|
||||
let buffer = context.buffer.read(cx);
|
||||
context.message_anchors[1].start.to_point(buffer).row
|
||||
});
|
||||
expect_steps(
|
||||
&context,
|
||||
"
|
||||
|
||||
one
|
||||
two
|
||||
// Simulate the LLM completion
|
||||
model
|
||||
.as_fake()
|
||||
.stream_last_completion_response(llm_response.to_string());
|
||||
model.as_fake().end_last_completion_stream();
|
||||
|
||||
<step>
|
||||
Add a second function
|
||||
// Wait for the completion to be processed
|
||||
cx.run_until_parked();
|
||||
|
||||
```rust
|
||||
fn two() {}
|
||||
```
|
||||
|
||||
<edit>
|
||||
<path>src/lib.rs</path>
|
||||
<operation>insert_after</operation>
|
||||
<search>fn zero</search>
|
||||
<description>add a `two` function</description>
|
||||
</edit>
|
||||
</step>
|
||||
|
||||
also,",
|
||||
&[],
|
||||
cx,
|
||||
);
|
||||
|
||||
// When setting the message role back to Assistant, the steps are reparsed.
|
||||
context.update(cx, |context, cx| {
|
||||
context.cycle_message_roles(HashSet::from_iter([assistant_message_id]), cx);
|
||||
// Verify that the edit steps were parsed correctly
|
||||
context.read_with(cx, |context, cx| {
|
||||
assert_eq!(
|
||||
workflow_steps(context, cx),
|
||||
vec![
|
||||
(
|
||||
Point::new(response_start_row + 2, 0)..Point::new(response_start_row + 12, 3),
|
||||
WorkflowStepTestStatus::Pending
|
||||
),
|
||||
(
|
||||
Point::new(response_start_row + 14, 0)..Point::new(response_start_row + 24, 3),
|
||||
WorkflowStepTestStatus::Pending
|
||||
),
|
||||
]
|
||||
);
|
||||
});
|
||||
expect_steps(
|
||||
&context,
|
||||
"
|
||||
|
||||
one
|
||||
two
|
||||
|
||||
«<step>
|
||||
Add a second function
|
||||
|
||||
```rust
|
||||
fn two() {}
|
||||
```
|
||||
|
||||
<edit>
|
||||
<path>src/lib.rs</path>
|
||||
<operation>insert_after</operation>
|
||||
<search>fn zero</search>
|
||||
<description>add a `two` function</description>
|
||||
</edit>
|
||||
</step>»
|
||||
|
||||
also,",
|
||||
&[&[WorkflowStepEdit {
|
||||
path: "src/lib.rs".into(),
|
||||
kind: WorkflowStepEditKind::InsertAfter {
|
||||
search: "fn zero".into(),
|
||||
description: "add a `two` function".into(),
|
||||
},
|
||||
}]],
|
||||
cx,
|
||||
);
|
||||
|
||||
// Ensure steps are re-parsed when deserializing.
|
||||
let serialized_context = context.read_with(cx, |context, cx| context.serialize(cx));
|
||||
let deserialized_context = cx.new_model(|cx| {
|
||||
Context::deserialize(
|
||||
serialized_context,
|
||||
Default::default(),
|
||||
registry.clone(),
|
||||
prompt_builder.clone(),
|
||||
None,
|
||||
None,
|
||||
cx,
|
||||
)
|
||||
});
|
||||
expect_steps(
|
||||
&deserialized_context,
|
||||
"
|
||||
|
||||
one
|
||||
two
|
||||
|
||||
«<step>
|
||||
Add a second function
|
||||
|
||||
```rust
|
||||
fn two() {}
|
||||
```
|
||||
|
||||
<edit>
|
||||
<path>src/lib.rs</path>
|
||||
<operation>insert_after</operation>
|
||||
<search>fn zero</search>
|
||||
<description>add a `two` function</description>
|
||||
</edit>
|
||||
</step>»
|
||||
|
||||
also,",
|
||||
&[&[WorkflowStepEdit {
|
||||
path: "src/lib.rs".into(),
|
||||
kind: WorkflowStepEditKind::InsertAfter {
|
||||
search: "fn zero".into(),
|
||||
description: "add a `two` function".into(),
|
||||
},
|
||||
}]],
|
||||
cx,
|
||||
);
|
||||
|
||||
fn edit(context: &Model<Context>, new_text_marked_with_edits: &str, cx: &mut TestAppContext) {
|
||||
context.update(cx, |context, cx| {
|
||||
context.buffer.update(cx, |buffer, cx| {
|
||||
buffer.edit_via_marked_text(&new_text_marked_with_edits.unindent(), None, cx);
|
||||
});
|
||||
model
|
||||
.as_fake()
|
||||
.respond_to_last_tool_use(tool::WorkflowStepResolutionTool {
|
||||
step_title: "Title".into(),
|
||||
suggestions: vec![tool::WorkflowSuggestionTool {
|
||||
path: "/root/hello.rs".into(),
|
||||
// Simulate a symbol name that's slightly different than our outline query
|
||||
kind: tool::WorkflowSuggestionToolKind::Update {
|
||||
symbol: "fn main()".into(),
|
||||
description: "Extract a greeting function".into(),
|
||||
},
|
||||
}],
|
||||
});
|
||||
cx.executor().run_until_parked();
|
||||
|
||||
// Wait for tool use to be processed.
|
||||
cx.run_until_parked();
|
||||
|
||||
// Verify that the first edit step is not pending anymore.
|
||||
context.read_with(cx, |context, cx| {
|
||||
assert_eq!(
|
||||
workflow_steps(context, cx),
|
||||
vec![
|
||||
(
|
||||
Point::new(response_start_row + 2, 0)..Point::new(response_start_row + 12, 3),
|
||||
WorkflowStepTestStatus::Resolved
|
||||
),
|
||||
(
|
||||
Point::new(response_start_row + 14, 0)..Point::new(response_start_row + 24, 3),
|
||||
WorkflowStepTestStatus::Pending
|
||||
),
|
||||
]
|
||||
);
|
||||
});
|
||||
|
||||
#[derive(Copy, Clone, Debug, Eq, PartialEq)]
|
||||
enum WorkflowStepTestStatus {
|
||||
Pending,
|
||||
Resolved,
|
||||
Error,
|
||||
}
|
||||
|
||||
fn expect_steps(
|
||||
context: &Model<Context>,
|
||||
expected_marked_text: &str,
|
||||
expected_suggestions: &[&[WorkflowStepEdit]],
|
||||
cx: &mut TestAppContext,
|
||||
) {
|
||||
context.update(cx, |context, cx| {
|
||||
let expected_marked_text = expected_marked_text.unindent();
|
||||
let (expected_text, expected_ranges) = marked_text_ranges(&expected_marked_text, false);
|
||||
context.buffer.read_with(cx, |buffer, _| {
|
||||
assert_eq!(buffer.text(), expected_text);
|
||||
let ranges = context
|
||||
.workflow_steps
|
||||
.iter()
|
||||
.map(|entry| entry.range.to_offset(buffer))
|
||||
.collect::<Vec<_>>();
|
||||
let marked = generate_marked_text(&expected_text, &ranges, false);
|
||||
assert_eq!(
|
||||
marked,
|
||||
expected_marked_text,
|
||||
"unexpected suggestion ranges. actual: {ranges:?}, expected: {expected_ranges:?}"
|
||||
);
|
||||
let suggestions = context
|
||||
.workflow_steps
|
||||
.iter()
|
||||
.map(|step| {
|
||||
step.edits
|
||||
.iter()
|
||||
.map(|edit| {
|
||||
let edit = edit.as_ref().unwrap();
|
||||
WorkflowStepEdit {
|
||||
path: edit.path.clone(),
|
||||
kind: edit.kind.clone(),
|
||||
}
|
||||
})
|
||||
.collect::<Vec<_>>()
|
||||
})
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
assert_eq!(suggestions, expected_suggestions);
|
||||
});
|
||||
});
|
||||
fn workflow_steps(
|
||||
context: &Context,
|
||||
cx: &AppContext,
|
||||
) -> Vec<(Range<Point>, WorkflowStepTestStatus)> {
|
||||
context
|
||||
.workflow_steps
|
||||
.iter()
|
||||
.map(|step| {
|
||||
let buffer = context.buffer.read(cx);
|
||||
let status = match &step.step.read(cx).resolution {
|
||||
None => WorkflowStepTestStatus::Pending,
|
||||
Some(Ok(_)) => WorkflowStepTestStatus::Resolved,
|
||||
Some(Err(_)) => WorkflowStepTestStatus::Error,
|
||||
};
|
||||
(step.range.to_point(buffer), status)
|
||||
})
|
||||
.collect()
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -2,7 +2,6 @@ use crate::{
    prompts::PromptBuilder, Context, ContextEvent, ContextId, ContextOperation, ContextVersion,
    SavedContext, SavedContextMetadata,
};
use ::proto::AnyProtoClient;
use anyhow::{anyhow, Context as _, Result};
use client::{proto, telemetry::Telemetry, Client, TypedEnvelope};
use clock::ReplicaId;
@@ -26,7 +25,7 @@ use std::{
};
use util::{ResultExt, TryFutureExt};

pub fn init(client: &AnyProtoClient) {
pub fn init(client: &Arc<Client>) {
    client.add_model_message_handler(ContextStore::handle_advertise_contexts);
    client.add_model_request_handler(ContextStore::handle_open_context);
    client.add_model_request_handler(ContextStore::handle_create_context);
@@ -390,7 +389,7 @@ impl ContextStore {
            context_proto
                .operations
                .into_iter()
                .map(ContextOperation::from_proto)
                .map(|op| ContextOperation::from_proto(op))
                .collect::<Result<Vec<_>>>()
        })
        .await?;
@@ -527,7 +526,7 @@ impl ContextStore {
            context_proto
                .operations
                .into_iter()
                .map(ContextOperation::from_proto)
                .map(|op| ContextOperation::from_proto(op))
                .collect::<Result<Vec<_>>>()
        })
        .await?;

@@ -134,11 +134,8 @@ impl InlineAssistant {
|
||||
})
|
||||
.detach();
|
||||
|
||||
let workspace = workspace.downgrade();
|
||||
let workspace = workspace.clone();
|
||||
cx.observe_global::<SettingsStore>(move |cx| {
|
||||
let Some(workspace) = workspace.upgrade() else {
|
||||
return;
|
||||
};
|
||||
let Some(terminal_panel) = workspace.read(cx).panel::<TerminalPanel>(cx) else {
|
||||
return;
|
||||
};
|
||||
@@ -1203,11 +1200,9 @@ impl InlineAssistStatus {
|
||||
pub(crate) fn is_pending(&self) -> bool {
|
||||
matches!(self, Self::Pending)
|
||||
}
|
||||
|
||||
pub(crate) fn is_confirmed(&self) -> bool {
|
||||
matches!(self, Self::Confirmed)
|
||||
}
|
||||
|
||||
pub(crate) fn is_done(&self) -> bool {
|
||||
matches!(self, Self::Done)
|
||||
}
|
||||
@@ -1797,16 +1792,13 @@ impl PromptEditor {
|
||||
CodegenStatus::Pending => {
|
||||
cx.emit(PromptEditorEvent::DismissRequested);
|
||||
}
|
||||
CodegenStatus::Done => {
|
||||
CodegenStatus::Done | CodegenStatus::Error(_) => {
|
||||
if self.edited_since_done {
|
||||
cx.emit(PromptEditorEvent::StartRequested);
|
||||
} else {
|
||||
cx.emit(PromptEditorEvent::ConfirmRequested);
|
||||
}
|
||||
}
|
||||
CodegenStatus::Error(_) => {
|
||||
cx.emit(PromptEditorEvent::StartRequested);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1921,7 +1913,7 @@ impl PromptEditor {
|
||||
font_family: settings.ui_font.family.clone(),
|
||||
font_features: settings.ui_font.features.clone(),
|
||||
font_fallbacks: settings.ui_font.fallbacks.clone(),
|
||||
font_size: settings.ui_font_size.into(),
|
||||
font_size: rems(0.875).into(),
|
||||
font_weight: settings.ui_font.weight,
|
||||
line_height: relative(1.3),
|
||||
..Default::default()
|
||||
@@ -2347,7 +2339,7 @@ impl Codegen {
|
||||
self.build_request(user_prompt, assistant_panel_context, edit_range.clone(), cx)?;
|
||||
|
||||
let chunks =
|
||||
cx.spawn(|_, cx| async move { model.stream_completion_text(request, &cx).await });
|
||||
cx.spawn(|_, cx| async move { model.stream_completion(request, &cx).await });
|
||||
async move { Ok(chunks.await?.boxed()) }.boxed_local()
|
||||
};
|
||||
self.handle_stream(telemetry_id, edit_range, chunks, cx);
|
||||
@@ -2373,7 +2365,20 @@ impl Codegen {
|
||||
None
|
||||
};
|
||||
|
||||
let language_name = language_name.as_ref();
|
||||
        // A higher temperature increases the randomness of the model's output.
        // If the content is Markdown or the language is unknown, use a higher temperature for more creative output;
        // for code, use a lower temperature for more deterministic output.
        let temperature = if let Some(language) = language_name.clone() {
            if language.as_ref() == "Markdown" {
                1.0
            } else {
                0.5
            }
        } else {
            1.0
        };
|
||||
let language_name = language_name.as_deref();
|
||||
let start = buffer.point_to_buffer_offset(edit_range.start);
|
||||
let end = buffer.point_to_buffer_offset(edit_range.end);
|
||||
let (buffer, range) = if let Some((start, end)) = start.zip(end) {
|
||||
@@ -2406,9 +2411,8 @@ impl Codegen {
|
||||
|
||||
Ok(LanguageModelRequest {
|
||||
messages,
|
||||
tools: Vec::new(),
|
||||
stop: vec!["|END|>".to_string()],
|
||||
temperature: 1.,
|
||||
temperature,
|
||||
})
|
||||
}
|
||||
|
||||
@@ -3060,7 +3064,7 @@ mod tests {
|
||||
codegen.handle_stream(
|
||||
String::new(),
|
||||
range,
|
||||
future::ready(Ok(chunks_rx.map(Ok).boxed())),
|
||||
future::ready(Ok(chunks_rx.map(|chunk| Ok(chunk)).boxed())),
|
||||
cx,
|
||||
)
|
||||
});
|
||||
@@ -3132,7 +3136,7 @@ mod tests {
|
||||
codegen.handle_stream(
|
||||
String::new(),
|
||||
range.clone(),
|
||||
future::ready(Ok(chunks_rx.map(Ok).boxed())),
|
||||
future::ready(Ok(chunks_rx.map(|chunk| Ok(chunk)).boxed())),
|
||||
cx,
|
||||
)
|
||||
});
|
||||
@@ -3207,7 +3211,7 @@ mod tests {
|
||||
codegen.handle_stream(
|
||||
String::new(),
|
||||
range.clone(),
|
||||
future::ready(Ok(chunks_rx.map(Ok).boxed())),
|
||||
future::ready(Ok(chunks_rx.map(|chunk| Ok(chunk)).boxed())),
|
||||
cx,
|
||||
)
|
||||
});
|
||||
@@ -3281,7 +3285,7 @@ mod tests {
|
||||
codegen.handle_stream(
|
||||
String::new(),
|
||||
range.clone(),
|
||||
future::ready(Ok(chunks_rx.map(Ok).boxed())),
|
||||
future::ready(Ok(chunks_rx.map(|chunk| Ok(chunk)).boxed())),
|
||||
cx,
|
||||
)
|
||||
});
|
||||
|
||||
@@ -794,7 +794,6 @@ impl PromptLibrary {
|
||||
content: vec![body.to_string().into()],
|
||||
cache: false,
|
||||
}],
|
||||
tools: Vec::new(),
|
||||
stop: Vec::new(),
|
||||
temperature: 1.,
|
||||
},
|
||||
|
||||
@@ -4,7 +4,7 @@ use fs::Fs;
use futures::StreamExt;
use gpui::AssetSource;
use handlebars::{Handlebars, RenderError};
use language::{BufferSnapshot, LanguageName};
use language::BufferSnapshot;
use parking_lot::Mutex;
use serde::Serialize;
use std::{ops::Range, path::PathBuf, sync::Arc, time::Duration};
@@ -123,7 +123,7 @@ impl PromptBuilder {
    if params.fs.is_dir(parent_dir).await {
        let (mut changes, _watcher) = params.fs.watch(parent_dir, Duration::from_secs(1)).await;
        while let Some(changed_paths) = changes.next().await {
            if changed_paths.iter().any(|p| &p.path == &templates_dir) {
            if changed_paths.iter().any(|p| p == &templates_dir) {
                let mut log_message = format!("Prompt template overrides directory detected at {}", templates_dir.display());
                if let Ok(target) = params.fs.read_link(&templates_dir).await {
                    log_message.push_str(" -> ");
@@ -162,18 +162,18 @@ impl PromptBuilder {
    let mut combined_changes = futures::stream::select(changes, parent_changes);

    while let Some(changed_paths) = combined_changes.next().await {
        if changed_paths.iter().any(|p| &p.path == &templates_dir) {
        if changed_paths.iter().any(|p| p == &templates_dir) {
            if !params.fs.is_dir(&templates_dir).await {
                log::info!("Prompt template overrides directory removed. Restoring built-in prompt templates.");
                Self::register_built_in_templates(&mut handlebars.lock()).log_err();
                break;
            }
        }
        for event in changed_paths {
            if event.path.starts_with(&templates_dir) && event.path.extension().map_or(false, |ext| ext == "hbs") {
                log::info!("Reloading prompt template override: {}", event.path.display());
                if let Some(content) = params.fs.load(&event.path).await.log_err() {
                    let file_name = event.path.file_stem().unwrap().to_string_lossy();
        for changed_path in changed_paths {
            if changed_path.starts_with(&templates_dir) && changed_path.extension().map_or(false, |ext| ext == "hbs") {
                log::info!("Reloading prompt template override: {}", changed_path.display());
                if let Some(content) = params.fs.load(&changed_path).await.log_err() {
                    let file_name = changed_path.file_stem().unwrap().to_string_lossy();
                    handlebars.lock().register_template_string(&file_name, content).log_err();
                }
            }
@@ -204,11 +204,11 @@ impl PromptBuilder {
    pub fn generate_content_prompt(
        &self,
        user_prompt: String,
        language_name: Option<&LanguageName>,
        language_name: Option<&str>,
        buffer: BufferSnapshot,
        range: Range<usize>,
    ) -> Result<String, RenderError> {
        let content_type = match language_name.as_ref().map(|l| l.0.as_ref()) {
        let content_type = match language_name {
            None | Some("Markdown" | "Plain Text") => "text",
            Some(_) => "code",
        };
@@ -297,4 +297,11 @@ impl PromptBuilder {
    pub fn generate_workflow_prompt(&self) -> Result<String, RenderError> {
        self.handlebars.lock().render("edit_workflow", &())
    }

    pub fn generate_step_resolution_prompt(
        &self,
        context: &StepResolutionContext,
    ) -> Result<String, RenderError> {
        self.handlebars.lock().render("step_resolution", context)
    }
}

@@ -1,7 +1,6 @@
|
||||
use anyhow::{anyhow, Result};
|
||||
use assistant_slash_command::{
|
||||
AfterCompletion, ArgumentCompletion, SlashCommand, SlashCommandOutput,
|
||||
SlashCommandOutputSection,
|
||||
ArgumentCompletion, SlashCommand, SlashCommandOutput, SlashCommandOutputSection,
|
||||
};
|
||||
use collections::HashMap;
|
||||
use context_servers::{
|
||||
@@ -9,10 +8,9 @@ use context_servers::{
|
||||
protocol::PromptInfo,
|
||||
};
|
||||
use gpui::{Task, WeakView, WindowContext};
|
||||
use language::{CodeLabel, LspAdapterDelegate};
|
||||
use language::LspAdapterDelegate;
|
||||
use std::sync::atomic::AtomicBool;
|
||||
use std::sync::Arc;
|
||||
use text::LineEnding;
|
||||
use ui::{IconName, SharedString};
|
||||
use workspace::Workspace;
|
||||
|
||||
@@ -44,64 +42,20 @@ impl SlashCommand for ContextServerSlashCommand {
|
||||
}
|
||||
|
||||
fn requires_argument(&self) -> bool {
|
||||
self.prompt.arguments.as_ref().map_or(false, |args| {
|
||||
args.iter().any(|arg| arg.required == Some(true))
|
||||
})
|
||||
self.prompt
|
||||
.arguments
|
||||
.as_ref()
|
||||
.map_or(false, |args| !args.is_empty())
|
||||
}
|
||||
|
||||
fn complete_argument(
|
||||
self: Arc<Self>,
|
||||
arguments: &[String],
|
||||
_arguments: &[String],
|
||||
_cancel: Arc<AtomicBool>,
|
||||
_workspace: Option<WeakView<Workspace>>,
|
||||
cx: &mut WindowContext,
|
||||
_cx: &mut WindowContext,
|
||||
) -> Task<Result<Vec<ArgumentCompletion>>> {
|
||||
let server_id = self.server_id.clone();
|
||||
let prompt_name = self.prompt.name.clone();
|
||||
let manager = ContextServerManager::global(cx);
|
||||
let manager = manager.read(cx);
|
||||
|
||||
let (arg_name, arg_val) = match completion_argument(&self.prompt, arguments) {
|
||||
Ok(tp) => tp,
|
||||
Err(e) => {
|
||||
return Task::ready(Err(e));
|
||||
}
|
||||
};
|
||||
if let Some(server) = manager.get_server(&server_id) {
|
||||
cx.foreground_executor().spawn(async move {
|
||||
let Some(protocol) = server.client.read().clone() else {
|
||||
return Err(anyhow!("Context server not initialized"));
|
||||
};
|
||||
|
||||
let completion_result = protocol
|
||||
.completion(
|
||||
context_servers::types::CompletionReference::Prompt(
|
||||
context_servers::types::PromptReference {
|
||||
r#type: context_servers::types::PromptReferenceType::Prompt,
|
||||
name: prompt_name,
|
||||
},
|
||||
),
|
||||
arg_name,
|
||||
arg_val,
|
||||
)
|
||||
.await?;
|
||||
|
||||
let completions = completion_result
|
||||
.values
|
||||
.into_iter()
|
||||
.map(|value| ArgumentCompletion {
|
||||
label: CodeLabel::plain(value.clone(), None),
|
||||
new_text: value,
|
||||
after_completion: AfterCompletion::Continue,
|
||||
replace_previous_arguments: false,
|
||||
})
|
||||
.collect();
|
||||
|
||||
Ok(completions)
|
||||
})
|
||||
} else {
|
||||
Task::ready(Err(anyhow!("Context server not found")))
|
||||
}
|
||||
Task::ready(Ok(Vec::new()))
|
||||
}
|
||||
|
||||
fn run(
|
||||
@@ -127,14 +81,10 @@ impl SlashCommand for ContextServerSlashCommand {
|
||||
return Err(anyhow!("Context server not initialized"));
|
||||
};
|
||||
let result = protocol.run_prompt(&prompt_name, prompt_args).await?;
|
||||
let mut prompt = result.prompt;
|
||||
|
||||
// We must normalize the line endings here, since servers might return CR characters.
|
||||
LineEnding::normalize(&mut prompt);
|
||||
|
||||
Ok(SlashCommandOutput {
|
||||
sections: vec![SlashCommandOutputSection {
|
||||
range: 0..(prompt.len()),
|
||||
range: 0..(result.prompt.len()),
|
||||
icon: IconName::ZedAssistant,
|
||||
label: SharedString::from(
|
||||
result
|
||||
@@ -142,7 +92,7 @@ impl SlashCommand for ContextServerSlashCommand {
|
||||
.unwrap_or(format!("Result from {}", prompt_name)),
|
||||
),
|
||||
}],
|
||||
text: prompt,
|
||||
text: result.prompt,
|
||||
run_commands_in_text: false,
|
||||
})
|
||||
})
|
||||
@@ -152,22 +102,6 @@ impl SlashCommand for ContextServerSlashCommand {
|
||||
}
|
||||
}
|
||||
|
||||
fn completion_argument(prompt: &PromptInfo, arguments: &[String]) -> Result<(String, String)> {
|
||||
if arguments.is_empty() {
|
||||
return Err(anyhow!("No arguments given"));
|
||||
}
|
||||
|
||||
match &prompt.arguments {
|
||||
Some(args) if args.len() == 1 => {
|
||||
let arg_name = args[0].name.clone();
|
||||
let arg_value = arguments.join(" ");
|
||||
Ok((arg_name, arg_value))
|
||||
}
|
||||
Some(_) => Err(anyhow!("Prompt must have exactly one argument")),
|
||||
None => Err(anyhow!("Prompt has no arguments")),
|
||||
}
|
||||
}
|
||||
|
||||
fn prompt_arguments(prompt: &PromptInfo, arguments: &[String]) -> Result<HashMap<String, String>> {
|
||||
match &prompt.arguments {
|
||||
Some(args) if args.len() > 1 => Err(anyhow!(
|
||||
@@ -178,8 +112,6 @@ fn prompt_arguments(prompt: &PromptInfo, arguments: &[String]) -> Result<HashMap
|
||||
let mut map = HashMap::default();
|
||||
map.insert(args[0].name.clone(), arguments.join(" "));
|
||||
Ok(map)
|
||||
} else if arguments.is_empty() && args[0].required == Some(false) {
|
||||
Ok(HashMap::default())
|
||||
} else {
|
||||
Err(anyhow!("Prompt expects argument but none given"))
|
||||
}
|
||||
@@ -200,7 +132,7 @@ fn prompt_arguments(prompt: &PromptInfo, arguments: &[String]) -> Result<HashMap
|
||||
pub fn acceptable_prompt(prompt: &PromptInfo) -> bool {
|
||||
match &prompt.arguments {
|
||||
None => true,
|
||||
Some(args) if args.len() <= 1 => true,
|
||||
Some(args) if args.len() == 1 => true,
|
||||
_ => false,
|
||||
}
|
||||
}
|
||||
|
||||
@@ -164,7 +164,11 @@ impl SlashCommand for FileSlashCommand {
|
||||
Some(ArgumentCompletion {
|
||||
label,
|
||||
new_text: text,
|
||||
after_completion: AfterCompletion::Compose,
|
||||
after_completion: if path_match.is_dir {
|
||||
AfterCompletion::Compose
|
||||
} else {
|
||||
AfterCompletion::Run
|
||||
},
|
||||
replace_previous_arguments: false,
|
||||
})
|
||||
})
|
||||
|
||||
@@ -9,13 +9,13 @@ use collections::{HashMap, HashSet};
|
||||
use editor::Editor;
|
||||
use futures::future::join_all;
|
||||
use gpui::{Entity, Task, WeakView};
|
||||
use language::{BufferSnapshot, CodeLabel, HighlightId, LspAdapterDelegate};
|
||||
use language::{BufferSnapshot, LspAdapterDelegate};
|
||||
use std::{
|
||||
fmt::Write,
|
||||
path::PathBuf,
|
||||
sync::{atomic::AtomicBool, Arc},
|
||||
};
|
||||
use ui::{ActiveTheme, WindowContext};
|
||||
use ui::WindowContext;
|
||||
use workspace::Workspace;
|
||||
|
||||
pub(crate) struct TabSlashCommand;
|
||||
@@ -79,8 +79,6 @@ impl SlashCommand for TabSlashCommand {
|
||||
let current_query = arguments.last().cloned().unwrap_or_default();
|
||||
let tab_items_search =
|
||||
tab_items_for_queries(workspace, &[current_query], cancel, false, cx);
|
||||
|
||||
let comment_id = cx.theme().syntax().highlight_id("comment").map(HighlightId);
|
||||
cx.spawn(|_| async move {
|
||||
let tab_items = tab_items_search.await?;
|
||||
let run_command = tab_items.len() == 1;
|
||||
@@ -92,9 +90,8 @@ impl SlashCommand for TabSlashCommand {
|
||||
if active_item_path.is_some() && active_item_path == path {
|
||||
return None;
|
||||
}
|
||||
let label = create_tab_completion_label(path.as_ref()?, comment_id);
|
||||
Some(ArgumentCompletion {
|
||||
label,
|
||||
label: path_string.clone().into(),
|
||||
new_text: path_string,
|
||||
replace_previous_arguments: false,
|
||||
after_completion: run_command.into(),
|
||||
@@ -103,17 +100,14 @@ impl SlashCommand for TabSlashCommand {
|
||||
|
||||
let active_item_completion = active_item_path
|
||||
.as_deref()
|
||||
.map(|active_item_path| {
|
||||
let path_string = active_item_path.to_string_lossy().to_string();
|
||||
let label = create_tab_completion_label(active_item_path, comment_id);
|
||||
ArgumentCompletion {
|
||||
label,
|
||||
new_text: path_string,
|
||||
replace_previous_arguments: false,
|
||||
after_completion: run_command.into(),
|
||||
}
|
||||
})
|
||||
.filter(|completion| !argument_set.contains(&completion.new_text));
|
||||
.map(|active_item_path| active_item_path.to_string_lossy().to_string())
|
||||
.filter(|path_string| !argument_set.contains(path_string))
|
||||
.map(|path_string| ArgumentCompletion {
|
||||
label: path_string.clone().into(),
|
||||
new_text: path_string,
|
||||
replace_previous_arguments: false,
|
||||
after_completion: run_command.into(),
|
||||
});
|
||||
|
||||
Ok(active_item_completion
|
||||
.into_iter()
|
||||
@@ -253,7 +247,7 @@ fn tab_items_for_queries(
|
||||
.fold(HashMap::default(), |mut candidates, (id, path_string)| {
|
||||
candidates
|
||||
.entry(path_string)
|
||||
.or_insert_with(Vec::new)
|
||||
.or_insert_with(|| Vec::new())
|
||||
.push(id);
|
||||
candidates
|
||||
});
|
||||
@@ -325,23 +319,3 @@ fn active_item_buffer(
|
||||
.snapshot();
|
||||
Ok(snapshot)
|
||||
}
|
||||
|
||||
fn create_tab_completion_label(
|
||||
path: &std::path::Path,
|
||||
comment_id: Option<HighlightId>,
|
||||
) -> CodeLabel {
|
||||
let file_name = path
|
||||
.file_name()
|
||||
.map(|f| f.to_string_lossy())
|
||||
.unwrap_or_default();
|
||||
let parent_path = path
|
||||
.parent()
|
||||
.map(|p| p.to_string_lossy())
|
||||
.unwrap_or_default();
|
||||
let mut label = CodeLabel::default();
|
||||
label.push_str(&file_name, None);
|
||||
label.push_str(" ", None);
|
||||
label.push_str(&parent_path, comment_id);
|
||||
label.filter_range = 0..file_name.len();
|
||||
label
|
||||
}
|
||||
|
||||
@@ -157,11 +157,11 @@ impl TerminalInlineAssistant {
|
||||
PromptEditorEvent::StopRequested => {
|
||||
self.stop_assist(assist_id, cx);
|
||||
}
|
||||
PromptEditorEvent::ConfirmRequested { execute } => {
|
||||
self.finish_assist(assist_id, false, *execute, cx);
|
||||
PromptEditorEvent::ConfirmRequested => {
|
||||
self.finish_assist(assist_id, false, cx);
|
||||
}
|
||||
PromptEditorEvent::CancelRequested => {
|
||||
self.finish_assist(assist_id, true, false, cx);
|
||||
self.finish_assist(assist_id, true, cx);
|
||||
}
|
||||
PromptEditorEvent::DismissRequested => {
|
||||
self.dismiss_assist(assist_id, cx);
|
||||
@@ -282,7 +282,6 @@ impl TerminalInlineAssistant {
|
||||
|
||||
Ok(LanguageModelRequest {
|
||||
messages,
|
||||
tools: Vec::new(),
|
||||
stop: Vec::new(),
|
||||
temperature: 1.0,
|
||||
})
|
||||
@@ -292,7 +291,6 @@ impl TerminalInlineAssistant {
|
||||
&mut self,
|
||||
assist_id: TerminalInlineAssistId,
|
||||
undo: bool,
|
||||
execute: bool,
|
||||
cx: &mut WindowContext,
|
||||
) {
|
||||
self.dismiss_assist(assist_id, cx);
|
||||
@@ -308,7 +306,7 @@ impl TerminalInlineAssistant {
|
||||
assist.codegen.update(cx, |codegen, cx| {
|
||||
if undo {
|
||||
codegen.undo(cx);
|
||||
} else if execute {
|
||||
} else {
|
||||
codegen.complete(cx);
|
||||
}
|
||||
});
|
||||
@@ -424,7 +422,7 @@ impl TerminalInlineAssist {
|
||||
}
|
||||
|
||||
if assist.prompt_editor.is_none() {
|
||||
this.finish_assist(assist_id, false, false, cx);
|
||||
this.finish_assist(assist_id, false, cx);
|
||||
}
|
||||
}
|
||||
})
|
||||
@@ -437,7 +435,7 @@ impl TerminalInlineAssist {
|
||||
enum PromptEditorEvent {
|
||||
StartRequested,
|
||||
StopRequested,
|
||||
ConfirmRequested { execute: bool },
|
||||
ConfirmRequested,
|
||||
CancelRequested,
|
||||
DismissRequested,
|
||||
Resized { height_in_lines: u8 },
|
||||
@@ -465,8 +463,7 @@ impl EventEmitter<PromptEditorEvent> for PromptEditor {}
|
||||
|
||||
impl Render for PromptEditor {
|
||||
fn render(&mut self, cx: &mut ViewContext<Self>) -> impl IntoElement {
|
||||
let status = &self.codegen.read(cx).status;
|
||||
let buttons = match status {
|
||||
let buttons = match &self.codegen.read(cx).status {
|
||||
CodegenStatus::Idle => {
|
||||
vec![
|
||||
IconButton::new("cancel", IconName::Close)
|
||||
@@ -511,16 +508,15 @@ impl Render for PromptEditor {
|
||||
]
|
||||
}
|
||||
CodegenStatus::Error(_) | CodegenStatus::Done => {
|
||||
let cancel = IconButton::new("cancel", IconName::Close)
|
||||
.icon_color(Color::Muted)
|
||||
.shape(IconButtonShape::Square)
|
||||
.tooltip(|cx| Tooltip::for_action("Cancel Assist", &menu::Cancel, cx))
|
||||
.on_click(cx.listener(|_, _, cx| cx.emit(PromptEditorEvent::CancelRequested)));
|
||||
|
||||
let has_error = matches!(status, CodegenStatus::Error(_));
|
||||
if has_error || self.edited_since_done {
|
||||
vec![
|
||||
cancel,
|
||||
vec![
|
||||
IconButton::new("cancel", IconName::Close)
|
||||
.icon_color(Color::Muted)
|
||||
.shape(IconButtonShape::Square)
|
||||
.tooltip(|cx| Tooltip::for_action("Cancel Assist", &menu::Cancel, cx))
|
||||
.on_click(
|
||||
cx.listener(|_, _, cx| cx.emit(PromptEditorEvent::CancelRequested)),
|
||||
),
|
||||
if self.edited_since_done {
|
||||
IconButton::new("restart", IconName::RotateCw)
|
||||
.icon_color(Color::Info)
|
||||
.shape(IconButtonShape::Square)
|
||||
@@ -534,35 +530,19 @@ impl Render for PromptEditor {
|
||||
})
|
||||
.on_click(cx.listener(|_, _, cx| {
|
||||
cx.emit(PromptEditorEvent::StartRequested);
|
||||
})),
|
||||
]
|
||||
} else {
|
||||
vec![
|
||||
cancel,
|
||||
IconButton::new("accept", IconName::Check)
|
||||
.icon_color(Color::Info)
|
||||
.shape(IconButtonShape::Square)
|
||||
.tooltip(|cx| {
|
||||
Tooltip::for_action("Accept Generated Command", &menu::Confirm, cx)
|
||||
})
|
||||
.on_click(cx.listener(|_, _, cx| {
|
||||
cx.emit(PromptEditorEvent::ConfirmRequested { execute: false });
|
||||
})),
|
||||
}))
|
||||
} else {
|
||||
IconButton::new("confirm", IconName::Play)
|
||||
.icon_color(Color::Info)
|
||||
.shape(IconButtonShape::Square)
|
||||
.tooltip(|cx| {
|
||||
Tooltip::for_action(
|
||||
"Execute Generated Command",
|
||||
&menu::SecondaryConfirm,
|
||||
cx,
|
||||
)
|
||||
Tooltip::for_action("Execute generated command", &menu::Confirm, cx)
|
||||
})
|
||||
.on_click(cx.listener(|_, _, cx| {
|
||||
cx.emit(PromptEditorEvent::ConfirmRequested { execute: true });
|
||||
})),
|
||||
]
|
||||
}
|
||||
cx.emit(PromptEditorEvent::ConfirmRequested);
|
||||
}))
|
||||
},
|
||||
]
|
||||
}
|
||||
};
|
||||
|
||||
@@ -574,7 +554,6 @@ impl Render for PromptEditor {
|
||||
.h_full()
|
||||
.w_full()
|
||||
.on_action(cx.listener(Self::confirm))
|
||||
.on_action(cx.listener(Self::secondary_confirm))
|
||||
.on_action(cx.listener(Self::cancel))
|
||||
.on_action(cx.listener(Self::move_up))
|
||||
.on_action(cx.listener(Self::move_down))
|
||||
@@ -625,7 +604,7 @@ impl Render for PromptEditor {
|
||||
.child(div().flex_1().child(self.render_prompt_editor(cx)))
|
||||
.child(
|
||||
h_flex()
|
||||
.gap_1()
|
||||
.gap_2()
|
||||
.pr_4()
|
||||
.children(self.render_token_count(cx))
|
||||
.children(buttons),
|
||||
@@ -825,22 +804,13 @@ impl PromptEditor {
|
||||
CodegenStatus::Pending => {
|
||||
cx.emit(PromptEditorEvent::DismissRequested);
|
||||
}
|
||||
CodegenStatus::Done => {
|
||||
CodegenStatus::Done | CodegenStatus::Error(_) => {
|
||||
if self.edited_since_done {
|
||||
cx.emit(PromptEditorEvent::StartRequested);
|
||||
} else {
|
||||
cx.emit(PromptEditorEvent::ConfirmRequested { execute: false });
|
||||
cx.emit(PromptEditorEvent::ConfirmRequested);
|
||||
}
|
||||
}
|
||||
CodegenStatus::Error(_) => {
|
||||
cx.emit(PromptEditorEvent::StartRequested);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn secondary_confirm(&mut self, _: &menu::SecondaryConfirm, cx: &mut ViewContext<Self>) {
if matches!(self.codegen.read(cx).status, CodegenStatus::Done) {
cx.emit(PromptEditorEvent::ConfirmRequested { execute: true });
}
}

@@ -990,7 +960,7 @@ impl TerminalTransaction {

pub fn push(&mut self, hunk: String, cx: &mut AppContext) {
// Ensure that the assistant cannot accidentally execute commands that are streamed into the terminal
let input = Self::sanitize_input(hunk);
let input = hunk.replace(CARRIAGE_RETURN, " ");
self.terminal
.update(cx, |terminal, _| terminal.input(input));
}
@@ -1005,10 +975,6 @@ impl TerminalTransaction {
terminal.input(CARRIAGE_RETURN.to_string())
});
}

fn sanitize_input(input: String) -> String {
input.replace(['\r', '\n'], "")
}
}

pub struct Codegen {
@@ -1040,7 +1006,7 @@ impl Codegen {
self.transaction = Some(TerminalTransaction::start(self.terminal.clone()));
self.generation = cx.spawn(|this, mut cx| async move {
let model_telemetry_id = model.telemetry_id();
let response = model.stream_completion_text(prompt, &cx).await;
let response = model.stream_completion(prompt, &cx).await;
let generate = async {
let (mut hunks_tx, mut hunks_rx) = mpsc::channel(1);


@@ -1 +0,0 @@
pub mod now_tool;
@@ -1,60 +0,0 @@
use std::sync::Arc;

use anyhow::{anyhow, Result};
use assistant_tool::Tool;
use chrono::{Local, Utc};
use gpui::{Task, WeakView, WindowContext};
use schemars::JsonSchema;
use serde::{Deserialize, Serialize};

#[derive(Debug, Serialize, Deserialize, JsonSchema)]
#[serde(rename_all = "snake_case")]
pub enum Timezone {
/// Use UTC for the datetime.
Utc,
/// Use local time for the datetime.
Local,
}

#[derive(Debug, Serialize, Deserialize, JsonSchema)]
pub struct FileToolInput {
/// The timezone to use for the datetime.
timezone: Timezone,
}

pub struct NowTool;

impl Tool for NowTool {
fn name(&self) -> String {
"now".into()
}

fn description(&self) -> String {
"Returns the current datetime in RFC 3339 format.".into()
}

fn input_schema(&self) -> serde_json::Value {
let schema = schemars::schema_for!(FileToolInput);
serde_json::to_value(&schema).unwrap()
}

fn run(
self: Arc<Self>,
input: serde_json::Value,
_workspace: WeakView<workspace::Workspace>,
_cx: &mut WindowContext,
) -> Task<Result<String>> {
let input: FileToolInput = match serde_json::from_value(input) {
Ok(input) => input,
Err(err) => return Task::ready(Err(anyhow!(err))),
};

let now = match input.timezone {
Timezone::Utc => Utc::now().to_rfc3339(),
Timezone::Local => Local::now().to_rfc3339(),
};
let text = format!("The current datetime is {now}.");

Task::ready(Ok(text))
}
}
315
crates/assistant/src/workflow/step_view.rs
Normal file
@@ -0,0 +1,315 @@
use super::WorkflowStep;
use crate::{Assist, Context};
use editor::{
display_map::{BlockDisposition, BlockProperties, BlockStyle},
Editor, EditorEvent, ExcerptRange, MultiBuffer,
};
use gpui::{
div, AnyElement, AppContext, Context as _, Empty, EventEmitter, FocusableView, IntoElement,
Model, ParentElement as _, Render, SharedString, Styled as _, View, ViewContext,
VisualContext as _, WeakModel, WindowContext,
};
use language::{language_settings::SoftWrap, Anchor, Buffer, LanguageRegistry};
use std::{ops::DerefMut, sync::Arc};
use text::OffsetRangeExt;
use theme::ActiveTheme as _;
use ui::{
h_flex, v_flex, ButtonCommon as _, ButtonLike, ButtonStyle, Color, Icon, IconName,
InteractiveElement as _, Label, LabelCommon as _,
};
use workspace::{
item::{self, Item},
pane,
searchable::SearchableItemHandle,
};

pub struct WorkflowStepView {
step: WeakModel<WorkflowStep>,
tool_output_buffer: Model<Buffer>,
editor: View<Editor>,
}

impl WorkflowStepView {
pub fn new(
context: Model<Context>,
step: Model<WorkflowStep>,
language_registry: Arc<LanguageRegistry>,
cx: &mut ViewContext<Self>,
) -> Self {
let tool_output_buffer =
cx.new_model(|cx| Buffer::local(step.read(cx).tool_output.clone(), cx));
let buffer = cx.new_model(|cx| {
let mut buffer = MultiBuffer::without_headers(0, language::Capability::ReadWrite);
buffer.push_excerpts(
context.read(cx).buffer().clone(),
[ExcerptRange {
context: step.read(cx).context_buffer_range.clone(),
primary: None,
}],
cx,
);
buffer.push_excerpts(
tool_output_buffer.clone(),
[ExcerptRange {
context: Anchor::MIN..Anchor::MAX,
primary: None,
}],
cx,
);
buffer
});

let buffer_snapshot = buffer.read(cx).snapshot(cx);
let output_excerpt = buffer_snapshot.excerpts().skip(1).next().unwrap().0;
let input_start_anchor = multi_buffer::Anchor::min();
let output_start_anchor = buffer_snapshot
.anchor_in_excerpt(output_excerpt, Anchor::MIN)
.unwrap();
let output_end_anchor = multi_buffer::Anchor::max();

let handle = cx.view().downgrade();
let editor = cx.new_view(|cx| {
let mut editor = Editor::for_multibuffer(buffer.clone(), None, false, cx);
editor.set_soft_wrap_mode(SoftWrap::EditorWidth, cx);
editor.set_show_line_numbers(false, cx);
editor.set_show_git_diff_gutter(false, cx);
editor.set_show_code_actions(false, cx);
editor.set_show_runnables(false, cx);
editor.set_show_wrap_guides(false, cx);
editor.set_show_indent_guides(false, cx);
editor.set_read_only(true);
editor.set_show_inline_completions(Some(false), cx);
editor.insert_blocks(
[
BlockProperties {
position: input_start_anchor,
height: 1,
style: BlockStyle::Fixed,
render: Box::new(|cx| section_header("Step Input", cx)),
disposition: BlockDisposition::Above,
priority: 0,
},
BlockProperties {
position: output_start_anchor,
height: 1,
style: BlockStyle::Fixed,
render: Box::new(|cx| section_header("Tool Output", cx)),
disposition: BlockDisposition::Above,
priority: 0,
},
BlockProperties {
position: output_end_anchor,
height: 1,
style: BlockStyle::Fixed,
render: Box::new(move |cx| {
if let Some(result) = handle.upgrade().and_then(|this| {
this.update(cx.deref_mut(), |this, cx| this.render_result(cx))
}) {
v_flex()
.child(section_header("Output", cx))
.child(
div().pl(cx.gutter_dimensions.full_width()).child(result),
)
.into_any_element()
} else {
Empty.into_any_element()
}
}),
disposition: BlockDisposition::Below,
priority: 0,
},
],
None,
cx,
);
editor
});

cx.observe(&step, Self::step_updated).detach();
cx.observe_release(&step, Self::step_released).detach();

cx.spawn(|this, mut cx| async move {
if let Ok(language) = language_registry.language_for_name("JSON").await {
this.update(&mut cx, |this, cx| {
this.tool_output_buffer.update(cx, |buffer, cx| {
buffer.set_language(Some(language), cx);
});
})
.ok();
}
})
.detach();

Self {
tool_output_buffer,
step: step.downgrade(),
editor,
}
}

pub fn step(&self) -> &WeakModel<WorkflowStep> {
&self.step
}

fn render_result(&mut self, cx: &mut ViewContext<Self>) -> Option<AnyElement> {
let step = self.step.upgrade()?;
let result = step.read(cx).resolution.as_ref()?;
match result {
Ok(result) => {
Some(
v_flex()
.child(result.title.clone())
.children(result.suggestion_groups.iter().filter_map(
|(buffer, suggestion_groups)| {
let buffer = buffer.read(cx);
let path = buffer.file().map(|f| f.path());
let snapshot = buffer.snapshot();
v_flex()
.mb_2()
.border_b_1()
.children(path.map(|path| format!("path: {}", path.display())))
.children(suggestion_groups.iter().map(|group| {
v_flex().pt_2().pl_2().children(
group.suggestions.iter().map(|suggestion| {
let range = suggestion.range().to_point(&snapshot);
v_flex()
.children(
suggestion.description().map(|desc| {
format!("description: {desc}")
}),
)
.child(format!("kind: {}", suggestion.kind()))
.children(suggestion.symbol_path().map(
|path| format!("symbol path: {}", path.0),
))
.child(format!(
"lines: {} - {}",
range.start.row + 1,
range.end.row + 1
))
}),
)
}))
.into()
},
))
.into_any_element(),
)
}
Err(error) => Some(format!("{:?}", error).into_any_element()),
}
}

fn step_updated(&mut self, step: Model<WorkflowStep>, cx: &mut ViewContext<Self>) {
self.tool_output_buffer.update(cx, |buffer, cx| {
let text = step.read(cx).tool_output.clone();
buffer.set_text(text, cx);
});
cx.notify();
}

fn step_released(&mut self, _: &mut WorkflowStep, cx: &mut ViewContext<Self>) {
cx.emit(EditorEvent::Closed);
}

fn resolve(&mut self, _: &Assist, cx: &mut ViewContext<Self>) {
self.step
.update(cx, |step, cx| {
step.resolve(cx);
})
.ok();
}
}

fn section_header(
name: &'static str,
cx: &mut editor::display_map::BlockContext,
) -> gpui::AnyElement {
h_flex()
.pl(cx.gutter_dimensions.full_width())
.h_11()
.w_full()
.relative()
.gap_1()
.child(
ButtonLike::new("role")
.style(ButtonStyle::Filled)
.child(Label::new(name).color(Color::Default)),
)
.into_any_element()
}

impl Render for WorkflowStepView {
fn render(&mut self, cx: &mut ViewContext<Self>) -> impl IntoElement {
div()
.key_context("ContextEditor")
.on_action(cx.listener(Self::resolve))
.flex_grow()
.bg(cx.theme().colors().editor_background)
.child(self.editor.clone())
}
}

impl EventEmitter<EditorEvent> for WorkflowStepView {}

impl FocusableView for WorkflowStepView {
fn focus_handle(&self, cx: &gpui::AppContext) -> gpui::FocusHandle {
self.editor.read(cx).focus_handle(cx)
}
}

impl Item for WorkflowStepView {
type Event = EditorEvent;

fn tab_content_text(&self, cx: &WindowContext) -> Option<SharedString> {
let step = self.step.upgrade()?.read(cx);
let context = step.context.upgrade()?.read(cx);
let buffer = context.buffer().read(cx);
let index = context
.workflow_step_index_for_range(&step.context_buffer_range, buffer)
.ok()?
+ 1;
Some(format!("Step {index}").into())
}

fn tab_icon(&self, _cx: &WindowContext) -> Option<ui::Icon> {
Some(Icon::new(IconName::SearchCode))
}

fn to_item_events(event: &Self::Event, mut f: impl FnMut(item::ItemEvent)) {
match event {
EditorEvent::Edited { .. } => {
f(item::ItemEvent::Edit);
}
EditorEvent::TitleChanged => {
f(item::ItemEvent::UpdateTab);
}
EditorEvent::Closed => f(item::ItemEvent::CloseItem),
_ => {}
}
}

fn tab_tooltip_text(&self, _cx: &AppContext) -> Option<SharedString> {
None
}

fn as_searchable(&self, _handle: &View<Self>) -> Option<Box<dyn SearchableItemHandle>> {
None
}

fn set_nav_history(&mut self, nav_history: pane::ItemNavHistory, cx: &mut ViewContext<Self>) {
self.editor.update(cx, |editor, cx| {
Item::set_nav_history(editor, nav_history, cx)
})
}

fn navigate(&mut self, data: Box<dyn std::any::Any>, cx: &mut ViewContext<Self>) -> bool {
self.editor
.update(cx, |editor, cx| Item::navigate(editor, data, cx))
}

fn deactivated(&mut self, cx: &mut ViewContext<Self>) {
self.editor
.update(cx, |editor, cx| Item::deactivated(editor, cx))
}
}
@@ -1,22 +0,0 @@
[package]
name = "assistant_tool"
version = "0.1.0"
edition = "2021"
publish = false
license = "GPL-3.0-or-later"

[lints]
workspace = true

[lib]
path = "src/assistant_tool.rs"

[dependencies]
anyhow.workspace = true
collections.workspace = true
derive_more.workspace = true
gpui.workspace = true
parking_lot.workspace = true
serde.workspace = true
serde_json.workspace = true
workspace.workspace = true
@@ -1,35 +0,0 @@
mod tool_registry;

use std::sync::Arc;

use anyhow::Result;
use gpui::{AppContext, Task, WeakView, WindowContext};
use workspace::Workspace;

pub use tool_registry::*;

pub fn init(cx: &mut AppContext) {
ToolRegistry::default_global(cx);
}

/// A tool that can be used by a language model.
pub trait Tool: 'static + Send + Sync {
/// Returns the name of the tool.
fn name(&self) -> String;

/// Returns the description of the tool.
fn description(&self) -> String;

/// Returns the JSON schema that describes the tool's input.
fn input_schema(&self) -> serde_json::Value {
serde_json::Value::Object(serde_json::Map::default())
}

/// Runs the tool with the provided input.
fn run(
self: Arc<Self>,
input: serde_json::Value,
workspace: WeakView<Workspace>,
cx: &mut WindowContext,
) -> Task<Result<String>>;
}
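Note (not part of the diff): for orientation, a minimal sketch of what an implementation of the Tool trait defined above looks like. EchoTool is hypothetical and only illustrates the trait as written in this file; input_schema() is left at its default.

    use std::sync::Arc;
    use anyhow::Result;
    use assistant_tool::Tool;
    use gpui::{Task, WeakView, WindowContext};

    // Hypothetical tool that just echoes its JSON input back as a string.
    struct EchoTool;

    impl Tool for EchoTool {
        fn name(&self) -> String {
            "echo".into()
        }

        fn description(&self) -> String {
            "Returns its input unchanged.".into()
        }

        fn run(
            self: Arc<Self>,
            input: serde_json::Value,
            _workspace: WeakView<workspace::Workspace>,
            _cx: &mut WindowContext,
        ) -> Task<Result<String>> {
            // Resolve immediately; a real tool could spawn async work instead.
            Task::ready(Ok(input.to_string()))
        }
    }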
@@ -1,74 +0,0 @@
use std::sync::Arc;

use collections::HashMap;
use derive_more::{Deref, DerefMut};
use gpui::Global;
use gpui::{AppContext, ReadGlobal};
use parking_lot::RwLock;

use crate::Tool;

#[derive(Default, Deref, DerefMut)]
struct GlobalToolRegistry(Arc<ToolRegistry>);

impl Global for GlobalToolRegistry {}

#[derive(Default)]
struct ToolRegistryState {
tools: HashMap<Arc<str>, Arc<dyn Tool>>,
}

#[derive(Default)]
pub struct ToolRegistry {
state: RwLock<ToolRegistryState>,
}

impl ToolRegistry {
/// Returns the global [`ToolRegistry`].
pub fn global(cx: &AppContext) -> Arc<Self> {
GlobalToolRegistry::global(cx).0.clone()
}

/// Returns the global [`ToolRegistry`].
///
/// Inserts a default [`ToolRegistry`] if one does not yet exist.
pub fn default_global(cx: &mut AppContext) -> Arc<Self> {
cx.default_global::<GlobalToolRegistry>().0.clone()
}

pub fn new() -> Arc<Self> {
Arc::new(Self {
state: RwLock::new(ToolRegistryState {
tools: HashMap::default(),
}),
})
}

/// Registers the provided [`Tool`].
pub fn register_tool(&self, tool: impl Tool) {
let mut state = self.state.write();
let tool_name: Arc<str> = tool.name().into();
state.tools.insert(tool_name, Arc::new(tool));
}

/// Unregisters the provided [`Tool`].
pub fn unregister_tool(&self, tool: impl Tool) {
self.unregister_tool_by_name(tool.name().as_str())
}

/// Unregisters the tool with the given name.
pub fn unregister_tool_by_name(&self, tool_name: &str) {
let mut state = self.state.write();
state.tools.remove(tool_name);
}

/// Returns the list of tools in the registry.
pub fn tools(&self) -> Vec<Arc<dyn Tool>> {
self.state.read().tools.values().cloned().collect()
}

/// Returns the [`Tool`] with the given name.
pub fn tool(&self, name: &str) -> Option<Arc<dyn Tool>> {
self.state.read().tools.get(name).cloned()
}
}
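Note (not part of the diff): a hedged sketch of how the ToolRegistry above is driven, using only the methods it defines; `EchoTool` is the hypothetical tool from the earlier sketch.

    // Register a tool in the global registry and look it up by name.
    fn register_example(cx: &mut gpui::AppContext) {
        let registry = assistant_tool::ToolRegistry::default_global(cx);
        registry.register_tool(EchoTool);

        // tool() returns Arc<dyn Tool>, so the trait methods are available directly.
        if let Some(tool) = registry.tool("echo") {
            assert_eq!(tool.name(), "echo");
        }
    }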
@@ -44,7 +44,7 @@ impl SoundRegistry {
let bytes = self
.assets
.load(&path)?
.map(Ok)
.map(|asset| Ok(asset))
.unwrap_or_else(|| Err(anyhow::anyhow!("No such asset available")))?
.into_owned();
let cursor = Cursor::new(bytes);

@@ -33,7 +33,6 @@ impl Sound {
}
}

#[derive(Default)]
pub struct Audio {
_output_stream: Option<OutputStream>,
output_handle: Option<OutputStreamHandle>,
@@ -46,7 +45,10 @@ impl Global for GlobalAudio {}

impl Audio {
pub fn new() -> Self {
Self::default()
Self {
_output_stream: None,
output_handle: None,
}
}

fn ensure_output_exists(&mut self) -> Option<&OutputStreamHandle> {

@@ -95,7 +95,7 @@ struct MacOsUnmounter {
impl Drop for MacOsUnmounter {
fn drop(&mut self) {
let unmount_output = std::process::Command::new("hdiutil")
.args(["detach", "-force"])
.args(&["detach", "-force"])
.arg(&self.mount_path)
.output();

@@ -116,30 +116,27 @@ impl Drop for MacOsUnmounter {
}
}

/// Whether or not to automatically check for updates.
#[derive(Clone, Copy, JsonSchema, Deserialize, Serialize)]
#[serde(default)]
#[serde(transparent)]
struct AutoUpdateSetting(bool);

impl Default for AutoUpdateSetting {
fn default() -> Self {
Self(true)
}
}
/// Whether or not to automatically check for updates.
///
/// Default: true
#[derive(Clone, Copy, Default, JsonSchema, Deserialize, Serialize)]
#[serde(transparent)]
struct AutoUpdateSettingContent(bool);

impl Settings for AutoUpdateSetting {
const KEY: Option<&'static str> = Some("auto_update");

type FileContent = Self;
type FileContent = Option<AutoUpdateSettingContent>;

fn load(sources: SettingsSources<Self::FileContent>, _: &mut AppContext) -> Result<Self> {
let auto_update = [sources.release_channel, sources.user]
.into_iter()
.find_map(|value| value.copied())
.unwrap_or(*sources.default);
.find_map(|value| value.copied().flatten())
.unwrap_or(sources.default.ok_or_else(Self::missing_default)?);

Ok(auto_update)
Ok(Self(auto_update.0))
}
}

@@ -214,7 +211,7 @@ pub fn check(_: &Check, cx: &mut WindowContext) {
return;
}

if let Ok(message) = env::var("ZED_UPDATE_EXPLANATION") {
if let Some(message) = env::var("ZED_UPDATE_EXPLANATION").ok() {
drop(cx.prompt(
gpui::PromptLevel::Info,
"Zed was installed via a package manager.",
@@ -257,7 +254,7 @@ pub fn view_release_notes(_: &ViewReleaseNotes, cx: &mut AppContext) -> Option<(
let url = &auto_updater
.http_client
.build_url(&format!("/releases/{release_channel}/{current_version}"));
cx.open_url(url);
cx.open_url(&url);
}

None
@@ -725,7 +722,7 @@ async fn install_release_linux(
}

let output = Command::new("rsync")
.args(["-av", "--delete"])
.args(&["-av", "--delete"])
.arg(&from)
.arg(&to)
.output()
@@ -757,10 +754,10 @@ async fn install_release_macos(

mounted_app_path.push("/");
let output = Command::new("hdiutil")
.args(["attach", "-nobrowse"])
.args(&["attach", "-nobrowse"])
.arg(&downloaded_dmg)
.arg("-mountroot")
.arg(temp_dir.path())
.arg(&temp_dir.path())
.output()
.await?;

@@ -776,7 +773,7 @@ async fn install_release_macos(
};

let output = Command::new("rsync")
.args(["-av", "--delete"])
.args(&["-av", "--delete"])
.arg(&mounted_app_path)
.arg(&running_app_path)
.output()

@@ -18,12 +18,6 @@ pub struct Breadcrumbs {
subscription: Option<Subscription>,
}

impl Default for Breadcrumbs {
fn default() -> Self {
Self::new()
}
}

impl Breadcrumbs {
pub fn new() -> Self {
Self {

@@ -529,13 +529,14 @@ mod test {
let (a, b) = cx.update(|cx| {
(
one_at_a_time.spawn(cx, |_| async {
panic!("");
assert!(false);
Ok(2)
}),
one_at_a_time.spawn(cx, |_| async { Ok(3) }),
)
});

assert_eq!(a.await.unwrap(), None::<u32>);
assert_eq!(a.await.unwrap(), None);
assert_eq!(b.await.unwrap(), Some(3));

let promise = cx.update(|cx| one_at_a_time.spawn(cx, |_| async { Ok(4) }));

@@ -4,20 +4,30 @@ use schemars::JsonSchema;
use serde_derive::{Deserialize, Serialize};
use settings::{Settings, SettingsSources};

/// Configuration of voice calls in Zed.
#[derive(Clone, Debug, Default, Deserialize, Serialize, JsonSchema)]
#[serde(default)]
#[derive(Deserialize, Debug)]
pub struct CallSettings {
/// Whether the microphone should be muted when joining a channel or a call.
pub mute_on_join: bool,
/// Whether your current project should be shared when joining an empty channel.
pub share_on_join: bool,
}

/// Configuration of voice calls in Zed.
#[derive(Clone, Default, Serialize, Deserialize, JsonSchema, Debug)]
pub struct CallSettingsContent {
/// Whether the microphone should be muted when joining a channel or a call.
///
/// Default: false
pub mute_on_join: Option<bool>,

/// Whether your current project should be shared when joining an empty channel.
///
/// Default: true
pub share_on_join: Option<bool>,
}

impl Settings for CallSettings {
const KEY: Option<&'static str> = Some("calls");

type FileContent = Self;
type FileContent = CallSettingsContent;

fn load(sources: SettingsSources<Self::FileContent>, _: &mut AppContext) -> Result<Self> {
sources.json_merge()

@@ -259,13 +259,13 @@ impl Room {
None
};

let did_join = room
match room
.update(&mut cx, |room, cx| {
room.leave_when_empty = true;
room.call(called_user_id, initial_project_id, cx)
})?
.await;
match did_join {
.await
{
Ok(()) => Ok(room),
Err(error) => Err(error.context("room creation failed")),
}
@@ -493,7 +493,7 @@ impl Room {
// we leave the room and return an error.
if let Some(this) = this.upgrade() {
log::info!("reconnection failed, leaving room");
this.update(&mut cx, |this, cx| this.leave(cx))?.await?;
let _ = this.update(&mut cx, |this, cx| this.leave(cx))?.await?;
}
Err(anyhow!(
"can't reconnect to room: client failed to re-establish connection"
@@ -933,7 +933,7 @@ impl Room {
let list = this
.follows_by_leader_id_project_id
.entry((leader, project_id))
.or_default();
.or_insert(Vec::new());
if !list.contains(&follower) {
list.push(follower);
}
@@ -942,7 +942,7 @@ impl Room {
this.pending_room_update.take();
if this.should_leave() {
log::info!("room is empty, leaving");
this.leave(cx).detach();
let _ = this.leave(cx).detach();
}

this.user_store.update(cx, |user_store, cx| {
@@ -1017,11 +1017,19 @@ impl Room {
.collect::<Vec<u64>>();
speaker_ids.sort_unstable();
for (sid, participant) in &mut self.remote_participants {
participant.speaking = speaker_ids.binary_search(sid).is_ok();
if let Ok(_) = speaker_ids.binary_search(sid) {
participant.speaking = true;
} else {
participant.speaking = false;
}
}
if let Some(id) = self.client.user_id() {
if let Some(room) = &mut self.live_kit {
room.speaking = speaker_ids.binary_search(&id).is_ok();
if let Ok(_) = speaker_ids.binary_search(&id) {
room.speaking = true;
} else {
room.speaking = false;
}
}
}
}

@@ -18,6 +18,6 @@ mod channel_store_tests;

pub fn init(client: &Arc<Client>, user_store: Model<UserStore>, cx: &mut AppContext) {
channel_store::init(client, user_store, cx);
channel_buffer::init(&client.clone().into());
channel_chat::init(&client.clone().into());
channel_buffer::init(client);
channel_chat::init(client);
}

@@ -5,7 +5,7 @@ use collections::HashMap;
use gpui::{AppContext, AsyncAppContext, Context, EventEmitter, Model, ModelContext, Task};
use language::proto::serialize_version;
use rpc::{
proto::{self, AnyProtoClient, PeerId},
proto::{self, PeerId},
TypedEnvelope,
};
use std::{sync::Arc, time::Duration};
@@ -14,7 +14,7 @@ use util::ResultExt;

pub const ACKNOWLEDGE_DEBOUNCE_INTERVAL: Duration = Duration::from_millis(250);

pub(crate) fn init(client: &AnyProtoClient) {
pub(crate) fn init(client: &Arc<Client>) {
client.add_model_message_handler(ChannelBuffer::handle_update_channel_buffer);
client.add_model_message_handler(ChannelBuffer::handle_update_channel_buffer_collaborators);
}
@@ -177,10 +177,13 @@ impl ChannelBuffer {
match event {
language::Event::Operation(operation) => {
if *ZED_ALWAYS_ACTIVE {
if let language::Operation::UpdateSelections { selections, .. } = operation {
if selections.is_empty() {
return;
match operation {
language::Operation::UpdateSelections { selections, .. } => {
if selections.is_empty() {
return;
}
}
_ => {}
}
}
let operation = language::proto::serialize_operation(operation);

@@ -11,7 +11,6 @@ use gpui::{
AppContext, AsyncAppContext, Context, EventEmitter, Model, ModelContext, Task, WeakModel,
};
use rand::prelude::*;
use rpc::proto::AnyProtoClient;
use std::{
ops::{ControlFlow, Range},
sync::Arc,
@@ -61,9 +60,9 @@ pub enum ChannelMessageId {
Pending(usize),
}

impl From<ChannelMessageId> for Option<u64> {
fn from(val: ChannelMessageId) -> Self {
match val {
impl Into<Option<u64>> for ChannelMessageId {
fn into(self) -> Option<u64> {
match self {
ChannelMessageId::Saved(id) => Some(id),
ChannelMessageId::Pending(_) => None,
}
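Note (not part of the diff): the hunk above shows the two spellings of this conversion side by side, a manual `impl Into<Option<u64>> for ChannelMessageId` and an `impl From<ChannelMessageId> for Option<u64>`. Implementing `From` is generally preferred in Rust because the standard library's blanket impl then provides `Into` for free, so call sites keep working either way; for example (illustrative only):

    // With `impl From<ChannelMessageId> for Option<u64>` in place,
    // callers can still rely on the blanket Into impl:
    let id: Option<u64> = ChannelMessageId::Saved(42).into();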
@@ -96,7 +95,7 @@ pub enum ChannelChatEvent {
}

impl EventEmitter<ChannelChatEvent> for ChannelChat {}
pub fn init(client: &AnyProtoClient) {
pub fn init(client: &Arc<Client>) {
client.add_model_message_handler(ChannelChat::handle_message_sent);
client.add_model_message_handler(ChannelChat::handle_message_removed);
client.add_model_message_handler(ChannelChat::handle_message_updated);

@@ -249,14 +249,15 @@ impl ChannelStore {
}

pub fn initialize(&mut self) {
if !self.did_subscribe
&& self
if !self.did_subscribe {
if self
.client
.send(proto::SubscribeToChannels {})
.log_err()
.is_some()
{
self.did_subscribe = true;
{
self.did_subscribe = true;
}
}
}

@@ -422,7 +423,7 @@ impl ChannelStore {
) {
self.channel_states
.entry(channel_id)
.or_default()
.or_insert_with(|| Default::default())
.acknowledge_message_id(message_id);
cx.notify();
}
@@ -435,7 +436,7 @@ impl ChannelStore {
) {
self.channel_states
.entry(channel_id)
.or_default()
.or_insert_with(|| Default::default())
.update_latest_message_id(message_id);
cx.notify();
}
@@ -449,7 +450,7 @@ impl ChannelStore {
) {
self.channel_states
.entry(channel_id)
.or_default()
.or_insert_with(|| Default::default())
.acknowledge_notes_version(epoch, version);
cx.notify()
}
@@ -463,7 +464,7 @@ impl ChannelStore {
) {
self.channel_states
.entry(channel_id)
.or_default()
.or_insert_with(|| Default::default())
.update_latest_notes_version(epoch, version);
cx.notify()
}
@@ -923,7 +924,7 @@ impl ChannelStore {
if let Some(role) = ChannelRole::from_i32(membership.role) {
this.channel_states
.entry(ChannelId(membership.channel_id))
.or_default()
.or_insert_with(|| ChannelState::default())
.set_role(role)
}
}
@@ -1093,7 +1094,11 @@ impl ChannelStore {
id: ChannelId(channel.id),
visibility: channel.visibility(),
name: channel.name.into(),
parent_path: channel.parent_path.into_iter().map(ChannelId).collect(),
parent_path: channel
.parent_path
.into_iter()
.map(|cid| ChannelId(cid))
.collect(),
}),
),
}
@@ -1108,11 +1113,14 @@ impl ChannelStore {

if channels_changed {
if !payload.delete_channels.is_empty() {
let delete_channels: Vec<ChannelId> =
payload.delete_channels.into_iter().map(ChannelId).collect();
let delete_channels: Vec<ChannelId> = payload
.delete_channels
.into_iter()
.map(|cid| ChannelId(cid))
.collect();
self.channel_index.delete_channels(&delete_channels);
self.channel_participants
.retain(|channel_id, _| !delete_channels.contains(channel_id));
.retain(|channel_id, _| !delete_channels.contains(&channel_id));

for channel_id in &delete_channels {
let channel_id = *channel_id;

@@ -12,20 +12,23 @@ use async_tungstenite::tungstenite::{
error::Error as WebsocketError,
http::{HeaderValue, Request, StatusCode},
};
use chrono::{DateTime, Utc};
use clock::SystemClock;
use collections::HashMap;
use futures::{
channel::oneshot, future::BoxFuture, AsyncReadExt, FutureExt, SinkExt, Stream, StreamExt,
TryFutureExt as _, TryStreamExt,
channel::oneshot,
future::{BoxFuture, LocalBoxFuture},
AsyncReadExt, FutureExt, SinkExt, Stream, StreamExt, TryFutureExt as _, TryStreamExt,
};
use gpui::{
actions, AnyModel, AnyWeakModel, AppContext, AsyncAppContext, Global, Model, Task, WeakModel,
};
use gpui::{actions, AppContext, AsyncAppContext, Global, Model, Task, WeakModel};
use http_client::{AsyncBody, HttpClient, HttpClientWithUrl};
use parking_lot::RwLock;
use postage::watch;
use proto::{AnyProtoClient, EntityMessageSubscriber, ProtoClient, ProtoMessageHandlerSet};
use proto::ProtoClient;
use rand::prelude::*;
use release_channel::{AppVersion, ReleaseChannel};
use rpc::proto::{AnyTypedEnvelope, EnvelopedMessage, PeerId, RequestMessage};
use rpc::proto::{AnyTypedEnvelope, EntityMessage, EnvelopedMessage, PeerId, RequestMessage};
use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
use settings::{Settings, SettingsSources};
@@ -99,68 +102,44 @@ pub const CONNECTION_TIMEOUT: Duration = Duration::from_secs(20);

actions!(client, [SignIn, SignOut, Reconnect]);

#[derive(Clone, Serialize, Deserialize, JsonSchema)]
#[serde(default)]
pub struct ClientSettings {
/// The server to connect to. If the environment variable
/// ZED_SERVER_URL is set, it will override this setting.
pub server_url: String,
#[derive(Clone, Default, Serialize, Deserialize, JsonSchema)]
pub struct ClientSettingsContent {
server_url: Option<String>,
}

impl Default for ClientSettings {
fn default() -> Self {
Self {
server_url: "https://zed.dev".to_owned(),
}
}
#[derive(Deserialize)]
pub struct ClientSettings {
pub server_url: String,
}

impl Settings for ClientSettings {
const KEY: Option<&'static str> = None;

type FileContent = Self;
type FileContent = ClientSettingsContent;

fn load(sources: SettingsSources<Self::FileContent>, _: &mut AppContext) -> Result<Self> {
let mut result = sources.json_merge::<Self>()?;
if let Some(server_url) = &*ZED_SERVER_URL {
result.server_url.clone_from(server_url)
result.server_url.clone_from(&server_url)
}
Ok(result)
}
}

#[derive(Default, Clone, Serialize, Deserialize, JsonSchema)]
#[serde(default)]
pub struct ProxySettings {
/// Set a proxy to use. The proxy protocol is specified by the URI scheme.
///
/// Supported URI scheme: `http`, `https`, `socks4`, `socks4a`, `socks5`,
/// `socks5h`. `http` will be used when no scheme is specified.
///
/// By default no proxy will be used, or Zed will try get proxy settings from
/// environment variables.
///
/// Examples:
/// - "proxy": "socks5://localhost:10808"
/// - "proxy": "http://127.0.0.1:10809"
#[schemars(example = "Self::example_1")]
#[schemars(example = "Self::example_2")]
pub proxy: Option<String>,
pub struct ProxySettingsContent {
proxy: Option<String>,
}

impl ProxySettings {
fn example_1() -> String {
"http://127.0.0.1:10809".to_owned()
}
fn example_2() -> String {
"socks5://localhost:10808".to_owned()
}
#[derive(Deserialize, Default)]
pub struct ProxySettings {
pub proxy: Option<String>,
}

impl Settings for ProxySettings {
const KEY: Option<&'static str> = None;

type FileContent = Self;
type FileContent = ProxySettingsContent;

fn load(sources: SettingsSources<Self::FileContent>, _: &mut AppContext) -> Result<Self> {
Ok(Self {
@@ -228,7 +207,6 @@ pub struct Client {
telemetry: Arc<Telemetry>,
credentials_provider: Arc<dyn CredentialsProvider + Send + Sync + 'static>,
state: RwLock<ClientState>,
handler_set: parking_lot::Mutex<ProtoMessageHandlerSet>,

#[allow(clippy::type_complexity)]
#[cfg(any(test, feature = "test-support"))]
@@ -325,7 +303,30 @@ impl Status {
struct ClientState {
credentials: Option<Credentials>,
status: (watch::Sender<Status>, watch::Receiver<Status>),
entity_id_extractors: HashMap<TypeId, fn(&dyn AnyTypedEnvelope) -> u64>,
_reconnect_task: Option<Task<()>>,
entities_by_type_and_remote_id: HashMap<(TypeId, u64), WeakSubscriber>,
models_by_message_type: HashMap<TypeId, AnyWeakModel>,
entity_types_by_message_type: HashMap<TypeId, TypeId>,
#[allow(clippy::type_complexity)]
message_handlers: HashMap<
TypeId,
Arc<
dyn Send
+ Sync
+ Fn(
AnyModel,
Box<dyn AnyTypedEnvelope>,
&Arc<Client>,
AsyncAppContext,
) -> LocalBoxFuture<'static, Result<()>>,
>,
>,
}

enum WeakSubscriber {
Entity { handle: AnyWeakModel },
Pending(Vec<Box<dyn AnyTypedEnvelope>>),
}

#[derive(Clone, Debug, Eq, PartialEq)]
@@ -377,7 +378,12 @@ impl Default for ClientState {
Self {
credentials: None,
status: watch::channel_with(Status::SignedOut),
entity_id_extractors: Default::default(),
_reconnect_task: None,
models_by_message_type: Default::default(),
entities_by_type_and_remote_id: Default::default(),
entity_types_by_message_type: Default::default(),
message_handlers: Default::default(),
}
}
}
@@ -398,13 +404,13 @@ impl Drop for Subscription {
match self {
Subscription::Entity { client, id } => {
if let Some(client) = client.upgrade() {
let mut state = client.handler_set.lock();
let mut state = client.state.write();
let _ = state.entities_by_type_and_remote_id.remove(id);
}
}
Subscription::Message { client, id } => {
if let Some(client) = client.upgrade() {
let mut state = client.handler_set.lock();
let mut state = client.state.write();
let _ = state.entity_types_by_message_type.remove(id);
let _ = state.message_handlers.remove(id);
}
@@ -423,31 +429,22 @@ pub struct PendingEntitySubscription<T: 'static> {
impl<T: 'static> PendingEntitySubscription<T> {
pub fn set_model(mut self, model: &Model<T>, cx: &mut AsyncAppContext) -> Subscription {
self.consumed = true;
let mut handlers = self.client.handler_set.lock();
let mut state = self.client.state.write();
let id = (TypeId::of::<T>(), self.remote_id);
let Some(EntityMessageSubscriber::Pending(messages)) =
handlers.entities_by_type_and_remote_id.remove(&id)
let Some(WeakSubscriber::Pending(messages)) =
state.entities_by_type_and_remote_id.remove(&id)
else {
unreachable!()
};

handlers.entities_by_type_and_remote_id.insert(
state.entities_by_type_and_remote_id.insert(
id,
EntityMessageSubscriber::Entity {
WeakSubscriber::Entity {
handle: model.downgrade().into(),
},
);
drop(handlers);
drop(state);
for message in messages {
let client_id = self.client.id();
let type_name = message.payload_type_name();
let sender_id = message.original_sender_id();
log::debug!(
"handling queued rpc message. client_id:{}, sender_id:{:?}, type:{}",
client_id,
sender_id,
type_name
);
self.client.handle_message(message, cx);
}
Subscription::Entity {
@@ -460,8 +457,8 @@ impl<T: 'static> PendingEntitySubscription<T> {
impl<T: 'static> Drop for PendingEntitySubscription<T> {
fn drop(&mut self) {
if !self.consumed {
let mut state = self.client.handler_set.lock();
if let Some(EntityMessageSubscriber::Pending(messages)) = state
let mut state = self.client.state.write();
if let Some(WeakSubscriber::Pending(messages)) = state
.entities_by_type_and_remote_id
.remove(&(TypeId::of::<T>(), self.remote_id))
{
@@ -542,7 +539,6 @@ impl Client {
http,
credentials_provider,
state: Default::default(),
handler_set: Default::default(),

#[cfg(any(test, feature = "test-support"))]
authenticate: Default::default(),
@@ -586,7 +582,10 @@ impl Client {
pub fn teardown(&self) {
let mut state = self.state.write();
state._reconnect_task.take();
self.handler_set.lock().clear();
state.message_handlers.clear();
state.models_by_message_type.clear();
state.entities_by_type_and_remote_id.clear();
state.entity_id_extractors.clear();
self.peer.teardown();
}

@@ -699,14 +698,14 @@ impl Client {
{
let id = (TypeId::of::<T>(), remote_id);

let mut state = self.handler_set.lock();
let mut state = self.state.write();
if state.entities_by_type_and_remote_id.contains_key(&id) {
return Err(anyhow!("already subscribed to entity"));
}

state
.entities_by_type_and_remote_id
.insert(id, EntityMessageSubscriber::Pending(Default::default()));
.insert(id, WeakSubscriber::Pending(Default::default()));

Ok(PendingEntitySubscription {
client: self.clone(),
@@ -743,13 +742,13 @@ impl Client {
E: 'static,
H: 'static
+ Sync
+ Fn(Model<E>, TypedEnvelope<M>, AnyProtoClient, AsyncAppContext) -> F
+ Fn(Model<E>, TypedEnvelope<M>, Arc<Self>, AsyncAppContext) -> F
+ Send
+ Sync,
F: 'static + Future<Output = Result<()>>,
{
let message_type_id = TypeId::of::<M>();
let mut state = self.handler_set.lock();
let mut state = self.state.write();
state
.models_by_message_type
.insert(message_type_id, entity.into());
@@ -794,18 +793,85 @@ impl Client {
})
}

pub fn add_model_message_handler<M, E, H, F>(self: &Arc<Self>, handler: H)
where
M: EntityMessage,
E: 'static,
H: 'static + Fn(Model<E>, TypedEnvelope<M>, AsyncAppContext) -> F + Send + Sync,
F: 'static + Future<Output = Result<()>>,
{
self.add_entity_message_handler::<M, E, _, _>(move |subscriber, message, _, cx| {
handler(subscriber.downcast::<E>().unwrap(), message, cx)
})
}

fn add_entity_message_handler<M, E, H, F>(self: &Arc<Self>, handler: H)
where
M: EntityMessage,
E: 'static,
H: 'static + Fn(AnyModel, TypedEnvelope<M>, Arc<Self>, AsyncAppContext) -> F + Send + Sync,
F: 'static + Future<Output = Result<()>>,
{
let model_type_id = TypeId::of::<E>();
let message_type_id = TypeId::of::<M>();

let mut state = self.state.write();
state
.entity_types_by_message_type
.insert(message_type_id, model_type_id);
state
.entity_id_extractors
.entry(message_type_id)
.or_insert_with(|| {
|envelope| {
envelope
.as_any()
.downcast_ref::<TypedEnvelope<M>>()
.unwrap()
.payload
.remote_entity_id()
}
});
let prev_handler = state.message_handlers.insert(
message_type_id,
Arc::new(move |handle, envelope, client, cx| {
let envelope = envelope.into_any().downcast::<TypedEnvelope<M>>().unwrap();
handler(handle, *envelope, client.clone(), cx).boxed_local()
}),
);
if prev_handler.is_some() {
panic!("registered handler for the same message twice");
}
}

pub fn add_model_request_handler<M, E, H, F>(self: &Arc<Self>, handler: H)
where
M: EntityMessage + RequestMessage,
E: 'static,
H: 'static + Fn(Model<E>, TypedEnvelope<M>, AsyncAppContext) -> F + Send + Sync,
F: 'static + Future<Output = Result<M::Response>>,
{
self.add_entity_message_handler::<M, E, _, _>(move |entity, envelope, client, cx| {
Self::respond_to_request::<M, _>(
envelope.receipt(),
handler(entity.downcast::<E>().unwrap(), envelope, cx),
client,
)
})
}

async fn respond_to_request<T: RequestMessage, F: Future<Output = Result<T::Response>>>(
receipt: Receipt<T>,
response: F,
client: AnyProtoClient,
client: Arc<Self>,
) -> Result<()> {
match response.await {
Ok(response) => {
client.send_response(receipt.message_id, response)?;
client.respond(receipt, response)?;
Ok(())
}
Err(error) => {
client.send_response(receipt.message_id, error.to_proto())?;
client.respond_with_error(receipt, error.to_proto())?;
Err(error)
}
}
@@ -1165,7 +1231,7 @@ impl Client {
request_headers.insert("x-zed-app-version", HeaderValue::from_str(&app_version)?);
request_headers.insert(
"x-zed-release-channel",
HeaderValue::from_str(release_channel.map(|r| r.dev_name()).unwrap_or("unknown"))?,
HeaderValue::from_str(&release_channel.map(|r| r.dev_name()).unwrap_or("unknown"))?,
);

match url_scheme {
@@ -1334,7 +1400,6 @@ impl Client {
struct GithubUser {
id: i32,
login: String,
created_at: DateTime<Utc>,
}

let request = {
@@ -1368,42 +1433,26 @@ impl Client {
);
}

serde_json::from_slice::<GithubUser>(body.as_slice()).map_err(|err| {
let user = serde_json::from_slice::<GithubUser>(body.as_slice()).map_err(|err| {
log::error!("Error deserializing: {:?}", err);
log::error!(
"GitHub API response text: {:?}",
String::from_utf8_lossy(body.as_slice())
);
anyhow!("error deserializing GitHub user")
})?
})?;

user
};

let query_params = [
("github_login", &github_user.login),
("github_user_id", &github_user.id.to_string()),
(
"github_user_created_at",
&github_user.created_at.to_rfc3339(),
),
];

// Use the collab server's admin API to retrieve the ID
// Use the collab server's admin API to retrieve the id
// of the impersonated user.
let mut url = self.rpc_url(http.clone(), None).await?;
url.set_path("/user");
url.set_query(Some(
&query_params
.iter()
.map(|(key, value)| {
format!(
"{}={}",
key,
url::form_urlencoded::byte_serialize(value.as_bytes()).collect::<String>()
)
})
.collect::<Vec<String>>()
.join("&"),
));
url.set_query(Some(&format!(
"github_login={}&github_user_id={}",
github_user.login, github_user.id
)));
let request: http_client::Request<AsyncBody> = Request::get(url.as_str())
.header("Authorization", format!("token {api_token}"))
.body("".into())?;
@@ -1430,7 +1479,7 @@ impl Client {

pub async fn sign_out(self: &Arc<Self>, cx: &AsyncAppContext) {
self.state.write().credentials = None;
self.disconnect(cx);
self.disconnect(&cx);

if self.has_credentials(cx).await {
self.credentials_provider
@@ -1463,6 +1512,11 @@ impl Client {
self.peer.send(self.connection_id()?, message)
}

pub fn send_dynamic(&self, envelope: proto::Envelope) -> Result<()> {
let connection_id = self.connection_id()?;
self.peer.send_dynamic(connection_id, envelope)
}

pub fn request<T: RequestMessage>(
&self,
request: T,
@@ -1544,56 +1598,115 @@ impl Client {
}
}

fn respond<T: RequestMessage>(&self, receipt: Receipt<T>, response: T::Response) -> Result<()> {
log::debug!("rpc respond. client_id:{}. name:{}", self.id(), T::NAME);
self.peer.respond(receipt, response)
}

fn respond_with_error<T: RequestMessage>(
&self,
receipt: Receipt<T>,
error: proto::Error,
) -> Result<()> {
log::debug!("rpc respond. client_id:{}. name:{}", self.id(), T::NAME);
self.peer.respond_with_error(receipt, error)
}

fn handle_message(
self: &Arc<Client>,
message: Box<dyn AnyTypedEnvelope>,
cx: &AsyncAppContext,
) {
let sender_id = message.sender_id();
let request_id = message.message_id();
let mut state = self.state.write();
let type_name = message.payload_type_name();
let original_sender_id = message.original_sender_id();
let payload_type_id = message.payload_type_id();
let sender_id = message.original_sender_id();

if let Some(future) = ProtoMessageHandlerSet::handle_message(
&self.handler_set,
message,
self.clone().into(),
cx.clone(),
) {
let mut subscriber = None;

if let Some(handle) = state
.models_by_message_type
.get(&payload_type_id)
.and_then(|handle| handle.upgrade())
{
subscriber = Some(handle);
} else if let Some((extract_entity_id, entity_type_id)) =
state.entity_id_extractors.get(&payload_type_id).zip(
state
.entity_types_by_message_type
.get(&payload_type_id)
.copied(),
)
{
let entity_id = (extract_entity_id)(message.as_ref());

match state
.entities_by_type_and_remote_id
.get_mut(&(entity_type_id, entity_id))
{
Some(WeakSubscriber::Pending(pending)) => {
pending.push(message);
return;
}
Some(weak_subscriber) => match weak_subscriber {
WeakSubscriber::Entity { handle } => {
subscriber = handle.upgrade();
}

WeakSubscriber::Pending(_) => {}
},
_ => {}
}
}

let subscriber = if let Some(subscriber) = subscriber {
subscriber
} else {
log::info!("unhandled message {}", type_name);
self.peer.respond_with_unhandled_message(message).log_err();
return;
};

let handler = state.message_handlers.get(&payload_type_id).cloned();
// Dropping the state prevents deadlocks if the handler interacts with rpc::Client.
// It also ensures we don't hold the lock while yielding back to the executor, as
// that might cause the executor thread driving this future to block indefinitely.
drop(state);

if let Some(handler) = handler {
let future = handler(subscriber, message, self, cx.clone());
let client_id = self.id();
log::debug!(
"rpc message received. client_id:{}, sender_id:{:?}, type:{}",
client_id,
original_sender_id,
sender_id,
type_name
);
cx.spawn(move |_| async move {
match future.await {
Ok(()) => {
log::debug!(
"rpc message handled. client_id:{}, sender_id:{:?}, type:{}",
client_id,
original_sender_id,
type_name
);
match future.await {
Ok(()) => {
log::debug!(
"rpc message handled. client_id:{}, sender_id:{:?}, type:{}",
client_id,
sender_id,
type_name
);
}
Err(error) => {
log::error!(
"error handling message. client_id:{}, sender_id:{:?}, type:{}, error:{:?}",
client_id,
sender_id,
type_name,
error
);
}
}
Err(error) => {
log::error!(
"error handling message. client_id:{}, sender_id:{:?}, type:{}, error:{:?}",
client_id,
original_sender_id,
type_name,
error
);
}
}
})
.detach();
})
.detach();
} else {
log::info!("unhandled message {}", type_name);
self.peer
.respond_with_unhandled_message(sender_id.into(), request_id, type_name)
.log_err();
self.peer.respond_with_unhandled_message(message).log_err();
}
}

@@ -1611,24 +1724,8 @@ impl ProtoClient for Client {
self.request_dynamic(envelope, request_type).boxed()
}

fn send(&self, envelope: proto::Envelope, message_type: &'static str) -> Result<()> {
log::debug!("rpc send. client_id:{}, name:{}", self.id(), message_type);
let connection_id = self.connection_id()?;
self.peer.send_dynamic(connection_id, envelope)
}

fn send_response(&self, envelope: proto::Envelope, message_type: &'static str) -> Result<()> {
log::debug!(
"rpc respond. client_id:{}, name:{}",
self.id(),
message_type
);
let connection_id = self.connection_id()?;
self.peer.send_dynamic(connection_id, envelope)
}

fn message_handler_set(&self) -> &parking_lot::Mutex<ProtoMessageHandlerSet> {
&self.handler_set
fn send(&self, envelope: proto::Envelope) -> Result<()> {
self.send_dynamic(envelope)
}
}

@@ -1972,7 +2069,7 @@ mod tests {

let (done_tx1, mut done_rx1) = smol::channel::unbounded();
let (done_tx2, mut done_rx2) = smol::channel::unbounded();
AnyProtoClient::from(client.clone()).add_model_message_handler(
client.add_model_message_handler(
move |model: Model<TestModel>, _: TypedEnvelope<proto::JoinProject>, mut cx| {
match model.update(&mut cx, |model, _| model.id).unwrap() {
1 => done_tx1.try_send(()).unwrap(),

@@ -35,8 +35,12 @@ pub(crate) async fn connect_socks_proxy_stream(
}

fn parse_socks_proxy(proxy: Option<&Uri>) -> Option<((String, u16), SocksVersion)> {
let proxy_uri = proxy?;
let scheme = proxy_uri.scheme_str()?;
let Some(proxy_uri) = proxy else {
return None;
};
let Some(scheme) = proxy_uri.scheme_str() else {
return None;
};
let socks_version = if scheme.starts_with("socks4") {
// socks4
SocksVersion::V4

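Note (not part of the diff): the parse_socks_proxy hunk above shows the two equivalent spellings of an early return on a missing value, explicit `let ... else { return None; }` blocks and the `?` operator. On Option they behave identically; a minimal illustration in plain std Rust:

    // `s?` returns None from the function when s is None, just like
    // `let Some(s) = s else { return None; };`
    fn first_char(s: Option<&str>) -> Option<char> {
        s?.chars().next()
    }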
@@ -670,24 +670,6 @@ impl Telemetry {
}
}

pub fn calculate_json_checksum(json: &impl AsRef<[u8]>) -> Option<String> {
let Some(checksum_seed) = &*ZED_CLIENT_CHECKSUM_SEED else {
return None;
};

let mut summer = Sha256::new();
summer.update(checksum_seed);
summer.update(json);
summer.update(checksum_seed);
let mut checksum = String::new();
for byte in summer.finalize().as_slice() {
use std::fmt::Write;
write!(&mut checksum, "{:02x}", byte).unwrap();
}

Some(checksum)
}

#[cfg(test)]
mod tests {
use super::*;
@@ -846,3 +828,21 @@ mod tests {
&& telemetry.state.lock().first_event_date_time.is_none()
}
}

pub fn calculate_json_checksum(json: &impl AsRef<[u8]>) -> Option<String> {
let Some(checksum_seed) = &*ZED_CLIENT_CHECKSUM_SEED else {
return None;
};

let mut summer = Sha256::new();
summer.update(checksum_seed);
summer.update(&json);
summer.update(checksum_seed);
let mut checksum = String::new();
for byte in summer.finalize().as_slice() {
use std::fmt::Write;
write!(&mut checksum, "{:02x}", byte).unwrap();
}

Some(checksum)
}

@@ -200,7 +200,7 @@ impl UserStore {
cx.update(|cx| {
if let Some(info) = info {
let disable_staff = std::env::var("ZED_DISABLE_STAFF")
.map_or(false, |v| !v.is_empty() && v != "0");
.map_or(false, |v| v != "" && v != "0");
let staff = info.staff && !disable_staff;
cx.update_flags(staff, info.flags);
client.telemetry.set_authenticated_user_info(

@@ -111,7 +111,7 @@ struct AuthenticatedUserParams {
github_user_id: i32,
github_login: String,
github_email: Option<String>,
github_user_created_at: chrono::DateTime<chrono::Utc>,
github_user_created_at: Option<chrono::DateTime<chrono::Utc>>,
}

#[derive(Debug, Serialize)]
@@ -137,7 +137,7 @@ async fn get_authenticated_user(
)
.await?;
let metrics_id = app.db.get_user_metrics_id(user.id).await?;
Ok(Json(AuthenticatedUserResponse { user, metrics_id }))
return Ok(Json(AuthenticatedUserResponse { user, metrics_id }));
}

#[derive(Deserialize, Debug)]

@@ -600,7 +600,7 @@ async fn find_or_create_billing_customer(
// there's nothing more we need to do.
if let Some(billing_customer) = app
.db
.get_billing_customer_by_stripe_customer_id(customer_id)
.get_billing_customer_by_stripe_customer_id(&customer_id)
.await?
{
return Ok(Some(billing_customer));
@@ -609,7 +609,7 @@ async fn find_or_create_billing_customer(
// If all we have is a customer ID, resolve it to a full customer record by
// hitting the Stripe API.
let customer = match customer_or_id {
Expandable::Id(id) => Customer::retrieve(stripe_client, &id, &[]).await?,
Expandable::Id(id) => Customer::retrieve(&stripe_client, &id, &[]).await?,
Expandable::Object(customer) => *customer,
};


@@ -30,7 +30,7 @@ struct CheckIsContributorParams {
}

impl CheckIsContributorParams {
fn into_contributor_selector(self) -> Result<ContributorSelector> {
fn as_contributor_selector(self) -> Result<ContributorSelector> {
if let Some(github_user_id) = self.github_user_id {
return Ok(ContributorSelector::GitHubUserId { github_user_id });
}
@@ -54,7 +54,7 @@ async fn check_is_contributor(
Extension(app): Extension<Arc<AppState>>,
Query(params): Query<CheckIsContributorParams>,
) -> Result<Json<CheckIsContributorResponse>> {
let params = params.into_contributor_selector()?;
let params = params.as_contributor_selector()?;

if RenovateBot::is_renovate_bot(&params) {
return Ok(Json(CheckIsContributorResponse {

@@ -397,7 +397,7 @@ pub async fn post_events(
match &wrapper.event {
Event::Editor(event) => to_upload.editor_events.push(EditorEventRow::from_event(
event.clone(),
wrapper,
&wrapper,
&request_body,
first_event_at,
country_code.clone(),
@@ -410,7 +410,7 @@ pub async fn post_events(
.inline_completion_events
.push(InlineCompletionEventRow::from_event(
event.clone(),
wrapper,
&wrapper,
&request_body,
first_event_at,
country_code.clone(),
@@ -419,7 +419,7 @@ pub async fn post_events(
}
Event::Call(event) => to_upload.call_events.push(CallEventRow::from_event(
event.clone(),
wrapper,
&wrapper,
&request_body,
first_event_at,
checksum_matched,
@@ -429,7 +429,7 @@ pub async fn post_events(
.assistant_events
.push(AssistantEventRow::from_event(
event.clone(),
wrapper,
&wrapper,
&request_body,
first_event_at,
checksum_matched,
@@ -437,42 +437,42 @@ pub async fn post_events(
}
Event::Cpu(event) => to_upload.cpu_events.push(CpuEventRow::from_event(
event.clone(),
wrapper,
&wrapper,
&request_body,
first_event_at,
checksum_matched,
)),
Event::Memory(event) => to_upload.memory_events.push(MemoryEventRow::from_event(
event.clone(),
wrapper,
&wrapper,
&request_body,
first_event_at,
checksum_matched,
)),
Event::App(event) => to_upload.app_events.push(AppEventRow::from_event(
event.clone(),
wrapper,
&wrapper,
&request_body,
first_event_at,
checksum_matched,
)),
Event::Setting(event) => to_upload.setting_events.push(SettingEventRow::from_event(
event.clone(),
wrapper,
&wrapper,
&request_body,
first_event_at,
checksum_matched,
)),
Event::Edit(event) => to_upload.edit_events.push(EditEventRow::from_event(
event.clone(),
wrapper,
&wrapper,
&request_body,
first_event_at,
checksum_matched,
)),
Event::Action(event) => to_upload.action_events.push(ActionEventRow::from_event(
event.clone(),
wrapper,
&wrapper,
&request_body,
first_event_at,
checksum_matched,
@@ -486,7 +486,7 @@ pub async fn post_events(
.extension_events
.push(ExtensionEventRow::from_event(
event.clone(),
wrapper,
&wrapper,
&request_body,
metadata,
first_event_at,
@@ -495,7 +495,7 @@ pub async fn post_events(
}
Event::Repl(event) => to_upload.repl_events.push(ReplEventRow::from_event(
event.clone(),
wrapper,
&wrapper,
&request_body,
first_event_at,
checksum_matched,
@@ -1326,11 +1326,13 @@ impl ActionEventRow {
}

pub fn calculate_json_checksum(app: Arc<AppState>, json: &impl AsRef<[u8]>) -> Option<Vec<u8>> {
let checksum_seed = app.config.zed_client_checksum_seed.as_ref()?;
let Some(checksum_seed) = app.config.zed_client_checksum_seed.as_ref() else {
return None;
};

let mut summer = Sha256::new();
summer.update(checksum_seed);
summer.update(json);
summer.update(&json);
summer.update(checksum_seed);
Some(summer.finalize().into_iter().collect())
}

@@ -52,7 +52,7 @@ async fn get_extensions(
let extension_id = filter.to_lowercase();
let mut exact_match = None;
extensions.retain(|extension| {
if extension.id.as_ref() == extension_id {
if extension.id.as_ref() == &extension_id {
exact_match = Some(extension.clone());
false
} else {
@@ -319,14 +319,14 @@ async fn fetch_extensions_from_blob_store(
if let Some(extension) = fetch_extension_manifest(
blob_store_client,
blob_store_bucket,
extension_id,
published_version,
&extension_id,
&published_version,
)
.await
.log_err()
{
new_versions
.entry(extension_id)
.entry(&extension_id)
.or_default()
.push(extension);
}

@@ -85,7 +85,7 @@ pub async fn validate_header<B>(mut req: Request<B>, next: Next<B>) -> impl Into
impersonator_id: None,
})
} else {
verify_access_token(access_token, user_id, &state.db).await
verify_access_token(&access_token, user_id, &state.db).await
};

if let Ok(validate_result) = validate_result {
@@ -202,7 +202,7 @@ pub async fn verify_access_token(
.unwrap()
});

let token: AccessTokenJson = serde_json::from_str(token)?;
let token: AccessTokenJson = serde_json::from_str(&token)?;

let db_token = db.get_access_token(token.id).await?;
let token_user_id = db_token.impersonated_user_id.unwrap_or(db_token.user_id);
@@ -249,7 +249,7 @@ pub async fn verify_dev_server_token(
db: &Arc<Database>,
) -> anyhow::Result<dev_server::Model> {
let (id, token) = split_dev_server_token(dev_server_token)?;
let token_hash = hash_access_token(token);
let token_hash = hash_access_token(&token);
let server = db.get_dev_server(id).await?;

if server
@@ -301,16 +301,18 @@ mod test {
.await
.unwrap();

let token = create_access_token(db, user.user_id, None).await.unwrap();
let token = create_access_token(&db, user.user_id, None).await.unwrap();
assert!(matches!(
verify_access_token(&token, user.user_id, db).await.unwrap(),
verify_access_token(&token, user.user_id, &db)
.await
.unwrap(),
VerifyAccessTokenResult {
is_valid: true,
impersonator_id: None,
}
));

let old_token = create_previous_access_token(user.user_id, None, db)
let old_token = create_previous_access_token(user.user_id, None, &db)
.await
.unwrap();

@@ -331,7 +333,7 @@ mod test {
assert!(hash.starts_with("$scrypt$"));

assert!(matches!(
verify_access_token(&old_token, user.user_id, db)
verify_access_token(&old_token, user.user_id, &db)
.await
.unwrap(),
VerifyAccessTokenResult {
@@ -353,7 +355,7 @@ mod test {
assert!(hash.starts_with("$sha256$"));

assert!(matches!(
verify_access_token(&old_token, user.user_id, db)
verify_access_token(&old_token, user.user_id, &db)
.await
.unwrap(),
VerifyAccessTokenResult {
@@ -363,7 +365,9 @@ mod test {
));

assert!(matches!(
verify_access_token(&token, user.user_id, db).await.unwrap(),
verify_access_token(&token, user.user_id, &db)
.await
.unwrap(),
VerifyAccessTokenResult {
is_valid: true,
impersonator_id: None,

@@ -139,12 +139,14 @@ impl Database {
let (tx, result) = self.with_weak_transaction(&f).await?;
match result {
Ok(result) => match tx.commit().await.map_err(Into::into) {
Ok(()) => Ok(result),
Err(error) => Err(error),
Ok(()) => return Ok(result),
Err(error) => {
return Err(error);
}
},
Err(error) => {
tx.rollback().await?;
Err(error)
return Err(error);
}
}
};
@@ -215,7 +217,7 @@ impl Database {
F: Send + Fn(TransactionHandle) -> Fut,
Fut: Send + Future<Output = Result<T>>,
{
let room_id = Database::room_id_for_project(self, project_id).await?;
let room_id = Database::room_id_for_project(&self, project_id).await?;
let body = async {
let mut i = 0;
loop {

@@ -218,9 +218,9 @@ impl From<proto::ChannelRole> for ChannelRole {
}
}

impl From<ChannelRole> for proto::ChannelRole {
fn from(val: ChannelRole) -> Self {
match val {
impl Into<proto::ChannelRole> for ChannelRole {
fn into(self) -> proto::ChannelRole {
match self {
ChannelRole::Admin => proto::ChannelRole::Admin,
ChannelRole::Member => proto::ChannelRole::Member,
ChannelRole::Talker => proto::ChannelRole::Talker,
@@ -230,9 +230,9 @@ impl From<ChannelRole> for proto::ChannelRole {
}
}

impl From<ChannelRole> for i32 {
fn from(val: ChannelRole) -> Self {
let proto: proto::ChannelRole = val.into();
impl Into<i32> for ChannelRole {
fn into(self) -> i32 {
let proto: proto::ChannelRole = self.into();
proto.into()
}
}
@@ -259,18 +259,18 @@ impl From<proto::ChannelVisibility> for ChannelVisibility {
}
}

impl From<ChannelVisibility> for proto::ChannelVisibility {
fn from(val: ChannelVisibility) -> Self {
match val {
impl Into<proto::ChannelVisibility> for ChannelVisibility {
fn into(self) -> proto::ChannelVisibility {
match self {
ChannelVisibility::Public => proto::ChannelVisibility::Public,
ChannelVisibility::Members => proto::ChannelVisibility::Members,
}
}
}

impl From<ChannelVisibility> for i32 {
fn from(val: ChannelVisibility) -> Self {
let proto: proto::ChannelVisibility = val.into();
impl Into<i32> for ChannelVisibility {
fn into(self) -> i32 {
let proto: proto::ChannelVisibility = self.into();
proto.into()
}
}

@@ -391,7 +391,7 @@ impl Database {
drop(rows);

if collaborators.is_empty() {
self.snapshot_channel_buffer(channel_id, tx).await?;
self.snapshot_channel_buffer(channel_id, &tx).await?;
}

Ok(LeftChannelBuffer {
@@ -872,7 +872,7 @@ fn operation_from_storage(
})
}

fn version_to_storage(version: &[proto::VectorClockEntry]) -> Vec<storage::VectorClockEntry> {
fn version_to_storage(version: &Vec<proto::VectorClockEntry>) -> Vec<storage::VectorClockEntry> {
version
.iter()
.map(|entry| storage::VectorClockEntry {
@@ -882,7 +882,7 @@ fn version_to_storage(version: &[proto::VectorClockEntry]) -> Vec<storage::Vecto
.collect()
}

fn version_from_storage(version: &[storage::VectorClockEntry]) -> Vec<proto::VectorClockEntry> {
fn version_from_storage(version: &Vec<storage::VectorClockEntry>) -> Vec<proto::VectorClockEntry> {
version
.iter()
.map(|entry| proto::VectorClockEntry {

@@ -188,16 +188,17 @@ impl Database {
.anyhow())?;
}
}
} else if visibility == ChannelVisibility::Members
&& self
} else if visibility == ChannelVisibility::Members {
if self
.get_channel_descendants_excluding_self([&channel], &tx)
.await?
.into_iter()
.any(|channel| channel.visibility == ChannelVisibility::Public)
{
Err(ErrorCode::BadPublicNesting
.with_tag("direction", "children")
.anyhow())?;
{
Err(ErrorCode::BadPublicNesting
.with_tag("direction", "children")
.anyhow())?;
}
}

let mut model = channel.into_active_model();
@@ -307,7 +308,7 @@ impl Database {

fn sanitize_channel_name(name: &str) -> Result<&str> {
let new_name = name.trim().trim_start_matches('#');
if new_name.is_empty() {
if new_name == "" {
Err(anyhow!("channel name can't be blank"))?;
}
Ok(new_name)
@@ -984,7 +985,7 @@ impl Database {
.all(&*tx)
.await?
.into_iter()
.map(Channel::from_model)
.map(|c| Channel::from_model(c))
.collect::<Vec<_>>();

Ok((root_id, channels))

@@ -65,7 +65,7 @@ impl Database {
github_login: &str,
github_user_id: i32,
github_email: Option<&str>,
github_user_created_at: DateTimeUtc,
github_user_created_at: Option<DateTimeUtc>,
initial_channel_id: Option<ChannelId>,
) -> Result<()> {
self.transaction(|tx| async move {
@@ -74,7 +74,7 @@ impl Database {
github_login,
github_user_id,
github_email,
github_user_created_at.naive_utc(),
github_user_created_at.map(|time| time.naive_utc()),
initial_channel_id,
&tx,
)

@@ -146,11 +146,11 @@ impl Database {
pub async fn update_dev_server_project(
&self,
id: DevServerProjectId,
paths: &[String],
paths: &Vec<String>,
user_id: UserId,
) -> crate::Result<(dev_server_project::Model, proto::DevServerProjectsUpdate)> {
self.transaction(move |tx| async move {
let paths = paths.to_owned();
let paths = paths.clone();
let Some((project, Some(dev_server))) = dev_server_project::Entity::find_by_id(id)
.find_also_related(dev_server::Entity)
.one(&*tx)

@@ -5,7 +5,7 @@ use super::*;
impl Database {
pub async fn get_hosted_projects(
&self,
channel_ids: &[ChannelId],
channel_ids: &Vec<ChannelId>,
roles: &HashMap<ChannelId, ChannelRole>,
tx: &DatabaseTransaction,
) -> Result<Vec<proto::HostedProject>> {

@@ -86,13 +86,14 @@ impl Database {
avoid_duplicates: bool,
tx: &DatabaseTransaction,
) -> Result<Option<(UserId, proto::Notification)>> {
if avoid_duplicates
&& self
if avoid_duplicates {
if self
.find_notification(recipient_id, &notification, tx)
.await?
.is_some()
{
return Ok(None);
{
return Ok(None);
}
}

let proto = notification.to_proto();

@@ -459,7 +459,7 @@ impl Database {
.await?;
}

let (channel, room) = self.get_channel_room(room_id, tx).await?;
let (channel, room) = self.get_channel_room(room_id, &tx).await?;
let channel = channel.ok_or_else(|| anyhow!("no channel for room"))?;
Ok(JoinRoom {
room,
@@ -766,13 +766,13 @@ impl Database {
})
.collect::<Vec<_>>();

Ok(Some(RejoinedProject {
return Ok(Some(RejoinedProject {
id: project_id,
old_connection_id,
collaborators,
worktrees,
language_servers,
}))
}));
}

pub async fn leave_room(
@@ -1108,14 +1108,15 @@ impl Database {
.count(tx)
.await?
> 0;
if requires_zed_cla
&& contributor::Entity::find()
if requires_zed_cla {
if contributor::Entity::find()
.filter(contributor::Column::UserId.eq(user_id))
.one(tx)
.await?
.is_none()
{
Err(anyhow!("user has not signed the Zed CLA"))?;
{
Err(anyhow!("user has not signed the Zed CLA"))?;
}
}
}
Ok(())

@@ -101,7 +101,7 @@ impl Database {
github_login: &str,
github_user_id: i32,
github_email: Option<&str>,
github_user_created_at: DateTimeUtc,
github_user_created_at: Option<DateTimeUtc>,
initial_channel_id: Option<ChannelId>,
) -> Result<User> {
self.transaction(|tx| async move {
@@ -109,7 +109,7 @@ impl Database {
github_login,
github_user_id,
github_email,
github_user_created_at.naive_utc(),
github_user_created_at.map(|created_at| created_at.naive_utc()),
initial_channel_id,
&tx,
)
@@ -123,7 +123,7 @@ impl Database {
github_login: &str,
github_user_id: i32,
github_email: Option<&str>,
github_user_created_at: NaiveDateTime,
github_user_created_at: Option<NaiveDateTime>,
initial_channel_id: Option<ChannelId>,
tx: &DatabaseTransaction,
) -> Result<User> {
@@ -134,8 +134,10 @@ impl Database {
{
let mut user_by_github_user_id = user_by_github_user_id.into_active_model();
user_by_github_user_id.github_login = ActiveValue::set(github_login.into());
user_by_github_user_id.github_user_created_at =
ActiveValue::set(Some(github_user_created_at));
if github_user_created_at.is_some() {
user_by_github_user_id.github_user_created_at =
ActiveValue::set(github_user_created_at);
}
Ok(user_by_github_user_id.update(tx).await?)
} else if let Some(user_by_github_login) = user::Entity::find()
.filter(user::Column::GithubLogin.eq(github_login))
@@ -144,15 +146,17 @@ impl Database {
{
let mut user_by_github_login = user_by_github_login.into_active_model();
user_by_github_login.github_user_id = ActiveValue::set(github_user_id);
user_by_github_login.github_user_created_at =
ActiveValue::set(Some(github_user_created_at));
if github_user_created_at.is_some() {
user_by_github_login.github_user_created_at =
ActiveValue::set(github_user_created_at);
}
Ok(user_by_github_login.update(tx).await?)
} else {
let user = user::Entity::insert(user::ActiveModel {
email_address: ActiveValue::set(github_email.map(|email| email.into())),
github_login: ActiveValue::set(github_login.into()),
github_user_id: ActiveValue::set(github_user_id),
github_user_created_at: ActiveValue::set(Some(github_user_created_at)),
github_user_created_at: ActiveValue::set(github_user_created_at),
admin: ActiveValue::set(false),
invite_count: ActiveValue::set(0),
invite_code: ActiveValue::set(None),

@@ -48,7 +48,7 @@ impl Model {
id: self.id.to_proto(),
project_id: project.map(|p| p.id.to_proto()),
dev_server_id: self.dev_server_id.to_proto(),
path: self.paths().first().cloned().unwrap_or_default(),
path: self.paths().get(0).cloned().unwrap_or_default(),
paths: self.paths().clone(),
}
}

@@ -25,7 +25,7 @@ async fn test_contributors(db: &Arc<Database>) {
assert_eq!(db.get_contributors().await.unwrap(), Vec::<String>::new());

let user1_created_at = Utc::now();
db.add_contributor("user1", 1, None, user1_created_at, None)
db.add_contributor("user1", 1, None, Some(user1_created_at), None)
.await
.unwrap();
assert_eq!(
@@ -34,7 +34,7 @@ async fn test_contributors(db: &Arc<Database>) {
);

let user2_created_at = Utc::now();
db.add_contributor("user2", 2, None, user2_created_at, None)
db.add_contributor("user2", 2, None, Some(user2_created_at), None)
.await
.unwrap();
assert_eq!(

@@ -101,7 +101,7 @@ async fn test_get_or_create_user_by_github_account(db: &Arc<Database>) {
.user_id;

let user = db
.get_or_create_user_by_github_account("the-new-login2", 102, None, Utc::now(), None)
.get_or_create_user_by_github_account("the-new-login2", 102, None, Some(Utc::now()), None)
.await
.unwrap();
assert_eq!(user.id, user_id2);
@@ -113,7 +113,7 @@ async fn test_get_or_create_user_by_github_account(db: &Arc<Database>) {
"login3",
103,
Some("user3@example.com"),
Utc::now(),
Some(Utc::now()),
None,
)
.await

@@ -22,17 +22,19 @@ async fn test_already_processed_stripe_event(db: &Arc<Database>) {
.await
.unwrap();

assert!(
assert_eq!(
db.already_processed_stripe_event(&processed_event_id)
.await
.unwrap(),
true,
"Expected {processed_event_id} to already be processed"
);

assert!(
!db.already_processed_stripe_event(&unprocessed_event_id)
assert_eq!(
db.already_processed_stripe_event(&unprocessed_event_id)
.await
.unwrap(),
false,
"Expected {unprocessed_event_id} to be unprocessed"
);
}

@@ -304,7 +304,10 @@ impl AppState {
db: db.clone(),
live_kit_client,
blob_store_client: build_blob_store_client(&config).await.log_err(),
stripe_client: build_stripe_client(&config).await.map(Arc::new).log_err(),
stripe_client: build_stripe_client(&config)
.await
.map(|client| Arc::new(client))
.log_err(),
rate_limiter: Arc::new(RateLimiter::new(db)),
executor,
clickhouse_client: config

@@ -9,7 +9,6 @@ use crate::{
};
use anyhow::{anyhow, Context as _};
use authorization::authorize_access_to_language_model;
use axum::routing::get;
use axum::{
body::Body,
http::{self, HeaderName, HeaderValue, Request, StatusCode},
@@ -23,7 +22,6 @@ use collections::HashMap;
use db::{usage_measure::UsageMeasure, ActiveUserCount, LlmDatabase};
use futures::{Stream, StreamExt as _};
use http_client::IsahcHttpClient;
use rpc::ListModelsResponse;
use rpc::{
proto::Plan, LanguageModelProvider, PerformCompletionParams, EXPIRED_LLM_TOKEN_HEADER_NAME,
};
@@ -116,7 +114,6 @@ impl LlmState {

pub fn routes() -> Router<(), Body> {
Router::new()
.route("/models", get(list_models))
.route("/completion", post(perform_completion))
.layer(middleware::from_fn(validate_api_token))
}
@@ -141,7 +138,7 @@ async fn validate_api_token<B>(mut req: Request<B>, next: Next<B>) -> impl IntoR
})?;

let state = req.extensions().get::<Arc<LlmState>>().unwrap();
match LlmTokenClaims::validate(token, &state.config) {
match LlmTokenClaims::validate(&token, &state.config) {
Ok(claims) => {
if state.db.is_access_token_revoked(&claims.jti).await? {
return Err(Error::http(
@@ -154,7 +151,7 @@ async fn validate_api_token<B>(mut req: Request<B>, next: Next<B>) -> impl IntoR
.record("user_id", claims.user_id)
.record("login", claims.github_user_login.clone())
.record("authn.jti", &claims.jti)
.record("is_staff", claims.is_staff);
.record("is_staff", &claims.is_staff);

req.extensions_mut().insert(claims);
Ok::<_, Error>(next.run(req).await.into_response())
@@ -176,37 +173,6 @@ async fn validate_api_token<B>(mut req: Request<B>, next: Next<B>) -> impl IntoR
}
}

async fn list_models(
Extension(state): Extension<Arc<LlmState>>,
Extension(claims): Extension<LlmTokenClaims>,
country_code_header: Option<TypedHeader<CloudflareIpCountryHeader>>,
) -> Result<Json<ListModelsResponse>> {
let country_code = country_code_header.map(|header| header.to_string());

let mut accessible_models = Vec::new();

for (provider, model) in state.db.all_models() {
let authorize_result = authorize_access_to_language_model(
&state.config,
&claims,
country_code.as_deref(),
provider,
&model.name,
);

if authorize_result.is_ok() {
accessible_models.push(rpc::LanguageModel {
provider,
name: model.name,
});
}
}

Ok(Json(ListModelsResponse {
models: accessible_models,
}))
}

async fn perform_completion(
Extension(state): Extension<Arc<LlmState>>,
Extension(claims): Extension<LlmTokenClaims>,

@@ -221,9 +187,7 @@ async fn perform_completion(
authorize_access_to_language_model(
&state.config,
&claims,
country_code_header
.map(|header| header.to_string())
.as_deref(),
country_code_header.map(|header| header.to_string()),
params.provider,
&model,
)?;
@@ -247,7 +211,7 @@ async fn perform_completion(
};

let mut request: anthropic::Request =
serde_json::from_str(params.provider_request.get())?;
serde_json::from_str(&params.provider_request.get())?;

// Override the model on the request with the latest version of the model that is
// known to the server.
@@ -348,7 +312,7 @@ async fn perform_completion(
&state.http_client,
open_ai::OPEN_AI_API_URL,
api_key,
serde_json::from_str(params.provider_request.get())?,
serde_json::from_str(&params.provider_request.get())?,
None,
)
.await?;
@@ -379,8 +343,7 @@ async fn perform_completion(
&state.http_client,
google_ai::API_URL,
api_key,
serde_json::from_str(params.provider_request.get())?,
None,
serde_json::from_str(&params.provider_request.get())?,
)
.await?;

@@ -412,9 +375,9 @@ async fn perform_completion(
.context("no Qwen2-7B URL configured on the server")?;
let chunks = open_ai::stream_completion(
&state.http_client,
api_url,
&api_url,
api_key,
serde_json::from_str(params.provider_request.get())?,
serde_json::from_str(&params.provider_request.get())?,
None,
)
.await?;

@@ -7,7 +7,7 @@ use crate::{Config, Error, Result};
pub fn authorize_access_to_language_model(
config: &Config,
claims: &LlmTokenClaims,
country_code: Option<&str>,
country_code: Option<String>,
provider: LanguageModelProvider,
model: &str,
) -> Result<()> {
@@ -26,16 +26,19 @@ fn authorize_access_to_model(
return Ok(());
}

if provider == LanguageModelProvider::Anthropic {
if model == "claude-3-5-sonnet" {
return Ok(());
}
match provider {
LanguageModelProvider::Anthropic => {
if model == "claude-3-5-sonnet" {
return Ok(());
}

if claims.has_llm_closed_beta_feature_flag
&& Some(model) == config.llm_closed_beta_model_name.as_deref()
{
return Ok(());
if claims.has_llm_closed_beta_feature_flag
&& Some(model) == config.llm_closed_beta_model_name.as_deref()
{
return Ok(());
}
}
_ => {}
}

Err(Error::http(
@@ -46,7 +49,7 @@ fn authorize_access_to_model(

fn authorize_access_for_country(
config: &Config,
country_code: Option<&str>,
country_code: Option<String>,
provider: LanguageModelProvider,
) -> Result<()> {
// In development we won't have the `CF-IPCountry` header, so we can't check
@@ -59,7 +62,7 @@ fn authorize_access_for_country(
}

// https://developers.cloudflare.com/fundamentals/reference/http-request-headers/#cf-ipcountry
let country_code = match country_code {
let country_code = match country_code.as_deref() {
// `XX` - Used for clients without country code data.
None | Some("XX") => Err(Error::http(
StatusCode::BAD_REQUEST,
@@ -125,7 +128,7 @@ mod tests {
authorize_access_to_language_model(
&config,
&claims,
Some(country_code),
Some(country_code.into()),
provider,
"the-model",
)
@@ -175,7 +178,7 @@ mod tests {
let error_response = authorize_access_to_language_model(
&config,
&claims,
Some(country_code),
Some(country_code.into()),
provider,
"the-model",
)
@@ -220,7 +223,7 @@ mod tests {
let error_response = authorize_access_to_language_model(
&config,
&claims,
Some(country_code),
Some(country_code.into()),
provider,
"the-model",
)
@@ -275,8 +278,13 @@ mod tests {
..Default::default()
};

let result =
authorize_access_to_language_model(&config, &claims, Some("US"), provider, model);
let result = authorize_access_to_language_model(
&config,
&claims,
Some("US".into()),
provider,
model,
);

if expected_access {
assert!(
@@ -316,8 +324,13 @@ mod tests {
];

for (provider, model) in test_cases {
let result =
authorize_access_to_language_model(&config, &claims, Some("US"), provider, model);
let result = authorize_access_to_language_model(
&config,
&claims,
Some("US".into()),
provider,
model,
);

assert!(
result.is_ok(),

@@ -67,14 +67,6 @@ impl LlmDatabase {
Ok(())
}

/// Returns the list of all known models, with their [`LanguageModelProvider`].
pub fn all_models(&self) -> Vec<(LanguageModelProvider, model::Model)> {
self.models
.iter()
.map(|((model_provider, _model_name), model)| (*model_provider, model.clone()))
.collect::<Vec<_>>()
}

/// Returns the names of the known models for the given [`LanguageModelProvider`].
pub fn model_names_for_provider(&self, provider: LanguageModelProvider) -> Vec<String> {
self.models
@@ -110,12 +102,14 @@ impl LlmDatabase {
let (tx, result) = self.with_transaction(&f).await?;
match result {
Ok(result) => match tx.commit().await.map_err(Into::into) {
Ok(()) => Ok(result),
Err(error) => Err(error),
Ok(()) => return Ok(result),
Err(error) => {
return Err(error);
}
},
Err(error) => {
tx.rollback().await?;
Err(error)
return Err(error);
}
}
};

@@ -82,7 +82,7 @@ impl LlmDatabase {
pub async fn insert_models(&mut self, models: &[ModelParams]) -> Result<()> {
let all_provider_ids = &self.provider_ids;
self.transaction(|tx| async move {
model::Entity::insert_many(models.iter().map(|model_params| {
model::Entity::insert_many(models.into_iter().map(|model_params| {
let provider_id = all_provider_ids[&model_params.provider];
model::ActiveModel {
provider_id: ActiveValue::set(provider_id),

@@ -88,7 +88,7 @@ async fn main() -> Result<()> {
.route("/healthz", get(handle_liveness_probe))
.layer(Extension(mode));

let listener = TcpListener::bind(format!("0.0.0.0:{}", config.http_port))
let listener = TcpListener::bind(&format!("0.0.0.0:{}", config.http_port))
.expect("failed to bind TCP listener");

let mut on_shutdown = None;
@@ -257,7 +257,7 @@ async fn setup_app_database(config: &Config) -> Result<()> {
db.initialize_notification_kinds().await?;

if config.seed_path.is_some() {
collab::seed::seed(config, &db, false).await?;
collab::seed::seed(&config, &db, false).await?;
}

Ok(())

@@ -100,7 +100,7 @@ impl RateLimiter {
pub async fn save(&self) -> Result<()> {
let mut buckets = Vec::new();
self.dirty_buckets.retain(|key| {
if let Some(bucket) = self.buckets.get(key) {
if let Some(bucket) = self.buckets.get(&key) {
buckets.push(crate::db::rate_buckets::Model {
user_id: key.0,
rate_limit_name: key.1.clone(),

@@ -115,16 +115,16 @@ impl Principal {
fn update_span(&self, span: &tracing::Span) {
match &self {
Principal::User(user) => {
span.record("user_id", user.id.0);
span.record("user_id", &user.id.0);
span.record("login", &user.github_login);
}
Principal::Impersonated { user, admin } => {
span.record("user_id", user.id.0);
span.record("user_id", &user.id.0);
span.record("login", &user.github_login);
span.record("impersonator", &admin.github_login);
}
Principal::DevServer(dev_server) => {
span.record("dev_server_id", dev_server.id.0);
span.record("dev_server_id", &dev_server.id.0);
}
}
}
@@ -495,9 +495,6 @@ impl Server {
.add_request_handler(user_handler(
forward_read_only_project_request::<proto::InlayHints>,
))
.add_request_handler(user_handler(
forward_read_only_project_request::<proto::ResolveInlayHint>,
))
.add_request_handler(user_handler(
forward_read_only_project_request::<proto::OpenBufferByPath>,
))
@@ -964,10 +961,14 @@ impl Server {
}
};

let supermaven_client = this.app_state.config.supermaven_admin_api_key.clone().map(|supermaven_admin_api_key| Arc::new(SupermavenAdminApi::new(
let supermaven_client = if let Some(supermaven_admin_api_key) = this.app_state.config.supermaven_admin_api_key.clone() {
Some(Arc::new(SupermavenAdminApi::new(
supermaven_admin_api_key.to_string(),
http_client.clone(),
)));
)))
} else {
None
};

let session = Session {
principal: principal.clone(),
@@ -1122,7 +1123,7 @@ impl Server {
self.peer.send(connection_id, incoming_call)?;
}

update_user_contacts(user.id, session).await?;
update_user_contacts(user.id, &session).await?;
}
Principal::DevServer(dev_server) => {
{
@@ -1155,7 +1156,7 @@ impl Server {
.db
.dev_server_projects_update(dev_server.user_id)
.await?;
send_dev_server_projects_update(dev_server.user_id, status, session).await;
send_dev_server_projects_update(dev_server.user_id, status, &session).await;
}
}

@@ -1560,17 +1561,21 @@ async fn join_room(

let live_kit_connection_info =
if let Some(live_kit) = session.app_state.live_kit_client.as_ref() {
live_kit
if let Some(token) = live_kit
.room_token(
&joined_room.room.live_kit_room,
&session.user_id().to_string(),
)
.trace_err()
.map(|token| proto::LiveKitConnectionInfo {
{
Some(proto::LiveKitConnectionInfo {
server_url: live_kit.url().into(),
token,
can_publish: true,
})
} else {
None
}
} else {
None
};
@@ -1855,7 +1860,7 @@ async fn call(
initial_project_id,
)
.await?;
room_updated(room, &session.peer);
room_updated(&room, &session.peer);
mem::take(incoming_call)
};
update_user_contacts(called_user_id, &session).await?;
@@ -1998,13 +2003,13 @@ async fn share_project(
&request.worktrees,
request
.dev_server_project_id
.map(DevServerProjectId::from_proto),
.map(|id| DevServerProjectId::from_proto(id)),
)
.await?;
response.send(proto::ShareProjectResponse {
project_id: project_id.to_proto(),
})?;
room_updated(room, &session.peer);
room_updated(&room, &session.peer);

Ok(())
}
@@ -2261,9 +2266,9 @@ async fn leave_project(request: proto::LeaveProject, session: UserSession) -> Re
"leave project"
);

project_left(project, &session);
project_left(&project, &session);
if let Some(room) = room {
room_updated(room, &session.peer);
room_updated(&room, &session.peer);
}

Ok(())
@@ -2745,7 +2750,7 @@ async fn shutdown_dev_server_internal(
.await
.dev_server_projects_update(dev_server.user_id)
.await?;
send_dev_server_projects_update(dev_server.user_id, status, session).await;
send_dev_server_projects_update(dev_server.user_id, status, &session).await;

Ok(())
}
@@ -2787,7 +2792,7 @@ async fn update_project(
},
);
if let Some(room) = room {
room_updated(room, &session.peer);
room_updated(&room, &session.peer);
}
response.send(proto::Ack {})?;

@@ -3554,7 +3559,7 @@ async fn create_channel(
) -> Result<()> {
let db = session.db().await;

let parent_id = request.parent_id.map(ChannelId::from_proto);
let parent_id = request.parent_id.map(|id| ChannelId::from_proto(id));
let (channel, membership) = db
.create_channel(&request.name, parent_id, session.user_id())
.await?;
@@ -4276,7 +4281,10 @@ async fn send_channel_message(
&request.mentions,
timestamp,
nonce.clone().into(),
request.reply_to_message_id.map(MessageId::from_proto),
match request.reply_to_message_id {
Some(reply_to_message_id) => Some(MessageId::from_proto(reply_to_message_id)),
None => None,
},
)
.await?;

@@ -4529,7 +4537,6 @@ async fn count_language_model_tokens(
google_ai::API_URL,
api_key,
serde_json::from_str(&request.request)?,
None,
)
.await?
}
@@ -4837,7 +4844,9 @@ async fn get_notifications(
.get_notifications(
session.user_id(),
NOTIFICATION_COUNT_PER_PAGE,
request.before_id.map(db::NotificationId::from_proto),
request
.before_id
.map(|id| db::NotificationId::from_proto(id)),
)
.await?;
response.send(proto::GetNotificationsResponse {
@@ -5091,7 +5100,7 @@ fn build_initial_contacts_update(
for contact in contacts {
match contact {
db::Contact::Accepted { user_id, busy } => {
update.contacts.push(contact_for_user(user_id, busy, pool));
update.contacts.push(contact_for_user(user_id, busy, &pool));
}
db::Contact::Outgoing { user_id } => update.outgoing_requests.push(user_id.to_proto()),
db::Contact::Incoming { user_id } => {
@@ -5148,8 +5157,7 @@ fn channel_updated(
None,
pool.channel_connection_ids(channel.root_id())
.filter_map(|(channel_id, role)| {
role.can_see_channel(channel.visibility)
.then_some(channel_id)
role.can_see_channel(channel.visibility).then(|| channel_id)
}),
|peer_id| {
peer.send(
@@ -5227,7 +5235,7 @@ async fn lost_dev_server_connection(session: &DevServerSession) -> Result<()> {

for project_id in project_ids {
// not unshare re-checks the connection ids match, so we get away with no transaction
unshare_project_internal(project_id, session.connection_id, None, session).await?;
unshare_project_internal(project_id, session.connection_id, None, &session).await?;
}

let user_id = session.dev_server().user_id;
@@ -5299,7 +5307,7 @@ async fn leave_room_for_session(session: &UserSession, connection_id: Connection
}

for contact_user_id in contacts_to_update {
update_user_contacts(contact_user_id, session).await?;
update_user_contacts(contact_user_id, &session).await?;
}

if let Some(live_kit) = session.app_state.live_kit_client.as_ref() {

@@ -236,7 +236,7 @@ impl ConnectionPool {
}
PrincipalId::DevServerId(dev_server_id) => {
assert_eq!(
self.connected_dev_servers.get(dev_server_id).unwrap(),
self.connected_dev_servers.get(&dev_server_id).unwrap(),
connection_id
);
}
@@ -300,9 +300,9 @@ impl ChannelPool {
}

pub fn remove_user(&mut self, user_id: &UserId) {
if let Some(channels) = self.by_user.remove(user_id) {
if let Some(channels) = self.by_user.remove(&user_id) {
for channel_id in channels.keys() {
self.unsubscribe(user_id, channel_id)
self.unsubscribe(user_id, &channel_id)
}
}
}

@@ -1,7 +1,6 @@
use crate::db::{self, ChannelRole, NewUserParams};

use anyhow::Context;
use chrono::{DateTime, Utc};
use db::Database;
use serde::{de::DeserializeOwned, Deserialize};
use std::{fmt::Write, fs, path::Path};
@@ -9,11 +8,10 @@ use std::{fmt::Write, fs, path::Path};
use crate::Config;

#[derive(Debug, Deserialize)]
struct GithubUser {
struct GitHubUser {
id: i32,
login: String,
email: Option<String>,
created_at: DateTime<Utc>,
}

#[derive(Deserialize)]
@@ -56,7 +54,7 @@ pub async fn seed(config: &Config, db: &Database, force: bool) -> anyhow::Result
}

for admin_login in seed_config.admins {
let user = fetch_github::<GithubUser>(
let user = fetch_github::<GitHubUser>(
&client,
&format!("https://api.github.com/users/{admin_login}"),
)
@@ -121,7 +119,7 @@ pub async fn seed(config: &Config, db: &Database, force: bool) -> anyhow::Result
if let Some(last_user_id) = last_user_id {
write!(&mut uri, "&since={}", last_user_id).unwrap();
}
let users = fetch_github::<Vec<GithubUser>>(&client, &uri).await;
let users = fetch_github::<Vec<GitHubUser>>(&client, &uri).await;

for github_user in users {
last_user_id = Some(github_user.id);
@@ -131,7 +129,7 @@ pub async fn seed(config: &Config, db: &Database, force: bool) -> anyhow::Result
&github_user.login,
github_user.id,
github_user.email.as_deref(),
github_user.created_at,
None,
None,
)
.await

@@ -85,7 +85,7 @@ async fn test_core_channel_buffers(

// Client B sees that client A is gone from the channel buffer.
channel_buffer_b.read_with(cx_b, |buffer, _| {
assert_collaborators(buffer.collaborators(), &[client_b.user_id()]);
assert_collaborators(&buffer.collaborators(), &[client_b.user_id()]);
});

// Client A rejoins the channel buffer
@@ -99,7 +99,7 @@ async fn test_core_channel_buffers(
// Sanity test, make sure we saw A rejoining
channel_buffer_b.read_with(cx_b, |buffer, _| {
assert_collaborators(
buffer.collaborators(),
&buffer.collaborators(),
&[client_a.user_id(), client_b.user_id()],
);
});
@@ -111,7 +111,7 @@ async fn test_core_channel_buffers(

// Client B observes A disconnect
channel_buffer_b.read_with(cx_b, |buffer, _| {
assert_collaborators(buffer.collaborators(), &[client_b.user_id()]);
assert_collaborators(&buffer.collaborators(), &[client_b.user_id()]);
});

// TODO:
@@ -250,7 +250,6 @@ async fn test_channel_notes_participant_indices(
let (workspace_b, cx_b) = client_b.build_workspace(&project_b, cx_b);

// Clients A and B open the same file.
executor.start_waiting();
let editor_a = workspace_a
.update(cx_a, |workspace, cx| {
workspace.open_path((worktree_id_a, "file.txt"), None, true, cx)
@@ -259,7 +258,6 @@ async fn test_channel_notes_participant_indices(
.unwrap()
.downcast::<Editor>()
.unwrap();
executor.start_waiting();
let editor_b = workspace_b
.update(cx_b, |workspace, cx| {
workspace.open_path((worktree_id_a, "file.txt"), None, true, cx)
@@ -687,7 +685,7 @@ fn assert_collaborators(collaborators: &HashMap<PeerId, Collaborator>, ids: &[Op
user_ids.sort();
assert_eq!(
user_ids,
ids.iter().map(|id| id.unwrap()).collect::<Vec<_>>()
ids.into_iter().map(|id| id.unwrap()).collect::<Vec<_>>()
);
}