Compare commits

..

4 Commits

Author           SHA1        Message                           Date
Richard Feldman  8998f53222  Drop unused import                2024-09-25 13:56:58 -04:00
Richard Feldman  d84e1244d4  Simplify http timeout defaults    2024-09-25 13:56:58 -04:00
Richard Feldman  8bb7585cab  Improve eval error messages       2024-09-25 13:56:58 -04:00
Richard Feldman  9311505e76  Increase HTTP timeouts for evals  2024-09-25 13:56:58 -04:00
195 changed files with 3967 additions and 3806 deletions


@@ -10,7 +10,7 @@ runs:
cargo install cargo-nextest
- name: Install Node
uses: actions/setup-node@0a44ba7841725637a19e28fa30b79a866c81b0a6 # v4
uses: actions/setup-node@1e60f620b9541d16bece96c5465dc8ee9832be0b # v4
with:
node-version: "18"


@@ -41,7 +41,7 @@ jobs:
exit 1
;;
esac
which cargo-set-version > /dev/null || cargo install cargo-edit
which cargo-set-version > /dev/null || cargo install cargo-edit --features vendored-openssl
output=$(cargo set-version -p zed --bump patch 2>&1 | sed 's/.* //')
git commit -am "Bump to $output for @$GITHUB_ACTOR" --author "Zed Bot <hi@zed.dev>"
git tag v${output}${tag_suffix}


@@ -172,7 +172,7 @@ jobs:
DIGITALOCEAN_SPACES_SECRET_KEY: ${{ secrets.DIGITALOCEAN_SPACES_SECRET_KEY }}
steps:
- name: Install Node
uses: actions/setup-node@0a44ba7841725637a19e28fa30b79a866c81b0a6 # v4
uses: actions/setup-node@1e60f620b9541d16bece96c5465dc8ee9832be0b # v4
with:
node-version: "18"
@@ -192,12 +192,29 @@ jobs:
- name: Determine version and release channel
if: ${{ startsWith(github.ref, 'refs/tags/v') }}
run: |
# This exports RELEASE_CHANNEL into env (GITHUB_ENV)
script/determine-release-channel
set -eu
- name: Draft release notes
if: ${{ startsWith(github.ref, 'refs/tags/v') }}
run: |
version=$(script/get-crate-version zed)
channel=$(cat crates/zed/RELEASE_CHANNEL)
echo "Publishing version: ${version} on release channel ${channel}"
echo "RELEASE_CHANNEL=${channel}" >> $GITHUB_ENV
expected_tag_name=""
case ${channel} in
stable)
expected_tag_name="v${version}";;
preview)
expected_tag_name="v${version}-pre";;
nightly)
expected_tag_name="v${version}-nightly";;
*)
echo "can't publish a release on channel ${channel}"
exit 1;;
esac
if [[ $GITHUB_REF_NAME != $expected_tag_name ]]; then
echo "invalid release tag ${GITHUB_REF_NAME}. expected ${expected_tag_name}"
exit 1
fi
mkdir -p target/
# Ignore any errors that occur while drafting release notes to not fail the build.
script/draft-release-notes "$version" "$channel" > target/release-notes.md || true
@@ -254,7 +271,7 @@ jobs:
timeout-minutes: 60
name: Create a Linux bundle
runs-on:
- buildjet-16vcpu-ubuntu-2004
- buildjet-16vcpu-ubuntu-2204
if: ${{ startsWith(github.ref, 'refs/tags/v') || contains(github.event.pull_request.labels.*.name, 'run-bundling') }}
needs: [linux_tests]
env:
@@ -267,13 +284,34 @@ jobs:
clean: false
- name: Install Linux dependencies
run: ./script/linux && ./script/install-mold 2.34.0
run: ./script/linux
- name: Determine version and release channel
if: ${{ startsWith(github.ref, 'refs/tags/v') }}
run: |
# This exports RELEASE_CHANNEL into env (GITHUB_ENV)
script/determine-release-channel
set -eu
version=$(script/get-crate-version zed)
channel=$(cat crates/zed/RELEASE_CHANNEL)
echo "Publishing version: ${version} on release channel ${channel}"
echo "RELEASE_CHANNEL=${channel}" >> $GITHUB_ENV
expected_tag_name=""
case ${channel} in
stable)
expected_tag_name="v${version}";;
preview)
expected_tag_name="v${version}-pre";;
nightly)
expected_tag_name="v${version}-nightly";;
*)
echo "can't publish a release on channel ${channel}"
exit 1;;
esac
if [[ $GITHUB_REF_NAME != $expected_tag_name ]]; then
echo "invalid release tag ${GITHUB_REF_NAME}. expected ${expected_tag_name}"
exit 1
fi
- name: Create Linux .tar.gz bundle
run: script/bundle-linux
@@ -319,8 +357,29 @@ jobs:
- name: Determine version and release channel
if: ${{ startsWith(github.ref, 'refs/tags/v') }}
run: |
# This exports RELEASE_CHANNEL into env (GITHUB_ENV)
script/determine-release-channel
set -eu
version=$(script/get-crate-version zed)
channel=$(cat crates/zed/RELEASE_CHANNEL)
echo "Publishing version: ${version} on release channel ${channel}"
echo "RELEASE_CHANNEL=${channel}" >> $GITHUB_ENV
expected_tag_name=""
case ${channel} in
stable)
expected_tag_name="v${version}";;
preview)
expected_tag_name="v${version}-pre";;
nightly)
expected_tag_name="v${version}-nightly";;
*)
echo "can't publish a release on channel ${channel}"
exit 1;;
esac
if [[ $GITHUB_REF_NAME != $expected_tag_name ]]; then
echo "invalid release tag ${GITHUB_REF_NAME}. expected ${expected_tag_name}"
exit 1
fi
- name: Create and upload Linux .tar.gz bundle
run: script/bundle-linux


@@ -8,7 +8,7 @@ jobs:
stale:
runs-on: ubuntu-latest
steps:
- uses: actions/stale@28ca1036281a5e5922ead5184a1bbf96e5fc984e # v9
- uses: actions/stale@v9
with:
repo-token: ${{ secrets.GITHUB_TOKEN }}
stale-issue-message: >


@@ -21,7 +21,7 @@ jobs:
version: 9
- name: Setup Node
uses: actions/setup-node@0a44ba7841725637a19e28fa30b79a866c81b0a6 # v4
uses: actions/setup-node@1e60f620b9541d16bece96c5465dc8ee9832be0b # v4
with:
node-version: "20"
cache: "pnpm"


@@ -76,11 +76,7 @@ jobs:
clean: false
- name: Build docker image
run: |
docker build -f Dockerfile-collab \
--build-arg GITHUB_SHA=$GITHUB_SHA \
--tag registry.digitalocean.com/zed/collab:$GITHUB_SHA \
.
run: docker build . --build-arg GITHUB_SHA=$GITHUB_SHA --tag registry.digitalocean.com/zed/collab:$GITHUB_SHA
- name: Publish docker image
run: docker push registry.digitalocean.com/zed/collab:${GITHUB_SHA}


@@ -22,7 +22,7 @@ jobs:
- buildjet-16vcpu-ubuntu-2204
steps:
- name: Install Node
uses: actions/setup-node@0a44ba7841725637a19e28fa30b79a866c81b0a6 # v4
uses: actions/setup-node@1e60f620b9541d16bece96c5465dc8ee9832be0b # v4
with:
node-version: "18"


@@ -70,7 +70,7 @@ jobs:
ZED_CLOUD_PROVIDER_ADDITIONAL_MODELS_JSON: ${{ secrets.ZED_CLOUD_PROVIDER_ADDITIONAL_MODELS_JSON }}
steps:
- name: Install Node
uses: actions/setup-node@0a44ba7841725637a19e28fa30b79a866c81b0a6 # v4
uses: actions/setup-node@1e60f620b9541d16bece96c5465dc8ee9832be0b # v4
with:
node-version: "18"
@@ -100,7 +100,7 @@ jobs:
name: Create a Linux *.tar.gz bundle for x86
if: github.repository_owner == 'zed-industries'
runs-on:
- buildjet-16vcpu-ubuntu-2004
- buildjet-16vcpu-ubuntu-2204
needs: tests
env:
DIGITALOCEAN_SPACES_ACCESS_KEY: ${{ secrets.DIGITALOCEAN_SPACES_ACCESS_KEY }}
@@ -117,7 +117,7 @@ jobs:
run: echo "$HOME/.cargo/bin" >> $GITHUB_PATH
- name: Install Linux dependencies
run: ./script/linux && ./script/install-mold 2.34.0
run: ./script/linux
- name: Limit target directory size
run: script/clear-target-dir-if-larger-than 100

.gitignore (vendored), 2 lines changed

@@ -10,7 +10,7 @@
/crates/collab/seed.json
/crates/zed/resources/flatpak/flatpak-cargo-sources.json
/dev.zed.Zed*.json
/assets/*licenses.*
/assets/*licenses.md
**/venv
.build
*.wasm


@@ -38,10 +38,6 @@
}
}
},
"file_types": {
"Dockerfile": ["Dockerfile*[!dockerignore]"],
"Git Ignore": ["dockerignore"]
},
"hard_tabs": false,
"formatter": "auto",
"remove_trailing_whitespace_on_save": true,

Cargo.lock (generated), 41 lines changed

@@ -245,6 +245,7 @@ dependencies = [
"chrono",
"futures 0.3.30",
"http_client",
"isahc",
"schemars",
"serde",
"serde_json",
@@ -894,9 +895,9 @@ dependencies = [
[[package]]
name = "async-trait"
version = "0.1.83"
version = "0.1.82"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "721cae7de5c34fbb2acd27e21e6d2cf7b886dce0c27388d46c4e6c47ea4318dd"
checksum = "a27b8a3a6e1a44fa4c8baf1f653e4172e81486d4941f2237e20dc2d0cf4ddff1"
dependencies = [
"proc-macro2",
"quote",
@@ -2086,9 +2087,9 @@ dependencies = [
[[package]]
name = "cargo_toml"
version = "0.20.5"
version = "0.20.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "88da5a13c620b4ca0078845707ea9c3faf11edbc3ffd8497d11d686211cd1ac0"
checksum = "ad639525b1c67b6a298f378417b060fbc04618bea559482a8484381cce27d965"
dependencies = [
"serde",
"toml 0.8.19",
@@ -2282,9 +2283,9 @@ dependencies = [
[[package]]
name = "clap"
version = "4.5.18"
version = "4.5.17"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b0956a43b323ac1afaffc053ed5c4b7c1f1800bacd1683c353aabbb752515dd3"
checksum = "3e5a21b8495e732f1b3c364c9949b201ca7bae518c502c80256c96ad79eaf6ac"
dependencies = [
"clap_builder",
"clap_derive",
@@ -2292,9 +2293,9 @@ dependencies = [
[[package]]
name = "clap_builder"
version = "4.5.18"
version = "4.5.17"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4d72166dd41634086d5803a47eb71ae740e61d84709c36f3c34110173db3961b"
checksum = "8cf2dd12af7a047ad9d6da2b6b249759a22a7abc0f474c1dae1777afa4b21a73"
dependencies = [
"anstream",
"anstyle",
@@ -2314,9 +2315,9 @@ dependencies = [
[[package]]
name = "clap_derive"
version = "4.5.18"
version = "4.5.13"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4ac6a0c7b1a9e9a5186361f67dfa1b88213572f427fb9ab038efb2bd8c582dab"
checksum = "501d359d5f3dcaf6ecdeee48833ae73ec6e42723a1e52419c79abf9507eec0a0"
dependencies = [
"heck 0.5.0",
"proc-macro2",
@@ -2849,6 +2850,7 @@ dependencies = [
"gpui",
"http_client",
"indoc",
"isahc",
"language",
"lsp",
"menu",
@@ -3729,7 +3731,6 @@ dependencies = [
"multi_buffer",
"ordered-float 2.10.1",
"parking_lot",
"pretty_assertions",
"project",
"rand 0.8.5",
"release_channel",
@@ -4127,6 +4128,7 @@ dependencies = [
"gpui",
"http_client",
"indexed_docs",
"isahc",
"isahc_http_client",
"language",
"log",
@@ -4287,6 +4289,7 @@ dependencies = [
"gpui",
"http_client",
"human_bytes",
"isahc",
"language",
"log",
"menu",
@@ -5013,6 +5016,7 @@ dependencies = [
"anyhow",
"futures 0.3.30",
"http_client",
"isahc",
"schemars",
"serde",
"serde_json",
@@ -6284,6 +6288,7 @@ dependencies = [
"http_client",
"image",
"inline_completion_button",
"isahc",
"language",
"log",
"menu",
@@ -6429,9 +6434,9 @@ checksum = "03087c2bad5e1034e8cace5926dec053fb3790248370865f5117a7d0213354c8"
[[package]]
name = "libc"
version = "0.2.159"
version = "0.2.158"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "561d97a539a36e26a9a5fad1ea11a3039a67714694aaa379433e580854bc3dc5"
checksum = "d8adc4bb1803a324070e64a98ae98f38934d91957a99cfb3a43dcbc01bc56439"
[[package]]
name = "libdbus-sys"
@@ -6473,7 +6478,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4979f22fdb869068da03c9f7528f8297c6fd2606bc3a4affe42e6a823fdb8da4"
dependencies = [
"cfg-if",
"windows-targets 0.52.6",
"windows-targets 0.48.5",
]
[[package]]
@@ -7586,6 +7591,7 @@ dependencies = [
"anyhow",
"futures 0.3.30",
"http_client",
"isahc",
"schemars",
"serde",
"serde_json",
@@ -9116,7 +9122,6 @@ dependencies = [
"gpui",
"http_client",
"language",
"languages",
"log",
"lsp",
"node_runtime",
@@ -10493,7 +10498,6 @@ dependencies = [
"futures 0.3.30",
"gpui",
"parking_lot",
"paths",
"serde",
"serde_json",
"snippet",
@@ -13530,7 +13534,7 @@ version = "0.1.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cf221c93e13a30d793f7645a0e7762c55d169dbb0a49671918a2319d289b10bb"
dependencies = [
"windows-sys 0.59.0",
"windows-sys 0.48.0",
]
[[package]]
@@ -14429,6 +14433,7 @@ dependencies = [
"image_viewer",
"inline_completion_button",
"install_cli",
"isahc",
"isahc_http_client",
"journal",
"language",
@@ -14680,7 +14685,7 @@ dependencies = [
[[package]]
name = "zed_terraform"
version = "0.1.1"
version = "0.1.0"
dependencies = [
"zed_extension_api 0.1.0",
]


@@ -196,7 +196,7 @@
}
},
{
"context": "BufferSearchBar && in_replace > Editor",
"context": "BufferSearchBar && in_replace",
"bindings": {
"enter": "search::ReplaceNext",
"ctrl-enter": "search::ReplaceAll"
@@ -310,11 +310,6 @@
"ctrl-shift-\\": "editor::MoveToEnclosingBracket",
"ctrl-shift-[": "editor::Fold",
"ctrl-shift-]": "editor::UnfoldLines",
"ctrl-k ctrl-l": "editor::ToggleFold",
"ctrl-k ctrl-[": "editor::FoldRecursive",
"ctrl-k ctrl-]": "editor::UnfoldRecursive",
"ctrl-k ctrl-0": "editor::FoldAll",
"ctrl-k ctrl-j": "editor::UnfoldAll",
"ctrl-space": "editor::ShowCompletions",
"ctrl-.": "editor::ToggleCodeActions",
"alt-ctrl-r": "editor::RevealInFileManager",


@@ -232,7 +232,7 @@
}
},
{
"context": "BufferSearchBar && in_replace > Editor",
"context": "BufferSearchBar && in_replace",
"bindings": {
"enter": "search::ReplaceNext",
"cmd-enter": "search::ReplaceAll"
@@ -347,11 +347,6 @@
"cmd-shift-\\": "editor::MoveToEnclosingBracket",
"alt-cmd-[": "editor::Fold",
"alt-cmd-]": "editor::UnfoldLines",
"cmd-k cmd-l": "editor::ToggleFold",
"cmd-k cmd-[": "editor::FoldRecursive",
"cmd-k cmd-]": "editor::UnfoldRecursive",
"cmd-k cmd-0": "editor::FoldAll",
"cmd-k cmd-j": "editor::UnfoldAll",
"ctrl-space": "editor::ShowCompletions",
"cmd-.": "editor::ToggleCodeActions",
"alt-cmd-r": "editor::RevealInFileManager",


@@ -132,15 +132,9 @@
"z z": "editor::ScrollCursorCenter",
"z .": ["workspace::SendKeystrokes", "z z ^"],
"z b": "editor::ScrollCursorBottom",
"z a": "editor::ToggleFold",
"z A": "editor::ToggleFoldRecursive",
"z c": "editor::Fold",
"z C": "editor::FoldRecursive",
"z o": "editor::UnfoldLines",
"z O": "editor::UnfoldRecursive",
"z f": "editor::FoldSelectedRanges",
"z M": "editor::FoldAll",
"z R": "editor::UnfoldAll",
"shift-z shift-q": ["pane::CloseActiveItem", { "saveIntent": "skip" }],
"shift-z shift-z": ["pane::CloseActiveItem", { "saveIntent": "saveAll" }],
// Count support


@@ -535,16 +535,17 @@
// How to soft-wrap long lines of text.
// Possible values:
//
// 1. Prefer a single line generally, unless an overly long line is encountered.
// 1. Do not soft wrap.
// "soft_wrap": "none",
// "soft_wrap": "prefer_line", // (deprecated, same as "none")
// 2. Soft wrap lines that overflow the editor.
// 2. Prefer a single line generally, unless an overly long line is encountered.
// "soft_wrap": "prefer_line",
// 3. Soft wrap lines that overflow the editor.
// "soft_wrap": "editor_width",
// 3. Soft wrap lines at the preferred line length.
// 4. Soft wrap lines at the preferred line length.
// "soft_wrap": "preferred_line_length",
// 4. Soft wrap lines at the preferred line length or the editor width (whichever is smaller).
// 5. Soft wrap lines at the preferred line length or the editor width (whichever is smaller).
// "soft_wrap": "bounded",
"soft_wrap": "none",
"soft_wrap": "prefer_line",
// The column at which to soft-wrap lines, for buffers where soft-wrap
// is enabled.
"preferred_line_length": 80,
@@ -599,11 +600,13 @@
}
},
// Configuration for how direnv configuration should be loaded. May take 2 values:
// 1. Load direnv configuration using `direnv export json` directly.
// "load_direnv": "direct"
// 2. Load direnv configuration through the shell hook, works for POSIX shells and fish.
// 1. Load direnv configuration through the shell hook, works for POSIX shells and fish.
// "load_direnv": "shell_hook"
"load_direnv": "direct",
// 2. Load direnv configuration using `direnv export json` directly.
// This can help with some shells that otherwise would not detect
// the direnv environment, such as nushell or elvish.
// "load_direnv": "direct"
"load_direnv": "shell_hook",
"inline_completions": {
// A list of globs representing files that inline completions should be disabled for.
"disabled_globs": [".env"]
@@ -669,18 +672,6 @@
// 3. Always blink the cursor, ignoring the terminal mode
// "blinking": "on",
"blinking": "terminal_controlled",
// Default cursor shape for the terminal.
// 1. A block that surrounds the following character
// "block"
// 2. A vertical bar
// "bar"
// 3. An underline that runs along the following character
// "underscore"
// 4. A box drawn around the following character
// "hollow"
//
// Default: not set, defaults to "block"
"cursor_shape": null,
// Set whether Alternate Scroll mode (code: ?1007) is active by default.
// Alternate Scroll mode converts mouse scroll events into up / down key
// presses when in the alternate screen (e.g. when running applications
@@ -779,8 +770,7 @@
"**/Zed/**/*.json",
"tsconfig.json",
"pyrightconfig.json"
],
"TOML": ["uv.lock"]
]
},
/// By default use a recent system version of node, or install our own.
/// You can override this to use a version of node that is not in $PATH with:


@@ -20,6 +20,7 @@ anyhow.workspace = true
chrono.workspace = true
futures.workspace = true
http_client.workspace = true
isahc.workspace = true
schemars = { workspace = true, optional = true }
serde.workspace = true
serde_json.workspace = true


@@ -6,8 +6,9 @@ use std::{pin::Pin, str::FromStr};
use anyhow::{anyhow, Context, Result};
use chrono::{DateTime, Utc};
use futures::{io::BufReader, stream::BoxStream, AsyncBufReadExt, AsyncReadExt, Stream, StreamExt};
use http_client::http::{HeaderMap, HeaderValue};
use http_client::{AsyncBody, HttpClient, HttpRequestExt, Method, Request as HttpRequest};
use http_client::{AsyncBody, HttpClient, Method, Request as HttpRequest};
use isahc::config::Configurable;
use isahc::http::{HeaderMap, HeaderValue};
use serde::{Deserialize, Serialize};
use strum::{EnumIter, EnumString};
use thiserror::Error;
@@ -288,7 +289,7 @@ pub async fn stream_completion_with_rate_limit_info(
.header("X-Api-Key", api_key)
.header("Content-Type", "application/json");
if let Some(low_speed_timeout) = low_speed_timeout {
request_builder = request_builder.read_timeout(low_speed_timeout);
request_builder = request_builder.low_speed_timeout(100, low_speed_timeout);
}
let serialized_request =
serde_json::to_string(&request).context("failed to serialize request")?;
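
For context on the timeout hunk above: one side of the change configures the request through isahc's `Configurable` trait instead of the `http_client` crate's `read_timeout` extension. A minimal, self-contained sketch of that isahc pattern follows; the URL path and the 30-second window are placeholders chosen for illustration, while the `100` bytes/sec threshold mirrors the value used in the hunk.

```rust
use std::time::Duration;

use isahc::config::Configurable; // brings `low_speed_timeout` into scope
use isahc::Request;

// Builds a POST request that aborts if the transfer rate stays below
// 100 bytes/sec for 30 seconds (both numbers are illustrative defaults).
fn build_request(
    api_url: &str,
    api_key: &str,
    body: String,
) -> Result<Request<String>, isahc::http::Error> {
    Request::post(format!("{api_url}/v1/messages"))
        .header("X-Api-Key", api_key)
        .header("Content-Type", "application/json")
        .low_speed_timeout(100, Duration::from_secs(30))
        .body(body)
}
```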


@@ -72,7 +72,6 @@ use std::{
time::Duration,
};
use terminal_view::{terminal_panel::TerminalPanel, TerminalView};
use text::SelectionGoal;
use ui::TintColor;
use ui::{
prelude::*,
@@ -961,8 +960,7 @@ impl AssistantPanel {
}
fn new_context(&mut self, cx: &mut ViewContext<Self>) -> Option<View<ContextEditor>> {
let project = self.project.read(cx);
if project.is_via_collab() && project.dev_server_project_id().is_none() {
if self.project.read(cx).is_via_collab() {
let task = self
.context_store
.update(cx, |store, cx| store.create_remote_context(cx));
@@ -3439,7 +3437,7 @@ impl ContextEditor {
fn copy(&mut self, _: &editor::actions::Copy, cx: &mut ViewContext<Self>) {
if self.editor.read(cx).selections.count() == 1 {
let (copied_text, metadata, _) = self.get_clipboard_contents(cx);
let (copied_text, metadata) = self.get_clipboard_contents(cx);
cx.write_to_clipboard(ClipboardItem::new_string_with_json_metadata(
copied_text,
metadata,
@@ -3453,9 +3451,11 @@ impl ContextEditor {
fn cut(&mut self, _: &editor::actions::Cut, cx: &mut ViewContext<Self>) {
if self.editor.read(cx).selections.count() == 1 {
let (copied_text, metadata, selections) = self.get_clipboard_contents(cx);
let (copied_text, metadata) = self.get_clipboard_contents(cx);
self.editor.update(cx, |editor, cx| {
let selections = editor.selections.all::<Point>(cx);
editor.transact(cx, |this, cx| {
this.change_selections(Some(Autoscroll::fit()), cx, |s| {
s.select(selections);
@@ -3475,71 +3475,52 @@ impl ContextEditor {
cx.propagate();
}
fn get_clipboard_contents(
&mut self,
cx: &mut ViewContext<Self>,
) -> (String, CopyMetadata, Vec<text::Selection<usize>>) {
let (snapshot, selection, creases) = self.editor.update(cx, |editor, cx| {
let mut selection = editor.selections.newest::<Point>(cx);
fn get_clipboard_contents(&mut self, cx: &mut ViewContext<Self>) -> (String, CopyMetadata) {
let creases = self.editor.update(cx, |editor, cx| {
let selection = editor.selections.newest::<Point>(cx);
let selection_start = editor.selections.newest::<usize>(cx).start;
let snapshot = editor.buffer().read(cx).snapshot(cx);
editor.display_map.update(cx, |display_map, cx| {
display_map
.snapshot(cx)
.crease_snapshot
.creases_in_range(
MultiBufferRow(selection.start.row)..MultiBufferRow(selection.end.row + 1),
&snapshot,
)
.filter_map(|crease| {
if let Some(metadata) = &crease.metadata {
let start = crease
.range
.start
.to_offset(&snapshot)
.saturating_sub(selection_start);
let end = crease
.range
.end
.to_offset(&snapshot)
.saturating_sub(selection_start);
let is_entire_line = selection.is_empty() || editor.selections.line_mode;
if is_entire_line {
selection.start = Point::new(selection.start.row, 0);
selection.end =
cmp::min(snapshot.max_point(), Point::new(selection.start.row + 1, 0));
selection.goal = SelectionGoal::None;
}
let range_relative_to_selection = start..end;
let selection_start = snapshot.point_to_offset(selection.start);
(
snapshot.clone(),
selection.clone(),
editor.display_map.update(cx, |display_map, cx| {
display_map
.snapshot(cx)
.crease_snapshot
.creases_in_range(
MultiBufferRow(selection.start.row)
..MultiBufferRow(selection.end.row + 1),
&snapshot,
)
.filter_map(|crease| {
if let Some(metadata) = &crease.metadata {
let start = crease
.range
.start
.to_offset(&snapshot)
.saturating_sub(selection_start);
let end = crease
.range
.end
.to_offset(&snapshot)
.saturating_sub(selection_start);
let range_relative_to_selection = start..end;
if range_relative_to_selection.is_empty() {
None
} else {
Some(SelectedCreaseMetadata {
range_relative_to_selection,
crease: metadata.clone(),
})
}
} else {
if range_relative_to_selection.is_empty() {
None
} else {
Some(SelectedCreaseMetadata {
range_relative_to_selection,
crease: metadata.clone(),
})
}
})
.collect::<Vec<_>>()
}),
)
} else {
None
}
})
.collect::<Vec<_>>()
})
});
let selection = selection.map(|point| snapshot.point_to_offset(point));
let context = self.context.read(cx);
let selection = self.editor.read(cx).selections.newest::<usize>(cx);
let mut text = String::new();
for message in context.messages(cx) {
if message.offset_range.start >= selection.range().end {
@@ -3558,7 +3539,7 @@ impl ContextEditor {
}
}
(text, CopyMetadata { creases }, vec![selection])
(text, CopyMetadata { creases })
}
fn paste(&mut self, action: &editor::actions::Paste, cx: &mut ViewContext<Self>) {


@@ -1142,7 +1142,7 @@ impl InlineAssistant {
for row_range in inserted_row_ranges {
editor.highlight_rows::<InlineAssist>(
row_range,
cx.theme().status().info_background,
Some(cx.theme().status().info_background),
false,
cx,
);
@@ -1208,8 +1208,8 @@ impl InlineAssistant {
editor.set_read_only(true);
editor.set_show_inline_completions(Some(false), cx);
editor.highlight_rows::<DeletedLines>(
Anchor::min()..Anchor::max(),
cx.theme().status().deleted_background,
Anchor::min()..=Anchor::max(),
Some(cx.theme().status().deleted_background),
false,
cx,
);
@@ -2557,7 +2557,7 @@ enum CodegenStatus {
#[derive(Default)]
struct Diff {
deleted_row_ranges: Vec<(Anchor, RangeInclusive<u32>)>,
inserted_row_ranges: Vec<Range<Anchor>>,
inserted_row_ranges: Vec<RangeInclusive<Anchor>>,
}
impl Diff {
@@ -3103,7 +3103,7 @@ impl CodegenAlternative {
new_end_row,
new_snapshot.line_len(MultiBufferRow(new_end_row)),
));
self.diff.inserted_row_ranges.push(start..end);
self.diff.inserted_row_ranges.push(start..=end);
new_row += lines;
}
}
@@ -3181,7 +3181,7 @@ impl CodegenAlternative {
new_end_row,
new_snapshot.line_len(MultiBufferRow(new_end_row)),
));
inserted_row_ranges.push(start..end);
inserted_row_ranges.push(start..=end);
new_row += line_count;
}
}
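
The hunks above swap half-open `Range<Anchor>` values (`start..end`) for inclusive `RangeInclusive<Anchor>` values (`start..=end`) in the highlighted row ranges. As a quick refresher on the distinction, independent of Zed's `Anchor` type, here is a tiny sketch with plain integers:

```rust
fn main() {
    // Half-open range: the end bound is excluded.
    let half_open = 1u32..4;
    assert_eq!(half_open.clone().collect::<Vec<_>>(), vec![1, 2, 3]);
    assert!(!half_open.contains(&4));

    // Inclusive range: the end bound is included, so a single row can be
    // expressed as `row..=row` (whereas `row..row` is empty).
    let inclusive = 1u32..=4;
    assert_eq!(inclusive.clone().collect::<Vec<_>>(), vec![1, 2, 3, 4]);
    assert!(inclusive.contains(&4));
    assert!((5u32..5).is_empty());
    assert!((5u32..=5).contains(&5));
}
```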


@@ -264,18 +264,6 @@ pub fn view_release_notes(_: &ViewReleaseNotes, cx: &mut AppContext) -> Option<(
fn view_release_notes_locally(workspace: &mut Workspace, cx: &mut ViewContext<Workspace>) {
let release_channel = ReleaseChannel::global(cx);
let url = match release_channel {
ReleaseChannel::Nightly => Some("https://github.com/zed-industries/zed/commits/nightly/"),
ReleaseChannel::Dev => Some("https://github.com/zed-industries/zed/commits/main/"),
_ => None,
};
if let Some(url) = url {
cx.open_url(url);
return;
}
let version = AppVersion::global(cx).to_string();
let client = client::Client::global(cx).http_client();
@@ -357,17 +345,15 @@ pub fn notify_of_any_new_update(cx: &mut ViewContext<Workspace>) -> Option<()> {
let should_show_notification = should_show_notification.await?;
if should_show_notification {
workspace.update(&mut cx, |workspace, cx| {
let workspace_handle = workspace.weak_handle();
workspace.show_notification(
NotificationId::unique::<UpdateNotification>(),
cx,
|cx| cx.new_view(|_| UpdateNotification::new(version, workspace_handle)),
|cx| cx.new_view(|_| UpdateNotification::new(version)),
);
updater.update(cx, |updater, cx| {
updater
.set_should_show_update_notification(false, cx)
.detach_and_log_err(cx);
});
updater
.read(cx)
.set_should_show_update_notification(false, cx)
.detach_and_log_err(cx);
})?;
}
anyhow::Ok(())


@@ -1,18 +1,13 @@
use gpui::{
div, DismissEvent, EventEmitter, InteractiveElement, IntoElement, ParentElement, Render,
SemanticVersion, StatefulInteractiveElement, Styled, ViewContext, WeakView,
SemanticVersion, StatefulInteractiveElement, Styled, ViewContext,
};
use menu::Cancel;
use release_channel::ReleaseChannel;
use util::ResultExt;
use workspace::{
ui::{h_flex, v_flex, Icon, IconName, Label, StyledExt},
Workspace,
};
use workspace::ui::{h_flex, v_flex, Icon, IconName, Label, StyledExt};
pub struct UpdateNotification {
version: SemanticVersion,
workspace: WeakView<Workspace>,
}
impl EventEmitter<DismissEvent> for UpdateNotification {}
@@ -46,11 +41,7 @@ impl Render for UpdateNotification {
.child(Label::new("View the release notes"))
.cursor_pointer()
.on_click(cx.listener(|this, _, cx| {
this.workspace
.update(cx, |workspace, cx| {
crate::view_release_notes_locally(workspace, cx);
})
.log_err();
crate::view_release_notes(&Default::default(), cx);
this.dismiss(&menu::Cancel, cx)
})),
)
@@ -58,8 +49,8 @@ impl Render for UpdateNotification {
}
impl UpdateNotification {
pub fn new(version: SemanticVersion, workspace: WeakView<Workspace>) -> Self {
Self { version, workspace }
pub fn new(version: SemanticVersion) -> Self {
Self { version }
}
pub fn dismiss(&mut self, _: &Cancel, cx: &mut ViewContext<Self>) {


@@ -1,7 +1,7 @@
use editor::Editor;
use gpui::{
Element, EventEmitter, FocusableView, IntoElement, ParentElement, Render, StyledText,
Subscription, ViewContext,
Element, EventEmitter, IntoElement, ParentElement, Render, StyledText, Subscription,
ViewContext,
};
use itertools::Itertools;
use std::cmp;
@@ -90,30 +90,17 @@ impl Render for Breadcrumbs {
ButtonLike::new("toggle outline view")
.child(breadcrumbs_stack)
.style(ButtonStyle::Transparent)
.on_click({
let editor = editor.clone();
move |_, cx| {
if let Some(editor) = editor.upgrade() {
outline::toggle(editor, &editor::actions::ToggleOutline, cx)
}
.on_click(move |_, cx| {
if let Some(editor) = editor.upgrade() {
outline::toggle(editor, &editor::actions::ToggleOutline, cx)
}
})
.tooltip(move |cx| {
if let Some(editor) = editor.upgrade() {
let focus_handle = editor.read(cx).focus_handle(cx);
Tooltip::for_action_in(
"Show symbol outline",
&editor::actions::ToggleOutline,
&focus_handle,
cx,
)
} else {
Tooltip::for_action(
"Show symbol outline",
&editor::actions::ToggleOutline,
cx,
)
}
.tooltip(|cx| {
Tooltip::for_action(
"Show symbol outline",
&editor::actions::ToggleOutline,
cx,
)
}),
),
None => element


@@ -808,7 +808,7 @@ pub fn mentions_to_proto(mentions: &[(Range<usize>, UserId)]) -> Vec<proto::Chat
impl sum_tree::Item for ChannelMessage {
type Summary = ChannelMessageSummary;
fn summary(&self, _cx: &()) -> Self::Summary {
fn summary(&self) -> Self::Summary {
ChannelMessageSummary {
max_id: self.id,
count: 1,


@@ -149,6 +149,18 @@ spec:
secretKeyRef:
name: google-ai
key: api_key
- name: RUNPOD_API_KEY
valueFrom:
secretKeyRef:
name: runpod
key: api_key
optional: true
- name: RUNPOD_API_SUMMARY_URL
valueFrom:
secretKeyRef:
name: runpod
key: summary
optional: true
- name: BLOB_STORE_ACCESS_KEY
valueFrom:
secretKeyRef:


@@ -32,7 +32,6 @@ macro_rules! id_type {
#[allow(unused)]
#[allow(missing_docs)]
pub fn from_proto(value: u64) -> Self {
debug_assert!(value != 0);
Self(value as i32)
}


@@ -285,7 +285,7 @@ impl Database {
)
.one(&*tx)
.await?
.ok_or_else(|| anyhow!("no such project: {project_id}"))?;
.ok_or_else(|| anyhow!("no such project"))?;
// Update metadata.
worktree::Entity::update(worktree::ActiveModel {
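
One side of the hunk above adds the project id to the error message via `anyhow!`'s inline-captured format argument. A small standalone sketch of that `ok_or_else` + `anyhow!` pattern; the `find_project` helper is hypothetical and exists only to make the example runnable.

```rust
use anyhow::{anyhow, Result};

// Hypothetical lookup used only to illustrate the error-construction pattern.
fn find_project(project_id: u64) -> Option<String> {
    (project_id == 42).then(|| "zed".to_string())
}

fn project_name(project_id: u64) -> Result<String> {
    // `anyhow!` accepts format arguments, including identifiers captured
    // directly from the surrounding scope.
    find_project(project_id).ok_or_else(|| anyhow!("no such project: {project_id}"))
}

fn main() {
    assert!(project_name(42).is_ok());
    assert_eq!(
        project_name(7).unwrap_err().to_string(),
        "no such project: 7"
    );
}
```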


@@ -170,6 +170,8 @@ pub struct Config {
pub anthropic_api_key: Option<Arc<str>>,
pub anthropic_staff_api_key: Option<Arc<str>>,
pub llm_closed_beta_model_name: Option<Arc<str>>,
pub runpod_api_key: Option<Arc<str>>,
pub runpod_api_summary_url: Option<Arc<str>>,
pub zed_client_checksum_seed: Option<String>,
pub slack_panics_webhook: Option<String>,
pub auto_join_channel_id: Option<ChannelId>,
@@ -233,6 +235,8 @@ impl Config {
stripe_api_key: None,
stripe_price_id: None,
supermaven_admin_api_key: None,
runpod_api_key: None,
runpod_api_summary_url: None,
user_backfiller_github_access_token: None,
}
}


@@ -400,6 +400,42 @@ async fn perform_completion(
})
.boxed()
}
LanguageModelProvider::Zed => {
let api_key = state
.config
.runpod_api_key
.as_ref()
.context("no Qwen2-7B API key configured on the server")?;
let api_url = state
.config
.runpod_api_summary_url
.as_ref()
.context("no Qwen2-7B URL configured on the server")?;
let chunks = open_ai::stream_completion(
&state.http_client,
api_url,
api_key,
serde_json::from_str(params.provider_request.get())?,
None,
)
.await?;
chunks
.map(|event| {
event.map(|chunk| {
let input_tokens =
chunk.usage.as_ref().map_or(0, |u| u.prompt_tokens) as usize;
let output_tokens =
chunk.usage.as_ref().map_or(0, |u| u.completion_tokens) as usize;
(
serde_json::to_vec(&chunk).unwrap(),
input_tokens,
output_tokens,
)
})
})
.boxed()
}
};
Ok(Response::new(Body::wrap_stream(TokenCountingStream {


@@ -77,6 +77,7 @@ fn authorize_access_for_country(
LanguageModelProvider::Anthropic => anthropic::is_supported_country(country_code),
LanguageModelProvider::OpenAi => open_ai::is_supported_country(country_code),
LanguageModelProvider::Google => google_ai::is_supported_country(country_code),
LanguageModelProvider::Zed => true,
};
if !is_country_supported_by_provider {
Err(Error::http(
@@ -212,6 +213,7 @@ mod tests {
(LanguageModelProvider::Anthropic, "T1"), // Tor
(LanguageModelProvider::OpenAi, "T1"), // Tor
(LanguageModelProvider::Google, "T1"), // Tor
(LanguageModelProvider::Zed, "T1"), // Tor
];
for (provider, country_code) in cases {


@@ -40,6 +40,15 @@ pub async fn seed_database(_config: &Config, db: &mut LlmDatabase, _force: bool)
price_per_million_input_tokens: 25, // $0.25/MTok
price_per_million_output_tokens: 125, // $1.25/MTok
},
ModelParams {
provider: LanguageModelProvider::Zed,
name: "Qwen/Qwen2-7B-Instruct".into(),
max_requests_per_minute: 5,
max_tokens_per_minute: 25_000, // These are arbitrary limits we've set to cap costs; we control this number
max_tokens_per_day: 300_000,
price_per_million_input_tokens: 25,
price_per_million_output_tokens: 125,
},
])
.await
}


@@ -26,6 +26,7 @@ async fn test_initialize_providers(db: &mut LlmDatabase) {
LanguageModelProvider::Anthropic,
LanguageModelProvider::Google,
LanguageModelProvider::OpenAi,
LanguageModelProvider::Zed
]
)
}


@@ -474,6 +474,9 @@ impl Server {
.add_request_handler(user_handler(
forward_read_only_project_request::<proto::GetReferences>,
))
.add_request_handler(user_handler(
forward_read_only_project_request::<proto::SearchProject>,
))
.add_request_handler(user_handler(forward_find_search_candidates_request))
.add_request_handler(user_handler(
forward_read_only_project_request::<proto::GetDocumentHighlights>,
@@ -2295,7 +2298,7 @@ async fn list_remote_directory(
let dev_server_connection_id = session
.connection_pool()
.await
.online_dev_server_connection_id(dev_server_id)?;
.dev_server_connection_id_supporting(dev_server_id, ZedVersion::with_list_directory())?;
session
.db()
@@ -2334,7 +2337,10 @@ async fn update_dev_server_project(
let dev_server_connection_id = session
.connection_pool()
.await
.online_dev_server_connection_id(dev_server_project.dev_server_id)?;
.dev_server_connection_id_supporting(
dev_server_project.dev_server_id,
ZedVersion::with_list_directory(),
)?;
session.peer.send(
dev_server_connection_id,
@@ -2944,6 +2950,40 @@ async fn forward_find_search_candidates_request(
.await
.host_for_read_only_project_request(project_id, session.connection_id, session.user_id())
.await?;
let host_version = session
.connection_pool()
.await
.connection(host_connection_id)
.map(|c| c.zed_version);
if host_version.is_some_and(|host_version| host_version < ZedVersion::with_search_candidates())
{
let query = request.query.ok_or_else(|| anyhow!("missing query"))?;
let search = proto::SearchProject {
project_id: project_id.to_proto(),
query: query.query,
regex: query.regex,
whole_word: query.whole_word,
case_sensitive: query.case_sensitive,
files_to_include: query.files_to_include,
files_to_exclude: query.files_to_exclude,
include_ignored: query.include_ignored,
};
let payload = session
.peer
.forward_request(session.connection_id, host_connection_id, search)
.await?;
return response.send(proto::FindSearchCandidatesResponse {
buffer_ids: payload
.locations
.into_iter()
.map(|loc| loc.buffer_id)
.collect(),
});
}
let payload = session
.peer
.forward_request(session.connection_id, host_connection_id, request)


@@ -32,7 +32,15 @@ impl fmt::Display for ZedVersion {
impl ZedVersion {
pub fn can_collaborate(&self) -> bool {
self.0 >= SemanticVersion::new(0, 151, 0)
self.0 >= SemanticVersion::new(0, 134, 0)
}
pub fn with_list_directory() -> ZedVersion {
ZedVersion(SemanticVersion::new(0, 145, 0))
}
pub fn with_search_candidates() -> ZedVersion {
ZedVersion(SemanticVersion::new(0, 151, 0))
}
}
@@ -161,16 +169,6 @@ impl ConnectionPool {
self.connected_dev_servers.get(&dev_server_id).copied()
}
pub fn online_dev_server_connection_id(
&self,
dev_server_id: DevServerId,
) -> Result<ConnectionId> {
match self.connected_dev_servers.get(&dev_server_id) {
Some(cid) => Ok(*cid),
None => Err(anyhow!(proto::ErrorCode::DevServerOffline)),
}
}
pub fn dev_server_connection_id_supporting(
&self,
dev_server_id: DevServerId,


@@ -246,7 +246,7 @@ async fn test_channel_notes_participant_indices(
.update(cx_a, |call, cx| call.share_project(project_a.clone(), cx))
.await
.unwrap();
let project_b = client_b.join_remote_project(project_id, cx_b).await;
let project_b = client_b.build_dev_server_project(project_id, cx_b).await;
let (workspace_b, cx_b) = client_b.build_workspace(&project_b, cx_b);
// Clients A and B open the same file.


@@ -7,12 +7,18 @@ use collections::HashMap;
use editor::{
actions::{
ConfirmCodeAction, ConfirmCompletion, ConfirmRename, ContextMenuFirst, Redo, Rename,
ToggleCodeActions, Undo,
RevertSelectedHunks, ToggleCodeActions, Undo,
},
display_map::DisplayRow,
test::{
editor_hunks,
editor_test_context::{AssertionContextManager, EditorTestContext},
expanded_hunks, expanded_hunks_background_highlights,
},
test::editor_test_context::{AssertionContextManager, EditorTestContext},
Editor,
};
use futures::StreamExt;
use git::diff::DiffHunkStatus;
use gpui::{TestAppContext, UpdateGlobal, VisualContext, VisualTestContext};
use indoc::indoc;
use language::{
@@ -76,7 +82,7 @@ async fn test_host_disconnect(
.await
.unwrap();
let project_b = client_b.join_remote_project(project_id, cx_b).await;
let project_b = client_b.build_dev_server_project(project_id, cx_b).await;
cx_a.background_executor.run_until_parked();
assert!(worktree_a.read_with(cx_a, |tree, _| tree.has_update_observer()));
@@ -192,7 +198,7 @@ async fn test_newline_above_or_below_does_not_move_guest_cursor(
.await
.unwrap();
let project_b = client_b.join_remote_project(project_id, cx_b).await;
let project_b = client_b.build_dev_server_project(project_id, cx_b).await;
// Open a buffer as client A
let buffer_a = project_a
@@ -308,7 +314,7 @@ async fn test_collaborating_with_completion(cx_a: &mut TestAppContext, cx_b: &mu
.update(cx_a, |call, cx| call.share_project(project_a.clone(), cx))
.await
.unwrap();
let project_b = client_b.join_remote_project(project_id, cx_b).await;
let project_b = client_b.build_dev_server_project(project_id, cx_b).await;
// Open a file in an editor as the guest.
let buffer_b = project_b
@@ -565,7 +571,7 @@ async fn test_collaborating_with_code_actions(
.unwrap();
// Join the project as client B.
let project_b = client_b.join_remote_project(project_id, cx_b).await;
let project_b = client_b.build_dev_server_project(project_id, cx_b).await;
let (workspace_b, cx_b) = client_b.build_workspace(&project_b, cx_b);
let editor_b = workspace_b
.update(cx_b, |workspace, cx| {
@@ -780,7 +786,7 @@ async fn test_collaborating_with_renames(cx_a: &mut TestAppContext, cx_b: &mut T
.update(cx_a, |call, cx| call.share_project(project_a.clone(), cx))
.await
.unwrap();
let project_b = client_b.join_remote_project(project_id, cx_b).await;
let project_b = client_b.build_dev_server_project(project_id, cx_b).await;
let (workspace_b, cx_b) = client_b.build_workspace(&project_b, cx_b);
let editor_b = workspace_b
@@ -1030,7 +1036,7 @@ async fn test_language_server_statuses(cx_a: &mut TestAppContext, cx_b: &mut Tes
.await
.unwrap();
executor.run_until_parked();
let project_b = client_b.join_remote_project(project_id, cx_b).await;
let project_b = client_b.build_dev_server_project(project_id, cx_b).await;
project_b.read_with(cx_b, |project, cx| {
let status = project.language_server_statuses(cx).next().unwrap().1;
@@ -1126,7 +1132,9 @@ async fn test_share_project(
.await
.unwrap();
let client_b_peer_id = client_b.peer_id().unwrap();
let project_b = client_b.join_remote_project(initial_project.id, cx_b).await;
let project_b = client_b
.build_dev_server_project(initial_project.id, cx_b)
.await;
let replica_id_b = project_b.read_with(cx_b, |project, _| project.replica_id());
@@ -1228,7 +1236,9 @@ async fn test_share_project(
.update(cx_c, |call, cx| call.accept_incoming(cx))
.await
.unwrap();
let _project_c = client_c.join_remote_project(initial_project.id, cx_c).await;
let _project_c = client_c
.build_dev_server_project(initial_project.id, cx_c)
.await;
// Client B closes the editor, and client A sees client B's selections removed.
cx_b.update(move |_| drop(editor_b));
@@ -1287,7 +1297,7 @@ async fn test_on_input_format_from_host_to_guest(
.update(cx_a, |call, cx| call.share_project(project_a.clone(), cx))
.await
.unwrap();
let project_b = client_b.join_remote_project(project_id, cx_b).await;
let project_b = client_b.build_dev_server_project(project_id, cx_b).await;
// Open a file in an editor as the host.
let buffer_a = project_a
@@ -1407,7 +1417,7 @@ async fn test_on_input_format_from_guest_to_host(
.update(cx_a, |call, cx| call.share_project(project_a.clone(), cx))
.await
.unwrap();
let project_b = client_b.join_remote_project(project_id, cx_b).await;
let project_b = client_b.build_dev_server_project(project_id, cx_b).await;
// Open a file in an editor as the guest.
let buffer_b = project_b
@@ -1570,7 +1580,7 @@ async fn test_mutual_editor_inlay_hint_cache_update(
.unwrap();
// Client B joins the project
let project_b = client_b.join_remote_project(project_id, cx_b).await;
let project_b = client_b.build_dev_server_project(project_id, cx_b).await;
active_call_b
.update(cx_b, |call, cx| call.set_location(Some(&project_b), cx))
.await
@@ -1832,7 +1842,7 @@ async fn test_inlay_hint_refresh_is_forwarded(
.await
.unwrap();
let project_b = client_b.join_remote_project(project_id, cx_b).await;
let project_b = client_b.build_dev_server_project(project_id, cx_b).await;
active_call_b
.update(cx_b, |call, cx| call.set_location(Some(&project_b), cx))
.await
@@ -1960,6 +1970,285 @@ async fn test_inlay_hint_refresh_is_forwarded(
});
}
#[gpui::test]
async fn test_multiple_hunk_types_revert(cx_a: &mut TestAppContext, cx_b: &mut TestAppContext) {
let mut server = TestServer::start(cx_a.executor()).await;
let client_a = server.create_client(cx_a, "user_a").await;
let client_b = server.create_client(cx_b, "user_b").await;
server
.create_room(&mut [(&client_a, cx_a), (&client_b, cx_b)])
.await;
let active_call_a = cx_a.read(ActiveCall::global);
let active_call_b = cx_b.read(ActiveCall::global);
cx_a.update(editor::init);
cx_b.update(editor::init);
client_a.language_registry().add(rust_lang());
client_b.language_registry().add(rust_lang());
let base_text = indoc! {r#"struct Row;
struct Row1;
struct Row2;
struct Row4;
struct Row5;
struct Row6;
struct Row8;
struct Row9;
struct Row10;"#};
client_a
.fs()
.insert_tree(
"/a",
json!({
"main.rs": base_text,
}),
)
.await;
let (project_a, worktree_id) = client_a.build_local_project("/a", cx_a).await;
active_call_a
.update(cx_a, |call, cx| call.set_location(Some(&project_a), cx))
.await
.unwrap();
let project_id = active_call_a
.update(cx_a, |call, cx| call.share_project(project_a.clone(), cx))
.await
.unwrap();
let project_b = client_b.build_dev_server_project(project_id, cx_b).await;
active_call_b
.update(cx_b, |call, cx| call.set_location(Some(&project_b), cx))
.await
.unwrap();
let (workspace_a, cx_a) = client_a.build_workspace(&project_a, cx_a);
let (workspace_b, cx_b) = client_b.build_workspace(&project_b, cx_b);
let editor_a = workspace_a
.update(cx_a, |workspace, cx| {
workspace.open_path((worktree_id, "main.rs"), None, true, cx)
})
.await
.unwrap()
.downcast::<Editor>()
.unwrap();
let editor_b = workspace_b
.update(cx_b, |workspace, cx| {
workspace.open_path((worktree_id, "main.rs"), None, true, cx)
})
.await
.unwrap()
.downcast::<Editor>()
.unwrap();
let mut editor_cx_a = EditorTestContext {
cx: cx_a.clone(),
window: cx_a.handle(),
editor: editor_a,
assertion_cx: AssertionContextManager::new(),
};
let mut editor_cx_b = EditorTestContext {
cx: cx_b.clone(),
window: cx_b.handle(),
editor: editor_b,
assertion_cx: AssertionContextManager::new(),
};
// host edits the file, that differs from the base text, producing diff hunks
editor_cx_a.set_state(indoc! {r#"struct Row;
struct Row0.1;
struct Row0.2;
struct Row1;
struct Row4;
struct Row5444;
struct Row6;
struct Row9;
struct Row1220;ˇ"#});
editor_cx_a.update_editor(|editor, cx| {
editor
.buffer()
.read(cx)
.as_singleton()
.unwrap()
.update(cx, |buffer, cx| {
buffer.set_diff_base(Some(base_text.into()), cx);
});
});
editor_cx_b.update_editor(|editor, cx| {
editor
.buffer()
.read(cx)
.as_singleton()
.unwrap()
.update(cx, |buffer, cx| {
buffer.set_diff_base(Some(base_text.into()), cx);
});
});
cx_a.executor().run_until_parked();
cx_b.executor().run_until_parked();
// the client selects a range in the updated buffer, expands it to see the diff for each hunk in the selection
// the host does not see the diffs toggled
editor_cx_b.set_selections_state(indoc! {r#"«ˇstruct Row;
struct Row0.1;
struct Row0.2;
struct Row1;
struct Row4;
struct Row5444;
struct Row6;
struct R»ow9;
struct Row1220;"#});
editor_cx_b
.update_editor(|editor, cx| editor.toggle_hunk_diff(&editor::actions::ToggleHunkDiff, cx));
cx_a.executor().run_until_parked();
cx_b.executor().run_until_parked();
editor_cx_a.update_editor(|editor, cx| {
let snapshot = editor.snapshot(cx);
let all_hunks = editor_hunks(editor, &snapshot, cx);
let all_expanded_hunks = expanded_hunks(editor, &snapshot, cx);
assert_eq!(expanded_hunks_background_highlights(editor, cx), Vec::new());
assert_eq!(
all_hunks,
vec![
(
"".to_string(),
DiffHunkStatus::Added,
DisplayRow(1)..DisplayRow(3)
),
(
"struct Row2;\n".to_string(),
DiffHunkStatus::Removed,
DisplayRow(4)..DisplayRow(4)
),
(
"struct Row5;\n".to_string(),
DiffHunkStatus::Modified,
DisplayRow(6)..DisplayRow(7)
),
(
"struct Row8;\n".to_string(),
DiffHunkStatus::Removed,
DisplayRow(9)..DisplayRow(9)
),
(
"struct Row10;".to_string(),
DiffHunkStatus::Modified,
DisplayRow(10)..DisplayRow(10),
),
]
);
assert_eq!(all_expanded_hunks, Vec::new());
});
editor_cx_b.update_editor(|editor, cx| {
let snapshot = editor.snapshot(cx);
let all_hunks = editor_hunks(editor, &snapshot, cx);
let all_expanded_hunks = expanded_hunks(editor, &snapshot, cx);
assert_eq!(
expanded_hunks_background_highlights(editor, cx),
vec![DisplayRow(1)..=DisplayRow(2), DisplayRow(8)..=DisplayRow(8)],
);
assert_eq!(
all_hunks,
vec![
(
"".to_string(),
DiffHunkStatus::Added,
DisplayRow(1)..DisplayRow(3)
),
(
"struct Row2;\n".to_string(),
DiffHunkStatus::Removed,
DisplayRow(5)..DisplayRow(5)
),
(
"struct Row5;\n".to_string(),
DiffHunkStatus::Modified,
DisplayRow(8)..DisplayRow(9)
),
(
"struct Row8;\n".to_string(),
DiffHunkStatus::Removed,
DisplayRow(12)..DisplayRow(12)
),
(
"struct Row10;".to_string(),
DiffHunkStatus::Modified,
DisplayRow(13)..DisplayRow(13),
),
]
);
assert_eq!(all_expanded_hunks, &all_hunks[..all_hunks.len() - 1]);
});
// the client reverts the hunks, removing the expanded diffs too
// both host and the client observe the reverted state (with one hunk left, not covered by client's selection)
editor_cx_b.update_editor(|editor, cx| {
editor.revert_selected_hunks(&RevertSelectedHunks, cx);
});
cx_a.executor().run_until_parked();
cx_b.executor().run_until_parked();
editor_cx_a.update_editor(|editor, cx| {
let snapshot = editor.snapshot(cx);
let all_hunks = editor_hunks(editor, &snapshot, cx);
let all_expanded_hunks = expanded_hunks(editor, &snapshot, cx);
assert_eq!(expanded_hunks_background_highlights(editor, cx), Vec::new());
assert_eq!(
all_hunks,
vec![(
"struct Row10;".to_string(),
DiffHunkStatus::Modified,
DisplayRow(10)..DisplayRow(10),
)]
);
assert_eq!(all_expanded_hunks, Vec::new());
});
editor_cx_b.update_editor(|editor, cx| {
let snapshot = editor.snapshot(cx);
let all_hunks = editor_hunks(editor, &snapshot, cx);
let all_expanded_hunks = expanded_hunks(editor, &snapshot, cx);
assert_eq!(expanded_hunks_background_highlights(editor, cx), Vec::new());
assert_eq!(
all_hunks,
vec![(
"struct Row10;".to_string(),
DiffHunkStatus::Modified,
DisplayRow(10)..DisplayRow(10),
)]
);
assert_eq!(all_expanded_hunks, Vec::new());
});
editor_cx_a.assert_editor_state(indoc! {r#"struct Row;
struct Row1;
struct Row2;
struct Row4;
struct Row5;
struct Row6;
struct Row8;
struct Row9;
struct Row1220;ˇ"#});
editor_cx_b.assert_editor_state(indoc! {r#"«ˇstruct Row;
struct Row1;
struct Row2;
struct Row4;
struct Row5;
struct Row6;
struct Row8;
struct R»ow9;
struct Row1220;"#});
}
#[gpui::test(iterations = 10)]
async fn test_git_blame_is_forwarded(cx_a: &mut TestAppContext, cx_b: &mut TestAppContext) {
let mut server = TestServer::start(cx_a.executor()).await;
@@ -2046,7 +2335,7 @@ async fn test_git_blame_is_forwarded(cx_a: &mut TestAppContext, cx_b: &mut TestA
.unwrap();
// Join the project as client B.
let project_b = client_b.join_remote_project(project_id, cx_b).await;
let project_b = client_b.build_dev_server_project(project_id, cx_b).await;
let (workspace_b, cx_b) = client_b.build_workspace(&project_b, cx_b);
let editor_b = workspace_b
.update(cx_b, |workspace, cx| {


@@ -74,7 +74,7 @@ async fn test_basic_following(
.update(cx_a, |call, cx| call.share_project(project_a.clone(), cx))
.await
.unwrap();
let project_b = client_b.join_remote_project(project_id, cx_b).await;
let project_b = client_b.build_dev_server_project(project_id, cx_b).await;
active_call_b
.update(cx_b, |call, cx| call.set_location(Some(&project_b), cx))
.await
@@ -162,7 +162,7 @@ async fn test_basic_following(
executor.run_until_parked();
let active_call_c = cx_c.read(ActiveCall::global);
let project_c = client_c.join_remote_project(project_id, cx_c).await;
let project_c = client_c.build_dev_server_project(project_id, cx_c).await;
let (workspace_c, cx_c) = client_c.build_workspace(&project_c, cx_c);
active_call_c
.update(cx_c, |call, cx| call.set_location(Some(&project_c), cx))
@@ -175,7 +175,7 @@ async fn test_basic_following(
cx_d.executor().run_until_parked();
let active_call_d = cx_d.read(ActiveCall::global);
let project_d = client_d.join_remote_project(project_id, cx_d).await;
let project_d = client_d.build_dev_server_project(project_id, cx_d).await;
let (workspace_d, cx_d) = client_d.build_workspace(&project_d, cx_d);
active_call_d
.update(cx_d, |call, cx| call.set_location(Some(&project_d), cx))
@@ -569,7 +569,7 @@ async fn test_following_tab_order(
.update(cx_a, |call, cx| call.share_project(project_a.clone(), cx))
.await
.unwrap();
let project_b = client_b.join_remote_project(project_id, cx_b).await;
let project_b = client_b.build_dev_server_project(project_id, cx_b).await;
active_call_b
.update(cx_b, |call, cx| call.set_location(Some(&project_b), cx))
.await
@@ -686,7 +686,7 @@ async fn test_peers_following_each_other(cx_a: &mut TestAppContext, cx_b: &mut T
.unwrap();
// Client B joins the project.
let project_b = client_b.join_remote_project(project_id, cx_b).await;
let project_b = client_b.build_dev_server_project(project_id, cx_b).await;
active_call_b
.update(cx_b, |call, cx| call.set_location(Some(&project_b), cx))
.await
@@ -1199,7 +1199,7 @@ async fn test_auto_unfollowing(cx_a: &mut TestAppContext, cx_b: &mut TestAppCont
.update(cx_a, |call, cx| call.share_project(project_a.clone(), cx))
.await
.unwrap();
let project_b = client_b.join_remote_project(project_id, cx_b).await;
let project_b = client_b.build_dev_server_project(project_id, cx_b).await;
active_call_b
.update(cx_b, |call, cx| call.set_location(Some(&project_b), cx))
.await
@@ -1335,7 +1335,7 @@ async fn test_peers_simultaneously_following_each_other(
.await
.unwrap();
let project_b = client_b.join_remote_project(project_id, cx_b).await;
let project_b = client_b.build_dev_server_project(project_id, cx_b).await;
let (workspace_b, cx_b) = client_b.build_workspace(&project_b, cx_b);
executor.run_until_parked();
@@ -1685,7 +1685,7 @@ async fn test_following_into_excluded_file(
.update(cx_a, |call, cx| call.share_project(project_a.clone(), cx))
.await
.unwrap();
let project_b = client_b.join_remote_project(project_id, cx_b).await;
let project_b = client_b.build_dev_server_project(project_id, cx_b).await;
active_call_b
.update(cx_b, |call, cx| call.set_location(Some(&project_b), cx))
.await


@@ -1372,7 +1372,7 @@ async fn test_unshare_project(
.unwrap();
let worktree_a = project_a.read_with(cx_a, |project, cx| project.worktrees(cx).next().unwrap());
let project_b = client_b.join_remote_project(project_id, cx_b).await;
let project_b = client_b.build_dev_server_project(project_id, cx_b).await;
executor.run_until_parked();
assert!(worktree_a.read_with(cx_a, |tree, _| tree.has_update_observer()));
@@ -1392,7 +1392,7 @@ async fn test_unshare_project(
assert!(project_b.read_with(cx_b, |project, _| project.is_disconnected()));
// Client C opens the project.
let project_c = client_c.join_remote_project(project_id, cx_c).await;
let project_c = client_c.build_dev_server_project(project_id, cx_c).await;
// When client A unshares the project, client C's project becomes read-only.
project_a
@@ -1409,7 +1409,7 @@ async fn test_unshare_project(
.update(cx_a, |call, cx| call.share_project(project_a.clone(), cx))
.await
.unwrap();
let project_c2 = client_c.join_remote_project(project_id, cx_c).await;
let project_c2 = client_c.build_dev_server_project(project_id, cx_c).await;
executor.run_until_parked();
assert!(worktree_a.read_with(cx_a, |tree, _| tree.has_update_observer()));
@@ -1514,9 +1514,9 @@ async fn test_project_reconnect(
.await
.unwrap();
let project_b1 = client_b.join_remote_project(project1_id, cx_b).await;
let project_b2 = client_b.join_remote_project(project2_id, cx_b).await;
let project_b3 = client_b.join_remote_project(project3_id, cx_b).await;
let project_b1 = client_b.build_dev_server_project(project1_id, cx_b).await;
let project_b2 = client_b.build_dev_server_project(project2_id, cx_b).await;
let project_b3 = client_b.build_dev_server_project(project3_id, cx_b).await;
executor.run_until_parked();
let worktree1_id = worktree_a1.read_with(cx_a, |worktree, _| {
@@ -2310,8 +2310,8 @@ async fn test_propagate_saves_and_fs_changes(
.unwrap();
// Join that worktree as clients B and C.
let project_b = client_b.join_remote_project(project_id, cx_b).await;
let project_c = client_c.join_remote_project(project_id, cx_c).await;
let project_b = client_b.build_dev_server_project(project_id, cx_b).await;
let project_c = client_c.build_dev_server_project(project_id, cx_c).await;
let worktree_b = project_b.read_with(cx_b, |p, cx| p.worktrees(cx).next().unwrap());
@@ -2535,7 +2535,7 @@ async fn test_git_diff_base_change(
.await
.unwrap();
let project_remote = client_b.join_remote_project(project_id, cx_b).await;
let project_remote = client_b.build_dev_server_project(project_id, cx_b).await;
let diff_base = "
one
@@ -2791,7 +2791,7 @@ async fn test_git_branch_name(
.await
.unwrap();
let project_remote = client_b.join_remote_project(project_id, cx_b).await;
let project_remote = client_b.build_dev_server_project(project_id, cx_b).await;
client_a
.fs()
.set_branch_name(Path::new("/dir/.git"), Some("branch-1"));
@@ -2836,7 +2836,7 @@ async fn test_git_branch_name(
assert_branch(Some("branch-2"), project, cx)
});
let project_remote_c = client_c.join_remote_project(project_id, cx_c).await;
let project_remote_c = client_c.build_dev_server_project(project_id, cx_c).await;
executor.run_until_parked();
project_remote_c.read_with(cx_c, |project, cx| {
@@ -2891,7 +2891,7 @@ async fn test_git_status_sync(
.await
.unwrap();
let project_remote = client_b.join_remote_project(project_id, cx_b).await;
let project_remote = client_b.build_dev_server_project(project_id, cx_b).await;
// Wait for it to catch up to the new status
executor.run_until_parked();
@@ -2967,7 +2967,7 @@ async fn test_git_status_sync(
});
// And synchronization while joining
let project_remote_c = client_c.join_remote_project(project_id, cx_c).await;
let project_remote_c = client_c.build_dev_server_project(project_id, cx_c).await;
executor.run_until_parked();
project_remote_c.read_with(cx_c, |project, cx| {
@@ -3015,7 +3015,7 @@ async fn test_fs_operations(
.update(cx_a, |call, cx| call.share_project(project_a.clone(), cx))
.await
.unwrap();
let project_b = client_b.join_remote_project(project_id, cx_b).await;
let project_b = client_b.build_dev_server_project(project_id, cx_b).await;
let worktree_a = project_a.read_with(cx_a, |project, cx| project.worktrees(cx).next().unwrap());
let worktree_b = project_b.read_with(cx_b, |project, cx| project.worktrees(cx).next().unwrap());
@@ -3316,7 +3316,7 @@ async fn test_local_settings(
executor.run_until_parked();
// As client B, join that project and observe the local settings.
let project_b = client_b.join_remote_project(project_id, cx_b).await;
let project_b = client_b.build_dev_server_project(project_id, cx_b).await;
let worktree_b = project_b.read_with(cx_b, |project, cx| project.worktrees(cx).next().unwrap());
executor.run_until_parked();
@@ -3439,7 +3439,7 @@ async fn test_buffer_conflict_after_save(
.update(cx_a, |call, cx| call.share_project(project_a.clone(), cx))
.await
.unwrap();
let project_b = client_b.join_remote_project(project_id, cx_b).await;
let project_b = client_b.build_dev_server_project(project_id, cx_b).await;
// Open a buffer as client B
let buffer_b = project_b
@@ -3503,7 +3503,7 @@ async fn test_buffer_reloading(
.update(cx_a, |call, cx| call.share_project(project_a.clone(), cx))
.await
.unwrap();
let project_b = client_b.join_remote_project(project_id, cx_b).await;
let project_b = client_b.build_dev_server_project(project_id, cx_b).await;
// Open a buffer as client B
let buffer_b = project_b
@@ -3557,7 +3557,7 @@ async fn test_editing_while_guest_opens_buffer(
.update(cx_a, |call, cx| call.share_project(project_a.clone(), cx))
.await
.unwrap();
let project_b = client_b.join_remote_project(project_id, cx_b).await;
let project_b = client_b.build_dev_server_project(project_id, cx_b).await;
// Open a buffer as client A
let buffer_a = project_a
@@ -3605,7 +3605,7 @@ async fn test_leaving_worktree_while_opening_buffer(
.update(cx_a, |call, cx| call.share_project(project_a.clone(), cx))
.await
.unwrap();
let project_b = client_b.join_remote_project(project_id, cx_b).await;
let project_b = client_b.build_dev_server_project(project_id, cx_b).await;
// See that a guest has joined as client A.
executor.run_until_parked();
@@ -3652,7 +3652,7 @@ async fn test_canceling_buffer_opening(
.update(cx_a, |call, cx| call.share_project(project_a.clone(), cx))
.await
.unwrap();
let project_b = client_b.join_remote_project(project_id, cx_b).await;
let project_b = client_b.build_dev_server_project(project_id, cx_b).await;
let buffer_a = project_a
.update(cx_a, |p, cx| p.open_buffer((worktree_id, "a.txt"), cx))
@@ -3709,8 +3709,8 @@ async fn test_leaving_project(
.update(cx_a, |call, cx| call.share_project(project_a.clone(), cx))
.await
.unwrap();
let project_b1 = client_b.join_remote_project(project_id, cx_b).await;
let project_c = client_c.join_remote_project(project_id, cx_c).await;
let project_b1 = client_b.build_dev_server_project(project_id, cx_b).await;
let project_c = client_c.build_dev_server_project(project_id, cx_c).await;
// Client A sees that a guest has joined.
executor.run_until_parked();
@@ -3751,7 +3751,7 @@ async fn test_leaving_project(
});
// Client B re-joins the project and can open buffers as before.
let project_b2 = client_b.join_remote_project(project_id, cx_b).await;
let project_b2 = client_b.build_dev_server_project(project_id, cx_b).await;
executor.run_until_parked();
project_a.read_with(cx_a, |project, _| {
@@ -3927,7 +3927,7 @@ async fn test_collaborating_with_diagnostics(
);
// Join the worktree as client B.
let project_b = client_b.join_remote_project(project_id, cx_b).await;
let project_b = client_b.build_dev_server_project(project_id, cx_b).await;
// Wait for server to see the diagnostics update.
executor.run_until_parked();
@@ -3952,7 +3952,7 @@ async fn test_collaborating_with_diagnostics(
});
// Join project as client C and observe the diagnostics.
let project_c = client_c.join_remote_project(project_id, cx_c).await;
let project_c = client_c.build_dev_server_project(project_id, cx_c).await;
executor.run_until_parked();
let project_c_diagnostic_summaries =
Rc::new(RefCell::new(project_c.read_with(cx_c, |project, cx| {
@@ -4160,7 +4160,7 @@ async fn test_collaborating_with_lsp_progress_updates_and_diagnostics_ordering(
.unwrap();
// Join the project as client B and open all three files.
let project_b = client_b.join_remote_project(project_id, cx_b).await;
let project_b = client_b.build_dev_server_project(project_id, cx_b).await;
let guest_buffers = futures::future::try_join_all(file_names.iter().map(|file_name| {
project_b.update(cx_b, |p, cx| p.open_buffer((worktree_id, file_name), cx))
}))
@@ -4266,7 +4266,7 @@ async fn test_reloading_buffer_manually(
.await
.unwrap();
let project_b = client_b.join_remote_project(project_id, cx_b).await;
let project_b = client_b.build_dev_server_project(project_id, cx_b).await;
let open_buffer = project_b.update(cx_b, |p, cx| p.open_buffer((worktree_id, "a.rs"), cx));
let buffer_b = cx_b.executor().spawn(open_buffer).await.unwrap();
@@ -4364,7 +4364,7 @@ async fn test_formatting_buffer(
.update(cx_a, |call, cx| call.share_project(project_a.clone(), cx))
.await
.unwrap();
let project_b = client_b.join_remote_project(project_id, cx_b).await;
let project_b = client_b.build_dev_server_project(project_id, cx_b).await;
let open_buffer = project_b.update(cx_b, |p, cx| p.open_buffer((worktree_id, "a.rs"), cx));
let buffer_b = cx_b.executor().spawn(open_buffer).await.unwrap();
@@ -4409,7 +4409,7 @@ async fn test_formatting_buffer(
file.defaults.formatter = Some(SelectedFormatter::List(FormatterList(
vec![Formatter::External {
command: "awk".into(),
arguments: Some(vec!["{sub(/two/,\"{buffer_path}\")}1".to_string()].into()),
arguments: vec!["{sub(/two/,\"{buffer_path}\")}1".to_string()].into(),
}]
.into(),
)));
@@ -4486,7 +4486,7 @@ async fn test_prettier_formatting_buffer(
.update(cx_a, |call, cx| call.share_project(project_a.clone(), cx))
.await
.unwrap();
let project_b = client_b.join_remote_project(project_id, cx_b).await;
let project_b = client_b.build_dev_server_project(project_id, cx_b).await;
let open_buffer = project_b.update(cx_b, |p, cx| p.open_buffer((worktree_id, "a.ts"), cx));
let buffer_b = cx_b.executor().spawn(open_buffer).await.unwrap();
@@ -4599,7 +4599,7 @@ async fn test_definition(
.update(cx_a, |call, cx| call.share_project(project_a.clone(), cx))
.await
.unwrap();
let project_b = client_b.join_remote_project(project_id, cx_b).await;
let project_b = client_b.build_dev_server_project(project_id, cx_b).await;
// Open the file on client B.
let open_buffer = project_b.update(cx_b, |p, cx| p.open_buffer((worktree_id, "a.rs"), cx));
@@ -4744,7 +4744,7 @@ async fn test_references(
.update(cx_a, |call, cx| call.share_project(project_a.clone(), cx))
.await
.unwrap();
let project_b = client_b.join_remote_project(project_id, cx_b).await;
let project_b = client_b.build_dev_server_project(project_id, cx_b).await;
// Open the file on client B.
let open_buffer = project_b.update(cx_b, |p, cx| p.open_buffer((worktree_id, "one.rs"), cx));
@@ -4901,7 +4901,7 @@ async fn test_project_search(
.await
.unwrap();
let project_b = client_b.join_remote_project(project_id, cx_b).await;
let project_b = client_b.build_dev_server_project(project_id, cx_b).await;
// Perform a search as the guest.
let mut results = HashMap::default();
@@ -4991,7 +4991,7 @@ async fn test_document_highlights(
.update(cx_a, |call, cx| call.share_project(project_a.clone(), cx))
.await
.unwrap();
let project_b = client_b.join_remote_project(project_id, cx_b).await;
let project_b = client_b.build_dev_server_project(project_id, cx_b).await;
// Open the file on client B.
let open_b = project_b.update(cx_b, |p, cx| p.open_buffer((worktree_id, "main.rs"), cx));
@@ -5109,7 +5109,7 @@ async fn test_lsp_hover(
.update(cx_a, |call, cx| call.share_project(project_a.clone(), cx))
.await
.unwrap();
let project_b = client_b.join_remote_project(project_id, cx_b).await;
let project_b = client_b.build_dev_server_project(project_id, cx_b).await;
// Open the file as the guest
let open_buffer = project_b.update(cx_b, |p, cx| p.open_buffer((worktree_id, "main.rs"), cx));
@@ -5286,7 +5286,7 @@ async fn test_project_symbols(
.update(cx_a, |call, cx| call.share_project(project_a.clone(), cx))
.await
.unwrap();
let project_b = client_b.join_remote_project(project_id, cx_b).await;
let project_b = client_b.build_dev_server_project(project_id, cx_b).await;
// Cause the language server to start.
let open_buffer_task =
@@ -5381,7 +5381,7 @@ async fn test_open_buffer_while_getting_definition_pointing_to_it(
.update(cx_a, |call, cx| call.share_project(project_a.clone(), cx))
.await
.unwrap();
let project_b = client_b.join_remote_project(project_id, cx_b).await;
let project_b = client_b.build_dev_server_project(project_id, cx_b).await;
let open_buffer_task = project_b.update(cx_b, |p, cx| p.open_buffer((worktree_id, "a.rs"), cx));
let buffer_b1 = cx_b.executor().spawn(open_buffer_task).await.unwrap();
@@ -6470,7 +6470,7 @@ async fn test_context_collaboration_with_reconnect(
.update(cx_a, |call, cx| call.share_project(project_a.clone(), cx))
.await
.unwrap();
let project_b = client_b.join_remote_project(project_id, cx_b).await;
let project_b = client_b.build_dev_server_project(project_id, cx_b).await;
// Client A sees that a guest has joined.
executor.run_until_parked();

View File

@@ -9,7 +9,7 @@ use remote_server::HeadlessProject;
use serde_json::json;
use std::{path::Path, sync::Arc};
#[gpui::test(iterations = 10)]
#[gpui::test]
async fn test_sharing_an_ssh_remote_project(
cx_a: &mut TestAppContext,
cx_b: &mut TestAppContext,
@@ -54,8 +54,9 @@ async fn test_sharing_an_ssh_remote_project(
let (project_a, worktree_id) = client_a
.build_ssh_project("/code/project1", client_ssh, cx_a)
.await;
executor.run_until_parked();
// While the SSH worktree is being scanned, user A shares the remote project.
// User A shares the remote project.
let active_call_a = cx_a.read(ActiveCall::global);
let project_id = active_call_a
.update(cx_a, |call, cx| call.share_project(project_a.clone(), cx))
@@ -63,30 +64,12 @@ async fn test_sharing_an_ssh_remote_project(
.unwrap();
// User B joins the project.
let project_b = client_b.join_remote_project(project_id, cx_b).await;
let project_b = client_b.build_dev_server_project(project_id, cx_b).await;
let worktree_b = project_b
.update(cx_b, |project, cx| project.worktree_for_id(worktree_id, cx))
.unwrap();
let worktree_a = project_a
.update(cx_a, |project, cx| project.worktree_for_id(worktree_id, cx))
.unwrap();
executor.run_until_parked();
worktree_a.update(cx_a, |worktree, _cx| {
assert_eq!(
worktree.paths().map(Arc::as_ref).collect::<Vec<_>>(),
vec![
Path::new(".zed"),
Path::new(".zed/settings.json"),
Path::new("README.md"),
Path::new("src"),
Path::new("src/lib.rs"),
]
);
});
worktree_b.update(cx_b, |worktree, _cx| {
assert_eq!(
worktree.paths().map(Arc::as_ref).collect::<Vec<_>>(),

View File

@@ -679,6 +679,8 @@ impl TestServer {
stripe_api_key: None,
stripe_price_id: None,
supermaven_admin_api_key: None,
runpod_api_key: None,
runpod_api_summary_url: None,
user_backfiller_github_access_token: None,
},
})
@@ -919,7 +921,7 @@ impl TestClient {
})
}
pub async fn join_remote_project(
pub async fn build_dev_server_project(
&self,
host_project_id: u64,
guest_cx: &mut TestAppContext,

View File

@@ -37,6 +37,7 @@ fs.workspace = true
futures.workspace = true
gpui.workspace = true
http_client.workspace = true
isahc.workspace = true
language.workspace = true
lsp.workspace = true
menu.workspace = true

View File

@@ -7,7 +7,8 @@ use chrono::DateTime;
use fs::Fs;
use futures::{io::BufReader, stream::BoxStream, AsyncBufReadExt, AsyncReadExt, StreamExt};
use gpui::{AppContext, AsyncAppContext, Global};
use http_client::{AsyncBody, HttpClient, HttpRequestExt, Method, Request as HttpRequest};
use http_client::{AsyncBody, HttpClient, Method, Request as HttpRequest};
use isahc::config::Configurable;
use paths::home_dir;
use serde::{Deserialize, Serialize};
use settings::watch_config_file;
@@ -274,7 +275,7 @@ async fn request_api_token(
.header("Accept", "application/json");
if let Some(low_speed_timeout) = low_speed_timeout {
request_builder = request_builder.read_timeout(low_speed_timeout);
request_builder = request_builder.low_speed_timeout(100, low_speed_timeout);
}
let request = request_builder.body(AsyncBody::empty())?;
@@ -331,7 +332,7 @@ async fn stream_completion(
.header("Copilot-Integration-Id", "vscode-chat");
if let Some(low_speed_timeout) = low_speed_timeout {
request_builder = request_builder.read_timeout(low_speed_timeout);
request_builder = request_builder.low_speed_timeout(100, low_speed_timeout);
}
let request = request_builder.body(AsyncBody::from(serde_json::to_string(&request)?))?;
let mut response = client.send(request).await?;
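A side note on the two timeout styles in this hunk: the isahc path configures a low-speed abort rather than a plain read timeout. A minimal sketch of that call, assuming isahc's Configurable extension trait for http request builders (as imported above); the URL and body are placeholders, not Copilot's real endpoint.

use std::time::Duration;

use isahc::config::Configurable; // brings low_speed_timeout() into scope for request builders

// Sketch only: placeholder URL and a plain string body instead of AsyncBody.
fn build_token_request(
    low_speed_timeout: Option<Duration>,
) -> Result<isahc::Request<&'static str>, isahc::http::Error> {
    let mut builder = isahc::Request::post("https://example.invalid/token")
        .header("Accept", "application/json");
    if let Some(window) = low_speed_timeout {
        // Abort the transfer if throughput stays under 100 bytes/sec for `window`,
        // mirroring the low_speed_timeout(100, low_speed_timeout) call above.
        builder = builder.low_speed_timeout(100, window);
    }
    builder.body("{}")
}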

View File

@@ -24,8 +24,7 @@ test-support = [
"workspace/test-support",
"tree-sitter-rust",
"tree-sitter-typescript",
"tree-sitter-html",
"unindent",
"tree-sitter-html"
]
[dependencies]
@@ -55,7 +54,6 @@ markdown.workspace = true
multi_buffer.workspace = true
ordered-float.workspace = true
parking_lot.workspace = true
pretty_assertions.workspace = true
project.workspace = true
rand.workspace = true
rpc.workspace = true
@@ -76,7 +74,6 @@ theme.workspace = true
tree-sitter-html = { workspace = true, optional = true }
tree-sitter-rust = { workspace = true, optional = true }
tree-sitter-typescript = { workspace = true, optional = true }
unindent = { workspace = true, optional = true }
ui.workspace = true
url.workspace = true
util.workspace = true

View File

@@ -230,11 +230,7 @@ gpui::actions!(
ExpandMacroRecursively,
FindAllReferences,
Fold,
FoldAll,
FoldRecursive,
FoldSelectedRanges,
ToggleFold,
ToggleFoldRecursive,
Format,
GoToDeclaration,
GoToDeclarationSplit,
@@ -344,9 +340,7 @@ gpui::actions!(
Transpose,
Undo,
UndoSelection,
UnfoldAll,
UnfoldLines,
UnfoldRecursive,
UniqueLinesCaseInsensitive,
UniqueLinesCaseSensitive,
]

View File

@@ -1360,7 +1360,7 @@ impl<'a> Iterator for BlockBufferRows<'a> {
impl sum_tree::Item for Transform {
type Summary = TransformSummary;
fn summary(&self, _cx: &()) -> Self::Summary {
fn summary(&self) -> Self::Summary {
self.summary.clone()
}
}
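The same `summary` signature change repeats across several display-map files below. A self-contained toy illustration of the two shapes; these stand-in traits only mirror the signatures shown in the hunks, not the real sum_tree API.

// Stand-in traits that only mirror the two signatures shown in these hunks.
trait ItemWithContext {
    type Summary;
    type Context;
    fn summary(&self, cx: &Self::Context) -> Self::Summary;
}

trait ItemWithoutContext {
    type Summary;
    fn summary(&self) -> Self::Summary;
}

struct Transform {
    len: usize,
}

impl ItemWithoutContext for Transform {
    type Summary = usize;
    fn summary(&self) -> usize {
        // The summary is derived from the item alone, no shared context needed.
        self.len
    }
}

impl ItemWithContext for Transform {
    type Summary = usize;
    type Context = ();
    fn summary(&self, _cx: &()) -> usize {
        // Same data, but the caller threads a context value through, as in the
        // `_cx: &()` and `_cx: &MultiBufferSnapshot` variants in these hunks.
        self.len
    }
}

fn main() {
    let transform = Transform { len: 3 };
    assert_eq!(ItemWithoutContext::summary(&transform), 3);
    assert_eq!(ItemWithContext::summary(&transform, &()), 3);
}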

View File

@@ -291,7 +291,7 @@ impl sum_tree::Summary for ItemSummary {
impl sum_tree::Item for CreaseItem {
type Summary = ItemSummary;
fn summary(&self, _cx: &MultiBufferSnapshot) -> Self::Summary {
fn summary(&self) -> Self::Summary {
ItemSummary {
range: self.crease.range.clone(),
}

View File

@@ -944,7 +944,7 @@ struct TransformSummary {
impl sum_tree::Item for Transform {
type Summary = TransformSummary;
fn summary(&self, _cx: &()) -> Self::Summary {
fn summary(&self) -> Self::Summary {
self.summary.clone()
}
}
@@ -1004,7 +1004,7 @@ impl Default for FoldRange {
impl sum_tree::Item for Fold {
type Summary = FoldSummary;
fn summary(&self, _cx: &MultiBufferSnapshot) -> Self::Summary {
fn summary(&self) -> Self::Summary {
FoldSummary {
start: self.range.start,
end: self.range.end,

View File

@@ -74,7 +74,7 @@ impl Inlay {
impl sum_tree::Item for Transform {
type Summary = TransformSummary;
fn summary(&self, _cx: &()) -> Self::Summary {
fn summary(&self) -> Self::Summary {
match self {
Transform::Isomorphic(summary) => TransformSummary {
input: summary.clone(),

View File

@@ -917,7 +917,7 @@ impl Transform {
impl sum_tree::Item for Transform {
type Summary = TransformSummary;
fn summary(&self, _cx: &()) -> Self::Summary {
fn summary(&self) -> Self::Summary {
self.summary.clone()
}
}

View File

@@ -98,9 +98,7 @@ use language::{
};
use language::{point_to_lsp, BufferRow, CharClassifier, Runnable, RunnableRange};
use linked_editing_ranges::refresh_linked_ranges;
pub use proposed_changes_editor::{
ProposedChangesBuffer, ProposedChangesEditor, ProposedChangesEditorToolbar,
};
use proposed_changes_editor::{ProposedChangesBuffer, ProposedChangesEditor};
use similar::{ChangeTag, TextDiff};
use task::{ResolvedTask, TaskTemplate, TaskVariables};
@@ -156,7 +154,7 @@ use theme::{
};
use ui::{
h_flex, prelude::*, ButtonSize, ButtonStyle, Disclosure, IconButton, IconName, IconSize,
ListItem, Popover, PopoverMenuHandle, Tooltip,
ListItem, Popover, Tooltip,
};
use util::{defer, maybe, post_inc, RangeExt, ResultExt, TryFutureExt};
use workspace::item::{ItemHandle, PreviewTabsSettings};
@@ -376,20 +374,12 @@ pub enum EditorMode {
Full,
}
#[derive(Copy, Clone, Debug)]
#[derive(Clone, Debug)]
pub enum SoftWrap {
/// Prefer not to wrap at all.
///
/// Note: this is currently internal, as actually limited by [`crate::MAX_LINE_LEN`] until it wraps.
/// The mode is used inside git diff hunks, where it currently seems more useful not to wrap as much as possible.
GitDiff,
/// Prefer a single line generally, unless an overly long line is encountered.
None,
/// Soft wrap lines that exceed the editor width.
PreferLine,
EditorWidth,
/// Soft wrap lines at the preferred line length.
Column(u32),
/// Soft wrap line at the preferred line length or the editor width (whichever is smaller).
Bounded(u32),
}
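A plain-number sketch of how these variants can map to a wrap width at layout time, mirroring the match that appears later in element.rs for the GitDiff/None/EditorWidth side of this enum; f32 stands in for Pixels and MAX_LINE_LEN is a placeholder value.

const MAX_LINE_LEN: usize = 1024; // placeholder; the real constant lives in the editor crate

enum SoftWrap {
    GitDiff,
    None,
    EditorWidth,
    Column(u32),
    Bounded(u32),
}

/// Map a soft-wrap mode to an optional wrap width in pixels (None = never wrap).
fn wrap_width(mode: &SoftWrap, em_advance: f32, editor_width: f32) -> Option<f32> {
    match mode {
        // Never wrap inside git diff hunks.
        SoftWrap::GitDiff => None,
        // "None" still wraps eventually, once a line exceeds MAX_LINE_LEN / 2 characters.
        SoftWrap::None => Some((MAX_LINE_LEN / 2) as f32 * em_advance),
        SoftWrap::EditorWidth => Some(editor_width),
        SoftWrap::Column(column) => Some(*column as f32 * em_advance),
        // Wrap at the preferred column or the editor width, whichever is smaller.
        SoftWrap::Bounded(column) => Some((*column as f32 * em_advance).min(editor_width)),
    }
}

fn main() {
    assert_eq!(wrap_width(&SoftWrap::Column(80), 8.0, 1000.0), Some(640.0));
    assert_eq!(wrap_width(&SoftWrap::Bounded(200), 8.0, 1000.0), Some(1000.0));
}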
@@ -572,7 +562,6 @@ pub struct Editor {
nav_history: Option<ItemNavHistory>,
context_menu: RwLock<Option<ContextMenu>>,
mouse_context_menu: Option<MouseContextMenu>,
hunk_controls_menu_handle: PopoverMenuHandle<ui::ContextMenu>,
completion_tasks: Vec<(CompletionId, Task<Option<()>>)>,
signature_help_state: SignatureHelpState,
auto_signature_help: Option<bool>,
@@ -671,7 +660,7 @@ pub struct EditorSnapshot {
show_git_diff_gutter: Option<bool>,
show_code_actions: Option<bool>,
show_runnables: Option<bool>,
git_blame_gutter_max_author_length: Option<usize>,
render_git_blame_gutter: bool,
pub display_snapshot: DisplaySnapshot,
pub placeholder_text: Option<Arc<str>>,
is_focused: bool,
@@ -681,7 +670,7 @@ pub struct EditorSnapshot {
gutter_hovered: bool,
}
const GIT_BLAME_MAX_AUTHOR_CHARS_DISPLAYED: usize = 20;
const GIT_BLAME_GUTTER_WIDTH_CHARS: f32 = 53.;
#[derive(Default, Debug, Clone, Copy)]
pub struct GutterDimensions {
@@ -821,8 +810,8 @@ impl SelectionHistory {
struct RowHighlight {
index: usize,
range: Range<Anchor>,
color: Hsla,
range: RangeInclusive<Anchor>,
color: Option<Hsla>,
should_autoscroll: bool,
}
@@ -1845,7 +1834,7 @@ impl Editor {
let blink_manager = cx.new_model(|cx| BlinkManager::new(CURSOR_BLINK_INTERVAL, cx));
let soft_wrap_mode_override = matches!(mode, EditorMode::SingleLine { .. })
.then(|| language_settings::SoftWrap::None);
.then(|| language_settings::SoftWrap::PreferLine);
let mut project_subscriptions = Vec::new();
if mode == EditorMode::Full {
@@ -1949,7 +1938,6 @@ impl Editor {
nav_history: None,
context_menu: RwLock::new(None),
mouse_context_menu: None,
hunk_controls_menu_handle: PopoverMenuHandle::default(),
completion_tasks: Default::default(),
signature_help_state: SignatureHelpState::default(),
auto_signature_help: None,
@@ -2219,19 +2207,6 @@ impl Editor {
}
pub fn snapshot(&mut self, cx: &mut WindowContext) -> EditorSnapshot {
let git_blame_gutter_max_author_length = self
.render_git_blame_gutter(cx)
.then(|| {
if let Some(blame) = self.blame.as_ref() {
let max_author_length =
blame.update(cx, |blame, cx| blame.max_author_length(cx));
Some(max_author_length)
} else {
None
}
})
.flatten();
EditorSnapshot {
mode: self.mode,
show_gutter: self.show_gutter,
@@ -2239,7 +2214,7 @@ impl Editor {
show_git_diff_gutter: self.show_git_diff_gutter,
show_code_actions: self.show_code_actions,
show_runnables: self.show_runnables,
git_blame_gutter_max_author_length,
render_git_blame_gutter: self.render_git_blame_gutter(cx),
display_snapshot: self.display_map.update(cx, |map, cx| map.snapshot(cx)),
scroll_anchor: self.scroll_manager.anchor(),
ongoing_scroll: self.scroll_manager.ongoing_scroll(),
@@ -3463,7 +3438,7 @@ impl Editor {
s.select(new_selections)
});
if !bracket_inserted {
if !bracket_inserted && EditorSettings::get_global(cx).use_on_type_format {
if let Some(on_type_format_task) =
this.trigger_on_type_formatting(text.to_string(), cx)
{
@@ -4212,15 +4187,6 @@ impl Editor {
.read(cx)
.text_anchor_for_position(position, cx)?;
let settings = language_settings::language_settings(
buffer.read(cx).language_at(buffer_position).as_ref(),
buffer.read(cx).file(),
cx,
);
if !settings.use_on_type_format {
return None;
}
// OnTypeFormatting returns a list of edits, no need to pass them between Zed instances,
// hence we do LSP request & edit on host side only — add formats to host's history.
let push_to_lsp_host_history = true;
@@ -5368,19 +5334,6 @@ impl Editor {
.icon_size(IconSize::XSmall)
.icon_color(Color::Muted)
.selected(is_active)
.tooltip({
let focus_handle = self.focus_handle.clone();
move |cx| {
Tooltip::for_action_in(
"Toggle Code Actions",
&ToggleCodeActions {
deployed_from_indicator: None,
},
&focus_handle,
cx,
)
}
})
.on_click(cx.listener(move |editor, _e, cx| {
editor.focus(cx);
editor.toggle_code_actions(
@@ -5430,6 +5383,23 @@ impl Editor {
}))
}
fn close_hunk_diff_button(
&self,
hunk: HoveredHunk,
row: DisplayRow,
cx: &mut ViewContext<Self>,
) -> IconButton {
IconButton::new(
("close_hunk_diff_indicator", row.0 as usize),
ui::IconName::Close,
)
.shape(ui::IconButtonShape::Square)
.icon_size(IconSize::XSmall)
.icon_color(Color::Muted)
.tooltip(|cx| Tooltip::for_action("Close hunk diff", &ToggleHunkDiff, cx))
.on_click(cx.listener(move |editor, _e, cx| editor.toggle_hovered_hunk(&hunk, cx)))
}
pub fn context_menu_visible(&self) -> bool {
self.context_menu
.read()
@@ -9365,42 +9335,32 @@ impl Editor {
}
}
fn go_to_next_hunk(&mut self, _: &GoToHunk, cx: &mut ViewContext<Self>) {
fn go_to_hunk(&mut self, _: &GoToHunk, cx: &mut ViewContext<Self>) {
let snapshot = self
.display_map
.update(cx, |display_map, cx| display_map.snapshot(cx));
let selection = self.selections.newest::<Point>(cx);
self.go_to_hunk_after_position(&snapshot, selection.head(), cx);
}
fn go_to_hunk_after_position(
&mut self,
snapshot: &DisplaySnapshot,
position: Point,
cx: &mut ViewContext<'_, Editor>,
) -> Option<MultiBufferDiffHunk> {
if let Some(hunk) = self.go_to_next_hunk_in_direction(
snapshot,
position,
if !self.seek_in_direction(
&snapshot,
selection.head(),
false,
snapshot
.buffer_snapshot
.git_diff_hunks_in_range(MultiBufferRow(position.row + 1)..MultiBufferRow::MAX),
cx,
) {
return Some(hunk);
}
let wrapped_point = Point::zero();
self.go_to_next_hunk_in_direction(
snapshot,
wrapped_point,
true,
snapshot.buffer_snapshot.git_diff_hunks_in_range(
MultiBufferRow(wrapped_point.row + 1)..MultiBufferRow::MAX,
MultiBufferRow(selection.head().row + 1)..MultiBufferRow::MAX,
),
cx,
)
) {
let wrapped_point = Point::zero();
self.seek_in_direction(
&snapshot,
wrapped_point,
true,
snapshot.buffer_snapshot.git_diff_hunks_in_range(
MultiBufferRow(wrapped_point.row + 1)..MultiBufferRow::MAX,
),
cx,
);
}
}
fn go_to_prev_hunk(&mut self, _: &GoToPrevHunk, cx: &mut ViewContext<Self>) {
@@ -9409,65 +9369,52 @@ impl Editor {
.update(cx, |display_map, cx| display_map.snapshot(cx));
let selection = self.selections.newest::<Point>(cx);
self.go_to_hunk_before_position(&snapshot, selection.head(), cx);
}
fn go_to_hunk_before_position(
&mut self,
snapshot: &DisplaySnapshot,
position: Point,
cx: &mut ViewContext<'_, Editor>,
) -> Option<MultiBufferDiffHunk> {
if let Some(hunk) = self.go_to_next_hunk_in_direction(
snapshot,
position,
if !self.seek_in_direction(
&snapshot,
selection.head(),
false,
snapshot
.buffer_snapshot
.git_diff_hunks_in_range_rev(MultiBufferRow(0)..MultiBufferRow(position.row)),
snapshot.buffer_snapshot.git_diff_hunks_in_range_rev(
MultiBufferRow(0)..MultiBufferRow(selection.head().row),
),
cx,
) {
return Some(hunk);
let wrapped_point = snapshot.buffer_snapshot.max_point();
self.seek_in_direction(
&snapshot,
wrapped_point,
true,
snapshot.buffer_snapshot.git_diff_hunks_in_range_rev(
MultiBufferRow(0)..MultiBufferRow(wrapped_point.row),
),
cx,
);
}
let wrapped_point = snapshot.buffer_snapshot.max_point();
self.go_to_next_hunk_in_direction(
snapshot,
wrapped_point,
true,
snapshot
.buffer_snapshot
.git_diff_hunks_in_range_rev(MultiBufferRow(0)..MultiBufferRow(wrapped_point.row)),
cx,
)
}
fn go_to_next_hunk_in_direction(
fn seek_in_direction(
&mut self,
snapshot: &DisplaySnapshot,
initial_point: Point,
is_wrapped: bool,
hunks: impl Iterator<Item = MultiBufferDiffHunk>,
cx: &mut ViewContext<Editor>,
) -> Option<MultiBufferDiffHunk> {
) -> bool {
let display_point = initial_point.to_display_point(snapshot);
let mut hunks = hunks
.map(|hunk| (diff_hunk_to_display(&hunk, snapshot), hunk))
.filter(|(display_hunk, _)| {
is_wrapped || !display_hunk.contains_display_row(display_point.row())
})
.map(|hunk| diff_hunk_to_display(&hunk, snapshot))
.filter(|hunk| is_wrapped || !hunk.contains_display_row(display_point.row()))
.dedup();
if let Some((display_hunk, hunk)) = hunks.next() {
if let Some(hunk) = hunks.next() {
self.change_selections(Some(Autoscroll::fit()), cx, |s| {
let row = display_hunk.start_display_row();
let row = hunk.start_display_row();
let point = DisplayPoint::new(row, 0);
s.select_display_ranges([point..point]);
});
Some(hunk)
true
} else {
None
false
}
}
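Both navigation paths above share one pattern: search from the cursor, and if nothing is found, retry once from the opposite end of the buffer so the search wraps around. A compact standalone sketch of that pattern, with a plain closure standing in for the hunk iterator and selection update.

/// Search from `from` first; if nothing is found, wrap around and search again
/// from `wrapped_from` with `is_wrapped = true` so the current row is no longer excluded.
fn seek_with_wraparound<T>(
    from: usize,
    wrapped_from: usize,
    mut search: impl FnMut(usize, bool) -> Option<T>,
) -> Option<T> {
    search(from, false).or_else(|| search(wrapped_from, true))
}

fn main() {
    // Hunks start at rows 3 and 10; with the cursor at row 12 the first pass
    // finds nothing, and the wrapped pass lands back on row 3.
    let hunk_rows = [3usize, 10];
    let next = seek_with_wraparound(12, 0, |from, is_wrapped| {
        hunk_rows
            .iter()
            .copied()
            .find(|&row| if is_wrapped { row >= from } else { row > from })
    });
    assert_eq!(next, Some(3));
}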
@@ -9710,7 +9657,7 @@ impl Editor {
if Some(&target.buffer) == editor.buffer.read(cx).as_singleton().as_ref() {
let buffer = target.buffer.read(cx);
let range = check_multiline_range(buffer, range);
editor.change_selections(Some(Autoscroll::fit()), cx, |s| {
editor.change_selections(Some(Autoscroll::focused()), cx, |s| {
s.select_ranges([range]);
});
} else {
@@ -10564,79 +10511,17 @@ impl Editor {
}
}
pub fn toggle_fold(&mut self, _: &actions::ToggleFold, cx: &mut ViewContext<Self>) {
let selection = self.selections.newest::<Point>(cx);
let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx));
let range = if selection.is_empty() {
let point = selection.head().to_display_point(&display_map);
let start = DisplayPoint::new(point.row(), 0).to_point(&display_map);
let end = DisplayPoint::new(point.row(), display_map.line_len(point.row()))
.to_point(&display_map);
start..end
} else {
selection.range()
};
if display_map.folds_in_range(range).next().is_some() {
self.unfold_lines(&Default::default(), cx)
} else {
self.fold(&Default::default(), cx)
}
}
pub fn toggle_fold_recursive(
&mut self,
_: &actions::ToggleFoldRecursive,
cx: &mut ViewContext<Self>,
) {
let selection = self.selections.newest::<Point>(cx);
let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx));
let range = if selection.is_empty() {
let point = selection.head().to_display_point(&display_map);
let start = DisplayPoint::new(point.row(), 0).to_point(&display_map);
let end = DisplayPoint::new(point.row(), display_map.line_len(point.row()))
.to_point(&display_map);
start..end
} else {
selection.range()
};
if display_map.folds_in_range(range).next().is_some() {
self.unfold_recursive(&Default::default(), cx)
} else {
self.fold_recursive(&Default::default(), cx)
}
}
pub fn fold(&mut self, _: &actions::Fold, cx: &mut ViewContext<Self>) {
let mut fold_ranges = Vec::new();
let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx));
let selections = self.selections.all_adjusted(cx);
let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx));
let selections = self.selections.all_adjusted(cx);
for selection in selections {
let range = selection.range().sorted();
let buffer_start_row = range.start.row;
if range.start.row != range.end.row {
let mut found = false;
let mut row = range.start.row;
while row <= range.end.row {
if let Some((foldable_range, fold_text)) =
{ display_map.foldable_range(MultiBufferRow(row)) }
{
found = true;
row = foldable_range.end.row + 1;
fold_ranges.push((foldable_range, fold_text));
} else {
row += 1
}
}
if found {
continue;
}
}
for row in (0..=range.start.row).rev() {
for row in (0..=range.end.row).rev() {
if let Some((foldable_range, fold_text)) =
display_map.foldable_range(MultiBufferRow(row))
{
@@ -10653,61 +10538,6 @@ impl Editor {
self.fold_ranges(fold_ranges, true, cx);
}
pub fn fold_all(&mut self, _: &actions::FoldAll, cx: &mut ViewContext<Self>) {
let mut fold_ranges = Vec::new();
let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx));
for row in 0..display_map.max_buffer_row().0 {
if let Some((foldable_range, fold_text)) =
display_map.foldable_range(MultiBufferRow(row))
{
fold_ranges.push((foldable_range, fold_text));
}
}
self.fold_ranges(fold_ranges, true, cx);
}
pub fn fold_recursive(&mut self, _: &actions::FoldRecursive, cx: &mut ViewContext<Self>) {
let mut fold_ranges = Vec::new();
let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx));
let selections = self.selections.all_adjusted(cx);
for selection in selections {
let range = selection.range().sorted();
let buffer_start_row = range.start.row;
if range.start.row != range.end.row {
let mut found = false;
for row in range.start.row..=range.end.row {
if let Some((foldable_range, fold_text)) =
{ display_map.foldable_range(MultiBufferRow(row)) }
{
found = true;
fold_ranges.push((foldable_range, fold_text));
}
}
if found {
continue;
}
}
for row in (0..=range.start.row).rev() {
if let Some((foldable_range, fold_text)) =
display_map.foldable_range(MultiBufferRow(row))
{
if foldable_range.end.row >= buffer_start_row {
fold_ranges.push((foldable_range, fold_text));
} else {
break;
}
}
}
}
self.fold_ranges(fold_ranges, true, cx);
}
pub fn fold_at(&mut self, fold_at: &FoldAt, cx: &mut ViewContext<Self>) {
let buffer_row = fold_at.buffer_row;
let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx));
@@ -10742,24 +10572,6 @@ impl Editor {
self.unfold_ranges(ranges, true, true, cx);
}
pub fn unfold_recursive(&mut self, _: &UnfoldRecursive, cx: &mut ViewContext<Self>) {
let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx));
let selections = self.selections.all::<Point>(cx);
let ranges = selections
.iter()
.map(|s| {
let mut range = s.display_range(&display_map).sorted();
*range.start.column_mut() = 0;
*range.end.column_mut() = display_map.line_len(range.end.row());
let start = range.start.to_point(&display_map);
let end = range.end.to_point(&display_map);
start..end
})
.collect::<Vec<_>>();
self.unfold_ranges(ranges, true, true, cx);
}
pub fn unfold_at(&mut self, unfold_at: &UnfoldAt, cx: &mut ViewContext<Self>) {
let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx));
@@ -10778,16 +10590,6 @@ impl Editor {
self.unfold_ranges(std::iter::once(intersection_range), true, autoscroll, cx)
}
pub fn unfold_all(&mut self, _: &actions::UnfoldAll, cx: &mut ViewContext<Self>) {
let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx));
self.unfold_ranges(
[Point::zero()..display_map.max_point().to_point(&display_map)],
true,
true,
cx,
);
}
pub fn fold_selected_ranges(&mut self, _: &FoldSelectedRanges, cx: &mut ViewContext<Self>) {
let selections = self.selections.all::<Point>(cx);
let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx));
@@ -11064,9 +10866,8 @@ impl Editor {
let settings = self.buffer.read(cx).settings_at(0, cx);
let mode = self.soft_wrap_mode_override.unwrap_or(settings.soft_wrap);
match mode {
language_settings::SoftWrap::PreferLine | language_settings::SoftWrap::None => {
SoftWrap::None
}
language_settings::SoftWrap::None => SoftWrap::None,
language_settings::SoftWrap::PreferLine => SoftWrap::PreferLine,
language_settings::SoftWrap::EditorWidth => SoftWrap::EditorWidth,
language_settings::SoftWrap::PreferredLineLength => {
SoftWrap::Column(settings.preferred_line_length)
@@ -11114,10 +10915,9 @@ impl Editor {
self.soft_wrap_mode_override.take();
} else {
let soft_wrap = match self.soft_wrap_mode(cx) {
SoftWrap::GitDiff => return,
SoftWrap::None => language_settings::SoftWrap::EditorWidth,
SoftWrap::None | SoftWrap::PreferLine => language_settings::SoftWrap::EditorWidth,
SoftWrap::EditorWidth | SoftWrap::Column(_) | SoftWrap::Bounded(_) => {
language_settings::SoftWrap::None
language_settings::SoftWrap::PreferLine
}
};
self.soft_wrap_mode_override = Some(soft_wrap);
@@ -11513,130 +11313,56 @@ impl Editor {
}
}
/// Adds a row highlight for the given range. If a row has multiple highlights, the
/// last highlight added will be used.
///
/// If the range ends at the beginning of a line, then that line will not be highlighted.
/// Adds or removes (on `None` color) a highlight for the rows corresponding to the anchor range given.
/// On matching anchor range, replaces the old highlight; does not clear the other existing highlights.
/// If multiple anchor ranges will produce highlights for the same row, the last range added will be used.
pub fn highlight_rows<T: 'static>(
&mut self,
range: Range<Anchor>,
color: Hsla,
rows: RangeInclusive<Anchor>,
color: Option<Hsla>,
should_autoscroll: bool,
cx: &mut ViewContext<Self>,
) {
let snapshot = self.buffer().read(cx).snapshot(cx);
let row_highlights = self.highlighted_rows.entry(TypeId::of::<T>()).or_default();
let ix = row_highlights.binary_search_by(|highlight| {
Ordering::Equal
.then_with(|| highlight.range.start.cmp(&range.start, &snapshot))
.then_with(|| highlight.range.end.cmp(&range.end, &snapshot))
let existing_highlight_index = row_highlights.binary_search_by(|highlight| {
highlight
.range
.start()
.cmp(rows.start(), &snapshot)
.then(highlight.range.end().cmp(rows.end(), &snapshot))
});
if let Err(mut ix) = ix {
let index = post_inc(&mut self.highlight_order);
// If this range intersects with the preceding highlight, then merge it with
// the preceding highlight. Otherwise insert a new highlight.
let mut merged = false;
if ix > 0 {
let prev_highlight = &mut row_highlights[ix - 1];
if prev_highlight
.range
.end
.cmp(&range.start, &snapshot)
.is_ge()
{
ix -= 1;
if prev_highlight.range.end.cmp(&range.end, &snapshot).is_lt() {
prev_highlight.range.end = range.end;
}
merged = true;
prev_highlight.index = index;
prev_highlight.color = color;
prev_highlight.should_autoscroll = should_autoscroll;
}
}
if !merged {
row_highlights.insert(
ix,
RowHighlight {
range: range.clone(),
index,
color,
should_autoscroll,
},
);
}
// If any of the following highlights intersect with this one, merge them.
while let Some(next_highlight) = row_highlights.get(ix + 1) {
let highlight = &row_highlights[ix];
if next_highlight
.range
.start
.cmp(&highlight.range.end, &snapshot)
.is_le()
{
if next_highlight
.range
.end
.cmp(&highlight.range.end, &snapshot)
.is_gt()
{
row_highlights[ix].range.end = next_highlight.range.end;
}
row_highlights.remove(ix + 1);
} else {
break;
}
match (color, existing_highlight_index) {
(Some(_), Ok(ix)) | (_, Err(ix)) => row_highlights.insert(
ix,
RowHighlight {
index: post_inc(&mut self.highlight_order),
range: rows,
should_autoscroll,
color,
},
),
(None, Ok(i)) => {
row_highlights.remove(i);
}
}
}
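A toy model of the behavior described in the doc comments above, using inclusive row ranges instead of Anchor ranges and a u32 instead of Hsla; it is only meant to show how a `None` color removes a matching highlight and how the latest range wins on overlapping rows.

use std::collections::BTreeMap;

struct RowHighlights {
    order: usize,
    by_range: BTreeMap<(u32, u32), (usize, Option<u32>)>, // (start, end) -> (index, color)
}

impl RowHighlights {
    fn new() -> Self {
        Self { order: 0, by_range: BTreeMap::new() }
    }

    fn highlight_rows(&mut self, rows: (u32, u32), color: Option<u32>) {
        if color.is_none() && self.by_range.contains_key(&rows) {
            // A `None` color on a matching range removes just that highlight.
            self.by_range.remove(&rows);
        } else {
            // Otherwise insert or replace, remembering insertion order for conflicts.
            self.order += 1;
            self.by_range.insert(rows, (self.order, color));
        }
    }

    /// Merge everything into per-row colors, letting the latest addition win and
    /// letting `None` colors mask earlier highlights on the same rows.
    fn row_colors(&self) -> BTreeMap<u32, u32> {
        let mut used_index: BTreeMap<u32, usize> = BTreeMap::new();
        let mut rows = BTreeMap::new();
        for (&(start, end), &(index, color)) in &self.by_range {
            for row in start..=end {
                let used = used_index.entry(row).or_insert(index);
                if index >= *used {
                    *used = index;
                    match color {
                        Some(color) => rows.insert(row, color),
                        None => rows.remove(&row),
                    };
                }
            }
        }
        rows
    }
}

fn main() {
    let mut highlights = RowHighlights::new();
    highlights.highlight_rows((2, 4), Some(0xff0000));
    highlights.highlight_rows((4, 5), Some(0x00ff00)); // later range wins on row 4
    assert_eq!(highlights.row_colors().get(&4), Some(&0x00ff00));
    highlights.highlight_rows((2, 4), None); // removes the matching range
    assert!(!highlights.row_colors().contains_key(&2));
}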
/// Remove any highlighted row ranges of the given type that intersect the
/// given ranges.
pub fn remove_highlighted_rows<T: 'static>(
&mut self,
ranges_to_remove: Vec<Range<Anchor>>,
cx: &mut ViewContext<Self>,
) {
let snapshot = self.buffer().read(cx).snapshot(cx);
let row_highlights = self.highlighted_rows.entry(TypeId::of::<T>()).or_default();
let mut ranges_to_remove = ranges_to_remove.iter().peekable();
row_highlights.retain(|highlight| {
while let Some(range_to_remove) = ranges_to_remove.peek() {
match range_to_remove.end.cmp(&highlight.range.start, &snapshot) {
Ordering::Less | Ordering::Equal => {
ranges_to_remove.next();
}
Ordering::Greater => {
match range_to_remove.start.cmp(&highlight.range.end, &snapshot) {
Ordering::Less | Ordering::Equal => {
return false;
}
Ordering::Greater => break,
}
}
}
}
true
})
}
/// Clear all anchor ranges for a certain highlight context type, so no corresponding rows will be highlighted.
pub fn clear_row_highlights<T: 'static>(&mut self) {
self.highlighted_rows.remove(&TypeId::of::<T>());
}
/// For a highlight given context type, gets all anchor ranges that will be used for row highlighting.
pub fn highlighted_rows<T: 'static>(&self) -> impl '_ + Iterator<Item = (Range<Anchor>, Hsla)> {
self.highlighted_rows
.get(&TypeId::of::<T>())
.map_or(&[] as &[_], |vec| vec.as_slice())
.iter()
.map(|highlight| (highlight.range.clone(), highlight.color))
pub fn highlighted_rows<T: 'static>(
&self,
) -> Option<impl Iterator<Item = (&RangeInclusive<Anchor>, Option<&Hsla>)>> {
Some(
self.highlighted_rows
.get(&TypeId::of::<T>())?
.iter()
.map(|highlight| (&highlight.range, highlight.color.as_ref())),
)
}
/// Merges all anchor ranges for all context types ever set, picking the last highlight added in case of a row conflict.
@@ -11654,22 +11380,17 @@ impl Editor {
.fold(
BTreeMap::<DisplayRow, Hsla>::new(),
|mut unique_rows, highlight| {
let start = highlight.range.start.to_display_point(&snapshot);
let end = highlight.range.end.to_display_point(&snapshot);
let start_row = start.row().0;
let end_row = if highlight.range.end.text_anchor != text::Anchor::MAX
&& end.column() == 0
{
end.row().0.saturating_sub(1)
} else {
end.row().0
};
for row in start_row..=end_row {
let start_row = highlight.range.start().to_display_point(&snapshot).row();
let end_row = highlight.range.end().to_display_point(&snapshot).row();
for row in start_row.0..=end_row.0 {
let used_index =
used_highlight_orders.entry(row).or_insert(highlight.index);
if highlight.index >= *used_index {
*used_index = highlight.index;
unique_rows.insert(DisplayRow(row), highlight.color);
match highlight.color {
Some(hsla) => unique_rows.insert(DisplayRow(row), hsla),
None => unique_rows.remove(&DisplayRow(row)),
};
}
}
unique_rows
@@ -11685,11 +11406,10 @@ impl Editor {
.values()
.flat_map(|highlighted_rows| highlighted_rows.iter())
.filter_map(|highlight| {
if highlight.should_autoscroll {
Some(highlight.range.start.to_display_point(snapshot).row())
} else {
None
if highlight.color.is_none() || !highlight.should_autoscroll {
return None;
}
Some(highlight.range.start().to_display_point(snapshot).row())
})
.min()
}
@@ -12195,19 +11915,12 @@ impl Editor {
)),
cx,
);
let old_cursor_shape = self.cursor_shape;
{
let editor_settings = EditorSettings::get_global(cx);
self.scroll_manager.vertical_scroll_margin = editor_settings.vertical_scroll_margin;
self.show_breadcrumbs = editor_settings.toolbar.breadcrumbs;
self.cursor_shape = editor_settings.cursor_shape.unwrap_or_default();
}
if old_cursor_shape != self.cursor_shape {
cx.emit(EditorEvent::CursorShapeChanged);
let editor_settings = EditorSettings::get_global(cx);
if let Some(cursor_shape) = editor_settings.cursor_shape {
self.cursor_shape = cursor_shape;
}
self.scroll_manager.vertical_scroll_margin = editor_settings.vertical_scroll_margin;
self.show_breadcrumbs = editor_settings.toolbar.breadcrumbs;
let project_settings = ProjectSettings::get_global(cx);
self.serialize_dirty_buffers = project_settings.session.restore_unsaved_buffers;
@@ -13217,7 +12930,6 @@ impl EditorSnapshot {
font_id: FontId,
font_size: Pixels,
em_width: Pixels,
em_advance: Pixels,
max_line_number_width: Pixels,
cx: &AppContext,
) -> GutterDimensions {
@@ -13238,7 +12950,7 @@ impl EditorSnapshot {
.unwrap_or(gutter_settings.line_numbers);
let line_gutter_width = if show_line_numbers {
// Avoid flicker-like gutter resizes when the line number gains another digit and only resize the gutter on files with N*10^5 lines.
let min_width_for_number_on_gutter = em_advance * 4.0;
let min_width_for_number_on_gutter = em_width * 4.0;
max_line_number_width.max(min_width_for_number_on_gutter)
} else {
0.0.into()
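A plain-number version of the minimum-width rule above; the real code works in Pixels and, on one side of the diff, uses the measured digit advance rather than em_width.

fn line_number_gutter_width(em_width: f32, max_line_number_width: f32, show_line_numbers: bool) -> f32 {
    if show_line_numbers {
        // Reserve room for at least four digits so small files do not trigger a
        // gutter resize every time the line count gains a digit.
        let min_width_for_number_on_gutter = em_width * 4.0;
        max_line_number_width.max(min_width_for_number_on_gutter)
    } else {
        0.0
    }
}

fn main() {
    // With an 8 px digit width, anything up to four digits shares the same 32 px gutter.
    assert_eq!(line_number_gutter_width(8.0, 24.0, true), 32.0);
    assert_eq!(line_number_gutter_width(8.0, 48.0, true), 48.0);
}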
@@ -13250,19 +12962,9 @@ impl EditorSnapshot {
let show_runnables = self.show_runnables.unwrap_or(gutter_settings.runnables);
let git_blame_entries_width =
self.git_blame_gutter_max_author_length
.map(|max_author_length| {
// Length of the author name, but also space for the commit hash,
// the spacing and the timestamp.
let max_char_count = max_author_length
.min(GIT_BLAME_MAX_AUTHOR_CHARS_DISPLAYED)
+ 7 // length of commit sha
+ 14 // length of max relative timestamp ("60 minutes ago")
+ 4; // gaps and margins
em_advance * max_char_count
});
let git_blame_entries_width = self
.render_git_blame_gutter
.then_some(em_width * GIT_BLAME_GUTTER_WIDTH_CHARS);
let mut left_padding = git_blame_entries_width.unwrap_or(Pixels::ZERO);
left_padding += if show_code_actions || show_runnables {
@@ -13415,7 +13117,6 @@ pub enum EditorEvent {
TransactionBegun {
transaction_id: clock::Lamport,
},
CursorShapeChanged,
}
impl EventEmitter<EditorEvent> for Editor {}

View File

@@ -13,6 +13,7 @@ pub struct EditorSettings {
pub show_completions_on_input: bool,
pub show_completion_documentation: bool,
pub completion_documentation_secondary_query_debounce: u64,
pub use_on_type_format: bool,
pub toolbar: Toolbar,
pub scrollbar: Scrollbar,
pub gutter: Gutter,
@@ -208,6 +209,11 @@ pub struct EditorSettingsContent {
///
/// Default: 300 ms
pub completion_documentation_secondary_query_debounce: Option<u64>,
/// Whether to use additional LSP queries to format (and amend) the code after
/// every "trigger" symbol input, defined by LSP server capabilities.
///
/// Default: true
pub use_on_type_format: Option<bool>,
/// Toolbar related settings
pub toolbar: Option<ToolbarContent>,
/// Scrollbar related settings
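A minimal sketch of how a boolean setting like this can gate on-type formatting, echoing the two gating sites shown in this diff (a global check before triggering, or a per-language check inside the trigger); the settings struct here is a stand-in, not the real settings API.

// Stand-in for the real settings lookup; only the flag matters here.
struct EditorSettings {
    use_on_type_format: bool,
}

fn maybe_trigger_on_type_formatting(
    settings: &EditorSettings,
    bracket_inserted: bool,
    typed_text: &str,
) -> Option<String> {
    // Skip the extra LSP round trip when the setting is off or a bracket pair was
    // just auto-inserted, matching the `!bracket_inserted && ...use_on_type_format` check.
    if bracket_inserted || !settings.use_on_type_format {
        return None;
    }
    // Placeholder for the real trigger_on_type_formatting(text, cx) task.
    Some(format!("onTypeFormatting request for {typed_text:?}"))
}

fn main() {
    let enabled = EditorSettings { use_on_type_format: true };
    assert!(maybe_trigger_on_type_formatting(&enabled, false, ".").is_some());
    let disabled = EditorSettings { use_on_type_format: false };
    assert!(maybe_trigger_on_type_formatting(&disabled, false, ".").is_none());
}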

File diff suppressed because it is too large

View File

@@ -11,7 +11,7 @@ use crate::{
hover_popover::{
self, hover_at, HOVER_POPOVER_GAP, MIN_POPOVER_CHARACTER_WIDTH, MIN_POPOVER_LINE_HEIGHT,
},
hunk_diff::{diff_hunk_to_display, DisplayDiffHunk},
hunk_diff::{diff_hunk_to_display, DisplayDiffHunk, ExpandedHunk},
hunk_status,
items::BufferSearchHighlights,
mouse_context_menu::{self, MenuPosition, MouseContextMenu},
@@ -20,8 +20,8 @@ use crate::{
DocumentHighlightRead, DocumentHighlightWrite, Editor, EditorMode, EditorSettings,
EditorSnapshot, EditorStyle, ExpandExcerpts, FocusedBlock, GutterDimensions, HalfPageDown,
HalfPageUp, HandleInput, HoveredCursor, HoveredHunk, LineDown, LineUp, OpenExcerpts, PageDown,
PageUp, Point, RowExt, RowRangeExt, SelectPhase, Selection, SoftWrap, ToPoint,
CURSORS_VISIBLE_FOR, GIT_BLAME_MAX_AUTHOR_CHARS_DISPLAYED, MAX_LINE_LEN,
PageUp, Point, RangeToAnchorExt, RowExt, RowRangeExt, SelectPhase, Selection, SoftWrap,
ToPoint, CURSORS_VISIBLE_FOR, MAX_LINE_LEN,
};
use client::ParticipantIndex;
use collections::{BTreeMap, HashMap};
@@ -302,7 +302,7 @@ impl EditorElement {
}
register_action(view, cx, Editor::go_to_diagnostic);
register_action(view, cx, Editor::go_to_prev_diagnostic);
register_action(view, cx, Editor::go_to_next_hunk);
register_action(view, cx, Editor::go_to_hunk);
register_action(view, cx, Editor::go_to_prev_hunk);
register_action(view, cx, |editor, a, cx| {
editor.go_to_definition(a, cx).detach_and_log_err(cx);
@@ -335,14 +335,8 @@ impl EditorElement {
register_action(view, cx, Editor::open_url);
register_action(view, cx, Editor::open_file);
register_action(view, cx, Editor::fold);
register_action(view, cx, Editor::fold_all);
register_action(view, cx, Editor::fold_at);
register_action(view, cx, Editor::fold_recursive);
register_action(view, cx, Editor::toggle_fold);
register_action(view, cx, Editor::toggle_fold_recursive);
register_action(view, cx, Editor::unfold_lines);
register_action(view, cx, Editor::unfold_recursive);
register_action(view, cx, Editor::unfold_all);
register_action(view, cx, Editor::unfold_at);
register_action(view, cx, Editor::fold_selected_ranges);
register_action(view, cx, Editor::show_completions);
@@ -495,7 +489,28 @@ impl EditorElement {
let mut modifiers = event.modifiers;
if let Some(hovered_hunk) = hovered_hunk {
editor.toggle_hovered_hunk(&hovered_hunk, cx);
if modifiers.control || modifiers.platform {
editor.toggle_hovered_hunk(&hovered_hunk, cx);
} else {
let display_range = hovered_hunk
.multi_buffer_range
.clone()
.to_display_points(&position_map.snapshot);
let hunk_bounds = Self::diff_hunk_bounds(
&position_map.snapshot,
position_map.line_height,
gutter_hitbox.bounds,
&DisplayDiffHunk::Unfolded {
diff_base_byte_range: hovered_hunk.diff_base_byte_range.clone(),
display_row_range: display_range.start.row()..display_range.end.row(),
multi_buffer_range: hovered_hunk.multi_buffer_range.clone(),
status: hovered_hunk.status,
},
);
if hunk_bounds.contains(&event.position) {
editor.open_hunk_context_menu(hovered_hunk, event.position, cx);
}
}
cx.notify();
return;
} else if gutter_hitbox.is_hovered(cx) {
@@ -636,30 +651,11 @@ impl EditorElement {
cx.stop_propagation();
} else if end_selection && pending_nonempty_selections {
cx.stop_propagation();
} else if cfg!(target_os = "linux") && event.button == MouseButton::Middle {
if !text_hitbox.is_hovered(cx) || editor.read_only(cx) {
return;
}
#[cfg(target_os = "linux")]
if EditorSettings::get_global(cx).middle_click_paste {
if let Some(text) = cx.read_from_primary().and_then(|item| item.text()) {
let point_for_position =
position_map.point_for_position(text_hitbox.bounds, event.position);
let position = point_for_position.previous_valid;
editor.select(
SelectPhase::Begin {
position,
add: false,
click_count: 1,
},
cx,
);
editor.insert(&text, cx);
}
cx.stop_propagation()
}
} else if cfg!(target_os = "linux")
&& event.button == MouseButton::Middle
&& (!text_hitbox.is_hovered(cx) || editor.read_only(cx))
{
return;
}
}
@@ -1307,13 +1303,13 @@ impl EditorElement {
let display_hunks = buffer_snapshot
.git_diff_hunks_in_range(buffer_start_row..buffer_end_row)
.filter_map(|hunk| {
let display_hunk = diff_hunk_to_display(&hunk, snapshot);
let mut display_hunk = diff_hunk_to_display(&hunk, snapshot);
if let DisplayDiffHunk::Unfolded {
multi_buffer_range,
status,
..
} = &display_hunk
} = &mut display_hunk
{
let mut is_expanded = false;
while let Some(expanded_hunk) = expanded_hunks.peek() {
@@ -1336,7 +1332,11 @@ impl EditorElement {
}
match status {
DiffHunkStatus::Added => {}
DiffHunkStatus::Modified => {}
DiffHunkStatus::Modified => {
if is_expanded {
*status = DiffHunkStatus::Added;
}
}
DiffHunkStatus::Removed => {
if is_expanded {
return None;
@@ -1470,7 +1470,7 @@ impl EditorElement {
AvailableSpace::MaxContent
};
let scroll_top = scroll_position.y * line_height;
let start_x = em_width;
let start_x = em_width * 1;
let mut last_used_color: Option<(PlayerColor, Oid)> = None;
@@ -1665,16 +1665,7 @@ impl EditorElement {
return None;
}
if snapshot.is_line_folded(multibuffer_row) {
// Skip folded indicators, unless it's the starting line of a fold.
if multibuffer_row
.0
.checked_sub(1)
.map_or(false, |previous_row| {
snapshot.is_line_folded(MultiBufferRow(previous_row))
})
{
return None;
}
return None;
}
let button = editor.render_run_indicator(
&self.style,
@@ -3380,6 +3371,9 @@ impl EditorElement {
for test_indicator in layout.test_indicators.iter_mut() {
test_indicator.paint(cx);
}
for close_indicator in layout.close_indicators.iter_mut() {
close_indicator.paint(cx);
}
if let Some(indicator) = layout.code_actions_indicator.as_mut() {
indicator.paint(cx);
@@ -4165,6 +4159,46 @@ impl EditorElement {
+ 1;
self.column_pixels(digit_count, cx)
}
#[allow(clippy::too_many_arguments)]
fn layout_hunk_diff_close_indicators(
&self,
line_height: Pixels,
scroll_pixel_position: gpui::Point<Pixels>,
gutter_dimensions: &GutterDimensions,
gutter_hitbox: &Hitbox,
rows_with_hunk_bounds: &HashMap<DisplayRow, Bounds<Pixels>>,
expanded_hunks_by_rows: HashMap<DisplayRow, ExpandedHunk>,
cx: &mut WindowContext,
) -> Vec<AnyElement> {
self.editor.update(cx, |editor, cx| {
expanded_hunks_by_rows
.into_iter()
.map(|(display_row, hunk)| {
let button = editor.close_hunk_diff_button(
HoveredHunk {
multi_buffer_range: hunk.hunk_range,
status: hunk.status,
diff_base_byte_range: hunk.diff_base_byte_range,
},
display_row,
cx,
);
prepaint_gutter_button(
button,
display_row,
line_height,
gutter_dimensions,
scroll_pixel_position,
gutter_hitbox,
rows_with_hunk_bounds,
cx,
)
})
.collect()
})
}
}
#[allow(clippy::too_many_arguments)]
@@ -4262,7 +4296,7 @@ fn render_blame_entry(
let short_commit_id = blame_entry.sha.display_short();
let author_name = blame_entry.author.as_deref().unwrap_or("<no name>");
let name = util::truncate_and_trailoff(author_name, GIT_BLAME_MAX_AUTHOR_CHARS_DISPLAYED);
let name = util::truncate_and_trailoff(author_name, 20);
let details = blame.read(cx).details_for_entry(&blame_entry);
@@ -4274,21 +4308,22 @@ fn render_blame_entry(
h_flex()
.w_full()
.justify_between()
.font_family(style.text.font().family)
.line_height(style.text.line_height)
.id(("blame", ix))
.text_color(cx.theme().status().hint)
.pr_2()
.gap_2()
.child(
h_flex()
.items_center()
.gap_2()
.child(div().text_color(sha_color.cursor).child(short_commit_id))
.child(name),
)
.child(relative_timestamp)
.children([
div()
.text_color(sha_color.cursor)
.child(short_commit_id)
.mr_2(),
div()
.w_full()
.h_flex()
.justify_between()
.text_color(cx.theme().status().hint)
.child(name)
.child(relative_timestamp),
])
.on_mouse_down(MouseButton::Right, {
let blame_entry = blame_entry.clone();
let details = details.clone();
@@ -5003,7 +5038,6 @@ impl Element for EditorElement {
font_id,
font_size,
em_width,
em_advance,
self.max_line_number_width(&snapshot, cx),
cx,
);
@@ -5028,8 +5062,10 @@ impl Element for EditorElement {
snapshot
} else {
let wrap_width = match editor.soft_wrap_mode(cx) {
SoftWrap::GitDiff => None,
SoftWrap::None => Some((MAX_LINE_LEN / 2) as f32 * em_advance),
SoftWrap::None => None,
SoftWrap::PreferLine => {
Some((MAX_LINE_LEN / 2) as f32 * em_advance)
}
SoftWrap::EditorWidth => Some(editor_width),
SoftWrap::Column(column) => Some(column as f32 * em_advance),
SoftWrap::Bounded(column) => {
@@ -5513,6 +5549,15 @@ impl Element for EditorElement {
} else {
Vec::new()
};
let close_indicators = self.layout_hunk_diff_close_indicators(
line_height,
scroll_pixel_position,
&gutter_dimensions,
&gutter_hitbox,
&rows_with_hunk_bounds,
expanded_add_hunks_by_rows,
cx,
);
self.layout_signature_help(
&hitbox,
@@ -5625,6 +5670,7 @@ impl Element for EditorElement {
selections,
mouse_context_menu,
test_indicators,
close_indicators,
code_actions_indicator,
gutter_fold_toggles,
crease_trailers,
@@ -5766,6 +5812,7 @@ pub struct EditorLayout {
selections: Vec<(PlayerColor, Vec<SelectionLayout>)>,
code_actions_indicator: Option<AnyElement>,
test_indicators: Vec<AnyElement>,
close_indicators: Vec<AnyElement>,
gutter_fold_toggles: Vec<Option<AnyElement>>,
crease_trailers: Vec<Option<CreaseTrailerLayout>>,
mouse_context_menu: Option<AnyElement>,
@@ -6315,21 +6362,10 @@ fn compute_auto_height_layout(
.unwrap()
.size
.width;
let em_advance = cx
.text_system()
.advance(font_id, font_size, 'm')
.unwrap()
.width;
let mut snapshot = editor.snapshot(cx);
let gutter_dimensions = snapshot.gutter_dimensions(
font_id,
font_size,
em_width,
em_advance,
max_line_number_width,
cx,
);
let gutter_dimensions =
snapshot.gutter_dimensions(font_id, font_size, em_width, max_line_number_width, cx);
editor.gutter_dimensions = gutter_dimensions;
let text_width = width - gutter_dimensions.width;

View File

@@ -29,7 +29,7 @@ pub struct GitBlameEntrySummary {
impl sum_tree::Item for GitBlameEntry {
type Summary = GitBlameEntrySummary;
fn summary(&self, _cx: &()) -> Self::Summary {
fn summary(&self) -> Self::Summary {
GitBlameEntrySummary { rows: self.rows }
}
}
@@ -207,27 +207,6 @@ impl GitBlame {
})
}
pub fn max_author_length(&mut self, cx: &mut ModelContext<Self>) -> usize {
self.sync(cx);
let mut max_author_length = 0;
for entry in self.entries.iter() {
let author_len = entry
.blame
.as_ref()
.and_then(|entry| entry.author.as_ref())
.map(|author| author.len());
if let Some(author_len) = author_len {
if author_len > max_author_length {
max_author_length = author_len;
}
}
}
max_author_length
}
pub fn blur(&mut self, _: &mut ModelContext<Self>) {
self.focused = false;
}
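This max_author_length helper feeds the character budget computed in the gutter hunk earlier. A small worked example of that arithmetic, with the cap, sha, timestamp, and margin counts copied from the comments in the diff.

const GIT_BLAME_MAX_AUTHOR_CHARS_DISPLAYED: usize = 20;

/// Character budget for the blame gutter: capped author name, commit sha,
/// relative timestamp ("60 minutes ago"), plus gaps and margins.
fn blame_gutter_char_count(max_author_length: usize) -> usize {
    max_author_length.min(GIT_BLAME_MAX_AUTHOR_CHARS_DISPLAYED) + 7 + 14 + 4
}

fn main() {
    assert_eq!(blame_gutter_char_count(12), 37);
    assert_eq!(blame_gutter_char_count(40), 45); // long author names are capped at 20 chars
}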

View File

@@ -32,7 +32,7 @@ pub fn refresh_matching_bracket_highlights(editor: &mut Editor, cx: &mut ViewCon
opening_range.to_anchors(&snapshot.buffer_snapshot),
closing_range.to_anchors(&snapshot.buffer_snapshot),
],
|theme| theme.editor_document_highlight_bracket_background,
|theme| theme.editor_document_highlight_read_background,
cx,
)
}

View File

@@ -1,23 +1,28 @@
use collections::{hash_map, HashMap, HashSet};
use git::diff::DiffHunkStatus;
use gpui::{Action, AnchorCorner, AppContext, CursorStyle, Hsla, Model, MouseButton, Task, View};
use gpui::{Action, AppContext, CursorStyle, Hsla, Model, MouseButton, Subscription, Task, View};
use language::{Buffer, BufferId, Point};
use multi_buffer::{
Anchor, AnchorRangeExt, ExcerptRange, MultiBuffer, MultiBufferDiffHunk, MultiBufferRow,
MultiBufferSnapshot, ToPoint,
};
use std::{ops::Range, sync::Arc};
use ui::{
prelude::*, ActiveTheme, ContextMenu, IconButtonShape, InteractiveElement, IntoElement,
ParentElement, PopoverMenu, Styled, Tooltip, ViewContext, VisualContext,
use settings::SettingsStore;
use std::{
ops::{Range, RangeInclusive},
sync::Arc,
};
use util::RangeExt;
use ui::{
prelude::*, ActiveTheme, ContextMenu, InteractiveElement, IntoElement, ParentElement, Pixels,
Styled, ViewContext, VisualContext,
};
use util::{debug_panic, RangeExt};
use crate::{
editor_settings::CurrentLineHighlight, hunk_status, hunks_for_selections, BlockDisposition,
BlockProperties, BlockStyle, CustomBlockId, DiffRowHighlight, DisplayRow, DisplaySnapshot,
Editor, EditorElement, ExpandAllHunkDiffs, GoToHunk, GoToPrevHunk, RevertFile,
RevertSelectedHunks, ToDisplayPoint, ToggleHunkDiff,
editor_settings::CurrentLineHighlight, hunk_status, hunks_for_selections,
mouse_context_menu::MouseContextMenu, BlockDisposition, BlockProperties, BlockStyle,
CustomBlockId, DiffRowHighlight, DisplayRow, DisplaySnapshot, Editor, EditorElement,
EditorSnapshot, ExpandAllHunkDiffs, RangeToAnchorExt, RevertFile, RevertSelectedHunks,
ToDisplayPoint, ToggleHunkDiff,
};
#[derive(Debug, Clone)]
@@ -36,7 +41,7 @@ pub(super) struct ExpandedHunks {
#[derive(Debug, Clone)]
pub(super) struct ExpandedHunk {
pub blocks: Vec<CustomBlockId>,
pub block: Option<CustomBlockId>,
pub hunk_range: Range<Anchor>,
pub diff_base_byte_range: Range<usize>,
pub status: DiffHunkStatus,
@@ -72,6 +77,85 @@ impl ExpandedHunks {
}
impl Editor {
pub(super) fn open_hunk_context_menu(
&mut self,
hovered_hunk: HoveredHunk,
clicked_point: gpui::Point<Pixels>,
cx: &mut ViewContext<Editor>,
) {
let focus_handle = self.focus_handle.clone();
let expanded = self
.expanded_hunks
.hunks(false)
.any(|expanded_hunk| expanded_hunk.hunk_range == hovered_hunk.multi_buffer_range);
let editor_handle = cx.view().clone();
let editor_snapshot = self.snapshot(cx);
let start_point = self
.to_pixel_point(hovered_hunk.multi_buffer_range.start, &editor_snapshot, cx)
.unwrap_or(clicked_point);
let end_point = self
.to_pixel_point(hovered_hunk.multi_buffer_range.start, &editor_snapshot, cx)
.unwrap_or(clicked_point);
let norm =
|a: gpui::Point<Pixels>, b: gpui::Point<Pixels>| (a.x - b.x).abs() + (a.y - b.y).abs();
let closest_source = if norm(start_point, clicked_point) < norm(end_point, clicked_point) {
hovered_hunk.multi_buffer_range.start
} else {
hovered_hunk.multi_buffer_range.end
};
self.mouse_context_menu = MouseContextMenu::pinned_to_editor(
self,
closest_source,
clicked_point,
ContextMenu::build(cx, move |menu, _| {
menu.on_blur_subscription(Subscription::new(|| {}))
.context(focus_handle)
.entry(
if expanded {
"Collapse Hunk"
} else {
"Expand Hunk"
},
Some(ToggleHunkDiff.boxed_clone()),
{
let editor = editor_handle.clone();
let hunk = hovered_hunk.clone();
move |cx| {
editor.update(cx, |editor, cx| {
editor.toggle_hovered_hunk(&hunk, cx);
});
}
},
)
.entry("Revert Hunk", Some(RevertSelectedHunks.boxed_clone()), {
let editor = editor_handle.clone();
let hunk = hovered_hunk.clone();
move |cx| {
let multi_buffer = editor.read(cx).buffer().clone();
let multi_buffer_snapshot = multi_buffer.read(cx).snapshot(cx);
let mut revert_changes = HashMap::default();
if let Some(hunk) =
crate::hunk_diff::to_diff_hunk(&hunk, &multi_buffer_snapshot)
{
Editor::prepare_revert_change(
&mut revert_changes,
&multi_buffer,
&hunk,
cx,
);
}
if !revert_changes.is_empty() {
editor.update(cx, |editor, cx| editor.revert(revert_changes, cx));
}
}
})
.action("Revert File", RevertFile.boxed_clone())
}),
cx,
)
}
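The menu placement above picks whichever end of the hunk range sits closer to the click, using a Manhattan-style distance on pixel points. A tiny standalone version of that choice, with plain f32 points instead of gpui's Point<Pixels>.

#[derive(Clone, Copy, PartialEq, Debug)]
struct PixelPoint {
    x: f32,
    y: f32,
}

// Manhattan-style distance, matching the `norm` closure in the hunk above.
fn norm(a: PixelPoint, b: PixelPoint) -> f32 {
    (a.x - b.x).abs() + (a.y - b.y).abs()
}

/// Pick whichever end of the hunk's range is closer to the clicked point.
fn closest_anchor(start: PixelPoint, end: PixelPoint, clicked: PixelPoint) -> PixelPoint {
    if norm(start, clicked) < norm(end, clicked) {
        start
    } else {
        end
    }
}

fn main() {
    let start = PixelPoint { x: 0.0, y: 0.0 };
    let end = PixelPoint { x: 10.0, y: 40.0 };
    let clicked = PixelPoint { x: 2.0, y: 5.0 };
    assert_eq!(closest_anchor(start, end, clicked), start);
}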
pub(super) fn toggle_hovered_hunk(
&mut self,
hovered_hunk: &HoveredHunk,
@@ -180,8 +264,7 @@ impl Editor {
break;
} else if expanded_hunk_row_range == hunk_to_toggle_row_range {
highlights_to_remove.push(expanded_hunk.hunk_range.clone());
blocks_to_remove
.extend(expanded_hunk.blocks.iter().copied());
blocks_to_remove.extend(expanded_hunk.block);
hunks_to_toggle.next();
retain = false;
break;
@@ -216,7 +299,14 @@ impl Editor {
});
}
editor.remove_highlighted_rows::<DiffRowHighlight>(highlights_to_remove, cx);
for removed_rows in highlights_to_remove {
editor.highlight_rows::<DiffRowHighlight>(
to_inclusive_row_range(removed_rows, &snapshot),
None,
false,
cx,
);
}
editor.remove_blocks(blocks_to_remove, None, cx);
for hunk in hunks_to_expand {
editor.expand_diff_hunk(None, &hunk, cx);
@@ -281,48 +371,33 @@ impl Editor {
Err(ix) => ix,
};
let blocks;
match hunk.status {
let block = match hunk.status {
DiffHunkStatus::Removed => {
blocks = self.insert_blocks(
[
self.hunk_header_block(&hunk, cx),
Self::deleted_text_block(hunk, diff_base_buffer, deleted_text_lines, cx),
],
None,
cx,
);
self.insert_deleted_text_block(diff_base_buffer, deleted_text_lines, hunk, cx)
}
DiffHunkStatus::Added => {
self.highlight_rows::<DiffRowHighlight>(
hunk_start..hunk_end,
added_hunk_color(cx),
to_inclusive_row_range(hunk_start..hunk_end, &snapshot),
Some(added_hunk_color(cx)),
false,
cx,
);
blocks = self.insert_blocks([self.hunk_header_block(&hunk, cx)], None, cx);
None
}
DiffHunkStatus::Modified => {
self.highlight_rows::<DiffRowHighlight>(
hunk_start..hunk_end,
added_hunk_color(cx),
to_inclusive_row_range(hunk_start..hunk_end, &snapshot),
Some(added_hunk_color(cx)),
false,
cx,
);
blocks = self.insert_blocks(
[
self.hunk_header_block(&hunk, cx),
Self::deleted_text_block(hunk, diff_base_buffer, deleted_text_lines, cx),
],
None,
cx,
);
self.insert_deleted_text_block(diff_base_buffer, deleted_text_lines, hunk, cx)
}
};
self.expanded_hunks.hunks.insert(
block_insert_index,
ExpandedHunk {
blocks,
block,
hunk_range: hunk_start..hunk_end,
status: hunk.status,
folded: false,
@@ -333,367 +408,109 @@ impl Editor {
Some(())
}
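A compact restatement of the per-status handling above, reduced to two booleans; it assumes, as the match suggests, that modified hunks get both the row highlight and the deleted-text block.

enum DiffHunkStatus {
    Added,
    Modified,
    Removed,
}

/// Returns (highlight_added_rows, show_deleted_text_block) for an expanded hunk.
fn expanded_hunk_presentation(status: DiffHunkStatus) -> (bool, bool) {
    match status {
        // Pure additions only get the row highlight.
        DiffHunkStatus::Added => (true, false),
        // Modifications get both the highlight and the deleted-text block.
        DiffHunkStatus::Modified => (true, true),
        // Removals only need the deleted-text block.
        DiffHunkStatus::Removed => (false, true),
    }
}

fn main() {
    assert_eq!(expanded_hunk_presentation(DiffHunkStatus::Modified), (true, true));
}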
fn hunk_header_block(
&self,
hunk: &HoveredHunk,
cx: &mut ViewContext<'_, Editor>,
) -> BlockProperties<Anchor> {
let border_color = cx.theme().colors().border_variant;
let gutter_color = match hunk.status {
DiffHunkStatus::Added => cx.theme().status().created,
DiffHunkStatus::Modified => cx.theme().status().modified,
DiffHunkStatus::Removed => cx.theme().status().deleted,
};
BlockProperties {
position: hunk.multi_buffer_range.start,
height: 1,
style: BlockStyle::Sticky,
disposition: BlockDisposition::Above,
priority: 0,
render: Box::new({
let editor = cx.view().clone();
let hunk = hunk.clone();
move |cx| {
let hunk_controls_menu_handle =
editor.read(cx).hunk_controls_menu_handle.clone();
h_flex()
.id(cx.block_id)
.h(cx.line_height())
.w_full()
.border_t_1()
.border_color(border_color)
.bg(cx.theme().colors().editor_background)
.child(
div()
.id("gutter-strip")
.w(EditorElement::diff_hunk_strip_width(cx.line_height()))
.h_full()
.bg(gutter_color)
.cursor(CursorStyle::PointingHand)
.on_click({
let editor = editor.clone();
let hunk = hunk.clone();
move |_event, cx| {
editor.update(cx, |editor, cx| {
editor.toggle_hovered_hunk(&hunk, cx);
});
}
}),
)
.child(
h_flex()
.pl_2()
.pr_6()
.size_full()
.justify_between()
.child(
h_flex()
.gap_1()
.child(
IconButton::new("next-hunk", IconName::ArrowDown)
.shape(IconButtonShape::Square)
.icon_size(IconSize::Small)
.tooltip({
let focus_handle = editor.focus_handle(cx);
move |cx| {
Tooltip::for_action_in(
"Next Hunk",
&GoToHunk,
&focus_handle,
cx,
)
}
})
.on_click({
let editor = editor.clone();
let hunk = hunk.clone();
move |_event, cx| {
editor.update(cx, |editor, cx| {
let snapshot = editor.snapshot(cx);
let position = hunk
.multi_buffer_range
.end
.to_point(
&snapshot.buffer_snapshot,
);
if let Some(hunk) = editor
.go_to_hunk_after_position(
&snapshot, position, cx,
)
{
let multi_buffer_start = snapshot
.buffer_snapshot
.anchor_before(Point::new(
hunk.row_range.start.0,
0,
));
let multi_buffer_end = snapshot
.buffer_snapshot
.anchor_after(Point::new(
hunk.row_range.end.0,
0,
));
editor.expand_diff_hunk(
None,
&HoveredHunk {
multi_buffer_range:
multi_buffer_start
..multi_buffer_end,
status: hunk_status(&hunk),
diff_base_byte_range: hunk
.diff_base_byte_range,
},
cx,
);
}
});
}
}),
)
.child(
IconButton::new("prev-hunk", IconName::ArrowUp)
.shape(IconButtonShape::Square)
.icon_size(IconSize::Small)
.tooltip({
let focus_handle = editor.focus_handle(cx);
move |cx| {
Tooltip::for_action_in(
"Previous Hunk",
&GoToPrevHunk,
&focus_handle,
cx,
)
}
})
.on_click({
let editor = editor.clone();
let hunk = hunk.clone();
move |_event, cx| {
editor.update(cx, |editor, cx| {
let snapshot = editor.snapshot(cx);
let position = hunk
.multi_buffer_range
.start
.to_point(
&snapshot.buffer_snapshot,
);
let hunk = editor
.go_to_hunk_before_position(
&snapshot, position, cx,
);
if let Some(hunk) = hunk {
let multi_buffer_start = snapshot
.buffer_snapshot
.anchor_before(Point::new(
hunk.row_range.start.0,
0,
));
let multi_buffer_end = snapshot
.buffer_snapshot
.anchor_after(Point::new(
hunk.row_range.end.0,
0,
));
editor.expand_diff_hunk(
None,
&HoveredHunk {
multi_buffer_range:
multi_buffer_start
..multi_buffer_end,
status: hunk_status(&hunk),
diff_base_byte_range: hunk
.diff_base_byte_range,
},
cx,
);
}
});
}
}),
)
.child(
IconButton::new("discard", IconName::Undo)
.shape(IconButtonShape::Square)
.icon_size(IconSize::Small)
.tooltip({
let focus_handle = editor.focus_handle(cx);
move |cx| {
Tooltip::for_action_in(
"Discard Hunk",
&RevertSelectedHunks,
&focus_handle,
cx,
)
}
})
.on_click({
let editor = editor.clone();
let hunk = hunk.clone();
move |_event, cx| {
let multi_buffer =
editor.read(cx).buffer().clone();
let multi_buffer_snapshot =
multi_buffer.read(cx).snapshot(cx);
let mut revert_changes = HashMap::default();
if let Some(hunk) =
crate::hunk_diff::to_diff_hunk(
&hunk,
&multi_buffer_snapshot,
)
{
Editor::prepare_revert_change(
&mut revert_changes,
&multi_buffer,
&hunk,
cx,
);
}
if !revert_changes.is_empty() {
editor.update(cx, |editor, cx| {
editor.revert(revert_changes, cx)
});
}
}
}),
)
.child({
let focus = editor.focus_handle(cx);
PopoverMenu::new("hunk-controls-dropdown")
.trigger(
IconButton::new(
"toggle_editor_selections_icon",
IconName::EllipsisVertical,
)
.shape(IconButtonShape::Square)
.icon_size(IconSize::Small)
.style(ButtonStyle::Subtle)
.selected(
hunk_controls_menu_handle.is_deployed(),
)
.when(
!hunk_controls_menu_handle.is_deployed(),
|this| {
this.tooltip(|cx| {
Tooltip::text("Hunk Controls", cx)
})
},
),
)
.anchor(AnchorCorner::TopRight)
.with_handle(hunk_controls_menu_handle)
.menu(move |cx| {
let focus = focus.clone();
let menu =
ContextMenu::build(cx, move |menu, _| {
menu.context(focus.clone()).action(
"Discard All",
RevertFile.boxed_clone(),
)
});
Some(menu)
})
}),
)
.child(
div().child(
IconButton::new("collapse", IconName::Close)
.shape(IconButtonShape::Square)
.icon_size(IconSize::Small)
.tooltip({
let focus_handle = editor.focus_handle(cx);
move |cx| {
Tooltip::for_action_in(
"Collapse Hunk",
&ToggleHunkDiff,
&focus_handle,
cx,
)
}
})
.on_click({
let editor = editor.clone();
let hunk = hunk.clone();
move |_event, cx| {
editor.update(cx, |editor, cx| {
editor.toggle_hovered_hunk(&hunk, cx);
});
}
}),
),
),
)
.into_any_element()
}
}),
}
}
fn deleted_text_block(
hunk: &HoveredHunk,
fn insert_deleted_text_block(
&mut self,
diff_base_buffer: Model<Buffer>,
deleted_text_height: u32,
cx: &mut ViewContext<'_, Editor>,
) -> BlockProperties<Anchor> {
let gutter_color = match hunk.status {
DiffHunkStatus::Added => unreachable!(),
DiffHunkStatus::Modified => cx.theme().status().modified,
DiffHunkStatus::Removed => cx.theme().status().deleted,
};
hunk: &HoveredHunk,
cx: &mut ViewContext<'_, Self>,
) -> Option<CustomBlockId> {
let deleted_hunk_color = deleted_hunk_color(cx);
let (editor_height, editor_with_deleted_text) =
editor_with_deleted_text(diff_base_buffer, deleted_hunk_color, hunk, cx);
let editor = cx.view().clone();
let hunk = hunk.clone();
let height = editor_height.max(deleted_text_height);
BlockProperties {
position: hunk.multi_buffer_range.start,
height,
style: BlockStyle::Flex,
disposition: BlockDisposition::Above,
priority: 0,
render: Box::new(move |cx| {
let width = EditorElement::diff_hunk_strip_width(cx.line_height());
let gutter_dimensions = editor.read(cx.context).gutter_dimensions;
let mut new_block_ids = self.insert_blocks(
Some(BlockProperties {
position: hunk.multi_buffer_range.start,
height,
style: BlockStyle::Flex,
disposition: BlockDisposition::Above,
render: Box::new(move |cx| {
let width = EditorElement::diff_hunk_strip_width(cx.line_height());
let gutter_dimensions = editor.read(cx.context).gutter_dimensions;
h_flex()
.id(cx.block_id)
.bg(deleted_hunk_color)
.h(height as f32 * cx.line_height())
.w_full()
.child(
h_flex()
.id("gutter")
.max_w(gutter_dimensions.full_width())
.min_w(gutter_dimensions.full_width())
.size_full()
.child(
h_flex()
.id("gutter hunk")
.bg(gutter_color)
.pl(gutter_dimensions.margin
+ gutter_dimensions
.git_blame_entries_width
.unwrap_or_default())
.max_w(width)
.min_w(width)
.size_full()
.cursor(CursorStyle::PointingHand)
.on_mouse_down(MouseButton::Left, {
let editor = editor.clone();
let hunk = hunk.clone();
move |_event, cx| {
editor.update(cx, |editor, cx| {
editor.toggle_hovered_hunk(&hunk, cx);
});
}
}),
),
)
.child(editor_with_deleted_text.clone())
.into_any_element()
let close_button = editor.update(cx.context, |editor, cx| {
let editor_snapshot = editor.snapshot(cx);
let hunk_display_range = hunk
.multi_buffer_range
.clone()
.to_display_points(&editor_snapshot);
editor.close_hunk_diff_button(
hunk.clone(),
hunk_display_range.start.row(),
cx,
)
});
h_flex()
.id("gutter with editor")
.bg(deleted_hunk_color)
.h(height as f32 * cx.line_height())
.w_full()
.child(
h_flex()
.id("gutter")
.max_w(gutter_dimensions.full_width())
.min_w(gutter_dimensions.full_width())
.size_full()
.child(
h_flex()
.id("gutter hunk")
.bg(cx.theme().status().deleted)
.pl(gutter_dimensions.margin
+ gutter_dimensions
.git_blame_entries_width
.unwrap_or_default())
.max_w(width)
.min_w(width)
.size_full()
.cursor(CursorStyle::PointingHand)
.on_mouse_down(MouseButton::Left, {
let editor = editor.clone();
let hunk = hunk.clone();
move |event, cx| {
let modifiers = event.modifiers;
if modifiers.control || modifiers.platform {
editor.update(cx, |editor, cx| {
editor.toggle_hovered_hunk(&hunk, cx);
});
} else {
editor.update(cx, |editor, cx| {
editor.open_hunk_context_menu(
hunk.clone(),
event.position,
cx,
);
});
}
}
}),
)
.child(
v_flex()
.size_full()
.pt(rems(0.25))
.justify_start()
.child(close_button),
),
)
.child(editor_with_deleted_text.clone())
.into_any_element()
}),
priority: 0,
}),
None,
cx,
);
if new_block_ids.len() == 1 {
new_block_ids.pop()
} else {
debug_panic!(
"Inserted one editor block but did not receive exactly one block id: {new_block_ids:?}"
);
None
}
}
@@ -704,7 +521,7 @@ impl Editor {
.expanded_hunks
.hunks
.drain(..)
.flat_map(|expanded_hunk| expanded_hunk.blocks.into_iter())
.filter_map(|expanded_hunk| expanded_hunk.block)
.collect::<HashSet<_>>();
if to_remove.is_empty() {
false
@@ -786,7 +603,7 @@ impl Editor {
expanded_hunk.folded = true;
highlights_to_remove
.push(expanded_hunk.hunk_range.clone());
for block in expanded_hunk.blocks.drain(..) {
if let Some(block) = expanded_hunk.block.take() {
blocks_to_remove.insert(block);
}
break;
@@ -833,13 +650,20 @@ impl Editor {
}
}
if !retain {
blocks_to_remove.extend(expanded_hunk.blocks.drain(..));
blocks_to_remove.extend(expanded_hunk.block);
highlights_to_remove.push(expanded_hunk.hunk_range.clone());
}
retain
});
editor.remove_highlighted_rows::<DiffRowHighlight>(highlights_to_remove, cx);
for removed_rows in highlights_to_remove {
editor.highlight_rows::<DiffRowHighlight>(
to_inclusive_row_range(removed_rows, &snapshot),
None,
false,
cx,
);
}
editor.remove_blocks(blocks_to_remove, None, cx);
if let Some(diff_base_buffer) = &diff_base_buffer {
@@ -925,7 +749,7 @@ fn added_hunk_color(cx: &AppContext) -> Hsla {
}
fn deleted_hunk_color(cx: &AppContext) -> Hsla {
let mut deleted_color = cx.theme().status().deleted;
let mut deleted_color = cx.theme().status().git().deleted;
deleted_color.fade_out(0.7);
deleted_color
}
@@ -959,20 +783,37 @@ fn editor_with_deleted_text(
editor.set_read_only(true);
editor.set_show_inline_completions(Some(false), cx);
editor.highlight_rows::<DiffRowHighlight>(
Anchor::min()..Anchor::max(),
deleted_color,
Anchor::min()..=Anchor::max(),
Some(deleted_color),
false,
cx,
);
editor.set_current_line_highlight(Some(CurrentLineHighlight::None));
editor
._subscriptions
.extend([cx.on_blur(&editor.focus_handle, |editor, cx| {
let subscription_editor = parent_editor.clone();
editor._subscriptions.extend([
cx.on_blur(&editor.focus_handle, |editor, cx| {
editor.set_current_line_highlight(Some(CurrentLineHighlight::None));
editor.change_selections(None, cx, |s| {
s.try_cancel();
});
})]);
cx.notify();
}),
cx.on_focus(&editor.focus_handle, move |editor, cx| {
let restored_highlight = if let Some(parent_editor) = subscription_editor.upgrade()
{
parent_editor.read(cx).current_line_highlight
} else {
None
};
editor.set_current_line_highlight(restored_highlight);
cx.notify();
}),
cx.observe_global::<SettingsStore>(|editor, cx| {
if !editor.is_focused(cx) {
editor.set_current_line_highlight(Some(CurrentLineHighlight::None));
}
}),
]);
let parent_editor_for_reverts = parent_editor.clone();
let original_multi_buffer_range = hunk.multi_buffer_range.clone();
let diff_base_range = hunk.diff_base_byte_range.clone();
@@ -1038,6 +879,21 @@ fn buffer_diff_hunk(
None
}
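/// Converts an exclusive anchor range into an inclusive row range, stepping
/// the end back by one display row when the range spans more than one row.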
fn to_inclusive_row_range(
row_range: Range<Anchor>,
snapshot: &EditorSnapshot,
) -> RangeInclusive<Anchor> {
let mut display_row_range =
row_range.start.to_display_point(snapshot)..row_range.end.to_display_point(snapshot);
if display_row_range.end.row() > display_row_range.start.row() {
*display_row_range.end.row_mut() -= 1;
}
let point_range = display_row_range.start.to_point(&snapshot.display_snapshot)
..display_row_range.end.to_point(&snapshot.display_snapshot);
let new_range = point_range.to_anchors(&snapshot.buffer_snapshot);
new_range.start..=new_range.end
}
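A hedged, standalone sketch of the same conversion using plain u32 rows instead of anchors (illustrative only, not part of this change): when the exclusive end falls on the row after the last covered row, it is stepped back by one so the inclusive range spans the same rows.

// Hypothetical, simplified version of the conversion above, on plain row numbers.
fn to_inclusive_rows(range: std::ops::Range<u32>) -> std::ops::RangeInclusive<u32> {
    // `range.end` is exclusive; pull it back one row for non-empty ranges
    // so the inclusive range covers exactly the same rows.
    let end = if range.end > range.start {
        range.end - 1
    } else {
        range.end
    };
    range.start..=end
}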
impl DisplayDiffHunk {
pub fn start_display_row(&self) -> DisplayRow {
match self {

View File

@@ -6,13 +6,10 @@ use language::{Buffer, BufferEvent, Capability};
use multi_buffer::{ExcerptRange, MultiBuffer};
use project::Project;
use smol::stream::StreamExt;
use std::{any::TypeId, ops::Range, time::Duration};
use std::{ops::Range, time::Duration};
use text::ToOffset;
use ui::prelude::*;
use workspace::{
searchable::SearchableItemHandle, Item, ItemHandle as _, ToolbarItemEvent, ToolbarItemLocation,
ToolbarItemView,
};
use workspace::Item;
pub struct ProposedChangesEditor {
editor: View<Editor>,
@@ -26,10 +23,6 @@ pub struct ProposedChangesBuffer<T> {
pub ranges: Vec<Range<T>>,
}
pub struct ProposedChangesEditorToolbar {
current_editor: Option<View<ProposedChangesEditor>>,
}
impl ProposedChangesEditor {
pub fn new<T: ToOffset>(
buffers: Vec<ProposedChangesBuffer<T>>,
@@ -103,17 +96,6 @@ impl ProposedChangesEditor {
self.recalculate_diffs_tx.unbounded_send(buffer).ok();
}
}
fn apply_all_changes(&self, cx: &mut ViewContext<Self>) {
let buffers = self.editor.read(cx).buffer.read(cx).all_buffers();
for branch_buffer in buffers {
if let Some(base_buffer) = branch_buffer.read(cx).diff_base_buffer() {
base_buffer.update(cx, |base_buffer, cx| {
base_buffer.merge(&branch_buffer, None, cx)
});
}
}
}
}
impl Render for ProposedChangesEditor {
@@ -140,66 +122,4 @@ impl Item for ProposedChangesEditor {
fn tab_content_text(&self, _cx: &WindowContext) -> Option<SharedString> {
Some("Proposed changes".into())
}
fn as_searchable(&self, _: &View<Self>) -> Option<Box<dyn SearchableItemHandle>> {
Some(Box::new(self.editor.clone()))
}
fn act_as_type<'a>(
&'a self,
type_id: TypeId,
self_handle: &'a View<Self>,
_: &'a AppContext,
) -> Option<gpui::AnyView> {
if type_id == TypeId::of::<Self>() {
Some(self_handle.to_any())
} else if type_id == TypeId::of::<Editor>() {
Some(self.editor.to_any())
} else {
None
}
}
}
impl ProposedChangesEditorToolbar {
pub fn new() -> Self {
Self {
current_editor: None,
}
}
fn get_toolbar_item_location(&self) -> ToolbarItemLocation {
if self.current_editor.is_some() {
ToolbarItemLocation::PrimaryRight
} else {
ToolbarItemLocation::Hidden
}
}
}
impl Render for ProposedChangesEditorToolbar {
fn render(&mut self, _cx: &mut ViewContext<Self>) -> impl IntoElement {
let editor = self.current_editor.clone();
Button::new("apply-changes", "Apply All").on_click(move |_, cx| {
if let Some(editor) = &editor {
editor.update(cx, |editor, cx| {
editor.apply_all_changes(cx);
});
}
})
}
}
impl EventEmitter<ToolbarItemEvent> for ProposedChangesEditorToolbar {}
impl ToolbarItemView for ProposedChangesEditorToolbar {
fn set_active_pane_item(
&mut self,
active_pane_item: Option<&dyn workspace::ItemHandle>,
_cx: &mut ViewContext<Self>,
) -> workspace::ToolbarItemLocation {
self.current_editor =
active_pane_item.and_then(|item| item.downcast::<ProposedChangesEditor>());
self.get_toolbar_item_location()
}
}

View File

@@ -88,3 +88,116 @@ pub(crate) fn build_editor_with_project(
) -> Editor {
Editor::new(EditorMode::Full, buffer, Some(project), true, cx)
}
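/// Test-only helper: collects every git diff hunk in the buffer snapshot,
/// returning its deleted base text, status, and display row range.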
#[cfg(any(test, feature = "test-support"))]
pub fn editor_hunks(
editor: &Editor,
snapshot: &DisplaySnapshot,
cx: &mut ViewContext<'_, Editor>,
) -> Vec<(
String,
git::diff::DiffHunkStatus,
std::ops::Range<crate::DisplayRow>,
)> {
use multi_buffer::MultiBufferRow;
use text::Point;
use crate::hunk_status;
snapshot
.buffer_snapshot
.git_diff_hunks_in_range(MultiBufferRow::MIN..MultiBufferRow::MAX)
.map(|hunk| {
let display_range = Point::new(hunk.row_range.start.0, 0)
.to_display_point(snapshot)
.row()
..Point::new(hunk.row_range.end.0, 0)
.to_display_point(snapshot)
.row();
let (_, buffer, _) = editor
.buffer()
.read(cx)
.excerpt_containing(Point::new(hunk.row_range.start.0, 0), cx)
.expect("no excerpt for expanded buffer's hunk start");
let diff_base = buffer
.read(cx)
.diff_base()
.expect("should have a diff base for expanded hunk")
.slice(hunk.diff_base_byte_range.clone())
.to_string();
(diff_base, hunk_status(&hunk), display_range)
})
.collect()
}
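/// Test-only helper: like `editor_hunks`, but reads the hunks currently
/// expanded in the editor instead of recomputing them from the git diff.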
#[cfg(any(test, feature = "test-support"))]
pub fn expanded_hunks(
editor: &Editor,
snapshot: &DisplaySnapshot,
cx: &mut ViewContext<'_, Editor>,
) -> Vec<(
String,
git::diff::DiffHunkStatus,
std::ops::Range<crate::DisplayRow>,
)> {
editor
.expanded_hunks
.hunks(false)
.map(|expanded_hunk| {
let hunk_display_range = expanded_hunk
.hunk_range
.start
.to_display_point(snapshot)
.row()
..expanded_hunk
.hunk_range
.end
.to_display_point(snapshot)
.row();
let (_, buffer, _) = editor
.buffer()
.read(cx)
.excerpt_containing(expanded_hunk.hunk_range.start, cx)
.expect("no excerpt for expanded buffer's hunk start");
let diff_base = buffer
.read(cx)
.diff_base()
.expect("should have a diff base for expanded hunk")
.slice(expanded_hunk.diff_base_byte_range.clone())
.to_string();
(diff_base, expanded_hunk.status, hunk_display_range)
})
.collect()
}
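/// Test-only helper: collapses the editor's highlighted display rows into
/// inclusive ranges, one per run of consecutive highlighted rows.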
#[cfg(any(test, feature = "test-support"))]
pub fn expanded_hunks_background_highlights(
editor: &mut Editor,
cx: &mut gpui::WindowContext,
) -> Vec<std::ops::RangeInclusive<crate::DisplayRow>> {
use crate::DisplayRow;
let mut highlights = Vec::new();
let mut range_start = 0;
let mut previous_highlighted_row = None;
for (highlighted_row, _) in editor.highlighted_display_rows(cx) {
match previous_highlighted_row {
Some(previous_row) => {
if previous_row + 1 != highlighted_row.0 {
highlights.push(DisplayRow(range_start)..=DisplayRow(previous_row));
range_start = highlighted_row.0;
}
}
None => {
range_start = highlighted_row.0;
}
}
previous_highlighted_row = Some(highlighted_row.0);
}
if let Some(previous_row) = previous_highlighted_row {
highlights.push(DisplayRow(range_start)..=DisplayRow(previous_row));
}
highlights
}

View File

@@ -1,17 +1,17 @@
use crate::{
display_map::ToDisplayPoint, AnchorRangeExt, Autoscroll, DiffRowHighlight, DisplayPoint,
Editor, MultiBuffer, RowExt,
display_map::ToDisplayPoint, AnchorRangeExt, Autoscroll, DisplayPoint, Editor, MultiBuffer,
RowExt,
};
use collections::BTreeMap;
use futures::Future;
use git::diff::DiffHunkStatus;
use gpui::{
AnyWindowHandle, AppContext, Keystroke, ModelContext, Pixels, Point, View, ViewContext,
VisualTestContext, WindowHandle,
VisualTestContext,
};
use indoc::indoc;
use itertools::Itertools;
use language::{Buffer, BufferSnapshot, LanguageRegistry};
use multi_buffer::{ExcerptRange, ToPoint};
use multi_buffer::ExcerptRange;
use parking_lot::RwLock;
use project::{FakeFs, Project};
use std::{
@@ -71,16 +71,6 @@ impl EditorTestContext {
}
}
pub async fn for_editor(editor: WindowHandle<Editor>, cx: &mut gpui::TestAppContext) -> Self {
let editor_view = editor.root_view(cx).unwrap();
Self {
cx: VisualTestContext::from_window(*editor.deref(), cx),
window: editor.into(),
editor: editor_view,
assertion_cx: AssertionContextManager::new(),
}
}
pub fn new_multibuffer<const COUNT: usize>(
cx: &mut gpui::TestAppContext,
excerpts: [&str; COUNT],
@@ -307,85 +297,19 @@ impl EditorTestContext {
state_context
}
#[track_caller]
pub fn assert_diff_hunks(&mut self, expected_diff: String) {
// Normalize the expected diff. If it has no diff markers, then insert blank markers
// before each line. Strip any whitespace-only lines.
let has_diff_markers = expected_diff
.lines()
.any(|line| line.starts_with("+") || line.starts_with("-"));
let expected_diff_text = expected_diff
.split('\n')
.map(|line| {
let trimmed = line.trim();
if trimmed.is_empty() {
String::new()
} else if has_diff_markers {
line.to_string()
} else {
format!(" {line}")
}
})
.join("\n");
// Read the actual diff from the editor's row highlights and block
// decorations.
let actual_diff = self.editor.update(&mut self.cx, |editor, cx| {
let snapshot = editor.snapshot(cx);
let text = editor.text(cx);
let insertions = editor
.highlighted_rows::<DiffRowHighlight>()
.map(|(range, _)| {
let start = range.start.to_point(&snapshot.buffer_snapshot);
let end = range.end.to_point(&snapshot.buffer_snapshot);
start.row..end.row
})
.collect::<Vec<_>>();
let deletions = editor
.expanded_hunks
.hunks
.iter()
.filter_map(|hunk| {
if hunk.blocks.is_empty() {
return None;
}
let row = hunk
.hunk_range
.start
.to_point(&snapshot.buffer_snapshot)
.row;
let (_, buffer, _) = editor
.buffer()
.read(cx)
.excerpt_containing(hunk.hunk_range.start, cx)
.expect("no excerpt for expanded buffer's hunk start");
let deleted_text = buffer
.read(cx)
.diff_base()
.expect("should have a diff base for expanded hunk")
.slice(hunk.diff_base_byte_range.clone())
.to_string();
if let DiffHunkStatus::Modified | DiffHunkStatus::Removed = hunk.status {
Some((row, deleted_text))
} else {
None
}
})
.collect::<Vec<_>>();
format_diff(text, deletions, insertions)
});
pretty_assertions::assert_eq!(actual_diff, expected_diff_text, "unexpected diff state");
}
/// Make an assertion about the editor's text and the ranges and directions
/// of its selections using a string containing embedded range markers.
///
/// See the `util::test::marked_text_ranges` function for more information.
#[track_caller]
pub fn assert_editor_state(&mut self, marked_text: &str) {
let (expected_text, expected_selections) = marked_text_ranges(marked_text, true);
pretty_assertions::assert_eq!(self.buffer_text(), expected_text, "unexpected buffer text");
let (unmarked_text, expected_selections) = marked_text_ranges(marked_text, true);
let buffer_text = self.buffer_text();
if buffer_text != unmarked_text {
panic!("Unmarked text doesn't match buffer text\nBuffer text: {buffer_text:?}\nUnmarked text: {unmarked_text:?}\nRaw buffer text\n{buffer_text}\nRaw unmarked text\n{unmarked_text}");
}
self.assert_selections(expected_selections, marked_text.to_string())
}
@@ -458,56 +382,25 @@ impl EditorTestContext {
let actual_marked_text =
generate_marked_text(&self.buffer_text(), &actual_selections, true);
if expected_selections != actual_selections {
pretty_assertions::assert_eq!(
actual_marked_text,
expected_marked_text,
"{}Editor has unexpected selections",
panic!(
indoc! {"
{}Editor has unexpected selections.
Expected selections:
{}
Actual selections:
{}
"},
self.assertion_context(),
expected_marked_text,
actual_marked_text,
);
}
}
}
fn format_diff(
text: String,
actual_deletions: Vec<(u32, String)>,
actual_insertions: Vec<Range<u32>>,
) -> String {
let mut diff = String::new();
for (row, line) in text.split('\n').enumerate() {
let row = row as u32;
if row > 0 {
diff.push('\n');
}
if let Some(text) = actual_deletions
.iter()
.find_map(|(deletion_row, deleted_text)| {
if *deletion_row == row {
Some(deleted_text)
} else {
None
}
})
{
for line in text.lines() {
diff.push('-');
if !line.is_empty() {
diff.push(' ');
diff.push_str(line);
}
diff.push('\n');
}
}
let marker = if actual_insertions.iter().any(|range| range.contains(&row)) {
"+ "
} else {
" "
};
diff.push_str(format!("{marker}{line}").trim_end());
}
diff
}
impl Deref for EditorTestContext {
type Target = gpui::VisualTestContext;

View File

@@ -108,7 +108,7 @@ fn main() -> Result<()> {
.clone()
.spawn(async move {
if let Err(err) = fetch_evaluation_resources(client, &executor).await {
eprintln!("Error: {}", err);
eprintln!("Error fetching eval resources: {}", err);
exit(1);
}
exit(0);
@@ -118,7 +118,7 @@ fn main() -> Result<()> {
Commands::Run { repo } => {
cx.spawn(|mut cx| async move {
if let Err(err) = run_evaluation(repo, &executor, &mut cx).await {
eprintln!("Error: {}", err);
eprintln!("Error running eval: {}", err);
exit(1);
}
exit(0);
@@ -294,8 +294,8 @@ async fn run_evaluation(
.unwrap();
let node_runtime = NodeRuntime::unavailable();
let evaluations = fs::read(&evaluations_path).expect("failed to read evaluations.json");
let evaluations: Vec<EvaluationProject> = serde_json::from_slice(&evaluations).unwrap();
let evaluations = fs::read(&evaluations_path).expect("failed to read evaluations.json - run `cargo run -p evals --bin eval fetch` to fetch evaluations.json.");
let evaluations: Vec<EvaluationProject> = serde_json::from_slice(&evaluations).expect("evaluations.json was not valid JSON. It may have been corrupted; try deleting it and then running `cargo run -p evals --bin eval fetch` to fetch a new copy.");
let embedding_provider = Arc::new(OpenAiEmbeddingProvider::new(
http_client.clone(),
@@ -322,7 +322,7 @@ async fn run_evaluation(
}
eprint!("\r\x1B[2K");
eprint!(
eprintln!(
"Running evals. {}/{} covered. {}/{} overlapped. {}/{} files captured. Project: {}...",
counts.covered_results,
counts.total_results,

View File

@@ -28,6 +28,7 @@ futures.workspace = true
gpui.workspace = true
http_client.workspace = true
indexed_docs.workspace = true
isahc.workspace = true
language.workspace = true
log.workspace = true
lsp.workspace = true

View File

@@ -664,7 +664,7 @@ impl ExtensionStore {
let content_length = response
.headers()
.get(http_client::http::header::CONTENT_LENGTH)
.get(isahc::http::header::CONTENT_LENGTH)
.and_then(|value| value.to_str().ok()?.parse::<usize>().ok());
let mut body = BufReader::new(response.body_mut());

View File

@@ -576,6 +576,7 @@ async fn test_extension_store_with_test_extension(cx: &mut TestAppContext) {
std::env::consts::ARCH
)
});
let builder_client = IsahcHttpClient::new(None, Some(user_agent));
let extension_store = cx.new_model(|cx| {

View File

@@ -1,5 +1,5 @@
use crate::wasm_host::{wit::ToWasmtimeResult, WasmState};
use ::http_client::{AsyncBody, HttpRequestExt};
use ::http_client::AsyncBody;
use ::settings::{Settings, WorktreeId};
use anyhow::{anyhow, bail, Context, Result};
use async_compression::futures::bufread::GzipDecoder;
@@ -8,6 +8,7 @@ use async_trait::async_trait;
use futures::{io::BufReader, FutureExt as _};
use futures::{lock::Mutex, AsyncReadExt};
use indexed_docs::IndexedDocsDatabase;
use isahc::config::{Configurable, RedirectPolicy};
use language::{
language_settings::AllLanguageSettings, LanguageServerBinaryStatus, LspAdapterDelegate,
};
@@ -296,12 +297,10 @@ fn convert_request(
let mut request = ::http_client::Request::builder()
.method(::http_client::Method::from(extension_request.method))
.uri(&extension_request.url)
.follow_redirects(match extension_request.redirect_policy {
http_client::RedirectPolicy::NoFollow => ::http_client::RedirectPolicy::NoFollow,
http_client::RedirectPolicy::FollowLimit(limit) => {
::http_client::RedirectPolicy::FollowLimit(limit)
}
http_client::RedirectPolicy::FollowAll => ::http_client::RedirectPolicy::FollowAll,
.redirect_policy(match extension_request.redirect_policy {
http_client::RedirectPolicy::NoFollow => RedirectPolicy::None,
http_client::RedirectPolicy::FollowLimit(limit) => RedirectPolicy::Limit(limit),
http_client::RedirectPolicy::FollowAll => RedirectPolicy::Follow,
});
for (key, value) in &extension_request.headers {
request = request.header(key, value);

View File

@@ -1,5 +1,5 @@
use crate::wasm_host::{wit::ToWasmtimeResult, WasmState};
use ::http_client::{AsyncBody, HttpRequestExt};
use ::http_client::AsyncBody;
use ::settings::{Settings, WorktreeId};
use anyhow::{anyhow, bail, Context, Result};
use async_compression::futures::bufread::GzipDecoder;
@@ -8,6 +8,7 @@ use async_trait::async_trait;
use futures::{io::BufReader, FutureExt as _};
use futures::{lock::Mutex, AsyncReadExt};
use indexed_docs::IndexedDocsDatabase;
use isahc::config::{Configurable, RedirectPolicy};
use language::{
language_settings::AllLanguageSettings, LanguageServerBinaryStatus, LspAdapterDelegate,
};
@@ -212,12 +213,10 @@ fn convert_request(
let mut request = ::http_client::Request::builder()
.method(::http_client::Method::from(extension_request.method))
.uri(&extension_request.url)
.follow_redirects(match extension_request.redirect_policy {
http_client::RedirectPolicy::NoFollow => ::http_client::RedirectPolicy::NoFollow,
http_client::RedirectPolicy::FollowLimit(limit) => {
::http_client::RedirectPolicy::FollowLimit(limit)
}
http_client::RedirectPolicy::FollowAll => ::http_client::RedirectPolicy::FollowAll,
.redirect_policy(match extension_request.redirect_policy {
http_client::RedirectPolicy::NoFollow => RedirectPolicy::None,
http_client::RedirectPolicy::FollowLimit(limit) => RedirectPolicy::Limit(limit),
http_client::RedirectPolicy::FollowAll => RedirectPolicy::Follow,
});
for (key, value) in &extension_request.headers {
request = request.header(key, value);

View File

@@ -23,6 +23,7 @@ editor.workspace = true
futures.workspace = true
gpui.workspace = true
human_bytes = "0.4.1"
isahc.workspace = true
http_client.workspace = true
language.workspace = true
log.workspace = true

View File

@@ -11,6 +11,7 @@ use gpui::{
PromptLevel, Render, Task, View, ViewContext,
};
use http_client::HttpClient;
use isahc::Request;
use language::Buffer;
use project::Project;
use regex::Regex;
@@ -298,7 +299,7 @@ impl FeedbackModal {
is_staff: is_staff.unwrap_or(false),
};
let json_bytes = serde_json::to_vec(&request)?;
let request = http_client::http::Request::post(feedback_endpoint)
let request = Request::post(feedback_endpoint)
.header("content-type", "application/json")
.body(json_bytes.into())?;
let mut response = http_client.send(request).await?;

View File

@@ -394,7 +394,7 @@ fn matching_history_items<'a>(
.chars(),
),
};
candidates_paths.insert(&found_path.project, found_path);
candidates_paths.insert(Arc::clone(&found_path.project.path), found_path);
Some((found_path.project.worktree_id, candidate))
})
.fold(
@@ -419,21 +419,17 @@ fn matching_history_items<'a>(
max_results,
)
.into_iter()
.filter_map(|path_match| {
candidates_paths
.remove_entry(&ProjectPath {
worktree_id: WorktreeId::from_usize(path_match.worktree_id),
path: Arc::clone(&path_match.path),
})
.map(|(_, found_path)| {
(
Arc::clone(&path_match.path),
Match::History {
path: found_path.clone(),
panel_match: Some(ProjectPanelOrdMatch(path_match)),
},
)
})
.map(|path_match| {
let (_, found_path) = candidates_paths
.remove_entry(&path_match.path)
.expect("candidate info not found");
(
Arc::clone(&path_match.path),
Match::History {
path: found_path.clone(),
panel_match: Some(ProjectPanelOrdMatch(path_match)),
},
)
}),
);
}

View File

@@ -34,7 +34,7 @@ struct InternalDiffHunk {
impl sum_tree::Item for InternalDiffHunk {
type Summary = DiffHunkSummary;
fn summary(&self, _cx: &text::BufferSnapshot) -> Self::Summary {
fn summary(&self) -> Self::Summary {
DiffHunkSummary {
buffer_range: self.buffer_range.clone(),
}

View File

@@ -3,7 +3,7 @@ use std::sync::Arc;
use anyhow::{bail, Context, Result};
use async_trait::async_trait;
use futures::AsyncReadExt;
use http_client::{AsyncBody, HttpClient, HttpRequestExt, Request};
use http_client::{AsyncBody, HttpClient, Request};
use serde::Deserialize;
use url::Url;
@@ -49,16 +49,14 @@ impl Codeberg {
let url =
format!("https://codeberg.org/api/v1/repos/{repo_owner}/{repo}/git/commits/{commit}");
let mut request = Request::get(&url)
.header("Content-Type", "application/json")
.follow_redirects(http_client::RedirectPolicy::FollowAll);
let mut request = Request::get(&url).header("Content-Type", "application/json");
if let Ok(codeberg_token) = std::env::var("CODEBERG_TOKEN") {
request = request.header("Authorization", format!("Bearer {}", codeberg_token));
}
let mut response = client
.send(request.body(AsyncBody::default())?)
.send_with_redirect_policy(request.body(AsyncBody::default())?, true)
.await
.with_context(|| format!("error fetching Codeberg commit details at {:?}", url))?;

View File

@@ -3,7 +3,7 @@ use std::sync::{Arc, OnceLock};
use anyhow::{bail, Context, Result};
use async_trait::async_trait;
use futures::AsyncReadExt;
use http_client::{AsyncBody, HttpClient, HttpRequestExt, Request};
use http_client::{AsyncBody, HttpClient, Request};
use regex::Regex;
use serde::Deserialize;
use url::Url;
@@ -53,16 +53,14 @@ impl Github {
) -> Result<Option<User>> {
let url = format!("https://api.github.com/repos/{repo_owner}/{repo}/commits/{commit}");
let mut request = Request::get(&url)
.header("Content-Type", "application/json")
.follow_redirects(http_client::RedirectPolicy::FollowAll);
let mut request = Request::get(&url).header("Content-Type", "application/json");
if let Ok(github_token) = std::env::var("GITHUB_TOKEN") {
request = request.header("Authorization", format!("Bearer {}", github_token));
}
let mut response = client
.send(request.body(AsyncBody::default())?)
.send_with_redirect_policy(request.body(AsyncBody::default())?, true)
.await
.with_context(|| format!("error fetching GitHub commit details at {:?}", url))?;

View File

@@ -116,14 +116,12 @@ impl GoToLine {
if let Some(point) = self.point_from_query(cx) {
self.active_editor.update(cx, |active_editor, cx| {
let snapshot = active_editor.snapshot(cx).display_snapshot;
let start = snapshot.buffer_snapshot.clip_point(point, Bias::Left);
let end = start + Point::new(1, 0);
let start = snapshot.buffer_snapshot.anchor_before(start);
let end = snapshot.buffer_snapshot.anchor_after(end);
let point = snapshot.buffer_snapshot.clip_point(point, Bias::Left);
let anchor = snapshot.buffer_snapshot.anchor_before(point);
active_editor.clear_row_highlights::<GoToLineRowHighlights>();
active_editor.highlight_rows::<GoToLineRowHighlights>(
start..end,
cx.theme().colors().editor_highlighted_line_background,
anchor..=anchor,
Some(cx.theme().colors().editor_highlighted_line_background),
true,
cx,
);
@@ -246,13 +244,13 @@ mod tests {
field_1: i32, // display line 3
field_2: i32, // display line 4
} // display line 5
// display line 6
struct Another { // display line 7
field_1: i32, // display line 8
field_2: i32, // display line 9
field_3: i32, // display line 10
field_4: i32, // display line 11
} // display line 12
// display line 7
struct Another { // display line 8
field_1: i32, // display line 9
field_2: i32, // display line 10
field_3: i32, // display line 11
field_4: i32, // display line 12
} // display line 13
"}
}),
)

View File

@@ -18,6 +18,7 @@ schemars = ["dep:schemars"]
anyhow.workspace = true
futures.workspace = true
http_client.workspace = true
isahc.workspace = true
schemars = { workspace = true, optional = true }
serde.workspace = true
serde_json.workspace = true

View File

@@ -2,7 +2,8 @@ mod supported_countries;
use anyhow::{anyhow, Result};
use futures::{io::BufReader, stream::BoxStream, AsyncBufReadExt, AsyncReadExt, Stream, StreamExt};
use http_client::{AsyncBody, HttpClient, HttpRequestExt, Method, Request as HttpRequest};
use http_client::{AsyncBody, HttpClient, Method, Request as HttpRequest};
use isahc::config::Configurable;
use serde::{Deserialize, Serialize};
use std::time::Duration;
@@ -29,7 +30,7 @@ pub async fn stream_generate_content(
.header("Content-Type", "application/json");
if let Some(low_speed_timeout) = low_speed_timeout {
request_builder = request_builder.read_timeout(low_speed_timeout);
request_builder = request_builder.low_speed_timeout(100, low_speed_timeout);
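// isahc: abort the request if throughput stays below 100 bytes/sec for the given duration.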
};
let request = request_builder.body(AsyncBody::from(serde_json::to_string(&request)?))?;
@@ -84,7 +85,7 @@ pub async fn count_tokens(
.header("Content-Type", "application/json");
if let Some(low_speed_timeout) = low_speed_timeout {
request_builder = request_builder.read_timeout(low_speed_timeout);
request_builder = request_builder.low_speed_timeout(100, low_speed_timeout);
}
let http_request = request_builder.body(AsyncBody::from(request))?;

View File

@@ -1524,9 +1524,10 @@ pub struct KeystrokeEvent {
struct NullHttpClient;
impl HttpClient for NullHttpClient {
fn send(
fn send_with_redirect_policy(
&self,
_req: http_client::Request<http_client::AsyncBody>,
_follow_redirects: bool,
) -> futures::future::BoxFuture<
'static,
Result<http_client::Response<http_client::AsyncBody>, anyhow::Error>,

View File

@@ -858,7 +858,7 @@ impl Styled for List {
impl sum_tree::Item for ListItem {
type Summary = ListItemSummary;
fn summary(&self, _: &()) -> Self::Summary {
fn summary(&self) -> Self::Summary {
match self {
ListItem::Unmeasured { focus_handle } => ListItemSummary {
count: 1,

View File

@@ -2612,12 +2612,6 @@ impl From<ScaledPixels> for f64 {
}
}
impl From<ScaledPixels> for u32 {
fn from(pixels: ScaledPixels) -> Self {
pixels.0 as u32
}
}
/// Represents a length in rems, a unit based on the font-size of the window, which can be assigned with [`WindowContext::set_rem_size`][set_rem_size].
///
/// Rems are used for defining lengths that are scalable and consistent across different UI elements.

View File

@@ -23,8 +23,8 @@ use crate::{
point, Action, AnyWindowHandle, AppContext, AsyncWindowContext, BackgroundExecutor, Bounds,
DevicePixels, DispatchEventResult, Font, FontId, FontMetrics, FontRun, ForegroundExecutor,
GPUSpecs, GlyphId, ImageSource, Keymap, LineLayout, Pixels, PlatformInput, Point,
RenderGlyphParams, RenderImage, RenderImageParams, RenderSvgParams, ScaledPixels, Scene,
SharedString, Size, SvgSize, Task, TaskLabel, WindowContext, DEFAULT_WINDOW_SIZE,
RenderGlyphParams, RenderImage, RenderImageParams, RenderSvgParams, Scene, SharedString, Size,
SvgSize, Task, TaskLabel, WindowContext, DEFAULT_WINDOW_SIZE,
};
use anyhow::Result;
use async_task::Runnable;
@@ -381,7 +381,7 @@ pub(crate) trait PlatformWindow: HasWindowHandle + HasDisplayHandle {
fn set_client_inset(&self, _inset: Pixels) {}
fn gpu_specs(&self) -> Option<GPUSpecs>;
fn update_ime_position(&self, _bounds: Bounds<ScaledPixels>);
fn update_ime_position(&self, _bounds: Bounds<Pixels>);
#[cfg(any(test, feature = "test-support"))]
fn as_test(&mut self) -> Option<&mut TestWindow> {

View File

@@ -84,7 +84,7 @@ use crate::{
use crate::{
AnyWindowHandle, CursorStyle, DisplayId, KeyDownEvent, KeyUpEvent, Keystroke, Modifiers,
ModifiersChangedEvent, MouseButton, MouseDownEvent, MouseMoveEvent, MouseUpEvent,
NavigationDirection, Pixels, PlatformDisplay, PlatformInput, Point, ScaledPixels, ScrollDelta,
NavigationDirection, Pixels, PlatformDisplay, PlatformInput, Point, ScrollDelta,
ScrollWheelEvent, TouchPhase,
};
use crate::{LinuxCommon, WindowParams};
@@ -313,7 +313,7 @@ impl WaylandClientStatePtr {
}
}
pub fn update_ime_position(&self, bounds: Bounds<ScaledPixels>) {
pub fn update_ime_position(&self, bounds: Bounds<Pixels>) {
let client = self.get_client();
let mut state = client.borrow_mut();
if state.composing || state.text_input.is_none() || state.pre_edit_text.is_some() {

View File

@@ -26,7 +26,7 @@ use crate::platform::{PlatformAtlas, PlatformInputHandler, PlatformWindow};
use crate::scene::Scene;
use crate::{
px, size, AnyWindowHandle, Bounds, Decorations, GPUSpecs, Globals, Modifiers, Output, Pixels,
PlatformDisplay, PlatformInput, Point, PromptLevel, ResizeEdge, ScaledPixels, Size, Tiling,
PlatformDisplay, PlatformInput, Point, PromptLevel, ResizeEdge, Size, Tiling,
WaylandClientStatePtr, WindowAppearance, WindowBackgroundAppearance, WindowBounds,
WindowControls, WindowDecorations, WindowParams,
};
@@ -1010,7 +1010,7 @@ impl PlatformWindow for WaylandWindow {
}
}
fn update_ime_position(&self, bounds: Bounds<ScaledPixels>) {
fn update_ime_position(&self, bounds: Bounds<Pixels>) {
let state = self.borrow();
state.client.update_ime_position(bounds);
}
@@ -1046,8 +1046,8 @@ fn update_window(mut state: RefMut<WaylandWindowState>) {
&& state.decorations == WindowDecorations::Server
{
// Promise the compositor that this region of the window surface
// contains no transparent pixels. This allows the compositor to skip
// updating whatever is behind the surface for better performance.
// contains no transparent pixels. This allows the compositor to
// skip whatever is behind the surface for better performance.
state.surface.set_opaque_region(Some(&region));
} else {
state.surface.set_opaque_region(None);
@@ -1057,6 +1057,7 @@ fn update_window(mut state: RefMut<WaylandWindowState>) {
if state.background_appearance == WindowBackgroundAppearance::Blurred {
if state.blur.is_none() {
let blur = blur_manager.create(&state.surface, &state.globals.qh, ());
blur.set_region(Some(&region));
state.blur = Some(blur);
}
state.blur.as_ref().unwrap().commit();

View File

@@ -38,8 +38,7 @@ use crate::platform::{LinuxCommon, PlatformWindow};
use crate::{
modifiers_from_xinput_info, point, px, AnyWindowHandle, Bounds, ClipboardItem, CursorStyle,
DisplayId, FileDropEvent, Keystroke, Modifiers, ModifiersChangedEvent, Pixels, Platform,
PlatformDisplay, PlatformInput, Point, ScaledPixels, ScrollDelta, Size, TouchPhase,
WindowParams, X11Window,
PlatformDisplay, PlatformInput, Point, ScrollDelta, Size, TouchPhase, WindowParams, X11Window,
};
use super::{button_of_key, modifiers_from_state, pressed_button_from_mask};
@@ -189,7 +188,7 @@ impl X11ClientStatePtr {
}
}
pub fn update_ime_position(&self, bounds: Bounds<ScaledPixels>) {
pub fn update_ime_position(&self, bounds: Bounds<Pixels>) {
let client = self.get_client();
let mut state = client.0.borrow_mut();
if state.composing || state.ximc.is_none() {

View File

@@ -4,9 +4,9 @@ use crate::{
platform::blade::{BladeRenderer, BladeSurfaceConfig},
px, size, AnyWindowHandle, Bounds, Decorations, DevicePixels, ForegroundExecutor, GPUSpecs,
Modifiers, Pixels, PlatformAtlas, PlatformDisplay, PlatformInput, PlatformInputHandler,
PlatformWindow, Point, PromptLevel, ResizeEdge, ScaledPixels, Scene, Size, Tiling,
WindowAppearance, WindowBackgroundAppearance, WindowBounds, WindowDecorations, WindowKind,
WindowParams, X11ClientStatePtr,
PlatformWindow, Point, PromptLevel, ResizeEdge, Scene, Size, Tiling, WindowAppearance,
WindowBackgroundAppearance, WindowBounds, WindowDecorations, WindowKind, WindowParams,
X11ClientStatePtr,
};
use blade_graphics as gpu;
@@ -1412,7 +1412,7 @@ impl PlatformWindow for X11Window {
}
}
fn update_ime_position(&self, bounds: Bounds<ScaledPixels>) {
fn update_ime_position(&self, bounds: Bounds<Pixels>) {
let mut state = self.0.state.borrow_mut();
let client = state.client.clone();
drop(state);

View File

@@ -70,7 +70,9 @@ mod tests {
unsafe {
let image: id = msg_send![class!(NSImage), alloc];
image.initWithContentsOfFile_(NSString::alloc(nil).init_str("test.jpeg"));
image.initWithContentsOfFile_(
NSString::alloc(nil).init_str("/Users/rtfeldman/Downloads/test.jpeg"),
);
let _size = image.size();
let string = NSString::alloc(nil).init_str("Test String");

View File

@@ -3,9 +3,8 @@ use crate::{
platform::PlatformInputHandler, point, px, size, AnyWindowHandle, Bounds, DisplayLink,
ExternalPaths, FileDropEvent, ForegroundExecutor, KeyDownEvent, Keystroke, Modifiers,
ModifiersChangedEvent, MouseButton, MouseDownEvent, MouseMoveEvent, MouseUpEvent, Pixels,
PlatformAtlas, PlatformDisplay, PlatformInput, PlatformWindow, Point, PromptLevel,
ScaledPixels, Size, Timer, WindowAppearance, WindowBackgroundAppearance, WindowBounds,
WindowKind, WindowParams,
PlatformAtlas, PlatformDisplay, PlatformInput, PlatformWindow, Point, PromptLevel, Size, Timer,
WindowAppearance, WindowBackgroundAppearance, WindowBounds, WindowKind, WindowParams,
};
use block::ConcreteBlock;
use cocoa::{
@@ -1120,7 +1119,7 @@ impl PlatformWindow for MacWindow {
None
}
fn update_ime_position(&self, _bounds: Bounds<ScaledPixels>) {
fn update_ime_position(&self, _bounds: Bounds<Pixels>) {
unsafe {
let input_context: id = msg_send![class!(NSTextInputContext), currentInputContext];
let _: () = msg_send![input_context, invalidateCharacterCoordinates];

View File

@@ -1,8 +1,8 @@
use crate::{
AnyWindowHandle, AtlasKey, AtlasTextureId, AtlasTile, Bounds, DispatchEventResult, GPUSpecs,
Pixels, PlatformAtlas, PlatformDisplay, PlatformInput, PlatformInputHandler, PlatformWindow,
Point, ScaledPixels, Size, TestPlatform, TileId, WindowAppearance, WindowBackgroundAppearance,
WindowBounds, WindowParams,
Point, Size, TestPlatform, TileId, WindowAppearance, WindowBackgroundAppearance, WindowBounds,
WindowParams,
};
use collections::HashMap;
use parking_lot::Mutex;
@@ -274,7 +274,7 @@ impl PlatformWindow for TestWindow {
unimplemented!()
}
fn update_ime_position(&self, _bounds: Bounds<ScaledPixels>) {}
fn update_ime_position(&self, _bounds: Bounds<Pixels>) {}
fn gpu_specs(&self) -> Option<GPUSpecs> {
None

View File

@@ -685,7 +685,7 @@ impl PlatformWindow for WindowsWindow {
Some(self.0.state.borrow().renderer.gpu_specs())
}
fn update_ime_position(&self, _bounds: Bounds<ScaledPixels>) {
fn update_ime_position(&self, _bounds: Bounds<Pixels>) {
// todo(windows)
}
}

View File

@@ -3610,9 +3610,7 @@ impl<'a> WindowContext<'a> {
self.on_next_frame(|cx| {
if let Some(mut input_handler) = cx.window.platform_window.take_input_handler() {
if let Some(bounds) = input_handler.selected_bounds(cx) {
cx.window
.platform_window
.update_ime_position(bounds.scale(cx.scale_factor()));
cx.window.platform_window.update_ime_position(bounds);
}
cx.window.platform_window.set_input_handler(input_handler);
}

View File

@@ -10,46 +10,22 @@ use futures::future::BoxFuture;
use http::request::Builder;
#[cfg(feature = "test-support")]
use std::fmt;
use std::{
sync::{Arc, Mutex},
time::Duration,
};
use std::sync::{Arc, Mutex};
pub use url::Url;
pub struct ReadTimeout(pub Duration);
#[derive(Default, Debug, Clone)]
pub enum RedirectPolicy {
#[default]
NoFollow,
FollowLimit(u32),
FollowAll,
}
pub struct FollowRedirects(pub bool);
pub trait HttpRequestExt {
/// Set a read timeout on the request.
/// For isahc, this is the low_speed_timeout.
/// For other clients, this is the timeout used for read calls when reading the response.
/// In all cases this prevents servers from stalling completely, while still allowing them to send data slowly.
fn read_timeout(self, timeout: Duration) -> Self;
/// Whether or not to follow redirects
fn follow_redirects(self, follow: RedirectPolicy) -> Self;
}
impl HttpRequestExt for http::request::Builder {
fn read_timeout(self, timeout: Duration) -> Self {
self.extension(ReadTimeout(timeout))
}
fn follow_redirects(self, follow: RedirectPolicy) -> Self {
self.extension(follow)
}
}
pub trait HttpClient: 'static + Send + Sync {
fn send(
&self,
req: http::Request<AsyncBody>,
) -> BoxFuture<'static, Result<Response<AsyncBody>, anyhow::Error>> {
self.send_with_redirect_policy(req, false)
}
// TODO: Make a better API for this
fn send_with_redirect_policy(
&self,
req: Request<AsyncBody>,
follow_redirects: bool,
) -> BoxFuture<'static, Result<Response<AsyncBody>, anyhow::Error>>;
fn get<'a>(
@@ -58,17 +34,14 @@ pub trait HttpClient: 'static + Send + Sync {
body: AsyncBody,
follow_redirects: bool,
) -> BoxFuture<'a, Result<Response<AsyncBody>, anyhow::Error>> {
let request = Builder::new()
.uri(uri)
.follow_redirects(if follow_redirects {
RedirectPolicy::FollowAll
} else {
RedirectPolicy::NoFollow
})
.body(body);
let request = Builder::new().uri(uri).body(body);
match request {
Ok(request) => Box::pin(async move { self.send(request).await.map_err(Into::into) }),
Ok(request) => Box::pin(async move {
self.send_with_redirect_policy(request, follow_redirects)
.await
.map_err(Into::into)
}),
Err(e) => Box::pin(async move { Err(e.into()) }),
}
}
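For orientation, a minimal sketch of the trait pattern restored above, using hypothetical simplified types rather than the real `Request`/`Response` ones: implementors provide only `send_with_redirect_policy`, and `send` is a provided method that calls it with redirects disabled.

// Hypothetical, simplified mirror of the restored pattern (not the real API):
trait SimpleClient {
    fn send_with_redirect_policy(&self, url: &str, follow_redirects: bool) -> String;

    // Provided method: plain `send` never follows redirects.
    fn send(&self, url: &str) -> String {
        self.send_with_redirect_policy(url, false)
    }
}

struct LoggingClient;

impl SimpleClient for LoggingClient {
    fn send_with_redirect_policy(&self, url: &str, follow_redirects: bool) -> String {
        format!("GET {url} (follow_redirects: {follow_redirects})")
    }
}

Callers that do need redirects (for example the `get` helper above) call `send_with_redirect_policy` directly with `true`.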
@@ -119,11 +92,12 @@ impl HttpClientWithProxy {
}
impl HttpClient for HttpClientWithProxy {
fn send(
fn send_with_redirect_policy(
&self,
req: Request<AsyncBody>,
follow_redirects: bool,
) -> BoxFuture<'static, Result<Response<AsyncBody>, anyhow::Error>> {
self.client.send(req)
self.client.send_with_redirect_policy(req, follow_redirects)
}
fn proxy(&self) -> Option<&Uri> {
@@ -132,11 +106,12 @@ impl HttpClient for HttpClientWithProxy {
}
impl HttpClient for Arc<HttpClientWithProxy> {
fn send(
fn send_with_redirect_policy(
&self,
req: Request<AsyncBody>,
follow_redirects: bool,
) -> BoxFuture<'static, Result<Response<AsyncBody>, anyhow::Error>> {
self.client.send(req)
self.client.send_with_redirect_policy(req, follow_redirects)
}
fn proxy(&self) -> Option<&Uri> {
@@ -243,11 +218,12 @@ impl HttpClientWithUrl {
}
impl HttpClient for Arc<HttpClientWithUrl> {
fn send(
fn send_with_redirect_policy(
&self,
req: Request<AsyncBody>,
follow_redirects: bool,
) -> BoxFuture<'static, Result<Response<AsyncBody>, anyhow::Error>> {
self.client.send(req)
self.client.send_with_redirect_policy(req, follow_redirects)
}
fn proxy(&self) -> Option<&Uri> {
@@ -256,11 +232,12 @@ impl HttpClient for Arc<HttpClientWithUrl> {
}
impl HttpClient for HttpClientWithUrl {
fn send(
fn send_with_redirect_policy(
&self,
req: Request<AsyncBody>,
follow_redirects: bool,
) -> BoxFuture<'static, Result<Response<AsyncBody>, anyhow::Error>> {
self.client.send(req)
self.client.send_with_redirect_policy(req, follow_redirects)
}
fn proxy(&self) -> Option<&Uri> {
@@ -306,6 +283,14 @@ impl HttpClient for BlockedHttpClient {
fn proxy(&self) -> Option<&Uri> {
None
}
fn send_with_redirect_policy(
&self,
req: Request<AsyncBody>,
_: bool,
) -> BoxFuture<'static, Result<Response<AsyncBody>, anyhow::Error>> {
self.send(req)
}
}
#[cfg(feature = "test-support")]
@@ -367,9 +352,10 @@ impl fmt::Debug for FakeHttpClient {
#[cfg(feature = "test-support")]
impl HttpClient for FakeHttpClient {
fn send(
fn send_with_redirect_policy(
&self,
req: Request<AsyncBody>,
_follow_redirects: bool,
) -> BoxFuture<'static, Result<Response<AsyncBody>, anyhow::Error>> {
let future = (self.handler)(req);
future

View File

@@ -1,6 +1,7 @@
use std::{mem, sync::Arc, time::Duration};
use futures::future::BoxFuture;
use isahc::config::RedirectPolicy;
use util::maybe;
pub use isahc::config::Configurable;
@@ -8,10 +9,13 @@ pub struct IsahcHttpClient(isahc::HttpClient);
pub use http_client::*;
const DEFAULT_HTTP_TIMEOUT: Duration = Duration::from_secs(60);
impl IsahcHttpClient {
pub fn new(proxy: Option<Uri>, user_agent: Option<String>) -> Arc<IsahcHttpClient> {
let mut builder = isahc::HttpClient::builder()
.connect_timeout(Duration::from_secs(5))
.timeout(DEFAULT_HTTP_TIMEOUT)
.low_speed_timeout(100, Duration::from_secs(5))
.proxy(proxy.clone());
if let Some(agent) = user_agent {
@@ -35,29 +39,18 @@ impl HttpClient for IsahcHttpClient {
None
}
fn send(
fn send_with_redirect_policy(
&self,
req: http_client::http::Request<http_client::AsyncBody>,
follow_redirects: bool,
) -> BoxFuture<'static, Result<http_client::Response<http_client::AsyncBody>, anyhow::Error>>
{
let redirect_policy = req
.extensions()
.get::<http_client::RedirectPolicy>()
.cloned()
.unwrap_or_default();
let read_timeout = req
.extensions()
.get::<http_client::ReadTimeout>()
.map(|t| t.0);
let req = maybe!({
let (mut parts, body) = req.into_parts();
let mut builder = isahc::Request::builder()
.method(parts.method)
.uri(parts.uri)
.version(parts.version);
if let Some(read_timeout) = read_timeout {
builder = builder.low_speed_timeout(100, read_timeout);
}
let headers = builder.headers_mut()?;
mem::swap(headers, &mut parts.headers);
@@ -74,12 +67,10 @@ impl HttpClient for IsahcHttpClient {
};
builder
.redirect_policy(match redirect_policy {
http_client::RedirectPolicy::FollowAll => isahc::config::RedirectPolicy::Follow,
http_client::RedirectPolicy::FollowLimit(limit) => {
isahc::config::RedirectPolicy::Limit(limit)
}
http_client::RedirectPolicy::NoFollow => isahc::config::RedirectPolicy::None,
.redirect_policy(if follow_redirects {
RedirectPolicy::Follow
} else {
RedirectPolicy::None
})
.body(isahc_body)
.ok()

View File

@@ -87,11 +87,7 @@ pub type BufferRow = u32;
#[derive(Clone)]
enum BufferDiffBase {
Git(Rope),
PastBufferVersion {
buffer: Model<Buffer>,
rope: Rope,
operations_to_ignore: Vec<clock::Lamport>,
},
PastBufferVersion(Model<Buffer>, BufferSnapshot),
}
/// An in-memory representation of a source code file, including its text,
@@ -799,15 +795,19 @@ impl Buffer {
let this = cx.handle();
cx.new_model(|cx| {
let mut branch = Self {
diff_base: Some(BufferDiffBase::PastBufferVersion {
buffer: this.clone(),
rope: self.as_rope().clone(),
operations_to_ignore: Vec::new(),
}),
diff_base: Some(BufferDiffBase::PastBufferVersion(
this.clone(),
self.snapshot(),
)),
language: self.language.clone(),
has_conflict: self.has_conflict,
has_unsaved_edits: Cell::new(self.has_unsaved_edits.get_mut().clone()),
_subscriptions: vec![cx.subscribe(&this, Self::on_base_buffer_event)],
_subscriptions: vec![cx.subscribe(&this, |branch: &mut Self, _, event, cx| {
if let BufferEvent::Operation { operation, .. } = event {
branch.apply_ops([operation.clone()], cx);
branch.diff_base_version += 1;
}
})],
..Self::build(
self.text.branch(),
None,
@@ -823,74 +823,18 @@ impl Buffer {
})
}
/// Applies all of the changes in `branch` buffer that intersect the given `range`
/// to this buffer.
pub fn merge(
&mut self,
branch: &Model<Self>,
range: Option<Range<Anchor>>,
cx: &mut ModelContext<Self>,
) {
let edits = branch.read_with(cx, |branch, _| {
branch
.edits_since_in_range::<usize>(
&self.version,
range.unwrap_or(Anchor::MIN..Anchor::MAX),
pub fn merge(&mut self, branch: &Model<Self>, cx: &mut ModelContext<Self>) {
let branch = branch.read(cx);
let edits = branch
.edits_since::<usize>(&self.version)
.map(|edit| {
(
edit.old,
branch.text_for_range(edit.new).collect::<String>(),
)
.map(|edit| {
(
edit.old,
branch.text_for_range(edit.new).collect::<String>(),
)
})
.collect::<Vec<_>>()
});
let operation = self.edit(edits, None, cx);
// Prevent this operation from being reapplied to the branch.
branch.update(cx, |branch, cx| {
if let Some(BufferDiffBase::PastBufferVersion {
operations_to_ignore,
..
}) = &mut branch.diff_base
{
operations_to_ignore.extend(operation);
}
cx.emit(BufferEvent::Edited)
});
}
fn on_base_buffer_event(
&mut self,
_: Model<Buffer>,
event: &BufferEvent,
cx: &mut ModelContext<Self>,
) {
if let BufferEvent::Operation { operation, .. } = event {
if let Some(BufferDiffBase::PastBufferVersion {
operations_to_ignore,
..
}) = &mut self.diff_base
{
let mut is_ignored = false;
if let Operation::Buffer(text::Operation::Edit(buffer_operation)) = &operation {
operations_to_ignore.retain(|operation_to_ignore| {
match buffer_operation.timestamp.cmp(&operation_to_ignore) {
Ordering::Less => true,
Ordering::Equal => {
is_ignored = true;
false
}
Ordering::Greater => false,
}
});
}
if !is_ignored {
self.apply_ops([operation.clone()], cx);
self.diff_base_version += 1;
}
}
}
})
.collect::<Vec<_>>();
self.edit(edits, None, cx);
}
#[cfg(test)]
@@ -1073,8 +1017,9 @@ impl Buffer {
/// Returns the current diff base, see [Buffer::set_diff_base].
pub fn diff_base(&self) -> Option<&Rope> {
match self.diff_base.as_ref()? {
BufferDiffBase::Git(rope) | BufferDiffBase::PastBufferVersion { rope, .. } => {
Some(rope)
BufferDiffBase::Git(rope) => Some(rope),
BufferDiffBase::PastBufferVersion(_, buffer_snapshot) => {
Some(buffer_snapshot.as_rope())
}
}
}
@@ -1105,36 +1050,29 @@ impl Buffer {
self.diff_base_version
}
pub fn diff_base_buffer(&self) -> Option<Model<Self>> {
match self.diff_base.as_ref()? {
BufferDiffBase::Git(_) => None,
BufferDiffBase::PastBufferVersion { buffer, .. } => Some(buffer.clone()),
}
}
/// Recomputes the diff.
pub fn recalculate_diff(&mut self, cx: &mut ModelContext<Self>) -> Option<Task<()>> {
let diff_base_rope = match self.diff_base.as_ref()? {
let diff_base_rope = match self.diff_base.as_mut()? {
BufferDiffBase::Git(rope) => rope.clone(),
BufferDiffBase::PastBufferVersion { buffer, .. } => buffer.read(cx).as_rope().clone(),
BufferDiffBase::PastBufferVersion(base_buffer, base_buffer_snapshot) => {
let new_base_snapshot = base_buffer.read(cx).snapshot();
*base_buffer_snapshot = new_base_snapshot;
base_buffer_snapshot.as_rope().clone()
}
};
let snapshot = self.snapshot();
let mut diff = self.git_diff.clone();
let diff = cx.background_executor().spawn(async move {
diff.update(&diff_base_rope, &snapshot).await;
(diff, diff_base_rope)
diff
});
Some(cx.spawn(|this, mut cx| async move {
let (buffer_diff, diff_base_rope) = diff.await;
let buffer_diff = diff.await;
this.update(&mut cx, |this, cx| {
this.git_diff = buffer_diff;
this.non_text_state_update_count += 1;
if let Some(BufferDiffBase::PastBufferVersion { rope, .. }) = &mut this.diff_base {
*rope = diff_base_rope;
cx.emit(BufferEvent::DiffBaseChanged);
}
cx.emit(BufferEvent::DiffUpdated);
})
.ok();

View File

@@ -2413,98 +2413,80 @@ fn test_branch_and_merge(cx: &mut TestAppContext) {
});
// Edits to the branch are not applied to the base.
branch_buffer.update(cx, |branch_buffer, cx| {
branch_buffer.edit(
[
(Point::new(1, 0)..Point::new(1, 0), "1.5\n"),
(Point::new(2, 0)..Point::new(2, 5), "THREE"),
],
branch_buffer.update(cx, |buffer, cx| {
buffer.edit(
[(Point::new(1, 0)..Point::new(1, 0), "ONE_POINT_FIVE\n")],
None,
cx,
)
});
branch_buffer.read_with(cx, |branch_buffer, cx| {
assert_eq!(base_buffer.read(cx).text(), "one\ntwo\nthree\n");
assert_eq!(branch_buffer.text(), "one\n1.5\ntwo\nTHREE\n");
assert_eq!(branch_buffer.text(), "one\nONE_POINT_FIVE\ntwo\nthree\n");
});
// The branch buffer maintains a diff with respect to its base buffer.
start_recalculating_diff(&branch_buffer, cx);
cx.run_until_parked();
assert_diff_hunks(
&branch_buffer,
cx,
&[(1..2, "", "1.5\n"), (3..4, "three\n", "THREE\n")],
);
// Edits to the base are applied to the branch.
base_buffer.update(cx, |buffer, cx| {
buffer.edit([(Point::new(0, 0)..Point::new(0, 0), "ZERO\n")], None, cx)
});
branch_buffer.read_with(cx, |branch_buffer, cx| {
assert_eq!(base_buffer.read(cx).text(), "ZERO\none\ntwo\nthree\n");
assert_eq!(branch_buffer.text(), "ZERO\none\n1.5\ntwo\nTHREE\n");
assert_eq!(
branch_buffer.text(),
"ZERO\none\nONE_POINT_FIVE\ntwo\nthree\n"
);
});
// Until the git diff recalculation is complete, the git diff references
// the previous content of the base buffer, so that it stays in sync.
start_recalculating_diff(&branch_buffer, cx);
assert_diff_hunks(
&branch_buffer,
cx,
&[(2..3, "", "1.5\n"), (4..5, "three\n", "THREE\n")],
);
cx.run_until_parked();
assert_diff_hunks(
&branch_buffer,
cx,
&[(2..3, "", "1.5\n"), (4..5, "three\n", "THREE\n")],
);
assert_diff_hunks(&branch_buffer, cx, &[(2..3, "", "ONE_POINT_FIVE\n")]);
// Edits to any replica of the base are applied to the branch.
base_buffer_replica.update(cx, |buffer, cx| {
buffer.edit([(Point::new(2, 0)..Point::new(2, 0), "2.5\n")], None, cx)
buffer.edit(
[(Point::new(2, 0)..Point::new(2, 0), "TWO_POINT_FIVE\n")],
None,
cx,
)
});
branch_buffer.read_with(cx, |branch_buffer, cx| {
assert_eq!(base_buffer.read(cx).text(), "ZERO\none\ntwo\n2.5\nthree\n");
assert_eq!(branch_buffer.text(), "ZERO\none\n1.5\ntwo\n2.5\nTHREE\n");
assert_eq!(
base_buffer.read(cx).text(),
"ZERO\none\ntwo\nTWO_POINT_FIVE\nthree\n"
);
assert_eq!(
branch_buffer.text(),
"ZERO\none\nONE_POINT_FIVE\ntwo\nTWO_POINT_FIVE\nthree\n"
);
});
// Merging the branch applies all of its changes to the base.
base_buffer.update(cx, |base_buffer, cx| {
base_buffer.merge(&branch_buffer, None, cx);
});
branch_buffer.update(cx, |branch_buffer, cx| {
base_buffer.merge(&branch_buffer, cx);
assert_eq!(
base_buffer.read(cx).text(),
"ZERO\none\n1.5\ntwo\n2.5\nTHREE\n"
base_buffer.text(),
"ZERO\none\nONE_POINT_FIVE\ntwo\nTWO_POINT_FIVE\nthree\n"
);
assert_eq!(branch_buffer.text(), "ZERO\none\n1.5\ntwo\n2.5\nTHREE\n");
});
}
fn start_recalculating_diff(buffer: &Model<Buffer>, cx: &mut TestAppContext) {
buffer
.update(cx, |buffer, cx| buffer.recalculate_diff(cx).unwrap())
.detach();
}
#[track_caller]
fn assert_diff_hunks(
buffer: &Model<Buffer>,
cx: &mut TestAppContext,
expected_hunks: &[(Range<u32>, &str, &str)],
) {
let (snapshot, diff_base) = buffer.read_with(cx, |buffer, _| {
(buffer.snapshot(), buffer.diff_base().unwrap().to_string())
buffer
.update(cx, |buffer, cx| buffer.recalculate_diff(cx).unwrap())
.detach();
cx.executor().run_until_parked();
buffer.read_with(cx, |buffer, _| {
let snapshot = buffer.snapshot();
assert_hunks(
snapshot.git_diff_hunks_intersecting_range(Anchor::MIN..Anchor::MAX),
&snapshot,
&buffer.diff_base().unwrap().to_string(),
expected_hunks,
);
});
assert_hunks(
snapshot.git_diff_hunks_intersecting_range(Anchor::MIN..Anchor::MAX),
&snapshot,
&diff_base,
expected_hunks,
);
}
#[gpui::test(iterations = 100)]

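The test above pins down the branch-buffer contract: edits to the base show up in the branch, edits to the branch stay local, and an explicit merge copies the branch's changes into the base. A toy, single-threaded model of that contract, using stand-in types rather than the real Buffer/branch API:

struct BaseBuffer {
    lines: Vec<String>,
}

struct BranchBuffer {
    // Branch-only lines, each anchored before a particular base line
    // (a crude stand-in for the real anchor machinery).
    local_inserts: Vec<(String, String)>,
}

impl BranchBuffer {
    // The branch reads through the base, so base edits are always visible.
    fn text(&self, base: &BaseBuffer) -> String {
        let mut out = String::new();
        for line in &base.lines {
            for (anchor, inserted) in &self.local_inserts {
                if anchor == line {
                    out.push_str(inserted);
                    out.push('\n');
                }
            }
            out.push_str(line);
            out.push('\n');
        }
        out
    }

    // Merging applies the branch's changes to the base.
    fn merge_into(&mut self, base: &mut BaseBuffer) {
        for (anchor, inserted) in self.local_inserts.drain(..) {
            if let Some(at) = base.lines.iter().position(|l| *l == anchor) {
                base.lines.insert(at, inserted);
            }
        }
    }
}

fn main() {
    let mut base = BaseBuffer {
        lines: vec!["one".into(), "two".into(), "three".into()],
    };
    let mut branch = BranchBuffer {
        local_inserts: vec![("two".into(), "ONE_POINT_FIVE".into())],
    };

    // Branch edits are not applied to the base.
    assert_eq!(base.lines.join("\n"), "one\ntwo\nthree");
    assert_eq!(branch.text(&base), "one\nONE_POINT_FIVE\ntwo\nthree\n");

    // Edits to the base show up in the branch.
    base.lines.insert(0, "ZERO".into());
    assert_eq!(branch.text(&base), "ZERO\none\nONE_POINT_FIVE\ntwo\nthree\n");

    // Merging applies the branch's changes to the base.
    branch.merge_into(&mut base);
    assert_eq!(base.lines.join("\n"), "ZERO\none\nONE_POINT_FIVE\ntwo\nthree");
}
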
View File

@@ -224,7 +224,7 @@ impl DiagnosticSet {
impl sum_tree::Item for DiagnosticEntry<Anchor> {
type Summary = Summary;
fn summary(&self, _cx: &text::BufferSnapshot) -> Self::Summary {
fn summary(&self) -> Self::Summary {
Summary {
start: self.range.start,
end: self.range.end,

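This hunk (and the matching SyntaxLayerEntry hunk further down) only changes the signature of summary(), the hook a sum-tree uses to aggregate per-item data upward. A toy illustration of that Item/Summary relationship, not the real sum_tree API:

// Toy versions of the Item/Summary pair; a real sum-tree keeps these
// summaries on interior nodes so queries can skip whole subtrees.
trait Item {
    type Summary;
    fn summary(&self) -> Self::Summary;
}

#[derive(Clone, Copy, Debug)]
struct Span {
    start: u32,
    end: u32,
}

#[derive(Clone, Copy, Debug)]
struct SpanSummary {
    min_start: u32,
    max_end: u32,
    count: usize,
}

impl Item for Span {
    type Summary = SpanSummary;

    fn summary(&self) -> SpanSummary {
        SpanSummary {
            min_start: self.start,
            max_end: self.end,
            count: 1,
        }
    }
}

fn main() {
    let items = [Span { start: 0, end: 4 }, Span { start: 10, end: 12 }];
    // Aggregate the per-item summaries the way a tree node would.
    let total = items.iter().map(|span| span.summary()).fold(
        SpanSummary { min_start: u32::MAX, max_end: 0, count: 0 },
        |acc, s| SpanSummary {
            min_start: acc.min_start.min(s.min_start),
            max_end: acc.max_end.max(s.max_end),
            count: acc.count + s.count,
        },
    );
    println!("{total:?}");
}
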
View File

@@ -313,10 +313,6 @@ pub trait LspAdapterDelegate: Send + Sync {
fn update_status(&self, language: LanguageServerName, status: LanguageServerBinaryStatus);
async fn language_server_download_dir(&self, name: &LanguageServerName) -> Option<Arc<Path>>;
async fn npm_package_installed_version(
&self,
package_name: &str,
) -> Result<Option<(PathBuf, String)>>;
async fn which(&self, command: &OsStr) -> Option<PathBuf>;
async fn shell_env(&self) -> HashMap<String, String>;
async fn read_text_file(&self, path: PathBuf) -> Result<String>;

View File

@@ -113,9 +113,6 @@ pub struct LanguageSettings {
pub use_autoclose: bool,
/// Whether to automatically surround text with brackets.
pub use_auto_surround: bool,
/// Whether to use additional LSP queries to format (and amend) the code after
/// every "trigger" symbol input, defined by LSP server capabilities.
pub use_on_type_format: bool,
// Controls how the editor handles the autoclosed characters.
pub always_treat_brackets_as_autoclosed: bool,
/// Which code actions to run on save
@@ -336,11 +333,6 @@ pub struct LanguageSettingsContent {
///
/// Default: false
pub always_treat_brackets_as_autoclosed: Option<bool>,
/// Whether to use additional LSP queries to format (and amend) the code after
/// every "trigger" symbol input, defined by LSP server capabilities.
///
/// Default: true
pub use_on_type_format: Option<bool>,
/// Which code actions to run on save after the formatter.
/// These are not run if formatting is off.
///
@@ -379,16 +371,15 @@ pub struct FeaturesContent {
#[derive(Copy, Clone, Debug, Serialize, Deserialize, PartialEq, Eq, JsonSchema)]
#[serde(rename_all = "snake_case")]
pub enum SoftWrap {
/// Prefer a single line generally, unless an overly long line is encountered.
/// Do not soft wrap.
None,
/// Deprecated: use None instead. Left to avoid breaking existing users' configs.

/// Prefer a single line generally, unless an overly long line is encountered.
PreferLine,
/// Soft wrap lines that exceed the editor width.
/// Soft wrap lines that exceed the editor width
EditorWidth,
/// Soft wrap lines at the preferred line length.
/// Soft wrap lines at the preferred line length
PreferredLineLength,
/// Soft wrap line at the preferred line length or the editor width (whichever is smaller).
/// Soft wrap line at the preferred line length or the editor width (whichever is smaller)
Bounded,
}
@@ -661,7 +652,7 @@ pub enum Formatter {
/// The external program to run.
command: Arc<str>,
/// The arguments to pass to the program.
arguments: Option<Arc<[String]>>,
arguments: Arc<[String]>,
},
/// Files should be formatted using code actions executed by language servers.
CodeActions(HashMap<String, bool>),
@@ -1054,7 +1045,6 @@ fn merge_settings(settings: &mut LanguageSettings, src: &LanguageSettingsContent
merge(&mut settings.soft_wrap, src.soft_wrap);
merge(&mut settings.use_autoclose, src.use_autoclose);
merge(&mut settings.use_auto_surround, src.use_auto_surround);
merge(&mut settings.use_on_type_format, src.use_on_type_format);
merge(
&mut settings.always_treat_brackets_as_autoclosed,
src.always_treat_brackets_as_autoclosed,

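The merge_settings hunk drops the use_on_type_format call along with the setting itself. The merge helper it relies on is presumably just an overlay: an optional per-language override replaces the resolved default only when it is present. A minimal sketch of that pattern with two of the fields from the hunk (the struct shapes here are trimmed stand-ins):

// Trimmed stand-ins for the settings types; only two fields are modeled.
#[derive(Debug)]
struct LanguageSettings {
    use_autoclose: bool,
    use_auto_surround: bool,
}

#[derive(Default)]
struct LanguageSettingsContent {
    use_autoclose: Option<bool>,
    use_auto_surround: Option<bool>,
}

// Overlay: replace the resolved value only when an override is present.
fn merge<T>(target: &mut T, value: Option<T>) {
    if let Some(value) = value {
        *target = value;
    }
}

fn merge_settings(settings: &mut LanguageSettings, src: &LanguageSettingsContent) {
    merge(&mut settings.use_autoclose, src.use_autoclose);
    merge(&mut settings.use_auto_surround, src.use_auto_surround);
}

fn main() {
    let mut settings = LanguageSettings {
        use_autoclose: true,
        use_auto_surround: true,
    };
    // Only the fields the user actually set are overridden.
    let overrides = LanguageSettingsContent {
        use_autoclose: Some(false),
        ..Default::default()
    };
    merge_settings(&mut settings, &overrides);
    println!("{settings:?}"); // use_autoclose: false, use_auto_surround: true
}
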
View File

@@ -1739,7 +1739,7 @@ impl<'a> SeekTarget<'a, SyntaxLayerSummary, SyntaxLayerSummary>
impl sum_tree::Item for SyntaxLayerEntry {
type Summary = SyntaxLayerSummary;
fn summary(&self, _cx: &BufferSnapshot) -> Self::Summary {
fn summary(&self) -> Self::Summary {
SyntaxLayerSummary {
min_depth: self.depth,
max_depth: self.depth,

View File

@@ -32,6 +32,7 @@ futures.workspace = true
google_ai = { workspace = true, features = ["schemars"] }
gpui.workspace = true
http_client.workspace = true
isahc.workspace = true
inline_completion_button.workspace = true
log.workspace = true
menu.workspace = true

View File

@@ -12,6 +12,7 @@ pub enum CloudModel {
Anthropic(anthropic::Model),
OpenAi(open_ai::Model),
Google(google_ai::Model),
Zed(ZedModel),
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, JsonSchema, EnumIter)]
@@ -20,6 +21,26 @@ pub enum ZedModel {
Qwen2_7bInstruct,
}
impl ZedModel {
pub fn id(&self) -> &str {
match self {
ZedModel::Qwen2_7bInstruct => "Qwen/Qwen2-7B-Instruct",
}
}
pub fn display_name(&self) -> &str {
match self {
ZedModel::Qwen2_7bInstruct => "Qwen2 7B Instruct",
}
}
pub fn max_token_count(&self) -> usize {
match self {
ZedModel::Qwen2_7bInstruct => 28000,
}
}
}
impl Default for CloudModel {
fn default() -> Self {
Self::Anthropic(anthropic::Model::default())
@@ -32,6 +53,7 @@ impl CloudModel {
Self::Anthropic(model) => model.id(),
Self::OpenAi(model) => model.id(),
Self::Google(model) => model.id(),
Self::Zed(model) => model.id(),
}
}
@@ -40,6 +62,7 @@ impl CloudModel {
Self::Anthropic(model) => model.display_name(),
Self::OpenAi(model) => model.display_name(),
Self::Google(model) => model.display_name(),
Self::Zed(model) => model.display_name(),
}
}
@@ -55,6 +78,7 @@ impl CloudModel {
Self::Anthropic(model) => model.max_token_count(),
Self::OpenAi(model) => model.max_token_count(),
Self::Google(model) => model.max_token_count(),
Self::Zed(model) => model.max_token_count(),
}
}
@@ -91,6 +115,9 @@ impl CloudModel {
LanguageModelAvailability::RequiresPlan(Plan::ZedPro)
}
},
Self::Zed(model) => match model {
ZedModel::Qwen2_7bInstruct => LanguageModelAvailability::RequiresPlan(Plan::ZedPro),
},
}
}
}

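The new Zed(ZedModel) variant has to be handled in every place CloudModel is matched (id, display_name, max_token_count, availability). A trimmed-down sketch of that dispatch using only the values visible in the hunk above, with the other provider variants elided:

// Trimmed stand-ins: only the Zed variant from the hunk above is modeled.
enum ZedModel {
    Qwen2_7bInstruct,
}

enum CloudModel {
    Zed(ZedModel), // Anthropic(..), OpenAi(..), Google(..) elided
}

impl ZedModel {
    fn id(&self) -> &str {
        match self {
            ZedModel::Qwen2_7bInstruct => "Qwen/Qwen2-7B-Instruct",
        }
    }

    fn max_token_count(&self) -> usize {
        match self {
            ZedModel::Qwen2_7bInstruct => 28000,
        }
    }
}

impl CloudModel {
    // Each CloudModel method simply forwards to the wrapped provider model.
    fn id(&self) -> &str {
        match self {
            CloudModel::Zed(model) => model.id(),
        }
    }

    fn max_token_count(&self) -> usize {
        match self {
            CloudModel::Zed(model) => model.max_token_count(),
        }
    }
}

fn main() {
    let model = CloudModel::Zed(ZedModel::Qwen2_7bInstruct);
    println!("{} supports up to {} tokens", model.id(), model.max_token_count());
}
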
View File

@@ -3,7 +3,7 @@ use crate::provider::anthropic::map_to_language_model_completion_events;
use crate::{
settings::AllLanguageModelSettings, CloudModel, LanguageModel, LanguageModelCacheConfiguration,
LanguageModelId, LanguageModelName, LanguageModelProviderId, LanguageModelProviderName,
LanguageModelProviderState, LanguageModelRequest, RateLimiter,
LanguageModelProviderState, LanguageModelRequest, RateLimiter, ZedModel,
};
use anthropic::AnthropicError;
use anyhow::{anyhow, Result};
@@ -18,7 +18,8 @@ use gpui::{
AnyElement, AnyView, AppContext, AsyncAppContext, FontWeight, Model, ModelContext,
Subscription, Task,
};
use http_client::{AsyncBody, HttpClient, HttpRequestExt, Method, Response};
use http_client::{AsyncBody, HttpClient, Method, Response};
use isahc::config::Configurable;
use schemars::JsonSchema;
use serde::{de::DeserializeOwned, Deserialize, Serialize};
use serde_json::value::RawValue;
@@ -219,6 +220,9 @@ impl LanguageModelProvider for CloudLanguageModelProvider {
models.insert(model.id().to_string(), CloudModel::Google(model));
}
}
for model in ZedModel::iter() {
models.insert(model.id().to_string(), CloudModel::Zed(model));
}
} else {
models.insert(
anthropic::Model::Claude3_5Sonnet.id().to_string(),
@@ -392,7 +396,7 @@ impl CloudLanguageModel {
let response = loop {
let mut request_builder = http_client::Request::builder();
if let Some(low_speed_timeout) = low_speed_timeout {
request_builder = request_builder.read_timeout(low_speed_timeout);
request_builder = request_builder.low_speed_timeout(100, low_speed_timeout);
};
let request = request_builder
.method(Method::POST)
@@ -469,7 +473,7 @@ impl LanguageModel for CloudLanguageModel {
min_total_token: cache.min_total_token,
})
}
CloudModel::OpenAi(_) | CloudModel::Google(_) => None,
CloudModel::OpenAi(_) | CloudModel::Google(_) | CloudModel::Zed(_) => None,
}
}
@@ -499,6 +503,9 @@ impl LanguageModel for CloudLanguageModel {
}
.boxed()
}
CloudModel::Zed(_) => {
count_open_ai_tokens(request, open_ai::Model::ThreePointFiveTurbo, cx)
}
}
}
@@ -597,6 +604,35 @@ impl LanguageModel for CloudLanguageModel {
}
.boxed()
}
CloudModel::Zed(model) => {
let client = self.client.clone();
let mut request = request.into_open_ai(model.id().into(), None);
request.max_tokens = Some(4000);
let llm_api_token = self.llm_api_token.clone();
let future = self.request_limiter.stream(async move {
let response = Self::perform_llm_completion(
client.clone(),
llm_api_token,
PerformCompletionParams {
provider: client::LanguageModelProvider::Zed,
model: request.model.clone(),
provider_request: RawValue::from_string(serde_json::to_string(
&request,
)?)?,
},
None,
)
.await?;
Ok(open_ai::extract_text_from_events(response_lines(response)))
});
async move {
Ok(future
.await?
.map(|result| result.map(LanguageModelCompletionEvent::Text))
.boxed())
}
.boxed()
}
}
}
@@ -700,6 +736,51 @@ impl LanguageModel for CloudLanguageModel {
CloudModel::Google(_) => {
future::ready(Err(anyhow!("tool use not implemented for Google AI"))).boxed()
}
CloudModel::Zed(model) => {
// All Zed models are OpenAI-based at the time of writing.
let mut request = request.into_open_ai(model.id().into(), None);
request.tool_choice = Some(open_ai::ToolChoice::Other(
open_ai::ToolDefinition::Function {
function: open_ai::FunctionDefinition {
name: tool_name.clone(),
description: None,
parameters: None,
},
},
));
request.tools = vec![open_ai::ToolDefinition::Function {
function: open_ai::FunctionDefinition {
name: tool_name.clone(),
description: Some(tool_description),
parameters: Some(input_schema),
},
}];
self.request_limiter
.run(async move {
let response = Self::perform_llm_completion(
client.clone(),
llm_api_token,
PerformCompletionParams {
provider: client::LanguageModelProvider::Zed,
model: request.model.clone(),
provider_request: RawValue::from_string(serde_json::to_string(
&request,
)?)?,
},
None,
)
.await?;
Ok(open_ai::extract_tool_args_from_events(
tool_name,
Box::pin(response_lines(response)),
)
.await?
.boxed())
})
.boxed()
}
}
}
}

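The request-building change in this file swaps the http_client read_timeout extension for isahc's low_speed_timeout, matching the Cargo.toml hunk that adds the isahc dependency. A standalone sketch of that option, assuming isahc 1.x and a placeholder URL; the 100 bytes/sec floor mirrors the call in the hunk, while the 600-second window and request body are arbitrary illustrations:

use std::time::Duration;

use isahc::config::Configurable; // provides low_speed_timeout on the request builder
use isahc::{ReadResponseExt, Request, RequestExt};

fn main() -> Result<(), Box<dyn std::error::Error>> {
    // Abort the transfer only if throughput stays below 100 bytes/sec for the
    // whole timeout window; this is the knob the completion request configures.
    let mut response = Request::post("https://example.invalid/completions")
        .low_speed_timeout(100, Duration::from_secs(600))
        .header("content-type", "application/json")
        .body(r#"{"prompt":"hello"}"#)?
        .send()?;

    println!("status: {}", response.status());
    println!("body: {}", response.text()?);
    Ok(())
}
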
View File

@@ -17,7 +17,7 @@ use ui::{prelude::*, Button, Checkbox, ContextMenu, Label, PopoverMenu, Selectio
use workspace::{
item::{Item, ItemHandle},
searchable::{SearchEvent, SearchableItem, SearchableItemHandle},
SplitDirection, ToolbarItemEvent, ToolbarItemLocation, ToolbarItemView, Workspace, WorkspaceId,
ToolbarItemEvent, ToolbarItemLocation, ToolbarItemView, Workspace,
};
const SEND_LINE: &str = "// Send:";
@@ -194,11 +194,12 @@ pub fn init(cx: &mut AppContext) {
workspace.register_action(move |workspace, _: &OpenLanguageServerLogs, cx| {
let project = workspace.project().read(cx);
if project.is_local() {
workspace.split_item(
SplitDirection::Right,
workspace.add_item_to_active_pane(
Box::new(cx.new_view(|cx| {
LspLogView::new(workspace.project().clone(), log_store.clone(), cx)
})),
None,
true,
cx,
);
}
@@ -911,27 +912,6 @@ impl Item for LspLogView {
fn as_searchable(&self, handle: &View<Self>) -> Option<Box<dyn SearchableItemHandle>> {
Some(Box::new(handle.clone()))
}
fn clone_on_split(
&self,
_workspace_id: Option<WorkspaceId>,
cx: &mut ViewContext<Self>,
) -> Option<View<Self>>
where
Self: Sized,
{
Some(cx.new_view(|cx| {
let mut new_view = Self::new(self.project.clone(), self.log_store.clone(), cx);
if let Some(server_id) = self.current_server_id {
match self.active_entry_kind {
LogKind::Rpc => new_view.show_rpc_trace_for_server(server_id, cx),
LogKind::Trace => new_view.show_trace_for_server(server_id, cx),
LogKind::Logs => new_view.show_logs_for_server(server_id, cx),
}
}
new_view
}))
}
}
impl SearchableItem for LspLogView {

Some files were not shown because too many files have changed in this diff.