From 7d380e9e187d732a37daa88db85cae48f99af33d Mon Sep 17 00:00:00 2001 From: Marshall Bowers Date: Mon, 7 Oct 2024 14:31:23 -0400 Subject: [PATCH 01/35] Temporarily prevent deploying collab to production (#18825) This PR adds a temporary measure to prevent deploying collab to production, while we investigate some issues stemming from the HTTP client change. Release Notes: - N/A --- .github/workflows/deploy_collab.yml | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/.github/workflows/deploy_collab.yml b/.github/workflows/deploy_collab.yml index 1e6e6cf280..cf3ffb4dbc 100644 --- a/.github/workflows/deploy_collab.yml +++ b/.github/workflows/deploy_collab.yml @@ -3,7 +3,8 @@ name: Publish Collab Server Image on: push: tags: - - collab-production + # Pause production deploys while we investigate an issue. + # - collab-production - collab-staging env: From 8cdb9d6b8590eb5565b858e10d6ea37e1648b737 Mon Sep 17 00:00:00 2001 From: Mikayla Maki Date: Mon, 7 Oct 2024 12:03:02 -0700 Subject: [PATCH 02/35] Fix a bug where HTTP errors where being reported incorrectly (#18828) Release Notes: - N/A --------- Co-authored-by: Marshall Bowers --- crates/gpui/src/app.rs | 4 +++ crates/http_client/src/http_client.rs | 27 +++++++++++++++++++++ crates/reqwest_client/src/reqwest_client.rs | 6 ++++- crates/ureq_client/src/ureq_client.rs | 15 +++++++++++- 4 files changed, 50 insertions(+), 2 deletions(-) diff --git a/crates/gpui/src/app.rs b/crates/gpui/src/app.rs index bba5f857b4..f81a2092d5 100644 --- a/crates/gpui/src/app.rs +++ b/crates/gpui/src/app.rs @@ -1533,4 +1533,8 @@ impl HttpClient for NullHttpClient { fn proxy(&self) -> Option<&http_client::Uri> { None } + + fn type_name(&self) -> &'static str { + type_name::() + } } diff --git a/crates/http_client/src/http_client.rs b/crates/http_client/src/http_client.rs index 015c73a448..bf1046d88e 100644 --- a/crates/http_client/src/http_client.rs +++ b/crates/http_client/src/http_client.rs @@ -11,6 +11,7 @@ use 
http::request::Builder; #[cfg(feature = "test-support")] use std::fmt; use std::{ + any::type_name, sync::{Arc, LazyLock, Mutex}, time::Duration, }; @@ -72,6 +73,8 @@ impl HttpRequestExt for http::request::Builder { } pub trait HttpClient: 'static + Send + Sync { + fn type_name(&self) -> &'static str; + fn send( &self, req: http::Request, @@ -154,6 +157,10 @@ impl HttpClient for HttpClientWithProxy { fn proxy(&self) -> Option<&Uri> { self.proxy.as_ref() } + + fn type_name(&self) -> &'static str { + self.client.type_name() + } } impl HttpClient for Arc { @@ -167,6 +174,10 @@ impl HttpClient for Arc { fn proxy(&self) -> Option<&Uri> { self.proxy.as_ref() } + + fn type_name(&self) -> &'static str { + self.client.type_name() + } } /// An [`HttpClient`] that has a base URL. @@ -278,6 +289,10 @@ impl HttpClient for Arc { fn proxy(&self) -> Option<&Uri> { self.client.proxy.as_ref() } + + fn type_name(&self) -> &'static str { + self.client.type_name() + } } impl HttpClient for HttpClientWithUrl { @@ -291,6 +306,10 @@ impl HttpClient for HttpClientWithUrl { fn proxy(&self) -> Option<&Uri> { self.client.proxy.as_ref() } + + fn type_name(&self) -> &'static str { + self.client.type_name() + } } pub fn read_proxy_from_env() -> Option { @@ -331,6 +350,10 @@ impl HttpClient for BlockedHttpClient { fn proxy(&self) -> Option<&Uri> { None } + + fn type_name(&self) -> &'static str { + type_name::() + } } #[cfg(feature = "test-support")] @@ -403,4 +426,8 @@ impl HttpClient for FakeHttpClient { fn proxy(&self) -> Option<&Uri> { None } + + fn type_name(&self) -> &'static str { + type_name::() + } } diff --git a/crates/reqwest_client/src/reqwest_client.rs b/crates/reqwest_client/src/reqwest_client.rs index 6e84c58954..b5c274d599 100644 --- a/crates/reqwest_client/src/reqwest_client.rs +++ b/crates/reqwest_client/src/reqwest_client.rs @@ -1,4 +1,4 @@ -use std::{borrow::Cow, io::Read, pin::Pin, task::Poll}; +use std::{any::type_name, borrow::Cow, io::Read, pin::Pin, task::Poll}; use 
anyhow::anyhow; use bytes::{BufMut, Bytes, BytesMut}; @@ -183,6 +183,10 @@ impl http_client::HttpClient for ReqwestClient { None } + fn type_name(&self) -> &'static str { + type_name::() + } + fn send( &self, req: http::Request, diff --git a/crates/ureq_client/src/ureq_client.rs b/crates/ureq_client/src/ureq_client.rs index 8951e80ac2..d3d8d3aedc 100644 --- a/crates/ureq_client/src/ureq_client.rs +++ b/crates/ureq_client/src/ureq_client.rs @@ -1,3 +1,4 @@ +use std::any::type_name; use std::collections::HashMap; use std::io::Read; use std::sync::Arc; @@ -74,6 +75,10 @@ impl HttpClient for UreqClient { self.proxy_url.as_ref() } + fn type_name(&self) -> &'static str { + type_name::() + } + fn send( &self, request: http::Request, @@ -100,7 +105,15 @@ impl HttpClient for UreqClient { self.background_executor .spawn(async move { - let response = req.send(body)?; + let response = match req.send(body) { + Ok(response) => response, + Err(e) => match e { + ureq::Error::Status(_, response) => response, + ureq::Error::Transport(transport) => { + anyhow::bail!(transport) + } + }, + }; let mut builder = http::Response::builder() .status(response.status()) From 5387a6f7f93e7b7ddd9877e00efd89195b469e4c Mon Sep 17 00:00:00 2001 From: Mikayla Maki Date: Mon, 7 Oct 2024 13:03:26 -0700 Subject: [PATCH 03/35] Fix an issue where LLM requests would block forever (#18830) Release Notes: - N/A --------- Co-authored-by: Marshall Bowers --- .github/workflows/deploy_collab.yml | 3 +-- crates/reqwest_client/src/reqwest_client.rs | 27 +++++++++++++++++++-- 2 files changed, 26 insertions(+), 4 deletions(-) diff --git a/.github/workflows/deploy_collab.yml b/.github/workflows/deploy_collab.yml index cf3ffb4dbc..1e6e6cf280 100644 --- a/.github/workflows/deploy_collab.yml +++ b/.github/workflows/deploy_collab.yml @@ -3,8 +3,7 @@ name: Publish Collab Server Image on: push: tags: - # Pause production deploys while we investigate an issue. 
- # - collab-production + - collab-production - collab-staging env: diff --git a/crates/reqwest_client/src/reqwest_client.rs b/crates/reqwest_client/src/reqwest_client.rs index b5c274d599..f8698b9080 100644 --- a/crates/reqwest_client/src/reqwest_client.rs +++ b/crates/reqwest_client/src/reqwest_client.rs @@ -163,8 +163,12 @@ impl futures::stream::Stream for WrappedBody { WrappedBodyInner::SyncReader(cursor) => { let mut buf = Vec::new(); match cursor.read_to_end(&mut buf) { - Ok(_) => { - return Poll::Ready(Some(Ok(Bytes::from(buf)))); + Ok(bytes) => { + if bytes == 0 { + return Poll::Ready(None); + } else { + return Poll::Ready(Some(Ok(Bytes::from(buf)))); + } } Err(e) => return Poll::Ready(Some(Err(e))), } @@ -234,3 +238,22 @@ impl http_client::HttpClient for ReqwestClient { .boxed() } } + +#[cfg(test)] +mod test { + + use core::str; + + use http_client::AsyncBody; + use smol::stream::StreamExt; + + use crate::WrappedBody; + + #[tokio::test] + async fn test_sync_streaming_upload() { + let mut body = WrappedBody::new(AsyncBody::from("hello there".to_string())).fuse(); + let result = body.next().await.unwrap().unwrap(); + assert!(body.next().await.is_none()); + assert_eq!(str::from_utf8(&result).unwrap(), "hello there"); + } +} From a15b10986a70c9b2fdff48926ffe55a6edc17337 Mon Sep 17 00:00:00 2001 From: "Joseph T. 
Lyons" Date: Mon, 7 Oct 2024 16:17:43 -0400 Subject: [PATCH 04/35] Add ssh initialization events (#18831) Release Notes: - N/A --- crates/client/src/telemetry.rs | 6 +++--- crates/recent_projects/src/dev_servers.rs | 17 ++++++++++++++++- crates/workspace/src/workspace.rs | 6 ++++++ crates/zed/src/main.rs | 2 ++ 4 files changed, 27 insertions(+), 4 deletions(-) diff --git a/crates/client/src/telemetry.rs b/crates/client/src/telemetry.rs index ee6da64d22..a8912c2f20 100644 --- a/crates/client/src/telemetry.rs +++ b/crates/client/src/telemetry.rs @@ -485,7 +485,7 @@ impl Telemetry { worktree_id: WorktreeId, updated_entries_set: &UpdatedEntriesSet, ) { - let project_names: Vec = { + let project_type_names: Vec = { let mut state = self.state.lock(); state .worktree_id_map @@ -521,8 +521,8 @@ impl Telemetry { }; // Done on purpose to avoid calling `self.state.lock()` multiple times - for project_name in project_names { - self.report_app_event(format!("open {} project", project_name)); + for project_type_name in project_type_names { + self.report_app_event(format!("open {} project", project_type_name)); } } diff --git a/crates/recent_projects/src/dev_servers.rs b/crates/recent_projects/src/dev_servers.rs index 0a05b6a1e9..fa1d511791 100644 --- a/crates/recent_projects/src/dev_servers.rs +++ b/crates/recent_projects/src/dev_servers.rs @@ -279,6 +279,13 @@ impl DevServerProjects { match connection.await { Some(_) => this .update(&mut cx, |this, cx| { + let _ = this.workspace.update(cx, |workspace, _| { + workspace + .client() + .telemetry() + .report_app_event("create ssh server".to_string()) + }); + this.add_ssh_server(connection_options, cx); this.mode = Mode::Default(None); cx.notify() @@ -422,7 +429,15 @@ impl DevServerProjects { ); cx.new_view(|cx| { - Workspace::new(None, project.clone(), app_state.clone(), cx) + let workspace = + Workspace::new(None, project.clone(), app_state.clone(), cx); + + workspace + .client() + .telemetry() + .report_app_event("create ssh 
project".to_string()); + + workspace }) }) .log_err(); diff --git a/crates/workspace/src/workspace.rs b/crates/workspace/src/workspace.rs index d2ccd9cd4a..814c7fa915 100644 --- a/crates/workspace/src/workspace.rs +++ b/crates/workspace/src/workspace.rs @@ -5575,6 +5575,12 @@ pub fn open_ssh_project( cx.replace_root_view(|cx| { let mut workspace = Workspace::new(Some(workspace_id), project, app_state.clone(), cx); + + workspace + .client() + .telemetry() + .report_app_event("open ssh project".to_string()); + workspace.set_serialized_ssh_project(serialized_ssh_project); workspace }); diff --git a/crates/zed/src/main.rs b/crates/zed/src/main.rs index adb5feb9fe..535cafbccb 100644 --- a/crates/zed/src/main.rs +++ b/crates/zed/src/main.rs @@ -528,6 +528,8 @@ fn main() { session_id, cx, ); + + // We should rename these in the future to `first app open`, `first app open for release channel`, and `app open` if let (Some(system_id), Some(installation_id)) = (&system_id, &installation_id) { match (&system_id, &installation_id) { (IdType::New(_), IdType::New(_)) => { From c5d252b83713a708b907a8619ee79dbba9f4be4b Mon Sep 17 00:00:00 2001 From: Marshall Bowers Date: Mon, 7 Oct 2024 16:25:17 -0400 Subject: [PATCH 05/35] collab: Add missing `cmake` dependency to Dockerfile (#18832) This PR adds the missing `cmake` dependency to the Docker image that is now needed in order to build collab. Release Notes: - N/A --- Dockerfile-collab | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/Dockerfile-collab b/Dockerfile-collab index 2fb5f04147..7945eb426d 100644 --- a/Dockerfile-collab +++ b/Dockerfile-collab @@ -31,10 +31,12 @@ ENV GITHUB_SHA=$GITHUB_SHA # - Staging: `4f408ec65a3867278322a189b4eb20f1ab51f508` # - Production: `fc4c533d0a8c489e5636a4249d2b52a80039fbd7` # +# Also add `cmake`, since we need it to build `wasmtime`. 
+# # Installing these as a temporary workaround, but I think ideally we'd want to figure # out what caused them to be included in the first place. RUN apt-get update; \ - apt-get install -y --no-install-recommends libxkbcommon-dev libxkbcommon-x11-dev + apt-get install -y --no-install-recommends libxkbcommon-dev libxkbcommon-x11-dev cmake RUN --mount=type=cache,target=./script/node_modules \ --mount=type=cache,target=/usr/local/cargo/registry \ From d55f0259060a4461cf04ef60c04701285763bb28 Mon Sep 17 00:00:00 2001 From: Marshall Bowers Date: Mon, 7 Oct 2024 17:32:49 -0400 Subject: [PATCH 06/35] collab: Track cache writes/reads in LLM usage (#18834) This PR extends the LLM usage tracking to support tracking usage for cache writes and reads for Anthropic models. Release Notes: - N/A --------- Co-authored-by: Antonio Scandurra Co-authored-by: Antonio --- crates/anthropic/src/anthropic.rs | 4 + .../20241007173634_add_cache_token_counts.sql | 11 ++ crates/collab/src/llm.rs | 80 +++++++++---- crates/collab/src/llm/db/queries/usages.rs | 113 +++++++++++++++--- .../src/llm/db/tables/lifetime_usage.rs | 2 + crates/collab/src/llm/db/tables/model.rs | 2 + .../collab/src/llm/db/tables/usage_measure.rs | 2 + crates/collab/src/llm/db/tests/usage_tests.rs | 62 +++++++++- crates/collab/src/llm/telemetry.rs | 4 + 9 files changed, 241 insertions(+), 39 deletions(-) create mode 100644 crates/collab/migrations_llm/20241007173634_add_cache_token_counts.sql diff --git a/crates/anthropic/src/anthropic.rs b/crates/anthropic/src/anthropic.rs index 6b89722842..08c8f27bd9 100644 --- a/crates/anthropic/src/anthropic.rs +++ b/crates/anthropic/src/anthropic.rs @@ -521,6 +521,10 @@ pub struct Usage { pub input_tokens: Option, #[serde(default, skip_serializing_if = "Option::is_none")] pub output_tokens: Option, + #[serde(default, skip_serializing_if = "Option::is_none")] + pub cache_creation_input_tokens: Option, + #[serde(default, skip_serializing_if = "Option::is_none")] + pub 
cache_read_input_tokens: Option, } #[derive(Debug, Serialize, Deserialize)] diff --git a/crates/collab/migrations_llm/20241007173634_add_cache_token_counts.sql b/crates/collab/migrations_llm/20241007173634_add_cache_token_counts.sql new file mode 100644 index 0000000000..855e46ab02 --- /dev/null +++ b/crates/collab/migrations_llm/20241007173634_add_cache_token_counts.sql @@ -0,0 +1,11 @@ +alter table models + add column price_per_million_cache_creation_input_tokens integer not null default 0, + add column price_per_million_cache_read_input_tokens integer not null default 0; + +alter table usages + add column cache_creation_input_tokens_this_month bigint not null default 0, + add column cache_read_input_tokens_this_month bigint not null default 0; + +alter table lifetime_usages + add column cache_creation_input_tokens bigint not null default 0, + add column cache_read_input_tokens bigint not null default 0; diff --git a/crates/collab/src/llm.rs b/crates/collab/src/llm.rs index 2d040cfa28..9809985ac7 100644 --- a/crates/collab/src/llm.rs +++ b/crates/collab/src/llm.rs @@ -318,22 +318,31 @@ async fn perform_completion( chunks .map(move |event| { let chunk = event?; - let (input_tokens, output_tokens) = match &chunk { + let ( + input_tokens, + output_tokens, + cache_creation_input_tokens, + cache_read_input_tokens, + ) = match &chunk { anthropic::Event::MessageStart { message: anthropic::Response { usage, .. }, } | anthropic::Event::MessageDelta { usage, .. 
} => ( usage.input_tokens.unwrap_or(0) as usize, usage.output_tokens.unwrap_or(0) as usize, + usage.cache_creation_input_tokens.unwrap_or(0) as usize, + usage.cache_read_input_tokens.unwrap_or(0) as usize, ), - _ => (0, 0), + _ => (0, 0, 0, 0), }; - anyhow::Ok(( - serde_json::to_vec(&chunk).unwrap(), + anyhow::Ok(CompletionChunk { + bytes: serde_json::to_vec(&chunk).unwrap(), input_tokens, output_tokens, - )) + cache_creation_input_tokens, + cache_read_input_tokens, + }) }) .boxed() } @@ -359,11 +368,13 @@ async fn perform_completion( chunk.usage.as_ref().map_or(0, |u| u.prompt_tokens) as usize; let output_tokens = chunk.usage.as_ref().map_or(0, |u| u.completion_tokens) as usize; - ( - serde_json::to_vec(&chunk).unwrap(), + CompletionChunk { + bytes: serde_json::to_vec(&chunk).unwrap(), input_tokens, output_tokens, - ) + cache_creation_input_tokens: 0, + cache_read_input_tokens: 0, + } }) }) .boxed() @@ -387,13 +398,13 @@ async fn perform_completion( .map(|event| { event.map(|chunk| { // TODO - implement token counting for Google AI - let input_tokens = 0; - let output_tokens = 0; - ( - serde_json::to_vec(&chunk).unwrap(), - input_tokens, - output_tokens, - ) + CompletionChunk { + bytes: serde_json::to_vec(&chunk).unwrap(), + input_tokens: 0, + output_tokens: 0, + cache_creation_input_tokens: 0, + cache_read_input_tokens: 0, + } }) }) .boxed() @@ -407,6 +418,8 @@ async fn perform_completion( model, input_tokens: 0, output_tokens: 0, + cache_creation_input_tokens: 0, + cache_read_input_tokens: 0, inner_stream: stream, }))) } @@ -551,6 +564,14 @@ async fn check_usage_limit( Ok(()) } +struct CompletionChunk { + bytes: Vec, + input_tokens: usize, + output_tokens: usize, + cache_creation_input_tokens: usize, + cache_read_input_tokens: usize, +} + struct TokenCountingStream { state: Arc, claims: LlmTokenClaims, @@ -558,22 +579,26 @@ struct TokenCountingStream { model: String, input_tokens: usize, output_tokens: usize, + cache_creation_input_tokens: usize, + 
cache_read_input_tokens: usize, inner_stream: S, } impl Stream for TokenCountingStream where - S: Stream, usize, usize), anyhow::Error>> + Unpin, + S: Stream> + Unpin, { type Item = Result, anyhow::Error>; fn poll_next(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll> { match Pin::new(&mut self.inner_stream).poll_next(cx) { - Poll::Ready(Some(Ok((mut bytes, input_tokens, output_tokens)))) => { - bytes.push(b'\n'); - self.input_tokens += input_tokens; - self.output_tokens += output_tokens; - Poll::Ready(Some(Ok(bytes))) + Poll::Ready(Some(Ok(mut chunk))) => { + chunk.bytes.push(b'\n'); + self.input_tokens += chunk.input_tokens; + self.output_tokens += chunk.output_tokens; + self.cache_creation_input_tokens += chunk.cache_creation_input_tokens; + self.cache_read_input_tokens += chunk.cache_read_input_tokens; + Poll::Ready(Some(Ok(chunk.bytes))) } Poll::Ready(Some(Err(e))) => Poll::Ready(Some(Err(e))), Poll::Ready(None) => Poll::Ready(None), @@ -590,6 +615,8 @@ impl Drop for TokenCountingStream { let model = std::mem::take(&mut self.model); let input_token_count = self.input_tokens; let output_token_count = self.output_tokens; + let cache_creation_input_token_count = self.cache_creation_input_tokens; + let cache_read_input_token_count = self.cache_read_input_tokens; self.state.executor.spawn_detached(async move { let usage = state .db @@ -599,6 +626,8 @@ impl Drop for TokenCountingStream { provider, &model, input_token_count, + cache_creation_input_token_count, + cache_read_input_token_count, output_token_count, Utc::now(), ) @@ -630,11 +659,20 @@ impl Drop for TokenCountingStream { model, provider: provider.to_string(), input_token_count: input_token_count as u64, + cache_creation_input_token_count: cache_creation_input_token_count + as u64, + cache_read_input_token_count: cache_read_input_token_count as u64, output_token_count: output_token_count as u64, requests_this_minute: usage.requests_this_minute as u64, tokens_this_minute: usage.tokens_this_minute as 
u64, tokens_this_day: usage.tokens_this_day as u64, input_tokens_this_month: usage.input_tokens_this_month as u64, + cache_creation_input_tokens_this_month: usage + .cache_creation_input_tokens_this_month + as u64, + cache_read_input_tokens_this_month: usage + .cache_read_input_tokens_this_month + as u64, output_tokens_this_month: usage.output_tokens_this_month as u64, spending_this_month: usage.spending_this_month as u64, lifetime_spending: usage.lifetime_spending as u64, diff --git a/crates/collab/src/llm/db/queries/usages.rs b/crates/collab/src/llm/db/queries/usages.rs index 65a0bd6734..128a42bc58 100644 --- a/crates/collab/src/llm/db/queries/usages.rs +++ b/crates/collab/src/llm/db/queries/usages.rs @@ -14,6 +14,8 @@ pub struct Usage { pub tokens_this_minute: usize, pub tokens_this_day: usize, pub input_tokens_this_month: usize, + pub cache_creation_input_tokens_this_month: usize, + pub cache_read_input_tokens_this_month: usize, pub output_tokens_this_month: usize, pub spending_this_month: usize, pub lifetime_spending: usize, @@ -160,17 +162,14 @@ impl LlmDatabase { .all(&*tx) .await?; - let (lifetime_input_tokens, lifetime_output_tokens) = lifetime_usage::Entity::find() + let lifetime_usage = lifetime_usage::Entity::find() .filter( lifetime_usage::Column::UserId .eq(user_id) .and(lifetime_usage::Column::ModelId.eq(model.id)), ) .one(&*tx) - .await? 
- .map_or((0, 0), |usage| { - (usage.input_tokens as usize, usage.output_tokens as usize) - }); + .await?; let requests_this_minute = self.get_usage_for_measure(&usages, now, UsageMeasure::RequestsPerMinute)?; @@ -180,18 +179,44 @@ impl LlmDatabase { self.get_usage_for_measure(&usages, now, UsageMeasure::TokensPerDay)?; let input_tokens_this_month = self.get_usage_for_measure(&usages, now, UsageMeasure::InputTokensPerMonth)?; + let cache_creation_input_tokens_this_month = self.get_usage_for_measure( + &usages, + now, + UsageMeasure::CacheCreationInputTokensPerMonth, + )?; + let cache_read_input_tokens_this_month = self.get_usage_for_measure( + &usages, + now, + UsageMeasure::CacheReadInputTokensPerMonth, + )?; let output_tokens_this_month = self.get_usage_for_measure(&usages, now, UsageMeasure::OutputTokensPerMonth)?; - let spending_this_month = - calculate_spending(model, input_tokens_this_month, output_tokens_this_month); - let lifetime_spending = - calculate_spending(model, lifetime_input_tokens, lifetime_output_tokens); + let spending_this_month = calculate_spending( + model, + input_tokens_this_month, + cache_creation_input_tokens_this_month, + cache_read_input_tokens_this_month, + output_tokens_this_month, + ); + let lifetime_spending = if let Some(lifetime_usage) = lifetime_usage { + calculate_spending( + model, + lifetime_usage.input_tokens as usize, + lifetime_usage.cache_creation_input_tokens as usize, + lifetime_usage.cache_read_input_tokens as usize, + lifetime_usage.output_tokens as usize, + ) + } else { + 0 + }; Ok(Usage { requests_this_minute, tokens_this_minute, tokens_this_day, input_tokens_this_month, + cache_creation_input_tokens_this_month, + cache_read_input_tokens_this_month, output_tokens_this_month, spending_this_month, lifetime_spending, @@ -208,6 +233,8 @@ impl LlmDatabase { provider: LanguageModelProvider, model_name: &str, input_token_count: usize, + cache_creation_input_tokens: usize, + cache_read_input_tokens: usize, 
output_token_count: usize, now: DateTimeUtc, ) -> Result { @@ -235,6 +262,10 @@ impl LlmDatabase { &tx, ) .await?; + let total_token_count = input_token_count + + cache_read_input_tokens + + cache_creation_input_tokens + + output_token_count; let tokens_this_minute = self .update_usage_for_measure( user_id, @@ -243,7 +274,7 @@ impl LlmDatabase { &usages, UsageMeasure::TokensPerMinute, now, - input_token_count + output_token_count, + total_token_count, &tx, ) .await?; @@ -255,7 +286,7 @@ impl LlmDatabase { &usages, UsageMeasure::TokensPerDay, now, - input_token_count + output_token_count, + total_token_count, &tx, ) .await?; @@ -271,6 +302,30 @@ impl LlmDatabase { &tx, ) .await?; + let cache_creation_input_tokens_this_month = self + .update_usage_for_measure( + user_id, + is_staff, + model.id, + &usages, + UsageMeasure::CacheCreationInputTokensPerMonth, + now, + cache_creation_input_tokens, + &tx, + ) + .await?; + let cache_read_input_tokens_this_month = self + .update_usage_for_measure( + user_id, + is_staff, + model.id, + &usages, + UsageMeasure::CacheReadInputTokensPerMonth, + now, + cache_read_input_tokens, + &tx, + ) + .await?; let output_tokens_this_month = self .update_usage_for_measure( user_id, @@ -283,8 +338,13 @@ impl LlmDatabase { &tx, ) .await?; - let spending_this_month = - calculate_spending(model, input_tokens_this_month, output_tokens_this_month); + let spending_this_month = calculate_spending( + model, + input_tokens_this_month, + cache_creation_input_tokens_this_month, + cache_read_input_tokens_this_month, + output_tokens_this_month, + ); // Update lifetime usage let lifetime_usage = lifetime_usage::Entity::find() @@ -303,6 +363,12 @@ impl LlmDatabase { input_tokens: ActiveValue::set( usage.input_tokens + input_token_count as i64, ), + cache_creation_input_tokens: ActiveValue::set( + usage.cache_creation_input_tokens + cache_creation_input_tokens as i64, + ), + cache_read_input_tokens: ActiveValue::set( + usage.cache_read_input_tokens + 
cache_read_input_tokens as i64, + ), output_tokens: ActiveValue::set( usage.output_tokens + output_token_count as i64, ), @@ -327,6 +393,8 @@ impl LlmDatabase { let lifetime_spending = calculate_spending( model, lifetime_usage.input_tokens as usize, + lifetime_usage.cache_creation_input_tokens as usize, + lifetime_usage.cache_read_input_tokens as usize, lifetime_usage.output_tokens as usize, ); @@ -335,6 +403,8 @@ impl LlmDatabase { tokens_this_minute, tokens_this_day, input_tokens_this_month, + cache_creation_input_tokens_this_month, + cache_read_input_tokens_this_month, output_tokens_this_month, spending_this_month, lifetime_spending, @@ -501,13 +571,24 @@ impl LlmDatabase { fn calculate_spending( model: &model::Model, input_tokens_this_month: usize, + cache_creation_input_tokens_this_month: usize, + cache_read_input_tokens_this_month: usize, output_tokens_this_month: usize, ) -> usize { let input_token_cost = input_tokens_this_month * model.price_per_million_input_tokens as usize / 1_000_000; + let cache_creation_input_token_cost = cache_creation_input_tokens_this_month + * model.price_per_million_cache_creation_input_tokens as usize + / 1_000_000; + let cache_read_input_token_cost = cache_read_input_tokens_this_month + * model.price_per_million_cache_read_input_tokens as usize + / 1_000_000; let output_token_cost = output_tokens_this_month * model.price_per_million_output_tokens as usize / 1_000_000; - input_token_cost + output_token_cost + input_token_cost + + cache_creation_input_token_cost + + cache_read_input_token_cost + + output_token_cost } const MINUTE_BUCKET_COUNT: usize = 12; @@ -521,6 +602,8 @@ impl UsageMeasure { UsageMeasure::TokensPerMinute => MINUTE_BUCKET_COUNT, UsageMeasure::TokensPerDay => DAY_BUCKET_COUNT, UsageMeasure::InputTokensPerMonth => MONTH_BUCKET_COUNT, + UsageMeasure::CacheCreationInputTokensPerMonth => MONTH_BUCKET_COUNT, + UsageMeasure::CacheReadInputTokensPerMonth => MONTH_BUCKET_COUNT, UsageMeasure::OutputTokensPerMonth => 
MONTH_BUCKET_COUNT, } } @@ -531,6 +614,8 @@ impl UsageMeasure { UsageMeasure::TokensPerMinute => Duration::minutes(1), UsageMeasure::TokensPerDay => Duration::hours(24), UsageMeasure::InputTokensPerMonth => Duration::days(30), + UsageMeasure::CacheCreationInputTokensPerMonth => Duration::days(30), + UsageMeasure::CacheReadInputTokensPerMonth => Duration::days(30), UsageMeasure::OutputTokensPerMonth => Duration::days(30), } } diff --git a/crates/collab/src/llm/db/tables/lifetime_usage.rs b/crates/collab/src/llm/db/tables/lifetime_usage.rs index 05ad2d5e94..fc8354699b 100644 --- a/crates/collab/src/llm/db/tables/lifetime_usage.rs +++ b/crates/collab/src/llm/db/tables/lifetime_usage.rs @@ -9,6 +9,8 @@ pub struct Model { pub user_id: UserId, pub model_id: ModelId, pub input_tokens: i64, + pub cache_creation_input_tokens: i64, + pub cache_read_input_tokens: i64, pub output_tokens: i64, } diff --git a/crates/collab/src/llm/db/tables/model.rs b/crates/collab/src/llm/db/tables/model.rs index c87789f27e..4d7d2d8da9 100644 --- a/crates/collab/src/llm/db/tables/model.rs +++ b/crates/collab/src/llm/db/tables/model.rs @@ -14,6 +14,8 @@ pub struct Model { pub max_tokens_per_minute: i64, pub max_tokens_per_day: i64, pub price_per_million_input_tokens: i32, + pub price_per_million_cache_creation_input_tokens: i32, + pub price_per_million_cache_read_input_tokens: i32, pub price_per_million_output_tokens: i32, } diff --git a/crates/collab/src/llm/db/tables/usage_measure.rs b/crates/collab/src/llm/db/tables/usage_measure.rs index 1105d997c2..50c9501e54 100644 --- a/crates/collab/src/llm/db/tables/usage_measure.rs +++ b/crates/collab/src/llm/db/tables/usage_measure.rs @@ -10,6 +10,8 @@ pub enum UsageMeasure { TokensPerMinute, TokensPerDay, InputTokensPerMonth, + CacheCreationInputTokensPerMonth, + CacheReadInputTokensPerMonth, OutputTokensPerMonth, } diff --git a/crates/collab/src/llm/db/tests/usage_tests.rs b/crates/collab/src/llm/db/tests/usage_tests.rs index 905a3dda08..97bcc20e44 
100644 --- a/crates/collab/src/llm/db/tests/usage_tests.rs +++ b/crates/collab/src/llm/db/tests/usage_tests.rs @@ -33,12 +33,12 @@ async fn test_tracking_usage(db: &mut LlmDatabase) { let user_id = UserId::from_proto(123); let now = t0; - db.record_usage(user_id, false, provider, model, 1000, 0, now) + db.record_usage(user_id, false, provider, model, 1000, 0, 0, 0, now) .await .unwrap(); let now = t0 + Duration::seconds(10); - db.record_usage(user_id, false, provider, model, 2000, 0, now) + db.record_usage(user_id, false, provider, model, 2000, 0, 0, 0, now) .await .unwrap(); @@ -50,6 +50,8 @@ async fn test_tracking_usage(db: &mut LlmDatabase) { tokens_this_minute: 3000, tokens_this_day: 3000, input_tokens_this_month: 3000, + cache_creation_input_tokens_this_month: 0, + cache_read_input_tokens_this_month: 0, output_tokens_this_month: 0, spending_this_month: 0, lifetime_spending: 0, @@ -65,6 +67,8 @@ async fn test_tracking_usage(db: &mut LlmDatabase) { tokens_this_minute: 2000, tokens_this_day: 3000, input_tokens_this_month: 3000, + cache_creation_input_tokens_this_month: 0, + cache_read_input_tokens_this_month: 0, output_tokens_this_month: 0, spending_this_month: 0, lifetime_spending: 0, @@ -72,7 +76,7 @@ async fn test_tracking_usage(db: &mut LlmDatabase) { ); let now = t0 + Duration::seconds(60); - db.record_usage(user_id, false, provider, model, 3000, 0, now) + db.record_usage(user_id, false, provider, model, 3000, 0, 0, 0, now) .await .unwrap(); @@ -84,6 +88,8 @@ async fn test_tracking_usage(db: &mut LlmDatabase) { tokens_this_minute: 5000, tokens_this_day: 6000, input_tokens_this_month: 6000, + cache_creation_input_tokens_this_month: 0, + cache_read_input_tokens_this_month: 0, output_tokens_this_month: 0, spending_this_month: 0, lifetime_spending: 0, @@ -100,13 +106,15 @@ async fn test_tracking_usage(db: &mut LlmDatabase) { tokens_this_minute: 0, tokens_this_day: 5000, input_tokens_this_month: 6000, + cache_creation_input_tokens_this_month: 0, + 
cache_read_input_tokens_this_month: 0, output_tokens_this_month: 0, spending_this_month: 0, lifetime_spending: 0, } ); - db.record_usage(user_id, false, provider, model, 4000, 0, now) + db.record_usage(user_id, false, provider, model, 4000, 0, 0, 0, now) .await .unwrap(); @@ -118,6 +126,8 @@ async fn test_tracking_usage(db: &mut LlmDatabase) { tokens_this_minute: 4000, tokens_this_day: 9000, input_tokens_this_month: 10000, + cache_creation_input_tokens_this_month: 0, + cache_read_input_tokens_this_month: 0, output_tokens_this_month: 0, spending_this_month: 0, lifetime_spending: 0, @@ -134,6 +144,50 @@ async fn test_tracking_usage(db: &mut LlmDatabase) { tokens_this_minute: 0, tokens_this_day: 0, input_tokens_this_month: 9000, + cache_creation_input_tokens_this_month: 0, + cache_read_input_tokens_this_month: 0, + output_tokens_this_month: 0, + spending_this_month: 0, + lifetime_spending: 0, + } + ); + + // Test cache creation input tokens + db.record_usage(user_id, false, provider, model, 1000, 500, 0, 0, now) + .await + .unwrap(); + + let usage = db.get_usage(user_id, provider, model, now).await.unwrap(); + assert_eq!( + usage, + Usage { + requests_this_minute: 1, + tokens_this_minute: 1500, + tokens_this_day: 1500, + input_tokens_this_month: 10000, + cache_creation_input_tokens_this_month: 500, + cache_read_input_tokens_this_month: 0, + output_tokens_this_month: 0, + spending_this_month: 0, + lifetime_spending: 0, + } + ); + + // Test cache read input tokens + db.record_usage(user_id, false, provider, model, 1000, 0, 300, 0, now) + .await + .unwrap(); + + let usage = db.get_usage(user_id, provider, model, now).await.unwrap(); + assert_eq!( + usage, + Usage { + requests_this_minute: 2, + tokens_this_minute: 2800, + tokens_this_day: 2800, + input_tokens_this_month: 11000, + cache_creation_input_tokens_this_month: 500, + cache_read_input_tokens_this_month: 300, output_tokens_this_month: 0, spending_this_month: 0, lifetime_spending: 0, diff --git 
a/crates/collab/src/llm/telemetry.rs b/crates/collab/src/llm/telemetry.rs index 17a2cb9cd3..9daaaf3032 100644 --- a/crates/collab/src/llm/telemetry.rs +++ b/crates/collab/src/llm/telemetry.rs @@ -12,11 +12,15 @@ pub struct LlmUsageEventRow { pub model: String, pub provider: String, pub input_token_count: u64, + pub cache_creation_input_token_count: u64, + pub cache_read_input_token_count: u64, pub output_token_count: u64, pub requests_this_minute: u64, pub tokens_this_minute: u64, pub tokens_this_day: u64, pub input_tokens_this_month: u64, + pub cache_creation_input_tokens_this_month: u64, + pub cache_read_input_tokens_this_month: u64, pub output_tokens_this_month: u64, pub spending_this_month: u64, pub lifetime_spending: u64, From 3c91184726381df24321036dada8c5f626cecbc6 Mon Sep 17 00:00:00 2001 From: Marshall Bowers Date: Mon, 7 Oct 2024 18:21:48 -0400 Subject: [PATCH 07/35] collab: Drop mistakenly-added columns from the `usages` table (#18835) This PR drops the `cache_creation_input_tokens_this_month ` and `cache_read_input_tokens_this_month ` columns from the `usages` table in the LLM database. We mistakenly added these in #18834, but these aren't necessary due to the structure of the `usages` table. We weren't actually using these columns anywhere. 
Release Notes: - N/A --- .../20241007220716_drop_incorrect_usages_columns.sql | 3 +++ 1 file changed, 3 insertions(+) create mode 100644 crates/collab/migrations_llm/20241007220716_drop_incorrect_usages_columns.sql diff --git a/crates/collab/migrations_llm/20241007220716_drop_incorrect_usages_columns.sql b/crates/collab/migrations_llm/20241007220716_drop_incorrect_usages_columns.sql new file mode 100644 index 0000000000..c204451b75 --- /dev/null +++ b/crates/collab/migrations_llm/20241007220716_drop_incorrect_usages_columns.sql @@ -0,0 +1,3 @@ +alter table usages + drop column cache_creation_input_tokens_this_month, + drop column cache_read_input_tokens_this_month; From b0a16a760146e9e07d134572364fac850daa9624 Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Mon, 7 Oct 2024 16:28:33 -0700 Subject: [PATCH 08/35] Fix bugs with applying hunks from branch buffers (#18721) Release Notes: - N/A --------- Co-authored-by: Marshall --- crates/clock/src/clock.rs | 9 +-- crates/language/src/buffer.rs | 96 +++++++++++++++-------------- crates/language/src/buffer_tests.rs | 66 ++++++++++++++++++-- crates/text/src/text.rs | 18 ++++-- 4 files changed, 128 insertions(+), 61 deletions(-) diff --git a/crates/clock/src/clock.rs b/crates/clock/src/clock.rs index 2b45e4a8fa..acbde90dc1 100644 --- a/crates/clock/src/clock.rs +++ b/crates/clock/src/clock.rs @@ -216,10 +216,11 @@ impl fmt::Debug for Global { if timestamp.replica_id > 0 { write!(f, ", ")?; } - write!(f, "{}: {}", timestamp.replica_id, timestamp.value)?; - } - if self.local_branch_value > 0 { - write!(f, ": {}", self.local_branch_value)?; + if timestamp.replica_id == LOCAL_BRANCH_REPLICA_ID { + write!(f, ": {}", timestamp.value)?; + } else { + write!(f, "{}: {}", timestamp.replica_id, timestamp.value)?; + } } write!(f, "}}") } diff --git a/crates/language/src/buffer.rs b/crates/language/src/buffer.rs index 4990b9074f..160e8b3ba9 100644 --- a/crates/language/src/buffer.rs +++ b/crates/language/src/buffer.rs @@ -18,6 +18,7 
@@ use crate::{ }; use anyhow::{anyhow, Context, Result}; use async_watch as watch; +use clock::Lamport; pub use clock::ReplicaId; use futures::channel::oneshot; use gpui::{ @@ -90,7 +91,7 @@ enum BufferDiffBase { PastBufferVersion { buffer: Model, rope: Rope, - operations_to_ignore: Vec, + merged_operations: Vec, }, } @@ -802,7 +803,7 @@ impl Buffer { diff_base: Some(BufferDiffBase::PastBufferVersion { buffer: this.clone(), rope: self.as_rope().clone(), - operations_to_ignore: Vec::new(), + merged_operations: Default::default(), }), language: self.language.clone(), has_conflict: self.has_conflict, @@ -834,34 +835,32 @@ impl Buffer { return; }; - base_buffer.update(cx, |base_buffer, cx| { - let edits = self - .edits_since::(&base_buffer.version) - .filter_map(|edit| { - if range - .as_ref() - .map_or(true, |range| range.overlaps(&edit.new)) - { - Some((edit.old, self.text_for_range(edit.new).collect::())) - } else { - None - } - }) - .collect::>(); + let mut edits = Vec::new(); + for edit in self.edits_since::(&base_buffer.read(cx).version()) { + if let Some(range) = &range { + if range.start > edit.new.end || edit.new.start > range.end { + continue; + } + } + edits.push(( + edit.old.clone(), + self.text_for_range(edit.new.clone()).collect::(), + )); + } - let operation = base_buffer.edit(edits, None, cx); + let operation = base_buffer.update(cx, |base_buffer, cx| { + cx.emit(BufferEvent::DiffBaseChanged); + base_buffer.edit(edits, None, cx) + }); - // Prevent this operation from being reapplied to the branch. + if let Some(operation) = operation { if let Some(BufferDiffBase::PastBufferVersion { - operations_to_ignore, - .. + merged_operations, .. }) = &mut self.diff_base { - operations_to_ignore.extend(operation); + merged_operations.push(operation); } - - cx.emit(BufferEvent::DiffBaseChanged); - }); + } } fn on_base_buffer_event( @@ -870,31 +869,34 @@ impl Buffer { event: &BufferEvent, cx: &mut ModelContext, ) { - if let BufferEvent::Operation { operation, .. 
} = event { - if let Some(BufferDiffBase::PastBufferVersion { - operations_to_ignore, - .. - }) = &mut self.diff_base - { - let mut is_ignored = false; - if let Operation::Buffer(text::Operation::Edit(buffer_operation)) = &operation { - operations_to_ignore.retain(|operation_to_ignore| { - match buffer_operation.timestamp.cmp(&operation_to_ignore) { - Ordering::Less => true, - Ordering::Equal => { - is_ignored = true; - false - } - Ordering::Greater => false, - } - }); - } - if !is_ignored { - self.apply_ops([operation.clone()], cx); - self.diff_base_version += 1; - } + let BufferEvent::Operation { operation, .. } = event else { + return; + }; + let Some(BufferDiffBase::PastBufferVersion { + merged_operations, .. + }) = &mut self.diff_base + else { + return; + }; + + let mut operation_to_undo = None; + if let Operation::Buffer(text::Operation::Edit(operation)) = &operation { + if let Ok(ix) = merged_operations.binary_search(&operation.timestamp) { + merged_operations.remove(ix); + operation_to_undo = Some(operation.timestamp); } } + + self.apply_ops([operation.clone()], cx); + + if let Some(timestamp) = operation_to_undo { + let operation = self + .text + .undo_operations([(timestamp, u32::MAX)].into_iter().collect()); + self.send_operation(Operation::Buffer(operation), true, cx); + } + + self.diff_base_version += 1; } #[cfg(test)] diff --git a/crates/language/src/buffer_tests.rs b/crates/language/src/buffer_tests.rs index da53d5a763..fe390d5510 100644 --- a/crates/language/src/buffer_tests.rs +++ b/crates/language/src/buffer_tests.rs @@ -2485,15 +2485,73 @@ fn test_branch_and_merge(cx: &mut TestAppContext) { } #[gpui::test] -fn test_merge_into_base(cx: &mut AppContext) { - init_settings(cx, |_| {}); +fn test_merge_into_base(cx: &mut TestAppContext) { + cx.update(|cx| init_settings(cx, |_| {})); + let base = cx.new_model(|cx| Buffer::local("abcdefghijk", cx)); let branch = base.update(cx, |buffer, cx| buffer.branch(cx)); + + // Make 3 edits, merge one into the 
base. branch.update(cx, |branch, cx| { - branch.edit([(0..3, "ABC"), (7..9, "HI")], None, cx); + branch.edit([(0..3, "ABC"), (7..9, "HI"), (11..11, "LMN")], None, cx); branch.merge_into_base(Some(5..8), cx); }); - assert_eq!(base.read(cx).text(), "abcdefgHIjk"); + + branch.read_with(cx, |branch, _| assert_eq!(branch.text(), "ABCdefgHIjkLMN")); + base.read_with(cx, |base, _| assert_eq!(base.text(), "abcdefgHIjk")); + + // Undo the one already-merged edit. Merge that into the base. + branch.update(cx, |branch, cx| { + branch.edit([(7..9, "hi")], None, cx); + branch.merge_into_base(Some(5..8), cx); + }); + base.read_with(cx, |base, _| assert_eq!(base.text(), "abcdefghijk")); + + // Merge an insertion into the base. + branch.update(cx, |branch, cx| { + branch.merge_into_base(Some(11..11), cx); + }); + + branch.read_with(cx, |branch, _| assert_eq!(branch.text(), "ABCdefghijkLMN")); + base.read_with(cx, |base, _| assert_eq!(base.text(), "abcdefghijkLMN")); + + // Deleted the inserted text and merge that into the base. + branch.update(cx, |branch, cx| { + branch.edit([(11..14, "")], None, cx); + branch.merge_into_base(Some(10..11), cx); + }); + + base.read_with(cx, |base, _| assert_eq!(base.text(), "abcdefghijk")); +} + +#[gpui::test] +fn test_undo_after_merge_into_base(cx: &mut TestAppContext) { + cx.update(|cx| init_settings(cx, |_| {})); + + let base = cx.new_model(|cx| Buffer::local("abcdefghijk", cx)); + let branch = base.update(cx, |buffer, cx| buffer.branch(cx)); + + // Make 2 edits, merge one into the base. + branch.update(cx, |branch, cx| { + branch.edit([(0..3, "ABC"), (7..9, "HI")], None, cx); + branch.merge_into_base(Some(7..7), cx); + }); + base.read_with(cx, |base, _| assert_eq!(base.text(), "abcdefgHIjk")); + branch.read_with(cx, |branch, _| assert_eq!(branch.text(), "ABCdefgHIjk")); + + // Undo the merge in the base buffer. 
+ base.update(cx, |base, cx| { + base.undo(cx); + }); + base.read_with(cx, |base, _| assert_eq!(base.text(), "abcdefghijk")); + branch.read_with(cx, |branch, _| assert_eq!(branch.text(), "ABCdefgHIjk")); + + // Merge that operation into the base again. + branch.update(cx, |branch, cx| { + branch.merge_into_base(Some(7..7), cx); + }); + base.read_with(cx, |base, _| assert_eq!(base.text(), "abcdefgHIjk")); + branch.read_with(cx, |branch, _| assert_eq!(branch.text(), "ABCdefgHIjk")); } fn start_recalculating_diff(buffer: &Model, cx: &mut TestAppContext) { diff --git a/crates/text/src/text.rs b/crates/text/src/text.rs index 80eafcf4eb..6c941401be 100644 --- a/crates/text/src/text.rs +++ b/crates/text/src/text.rs @@ -1430,16 +1430,22 @@ impl Buffer { counts.insert(edit_id, self.undo_map.undo_count(edit_id) + 1); } + let operation = self.undo_operations(counts); + self.history.push(operation.clone()); + operation + } + + pub fn undo_operations(&mut self, counts: HashMap) -> Operation { + let timestamp = self.lamport_clock.tick(); + let version = self.version(); + self.snapshot.version.observe(timestamp); let undo = UndoOperation { - timestamp: self.lamport_clock.tick(), - version: self.version(), + timestamp, + version, counts, }; self.apply_undo(&undo); - self.snapshot.version.observe(undo.timestamp); - let operation = Operation::Undo(undo); - self.history.push(operation.clone()); - operation + Operation::Undo(undo) } pub fn push_transaction(&mut self, transaction: Transaction, now: Instant) { From 87cc208f9f08ddd3c799135d74be76bdf4c8a8c1 Mon Sep 17 00:00:00 2001 From: Peter Tripp Date: Mon, 7 Oct 2024 21:04:36 -0400 Subject: [PATCH 09/35] docs: Fix ollama available_models example (#18842) --- docs/src/assistant/configuration.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/src/assistant/configuration.md b/docs/src/assistant/configuration.md index 9d9c62d8c6..b4d364dc20 100644 --- a/docs/src/assistant/configuration.md +++ 
b/docs/src/assistant/configuration.md @@ -142,8 +142,8 @@ Depending on your hardware or use-case you may wish to limit or increase the con "low_speed_timeout_in_seconds": 120, "available_models": [ { - "provider": "ollama", - "name": "mistral:latest", + "name": "qwen2.5-coder", + "display_name": "qwen 2.5 coder 32K", "max_tokens": 32768 } ] From 910a773b895f84331683d198f9f3c118fb3e4c5a Mon Sep 17 00:00:00 2001 From: Stanislav Alekseev <43210583+WeetHet@users.noreply.github.com> Date: Tue, 8 Oct 2024 12:36:18 +0300 Subject: [PATCH 10/35] Display environment loading failures in the activity indicator (#18567) As @maan2003 noted in #18473, we should warn the user if direnv call fails Release Notes: - Show a notice in the activity indicator if an error occurs while loading the shell environment --- .../src/activity_indicator.rs | 25 +++- crates/project/src/direnv.rs | 72 ++++++++++ crates/project/src/environment.rs | 136 ++++++++++-------- crates/project/src/project.rs | 19 +++ 4 files changed, 192 insertions(+), 60 deletions(-) create mode 100644 crates/project/src/direnv.rs diff --git a/crates/activity_indicator/src/activity_indicator.rs b/crates/activity_indicator/src/activity_indicator.rs index ace972bf87..2a4f233db1 100644 --- a/crates/activity_indicator/src/activity_indicator.rs +++ b/crates/activity_indicator/src/activity_indicator.rs @@ -10,7 +10,7 @@ use gpui::{ use language::{ LanguageRegistry, LanguageServerBinaryStatus, LanguageServerId, LanguageServerName, }; -use project::{LanguageServerProgress, Project}; +use project::{EnvironmentErrorMessage, LanguageServerProgress, Project, WorktreeId}; use smallvec::SmallVec; use std::{cmp::Reverse, fmt::Write, sync::Arc, time::Duration}; use ui::{prelude::*, ButtonLike, ContextMenu, PopoverMenu, PopoverMenuHandle}; @@ -175,7 +175,30 @@ impl ActivityIndicator { .flatten() } + fn pending_environment_errors<'a>( + &'a self, + cx: &'a AppContext, + ) -> impl Iterator { + 
self.project.read(cx).shell_environment_errors(cx) + } + fn content_to_render(&mut self, cx: &mut ViewContext) -> Option { + // Show if any direnv calls failed + if let Some((&worktree_id, error)) = self.pending_environment_errors(cx).next() { + return Some(Content { + icon: Some( + Icon::new(IconName::Warning) + .size(IconSize::Small) + .into_any_element(), + ), + message: error.0.clone(), + on_click: Some(Arc::new(move |this, cx| { + this.project.update(cx, |project, cx| { + project.remove_environment_error(cx, worktree_id); + }) + })), + }); + } // Show any language server has pending activity. let mut pending_work = self.pending_language_server_work(cx); if let Some(PendingWork { diff --git a/crates/project/src/direnv.rs b/crates/project/src/direnv.rs new file mode 100644 index 0000000000..682cb4b609 --- /dev/null +++ b/crates/project/src/direnv.rs @@ -0,0 +1,72 @@ +use crate::environment::EnvironmentErrorMessage; +use std::process::ExitStatus; + +#[cfg(not(any(test, feature = "test-support")))] +use {collections::HashMap, std::path::Path, util::ResultExt}; + +#[derive(Clone)] +pub enum DirenvError { + NotFound, + FailedRun, + NonZeroExit(ExitStatus, Vec), + EmptyOutput, + InvalidJson, +} + +impl From for Option { + fn from(value: DirenvError) -> Self { + match value { + DirenvError::NotFound => None, + DirenvError::FailedRun | DirenvError::NonZeroExit(_, _) => { + Some(EnvironmentErrorMessage(String::from( + "Failed to run direnv. See logs for more info", + ))) + } + DirenvError::EmptyOutput => None, + DirenvError::InvalidJson => Some(EnvironmentErrorMessage(String::from( + "Direnv returned invalid json. 
See logs for more info", + ))), + } + } +} + +#[cfg(not(any(test, feature = "test-support")))] +pub async fn load_direnv_environment(dir: &Path) -> Result, DirenvError> { + let Ok(direnv_path) = which::which("direnv") else { + return Err(DirenvError::NotFound); + }; + + let Some(direnv_output) = smol::process::Command::new(direnv_path) + .args(["export", "json"]) + .env("TERM", "dumb") + .current_dir(dir) + .output() + .await + .log_err() + else { + return Err(DirenvError::FailedRun); + }; + + if !direnv_output.status.success() { + log::error!( + "Loading direnv environment failed ({}), stderr: {}", + direnv_output.status, + String::from_utf8_lossy(&direnv_output.stderr) + ); + return Err(DirenvError::NonZeroExit( + direnv_output.status, + direnv_output.stderr, + )); + } + + let output = String::from_utf8_lossy(&direnv_output.stdout); + if output.is_empty() { + return Err(DirenvError::EmptyOutput); + } + + let Some(env) = serde_json::from_str(&output).log_err() else { + return Err(DirenvError::InvalidJson); + }; + + Ok(env) +} diff --git a/crates/project/src/environment.rs b/crates/project/src/environment.rs index 23d23c9dc6..1f6d5ba3d1 100644 --- a/crates/project/src/environment.rs +++ b/crates/project/src/environment.rs @@ -1,4 +1,3 @@ -use anyhow::Result; use futures::{future::Shared, FutureExt}; use std::{path::Path, sync::Arc}; use util::ResultExt; @@ -17,6 +16,7 @@ pub struct ProjectEnvironment { cli_environment: Option>, get_environment_task: Option>>>>, cached_shell_environments: HashMap>, + environment_error_messages: HashMap, } impl ProjectEnvironment { @@ -37,6 +37,7 @@ impl ProjectEnvironment { cli_environment, get_environment_task: None, cached_shell_environments: Default::default(), + environment_error_messages: Default::default(), } }) } @@ -54,6 +55,7 @@ impl ProjectEnvironment { pub(crate) fn remove_worktree_environment(&mut self, worktree_id: WorktreeId) { self.cached_shell_environments.remove(&worktree_id); + 
self.environment_error_messages.remove(&worktree_id); } /// Returns the inherited CLI environment, if this project was opened from the Zed CLI. @@ -66,6 +68,18 @@ impl ProjectEnvironment { } } + /// Returns an iterator over all pairs `(worktree_id, error_message)` of + /// environment errors associated with this project environment. + pub(crate) fn environment_errors( + &self, + ) -> impl Iterator { + self.environment_error_messages.iter() + } + + pub(crate) fn remove_environment_error(&mut self, worktree_id: WorktreeId) { + self.environment_error_messages.remove(&worktree_id); + } + /// Returns the project environment, if possible. /// If the project was opened from the CLI, then the inherited CLI environment is returned. /// If it wasn't opened from the CLI, and a worktree is given, then a shell is spawned in @@ -120,25 +134,31 @@ impl ProjectEnvironment { let load_direnv = ProjectSettings::get_global(cx).load_direnv.clone(); cx.spawn(|this, mut cx| async move { - let mut shell_env = cx + let (mut shell_env, error) = cx .background_executor() .spawn({ let cwd = worktree_abs_path.clone(); async move { load_shell_environment(&cwd, &load_direnv).await } }) - .await - .ok(); + .await; if let Some(shell_env) = shell_env.as_mut() { this.update(&mut cx, |this, _| { this.cached_shell_environments - .insert(worktree_id, shell_env.clone()) + .insert(worktree_id, shell_env.clone()); }) .log_err(); set_origin_marker(shell_env, EnvironmentOrigin::WorktreeShell); } + if let Some(error) = error { + this.update(&mut cx, |this, _| { + this.environment_error_messages.insert(worktree_id, error); + }) + .log_err(); + } + shell_env }) } @@ -165,64 +185,62 @@ impl From for String { } } +pub struct EnvironmentErrorMessage(pub String); + +impl EnvironmentErrorMessage { + #[allow(dead_code)] + fn from_str(s: &str) -> Self { + Self(String::from(s)) + } +} + #[cfg(any(test, feature = "test-support"))] async fn load_shell_environment( _dir: &Path, _load_direnv: &DirenvSettings, -) -> 
Result> { - Ok([("ZED_FAKE_TEST_ENV".into(), "true".into())] +) -> ( + Option>, + Option, +) { + let fake_env = [("ZED_FAKE_TEST_ENV".into(), "true".into())] .into_iter() - .collect()) + .collect(); + (Some(fake_env), None) } #[cfg(not(any(test, feature = "test-support")))] async fn load_shell_environment( dir: &Path, load_direnv: &DirenvSettings, -) -> Result> { - use anyhow::{anyhow, Context}; +) -> ( + Option>, + Option, +) { + use crate::direnv::{load_direnv_environment, DirenvError}; use std::path::PathBuf; use util::parse_env_output; - async fn load_direnv_environment(dir: &Path) -> Result>> { - let Ok(direnv_path) = which::which("direnv") else { - return Ok(None); - }; - - let direnv_output = smol::process::Command::new(direnv_path) - .args(["export", "json"]) - .current_dir(dir) - .output() - .await - .context("failed to spawn direnv to get local environment variables")?; - - anyhow::ensure!( - direnv_output.status.success(), - "direnv exited with error {:?}. Stderr:\n{}", - direnv_output.status, - String::from_utf8_lossy(&direnv_output.stderr) - ); - - let output = String::from_utf8_lossy(&direnv_output.stdout); - if output.is_empty() { - return Ok(None); - } - - Ok(Some( - serde_json::from_str(&output).context("failed to parse direnv output")?, - )) + fn message(with: &str) -> (Option, Option) { + let message = EnvironmentErrorMessage::from_str(with); + (None, Some(message)) } - let direnv_environment = match load_direnv { - DirenvSettings::ShellHook => None, - DirenvSettings::Direct => load_direnv_environment(dir).await.log_err().flatten(), - } - .unwrap_or(HashMap::default()); + let (direnv_environment, direnv_error) = match load_direnv { + DirenvSettings::ShellHook => (None, None), + DirenvSettings::Direct => match load_direnv_environment(dir).await { + Ok(env) => (Some(env), None), + Err(err) => ( + None, + as From>::from(err), + ), + }, + }; + let direnv_environment = direnv_environment.unwrap_or(HashMap::default()); let marker = "ZED_SHELL_START"; - 
let shell = std::env::var("SHELL").context( - "SHELL environment variable is not assigned so we can't source login environment variables", - )?; + let Some(shell) = std::env::var("SHELL").log_err() else { + return message("Failed to get login environment. SHELL environment variable is not set"); + }; // What we're doing here is to spawn a shell and then `cd` into // the project directory to get the env in there as if the user @@ -259,26 +277,26 @@ async fn load_shell_environment( additional_command.unwrap_or("") ); - let output = smol::process::Command::new(&shell) + let Some(output) = smol::process::Command::new(&shell) .args(["-l", "-i", "-c", &command]) .envs(direnv_environment) .output() .await - .context("failed to spawn login shell to source login environment variables")?; + .log_err() + else { + return message("Failed to spawn login shell to source login environment variables. See logs for details"); + }; - anyhow::ensure!( - output.status.success(), - "login shell exited with error {:?}", - output.status - ); + if !output.status.success() { + log::error!("login shell exited with {}", output.status); + return message("Login shell exited with nonzero exit code. See logs for details"); + } let stdout = String::from_utf8_lossy(&output.stdout); - let env_output_start = stdout.find(marker).ok_or_else(|| { - anyhow!( - "failed to parse output of `env` command in login shell: {}", - stdout - ) - })?; + let Some(env_output_start) = stdout.find(marker) else { + log::error!("failed to parse output of `env` command in login shell: {stdout}"); + return message("Failed to parse stdout of env command. 
See logs for the output"); + }; let mut parsed_env = HashMap::default(); let env_output = &stdout[env_output_start + marker.len()..]; @@ -287,5 +305,5 @@ async fn load_shell_environment( parsed_env.insert(key, value); }); - Ok(parsed_env) + (Some(parsed_env), direnv_error) } diff --git a/crates/project/src/project.rs b/crates/project/src/project.rs index f2a8d59c6f..a0164dd981 100644 --- a/crates/project/src/project.rs +++ b/crates/project/src/project.rs @@ -15,7 +15,9 @@ pub mod worktree_store; #[cfg(test)] mod project_tests; +mod direnv; mod environment; +pub use environment::EnvironmentErrorMessage; pub mod search_history; mod yarn; @@ -1185,6 +1187,23 @@ impl Project { self.environment.read(cx).get_cli_environment() } + pub fn shell_environment_errors<'a>( + &'a self, + cx: &'a AppContext, + ) -> impl Iterator { + self.environment.read(cx).environment_errors() + } + + pub fn remove_environment_error( + &mut self, + cx: &mut ModelContext, + worktree_id: WorktreeId, + ) { + self.environment.update(cx, |environment, _| { + environment.remove_environment_error(worktree_id); + }); + } + #[cfg(any(test, feature = "test-support"))] pub fn has_open_buffer(&self, path: impl Into, cx: &AppContext) -> bool { self.buffer_store From fa85238c6970ce828858535d1a0799af51500b03 Mon Sep 17 00:00:00 2001 From: Bennet Bo Fenner Date: Tue, 8 Oct 2024 11:37:54 +0200 Subject: [PATCH 11/35] ssh: Limit amount of reconnect attempts (#18819) Co-Authored-by: Thorsten Release Notes: - N/A --------- Co-authored-by: Thorsten --- Cargo.lock | 1 + crates/project/src/project.rs | 6 +- crates/remote/src/remote.rs | 4 +- crates/remote/src/ssh_session.rs | 520 +++++++++++++++++++++++------- crates/title_bar/Cargo.toml | 1 + crates/title_bar/src/title_bar.rs | 10 +- 6 files changed, 415 insertions(+), 127 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 9deb937370..dada7d97f8 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -11885,6 +11885,7 @@ dependencies = [ "pretty_assertions", "project", 
"recent_projects", + "remote", "rpc", "serde", "settings", diff --git a/crates/project/src/project.rs b/crates/project/src/project.rs index a0164dd981..8c2b4bd2a0 100644 --- a/crates/project/src/project.rs +++ b/crates/project/src/project.rs @@ -1263,8 +1263,10 @@ impl Project { .clone() } - pub fn ssh_is_connected(&self, cx: &AppContext) -> Option { - Some(!self.ssh_client.as_ref()?.read(cx).is_reconnect_underway()) + pub fn ssh_connection_state(&self, cx: &AppContext) -> Option { + self.ssh_client + .as_ref() + .map(|ssh| ssh.read(cx).connection_state()) } pub fn replica_id(&self) -> ReplicaId { diff --git a/crates/remote/src/remote.rs b/crates/remote/src/remote.rs index c3d9e8f9cc..3cfaf48a88 100644 --- a/crates/remote/src/remote.rs +++ b/crates/remote/src/remote.rs @@ -2,4 +2,6 @@ pub mod json_log; pub mod protocol; pub mod ssh_session; -pub use ssh_session::{SshClientDelegate, SshConnectionOptions, SshPlatform, SshRemoteClient}; +pub use ssh_session::{ + ConnectionState, SshClientDelegate, SshConnectionOptions, SshPlatform, SshRemoteClient, +}; diff --git a/crates/remote/src/ssh_session.rs b/crates/remote/src/ssh_session.rs index 26ef8626ec..cf2a702231 100644 --- a/crates/remote/src/ssh_session.rs +++ b/crates/remote/src/ssh_session.rs @@ -31,7 +31,8 @@ use smol::{ use std::{ any::TypeId, ffi::OsStr, - mem, + fmt, + ops::ControlFlow, path::{Path, PathBuf}, sync::{ atomic::{AtomicU32, Ordering::SeqCst}, @@ -40,7 +41,7 @@ use std::{ time::{Duration, Instant}, }; use tempfile::TempDir; -use util::maybe; +use util::ResultExt; #[derive( Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Clone, Copy, serde::Serialize, serde::Deserialize, @@ -234,19 +235,157 @@ impl ChannelForwarder { } } -struct SshRemoteClientState { - ssh_connection: SshRemoteConnection, - delegate: Arc, - forwarder: ChannelForwarder, - multiplex_task: Task>, - heartbeat_task: Task>, +const MAX_MISSED_HEARTBEATS: usize = 5; +const HEARTBEAT_INTERVAL: Duration = Duration::from_secs(5); +const 
HEARTBEAT_TIMEOUT: Duration = Duration::from_secs(5); + +const MAX_RECONNECT_ATTEMPTS: usize = 3; + +enum State { + Connecting, + Connected { + ssh_connection: SshRemoteConnection, + delegate: Arc, + forwarder: ChannelForwarder, + + multiplex_task: Task>, + heartbeat_task: Task>, + }, + HeartbeatMissed { + missed_heartbeats: usize, + + ssh_connection: SshRemoteConnection, + delegate: Arc, + forwarder: ChannelForwarder, + + multiplex_task: Task>, + heartbeat_task: Task>, + }, + Reconnecting, + ReconnectFailed { + ssh_connection: SshRemoteConnection, + delegate: Arc, + forwarder: ChannelForwarder, + + error: anyhow::Error, + attempts: usize, + }, + ReconnectExhausted, +} + +impl fmt::Display for State { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match self { + Self::Connecting => write!(f, "connecting"), + Self::Connected { .. } => write!(f, "connected"), + Self::Reconnecting => write!(f, "reconnecting"), + Self::ReconnectFailed { .. } => write!(f, "reconnect failed"), + Self::ReconnectExhausted => write!(f, "reconnect exhausted"), + Self::HeartbeatMissed { .. } => write!(f, "heartbeat missed"), + } + } +} + +impl State { + fn ssh_connection(&self) -> Option<&SshRemoteConnection> { + match self { + Self::Connected { ssh_connection, .. } => Some(ssh_connection), + Self::HeartbeatMissed { ssh_connection, .. } => Some(ssh_connection), + Self::ReconnectFailed { ssh_connection, .. } => Some(ssh_connection), + _ => None, + } + } + + fn can_reconnect(&self) -> bool { + matches!( + self, + Self::Connected { .. } | Self::HeartbeatMissed { .. } | Self::ReconnectFailed { .. } + ) + } + + fn heartbeat_recovered(self) -> Self { + match self { + Self::HeartbeatMissed { + ssh_connection, + delegate, + forwarder, + multiplex_task, + heartbeat_task, + .. 
+ } => Self::Connected { + ssh_connection, + delegate, + forwarder, + multiplex_task, + heartbeat_task, + }, + _ => self, + } + } + + fn heartbeat_missed(self) -> Self { + match self { + Self::Connected { + ssh_connection, + delegate, + forwarder, + multiplex_task, + heartbeat_task, + } => Self::HeartbeatMissed { + missed_heartbeats: 1, + ssh_connection, + delegate, + forwarder, + multiplex_task, + heartbeat_task, + }, + Self::HeartbeatMissed { + missed_heartbeats, + ssh_connection, + delegate, + forwarder, + multiplex_task, + heartbeat_task, + } => Self::HeartbeatMissed { + missed_heartbeats: missed_heartbeats + 1, + ssh_connection, + delegate, + forwarder, + multiplex_task, + heartbeat_task, + }, + _ => self, + } + } +} + +/// The state of the ssh connection. +#[derive(Clone, Copy, Debug)] +pub enum ConnectionState { + Connecting, + Connected, + HeartbeatMissed, + Reconnecting, + Disconnected, +} + +impl From<&State> for ConnectionState { + fn from(value: &State) -> Self { + match value { + State::Connecting => Self::Connecting, + State::Connected { .. } => Self::Connected, + State::Reconnecting | State::ReconnectFailed { .. } => Self::Reconnecting, + State::HeartbeatMissed { .. 
} => Self::HeartbeatMissed, + State::ReconnectExhausted => Self::Disconnected, + } + } } pub struct SshRemoteClient { client: Arc, unique_identifier: String, connection_options: SshConnectionOptions, - inner_state: Arc>>, + state: Arc>>, } impl Drop for SshRemoteClient { @@ -266,6 +405,7 @@ impl SshRemoteClient { let (outgoing_tx, outgoing_rx) = mpsc::unbounded::(); let (incoming_tx, incoming_rx) = mpsc::unbounded::(); + let client = cx.update(|cx| ChannelClient::new(incoming_rx, outgoing_tx, cx))?; let this = cx.new_model(|cx| { cx.on_app_quit(|this: &mut Self, _| { this.shutdown_processes(); @@ -273,47 +413,49 @@ impl SshRemoteClient { }) .detach(); - let client = ChannelClient::new(incoming_rx, outgoing_tx, cx); Self { - client, + client: client.clone(), unique_identifier: unique_identifier.clone(), - connection_options: SshConnectionOptions::default(), - inner_state: Arc::new(Mutex::new(None)), + connection_options: connection_options.clone(), + state: Arc::new(Mutex::new(Some(State::Connecting))), } })?; - let inner_state = { - let (proxy, proxy_incoming_tx, proxy_outgoing_rx) = - ChannelForwarder::new(incoming_tx, outgoing_rx, &mut cx); + let (proxy, proxy_incoming_tx, proxy_outgoing_rx) = + ChannelForwarder::new(incoming_tx, outgoing_rx, &mut cx); - let (ssh_connection, ssh_proxy_process) = Self::establish_connection( - unique_identifier, - connection_options, - delegate.clone(), - &mut cx, - ) - .await?; + let (ssh_connection, ssh_proxy_process) = Self::establish_connection( + unique_identifier, + connection_options, + delegate.clone(), + &mut cx, + ) + .await?; - let multiplex_task = Self::multiplex( - this.downgrade(), - ssh_proxy_process, - proxy_incoming_tx, - proxy_outgoing_rx, - &mut cx, - ); + let multiplex_task = Self::multiplex( + this.downgrade(), + ssh_proxy_process, + proxy_incoming_tx, + proxy_outgoing_rx, + &mut cx, + ); - SshRemoteClientState { + if let Err(error) = client.ping(HEARTBEAT_TIMEOUT).await { + log::error!("failed to establish 
connection: {}", error); + delegate.set_error(error.to_string(), &mut cx); + return Err(error); + } + + let heartbeat_task = Self::heartbeat(this.downgrade(), &mut cx); + + this.update(&mut cx, |this, _| { + *this.state.lock() = Some(State::Connected { ssh_connection, delegate, forwarder: proxy, multiplex_task, - heartbeat_task: Self::heartbeat(this.downgrade(), &mut cx), - } - }; - - this.update(&mut cx, |this, cx| { - this.inner_state.lock().replace(inner_state); - cx.notify(); + heartbeat_task, + }); })?; Ok(this) @@ -321,78 +463,192 @@ impl SshRemoteClient { } fn shutdown_processes(&self) { - let Some(mut state) = self.inner_state.lock().take() else { + let Some(state) = self.state.lock().take() else { return; }; log::info!("shutting down ssh processes"); - // Drop `multiplex_task` because it owns our ssh_proxy_process, which is a - // child of master_process. - let task = mem::replace(&mut state.multiplex_task, Task::ready(Ok(()))); - drop(task); - // Now drop the rest of state, which kills master process. - drop(state); - } - fn reconnect(&self, cx: &ModelContext) -> Result<()> { - log::info!("Trying to reconnect to ssh server..."); - let Some(state) = self.inner_state.lock().take() else { - return Err(anyhow!("reconnect is already in progress")); - }; - - let workspace_identifier = self.unique_identifier.clone(); - - let SshRemoteClientState { - mut ssh_connection, - delegate, - forwarder: proxy, + let State::Connected { multiplex_task, heartbeat_task, - } = state; + .. + } = state + else { + return; + }; + // Drop `multiplex_task` because it owns our ssh_proxy_process, which is a + // child of master_process. drop(multiplex_task); + // Now drop the rest of state, which kills master process. 
drop(heartbeat_task); + } - cx.spawn(|this, mut cx| async move { - let (incoming_tx, outgoing_rx) = proxy.into_channels().await; + fn reconnect(&mut self, cx: &mut ModelContext) -> Result<()> { + let mut lock = self.state.lock(); - ssh_connection.master_process.kill()?; - ssh_connection + let can_reconnect = lock + .as_ref() + .map(|state| state.can_reconnect()) + .unwrap_or(false); + if !can_reconnect { + let error = if let Some(state) = lock.as_ref() { + format!("invalid state, cannot reconnect while in state {state}") + } else { + "no state set".to_string() + }; + return Err(anyhow!(error)); + } + + let state = lock.take().unwrap(); + let (attempts, mut ssh_connection, delegate, forwarder) = match state { + State::Connected { + ssh_connection, + delegate, + forwarder, + multiplex_task, + heartbeat_task, + } + | State::HeartbeatMissed { + ssh_connection, + delegate, + forwarder, + multiplex_task, + heartbeat_task, + .. + } => { + drop(multiplex_task); + drop(heartbeat_task); + (0, ssh_connection, delegate, forwarder) + } + State::ReconnectFailed { + attempts, + ssh_connection, + delegate, + forwarder, + .. + } => (attempts, ssh_connection, delegate, forwarder), + State::Connecting | State::Reconnecting | State::ReconnectExhausted => unreachable!(), + }; + + let attempts = attempts + 1; + if attempts > MAX_RECONNECT_ATTEMPTS { + log::error!( + "Failed to reconnect to after {} attempts, giving up", + MAX_RECONNECT_ATTEMPTS + ); + *lock = Some(State::ReconnectExhausted); + return Ok(()); + } + *lock = Some(State::Reconnecting); + drop(lock); + + log::info!("Trying to reconnect to ssh server... Attempt {}", attempts); + + let identifier = self.unique_identifier.clone(); + let client = self.client.clone(); + let reconnect_task = cx.spawn(|this, mut cx| async move { + macro_rules! 
failed { + ($error:expr, $attempts:expr, $ssh_connection:expr, $delegate:expr, $forwarder:expr) => { + return State::ReconnectFailed { + error: anyhow!($error), + attempts: $attempts, + ssh_connection: $ssh_connection, + delegate: $delegate, + forwarder: $forwarder, + }; + }; + } + + if let Err(error) = ssh_connection.master_process.kill() { + failed!(error, attempts, ssh_connection, delegate, forwarder); + }; + + if let Err(error) = ssh_connection .master_process .status() .await - .context("Failed to kill ssh process")?; + .context("Failed to kill ssh process") + { + failed!(error, attempts, ssh_connection, delegate, forwarder); + } let connection_options = ssh_connection.socket.connection_options.clone(); - let (ssh_connection, ssh_process) = Self::establish_connection( - workspace_identifier, + let (incoming_tx, outgoing_rx) = forwarder.into_channels().await; + let (forwarder, proxy_incoming_tx, proxy_outgoing_rx) = + ChannelForwarder::new(incoming_tx, outgoing_rx, &mut cx); + + let (ssh_connection, ssh_process) = match Self::establish_connection( + identifier, connection_options, delegate.clone(), &mut cx, ) - .await?; - - let (proxy, proxy_incoming_tx, proxy_outgoing_rx) = - ChannelForwarder::new(incoming_tx, outgoing_rx, &mut cx); - - let inner_state = SshRemoteClientState { - ssh_connection, - delegate, - forwarder: proxy, - multiplex_task: Self::multiplex( - this.clone(), - ssh_process, - proxy_incoming_tx, - proxy_outgoing_rx, - &mut cx, - ), - heartbeat_task: Self::heartbeat(this.clone(), &mut cx), + .await + { + Ok((ssh_connection, ssh_process)) => (ssh_connection, ssh_process), + Err(error) => { + failed!(error, attempts, ssh_connection, delegate, forwarder); + } }; - this.update(&mut cx, |this, _| { - this.inner_state.lock().replace(inner_state); + let multiplex_task = Self::multiplex( + this.clone(), + ssh_process, + proxy_incoming_tx, + proxy_outgoing_rx, + &mut cx, + ); + + if let Err(error) = client.ping(HEARTBEAT_TIMEOUT).await { + failed!(error, 
attempts, ssh_connection, delegate, forwarder); + }; + + State::Connected { + ssh_connection, + delegate, + forwarder, + multiplex_task, + heartbeat_task: Self::heartbeat(this.clone(), &mut cx), + } + }); + + cx.spawn(|this, mut cx| async move { + let new_state = reconnect_task.await; + this.update(&mut cx, |this, cx| { + match &new_state { + State::Connecting + | State::Reconnecting { .. } + | State::HeartbeatMissed { .. } => {} + State::Connected { .. } => { + log::info!("Successfully reconnected"); + } + State::ReconnectFailed { + error, attempts, .. + } => { + log::error!( + "Reconnect attempt {} failed: {:?}. Starting new attempt...", + attempts, + error + ); + } + State::ReconnectExhausted => { + log::error!("Reconnect attempt failed and all attempts exhausted"); + } + } + + let reconnect_failed = matches!(new_state, State::ReconnectFailed { .. }); + *this.state.lock() = Some(new_state); + cx.notify(); + if reconnect_failed { + this.reconnect(cx) + } else { + Ok(()) + } }) }) - .detach(); + .detach_and_log_err(cx); + Ok(()) } @@ -403,10 +659,6 @@ impl SshRemoteClient { cx.spawn(|mut cx| { let this = this.clone(); async move { - const MAX_MISSED_HEARTBEATS: usize = 5; - const HEARTBEAT_INTERVAL: Duration = Duration::from_secs(5); - const HEARTBEAT_TIMEOUT: Duration = Duration::from_secs(5); - let mut missed_heartbeats = 0; let mut timer = Timer::interval(HEARTBEAT_INTERVAL); @@ -415,19 +667,7 @@ impl SshRemoteClient { log::info!("Sending heartbeat to server..."); - let result = smol::future::or( - async { - client.request(proto::Ping {}).await?; - Ok(()) - }, - async { - smol::Timer::after(HEARTBEAT_TIMEOUT).await; - - Err(anyhow!("Timeout detected")) - }, - ) - .await; - + let result = client.ping(HEARTBEAT_TIMEOUT).await; if result.is_err() { missed_heartbeats += 1; log::warn!( @@ -440,17 +680,10 @@ impl SshRemoteClient { missed_heartbeats = 0; } - if missed_heartbeats >= MAX_MISSED_HEARTBEATS { - log::error!( - "Missed last {} hearbeats. 
Reconnecting...", - missed_heartbeats - ); - - this.update(&mut cx, |this, cx| { - this.reconnect(cx) - .context("failed to reconnect after missing heartbeats") - }) - .context("failed to update weak reference, SshRemoteClient lost?")??; + let result = this.update(&mut cx, |this, mut cx| { + this.handle_heartbeat_result(missed_heartbeats, &mut cx) + })?; + if result.is_break() { return Ok(()); } } @@ -458,6 +691,34 @@ impl SshRemoteClient { }) } + fn handle_heartbeat_result( + &mut self, + missed_heartbeats: usize, + cx: &mut ModelContext, + ) -> ControlFlow<()> { + let state = self.state.lock().take().unwrap(); + self.state.lock().replace(if missed_heartbeats > 0 { + state.heartbeat_missed() + } else { + state.heartbeat_recovered() + }); + cx.notify(); + + if missed_heartbeats >= MAX_MISSED_HEARTBEATS { + log::error!( + "Missed last {} heartbeats. Reconnecting...", + missed_heartbeats + ); + + self.reconnect(cx) + .context("failed to start reconnect process after missing heartbeats") + .log_err(); + ControlFlow::Break(()) + } else { + ControlFlow::Continue(()) + } + } + fn multiplex( this: WeakModel, mut ssh_proxy_process: Child, @@ -611,10 +872,11 @@ impl SshRemoteClient { } pub fn ssh_args(&self) -> Option> { - let state = self.inner_state.lock(); - state + self.state + .lock() .as_ref() - .map(|state| state.ssh_connection.socket.ssh_args()) + .and_then(|state| state.ssh_connection()) + .map(|ssh_connection| ssh_connection.socket.ssh_args()) } pub fn to_proto_client(&self) -> AnyProtoClient { @@ -625,8 +887,12 @@ impl SshRemoteClient { self.connection_options.connection_string() } - pub fn is_reconnect_underway(&self) -> bool { - maybe!({ Some(self.inner_state.try_lock()?.is_none()) }).unwrap_or_default() + pub fn connection_state(&self) -> ConnectionState { + self.state + .lock() + .as_ref() + .map(ConnectionState::from) + .unwrap_or(ConnectionState::Disconnected) } #[cfg(any(test, feature = "test-support"))] @@ -646,7 +912,7 @@ impl SshRemoteClient { client, 
unique_identifier: "fake".to_string(), connection_options: SshConnectionOptions::default(), - inner_state: Arc::new(Mutex::new(None)), + state: Arc::new(Mutex::new(None)), }) }), server_cx.update(|cx| ChannelClient::new(client_to_server_rx, server_to_client_tx, cx)), @@ -1046,6 +1312,20 @@ impl ChannelClient { } } + pub async fn ping(&self, timeout: Duration) -> Result<()> { + smol::future::or( + async { + self.request(proto::Ping {}).await?; + Ok(()) + }, + async { + smol::Timer::after(timeout).await; + Err(anyhow!("Timeout detected")) + }, + ) + .await + } + pub fn send(&self, payload: T) -> Result<()> { log::debug!("ssh send name:{}", T::NAME); self.send_dynamic(payload.into_envelope(0, None, None)) diff --git a/crates/title_bar/Cargo.toml b/crates/title_bar/Cargo.toml index c837b74dca..e4d3d7fc5b 100644 --- a/crates/title_bar/Cargo.toml +++ b/crates/title_bar/Cargo.toml @@ -41,6 +41,7 @@ gpui.workspace = true notifications.workspace = true project.workspace = true recent_projects.workspace = true +remote.workspace = true rpc.workspace = true serde.workspace = true smallvec.workspace = true diff --git a/crates/title_bar/src/title_bar.rs b/crates/title_bar/src/title_bar.rs index 52dab68a2a..da0179fd64 100644 --- a/crates/title_bar/src/title_bar.rs +++ b/crates/title_bar/src/title_bar.rs @@ -265,10 +265,12 @@ impl TitleBar { fn render_ssh_project_host(&self, cx: &mut ViewContext) -> Option { let host = self.project.read(cx).ssh_connection_string(cx)?; let meta = SharedString::from(format!("Connected to: {host}")); - let indicator_color = if self.project.read(cx).ssh_is_connected(cx)? { - Color::Success - } else { - Color::Warning + let indicator_color = match self.project.read(cx).ssh_connection_state(cx)? 
{ + remote::ConnectionState::Connecting => Color::Info, + remote::ConnectionState::Connected => Color::Success, + remote::ConnectionState::HeartbeatMissed => Color::Warning, + remote::ConnectionState::Reconnecting => Color::Warning, + remote::ConnectionState::Disconnected => Color::Error, }; let indicator = div() .absolute() From be531653a4c84cecdae2be06227b1d907eb08cbb Mon Sep 17 00:00:00 2001 From: Thorsten Ball Date: Tue, 8 Oct 2024 11:54:28 +0200 Subject: [PATCH 12/35] Direnv warn (#18850) Follow-up fixes to #18567 Release Notes: - N/A --- crates/activity_indicator/src/activity_indicator.rs | 3 ++- crates/project/src/environment.rs | 4 ++-- 2 files changed, 4 insertions(+), 3 deletions(-) diff --git a/crates/activity_indicator/src/activity_indicator.rs b/crates/activity_indicator/src/activity_indicator.rs index 2a4f233db1..687519068d 100644 --- a/crates/activity_indicator/src/activity_indicator.rs +++ b/crates/activity_indicator/src/activity_indicator.rs @@ -195,7 +195,8 @@ impl ActivityIndicator { on_click: Some(Arc::new(move |this, cx| { this.project.update(cx, |project, cx| { project.remove_environment_error(cx, worktree_id); - }) + }); + cx.dispatch_action(Box::new(workspace::OpenLog)); })), }); } diff --git a/crates/project/src/environment.rs b/crates/project/src/environment.rs index 1f6d5ba3d1..b7aa20a740 100644 --- a/crates/project/src/environment.rs +++ b/crates/project/src/environment.rs @@ -134,7 +134,7 @@ impl ProjectEnvironment { let load_direnv = ProjectSettings::get_global(cx).load_direnv.clone(); cx.spawn(|this, mut cx| async move { - let (mut shell_env, error) = cx + let (mut shell_env, error_message) = cx .background_executor() .spawn({ let cwd = worktree_abs_path.clone(); @@ -152,7 +152,7 @@ impl ProjectEnvironment { set_origin_marker(shell_env, EnvironmentOrigin::WorktreeShell); } - if let Some(error) = error { + if let Some(error) = error_message { this.update(&mut cx, |this, _| { this.environment_error_messages.insert(worktree_id, error); 
}) From f0566d54eb058b70de19fc342cf50467c55f5ad0 Mon Sep 17 00:00:00 2001 From: Bennet Bo Fenner Date: Tue, 8 Oct 2024 12:57:47 +0200 Subject: [PATCH 13/35] ssh: Log error when remote server panics (#18853) Release Notes: - N/A --- Cargo.lock | 1 + crates/remote_server/Cargo.toml | 1 + crates/remote_server/src/main.rs | 6 ++-- crates/remote_server/src/unix.rs | 47 +++++++++++++++++++++++++++++++- 4 files changed, 51 insertions(+), 4 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index dada7d97f8..dda10e2edc 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -9163,6 +9163,7 @@ name = "remote_server" version = "0.1.0" dependencies = [ "anyhow", + "backtrace", "cargo_toml", "clap", "client", diff --git a/crates/remote_server/Cargo.toml b/crates/remote_server/Cargo.toml index 211b76e091..bd61919151 100644 --- a/crates/remote_server/Cargo.toml +++ b/crates/remote_server/Cargo.toml @@ -22,6 +22,7 @@ test-support = ["fs/test-support"] [dependencies] anyhow.workspace = true +backtrace = "0.3" clap.workspace = true client.workspace = true env_logger.workspace = true diff --git a/crates/remote_server/src/main.rs b/crates/remote_server/src/main.rs index e5582d9b1f..872af42596 100644 --- a/crates/remote_server/src/main.rs +++ b/crates/remote_server/src/main.rs @@ -37,7 +37,7 @@ fn main() { #[cfg(not(windows))] fn main() -> Result<()> { - use remote_server::unix::{execute_proxy, execute_run, init_logging}; + use remote_server::unix::{execute_proxy, execute_run, init}; let cli = Cli::parse(); @@ -48,11 +48,11 @@ fn main() -> Result<()> { stdin_socket, stdout_socket, }) => { - init_logging(Some(log_file))?; + init(Some(log_file))?; execute_run(pid_file, stdin_socket, stdout_socket) } Some(Commands::Proxy { identifier }) => { - init_logging(None)?; + init(None)?; execute_proxy(identifier) } Some(Commands::Version) => { diff --git a/crates/remote_server/src/unix.rs b/crates/remote_server/src/unix.rs index 74b71a2277..049b0a003e 100644 --- a/crates/remote_server/src/unix.rs +++ 
b/crates/remote_server/src/unix.rs @@ -20,7 +20,13 @@ use std::{ sync::Arc, }; -pub fn init_logging(log_file: Option) -> Result<()> { +pub fn init(log_file: Option) -> Result<()> { + init_logging(log_file)?; + init_panic_hook(); + Ok(()) +} + +fn init_logging(log_file: Option) -> Result<()> { if let Some(log_file) = log_file { let target = Box::new(if log_file.exists() { std::fs::OpenOptions::new() @@ -46,6 +52,45 @@ pub fn init_logging(log_file: Option) -> Result<()> { Ok(()) } +fn init_panic_hook() { + std::panic::set_hook(Box::new(|info| { + let payload = info + .payload() + .downcast_ref::<&str>() + .map(|s| s.to_string()) + .or_else(|| info.payload().downcast_ref::().cloned()) + .unwrap_or_else(|| "Box".to_string()); + + let backtrace = backtrace::Backtrace::new(); + let mut backtrace = backtrace + .frames() + .iter() + .flat_map(|frame| { + frame + .symbols() + .iter() + .filter_map(|frame| Some(format!("{:#}", frame.name()?))) + }) + .collect::>(); + + // Strip out leading stack frames for rust panic-handling. 
+ if let Some(ix) = backtrace + .iter() + .position(|name| name == "rust_begin_unwind") + { + backtrace.drain(0..=ix); + } + + log::error!( + "server: panic occurred: {}\nBacktrace:\n{}", + payload, + backtrace.join("\n") + ); + + std::process::abort(); + })); +} + fn start_server( stdin_listener: UnixListener, stdout_listener: UnixListener, From b2eb439f32cb0afa34df61e6aa7ef38b00af97bd Mon Sep 17 00:00:00 2001 From: Thorsten Ball Date: Tue, 8 Oct 2024 13:57:26 +0200 Subject: [PATCH 14/35] remote server: Add more debug logging (#18855) Closes #ISSUE Release Notes: - N/A --- crates/remote_server/src/unix.rs | 15 +++++++++++++-- 1 file changed, 13 insertions(+), 2 deletions(-) diff --git a/crates/remote_server/src/unix.rs b/crates/remote_server/src/unix.rs index 049b0a003e..2e03887ae9 100644 --- a/crates/remote_server/src/unix.rs +++ b/crates/remote_server/src/unix.rs @@ -106,6 +106,7 @@ fn start_server( cx.on_app_quit(move |_| { let mut app_quit_tx = app_quit_tx.clone(); async move { + log::info!("app quitting. sending signal to server main loop"); app_quit_tx.send(()).await.ok(); } }) @@ -195,6 +196,13 @@ fn start_server( } pub fn execute_run(pid_file: PathBuf, stdin_socket: PathBuf, stdout_socket: PathBuf) -> Result<()> { + log::info!( + "server: starting up. 
pid_file: {:?}, stdin_socket: {:?}, stdout_socket: {:?}", + pid_file, + stdin_socket, + stdout_socket + ); + write_pid_file(&pid_file) .with_context(|| format!("failed to write pid file: {:?}", &pid_file))?; @@ -202,10 +210,12 @@ pub fn execute_run(pid_file: PathBuf, stdin_socket: PathBuf, stdout_socket: Path let stdout_listener = UnixListener::bind(stdout_socket).context("failed to bind stdout socket")?; + log::debug!("server: starting gpui app"); gpui::App::headless().run(move |cx| { settings::init(cx); HeadlessProject::init(cx); + log::info!("server: gpui app started, initializing server"); let session = start_server(stdin_listener, stdout_listener, cx); let project = cx.new_model(|cx| { HeadlessProject::new(session, Arc::new(RealFs::new(Default::default(), None)), cx) @@ -343,8 +353,9 @@ fn write_pid_file(path: &Path) -> Result<()> { if path.exists() { std::fs::remove_file(path)?; } - - std::fs::write(path, std::process::id().to_string()).context("Failed to write PID file") + let pid = std::process::id().to_string(); + log::debug!("server: writing PID {} to file {:?}", pid, path); + std::fs::write(path, pid).context("Failed to write PID file") } async fn handle_io(mut reader: R, mut writer: W, socket_name: &str) -> Result<()> From 5bb18adbe86edbe368676f9ed3131a9982522b0c Mon Sep 17 00:00:00 2001 From: "Joseph T. Lyons" Date: Tue, 8 Oct 2024 08:13:29 -0400 Subject: [PATCH 15/35] Inform users they can ask us to reopen issues closed by the stale issue action (#18857) Release Notes: - N/A --- .github/workflows/close_stale_issues.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/close_stale_issues.yml b/.github/workflows/close_stale_issues.yml index 03c89b74b7..c38db5bfd7 100644 --- a/.github/workflows/close_stale_issues.yml +++ b/.github/workflows/close_stale_issues.yml @@ -17,7 +17,7 @@ jobs: We're working to clean up our issue tracker by closing older issues that might not be relevant anymore. 
Are you able to reproduce this issue in the latest version of Zed? If so, please let us know by commenting on this issue and we will keep it open; otherwise, we'll close it in 7 days. Feel free to open a new issue if you're seeing this message after the issue has been closed. Thanks for your help! - close-issue-message: "This issue was closed due to inactivity; feel free to open a new issue if you're still experiencing this problem!" + close-issue-message: "This issue was closed due to inactivity. If you're still experiencing this problem, feel free to ping a Zed team member to reopen this issue or open a new one." # We will increase `days-before-stale` to 365 on or after Jan 24th, # 2024. This date marks one year since migrating issues from # 'community' to 'zed' repository. The migration added activity to all From dd44168cadc8084a963b500617b9a555b87514bb Mon Sep 17 00:00:00 2001 From: Peter Tripp Date: Tue, 8 Oct 2024 10:20:20 -0400 Subject: [PATCH 16/35] dart: Improve indentation (#18845) Co-authored-by: Marshall Bowers --- assets/settings/default.json | 3 +++ extensions/dart/languages/dart/indents.scm | 21 +++------------------ 2 files changed, 6 insertions(+), 18 deletions(-) diff --git a/assets/settings/default.json b/assets/settings/default.json index 133ff9451d..e5cbcd2f9c 100644 --- a/assets/settings/default.json +++ b/assets/settings/default.json @@ -840,6 +840,9 @@ "allowed": true } }, + "Dart": { + "tab_size": 2 + }, "Elixir": { "language_servers": ["elixir-ls", "!next-ls", "!lexical", "..."] }, diff --git a/extensions/dart/languages/dart/indents.scm b/extensions/dart/languages/dart/indents.scm index 4d6f8c1cb7..112b414aa4 100644 --- a/extensions/dart/languages/dart/indents.scm +++ b/extensions/dart/languages/dart/indents.scm @@ -1,18 +1,3 @@ -(class_definition - "class" @context - name: (_) @name) @item - -(function_signature - name: (_) @name) @item - -(getter_signature - "get" @context - name: (_) @name) @item - -(setter_signature - "set" @context - 
name: (_) @name) @item - -(enum_declaration - "enum" @context - name: (_) @name) @item +(_ "[" "]" @end) @indent +(_ "{" "}" @end) @indent +(_ "(" ")" @end) @indent From 4c7a6f5e7fdabb2a5daa65fd5f1f5b87b1ab2f65 Mon Sep 17 00:00:00 2001 From: "Joseph T. Lyons" Date: Tue, 8 Oct 2024 10:30:04 -0400 Subject: [PATCH 17/35] Add is_via_ssh field to editor events (#18837) Release Notes: - N/A --- crates/client/src/telemetry.rs | 2 ++ crates/collab/src/api/events.rs | 2 ++ crates/editor/src/editor.rs | 4 +++- crates/telemetry_events/src/telemetry_events.rs | 2 ++ 4 files changed, 9 insertions(+), 1 deletion(-) diff --git a/crates/client/src/telemetry.rs b/crates/client/src/telemetry.rs index a8912c2f20..24d448aa02 100644 --- a/crates/client/src/telemetry.rs +++ b/crates/client/src/telemetry.rs @@ -364,6 +364,7 @@ impl Telemetry { operation: &'static str, copilot_enabled: bool, copilot_enabled_for_language: bool, + is_via_ssh: bool, ) { let event = Event::Editor(EditorEvent { file_extension, @@ -371,6 +372,7 @@ impl Telemetry { operation: operation.into(), copilot_enabled, copilot_enabled_for_language, + is_via_ssh, }); self.report_event(event) diff --git a/crates/collab/src/api/events.rs b/crates/collab/src/api/events.rs index bbfa69c0b8..f5cd1c00ea 100644 --- a/crates/collab/src/api/events.rs +++ b/crates/collab/src/api/events.rs @@ -679,6 +679,7 @@ pub struct EditorEventRow { minor: Option, patch: Option, checksum_matched: bool, + is_via_ssh: bool, } impl EditorEventRow { @@ -720,6 +721,7 @@ impl EditorEventRow { region_code: "".to_string(), city: "".to_string(), historical_event: false, + is_via_ssh: event.is_via_ssh, } } } diff --git a/crates/editor/src/editor.rs b/crates/editor/src/editor.rs index 62c2240828..d06f66184b 100644 --- a/crates/editor/src/editor.rs +++ b/crates/editor/src/editor.rs @@ -12481,13 +12481,15 @@ impl Editor { .settings_at(0, cx) .show_inline_completions; - let telemetry = project.read(cx).client().telemetry().clone(); + let project = 
project.read(cx); + let telemetry = project.client().telemetry().clone(); telemetry.report_editor_event( file_extension, vim_mode, operation, copilot_enabled, copilot_enabled_for_language, + project.is_via_ssh(), ) } diff --git a/crates/telemetry_events/src/telemetry_events.rs b/crates/telemetry_events/src/telemetry_events.rs index 56e94edb44..a42b8cc8b5 100644 --- a/crates/telemetry_events/src/telemetry_events.rs +++ b/crates/telemetry_events/src/telemetry_events.rs @@ -117,6 +117,8 @@ pub struct EditorEvent { pub copilot_enabled: bool, /// Whether the user has copilot enabled for the language of the file opened or saved pub copilot_enabled_for_language: bool, + /// Whether the client is editing a local file or a remote file via SSH + pub is_via_ssh: bool, } /// Deprecated since Zed v0.137.0 (2024-05-29). Replaced by InlineCompletionEvent. From d295c46433670118273c6c2b08e2416e13730ec9 Mon Sep 17 00:00:00 2001 From: "Joseph T. Lyons" Date: Tue, 8 Oct 2024 11:10:20 -0400 Subject: [PATCH 18/35] Remove deprecated copilot event (#18862) `CopilotEvent` was succeeded by `InlineCompletionEvent` 5 months ago. 
Release Notes: - N/A --- crates/collab/src/api/events.rs | 2 -- crates/telemetry_events/src/telemetry_events.rs | 10 ---------- 2 files changed, 12 deletions(-) diff --git a/crates/collab/src/api/events.rs b/crates/collab/src/api/events.rs index f5cd1c00ea..5d40c59daf 100644 --- a/crates/collab/src/api/events.rs +++ b/crates/collab/src/api/events.rs @@ -429,8 +429,6 @@ pub async fn post_events( country_code.clone(), checksum_matched, )), - // Needed for clients sending old copilot_event types - Event::Copilot(_) => {} Event::InlineCompletion(event) => { to_upload .inline_completion_events diff --git a/crates/telemetry_events/src/telemetry_events.rs b/crates/telemetry_events/src/telemetry_events.rs index a42b8cc8b5..32cb377ed6 100644 --- a/crates/telemetry_events/src/telemetry_events.rs +++ b/crates/telemetry_events/src/telemetry_events.rs @@ -91,7 +91,6 @@ impl Display for AssistantPhase { #[serde(tag = "type")] pub enum Event { Editor(EditorEvent), - Copilot(CopilotEvent), // Needed for clients sending old copilot_event types InlineCompletion(InlineCompletionEvent), Call(CallEvent), Assistant(AssistantEvent), @@ -121,15 +120,6 @@ pub struct EditorEvent { pub is_via_ssh: bool, } -/// Deprecated since Zed v0.137.0 (2024-05-29). Replaced by InlineCompletionEvent. -// Needed for clients sending old copilot_event types -#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] -pub struct CopilotEvent { - pub suggestion_id: Option, - pub suggestion_accepted: bool, - pub file_extension: Option, -} - #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct InlineCompletionEvent { /// Provider of the completion suggestion (e.g. 
copilot, supermaven) From ff7aa024eea8f89010c4c8afee943f10168ae05b Mon Sep 17 00:00:00 2001 From: Thorsten Ball Date: Tue, 8 Oct 2024 17:47:24 +0200 Subject: [PATCH 19/35] remote server on macOS: Sign with entitlements (#18863) This does two things: - Prevent feature unification - Sign the remote-server binary with the same entitlements we use for Zed because we saw this in crash report: Crashed Thread: 4 Dispatch queue: com.apple.root.user-initiated-qos Exception Type: EXC_BAD_ACCESS (SIGKILL (Code Signature Invalid)) Exception Codes: UNKNOWN_0x32 at 0x0000000103636644 Exception Codes: 0x0000000000000032, 0x0000000103636644 Termination Reason: Namespace CODESIGNING, Code 2 Invalid Page VM Region Info: 0x103636644 is in 0x103634000-0x103638000; bytes after start: 9796 bytes before end: 6587 REGION TYPE START - END [ VSIZE] PRT/MAX SHRMOD REGION DETAIL VM_ALLOCATE 103630000-103634000 [ 16K] r--/rwx SM=ZER ---> VM_ALLOCATE 103634000-103638000 [ 16K] r-x/rwx SM=COW VM_ALLOCATE 103638000-103640000 [ 32K] r--/rwx SM=ZER Which sounds a lot like codesigning/jit/entitlements stuff. Release Notes: - N/A Co-authored-by: Piotr Co-authored-by: Bennet --- script/bundle-mac | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/script/bundle-mac b/script/bundle-mac index 4c17791812..bc95e1dd6a 100755 --- a/script/bundle-mac +++ b/script/bundle-mac @@ -83,7 +83,10 @@ if [ "$local_arch" = true ]; then cargo build ${build_flag} --package zed --package cli --package remote_server else echo "Compiling zed binaries" - cargo build ${build_flag} --package zed --package cli --package remote_server --target aarch64-apple-darwin --target x86_64-apple-darwin + cargo build ${build_flag} --package zed --package cli --target aarch64-apple-darwin --target x86_64-apple-darwin + # Build remote_server in separate invocation to prevent feature unification from other crates + # from influencing dynamic libraries required by it. 
+ cargo build ${build_flag} --package remote_server --target aarch64-apple-darwin --target x86_64-apple-darwin fi echo "Creating application bundle" @@ -358,7 +361,7 @@ function sign_binary() { if [[ $can_code_sign = true ]]; then echo "Code signing executable $binary_path" - /usr/bin/codesign --deep --force --timestamp --options runtime --sign "$IDENTITY" "${binary_path}" -v + /usr/bin/codesign --deep --force --timestamp --options runtime --entitlements crates/zed/resources/zed.entitlements --sign "$IDENTITY" "${binary_path}" -v fi } From 4139e2de235d9cdad9ecc6421e8f2a00ba8ff976 Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Tue, 8 Oct 2024 08:58:28 -0700 Subject: [PATCH 20/35] In proposed change editors, apply diff hunks in batches (#18841) Release Notes: - N/A --- crates/editor/src/editor.rs | 14 +++++++--- crates/editor/src/hunk_diff.rs | 4 +-- crates/language/src/buffer.rs | 40 +++++++++++++++++++++-------- crates/language/src/buffer_tests.rs | 14 +++++----- 4 files changed, 50 insertions(+), 22 deletions(-) diff --git a/crates/editor/src/editor.rs b/crates/editor/src/editor.rs index d06f66184b..39bc2f45e5 100644 --- a/crates/editor/src/editor.rs +++ b/crates/editor/src/editor.rs @@ -6213,14 +6213,22 @@ impl Editor { fn apply_selected_diff_hunks(&mut self, _: &ApplyDiffHunk, cx: &mut ViewContext) { let snapshot = self.buffer.read(cx).snapshot(cx); let hunks = hunks_for_selections(&snapshot, &self.selections.disjoint_anchors()); + let mut ranges_by_buffer = HashMap::default(); self.transact(cx, |editor, cx| { for hunk in hunks { if let Some(buffer) = editor.buffer.read(cx).buffer(hunk.buffer_id) { - buffer.update(cx, |buffer, cx| { - buffer.merge_into_base(Some(hunk.buffer_range.to_offset(buffer)), cx); - }); + ranges_by_buffer + .entry(buffer.clone()) + .or_insert_with(Vec::new) + .push(hunk.buffer_range.to_offset(buffer.read(cx))); } } + + for (buffer, ranges) in ranges_by_buffer { + buffer.update(cx, |buffer, cx| { + buffer.merge_into_base(ranges, cx); + 
}); + } }); } diff --git a/crates/editor/src/hunk_diff.rs b/crates/editor/src/hunk_diff.rs index 7fbb07ae35..e495481323 100644 --- a/crates/editor/src/hunk_diff.rs +++ b/crates/editor/src/hunk_diff.rs @@ -350,7 +350,7 @@ impl Editor { .next()?; buffer.update(cx, |branch_buffer, cx| { - branch_buffer.merge_into_base(Some(range), cx); + branch_buffer.merge_into_base(vec![range], cx); }); None @@ -360,7 +360,7 @@ impl Editor { let buffers = self.buffer.read(cx).all_buffers(); for branch_buffer in buffers { branch_buffer.update(cx, |branch_buffer, cx| { - branch_buffer.merge_into_base(None, cx); + branch_buffer.merge_into_base(Vec::new(), cx); }); } } diff --git a/crates/language/src/buffer.rs b/crates/language/src/buffer.rs index 160e8b3ba9..59740509d3 100644 --- a/crates/language/src/buffer.rs +++ b/crates/language/src/buffer.rs @@ -827,25 +827,45 @@ impl Buffer { }) } - /// Applies all of the changes in this buffer that intersect the given `range` - /// to its base buffer. This buffer must be a branch buffer to call this method. - pub fn merge_into_base(&mut self, range: Option>, cx: &mut ModelContext) { + /// Applies all of the changes in this buffer that intersect any of the + /// given `ranges` to its base buffer. + /// + /// If `ranges` is empty, then all changes will be applied. This buffer must + /// be a branch buffer to call this method. 
+ pub fn merge_into_base(&mut self, ranges: Vec>, cx: &mut ModelContext) { let Some(base_buffer) = self.diff_base_buffer() else { debug_panic!("not a branch buffer"); return; }; + let mut ranges = if ranges.is_empty() { + &[0..usize::MAX] + } else { + ranges.as_slice() + } + .into_iter() + .peekable(); + let mut edits = Vec::new(); for edit in self.edits_since::(&base_buffer.read(cx).version()) { - if let Some(range) = &range { - if range.start > edit.new.end || edit.new.start > range.end { - continue; + let mut is_included = false; + while let Some(range) = ranges.peek() { + if range.end < edit.new.start { + ranges.next().unwrap(); + } else { + if range.start <= edit.new.end { + is_included = true; + } + break; } } - edits.push(( - edit.old.clone(), - self.text_for_range(edit.new.clone()).collect::(), - )); + + if is_included { + edits.push(( + edit.old.clone(), + self.text_for_range(edit.new.clone()).collect::(), + )); + } } let operation = base_buffer.update(cx, |base_buffer, cx| { diff --git a/crates/language/src/buffer_tests.rs b/crates/language/src/buffer_tests.rs index fe390d5510..83c35cbeca 100644 --- a/crates/language/src/buffer_tests.rs +++ b/crates/language/src/buffer_tests.rs @@ -2472,7 +2472,7 @@ fn test_branch_and_merge(cx: &mut TestAppContext) { // Merging the branch applies all of its changes to the base. branch_buffer.update(cx, |branch_buffer, cx| { - branch_buffer.merge_into_base(None, cx); + branch_buffer.merge_into_base(Vec::new(), cx); }); branch_buffer.update(cx, |branch_buffer, cx| { @@ -2494,7 +2494,7 @@ fn test_merge_into_base(cx: &mut TestAppContext) { // Make 3 edits, merge one into the base. 
branch.update(cx, |branch, cx| { branch.edit([(0..3, "ABC"), (7..9, "HI"), (11..11, "LMN")], None, cx); - branch.merge_into_base(Some(5..8), cx); + branch.merge_into_base(vec![5..8], cx); }); branch.read_with(cx, |branch, _| assert_eq!(branch.text(), "ABCdefgHIjkLMN")); @@ -2503,13 +2503,13 @@ fn test_merge_into_base(cx: &mut TestAppContext) { // Undo the one already-merged edit. Merge that into the base. branch.update(cx, |branch, cx| { branch.edit([(7..9, "hi")], None, cx); - branch.merge_into_base(Some(5..8), cx); + branch.merge_into_base(vec![5..8], cx); }); base.read_with(cx, |base, _| assert_eq!(base.text(), "abcdefghijk")); // Merge an insertion into the base. branch.update(cx, |branch, cx| { - branch.merge_into_base(Some(11..11), cx); + branch.merge_into_base(vec![11..11], cx); }); branch.read_with(cx, |branch, _| assert_eq!(branch.text(), "ABCdefghijkLMN")); @@ -2518,7 +2518,7 @@ fn test_merge_into_base(cx: &mut TestAppContext) { // Deleted the inserted text and merge that into the base. branch.update(cx, |branch, cx| { branch.edit([(11..14, "")], None, cx); - branch.merge_into_base(Some(10..11), cx); + branch.merge_into_base(vec![10..11], cx); }); base.read_with(cx, |base, _| assert_eq!(base.text(), "abcdefghijk")); @@ -2534,7 +2534,7 @@ fn test_undo_after_merge_into_base(cx: &mut TestAppContext) { // Make 2 edits, merge one into the base. branch.update(cx, |branch, cx| { branch.edit([(0..3, "ABC"), (7..9, "HI")], None, cx); - branch.merge_into_base(Some(7..7), cx); + branch.merge_into_base(vec![7..7], cx); }); base.read_with(cx, |base, _| assert_eq!(base.text(), "abcdefgHIjk")); branch.read_with(cx, |branch, _| assert_eq!(branch.text(), "ABCdefgHIjk")); @@ -2548,7 +2548,7 @@ fn test_undo_after_merge_into_base(cx: &mut TestAppContext) { // Merge that operation into the base again. 
branch.update(cx, |branch, cx| { - branch.merge_into_base(Some(7..7), cx); + branch.merge_into_base(vec![7..7], cx); }); base.read_with(cx, |base, _| assert_eq!(base.text(), "abcdefgHIjk")); branch.read_with(cx, |branch, _| assert_eq!(branch.text(), "ABCdefgHIjk")); From 3da1902e24e81d472ed4389ae1f0c25715361a5f Mon Sep 17 00:00:00 2001 From: Marshall Bowers Date: Tue, 8 Oct 2024 13:07:34 -0400 Subject: [PATCH 21/35] worktree: Depend on `rpc` with `test-support` feature in tests (#18866) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This PR updates the `worktree` crate to depend on `rpc` with the `test-support` feature flag when running tests. This fixes an issue I was seeing locally when trying to run tests in the `worktree` crate: ``` λ cargo test -p worktree -- test_repository_subfolder_git_status Compiling worktree v0.1.0 (/Users/maxdeviant/projects/zed/crates/worktree) error[E0432]: unresolved import `rpc::AnyProtoClient` --> crates/worktree/src/worktree.rs:39:18 | 39 | use rpc::{proto, AnyProtoClient}; | ^^^^^^^^^^^^^^ no `AnyProtoClient` in the root For more information about this error, try `rustc --explain E0432`. error: could not compile `worktree` (lib test) due to 1 previous error ``` Release Notes: - N/A --- crates/worktree/Cargo.toml | 1 + 1 file changed, 1 insertion(+) diff --git a/crates/worktree/Cargo.toml b/crates/worktree/Cargo.toml index 41221d7b6e..9437358e1a 100644 --- a/crates/worktree/Cargo.toml +++ b/crates/worktree/Cargo.toml @@ -56,4 +56,5 @@ gpui = { workspace = true, features = ["test-support"] } http_client.workspace = true pretty_assertions.workspace = true rand.workspace = true +rpc = { workspace = true, features = ["test-support"] } settings = { workspace = true, features = ["test-support"] } From 77bf2ad0f14b33c2be817b1094321578edbcdccf Mon Sep 17 00:00:00 2001 From: "Joseph T. 
Lyons" Date: Tue, 8 Oct 2024 13:13:40 -0400 Subject: [PATCH 22/35] Add is_via_ssh field to edit events (#18867) Release Notes: - N/A --- crates/client/src/telemetry.rs | 3 ++- crates/collab/src/api/events.rs | 2 ++ crates/editor/src/editor.rs | 9 +++++++-- crates/telemetry_events/src/telemetry_events.rs | 4 +++- crates/terminal_view/src/terminal_element.rs | 6 +++--- 5 files changed, 17 insertions(+), 7 deletions(-) diff --git a/crates/client/src/telemetry.rs b/crates/client/src/telemetry.rs index 24d448aa02..5a752bc05f 100644 --- a/crates/client/src/telemetry.rs +++ b/crates/client/src/telemetry.rs @@ -458,7 +458,7 @@ impl Telemetry { })) } - pub fn log_edit_event(self: &Arc, environment: &'static str) { + pub fn log_edit_event(self: &Arc, environment: &'static str, is_via_ssh: bool) { let mut state = self.state.lock(); let period_data = state.event_coalescer.log_event(environment); drop(state); @@ -467,6 +467,7 @@ impl Telemetry { let event = Event::Edit(EditEvent { duration: end.timestamp_millis() - start.timestamp_millis(), environment: environment.to_string(), + is_via_ssh, }); self.report_event(event); diff --git a/crates/collab/src/api/events.rs b/crates/collab/src/api/events.rs index 5d40c59daf..715f374494 100644 --- a/crates/collab/src/api/events.rs +++ b/crates/collab/src/api/events.rs @@ -1263,6 +1263,7 @@ pub struct EditEventRow { period_start: i64, period_end: i64, environment: String, + is_via_ssh: bool, } impl EditEventRow { @@ -1296,6 +1297,7 @@ impl EditEventRow { period_start: period_start.timestamp_millis(), period_end: period_end.timestamp_millis(), environment: event.environment, + is_via_ssh: event.is_via_ssh, } } } diff --git a/crates/editor/src/editor.rs b/crates/editor/src/editor.rs index 39bc2f45e5..2f7c8ce5d7 100644 --- a/crates/editor/src/editor.rs +++ b/crates/editor/src/editor.rs @@ -12146,9 +12146,14 @@ impl Editor { } let Some(project) = &self.project else { return }; - let telemetry = project.read(cx).client().telemetry().clone(); + 
let (telemetry, is_via_ssh) = { + let project = project.read(cx); + let telemetry = project.client().telemetry().clone(); + let is_via_ssh = project.is_via_ssh(); + (telemetry, is_via_ssh) + }; refresh_linked_ranges(self, cx); - telemetry.log_edit_event("editor"); + telemetry.log_edit_event("editor", is_via_ssh); } multi_buffer::Event::ExcerptsAdded { buffer, diff --git a/crates/telemetry_events/src/telemetry_events.rs b/crates/telemetry_events/src/telemetry_events.rs index 32cb377ed6..32d2bde5c6 100644 --- a/crates/telemetry_events/src/telemetry_events.rs +++ b/crates/telemetry_events/src/telemetry_events.rs @@ -116,7 +116,7 @@ pub struct EditorEvent { pub copilot_enabled: bool, /// Whether the user has copilot enabled for the language of the file opened or saved pub copilot_enabled_for_language: bool, - /// Whether the client is editing a local file or a remote file via SSH + /// Whether the client is opening/saving a local file or a remote file via SSH pub is_via_ssh: bool, } @@ -174,6 +174,8 @@ pub struct ActionEvent { pub struct EditEvent { pub duration: i64, pub environment: String, + /// Whether the edits occurred locally or remotely via SSH + pub is_via_ssh: bool, } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] diff --git a/crates/terminal_view/src/terminal_element.rs b/crates/terminal_view/src/terminal_element.rs index ce4be0e679..db87fd8dc2 100644 --- a/crates/terminal_view/src/terminal_element.rs +++ b/crates/terminal_view/src/terminal_element.rs @@ -1019,9 +1019,9 @@ impl InputHandler for TerminalInputHandler { self.workspace .update(cx, |this, cx| { cx.invalidate_character_coordinates(); - - let telemetry = this.project().read(cx).client().telemetry().clone(); - telemetry.log_edit_event("terminal"); + let project = this.project().read(cx); + let telemetry = project.client().telemetry().clone(); + telemetry.log_edit_event("terminal", project.is_via_ssh()); }) .ok(); } From 3f2de172aeb378813ddcdf96106bb48386b85ec6 Mon Sep 17 00:00:00 2001 
From: Marshall Bowers Date: Tue, 8 Oct 2024 13:16:17 -0400 Subject: [PATCH 23/35] collab: Set cached token values when initially creating lifetime usage records (#18865) This PR fixes an issue where we weren't setting the cached token fields when initially creating a lifetime usage record. Release Notes: - N/A --- crates/collab/src/llm/db/queries/usages.rs | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/crates/collab/src/llm/db/queries/usages.rs b/crates/collab/src/llm/db/queries/usages.rs index 128a42bc58..d703066913 100644 --- a/crates/collab/src/llm/db/queries/usages.rs +++ b/crates/collab/src/llm/db/queries/usages.rs @@ -382,6 +382,10 @@ impl LlmDatabase { user_id: ActiveValue::set(user_id), model_id: ActiveValue::set(model.id), input_tokens: ActiveValue::set(input_token_count as i64), + cache_creation_input_tokens: ActiveValue::set( + cache_creation_input_tokens as i64, + ), + cache_read_input_tokens: ActiveValue::set(cache_read_input_tokens as i64), output_tokens: ActiveValue::set(output_token_count as i64), ..Default::default() } From af9a59577032bb24c393928bcde84e5282dd0c74 Mon Sep 17 00:00:00 2001 From: Danilo Leal <67129314+danilo-leal@users.noreply.github.com> Date: Tue, 8 Oct 2024 19:32:52 +0200 Subject: [PATCH 24/35] ssh: Add tweaks to the UI (#18817) Follow up to https://github.com/zed-industries/zed/pull/18727 --- Release Notes: - N/A --- crates/recent_projects/src/dev_servers.rs | 39 +++-- crates/recent_projects/src/recent_projects.rs | 6 +- crates/recent_projects/src/ssh_connections.rs | 145 +++++++++++------- crates/title_bar/src/title_bar.rs | 14 +- 4 files changed, 122 insertions(+), 82 deletions(-) diff --git a/crates/recent_projects/src/dev_servers.rs b/crates/recent_projects/src/dev_servers.rs index fa1d511791..6171489872 100644 --- a/crates/recent_projects/src/dev_servers.rs +++ b/crates/recent_projects/src/dev_servers.rs @@ -556,23 +556,28 @@ impl DevServerProjects { .w_full() .border_l_1() 
.border_color(cx.theme().colors().border_variant) - .my_1() + .mb_1() .mx_1p5() - .py_0p5() - .px_3() + .pl_2() .child( List::new() .empty_message("No projects.") .children(ssh_connection.projects.iter().enumerate().map(|(pix, p)| { - self.render_ssh_project(ix, &ssh_connection, pix, p, cx) + v_flex().gap_0p5().child(self.render_ssh_project( + ix, + &ssh_connection, + pix, + p, + cx, + )) })) .child( - h_flex().child( + h_flex().mt_1().pl_1().child( Button::new("new-remote_project", "Open Folder…") - .icon(IconName::Plus) .size(ButtonSize::Default) - .style(ButtonStyle::Filled) .layer(ElevationIndex::ModalSurface) + .icon(IconName::Plus) + .icon_color(Color::Muted) .icon_position(IconPosition::Start) .on_click(cx.listener(move |this, _, cx| { this.create_ssh_project(ix, ssh_connection.clone(), cx); @@ -593,9 +598,15 @@ impl DevServerProjects { ) -> impl IntoElement { let project = project.clone(); let server = server.clone(); + ListItem::new(("remote-project", ix)) + .inset(true) .spacing(ui::ListItemSpacing::Sparse) - .start_slot(Icon::new(IconName::Folder).color(Color::Muted)) + .start_slot( + Icon::new(IconName::Folder) + .color(Color::Muted) + .size(IconSize::Small), + ) .child(Label::new(project.paths.join(", "))) .on_click(cx.listener(move |this, _, cx| { let Some(app_state) = this @@ -635,7 +646,7 @@ impl DevServerProjects { .on_click( cx.listener(move |this, _, cx| this.delete_ssh_project(server_ix, ix, cx)), ) - .tooltip(|cx| Tooltip::text("Delete remote project", cx)) + .tooltip(|cx| Tooltip::text("Delete Remote Project", cx)) .into_any_element(), )) } @@ -709,6 +720,7 @@ impl DevServerProjects { }) }); let theme = cx.theme(); + v_flex() .id("create-dev-server") .overflow_hidden() @@ -763,6 +775,7 @@ impl DevServerProjects { .child( h_flex() .bg(theme.colors().editor_background) + .rounded_b_md() .w_full() .map(|this| { if let Some(ssh_prompt) = ssh_prompt { @@ -773,9 +786,8 @@ impl DevServerProjects { h_flex() .p_2() .w_full() - .content_center() - 
.gap_2() - .child(h_flex().w_full()) + .justify_center() + .gap_1p5() .child( div().p_1().rounded_lg().bg(color).with_animation( "pulse-ssh-waiting-for-connection", @@ -788,8 +800,7 @@ impl DevServerProjects { .child( Label::new("Waiting for connection…") .size(LabelSize::Small), - ) - .child(h_flex().w_full()), + ), ) } }), diff --git a/crates/recent_projects/src/recent_projects.rs b/crates/recent_projects/src/recent_projects.rs index f73e7069d4..2285ed6287 100644 --- a/crates/recent_projects/src/recent_projects.rs +++ b/crates/recent_projects/src/recent_projects.rs @@ -566,7 +566,7 @@ impl PickerDelegate for RecentProjectsDelegate { .border_t_1() .py_2() .pr_2() - .border_color(cx.theme().colors().border) + .border_color(cx.theme().colors().border_variant) .justify_end() .gap_4() .child( @@ -574,7 +574,7 @@ impl PickerDelegate for RecentProjectsDelegate { .when_some(KeyBinding::for_action(&OpenRemote, cx), |button, key| { button.child(key) }) - .child(Label::new("Open remote folder…").color(Color::Muted)) + .child(Label::new("Open Remote Folder…").color(Color::Muted)) .on_click(|_, cx| cx.dispatch_action(OpenRemote.boxed_clone())), ) .child( @@ -583,7 +583,7 @@ impl PickerDelegate for RecentProjectsDelegate { KeyBinding::for_action(&workspace::Open, cx), |button, key| button.child(key), ) - .child(Label::new("Open local folder…").color(Color::Muted)) + .child(Label::new("Open Local Folder…").color(Color::Muted)) .on_click(|_, cx| cx.dispatch_action(workspace::Open.boxed_clone())), ) .into_any(), diff --git a/crates/recent_projects/src/ssh_connections.rs b/crates/recent_projects/src/ssh_connections.rs index 5862d48e81..2e41d56468 100644 --- a/crates/recent_projects/src/ssh_connections.rs +++ b/crates/recent_projects/src/ssh_connections.rs @@ -16,9 +16,9 @@ use schemars::JsonSchema; use serde::{Deserialize, Serialize}; use settings::{Settings, SettingsSources}; use ui::{ - div, h_flex, v_flex, ActiveTheme, ButtonCommon, Clickable, Color, FluentBuilder as _, Icon, - 
IconButton, IconName, IconSize, InteractiveElement, IntoElement, Label, LabelCommon, Styled, - StyledExt as _, Tooltip, ViewContext, VisualContext, WindowContext, + div, h_flex, prelude::*, v_flex, ActiveTheme, ButtonCommon, Clickable, Color, Icon, IconButton, + IconName, IconSize, InteractiveElement, IntoElement, Label, LabelCommon, Styled, Tooltip, + ViewContext, VisualContext, WindowContext, }; use workspace::{AppState, ModalView, Workspace}; @@ -84,6 +84,7 @@ pub struct SshPrompt { pub struct SshConnectionModal { pub(crate) prompt: View, } + impl SshPrompt { pub fn new(connection_options: &SshConnectionOptions, cx: &mut ViewContext) -> Self { let connection_string = connection_options.connection_string().into(); @@ -136,57 +137,70 @@ impl SshPrompt { } impl Render for SshPrompt { - fn render(&mut self, _: &mut ViewContext) -> impl IntoElement { + fn render(&mut self, cx: &mut ViewContext) -> impl IntoElement { + let cx = cx.window_context(); + let theme = cx.theme(); v_flex() - .w_full() .key_context("PasswordPrompt") - .justify_start() + .size_full() + .justify_center() .child( - v_flex() - .p_4() - .size_full() - .child( - h_flex() - .gap_2() - .justify_between() - .child(h_flex().w_full()) - .child(if self.error_message.is_some() { - Icon::new(IconName::XCircle) - .size(IconSize::Medium) - .color(Color::Error) - .into_any_element() - } else { - Icon::new(IconName::ArrowCircle) - .size(IconSize::Medium) - .with_animation( - "arrow-circle", - Animation::new(Duration::from_secs(2)).repeat(), - |icon, delta| { - icon.transform(Transformation::rotate(percentage( - delta, - ))) - }, - ) - .into_any_element() - }) - .child(Label::new(format!( - "Connecting to {}…", - self.connection_string - ))) - .child(h_flex().w_full()), - ) - .when_some(self.error_message.as_ref(), |el, error| { - el.child(Label::new(error.clone())) + h_flex() + .py_2() + .px_4() + .justify_center() + .child(if self.error_message.is_some() { + Icon::new(IconName::XCircle) + 
.size(IconSize::Medium) + .color(Color::Error) + .into_any_element() + } else { + Icon::new(IconName::ArrowCircle) + .size(IconSize::Medium) + .with_animation( + "arrow-circle", + Animation::new(Duration::from_secs(2)).repeat(), + |icon, delta| { + icon.transform(Transformation::rotate(percentage(delta))) + }, + ) + .into_any_element() }) - .when( - self.error_message.is_none() && self.status_message.is_some(), - |el| el.child(Label::new(self.status_message.clone().unwrap())), + .child( + div() + .ml_1() + .child(Label::new("SSH Connection").size(LabelSize::Small)), ) - .when_some(self.prompt.as_ref(), |el, prompt| { - el.child(Label::new(prompt.0.clone())) - .child(self.editor.clone()) - }), + .child( + div() + .when_some(self.error_message.as_ref(), |el, error| { + el.child(Label::new(format!("-{}", error)).size(LabelSize::Small)) + }) + .when( + self.error_message.is_none() && self.status_message.is_some(), + |el| { + el.child( + Label::new(format!( + "-{}", + self.status_message.clone().unwrap() + )) + .size(LabelSize::Small), + ) + }, + ), + ), ) + .child(div().when_some(self.prompt.as_ref(), |el, prompt| { + el.child( + h_flex() + .p_4() + .border_t_1() + .border_color(theme.colors().border_variant) + .font_buffer(cx) + .child(Label::new(prompt.0.clone())) + .child(self.editor.clone()), + ) + })) } } @@ -210,39 +224,54 @@ impl Render for SshConnectionModal { fn render(&mut self, cx: &mut ui::ViewContext) -> impl ui::IntoElement { let connection_string = self.prompt.read(cx).connection_string.clone(); let theme = cx.theme(); - let header_color = theme.colors().element_background; - let body_color = theme.colors().background; + let mut header_color = cx.theme().colors().text; + header_color.fade_out(0.96); + let body_color = theme.colors().editor_background; + v_flex() .elevation_3(cx) .on_action(cx.listener(Self::dismiss)) .on_action(cx.listener(Self::confirm)) - .w(px(400.)) + .w(px(500.)) + .border_1() + .border_color(theme.colors().border) .child( h_flex() 
+ .relative() .p_1() + .rounded_t_md() .border_b_1() .border_color(theme.colors().border) .bg(header_color) .justify_between() .child( - IconButton::new("ssh-connection-cancel", IconName::ArrowLeft) - .icon_size(IconSize::XSmall) - .on_click(|_, cx| cx.dispatch_action(menu::Cancel.boxed_clone())) - .tooltip(|cx| Tooltip::for_action("Back", &menu::Cancel, cx)), + div().absolute().left_0p5().top_0p5().child( + IconButton::new("ssh-connection-cancel", IconName::ArrowLeft) + .icon_size(IconSize::XSmall) + .on_click(|_, cx| cx.dispatch_action(menu::Cancel.boxed_clone())) + .tooltip(|cx| Tooltip::for_action("Back", &menu::Cancel, cx)), + ), ) .child( h_flex() + .w_full() .gap_2() + .justify_center() .child(Icon::new(IconName::Server).size(IconSize::XSmall)) .child( Label::new(connection_string) .size(ui::LabelSize::Small) .single_line(), ), - ) - .child(div()), + ), + ) + .child( + h_flex() + .rounded_b_md() + .bg(body_color) + .w_full() + .child(self.prompt.clone()), ) - .child(h_flex().bg(body_color).w_full().child(self.prompt.clone())) } } diff --git a/crates/title_bar/src/title_bar.rs b/crates/title_bar/src/title_bar.rs index da0179fd64..6fda4d594f 100644 --- a/crates/title_bar/src/title_bar.rs +++ b/crates/title_bar/src/title_bar.rs @@ -24,8 +24,8 @@ use smallvec::SmallVec; use std::sync::Arc; use theme::ActiveTheme; use ui::{ - h_flex, prelude::*, Avatar, Button, ButtonLike, ButtonStyle, ContextMenu, Icon, IconName, - Indicator, PopoverMenu, Tooltip, + h_flex, prelude::*, Avatar, Button, ButtonLike, ButtonStyle, ContextMenu, Icon, + IconButtonShape, IconName, IconSize, Indicator, PopoverMenu, Tooltip, }; use util::ResultExt; use vcs_menu::{BranchList, OpenRecent as ToggleVcsMenu}; @@ -274,18 +274,19 @@ impl TitleBar { }; let indicator = div() .absolute() - .w_1_4() - .h_1_4() + .size_1p5() .right_0p5() .bottom_0p5() - .p_1() - .rounded_2xl() + .rounded_full() .bg(indicator_color.color(cx)); Some( div() + .relative() .child( IconButton::new("ssh-server-icon", 
IconName::Server) + .icon_size(IconSize::Small) + .shape(IconButtonShape::Square) .tooltip(move |cx| { Tooltip::with_meta( "Remote Project", @@ -294,7 +295,6 @@ impl TitleBar { cx, ) }) - .shape(ui::IconButtonShape::Square) .on_click(|_, cx| { cx.dispatch_action(OpenRemote.boxed_clone()); }), From 5377674fc05ff151750767d19a0026f28c8df67a Mon Sep 17 00:00:00 2001 From: Marshall Bowers Date: Tue, 8 Oct 2024 13:54:11 -0400 Subject: [PATCH 25/35] csharp: Add support for triple-slash doc comments (#18869) This PR adds support for triple-slash (`///`) doc comments in C#. As requested by https://github.com/zed-industries/zed/issues/18766. Release Notes: - N/A --- extensions/csharp/languages/csharp/config.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/extensions/csharp/languages/csharp/config.toml b/extensions/csharp/languages/csharp/config.toml index fd0e13b6c2..8f07b45e3b 100644 --- a/extensions/csharp/languages/csharp/config.toml +++ b/extensions/csharp/languages/csharp/config.toml @@ -2,7 +2,7 @@ name = "CSharp" code_fence_block_name = "csharp" grammar = "c_sharp" path_suffixes = ["cs"] -line_comments = ["// "] +line_comments = ["// ", "/// "] autoclose_before = ";:.,=}])>" brackets = [ { start = "{", end = "}", close = true, newline = true }, From 7960468d8a6270f2b5aaca90f739cebf0cba864c Mon Sep 17 00:00:00 2001 From: Peter Tripp Date: Tue, 8 Oct 2024 14:25:29 -0400 Subject: [PATCH 26/35] dart: Bump to v0.1.1 (#18859) - Includes https://github.com/zed-industries/zed/pull/18845 --- Cargo.lock | 2 +- extensions/dart/Cargo.toml | 2 +- extensions/dart/extension.toml | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index dda10e2edc..1bae8d9850 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -14750,7 +14750,7 @@ dependencies = [ [[package]] name = "zed_dart" -version = "0.1.0" +version = "0.1.1" dependencies = [ "zed_extension_api 0.1.0", ] diff --git a/extensions/dart/Cargo.toml b/extensions/dart/Cargo.toml 
index d1b4b290ba..3d79e104c1 100644 --- a/extensions/dart/Cargo.toml +++ b/extensions/dart/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "zed_dart" -version = "0.1.0" +version = "0.1.1" edition = "2021" publish = false license = "Apache-2.0" diff --git a/extensions/dart/extension.toml b/extensions/dart/extension.toml index 48cd847618..5ea8c37c2f 100644 --- a/extensions/dart/extension.toml +++ b/extensions/dart/extension.toml @@ -1,7 +1,7 @@ id = "dart" name = "Dart" description = "Dart support." -version = "0.1.0" +version = "0.1.1" schema_version = 1 authors = ["Abdullah Alsigar ", "Flo ", "ybbond "] repository = "https://github.com/zed-industries/zed" From f33019c885dc8ad1e6cfbcd3f36d5383e14b491f Mon Sep 17 00:00:00 2001 From: Peter Tripp Date: Tue, 8 Oct 2024 16:13:56 -0400 Subject: [PATCH 27/35] Document extension bump process (#18872) Co-authored-by: Marshall Bowers --- extensions/README.md | 63 +++++++++++++++++++++++++++++++ script/language-extension-version | 29 ++++++++++++++ 2 files changed, 92 insertions(+) create mode 100644 extensions/README.md create mode 100755 script/language-extension-version diff --git a/extensions/README.md b/extensions/README.md new file mode 100644 index 0000000000..c677e0b909 --- /dev/null +++ b/extensions/README.md @@ -0,0 +1,63 @@ +# Zed Extensions + +This directory contains extensions for Zed that are largely maintained by the Zed team. They currently live in the Zed repository for ease of maintenance. + +If you are looking for the Zed extension registry, see the [`zed-industries/extensions`](https://github.com/zed-industries/extensions) repo. + +## Structure + +Currently, Zed includes support for a number of languages without requiring installing an extension. Those languages can be found under [`crates/languages/src`](https://github.com/zed-industries/zed/tree/main/crates/languages/src). + +Support for all other languages is done via extensions. 
This directory ([extensions/](https://github.com/zed-industries/zed/tree/main/extensions/)) contains a number of officially maintained extensions. These extensions use the same [zed_extension_api](https://docs.rs/zed_extension_api/latest/zed_extension_api/) available to all [Zed Extensions](https://zed.dev/extensions) for providing [language servers](https://zed.dev/docs/extensions/languages#language-servers), [tree-sitter grammars](https://zed.dev/docs/extensions/languages#grammar) and [tree-sitter queries](https://zed.dev/docs/extensions/languages#tree-sitter-queries). + +## Dev Extensions + +See the docs for [Developing an Extension Locally](https://zed.dev/docs/extensions/developing-extensions#developing-an-extension-locally) for how to work with one of these extensions. + +## Updating + +> [!NOTE] +> This update process is usually handled by Zed staff. +> Community contributors should just submit a PR (step 1) and we'll take it from there. + +The process for updating an extension in this directory has three parts. + +1. Create a PR with your changes. (Merge it) +2. Bump the extension version in: + + - extensions/{language_name}/extension.toml + - extensions/{language_name}/Cargo.toml + - Cargo.lock + + You can do this manually, or with a script: + + ```sh + # Output the current version for a given language + ./script/language-extension-version + + # Update the version in `extension.toml` and `Cargo.toml` and trigger a `cargo check` + ./script/language-extension-version + ``` + + Commit your changes to a branch, push a PR and merge it. + +3. Open a PR to [`zed-industries/extensions`](https://github.com/zed-industries/extensions) repo that updates the extension in question + +Edit [`extensions.toml`](https://github.com/zed-industries/extensions/blob/main/extensions.toml) in the extensions repo to reflect the new version you set above and update the submodule latest Zed commit. 
+ +```sh +# Go into your clone of the extensions repo +cd ../extensions + +# Update +git checkout main +git pull +just init-submodule extensions/zed + +# Update the Zed submodule +cd extensions/zed +git checkout main +git pull +cd - +git add extensions.toml extensions/zed +``` diff --git a/script/language-extension-version b/script/language-extension-version new file mode 100755 index 0000000000..fc5c448736 --- /dev/null +++ b/script/language-extension-version @@ -0,0 +1,29 @@ +#!/usr/bin/env bash + +set -euox pipefail + +if [ "$#" -lt 1 ]; then + echo "Usage: $0 [version]" + exit 1 +fi + +LANGUAGE=$1 +VERSION=${2:-} + +EXTENSION_DIR="extensions/$LANGUAGE" +EXTENSION_TOML="$EXTENSION_DIR/extension.toml" +CARGO_TOML="$EXTENSION_DIR/Cargo.toml" + +if [ ! -d "$EXTENSION_DIR" ]; then + echo "Directory $EXTENSION_DIR does not exist." + exit 1 +fi + +if [ -z "$VERSION" ]; then + grep -m 1 'version =' "$EXTENSION_TOML" | awk -F\" '{print $2}' + exit 0 +fi + +sed -i '' -e "s/^version = \".*\"/version = \"$VERSION\"/" "$EXTENSION_TOML" +sed -i '' -e "s/^version = \".*\"/version = \"$VERSION\"/" "$CARGO_TOML" +cargo check From 744891f15f47a69acff0183737b239cc0b10d242 Mon Sep 17 00:00:00 2001 From: "Joseph T. 
Lyons" Date: Tue, 8 Oct 2024 16:16:38 -0400 Subject: [PATCH 28/35] Provide a default value for `is_via_ssh` when it isn't sent via older clients (#18874) Release Notes: - N/A --- crates/telemetry_events/src/telemetry_events.rs | 2 ++ 1 file changed, 2 insertions(+) diff --git a/crates/telemetry_events/src/telemetry_events.rs b/crates/telemetry_events/src/telemetry_events.rs index 32d2bde5c6..14bcf985bf 100644 --- a/crates/telemetry_events/src/telemetry_events.rs +++ b/crates/telemetry_events/src/telemetry_events.rs @@ -117,6 +117,7 @@ pub struct EditorEvent { /// Whether the user has copilot enabled for the language of the file opened or saved pub copilot_enabled_for_language: bool, /// Whether the client is opening/saving a local file or a remote file via SSH + #[serde(default)] pub is_via_ssh: bool, } @@ -175,6 +176,7 @@ pub struct EditEvent { pub duration: i64, pub environment: String, /// Whether the edits occurred locally or remotely via SSH + #[serde(default)] pub is_via_ssh: bool, } From a95fb8f1f9cd8c9761fc638c4ff2552e2f3f973a Mon Sep 17 00:00:00 2001 From: Danilo Leal <67129314+danilo-leal@users.noreply.github.com> Date: Tue, 8 Oct 2024 23:37:04 +0200 Subject: [PATCH 29/35] ssh: Fix text wrapping in loading text (#18876) This PR adds `flex_wrap` to the loading text container to prevent the loading modal layout from breaking. 
Release Notes: - N/A --- crates/recent_projects/src/ssh_connections.rs | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/crates/recent_projects/src/ssh_connections.rs b/crates/recent_projects/src/ssh_connections.rs index 2e41d56468..ebac54385d 100644 --- a/crates/recent_projects/src/ssh_connections.rs +++ b/crates/recent_projects/src/ssh_connections.rs @@ -146,9 +146,9 @@ impl Render for SshPrompt { .justify_center() .child( h_flex() - .py_2() - .px_4() + .p_2() .justify_center() + .flex_wrap() .child(if self.error_message.is_some() { Icon::new(IconName::XCircle) .size(IconSize::Medium) @@ -173,6 +173,8 @@ impl Render for SshPrompt { ) .child( div() + .text_ellipsis() + .overflow_x_hidden() .when_some(self.error_message.as_ref(), |el, error| { el.child(Label::new(format!("-{}", error)).size(LabelSize::Small)) }) From f861479890068aa66c73cff4dfe9ed0f4b74b17f Mon Sep 17 00:00:00 2001 From: Marshall Bowers Date: Tue, 8 Oct 2024 18:29:38 -0400 Subject: [PATCH 30/35] collab: Update billing code for LLM usage billing (#18879) This PR reworks our existing billing code in preparation for charging based on LLM usage. We aren't yet exercising the new billing-related code outside of development. 
There are some noteworthy changes for our existing LLM usage tracking: - A new `monthly_usages` table has been added for tracking usage per-user, per-model, per-month - The per-month usage measures have been removed, in favor of the `monthly_usages` table - All of the per-month metrics in the Clickhouse rows have been changed from a rolling 30-day window to a calendar month Release Notes: - N/A --------- Co-authored-by: Antonio Scandurra Co-authored-by: Richard Co-authored-by: Max --- .../20241008155620_create_monthly_usages.sql | 13 + crates/collab/src/api/billing.rs | 103 +++++++- .../src/db/queries/billing_subscriptions.rs | 23 ++ crates/collab/src/lib.rs | 8 +- crates/collab/src/llm.rs | 16 +- crates/collab/src/llm/db.rs | 8 + crates/collab/src/llm/db/queries/usages.rs | 230 +++++++++++------- crates/collab/src/llm/db/tables.rs | 1 + .../collab/src/llm/db/tables/monthly_usage.rs | 22 ++ .../collab/src/llm/db/tables/usage_measure.rs | 4 - crates/collab/src/llm/db/tests/usage_tests.rs | 32 +-- crates/collab/src/llm/token.rs | 8 + crates/collab/src/main.rs | 25 +- crates/collab/src/rpc.rs | 27 +- crates/collab/src/tests/test_server.rs | 2 +- 15 files changed, 390 insertions(+), 132 deletions(-) create mode 100644 crates/collab/migrations_llm/20241008155620_create_monthly_usages.sql create mode 100644 crates/collab/src/llm/db/tables/monthly_usage.rs diff --git a/crates/collab/migrations_llm/20241008155620_create_monthly_usages.sql b/crates/collab/migrations_llm/20241008155620_create_monthly_usages.sql new file mode 100644 index 0000000000..2733552a3a --- /dev/null +++ b/crates/collab/migrations_llm/20241008155620_create_monthly_usages.sql @@ -0,0 +1,13 @@ +create table monthly_usages ( + id serial primary key, + user_id integer not null, + model_id integer not null references models (id) on delete cascade, + month integer not null, + year integer not null, + input_tokens bigint not null default 0, + cache_creation_input_tokens bigint not null default 0, + 
cache_read_input_tokens bigint not null default 0, + output_tokens bigint not null default 0 +); + +create unique index uix_monthly_usages_on_user_id_model_id_month_year on monthly_usages (user_id, model_id, month, year); diff --git a/crates/collab/src/api/billing.rs b/crates/collab/src/api/billing.rs index 23a16590ca..b70fc1e3ba 100644 --- a/crates/collab/src/api/billing.rs +++ b/crates/collab/src/api/billing.rs @@ -22,12 +22,15 @@ use stripe::{ }; use util::ResultExt; -use crate::db::billing_subscription::StripeSubscriptionStatus; +use crate::db::billing_subscription::{self, StripeSubscriptionStatus}; use crate::db::{ billing_customer, BillingSubscriptionId, CreateBillingCustomerParams, CreateBillingSubscriptionParams, CreateProcessedStripeEventParams, UpdateBillingCustomerParams, UpdateBillingSubscriptionParams, }; +use crate::llm::db::LlmDatabase; +use crate::llm::MONTHLY_SPENDING_LIMIT_IN_CENTS; +use crate::rpc::ResultExt as _; use crate::{AppState, Error, Result}; pub fn router() -> Router { @@ -79,7 +82,7 @@ async fn list_billing_subscriptions( .into_iter() .map(|subscription| BillingSubscriptionJson { id: subscription.id, - name: "Zed Pro".to_string(), + name: "Zed LLM Usage".to_string(), status: subscription.stripe_subscription_status, cancel_at: subscription.stripe_cancel_at.map(|cancel_at| { cancel_at @@ -117,7 +120,7 @@ async fn create_billing_subscription( let Some((stripe_client, stripe_price_id)) = app .stripe_client .clone() - .zip(app.config.stripe_price_id.clone()) + .zip(app.config.stripe_llm_usage_price_id.clone()) else { log::error!("failed to retrieve Stripe client or price ID"); Err(Error::http( @@ -150,7 +153,7 @@ async fn create_billing_subscription( params.client_reference_id = Some(user.github_login.as_str()); params.line_items = Some(vec![CreateCheckoutSessionLineItems { price: Some(stripe_price_id.to_string()), - quantity: Some(1), + quantity: Some(0), ..Default::default() }]); let success_url = format!("{}/account", 
app.config.zed_dot_dev_url()); @@ -631,3 +634,95 @@ async fn find_or_create_billing_customer( Ok(Some(billing_customer)) } + +const SYNC_LLM_USAGE_WITH_STRIPE_INTERVAL: Duration = Duration::from_secs(24 * 60 * 60); + +pub fn sync_llm_usage_with_stripe_periodically(app: Arc, llm_db: LlmDatabase) { + let Some(stripe_client) = app.stripe_client.clone() else { + log::warn!("failed to retrieve Stripe client"); + return; + }; + let Some(stripe_llm_usage_price_id) = app.config.stripe_llm_usage_price_id.clone() else { + log::warn!("failed to retrieve Stripe LLM usage price ID"); + return; + }; + + let executor = app.executor.clone(); + executor.spawn_detached({ + let executor = executor.clone(); + async move { + loop { + sync_with_stripe( + &app, + &llm_db, + &stripe_client, + stripe_llm_usage_price_id.clone(), + ) + .await + .trace_err(); + + executor.sleep(SYNC_LLM_USAGE_WITH_STRIPE_INTERVAL).await; + } + } + }); +} + +async fn sync_with_stripe( + app: &Arc, + llm_db: &LlmDatabase, + stripe_client: &stripe::Client, + stripe_llm_usage_price_id: Arc, +) -> anyhow::Result<()> { + let subscriptions = app.db.get_active_billing_subscriptions().await?; + + for (customer, subscription) in subscriptions { + update_stripe_subscription( + llm_db, + stripe_client, + &stripe_llm_usage_price_id, + customer, + subscription, + ) + .await + .log_err(); + } + + Ok(()) +} + +async fn update_stripe_subscription( + llm_db: &LlmDatabase, + stripe_client: &stripe::Client, + stripe_llm_usage_price_id: &Arc, + customer: billing_customer::Model, + subscription: billing_subscription::Model, +) -> Result<(), anyhow::Error> { + let monthly_spending = llm_db + .get_user_spending_for_month(customer.user_id, Utc::now()) + .await?; + let subscription_id = SubscriptionId::from_str(&subscription.stripe_subscription_id) + .context("failed to parse subscription ID")?; + + let monthly_spending_over_free_tier = + monthly_spending.saturating_sub(MONTHLY_SPENDING_LIMIT_IN_CENTS); + + let new_quantity = 
(monthly_spending_over_free_tier as f32 / 100.).ceil(); + Subscription::update( + stripe_client, + &subscription_id, + stripe::UpdateSubscription { + items: Some(vec![stripe::UpdateSubscriptionItems { + // TODO: Do we need to send up the `id` if a subscription item + // with this price already exists, or will Stripe take care of + // it? + id: None, + price: Some(stripe_llm_usage_price_id.to_string()), + quantity: Some(new_quantity as u64), + ..Default::default() + }]), + ..Default::default() + }, + ) + .await?; + Ok(()) +} diff --git a/crates/collab/src/db/queries/billing_subscriptions.rs b/crates/collab/src/db/queries/billing_subscriptions.rs index 7a7ba31f16..bcf093bebd 100644 --- a/crates/collab/src/db/queries/billing_subscriptions.rs +++ b/crates/collab/src/db/queries/billing_subscriptions.rs @@ -112,6 +112,29 @@ impl Database { .await } + pub async fn get_active_billing_subscriptions( + &self, + ) -> Result> { + self.transaction(|tx| async move { + let mut result = Vec::new(); + let mut rows = billing_subscription::Entity::find() + .inner_join(billing_customer::Entity) + .select_also(billing_customer::Entity) + .order_by_asc(billing_subscription::Column::Id) + .stream(&*tx) + .await?; + + while let Some(row) = rows.next().await { + if let (subscription, Some(customer)) = row? { + result.push((customer, subscription)); + } + } + + Ok(result) + }) + .await + } + /// Returns whether the user has an active billing subscription. pub async fn has_active_billing_subscription(&self, user_id: UserId) -> Result { Ok(self.count_active_billing_subscriptions(user_id).await? 
> 0) diff --git a/crates/collab/src/lib.rs b/crates/collab/src/lib.rs index 6c32023a97..ccecf80087 100644 --- a/crates/collab/src/lib.rs +++ b/crates/collab/src/lib.rs @@ -174,7 +174,7 @@ pub struct Config { pub slack_panics_webhook: Option, pub auto_join_channel_id: Option, pub stripe_api_key: Option, - pub stripe_price_id: Option>, + pub stripe_llm_usage_price_id: Option>, pub supermaven_admin_api_key: Option>, pub user_backfiller_github_access_token: Option>, } @@ -193,6 +193,10 @@ impl Config { } } + pub fn is_llm_billing_enabled(&self) -> bool { + self.stripe_llm_usage_price_id.is_some() + } + #[cfg(test)] pub fn test() -> Self { Self { @@ -231,7 +235,7 @@ impl Config { migrations_path: None, seed_path: None, stripe_api_key: None, - stripe_price_id: None, + stripe_llm_usage_price_id: None, supermaven_admin_api_key: None, user_backfiller_github_access_token: None, } diff --git a/crates/collab/src/llm.rs b/crates/collab/src/llm.rs index 9809985ac7..96413cf7c5 100644 --- a/crates/collab/src/llm.rs +++ b/crates/collab/src/llm.rs @@ -436,6 +436,9 @@ fn normalize_model_name(known_models: Vec, name: String) -> String { } } +/// The maximum monthly spending an individual user can reach before they have to pay. +pub const MONTHLY_SPENDING_LIMIT_IN_CENTS: usize = 5 * 100; + /// The maximum lifetime spending an individual user can reach before being cut off. /// /// Represented in cents. @@ -458,6 +461,18 @@ async fn check_usage_limit( ) .await?; + if state.config.is_llm_billing_enabled() { + if usage.spending_this_month >= MONTHLY_SPENDING_LIMIT_IN_CENTS { + if !claims.has_llm_subscription.unwrap_or(false) { + return Err(Error::http( + StatusCode::PAYMENT_REQUIRED, + "Maximum spending limit reached for this month.".to_string(), + )); + } + } + } + + // TODO: Remove this once we've rolled out monthly spending limits. 
if usage.lifetime_spending >= LIFETIME_SPENDING_LIMIT_IN_CENTS { return Err(Error::http( StatusCode::FORBIDDEN, @@ -505,7 +520,6 @@ async fn check_usage_limit( UsageMeasure::RequestsPerMinute => "requests_per_minute", UsageMeasure::TokensPerMinute => "tokens_per_minute", UsageMeasure::TokensPerDay => "tokens_per_day", - _ => "", }; if let Some(client) = state.clickhouse_client.as_ref() { diff --git a/crates/collab/src/llm/db.rs b/crates/collab/src/llm/db.rs index d46f51bb0d..996837116b 100644 --- a/crates/collab/src/llm/db.rs +++ b/crates/collab/src/llm/db.rs @@ -97,6 +97,14 @@ impl LlmDatabase { .ok_or_else(|| anyhow!("unknown model {provider:?}:{name}"))?) } + pub fn model_by_id(&self, id: ModelId) -> Result<&model::Model> { + Ok(self + .models + .values() + .find(|model| model.id == id) + .ok_or_else(|| anyhow!("no model for ID {id:?}"))?) + } + pub fn options(&self) -> &ConnectOptions { &self.options } diff --git a/crates/collab/src/llm/db/queries/usages.rs b/crates/collab/src/llm/db/queries/usages.rs index d703066913..1a98685bcd 100644 --- a/crates/collab/src/llm/db/queries/usages.rs +++ b/crates/collab/src/llm/db/queries/usages.rs @@ -1,5 +1,5 @@ use crate::db::UserId; -use chrono::Duration; +use chrono::{Datelike, Duration}; use futures::StreamExt as _; use rpc::LanguageModelProvider; use sea_orm::QuerySelect; @@ -140,6 +140,46 @@ impl LlmDatabase { .await } + pub async fn get_user_spending_for_month( + &self, + user_id: UserId, + now: DateTimeUtc, + ) -> Result { + self.transaction(|tx| async move { + let month = now.date_naive().month() as i32; + let year = now.date_naive().year(); + + let mut monthly_usages = monthly_usage::Entity::find() + .filter( + monthly_usage::Column::UserId + .eq(user_id) + .and(monthly_usage::Column::Month.eq(month)) + .and(monthly_usage::Column::Year.eq(year)), + ) + .stream(&*tx) + .await?; + let mut monthly_spending_in_cents = 0; + + while let Some(usage) = monthly_usages.next().await { + let usage = usage?; + let Ok(model) = 
self.model_by_id(usage.model_id) else { + continue; + }; + + monthly_spending_in_cents += calculate_spending( + model, + usage.input_tokens as usize, + usage.cache_creation_input_tokens as usize, + usage.cache_read_input_tokens as usize, + usage.output_tokens as usize, + ); + } + + Ok(monthly_spending_in_cents) + }) + .await + } + pub async fn get_usage( &self, user_id: UserId, @@ -162,6 +202,18 @@ impl LlmDatabase { .all(&*tx) .await?; + let month = now.date_naive().month() as i32; + let year = now.date_naive().year(); + let monthly_usage = monthly_usage::Entity::find() + .filter( + monthly_usage::Column::UserId + .eq(user_id) + .and(monthly_usage::Column::ModelId.eq(model.id)) + .and(monthly_usage::Column::Month.eq(month)) + .and(monthly_usage::Column::Year.eq(year)), + ) + .one(&*tx) + .await?; let lifetime_usage = lifetime_usage::Entity::find() .filter( lifetime_usage::Column::UserId @@ -177,28 +229,18 @@ impl LlmDatabase { self.get_usage_for_measure(&usages, now, UsageMeasure::TokensPerMinute)?; let tokens_this_day = self.get_usage_for_measure(&usages, now, UsageMeasure::TokensPerDay)?; - let input_tokens_this_month = - self.get_usage_for_measure(&usages, now, UsageMeasure::InputTokensPerMonth)?; - let cache_creation_input_tokens_this_month = self.get_usage_for_measure( - &usages, - now, - UsageMeasure::CacheCreationInputTokensPerMonth, - )?; - let cache_read_input_tokens_this_month = self.get_usage_for_measure( - &usages, - now, - UsageMeasure::CacheReadInputTokensPerMonth, - )?; - let output_tokens_this_month = - self.get_usage_for_measure(&usages, now, UsageMeasure::OutputTokensPerMonth)?; - let spending_this_month = calculate_spending( - model, - input_tokens_this_month, - cache_creation_input_tokens_this_month, - cache_read_input_tokens_this_month, - output_tokens_this_month, - ); - let lifetime_spending = if let Some(lifetime_usage) = lifetime_usage { + let spending_this_month = if let Some(monthly_usage) = &monthly_usage { + calculate_spending( + model, 
+ monthly_usage.input_tokens as usize, + monthly_usage.cache_creation_input_tokens as usize, + monthly_usage.cache_read_input_tokens as usize, + monthly_usage.output_tokens as usize, + ) + } else { + 0 + }; + let lifetime_spending = if let Some(lifetime_usage) = &lifetime_usage { calculate_spending( model, lifetime_usage.input_tokens as usize, @@ -214,10 +256,18 @@ impl LlmDatabase { requests_this_minute, tokens_this_minute, tokens_this_day, - input_tokens_this_month, - cache_creation_input_tokens_this_month, - cache_read_input_tokens_this_month, - output_tokens_this_month, + input_tokens_this_month: monthly_usage + .as_ref() + .map_or(0, |usage| usage.input_tokens as usize), + cache_creation_input_tokens_this_month: monthly_usage + .as_ref() + .map_or(0, |usage| usage.cache_creation_input_tokens as usize), + cache_read_input_tokens_this_month: monthly_usage + .as_ref() + .map_or(0, |usage| usage.cache_read_input_tokens as usize), + output_tokens_this_month: monthly_usage + .as_ref() + .map_or(0, |usage| usage.output_tokens as usize), spending_this_month, lifetime_spending, }) @@ -290,60 +340,68 @@ impl LlmDatabase { &tx, ) .await?; - let input_tokens_this_month = self - .update_usage_for_measure( - user_id, - is_staff, - model.id, - &usages, - UsageMeasure::InputTokensPerMonth, - now, - input_token_count, - &tx, - ) - .await?; - let cache_creation_input_tokens_this_month = self - .update_usage_for_measure( - user_id, - is_staff, - model.id, - &usages, - UsageMeasure::CacheCreationInputTokensPerMonth, - now, - cache_creation_input_tokens, - &tx, - ) - .await?; - let cache_read_input_tokens_this_month = self - .update_usage_for_measure( - user_id, - is_staff, - model.id, - &usages, - UsageMeasure::CacheReadInputTokensPerMonth, - now, - cache_read_input_tokens, - &tx, - ) - .await?; - let output_tokens_this_month = self - .update_usage_for_measure( - user_id, - is_staff, - model.id, - &usages, - UsageMeasure::OutputTokensPerMonth, - now, - output_token_count, - &tx, 
+ + let month = now.date_naive().month() as i32; + let year = now.date_naive().year(); + + // Update monthly usage + let monthly_usage = monthly_usage::Entity::find() + .filter( + monthly_usage::Column::UserId + .eq(user_id) + .and(monthly_usage::Column::ModelId.eq(model.id)) + .and(monthly_usage::Column::Month.eq(month)) + .and(monthly_usage::Column::Year.eq(year)), ) + .one(&*tx) .await?; + + let monthly_usage = match monthly_usage { + Some(usage) => { + monthly_usage::Entity::update(monthly_usage::ActiveModel { + id: ActiveValue::unchanged(usage.id), + input_tokens: ActiveValue::set( + usage.input_tokens + input_token_count as i64, + ), + cache_creation_input_tokens: ActiveValue::set( + usage.cache_creation_input_tokens + cache_creation_input_tokens as i64, + ), + cache_read_input_tokens: ActiveValue::set( + usage.cache_read_input_tokens + cache_read_input_tokens as i64, + ), + output_tokens: ActiveValue::set( + usage.output_tokens + output_token_count as i64, + ), + ..Default::default() + }) + .exec(&*tx) + .await? + } + None => { + monthly_usage::ActiveModel { + user_id: ActiveValue::set(user_id), + model_id: ActiveValue::set(model.id), + month: ActiveValue::set(month), + year: ActiveValue::set(year), + input_tokens: ActiveValue::set(input_token_count as i64), + cache_creation_input_tokens: ActiveValue::set( + cache_creation_input_tokens as i64, + ), + cache_read_input_tokens: ActiveValue::set(cache_read_input_tokens as i64), + output_tokens: ActiveValue::set(output_token_count as i64), + ..Default::default() + } + .insert(&*tx) + .await? 
+ } + }; + let spending_this_month = calculate_spending( model, - input_tokens_this_month, - cache_creation_input_tokens_this_month, - cache_read_input_tokens_this_month, - output_tokens_this_month, + monthly_usage.input_tokens as usize, + monthly_usage.cache_creation_input_tokens as usize, + monthly_usage.cache_read_input_tokens as usize, + monthly_usage.output_tokens as usize, ); // Update lifetime usage @@ -406,10 +464,11 @@ impl LlmDatabase { requests_this_minute, tokens_this_minute, tokens_this_day, - input_tokens_this_month, - cache_creation_input_tokens_this_month, - cache_read_input_tokens_this_month, - output_tokens_this_month, + input_tokens_this_month: monthly_usage.input_tokens as usize, + cache_creation_input_tokens_this_month: monthly_usage.cache_creation_input_tokens + as usize, + cache_read_input_tokens_this_month: monthly_usage.cache_read_input_tokens as usize, + output_tokens_this_month: monthly_usage.output_tokens as usize, spending_this_month, lifetime_spending, }) @@ -597,7 +656,6 @@ fn calculate_spending( const MINUTE_BUCKET_COUNT: usize = 12; const DAY_BUCKET_COUNT: usize = 48; -const MONTH_BUCKET_COUNT: usize = 30; impl UsageMeasure { fn bucket_count(&self) -> usize { @@ -605,10 +663,6 @@ impl UsageMeasure { UsageMeasure::RequestsPerMinute => MINUTE_BUCKET_COUNT, UsageMeasure::TokensPerMinute => MINUTE_BUCKET_COUNT, UsageMeasure::TokensPerDay => DAY_BUCKET_COUNT, - UsageMeasure::InputTokensPerMonth => MONTH_BUCKET_COUNT, - UsageMeasure::CacheCreationInputTokensPerMonth => MONTH_BUCKET_COUNT, - UsageMeasure::CacheReadInputTokensPerMonth => MONTH_BUCKET_COUNT, - UsageMeasure::OutputTokensPerMonth => MONTH_BUCKET_COUNT, } } @@ -617,10 +671,6 @@ impl UsageMeasure { UsageMeasure::RequestsPerMinute => Duration::minutes(1), UsageMeasure::TokensPerMinute => Duration::minutes(1), UsageMeasure::TokensPerDay => Duration::hours(24), - UsageMeasure::InputTokensPerMonth => Duration::days(30), - UsageMeasure::CacheCreationInputTokensPerMonth => 
Duration::days(30), - UsageMeasure::CacheReadInputTokensPerMonth => Duration::days(30), - UsageMeasure::OutputTokensPerMonth => Duration::days(30), } } diff --git a/crates/collab/src/llm/db/tables.rs b/crates/collab/src/llm/db/tables.rs index 4beefe2b5d..57aded70e9 100644 --- a/crates/collab/src/llm/db/tables.rs +++ b/crates/collab/src/llm/db/tables.rs @@ -1,5 +1,6 @@ pub mod lifetime_usage; pub mod model; +pub mod monthly_usage; pub mod provider; pub mod revoked_access_token; pub mod usage; diff --git a/crates/collab/src/llm/db/tables/monthly_usage.rs b/crates/collab/src/llm/db/tables/monthly_usage.rs new file mode 100644 index 0000000000..1e849f6aef --- /dev/null +++ b/crates/collab/src/llm/db/tables/monthly_usage.rs @@ -0,0 +1,22 @@ +use crate::{db::UserId, llm::db::ModelId}; +use sea_orm::entity::prelude::*; + +#[derive(Clone, Debug, PartialEq, DeriveEntityModel)] +#[sea_orm(table_name = "monthly_usages")] +pub struct Model { + #[sea_orm(primary_key)] + pub id: i32, + pub user_id: UserId, + pub model_id: ModelId, + pub month: i32, + pub year: i32, + pub input_tokens: i64, + pub cache_creation_input_tokens: i64, + pub cache_read_input_tokens: i64, + pub output_tokens: i64, +} + +#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)] +pub enum Relation {} + +impl ActiveModelBehavior for ActiveModel {} diff --git a/crates/collab/src/llm/db/tables/usage_measure.rs b/crates/collab/src/llm/db/tables/usage_measure.rs index 50c9501e54..b0e5b86644 100644 --- a/crates/collab/src/llm/db/tables/usage_measure.rs +++ b/crates/collab/src/llm/db/tables/usage_measure.rs @@ -9,10 +9,6 @@ pub enum UsageMeasure { RequestsPerMinute, TokensPerMinute, TokensPerDay, - InputTokensPerMonth, - CacheCreationInputTokensPerMonth, - CacheReadInputTokensPerMonth, - OutputTokensPerMonth, } #[derive(Clone, Debug, PartialEq, DeriveEntityModel)] diff --git a/crates/collab/src/llm/db/tests/usage_tests.rs b/crates/collab/src/llm/db/tests/usage_tests.rs index 97bcc20e44..8e8dc0ff6b 100644 --- 
a/crates/collab/src/llm/db/tests/usage_tests.rs +++ b/crates/collab/src/llm/db/tests/usage_tests.rs @@ -6,7 +6,7 @@ use crate::{ }, test_llm_db, }; -use chrono::{Duration, Utc}; +use chrono::{DateTime, Duration, Utc}; use pretty_assertions::assert_eq; use rpc::LanguageModelProvider; @@ -29,7 +29,10 @@ async fn test_tracking_usage(db: &mut LlmDatabase) { .await .unwrap(); - let t0 = Utc::now(); + // We're using a fixed datetime to prevent flakiness based on the clock. + let t0 = DateTime::parse_from_rfc3339("2024-08-08T22:46:33Z") + .unwrap() + .with_timezone(&Utc); let user_id = UserId::from_proto(123); let now = t0; @@ -134,23 +137,10 @@ async fn test_tracking_usage(db: &mut LlmDatabase) { } ); - let t2 = t0 + Duration::days(30); - let now = t2; - let usage = db.get_usage(user_id, provider, model, now).await.unwrap(); - assert_eq!( - usage, - Usage { - requests_this_minute: 0, - tokens_this_minute: 0, - tokens_this_day: 0, - input_tokens_this_month: 9000, - cache_creation_input_tokens_this_month: 0, - cache_read_input_tokens_this_month: 0, - output_tokens_this_month: 0, - spending_this_month: 0, - lifetime_spending: 0, - } - ); + // We're using a fixed datetime to prevent flakiness based on the clock. 
+ let now = DateTime::parse_from_rfc3339("2024-10-08T22:15:58Z") + .unwrap() + .with_timezone(&Utc); // Test cache creation input tokens db.record_usage(user_id, false, provider, model, 1000, 500, 0, 0, now) @@ -164,7 +154,7 @@ async fn test_tracking_usage(db: &mut LlmDatabase) { requests_this_minute: 1, tokens_this_minute: 1500, tokens_this_day: 1500, - input_tokens_this_month: 10000, + input_tokens_this_month: 1000, cache_creation_input_tokens_this_month: 500, cache_read_input_tokens_this_month: 0, output_tokens_this_month: 0, @@ -185,7 +175,7 @@ async fn test_tracking_usage(db: &mut LlmDatabase) { requests_this_minute: 2, tokens_this_minute: 2800, tokens_this_day: 2800, - input_tokens_this_month: 11000, + input_tokens_this_month: 2000, cache_creation_input_tokens_this_month: 500, cache_read_input_tokens_this_month: 300, output_tokens_this_month: 0, diff --git a/crates/collab/src/llm/token.rs b/crates/collab/src/llm/token.rs index e1e6c73326..2f6ce6ee28 100644 --- a/crates/collab/src/llm/token.rs +++ b/crates/collab/src/llm/token.rs @@ -22,6 +22,12 @@ pub struct LlmTokenClaims { pub is_staff: bool, #[serde(default)] pub has_llm_closed_beta_feature_flag: bool, + // This field is temporarily optional so it can be added + // in a backwards-compatible way. We can make it required + // once all of the LLM tokens have cycled (~1 hour after + // this change has been deployed). 
+ #[serde(default)] + pub has_llm_subscription: Option, pub plan: rpc::proto::Plan, } @@ -33,6 +39,7 @@ impl LlmTokenClaims { github_user_login: String, is_staff: bool, has_llm_closed_beta_feature_flag: bool, + has_llm_subscription: bool, plan: rpc::proto::Plan, config: &Config, ) -> Result { @@ -50,6 +57,7 @@ impl LlmTokenClaims { github_user_login: Some(github_user_login), is_staff, has_llm_closed_beta_feature_flag, + has_llm_subscription: Some(has_llm_subscription), plan, }; diff --git a/crates/collab/src/main.rs b/crates/collab/src/main.rs index 0e6bb67d13..bbbd4e562c 100644 --- a/crates/collab/src/main.rs +++ b/crates/collab/src/main.rs @@ -6,6 +6,7 @@ use axum::{ routing::get, Extension, Router, }; +use collab::api::billing::sync_llm_usage_with_stripe_periodically; use collab::api::CloudflareIpCountryHeader; use collab::llm::{db::LlmDatabase, log_usage_periodically}; use collab::migrations::run_database_migrations; @@ -29,7 +30,7 @@ use tower_http::trace::TraceLayer; use tracing_subscriber::{ filter::EnvFilter, fmt::format::JsonFields, util::SubscriberInitExt, Layer, }; -use util::ResultExt as _; +use util::{maybe, ResultExt as _}; const VERSION: &str = env!("CARGO_PKG_VERSION"); const REVISION: Option<&'static str> = option_env!("GITHUB_SHA"); @@ -136,6 +137,28 @@ async fn main() -> Result<()> { fetch_extensions_from_blob_store_periodically(state.clone()); spawn_user_backfiller(state.clone()); + let llm_db = maybe!(async { + let database_url = state + .config + .llm_database_url + .as_ref() + .ok_or_else(|| anyhow!("missing LLM_DATABASE_URL"))?; + let max_connections = state + .config + .llm_database_max_connections + .ok_or_else(|| anyhow!("missing LLM_DATABASE_MAX_CONNECTIONS"))?; + + let mut db_options = db::ConnectOptions::new(database_url); + db_options.max_connections(max_connections); + LlmDatabase::new(db_options, state.executor.clone()).await + }) + .await + .trace_err(); + + if let Some(llm_db) = llm_db { + 
sync_llm_usage_with_stripe_periodically(state.clone(), llm_db); + } + app = app .merge(collab::api::events::router()) .merge(collab::api::extensions::router()) diff --git a/crates/collab/src/rpc.rs b/crates/collab/src/rpc.rs index 27c95a5b44..e66c306c50 100644 --- a/crates/collab/src/rpc.rs +++ b/crates/collab/src/rpc.rs @@ -191,16 +191,26 @@ impl Session { } } - pub async fn current_plan(&self, db: MutexGuard<'_, DbHandle>) -> anyhow::Result { + pub async fn has_llm_subscription( + &self, + db: &MutexGuard<'_, DbHandle>, + ) -> anyhow::Result { if self.is_staff() { - return Ok(proto::Plan::ZedPro); + return Ok(true); } let Some(user_id) = self.user_id() else { - return Ok(proto::Plan::Free); + return Ok(false); }; - if db.has_active_billing_subscription(user_id).await? { + Ok(db.has_active_billing_subscription(user_id).await?) + } + + pub async fn current_plan( + &self, + _db: &MutexGuard<'_, DbHandle>, + ) -> anyhow::Result { + if self.is_staff() { Ok(proto::Plan::ZedPro) } else { Ok(proto::Plan::Free) @@ -3471,7 +3481,7 @@ fn should_auto_subscribe_to_channels(version: ZedVersion) -> bool { } async fn update_user_plan(_user_id: UserId, session: &Session) -> Result<()> { - let plan = session.current_plan(session.db().await).await?; + let plan = session.current_plan(&session.db().await).await?; session .peer @@ -4471,7 +4481,7 @@ async fn count_language_model_tokens( }; authorize_access_to_legacy_llm_endpoints(&session).await?; - let rate_limit: Box = match session.current_plan(session.db().await).await? { + let rate_limit: Box = match session.current_plan(&session.db().await).await? 
{ proto::Plan::ZedPro => Box::new(ZedProCountLanguageModelTokensRateLimit), proto::Plan::Free => Box::new(FreeCountLanguageModelTokensRateLimit), }; @@ -4592,7 +4602,7 @@ async fn compute_embeddings( let api_key = api_key.context("no OpenAI API key configured on the server")?; authorize_access_to_legacy_llm_endpoints(&session).await?; - let rate_limit: Box = match session.current_plan(session.db().await).await? { + let rate_limit: Box = match session.current_plan(&session.db().await).await? { proto::Plan::ZedPro => Box::new(ZedProComputeEmbeddingsRateLimit), proto::Plan::Free => Box::new(FreeComputeEmbeddingsRateLimit), }; @@ -4915,7 +4925,8 @@ async fn get_llm_api_token( user.github_login.clone(), session.is_staff(), has_llm_closed_beta_feature_flag, - session.current_plan(db).await?, + session.has_llm_subscription(&db).await?, + session.current_plan(&db).await?, &session.app_state.config, )?; response.send(proto::GetLlmTokenResponse { token })?; diff --git a/crates/collab/src/tests/test_server.rs b/crates/collab/src/tests/test_server.rs index 8d2396eef0..55bc279c8e 100644 --- a/crates/collab/src/tests/test_server.rs +++ b/crates/collab/src/tests/test_server.rs @@ -677,7 +677,7 @@ impl TestServer { migrations_path: None, seed_path: None, stripe_api_key: None, - stripe_price_id: None, + stripe_llm_usage_price_id: None, supermaven_admin_api_key: None, user_backfiller_github_access_token: None, }, From 801210cd50d4cdbae81e56ba333e39d559197209 Mon Sep 17 00:00:00 2001 From: Marshall Bowers Date: Tue, 8 Oct 2024 20:03:33 -0400 Subject: [PATCH 31/35] collab: Make `github_user_login` required in `LlmTokenClaims` (#18882) This PR makes the `github_user_login` field required in the `LlmTokenClaims`. We previously added this in https://github.com/zed-industries/zed/pull/16316 and made it optional for backwards-compatibility. It's been more than long enough for all of the previous LLM tokens to have expired, so we can now make the field required. 
Release Notes: - N/A --- crates/collab/src/llm/token.rs | 10 ++-------- 1 file changed, 2 insertions(+), 8 deletions(-) diff --git a/crates/collab/src/llm/token.rs b/crates/collab/src/llm/token.rs index 2f6ce6ee28..6098a1cd13 100644 --- a/crates/collab/src/llm/token.rs +++ b/crates/collab/src/llm/token.rs @@ -13,14 +13,8 @@ pub struct LlmTokenClaims { pub exp: u64, pub jti: String, pub user_id: u64, - // This field is temporarily optional so it can be added - // in a backwards-compatible way. We can make it required - // once all of the LLM tokens have cycled (~1 hour after - // this change has been deployed). - #[serde(default)] - pub github_user_login: Option, + pub github_user_login: String, pub is_staff: bool, - #[serde(default)] pub has_llm_closed_beta_feature_flag: bool, // This field is temporarily optional so it can be added // in a backwards-compatible way. We can make it required @@ -54,7 +48,7 @@ impl LlmTokenClaims { exp: (now + LLM_TOKEN_LIFETIME).timestamp() as u64, jti: uuid::Uuid::new_v4().to_string(), user_id: user_id.to_proto(), - github_user_login: Some(github_user_login), + github_user_login, is_staff, has_llm_closed_beta_feature_flag, has_llm_subscription: Some(has_llm_subscription), From b0a9005163e16f1ed8f7a4204e6d8ab6ac908a02 Mon Sep 17 00:00:00 2001 From: Marshall Bowers Date: Tue, 8 Oct 2024 20:25:07 -0400 Subject: [PATCH 32/35] client: Send telemetry events with `Content-Type: application/json` (#18886) This PR updates the telemetry events sent to collab to use `Content-Type: application/json` instead of `Content-Type: text/plain`. The POST bodies are JSON, so `application/json` is the correct MIME type. I suspect the `text/plain` is a remnant from when the events were still going through Vercel. 
Release Notes: - N/A --- crates/client/src/telemetry.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/crates/client/src/telemetry.rs b/crates/client/src/telemetry.rs index 5a752bc05f..1dc3cedee2 100644 --- a/crates/client/src/telemetry.rs +++ b/crates/client/src/telemetry.rs @@ -653,7 +653,7 @@ impl Telemetry { .build_zed_api_url("/telemetry/events", &[])? .as_ref(), ) - .header("Content-Type", "text/plain") + .header("Content-Type", "application/json") .header("x-zed-checksum", checksum) .body(json_bytes.into()); From e35114815257f800acf9020aeb82470b8cec53b3 Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Tue, 8 Oct 2024 17:30:42 -0700 Subject: [PATCH 33/35] Fix bugs in expanding diff hunk (#18885) Release Notes: - Fixed an issue where diff hunks at the boundaries of multi buffer excerpts could not be expanded --- crates/editor/src/editor_tests.rs | 54 ++++++++++++++++++++ crates/editor/src/hunk_diff.rs | 84 ++++++++++++------------------- crates/language/src/buffer.rs | 1 - 3 files changed, 85 insertions(+), 54 deletions(-) diff --git a/crates/editor/src/editor_tests.rs b/crates/editor/src/editor_tests.rs index 249d0a4746..092afb394e 100644 --- a/crates/editor/src/editor_tests.rs +++ b/crates/editor/src/editor_tests.rs @@ -11715,6 +11715,60 @@ async fn test_toggle_diff_expand_in_multi_buffer(cx: &mut gpui::TestAppContext) ); } +#[gpui::test] +async fn test_expand_diff_hunk_at_excerpt_boundary(cx: &mut gpui::TestAppContext) { + init_test(cx, |_| {}); + + let base = "aaa\nbbb\nccc\nddd\neee\nfff\nggg\n"; + let text = "aaa\nBBB\nBB2\nccc\nDDD\nEEE\nfff\nggg\n"; + + let buffer = cx.new_model(|cx| { + let mut buffer = Buffer::local(text.to_string(), cx); + buffer.set_diff_base(Some(base.into()), cx); + buffer + }); + + let multi_buffer = cx.new_model(|cx| { + let mut multibuffer = MultiBuffer::new(ReadWrite); + multibuffer.push_excerpts( + buffer.clone(), + [ + ExcerptRange { + context: Point::new(0, 0)..Point::new(2, 0), + primary: None, + }, + 
ExcerptRange { + context: Point::new(5, 0)..Point::new(7, 0), + primary: None, + }, + ], + cx, + ); + multibuffer + }); + + let editor = cx.add_window(|cx| Editor::new(EditorMode::Full, multi_buffer, None, true, cx)); + let mut cx = EditorTestContext::for_editor(editor, cx).await; + cx.run_until_parked(); + + cx.update_editor(|editor, cx| editor.expand_all_hunk_diffs(&Default::default(), cx)); + cx.executor().run_until_parked(); + + cx.assert_diff_hunks( + " + aaa + - bbb + + BBB + + - ddd + - eee + + EEE + fff + " + .unindent(), + ); +} + #[gpui::test] async fn test_edits_around_expanded_insertion_hunks( executor: BackgroundExecutor, diff --git a/crates/editor/src/hunk_diff.rs b/crates/editor/src/hunk_diff.rs index e495481323..c3a539b3d4 100644 --- a/crates/editor/src/hunk_diff.rs +++ b/crates/editor/src/hunk_diff.rs @@ -209,19 +209,20 @@ impl Editor { retain }); - for remaining_hunk in hunks_to_toggle { - let remaining_hunk_point_range = - Point::new(remaining_hunk.row_range.start.0, 0) - ..Point::new(remaining_hunk.row_range.end.0, 0); + for hunk in hunks_to_toggle { + let remaining_hunk_point_range = Point::new(hunk.row_range.start.0, 0) + ..Point::new(hunk.row_range.end.0, 0); + let hunk_start = snapshot + .buffer_snapshot + .anchor_before(remaining_hunk_point_range.start); + let hunk_end = snapshot + .buffer_snapshot + .anchor_in_excerpt(hunk_start.excerpt_id, hunk.buffer_range.end) + .unwrap(); hunks_to_expand.push(HoveredHunk { - status: hunk_status(&remaining_hunk), - multi_buffer_range: snapshot - .buffer_snapshot - .anchor_before(remaining_hunk_point_range.start) - ..snapshot - .buffer_snapshot - .anchor_after(remaining_hunk_point_range.end), - diff_base_byte_range: remaining_hunk.diff_base_byte_range.clone(), + status: hunk_status(&hunk), + multi_buffer_range: hunk_start..hunk_end, + diff_base_byte_range: hunk.diff_base_byte_range.clone(), }); } @@ -246,33 +247,22 @@ impl Editor { hunk: &HoveredHunk, cx: &mut ViewContext<'_, Editor>, ) -> Option<()> { - 
let multi_buffer_snapshot = self.buffer().read(cx).snapshot(cx); + let buffer = self.buffer.clone(); + let multi_buffer_snapshot = buffer.read(cx).snapshot(cx); let hunk_range = hunk.multi_buffer_range.clone(); - let hunk_point_range = hunk_range.to_point(&multi_buffer_snapshot); - - let buffer = self.buffer().clone(); - let snapshot = self.snapshot(cx); let (diff_base_buffer, deleted_text_lines) = buffer.update(cx, |buffer, cx| { - let hunk = buffer_diff_hunk(&snapshot.buffer_snapshot, hunk_point_range.clone())?; - let mut buffer_ranges = buffer.range_to_buffer_ranges(hunk_point_range, cx); - if buffer_ranges.len() == 1 { - let (buffer, _, _) = buffer_ranges.pop()?; - let diff_base_buffer = diff_base_buffer - .or_else(|| self.current_diff_base_buffer(&buffer, cx)) - .or_else(|| create_diff_base_buffer(&buffer, cx))?; - let buffer = buffer.read(cx); - let deleted_text_lines = buffer.diff_base().map(|diff_base| { - let diff_start_row = diff_base - .offset_to_point(hunk.diff_base_byte_range.start) - .row; - let diff_end_row = diff_base.offset_to_point(hunk.diff_base_byte_range.end).row; - - diff_end_row - diff_start_row - })?; - Some((diff_base_buffer, deleted_text_lines)) - } else { - None - } + let buffer = buffer.buffer(hunk_range.start.buffer_id?)?; + let diff_base_buffer = diff_base_buffer + .or_else(|| self.current_diff_base_buffer(&buffer, cx)) + .or_else(|| create_diff_base_buffer(&buffer, cx))?; + let deleted_text_lines = buffer.read(cx).diff_base().map(|diff_base| { + let diff_start_row = diff_base + .offset_to_point(hunk.diff_base_byte_range.start) + .row; + let diff_end_row = diff_base.offset_to_point(hunk.diff_base_byte_range.end).row; + diff_end_row - diff_start_row + })?; + Some((diff_base_buffer, deleted_text_lines)) })?; let block_insert_index = match self.expanded_hunks.hunks.binary_search_by(|probe| { @@ -1128,21 +1118,6 @@ fn editor_with_deleted_text( (editor_height, editor) } -fn buffer_diff_hunk( - buffer_snapshot: &MultiBufferSnapshot, - 
row_range: Range, -) -> Option { - let mut hunks = buffer_snapshot.git_diff_hunks_in_range( - MultiBufferRow(row_range.start.row)..MultiBufferRow(row_range.end.row), - ); - let hunk = hunks.next()?; - let second_hunk = hunks.next(); - if second_hunk.is_none() { - return Some(hunk); - } - None -} - impl DisplayDiffHunk { pub fn start_display_row(&self) -> DisplayRow { match self { @@ -1209,7 +1184,10 @@ pub fn diff_hunk_to_display( let hunk_end_point = Point::new(hunk_end_row.0, 0); let multi_buffer_start = snapshot.buffer_snapshot.anchor_before(hunk_start_point); - let multi_buffer_end = snapshot.buffer_snapshot.anchor_after(hunk_end_point); + let multi_buffer_end = snapshot + .buffer_snapshot + .anchor_in_excerpt(multi_buffer_start.excerpt_id, hunk.buffer_range.end) + .unwrap(); let end = hunk_end_point.to_display_point(snapshot).row(); DisplayDiffHunk::Unfolded { diff --git a/crates/language/src/buffer.rs b/crates/language/src/buffer.rs index 59740509d3..13d07caa88 100644 --- a/crates/language/src/buffer.rs +++ b/crates/language/src/buffer.rs @@ -1156,7 +1156,6 @@ impl Buffer { this.non_text_state_update_count += 1; if let Some(BufferDiffBase::PastBufferVersion { rope, .. }) = &mut this.diff_base { *rope = diff_base_rope; - cx.emit(BufferEvent::DiffBaseChanged); } cx.emit(BufferEvent::DiffUpdated); }) From 5d5c4b667741246c521c161cda792b9ad29fc102 Mon Sep 17 00:00:00 2001 From: Mikayla Maki Date: Wed, 9 Oct 2024 01:07:18 -0700 Subject: [PATCH 34/35] Revert http client changes (#18892) These proved to be too unstable. Will restore these changes once the issues have been fixed. 
Release Notes: - N/A --- Cargo.lock | 613 ++++++------------ Cargo.toml | 15 +- crates/client/Cargo.toml | 3 +- crates/client/src/client.rs | 27 +- crates/collab/Cargo.toml | 2 +- crates/collab/src/llm.rs | 12 +- crates/collab/src/rpc.rs | 6 +- crates/evals/Cargo.toml | 2 +- crates/evals/src/eval.rs | 7 +- crates/extension/Cargo.toml | 3 +- crates/extension/src/extension_builder.rs | 2 +- crates/extension/src/extension_store_test.rs | 52 +- crates/extension_cli/Cargo.toml | 2 +- crates/extension_cli/src/main.rs | 9 +- crates/gpui/src/app.rs | 4 - crates/http_client/Cargo.toml | 4 +- crates/http_client/src/http_client.rs | 56 +- .../Cargo.toml | 15 +- crates/isahc_http_client/LICENSE-APACHE | 1 + .../src/isahc_http_client.rs | 105 +++ crates/live_kit_server/Cargo.toml | 2 +- crates/reqwest_client/Cargo.toml | 31 - crates/reqwest_client/LICENSE-GPL | 1 - crates/reqwest_client/examples/client.rs | 16 - crates/reqwest_client/src/reqwest_client.rs | 259 -------- crates/semantic_index/Cargo.toml | 2 +- crates/semantic_index/examples/index.rs | 7 +- crates/storybook/Cargo.toml | 2 +- crates/storybook/src/storybook.rs | 12 +- crates/ureq_client/LICENSE-GPL | 1 - crates/ureq_client/examples/client.rs | 24 - crates/ureq_client/src/ureq_client.rs | 200 ------ crates/vim/Cargo.toml | 2 +- crates/zed/Cargo.toml | 2 +- crates/zed/src/main.rs | 10 +- 35 files changed, 386 insertions(+), 1125 deletions(-) rename crates/{ureq_client => isahc_http_client}/Cargo.toml (53%) create mode 120000 crates/isahc_http_client/LICENSE-APACHE create mode 100644 crates/isahc_http_client/src/isahc_http_client.rs delete mode 100644 crates/reqwest_client/Cargo.toml delete mode 120000 crates/reqwest_client/LICENSE-GPL delete mode 100644 crates/reqwest_client/examples/client.rs delete mode 100644 crates/reqwest_client/src/reqwest_client.rs delete mode 120000 crates/ureq_client/LICENSE-GPL delete mode 100644 crates/ureq_client/examples/client.rs delete mode 100644 crates/ureq_client/src/ureq_client.rs 
diff --git a/Cargo.lock b/Cargo.lock index 1bae8d9850..cc588fcb22 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -846,8 +846,8 @@ dependencies = [ "chrono", "futures-util", "http-types", - "hyper 0.14.30", - "hyper-rustls 0.24.2", + "hyper", + "hyper-rustls", "serde", "serde_json", "serde_path_to_error", @@ -880,14 +880,15 @@ checksum = "8b75356056920673b02621b35afd0f7dda9306d03c79a30f5c56c44cf256e3de" [[package]] name = "async-tls" -version = "0.13.0" +version = "0.12.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b2ae3c9eba89d472a0e4fe1dea433df78fbbe63d2b764addaf2ba3a6bde89a5e" +checksum = "cfeefd0ca297cbbb3bd34fd6b228401c2a5177038257afd751bc29f0a2da4795" dependencies = [ "futures-core", "futures-io", - "rustls 0.21.12", + "rustls 0.20.9", "rustls-pemfile 1.0.4", + "webpki", "webpki-roots 0.22.6", ] @@ -904,9 +905,9 @@ dependencies = [ [[package]] name = "async-tungstenite" -version = "0.28.0" +version = "0.23.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "90e661b6cb0a6eb34d02c520b052daa3aa9ac0cc02495c9d066bbce13ead132b" +checksum = "a1e9efbe14612da0a19fb983059a0b621e9cf6225d7018ecab4f9988215540dc" dependencies = [ "async-std", "async-tls", @@ -914,7 +915,7 @@ dependencies = [ "futures-util", "log", "pin-project-lite", - "tungstenite 0.24.0", + "tungstenite 0.20.1", ] [[package]] @@ -1063,7 +1064,7 @@ dependencies = [ "fastrand 2.1.1", "hex", "http 0.2.12", - "ring", + "ring 0.17.8", "time", "tokio", "tracing", @@ -1232,7 +1233,7 @@ dependencies = [ "once_cell", "p256", "percent-encoding", - "ring", + "ring 0.17.8", "sha2", "subtle", "time", @@ -1335,13 +1336,13 @@ dependencies = [ "aws-smithy-types", "bytes 1.7.1", "fastrand 2.1.1", - "h2 0.3.26", + "h2", "http 0.2.12", "http-body 0.4.6", "http-body 1.0.1", "httparse", - "hyper 0.14.30", - "hyper-rustls 0.24.2", + "hyper", + "hyper-rustls", "once_cell", "pin-project-lite", "pin-utils", @@ -1431,7 +1432,7 @@ dependencies = [ "headers", "http 
0.2.12", "http-body 0.4.6", - "hyper 0.14.30", + "hyper", "itoa", "matchit", "memchr", @@ -1444,7 +1445,7 @@ dependencies = [ "serde_path_to_error", "serde_urlencoded", "sha1", - "sync_wrapper 0.1.2", + "sync_wrapper", "tokio", "tokio-tungstenite 0.20.1", "tower", @@ -1583,7 +1584,7 @@ dependencies = [ "proc-macro2", "quote", "regex", - "rustc-hash 1.1.0", + "rustc-hash", "shlex", "syn 2.0.76", ] @@ -1603,7 +1604,7 @@ dependencies = [ "proc-macro2", "quote", "regex", - "rustc-hash 1.1.0", + "rustc-hash", "shlex", "syn 2.0.76", ] @@ -2099,6 +2100,12 @@ version = "0.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "37b2a672a2cb129a2e41c10b1224bb368f9f37a2b16b612598138befd7b37eb5" +[[package]] +name = "castaway" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a2698f953def977c68f935bb0dfa959375ad4638570e969e2f1e9f433cbf1af6" + [[package]] name = "cbc" version = "0.1.2" @@ -2356,8 +2363,8 @@ dependencies = [ "clickhouse-derive", "clickhouse-rs-cityhash-sys", "futures 0.3.30", - "hyper 0.14.30", - "hyper-tls 0.5.0", + "hyper", + "hyper-tls", "lz4", "sealed", "serde", @@ -2395,7 +2402,6 @@ dependencies = [ "anyhow", "async-native-tls", "async-recursion 0.3.2", - "async-tls", "async-tungstenite", "chrono", "clock", @@ -2413,6 +2419,8 @@ dependencies = [ "rand 0.8.5", "release_channel", "rpc", + "rustls 0.20.9", + "rustls-native-certs 0.8.0", "schemars", "serde", "serde_json", @@ -2559,8 +2567,9 @@ dependencies = [ "headless", "hex", "http_client", - "hyper 0.14.30", + "hyper", "indoc", + "isahc_http_client", "jsonwebtoken", "language", "language_model", @@ -2584,8 +2593,7 @@ dependencies = [ "release_channel", "remote", "remote_server", - "reqwest 0.11.27", - "reqwest_client", + "reqwest", "rpc", "rustc-demangle", "scrypt", @@ -2669,7 +2677,7 @@ dependencies = [ name = "collections" version = "0.1.0" dependencies = [ - "rustc-hash 1.1.0", + "rustc-hash", ] [[package]] @@ -2987,7 +2995,7 @@ 
dependencies = [ "log", "rangemap", "rayon", - "rustc-hash 1.1.0", + "rustc-hash", "rustybuzz", "self_cell", "swash", @@ -3077,7 +3085,7 @@ dependencies = [ "hashbrown 0.14.5", "log", "regalloc2", - "rustc-hash 1.1.0", + "rustc-hash", "smallvec", "target-lexicon", ] @@ -3333,6 +3341,36 @@ dependencies = [ "windows-sys 0.59.0", ] +[[package]] +name = "curl" +version = "0.4.46" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1e2161dd6eba090ff1594084e95fd67aeccf04382ffea77999ea94ed42ec67b6" +dependencies = [ + "curl-sys", + "libc", + "openssl-probe", + "openssl-sys", + "schannel", + "socket2 0.5.7", + "windows-sys 0.52.0", +] + +[[package]] +name = "curl-sys" +version = "0.4.74+curl-8.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8af10b986114528fcdc4b63b6f5f021b7057618411046a4de2ba0f0149a097bf" +dependencies = [ + "cc", + "libc", + "libz-sys", + "openssl-sys", + "pkg-config", + "vcpkg", + "windows-sys 0.52.0", +] + [[package]] name = "cursor-icon" version = "1.1.0" @@ -3994,6 +4032,7 @@ dependencies = [ "git", "gpui", "http_client", + "isahc_http_client", "language", "languages", "node_runtime", @@ -4004,7 +4043,6 @@ dependencies = [ "serde_json", "settings", "smol", - "ureq_client", ] [[package]] @@ -4089,6 +4127,7 @@ dependencies = [ "gpui", "http_client", "indexed_docs", + "isahc_http_client", "language", "log", "lsp", @@ -4097,7 +4136,6 @@ dependencies = [ "paths", "project", "release_channel", - "reqwest_client", "schemars", "semantic_version", "serde", @@ -4110,7 +4148,6 @@ dependencies = [ "tokio", "toml 0.8.19", "ui", - "ureq_client", "url", "util", "wasm-encoder 0.215.0", @@ -4130,9 +4167,9 @@ dependencies = [ "env_logger", "extension", "fs", + "isahc_http_client", "language", "log", - "reqwest_client", "rpc", "serde", "serde_json", @@ -4379,7 +4416,7 @@ dependencies = [ "futures-core", "futures-sink", "nanorand", - "spin", + "spin 0.9.8", ] [[package]] @@ -5145,25 +5182,6 @@ dependencies = 
[ "tracing", ] -[[package]] -name = "h2" -version = "0.4.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "524e8ac6999421f49a846c2d4411f337e53497d8ec55d67753beffa43c5d9205" -dependencies = [ - "atomic-waker", - "bytes 1.7.1", - "fnv", - "futures-core", - "futures-sink", - "http 1.1.0", - "indexmap 2.4.0", - "slab", - "tokio", - "tokio-util", - "tracing", -] - [[package]] name = "half" version = "2.4.1" @@ -5544,10 +5562,8 @@ dependencies = [ "anyhow", "derive_more", "futures 0.3.30", - "http 1.1.0", + "http 0.2.12", "log", - "rustls 0.21.12", - "rustls-native-certs 0.8.0", "serde", "serde_json", "smol", @@ -5588,7 +5604,7 @@ dependencies = [ "futures-channel", "futures-core", "futures-util", - "h2 0.3.26", + "h2", "http 0.2.12", "http-body 0.4.6", "httparse", @@ -5602,26 +5618,6 @@ dependencies = [ "want", ] -[[package]] -name = "hyper" -version = "1.4.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "50dfd22e0e76d0f662d429a5f80fcaf3855009297eab6a0a9f8543834744ba05" -dependencies = [ - "bytes 1.7.1", - "futures-channel", - "futures-util", - "h2 0.4.6", - "http 1.1.0", - "http-body 1.0.1", - "httparse", - "itoa", - "pin-project-lite", - "smallvec", - "tokio", - "want", -] - [[package]] name = "hyper-rustls" version = "0.24.2" @@ -5630,29 +5626,12 @@ checksum = "ec3efd23720e2049821a693cbc7e65ea87c72f1c58ff2f9522ff332b1491e590" dependencies = [ "futures-util", "http 0.2.12", - "hyper 0.14.30", + "hyper", "log", "rustls 0.21.12", "rustls-native-certs 0.6.3", "tokio", - "tokio-rustls 0.24.1", -] - -[[package]] -name = "hyper-rustls" -version = "0.27.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "08afdbb5c31130e3034af566421053ab03787c640246a446327f550d11bcb333" -dependencies = [ - "futures-util", - "http 1.1.0", - "hyper 1.4.1", - "hyper-util", - "rustls 0.23.13", - "rustls-pki-types", - "tokio", - "tokio-rustls 0.26.0", - "tower-service", + "tokio-rustls", ] [[package]] @@ 
-5662,47 +5641,12 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d6183ddfa99b85da61a140bea0efc93fdf56ceaa041b37d553518030827f9905" dependencies = [ "bytes 1.7.1", - "hyper 0.14.30", + "hyper", "native-tls", "tokio", "tokio-native-tls", ] -[[package]] -name = "hyper-tls" -version = "0.6.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "70206fc6890eaca9fde8a0bf71caa2ddfc9fe045ac9e5c70df101a7dbde866e0" -dependencies = [ - "bytes 1.7.1", - "http-body-util", - "hyper 1.4.1", - "hyper-util", - "native-tls", - "tokio", - "tokio-native-tls", - "tower-service", -] - -[[package]] -name = "hyper-util" -version = "0.1.9" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "41296eb09f183ac68eec06e03cdbea2e759633d4067b2f6552fc2e009bcad08b" -dependencies = [ - "bytes 1.7.1", - "futures-channel", - "futures-util", - "http 1.1.0", - "http-body 1.0.1", - "hyper 1.4.1", - "pin-project-lite", - "socket2 0.5.7", - "tokio", - "tower-service", - "tracing", -] - [[package]] name = "iana-time-zone" version = "0.1.60" @@ -6070,6 +6014,44 @@ version = "1.70.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7943c866cc5cd64cbc25b2e01621d07fa8eb2a1a23160ee81ce38704e97b8ecf" +[[package]] +name = "isahc" +version = "1.7.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "334e04b4d781f436dc315cb1e7515bd96826426345d498149e4bde36b67f8ee9" +dependencies = [ + "async-channel 1.9.0", + "castaway", + "crossbeam-utils", + "curl", + "curl-sys", + "encoding_rs", + "event-listener 2.5.3", + "futures-lite 1.13.0", + "http 0.2.12", + "log", + "mime", + "once_cell", + "polling 2.8.0", + "slab", + "sluice", + "tracing", + "tracing-futures", + "url", + "waker-fn", +] + +[[package]] +name = "isahc_http_client" +version = "0.1.0" +dependencies = [ + "anyhow", + "futures 0.3.30", + "http_client", + "isahc", + "util", +] + [[package]] name = "itertools" version = 
"0.10.5" @@ -6174,7 +6156,7 @@ dependencies = [ "base64 0.21.7", "js-sys", "pem", - "ring", + "ring 0.17.8", "serde", "serde_json", "simple_asn1", @@ -6424,7 +6406,7 @@ version = "1.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bbd2bcb4c963f2ddae06a2efc7e9f3591312473c50c6685e1f298068316e66fe" dependencies = [ - "spin", + "spin 0.9.8", ] [[package]] @@ -6619,7 +6601,7 @@ dependencies = [ "prost", "prost-build", "prost-types", - "reqwest 0.12.8", + "reqwest", "serde", ] @@ -7103,7 +7085,7 @@ dependencies = [ "hexf-parse", "indexmap 2.4.0", "log", - "rustc-hash 1.1.0", + "rustc-hash", "spirv", "termcolor", "thiserror", @@ -8751,54 +8733,6 @@ dependencies = [ "zed_actions", ] -[[package]] -name = "quinn" -version = "0.11.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8c7c5fdde3cdae7203427dc4f0a68fe0ed09833edc525a03456b153b79828684" -dependencies = [ - "bytes 1.7.1", - "pin-project-lite", - "quinn-proto", - "quinn-udp", - "rustc-hash 2.0.0", - "rustls 0.23.13", - "socket2 0.5.7", - "thiserror", - "tokio", - "tracing", -] - -[[package]] -name = "quinn-proto" -version = "0.11.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fadfaed2cd7f389d0161bb73eeb07b7b78f8691047a6f3e73caaeae55310a4a6" -dependencies = [ - "bytes 1.7.1", - "rand 0.8.5", - "ring", - "rustc-hash 2.0.0", - "rustls 0.23.13", - "slab", - "thiserror", - "tinyvec", - "tracing", -] - -[[package]] -name = "quinn-udp" -version = "0.5.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4fe68c2e9e1a1234e218683dbdf9f9dfcb094113c5ac2b938dfcb9bab4c4140b" -dependencies = [ - "libc", - "once_cell", - "socket2 0.5.7", - "tracing", - "windows-sys 0.59.0", -] - [[package]] name = "quote" version = "1.0.37" @@ -9075,7 +9009,7 @@ checksum = "ad156d539c879b7a24a363a2016d77961786e71f48f2e2fc8302a92abd2429a6" dependencies = [ "hashbrown 0.13.2", "log", - "rustc-hash 1.1.0", + "rustc-hash", 
"slice-group-by", "smallvec", ] @@ -9254,11 +9188,11 @@ dependencies = [ "encoding_rs", "futures-core", "futures-util", - "h2 0.3.26", + "h2", "http 0.2.12", "http-body 0.4.6", - "hyper 0.14.30", - "hyper-tls 0.5.0", + "hyper", + "hyper-tls", "ipnet", "js-sys", "log", @@ -9271,8 +9205,8 @@ dependencies = [ "serde", "serde_json", "serde_urlencoded", - "sync_wrapper 0.1.2", - "system-configuration 0.5.1", + "sync_wrapper", + "system-configuration", "tokio", "tokio-native-tls", "tower-service", @@ -9283,68 +9217,6 @@ dependencies = [ "winreg 0.50.0", ] -[[package]] -name = "reqwest" -version = "0.12.8" -source = "git+https://github.com/zed-industries/reqwest.git?rev=fd110f6998da16bbca97b6dddda9be7827c50e29#fd110f6998da16bbca97b6dddda9be7827c50e29" -dependencies = [ - "base64 0.22.1", - "bytes 1.7.1", - "encoding_rs", - "futures-core", - "futures-util", - "h2 0.4.6", - "http 1.1.0", - "http-body 1.0.1", - "http-body-util", - "hyper 1.4.1", - "hyper-rustls 0.27.3", - "hyper-tls 0.6.0", - "hyper-util", - "ipnet", - "js-sys", - "log", - "mime", - "native-tls", - "once_cell", - "percent-encoding", - "pin-project-lite", - "quinn", - "rustls 0.23.13", - "rustls-pemfile 2.1.3", - "rustls-pki-types", - "serde", - "serde_json", - "serde_urlencoded", - "sync_wrapper 1.0.1", - "system-configuration 0.6.1", - "tokio", - "tokio-native-tls", - "tokio-rustls 0.26.0", - "tokio-util", - "tower-service", - "url", - "wasm-bindgen", - "wasm-bindgen-futures", - "wasm-streams", - "web-sys", - "windows-registry", -] - -[[package]] -name = "reqwest_client" -version = "0.1.0" -dependencies = [ - "anyhow", - "bytes 1.7.1", - "futures 0.3.30", - "http_client", - "reqwest 0.12.8", - "serde", - "smol", - "tokio", -] - [[package]] name = "resvg" version = "0.41.0" @@ -9393,6 +9265,21 @@ dependencies = [ "util", ] +[[package]] +name = "ring" +version = "0.16.20" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"3053cf52e236a3ed746dfc745aa9cacf1b791d846bdaf412f60a8d7d6e17c8fc" +dependencies = [ + "cc", + "libc", + "once_cell", + "spin 0.5.2", + "untrusted 0.7.1", + "web-sys", + "winapi", +] + [[package]] name = "ring" version = "0.17.8" @@ -9403,8 +9290,8 @@ dependencies = [ "cfg-if", "getrandom 0.2.15", "libc", - "spin", - "untrusted", + "spin 0.9.8", + "untrusted 0.9.0", "windows-sys 0.52.0", ] @@ -9560,7 +9447,7 @@ dependencies = [ "futures 0.3.30", "glob", "rand 0.8.5", - "ring", + "ring 0.17.8", "serde", "serde_json", "shellexpand 3.1.0", @@ -9632,12 +9519,6 @@ version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "08d43f7aa6b08d49f382cde6a7982047c3426db949b1424bc4b7ec9ae12c6ce2" -[[package]] -name = "rustc-hash" -version = "2.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "583034fd73374156e66797ed8e5b0d5690409c9226b22d87cb7f19821c05d152" - [[package]] name = "rustc_version" version = "0.4.1" @@ -9687,6 +9568,18 @@ dependencies = [ "rustix 0.38.35", ] +[[package]] +name = "rustls" +version = "0.20.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1b80e3dec595989ea8510028f30c408a4630db12c9cbb8de34203b89d6577e99" +dependencies = [ + "log", + "ring 0.16.20", + "sct", + "webpki", +] + [[package]] name = "rustls" version = "0.21.12" @@ -9694,25 +9587,11 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3f56a14d1f48b391359b22f731fd4bd7e43c97f3c50eee276f3aa09c94784d3e" dependencies = [ "log", - "ring", - "rustls-webpki 0.101.7", + "ring 0.17.8", + "rustls-webpki", "sct", ] -[[package]] -name = "rustls" -version = "0.23.13" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f2dabaac7466917e566adb06783a81ca48944c6898a1b08b9374106dd671f4c8" -dependencies = [ - "once_cell", - "ring", - "rustls-pki-types", - "rustls-webpki 0.102.8", - "subtle", - "zeroize", -] - [[package]] name = "rustls-native-certs" version = 
"0.6.3" @@ -9769,19 +9648,8 @@ version = "0.101.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8b6275d1ee7a1cd780b64aca7726599a1dbc893b1e64144529e55c3c2f745765" dependencies = [ - "ring", - "untrusted", -] - -[[package]] -name = "rustls-webpki" -version = "0.102.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "64ca1bc8749bd4cf37b5ce386cc146580777b4e8572c7b97baf22c83f444bee9" -dependencies = [ - "ring", - "rustls-pki-types", - "untrusted", + "ring 0.17.8", + "untrusted 0.9.0", ] [[package]] @@ -9895,8 +9763,8 @@ version = "0.7.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "da046153aa2352493d6cb7da4b6e5c0c057d8a1d0a9aa8560baffdd945acd414" dependencies = [ - "ring", - "untrusted", + "ring 0.17.8", + "untrusted 0.9.0", ] [[package]] @@ -10092,6 +9960,7 @@ dependencies = [ "gpui", "heed", "http_client", + "isahc_http_client", "language", "language_model", "languages", @@ -10109,7 +9978,6 @@ dependencies = [ "tree-sitter", "ui", "unindent", - "ureq_client", "util", "workspace", "worktree", @@ -10542,6 +10410,17 @@ dependencies = [ "version_check", ] +[[package]] +name = "sluice" +version = "0.5.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6d7400c0eff44aa2fcb5e31a5f24ba9716ed90138769e4977a2ba6014ae63eb5" +dependencies = [ + "async-channel 1.9.0", + "futures-core", + "futures-io", +] + [[package]] name = "smallvec" version = "1.13.2" @@ -10656,6 +10535,12 @@ dependencies = [ "smallvec", ] +[[package]] +name = "spin" +version = "0.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6e63cff320ae2c57904679ba7cb63280a3dc4613885beafb148ee7bf9aa9042d" + [[package]] name = "spin" version = "0.9.8" @@ -10986,6 +10871,7 @@ dependencies = [ "fuzzy", "gpui", "indoc", + "isahc_http_client", "language", "log", "menu", @@ -10999,7 +10885,6 @@ dependencies = [ "theme", "title_bar", "ui", - "ureq_client", ] [[package]] @@ 
-11287,15 +11172,6 @@ version = "0.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2047c6ded9c721764247e62cd3b03c09ffc529b2ba5b10ec482ae507a4a70160" -[[package]] -name = "sync_wrapper" -version = "1.0.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a7065abeca94b6a8a577f9bd45aa0867a2238b74e8eb67cf10d492bc39351394" -dependencies = [ - "futures-core", -] - [[package]] name = "synchronoise" version = "1.0.1" @@ -11336,18 +11212,7 @@ checksum = "ba3a3adc5c275d719af8cb4272ea1c4a6d668a777f37e115f6d11ddbc1c8e0e7" dependencies = [ "bitflags 1.3.2", "core-foundation 0.9.4", - "system-configuration-sys 0.5.0", -] - -[[package]] -name = "system-configuration" -version = "0.6.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3c879d448e9d986b661742763247d3693ed13609438cf3d006f51f5368a5ba6b" -dependencies = [ - "bitflags 2.6.0", - "core-foundation 0.9.4", - "system-configuration-sys 0.6.0", + "system-configuration-sys", ] [[package]] @@ -11360,16 +11225,6 @@ dependencies = [ "libc", ] -[[package]] -name = "system-configuration-sys" -version = "0.6.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8e1d1b10ced5ca923a1fcb8d03e96b8d3268065d724548c0211415ff6ac6bac4" -dependencies = [ - "core-foundation-sys", - "libc", -] - [[package]] name = "system-deps" version = "6.2.2" @@ -11747,7 +11602,7 @@ dependencies = [ "fancy-regex", "lazy_static", "parking_lot", - "rustc-hash 1.1.0", + "rustc-hash", ] [[package]] @@ -11963,17 +11818,6 @@ dependencies = [ "tokio", ] -[[package]] -name = "tokio-rustls" -version = "0.26.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0c7bc40d0e5a97695bb96e27995cd3a08538541b0a846f65bba7a359f36700d4" -dependencies = [ - "rustls 0.23.13", - "rustls-pki-types", - "tokio", -] - [[package]] name = "tokio-socks" version = "0.5.2" @@ -12023,9 +11867,9 @@ dependencies = [ [[package]] name = "tokio-util" 
-version = "0.7.12" +version = "0.7.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "61e7c3654c13bcd040d4a03abee2c75b1d14a37b423cf5a813ceae1cc903ec6a" +checksum = "9cf6b47b3771c49ac75ad09a6162f53ad4b8088b76ac60e8ec1455b31a189fe1" dependencies = [ "bytes 1.7.1", "futures-core", @@ -12207,6 +12051,16 @@ dependencies = [ "valuable", ] +[[package]] +name = "tracing-futures" +version = "0.2.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "97d095ae15e245a057c8e8451bab9b3ee1e1f68e9ba2b4fbc18d0ac5237835f2" +dependencies = [ + "pin-project", + "tracing", +] + [[package]] name = "tracing-log" version = "0.2.0" @@ -12513,24 +12367,6 @@ dependencies = [ "utf-8", ] -[[package]] -name = "tungstenite" -version = "0.24.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "18e5b8366ee7a95b16d32197d0b2604b43a0be89dc5fac9f8e96ccafbaedda8a" -dependencies = [ - "byteorder", - "bytes 1.7.1", - "data-encoding", - "http 1.1.0", - "httparse", - "log", - "rand 0.8.5", - "sha1", - "thiserror", - "utf-8", -] - [[package]] name = "typeid" version = "1.0.2" @@ -12689,43 +12525,18 @@ version = "0.1.11" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e1766d682d402817b5ac4490b3c3002d91dfa0d22812f341609f97b08757359c" +[[package]] +name = "untrusted" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a156c684c91ea7d62626509bce3cb4e1d9ed5c4d978f7b4352658f96a4c26b4a" + [[package]] name = "untrusted" version = "0.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8ecb6da28b8a351d773b68d5825ac39017e680750f980f3a1a85cd8dd28a47c1" -[[package]] -name = "ureq" -version = "2.9.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f8cdd25c339e200129fe4de81451814e5228c9b771d57378817d6117cc2b3f97" -dependencies = [ - "base64 0.21.7", - "flate2", - "log", - "once_cell", - "rustls 
0.21.12", - "rustls-webpki 0.101.7", - "url", - "webpki-roots 0.25.4", -] - -[[package]] -name = "ureq_client" -version = "0.1.0" -dependencies = [ - "anyhow", - "futures 0.3.30", - "gpui", - "http_client", - "parking_lot", - "serde", - "smol", - "ureq", - "util", -] - [[package]] name = "url" version = "2.5.2" @@ -13029,7 +12840,7 @@ dependencies = [ "futures-util", "headers", "http 0.2.12", - "hyper 0.14.30", + "hyper", "log", "mime", "mime_guess", @@ -13165,19 +12976,6 @@ dependencies = [ "wasmparser 0.201.0", ] -[[package]] -name = "wasm-streams" -version = "0.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b65dc4c90b63b118468cf747d8bf3566c1913ef60be765b5730ead9e0a3ba129" -dependencies = [ - "futures-util", - "js-sys", - "wasm-bindgen", - "wasm-bindgen-futures", - "web-sys", -] - [[package]] name = "wasmparser" version = "0.201.0" @@ -13593,8 +13391,8 @@ version = "0.22.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ed63aea5ce73d0ff405984102c42de94fc55a6b75765d621c65262469b3c9b53" dependencies = [ - "ring", - "untrusted", + "ring 0.17.8", + "untrusted 0.9.0", ] [[package]] @@ -13851,17 +13649,6 @@ dependencies = [ "syn 2.0.76", ] -[[package]] -name = "windows-registry" -version = "0.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e400001bb720a623c1c69032f8e3e4cf09984deec740f007dd2b03ec864804b0" -dependencies = [ - "windows-result 0.2.0", - "windows-strings", - "windows-targets 0.52.6", -] - [[package]] name = "windows-result" version = "0.1.2" @@ -14653,6 +14440,7 @@ dependencies = [ "image_viewer", "inline_completion_button", "install_cli", + "isahc_http_client", "journal", "language", "language_model", @@ -14705,7 +14493,6 @@ dependencies = [ "tree-sitter-md", "tree-sitter-rust", "ui", - "ureq_client", "url", "urlencoding", "util", diff --git a/Cargo.toml b/Cargo.toml index 6fc22029e7..887d9fb55a 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -52,6 +52,7 @@ 
members = [ "crates/indexed_docs", "crates/inline_completion_button", "crates/install_cli", + "crates/isahc_http_client", "crates/journal", "crates/language", "crates/language_model", @@ -87,7 +88,6 @@ members = [ "crates/remote", "crates/remote_server", "crates/repl", - "crates/reqwest_client", "crates/rich_text", "crates/rope", "crates/rpc", @@ -122,7 +122,6 @@ members = [ "crates/ui", "crates/ui_input", "crates/ui_macros", - "crates/ureq_client", "crates/util", "crates/vcs_menu", "crates/vim", @@ -230,6 +229,7 @@ image_viewer = { path = "crates/image_viewer" } indexed_docs = { path = "crates/indexed_docs" } inline_completion_button = { path = "crates/inline_completion_button" } install_cli = { path = "crates/install_cli" } +isahc_http_client = { path = "crates/isahc_http_client" } journal = { path = "crates/journal" } language = { path = "crates/language" } language_model = { path = "crates/language_model" } @@ -266,7 +266,6 @@ release_channel = { path = "crates/release_channel" } remote = { path = "crates/remote" } remote_server = { path = "crates/remote_server" } repl = { path = "crates/repl" } -reqwest_client = { path = "crates/reqwest_client" } rich_text = { path = "crates/rich_text" } rope = { path = "crates/rope" } rpc = { path = "crates/rpc" } @@ -301,7 +300,6 @@ title_bar = { path = "crates/title_bar" } ui = { path = "crates/ui" } ui_input = { path = "crates/ui_input" } ui_macros = { path = "crates/ui_macros" } -ureq_client = { path = "crates/ureq_client" } util = { path = "crates/util" } vcs_menu = { path = "crates/vcs_menu" } vim = { path = "crates/vim" } @@ -329,7 +327,7 @@ async-pipe = { git = "https://github.com/zed-industries/async-pipe-rs", rev = "8 async-recursion = "1.0.0" async-tar = "0.5.0" async-trait = "0.1" -async-tungstenite = "0.28" +async-tungstenite = "0.23" async-watch = "0.3.1" async_zip = { version = "0.0.17", features = ["deflate", "deflate64"] } base64 = "0.22" @@ -369,6 +367,10 @@ ignore = "0.4.22" image = "0.25.1" indexmap = { 
version = "1.6.2", features = ["serde"] } indoc = "2" +# We explicitly disable http2 support in isahc. +isahc = { version = "1.7.2", default-features = false, features = [ + "text-decoding", +] } itertools = "0.13.0" jsonwebtoken = "9.3" libc = "0.2" @@ -393,14 +395,13 @@ pulldown-cmark = { version = "0.12.0", default-features = false } rand = "0.8.5" regex = "1.5" repair_json = "0.1.0" -reqwest = { git = "https://github.com/zed-industries/reqwest.git", rev = "fd110f6998da16bbca97b6dddda9be7827c50e29" } rsa = "0.9.6" runtimelib = { version = "0.15", default-features = false, features = [ "async-dispatcher-runtime", ] } rustc-demangle = "0.1.23" rust-embed = { version = "8.4", features = ["include-exclude"] } -rustls = "0.21.12" +rustls = "0.20.3" rustls-native-certs = "0.8.0" schemars = { version = "0.8", features = ["impl_json_schema"] } semver = "1.0" diff --git a/crates/client/Cargo.toml b/crates/client/Cargo.toml index c3fbea1f98..dd420bbbe6 100644 --- a/crates/client/Cargo.toml +++ b/crates/client/Cargo.toml @@ -18,7 +18,6 @@ test-support = ["clock/test-support", "collections/test-support", "gpui/test-sup [dependencies] anyhow.workspace = true async-recursion = "0.3" -async-tls = "0.13" async-tungstenite = { workspace = true, features = ["async-std", "async-tls"] } chrono = { workspace = true, features = ["serde"] } clock.workspace = true @@ -35,6 +34,8 @@ postage.workspace = true rand.workspace = true release_channel.workspace = true rpc = { workspace = true, features = ["gpui"] } +rustls.workspace = true +rustls-native-certs.workspace = true schemars.workspace = true serde.workspace = true serde_json.workspace = true diff --git a/crates/client/src/client.rs b/crates/client/src/client.rs index 03d81b117f..7a37b1b405 100644 --- a/crates/client/src/client.rs +++ b/crates/client/src/client.rs @@ -1023,7 +1023,7 @@ impl Client { &self, http: Arc, release_channel: Option, - ) -> impl Future> { + ) -> impl Future> { #[cfg(any(test, feature = "test-support"))] let 
url_override = self.rpc_url.read().clone(); @@ -1117,7 +1117,7 @@ impl Client { // for us from the RPC URL. // // Among other things, it will generate and set a `Sec-WebSocket-Key` header for us. - let mut request = IntoClientRequest::into_client_request(rpc_url.as_str())?; + let mut request = rpc_url.into_client_request()?; // We then modify the request to add our desired headers. let request_headers = request.headers_mut(); @@ -1137,13 +1137,30 @@ impl Client { match url_scheme { Https => { + let client_config = { + let mut root_store = rustls::RootCertStore::empty(); + + let root_certs = rustls_native_certs::load_native_certs(); + for error in root_certs.errors { + log::warn!("error loading native certs: {:?}", error); + } + root_store.add_parsable_certificates( + &root_certs + .certs + .into_iter() + .map(|cert| cert.as_ref().to_owned()) + .collect::>(), + ); + rustls::ClientConfig::builder() + .with_safe_defaults() + .with_root_certificates(root_store) + .with_no_client_auth() + }; let (stream, _) = async_tungstenite::async_tls::client_async_tls_with_connector( request, stream, - Some(async_tls::TlsConnector::from( - http_client::TLS_CONFIG.clone(), - )), + Some(client_config.into()), ) .await?; Ok(Connection::new( diff --git a/crates/collab/Cargo.toml b/crates/collab/Cargo.toml index de7a3c6214..48482bd435 100644 --- a/crates/collab/Cargo.toml +++ b/crates/collab/Cargo.toml @@ -37,6 +37,7 @@ futures.workspace = true google_ai.workspace = true hex.workspace = true http_client.workspace = true +isahc_http_client.workspace = true jsonwebtoken.workspace = true live_kit_server.workspace = true log.workspace = true @@ -47,7 +48,6 @@ prometheus = "0.13" prost.workspace = true rand.workspace = true reqwest = { version = "0.11", features = ["json"] } -reqwest_client.workspace = true rpc.workspace = true rustc-demangle.workspace = true scrypt = "0.11" diff --git a/crates/collab/src/llm.rs b/crates/collab/src/llm.rs index 96413cf7c5..86563a766c 100644 --- 
a/crates/collab/src/llm.rs +++ b/crates/collab/src/llm.rs @@ -22,8 +22,7 @@ use chrono::{DateTime, Duration, Utc}; use collections::HashMap; use db::{usage_measure::UsageMeasure, ActiveUserCount, LlmDatabase}; use futures::{Stream, StreamExt as _}; - -use reqwest_client::ReqwestClient; +use isahc_http_client::IsahcHttpClient; use rpc::ListModelsResponse; use rpc::{ proto::Plan, LanguageModelProvider, PerformCompletionParams, EXPIRED_LLM_TOKEN_HEADER_NAME, @@ -44,7 +43,7 @@ pub struct LlmState { pub config: Config, pub executor: Executor, pub db: Arc, - pub http_client: ReqwestClient, + pub http_client: IsahcHttpClient, pub clickhouse_client: Option, active_user_count_by_model: RwLock, ActiveUserCount)>>, @@ -70,8 +69,11 @@ impl LlmState { let db = Arc::new(db); let user_agent = format!("Zed Server/{}", env!("CARGO_PKG_VERSION")); - let http_client = - ReqwestClient::user_agent(&user_agent).context("failed to construct http client")?; + let http_client = IsahcHttpClient::builder() + .default_header("User-Agent", user_agent) + .build() + .map(IsahcHttpClient::from) + .context("failed to construct http client")?; let this = Self { executor, diff --git a/crates/collab/src/rpc.rs b/crates/collab/src/rpc.rs index e66c306c50..95e2e62041 100644 --- a/crates/collab/src/rpc.rs +++ b/crates/collab/src/rpc.rs @@ -36,8 +36,8 @@ use collections::{HashMap, HashSet}; pub use connection_pool::{ConnectionPool, ZedVersion}; use core::fmt::{self, Debug, Formatter}; use http_client::HttpClient; +use isahc_http_client::IsahcHttpClient; use open_ai::{OpenAiEmbeddingModel, OPEN_AI_API_URL}; -use reqwest_client::ReqwestClient; use sha2::Digest; use supermaven_api::{CreateExternalUserRequest, SupermavenAdminApi}; @@ -964,8 +964,8 @@ impl Server { tracing::info!("connection opened"); let user_agent = format!("Zed Server/{}", env!("CARGO_PKG_VERSION")); - let http_client = match ReqwestClient::user_agent(&user_agent) { - Ok(http_client) => Arc::new(http_client), + let http_client = match 
IsahcHttpClient::builder().default_header("User-Agent", user_agent).build() { + Ok(http_client) => Arc::new(IsahcHttpClient::from(http_client)), Err(error) => { tracing::error!(?error, "failed to create HTTP client"); return; diff --git a/crates/evals/Cargo.toml b/crates/evals/Cargo.toml index 2697b76845..b3b3a2f2a8 100644 --- a/crates/evals/Cargo.toml +++ b/crates/evals/Cargo.toml @@ -25,6 +25,7 @@ fs.workspace = true git.workspace = true gpui.workspace = true http_client.workspace = true +isahc_http_client.workspace = true language.workspace = true languages.workspace = true node_runtime.workspace = true @@ -35,4 +36,3 @@ serde.workspace = true serde_json.workspace = true settings.workspace = true smol.workspace = true -ureq_client.workspace = true diff --git a/crates/evals/src/eval.rs b/crates/evals/src/eval.rs index e2dc5c8e03..899d821053 100644 --- a/crates/evals/src/eval.rs +++ b/crates/evals/src/eval.rs @@ -32,7 +32,6 @@ use std::{ Arc, }, }; -use ureq_client::UreqClient; const CODESEARCH_NET_DIR: &'static str = "target/datasets/code-search-net"; const EVAL_REPOS_DIR: &'static str = "target/datasets/eval-repos"; @@ -101,11 +100,7 @@ fn main() -> Result<()> { gpui::App::headless().run(move |cx| { let executor = cx.background_executor().clone(); - let client = Arc::new(UreqClient::new( - None, - "Zed LLM evals".to_string(), - executor.clone(), - )); + let client = isahc_http_client::IsahcHttpClient::new(None, None); cx.set_http_client(client.clone()); match cli.command { Commands::Fetch {} => { diff --git a/crates/extension/Cargo.toml b/crates/extension/Cargo.toml index 2b1d6193f8..f495ef7cdc 100644 --- a/crates/extension/Cargo.toml +++ b/crates/extension/Cargo.toml @@ -56,6 +56,7 @@ wit-component.workspace = true workspace.workspace = true [dev-dependencies] +isahc_http_client.workspace = true ctor.workspace = true env_logger.workspace = true fs = { workspace = true, features = ["test-support"] } @@ -63,7 +64,5 @@ gpui = { workspace = true, features = 
["test-support"] } language = { workspace = true, features = ["test-support"] } parking_lot.workspace = true project = { workspace = true, features = ["test-support"] } -reqwest_client.workspace = true tokio.workspace = true -ureq_client.workspace = true workspace = { workspace = true, features = ["test-support"] } diff --git a/crates/extension/src/extension_builder.rs b/crates/extension/src/extension_builder.rs index 876d0336dc..7380e699f9 100644 --- a/crates/extension/src/extension_builder.rs +++ b/crates/extension/src/extension_builder.rs @@ -25,7 +25,7 @@ use wit_component::ComponentEncoder; /// Once Rust 1.78 is released, there will be a `wasm32-wasip2` target available, so we will /// not need the adapter anymore. const RUST_TARGET: &str = "wasm32-wasip1"; -pub const WASI_ADAPTER_URL: &str = +const WASI_ADAPTER_URL: &str = "https://github.com/bytecodealliance/wasmtime/releases/download/v18.0.2/wasi_snapshot_preview1.reactor.wasm"; /// Compiling Tree-sitter parsers from C to WASM requires Clang 17, and a WASM build of libc diff --git a/crates/extension/src/extension_store_test.rs b/crates/extension/src/extension_store_test.rs index 7a3c645e04..126e6b2cfb 100644 --- a/crates/extension/src/extension_store_test.rs +++ b/crates/extension/src/extension_store_test.rs @@ -1,4 +1,3 @@ -use crate::extension_builder::WASI_ADAPTER_URL; use crate::extension_manifest::SchemaVersion; use crate::extension_settings::ExtensionSettings; use crate::{ @@ -12,14 +11,14 @@ use collections::BTreeMap; use fs::{FakeFs, Fs, RealFs}; use futures::{io::BufReader, AsyncReadExt, StreamExt}; use gpui::{Context, SemanticVersion, TestAppContext}; -use http_client::{AsyncBody, FakeHttpClient, HttpClient, Response}; +use http_client::{FakeHttpClient, Response}; use indexed_docs::IndexedDocsRegistry; +use isahc_http_client::IsahcHttpClient; use language::{LanguageMatcher, LanguageRegistry, LanguageServerBinaryStatus, LanguageServerName}; use node_runtime::NodeRuntime; use parking_lot::Mutex; use 
project::{Project, DEFAULT_COMPLETION_CONTEXT}; use release_channel::AppVersion; -use reqwest_client::ReqwestClient; use serde_json::json; use settings::{Settings as _, SettingsStore}; use snippet_provider::SnippetRegistry; @@ -29,7 +28,6 @@ use std::{ sync::Arc, }; use theme::ThemeRegistry; -use ureq_client::UreqClient; use util::test::temp_tree; #[cfg(test)] @@ -578,7 +576,7 @@ async fn test_extension_store_with_test_extension(cx: &mut TestAppContext) { std::env::consts::ARCH ) }); - let builder_client = Arc::new(UreqClient::new(None, user_agent, cx.executor().clone())); + let builder_client = IsahcHttpClient::new(None, Some(user_agent)); let extension_store = cx.new_model(|cx| { ExtensionStore::new( @@ -771,50 +769,6 @@ async fn test_extension_store_with_test_extension(cx: &mut TestAppContext) { assert!(fs.metadata(&expected_server_path).await.unwrap().is_none()); } -#[gpui::test] -async fn test_wasi_adapter_download(cx: &mut TestAppContext) { - let client = Arc::new(UreqClient::new( - None, - "zed-test-wasi-adapter-download".to_string(), - cx.executor().clone(), - )); - - let mut response = client - .get(WASI_ADAPTER_URL, AsyncBody::default(), true) - .await - .unwrap(); - - let mut content = Vec::new(); - let mut body = BufReader::new(response.body_mut()); - body.read_to_end(&mut content).await.unwrap(); - - assert!(wasmparser::Parser::is_core_wasm(&content)); - assert_eq!(content.len(), 96801); // Determined by downloading this to my computer - wit_component::ComponentEncoder::default() - .adapter("wasi_snapshot_preview1", &content) - .unwrap(); -} - -#[tokio::test] -async fn test_wasi_adapter_download_tokio() { - let client = Arc::new(ReqwestClient::new()); - - let mut response = client - .get(WASI_ADAPTER_URL, AsyncBody::default(), true) - .await - .unwrap(); - - let mut content = Vec::new(); - let mut body = BufReader::new(response.body_mut()); - body.read_to_end(&mut content).await.unwrap(); - - assert!(wasmparser::Parser::is_core_wasm(&content)); - 
assert_eq!(content.len(), 96801); // Determined by downloading this to my computer - wit_component::ComponentEncoder::default() - .adapter("wasi_snapshot_preview1", &content) - .unwrap(); -} - fn init_test(cx: &mut TestAppContext) { cx.update(|cx| { let store = SettingsStore::test(cx); diff --git a/crates/extension_cli/Cargo.toml b/crates/extension_cli/Cargo.toml index 6de3e858d4..bc649d8e04 100644 --- a/crates/extension_cli/Cargo.toml +++ b/crates/extension_cli/Cargo.toml @@ -18,9 +18,9 @@ clap = { workspace = true, features = ["derive"] } env_logger.workspace = true extension = { workspace = true, features = ["no-webrtc"] } fs.workspace = true +isahc_http_client.workspace = true language.workspace = true log.workspace = true -reqwest_client.workspace = true rpc.workspace = true serde.workspace = true serde_json.workspace = true diff --git a/crates/extension_cli/src/main.rs b/crates/extension_cli/src/main.rs index dd6f221378..6eaebca2f0 100644 --- a/crates/extension_cli/src/main.rs +++ b/crates/extension_cli/src/main.rs @@ -13,8 +13,8 @@ use extension::{ extension_builder::{CompileExtensionOptions, ExtensionBuilder}, ExtensionManifest, }; +use isahc_http_client::IsahcHttpClient; use language::LanguageConfig; -use reqwest_client::ReqwestClient; use theme::ThemeRegistry; use tree_sitter::{Language, Query, WasmStore}; @@ -66,7 +66,12 @@ async fn main() -> Result<()> { std::env::consts::OS, std::env::consts::ARCH ); - let http_client = Arc::new(ReqwestClient::user_agent(&user_agent)?); + let http_client = Arc::new( + IsahcHttpClient::builder() + .default_header("User-Agent", user_agent) + .build() + .map(IsahcHttpClient::from)?, + ); let builder = ExtensionBuilder::new(http_client, scratch_dir); builder diff --git a/crates/gpui/src/app.rs b/crates/gpui/src/app.rs index f81a2092d5..bba5f857b4 100644 --- a/crates/gpui/src/app.rs +++ b/crates/gpui/src/app.rs @@ -1533,8 +1533,4 @@ impl HttpClient for NullHttpClient { fn proxy(&self) -> Option<&http_client::Uri> { None } - 
- fn type_name(&self) -> &'static str { - type_name::() - } } diff --git a/crates/http_client/Cargo.toml b/crates/http_client/Cargo.toml index e8585cff98..0244ac4104 100644 --- a/crates/http_client/Cargo.toml +++ b/crates/http_client/Cargo.toml @@ -16,13 +16,11 @@ path = "src/http_client.rs" doctest = true [dependencies] +http = "0.2" anyhow.workspace = true derive_more.workspace = true futures.workspace = true -http = "1.1" log.workspace = true -rustls-native-certs.workspace = true -rustls.workspace = true serde.workspace = true serde_json.workspace = true smol.workspace = true diff --git a/crates/http_client/src/http_client.rs b/crates/http_client/src/http_client.rs index bf1046d88e..2f029a1d23 100644 --- a/crates/http_client/src/http_client.rs +++ b/crates/http_client/src/http_client.rs @@ -11,22 +11,13 @@ use http::request::Builder; #[cfg(feature = "test-support")] use std::fmt; use std::{ - any::type_name, - sync::{Arc, LazyLock, Mutex}, + sync::{Arc, Mutex}, time::Duration, }; pub use url::Url; -#[derive(Clone)] pub struct ReadTimeout(pub Duration); -impl Default for ReadTimeout { - fn default() -> Self { - Self(Duration::from_secs(5)) - } -} - -#[derive(Default, Debug, Clone, PartialEq, Eq, Hash)] - +#[derive(Default, Debug, Clone)] pub enum RedirectPolicy { #[default] NoFollow, @@ -35,23 +26,6 @@ pub enum RedirectPolicy { } pub struct FollowRedirects(pub bool); -pub static TLS_CONFIG: LazyLock> = LazyLock::new(|| { - let mut root_store = rustls::RootCertStore::empty(); - - let root_certs = rustls_native_certs::load_native_certs(); - for error in root_certs.errors { - log::warn!("error loading native certs: {:?}", error); - } - root_store.add_parsable_certificates(&root_certs.certs); - - Arc::new( - rustls::ClientConfig::builder() - .with_safe_defaults() - .with_root_certificates(root_store) - .with_no_client_auth(), - ) -}); - pub trait HttpRequestExt { /// Set a read timeout on the request. /// For isahc, this is the low_speed_timeout. 
@@ -73,8 +47,6 @@ impl HttpRequestExt for http::request::Builder { } pub trait HttpClient: 'static + Send + Sync { - fn type_name(&self) -> &'static str; - fn send( &self, req: http::Request, @@ -157,10 +129,6 @@ impl HttpClient for HttpClientWithProxy { fn proxy(&self) -> Option<&Uri> { self.proxy.as_ref() } - - fn type_name(&self) -> &'static str { - self.client.type_name() - } } impl HttpClient for Arc { @@ -174,10 +142,6 @@ impl HttpClient for Arc { fn proxy(&self) -> Option<&Uri> { self.proxy.as_ref() } - - fn type_name(&self) -> &'static str { - self.client.type_name() - } } /// An [`HttpClient`] that has a base URL. @@ -289,10 +253,6 @@ impl HttpClient for Arc { fn proxy(&self) -> Option<&Uri> { self.client.proxy.as_ref() } - - fn type_name(&self) -> &'static str { - self.client.type_name() - } } impl HttpClient for HttpClientWithUrl { @@ -306,10 +266,6 @@ impl HttpClient for HttpClientWithUrl { fn proxy(&self) -> Option<&Uri> { self.client.proxy.as_ref() } - - fn type_name(&self) -> &'static str { - self.client.type_name() - } } pub fn read_proxy_from_env() -> Option { @@ -350,10 +306,6 @@ impl HttpClient for BlockedHttpClient { fn proxy(&self) -> Option<&Uri> { None } - - fn type_name(&self) -> &'static str { - type_name::() - } } #[cfg(feature = "test-support")] @@ -426,8 +378,4 @@ impl HttpClient for FakeHttpClient { fn proxy(&self) -> Option<&Uri> { None } - - fn type_name(&self) -> &'static str { - type_name::() - } } diff --git a/crates/ureq_client/Cargo.toml b/crates/isahc_http_client/Cargo.toml similarity index 53% rename from crates/ureq_client/Cargo.toml rename to crates/isahc_http_client/Cargo.toml index 757ba01094..82f7621bf8 100644 --- a/crates/ureq_client/Cargo.toml +++ b/crates/isahc_http_client/Cargo.toml @@ -1,5 +1,5 @@ [package] -name = "ureq_client" +name = "isahc_http_client" version = "0.1.0" edition = "2021" publish = false @@ -12,20 +12,11 @@ workspace = true test-support = [] [lib] -path = "src/ureq_client.rs" -doctest = true - 
-[[example]] -name = "client" -path = "examples/client.rs" +path = "src/isahc_http_client.rs" [dependencies] anyhow.workspace = true futures.workspace = true -gpui.workspace = true http_client.workspace = true -parking_lot.workspace = true -serde.workspace = true -smol.workspace = true -ureq = "=2.9.1" +isahc.workspace = true util.workspace = true diff --git a/crates/isahc_http_client/LICENSE-APACHE b/crates/isahc_http_client/LICENSE-APACHE new file mode 120000 index 0000000000..1cd601d0a3 --- /dev/null +++ b/crates/isahc_http_client/LICENSE-APACHE @@ -0,0 +1 @@ +../../LICENSE-APACHE \ No newline at end of file diff --git a/crates/isahc_http_client/src/isahc_http_client.rs b/crates/isahc_http_client/src/isahc_http_client.rs new file mode 100644 index 0000000000..778f6a0459 --- /dev/null +++ b/crates/isahc_http_client/src/isahc_http_client.rs @@ -0,0 +1,105 @@ +use std::{mem, sync::Arc, time::Duration}; + +use futures::future::BoxFuture; +use util::maybe; + +pub use isahc::config::Configurable; +pub struct IsahcHttpClient(isahc::HttpClient); + +pub use http_client::*; + +impl IsahcHttpClient { + pub fn new(proxy: Option, user_agent: Option) -> Arc { + let mut builder = isahc::HttpClient::builder() + .connect_timeout(Duration::from_secs(5)) + .low_speed_timeout(100, Duration::from_secs(5)) + .proxy(proxy.clone()); + if let Some(agent) = user_agent { + builder = builder.default_header("User-Agent", agent); + } + Arc::new(IsahcHttpClient(builder.build().unwrap())) + } + pub fn builder() -> isahc::HttpClientBuilder { + isahc::HttpClientBuilder::new() + } +} + +impl From for IsahcHttpClient { + fn from(client: isahc::HttpClient) -> Self { + Self(client) + } +} + +impl HttpClient for IsahcHttpClient { + fn proxy(&self) -> Option<&Uri> { + None + } + + fn send( + &self, + req: http_client::http::Request, + ) -> BoxFuture<'static, Result, anyhow::Error>> + { + let redirect_policy = req + .extensions() + .get::() + .cloned() + .unwrap_or_default(); + let read_timeout = req + 
.extensions() + .get::() + .map(|t| t.0); + let req = maybe!({ + let (mut parts, body) = req.into_parts(); + let mut builder = isahc::Request::builder() + .method(parts.method) + .uri(parts.uri) + .version(parts.version); + if let Some(read_timeout) = read_timeout { + builder = builder.low_speed_timeout(100, read_timeout); + } + + let headers = builder.headers_mut()?; + mem::swap(headers, &mut parts.headers); + + let extensions = builder.extensions_mut()?; + mem::swap(extensions, &mut parts.extensions); + + let isahc_body = match body.0 { + http_client::Inner::Empty => isahc::AsyncBody::empty(), + http_client::Inner::AsyncReader(reader) => isahc::AsyncBody::from_reader(reader), + http_client::Inner::SyncReader(reader) => { + isahc::AsyncBody::from_bytes_static(reader.into_inner()) + } + }; + + builder + .redirect_policy(match redirect_policy { + http_client::RedirectPolicy::FollowAll => isahc::config::RedirectPolicy::Follow, + http_client::RedirectPolicy::FollowLimit(limit) => { + isahc::config::RedirectPolicy::Limit(limit) + } + http_client::RedirectPolicy::NoFollow => isahc::config::RedirectPolicy::None, + }) + .body(isahc_body) + .ok() + }); + + let client = self.0.clone(); + + Box::pin(async move { + match req { + Some(req) => client + .send_async(req) + .await + .map_err(Into::into) + .map(|response| { + let (parts, body) = response.into_parts(); + let body = http_client::AsyncBody::from_reader(body); + http_client::Response::from_parts(parts, body) + }), + None => Err(anyhow::anyhow!("Request was malformed")), + } + }) + } +} diff --git a/crates/live_kit_server/Cargo.toml b/crates/live_kit_server/Cargo.toml index 4b4b5e13da..bad4c5a05f 100644 --- a/crates/live_kit_server/Cargo.toml +++ b/crates/live_kit_server/Cargo.toml @@ -20,7 +20,7 @@ jsonwebtoken.workspace = true log.workspace = true prost.workspace = true prost-types.workspace = true -reqwest.workspace = true +reqwest = "0.11" serde.workspace = true [build-dependencies] diff --git 
a/crates/reqwest_client/Cargo.toml b/crates/reqwest_client/Cargo.toml deleted file mode 100644 index 060a382d72..0000000000 --- a/crates/reqwest_client/Cargo.toml +++ /dev/null @@ -1,31 +0,0 @@ -[package] -name = "reqwest_client" -version = "0.1.0" -edition = "2021" -publish = false -license = "Apache-2.0" - -[lints] -workspace = true - -[features] -test-support = [] - -[lib] -path = "src/reqwest_client.rs" -doctest = true - -[[example]] -name = "client" -path = "examples/client.rs" - -[dependencies] -anyhow.workspace = true -bytes = "1.0" -futures.workspace = true -http_client.workspace = true -serde.workspace = true -smol.workspace = true -tokio.workspace = true - -reqwest = { workspace = true, features = ["rustls-tls-manual-roots", "stream"] } diff --git a/crates/reqwest_client/LICENSE-GPL b/crates/reqwest_client/LICENSE-GPL deleted file mode 120000 index 89e542f750..0000000000 --- a/crates/reqwest_client/LICENSE-GPL +++ /dev/null @@ -1 +0,0 @@ -../../LICENSE-GPL \ No newline at end of file diff --git a/crates/reqwest_client/examples/client.rs b/crates/reqwest_client/examples/client.rs deleted file mode 100644 index 1f50d21e4e..0000000000 --- a/crates/reqwest_client/examples/client.rs +++ /dev/null @@ -1,16 +0,0 @@ -use futures::AsyncReadExt as _; -use http_client::AsyncBody; -use http_client::HttpClient; -use reqwest_client::ReqwestClient; - -#[tokio::main] -async fn main() { - let resp = ReqwestClient::new() - .get("http://zed.dev", AsyncBody::empty(), true) - .await - .unwrap(); - - let mut body = String::new(); - resp.into_body().read_to_string(&mut body).await.unwrap(); - println!("{}", &body); -} diff --git a/crates/reqwest_client/src/reqwest_client.rs b/crates/reqwest_client/src/reqwest_client.rs deleted file mode 100644 index f8698b9080..0000000000 --- a/crates/reqwest_client/src/reqwest_client.rs +++ /dev/null @@ -1,259 +0,0 @@ -use std::{any::type_name, borrow::Cow, io::Read, pin::Pin, task::Poll}; - -use anyhow::anyhow; -use bytes::{BufMut, Bytes, 
BytesMut}; -use futures::{AsyncRead, TryStreamExt}; -use http_client::{http, AsyncBody, ReadTimeout}; -use reqwest::header::{HeaderMap, HeaderValue}; -use smol::future::FutureExt; - -const DEFAULT_CAPACITY: usize = 4096; - -pub struct ReqwestClient { - client: reqwest::Client, -} - -impl ReqwestClient { - pub fn new() -> Self { - Self { - client: reqwest::Client::new(), - } - } - - pub fn user_agent(agent: &str) -> anyhow::Result { - let mut map = HeaderMap::new(); - map.insert(http::header::USER_AGENT, HeaderValue::from_str(agent)?); - Ok(Self { - client: reqwest::Client::builder().default_headers(map).build()?, - }) - } -} - -impl From for ReqwestClient { - fn from(client: reqwest::Client) -> Self { - Self { client } - } -} - -// This struct is essentially a re-implementation of -// https://docs.rs/tokio-util/0.7.12/tokio_util/io/struct.ReaderStream.html -// except outside of Tokio's aegis -struct ReaderStream { - reader: Option>>, - buf: BytesMut, - capacity: usize, -} - -impl ReaderStream { - fn new(reader: Pin>) -> Self { - Self { - reader: Some(reader), - buf: BytesMut::new(), - capacity: DEFAULT_CAPACITY, - } - } -} - -impl futures::Stream for ReaderStream { - type Item = std::io::Result; - - fn poll_next( - mut self: Pin<&mut Self>, - cx: &mut std::task::Context<'_>, - ) -> Poll> { - let mut this = self.as_mut(); - - let mut reader = match this.reader.take() { - Some(r) => r, - None => return Poll::Ready(None), - }; - - if this.buf.capacity() == 0 { - let capacity = this.capacity; - this.buf.reserve(capacity); - } - - match poll_read_buf(&mut reader, cx, &mut this.buf) { - Poll::Pending => Poll::Pending, - Poll::Ready(Err(err)) => { - self.reader = None; - - Poll::Ready(Some(Err(err))) - } - Poll::Ready(Ok(0)) => { - self.reader = None; - Poll::Ready(None) - } - Poll::Ready(Ok(_)) => { - let chunk = this.buf.split(); - self.reader = Some(reader); - Poll::Ready(Some(Ok(chunk.freeze()))) - } - } - } -} - -/// Implementation from 
https://docs.rs/tokio-util/0.7.12/src/tokio_util/util/poll_buf.rs.html -/// Specialized for this use case -pub fn poll_read_buf( - io: &mut Pin>, - cx: &mut std::task::Context<'_>, - buf: &mut BytesMut, -) -> Poll> { - if !buf.has_remaining_mut() { - return Poll::Ready(Ok(0)); - } - - let n = { - let dst = buf.chunk_mut(); - - // Safety: `chunk_mut()` returns a `&mut UninitSlice`, and `UninitSlice` is a - // transparent wrapper around `[MaybeUninit]`. - let dst = unsafe { &mut *(dst as *mut _ as *mut [std::mem::MaybeUninit]) }; - let mut buf = tokio::io::ReadBuf::uninit(dst); - let ptr = buf.filled().as_ptr(); - let unfilled_portion = buf.initialize_unfilled(); - // SAFETY: Pin projection - let io_pin = unsafe { Pin::new_unchecked(io) }; - std::task::ready!(io_pin.poll_read(cx, unfilled_portion)?); - - // Ensure the pointer does not change from under us - assert_eq!(ptr, buf.filled().as_ptr()); - buf.filled().len() - }; - - // Safety: This is guaranteed to be the number of initialized (and read) - // bytes due to the invariants provided by `ReadBuf::filled`. 
- unsafe { - buf.advance_mut(n); - } - - Poll::Ready(Ok(n)) -} - -enum WrappedBodyInner { - None, - SyncReader(std::io::Cursor>), - Stream(ReaderStream), -} - -struct WrappedBody(WrappedBodyInner); - -impl WrappedBody { - fn new(body: AsyncBody) -> Self { - match body.0 { - http_client::Inner::Empty => Self(WrappedBodyInner::None), - http_client::Inner::SyncReader(cursor) => Self(WrappedBodyInner::SyncReader(cursor)), - http_client::Inner::AsyncReader(pin) => { - Self(WrappedBodyInner::Stream(ReaderStream::new(pin))) - } - } - } -} - -impl futures::stream::Stream for WrappedBody { - type Item = Result; - - fn poll_next( - mut self: std::pin::Pin<&mut Self>, - cx: &mut std::task::Context<'_>, - ) -> std::task::Poll> { - match &mut self.0 { - WrappedBodyInner::None => Poll::Ready(None), - WrappedBodyInner::SyncReader(cursor) => { - let mut buf = Vec::new(); - match cursor.read_to_end(&mut buf) { - Ok(bytes) => { - if bytes == 0 { - return Poll::Ready(None); - } else { - return Poll::Ready(Some(Ok(Bytes::from(buf)))); - } - } - Err(e) => return Poll::Ready(Some(Err(e))), - } - } - WrappedBodyInner::Stream(stream) => { - // SAFETY: Pin projection - let stream = unsafe { Pin::new_unchecked(stream) }; - futures::Stream::poll_next(stream, cx) - } - } - } -} - -impl http_client::HttpClient for ReqwestClient { - fn proxy(&self) -> Option<&http::Uri> { - None - } - - fn type_name(&self) -> &'static str { - type_name::() - } - - fn send( - &self, - req: http::Request, - ) -> futures::future::BoxFuture< - 'static, - Result, anyhow::Error>, - > { - let (parts, body) = req.into_parts(); - - let mut request = self.client.request(parts.method, parts.uri.to_string()); - - request = request.headers(parts.headers); - - if let Some(redirect_policy) = parts.extensions.get::() { - request = request.redirect_policy(match redirect_policy { - http_client::RedirectPolicy::NoFollow => reqwest::redirect::Policy::none(), - http_client::RedirectPolicy::FollowLimit(limit) => { - 
reqwest::redirect::Policy::limited(*limit as usize) - } - http_client::RedirectPolicy::FollowAll => reqwest::redirect::Policy::limited(100), - }); - } - - if let Some(ReadTimeout(timeout)) = parts.extensions.get::() { - request = request.timeout(*timeout); - } - - let body = WrappedBody::new(body); - let request = request.body(reqwest::Body::wrap_stream(body)); - - async move { - let response = request.send().await.map_err(|e| anyhow!(e))?; - let status = response.status(); - let mut builder = http::Response::builder().status(status.as_u16()); - for (name, value) in response.headers() { - builder = builder.header(name, value); - } - let bytes = response.bytes_stream(); - let bytes = bytes - .map_err(|e| futures::io::Error::new(futures::io::ErrorKind::Other, e)) - .into_async_read(); - let body = http_client::AsyncBody::from_reader(bytes); - builder.body(body).map_err(|e| anyhow!(e)) - } - .boxed() - } -} - -#[cfg(test)] -mod test { - - use core::str; - - use http_client::AsyncBody; - use smol::stream::StreamExt; - - use crate::WrappedBody; - - #[tokio::test] - async fn test_sync_streaming_upload() { - let mut body = WrappedBody::new(AsyncBody::from("hello there".to_string())).fuse(); - let result = body.next().await.unwrap().unwrap(); - assert!(body.next().await.is_none()); - assert_eq!(str::from_utf8(&result).unwrap(), "hello there"); - } -} diff --git a/crates/semantic_index/Cargo.toml b/crates/semantic_index/Cargo.toml index 508e64ffea..f157cc9b3f 100644 --- a/crates/semantic_index/Cargo.toml +++ b/crates/semantic_index/Cargo.toml @@ -51,6 +51,7 @@ workspace.workspace = true worktree.workspace = true [dev-dependencies] +isahc_http_client.workspace = true client = { workspace = true, features = ["test-support"] } env_logger.workspace = true fs = { workspace = true, features = ["test-support"] } @@ -61,7 +62,6 @@ language = { workspace = true, features = ["test-support"] } languages.workspace = true project = { workspace = true, features = ["test-support"] } 
tempfile.workspace = true -ureq_client.workspace = true util = { workspace = true, features = ["test-support"] } workspace = { workspace = true, features = ["test-support"] } worktree = { workspace = true, features = ["test-support"] } diff --git a/crates/semantic_index/examples/index.rs b/crates/semantic_index/examples/index.rs index 1ebed4c17f..c5c2c633a1 100644 --- a/crates/semantic_index/examples/index.rs +++ b/crates/semantic_index/examples/index.rs @@ -2,6 +2,7 @@ use client::Client; use futures::channel::oneshot; use gpui::App; use http_client::HttpClientWithUrl; +use isahc_http_client::IsahcHttpClient; use language::language_settings::AllLanguageSettings; use project::Project; use semantic_index::{OpenAiEmbeddingModel, OpenAiEmbeddingProvider, SemanticDb}; @@ -28,11 +29,7 @@ fn main() { let clock = Arc::new(FakeSystemClock::default()); let http = Arc::new(HttpClientWithUrl::new( - Arc::new(ureq_client::UreqClient::new( - None, - "Zed semantic index example".to_string(), - cx.background_executor().clone(), - )), + IsahcHttpClient::new(None, None), "http://localhost:11434", None, )); diff --git a/crates/storybook/Cargo.toml b/crates/storybook/Cargo.toml index b05c7692f9..f8e78acad3 100644 --- a/crates/storybook/Cargo.toml +++ b/crates/storybook/Cargo.toml @@ -22,6 +22,7 @@ editor.workspace = true fuzzy.workspace = true gpui.workspace = true indoc.workspace = true +isahc_http_client.workspace = true language.workspace = true log.workspace = true menu.workspace = true @@ -35,7 +36,6 @@ strum = { workspace = true, features = ["derive"] } theme.workspace = true title_bar = { workspace = true, features = ["stories"] } ui = { workspace = true, features = ["stories"] } -ureq_client.workspace = true [dev-dependencies] gpui = { workspace = true, features = ["test-support"] } diff --git a/crates/storybook/src/storybook.rs b/crates/storybook/src/storybook.rs index a77602efcc..73b1396da5 100644 --- a/crates/storybook/src/storybook.rs +++ 
b/crates/storybook/src/storybook.rs @@ -4,14 +4,13 @@ mod assets; mod stories; mod story_selector; -use std::sync::Arc; - use clap::Parser; use dialoguer::FuzzySelect; use gpui::{ div, px, size, AnyView, AppContext, Bounds, Render, ViewContext, VisualContext, WindowBounds, WindowOptions, }; +use isahc_http_client::IsahcHttpClient; use log::LevelFilter; use project::Project; use settings::{KeymapFile, Settings}; @@ -19,7 +18,6 @@ use simplelog::SimpleLogger; use strum::IntoEnumIterator; use theme::{ThemeRegistry, ThemeSettings}; use ui::prelude::*; -use ureq_client::UreqClient; use crate::app_menus::app_menus; use crate::assets::Assets; @@ -68,12 +66,8 @@ fn main() { gpui::App::new().with_assets(Assets).run(move |cx| { load_embedded_fonts(cx).unwrap(); - let http_client = UreqClient::new( - None, - "zed_storybook".to_string(), - cx.background_executor().clone(), - ); - cx.set_http_client(Arc::new(http_client)); + let http_client = IsahcHttpClient::new(None, Some("zed_storybook".to_string())); + cx.set_http_client(http_client); settings::init(cx); theme::init(theme::LoadThemes::All(Box::new(Assets)), cx); diff --git a/crates/ureq_client/LICENSE-GPL b/crates/ureq_client/LICENSE-GPL deleted file mode 120000 index 89e542f750..0000000000 --- a/crates/ureq_client/LICENSE-GPL +++ /dev/null @@ -1 +0,0 @@ -../../LICENSE-GPL \ No newline at end of file diff --git a/crates/ureq_client/examples/client.rs b/crates/ureq_client/examples/client.rs deleted file mode 100644 index c5caae40da..0000000000 --- a/crates/ureq_client/examples/client.rs +++ /dev/null @@ -1,24 +0,0 @@ -use futures::AsyncReadExt; -use http_client::{AsyncBody, HttpClient}; -use ureq_client::UreqClient; - -fn main() { - gpui::App::headless().run(|cx| { - println!("{:?}", std::thread::current().id()); - cx.spawn(|cx| async move { - let resp = UreqClient::new( - None, - "Conrad's bot".to_string(), - cx.background_executor().clone(), - ) - .get("http://zed.dev", AsyncBody::empty(), true) - .await - .unwrap(); - - 
let mut body = String::new(); - resp.into_body().read_to_string(&mut body).await.unwrap(); - println!("{}", body); - }) - .detach(); - }) -} diff --git a/crates/ureq_client/src/ureq_client.rs b/crates/ureq_client/src/ureq_client.rs deleted file mode 100644 index d3d8d3aedc..0000000000 --- a/crates/ureq_client/src/ureq_client.rs +++ /dev/null @@ -1,200 +0,0 @@ -use std::any::type_name; -use std::collections::HashMap; -use std::io::Read; -use std::sync::Arc; -use std::time::Duration; -use std::{pin::Pin, task::Poll}; - -use anyhow::Error; -use futures::channel::mpsc; -use futures::future::BoxFuture; -use futures::{AsyncRead, SinkExt, StreamExt}; -use http_client::{http, AsyncBody, HttpClient, RedirectPolicy, Uri}; -use smol::future::FutureExt; -use util::ResultExt; - -pub struct UreqClient { - // Note in ureq 2.x the options are stored on the Agent. - // In ureq 3.x we'll be able to set these on the request. - // In practice it's probably "fine" to have many clients, the number of distinct options - // is low; and most requests to the same connection will have the same options so the - // connection pool will work. - clients: Arc>>, - proxy_url: Option, - proxy: Option, - user_agent: String, - background_executor: gpui::BackgroundExecutor, -} - -impl UreqClient { - pub fn new( - proxy_url: Option, - user_agent: String, - background_executor: gpui::BackgroundExecutor, - ) -> Self { - Self { - clients: Arc::default(), - proxy_url: proxy_url.clone(), - proxy: proxy_url.and_then(|url| ureq::Proxy::new(url.to_string()).log_err()), - user_agent, - background_executor, - } - } - - fn agent_for(&self, redirect_policy: RedirectPolicy, timeout: Duration) -> ureq::Agent { - let mut clients = self.clients.lock(); - // in case our assumption of distinct options is wrong, we'll sporadically clean it out. 
- if clients.len() > 50 { - clients.clear() - } - - clients - .entry((timeout, redirect_policy.clone())) - .or_insert_with(|| { - let mut builder = ureq::AgentBuilder::new() - .timeout_connect(Duration::from_secs(5)) - .timeout_read(timeout) - .timeout_write(timeout) - .user_agent(&self.user_agent) - .tls_config(http_client::TLS_CONFIG.clone()) - .redirects(match redirect_policy { - RedirectPolicy::NoFollow => 0, - RedirectPolicy::FollowLimit(limit) => limit, - RedirectPolicy::FollowAll => 100, - }); - if let Some(proxy) = &self.proxy { - builder = builder.proxy(proxy.clone()); - } - builder.build() - }) - .clone() - } -} -impl HttpClient for UreqClient { - fn proxy(&self) -> Option<&Uri> { - self.proxy_url.as_ref() - } - - fn type_name(&self) -> &'static str { - type_name::() - } - - fn send( - &self, - request: http::Request, - ) -> BoxFuture<'static, Result, Error>> { - let agent = self.agent_for( - request - .extensions() - .get::() - .cloned() - .unwrap_or_default(), - request - .extensions() - .get::() - .cloned() - .unwrap_or_default() - .0, - ); - let mut req = agent.request(&request.method().as_ref(), &request.uri().to_string()); - for (name, value) in request.headers().into_iter() { - req = req.set(name.as_str(), value.to_str().unwrap()); - } - let body = request.into_body(); - let executor = self.background_executor.clone(); - - self.background_executor - .spawn(async move { - let response = match req.send(body) { - Ok(response) => response, - Err(e) => match e { - ureq::Error::Status(_, response) => response, - ureq::Error::Transport(transport) => { - anyhow::bail!(transport) - } - }, - }; - - let mut builder = http::Response::builder() - .status(response.status()) - .version(http::Version::HTTP_11); - for name in response.headers_names() { - if let Some(value) = response.header(&name) { - builder = builder.header(name, value); - } - } - - let body = AsyncBody::from_reader(UreqResponseReader::new(executor, response)); - let http_response = 
builder.body(body)?; - - Ok(http_response) - }) - .boxed() - } -} - -struct UreqResponseReader { - receiver: mpsc::Receiver>>, - buffer: Vec, - idx: usize, - _task: gpui::Task<()>, -} - -impl UreqResponseReader { - fn new(background_executor: gpui::BackgroundExecutor, response: ureq::Response) -> Self { - let (mut sender, receiver) = mpsc::channel(1); - let mut reader = response.into_reader(); - let task = background_executor.spawn(async move { - let mut buffer = vec![0; 8192]; - loop { - let n = match reader.read(&mut buffer) { - Ok(0) => break, - Ok(n) => n, - Err(e) => { - let _ = sender.send(Err(e)).await; - break; - } - }; - let _ = sender.send(Ok(buffer[..n].to_vec())).await; - } - }); - - UreqResponseReader { - _task: task, - receiver, - buffer: Vec::new(), - idx: 0, - } - } -} - -impl AsyncRead for UreqResponseReader { - fn poll_read( - mut self: Pin<&mut Self>, - cx: &mut std::task::Context<'_>, - buf: &mut [u8], - ) -> Poll> { - if self.buffer.is_empty() { - match self.receiver.poll_next_unpin(cx) { - Poll::Ready(Some(Ok(data))) => self.buffer = data, - Poll::Ready(Some(Err(e))) => { - return Poll::Ready(Err(e)); - } - Poll::Ready(None) => { - return Poll::Ready(Ok(0)); - } - Poll::Pending => { - return Poll::Pending; - } - } - } - let n = std::cmp::min(buf.len(), self.buffer.len() - self.idx); - buf[..n].copy_from_slice(&self.buffer[self.idx..self.idx + n]); - self.idx += n; - if self.idx == self.buffer.len() { - self.buffer.clear(); - self.idx = 0; - } - Poll::Ready(Ok(n)) - } -} diff --git a/crates/vim/Cargo.toml b/crates/vim/Cargo.toml index bb347f49b7..410371cb0c 100644 --- a/crates/vim/Cargo.toml +++ b/crates/vim/Cargo.toml @@ -17,7 +17,7 @@ neovim = ["nvim-rs", "async-compat", "async-trait", "tokio"] [dependencies] anyhow.workspace = true -async-compat = { workspace = true, "optional" = true } +async-compat = { version = "0.2.1", "optional" = true } async-trait = { workspace = true, "optional" = true } collections.workspace = true 
command_palette.workspace = true diff --git a/crates/zed/Cargo.toml b/crates/zed/Cargo.toml index e340e176a7..e22f75f5bb 100644 --- a/crates/zed/Cargo.toml +++ b/crates/zed/Cargo.toml @@ -57,6 +57,7 @@ http_client.workspace = true image_viewer.workspace = true inline_completion_button.workspace = true install_cli.workspace = true +isahc_http_client.workspace = true journal.workspace = true language.workspace = true language_model.workspace = true @@ -107,7 +108,6 @@ theme.workspace = true theme_selector.workspace = true time.workspace = true ui.workspace = true -ureq_client.workspace = true url.workspace = true urlencoding = "2.1.2" util.workspace = true diff --git a/crates/zed/src/main.rs b/crates/zed/src/main.rs index 535cafbccb..9857c60491 100644 --- a/crates/zed/src/main.rs +++ b/crates/zed/src/main.rs @@ -24,9 +24,9 @@ use gpui::{ UpdateGlobal as _, VisualContext, }; use http_client::{read_proxy_from_env, Uri}; +use isahc_http_client::IsahcHttpClient; use language::LanguageRegistry; use log::LevelFilter; -use ureq_client::UreqClient; use assets::Assets; use node_runtime::{NodeBinaryOptions, NodeRuntime}; @@ -334,7 +334,9 @@ fn main() { log::info!("========== starting zed =========="); - let app = App::new().with_assets(Assets); + let app = App::new() + .with_assets(Assets) + .with_http_client(IsahcHttpClient::new(None, None)); let system_id = app.background_executor().block(system_id()).ok(); let installation_id = app.background_executor().block(installation_id()).ok(); @@ -468,8 +470,8 @@ fn main() { .ok() }) .or_else(read_proxy_from_env); - let http = UreqClient::new(proxy_url, user_agent, cx.background_executor().clone()); - cx.set_http_client(Arc::new(http)); + let http = IsahcHttpClient::new(proxy_url, Some(user_agent)); + cx.set_http_client(http); ::set_global(fs.clone(), cx); From 9c54bd1bd4afe7f19113e896b041dd79d14fd8d6 Mon Sep 17 00:00:00 2001 From: Thorsten Ball Date: Wed, 9 Oct 2024 10:45:35 +0200 Subject: [PATCH 35/35] macOS: Drop input handler to 
avoid editor/project not being dropped (#18898) This fixes the problem of a `Project` sometimes not being dropped when closing the single, last window of Zed. Turns out, it wasn't getting dropped for the following reason: 1. `editor::Editor` held a reference to project 2. The macOS `input_handler` on the `Window` held a reference to that `Editor` 3. The AppKit window (and its input handler) get dropped asynchronously (in the code in this diff), after the window is closed. 4. After the window is closed and no `cx.update()` calls are made anymore, `flush_effects` is not called anymore. 5. But `flush_effects` is where we dropped entities that don't have any more references. In short: we dropped `Editor`, which held a reference to `Project`, out of band, `flush_effects` wasn't called anymore, and thus the `Project` wasn't dropped. cc @ConradIrwin @bennetbo since we talked about this. Release Notes: - N/A Co-authored-by: Antonio --- crates/gpui/src/platform/mac/window.rs | 1 + 1 file changed, 1 insertion(+) diff --git a/crates/gpui/src/platform/mac/window.rs b/crates/gpui/src/platform/mac/window.rs index 5f9ee43dec..6d887f2b13 100644 --- a/crates/gpui/src/platform/mac/window.rs +++ b/crates/gpui/src/platform/mac/window.rs @@ -768,6 +768,7 @@ impl Drop for MacWindow { unsafe { this.native_window.setDelegate_(nil); } + this.input_handler.take(); this.executor .spawn(async move { unsafe {