Compare commits

13 Commits — main...v0.171.3-p

| Author | SHA1 | Date |
|---|---|---|
| | 52b93c213e | |
| | 599992ae71 | |
| | 0a2fc20711 | |
| | d3ea0aad8e | |
| | 07b76fa116 | |
| | e1e4c1b786 | |
| | d264d93c94 | |
| | 4698b36e4d | |
| | 59b35e537b | |
| | 23b8561f2d | |
| | 5ad757ed48 | |
| | 1ce81bc560 | |
| | 2d6aa1b83c | |
18  Cargo.lock  (generated)

@@ -559,6 +559,7 @@ version = "0.1.0"
dependencies = [
 "anthropic",
 "anyhow",
 "deepseek",
 "feature_flags",
 "fs",
 "gpui",

@@ -3684,6 +3685,18 @@ dependencies = [
 "winapi",
]

[[package]]
name = "deepseek"
version = "0.1.0"
dependencies = [
 "anyhow",
 "futures 0.3.31",
 "http_client",
 "schemars",
 "serde",
 "serde_json",
]

[[package]]
name = "deflate64"
version = "0.1.9"

@@ -6791,6 +6804,7 @@ dependencies = [
 "async-trait",
 "collections",
 "extension",
 "fs",
 "futures 0.3.31",
 "gpui",
 "language",

@@ -6808,6 +6822,7 @@ dependencies = [
 "anyhow",
 "base64 0.22.1",
 "collections",
 "deepseek",
 "futures 0.3.31",
 "google_ai",
 "gpui",

@@ -6851,6 +6866,7 @@ dependencies = [
 "client",
 "collections",
 "copilot",
 "deepseek",
 "editor",
 "feature_flags",
 "fs",

@@ -16269,7 +16285,7 @@ dependencies = [

[[package]]
name = "zed"
- version = "0.171.0"
+ version = "0.171.3"
dependencies = [
 "activity_indicator",
 "anyhow",

@@ -32,6 +32,7 @@ members = [
    "crates/copilot",
    "crates/db",
    "crates/diagnostics",
    "crates/deepseek",
    "crates/docs_preprocessor",
    "crates/editor",
    "crates/evals",

@@ -229,6 +230,7 @@ context_server = { path = "crates/context_server" }
context_server_settings = { path = "crates/context_server_settings" }
copilot = { path = "crates/copilot" }
db = { path = "crates/db" }
deepseek = { path = "crates/deepseek" }
diagnostics = { path = "crates/diagnostics" }
editor = { path = "crates/editor" }
extension = { path = "crates/extension" }

1  assets/icons/ai_deep_seek.svg  (new file, 2.1 KiB)

@@ -0,0 +1 @@
<svg height="1em" style="flex:none;line-height:1" viewBox="0 0 24 24" width="1em" xmlns="http://www.w3.org/2000/svg"><title>DeepSeek</title><path d="M23.748 4.482c-.254-.124-.364.113-.512.234-.051.039-.094.09-.137.136-.372.397-.806.657-1.373.626-.829-.046-1.537.214-2.163.848-.133-.782-.575-1.248-1.247-1.548-.352-.156-.708-.311-.955-.65-.172-.241-.219-.51-.305-.774-.055-.16-.11-.323-.293-.35-.2-.031-.278.136-.356.276-.313.572-.434 1.202-.422 1.84.027 1.436.633 2.58 1.838 3.393.137.093.172.187.129.323-.082.28-.18.552-.266.833-.055.179-.137.217-.329.14a5.526 5.526 0 01-1.736-1.18c-.857-.828-1.631-1.742-2.597-2.458a11.365 11.365 0 00-.689-.471c-.985-.957.13-1.743.388-1.836.27-.098.093-.432-.779-.428-.872.004-1.67.295-2.687.684a3.055 3.055 0 01-.465.137 9.597 9.597 0 00-2.883-.102c-1.885.21-3.39 1.102-4.497 2.623C.082 8.606-.231 10.684.152 12.85c.403 2.284 1.569 4.175 3.36 5.653 1.858 1.533 3.997 2.284 6.438 2.14 1.482-.085 3.133-.284 4.994-1.86.47.234.962.327 1.78.397.63.059 1.236-.03 1.705-.128.735-.156.684-.837.419-.961-2.155-1.004-1.682-.595-2.113-.926 1.096-1.296 2.746-2.642 3.392-7.003.05-.347.007-.565 0-.845-.004-.17.035-.237.23-.256a4.173 4.173 0 001.545-.475c1.396-.763 1.96-2.015 2.093-3.517.02-.23-.004-.467-.247-.588zM11.581 18c-2.089-1.642-3.102-2.183-3.52-2.16-.392.024-.321.471-.235.763.09.288.207.486.371.739.114.167.192.416-.113.603-.673.416-1.842-.14-1.897-.167-1.361-.802-2.5-1.86-3.301-3.307-.774-1.393-1.224-2.887-1.298-4.482-.02-.386.093-.522.477-.592a4.696 4.696 0 011.529-.039c2.132.312 3.946 1.265 5.468 2.774.868.86 1.525 1.887 2.202 2.891.72 1.066 1.494 2.082 2.48 2.914.348.292.625.514.891.677-.802.09-2.14.11-3.054-.614zm1-6.44a.306.306 0 01.415-.287.302.302 0 01.2.288.306.306 0 01-.31.307.303.303 0 01-.304-.308zm3.11 1.596c-.2.081-.399.151-.59.16a1.245 1.245 0 01-.798-.254c-.274-.23-.47-.358-.552-.758a1.73 1.73 0 01.016-.588c.07-.327-.008-.537-.239-.727-.187-.156-.426-.199-.688-.199a.559.559 0 01-.254-.078c-.11-.054-.2-.19-.114-.358.028-.054.16-.186.192-.21.356-.202.767-.136 1.146.016.352.144.618.408 1.001.782.391.451.462.576.685.914.176.265.336.537.445.848.067.195-.019.354-.25.452z" fill="black"></path></svg>

@@ -1166,6 +1166,9 @@
    },
    "lmstudio": {
      "api_url": "http://localhost:1234/api/v0"
    },
    "deepseek": {
      "api_url": "https://api.deepseek.com"
    }
  },
  // Zed's Prettier integration settings.

@@ -14,6 +14,7 @@ path = "src/assistant_settings.rs"
[dependencies]
anthropic = { workspace = true, features = ["schemars"] }
anyhow.workspace = true
deepseek = { workspace = true, features = ["schemars"] }
feature_flags.workspace = true
gpui.workspace = true
language_model.workspace = true

@@ -2,6 +2,7 @@ use std::sync::Arc;

use ::open_ai::Model as OpenAiModel;
use anthropic::Model as AnthropicModel;
use deepseek::Model as DeepseekModel;
use feature_flags::FeatureFlagAppExt;
use gpui::{AppContext, Pixels};
use language_model::{CloudModel, LanguageModel};

@@ -46,6 +47,11 @@ pub enum AssistantProviderContentV1 {
        default_model: Option<LmStudioModel>,
        api_url: Option<String>,
    },
    #[serde(rename = "deepseek")]
    DeepSeek {
        default_model: Option<DeepseekModel>,
        api_url: Option<String>,
    },
}

#[derive(Debug, Default)]

@@ -149,6 +155,12 @@ impl AssistantSettingsContent {
                    model: model.id().to_string(),
                })
            }
            AssistantProviderContentV1::DeepSeek { default_model, .. } => {
                default_model.map(|model| LanguageModelSelection {
                    provider: "deepseek".to_string(),
                    model: model.id().to_string(),
                })
            }
        }),
        inline_alternatives: None,
        enable_experimental_live_diffs: None,

@@ -253,6 +265,18 @@ impl AssistantSettingsContent {
                        available_models,
                    });
                }
                "deepseek" => {
                    let api_url = match &settings.provider {
                        Some(AssistantProviderContentV1::DeepSeek { api_url, .. }) => {
                            api_url.clone()
                        }
                        _ => None,
                    };
                    settings.provider = Some(AssistantProviderContentV1::DeepSeek {
                        default_model: DeepseekModel::from_id(&model).ok(),
                        api_url,
                    });
                }
                _ => {}
            },
            VersionedAssistantSettingsContent::V2(settings) => {

@@ -341,6 +365,7 @@ fn providers_schema(_: &mut schemars::gen::SchemaGenerator) -> schemars::schema:
        "openai".into(),
        "zed.dev".into(),
        "copilot_chat".into(),
        "deepseek".into(),
    ]),
    ..Default::default()
}

@@ -380,7 +405,7 @@ pub struct AssistantSettingsContentV1 {
    default_height: Option<f32>,
    /// The provider of the assistant service.
    ///
-   /// This can be "openai", "anthropic", "ollama", "lmstudio", "zed.dev"
+   /// This can be "openai", "anthropic", "ollama", "lmstudio", "deepseek", "zed.dev"
    /// each with their respective default models and configurations.
    provider: Option<AssistantProviderContentV1>,
}

24  crates/deepseek/Cargo.toml  (new file)

@@ -0,0 +1,24 @@
[package]
name = "deepseek"
version = "0.1.0"
edition = "2021"
publish = false
license = "GPL-3.0-or-later"

[lints]
workspace = true

[lib]
path = "src/deepseek.rs"

[features]
default = []
schemars = ["dep:schemars"]

[dependencies]
anyhow.workspace = true
futures.workspace = true
http_client.workspace = true
schemars = { workspace = true, optional = true }
serde.workspace = true
serde_json.workspace = true

1  crates/deepseek/LICENSE-GPL  (symbolic link)

@@ -0,0 +1 @@
../../LICENSE-GPL

301  crates/deepseek/src/deepseek.rs  (new file)

@@ -0,0 +1,301 @@
use anyhow::{anyhow, Result};
use futures::{
    io::BufReader,
    stream::{BoxStream, StreamExt},
    AsyncBufReadExt, AsyncReadExt,
};
use http_client::{AsyncBody, HttpClient, Method, Request as HttpRequest};
use serde::{Deserialize, Serialize};
use serde_json::Value;
use std::convert::TryFrom;

pub const DEEPSEEK_API_URL: &str = "https://api.deepseek.com";

#[derive(Clone, Copy, Serialize, Deserialize, Debug, Eq, PartialEq)]
#[serde(rename_all = "lowercase")]
pub enum Role {
    User,
    Assistant,
    System,
    Tool,
}

impl TryFrom<String> for Role {
    type Error = anyhow::Error;

    fn try_from(value: String) -> Result<Self> {
        match value.as_str() {
            "user" => Ok(Self::User),
            "assistant" => Ok(Self::Assistant),
            "system" => Ok(Self::System),
            "tool" => Ok(Self::Tool),
            _ => Err(anyhow!("invalid role '{value}'")),
        }
    }
}

impl From<Role> for String {
    fn from(val: Role) -> Self {
        match val {
            Role::User => "user".to_owned(),
            Role::Assistant => "assistant".to_owned(),
            Role::System => "system".to_owned(),
            Role::Tool => "tool".to_owned(),
        }
    }
}

#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))]
#[derive(Clone, Debug, Default, Serialize, Deserialize, PartialEq)]
pub enum Model {
    #[serde(rename = "deepseek-chat")]
    #[default]
    Chat,
    #[serde(rename = "deepseek-reasoner")]
    Reasoner,
    #[serde(rename = "custom")]
    Custom {
        name: String,
        /// The name displayed in the UI, such as in the assistant panel model dropdown menu.
        display_name: Option<String>,
        max_tokens: usize,
        max_output_tokens: Option<u32>,
    },
}

impl Model {
    pub fn from_id(id: &str) -> Result<Self> {
        match id {
            "deepseek-chat" => Ok(Self::Chat),
            "deepseek-reasoner" => Ok(Self::Reasoner),
            _ => Err(anyhow!("invalid model id")),
        }
    }

    pub fn id(&self) -> &str {
        match self {
            Self::Chat => "deepseek-chat",
            Self::Reasoner => "deepseek-reasoner",
            Self::Custom { name, .. } => name,
        }
    }

    pub fn display_name(&self) -> &str {
        match self {
            Self::Chat => "DeepSeek Chat",
            Self::Reasoner => "DeepSeek Reasoner",
            Self::Custom {
                name, display_name, ..
            } => display_name.as_ref().unwrap_or(name).as_str(),
        }
    }

    pub fn max_token_count(&self) -> usize {
        match self {
            Self::Chat | Self::Reasoner => 64_000,
            Self::Custom { max_tokens, .. } => *max_tokens,
        }
    }

    pub fn max_output_tokens(&self) -> Option<u32> {
        match self {
            Self::Chat => Some(8_192),
            Self::Reasoner => Some(8_192),
            Self::Custom {
                max_output_tokens, ..
            } => *max_output_tokens,
        }
    }
}
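
Not part of the diff: a minimal illustration of how the `Model` helpers above behave, written as an in-crate test sketch. The assertions only restate the constants and fallbacks defined in this file; the "deepseek-coder" name is a placeholder for any custom model.

```rust
#[cfg(test)]
mod model_tests {
    use super::Model;

    #[test]
    fn model_ids_round_trip() {
        // Known ids parse into the corresponding variants...
        let reasoner = Model::from_id("deepseek-reasoner").unwrap();
        assert_eq!(reasoner.id(), "deepseek-reasoner");
        assert_eq!(reasoner.display_name(), "DeepSeek Reasoner");
        // ...and expose the context/output limits hard-coded above.
        assert_eq!(reasoner.max_token_count(), 64_000);
        assert_eq!(reasoner.max_output_tokens(), Some(8_192));

        // Unknown ids are rejected; anything else must be described as a custom model.
        assert!(Model::from_id("deepseek-coder").is_err());
        let custom = Model::Custom {
            name: "deepseek-coder".into(),
            display_name: Some("DeepSeek Coder".into()),
            max_tokens: 32_000,
            max_output_tokens: Some(4_096),
        };
        // Custom models fall back to `name` only when no display name is given.
        assert_eq!(custom.display_name(), "DeepSeek Coder");
    }
}
```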

#[derive(Debug, Serialize, Deserialize)]
pub struct Request {
    pub model: String,
    pub messages: Vec<RequestMessage>,
    pub stream: bool,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub max_tokens: Option<u32>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub temperature: Option<f32>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub response_format: Option<ResponseFormat>,
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub tools: Vec<ToolDefinition>,
}

#[derive(Debug, Serialize, Deserialize)]
#[serde(rename_all = "snake_case")]
pub enum ResponseFormat {
    Text,
    #[serde(rename = "json_object")]
    JsonObject,
}

#[derive(Debug, Serialize, Deserialize)]
#[serde(tag = "type", rename_all = "snake_case")]
pub enum ToolDefinition {
    Function { function: FunctionDefinition },
}

#[derive(Debug, Serialize, Deserialize)]
pub struct FunctionDefinition {
    pub name: String,
    pub description: Option<String>,
    pub parameters: Option<Value>,
}

#[derive(Serialize, Deserialize, Debug, Eq, PartialEq)]
#[serde(tag = "role", rename_all = "lowercase")]
pub enum RequestMessage {
    Assistant {
        content: Option<String>,
        #[serde(default, skip_serializing_if = "Vec::is_empty")]
        tool_calls: Vec<ToolCall>,
    },
    User {
        content: String,
    },
    System {
        content: String,
    },
    Tool {
        content: String,
        tool_call_id: String,
    },
}

#[derive(Serialize, Deserialize, Debug, Eq, PartialEq)]
pub struct ToolCall {
    pub id: String,
    #[serde(flatten)]
    pub content: ToolCallContent,
}

#[derive(Serialize, Deserialize, Debug, Eq, PartialEq)]
#[serde(tag = "type", rename_all = "lowercase")]
pub enum ToolCallContent {
    Function { function: FunctionContent },
}

#[derive(Serialize, Deserialize, Debug, Eq, PartialEq)]
pub struct FunctionContent {
    pub name: String,
    pub arguments: String,
}

#[derive(Serialize, Deserialize, Debug)]
pub struct Response {
    pub id: String,
    pub object: String,
    pub created: u64,
    pub model: String,
    pub choices: Vec<Choice>,
    pub usage: Usage,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub reasoning_content: Option<String>,
}

#[derive(Serialize, Deserialize, Debug)]
pub struct Usage {
    pub prompt_tokens: u32,
    pub completion_tokens: u32,
    pub total_tokens: u32,
    #[serde(default)]
    pub prompt_cache_hit_tokens: u32,
    #[serde(default)]
    pub prompt_cache_miss_tokens: u32,
}

#[derive(Serialize, Deserialize, Debug)]
pub struct Choice {
    pub index: u32,
    pub message: RequestMessage,
    pub finish_reason: Option<String>,
}

#[derive(Serialize, Deserialize, Debug)]
pub struct StreamResponse {
    pub id: String,
    pub object: String,
    pub created: u64,
    pub model: String,
    pub choices: Vec<StreamChoice>,
}

#[derive(Serialize, Deserialize, Debug)]
pub struct StreamChoice {
    pub index: u32,
    pub delta: StreamDelta,
    pub finish_reason: Option<String>,
}

#[derive(Serialize, Deserialize, Debug)]
pub struct StreamDelta {
    pub role: Option<Role>,
    pub content: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub tool_calls: Option<Vec<ToolCallChunk>>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub reasoning_content: Option<String>,
}

#[derive(Serialize, Deserialize, Debug)]
pub struct ToolCallChunk {
    pub index: usize,
    pub id: Option<String>,
    pub function: Option<FunctionChunk>,
}

#[derive(Serialize, Deserialize, Debug)]
pub struct FunctionChunk {
    pub name: Option<String>,
    pub arguments: Option<String>,
}

pub async fn stream_completion(
    client: &dyn HttpClient,
    api_url: &str,
    api_key: &str,
    request: Request,
) -> Result<BoxStream<'static, Result<StreamResponse>>> {
    let uri = format!("{api_url}/v1/chat/completions");
    let request_builder = HttpRequest::builder()
        .method(Method::POST)
        .uri(uri)
        .header("Content-Type", "application/json")
        .header("Authorization", format!("Bearer {}", api_key));

    let request = request_builder.body(AsyncBody::from(serde_json::to_string(&request)?))?;
    let mut response = client.send(request).await?;

    if response.status().is_success() {
        let reader = BufReader::new(response.into_body());
        Ok(reader
            .lines()
            .filter_map(|line| async move {
                match line {
                    Ok(line) => {
                        let line = line.strip_prefix("data: ")?;
                        if line == "[DONE]" {
                            None
                        } else {
                            match serde_json::from_str(line) {
                                Ok(response) => Some(Ok(response)),
                                Err(error) => Some(Err(anyhow!(error))),
                            }
                        }
                    }
                    Err(error) => Some(Err(anyhow!(error))),
                }
            })
            .boxed())
    } else {
        let mut body = String::new();
        response.body_mut().read_to_string(&mut body).await?;
        Err(anyhow!(
            "Failed to connect to DeepSeek API: {} {}",
            response.status(),
            body,
        ))
    }
}
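
Not part of the diff: a sketch of what `stream_completion` puts on the wire and what it parses back, using only the types defined in this file plus `serde_json` (already a dependency of the crate). The JSON chunk is an illustrative example of one SSE `data:` line, not captured API traffic.

```rust
#[cfg(test)]
mod wire_format_tests {
    use super::*;

    #[test]
    fn request_serialization_and_stream_chunk_parsing() {
        // The request body POSTed to `{api_url}/v1/chat/completions`.
        let request = Request {
            model: Model::Chat.id().to_string(),
            messages: vec![RequestMessage::User {
                content: "Hello".to_string(),
            }],
            stream: true,
            max_tokens: Some(8_192),
            temperature: Some(0.7),
            response_format: None,
            tools: Vec::new(),
        };
        let body = serde_json::to_value(&request).unwrap();
        assert_eq!(body["model"], "deepseek-chat");
        assert_eq!(body["messages"][0]["role"], "user");
        // Empty `tools` and a `None` response_format are omitted entirely.
        assert!(body.get("tools").is_none());

        // One streamed chunk, as parsed after the "data: " prefix is stripped.
        let chunk = r#"{"id":"1","object":"chat.completion.chunk","created":0,
            "model":"deepseek-chat",
            "choices":[{"index":0,"delta":{"content":"Hi"},"finish_reason":null}]}"#;
        let parsed: StreamResponse = serde_json::from_str(chunk).unwrap();
        assert_eq!(parsed.choices[0].delta.content.as_deref(), Some("Hi"));
    }
}
```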

@@ -21,25 +21,18 @@ const fn zed_repo_url() -> &'static str {
    "https://github.com/zed-industries/zed"
}

- fn request_feature_url(specs: &SystemSpecs) -> String {
-     format!(
-         concat!(
-             "https://github.com/zed-industries/zed/issues/new",
-             "?labels=admin+read%2Ctriage%2Cenhancement",
-             "&template=0_feature_request.yml",
-             "&environment={}"
-         ),
-         urlencoding::encode(&specs.to_string())
-     )
+ fn request_feature_url() -> String {
+     "https://github.com/zed-industries/zed/issues/new?template=0_feature_request.yml".to_string()
}

fn file_bug_report_url(specs: &SystemSpecs) -> String {
    format!(
        concat!(
            "https://github.com/zed-industries/zed/issues/new",
-             "?labels=admin+read%2Ctriage%2Cbug",
-             "&template=1_bug_report.yml",
-             "&environment={}"
+             "?",
+             "template=1_bug_report.yml",
+             "&",
+             "environment={}"
        ),
        urlencoding::encode(&specs.to_string())
    )

@@ -70,11 +63,9 @@ pub fn init(cx: &mut AppContext) {
        .detach();
    })
    .register_action(|_, _: &RequestFeature, cx| {
-         let specs = SystemSpecs::new(cx);
        cx.spawn(|_, mut cx| async move {
-             let specs = specs.await;
            cx.update(|cx| {
-                 cx.open_url(&request_feature_url(&specs));
+                 cx.open_url(&request_feature_url());
            })
            .log_err();
        })

@@ -290,9 +290,13 @@ impl MacPlatform {
                action,
                os_action,
            } => {
                // Note that this is not the standard logic for selecting which keybinding to
                // display. Typically the last binding takes precedence for display. However, in
                // this case the menus are not updated on context changes. To make these bindings
                // more likely to be correct, the first binding instead takes precedence (typically
                // from the base keymap).
                let keystrokes = keymap
                    .bindings_for_action(action.as_ref())
                    .rev()
                    .next()
                    .map(|binding| binding.keystrokes());

@@ -25,6 +25,7 @@ use crate::language_settings::SoftWrap;
use anyhow::{anyhow, Context, Result};
use async_trait::async_trait;
use collections::{HashMap, HashSet};
use fs::Fs;
use futures::Future;
use gpui::{AppContext, AsyncAppContext, Model, SharedString, Task};
pub use highlight_map::HighlightMap;

@@ -464,6 +465,7 @@ pub trait LspAdapter: 'static + Send + Sync {
    /// Returns initialization options that are going to be sent to a LSP server as a part of [`lsp::InitializeParams`]
    async fn initialization_options(
        self: Arc<Self>,
        _: &dyn Fs,
        _: &Arc<dyn LspAdapterDelegate>,
    ) -> Result<Option<Value>> {
        Ok(None)

@@ -471,6 +473,7 @@ pub trait LspAdapter: 'static + Send + Sync {

    async fn workspace_configuration(
        self: Arc<Self>,
        _: &dyn Fs,
        _: &Arc<dyn LspAdapterDelegate>,
        _: Arc<dyn LanguageToolchainStore>,
        _cx: &mut AsyncAppContext,

@@ -1854,6 +1857,7 @@ impl LspAdapter for FakeLspAdapter {

    async fn initialization_options(
        self: Arc<Self>,
        _: &dyn Fs,
        _: &Arc<dyn LspAdapterDelegate>,
    ) -> Result<Option<Value>> {
        Ok(self.initialization_options.clone())

@@ -17,6 +17,7 @@ async-trait.workspace = true
collections.workspace = true
extension.workspace = true
futures.workspace = true
fs.workspace = true
gpui.workspace = true
language.workspace = true
lsp.workspace = true

@@ -8,6 +8,7 @@ use anyhow::{Context, Result};
use async_trait::async_trait;
use collections::HashMap;
use extension::{Extension, ExtensionLanguageServerProxy, WorktreeDelegate};
use fs::Fs;
use futures::{Future, FutureExt};
use gpui::AsyncAppContext;
use language::{

@@ -224,6 +225,7 @@ impl LspAdapter for ExtensionLspAdapter {

    async fn initialization_options(
        self: Arc<Self>,
        _: &dyn Fs,
        delegate: &Arc<dyn LspAdapterDelegate>,
    ) -> Result<Option<serde_json::Value>> {
        let delegate = Arc::new(WorktreeDelegateAdapter(delegate.clone())) as _;

@@ -246,6 +248,7 @@ impl LspAdapter for ExtensionLspAdapter {

    async fn workspace_configuration(
        self: Arc<Self>,
        _: &dyn Fs,
        delegate: &Arc<dyn LspAdapterDelegate>,
        _: Arc<dyn LanguageToolchainStore>,
        _cx: &mut AsyncAppContext,

@@ -29,6 +29,7 @@ log.workspace = true
ollama = { workspace = true, features = ["schemars"] }
open_ai = { workspace = true, features = ["schemars"] }
lmstudio = { workspace = true, features = ["schemars"] }
deepseek = { workspace = true, features = ["schemars"] }
parking_lot.workspace = true
proto.workspace = true
schemars.workspace = true

@@ -80,6 +80,7 @@ impl CloudModel {
            | open_ai::Model::FourOmni
            | open_ai::Model::FourOmniMini
            | open_ai::Model::O1Mini
            | open_ai::Model::O1Preview
            | open_ai::Model::O1
            | open_ai::Model::Custom { .. } => {
                LanguageModelAvailability::RequiresPlan(Plan::ZedPro)

@@ -410,6 +410,84 @@ impl LanguageModelRequest {
            top_p: None,
        }
    }

    pub fn into_deepseek(self, model: String, max_output_tokens: Option<u32>) -> deepseek::Request {
        let is_reasoner = model == "deepseek-reasoner";

        let len = self.messages.len();
        let merged_messages =
            self.messages
                .into_iter()
                .fold(Vec::with_capacity(len), |mut acc, msg| {
                    let role = msg.role;
                    let content = msg.string_contents();

                    if is_reasoner {
                        if let Some(last_msg) = acc.last_mut() {
                            match (last_msg, role) {
                                (deepseek::RequestMessage::User { content: last }, Role::User) => {
                                    last.push(' ');
                                    last.push_str(&content);
                                    return acc;
                                }

                                (
                                    deepseek::RequestMessage::Assistant {
                                        content: last_content,
                                        ..
                                    },
                                    Role::Assistant,
                                ) => {
                                    *last_content = last_content
                                        .take()
                                        .map(|c| {
                                            let mut s =
                                                String::with_capacity(c.len() + content.len() + 1);
                                            s.push_str(&c);
                                            s.push(' ');
                                            s.push_str(&content);
                                            s
                                        })
                                        .or(Some(content));

                                    return acc;
                                }
                                _ => {}
                            }
                        }
                    }

                    acc.push(match role {
                        Role::User => deepseek::RequestMessage::User { content },
                        Role::Assistant => deepseek::RequestMessage::Assistant {
                            content: Some(content),
                            tool_calls: Vec::new(),
                        },
                        Role::System => deepseek::RequestMessage::System { content },
                    });
                    acc
                });

        deepseek::Request {
            model,
            messages: merged_messages,
            stream: true,
            max_tokens: max_output_tokens,
            temperature: if is_reasoner { None } else { self.temperature },
            response_format: None,
            tools: self
                .tools
                .into_iter()
                .map(|tool| deepseek::ToolDefinition::Function {
                    function: deepseek::FunctionDefinition {
                        name: tool.name,
                        description: Some(tool.description),
                        parameters: Some(tool.input_schema),
                    },
                })
                .collect(),
        }
    }
}
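
Not part of the diff: the reasoner path above merges adjacent same-role messages, which appears to be why `into_deepseek` only does the fold when targeting `deepseek-reasoner`. A stripped-down sketch of that merge rule on the crate's own `RequestMessage` type, for illustration only (the real code above also merges assistant messages and carries tool calls):

```rust
use deepseek::RequestMessage;

/// Merge runs of consecutive user messages into one message, mirroring what
/// `into_deepseek` does when the selected model is deepseek-reasoner.
fn merge_consecutive_user_messages(messages: Vec<RequestMessage>) -> Vec<RequestMessage> {
    let mut merged: Vec<RequestMessage> = Vec::with_capacity(messages.len());
    for message in messages {
        // Two user messages in a row: append to the previous one instead of pushing.
        if let (Some(RequestMessage::User { content: last }), RequestMessage::User { content }) =
            (merged.last_mut(), &message)
        {
            last.push(' ');
            last.push_str(content);
            continue;
        }
        // Anything else is kept as-is.
        merged.push(message);
    }
    merged
}
```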

#[derive(Serialize, Deserialize, Debug, Eq, PartialEq)]

@@ -66,6 +66,16 @@ impl From<Role> for open_ai::Role {
    }
}

impl From<Role> for deepseek::Role {
    fn from(val: Role) -> Self {
        match val {
            Role::User => deepseek::Role::User,
            Role::Assistant => deepseek::Role::Assistant,
            Role::System => deepseek::Role::System,
        }
    }
}

impl From<Role> for lmstudio::Role {
    fn from(val: Role) -> Self {
        match val {

@@ -29,6 +29,7 @@ menu.workspace = true
ollama = { workspace = true, features = ["schemars"] }
lmstudio = { workspace = true, features = ["schemars"] }
open_ai = { workspace = true, features = ["schemars"] }
deepseek = { workspace = true, features = ["schemars"] }
project.workspace = true
proto.workspace = true
schemars.workspace = true

@@ -4,6 +4,7 @@ use client::{Client, UserStore};
use fs::Fs;
use gpui::{AppContext, Model, ModelContext};
use language_model::{LanguageModelProviderId, LanguageModelRegistry, ZED_CLOUD_PROVIDER_ID};
use provider::deepseek::DeepSeekLanguageModelProvider;

mod logging;
pub mod provider;

@@ -60,6 +61,10 @@ fn register_language_model_providers(
        LmStudioLanguageModelProvider::new(client.http_client(), cx),
        cx,
    );
    registry.register_provider(
        DeepSeekLanguageModelProvider::new(client.http_client(), cx),
        cx,
    );
    registry.register_provider(
        GoogleLanguageModelProvider::new(client.http_client(), cx),
        cx,

@@ -1,6 +1,7 @@
pub mod anthropic;
pub mod cloud;
pub mod copilot_chat;
pub mod deepseek;
pub mod google;
pub mod lmstudio;
pub mod ollama;

558  crates/language_models/src/provider/deepseek.rs  (new file)

@@ -0,0 +1,558 @@
use anyhow::{anyhow, Result};
use collections::BTreeMap;
use editor::{Editor, EditorElement, EditorStyle};
use futures::{future::BoxFuture, stream::BoxStream, FutureExt, StreamExt};
use gpui::{
    AnyView, AppContext, AsyncAppContext, FontStyle, ModelContext, Subscription, Task, TextStyle,
    View, WhiteSpace,
};
use http_client::HttpClient;
use language_model::{
    LanguageModel, LanguageModelCompletionEvent, LanguageModelId, LanguageModelName,
    LanguageModelProvider, LanguageModelProviderId, LanguageModelProviderName,
    LanguageModelProviderState, LanguageModelRequest, RateLimiter, Role,
};
use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
use settings::{Settings, SettingsStore};
use std::sync::Arc;
use theme::ThemeSettings;
use ui::{prelude::*, Icon, IconName};
use util::ResultExt;

use crate::AllLanguageModelSettings;

const PROVIDER_ID: &str = "deepseek";
const PROVIDER_NAME: &str = "DeepSeek";
const DEEPSEEK_API_KEY_VAR: &str = "DEEPSEEK_API_KEY";

#[derive(Default, Clone, Debug, PartialEq)]
pub struct DeepSeekSettings {
    pub api_url: String,
    pub available_models: Vec<AvailableModel>,
}

#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, JsonSchema)]
pub struct AvailableModel {
    pub name: String,
    pub display_name: Option<String>,
    pub max_tokens: usize,
    pub max_output_tokens: Option<u32>,
}

pub struct DeepSeekLanguageModelProvider {
    http_client: Arc<dyn HttpClient>,
    state: gpui::Model<State>,
}

pub struct State {
    api_key: Option<String>,
    api_key_from_env: bool,
    _subscription: Subscription,
}

impl State {
    fn is_authenticated(&self) -> bool {
        self.api_key.is_some()
    }

    fn reset_api_key(&self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
        let settings = &AllLanguageModelSettings::get_global(cx).deepseek;
        let delete_credentials = cx.delete_credentials(&settings.api_url);
        cx.spawn(|this, mut cx| async move {
            delete_credentials.await.log_err();
            this.update(&mut cx, |this, cx| {
                this.api_key = None;
                this.api_key_from_env = false;
                cx.notify();
            })
        })
    }

    fn set_api_key(&mut self, api_key: String, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
        let settings = &AllLanguageModelSettings::get_global(cx).deepseek;
        let write_credentials =
            cx.write_credentials(&settings.api_url, "Bearer", api_key.as_bytes());

        cx.spawn(|this, mut cx| async move {
            write_credentials.await?;
            this.update(&mut cx, |this, cx| {
                this.api_key = Some(api_key);
                cx.notify();
            })
        })
    }

    fn authenticate(&self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
        if self.is_authenticated() {
            Task::ready(Ok(()))
        } else {
            let api_url = AllLanguageModelSettings::get_global(cx)
                .deepseek
                .api_url
                .clone();

            cx.spawn(|this, mut cx| async move {
                let (api_key, from_env) = if let Ok(api_key) = std::env::var(DEEPSEEK_API_KEY_VAR) {
                    (api_key, true)
                } else {
                    let (_, api_key) = cx
                        .update(|cx| cx.read_credentials(&api_url))?
                        .await?
                        .ok_or_else(|| anyhow!("credentials not found"))?;
                    (String::from_utf8(api_key)?, false)
                };

                this.update(&mut cx, |this, cx| {
                    this.api_key = Some(api_key);
                    this.api_key_from_env = from_env;
                    cx.notify();
                })
            })
        }
    }
}

impl DeepSeekLanguageModelProvider {
    pub fn new(http_client: Arc<dyn HttpClient>, cx: &mut AppContext) -> Self {
        let state = cx.new_model(|cx| State {
            api_key: None,
            api_key_from_env: false,
            _subscription: cx.observe_global::<SettingsStore>(|_this: &mut State, cx| {
                cx.notify();
            }),
        });

        Self { http_client, state }
    }
}

impl LanguageModelProviderState for DeepSeekLanguageModelProvider {
    type ObservableEntity = State;

    fn observable_entity(&self) -> Option<gpui::Model<Self::ObservableEntity>> {
        Some(self.state.clone())
    }
}

impl LanguageModelProvider for DeepSeekLanguageModelProvider {
    fn id(&self) -> LanguageModelProviderId {
        LanguageModelProviderId(PROVIDER_ID.into())
    }

    fn name(&self) -> LanguageModelProviderName {
        LanguageModelProviderName(PROVIDER_NAME.into())
    }

    fn icon(&self) -> IconName {
        IconName::AiDeepSeek
    }

    fn provided_models(&self, cx: &AppContext) -> Vec<Arc<dyn LanguageModel>> {
        let mut models = BTreeMap::default();

        models.insert("deepseek-chat", deepseek::Model::Chat);
        models.insert("deepseek-reasoner", deepseek::Model::Reasoner);

        for available_model in AllLanguageModelSettings::get_global(cx)
            .deepseek
            .available_models
            .iter()
        {
            models.insert(
                &available_model.name,
                deepseek::Model::Custom {
                    name: available_model.name.clone(),
                    display_name: available_model.display_name.clone(),
                    max_tokens: available_model.max_tokens,
                    max_output_tokens: available_model.max_output_tokens,
                },
            );
        }

        models
            .into_values()
            .map(|model| {
                Arc::new(DeepSeekLanguageModel {
                    id: LanguageModelId::from(model.id().to_string()),
                    model,
                    state: self.state.clone(),
                    http_client: self.http_client.clone(),
                    request_limiter: RateLimiter::new(4),
                }) as Arc<dyn LanguageModel>
            })
            .collect()
    }

    fn is_authenticated(&self, cx: &AppContext) -> bool {
        self.state.read(cx).is_authenticated()
    }

    fn authenticate(&self, cx: &mut AppContext) -> Task<Result<()>> {
        self.state.update(cx, |state, cx| state.authenticate(cx))
    }

    fn configuration_view(&self, cx: &mut WindowContext) -> AnyView {
        cx.new_view(|cx| ConfigurationView::new(self.state.clone(), cx))
            .into()
    }

    fn reset_credentials(&self, cx: &mut AppContext) -> Task<Result<()>> {
        self.state.update(cx, |state, cx| state.reset_api_key(cx))
    }
}

pub struct DeepSeekLanguageModel {
    id: LanguageModelId,
    model: deepseek::Model,
    state: gpui::Model<State>,
    http_client: Arc<dyn HttpClient>,
    request_limiter: RateLimiter,
}

impl DeepSeekLanguageModel {
    fn stream_completion(
        &self,
        request: deepseek::Request,
        cx: &AsyncAppContext,
    ) -> BoxFuture<'static, Result<BoxStream<'static, Result<deepseek::StreamResponse>>>> {
        let http_client = self.http_client.clone();
        let Ok((api_key, api_url)) = cx.read_model(&self.state, |state, cx| {
            let settings = &AllLanguageModelSettings::get_global(cx).deepseek;
            (state.api_key.clone(), settings.api_url.clone())
        }) else {
            return futures::future::ready(Err(anyhow!("App state dropped"))).boxed();
        };

        let future = self.request_limiter.stream(async move {
            let api_key = api_key.ok_or_else(|| anyhow!("Missing DeepSeek API Key"))?;
            let request =
                deepseek::stream_completion(http_client.as_ref(), &api_url, &api_key, request);
            let response = request.await?;
            Ok(response)
        });

        async move { Ok(future.await?.boxed()) }.boxed()
    }
}

impl LanguageModel for DeepSeekLanguageModel {
    fn id(&self) -> LanguageModelId {
        self.id.clone()
    }

    fn name(&self) -> LanguageModelName {
        LanguageModelName::from(self.model.display_name().to_string())
    }

    fn provider_id(&self) -> LanguageModelProviderId {
        LanguageModelProviderId(PROVIDER_ID.into())
    }

    fn provider_name(&self) -> LanguageModelProviderName {
        LanguageModelProviderName(PROVIDER_NAME.into())
    }

    fn telemetry_id(&self) -> String {
        format!("deepseek/{}", self.model.id())
    }

    fn max_token_count(&self) -> usize {
        self.model.max_token_count()
    }

    fn max_output_tokens(&self) -> Option<u32> {
        self.model.max_output_tokens()
    }

    fn count_tokens(
        &self,
        request: LanguageModelRequest,
        cx: &AppContext,
    ) -> BoxFuture<'static, Result<usize>> {
        cx.background_executor()
            .spawn(async move {
                let messages = request
                    .messages
                    .into_iter()
                    .map(|message| tiktoken_rs::ChatCompletionRequestMessage {
                        role: match message.role {
                            Role::User => "user".into(),
                            Role::Assistant => "assistant".into(),
                            Role::System => "system".into(),
                        },
                        content: Some(message.string_contents()),
                        name: None,
                        function_call: None,
                    })
                    .collect::<Vec<_>>();

                tiktoken_rs::num_tokens_from_messages("gpt-4", &messages)
            })
            .boxed()
    }

    fn stream_completion(
        &self,
        request: LanguageModelRequest,
        cx: &AsyncAppContext,
    ) -> BoxFuture<'static, Result<BoxStream<'static, Result<LanguageModelCompletionEvent>>>> {
        let request = request.into_deepseek(self.model.id().to_string(), self.max_output_tokens());
        let stream = self.stream_completion(request, cx);

        async move {
            let stream = stream.await?;
            Ok(stream
                .map(|result| {
                    result.and_then(|response| {
                        response
                            .choices
                            .first()
                            .ok_or_else(|| anyhow!("Empty response"))
                            .map(|choice| {
                                choice
                                    .delta
                                    .content
                                    .clone()
                                    .unwrap_or_default()
                            })
                            .map(LanguageModelCompletionEvent::Text)
                    })
                })
                .boxed())
        }
        .boxed()
    }

    fn use_any_tool(
        &self,
        request: LanguageModelRequest,
        name: String,
        description: String,
        schema: serde_json::Value,
        cx: &AsyncAppContext,
    ) -> BoxFuture<'static, Result<futures::stream::BoxStream<'static, Result<String>>>> {
        let mut deepseek_request =
            request.into_deepseek(self.model.id().to_string(), self.max_output_tokens());

        deepseek_request.tools = vec![deepseek::ToolDefinition::Function {
            function: deepseek::FunctionDefinition {
                name: name.clone(),
                description: Some(description),
                parameters: Some(schema),
            },
        }];

        let response_stream = self.stream_completion(deepseek_request, cx);

        self.request_limiter
            .run(async move {
                let stream = response_stream.await?;

                let tool_args_stream = stream
                    .filter_map(move |response| async move {
                        match response {
                            Ok(response) => {
                                for choice in response.choices {
                                    if let Some(tool_calls) = choice.delta.tool_calls {
                                        for tool_call in tool_calls {
                                            if let Some(function) = tool_call.function {
                                                if let Some(args) = function.arguments {
                                                    return Some(Ok(args));
                                                }
                                            }
                                        }
                                    }
                                }
                                None
                            }
                            Err(e) => Some(Err(e)),
                        }
                    })
                    .boxed();

                Ok(tool_args_stream)
            })
            .boxed()
    }
}

struct ConfigurationView {
    api_key_editor: View<Editor>,
    state: gpui::Model<State>,
    load_credentials_task: Option<Task<()>>,
}

impl ConfigurationView {
    fn new(state: gpui::Model<State>, cx: &mut ViewContext<Self>) -> Self {
        let api_key_editor = cx.new_view(|cx| {
            let mut editor = Editor::single_line(cx);
            editor.set_placeholder_text("sk-00000000000000000000000000000000", cx);
            editor
        });

        cx.observe(&state, |_, _, cx| {
            cx.notify();
        })
        .detach();

        let load_credentials_task = Some(cx.spawn({
            let state = state.clone();
            |this, mut cx| async move {
                if let Some(task) = state
                    .update(&mut cx, |state, cx| state.authenticate(cx))
                    .log_err()
                {
                    let _ = task.await;
                }

                this.update(&mut cx, |this, cx| {
                    this.load_credentials_task = None;
                    cx.notify();
                })
                .log_err();
            }
        }));

        Self {
            api_key_editor,
            state,
            load_credentials_task,
        }
    }

    fn save_api_key(&mut self, _: &menu::Confirm, cx: &mut ViewContext<Self>) {
        let api_key = self.api_key_editor.read(cx).text(cx);
        if api_key.is_empty() {
            return;
        }

        let state = self.state.clone();
        cx.spawn(|_, mut cx| async move {
            state
                .update(&mut cx, |state, cx| state.set_api_key(api_key, cx))?
                .await
        })
        .detach_and_log_err(cx);

        cx.notify();
    }

    fn reset_api_key(&mut self, cx: &mut ViewContext<Self>) {
        self.api_key_editor
            .update(cx, |editor, cx| editor.set_text("", cx));

        let state = self.state.clone();
        cx.spawn(|_, mut cx| async move {
            state
                .update(&mut cx, |state, cx| state.reset_api_key(cx))?
                .await
        })
        .detach_and_log_err(cx);

        cx.notify();
    }

    fn render_api_key_editor(&self, cx: &mut ViewContext<Self>) -> impl IntoElement {
        let settings = ThemeSettings::get_global(cx);
        let text_style = TextStyle {
            color: cx.theme().colors().text,
            font_family: settings.ui_font.family.clone(),
            font_features: settings.ui_font.features.clone(),
            font_fallbacks: settings.ui_font.fallbacks.clone(),
            font_size: rems(0.875).into(),
            font_weight: settings.ui_font.weight,
            font_style: FontStyle::Normal,
            line_height: relative(1.3),
            background_color: None,
            underline: None,
            strikethrough: None,
            white_space: WhiteSpace::Normal,
            truncate: None,
        };
        EditorElement::new(
            &self.api_key_editor,
            EditorStyle {
                background: cx.theme().colors().editor_background,
                local_player: cx.theme().players().local(),
                text: text_style,
                ..Default::default()
            },
        )
    }

    fn should_render_editor(&self, cx: &mut ViewContext<Self>) -> bool {
        !self.state.read(cx).is_authenticated()
    }
}

impl Render for ConfigurationView {
    fn render(&mut self, cx: &mut ViewContext<Self>) -> impl IntoElement {
        const DEEPSEEK_CONSOLE_URL: &str = "https://platform.deepseek.com/api_keys";
        const INSTRUCTIONS: [&str; 3] = [
            "To use DeepSeek in Zed, you need an API key:",
            "- Get your API key from:",
            "- Paste it below and press enter:",
        ];

        let env_var_set = self.state.read(cx).api_key_from_env;

        if self.load_credentials_task.is_some() {
            div().child(Label::new("Loading credentials...")).into_any()
        } else if self.should_render_editor(cx) {
            v_flex()
                .size_full()
                .on_action(cx.listener(Self::save_api_key))
                .child(Label::new(INSTRUCTIONS[0]))
                .child(
                    h_flex().child(Label::new(INSTRUCTIONS[1])).child(
                        Button::new("deepseek_console", DEEPSEEK_CONSOLE_URL)
                            .style(ButtonStyle::Subtle)
                            .icon(IconName::ExternalLink)
                            .icon_size(IconSize::XSmall)
                            .icon_color(Color::Muted)
                            .on_click(move |_, cx| cx.open_url(DEEPSEEK_CONSOLE_URL)),
                    ),
                )
                .child(Label::new(INSTRUCTIONS[2]))
                .child(
                    h_flex()
                        .w_full()
                        .my_2()
                        .px_2()
                        .py_1()
                        .bg(cx.theme().colors().editor_background)
                        .rounded_md()
                        .child(self.render_api_key_editor(cx)),
                )
                .child(
                    Label::new(format!(
                        "Or set {} environment variable",
                        DEEPSEEK_API_KEY_VAR
                    ))
                    .size(LabelSize::Small),
                )
                .into_any()
        } else {
            h_flex()
                .size_full()
                .justify_between()
                .child(
                    h_flex()
                        .gap_1()
                        .child(Icon::new(IconName::Check).color(Color::Success))
                        .child(Label::new(if env_var_set {
                            format!("API key set in {}", DEEPSEEK_API_KEY_VAR)
                        } else {
                            "API key configured".to_string()
                        })),
                )
                .child(
                    Button::new("reset-key", "Reset")
                        .icon(IconName::Trash)
                        .disabled(env_var_set)
                        .on_click(cx.listener(|this, _, cx| this.reset_api_key(cx))),
                )
                .into_any()
        }
    }
}

@@ -13,6 +13,7 @@ use crate::provider::{
    anthropic::AnthropicSettings,
    cloud::{self, ZedDotDevSettings},
    copilot_chat::CopilotChatSettings,
    deepseek::DeepSeekSettings,
    google::GoogleSettings,
    lmstudio::LmStudioSettings,
    ollama::OllamaSettings,

@@ -61,6 +62,7 @@ pub struct AllLanguageModelSettings {
    pub google: GoogleSettings,
    pub copilot_chat: CopilotChatSettings,
    pub lmstudio: LmStudioSettings,
    pub deepseek: DeepSeekSettings,
}

#[derive(Default, Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]

@@ -72,6 +74,7 @@ pub struct AllLanguageModelSettingsContent {
    #[serde(rename = "zed.dev")]
    pub zed_dot_dev: Option<ZedDotDevSettingsContent>,
    pub google: Option<GoogleSettingsContent>,
    pub deepseek: Option<DeepseekSettingsContent>,
    pub copilot_chat: Option<CopilotChatSettingsContent>,
}

@@ -162,6 +165,12 @@ pub struct LmStudioSettingsContent {
    pub available_models: Option<Vec<provider::lmstudio::AvailableModel>>,
}

#[derive(Default, Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
pub struct DeepseekSettingsContent {
    pub api_url: Option<String>,
    pub available_models: Option<Vec<provider::deepseek::AvailableModel>>,
}
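
Not part of the diff: a sketch of the JSON shape `DeepseekSettingsContent` accepts under the user's `language_models.deepseek` settings, written as an in-crate test so the field names stay tied to the struct above. It assumes `serde_json` is available to this crate (the provider module in this same diff already uses `serde_json::Value`); the model name and limits are placeholder values.

```rust
#[cfg(test)]
mod deepseek_settings_content_tests {
    use super::DeepseekSettingsContent;

    #[test]
    fn deserializes_user_settings_fragment() {
        // The fragment a user would put under "language_models": { "deepseek": { ... } }.
        let fragment = serde_json::json!({
            "api_url": "https://api.deepseek.com",
            "available_models": [{
                "name": "deepseek-chat",
                "display_name": "DeepSeek Chat (custom)",
                "max_tokens": 64000,
                "max_output_tokens": 4096
            }]
        });
        let content: DeepseekSettingsContent = serde_json::from_value(fragment).unwrap();
        assert_eq!(content.api_url.as_deref(), Some("https://api.deepseek.com"));
        assert_eq!(content.available_models.unwrap()[0].max_tokens, 64000);
    }
}
```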

#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
#[serde(untagged)]
pub enum OpenAiSettingsContent {

@@ -299,6 +308,18 @@ impl settings::Settings for AllLanguageModelSettings {
            lmstudio.as_ref().and_then(|s| s.available_models.clone()),
        );

        // DeepSeek
        let deepseek = value.deepseek.clone();

        merge(
            &mut settings.deepseek.api_url,
            value.deepseek.as_ref().and_then(|s| s.api_url.clone()),
        );
        merge(
            &mut settings.deepseek.available_models,
            deepseek.as_ref().and_then(|s| s.available_models.clone()),
        );

        // OpenAI
        let (openai, upgraded) = match value.openai.clone().map(|s| s.upgrade()) {
            Some((content, upgraded)) => (Some(content), upgraded),

@@ -4,6 +4,7 @@ use futures::StreamExt;
use language::{LspAdapter, LspAdapterDelegate};
use lsp::{LanguageServerBinary, LanguageServerName};
use node_runtime::NodeRuntime;
use project::Fs;
use serde_json::json;
use smol::fs;
use std::{

@@ -107,6 +108,7 @@ impl LspAdapter for CssLspAdapter {

    async fn initialization_options(
        self: Arc<Self>,
        _: &dyn Fs,
        _: &Arc<dyn LspAdapterDelegate>,
    ) -> Result<Option<serde_json::Value>> {
        Ok(Some(json!({

@@ -6,6 +6,7 @@ use gpui::{AppContext, AsyncAppContext, Task};
use http_client::github::latest_github_release;
pub use language::*;
use lsp::{LanguageServerBinary, LanguageServerName};
use project::Fs;
use regex::Regex;
use serde_json::json;
use smol::fs;

@@ -197,6 +198,7 @@ impl super::LspAdapter for GoLspAdapter {

    async fn initialization_options(
        self: Arc<Self>,
        _: &dyn Fs,
        _: &Arc<dyn LspAdapterDelegate>,
    ) -> Result<Option<serde_json::Value>> {
        Ok(Some(json!({

@@ -9,7 +9,7 @@ use http_client::github::{latest_github_release, GitHubLspBinaryVersion};
use language::{LanguageRegistry, LanguageToolchainStore, LspAdapter, LspAdapterDelegate};
use lsp::{LanguageServerBinary, LanguageServerName};
use node_runtime::NodeRuntime;
- use project::{lsp_store::language_server_settings, ContextProviderWithTasks};
+ use project::{lsp_store::language_server_settings, ContextProviderWithTasks, Fs};
use serde_json::{json, Value};
use settings::{KeymapFile, SettingsJsonSchemaParams, SettingsStore};
use smol::{

@@ -208,6 +208,7 @@ impl LspAdapter for JsonLspAdapter {

    async fn initialization_options(
        self: Arc<Self>,
        _: &dyn Fs,
        _: &Arc<dyn LspAdapterDelegate>,
    ) -> Result<Option<serde_json::Value>> {
        Ok(Some(json!({

@@ -217,6 +218,7 @@ impl LspAdapter for JsonLspAdapter {

    async fn workspace_configuration(
        self: Arc<Self>,
        _: &dyn Fs,
        delegate: &Arc<dyn LspAdapterDelegate>,
        _: Arc<dyn LanguageToolchainStore>,
        cx: &mut AsyncAppContext,

@@ -18,6 +18,7 @@ use pet_core::os_environment::Environment;
use pet_core::python_environment::PythonEnvironmentKind;
use pet_core::Configuration;
use project::lsp_store::language_server_settings;
use project::Fs;
use serde_json::{json, Value};
use smol::lock::OnceCell;
use std::cmp::Ordering;

@@ -250,6 +251,7 @@ impl LspAdapter for PythonLspAdapter {

    async fn workspace_configuration(
        self: Arc<Self>,
        _: &dyn Fs,
        adapter: &Arc<dyn LspAdapterDelegate>,
        toolchains: Arc<dyn LanguageToolchainStore>,
        cx: &mut AsyncAppContext,

@@ -931,6 +933,7 @@ impl LspAdapter for PyLspAdapter {

    async fn workspace_configuration(
        self: Arc<Self>,
        _: &dyn Fs,
        adapter: &Arc<dyn LspAdapterDelegate>,
        toolchains: Arc<dyn LanguageToolchainStore>,
        cx: &mut AsyncAppContext,

@@ -6,7 +6,7 @@ use gpui::AsyncAppContext;
use language::{LanguageToolchainStore, LspAdapter, LspAdapterDelegate};
use lsp::{LanguageServerBinary, LanguageServerName};
use node_runtime::NodeRuntime;
- use project::lsp_store::language_server_settings;
+ use project::{lsp_store::language_server_settings, Fs};
use serde_json::{json, Value};
use smol::fs;
use std::{

@@ -116,6 +116,7 @@ impl LspAdapter for TailwindLspAdapter {

    async fn initialization_options(
        self: Arc<Self>,
        _: &dyn Fs,
        _: &Arc<dyn LspAdapterDelegate>,
    ) -> Result<Option<serde_json::Value>> {
        Ok(Some(json!({

@@ -131,6 +132,7 @@ impl LspAdapter for TailwindLspAdapter {

    async fn workspace_configuration(
        self: Arc<Self>,
        _: &dyn Fs,
        delegate: &Arc<dyn LspAdapterDelegate>,
        _: Arc<dyn LanguageToolchainStore>,
        cx: &mut AsyncAppContext,

@@ -8,8 +8,8 @@ use http_client::github::{build_asset_url, AssetKind, GitHubLspBinaryVersion};
use language::{LanguageToolchainStore, LspAdapter, LspAdapterDelegate};
use lsp::{CodeActionKind, LanguageServerBinary, LanguageServerName};
use node_runtime::NodeRuntime;
- use project::lsp_store::language_server_settings;
use project::ContextProviderWithTasks;
+ use project::{lsp_store::language_server_settings, Fs};
use serde_json::{json, Value};
use smol::{fs, io::BufReader, stream::StreamExt};
use std::{

@@ -77,16 +77,25 @@ impl TypeScriptLspAdapter {
    pub fn new(node: NodeRuntime) -> Self {
        TypeScriptLspAdapter { node }
    }
-   async fn tsdk_path(adapter: &Arc<dyn LspAdapterDelegate>) -> &'static str {
+   async fn tsdk_path(fs: &dyn Fs, adapter: &Arc<dyn LspAdapterDelegate>) -> Option<&'static str> {
        let is_yarn = adapter
            .read_text_file(PathBuf::from(".yarn/sdks/typescript/lib/typescript.js"))
            .await
            .is_ok();

-       if is_yarn {
+       let tsdk_path = if is_yarn {
            ".yarn/sdks/typescript/lib"
        } else {
            "node_modules/typescript/lib"
        };

        if fs
            .is_dir(&adapter.worktree_root_path().join(tsdk_path))
            .await
        {
            Some(tsdk_path)
        } else {
            None
        }
    }
}

@@ -233,9 +242,10 @@ impl LspAdapter for TypeScriptLspAdapter {

    async fn initialization_options(
        self: Arc<Self>,
        fs: &dyn Fs,
        adapter: &Arc<dyn LspAdapterDelegate>,
    ) -> Result<Option<serde_json::Value>> {
-       let tsdk_path = Self::tsdk_path(adapter).await;
+       let tsdk_path = Self::tsdk_path(fs, adapter).await;
        Ok(Some(json!({
            "provideFormatter": true,
            "hostInfo": "zed",

@@ -257,6 +267,7 @@ impl LspAdapter for TypeScriptLspAdapter {

    async fn workspace_configuration(
        self: Arc<Self>,
        _: &dyn Fs,
        delegate: &Arc<dyn LspAdapterDelegate>,
        _: Arc<dyn LanguageToolchainStore>,
        cx: &mut AsyncAppContext,

@@ -353,6 +364,7 @@ impl LspAdapter for EsLintLspAdapter {

    async fn workspace_configuration(
        self: Arc<Self>,
        _: &dyn Fs,
        delegate: &Arc<dyn LspAdapterDelegate>,
        _: Arc<dyn LanguageToolchainStore>,
        cx: &mut AsyncAppContext,

@@ -5,7 +5,7 @@ use gpui::AsyncAppContext;
use language::{LanguageToolchainStore, LspAdapter, LspAdapterDelegate};
use lsp::{CodeActionKind, LanguageServerBinary, LanguageServerName};
use node_runtime::NodeRuntime;
- use project::lsp_store::language_server_settings;
+ use project::{lsp_store::language_server_settings, Fs};
use serde_json::Value;
use std::{
    any::Any,

@@ -34,16 +34,25 @@ impl VtslsLspAdapter {
        VtslsLspAdapter { node }
    }

-   async fn tsdk_path(adapter: &Arc<dyn LspAdapterDelegate>) -> &'static str {
+   async fn tsdk_path(fs: &dyn Fs, adapter: &Arc<dyn LspAdapterDelegate>) -> Option<&'static str> {
        let is_yarn = adapter
            .read_text_file(PathBuf::from(".yarn/sdks/typescript/lib/typescript.js"))
            .await
            .is_ok();

-       if is_yarn {
+       let tsdk_path = if is_yarn {
            ".yarn/sdks/typescript/lib"
        } else {
            Self::TYPESCRIPT_TSDK_PATH
        };

        if fs
            .is_dir(&adapter.worktree_root_path().join(tsdk_path))
            .await
        {
            Some(tsdk_path)
        } else {
            None
        }
    }
}

@@ -196,11 +205,12 @@ impl LspAdapter for VtslsLspAdapter {

    async fn workspace_configuration(
        self: Arc<Self>,
        fs: &dyn Fs,
        delegate: &Arc<dyn LspAdapterDelegate>,
        _: Arc<dyn LanguageToolchainStore>,
        cx: &mut AsyncAppContext,
    ) -> Result<Value> {
-       let tsdk_path = Self::tsdk_path(delegate).await;
+       let tsdk_path = Self::tsdk_path(fs, delegate).await;
        let config = serde_json::json!({
            "tsdk": tsdk_path,
            "suggest": {

@@ -7,7 +7,7 @@ use language::{
};
use lsp::{LanguageServerBinary, LanguageServerName};
use node_runtime::NodeRuntime;
- use project::lsp_store::language_server_settings;
+ use project::{lsp_store::language_server_settings, Fs};
use serde_json::Value;
use settings::{Settings, SettingsLocation};
use smol::fs;

@@ -128,6 +128,7 @@ impl LspAdapter for YamlLspAdapter {

    async fn workspace_configuration(
        self: Arc<Self>,
        _: &dyn Fs,
        delegate: &Arc<dyn LspAdapterDelegate>,
        _: Arc<dyn LanguageToolchainStore>,
        cx: &mut AsyncAppContext,

@@ -72,8 +72,10 @@ pub enum Model {
FourOmni,
#[serde(rename = "gpt-4o-mini", alias = "gpt-4o-mini")]
FourOmniMini,
#[serde(rename = "o1", alias = "o1-preview")]
#[serde(rename = "o1", alias = "o1")]
O1,
#[serde(rename = "o1-preview", alias = "o1-preview")]
O1Preview,
#[serde(rename = "o1-mini", alias = "o1-mini")]
O1Mini,

@@ -97,6 +99,7 @@ impl Model {
"gpt-4o" => Ok(Self::FourOmni),
"gpt-4o-mini" => Ok(Self::FourOmniMini),
"o1" => Ok(Self::O1),
"o1-preview" => Ok(Self::O1Preview),
"o1-mini" => Ok(Self::O1Mini),
_ => Err(anyhow!("invalid model id")),
}
@@ -110,6 +113,7 @@ impl Model {
Self::FourOmni => "gpt-4o",
Self::FourOmniMini => "gpt-4o-mini",
Self::O1 => "o1",
Self::O1Preview => "o1-preview",
Self::O1Mini => "o1-mini",
Self::Custom { name, .. } => name,
}
@@ -123,6 +127,7 @@ impl Model {
Self::FourOmni => "gpt-4o",
Self::FourOmniMini => "gpt-4o-mini",
Self::O1 => "o1",
Self::O1Preview => "o1-preview",
Self::O1Mini => "o1-mini",
Self::Custom {
name, display_name, ..
@@ -137,7 +142,8 @@ impl Model {
Self::FourTurbo => 128000,
Self::FourOmni => 128000,
Self::FourOmniMini => 128000,
Self::O1 => 128000,
Self::O1 => 200000,
Self::O1Preview => 128000,
Self::O1Mini => 128000,
Self::Custom { max_tokens, .. } => *max_tokens,
}
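Aside: a hedged, self-contained sketch of the serde behavior this hunk changes (illustrative enum only, not the crate's real Model type): with a dedicated O1Preview variant, "o1" and "o1-preview" now deserialize to distinct models instead of "o1-preview" aliasing to O1.

    use serde::Deserialize;

    // Illustrative only: mirrors the rename pattern from the diff.
    #[derive(Debug, PartialEq, Deserialize)]
    enum Model {
        #[serde(rename = "o1")]
        O1,
        #[serde(rename = "o1-preview")]
        O1Preview,
    }

    fn main() -> serde_json::Result<()> {
        assert_eq!(serde_json::from_str::<Model>("\"o1\"")?, Model::O1);
        assert_eq!(serde_json::from_str::<Model>("\"o1-preview\"")?, Model::O1Preview);
        Ok(())
    }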
@@ -244,7 +244,7 @@ impl LocalLspStore {
let delegate = delegate as Arc<dyn LspAdapterDelegate>;
let key = key.clone();
let adapter = adapter.clone();

let fs = self.fs.clone();
cx.spawn(move |this, mut cx| async move {
let result = {
let delegate = delegate.clone();
@@ -260,13 +260,18 @@ impl LocalLspStore {
let workspace_config = adapter
.adapter
.clone()
.workspace_configuration(&delegate, toolchains.clone(), &mut cx)
.workspace_configuration(
fs.as_ref(),
&delegate,
toolchains.clone(),
&mut cx,
)
.await?;

let mut initialization_options = adapter
.adapter
.clone()
.initialization_options(&(delegate))
.initialization_options(fs.as_ref(), &(delegate))
.await?;

match (&mut initialization_options, override_options) {
@@ -283,7 +288,13 @@ impl LocalLspStore {
adapter.adapter.prepare_initialize_params(params)
})??;

Self::setup_lsp_messages(this.clone(), &language_server, delegate, adapter);
Self::setup_lsp_messages(
this.clone(),
fs,
&language_server,
delegate,
adapter,
);

let did_change_configuration_params =
Arc::new(lsp::DidChangeConfigurationParams {
@@ -494,6 +505,7 @@ impl LocalLspStore {

fn setup_lsp_messages(
this: WeakModel<LspStore>,
fs: Arc<dyn Fs>,
language_server: &LanguageServer,
delegate: Arc<dyn LspAdapterDelegate>,
adapter: Arc<CachedLspAdapter>,
@@ -527,15 +539,17 @@ impl LocalLspStore {
let adapter = adapter.adapter.clone();
let delegate = delegate.clone();
let this = this.clone();
let fs = fs.clone();
move |params, mut cx| {
let adapter = adapter.clone();
let delegate = delegate.clone();
let this = this.clone();
let fs = fs.clone();
async move {
let toolchains =
this.update(&mut cx, |this, cx| this.toolchain_store(cx))?;
let workspace_config = adapter
.workspace_configuration(&delegate, toolchains, &mut cx)
.workspace_configuration(fs.as_ref(), &delegate, toolchains, &mut cx)
.await?;
Ok(params
.items
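Aside: a minimal sketch (with a stand-in trait, not the project's Fs trait) of the cloning pattern this hunk repeats: the Arc<dyn Fs> is moved into the handler closure and cloned again per invocation, so each 'static closure and async block owns its own cheap handle.

    use std::sync::Arc;

    // Stand-in trait for illustration only.
    trait Fs: Send + Sync {
        fn describe(&self) -> &'static str;
    }

    struct FakeFs;
    impl Fs for FakeFs {
        fn describe(&self) -> &'static str {
            "fake"
        }
    }

    // The Arc is moved into the closure (the real code first clones it from
    // self.fs), then cloned again per call so each invocation owns a handle
    // that can be moved into an `async move` block.
    fn register(fs: Arc<dyn Fs>) -> impl Fn() + 'static {
        move || {
            let fs = fs.clone();
            let _ = fs.describe();
        }
    }

    fn main() {
        let fs: Arc<dyn Fs> = Arc::new(FakeFs);
        let handler = register(fs);
        handler();
        handler(); // cheap: each call only bumps the Arc refcount
    }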
@@ -844,9 +858,8 @@ impl LocalLspStore {
})
.is_ok();
if did_update {
let response = rx.recv().await?;

Ok(Some(response))
let response = rx.recv().await.ok();
Ok(response)
} else {
Ok(None)
}
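Aside: a tiny sketch of the shape of this fix (channel type and anyhow are stand-ins, not the code's actual receiver): a dropped or failed sender now degrades to Ok(None) via .ok() rather than propagating an error with ?.

    use futures::channel::oneshot;

    // A dropped sender used to make the whole call fail; now it yields Ok(None).
    async fn receive(rx: oneshot::Receiver<String>) -> anyhow::Result<Option<String>> {
        let response = rx.await.ok(); // Err(Canceled) becomes None
        Ok(response)
    }

    fn main() {
        let (tx, rx) = oneshot::channel::<String>();
        drop(tx); // simulate the responder going away
        let result = futures::executor::block_on(receive(rx));
        assert_eq!(result.unwrap(), None);
    }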
@@ -2967,7 +2980,10 @@ impl LspStore {

let _maintain_workspace_config = {
let (sender, receiver) = watch::channel();
(Self::maintain_workspace_config(receiver, cx), sender)
(
Self::maintain_workspace_config(fs.clone(), receiver, cx),
sender,
)
};
Self {
mode: LspStoreMode::Local(LocalLspStore {
@@ -3030,6 +3046,7 @@ impl LspStore {
})
}

#[allow(clippy::too_many_arguments)]
pub(super) fn new_remote(
buffer_store: Model<BufferStore>,
worktree_store: Model<WorktreeStore>,
@@ -3037,6 +3054,7 @@ impl LspStore {
languages: Arc<LanguageRegistry>,
upstream_client: AnyProtoClient,
project_id: u64,
fs: Arc<dyn Fs>,
cx: &mut ModelContext<Self>,
) -> Self {
cx.subscribe(&buffer_store, Self::on_buffer_store_event)
@@ -3045,7 +3063,7 @@ impl LspStore {
.detach();
let _maintain_workspace_config = {
let (sender, receiver) = watch::channel();
(Self::maintain_workspace_config(receiver, cx), sender)
(Self::maintain_workspace_config(fs, receiver, cx), sender)
};
Self {
mode: LspStoreMode::Remote(RemoteLspStore {
@@ -5131,6 +5149,7 @@ impl LspStore {

pub(crate) async fn refresh_workspace_configurations(
this: &WeakModel<Self>,
fs: Arc<dyn Fs>,
mut cx: AsyncAppContext,
) {
maybe!(async move {
@@ -5177,7 +5196,12 @@ impl LspStore {
.ok()?;
for (adapter, server, delegate) in servers {
let settings = adapter
.workspace_configuration(&delegate, toolchain_store.clone(), &mut cx)
.workspace_configuration(
fs.as_ref(),
&delegate,
toolchain_store.clone(),
&mut cx,
)
.await
.ok()?;

@@ -5200,6 +5224,7 @@ impl LspStore {
}
}
fn maintain_workspace_config(
fs: Arc<dyn Fs>,
external_refresh_requests: watch::Receiver<()>,
cx: &mut ModelContext<Self>,
) -> Task<Result<()>> {
@@ -5214,7 +5239,7 @@ impl LspStore {
futures::stream::select(settings_changed_rx, external_refresh_requests);
cx.spawn(move |this, cx| async move {
while let Some(()) = joint_future.next().await {
Self::refresh_workspace_configurations(&this, cx.clone()).await;
Self::refresh_workspace_configurations(&this, fs.clone(), cx.clone()).await;
}

drop(settings_observation);
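Aside: a rough sketch, assuming the watch receiver behaves like any Stream of () items (the channels and refresh function below are stand-ins, not Zed's types), of the pattern this hunk extends: settings changes and external refresh requests are merged with futures::stream::select, and each item drives one refresh pass, now with the Fs handle threaded through.

    use futures::{channel::mpsc, stream, StreamExt};

    async fn refresh() {
        // Placeholder for refresh_workspace_configurations(&this, fs.clone(), cx.clone()).
    }

    // Merge two unit-item streams and run one refresh per item.
    async fn maintain(
        settings_changed: mpsc::UnboundedReceiver<()>,
        external_requests: mpsc::UnboundedReceiver<()>,
    ) {
        let mut joint = stream::select(settings_changed, external_requests);
        while let Some(()) = joint.next().await {
            refresh().await;
        }
    }

    fn main() {
        let (settings_tx, settings_rx) = mpsc::unbounded::<()>();
        let (external_tx, external_rx) = mpsc::unbounded::<()>();
        settings_tx.unbounded_send(()).unwrap(); // one simulated settings change
        drop(settings_tx);
        drop(external_tx); // close both streams so the loop exits after draining
        futures::executor::block_on(maintain(settings_rx, external_rx));
    }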
@@ -8408,6 +8433,7 @@ impl LspAdapter for SshLspAdapter {

async fn initialization_options(
self: Arc<Self>,
_: &dyn Fs,
_: &Arc<dyn LspAdapterDelegate>,
) -> Result<Option<serde_json::Value>> {
let Some(options) = &self.initialization_options else {
@@ -8792,16 +8818,59 @@ fn ensure_uniform_list_compatible_label(label: &mut CodeLabel) {
return;
}

for (range, _) in &mut label.runs {
range.start = offset_map[range.start];
range.end = offset_map[range.end];
let last_index = new_idx;
let mut run_ranges_errors = Vec::new();
label.runs.retain_mut(|(range, _)| {
match offset_map.get(range.start) {
Some(&start) => range.start = start,
None => {
run_ranges_errors.push(range.clone());
return false;
}
}

match offset_map.get(range.end) {
Some(&end) => range.end = end,
None => {
run_ranges_errors.push(range.clone());
range.end = last_index;
}
}
true
});
if !run_ranges_errors.is_empty() {
log::error!(
"Completion label has errors in its run ranges: {run_ranges_errors:?}, label text: {}",
label.text
);
}

let mut wrong_filter_range = None;
if label.filter_range == (0..label.text.len()) {
label.filter_range = 0..new_text.len();
} else {
label.filter_range.start = offset_map[label.filter_range.start];
label.filter_range.end = offset_map[label.filter_range.end];
let mut original_filter_range = Some(label.filter_range.clone());
match offset_map.get(label.filter_range.start) {
Some(&start) => label.filter_range.start = start,
None => {
wrong_filter_range = original_filter_range.take();
label.filter_range.start = last_index;
}
}

match offset_map.get(label.filter_range.end) {
Some(&end) => label.filter_range.end = end,
None => {
wrong_filter_range = original_filter_range.take();
label.filter_range.end = last_index;
}
}
}
if let Some(wrong_filter_range) = wrong_filter_range {
log::error!(
"Completion label has an invalid filter range: {wrong_filter_range:?}, label text: {}",
label.text
);
}

label.text = new_text;
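Aside: a self-contained sketch of the defensive remapping introduced above (names and types are illustrative): retain_mut drops runs whose start offset cannot be remapped and clamps out-of-range ends to the last valid index, instead of panicking on a direct offset_map[...] index.

    use std::ops::Range;

    // Remap ranges through an offset map; report (and drop or clamp) bad ones.
    fn remap_runs(
        runs: &mut Vec<Range<usize>>,
        offset_map: &[usize],
        last_index: usize,
    ) -> Vec<Range<usize>> {
        let mut errors = Vec::new();
        runs.retain_mut(|range| {
            match offset_map.get(range.start) {
                Some(&start) => range.start = start,
                None => {
                    errors.push(range.clone());
                    return false; // start is unrecoverable: drop the run
                }
            }
            match offset_map.get(range.end) {
                Some(&end) => range.end = end,
                None => {
                    errors.push(range.clone());
                    range.end = last_index; // end is recoverable: clamp it
                }
            }
            true
        });
        errors
    }

    fn main() {
        let offset_map = vec![0, 1, 2, 3]; // old offset -> new offset
        let mut runs = vec![0..2, 1..9, 7..9];
        let bad = remap_runs(&mut runs, &offset_map, 3);
        assert_eq!(runs, vec![0..2, 1..3]); // second run clamped, third dropped
        assert_eq!(bad.len(), 2); // both problems reported for logging
    }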
@@ -808,6 +808,7 @@ impl Project {
languages.clone(),
ssh_proto.clone(),
SSH_PROJECT_ID,
fs.clone(),
cx,
)
});
@@ -981,6 +982,7 @@ impl Project {
languages.clone(),
client.clone().into(),
remote_id,
fs.clone(),
cx,
);
lsp_store.set_language_server_statuses_from_proto(response.payload.language_servers);
@@ -128,6 +128,7 @@ pub enum IconName {
Ai,
AiAnthropic,
AiAnthropicHosted,
AiDeepSeek,
AiGoogle,
AiLmStudio,
AiOllama,
@@ -1653,6 +1653,7 @@ pub(crate) fn next_subword_end(
if need_backtrack {
*new_point.column_mut() -= 1;
}
let new_point = map.clip_point(new_point, Bias::Left);
if point == new_point {
break;
}
@@ -5725,7 +5725,8 @@ impl<'a> GitTraversal<'a> {
} else if entry.is_file() {
// For a file entry, park the cursor on the corresponding status
if statuses.seek_forward(&PathTarget::Path(repo_path.as_ref()), Bias::Left, &()) {
self.current_entry_summary = Some(statuses.item().unwrap().status.into());
// TODO: Investigate statuses.item() being None here.
self.current_entry_summary = statuses.item().map(|item| item.status.into());
} else {
self.current_entry_summary = Some(GitSummary::UNCHANGED);
}
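Aside: the fix above reduces to swapping .unwrap() for .map(), so a missing cursor item yields None instead of a panic; a trivial sketch with stand-in types:

    #[derive(Debug, PartialEq)]
    struct Summary(u32); // stand-in for the real GitSummary conversion

    fn entry_summary(item: Option<&u32>) -> Option<Summary> {
        // Before: Some(Summary(*item.unwrap())) -- panics when item is None.
        item.map(|status| Summary(*status))
    }

    fn main() {
        assert_eq!(entry_summary(Some(&1)), Some(Summary(1)));
        assert_eq!(entry_summary(None), None); // no panic when the cursor has no item
    }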
@@ -2,7 +2,7 @@
description = "The fast, collaborative code editor."
edition.workspace = true
name = "zed"
version = "0.171.0"
version = "0.171.3"
publish.workspace = true
license = "GPL-3.0-or-later"
authors = ["Zed Team <hi@zed.dev>"]
@@ -1 +1 @@
dev
preview