Compare commits
23 Commits
Comparing branches: fix-linux-… → icon-secur…

Commits in this comparison (SHA1):
850ecffb3d, 76167109db, cd8679e81a, 43f977c6b9, bdb8caa42e, 9ae77ec3c9, d5ed9d3e3a, 74a1b5d14d, 07af011eb4, c357dc25fc, 93bc6616c6, a33e881906, c978db8626, 2dad46c5c0, 4c51fffbb5, 0d80b452fb, bad6bde03a, 4ec2d04ad9, 0f0017dc8e, 9db0d66251, b07389d9f3, db2e26f67b, 391c92b07a
29 Cargo.lock (generated)
@@ -5342,6 +5342,32 @@ dependencies = [
 "zlog",
]

[[package]]
name = "edit_prediction_context2"
version = "0.1.0"
dependencies = [
 "anyhow",
 "collections",
 "env_logger 0.11.8",
 "futures 0.3.31",
 "gpui",
 "indoc",
 "language",
 "log",
 "lsp",
 "parking_lot",
 "pretty_assertions",
 "project",
 "serde",
 "serde_json",
 "settings",
 "smallvec",
 "text",
 "tree-sitter",
 "util",
 "zlog",
]

[[package]]
name = "editor"
version = "0.1.0"

@@ -5405,6 +5431,7 @@ dependencies = [
 "tree-sitter-bash",
 "tree-sitter-c",
 "tree-sitter-html",
 "tree-sitter-md",
 "tree-sitter-python",
 "tree-sitter-rust",
 "tree-sitter-typescript",

@@ -21692,6 +21719,7 @@ dependencies = [
 "db",
 "edit_prediction",
 "edit_prediction_context",
 "edit_prediction_context2",
 "editor",
 "feature_flags",
 "fs",

@@ -21741,7 +21769,6 @@ dependencies = [
 "clap",
 "client",
 "cloud_llm_client",
 "cloud_zeta2_prompt",
 "collections",
 "edit_prediction_context",
 "editor",

Cargo.toml
@@ -56,6 +56,7 @@ members = [
    "crates/edit_prediction",
    "crates/edit_prediction_button",
    "crates/edit_prediction_context",
    "crates/edit_prediction_context2",
    "crates/zeta2_tools",
    "crates/editor",
    "crates/eval",

@@ -316,6 +317,7 @@ image_viewer = { path = "crates/image_viewer" }
edit_prediction = { path = "crates/edit_prediction" }
edit_prediction_button = { path = "crates/edit_prediction_button" }
edit_prediction_context = { path = "crates/edit_prediction_context" }
edit_prediction_context2 = { path = "crates/edit_prediction_context2" }
zeta2_tools = { path = "crates/zeta2_tools" }
inspector_ui = { path = "crates/inspector_ui" }
install_cli = { path = "crates/install_cli" }

@@ -584,41 +584,100 @@ impl Model {
|
||||
}
|
||||
}
|
||||
|
||||
pub fn cross_region_inference_id(&self, region: &str) -> anyhow::Result<String> {
|
||||
pub fn cross_region_inference_id(
|
||||
&self,
|
||||
region: &str,
|
||||
allow_global: bool,
|
||||
) -> anyhow::Result<String> {
|
||||
// List derived from here:
|
||||
// https://docs.aws.amazon.com/bedrock/latest/userguide/inference-profiles-support.html#inference-profiles-support-system
|
||||
let model_id = self.request_id();
|
||||
|
||||
let supports_global = matches!(
|
||||
self,
|
||||
Model::ClaudeOpus4_5
|
||||
| Model::ClaudeOpus4_5Thinking
|
||||
| Model::ClaudeHaiku4_5
|
||||
| Model::ClaudeSonnet4
|
||||
| Model::ClaudeSonnet4Thinking
|
||||
| Model::ClaudeSonnet4_5
|
||||
| Model::ClaudeSonnet4_5Thinking
|
||||
);
|
||||
|
||||
let region_group = if region.starts_with("us-gov-") {
|
||||
"us-gov"
|
||||
} else if region.starts_with("us-") {
|
||||
"us"
|
||||
} else if region.starts_with("us-")
|
||||
|| region.starts_with("ca-")
|
||||
|| region.starts_with("sa-")
|
||||
{
|
||||
if allow_global && supports_global {
|
||||
"global"
|
||||
} else {
|
||||
"us"
|
||||
}
|
||||
} else if region.starts_with("eu-") {
|
||||
"eu"
|
||||
if allow_global && supports_global {
|
||||
"global"
|
||||
} else {
|
||||
"eu"
|
||||
}
|
||||
} else if region.starts_with("ap-") || region == "me-central-1" || region == "me-south-1" {
|
||||
"apac"
|
||||
} else if region.starts_with("ca-") || region.starts_with("sa-") {
|
||||
// Canada and South America regions - default to US profiles
|
||||
"us"
|
||||
if allow_global && supports_global {
|
||||
"global"
|
||||
} else {
|
||||
"apac"
|
||||
}
|
||||
} else {
|
||||
anyhow::bail!("Unsupported Region {region}");
|
||||
};
|
||||
|
||||
let model_id = self.request_id();
|
||||
match (self, region_group, region) {
|
||||
(Model::Custom { .. }, _, _) => Ok(self.request_id().into()),
|
||||
|
||||
match (self, region_group) {
|
||||
// Custom models can't have CRI IDs
|
||||
(Model::Custom { .. }, _) => Ok(self.request_id().into()),
|
||||
(
|
||||
Model::ClaudeOpus4_5
|
||||
| Model::ClaudeOpus4_5Thinking
|
||||
| Model::ClaudeHaiku4_5
|
||||
| Model::ClaudeSonnet4
|
||||
| Model::ClaudeSonnet4Thinking
|
||||
| Model::ClaudeSonnet4_5
|
||||
| Model::ClaudeSonnet4_5Thinking,
|
||||
"global",
|
||||
_,
|
||||
) => Ok(format!("{}.{}", region_group, model_id)),
|
||||
|
||||
// Models with US Gov only
|
||||
(Model::Claude3_5Sonnet, "us-gov") | (Model::Claude3Haiku, "us-gov") => {
|
||||
Ok(format!("{}.{}", region_group, model_id))
|
||||
(
|
||||
Model::Claude3Haiku
|
||||
| Model::Claude3_5Sonnet
|
||||
| Model::Claude3_7Sonnet
|
||||
| Model::Claude3_7SonnetThinking
|
||||
| Model::ClaudeSonnet4_5
|
||||
| Model::ClaudeSonnet4_5Thinking,
|
||||
"us-gov",
|
||||
_,
|
||||
) => Ok(format!("{}.{}", region_group, model_id)),
|
||||
|
||||
(
|
||||
Model::ClaudeHaiku4_5 | Model::ClaudeSonnet4_5 | Model::ClaudeSonnet4_5Thinking,
|
||||
"apac",
|
||||
"ap-southeast-2" | "ap-southeast-4",
|
||||
) => Ok(format!("au.{}", model_id)),
|
||||
|
||||
(
|
||||
Model::ClaudeHaiku4_5 | Model::ClaudeSonnet4_5 | Model::ClaudeSonnet4_5Thinking,
|
||||
"apac",
|
||||
"ap-northeast-1" | "ap-northeast-3",
|
||||
) => Ok(format!("jp.{}", model_id)),
|
||||
|
||||
(Model::AmazonNovaLite, "us", r) if r.starts_with("ca-") => {
|
||||
Ok(format!("ca.{}", model_id))
|
||||
}
|
||||
|
||||
// Available everywhere
|
||||
(Model::AmazonNovaLite | Model::AmazonNovaMicro | Model::AmazonNovaPro, _) => {
|
||||
Ok(format!("{}.{}", region_group, model_id))
|
||||
}
|
||||
|
||||
// Models in US
|
||||
(
|
||||
Model::AmazonNovaPremier
|
||||
| Model::AmazonNovaLite
|
||||
| Model::AmazonNovaMicro
|
||||
| Model::AmazonNovaPro
|
||||
| Model::Claude3_5Haiku
|
||||
| Model::ClaudeHaiku4_5
|
||||
| Model::Claude3_5Sonnet
|
||||
@@ -655,16 +714,18 @@ impl Model {
|
||||
| Model::PalmyraWriterX4
|
||||
| Model::PalmyraWriterX5,
|
||||
"us",
|
||||
_,
|
||||
) => Ok(format!("{}.{}", region_group, model_id)),
|
||||
|
||||
// Models available in EU
|
||||
(
|
||||
Model::Claude3_5Sonnet
|
||||
Model::AmazonNovaLite
|
||||
| Model::AmazonNovaMicro
|
||||
| Model::AmazonNovaPro
|
||||
| Model::Claude3_5Sonnet
|
||||
| Model::ClaudeHaiku4_5
|
||||
| Model::Claude3_7Sonnet
|
||||
| Model::Claude3_7SonnetThinking
|
||||
| Model::ClaudeSonnet4
|
||||
| Model::ClaudeSonnet4Thinking
|
||||
| Model::ClaudeSonnet4_5
|
||||
| Model::ClaudeSonnet4_5Thinking
|
||||
| Model::Claude3Haiku
|
||||
@@ -673,26 +734,26 @@ impl Model {
|
||||
| Model::MetaLlama323BInstructV1
|
||||
| Model::MistralPixtralLarge2502V1,
|
||||
"eu",
|
||||
_,
|
||||
) => Ok(format!("{}.{}", region_group, model_id)),
|
||||
|
||||
// Models available in APAC
|
||||
(
|
||||
Model::Claude3_5Sonnet
|
||||
Model::AmazonNovaLite
|
||||
| Model::AmazonNovaMicro
|
||||
| Model::AmazonNovaPro
|
||||
| Model::Claude3_5Sonnet
|
||||
| Model::Claude3_5SonnetV2
|
||||
| Model::ClaudeHaiku4_5
|
||||
| Model::Claude3Haiku
|
||||
| Model::Claude3Sonnet
|
||||
| Model::Claude3_7Sonnet
|
||||
| Model::Claude3_7SonnetThinking
|
||||
| Model::ClaudeSonnet4
|
||||
| Model::ClaudeSonnet4Thinking
|
||||
| Model::ClaudeSonnet4_5
|
||||
| Model::ClaudeSonnet4_5Thinking,
|
||||
| Model::Claude3Haiku
|
||||
| Model::Claude3Sonnet,
|
||||
"apac",
|
||||
_,
|
||||
) => Ok(format!("{}.{}", region_group, model_id)),
|
||||
|
||||
// Any other combination is not supported
|
||||
_ => Ok(self.request_id().into()),
|
||||
_ => Ok(model_id.into()),
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -705,15 +766,15 @@ mod tests {
|
||||
fn test_us_region_inference_ids() -> anyhow::Result<()> {
|
||||
// Test US regions
|
||||
assert_eq!(
|
||||
Model::Claude3_5SonnetV2.cross_region_inference_id("us-east-1")?,
|
||||
Model::Claude3_5SonnetV2.cross_region_inference_id("us-east-1", false)?,
|
||||
"us.anthropic.claude-3-5-sonnet-20241022-v2:0"
|
||||
);
|
||||
assert_eq!(
|
||||
Model::Claude3_5SonnetV2.cross_region_inference_id("us-west-2")?,
|
||||
Model::Claude3_5SonnetV2.cross_region_inference_id("us-west-2", false)?,
|
||||
"us.anthropic.claude-3-5-sonnet-20241022-v2:0"
|
||||
);
|
||||
assert_eq!(
|
||||
Model::AmazonNovaPro.cross_region_inference_id("us-east-2")?,
|
||||
Model::AmazonNovaPro.cross_region_inference_id("us-east-2", false)?,
|
||||
"us.amazon.nova-pro-v1:0"
|
||||
);
|
||||
Ok(())
|
||||
@@ -723,19 +784,19 @@ mod tests {
|
||||
fn test_eu_region_inference_ids() -> anyhow::Result<()> {
|
||||
// Test European regions
|
||||
assert_eq!(
|
||||
Model::ClaudeSonnet4.cross_region_inference_id("eu-west-1")?,
|
||||
Model::ClaudeSonnet4.cross_region_inference_id("eu-west-1", false)?,
|
||||
"eu.anthropic.claude-sonnet-4-20250514-v1:0"
|
||||
);
|
||||
assert_eq!(
|
||||
Model::ClaudeSonnet4_5.cross_region_inference_id("eu-west-1")?,
|
||||
Model::ClaudeSonnet4_5.cross_region_inference_id("eu-west-1", false)?,
|
||||
"eu.anthropic.claude-sonnet-4-5-20250929-v1:0"
|
||||
);
|
||||
assert_eq!(
|
||||
Model::Claude3Sonnet.cross_region_inference_id("eu-west-1")?,
|
||||
Model::Claude3Sonnet.cross_region_inference_id("eu-west-1", false)?,
|
||||
"eu.anthropic.claude-3-sonnet-20240229-v1:0"
|
||||
);
|
||||
assert_eq!(
|
||||
Model::AmazonNovaMicro.cross_region_inference_id("eu-north-1")?,
|
||||
Model::AmazonNovaMicro.cross_region_inference_id("eu-north-1", false)?,
|
||||
"eu.amazon.nova-micro-v1:0"
|
||||
);
|
||||
Ok(())
|
||||
@@ -745,15 +806,15 @@ mod tests {
|
||||
fn test_apac_region_inference_ids() -> anyhow::Result<()> {
|
||||
// Test Asia-Pacific regions
|
||||
assert_eq!(
|
||||
Model::Claude3_5SonnetV2.cross_region_inference_id("ap-northeast-1")?,
|
||||
Model::Claude3_5SonnetV2.cross_region_inference_id("ap-northeast-1", false)?,
|
||||
"apac.anthropic.claude-3-5-sonnet-20241022-v2:0"
|
||||
);
|
||||
assert_eq!(
|
||||
Model::Claude3_5SonnetV2.cross_region_inference_id("ap-southeast-2")?,
|
||||
Model::Claude3_5SonnetV2.cross_region_inference_id("ap-southeast-2", false)?,
|
||||
"apac.anthropic.claude-3-5-sonnet-20241022-v2:0"
|
||||
);
|
||||
assert_eq!(
|
||||
Model::AmazonNovaLite.cross_region_inference_id("ap-south-1")?,
|
||||
Model::AmazonNovaLite.cross_region_inference_id("ap-south-1", false)?,
|
||||
"apac.amazon.nova-lite-v1:0"
|
||||
);
|
||||
Ok(())
|
||||
@@ -763,11 +824,11 @@ mod tests {
|
||||
fn test_gov_region_inference_ids() -> anyhow::Result<()> {
|
||||
// Test Government regions
|
||||
assert_eq!(
|
||||
Model::Claude3_5Sonnet.cross_region_inference_id("us-gov-east-1")?,
|
||||
Model::Claude3_5Sonnet.cross_region_inference_id("us-gov-east-1", false)?,
|
||||
"us-gov.anthropic.claude-3-5-sonnet-20240620-v1:0"
|
||||
);
|
||||
assert_eq!(
|
||||
Model::Claude3Haiku.cross_region_inference_id("us-gov-west-1")?,
|
||||
Model::Claude3Haiku.cross_region_inference_id("us-gov-west-1", false)?,
|
||||
"us-gov.anthropic.claude-3-haiku-20240307-v1:0"
|
||||
);
|
||||
Ok(())
|
||||
@@ -777,15 +838,15 @@ mod tests {
|
||||
fn test_meta_models_inference_ids() -> anyhow::Result<()> {
|
||||
// Test Meta models
|
||||
assert_eq!(
|
||||
Model::MetaLlama370BInstructV1.cross_region_inference_id("us-east-1")?,
|
||||
Model::MetaLlama370BInstructV1.cross_region_inference_id("us-east-1", false)?,
|
||||
"meta.llama3-70b-instruct-v1:0"
|
||||
);
|
||||
assert_eq!(
|
||||
Model::MetaLlama3170BInstructV1.cross_region_inference_id("us-east-1")?,
|
||||
Model::MetaLlama3170BInstructV1.cross_region_inference_id("us-east-1", false)?,
|
||||
"us.meta.llama3-1-70b-instruct-v1:0"
|
||||
);
|
||||
assert_eq!(
|
||||
Model::MetaLlama321BInstructV1.cross_region_inference_id("eu-west-1")?,
|
||||
Model::MetaLlama321BInstructV1.cross_region_inference_id("eu-west-1", false)?,
|
||||
"eu.meta.llama3-2-1b-instruct-v1:0"
|
||||
);
|
||||
Ok(())
|
||||
@@ -796,11 +857,11 @@ mod tests {
|
||||
// Mistral models don't follow the regional prefix pattern,
|
||||
// so they should return their original IDs
|
||||
assert_eq!(
|
||||
Model::MistralMistralLarge2402V1.cross_region_inference_id("us-east-1")?,
|
||||
Model::MistralMistralLarge2402V1.cross_region_inference_id("us-east-1", false)?,
|
||||
"mistral.mistral-large-2402-v1:0"
|
||||
);
|
||||
assert_eq!(
|
||||
Model::MistralMixtral8x7BInstructV0.cross_region_inference_id("eu-west-1")?,
|
||||
Model::MistralMixtral8x7BInstructV0.cross_region_inference_id("eu-west-1", false)?,
|
||||
"mistral.mixtral-8x7b-instruct-v0:1"
|
||||
);
|
||||
Ok(())
|
||||
@@ -811,11 +872,11 @@ mod tests {
|
||||
// AI21 models don't follow the regional prefix pattern,
|
||||
// so they should return their original IDs
|
||||
assert_eq!(
|
||||
Model::AI21J2UltraV1.cross_region_inference_id("us-east-1")?,
|
||||
Model::AI21J2UltraV1.cross_region_inference_id("us-east-1", false)?,
|
||||
"ai21.j2-ultra-v1"
|
||||
);
|
||||
assert_eq!(
|
||||
Model::AI21JambaInstructV1.cross_region_inference_id("eu-west-1")?,
|
||||
Model::AI21JambaInstructV1.cross_region_inference_id("eu-west-1", false)?,
|
||||
"ai21.jamba-instruct-v1:0"
|
||||
);
|
||||
Ok(())
|
||||
@@ -826,11 +887,11 @@ mod tests {
|
||||
// Cohere models don't follow the regional prefix pattern,
|
||||
// so they should return their original IDs
|
||||
assert_eq!(
|
||||
Model::CohereCommandRV1.cross_region_inference_id("us-east-1")?,
|
||||
Model::CohereCommandRV1.cross_region_inference_id("us-east-1", false)?,
|
||||
"cohere.command-r-v1:0"
|
||||
);
|
||||
assert_eq!(
|
||||
Model::CohereCommandTextV14_4k.cross_region_inference_id("ap-southeast-1")?,
|
||||
Model::CohereCommandTextV14_4k.cross_region_inference_id("ap-southeast-1", false)?,
|
||||
"cohere.command-text-v14:7:4k"
|
||||
);
|
||||
Ok(())
|
||||
@@ -850,10 +911,17 @@ mod tests {
|
||||
|
||||
// Custom model should return its name unchanged
|
||||
assert_eq!(
|
||||
custom_model.cross_region_inference_id("us-east-1")?,
|
||||
custom_model.cross_region_inference_id("us-east-1", false)?,
|
||||
"custom.my-model-v1:0"
|
||||
);
|
||||
|
||||
// Test that models without global support fall back to regional when allow_global is true
|
||||
assert_eq!(
|
||||
Model::AmazonNovaPro.cross_region_inference_id("us-east-1", true)?,
|
||||
"us.amazon.nova-pro-v1:0",
|
||||
"Nova Pro should fall back to regional profile even when allow_global is true"
|
||||
);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
@@ -892,3 +960,28 @@ mod tests {
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_global_inference_ids() -> anyhow::Result<()> {
|
||||
// Test global inference for models that support it when allow_global is true
|
||||
assert_eq!(
|
||||
Model::ClaudeSonnet4.cross_region_inference_id("us-east-1", true)?,
|
||||
"global.anthropic.claude-sonnet-4-20250514-v1:0"
|
||||
);
|
||||
assert_eq!(
|
||||
Model::ClaudeSonnet4_5.cross_region_inference_id("eu-west-1", true)?,
|
||||
"global.anthropic.claude-sonnet-4-5-20250929-v1:0"
|
||||
);
|
||||
assert_eq!(
|
||||
Model::ClaudeHaiku4_5.cross_region_inference_id("ap-south-1", true)?,
|
||||
"global.anthropic.claude-haiku-4-5-20251001-v1:0"
|
||||
);
|
||||
|
||||
// Test that regional prefix is used when allow_global is false
|
||||
assert_eq!(
|
||||
Model::ClaudeSonnet4.cross_region_inference_id("us-east-1", false)?,
|
||||
"us.anthropic.claude-sonnet-4-20250514-v1:0"
|
||||
);
|
||||
|
||||
Ok(())
|
||||
}
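
Taken together, these changes add an allow_global flag to cross_region_inference_id, so Claude models that support it can opt into the global cross-region inference profile while every other model keeps its regional prefix. A minimal caller-side sketch (hypothetical call site, not part of this diff; the expected strings mirror the assertions above):

    let global = Model::ClaudeSonnet4.cross_region_inference_id("us-east-1", true)?;
    // => "global.anthropic.claude-sonnet-4-20250514-v1:0"
    let regional = Model::AmazonNovaPro.cross_region_inference_id("us-east-1", true)?;
    // => "us.amazon.nova-pro-v1:0" (no global profile, so it falls back to the regional one)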
|
||||
|
||||
@@ -1723,6 +1723,10 @@ impl ProtoClient for Client {
|
||||
fn is_via_collab(&self) -> bool {
|
||||
true
|
||||
}
|
||||
|
||||
fn has_wsl_interop(&self) -> bool {
|
||||
false
|
||||
}
|
||||
}
|
||||
|
||||
/// prefix for the zed:// url scheme
|
||||
|
||||
@@ -121,6 +121,8 @@ CREATE TABLE "project_repositories" (
|
||||
"merge_message" VARCHAR,
|
||||
"branch_summary" VARCHAR,
|
||||
"head_commit_details" VARCHAR,
|
||||
"remote_upstream_url" VARCHAR,
|
||||
"remote_origin_url" VARCHAR,
|
||||
PRIMARY KEY (project_id, id)
|
||||
);
|
||||
|
||||
|
||||
@@ -0,0 +1,2 @@
ALTER TABLE "project_repositories" ADD COLUMN "remote_upstream_url" VARCHAR;
ALTER TABLE "project_repositories" ADD COLUMN "remote_origin_url" VARCHAR;

@@ -362,6 +362,8 @@ impl Database {
|
||||
entry_ids: ActiveValue::set("[]".into()),
|
||||
head_commit_details: ActiveValue::set(None),
|
||||
merge_message: ActiveValue::set(None),
|
||||
remote_upstream_url: ActiveValue::set(None),
|
||||
remote_origin_url: ActiveValue::set(None),
|
||||
}
|
||||
}),
|
||||
)
|
||||
@@ -511,6 +513,8 @@ impl Database {
|
||||
serde_json::to_string(&update.current_merge_conflicts).unwrap(),
|
||||
)),
|
||||
merge_message: ActiveValue::set(update.merge_message.clone()),
|
||||
remote_upstream_url: ActiveValue::set(update.remote_upstream_url.clone()),
|
||||
remote_origin_url: ActiveValue::set(update.remote_origin_url.clone()),
|
||||
})
|
||||
.on_conflict(
|
||||
OnConflict::columns([
|
||||
@@ -1005,6 +1009,8 @@ impl Database {
|
||||
is_last_update: true,
|
||||
merge_message: db_repository_entry.merge_message,
|
||||
stash_entries: Vec::new(),
|
||||
remote_upstream_url: db_repository_entry.remote_upstream_url.clone(),
|
||||
remote_origin_url: db_repository_entry.remote_origin_url.clone(),
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
@@ -796,6 +796,8 @@ impl Database {
|
||||
is_last_update: true,
|
||||
merge_message: db_repository.merge_message,
|
||||
stash_entries: Vec::new(),
|
||||
remote_upstream_url: db_repository.remote_upstream_url.clone(),
|
||||
remote_origin_url: db_repository.remote_origin_url.clone(),
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
@@ -22,6 +22,8 @@ pub struct Model {
|
||||
pub branch_summary: Option<String>,
|
||||
// A JSON object representing the current Head commit values
|
||||
pub head_commit_details: Option<String>,
|
||||
pub remote_upstream_url: Option<String>,
|
||||
pub remote_origin_url: Option<String>,
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
|
||||
|
||||
@@ -469,6 +469,8 @@ impl Server {
|
||||
.add_request_handler(forward_mutating_project_request::<proto::GetBlobContent>)
|
||||
.add_request_handler(forward_mutating_project_request::<proto::GitCreateBranch>)
|
||||
.add_request_handler(forward_mutating_project_request::<proto::GitChangeBranch>)
|
||||
.add_request_handler(forward_mutating_project_request::<proto::GitCreateRemote>)
|
||||
.add_request_handler(forward_mutating_project_request::<proto::GitRemoveRemote>)
|
||||
.add_request_handler(forward_mutating_project_request::<proto::CheckForPushedCommits>)
|
||||
.add_message_handler(broadcast_project_message_from_host::<proto::AdvertiseContexts>)
|
||||
.add_message_handler(update_context)
|
||||
|
||||
@@ -3518,7 +3518,6 @@ async fn test_git_blame_is_forwarded(cx_a: &mut TestAppContext, cx_b: &mut TestA
|
||||
.into_iter()
|
||||
.map(|(sha, message)| (sha.parse().unwrap(), message.into()))
|
||||
.collect(),
|
||||
remote_url: Some("git@github.com:zed-industries/zed.git".to_string()),
|
||||
};
|
||||
client_a.fs().set_blame_for_repo(
|
||||
Path::new(path!("/my-repo/.git")),
|
||||
@@ -3603,10 +3602,6 @@ async fn test_git_blame_is_forwarded(cx_a: &mut TestAppContext, cx_b: &mut TestA
|
||||
for (idx, (buffer, entry)) in entries.iter().flatten().enumerate() {
|
||||
let details = blame.details_for_entry(*buffer, entry).unwrap();
|
||||
assert_eq!(details.message, format!("message for idx-{}", idx));
|
||||
assert_eq!(
|
||||
details.permalink.unwrap().to_string(),
|
||||
format!("https://github.com/zed-industries/zed/commit/{}", entry.sha)
|
||||
);
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
@@ -1105,9 +1105,33 @@ impl EditPredictionButton {
|
||||
.separator();
|
||||
}
|
||||
|
||||
let menu = self.build_language_settings_menu(menu, window, cx);
|
||||
let menu = self.add_provider_switching_section(menu, provider, cx);
|
||||
menu = self.build_language_settings_menu(menu, window, cx);
|
||||
|
||||
if cx.has_flag::<Zeta2FeatureFlag>() {
|
||||
let settings = all_language_settings(None, cx);
|
||||
let context_retrieval = settings.edit_predictions.use_context;
|
||||
menu = menu.separator().header("Context Retrieval").item(
|
||||
ContextMenuEntry::new("Enable Context Retrieval")
|
||||
.toggleable(IconPosition::Start, context_retrieval)
|
||||
.action(workspace::ToggleEditPrediction.boxed_clone())
|
||||
.handler({
|
||||
let fs = self.fs.clone();
|
||||
move |_, cx| {
|
||||
update_settings_file(fs.clone(), cx, move |settings, _| {
|
||||
settings
|
||||
.project
|
||||
.all_languages
|
||||
.features
|
||||
.get_or_insert_default()
|
||||
.experimental_edit_prediction_context_retrieval =
|
||||
Some(!context_retrieval)
|
||||
});
|
||||
}
|
||||
}),
|
||||
);
|
||||
}
|
||||
|
||||
menu = self.add_provider_switching_section(menu, provider, cx);
|
||||
menu
|
||||
})
|
||||
}
|
||||
|
||||
42 crates/edit_prediction_context2/Cargo.toml (Normal file)
@@ -0,0 +1,42 @@
|
||||
[package]
|
||||
name = "edit_prediction_context2"
|
||||
version = "0.1.0"
|
||||
edition.workspace = true
|
||||
publish.workspace = true
|
||||
license = "GPL-3.0-or-later"
|
||||
|
||||
[lints]
|
||||
workspace = true
|
||||
|
||||
[lib]
|
||||
path = "src/edit_prediction_context2.rs"
|
||||
|
||||
[dependencies]
|
||||
parking_lot.workspace = true
|
||||
anyhow.workspace = true
|
||||
collections.workspace = true
|
||||
futures.workspace = true
|
||||
gpui.workspace = true
|
||||
language.workspace = true
|
||||
lsp.workspace = true
|
||||
project.workspace = true
|
||||
log.workspace = true
|
||||
serde.workspace = true
|
||||
smallvec.workspace = true
|
||||
tree-sitter.workspace = true
|
||||
util.workspace = true
|
||||
|
||||
[dev-dependencies]
|
||||
env_logger.workspace = true
|
||||
indoc.workspace = true
|
||||
futures.workspace = true
|
||||
gpui = { workspace = true, features = ["test-support"] }
|
||||
language = { workspace = true, features = ["test-support"] }
|
||||
lsp = { workspace = true, features = ["test-support"] }
|
||||
pretty_assertions.workspace = true
|
||||
project = {workspace= true, features = ["test-support"]}
|
||||
serde_json.workspace = true
|
||||
settings = {workspace= true, features = ["test-support"]}
|
||||
text = { workspace = true, features = ["test-support"] }
|
||||
util = { workspace = true, features = ["test-support"] }
|
||||
zlog.workspace = true
|
||||
1 crates/edit_prediction_context2/LICENSE-GPL (Symbolic link)
@@ -0,0 +1 @@
|
||||
../../LICENSE-GPL
|
||||
324 crates/edit_prediction_context2/src/assemble_excerpts.rs (Normal file)
@@ -0,0 +1,324 @@
|
||||
use crate::RelatedExcerpt;
|
||||
use language::{BufferSnapshot, OffsetRangeExt as _, Point};
|
||||
use std::ops::Range;
|
||||
|
||||
#[cfg(not(test))]
|
||||
const MAX_OUTLINE_ITEM_BODY_SIZE: usize = 512;
|
||||
#[cfg(test)]
|
||||
const MAX_OUTLINE_ITEM_BODY_SIZE: usize = 24;
|
||||
|
||||
pub fn assemble_excerpts(
|
||||
buffer: &BufferSnapshot,
|
||||
mut input_ranges: Vec<Range<Point>>,
|
||||
) -> Vec<RelatedExcerpt> {
|
||||
merge_ranges(&mut input_ranges);
|
||||
|
||||
let mut outline_ranges = Vec::new();
|
||||
let outline_items = buffer.outline_items_as_points_containing(0..buffer.len(), false, None);
|
||||
let mut outline_ix = 0;
|
||||
for input_range in &mut input_ranges {
|
||||
*input_range = clip_range_to_lines(input_range, false, buffer);
|
||||
|
||||
while let Some(outline_item) = outline_items.get(outline_ix) {
|
||||
let item_range = clip_range_to_lines(&outline_item.range, false, buffer);
|
||||
|
||||
if item_range.start > input_range.start {
|
||||
break;
|
||||
}
|
||||
|
||||
if item_range.end > input_range.start {
|
||||
let body_range = outline_item
|
||||
.body_range(buffer)
|
||||
.map(|body| clip_range_to_lines(&body, true, buffer))
|
||||
.filter(|body_range| {
|
||||
body_range.to_offset(buffer).len() > MAX_OUTLINE_ITEM_BODY_SIZE
|
||||
});
|
||||
|
||||
add_outline_item(
|
||||
item_range.clone(),
|
||||
body_range.clone(),
|
||||
buffer,
|
||||
&mut outline_ranges,
|
||||
);
|
||||
|
||||
if let Some(body_range) = body_range
|
||||
&& input_range.start < body_range.start
|
||||
{
|
||||
let mut child_outline_ix = outline_ix + 1;
|
||||
while let Some(next_outline_item) = outline_items.get(child_outline_ix) {
|
||||
if next_outline_item.range.end > body_range.end {
|
||||
break;
|
||||
}
|
||||
if next_outline_item.depth == outline_item.depth + 1 {
|
||||
let next_item_range =
|
||||
clip_range_to_lines(&next_outline_item.range, false, buffer);
|
||||
|
||||
add_outline_item(
|
||||
next_item_range,
|
||||
next_outline_item
|
||||
.body_range(buffer)
|
||||
.map(|body| clip_range_to_lines(&body, true, buffer)),
|
||||
buffer,
|
||||
&mut outline_ranges,
|
||||
);
|
||||
child_outline_ix += 1;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
outline_ix += 1;
|
||||
}
|
||||
}
|
||||
|
||||
input_ranges.extend_from_slice(&outline_ranges);
|
||||
merge_ranges(&mut input_ranges);
|
||||
|
||||
input_ranges
|
||||
.into_iter()
|
||||
.map(|range| {
|
||||
let offset_range = range.to_offset(buffer);
|
||||
RelatedExcerpt {
|
||||
point_range: range,
|
||||
anchor_range: buffer.anchor_before(offset_range.start)
|
||||
..buffer.anchor_after(offset_range.end),
|
||||
text: buffer.as_rope().slice(offset_range),
|
||||
}
|
||||
})
|
||||
.collect()
|
||||
}
|
||||
|
||||
fn clip_range_to_lines(
|
||||
range: &Range<Point>,
|
||||
inward: bool,
|
||||
buffer: &BufferSnapshot,
|
||||
) -> Range<Point> {
|
||||
let mut range = range.clone();
|
||||
if inward {
|
||||
if range.start.column > 0 {
|
||||
range.start.column = buffer.line_len(range.start.row);
|
||||
}
|
||||
range.end.column = 0;
|
||||
} else {
|
||||
range.start.column = 0;
|
||||
if range.end.column > 0 {
|
||||
range.end.column = buffer.line_len(range.end.row);
|
||||
}
|
||||
}
|
||||
range
|
||||
}
|
||||
|
||||
fn add_outline_item(
|
||||
mut item_range: Range<Point>,
|
||||
body_range: Option<Range<Point>>,
|
||||
buffer: &BufferSnapshot,
|
||||
outline_ranges: &mut Vec<Range<Point>>,
|
||||
) {
|
||||
if let Some(mut body_range) = body_range {
|
||||
if body_range.start.column > 0 {
|
||||
body_range.start.column = buffer.line_len(body_range.start.row);
|
||||
}
|
||||
body_range.end.column = 0;
|
||||
|
||||
let head_range = item_range.start..body_range.start;
|
||||
if head_range.start < head_range.end {
|
||||
outline_ranges.push(head_range);
|
||||
}
|
||||
|
||||
let tail_range = body_range.end..item_range.end;
|
||||
if tail_range.start < tail_range.end {
|
||||
outline_ranges.push(tail_range);
|
||||
}
|
||||
} else {
|
||||
item_range.start.column = 0;
|
||||
item_range.end.column = buffer.line_len(item_range.end.row);
|
||||
outline_ranges.push(item_range);
|
||||
}
|
||||
}
|
||||
|
||||
pub fn merge_ranges(ranges: &mut Vec<Range<Point>>) {
|
||||
ranges.sort_unstable_by(|a, b| a.start.cmp(&b.start).then(b.end.cmp(&a.end)));
|
||||
|
||||
let mut index = 1;
|
||||
while index < ranges.len() {
|
||||
let mut prev_range_end = ranges[index - 1].end;
|
||||
if prev_range_end.column > 0 {
|
||||
prev_range_end += Point::new(1, 0);
|
||||
}
|
||||
|
||||
if (prev_range_end + Point::new(1, 0))
|
||||
.cmp(&ranges[index].start)
|
||||
.is_ge()
|
||||
{
|
||||
let removed = ranges.remove(index);
|
||||
if removed.end.cmp(&ranges[index - 1].end).is_gt() {
|
||||
ranges[index - 1].end = removed.end;
|
||||
}
|
||||
} else {
|
||||
index += 1;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
use gpui::{TestAppContext, prelude::*};
|
||||
use indoc::indoc;
|
||||
use language::{Buffer, Language, LanguageConfig, LanguageMatcher, OffsetRangeExt};
|
||||
use pretty_assertions::assert_eq;
|
||||
use std::{fmt::Write as _, sync::Arc};
|
||||
use util::test::marked_text_ranges;
|
||||
|
||||
#[gpui::test]
|
||||
fn test_rust(cx: &mut TestAppContext) {
|
||||
let table = [
|
||||
(
|
||||
indoc! {r#"
|
||||
struct User {
|
||||
first_name: String,
|
||||
«last_name»: String,
|
||||
age: u32,
|
||||
email: String,
|
||||
create_at: Instant,
|
||||
}
|
||||
|
||||
impl User {
|
||||
pub fn first_name(&self) -> String {
|
||||
self.first_name.clone()
|
||||
}
|
||||
|
||||
pub fn full_name(&self) -> String {
|
||||
« format!("{} {}", self.first_name, self.last_name)
|
||||
» }
|
||||
}
|
||||
"#},
|
||||
indoc! {r#"
|
||||
struct User {
|
||||
first_name: String,
|
||||
last_name: String,
|
||||
…
|
||||
}
|
||||
|
||||
impl User {
|
||||
…
|
||||
pub fn full_name(&self) -> String {
|
||||
format!("{} {}", self.first_name, self.last_name)
|
||||
}
|
||||
}
|
||||
"#},
|
||||
),
|
||||
(
|
||||
indoc! {r#"
|
||||
struct «User» {
|
||||
first_name: String,
|
||||
last_name: String,
|
||||
age: u32,
|
||||
}
|
||||
|
||||
impl User {
|
||||
// methods
|
||||
}
|
||||
"#
|
||||
},
|
||||
indoc! {r#"
|
||||
struct User {
|
||||
first_name: String,
|
||||
last_name: String,
|
||||
age: u32,
|
||||
}
|
||||
…
|
||||
"#},
|
||||
),
|
||||
(
|
||||
indoc! {r#"
|
||||
trait «FooProvider» {
|
||||
const NAME: &'static str;
|
||||
|
||||
fn provide_foo(&self, id: usize) -> Foo;
|
||||
|
||||
fn provide_foo_batched(&self, ids: &[usize]) -> Vec<Foo> {
|
||||
ids.iter()
|
||||
.map(|id| self.provide_foo(*id))
|
||||
.collect()
|
||||
}
|
||||
|
||||
fn sync(&self);
|
||||
}
|
||||
"#
|
||||
},
|
||||
indoc! {r#"
|
||||
trait FooProvider {
|
||||
const NAME: &'static str;
|
||||
|
||||
fn provide_foo(&self, id: usize) -> Foo;
|
||||
|
||||
fn provide_foo_batched(&self, ids: &[usize]) -> Vec<Foo> {
|
||||
…
|
||||
}
|
||||
|
||||
fn sync(&self);
|
||||
}
|
||||
"#},
|
||||
),
|
||||
];
|
||||
|
||||
for (input, expected_output) in table {
|
||||
let (input, ranges) = marked_text_ranges(&input, false);
|
||||
let buffer =
|
||||
cx.new(|cx| Buffer::local(input, cx).with_language(Arc::new(rust_lang()), cx));
|
||||
buffer.read_with(cx, |buffer, _cx| {
|
||||
let ranges: Vec<Range<Point>> = ranges
|
||||
.into_iter()
|
||||
.map(|range| range.to_point(&buffer))
|
||||
.collect();
|
||||
|
||||
let excerpts = assemble_excerpts(&buffer.snapshot(), ranges);
|
||||
|
||||
let output = format_excerpts(buffer, &excerpts);
|
||||
assert_eq!(output, expected_output);
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
fn format_excerpts(buffer: &Buffer, excerpts: &[RelatedExcerpt]) -> String {
|
||||
let mut output = String::new();
|
||||
let file_line_count = buffer.max_point().row;
|
||||
let mut current_row = 0;
|
||||
for excerpt in excerpts {
|
||||
if excerpt.text.is_empty() {
|
||||
continue;
|
||||
}
|
||||
if current_row < excerpt.point_range.start.row {
|
||||
writeln!(&mut output, "…").unwrap();
|
||||
}
|
||||
current_row = excerpt.point_range.start.row;
|
||||
|
||||
for line in excerpt.text.to_string().lines() {
|
||||
output.push_str(line);
|
||||
output.push('\n');
|
||||
current_row += 1;
|
||||
}
|
||||
}
|
||||
if current_row < file_line_count {
|
||||
writeln!(&mut output, "…").unwrap();
|
||||
}
|
||||
output
|
||||
}
|
||||
|
||||
fn rust_lang() -> Language {
|
||||
Language::new(
|
||||
LanguageConfig {
|
||||
name: "Rust".into(),
|
||||
matcher: LanguageMatcher {
|
||||
path_suffixes: vec!["rs".to_string()],
|
||||
..Default::default()
|
||||
},
|
||||
..Default::default()
|
||||
},
|
||||
Some(language::tree_sitter_rust::LANGUAGE.into()),
|
||||
)
|
||||
.with_outline_query(include_str!("../../languages/src/rust/outline.scm"))
|
||||
.unwrap()
|
||||
}
|
||||
}
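
For reference, a small sketch of what the merge_ranges helper above does: ranges that overlap or sit within a line of one another are coalesced in place (the Point::new(row, column) values below are made up for illustration):

    let mut ranges = vec![
        Point::new(10, 0)..Point::new(12, 0),
        Point::new(13, 0)..Point::new(15, 4),
        Point::new(40, 0)..Point::new(42, 0),
    ];
    merge_ranges(&mut ranges);
    // The first two ranges are adjacent, so they merge; the last stays separate:
    // [Point::new(10, 0)..Point::new(15, 4), Point::new(40, 0)..Point::new(42, 0)]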
|
||||
465 crates/edit_prediction_context2/src/edit_prediction_context2.rs (Normal file)
@@ -0,0 +1,465 @@
|
||||
use crate::assemble_excerpts::assemble_excerpts;
|
||||
use anyhow::Result;
|
||||
use collections::HashMap;
|
||||
use futures::{FutureExt, StreamExt as _, channel::mpsc, future};
|
||||
use gpui::{App, AppContext, AsyncApp, Context, Entity, EventEmitter, Task, WeakEntity};
|
||||
use language::{Anchor, Buffer, BufferSnapshot, OffsetRangeExt as _, Point, Rope, ToOffset as _};
|
||||
use project::{LocationLink, Project, ProjectPath};
|
||||
use serde::{Serialize, Serializer};
|
||||
use smallvec::SmallVec;
|
||||
use std::{
|
||||
collections::hash_map,
|
||||
ops::Range,
|
||||
sync::Arc,
|
||||
time::{Duration, Instant},
|
||||
};
|
||||
use util::{RangeExt as _, ResultExt};
|
||||
|
||||
mod assemble_excerpts;
|
||||
#[cfg(test)]
|
||||
mod edit_prediction_context_tests;
|
||||
#[cfg(test)]
|
||||
mod fake_definition_lsp;
|
||||
|
||||
pub struct RelatedExcerptStore {
|
||||
project: WeakEntity<Project>,
|
||||
related_files: Vec<RelatedFile>,
|
||||
cache: HashMap<Identifier, Arc<CacheEntry>>,
|
||||
update_tx: mpsc::UnboundedSender<(Entity<Buffer>, Anchor)>,
|
||||
}
|
||||
|
||||
pub enum RelatedExcerptStoreEvent {
|
||||
StartedRefresh,
|
||||
FinishedRefresh {
|
||||
cache_hit_count: usize,
|
||||
cache_miss_count: usize,
|
||||
mean_definition_latency: Duration,
|
||||
max_definition_latency: Duration,
|
||||
},
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, PartialEq, Eq, Hash)]
|
||||
struct Identifier {
|
||||
pub name: String,
|
||||
pub range: Range<Anchor>,
|
||||
}
|
||||
|
||||
enum DefinitionTask {
|
||||
CacheHit(Arc<CacheEntry>),
|
||||
CacheMiss(Task<Result<Option<Vec<LocationLink>>>>),
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
struct CacheEntry {
|
||||
definitions: SmallVec<[CachedDefinition; 1]>,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug)]
|
||||
struct CachedDefinition {
|
||||
path: ProjectPath,
|
||||
buffer: Entity<Buffer>,
|
||||
anchor_range: Range<Anchor>,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, Serialize)]
|
||||
pub struct RelatedFile {
|
||||
#[serde(serialize_with = "serialize_project_path")]
|
||||
pub path: ProjectPath,
|
||||
#[serde(skip)]
|
||||
pub buffer: WeakEntity<Buffer>,
|
||||
pub excerpts: Vec<RelatedExcerpt>,
|
||||
pub max_row: u32,
|
||||
}
|
||||
|
||||
impl RelatedFile {
|
||||
pub fn merge_excerpts(&mut self) {
|
||||
self.excerpts.sort_unstable_by(|a, b| {
|
||||
a.point_range
|
||||
.start
|
||||
.cmp(&b.point_range.start)
|
||||
.then(b.point_range.end.cmp(&a.point_range.end))
|
||||
});
|
||||
|
||||
let mut index = 1;
|
||||
while index < self.excerpts.len() {
|
||||
if self.excerpts[index - 1]
|
||||
.point_range
|
||||
.end
|
||||
.cmp(&self.excerpts[index].point_range.start)
|
||||
.is_ge()
|
||||
{
|
||||
let removed = self.excerpts.remove(index);
|
||||
if removed
|
||||
.point_range
|
||||
.end
|
||||
.cmp(&self.excerpts[index - 1].point_range.end)
|
||||
.is_gt()
|
||||
{
|
||||
self.excerpts[index - 1].point_range.end = removed.point_range.end;
|
||||
self.excerpts[index - 1].anchor_range.end = removed.anchor_range.end;
|
||||
}
|
||||
} else {
|
||||
index += 1;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, Serialize)]
|
||||
pub struct RelatedExcerpt {
|
||||
#[serde(skip)]
|
||||
pub anchor_range: Range<Anchor>,
|
||||
#[serde(serialize_with = "serialize_point_range")]
|
||||
pub point_range: Range<Point>,
|
||||
#[serde(serialize_with = "serialize_rope")]
|
||||
pub text: Rope,
|
||||
}
|
||||
|
||||
fn serialize_project_path<S: Serializer>(
|
||||
project_path: &ProjectPath,
|
||||
serializer: S,
|
||||
) -> Result<S::Ok, S::Error> {
|
||||
project_path.path.serialize(serializer)
|
||||
}
|
||||
|
||||
fn serialize_rope<S: Serializer>(rope: &Rope, serializer: S) -> Result<S::Ok, S::Error> {
|
||||
rope.to_string().serialize(serializer)
|
||||
}
|
||||
|
||||
fn serialize_point_range<S: Serializer>(
|
||||
range: &Range<Point>,
|
||||
serializer: S,
|
||||
) -> Result<S::Ok, S::Error> {
|
||||
[
|
||||
[range.start.row, range.start.column],
|
||||
[range.end.row, range.end.column],
|
||||
]
|
||||
.serialize(serializer)
|
||||
}
|
||||
|
||||
const DEBOUNCE_DURATION: Duration = Duration::from_millis(100);
|
||||
|
||||
impl EventEmitter<RelatedExcerptStoreEvent> for RelatedExcerptStore {}
|
||||
|
||||
impl RelatedExcerptStore {
|
||||
pub fn new(project: &Entity<Project>, cx: &mut Context<Self>) -> Self {
|
||||
let (update_tx, mut update_rx) = mpsc::unbounded::<(Entity<Buffer>, Anchor)>();
|
||||
cx.spawn(async move |this, cx| {
|
||||
let executor = cx.background_executor().clone();
|
||||
while let Some((mut buffer, mut position)) = update_rx.next().await {
|
||||
let mut timer = executor.timer(DEBOUNCE_DURATION).fuse();
|
||||
loop {
|
||||
futures::select_biased! {
|
||||
next = update_rx.next() => {
|
||||
if let Some((new_buffer, new_position)) = next {
|
||||
buffer = new_buffer;
|
||||
position = new_position;
|
||||
timer = executor.timer(DEBOUNCE_DURATION).fuse();
|
||||
} else {
|
||||
return anyhow::Ok(());
|
||||
}
|
||||
}
|
||||
_ = timer => break,
|
||||
}
|
||||
}
|
||||
|
||||
Self::fetch_excerpts(this.clone(), buffer, position, cx).await?;
|
||||
}
|
||||
anyhow::Ok(())
|
||||
})
|
||||
.detach_and_log_err(cx);
|
||||
|
||||
RelatedExcerptStore {
|
||||
project: project.downgrade(),
|
||||
update_tx,
|
||||
related_files: Vec::new(),
|
||||
cache: Default::default(),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn refresh(&mut self, buffer: Entity<Buffer>, position: Anchor, _: &mut Context<Self>) {
|
||||
self.update_tx.unbounded_send((buffer, position)).ok();
|
||||
}
|
||||
|
||||
pub fn related_files(&self) -> &[RelatedFile] {
|
||||
&self.related_files
|
||||
}
|
||||
|
||||
async fn fetch_excerpts(
|
||||
this: WeakEntity<Self>,
|
||||
buffer: Entity<Buffer>,
|
||||
position: Anchor,
|
||||
cx: &mut AsyncApp,
|
||||
) -> Result<()> {
|
||||
let (project, snapshot) = this.read_with(cx, |this, cx| {
|
||||
(this.project.upgrade(), buffer.read(cx).snapshot())
|
||||
})?;
|
||||
let Some(project) = project else {
|
||||
return Ok(());
|
||||
};
|
||||
|
||||
let file = snapshot.file().cloned();
|
||||
if let Some(file) = &file {
|
||||
log::debug!("retrieving_context buffer:{}", file.path().as_unix_str());
|
||||
}
|
||||
|
||||
this.update(cx, |_, cx| {
|
||||
cx.emit(RelatedExcerptStoreEvent::StartedRefresh);
|
||||
})?;
|
||||
|
||||
let identifiers = cx
|
||||
.background_spawn(async move { identifiers_for_position(&snapshot, position) })
|
||||
.await;
|
||||
|
||||
let async_cx = cx.clone();
|
||||
let start_time = Instant::now();
|
||||
let futures = this.update(cx, |this, cx| {
|
||||
identifiers
|
||||
.into_iter()
|
||||
.filter_map(|identifier| {
|
||||
let task = if let Some(entry) = this.cache.get(&identifier) {
|
||||
DefinitionTask::CacheHit(entry.clone())
|
||||
} else {
|
||||
DefinitionTask::CacheMiss(
|
||||
this.project
|
||||
.update(cx, |project, cx| {
|
||||
project.definitions(&buffer, identifier.range.start, cx)
|
||||
})
|
||||
.ok()?,
|
||||
)
|
||||
};
|
||||
|
||||
let cx = async_cx.clone();
|
||||
let project = project.clone();
|
||||
Some(async move {
|
||||
match task {
|
||||
DefinitionTask::CacheHit(cache_entry) => {
|
||||
Some((identifier, cache_entry, None))
|
||||
}
|
||||
DefinitionTask::CacheMiss(task) => {
|
||||
let locations = task.await.log_err()??;
|
||||
let duration = start_time.elapsed();
|
||||
cx.update(|cx| {
|
||||
(
|
||||
identifier,
|
||||
Arc::new(CacheEntry {
|
||||
definitions: locations
|
||||
.into_iter()
|
||||
.filter_map(|location| {
|
||||
process_definition(location, &project, cx)
|
||||
})
|
||||
.collect(),
|
||||
}),
|
||||
Some(duration),
|
||||
)
|
||||
})
|
||||
.ok()
|
||||
}
|
||||
}
|
||||
})
|
||||
})
|
||||
.collect::<Vec<_>>()
|
||||
})?;
|
||||
|
||||
let mut cache_hit_count = 0;
|
||||
let mut cache_miss_count = 0;
|
||||
let mut mean_definition_latency = Duration::ZERO;
|
||||
let mut max_definition_latency = Duration::ZERO;
|
||||
let mut new_cache = HashMap::default();
|
||||
new_cache.reserve(futures.len());
|
||||
for (identifier, entry, duration) in future::join_all(futures).await.into_iter().flatten() {
|
||||
new_cache.insert(identifier, entry);
|
||||
if let Some(duration) = duration {
|
||||
cache_miss_count += 1;
|
||||
mean_definition_latency += duration;
|
||||
max_definition_latency = max_definition_latency.max(duration);
|
||||
} else {
|
||||
cache_hit_count += 1;
|
||||
}
|
||||
}
|
||||
mean_definition_latency /= cache_miss_count.max(1) as u32;
|
||||
|
||||
let (new_cache, related_files) = rebuild_related_files(new_cache, cx).await?;
|
||||
|
||||
if let Some(file) = &file {
|
||||
log::debug!(
|
||||
"finished retrieving context buffer:{}, latency:{:?}",
|
||||
file.path().as_unix_str(),
|
||||
start_time.elapsed()
|
||||
);
|
||||
}
|
||||
|
||||
this.update(cx, |this, cx| {
|
||||
this.cache = new_cache;
|
||||
this.related_files = related_files;
|
||||
cx.emit(RelatedExcerptStoreEvent::FinishedRefresh {
|
||||
cache_hit_count,
|
||||
cache_miss_count,
|
||||
mean_definition_latency,
|
||||
max_definition_latency,
|
||||
});
|
||||
})?;
|
||||
|
||||
anyhow::Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
async fn rebuild_related_files(
|
||||
new_entries: HashMap<Identifier, Arc<CacheEntry>>,
|
||||
cx: &mut AsyncApp,
|
||||
) -> Result<(HashMap<Identifier, Arc<CacheEntry>>, Vec<RelatedFile>)> {
|
||||
let mut snapshots = HashMap::default();
|
||||
for entry in new_entries.values() {
|
||||
for definition in &entry.definitions {
|
||||
if let hash_map::Entry::Vacant(e) = snapshots.entry(definition.buffer.entity_id()) {
|
||||
definition
|
||||
.buffer
|
||||
.read_with(cx, |buffer, _| buffer.parsing_idle())?
|
||||
.await;
|
||||
e.insert(
|
||||
definition
|
||||
.buffer
|
||||
.read_with(cx, |buffer, _| buffer.snapshot())?,
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Ok(cx
|
||||
.background_spawn(async move {
|
||||
let mut files = Vec::<RelatedFile>::new();
|
||||
let mut ranges_by_buffer = HashMap::<_, Vec<Range<Point>>>::default();
|
||||
let mut paths_by_buffer = HashMap::default();
|
||||
for entry in new_entries.values() {
|
||||
for definition in &entry.definitions {
|
||||
let Some(snapshot) = snapshots.get(&definition.buffer.entity_id()) else {
|
||||
continue;
|
||||
};
|
||||
paths_by_buffer.insert(definition.buffer.entity_id(), definition.path.clone());
|
||||
ranges_by_buffer
|
||||
.entry(definition.buffer.clone())
|
||||
.or_default()
|
||||
.push(definition.anchor_range.to_point(snapshot));
|
||||
}
|
||||
}
|
||||
|
||||
for (buffer, ranges) in ranges_by_buffer {
|
||||
let Some(snapshot) = snapshots.get(&buffer.entity_id()) else {
|
||||
continue;
|
||||
};
|
||||
let Some(project_path) = paths_by_buffer.get(&buffer.entity_id()) else {
|
||||
continue;
|
||||
};
|
||||
let excerpts = assemble_excerpts(snapshot, ranges);
|
||||
files.push(RelatedFile {
|
||||
path: project_path.clone(),
|
||||
buffer: buffer.downgrade(),
|
||||
excerpts,
|
||||
max_row: snapshot.max_point().row,
|
||||
});
|
||||
}
|
||||
|
||||
files.sort_by_key(|file| file.path.clone());
|
||||
(new_entries, files)
|
||||
})
|
||||
.await)
|
||||
}
|
||||
|
||||
fn process_definition(
|
||||
location: LocationLink,
|
||||
project: &Entity<Project>,
|
||||
cx: &mut App,
|
||||
) -> Option<CachedDefinition> {
|
||||
let buffer = location.target.buffer.read(cx);
|
||||
let anchor_range = location.target.range;
|
||||
let file = buffer.file()?;
|
||||
let worktree = project.read(cx).worktree_for_id(file.worktree_id(cx), cx)?;
|
||||
if worktree.read(cx).is_single_file() {
|
||||
return None;
|
||||
}
|
||||
Some(CachedDefinition {
|
||||
path: ProjectPath {
|
||||
worktree_id: file.worktree_id(cx),
|
||||
path: file.path().clone(),
|
||||
},
|
||||
buffer: location.target.buffer,
|
||||
anchor_range,
|
||||
})
|
||||
}
|
||||
|
||||
/// Gets all of the identifiers that are present in the given line, and its containing
|
||||
/// outline items.
|
||||
fn identifiers_for_position(buffer: &BufferSnapshot, position: Anchor) -> Vec<Identifier> {
|
||||
let offset = position.to_offset(buffer);
|
||||
let point = buffer.offset_to_point(offset);
|
||||
|
||||
let line_range = Point::new(point.row, 0)..Point::new(point.row + 1, 0).min(buffer.max_point());
|
||||
let mut ranges = vec![line_range.to_offset(&buffer)];
|
||||
|
||||
// Include the range of the outline item itself, but not its body.
|
||||
let outline_items = buffer.outline_items_as_offsets_containing(offset..offset, false, None);
|
||||
for item in outline_items {
|
||||
if let Some(body_range) = item.body_range(&buffer) {
|
||||
ranges.push(item.range.start..body_range.start.to_offset(&buffer));
|
||||
} else {
|
||||
ranges.push(item.range.clone());
|
||||
}
|
||||
}
|
||||
|
||||
ranges.sort_by(|a, b| a.start.cmp(&b.start).then(b.end.cmp(&a.end)));
|
||||
ranges.dedup_by(|a, b| {
|
||||
if a.start <= b.end {
|
||||
b.start = b.start.min(a.start);
|
||||
b.end = b.end.max(a.end);
|
||||
true
|
||||
} else {
|
||||
false
|
||||
}
|
||||
});
|
||||
|
||||
let mut identifiers = Vec::new();
|
||||
let outer_range =
|
||||
ranges.first().map_or(0, |r| r.start)..ranges.last().map_or(buffer.len(), |r| r.end);
|
||||
|
||||
let mut captures = buffer
|
||||
.syntax
|
||||
.captures(outer_range.clone(), &buffer.text, |grammar| {
|
||||
grammar
|
||||
.highlights_config
|
||||
.as_ref()
|
||||
.map(|config| &config.query)
|
||||
});
|
||||
|
||||
for range in ranges {
|
||||
captures.set_byte_range(range.start..outer_range.end);
|
||||
|
||||
let mut last_range = None;
|
||||
while let Some(capture) = captures.peek() {
|
||||
let node_range = capture.node.byte_range();
|
||||
if node_range.start > range.end {
|
||||
break;
|
||||
}
|
||||
let config = captures.grammars()[capture.grammar_index]
|
||||
.highlights_config
|
||||
.as_ref();
|
||||
|
||||
if let Some(config) = config
|
||||
&& config.identifier_capture_indices.contains(&capture.index)
|
||||
&& range.contains_inclusive(&node_range)
|
||||
&& Some(&node_range) != last_range.as_ref()
|
||||
{
|
||||
let name = buffer.text_for_range(node_range.clone()).collect();
|
||||
identifiers.push(Identifier {
|
||||
range: buffer.anchor_after(node_range.start)
|
||||
..buffer.anchor_before(node_range.end),
|
||||
name,
|
||||
});
|
||||
last_range = Some(node_range);
|
||||
}
|
||||
|
||||
captures.advance();
|
||||
}
|
||||
}
|
||||
|
||||
identifiers
|
||||
}
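
As exercised by the tests that follow, the intended flow for RelatedExcerptStore is: construct it for a project, call refresh with a buffer and cursor anchor, let the 100 ms debounce elapse, then read related_files(). A condensed sketch of that flow inside a GPUI test context (project, buffer, and cursor_anchor setup omitted; this mirrors the test below rather than adding new API):

    let store = cx.new(|cx| RelatedExcerptStore::new(&project, cx));
    store.update(cx, |store, cx| store.refresh(buffer.clone(), cursor_anchor, cx));
    cx.executor().advance_clock(DEBOUNCE_DURATION);
    store.update(cx, |store, _| {
        for file in store.related_files() {
            // Each RelatedFile pairs a ProjectPath with the excerpts assembled around definitions.
            println!("{}: {} excerpt(s)", file.path.path.as_unix_str(), file.excerpts.len());
        }
    });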
|
||||
@@ -0,0 +1,360 @@
|
||||
use super::*;
|
||||
use futures::channel::mpsc::UnboundedReceiver;
|
||||
use gpui::TestAppContext;
|
||||
use indoc::indoc;
|
||||
use language::{Language, LanguageConfig, LanguageMatcher, Point, ToPoint as _, tree_sitter_rust};
|
||||
use lsp::FakeLanguageServer;
|
||||
use project::{FakeFs, LocationLink, Project};
|
||||
use serde_json::json;
|
||||
use settings::SettingsStore;
|
||||
use std::sync::Arc;
|
||||
use util::path;
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_edit_prediction_context(cx: &mut TestAppContext) {
|
||||
init_test(cx);
|
||||
let fs = FakeFs::new(cx.executor());
|
||||
fs.insert_tree(path!("/root"), test_project_1()).await;
|
||||
|
||||
let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
|
||||
let mut servers = setup_fake_lsp(&project, cx);
|
||||
|
||||
let (buffer, _handle) = project
|
||||
.update(cx, |project, cx| {
|
||||
project.open_local_buffer_with_lsp(path!("/root/src/main.rs"), cx)
|
||||
})
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
let _server = servers.next().await.unwrap();
|
||||
cx.run_until_parked();
|
||||
|
||||
let related_excerpt_store = cx.new(|cx| RelatedExcerptStore::new(&project, cx));
|
||||
related_excerpt_store.update(cx, |store, cx| {
|
||||
let position = {
|
||||
let buffer = buffer.read(cx);
|
||||
let offset = buffer.text().find("todo").unwrap();
|
||||
buffer.anchor_before(offset)
|
||||
};
|
||||
|
||||
store.refresh(buffer.clone(), position, cx);
|
||||
});
|
||||
|
||||
cx.executor().advance_clock(DEBOUNCE_DURATION);
|
||||
related_excerpt_store.update(cx, |store, _| {
|
||||
let excerpts = store.related_files();
|
||||
assert_related_files(
|
||||
&excerpts,
|
||||
&[
|
||||
(
|
||||
"src/company.rs",
|
||||
&[indoc! {"
|
||||
pub struct Company {
|
||||
owner: Arc<Person>,
|
||||
address: Address,
|
||||
}"}],
|
||||
),
|
||||
(
|
||||
"src/main.rs",
|
||||
&[
|
||||
indoc! {"
|
||||
pub struct Session {
|
||||
company: Arc<Company>,
|
||||
}
|
||||
|
||||
impl Session {
|
||||
pub fn set_company(&mut self, company: Arc<Company>) {"},
|
||||
indoc! {"
|
||||
}
|
||||
}"},
|
||||
],
|
||||
),
|
||||
(
|
||||
"src/person.rs",
|
||||
&[
|
||||
indoc! {"
|
||||
impl Person {
|
||||
pub fn get_first_name(&self) -> &str {
|
||||
&self.first_name
|
||||
}"},
|
||||
"}",
|
||||
],
|
||||
),
|
||||
],
|
||||
);
|
||||
});
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_fake_definition_lsp(cx: &mut TestAppContext) {
|
||||
init_test(cx);
|
||||
|
||||
let fs = FakeFs::new(cx.executor());
|
||||
fs.insert_tree(path!("/root"), test_project_1()).await;
|
||||
|
||||
let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
|
||||
let mut servers = setup_fake_lsp(&project, cx);
|
||||
|
||||
let (buffer, _handle) = project
|
||||
.update(cx, |project, cx| {
|
||||
project.open_local_buffer_with_lsp(path!("/root/src/main.rs"), cx)
|
||||
})
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
let _server = servers.next().await.unwrap();
|
||||
cx.run_until_parked();
|
||||
|
||||
let buffer_text = buffer.read_with(cx, |buffer, _| buffer.text());
|
||||
|
||||
let definitions = project
|
||||
.update(cx, |project, cx| {
|
||||
let offset = buffer_text.find("Address {").unwrap();
|
||||
project.definitions(&buffer, offset, cx)
|
||||
})
|
||||
.await
|
||||
.unwrap()
|
||||
.unwrap();
|
||||
assert_definitions(&definitions, &["pub struct Address {"], cx);
|
||||
|
||||
let definitions = project
|
||||
.update(cx, |project, cx| {
|
||||
let offset = buffer_text.find("State::CA").unwrap();
|
||||
project.definitions(&buffer, offset, cx)
|
||||
})
|
||||
.await
|
||||
.unwrap()
|
||||
.unwrap();
|
||||
assert_definitions(&definitions, &["pub enum State {"], cx);
|
||||
|
||||
let definitions = project
|
||||
.update(cx, |project, cx| {
|
||||
let offset = buffer_text.find("to_string()").unwrap();
|
||||
project.definitions(&buffer, offset, cx)
|
||||
})
|
||||
.await
|
||||
.unwrap()
|
||||
.unwrap();
|
||||
assert_definitions(&definitions, &["pub fn to_string(&self) -> String {"], cx);
|
||||
}
|
||||
|
||||
fn init_test(cx: &mut TestAppContext) {
|
||||
let settings_store = cx.update(|cx| SettingsStore::test(cx));
|
||||
cx.set_global(settings_store);
|
||||
env_logger::try_init().ok();
|
||||
}
|
||||
|
||||
fn setup_fake_lsp(
|
||||
project: &Entity<Project>,
|
||||
cx: &mut TestAppContext,
|
||||
) -> UnboundedReceiver<FakeLanguageServer> {
|
||||
let (language_registry, fs) = project.read_with(cx, |project, _| {
|
||||
(project.languages().clone(), project.fs().clone())
|
||||
});
|
||||
let language = rust_lang();
|
||||
language_registry.add(language.clone());
|
||||
fake_definition_lsp::register_fake_definition_server(&language_registry, language, fs)
|
||||
}
|
||||
|
||||
fn test_project_1() -> serde_json::Value {
|
||||
let person_rs = indoc! {r#"
|
||||
pub struct Person {
|
||||
first_name: String,
|
||||
last_name: String,
|
||||
email: String,
|
||||
age: u32,
|
||||
}
|
||||
|
||||
impl Person {
|
||||
pub fn get_first_name(&self) -> &str {
|
||||
&self.first_name
|
||||
}
|
||||
|
||||
pub fn get_last_name(&self) -> &str {
|
||||
&self.last_name
|
||||
}
|
||||
|
||||
pub fn get_email(&self) -> &str {
|
||||
&self.email
|
||||
}
|
||||
|
||||
pub fn get_age(&self) -> u32 {
|
||||
self.age
|
||||
}
|
||||
}
|
||||
"#};
|
||||
|
||||
let address_rs = indoc! {r#"
|
||||
pub struct Address {
|
||||
street: String,
|
||||
city: String,
|
||||
state: State,
|
||||
zip: u32,
|
||||
}
|
||||
|
||||
pub enum State {
|
||||
CA,
|
||||
OR,
|
||||
WA,
|
||||
TX,
|
||||
// ...
|
||||
}
|
||||
|
||||
impl Address {
|
||||
pub fn get_street(&self) -> &str {
|
||||
&self.street
|
||||
}
|
||||
|
||||
pub fn get_city(&self) -> &str {
|
||||
&self.city
|
||||
}
|
||||
|
||||
pub fn get_state(&self) -> State {
|
||||
self.state
|
||||
}
|
||||
|
||||
pub fn get_zip(&self) -> u32 {
|
||||
self.zip
|
||||
}
|
||||
}
|
||||
"#};
|
||||
|
||||
let company_rs = indoc! {r#"
|
||||
use super::person::Person;
|
||||
use super::address::Address;
|
||||
|
||||
pub struct Company {
|
||||
owner: Arc<Person>,
|
||||
address: Address,
|
||||
}
|
||||
|
||||
impl Company {
|
||||
pub fn get_owner(&self) -> &Person {
|
||||
&self.owner
|
||||
}
|
||||
|
||||
pub fn get_address(&self) -> &Address {
|
||||
&self.address
|
||||
}
|
||||
|
||||
pub fn to_string(&self) -> String {
|
||||
format!("{} ({})", self.owner.first_name, self.address.city)
|
||||
}
|
||||
}
|
||||
"#};
|
||||
|
||||
let main_rs = indoc! {r#"
|
||||
use std::sync::Arc;
|
||||
use super::person::Person;
|
||||
use super::address::Address;
|
||||
use super::company::Company;
|
||||
|
||||
pub struct Session {
|
||||
company: Arc<Company>,
|
||||
}
|
||||
|
||||
impl Session {
|
||||
pub fn set_company(&mut self, company: Arc<Company>) {
|
||||
self.company = company;
|
||||
if company.owner != self.company.owner {
|
||||
log("new owner", company.owner.get_first_name()); todo();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn main() {
|
||||
let company = Company {
|
||||
owner: Arc::new(Person {
|
||||
first_name: "John".to_string(),
|
||||
last_name: "Doe".to_string(),
|
||||
email: "john@example.com".to_string(),
|
||||
age: 30,
|
||||
}),
|
||||
address: Address {
|
||||
street: "123 Main St".to_string(),
|
||||
city: "Anytown".to_string(),
|
||||
state: State::CA,
|
||||
zip: 12345,
|
||||
},
|
||||
};
|
||||
|
||||
println!("Company: {}", company.to_string());
|
||||
}
|
||||
"#};
|
||||
|
||||
json!({
|
||||
"src": {
|
||||
"person.rs": person_rs,
|
||||
"address.rs": address_rs,
|
||||
"company.rs": company_rs,
|
||||
"main.rs": main_rs,
|
||||
},
|
||||
})
|
||||
}
|
||||
|
||||
fn assert_related_files(actual_files: &[RelatedFile], expected_files: &[(&str, &[&str])]) {
|
||||
let actual_files = actual_files
|
||||
.iter()
|
||||
.map(|file| {
|
||||
let excerpts = file
|
||||
.excerpts
|
||||
.iter()
|
||||
.map(|excerpt| excerpt.text.to_string())
|
||||
.collect::<Vec<_>>();
|
||||
(file.path.path.as_unix_str(), excerpts)
|
||||
})
|
||||
.collect::<Vec<_>>();
|
||||
let expected_excerpts = expected_files
|
||||
.iter()
|
||||
.map(|(path, texts)| {
|
||||
(
|
||||
*path,
|
||||
texts
|
||||
.iter()
|
||||
.map(|line| line.to_string())
|
||||
.collect::<Vec<_>>(),
|
||||
)
|
||||
})
|
||||
.collect::<Vec<_>>();
|
||||
pretty_assertions::assert_eq!(actual_files, expected_excerpts)
|
||||
}
|
||||
|
||||
fn assert_definitions(definitions: &[LocationLink], first_lines: &[&str], cx: &mut TestAppContext) {
|
||||
let actual_first_lines = definitions
|
||||
.iter()
|
||||
.map(|definition| {
|
||||
definition.target.buffer.read_with(cx, |buffer, _| {
|
||||
let mut start = definition.target.range.start.to_point(&buffer);
|
||||
start.column = 0;
|
||||
let end = Point::new(start.row, buffer.line_len(start.row));
|
||||
buffer
|
||||
.text_for_range(start..end)
|
||||
.collect::<String>()
|
||||
.trim()
|
||||
.to_string()
|
||||
})
|
||||
})
|
||||
.collect::<Vec<String>>();
|
||||
|
||||
assert_eq!(actual_first_lines, first_lines);
|
||||
}
|
||||
|
||||
pub(crate) fn rust_lang() -> Arc<Language> {
|
||||
Arc::new(
|
||||
Language::new(
|
||||
LanguageConfig {
|
||||
name: "Rust".into(),
|
||||
matcher: LanguageMatcher {
|
||||
path_suffixes: vec!["rs".to_string()],
|
||||
first_line_pattern: None,
|
||||
},
|
||||
..Default::default()
|
||||
},
|
||||
Some(tree_sitter_rust::LANGUAGE.into()),
|
||||
)
|
||||
.with_highlights_query(include_str!("../../languages/src/rust/highlights.scm"))
|
||||
.unwrap()
|
||||
.with_outline_query(include_str!("../../languages/src/rust/outline.scm"))
|
||||
.unwrap(),
|
||||
)
|
||||
}
|
||||
329 crates/edit_prediction_context2/src/fake_definition_lsp.rs (Normal file)
@@ -0,0 +1,329 @@
use collections::HashMap;
use futures::channel::mpsc::UnboundedReceiver;
use language::{Language, LanguageRegistry};
use lsp::{
    FakeLanguageServer, LanguageServerBinary, TextDocumentSyncCapability, TextDocumentSyncKind, Uri,
};
use parking_lot::Mutex;
use project::Fs;
use std::{ops::Range, path::PathBuf, sync::Arc};
use tree_sitter::{Parser, QueryCursor, StreamingIterator, Tree};

/// Registers a fake language server that implements go-to-definition using tree-sitter,
/// making the assumption that all names are unique, and all variables' types are
/// explicitly declared.
pub fn register_fake_definition_server(
    language_registry: &Arc<LanguageRegistry>,
    language: Arc<Language>,
    fs: Arc<dyn Fs>,
) -> UnboundedReceiver<FakeLanguageServer> {
    let index = Arc::new(Mutex::new(DefinitionIndex::new(language.clone())));

    language_registry.register_fake_lsp(
        language.name(),
        language::FakeLspAdapter {
            name: "fake-definition-lsp",
            initialization_options: None,
            prettier_plugins: Vec::new(),
            disk_based_diagnostics_progress_token: None,
            disk_based_diagnostics_sources: Vec::new(),
            language_server_binary: LanguageServerBinary {
                path: PathBuf::from("fake-definition-lsp"),
                arguments: Vec::new(),
                env: None,
            },
            capabilities: lsp::ServerCapabilities {
                definition_provider: Some(lsp::OneOf::Left(true)),
                text_document_sync: Some(TextDocumentSyncCapability::Kind(
                    TextDocumentSyncKind::FULL,
                )),
                ..Default::default()
            },
            label_for_completion: None,
            initializer: Some(Box::new({
                move |server| {
                    server.handle_notification::<lsp::notification::DidOpenTextDocument, _>({
                        let index = index.clone();
                        move |params, _cx| {
                            index
                                .lock()
                                .open_buffer(params.text_document.uri, &params.text_document.text);
                        }
                    });

                    server.handle_notification::<lsp::notification::DidCloseTextDocument, _>({
                        let index = index.clone();
                        let fs = fs.clone();
                        move |params, cx| {
                            let uri = params.text_document.uri;
                            let path = uri.to_file_path().ok();
                            index.lock().mark_buffer_closed(&uri);

                            if let Some(path) = path {
                                let index = index.clone();
                                let fs = fs.clone();
                                cx.spawn(async move |_cx| {
                                    if let Ok(content) = fs.load(&path).await {
                                        index.lock().index_file(uri, &content);
                                    }
                                })
                                .detach();
                            }
                        }
                    });

                    server.handle_notification::<lsp::notification::DidChangeWatchedFiles, _>({
                        let index = index.clone();
                        let fs = fs.clone();
                        move |params, cx| {
                            let index = index.clone();
                            let fs = fs.clone();
                            cx.spawn(async move |_cx| {
                                for event in params.changes {
                                    if index.lock().is_buffer_open(&event.uri) {
                                        continue;
                                    }

                                    match event.typ {
                                        lsp::FileChangeType::DELETED => {
                                            index.lock().remove_definitions_for_file(&event.uri);
                                        }
                                        lsp::FileChangeType::CREATED
                                        | lsp::FileChangeType::CHANGED => {
                                            if let Some(path) = event.uri.to_file_path().ok() {
                                                if let Ok(content) = fs.load(&path).await {
                                                    index.lock().index_file(event.uri, &content);
                                                }
                                            }
                                        }
                                        _ => {}
                                    }
                                }
                            })
                            .detach();
                        }
                    });

                    server.handle_notification::<lsp::notification::DidChangeTextDocument, _>({
                        let index = index.clone();
                        move |params, _cx| {
                            if let Some(change) = params.content_changes.into_iter().last() {
                                index
                                    .lock()
                                    .index_file(params.text_document.uri, &change.text);
                            }
                        }
                    });

                    server.handle_notification::<lsp::notification::DidChangeWorkspaceFolders, _>(
                        {
                            let index = index.clone();
                            let fs = fs.clone();
                            move |params, cx| {
                                let index = index.clone();
                                let fs = fs.clone();
                                let files = fs.as_fake().files();
                                cx.spawn(async move |_cx| {
                                    for folder in params.event.added {
                                        let Ok(path) = folder.uri.to_file_path() else {
                                            continue;
                                        };
                                        for file in &files {
                                            if let Some(uri) = Uri::from_file_path(&file).ok()
                                                && file.starts_with(&path)
                                                && let Ok(content) = fs.load(&file).await
                                            {
                                                index.lock().index_file(uri, &content);
                                            }
                                        }
                                    }
                                })
                                .detach();
                            }
                        },
                    );

                    server.set_request_handler::<lsp::request::GotoDefinition, _, _>({
                        let index = index.clone();
                        move |params, _cx| {
                            let result = index.lock().get_definitions(
                                params.text_document_position_params.text_document.uri,
                                params.text_document_position_params.position,
                            );
                            async move { Ok(result) }
                        }
                    });
                }
            })),
        },
    )
}

struct DefinitionIndex {
    language: Arc<Language>,
    definitions: HashMap<String, Vec<lsp::Location>>,
    files: HashMap<Uri, FileEntry>,
}

#[derive(Debug)]
struct FileEntry {
    contents: String,
    is_open_in_buffer: bool,
}

impl DefinitionIndex {
    fn new(language: Arc<Language>) -> Self {
        Self {
            language,
            definitions: HashMap::default(),
            files: HashMap::default(),
        }
    }

    fn remove_definitions_for_file(&mut self, uri: &Uri) {
        self.definitions.retain(|_, locations| {
            locations.retain(|loc| &loc.uri != uri);
            !locations.is_empty()
        });
        self.files.remove(uri);
    }

    fn open_buffer(&mut self, uri: Uri, content: &str) {
        self.index_file_inner(uri, content, true);
    }

    fn mark_buffer_closed(&mut self, uri: &Uri) {
        if let Some(entry) = self.files.get_mut(uri) {
            entry.is_open_in_buffer = false;
        }
    }

    fn is_buffer_open(&self, uri: &Uri) -> bool {
        self.files
            .get(uri)
            .map(|entry| entry.is_open_in_buffer)
            .unwrap_or(false)
    }

    fn index_file(&mut self, uri: Uri, content: &str) {
        self.index_file_inner(uri, content, false);
    }

    fn index_file_inner(&mut self, uri: Uri, content: &str, is_open_in_buffer: bool) -> Option<()> {
        self.remove_definitions_for_file(&uri);
        let grammar = self.language.grammar()?;
        let outline_config = grammar.outline_config.as_ref()?;
        let mut parser = Parser::new();
        parser.set_language(&grammar.ts_language).ok()?;
        let tree = parser.parse(content, None)?;
        let declarations = extract_declarations_from_tree(&tree, content, outline_config);
        for (name, byte_range) in declarations {
            let range = byte_range_to_lsp_range(content, byte_range);
            let location = lsp::Location {
                uri: uri.clone(),
                range,
            };
            self.definitions
                .entry(name)
                .or_insert_with(Vec::new)
                .push(location);
        }
        self.files.insert(
            uri,
            FileEntry {
                contents: content.to_string(),
                is_open_in_buffer,
            },
        );

        Some(())
    }

    fn get_definitions(
        &mut self,
        uri: Uri,
        position: lsp::Position,
    ) -> Option<lsp::GotoDefinitionResponse> {
        let entry = self.files.get(&uri)?;
        let name = word_at_position(&entry.contents, position)?;
        let locations = self.definitions.get(name).cloned()?;
        Some(lsp::GotoDefinitionResponse::Array(locations))
    }
}

fn extract_declarations_from_tree(
    tree: &Tree,
    content: &str,
    outline_config: &language::OutlineConfig,
) -> Vec<(String, Range<usize>)> {
    let mut cursor = QueryCursor::new();
    let mut declarations = Vec::new();
    let mut matches = cursor.matches(&outline_config.query, tree.root_node(), content.as_bytes());
    while let Some(query_match) = matches.next() {
        let mut name_range: Option<Range<usize>> = None;
        let mut has_item_range = false;

        for capture in query_match.captures {
            let range = capture.node.byte_range();
            if capture.index == outline_config.name_capture_ix {
                name_range = Some(range);
            } else if capture.index == outline_config.item_capture_ix {
                has_item_range = true;
            }
        }

        if let Some(name_range) = name_range
            && has_item_range
        {
            let name = content[name_range.clone()].to_string();
            if declarations.iter().any(|(n, _)| n == &name) {
                continue;
            }
            declarations.push((name, name_range));
        }
    }
    declarations
}

fn byte_range_to_lsp_range(content: &str, byte_range: Range<usize>) -> lsp::Range {
    let start = byte_offset_to_position(content, byte_range.start);
    let end = byte_offset_to_position(content, byte_range.end);
    lsp::Range { start, end }
}

fn byte_offset_to_position(content: &str, offset: usize) -> lsp::Position {
    let mut line = 0;
    let mut character = 0;
    let mut current_offset = 0;
    for ch in content.chars() {
        if current_offset >= offset {
            break;
        }
        if ch == '\n' {
            line += 1;
            character = 0;
        } else {
            character += 1;
        }
        current_offset += ch.len_utf8();
    }
    lsp::Position { line, character }
}

fn word_at_position(content: &str, position: lsp::Position) -> Option<&str> {
    let mut lines = content.lines();
    let line = lines.nth(position.line as usize)?;
    let column = position.character as usize;
    if column > line.len() {
        return None;
    }
    let start = line[..column]
        .rfind(|c: char| !c.is_alphanumeric() && c != '_')
        .map(|i| i + 1)
        .unwrap_or(0);
    let end = line[column..]
        .find(|c: char| !c.is_alphanumeric() && c != '_')
        .map(|i| i + column)
        .unwrap_or(line.len());
    Some(&line[start..end]).filter(|word| !word.is_empty())
}
@@ -118,6 +118,7 @@ tree-sitter-rust.workspace = true
tree-sitter-typescript.workspace = true
tree-sitter-yaml.workspace = true
tree-sitter-bash.workspace = true
tree-sitter-md.workspace = true
unindent.workspace = true
util = { workspace = true, features = ["test-support"] }
workspace = { workspace = true, features = ["test-support"] }
@@ -182,7 +182,7 @@ use std::{
    iter::{self, Peekable},
    mem,
    num::NonZeroU32,
    ops::{Deref, DerefMut, Not, Range, RangeInclusive},
    ops::{ControlFlow, Deref, DerefMut, Not, Range, RangeInclusive},
    path::{Path, PathBuf},
    rc::Rc,
    sync::Arc,
@@ -191,7 +191,7 @@ use std::{
use task::{ResolvedTask, RunnableTag, TaskTemplate, TaskVariables};
use text::{BufferId, FromAnchor, OffsetUtf16, Rope, ToOffset as _};
use theme::{
    ActiveTheme, PlayerColor, StatusColors, SyntaxTheme, Theme, ThemeSettings,
    AccentColors, ActiveTheme, PlayerColor, StatusColors, SyntaxTheme, Theme, ThemeSettings,
    observe_buffer_font_size_adjustment,
};
use ui::{
@@ -1079,6 +1079,7 @@ pub struct Editor {
|
||||
show_breakpoints: Option<bool>,
|
||||
show_wrap_guides: Option<bool>,
|
||||
show_indent_guides: Option<bool>,
|
||||
buffers_with_disabled_indent_guides: HashSet<BufferId>,
|
||||
highlight_order: usize,
|
||||
highlighted_rows: HashMap<TypeId, Vec<RowHighlight>>,
|
||||
background_highlights: HashMap<HighlightKey, BackgroundHighlight>,
|
||||
@@ -1206,11 +1207,17 @@ pub struct Editor {
|
||||
select_next_is_case_sensitive: Option<bool>,
|
||||
pub lookup_key: Option<Box<dyn Any + Send + Sync>>,
|
||||
applicable_language_settings: HashMap<Option<LanguageName>, LanguageSettings>,
|
||||
accent_overrides: Vec<SharedString>,
|
||||
accent_data: Option<AccentData>,
|
||||
fetched_tree_sitter_chunks: HashMap<ExcerptId, HashSet<Range<BufferRow>>>,
|
||||
use_base_text_line_numbers: bool,
|
||||
}
|
||||
|
||||
#[derive(Debug, PartialEq)]
|
||||
struct AccentData {
|
||||
colors: AccentColors,
|
||||
overrides: Vec<SharedString>,
|
||||
}
|
||||
|
||||
fn debounce_value(debounce_ms: u64) -> Option<Duration> {
|
||||
if debounce_ms > 0 {
|
||||
Some(Duration::from_millis(debounce_ms))
|
||||
@@ -2198,6 +2205,7 @@ impl Editor {
|
||||
show_breakpoints: None,
|
||||
show_wrap_guides: None,
|
||||
show_indent_guides,
|
||||
buffers_with_disabled_indent_guides: HashSet::default(),
|
||||
highlight_order: 0,
|
||||
highlighted_rows: HashMap::default(),
|
||||
background_highlights: HashMap::default(),
|
||||
@@ -2354,7 +2362,7 @@ impl Editor {
|
||||
lookup_key: None,
|
||||
select_next_is_case_sensitive: None,
|
||||
applicable_language_settings: HashMap::default(),
|
||||
accent_overrides: Vec::new(),
|
||||
accent_data: None,
|
||||
fetched_tree_sitter_chunks: HashMap::default(),
|
||||
use_base_text_line_numbers: false,
|
||||
};
|
||||
@@ -2364,7 +2372,7 @@ impl Editor {
|
||||
}
|
||||
|
||||
editor.applicable_language_settings = editor.fetch_applicable_language_settings(cx);
|
||||
editor.accent_overrides = editor.fetch_accent_overrides(cx);
|
||||
editor.accent_data = editor.fetch_accent_data(cx);
|
||||
|
||||
if let Some(breakpoints) = editor.breakpoint_store.as_ref() {
|
||||
editor
|
||||
@@ -8067,10 +8075,17 @@ impl Editor {
|
||||
|
||||
if self.edit_prediction_indent_conflict {
|
||||
let cursor_point = cursor.to_point(&multibuffer);
|
||||
let mut suggested_indent = None;
|
||||
multibuffer.suggested_indents_callback(
|
||||
cursor_point.row..cursor_point.row + 1,
|
||||
|_, indent| {
|
||||
suggested_indent = Some(indent);
|
||||
ControlFlow::Break(())
|
||||
},
|
||||
cx,
|
||||
);
|
||||
|
||||
let indents = multibuffer.suggested_indents(cursor_point.row..cursor_point.row + 1, cx);
|
||||
|
||||
if let Some((_, indent)) = indents.iter().next()
|
||||
if let Some(indent) = suggested_indent
|
||||
&& indent.len == cursor_point.column
|
||||
{
|
||||
self.edit_prediction_indent_conflict = false;
|
||||
@@ -20077,6 +20092,20 @@ impl Editor {
|
||||
self.show_indent_guides
|
||||
}
|
||||
|
||||
pub fn disable_indent_guides_for_buffer(
|
||||
&mut self,
|
||||
buffer_id: BufferId,
|
||||
cx: &mut Context<Self>,
|
||||
) {
|
||||
self.buffers_with_disabled_indent_guides.insert(buffer_id);
|
||||
cx.notify();
|
||||
}
|
||||
|
||||
pub fn has_indent_guides_disabled_for_buffer(&self, buffer_id: BufferId) -> bool {
|
||||
self.buffers_with_disabled_indent_guides
|
||||
.contains(&buffer_id)
|
||||
}
|
||||
|
||||
pub fn toggle_line_numbers(
|
||||
&mut self,
|
||||
_: &ToggleLineNumbers,
|
||||
@@ -21706,16 +21735,18 @@ impl Editor {
|
||||
cx.notify();
|
||||
}
|
||||
|
||||
fn fetch_accent_overrides(&self, cx: &App) -> Vec<SharedString> {
|
||||
fn fetch_accent_data(&self, cx: &App) -> Option<AccentData> {
|
||||
if !self.mode.is_full() {
|
||||
return Vec::new();
|
||||
return None;
|
||||
}
|
||||
|
||||
let theme_settings = theme::ThemeSettings::get_global(cx);
|
||||
let theme = cx.theme();
|
||||
let accent_colors = theme.accents().clone();
|
||||
|
||||
theme_settings
|
||||
let accent_overrides = theme_settings
|
||||
.theme_overrides
|
||||
.get(cx.theme().name.as_ref())
|
||||
.get(theme.name.as_ref())
|
||||
.map(|theme_style| &theme_style.accents)
|
||||
.into_iter()
|
||||
.flatten()
|
||||
@@ -21728,7 +21759,12 @@ impl Editor {
|
||||
.flatten(),
|
||||
)
|
||||
.flat_map(|accent| accent.0.clone())
|
||||
.collect()
|
||||
.collect();
|
||||
|
||||
Some(AccentData {
|
||||
colors: accent_colors,
|
||||
overrides: accent_overrides,
|
||||
})
|
||||
}
|
||||
|
||||
fn fetch_applicable_language_settings(
|
||||
@@ -21758,9 +21794,9 @@ impl Editor {
|
||||
let language_settings_changed = new_language_settings != self.applicable_language_settings;
|
||||
self.applicable_language_settings = new_language_settings;
|
||||
|
||||
let new_accent_overrides = self.fetch_accent_overrides(cx);
|
||||
let accent_overrides_changed = new_accent_overrides != self.accent_overrides;
|
||||
self.accent_overrides = new_accent_overrides;
|
||||
let new_accents = self.fetch_accent_data(cx);
|
||||
let accents_changed = new_accents != self.accent_data;
|
||||
self.accent_data = new_accents;
|
||||
|
||||
if self.diagnostics_enabled() {
|
||||
let new_severity = EditorSettings::get_global(cx)
|
||||
@@ -21834,7 +21870,7 @@ impl Editor {
|
||||
}
|
||||
}
|
||||
|
||||
if language_settings_changed || accent_overrides_changed {
|
||||
if language_settings_changed || accents_changed {
|
||||
self.colorize_brackets(true, cx);
|
||||
}
|
||||
|
||||
|
||||
@@ -19095,6 +19095,109 @@ async fn test_document_format_with_prettier(cx: &mut TestAppContext) {
|
||||
);
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_document_format_with_prettier_explicit_language(cx: &mut TestAppContext) {
|
||||
init_test(cx, |settings| {
|
||||
settings.defaults.formatter = Some(FormatterList::Single(Formatter::Prettier))
|
||||
});
|
||||
|
||||
let fs = FakeFs::new(cx.executor());
|
||||
fs.insert_file(path!("/file.settings"), Default::default())
|
||||
.await;
|
||||
|
||||
let project = Project::test(fs, [path!("/file.settings").as_ref()], cx).await;
|
||||
let language_registry = project.read_with(cx, |project, _| project.languages().clone());
|
||||
|
||||
let ts_lang = Arc::new(Language::new(
|
||||
LanguageConfig {
|
||||
name: "TypeScript".into(),
|
||||
matcher: LanguageMatcher {
|
||||
path_suffixes: vec!["ts".to_string()],
|
||||
..LanguageMatcher::default()
|
||||
},
|
||||
prettier_parser_name: Some("typescript".to_string()),
|
||||
..LanguageConfig::default()
|
||||
},
|
||||
Some(tree_sitter_typescript::LANGUAGE_TYPESCRIPT.into()),
|
||||
));
|
||||
|
||||
language_registry.add(ts_lang.clone());
|
||||
|
||||
update_test_language_settings(cx, |settings| {
|
||||
settings.defaults.prettier.get_or_insert_default().allowed = Some(true);
|
||||
});
|
||||
|
||||
let test_plugin = "test_plugin";
|
||||
let _ = language_registry.register_fake_lsp(
|
||||
"TypeScript",
|
||||
FakeLspAdapter {
|
||||
prettier_plugins: vec![test_plugin],
|
||||
..Default::default()
|
||||
},
|
||||
);
|
||||
|
||||
let prettier_format_suffix = project::TEST_PRETTIER_FORMAT_SUFFIX;
|
||||
let buffer = project
|
||||
.update(cx, |project, cx| {
|
||||
project.open_local_buffer(path!("/file.settings"), cx)
|
||||
})
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
project.update(cx, |project, cx| {
|
||||
project.set_language_for_buffer(&buffer, ts_lang, cx)
|
||||
});
|
||||
|
||||
let buffer_text = "one\ntwo\nthree\n";
|
||||
let buffer = cx.new(|cx| MultiBuffer::singleton(buffer, cx));
|
||||
let (editor, cx) = cx.add_window_view(|window, cx| build_editor(buffer, window, cx));
|
||||
editor.update_in(cx, |editor, window, cx| {
|
||||
editor.set_text(buffer_text, window, cx)
|
||||
});
|
||||
|
||||
editor
|
||||
.update_in(cx, |editor, window, cx| {
|
||||
editor.perform_format(
|
||||
project.clone(),
|
||||
FormatTrigger::Manual,
|
||||
FormatTarget::Buffers(editor.buffer().read(cx).all_buffers()),
|
||||
window,
|
||||
cx,
|
||||
)
|
||||
})
|
||||
.unwrap()
|
||||
.await;
|
||||
assert_eq!(
|
||||
editor.update(cx, |editor, cx| editor.text(cx)),
|
||||
buffer_text.to_string() + prettier_format_suffix + "\ntypescript",
|
||||
"Test prettier formatting was not applied to the original buffer text",
|
||||
);
|
||||
|
||||
update_test_language_settings(cx, |settings| {
|
||||
settings.defaults.formatter = Some(FormatterList::default())
|
||||
});
|
||||
let format = editor.update_in(cx, |editor, window, cx| {
|
||||
editor.perform_format(
|
||||
project.clone(),
|
||||
FormatTrigger::Manual,
|
||||
FormatTarget::Buffers(editor.buffer().read(cx).all_buffers()),
|
||||
window,
|
||||
cx,
|
||||
)
|
||||
});
|
||||
format.await.unwrap();
|
||||
|
||||
assert_eq!(
|
||||
editor.update(cx, |editor, cx| editor.text(cx)),
|
||||
buffer_text.to_string()
|
||||
+ prettier_format_suffix
|
||||
+ "\ntypescript\n"
|
||||
+ prettier_format_suffix
|
||||
+ "\ntypescript",
|
||||
"Autoformatting (via test prettier) was not applied to the original buffer text",
|
||||
);
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_addition_reverts(cx: &mut TestAppContext) {
|
||||
init_test(cx, |_| {});
|
||||
@@ -27395,6 +27498,65 @@ async fn test_paste_url_from_other_app_creates_markdown_link_over_selected_text(
|
||||
));
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_markdown_list_indent_with_multi_cursor(cx: &mut gpui::TestAppContext) {
|
||||
init_test(cx, |_| {});
|
||||
|
||||
let markdown_language = languages::language("markdown", tree_sitter_md::LANGUAGE.into());
|
||||
let mut cx = EditorTestContext::new(cx).await;
|
||||
|
||||
cx.update_buffer(|buffer, cx| buffer.set_language(Some(markdown_language), cx));
|
||||
|
||||
cx.set_state(&indoc! {"
|
||||
- [ ] Item 1
|
||||
- [ ] Item 1.a
|
||||
- [ˇ] Item 2
|
||||
- [ˇ] Item 2.a
|
||||
- [ˇ] Item 2.b
|
||||
"
|
||||
});
|
||||
|
||||
cx.update_editor(|editor, window, cx| {
|
||||
editor.handle_input("X", window, cx);
|
||||
});
|
||||
|
||||
cx.assert_editor_state(indoc! {"
|
||||
- [ ] Item 1
|
||||
- [ ] Item 1.a
|
||||
- [Xˇ] Item 2
|
||||
- [Xˇ] Item 2.a
|
||||
- [Xˇ] Item 2.b
|
||||
"
|
||||
});
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_markdown_list_indent_with_newline(cx: &mut gpui::TestAppContext) {
|
||||
init_test(cx, |_| {});
|
||||
|
||||
let markdown_language = languages::language("markdown", tree_sitter_md::LANGUAGE.into());
|
||||
let mut cx = EditorTestContext::new(cx).await;
|
||||
|
||||
cx.update_buffer(|buffer, cx| buffer.set_language(Some(markdown_language), cx));
|
||||
|
||||
cx.set_state(indoc! {"
|
||||
- [x] list item
|
||||
- [x] sub list itemˇ
|
||||
"
|
||||
});
|
||||
|
||||
cx.update_editor(|editor, window, cx| {
|
||||
editor.newline(&Newline, window, cx);
|
||||
});
|
||||
|
||||
cx.assert_editor_state(indoc! {"
|
||||
- [x] list item
|
||||
- [x] sub list item
|
||||
ˇ
|
||||
"
|
||||
});
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_paste_url_from_zed_copy_creates_markdown_link_over_selected_text(
|
||||
cx: &mut gpui::TestAppContext,
|
||||
|
||||
@@ -3915,6 +3915,8 @@ impl EditorElement {
|
||||
) -> impl IntoElement {
|
||||
let editor = self.editor.read(cx);
|
||||
let multi_buffer = editor.buffer.read(cx);
|
||||
let is_read_only = self.editor.read(cx).read_only(cx);
|
||||
|
||||
let file_status = multi_buffer
|
||||
.all_diff_hunks_expanded()
|
||||
.then(|| editor.status_for_buffer_id(for_excerpt.buffer_id, cx))
|
||||
@@ -3967,7 +3969,7 @@ impl EditorElement {
|
||||
.gap_1p5()
|
||||
.when(is_sticky, |el| el.shadow_md())
|
||||
.border_1()
|
||||
.map(|div| {
|
||||
.map(|border| {
|
||||
let border_color = if is_selected
|
||||
&& is_folded
|
||||
&& focus_handle.contains_focused(window, cx)
|
||||
@@ -3976,7 +3978,7 @@ impl EditorElement {
|
||||
} else {
|
||||
colors.border
|
||||
};
|
||||
div.border_color(border_color)
|
||||
border.border_color(border_color)
|
||||
})
|
||||
.bg(colors.editor_subheader_background)
|
||||
.hover(|style| style.bg(colors.element_hover))
|
||||
@@ -4056,13 +4058,15 @@ impl EditorElement {
|
||||
})
|
||||
.take(1),
|
||||
)
|
||||
.child(
|
||||
h_flex()
|
||||
.size_3()
|
||||
.justify_center()
|
||||
.flex_shrink_0()
|
||||
.children(indicator),
|
||||
)
|
||||
.when(!is_read_only, |this| {
|
||||
this.child(
|
||||
h_flex()
|
||||
.size_3()
|
||||
.justify_center()
|
||||
.flex_shrink_0()
|
||||
.children(indicator),
|
||||
)
|
||||
})
|
||||
.child(
|
||||
h_flex()
|
||||
.cursor_pointer()
|
||||
|
||||
@@ -508,7 +508,19 @@ impl GitBlame {
|
||||
let buffer_edits = buffer.update(cx, |buffer, _| buffer.subscribe());
|
||||
|
||||
let blame_buffer = project.blame_buffer(&buffer, None, cx);
|
||||
Some(async move { (id, snapshot, buffer_edits, blame_buffer.await) })
|
||||
let remote_url = project
|
||||
.git_store()
|
||||
.read(cx)
|
||||
.repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
|
||||
.and_then(|(repo, _)| {
|
||||
repo.read(cx)
|
||||
.remote_upstream_url
|
||||
.clone()
|
||||
.or(repo.read(cx).remote_origin_url.clone())
|
||||
});
|
||||
Some(
|
||||
async move { (id, snapshot, buffer_edits, blame_buffer.await, remote_url) },
|
||||
)
|
||||
})
|
||||
.collect::<Vec<_>>()
|
||||
});
|
||||
@@ -524,13 +536,9 @@ impl GitBlame {
|
||||
.await;
|
||||
let mut res = vec![];
|
||||
let mut errors = vec![];
|
||||
for (id, snapshot, buffer_edits, blame) in blame {
|
||||
for (id, snapshot, buffer_edits, blame, remote_url) in blame {
|
||||
match blame {
|
||||
Ok(Some(Blame {
|
||||
entries,
|
||||
messages,
|
||||
remote_url,
|
||||
})) => {
|
||||
Ok(Some(Blame { entries, messages })) => {
|
||||
let entries = build_blame_entry_sum_tree(
|
||||
entries,
|
||||
snapshot.max_point().row,
|
||||
|
||||
@@ -181,6 +181,10 @@ pub fn indent_guides_in_range(
|
||||
.buffer_snapshot()
|
||||
.indent_guides_in_range(start_anchor..end_anchor, ignore_disabled_for_language, cx)
|
||||
.filter(|indent_guide| {
|
||||
if editor.has_indent_guides_disabled_for_buffer(indent_guide.buffer_id) {
|
||||
return false;
|
||||
}
|
||||
|
||||
if editor.is_buffer_folded(indent_guide.buffer_id, cx) {
|
||||
return false;
|
||||
}
|
||||
|
||||
@@ -705,7 +705,7 @@ async fn test_extension_store_with_test_extension(cx: &mut TestAppContext) {
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
let mut fake_servers = language_registry.register_fake_language_server(
|
||||
let mut fake_servers = language_registry.register_fake_lsp_server(
|
||||
LanguageServerName("gleam".into()),
|
||||
lsp::ServerCapabilities {
|
||||
completion_provider: Some(Default::default()),
|
||||
|
||||
@@ -50,6 +50,8 @@ pub struct FakeGitRepositoryState {
|
||||
pub blames: HashMap<RepoPath, Blame>,
|
||||
pub current_branch_name: Option<String>,
|
||||
pub branches: HashSet<String>,
|
||||
/// List of remotes, keys are names and values are URLs
|
||||
pub remotes: HashMap<String, String>,
|
||||
pub simulated_index_write_error_message: Option<String>,
|
||||
pub refs: HashMap<String, String>,
|
||||
}
|
||||
@@ -68,6 +70,7 @@ impl FakeGitRepositoryState {
|
||||
refs: HashMap::from_iter([("HEAD".into(), "abc".into())]),
|
||||
merge_base_contents: Default::default(),
|
||||
oids: Default::default(),
|
||||
remotes: HashMap::default(),
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -432,8 +435,13 @@ impl GitRepository for FakeGitRepository {
|
||||
})
|
||||
}
|
||||
|
||||
fn delete_branch(&self, _name: String) -> BoxFuture<'_, Result<()>> {
|
||||
unimplemented!()
|
||||
fn delete_branch(&self, name: String) -> BoxFuture<'_, Result<()>> {
|
||||
self.with_state_async(true, move |state| {
|
||||
if !state.branches.remove(&name) {
|
||||
bail!("no such branch: {name}");
|
||||
}
|
||||
Ok(())
|
||||
})
|
||||
}
|
||||
|
||||
fn blame(&self, path: RepoPath, _content: Rope) -> BoxFuture<'_, Result<git::blame::Blame>> {
|
||||
@@ -598,6 +606,19 @@ impl GitRepository for FakeGitRepository {
|
||||
unimplemented!()
|
||||
}
|
||||
|
||||
fn get_all_remotes(&self) -> BoxFuture<'_, Result<Vec<Remote>>> {
|
||||
self.with_state_async(false, move |state| {
|
||||
let remotes = state
|
||||
.remotes
|
||||
.keys()
|
||||
.map(|r| Remote {
|
||||
name: r.clone().into(),
|
||||
})
|
||||
.collect::<Vec<_>>();
|
||||
Ok(remotes)
|
||||
})
|
||||
}
|
||||
|
||||
fn get_push_remote(&self, _branch: String) -> BoxFuture<'_, Result<Option<Remote>>> {
|
||||
unimplemented!()
|
||||
}
|
||||
@@ -606,10 +627,6 @@ impl GitRepository for FakeGitRepository {
|
||||
unimplemented!()
|
||||
}
|
||||
|
||||
fn get_all_remotes(&self) -> BoxFuture<'_, Result<Vec<Remote>>> {
|
||||
unimplemented!()
|
||||
}
|
||||
|
||||
fn check_for_pushed_commit(&self) -> BoxFuture<'_, Result<Vec<gpui::SharedString>>> {
|
||||
future::ready(Ok(Vec::new())).boxed()
|
||||
}
|
||||
@@ -683,6 +700,20 @@ impl GitRepository for FakeGitRepository {
|
||||
fn default_branch(&self) -> BoxFuture<'_, Result<Option<SharedString>>> {
|
||||
async { Ok(Some("main".into())) }.boxed()
|
||||
}
|
||||
|
||||
fn create_remote(&self, name: String, url: String) -> BoxFuture<'_, Result<()>> {
|
||||
self.with_state_async(true, move |state| {
|
||||
state.remotes.insert(name, url);
|
||||
Ok(())
|
||||
})
|
||||
}
|
||||
|
||||
fn remove_remote(&self, name: String) -> BoxFuture<'_, Result<()>> {
|
||||
self.with_state_async(true, move |state| {
|
||||
state.remotes.remove(&name);
|
||||
Ok(())
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
|
||||
@@ -19,7 +19,6 @@ pub use git2 as libgit;
|
||||
pub struct Blame {
|
||||
pub entries: Vec<BlameEntry>,
|
||||
pub messages: HashMap<Oid, String>,
|
||||
pub remote_url: Option<String>,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, Default)]
|
||||
@@ -36,7 +35,6 @@ impl Blame {
|
||||
working_directory: &Path,
|
||||
path: &RepoPath,
|
||||
content: &Rope,
|
||||
remote_url: Option<String>,
|
||||
) -> Result<Self> {
|
||||
let output = run_git_blame(git_binary, working_directory, path, content).await?;
|
||||
let mut entries = parse_git_blame(&output)?;
|
||||
@@ -53,11 +51,7 @@ impl Blame {
|
||||
.await
|
||||
.context("failed to get commit messages")?;
|
||||
|
||||
Ok(Self {
|
||||
entries,
|
||||
messages,
|
||||
remote_url,
|
||||
})
|
||||
Ok(Self { entries, messages })
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -1,3 +1,4 @@
|
||||
use std::str::FromStr;
|
||||
use std::sync::LazyLock;
|
||||
|
||||
use derive_more::Deref;
|
||||
@@ -11,7 +12,7 @@ pub struct RemoteUrl(Url);
|
||||
static USERNAME_REGEX: LazyLock<Regex> =
|
||||
LazyLock::new(|| Regex::new(r"^[0-9a-zA-Z\-_]+@").expect("Failed to create USERNAME_REGEX"));
|
||||
|
||||
impl std::str::FromStr for RemoteUrl {
|
||||
impl FromStr for RemoteUrl {
|
||||
type Err = url::ParseError;
|
||||
|
||||
fn from_str(input: &str) -> Result<Self, Self::Err> {
|
||||
|
||||
@@ -7,13 +7,15 @@ use collections::HashMap;
|
||||
use futures::future::BoxFuture;
|
||||
use futures::io::BufWriter;
|
||||
use futures::{AsyncWriteExt, FutureExt as _, select_biased};
|
||||
use git2::BranchType;
|
||||
use git2::{BranchType, ErrorCode};
|
||||
use gpui::{AppContext as _, AsyncApp, BackgroundExecutor, SharedString, Task};
|
||||
use parking_lot::Mutex;
|
||||
use rope::Rope;
|
||||
use schemars::JsonSchema;
|
||||
use serde::Deserialize;
|
||||
use smol::io::{AsyncBufReadExt, AsyncReadExt, BufReader};
|
||||
|
||||
use std::collections::HashSet;
|
||||
use std::ffi::{OsStr, OsString};
|
||||
use std::process::{ExitStatus, Stdio};
|
||||
use std::{
|
||||
@@ -55,6 +57,12 @@ impl Branch {
|
||||
self.ref_name.starts_with("refs/remotes/")
|
||||
}
|
||||
|
||||
pub fn remote_name(&self) -> Option<&str> {
|
||||
self.ref_name
|
||||
.strip_prefix("refs/remotes/")
|
||||
.and_then(|stripped| stripped.split("/").next())
|
||||
}
|
||||
|
||||
pub fn tracking_status(&self) -> Option<UpstreamTrackingStatus> {
|
||||
self.upstream
|
||||
.as_ref()
|
||||
@@ -590,6 +598,10 @@ pub trait GitRepository: Send + Sync {
|
||||
|
||||
fn get_all_remotes(&self) -> BoxFuture<'_, Result<Vec<Remote>>>;
|
||||
|
||||
fn remove_remote(&self, name: String) -> BoxFuture<'_, Result<()>>;
|
||||
|
||||
fn create_remote(&self, name: String, url: String) -> BoxFuture<'_, Result<()>>;
|
||||
|
||||
/// returns a list of remote branches that contain HEAD
|
||||
fn check_for_pushed_commit(&self) -> BoxFuture<'_, Result<Vec<SharedString>>>;
|
||||
|
||||
@@ -1385,9 +1397,19 @@ impl GitRepository for RealGitRepository {
|
||||
branch
|
||||
} else if let Ok(revision) = repo.find_branch(&name, BranchType::Remote) {
|
||||
let (_, branch_name) = name.split_once("/").context("Unexpected branch format")?;
|
||||
|
||||
let revision = revision.get();
|
||||
let branch_commit = revision.peel_to_commit()?;
|
||||
let mut branch = repo.branch(&branch_name, &branch_commit, false)?;
|
||||
let mut branch = match repo.branch(&branch_name, &branch_commit, false) {
|
||||
Ok(branch) => branch,
|
||||
Err(err) if err.code() == ErrorCode::Exists => {
|
||||
repo.find_branch(&branch_name, BranchType::Local)?
|
||||
}
|
||||
Err(err) => {
|
||||
return Err(err.into());
|
||||
}
|
||||
};
|
||||
|
||||
branch.set_upstream(Some(&name))?;
|
||||
branch
|
||||
} else {
|
||||
@@ -1403,7 +1425,6 @@ impl GitRepository for RealGitRepository {
|
||||
self.executor
|
||||
.spawn(async move {
|
||||
let branch = branch.await?;
|
||||
|
||||
GitBinary::new(git_binary_path, working_directory?, executor)
|
||||
.run(&["checkout", &branch])
|
||||
.await?;
|
||||
@@ -1473,28 +1494,17 @@ impl GitRepository for RealGitRepository {
|
||||
let git_binary_path = self.any_git_binary_path.clone();
|
||||
let executor = self.executor.clone();
|
||||
|
||||
async move {
|
||||
let remote_url = if let Some(remote_url) = self.remote_url("upstream").await {
|
||||
Some(remote_url)
|
||||
} else if let Some(remote_url) = self.remote_url("origin").await {
|
||||
Some(remote_url)
|
||||
} else {
|
||||
None
|
||||
};
|
||||
executor
|
||||
.spawn(async move {
|
||||
crate::blame::Blame::for_path(
|
||||
&git_binary_path,
|
||||
&working_directory?,
|
||||
&path,
|
||||
&content,
|
||||
remote_url,
|
||||
)
|
||||
.await
|
||||
})
|
||||
executor
|
||||
.spawn(async move {
|
||||
crate::blame::Blame::for_path(
|
||||
&git_binary_path,
|
||||
&working_directory?,
|
||||
&path,
|
||||
&content,
|
||||
)
|
||||
.await
|
||||
}
|
||||
.boxed()
|
||||
})
|
||||
.boxed()
|
||||
}
|
||||
|
||||
fn file_history(&self, path: RepoPath) -> BoxFuture<'_, Result<FileHistory>> {
|
||||
@@ -1993,7 +2003,7 @@ impl GitRepository for RealGitRepository {
|
||||
let working_directory = working_directory?;
|
||||
let output = new_smol_command(&git_binary_path)
|
||||
.current_dir(&working_directory)
|
||||
.args(["remote"])
|
||||
.args(["remote", "-v"])
|
||||
.output()
|
||||
.await?;
|
||||
|
||||
@@ -2002,14 +2012,43 @@ impl GitRepository for RealGitRepository {
|
||||
"Failed to get all remotes:\n{}",
|
||||
String::from_utf8_lossy(&output.stderr)
|
||||
);
|
||||
let remote_names = String::from_utf8_lossy(&output.stdout)
|
||||
.split('\n')
|
||||
.filter(|name| !name.is_empty())
|
||||
.map(|name| Remote {
|
||||
name: name.trim().to_string().into(),
|
||||
let remote_names: HashSet<Remote> = String::from_utf8_lossy(&output.stdout)
|
||||
.lines()
|
||||
.filter(|line| !line.is_empty())
|
||||
.filter_map(|line| {
|
||||
let mut split_line = line.split_whitespace();
|
||||
let remote_name = split_line.next()?;
|
||||
|
||||
Some(Remote {
|
||||
name: remote_name.trim().to_string().into(),
|
||||
})
|
||||
})
|
||||
.collect();
|
||||
Ok(remote_names)
|
||||
|
||||
Ok(remote_names.into_iter().collect())
|
||||
})
|
||||
.boxed()
|
||||
}
|
||||
|
||||
fn remove_remote(&self, name: String) -> BoxFuture<'_, Result<()>> {
|
||||
let repo = self.repository.clone();
|
||||
self.executor
|
||||
.spawn(async move {
|
||||
let repo = repo.lock();
|
||||
repo.remote_delete(&name)?;
|
||||
|
||||
Ok(())
|
||||
})
|
||||
.boxed()
|
||||
}
|
||||
|
||||
fn create_remote(&self, name: String, url: String) -> BoxFuture<'_, Result<()>> {
|
||||
let repo = self.repository.clone();
|
||||
self.executor
|
||||
.spawn(async move {
|
||||
let repo = repo.lock();
|
||||
repo.remote(&name, url.as_ref())?;
|
||||
Ok(())
|
||||
})
|
||||
.boxed()
|
||||
}
|
||||
|
||||
File diff suppressed because it is too large
@@ -1,7 +1,7 @@
|
||||
use anyhow::{Context as _, Result};
|
||||
use buffer_diff::{BufferDiff, BufferDiffSnapshot};
|
||||
use editor::display_map::{BlockPlacement, BlockProperties, BlockStyle};
|
||||
use editor::{Addon, Editor, EditorEvent, ExcerptId, ExcerptRange, MultiBuffer};
|
||||
use editor::{Editor, EditorEvent, ExcerptId, ExcerptRange, MultiBuffer};
|
||||
use git::repository::{CommitDetails, CommitDiff, RepoPath};
|
||||
use git::{GitHostingProviderRegistry, GitRemote, parse_git_remote_url};
|
||||
use gpui::{
|
||||
@@ -11,9 +11,8 @@ use gpui::{
|
||||
};
|
||||
use language::{
|
||||
Anchor, Buffer, Capability, DiskState, File, LanguageRegistry, LineEnding, ReplicaId, Rope,
|
||||
TextBuffer, ToPoint,
|
||||
TextBuffer,
|
||||
};
|
||||
use multi_buffer::ExcerptInfo;
|
||||
use multi_buffer::PathKey;
|
||||
use project::{Project, WorktreeId, git_store::Repository};
|
||||
use std::{
|
||||
@@ -22,11 +21,9 @@ use std::{
|
||||
sync::Arc,
|
||||
};
|
||||
use theme::ActiveTheme;
|
||||
use ui::{
|
||||
Avatar, Button, ButtonCommon, Clickable, Color, Icon, IconName, IconSize, Label,
|
||||
LabelCommon as _, LabelSize, SharedString, div, h_flex, v_flex,
|
||||
};
|
||||
use ui::{Avatar, DiffStat, Tooltip, prelude::*};
|
||||
use util::{ResultExt, paths::PathStyle, rel_path::RelPath, truncate_and_trailoff};
|
||||
use workspace::item::TabTooltipContent;
|
||||
use workspace::{
|
||||
Item, ItemHandle, ItemNavHistory, ToolbarItemEvent, ToolbarItemLocation, ToolbarItemView,
|
||||
Workspace,
|
||||
@@ -151,11 +148,10 @@ impl CommitView {
|
||||
let editor = cx.new(|cx| {
|
||||
let mut editor =
|
||||
Editor::for_multibuffer(multibuffer.clone(), Some(project.clone()), window, cx);
|
||||
|
||||
editor.disable_inline_diagnostics();
|
||||
editor.set_expand_all_diff_hunks(cx);
|
||||
editor.register_addon(CommitViewAddon {
|
||||
multibuffer: multibuffer.downgrade(),
|
||||
});
|
||||
|
||||
editor
|
||||
});
|
||||
let commit_sha = Arc::<str>::from(commit.sha.as_ref());
|
||||
@@ -262,6 +258,8 @@ impl CommitView {
|
||||
|
||||
this.editor.update(cx, |editor, cx| {
|
||||
editor.disable_header_for_buffer(message_buffer.read(cx).remote_id(), cx);
|
||||
editor
|
||||
.disable_indent_guides_for_buffer(message_buffer.read(cx).remote_id(), cx);
|
||||
|
||||
editor.insert_blocks(
|
||||
[BlockProperties {
|
||||
@@ -357,6 +355,41 @@ impl CommitView {
|
||||
.into_any()
|
||||
}
|
||||
|
||||
fn calculate_changed_lines(&self, cx: &App) -> (u32, u32) {
|
||||
let snapshot = self.multibuffer.read(cx).snapshot(cx);
|
||||
let mut total_additions = 0u32;
|
||||
let mut total_deletions = 0u32;
|
||||
|
||||
let mut seen_buffers = std::collections::HashSet::new();
|
||||
for (_, buffer, _) in snapshot.excerpts() {
|
||||
let buffer_id = buffer.remote_id();
|
||||
if !seen_buffers.insert(buffer_id) {
|
||||
continue;
|
||||
}
|
||||
|
||||
let Some(diff) = snapshot.diff_for_buffer_id(buffer_id) else {
|
||||
continue;
|
||||
};
|
||||
|
||||
let base_text = diff.base_text();
|
||||
|
||||
for hunk in diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, buffer) {
|
||||
let added_rows = hunk.range.end.row.saturating_sub(hunk.range.start.row);
|
||||
total_additions += added_rows;
|
||||
|
||||
let base_start = base_text
|
||||
.offset_to_point(hunk.diff_base_byte_range.start)
|
||||
.row;
|
||||
let base_end = base_text.offset_to_point(hunk.diff_base_byte_range.end).row;
|
||||
let deleted_rows = base_end.saturating_sub(base_start);
|
||||
|
||||
total_deletions += deleted_rows;
|
||||
}
|
||||
}
|
||||
|
||||
(total_additions, total_deletions)
|
||||
}
|
||||
|
||||
fn render_header(&self, window: &mut Window, cx: &mut Context<Self>) -> impl IntoElement {
|
||||
let commit = &self.commit;
|
||||
let author_name = commit.author_name.clone();
|
||||
@@ -380,46 +413,72 @@ impl CommitView {
|
||||
)
|
||||
});
|
||||
|
||||
v_flex()
|
||||
.p_4()
|
||||
.pl_0()
|
||||
.gap_4()
|
||||
let (additions, deletions) = self.calculate_changed_lines(cx);
|
||||
|
||||
let commit_diff_stat = if additions > 0 || deletions > 0 {
|
||||
Some(DiffStat::new(
|
||||
"commit-diff-stat",
|
||||
additions as usize,
|
||||
deletions as usize,
|
||||
))
|
||||
} else {
|
||||
None
|
||||
};
|
||||
|
||||
h_flex()
|
||||
.border_b_1()
|
||||
.border_color(cx.theme().colors().border)
|
||||
.border_color(cx.theme().colors().border_variant)
|
||||
.child(
|
||||
h_flex()
|
||||
.w(self.editor.read(cx).last_gutter_dimensions().full_width())
|
||||
.justify_center()
|
||||
.child(self.render_commit_avatar(&commit.sha, rems_from_px(48.), window, cx)),
|
||||
)
|
||||
.child(
|
||||
h_flex()
|
||||
.py_4()
|
||||
.pl_1()
|
||||
.pr_4()
|
||||
.w_full()
|
||||
.items_start()
|
||||
.child(
|
||||
h_flex()
|
||||
.w(self.editor.read(cx).last_gutter_dimensions().full_width())
|
||||
.justify_center()
|
||||
.child(self.render_commit_avatar(
|
||||
&commit.sha,
|
||||
gpui::rems(3.0),
|
||||
window,
|
||||
cx,
|
||||
)),
|
||||
)
|
||||
.justify_between()
|
||||
.flex_wrap()
|
||||
.child(
|
||||
v_flex()
|
||||
.gap_1()
|
||||
.child(
|
||||
h_flex()
|
||||
.gap_3()
|
||||
.items_baseline()
|
||||
.gap_1()
|
||||
.child(Label::new(author_name).color(Color::Default))
|
||||
.child(
|
||||
Label::new(format!("commit {}", commit.sha))
|
||||
.color(Color::Muted),
|
||||
Label::new(format!("Commit:{}", commit.sha))
|
||||
.color(Color::Muted)
|
||||
.size(LabelSize::Small)
|
||||
.truncate()
|
||||
.buffer_font(cx),
|
||||
),
|
||||
)
|
||||
.child(Label::new(date_string).color(Color::Muted)),
|
||||
.child(
|
||||
h_flex()
|
||||
.gap_1p5()
|
||||
.child(
|
||||
Label::new(date_string)
|
||||
.color(Color::Muted)
|
||||
.size(LabelSize::Small),
|
||||
)
|
||||
.child(
|
||||
Label::new("•")
|
||||
.color(Color::Ignored)
|
||||
.size(LabelSize::Small),
|
||||
)
|
||||
.children(commit_diff_stat),
|
||||
),
|
||||
)
|
||||
.child(div().flex_grow())
|
||||
.children(github_url.map(|url| {
|
||||
Button::new("view_on_github", "View on GitHub")
|
||||
.icon(IconName::Github)
|
||||
.style(ui::ButtonStyle::Subtle)
|
||||
.icon_color(Color::Muted)
|
||||
.icon_size(IconSize::Small)
|
||||
.icon_position(IconPosition::Start)
|
||||
.on_click(move |_, _, cx| cx.open_url(&url))
|
||||
})),
|
||||
)
|
||||
@@ -714,55 +773,6 @@ impl language::File for GitBlob {
|
||||
// }
|
||||
// }
|
||||
|
||||
struct CommitViewAddon {
|
||||
multibuffer: WeakEntity<MultiBuffer>,
|
||||
}
|
||||
|
||||
impl Addon for CommitViewAddon {
|
||||
fn render_buffer_header_controls(
|
||||
&self,
|
||||
excerpt: &ExcerptInfo,
|
||||
_window: &Window,
|
||||
cx: &App,
|
||||
) -> Option<AnyElement> {
|
||||
let multibuffer = self.multibuffer.upgrade()?;
|
||||
let snapshot = multibuffer.read(cx).snapshot(cx);
|
||||
let excerpts = snapshot.excerpts().collect::<Vec<_>>();
|
||||
let current_idx = excerpts.iter().position(|(id, _, _)| *id == excerpt.id)?;
|
||||
let (_, _, current_range) = &excerpts[current_idx];
|
||||
|
||||
let start_row = current_range.context.start.to_point(&excerpt.buffer).row;
|
||||
|
||||
let prev_end_row = if current_idx > 0 {
|
||||
let (_, prev_buffer, prev_range) = &excerpts[current_idx - 1];
|
||||
if prev_buffer.remote_id() == excerpt.buffer_id {
|
||||
prev_range.context.end.to_point(&excerpt.buffer).row
|
||||
} else {
|
||||
0
|
||||
}
|
||||
} else {
|
||||
0
|
||||
};
|
||||
|
||||
let skipped_lines = start_row.saturating_sub(prev_end_row);
|
||||
|
||||
if skipped_lines > 0 {
|
||||
Some(
|
||||
Label::new(format!("{} unchanged lines", skipped_lines))
|
||||
.color(Color::Muted)
|
||||
.size(LabelSize::Small)
|
||||
.into_any_element(),
|
||||
)
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
|
||||
fn to_any(&self) -> &dyn Any {
|
||||
self
|
||||
}
|
||||
}
|
||||
|
||||
async fn build_buffer(
|
||||
mut text: String,
|
||||
blob: Arc<dyn File>,
|
||||
@@ -865,13 +875,28 @@ impl Item for CommitView {
|
||||
fn tab_content_text(&self, _detail: usize, _cx: &App) -> SharedString {
|
||||
let short_sha = self.commit.sha.get(0..7).unwrap_or(&*self.commit.sha);
|
||||
let subject = truncate_and_trailoff(self.commit.message.split('\n').next().unwrap(), 20);
|
||||
format!("{short_sha} - {subject}").into()
|
||||
format!("{short_sha} — {subject}").into()
|
||||
}
|
||||
|
||||
fn tab_tooltip_text(&self, _: &App) -> Option<ui::SharedString> {
|
||||
fn tab_tooltip_content(&self, _: &App) -> Option<TabTooltipContent> {
|
||||
let short_sha = self.commit.sha.get(0..16).unwrap_or(&*self.commit.sha);
|
||||
let subject = self.commit.message.split('\n').next().unwrap();
|
||||
Some(format!("{short_sha} - {subject}").into())
|
||||
|
||||
Some(TabTooltipContent::Custom(Box::new(Tooltip::element({
|
||||
let subject = subject.to_string();
|
||||
let short_sha = short_sha.to_string();
|
||||
|
||||
move |_, _| {
|
||||
v_flex()
|
||||
.child(Label::new(subject.clone()))
|
||||
.child(
|
||||
Label::new(short_sha.clone())
|
||||
.color(Color::Muted)
|
||||
.size(LabelSize::Small),
|
||||
)
|
||||
.into_any_element()
|
||||
}
|
||||
}))))
|
||||
}
|
||||
|
||||
fn to_item_events(event: &EditorEvent, f: impl FnMut(ItemEvent)) {
|
||||
@@ -988,12 +1013,11 @@ impl Item for CommitView {
|
||||
impl Render for CommitView {
|
||||
fn render(&mut self, window: &mut Window, cx: &mut Context<Self>) -> impl IntoElement {
|
||||
let is_stash = self.stash.is_some();
|
||||
div()
|
||||
|
||||
v_flex()
|
||||
.key_context(if is_stash { "StashDiff" } else { "CommitDiff" })
|
||||
.bg(cx.theme().colors().editor_background)
|
||||
.flex()
|
||||
.flex_col()
|
||||
.size_full()
|
||||
.bg(cx.theme().colors().editor_background)
|
||||
.child(self.render_header(window, cx))
|
||||
.child(div().flex_grow().child(self.editor.clone()))
|
||||
}
|
||||
@@ -1013,7 +1037,7 @@ impl EventEmitter<ToolbarItemEvent> for CommitViewToolbar {}
|
||||
|
||||
impl Render for CommitViewToolbar {
|
||||
fn render(&mut self, _window: &mut Window, _cx: &mut Context<Self>) -> impl IntoElement {
|
||||
div()
|
||||
div().hidden()
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -3463,7 +3463,6 @@ impl GitPanel {
|
||||
) -> Option<impl IntoElement> {
|
||||
let active_repository = self.active_repository.clone()?;
|
||||
let panel_editor_style = panel_editor_style(true, window, cx);
|
||||
|
||||
let enable_coauthors = self.render_co_authors(cx);
|
||||
|
||||
let editor_focus_handle = self.commit_editor.focus_handle(cx);
|
||||
@@ -4772,7 +4771,6 @@ impl RenderOnce for PanelRepoFooter {
|
||||
const MAX_REPO_LEN: usize = 16;
|
||||
const LABEL_CHARACTER_BUDGET: usize = MAX_BRANCH_LEN + MAX_REPO_LEN;
|
||||
const MAX_SHORT_SHA_LEN: usize = 8;
|
||||
|
||||
let branch_name = self
|
||||
.branch
|
||||
.as_ref()
|
||||
|
||||
@@ -1,4 +1,5 @@
use anyhow::Context as _;

use git::repository::{Remote, RemoteCommandOutput};
use linkify::{LinkFinder, LinkKind};
use ui::SharedString;
@@ -26,12 +26,13 @@ pub(crate) struct LinuxDispatcher {
    main_thread_id: thread::ThreadId,
}

const MIN_THREADS: usize = 2;

impl LinuxDispatcher {
    pub fn new(main_sender: Sender<RunnableVariant>) -> Self {
        let (background_sender, background_receiver) = flume::unbounded::<RunnableVariant>();
        let thread_count = std::thread::available_parallelism()
            .map(|i| i.get())
            .unwrap_or(1);
        let thread_count =
            std::thread::available_parallelism().map_or(MIN_THREADS, |i| i.get().max(MIN_THREADS));

        let mut background_threads = (0..thread_count)
            .map(|i| {
@@ -1419,7 +1419,7 @@ impl Dispatch<wl_keyboard::WlKeyboard, ()> for WaylandClientStatePtr {
        state.repeat.current_keycode = Some(keycode);

        let rate = state.repeat.characters_per_second;
        let repeat_interval = Duration::from_secs(1) / rate;
        let repeat_interval = Duration::from_secs(1) / rate.max(1);
        let id = state.repeat.current_id;
        state
            .loop_handle
@@ -7,9 +7,7 @@ use std::{
|
||||
use flume::Sender;
|
||||
use util::ResultExt;
|
||||
use windows::{
|
||||
System::Threading::{
|
||||
ThreadPool, ThreadPoolTimer, TimerElapsedHandler, WorkItemHandler, WorkItemPriority,
|
||||
},
|
||||
System::Threading::{ThreadPool, ThreadPoolTimer, TimerElapsedHandler, WorkItemHandler},
|
||||
Win32::{
|
||||
Foundation::{LPARAM, WPARAM},
|
||||
UI::WindowsAndMessaging::PostMessageW,
|
||||
@@ -55,7 +53,7 @@ impl WindowsDispatcher {
|
||||
Ok(())
|
||||
})
|
||||
};
|
||||
ThreadPool::RunWithPriorityAsync(&handler, WorkItemPriority::High).log_err();
|
||||
ThreadPool::RunAsync(&handler).log_err();
|
||||
}
|
||||
|
||||
fn dispatch_on_threadpool_after(&self, runnable: RunnableVariant, duration: Duration) {
|
||||
|
||||
@@ -4022,6 +4022,20 @@ impl BufferSnapshot {
|
||||
})
|
||||
}
|
||||
|
||||
pub fn outline_items_as_offsets_containing<T: ToOffset>(
|
||||
&self,
|
||||
range: Range<T>,
|
||||
include_extra_context: bool,
|
||||
theme: Option<&SyntaxTheme>,
|
||||
) -> Vec<OutlineItem<usize>> {
|
||||
self.outline_items_containing_internal(
|
||||
range,
|
||||
include_extra_context,
|
||||
theme,
|
||||
|buffer, range| range.to_offset(buffer),
|
||||
)
|
||||
}
|
||||
|
||||
fn outline_items_containing_internal<T: ToOffset, U>(
|
||||
&self,
|
||||
range: Range<T>,
|
||||
|
||||
@@ -784,28 +784,48 @@ async fn test_outline(cx: &mut gpui::TestAppContext) {
|
||||
.unindent();
|
||||
|
||||
let buffer = cx.new(|cx| Buffer::local(text, cx).with_language(Arc::new(rust_lang()), cx));
|
||||
let outline = buffer.update(cx, |buffer, _| buffer.snapshot().outline(None));
|
||||
let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
|
||||
let outline = snapshot.outline(None);
|
||||
|
||||
assert_eq!(
|
||||
pretty_assertions::assert_eq!(
|
||||
outline
|
||||
.items
|
||||
.iter()
|
||||
.map(|item| (item.text.as_str(), item.depth))
|
||||
.map(|item| (
|
||||
item.text.as_str(),
|
||||
item.depth,
|
||||
item.to_point(&snapshot).body_range(&snapshot)
|
||||
.map(|range| minimize_space(&snapshot.text_for_range(range).collect::<String>()))
|
||||
))
|
||||
.collect::<Vec<_>>(),
|
||||
&[
|
||||
("struct Person", 0),
|
||||
("name", 1),
|
||||
("age", 1),
|
||||
("mod module", 0),
|
||||
("enum LoginState", 1),
|
||||
("LoggedOut", 2),
|
||||
("LoggingOn", 2),
|
||||
("LoggedIn", 2),
|
||||
("person", 3),
|
||||
("time", 3),
|
||||
("impl Eq for Person", 0),
|
||||
("impl Drop for Person", 0),
|
||||
("fn drop", 1),
|
||||
("struct Person", 0, Some("name: String, age: usize,".to_string())),
|
||||
("name", 1, None),
|
||||
("age", 1, None),
|
||||
(
|
||||
"mod module",
|
||||
0,
|
||||
Some(
|
||||
"enum LoginState { LoggedOut, LoggingOn, LoggedIn { person: Person, time: Instant, } }".to_string()
|
||||
)
|
||||
),
|
||||
(
|
||||
"enum LoginState",
|
||||
1,
|
||||
Some("LoggedOut, LoggingOn, LoggedIn { person: Person, time: Instant, }".to_string())
|
||||
),
|
||||
("LoggedOut", 2, None),
|
||||
("LoggingOn", 2, None),
|
||||
("LoggedIn", 2, Some("person: Person, time: Instant,".to_string())),
|
||||
("person", 3, None),
|
||||
("time", 3, None),
|
||||
("impl Eq for Person", 0, None),
|
||||
(
|
||||
"impl Drop for Person",
|
||||
0,
|
||||
Some("fn drop(&mut self) { println!(\"bye\"); }".to_string())
|
||||
),
|
||||
("fn drop", 1, Some("println!(\"bye\");".to_string())),
|
||||
]
|
||||
);
|
||||
|
||||
@@ -840,6 +860,11 @@ async fn test_outline(cx: &mut gpui::TestAppContext) {
|
||||
]
|
||||
);
|
||||
|
||||
fn minimize_space(text: &str) -> String {
|
||||
static WHITESPACE: LazyLock<Regex> = LazyLock::new(|| Regex::new("[\\n\\s]+").unwrap());
|
||||
WHITESPACE.replace_all(text, " ").trim().to_string()
|
||||
}
|
||||
|
||||
async fn search<'a>(
|
||||
outline: &'a Outline<Anchor>,
|
||||
query: &'a str,
|
||||
|
||||
@@ -437,26 +437,14 @@ impl LanguageRegistry {
|
||||
language_name: impl Into<LanguageName>,
|
||||
mut adapter: crate::FakeLspAdapter,
|
||||
) -> futures::channel::mpsc::UnboundedReceiver<lsp::FakeLanguageServer> {
|
||||
let language_name = language_name.into();
|
||||
let adapter_name = LanguageServerName(adapter.name.into());
|
||||
let capabilities = adapter.capabilities.clone();
|
||||
let initializer = adapter.initializer.take();
|
||||
let adapter = CachedLspAdapter::new(Arc::new(adapter));
|
||||
{
|
||||
let mut state = self.state.write();
|
||||
state
|
||||
.lsp_adapters
|
||||
.entry(language_name)
|
||||
.or_default()
|
||||
.push(adapter.clone());
|
||||
state.all_lsp_adapters.insert(adapter.name(), adapter);
|
||||
}
|
||||
|
||||
self.register_fake_language_server(adapter_name, capabilities, initializer)
|
||||
self.register_fake_lsp_adapter(language_name, adapter);
|
||||
self.register_fake_lsp_server(adapter_name, capabilities, initializer)
|
||||
}
|
||||
|
||||
/// Register a fake lsp adapter (without the language server)
|
||||
/// The returned channel receives a new instance of the language server every time it is started
|
||||
#[cfg(any(feature = "test-support", test))]
|
||||
pub fn register_fake_lsp_adapter(
|
||||
&self,
|
||||
@@ -479,7 +467,7 @@ impl LanguageRegistry {
|
||||
/// Register a fake language server (without the adapter)
|
||||
/// The returned channel receives a new instance of the language server every time it is started
|
||||
#[cfg(any(feature = "test-support", test))]
|
||||
pub fn register_fake_language_server(
|
||||
pub fn register_fake_lsp_server(
|
||||
&self,
|
||||
lsp_name: LanguageServerName,
|
||||
capabilities: lsp::ServerCapabilities,
|
||||
|
||||
@@ -373,6 +373,8 @@ impl InlayHintSettings {
|
||||
pub struct EditPredictionSettings {
|
||||
/// The provider that supplies edit predictions.
|
||||
pub provider: settings::EditPredictionProvider,
|
||||
/// Whether to use the experimental edit prediction context retrieval system.
|
||||
pub use_context: bool,
|
||||
/// A list of globs representing files that edit predictions should be disabled for.
|
||||
/// This list adds to a pre-existing, sensible default set of globs.
|
||||
/// Any additional ones you add are combined with them.
|
||||
@@ -622,6 +624,11 @@ impl settings::Settings for AllLanguageSettings {
|
||||
.features
|
||||
.as_ref()
|
||||
.and_then(|f| f.edit_prediction_provider);
|
||||
let use_edit_prediction_context = all_languages
|
||||
.features
|
||||
.as_ref()
|
||||
.and_then(|f| f.experimental_edit_prediction_context_retrieval)
|
||||
.unwrap_or_default();
|
||||
|
||||
let edit_predictions = all_languages.edit_predictions.clone().unwrap();
|
||||
let edit_predictions_mode = edit_predictions.mode.unwrap();
|
||||
@@ -668,6 +675,7 @@ impl settings::Settings for AllLanguageSettings {
|
||||
} else {
|
||||
EditPredictionProvider::None
|
||||
},
|
||||
use_context: use_edit_prediction_context,
|
||||
disabled_globs: disabled_globs
|
||||
.iter()
|
||||
.filter_map(|g| {
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
use crate::{BufferSnapshot, Point, ToPoint};
|
||||
use crate::{BufferSnapshot, Point, ToPoint, ToTreeSitterPoint};
|
||||
use fuzzy::{StringMatch, StringMatchCandidate};
|
||||
use gpui::{BackgroundExecutor, HighlightStyle};
|
||||
use std::ops::Range;
|
||||
@@ -48,6 +48,54 @@ impl<T: ToPoint> OutlineItem<T> {
|
||||
.map(|r| r.start.to_point(buffer)..r.end.to_point(buffer)),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn body_range(&self, buffer: &BufferSnapshot) -> Option<Range<Point>> {
|
||||
if let Some(range) = self.body_range.as_ref() {
|
||||
return Some(range.start.to_point(buffer)..range.end.to_point(buffer));
|
||||
}
|
||||
|
||||
let range = self.range.start.to_point(buffer)..self.range.end.to_point(buffer);
|
||||
let start_indent = buffer.indent_size_for_line(range.start.row);
|
||||
let node = buffer.syntax_ancestor(range.clone())?;
|
||||
|
||||
let mut cursor = node.walk();
|
||||
loop {
|
||||
let node = cursor.node();
|
||||
if node.start_position() >= range.start.to_ts_point()
|
||||
&& node.end_position() <= range.end.to_ts_point()
|
||||
{
|
||||
break;
|
||||
}
|
||||
cursor.goto_first_child_for_point(range.start.to_ts_point());
|
||||
}
|
||||
|
||||
if !cursor.goto_last_child() {
|
||||
return None;
|
||||
}
|
||||
let body_node = loop {
|
||||
let node = cursor.node();
|
||||
if node.child_count() > 0 {
|
||||
break node;
|
||||
}
|
||||
if !cursor.goto_previous_sibling() {
|
||||
return None;
|
||||
}
|
||||
};
|
||||
|
||||
let mut start_row = body_node.start_position().row as u32;
|
||||
let mut end_row = body_node.end_position().row as u32;
|
||||
|
||||
while start_row < end_row && buffer.indent_size_for_line(start_row) == start_indent {
|
||||
start_row += 1;
|
||||
}
|
||||
while start_row < end_row && buffer.indent_size_for_line(end_row - 1) == start_indent {
|
||||
end_row -= 1;
|
||||
}
|
||||
if start_row < end_row {
|
||||
return Some(Point::new(start_row, 0)..Point::new(end_row, 0));
|
||||
}
|
||||
None
|
||||
}
|
||||
}
|
||||
|
||||
impl<T> Outline<T> {
|
||||
|
||||
@@ -1215,6 +1215,19 @@ impl<'a> SyntaxMapMatches<'a> {
|
||||
|
||||
true
|
||||
}
|
||||
|
||||
// pub fn set_byte_range(&mut self, range: Range<usize>) {
|
||||
// for layer in &mut self.layers {
|
||||
// layer.matches.set_byte_range(range.clone());
|
||||
// layer.advance();
|
||||
// }
|
||||
// self.layers.sort_unstable_by_key(|layer| layer.sort_key());
|
||||
// self.active_layer_count = self
|
||||
// .layers
|
||||
// .iter()
|
||||
// .position(|layer| !layer.has_next)
|
||||
// .unwrap_or(self.layers.len());
|
||||
// }
|
||||
}
|
||||
|
||||
impl SyntaxMapCapturesLayer<'_> {
|
||||
|
||||
@@ -71,6 +71,7 @@ pub struct AmazonBedrockSettings {
|
||||
pub profile_name: Option<String>,
|
||||
pub role_arn: Option<String>,
|
||||
pub authentication_method: Option<BedrockAuthMethod>,
|
||||
pub allow_global: Option<bool>,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, EnumIter, IntoStaticStr, JsonSchema)]
|
||||
@@ -239,6 +240,13 @@ impl State {
|
||||
.or(settings_region)
|
||||
.unwrap_or(String::from("us-east-1"))
|
||||
}
|
||||
|
||||
fn get_allow_global(&self) -> bool {
|
||||
self.settings
|
||||
.as_ref()
|
||||
.and_then(|s| s.allow_global)
|
||||
.unwrap_or(false)
|
||||
}
|
||||
}
|
||||
|
||||
pub struct BedrockLanguageModelProvider {
|
||||
@@ -545,11 +553,13 @@ impl LanguageModel for BedrockModel {
|
||||
LanguageModelCompletionError,
|
||||
>,
|
||||
> {
|
||||
let Ok(region) = cx.read_entity(&self.state, |state, _cx| state.get_region()) else {
|
||||
let Ok((region, allow_global)) = cx.read_entity(&self.state, |state, _cx| {
|
||||
(state.get_region(), state.get_allow_global())
|
||||
}) else {
|
||||
return async move { Err(anyhow::anyhow!("App State Dropped").into()) }.boxed();
|
||||
};
|
||||
|
||||
let model_id = match self.model.cross_region_inference_id(®ion) {
|
||||
let model_id = match self.model.cross_region_inference_id(®ion, allow_global) {
|
||||
Ok(s) => s,
|
||||
Err(e) => {
|
||||
return async move { Err(e.into()) }.boxed();
|
||||
|
||||
@@ -58,6 +58,7 @@ impl settings::Settings for AllLanguageModelSettings {
|
||||
profile_name: bedrock.profile,
|
||||
role_arn: None, // todo(was never a setting for this...)
|
||||
authentication_method: bedrock.authentication_method.map(Into::into),
|
||||
allow_global: bedrock.allow_global,
|
||||
},
|
||||
deepseek: DeepSeekSettings {
|
||||
api_url: deepseek.api_url.unwrap(),
|
||||
|
||||
@@ -24,4 +24,5 @@ rewrap_prefixes = [
auto_indent_on_paste = false
auto_indent_using_last_non_empty_line = false
tab_size = 2
decrease_indent_pattern = "^\\s*$"
prettier_parser_name = "markdown"

crates/languages/src/markdown/indents.scm (new file, 3 lines)
@@ -0,0 +1,3 @@
(list (list_item) @indent)

(list_item (list) @indent)

@@ -23,7 +23,7 @@ use serde::{Deserialize, Serialize};
|
||||
use serde_json::{Value, json};
|
||||
use settings::Settings;
|
||||
use smol::lock::OnceCell;
|
||||
use std::cmp::Ordering;
|
||||
use std::cmp::{Ordering, Reverse};
|
||||
use std::env::consts;
|
||||
use terminal::terminal_settings::TerminalSettings;
|
||||
use util::command::new_smol_command;
|
||||
@@ -1101,13 +1101,33 @@ fn get_venv_parent_dir(env: &PythonEnvironment) -> Option<PathBuf> {
    venv.parent().map(|parent| parent.to_path_buf())
}

fn wr_distance(wr: &PathBuf, venv: Option<&PathBuf>) -> usize {
// How far is this venv from the root of our current project?
#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord)]
enum SubprojectDistance {
    WithinSubproject(Reverse<usize>),
    WithinWorktree(Reverse<usize>),
    NotInWorktree,
}

fn wr_distance(
    wr: &PathBuf,
    subroot_relative_path: &RelPath,
    venv: Option<&PathBuf>,
) -> SubprojectDistance {
    if let Some(venv) = venv
        && let Ok(p) = venv.strip_prefix(wr)
    {
        p.components().count()
        if subroot_relative_path.components().next().is_some()
            && let Ok(distance) = p
                .strip_prefix(subroot_relative_path.as_std_path())
                .map(|p| p.components().count())
        {
            SubprojectDistance::WithinSubproject(Reverse(distance))
        } else {
            SubprojectDistance::WithinWorktree(Reverse(p.components().count()))
        }
    } else {
        usize::MAX
        SubprojectDistance::NotInWorktree
    }
}

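Not part of the diff: a standalone sketch of the ordering trick behind `SubprojectDistance`. A derived `Ord` ranks enum variants by declaration order, and wrapping the inner component count in `Reverse` flips that comparison, so within a variant a larger count compares as smaller. The names below are illustrative only.

use std::cmp::Reverse;

// Variants compare by declaration order: WithinSubproject < WithinWorktree < NotInWorktree.
// Inside a variant, Reverse(n) flips the usize comparison, so a larger n compares as smaller.
#[derive(PartialEq, Eq, PartialOrd, Ord)]
enum Distance {
    WithinSubproject(Reverse<usize>),
    WithinWorktree(Reverse<usize>),
    NotInWorktree,
}

fn main() {
    // Any subproject hit beats any plain worktree hit, regardless of depth.
    assert!(Distance::WithinSubproject(Reverse(10)) < Distance::WithinWorktree(Reverse(0)));
    // Within the same variant, the larger component count sorts first.
    assert!(Distance::WithinSubproject(Reverse(3)) < Distance::WithinSubproject(Reverse(1)));
    // Anything inside the worktree beats NotInWorktree.
    assert!(Distance::WithinWorktree(Reverse(7)) < Distance::NotInWorktree);
    println!("ordering holds");
}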
@@ -1170,11 +1190,14 @@ impl ToolchainLister for PythonToolchainProvider {
        });

        // Compare project paths against worktree root
        let proj_ordering = || {
            let lhs_project = lhs.project.clone().or_else(|| get_venv_parent_dir(lhs));
            let rhs_project = rhs.project.clone().or_else(|| get_venv_parent_dir(rhs));
            wr_distance(&wr, lhs_project.as_ref()).cmp(&wr_distance(&wr, rhs_project.as_ref()))
        };
        let proj_ordering =
            || {
                let lhs_project = lhs.project.clone().or_else(|| get_venv_parent_dir(lhs));
                let rhs_project = rhs.project.clone().or_else(|| get_venv_parent_dir(rhs));
                wr_distance(&wr, &subroot_relative_path, lhs_project.as_ref()).cmp(
                    &wr_distance(&wr, &subroot_relative_path, rhs_project.as_ref()),
                )
            };

        // Compare environment priorities
        let priority_ordering = || env_priority(lhs.kind).cmp(&env_priority(rhs.kind));

@@ -43,7 +43,7 @@ use std::{
|
||||
io,
|
||||
iter::{self, FromIterator},
|
||||
mem,
|
||||
ops::{self, AddAssign, Range, RangeBounds, Sub, SubAssign},
|
||||
ops::{self, AddAssign, ControlFlow, Range, RangeBounds, Sub, SubAssign},
|
||||
rc::Rc,
|
||||
str,
|
||||
sync::Arc,
|
||||
@@ -4618,7 +4618,24 @@ impl MultiBufferSnapshot {
|
||||
cx: &App,
|
||||
) -> BTreeMap<MultiBufferRow, IndentSize> {
|
||||
let mut result = BTreeMap::new();
|
||||
self.suggested_indents_callback(
|
||||
rows,
|
||||
|row, indent| {
|
||||
result.insert(row, indent);
|
||||
ControlFlow::Continue(())
|
||||
},
|
||||
cx,
|
||||
);
|
||||
result
|
||||
}
|
||||
|
||||
// move this to be a generator once those are a thing
|
||||
pub fn suggested_indents_callback(
|
||||
&self,
|
||||
rows: impl IntoIterator<Item = u32>,
|
||||
mut cb: impl FnMut(MultiBufferRow, IndentSize) -> ControlFlow<()>,
|
||||
cx: &App,
|
||||
) {
|
||||
let mut rows_for_excerpt = Vec::new();
|
||||
let mut cursor = self.cursor::<Point, Point>();
|
||||
let mut rows = rows.into_iter().peekable();
|
||||
@@ -4662,16 +4679,17 @@ impl MultiBufferSnapshot {
|
||||
let buffer_indents = region
|
||||
.buffer
|
||||
.suggested_indents(buffer_rows, single_indent_size);
|
||||
let multibuffer_indents = buffer_indents.into_iter().map(|(row, indent)| {
|
||||
(
|
||||
for (row, indent) in buffer_indents {
|
||||
if cb(
|
||||
MultiBufferRow(start_multibuffer_row + row - start_buffer_row),
|
||||
indent,
|
||||
)
|
||||
});
|
||||
result.extend(multibuffer_indents);
|
||||
.is_break()
|
||||
{
|
||||
return;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
result
|
||||
}
|
||||
|
||||
pub fn indent_size_for_line(&self, row: MultiBufferRow) -> IndentSize {
|
||||
|
||||
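Not from the diff: a minimal sketch of the `ControlFlow` callback pattern that the new `suggested_indents_callback` uses, assuming nothing about the real `MultiBufferSnapshot` types. One producer can back both a collect-everything wrapper and an early-exit consumer.

use std::ops::ControlFlow;

// Pushes (row, indent) pairs into `cb`, stopping early if the callback breaks.
fn suggested_values(mut cb: impl FnMut(u32, usize) -> ControlFlow<()>) {
    for row in 0..100u32 {
        let indent = (row % 4) as usize; // stand-in for a real computation
        if cb(row, indent).is_break() {
            return;
        }
    }
}

fn main() {
    // Collect-everything wrapper, like the BTreeMap-building method above.
    let mut all = Vec::new();
    suggested_values(|row, indent| {
        all.push((row, indent));
        ControlFlow::Continue(())
    });
    assert_eq!(all.len(), 100);

    // Early-exit consumer: stop after the first row with indent 3.
    let mut first = None;
    suggested_values(|row, indent| {
        if indent == 3 {
            first = Some(row);
            ControlFlow::Break(())
        } else {
            ControlFlow::Continue(())
        }
    });
    assert_eq!(first, Some(3));
}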
@@ -2,7 +2,8 @@ use anyhow::Context as _;
|
||||
use collections::{HashMap, HashSet};
|
||||
use fs::Fs;
|
||||
use gpui::{AsyncApp, Entity};
|
||||
use language::{Buffer, Diff, language_settings::language_settings};
|
||||
use language::language_settings::PrettierSettings;
|
||||
use language::{Buffer, Diff, Language, language_settings::language_settings};
|
||||
use lsp::{LanguageServer, LanguageServerId};
|
||||
use node_runtime::NodeRuntime;
|
||||
use paths::default_prettier_dir;
|
||||
@@ -349,7 +350,7 @@ impl Prettier {
|
||||
Self::Real(local) => {
|
||||
let params = buffer
|
||||
.update(cx, |buffer, cx| {
|
||||
let buffer_language = buffer.language();
|
||||
let buffer_language = buffer.language().map(|language| language.as_ref());
|
||||
let language_settings = language_settings(buffer_language.map(|l| l.name()), buffer.file(), cx);
|
||||
let prettier_settings = &language_settings.prettier;
|
||||
anyhow::ensure!(
|
||||
@@ -449,15 +450,7 @@ impl Prettier {
|
||||
})
|
||||
.collect();
|
||||
|
||||
let mut prettier_parser = prettier_settings.parser.as_deref();
|
||||
if buffer_path.is_none() {
|
||||
prettier_parser = prettier_parser.or_else(|| buffer_language.and_then(|language| language.prettier_parser_name()));
|
||||
if prettier_parser.is_none() {
|
||||
log::error!("Formatting unsaved file with prettier failed. No prettier parser configured for language {buffer_language:?}");
|
||||
anyhow::bail!("Cannot determine prettier parser for unsaved file");
|
||||
}
|
||||
|
||||
}
|
||||
let parser = prettier_parser_name(buffer_path.as_deref(), buffer_language, prettier_settings).context("getting prettier parser")?;
|
||||
|
||||
let ignore_path = ignore_dir.and_then(|dir| {
|
||||
let ignore_file = dir.join(".prettierignore");
|
||||
@@ -475,15 +468,15 @@ impl Prettier {
|
||||
anyhow::Ok(FormatParams {
|
||||
text: buffer.text(),
|
||||
options: FormatOptions {
|
||||
parser: prettier_parser.map(ToOwned::to_owned),
|
||||
plugins,
|
||||
path: buffer_path,
|
||||
parser,
|
||||
plugins,
|
||||
prettier_options,
|
||||
ignore_path,
|
||||
},
|
||||
})
|
||||
})?
|
||||
.context("building prettier request")?;
|
||||
})?
|
||||
.context("building prettier request")?;
|
||||
|
||||
let response = local
|
||||
.server
|
||||
@@ -503,7 +496,26 @@ impl Prettier {
|
||||
{
|
||||
Some("rust") => anyhow::bail!("prettier does not support Rust"),
|
||||
Some(_other) => {
|
||||
let formatted_text = buffer.text() + FORMAT_SUFFIX;
|
||||
let mut formatted_text = buffer.text() + FORMAT_SUFFIX;
|
||||
|
||||
let buffer_language =
|
||||
buffer.language().map(|language| language.as_ref());
|
||||
let language_settings = language_settings(
|
||||
buffer_language.map(|l| l.name()),
|
||||
buffer.file(),
|
||||
cx,
|
||||
);
|
||||
let prettier_settings = &language_settings.prettier;
|
||||
let parser = prettier_parser_name(
|
||||
buffer_path.as_deref(),
|
||||
buffer_language,
|
||||
prettier_settings,
|
||||
)?;
|
||||
|
||||
if let Some(parser) = parser {
|
||||
formatted_text = format!("{formatted_text}\n{parser}");
|
||||
}
|
||||
|
||||
Ok(buffer.diff(formatted_text, cx))
|
||||
}
|
||||
None => panic!("Should not format buffer without a language with prettier"),
|
||||
@@ -551,6 +563,40 @@ impl Prettier {
        }
    }
}

fn prettier_parser_name(
    buffer_path: Option<&Path>,
    buffer_language: Option<&Language>,
    prettier_settings: &PrettierSettings,
) -> anyhow::Result<Option<String>> {
    let parser = if buffer_path.is_none() {
        let parser = prettier_settings
            .parser
            .as_deref()
            .or_else(|| buffer_language.and_then(|language| language.prettier_parser_name()));
        if parser.is_none() {
            log::error!(
                "Formatting unsaved file with prettier failed. No prettier parser configured for language {buffer_language:?}"
            );
            anyhow::bail!("Cannot determine prettier parser for unsaved file");
        }
        parser
    } else if let (Some(buffer_language), Some(buffer_path)) = (buffer_language, buffer_path)
        && buffer_path.extension().is_some_and(|extension| {
            !buffer_language
                .config()
                .matcher
                .path_suffixes
                .contains(&extension.to_string_lossy().into_owned())
        })
    {
        buffer_language.prettier_parser_name()
    } else {
        prettier_settings.parser.as_deref()
    };

    Ok(parser.map(ToOwned::to_owned))
}

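A freestanding sketch (illustrative stand-in types, not the real `PrettierSettings`/`Language` API) of the fallback order `prettier_parser_name` implements: unsaved buffers take the configured parser or the language default and otherwise error; saved files whose extension the language does not claim fall back to the language default; everything else defers to the configured setting.

// Illustrative stand-ins for the real settings/language types.
struct Lang {
    default_parser: Option<&'static str>,
    suffixes: &'static [&'static str],
}

fn parser_for(
    path: Option<&str>,
    lang: Option<&Lang>,
    configured: Option<&str>,
) -> Result<Option<String>, String> {
    // True when the path has an extension that the detected language does not claim.
    let has_unclaimed_extension = |path: &str| {
        path.rsplit_once('.').is_some_and(|(_, ext)| {
            lang.is_some_and(|l| !l.suffixes.iter().any(|s| *s == ext))
        })
    };
    let parser = match path {
        // Unsaved buffer: configured parser, else the language default, else an error.
        None => match configured.or_else(|| lang.and_then(|l| l.default_parser)) {
            Some(p) => Some(p),
            None => return Err("cannot determine prettier parser for unsaved file".into()),
        },
        // Saved file with an extension the language does not claim: trust the language default.
        Some(p) if has_unclaimed_extension(p) => lang.and_then(|l| l.default_parser),
        // Otherwise let prettier infer from the path, honoring any configured override.
        Some(_) => configured,
    };
    Ok(parser.map(str::to_owned))
}

fn main() {
    let md = Lang { default_parser: Some("markdown"), suffixes: &["md"] };
    assert_eq!(parser_for(None, Some(&md), None).unwrap().as_deref(), Some("markdown"));
    assert_eq!(parser_for(Some("notes.txt"), Some(&md), None).unwrap().as_deref(), Some("markdown"));
    assert_eq!(parser_for(Some("notes.md"), Some(&md), None).unwrap(), None);
    assert!(parser_for(None, None, None).is_err());
}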
async fn has_prettier_in_node_modules(fs: &dyn Fs, path: &Path) -> anyhow::Result<bool> {
|
||||
let possible_node_modules_location = path.join("node_modules").join(PRETTIER_PACKAGE_NAME);
|
||||
if let Some(node_modules_location_metadata) = fs
|
||||
|
||||
@@ -238,6 +238,123 @@ mod ext_agent_tests {
|
||||
.collect();
|
||||
assert_eq!(remaining, vec!["custom".to_string()]);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn resolve_extension_icon_path_allows_valid_paths() {
|
||||
// Create a temporary directory structure for testing
|
||||
let temp_dir = tempfile::tempdir().unwrap();
|
||||
let extensions_dir = temp_dir.path();
|
||||
let ext_dir = extensions_dir.join("my-extension");
|
||||
std::fs::create_dir_all(&ext_dir).unwrap();
|
||||
|
||||
// Create a valid icon file
|
||||
let icon_path = ext_dir.join("icon.svg");
|
||||
std::fs::write(&icon_path, "<svg></svg>").unwrap();
|
||||
|
||||
// Test that a valid relative path works
|
||||
let result = super::resolve_extension_icon_path(extensions_dir, "my-extension", "icon.svg");
|
||||
assert!(result.is_some());
|
||||
assert!(result.unwrap().ends_with("icon.svg"));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn resolve_extension_icon_path_allows_nested_paths() {
|
||||
let temp_dir = tempfile::tempdir().unwrap();
|
||||
let extensions_dir = temp_dir.path();
|
||||
let ext_dir = extensions_dir.join("my-extension");
|
||||
let icons_dir = ext_dir.join("assets").join("icons");
|
||||
std::fs::create_dir_all(&icons_dir).unwrap();
|
||||
|
||||
let icon_path = icons_dir.join("logo.svg");
|
||||
std::fs::write(&icon_path, "<svg></svg>").unwrap();
|
||||
|
||||
let result = super::resolve_extension_icon_path(
|
||||
extensions_dir,
|
||||
"my-extension",
|
||||
"assets/icons/logo.svg",
|
||||
);
|
||||
assert!(result.is_some());
|
||||
assert!(result.unwrap().ends_with("logo.svg"));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn resolve_extension_icon_path_blocks_path_traversal() {
|
||||
let temp_dir = tempfile::tempdir().unwrap();
|
||||
let extensions_dir = temp_dir.path();
|
||||
|
||||
// Create two extension directories
|
||||
let ext1_dir = extensions_dir.join("extension1");
|
||||
let ext2_dir = extensions_dir.join("extension2");
|
||||
std::fs::create_dir_all(&ext1_dir).unwrap();
|
||||
std::fs::create_dir_all(&ext2_dir).unwrap();
|
||||
|
||||
// Create a file in extension2
|
||||
let secret_file = ext2_dir.join("secret.svg");
|
||||
std::fs::write(&secret_file, "<svg>secret</svg>").unwrap();
|
||||
|
||||
// Try to access extension2's file from extension1 using path traversal
|
||||
let result = super::resolve_extension_icon_path(
|
||||
extensions_dir,
|
||||
"extension1",
|
||||
"../extension2/secret.svg",
|
||||
);
|
||||
assert!(
|
||||
result.is_none(),
|
||||
"Path traversal to sibling extension should be blocked"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn resolve_extension_icon_path_blocks_absolute_escape() {
|
||||
let temp_dir = tempfile::tempdir().unwrap();
|
||||
let extensions_dir = temp_dir.path();
|
||||
let ext_dir = extensions_dir.join("my-extension");
|
||||
std::fs::create_dir_all(&ext_dir).unwrap();
|
||||
|
||||
// Create a file outside the extensions directory
|
||||
let outside_file = temp_dir.path().join("outside.svg");
|
||||
std::fs::write(&outside_file, "<svg>outside</svg>").unwrap();
|
||||
|
||||
// Try to escape to parent directory
|
||||
let result =
|
||||
super::resolve_extension_icon_path(extensions_dir, "my-extension", "../outside.svg");
|
||||
assert!(
|
||||
result.is_none(),
|
||||
"Path traversal to parent directory should be blocked"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn resolve_extension_icon_path_blocks_deep_traversal() {
|
||||
let temp_dir = tempfile::tempdir().unwrap();
|
||||
let extensions_dir = temp_dir.path();
|
||||
let ext_dir = extensions_dir.join("my-extension");
|
||||
std::fs::create_dir_all(&ext_dir).unwrap();
|
||||
|
||||
// Try deep path traversal
|
||||
let result = super::resolve_extension_icon_path(
|
||||
extensions_dir,
|
||||
"my-extension",
|
||||
"../../../../../../etc/passwd",
|
||||
);
|
||||
assert!(
|
||||
result.is_none(),
|
||||
"Deep path traversal should be blocked (file doesn't exist)"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn resolve_extension_icon_path_returns_none_for_nonexistent() {
|
||||
let temp_dir = tempfile::tempdir().unwrap();
|
||||
let extensions_dir = temp_dir.path();
|
||||
let ext_dir = extensions_dir.join("my-extension");
|
||||
std::fs::create_dir_all(&ext_dir).unwrap();
|
||||
|
||||
// Try to access a file that doesn't exist
|
||||
let result =
|
||||
super::resolve_extension_icon_path(extensions_dir, "my-extension", "nonexistent.svg");
|
||||
assert!(result.is_none(), "Nonexistent file should return None");
|
||||
}
|
||||
}
|
||||
|
||||
impl AgentServerStore {
|
||||
@@ -274,20 +391,18 @@ impl AgentServerStore {
|
||||
extension_agents.clear();
|
||||
for (ext_id, manifest) in manifests {
|
||||
for (agent_name, agent_entry) in &manifest.agent_servers {
|
||||
// Store absolute icon path if provided, resolving symlinks for dev extensions
|
||||
let icon_path = if let Some(icon) = &agent_entry.icon {
|
||||
let icon_path = extensions_dir.join(ext_id).join(icon);
|
||||
// Canonicalize to resolve symlinks (dev extensions are symlinked)
|
||||
let absolute_icon_path = icon_path
|
||||
.canonicalize()
|
||||
.unwrap_or(icon_path)
|
||||
.to_string_lossy()
|
||||
.to_string();
|
||||
self.agent_icons.insert(
|
||||
ExternalAgentServerName(agent_name.clone().into()),
|
||||
SharedString::from(absolute_icon_path.clone()),
|
||||
);
|
||||
Some(absolute_icon_path)
|
||||
if let Some(absolute_icon_path) =
|
||||
resolve_extension_icon_path(&extensions_dir, ext_id, icon)
|
||||
{
|
||||
self.agent_icons.insert(
|
||||
ExternalAgentServerName(agent_name.clone().into()),
|
||||
SharedString::from(absolute_icon_path.clone()),
|
||||
);
|
||||
Some(absolute_icon_path)
|
||||
} else {
|
||||
None
|
||||
}
|
||||
} else {
|
||||
None
|
||||
};
|
||||
@@ -310,23 +425,18 @@ impl AgentServerStore {
|
||||
let mut agents = vec![];
|
||||
for (ext_id, manifest) in manifests {
|
||||
for (agent_name, agent_entry) in &manifest.agent_servers {
|
||||
// Store absolute icon path if provided, resolving symlinks for dev extensions
|
||||
let icon = if let Some(icon) = &agent_entry.icon {
|
||||
let icon_path = extensions_dir.join(ext_id).join(icon);
|
||||
// Canonicalize to resolve symlinks (dev extensions are symlinked)
|
||||
let absolute_icon_path = icon_path
|
||||
.canonicalize()
|
||||
.unwrap_or(icon_path)
|
||||
.to_string_lossy()
|
||||
.to_string();
|
||||
|
||||
// Store icon locally for remote client
|
||||
self.agent_icons.insert(
|
||||
ExternalAgentServerName(agent_name.clone().into()),
|
||||
SharedString::from(absolute_icon_path.clone()),
|
||||
);
|
||||
|
||||
Some(absolute_icon_path)
|
||||
if let Some(absolute_icon_path) =
|
||||
resolve_extension_icon_path(&extensions_dir, ext_id, icon)
|
||||
{
|
||||
self.agent_icons.insert(
|
||||
ExternalAgentServerName(agent_name.clone().into()),
|
||||
SharedString::from(absolute_icon_path.clone()),
|
||||
);
|
||||
Some(absolute_icon_path)
|
||||
} else {
|
||||
None
|
||||
}
|
||||
} else {
|
||||
None
|
||||
};
|
||||
@@ -368,7 +478,49 @@ impl AgentServerStore {
    pub fn agent_icon(&self, name: &ExternalAgentServerName) -> Option<SharedString> {
        self.agent_icons.get(name).cloned()
    }
}

/// Safely resolves an extension icon path, ensuring it stays within the extension directory.
/// Returns `None` if the path would escape the extension directory (path traversal attack).
fn resolve_extension_icon_path(
    extensions_dir: &Path,
    extension_id: &str,
    icon_relative_path: &str,
) -> Option<String> {
    let extension_root = extensions_dir.join(extension_id);
    let icon_path = extension_root.join(icon_relative_path);

    // Canonicalize both paths to resolve symlinks and normalize the paths.
    // For the extension root, we need to handle the case where it might be a symlink
    // (common for dev extensions).
    let canonical_extension_root = extension_root.canonicalize().unwrap_or(extension_root);
    let canonical_icon_path = match icon_path.canonicalize() {
        Ok(path) => path,
        Err(err) => {
            log::warn!(
                "Failed to canonicalize icon path for extension '{}': {} (path: {})",
                extension_id,
                err,
                icon_relative_path
            );
            return None;
        }
    };

    // Verify the resolved icon path is within the extension directory
    if canonical_icon_path.starts_with(&canonical_extension_root) {
        Some(canonical_icon_path.to_string_lossy().to_string())
    } else {
        log::warn!(
            "Icon path '{}' for extension '{}' escapes extension directory, ignoring for security",
            icon_relative_path,
            extension_id
        );
        None
    }
}

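A minimal standalone sketch of the canonicalize-then-prefix-check pattern used by `resolve_extension_icon_path`; the demo paths and file names below are made up.

use std::fs;
use std::io;
use std::path::{Path, PathBuf};

// Canonicalize both paths (resolving `..` and symlinks), then require the
// candidate to still live under the root. Canonicalization fails for paths
// that do not exist, which doubles as the "nonexistent icon" check.
fn resolve_within(root: &Path, relative: &str) -> io::Result<Option<PathBuf>> {
    let root = root.canonicalize()?;
    let candidate = root.join(relative).canonicalize()?;
    Ok(candidate.starts_with(&root).then_some(candidate))
}

fn main() -> io::Result<()> {
    // Build a throwaway layout under the OS temp dir: ext/icon.svg plus a sibling secret.svg.
    let base = std::env::temp_dir().join("icon-path-demo");
    let ext = base.join("ext");
    fs::create_dir_all(&ext)?;
    fs::write(ext.join("icon.svg"), "<svg/>")?;
    fs::write(base.join("secret.svg"), "<svg/>")?;

    // A plain relative path resolves inside the extension directory.
    assert!(resolve_within(&ext, "icon.svg")?.is_some());
    // A `..` escape canonicalizes outside the root and is rejected.
    assert_eq!(resolve_within(&ext, "../secret.svg")?, None);
    // A missing file surfaces as an io::Error from canonicalize.
    assert!(resolve_within(&ext, "missing.svg").is_err());

    fs::remove_dir_all(&base)?;
    println!("traversal checks passed");
    Ok(())
}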
impl AgentServerStore {
|
||||
pub fn init_remote(session: &AnyProtoClient) {
|
||||
session.add_entity_message_handler(Self::handle_external_agents_updated);
|
||||
session.add_entity_message_handler(Self::handle_loading_status_updated);
|
||||
@@ -453,7 +605,9 @@ impl AgentServerStore {
|
||||
.clone()
|
||||
.and_then(|settings| settings.custom_command()),
|
||||
http_client: http_client.clone(),
|
||||
is_remote: downstream_client.is_some(),
|
||||
no_browser: downstream_client
|
||||
.as_ref()
|
||||
.is_some_and(|(_, client)| !client.has_wsl_interop()),
|
||||
}),
|
||||
);
|
||||
self.external_agents.insert(
|
||||
@@ -1355,7 +1509,7 @@ struct LocalCodex {
|
||||
project_environment: Entity<ProjectEnvironment>,
|
||||
http_client: Arc<dyn HttpClient>,
|
||||
custom_command: Option<AgentServerCommand>,
|
||||
is_remote: bool,
|
||||
no_browser: bool,
|
||||
}
|
||||
|
||||
impl ExternalAgentServer for LocalCodex {
|
||||
@@ -1375,7 +1529,7 @@ impl ExternalAgentServer for LocalCodex {
|
||||
.map(|root_dir| Path::new(root_dir))
|
||||
.unwrap_or(paths::home_dir())
|
||||
.into();
|
||||
let is_remote = self.is_remote;
|
||||
let no_browser = self.no_browser;
|
||||
|
||||
cx.spawn(async move |cx| {
|
||||
let mut env = project_environment
|
||||
@@ -1388,7 +1542,7 @@ impl ExternalAgentServer for LocalCodex {
|
||||
})?
|
||||
.await
|
||||
.unwrap_or_default();
|
||||
if is_remote {
|
||||
if no_browser {
|
||||
env.insert("NO_BROWSER".to_owned(), "1".to_owned());
|
||||
}
|
||||
|
||||
|
||||
@@ -472,6 +472,8 @@ impl GitStore {
|
||||
client.add_entity_request_handler(Self::handle_change_branch);
|
||||
client.add_entity_request_handler(Self::handle_create_branch);
|
||||
client.add_entity_request_handler(Self::handle_rename_branch);
|
||||
client.add_entity_request_handler(Self::handle_create_remote);
|
||||
client.add_entity_request_handler(Self::handle_remove_remote);
|
||||
client.add_entity_request_handler(Self::handle_delete_branch);
|
||||
client.add_entity_request_handler(Self::handle_git_init);
|
||||
client.add_entity_request_handler(Self::handle_push);
|
||||
@@ -2274,6 +2276,25 @@ impl GitStore {
|
||||
Ok(proto::Ack {})
|
||||
}
|
||||
|
||||
async fn handle_create_remote(
|
||||
this: Entity<Self>,
|
||||
envelope: TypedEnvelope<proto::GitCreateRemote>,
|
||||
mut cx: AsyncApp,
|
||||
) -> Result<proto::Ack> {
|
||||
let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
|
||||
let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
|
||||
let remote_name = envelope.payload.remote_name;
|
||||
let remote_url = envelope.payload.remote_url;
|
||||
|
||||
repository_handle
|
||||
.update(&mut cx, |repository_handle, _| {
|
||||
repository_handle.create_remote(remote_name, remote_url)
|
||||
})?
|
||||
.await??;
|
||||
|
||||
Ok(proto::Ack {})
|
||||
}
|
||||
|
||||
async fn handle_delete_branch(
|
||||
this: Entity<Self>,
|
||||
envelope: TypedEnvelope<proto::GitDeleteBranch>,
|
||||
@@ -2292,6 +2313,24 @@ impl GitStore {
|
||||
Ok(proto::Ack {})
|
||||
}
|
||||
|
||||
async fn handle_remove_remote(
|
||||
this: Entity<Self>,
|
||||
envelope: TypedEnvelope<proto::GitRemoveRemote>,
|
||||
mut cx: AsyncApp,
|
||||
) -> Result<proto::Ack> {
|
||||
let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
|
||||
let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
|
||||
let remote_name = envelope.payload.remote_name;
|
||||
|
||||
repository_handle
|
||||
.update(&mut cx, |repository_handle, _| {
|
||||
repository_handle.remove_remote(remote_name)
|
||||
})?
|
||||
.await??;
|
||||
|
||||
Ok(proto::Ack {})
|
||||
}
|
||||
|
||||
async fn handle_show(
|
||||
this: Entity<Self>,
|
||||
envelope: TypedEnvelope<proto::GitShow>,
|
||||
@@ -3257,6 +3296,8 @@ impl RepositorySnapshot {
|
||||
.iter()
|
||||
.map(stash_to_proto)
|
||||
.collect(),
|
||||
remote_upstream_url: self.remote_upstream_url.clone(),
|
||||
remote_origin_url: self.remote_origin_url.clone(),
|
||||
}
|
||||
}
|
||||
|
||||
@@ -3326,6 +3367,8 @@ impl RepositorySnapshot {
|
||||
.iter()
|
||||
.map(stash_to_proto)
|
||||
.collect(),
|
||||
remote_upstream_url: self.remote_upstream_url.clone(),
|
||||
remote_origin_url: self.remote_origin_url.clone(),
|
||||
}
|
||||
}
|
||||
|
||||
@@ -4865,6 +4908,61 @@ impl Repository {
|
||||
)
|
||||
}
|
||||
|
||||
pub fn create_remote(
|
||||
&mut self,
|
||||
remote_name: String,
|
||||
remote_url: String,
|
||||
) -> oneshot::Receiver<Result<()>> {
|
||||
let id = self.id;
|
||||
self.send_job(
|
||||
Some(format!("git remote add {remote_name} {remote_url}").into()),
|
||||
move |repo, _cx| async move {
|
||||
match repo {
|
||||
RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
|
||||
backend.create_remote(remote_name, remote_url).await
|
||||
}
|
||||
RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
|
||||
client
|
||||
.request(proto::GitCreateRemote {
|
||||
project_id: project_id.0,
|
||||
repository_id: id.to_proto(),
|
||||
remote_name,
|
||||
remote_url,
|
||||
})
|
||||
.await?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
},
|
||||
)
|
||||
}
|
||||
|
||||
    pub fn remove_remote(&mut self, remote_name: String) -> oneshot::Receiver<Result<()>> {
        let id = self.id;
        self.send_job(
            Some(format!("git remote remove {remote_name}").into()),
            move |repo, _cx| async move {
                match repo {
                    RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                        backend.remove_remote(remote_name).await
                    }
                    RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                        client
                            .request(proto::GitRemoveRemote {
                                project_id: project_id.0,
                                repository_id: id.to_proto(),
                                remote_name,
                            })
                            .await?;

                        Ok(())
                    }
                }
            },
        )
    }

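For reference only: the CLI-level equivalents of the new create/remove remote operations, sketched with `std::process::Command`. This is not how the git backend here is implemented; it just shows the plain git commands the new RPCs correspond to.

use std::process::Command;

// Run a git subcommand in `repo` and report failure as an error string.
fn git(repo: &str, args: &[&str]) -> Result<(), String> {
    let status = Command::new("git")
        .arg("-C")
        .arg(repo)
        .args(args)
        .status()
        .map_err(|e| e.to_string())?;
    if status.success() { Ok(()) } else { Err(format!("git {args:?} failed: {status}")) }
}

fn create_remote(repo: &str, name: &str, url: &str) -> Result<(), String> {
    git(repo, &["remote", "add", name, url])
}

fn remove_remote(repo: &str, name: &str) -> Result<(), String> {
    git(repo, &["remote", "remove", name])
}

fn main() {
    // Example usage against a repository path; point this at a real checkout before running.
    let repo = ".";
    if let Err(err) = create_remote(repo, "demo", "https://example.com/demo.git") {
        eprintln!("{err}");
    }
    if let Err(err) = remove_remote(repo, "demo") {
        eprintln!("{err}");
    }
}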
pub fn get_remotes(
|
||||
&mut self,
|
||||
branch_name: Option<String>,
|
||||
@@ -4902,7 +5000,7 @@ impl Repository {
|
||||
let remotes = response
|
||||
.remotes
|
||||
.into_iter()
|
||||
.map(|remotes| git::repository::Remote {
|
||||
.map(|remotes| Remote {
|
||||
name: remotes.name.into(),
|
||||
})
|
||||
.collect();
|
||||
@@ -5301,6 +5399,8 @@ impl Repository {
|
||||
cx.emit(RepositoryEvent::StashEntriesChanged)
|
||||
}
|
||||
self.snapshot.stash_entries = new_stash_entries;
|
||||
self.snapshot.remote_upstream_url = update.remote_upstream_url;
|
||||
self.snapshot.remote_origin_url = update.remote_origin_url;
|
||||
|
||||
let edits = update
|
||||
.removed_statuses
|
||||
@@ -5860,11 +5960,7 @@ fn serialize_blame_buffer_response(blame: Option<git::blame::Blame>) -> proto::B
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
proto::BlameBufferResponse {
|
||||
blame_response: Some(proto::blame_buffer_response::BlameResponse {
|
||||
entries,
|
||||
messages,
|
||||
remote_url: blame.remote_url,
|
||||
}),
|
||||
blame_response: Some(proto::blame_buffer_response::BlameResponse { entries, messages }),
|
||||
}
|
||||
}
|
||||
|
||||
@@ -5901,11 +5997,7 @@ fn deserialize_blame_buffer_response(
|
||||
.filter_map(|message| Some((git::Oid::from_bytes(&message.oid).ok()?, message.message)))
|
||||
.collect::<HashMap<_, _>>();
|
||||
|
||||
Some(Blame {
|
||||
entries,
|
||||
messages,
|
||||
remote_url: response.remote_url,
|
||||
})
|
||||
Some(Blame { entries, messages })
|
||||
}
|
||||
|
||||
fn branch_to_proto(branch: &git::repository::Branch) -> proto::Branch {
|
||||
@@ -6053,7 +6145,6 @@ async fn compute_snapshot(
|
||||
events.push(RepositoryEvent::BranchChanged);
|
||||
}
|
||||
|
||||
// Used by edit prediction data collection
|
||||
let remote_origin_url = backend.remote_url("origin").await;
|
||||
let remote_upstream_url = backend.remote_url("upstream").await;
|
||||
|
||||
|
||||
@@ -93,9 +93,6 @@ enum FindSearchCandidates {
|
||||
/// based on disk contents of a buffer. This step is not performed for buffers we already have in memory.
|
||||
confirm_contents_will_match_tx: Sender<MatchingEntry>,
|
||||
confirm_contents_will_match_rx: Receiver<MatchingEntry>,
|
||||
/// Of those that contain at least one match (or are already in memory), look for rest of matches (and figure out their ranges).
|
||||
/// But wait - first, we need to go back to the main thread to open a buffer (& create an entity for it).
|
||||
get_buffer_for_full_scan_tx: Sender<ProjectPath>,
|
||||
},
|
||||
Remote,
|
||||
OpenBuffersOnly,
|
||||
@@ -226,7 +223,7 @@ impl Search {
|
||||
.boxed_local(),
|
||||
cx.background_spawn(Self::maintain_sorted_search_results(
|
||||
sorted_search_results_rx,
|
||||
get_buffer_for_full_scan_tx.clone(),
|
||||
get_buffer_for_full_scan_tx,
|
||||
self.limit,
|
||||
))
|
||||
.boxed_local(),
|
||||
@@ -234,7 +231,6 @@ impl Search {
|
||||
(
|
||||
FindSearchCandidates::Local {
|
||||
fs,
|
||||
get_buffer_for_full_scan_tx,
|
||||
confirm_contents_will_match_tx,
|
||||
confirm_contents_will_match_rx,
|
||||
input_paths_rx,
|
||||
@@ -593,7 +589,6 @@ impl Worker<'_> {
|
||||
input_paths_rx,
|
||||
confirm_contents_will_match_rx,
|
||||
mut confirm_contents_will_match_tx,
|
||||
mut get_buffer_for_full_scan_tx,
|
||||
fs,
|
||||
) = match self.candidates {
|
||||
FindSearchCandidates::Local {
|
||||
@@ -601,21 +596,15 @@ impl Worker<'_> {
|
||||
input_paths_rx,
|
||||
confirm_contents_will_match_rx,
|
||||
confirm_contents_will_match_tx,
|
||||
get_buffer_for_full_scan_tx,
|
||||
} => (
|
||||
input_paths_rx,
|
||||
confirm_contents_will_match_rx,
|
||||
confirm_contents_will_match_tx,
|
||||
get_buffer_for_full_scan_tx,
|
||||
Some(fs),
|
||||
),
|
||||
FindSearchCandidates::Remote | FindSearchCandidates::OpenBuffersOnly => (
|
||||
unbounded().1,
|
||||
unbounded().1,
|
||||
unbounded().0,
|
||||
unbounded().0,
|
||||
None,
|
||||
),
|
||||
FindSearchCandidates::Remote | FindSearchCandidates::OpenBuffersOnly => {
|
||||
(unbounded().1, unbounded().1, unbounded().0, None)
|
||||
}
|
||||
};
|
||||
// WorkerA: grabs a request for "find all matches in file/a" <- takes 5 minutes
|
||||
// right after: WorkerB: grabs a request for "find all matches in file/b" <- takes 5 seconds
|
||||
@@ -629,7 +618,6 @@ impl Worker<'_> {
|
||||
open_entries: &self.open_buffers,
|
||||
fs: fs.as_deref(),
|
||||
confirm_contents_will_match_tx: &confirm_contents_will_match_tx,
|
||||
get_buffer_for_full_scan_tx: &get_buffer_for_full_scan_tx,
|
||||
};
|
||||
// Whenever we notice that some step of a pipeline is closed, we don't want to close subsequent
|
||||
// steps straight away. Another worker might be about to produce a value that will
|
||||
@@ -645,10 +633,7 @@ impl Worker<'_> {
|
||||
find_first_match = find_first_match.next() => {
|
||||
if let Some(buffer_with_at_least_one_match) = find_first_match {
|
||||
handler.handle_find_first_match(buffer_with_at_least_one_match).await;
|
||||
} else {
|
||||
get_buffer_for_full_scan_tx = bounded(1).0;
|
||||
}
|
||||
|
||||
},
|
||||
scan_path = scan_path.next() => {
|
||||
if let Some(path_to_scan) = scan_path {
|
||||
@@ -673,7 +658,6 @@ struct RequestHandler<'worker> {
|
||||
fs: Option<&'worker dyn Fs>,
|
||||
open_entries: &'worker HashSet<ProjectEntryId>,
|
||||
confirm_contents_will_match_tx: &'worker Sender<MatchingEntry>,
|
||||
get_buffer_for_full_scan_tx: &'worker Sender<ProjectPath>,
|
||||
}
|
||||
|
||||
impl RequestHandler<'_> {
|
||||
@@ -729,9 +713,8 @@ impl RequestHandler<'_> {
|
||||
_ = maybe!(async move {
|
||||
let InputPath {
|
||||
entry,
|
||||
|
||||
snapshot,
|
||||
should_scan_tx,
|
||||
mut should_scan_tx,
|
||||
} = req;
|
||||
|
||||
if entry.is_fifo || !entry.is_file() {
|
||||
@@ -754,7 +737,7 @@ impl RequestHandler<'_> {
|
||||
if self.open_entries.contains(&entry.id) {
|
||||
// The buffer is already in memory and that's the version we want to scan;
|
||||
// hence skip the dilly-dally and look for all matches straight away.
|
||||
self.get_buffer_for_full_scan_tx
|
||||
should_scan_tx
|
||||
.send(ProjectPath {
|
||||
worktree_id: snapshot.id(),
|
||||
path: entry.path.clone(),
|
||||
|
||||
@@ -124,6 +124,8 @@ message UpdateRepository {
|
||||
optional GitCommitDetails head_commit_details = 11;
|
||||
optional string merge_message = 12;
|
||||
repeated StashEntry stash_entries = 13;
|
||||
optional string remote_upstream_url = 14;
|
||||
optional string remote_origin_url = 15;
|
||||
}
|
||||
|
||||
message RemoveRepository {
|
||||
@@ -190,6 +192,19 @@ message GitRenameBranch {
|
||||
string new_name = 4;
|
||||
}
|
||||
|
||||
message GitCreateRemote {
|
||||
uint64 project_id = 1;
|
||||
uint64 repository_id = 2;
|
||||
string remote_name = 3;
|
||||
string remote_url = 4;
|
||||
}
|
||||
|
||||
message GitRemoveRemote {
|
||||
uint64 project_id = 1;
|
||||
uint64 repository_id = 2;
|
||||
string remote_name = 3;
|
||||
}
|
||||
|
||||
message GitDeleteBranch {
|
||||
uint64 project_id = 1;
|
||||
uint64 repository_id = 2;
|
||||
@@ -487,8 +502,8 @@ message BlameBufferResponse {
|
||||
message BlameResponse {
|
||||
repeated BlameEntry entries = 1;
|
||||
repeated CommitMessage messages = 2;
|
||||
optional string remote_url = 4;
|
||||
reserved 3;
|
||||
reserved 4;
|
||||
}
|
||||
|
||||
optional BlameResponse blame_response = 5;
|
||||
|
||||
@@ -437,13 +437,18 @@ message Envelope {
|
||||
OpenImageResponse open_image_response = 392;
|
||||
CreateImageForPeer create_image_for_peer = 393;
|
||||
|
||||
|
||||
GitFileHistory git_file_history = 397;
|
||||
GitFileHistoryResponse git_file_history_response = 398;
|
||||
|
||||
RunGitHook run_git_hook = 399;
|
||||
|
||||
GitDeleteBranch git_delete_branch = 400;
|
||||
ExternalExtensionAgentsUpdated external_extension_agents_updated = 401; // current max
|
||||
|
||||
ExternalExtensionAgentsUpdated external_extension_agents_updated = 401;
|
||||
|
||||
GitCreateRemote git_create_remote = 402;
|
||||
GitRemoveRemote git_remove_remote = 403;// current max
|
||||
}
|
||||
|
||||
reserved 87 to 88, 396;
|
||||
|
||||
@@ -305,6 +305,8 @@ messages!(
|
||||
(RemoteMessageResponse, Background),
|
||||
(AskPassRequest, Background),
|
||||
(AskPassResponse, Background),
|
||||
(GitCreateRemote, Background),
|
||||
(GitRemoveRemote, Background),
|
||||
(GitCreateBranch, Background),
|
||||
(GitChangeBranch, Background),
|
||||
(GitRenameBranch, Background),
|
||||
@@ -504,6 +506,8 @@ request_messages!(
|
||||
(GetRemotes, GetRemotesResponse),
|
||||
(Pull, RemoteMessageResponse),
|
||||
(AskPassRequest, AskPassResponse),
|
||||
(GitCreateRemote, Ack),
|
||||
(GitRemoveRemote, Ack),
|
||||
(GitCreateBranch, Ack),
|
||||
(GitChangeBranch, Ack),
|
||||
(GitRenameBranch, Ack),
|
||||
@@ -676,6 +680,8 @@ entity_messages!(
|
||||
GitChangeBranch,
|
||||
GitRenameBranch,
|
||||
GitCreateBranch,
|
||||
GitCreateRemote,
|
||||
GitRemoveRemote,
|
||||
CheckForPushedCommits,
|
||||
GitDiff,
|
||||
GitInit,
|
||||
|
||||
@@ -43,7 +43,6 @@ urlencoding.workspace = true
|
||||
util.workspace = true
|
||||
which.workspace = true
|
||||
|
||||
|
||||
[dev-dependencies]
|
||||
gpui = { workspace = true, features = ["test-support"] }
|
||||
fs = { workspace = true, features = ["test-support"] }
|
||||
|
||||
@@ -328,8 +328,15 @@ impl RemoteClient {
|
||||
let (incoming_tx, incoming_rx) = mpsc::unbounded::<Envelope>();
|
||||
let (connection_activity_tx, connection_activity_rx) = mpsc::channel::<()>(1);
|
||||
|
||||
let client =
|
||||
cx.update(|cx| ChannelClient::new(incoming_rx, outgoing_tx, cx, "client"))?;
|
||||
let client = cx.update(|cx| {
|
||||
ChannelClient::new(
|
||||
incoming_rx,
|
||||
outgoing_tx,
|
||||
cx,
|
||||
"client",
|
||||
remote_connection.has_wsl_interop(),
|
||||
)
|
||||
})?;
|
||||
|
||||
let path_style = remote_connection.path_style();
|
||||
let this = cx.new(|_| Self {
|
||||
@@ -420,8 +427,9 @@ impl RemoteClient {
|
||||
outgoing_tx: mpsc::UnboundedSender<Envelope>,
|
||||
cx: &App,
|
||||
name: &'static str,
|
||||
has_wsl_interop: bool,
|
||||
) -> AnyProtoClient {
|
||||
ChannelClient::new(incoming_rx, outgoing_tx, cx, name).into()
|
||||
ChannelClient::new(incoming_rx, outgoing_tx, cx, name, has_wsl_interop).into()
|
||||
}
|
||||
|
||||
pub fn shutdown_processes<T: RequestMessage>(
|
||||
@@ -921,8 +929,8 @@ impl RemoteClient {
|
||||
});
|
||||
let (outgoing_tx, _) = mpsc::unbounded::<Envelope>();
|
||||
let (_, incoming_rx) = mpsc::unbounded::<Envelope>();
|
||||
let server_client =
|
||||
server_cx.update(|cx| ChannelClient::new(incoming_rx, outgoing_tx, cx, "fake-server"));
|
||||
let server_client = server_cx
|
||||
.update(|cx| ChannelClient::new(incoming_rx, outgoing_tx, cx, "fake-server", false));
|
||||
let connection: Arc<dyn RemoteConnection> = Arc::new(fake::FakeRemoteConnection {
|
||||
connection_options: opts.clone(),
|
||||
server_cx: fake::SendableCx::new(server_cx),
|
||||
@@ -1140,6 +1148,7 @@ pub trait RemoteConnection: Send + Sync {
|
||||
fn path_style(&self) -> PathStyle;
|
||||
fn shell(&self) -> String;
|
||||
fn default_system_shell(&self) -> String;
|
||||
fn has_wsl_interop(&self) -> bool;
|
||||
|
||||
#[cfg(any(test, feature = "test-support"))]
|
||||
fn simulate_disconnect(&self, _: &AsyncApp) {}
|
||||
@@ -1188,6 +1197,7 @@ struct ChannelClient {
|
||||
name: &'static str,
|
||||
task: Mutex<Task<Result<()>>>,
|
||||
remote_started: Signal<()>,
|
||||
has_wsl_interop: bool,
|
||||
}
|
||||
|
||||
impl ChannelClient {
|
||||
@@ -1196,6 +1206,7 @@ impl ChannelClient {
|
||||
outgoing_tx: mpsc::UnboundedSender<Envelope>,
|
||||
cx: &App,
|
||||
name: &'static str,
|
||||
has_wsl_interop: bool,
|
||||
) -> Arc<Self> {
|
||||
Arc::new_cyclic(|this| Self {
|
||||
outgoing_tx: Mutex::new(outgoing_tx),
|
||||
@@ -1211,6 +1222,7 @@ impl ChannelClient {
|
||||
&cx.to_async(),
|
||||
)),
|
||||
remote_started: Signal::new(cx),
|
||||
has_wsl_interop,
|
||||
})
|
||||
}
|
||||
|
||||
@@ -1489,6 +1501,10 @@ impl ProtoClient for ChannelClient {
|
||||
fn is_via_collab(&self) -> bool {
|
||||
false
|
||||
}
|
||||
|
||||
fn has_wsl_interop(&self) -> bool {
|
||||
self.has_wsl_interop
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(any(test, feature = "test-support"))]
|
||||
@@ -1652,6 +1668,10 @@ mod fake {
|
||||
fn default_system_shell(&self) -> String {
|
||||
"sh".to_owned()
|
||||
}
|
||||
|
||||
fn has_wsl_interop(&self) -> bool {
|
||||
false
|
||||
}
|
||||
}
|
||||
|
||||
pub(super) struct Delegate;
|
||||
|
||||
@@ -131,11 +131,7 @@ async fn build_remote_server_from_source(
|
||||
let build_remote_server =
|
||||
std::env::var("ZED_BUILD_REMOTE_SERVER").unwrap_or("nocompress".into());
|
||||
|
||||
if build_remote_server == "false"
|
||||
|| build_remote_server == "no"
|
||||
|| build_remote_server == "off"
|
||||
|| build_remote_server == "0"
|
||||
{
|
||||
if let "false" | "no" | "off" | "0" = &*build_remote_server {
|
||||
return Ok(None);
|
||||
}
|
||||
|
||||
|
||||
@@ -394,6 +394,10 @@ impl RemoteConnection for SshRemoteConnection {
|
||||
fn path_style(&self) -> PathStyle {
|
||||
self.ssh_path_style
|
||||
}
|
||||
|
||||
fn has_wsl_interop(&self) -> bool {
|
||||
false
|
||||
}
|
||||
}
|
||||
|
||||
impl SshRemoteConnection {
|
||||
|
||||
@@ -47,6 +47,7 @@ pub(crate) struct WslRemoteConnection {
|
||||
shell: String,
|
||||
shell_kind: ShellKind,
|
||||
default_system_shell: String,
|
||||
has_wsl_interop: bool,
|
||||
connection_options: WslConnectionOptions,
|
||||
}
|
||||
|
||||
@@ -71,6 +72,7 @@ impl WslRemoteConnection {
|
||||
shell: String::new(),
|
||||
shell_kind: ShellKind::Posix,
|
||||
default_system_shell: String::from("/bin/sh"),
|
||||
has_wsl_interop: false,
|
||||
};
|
||||
delegate.set_status(Some("Detecting WSL environment"), cx);
|
||||
this.shell = this
|
||||
@@ -79,6 +81,15 @@ impl WslRemoteConnection {
|
||||
.context("failed detecting shell")?;
|
||||
log::info!("Remote shell discovered: {}", this.shell);
|
||||
this.shell_kind = ShellKind::new(&this.shell, false);
|
||||
this.has_wsl_interop = this.detect_has_wsl_interop().await.unwrap_or_default();
|
||||
log::info!(
|
||||
"Remote has wsl interop {}",
|
||||
if this.has_wsl_interop {
|
||||
"enabled"
|
||||
} else {
|
||||
"disabled"
|
||||
}
|
||||
);
|
||||
this.platform = this
|
||||
.detect_platform()
|
||||
.await
|
||||
@@ -115,6 +126,14 @@ impl WslRemoteConnection {
            .unwrap_or_else(|| "/bin/sh".to_string()))
    }

    async fn detect_has_wsl_interop(&self) -> Result<bool> {
        Ok(self
            .run_wsl_command_with_output("cat", &["/proc/sys/fs/binfmt_misc/WSLInterop"])
            .await
            .inspect_err(|err| log::error!("Failed to detect wsl interop: {err}"))?
            .contains("enabled"))
    }

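A standalone sketch of the same interop probe for a process already running inside the distro (this mirrors the direct file read that `execute_run` performs later in this diff, rather than shelling out through WSL):

use std::fs;

// WSL exposes Windows-interop state through binfmt_misc; the registration file
// contains "enabled" or "disabled" inside a WSL distro and is absent elsewhere.
fn has_wsl_interop() -> bool {
    fs::read_to_string("/proc/sys/fs/binfmt_misc/WSLInterop")
        .map(|contents| contents.contains("enabled"))
        .unwrap_or(false)
}

fn main() {
    println!("WSL interop available: {}", has_wsl_interop());
}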
async fn windows_path_to_wsl_path(&self, source: &Path) -> Result<String> {
|
||||
windows_path_to_wsl_path_impl(&self.connection_options, source).await
|
||||
}
|
||||
@@ -317,6 +336,7 @@ impl RemoteConnection for WslRemoteConnection {
|
||||
proxy_args.push(format!("{}={}", env_var, value));
|
||||
}
|
||||
}
|
||||
|
||||
proxy_args.push(remote_binary_path.display(PathStyle::Posix).into_owned());
|
||||
proxy_args.push("proxy".to_owned());
|
||||
proxy_args.push("--identifier".to_owned());
|
||||
@@ -489,6 +509,10 @@ impl RemoteConnection for WslRemoteConnection {
|
||||
fn default_system_shell(&self) -> String {
|
||||
self.default_system_shell.clone()
|
||||
}
|
||||
|
||||
fn has_wsl_interop(&self) -> bool {
|
||||
self.has_wsl_interop
|
||||
}
|
||||
}
|
||||
|
||||
/// `wslpath` is an executable available in WSL; it's a Linux binary.
|
||||
|
||||
@@ -452,7 +452,7 @@ async fn test_remote_lsp(cx: &mut TestAppContext, server_cx: &mut TestAppContext
|
||||
});
|
||||
|
||||
let mut fake_lsp = server_cx.update(|cx| {
|
||||
headless.read(cx).languages.register_fake_language_server(
|
||||
headless.read(cx).languages.register_fake_lsp_server(
|
||||
LanguageServerName("rust-analyzer".into()),
|
||||
lsp::ServerCapabilities {
|
||||
completion_provider: Some(lsp::CompletionOptions::default()),
|
||||
@@ -476,7 +476,7 @@ async fn test_remote_lsp(cx: &mut TestAppContext, server_cx: &mut TestAppContext
|
||||
..FakeLspAdapter::default()
|
||||
},
|
||||
);
|
||||
headless.read(cx).languages.register_fake_language_server(
|
||||
headless.read(cx).languages.register_fake_lsp_server(
|
||||
LanguageServerName("fake-analyzer".into()),
|
||||
lsp::ServerCapabilities {
|
||||
completion_provider: Some(lsp::CompletionOptions::default()),
|
||||
@@ -669,7 +669,7 @@ async fn test_remote_cancel_language_server_work(
|
||||
});
|
||||
|
||||
let mut fake_lsp = server_cx.update(|cx| {
|
||||
headless.read(cx).languages.register_fake_language_server(
|
||||
headless.read(cx).languages.register_fake_lsp_server(
|
||||
LanguageServerName("rust-analyzer".into()),
|
||||
Default::default(),
|
||||
None,
|
||||
|
||||
@@ -199,6 +199,7 @@ fn start_server(
|
||||
listeners: ServerListeners,
|
||||
log_rx: Receiver<Vec<u8>>,
|
||||
cx: &mut App,
|
||||
is_wsl_interop: bool,
|
||||
) -> AnyProtoClient {
|
||||
// This is the server idle timeout. If no connection comes in this timeout, the server will shut down.
|
||||
const IDLE_TIMEOUT: std::time::Duration = std::time::Duration::from_secs(10 * 60);
|
||||
@@ -318,7 +319,7 @@ fn start_server(
|
||||
})
|
||||
.detach();
|
||||
|
||||
RemoteClient::proto_client_from_channels(incoming_rx, outgoing_tx, cx, "server")
|
||||
RemoteClient::proto_client_from_channels(incoming_rx, outgoing_tx, cx, "server", is_wsl_interop)
|
||||
}
|
||||
|
||||
fn init_paths() -> anyhow::Result<()> {
|
||||
@@ -407,8 +408,15 @@ pub fn execute_run(
|
||||
|
||||
HeadlessProject::init(cx);
|
||||
|
||||
let is_wsl_interop = if cfg!(target_os = "linux") {
|
||||
// See: https://learn.microsoft.com/en-us/windows/wsl/filesystems#disable-interoperability
|
||||
matches!(std::fs::read_to_string("/proc/sys/fs/binfmt_misc/WSLInterop"), Ok(s) if s.contains("enabled"))
|
||||
} else {
|
||||
false
|
||||
};
|
||||
|
||||
log::info!("gpui app started, initializing server");
|
||||
let session = start_server(listeners, log_rx, cx);
|
||||
let session = start_server(listeners, log_rx, cx, is_wsl_interop);
|
||||
|
||||
GitHostingProviderRegistry::set_global(git_hosting_provider_registry, cx);
|
||||
git_hosting_providers::init(cx);
|
||||
|
||||
@@ -59,6 +59,7 @@ pub trait ProtoClient: Send + Sync {
|
||||
fn message_handler_set(&self) -> &parking_lot::Mutex<ProtoMessageHandlerSet>;
|
||||
|
||||
fn is_via_collab(&self) -> bool;
|
||||
fn has_wsl_interop(&self) -> bool;
|
||||
}
|
||||
|
||||
#[derive(Default)]
|
||||
@@ -510,6 +511,10 @@ impl AnyProtoClient {
|
||||
},
|
||||
);
|
||||
}
|
||||
|
||||
pub fn has_wsl_interop(&self) -> bool {
|
||||
self.0.client.has_wsl_interop()
|
||||
}
|
||||
}
|
||||
|
||||
fn to_any_envelope<T: EnvelopedMessage>(
|
||||
|
||||
@@ -15,6 +15,7 @@ use util::ResultExt as _;
|
||||
use util::{
|
||||
asset_str,
|
||||
markdown::{MarkdownEscaped, MarkdownInlineCode, MarkdownString},
|
||||
schemars::AllowTrailingCommas,
|
||||
};
|
||||
|
||||
use crate::SettingsAssets;
|
||||
@@ -451,7 +452,9 @@ impl KeymapFile {
|
||||
/// Creates a JSON schema generator, suitable for generating json schemas
|
||||
/// for actions
|
||||
pub fn action_schema_generator() -> schemars::SchemaGenerator {
|
||||
schemars::generate::SchemaSettings::draft2019_09().into_generator()
|
||||
schemars::generate::SchemaSettings::draft2019_09()
|
||||
.with_transform(AllowTrailingCommas)
|
||||
.into_generator()
|
||||
}
|
||||
|
||||
pub fn generate_json_schema_for_registered_actions(cx: &mut App) -> Value {
|
||||
|
||||
@@ -62,6 +62,8 @@ impl merge_from::MergeFrom for AllLanguageSettingsContent {
|
||||
pub struct FeaturesContent {
|
||||
/// Determines which edit prediction provider to use.
|
||||
pub edit_prediction_provider: Option<EditPredictionProvider>,
|
||||
/// Enables the experimental edit prediction context retrieval system.
|
||||
pub experimental_edit_prediction_context_retrieval: Option<bool>,
|
||||
}
|
||||
|
||||
/// The provider that supplies edit predictions.
|
||||
|
||||
@@ -61,6 +61,7 @@ pub struct AmazonBedrockSettingsContent {
|
||||
pub region: Option<String>,
|
||||
pub profile: Option<String>,
|
||||
pub authentication_method: Option<BedrockAuthMethodContent>,
|
||||
pub allow_global: Option<bool>,
|
||||
}
|
||||
|
||||
#[with_fallible_options]
|
||||
|
||||
@@ -25,7 +25,7 @@ use std::{
|
||||
use util::{
|
||||
ResultExt as _,
|
||||
rel_path::RelPath,
|
||||
schemars::{DefaultDenyUnknownFields, replace_subschema},
|
||||
schemars::{AllowTrailingCommas, DefaultDenyUnknownFields, replace_subschema},
|
||||
};
|
||||
|
||||
pub type EditorconfigProperties = ec4rs::Properties;
|
||||
@@ -1010,6 +1010,7 @@ impl SettingsStore {
|
||||
pub fn json_schema(&self, params: &SettingsJsonSchemaParams) -> Value {
|
||||
let mut generator = schemars::generate::SchemaSettings::draft2019_09()
|
||||
.with_transform(DefaultDenyUnknownFields)
|
||||
.with_transform(AllowTrailingCommas)
|
||||
.into_generator();
|
||||
|
||||
UserSettingsContent::json_schema(&mut generator);
|
||||
|
||||
@@ -2,7 +2,7 @@ use collections::HashMap;
|
||||
use schemars::{JsonSchema, json_schema};
|
||||
use serde::Deserialize;
|
||||
use std::borrow::Cow;
|
||||
use util::schemars::DefaultDenyUnknownFields;
|
||||
use util::schemars::{AllowTrailingCommas, DefaultDenyUnknownFields};
|
||||
|
||||
#[derive(Deserialize)]
|
||||
pub struct VsSnippetsFile {
|
||||
@@ -14,6 +14,7 @@ impl VsSnippetsFile {
|
||||
pub fn generate_json_schema() -> serde_json::Value {
|
||||
let schema = schemars::generate::SchemaSettings::draft2019_09()
|
||||
.with_transform(DefaultDenyUnknownFields)
|
||||
.with_transform(AllowTrailingCommas)
|
||||
.into_generator()
|
||||
.root_schema_for::<Self>();
|
||||
|
||||
|
||||
@@ -357,6 +357,7 @@ impl DebugTaskFile {
|
||||
"$schema": meta_schema,
|
||||
"title": "Debug Configurations",
|
||||
"description": "Configuration for debug scenarios",
|
||||
"allowTrailingCommas": true,
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": "object",
|
||||
|
||||
@@ -4,7 +4,7 @@ use schemars::JsonSchema;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use sha2::{Digest, Sha256};
|
||||
use std::path::PathBuf;
|
||||
use util::schemars::DefaultDenyUnknownFields;
|
||||
use util::schemars::{AllowTrailingCommas, DefaultDenyUnknownFields};
|
||||
use util::serde::default_true;
|
||||
use util::{ResultExt, truncate_and_remove_front};
|
||||
|
||||
@@ -118,6 +118,7 @@ impl TaskTemplates {
|
||||
pub fn generate_json_schema() -> serde_json::Value {
|
||||
let schema = schemars::generate::SchemaSettings::draft2019_09()
|
||||
.with_transform(DefaultDenyUnknownFields)
|
||||
.with_transform(AllowTrailingCommas)
|
||||
.into_generator()
|
||||
.root_schema_for::<Self>();
|
||||
|
||||
|
||||
@@ -1,36 +0,0 @@
|
||||
use gpui::{IntoElement, Render};
|
||||
use ui::{Divider, prelude::*, tooltip_container};
|
||||
|
||||
pub struct TerminalTooltip {
|
||||
title: SharedString,
|
||||
pid: u32,
|
||||
}
|
||||
|
||||
impl TerminalTooltip {
|
||||
pub fn new(title: impl Into<SharedString>, pid: u32) -> Self {
|
||||
Self {
|
||||
title: title.into(),
|
||||
pid,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Render for TerminalTooltip {
|
||||
fn render(&mut self, _: &mut Window, cx: &mut Context<Self>) -> impl IntoElement {
|
||||
tooltip_container(cx, move |this, _cx| {
|
||||
this.occlude()
|
||||
.on_mouse_move(|_, _window, cx| cx.stop_propagation())
|
||||
.child(
|
||||
v_flex()
|
||||
.gap_1()
|
||||
.child(Label::new(self.title.clone()))
|
||||
.child(Divider::horizontal())
|
||||
.child(
|
||||
Label::new(format!("Process ID (PID): {}", self.pid))
|
||||
.color(Color::Muted)
|
||||
.size(LabelSize::Small),
|
||||
),
|
||||
)
|
||||
})
|
||||
}
|
||||
}
|
||||
@@ -4,7 +4,6 @@ pub mod terminal_panel;
|
||||
mod terminal_path_like_target;
|
||||
pub mod terminal_scrollbar;
|
||||
mod terminal_slash_command;
|
||||
pub mod terminal_tab_tooltip;
|
||||
|
||||
use assistant_slash_command::SlashCommandRegistry;
|
||||
use editor::{EditorSettings, actions::SelectAll, blink_manager::BlinkManager};
|
||||
@@ -32,9 +31,8 @@ use terminal_panel::TerminalPanel;
|
||||
use terminal_path_like_target::{hover_path_like_target, open_path_like_target};
|
||||
use terminal_scrollbar::TerminalScrollHandle;
|
||||
use terminal_slash_command::TerminalSlashCommand;
|
||||
use terminal_tab_tooltip::TerminalTooltip;
|
||||
use ui::{
|
||||
ContextMenu, Icon, IconName, Label, ScrollAxes, Scrollbars, Tooltip, WithScrollbar, h_flex,
|
||||
ContextMenu, Divider, ScrollAxes, Scrollbars, Tooltip, WithScrollbar,
|
||||
prelude::*,
|
||||
scrollbars::{self, GlobalSetting, ScrollbarVisibility},
|
||||
};
|
||||
@@ -1140,14 +1138,24 @@ impl Item for TerminalView {
|
||||
type Event = ItemEvent;
|
||||
|
||||
fn tab_tooltip_content(&self, cx: &App) -> Option<TabTooltipContent> {
|
||||
let terminal = self.terminal().read(cx);
|
||||
let title = terminal.title(false);
|
||||
let pid = terminal.pid_getter()?.fallback_pid();
|
||||
Some(TabTooltipContent::Custom(Box::new(Tooltip::element({
|
||||
let terminal = self.terminal().read(cx);
|
||||
let title = terminal.title(false);
|
||||
let pid = terminal.pid_getter()?.fallback_pid();
|
||||
|
||||
Some(TabTooltipContent::Custom(Box::new(move |_window, cx| {
|
||||
cx.new(|_| TerminalTooltip::new(title.clone(), pid.as_u32()))
|
||||
.into()
|
||||
})))
|
||||
move |_, _| {
|
||||
v_flex()
|
||||
.gap_1()
|
||||
.child(Label::new(title.clone()))
|
||||
.child(h_flex().flex_grow().child(Divider::horizontal()))
|
||||
.child(
|
||||
Label::new(format!("Process ID (PID): {}", pid))
|
||||
.color(Color::Muted)
|
||||
.size(LabelSize::Small),
|
||||
)
|
||||
.into_any_element()
|
||||
}
|
||||
}))))
|
||||
}
|
||||
|
||||
fn tab_content(&self, params: TabContentParams, _window: &Window, cx: &App) -> AnyElement {
|
||||
|
||||
@@ -8,10 +8,14 @@ use sum_tree::{Bias, Dimensions};
|
||||
/// A timestamped position in a buffer
|
||||
#[derive(Copy, Clone, Eq, PartialEq, Hash)]
|
||||
pub struct Anchor {
|
||||
/// The timestamp of the operation that inserted the text
|
||||
/// in which this anchor is located.
|
||||
pub timestamp: clock::Lamport,
|
||||
/// The byte offset in the buffer
|
||||
/// The byte offset into the text inserted in the operation
|
||||
/// at `timestamp`.
|
||||
pub offset: usize,
|
||||
/// Describes which character the anchor is biased towards
|
||||
/// Whether this anchor stays attached to the character *before* or *after*
|
||||
/// the offset.
|
||||
pub bias: Bias,
|
||||
pub buffer_id: Option<BufferId>,
|
||||
}
|
||||
|
||||
@@ -588,19 +588,20 @@ impl ToolchainSelector {
|
||||
.worktree_for_id(worktree_id, cx)?
|
||||
.read(cx)
|
||||
.abs_path();
|
||||
let workspace_id = workspace.database_id()?;
|
||||
let weak = workspace.weak_handle();
|
||||
cx.spawn_in(window, async move |workspace, cx| {
|
||||
let active_toolchain = workspace::WORKSPACE_DB
|
||||
.toolchain(
|
||||
workspace_id,
|
||||
worktree_id,
|
||||
relative_path.clone(),
|
||||
language_name.clone(),
|
||||
)
|
||||
.await
|
||||
.ok()
|
||||
.flatten();
|
||||
let active_toolchain = project
|
||||
.read_with(cx, |this, cx| {
|
||||
this.active_toolchain(
|
||||
ProjectPath {
|
||||
worktree_id,
|
||||
path: relative_path.clone(),
|
||||
},
|
||||
language_name.clone(),
|
||||
cx,
|
||||
)
|
||||
})?
|
||||
.await;
|
||||
workspace
|
||||
.update_in(cx, |this, window, cx| {
|
||||
this.toggle_modal(window, cx, move |window, cx| {
|
||||
@@ -618,6 +619,7 @@ impl ToolchainSelector {
|
||||
});
|
||||
})
|
||||
.ok();
|
||||
anyhow::Ok(())
|
||||
})
|
||||
.detach();
|
||||
|
||||
|
||||
@@ -485,6 +485,7 @@ pub struct Table<const COLS: usize = 3> {
|
||||
interaction_state: Option<WeakEntity<TableInteractionState>>,
|
||||
col_widths: Option<TableWidths<COLS>>,
|
||||
map_row: Option<Rc<dyn Fn((usize, Stateful<Div>), &mut Window, &mut App) -> AnyElement>>,
|
||||
use_ui_font: bool,
|
||||
empty_table_callback: Option<Rc<dyn Fn(&mut Window, &mut App) -> AnyElement>>,
|
||||
}
|
||||
|
||||
@@ -498,6 +499,7 @@ impl<const COLS: usize> Table<COLS> {
|
||||
rows: TableContents::Vec(Vec::new()),
|
||||
interaction_state: None,
|
||||
map_row: None,
|
||||
use_ui_font: true,
|
||||
empty_table_callback: None,
|
||||
col_widths: None,
|
||||
}
|
||||
@@ -590,6 +592,11 @@ impl<const COLS: usize> Table<COLS> {
|
||||
self
|
||||
}
|
||||
|
||||
pub fn no_ui_font(mut self) -> Self {
|
||||
self.use_ui_font = false;
|
||||
self
|
||||
}
|
||||
|
||||
pub fn map_row(
|
||||
mut self,
|
||||
callback: impl Fn((usize, Stateful<Div>), &mut Window, &mut App) -> AnyElement + 'static,
|
||||
@@ -618,8 +625,8 @@ fn base_cell_style(width: Option<Length>) -> Div {
|
||||
.overflow_hidden()
|
||||
}
|
||||
|
||||
fn base_cell_style_text(width: Option<Length>, cx: &App) -> Div {
|
||||
base_cell_style(width).text_ui(cx)
|
||||
fn base_cell_style_text(width: Option<Length>, use_ui_font: bool, cx: &App) -> Div {
|
||||
base_cell_style(width).when(use_ui_font, |el| el.text_ui(cx))
|
||||
}
|
||||
|
||||
pub fn render_table_row<const COLS: usize>(
|
||||
@@ -656,7 +663,12 @@ pub fn render_table_row<const COLS: usize>(
|
||||
.map(IntoElement::into_any_element)
|
||||
.into_iter()
|
||||
.zip(column_widths)
|
||||
.map(|(cell, width)| base_cell_style_text(width, cx).px_1().py_0p5().child(cell)),
|
||||
.map(|(cell, width)| {
|
||||
base_cell_style_text(width, table_context.use_ui_font, cx)
|
||||
.px_1()
|
||||
.py_0p5()
|
||||
.child(cell)
|
||||
}),
|
||||
);
|
||||
|
||||
let row = if let Some(map_row) = table_context.map_row {
|
||||
@@ -700,7 +712,7 @@ pub fn render_table_header<const COLS: usize>(
|
||||
.border_color(cx.theme().colors().border)
|
||||
.children(headers.into_iter().enumerate().zip(column_widths).map(
|
||||
|((header_idx, h), width)| {
|
||||
base_cell_style_text(width, cx)
|
||||
base_cell_style_text(width, table_context.use_ui_font, cx)
|
||||
.child(h)
|
||||
.id(ElementId::NamedInteger(
|
||||
shared_element_id.clone(),
|
||||
@@ -739,6 +751,7 @@ pub struct TableRenderContext<const COLS: usize> {
|
||||
pub total_row_count: usize,
|
||||
pub column_widths: Option<[Length; COLS]>,
|
||||
pub map_row: Option<Rc<dyn Fn((usize, Stateful<Div>), &mut Window, &mut App) -> AnyElement>>,
|
||||
pub use_ui_font: bool,
|
||||
}
|
||||
|
||||
impl<const COLS: usize> TableRenderContext<COLS> {
|
||||
@@ -748,6 +761,7 @@ impl<const COLS: usize> TableRenderContext<COLS> {
|
||||
total_row_count: table.rows.len(),
|
||||
column_widths: table.col_widths.as_ref().map(|widths| widths.lengths(cx)),
|
||||
map_row: table.map_row.clone(),
|
||||
use_ui_font: table.use_ui_font,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -53,3 +53,20 @@ impl schemars::transform::Transform for DefaultDenyUnknownFields {
transform_subschemas(self, schema);
}
}

/// Defaults `allowTrailingCommas` to `true`, for use with `json-language-server`.
/// This can be applied to any schema that will be treated as `jsonc`.
///
/// Note that this is non-recursive and only applied to the root schema.
#[derive(Clone)]
pub struct AllowTrailingCommas;

impl schemars::transform::Transform for AllowTrailingCommas {
fn transform(&mut self, schema: &mut schemars::Schema) {
if let Some(object) = schema.as_object_mut()
&& !object.contains_key("allowTrailingCommas")
{
object.insert("allowTrailingCommas".to_string(), true.into());
}
}
}
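
A minimal sketch of how this transform might be applied when emitting a JSONC-treated schema; `ZedSettings` is a hypothetical stand-in for whatever root type derives `JsonSchema`, not something this diff defines:

    use schemars::transform::Transform as _;

    // Generate the root schema, then mark it so json-language-server accepts trailing commas.
    let mut schema = schemars::schema_for!(ZedSettings);
    AllowTrailingCommas.transform(&mut schema);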
|
||||
|
||||
@@ -1656,49 +1656,6 @@ impl WorkspaceDb {
|
||||
}
|
||||
}
|
||||
|
||||
pub async fn toolchain(
|
||||
&self,
|
||||
workspace_id: WorkspaceId,
|
||||
worktree_id: WorktreeId,
|
||||
relative_worktree_path: Arc<RelPath>,
|
||||
language_name: LanguageName,
|
||||
) -> Result<Option<Toolchain>> {
|
||||
self.write(move |this| {
|
||||
let mut select = this
|
||||
.select_bound(sql!(
|
||||
SELECT
|
||||
name, path, raw_json
|
||||
FROM toolchains
|
||||
WHERE
|
||||
workspace_id = ? AND
|
||||
language_name = ? AND
|
||||
worktree_id = ? AND
|
||||
relative_worktree_path = ?
|
||||
))
|
||||
.context("select toolchain")?;
|
||||
|
||||
let toolchain: Vec<(String, String, String)> = select((
|
||||
workspace_id,
|
||||
language_name.as_ref().to_string(),
|
||||
worktree_id.to_usize(),
|
||||
relative_worktree_path.as_unix_str().to_string(),
|
||||
))?;
|
||||
|
||||
Ok(toolchain
|
||||
.into_iter()
|
||||
.next()
|
||||
.and_then(|(name, path, raw_json)| {
|
||||
Some(Toolchain {
|
||||
name: name.into(),
|
||||
path: path.into(),
|
||||
language_name,
|
||||
as_json: serde_json::Value::from_str(&raw_json).ok()?,
|
||||
})
|
||||
}))
|
||||
})
|
||||
.await
|
||||
}
|
||||
|
||||
pub(crate) async fn toolchains(
|
||||
&self,
|
||||
workspace_id: WorkspaceId,
|
||||
|
||||
@@ -52,7 +52,7 @@ use std::{
|
||||
fmt,
|
||||
future::Future,
|
||||
mem::{self},
|
||||
ops::{Deref, DerefMut},
|
||||
ops::{Deref, DerefMut, Range},
|
||||
path::{Path, PathBuf},
|
||||
pin::Pin,
|
||||
sync::{
|
||||
@@ -428,7 +428,7 @@ impl Worktree {
|
||||
let mut entry = Entry::new(
|
||||
RelPath::empty().into(),
|
||||
&metadata,
|
||||
&next_entry_id,
|
||||
ProjectEntryId::new(&next_entry_id),
|
||||
snapshot.root_char_bag,
|
||||
None,
|
||||
);
|
||||
@@ -2736,13 +2736,30 @@ impl BackgroundScannerState {
|
||||
}
|
||||
}
|
||||
|
||||
async fn insert_entry(
|
||||
fn entry_id_for(
|
||||
&mut self,
|
||||
mut entry: Entry,
|
||||
fs: &dyn Fs,
|
||||
watcher: &dyn Watcher,
|
||||
) -> Entry {
|
||||
self.reuse_entry_id(&mut entry);
|
||||
next_entry_id: &AtomicUsize,
|
||||
path: &RelPath,
|
||||
metadata: &fs::Metadata,
|
||||
) -> ProjectEntryId {
|
||||
// If an entry with the same inode was removed from the worktree during this scan,
// then it *might* represent the same file or directory. But the OS might also have
// re-used the inode for a completely different file or directory.
//
// Conditionally reuse the old entry's id:
// * if the mtime is the same, the file was probably renamed.
// * if the path is the same, the file may just have been updated.
|
||||
if let Some(removed_entry) = self.removed_entries.remove(&metadata.inode) {
|
||||
if removed_entry.mtime == Some(metadata.mtime) || *removed_entry.path == *path {
|
||||
return removed_entry.id;
|
||||
}
|
||||
} else if let Some(existing_entry) = self.snapshot.entry_for_path(path) {
|
||||
return existing_entry.id;
|
||||
}
|
||||
ProjectEntryId::new(next_entry_id)
|
||||
}
|
||||
|
||||
async fn insert_entry(&mut self, entry: Entry, fs: &dyn Fs, watcher: &dyn Watcher) -> Entry {
|
||||
let entry = self.snapshot.insert_entry(entry, fs);
|
||||
if entry.path.file_name() == Some(&DOT_GIT) {
|
||||
self.insert_git_repository(entry.path.clone(), fs, watcher)
|
||||
@@ -3389,13 +3406,13 @@ impl Entry {
|
||||
fn new(
|
||||
path: Arc<RelPath>,
|
||||
metadata: &fs::Metadata,
|
||||
next_entry_id: &AtomicUsize,
|
||||
id: ProjectEntryId,
|
||||
root_char_bag: CharBag,
|
||||
canonical_path: Option<Arc<Path>>,
|
||||
) -> Self {
|
||||
let char_bag = char_bag_for_path(root_char_bag, &path);
|
||||
Self {
|
||||
id: ProjectEntryId::new(next_entry_id),
|
||||
id,
|
||||
kind: if metadata.is_dir {
|
||||
EntryKind::PendingDir
|
||||
} else {
|
||||
@@ -3682,8 +3699,10 @@ impl BackgroundScanner {
|
||||
.await;
|
||||
if ignore_stack.is_abs_path_ignored(root_abs_path.as_path(), true) {
|
||||
root_entry.is_ignored = true;
|
||||
let mut root_entry = root_entry.clone();
|
||||
state.reuse_entry_id(&mut root_entry);
|
||||
state
|
||||
.insert_entry(root_entry.clone(), self.fs.as_ref(), self.watcher.as_ref())
|
||||
.insert_entry(root_entry, self.fs.as_ref(), self.watcher.as_ref())
|
||||
.await;
|
||||
}
|
||||
if root_entry.is_dir() {
|
||||
@@ -3877,29 +3896,35 @@ impl BackgroundScanner {
|
||||
abs_paths.dedup_by(|a, b| a.starts_with(b));
|
||||
{
|
||||
let snapshot = &self.state.lock().await.snapshot;
|
||||
abs_paths.retain(|abs_path| {
|
||||
let abs_path = &SanitizedPath::new(abs_path);
|
||||
|
||||
let mut ranges_to_drop = SmallVec::<[Range<usize>; 4]>::new();
|
||||
|
||||
fn skip_ix(ranges: &mut SmallVec<[Range<usize>; 4]>, ix: usize) {
|
||||
if let Some(last_range) = ranges.last_mut()
|
||||
&& last_range.end == ix
|
||||
{
|
||||
last_range.end += 1;
|
||||
} else {
|
||||
ranges.push(ix..ix + 1);
|
||||
}
|
||||
}
|
||||
|
||||
for (ix, abs_path) in abs_paths.iter().enumerate() {
|
||||
let abs_path = &SanitizedPath::new(&abs_path);
|
||||
|
||||
{
|
||||
let mut is_git_related = false;
|
||||
let mut dot_git_paths = None;
|
||||
|
||||
let dot_git_paths = self.executor.block(maybe!(async {
|
||||
let mut path = None;
|
||||
for ancestor in abs_path.as_path().ancestors() {
|
||||
|
||||
for ancestor in abs_path.as_path().ancestors() {
|
||||
if is_git_dir(ancestor, self.fs.as_ref()).await {
|
||||
let path_in_git_dir = abs_path
|
||||
.as_path()
|
||||
.strip_prefix(ancestor)
|
||||
.expect("stripping off the ancestor");
|
||||
path = Some((ancestor.to_owned(), path_in_git_dir.to_owned()));
|
||||
break;
|
||||
dot_git_paths = Some((ancestor.to_owned(), path_in_git_dir.to_owned()));
|
||||
break;
|
||||
}
|
||||
}
|
||||
path
|
||||
|
||||
}));
|
||||
}
|
||||
|
||||
if let Some((dot_git_abs_path, path_in_git_dir)) = dot_git_paths {
|
||||
if skipped_files_in_dot_git
|
||||
@@ -3909,8 +3934,11 @@ impl BackgroundScanner {
|
||||
path_in_git_dir.starts_with(skipped_git_subdir)
|
||||
})
|
||||
{
|
||||
log::debug!("ignoring event {abs_path:?} as it's in the .git directory among skipped files or directories");
|
||||
return false;
|
||||
log::debug!(
|
||||
"ignoring event {abs_path:?} as it's in the .git directory among skipped files or directories"
|
||||
);
|
||||
skip_ix(&mut ranges_to_drop, ix);
|
||||
continue;
|
||||
}
|
||||
|
||||
is_git_related = true;
|
||||
@@ -3919,8 +3947,7 @@ impl BackgroundScanner {
|
||||
}
|
||||
}
|
||||
|
||||
let relative_path = if let Ok(path) =
|
||||
abs_path.strip_prefix(&root_canonical_path)
|
||||
let relative_path = if let Ok(path) = abs_path.strip_prefix(&root_canonical_path)
|
||||
&& let Ok(path) = RelPath::new(path, PathStyle::local())
|
||||
{
|
||||
path
|
||||
@@ -3931,10 +3958,11 @@ impl BackgroundScanner {
|
||||
);
|
||||
} else {
|
||||
log::error!(
|
||||
"ignoring event {abs_path:?} outside of root path {root_canonical_path:?}",
|
||||
"ignoring event {abs_path:?} outside of root path {root_canonical_path:?}",
|
||||
);
|
||||
}
|
||||
return false;
|
||||
skip_ix(&mut ranges_to_drop, ix);
|
||||
continue;
|
||||
};
|
||||
|
||||
if abs_path.file_name() == Some(OsStr::new(GITIGNORE)) {
|
||||
@@ -3958,21 +3986,26 @@ impl BackgroundScanner {
|
||||
});
|
||||
if !parent_dir_is_loaded {
|
||||
log::debug!("ignoring event {relative_path:?} within unloaded directory");
|
||||
return false;
|
||||
skip_ix(&mut ranges_to_drop, ix);
|
||||
continue;
|
||||
}
|
||||
|
||||
if self.settings.is_path_excluded(&relative_path) {
|
||||
if !is_git_related {
|
||||
log::debug!("ignoring FS event for excluded path {relative_path:?}");
|
||||
}
|
||||
return false;
|
||||
skip_ix(&mut ranges_to_drop, ix);
|
||||
continue;
|
||||
}
|
||||
|
||||
relative_paths.push(relative_path.into_arc());
|
||||
true
|
||||
}
|
||||
});
|
||||
|
||||
for range_to_drop in ranges_to_drop.into_iter().rev() {
|
||||
abs_paths.drain(range_to_drop);
|
||||
}
|
||||
}
|
||||
|
||||
if relative_paths.is_empty() && dot_git_abs_paths.is_empty() {
|
||||
return;
|
||||
}
|
||||
@@ -4275,7 +4308,7 @@ impl BackgroundScanner {
|
||||
let mut child_entry = Entry::new(
|
||||
child_path.clone(),
|
||||
&child_metadata,
|
||||
&next_entry_id,
|
||||
ProjectEntryId::new(&next_entry_id),
|
||||
root_char_bag,
|
||||
None,
|
||||
);
|
||||
@@ -4462,10 +4495,11 @@ impl BackgroundScanner {
|
||||
.ignore_stack_for_abs_path(&abs_path, metadata.is_dir, self.fs.as_ref())
|
||||
.await;
|
||||
let is_external = !canonical_path.starts_with(&root_canonical_path);
|
||||
let entry_id = state.entry_id_for(self.next_entry_id.as_ref(), path, &metadata);
|
||||
let mut fs_entry = Entry::new(
|
||||
path.clone(),
|
||||
&metadata,
|
||||
self.next_entry_id.as_ref(),
|
||||
entry_id,
|
||||
state.snapshot.root_char_bag,
|
||||
if metadata.is_symlink {
|
||||
Some(canonical_path.as_path().to_path_buf().into())
|
||||
|
||||
@@ -1533,6 +1533,175 @@ async fn test_create_dir_all_on_create_entry(cx: &mut TestAppContext) {
|
||||
});
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_create_file_in_expanded_gitignored_dir(cx: &mut TestAppContext) {
|
||||
// Tests the behavior of our worktree refresh when a file in a gitignored directory
|
||||
// is created.
|
||||
init_test(cx);
|
||||
let fs = FakeFs::new(cx.background_executor.clone());
|
||||
fs.insert_tree(
|
||||
"/root",
|
||||
json!({
|
||||
".gitignore": "ignored_dir\n",
|
||||
"ignored_dir": {
|
||||
"existing_file.txt": "existing content",
|
||||
"another_file.txt": "another content",
|
||||
},
|
||||
}),
|
||||
)
|
||||
.await;
|
||||
|
||||
let tree = Worktree::local(
|
||||
Path::new("/root"),
|
||||
true,
|
||||
fs.clone(),
|
||||
Default::default(),
|
||||
&mut cx.to_async(),
|
||||
)
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
|
||||
.await;
|
||||
|
||||
tree.read_with(cx, |tree, _| {
|
||||
let ignored_dir = tree.entry_for_path(rel_path("ignored_dir")).unwrap();
|
||||
assert!(ignored_dir.is_ignored);
|
||||
assert_eq!(ignored_dir.kind, EntryKind::UnloadedDir);
|
||||
});
|
||||
|
||||
tree.update(cx, |tree, cx| {
|
||||
tree.load_file(rel_path("ignored_dir/existing_file.txt"), cx)
|
||||
})
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
tree.read_with(cx, |tree, _| {
|
||||
let ignored_dir = tree.entry_for_path(rel_path("ignored_dir")).unwrap();
|
||||
assert!(ignored_dir.is_ignored);
|
||||
assert_eq!(ignored_dir.kind, EntryKind::Dir);
|
||||
|
||||
assert!(
|
||||
tree.entry_for_path(rel_path("ignored_dir/existing_file.txt"))
|
||||
.is_some()
|
||||
);
|
||||
assert!(
|
||||
tree.entry_for_path(rel_path("ignored_dir/another_file.txt"))
|
||||
.is_some()
|
||||
);
|
||||
});
|
||||
|
||||
let entry = tree
|
||||
.update(cx, |tree, cx| {
|
||||
tree.create_entry(rel_path("ignored_dir/new_file.txt").into(), false, None, cx)
|
||||
})
|
||||
.await
|
||||
.unwrap();
|
||||
assert!(entry.into_included().is_some());
|
||||
|
||||
cx.executor().run_until_parked();
|
||||
|
||||
tree.read_with(cx, |tree, _| {
|
||||
let ignored_dir = tree.entry_for_path(rel_path("ignored_dir")).unwrap();
|
||||
assert!(ignored_dir.is_ignored);
|
||||
assert_eq!(
|
||||
ignored_dir.kind,
|
||||
EntryKind::Dir,
|
||||
"ignored_dir should still be loaded, not UnloadedDir"
|
||||
);
|
||||
|
||||
assert!(
|
||||
tree.entry_for_path(rel_path("ignored_dir/existing_file.txt"))
|
||||
.is_some(),
|
||||
"existing_file.txt should still be visible"
|
||||
);
|
||||
assert!(
|
||||
tree.entry_for_path(rel_path("ignored_dir/another_file.txt"))
|
||||
.is_some(),
|
||||
"another_file.txt should still be visible"
|
||||
);
|
||||
assert!(
|
||||
tree.entry_for_path(rel_path("ignored_dir/new_file.txt"))
|
||||
.is_some(),
|
||||
"new_file.txt should be visible"
|
||||
);
|
||||
});
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_fs_event_for_gitignored_dir_does_not_lose_contents(cx: &mut TestAppContext) {
|
||||
// Tests the behavior of our worktree refresh when a modification event is triggered for a gitignored directory.
|
||||
init_test(cx);
|
||||
let fs = FakeFs::new(cx.background_executor.clone());
|
||||
fs.insert_tree(
|
||||
"/root",
|
||||
json!({
|
||||
".gitignore": "ignored_dir\n",
|
||||
"ignored_dir": {
|
||||
"file1.txt": "content1",
|
||||
"file2.txt": "content2",
|
||||
},
|
||||
}),
|
||||
)
|
||||
.await;
|
||||
|
||||
let tree = Worktree::local(
|
||||
Path::new("/root"),
|
||||
true,
|
||||
fs.clone(),
|
||||
Default::default(),
|
||||
&mut cx.to_async(),
|
||||
)
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
|
||||
.await;
|
||||
|
||||
// Load a file to expand the ignored directory
|
||||
tree.update(cx, |tree, cx| {
|
||||
tree.load_file(rel_path("ignored_dir/file1.txt"), cx)
|
||||
})
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
tree.read_with(cx, |tree, _| {
|
||||
let ignored_dir = tree.entry_for_path(rel_path("ignored_dir")).unwrap();
|
||||
assert_eq!(ignored_dir.kind, EntryKind::Dir);
|
||||
assert!(
|
||||
tree.entry_for_path(rel_path("ignored_dir/file1.txt"))
|
||||
.is_some()
|
||||
);
|
||||
assert!(
|
||||
tree.entry_for_path(rel_path("ignored_dir/file2.txt"))
|
||||
.is_some()
|
||||
);
|
||||
});
|
||||
|
||||
fs.emit_fs_event("/root/ignored_dir", Some(fs::PathEventKind::Changed));
|
||||
tree.flush_fs_events(cx).await;
|
||||
|
||||
tree.read_with(cx, |tree, _| {
|
||||
let ignored_dir = tree.entry_for_path(rel_path("ignored_dir")).unwrap();
|
||||
assert_eq!(
|
||||
ignored_dir.kind,
|
||||
EntryKind::Dir,
|
||||
"ignored_dir should still be loaded (Dir), not UnloadedDir"
|
||||
);
|
||||
assert!(
|
||||
tree.entry_for_path(rel_path("ignored_dir/file1.txt"))
|
||||
.is_some(),
|
||||
"file1.txt should still be visible after directory fs event"
|
||||
);
|
||||
assert!(
|
||||
tree.entry_for_path(rel_path("ignored_dir/file2.txt"))
|
||||
.is_some(),
|
||||
"file2.txt should still be visible after directory fs event"
|
||||
);
|
||||
});
|
||||
}
|
||||
|
||||
#[gpui::test(iterations = 100)]
|
||||
async fn test_random_worktree_operations_during_initial_scan(
|
||||
cx: &mut TestAppContext,
|
||||
|
||||
@@ -22,5 +22,9 @@
<true/>
<key>com.apple.security.personal-information.photos-library</key>
<true/>
<key>com.apple.security.files.user-selected.read-write</key>
<true/>
<key>com.apple.security.files.downloads.read-write</key>
<true/>
</dict>
</plist>
|
||||
|
||||
@@ -215,6 +215,10 @@ pub mod git {
Switch,
/// Selects a different repository.
SelectRepo,
/// Filters remotes.
FilterRemotes,
/// Creates a git remote.
CreateRemote,
/// Opens the git branch selector.
#[action(deprecated_aliases = ["branches::OpenRecent"])]
Branch,
|
||||
|
||||
@@ -30,6 +30,7 @@ credentials_provider.workspace = true
db.workspace = true
edit_prediction.workspace = true
edit_prediction_context.workspace = true
edit_prediction_context2.workspace = true
editor.workspace = true
feature_flags.workspace = true
fs.workspace = true
|
||||
|
||||
@@ -1,173 +0,0 @@
|
||||
use cloud_llm_client::predict_edits_v3::Excerpt;
|
||||
use edit_prediction_context::Line;
|
||||
use language::{BufferSnapshot, Point};
|
||||
use std::ops::Range;
|
||||
|
||||
pub fn assemble_excerpts(
|
||||
buffer: &BufferSnapshot,
|
||||
merged_line_ranges: impl IntoIterator<Item = Range<Line>>,
|
||||
) -> Vec<Excerpt> {
|
||||
let mut output = Vec::new();
|
||||
|
||||
let outline_items = buffer.outline_items_as_points_containing(0..buffer.len(), false, None);
|
||||
let mut outline_items = outline_items.into_iter().peekable();
|
||||
|
||||
for range in merged_line_ranges {
|
||||
let point_range = Point::new(range.start.0, 0)..Point::new(range.end.0, 0);
|
||||
|
||||
while let Some(outline_item) = outline_items.peek() {
|
||||
if outline_item.range.start >= point_range.start {
|
||||
break;
|
||||
}
|
||||
if outline_item.range.end > point_range.start {
|
||||
let mut point_range = outline_item.source_range_for_text.clone();
|
||||
point_range.start.column = 0;
|
||||
point_range.end.column = buffer.line_len(point_range.end.row);
|
||||
|
||||
output.push(Excerpt {
|
||||
start_line: Line(point_range.start.row),
|
||||
text: buffer
|
||||
.text_for_range(point_range.clone())
|
||||
.collect::<String>()
|
||||
.into(),
|
||||
})
|
||||
}
|
||||
outline_items.next();
|
||||
}
|
||||
|
||||
output.push(Excerpt {
|
||||
start_line: Line(point_range.start.row),
|
||||
text: buffer
|
||||
.text_for_range(point_range.clone())
|
||||
.collect::<String>()
|
||||
.into(),
|
||||
})
|
||||
}
|
||||
|
||||
output
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use std::sync::Arc;
|
||||
|
||||
use super::*;
|
||||
use cloud_llm_client::predict_edits_v3;
|
||||
use gpui::{TestAppContext, prelude::*};
|
||||
use indoc::indoc;
|
||||
use language::{Buffer, Language, LanguageConfig, LanguageMatcher, OffsetRangeExt};
|
||||
use pretty_assertions::assert_eq;
|
||||
use util::test::marked_text_ranges;
|
||||
|
||||
#[gpui::test]
|
||||
fn test_rust(cx: &mut TestAppContext) {
|
||||
let table = [
|
||||
(
|
||||
indoc! {r#"
|
||||
struct User {
|
||||
first_name: String,
|
||||
« last_name: String,
|
||||
ageˇ: u32,
|
||||
» email: String,
|
||||
create_at: Instant,
|
||||
}
|
||||
|
||||
impl User {
|
||||
pub fn first_name(&self) -> String {
|
||||
self.first_name.clone()
|
||||
}
|
||||
|
||||
pub fn full_name(&self) -> String {
|
||||
« format!("{} {}", self.first_name, self.last_name)
|
||||
» }
|
||||
}
|
||||
"#},
|
||||
indoc! {r#"
|
||||
1|struct User {
|
||||
…
|
||||
3| last_name: String,
|
||||
4| age<|cursor|>: u32,
|
||||
…
|
||||
9|impl User {
|
||||
…
|
||||
14| pub fn full_name(&self) -> String {
|
||||
15| format!("{} {}", self.first_name, self.last_name)
|
||||
…
|
||||
"#},
|
||||
),
|
||||
(
|
||||
indoc! {r#"
|
||||
struct User {
|
||||
first_name: String,
|
||||
« last_name: String,
|
||||
age: u32,
|
||||
}
|
||||
»"#
|
||||
},
|
||||
indoc! {r#"
|
||||
1|struct User {
|
||||
…
|
||||
3| last_name: String,
|
||||
4| age: u32,
|
||||
5|}
|
||||
"#},
|
||||
),
|
||||
];
|
||||
|
||||
for (input, expected_output) in table {
|
||||
let input_without_ranges = input.replace(['«', '»'], "");
|
||||
let input_without_caret = input.replace('ˇ', "");
|
||||
let cursor_offset = input_without_ranges.find('ˇ');
|
||||
let (input, ranges) = marked_text_ranges(&input_without_caret, false);
|
||||
let buffer =
|
||||
cx.new(|cx| Buffer::local(input, cx).with_language(Arc::new(rust_lang()), cx));
|
||||
buffer.read_with(cx, |buffer, _cx| {
|
||||
let insertions = cursor_offset
|
||||
.map(|offset| {
|
||||
let point = buffer.offset_to_point(offset);
|
||||
vec![(
|
||||
predict_edits_v3::Point {
|
||||
line: Line(point.row),
|
||||
column: point.column,
|
||||
},
|
||||
"<|cursor|>",
|
||||
)]
|
||||
})
|
||||
.unwrap_or_default();
|
||||
let ranges: Vec<Range<Line>> = ranges
|
||||
.into_iter()
|
||||
.map(|range| {
|
||||
let point_range = range.to_point(&buffer);
|
||||
Line(point_range.start.row)..Line(point_range.end.row)
|
||||
})
|
||||
.collect();
|
||||
|
||||
let mut output = String::new();
|
||||
cloud_zeta2_prompt::write_excerpts(
|
||||
assemble_excerpts(&buffer.snapshot(), ranges).iter(),
|
||||
&insertions,
|
||||
Line(buffer.max_point().row),
|
||||
true,
|
||||
&mut output,
|
||||
);
|
||||
assert_eq!(output, expected_output);
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
fn rust_lang() -> Language {
|
||||
Language::new(
|
||||
LanguageConfig {
|
||||
name: "Rust".into(),
|
||||
matcher: LanguageMatcher {
|
||||
path_suffixes: vec!["rs".to_string()],
|
||||
..Default::default()
|
||||
},
|
||||
..Default::default()
|
||||
},
|
||||
Some(language::tree_sitter_rust::LANGUAGE.into()),
|
||||
)
|
||||
.with_outline_query(include_str!("../../languages/src/rust/outline.scm"))
|
||||
.unwrap()
|
||||
}
|
||||
}
|
||||
@@ -1,6 +1,7 @@
|
||||
use anyhow::Result;
|
||||
use cloud_zeta2_prompt::retrieval_prompt::SearchToolQuery;
|
||||
use collections::HashMap;
|
||||
use edit_prediction_context2::{RelatedExcerpt, RelatedFile};
|
||||
use futures::{
|
||||
StreamExt,
|
||||
channel::mpsc::{self, UnboundedSender},
|
||||
@@ -8,7 +9,7 @@ use futures::{
|
||||
use gpui::{AppContext, AsyncApp, Entity};
|
||||
use language::{Anchor, Buffer, BufferSnapshot, OffsetRangeExt, Point, ToOffset, ToPoint};
|
||||
use project::{
|
||||
Project, WorktreeSettings,
|
||||
Project, ProjectPath, WorktreeSettings,
|
||||
search::{SearchQuery, SearchResult},
|
||||
};
|
||||
use smol::channel;
|
||||
@@ -20,14 +21,14 @@ use util::{
|
||||
use workspace::item::Settings as _;
|
||||
|
||||
#[cfg(feature = "eval-support")]
|
||||
type CachedSearchResults = std::collections::BTreeMap<std::path::PathBuf, Vec<Range<usize>>>;
|
||||
type CachedSearchResults = std::collections::BTreeMap<std::path::PathBuf, Vec<Range<(u32, u32)>>>;
|
||||
|
||||
pub async fn run_retrieval_searches(
|
||||
queries: Vec<SearchToolQuery>,
|
||||
project: Entity<Project>,
|
||||
#[cfg(feature = "eval-support")] eval_cache: Option<std::sync::Arc<dyn crate::EvalCache>>,
|
||||
cx: &mut AsyncApp,
|
||||
) -> Result<HashMap<Entity<Buffer>, Vec<Range<Anchor>>>> {
|
||||
) -> Result<Vec<RelatedFile>> {
|
||||
#[cfg(feature = "eval-support")]
|
||||
let cache = if let Some(eval_cache) = eval_cache {
|
||||
use crate::EvalCacheEntryKind;
|
||||
@@ -54,24 +55,44 @@ pub async fn run_retrieval_searches(
|
||||
if let Some(cached_results) = eval_cache.read(key) {
|
||||
let file_results = serde_json::from_str::<CachedSearchResults>(&cached_results)
|
||||
.context("Failed to deserialize cached search results")?;
|
||||
let mut results = HashMap::default();
|
||||
let mut results = Vec::new();
|
||||
|
||||
for (path, ranges) in file_results {
|
||||
let project_path = project.update(cx, |project, cx| {
|
||||
project.find_project_path(path, cx).unwrap()
|
||||
})?;
|
||||
let buffer = project
|
||||
.update(cx, |project, cx| {
|
||||
let project_path = project.find_project_path(path, cx).unwrap();
|
||||
project.open_buffer(project_path, cx)
|
||||
project.open_buffer(project_path.clone(), cx)
|
||||
})?
|
||||
.await?;
|
||||
let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot())?;
|
||||
let mut ranges: Vec<_> = ranges
|
||||
.into_iter()
|
||||
.map(|range| {
|
||||
snapshot.anchor_before(range.start)..snapshot.anchor_after(range.end)
|
||||
})
|
||||
.map(
|
||||
|Range {
|
||||
start: (start_row, start_col),
|
||||
end: (end_row, end_col),
|
||||
}| {
|
||||
snapshot.anchor_before(Point::new(start_row, start_col))
|
||||
..snapshot.anchor_after(Point::new(end_row, end_col))
|
||||
},
|
||||
)
|
||||
.collect();
|
||||
merge_anchor_ranges(&mut ranges, &snapshot);
|
||||
results.insert(buffer, ranges);
|
||||
results.push(RelatedFile {
|
||||
path: project_path,
|
||||
buffer: buffer.downgrade(),
|
||||
excerpts: ranges
|
||||
.into_iter()
|
||||
.map(|range| RelatedExcerpt {
|
||||
point_range: range.to_point(&snapshot),
|
||||
text: snapshot.as_rope().slice(range.to_offset(&snapshot)),
|
||||
anchor_range: range,
|
||||
})
|
||||
.collect(),
|
||||
max_row: snapshot.max_point().row,
|
||||
});
|
||||
}
|
||||
|
||||
return Ok(results);
|
||||
@@ -117,14 +138,29 @@ pub async fn run_retrieval_searches(
|
||||
#[cfg(feature = "eval-support")]
|
||||
let cache = cache.clone();
|
||||
cx.background_spawn(async move {
|
||||
let mut results: HashMap<Entity<Buffer>, Vec<Range<Anchor>>> = HashMap::default();
|
||||
let mut results: Vec<RelatedFile> = Vec::default();
|
||||
let mut snapshots = HashMap::default();
|
||||
|
||||
let mut total_bytes = 0;
|
||||
'outer: while let Some((buffer, snapshot, excerpts)) = results_rx.next().await {
|
||||
snapshots.insert(buffer.entity_id(), snapshot);
|
||||
let existing = results.entry(buffer).or_default();
|
||||
existing.reserve(excerpts.len());
|
||||
'outer: while let Some((project_path, buffer, snapshot, excerpts)) = results_rx.next().await
|
||||
{
|
||||
let existing = results
|
||||
.iter_mut()
|
||||
.find(|related_file| related_file.buffer.entity_id() == buffer.entity_id());
|
||||
let existing = match existing {
|
||||
Some(existing) => existing,
|
||||
None => {
|
||||
results.push(RelatedFile {
|
||||
path: project_path,
|
||||
buffer: buffer.downgrade(),
|
||||
excerpts: Vec::new(),
|
||||
max_row: snapshot.max_point().row,
|
||||
});
|
||||
results.last_mut().unwrap()
|
||||
}
|
||||
};
existing.excerpts.reserve(excerpts.len());
|
||||
|
||||
for (range, size) in excerpts {
|
||||
// Blunt trimming of the results until we have a proper algorithmic filtering step
|
||||
@@ -133,24 +169,34 @@ pub async fn run_retrieval_searches(
|
||||
break 'outer;
|
||||
}
|
||||
total_bytes += size;
|
||||
existing.push(range);
|
||||
existing.excerpts.push(RelatedExcerpt {
|
||||
point_range: range.to_point(&snapshot),
|
||||
text: snapshot.as_rope().slice(range.to_offset(&snapshot)),
|
||||
anchor_range: range,
|
||||
});
|
||||
}
|
||||
snapshots.insert(buffer.entity_id(), snapshot);
|
||||
}
|
||||
|
||||
#[cfg(feature = "eval-support")]
|
||||
if let Some((cache, queries, key)) = cache {
|
||||
let cached_results: CachedSearchResults = results
|
||||
.iter()
|
||||
.filter_map(|(buffer, ranges)| {
|
||||
let snapshot = snapshots.get(&buffer.entity_id())?;
|
||||
let path = snapshot.file().map(|f| f.path());
|
||||
let mut ranges = ranges
|
||||
.map(|related_file| {
|
||||
let mut ranges = related_file
|
||||
.excerpts
|
||||
.iter()
|
||||
.map(|range| range.to_offset(&snapshot))
|
||||
.map(
|
||||
|RelatedExcerpt {
|
||||
point_range: Range { start, end },
|
||||
..
|
||||
}| {
|
||||
(start.row, start.column)..(end.row, end.column)
|
||||
},
|
||||
)
|
||||
.collect::<Vec<_>>();
|
||||
ranges.sort_unstable_by_key(|range| (range.start, range.end));
|
||||
|
||||
Some((path?.as_std_path().to_path_buf(), ranges))
|
||||
(related_file.path.path.as_std_path().to_path_buf(), ranges)
|
||||
})
|
||||
.collect();
|
||||
cache.write(
|
||||
@@ -160,10 +206,8 @@ pub async fn run_retrieval_searches(
|
||||
);
|
||||
}
|
||||
|
||||
for (buffer, ranges) in results.iter_mut() {
|
||||
if let Some(snapshot) = snapshots.get(&buffer.entity_id()) {
|
||||
merge_anchor_ranges(ranges, snapshot);
|
||||
}
|
||||
for related_file in results.iter_mut() {
|
||||
related_file.merge_excerpts();
|
||||
}
|
||||
|
||||
Ok(results)
|
||||
@@ -171,6 +215,7 @@ pub async fn run_retrieval_searches(
|
||||
.await
|
||||
}
|
||||
|
||||
#[cfg(feature = "eval-support")]
|
||||
pub(crate) fn merge_anchor_ranges(ranges: &mut Vec<Range<Anchor>>, snapshot: &BufferSnapshot) {
|
||||
ranges.sort_unstable_by(|a, b| {
|
||||
a.start
|
||||
@@ -201,6 +246,7 @@ const MAX_RESULTS_LEN: usize = MAX_EXCERPT_LEN * 5;
|
||||
struct SearchJob {
|
||||
buffer: Entity<Buffer>,
|
||||
snapshot: BufferSnapshot,
|
||||
project_path: ProjectPath,
|
||||
ranges: Vec<Range<usize>>,
|
||||
query_ix: usize,
|
||||
jobs_tx: channel::Sender<SearchJob>,
|
||||
@@ -208,7 +254,12 @@ struct SearchJob {
|
||||
|
||||
async fn run_query(
|
||||
input_query: SearchToolQuery,
|
||||
results_tx: UnboundedSender<(Entity<Buffer>, BufferSnapshot, Vec<(Range<Anchor>, usize)>)>,
|
||||
results_tx: UnboundedSender<(
|
||||
ProjectPath,
|
||||
Entity<Buffer>,
|
||||
BufferSnapshot,
|
||||
Vec<(Range<Anchor>, usize)>,
|
||||
)>,
|
||||
path_style: PathStyle,
|
||||
exclude_matcher: PathMatcher,
|
||||
project: &Entity<Project>,
|
||||
@@ -257,12 +308,21 @@ async fn run_query(
|
||||
.read_with(cx, |buffer, _| buffer.parsing_idle())?
|
||||
.await;
|
||||
let snapshot = buffer.read_with(cx, |buffer, _cx| buffer.snapshot())?;
|
||||
let Some(file) = snapshot.file() else {
|
||||
continue;
|
||||
};
|
||||
|
||||
let project_path = cx.update(|cx| ProjectPath {
|
||||
worktree_id: file.worktree_id(cx),
|
||||
path: file.path().clone(),
|
||||
})?;
|
||||
let expanded_ranges: Vec<_> = ranges
|
||||
.into_iter()
|
||||
.filter_map(|range| expand_to_parent_range(&range, &snapshot))
|
||||
.collect();
|
||||
jobs_tx
|
||||
.send(SearchJob {
|
||||
project_path,
|
||||
buffer,
|
||||
snapshot,
|
||||
ranges: expanded_ranges,
|
||||
@@ -301,6 +361,13 @@ async fn run_query(
|
||||
|
||||
while let Some(SearchResult::Buffer { buffer, ranges }) = results_rx.next().await {
|
||||
let snapshot = buffer.read_with(cx, |buffer, _cx| buffer.snapshot())?;
|
||||
let Some(file) = snapshot.file() else {
|
||||
continue;
|
||||
};
|
||||
let project_path = cx.update(|cx| ProjectPath {
|
||||
worktree_id: file.worktree_id(cx),
|
||||
path: file.path().clone(),
|
||||
})?;
|
||||
|
||||
let ranges = ranges
|
||||
.into_iter()
|
||||
@@ -314,7 +381,8 @@ async fn run_query(
|
||||
})
|
||||
.collect();
|
||||
|
||||
let send_result = results_tx.unbounded_send((buffer.clone(), snapshot.clone(), ranges));
|
||||
let send_result =
|
||||
results_tx.unbounded_send((project_path, buffer.clone(), snapshot.clone(), ranges));
|
||||
|
||||
if let Err(err) = send_result
|
||||
&& !err.is_disconnected()
|
||||
@@ -330,7 +398,12 @@ async fn run_query(
|
||||
}
|
||||
|
||||
async fn process_nested_search_job(
|
||||
results_tx: &UnboundedSender<(Entity<Buffer>, BufferSnapshot, Vec<(Range<Anchor>, usize)>)>,
|
||||
results_tx: &UnboundedSender<(
|
||||
ProjectPath,
|
||||
Entity<Buffer>,
|
||||
BufferSnapshot,
|
||||
Vec<(Range<Anchor>, usize)>,
|
||||
)>,
|
||||
queries: &Vec<SearchQuery>,
|
||||
content_query: &Option<SearchQuery>,
|
||||
job: SearchJob,
|
||||
@@ -347,6 +420,7 @@ async fn process_nested_search_job(
|
||||
}
|
||||
job.jobs_tx
|
||||
.send(SearchJob {
|
||||
project_path: job.project_path,
|
||||
buffer: job.buffer,
|
||||
snapshot: job.snapshot,
|
||||
ranges: subranges,
|
||||
@@ -382,7 +456,8 @@ async fn process_nested_search_job(
|
||||
})
|
||||
.collect();
|
||||
|
||||
let send_result = results_tx.unbounded_send((job.buffer, job.snapshot, matches));
|
||||
let send_result =
|
||||
results_tx.unbounded_send((job.project_path, job.buffer, job.snapshot, matches));
|
||||
|
||||
if let Err(err) = send_result
|
||||
&& !err.is_disconnected()
|
||||
@@ -413,230 +488,3 @@ fn expand_to_parent_range<T: ToPoint + ToOffset>(
|
||||
let node = snapshot.syntax_ancestor(line_range)?;
|
||||
Some(node.byte_range())
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
use crate::assemble_excerpts::assemble_excerpts;
|
||||
use cloud_zeta2_prompt::write_codeblock;
|
||||
use edit_prediction_context::Line;
|
||||
use gpui::TestAppContext;
|
||||
use indoc::indoc;
|
||||
use language::{Language, LanguageConfig, LanguageMatcher, tree_sitter_rust};
|
||||
use pretty_assertions::assert_eq;
|
||||
use project::FakeFs;
|
||||
use serde_json::json;
|
||||
use settings::SettingsStore;
|
||||
use std::path::Path;
|
||||
use util::path;
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_retrieval(cx: &mut TestAppContext) {
|
||||
init_test(cx);
|
||||
let fs = FakeFs::new(cx.executor());
|
||||
fs.insert_tree(
|
||||
path!("/root"),
|
||||
json!({
|
||||
"user.rs": indoc!{"
|
||||
pub struct Organization {
|
||||
owner: Arc<User>,
|
||||
}
|
||||
|
||||
pub struct User {
|
||||
first_name: String,
|
||||
last_name: String,
|
||||
}
|
||||
|
||||
impl Organization {
|
||||
pub fn owner(&self) -> Arc<User> {
|
||||
self.owner.clone()
|
||||
}
|
||||
}
|
||||
|
||||
impl User {
|
||||
pub fn new(first_name: String, last_name: String) -> Self {
|
||||
Self {
|
||||
first_name,
|
||||
last_name
|
||||
}
|
||||
}
|
||||
|
||||
pub fn first_name(&self) -> String {
|
||||
self.first_name.clone()
|
||||
}
|
||||
|
||||
pub fn last_name(&self) -> String {
|
||||
self.last_name.clone()
|
||||
}
|
||||
}
|
||||
"},
|
||||
"main.rs": indoc!{r#"
|
||||
fn main() {
|
||||
let user = User::new(FIRST_NAME.clone(), "doe".into());
|
||||
println!("user {:?}", user);
|
||||
}
|
||||
"#},
|
||||
}),
|
||||
)
|
||||
.await;
|
||||
|
||||
let project = Project::test(fs, vec![Path::new(path!("/root"))], cx).await;
|
||||
project.update(cx, |project, _cx| {
|
||||
project.languages().add(rust_lang().into())
|
||||
});
|
||||
|
||||
assert_results(
|
||||
&project,
|
||||
SearchToolQuery {
|
||||
glob: "user.rs".into(),
|
||||
syntax_node: vec!["impl\\s+User".into(), "pub\\s+fn\\s+first_name".into()],
|
||||
content: None,
|
||||
},
|
||||
indoc! {r#"
|
||||
`````root/user.rs
|
||||
…
|
||||
impl User {
|
||||
…
|
||||
pub fn first_name(&self) -> String {
|
||||
self.first_name.clone()
|
||||
}
|
||||
…
|
||||
`````
|
||||
"#},
|
||||
cx,
|
||||
)
|
||||
.await;
|
||||
|
||||
assert_results(
|
||||
&project,
|
||||
SearchToolQuery {
|
||||
glob: "user.rs".into(),
|
||||
syntax_node: vec!["impl\\s+User".into()],
|
||||
content: Some("\\.clone".into()),
|
||||
},
|
||||
indoc! {r#"
|
||||
`````root/user.rs
|
||||
…
|
||||
impl User {
|
||||
…
|
||||
pub fn first_name(&self) -> String {
|
||||
self.first_name.clone()
|
||||
…
|
||||
pub fn last_name(&self) -> String {
|
||||
self.last_name.clone()
|
||||
…
|
||||
`````
|
||||
"#},
|
||||
cx,
|
||||
)
|
||||
.await;
|
||||
|
||||
assert_results(
|
||||
&project,
|
||||
SearchToolQuery {
|
||||
glob: "*.rs".into(),
|
||||
syntax_node: vec![],
|
||||
content: Some("\\.clone".into()),
|
||||
},
|
||||
indoc! {r#"
|
||||
`````root/main.rs
|
||||
fn main() {
|
||||
let user = User::new(FIRST_NAME.clone(), "doe".into());
|
||||
…
|
||||
`````
|
||||
|
||||
`````root/user.rs
|
||||
…
|
||||
impl Organization {
|
||||
pub fn owner(&self) -> Arc<User> {
|
||||
self.owner.clone()
|
||||
…
|
||||
impl User {
|
||||
…
|
||||
pub fn first_name(&self) -> String {
|
||||
self.first_name.clone()
|
||||
…
|
||||
pub fn last_name(&self) -> String {
|
||||
self.last_name.clone()
|
||||
…
|
||||
`````
|
||||
"#},
|
||||
cx,
|
||||
)
|
||||
.await;
|
||||
}
|
||||
|
||||
async fn assert_results(
|
||||
project: &Entity<Project>,
|
||||
query: SearchToolQuery,
|
||||
expected_output: &str,
|
||||
cx: &mut TestAppContext,
|
||||
) {
|
||||
let results = run_retrieval_searches(
|
||||
vec![query],
|
||||
project.clone(),
|
||||
#[cfg(feature = "eval-support")]
|
||||
None,
|
||||
&mut cx.to_async(),
|
||||
)
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
let mut results = results.into_iter().collect::<Vec<_>>();
|
||||
results.sort_by_key(|results| {
|
||||
results
|
||||
.0
|
||||
.read_with(cx, |buffer, _| buffer.file().unwrap().path().clone())
|
||||
});
|
||||
|
||||
let mut output = String::new();
|
||||
for (buffer, ranges) in results {
|
||||
buffer.read_with(cx, |buffer, cx| {
|
||||
let excerpts = ranges.into_iter().map(|range| {
|
||||
let point_range = range.to_point(buffer);
|
||||
if point_range.end.column > 0 {
|
||||
Line(point_range.start.row)..Line(point_range.end.row + 1)
|
||||
} else {
|
||||
Line(point_range.start.row)..Line(point_range.end.row)
|
||||
}
|
||||
});
|
||||
|
||||
write_codeblock(
|
||||
&buffer.file().unwrap().full_path(cx),
|
||||
assemble_excerpts(&buffer.snapshot(), excerpts).iter(),
|
||||
&[],
|
||||
Line(buffer.max_point().row),
|
||||
false,
|
||||
&mut output,
|
||||
);
|
||||
});
|
||||
}
|
||||
output.pop();
|
||||
|
||||
assert_eq!(output, expected_output);
|
||||
}
|
||||
|
||||
fn rust_lang() -> Language {
|
||||
Language::new(
|
||||
LanguageConfig {
|
||||
name: "Rust".into(),
|
||||
matcher: LanguageMatcher {
|
||||
path_suffixes: vec!["rs".to_string()],
|
||||
..Default::default()
|
||||
},
|
||||
..Default::default()
|
||||
},
|
||||
Some(tree_sitter_rust::LANGUAGE.into()),
|
||||
)
|
||||
.with_outline_query(include_str!("../../languages/src/rust/outline.scm"))
|
||||
.unwrap()
|
||||
}
|
||||
|
||||
fn init_test(cx: &mut TestAppContext) {
|
||||
cx.update(move |cx| {
|
||||
let settings_store = SettingsStore::test(cx);
|
||||
cx.set_global(settings_store);
|
||||
zlog::init_test();
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
use anyhow::{Context as _, Result};
|
||||
use cloud_llm_client::predict_edits_v3::Event;
|
||||
use credentials_provider::CredentialsProvider;
|
||||
use edit_prediction_context2::RelatedFile;
|
||||
use futures::{AsyncReadExt as _, FutureExt, future::Shared};
|
||||
use gpui::{
|
||||
App, AppContext as _, Entity, Task,
|
||||
@@ -49,6 +50,7 @@ impl SweepAi {
|
||||
position: language::Anchor,
|
||||
events: Vec<Arc<Event>>,
|
||||
recent_paths: &VecDeque<ProjectPath>,
|
||||
related_files: Vec<RelatedFile>,
|
||||
diagnostic_search_range: Range<Point>,
|
||||
cx: &mut App,
|
||||
) -> Task<Result<Option<EditPredictionResult>>> {
|
||||
@@ -120,6 +122,19 @@ impl SweepAi {
|
||||
})
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
let retrieval_chunks = related_files
|
||||
.iter()
|
||||
.flat_map(|related_file| {
|
||||
related_file.excerpts.iter().map(|excerpt| FileChunk {
|
||||
file_path: related_file.path.path.as_unix_str().to_string(),
|
||||
start_line: excerpt.point_range.start.row as usize,
|
||||
end_line: excerpt.point_range.end.row as usize,
|
||||
content: excerpt.text.to_string(),
|
||||
timestamp: None,
|
||||
})
|
||||
})
|
||||
.collect();
|
||||
|
||||
let diagnostic_entries = snapshot.diagnostics_in_range(diagnostic_search_range, false);
|
||||
let mut diagnostic_content = String::new();
|
||||
let mut diagnostic_count = 0;
|
||||
@@ -168,7 +183,7 @@ impl SweepAi {
|
||||
multiple_suggestions: false,
|
||||
branch: None,
|
||||
file_chunks,
|
||||
retrieval_chunks: vec![],
|
||||
retrieval_chunks,
|
||||
recent_user_actions: vec![],
|
||||
use_bytes: true,
|
||||
// TODO
|
||||
@@ -320,7 +335,7 @@ struct AutocompleteRequest {
|
||||
pub cursor_position: usize,
|
||||
pub original_file_contents: String,
|
||||
pub file_chunks: Vec<FileChunk>,
|
||||
pub retrieval_chunks: Vec<RetrievalChunk>,
|
||||
pub retrieval_chunks: Vec<FileChunk>,
|
||||
pub recent_user_actions: Vec<UserAction>,
|
||||
pub multiple_suggestions: bool,
|
||||
pub privacy_mode_enabled: bool,
|
||||
@@ -337,15 +352,6 @@ struct FileChunk {
|
||||
pub timestamp: Option<u64>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Serialize)]
|
||||
struct RetrievalChunk {
|
||||
pub file_path: String,
|
||||
pub start_line: usize,
|
||||
pub end_line: usize,
|
||||
pub content: String,
|
||||
pub timestamp: u64,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Serialize)]
|
||||
struct UserAction {
|
||||
pub action_type: ActionType,
|
||||
|
||||
File diff suppressed because it is too large
Load Diff
@@ -15,7 +15,6 @@ path = "src/zeta2_tools.rs"
|
||||
anyhow.workspace = true
|
||||
client.workspace = true
|
||||
cloud_llm_client.workspace = true
|
||||
cloud_zeta2_prompt.workspace = true
|
||||
collections.workspace = true
|
||||
edit_prediction_context.workspace = true
|
||||
editor.workspace = true
|
||||
|
||||
@@ -8,26 +8,25 @@ use std::{
|
||||
|
||||
use anyhow::Result;
|
||||
use client::{Client, UserStore};
|
||||
use cloud_zeta2_prompt::retrieval_prompt::SearchToolQuery;
|
||||
use editor::{Editor, PathKey};
|
||||
use futures::StreamExt as _;
|
||||
use gpui::{
|
||||
Animation, AnimationExt, App, AppContext as _, Context, Entity, EventEmitter, FocusHandle,
|
||||
Focusable, ParentElement as _, SharedString, Styled as _, Task, TextAlign, Window, actions,
|
||||
pulsating_between,
|
||||
Focusable, InteractiveElement as _, IntoElement as _, ParentElement as _, SharedString,
|
||||
Styled as _, Task, TextAlign, Window, actions, div, pulsating_between,
|
||||
};
|
||||
use multi_buffer::MultiBuffer;
|
||||
use project::Project;
|
||||
use text::OffsetRangeExt;
|
||||
use ui::{
|
||||
ButtonCommon, Clickable, Color, Disableable, FluentBuilder as _, Icon, IconButton, IconName,
|
||||
IconSize, InteractiveElement, IntoElement, ListHeader, ListItem, StyledTypography, div, h_flex,
|
||||
v_flex,
|
||||
ButtonCommon, Clickable, Disableable, FluentBuilder as _, IconButton, IconName,
|
||||
StyledTypography as _, h_flex, v_flex,
|
||||
};
|
||||
|
||||
use workspace::Item;
|
||||
use zeta::{
|
||||
Zeta, ZetaContextRetrievalDebugInfo, ZetaContextRetrievalStartedDebugInfo, ZetaDebugInfo,
|
||||
ZetaSearchQueryDebugInfo,
|
||||
Zeta, ZetaContextRetrievalFinishedDebugInfo, ZetaContextRetrievalStartedDebugInfo,
|
||||
ZetaDebugInfo,
|
||||
};
|
||||
|
||||
pub struct Zeta2ContextView {
|
||||
@@ -42,10 +41,8 @@ pub struct Zeta2ContextView {
|
||||
#[derive(Debug)]
|
||||
struct RetrievalRun {
|
||||
editor: Entity<Editor>,
|
||||
search_queries: Vec<SearchToolQuery>,
|
||||
started_at: Instant,
|
||||
search_results_generated_at: Option<Instant>,
|
||||
search_results_executed_at: Option<Instant>,
|
||||
metadata: Vec<(&'static str, SharedString)>,
|
||||
finished_at: Option<Instant>,
|
||||
}
|
||||
|
||||
@@ -97,22 +94,12 @@ impl Zeta2ContextView {
|
||||
) {
|
||||
match event {
|
||||
ZetaDebugInfo::ContextRetrievalStarted(info) => {
|
||||
if info.project == self.project {
|
||||
if info.project_entity_id == self.project.entity_id() {
|
||||
self.handle_context_retrieval_started(info, window, cx);
|
||||
}
|
||||
}
|
||||
ZetaDebugInfo::SearchQueriesGenerated(info) => {
|
||||
if info.project == self.project {
|
||||
self.handle_search_queries_generated(info, window, cx);
|
||||
}
|
||||
}
|
||||
ZetaDebugInfo::SearchQueriesExecuted(info) => {
|
||||
if info.project == self.project {
|
||||
self.handle_search_queries_executed(info, window, cx);
|
||||
}
|
||||
}
|
||||
ZetaDebugInfo::ContextRetrievalFinished(info) => {
|
||||
if info.project == self.project {
|
||||
if info.project_entity_id == self.project.entity_id() {
|
||||
self.handle_context_retrieval_finished(info, window, cx);
|
||||
}
|
||||
}
|
||||
@@ -129,7 +116,7 @@ impl Zeta2ContextView {
|
||||
if self
|
||||
.runs
|
||||
.back()
|
||||
.is_some_and(|run| run.search_results_executed_at.is_none())
|
||||
.is_some_and(|run| run.finished_at.is_none())
|
||||
{
|
||||
self.runs.pop_back();
|
||||
}
|
||||
@@ -144,11 +131,9 @@ impl Zeta2ContextView {
|
||||
|
||||
self.runs.push_back(RetrievalRun {
|
||||
editor,
|
||||
search_queries: Vec::new(),
|
||||
started_at: info.timestamp,
|
||||
search_results_generated_at: None,
|
||||
search_results_executed_at: None,
|
||||
finished_at: None,
|
||||
metadata: Vec::new(),
|
||||
});
|
||||
|
||||
cx.notify();
|
||||
@@ -156,7 +141,7 @@ impl Zeta2ContextView {
|
||||
|
||||
fn handle_context_retrieval_finished(
|
||||
&mut self,
|
||||
info: ZetaContextRetrievalDebugInfo,
|
||||
info: ZetaContextRetrievalFinishedDebugInfo,
|
||||
window: &mut Window,
|
||||
cx: &mut Context<Self>,
|
||||
) {
|
||||
@@ -165,67 +150,72 @@ impl Zeta2ContextView {
|
||||
};
|
||||
|
||||
run.finished_at = Some(info.timestamp);
|
||||
run.metadata = info.metadata;
|
||||
|
||||
let project = self.project.clone();
|
||||
let related_files = self
|
||||
.zeta
|
||||
.read(cx)
|
||||
.context_for_project(&self.project, cx)
|
||||
.to_vec();
|
||||
|
||||
let editor = run.editor.clone();
|
||||
let multibuffer = run.editor.read(cx).buffer().clone();
|
||||
multibuffer.update(cx, |multibuffer, cx| {
|
||||
multibuffer.clear(cx);
|
||||
|
||||
let context = self.zeta.read(cx).context_for_project(&self.project);
|
||||
let mut paths = Vec::new();
|
||||
for (buffer, ranges) in context {
|
||||
let path = PathKey::for_buffer(&buffer, cx);
|
||||
let snapshot = buffer.read(cx).snapshot();
|
||||
let ranges = ranges
|
||||
.iter()
|
||||
.map(|range| range.to_point(&snapshot))
|
||||
.collect::<Vec<_>>();
|
||||
paths.push((path, buffer, ranges));
|
||||
}
|
||||
|
||||
for (path, buffer, ranges) in paths {
|
||||
multibuffer.set_excerpts_for_path(path, buffer, ranges, 0, cx);
|
||||
}
|
||||
});
|
||||
|
||||
run.editor.update(cx, |editor, cx| {
|
||||
editor.move_to_beginning(&Default::default(), window, cx);
|
||||
});
|
||||
|
||||
cx.notify();
|
||||
}
|
||||
|
||||
fn handle_search_queries_generated(
|
||||
&mut self,
|
||||
info: ZetaSearchQueryDebugInfo,
|
||||
_window: &mut Window,
|
||||
cx: &mut Context<Self>,
|
||||
) {
|
||||
let Some(run) = self.runs.back_mut() else {
|
||||
return;
|
||||
};
|
||||
|
||||
run.search_results_generated_at = Some(info.timestamp);
|
||||
run.search_queries = info.search_queries;
|
||||
cx.notify();
|
||||
}
|
||||
|
||||
fn handle_search_queries_executed(
|
||||
&mut self,
|
||||
info: ZetaContextRetrievalDebugInfo,
|
||||
_window: &mut Window,
|
||||
cx: &mut Context<Self>,
|
||||
) {
|
||||
if self.current_ix + 2 == self.runs.len() {
|
||||
// Switch to latest when the queries are executed
|
||||
self.current_ix += 1;
|
||||
}
|
||||
|
||||
let Some(run) = self.runs.back_mut() else {
|
||||
return;
|
||||
};
|
||||
cx.spawn_in(window, async move |this, cx| {
|
||||
let mut paths = Vec::new();
|
||||
for related_file in related_files {
|
||||
let (buffer, point_ranges): (_, Vec<_>) =
|
||||
if let Some(buffer) = related_file.buffer.upgrade() {
|
||||
let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot())?;
|
||||
|
||||
run.search_results_executed_at = Some(info.timestamp);
|
||||
cx.notify();
|
||||
(
|
||||
buffer,
|
||||
related_file
|
||||
.excerpts
|
||||
.iter()
|
||||
.map(|excerpt| excerpt.anchor_range.to_point(&snapshot))
|
||||
.collect(),
|
||||
)
|
||||
} else {
|
||||
(
|
||||
project
|
||||
.update(cx, |project, cx| {
|
||||
project.open_buffer(related_file.path.clone(), cx)
|
||||
})?
|
||||
.await?,
|
||||
related_file
|
||||
.excerpts
|
||||
.iter()
|
||||
.map(|excerpt| excerpt.point_range.clone())
|
||||
.collect(),
|
||||
)
|
||||
};
|
||||
cx.update(|_, cx| {
|
||||
let path = PathKey::for_buffer(&buffer, cx);
|
||||
paths.push((path, buffer, point_ranges));
|
||||
})?;
|
||||
}
|
||||
|
||||
multibuffer.update(cx, |multibuffer, cx| {
|
||||
multibuffer.clear(cx);
|
||||
|
||||
for (path, buffer, ranges) in paths {
|
||||
multibuffer.set_excerpts_for_path(path, buffer, ranges, 0, cx);
|
||||
}
|
||||
})?;
|
||||
|
||||
editor.update_in(cx, |editor, window, cx| {
|
||||
editor.move_to_beginning(&Default::default(), window, cx);
|
||||
})?;
|
||||
|
||||
this.update(cx, |_, cx| cx.notify())
|
||||
})
|
||||
.detach();
|
||||
}
|
||||
|
||||
fn handle_go_back(
|
||||
@@ -254,8 +244,11 @@ impl Zeta2ContextView {
|
||||
}
|
||||
|
||||
fn render_informational_footer(&self, cx: &mut Context<'_, Zeta2ContextView>) -> ui::Div {
|
||||
let is_latest = self.runs.len() == self.current_ix + 1;
|
||||
let run = &self.runs[self.current_ix];
|
||||
let new_run_started = self
|
||||
.runs
|
||||
.back()
|
||||
.map_or(false, |latest_run| latest_run.finished_at.is_none());
|
||||
|
||||
h_flex()
|
||||
.p_2()
|
||||
@@ -264,114 +257,65 @@ impl Zeta2ContextView {
|
||||
.text_xs()
|
||||
.border_t_1()
|
||||
.gap_2()
|
||||
.child(v_flex().h_full().flex_1().child({
|
||||
let t0 = run.started_at;
|
||||
let mut table = ui::Table::<2>::new().width(ui::px(300.)).no_ui_font();
|
||||
for (key, value) in &run.metadata {
|
||||
table = table.row([key.into_any_element(), value.clone().into_any_element()])
|
||||
}
|
||||
table = table.row([
|
||||
"Total Time".into_any_element(),
|
||||
format!("{} ms", (run.finished_at.unwrap_or(t0) - t0).as_millis())
|
||||
.into_any_element(),
|
||||
]);
|
||||
table
|
||||
}))
|
||||
.child(
|
||||
v_flex().h_full().flex_1().children(
|
||||
run.search_queries
|
||||
.iter()
|
||||
.enumerate()
|
||||
.flat_map(|(ix, query)| {
|
||||
std::iter::once(ListHeader::new(query.glob.clone()).into_any_element())
|
||||
.chain(query.syntax_node.iter().enumerate().map(
|
||||
move |(regex_ix, regex)| {
|
||||
ListItem::new(ix * 100 + regex_ix)
|
||||
.start_slot(
|
||||
Icon::new(IconName::MagnifyingGlass)
|
||||
.color(Color::Muted)
|
||||
.size(IconSize::Small),
|
||||
)
|
||||
.child(regex.clone())
|
||||
.into_any_element()
|
||||
},
|
||||
v_flex().h_full().text_align(TextAlign::Right).child(
|
||||
h_flex()
|
||||
.justify_end()
|
||||
.child(
|
||||
IconButton::new("go-back", IconName::ChevronLeft)
|
||||
.disabled(self.current_ix == 0 || self.runs.len() < 2)
|
||||
.tooltip(ui::Tooltip::for_action_title(
|
||||
"Go to previous run",
|
||||
&Zeta2ContextGoBack,
|
||||
))
|
||||
.chain(query.content.as_ref().map(move |regex| {
|
||||
ListItem::new(ix * 100 + query.syntax_node.len())
|
||||
.start_slot(
|
||||
Icon::new(IconName::MagnifyingGlass)
|
||||
.color(Color::Muted)
|
||||
.size(IconSize::Small),
|
||||
.on_click(cx.listener(|this, _, window, cx| {
|
||||
this.handle_go_back(&Zeta2ContextGoBack, window, cx);
|
||||
})),
|
||||
)
|
||||
.child(
|
||||
div()
|
||||
.child(format!("{}/{}", self.current_ix + 1, self.runs.len()))
|
||||
.map(|this| {
|
||||
if new_run_started {
|
||||
this.with_animation(
|
||||
"pulsating-count",
|
||||
Animation::new(Duration::from_secs(2))
|
||||
.repeat()
|
||||
.with_easing(pulsating_between(0.4, 0.8)),
|
||||
|label, delta| label.opacity(delta),
|
||||
)
|
||||
.child(regex.clone())
|
||||
.into_any_element()
|
||||
}))
|
||||
}),
|
||||
} else {
|
||||
this.into_any_element()
|
||||
}
|
||||
}),
|
||||
)
|
||||
.child(
|
||||
IconButton::new("go-forward", IconName::ChevronRight)
|
||||
.disabled(self.current_ix + 1 == self.runs.len())
|
||||
.tooltip(ui::Tooltip::for_action_title(
|
||||
"Go to next run",
|
||||
&Zeta2ContextGoBack,
|
||||
))
|
||||
.on_click(cx.listener(|this, _, window, cx| {
|
||||
this.handle_go_forward(&Zeta2ContextGoForward, window, cx);
|
||||
})),
|
||||
),
|
||||
),
|
||||
)
|
||||
.child(
|
||||
v_flex()
|
||||
.h_full()
|
||||
.text_align(TextAlign::Right)
|
||||
.child(
|
||||
h_flex()
|
||||
.justify_end()
|
||||
.child(
|
||||
IconButton::new("go-back", IconName::ChevronLeft)
|
||||
.disabled(self.current_ix == 0 || self.runs.len() < 2)
|
||||
.tooltip(ui::Tooltip::for_action_title(
|
||||
"Go to previous run",
|
||||
&Zeta2ContextGoBack,
|
||||
))
|
||||
.on_click(cx.listener(|this, _, window, cx| {
|
||||
this.handle_go_back(&Zeta2ContextGoBack, window, cx);
|
||||
})),
|
||||
)
|
||||
.child(
|
||||
div()
|
||||
.child(format!("{}/{}", self.current_ix + 1, self.runs.len()))
|
||||
.map(|this| {
|
||||
if self.runs.back().is_some_and(|back| {
|
||||
back.search_results_executed_at.is_none()
|
||||
}) {
|
||||
this.with_animation(
|
||||
"pulsating-count",
|
||||
Animation::new(Duration::from_secs(2))
|
||||
.repeat()
|
||||
.with_easing(pulsating_between(0.4, 0.8)),
|
||||
|label, delta| label.opacity(delta),
|
||||
)
|
||||
.into_any_element()
|
||||
} else {
|
||||
this.into_any_element()
|
||||
}
|
||||
}),
|
||||
)
|
||||
.child(
|
||||
IconButton::new("go-forward", IconName::ChevronRight)
|
||||
.disabled(self.current_ix + 1 == self.runs.len())
|
||||
.tooltip(ui::Tooltip::for_action_title(
|
||||
"Go to next run",
|
||||
&Zeta2ContextGoBack,
|
||||
))
|
||||
.on_click(cx.listener(|this, _, window, cx| {
|
||||
this.handle_go_forward(&Zeta2ContextGoForward, window, cx);
|
||||
})),
|
||||
),
|
||||
)
|
||||
.map(|mut div| {
|
||||
let pending_message = |div: ui::Div, msg: &'static str| {
|
||||
if is_latest {
|
||||
return div.child(msg);
|
||||
} else {
|
||||
return div.child("Canceled");
|
||||
}
|
||||
};
|
||||
|
||||
let t0 = run.started_at;
|
||||
let Some(t1) = run.search_results_generated_at else {
|
||||
return pending_message(div, "Planning search...");
|
||||
};
|
||||
div = div.child(format!("Planned search: {:>5} ms", (t1 - t0).as_millis()));
|
||||
|
||||
let Some(t2) = run.search_results_executed_at else {
|
||||
return pending_message(div, "Running search...");
|
||||
};
|
||||
div = div.child(format!("Ran search: {:>5} ms", (t2 - t1).as_millis()));
|
||||
|
||||
div.child(format!(
|
||||
"Total: {:>5} ms",
|
||||
(run.finished_at.unwrap_or(t0) - t0).as_millis()
|
||||
))
|
||||
}),
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -108,6 +108,7 @@ pub struct Zeta2Inspector {
|
||||
|
||||
pub enum ContextModeState {
|
||||
Llm,
|
||||
Lsp,
|
||||
Syntax {
|
||||
max_retrieved_declarations: Entity<InputField>,
|
||||
},
|
||||
@@ -222,6 +223,9 @@ impl Zeta2Inspector {
|
||||
),
|
||||
};
|
||||
}
|
||||
ContextMode::Lsp(_) => {
|
||||
self.context_mode = ContextModeState::Lsp;
}
}
cx.notify();
}
@@ -302,6 +306,9 @@ impl Zeta2Inspector {
ContextModeState::Syntax {
max_retrieved_declarations,
} => number_input_value(max_retrieved_declarations, cx),
ContextModeState::Lsp => {
zeta::DEFAULT_SYNTAX_CONTEXT_OPTIONS.max_retrieved_declarations
}
};

ContextMode::Syntax(EditPredictionContextOptions {
@@ -310,6 +317,7 @@ impl Zeta2Inspector {
..context_options
})
}
ContextMode::Lsp(excerpt_options) => ContextMode::Lsp(excerpt_options),
};

this.set_zeta_options(
@@ -656,6 +664,7 @@ impl Zeta2Inspector {
ContextModeState::Syntax {
max_retrieved_declarations,
} => Some(max_retrieved_declarations.clone()),
ContextModeState::Lsp => None,
})
.child(self.max_prompt_bytes_input.clone())
.child(self.render_prompt_format_dropdown(window, cx)),
@@ -679,6 +688,7 @@ impl Zeta2Inspector {
match &self.context_mode {
ContextModeState::Llm => "LLM-based",
ContextModeState::Syntax { .. } => "Syntax",
ContextModeState::Lsp => "LSP-based",
},
ContextMenu::build(window, cx, move |menu, _window, _cx| {
menu.item(
@@ -695,6 +705,7 @@ impl Zeta2Inspector {
this.zeta.read(cx).options().clone();
match current_options.context.clone() {
ContextMode::Agentic(_) => {}
ContextMode::Lsp(_) => {}
ContextMode::Syntax(context_options) => {
let options = ZetaOptions {
context: ContextMode::Agentic(
@@ -739,6 +750,7 @@ impl Zeta2Inspector {
this.set_zeta_options(options, cx);
}
ContextMode::Syntax(_) => {}
ContextMode::Lsp(_) => {}
}
})
.ok();

@@ -21,15 +21,12 @@ use ::util::paths::PathStyle;
use anyhow::{Result, anyhow};
use clap::{Args, Parser, Subcommand, ValueEnum};
use cloud_llm_client::predict_edits_v3;
use edit_prediction_context::{
EditPredictionContextOptions, EditPredictionExcerptOptions, EditPredictionScoreOptions,
};
use edit_prediction_context::EditPredictionExcerptOptions;
use gpui::{Application, AsyncApp, Entity, prelude::*};
use language::{Bias, Buffer, BufferSnapshot, Point};
use metrics::delta_chr_f;
use project::{Project, Worktree};
use project::{Project, Worktree, lsp_store::OpenLspBufferHandle};
use reqwest_client::ReqwestClient;
use serde_json::json;
use std::io::{self};
use std::time::Duration;
use std::{collections::HashSet, path::PathBuf, str::FromStr, sync::Arc};
@@ -97,7 +94,7 @@ struct ContextArgs {
enum ContextProvider {
Zeta1,
#[default]
Syntax,
Zeta2,
}

#[derive(Clone, Debug, Args)]
@@ -204,19 +201,12 @@ enum PredictionProvider {
Sweep,
}

fn zeta2_args_to_options(args: &Zeta2Args, omit_excerpt_overlaps: bool) -> zeta::ZetaOptions {
fn zeta2_args_to_options(args: &Zeta2Args) -> zeta::ZetaOptions {
zeta::ZetaOptions {
context: ContextMode::Syntax(EditPredictionContextOptions {
max_retrieved_declarations: args.max_retrieved_definitions,
use_imports: !args.disable_imports_gathering,
excerpt: EditPredictionExcerptOptions {
max_bytes: args.max_excerpt_bytes,
min_bytes: args.min_excerpt_bytes,
target_before_cursor_over_total_bytes: args.target_before_cursor_over_total_bytes,
},
score: EditPredictionScoreOptions {
omit_excerpt_overlaps,
},
context: ContextMode::Lsp(EditPredictionExcerptOptions {
max_bytes: args.max_excerpt_bytes,
min_bytes: args.min_excerpt_bytes,
target_before_cursor_over_total_bytes: args.target_before_cursor_over_total_bytes,
}),
max_diagnostic_bytes: args.max_diagnostic_bytes,
max_prompt_bytes: args.max_prompt_bytes,
@@ -295,6 +285,7 @@ struct LoadedContext {
worktree: Entity<Worktree>,
project: Entity<Project>,
buffer: Entity<Buffer>,
lsp_open_handle: Option<OpenLspBufferHandle>,
}

async fn load_context(
@@ -330,7 +321,7 @@ async fn load_context(
.await?;

let mut ready_languages = HashSet::default();
let (_lsp_open_handle, buffer) = if *use_language_server {
let (lsp_open_handle, buffer) = if *use_language_server {
let (lsp_open_handle, _, buffer) = open_buffer_with_language_server(
project.clone(),
worktree.clone(),
@@ -377,10 +368,11 @@ async fn load_context(
worktree,
project,
buffer,
lsp_open_handle,
})
}

async fn zeta2_syntax_context(
async fn zeta2_context(
args: ContextArgs,
app_state: &Arc<ZetaCliAppState>,
cx: &mut AsyncApp,
@@ -390,6 +382,7 @@ async fn zeta2_syntax_context(
project,
buffer,
clipped_cursor,
lsp_open_handle: _handle,
..
} = load_context(&args, app_state, cx).await?;

@@ -406,30 +399,26 @@ async fn zeta2_syntax_context(
zeta::Zeta::new(app_state.client.clone(), app_state.user_store.clone(), cx)
});
let indexing_done_task = zeta.update(cx, |zeta, cx| {
zeta.set_options(zeta2_args_to_options(&args.zeta2_args, true));
zeta.set_options(zeta2_args_to_options(&args.zeta2_args));
zeta.register_buffer(&buffer, &project, cx);
zeta.wait_for_initial_indexing(&project, cx)
});
cx.spawn(async move |cx| {
indexing_done_task.await?;
let request = zeta
.update(cx, |zeta, cx| {
let cursor = buffer.read(cx).snapshot().anchor_before(clipped_cursor);
zeta.cloud_request_for_zeta_cli(&project, &buffer, cursor, cx)
})?
.await?;
let updates_rx = zeta.update(cx, |zeta, cx| {
let cursor = buffer.read(cx).snapshot().anchor_before(clipped_cursor);
zeta.set_use_context(true);
zeta.refresh_context_if_needed(&project, &buffer, cursor, cx);
zeta.project_context_updates(&project).unwrap()
})?;

let (prompt_string, section_labels) = cloud_zeta2_prompt::build_prompt(&request)?;
updates_rx.recv().await.ok();

match args.zeta2_args.output_format {
OutputFormat::Prompt => anyhow::Ok(prompt_string),
OutputFormat::Request => anyhow::Ok(serde_json::to_string_pretty(&request)?),
OutputFormat::Full => anyhow::Ok(serde_json::to_string_pretty(&json!({
"request": request,
"prompt": prompt_string,
"section_labels": section_labels,
}))?),
}
let context = zeta.update(cx, |zeta, cx| {
zeta.context_for_project(&project, cx).to_vec()
})?;

anyhow::Ok(serde_json::to_string_pretty(&context).unwrap())
})
})?
.await?;
@@ -482,7 +471,6 @@ fn main() {
None => {
if args.printenv {
::util::shell_env::print_env();
return;
} else {
panic!("Expected a command");
}
@@ -494,7 +482,7 @@ fn main() {
arguments.extension,
arguments.limit,
arguments.skip,
zeta2_args_to_options(&arguments.zeta2_args, false),
zeta2_args_to_options(&arguments.zeta2_args),
cx,
)
.await;
@@ -507,10 +495,8 @@ fn main() {
zeta1_context(context_args, &app_state, cx).await.unwrap();
serde_json::to_string_pretty(&context.body).unwrap()
}
ContextProvider::Syntax => {
zeta2_syntax_context(context_args, &app_state, cx)
.await
.unwrap()
ContextProvider::Zeta2 => {
zeta2_context(context_args, &app_state, cx).await.unwrap()
}
};
println!("{}", result);

@@ -136,8 +136,7 @@ pub async fn perform_predict(
let result = result.clone();
async move {
let mut start_time = None;
let mut search_queries_generated_at = None;
let mut search_queries_executed_at = None;
let mut retrieval_finished_at = None;
while let Some(event) = debug_rx.next().await {
match event {
zeta::ZetaDebugInfo::ContextRetrievalStarted(info) => {
@@ -147,17 +146,17 @@ pub async fn perform_predict(
&info.search_prompt,
)?;
}
zeta::ZetaDebugInfo::SearchQueriesGenerated(info) => {
search_queries_generated_at = Some(info.timestamp);
fs::write(
example_run_dir.join("search_queries.json"),
serde_json::to_string_pretty(&info.search_queries).unwrap(),
)?;
zeta::ZetaDebugInfo::ContextRetrievalFinished(info) => {
retrieval_finished_at = Some(info.timestamp);
for (key, value) in &info.metadata {
if *key == "search_queries" {
fs::write(
example_run_dir.join("search_queries.json"),
value.as_bytes(),
)?;
}
}
}
zeta::ZetaDebugInfo::SearchQueriesExecuted(info) => {
search_queries_executed_at = Some(info.timestamp);
}
zeta::ZetaDebugInfo::ContextRetrievalFinished(_info) => {}
zeta::ZetaDebugInfo::EditPredictionRequested(request) => {
let prediction_started_at = Instant::now();
start_time.get_or_insert(prediction_started_at);
@@ -200,13 +199,8 @@ pub async fn perform_predict(

let mut result = result.lock().unwrap();
result.generated_len = response.chars().count();

result.planning_search_time =
Some(search_queries_generated_at.unwrap() - start_time.unwrap());
result.running_search_time = Some(
search_queries_executed_at.unwrap()
- search_queries_generated_at.unwrap(),
);
result.retrieval_time =
retrieval_finished_at.unwrap() - start_time.unwrap();
result.prediction_time = prediction_finished_at - prediction_started_at;
result.total_time = prediction_finished_at - start_time.unwrap();

@@ -219,7 +213,12 @@ pub async fn perform_predict(
});

zeta.update(cx, |zeta, cx| {
zeta.refresh_context(project.clone(), cursor_buffer.clone(), cursor_anchor, cx)
zeta.refresh_context_with_agentic_retrieval(
project.clone(),
cursor_buffer.clone(),
cursor_anchor,
cx,
)
})?
.await?;
}
@@ -321,8 +320,7 @@ pub struct PredictionDetails {
pub diff: String,
pub excerpts: Vec<ActualExcerpt>,
pub excerpts_text: String, // TODO: contains the worktree root path. Drop this field and compute it on the fly
pub planning_search_time: Option<Duration>,
pub running_search_time: Option<Duration>,
pub retrieval_time: Duration,
pub prediction_time: Duration,
pub total_time: Duration,
pub run_example_dir: PathBuf,
@@ -336,8 +334,7 @@ impl PredictionDetails {
diff: Default::default(),
excerpts: Default::default(),
excerpts_text: Default::default(),
planning_search_time: Default::default(),
running_search_time: Default::default(),
retrieval_time: Default::default(),
prediction_time: Default::default(),
total_time: Default::default(),
run_example_dir,
@@ -357,28 +354,20 @@ impl PredictionDetails {
}

pub fn to_markdown(&self) -> String {
let inference_time = self.planning_search_time.unwrap_or_default() + self.prediction_time;

format!(
"## Excerpts\n\n\
{}\n\n\
## Prediction\n\n\
{}\n\n\
## Time\n\n\
Planning searches: {}ms\n\
Running searches: {}ms\n\
Making Prediction: {}ms\n\n\
-------------------\n\n\
Total: {}ms\n\
Inference: {}ms ({:.2}%)\n",
Retrieval: {}ms\n\
Prediction: {}ms\n\n\
Total: {}ms\n",
self.excerpts_text,
self.diff,
self.planning_search_time.unwrap_or_default().as_millis(),
self.running_search_time.unwrap_or_default().as_millis(),
self.retrieval_time.as_millis(),
self.prediction_time.as_millis(),
self.total_time.as_millis(),
inference_time.as_millis(),
(inference_time.as_millis() as f64 / self.total_time.as_millis() as f64) * 100.
)
}
}

@@ -2,7 +2,8 @@ use anyhow::{Result, anyhow};
use futures::channel::mpsc;
use futures::{FutureExt as _, StreamExt as _};
use gpui::{AsyncApp, Entity, Task};
use language::{Buffer, LanguageId, LanguageServerId, ParseStatus};
use language::{Buffer, LanguageId, LanguageNotFound, LanguageServerId, ParseStatus};
use project::lsp_store::OpenLspBufferHandle;
use project::{Project, ProjectPath, Worktree};
use std::collections::HashSet;
use std::sync::Arc;
@@ -40,7 +41,7 @@ pub async fn open_buffer_with_language_server(
path: Arc<RelPath>,
ready_languages: &mut HashSet<LanguageId>,
cx: &mut AsyncApp,
) -> Result<(Entity<Entity<Buffer>>, LanguageServerId, Entity<Buffer>)> {
) -> Result<(OpenLspBufferHandle, LanguageServerId, Entity<Buffer>)> {
let buffer = open_buffer(project.clone(), worktree, path.clone(), cx).await?;

let (lsp_open_handle, path_style) = project.update(cx, |project, cx| {
@@ -50,6 +51,17 @@ pub async fn open_buffer_with_language_server(
)
})?;

let language_registry = project.read_with(cx, |project, _| project.languages().clone())?;
let result = language_registry
.load_language_for_file_path(path.as_std_path())
.await;

if let Err(error) = result
&& !error.is::<LanguageNotFound>()
{
anyhow::bail!(error);
}

let Some(language_id) = buffer.read_with(cx, |buffer, _cx| {
buffer.language().map(|language| language.id())
})?
@@ -57,9 +69,9 @@ pub async fn open_buffer_with_language_server(
return Err(anyhow!("No language for {}", path.display(path_style)));
};

let log_prefix = path.display(path_style);
let log_prefix = format!("{} | ", path.display(path_style));
if !ready_languages.contains(&language_id) {
wait_for_lang_server(&project, &buffer, log_prefix.into_owned(), cx).await?;
wait_for_lang_server(&project, &buffer, log_prefix, cx).await?;
ready_languages.insert(language_id);
}

@@ -95,7 +107,7 @@ pub fn wait_for_lang_server(
log_prefix: String,
cx: &mut AsyncApp,
) -> Task<Result<()>> {
println!("{}⏵ Waiting for language server", log_prefix);
eprintln!("{}⏵ Waiting for language server", log_prefix);

let (mut tx, mut rx) = mpsc::channel(1);

@@ -137,7 +149,7 @@ pub fn wait_for_lang_server(
..
} = event
{
println!("{}⟲ {message}", log_prefix)
eprintln!("{}⟲ {message}", log_prefix)
}
}
}),
@@ -162,7 +174,7 @@ pub fn wait_for_lang_server(
cx.spawn(async move |cx| {
if !has_lang_server {
// some buffers never have a language server, so this aborts quickly in that case.
let timeout = cx.background_executor().timer(Duration::from_secs(5));
let timeout = cx.background_executor().timer(Duration::from_secs(500));
futures::select! {
_ = added_rx.next() => {},
_ = timeout.fuse() => {
@@ -173,7 +185,7 @@ pub fn wait_for_lang_server(
let timeout = cx.background_executor().timer(Duration::from_secs(60 * 5));
let result = futures::select! {
_ = rx.next() => {
println!("{}⚑ Language server idle", log_prefix);
eprintln!("{}⚑ Language server idle", log_prefix);
anyhow::Ok(())
},
_ = timeout.fuse() => {

@@ -89,12 +89,32 @@ To do this:

#### Cross-Region Inference

The Zed implementation of Amazon Bedrock uses [Cross-Region inference](https://docs.aws.amazon.com/bedrock/latest/userguide/cross-region-inference.html) for all the models and region combinations that support it.
The Zed implementation of Amazon Bedrock uses [Cross-Region inference](https://docs.aws.amazon.com/bedrock/latest/userguide/cross-region-inference.html) to improve availability and throughput.
With Cross-Region inference, you can distribute traffic across multiple AWS Regions, enabling higher throughput.

For example, if you use `Claude Sonnet 3.7 Thinking` from `us-east-1`, it may be processed across the US regions, namely: `us-east-1`, `us-east-2`, or `us-west-2`.
Cross-Region inference requests are kept within the AWS Regions that are part of the geography where the data originally resides.
For example, a request made within the US is kept within the AWS Regions in the US.
##### Regional vs Global Inference Profiles

Bedrock supports two types of cross-region inference profiles:

- **Regional profiles** (default): Route requests within a specific geography (US, EU, APAC). For example, `us-east-1` uses the `us.*` profile which routes across `us-east-1`, `us-east-2`, and `us-west-2`.
- **Global profiles**: Route requests across all commercial AWS Regions for maximum availability and performance.

By default, Zed uses **regional profiles** which keep your data within the same geography. You can opt into global profiles by adding `"allow_global": true` to your Bedrock configuration:

```json [settings]
{
  "language_models": {
    "bedrock": {
      "authentication_method": "named_profile",
      "region": "your-aws-region",
      "profile": "your-profile-name",
      "allow_global": true
    }
  }
}
```

**Note:** Only select newer models support global inference profiles. See the [AWS Bedrock supported models documentation](https://docs.aws.amazon.com/bedrock/latest/userguide/inference-profiles-support.html#inference-profiles-support-system) for the current list of models that support global inference. If you encounter availability issues with a model in your region, enabling `allow_global` may resolve them.

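For reference, the profile type shows up as the prefix of the Bedrock inference profile ID that gets requested. The IDs below are illustrative only; the exact model IDs and version suffixes come from the AWS model catalog, so check the AWS documentation for current values:

```
us.anthropic.claude-sonnet-4-20250514-v1:0      # regional profile (default)
global.anthropic.claude-sonnet-4-20250514-v1:0  # global profile, with "allow_global": true
```
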
Although the data remains stored only in the source Region, your input prompts and output results might move outside of your source Region during cross-Region inference.
All data will be transmitted encrypted across Amazon's secure network.

@@ -2,34 +2,44 @@

PHP support is available through the [PHP extension](https://github.com/zed-extensions/php).

- Tree-sitter: https://github.com/tree-sitter/tree-sitter-php
- Language Servers:
  - [phpactor](https://github.com/phpactor/phpactor)
  - [intelephense](https://github.com/bmewburn/vscode-intelephense/)
- Tree-sitter: [tree-sitter/tree-sitter-php](https://github.com/tree-sitter/tree-sitter-php)
- Language Server: [phpactor/phpactor](https://github.com/phpactor/phpactor)
- Alternate Language Server: [bmewburn/vscode-intelephense](https://github.com/bmewburn/vscode-intelephense/)

## Install PHP

The PHP extension requires PHP to be installed and available in your `PATH`:

```sh
# macOS via Homebrew
brew install php

# Debian/Ubuntu
sudo apt-get install php-cli

# CentOS 8+/RHEL
sudo dnf install php-cli

# Arch Linux
sudo pacman -S php

# check PHP path
## macOS and Linux
which php

## Windows
where php
```

## Choosing a language server

The PHP extension offers both `phpactor` and `intelephense` language server support.

`phpactor` is enabled by default.

### Phpactor

The Zed PHP Extension can install `phpactor` automatically but requires `php` to be installed and available in your path:

```sh
# brew install php # macOS
# sudo apt-get install php # Debian/Ubuntu
# yum install php # CentOS/RHEL
# pacman -S php # Arch Linux
which php
```
The PHP extension uses [LSP language servers](https://microsoft.github.io/language-server-protocol), with Phpactor as the default. If you want to use another language server that supports Zed (e.g. Intelephense or PHP Tools), follow the configuration instructions below.

### Intelephense

[Intelephense](https://intelephense.com/) is a [proprietary](https://github.com/bmewburn/vscode-intelephense/blob/master/LICENSE.txt#L29) language server for PHP operating under a freemium model. Certain features require purchase of a [premium license](https://intelephense.com/).
[Intelephense](https://intelephense.com/) is a [proprietary](https://github.com/bmewburn/vscode-intelephense/blob/master/LICENSE.txt#L29) language server for PHP operating under a freemium model. Certain features require purchase of a [premium license](https://intelephense.com/buy).

To switch to `intelephense`, add the following to your `settings.json`:
To use Intelephense, add the following to your `settings.json`:

```json [settings]
{
@@ -41,7 +51,9 @@ To switch to `intelephense`, add the following to your `settings.json`:
}
```

To use the premium features, you can place your [licence.txt file](https://intelephense.com/faq.html) at `~/intelephense/licence.txt` inside your home directory. Alternatively, you can pass the licence key or a path to a file containing the licence key as an initialization option for the `intelephense` language server. To do this, add the following to your `settings.json`:
To use the premium features, you can place your license file inside your home directory at `~/intelephense/licence.txt` for macOS and Linux, or `%USERPROFILE%/intelephense/licence.txt` on Windows.

Alternatively, you can pass the licence key or a path to a file containing the licence key as an initialization option. To do this, add the following to your `settings.json`:

```json [settings]
{
@@ -55,15 +67,67 @@ To use the premium features, you can place your [licence.txt file](https://intel
}
```

### PHP Tools

[PHP Tools](https://www.devsense.com/) is a proprietary language server that offers free and premium features. You need to [purchase a license](https://www.devsense.com/en/purchase) to activate the premium features.

To use PHP Tools, add the following to your `settings.json`:

```json [settings]
{
  "languages": {
    "PHP": {
      "language_servers": ["phptools", "!intelephense", "!phpactor", "..."]
    }
  }
}
```

To use the premium features, you can add your license in `initialization_options` in your `settings.json`:

```json [settings]
{
  "lsp": {
    "phptools": {
      "initialization_options": {
        "0": "your_license_key"
      }
    }
  }
}
```

Alternatively, set the `DEVSENSE_PHP_LS_LICENSE` environment variable in a `.env` file in your project:

```env
DEVSENSE_PHP_LS_LICENSE="your_license_key"
```

Check out the documentation of [PHP Tools for Zed](https://docs.devsense.com/other/zed/) for more details.

### Phpactor

To use Phpactor instead of Intelephense or any other tools, add the following to your `settings.json`:

```json [settings]
{
  "languages": {
    "PHP": {
      "language_servers": ["phpactor", "!intelephense", "!phptools", "..."]
    }
  }
}
```

## PHPDoc

Zed supports syntax highlighting for PHPDoc comments.

- Tree-sitter: [claytonrcarter/tree-sitter-phpdoc](https://github.com/claytonrcarter/tree-sitter-phpdoc)

## Setting up Xdebug
## Debugging

Zed’s PHP extension provides a debug adapter for PHP and Xdebug. The adapter name is `Xdebug`. Here a couple ways you can use it:
The PHP extension provides a debug adapter for PHP via Xdebug. There are several ways to use it:

```json
[
@@ -83,10 +147,10 @@ Zed’s PHP extension provides a debug adapter for PHP and Xdebug. The adapter n
]
```

In case you run into issues:
These are common troubleshooting tips, in case you run into issues (a sample `php.ini` sketch follows the list):

- ensure that you have Xdebug installed for the version of PHP you’re running
- ensure that Xdebug is configured to run in `debug` mode
- ensure that Xdebug is actually starting a debugging session
- check that the host and port matches between Xdebug and Zed
- look at the diagnostics log by using the `xdebug_info()` function in the page you’re trying to debug
- Ensure that you have Xdebug installed for the version of PHP you’re running.
- Ensure that Xdebug is configured to run in `debug` mode.
- Ensure that Xdebug is actually starting a debugging session.
- Ensure that the host and port matches between Xdebug and Zed.
- Look at the diagnostics log by using the `xdebug_info()` function in the page you’re trying to debug.

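As referenced above, a minimal `php.ini` sketch for step-debugging with Xdebug 3 might look like the following; the host and port values are assumptions and must match the debug configuration you use in Zed:

```ini
; Load Xdebug and enable step debugging (Xdebug 3 settings)
zend_extension=xdebug
xdebug.mode=debug
; Start a debugging session for every request (use "trigger" to opt in per request)
xdebug.start_with_request=yes
; Must match the host/port in your Zed debug configuration
xdebug.client_host=127.0.0.1
xdebug.client_port=9003
```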