Compare commits
14 Commits
fix-git-ht...another
| Author | SHA1 | Date |
|---|---|---|
| | a2876f5d3e | |
| | 13deaa3f69 | |
| | 694afd15c9 | |
| | eb648dd096 | |
| | 1c4c568068 | |
| | ec88a6886f | |
| | 7fb16977ce | |
| | 53b2792844 | |
| | 760d08711c | |
| | 71866d6314 | |
| | 0c2bbb3aa9 | |
| | 9d8a163f5b | |
| | 8a22a07d14 | |
| | fad4df5e70 | |
52  Cargo.lock  generated
@@ -405,6 +405,7 @@ dependencies = [
|
||||
"pretty_assertions",
|
||||
"project",
|
||||
"prompt_library",
|
||||
"prompt_store",
|
||||
"proto",
|
||||
"rand 0.8.5",
|
||||
"rope",
|
||||
@@ -472,6 +473,7 @@ dependencies = [
|
||||
"picker",
|
||||
"project",
|
||||
"prompt_library",
|
||||
"prompt_store",
|
||||
"proto",
|
||||
"rand 0.8.5",
|
||||
"rope",
|
||||
@@ -526,7 +528,7 @@ dependencies = [
|
||||
"picker",
|
||||
"pretty_assertions",
|
||||
"project",
|
||||
"prompt_library",
|
||||
"prompt_store",
|
||||
"rand 0.8.5",
|
||||
"regex",
|
||||
"rope",
|
||||
@@ -617,7 +619,7 @@ dependencies = [
|
||||
"log",
|
||||
"pretty_assertions",
|
||||
"project",
|
||||
"prompt_library",
|
||||
"prompt_store",
|
||||
"rope",
|
||||
"schemars",
|
||||
"semantic_index",
|
||||
@@ -2076,6 +2078,7 @@ name = "buffer_diff"
|
||||
version = "0.1.0"
|
||||
dependencies = [
|
||||
"anyhow",
|
||||
"clock",
|
||||
"ctor",
|
||||
"env_logger 0.11.6",
|
||||
"futures 0.3.31",
|
||||
@@ -2841,7 +2844,7 @@ dependencies = [
|
||||
"pretty_assertions",
|
||||
"project",
|
||||
"prometheus",
|
||||
"prompt_library",
|
||||
"prompt_store",
|
||||
"prost 0.9.0",
|
||||
"rand 0.8.5",
|
||||
"recent_projects",
|
||||
@@ -5350,7 +5353,6 @@ dependencies = [
|
||||
"serde_json",
|
||||
"smol",
|
||||
"sum_tree",
|
||||
"tempfile",
|
||||
"text",
|
||||
"time",
|
||||
"unindent",
|
||||
@@ -5396,6 +5398,7 @@ dependencies = [
|
||||
"anyhow",
|
||||
"buffer_diff",
|
||||
"collections",
|
||||
"component",
|
||||
"db",
|
||||
"editor",
|
||||
"feature_flags",
|
||||
@@ -5405,6 +5408,7 @@ dependencies = [
|
||||
"gpui",
|
||||
"itertools 0.14.0",
|
||||
"language",
|
||||
"linkme",
|
||||
"menu",
|
||||
"multi_buffer",
|
||||
"panel",
|
||||
@@ -5416,6 +5420,7 @@ dependencies = [
|
||||
"serde_derive",
|
||||
"serde_json",
|
||||
"settings",
|
||||
"smallvec",
|
||||
"strum",
|
||||
"theme",
|
||||
"time",
|
||||
@@ -10310,12 +10315,36 @@ dependencies = [
|
||||
[[package]]
|
||||
name = "prompt_library"
|
||||
version = "0.1.0"
|
||||
dependencies = [
|
||||
"anyhow",
|
||||
"collections",
|
||||
"editor",
|
||||
"gpui",
|
||||
"language",
|
||||
"language_model",
|
||||
"log",
|
||||
"menu",
|
||||
"picker",
|
||||
"prompt_store",
|
||||
"release_channel",
|
||||
"rope",
|
||||
"serde",
|
||||
"settings",
|
||||
"theme",
|
||||
"ui",
|
||||
"util",
|
||||
"workspace",
|
||||
"zed_actions",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "prompt_store"
|
||||
version = "0.1.0"
|
||||
dependencies = [
|
||||
"anyhow",
|
||||
"assets",
|
||||
"chrono",
|
||||
"collections",
|
||||
"editor",
|
||||
"fs",
|
||||
"futures 0.3.31",
|
||||
"fuzzy",
|
||||
@@ -10323,23 +10352,14 @@ dependencies = [
|
||||
"handlebars 4.5.0",
|
||||
"heed",
|
||||
"language",
|
||||
"language_model",
|
||||
"log",
|
||||
"menu",
|
||||
"parking_lot",
|
||||
"paths",
|
||||
"picker",
|
||||
"release_channel",
|
||||
"rope",
|
||||
"serde",
|
||||
"settings",
|
||||
"text",
|
||||
"theme",
|
||||
"ui",
|
||||
"util",
|
||||
"uuid",
|
||||
"workspace",
|
||||
"zed_actions",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -16825,7 +16845,7 @@ dependencies = [
|
||||
"project",
|
||||
"project_panel",
|
||||
"project_symbols",
|
||||
"prompt_library",
|
||||
"prompt_store",
|
||||
"proto",
|
||||
"recent_projects",
|
||||
"release_channel",
|
||||
@@ -16851,7 +16871,6 @@ dependencies = [
|
||||
"tasks_ui",
|
||||
"telemetry",
|
||||
"telemetry_events",
|
||||
"tempfile",
|
||||
"terminal_view",
|
||||
"theme",
|
||||
"theme_extension",
|
||||
@@ -16868,7 +16887,6 @@ dependencies = [
|
||||
"vim",
|
||||
"vim_mode_setting",
|
||||
"welcome",
|
||||
"which 6.0.3",
|
||||
"windows 0.58.0",
|
||||
"winresource",
|
||||
"workspace",
|
||||
|
||||
@@ -103,6 +103,7 @@ members = [
     "crates/project_panel",
     "crates/project_symbols",
     "crates/prompt_library",
+    "crates/prompt_store",
     "crates/proto",
     "crates/recent_projects",
     "crates/refineable",
@@ -308,6 +309,7 @@ project = { path = "crates/project" }
 project_panel = { path = "crates/project_panel" }
 project_symbols = { path = "crates/project_symbols" }
 prompt_library = { path = "crates/prompt_library" }
+prompt_store = { path = "crates/prompt_store" }
 proto = { path = "crates/proto" }
 recent_projects = { path = "crates/recent_projects" }
 refineable = { path = "crates/refineable" }
@@ -749,4 +751,4 @@ should_implement_trait = { level = "allow" }
 let_underscore_future = "allow"

 [workspace.metadata.cargo-machete]
-ignored = ["bindgen", "cbindgen", "prost_build", "serde"]
+ignored = ["bindgen", "cbindgen", "prost_build", "serde", "component", "linkme"]
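The two crates newly added to cargo-machete's ignore list, `component` and `linkme`, are presumably flagged as "unused" because they are wired up through linkme's distributed-slice registration rather than ordinary `use` statements (see the `#[distributed_slice]` usage in `crates/component` later in this diff). A minimal sketch of that registration pattern, with illustrative names only:

```rust
use linkme::distributed_slice;

// Illustrative registry: entries are contributed from anywhere in the
// dependency graph, so a static-analysis tool sees no conventional import.
#[distributed_slice]
pub static COMPONENT_REGISTRATIONS: [fn()] = [..];

// A crate registers itself by dropping a function pointer into the slice.
#[distributed_slice(COMPONENT_REGISTRATIONS)]
static REGISTER_EXAMPLE: fn() = register_example;

fn register_example() {
    // e.g. register_component::<ExampleComponent>() in the real code.
}
```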
6  assets/icons/git_branch_small.svg  Normal file
@@ -0,0 +1,6 @@
<svg width="12" height="12" viewBox="0 0 12 12" fill="none" xmlns="http://www.w3.org/2000/svg">
<path fill-rule="evenodd" clip-rule="evenodd" d="M3.75 3.25C4.02614 3.25 4.25 3.02614 4.25 2.75C4.25 2.47386 4.02614 2.25 3.75 2.25C3.47386 2.25 3.25 2.47386 3.25 2.75C3.25 3.02614 3.47386 3.25 3.75 3.25ZM3.75 4.25C4.57843 4.25 5.25 3.57843 5.25 2.75C5.25 1.92157 4.57843 1.25 3.75 1.25C2.92157 1.25 2.25 1.92157 2.25 2.75C2.25 3.57843 2.92157 4.25 3.75 4.25Z" fill="black"/>
<path fill-rule="evenodd" clip-rule="evenodd" d="M8.25 3.25C8.52614 3.25 8.75 3.02614 8.75 2.75C8.75 2.47386 8.52614 2.25 8.25 2.25C7.97386 2.25 7.75 2.47386 7.75 2.75C7.75 3.02614 7.97386 3.25 8.25 3.25ZM8.25 4.25C9.07843 4.25 9.75 3.57843 9.75 2.75C9.75 1.92157 9.07843 1.25 8.25 1.25C7.42157 1.25 6.75 1.92157 6.75 2.75C6.75 3.57843 7.42157 4.25 8.25 4.25Z" fill="black"/>
<path fill-rule="evenodd" clip-rule="evenodd" d="M3.75 9.75C4.02614 9.75 4.25 9.52614 4.25 9.25C4.25 8.97386 4.02614 8.75 3.75 8.75C3.47386 8.75 3.25 8.97386 3.25 9.25C3.25 9.52614 3.47386 9.75 3.75 9.75ZM3.75 10.75C4.57843 10.75 5.25 10.0784 5.25 9.25C5.25 8.42157 4.57843 7.75 3.75 7.75C2.92157 7.75 2.25 8.42157 2.25 9.25C2.25 10.0784 2.92157 10.75 3.75 10.75Z" fill="black"/>
<path fill-rule="evenodd" clip-rule="evenodd" d="M3.25 3.75H4.25V5.59609C4.67823 5.35824 5.24991 5.25 6 5.25H7.25017C7.5262 5.25 7.75 5.02625 7.75 4.75V3.75H8.75V4.75C8.75 5.57832 8.07871 6.25 7.25017 6.25H6C5.14559 6.25 4.77639 6.41132 4.59684 6.56615C4.42571 6.71373 4.33877 6.92604 4.25 7.30651V8.25H3.25V3.75Z" fill="black"/>
</svg>
After: Size 1.5 KiB
@@ -370,8 +370,8 @@
   "ctrl-shift-v": "markdown::OpenPreview",
   "ctrl-alt-shift-c": "editor::DisplayCursorNames",
   "ctrl-alt-y": "git::ToggleStaged",
-  "alt-y": "git::StageAndNext",
-  "alt-shift-y": "git::UnstageAndNext",
+  "alt-y": ["git::StageAndNext", { "whole_excerpt": false }],
+  "alt-shift-y": ["git::UnstageAndNext", { "whole_excerpt": false }],
   "alt-.": "editor::GoToHunk",
   "alt-,": "editor::GoToPrevHunk"
 }
@@ -118,6 +118,7 @@
   "cmd-a": "editor::SelectAll",
   "cmd-l": "editor::SelectLine",
   "cmd-shift-i": "editor::Format",
+  "alt-shift-o": "editor::OrganizeImports",
   "cmd-shift-left": ["editor::SelectToBeginningOfLine", { "stop_at_soft_wraps": true, "stop_at_indent": true }],
   "shift-home": ["editor::SelectToBeginningOfLine", { "stop_at_soft_wraps": true, "stop_at_indent": true }],
   "ctrl-shift-a": ["editor::SelectToBeginningOfLine", { "stop_at_soft_wraps": true, "stop_at_indent": true }],
@@ -130,8 +131,8 @@
   "cmd-;": "editor::ToggleLineNumbers",
   "cmd-alt-z": "git::Restore",
   "cmd-alt-y": "git::ToggleStaged",
-  "cmd-y": "git::StageAndNext",
-  "cmd-shift-y": "git::UnstageAndNext",
+  "cmd-y": ["git::StageAndNext", { "whole_excerpt": false }],
+  "cmd-shift-y": ["git::UnstageAndNext", { "whole_excerpt": false }],
  "cmd-'": "editor::ToggleSelectedDiffHunks",
  "cmd-\"": "editor::ExpandAllDiffHunks",
  "cmd-alt-g b": "editor::ToggleGitBlame",
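Both keymaps above now pass `{ "whole_excerpt": false }` to `git::StageAndNext` and `git::UnstageAndNext`, which implies those actions now carry a deserializable `whole_excerpt` flag. A rough sketch of what the action payloads could look like on the Rust side; the struct shapes and field semantics are assumptions inferred only from the keymap change, and registration with gpui's action macros is omitted:

```rust
use serde::Deserialize;

// Hypothetical payloads for ["git::StageAndNext", { "whole_excerpt": false }]
// and its Unstage counterpart.
#[derive(Clone, Default, PartialEq, Deserialize)]
pub struct StageAndNext {
    // Assumed meaning: when false, operate on the hunk under the cursor
    // rather than on every hunk in the current excerpt.
    #[serde(default)]
    pub whole_excerpt: bool,
}

#[derive(Clone, Default, PartialEq, Deserialize)]
pub struct UnstageAndNext {
    #[serde(default)]
    pub whole_excerpt: bool,
}
```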
@@ -51,6 +51,7 @@ parking_lot.workspace = true
 paths.workspace = true
 project.workspace = true
 prompt_library.workspace = true
+prompt_store.workspace = true
 proto.workspace = true
 rope.workspace = true
 schemars.workspace = true
@@ -19,7 +19,7 @@ use gpui::{actions, App, Global, UpdateGlobal};
 use language_model::{
     LanguageModelId, LanguageModelProviderId, LanguageModelRegistry, LanguageModelResponseMessage,
 };
-use prompt_library::PromptBuilder;
+use prompt_store::PromptBuilder;
 use semantic_index::{CloudEmbeddingProvider, SemanticDb};
 use serde::Deserialize;
 use settings::{Settings, SettingsStore};
@@ -24,7 +24,8 @@ use language_model::{
     AuthenticateError, LanguageModelProviderId, LanguageModelRegistry, ZED_CLOUD_PROVIDER_ID,
 };
 use project::Project;
-use prompt_library::{open_prompt_library, PromptBuilder, PromptLibrary};
+use prompt_library::{open_prompt_library, PromptLibrary};
+use prompt_store::PromptBuilder;
 use search::{buffer_search::DivRegistrar, BufferSearchBar};
 use settings::{update_settings_file, Settings};
 use smol::stream::StreamExt;
@@ -39,7 +39,7 @@ use language_model_selector::{InlineLanguageModelSelector, LanguageModelSelector
|
||||
use multi_buffer::MultiBufferRow;
|
||||
use parking_lot::Mutex;
|
||||
use project::{CodeAction, ProjectTransaction};
|
||||
use prompt_library::PromptBuilder;
|
||||
use prompt_store::PromptBuilder;
|
||||
use rope::Rope;
|
||||
use settings::{update_settings_file, Settings, SettingsStore};
|
||||
use smol::future::FutureExt;
|
||||
|
||||
@@ -20,7 +20,7 @@ use language_model::{
|
||||
LanguageModelRequestMessage, Role,
|
||||
};
|
||||
use language_model_selector::{InlineLanguageModelSelector, LanguageModelSelector};
|
||||
use prompt_library::PromptBuilder;
|
||||
use prompt_store::PromptBuilder;
|
||||
use settings::{update_settings_file, Settings};
|
||||
use std::{
|
||||
cmp,
|
||||
|
||||
@@ -56,6 +56,7 @@ paths.workspace = true
|
||||
picker.workspace = true
|
||||
project.workspace = true
|
||||
prompt_library.workspace = true
|
||||
prompt_store.workspace = true
|
||||
proto.workspace = true
|
||||
rope.workspace = true
|
||||
serde.workspace = true
|
||||
|
||||
@@ -270,8 +270,15 @@ impl ActiveThread {
         let model_registry = LanguageModelRegistry::read_global(cx);
         if let Some(model) = model_registry.active_model() {
             self.thread.update(cx, |thread, cx| {
-                // Insert an empty user message to contain the tool results.
-                thread.insert_user_message("", Vec::new(), cx);
+                // Insert a user message to contain the tool results.
+                thread.insert_user_message(
+                    // TODO: Sending up a user message without any content results in the model sending back
+                    // responses that also don't have any content. We currently don't handle this case well,
+                    // so for now we provide some text to keep the model on track.
+                    "Here are the tool results.",
+                    Vec::new(),
+                    cx,
+                );
                 thread.send_to_model(model, RequestKind::Chat, true, cx);
             });
         }
@@ -295,10 +302,7 @@ impl ActiveThread {
         let colors = cx.theme().colors();

         // Don't render user messages that are just there for returning tool results.
-        if message.role == Role::User
-            && message.text.is_empty()
-            && self.thread.read(cx).message_has_tool_results(message_id)
-        {
+        if message.role == Role::User && self.thread.read(cx).message_has_tool_results(message_id) {
             return Empty.into_any();
         }
@@ -27,7 +27,7 @@ use command_palette_hooks::CommandPaletteFilter;
|
||||
use feature_flags::{Assistant2FeatureFlag, FeatureFlagAppExt};
|
||||
use fs::Fs;
|
||||
use gpui::{actions, App};
|
||||
use prompt_library::PromptBuilder;
|
||||
use prompt_store::PromptBuilder;
|
||||
use settings::Settings as _;
|
||||
|
||||
pub use crate::assistant_panel::{AssistantPanel, ConcreteAssistantPanelDelegate};
|
||||
|
||||
@@ -20,7 +20,8 @@ use gpui::{
|
||||
use language::LanguageRegistry;
|
||||
use language_model::{LanguageModelProviderTosView, LanguageModelRegistry};
|
||||
use project::Project;
|
||||
use prompt_library::{open_prompt_library, PromptBuilder, PromptLibrary};
|
||||
use prompt_library::{open_prompt_library, PromptLibrary};
|
||||
use prompt_store::PromptBuilder;
|
||||
use settings::{update_settings_file, Settings};
|
||||
use time::UtcOffset;
|
||||
use ui::{prelude::*, ContextMenu, KeyBinding, PopoverMenu, PopoverMenuHandle, Tab, Tooltip};
|
||||
|
||||
@@ -14,7 +14,7 @@ use language_model::{
|
||||
};
|
||||
use multi_buffer::MultiBufferRow;
|
||||
use parking_lot::Mutex;
|
||||
use prompt_library::PromptBuilder;
|
||||
use prompt_store::PromptBuilder;
|
||||
use rope::Rope;
|
||||
use smol::future::FutureExt;
|
||||
use std::{
|
||||
|
||||
@@ -28,7 +28,7 @@ use language_model::{report_assistant_event, LanguageModelRegistry};
|
||||
use multi_buffer::MultiBufferRow;
|
||||
use parking_lot::Mutex;
|
||||
use project::{CodeAction, ProjectTransaction};
|
||||
use prompt_library::PromptBuilder;
|
||||
use prompt_store::PromptBuilder;
|
||||
use settings::{Settings, SettingsStore};
|
||||
use telemetry_events::{AssistantEvent, AssistantKind, AssistantPhase};
|
||||
use terminal_view::{terminal_panel::TerminalPanel, TerminalView};
|
||||
|
||||
@@ -16,7 +16,7 @@ use language_model::{
|
||||
report_assistant_event, LanguageModelRegistry, LanguageModelRequest,
|
||||
LanguageModelRequestMessage, Role,
|
||||
};
|
||||
use prompt_library::PromptBuilder;
|
||||
use prompt_store::PromptBuilder;
|
||||
use std::sync::Arc;
|
||||
use telemetry_events::{AssistantEvent, AssistantKind, AssistantPhase};
|
||||
use terminal_view::TerminalView;
|
||||
|
||||
@@ -37,7 +37,7 @@ parking_lot.workspace = true
|
||||
paths.workspace = true
|
||||
picker.workspace = true
|
||||
project.workspace = true
|
||||
prompt_library.workspace = true
|
||||
prompt_store.workspace = true
|
||||
regex.workspace = true
|
||||
rope.workspace = true
|
||||
rpc.workspace = true
|
||||
|
||||
@@ -27,7 +27,7 @@ use language_model::{
|
||||
use open_ai::Model as OpenAiModel;
|
||||
use paths::contexts_dir;
|
||||
use project::Project;
|
||||
use prompt_library::PromptBuilder;
|
||||
use prompt_store::PromptBuilder;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use smallvec::SmallVec;
|
||||
use std::{
|
||||
|
||||
@@ -20,7 +20,7 @@ use language_model::{LanguageModelCacheConfiguration, LanguageModelRegistry, Rol
|
||||
use parking_lot::Mutex;
|
||||
use pretty_assertions::assert_eq;
|
||||
use project::Project;
|
||||
use prompt_library::PromptBuilder;
|
||||
use prompt_store::PromptBuilder;
|
||||
use rand::prelude::*;
|
||||
use serde_json::json;
|
||||
use settings::SettingsStore;
|
||||
@@ -671,7 +671,7 @@ async fn test_slash_commands(cx: &mut TestAppContext) {
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_workflow_step_parsing(cx: &mut TestAppContext) {
|
||||
cx.update(prompt_library::init);
|
||||
cx.update(prompt_store::init);
|
||||
let mut settings_store = cx.update(SettingsStore::test);
|
||||
cx.update(|cx| {
|
||||
settings_store
|
||||
|
||||
@@ -16,7 +16,7 @@ use gpui::{App, AppContext as _, AsyncApp, Context, Entity, EventEmitter, Task,
|
||||
use language::LanguageRegistry;
|
||||
use paths::contexts_dir;
|
||||
use project::Project;
|
||||
use prompt_library::PromptBuilder;
|
||||
use prompt_store::PromptBuilder;
|
||||
use regex::Regex;
|
||||
use rpc::AnyProtoClient;
|
||||
use std::sync::LazyLock;
|
||||
|
||||
@@ -32,7 +32,7 @@ language.workspace = true
|
||||
language_model.workspace = true
|
||||
log.workspace = true
|
||||
project.workspace = true
|
||||
prompt_library.workspace = true
|
||||
prompt_store.workspace = true
|
||||
rope.workspace = true
|
||||
schemars.workspace = true
|
||||
semantic_index.workspace = true
|
||||
|
||||
@@ -5,7 +5,7 @@ use assistant_slash_command::{
|
||||
};
|
||||
use gpui::{Task, WeakEntity};
|
||||
use language::{BufferSnapshot, LspAdapterDelegate};
|
||||
use prompt_library::PromptStore;
|
||||
use prompt_store::PromptStore;
|
||||
use std::{
|
||||
fmt::Write,
|
||||
sync::{atomic::AtomicBool, Arc},
|
||||
|
||||
@@ -13,7 +13,7 @@ use feature_flags::FeatureFlag;
|
||||
use gpui::{App, Task, WeakEntity};
|
||||
use language::{Anchor, CodeLabel, LspAdapterDelegate};
|
||||
use language_model::{LanguageModelRegistry, LanguageModelTool};
|
||||
use prompt_library::PromptBuilder;
|
||||
use prompt_store::PromptBuilder;
|
||||
use schemars::JsonSchema;
|
||||
use semantic_index::SemanticDb;
|
||||
use serde::Deserialize;
|
||||
|
||||
@@ -5,7 +5,7 @@ use assistant_slash_command::{
|
||||
};
|
||||
use gpui::{Task, WeakEntity};
|
||||
use language::{BufferSnapshot, LspAdapterDelegate};
|
||||
use prompt_library::PromptStore;
|
||||
use prompt_store::PromptStore;
|
||||
use std::sync::{atomic::AtomicBool, Arc};
|
||||
use ui::prelude::*;
|
||||
use workspace::Workspace;
|
||||
|
||||
@@ -16,6 +16,7 @@ test-support = []
|
||||
|
||||
[dependencies]
|
||||
anyhow.workspace = true
|
||||
clock.workspace = true
|
||||
futures.workspace = true
|
||||
git2.workspace = true
|
||||
gpui.workspace = true
|
||||
|
||||
@@ -1,11 +1,12 @@
-use futures::{channel::oneshot, future::OptionFuture};
+use futures::channel::oneshot;
 use git2::{DiffLineType as GitDiffLineType, DiffOptions as GitOptions, Patch as GitPatch};
-use gpui::{App, AppContext as _, AsyncApp, Context, Entity, EventEmitter};
+use gpui::{App, AppContext as _, AsyncApp, Context, Entity, EventEmitter, Task};
 use language::{Language, LanguageRegistry};
 use rope::Rope;
 use std::cmp::Ordering;
+use std::mem;
 use std::{future::Future, iter, ops::Range, sync::Arc};
-use sum_tree::SumTree;
+use sum_tree::{SumTree, TreeMap};
 use text::ToOffset as _;
 use text::{Anchor, Bias, BufferId, OffsetRangeExt, Point};
 use util::ResultExt;
@@ -20,13 +21,14 @@ pub struct BufferDiff {
 pub struct BufferDiffSnapshot {
     inner: BufferDiffInner,
     secondary_diff: Option<Box<BufferDiffSnapshot>>,
-    pub is_single_insertion: bool,
 }

 #[derive(Clone)]
 struct BufferDiffInner {
     hunks: SumTree<InternalDiffHunk>,
-    base_text: Option<language::BufferSnapshot>,
+    pending_hunks: TreeMap<usize, PendingHunk>,
+    base_text: language::BufferSnapshot,
+    base_text_exists: bool,
 }

 #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
@@ -47,16 +49,8 @@ pub enum DiffHunkSecondaryStatus {
     HasSecondaryHunk,
     OverlapsWithSecondaryHunk,
     None,
-}
-
-impl DiffHunkSecondaryStatus {
-    pub fn is_secondary(&self) -> bool {
-        match self {
-            DiffHunkSecondaryStatus::HasSecondaryHunk => true,
-            DiffHunkSecondaryStatus::OverlapsWithSecondaryHunk => true,
-            DiffHunkSecondaryStatus::None => false,
-        }
-    }
+    SecondaryHunkAdditionPending,
+    SecondaryHunkRemovalPending,
 }

 /// A diff hunk resolved to rows in the buffer.
@@ -78,6 +72,17 @@ struct InternalDiffHunk {
     diff_base_byte_range: Range<usize>,
 }

+#[derive(Debug, Clone, PartialEq, Eq)]
+struct PendingHunk {
+    buffer_version: clock::Global,
+    new_status: DiffHunkSecondaryStatus,
+}
+
+#[derive(Debug, Default, Clone)]
+pub struct DiffHunkSummary {
+    buffer_range: Range<Anchor>,
+}
+
 impl sum_tree::Item for InternalDiffHunk {
     type Summary = DiffHunkSummary;
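`DiffHunkSecondaryStatus` gains two transient variants above, and the old `is_secondary` helper is dropped. Judging from the `has_secondary_hunk` and `is_pending` methods added to `DiffHunkStatus` further down in this diff, callers treat the pending variants as optimistic state while the index write is still in flight. A sketch of that interpretation; the free functions here are illustrative, not part of the change itself:

```rust
// Sketch: how a caller might read the new variants.
fn is_fully_staged(status: &DiffHunkSecondaryStatus) -> bool {
    match status {
        // No unstaged counterpart remains, or a stage write is in flight.
        DiffHunkSecondaryStatus::None
        | DiffHunkSecondaryStatus::SecondaryHunkRemovalPending => true,
        // Unstaged, partially staged, or an unstage write is in flight.
        DiffHunkSecondaryStatus::HasSecondaryHunk
        | DiffHunkSecondaryStatus::OverlapsWithSecondaryHunk
        | DiffHunkSecondaryStatus::SecondaryHunkAdditionPending => false,
    }
}

fn is_pending(status: &DiffHunkSecondaryStatus) -> bool {
    matches!(
        status,
        DiffHunkSecondaryStatus::SecondaryHunkAdditionPending
            | DiffHunkSecondaryStatus::SecondaryHunkRemovalPending
    )
}
```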
@@ -88,11 +93,6 @@ impl sum_tree::Item for InternalDiffHunk {
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Default, Clone)]
|
||||
pub struct DiffHunkSummary {
|
||||
buffer_range: Range<Anchor>,
|
||||
}
|
||||
|
||||
impl sum_tree::Summary for DiffHunkSummary {
|
||||
type Context = text::BufferSnapshot;
|
||||
|
||||
@@ -159,131 +159,166 @@ impl BufferDiffSnapshot {
|
||||
self.inner.hunks_intersecting_range_rev(range, buffer)
|
||||
}
|
||||
|
||||
pub fn base_text(&self) -> Option<&language::BufferSnapshot> {
|
||||
self.inner.base_text.as_ref()
|
||||
pub fn base_text(&self) -> &language::BufferSnapshot {
|
||||
&self.inner.base_text
|
||||
}
|
||||
|
||||
pub fn base_texts_eq(&self, other: &Self) -> bool {
|
||||
match (other.base_text(), self.base_text()) {
|
||||
(None, None) => true,
|
||||
(None, Some(_)) => false,
|
||||
(Some(_), None) => false,
|
||||
(Some(old), Some(new)) => {
|
||||
let (old_id, old_empty) = (old.remote_id(), old.is_empty());
|
||||
let (new_id, new_empty) = (new.remote_id(), new.is_empty());
|
||||
new_id == old_id || (new_empty && old_empty)
|
||||
}
|
||||
if self.inner.base_text_exists != other.inner.base_text_exists {
|
||||
return false;
|
||||
}
|
||||
let left = &self.inner.base_text;
|
||||
let right = &other.inner.base_text;
|
||||
let (old_id, old_empty) = (left.remote_id(), left.is_empty());
|
||||
let (new_id, new_empty) = (right.remote_id(), right.is_empty());
|
||||
new_id == old_id || (new_empty && old_empty)
|
||||
}
|
||||
}
|
||||
|
||||
pub fn new_secondary_text_for_stage_or_unstage(
|
||||
&self,
|
||||
impl BufferDiffInner {
|
||||
fn stage_or_unstage_hunks(
|
||||
&mut self,
|
||||
unstaged_diff: &Self,
|
||||
stage: bool,
|
||||
hunks: impl Iterator<Item = (Range<Anchor>, Range<usize>)>,
|
||||
hunks: &[DiffHunk],
|
||||
buffer: &text::BufferSnapshot,
|
||||
cx: &mut App,
|
||||
) -> Option<Rope> {
|
||||
let secondary_diff = self.secondary_diff()?;
|
||||
let head_text = self.base_text().map(|text| text.as_rope().clone());
|
||||
let index_text = secondary_diff
|
||||
.base_text()
|
||||
.map(|text| text.as_rope().clone());
|
||||
file_exists: bool,
|
||||
) -> (Option<Rope>, Vec<(usize, PendingHunk)>) {
|
||||
let head_text = self
|
||||
.base_text_exists
|
||||
.then(|| self.base_text.as_rope().clone());
|
||||
let index_text = unstaged_diff
|
||||
.base_text_exists
|
||||
.then(|| unstaged_diff.base_text.as_rope().clone());
|
||||
|
||||
// If the file doesn't exist in either HEAD or the index, then the
|
||||
// entire file must be either created or deleted in the index.
|
||||
let (index_text, head_text) = match (index_text, head_text) {
|
||||
(Some(index_text), Some(head_text)) => (index_text, head_text),
|
||||
// file is deleted in both index and head
|
||||
(None, None) => return None,
|
||||
// file is deleted in index
|
||||
(None, Some(head_text)) => {
|
||||
return if stage {
|
||||
Some(buffer.as_rope().clone())
|
||||
(Some(index_text), Some(head_text)) if file_exists || !stage => (index_text, head_text),
|
||||
(_, head_text @ _) => {
|
||||
if stage {
|
||||
log::debug!("stage all");
|
||||
return (
|
||||
file_exists.then(|| buffer.as_rope().clone()),
|
||||
vec![(
|
||||
0,
|
||||
PendingHunk {
|
||||
buffer_version: buffer.version().clone(),
|
||||
new_status: DiffHunkSecondaryStatus::SecondaryHunkRemovalPending,
|
||||
},
|
||||
)],
|
||||
);
|
||||
} else {
|
||||
Some(head_text)
|
||||
}
|
||||
}
|
||||
// file exists in the index, but is deleted in head
|
||||
(Some(_), None) => {
|
||||
return if stage {
|
||||
Some(buffer.as_rope().clone())
|
||||
} else {
|
||||
None
|
||||
log::debug!("unstage all");
|
||||
return (
|
||||
head_text,
|
||||
vec![(
|
||||
0,
|
||||
PendingHunk {
|
||||
buffer_version: buffer.version().clone(),
|
||||
new_status: DiffHunkSecondaryStatus::SecondaryHunkAdditionPending,
|
||||
},
|
||||
)],
|
||||
);
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
let mut secondary_cursor = secondary_diff.inner.hunks.cursor::<DiffHunkSummary>(buffer);
|
||||
secondary_cursor.next(buffer);
|
||||
let mut unstaged_hunk_cursor = unstaged_diff.hunks.cursor::<DiffHunkSummary>(buffer);
|
||||
unstaged_hunk_cursor.next(buffer);
|
||||
let mut edits = Vec::new();
|
||||
let mut prev_secondary_hunk_buffer_offset = 0;
|
||||
let mut prev_secondary_hunk_base_text_offset = 0;
|
||||
for (buffer_range, diff_base_byte_range) in hunks {
|
||||
let skipped_hunks = secondary_cursor.slice(&buffer_range.start, Bias::Left, buffer);
|
||||
let mut pending_hunks = Vec::new();
|
||||
let mut prev_unstaged_hunk_buffer_offset = 0;
|
||||
let mut prev_unstaged_hunk_base_text_offset = 0;
|
||||
for DiffHunk {
|
||||
buffer_range,
|
||||
diff_base_byte_range,
|
||||
secondary_status,
|
||||
..
|
||||
} in hunks.iter().cloned()
|
||||
{
|
||||
if (stage && secondary_status == DiffHunkSecondaryStatus::None)
|
||||
|| (!stage && secondary_status == DiffHunkSecondaryStatus::HasSecondaryHunk)
|
||||
{
|
||||
continue;
|
||||
}
|
||||
|
||||
let skipped_hunks = unstaged_hunk_cursor.slice(&buffer_range.start, Bias::Left, buffer);
|
||||
|
||||
if let Some(secondary_hunk) = skipped_hunks.last() {
|
||||
prev_secondary_hunk_base_text_offset = secondary_hunk.diff_base_byte_range.end;
|
||||
prev_secondary_hunk_buffer_offset =
|
||||
prev_unstaged_hunk_base_text_offset = secondary_hunk.diff_base_byte_range.end;
|
||||
prev_unstaged_hunk_buffer_offset =
|
||||
secondary_hunk.buffer_range.end.to_offset(buffer);
|
||||
}
|
||||
|
||||
let mut buffer_offset_range = buffer_range.to_offset(buffer);
|
||||
let start_overshoot = buffer_offset_range.start - prev_secondary_hunk_buffer_offset;
|
||||
let mut secondary_base_text_start =
|
||||
prev_secondary_hunk_base_text_offset + start_overshoot;
|
||||
let start_overshoot = buffer_offset_range.start - prev_unstaged_hunk_buffer_offset;
|
||||
let mut index_start = prev_unstaged_hunk_base_text_offset + start_overshoot;
|
||||
|
||||
while let Some(secondary_hunk) = secondary_cursor.item().filter(|item| {
|
||||
while let Some(unstaged_hunk) = unstaged_hunk_cursor.item().filter(|item| {
|
||||
item.buffer_range
|
||||
.start
|
||||
.cmp(&buffer_range.end, buffer)
|
||||
.is_le()
|
||||
}) {
|
||||
let secondary_hunk_offset_range = secondary_hunk.buffer_range.to_offset(buffer);
|
||||
prev_secondary_hunk_base_text_offset = secondary_hunk.diff_base_byte_range.end;
|
||||
prev_secondary_hunk_buffer_offset = secondary_hunk_offset_range.end;
|
||||
let unstaged_hunk_offset_range = unstaged_hunk.buffer_range.to_offset(buffer);
|
||||
prev_unstaged_hunk_base_text_offset = unstaged_hunk.diff_base_byte_range.end;
|
||||
prev_unstaged_hunk_buffer_offset = unstaged_hunk_offset_range.end;
|
||||
|
||||
secondary_base_text_start =
|
||||
secondary_base_text_start.min(secondary_hunk.diff_base_byte_range.start);
|
||||
index_start = index_start.min(unstaged_hunk.diff_base_byte_range.start);
|
||||
buffer_offset_range.start = buffer_offset_range
|
||||
.start
|
||||
.min(secondary_hunk_offset_range.start);
|
||||
.min(unstaged_hunk_offset_range.start);
|
||||
|
||||
secondary_cursor.next(buffer);
|
||||
unstaged_hunk_cursor.next(buffer);
|
||||
}
|
||||
|
||||
let end_overshoot = buffer_offset_range
|
||||
.end
|
||||
.saturating_sub(prev_secondary_hunk_buffer_offset);
|
||||
let secondary_base_text_end = prev_secondary_hunk_base_text_offset + end_overshoot;
|
||||
.saturating_sub(prev_unstaged_hunk_buffer_offset);
|
||||
let index_end = prev_unstaged_hunk_base_text_offset + end_overshoot;
|
||||
|
||||
let secondary_base_text_range = secondary_base_text_start..secondary_base_text_end;
|
||||
let index_range = index_start..index_end;
|
||||
buffer_offset_range.end = buffer_offset_range
|
||||
.end
|
||||
.max(prev_secondary_hunk_buffer_offset);
|
||||
.max(prev_unstaged_hunk_buffer_offset);
|
||||
|
||||
let replacement_text = if stage {
|
||||
log::debug!("staging");
|
||||
log::debug!("stage hunk {:?}", buffer_offset_range);
|
||||
buffer
|
||||
.text_for_range(buffer_offset_range)
|
||||
.collect::<String>()
|
||||
} else {
|
||||
log::debug!("unstaging");
|
||||
log::debug!("unstage hunk {:?}", buffer_offset_range);
|
||||
head_text
|
||||
.chunks_in_range(diff_base_byte_range.clone())
|
||||
.collect::<String>()
|
||||
};
|
||||
edits.push((secondary_base_text_range, replacement_text));
|
||||
pending_hunks.push((
|
||||
diff_base_byte_range.start,
|
||||
PendingHunk {
|
||||
buffer_version: buffer.version().clone(),
|
||||
new_status: if stage {
|
||||
DiffHunkSecondaryStatus::SecondaryHunkRemovalPending
|
||||
} else {
|
||||
DiffHunkSecondaryStatus::SecondaryHunkAdditionPending
|
||||
},
|
||||
},
|
||||
));
|
||||
edits.push((index_range, replacement_text));
|
||||
}
|
||||
|
||||
let buffer = cx.new(|cx| {
|
||||
language::Buffer::local_normalized(index_text, text::LineEnding::default(), cx)
|
||||
});
|
||||
let new_text = buffer.update(cx, |buffer, cx| {
|
||||
buffer.edit(edits, None, cx);
|
||||
buffer.as_rope().clone()
|
||||
});
|
||||
Some(new_text)
|
||||
let mut new_index_text = Rope::new();
|
||||
let mut index_cursor = index_text.cursor(0);
|
||||
for (old_range, replacement_text) in edits {
|
||||
new_index_text.append(index_cursor.slice(old_range.start));
|
||||
index_cursor.seek_forward(old_range.end);
|
||||
new_index_text.push(&replacement_text);
|
||||
}
|
||||
new_index_text.append(index_cursor.suffix());
|
||||
(Some(new_index_text), pending_hunks)
|
||||
}
|
||||
}
|
||||
|
||||
impl BufferDiffInner {
|
||||
fn hunks_intersecting_range<'a>(
|
||||
&'a self,
|
||||
range: Range<Anchor>,
|
||||
@@ -318,11 +353,14 @@ impl BufferDiffInner {
|
||||
]
|
||||
});
|
||||
|
||||
let mut secondary_cursor = secondary.as_ref().map(|diff| {
|
||||
let mut cursor = diff.hunks.cursor::<DiffHunkSummary>(buffer);
|
||||
let mut secondary_cursor = None;
|
||||
let mut pending_hunks = TreeMap::default();
|
||||
if let Some(secondary) = secondary.as_ref() {
|
||||
let mut cursor = secondary.hunks.cursor::<DiffHunkSummary>(buffer);
|
||||
cursor.next(buffer);
|
||||
cursor
|
||||
});
|
||||
secondary_cursor = Some(cursor);
|
||||
pending_hunks = secondary.pending_hunks.clone();
|
||||
}
|
||||
|
||||
let mut summaries = buffer.summaries_for_anchors_with_payload::<Point, _, _>(anchor_iter);
|
||||
iter::from_fn(move || loop {
|
||||
@@ -340,7 +378,19 @@ impl BufferDiffInner {
|
||||
}
|
||||
|
||||
let mut secondary_status = DiffHunkSecondaryStatus::None;
|
||||
if let Some(secondary_cursor) = secondary_cursor.as_mut() {
|
||||
|
||||
let mut has_pending = false;
|
||||
if let Some(pending_hunk) = pending_hunks.get(&start_base) {
|
||||
if !buffer.has_edits_since_in_range(
|
||||
&pending_hunk.buffer_version,
|
||||
start_anchor..end_anchor,
|
||||
) {
|
||||
has_pending = true;
|
||||
secondary_status = pending_hunk.new_status;
|
||||
}
|
||||
}
|
||||
|
||||
if let (Some(secondary_cursor), false) = (secondary_cursor.as_mut(), has_pending) {
|
||||
if start_anchor
|
||||
.cmp(&secondary_cursor.start().buffer_range.start, buffer)
|
||||
.is_gt()
|
||||
@@ -354,14 +404,15 @@ impl BufferDiffInner {
|
||||
secondary_range.end.row += 1;
|
||||
secondary_range.end.column = 0;
|
||||
}
|
||||
if secondary_range == (start_point..end_point) {
|
||||
if secondary_range.is_empty() && secondary_hunk.diff_base_byte_range.is_empty()
|
||||
{
|
||||
// ignore
|
||||
} else if secondary_range == (start_point..end_point) {
|
||||
secondary_status = DiffHunkSecondaryStatus::HasSecondaryHunk;
|
||||
} else if secondary_range.start <= end_point {
|
||||
secondary_status = DiffHunkSecondaryStatus::OverlapsWithSecondaryHunk;
|
||||
}
|
||||
}
|
||||
} else {
|
||||
log::debug!("no secondary cursor!!");
|
||||
}
|
||||
|
||||
return Some(DiffHunk {
|
||||
@@ -518,6 +569,14 @@ fn compute_hunks(
|
||||
tree.push(hunk, &buffer);
|
||||
}
|
||||
}
|
||||
} else {
|
||||
tree.push(
|
||||
InternalDiffHunk {
|
||||
buffer_range: Anchor::MIN..Anchor::MAX,
|
||||
diff_base_byte_range: 0..0,
|
||||
},
|
||||
&buffer,
|
||||
);
|
||||
}
|
||||
|
||||
tree
|
||||
@@ -631,95 +690,71 @@ impl BufferDiff {
|
||||
|
||||
fn build(
|
||||
buffer: text::BufferSnapshot,
|
||||
diff_base: Option<Arc<String>>,
|
||||
base_text: Option<Arc<String>>,
|
||||
language: Option<Arc<Language>>,
|
||||
language_registry: Option<Arc<LanguageRegistry>>,
|
||||
cx: &mut App,
|
||||
) -> impl Future<Output = BufferDiffInner> {
|
||||
let diff_base =
|
||||
diff_base.map(|diff_base| (diff_base.clone(), Rope::from(diff_base.as_str())));
|
||||
let base_text_snapshot = diff_base.as_ref().map(|(_, diff_base)| {
|
||||
language::Buffer::build_snapshot(
|
||||
diff_base.clone(),
|
||||
let base_text_pair;
|
||||
let base_text_exists;
|
||||
let base_text_snapshot;
|
||||
if let Some(text) = &base_text {
|
||||
let base_text_rope = Rope::from(text.as_str());
|
||||
base_text_pair = Some((text.clone(), base_text_rope.clone()));
|
||||
let snapshot = language::Buffer::build_snapshot(
|
||||
base_text_rope,
|
||||
language.clone(),
|
||||
language_registry.clone(),
|
||||
cx,
|
||||
)
|
||||
});
|
||||
let base_text_snapshot = cx.background_spawn(OptionFuture::from(base_text_snapshot));
|
||||
);
|
||||
base_text_snapshot = cx.background_spawn(snapshot);
|
||||
base_text_exists = true;
|
||||
} else {
|
||||
base_text_pair = None;
|
||||
base_text_snapshot = Task::ready(language::Buffer::build_empty_snapshot(cx));
|
||||
base_text_exists = false;
|
||||
};
|
||||
|
||||
let hunks = cx.background_spawn({
|
||||
let buffer = buffer.clone();
|
||||
async move { compute_hunks(diff_base, buffer) }
|
||||
async move { compute_hunks(base_text_pair, buffer) }
|
||||
});
|
||||
|
||||
async move {
|
||||
let (base_text, hunks) = futures::join!(base_text_snapshot, hunks);
|
||||
BufferDiffInner { base_text, hunks }
|
||||
BufferDiffInner {
|
||||
base_text,
|
||||
hunks,
|
||||
base_text_exists,
|
||||
pending_hunks: TreeMap::default(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn build_with_base_buffer(
|
||||
buffer: text::BufferSnapshot,
|
||||
diff_base: Option<Arc<String>>,
|
||||
diff_base_buffer: Option<language::BufferSnapshot>,
|
||||
base_text: Option<Arc<String>>,
|
||||
base_text_snapshot: language::BufferSnapshot,
|
||||
cx: &App,
|
||||
) -> impl Future<Output = BufferDiffInner> {
|
||||
let diff_base = diff_base.clone().zip(
|
||||
diff_base_buffer
|
||||
.clone()
|
||||
.map(|buffer| buffer.as_rope().clone()),
|
||||
);
|
||||
let base_text_exists = base_text.is_some();
|
||||
let base_text_pair = base_text.map(|text| (text, base_text_snapshot.as_rope().clone()));
|
||||
cx.background_spawn(async move {
|
||||
BufferDiffInner {
|
||||
hunks: compute_hunks(diff_base, buffer),
|
||||
base_text: diff_base_buffer,
|
||||
base_text: base_text_snapshot,
|
||||
hunks: compute_hunks(base_text_pair, buffer),
|
||||
pending_hunks: TreeMap::default(),
|
||||
base_text_exists,
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
fn build_empty(buffer: &text::BufferSnapshot) -> BufferDiffInner {
|
||||
fn build_empty(buffer: &text::BufferSnapshot, cx: &mut App) -> BufferDiffInner {
|
||||
BufferDiffInner {
|
||||
base_text: language::Buffer::build_empty_snapshot(cx),
|
||||
hunks: SumTree::new(buffer),
|
||||
base_text: None,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn build_with_single_insertion(
|
||||
insertion_present_in_secondary_diff: bool,
|
||||
buffer: language::BufferSnapshot,
|
||||
cx: &mut App,
|
||||
) -> BufferDiffSnapshot {
|
||||
let base_text = language::Buffer::build_empty_snapshot(cx);
|
||||
let hunks = SumTree::from_item(
|
||||
InternalDiffHunk {
|
||||
buffer_range: Anchor::MIN..Anchor::MAX,
|
||||
diff_base_byte_range: 0..0,
|
||||
},
|
||||
&base_text,
|
||||
);
|
||||
BufferDiffSnapshot {
|
||||
inner: BufferDiffInner {
|
||||
hunks: hunks.clone(),
|
||||
base_text: Some(base_text.clone()),
|
||||
},
|
||||
secondary_diff: Some(Box::new(BufferDiffSnapshot {
|
||||
inner: BufferDiffInner {
|
||||
hunks: if insertion_present_in_secondary_diff {
|
||||
hunks
|
||||
} else {
|
||||
SumTree::new(&buffer.text)
|
||||
},
|
||||
base_text: Some(if insertion_present_in_secondary_diff {
|
||||
base_text
|
||||
} else {
|
||||
buffer
|
||||
}),
|
||||
},
|
||||
secondary_diff: None,
|
||||
is_single_insertion: true,
|
||||
})),
|
||||
is_single_insertion: true,
|
||||
pending_hunks: TreeMap::default(),
|
||||
base_text_exists: false,
|
||||
}
|
||||
}
|
||||
|
||||
@@ -728,7 +763,38 @@ impl BufferDiff {
     }

     pub fn secondary_diff(&self) -> Option<Entity<BufferDiff>> {
-        Some(self.secondary_diff.as_ref()?.clone())
+        self.secondary_diff.clone()
     }

+    pub fn stage_or_unstage_hunks(
+        &mut self,
+        stage: bool,
+        hunks: &[DiffHunk],
+        buffer: &text::BufferSnapshot,
+        file_exists: bool,
+        cx: &mut Context<Self>,
+    ) -> Option<Rope> {
+        let (new_index_text, pending_hunks) = self.inner.stage_or_unstage_hunks(
+            &self.secondary_diff.as_ref()?.read(cx).inner,
+            stage,
+            &hunks,
+            buffer,
+            file_exists,
+        );
+        if let Some(unstaged_diff) = &self.secondary_diff {
+            unstaged_diff.update(cx, |diff, _| {
+                for (offset, pending_hunk) in pending_hunks {
+                    diff.inner.pending_hunks.insert(offset, pending_hunk);
+                }
+            });
+        }
+        if let Some((first, last)) = hunks.first().zip(hunks.last()) {
+            let changed_range = first.buffer_range.start..last.buffer_range.end;
+            cx.emit(BufferDiffEvent::DiffChanged {
+                changed_range: Some(changed_range),
+            });
+        }
+        new_index_text
+    }
+
     pub fn range_to_hunk_range(
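The test changes near the bottom of this diff exercise the new method roughly as follows. This is a condensed sketch of the call pattern, with `uncommitted_diff`, `range`, and `buffer` standing in for values set up elsewhere:

```rust
// `uncommitted_diff` is an Entity<BufferDiff> whose secondary diff is the
// unstaged (index vs. worktree) diff; `range` and `buffer` come from the
// surrounding setup.
let new_index_text = uncommitted_diff.update(cx, |diff, cx| {
    let hunks = diff
        .hunks_intersecting_range(range.clone(), &buffer, cx)
        .collect::<Vec<_>>();
    // `stage = true` stages the hunks; `false` unstages them. The returned
    // Rope, when present, is the new contents to write to the git index.
    diff.stage_or_unstage_hunks(true, &hunks, &buffer, /* file_exists */ true, cx)
});
```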
@@ -777,7 +843,7 @@ impl BufferDiff {
|
||||
Self::build_with_base_buffer(
|
||||
buffer.clone(),
|
||||
base_text,
|
||||
this.base_text().cloned(),
|
||||
this.base_text().clone(),
|
||||
cx,
|
||||
)
|
||||
})?
|
||||
@@ -799,22 +865,33 @@

     fn set_state(
         &mut self,
-        inner: BufferDiffInner,
+        new_state: BufferDiffInner,
         buffer: &text::BufferSnapshot,
     ) -> Option<Range<Anchor>> {
-        let changed_range = match (self.inner.base_text.as_ref(), inner.base_text.as_ref()) {
-            (None, None) => None,
-            (Some(old), Some(new)) if old.remote_id() == new.remote_id() => {
-                inner.compare(&self.inner, buffer)
-            }
-            _ => Some(text::Anchor::MIN..text::Anchor::MAX),
-        };
-        self.inner = inner;
+        let (base_text_changed, changed_range) =
+            match (self.inner.base_text_exists, new_state.base_text_exists) {
+                (false, false) => (true, None),
+                (true, true)
+                    if self.inner.base_text.remote_id() == new_state.base_text.remote_id() =>
+                {
+                    (false, new_state.compare(&self.inner, buffer))
+                }
+                _ => (true, Some(text::Anchor::MIN..text::Anchor::MAX)),
+            };
+        let pending_hunks = mem::take(&mut self.inner.pending_hunks);
+        self.inner = new_state;
+        if !base_text_changed {
+            self.inner.pending_hunks = pending_hunks;
+        }
         changed_range
     }

-    pub fn base_text(&self) -> Option<&language::BufferSnapshot> {
-        self.inner.base_text.as_ref()
+    pub fn base_text(&self) -> &language::BufferSnapshot {
+        &self.inner.base_text
     }

+    pub fn base_text_exists(&self) -> bool {
+        self.inner.base_text_exists
+    }
+
     pub fn snapshot(&self, cx: &App) -> BufferDiffSnapshot {
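`base_text()` no longer returns an `Option`: the snapshot is always present (an empty buffer when the file does not exist in the base revision) and `base_text_exists()` carries the missing-file case. A small sketch of how a call site migrates, mirroring the `base_text_string()` change shown below:

```rust
// Before: base_text() returned Option<&language::BufferSnapshot>.
// let text = diff.base_text().map(|base| base.text());

// After: check the flag, then read the always-present snapshot.
let text = diff
    .base_text_exists()
    .then(|| diff.base_text().text());
```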
@@ -824,7 +901,6 @@ impl BufferDiff {
|
||||
.secondary_diff
|
||||
.as_ref()
|
||||
.map(|diff| Box::new(diff.read(cx).snapshot(cx))),
|
||||
is_single_insertion: false,
|
||||
}
|
||||
}
|
||||
|
||||
@@ -901,15 +977,16 @@ impl BufferDiff {
|
||||
rx
|
||||
}
|
||||
|
||||
#[cfg(any(test, feature = "test-support"))]
|
||||
pub fn base_text_string(&self) -> Option<String> {
|
||||
self.inner.base_text.as_ref().map(|buffer| buffer.text())
|
||||
self.inner
|
||||
.base_text_exists
|
||||
.then(|| self.inner.base_text.text())
|
||||
}
|
||||
|
||||
pub fn new(buffer: &text::BufferSnapshot) -> Self {
|
||||
pub fn new(buffer: &text::BufferSnapshot, cx: &mut App) -> Self {
|
||||
BufferDiff {
|
||||
buffer_id: buffer.remote_id(),
|
||||
inner: BufferDiff::build_empty(buffer),
|
||||
inner: BufferDiff::build_empty(buffer, cx),
|
||||
secondary_diff: None,
|
||||
}
|
||||
}
|
||||
@@ -939,14 +1016,10 @@ impl BufferDiff {
|
||||
|
||||
#[cfg(any(test, feature = "test-support"))]
|
||||
pub fn recalculate_diff_sync(&mut self, buffer: text::BufferSnapshot, cx: &mut Context<Self>) {
|
||||
let base_text = self
|
||||
.inner
|
||||
.base_text
|
||||
.as_ref()
|
||||
.map(|base_text| base_text.text());
|
||||
let base_text = self.base_text_string().map(Arc::new);
|
||||
let snapshot = BufferDiff::build_with_base_buffer(
|
||||
buffer.clone(),
|
||||
base_text.clone().map(Arc::new),
|
||||
base_text,
|
||||
self.inner.base_text.clone(),
|
||||
cx,
|
||||
);
|
||||
@@ -957,6 +1030,10 @@ impl BufferDiff {
|
||||
}
|
||||
|
||||
impl DiffHunk {
|
||||
pub fn is_created_file(&self) -> bool {
|
||||
self.diff_base_byte_range == (0..0) && self.buffer_range == (Anchor::MIN..Anchor::MAX)
|
||||
}
|
||||
|
||||
pub fn status(&self) -> DiffHunkStatus {
|
||||
let kind = if self.buffer_range.start == self.buffer_range.end {
|
||||
DiffHunkStatusKind::Deleted
|
||||
@@ -973,6 +1050,23 @@ impl DiffHunk {
|
||||
}
|
||||
|
||||
impl DiffHunkStatus {
|
||||
pub fn has_secondary_hunk(&self) -> bool {
|
||||
matches!(
|
||||
self.secondary,
|
||||
DiffHunkSecondaryStatus::HasSecondaryHunk
|
||||
| DiffHunkSecondaryStatus::SecondaryHunkAdditionPending
|
||||
| DiffHunkSecondaryStatus::OverlapsWithSecondaryHunk
|
||||
)
|
||||
}
|
||||
|
||||
pub fn is_pending(&self) -> bool {
|
||||
matches!(
|
||||
self.secondary,
|
||||
DiffHunkSecondaryStatus::SecondaryHunkAdditionPending
|
||||
| DiffHunkSecondaryStatus::SecondaryHunkRemovalPending
|
||||
)
|
||||
}
|
||||
|
||||
pub fn is_deleted(&self) -> bool {
|
||||
self.kind == DiffHunkStatusKind::Deleted
|
||||
}
|
||||
@@ -1006,7 +1100,6 @@ impl DiffHunkStatus {
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(any(test, feature = "test-support"))]
|
||||
pub fn deleted_none() -> Self {
|
||||
Self {
|
||||
kind: DiffHunkStatusKind::Deleted,
|
||||
@@ -1014,7 +1107,6 @@ impl DiffHunkStatus {
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(any(test, feature = "test-support"))]
|
||||
pub fn added_none() -> Self {
|
||||
Self {
|
||||
kind: DiffHunkStatusKind::Added,
|
||||
@@ -1022,7 +1114,6 @@ impl DiffHunkStatus {
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(any(test, feature = "test-support"))]
|
||||
pub fn modified_none() -> Self {
|
||||
Self {
|
||||
kind: DiffHunkStatusKind::Modified,
|
||||
@@ -1120,7 +1211,7 @@ mod tests {
|
||||
],
|
||||
);
|
||||
|
||||
diff = BufferDiff::build_empty(&buffer);
|
||||
diff = cx.update(|cx| BufferDiff::build_empty(&buffer, cx));
|
||||
assert_hunks(
|
||||
diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &buffer, None),
|
||||
&buffer,
|
||||
@@ -1435,43 +1526,55 @@ mod tests {
|
||||
for example in table {
|
||||
let (buffer_text, ranges) = marked_text_ranges(&example.buffer_marked_text, false);
|
||||
let buffer = Buffer::new(0, BufferId::new(1).unwrap(), buffer_text);
|
||||
let uncommitted_diff =
|
||||
BufferDiff::build_sync(buffer.clone(), example.head_text.clone(), cx);
|
||||
let unstaged_diff =
|
||||
BufferDiff::build_sync(buffer.clone(), example.index_text.clone(), cx);
|
||||
let uncommitted_diff = BufferDiffSnapshot {
|
||||
inner: uncommitted_diff,
|
||||
secondary_diff: Some(Box::new(BufferDiffSnapshot {
|
||||
inner: unstaged_diff,
|
||||
is_single_insertion: false,
|
||||
secondary_diff: None,
|
||||
})),
|
||||
is_single_insertion: false,
|
||||
};
|
||||
let hunk_range =
|
||||
buffer.anchor_before(ranges[0].start)..buffer.anchor_before(ranges[0].end);
|
||||
|
||||
let range = buffer.anchor_before(ranges[0].start)..buffer.anchor_before(ranges[0].end);
|
||||
let unstaged = BufferDiff::build_sync(buffer.clone(), example.index_text.clone(), cx);
|
||||
let uncommitted = BufferDiff::build_sync(buffer.clone(), example.head_text.clone(), cx);
|
||||
|
||||
let new_index_text = cx
|
||||
.update(|cx| {
|
||||
uncommitted_diff.new_secondary_text_for_stage_or_unstage(
|
||||
true,
|
||||
uncommitted_diff
|
||||
.hunks_intersecting_range(range, &buffer)
|
||||
.map(|hunk| {
|
||||
(hunk.buffer_range.clone(), hunk.diff_base_byte_range.clone())
|
||||
}),
|
||||
&buffer,
|
||||
cx,
|
||||
let unstaged_diff = cx.new(|cx| {
|
||||
let mut diff = BufferDiff::new(&buffer, cx);
|
||||
diff.set_state(unstaged, &buffer);
|
||||
diff
|
||||
});
|
||||
|
||||
let uncommitted_diff = cx.new(|cx| {
|
||||
let mut diff = BufferDiff::new(&buffer, cx);
|
||||
diff.set_state(uncommitted, &buffer);
|
||||
diff.set_secondary_diff(unstaged_diff);
|
||||
diff
|
||||
});
|
||||
|
||||
uncommitted_diff.update(cx, |diff, cx| {
|
||||
let hunks = diff
|
||||
.hunks_intersecting_range(hunk_range.clone(), &buffer, &cx)
|
||||
.collect::<Vec<_>>();
|
||||
for hunk in &hunks {
|
||||
assert_ne!(hunk.secondary_status, DiffHunkSecondaryStatus::None)
|
||||
}
|
||||
|
||||
let new_index_text = diff
|
||||
.stage_or_unstage_hunks(true, &hunks, &buffer, true, cx)
|
||||
.unwrap()
|
||||
.to_string();
|
||||
|
||||
let hunks = diff
|
||||
.hunks_intersecting_range(hunk_range.clone(), &buffer, &cx)
|
||||
.collect::<Vec<_>>();
|
||||
for hunk in &hunks {
|
||||
assert_eq!(
|
||||
hunk.secondary_status,
|
||||
DiffHunkSecondaryStatus::SecondaryHunkRemovalPending
|
||||
)
|
||||
})
|
||||
.unwrap()
|
||||
.to_string();
|
||||
pretty_assertions::assert_eq!(
|
||||
new_index_text,
|
||||
example.final_index_text,
|
||||
"example: {}",
|
||||
example.name
|
||||
);
|
||||
}
|
||||
|
||||
pretty_assertions::assert_eq!(
|
||||
new_index_text,
|
||||
example.final_index_text,
|
||||
"example: {}",
|
||||
example.name
|
||||
);
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1505,7 +1608,7 @@ mod tests {
|
||||
|
||||
let mut buffer = Buffer::new(0, BufferId::new(1).unwrap(), buffer_text_1);
|
||||
|
||||
let empty_diff = BufferDiff::build_empty(&buffer);
|
||||
let empty_diff = cx.update(|cx| BufferDiff::build_empty(&buffer, cx));
|
||||
let diff_1 = BufferDiff::build_sync(buffer.clone(), base_text.clone(), cx);
|
||||
let range = diff_1.compare(&empty_diff, &buffer).unwrap();
|
||||
assert_eq!(range.to_point(&buffer), Point::new(0, 0)..Point::new(8, 0));
|
||||
@@ -1668,7 +1771,7 @@ mod tests {
|
||||
index_text: &Rope,
|
||||
head_text: String,
|
||||
cx: &mut TestAppContext,
|
||||
) -> BufferDiff {
|
||||
) -> Entity<BufferDiff> {
|
||||
let inner = BufferDiff::build_sync(working_copy.text.clone(), head_text, cx);
|
||||
let secondary = BufferDiff {
|
||||
buffer_id: working_copy.remote_id(),
|
||||
@@ -1680,11 +1783,11 @@ mod tests {
|
||||
secondary_diff: None,
|
||||
};
|
||||
let secondary = cx.new(|_| secondary);
|
||||
BufferDiff {
|
||||
cx.new(|_| BufferDiff {
|
||||
buffer_id: working_copy.remote_id(),
|
||||
inner,
|
||||
secondary_diff: Some(secondary),
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
let operations = std::env::var("OPERATIONS")
|
||||
@@ -1712,7 +1815,7 @@ mod tests {
|
||||
};
|
||||
|
||||
let mut diff = uncommitted_diff(&working_copy, &index_text, head_text.clone(), cx);
|
||||
let mut hunks = cx.update(|cx| {
|
||||
let mut hunks = diff.update(cx, |diff, cx| {
|
||||
diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &working_copy, cx)
|
||||
.collect::<Vec<_>>()
|
||||
});
|
||||
@@ -1723,6 +1826,7 @@ mod tests {
|
||||
for _ in 0..operations {
|
||||
let i = rng.gen_range(0..hunks.len());
|
||||
let hunk = &mut hunks[i];
|
||||
let hunk_to_change = hunk.clone();
|
||||
let stage = match hunk.secondary_status {
|
||||
DiffHunkSecondaryStatus::HasSecondaryHunk => {
|
||||
hunk.secondary_status = DiffHunkSecondaryStatus::None;
|
||||
@@ -1735,21 +1839,13 @@ mod tests {
|
||||
_ => unreachable!(),
|
||||
};
|
||||
|
||||
let snapshot = cx.update(|cx| diff.snapshot(cx));
|
||||
index_text = cx.update(|cx| {
|
||||
snapshot
|
||||
.new_secondary_text_for_stage_or_unstage(
|
||||
stage,
|
||||
[(hunk.buffer_range.clone(), hunk.diff_base_byte_range.clone())]
|
||||
.into_iter(),
|
||||
&working_copy,
|
||||
cx,
|
||||
)
|
||||
index_text = diff.update(cx, |diff, cx| {
|
||||
diff.stage_or_unstage_hunks(stage, &[hunk_to_change], &working_copy, true, cx)
|
||||
.unwrap()
|
||||
});
|
||||
|
||||
diff = uncommitted_diff(&working_copy, &index_text, head_text.clone(), cx);
|
||||
let found_hunks = cx.update(|cx| {
|
||||
let found_hunks = diff.update(cx, |diff, cx| {
|
||||
diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &working_copy, cx)
|
||||
.collect::<Vec<_>>()
|
||||
});
|
||||
|
||||
@@ -111,7 +111,7 @@ node_runtime.workspace = true
|
||||
notifications = { workspace = true, features = ["test-support"] }
|
||||
pretty_assertions.workspace = true
|
||||
project = { workspace = true, features = ["test-support"] }
|
||||
prompt_library.workspace = true
|
||||
prompt_store.workspace = true
|
||||
recent_projects = { workspace = true }
|
||||
release_channel.workspace = true
|
||||
remote = { workspace = true, features = ["test-support"] }
|
||||
|
||||
@@ -328,6 +328,7 @@ impl Server {
|
||||
.add_request_handler(forward_mutating_project_request::<proto::PrepareRename>)
|
||||
.add_request_handler(forward_mutating_project_request::<proto::PerformRename>)
|
||||
.add_request_handler(forward_mutating_project_request::<proto::ReloadBuffers>)
|
||||
.add_request_handler(forward_mutating_project_request::<proto::ApplyCodeActionKind>)
|
||||
.add_request_handler(forward_mutating_project_request::<proto::FormatBuffers>)
|
||||
.add_request_handler(forward_mutating_project_request::<proto::CreateProjectEntry>)
|
||||
.add_request_handler(forward_mutating_project_request::<proto::RenameProjectEntry>)
|
||||
|
||||
@@ -14,7 +14,7 @@ use client::{User, RECEIVE_TIMEOUT};
|
||||
use collections::{HashMap, HashSet};
|
||||
use fs::{FakeFs, Fs as _, RemoveOptions};
|
||||
use futures::{channel::mpsc, StreamExt as _};
|
||||
use prompt_library::PromptBuilder;
|
||||
use prompt_store::PromptBuilder;
|
||||
|
||||
use git::status::{FileStatus, StatusCode, TrackedStatus, UnmergedStatus, UnmergedStatusCode};
|
||||
use gpui::{
|
||||
|
||||
@@ -18,7 +18,7 @@ pub trait Component {
|
||||
}
|
||||
|
||||
pub trait ComponentPreview: Component {
|
||||
fn preview(_window: &mut Window, _cx: &App) -> AnyElement;
|
||||
fn preview(_window: &mut Window, _cx: &mut App) -> AnyElement;
|
||||
}
|
||||
|
||||
#[distributed_slice]
|
||||
@@ -32,7 +32,7 @@ pub static COMPONENT_DATA: LazyLock<RwLock<ComponentRegistry>> =
|
||||
|
||||
pub struct ComponentRegistry {
|
||||
components: Vec<(Option<&'static str>, &'static str, Option<&'static str>)>,
|
||||
previews: HashMap<&'static str, fn(&mut Window, &App) -> AnyElement>,
|
||||
previews: HashMap<&'static str, fn(&mut Window, &mut App) -> AnyElement>,
|
||||
}
|
||||
|
||||
impl ComponentRegistry {
|
||||
@@ -62,7 +62,10 @@ pub fn register_component<T: Component>() {
|
||||
}
|
||||
|
||||
pub fn register_preview<T: ComponentPreview>() {
|
||||
let preview_data = (T::name(), T::preview as fn(&mut Window, &App) -> AnyElement);
|
||||
let preview_data = (
|
||||
T::name(),
|
||||
T::preview as fn(&mut Window, &mut App) -> AnyElement,
|
||||
);
|
||||
COMPONENT_DATA
|
||||
.write()
|
||||
.previews
|
||||
@@ -77,7 +80,7 @@ pub struct ComponentMetadata {
|
||||
name: SharedString,
|
||||
scope: Option<SharedString>,
|
||||
description: Option<SharedString>,
|
||||
preview: Option<fn(&mut Window, &App) -> AnyElement>,
|
||||
preview: Option<fn(&mut Window, &mut App) -> AnyElement>,
|
||||
}
|
||||
|
||||
impl ComponentMetadata {
|
||||
@@ -93,7 +96,7 @@ impl ComponentMetadata {
|
||||
self.description.clone()
|
||||
}
|
||||
|
||||
pub fn preview(&self) -> Option<fn(&mut Window, &App) -> AnyElement> {
|
||||
pub fn preview(&self) -> Option<fn(&mut Window, &mut App) -> AnyElement> {
|
||||
self.preview
|
||||
}
|
||||
}
|
||||
@@ -235,6 +238,7 @@ pub struct ComponentExampleGroup {
|
||||
pub title: Option<SharedString>,
|
||||
pub examples: Vec<ComponentExample>,
|
||||
pub grow: bool,
|
||||
pub vertical: bool,
|
||||
}
|
||||
|
||||
impl RenderOnce for ComponentExampleGroup {
|
||||
@@ -270,6 +274,7 @@ impl RenderOnce for ComponentExampleGroup {
|
||||
.child(
|
||||
div()
|
||||
.flex()
|
||||
.when(self.vertical, |this| this.flex_col())
|
||||
.items_start()
|
||||
.w_full()
|
||||
.gap_6()
|
||||
@@ -287,6 +292,7 @@ impl ComponentExampleGroup {
|
||||
title: None,
|
||||
examples,
|
||||
grow: false,
|
||||
vertical: false,
|
||||
}
|
||||
}
|
||||
|
||||
@@ -296,6 +302,7 @@ impl ComponentExampleGroup {
|
||||
title: Some(title.into()),
|
||||
examples,
|
||||
grow: false,
|
||||
vertical: false,
|
||||
}
|
||||
}
|
||||
|
||||
@@ -304,6 +311,12 @@ impl ComponentExampleGroup {
|
||||
self.grow = true;
|
||||
self
|
||||
}
|
||||
|
||||
/// Lay the group out vertically.
|
||||
pub fn vertical(mut self) -> Self {
|
||||
self.vertical = true;
|
||||
self
|
||||
}
|
||||
}
|
||||
|
||||
/// Create a single example
|
||||
|
||||
@@ -93,7 +93,7 @@ impl ComponentPreview {
|
||||
&self,
|
||||
ix: usize,
|
||||
window: &mut Window,
|
||||
cx: &Context<Self>,
|
||||
cx: &mut Context<Self>,
|
||||
) -> impl IntoElement {
|
||||
let component = self.get_component(ix);
|
||||
|
||||
|
||||
@@ -348,6 +348,7 @@ gpui::actions!(
         OpenPermalinkToLine,
         OpenSelectionsInMultibuffer,
         OpenUrl,
+        OrganizeImports,
         Outdent,
         AutoIndent,
         PageDown,
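`OrganizeImports` above is a new editor action; the handler added near the end of this diff routes it through `perform_code_action_kind` with the LSP kind `source.organizeImports`. A small sketch of that mapping, using the lsp-types constant the diff itself references:

```rust
// The action dispatches the standard LSP code-action kind for import
// organization; servers that advertise this kind receive a codeAction
// request filtered to it.
let kind = lsp::CodeActionKind::SOURCE_ORGANIZE_IMPORTS;
assert_eq!(kind.as_str(), "source.organizeImports");
```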
@@ -52,7 +52,7 @@ pub use actions::{AcceptEditPrediction, OpenExcerpts, OpenExcerptsSplit};
|
||||
use aho_corasick::AhoCorasick;
|
||||
use anyhow::{anyhow, Context as _, Result};
|
||||
use blink_manager::BlinkManager;
|
||||
use buffer_diff::{DiffHunkSecondaryStatus, DiffHunkStatus};
|
||||
use buffer_diff::DiffHunkStatus;
|
||||
use client::{Collaborator, ParticipantIndex};
|
||||
use clock::ReplicaId;
|
||||
use collections::{BTreeMap, HashMap, HashSet, VecDeque};
|
||||
@@ -120,8 +120,8 @@ use task::{ResolvedTask, TaskTemplate, TaskVariables};
|
||||
use hover_links::{find_file, HoverLink, HoveredLinkState, InlayHighlight};
|
||||
pub use lsp::CompletionContext;
|
||||
use lsp::{
|
||||
CompletionItemKind, CompletionTriggerKind, DiagnosticSeverity, InsertTextFormat,
|
||||
LanguageServerId, LanguageServerName,
|
||||
CodeActionKind, CompletionItemKind, CompletionTriggerKind, DiagnosticSeverity,
|
||||
InsertTextFormat, LanguageServerId, LanguageServerName,
|
||||
};
|
||||
|
||||
use language::BufferSnapshot;
|
||||
@@ -203,6 +203,7 @@ pub(crate) const CURSORS_VISIBLE_FOR: Duration = Duration::from_millis(2000);
|
||||
#[doc(hidden)]
|
||||
pub const CODE_ACTIONS_DEBOUNCE_TIMEOUT: Duration = Duration::from_millis(250);
|
||||
|
||||
pub(crate) const CODE_ACTION_TIMEOUT: Duration = Duration::from_secs(5);
|
||||
pub(crate) const FORMAT_TIMEOUT: Duration = Duration::from_secs(5);
|
||||
pub(crate) const SCROLL_CENTER_TOP_BOTTOM_DEBOUNCE_TIMEOUT: Duration = Duration::from_secs(1);
|
||||
|
||||
@@ -7719,14 +7720,9 @@ impl Editor {
|
||||
cx: &mut Context<Editor>,
|
||||
) {
|
||||
let mut revert_changes = HashMap::default();
|
||||
let snapshot = self.buffer.read(cx).snapshot(cx);
|
||||
let Some(project) = &self.project else {
|
||||
return;
|
||||
};
|
||||
|
||||
let chunk_by = self
|
||||
.snapshot(window, cx)
|
||||
.hunks_for_ranges(ranges.into_iter())
|
||||
.hunks_for_ranges(ranges)
|
||||
.into_iter()
|
||||
.chunk_by(|hunk| hunk.buffer_id);
|
||||
for (buffer_id, hunks) in &chunk_by {
|
||||
@@ -7734,15 +7730,7 @@ impl Editor {
|
||||
for hunk in &hunks {
|
||||
self.prepare_restore_change(&mut revert_changes, hunk, cx);
|
||||
}
|
||||
Self::do_stage_or_unstage(
|
||||
project,
|
||||
false,
|
||||
buffer_id,
|
||||
hunks.into_iter(),
|
||||
&snapshot,
|
||||
window,
|
||||
cx,
|
||||
);
|
||||
self.do_stage_or_unstage(false, buffer_id, hunks.into_iter(), window, cx);
|
||||
}
|
||||
drop(chunk_by);
|
||||
if !revert_changes.is_empty() {
|
||||
@@ -7787,7 +7775,6 @@ impl Editor {
|
||||
let original_text = diff
|
||||
.read(cx)
|
||||
.base_text()
|
||||
.as_ref()?
|
||||
.as_rope()
|
||||
.slice(hunk.diff_base_byte_range.clone());
|
||||
let buffer_snapshot = buffer.snapshot();
|
||||
@@ -11437,27 +11424,37 @@ impl Editor {
window: &mut Window,
cx: &mut Context<Editor>,
) -> Option<MultiBufferDiffHunk> {
let mut hunk = snapshot
.buffer_snapshot
.diff_hunks_in_range(position..snapshot.buffer_snapshot.max_point())
.find(|hunk| hunk.row_range.start.0 > position.row);
if hunk.is_none() {
hunk = snapshot
.buffer_snapshot
.diff_hunks_in_range(Point::zero()..position)
.find(|hunk| hunk.row_range.end.0 < position.row)
}
let hunk = self.hunk_after_position(snapshot, position);

if let Some(hunk) = &hunk {
let destination = Point::new(hunk.row_range.start.0, 0);
self.unfold_ranges(&[destination..destination], false, false, cx);
let point = Point::new(hunk.row_range.start.0, 0);

self.unfold_ranges(&[point..point], false, false, cx);
self.change_selections(Some(Autoscroll::fit()), window, cx, |s| {
s.select_ranges(vec![destination..destination]);
s.select_ranges([point..point]);
});
}

hunk
}

fn hunk_after_position(
&mut self,
snapshot: &EditorSnapshot,
position: Point,
) -> Option<MultiBufferDiffHunk> {
snapshot
.buffer_snapshot
.diff_hunks_in_range(position..snapshot.buffer_snapshot.max_point())
.find(|hunk| hunk.row_range.start.0 > position.row)
.or_else(|| {
snapshot
.buffer_snapshot
.diff_hunks_in_range(Point::zero()..position)
.find(|hunk| hunk.row_range.end.0 < position.row)
})
}

fn go_to_prev_hunk(&mut self, _: &GoToPrevHunk, window: &mut Window, cx: &mut Context<Self>) {
let snapshot = self.snapshot(window, cx);
let selection = self.selections.newest::<Point>(cx);
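The refactor above extracts `hunk_after_position`, which prefers the first hunk strictly after the cursor row and otherwise wraps to a hunk before it. A standalone sketch of that wrap-around lookup over plain row numbers (toy data, not MultiBufferDiffHunk):

// Prefer the first row strictly after `current`; otherwise wrap around and
// take the first row before it (i.e., jump back toward the top of the buffer).
fn next_wrapping(rows: &[u32], current: u32) -> Option<u32> {
    rows.iter()
        .copied()
        .find(|&row| row > current)
        .or_else(|| rows.iter().copied().find(|&row| row < current))
}

fn main() {
    assert_eq!(next_wrapping(&[2, 8, 20], 9), Some(20));
    assert_eq!(next_wrapping(&[2, 8, 20], 25), Some(2)); // wraps to the first hunk
    assert_eq!(next_wrapping(&[9], 9), None);
}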
@@ -12494,7 +12491,6 @@ impl Editor {
buffer.push_transaction(&transaction.0, cx);
}
}

cx.notify();
})
.ok();
@@ -12503,6 +12499,60 @@ impl Editor {
})
}

fn organize_imports(
&mut self,
_: &OrganizeImports,
window: &mut Window,
cx: &mut Context<Self>,
) -> Option<Task<Result<()>>> {
let project = match &self.project {
Some(project) => project.clone(),
None => return None,
};
Some(self.perform_code_action_kind(
project,
CodeActionKind::SOURCE_ORGANIZE_IMPORTS,
window,
cx,
))
}

fn perform_code_action_kind(
&mut self,
project: Entity<Project>,
kind: CodeActionKind,
window: &mut Window,
cx: &mut Context<Self>,
) -> Task<Result<()>> {
let buffer = self.buffer.clone();
let buffers = buffer.read(cx).all_buffers();
let mut timeout = cx.background_executor().timer(CODE_ACTION_TIMEOUT).fuse();
let apply_action = project.update(cx, |project, cx| {
project.apply_code_action_kind(buffers, kind, true, cx)
});
cx.spawn_in(window, |_, mut cx| async move {
let transaction = futures::select_biased! {
() = timeout => {
log::warn!("timed out waiting for executing code action");
None
}
transaction = apply_action.log_err().fuse() => transaction,
};
buffer
.update(&mut cx, |buffer, cx| {
// check if we need this
if let Some(transaction) = transaction {
if !buffer.is_singleton() {
buffer.push_transaction(&transaction.0, cx);
}
}
cx.notify();
})
.ok();
Ok(())
})
}

fn restart_language_server(
&mut self,
_: &RestartLanguageServer,
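`perform_code_action_kind` above races the applied code action against `CODE_ACTION_TIMEOUT` using `futures::select_biased!`. A minimal sketch of the same race outside gpui, assuming the workspace's `futures` and `smol` crates (the function and its names are illustrative, not Zed's):

use futures::FutureExt as _;
use std::time::Duration;

// Resolve to `None` if `work` does not finish before `timeout` elapses.
async fn with_timeout<T>(
    work: impl std::future::Future<Output = Option<T>>,
    timeout: Duration,
) -> Option<T> {
    let mut work = Box::pin(work.fuse());
    let mut timer = smol::Timer::after(timeout).fuse();
    futures::select_biased! {
        _ = timer => None, // timed out; fall through without a result
        result = work => result,
    }
}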
@@ -12978,11 +13028,11 @@ impl Editor {
|
||||
self.fold_creases(to_fold, true, window, cx);
|
||||
} else {
|
||||
let multi_buffer_snapshot = self.buffer.read(cx).snapshot(cx);
|
||||
|
||||
let buffer_ids: HashSet<_> = multi_buffer_snapshot
|
||||
.ranges_to_buffer_ranges(self.selections.disjoint_anchor_ranges())
|
||||
.map(|(snapshot, _, _)| snapshot.remote_id())
|
||||
.collect();
|
||||
let buffer_ids = self
|
||||
.selections
|
||||
.disjoint_anchor_ranges()
|
||||
.flat_map(|range| multi_buffer_snapshot.buffer_ids_for_range(range))
|
||||
.collect::<HashSet<_>>();
|
||||
for buffer_id in buffer_ids {
|
||||
self.fold_buffer(buffer_id, cx);
|
||||
}
|
||||
@@ -13155,10 +13205,11 @@ impl Editor {
|
||||
self.unfold_ranges(&ranges, true, true, cx);
|
||||
} else {
|
||||
let multi_buffer_snapshot = self.buffer.read(cx).snapshot(cx);
|
||||
let buffer_ids: HashSet<_> = multi_buffer_snapshot
|
||||
.ranges_to_buffer_ranges(self.selections.disjoint_anchor_ranges())
|
||||
.map(|(snapshot, _, _)| snapshot.remote_id())
|
||||
.collect();
|
||||
let buffer_ids = self
|
||||
.selections
|
||||
.disjoint_anchor_ranges()
|
||||
.flat_map(|range| multi_buffer_snapshot.buffer_ids_for_range(range))
|
||||
.collect::<HashSet<_>>();
|
||||
for buffer_id in buffer_ids {
|
||||
self.unfold_buffer(buffer_id, cx);
|
||||
}
|
||||
@@ -13470,7 +13521,7 @@ impl Editor {
|
||||
snapshot: &MultiBufferSnapshot,
|
||||
) -> bool {
|
||||
let mut hunks = self.diff_hunks_in_ranges(ranges, &snapshot);
|
||||
hunks.any(|hunk| hunk.secondary_status != DiffHunkSecondaryStatus::None)
|
||||
hunks.any(|hunk| hunk.status().has_secondary_hunk())
|
||||
}
|
||||
|
||||
pub fn toggle_staged_selected_diff_hunks(
|
||||
@@ -13487,20 +13538,20 @@ impl Editor {
|
||||
|
||||
pub fn stage_and_next(
|
||||
&mut self,
|
||||
_: &::git::StageAndNext,
|
||||
action: &::git::StageAndNext,
|
||||
window: &mut Window,
|
||||
cx: &mut Context<Self>,
|
||||
) {
|
||||
self.do_stage_or_unstage_and_next(true, window, cx);
|
||||
self.do_stage_or_unstage_and_next(true, action.whole_excerpt, window, cx);
|
||||
}
|
||||
|
||||
pub fn unstage_and_next(
|
||||
&mut self,
|
||||
_: &::git::UnstageAndNext,
|
||||
action: &::git::UnstageAndNext,
|
||||
window: &mut Window,
|
||||
cx: &mut Context<Self>,
|
||||
) {
|
||||
self.do_stage_or_unstage_and_next(false, window, cx);
|
||||
self.do_stage_or_unstage_and_next(false, action.whole_excerpt, window, cx);
|
||||
}
|
||||
|
||||
pub fn stage_or_unstage_diff_hunks(
|
||||
@@ -13511,31 +13562,47 @@ impl Editor {
|
||||
cx: &mut Context<Self>,
|
||||
) {
|
||||
let snapshot = self.buffer.read(cx).snapshot(cx);
|
||||
let Some(project) = &self.project else {
|
||||
return;
|
||||
};
|
||||
|
||||
let chunk_by = self
|
||||
.diff_hunks_in_ranges(&ranges, &snapshot)
|
||||
.chunk_by(|hunk| hunk.buffer_id);
|
||||
for (buffer_id, hunks) in &chunk_by {
|
||||
Self::do_stage_or_unstage(project, stage, buffer_id, hunks, &snapshot, window, cx);
|
||||
self.do_stage_or_unstage(stage, buffer_id, hunks, window, cx);
|
||||
}
|
||||
}
|
||||
|
||||
fn do_stage_or_unstage_and_next(
|
||||
&mut self,
|
||||
stage: bool,
|
||||
whole_excerpt: bool,
|
||||
window: &mut Window,
|
||||
cx: &mut Context<Self>,
|
||||
) {
|
||||
let mut ranges = self.selections.disjoint_anchor_ranges().collect::<Vec<_>>();
|
||||
|
||||
if ranges.iter().any(|range| range.start != range.end) {
|
||||
self.stage_or_unstage_diff_hunks(stage, &ranges[..], window, cx);
|
||||
return;
|
||||
}
|
||||
|
||||
if !self.buffer().read(cx).is_singleton() {
|
||||
if !whole_excerpt {
|
||||
let snapshot = self.snapshot(window, cx);
|
||||
let newest_range = self.selections.newest::<Point>(cx).range();
|
||||
|
||||
let run_twice = snapshot
|
||||
.hunks_for_ranges([newest_range])
|
||||
.first()
|
||||
.is_some_and(|hunk| {
|
||||
let next_line = Point::new(hunk.row_range.end.0 + 1, 0);
|
||||
self.hunk_after_position(&snapshot, next_line)
|
||||
.is_some_and(|other| other.row_range == hunk.row_range)
|
||||
});
|
||||
|
||||
if run_twice {
|
||||
self.go_to_next_hunk(&Default::default(), window, cx);
|
||||
}
|
||||
} else if !self.buffer().read(cx).is_singleton() {
|
||||
self.stage_or_unstage_diff_hunks(stage, &ranges[..], window, cx);
|
||||
|
||||
if let Some((excerpt_id, buffer, range)) = self.active_excerpt(cx) {
|
||||
if buffer.read(cx).is_empty() {
|
||||
let buffer = buffer.read(cx);
|
||||
@@ -13549,9 +13616,9 @@ impl Editor {
|
||||
let Some(project) = self.project.as_ref() else {
|
||||
return;
|
||||
};
|
||||
let project = project.read(cx);
|
||||
|
||||
let Some(repo) = project.git_store().read(cx).active_repository() else {
|
||||
let Some(repo) = project.read(cx).git_store().read(cx).active_repository()
|
||||
else {
|
||||
return;
|
||||
};
|
||||
|
||||
@@ -13583,7 +13650,7 @@ impl Editor {
|
||||
point = snapshot.clip_point(point, Bias::Right);
|
||||
self.change_selections(Some(Autoscroll::top_relative(6)), window, cx, |s| {
|
||||
s.select_ranges([point..point]);
|
||||
})
|
||||
});
|
||||
}
|
||||
return;
|
||||
}
|
||||
@@ -13593,16 +13660,20 @@ impl Editor {
|
||||
}
|
||||
|
||||
fn do_stage_or_unstage(
|
||||
project: &Entity<Project>,
|
||||
&self,
|
||||
stage: bool,
|
||||
buffer_id: BufferId,
|
||||
hunks: impl Iterator<Item = MultiBufferDiffHunk>,
|
||||
snapshot: &MultiBufferSnapshot,
|
||||
window: &mut Window,
|
||||
cx: &mut App,
|
||||
) {
|
||||
let Some(project) = self.project.as_ref() else {
|
||||
return;
|
||||
};
|
||||
let Some(buffer) = project.read(cx).buffer_for_id(buffer_id, cx) else {
|
||||
log::debug!("no buffer for id");
|
||||
return;
|
||||
};
|
||||
let Some(diff) = self.buffer.read(cx).diff_for(buffer_id) else {
|
||||
return;
|
||||
};
|
||||
let buffer_snapshot = buffer.read(cx).snapshot();
|
||||
@@ -13616,37 +13687,31 @@ impl Editor {
|
||||
log::debug!("no git repo for buffer id");
|
||||
return;
|
||||
};
|
||||
let Some(diff) = snapshot.diff_for_buffer_id(buffer_id) else {
|
||||
log::debug!("no diff for buffer id");
|
||||
return;
|
||||
};
|
||||
|
||||
let new_index_text = if !stage && diff.is_single_insertion || stage && !file_exists {
|
||||
log::debug!("removing from index");
|
||||
None
|
||||
} else {
|
||||
diff.new_secondary_text_for_stage_or_unstage(
|
||||
let new_index_text = diff.update(cx, |diff, cx| {
|
||||
diff.stage_or_unstage_hunks(
|
||||
stage,
|
||||
hunks.filter_map(|hunk| {
|
||||
if stage && hunk.secondary_status == DiffHunkSecondaryStatus::None {
|
||||
return None;
|
||||
} else if !stage
|
||||
&& hunk.secondary_status == DiffHunkSecondaryStatus::HasSecondaryHunk
|
||||
{
|
||||
return None;
|
||||
}
|
||||
Some((hunk.buffer_range.clone(), hunk.diff_base_byte_range.clone()))
|
||||
}),
|
||||
&hunks
|
||||
.map(|hunk| buffer_diff::DiffHunk {
|
||||
buffer_range: hunk.buffer_range,
|
||||
diff_base_byte_range: hunk.diff_base_byte_range,
|
||||
secondary_status: hunk.secondary_status,
|
||||
row_range: 0..0, // unused
|
||||
})
|
||||
.collect::<Vec<_>>(),
|
||||
&buffer_snapshot,
|
||||
file_exists,
|
||||
cx,
|
||||
)
|
||||
};
|
||||
});
|
||||
|
||||
if file_exists {
|
||||
let buffer_store = project.read(cx).buffer_store().clone();
|
||||
buffer_store
|
||||
.update(cx, |buffer_store, cx| buffer_store.save_buffer(buffer, cx))
|
||||
.detach_and_log_err(cx);
|
||||
}
|
||||
|
||||
let recv = repo
|
||||
.read(cx)
|
||||
.set_index_text(&path, new_index_text.map(|rope| rope.to_string()));
|
||||
@@ -13721,7 +13786,7 @@ impl Editor {
|
||||
cx: &mut Context<Self>,
|
||||
) {
|
||||
let snapshot = self.snapshot(window, cx);
|
||||
let hunks = snapshot.hunks_for_ranges(self.selections.ranges(cx).into_iter());
|
||||
let hunks = snapshot.hunks_for_ranges(self.selections.ranges(cx));
|
||||
let mut ranges_by_buffer = HashMap::default();
|
||||
self.transact(window, cx, |editor, _window, cx| {
|
||||
for hunk in hunks {
|
||||
@@ -17048,7 +17113,7 @@ impl EditorSnapshot {

pub fn hunks_for_ranges(
&self,
ranges: impl Iterator<Item = Range<Point>>,
ranges: impl IntoIterator<Item = Range<Point>>,
) -> Vec<MultiBufferDiffHunk> {
let mut hunks = Vec::new();
let mut processed_buffer_rows: HashMap<BufferId, HashSet<Range<text::Anchor>>> =
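The widened `impl IntoIterator` bound above lets call sites pass a `Vec` or an array directly instead of building an iterator first, which is why the `ranges.into_iter()` and `self.selections.ranges(cx).into_iter()` calls disappear earlier in this diff. A standalone illustration with toy types:

use std::ops::Range;

// Taking `impl IntoIterator` lets callers hand over a Vec, an array, or any
// iterator without an explicit `.into_iter()` at the call site.
fn total_len(ranges: impl IntoIterator<Item = Range<u32>>) -> u32 {
    ranges.into_iter().map(|r| r.end - r.start).sum()
}

fn main() {
    assert_eq!(total_len(vec![0..3, 5..9]), 7);
    assert_eq!(total_len([1..2]), 1);
    assert_eq!(total_len((0..2).map(|i| i..i + 1)), 2);
}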
@@ -7,7 +7,7 @@ use crate::{
|
||||
},
|
||||
JoinLines,
|
||||
};
|
||||
use buffer_diff::{BufferDiff, DiffHunkStatus, DiffHunkStatusKind};
|
||||
use buffer_diff::{BufferDiff, DiffHunkSecondaryStatus, DiffHunkStatus, DiffHunkStatusKind};
|
||||
use futures::StreamExt;
|
||||
use gpui::{
|
||||
div, BackgroundExecutor, SemanticVersion, TestAppContext, UpdateGlobal, VisualTestContext,
|
||||
@@ -7875,6 +7875,157 @@ async fn test_document_format_manual_trigger(cx: &mut TestAppContext) {
|
||||
);
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_organize_imports_manual_trigger(cx: &mut TestAppContext) {
|
||||
init_test(cx, |settings| {
|
||||
settings.defaults.formatter = Some(language_settings::SelectedFormatter::List(
|
||||
FormatterList(vec![Formatter::LanguageServer { name: None }].into()),
|
||||
))
|
||||
});
|
||||
|
||||
let fs = FakeFs::new(cx.executor());
|
||||
fs.insert_file(path!("/file.ts"), Default::default()).await;
|
||||
|
||||
let project = Project::test(fs, [path!("/").as_ref()], cx).await;
|
||||
|
||||
let language_registry = project.read_with(cx, |project, _| project.languages().clone());
|
||||
language_registry.add(Arc::new(Language::new(
|
||||
LanguageConfig {
|
||||
name: "TypeScript".into(),
|
||||
matcher: LanguageMatcher {
|
||||
path_suffixes: vec!["ts".to_string()],
|
||||
..Default::default()
|
||||
},
|
||||
..LanguageConfig::default()
|
||||
},
|
||||
Some(tree_sitter_typescript::LANGUAGE_TYPESCRIPT.into()),
|
||||
)));
|
||||
update_test_language_settings(cx, |settings| {
|
||||
settings.defaults.prettier = Some(PrettierSettings {
|
||||
allowed: true,
|
||||
..PrettierSettings::default()
|
||||
});
|
||||
});
|
||||
let mut fake_servers = language_registry.register_fake_lsp(
|
||||
"TypeScript",
|
||||
FakeLspAdapter {
|
||||
capabilities: lsp::ServerCapabilities {
|
||||
code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
|
||||
..Default::default()
|
||||
},
|
||||
..Default::default()
|
||||
},
|
||||
);
|
||||
|
||||
let buffer = project
|
||||
.update(cx, |project, cx| {
|
||||
project.open_local_buffer(path!("/file.ts"), cx)
|
||||
})
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
let buffer = cx.new(|cx| MultiBuffer::singleton(buffer, cx));
|
||||
let (editor, cx) = cx.add_window_view(|window, cx| {
|
||||
build_editor_with_project(project.clone(), buffer, window, cx)
|
||||
});
|
||||
editor.update_in(cx, |editor, window, cx| {
|
||||
editor.set_text(
|
||||
"import { a } from 'module';\nimport { b } from 'module';\n\nconst x = a;\n",
|
||||
window,
|
||||
cx,
|
||||
)
|
||||
});
|
||||
|
||||
cx.executor().start_waiting();
|
||||
let fake_server = fake_servers.next().await.unwrap();
|
||||
|
||||
let format = editor
|
||||
.update_in(cx, |editor, window, cx| {
|
||||
editor.perform_code_action_kind(
|
||||
project.clone(),
|
||||
CodeActionKind::SOURCE_ORGANIZE_IMPORTS,
|
||||
window,
|
||||
cx,
|
||||
)
|
||||
})
|
||||
.unwrap();
|
||||
fake_server
|
||||
.handle_request::<lsp::request::CodeActionRequest, _, _>(move |params, _| async move {
|
||||
assert_eq!(
|
||||
params.text_document.uri,
|
||||
lsp::Url::from_file_path(path!("/file.ts")).unwrap()
|
||||
);
|
||||
Ok(Some(vec![lsp::CodeActionOrCommand::CodeAction(
|
||||
lsp::CodeAction {
|
||||
title: "Organize Imports".to_string(),
|
||||
kind: Some(lsp::CodeActionKind::SOURCE_ORGANIZE_IMPORTS),
|
||||
edit: Some(lsp::WorkspaceEdit {
|
||||
changes: Some(
|
||||
[(
|
||||
params.text_document.uri.clone(),
|
||||
vec![lsp::TextEdit::new(
|
||||
lsp::Range::new(
|
||||
lsp::Position::new(1, 0),
|
||||
lsp::Position::new(2, 0),
|
||||
),
|
||||
"".to_string(),
|
||||
)],
|
||||
)]
|
||||
.into_iter()
|
||||
.collect(),
|
||||
),
|
||||
..Default::default()
|
||||
}),
|
||||
..Default::default()
|
||||
},
|
||||
)]))
|
||||
})
|
||||
.next()
|
||||
.await;
|
||||
cx.executor().start_waiting();
|
||||
format.await;
|
||||
assert_eq!(
|
||||
editor.update(cx, |editor, cx| editor.text(cx)),
|
||||
"import { a } from 'module';\n\nconst x = a;\n"
|
||||
);
|
||||
|
||||
editor.update_in(cx, |editor, window, cx| {
|
||||
editor.set_text(
|
||||
"import { a } from 'module';\nimport { b } from 'module';\n\nconst x = a;\n",
|
||||
window,
|
||||
cx,
|
||||
)
|
||||
});
|
||||
// Ensure we don't lock if code action hangs.
|
||||
fake_server.handle_request::<lsp::request::CodeActionRequest, _, _>(
|
||||
move |params, _| async move {
|
||||
assert_eq!(
|
||||
params.text_document.uri,
|
||||
lsp::Url::from_file_path(path!("/file.ts")).unwrap()
|
||||
);
|
||||
futures::future::pending::<()>().await;
|
||||
unreachable!()
|
||||
},
|
||||
);
|
||||
let format = editor
|
||||
.update_in(cx, |editor, window, cx| {
|
||||
editor.perform_code_action_kind(
|
||||
project,
|
||||
CodeActionKind::SOURCE_ORGANIZE_IMPORTS,
|
||||
window,
|
||||
cx,
|
||||
)
|
||||
})
|
||||
.unwrap();
|
||||
cx.executor().advance_clock(super::CODE_ACTION_TIMEOUT);
|
||||
cx.executor().start_waiting();
|
||||
format.await;
|
||||
assert_eq!(
|
||||
editor.update(cx, |editor, cx| editor.text(cx)),
|
||||
"import { a } from 'module';\nimport { b } from 'module';\n\nconst x = a;\n"
|
||||
);
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_concurrent_format_requests(cx: &mut TestAppContext) {
|
||||
init_test(cx, |_| {});
|
||||
@@ -12404,7 +12555,7 @@ async fn test_addition_reverts(cx: &mut TestAppContext) {
|
||||
struct Row9.2;
|
||||
struct Row9.3;
|
||||
struct Row10;"#},
|
||||
vec![DiffHunkStatus::added_none(), DiffHunkStatus::added_none()],
|
||||
vec![DiffHunkStatusKind::Added, DiffHunkStatusKind::Added],
|
||||
indoc! {r#"struct Row;
|
||||
struct Row1;
|
||||
struct Row1.1;
|
||||
@@ -12442,7 +12593,7 @@ async fn test_addition_reverts(cx: &mut TestAppContext) {
|
||||
struct Row8;
|
||||
struct Row9;
|
||||
struct Row10;"#},
|
||||
vec![DiffHunkStatus::added_none(), DiffHunkStatus::added_none()],
|
||||
vec![DiffHunkStatusKind::Added, DiffHunkStatusKind::Added],
|
||||
indoc! {r#"struct Row;
|
||||
struct Row1;
|
||||
struct Row2;
|
||||
@@ -12489,11 +12640,11 @@ async fn test_addition_reverts(cx: &mut TestAppContext) {
|
||||
«ˇ// something on bottom»
|
||||
struct Row10;"#},
|
||||
vec![
|
||||
DiffHunkStatus::added_none(),
|
||||
DiffHunkStatus::added_none(),
|
||||
DiffHunkStatus::added_none(),
|
||||
DiffHunkStatus::added_none(),
|
||||
DiffHunkStatus::added_none(),
|
||||
DiffHunkStatusKind::Added,
|
||||
DiffHunkStatusKind::Added,
|
||||
DiffHunkStatusKind::Added,
|
||||
DiffHunkStatusKind::Added,
|
||||
DiffHunkStatusKind::Added,
|
||||
],
|
||||
indoc! {r#"struct Row;
|
||||
ˇstruct Row1;
|
||||
@@ -12541,10 +12692,7 @@ async fn test_modification_reverts(cx: &mut TestAppContext) {
|
||||
struct Row99;
|
||||
struct Row9;
|
||||
struct Row10;"#},
|
||||
vec![
|
||||
DiffHunkStatus::modified_none(),
|
||||
DiffHunkStatus::modified_none(),
|
||||
],
|
||||
vec![DiffHunkStatusKind::Modified, DiffHunkStatusKind::Modified],
|
||||
indoc! {r#"struct Row;
|
||||
struct Row1;
|
||||
struct Row33;
|
||||
@@ -12571,10 +12719,7 @@ async fn test_modification_reverts(cx: &mut TestAppContext) {
|
||||
struct Row99;
|
||||
struct Row9;
|
||||
struct Row10;"#},
|
||||
vec![
|
||||
DiffHunkStatus::modified_none(),
|
||||
DiffHunkStatus::modified_none(),
|
||||
],
|
||||
vec![DiffHunkStatusKind::Modified, DiffHunkStatusKind::Modified],
|
||||
indoc! {r#"struct Row;
|
||||
struct Row1;
|
||||
struct Row33;
|
||||
@@ -12603,12 +12748,12 @@ async fn test_modification_reverts(cx: &mut TestAppContext) {
|
||||
struct Row9;
|
||||
struct Row1011;ˇ"#},
|
||||
vec![
|
||||
DiffHunkStatus::modified_none(),
|
||||
DiffHunkStatus::modified_none(),
|
||||
DiffHunkStatus::modified_none(),
|
||||
DiffHunkStatus::modified_none(),
|
||||
DiffHunkStatus::modified_none(),
|
||||
DiffHunkStatus::modified_none(),
|
||||
DiffHunkStatusKind::Modified,
|
||||
DiffHunkStatusKind::Modified,
|
||||
DiffHunkStatusKind::Modified,
|
||||
DiffHunkStatusKind::Modified,
|
||||
DiffHunkStatusKind::Modified,
|
||||
DiffHunkStatusKind::Modified,
|
||||
],
|
||||
indoc! {r#"struct Row;
|
||||
ˇstruct Row1;
|
||||
@@ -12686,10 +12831,7 @@ struct Row10;"#};
|
||||
ˇ
|
||||
struct Row8;
|
||||
struct Row10;"#},
|
||||
vec![
|
||||
DiffHunkStatus::deleted_none(),
|
||||
DiffHunkStatus::deleted_none(),
|
||||
],
|
||||
vec![DiffHunkStatusKind::Deleted, DiffHunkStatusKind::Deleted],
|
||||
indoc! {r#"struct Row;
|
||||
struct Row2;
|
||||
|
||||
@@ -12712,10 +12854,7 @@ struct Row10;"#};
|
||||
ˇ»
|
||||
struct Row8;
|
||||
struct Row10;"#},
|
||||
vec![
|
||||
DiffHunkStatus::deleted_none(),
|
||||
DiffHunkStatus::deleted_none(),
|
||||
],
|
||||
vec![DiffHunkStatusKind::Deleted, DiffHunkStatusKind::Deleted],
|
||||
indoc! {r#"struct Row;
|
||||
struct Row2;
|
||||
|
||||
@@ -12740,10 +12879,7 @@ struct Row10;"#};
|
||||
|
||||
struct Row8;ˇ
|
||||
struct Row10;"#},
|
||||
vec![
|
||||
DiffHunkStatus::deleted_none(),
|
||||
DiffHunkStatus::deleted_none(),
|
||||
],
|
||||
vec![DiffHunkStatusKind::Deleted, DiffHunkStatusKind::Deleted],
|
||||
indoc! {r#"struct Row;
|
||||
struct Row1;
|
||||
ˇstruct Row2;
|
||||
@@ -12768,9 +12904,9 @@ struct Row10;"#};
|
||||
struct Row8;ˇ»
|
||||
struct Row10;"#},
|
||||
vec![
|
||||
DiffHunkStatus::deleted_none(),
|
||||
DiffHunkStatus::deleted_none(),
|
||||
DiffHunkStatus::deleted_none(),
|
||||
DiffHunkStatusKind::Deleted,
|
||||
DiffHunkStatusKind::Deleted,
|
||||
DiffHunkStatusKind::Deleted,
|
||||
],
|
||||
indoc! {r#"struct Row;
|
||||
struct Row1;
|
||||
@@ -16687,14 +16823,13 @@ pub(crate) fn init_test(cx: &mut TestAppContext, f: fn(&mut AllLanguageSettingsC
|
||||
#[track_caller]
|
||||
fn assert_hunk_revert(
|
||||
not_reverted_text_with_selections: &str,
|
||||
expected_hunk_statuses_before: Vec<DiffHunkStatus>,
|
||||
expected_hunk_statuses_before: Vec<DiffHunkStatusKind>,
|
||||
expected_reverted_text_with_selections: &str,
|
||||
base_text: &str,
|
||||
cx: &mut EditorLspTestContext,
|
||||
) {
|
||||
cx.set_state(not_reverted_text_with_selections);
|
||||
cx.set_head_text(base_text);
|
||||
cx.clear_index_text();
|
||||
cx.executor().run_until_parked();
|
||||
|
||||
let actual_hunk_statuses_before = cx.update_editor(|editor, window, cx| {
|
||||
@@ -16702,7 +16837,7 @@ fn assert_hunk_revert(
|
||||
let reverted_hunk_statuses = snapshot
|
||||
.buffer_snapshot
|
||||
.diff_hunks_in_range(0..snapshot.buffer_snapshot.len())
|
||||
.map(|hunk| hunk.status())
|
||||
.map(|hunk| hunk.status().kind)
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
editor.git_restore(&Default::default(), window, cx);
|
||||
|
||||
@@ -26,7 +26,7 @@ use crate::{
|
||||
FILE_HEADER_HEIGHT, GIT_BLAME_MAX_AUTHOR_CHARS_DISPLAYED, MAX_LINE_LEN,
|
||||
MULTI_BUFFER_EXCERPT_HEADER_HEIGHT,
|
||||
};
|
||||
use buffer_diff::{DiffHunkSecondaryStatus, DiffHunkStatus, DiffHunkStatusKind};
|
||||
use buffer_diff::{DiffHunkStatus, DiffHunkStatusKind};
|
||||
use client::ParticipantIndex;
|
||||
use collections::{BTreeMap, HashMap, HashSet};
|
||||
use file_icons::FileIcons;
|
||||
@@ -429,6 +429,13 @@ impl EditorElement {
|
||||
cx.propagate();
|
||||
}
|
||||
});
|
||||
register_action(editor, window, |editor, action, window, cx| {
|
||||
if let Some(task) = editor.organize_imports(action, window, cx) {
|
||||
task.detach_and_notify_err(window, cx);
|
||||
} else {
|
||||
cx.propagate();
|
||||
}
|
||||
});
|
||||
register_action(editor, window, Editor::restart_language_server);
|
||||
register_action(editor, window, Editor::show_character_palette);
|
||||
register_action(editor, window, |editor, action, window, cx| {
|
||||
@@ -4337,6 +4344,8 @@ impl EditorElement {
|
||||
}
|
||||
|
||||
fn paint_diff_hunks(layout: &mut EditorLayout, window: &mut Window, cx: &mut App) {
|
||||
let is_light = cx.theme().appearance().is_light();
|
||||
|
||||
if layout.display_hunks.is_empty() {
|
||||
return;
|
||||
}
|
||||
@@ -4356,7 +4365,7 @@ impl EditorElement {
|
||||
hunk_bounds,
|
||||
cx.theme().colors().version_control_modified,
|
||||
Corners::all(px(0.)),
|
||||
DiffHunkSecondaryStatus::None,
|
||||
DiffHunkStatus::modified_none(),
|
||||
))
|
||||
}
|
||||
DisplayDiffHunk::Unfolded {
|
||||
@@ -4368,19 +4377,19 @@ impl EditorElement {
|
||||
hunk_hitbox.bounds,
|
||||
cx.theme().colors().version_control_added,
|
||||
Corners::all(px(0.)),
|
||||
status.secondary,
|
||||
*status,
|
||||
),
|
||||
DiffHunkStatusKind::Modified => (
|
||||
hunk_hitbox.bounds,
|
||||
cx.theme().colors().version_control_modified,
|
||||
Corners::all(px(0.)),
|
||||
status.secondary,
|
||||
*status,
|
||||
),
|
||||
DiffHunkStatusKind::Deleted if !display_row_range.is_empty() => (
|
||||
hunk_hitbox.bounds,
|
||||
cx.theme().colors().version_control_deleted,
|
||||
Corners::all(px(0.)),
|
||||
status.secondary,
|
||||
*status,
|
||||
),
|
||||
DiffHunkStatusKind::Deleted => (
|
||||
Bounds::new(
|
||||
@@ -4392,19 +4401,18 @@ impl EditorElement {
|
||||
),
|
||||
cx.theme().colors().version_control_deleted,
|
||||
Corners::all(1. * line_height),
|
||||
status.secondary,
|
||||
*status,
|
||||
),
|
||||
}),
|
||||
};
|
||||
|
||||
if let Some((hunk_bounds, background_color, corner_radii, secondary_status)) =
|
||||
if let Some((hunk_bounds, mut background_color, corner_radii, secondary_status)) =
|
||||
hunk_to_paint
|
||||
{
|
||||
let background_color = if secondary_status != DiffHunkSecondaryStatus::None {
|
||||
background_color.opacity(0.3)
|
||||
} else {
|
||||
background_color.opacity(1.0)
|
||||
};
|
||||
if secondary_status.has_secondary_hunk() {
|
||||
background_color =
|
||||
background_color.opacity(if is_light { 0.2 } else { 0.32 });
|
||||
}
|
||||
window.paint_quad(quad(
|
||||
hunk_bounds,
|
||||
corner_radii,
|
||||
@@ -5095,9 +5103,15 @@ impl EditorElement {
|
||||
end_display_row.0 -= 1;
|
||||
}
|
||||
let color = match &hunk.status().kind {
|
||||
DiffHunkStatusKind::Added => theme.status().created,
|
||||
DiffHunkStatusKind::Modified => theme.status().modified,
|
||||
DiffHunkStatusKind::Deleted => theme.status().deleted,
|
||||
DiffHunkStatusKind::Added => {
|
||||
theme.colors().version_control_added
|
||||
}
|
||||
DiffHunkStatusKind::Modified => {
|
||||
theme.colors().version_control_modified
|
||||
}
|
||||
DiffHunkStatusKind::Deleted => {
|
||||
theme.colors().version_control_deleted
|
||||
}
|
||||
};
|
||||
ColoredRange {
|
||||
start: start_display_row,
|
||||
@@ -6693,12 +6707,14 @@ impl Element for EditorElement {
|
||||
.editor
|
||||
.update(cx, |editor, cx| editor.highlighted_display_rows(window, cx));
|
||||
|
||||
let is_light = cx.theme().appearance().is_light();
|
||||
|
||||
for (ix, row_info) in row_infos.iter().enumerate() {
|
||||
let Some(diff_status) = row_info.diff_status else {
|
||||
continue;
|
||||
};
|
||||
|
||||
let staged_opacity = 0.10;
|
||||
let staged_opacity = if is_light { 0.14 } else { 0.10 };
|
||||
let unstaged_opacity = 0.04;
|
||||
|
||||
let background_color = match diff_status.kind {
|
||||
@@ -6711,12 +6727,11 @@ impl Element for EditorElement {
|
||||
continue;
|
||||
}
|
||||
};
|
||||
let background_color =
|
||||
if diff_status.secondary == DiffHunkSecondaryStatus::None {
|
||||
background_color.opacity(staged_opacity)
|
||||
} else {
|
||||
background_color.opacity(unstaged_opacity)
|
||||
};
|
||||
let background_color = if diff_status.has_secondary_hunk() {
|
||||
background_color.opacity(unstaged_opacity)
|
||||
} else {
|
||||
background_color.opacity(staged_opacity)
|
||||
};
|
||||
|
||||
highlighted_rows
|
||||
.entry(start_row + DisplayRow(ix as u32))
|
||||
@@ -8763,65 +8778,62 @@ fn diff_hunk_controls(
|
||||
.rounded_b_lg()
|
||||
.bg(cx.theme().colors().editor_background)
|
||||
.gap_1()
|
||||
.when(status.secondary == DiffHunkSecondaryStatus::None, |el| {
|
||||
el.child(
|
||||
Button::new("unstage", "Unstage")
|
||||
.tooltip({
|
||||
let focus_handle = editor.focus_handle(cx);
|
||||
move |window, cx| {
|
||||
Tooltip::for_action_in(
|
||||
"Unstage Hunk",
|
||||
&::git::ToggleStaged,
|
||||
&focus_handle,
|
||||
.child(if status.has_secondary_hunk() {
|
||||
Button::new(("stage", row as u64), "Stage")
|
||||
.alpha(if status.is_pending() { 0.66 } else { 1.0 })
|
||||
.tooltip({
|
||||
let focus_handle = editor.focus_handle(cx);
|
||||
move |window, cx| {
|
||||
Tooltip::for_action_in(
|
||||
"Stage Hunk",
|
||||
&::git::ToggleStaged,
|
||||
&focus_handle,
|
||||
window,
|
||||
cx,
|
||||
)
|
||||
}
|
||||
})
|
||||
.on_click({
|
||||
let editor = editor.clone();
|
||||
move |_event, window, cx| {
|
||||
editor.update(cx, |editor, cx| {
|
||||
editor.stage_or_unstage_diff_hunks(
|
||||
true,
|
||||
&[hunk_range.start..hunk_range.start],
|
||||
window,
|
||||
cx,
|
||||
)
|
||||
}
|
||||
})
|
||||
.on_click({
|
||||
let editor = editor.clone();
|
||||
move |_event, window, cx| {
|
||||
editor.update(cx, |editor, cx| {
|
||||
editor.stage_or_unstage_diff_hunks(
|
||||
false,
|
||||
&[hunk_range.start..hunk_range.start],
|
||||
window,
|
||||
cx,
|
||||
);
|
||||
});
|
||||
}
|
||||
}),
|
||||
)
|
||||
})
|
||||
.when(status.secondary != DiffHunkSecondaryStatus::None, |el| {
|
||||
el.child(
|
||||
Button::new("stage", "Stage")
|
||||
.tooltip({
|
||||
let focus_handle = editor.focus_handle(cx);
|
||||
move |window, cx| {
|
||||
Tooltip::for_action_in(
|
||||
"Stage Hunk",
|
||||
&::git::ToggleStaged,
|
||||
&focus_handle,
|
||||
);
|
||||
});
|
||||
}
|
||||
})
|
||||
} else {
|
||||
Button::new(("unstage", row as u64), "Unstage")
|
||||
.alpha(if status.is_pending() { 0.66 } else { 1.0 })
|
||||
.tooltip({
|
||||
let focus_handle = editor.focus_handle(cx);
|
||||
move |window, cx| {
|
||||
Tooltip::for_action_in(
|
||||
"Unstage Hunk",
|
||||
&::git::ToggleStaged,
|
||||
&focus_handle,
|
||||
window,
|
||||
cx,
|
||||
)
|
||||
}
|
||||
})
|
||||
.on_click({
|
||||
let editor = editor.clone();
|
||||
move |_event, window, cx| {
|
||||
editor.update(cx, |editor, cx| {
|
||||
editor.stage_or_unstage_diff_hunks(
|
||||
false,
|
||||
&[hunk_range.start..hunk_range.start],
|
||||
window,
|
||||
cx,
|
||||
)
|
||||
}
|
||||
})
|
||||
.on_click({
|
||||
let editor = editor.clone();
|
||||
move |_event, window, cx| {
|
||||
editor.update(cx, |editor, cx| {
|
||||
editor.stage_or_unstage_diff_hunks(
|
||||
true,
|
||||
&[hunk_range.start..hunk_range.start],
|
||||
window,
|
||||
cx,
|
||||
);
|
||||
});
|
||||
}
|
||||
}),
|
||||
)
|
||||
);
|
||||
});
|
||||
}
|
||||
})
|
||||
})
|
||||
.child(
|
||||
Button::new("discard", "Restore")
|
||||
|
||||
@@ -185,7 +185,7 @@ impl ProposedChangesEditor {
|
||||
} else {
|
||||
branch_buffer = location.buffer.update(cx, |buffer, cx| buffer.branch(cx));
|
||||
new_diffs.push(cx.new(|cx| {
|
||||
let mut diff = BufferDiff::new(branch_buffer.read(cx));
|
||||
let mut diff = BufferDiff::new(&branch_buffer.read(cx).snapshot(), cx);
|
||||
let _ = diff.set_base_text(
|
||||
location.buffer.clone(),
|
||||
branch_buffer.read(cx).text_snapshot(),
|
||||
|
||||
@@ -275,11 +275,7 @@ async fn run_evaluation(
|
||||
let db_path = Path::new(EVAL_DB_PATH);
|
||||
let api_key = std::env::var("OPENAI_API_KEY").unwrap();
|
||||
let git_hosting_provider_registry = Arc::new(GitHostingProviderRegistry::new());
|
||||
let fs = Arc::new(RealFs::new(
|
||||
git_hosting_provider_registry,
|
||||
None,
|
||||
PathBuf::from("/non/existent/askpass"),
|
||||
)) as Arc<dyn Fs>;
|
||||
let fs = Arc::new(RealFs::new(git_hosting_provider_registry, None)) as Arc<dyn Fs>;
|
||||
let clock = Arc::new(RealSystemClock);
|
||||
let client = cx
|
||||
.update(|cx| {
|
||||
|
||||
@@ -248,7 +248,6 @@ impl From<MTime> for proto::Timestamp {
pub struct RealFs {
git_hosting_provider_registry: Arc<GitHostingProviderRegistry>,
git_binary_path: Option<PathBuf>,
askpass_path: PathBuf,
}

pub trait FileHandle: Send + Sync + std::fmt::Debug {
@@ -303,12 +302,10 @@ impl RealFs {
pub fn new(
git_hosting_provider_registry: Arc<GitHostingProviderRegistry>,
git_binary_path: Option<PathBuf>,
askpass_path: PathBuf,
) -> Self {
Self {
git_hosting_provider_registry,
git_binary_path,
askpass_path,
}
}
}
@@ -772,7 +769,6 @@ impl Fs for RealFs {
Some(Arc::new(RealGitRepository::new(
repo,
self.git_binary_path.clone(),
self.askpass_path.to_owned(),
self.git_hosting_provider_registry.clone(),
)))
}
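With `askpass_path` removed, `RealFs::new` now takes only the hosting-provider registry and an optional git binary path, matching the evaluation-harness call site earlier in this diff. A construction sketch, assuming the workspace's `fs` and git hosting-provider crates:

// Build a real filesystem handle; `None` means "use the git binary on PATH".
let git_hosting_provider_registry = Arc::new(GitHostingProviderRegistry::new());
let fs = Arc::new(RealFs::new(git_hosting_provider_registry, None)) as Arc<dyn Fs>;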
@@ -30,7 +30,6 @@ schemars.workspace = true
|
||||
serde.workspace = true
|
||||
smol.workspace = true
|
||||
sum_tree.workspace = true
|
||||
tempfile.workspace = true
|
||||
text.workspace = true
|
||||
time.workspace = true
|
||||
url.workspace = true
|
||||
|
||||
@@ -35,15 +35,23 @@ pub struct Push {
pub options: Option<PushOptions>,
}

impl_actions!(git, [Push]);
#[derive(Debug, Copy, Clone, PartialEq, Deserialize, JsonSchema)]
pub struct StageAndNext {
pub whole_excerpt: bool,
}

#[derive(Debug, Copy, Clone, PartialEq, Deserialize, JsonSchema)]
pub struct UnstageAndNext {
pub whole_excerpt: bool,
}

impl_actions!(git, [Push, StageAndNext, UnstageAndNext]);

actions!(
git,
[
// per-hunk
ToggleStaged,
StageAndNext,
UnstageAndNext,
// per-file
StageFile,
UnstageFile,
@@ -56,6 +64,7 @@ actions!(
Pull,
Fetch,
Commit,
ExpandCommitEditor,
]
);
action_with_deprecated_aliases!(git, RestoreFile, ["editor::RevertFile"]);
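`StageAndNext` and `UnstageAndNext` become data-carrying actions registered through `impl_actions!`, so bindings and callers can choose `whole_excerpt`. A standalone sketch of how such a struct round-trips through serde (serde/serde_json only; the gpui registration itself is omitted):

use serde::Deserialize;

#[derive(Debug, Copy, Clone, PartialEq, Deserialize)]
struct StageAndNext {
    whole_excerpt: bool,
}

fn main() {
    // Action data supplied by a binding deserializes into the struct's fields.
    let action: StageAndNext =
        serde_json::from_str(r#"{ "whole_excerpt": true }"#).unwrap();
    assert!(action.whole_excerpt);
}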
@@ -10,11 +10,8 @@ use rope::Rope;
|
||||
use schemars::JsonSchema;
|
||||
use serde::Deserialize;
|
||||
use std::borrow::Borrow;
|
||||
use std::env::temp_dir;
|
||||
use std::io::Write as _;
|
||||
use std::os::unix::fs::PermissionsExt as _;
|
||||
use std::os::unix::net::UnixListener;
|
||||
use std::process::{Command, Stdio};
|
||||
use std::process::Stdio;
|
||||
use std::sync::LazyLock;
|
||||
use std::{
|
||||
cmp::Ordering,
|
||||
@@ -77,6 +74,12 @@ impl UpstreamTracking {
}
}

impl From<UpstreamTrackingStatus> for UpstreamTracking {
fn from(status: UpstreamTrackingStatus) -> Self {
UpstreamTracking::Tracked(status)
}
}

#[derive(Clone, Copy, Debug, Hash, PartialEq, Eq)]
pub struct UpstreamTrackingStatus {
pub ahead: u32,
@@ -203,7 +206,6 @@ impl std::fmt::Debug for dyn GitRepository {
pub struct RealGitRepository {
pub repository: Mutex<git2::Repository>,
pub git_binary_path: PathBuf,
pub askpass_path: PathBuf,
hosting_provider_registry: Arc<GitHostingProviderRegistry>,
}

@@ -211,13 +213,11 @@ impl RealGitRepository {
pub fn new(
repository: git2::Repository,
git_binary_path: Option<PathBuf>,
askpass_path: PathBuf,
hosting_provider_registry: Arc<GitHostingProviderRegistry>,
) -> Self {
Self {
repository: Mutex::new(repository),
git_binary_path: git_binary_path.unwrap_or_else(|| PathBuf::from("git")),
askpass_path,
hosting_provider_registry,
}
}
@@ -614,10 +614,7 @@ impl GitRepository for RealGitRepository {
) -> Result<()> {
let working_directory = self.working_directory()?;

// We don't use the bundled git, so we can ensure that system
// credential management and transfer mechanisms are respected
let output = new_std_command("git")
.env("GIT_ASKPASS", &self.askpass_path)
let output = new_std_command(&self.git_binary_path)
.current_dir(&working_directory)
.args(["push", "--quiet"])
.args(options.map(|option| match option {
@@ -641,12 +638,9 @@ impl GitRepository for RealGitRepository {
fn pull(&self, branch_name: &str, remote_name: &str) -> Result<()> {
let working_directory = self.working_directory()?;

// We don't use the bundled git, so we can ensure that system
// credential management and transfer mechanisms are respected
let output = new_std_command("git")
.env("GIT_ASKPASS", &self.askpass_path)
let output = new_std_command(&self.git_binary_path)
.current_dir(&working_directory)
.args(["pull"])
.args(["pull", "--quiet"])
.arg(remote_name)
.arg(branch_name)
.output()?;
@@ -664,10 +658,7 @@ impl GitRepository for RealGitRepository {
fn fetch(&self) -> Result<()> {
let working_directory = self.working_directory()?;

// We don't use the bundled git, so we can ensure that system
// credential management and transfer mechanisms are respected
let output = new_std_command("git")
.env("GIT_ASKPASS", &self.askpass_path)
let output = new_std_command(&self.git_binary_path)
.current_dir(&working_directory)
.args(["fetch", "--quiet", "--all"])
.output()?;
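The push, pull, and fetch changes above replace the hard-coded "git" plus GIT_ASKPASS pair with the repository's configured `git_binary_path`. A standalone sketch of the same shell-out shape using std::process (toy helper, not the GitRepository trait):

use std::path::{Path, PathBuf};
use std::process::Command;

// Run `<git> fetch --quiet --all` in the given working directory, using the
// configured binary when present and falling back to `git` on PATH.
fn git_fetch(git_binary_path: Option<PathBuf>, working_directory: &Path) -> std::io::Result<()> {
    let git = git_binary_path.unwrap_or_else(|| PathBuf::from("git"));
    let output = Command::new(&git)
        .current_dir(working_directory)
        .args(["fetch", "--quiet", "--all"])
        .output()?;
    if !output.status.success() {
        eprintln!("fetch failed: {}", String::from_utf8_lossy(&output.stderr));
    }
    Ok(())
}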
@@ -20,6 +20,7 @@ test-support = ["multi_buffer/test-support"]
|
||||
anyhow.workspace = true
|
||||
buffer_diff.workspace = true
|
||||
collections.workspace = true
|
||||
component.workspace = true
|
||||
db.workspace = true
|
||||
editor.workspace = true
|
||||
feature_flags.workspace = true
|
||||
@@ -29,6 +30,7 @@ git.workspace = true
|
||||
gpui.workspace = true
|
||||
itertools.workspace = true
|
||||
language.workspace = true
|
||||
linkme.workspace = true
|
||||
menu.workspace = true
|
||||
multi_buffer.workspace = true
|
||||
panel.workspace = true
|
||||
@@ -40,6 +42,7 @@ serde.workspace = true
|
||||
serde_derive.workspace = true
|
||||
serde_json.workspace = true
|
||||
settings.workspace = true
|
||||
smallvec.workspace = true
|
||||
strum.workspace = true
|
||||
theme.workspace = true
|
||||
time.workspace = true
|
||||
|
||||
@@ -2,7 +2,7 @@
|
||||
|
||||
use crate::branch_picker::{self, BranchList};
|
||||
use crate::git_panel::{commit_message_editor, GitPanel};
|
||||
use git::Commit;
|
||||
use git::{Commit, ExpandCommitEditor};
|
||||
use panel::{panel_button, panel_editor_style, panel_filled_button};
|
||||
use project::Project;
|
||||
use ui::{prelude::*, KeybindingHint, PopoverButton, Tooltip, TriggerablePopover};
|
||||
@@ -110,14 +110,17 @@ struct RestoreDock {
|
||||
|
||||
impl CommitModal {
|
||||
pub fn register(workspace: &mut Workspace, _: &mut Window, _cx: &mut Context<Workspace>) {
|
||||
workspace.register_action(|workspace, _: &Commit, window, cx| {
|
||||
workspace.register_action(|workspace, _: &ExpandCommitEditor, window, cx| {
|
||||
let Some(git_panel) = workspace.panel::<GitPanel>(cx) else {
|
||||
return;
|
||||
};
|
||||
|
||||
let (can_commit, conflict) = git_panel.update(cx, |git_panel, _cx| {
|
||||
let (can_commit, conflict) = git_panel.update(cx, |git_panel, cx| {
|
||||
let can_commit = git_panel.can_commit();
|
||||
let conflict = git_panel.has_unstaged_conflicts();
|
||||
if can_commit {
|
||||
git_panel.set_modal_open(true, cx);
|
||||
}
|
||||
(can_commit, conflict)
|
||||
});
|
||||
if !can_commit {
|
||||
@@ -131,6 +134,7 @@ impl CommitModal {
|
||||
prompt.await.ok();
|
||||
})
|
||||
.detach();
|
||||
return;
|
||||
}
|
||||
|
||||
let dock = workspace.dock_at_position(git_panel.position(window, cx));
|
||||
|
||||
File diff suppressed because it is too large
@@ -1,6 +1,4 @@
|
||||
use std::any::{Any, TypeId};
|
||||
|
||||
use ::git::UnstageAndNext;
|
||||
use crate::git_panel::{GitPanel, GitPanelAddon, GitStatusEntry};
|
||||
use anyhow::Result;
|
||||
use buffer_diff::{BufferDiff, DiffHunkSecondaryStatus};
|
||||
use collections::HashSet;
|
||||
@@ -11,14 +9,17 @@ use editor::{
|
||||
};
|
||||
use feature_flags::FeatureFlagViewExt;
|
||||
use futures::StreamExt;
|
||||
use git::{status::FileStatus, Commit, StageAll, StageAndNext, ToggleStaged, UnstageAll};
|
||||
use git::{
|
||||
status::FileStatus, Commit, StageAll, StageAndNext, ToggleStaged, UnstageAll, UnstageAndNext,
|
||||
};
|
||||
use gpui::{
|
||||
actions, Action, AnyElement, AnyView, App, AppContext as _, AsyncWindowContext, Entity,
|
||||
EventEmitter, FocusHandle, Focusable, Render, Subscription, Task, WeakEntity,
|
||||
};
|
||||
use language::{Anchor, Buffer, Capability, OffsetRangeExt, Point};
|
||||
use language::{Anchor, Buffer, Capability, OffsetRangeExt};
|
||||
use multi_buffer::{MultiBuffer, PathKey};
|
||||
use project::{git::GitStore, Project, ProjectPath};
|
||||
use std::any::{Any, TypeId};
|
||||
use theme::ActiveTheme;
|
||||
use ui::{prelude::*, vertical_divider, Tooltip};
|
||||
use util::ResultExt as _;
|
||||
@@ -29,8 +30,6 @@ use workspace::{
|
||||
Workspace,
|
||||
};
|
||||
|
||||
use crate::git_panel::{GitPanel, GitPanelAddon, GitStatusEntry};
|
||||
|
||||
actions!(git, [Diff]);
|
||||
|
||||
pub struct ProjectDiff {
|
||||
@@ -230,14 +229,16 @@ impl ProjectDiff {
|
||||
let mut has_unstaged_hunks = false;
|
||||
for hunk in editor.diff_hunks_in_ranges(&ranges, &snapshot) {
|
||||
match hunk.secondary_status {
|
||||
DiffHunkSecondaryStatus::HasSecondaryHunk => {
|
||||
DiffHunkSecondaryStatus::HasSecondaryHunk
|
||||
| DiffHunkSecondaryStatus::SecondaryHunkAdditionPending => {
|
||||
has_unstaged_hunks = true;
|
||||
}
|
||||
DiffHunkSecondaryStatus::OverlapsWithSecondaryHunk => {
|
||||
has_staged_hunks = true;
|
||||
has_unstaged_hunks = true;
|
||||
}
|
||||
DiffHunkSecondaryStatus::None => {
|
||||
DiffHunkSecondaryStatus::None
|
||||
| DiffHunkSecondaryStatus::SecondaryHunkRemovalPending => {
|
||||
has_staged_hunks = true;
|
||||
}
|
||||
}
|
||||
@@ -378,13 +379,10 @@ impl ProjectDiff {
|
||||
|
||||
let snapshot = buffer.read(cx).snapshot();
|
||||
let diff = diff.read(cx);
|
||||
let diff_hunk_ranges = if diff.base_text().is_none() {
|
||||
vec![Point::zero()..snapshot.max_point()]
|
||||
} else {
|
||||
diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx)
|
||||
.map(|diff_hunk| diff_hunk.buffer_range.to_point(&snapshot))
|
||||
.collect::<Vec<_>>()
|
||||
};
|
||||
let diff_hunk_ranges = diff
|
||||
.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx)
|
||||
.map(|diff_hunk| diff_hunk.buffer_range.to_point(&snapshot))
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
let (was_empty, is_excerpt_newly_added) = self.multibuffer.update(cx, |multibuffer, cx| {
|
||||
let was_empty = multibuffer.is_empty();
|
||||
@@ -815,7 +813,9 @@ impl Render for ProjectDiffToolbar {
|
||||
Button::new("stage", "Stage")
|
||||
.tooltip(Tooltip::for_action_title_in(
|
||||
"Stage",
|
||||
&StageAndNext,
|
||||
&StageAndNext {
|
||||
whole_excerpt: false,
|
||||
},
|
||||
&focus_handle,
|
||||
))
|
||||
// don't actually disable the button so it's mashable
|
||||
@@ -825,14 +825,22 @@ impl Render for ProjectDiffToolbar {
|
||||
Color::Disabled
|
||||
})
|
||||
.on_click(cx.listener(|this, _, window, cx| {
|
||||
this.dispatch_action(&StageAndNext, window, cx)
|
||||
this.dispatch_action(
|
||||
&StageAndNext {
|
||||
whole_excerpt: false,
|
||||
},
|
||||
window,
|
||||
cx,
|
||||
)
|
||||
})),
|
||||
)
|
||||
.child(
|
||||
Button::new("unstage", "Unstage")
|
||||
.tooltip(Tooltip::for_action_title_in(
|
||||
"Unstage",
|
||||
&UnstageAndNext,
|
||||
&UnstageAndNext {
|
||||
whole_excerpt: false,
|
||||
},
|
||||
&focus_handle,
|
||||
))
|
||||
.color(if button_states.unstage {
|
||||
@@ -841,7 +849,13 @@ impl Render for ProjectDiffToolbar {
|
||||
Color::Disabled
|
||||
})
|
||||
.on_click(cx.listener(|this, _, window, cx| {
|
||||
this.dispatch_action(&UnstageAndNext, window, cx)
|
||||
this.dispatch_action(
|
||||
&UnstageAndNext {
|
||||
whole_excerpt: false,
|
||||
},
|
||||
window,
|
||||
cx,
|
||||
)
|
||||
})),
|
||||
)
|
||||
}),
|
||||
@@ -971,7 +985,7 @@ mod tests {
|
||||
path!("/project"),
|
||||
json!({
|
||||
".git": {},
|
||||
"foo": "FOO\n",
|
||||
"foo.txt": "FOO\n",
|
||||
}),
|
||||
)
|
||||
.await;
|
||||
@@ -985,11 +999,15 @@ mod tests {
|
||||
|
||||
fs.set_head_for_repo(
|
||||
path!("/project/.git").as_ref(),
|
||||
&[("foo".into(), "foo\n".into())],
|
||||
&[("foo.txt".into(), "foo\n".into())],
|
||||
);
|
||||
fs.set_index_for_repo(
|
||||
path!("/project/.git").as_ref(),
|
||||
&[("foo.txt".into(), "foo\n".into())],
|
||||
);
|
||||
fs.with_git_state(path!("/project/.git").as_ref(), true, |state| {
|
||||
state.statuses = HashMap::from_iter([(
|
||||
"foo".into(),
|
||||
"foo.txt".into(),
|
||||
TrackedStatus {
|
||||
index_status: StatusCode::Unmodified,
|
||||
worktree_status: StatusCode::Modified,
|
||||
@@ -1020,7 +1038,7 @@ mod tests {
|
||||
|
||||
assert_state_with_diff(&editor, cx, &"ˇ".unindent());
|
||||
|
||||
let text = String::from_utf8(fs.read_file_sync("/project/foo").unwrap()).unwrap();
|
||||
let text = String::from_utf8(fs.read_file_sync("/project/foo.txt").unwrap()).unwrap();
|
||||
assert_eq!(text, "foo\n");
|
||||
}
|
||||
|
||||
|
||||
@@ -47,6 +47,10 @@ impl RepositorySelector {
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) fn repositories_len(&self, cx: &App) -> usize {
|
||||
self.picker.read(cx).delegate.repository_entries.len()
|
||||
}
|
||||
|
||||
fn handle_project_git_event(
|
||||
&mut self,
|
||||
git_store: &Entity<GitStore>,
|
||||
|
||||
@@ -399,6 +399,8 @@ macro_rules! action_with_deprecated_aliases {
/// Registers the action and implements the Action trait for any struct that implements Clone,
/// Default, PartialEq, serde_deserialize::Deserialize, and schemars::JsonSchema.
///
/// Similar to `actions!`, but accepts structs with fields.
///
/// Fields and variants that don't make sense for user configuration should be annotated with
/// #[serde(skip)].
#[macro_export]
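The expanded macro docs above call for Clone, Default, PartialEq, Deserialize, and JsonSchema on field-carrying actions, with #[serde(skip)] on fields users should not configure. A hypothetical struct illustrating that convention (derives only, assuming serde, serde_json, and schemars; the macro invocation itself is omitted):

use schemars::JsonSchema;
use serde::Deserialize;

#[derive(Clone, Default, PartialEq, Deserialize, JsonSchema)]
struct OpenExample {
    // Configurable from a keybinding.
    pane: Option<String>,
    // Internal bookkeeping; users cannot set this, so serde skips it.
    #[serde(skip)]
    triggered_programmatically: bool,
}

fn main() {
    let action: OpenExample = serde_json::from_str(r#"{ "pane": "right" }"#).unwrap();
    assert_eq!(action.pane.as_deref(), Some("right"));
    assert!(!action.triggered_programmatically); // skipped fields take their Default
}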
@@ -32,6 +32,41 @@
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"devDependency": {
|
||||
"description": "Specifies dependencies that are required for the development and testing of the project. These dependencies are not needed in the production environment.",
|
||||
"type": "object",
|
||||
"additionalProperties": {
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"optionalDependency": {
|
||||
"description": "Specifies dependencies that are optional for your project. These dependencies are attempted to be installed during the npm install process, but if they fail to install, the installation process will not fail.",
|
||||
"type": "object",
|
||||
"additionalProperties": {
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"peerDependency": {
|
||||
"description": "Specifies dependencies that are required by the package but are expected to be provided by the consumer of the package.",
|
||||
"type": "object",
|
||||
"additionalProperties": {
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"peerDependencyMeta": {
|
||||
"description": "When a user installs your package, warnings are emitted if packages specified in \"peerDependencies\" are not already installed. The \"peerDependenciesMeta\" field serves to provide more information on how your peer dependencies are utilized. Most commonly, it allows peer dependencies to be marked as optional. Metadata for this field is specified with a simple hash of the package name to a metadata object.",
|
||||
"type": "object",
|
||||
"additionalProperties": {
|
||||
"type": "object",
|
||||
"additionalProperties": true,
|
||||
"properties": {
|
||||
"optional": {
|
||||
"description": "Specifies that this peer dependency is optional and should not be installed automatically.",
|
||||
"type": "boolean"
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"license": {
|
||||
"anyOf": [
|
||||
{
|
||||
@@ -135,13 +170,17 @@
|
||||
},
|
||||
"types": {
|
||||
"$ref": "#/definitions/packageExportsEntryOrFallback",
|
||||
"description": "The module path that is resolved for TypeScript types when this specifier is imported. Should be listed before other conditions."
|
||||
"description": "The module path that is resolved for TypeScript types when this specifier is imported. Should be listed before other conditions. Additionally, versioned \"types\" condition in the form \"types@{selector}\" are supported."
|
||||
}
|
||||
},
|
||||
"patternProperties": {
|
||||
"^[^.0-9]+$": {
|
||||
"$ref": "#/definitions/packageExportsEntryOrFallback",
|
||||
"description": "The module path that is resolved when this environment matches the property name."
|
||||
},
|
||||
"^types@.+$": {
|
||||
"$ref": "#/definitions/packageExportsEntryOrFallback",
|
||||
"description": "The module path that is resolved for TypeScript types when this specifier is imported. Should be listed before other conditions. Additionally, versioned \"types\" condition in the form \"types@{selector}\" are supported."
|
||||
}
|
||||
},
|
||||
"additionalProperties": false
|
||||
@@ -173,6 +212,77 @@
|
||||
}
|
||||
]
|
||||
},
|
||||
"packageImportsEntryPath": {
|
||||
"type": [
|
||||
"string",
|
||||
"null"
|
||||
],
|
||||
"description": "The module path that is resolved when this specifier is imported. Set to `null` to disallow importing this module."
|
||||
},
|
||||
"packageImportsEntryObject": {
|
||||
"type": "object",
|
||||
"description": "Used to specify conditional exports, note that Conditional exports are unsupported in older environments, so it's recommended to use the fallback array option if support for those environments is a concern.",
|
||||
"properties": {
|
||||
"require": {
|
||||
"$ref": "#/definitions/packageImportsEntryOrFallback",
|
||||
"description": "The module path that is resolved when this specifier is imported as a CommonJS module using the `require(...)` function."
|
||||
},
|
||||
"import": {
|
||||
"$ref": "#/definitions/packageImportsEntryOrFallback",
|
||||
"description": "The module path that is resolved when this specifier is imported as an ECMAScript module using an `import` declaration or the dynamic `import(...)` function."
|
||||
},
|
||||
"node": {
|
||||
"$ref": "#/definitions/packageImportsEntryOrFallback",
|
||||
"description": "The module path that is resolved when this environment is Node.js."
|
||||
},
|
||||
"default": {
|
||||
"$ref": "#/definitions/packageImportsEntryOrFallback",
|
||||
"description": "The module path that is resolved when no other export type matches."
|
||||
},
|
||||
"types": {
|
||||
"$ref": "#/definitions/packageImportsEntryOrFallback",
|
||||
"description": "The module path that is resolved for TypeScript types when this specifier is imported. Should be listed before other conditions. Additionally, versioned \"types\" condition in the form \"types@{selector}\" are supported."
|
||||
}
|
||||
},
|
||||
"patternProperties": {
|
||||
"^[^.0-9]+$": {
|
||||
"$ref": "#/definitions/packageImportsEntryOrFallback",
|
||||
"description": "The module path that is resolved when this environment matches the property name."
|
||||
},
|
||||
"^types@.+$": {
|
||||
"$ref": "#/definitions/packageImportsEntryOrFallback",
|
||||
"description": "The module path that is resolved for TypeScript types when this specifier is imported. Should be listed before other conditions. Additionally, versioned \"types\" condition in the form \"types@{selector}\" are supported."
|
||||
}
|
||||
},
|
||||
"additionalProperties": false
|
||||
},
|
||||
"packageImportsEntry": {
|
||||
"oneOf": [
|
||||
{
|
||||
"$ref": "#/definitions/packageImportsEntryPath"
|
||||
},
|
||||
{
|
||||
"$ref": "#/definitions/packageImportsEntryObject"
|
||||
}
|
||||
]
|
||||
},
|
||||
"packageImportsFallback": {
|
||||
"type": "array",
|
||||
"description": "Used to allow fallbacks in case this environment doesn't support the preceding entries.",
|
||||
"items": {
|
||||
"$ref": "#/definitions/packageImportsEntry"
|
||||
}
|
||||
},
|
||||
"packageImportsEntryOrFallback": {
|
||||
"oneOf": [
|
||||
{
|
||||
"$ref": "#/definitions/packageImportsEntry"
|
||||
},
|
||||
{
|
||||
"$ref": "#/definitions/packageImportsFallback"
|
||||
}
|
||||
]
|
||||
},
|
||||
"fundingUrl": {
|
||||
"type": "string",
|
||||
"format": "uri",
|
||||
@@ -212,7 +322,7 @@
|
||||
"pattern": "^(?:(?:@(?:[a-z0-9-*~][a-z0-9-*._~]*)?/[a-z0-9-._~])|[a-z0-9-~])[a-z0-9-._~]*$"
|
||||
},
|
||||
"version": {
|
||||
"description": "Version must be parseable by node-semver, which is bundled with npm as a dependency.",
|
||||
"description": "Version must be parsable by node-semver, which is bundled with npm as a dependency.",
|
||||
"type": "string"
|
||||
},
|
||||
"description": {
|
||||
@@ -330,6 +440,17 @@
|
||||
}
|
||||
]
|
||||
},
|
||||
"imports": {
|
||||
"description": "The \"imports\" field is used to create private mappings that only apply to import specifiers from within the package itself.",
|
||||
"type": "object",
|
||||
"patternProperties": {
|
||||
"^#.+$": {
|
||||
"$ref": "#/definitions/packageImportsEntryOrFallback",
|
||||
"description": "The module path that is resolved when this environment matches the property name."
|
||||
}
|
||||
},
|
||||
"additionalProperties": false
|
||||
},
|
||||
"bin": {
|
||||
"type": [
|
||||
"string",
|
||||
@@ -487,7 +608,7 @@
|
||||
},
|
||||
"prepare": {
|
||||
"type": "string",
|
||||
"description": "Run both BEFORE the package is packed and published, and on local npm install without any arguments. This is run AFTER prepublish, but BEFORE prepublishOnly."
|
||||
"description": "Runs BEFORE the package is packed, i.e. during \"npm publish\" and \"npm pack\", and on local \"npm install\" without any arguments. This is run AFTER \"prepublish\", but BEFORE \"prepublishOnly\"."
|
||||
},
|
||||
"prepublishOnly": {
|
||||
"type": "string",
|
||||
@@ -594,27 +715,16 @@
|
||||
"$ref": "#/definitions/dependency"
|
||||
},
|
||||
"devDependencies": {
|
||||
"$ref": "#/definitions/dependency"
|
||||
"$ref": "#/definitions/devDependency"
|
||||
},
|
||||
"optionalDependencies": {
|
||||
"$ref": "#/definitions/dependency"
|
||||
"$ref": "#/definitions/optionalDependency"
|
||||
},
|
||||
"peerDependencies": {
|
||||
"$ref": "#/definitions/dependency"
|
||||
"$ref": "#/definitions/peerDependency"
|
||||
},
|
||||
"peerDependenciesMeta": {
|
||||
"description": "When a user installs your package, warnings are emitted if packages specified in \"peerDependencies\" are not already installed. The \"peerDependenciesMeta\" field serves to provide more information on how your peer dependencies are utilized. Most commonly, it allows peer dependencies to be marked as optional. Metadata for this field is specified with a simple hash of the package name to a metadata object.",
|
||||
"type": "object",
|
||||
"additionalProperties": {
|
||||
"type": "object",
|
||||
"additionalProperties": true,
|
||||
"properties": {
|
||||
"optional": {
|
||||
"description": "Specifies that this peer dependency is optional and should not be installed automatically.",
|
||||
"type": "boolean"
|
||||
}
|
||||
}
|
||||
}
|
||||
"$ref": "#/definitions/peerDependencyMeta"
|
||||
},
|
||||
"bundleDependencies": {
|
||||
"description": "Array of package names that will be bundled when publishing the package.",
|
||||
@@ -828,6 +938,190 @@
|
||||
},
|
||||
"jscpd": {
|
||||
"$ref": "https://json.schemastore.org/jscpd.json"
|
||||
},
|
||||
"pnpm": {
|
||||
"description": "Defines pnpm specific configuration.",
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"overrides": {
|
||||
"description": "Used to override any dependency in the dependency graph.",
|
||||
"type": "object"
|
||||
},
|
||||
"packageExtensions": {
|
||||
"description": "Used to extend the existing package definitions with additional information.",
|
||||
"type": "object",
|
||||
"patternProperties": {
|
||||
"^.+$": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"dependencies": {
|
||||
"$ref": "#/definitions/dependency"
|
||||
},
|
||||
"optionalDependencies": {
|
||||
"$ref": "#/definitions/optionalDependency"
|
||||
},
|
||||
"peerDependencies": {
|
||||
"$ref": "#/definitions/peerDependency"
|
||||
},
|
||||
"peerDependenciesMeta": {
|
||||
"$ref": "#/definitions/peerDependencyMeta"
|
||||
}
|
||||
},
|
||||
"additionalProperties": false
|
||||
}
|
||||
},
|
||||
"additionalProperties": false
|
||||
},
|
||||
"peerDependencyRules": {
|
||||
"properties": {
|
||||
"ignoreMissing": {
|
||||
"description": "pnpm will not print warnings about missing peer dependencies from this list.",
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"allowedVersions": {
|
||||
"description": "Unmet peer dependency warnings will not be printed for peer dependencies of the specified range.",
|
||||
"type": "object"
|
||||
},
|
||||
"allowAny": {
|
||||
"description": "Any peer dependency matching the pattern will be resolved from any version, regardless of the range specified in \"peerDependencies\".",
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": "string"
|
||||
}
|
||||
}
|
||||
},
|
||||
"additionalProperties": false
|
||||
},
|
||||
"neverBuiltDependencies": {
|
||||
"description": "A list of dependencies to run builds for.",
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"onlyBuiltDependencies": {
|
||||
"description": "A list of package names that are allowed to be executed during installation.",
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"onlyBuiltDependenciesFile": {
|
||||
"description": "Specifies a JSON file that lists the only packages permitted to run installation scripts during the pnpm install process.",
|
||||
"type": "string"
|
||||
},
|
||||
"ignoredBuiltDependencies": {
|
||||
"description": "A list of package names that should not be built during installation.",
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"allowedDeprecatedVersions": {
|
||||
"description": "A list of deprecated versions that the warnings are suppressed.",
|
||||
"type": "object"
|
||||
},
|
||||
"patchedDependencies": {
|
||||
"description": "A list of dependencies that are patched.",
|
||||
"type": "object"
|
||||
},
|
||||
"allowNonAppliedPatches": {
|
||||
"description": "When true, installation won't fail if some of the patches from the \"patchedDependencies\" field were not applied.",
|
||||
"type": "boolean"
|
||||
},
|
||||
"updateConfig": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"ignoreDependencies": {
|
||||
"description": "A list of packages that should be ignored when running \"pnpm outdated\" or \"pnpm update --latest\".",
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": "string"
|
||||
}
|
||||
}
|
||||
},
|
||||
"additionalProperties": false
|
||||
},
|
||||
"configDependencies": {
|
||||
"type": "object",
|
||||
"description": "Configurational dependencies are installed before all the other types of dependencies (before 'dependencies', 'devDependencies', 'optionalDependencies')."
|
||||
},
|
||||
"auditConfig": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"ignoreCves": {
|
||||
"description": "A list of CVE IDs that will be ignored by \"pnpm audit\".",
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": "string",
|
||||
"pattern": "^CVE-\\d{4}-\\d{4,7}$"
|
||||
}
|
||||
},
|
||||
"ignoreGhsas": {
|
||||
"description": "A list of GHSA Codes that will be ignored by \"pnpm audit\".",
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": "string",
|
||||
"pattern": "^GHSA(-[23456789cfghjmpqrvwx]{4}){3}$"
|
||||
}
|
||||
}
|
||||
},
|
||||
"additionalProperties": false
|
||||
},
|
||||
"requiredScripts": {
|
||||
"description": "A list of scripts that must exist in each project.",
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"supportedArchitectures": {
|
||||
"description": "Specifies architectures for which you'd like to install optional dependencies, even if they don't match the architecture of the system running the install.",
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"os": {
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"cpu": {
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"libc": {
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": "string"
|
||||
}
|
||||
}
|
||||
},
|
||||
"additionalProperties": false
|
||||
},
|
||||
"ignoredOptionalDependencies": {
|
||||
"description": "A list of optional dependencies that the install should be skipped.",
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"executionEnv": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"nodeVersion": {
|
||||
"description": "Specifies which exact Node.js version should be used for the project's runtime.",
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"additionalProperties": false
|
||||
}
|
||||
},
|
||||
"additionalProperties": false
|
||||
}
|
||||
},
|
||||
"anyOf": [
|
||||
|
||||
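The `ignoreCves` and `ignoreGhsas` patterns in the pnpm `auditConfig` above only admit well-formed identifiers: a CVE ID is a four-digit year plus a 4–7 digit sequence number, and a GHSA code is three dash-separated groups of four characters drawn from a restricted alphabet. A minimal sketch of what those patterns accept and reject, assuming the `regex` crate (the schema itself only states the patterns):

```rust
use regex::Regex;

fn main() {
    // Patterns copied from the auditConfig definitions above.
    let cve = Regex::new(r"^CVE-\d{4}-\d{4,7}$").unwrap();
    let ghsa = Regex::new(r"^GHSA(-[23456789cfghjmpqrvwx]{4}){3}$").unwrap();

    // CVE IDs: a four-digit year, then 4 to 7 digits.
    assert!(cve.is_match("CVE-2023-12345"));
    assert!(!cve.is_match("CVE-23-12345")); // year must be four digits

    // GHSA codes: three dash-separated groups from the restricted alphabet.
    assert!(ghsa.is_match("GHSA-7rjr-3q55-vv33"));
    assert!(!ghsa.is_match("GHSA-abcd-efgh-ijkl")); // 'a', 'b', ... are outside the alphabet
}
```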
@@ -452,6 +452,7 @@
|
||||
"$comment": "The value of 'null' is UNDOCUMENTED (https://github.com/microsoft/TypeScript/pull/18058).",
|
||||
"description": "Set the newline character for emitting files.",
|
||||
"type": ["string", "null"],
|
||||
"default": "lf",
|
||||
"anyOf": [
|
||||
{
|
||||
"enum": ["crlf", "lf"]
|
||||
@@ -663,11 +664,12 @@
|
||||
"ES2021",
|
||||
"ES2022",
|
||||
"ES2023",
|
||||
"ES2024",
|
||||
"ESNext"
|
||||
]
|
||||
},
|
||||
{
|
||||
"pattern": "^([Ee][Ss]([356]|(20(1[56789]|2[0123]))|[Nn][Ee][Xx][Tt]))$"
|
||||
"pattern": "^([Ee][Ss]([356]|(20(1[56789]|2[01234]))|[Nn][Ee][Xx][Tt]))$"
|
||||
}
|
||||
],
|
||||
"markdownDescription": "Set the JavaScript language version for emitted JavaScript and include compatible library declarations.\n\nSee more: https://www.typescriptlang.org/tsconfig#target"
|
||||
@@ -772,7 +774,7 @@
|
||||
"$comment": "The value of 'null' is UNDOCUMENTED (https://github.com/microsoft/TypeScript/pull/18058).",
|
||||
"description": "Ensure that casing is correct in imports.",
|
||||
"type": ["boolean", "null"],
|
||||
"default": false,
|
||||
"default": true,
|
||||
"markdownDescription": "Ensure that casing is correct in imports.\n\nSee more: https://www.typescriptlang.org/tsconfig#forceConsistentCasingInFileNames"
|
||||
},
|
||||
"generateCpuProfile": {
|
||||
@@ -928,6 +930,7 @@
|
||||
"ES2017.SharedMemory",
|
||||
"ES2017.String",
|
||||
"ES2017.TypedArrays",
|
||||
"ES2017.ArrayBuffer",
|
||||
"ES2018",
|
||||
"ES2018.AsyncGenerator",
|
||||
"ES2018.AsyncIterable",
|
||||
@@ -985,6 +988,14 @@
|
||||
"ES2022.RegExp",
|
||||
"ES2023",
|
||||
"ES2023.Array",
|
||||
"ES2024",
|
||||
"ES2024.ArrayBuffer",
|
||||
"ES2024.Collection",
|
||||
"ES2024.Object",
|
||||
"ES2024.Promise",
|
||||
"ES2024.Regexp",
|
||||
"ES2024.SharedMemory",
|
||||
"ES2024.String",
|
||||
"Decorators",
|
||||
"Decorators.Legacy",
|
||||
"ES2017.Date",
|
||||
@@ -1003,7 +1014,7 @@
|
||||
"pattern": "^[Ee][Ss]2016(\\.[Aa][Rr][Rr][Aa][Yy]\\.[Ii][Nn][Cc][Ll][Uu][Dd][Ee])?$"
|
||||
},
|
||||
{
|
||||
"pattern": "^[Ee][Ss]2017(\\.([Ii][Nn][Tt][Ll]|[Oo][Bb][Jj][Ee][Cc][Tt]|[Ss][Hh][Aa][Rr][Ee][Dd][Mm][Ee][Mm][Oo][Rr][Yy]|[Ss][Tt][Rr][Ii][Nn][Gg]|[Tt][Yy][Pp][Ee][Dd][Aa][Rr][Rr][Aa][Yy][Ss]|[Dd][Aa][Tt][Ee]))?$"
|
||||
"pattern": "^[Ee][Ss]2017(\\.([Ii][Nn][Tt][Ll]|[Oo][Bb][Jj][Ee][Cc][Tt]|[Ss][Hh][Aa][Rr][Ee][Dd][Mm][Ee][Mm][Oo][Rr][Yy]|[Ss][Tt][Rr][Ii][Nn][Gg]|[Tt][Yy][Pp][Ee][Dd][Aa][Rr][Rr][Aa][Yy][Ss]|[Dd][Aa][Tt][Ee]|[Aa][Rr][Rr][Aa][Yy][Bb][Uu][Ff][Ff][Ee][Rr]))?$"
|
||||
},
|
||||
{
|
||||
"pattern": "^[Ee][Ss]2018(\\.([Aa][Ss][Yy][Nn][Cc][Gg][Ee][Nn][Ee][Rr][Aa][Tt][Oo][Rr]|[Aa][Ss][Yy][Nn][Cc][Ii][Tt][Ee][Rr][Aa][Bb][Ll][Ee]|[Ii][Nn][Tt][Ll]|[Pp][Rr][Oo][Mm][Ii][Ss][Ee]|[Rr][Ee][Gg][Ee][Xx][Pp]))?$"
|
||||
@@ -1023,6 +1034,9 @@
|
||||
{
|
||||
"pattern": "^[Ee][Ss]2023(\\.([Aa][Rr][Rr][Aa][Yy]|[Cc][Oo][Ll][Ll][Ee][Cc][Tt][Ii][Oo][Nn]))?$"
|
||||
},
|
||||
{
|
||||
"pattern": "^[Ee][Ss]2024(\\.([Aa][Rr][Rr][Aa][Yy][Bb][Uu][Ff][Ff][Ee][Rr]|[Cc][Oo][Ll][Ll][Ee][Cc][Tt][Ii][Oo][Nn]|[Oo][Bb][Jj][Ee][Cc][Tt]|[Pp][Rr][Oo][Mm][Ii][Ss][Ee]|[Rr][Ee][Gg][Ee][Xx][Pp]|[Ss][Hh][Aa][Rr][Ee][Dd][Mm][Ee][Mm][Oo][Rr][Yy]|[Ss][Tt][Rr][Ii][Nn][Gg]))?$"
|
||||
},
|
||||
{
|
||||
"pattern": "^[Ee][Ss][Nn][Ee][Xx][Tt](\\.([Aa][Rr][Rr][Aa][Yy]|[Aa][Ss][Yy][Nn][Cc][Ii][Tt][Ee][Rr][Aa][Bb][Ll][Ee]|[Bb][Ii][Gg][Ii][Nn][Tt]|[Ii][Nn][Tt][Ll]|[Pp][Rr][Oo][Mm][Ii][Ss][Ee]|[Ss][Tt][Rr][Ii][Nn][Gg]|[Ss][Yy][Mm][Bb][Oo][Ll]|[Ww][Ee][Aa][Kk][Rr][Ee][Ff]|[Dd][Ee][Cc][Oo][Rr][Aa][Tt][Oo][Rr][Ss]|[Dd][Ii][Ss][Pp][Oo][Ss][Aa][Bb][Ll][Ee]))?$"
|
||||
},
|
||||
|
||||
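The widened `target` pattern above is the part of the tsconfig schema change that lets arbitrary casings of the new `ES2024` value validate; the year alternation previously stopped at 2023. A small check of the old pattern against the new one, again assuming the `regex` crate:

```rust
use regex::Regex;

fn main() {
    // Old and new `target` patterns from the hunk above.
    let old = Regex::new(r"^([Ee][Ss]([356]|(20(1[56789]|2[0123]))|[Nn][Ee][Xx][Tt]))$").unwrap();
    let new = Regex::new(r"^([Ee][Ss]([356]|(20(1[56789]|2[01234]))|[Nn][Ee][Xx][Tt]))$").unwrap();

    // Earlier editions were already accepted in any casing...
    assert!(old.is_match("es2022") && new.is_match("es2022"));
    // ...but ES2024 only validates once the year alternation includes "4".
    assert!(!old.is_match("es2024"));
    assert!(new.is_match("ES2024"));
}
```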
@@ -73,7 +73,7 @@ impl Anchor {
|
||||
if let Some(base_text) = snapshot
|
||||
.diffs
|
||||
.get(&excerpt.buffer_id)
|
||||
.and_then(|diff| diff.base_text())
|
||||
.map(|diff| diff.base_text())
|
||||
{
|
||||
let self_anchor = self.diff_base_anchor.filter(|a| base_text.can_resolve(a));
|
||||
let other_anchor = other.diff_base_anchor.filter(|a| base_text.can_resolve(a));
|
||||
@@ -110,7 +110,7 @@ impl Anchor {
|
||||
if let Some(base_text) = snapshot
|
||||
.diffs
|
||||
.get(&excerpt.buffer_id)
|
||||
.and_then(|diff| diff.base_text())
|
||||
.map(|diff| diff.base_text())
|
||||
{
|
||||
if a.buffer_id == Some(base_text.remote_id()) {
|
||||
return a.bias_left(base_text);
|
||||
@@ -135,7 +135,7 @@ impl Anchor {
|
||||
if let Some(base_text) = snapshot
|
||||
.diffs
|
||||
.get(&excerpt.buffer_id)
|
||||
.and_then(|diff| diff.base_text())
|
||||
.map(|diff| diff.base_text())
|
||||
{
|
||||
if a.buffer_id == Some(base_text.remote_id()) {
|
||||
return a.bias_right(&base_text);
|
||||
|
||||
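The three hunks above switch `.and_then(|diff| diff.base_text())` to `.map(|diff| diff.base_text())`, which only makes sense if `base_text()` now returns a snapshot unconditionally instead of an `Option`. A self-contained sketch of that call-site migration with hypothetical stand-in types (not Zed's real `BufferDiff`):

```rust
use std::collections::HashMap;

// Stand-in types: `base_text` used to be optional, now it is always present.
struct Snapshot(String);

struct Diff {
    base_text: Snapshot,
}

impl Diff {
    fn base_text(&self) -> &Snapshot {
        &self.base_text
    }
}

fn main() {
    let mut diffs: HashMap<u64, Diff> = HashMap::new();
    diffs.insert(1, Diff { base_text: Snapshot("old contents".into()) });

    // Before: `.and_then(|diff| diff.base_text())`, because the base text could be absent.
    // After: only the map lookup can fail, so `.map` is enough.
    if let Some(base_text) = diffs.get(&1).map(|diff| diff.base_text()) {
        println!("base text has {} bytes", base_text.0.len());
    }
}
```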
@@ -69,7 +69,7 @@ pub struct MultiBuffer {
|
||||
// only used by consumers using `set_excerpts_for_buffer`
|
||||
buffers_by_path: BTreeMap<PathKey, Vec<ExcerptId>>,
|
||||
diffs: HashMap<BufferId, DiffState>,
|
||||
all_diff_hunks_expanded: bool,
|
||||
// all_diff_hunks_expanded: bool,
|
||||
subscriptions: Topic,
|
||||
/// If true, the multi-buffer only contains a single [`Buffer`] and a single [`Excerpt`]
|
||||
singleton: bool,
|
||||
@@ -245,14 +245,9 @@ impl DiffState {
|
||||
DiffState {
|
||||
_subscription: cx.subscribe(&diff, |this, diff, event, cx| match event {
|
||||
BufferDiffEvent::DiffChanged { changed_range } => {
|
||||
let changed_range = if let Some(changed_range) = changed_range {
|
||||
changed_range.clone()
|
||||
} else if diff.read(cx).base_text().is_none() && this.all_diff_hunks_expanded {
|
||||
text::Anchor::MIN..text::Anchor::MAX
|
||||
} else {
|
||||
return;
|
||||
};
|
||||
this.buffer_diff_changed(diff, changed_range, cx)
|
||||
if let Some(changed_range) = changed_range.clone() {
|
||||
this.buffer_diff_changed(diff, changed_range, cx)
|
||||
}
|
||||
}
|
||||
BufferDiffEvent::LanguageChanged => this.buffer_diff_language_changed(diff, cx),
|
||||
}),
|
||||
@@ -270,6 +265,7 @@ pub struct MultiBufferSnapshot {
|
||||
diffs: TreeMap<BufferId, BufferDiffSnapshot>,
|
||||
diff_transforms: SumTree<DiffTransform>,
|
||||
trailing_excerpt_update_count: usize,
|
||||
all_diff_hunks_expanded: bool,
|
||||
non_text_state_update_count: usize,
|
||||
edit_count: usize,
|
||||
is_dirty: bool,
|
||||
@@ -559,7 +555,6 @@ impl MultiBuffer {
|
||||
}),
|
||||
buffers: RefCell::default(),
|
||||
diffs: HashMap::default(),
|
||||
all_diff_hunks_expanded: false,
|
||||
subscriptions: Topic::default(),
|
||||
singleton: false,
|
||||
capability,
|
||||
@@ -581,7 +576,6 @@ impl MultiBuffer {
|
||||
buffers: Default::default(),
|
||||
buffers_by_path: Default::default(),
|
||||
diffs: HashMap::default(),
|
||||
all_diff_hunks_expanded: false,
|
||||
subscriptions: Default::default(),
|
||||
singleton: false,
|
||||
capability,
|
||||
@@ -622,7 +616,6 @@ impl MultiBuffer {
|
||||
buffers: RefCell::new(buffers),
|
||||
buffers_by_path: Default::default(),
|
||||
diffs: diff_bases,
|
||||
all_diff_hunks_expanded: self.all_diff_hunks_expanded,
|
||||
subscriptions: Default::default(),
|
||||
singleton: self.singleton,
|
||||
capability: self.capability,
|
||||
@@ -2231,18 +2224,7 @@ impl MultiBuffer {
|
||||
let buffer = buffer_state.buffer.read(cx);
|
||||
let diff_change_range = range.to_offset(buffer);
|
||||
|
||||
let mut new_diff = diff.snapshot(cx);
|
||||
if new_diff.base_text().is_none() && self.all_diff_hunks_expanded {
|
||||
let secondary_diff_insertion = new_diff
|
||||
.secondary_diff()
|
||||
.map_or(true, |secondary_diff| secondary_diff.base_text().is_none());
|
||||
new_diff = BufferDiff::build_with_single_insertion(
|
||||
secondary_diff_insertion,
|
||||
buffer.snapshot(),
|
||||
cx,
|
||||
);
|
||||
}
|
||||
|
||||
let new_diff = diff.snapshot(cx);
|
||||
let mut snapshot = self.snapshot.borrow_mut();
|
||||
let base_text_changed = snapshot
|
||||
.diffs
|
||||
@@ -2398,12 +2380,12 @@ impl MultiBuffer {
|
||||
}
|
||||
|
||||
pub fn set_all_diff_hunks_expanded(&mut self, cx: &mut Context<Self>) {
|
||||
self.all_diff_hunks_expanded = true;
|
||||
self.snapshot.borrow_mut().all_diff_hunks_expanded = true;
|
||||
self.expand_or_collapse_diff_hunks(vec![Anchor::min()..Anchor::max()], true, cx);
|
||||
}
|
||||
|
||||
pub fn all_diff_hunks_expanded(&self) -> bool {
|
||||
self.all_diff_hunks_expanded
|
||||
self.snapshot.borrow().all_diff_hunks_expanded
|
||||
}
|
||||
|
||||
pub fn has_multiple_hunks(&self, cx: &App) -> bool {
|
||||
@@ -2459,7 +2441,7 @@ impl MultiBuffer {
|
||||
expand: bool,
|
||||
cx: &mut Context<Self>,
|
||||
) {
|
||||
if self.all_diff_hunks_expanded && !expand {
|
||||
if self.snapshot.borrow().all_diff_hunks_expanded && !expand {
|
||||
return;
|
||||
}
|
||||
self.sync(cx);
|
||||
@@ -2964,9 +2946,10 @@ impl MultiBuffer {
|
||||
}
|
||||
|
||||
// Avoid querying diff hunks if there's no possibility of hunks being expanded.
|
||||
let all_diff_hunks_expanded = snapshot.all_diff_hunks_expanded;
|
||||
if old_expanded_hunks.is_empty()
|
||||
&& change_kind == DiffChangeKind::BufferEdited
|
||||
&& !self.all_diff_hunks_expanded
|
||||
&& !all_diff_hunks_expanded
|
||||
{
|
||||
return false;
|
||||
}
|
||||
@@ -2976,11 +2959,7 @@ impl MultiBuffer {
|
||||
while let Some(excerpt) = excerpts.item() {
|
||||
// Recompute the expanded hunks in the portion of the excerpt that
|
||||
// intersects the edit.
|
||||
if let Some((diff, base_text)) = snapshot
|
||||
.diffs
|
||||
.get(&excerpt.buffer_id)
|
||||
.and_then(|diff| Some((diff, diff.base_text()?)))
|
||||
{
|
||||
if let Some(diff) = snapshot.diffs.get(&excerpt.buffer_id) {
|
||||
let buffer = &excerpt.buffer;
|
||||
let excerpt_start = *excerpts.start();
|
||||
let excerpt_end = excerpt_start + ExcerptOffset::new(excerpt.text_summary.len);
|
||||
@@ -2995,17 +2974,21 @@ impl MultiBuffer {
|
||||
buffer.anchor_before(edit_buffer_start)..buffer.anchor_after(edit_buffer_end);
|
||||
|
||||
for hunk in diff.hunks_intersecting_range(edit_anchor_range, buffer) {
|
||||
if hunk.is_created_file() && !all_diff_hunks_expanded {
|
||||
continue;
|
||||
}
|
||||
|
||||
let hunk_buffer_range = hunk.buffer_range.to_offset(buffer);
|
||||
if hunk_buffer_range.start < excerpt_buffer_start {
|
||||
log::trace!("skipping hunk that starts before excerpt");
|
||||
continue;
|
||||
}
|
||||
|
||||
let hunk_info = DiffTransformHunkInfo {
|
||||
excerpt_id: excerpt.id,
|
||||
hunk_start_anchor: hunk.buffer_range.start,
|
||||
hunk_secondary_status: hunk.secondary_status,
|
||||
};
|
||||
if hunk_buffer_range.start < excerpt_buffer_start {
|
||||
log::trace!("skipping hunk that starts before excerpt");
|
||||
continue;
|
||||
}
|
||||
|
||||
let hunk_excerpt_start = excerpt_start
|
||||
+ ExcerptOffset::new(
|
||||
@@ -3028,21 +3011,18 @@ impl MultiBuffer {
|
||||
let was_previously_expanded = old_expanded_hunks.contains(&hunk_info);
|
||||
let should_expand_hunk = match &change_kind {
|
||||
DiffChangeKind::DiffUpdated { base_changed: true } => {
|
||||
self.all_diff_hunks_expanded || was_previously_expanded
|
||||
was_previously_expanded || all_diff_hunks_expanded
|
||||
}
|
||||
DiffChangeKind::ExpandOrCollapseHunks { expand } => {
|
||||
let intersects = hunk_buffer_range.is_empty()
|
||||
|| hunk_buffer_range.end > edit_buffer_start;
|
||||
if *expand {
|
||||
intersects
|
||||
|| was_previously_expanded
|
||||
|| self.all_diff_hunks_expanded
|
||||
intersects || was_previously_expanded || all_diff_hunks_expanded
|
||||
} else {
|
||||
!intersects
|
||||
&& (was_previously_expanded || self.all_diff_hunks_expanded)
|
||||
!intersects && (was_previously_expanded || all_diff_hunks_expanded)
|
||||
}
|
||||
}
|
||||
_ => was_previously_expanded || self.all_diff_hunks_expanded,
|
||||
_ => was_previously_expanded || all_diff_hunks_expanded,
|
||||
};
|
||||
|
||||
if should_expand_hunk {
|
||||
@@ -3057,6 +3037,7 @@ impl MultiBuffer {
|
||||
&& hunk_buffer_range.start >= edit_buffer_start
|
||||
&& hunk_buffer_range.start <= excerpt_buffer_end
|
||||
{
|
||||
let base_text = diff.base_text();
|
||||
let mut text_cursor =
|
||||
base_text.as_rope().cursor(hunk.diff_base_byte_range.start);
|
||||
let mut base_text_summary =
|
||||
@@ -3500,11 +3481,14 @@ impl MultiBufferSnapshot {
|
||||
let buffer_end = buffer.anchor_after(buffer_range.end);
|
||||
Some(
|
||||
diff.hunks_intersecting_range(buffer_start..buffer_end, buffer)
|
||||
.map(|hunk| {
|
||||
(
|
||||
.filter_map(|hunk| {
|
||||
if hunk.is_created_file() && !self.all_diff_hunks_expanded {
|
||||
return None;
|
||||
}
|
||||
Some((
|
||||
Point::new(hunk.row_range.start, 0)..Point::new(hunk.row_range.end, 0),
|
||||
hunk,
|
||||
)
|
||||
))
|
||||
}),
|
||||
)
|
||||
})
|
||||
@@ -3557,7 +3541,9 @@ impl MultiBufferSnapshot {
|
||||
cursor.seek(&range.start);
|
||||
std::iter::from_fn(move || {
|
||||
let region = cursor.region()?;
|
||||
if region.range.start >= range.end {
|
||||
if region.range.start > range.end
|
||||
|| region.range.start == range.end && region.range.start > range.start
|
||||
{
|
||||
return None;
|
||||
}
|
||||
cursor.next_excerpt();
|
||||
@@ -4383,8 +4369,7 @@ impl MultiBufferSnapshot {
|
||||
} => {
|
||||
let buffer_start = base_text_byte_range.start + start_overshoot;
|
||||
let mut buffer_end = base_text_byte_range.start + end_overshoot;
|
||||
let Some(base_text) = self.diffs.get(buffer_id).and_then(|diff| diff.base_text())
|
||||
else {
|
||||
let Some(base_text) = self.diffs.get(buffer_id).map(|diff| diff.base_text()) else {
|
||||
panic!("{:?} is in non-existent deleted hunk", range.start)
|
||||
};
|
||||
|
||||
@@ -4432,8 +4417,7 @@ impl MultiBufferSnapshot {
|
||||
..
|
||||
} => {
|
||||
let buffer_end = base_text_byte_range.start + overshoot;
|
||||
let Some(base_text) = self.diffs.get(buffer_id).and_then(|diff| diff.base_text())
|
||||
else {
|
||||
let Some(base_text) = self.diffs.get(buffer_id).map(|diff| diff.base_text()) else {
|
||||
panic!("{:?} is in non-existent deleted hunk", range.end)
|
||||
};
|
||||
|
||||
@@ -4537,7 +4521,7 @@ impl MultiBufferSnapshot {
|
||||
}) => {
|
||||
if let Some(diff_base_anchor) = &anchor.diff_base_anchor {
|
||||
if let Some(base_text) =
|
||||
self.diffs.get(buffer_id).and_then(|diff| diff.base_text())
|
||||
self.diffs.get(buffer_id).map(|diff| diff.base_text())
|
||||
{
|
||||
if base_text.can_resolve(&diff_base_anchor) {
|
||||
let base_text_offset = diff_base_anchor.to_offset(&base_text);
|
||||
@@ -4867,17 +4851,14 @@ impl MultiBufferSnapshot {
|
||||
..
|
||||
}) = diff_transforms.item()
|
||||
{
|
||||
let base_text = self
|
||||
.diffs
|
||||
.get(buffer_id)
|
||||
.and_then(|diff| diff.base_text())
|
||||
.expect("missing diff base");
|
||||
let diff = self.diffs.get(buffer_id).expect("missing diff");
|
||||
if offset_in_transform > base_text_byte_range.len() {
|
||||
debug_assert!(*has_trailing_newline);
|
||||
bias = Bias::Right;
|
||||
} else {
|
||||
diff_base_anchor = Some(
|
||||
base_text.anchor_at(base_text_byte_range.start + offset_in_transform, bias),
|
||||
diff.base_text()
|
||||
.anchor_at(base_text_byte_range.start + offset_in_transform, bias),
|
||||
);
|
||||
bias = Bias::Left;
|
||||
}
|
||||
@@ -6235,7 +6216,7 @@ where
|
||||
..
|
||||
} => {
|
||||
let diff = self.diffs.get(&buffer_id)?;
|
||||
let buffer = diff.base_text()?;
|
||||
let buffer = diff.base_text();
|
||||
let mut rope_cursor = buffer.as_rope().cursor(0);
|
||||
let buffer_start = rope_cursor.summary::<D>(base_text_byte_range.start);
|
||||
let buffer_range_len = rope_cursor.summary::<D>(base_text_byte_range.end);
|
||||
@@ -7282,7 +7263,7 @@ impl<'a> Iterator for MultiBufferChunks<'a> {
|
||||
}
|
||||
chunks
|
||||
} else {
|
||||
let base_buffer = &self.diffs.get(&buffer_id)?.base_text()?;
|
||||
let base_buffer = &self.diffs.get(&buffer_id)?.base_text();
|
||||
base_buffer.chunks(base_text_start..base_text_end, self.language_aware)
|
||||
};
|
||||
|
||||
|
||||
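Several of the multi-buffer hunks above move `all_diff_hunks_expanded` off the `MultiBuffer` struct and onto the shared snapshot, so readers consult `self.snapshot.borrow().all_diff_hunks_expanded` and any cloned snapshot keeps the value it was taken with. A rough sketch of that pattern with placeholder types, not the actual implementation:

```rust
use std::cell::RefCell;

#[derive(Clone, Default)]
struct Snapshot {
    all_diff_hunks_expanded: bool,
}

#[derive(Default)]
struct MultiBufferLike {
    snapshot: RefCell<Snapshot>,
}

impl MultiBufferLike {
    // Mutations write through to the current snapshot...
    fn set_all_diff_hunks_expanded(&mut self) {
        self.snapshot.borrow_mut().all_diff_hunks_expanded = true;
    }

    // ...and reads go through the snapshot rather than a field on `self`.
    fn all_diff_hunks_expanded(&self) -> bool {
        self.snapshot.borrow().all_diff_hunks_expanded
    }

    // Anyone holding a cloned snapshot keeps the value observed at clone time.
    fn snapshot(&self) -> Snapshot {
        self.snapshot.borrow().clone()
    }
}

fn main() {
    let mut buffer = MultiBufferLike::default();
    let before = buffer.snapshot();
    buffer.set_all_diff_hunks_expanded();
    assert!(!before.all_diff_hunks_expanded);
    assert!(buffer.all_diff_hunks_expanded());
}
```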
@@ -1999,8 +1999,8 @@ fn test_diff_hunks_with_multiple_excerpts(cx: &mut TestAppContext) {
|
||||
|
||||
let id_1 = buffer_1.read_with(cx, |buffer, _| buffer.remote_id());
|
||||
let id_2 = buffer_2.read_with(cx, |buffer, _| buffer.remote_id());
|
||||
let base_id_1 = diff_1.read_with(cx, |diff, _| diff.base_text().as_ref().unwrap().remote_id());
|
||||
let base_id_2 = diff_2.read_with(cx, |diff, _| diff.base_text().as_ref().unwrap().remote_id());
|
||||
let base_id_1 = diff_1.read_with(cx, |diff, _| diff.base_text().remote_id());
|
||||
let base_id_2 = diff_2.read_with(cx, |diff, _| diff.base_text().remote_id());
|
||||
|
||||
let buffer_lines = (0..=snapshot.max_row().0)
|
||||
.map(|row| {
|
||||
@@ -2035,6 +2035,25 @@ fn test_diff_hunks_with_multiple_excerpts(cx: &mut TestAppContext) {
|
||||
]
|
||||
);
|
||||
|
||||
let buffer_ids_by_range = [
|
||||
(Point::new(0, 0)..Point::new(0, 0), &[id_1] as &[_]),
|
||||
(Point::new(0, 0)..Point::new(2, 0), &[id_1]),
|
||||
(Point::new(2, 0)..Point::new(2, 0), &[id_1]),
|
||||
(Point::new(3, 0)..Point::new(3, 0), &[id_1]),
|
||||
(Point::new(8, 0)..Point::new(9, 0), &[id_1]),
|
||||
(Point::new(8, 0)..Point::new(10, 0), &[id_1, id_2]),
|
||||
(Point::new(9, 0)..Point::new(9, 0), &[id_2]),
|
||||
];
|
||||
for (range, buffer_ids) in buffer_ids_by_range {
|
||||
assert_eq!(
|
||||
snapshot
|
||||
.buffer_ids_for_range(range.clone())
|
||||
.collect::<Vec<_>>(),
|
||||
buffer_ids,
|
||||
"buffer_ids_for_range({range:?}"
|
||||
);
|
||||
}
|
||||
|
||||
assert_position_translation(&snapshot);
|
||||
assert_line_indents(&snapshot);
|
||||
|
||||
@@ -2221,8 +2240,7 @@ impl ReferenceMultibuffer {
|
||||
let buffer = excerpt.buffer.read(cx);
|
||||
let buffer_range = excerpt.range.to_offset(buffer);
|
||||
let diff = self.diffs.get(&buffer.remote_id()).unwrap().read(cx);
|
||||
// let diff = diff.snapshot.clone();
|
||||
let base_buffer = diff.base_text().unwrap();
|
||||
let base_buffer = diff.base_text();
|
||||
|
||||
let mut offset = buffer_range.start;
|
||||
let mut hunks = diff
|
||||
|
||||
@@ -79,7 +79,7 @@ pub fn panel_editor_container(_window: &mut Window, cx: &mut App) -> Div {
|
||||
.bg(cx.theme().colors().editor_background)
|
||||
}
|
||||
|
||||
pub fn panel_editor_style(monospace: bool, window: &mut Window, cx: &mut App) -> EditorStyle {
|
||||
pub fn panel_editor_style(monospace: bool, window: &Window, cx: &App) -> EditorStyle {
|
||||
let settings = ThemeSettings::get_global(cx);
|
||||
|
||||
let font_size = TextSize::Small.rems(cx).to_pixels(window.rem_size());
|
||||
|
||||
@@ -816,20 +816,20 @@ impl LocalBufferStore {
|
||||
.any(|(work_dir, _)| file.path.starts_with(work_dir))
|
||||
{
|
||||
let snapshot = buffer.text_snapshot();
|
||||
let has_unstaged_diff = diff_state
|
||||
.unstaged_diff
|
||||
.as_ref()
|
||||
.is_some_and(|diff| diff.is_upgradable());
|
||||
let has_uncommitted_diff = diff_state
|
||||
.uncommitted_diff
|
||||
.as_ref()
|
||||
.is_some_and(|set| set.is_upgradable());
|
||||
diff_state_updates.push((
|
||||
snapshot.clone(),
|
||||
file.path.clone(),
|
||||
diff_state
|
||||
.unstaged_diff
|
||||
.as_ref()
|
||||
.and_then(|set| set.upgrade())
|
||||
.is_some(),
|
||||
diff_state
|
||||
.uncommitted_diff
|
||||
.as_ref()
|
||||
.and_then(|set| set.upgrade())
|
||||
.is_some(),
|
||||
))
|
||||
has_unstaged_diff.then(|| diff_state.index_text.clone()),
|
||||
has_uncommitted_diff.then(|| diff_state.head_text.clone()),
|
||||
));
|
||||
}
|
||||
}
|
||||
|
||||
@@ -845,37 +845,47 @@ impl LocalBufferStore {
|
||||
diff_state_updates
|
||||
.into_iter()
|
||||
.filter_map(
|
||||
|(buffer_snapshot, path, needs_staged_text, needs_committed_text)| {
|
||||
|(buffer_snapshot, path, current_index_text, current_head_text)| {
|
||||
let local_repo = snapshot.local_repo_for_path(&path)?;
|
||||
let relative_path = local_repo.relativize(&path).ok()?;
|
||||
let staged_text = if needs_staged_text {
|
||||
let index_text = if current_index_text.is_some() {
|
||||
local_repo.repo().load_index_text(&relative_path)
|
||||
} else {
|
||||
None
|
||||
};
|
||||
let committed_text = if needs_committed_text {
|
||||
let head_text = if current_head_text.is_some() {
|
||||
local_repo.repo().load_committed_text(&relative_path)
|
||||
} else {
|
||||
None
|
||||
};
|
||||
let diff_bases_change =
|
||||
match (needs_staged_text, needs_committed_text) {
|
||||
(true, true) => Some(if staged_text == committed_text {
|
||||
DiffBasesChange::SetBoth(committed_text)
|
||||
} else {
|
||||
DiffBasesChange::SetEach {
|
||||
index: staged_text,
|
||||
head: committed_text,
|
||||
}
|
||||
}),
|
||||
(true, false) => {
|
||||
Some(DiffBasesChange::SetIndex(staged_text))
|
||||
|
||||
// Avoid triggering a diff update if the base text has not changed.
|
||||
if let Some((current_index, current_head)) =
|
||||
current_index_text.as_ref().zip(current_head_text.as_ref())
|
||||
{
|
||||
if current_index.as_deref() == index_text.as_ref()
|
||||
&& current_head.as_deref() == head_text.as_ref()
|
||||
{
|
||||
return None;
|
||||
}
|
||||
}
|
||||
|
||||
let diff_bases_change = match (
|
||||
current_index_text.is_some(),
|
||||
current_head_text.is_some(),
|
||||
) {
|
||||
(true, true) => Some(if index_text == head_text {
|
||||
DiffBasesChange::SetBoth(head_text)
|
||||
} else {
|
||||
DiffBasesChange::SetEach {
|
||||
index: index_text,
|
||||
head: head_text,
|
||||
}
|
||||
(false, true) => {
|
||||
Some(DiffBasesChange::SetHead(committed_text))
|
||||
}
|
||||
(false, false) => None,
|
||||
};
|
||||
}),
|
||||
(true, false) => Some(DiffBasesChange::SetIndex(index_text)),
|
||||
(false, true) => Some(DiffBasesChange::SetHead(head_text)),
|
||||
(false, false) => None,
|
||||
};
|
||||
Some((buffer_snapshot, diff_bases_change))
|
||||
},
|
||||
)
|
||||
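The rewritten closure above keeps the same decision table as before: when both bases are wanted and the index text equals the HEAD text, a single `SetBoth` is sent; otherwise each base is set on its own, and nothing is sent when neither is wanted. A standalone sketch of that selection logic (the enum mirrors, but is not, the project's `DiffBasesChange`):

```rust
#[derive(Debug, PartialEq)]
enum DiffBasesChange {
    SetIndex(Option<String>),
    SetHead(Option<String>),
    SetBoth(Option<String>),
    SetEach { index: Option<String>, head: Option<String> },
}

fn diff_bases_change(
    wants_index: bool,
    wants_head: bool,
    index_text: Option<String>,
    head_text: Option<String>,
) -> Option<DiffBasesChange> {
    match (wants_index, wants_head) {
        (true, true) => Some(if index_text == head_text {
            // Index matches HEAD, so both diffs can share one base text.
            DiffBasesChange::SetBoth(head_text)
        } else {
            DiffBasesChange::SetEach { index: index_text, head: head_text }
        }),
        (true, false) => Some(DiffBasesChange::SetIndex(index_text)),
        (false, true) => Some(DiffBasesChange::SetHead(head_text)),
        (false, false) => None,
    }
}

fn main() {
    let same = Some("fn main() {}\n".to_string());
    assert_eq!(
        diff_bases_change(true, true, same.clone(), same.clone()),
        Some(DiffBasesChange::SetBoth(same))
    );
    assert_eq!(diff_bases_change(false, false, None, None), None);
}
```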
@@ -1476,14 +1486,15 @@ impl BufferStore {
|
||||
diff_state.language = language;
|
||||
diff_state.language_registry = language_registry;
|
||||
|
||||
let diff = cx.new(|_| BufferDiff::new(&text_snapshot));
|
||||
let diff = cx.new(|cx| BufferDiff::new(&text_snapshot, cx));
|
||||
match kind {
|
||||
DiffKind::Unstaged => diff_state.unstaged_diff = Some(diff.downgrade()),
|
||||
DiffKind::Uncommitted => {
|
||||
let unstaged_diff = if let Some(diff) = diff_state.unstaged_diff() {
|
||||
diff
|
||||
} else {
|
||||
let unstaged_diff = cx.new(|_| BufferDiff::new(&text_snapshot));
|
||||
let unstaged_diff =
|
||||
cx.new(|cx| BufferDiff::new(&text_snapshot, cx));
|
||||
diff_state.unstaged_diff = Some(unstaged_diff.downgrade());
|
||||
unstaged_diff
|
||||
};
|
||||
@@ -2384,8 +2395,7 @@ impl BufferStore {
|
||||
shared.diff = Some(diff.clone());
|
||||
}
|
||||
})?;
|
||||
let staged_text =
|
||||
diff.read_with(&cx, |diff, _| diff.base_text().map(|buffer| buffer.text()))?;
|
||||
let staged_text = diff.read_with(&cx, |diff, _| diff.base_text_string())?;
|
||||
Ok(proto::OpenUnstagedDiffResponse { staged_text })
|
||||
}
|
||||
|
||||
@@ -2415,22 +2425,25 @@ impl BufferStore {
|
||||
diff.read_with(&cx, |diff, cx| {
|
||||
use proto::open_uncommitted_diff_response::Mode;
|
||||
|
||||
let staged_buffer = diff
|
||||
.secondary_diff()
|
||||
.and_then(|diff| diff.read(cx).base_text());
|
||||
let unstaged_diff = diff.secondary_diff();
|
||||
let index_snapshot = unstaged_diff.and_then(|diff| {
|
||||
let diff = diff.read(cx);
|
||||
diff.base_text_exists().then(|| diff.base_text())
|
||||
});
|
||||
|
||||
let mode;
|
||||
let staged_text;
|
||||
let committed_text;
|
||||
if let Some(committed_buffer) = diff.base_text() {
|
||||
committed_text = Some(committed_buffer.text());
|
||||
if let Some(staged_buffer) = staged_buffer {
|
||||
if staged_buffer.remote_id() == committed_buffer.remote_id() {
|
||||
if diff.base_text_exists() {
|
||||
let committed_snapshot = diff.base_text();
|
||||
committed_text = Some(committed_snapshot.text());
|
||||
if let Some(index_text) = index_snapshot {
|
||||
if index_text.remote_id() == committed_snapshot.remote_id() {
|
||||
mode = Mode::IndexMatchesHead;
|
||||
staged_text = None;
|
||||
} else {
|
||||
mode = Mode::IndexAndHead;
|
||||
staged_text = Some(staged_buffer.text());
|
||||
staged_text = Some(index_text.text());
|
||||
}
|
||||
} else {
|
||||
mode = Mode::IndexAndHead;
|
||||
@@ -2439,7 +2452,7 @@ impl BufferStore {
|
||||
} else {
|
||||
mode = Mode::IndexAndHead;
|
||||
committed_text = None;
|
||||
staged_text = staged_buffer.as_ref().map(|buffer| buffer.text());
|
||||
staged_text = index_snapshot.as_ref().map(|buffer| buffer.text());
|
||||
}
|
||||
|
||||
proto::OpenUncommittedDiffResponse {
|
||||
|
||||
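The `Mode` selection above reduces to: when HEAD's base text exists and the index snapshot is the very same buffer (equal remote ids), reply with `IndexMatchesHead` and omit the staged text; otherwise send whichever texts exist under `IndexAndHead`. A reduced sketch of that branch, with plain integers standing in for remote ids:

```rust
#[derive(Debug, PartialEq)]
enum Mode {
    IndexMatchesHead,
    IndexAndHead,
}

// `head` and `index` carry (remote_id, text) when the corresponding base exists.
// Returns (mode, staged_text, committed_text).
fn choose_mode(
    head: Option<(u64, &str)>,
    index: Option<(u64, &str)>,
) -> (Mode, Option<String>, Option<String>) {
    match head {
        Some((head_id, head_text)) => match index {
            // The same snapshot backs both diffs: no need to resend the staged text.
            Some((index_id, _)) if index_id == head_id => {
                (Mode::IndexMatchesHead, None, Some(head_text.to_string()))
            }
            Some((_, index_text)) => (
                Mode::IndexAndHead,
                Some(index_text.to_string()),
                Some(head_text.to_string()),
            ),
            None => (Mode::IndexAndHead, None, Some(head_text.to_string())),
        },
        None => (
            Mode::IndexAndHead,
            index.map(|(_, text)| text.to_string()),
            None,
        ),
    }
}

fn main() {
    assert_eq!(
        choose_mode(Some((7, "committed")), Some((7, "committed"))).0,
        Mode::IndexMatchesHead
    );
    assert_eq!(
        choose_mode(Some((7, "committed")), Some((9, "staged"))).0,
        Mode::IndexAndHead
    );
}
```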
@@ -18,6 +18,7 @@ use language::{Buffer, LanguageRegistry};
|
||||
use rpc::proto::{git_reset, ToProto};
|
||||
use rpc::{proto, AnyProtoClient, TypedEnvelope};
|
||||
use settings::WorktreeId;
|
||||
use std::collections::VecDeque;
|
||||
use std::future::Future;
|
||||
use std::path::{Path, PathBuf};
|
||||
use std::sync::Arc;
|
||||
@@ -25,8 +26,6 @@ use text::BufferId;
|
||||
use util::{maybe, ResultExt};
|
||||
use worktree::{ProjectEntryId, RepositoryEntry, StatusEntry};
|
||||
|
||||
type GitJob = Box<dyn FnOnce(&mut AsyncApp) -> Task<()>>;
|
||||
|
||||
pub struct GitStore {
|
||||
buffer_store: Entity<BufferStore>,
|
||||
pub(super) project_id: Option<ProjectId>,
|
||||
@@ -64,6 +63,16 @@ pub enum GitEvent {
|
||||
GitStateUpdated,
|
||||
}
|
||||
|
||||
struct GitJob {
|
||||
job: Box<dyn FnOnce(&mut AsyncApp) -> Task<()>>,
|
||||
key: Option<GitJobKey>,
|
||||
}
|
||||
|
||||
#[derive(PartialEq, Eq)]
|
||||
enum GitJobKey {
|
||||
WriteIndex(RepoPath),
|
||||
}
|
||||
|
||||
impl EventEmitter<GitEvent> for GitStore {}
|
||||
|
||||
impl GitStore {
|
||||
@@ -223,9 +232,29 @@ impl GitStore {
|
||||
|
||||
fn spawn_git_worker(cx: &mut Context<'_, GitStore>) -> mpsc::UnboundedSender<GitJob> {
|
||||
let (job_tx, mut job_rx) = mpsc::unbounded::<GitJob>();
|
||||
|
||||
cx.spawn(|_, mut cx| async move {
|
||||
while let Some(job) = job_rx.next().await {
|
||||
job(&mut cx).await
|
||||
let mut jobs = VecDeque::new();
|
||||
loop {
|
||||
while let Ok(Some(next_job)) = job_rx.try_next() {
|
||||
jobs.push_back(next_job);
|
||||
}
|
||||
|
||||
if let Some(job) = jobs.pop_front() {
|
||||
if let Some(current_key) = &job.key {
|
||||
if jobs
|
||||
.iter()
|
||||
.any(|other_job| other_job.key.as_ref() == Some(current_key))
|
||||
{
|
||||
continue;
|
||||
}
|
||||
}
|
||||
(job.job)(&mut cx).await;
|
||||
} else if let Some(job) = job_rx.next().await {
|
||||
jobs.push_back(job);
|
||||
} else {
|
||||
break;
|
||||
}
|
||||
}
|
||||
})
|
||||
.detach();
|
||||
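The worker loop above drains everything already queued before running the next job, and a keyed job is skipped whenever a newer job with the same key is still waiting, so only the most recent write for a given path actually executes. A synchronous miniature of that coalescing strategy, with plain closures instead of gpui tasks:

```rust
use std::collections::VecDeque;

#[derive(PartialEq, Eq, Clone)]
enum JobKey {
    WriteIndex(String),
}

struct Job {
    key: Option<JobKey>,
    run: Box<dyn FnOnce()>,
}

fn drain(incoming: Vec<Job>) {
    let mut jobs: VecDeque<Job> = incoming.into_iter().collect();

    while let Some(job) = jobs.pop_front() {
        // Skip this job if a newer job with the same key is still queued.
        if let Some(current_key) = &job.key {
            if jobs.iter().any(|other| other.key.as_ref() == Some(current_key)) {
                continue;
            }
        }
        (job.run)();
    }
}

fn main() {
    let key = JobKey::WriteIndex("src/main.rs".into());
    drain(vec![
        Job { key: Some(key.clone()), run: Box::new(|| println!("stale write (never runs)")) },
        Job { key: None, run: Box::new(|| println!("unkeyed job runs")) },
        Job { key: Some(key), run: Box::new(|| println!("latest write runs")) },
    ]);
}
```

In the store itself the key used for index writes is `GitJobKey::WriteIndex(path)`, which is what lets rapid successive `set_index_text` calls for one file collapse to the last one.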
@@ -567,6 +596,15 @@ impl Repository {
|
||||
}
|
||||
|
||||
fn send_job<F, Fut, R>(&self, job: F) -> oneshot::Receiver<R>
|
||||
where
|
||||
F: FnOnce(GitRepo) -> Fut + 'static,
|
||||
Fut: Future<Output = R> + Send + 'static,
|
||||
R: Send + 'static,
|
||||
{
|
||||
self.send_keyed_job(None, job)
|
||||
}
|
||||
|
||||
fn send_keyed_job<F, Fut, R>(&self, key: Option<GitJobKey>, job: F) -> oneshot::Receiver<R>
|
||||
where
|
||||
F: FnOnce(GitRepo) -> Fut + 'static,
|
||||
Fut: Future<Output = R> + Send + 'static,
|
||||
@@ -575,13 +613,16 @@ impl Repository {
|
||||
let (result_tx, result_rx) = futures::channel::oneshot::channel();
|
||||
let git_repo = self.git_repo.clone();
|
||||
self.job_sender
|
||||
.unbounded_send(Box::new(|cx: &mut AsyncApp| {
|
||||
let job = job(git_repo);
|
||||
cx.background_spawn(async move {
|
||||
let result = job.await;
|
||||
result_tx.send(result).ok();
|
||||
})
|
||||
}))
|
||||
.unbounded_send(GitJob {
|
||||
key,
|
||||
job: Box::new(|cx: &mut AsyncApp| {
|
||||
let job = job(git_repo);
|
||||
cx.background_spawn(async move {
|
||||
let result = job.await;
|
||||
result_tx.send(result).ok();
|
||||
})
|
||||
}),
|
||||
})
|
||||
.ok();
|
||||
result_rx
|
||||
}
|
||||
@@ -1144,28 +1185,31 @@ impl Repository {
|
||||
content: Option<String>,
|
||||
) -> oneshot::Receiver<anyhow::Result<()>> {
|
||||
let path = path.clone();
|
||||
self.send_job(|git_repo| async move {
|
||||
match git_repo {
|
||||
GitRepo::Local(repo) => repo.set_index_text(&path, content),
|
||||
GitRepo::Remote {
|
||||
project_id,
|
||||
client,
|
||||
worktree_id,
|
||||
work_directory_id,
|
||||
} => {
|
||||
client
|
||||
.request(proto::SetIndexText {
|
||||
project_id: project_id.0,
|
||||
worktree_id: worktree_id.to_proto(),
|
||||
work_directory_id: work_directory_id.to_proto(),
|
||||
path: path.as_ref().to_proto(),
|
||||
text: content,
|
||||
})
|
||||
.await?;
|
||||
Ok(())
|
||||
self.send_keyed_job(
|
||||
Some(GitJobKey::WriteIndex(path.clone())),
|
||||
|git_repo| async move {
|
||||
match git_repo {
|
||||
GitRepo::Local(repo) => repo.set_index_text(&path, content),
|
||||
GitRepo::Remote {
|
||||
project_id,
|
||||
client,
|
||||
worktree_id,
|
||||
work_directory_id,
|
||||
} => {
|
||||
client
|
||||
.request(proto::SetIndexText {
|
||||
project_id: project_id.0,
|
||||
worktree_id: worktree_id.to_proto(),
|
||||
work_directory_id: work_directory_id.to_proto(),
|
||||
path: path.as_ref().to_proto(),
|
||||
text: content,
|
||||
})
|
||||
.await?;
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
}
|
||||
})
|
||||
},
|
||||
)
|
||||
}
|
||||
|
||||
pub fn get_remotes(
|
||||
|
||||
@@ -1089,6 +1089,64 @@ impl LocalLspStore {
|
||||
self.language_servers_for_buffer(buffer, cx).next()
|
||||
}
|
||||
|
||||
async fn execute_code_action_kind_locally(
|
||||
lsp_store: WeakEntity<LspStore>,
|
||||
mut buffers: Vec<Entity<Buffer>>,
|
||||
kind: CodeActionKind,
|
||||
push_to_history: bool,
|
||||
mut cx: AsyncApp,
|
||||
) -> anyhow::Result<ProjectTransaction> {
|
||||
// Do not allow multiple concurrent code action requests for the
|
||||
// same buffer.
|
||||
lsp_store.update(&mut cx, |this, cx| {
|
||||
let this = this.as_local_mut().unwrap();
|
||||
buffers.retain(|buffer| {
|
||||
this.buffers_being_formatted
|
||||
.insert(buffer.read(cx).remote_id())
|
||||
});
|
||||
})?;
|
||||
let _cleanup = defer({
|
||||
let this = lsp_store.clone();
|
||||
let mut cx = cx.clone();
|
||||
let buffers = &buffers;
|
||||
move || {
|
||||
this.update(&mut cx, |this, cx| {
|
||||
let this = this.as_local_mut().unwrap();
|
||||
for buffer in buffers {
|
||||
this.buffers_being_formatted
|
||||
.remove(&buffer.read(cx).remote_id());
|
||||
}
|
||||
})
|
||||
.ok();
|
||||
}
|
||||
});
|
||||
let mut project_transaction = ProjectTransaction::default();
|
||||
|
||||
for buffer in &buffers {
|
||||
let adapters_and_servers = lsp_store.update(&mut cx, |lsp_store, cx| {
|
||||
buffer.update(cx, |buffer, cx| {
|
||||
lsp_store
|
||||
.as_local()
|
||||
.unwrap()
|
||||
.language_servers_for_buffer(buffer, cx)
|
||||
.map(|(adapter, lsp)| (adapter.clone(), lsp.clone()))
|
||||
.collect::<Vec<_>>()
|
||||
})
|
||||
})?;
|
||||
Self::execute_code_actions_on_servers(
|
||||
&lsp_store,
|
||||
&adapters_and_servers,
|
||||
vec![kind.clone()],
|
||||
&buffer,
|
||||
push_to_history,
|
||||
&mut project_transaction,
|
||||
&mut cx,
|
||||
)
|
||||
.await?;
|
||||
}
|
||||
Ok(project_transaction)
|
||||
}
|
||||
|
||||
async fn format_locally(
|
||||
lsp_store: WeakEntity<LspStore>,
|
||||
mut buffers: Vec<FormattableBuffer>,
|
||||
@@ -2900,6 +2958,7 @@ impl LspStore {
|
||||
client.add_entity_message_handler(Self::handle_language_server_log);
|
||||
client.add_entity_message_handler(Self::handle_update_diagnostic_summary);
|
||||
client.add_entity_request_handler(Self::handle_format_buffers);
|
||||
client.add_entity_request_handler(Self::handle_apply_code_action_kind);
|
||||
client.add_entity_request_handler(Self::handle_resolve_completion_documentation);
|
||||
client.add_entity_request_handler(Self::handle_apply_code_action);
|
||||
client.add_entity_request_handler(Self::handle_inlay_hints);
|
||||
@@ -3891,6 +3950,65 @@ impl LspStore {
|
||||
}
|
||||
}
|
||||
|
||||
pub fn apply_code_action_kind(
|
||||
&mut self,
|
||||
buffers: HashSet<Entity<Buffer>>,
|
||||
kind: CodeActionKind,
|
||||
push_to_history: bool,
|
||||
cx: &mut Context<Self>,
|
||||
) -> Task<anyhow::Result<ProjectTransaction>> {
|
||||
if let Some(_) = self.as_local() {
|
||||
cx.spawn(move |lsp_store, mut cx| async move {
|
||||
let buffers = buffers.into_iter().collect::<Vec<_>>();
|
||||
let result = LocalLspStore::execute_code_action_kind_locally(
|
||||
lsp_store.clone(),
|
||||
buffers,
|
||||
kind,
|
||||
push_to_history,
|
||||
cx.clone(),
|
||||
)
|
||||
.await;
|
||||
lsp_store.update(&mut cx, |lsp_store, _| {
|
||||
lsp_store.update_last_formatting_failure(&result);
|
||||
})?;
|
||||
result
|
||||
})
|
||||
} else if let Some((client, project_id)) = self.upstream_client() {
|
||||
let buffer_store = self.buffer_store();
|
||||
cx.spawn(move |lsp_store, mut cx| async move {
|
||||
let result = client
|
||||
.request(proto::ApplyCodeActionKind {
|
||||
project_id,
|
||||
kind: kind.as_str().to_owned(),
|
||||
buffer_ids: buffers
|
||||
.iter()
|
||||
.map(|buffer| {
|
||||
buffer.update(&mut cx, |buffer, _| buffer.remote_id().into())
|
||||
})
|
||||
.collect::<Result<_>>()?,
|
||||
})
|
||||
.await
|
||||
.and_then(|result| result.transaction.context("missing transaction"));
|
||||
lsp_store.update(&mut cx, |lsp_store, _| {
|
||||
lsp_store.update_last_formatting_failure(&result);
|
||||
})?;
|
||||
|
||||
let transaction_response = result?;
|
||||
buffer_store
|
||||
.update(&mut cx, |buffer_store, cx| {
|
||||
buffer_store.deserialize_project_transaction(
|
||||
transaction_response,
|
||||
push_to_history,
|
||||
cx,
|
||||
)
|
||||
})?
|
||||
.await
|
||||
})
|
||||
} else {
|
||||
Task::ready(Ok(ProjectTransaction::default()))
|
||||
}
|
||||
}
|
||||
|
||||
pub fn resolve_inlay_hint(
|
||||
&self,
|
||||
hint: InlayHint,
|
||||
@@ -7229,6 +7347,48 @@ impl LspStore {
|
||||
})
|
||||
}
|
||||
|
||||
async fn handle_apply_code_action_kind(
|
||||
this: Entity<Self>,
|
||||
envelope: TypedEnvelope<proto::ApplyCodeActionKind>,
|
||||
mut cx: AsyncApp,
|
||||
) -> Result<proto::ApplyCodeActionKindResponse> {
|
||||
let sender_id = envelope.original_sender_id().unwrap_or_default();
|
||||
let format = this.update(&mut cx, |this, cx| {
|
||||
let mut buffers = HashSet::default();
|
||||
for buffer_id in &envelope.payload.buffer_ids {
|
||||
let buffer_id = BufferId::new(*buffer_id)?;
|
||||
buffers.insert(this.buffer_store.read(cx).get_existing(buffer_id)?);
|
||||
}
|
||||
let kind = match envelope.payload.kind.as_str() {
|
||||
"" => Ok(CodeActionKind::EMPTY),
|
||||
"quickfix" => Ok(CodeActionKind::QUICKFIX),
|
||||
"refactor" => Ok(CodeActionKind::REFACTOR),
|
||||
"refactor.extract" => Ok(CodeActionKind::REFACTOR_EXTRACT),
|
||||
"refactor.inline" => Ok(CodeActionKind::REFACTOR_INLINE),
|
||||
"refactor.rewrite" => Ok(CodeActionKind::REFACTOR_REWRITE),
|
||||
"source" => Ok(CodeActionKind::SOURCE),
|
||||
"source.organizeImports" => Ok(CodeActionKind::SOURCE_ORGANIZE_IMPORTS),
|
||||
"source.fixAll" => Ok(CodeActionKind::SOURCE_FIX_ALL),
|
||||
_ => Err(anyhow!("Invalid code action kind")),
|
||||
}?;
|
||||
anyhow::Ok(this.apply_code_action_kind(buffers, kind, false, cx))
|
||||
})??;
|
||||
|
||||
let project_transaction = format.await?;
|
||||
let project_transaction = this.update(&mut cx, |this, cx| {
|
||||
this.buffer_store.update(cx, |buffer_store, cx| {
|
||||
buffer_store.serialize_project_transaction_for_peer(
|
||||
project_transaction,
|
||||
sender_id,
|
||||
cx,
|
||||
)
|
||||
})
|
||||
})?;
|
||||
Ok(proto::ApplyCodeActionKindResponse {
|
||||
transaction: Some(project_transaction),
|
||||
})
|
||||
}
|
||||
|
||||
async fn shutdown_language_server(
|
||||
server_state: Option<LanguageServerState>,
|
||||
name: LanguageServerName,
|
||||
|
||||
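The remote handler above rebuilds a `CodeActionKind` from the string carried by `proto::ApplyCodeActionKind`, rejecting anything outside the well-known kinds. A sketch of the same mapping written directly against the `lsp-types` crate (its constant names are assumed here, and the handler's error type is simplified to a `String`):

```rust
use lsp_types::CodeActionKind;

// Mirrors the kind strings accepted by handle_apply_code_action_kind.
fn parse_kind(kind: &str) -> Result<CodeActionKind, String> {
    match kind {
        "" => Ok(CodeActionKind::EMPTY),
        "quickfix" => Ok(CodeActionKind::QUICKFIX),
        "refactor" => Ok(CodeActionKind::REFACTOR),
        "refactor.extract" => Ok(CodeActionKind::REFACTOR_EXTRACT),
        "refactor.inline" => Ok(CodeActionKind::REFACTOR_INLINE),
        "refactor.rewrite" => Ok(CodeActionKind::REFACTOR_REWRITE),
        "source" => Ok(CodeActionKind::SOURCE),
        "source.organizeImports" => Ok(CodeActionKind::SOURCE_ORGANIZE_IMPORTS),
        "source.fixAll" => Ok(CodeActionKind::SOURCE_FIX_ALL),
        other => Err(format!("invalid code action kind: {other:?}")),
    }
}

fn main() {
    // The requesting side serializes with `kind.as_str()`, so the round trip is
    // lossless for the kinds listed above.
    let kind = parse_kind("source.organizeImports").unwrap();
    assert_eq!(kind.as_str(), "source.organizeImports");
    assert!(parse_kind("source.somethingElse").is_err());
}
```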
@@ -3029,6 +3029,18 @@ impl Project {
|
||||
})
|
||||
}
|
||||
|
||||
pub fn apply_code_action_kind(
|
||||
&self,
|
||||
buffers: HashSet<Entity<Buffer>>,
|
||||
kind: CodeActionKind,
|
||||
push_to_history: bool,
|
||||
cx: &mut Context<Self>,
|
||||
) -> Task<Result<ProjectTransaction>> {
|
||||
self.lsp_store.update(cx, |lsp_store, cx| {
|
||||
lsp_store.apply_code_action_kind(buffers, kind, push_to_history, cx)
|
||||
})
|
||||
}
|
||||
|
||||
fn prepare_rename_impl(
|
||||
&mut self,
|
||||
buffer: Entity<Buffer>,
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
use crate::{task_inventory::TaskContexts, Event, *};
|
||||
use buffer_diff::{assert_hunks, DiffHunkSecondaryStatus, DiffHunkStatus};
|
||||
use buffer_diff::{assert_hunks, DiffHunkSecondaryStatus, DiffHunkStatus, DiffHunkStatusKind};
|
||||
use fs::FakeFs;
|
||||
use futures::{future, StreamExt};
|
||||
use gpui::{App, SemanticVersion, UpdateGlobal};
|
||||
@@ -5819,7 +5819,7 @@ async fn test_unstaged_diff_for_buffer(cx: &mut gpui::TestAppContext) {
|
||||
assert_hunks(
|
||||
unstaged_diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
|
||||
&snapshot,
|
||||
&unstaged_diff.base_text().unwrap().text(),
|
||||
&unstaged_diff.base_text().text(),
|
||||
&[(
|
||||
2..3,
|
||||
"",
|
||||
@@ -5860,19 +5860,25 @@ async fn test_uncommitted_diff_for_buffer(cx: &mut gpui::TestAppContext) {
|
||||
json!({
|
||||
".git": {},
|
||||
"src": {
|
||||
"main.rs": file_contents,
|
||||
"modification.rs": file_contents,
|
||||
}
|
||||
}),
|
||||
)
|
||||
.await;
|
||||
|
||||
fs.set_index_for_repo(
|
||||
Path::new("/dir/.git"),
|
||||
&[("src/main.rs".into(), staged_contents)],
|
||||
);
|
||||
fs.set_head_for_repo(
|
||||
Path::new("/dir/.git"),
|
||||
&[("src/main.rs".into(), committed_contents)],
|
||||
&[
|
||||
("src/modification.rs".into(), committed_contents),
|
||||
("src/deletion.rs".into(), "// the-deleted-contents\n".into()),
|
||||
],
|
||||
);
|
||||
fs.set_index_for_repo(
|
||||
Path::new("/dir/.git"),
|
||||
&[
|
||||
("src/modification.rs".into(), staged_contents),
|
||||
("src/deletion.rs".into(), "// the-deleted-contents\n".into()),
|
||||
],
|
||||
);
|
||||
|
||||
let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
|
||||
@@ -5880,33 +5886,28 @@ async fn test_uncommitted_diff_for_buffer(cx: &mut gpui::TestAppContext) {
|
||||
let language = rust_lang();
|
||||
language_registry.add(language.clone());
|
||||
|
||||
let buffer = project
|
||||
let buffer_1 = project
|
||||
.update(cx, |project, cx| {
|
||||
project.open_local_buffer("/dir/src/main.rs", cx)
|
||||
project.open_local_buffer("/dir/src/modification.rs", cx)
|
||||
})
|
||||
.await
|
||||
.unwrap();
|
||||
let uncommitted_diff = project
|
||||
let diff_1 = project
|
||||
.update(cx, |project, cx| {
|
||||
project.open_uncommitted_diff(buffer.clone(), cx)
|
||||
project.open_uncommitted_diff(buffer_1.clone(), cx)
|
||||
})
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
uncommitted_diff.read_with(cx, |diff, _| {
|
||||
assert_eq!(
|
||||
diff.base_text().and_then(|base| base.language().cloned()),
|
||||
Some(language)
|
||||
)
|
||||
diff_1.read_with(cx, |diff, _| {
|
||||
assert_eq!(diff.base_text().language().cloned(), Some(language))
|
||||
});
|
||||
|
||||
cx.run_until_parked();
|
||||
uncommitted_diff.update(cx, |uncommitted_diff, cx| {
|
||||
let snapshot = buffer.read(cx).snapshot();
|
||||
diff_1.update(cx, |diff, cx| {
|
||||
let snapshot = buffer_1.read(cx).snapshot();
|
||||
assert_hunks(
|
||||
uncommitted_diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
|
||||
diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
|
||||
&snapshot,
|
||||
&uncommitted_diff.base_text_string().unwrap(),
|
||||
&diff.base_text_string().unwrap(),
|
||||
&[
|
||||
(
|
||||
0..1,
|
||||
@@ -5924,25 +5925,29 @@ async fn test_uncommitted_diff_for_buffer(cx: &mut gpui::TestAppContext) {
|
||||
);
|
||||
});
|
||||
|
||||
// Reset HEAD to a version that differs from both the buffer and the index.
|
||||
let committed_contents = r#"
|
||||
// print goodbye
|
||||
fn main() {
|
||||
}
|
||||
"#
|
||||
.unindent();
|
||||
|
||||
fs.set_head_for_repo(
|
||||
Path::new("/dir/.git"),
|
||||
&[("src/main.rs".into(), committed_contents)],
|
||||
&[
|
||||
("src/modification.rs".into(), committed_contents.clone()),
|
||||
("src/deletion.rs".into(), "// the-deleted-contents\n".into()),
|
||||
],
|
||||
);
|
||||
|
||||
// Buffer now has an unstaged hunk.
|
||||
cx.run_until_parked();
|
||||
uncommitted_diff.update(cx, |uncommitted_diff, cx| {
|
||||
let snapshot = buffer.read(cx).snapshot();
|
||||
diff_1.update(cx, |diff, cx| {
|
||||
let snapshot = buffer_1.read(cx).snapshot();
|
||||
assert_hunks(
|
||||
uncommitted_diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
|
||||
diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
|
||||
&snapshot,
|
||||
&uncommitted_diff.base_text().unwrap().text(),
|
||||
&diff.base_text().text(),
|
||||
&[(
|
||||
2..3,
|
||||
"",
|
||||
@@ -5951,6 +5956,56 @@ async fn test_uncommitted_diff_for_buffer(cx: &mut gpui::TestAppContext) {
|
||||
)],
|
||||
);
|
||||
});
|
||||
|
||||
// Open a buffer for a file that's been deleted.
|
||||
let buffer_2 = project
|
||||
.update(cx, |project, cx| {
|
||||
project.open_local_buffer("/dir/src/deletion.rs", cx)
|
||||
})
|
||||
.await
|
||||
.unwrap();
|
||||
let diff_2 = project
|
||||
.update(cx, |project, cx| {
|
||||
project.open_uncommitted_diff(buffer_2.clone(), cx)
|
||||
})
|
||||
.await
|
||||
.unwrap();
|
||||
cx.run_until_parked();
|
||||
diff_2.update(cx, |diff, cx| {
|
||||
let snapshot = buffer_2.read(cx).snapshot();
|
||||
assert_hunks(
|
||||
diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
|
||||
&snapshot,
|
||||
&diff.base_text_string().unwrap(),
|
||||
&[(
|
||||
0..0,
|
||||
"// the-deleted-contents\n",
|
||||
"",
|
||||
DiffHunkStatus::deleted(DiffHunkSecondaryStatus::HasSecondaryHunk),
|
||||
)],
|
||||
);
|
||||
});
|
||||
|
||||
// Stage the deletion of this file
|
||||
fs.set_index_for_repo(
|
||||
Path::new("/dir/.git"),
|
||||
&[("src/modification.rs".into(), committed_contents.clone())],
|
||||
);
|
||||
cx.run_until_parked();
|
||||
diff_2.update(cx, |diff, cx| {
|
||||
let snapshot = buffer_2.read(cx).snapshot();
|
||||
assert_hunks(
|
||||
diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
|
||||
&snapshot,
|
||||
&diff.base_text_string().unwrap(),
|
||||
&[(
|
||||
0..0,
|
||||
"// the-deleted-contents\n",
|
||||
"",
|
||||
DiffHunkStatus::deleted(DiffHunkSecondaryStatus::None),
|
||||
)],
|
||||
);
|
||||
});
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
@@ -5958,16 +6013,16 @@ async fn test_single_file_diffs(cx: &mut gpui::TestAppContext) {
|
||||
init_test(cx);
|
||||
|
||||
let committed_contents = r#"
|
||||
fn main() {
|
||||
println!("hello from HEAD");
|
||||
}
|
||||
"#
|
||||
fn main() {
|
||||
println!("hello from HEAD");
|
||||
}
|
||||
"#
|
||||
.unindent();
|
||||
let file_contents = r#"
|
||||
fn main() {
|
||||
println!("hello from the working copy");
|
||||
}
|
||||
"#
|
||||
fn main() {
|
||||
println!("hello from the working copy");
|
||||
}
|
||||
"#
|
||||
.unindent();
|
||||
|
||||
let fs = FakeFs::new(cx.background_executor.clone());
|
||||
@@ -5984,7 +6039,11 @@ async fn test_single_file_diffs(cx: &mut gpui::TestAppContext) {
|
||||
|
||||
fs.set_head_for_repo(
|
||||
Path::new("/dir/.git"),
|
||||
&[("src/main.rs".into(), committed_contents)],
|
||||
&[("src/main.rs".into(), committed_contents.clone())],
|
||||
);
|
||||
fs.set_index_for_repo(
|
||||
Path::new("/dir/.git"),
|
||||
&[("src/main.rs".into(), committed_contents.clone())],
|
||||
);
|
||||
|
||||
let project = Project::test(fs.clone(), ["/dir/src/main.rs".as_ref()], cx).await;
|
||||
@@ -6013,7 +6072,10 @@ async fn test_single_file_diffs(cx: &mut gpui::TestAppContext) {
|
||||
1..2,
|
||||
" println!(\"hello from HEAD\");\n",
|
||||
" println!(\"hello from the working copy\");\n",
|
||||
DiffHunkStatus::modified_none(),
|
||||
DiffHunkStatus {
|
||||
kind: DiffHunkStatusKind::Modified,
|
||||
secondary: DiffHunkSecondaryStatus::HasSecondaryHunk,
|
||||
},
|
||||
)],
|
||||
);
|
||||
});
|
||||
|
||||
@@ -13,31 +13,21 @@ path = "src/prompt_library.rs"
|
||||
|
||||
[dependencies]
|
||||
anyhow.workspace = true
|
||||
assets.workspace = true
|
||||
chrono.workspace = true
|
||||
collections.workspace = true
|
||||
editor.workspace = true
|
||||
fs.workspace = true
|
||||
futures.workspace = true
|
||||
fuzzy.workspace = true
|
||||
gpui.workspace = true
|
||||
handlebars.workspace = true
|
||||
heed.workspace = true
|
||||
language.workspace = true
|
||||
language_model.workspace = true
|
||||
log.workspace = true
|
||||
menu.workspace = true
|
||||
parking_lot.workspace = true
|
||||
paths.workspace = true
|
||||
picker.workspace = true
|
||||
prompt_store.workspace = true
|
||||
release_channel.workspace = true
|
||||
rope.workspace = true
|
||||
serde.workspace = true
|
||||
settings.workspace = true
|
||||
text.workspace = true
|
||||
theme.workspace = true
|
||||
ui.workspace = true
|
||||
util.workspace = true
|
||||
uuid.workspace = true
|
||||
workspace.workspace = true
|
||||
zed_actions.workspace = true
|
||||
|
||||
@@ -1,6 +1,3 @@
|
||||
mod prompt_store;
|
||||
mod prompts;
|
||||
|
||||
use anyhow::Result;
|
||||
use collections::{HashMap, HashSet};
|
||||
use editor::CompletionProvider;
|
||||
@@ -29,8 +26,7 @@ use util::{ResultExt, TryFutureExt};
|
||||
use workspace::Workspace;
|
||||
use zed_actions::assistant::InlineAssist;
|
||||
|
||||
pub use crate::prompt_store::*;
|
||||
pub use crate::prompts::*;
|
||||
use prompt_store::*;
|
||||
|
||||
pub fn init(cx: &mut App) {
|
||||
prompt_store::init(cx);
|
||||
|
||||
33
crates/prompt_store/Cargo.toml
Normal file
33
crates/prompt_store/Cargo.toml
Normal file
@@ -0,0 +1,33 @@
|
||||
[package]
|
||||
name = "prompt_store"
|
||||
version = "0.1.0"
|
||||
edition.workspace = true
|
||||
publish.workspace = true
|
||||
license = "GPL-3.0-or-later"
|
||||
|
||||
[lints]
|
||||
workspace = true
|
||||
|
||||
[lib]
|
||||
path = "src/prompt_store.rs"
|
||||
|
||||
[dependencies]
|
||||
anyhow.workspace = true
|
||||
assets.workspace = true
|
||||
chrono.workspace = true
|
||||
collections.workspace = true
|
||||
fs.workspace = true
|
||||
futures.workspace = true
|
||||
fuzzy.workspace = true
|
||||
gpui.workspace = true
|
||||
handlebars.workspace = true
|
||||
heed.workspace = true
|
||||
language.workspace = true
|
||||
log.workspace = true
|
||||
parking_lot.workspace = true
|
||||
paths.workspace = true
|
||||
rope.workspace = true
|
||||
serde.workspace = true
|
||||
text.workspace = true
|
||||
util.workspace = true
|
||||
uuid.workspace = true
|
||||
1
crates/prompt_store/LICENSE-GPL
Symbolic link
1
crates/prompt_store/LICENSE-GPL
Symbolic link
@@ -0,0 +1 @@
|
||||
../../LICENSE-GPL
|
||||
@@ -1,3 +1,5 @@
|
||||
mod prompts;
|
||||
|
||||
use anyhow::{anyhow, Result};
|
||||
use chrono::{DateTime, Utc};
|
||||
use collections::HashMap;
|
||||
@@ -10,6 +12,7 @@ use heed::{
|
||||
Database, RoTxn,
|
||||
};
|
||||
use parking_lot::RwLock;
|
||||
pub use prompts::*;
|
||||
use rope::Rope;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::{
|
||||
@@ -327,7 +327,10 @@ message Envelope {
|
||||
Fetch fetch = 305;
|
||||
GetRemotes get_remotes = 306;
|
||||
GetRemotesResponse get_remotes_response = 307;
|
||||
Pull pull = 308; // current max
|
||||
Pull pull = 308;
|
||||
|
||||
ApplyCodeActionKind apply_code_action_kind = 309;
|
||||
ApplyCodeActionKindResponse apply_code_action_kind_response = 310; // current max
|
||||
}
|
||||
|
||||
reserved 87 to 88;
|
||||
@@ -916,6 +919,16 @@ message ChannelBufferVersion {
|
||||
uint64 epoch = 3;
|
||||
}
|
||||
|
||||
message ApplyCodeActionKind {
|
||||
uint64 project_id = 1;
|
||||
string kind = 2;
|
||||
repeated uint64 buffer_ids = 3;
|
||||
}
|
||||
|
||||
message ApplyCodeActionKindResponse {
|
||||
ProjectTransaction transaction = 1;
|
||||
}
|
||||
|
||||
enum FormatTrigger {
|
||||
Save = 0;
|
||||
Manual = 1;
|
||||
|
||||
@@ -236,6 +236,8 @@ messages!(
|
||||
(ExpandAllForProjectEntryResponse, Foreground),
|
||||
(Follow, Foreground),
|
||||
(FollowResponse, Foreground),
|
||||
(ApplyCodeActionKind, Foreground),
|
||||
(ApplyCodeActionKindResponse, Foreground),
|
||||
(FormatBuffers, Foreground),
|
||||
(FormatBuffersResponse, Foreground),
|
||||
(FuzzySearchUsers, Foreground),
|
||||
@@ -472,6 +474,7 @@ request_messages!(
|
||||
(ExpandProjectEntry, ExpandProjectEntryResponse),
|
||||
(ExpandAllForProjectEntry, ExpandAllForProjectEntryResponse),
|
||||
(Follow, FollowResponse),
|
||||
(ApplyCodeActionKind, ApplyCodeActionKindResponse),
|
||||
(FormatBuffers, FormatBuffersResponse),
|
||||
(FuzzySearchUsers, UsersResponse),
|
||||
(GetCachedEmbeddings, GetCachedEmbeddingsResponse),
|
||||
@@ -610,6 +613,7 @@ entity_messages!(
|
||||
ExpandProjectEntry,
|
||||
ExpandAllForProjectEntry,
|
||||
FindSearchCandidates,
|
||||
ApplyCodeActionKind,
|
||||
FormatBuffers,
|
||||
GetCodeActions,
|
||||
GetCompletions,
|
||||
|
||||
@@ -814,7 +814,10 @@ impl BufferSearchBar {
|
||||
|
||||
self.configured_options =
|
||||
SearchOptions::from_settings(&EditorSettings::get_global(cx).search);
|
||||
if self.dismissed && self.configured_options != self.default_options {
|
||||
if self.dismissed
|
||||
&& (self.configured_options != self.default_options
|
||||
|| self.configured_options != self.search_options)
|
||||
{
|
||||
self.search_options = self.configured_options;
|
||||
self.default_options = self.configured_options;
|
||||
}
|
||||
|
||||
@@ -220,7 +220,7 @@ impl RenderOnce for AvatarAvailabilityIndicator {
|
||||
|
||||
// View this component preview using `workspace: open component-preview`
|
||||
impl ComponentPreview for Avatar {
|
||||
fn preview(_window: &mut Window, cx: &App) -> AnyElement {
|
||||
fn preview(_window: &mut Window, cx: &mut App) -> AnyElement {
|
||||
let example_avatar = "https://avatars.githubusercontent.com/u/1714999?v=4";
|
||||
|
||||
v_flex()
|
||||
|
||||
@@ -458,7 +458,7 @@ impl RenderOnce for Button {
|
||||
|
||||
// View this component preview using `workspace: open component-preview`
|
||||
impl ComponentPreview for Button {
|
||||
fn preview(_window: &mut Window, _cx: &App) -> AnyElement {
|
||||
fn preview(_window: &mut Window, _cx: &mut App) -> AnyElement {
|
||||
v_flex()
|
||||
.gap_6()
|
||||
.children(vec![
|
||||
|
||||
@@ -202,7 +202,7 @@ impl RenderOnce for IconButton {
|
||||
}
|
||||
|
||||
impl ComponentPreview for IconButton {
|
||||
fn preview(_window: &mut Window, _cx: &App) -> AnyElement {
|
||||
fn preview(_window: &mut Window, _cx: &mut App) -> AnyElement {
|
||||
v_flex()
|
||||
.gap_6()
|
||||
.children(vec![
|
||||
|
||||
@@ -144,7 +144,7 @@ impl RenderOnce for ToggleButton {
|
||||
}
|
||||
|
||||
impl ComponentPreview for ToggleButton {
|
||||
fn preview(_window: &mut Window, _cx: &App) -> AnyElement {
|
||||
fn preview(_window: &mut Window, _cx: &mut App) -> AnyElement {
|
||||
v_flex()
|
||||
.gap_6()
|
||||
.children(vec![
|
||||
|
||||
@@ -90,7 +90,7 @@ impl RenderOnce for ContentGroup {
|
||||
|
||||
// View this component preview using `workspace: open component-preview`
|
||||
impl ComponentPreview for ContentGroup {
|
||||
fn preview(_window: &mut Window, _cx: &App) -> AnyElement {
|
||||
fn preview(_window: &mut Window, _cx: &mut App) -> AnyElement {
|
||||
example_group(vec![
|
||||
single_example(
|
||||
"Default",
|
||||
|
||||
@@ -61,7 +61,7 @@ impl RenderOnce for Facepile {
|
||||
}
|
||||
|
||||
impl ComponentPreview for Facepile {
|
||||
fn preview(_window: &mut Window, _cx: &App) -> AnyElement {
|
||||
fn preview(_window: &mut Window, _cx: &mut App) -> AnyElement {
|
||||
let faces: [&'static str; 6] = [
|
||||
"https://avatars.githubusercontent.com/u/326587?s=60&v=4",
|
||||
"https://avatars.githubusercontent.com/u/2280405?s=60&v=4",
|
||||
|
||||
@@ -218,6 +218,7 @@ pub enum IconName {
|
||||
Github,
|
||||
Globe,
|
||||
GitBranch,
|
||||
GitBranchSmall,
|
||||
Hash,
|
||||
HistoryRerun,
|
||||
Indicator,
|
||||
@@ -492,7 +493,7 @@ impl RenderOnce for IconWithIndicator {
|
||||
|
||||
// View this component preview using `workspace: open component-preview`
|
||||
impl ComponentPreview for Icon {
|
||||
fn preview(_window: &mut Window, _cx: &App) -> AnyElement {
|
||||
fn preview(_window: &mut Window, _cx: &mut App) -> AnyElement {
|
||||
v_flex()
|
||||
.gap_6()
|
||||
.children(vec![
|
||||
|
||||
@@ -26,7 +26,7 @@ impl RenderOnce for DecoratedIcon {
|
||||
|
||||
// View this component preview using `workspace: open component-preview`
|
||||
impl ComponentPreview for DecoratedIcon {
|
||||
fn preview(_window: &mut Window, cx: &App) -> AnyElement {
|
||||
fn preview(_window: &mut Window, cx: &mut App) -> AnyElement {
|
||||
let decoration_x = IconDecoration::new(
|
||||
IconDecorationKind::X,
|
||||
cx.theme().colors().surface_background,
|
||||
|
||||
@@ -207,7 +207,7 @@ impl RenderOnce for KeybindingHint {
 
 // View this component preview using `workspace: open component-preview`
 impl ComponentPreview for KeybindingHint {
-    fn preview(window: &mut Window, cx: &App) -> AnyElement {
+    fn preview(window: &mut Window, cx: &mut App) -> AnyElement {
         let enter_fallback = gpui::KeyBinding::new("enter", menu::Confirm, None);
         let enter = KeyBinding::for_action(&menu::Confirm, window, cx)
             .unwrap_or(KeyBinding::new(enter_fallback, cx));
@@ -199,7 +199,7 @@ mod label_preview {
 
 // View this component preview using `workspace: open component-preview`
 impl ComponentPreview for Label {
-    fn preview(_window: &mut Window, _cx: &App) -> AnyElement {
+    fn preview(_window: &mut Window, _cx: &mut App) -> AnyElement {
         v_flex()
             .gap_6()
             .children(vec![
@@ -173,7 +173,7 @@ impl RenderOnce for Tab {
 
 // View this component preview using `workspace: open component-preview`
 impl ComponentPreview for Tab {
-    fn preview(_window: &mut Window, _cx: &App) -> AnyElement {
+    fn preview(_window: &mut Window, _cx: &mut App) -> AnyElement {
         v_flex()
             .gap_6()
             .children(vec![example_group_with_title(
@@ -153,7 +153,7 @@ where
 
 // View this component preview using `workspace: open component-preview`
 impl ComponentPreview for Table {
-    fn preview(_window: &mut Window, _cx: &App) -> AnyElement {
+    fn preview(_window: &mut Window, _cx: &mut App) -> AnyElement {
         v_flex()
             .gap_6()
             .children(vec![
@@ -510,7 +510,7 @@ impl RenderOnce for SwitchWithLabel {
 
 // View this component preview using `workspace: open component-preview`
 impl ComponentPreview for Checkbox {
-    fn preview(_window: &mut Window, _cx: &App) -> AnyElement {
+    fn preview(_window: &mut Window, _cx: &mut App) -> AnyElement {
         v_flex()
             .gap_6()
             .children(vec![
@@ -595,7 +595,7 @@ impl ComponentPreview for Checkbox {
 
 // View this component preview using `workspace: open component-preview`
 impl ComponentPreview for Switch {
-    fn preview(_window: &mut Window, _cx: &App) -> AnyElement {
+    fn preview(_window: &mut Window, _cx: &mut App) -> AnyElement {
         v_flex()
             .gap_6()
             .children(vec![
@@ -658,7 +658,7 @@ impl ComponentPreview for Switch {
 
 // View this component preview using `workspace: open component-preview`
 impl ComponentPreview for CheckboxWithLabel {
-    fn preview(_window: &mut Window, _cx: &App) -> AnyElement {
+    fn preview(_window: &mut Window, _cx: &mut App) -> AnyElement {
         v_flex()
             .gap_6()
             .children(vec![example_group_with_title(
@@ -224,7 +224,7 @@ impl Render for LinkPreview {
 
 // View this component preview using `workspace: open component-preview`
 impl ComponentPreview for Tooltip {
-    fn preview(_window: &mut Window, _cx: &App) -> AnyElement {
+    fn preview(_window: &mut Window, _cx: &mut App) -> AnyElement {
         example_group(vec![single_example(
             "Text only",
             Button::new("delete-example", "Delete")
@@ -235,7 +235,7 @@ impl Headline {
 
 // View this component preview using `workspace: open component-preview`
 impl ComponentPreview for Headline {
-    fn preview(_window: &mut Window, _cx: &App) -> AnyElement {
+    fn preview(_window: &mut Window, _cx: &mut App) -> AnyElement {
         v_flex()
             .gap_1()
             .children(vec![
@@ -214,13 +214,6 @@ impl Member {
         Member::Axis(PaneAxis::new(axis, members))
     }
 
-    fn contains(&self, needle: &Entity<Pane>) -> bool {
-        match self {
-            Member::Axis(axis) => axis.members.iter().any(|member| member.contains(needle)),
-            Member::Pane(pane) => pane == needle,
-        }
-    }
-
     fn first_pane(&self) -> Entity<Pane> {
         match self {
             Member::Axis(axis) => axis.members[0].first_pane(),
@@ -702,7 +695,7 @@ impl PaneAxis {
                 cx.entity().downgrade(),
             )
             .children(self.members.iter().enumerate().map(|(ix, member)| {
-                if member.contains(active_pane) {
+                if matches!(member, Member::Pane(pane) if pane == active_pane) {
                     active_pane_ix = Some(ix);
                 }
                 member
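The two pane_group hunks above change what counts as the "active" member: the recursive `contains` helper is removed, and the render path now only treats a direct `Member::Pane` entry as the active one. A standalone sketch of the behavioral difference, using simplified stand-in types rather than the workspace crate's real ones:

```rust
// Simplified stand-ins for the workspace crate's Pane / Member types.
#[derive(PartialEq)]
struct Pane(u32);

enum Member {
    Axis(Vec<Member>),
    Pane(Pane),
}

impl Member {
    // The removed helper: true if the pane appears anywhere in the subtree.
    fn contains(&self, needle: &Pane) -> bool {
        match self {
            Member::Axis(members) => members.iter().any(|member| member.contains(needle)),
            Member::Pane(pane) => pane == needle,
        }
    }
}

fn main() {
    let active = Pane(1);
    let nested = Member::Axis(vec![Member::Pane(Pane(1))]);

    // Old check: an axis that merely nests the active pane was treated as active.
    assert!(nested.contains(&active));

    // New check: only a direct Member::Pane entry equal to the active pane matches.
    assert!(!matches!(&nested, Member::Pane(pane) if *pane == active));
}
```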
@@ -90,7 +90,7 @@ profiling.workspace = true
 project.workspace = true
 project_panel.workspace = true
 project_symbols.workspace = true
-prompt_library.workspace = true
+prompt_store.workspace = true
 proto.workspace = true
 recent_projects.workspace = true
 release_channel.workspace = true
@@ -116,7 +116,6 @@ task.workspace = true
 tasks_ui.workspace = true
 telemetry.workspace = true
 telemetry_events.workspace = true
-tempfile.workspace = true
 terminal_view.workspace = true
 theme.workspace = true
 theme_extension.workspace = true
@@ -131,7 +130,6 @@ uuid.workspace = true
 vim.workspace = true
 vim_mode_setting.workspace = true
 welcome.workspace = true
-which.workspace = true
 workspace.workspace = true
 zed_actions.workspace = true
 zeta.workspace = true
@@ -23,7 +23,7 @@ use gpui::{App, AppContext as _, Application, AsyncApp, UpdateGlobal as _};
 use gpui_tokio::Tokio;
 use http_client::{read_proxy_from_env, Uri};
 use language::LanguageRegistry;
-use prompt_library::PromptBuilder;
+use prompt_store::PromptBuilder;
 use reqwest_client::ReqwestClient;
 
 use assets::Assets;
@@ -256,11 +256,9 @@ fn main() {
     };
     log::info!("Using git binary path: {:?}", git_binary_path);
 
-    let git_askpass_path = zed::git_askpass::get_askpass_dir();
     let fs = Arc::new(RealFs::new(
         git_hosting_provider_registry.clone(),
         git_binary_path,
-        git_askpass_path.clone(),
     ));
     let user_settings_file_rx = watch_config_file(
         &app.background_executor(),
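With the askpass module gone, the `RealFs::new` call in this hunk drops back to two arguments. A hedged, standalone sketch of the arity change (placeholder types only; the real constructor lives in Zed's `fs` crate):

```rust
use std::{path::PathBuf, sync::Arc};

// Placeholder standing in for fs::RealFs; field names here are illustrative.
struct RealFs {
    _git_hosting_provider_registry: Arc<()>,
    _git_binary_path: Option<PathBuf>,
}

impl RealFs {
    // After this change the constructor no longer receives an askpass-script path.
    fn new(registry: Arc<()>, git_binary_path: Option<PathBuf>) -> Self {
        Self {
            _git_hosting_provider_registry: registry,
            _git_binary_path: git_binary_path,
        }
    }
}

fn main() {
    let fs = Arc::new(RealFs::new(Arc::new(()), None));
    println!("RealFs constructed; refcount = {}", Arc::strong_count(&fs));
}
```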
@@ -303,7 +301,6 @@ fn main() {
     });
 
     app.run(move |cx| {
-        zed::git_askpass::setup_git_askpass(git_askpass_path, cx);
         release_channel::init(app_version, cx);
         gpui_tokio::init(cx);
         if let Some(app_commit_sha) = app_commit_sha {
@@ -1,5 +1,4 @@
 mod app_menus;
-pub mod git_askpass;
 pub mod inline_completion_registry;
 #[cfg(any(target_os = "linux", target_os = "freebsd"))]
 pub(crate) mod linux_prompts;
@@ -38,7 +37,7 @@ use outline_panel::OutlinePanel;
 use paths::{local_settings_file_relative_path, local_tasks_file_relative_path};
 use project::{DirectoryLister, ProjectItem};
 use project_panel::ProjectPanel;
-use prompt_library::PromptBuilder;
+use prompt_store::PromptBuilder;
 use quick_action_bar::QuickActionBar;
 use recent_projects::open_ssh_project;
 use release_channel::{AppCommitSha, ReleaseChannel};
@@ -4113,6 +4112,8 @@ mod tests {
                 | "vim::PushLiteral"
                 | "vim::Number"
                 | "vim::SelectRegister"
+                | "git::StageAndNext"
+                | "git::UnstageAndNext"
                 | "terminal::SendText"
                 | "terminal::SendKeystroke"
                 | "app_menu::OpenApplicationMenu"
@@ -1,137 +0,0 @@
-use std::{os::unix::fs::PermissionsExt, path::PathBuf};
-
-use anyhow::{anyhow, Context, Result};
-use gpui::AsyncApp;
-use smol::{
-    io::{AsyncWriteExt as _, BufReader},
-    net::unix::UnixListener,
-};
-use ui::{App, Window};
-use util::{maybe, ResultExt as _};
-use workspace::Workspace;
-
-pub fn get_askpass_dir() -> PathBuf {
-    // TODO: bundle this script instead of creating it
-    let temp_dir = tempfile::Builder::new()
-        .prefix("zed-git-askpass-session")
-        .tempdir()
-        .unwrap();
-
-    // Create a domain socket listener to handle requests from the askpass program.
-    let askpass_socket = temp_dir.path().join("git_askpass.sock");
-
-    // Create an askpass script that communicates back to this process.
-    let askpass_script = format!(
-        "{shebang}\n{print_args} | {nc} -U {askpass_socket} 2> /dev/null \n",
-        // on macOS `brew install netcat` provides the GNU netcat implementation
-        // which does not support -U.
-        nc = if cfg!(target_os = "macos") {
-            "/usr/bin/nc"
-        } else {
-            "nc"
-        },
-        askpass_socket = askpass_socket.display(),
-        print_args = "printf '%s\\0' \"$@\"",
-        shebang = "#!/bin/sh",
-    );
-    let askpass_script_path = temp_dir.path().join("askpass.sh");
-    std::fs::write(&askpass_script_path, &askpass_script).unwrap();
-    std::fs::set_permissions(&askpass_script_path, std::fs::Permissions::from_mode(0o755)).unwrap();
-
-    PathBuf::from(askpass_script)
-}
-
-pub fn setup_git_askpass(askpasss_file: PathBuf, cx: &mut App) {
-    maybe!({
-        anyhow::ensure!(
-            which::which("nc").is_ok(),
-            "Cannot find `nc` command (netcat), which is required to connect over SSH."
-        );
-
-        // TODO: REMOVE THIS ONCE WE HAVE A WAY OF BUNDLING AN ASKPASS SCRIPT
-        let askpass_socket = askpasss_file.parent().unwrap().join("git_askpass.sock");
-
-        let listener =
-            UnixListener::bind(&askpass_socket).context("failed to create askpass socket")?;
-
-        cx.spawn({
-            |mut cx| async move {
-                while let Ok((mut stream, _)) = listener.accept().await {
-                    let mut buffer = Vec::new();
-                    let mut reader = BufReader::new(&mut stream);
-                    if smol::io::AsyncBufReadExt::read_until(&mut reader, b'\0', &mut buffer)
-                        .await
-                        .is_err()
-                    {
-                        buffer.clear();
-                    }
-                    let password_prompt = String::from_utf8_lossy(&buffer);
-                    if let Some(Ok(password)) = ask_password(&password_prompt, &mut cx)
-                        .await
-                        .context("failed to get ssh password")
-                        .log_err()
-                    {
-                        stream.write_all(password.as_bytes()).await.log_err();
-                    } else {
-                        stream.write("\n".as_bytes()).await.log_err();
-                    }
-
-                    stream.flush().await.log_err();
-                    stream.close().await.log_err();
-                }
-            }
-        })
-        .detach();
-
-        Ok(())
-    })
-    .log_err();
-}
-
-async fn ask_password(prompt: &str, cx: &mut AsyncApp) -> Option<Result<String>> {
-    let mut workspace = get_workspace(cx, |window| window.is_window_active());
-    if workspace.is_none() {
-        workspace = get_workspace(cx, |_| true);
-    }
-
-    let Some(workspace) = workspace else {
-        return None;
-    };
-
-    // DO THINGS WITH THE WORKSPACE
-    // pop the askpass modal, get the output out of a oneshot, and we're good to go
-    None
-}
-
-fn get_workspace(
-    cx: &mut AsyncApp,
-    predicate: impl Fn(&mut Window) -> bool,
-) -> Option<gpui::Entity<Workspace>> {
-    let workspace = cx
-        .update(|cx| {
-            for window in cx.windows() {
-                let workspace = window
-                    .update(cx, |view, window, _| {
-                        if predicate(window) {
-                            if let Ok(workspace) = view.downcast::<Workspace>() {
-                                return Some(workspace);
-                            }
-                        }
-                        return None;
-                    })
-                    .ok()
-                    .flatten();
-
-                if let Some(workspace) = workspace {
-                    return Some(workspace);
-                } else {
-                    continue;
-                }
-            }
-
-            None
-        })
-        .ok()?;
-
-    workspace
-}
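The deletion above removes the prototype `git_askpass` module, which wrote a temporary `askpass.sh` that piped git's credential prompt over a Unix socket back into the editor. For context, git's askpass contract itself is simple: the program named by `GIT_ASKPASS` (or `SSH_ASKPASS`) is invoked with the prompt as its first argument and must print the secret to stdout. A minimal illustrative helper, not part of this change, with an invented `EXAMPLE_SECRET` variable standing in for a real credential source:

```rust
// Minimal askpass-style helper: git passes the prompt as argv[1] and reads the
// secret from stdout. Illustrative only; a real helper would fetch the credential
// from a keychain or an interactive UI rather than an environment variable.
use std::env;
use std::io::{self, Write};

fn main() -> io::Result<()> {
    let prompt = env::args().nth(1).unwrap_or_default();
    eprintln!("askpass prompt: {prompt}");

    // EXAMPLE_SECRET is a made-up variable name for this sketch.
    let secret = env::var("EXAMPLE_SECRET").unwrap_or_default();

    let mut stdout = io::stdout();
    stdout.write_all(secret.as_bytes())?;
    stdout.write_all(b"\n")?;
    Ok(())
}
```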
@@ -1,10 +1,22 @@
 #!/usr/bin/env bash
 
-set -e
+set -euo pipefail
 
-CONTAINER_ID=$(docker run -d --rm -it -v ~/.mitmproxy:/home/mitmproxy/.mitmproxy -p 9876:8080 mitmproxy/mitmproxy mitmdump)
+if command -v docker >/dev/null 2>&1; then
+    ENGINE="docker"
+elif command -v podman >/dev/null 2>&1; then
+    ENGINE="podman"
+else
+    echo "Neither Docker nor Podman found. Please install one of them."
+    exit 1
+fi
+if [ ! -d ~/.mitmproxy ]; then
+    mkdir -p ~/.mitmproxy
+fi
 
-trap 'docker stop '"$CONTAINER_ID"' 1> /dev/null || true; exit 1' SIGINT
+CONTAINER_ID="$(${ENGINE} run -d --rm -it -v ~/.mitmproxy:/home/mitmproxy/.mitmproxy -p 9876:8080 mitmproxy/mitmproxy mitmdump)"
+
+trap "${ENGINE} stop \"$CONTAINER_ID\" 1> /dev/null || true; exit 1" SIGINT
 
 echo "Add the root certificate created in ~/.mitmproxy to your certificate chain for HTTP"
 echo "on macOS:"
@@ -15,4 +27,4 @@ read
 http_proxy=http://localhost:9876 cargo run
 
 # Clean up detached proxy after running
-docker stop "$CONTAINER_ID" 2>/dev/null || true
+${ENGINE} stop "${CONTAINER_ID}" 2>/dev/null || true