Compare commits

..

21 Commits

Author SHA1 Message Date
David
8e79ab6f6a lazy scroll start 2025-11-25 12:36:24 +01:00
David
ecddbd470f load only on click and first n items 2025-11-24 11:21:44 +01:00
David
42787904b0 do not greedily load multibuff if paths > 100 2025-11-21 12:58:42 +01:00
David
2bdc385fdf some notes where to add things 2025-11-21 12:40:39 +01:00
David
615803646f many much wip 2025-11-19 18:11:22 +01:00
David
101bbebe52 unfreezes diff panel at multibuffer loading speed cost 2025-11-19 11:27:04 +01:00
David
d6af4d3cdd restore more things 2025-11-18 17:32:39 +01:00
David
a32319374d cant type anymore? 2025-11-18 17:26:14 +01:00
David
716937c9c9 Merge branch 'main' into perf/project-diff2 2025-11-18 16:12:50 +01:00
Lukas Wirth
097024d46f util: Use process spawn helpers in more places (#42976)
Release Notes:

- N/A *or* Added/Fixed/Improved ...
2025-11-18 14:31:39 +00:00
Ben Brandt
f1c2afdee0 Update codex docs to include configuration for third-party providers (#42973)
Release Notes:

- N/A
2025-11-18 13:50:59 +00:00
Jakub Konka
ea120dfe18 Revert "git: Remove JobStatus from PendingOp in favour of in-flight p… (#42970)
…runing (#42955)"

This reverts commit 696fdd8fed.

Release Notes:

- N/A
2025-11-18 13:30:40 +00:00
Lukas Wirth
d2988ffc77 vim: Fix snapshot out of bounds indexing (#42969)
Fixes ZED-38X

Release Notes:

- N/A
2025-11-18 13:02:40 +00:00
Engin Açıkgöz
f17d2c92b6 terminal_view: Fix terminal opening in root directory when editing single file worktree (#42953)
Fixes #42945

## Problem
When opening a single file via command line (e.g., `zed
~/Downloads/file.txt`), the terminal panel was opening in the root
directory (/) instead of the file's directory.

## Root Cause
The code only checked for active project directory, which returns None
when a single file is opened. Additionally, file worktrees weren't
handling parent directory lookup.

## Solution
Added fallback logic to use the first project directory when there's no
active entry, and made file worktrees return their parent directory
instead of None.

## Testing
- All existing tests pass
- Added test coverage for file worktree scenarios
- Manually tested with `zed ~/Downloads/file.txt` - terminal now opens
in correct directory

This improves the user experience for users who frequently open single
files from the command line.

## Release Notes

- Fixed terminal opening in root directory when editing single files
from the command line
2025-11-18 13:37:48 +01:00
David
6ee35cb43e rewrote project diff buffer loading. Now responsive on start get slow after a while as the mb loads full 2025-11-18 13:29:45 +01:00
Antonio Scandurra
c1d9dc369c Try reducing flakiness of fs-event tests by bumping timeout to 4s on CI (#42960)
Release Notes:

- N/A
2025-11-18 11:00:02 +00:00
David
d76c326ff5 somewhat responsive! omg 2025-11-17 14:43:01 +01:00
David
6effe1f48e sleepy time 2025-11-12 12:18:19 +01:00
David
8d3153abd4 ugly code prototyping only, don't get this into prod 2025-11-12 11:30:52 +01:00
Jakub Konka
3a301afbc6 Enable all debug info for easier profiling 2025-11-10 22:38:40 +01:00
Jakub Konka
78add792c7 project_diff: Load buffers in the background 2025-11-10 22:01:38 +01:00
34 changed files with 682 additions and 326 deletions

View File

@@ -16,9 +16,7 @@ rustflags = ["-D", "warnings"]
debug = "limited"
# Use Mold on Linux, because it's faster than GNU ld and LLD.
#
# We no longer set this in the default `config.toml` so that developers can opt in to Wild, which
# is faster than Mold, in their own ~/.cargo/config.toml.
# We don't use Wild in CI as it's not production-ready.
[target.x86_64-unknown-linux-gnu]
linker = "clang"
rustflags = ["-C", "link-arg=-fuse-ld=mold"]

View File

@@ -8,6 +8,14 @@ perf-test = ["test", "--profile", "release-fast", "--lib", "--bins", "--tests",
# Keep similar flags here to share some ccache
perf-compare = ["run", "--profile", "release-fast", "-p", "perf", "--config", "target.'cfg(true)'.rustflags=[\"--cfg\", \"perf_enabled\"]", "--", "compare"]
# [target.x86_64-unknown-linux-gnu]
# linker = "clang"
# rustflags = ["-C", "link-arg=-fuse-ld=mold"]
[target.aarch64-unknown-linux-gnu]
linker = "clang"
rustflags = ["-C", "link-arg=-fuse-ld=mold"]
[target.'cfg(target_os = "windows")']
rustflags = [
"--cfg",

16
Cargo.lock generated
View File

@@ -2617,26 +2617,23 @@ dependencies = [
[[package]]
name = "calloop"
version = "0.13.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b99da2f8558ca23c71f4fd15dc57c906239752dd27ff3c00a1d56b685b7cbfec"
version = "0.14.3"
dependencies = [
"bitflags 2.9.4",
"log",
"polling",
"rustix 0.38.44",
"rustix 1.1.2",
"slab",
"thiserror 1.0.69",
"tracing",
]
[[package]]
name = "calloop-wayland-source"
version = "0.3.0"
version = "0.4.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "95a66a987056935f7efce4ab5668920b5d0dac4a7c99991a67395f13702ddd20"
checksum = "138efcf0940a02ebf0cc8d1eff41a1682a46b431630f4c52450d6265876021fa"
dependencies = [
"calloop",
"rustix 0.38.44",
"rustix 1.1.2",
"wayland-backend",
"wayland-client",
]
@@ -10030,6 +10027,7 @@ name = "miniprofiler_ui"
version = "0.1.0"
dependencies = [
"gpui",
"log",
"serde_json",
"smol",
"util",

View File

@@ -784,6 +784,7 @@ features = [
notify = { git = "https://github.com/zed-industries/notify.git", rev = "b4588b2e5aee68f4c0e100f140e808cbce7b1419" }
notify-types = { git = "https://github.com/zed-industries/notify.git", rev = "b4588b2e5aee68f4c0e100f140e808cbce7b1419" }
windows-capture = { git = "https://github.com/zed-industries/windows-capture.git", rev = "f0d6c1b6691db75461b732f6d5ff56eed002eeb9" }
calloop = { path = "/home/davidsk/tmp/calloop" }
[profile.dev]
split-debuginfo = "unpacked"
@@ -860,7 +861,7 @@ ui_input = { codegen-units = 1 }
zed_actions = { codegen-units = 1 }
[profile.release]
debug = "limited"
debug = "full"
lto = "thin"
codegen-units = 1

View File

@@ -10,8 +10,8 @@ use paths::remote_servers_dir;
use release_channel::{AppCommitSha, ReleaseChannel};
use serde::{Deserialize, Serialize};
use settings::{RegisterSetting, Settings, SettingsStore};
use smol::fs::File;
use smol::{fs, io::AsyncReadExt};
use smol::{fs::File, process::Command};
use std::mem;
use std::{
env::{
@@ -23,6 +23,7 @@ use std::{
sync::Arc,
time::Duration,
};
use util::command::new_smol_command;
use workspace::Workspace;
const SHOULD_SHOW_UPDATE_NOTIFICATION_KEY: &str = "auto-updater-should-show-updated-notification";
@@ -121,7 +122,7 @@ impl Drop for MacOsUnmounter<'_> {
let mount_path = mem::take(&mut self.mount_path);
self.background_executor
.spawn(async move {
let unmount_output = Command::new("hdiutil")
let unmount_output = new_smol_command("hdiutil")
.args(["detach", "-force"])
.arg(&mount_path)
.output()
@@ -799,7 +800,7 @@ async fn install_release_linux(
.await
.context("failed to create directory into which to extract update")?;
let output = Command::new("tar")
let output = new_smol_command("tar")
.arg("-xzf")
.arg(&downloaded_tar_gz)
.arg("-C")
@@ -834,7 +835,7 @@ async fn install_release_linux(
to = PathBuf::from(prefix);
}
let output = Command::new("rsync")
let output = new_smol_command("rsync")
.args(["-av", "--delete"])
.arg(&from)
.arg(&to)
@@ -866,7 +867,7 @@ async fn install_release_macos(
let mut mounted_app_path: OsString = mount_path.join(running_app_filename).into();
mounted_app_path.push("/");
let output = Command::new("hdiutil")
let output = new_smol_command("hdiutil")
.args(["attach", "-nobrowse"])
.arg(&downloaded_dmg)
.arg("-mountroot")
@@ -886,7 +887,7 @@ async fn install_release_macos(
background_executor: cx.background_executor(),
};
let output = Command::new("rsync")
let output = new_smol_command("rsync")
.args(["-av", "--delete"])
.arg(&mounted_app_path)
.arg(&running_app_path)
@@ -917,7 +918,7 @@ async fn cleanup_windows() -> Result<()> {
}
async fn install_release_windows(downloaded_installer: PathBuf) -> Result<Option<PathBuf>> {
let output = Command::new(downloaded_installer)
let output = new_smol_command(downloaded_installer)
.arg("/verysilent")
.arg("/update=true")
.arg("!desktopicon")

View File

@@ -23907,6 +23907,10 @@ impl EditorSnapshot {
self.scroll_anchor.scroll_position(&self.display_snapshot)
}
pub fn scroll_near_end(&self) -> bool {
self.scroll_anchor.near_end(&self.display_snapshot)
}
fn gutter_dimensions(
&self,
font_id: FontId,

View File

@@ -9055,6 +9055,9 @@ impl Element for EditorElement {
)
});
if snapshot.scroll_near_end() {
dbg!("near end!");
}
let mut scroll_position = snapshot.scroll_position();
// The scroll position is a fractional point, the whole number of which represents
// the top of the window in terms of display rows.

View File

@@ -46,12 +46,20 @@ impl ScrollAnchor {
}
}
pub fn near_end(&self, snapshot: &DisplaySnapshot) -> bool {
let editor_length = snapshot.max_point().row().as_f64();
let scroll_top = self.anchor.to_display_point(snapshot).row().as_f64();
(scroll_top - editor_length).abs() < 300.0
}
pub fn scroll_position(&self, snapshot: &DisplaySnapshot) -> gpui::Point<ScrollOffset> {
self.offset.apply_along(Axis::Vertical, |offset| {
if self.anchor == Anchor::min() {
0.
} else {
dbg!(snapshot.max_point().row().as_f64());
let scroll_top = self.anchor.to_display_point(snapshot).row().as_f64();
dbg!(scroll_top, offset);
(offset + scroll_top).max(0.)
}
})
@@ -243,6 +251,11 @@ impl ScrollManager {
}
}
};
let near_end = self.anchor.near_end(map);
// TODO debounce here
if near_end {
cx.read();
}
let scroll_top_row = DisplayRow(scroll_top as u32);
let scroll_top_buffer_point = map

View File

@@ -395,19 +395,19 @@ mod tests {
thread::spawn(move || stream.run(move |events| tx.send(events.to_vec()).is_ok()));
fs::write(path.join("new-file"), "").unwrap();
let events = rx.recv_timeout(Duration::from_secs(2)).unwrap();
let events = rx.recv_timeout(timeout()).unwrap();
let event = events.last().unwrap();
assert_eq!(event.path, path.join("new-file"));
assert!(event.flags.contains(StreamFlags::ITEM_CREATED));
fs::remove_file(path.join("existing-file-5")).unwrap();
let mut events = rx.recv_timeout(Duration::from_secs(2)).unwrap();
let mut events = rx.recv_timeout(timeout()).unwrap();
let mut event = events.last().unwrap();
// we see this duplicate about 1/100 test runs.
if event.path == path.join("new-file")
&& event.flags.contains(StreamFlags::ITEM_CREATED)
{
events = rx.recv_timeout(Duration::from_secs(2)).unwrap();
events = rx.recv_timeout(timeout()).unwrap();
event = events.last().unwrap();
}
assert_eq!(event.path, path.join("existing-file-5"));
@@ -440,13 +440,13 @@ mod tests {
});
fs::write(path.join("new-file"), "").unwrap();
let events = rx.recv_timeout(Duration::from_secs(2)).unwrap();
let events = rx.recv_timeout(timeout()).unwrap();
let event = events.last().unwrap();
assert_eq!(event.path, path.join("new-file"));
assert!(event.flags.contains(StreamFlags::ITEM_CREATED));
fs::remove_file(path.join("existing-file-5")).unwrap();
let events = rx.recv_timeout(Duration::from_secs(2)).unwrap();
let events = rx.recv_timeout(timeout()).unwrap();
let event = events.last().unwrap();
assert_eq!(event.path, path.join("existing-file-5"));
assert!(event.flags.contains(StreamFlags::ITEM_REMOVED));
@@ -477,11 +477,11 @@ mod tests {
});
fs::write(path.join("new-file"), "").unwrap();
assert_eq!(rx.recv_timeout(Duration::from_secs(2)).unwrap(), "running");
assert_eq!(rx.recv_timeout(timeout()).unwrap(), "running");
// Dropping the handle causes `EventStream::run` to return.
drop(handle);
assert_eq!(rx.recv_timeout(Duration::from_secs(2)).unwrap(), "stopped");
assert_eq!(rx.recv_timeout(timeout()).unwrap(), "stopped");
}
#[test]
@@ -500,11 +500,14 @@ mod tests {
}
fn flush_historical_events() {
let duration = if std::env::var("CI").is_ok() {
Duration::from_secs(2)
thread::sleep(timeout());
}
fn timeout() -> Duration {
if std::env::var("CI").is_ok() {
Duration::from_secs(4)
} else {
Duration::from_millis(500)
};
thread::sleep(duration);
}
}
}

View File

@@ -30,10 +30,11 @@ use git::{
TrashUntrackedFiles, UnstageAll,
};
use gpui::{
Action, AsyncApp, AsyncWindowContext, ClickEvent, Corner, DismissEvent, Entity, EventEmitter,
FocusHandle, Focusable, KeyContext, ListHorizontalSizingBehavior, ListSizingBehavior,
MouseButton, MouseDownEvent, Point, PromptLevel, ScrollStrategy, Subscription, Task,
UniformListScrollHandle, WeakEntity, actions, anchored, deferred, uniform_list,
Action, AppContext, AsyncApp, AsyncWindowContext, ClickEvent, Corner, DismissEvent, Entity,
EventEmitter, FocusHandle, Focusable, KeyContext, ListHorizontalSizingBehavior,
ListSizingBehavior, MouseButton, MouseDownEvent, Point, PromptLevel, ScrollStrategy,
Subscription, Task, UniformListScrollHandle, WeakEntity, actions, anchored, deferred,
uniform_list,
};
use itertools::Itertools;
use language::{Buffer, File};
@@ -49,7 +50,7 @@ use panel::{
};
use project::{
Fs, Project, ProjectPath,
git_store::{GitStoreEvent, Repository, RepositoryEvent, RepositoryId},
git_store::{GitStoreEvent, Repository, RepositoryEvent, RepositoryId, pending_op},
};
use serde::{Deserialize, Serialize};
use settings::{Settings, SettingsStore, StatusStyle};
@@ -311,6 +312,9 @@ pub struct GitPanel {
bulk_staging: Option<BulkStaging>,
stash_entries: GitStash,
_settings_subscription: Subscription,
/// On clicking an entry in the git panel, this will
/// trigger loading it.
open_diff_task: Option<Task<()>>,
}
#[derive(Clone, Debug, PartialEq, Eq)]
@@ -471,6 +475,7 @@ impl GitPanel {
bulk_staging: None,
stash_entries: Default::default(),
_settings_subscription,
open_diff_task: None,
};
this.schedule_update(window, cx);
@@ -750,11 +755,25 @@ impl GitPanel {
fn open_diff(&mut self, _: &menu::Confirm, window: &mut Window, cx: &mut Context<Self>) {
maybe!({
let entry = self.entries.get(self.selected_entry?)?.status_entry()?;
let entry = self
.entries
.get(self.selected_entry?)?
.status_entry()?
.clone();
let workspace = self.workspace.upgrade()?;
let git_repo = self.active_repository.as_ref()?;
let git_repo = self.active_repository.as_ref()?.clone();
let focus_handle = self.focus_handle.clone();
if let Some(project_diff) = workspace.read(cx).active_item_as::<ProjectDiff>(cx)
// let panel = panel.upgrade().unwrap(); // TODO FIXME
// cx.read_entity(&panel, |panel, cx| {
// panel
// })
// .unwrap(); // TODO FIXME
// how do we get the projectdiff here?
let project_diff = if let Some(project_diff) =
workspace.read(cx).active_item_as::<ProjectDiff>(cx)
&& let Some(project_path) = project_diff.read(cx).active_path(cx)
&& Some(&entry.repo_path)
== git_repo
@@ -764,16 +783,20 @@ impl GitPanel {
{
project_diff.focus_handle(cx).focus(window);
project_diff.update(cx, |project_diff, cx| project_diff.autoscroll(cx));
return None;
};
self.workspace
.update(cx, |workspace, cx| {
ProjectDiff::deploy_at(workspace, Some(entry.clone()), window, cx);
project_diff
} else {
workspace.update(cx, |workspace, cx| {
ProjectDiff::deploy_at(workspace, Some(entry.clone()), window, cx)
})
.ok();
self.focus_handle.focus(window);
};
focus_handle.focus(window); // TODO: should we focus before the file is loaded or wait for that?
let project_diff = project_diff.downgrade();
self.open_diff_task = Some(cx.spawn_in(window, async move |_, cx| {
ProjectDiff::refresh_one(project_diff, entry.repo_path, entry.status, cx)
.await
.unwrap(); // TODO FIXME
}));
Some(())
});
}
@@ -2658,6 +2681,15 @@ impl GitPanel {
let is_new = entry.status.is_created();
let staging = entry.status.staging();
if let Some(pending) = repo.pending_ops_for_path(&entry.repo_path)
&& pending
.ops
.iter()
.any(|op| op.git_status == pending_op::GitStatus::Reverted && op.finished())
{
continue;
}
let entry = GitStatusEntry {
repo_path: entry.repo_path.clone(),
status: entry.status,
@@ -3851,6 +3883,7 @@ impl GitPanel {
})
}
// context menu
fn deploy_entry_context_menu(
&mut self,
position: Point<Pixels>,
@@ -4076,6 +4109,7 @@ impl GitPanel {
this.selected_entry = Some(ix);
cx.notify();
if event.modifiers().secondary() {
// the click handler
this.open_file(&Default::default(), window, cx)
} else {
this.open_diff(&Default::default(), window, cx);

View File

@@ -7,19 +7,21 @@ use crate::{
use anyhow::{Context as _, Result, anyhow};
use buffer_diff::{BufferDiff, DiffHunkSecondaryStatus};
use collections::{HashMap, HashSet};
use db::smol::stream::StreamExt;
use editor::{
Addon, Editor, EditorEvent, SelectionEffects,
actions::{GoToHunk, GoToPreviousHunk},
multibuffer_context_lines,
scroll::Autoscroll,
};
use futures::stream::FuturesUnordered;
use git::{
Commit, StageAll, StageAndNext, ToggleStaged, UnstageAll, UnstageAndNext,
repository::{Branch, RepoPath, Upstream, UpstreamTracking, UpstreamTrackingStatus},
status::FileStatus,
};
use gpui::{
Action, AnyElement, AnyView, App, AppContext as _, AsyncWindowContext, Entity, EventEmitter,
Action, AnyElement, AnyView, App, AppContext, AsyncWindowContext, Entity, EventEmitter,
FocusHandle, Focusable, Render, Subscription, Task, WeakEntity, actions,
};
use language::{Anchor, Buffer, Capability, OffsetRangeExt};
@@ -27,17 +29,20 @@ use multi_buffer::{MultiBuffer, PathKey};
use project::{
Project, ProjectPath,
git_store::{
Repository,
self, Repository, StatusEntry,
branch_diff::{self, BranchDiffEvent, DiffBase},
},
};
use settings::{Settings, SettingsStore};
use std::any::{Any, TypeId};
use std::ops::Range;
use std::sync::Arc;
use std::{
any::{Any, TypeId},
sync::Arc,
};
use theme::ActiveTheme;
use ui::{KeyBinding, Tooltip, prelude::*, vertical_divider};
use util::{ResultExt as _, rel_path::RelPath};
use util::{ResultExt, rel_path::RelPath};
use workspace::{
CloseActiveItem, ItemNavHistory, SerializableItem, ToolbarItemEvent, ToolbarItemLocation,
ToolbarItemView, Workspace,
@@ -92,7 +97,7 @@ impl ProjectDiff {
window: &mut Window,
cx: &mut Context<Workspace>,
) {
Self::deploy_at(workspace, None, window, cx)
Self::deploy_at(workspace, None, window, cx);
}
fn deploy_branch_diff(
@@ -134,7 +139,7 @@ impl ProjectDiff {
entry: Option<GitStatusEntry>,
window: &mut Window,
cx: &mut Context<Workspace>,
) {
) -> Entity<ProjectDiff> {
telemetry::event!(
"Git Diff Opened",
source = if entry.is_some() {
@@ -166,7 +171,8 @@ impl ProjectDiff {
project_diff.update(cx, |project_diff, cx| {
project_diff.move_to_entry(entry, window, cx);
})
}
};
project_diff
}
pub fn autoscroll(&self, cx: &mut Context<Self>) {
@@ -272,9 +278,13 @@ impl ProjectDiff {
window,
move |this, _git_store, event, window, cx| match event {
BranchDiffEvent::FileListChanged => {
// TODO this does not account for size of paths
// maybe a quick fs metadata could get us info on that?
// would make number of paths async but thats fine here
let entries = this.first_n_entries(cx, 100);
this._task = window.spawn(cx, {
let this = cx.weak_entity();
async |cx| Self::refresh(this, cx).await
async |cx| Self::refresh(this, entries, cx).await
})
}
},
@@ -290,21 +300,31 @@ impl ProjectDiff {
if is_sort_by_path != was_sort_by_path
|| is_collapse_untracked_diff != was_collapse_untracked_diff
{
this._task = {
window.spawn(cx, {
let this = cx.weak_entity();
async |cx| Self::refresh(this, cx).await
})
}
todo!();
// this._task = {
// window.spawn(cx, {
// let this = cx.weak_entity();
// async |cx| Self::refresh(this, cx).await
// })
// }
}
was_sort_by_path = is_sort_by_path;
was_collapse_untracked_diff = is_collapse_untracked_diff;
})
.detach();
// let entries = cx.read_entity(&cx.entity(), |project_diff, cx| {
// project_diff.first_n_entries(cx, 100)
// });
let task = window.spawn(cx, {
let this = cx.weak_entity();
async |cx| Self::refresh(this, cx).await
async |cx| {
let entries = this
.read_with(cx, |project_diff, cx| project_diff.first_n_entries(cx, 100))
.unwrap();
Self::refresh(this, entries, cx).await
}
});
Self {
@@ -471,10 +491,11 @@ impl ProjectDiff {
cx: &mut Context<Self>,
) {
let subscription = cx.subscribe_in(&diff, window, move |this, _, _, window, cx| {
this._task = window.spawn(cx, {
let this = cx.weak_entity();
async |cx| Self::refresh(this, cx).await
})
// TODO fix this
// this._task = window.spawn(cx, {
// let this = cx.weak_entity();
// async |cx| Self::refresh(this, cx).await
// })
});
self.buffer_diff_subscriptions
.insert(path_key.path.clone(), (diff.clone(), subscription));
@@ -550,51 +571,203 @@ impl ProjectDiff {
}
}
pub async fn refresh(this: WeakEntity<Self>, cx: &mut AsyncWindowContext) -> Result<()> {
let mut path_keys = Vec::new();
let buffers_to_load = this.update(cx, |this, cx| {
let (repo, buffers_to_load) = this.branch_diff.update(cx, |branch_diff, cx| {
let load_buffers = branch_diff.load_buffers(cx);
(branch_diff.repo().cloned(), load_buffers)
pub fn first_n_entries(&self, cx: &App, n: usize) -> Vec<StatusEntry> {
let Some(ref repo) = self.branch_diff.read(cx).repo else {
return Vec::new();
};
repo.read(cx).cached_status().take(n).collect()
}
pub async fn refresh_one(
this: WeakEntity<Self>,
repo_path: RepoPath,
status: FileStatus,
cx: &mut AsyncWindowContext,
) -> Result<()> {
use git_store::branch_diff::BranchDiff;
let Some(this) = this.upgrade() else {
return Ok(());
};
let multibuffer = cx.read_entity(&this, |this, _| this.multibuffer.clone())?;
let branch_diff = cx.read_entity(&this, |pd, _| pd.branch_diff.clone())?;
let Some(repo) = cx.read_entity(&branch_diff, |bd, _| bd.repo.clone())? else {
return Ok(());
};
let project = cx.read_entity(&branch_diff, |bd, _| bd.project.clone())?;
let mut previous_paths =
cx.read_entity(&multibuffer, |mb, _| mb.paths().collect::<HashSet<_>>())?;
let tree_diff_status = cx.read_entity(&branch_diff, |branch_diff, _| {
branch_diff
.tree_diff
.as_ref()
.and_then(|t| t.entries.get(&repo_path))
.cloned()
})?;
let Some(status) = cx.read_entity(&branch_diff, |bd, _| {
bd.merge_statuses(Some(status), tree_diff_status.as_ref())
})?
else {
return Ok(());
};
if !status.has_changes() {
return Ok(());
}
let Some(project_path) = cx.read_entity(&repo, |repo, cx| {
repo.repo_path_to_project_path(&repo_path, cx)
})?
else {
return Ok(());
};
let sort_prefix =
cx.read_entity(&repo, |repo, cx| sort_prefix(repo, &repo_path, status, cx))?;
let path_key = PathKey::with_sort_prefix(sort_prefix, repo_path.into_arc());
previous_paths.remove(&path_key);
let repo = repo.clone();
let Some((buffer, diff)) = BranchDiff::load_buffer(
tree_diff_status,
project_path,
repo,
project.downgrade(),
&mut cx.to_app(),
)
.await
.log_err() else {
return Ok(());
};
cx.update(|window, cx| {
this.update(cx, |this, cx| {
this.register_buffer(path_key, status, buffer, diff, window, cx)
});
let mut previous_paths = this.multibuffer.read(cx).paths().collect::<HashSet<_>>();
})?;
if let Some(repo) = repo {
let repo = repo.read(cx);
// TODO LL clear multibuff on open?
// // remove anything not part of the diff in the multibuffer
// this.update(cx, |this, cx| {
// multibuffer.update(cx, |multibuffer, cx| {
// for path in previous_paths {
// this.buffer_diff_subscriptions.remove(&path.path);
// multibuffer.remove_excerpts_for_path(path, cx);
// }
// });
// })?;
path_keys = Vec::with_capacity(buffers_to_load.len());
for entry in buffers_to_load.iter() {
let sort_prefix = sort_prefix(&repo, &entry.repo_path, entry.file_status, cx);
let path_key =
PathKey::with_sort_prefix(sort_prefix, entry.repo_path.as_ref().clone());
previous_paths.remove(&path_key);
path_keys.push(path_key)
}
Ok(())
}
pub async fn refresh(
this: WeakEntity<Self>,
cached_status: Vec<StatusEntry>,
cx: &mut AsyncWindowContext,
) -> Result<()> {
dbg!("refreshing all");
use git_store::branch_diff::BranchDiff;
let Some(this) = this.upgrade() else {
return Ok(());
};
let multibuffer = cx.read_entity(&this, |this, _| this.multibuffer.clone())?;
let branch_diff = cx.read_entity(&this, |pd, _| pd.branch_diff.clone())?;
let Some(repo) = cx.read_entity(&branch_diff, |bd, _| bd.repo.clone())? else {
return Ok(());
};
let project = cx.read_entity(&branch_diff, |bd, _| bd.project.clone())?;
let mut previous_paths =
cx.read_entity(&multibuffer, |mb, _| mb.paths().collect::<HashSet<_>>())?;
// Idea: on click in git panel prioritize task for that file in some way ...
// could have a hashmap of futures here
// - needs to prioritize *some* background tasks over others
// -
let mut tasks = FuturesUnordered::new();
let mut seen = HashSet::default();
for entry in cached_status {
seen.insert(entry.repo_path.clone());
let tree_diff_status = cx.read_entity(&branch_diff, |branch_diff, _| {
branch_diff
.tree_diff
.as_ref()
.and_then(|t| t.entries.get(&entry.repo_path))
.cloned()
})?;
let Some(status) = cx.read_entity(&branch_diff, |bd, _| {
bd.merge_statuses(Some(entry.status), tree_diff_status.as_ref())
})?
else {
continue;
};
if !status.has_changes() {
continue;
}
this.multibuffer.update(cx, |multibuffer, cx| {
let Some(project_path) = cx.read_entity(&repo, |repo, cx| {
repo.repo_path_to_project_path(&entry.repo_path, cx)
})?
else {
continue;
};
let sort_prefix = cx.read_entity(&repo, |repo, cx| {
sort_prefix(repo, &entry.repo_path, entry.status, cx)
})?;
let path_key = PathKey::with_sort_prefix(sort_prefix, entry.repo_path.into_arc());
previous_paths.remove(&path_key);
let repo = repo.clone();
let project = project.downgrade();
let task = cx.spawn(async move |cx| {
let res = BranchDiff::load_buffer(
tree_diff_status,
project_path,
repo,
project,
&mut cx.to_app(),
)
.await;
(res, path_key, entry.status)
});
tasks.push(task)
}
// remove anything not part of the diff in the multibuffer
this.update(cx, |this, cx| {
multibuffer.update(cx, |multibuffer, cx| {
for path in previous_paths {
this.buffer_diff_subscriptions.remove(&path.path);
multibuffer.remove_excerpts_for_path(path, cx);
}
});
buffers_to_load
})?;
for (entry, path_key) in buffers_to_load.into_iter().zip(path_keys.into_iter()) {
if let Some((buffer, diff)) = entry.load.await.log_err() {
// add the new buffers as they are parsed
let mut last_notify = Instant::now();
while let Some((res, path_key, file_status)) = tasks.next().await {
if let Some((buffer, diff)) = res.log_err() {
cx.update(|window, cx| {
this.update(cx, |this, cx| {
this.register_buffer(path_key, entry.file_status, buffer, diff, window, cx)
})
.ok();
this.register_buffer(path_key, file_status, buffer, diff, window, cx)
});
})?;
}
if last_notify.elapsed().as_millis() > 100 {
cx.notify();
last_notify = Instant::now();
}
}
this.update(cx, |this, cx| {
this.pending_scroll.take();
cx.notify();
})?;
Ok(())
}

View File

@@ -187,12 +187,13 @@ font-kit = { git = "https://github.com/zed-industries/font-kit", rev = "11052312
"source-fontconfig-dlopen",
], optional = true }
calloop = { version = "0.13.0" }
calloop = { version = "0.14.3" }
filedescriptor = { version = "0.8.2", optional = true }
open = { version = "5.2.0", optional = true }
# Wayland
calloop-wayland-source = { version = "0.3.0", optional = true }
calloop-wayland-source = { version = "0.4.1", optional = true }
wayland-backend = { version = "0.3.3", features = [
"client_system",
"dlopen",
@@ -265,7 +266,6 @@ naga.workspace = true
[target.'cfg(any(target_os = "linux", target_os = "freebsd"))'.build-dependencies]
naga.workspace = true
[[example]]
name = "hello_world"
path = "examples/hello_world.rs"

View File

@@ -310,6 +310,11 @@ impl AsyncWindowContext {
.update(self, |_, window, cx| read(cx.global(), window, cx))
}
/// Returns an `AsyncApp` by cloning the one used by Self
pub fn to_app(&self) -> AsyncApp {
self.app.clone()
}
/// A convenience method for [`App::update_global`](BorrowAppContext::update_global).
/// for updating the global state of the specified type.
pub fn update_global<G, R>(

View File

@@ -233,6 +233,9 @@ impl<'a, T: 'static> Context<'a, T> {
/// Spawn the future returned by the given function.
/// The function is provided a weak handle to the entity owned by this context and a context that can be held across await points.
/// The returned task must be held or detached.
///
/// # Example
/// `cx.spawn(async move |some_weak_entity, cx| ...)`
#[track_caller]
pub fn spawn<AsyncFn, R>(&self, f: AsyncFn) -> Task<R>
where

View File

@@ -1,7 +1,6 @@
use std::{
env,
path::{Path, PathBuf},
process::Command,
rc::Rc,
sync::Arc,
};
@@ -18,6 +17,7 @@ use anyhow::{Context as _, anyhow};
use calloop::{LoopSignal, channel::Channel};
use futures::channel::oneshot;
use util::ResultExt as _;
use util::command::{new_smol_command, new_std_command};
#[cfg(any(feature = "wayland", feature = "x11"))]
use xkbcommon::xkb::{self, Keycode, Keysym, State};
@@ -215,7 +215,7 @@ impl<P: LinuxClient + 'static> Platform for P {
clippy::disallowed_methods,
reason = "We are restarting ourselves, using std command thus is fine"
)]
let restart_process = Command::new("/usr/bin/env")
let restart_process = new_std_command("/usr/bin/env")
.arg("bash")
.arg("-c")
.arg(script)
@@ -422,7 +422,7 @@ impl<P: LinuxClient + 'static> Platform for P {
let path = path.to_owned();
self.background_executor()
.spawn(async move {
let _ = smol::process::Command::new("xdg-open")
let _ = new_smol_command("xdg-open")
.arg(path)
.spawn()
.context("invoking xdg-open")

View File

@@ -487,12 +487,15 @@ impl WaylandClient {
let (common, main_receiver) = LinuxCommon::new(event_loop.get_signal());
let handle = event_loop.handle();
let handle = event_loop.handle(); // CHECK that wayland sources get higher prio
handle
// these are all tasks spawned on the foreground executor.
// There is no concept of priority, they are all equal.
.insert_source(main_receiver, {
let handle = handle.clone();
move |event, _, _: &mut WaylandClientStatePtr| {
if let calloop::channel::Event::Msg(runnable) = event {
// will only be called when the event loop has finished processing all pending events from the sources
handle.insert_idle(|_| {
let start = Instant::now();
let mut timing = match runnable {
@@ -650,6 +653,7 @@ impl WaylandClient {
event_loop: Some(event_loop),
}));
// MAGIC HERE IT IS
WaylandSource::new(conn, event_queue)
.insert(handle)
.unwrap();
@@ -1574,6 +1578,7 @@ fn linux_button_to_gpui(button: u32) -> Option<MouseButton> {
})
}
// how is this being called inside calloop
impl Dispatch<wl_pointer::WlPointer, ()> for WaylandClientStatePtr {
fn event(
this: &mut Self,
@@ -1664,7 +1669,7 @@ impl Dispatch<wl_pointer::WlPointer, ()> for WaylandClientStatePtr {
modifiers: state.modifiers,
});
drop(state);
window.handle_input(input);
window.handle_input(input); // How does this get into the event loop?
}
}
wl_pointer::Event::Button {

View File

@@ -53,14 +53,16 @@ use std::{
ffi::{CStr, OsStr, c_void},
os::{raw::c_char, unix::ffi::OsStrExt},
path::{Path, PathBuf},
process::Command,
ptr,
rc::Rc,
slice, str,
sync::{Arc, OnceLock},
};
use strum::IntoEnumIterator;
use util::ResultExt;
use util::{
ResultExt,
command::{new_smol_command, new_std_command},
};
#[allow(non_upper_case_globals)]
const NSUTF8StringEncoding: NSUInteger = 4;
@@ -552,7 +554,7 @@ impl Platform for MacPlatform {
clippy::disallowed_methods,
reason = "We are restarting ourselves, using std command thus is fine"
)]
let restart_process = Command::new("/bin/bash")
let restart_process = new_std_command("/bin/bash")
.arg("-c")
.arg(script)
.arg(app_pid)
@@ -867,7 +869,7 @@ impl Platform for MacPlatform {
.lock()
.background_executor
.spawn(async move {
if let Some(mut child) = smol::process::Command::new("open")
if let Some(mut child) = new_smol_command("open")
.arg(path)
.spawn()
.context("invoking open command")

View File

@@ -67,10 +67,15 @@ impl RustLspAdapter {
#[cfg(target_os = "linux")]
async fn determine_libc_type() -> LibcType {
use futures::pin_mut;
use smol::process::Command;
async fn from_ldd_version() -> Option<LibcType> {
let ldd_output = Command::new("ldd").arg("--version").output().await.ok()?;
use util::command::new_smol_command;
let ldd_output = new_smol_command("ldd")
.arg("--version")
.output()
.await
.ok()?;
let ldd_version = String::from_utf8_lossy(&ldd_output.stdout);
if ldd_version.contains("GNU libc") || ldd_version.contains("GLIBC") {

View File

@@ -18,6 +18,7 @@ workspace.workspace = true
util.workspace = true
serde_json.workspace = true
smol.workspace = true
log.workspace = true
[dev-dependencies]
gpui = { workspace = true, features = ["test-support"] }

View File

@@ -5,10 +5,7 @@ use std::{
};
use gpui::{
App, AppContext, Context, Entity, Hsla, InteractiveElement, IntoElement, ParentElement, Render,
ScrollHandle, SerializedTaskTiming, StatefulInteractiveElement, Styled, Task, TaskTiming,
TitlebarOptions, WindowBounds, WindowHandle, WindowOptions, div, prelude::FluentBuilder, px,
relative, size,
App, AppContext, Context, Entity, Hsla, InteractiveElement, IntoElement, ParentElement, Render, ScrollHandle, SerializedThreadTaskTimings, StatefulInteractiveElement, Styled, Task, TaskTiming, ThreadTaskTimings, TitlebarOptions, WindowBounds, WindowHandle, WindowOptions, div, prelude::FluentBuilder, px, relative, size
};
use util::ResultExt;
use workspace::{
@@ -287,8 +284,13 @@ impl Render for ProfilerWindow {
let Some(data) = this.get_timings() else {
return;
};
let timings =
SerializedTaskTiming::convert(this.startup_time, &data);
let timings = ThreadTaskTimings {
thread_name: Some("main".to_string()),
thread_id: std::thread::current().id(),
timings: data.clone()
};
let timings = Vec::from([SerializedThreadTaskTimings::convert(this.startup_time, timings)]);
let active_path = workspace
.read_with(cx, |workspace, cx| {
@@ -305,12 +307,17 @@ impl Render for ProfilerWindow {
);
cx.background_spawn(async move {
let path = path.await;
let path =
path.log_err().and_then(|p| p.log_err()).flatten();
let Some(path) = path else {
return;
let path = match path.await.log_err() {
Some(Ok(Some(path))) => path,
Some(e @ Err(_)) => {
e.log_err();
log::warn!("Saving miniprof in workingdir");
std::path::Path::new(
"performance_profile.miniprof",
)
.to_path_buf()
}
Some(Ok(None)) | None => return,
};
let Some(timings) =

View File

@@ -3,13 +3,10 @@ use async_trait::async_trait;
use dap::{DapLocator, DebugRequest, adapters::DebugAdapterName};
use gpui::SharedString;
use serde_json::{Value, json};
use smol::{
Timer,
io::AsyncReadExt,
process::{Command, Stdio},
};
use smol::{Timer, io::AsyncReadExt, process::Stdio};
use std::time::Duration;
use task::{BuildTaskDefinition, DebugScenario, ShellBuilder, SpawnInTerminal, TaskTemplate};
use util::command::new_smol_command;
pub(crate) struct CargoLocator;
@@ -18,7 +15,7 @@ async fn find_best_executable(executables: &[String], test_name: &str) -> Option
return executables.first().cloned();
}
for executable in executables {
let Some(mut child) = Command::new(&executable)
let Some(mut child) = new_smol_command(&executable)
.arg("--list")
.stdout(Stdio::piped())
.spawn()

View File

@@ -6,7 +6,7 @@ use rpc::proto::{self, REMOTE_SERVER_PROJECT_ID};
use std::{collections::VecDeque, path::Path, sync::Arc};
use task::{Shell, shell_to_proto};
use terminal::terminal_settings::TerminalSettings;
use util::{ResultExt, rel_path::RelPath};
use util::{ResultExt, command::new_smol_command, rel_path::RelPath};
use worktree::Worktree;
use collections::HashMap;
@@ -389,7 +389,7 @@ async fn load_direnv_environment(
};
let args = &["export", "json"];
let direnv_output = smol::process::Command::new(&direnv_path)
let direnv_output = new_smol_command(&direnv_path)
.args(args)
.envs(env)
.env("TERM", "dumb")

View File

@@ -35,8 +35,8 @@ use git::{
},
stash::{GitStash, StashEntry},
status::{
DiffTreeType, FileStatus, GitSummary, StageStatus, StatusCode, TrackedStatus, TreeDiff,
TreeDiffStatus, UnmergedStatus, UnmergedStatusCode,
DiffTreeType, FileStatus, GitSummary, StatusCode, TrackedStatus, TreeDiff, TreeDiffStatus,
UnmergedStatus, UnmergedStatusCode,
},
};
use gpui::{
@@ -48,7 +48,7 @@ use language::{
proto::{deserialize_version, serialize_version},
};
use parking_lot::Mutex;
use pending_op::{PendingOp, PendingOps};
use pending_op::{PendingOp, PendingOpId, PendingOps};
use postage::stream::Stream as _;
use rpc::{
AnyProtoClient, TypedEnvelope,
@@ -3768,39 +3768,47 @@ impl Repository {
let commit = commit.to_string();
let id = self.id;
cx.spawn(async move |this, cx| {
this.update(cx, |this, _cx| {
this.send_job(
Some(format!("git checkout {}", commit).into()),
move |git_repo, _| async move {
match git_repo {
RepositoryState::Local {
backend,
environment,
..
} => {
backend
.checkout_files(commit, paths, environment.clone())
.await
}
RepositoryState::Remote { project_id, client } => {
client
.request(proto::GitCheckoutFiles {
project_id: project_id.0,
repository_id: id.to_proto(),
commit,
paths: paths.into_iter().map(|p| p.to_proto()).collect(),
})
.await?;
self.spawn_job_with_tracking(
paths.clone(),
pending_op::GitStatus::Reverted,
cx,
async move |this, cx| {
this.update(cx, |this, _cx| {
this.send_job(
Some(format!("git checkout {}", commit).into()),
move |git_repo, _| async move {
match git_repo {
RepositoryState::Local {
backend,
environment,
..
} => {
backend
.checkout_files(commit, paths, environment.clone())
.await
}
RepositoryState::Remote { project_id, client } => {
client
.request(proto::GitCheckoutFiles {
project_id: project_id.0,
repository_id: id.to_proto(),
commit,
paths: paths
.into_iter()
.map(|p| p.to_proto())
.collect(),
})
.await?;
Ok(())
Ok(())
}
}
}
},
)
})?
.await?
})
},
)
})?
.await?
},
)
}
pub fn reset(
@@ -3945,9 +3953,9 @@ impl Repository {
let status = format!("git add {paths}");
let job_key = GitJobKey::WriteIndex(entries.clone());
self.spawn_stage_job_with_tracking(
self.spawn_job_with_tracking(
entries.clone(),
StageStatus::Staged,
pending_op::GitStatus::Staged,
cx,
async move |this, cx| {
for save_task in save_tasks {
@@ -4007,9 +4015,9 @@ impl Repository {
let status = format!("git reset {paths}");
let job_key = GitJobKey::WriteIndex(entries.clone());
self.spawn_stage_job_with_tracking(
self.spawn_job_with_tracking(
entries.clone(),
StageStatus::Unstaged,
pending_op::GitStatus::Unstaged,
cx,
async move |this, cx| {
for save_task in save_tasks {
@@ -4051,67 +4059,6 @@ impl Repository {
)
}
/// Records one pending stage/unstage operation per path, runs `f`, and then
/// either marks those operations finished (on success) or removes them again
/// (on cancellation or error).
///
/// NOTE(review): the old `finished: bool` scheme here is superseded elsewhere
/// in this file by `spawn_job_with_tracking`, which tracks a richer
/// `JobStatus` instead.
fn spawn_stage_job_with_tracking<AsyncFn>(
&mut self,
paths: Vec<RepoPath>,
stage_status: StageStatus,
cx: &mut Context<Self>,
f: AsyncFn,
) -> Task<Result<()>>
where
AsyncFn: AsyncFnOnce(WeakEntity<Repository>, &mut AsyncApp) -> Result<()> + 'static,
{
// Optimistically insert a pending op per path so callers can observe the
// in-flight stage/unstage before the underlying git job completes.
let mut edits = Vec::with_capacity(paths.len());
let mut ids = Vec::with_capacity(paths.len());
for path in paths {
let mut ops = self
.snapshot
.pending_ops_for_path(&path)
.unwrap_or_else(|| PendingOps::new(&path));
// Op ids are monotonically increasing within a path's op list.
let id = ops.max_id() + 1;
ops.ops.push(PendingOp {
id,
stage_status,
finished: false,
});
edits.push(sum_tree::Edit::Insert(ops));
ids.push((id, path));
}
self.snapshot.pending_ops_by_path.edit(edits, ());
cx.spawn(async move |this, cx| {
// Cancellation is not treated as an error: the ops are dropped and the
// task resolves Ok.
let (finished, result) = match f(this.clone(), cx).await {
Ok(()) => (true, Ok(())),
Err(err) if err.is::<Canceled>() => (false, Ok(())),
Err(err) => (false, Err(err)),
};
this.update(cx, |this, _| {
let mut edits = Vec::with_capacity(ids.len());
for (id, entry) in ids {
if let Some(mut ops) = this.snapshot.pending_ops_for_path(&entry) {
if finished {
// Success: keep the op, flag it as done.
if let Some(op) = ops.op_by_id_mut(id) {
op.finished = true;
}
} else {
// Failure/cancel: remove the op inserted above.
let idx = ops
.ops
.iter()
.position(|op| op.id == id)
.expect("pending operation must exist");
ops.ops.remove(idx);
}
edits.push(sum_tree::Edit::Insert(ops));
}
}
this.snapshot.pending_ops_by_path.edit(edits, ());
})?;
result
})
}
pub fn stage_all(&mut self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
let to_stage = self
.cached_status()
@@ -5443,6 +5390,67 @@ impl Repository {
pub fn barrier(&mut self) -> oneshot::Receiver<()> {
self.send_job(None, |_, _| async {})
}
/// Records a pending operation (with the requested `git_status`) for each
/// path, runs `f`, and then stamps each recorded op's `job_status` with the
/// outcome: `Finished` on success, `Skipped` on cancellation, `Error` on
/// failure. Cancellation resolves the returned task with `Ok(())`.
fn spawn_job_with_tracking<AsyncFn>(
&mut self,
paths: Vec<RepoPath>,
git_status: pending_op::GitStatus,
cx: &mut Context<Self>,
f: AsyncFn,
) -> Task<Result<()>>
where
AsyncFn: AsyncFnOnce(WeakEntity<Repository>, &mut AsyncApp) -> Result<()> + 'static,
{
// Ops start out as JobStatus::Running so observers can see the job in flight.
let ids = self.new_pending_ops_for_paths(paths, git_status);
cx.spawn(async move |this, cx| {
let (job_status, result) = match f(this.clone(), cx).await {
Ok(()) => (pending_op::JobStatus::Finished, Ok(())),
// Cancellation is deliberately not surfaced as an error.
Err(err) if err.is::<Canceled>() => (pending_op::JobStatus::Skipped, Ok(())),
Err(err) => (pending_op::JobStatus::Error, Err(err)),
};
this.update(cx, |this, _| {
let mut edits = Vec::with_capacity(ids.len());
for (id, entry) in ids {
// The op may have been pruned in the meantime; only update ops
// that still exist.
if let Some(mut ops) = this.snapshot.pending_ops_for_path(&entry) {
if let Some(op) = ops.op_by_id_mut(id) {
op.job_status = job_status;
}
edits.push(sum_tree::Edit::Insert(ops));
}
}
this.snapshot.pending_ops_by_path.edit(edits, ());
})?;
result
})
}
/// Inserts a fresh `PendingOp` (status `Running`, with the given
/// `git_status`) for every path, committing all insertions to the snapshot
/// in a single edit. Returns the `(id, path)` pair for each new op so the
/// caller can update them once the job resolves.
fn new_pending_ops_for_paths(
    &mut self,
    paths: Vec<RepoPath>,
    git_status: pending_op::GitStatus,
) -> Vec<(PendingOpId, RepoPath)> {
    // Build the tree edits and the id/path pairs in one pass.
    let (edits, ids): (Vec<_>, Vec<_>) = paths
        .into_iter()
        .map(|path| {
            let mut ops = self
                .snapshot
                .pending_ops_for_path(&path)
                .unwrap_or_else(|| PendingOps::new(&path));
            // Ids are monotonically increasing within a path's op list.
            let id = ops.max_id() + 1;
            ops.ops.push(PendingOp {
                id,
                git_status,
                job_status: pending_op::JobStatus::Running,
            });
            (sum_tree::Edit::Insert(ops), (id, path))
        })
        .unzip();
    self.snapshot.pending_ops_by_path.edit(edits, ());
    ids
}
}
fn get_permalink_in_rust_registry_src(
@@ -5713,7 +5721,7 @@ async fn compute_snapshot(
let pending_ops_by_path = SumTree::from_iter(
prev_snapshot.pending_ops_by_path.iter().filter_map(|ops| {
let inner_ops: Vec<PendingOp> =
ops.ops.iter().filter(|op| !op.finished).cloned().collect();
ops.ops.iter().filter(|op| op.running()).cloned().collect();
if inner_ops.is_empty() {
None
} else {

View File

@@ -34,11 +34,11 @@ impl DiffBase {
pub struct BranchDiff {
diff_base: DiffBase,
repo: Option<Entity<Repository>>,
project: Entity<Project>,
pub repo: Option<Entity<Repository>>,
pub project: Entity<Project>,
base_commit: Option<SharedString>,
head_commit: Option<SharedString>,
tree_diff: Option<TreeDiff>,
pub tree_diff: Option<TreeDiff>,
_subscription: Subscription,
update_needed: postage::watch::Sender<()>,
_task: Task<()>,
@@ -283,7 +283,11 @@ impl BranchDiff {
else {
continue;
};
let task = Self::load_buffer(branch_diff, project_path, repo.clone(), cx);
let repo = repo.clone();
let task = cx.spawn(async move |project, cx| {
Self::load_buffer(branch_diff, project_path, repo.clone(), project, cx).await
});
output.push(DiffBuffer {
repo_path: item.repo_path.clone(),
@@ -303,8 +307,11 @@ impl BranchDiff {
let Some(project_path) = repo.read(cx).repo_path_to_project_path(&path, cx) else {
continue;
};
let task =
Self::load_buffer(Some(branch_diff.clone()), project_path, repo.clone(), cx);
let repo = repo.clone();
let branch_diff2 = Some(branch_diff.clone());
let task = cx.spawn(async move |project, cx| {
Self::load_buffer(branch_diff2, project_path, repo, project, cx).await
});
let file_status = diff_status_to_file_status(branch_diff);
@@ -318,42 +325,40 @@ impl BranchDiff {
output
}
fn load_buffer(
pub async fn load_buffer(
branch_diff: Option<git::status::TreeDiffStatus>,
project_path: crate::ProjectPath,
repo: Entity<Repository>,
cx: &Context<'_, Project>,
) -> Task<Result<(Entity<Buffer>, Entity<BufferDiff>)>> {
let task = cx.spawn(async move |project, cx| {
let buffer = project
.update(cx, |project, cx| project.open_buffer(project_path, cx))?
.await?;
project: WeakEntity<Project>,
cx: &mut gpui::AsyncApp, // making this generic over AppContext hangs the compiler
) -> Result<(Entity<Buffer>, Entity<BufferDiff>)> {
let buffer = project
.update(cx, |project, cx| project.open_buffer(project_path, cx))?
.await?;
let languages = project.update(cx, |project, _cx| project.languages().clone())?;
let languages = project.update(cx, |project, _cx| project.languages().clone())?;
let changes = if let Some(entry) = branch_diff {
let oid = match entry {
git::status::TreeDiffStatus::Added { .. } => None,
git::status::TreeDiffStatus::Modified { old, .. }
| git::status::TreeDiffStatus::Deleted { old } => Some(old),
};
project
.update(cx, |project, cx| {
project.git_store().update(cx, |git_store, cx| {
git_store.open_diff_since(oid, buffer.clone(), repo, languages, cx)
})
})?
.await?
} else {
project
.update(cx, |project, cx| {
project.open_uncommitted_diff(buffer.clone(), cx)
})?
.await?
let changes = if let Some(entry) = branch_diff {
let oid = match entry {
git::status::TreeDiffStatus::Added { .. } => None,
git::status::TreeDiffStatus::Modified { old, .. }
| git::status::TreeDiffStatus::Deleted { old } => Some(old),
};
Ok((buffer, changes))
});
task
project
.update(cx, |project, cx| {
project.git_store().update(cx, |git_store, cx| {
git_store.open_diff_since(oid, buffer.clone(), repo, languages, cx)
})
})?
.await?
} else {
project
.update(cx, |project, cx| {
project.open_uncommitted_diff(buffer.clone(), cx)
})?
.await?
};
Ok((buffer, changes))
}
}

View File

@@ -1,9 +1,24 @@
use git::repository::RepoPath;
use git::status::StageStatus;
use std::ops::Add;
use sum_tree::{ContextLessSummary, Item, KeyedItem};
use worktree::{PathKey, PathSummary};
/// The git state a pending operation is driving a path toward.
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum GitStatus {
/// Path is being staged (`git add`).
Staged,
/// Path is being unstaged (`git reset`).
Unstaged,
/// Path is being reverted (used by `git checkout <commit> -- <paths>`).
Reverted,
/// No state change requested — presumably a placeholder/no-op; confirm at call sites.
Unchanged,
}
/// Lifecycle state of the asynchronous job backing a pending git operation.
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum JobStatus {
/// The job is still in flight.
Running,
/// The job completed successfully.
Finished,
/// The job was cancelled (`Canceled`); treated as a non-error outcome.
Skipped,
/// The job failed with an error.
Error,
}
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct PendingOps {
pub repo_path: RepoPath,
@@ -13,8 +28,8 @@ pub struct PendingOps {
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub struct PendingOp {
pub id: PendingOpId,
pub stage_status: StageStatus,
pub finished: bool,
pub git_status: GitStatus,
pub job_status: JobStatus,
}
#[derive(Clone, Debug)]
@@ -96,23 +111,37 @@ impl PendingOps {
self.ops.iter_mut().find(|op| op.id == id)
}
/// File is staged if the last job is finished and status is fully staged.
/// File is staged if the last job is finished and has status Staged.
pub fn staged(&self) -> bool {
if let Some(last) = self.ops.last() {
if last.stage_status.is_fully_staged() && last.finished {
if last.git_status == GitStatus::Staged && last.job_status == JobStatus::Finished {
return true;
}
}
false
}
/// File is staged if the last job is not yet finished and status is fully staged.
/// File is staged if the last job is not finished and has status Staged.
pub fn staging(&self) -> bool {
if let Some(last) = self.ops.last() {
if last.stage_status.is_fully_staged() && !last.finished {
if last.git_status == GitStatus::Staged && last.job_status != JobStatus::Finished {
return true;
}
}
false
}
}
impl PendingOp {
pub fn running(&self) -> bool {
self.job_status == JobStatus::Running
}
pub fn finished(&self) -> bool {
matches!(self.job_status, JobStatus::Finished | JobStatus::Skipped)
}
pub fn error(&self) -> bool {
self.job_status == JobStatus::Error
}
}

View File

@@ -17,7 +17,7 @@ use futures::{StreamExt, future};
use git::{
GitHostingProviderRegistry,
repository::{RepoPath, repo_path},
status::{StageStatus, StatusCode, TrackedStatus},
status::{StatusCode, TrackedStatus},
};
use git2::RepositoryInitOptions;
use gpui::{App, BackgroundExecutor, FutureExt, SemanticVersion, UpdateGlobal};
@@ -8538,8 +8538,11 @@ fn merge_pending_ops_snapshots(
.find_map(|(op, idx)| if op.id == s_op.id { Some(idx) } else { None })
{
let t_op = &mut t_ops.ops[op_idx];
if s_op.finished {
t_op.finished = true;
match (s_op.job_status, t_op.job_status) {
(pending_op::JobStatus::Running, _) => {}
(s_st, pending_op::JobStatus::Running) => t_op.job_status = s_st,
(s_st, t_st) if s_st == t_st => {}
_ => unreachable!(),
}
} else {
t_ops.ops.push(s_op);
@@ -8614,10 +8617,10 @@ async fn test_repository_pending_ops_staging(
let mut id = 1u16;
let mut assert_stage = async |path: RepoPath, stage| {
let stage_status = if stage {
StageStatus::Staged
let git_status = if stage {
pending_op::GitStatus::Staged
} else {
StageStatus::Unstaged
pending_op::GitStatus::Unstaged
};
repo.update(cx, |repo, cx| {
let task = if stage {
@@ -8630,8 +8633,8 @@ async fn test_repository_pending_ops_staging(
ops.ops.last(),
Some(&pending_op::PendingOp {
id: id.into(),
stage_status,
finished: false,
git_status,
job_status: pending_op::JobStatus::Running
})
);
task
@@ -8645,8 +8648,8 @@ async fn test_repository_pending_ops_staging(
ops.ops.last(),
Some(&pending_op::PendingOp {
id: id.into(),
stage_status,
finished: true,
git_status,
job_status: pending_op::JobStatus::Finished
})
);
});
@@ -8671,28 +8674,28 @@ async fn test_repository_pending_ops_staging(
vec![
pending_op::PendingOp {
id: 1u16.into(),
stage_status: StageStatus::Staged,
finished: true,
git_status: pending_op::GitStatus::Staged,
job_status: pending_op::JobStatus::Finished
},
pending_op::PendingOp {
id: 2u16.into(),
stage_status: StageStatus::Unstaged,
finished: true,
git_status: pending_op::GitStatus::Unstaged,
job_status: pending_op::JobStatus::Finished
},
pending_op::PendingOp {
id: 3u16.into(),
stage_status: StageStatus::Staged,
finished: true,
git_status: pending_op::GitStatus::Staged,
job_status: pending_op::JobStatus::Finished
},
pending_op::PendingOp {
id: 4u16.into(),
stage_status: StageStatus::Unstaged,
finished: true,
git_status: pending_op::GitStatus::Unstaged,
job_status: pending_op::JobStatus::Finished
},
pending_op::PendingOp {
id: 5u16.into(),
stage_status: StageStatus::Staged,
finished: true,
git_status: pending_op::GitStatus::Staged,
job_status: pending_op::JobStatus::Finished
}
],
);
@@ -8789,11 +8792,18 @@ async fn test_repository_pending_ops_long_running_staging(
.get(&worktree::PathKey(repo_path("a.txt").as_ref().clone()), ())
.unwrap()
.ops,
vec![pending_op::PendingOp {
id: 2u16.into(),
stage_status: StageStatus::Staged,
finished: true,
}],
vec![
pending_op::PendingOp {
id: 1u16.into(),
git_status: pending_op::GitStatus::Staged,
job_status: pending_op::JobStatus::Skipped
},
pending_op::PendingOp {
id: 2u16.into(),
git_status: pending_op::GitStatus::Staged,
job_status: pending_op::JobStatus::Finished
}
],
);
repo.update(cx, |repo, _cx| {
@@ -8893,13 +8903,13 @@ async fn test_repository_pending_ops_stage_all(
vec![
pending_op::PendingOp {
id: 1u16.into(),
stage_status: StageStatus::Staged,
finished: true,
git_status: pending_op::GitStatus::Staged,
job_status: pending_op::JobStatus::Finished
},
pending_op::PendingOp {
id: 2u16.into(),
stage_status: StageStatus::Unstaged,
finished: true,
git_status: pending_op::GitStatus::Unstaged,
job_status: pending_op::JobStatus::Finished
},
],
);
@@ -8912,13 +8922,13 @@ async fn test_repository_pending_ops_stage_all(
vec![
pending_op::PendingOp {
id: 1u16.into(),
stage_status: StageStatus::Staged,
finished: true,
git_status: pending_op::GitStatus::Staged,
job_status: pending_op::JobStatus::Finished
},
pending_op::PendingOp {
id: 2u16.into(),
stage_status: StageStatus::Unstaged,
finished: true,
git_status: pending_op::GitStatus::Unstaged,
job_status: pending_op::JobStatus::Finished
},
],
);

View File

@@ -16,7 +16,7 @@ use task::{Shell, ShellBuilder, ShellKind, SpawnInTerminal};
use terminal::{
TaskState, TaskStatus, Terminal, TerminalBuilder, terminal_settings::TerminalSettings,
};
use util::{get_default_system_shell, maybe, rel_path::RelPath};
use util::{command::new_std_command, get_default_system_shell, maybe, rel_path::RelPath};
use crate::{Project, ProjectPath};
@@ -505,13 +505,13 @@ impl Project {
None,
None,
)?;
let mut command = std::process::Command::new(command_template.program);
let mut command = new_std_command(command_template.program);
command.args(command_template.args);
command.envs(command_template.env);
Ok(command)
}
None => {
let mut command = std::process::Command::new(command);
let mut command = new_std_command(command);
command.args(args);
command.envs(env);
if let Some(path) = path {

View File

@@ -124,6 +124,7 @@ async fn build_remote_server_from_source(
use smol::process::{Command, Stdio};
use std::env::VarError;
use std::path::Path;
use util::command::new_smol_command;
// By default, we make building remote server from source opt-out and we do not force artifact compression
// for quicker builds.
@@ -189,7 +190,7 @@ async fn build_remote_server_from_source(
delegate.set_status(Some("Building remote server binary from source"), cx);
log::info!("building remote server binary from source");
run_cmd(
Command::new("cargo")
new_smol_command("cargo")
.current_dir(concat!(env!("CARGO_MANIFEST_DIR"), "/../.."))
.args([
"build",
@@ -219,12 +220,18 @@ async fn build_remote_server_from_source(
.context("rustup not found on $PATH, install rustup (see https://rustup.rs/)")?;
delegate.set_status(Some("Adding rustup target for cross-compilation"), cx);
log::info!("adding rustup target");
run_cmd(Command::new(rustup).args(["target", "add"]).arg(&triple)).await?;
run_cmd(
new_smol_command(rustup)
.args(["target", "add"])
.arg(&triple),
)
.await?;
if which("cargo-zigbuild", cx).await?.is_none() {
delegate.set_status(Some("Installing cargo-zigbuild for cross-compilation"), cx);
log::info!("installing cargo-zigbuild");
run_cmd(Command::new("cargo").args(["install", "--locked", "cargo-zigbuild"])).await?;
run_cmd(new_smol_command("cargo").args(["install", "--locked", "cargo-zigbuild"]))
.await?;
}
delegate.set_status(
@@ -235,7 +242,7 @@ async fn build_remote_server_from_source(
);
log::info!("building remote binary from source for {triple} with Zig");
run_cmd(
Command::new("cargo")
new_smol_command("cargo")
.args([
"zigbuild",
"--package",
@@ -262,12 +269,13 @@ async fn build_remote_server_from_source(
#[cfg(not(target_os = "windows"))]
{
run_cmd(Command::new("gzip").args(["-f", &bin_path.to_string_lossy()])).await?;
run_cmd(new_smol_command("gzip").args(["-f", &bin_path.to_string_lossy()])).await?;
}
#[cfg(target_os = "windows")]
{
// On Windows, we use 7z to compress the binary
let seven_zip = which("7z.exe",cx)
.await?
.context("7z.exe not found on $PATH, install it (e.g. with `winget install -e --id 7zip.7zip`) or, if you don't want this behaviour, set $env:ZED_BUILD_REMOTE_SERVER=\"nocompress\"")?;
@@ -275,7 +283,7 @@ async fn build_remote_server_from_source(
if smol::fs::metadata(&gz_path).await.is_ok() {
smol::fs::remove_file(&gz_path).await?;
}
run_cmd(Command::new(seven_zip).args([
run_cmd(new_smol_command(seven_zip).args([
"a",
"-tgzip",
&gz_path,

View File

@@ -16,6 +16,7 @@ use language::LanguageRegistry;
use node_runtime::{NodeBinaryOptions, NodeRuntime};
use paths::logs_dir;
use project::project_settings::ProjectSettings;
use util::command::new_smol_command;
use proto::CrashReport;
use release_channel::{AppVersion, RELEASE_CHANNEL, ReleaseChannel};
@@ -656,7 +657,7 @@ pub(crate) fn execute_proxy(
async fn kill_running_server(pid: u32, paths: &ServerPaths) -> Result<(), ExecuteProxyError> {
log::info!("killing existing server with PID {}", pid);
smol::process::Command::new("kill")
new_smol_command("kill")
.arg(pid.to_string())
.output()
.await
@@ -707,7 +708,7 @@ async fn spawn_server(paths: &ServerPaths) -> Result<(), SpawnServerError> {
}
let binary_name = std::env::current_exe().map_err(SpawnServerError::CurrentExe)?;
let mut server_process = smol::process::Command::new(binary_name);
let mut server_process = new_smol_command(binary_name);
server_process
.arg("run")
.arg("--log-file")
@@ -772,7 +773,7 @@ async fn check_pid_file(path: &Path) -> Result<Option<u32>, CheckPidError> {
};
log::debug!("Checking if process with PID {} exists...", pid);
match smol::process::Command::new("kill")
match new_smol_command("kill")
.arg("-0")
.arg(pid.to_string())
.output()

View File

@@ -13,6 +13,32 @@ use std::{
time::Duration,
};
// https://docs.rs/tokio/latest/src/tokio/task/yield_now.rs.html#39-64
/// Cooperatively yields control back to the executor exactly once.
///
/// The future returns `Pending` on its first poll and `Ready` on the next,
/// letting other ready tasks run in between.
pub async fn yield_now() {
    /// One-shot future: `Pending` once, then `Ready`.
    struct YieldNow {
        yielded: bool,
    }

    impl Future for YieldNow {
        type Output = ();

        fn poll(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<()> {
            if self.yielded {
                return Poll::Ready(());
            }
            self.yielded = true;
            // Per the `Future::poll` contract, a future returning `Pending`
            // must arrange for the waker to be invoked, or the task is never
            // polled again. Tokio's original defers the wake via its internal
            // scheduler (`context::defer`); outside tokio we must wake
            // explicitly here, otherwise this task hangs forever.
            cx.waker().wake_by_ref();
            Poll::Pending
        }
    }

    YieldNow { yielded: false }.await;
}
#[derive(Clone)]
pub struct ForegroundExecutor {
session_id: SessionId,

View File

@@ -1556,7 +1556,8 @@ pub(crate) fn default_working_directory(workspace: &Workspace, cx: &App) -> Opti
.read(cx)
.active_project_directory(cx)
.as_deref()
.map(Path::to_path_buf),
.map(Path::to_path_buf)
.or_else(|| first_project_directory(workspace, cx)),
WorkingDirectory::FirstProjectDirectory => first_project_directory(workspace, cx),
WorkingDirectory::AlwaysHome => None,
WorkingDirectory::Always { directory } => {
@@ -1570,10 +1571,13 @@ pub(crate) fn default_working_directory(workspace: &Workspace, cx: &App) -> Opti
///Gets the first project's home directory, or the home directory
fn first_project_directory(workspace: &Workspace, cx: &App) -> Option<PathBuf> {
let worktree = workspace.worktrees(cx).next()?.read(cx);
if !worktree.root_entry()?.is_dir() {
return None;
let worktree_path = worktree.abs_path();
if worktree.root_entry()?.is_dir() {
Some(worktree_path.to_path_buf())
} else {
// If worktree is a file, return its parent directory
worktree_path.parent().map(|p| p.to_path_buf())
}
Some(worktree.abs_path().to_path_buf())
}
#[cfg(test)]
@@ -1606,7 +1610,7 @@ mod tests {
});
}
// No active entry, but a worktree, worktree is a file -> home_dir()
// No active entry, but a worktree, worktree is a file -> parent directory
#[gpui::test]
async fn no_active_entry_worktree_is_file(cx: &mut TestAppContext) {
let (project, workspace) = init_test(cx).await;
@@ -1621,9 +1625,9 @@ mod tests {
assert!(workspace.worktrees(cx).next().is_some());
let res = default_working_directory(workspace, cx);
assert_eq!(res, None);
assert_eq!(res, Some(Path::new("/").to_path_buf()));
let res = first_project_directory(workspace, cx);
assert_eq!(res, None);
assert_eq!(res, Some(Path::new("/").to_path_buf()));
});
}

View File

@@ -34,11 +34,13 @@ async fn capture_unix(
) -> Result<collections::HashMap<String, String>> {
use std::os::unix::process::CommandExt;
use crate::command::new_std_command;
let shell_kind = ShellKind::new(shell_path, false);
let zed_path = super::get_shell_safe_zed_path(shell_kind)?;
let mut command_string = String::new();
let mut command = std::process::Command::new(shell_path);
let mut command = new_std_command(shell_path);
command.args(args);
// In some shells, file descriptors greater than 2 cannot be used in interactive mode,
// so file descriptor 0 (stdin) is used instead. This impacts zsh, old bash; perhaps others.

View File

@@ -211,7 +211,7 @@ fn find_target(
let mut pre_char = String::new();
// Backward scan to find the start of the number, but stop at start_offset
for ch in snapshot.reversed_chars_at(offset + 1) {
for ch in snapshot.reversed_chars_at(offset + if offset < snapshot.len() { 1 } else { 0 }) {
// Search boundaries
if offset == 0 || ch.is_whitespace() || (need_range && offset <= start_offset) {
break;

View File

@@ -168,6 +168,8 @@ To ensure you're using your billing method of choice, [open a new Codex thread](
If you are already logged in and want to change your authentication method, type `/logout` in the thread and authenticate again.
If you want to use a third-party provider with Codex, you can configure that with your [Codex config.toml](https://github.com/openai/codex/blob/main/docs/config.md#model-selection) or pass extra [args/env variables](https://github.com/openai/codex/blob/main/docs/config.md#model-selection) to your Codex agent server settings.
#### Installation
The first time you create a Codex thread, Zed will install [codex-acp](https://github.com/zed-industries/codex-acp). This installation is only available to Zed and is kept up to date as you use the agent.