Compare commits

...

10 Commits

Author | SHA1 | Message | Date
Piotr Osiewicz | bfd06df221 | Update | 2024-06-28 13:17:53 +02:00
Piotr Osiewicz | ba098e3047 | Merge branch 'main' into yarn-pwnage | 2024-06-28 11:45:35 +02:00
Piotr Osiewicz | a29b9eea34 | Yay, this works ig | 2024-06-27 18:07:20 +02:00
Piotr Osiewicz | 7fc5c98dfc | Introduce EntryData to represent files with known contents | 2024-06-27 15:14:27 +02:00
Piotr Osiewicz | b4d3e7c2d3 | Checkpoint | 2024-06-27 14:26:56 +02:00
Piotr Osiewicz | 75d50052f8 | Merge branch 'main' into yarn-pwnage | 2024-06-27 13:06:55 +02:00
Piotr Osiewicz | 325602aa26 | Containerisation | 2024-06-27 12:31:44 +02:00
Piotr Osiewicz | ef0eab54f8 | Remove fs_zip, start adjusting worktree | 2024-06-27 10:49:24 +02:00
Piotr Osiewicz | 474da34946 | Add fs_zip crate | 2024-06-26 12:23:33 +02:00
Piotr Osiewicz | ef20c801c6 | WIP | 2024-06-25 18:56:07 +02:00
19 changed files with 396 additions and 179 deletions

Cargo.lock generated
View File

@@ -13316,6 +13316,7 @@ name = "worktree"
version = "0.1.0"
dependencies = [
"anyhow",
"async_zip",
"clock",
"collections",
"env_logger",

View File

@@ -221,11 +221,11 @@ fn collect_files(
.unwrap_or_default()
.to_string();
if entry.is_dir() {
if entry.is_container() {
// Auto-fold directories that contain no files
let mut child_entries = snapshot.child_entries(&entry.path);
if let Some(child) = child_entries.next() {
if child_entries.next().is_none() && child.kind.is_dir() {
if child_entries.next().is_none() && child.kind.is_container() {
if is_top_level_directory {
is_top_level_directory = false;
folded_directory_names_stack.push(

View File

@@ -306,7 +306,9 @@ impl RandomizedTest for ProjectCollaborationTest {
let worktree = worktree.read(cx);
worktree.is_visible()
&& worktree.entries(false, 0).any(|e| e.is_file())
&& worktree.root_entry().map_or(false, |e| e.is_dir())
&& worktree
.root_entry()
.map_or(false, |e| e.is_container())
})
.choose(rng)
});

View File

@@ -46,7 +46,7 @@ impl Match {
}
fn is_dir(&self, project: &Project, cx: &WindowContext) -> bool {
self.entry(project, cx).is_some_and(|e| e.is_dir())
self.entry(project, cx).is_some_and(|e| e.is_container())
|| self.suffix.as_ref().is_some_and(|s| s.ends_with('/'))
}
@@ -121,7 +121,7 @@ impl Match {
text.push_str(suffix);
let entry = self.entry(project, cx);
let color = if let Some(entry) = entry {
if entry.is_dir() {
if entry.is_container() {
Color::Accent
} else {
Color::Conflict
@@ -134,7 +134,7 @@ impl Match {
HighlightStyle::color(color.color(cx)),
));
offset += suffix.as_bytes().len();
if entry.is_some_and(|e| e.is_dir()) {
if entry.is_some_and(|e| e.is_container()) {
text.push(separator);
offset += separator.len_utf8();

View File

@@ -67,7 +67,10 @@ pub trait Fs: Send + Sync {
self.remove_file(path, options).await
}
async fn open_sync(&self, path: &Path) -> Result<Box<dyn io::Read>>;
async fn load(&self, path: &Path) -> Result<String>;
async fn load(&self, path: &Path) -> Result<String> {
Ok(String::from_utf8(self.load_bytes(path).await?)?)
}
async fn load_bytes(&self, path: &Path) -> Result<Vec<u8>>;
async fn atomic_write(&self, path: PathBuf, text: String) -> Result<()>;
async fn save(&self, path: &Path, text: &Rope, line_ending: LineEnding) -> Result<()>;
async fn canonicalize(&self, path: &Path) -> Result<PathBuf>;
@@ -319,6 +322,12 @@ impl Fs for RealFs {
Ok(text)
}
async fn load_bytes(&self, path: &Path) -> Result<Vec<u8>> {
let path = path.to_path_buf();
let bytes = smol::unblock(|| std::fs::read(path)).await?;
Ok(bytes)
}
async fn atomic_write(&self, path: PathBuf, data: String) -> Result<()> {
smol::unblock(move || {
let mut tmp_file = if cfg!(target_os = "linux") {
@@ -1433,6 +1442,10 @@ impl Fs for FakeFs {
Ok(String::from_utf8(content.clone())?)
}
async fn load_bytes(&self, path: &Path) -> Result<Vec<u8>> {
self.load_internal(path).await
}
async fn atomic_write(&self, path: PathBuf, data: String) -> Result<()> {
self.simulate_random_delay().await;
let path = normalize_path(path.as_path());
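For illustration (not part of this changeset): the trait change above makes load a provided method on top of a new required load_bytes, so each Fs implementation only has to supply raw bytes and UTF-8 decoding lives in one place. A minimal standalone sketch of that layering, simplified (not the actual zed Fs trait) and assuming the async-trait crate:

use anyhow::Result;
use async_trait::async_trait;
use std::path::Path;

#[async_trait]
trait FsLike: Send + Sync {
    // Required: read raw bytes (from disk, an in-memory fake, or an archive).
    async fn load_bytes(&self, path: &Path) -> Result<Vec<u8>>;

    // Provided: shared UTF-8 decoding for every implementor.
    async fn load(&self, path: &Path) -> Result<String> {
        Ok(String::from_utf8(self.load_bytes(path).await?)?)
    }
}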

View File

@@ -202,7 +202,7 @@ impl LspAdapter for TypeScriptLspAdapter {
"provideFormatter": true,
"hostInfo": "zed",
"tsserver": {
"path": "node_modules/typescript/lib",
"path": "/Users/hiro/Projects/repros/yarn-test/.yarn/sdks/typescript/lib",
},
"preferences": {
"includeInlayParameterNameHints": "all",

View File

@@ -162,8 +162,9 @@ impl LspAdapter for VtslsLspAdapter {
_: &Arc<dyn LspAdapterDelegate>,
) -> Result<Option<serde_json::Value>> {
Ok(Some(json!({
"typescript": {
"tsdk": "node_modules/typescript/lib",
"typescript":
{
"tsdk": "/Users/hiro/Projects/repros/yarn-test/.yarn/sdks/typescript/lib",
"format": {
"enable": true
},
@@ -191,6 +192,9 @@ impl LspAdapter for VtslsLspAdapter {
}
},
"vtsls": {
"typescript": {
"globalTsdk": "/Users/hiro/Projects/repros/yarn-test/.yarn/sdks/typescript/lib"
},
"experimental": {
"completion": {
"enableServerSideFuzzyMatch": true,
@@ -211,7 +215,7 @@ impl LspAdapter for VtslsLspAdapter {
"suggest": {
"completeFunctionCalls": true
},
"tsdk": "node_modules/typescript/lib",
"tsdk": "/Users/hiro/Projects/repros/yarn-test/.yarn/sdks/typescript/lib",
"format": {
"enable": true
},
@@ -239,6 +243,9 @@ impl LspAdapter for VtslsLspAdapter {
}
},
"vtsls": {
"typescript": {
"globalTsdk": "/Users/hiro/Projects/repros/yarn-test/.yarn/sdks/typescript/lib"
},
"experimental": {
"completion": {
"enableServerSideFuzzyMatch": true,

View File

@@ -496,7 +496,7 @@ impl OutlinePanel {
}
for child_dir in worktree
.child_entries(&entry.path)
.filter(|entry| entry.is_dir())
.filter(|entry| entry.is_container())
{
let removed = unfolded_dirs.remove(&child_dir.id);
if !removed {
@@ -1200,7 +1200,7 @@ impl OutlinePanel {
);
let mut current_entry = buffer_entry;
loop {
if current_entry.is_dir() {
if current_entry.is_container() {
if self
.collapsed_entries
.remove(&CollapsedEntry::Dir(worktree_id, current_entry.id))
@@ -1697,7 +1697,7 @@ impl OutlinePanel {
.insert(entry.id, (buffer_id, excerpts));
let mut current_entry = entry;
loop {
if current_entry.is_dir() {
if current_entry.is_container() {
let is_root =
worktree.root_entry().map(|entry| entry.id)
== Some(current_entry.id);
@@ -1790,7 +1790,7 @@ impl OutlinePanel {
.or_default()
.entry(parent.to_path_buf())
.or_default();
if entry.is_dir() {
if entry.is_container() {
children.dirs += 1;
} else {
children.files += 1;
@@ -1798,7 +1798,7 @@ impl OutlinePanel {
}
}
if entry.is_dir() {
if entry.is_container() {
Some(FsEntry::Directory(worktree_id, entry))
} else {
let (buffer_id, excerpts) = worktree_excerpts

View File

@@ -2141,12 +2141,14 @@ impl Project {
/// LanguageServerName is owned, because it is inserted into a map
pub fn open_local_buffer_via_lsp(
&mut self,
abs_path: lsp::Url,
mut abs_path: lsp::Url,
language_server_id: LanguageServerId,
language_server_name: LanguageServerName,
cx: &mut ModelContext<Self>,
) -> Task<Result<Model<Buffer>>> {
cx.spawn(move |this, mut cx| async move {
// Escape percent-encoded string.
let _ = abs_path.set_scheme("file");
let abs_path = abs_path
.to_file_path()
.map_err(|_| anyhow!("can't convert URI to path"))?;
@@ -2167,7 +2169,8 @@ impl Project {
);
})
.ok();
(worktree, PathBuf::new())
let worktree_root = worktree.update(&mut cx, |this, _| this.abs_path())?;
(worktree, abs_path.strip_prefix(worktree_root)?.into())
};
let project_path = ProjectPath {
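For illustration (not part of this changeset): assuming lsp::Url behaves like url::Url, the URI handling added above boils down to forcing a file scheme, converting the URI to a filesystem path, and stripping the worktree root to get the project-relative path. Paths and the helper name below are hypothetical:

use anyhow::anyhow;
use std::path::{Path, PathBuf};

fn to_project_relative(mut uri: url::Url, worktree_root: &Path) -> anyhow::Result<PathBuf> {
    // Force the scheme to "file" so to_file_path() accepts the URI.
    let _ = uri.set_scheme("file");
    let abs_path = uri
        .to_file_path()
        .map_err(|_| anyhow!("can't convert URI to path"))?;
    // Strip the worktree root to obtain the worktree-relative portion.
    Ok(abs_path.strip_prefix(worktree_root)?.to_path_buf())
}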
@@ -7901,7 +7904,7 @@ impl Project {
File {
is_local: true,
entry_id: Some(entry.id),
mtime: entry.mtime,
mtime: entry.data.disk_entry().map(|entry| entry.1),
path: entry.path.clone(),
worktree: worktree_handle.clone(),
is_deleted: false,
@@ -7911,7 +7914,7 @@ impl Project {
File {
is_local: true,
entry_id: Some(entry.id),
mtime: entry.mtime,
mtime: entry.data.disk_entry().map(|entry| entry.1),
path: entry.path.clone(),
worktree: worktree_handle.clone(),
is_deleted: false,
@@ -10954,7 +10957,7 @@ impl Project {
let worktree = worktree.read(cx);
worktree.is_visible()
&& worktree.is_local()
&& worktree.root_entry().map_or(false, |e| e.is_dir())
&& worktree.root_entry().map_or(false, |e| e.is_container())
})
.collect::<Vec<_>>();
let cwd = match available_worktrees.len() {
@@ -11411,7 +11414,7 @@ impl<'a> Iterator for PathMatchCandidateSetIter<'a> {
fn next(&mut self) -> Option<Self::Item> {
self.traversal.next().map(|entry| match entry.kind {
EntryKind::Dir => fuzzy::PathMatchCandidate {
EntryKind::Container => fuzzy::PathMatchCandidate {
path: &entry.path,
char_bag: CharBag::from_iter(entry.path.to_string_lossy().to_lowercase().chars()),
},
@@ -11419,7 +11422,7 @@ impl<'a> Iterator for PathMatchCandidateSetIter<'a> {
path: &entry.path,
char_bag,
},
EntryKind::UnloadedDir | EntryKind::PendingDir => unreachable!(),
EntryKind::UnloadedContainer | EntryKind::PendingContainer => unreachable!(),
})
}
}

View File

@@ -45,7 +45,7 @@ use workspace::{
notifications::{DetachAndPromptErr, NotifyTaskExt},
DraggedSelection, OpenInTerminal, SelectedEntry, Workspace,
};
use worktree::CreatedEntry;
use worktree::{CreatedEntry, EntryData};
const PROJECT_PANEL_KEY: &str = "ProjectPanel";
const NEW_ENTRY_ID: ProjectEntryId = ProjectEntryId::MAX;
@@ -456,7 +456,7 @@ impl ProjectPanel {
if let Some((worktree, entry)) = self.selected_entry(cx) {
let auto_fold_dirs = ProjectPanelSettings::get_global(cx).auto_fold_dirs;
let is_root = Some(entry) == worktree.root_entry();
let is_dir = entry.is_dir();
let is_dir = entry.is_container();
let is_foldable = auto_fold_dirs && self.is_foldable(entry, worktree);
let is_unfoldable = auto_fold_dirs && self.is_unfoldable(entry, worktree);
let worktree_id = worktree.id();
@@ -559,7 +559,7 @@ impl ProjectPanel {
}
fn is_unfoldable(&self, entry: &Entry, worktree: &Worktree) -> bool {
if !entry.is_dir() || self.unfolded_dir_ids.contains(&entry.id) {
if !entry.is_container() || self.unfolded_dir_ids.contains(&entry.id) {
return false;
}
@@ -568,7 +568,7 @@ impl ProjectPanel {
let mut child_entries = snapshot.child_entries(&parent_path);
if let Some(child) = child_entries.next() {
if child_entries.next().is_none() {
return child.kind.is_dir();
return child.kind.is_container();
}
}
};
@@ -576,13 +576,13 @@ impl ProjectPanel {
}
fn is_foldable(&self, entry: &Entry, worktree: &Worktree) -> bool {
if entry.is_dir() {
if entry.is_container() {
let snapshot = worktree.snapshot();
let mut child_entries = snapshot.child_entries(&entry.path);
if let Some(child) = child_entries.next() {
if child_entries.next().is_none() {
return child.kind.is_dir();
return child.kind.is_container();
}
}
}
@@ -591,7 +591,7 @@ impl ProjectPanel {
fn expand_selected_entry(&mut self, _: &ExpandSelectedEntry, cx: &mut ViewContext<Self>) {
if let Some((worktree, entry)) = self.selected_entry(cx) {
if entry.is_dir() {
if entry.is_container() {
let worktree_id = worktree.id();
let entry_id = entry.id;
let expanded_dir_ids =
@@ -911,7 +911,7 @@ impl ProjectPanel {
let worktree = worktree.read(cx);
if let Some(mut entry) = worktree.entry_for_id(entry_id) {
loop {
if entry.is_dir() {
if entry.is_container() {
if let Err(ix) = expanded_dir_ids.binary_search(&entry.id) {
expanded_dir_ids.insert(ix, entry.id);
}
@@ -963,7 +963,7 @@ impl ProjectPanel {
worktree_id,
entry_id,
is_new_entry: false,
is_dir: entry.is_dir(),
is_dir: entry.is_container(),
processing_filename: None,
});
let file_name = entry
@@ -1117,7 +1117,7 @@ impl ProjectPanel {
loop {
let mut child_entries_iter = snapshot.child_entries(path);
if let Some(child) = child_entries_iter.next() {
if child_entries_iter.next().is_none() && child.is_dir() {
if child_entries_iter.next().is_none() && child.is_container() {
self.unfolded_dir_ids.remove(&child.id);
path = &*child.path;
} else {
@@ -1259,7 +1259,9 @@ impl ProjectPanel {
) -> Option<PathBuf> {
let mut new_path = target_entry.path.to_path_buf();
// If we're pasting into a file, or a directory into itself, go up one level.
if target_entry.is_file() || (target_entry.is_dir() && target_entry.id == source.entry_id) {
if target_entry.is_file()
|| (target_entry.is_container() && target_entry.id == source.entry_id)
{
new_path.pop();
}
let clipboard_entry_file_name = self
@@ -1362,7 +1364,7 @@ impl ProjectPanel {
fn open_in_terminal(&mut self, _: &OpenInTerminal, cx: &mut ViewContext<Self>) {
if let Some((worktree, entry)) = self.selected_entry(cx) {
let abs_path = worktree.abs_path().join(&entry.path);
let working_directory = if entry.is_dir() {
let working_directory = if entry.is_container() {
Some(abs_path)
} else {
if entry.is_symlink {
@@ -1384,7 +1386,7 @@ impl ProjectPanel {
cx: &mut ViewContext<Self>,
) {
if let Some((worktree, entry)) = self.selected_entry(cx) {
if entry.is_dir() {
if entry.is_container() {
let include_root = self.project.read(cx).visible_worktrees(cx).count() > 1;
let dir_path = if include_root {
let mut full_path = PathBuf::from(worktree.root_name());
@@ -1540,7 +1542,7 @@ impl ProjectPanel {
let Some(entry) = worktree.entry_for_path(path) else {
continue;
};
if entry.is_dir() {
if entry.is_container() {
if let Err(idx) = expanded_dir_ids.binary_search(&entry.id) {
expanded_dir_ids.insert(idx, entry.id);
}
@@ -1583,12 +1585,12 @@ impl ProjectPanel {
};
let mut new_entry_parent_id = None;
let mut new_entry_kind = EntryKind::Dir;
let mut new_entry_kind = EntryKind::Container;
if let Some(edit_state) = &self.edit_state {
if edit_state.worktree_id == worktree_id && edit_state.is_new_entry {
new_entry_parent_id = Some(edit_state.entry_id);
new_entry_kind = if edit_state.is_dir {
EntryKind::Dir
EntryKind::Container
} else {
EntryKind::File(Default::default())
};
@@ -1599,7 +1601,7 @@ impl ProjectPanel {
let mut entry_iter = snapshot.entries(true, 0);
while let Some(entry) = entry_iter.entry() {
if auto_collapse_dirs
&& entry.kind.is_dir()
&& entry.kind.is_container()
&& !self.unfolded_dir_ids.contains(&entry.id)
{
if let Some(root_path) = snapshot.root_entry() {
@@ -1607,7 +1609,7 @@ impl ProjectPanel {
if let Some(child) = child_entries.next() {
if entry.path != root_path.path
&& child_entries.next().is_none()
&& child.kind.is_dir()
&& child.kind.is_container()
{
entry_iter.advance();
continue;
@@ -1618,12 +1620,16 @@ impl ProjectPanel {
visible_worktree_entries.push(entry.clone());
if Some(entry.id) == new_entry_parent_id {
let mtime = if let EntryData::DiskEntry { mtime, .. } = entry.data {
mtime
} else {
None
};
visible_worktree_entries.push(Entry {
id: NEW_ENTRY_ID,
kind: new_entry_kind,
path: entry.path.join("\0").into(),
inode: 0,
mtime: entry.mtime,
data: EntryData::DiskEntry { inode: 0, mtime },
is_ignored: entry.is_ignored,
is_external: false,
is_private: false,
@@ -2169,7 +2175,7 @@ impl ProjectPanel {
if !this.marked_entries.insert(selection) {
this.marked_entries.remove(&selection);
}
} else if kind.is_dir() {
} else if kind.is_container() {
this.toggle_expanded(entry_id, cx);
} else {
let click_count = event.up.click_count;
@@ -2596,7 +2602,7 @@ impl Panel for ProjectPanel {
|| project.visible_worktrees(cx).any(|tree| {
tree.read(cx)
.root_entry()
.map_or(false, |entry| entry.is_dir())
.map_or(false, |entry| entry.is_container())
})
}
}
@@ -5016,7 +5022,7 @@ mod tests {
}
let indent = " ".repeat(details.depth);
let icon = if details.kind.is_dir() {
let icon = if details.kind.is_container() {
if details.is_expanded {
"v "
} else {

View File

@@ -1685,6 +1685,7 @@ message Entry {
string path = 3;
uint64 inode = 4;
Timestamp mtime = 5;
string contents = 10;
bool is_symlink = 6;
bool is_ignored = 7;
bool is_external = 8;

View File

@@ -2401,7 +2401,7 @@ pub mod tests {
.entry_for_path(&(worktree_id, "a").into(), cx)
.expect("no entry for /a/ directory")
});
assert!(a_dir_entry.is_dir());
assert!(a_dir_entry.is_container());
window
.update(cx, |workspace, cx| {
ProjectSearchView::new_search_in_directory(workspace, &a_dir_entry.path, cx)

View File

@@ -667,7 +667,7 @@ impl WorktreeIndex {
}
}
if entry.mtime != saved_mtime {
if entry.data.disk_entry().map(|entry| entry.1) != saved_mtime {
let handle = entries_being_indexed.insert(entry.id);
updated_entries_tx.send((entry.clone(), handle)).await?;
}
@@ -768,7 +768,7 @@ impl WorktreeIndex {
chunks: chunk_text(&text, language.as_ref(), &entry.path),
handle,
path: entry.path,
mtime: entry.mtime,
mtime: entry.data.disk_entry().map(|entry| entry.1),
text,
};

View File

@@ -1170,7 +1170,7 @@ fn current_project_directory(workspace: &Workspace, cx: &AppContext) -> Option<P
fn get_path_from_wt(wt: &LocalWorktree) -> Option<PathBuf> {
wt.root_entry()
.filter(|re| re.is_dir())
.filter(|re| re.is_container())
.map(|_| wt.abs_path().to_path_buf())
}

View File

@@ -4615,7 +4615,7 @@ async fn join_channel_internal(
&& project.visible_worktrees(cx).any(|tree| {
tree.read(cx)
.root_entry()
.map_or(false, |entry| entry.is_dir())
.map_or(false, |entry| entry.is_container())
})
{
Some(workspace.project.clone())

View File

@@ -23,6 +23,7 @@ test-support = [
[dependencies]
anyhow.workspace = true
async_zip.workspace = true
clock.workspace = true
collections.workspace = true
fs.workspace = true

View File

@@ -49,7 +49,7 @@ use std::{
future::Future,
mem,
ops::{AddAssign, Deref, DerefMut, Sub},
path::{Path, PathBuf},
path::{Component, Path, PathBuf},
pin::Pin,
sync::{
atomic::{AtomicUsize, Ordering::SeqCst},
@@ -371,6 +371,15 @@ static EMPTY_PATH: &str = "";
impl EventEmitter<Event> for Worktree {}
// Splits a zip path into the path to the .zip file itself plus its suffix within the archive.
fn zip_path(path: &Path) -> Option<(Arc<Path>, Arc<Path>)> {
let path_str = path.to_str()?;
let (zip, inner) = path_str.split_once(".zip/")?;
let zip_path = PathBuf::from(format!("{}.zip", zip));
let inner_path = PathBuf::from(inner);
Some((Arc::from(zip_path), Arc::from(inner_path)))
}
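For illustration (not part of this changeset), the helper above splits an ordinary path string on its first ".zip/" component; hypothetical inputs:

fn zip_path_examples() {
    let (zip, inner) = zip_path(Path::new("/tmp/archive.zip/src/main.rs")).unwrap();
    assert_eq!(zip.as_ref(), Path::new("/tmp/archive.zip"));
    assert_eq!(inner.as_ref(), Path::new("src/main.rs"));

    // Paths without a ".zip/" component (including a bare "/tmp/archive.zip") yield None.
    assert!(zip_path(Path::new("/tmp/src/main.rs")).is_none());
}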
impl Worktree {
pub async fn local(
path: impl Into<Arc<Path>>,
@@ -379,9 +388,14 @@ impl Worktree {
next_entry_id: Arc<AtomicUsize>,
cx: &mut AsyncAppContext,
) -> Result<Model<Self>> {
let abs_path = path.into();
let mut abs_path = path.into();
let zip_path = zip_path(&abs_path);
if let Some(zip_path) = zip_path.as_ref() {
abs_path = zip_path.0.clone();
}
let metadata = fs
.metadata(&abs_path)
.metadata(&zip_path.as_ref().map(|(path, _)| path).unwrap_or(&abs_path))
.await
.context("failed to stat worktree path")?;
@@ -422,18 +436,31 @@ impl Worktree {
abs_path,
),
};
if let Some(metadata) = metadata {
snapshot.insert_entry(
Entry::new(
Arc::from(Path::new("")),
&metadata,
&next_entry_id,
snapshot.root_char_bag,
None,
),
fs.as_ref(),
if let Some(metadata) = metadata.as_ref() {
let mut entry = Entry::new(
Arc::from(Path::new("")),
EntryArg::Metadata(metadata),
&next_entry_id,
snapshot.root_char_bag,
None,
);
if zip_path.is_some() {
entry.kind = EntryKind::UnloadedContainer;
}
snapshot.insert_entry(entry, fs.as_ref());
}
if let Some((_, nested_path)) = zip_path {
let path = char_bag_for_path(snapshot.root_char_bag, &nested_path);
let mut entry = Entry::new(
nested_path,
EntryArg::Contents(String::from("I *really* like trains").into()),
&next_entry_id,
snapshot.root_char_bag,
None,
);
entry.kind = EntryKind::File(path);
snapshot.insert_entry(entry, fs.as_ref());
}
let (scan_requests_tx, scan_requests_rx) = channel::unbounded();
@@ -1254,7 +1281,6 @@ impl LocalWorktree {
cx.spawn(|this, mut cx| async move {
let abs_path = abs_path?;
let text = fs.load(&abs_path).await?;
let mut index_task = None;
let snapshot = this.update(&mut cx, |this, _| this.as_local().unwrap().snapshot())?;
if let Some(repo) = snapshot.repository_for_path(&path) {
@@ -1283,7 +1309,6 @@ impl LocalWorktree {
}
}
}
let diff_base = if let Some(index_task) = index_task {
index_task.await
} else {
@@ -1293,7 +1318,16 @@ impl LocalWorktree {
let worktree = this
.upgrade()
.ok_or_else(|| anyhow!("worktree was dropped"))?;
let file = match entry.await? {
let entry = entry.await?;
dbg!(&abs_path);
let text =
if let Some(contents) = entry.as_ref().and_then(|entry| entry.data.contents()) {
dbg!("Grabbing existing contents");
contents.to_string()
} else {
fs.load(&abs_path).await?
};
let file = match entry {
Some(entry) => File::for_entry(entry, worktree),
None => {
let metadata = fs
@@ -1732,10 +1766,13 @@ impl LocalWorktree {
refresh.recv().await;
log::trace!("refreshed entry {path:?} in {:?}", t0.elapsed());
let new_entry = this.update(&mut cx, |this, _| {
this.entry_for_path(path)
let ret = this
.entry_for_path(path)
.cloned()
.ok_or_else(|| anyhow!("failed to read path after update"))
.ok_or_else(|| anyhow!("failed to read path after update"));
ret
})??;
Ok(Some(new_entry))
})
}
@@ -2236,7 +2273,7 @@ impl Snapshot {
None
};
} else {
if result[result_ix].is_dir() {
if result[result_ix].is_container() {
cursor.seek_forward(
&TraversalTarget::Path(&result[result_ix].path),
Bias::Left,
@@ -2313,7 +2350,8 @@ impl Snapshot {
}
pub fn inode_for_path(&self, path: impl AsRef<Path>) -> Option<u64> {
self.entry_for_path(path.as_ref()).map(|e| e.inode)
self.entry_for_path(path.as_ref())
.and_then(|e| e.data.inode())
}
}
@@ -2431,7 +2469,7 @@ impl LocalSnapshot {
}
}
if entry.kind == EntryKind::PendingDir {
if entry.kind == EntryKind::PendingContainer {
if let Some(existing_entry) =
self.entries_by_path.get(&PathKey(entry.path.clone()), &())
{
@@ -2463,7 +2501,9 @@ impl LocalSnapshot {
let mut inodes = TreeSet::default();
for ancestor in path.ancestors().skip(1) {
if let Some(entry) = self.entry_for_path(ancestor) {
inodes.insert(entry.inode);
if let Some(inode) = entry.data.inode() {
inodes.insert(inode);
}
}
}
inodes
@@ -2503,9 +2543,9 @@ impl LocalSnapshot {
#[cfg(test)]
pub(crate) fn expanded_entries(&self) -> impl Iterator<Item = &Entry> {
self.entries_by_path
.cursor::<()>()
.filter(|entry| entry.kind == EntryKind::Dir && (entry.is_external || entry.is_ignored))
self.entries_by_path.cursor::<()>().filter(|entry| {
entry.kind == EntryKind::Container && (entry.is_external || entry.is_ignored)
})
}
#[cfg(test)]
@@ -2629,25 +2669,27 @@ impl BackgroundScannerState {
}
}
}
if !ancestor_inodes.contains(&entry.inode) {
ancestor_inodes.insert(entry.inode);
scan_job_tx
.try_send(ScanJob {
abs_path,
path,
ignore_stack,
scan_queue: scan_job_tx.clone(),
ancestor_inodes,
is_external: entry.is_external,
containing_repository,
})
.unwrap();
if let Some(inode) = entry.data.inode() {
if !ancestor_inodes.contains(&inode) {
ancestor_inodes.insert(inode);
scan_job_tx
.try_send(ScanJob {
abs_path,
path,
ignore_stack,
scan_queue: scan_job_tx.clone(),
ancestor_inodes,
is_external: entry.is_external,
containing_repository,
})
.unwrap();
}
}
}
fn reuse_entry_id(&mut self, entry: &mut Entry) {
if let Some(mtime) = entry.mtime {
if let Some(removed_entry_id) = self.removed_entry_ids.remove(&(entry.inode, mtime)) {
if let Some(key) = entry.data.disk_entry() {
if let Some(removed_entry_id) = self.removed_entry_ids.remove(&key) {
entry.id = removed_entry_id;
} else if let Some(existing_entry) = self.snapshot.entry_for_path(&entry.path) {
entry.id = existing_entry.id;
@@ -2689,8 +2731,10 @@ impl BackgroundScannerState {
};
match parent_entry.kind {
EntryKind::PendingDir | EntryKind::UnloadedDir => parent_entry.kind = EntryKind::Dir,
EntryKind::Dir => {}
EntryKind::PendingContainer | EntryKind::UnloadedContainer => {
parent_entry.kind = EntryKind::Container
}
EntryKind::Container => {}
_ => return,
}
@@ -2742,14 +2786,13 @@ impl BackgroundScannerState {
let mut entries_by_id_edits = Vec::new();
for entry in removed_entries.cursor::<()>() {
if let Some(mtime) = entry.mtime {
let removed_entry_id = self
.removed_entry_ids
.entry((entry.inode, mtime))
.or_insert(entry.id);
if let Some(key) = entry.data.disk_entry() {
let removed_entry_id = self.removed_entry_ids.entry(key).or_insert(entry.id);
*removed_entry_id = cmp::max(*removed_entry_id, entry.id);
}
entries_by_id_edits.push(Edit::Remove(entry.id));
if entry.data.inode().is_some() {
entries_by_id_edits.push(Edit::Remove(entry.id));
}
}
self.snapshot.entries_by_id.edit(entries_by_id_edits, &());
@@ -3050,7 +3093,7 @@ impl File {
Arc::new(Self {
worktree,
path: entry.path.clone(),
mtime: entry.mtime,
mtime: entry.data.disk_entry().map(|entry| entry.1),
entry_id: Some(entry.id),
is_local: true,
is_deleted: false,
@@ -3101,14 +3144,47 @@ impl File {
}
}
#[derive(Clone, Debug, PartialEq, Eq, Hash)]
pub enum EntryData {
DiskEntry {
inode: u64,
mtime: Option<SystemTime>,
},
Contents {
inner: Arc<str>,
},
}
impl EntryData {
fn inode(&self) -> Option<u64> {
if let Self::DiskEntry { inode, .. } = self {
Some(*inode)
} else {
None
}
}
pub fn disk_entry(&self) -> Option<(u64, SystemTime)> {
if let Self::DiskEntry { inode, mtime } = self {
mtime.map(|mtime| (*inode, mtime))
} else {
None
}
}
fn contents(&self) -> Option<Arc<str>> {
if let Self::Contents { inner } = self {
Some(inner.clone())
} else {
None
}
}
}
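For illustration (not part of this changeset): the new EntryData keeps disk-backed entries (inode plus mtime) apart from entries whose contents are already known, such as files read out of a .zip archive. A hypothetical caller-side match, just to show the shape of the two variants:

fn describe(data: &EntryData) -> String {
    match data {
        EntryData::DiskEntry { inode, mtime } => {
            format!("on disk: inode {inode}, mtime {mtime:?}")
        }
        EntryData::Contents { inner } => {
            format!("in memory: {} bytes", inner.len())
        }
    }
}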
#[derive(Clone, Debug, PartialEq, Eq, Hash)]
pub struct Entry {
pub id: ProjectEntryId,
pub kind: EntryKind,
pub path: Arc<Path>,
pub inode: u64,
pub mtime: Option<SystemTime>,
pub data: EntryData,
pub canonical_path: Option<PathBuf>,
pub is_symlink: bool,
/// Whether this entry is ignored by Git.
@@ -3132,9 +3208,9 @@ pub struct Entry {
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
pub enum EntryKind {
UnloadedDir,
PendingDir,
Dir,
UnloadedContainer,
PendingContainer,
Container,
File(CharBag),
}
@@ -3162,26 +3238,47 @@ pub struct GitRepositoryChange {
pub type UpdatedEntriesSet = Arc<[(Arc<Path>, ProjectEntryId, PathChange)]>;
pub type UpdatedGitRepositoriesSet = Arc<[(Arc<Path>, GitRepositoryChange)]>;
enum EntryArg<'a> {
Metadata(&'a fs::Metadata),
Contents(Arc<str>),
}
impl Entry {
fn new(
path: Arc<Path>,
metadata: &fs::Metadata,
metadata: EntryArg,
next_entry_id: &AtomicUsize,
root_char_bag: CharBag,
canonical_path: Option<PathBuf>,
) -> Self {
Self {
id: ProjectEntryId::new(next_entry_id),
kind: if metadata.is_dir {
EntryKind::PendingDir
let kind = if let EntryArg::Metadata(meta) = &metadata {
if meta.is_dir {
EntryKind::PendingContainer
} else if path.ends_with(".zip") {
EntryKind::UnloadedContainer
} else {
EntryKind::File(char_bag_for_path(root_char_bag, &path))
},
}
} else {
EntryKind::File(char_bag_for_path(root_char_bag, &path))
};
let (data, is_symlink) = match metadata {
EntryArg::Metadata(meta) => (
EntryData::DiskEntry {
inode: meta.inode,
mtime: Some(meta.mtime),
},
meta.is_symlink,
),
EntryArg::Contents(contents) => (EntryData::Contents { inner: contents }, false),
};
Self {
id: ProjectEntryId::new(next_entry_id),
kind,
path,
inode: metadata.inode,
mtime: Some(metadata.mtime),
data,
canonical_path,
is_symlink: metadata.is_symlink,
is_symlink,
is_ignored: false,
is_external: false,
is_private: false,
@@ -3190,11 +3287,11 @@ impl Entry {
}
pub fn is_created(&self) -> bool {
self.mtime.is_some()
self.data.disk_entry().is_some()
}
pub fn is_dir(&self) -> bool {
self.kind.is_dir()
pub fn is_container(&self) -> bool {
self.kind.is_container()
}
pub fn is_file(&self) -> bool {
@@ -3207,15 +3304,15 @@ impl Entry {
}
impl EntryKind {
pub fn is_dir(&self) -> bool {
pub fn is_container(&self) -> bool {
matches!(
self,
EntryKind::Dir | EntryKind::PendingDir | EntryKind::UnloadedDir
EntryKind::Container | EntryKind::PendingContainer | EntryKind::UnloadedContainer
)
}
pub fn is_unloaded(&self) -> bool {
matches!(self, EntryKind::UnloadedDir)
matches!(self, EntryKind::UnloadedContainer)
}
pub fn is_file(&self) -> bool {
@@ -3447,6 +3544,7 @@ impl BackgroundScanner {
root_entry.is_ignored = true;
state.insert_entry(root_entry.clone(), self.fs.as_ref());
}
dbg!("Enqueue scan", &root_abs_path);
state.enqueue_scan_dir(root_abs_path, &root_entry, &scan_job_tx);
}
};
@@ -3565,6 +3663,7 @@ impl BackgroundScanner {
async fn process_events(&mut self, mut abs_paths: Vec<PathBuf>) {
let root_path = self.state.lock().snapshot.abs_path.clone();
let root_canonical_path = match self.fs.canonicalize(&root_path).await {
Ok(path) => path,
Err(err) => {
@@ -3614,7 +3713,7 @@ impl BackgroundScanner {
let parent_dir_is_loaded = relative_path.parent().map_or(true, |parent| {
snapshot
.entry_for_path(parent)
.map_or(false, |entry| entry.kind == EntryKind::Dir)
.map_or(false, |entry| entry.kind == EntryKind::Container)
});
if !parent_dir_is_loaded {
log::debug!("ignoring event {relative_path:?} within unloaded directory");
@@ -3673,10 +3772,11 @@ impl BackgroundScanner {
{
let mut state = self.state.lock();
let root_path = state.snapshot.abs_path.clone();
dbg!(&root_path);
for path in paths {
for ancestor in path.ancestors() {
if let Some(entry) = state.snapshot.entry_for_path(ancestor) {
if entry.kind == EntryKind::UnloadedDir {
if entry.kind == EntryKind::UnloadedContainer {
let abs_path = root_path.join(ancestor);
state.enqueue_scan_dir(abs_path.into(), entry, &scan_job_tx);
state.paths_to_scan.insert(path.clone());
@@ -3788,6 +3888,47 @@ impl BackgroundScanner {
.is_ok()
}
async fn scan_zip(&self, _job: &ScanJob) -> Result<()> {
dbg!(&_job.abs_path);
let abs_path = _job.abs_path.canonicalize()?;
let file_contents = self.fs.load_bytes(&abs_path).await.unwrap();
let zip = async_zip::base::read::mem::ZipFileReader::new(file_contents)
.await
.unwrap();
let mut new_entries = Vec::new();
let root_char_bag = { self.state.lock().snapshot.root_char_bag };
for i in 0..zip.file().entries().len() {
let Ok(mut entry) = zip.reader_with_entry(i).await else {
continue;
};
let path: PathBuf = entry.entry().filename().as_str().unwrap().into();
let path: Arc<Path> = path.into();
let kind = if entry.entry().dir().unwrap_or(false) {
EntryKind::Container
} else {
EntryKind::File(char_bag_for_path(root_char_bag, &path))
};
let mut buf = String::with_capacity(entry.entry().uncompressed_size() as usize);
entry.read_to_string_checked(&mut buf).await?;
let mut entry = Entry::new(
path,
EntryArg::Contents(buf.into()),
self.next_entry_id.as_ref(),
self.state.lock().snapshot.root_char_bag,
None,
);
entry.kind = kind;
new_entries.push(entry);
}
let root_path: Arc<Path> = Arc::from(Path::new(""));
self.state
.lock()
.populate_dir(&root_path, new_entries, None);
Ok(())
}
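For illustration (not part of this changeset), a minimal standalone version of the async_zip flow that scan_zip uses above, relying only on the reader API already exercised in this diff (ZipFileReader::new, reader_with_entry, filename, dir):

async fn list_zip_entries(bytes: Vec<u8>) -> anyhow::Result<Vec<(String, bool)>> {
    let zip = async_zip::base::read::mem::ZipFileReader::new(bytes).await?;
    let mut entries = Vec::new();
    for i in 0..zip.file().entries().len() {
        let reader = zip.reader_with_entry(i).await?;
        let entry = reader.entry();
        // filename() is the stored path; dir() reports whether it is a directory entry.
        entries.push((entry.filename().as_str()?.to_string(), entry.dir()?));
    }
    Ok(entries)
}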
async fn scan_dir(&self, job: &ScanJob) -> Result<()> {
let root_abs_path;
let root_char_bag;
@@ -3809,10 +3950,19 @@ impl BackgroundScanner {
let mut root_canonical_path = None;
let mut new_entries: Vec<Entry> = Vec::new();
let mut new_jobs: Vec<Option<ScanJob>> = Vec::new();
let mut child_paths = self
.fs
.read_dir(&job.abs_path)
.await?
let dir_contents = self.fs.read_dir(&job.abs_path).await;
if dir_contents.is_err()
&& job.abs_path.components().last().map_or(false, |component| {
if let Component::Normal(last) = component {
Path::new(last).extension() == Some(OsStr::new("zip"))
} else {
false
}
})
{
return self.scan_zip(&job).await;
}
let mut child_paths = dir_contents?
.filter_map(|entry| async {
match entry {
Ok(entry) => Some(entry),
@@ -3886,7 +4036,7 @@ impl BackgroundScanner {
let mut child_entry = Entry::new(
child_path.clone(),
&child_metadata,
EntryArg::Metadata(&child_metadata),
&next_entry_id,
root_char_bag,
None,
@@ -3927,29 +4077,31 @@ impl BackgroundScanner {
child_entry.canonical_path = Some(canonical_path);
}
if child_entry.is_dir() {
if child_entry.is_container() {
child_entry.is_ignored = ignore_stack.is_abs_path_ignored(&child_abs_path, true);
// Avoid recursing until crash in the case of a recursive symlink
if job.ancestor_inodes.contains(&child_entry.inode) {
new_jobs.push(None);
} else {
let mut ancestor_inodes = job.ancestor_inodes.clone();
ancestor_inodes.insert(child_entry.inode);
if let Some(inode) = child_entry.data.inode() {
if job.ancestor_inodes.contains(&inode) {
new_jobs.push(None);
} else {
let mut ancestor_inodes = job.ancestor_inodes.clone();
ancestor_inodes.insert(inode);
new_jobs.push(Some(ScanJob {
abs_path: child_abs_path.clone(),
path: child_path,
is_external: child_entry.is_external,
ignore_stack: if child_entry.is_ignored {
IgnoreStack::all()
} else {
ignore_stack.clone()
},
ancestor_inodes,
scan_queue: job.scan_queue.clone(),
containing_repository: containing_repository.clone(),
}));
new_jobs.push(Some(ScanJob {
abs_path: child_abs_path.clone(),
path: child_path,
is_external: child_entry.is_external,
ignore_stack: if child_entry.is_ignored {
IgnoreStack::all()
} else {
ignore_stack.clone()
},
ancestor_inodes,
scan_queue: job.scan_queue.clone(),
containing_repository: containing_repository.clone(),
}));
}
}
} else {
child_entry.is_ignored = ignore_stack.is_abs_path_ignored(&child_abs_path, false);
@@ -3980,12 +4132,12 @@ impl BackgroundScanner {
let mut job_ix = 0;
for entry in &mut new_entries {
state.reuse_entry_id(entry);
if entry.is_dir() {
if entry.is_container() {
if state.should_scan_directory(entry) {
job_ix += 1;
} else {
log::debug!("defer scanning directory {:?}", entry.path);
entry.kind = EntryKind::UnloadedDir;
entry.kind = EntryKind::UnloadedContainer;
new_jobs.remove(job_ix);
}
}
@@ -4015,7 +4167,7 @@ impl BackgroundScanner {
abs_paths
.iter()
.map(|abs_path| async move {
let metadata = self.fs.metadata(abs_path).await?;
let metadata = self.fs.metadata(abs_path).await.ok().flatten();
if let Some(metadata) = metadata {
let canonical_path = self.fs.canonicalize(abs_path).await?;
@@ -4065,7 +4217,7 @@ impl BackgroundScanner {
let mut fs_entry = Entry::new(
path.clone(),
metadata,
EntryArg::Metadata(metadata),
self.next_entry_id.as_ref(),
state.snapshot.root_char_bag,
if metadata.is_symlink {
@@ -4075,7 +4227,7 @@ impl BackgroundScanner {
},
);
let is_dir = fs_entry.is_dir();
let is_dir = fs_entry.is_container();
fs_entry.is_ignored = ignore_stack.is_abs_path_ignored(&abs_path, is_dir);
fs_entry.is_external = !canonical_path.starts_with(&root_canonical_path);
@@ -4089,14 +4241,14 @@ impl BackgroundScanner {
}
}
if let (Some(scan_queue_tx), true) = (&scan_queue_tx, fs_entry.is_dir()) {
if let (Some(scan_queue_tx), true) = (&scan_queue_tx, fs_entry.is_container()) {
if state.should_scan_directory(&fs_entry)
|| (fs_entry.path.as_os_str().is_empty()
&& abs_path.file_name() == Some(*DOT_GIT))
{
state.enqueue_scan_dir(abs_path, &fs_entry, scan_queue_tx);
} else {
fs_entry.kind = EntryKind::UnloadedDir;
fs_entry.kind = EntryKind::UnloadedContainer;
}
}
@@ -4238,9 +4390,9 @@ impl BackgroundScanner {
for mut entry in snapshot.child_entries(path).cloned() {
let was_ignored = entry.is_ignored;
let abs_path: Arc<Path> = snapshot.abs_path().join(&entry.path).into();
entry.is_ignored = ignore_stack.is_abs_path_ignored(&abs_path, entry.is_dir());
entry.is_ignored = ignore_stack.is_abs_path_ignored(&abs_path, entry.is_container());
if entry.is_dir() {
if entry.is_container() {
let child_ignore_stack = if entry.is_ignored {
IgnoreStack::all()
} else {
@@ -4270,7 +4422,7 @@ impl BackgroundScanner {
let mut path_entry = snapshot.entries_by_id.get(&entry.id, &()).unwrap().clone();
path_entry.scan_id = snapshot.scan_id;
path_entry.is_ignored = entry.is_ignored;
if !entry.is_dir() && !entry.is_ignored && !entry.is_external {
if !entry.is_container() && !entry.is_ignored && !entry.is_external {
if let Some((ref repo_entry, local_repo)) = repo {
if let Ok(repo_path) = repo_entry.relativize(&snapshot, &entry.path) {
entry.git_status = local_repo.repo_ptr.status(&repo_path);
@@ -4923,7 +5075,7 @@ impl<'a> Traversal<'a> {
);
if let Some(entry) = self.cursor.item() {
if (self.include_files || !entry.is_file())
&& (self.include_dirs || !entry.is_dir())
&& (self.include_dirs || !entry.is_container())
&& (self.include_ignored || !entry.is_ignored)
{
return true;
@@ -5036,12 +5188,31 @@ impl<'a> Iterator for ChildEntriesIter<'a> {
impl<'a> From<&'a Entry> for proto::Entry {
fn from(entry: &'a Entry) -> Self {
let inode;
let mtime;
let contents;
match entry.data {
EntryData::DiskEntry {
inode: entry_inode,
mtime: entry_mtime,
} => {
contents = String::new();
inode = entry_inode;
mtime = entry_mtime.map(|mtime| mtime.into());
}
EntryData::Contents { ref inner } => {
contents = inner.to_string();
inode = 0;
mtime = None;
}
}
Self {
id: entry.id.to_proto(),
is_dir: entry.is_dir(),
is_dir: entry.is_container(),
path: entry.path.to_string_lossy().into(),
inode: entry.inode,
mtime: entry.mtime.map(|time| time.into()),
inode,
mtime,
contents,
is_symlink: entry.is_symlink,
is_ignored: entry.is_ignored,
is_external: entry.is_external,
@@ -5055,19 +5226,28 @@ impl<'a> TryFrom<(&'a CharBag, proto::Entry)> for Entry {
fn try_from((root_char_bag, entry): (&'a CharBag, proto::Entry)) -> Result<Self> {
let kind = if entry.is_dir {
EntryKind::Dir
EntryKind::Container
} else {
let mut char_bag = *root_char_bag;
char_bag.extend(entry.path.chars().map(|c| c.to_ascii_lowercase()));
EntryKind::File(char_bag)
};
let path: Arc<Path> = PathBuf::from(entry.path).into();
let data = if entry.inode != 0 {
EntryData::DiskEntry {
inode: entry.inode,
mtime: entry.mtime.map(|time| time.into()),
}
} else {
EntryData::Contents {
inner: entry.contents.into(),
}
};
Ok(Entry {
id: ProjectEntryId::from_proto(entry.id),
kind,
path,
inode: entry.inode,
mtime: entry.mtime.map(|time| time.into()),
data,
canonical_path: None,
is_ignored: entry.is_ignored,
is_external: entry.is_external,

View File

@@ -241,7 +241,7 @@ async fn test_symlinks_pointing_outside(cx: &mut TestAppContext) {
assert_eq!(
tree.entry_for_path("deps/dep-dir2").unwrap().kind,
EntryKind::UnloadedDir
EntryKind::UnloadedContainer
);
});
@@ -1194,11 +1194,14 @@ async fn test_create_directory_during_initial_scan(cx: &mut TestAppContext) {
.unwrap()
.to_included()
.unwrap();
assert!(entry.is_dir());
assert!(entry.is_container());
cx.executor().run_until_parked();
tree.read_with(cx, |tree, _| {
assert_eq!(tree.entry_for_path("a/e").unwrap().kind, EntryKind::Dir);
assert_eq!(
tree.entry_for_path("a/e").unwrap().kind,
EntryKind::Container
);
});
let snapshot2 = tree.update(cx, |tree, _| tree.as_local().unwrap().snapshot());
@@ -1248,8 +1251,8 @@ async fn test_create_dir_all_on_create_entry(cx: &mut TestAppContext) {
cx.executor().run_until_parked();
tree_fake.read_with(cx, |tree, _| {
assert!(tree.entry_for_path("a/b/c/d.txt").unwrap().is_file());
assert!(tree.entry_for_path("a/b/c/").unwrap().is_dir());
assert!(tree.entry_for_path("a/b/").unwrap().is_dir());
assert!(tree.entry_for_path("a/b/c/").unwrap().is_container());
assert!(tree.entry_for_path("a/b/").unwrap().is_container());
});
let fs_real = Arc::new(RealFs::default());
@@ -1282,8 +1285,8 @@ async fn test_create_dir_all_on_create_entry(cx: &mut TestAppContext) {
cx.executor().run_until_parked();
tree_real.read_with(cx, |tree, _| {
assert!(tree.entry_for_path("a/b/c/d.txt").unwrap().is_file());
assert!(tree.entry_for_path("a/b/c/").unwrap().is_dir());
assert!(tree.entry_for_path("a/b/").unwrap().is_dir());
assert!(tree.entry_for_path("a/b/c/").unwrap().is_container());
assert!(tree.entry_for_path("a/b/").unwrap().is_container());
});
// Test smallest change
@@ -1320,9 +1323,9 @@ async fn test_create_dir_all_on_create_entry(cx: &mut TestAppContext) {
cx.executor().run_until_parked();
tree_real.read_with(cx, |tree, _| {
assert!(tree.entry_for_path("d/e/f/g.txt").unwrap().is_file());
assert!(tree.entry_for_path("d/e/f").unwrap().is_dir());
assert!(tree.entry_for_path("d/e/").unwrap().is_dir());
assert!(tree.entry_for_path("d/").unwrap().is_dir());
assert!(tree.entry_for_path("d/e/f").unwrap().is_container());
assert!(tree.entry_for_path("d/e/").unwrap().is_container());
assert!(tree.entry_for_path("d/").unwrap().is_container());
});
}
@@ -1558,8 +1561,8 @@ async fn test_random_worktree_changes(cx: &mut TestAppContext, mut rng: StdRng)
fn ignore_pending_dir(entry: &Entry) -> Entry {
let mut entry = entry.clone();
if entry.kind.is_dir() {
entry.kind = EntryKind::Dir
if entry.kind.is_container() {
entry.kind = EntryKind::Container
}
entry
}
@@ -1620,7 +1623,7 @@ fn randomly_mutate_worktree(
}
..=66 if entry.path.as_ref() != Path::new("") => {
let other_entry = snapshot.entries(false, 0).choose(rng).unwrap();
let new_parent_path = if other_entry.is_dir() {
let new_parent_path = if other_entry.is_container() {
other_entry.path.clone()
} else {
other_entry.path.parent().unwrap().into()
@@ -1643,7 +1646,7 @@ fn randomly_mutate_worktree(
})
}
_ => {
if entry.is_dir() {
if entry.is_container() {
let child_path = entry.path.join(random_filename(rng));
let is_dir = rng.gen_bool(0.3);
log::info!(

View File

@@ -770,7 +770,7 @@ fn open_local_file(
let worktree = project
.read(cx)
.visible_worktrees(cx)
.find_map(|tree| tree.read(cx).root_entry()?.is_dir().then_some(tree));
.find_map(|tree| tree.read(cx).root_entry()?.is_container().then_some(tree));
if let Some(worktree) = worktree {
let tree_id = worktree.read(cx).id();
cx.spawn(|workspace, mut cx| async move {