Compare commits
11 Commits
remote-ima...v0.156.2
| Author | SHA1 | Date |
|---|---|---|
|  | 0b182b286a |  |
|  | 61d8df5af9 |  |
|  | 60245060b9 |  |
|  | 4cb04509da |  |
|  | b7aeef4253 |  |
|  | c844f1caa5 |  |
|  | 597d651d93 |  |
|  | f6d974f3b1 |  |
|  | f3bf41aea2 |  |
|  | e6cc9f5dfa |  |
|  | 8ee952076a |  |
Cargo.lock (generated, 2 lines changed)
@@ -14398,7 +14398,7 @@ dependencies = [
 [[package]]
 name = "zed"
-version = "0.156.0"
+version = "0.156.2"
 dependencies = [
  "activity_indicator",
  "anyhow",
@@ -58,27 +58,32 @@ struct Args {
     dev_server_token: Option<String>,
 }
 
-fn parse_path_with_position(argument_str: &str) -> Result<String, std::io::Error> {
-    let path = PathWithPosition::parse_str(argument_str);
-    let curdir = env::current_dir()?;
-
-    let canonicalized = path.map_path(|path| match fs::canonicalize(&path) {
-        Ok(path) => Ok(path),
-        Err(e) => {
-            if let Some(mut parent) = path.parent() {
-                if parent == Path::new("") {
-                    parent = &curdir
-                }
-                match fs::canonicalize(parent) {
-                    Ok(parent) => Ok(parent.join(path.file_name().unwrap())),
-                    Err(_) => Err(e),
-                }
-            } else {
-                Err(e)
-            }
-        }
-    })?;
-    Ok(canonicalized.to_string(|path| path.display().to_string()))
+fn parse_path_with_position(argument_str: &str) -> anyhow::Result<String> {
+    let canonicalized = match Path::new(argument_str).canonicalize() {
+        Ok(existing_path) => PathWithPosition::from_path(existing_path),
+        Err(_) => {
+            let path = PathWithPosition::parse_str(argument_str);
+            let curdir = env::current_dir().context("reteiving current directory")?;
+            path.map_path(|path| match fs::canonicalize(&path) {
+                Ok(path) => Ok(path),
+                Err(e) => {
+                    if let Some(mut parent) = path.parent() {
+                        if parent == Path::new("") {
+                            parent = &curdir
+                        }
+                        match fs::canonicalize(parent) {
+                            Ok(parent) => Ok(parent.join(path.file_name().unwrap())),
+                            Err(_) => Err(e),
+                        }
+                    } else {
+                        Err(e)
+                    }
+                }
+            })
+            .with_context(|| format!("parsing as path with position {argument_str}"))?
+        }
+    };
+    Ok(canonicalized.to_string(|path| path.to_string_lossy().to_string()))
 }
 
 fn main() -> Result<()> {
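The heart of this CLI change is the resolution order: the raw argument is canonicalized first, so a file whose name literally contains colons still opens as-is, and only when that lookup fails is a trailing `:row[:col]` suffix parsed off. A minimal std-only sketch of that order, using a hypothetical `resolve_cli_argument` helper rather than Zed's `PathWithPosition`:

```rust
use std::path::{Path, PathBuf};

// Illustrative only: mirrors the resolution order used above, not Zed's actual code.
fn resolve_cli_argument(arg: &str) -> (PathBuf, Option<u32>, Option<u32>) {
    // 1. If the literal argument names an existing file, keep it whole, colons and all.
    if let Ok(existing) = Path::new(arg).canonicalize() {
        return (existing, None, None);
    }
    // 2. Otherwise treat a trailing ":row" or ":row:col" as a cursor position.
    let (mut path, mut row, mut col) = (arg, None, None);
    if let Some((head, tail)) = arg.rsplit_once(':') {
        if let Ok(last) = tail.parse::<u32>() {
            path = head;
            row = Some(last);
            if let Some((head2, tail2)) = head.rsplit_once(':') {
                if let Ok(first) = tail2.parse::<u32>() {
                    // Two numeric suffixes: the first is the row, the second the column.
                    path = head2;
                    row = Some(first);
                    col = Some(last);
                }
            }
        }
    }
    (PathBuf::from(path), row, col)
}

fn main() {
    let (path, row, col) = resolve_cli_argument("src/main.rs:10:5");
    println!("{} row={:?} col={:?}", path.display(), row, col);
}
```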
@@ -297,7 +297,11 @@ impl Element for UniformList {
         for (mut item, ix) in items.into_iter().zip(visible_range) {
             let item_origin = padded_bounds.origin
                 + point(
-                    scroll_offset.x + padding.left,
+                    if can_scroll_horizontally {
+                        scroll_offset.x + padding.left
+                    } else {
+                        scroll_offset.x
+                    },
                     item_height * ix + scroll_offset.y + padding.top,
                 );
             let available_width = if can_scroll_horizontally {
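The new origin math adds the left padding to the horizontal scroll offset only when the list is allowed to scroll horizontally. A reduced sketch of that rule with plain `f32` values instead of gpui's `Pixels`:

```rust
// Reduced illustration of the conditional offset above (plain f32, not gpui types).
fn item_x_offset(scroll_x: f32, padding_left: f32, can_scroll_horizontally: bool) -> f32 {
    if can_scroll_horizontally {
        scroll_x + padding_left
    } else {
        scroll_x
    }
}

fn main() {
    assert_eq!(item_x_offset(-20.0, 8.0, true), -12.0);
    assert_eq!(item_x_offset(-20.0, 8.0, false), -20.0);
}
```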
@@ -626,12 +626,9 @@ impl LanguageServer {
         properties: vec![
             "additionalTextEdits".to_string(),
             "command".to_string(),
-            "detail".to_string(),
             "documentation".to_string(),
-            "filterText".to_string(),
-            "labelDetails".to_string(),
-            "tags".to_string(),
-            "textEdit".to_string(),
+            // NB: Do not have this resolved, otherwise Zed becomes slow to complete things
+            // "textEdit".to_string(),
         ],
     }),
     insert_replace_support: Some(true),
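This `properties` vec is what gets advertised to language servers as the `completionItem.resolveSupport` client capability; trimming it means servers have to send the remaining fields eagerly instead of during `completionItem/resolve`. A hedged sketch of the wire-level shape per the LSP specification, built with `serde_json` rather than Zed's LSP types:

```rust
fn main() {
    // Shape of the capability on the wire; the property list mirrors the trimmed vec above.
    let capabilities = serde_json::json!({
        "textDocument": {
            "completion": {
                "completionItem": {
                    "resolveSupport": {
                        "properties": ["additionalTextEdits", "command", "documentation"]
                    }
                }
            }
        }
    });
    println!("{}", serde_json::to_string_pretty(&capabilities).unwrap());
}
```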
@@ -7617,10 +7617,16 @@ impl LspAdapterDelegate for LocalLspAdapterDelegate {
     }
 
     async fn read_text_file(&self, path: PathBuf) -> Result<String> {
-        if self.worktree.entry_for_path(&path).is_none() {
-            return Err(anyhow!("no such path {path:?}"));
-        };
-        self.fs.load(&path).await
+        let entry = self
+            .worktree
+            .entry_for_path(&path)
+            .with_context(|| format!("no worktree entry for path {path:?}"))?;
+        let abs_path = self
+            .worktree
+            .absolutize(&entry.path)
+            .with_context(|| format!("cannot absolutize path {path:?}"))?;
+
+        self.fs.load(&abs_path).await
     }
 }
 
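The underlying problem is that the delegate receives worktree-relative paths, so loading them verbatim only worked when the process's working directory happened to be the worktree root; the entry has to be absolutized against the worktree before touching the filesystem. A simplified sketch of that lookup, with a plain struct standing in for Zed's worktree (all names here are illustrative):

```rust
use std::path::{Path, PathBuf};

// Illustrative stand-in for a worktree: an absolute root plus known relative entries.
struct Worktree {
    abs_root: PathBuf,
    entries: Vec<PathBuf>, // worktree-relative paths
}

impl Worktree {
    fn entry_for_path(&self, rel: &Path) -> Option<&PathBuf> {
        self.entries.iter().find(|entry| entry.as_path() == rel)
    }

    fn absolutize(&self, rel: &Path) -> PathBuf {
        self.abs_root.join(rel)
    }
}

fn read_text_file(worktree: &Worktree, rel: &Path) -> std::io::Result<String> {
    let entry = worktree
        .entry_for_path(rel)
        .ok_or_else(|| std::io::Error::new(std::io::ErrorKind::NotFound, "no worktree entry"))?;
    // Load via the absolute path, not the worktree-relative one.
    std::fs::read_to_string(worktree.absolutize(entry))
}

fn main() {
    let worktree = Worktree {
        abs_root: PathBuf::from("/home/me/project"),
        entries: vec![PathBuf::from("Cargo.toml")],
    };
    // Reads /home/me/project/Cargo.toml rather than ./Cargo.toml.
    let _ = read_text_file(&worktree, Path::new("Cargo.toml"));
}
```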
@@ -1906,11 +1906,9 @@ impl ProjectPanel {
                         Some(Arc::<Path>::from(full_path.join(suffix)))
                     })
                 })
+                .or_else(|| entry.path.file_name().map(Path::new).map(Arc::from))
                 .unwrap_or_else(|| entry.path.clone());
-            let depth = path
-                .strip_prefix(worktree_abs_path)
-                .map(|suffix| suffix.components().count())
-                .unwrap_or_default();
+            let depth = path.components().count();
             (depth, path)
         };
         let width_estimate = item_width_estimate(
@@ -221,11 +221,7 @@ impl PathWithPosition {
     pub fn parse_str(s: &str) -> Self {
         let trimmed = s.trim();
         let path = Path::new(trimmed);
-        let maybe_file_name_with_row_col = path
-            .file_name()
-            .unwrap_or_default()
-            .to_str()
-            .unwrap_or_default();
+        let maybe_file_name_with_row_col = path.file_name().unwrap_or_default().to_string_lossy();
         if maybe_file_name_with_row_col.is_empty() {
             return Self {
                 path: Path::new(s).to_path_buf(),
@@ -240,7 +236,7 @@ impl PathWithPosition {
         static SUFFIX_RE: LazyLock<Regex> =
             LazyLock::new(|| Regex::new(ROW_COL_CAPTURE_REGEX).unwrap());
         match SUFFIX_RE
-            .captures(maybe_file_name_with_row_col)
+            .captures(&maybe_file_name_with_row_col)
             .map(|caps| caps.extract())
         {
             Some((_, [file_name, maybe_row, maybe_column])) => {
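Besides collapsing the builder chain, the rewrite swaps `to_str()` (which returns `None`, and hence fell back to an empty string, for non-UTF-8 file names) for `to_string_lossy()`, which still yields a string the row/column regex can match. A tiny sketch of the surviving behaviour:

```rust
use std::path::Path;

// Sketch of the lossy extraction: non-UTF-8 file names become U+FFFD-substituted
// strings instead of collapsing to "".
fn file_name_for_matching(path: &Path) -> String {
    path.file_name()
        .unwrap_or_default()
        .to_string_lossy()
        .into_owned()
}

fn main() {
    assert_eq!(file_name_for_matching(Path::new("src/main.rs:12")), "main.rs:12");
}
```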
@@ -361,26 +357,26 @@ pub fn compare_paths(
                 let b_is_file = components_b.peek().is_none() && b_is_file;
                 let ordering = a_is_file.cmp(&b_is_file).then_with(|| {
                     let path_a = Path::new(component_a.as_os_str());
-                    let num_and_remainder_a = NumericPrefixWithSuffix::from_numeric_prefixed_str(
-                        if a_is_file {
-                            path_a.file_stem()
-                        } else {
-                            path_a.file_name()
-                        }
-                        .and_then(|s| s.to_str())
-                        .unwrap_or_default(),
-                    );
+                    let path_string_a = if a_is_file {
+                        path_a.file_stem()
+                    } else {
+                        path_a.file_name()
+                    }
+                    .map(|s| s.to_string_lossy());
+                    let num_and_remainder_a = path_string_a
+                        .as_deref()
+                        .map(NumericPrefixWithSuffix::from_numeric_prefixed_str);
 
                     let path_b = Path::new(component_b.as_os_str());
-                    let num_and_remainder_b = NumericPrefixWithSuffix::from_numeric_prefixed_str(
-                        if b_is_file {
-                            path_b.file_stem()
-                        } else {
-                            path_b.file_name()
-                        }
-                        .and_then(|s| s.to_str())
-                        .unwrap_or_default(),
-                    );
+                    let path_string_b = if b_is_file {
+                        path_b.file_stem()
+                    } else {
+                        path_b.file_name()
+                    }
+                    .map(|s| s.to_string_lossy());
+                    let num_and_remainder_b = path_string_b
+                        .as_deref()
+                        .map(NumericPrefixWithSuffix::from_numeric_prefixed_str);
 
                     num_and_remainder_a.cmp(&num_and_remainder_b)
                 });
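For context, `NumericPrefixWithSuffix` exists to give file listings natural ordering, so `2-notes` sorts before `10-notes`; the refactor only changes how the comparison key is extracted (lossily, and wrapped in an `Option` so non-UTF-8 names no longer collapse to an empty key). A self-contained sketch of that style of comparison key, simplified from Zed's type:

```rust
use std::cmp::Ordering;

// Simplified numeric-prefix key: leading digits compare as a number, the rest lexicographically.
fn numeric_prefix_key(name: &str) -> (Option<u64>, &str) {
    let digits_end = name.find(|c: char| !c.is_ascii_digit()).unwrap_or(name.len());
    let (digits, rest) = name.split_at(digits_end);
    (digits.parse().ok(), rest)
}

fn compare_names(a: &str, b: &str) -> Ordering {
    numeric_prefix_key(a).cmp(&numeric_prefix_key(b))
}

fn main() {
    assert_eq!(compare_names("2-notes.txt", "10-notes.txt"), Ordering::Less);
    assert_eq!(compare_names("readme", "zettel"), Ordering::Less);
}
```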
@@ -2,7 +2,7 @@
 description = "The fast, collaborative code editor."
 edition = "2021"
 name = "zed"
-version = "0.156.0"
+version = "0.156.2"
 publish = false
 license = "GPL-3.0-or-later"
 authors = ["Zed Team <hi@zed.dev>"]
@@ -1 +1 @@
-dev
+stable
@@ -58,8 +58,9 @@ use workspace::{
     AppState, WorkspaceSettings, WorkspaceStore,
 };
 use zed::{
-    app_menus, build_window_options, handle_cli_connection, handle_keymap_file_changes,
-    initialize_workspace, open_paths_with_positions, OpenListener, OpenRequest,
+    app_menus, build_window_options, derive_paths_with_position, handle_cli_connection,
+    handle_keymap_file_changes, initialize_workspace, open_paths_with_positions, OpenListener,
+    OpenRequest,
 };
 
 use crate::zed::inline_completion_registry;
@@ -710,13 +711,11 @@ fn handle_open_request(
 
     if let Some(connection_info) = request.ssh_connection {
         cx.spawn(|mut cx| async move {
+            let paths_with_position =
+                derive_paths_with_position(app_state.fs.as_ref(), request.open_paths).await;
             open_ssh_project(
                 connection_info,
-                request
-                    .open_paths
-                    .into_iter()
-                    .map(|path| path.path)
-                    .collect::<Vec<_>>(),
+                paths_with_position.into_iter().map(|p| p.path).collect(),
                 app_state,
                 workspace::OpenOptions::default(),
                 &mut cx,
@@ -731,8 +730,10 @@ fn handle_open_request(
     if !request.open_paths.is_empty() {
         let app_state = app_state.clone();
         task = Some(cx.spawn(|mut cx| async move {
+            let paths_with_position =
+                derive_paths_with_position(app_state.fs.as_ref(), request.open_paths).await;
             let (_window, results) = open_paths_with_positions(
-                &request.open_paths,
+                &paths_with_position,
                 app_state,
                 workspace::OpenOptions::default(),
                 &mut cx,
@@ -9,12 +9,15 @@ use collections::HashMap;
 use db::kvp::KEY_VALUE_STORE;
 use editor::scroll::Autoscroll;
 use editor::Editor;
+use fs::Fs;
 use futures::channel::mpsc::{UnboundedReceiver, UnboundedSender};
 use futures::channel::{mpsc, oneshot};
+use futures::future::join_all;
 use futures::{FutureExt, SinkExt, StreamExt};
 use gpui::{AppContext, AsyncAppContext, Global, WindowHandle};
 use language::{Bias, Point};
 use remote::SshConnectionOptions;
+use std::path::Path;
 use std::sync::Arc;
 use std::time::Duration;
 use std::{process, thread};
@@ -27,7 +30,7 @@ use workspace::{AppState, OpenOptions, Workspace};
 #[derive(Default, Debug)]
 pub struct OpenRequest {
     pub cli_connection: Option<(mpsc::Receiver<CliRequest>, IpcSender<CliResponse>)>,
-    pub open_paths: Vec<PathWithPosition>,
+    pub open_paths: Vec<String>,
     pub open_channel_notes: Vec<(u64, Option<String>)>,
     pub join_channel: Option<u64>,
     pub ssh_connection: Option<SshConnectionOptions>,
@@ -57,8 +60,7 @@ impl OpenRequest {
 
     fn parse_file_path(&mut self, file: &str) {
         if let Some(decoded) = urlencoding::decode(file).log_err() {
-            let path_buf = PathWithPosition::parse_str(&decoded);
-            self.open_paths.push(path_buf)
+            self.open_paths.push(decoded.into_owned())
         }
     }
 
@@ -369,26 +371,15 @@ async fn open_workspaces(
                     location
                         .paths()
                         .iter()
-                        .map(|path| PathWithPosition {
-                            path: path.clone(),
-                            row: None,
-                            column: None,
-                        })
-                        .collect::<Vec<_>>()
+                        .map(|path| path.to_string_lossy().to_string())
+                        .collect()
                 })
-                .collect::<Vec<_>>()
+                .collect()
             })
         }
     } else {
-        // If paths are provided, parse them (they include positions)
-        let paths_with_position = paths
-            .into_iter()
-            .map(|path_with_position_string| {
-                PathWithPosition::parse_str(&path_with_position_string)
-            })
-            .collect();
-        vec![paths_with_position]
+        vec![paths]
     };
 
     if grouped_paths.is_empty() {
@@ -441,7 +432,7 @@ async fn open_workspaces(
 }
 
 async fn open_workspace(
-    workspace_paths: Vec<PathWithPosition>,
+    workspace_paths: Vec<String>,
     open_new_workspace: Option<bool>,
     wait: bool,
     responses: &IpcSender<CliResponse>,
@@ -451,8 +442,10 @@ async fn open_workspace(
 ) -> bool {
     let mut errored = false;
 
+    let paths_with_position =
+        derive_paths_with_position(app_state.fs.as_ref(), workspace_paths).await;
     match open_paths_with_positions(
-        &workspace_paths,
+        &paths_with_position,
         app_state.clone(),
         workspace::OpenOptions {
             open_new_workspace,
@@ -466,7 +459,7 @@ async fn open_workspace(
         Ok((workspace, items)) => {
             let mut item_release_futures = Vec::new();
 
-            for (item, path) in items.into_iter().zip(&workspace_paths) {
+            for (item, path) in items.into_iter().zip(&paths_with_position) {
                 match item {
                     Some(Ok(item)) => {
                         cx.update(|cx| {
@@ -497,7 +490,7 @@ async fn open_workspace(
             if wait {
                 let background = cx.background_executor().clone();
                 let wait = async move {
-                    if workspace_paths.is_empty() {
+                    if paths_with_position.is_empty() {
                         let (done_tx, done_rx) = oneshot::channel();
                         let _subscription = workspace.update(cx, |_, cx| {
                             cx.on_release(move |_, _, _| {
@@ -532,7 +525,7 @@ async fn open_workspace(
             errored = true;
             responses
                 .send(CliResponse::Stderr {
-                    message: format!("error opening {workspace_paths:?}: {error}"),
+                    message: format!("error opening {paths_with_position:?}: {error}"),
                 })
                 .log_err();
         }
@@ -540,9 +533,26 @@ async fn open_workspace(
     errored
 }
 
+pub async fn derive_paths_with_position(
+    fs: &dyn Fs,
+    path_strings: impl IntoIterator<Item = impl AsRef<str>>,
+) -> Vec<PathWithPosition> {
+    join_all(path_strings.into_iter().map(|path_str| async move {
+        let canonicalized = fs.canonicalize(Path::new(path_str.as_ref())).await;
+        (path_str, canonicalized)
+    }))
+    .await
+    .into_iter()
+    .map(|(original, canonicalized)| match canonicalized {
+        Ok(canonicalized) => PathWithPosition::from_path(canonicalized),
+        Err(_) => PathWithPosition::parse_str(original.as_ref()),
+    })
+    .collect()
+}
+
 #[cfg(test)]
 mod tests {
-    use std::{path::PathBuf, sync::Arc};
+    use std::sync::Arc;
 
     use cli::{
         ipc::{self},
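`derive_paths_with_position` resolves every CLI argument concurrently with `join_all`: arguments that name existing files are canonicalized, anything else is assumed to carry a `path:row:col` suffix and is parsed instead. A rough sketch of that pattern using only the `futures` crate and the standard library (`parse_row_col` is a crude stand-in for `PathWithPosition::parse_str`):

```rust
use futures::future::join_all;
use std::path::PathBuf;

// Crude stand-in for PathWithPosition::parse_str: strip a trailing ":row[:col]" suffix.
fn parse_row_col(arg: &str) -> PathBuf {
    PathBuf::from(arg.trim_end_matches(|c: char| c.is_ascii_digit() || c == ':'))
}

async fn derive_paths(args: Vec<String>) -> Vec<PathBuf> {
    join_all(args.into_iter().map(|arg| async move {
        match std::fs::canonicalize(&arg) {
            // Existing file: keep the canonical path as-is.
            Ok(canonical) => canonical,
            // Missing file: assume "path:row:col" and strip the position suffix.
            Err(_) => parse_row_col(&arg),
        }
    }))
    .await
}

fn main() {
    let paths = futures::executor::block_on(derive_paths(vec![
        "Cargo.toml".to_string(),
        "src/main.rs:42:7".to_string(),
    ]));
    println!("{paths:?}");
}
```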
@@ -551,7 +561,6 @@ mod tests {
     use editor::Editor;
     use gpui::TestAppContext;
     use serde_json::json;
-    use util::paths::PathWithPosition;
     use workspace::{AppState, Workspace};
 
     use crate::zed::{open_listener::open_workspace, tests::init_test};
@@ -665,12 +674,7 @@ mod tests {
     ) {
         let (response_tx, _) = ipc::channel::<CliResponse>().unwrap();
 
-        let path = PathBuf::from(path);
-        let workspace_paths = vec![PathWithPosition {
-            path,
-            row: None,
-            column: None,
-        }];
+        let workspace_paths = vec![path.to_owned()];
 
         let errored = cx
             .spawn(|mut cx| async move {