Compare commits

...

10 Commits

Author SHA1 Message Date
Conrad Irwin
40be8253e1 Don't pass CLI env to terminal either 2025-05-17 00:30:53 +02:00
Conrad Irwin
2416c79eea os:: 2025-05-16 23:50:06 +02:00
Conrad Irwin
bd0d9cf9f5 Merge branch 'main' into dont-set-shell 2025-05-16 23:23:16 +02:00
Conrad Irwin
dc8bc6a819 Fix imports 2025-05-16 23:18:28 +02:00
Conrad Irwin
7826c6fc2c it depends 2025-05-16 22:53:49 +02:00
Conrad Irwin
dce95f781b more 2025-05-16 22:49:55 +02:00
Conrad Irwin
3516881ca1 more 2025-05-16 12:30:34 +02:00
Conrad Irwin
76376144a5 license 2025-05-16 12:01:29 +02:00
Conrad Irwin
9830354690 Don't set env on the process 2025-05-16 11:58:04 +02:00
Conrad Irwin
d01f2a1c9a Don't set the $SHELL env var 2025-05-15 00:19:59 +02:00
65 changed files with 688 additions and 649 deletions

30
Cargo.lock generated
View File

@@ -661,6 +661,7 @@ dependencies = [
"component", "component",
"derive_more", "derive_more",
"editor", "editor",
"environment",
"feature_flags", "feature_flags",
"fs", "fs",
"futures 0.3.31", "futures 0.3.31",
@@ -3253,6 +3254,7 @@ dependencies = [
"anyhow", "anyhow",
"async-trait", "async-trait",
"collections", "collections",
"environment",
"futures 0.3.31", "futures 0.3.31",
"gpui", "gpui",
"log", "log",
@@ -3305,6 +3307,7 @@ dependencies = [
"ctor", "ctor",
"editor", "editor",
"env_logger 0.11.8", "env_logger 0.11.8",
"environment",
"fs", "fs",
"futures 0.3.31", "futures 0.3.31",
"gpui", "gpui",
@@ -3978,6 +3981,7 @@ dependencies = [
"collections", "collections",
"dap-types", "dap-types",
"env_logger 0.11.8", "env_logger 0.11.8",
"environment",
"fs", "fs",
"futures 0.3.31", "futures 0.3.31",
"gpui", "gpui",
@@ -4852,6 +4856,21 @@ dependencies = [
"log", "log",
] ]
[[package]]
name = "environment"
version = "0.1.0"
dependencies = [
"anyhow",
"collections",
"paths",
"serde_json",
"smol",
"tokio",
"util",
"which 6.0.3",
"workspace-hack",
]
[[package]] [[package]]
name = "envy" name = "envy"
version = "0.4.2" version = "0.4.2"
@@ -4957,6 +4976,7 @@ dependencies = [
"dirs 4.0.0", "dirs 4.0.0",
"dotenv", "dotenv",
"env_logger 0.11.8", "env_logger 0.11.8",
"environment",
"extension", "extension",
"fs", "fs",
"futures 0.3.31", "futures 0.3.31",
@@ -5053,6 +5073,7 @@ dependencies = [
"async-trait", "async-trait",
"collections", "collections",
"dap", "dap",
"environment",
"fs", "fs",
"futures 0.3.31", "futures 0.3.31",
"gpui", "gpui",
@@ -5112,6 +5133,7 @@ dependencies = [
"ctor", "ctor",
"dap", "dap",
"env_logger 0.11.8", "env_logger 0.11.8",
"environment",
"extension", "extension",
"fs", "fs",
"futures 0.3.31", "futures 0.3.31",
@@ -5944,6 +5966,7 @@ dependencies = [
"async-trait", "async-trait",
"collections", "collections",
"derive_more", "derive_more",
"environment",
"futures 0.3.31", "futures 0.3.31",
"git2", "git2",
"gpui", "gpui",
@@ -9035,6 +9058,8 @@ dependencies = [
"async-trait", "async-trait",
"async-watch", "async-watch",
"async_zip", "async_zip",
"collections",
"environment",
"futures 0.3.31", "futures 0.3.31",
"http_client", "http_client",
"log", "log",
@@ -10798,6 +10823,7 @@ version = "0.1.0"
dependencies = [ dependencies = [
"anyhow", "anyhow",
"collections", "collections",
"environment",
"fs", "fs",
"gpui", "gpui",
"language", "language",
@@ -10921,6 +10947,7 @@ dependencies = [
"dap", "dap",
"dap_adapters", "dap_adapters",
"env_logger 0.11.8", "env_logger 0.11.8",
"environment",
"extension", "extension",
"fancy-regex 0.14.0", "fancy-regex 0.14.0",
"fs", "fs",
@@ -11803,6 +11830,7 @@ dependencies = [
"askpass", "askpass",
"async-trait", "async-trait",
"collections", "collections",
"environment",
"fs", "fs",
"futures 0.3.31", "futures 0.3.31",
"gpui", "gpui",
@@ -11905,6 +11933,7 @@ dependencies = [
"command_palette_hooks", "command_palette_hooks",
"editor", "editor",
"env_logger 0.11.8", "env_logger 0.11.8",
"environment",
"feature_flags", "feature_flags",
"file_icons", "file_icons",
"futures 0.3.31", "futures 0.3.31",
@@ -14042,6 +14071,7 @@ dependencies = [
"collections", "collections",
"editor", "editor",
"env_logger 0.11.8", "env_logger 0.11.8",
"environment",
"futures 0.3.31", "futures 0.3.31",
"gpui", "gpui",
"http_client", "http_client",

View File

@@ -43,6 +43,7 @@ members = [
"crates/deepseek", "crates/deepseek",
"crates/diagnostics", "crates/diagnostics",
"crates/docs_preprocessor", "crates/docs_preprocessor",
"crates/environment",
"crates/editor", "crates/editor",
"crates/eval", "crates/eval",
"crates/extension", "crates/extension",
@@ -250,6 +251,7 @@ debugger_ui = { path = "crates/debugger_ui" }
deepseek = { path = "crates/deepseek" } deepseek = { path = "crates/deepseek" }
diagnostics = { path = "crates/diagnostics" } diagnostics = { path = "crates/diagnostics" }
editor = { path = "crates/editor" } editor = { path = "crates/editor" }
environment = { path = "crates/environment" }
extension = { path = "crates/extension" } extension = { path = "crates/extension" }
extension_host = { path = "crates/extension_host" } extension_host = { path = "crates/extension_host" }
extensions_ui = { path = "crates/extensions_ui" } extensions_ui = { path = "crates/extensions_ui" }

View File

@@ -23,7 +23,7 @@ use std::sync::Arc;
use telemetry_events::{AssistantEventData, AssistantKind, AssistantPhase}; use telemetry_events::{AssistantEventData, AssistantKind, AssistantPhase};
use terminal_view::TerminalView; use terminal_view::TerminalView;
use ui::prelude::*; use ui::prelude::*;
use util::ResultExt; use util::{ResultExt, get_system_shell};
use workspace::{Toast, Workspace, notifications::NotificationId}; use workspace::{Toast, Workspace, notifications::NotificationId};
pub fn init( pub fn init(
@@ -230,7 +230,7 @@ impl TerminalInlineAssistant {
) -> Result<Task<LanguageModelRequest>> { ) -> Result<Task<LanguageModelRequest>> {
let assist = self.assists.get(&assist_id).context("invalid assist")?; let assist = self.assists.get(&assist_id).context("invalid assist")?;
let shell = std::env::var("SHELL").ok(); let shell = get_system_shell();
let (latest_output, working_directory) = assist let (latest_output, working_directory) = assist
.terminal .terminal
.update(cx, |terminal, cx| { .update(cx, |terminal, cx| {
@@ -251,7 +251,7 @@ impl TerminalInlineAssistant {
.context("invalid assist")? .context("invalid assist")?
.read(cx) .read(cx)
.prompt(cx), .prompt(cx),
shell.as_deref(), Some(&shell),
working_directory.as_deref(), working_directory.as_deref(),
&latest_output, &latest_output,
)?; )?;

View File

@@ -25,6 +25,7 @@ collections.workspace = true
component.workspace = true component.workspace = true
derive_more.workspace = true derive_more.workspace = true
editor.workspace = true editor.workspace = true
environment.workspace = true
feature_flags.workspace = true feature_flags.workspace = true
futures.workspace = true futures.workspace = true
gpui.workspace = true gpui.workspace = true

View File

@@ -148,11 +148,14 @@ impl Tool for TerminalTool {
Some(dir) => project.update(cx, |project, cx| { Some(dir) => project.update(cx, |project, cx| {
project.directory_environment(dir.as_path().into(), cx) project.directory_environment(dir.as_path().into(), cx)
}), }),
None => Task::ready(None).shared(), None => cx
.background_executor()
.spawn(async move { environment::in_home_dir().await })
.shared(),
}; };
let env = cx.spawn(async move |_| { let env = cx.spawn(async move |_| {
let mut env = env.await.unwrap_or_default(); let mut env = env.await;
if cfg!(unix) { if cfg!(unix) {
env.insert("PAGER".into(), "cat".into()); env.insert("PAGER".into(), "cat".into());
} }

View File

@@ -15,6 +15,7 @@ path = "src/context_server.rs"
anyhow.workspace = true anyhow.workspace = true
async-trait.workspace = true async-trait.workspace = true
collections.workspace = true collections.workspace = true
environment.workspace = true
futures.workspace = true futures.workspace = true
gpui.workspace = true gpui.workspace = true
log.workspace = true log.workspace = true

View File

@@ -23,10 +23,14 @@ pub struct StdioTransport {
impl StdioTransport { impl StdioTransport {
pub fn new(binary: ModelContextServerBinary, cx: &AsyncApp) -> Result<Self> { pub fn new(binary: ModelContextServerBinary, cx: &AsyncApp) -> Result<Self> {
let mut command = util::command::new_smol_command(&binary.executable); let mut env = environment::inherited();
if let Some(binary_env) = binary.env.clone() {
env.extend(binary_env);
}
let mut command = util::command::new_smol_command(&binary.executable, &env);
command command
.args(&binary.args) .args(&binary.args)
.envs(binary.env.unwrap_or_default())
.stdin(std::process::Stdio::piped()) .stdin(std::process::Stdio::piped())
.stdout(std::process::Stdio::piped()) .stdout(std::process::Stdio::piped())
.stderr(std::process::Stdio::piped()) .stderr(std::process::Stdio::piped())

View File

@@ -29,6 +29,7 @@ chrono.workspace = true
client.workspace = true client.workspace = true
collections.workspace = true collections.workspace = true
command_palette_hooks.workspace = true command_palette_hooks.workspace = true
environment.workspace = true
fs.workspace = true fs.workspace = true
futures.workspace = true futures.workspace = true
gpui.workspace = true gpui.workspace = true

View File

@@ -405,8 +405,11 @@ impl Copilot {
cx.notify(); cx.notify();
} }
fn build_env(&self, copilot_settings: &CopilotSettings) -> Option<HashMap<String, String>> { fn build_env(&self, copilot_settings: &CopilotSettings) -> HashMap<String, String> {
let proxy_url = copilot_settings.proxy.clone()?; let mut env = environment::inherited();
let Some(proxy_url) = copilot_settings.proxy.clone() else {
return env;
};
let no_verify = copilot_settings.proxy_no_verify; let no_verify = copilot_settings.proxy_no_verify;
let http_or_https_proxy = if proxy_url.starts_with("http:") { let http_or_https_proxy = if proxy_url.starts_with("http:") {
"HTTP_PROXY" "HTTP_PROXY"
@@ -416,17 +419,16 @@ impl Copilot {
log::error!( log::error!(
"Unsupported protocol scheme for language server proxy (must be http or https)" "Unsupported protocol scheme for language server proxy (must be http or https)"
); );
return None; return env;
}; };
let mut env = HashMap::default();
env.insert(http_or_https_proxy.to_string(), proxy_url); env.insert(http_or_https_proxy.to_string(), proxy_url);
if let Some(true) = no_verify { if let Some(true) = no_verify {
env.insert("NODE_TLS_REJECT_UNAUTHORIZED".to_string(), "0".to_string()); env.insert("NODE_TLS_REJECT_UNAUTHORIZED".to_string(), "0".to_string());
}; };
Some(env) env
} }
#[cfg(any(test, feature = "test-support"))] #[cfg(any(test, feature = "test-support"))]
@@ -466,7 +468,7 @@ impl Copilot {
new_server_id: LanguageServerId, new_server_id: LanguageServerId,
fs: Arc<dyn Fs>, fs: Arc<dyn Fs>,
node_runtime: NodeRuntime, node_runtime: NodeRuntime,
env: Option<HashMap<String, String>>, project_env: HashMap<String, String>,
this: WeakEntity<Self>, this: WeakEntity<Self>,
awaiting_sign_in_after_start: bool, awaiting_sign_in_after_start: bool,
cx: &mut AsyncApp, cx: &mut AsyncApp,
@@ -478,7 +480,7 @@ impl Copilot {
let binary = LanguageServerBinary { let binary = LanguageServerBinary {
path: node_path, path: node_path,
arguments, arguments,
env, env: Some(project_env.clone()),
}; };
let root_path = if cfg!(target_os = "windows") { let root_path = if cfg!(target_os = "windows") {
@@ -496,6 +498,7 @@ impl Copilot {
root_path, root_path,
None, None,
Default::default(), Default::default(),
project_env,
cx, cx,
)?; )?;

View File

@@ -30,6 +30,7 @@ async-trait.workspace = true
client.workspace = true client.workspace = true
collections.workspace = true collections.workspace = true
dap-types.workspace = true dap-types.workspace = true
environment.workspace = true
fs.workspace = true fs.workspace = true
futures.workspace = true futures.workspace = true
gpui.workspace = true gpui.workspace = true

View File

@@ -364,7 +364,7 @@ pub async fn download_adapter_from_github(
futures::io::copy(response.body_mut(), &mut file).await?; futures::io::copy(response.body_mut(), &mut file).await?;
// we cannot check the status as some adapter include files with names that trigger `Illegal byte sequence` // we cannot check the status as some adapter include files with names that trigger `Illegal byte sequence`
util::command::new_smol_command("unzip") util::command::new_smol_command("unzip", &environment::inherited())
.arg(&zip_path) .arg(&zip_path)
.arg("-d") .arg("-d")
.arg(&version_path) .arg(&version_path)

View File

@@ -547,7 +547,7 @@ impl TcpTransport {
let host = connection_args.host; let host = connection_args.host;
let port = connection_args.port; let port = connection_args.port;
let mut command = util::command::new_std_command(&binary.command); let mut command = util::command::new_std_command(&binary.command, &binary.envs);
util::set_pre_exec_to_start_new_session(&mut command); util::set_pre_exec_to_start_new_session(&mut command);
let mut command = smol::process::Command::from(command); let mut command = smol::process::Command::from(command);
@@ -556,7 +556,6 @@ impl TcpTransport {
} }
command.args(&binary.arguments); command.args(&binary.arguments);
command.envs(&binary.envs);
command command
.stdin(Stdio::null()) .stdin(Stdio::null())
@@ -643,7 +642,7 @@ pub struct StdioTransport {
impl StdioTransport { impl StdioTransport {
#[allow(dead_code, reason = "This is used in non test builds of Zed")] #[allow(dead_code, reason = "This is used in non test builds of Zed")]
async fn start(binary: &DebugAdapterBinary, _: AsyncApp) -> Result<(TransportPipe, Self)> { async fn start(binary: &DebugAdapterBinary, _: AsyncApp) -> Result<(TransportPipe, Self)> {
let mut command = util::command::new_std_command(&binary.command); let mut command = util::command::new_std_command(&binary.command, &binary.envs);
util::set_pre_exec_to_start_new_session(&mut command); util::set_pre_exec_to_start_new_session(&mut command);
let mut command = smol::process::Command::from(command); let mut command = smol::process::Command::from(command);
@@ -652,7 +651,6 @@ impl StdioTransport {
} }
command.args(&binary.arguments); command.args(&binary.arguments);
command.envs(&binary.envs);
command command
.stdin(Stdio::piped()) .stdin(Stdio::piped())

View File

@@ -39,6 +39,7 @@ impl DebugAdapter for RubyDebugAdapter {
) -> Result<DebugAdapterBinary> { ) -> Result<DebugAdapterBinary> {
let adapter_path = paths::debug_adapters_dir().join(self.name().as_ref()); let adapter_path = paths::debug_adapters_dir().join(self.name().as_ref());
let mut rdbg_path = adapter_path.join("rdbg"); let mut rdbg_path = adapter_path.join("rdbg");
let env = delegate.shell_env().await;
if !delegate.fs().is_file(&rdbg_path).await { if !delegate.fs().is_file(&rdbg_path).await {
match delegate.which("rdbg".as_ref()) { match delegate.which("rdbg".as_ref()) {
Some(path) => rdbg_path = path, Some(path) => rdbg_path = path,
@@ -46,7 +47,7 @@ impl DebugAdapter for RubyDebugAdapter {
delegate.output_to_console( delegate.output_to_console(
"rdbg not found on path, trying `gem install debug`".to_string(), "rdbg not found on path, trying `gem install debug`".to_string(),
); );
let output = new_smol_command("gem") let output = new_smol_command("gem", &env)
.arg("install") .arg("install")
.arg("--no-document") .arg("--no-document")
.arg("--bindir") .arg("--bindir")

View File

@@ -103,7 +103,7 @@ async fn lsp_task_context(
Some(TaskContext { Some(TaskContext {
cwd: worktree_abs_path.map(|p| p.to_path_buf()), cwd: worktree_abs_path.map(|p| p.to_path_buf()),
project_env: project_env.unwrap_or_default(), project_env,
..TaskContext::default() ..TaskContext::default()
}) })
} }

View File

@@ -0,0 +1,27 @@
[package]
name = "environment"
version = "0.1.0"
edition.workspace = true
publish.workspace = true
license = "GPL-3.0-or-later"
[lints]
workspace = true
[lib]
path = "src/environment.rs"
doctest = false
[features]
test-support = []
[dependencies]
anyhow.workspace = true
collections.workspace = true
paths.workspace = true
smol.workspace = true
serde_json.workspace = true
tokio.workspace = true
util.workspace = true
which.workspace = true
workspace-hack.workspace = true

View File

@@ -0,0 +1 @@
../../LICENSE-GPL

View File

@@ -0,0 +1,161 @@
use anyhow::Result;
use collections::HashMap;
use std::path::Path;
use util::ResultExt;
/// The environment that Zed was launched with.
/// Prefer `in_home_dir` outside of a project context, or `in_dir` inside of a project context,
/// as these will be more like the user's shell environment.
pub fn inherited() -> HashMap<String, String> {
std::env::vars().collect()
}
/// The environment read from launching a login shell in the user's home directory.
pub async fn in_home_dir() -> HashMap<String, String> {
static HOME_ENV: tokio::sync::OnceCell<HashMap<String, String>> =
tokio::sync::OnceCell::const_new();
HOME_ENV
.get_or_init(|| async {
match in_dir(paths::home_dir(), false).await.log_err() {
Some(env) => env,
None => inherited(),
}
})
.await
.clone()
}
#[cfg(any(test, feature = "test-support"))]
/// Test stub: returns a fixed single-entry environment so callers can detect
/// that the "shell environment" code path ran, without spawning a real shell.
pub async fn in_dir(_dir: &Path, _load_direnv: bool) -> Result<HashMap<String, String>> {
    let mut fake_env = HashMap::default();
    fake_env.insert("ZED_FAKE_TEST_ENV".into(), "true".into());
    Ok(fake_env)
}
#[cfg(all(target_os = "windows", not(any(test, feature = "test-support"))))]
/// Windows stub: no login shell is spawned; returns an empty environment.
pub async fn in_dir(_dir: &Path, _load_direnv: bool) -> Result<HashMap<String, String>> {
    Ok(Default::default())
}
#[cfg(not(any(target_os = "windows", test, feature = "test-support")))]
/// The environment read from launching a login shell in a specific directory.
///
/// Spawns the user's shell as an interactive login shell, `cd`s into `dir`,
/// then dumps the environment with `/usr/bin/env` after a sentinel marker.
/// When `load_direnv` is true, additionally overlays the environment that
/// `direnv` would export for `dir` (best-effort; direnv failures are logged
/// and ignored).
///
/// Errors if the shell cannot be spawned, exits nonzero, or its output cannot
/// be parsed.
pub async fn in_dir(dir: &Path, load_direnv: bool) -> Result<HashMap<String, String>> {
    use anyhow::Context;
    use std::path::PathBuf;
    use util::ResultExt;
    use util::parse_env_output;

    // Sentinel printed right before the `env` dump, so we can discard any
    // shell-startup noise (prompts, rc-file output) that precedes it.
    const MARKER: &str = "ZED_SHELL_START";
    let shell = util::get_system_shell();
    let shell_path = PathBuf::from(&shell);
    let shell_name = shell_path.file_name().and_then(|f| f.to_str());

    // What we're doing here is to spawn a shell and then `cd` into
    // the project directory to get the env in there as if the user
    // `cd`'d into it. We do that because tools like direnv, asdf, ...
    // hook into `cd` and only set up the env after that.
    //
    // If the user selects `Direct` mode for direnv, direnv sets an environment
    // variable that it later uses to know that it should not run the hook.
    // We include that variable in the captured environment, so it is okay to
    // run the hook even if direnv direct mode is enabled.
    //
    // In certain shells we need to execute an additional command in order to
    // trigger the behavior of direnv, etc.
    let command = match shell_name {
        // fish runs its `cd` hooks off the fish_prompt event, so emit it first.
        Some("fish") => format!(
            "cd '{}'; emit fish_prompt; printf '%s' {MARKER}; /usr/bin/env;",
            dir.display()
        ),
        _ => format!(
            "cd '{}'; printf '%s' {MARKER}; /usr/bin/env;",
            dir.display()
        ),
    };

    // csh/tcsh only supports `-l` if it's the only flag. So this won't be a login shell.
    // Users must rely on vars from `~/.tcshrc` or `~/.cshrc` and not `.login` as a result.
    let args = match shell_name {
        Some("tcsh") | Some("csh") => vec!["-i".to_string(), "-c".to_string(), command],
        _ => vec![
            "-l".to_string(),
            "-i".to_string(),
            "-c".to_string(),
            command,
        ],
    };

    // Spawning and waiting on the shell is blocking work, so run it off the
    // async executor; the shell gets its own session (new process group).
    let output = smol::unblock(move || {
        util::set_pre_exec_to_start_new_session(std::process::Command::new(&shell).args(&args))
            .output()
    })
    .await
    .with_context(|| "Failed to spawn login shell to source login environment variables")?;
    if !output.status.success() {
        anyhow::bail!("Login shell exited with nonzero exit code.");
    }

    let stdout = String::from_utf8_lossy(&output.stdout);
    // Everything before MARKER is shell-startup noise; everything after is the
    // `env` dump we want. Missing marker means the command never ran properly.
    let Some(env_output_start) = stdout.find(MARKER) else {
        let stderr = String::from_utf8_lossy(&output.stderr);
        anyhow::bail!(
            "failed to parse output of `env` command in login shell. stdout: {:?}, stderr: {:?}",
            stdout,
            stderr
        );
    };

    let mut parsed_env = HashMap::default();
    let env_output = &stdout[env_output_start + MARKER.len()..];
    parse_env_output(env_output, |key, value| {
        parsed_env.insert(key, value);
    });

    if load_direnv {
        // Best-effort: direnv values override the shell's; errors are logged
        // (via `log_err`) rather than failing the whole environment load.
        if let Some(direnv) = load_direnv_environment(&parsed_env, dir).await.log_err() {
            for (key, value) in direnv {
                parsed_env.insert(key, value);
            }
        }
    }

    Ok(parsed_env)
}
#[cfg(not(any(target_os = "windows", test, feature = "test-support")))]
/// Asks `direnv` (if installed) which environment it would export for `dir`.
///
/// Returns an empty map when `direnv` is not on the path or exports nothing.
/// Errors when `direnv` exits unsuccessfully or its JSON output fails to parse.
async fn load_direnv_environment(
    env: &HashMap<String, String>,
    dir: &Path,
) -> Result<HashMap<String, String>> {
    // No direnv binary means there is simply nothing to load.
    let direnv_path = match which::which("direnv") {
        Ok(path) => path,
        Err(_) => return Ok(HashMap::default()),
    };

    // Run `direnv export json` inside the target directory, with the
    // environment we just captured; TERM=dumb keeps the output plain.
    let mut command = smol::process::Command::new(direnv_path);
    command
        .args(["export", "json"])
        .envs(env)
        .env("TERM", "dumb")
        .current_dir(dir);
    let direnv_output = command.output().await?;

    if !direnv_output.status.success() {
        anyhow::bail!(
            "Loading direnv environment failed ({}), stderr: {}",
            direnv_output.status,
            String::from_utf8_lossy(&direnv_output.stderr)
        );
    }

    // direnv prints nothing at all when it has no environment to export.
    let stdout = String::from_utf8_lossy(&direnv_output.stdout);
    if stdout.is_empty() {
        return Ok(HashMap::default());
    }
    Ok(serde_json::from_str(&stdout)?)
}

View File

@@ -31,6 +31,7 @@ clap.workspace = true
client.workspace = true client.workspace = true
collections.workspace = true collections.workspace = true
dirs.workspace = true dirs.workspace = true
environment.workspace = true
dotenv.workspace = true dotenv.workspace = true
env_logger.workspace = true env_logger.workspace = true
extension.workspace = true extension.workspace = true

View File

@@ -417,7 +417,7 @@ pub fn init(cx: &mut App) -> Arc<AgentAppState> {
tx.send(Some(options)).log_err(); tx.send(Some(options)).log_err();
}) })
.detach(); .detach();
let node_runtime = NodeRuntime::new(client.http_client(), None, rx); let node_runtime = NodeRuntime::new(client.http_client(), rx);
let extension_host_proxy = ExtensionHostProxy::global(cx); let extension_host_proxy = ExtensionHostProxy::global(cx);

View File

@@ -802,7 +802,7 @@ pub fn repo_path_for_url(repos_dir: &Path, repo_url: &str) -> PathBuf {
} }
pub async fn run_git(repo_path: &Path, args: &[&str]) -> Result<String> { pub async fn run_git(repo_path: &Path, args: &[&str]) -> Result<String> {
let output = new_smol_command("git") let output = new_smol_command("git", &environment::inherited())
.current_dir(repo_path) .current_dir(repo_path)
.args(args) .args(args)
.output() .output()

View File

@@ -18,6 +18,7 @@ async-tar.workspace = true
async-trait.workspace = true async-trait.workspace = true
collections.workspace = true collections.workspace = true
dap.workspace = true dap.workspace = true
environment.workspace = true
fs.workspace = true fs.workspace = true
futures.workspace = true futures.workspace = true
gpui.workspace = true gpui.workspace = true

View File

@@ -144,7 +144,7 @@ impl ExtensionBuilder {
"compiling Rust crate for extension {}", "compiling Rust crate for extension {}",
extension_dir.display() extension_dir.display()
); );
let output = util::command::new_std_command("cargo") let output = util::command::new_std_command("cargo", &environment::inherited())
.args(["build", "--target", RUST_TARGET]) .args(["build", "--target", RUST_TARGET])
.args(options.release.then_some("--release")) .args(options.release.then_some("--release"))
.arg("--target-dir") .arg("--target-dir")
@@ -254,7 +254,7 @@ impl ExtensionBuilder {
let scanner_path = src_path.join("scanner.c"); let scanner_path = src_path.join("scanner.c");
log::info!("compiling {grammar_name} parser"); log::info!("compiling {grammar_name} parser");
let clang_output = util::command::new_std_command(&clang_path) let clang_output = util::command::new_std_command(&clang_path, &environment::inherited())
.args(["-fPIC", "-shared", "-Os"]) .args(["-fPIC", "-shared", "-Os"])
.arg(format!("-Wl,--export=tree_sitter_{grammar_name}")) .arg(format!("-Wl,--export=tree_sitter_{grammar_name}"))
.arg("-o") .arg("-o")
@@ -279,9 +279,10 @@ impl ExtensionBuilder {
fn checkout_repo(&self, directory: &Path, url: &str, rev: &str) -> Result<()> { fn checkout_repo(&self, directory: &Path, url: &str, rev: &str) -> Result<()> {
let git_dir = directory.join(".git"); let git_dir = directory.join(".git");
let env = environment::inherited();
if directory.exists() { if directory.exists() {
let remotes_output = util::command::new_std_command("git") let remotes_output = util::command::new_std_command("git", &env)
.arg("--git-dir") .arg("--git-dir")
.arg(&git_dir) .arg(&git_dir)
.args(["remote", "-v"]) .args(["remote", "-v"])
@@ -304,7 +305,7 @@ impl ExtensionBuilder {
fs::create_dir_all(directory).with_context(|| { fs::create_dir_all(directory).with_context(|| {
format!("failed to create grammar directory {}", directory.display(),) format!("failed to create grammar directory {}", directory.display(),)
})?; })?;
let init_output = util::command::new_std_command("git") let init_output = util::command::new_std_command("git", &env)
.arg("init") .arg("init")
.current_dir(directory) .current_dir(directory)
.output()?; .output()?;
@@ -315,7 +316,7 @@ impl ExtensionBuilder {
); );
} }
let remote_add_output = util::command::new_std_command("git") let remote_add_output = util::command::new_std_command("git", &env)
.arg("--git-dir") .arg("--git-dir")
.arg(&git_dir) .arg(&git_dir)
.args(["remote", "add", "origin", url]) .args(["remote", "add", "origin", url])
@@ -329,14 +330,14 @@ impl ExtensionBuilder {
} }
} }
let fetch_output = util::command::new_std_command("git") let fetch_output = util::command::new_std_command("git", &env)
.arg("--git-dir") .arg("--git-dir")
.arg(&git_dir) .arg(&git_dir)
.args(["fetch", "--depth", "1", "origin", rev]) .args(["fetch", "--depth", "1", "origin", rev])
.output() .output()
.context("failed to execute `git fetch`")?; .context("failed to execute `git fetch`")?;
let checkout_output = util::command::new_std_command("git") let checkout_output = util::command::new_std_command("git", &env)
.arg("--git-dir") .arg("--git-dir")
.arg(&git_dir) .arg(&git_dir)
.args(["checkout", rev]) .args(["checkout", rev])
@@ -363,7 +364,8 @@ impl ExtensionBuilder {
} }
fn install_rust_wasm_target_if_needed(&self) -> Result<()> { fn install_rust_wasm_target_if_needed(&self) -> Result<()> {
let rustc_output = util::command::new_std_command("rustc") let env = environment::inherited();
let rustc_output = util::command::new_std_command("rustc", &env)
.arg("--print") .arg("--print")
.arg("sysroot") .arg("sysroot")
.output() .output()
@@ -380,7 +382,7 @@ impl ExtensionBuilder {
return Ok(()); return Ok(());
} }
let output = util::command::new_std_command("rustup") let output = util::command::new_std_command("rustup", &env)
.args(["target", "add", RUST_TARGET]) .args(["target", "add", RUST_TARGET])
.stderr(Stdio::piped()) .stderr(Stdio::piped())
.stdout(Stdio::inherit()) .stdout(Stdio::inherit())

View File

@@ -23,6 +23,7 @@ async-trait.workspace = true
client.workspace = true client.workspace = true
collections.workspace = true collections.workspace = true
dap.workspace = true dap.workspace = true
environment.workspace = true
extension.workspace = true extension.workspace = true
fs.workspace = true fs.workspace = true
futures.workspace = true futures.workspace = true

View File

@@ -709,8 +709,9 @@ impl process::Host for WasmState {
) -> wasmtime::Result<Result<process::Output, String>> { ) -> wasmtime::Result<Result<process::Output, String>> {
maybe!(async { maybe!(async {
self.manifest.allow_exec(&command.command, &command.args)?; self.manifest.allow_exec(&command.command, &command.args)?;
let env = environment::in_home_dir().await;
let output = util::command::new_smol_command(command.command.as_str()) let output = util::command::new_smol_command(command.command.as_str(), &env)
.args(&command.args) .args(&command.args)
.envs(command.env) .envs(command.env)
.output() .output()

View File

@@ -7,6 +7,7 @@ pub mod fs_watcher;
use anyhow::{Context as _, Result, anyhow}; use anyhow::{Context as _, Result, anyhow};
#[cfg(any(target_os = "linux", target_os = "freebsd"))] #[cfg(any(target_os = "linux", target_os = "freebsd"))]
use ashpd::desktop::trash; use ashpd::desktop::trash;
use collections::HashMap;
use gpui::App; use gpui::App;
use gpui::BackgroundExecutor; use gpui::BackgroundExecutor;
use gpui::Global; use gpui::Global;
@@ -132,8 +133,17 @@ pub trait Fs: Send + Sync {
); );
fn home_dir(&self) -> Option<PathBuf>; fn home_dir(&self) -> Option<PathBuf>;
fn open_repo(&self, abs_dot_git: &Path) -> Option<Arc<dyn GitRepository>>; fn open_repo(
fn git_init(&self, abs_work_directory: &Path, fallback_branch_name: String) -> Result<()>; &self,
abs_dot_git: &Path,
project_env: HashMap<String, String>,
) -> Option<Arc<dyn GitRepository>>;
fn git_init(
&self,
abs_work_directory: &Path,
fallback_branch_name: String,
project_env: &HashMap<String, String>,
) -> Result<()>;
fn is_fake(&self) -> bool; fn is_fake(&self) -> bool;
async fn is_case_sensitive(&self) -> Result<bool>; async fn is_case_sensitive(&self) -> Result<bool>;
@@ -788,16 +798,26 @@ impl Fs for RealFs {
) )
} }
fn open_repo(&self, dotgit_path: &Path) -> Option<Arc<dyn GitRepository>> { fn open_repo(
&self,
dotgit_path: &Path,
project_env: HashMap<String, String>,
) -> Option<Arc<dyn GitRepository>> {
Some(Arc::new(RealGitRepository::new( Some(Arc::new(RealGitRepository::new(
dotgit_path, dotgit_path,
self.git_binary_path.clone(), self.git_binary_path.clone(),
project_env,
self.executor.clone(), self.executor.clone(),
)?)) )?))
} }
fn git_init(&self, abs_work_directory_path: &Path, fallback_branch_name: String) -> Result<()> { fn git_init(
let config = new_std_command("git") &self,
abs_work_directory_path: &Path,
fallback_branch_name: String,
project_env: &HashMap<String, String>,
) -> Result<()> {
let config = new_std_command("git", project_env)
.current_dir(abs_work_directory_path) .current_dir(abs_work_directory_path)
.args(&["config", "--global", "--get", "init.defaultBranch"]) .args(&["config", "--global", "--get", "init.defaultBranch"])
.output()?; .output()?;
@@ -810,7 +830,7 @@ impl Fs for RealFs {
branch_name = Cow::Borrowed(fallback_branch_name.as_str()); branch_name = Cow::Borrowed(fallback_branch_name.as_str());
} }
new_std_command("git") new_std_command("git", project_env)
.current_dir(abs_work_directory_path) .current_dir(abs_work_directory_path)
.args(&["init", "-b"]) .args(&["init", "-b"])
.arg(branch_name.trim()) .arg(branch_name.trim())
@@ -2292,7 +2312,11 @@ impl Fs for FakeFs {
) )
} }
fn open_repo(&self, abs_dot_git: &Path) -> Option<Arc<dyn GitRepository>> { fn open_repo(
&self,
abs_dot_git: &Path,
_: HashMap<String, String>,
) -> Option<Arc<dyn GitRepository>> {
use util::ResultExt as _; use util::ResultExt as _;
self.with_git_state_and_paths( self.with_git_state_and_paths(
@@ -2315,6 +2339,7 @@ impl Fs for FakeFs {
&self, &self,
abs_work_directory_path: &Path, abs_work_directory_path: &Path,
_fallback_branch_name: String, _fallback_branch_name: String,
_project_path: &HashMap<String, String>,
) -> Result<()> { ) -> Result<()> {
smol::block_on(self.create_dir(&abs_work_directory_path.join(".git"))) smol::block_on(self.create_dir(&abs_work_directory_path.join(".git")))
} }

View File

@@ -20,6 +20,7 @@ askpass.workspace = true
async-trait.workspace = true async-trait.workspace = true
collections.workspace = true collections.workspace = true
derive_more.workspace = true derive_more.workspace = true
environment.workspace = true
git2.workspace = true git2.workspace = true
gpui.workspace = true gpui.workspace = true
http_client.workspace = true http_client.workspace = true

View File

@@ -33,11 +33,12 @@ impl Blame {
pub async fn for_path( pub async fn for_path(
git_binary: &Path, git_binary: &Path,
working_directory: &Path, working_directory: &Path,
env: &HashMap<String, String>,
path: &Path, path: &Path,
content: &Rope, content: &Rope,
remote_url: Option<String>, remote_url: Option<String>,
) -> Result<Self> { ) -> Result<Self> {
let output = run_git_blame(git_binary, working_directory, path, content).await?; let output = run_git_blame(git_binary, working_directory, env, path, content).await?;
let mut entries = parse_git_blame(&output)?; let mut entries = parse_git_blame(&output)?;
entries.sort_unstable_by(|a, b| a.range.start.cmp(&b.range.start)); entries.sort_unstable_by(|a, b| a.range.start.cmp(&b.range.start));
@@ -48,7 +49,7 @@ impl Blame {
} }
let shas = unique_shas.into_iter().collect::<Vec<_>>(); let shas = unique_shas.into_iter().collect::<Vec<_>>();
let messages = get_messages(working_directory, &shas) let messages = get_messages(working_directory, env, &shas)
.await .await
.context("failed to get commit messages")?; .context("failed to get commit messages")?;
@@ -66,10 +67,11 @@ const GIT_BLAME_NO_PATH: &str = "fatal: no such path";
async fn run_git_blame( async fn run_git_blame(
git_binary: &Path, git_binary: &Path,
working_directory: &Path, working_directory: &Path,
env: &HashMap<String, String>,
path: &Path, path: &Path,
contents: &Rope, contents: &Rope,
) -> Result<String> { ) -> Result<String> {
let mut child = util::command::new_smol_command(git_binary) let mut child = util::command::new_smol_command(git_binary, env)
.current_dir(working_directory) .current_dir(working_directory)
.arg("blame") .arg("blame")
.arg("--incremental") .arg("--incremental")

Binary file not shown.

View File

@@ -19,7 +19,6 @@ use std::process::{ExitStatus, Stdio};
use std::sync::LazyLock; use std::sync::LazyLock;
use std::{ use std::{
cmp::Ordering, cmp::Ordering,
future,
io::{BufRead, BufReader, BufWriter, Read}, io::{BufRead, BufReader, BufWriter, Read},
path::{Path, PathBuf}, path::{Path, PathBuf},
sync::Arc, sync::Arc,
@@ -367,6 +366,7 @@ impl std::fmt::Debug for dyn GitRepository {
pub struct RealGitRepository { pub struct RealGitRepository {
pub repository: Arc<Mutex<git2::Repository>>, pub repository: Arc<Mutex<git2::Repository>>,
pub git_binary_path: PathBuf, pub git_binary_path: PathBuf,
env: HashMap<String, String>,
executor: BackgroundExecutor, executor: BackgroundExecutor,
} }
@@ -374,6 +374,7 @@ impl RealGitRepository {
pub fn new( pub fn new(
dotgit_path: &Path, dotgit_path: &Path,
git_binary_path: Option<PathBuf>, git_binary_path: Option<PathBuf>,
env: HashMap<String, String>,
executor: BackgroundExecutor, executor: BackgroundExecutor,
) -> Option<Self> { ) -> Option<Self> {
let workdir_root = dotgit_path.parent()?; let workdir_root = dotgit_path.parent()?;
@@ -381,6 +382,7 @@ impl RealGitRepository {
Some(Self { Some(Self {
repository: Arc::new(Mutex::new(repository)), repository: Arc::new(Mutex::new(repository)),
git_binary_path: git_binary_path.unwrap_or_else(|| PathBuf::from("git")), git_binary_path: git_binary_path.unwrap_or_else(|| PathBuf::from("git")),
env,
executor, executor,
}) })
} }
@@ -392,6 +394,13 @@ impl RealGitRepository {
.context("failed to read git work directory") .context("failed to read git work directory")
.map(Path::to_path_buf) .map(Path::to_path_buf)
} }
fn git_command(&self) -> Result<std::process::Command> {
let working_directory = self.working_directory()?;
let mut command = new_std_command(&self.git_binary_path, &self.env);
command.current_dir(working_directory);
Ok(command)
}
} }
#[derive(Clone, Debug)] #[derive(Clone, Debug)]
@@ -417,12 +426,11 @@ impl GitRepository for RealGitRepository {
} }
fn show(&self, commit: String) -> BoxFuture<Result<CommitDetails>> { fn show(&self, commit: String) -> BoxFuture<Result<CommitDetails>> {
let working_directory = self.working_directory(); let command = self.git_command();
self.executor self.executor
.spawn(async move { .spawn(async move {
let working_directory = working_directory?; let mut command = command?;
let output = new_std_command("git") let output = command
.current_dir(&working_directory)
.args([ .args([
"--no-optional-locks", "--no-optional-locks",
"show", "show",
@@ -453,13 +461,11 @@ impl GitRepository for RealGitRepository {
} }
fn load_commit(&self, commit: String, cx: AsyncApp) -> BoxFuture<Result<CommitDiff>> { fn load_commit(&self, commit: String, cx: AsyncApp) -> BoxFuture<Result<CommitDiff>> {
let Some(working_directory) = self.repository.lock().workdir().map(ToOwned::to_owned) let command = self.git_command();
else { let command2 = self.git_command();
return future::ready(Err(anyhow!("no working directory"))).boxed();
};
cx.background_spawn(async move { cx.background_spawn(async move {
let show_output = util::command::new_std_command("git") let mut command = command?;
.current_dir(&working_directory) let show_output = command
.args([ .args([
"--no-optional-locks", "--no-optional-locks",
"show", "show",
@@ -480,8 +486,8 @@ impl GitRepository for RealGitRepository {
let parent_sha = lines.next().unwrap().trim().trim_end_matches('\0'); let parent_sha = lines.next().unwrap().trim().trim_end_matches('\0');
let changes = parse_git_diff_name_status(lines.next().unwrap_or("")); let changes = parse_git_diff_name_status(lines.next().unwrap_or(""));
let mut cat_file_process = util::command::new_std_command("git") let mut command = command2?;
.current_dir(&working_directory) let mut cat_file_process = command
.args(["--no-optional-locks", "cat-file", "--batch=%(objectsize)"]) .args(["--no-optional-locks", "cat-file", "--batch=%(objectsize)"])
.stdin(Stdio::piped()) .stdin(Stdio::piped())
.stdout(Stdio::piped()) .stdout(Stdio::piped())
@@ -561,16 +567,15 @@ impl GitRepository for RealGitRepository {
env: Arc<HashMap<String, String>>, env: Arc<HashMap<String, String>>,
) -> BoxFuture<Result<()>> { ) -> BoxFuture<Result<()>> {
async move { async move {
let working_directory = self.working_directory(); let command = self.git_command()?;
let mode_flag = match mode { let mode_flag = match mode {
ResetMode::Mixed => "--mixed", ResetMode::Mixed => "--mixed",
ResetMode::Soft => "--soft", ResetMode::Soft => "--soft",
}; };
let output = new_smol_command(&self.git_binary_path) let output = smol::process::Command::from(command)
.envs(env.iter()) .envs(env.iter())
.current_dir(&working_directory?)
.args(["reset", mode_flag, &commit]) .args(["reset", mode_flag, &commit])
.output() .output()
.await?; .await?;
@@ -591,15 +596,13 @@ impl GitRepository for RealGitRepository {
paths: Vec<RepoPath>, paths: Vec<RepoPath>,
env: Arc<HashMap<String, String>>, env: Arc<HashMap<String, String>>,
) -> BoxFuture<Result<()>> { ) -> BoxFuture<Result<()>> {
let working_directory = self.working_directory(); let command = self.git_command();
let git_binary_path = self.git_binary_path.clone();
async move { async move {
if paths.is_empty() { if paths.is_empty() {
return Ok(()); return Ok(());
} }
let output = new_smol_command(&git_binary_path) let output = smol::process::Command::from(command?)
.current_dir(&working_directory?)
.envs(env.iter()) .envs(env.iter())
.args(["checkout", &commit, "--"]) .args(["checkout", &commit, "--"])
.args(paths.iter().map(|path| path.as_ref())) .args(paths.iter().map(|path| path.as_ref()))
@@ -671,14 +674,12 @@ impl GitRepository for RealGitRepository {
content: Option<String>, content: Option<String>,
env: Arc<HashMap<String, String>>, env: Arc<HashMap<String, String>>,
) -> BoxFuture<anyhow::Result<()>> { ) -> BoxFuture<anyhow::Result<()>> {
let working_directory = self.working_directory(); let command = self.git_command();
let git_binary_path = self.git_binary_path.clone(); let command2 = self.git_command();
self.executor self.executor
.spawn(async move { .spawn(async move {
let working_directory = working_directory?;
if let Some(content) = content { if let Some(content) = content {
let mut child = new_smol_command(&git_binary_path) let mut child = smol::process::Command::from(command?)
.current_dir(&working_directory)
.envs(env.iter()) .envs(env.iter())
.args(["hash-object", "-w", "--stdin"]) .args(["hash-object", "-w", "--stdin"])
.stdin(Stdio::piped()) .stdin(Stdio::piped())
@@ -695,8 +696,7 @@ impl GitRepository for RealGitRepository {
log::debug!("indexing SHA: {sha}, path {path:?}"); log::debug!("indexing SHA: {sha}, path {path:?}");
let output = new_smol_command(&git_binary_path) let output = smol::process::Command::from(command2?)
.current_dir(&working_directory)
.envs(env.iter()) .envs(env.iter())
.args(["update-index", "--add", "--cacheinfo", "100644", &sha]) .args(["update-index", "--add", "--cacheinfo", "100644", &sha])
.arg(path.to_unix_style()) .arg(path.to_unix_style())
@@ -710,8 +710,7 @@ impl GitRepository for RealGitRepository {
)); ));
} }
} else { } else {
let output = new_smol_command(&git_binary_path) let output = smol::process::Command::from(command?)
.current_dir(&working_directory)
.envs(env.iter()) .envs(env.iter())
.args(["update-index", "--force-remove"]) .args(["update-index", "--force-remove"])
.arg(path.to_unix_style()) .arg(path.to_unix_style())
@@ -738,12 +737,11 @@ impl GitRepository for RealGitRepository {
} }
fn revparse_batch(&self, revs: Vec<String>) -> BoxFuture<Result<Vec<Option<String>>>> { fn revparse_batch(&self, revs: Vec<String>) -> BoxFuture<Result<Vec<Option<String>>>> {
let working_directory = self.working_directory(); let command = self.git_command();
self.executor self.executor
.spawn(async move { .spawn(async move {
let working_directory = working_directory?; let mut command = command?;
let mut process = new_std_command("git") let mut process = command
.current_dir(&working_directory)
.args([ .args([
"--no-optional-locks", "--no-optional-locks",
"cat-file", "cat-file",
@@ -796,15 +794,12 @@ impl GitRepository for RealGitRepository {
} }
fn status(&self, path_prefixes: &[RepoPath]) -> BoxFuture<Result<GitStatus>> { fn status(&self, path_prefixes: &[RepoPath]) -> BoxFuture<Result<GitStatus>> {
let git_binary_path = self.git_binary_path.clone(); let command = self.git_command();
let working_directory = self.working_directory();
let path_prefixes = path_prefixes.to_owned(); let path_prefixes = path_prefixes.to_owned();
self.executor self.executor
.spawn(async move { .spawn(async move {
let output = new_std_command(&git_binary_path) let mut command = command?;
.current_dir(working_directory?) let output = command.args(git_status_args(&path_prefixes)).output()?;
.args(git_status_args(&path_prefixes))
.output()?;
if output.status.success() { if output.status.success() {
let stdout = String::from_utf8_lossy(&output.stdout); let stdout = String::from_utf8_lossy(&output.stdout);
stdout.parse() stdout.parse()
@@ -817,8 +812,8 @@ impl GitRepository for RealGitRepository {
} }
fn branches(&self) -> BoxFuture<Result<Vec<Branch>>> { fn branches(&self) -> BoxFuture<Result<Vec<Branch>>> {
let working_directory = self.working_directory(); let command = self.git_command();
let git_binary_path = self.git_binary_path.clone(); let command2 = self.git_command();
self.executor self.executor
.spawn(async move { .spawn(async move {
let fields = [ let fields = [
@@ -839,9 +834,7 @@ impl GitRepository for RealGitRepository {
"--format", "--format",
&fields, &fields,
]; ];
let working_directory = working_directory?; let output = smol::process::Command::from(command?)
let output = new_smol_command(&git_binary_path)
.current_dir(&working_directory)
.args(args) .args(args)
.output() .output()
.await?; .await?;
@@ -859,8 +852,7 @@ impl GitRepository for RealGitRepository {
if branches.is_empty() { if branches.is_empty() {
let args = vec!["symbolic-ref", "--quiet", "HEAD"]; let args = vec!["symbolic-ref", "--quiet", "HEAD"];
let output = new_smol_command(&git_binary_path) let output = smol::process::Command::from(command2?)
.current_dir(&working_directory)
.args(args) .args(args)
.output() .output()
.await?; .await?;
@@ -931,6 +923,7 @@ impl GitRepository for RealGitRepository {
fn blame(&self, path: RepoPath, content: Rope) -> BoxFuture<Result<crate::blame::Blame>> { fn blame(&self, path: RepoPath, content: Rope) -> BoxFuture<Result<crate::blame::Blame>> {
let working_directory = self.working_directory(); let working_directory = self.working_directory();
let git_binary_path = self.git_binary_path.clone(); let git_binary_path = self.git_binary_path.clone();
let env = self.env.clone();
let remote_url = self let remote_url = self
.remote_url("upstream") .remote_url("upstream")
@@ -941,6 +934,7 @@ impl GitRepository for RealGitRepository {
crate::blame::Blame::for_path( crate::blame::Blame::for_path(
&git_binary_path, &git_binary_path,
&working_directory?, &working_directory?,
&env,
&path, &path,
&content, &content,
remote_url, remote_url,
@@ -951,8 +945,7 @@ impl GitRepository for RealGitRepository {
} }
fn diff(&self, diff: DiffType) -> BoxFuture<Result<String>> { fn diff(&self, diff: DiffType) -> BoxFuture<Result<String>> {
let working_directory = self.working_directory(); let command = self.git_command();
let git_binary_path = self.git_binary_path.clone();
self.executor self.executor
.spawn(async move { .spawn(async move {
let args = match diff { let args = match diff {
@@ -960,8 +953,7 @@ impl GitRepository for RealGitRepository {
DiffType::HeadToWorktree => None, DiffType::HeadToWorktree => None,
}; };
let output = new_smol_command(&git_binary_path) let output = smol::process::Command::from(command?)
.current_dir(&working_directory?)
.args(["diff"]) .args(["diff"])
.args(args) .args(args)
.output() .output()
@@ -983,13 +975,11 @@ impl GitRepository for RealGitRepository {
paths: Vec<RepoPath>, paths: Vec<RepoPath>,
env: Arc<HashMap<String, String>>, env: Arc<HashMap<String, String>>,
) -> BoxFuture<Result<()>> { ) -> BoxFuture<Result<()>> {
let working_directory = self.working_directory(); let command = self.git_command();
let git_binary_path = self.git_binary_path.clone();
self.executor self.executor
.spawn(async move { .spawn(async move {
if !paths.is_empty() { if !paths.is_empty() {
let output = new_smol_command(&git_binary_path) let output = smol::process::Command::from(command?)
.current_dir(&working_directory?)
.envs(env.iter()) .envs(env.iter())
.args(["update-index", "--add", "--remove", "--"]) .args(["update-index", "--add", "--remove", "--"])
.args(paths.iter().map(|p| p.to_unix_style())) .args(paths.iter().map(|p| p.to_unix_style()))
@@ -1013,14 +1003,12 @@ impl GitRepository for RealGitRepository {
paths: Vec<RepoPath>, paths: Vec<RepoPath>,
env: Arc<HashMap<String, String>>, env: Arc<HashMap<String, String>>,
) -> BoxFuture<Result<()>> { ) -> BoxFuture<Result<()>> {
let working_directory = self.working_directory(); let command = self.git_command();
let git_binary_path = self.git_binary_path.clone();
self.executor self.executor
.spawn(async move { .spawn(async move {
if !paths.is_empty() { if !paths.is_empty() {
let output = new_smol_command(&git_binary_path) let output = smol::process::Command::from(command?)
.current_dir(&working_directory?)
.envs(env.iter()) .envs(env.iter())
.args(["reset", "--quiet", "--"]) .args(["reset", "--quiet", "--"])
.args(paths.iter().map(|p| p.as_ref())) .args(paths.iter().map(|p| p.as_ref()))
@@ -1046,12 +1034,11 @@ impl GitRepository for RealGitRepository {
options: CommitOptions, options: CommitOptions,
env: Arc<HashMap<String, String>>, env: Arc<HashMap<String, String>>,
) -> BoxFuture<Result<()>> { ) -> BoxFuture<Result<()>> {
let working_directory = self.working_directory(); let command = self.git_command();
self.executor self.executor
.spawn(async move { .spawn(async move {
let mut cmd = new_smol_command("git"); let mut cmd = smol::process::Command::from(command?);
cmd.current_dir(&working_directory?) cmd.envs(env.iter())
.envs(env.iter())
.args(["commit", "--quiet", "-m"]) .args(["commit", "--quiet", "-m"])
.arg(&message.to_string()) .arg(&message.to_string())
.arg("--cleanup=strip"); .arg("--cleanup=strip");
@@ -1087,10 +1074,11 @@ impl GitRepository for RealGitRepository {
cx: AsyncApp, cx: AsyncApp,
) -> BoxFuture<Result<RemoteCommandOutput>> { ) -> BoxFuture<Result<RemoteCommandOutput>> {
let working_directory = self.working_directory(); let working_directory = self.working_directory();
let command = self.git_command();
let executor = cx.background_executor().clone(); let executor = cx.background_executor().clone();
async move { async move {
let working_directory = working_directory?; let working_directory = working_directory?;
let mut command = new_smol_command("git"); let mut command = smol::process::Command::from(command?);
command command
.envs(env.iter()) .envs(env.iter())
.current_dir(&working_directory) .current_dir(&working_directory)
@@ -1118,13 +1106,12 @@ impl GitRepository for RealGitRepository {
env: Arc<HashMap<String, String>>, env: Arc<HashMap<String, String>>,
cx: AsyncApp, cx: AsyncApp,
) -> BoxFuture<Result<RemoteCommandOutput>> { ) -> BoxFuture<Result<RemoteCommandOutput>> {
let working_directory = self.working_directory(); let command = self.git_command();
let executor = cx.background_executor().clone(); let executor = cx.background_executor().clone();
async move { async move {
let mut command = new_smol_command("git"); let mut command = smol::process::Command::from(command?);
command command
.envs(env.iter()) .envs(env.iter())
.current_dir(&working_directory?)
.args(["pull"]) .args(["pull"])
.arg(remote_name) .arg(remote_name)
.arg(branch_name) .arg(branch_name)
@@ -1142,13 +1129,12 @@ impl GitRepository for RealGitRepository {
env: Arc<HashMap<String, String>>, env: Arc<HashMap<String, String>>,
cx: AsyncApp, cx: AsyncApp,
) -> BoxFuture<Result<RemoteCommandOutput>> { ) -> BoxFuture<Result<RemoteCommandOutput>> {
let working_directory = self.working_directory(); let command = self.git_command();
let executor = cx.background_executor().clone(); let executor = cx.background_executor().clone();
async move { async move {
let mut command = new_smol_command("git"); let mut command = smol::process::Command::from(command?);
command command
.envs(env.iter()) .envs(env.iter())
.current_dir(&working_directory?)
.args(["fetch", "--all"]) .args(["fetch", "--all"])
.stdout(smol::process::Stdio::piped()) .stdout(smol::process::Stdio::piped())
.stderr(smol::process::Stdio::piped()); .stderr(smol::process::Stdio::piped());
@@ -1159,14 +1145,12 @@ impl GitRepository for RealGitRepository {
} }
fn get_remotes(&self, branch_name: Option<String>) -> BoxFuture<Result<Vec<Remote>>> { fn get_remotes(&self, branch_name: Option<String>) -> BoxFuture<Result<Vec<Remote>>> {
let working_directory = self.working_directory(); let command = self.git_command();
let git_binary_path = self.git_binary_path.clone(); let command2 = self.git_command();
self.executor self.executor
.spawn(async move { .spawn(async move {
let working_directory = working_directory?;
if let Some(branch_name) = branch_name { if let Some(branch_name) = branch_name {
let output = new_smol_command(&git_binary_path) let output = smol::process::Command::from(command?)
.current_dir(&working_directory)
.args(["config", "--get"]) .args(["config", "--get"])
.arg(format!("branch.{}.remote", branch_name)) .arg(format!("branch.{}.remote", branch_name))
.output() .output()
@@ -1181,8 +1165,7 @@ impl GitRepository for RealGitRepository {
} }
} }
let output = new_smol_command(&git_binary_path) let output = smol::process::Command::from(command2?)
.current_dir(&working_directory)
.args(["remote"]) .args(["remote"])
.output() .output()
.await?; .await?;
@@ -1209,12 +1192,15 @@ impl GitRepository for RealGitRepository {
fn check_for_pushed_commit(&self) -> BoxFuture<Result<Vec<SharedString>>> { fn check_for_pushed_commit(&self) -> BoxFuture<Result<Vec<SharedString>>> {
let working_directory = self.working_directory(); let working_directory = self.working_directory();
let env = self.env.clone();
let git_binary_path = self.git_binary_path.clone(); let git_binary_path = self.git_binary_path.clone();
self.executor self.executor
.spawn(async move { .spawn(async move {
let working_directory = working_directory?; let working_directory = working_directory?;
let git_cmd = async |args: &[&str]| -> Result<String> { let git_cmd = async |args: &[&str]| -> Result<String> {
let output = new_smol_command(&git_binary_path) let output = smol::process::Command::new(&git_binary_path)
.envs(&env)
.current_dir(&working_directory) .current_dir(&working_directory)
.args(args) .args(args)
.output() .output()
@@ -1270,11 +1256,12 @@ impl GitRepository for RealGitRepository {
fn checkpoint(&self) -> BoxFuture<'static, Result<GitRepositoryCheckpoint>> { fn checkpoint(&self) -> BoxFuture<'static, Result<GitRepositoryCheckpoint>> {
let working_directory = self.working_directory(); let working_directory = self.working_directory();
let git_binary_path = self.git_binary_path.clone(); let git_binary_path = self.git_binary_path.clone();
let env = self.env.clone();
let executor = self.executor.clone(); let executor = self.executor.clone();
self.executor self.executor
.spawn(async move { .spawn(async move {
let working_directory = working_directory?; let working_directory = working_directory?;
let mut git = GitBinary::new(git_binary_path, working_directory, executor) let mut git = GitBinary::new(git_binary_path, working_directory, env, executor)
.envs(checkpoint_author_envs()); .envs(checkpoint_author_envs());
git.with_temp_index(async |git| { git.with_temp_index(async |git| {
let head_sha = git.run(&["rev-parse", "HEAD"]).await.ok(); let head_sha = git.run(&["rev-parse", "HEAD"]).await.ok();
@@ -1299,13 +1286,14 @@ impl GitRepository for RealGitRepository {
fn restore_checkpoint(&self, checkpoint: GitRepositoryCheckpoint) -> BoxFuture<Result<()>> { fn restore_checkpoint(&self, checkpoint: GitRepositoryCheckpoint) -> BoxFuture<Result<()>> {
let working_directory = self.working_directory(); let working_directory = self.working_directory();
let git_binary_path = self.git_binary_path.clone(); let git_binary_path = self.git_binary_path.clone();
let env = self.env.clone();
let executor = self.executor.clone(); let executor = self.executor.clone();
self.executor self.executor
.spawn(async move { .spawn(async move {
let working_directory = working_directory?; let working_directory = working_directory?;
let mut git = GitBinary::new(git_binary_path, working_directory, executor); let mut git = GitBinary::new(git_binary_path, working_directory, env, executor);
git.run(&[ git.run(&[
"restore", "restore",
"--source", "--source",
@@ -1334,12 +1322,13 @@ impl GitRepository for RealGitRepository {
) -> BoxFuture<Result<bool>> { ) -> BoxFuture<Result<bool>> {
let working_directory = self.working_directory(); let working_directory = self.working_directory();
let git_binary_path = self.git_binary_path.clone(); let git_binary_path = self.git_binary_path.clone();
let env = self.env.clone();
let executor = self.executor.clone(); let executor = self.executor.clone();
self.executor self.executor
.spawn(async move { .spawn(async move {
let working_directory = working_directory?; let working_directory = working_directory?;
let git = GitBinary::new(git_binary_path, working_directory, executor); let git = GitBinary::new(git_binary_path, working_directory, env, executor);
let result = git let result = git
.run(&[ .run(&[
"diff-tree", "diff-tree",
@@ -1373,12 +1362,13 @@ impl GitRepository for RealGitRepository {
) -> BoxFuture<Result<String>> { ) -> BoxFuture<Result<String>> {
let working_directory = self.working_directory(); let working_directory = self.working_directory();
let git_binary_path = self.git_binary_path.clone(); let git_binary_path = self.git_binary_path.clone();
let env = self.env.clone();
let executor = self.executor.clone(); let executor = self.executor.clone();
self.executor self.executor
.spawn(async move { .spawn(async move {
let working_directory = working_directory?; let working_directory = working_directory?;
let git = GitBinary::new(git_binary_path, working_directory, executor); let git = GitBinary::new(git_binary_path, working_directory, env, executor);
git.run(&[ git.run(&[
"diff", "diff",
"--find-renames", "--find-renames",
@@ -1416,13 +1406,14 @@ struct GitBinary {
working_directory: PathBuf, working_directory: PathBuf,
executor: BackgroundExecutor, executor: BackgroundExecutor,
index_file_path: Option<PathBuf>, index_file_path: Option<PathBuf>,
envs: HashMap<String, String>, env: HashMap<String, String>,
} }
impl GitBinary { impl GitBinary {
fn new( fn new(
git_binary_path: PathBuf, git_binary_path: PathBuf,
working_directory: PathBuf, working_directory: PathBuf,
env: HashMap<String, String>,
executor: BackgroundExecutor, executor: BackgroundExecutor,
) -> Self { ) -> Self {
Self { Self {
@@ -1430,12 +1421,12 @@ impl GitBinary {
working_directory, working_directory,
executor, executor,
index_file_path: None, index_file_path: None,
envs: HashMap::default(), env,
} }
} }
fn envs(mut self, envs: HashMap<String, String>) -> Self { fn envs(mut self, envs: HashMap<String, String>) -> Self {
self.envs = envs; self.env.extend(envs);
self self
} }
@@ -1515,13 +1506,13 @@ impl GitBinary {
where where
S: AsRef<OsStr>, S: AsRef<OsStr>,
{ {
let mut command = new_smol_command(&self.git_binary_path); let mut command = new_smol_command(&self.git_binary_path, &self.env);
command.current_dir(&self.working_directory); command.current_dir(&self.working_directory);
command.args(args); command.args(args);
if let Some(index_file_path) = self.index_file_path.as_ref() { if let Some(index_file_path) = self.index_file_path.as_ref() {
command.env("GIT_INDEX_FILE", index_file_path); command.env("GIT_INDEX_FILE", index_file_path);
} }
command.envs(&self.envs); command.envs(&self.env);
command command
} }
} }
@@ -1827,8 +1818,13 @@ mod tests {
let file_path = repo_dir.path().join("file"); let file_path = repo_dir.path().join("file");
smol::fs::write(&file_path, "initial").await.unwrap(); smol::fs::write(&file_path, "initial").await.unwrap();
let repo = let repo = RealGitRepository::new(
RealGitRepository::new(&repo_dir.path().join(".git"), None, cx.executor()).unwrap(); &repo_dir.path().join(".git"),
None,
environment::in_home_dir().await,
cx.executor(),
)
.unwrap();
repo.stage_paths( repo.stage_paths(
vec![RepoPath::from_str("file")], vec![RepoPath::from_str("file")],
Arc::new(HashMap::default()), Arc::new(HashMap::default()),
@@ -1908,8 +1904,13 @@ mod tests {
let repo_dir = tempfile::tempdir().unwrap(); let repo_dir = tempfile::tempdir().unwrap();
git2::Repository::init(repo_dir.path()).unwrap(); git2::Repository::init(repo_dir.path()).unwrap();
let repo = let repo = RealGitRepository::new(
RealGitRepository::new(&repo_dir.path().join(".git"), None, cx.executor()).unwrap(); &repo_dir.path().join(".git"),
None,
environment::in_home_dir().await,
cx.executor(),
)
.unwrap();
smol::fs::write(repo_dir.path().join("foo"), "foo") smol::fs::write(repo_dir.path().join("foo"), "foo")
.await .await
@@ -1946,8 +1947,13 @@ mod tests {
let repo_dir = tempfile::tempdir().unwrap(); let repo_dir = tempfile::tempdir().unwrap();
git2::Repository::init(repo_dir.path()).unwrap(); git2::Repository::init(repo_dir.path()).unwrap();
let repo = let repo = RealGitRepository::new(
RealGitRepository::new(&repo_dir.path().join(".git"), None, cx.executor()).unwrap(); &repo_dir.path().join(".git"),
None,
environment::in_home_dir().await,
cx.executor(),
)
.unwrap();
smol::fs::write(repo_dir.path().join("file1"), "content1") smol::fs::write(repo_dir.path().join("file1"), "content1")
.await .await
@@ -2006,12 +2012,13 @@ mod tests {
fn gc(&self) -> BoxFuture<Result<()>> { fn gc(&self) -> BoxFuture<Result<()>> {
let working_directory = self.working_directory(); let working_directory = self.working_directory();
let git_binary_path = self.git_binary_path.clone(); let git_binary_path = self.git_binary_path.clone();
let env = self.env.clone();
let executor = self.executor.clone(); let executor = self.executor.clone();
self.executor self.executor
.spawn(async move { .spawn(async move {
let git_binary_path = git_binary_path.clone(); let git_binary_path = git_binary_path.clone();
let working_directory = working_directory?; let working_directory = working_directory?;
let git = GitBinary::new(git_binary_path, working_directory, executor); let git = GitBinary::new(git_binary_path, working_directory, env, executor);
git.run(&["gc", "--prune"]).await?; git.run(&["gc", "--prune"]).await?;
Ok(()) Ok(())
}) })

View File

@@ -1904,11 +1904,9 @@ impl GitPanel {
let fallback_branch_name = GitPanelSettings::get_global(cx) let fallback_branch_name = GitPanelSettings::get_global(cx)
.fallback_branch_name .fallback_branch_name
.clone(); .clone();
this.project.read(cx).git_init( this.project.update(cx, |project, cx| {
worktree.read(cx).abs_path(), project.git_init(worktree.read(cx).abs_path(), fallback_branch_name, cx)
fallback_branch_name, })
cx,
)
}) else { }) else {
return; return;
}; };

View File

@@ -27,7 +27,7 @@ pub trait ContextProvider: Send + Sync {
&self, &self,
_variables: &TaskVariables, _variables: &TaskVariables,
_location: &Location, _location: &Location,
_project_env: Option<HashMap<String, String>>, _project_env: HashMap<String, String>,
_toolchains: Arc<dyn LanguageToolchainStore>, _toolchains: Arc<dyn LanguageToolchainStore>,
_cx: &mut App, _cx: &mut App,
) -> Task<Result<TaskVariables>> { ) -> Task<Result<TaskVariables>> {

View File

@@ -44,7 +44,7 @@ pub trait ToolchainLister: Send + Sync {
async fn list( async fn list(
&self, &self,
worktree_root: PathBuf, worktree_root: PathBuf,
project_env: Option<HashMap<String, String>>, project_env: HashMap<String, String>,
) -> ToolchainList; ) -> ToolchainList;
// Returns a term which we should use in UI to refer to a toolchain. // Returns a term which we should use in UI to refer to a toolchain.
fn term(&self) -> SharedString; fn term(&self) -> SharedString;

View File

@@ -69,6 +69,7 @@ impl super::LspAdapter for CLspAdapter {
delegate: &dyn LspAdapterDelegate, delegate: &dyn LspAdapterDelegate,
) -> Result<LanguageServerBinary> { ) -> Result<LanguageServerBinary> {
let version = version.downcast::<GitHubLspBinaryVersion>().unwrap(); let version = version.downcast::<GitHubLspBinaryVersion>().unwrap();
let env = delegate.shell_env().await;
let zip_path = container_dir.join(format!("clangd_{}.zip", version.name)); let zip_path = container_dir.join(format!("clangd_{}.zip", version.name));
let version_dir = container_dir.join(format!("clangd_{}", version.name)); let version_dir = container_dir.join(format!("clangd_{}", version.name));
let binary_path = version_dir.join("bin/clangd"); let binary_path = version_dir.join("bin/clangd");
@@ -88,7 +89,7 @@ impl super::LspAdapter for CLspAdapter {
} }
futures::io::copy(response.body_mut(), &mut file).await?; futures::io::copy(response.body_mut(), &mut file).await?;
let unzip_status = util::command::new_smol_command("unzip") let unzip_status = util::command::new_smol_command("unzip", &env)
.current_dir(&container_dir) .current_dir(&container_dir)
.arg(&zip_path) .arg(&zip_path)
.output() .output()

View File

@@ -120,7 +120,8 @@ impl super::LspAdapter for GoLspAdapter {
delegate: &dyn LspAdapterDelegate, delegate: &dyn LspAdapterDelegate,
) -> Result<LanguageServerBinary> { ) -> Result<LanguageServerBinary> {
let go = delegate.which("go".as_ref()).await.unwrap_or("go".into()); let go = delegate.which("go".as_ref()).await.unwrap_or("go".into());
let go_version_output = util::command::new_smol_command(&go) let env = delegate.shell_env().await;
let go_version_output = util::command::new_smol_command(&go, &env)
.args(["version"]) .args(["version"])
.output() .output()
.await .await
@@ -154,7 +155,7 @@ impl super::LspAdapter for GoLspAdapter {
let gobin_dir = container_dir.join("gobin"); let gobin_dir = container_dir.join("gobin");
fs::create_dir_all(&gobin_dir).await?; fs::create_dir_all(&gobin_dir).await?;
let install_output = util::command::new_smol_command(go) let install_output = util::command::new_smol_command(go, &env)
.env("GO111MODULE", "on") .env("GO111MODULE", "on")
.env("GOBIN", &gobin_dir) .env("GOBIN", &gobin_dir)
.args(["install", "golang.org/x/tools/gopls@latest"]) .args(["install", "golang.org/x/tools/gopls@latest"])
@@ -174,7 +175,7 @@ impl super::LspAdapter for GoLspAdapter {
} }
let installed_binary_path = gobin_dir.join(BINARY); let installed_binary_path = gobin_dir.join(BINARY);
let version_output = util::command::new_smol_command(&installed_binary_path) let version_output = util::command::new_smol_command(&installed_binary_path, &env)
.arg("version") .arg("version")
.output() .output()
.await .await
@@ -443,7 +444,7 @@ impl ContextProvider for GoContextProvider {
&self, &self,
variables: &TaskVariables, variables: &TaskVariables,
location: &Location, location: &Location,
_: Option<HashMap<String, String>>, _: HashMap<String, String>,
_: Arc<dyn LanguageToolchainStore>, _: Arc<dyn LanguageToolchainStore>,
cx: &mut gpui::App, cx: &mut gpui::App,
) -> Task<Result<TaskVariables>> { ) -> Task<Result<TaskVariables>> {

View File

@@ -365,7 +365,7 @@ impl ContextProvider for PythonContextProvider {
&self, &self,
variables: &task::TaskVariables, variables: &task::TaskVariables,
location: &project::Location, location: &project::Location,
_: Option<HashMap<String, String>>, _: HashMap<String, String>,
toolchains: Arc<dyn LanguageToolchainStore>, toolchains: Arc<dyn LanguageToolchainStore>,
cx: &mut gpui::App, cx: &mut gpui::App,
) -> Task<Result<task::TaskVariables>> { ) -> Task<Result<task::TaskVariables>> {
@@ -675,10 +675,9 @@ impl ToolchainLister for PythonToolchainProvider {
async fn list( async fn list(
&self, &self,
worktree_root: PathBuf, worktree_root: PathBuf,
project_env: Option<HashMap<String, String>>, project_env: HashMap<String, String>,
) -> ToolchainList { ) -> ToolchainList {
let env = project_env.unwrap_or_default(); let environment = EnvironmentApi::from_env(&project_env);
let environment = EnvironmentApi::from_env(&env);
let locators = pet::locators::create_locators( let locators = pet::locators::create_locators(
Arc::new(pet_conda::Conda::from(&environment)), Arc::new(pet_conda::Conda::from(&environment)),
Arc::new(pet_poetry::Poetry::from(&environment)), Arc::new(pet_poetry::Poetry::from(&environment)),
@@ -889,9 +888,10 @@ impl PyLspAdapter {
.await .await
.ok_or_else(|| anyhow!("Could not get working directory for PyLSP"))?; .ok_or_else(|| anyhow!("Could not get working directory for PyLSP"))?;
let mut path = PathBuf::from(work_dir.as_ref()); let mut path = PathBuf::from(work_dir.as_ref());
let env = delegate.shell_env().await;
path.push("pylsp-venv"); path.push("pylsp-venv");
if !path.exists() { if !path.exists() {
util::command::new_smol_command(python_path) util::command::new_smol_command(python_path, &env)
.arg("-m") .arg("-m")
.arg("venv") .arg("venv")
.arg("pylsp-venv") .arg("pylsp-venv")
@@ -983,8 +983,9 @@ impl LspAdapter for PyLspAdapter {
) -> Result<LanguageServerBinary> { ) -> Result<LanguageServerBinary> {
let venv = self.base_venv(delegate).await.map_err(|e| anyhow!(e))?; let venv = self.base_venv(delegate).await.map_err(|e| anyhow!(e))?;
let pip_path = venv.join(BINARY_DIR).join("pip3"); let pip_path = venv.join(BINARY_DIR).join("pip3");
let env = delegate.shell_env().await;
ensure!( ensure!(
util::command::new_smol_command(pip_path.as_path()) util::command::new_smol_command(pip_path.as_path(), &env)
.arg("install") .arg("install")
.arg("python-lsp-server") .arg("python-lsp-server")
.arg("-U") .arg("-U")
@@ -995,7 +996,7 @@ impl LspAdapter for PyLspAdapter {
"python-lsp-server installation failed" "python-lsp-server installation failed"
); );
ensure!( ensure!(
util::command::new_smol_command(pip_path.as_path()) util::command::new_smol_command(pip_path.as_path(), &env)
.arg("install") .arg("install")
.arg("python-lsp-server[all]") .arg("python-lsp-server[all]")
.arg("-U") .arg("-U")
@@ -1006,7 +1007,7 @@ impl LspAdapter for PyLspAdapter {
"python-lsp-server[all] installation failed" "python-lsp-server[all] installation failed"
); );
ensure!( ensure!(
util::command::new_smol_command(pip_path) util::command::new_smol_command(pip_path, &env)
.arg("install") .arg("install")
.arg("pylsp-mypy") .arg("pylsp-mypy")
.arg("-U") .arg("-U")

View File

@@ -560,7 +560,7 @@ impl ContextProvider for RustContextProvider {
&self, &self,
task_variables: &TaskVariables, task_variables: &TaskVariables,
location: &Location, location: &Location,
project_env: Option<HashMap<String, String>>, project_env: HashMap<String, String>,
_: Arc<dyn LanguageToolchainStore>, _: Arc<dyn LanguageToolchainStore>,
cx: &mut gpui::App, cx: &mut gpui::App,
) -> Task<Result<TaskVariables>> { ) -> Task<Result<TaskVariables>> {
@@ -592,14 +592,12 @@ impl ContextProvider for RustContextProvider {
.as_deref() .as_deref()
.and_then(|local_abs_path| local_abs_path.parent()) .and_then(|local_abs_path| local_abs_path.parent())
{ {
if let Some(package_name) = if let Some(package_name) = human_readable_package_name(path, &project_env).await {
human_readable_package_name(path, project_env.as_ref()).await
{
variables.insert(RUST_PACKAGE_TASK_VARIABLE.clone(), package_name); variables.insert(RUST_PACKAGE_TASK_VARIABLE.clone(), package_name);
} }
} }
if let Some(path) = local_abs_path.as_ref() { if let Some(path) = local_abs_path.as_ref() {
if let Some(target) = target_info_from_abs_path(&path, project_env.as_ref()).await { if let Some(target) = target_info_from_abs_path(&path, &project_env).await {
variables.extend(TaskVariables::from_iter([ variables.extend(TaskVariables::from_iter([
(RUST_PACKAGE_TASK_VARIABLE.clone(), target.package_name), (RUST_PACKAGE_TASK_VARIABLE.clone(), target.package_name),
(RUST_BIN_NAME_TASK_VARIABLE.clone(), target.target_name), (RUST_BIN_NAME_TASK_VARIABLE.clone(), target.target_name),
@@ -864,12 +862,9 @@ struct TargetInfo {
async fn target_info_from_abs_path( async fn target_info_from_abs_path(
abs_path: &Path, abs_path: &Path,
project_env: Option<&HashMap<String, String>>, project_env: &HashMap<String, String>,
) -> Option<TargetInfo> { ) -> Option<TargetInfo> {
let mut command = util::command::new_smol_command("cargo"); let mut command = util::command::new_smol_command("cargo", &project_env);
if let Some(envs) = project_env {
command.envs(envs);
}
let output = command let output = command
.current_dir(abs_path.parent()?) .current_dir(abs_path.parent()?)
.arg("metadata") .arg("metadata")
@@ -913,12 +908,9 @@ fn target_info_from_metadata(metadata: CargoMetadata, abs_path: &Path) -> Option
async fn human_readable_package_name( async fn human_readable_package_name(
package_directory: &Path, package_directory: &Path,
project_env: Option<&HashMap<String, String>>, project_env: &HashMap<String, String>,
) -> Option<String> { ) -> Option<String> {
let mut command = util::command::new_smol_command("cargo"); let mut command = util::command::new_smol_command("cargo", project_env);
if let Some(envs) = project_env {
command.envs(envs);
}
let pkgid = String::from_utf8( let pkgid = String::from_utf8(
command command
.current_dir(package_directory) .current_dir(package_directory)

View File

@@ -317,6 +317,7 @@ impl LanguageServer {
root_path: &Path, root_path: &Path,
code_action_kinds: Option<Vec<CodeActionKind>>, code_action_kinds: Option<Vec<CodeActionKind>>,
workspace_folders: Arc<Mutex<BTreeSet<Url>>>, workspace_folders: Arc<Mutex<BTreeSet<Url>>>,
mut project_env: HashMap<String, String>,
cx: &mut AsyncApp, cx: &mut AsyncApp,
) -> Result<Self> { ) -> Result<Self> {
let working_dir = if root_path.is_dir() { let working_dir = if root_path.is_dir() {
@@ -332,10 +333,13 @@ impl LanguageServer {
&binary.arguments &binary.arguments
); );
let mut server = util::command::new_smol_command(&binary.path) if let Some(env) = binary.env.clone() {
project_env.extend(env);
}
let mut server = util::command::new_smol_command(&binary.path, &project_env)
.current_dir(working_dir) .current_dir(working_dir)
.args(&binary.arguments) .args(&binary.arguments)
.envs(binary.env.clone().unwrap_or_default())
.stdin(Stdio::piped()) .stdin(Stdio::piped())
.stdout(Stdio::piped()) .stdout(Stdio::piped())
.stderr(Stdio::piped()) .stderr(Stdio::piped())

View File

@@ -22,6 +22,8 @@ async-watch.workspace = true
async-tar.workspace = true async-tar.workspace = true
async-trait.workspace = true async-trait.workspace = true
async_zip.workspace = true async_zip.workspace = true
collections.workspace = true
environment.workspace = true
futures.workspace = true futures.workspace = true
http_client.workspace = true http_client.workspace = true
log.workspace = true log.workspace = true

View File

@@ -4,7 +4,8 @@ use anyhow::{Context, Result, anyhow, bail};
pub use archive::extract_zip; pub use archive::extract_zip;
use async_compression::futures::bufread::GzipDecoder; use async_compression::futures::bufread::GzipDecoder;
use async_tar::Archive; use async_tar::Archive;
use futures::{AsyncReadExt, FutureExt as _, channel::oneshot, future::Shared}; use collections::HashMap;
use futures::AsyncReadExt;
use http_client::{HttpClient, Url}; use http_client::{HttpClient, Url};
use semver::Version; use semver::Version;
use serde::Deserialize; use serde::Deserialize;
@@ -12,7 +13,6 @@ use smol::io::BufReader;
use smol::{fs, lock::Mutex}; use smol::{fs, lock::Mutex};
use std::{ use std::{
env::{self, consts}, env::{self, consts},
ffi::OsString,
io, io,
path::{Path, PathBuf}, path::{Path, PathBuf},
process::{Output, Stdio}, process::{Output, Stdio},
@@ -20,8 +20,6 @@ use std::{
}; };
use util::ResultExt; use util::ResultExt;
const NODE_CA_CERTS_ENV_VAR: &str = "NODE_EXTRA_CA_CERTS";
#[derive(Clone, Debug, Default, Eq, PartialEq)] #[derive(Clone, Debug, Default, Eq, PartialEq)]
pub struct NodeBinaryOptions { pub struct NodeBinaryOptions {
pub allow_path_lookup: bool, pub allow_path_lookup: bool,
@@ -37,13 +35,11 @@ struct NodeRuntimeState {
instance: Option<Box<dyn NodeRuntimeTrait>>, instance: Option<Box<dyn NodeRuntimeTrait>>,
last_options: Option<NodeBinaryOptions>, last_options: Option<NodeBinaryOptions>,
options: async_watch::Receiver<Option<NodeBinaryOptions>>, options: async_watch::Receiver<Option<NodeBinaryOptions>>,
shell_env_loaded: Shared<oneshot::Receiver<()>>,
} }
impl NodeRuntime { impl NodeRuntime {
pub fn new( pub fn new(
http: Arc<dyn HttpClient>, http: Arc<dyn HttpClient>,
shell_env_loaded: Option<oneshot::Receiver<()>>,
options: async_watch::Receiver<Option<NodeBinaryOptions>>, options: async_watch::Receiver<Option<NodeBinaryOptions>>,
) -> Self { ) -> Self {
NodeRuntime(Arc::new(Mutex::new(NodeRuntimeState { NodeRuntime(Arc::new(Mutex::new(NodeRuntimeState {
@@ -51,7 +47,6 @@ impl NodeRuntime {
instance: None, instance: None,
last_options: None, last_options: None,
options, options,
shell_env_loaded: shell_env_loaded.unwrap_or(oneshot::channel().1).shared(),
}))) })))
} }
@@ -61,7 +56,6 @@ impl NodeRuntime {
instance: None, instance: None,
last_options: None, last_options: None,
options: async_watch::channel(Some(NodeBinaryOptions::default())).1, options: async_watch::channel(Some(NodeBinaryOptions::default())).1,
shell_env_loaded: oneshot::channel().1.shared(),
}))) })))
} }
@@ -79,22 +73,23 @@ impl NodeRuntime {
return Ok(instance.boxed_clone()); return Ok(instance.boxed_clone());
} }
let env = environment::in_home_dir().await;
if let Some((node, npm)) = options.use_paths.as_ref() { if let Some((node, npm)) = options.use_paths.as_ref() {
let instance = SystemNodeRuntime::new(node.clone(), npm.clone()).await?; let instance = SystemNodeRuntime::new(env, node.clone(), npm.clone()).await?;
state.instance = Some(instance.boxed_clone()); state.instance = Some(instance.boxed_clone());
return Ok(instance); return Ok(instance);
} }
if options.allow_path_lookup { if options.allow_path_lookup {
state.shell_env_loaded.clone().await.ok(); if let Some(instance) = SystemNodeRuntime::detect(env.clone()).await {
if let Some(instance) = SystemNodeRuntime::detect().await {
state.instance = Some(instance.boxed_clone()); state.instance = Some(instance.boxed_clone());
return Ok(instance); return Ok(instance);
} }
} }
let instance = if options.allow_binary_download { let instance = if options.allow_binary_download {
ManagedNodeRuntime::install_if_needed(&state.http).await? ManagedNodeRuntime::install_if_needed(env, &state.http).await?
} else { } else {
Box::new(UnavailableNodeRuntime) Box::new(UnavailableNodeRuntime)
}; };
@@ -266,6 +261,7 @@ trait NodeRuntimeTrait: Send + Sync {
#[derive(Clone)] #[derive(Clone)]
struct ManagedNodeRuntime { struct ManagedNodeRuntime {
installation_path: PathBuf, installation_path: PathBuf,
clean_env: HashMap<String, String>,
} }
impl ManagedNodeRuntime { impl ManagedNodeRuntime {
@@ -281,7 +277,10 @@ impl ManagedNodeRuntime {
#[cfg(windows)] #[cfg(windows)]
const NPM_PATH: &str = "node_modules/npm/bin/npm-cli.js"; const NPM_PATH: &str = "node_modules/npm/bin/npm-cli.js";
async fn install_if_needed(http: &Arc<dyn HttpClient>) -> Result<Box<dyn NodeRuntimeTrait>> { async fn install_if_needed(
env: HashMap<String, String>,
http: &Arc<dyn HttpClient>,
) -> Result<Box<dyn NodeRuntimeTrait>> {
log::info!("Node runtime install_if_needed"); log::info!("Node runtime install_if_needed");
let os = match consts::OS { let os = match consts::OS {
@@ -303,11 +302,20 @@ impl ManagedNodeRuntime {
let node_dir = node_containing_dir.join(folder_name); let node_dir = node_containing_dir.join(folder_name);
let node_binary = node_dir.join(Self::NODE_PATH); let node_binary = node_dir.join(Self::NODE_PATH);
let npm_file = node_dir.join(Self::NPM_PATH); let npm_file = node_dir.join(Self::NPM_PATH);
let node_ca_certs = env::var(NODE_CA_CERTS_ENV_VAR).unwrap_or_else(|_| String::new());
let result = util::command::new_smol_command(&node_binary) let mut clean_env = HashMap::default();
.env_clear() clean_env.insert(
.env(NODE_CA_CERTS_ENV_VAR, node_ca_certs) "PATH".to_string(),
path_with_node_binary_prepended(
env.get("PATH").cloned().unwrap_or_default(),
&node_binary,
),
);
if let Ok(node_ca_certs) = env::var("NODE_EXTRA_CA_CERTS") {
clean_env.insert("NODE_EXTRA_CA_CERTS".to_string(), node_ca_certs);
}
let result = util::command::new_smol_command(&node_binary, &env)
.arg(npm_file) .arg(npm_file)
.arg("--version") .arg("--version")
.stdin(Stdio::null()) .stdin(Stdio::null())
@@ -363,30 +371,24 @@ impl ManagedNodeRuntime {
_ = fs::write(node_dir.join("blank_global_npmrc"), []).await; _ = fs::write(node_dir.join("blank_global_npmrc"), []).await;
anyhow::Ok(Box::new(ManagedNodeRuntime { anyhow::Ok(Box::new(ManagedNodeRuntime {
clean_env,
installation_path: node_dir, installation_path: node_dir,
})) }))
} }
} }
fn path_with_node_binary_prepended(node_binary: &Path) -> Option<OsString> { fn path_with_node_binary_prepended(existing_path: String, node_binary: &Path) -> String {
let existing_path = env::var_os("PATH"); let Some(node_bin_dir) = node_binary.parent().map(|dir| dir.as_os_str()) else {
let node_bin_dir = node_binary.parent().map(|dir| dir.as_os_str()); return existing_path;
match (existing_path, node_bin_dir) { };
(Some(existing_path), Some(node_bin_dir)) => {
if let Ok(joined) = env::join_paths( let mut existing = env::split_paths(&existing_path).collect::<Vec<_>>();
[PathBuf::from(node_bin_dir)] existing.insert(0, PathBuf::from(node_bin_dir));
.into_iter()
.chain(env::split_paths(&existing_path)), let joined = env::join_paths(existing)
) { .ok()
Some(joined) .and_then(|e| e.to_str().map(|s| s.to_string()));
} else { joined.unwrap_or(existing_path)
Some(existing_path)
}
}
(Some(existing_path), None) => Some(existing_path),
(None, Some(node_bin_dir)) => Some(node_bin_dir.to_owned()),
_ => None,
}
} }
#[async_trait::async_trait] #[async_trait::async_trait]
@@ -409,7 +411,6 @@ impl NodeRuntimeTrait for ManagedNodeRuntime {
let attempt = || async move { let attempt = || async move {
let node_binary = self.installation_path.join(Self::NODE_PATH); let node_binary = self.installation_path.join(Self::NODE_PATH);
let npm_file = self.installation_path.join(Self::NPM_PATH); let npm_file = self.installation_path.join(Self::NPM_PATH);
let env_path = path_with_node_binary_prepended(&node_binary).unwrap_or_default();
if smol::fs::metadata(&node_binary).await.is_err() { if smol::fs::metadata(&node_binary).await.is_err() {
return Err(anyhow!("missing node binary file")); return Err(anyhow!("missing node binary file"));
@@ -419,12 +420,7 @@ impl NodeRuntimeTrait for ManagedNodeRuntime {
return Err(anyhow!("missing npm file")); return Err(anyhow!("missing npm file"));
} }
let node_ca_certs = env::var(NODE_CA_CERTS_ENV_VAR).unwrap_or_else(|_| String::new()); let mut command = util::command::new_smol_command(node_binary, &self.clean_env);
let mut command = util::command::new_smol_command(node_binary);
command.env_clear();
command.env("PATH", env_path);
command.env(NODE_CA_CERTS_ENV_VAR, node_ca_certs);
command.arg(npm_file).arg(subcommand); command.arg(npm_file).arg(subcommand);
command.args(["--cache".into(), self.installation_path.join("cache")]); command.args(["--cache".into(), self.installation_path.join("cache")]);
command.args([ command.args([
@@ -478,12 +474,26 @@ pub struct SystemNodeRuntime {
npm: PathBuf, npm: PathBuf,
global_node_modules: PathBuf, global_node_modules: PathBuf,
scratch_dir: PathBuf, scratch_dir: PathBuf,
clean_env: HashMap<String, String>,
} }
impl SystemNodeRuntime { impl SystemNodeRuntime {
const MIN_VERSION: semver::Version = Version::new(20, 0, 0); const MIN_VERSION: semver::Version = Version::new(20, 0, 0);
async fn new(node: PathBuf, npm: PathBuf) -> Result<Box<dyn NodeRuntimeTrait>> { async fn new(
let output = util::command::new_smol_command(&node) env: HashMap<String, String>,
node: PathBuf,
npm: PathBuf,
) -> Result<Box<dyn NodeRuntimeTrait>> {
let path =
path_with_node_binary_prepended(env.get("PATH").cloned().unwrap_or_default(), &node);
let mut clean_env = HashMap::default();
clean_env.insert("PATH".to_string(), path);
if let Ok(node_ca_certs) = env::var("NODE_EXTRA_CA_CERTS") {
clean_env.insert("NODE_EXTRA_CA_CERTS".to_string(), node_ca_certs);
}
let output = util::command::new_smol_command(&node, &clean_env)
.arg("--version") .arg("--version")
.output() .output()
.await .await
@@ -515,6 +525,7 @@ impl SystemNodeRuntime {
npm, npm,
global_node_modules: PathBuf::default(), global_node_modules: PathBuf::default(),
scratch_dir, scratch_dir,
clean_env,
}; };
let output = this.run_npm_subcommand(None, None, "root", &["-g"]).await?; let output = this.run_npm_subcommand(None, None, "root", &["-g"]).await?;
this.global_node_modules = this.global_node_modules =
@@ -523,10 +534,14 @@ impl SystemNodeRuntime {
Ok(Box::new(this)) Ok(Box::new(this))
} }
async fn detect() -> Option<Box<dyn NodeRuntimeTrait>> { async fn detect(env: HashMap<String, String>) -> Option<Box<dyn NodeRuntimeTrait>> {
let node = which::which("node").ok()?; let path = env
let npm = which::which("npm").ok()?; .get("PATH")
Self::new(node, npm).await.log_err() .cloned()
.or_else(|| std::env::var("PATH").ok());
let node = which::which_in_global("node", path.as_ref()).ok()?.next()?;
let npm = which::which_in_global("npm", path.as_ref()).ok()?.next()?;
Self::new(env, node, npm).await.log_err()
} }
} }
@@ -547,13 +562,8 @@ impl NodeRuntimeTrait for SystemNodeRuntime {
subcommand: &str, subcommand: &str,
args: &[&str], args: &[&str],
) -> anyhow::Result<Output> { ) -> anyhow::Result<Output> {
let node_ca_certs = env::var(NODE_CA_CERTS_ENV_VAR).unwrap_or_else(|_| String::new()); let mut command = util::command::new_smol_command(self.npm.clone(), &self.clean_env);
let mut command = util::command::new_smol_command(self.npm.clone());
let path = path_with_node_binary_prepended(&self.node).unwrap_or_default();
command command
.env_clear()
.env("PATH", path)
.env(NODE_CA_CERTS_ENV_VAR, node_ca_certs)
.arg(subcommand) .arg(subcommand)
.args(["--cache".into(), self.scratch_dir.join("cache")]) .args(["--cache".into(), self.scratch_dir.join("cache")])
.args(args); .args(args);

View File

@@ -18,6 +18,7 @@ test-support = []
[dependencies] [dependencies]
anyhow.workspace = true anyhow.workspace = true
collections.workspace = true collections.workspace = true
environment.workspace = true
fs.workspace = true fs.workspace = true
gpui.workspace = true gpui.workspace = true
language.workspace = true language.workspace = true

View File

@@ -278,6 +278,7 @@ impl Prettier {
arguments: vec![prettier_server.into(), prettier_dir.as_path().into()], arguments: vec![prettier_server.into(), prettier_dir.as_path().into()],
env: None, env: None,
}; };
let env = environment::in_home_dir().await;
let server = LanguageServer::new( let server = LanguageServer::new(
Arc::new(parking_lot::Mutex::new(None)), Arc::new(parking_lot::Mutex::new(None)),
server_id, server_id,
@@ -286,6 +287,7 @@ impl Prettier {
&prettier_dir, &prettier_dir,
None, None,
Default::default(), Default::default(),
env,
&mut cx, &mut cx,
) )
.context("prettier server creation")?; .context("prettier server creation")?;

View File

@@ -38,6 +38,7 @@ clock.workspace = true
collections.workspace = true collections.workspace = true
context_server.workspace = true context_server.workspace = true
dap.workspace = true dap.workspace = true
environment.workspace = true
extension.workspace = true extension.workspace = true
fancy-regex.workspace = true fancy-regex.workspace = true
fs.workspace = true fs.workspace = true

View File

@@ -203,7 +203,7 @@ impl DapStore {
.get_binary(&delegate, &definition, user_installed_path, cx) .get_binary(&delegate, &definition, user_installed_path, cx)
.await?; .await?;
let env = this let mut env = this
.update(cx, |this, cx| { .update(cx, |this, cx| {
this.as_local() this.as_local()
.unwrap() .unwrap()
@@ -214,10 +214,8 @@ impl DapStore {
})? })?
.await; .await;
if let Some(mut env) = env { env.extend(std::mem::take(&mut binary.envs));
env.extend(std::mem::take(&mut binary.envs)); binary.envs = env;
binary.envs = env;
}
Ok(binary) Ok(binary)
}) })
@@ -815,7 +813,7 @@ pub struct DapAdapterDelegate {
node_runtime: NodeRuntime, node_runtime: NodeRuntime,
http_client: Arc<dyn HttpClient>, http_client: Arc<dyn HttpClient>,
toolchain_store: Arc<dyn LanguageToolchainStore>, toolchain_store: Arc<dyn LanguageToolchainStore>,
load_shell_env_task: Shared<Task<Option<HashMap<String, String>>>>, load_shell_env_task: Shared<Task<HashMap<String, String>>>,
} }
impl DapAdapterDelegate { impl DapAdapterDelegate {
@@ -826,7 +824,7 @@ impl DapAdapterDelegate {
node_runtime: NodeRuntime, node_runtime: NodeRuntime,
http_client: Arc<dyn HttpClient>, http_client: Arc<dyn HttpClient>,
toolchain_store: Arc<dyn LanguageToolchainStore>, toolchain_store: Arc<dyn LanguageToolchainStore>,
load_shell_env_task: Shared<Task<Option<HashMap<String, String>>>>, load_shell_env_task: Shared<Task<HashMap<String, String>>>,
) -> Self { ) -> Self {
Self { Self {
fs, fs,
@@ -868,7 +866,7 @@ impl dap::adapters::DapDelegate for DapAdapterDelegate {
async fn shell_env(&self) -> HashMap<String, String> { async fn shell_env(&self) -> HashMap<String, String> {
let task = self.load_shell_env_task.clone(); let task = self.load_shell_env_task.clone();
task.await.unwrap_or_default() task.await
} }
fn toolchain_store(&self) -> Arc<dyn LanguageToolchainStore> { fn toolchain_store(&self) -> Arc<dyn LanguageToolchainStore> {

View File

@@ -1,76 +0,0 @@
use crate::environment::EnvironmentErrorMessage;
use std::process::ExitStatus;
#[cfg(not(any(target_os = "windows", test, feature = "test-support")))]
use {collections::HashMap, std::path::Path, util::ResultExt};
#[derive(Clone)]
pub enum DirenvError {
NotFound,
FailedRun,
NonZeroExit(ExitStatus, Vec<u8>),
EmptyOutput,
InvalidJson,
}
impl From<DirenvError> for Option<EnvironmentErrorMessage> {
fn from(value: DirenvError) -> Self {
match value {
DirenvError::NotFound => None,
DirenvError::FailedRun | DirenvError::NonZeroExit(_, _) => {
Some(EnvironmentErrorMessage(String::from(
"Failed to run direnv. See logs for more info",
)))
}
DirenvError::EmptyOutput => None,
DirenvError::InvalidJson => Some(EnvironmentErrorMessage(String::from(
"Direnv returned invalid json. See logs for more info",
))),
}
}
}
#[cfg(not(any(target_os = "windows", test, feature = "test-support")))]
pub async fn load_direnv_environment(
env: &HashMap<String, String>,
dir: &Path,
) -> Result<HashMap<String, String>, DirenvError> {
let Ok(direnv_path) = which::which("direnv") else {
return Err(DirenvError::NotFound);
};
let Some(direnv_output) = smol::process::Command::new(direnv_path)
.args(["export", "json"])
.envs(env)
.env("TERM", "dumb")
.current_dir(dir)
.output()
.await
.log_err()
else {
return Err(DirenvError::FailedRun);
};
if !direnv_output.status.success() {
log::error!(
"Loading direnv environment failed ({}), stderr: {}",
direnv_output.status,
String::from_utf8_lossy(&direnv_output.stderr)
);
return Err(DirenvError::NonZeroExit(
direnv_output.status,
direnv_output.stderr,
));
}
let output = String::from_utf8_lossy(&direnv_output.stdout);
if output.is_empty() {
return Err(DirenvError::EmptyOutput);
}
let Some(env) = serde_json::from_str(&output).log_err() else {
return Err(DirenvError::InvalidJson);
};
Ok(env)
}

View File

@@ -15,7 +15,7 @@ use crate::{
pub struct ProjectEnvironment { pub struct ProjectEnvironment {
cli_environment: Option<HashMap<String, String>>, cli_environment: Option<HashMap<String, String>>,
environments: HashMap<Arc<Path>, Shared<Task<Option<HashMap<String, String>>>>>, environments: HashMap<Arc<Path>, Shared<Task<HashMap<String, String>>>>,
environment_error_messages: HashMap<Arc<Path>, EnvironmentErrorMessage>, environment_error_messages: HashMap<Arc<Path>, EnvironmentErrorMessage>,
} }
@@ -62,14 +62,14 @@ impl ProjectEnvironment {
buffer: &Entity<Buffer>, buffer: &Entity<Buffer>,
worktree_store: &Entity<WorktreeStore>, worktree_store: &Entity<WorktreeStore>,
cx: &mut Context<Self>, cx: &mut Context<Self>,
) -> Shared<Task<Option<HashMap<String, String>>>> { ) -> Shared<Task<HashMap<String, String>>> {
if cfg!(any(test, feature = "test-support")) { if cfg!(any(test, feature = "test-support")) {
return Task::ready(Some(HashMap::default())).shared(); return Task::ready(HashMap::default()).shared();
} }
if let Some(cli_environment) = self.get_cli_environment() { if let Some(cli_environment) = self.get_cli_environment() {
log::debug!("using project environment variables from CLI"); log::debug!("using project environment variables from CLI");
return Task::ready(Some(cli_environment)).shared(); return Task::ready(cli_environment).shared();
} }
let Some(worktree) = buffer let Some(worktree) = buffer
@@ -78,7 +78,7 @@ impl ProjectEnvironment {
.map(|f| f.worktree_id(cx)) .map(|f| f.worktree_id(cx))
.and_then(|worktree_id| worktree_store.read(cx).worktree_for_id(worktree_id, cx)) .and_then(|worktree_id| worktree_store.read(cx).worktree_for_id(worktree_id, cx))
else { else {
return Task::ready(None).shared(); return Task::ready(environment::inherited()).shared();
}; };
self.get_worktree_environment(worktree, cx) self.get_worktree_environment(worktree, cx)
@@ -88,14 +88,14 @@ impl ProjectEnvironment {
&mut self, &mut self,
worktree: Entity<Worktree>, worktree: Entity<Worktree>,
cx: &mut Context<Self>, cx: &mut Context<Self>,
) -> Shared<Task<Option<HashMap<String, String>>>> { ) -> Shared<Task<HashMap<String, String>>> {
if cfg!(any(test, feature = "test-support")) { if cfg!(any(test, feature = "test-support")) {
return Task::ready(Some(HashMap::default())).shared(); return Task::ready(HashMap::default()).shared();
} }
if let Some(cli_environment) = self.get_cli_environment() { if let Some(cli_environment) = self.get_cli_environment() {
log::debug!("using project environment variables from CLI"); log::debug!("using project environment variables from CLI");
return Task::ready(Some(cli_environment)).shared(); return Task::ready(cli_environment).shared();
} }
let mut abs_path = worktree.read(cx).abs_path(); let mut abs_path = worktree.read(cx).abs_path();
@@ -103,10 +103,10 @@ impl ProjectEnvironment {
log::error!( log::error!(
"attempted to get project environment for a non-local worktree at {abs_path:?}" "attempted to get project environment for a non-local worktree at {abs_path:?}"
); );
return Task::ready(None).shared(); return Task::ready(environment::inherited()).shared();
} else if worktree.read(cx).is_single_file() { } else if worktree.read(cx).is_single_file() {
let Some(parent) = abs_path.parent() else { let Some(parent) = abs_path.parent() else {
return Task::ready(None).shared(); return Task::ready(environment::inherited()).shared();
}; };
abs_path = parent.into(); abs_path = parent.into();
} }
@@ -122,14 +122,14 @@ impl ProjectEnvironment {
&mut self, &mut self,
abs_path: Arc<Path>, abs_path: Arc<Path>,
cx: &mut Context<Self>, cx: &mut Context<Self>,
) -> Shared<Task<Option<HashMap<String, String>>>> { ) -> Shared<Task<HashMap<String, String>>> {
if cfg!(any(test, feature = "test-support")) { if cfg!(any(test, feature = "test-support")) {
return Task::ready(Some(HashMap::default())).shared(); return Task::ready(HashMap::default()).shared();
} }
if let Some(cli_environment) = self.get_cli_environment() { if let Some(cli_environment) = self.get_cli_environment() {
log::debug!("using project environment variables from CLI"); log::debug!("using project environment variables from CLI");
return Task::ready(Some(cli_environment)).shared(); return Task::ready(cli_environment).shared();
} }
self.environments self.environments
@@ -198,7 +198,17 @@ async fn load_directory_shell_environment(
); );
}; };
load_shell_environment(&dir, load_direnv).await match ::environment::in_dir(&dir, matches!(load_direnv, DirenvSettings::Direct)).await {
Ok(env) => (Some(env), None),
Err(err) => (
None,
Some(EnvironmentErrorMessage(format!(
"Failed to load shell environment in {}: {}",
dir.display(),
err
))),
),
}
} }
Err(err) => ( Err(err) => (
None, None,
@@ -211,148 +221,10 @@ async fn load_directory_shell_environment(
} }
} }
#[cfg(any(test, feature = "test-support"))]
async fn load_shell_environment(
_dir: &Path,
_load_direnv: &DirenvSettings,
) -> (
Option<HashMap<String, String>>,
Option<EnvironmentErrorMessage>,
) {
let fake_env = [("ZED_FAKE_TEST_ENV".into(), "true".into())]
.into_iter()
.collect();
(Some(fake_env), None)
}
#[cfg(all(target_os = "windows", not(any(test, feature = "test-support"))))]
async fn load_shell_environment(
_dir: &Path,
_load_direnv: &DirenvSettings,
) -> (
Option<HashMap<String, String>>,
Option<EnvironmentErrorMessage>,
) {
// TODO the current code works with Unix $SHELL only, implement environment loading on windows
(None, None)
}
/// Spawns the user's login shell, `cd`s into `dir`, and captures the
/// resulting environment by running `/usr/bin/env`.
///
/// Returns the parsed environment on success (with direnv's variables layered
/// on top when `load_direnv` is `Direct`), plus an optional user-facing error
/// message explaining why loading failed or was incomplete. Not compiled on
/// Windows or in tests (see the `cfg` below).
#[cfg(not(any(target_os = "windows", test, feature = "test-support")))]
async fn load_shell_environment(
    dir: &Path,
    load_direnv: &DirenvSettings,
) -> (
    Option<HashMap<String, String>>,
    Option<EnvironmentErrorMessage>,
) {
    use crate::direnv::{DirenvError, load_direnv_environment};
    use std::path::PathBuf;
    use util::parse_env_output;

    // Failure helper: no environment, plus a message to surface to the user.
    fn message<T>(with: &str) -> (Option<T>, Option<EnvironmentErrorMessage>) {
        let message = EnvironmentErrorMessage::from_str(with);
        (None, Some(message))
    }

    // Sentinel printed right before the `env` output so we can discard any
    // noise the shell's startup files write to stdout before it.
    const MARKER: &str = "ZED_SHELL_START";

    let Some(shell) = std::env::var("SHELL").log_err() else {
        return message("Failed to get login environment. SHELL environment variable is not set");
    };

    let shell_path = PathBuf::from(&shell);
    let shell_name = shell_path.file_name().and_then(|f| f.to_str());

    // What we're doing here is to spawn a shell and then `cd` into
    // the project directory to get the env in there as if the user
    // `cd`'d into it. We do that because tools like direnv, asdf, ...
    // hook into `cd` and only set up the env after that.
    //
    // NOTE: when the user selects `Direct` direnv mode, direnv itself sets a
    // variable that it later checks to decide whether its hook should run;
    // since that variable ends up in the environment we capture here, it is
    // still okay to run the hook even with direct mode enabled.
    //
    // In certain shells we need to execute an additional command to trigger
    // the `cd` hooks of direnv and friends — e.g. fish only fires them on
    // `emit fish_prompt`.
    let command = match shell_name {
        Some("fish") => format!(
            "cd '{}'; emit fish_prompt; printf '%s' {MARKER}; /usr/bin/env;",
            dir.display()
        ),
        _ => format!(
            "cd '{}'; printf '%s' {MARKER}; /usr/bin/env;",
            dir.display()
        ),
    };

    // csh/tcsh only supports `-l` if it's the only flag. So this won't be a login shell.
    // Users must rely on vars from `~/.tcshrc` or `~/.cshrc` and not `.login` as a result.
    let args = match shell_name {
        Some("tcsh") | Some("csh") => vec!["-i".to_string(), "-c".to_string(), command],
        _ => vec![
            "-l".to_string(),
            "-i".to_string(),
            "-c".to_string(),
            command,
        ],
    };

    // Run the shell on a blocking thread, started in a new session —
    // presumably to detach it from our controlling terminal so an
    // interactive prompt cannot hang us (TODO confirm).
    let Some(output) = smol::unblock(move || {
        util::set_pre_exec_to_start_new_session(std::process::Command::new(&shell).args(&args))
            .output()
    })
    .await
    .log_err() else {
        return message(
            "Failed to spawn login shell to source login environment variables. See logs for details",
        );
    };

    if !output.status.success() {
        log::error!("login shell exited with {}", output.status);
        return message("Login shell exited with nonzero exit code. See logs for details");
    }

    // Everything after MARKER in stdout is the `env` output; anything before
    // it is shell-startup noise we ignore.
    let stdout = String::from_utf8_lossy(&output.stdout);
    let Some(env_output_start) = stdout.find(MARKER) else {
        let stderr = String::from_utf8_lossy(&output.stderr);
        log::error!(
            "failed to parse output of `env` command in login shell. stdout: {:?}, stderr: {:?}",
            stdout,
            stderr
        );
        return message("Failed to parse stdout of env command. See logs for the output");
    };

    let mut parsed_env = HashMap::default();
    let env_output = &stdout[env_output_start + MARKER.len()..];
    parse_env_output(env_output, |key, value| {
        parsed_env.insert(key, value);
    });

    // Optionally layer direnv's environment on top of the shell's. A direnv
    // failure is reported via the message channel but does not discard the
    // shell environment we already collected.
    let (direnv_environment, direnv_error) = match load_direnv {
        DirenvSettings::ShellHook => (None, None),
        DirenvSettings::Direct => match load_direnv_environment(&parsed_env, dir).await {
            Ok(env) => (Some(env), None),
            Err(err) => (
                None,
                <Option<EnvironmentErrorMessage> as From<DirenvError>>::from(err),
            ),
        },
    };
    // direnv entries override shell entries on key collision.
    for (key, value) in direnv_environment.unwrap_or(HashMap::default()) {
        parsed_env.insert(key, value);
    }

    (Some(parsed_env), direnv_error)
}
fn get_directory_env_impl( fn get_directory_env_impl(
abs_path: Arc<Path>, abs_path: Arc<Path>,
cx: &Context<ProjectEnvironment>, cx: &Context<ProjectEnvironment>,
) -> Task<Option<HashMap<String, String>>> { ) -> Task<HashMap<String, String>> {
let load_direnv = ProjectSettings::get_global(cx).load_direnv.clone(); let load_direnv = ProjectSettings::get_global(cx).load_direnv.clone();
cx.spawn(async move |this, cx| { cx.spawn(async move |this, cx| {
@@ -386,6 +258,10 @@ fn get_directory_env_impl(
.log_err(); .log_err();
} }
shell_env if let Some(shell_env) = shell_env {
shell_env
} else {
environment::inherited()
}
}) })
} }

View File

@@ -1433,13 +1433,22 @@ impl GitStore {
&self, &self,
path: Arc<Path>, path: Arc<Path>,
fallback_branch_name: String, fallback_branch_name: String,
cx: &App, cx: &mut App,
) -> Task<Result<()>> { ) -> Task<Result<()>> {
match &self.state { match &self.state {
GitStoreState::Local { fs, .. } => { GitStoreState::Local {
fs,
project_environment,
..
} => {
let fs = fs.clone(); let fs = fs.clone();
cx.background_executor() let load_env = project_environment.update(cx, |env, cx| {
.spawn(async move { fs.git_init(&path, fallback_branch_name) }) env.get_directory_environment(path.clone(), cx)
});
cx.background_executor().spawn(async move {
let env = load_env.await;
fs.git_init(&path, fallback_branch_name, &env)
})
} }
GitStoreState::Ssh { GitStoreState::Ssh {
upstream_client, upstream_client,
@@ -1543,11 +1552,11 @@ impl GitStore {
async fn handle_git_init( async fn handle_git_init(
this: Entity<Self>, this: Entity<Self>,
envelope: TypedEnvelope<proto::GitInit>, envelope: TypedEnvelope<proto::GitInit>,
cx: AsyncApp, mut cx: AsyncApp,
) -> Result<proto::Ack> { ) -> Result<proto::Ack> {
let path: Arc<Path> = PathBuf::from(envelope.payload.abs_path).into(); let path: Arc<Path> = PathBuf::from(envelope.payload.abs_path).into();
let name = envelope.payload.fallback_branch_name; let name = envelope.payload.fallback_branch_name;
cx.update(|cx| this.read(cx).git_init(path, name, cx))? this.update(&mut cx, |this, cx| this.git_init(path, name, cx))?
.await?; .await?;
Ok(proto::Ack {}) Ok(proto::Ack {})
@@ -4066,17 +4075,17 @@ impl Repository {
.upgrade() .upgrade()
.ok_or_else(|| anyhow!("missing project environment"))? .ok_or_else(|| anyhow!("missing project environment"))?
.update(cx, |project_environment, cx| { .update(cx, |project_environment, cx| {
project_environment.get_directory_environment(work_directory_abs_path.clone(), cx) project_environment
.get_directory_environment(work_directory_abs_path.clone(), cx)
})? })?
.await .await;
.unwrap_or_else(|| {
log::error!("failed to get working directory environment for repository {work_directory_abs_path:?}");
HashMap::default()
});
let backend = cx let backend = cx
.background_spawn(async move { .background_spawn({
fs.open_repo(&dot_git_abs_path) let environment = environment.clone();
.ok_or_else(|| anyhow!("failed to build repository")) async move {
fs.open_repo(&dot_git_abs_path, environment)
.ok_or_else(|| anyhow!("failed to build repository"))
}
}) })
.await?; .await?;

View File

@@ -207,12 +207,14 @@ impl LocalLspStore {
let pending_workspace_folders: Arc<Mutex<BTreeSet<Url>>> = Default::default(); let pending_workspace_folders: Arc<Mutex<BTreeSet<Url>>> = Default::default();
let pending_server = cx.spawn({ let pending_server = cx.spawn({
let adapter = adapter.clone(); let adapter = adapter.clone();
let delegate = delegate.clone();
let server_name = adapter.name.clone(); let server_name = adapter.name.clone();
let stderr_capture = stderr_capture.clone(); let stderr_capture = stderr_capture.clone();
#[cfg(any(test, feature = "test-support"))] #[cfg(any(test, feature = "test-support"))]
let lsp_store = self.weak.clone(); let lsp_store = self.weak.clone();
let pending_workspace_folders = pending_workspace_folders.clone(); let pending_workspace_folders = pending_workspace_folders.clone();
async move |cx| { async move |cx| {
let env = delegate.shell_env().await;
let binary = binary.await?; let binary = binary.await?;
#[cfg(any(test, feature = "test-support"))] #[cfg(any(test, feature = "test-support"))]
if let Some(server) = lsp_store if let Some(server) = lsp_store
@@ -238,6 +240,7 @@ impl LocalLspStore {
&root_path, &root_path,
adapter.code_action_kinds(), adapter.code_action_kinds(),
pending_workspace_folders, pending_workspace_folders,
env,
cx, cx,
) )
} }
@@ -427,7 +430,9 @@ impl LocalLspStore {
let mut binary = binary_result?; let mut binary = binary_result?;
let mut shell_env = delegate.shell_env().await; let mut shell_env = delegate.shell_env().await;
shell_env.extend(binary.env.unwrap_or_default()); if let Some(binary_env) = binary.env {
shell_env.extend(binary_env);
}
if let Some(settings) = settings { if let Some(settings) = settings {
if let Some(arguments) = settings.arguments { if let Some(arguments) = settings.arguments {
@@ -2000,11 +2005,7 @@ impl LocalLspStore {
Some(worktree_path) Some(worktree_path)
})?; })?;
let mut child = util::command::new_smol_command(command); let mut child = util::command::new_smol_command(command, &buffer.env);
if let Some(buffer_env) = buffer.env.as_ref() {
child.envs(buffer_env);
}
if let Some(working_dir_path) = working_dir_path { if let Some(working_dir_path) = working_dir_path {
child.current_dir(working_dir_path); child.current_dir(working_dir_path);
@@ -3406,7 +3407,7 @@ impl LocalLspStore {
pub struct FormattableBuffer { pub struct FormattableBuffer {
handle: Entity<Buffer>, handle: Entity<Buffer>,
abs_path: Option<PathBuf>, abs_path: Option<PathBuf>,
env: Option<HashMap<String, String>>, env: HashMap<String, String>,
ranges: Option<Vec<Range<Anchor>>>, ranges: Option<Vec<Range<Anchor>>>,
} }
@@ -8319,13 +8320,13 @@ impl LspStore {
&self, &self,
buffer: &Entity<Buffer>, buffer: &Entity<Buffer>,
cx: &mut Context<Self>, cx: &mut Context<Self>,
) -> Shared<Task<Option<HashMap<String, String>>>> { ) -> Shared<Task<HashMap<String, String>>> {
if let Some(environment) = &self.as_local().map(|local| local.environment.clone()) { if let Some(environment) = &self.as_local().map(|local| local.environment.clone()) {
environment.update(cx, |env, cx| { environment.update(cx, |env, cx| {
env.get_buffer_environment(&buffer, &self.worktree_store, cx) env.get_buffer_environment(&buffer, &self.worktree_store, cx)
}) })
} else { } else {
Task::ready(None).shared() Task::ready(environment::inherited()).shared()
} }
} }
@@ -10166,7 +10167,7 @@ pub struct LocalLspAdapterDelegate {
fs: Arc<dyn Fs>, fs: Arc<dyn Fs>,
http_client: Arc<dyn HttpClient>, http_client: Arc<dyn HttpClient>,
language_registry: Arc<LanguageRegistry>, language_registry: Arc<LanguageRegistry>,
load_shell_env_task: Shared<Task<Option<HashMap<String, String>>>>, load_shell_env_task: Shared<Task<HashMap<String, String>>>,
} }
impl LocalLspAdapterDelegate { impl LocalLspAdapterDelegate {
@@ -10242,7 +10243,7 @@ impl LspAdapterDelegate for LocalLspAdapterDelegate {
async fn shell_env(&self) -> HashMap<String, String> { async fn shell_env(&self) -> HashMap<String, String> {
let task = self.load_shell_env_task.clone(); let task = self.load_shell_env_task.clone();
task.await.unwrap_or_default() task.await
} }
async fn npm_package_installed_version( async fn npm_package_installed_version(
@@ -10266,9 +10267,8 @@ impl LspAdapterDelegate for LocalLspAdapterDelegate {
}; };
let env = self.shell_env().await; let env = self.shell_env().await;
let output = util::command::new_smol_command(&npm) let output = util::command::new_smol_command(&npm, &env)
.args(["root", "-g"]) .args(["root", "-g"])
.envs(env)
.current_dir(local_package_directory) .current_dir(local_package_directory)
.output() .output()
.await?; .await?;
@@ -10300,7 +10300,8 @@ impl LspAdapterDelegate for LocalLspAdapterDelegate {
async fn try_exec(&self, command: LanguageServerBinary) -> Result<()> { async fn try_exec(&self, command: LanguageServerBinary) -> Result<()> {
let working_dir = self.worktree_root_path(); let working_dir = self.worktree_root_path();
let output = util::command::new_smol_command(&command.path) let shell_env = self.shell_env().await;
let output = util::command::new_smol_command(&command.path, &shell_env)
.args(command.arguments) .args(command.arguments)
.envs(command.env.clone().unwrap_or_default()) .envs(command.env.clone().unwrap_or_default())
.current_dir(working_dir) .current_dir(working_dir)

View File

@@ -21,7 +21,6 @@ pub mod worktree_store;
#[cfg(test)] #[cfg(test)]
mod project_tests; mod project_tests;
mod direnv;
mod environment; mod environment;
use buffer_diff::BufferDiff; use buffer_diff::BufferDiff;
use context_server_store::ContextServerStore; use context_server_store::ContextServerStore;
@@ -1654,7 +1653,7 @@ impl Project {
buffer: &Entity<Buffer>, buffer: &Entity<Buffer>,
worktree_store: &Entity<WorktreeStore>, worktree_store: &Entity<WorktreeStore>,
cx: &'a mut App, cx: &'a mut App,
) -> Shared<Task<Option<HashMap<String, String>>>> { ) -> Shared<Task<HashMap<String, String>>> {
self.environment.update(cx, |environment, cx| { self.environment.update(cx, |environment, cx| {
environment.get_buffer_environment(&buffer, &worktree_store, cx) environment.get_buffer_environment(&buffer, &worktree_store, cx)
}) })
@@ -1664,7 +1663,7 @@ impl Project {
&self, &self,
abs_path: Arc<Path>, abs_path: Arc<Path>,
cx: &mut App, cx: &mut App,
) -> Shared<Task<Option<HashMap<String, String>>>> { ) -> Shared<Task<HashMap<String, String>>> {
self.environment.update(cx, |environment, cx| { self.environment.update(cx, |environment, cx| {
environment.get_directory_environment(abs_path, cx) environment.get_directory_environment(abs_path, cx)
}) })
@@ -4873,11 +4872,11 @@ impl Project {
&self, &self,
path: Arc<Path>, path: Arc<Path>,
fallback_branch_name: String, fallback_branch_name: String,
cx: &App, cx: &mut App,
) -> Task<Result<()>> { ) -> Task<Result<()>> {
self.git_store self.git_store.update(cx, |git_store, cx| {
.read(cx) git_store.git_init(path, fallback_branch_name, cx)
.git_init(path, fallback_branch_name, cx) })
} }
pub fn buffer_store(&self) -> &Entity<BufferStore> { pub fn buffer_store(&self) -> &Entity<BufferStore> {

View File

@@ -7116,7 +7116,9 @@ async fn test_staging_random_hunks(
path!("/dir/.git").as_ref(), path!("/dir/.git").as_ref(),
&[("file.txt".into(), index_text.clone())], &[("file.txt".into(), index_text.clone())],
); );
let repo = fs.open_repo(path!("/dir/.git").as_ref()).unwrap(); let repo = fs
.open_repo(path!("/dir/.git").as_ref(), ::environment::inherited())
.unwrap();
let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await; let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
let buffer = project let buffer = project

View File

@@ -788,7 +788,7 @@ impl ContextProvider for BasicContextProvider {
&self, &self,
_: &TaskVariables, _: &TaskVariables,
location: &Location, location: &Location,
_: Option<HashMap<String, String>>, _: HashMap<String, String>,
_: Arc<dyn LanguageToolchainStore>, _: Arc<dyn LanguageToolchainStore>,
cx: &mut App, cx: &mut App,
) -> Task<Result<TaskVariables>> { ) -> Task<Result<TaskVariables>> {

View File

@@ -338,7 +338,7 @@ fn local_task_context_for_location(
task_variables.sweep(); task_variables.sweep();
Some(TaskContext { Some(TaskContext {
project_env: project_env.unwrap_or_default(), project_env,
cwd: worktree_abs_path.map(|p| p.to_path_buf()), cwd: worktree_abs_path.map(|p| p.to_path_buf()),
task_variables, task_variables,
}) })
@@ -361,7 +361,7 @@ fn remote_task_context_for_location(
BasicContextProvider::new(worktree_store).build_context( BasicContextProvider::new(worktree_store).build_context(
&TaskVariables::default(), &TaskVariables::default(),
&location, &location,
None, HashMap::default(),
toolchain_store, toolchain_store,
cx, cx,
) )
@@ -411,7 +411,7 @@ fn remote_task_context_for_location(
fn combine_task_variables( fn combine_task_variables(
mut captured_variables: TaskVariables, mut captured_variables: TaskVariables,
location: Location, location: Location,
project_env: Option<HashMap<String, String>>, project_env: HashMap<String, String>,
baseline: BasicContextProvider, baseline: BasicContextProvider,
toolchain_store: Arc<dyn LanguageToolchainStore>, toolchain_store: Arc<dyn LanguageToolchainStore>,
cx: &mut App, cx: &mut App,

View File

@@ -216,12 +216,15 @@ impl Project {
let (completion_tx, completion_rx) = bounded(1); let (completion_tx, completion_rx) = bounded(1);
// Start with the environment that we might have inherited from the Zed CLI. // Start tasks with the environment that we might have inherited from the Zed CLI.
let mut env = this let mut env = if matches!(kind, TerminalKind::Task(_)) {
.environment this.environment
.read(cx) .read(cx)
.get_cli_environment() .get_cli_environment()
.unwrap_or_default(); .unwrap_or_default()
} else {
HashMap::default()
};
// Then extend it with the explicit env variables from the settings, so they take // Then extend it with the explicit env variables from the settings, so they take
// precedence. // precedence.
env.extend(settings.env.clone()); env.extend(settings.env.clone());

View File

@@ -22,6 +22,7 @@ anyhow.workspace = true
askpass.workspace = true askpass.workspace = true
async-trait.workspace = true async-trait.workspace = true
collections.workspace = true collections.workspace = true
environment.workspace = true
fs.workspace = true fs.workspace = true
futures.workspace = true futures.workspace = true
gpui.workspace = true gpui.workspace = true

View File

@@ -347,7 +347,7 @@ impl SshSocket {
// into a machine. You must use `cd` to get back to $HOME. // into a machine. You must use `cd` to get back to $HOME.
// You need to do it like this: $ ssh host "cd; sh -c 'ls -l /tmp'" // You need to do it like this: $ ssh host "cd; sh -c 'ls -l /tmp'"
fn ssh_command(&self, program: &str, args: &[&str]) -> process::Command { fn ssh_command(&self, program: &str, args: &[&str]) -> process::Command {
let mut command = util::command::new_smol_command("ssh"); let mut command = util::command::new_smol_command("ssh", &environment::inherited());
let to_run = iter::once(&program) let to_run = iter::once(&program)
.chain(args.iter()) .chain(args.iter())
.map(|token| { .map(|token| {
@@ -1339,7 +1339,7 @@ impl RemoteConnection for SshRemoteConnection {
dest_path: PathBuf, dest_path: PathBuf,
cx: &App, cx: &App,
) -> Task<Result<()>> { ) -> Task<Result<()>> {
let mut command = util::command::new_smol_command("scp"); let mut command = util::command::new_smol_command("scp", &environment::inherited());
let output = self let output = self
.socket .socket
.ssh_options(&mut command) .ssh_options(&mut command)
@@ -1920,7 +1920,7 @@ impl SshRemoteConnection {
async fn upload_file(&self, src_path: &Path, dest_path: &Path) -> Result<()> { async fn upload_file(&self, src_path: &Path, dest_path: &Path) -> Result<()> {
log::debug!("uploading file {:?} to {:?}", src_path, dest_path); log::debug!("uploading file {:?} to {:?}", src_path, dest_path);
let mut command = util::command::new_smol_command("scp"); let mut command = util::command::new_smol_command("scp", &environment::inherited());
let output = self let output = self
.socket .socket
.ssh_options(&mut command) .ssh_options(&mut command)

View File

@@ -468,7 +468,7 @@ pub fn execute_run(
) )
}; };
let node_runtime = NodeRuntime::new(http_client.clone(), None, node_settings_rx); let node_runtime = NodeRuntime::new(http_client.clone(), node_settings_rx);
let mut languages = LanguageRegistry::new(cx.background_executor().clone()); let mut languages = LanguageRegistry::new(cx.background_executor().clone());
languages.set_language_server_download_dir(paths::languages_dir().clone()); languages.set_language_server_download_dir(paths::languages_dir().clone());

View File

@@ -22,6 +22,7 @@ client.workspace = true
collections.workspace = true collections.workspace = true
command_palette_hooks.workspace = true command_palette_hooks.workspace = true
editor.workspace = true editor.workspace = true
environment.workspace = true
feature_flags.workspace = true feature_flags.workspace = true
file_icons.workspace = true file_icons.workspace = true
futures.workspace = true futures.workspace = true

View File

@@ -101,9 +101,10 @@ pub fn python_env_kernel_specifications(
let kernelspecs = toolchains.toolchains.into_iter().map(|toolchain| { let kernelspecs = toolchains.toolchains.into_iter().map(|toolchain| {
background_executor.spawn(async move { background_executor.spawn(async move {
let python_path = toolchain.path.to_string(); let python_path = toolchain.path.to_string();
let env = environment::in_home_dir().await;
// Check if ipykernel is installed // Check if ipykernel is installed
let ipykernel_check = util::command::new_smol_command(&python_path) let ipykernel_check = util::command::new_smol_command(&python_path, &env)
.args(&["-c", "import ipykernel"]) .args(&["-c", "import ipykernel"])
.output() .output()
.await; .await;

View File

@@ -54,7 +54,7 @@ impl LocalKernelSpecification {
self.name self.name
); );
let mut cmd = util::command::new_smol_command(&argv[0]); let mut cmd = util::command::new_smol_command(&argv[0], &environment::inherited());
for arg in &argv[1..] { for arg in &argv[1..] {
if arg == "{connection_file}" { if arg == "{connection_file}" {
@@ -428,9 +428,10 @@ pub async fn local_kernel_specifications(fs: Arc<dyn Fs>) -> Result<Vec<LocalKer
let conda_data_dir = conda_prefix.join("share").join("jupyter"); let conda_data_dir = conda_prefix.join("share").join("jupyter");
data_dirs.push(conda_data_dir); data_dirs.push(conda_data_dir);
} }
let env = environment::in_home_dir().await;
// Search for kernels inside the base python environment // Search for kernels inside the base python environment
let command = util::command::new_smol_command("python") let command = util::command::new_smol_command("python", &env)
.arg("-c") .arg("-c")
.arg("import sys; print(sys.prefix)") .arg("import sys; print(sys.prefix)")
.output() .output()

View File

@@ -16,6 +16,7 @@ doctest = false
anyhow.workspace = true anyhow.workspace = true
client.workspace = true client.workspace = true
collections.workspace = true collections.workspace = true
environment.workspace = true
futures.workspace = true futures.workspace = true
gpui.workspace = true gpui.workspace = true
inline_completion.workspace = true inline_completion.workspace = true

View File

@@ -267,7 +267,7 @@ impl SupermavenAgent {
client: Arc<Client>, client: Arc<Client>,
cx: &mut Context<Supermaven>, cx: &mut Context<Supermaven>,
) -> Result<Self> { ) -> Result<Self> {
let mut process = util::command::new_smol_command(&binary_path) let mut process = util::command::new_smol_command(&binary_path, &environment::inherited())
.arg("stdio") .arg("stdio")
.stdin(Stdio::piped()) .stdin(Stdio::piped())
.stdout(Stdio::piped()) .stdout(Stdio::piped())

View File

@@ -401,7 +401,7 @@ impl ShellBuilder {
} }
fn system_shell() -> String { fn system_shell() -> String {
std::env::var("SHELL").unwrap_or("/bin/sh".to_string()) util::get_system_shell()
} }
} }

View File

@@ -1,32 +1,36 @@
use collections::HashMap;
use std::ffi::OsStr; use std::ffi::OsStr;
#[cfg(target_os = "windows")]
use smol::process::windows::CommandExt;
#[cfg(target_os = "windows")]
use std::os::process::windows::CommandExt;
#[cfg(target_os = "windows")] #[cfg(target_os = "windows")]
const CREATE_NO_WINDOW: u32 = 0x0800_0000_u32; const CREATE_NO_WINDOW: u32 = 0x0800_0000_u32;
#[cfg(target_os = "windows")] // Create a new command to run in a given environment
pub fn new_std_command(program: impl AsRef<OsStr>) -> std::process::Command { // (typically from the environment crate)
use std::os::windows::process::CommandExt; pub fn new_std_command(
program: impl AsRef<OsStr>,
env: &HashMap<String, String>,
) -> std::process::Command {
let mut command = std::process::Command::new(program); let mut command = std::process::Command::new(program);
command.env_clear().envs(env);
#[cfg(target_os = "windows")]
command.creation_flags(CREATE_NO_WINDOW); command.creation_flags(CREATE_NO_WINDOW);
command command
} }
#[cfg(not(target_os = "windows"))] // Create a new command to run in a given environment
pub fn new_std_command(program: impl AsRef<OsStr>) -> std::process::Command { // (typically from the environment crate)
std::process::Command::new(program) pub fn new_smol_command(
} program: impl AsRef<OsStr>,
env: &HashMap<String, String>,
#[cfg(target_os = "windows")] ) -> smol::process::Command {
pub fn new_smol_command(program: impl AsRef<OsStr>) -> smol::process::Command {
use smol::process::windows::CommandExt;
let mut command = smol::process::Command::new(program); let mut command = smol::process::Command::new(program);
command.env_clear().envs(env);
#[cfg(target_os = "windows")]
command.creation_flags(CREATE_NO_WINDOW); command.creation_flags(CREATE_NO_WINDOW);
command command
} }
#[cfg(not(target_os = "windows"))]
pub fn new_smol_command(program: impl AsRef<OsStr>) -> smol::process::Command {
smol::process::Command::new(program)
}

View File

@@ -26,9 +26,6 @@ use std::{
}; };
use unicase::UniCase; use unicase::UniCase;
#[cfg(unix)]
use anyhow::{Context as _, anyhow};
pub use take_until::*; pub use take_until::*;
#[cfg(any(test, feature = "test-support"))] #[cfg(any(test, feature = "test-support"))]
pub use util_macros::{line_endings, separator, uri}; pub use util_macros::{line_endings, separator, uri};
@@ -259,7 +256,7 @@ where
} }
#[cfg(unix)] #[cfg(unix)]
fn load_shell_from_passwd() -> Result<()> { pub fn load_shell_from_passwd() -> Result<()> {
let buflen = match unsafe { libc::sysconf(libc::_SC_GETPW_R_SIZE_MAX) } { let buflen = match unsafe { libc::sysconf(libc::_SC_GETPW_R_SIZE_MAX) } {
n if n < 0 => 1024, n if n < 0 => 1024,
n => n as usize, n => n as usize,
@@ -296,63 +293,7 @@ fn load_shell_from_passwd() -> Result<()> {
); );
let shell = unsafe { std::ffi::CStr::from_ptr(entry.pw_shell).to_str().unwrap() }; let shell = unsafe { std::ffi::CStr::from_ptr(entry.pw_shell).to_str().unwrap() };
if env::var("SHELL").map_or(true, |shell_env| shell_env != shell) { SHELL_FROM_PASSWD.set(shell.to_string()).ok();
log::info!(
"updating SHELL environment variable to value from passwd entry: {:?}",
shell,
);
unsafe { env::set_var("SHELL", shell) };
}
Ok(())
}
#[cfg(unix)]
pub fn load_login_shell_environment() -> Result<()> {
load_shell_from_passwd().log_err();
let marker = "ZED_LOGIN_SHELL_START";
let shell = env::var("SHELL").context(
"SHELL environment variable is not assigned so we can't source login environment variables",
)?;
// If possible, we want to `cd` in the user's `$HOME` to trigger programs
// such as direnv, asdf, mise, ... to adjust the PATH. These tools often hook
// into shell's `cd` command (and hooks) to manipulate env.
// We do this so that we get the env a user would have when spawning a shell
// in home directory.
let shell_cmd_prefix = std::env::var_os("HOME")
.and_then(|home| home.into_string().ok())
.map(|home| format!("cd '{home}';"));
let shell_cmd = format!(
"{}printf '%s' {marker}; /usr/bin/env;",
shell_cmd_prefix.as_deref().unwrap_or("")
);
let output = set_pre_exec_to_start_new_session(
std::process::Command::new(&shell).args(["-l", "-i", "-c", &shell_cmd]),
)
.output()
.context("failed to spawn login shell to source login environment variables")?;
if !output.status.success() {
Err(anyhow!("login shell exited with error"))?;
}
let stdout = String::from_utf8_lossy(&output.stdout);
if let Some(env_output_start) = stdout.find(marker) {
let env_output = &stdout[env_output_start + marker.len()..];
parse_env_output(env_output, |key, value| unsafe { env::set_var(key, value) });
log::info!(
"set environment variables from shell:{}, path:{}",
shell,
env::var("PATH").unwrap_or_default(),
);
}
Ok(()) Ok(())
} }
@@ -1013,6 +954,8 @@ pub fn default<D: Default>() -> D {
Default::default() Default::default()
} }
static SHELL_FROM_PASSWD: OnceLock<String> = OnceLock::new();
pub fn get_system_shell() -> String { pub fn get_system_shell() -> String {
#[cfg(target_os = "windows")] #[cfg(target_os = "windows")]
{ {
@@ -1021,7 +964,10 @@ pub fn get_system_shell() -> String {
#[cfg(not(target_os = "windows"))] #[cfg(not(target_os = "windows"))]
{ {
std::env::var("SHELL").unwrap_or("/bin/sh".to_string()) std::env::var("SHELL")
.ok()
.or_else(|| SHELL_FROM_PASSWD.get().cloned())
.unwrap_or("/bin/sh".to_string())
} }
} }

View File

@@ -15,7 +15,7 @@ use editor::Editor;
use extension::ExtensionHostProxy; use extension::ExtensionHostProxy;
use extension_host::ExtensionStore; use extension_host::ExtensionStore;
use fs::{Fs, RealFs}; use fs::{Fs, RealFs};
use futures::{StreamExt, channel::oneshot, future}; use futures::{StreamExt, future};
use git::GitHostingProviderRegistry; use git::GitHostingProviderRegistry;
use gpui::{App, AppContext as _, Application, AsyncApp, UpdateGlobal as _}; use gpui::{App, AppContext as _, Application, AsyncApp, UpdateGlobal as _};
@@ -305,18 +305,12 @@ fn main() {
paths::keymap_file().clone(), paths::keymap_file().clone(),
); );
let (shell_env_loaded_tx, shell_env_loaded_rx) = oneshot::channel(); #[cfg(unix)]
if !stdout_is_a_pty() { app.background_executor()
app.background_executor() .spawn(async {
.spawn(async { util::load_shell_from_passwd().log_err();
#[cfg(unix)] })
util::load_login_shell_environment().log_err(); .detach();
shell_env_loaded_tx.send(()).ok();
})
.detach()
} else {
drop(shell_env_loaded_tx)
}
app.on_open_urls({ app.on_open_urls({
let open_listener = open_listener.clone(); let open_listener = open_listener.clone();
@@ -417,7 +411,7 @@ fn main() {
tx.send(Some(options)).log_err(); tx.send(Some(options)).log_err();
}) })
.detach(); .detach();
let node_runtime = NodeRuntime::new(client.http_client(), Some(shell_env_loaded_rx), rx); let node_runtime = NodeRuntime::new(client.http_client(), rx);
language::init(cx); language::init(cx);
language_extension::init(extension_host_proxy.clone(), languages.clone()); language_extension::init(extension_host_proxy.clone(), languages.clone());

View File

@@ -1,23 +1,25 @@
# Environment Variables
_**Note**: The following only applies to Zed 0.152.0 and later._
Multiple features in Zed are affected by environment variables:
- Tasks
- Built-in terminal
- Language servers and debug adapters
In order to make the best use of these features, it's helpful to understand where Zed gets its environment variables from and how they're used.
## Where does Zed get its environment variables from?
How Zed was started — whether it's icon was clicked in the macOS Dock or in a Linux window manager, or whether it was started via the CLI `zed` that comes with Zed — influences which environment variables Zed can use. ### The inherited environment
### Launched from the CLI When Zed is launched via the macOS Dock, or a GNOME or KDE icon on Linux, or an application launcher like Alfred or Raycast, it is launched as a child of the Window manager. The process inherits the environment set by your window manager. This environment typically does not contain any customization from your shell configuration files.
If Zed is opened via the CLI (`zed`), it will inherit the environment variables from the surrounding shell session. In rare cases (for example when developing Zed, or using `zed --foreground`), the environment is inherited from a shell instead of the window manager.
### The CLI environment
If Zed is opened via the CLI (`zed`), it will use the environment variables from the surrounding shell session for any projects
that are opened in this way.
That means if you do
@@ -26,48 +28,35 @@ $ export MY_ENV_VAR=hello
$ zed . $ zed .
``` ```
the environment variable `MY_ENV_VAR` is now available inside Zed. For example, in the built-in terminal. the environment variable `MY_ENV_VAR` is now available to processes run by Zed for the project in the current directory. For example, in the built-in terminal.
Starting with Zed 0.152.0, the CLI `zed` will _always_ pass along its environment to Zed, regardless of whether a Zed instance was previously running or not. Prior to Zed 0.152.0 this was not the case and only the first Zed instance would inherit the environment variables. ### The loaded environment
### Launched via window manager, Dock, or launcher If you have a project open in Zed that does not have an associated CLI environment, then Zed will spawn a shell, cd into the root directory of that project, and then run `printenv`. This will load an environment that will (ideally) match the CLI environment. This allows us to provide the expected environment variables to tools managed by things like `direnv`, `asdf` or `mise`, and to ensure that when we look up binaries, we are looking in the correct `PATH`.
When Zed has been launched via the macOS Dock, or a GNOME or KDE icon on Linux, or an application launcher like Alfred or Raycast, it has no surrounding shell environment from which to inherit its environment variables. ### The HOME environment
In order to still have a useful environment, Zed spawns a login shell in the user's home directory and gets its environment. This environment is then set on the Zed _process_. That means all Zed windows and projects will inherit that home directory environment. For some tools (like `node` for language server management) Zed runs them outside of the context of a project. In this case we load the
environment from your `HOME` directory as described above.
Since that can lead to problems for users who require different environment variables for a project (because they use `direnv`, or `asdf`, or `mise`, ... in that project), when opening a project, Zed spawns another login shell. This time in the project's directory. The environment from that login shell is _not_ set on the process (because that would mean opening a new project changes the environment for all Zed windows). Instead, the environment is stored and passed along when running tasks, opening terminals, or spawning language servers.
## Where and how are environment variables used? ## Where and how are environment variables used?
There are two sets of environment variables:
1. Environment variables of the Zed process
2. Environment variables stored per project
The variables from (1) are always used, since they are stored on the process itself and every spawned process (tasks, terminals, language servers, ...) will inherit them by default.
The variables from (2) are used explicitly, depending on the feature.
### Tasks ### Tasks
Tasks are spawned with a combined environment. In order of precedence (low to high, with the last overwriting the first): Tasks are spawned with a combined environment. In order of precedence (low to high, with the last overwriting the first):
- the Zed process environment - the Zed inherited environment
- if the project was opened from the CLI: the CLI environment - if the project was opened from the CLI: the CLI environment
- if the project was not opened from the CLI: the project environment variables obtained by running a login shell in the project's root folder
- optional, explicitly configured environment in settings - optional, explicitly configured environment in settings
### Built-in terminal ### Built-in terminal
Built-in terminals, like tasks, are spawned with a combined environment. In order of precedence (low to high): Built-in terminals, like tasks, are spawned with a combined environment. In order of precedence (low to high):
- the Zed process environment - the Zed inherited environment
- if the project was opened from the CLI: the CLI environment
- if the project was not opened from the CLI: the project environment variables obtained by running a login shell in the project's root folder
- optional, explicitly configured environment in settings - optional, explicitly configured environment in settings
### Look-up of language servers ### Look-up of language servers and debug adapters
For some languages the language server adapters lookup the binary in the user's `$PATH`. Examples: For some languages the language server adapters lookup the binary in the user's `$PATH`. Examples:
@@ -79,14 +68,15 @@ For some languages the language server adapters lookup the binary in the user's
For this look-up, Zed uses the following environment: For this look-up, Zed uses the following environment:
- the Zed inherited environment
- if the project was opened from the CLI: the CLI environment - if the project was opened from the CLI: the CLI environment
- if the project was not opened from the CLI: the project environment variables obtained by running a login shell in the project's root folder - if the project was not opened from the CLI: the loaded environment
### Language servers ### Language servers and debug adapters
After looking up a language server, Zed starts them. After looking up a language server, Zed starts them.
These language server processes always inherit Zed's process environment. But, depending on the language server look-up, additional environment variables might be set or might overwrite the process environment. These language server processes always inherit Zed's inherited environment. But, depending on the language server look-up, additional environment variables might be set or might overwrite the inherited environment.
- If the language server was found in the project environment's `$PATH`, then the project environment is passed along to the language server process. Where the project environment comes from depends on how the project was opened, via CLI or not. See previous point on look-up of language servers. - If the language server was found in the project environment's `$PATH`, then the project environment is passed along to the language server process. Where the project environment comes from depends on how the project was opened, via CLI or not. See previous point on look-up of language servers.
- If the language server was not found in the project environment, Zed tries to install it globally and start it globally. In that case, the process will inherit Zed's process environment, and — if the project was opened via CLI — from the CLI. - If the language server was not found in the project environment, Zed tries to install it globally and start it globally. In that case, the process will inherit Zed's inherited environment, and — if the project was opened via CLI — from the CLI.