Compare commits: git-open-d...v0.160.7

25 Commits

| Author | SHA1 | Date |
|---|---|---|
| | 4f88d58516 | |
| | 4241c58fcd | |
| | 571f9de553 | |
| | 458d9fca8e | |
| | ded671c1be | |
| | 883281d26f | |
| | de1e2d0691 | |
| | 86e7561ce2 | |
| | fcb71c6672 | |
| | 8b90601d56 | |
| | 6adda2013f | |
| | cd661e1411 | |
| | 257a8a3d33 | |
| | d992e08c28 | |
| | de4303726b | |
| | 460b4d49e1 | |
| | c4a0c9acca | |
| | 8ab5b67a4d | |
| | 18c1268b25 | |
| | 0cb056c26a | |
| | 68a683ab4b | |
| | 4abd65e894 | |
| | e2155a9e40 | |
| | f862fe4eb7 | |
| | 8560f0dc06 | |
.github/ISSUE_TEMPLATE/0_feature_request.yml (7 changes, vendored)
@@ -15,6 +15,13 @@ body:
description: A clear and concise description of what you want to happen.
validations:
required: true
- type: textarea
id: environment
attributes:
label: Environment
description: Run the `copy system specs into clipboard` command palette action and paste the output in the field below.
validations:
required: true
- type: textarea
attributes:
label: |
.github/ISSUE_TEMPLATE/1_bug_report.yml (11 changes, vendored)
@@ -2,7 +2,7 @@ name: Bug Report
description: |
Use this template for **non-crash-related** bug reports.
Tip: open this issue template from within Zed with the `file bug report` command palette action.
labels: ["admin read", "triage", "defect"]
labels: ["admin read", "triage", "bug"]
body:
- type: checkboxes
attributes:
@@ -38,9 +38,12 @@ body:
Linux: `~/.local/share/zed/logs/Zed.log` or $XDG_DATA_HOME
If you only need the most recent lines, you can run the `zed: open log` command palette action to see the last 1000.
value: |
<details><summary>Zed.log</summary><pre>
<!-- Click below this line and paste or drag-and-drop your log-->
<details><summary>Zed.log</summary>

<!-- Click above this line and paste or drag-and-drop your log--></pre></details>
<!-- Click below this line and paste or drag-and-drop your log-->
```

```
<!-- Click above this line and paste or drag-and-drop your log--></details>
validations:
required: false
.github/ISSUE_TEMPLATE/2_crash_report.yml (11 changes, vendored)
@@ -1,7 +1,7 @@
name: Crash Report
description: |
Use this template for crash reports.
labels: ["admin read", "triage", "defect", "panic / crash"]
labels: ["admin read", "triage", "bug", "panic / crash"]
body:
- type: checkboxes
attributes:
@@ -31,9 +31,12 @@ body:
Linux: `~/.local/share/zed/logs/Zed.log` or $XDG_DATA_HOME
If you only need the most recent lines, you can run the `zed: open log` command palette action to see the last 1000.
value: |
<details><summary>Zed.log</summary><pre>
<!-- Click below this line and paste or drag-and-drop your log-->
<details><summary>Zed.log</summary>

<!-- Click above this line and paste or drag-and-drop your log--></pre></details>
<!-- Click below this line and paste or drag-and-drop your log-->
```

```
<!-- Click above this line and paste or drag-and-drop your log--></details>
validations:
required: false
@@ -24,7 +24,7 @@ jobs:
# issues, preventing 365 days from working until then.
days-before-stale: 180
days-before-close: 7
any-of-issue-labels: "defect,panic / crash"
any-of-issue-labels: "bug,panic / crash"
operations-per-run: 1000
ascending: true
enable-statistics: true
Cargo.lock (5 changes, generated)
@@ -9533,9 +9533,11 @@ dependencies = [
"fs",
"futures 0.3.30",
"gpui",
"itertools 0.13.0",
"log",
"parking_lot",
"prost",
"release_channel",
"rpc",
"serde",
"serde_json",
@@ -14740,7 +14742,6 @@ dependencies = [
"parking_lot",
"postage",
"project",
"release_channel",
"remote",
"schemars",
"serde",
@@ -15033,7 +15034,7 @@ dependencies = [

[[package]]
name = "zed"
version = "0.160.0"
version = "0.160.7"
dependencies = [
"activity_indicator",
"anyhow",
@@ -21,8 +21,6 @@ use ui::prelude::*;
use util::ResultExt;
use workspace::Workspace;

use crate::slash_command::diagnostics_command::collect_buffer_diagnostics;

pub(crate) struct FileSlashCommand;

impl FileSlashCommand {
@@ -539,8 +537,6 @@ pub fn append_buffer_to_output(
output.text.push('\n');

let section_ix = output.sections.len();
collect_buffer_diagnostics(output, buffer, false);

output.sections.insert(
section_ix,
build_entry_output_section(prev_len..output.text.len(), path, false, None),
@@ -686,6 +686,12 @@ async fn download_remote_server_binary(
let request_body = AsyncBody::from(serde_json::to_string(&update_request_body)?);

let mut response = client.get(&release.url, request_body, true).await?;
if !response.status().is_success() {
return Err(anyhow!(
"failed to download remote server release: {:?}",
response.status()
));
}
smol::io::copy(response.body_mut(), &mut temp_file).await?;
smol::fs::rename(&temp, &target_path).await?;
@@ -35,14 +35,30 @@ pub enum Model {
Gpt4,
#[serde(alias = "gpt-3.5-turbo", rename = "gpt-3.5-turbo")]
Gpt3_5Turbo,
#[serde(alias = "o1-preview", rename = "o1-preview-2024-09-12")]
O1Preview,
#[serde(alias = "o1-mini", rename = "o1-mini-2024-09-12")]
O1Mini,
#[serde(alias = "claude-3-5-sonnet", rename = "claude-3.5-sonnet")]
Claude3_5Sonnet,
}

impl Model {
pub fn uses_streaming(&self) -> bool {
match self {
Self::Gpt4o | Self::Gpt4 | Self::Gpt3_5Turbo | Self::Claude3_5Sonnet => true,
Self::O1Mini | Self::O1Preview => false,
}
}

pub fn from_id(id: &str) -> Result<Self> {
match id {
"gpt-4o" => Ok(Self::Gpt4o),
"gpt-4" => Ok(Self::Gpt4),
"gpt-3.5-turbo" => Ok(Self::Gpt3_5Turbo),
"o1-preview" => Ok(Self::O1Preview),
"o1-mini" => Ok(Self::O1Mini),
"claude-3-5-sonnet" => Ok(Self::Claude3_5Sonnet),
_ => Err(anyhow!("Invalid model id: {}", id)),
}
}
@@ -52,6 +68,9 @@ impl Model {
Self::Gpt3_5Turbo => "gpt-3.5-turbo",
Self::Gpt4 => "gpt-4",
Self::Gpt4o => "gpt-4o",
Self::O1Mini => "o1-mini",
Self::O1Preview => "o1-preview",
Self::Claude3_5Sonnet => "claude-3-5-sonnet",
}
}

@@ -60,6 +79,9 @@ impl Model {
Self::Gpt3_5Turbo => "GPT-3.5",
Self::Gpt4 => "GPT-4",
Self::Gpt4o => "GPT-4o",
Self::O1Mini => "o1-mini",
Self::O1Preview => "o1-preview",
Self::Claude3_5Sonnet => "Claude 3.5 Sonnet",
}
}

@@ -68,6 +90,9 @@ impl Model {
Self::Gpt4o => 128000,
Self::Gpt4 => 8192,
Self::Gpt3_5Turbo => 16385,
Self::O1Mini => 128000,
Self::O1Preview => 128000,
Self::Claude3_5Sonnet => 200_000,
}
}
}
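
Taken together, the hunks above add the o1 models and Claude 3.5 Sonnet to Copilot Chat's `Model` enum and record that the o1 family does not support streaming. A minimal sketch of how a caller might branch on that flag, assuming the `Model` type and `anyhow::Result` from this file are in scope (the helper name is illustrative, not part of the diff):

```rust
// Illustrative only: decide which request/response path to take for a model id.
fn completion_mode(model_id: &str) -> anyhow::Result<&'static str> {
    let model = Model::from_id(model_id)?; // e.g. "o1-mini" or "claude-3-5-sonnet"
    Ok(if model.uses_streaming() {
        "stream server-sent events"
    } else {
        "read a single JSON response body"
    })
}
```
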
@@ -87,7 +112,7 @@ impl Request {
Self {
intent: true,
n: 1,
stream: true,
stream: model.uses_streaming(),
temperature: 0.1,
model,
messages,
@@ -113,7 +138,8 @@ pub struct ResponseEvent {
pub struct ResponseChoice {
pub index: usize,
pub finish_reason: Option<String>,
pub delta: ResponseDelta,
pub delta: Option<ResponseDelta>,
pub message: Option<ResponseDelta>,
}

#[derive(Debug, Deserialize)]
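
Because o1 responses come back as one complete message instead of a stream of deltas, `ResponseChoice` now exposes both `delta` and `message` as `Option`s. A hedged sketch of how a consumer might read either shape; the `content: Option<String>` field on `ResponseDelta` is inferred from the provider change further down, and the helper itself is illustrative:

```rust
// Illustrative only: pull the text out of a choice, whichever field is populated.
fn choice_text(choice: &ResponseChoice, is_streaming: bool) -> anyhow::Result<String> {
    let part = if is_streaming { choice.delta.as_ref() } else { choice.message.as_ref() };
    part.map(|d| d.content.clone().unwrap_or_default())
        .ok_or_else(|| anyhow::anyhow!("response choice carried no content"))
}
```
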
@@ -333,9 +359,23 @@ async fn stream_completion(
if let Some(low_speed_timeout) = low_speed_timeout {
request_builder = request_builder.read_timeout(low_speed_timeout);
}
let is_streaming = request.stream;

let request = request_builder.body(AsyncBody::from(serde_json::to_string(&request)?))?;
let mut response = client.send(request).await?;
if response.status().is_success() {

if !response.status().is_success() {
let mut body = Vec::new();
response.body_mut().read_to_end(&mut body).await?;
let body_str = std::str::from_utf8(&body)?;
return Err(anyhow!(
"Failed to connect to API: {} {}",
response.status(),
body_str
));
}

if is_streaming {
let reader = BufReader::new(response.into_body());
Ok(reader
.lines()
@@ -367,19 +407,9 @@ async fn stream_completion(
} else {
let mut body = Vec::new();
response.body_mut().read_to_end(&mut body).await?;

let body_str = std::str::from_utf8(&body)?;
let response: ResponseEvent = serde_json::from_str(body_str)?;

match serde_json::from_str::<ResponseEvent>(body_str) {
Ok(_) => Err(anyhow!(
"Unexpected success response while expecting an error: {}",
body_str,
)),
Err(_) => Err(anyhow!(
"Failed to connect to API: {} {}",
response.status(),
body_str,
)),
}
Ok(futures::stream::once(async move { Ok(response) }).boxed())
}
}
@@ -992,12 +992,15 @@ impl SerializableItem for Editor {
};

// First create the empty buffer
let buffer = project.update(&mut cx, |project, cx| {
project.create_local_buffer("", language, cx)
})?;
let buffer = project
.update(&mut cx, |project, cx| project.create_buffer(cx))?
.await?;

// Then set the text so that the dirty bit is set correctly
buffer.update(&mut cx, |buffer, cx| {
if let Some(language) = language {
buffer.set_language(Some(language), cx);
}
buffer.set_text(contents, cx);
})?;
@@ -29,7 +29,7 @@ const fn request_feature_url() -> &'static str {

fn file_bug_report_url(specs: &SystemSpecs) -> String {
format!(
"https://github.com/zed-industries/zed/issues/new?assignees=&labels=admin+read%2Ctriage%2Cdefect&projects=&template=1_bug_report.yml&environment={}",
"https://github.com/zed-industries/zed/issues/new?assignees=&labels=admin+read%2Ctriage%2Cbug&projects=&template=1_bug_report.yml&environment={}",
urlencoding::encode(&specs.to_string())
)
}
@@ -136,7 +136,7 @@ pub struct GenerateContentResponse {
#[derive(Debug, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct GenerateContentCandidate {
pub index: usize,
pub index: Option<usize>,
pub content: Content,
pub finish_reason: Option<String>,
pub finish_message: Option<String>,
@@ -5,7 +5,7 @@ use anyhow::Result;
|
||||
use collections::{HashMap, HashSet};
|
||||
use core::slice;
|
||||
use ec4rs::{
|
||||
property::{FinalNewline, IndentSize, IndentStyle, MaxLineLen, TabWidth, TrimTrailingWs},
|
||||
property::{FinalNewline, IndentSize, IndentStyle, TabWidth, TrimTrailingWs},
|
||||
Properties as EditorconfigProperties,
|
||||
};
|
||||
use globset::{Glob, GlobMatcher, GlobSet, GlobSetBuilder};
|
||||
@@ -870,10 +870,6 @@ impl AllLanguageSettings {
|
||||
}
|
||||
|
||||
fn merge_with_editorconfig(settings: &mut LanguageSettings, cfg: &EditorconfigProperties) {
|
||||
let max_line_length = cfg.get::<MaxLineLen>().ok().and_then(|v| match v {
|
||||
MaxLineLen::Value(u) => Some(u as u32),
|
||||
MaxLineLen::Off => None,
|
||||
});
|
||||
let tab_size = cfg.get::<IndentSize>().ok().and_then(|v| match v {
|
||||
IndentSize::Value(u) => NonZeroU32::new(u as u32),
|
||||
IndentSize::UseTabWidth => cfg.get::<TabWidth>().ok().and_then(|w| match w {
|
||||
@@ -896,13 +892,6 @@ fn merge_with_editorconfig(settings: &mut LanguageSettings, cfg: &EditorconfigPr
|
||||
TrimTrailingWs::Value(b) => b,
|
||||
})
|
||||
.ok();
|
||||
let preferred_line_length = max_line_length;
|
||||
let soft_wrap = if max_line_length.is_some() {
|
||||
Some(SoftWrap::PreferredLineLength)
|
||||
} else {
|
||||
None
|
||||
};
|
||||
|
||||
fn merge<T>(target: &mut T, value: Option<T>) {
|
||||
if let Some(value) = value {
|
||||
*target = value;
|
||||
@@ -918,8 +907,6 @@ fn merge_with_editorconfig(settings: &mut LanguageSettings, cfg: &EditorconfigPr
|
||||
&mut settings.ensure_final_newline_on_save,
|
||||
ensure_final_newline_on_save,
|
||||
);
|
||||
merge(&mut settings.preferred_line_length, preferred_line_length);
|
||||
merge(&mut settings.soft_wrap, soft_wrap);
|
||||
}
|
||||
|
||||
/// The kind of an inlay hint.
|
||||
|
||||
@@ -30,6 +30,7 @@ use crate::{
|
||||
};
|
||||
use crate::{LanguageModelCompletionEvent, LanguageModelProviderState};
|
||||
|
||||
use super::anthropic::count_anthropic_tokens;
|
||||
use super::open_ai::count_open_ai_tokens;
|
||||
|
||||
const PROVIDER_ID: &str = "copilot_chat";
|
||||
@@ -179,13 +180,19 @@ impl LanguageModel for CopilotChatLanguageModel {
|
||||
request: LanguageModelRequest,
|
||||
cx: &AppContext,
|
||||
) -> BoxFuture<'static, Result<usize>> {
|
||||
let model = match self.model {
|
||||
CopilotChatModel::Gpt4o => open_ai::Model::FourOmni,
|
||||
CopilotChatModel::Gpt4 => open_ai::Model::Four,
|
||||
CopilotChatModel::Gpt3_5Turbo => open_ai::Model::ThreePointFiveTurbo,
|
||||
};
|
||||
|
||||
count_open_ai_tokens(request, model, cx)
|
||||
match self.model {
|
||||
CopilotChatModel::Claude3_5Sonnet => count_anthropic_tokens(request, cx),
|
||||
_ => {
|
||||
let model = match self.model {
|
||||
CopilotChatModel::Gpt4o => open_ai::Model::FourOmni,
|
||||
CopilotChatModel::Gpt4 => open_ai::Model::Four,
|
||||
CopilotChatModel::Gpt3_5Turbo => open_ai::Model::ThreePointFiveTurbo,
|
||||
CopilotChatModel::O1Preview | CopilotChatModel::O1Mini => open_ai::Model::Four,
|
||||
CopilotChatModel::Claude3_5Sonnet => unreachable!(),
|
||||
};
|
||||
count_open_ai_tokens(request, model, cx)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn stream_completion(
|
||||
@@ -209,7 +216,8 @@ impl LanguageModel for CopilotChatLanguageModel {
|
||||
}
|
||||
}
|
||||
|
||||
let request = self.to_copilot_chat_request(request);
|
||||
let copilot_request = self.to_copilot_chat_request(request);
|
||||
let is_streaming = copilot_request.stream;
|
||||
let Ok(low_speed_timeout) = cx.update(|cx| {
|
||||
AllLanguageModelSettings::get_global(cx)
|
||||
.copilot_chat
|
||||
@@ -220,16 +228,31 @@ impl LanguageModel for CopilotChatLanguageModel {
|
||||
|
||||
let request_limiter = self.request_limiter.clone();
|
||||
let future = cx.spawn(|cx| async move {
|
||||
let response = CopilotChat::stream_completion(request, low_speed_timeout, cx);
|
||||
let response = CopilotChat::stream_completion(copilot_request, low_speed_timeout, cx);
|
||||
request_limiter.stream(async move {
|
||||
let response = response.await?;
|
||||
let stream = response
|
||||
.filter_map(|response| async move {
|
||||
.filter_map(move |response| async move {
|
||||
match response {
|
||||
Ok(result) => {
|
||||
let choice = result.choices.first();
|
||||
match choice {
|
||||
Some(choice) => Some(Ok(choice.delta.content.clone().unwrap_or_default())),
|
||||
Some(choice) if !is_streaming => {
|
||||
match &choice.message {
|
||||
Some(msg) => Some(Ok(msg.content.clone().unwrap_or_default())),
|
||||
None => Some(Err(anyhow::anyhow!(
|
||||
"The Copilot Chat API returned a response with no message content"
|
||||
))),
|
||||
}
|
||||
},
|
||||
Some(choice) => {
|
||||
match &choice.delta {
|
||||
Some(delta) => Some(Ok(delta.content.clone().unwrap_or_default())),
|
||||
None => Some(Err(anyhow::anyhow!(
|
||||
"The Copilot Chat API returned a response with no delta content"
|
||||
))),
|
||||
}
|
||||
},
|
||||
None => Some(Err(anyhow::anyhow!(
|
||||
"The Copilot Chat API returned a response with no choices, but hadn't finished the message yet. Please try again."
|
||||
))),
|
||||
|
||||
@@ -1420,26 +1420,26 @@ impl OutlinePanel {
|
||||
}
|
||||
}
|
||||
|
||||
fn reveal_entry_for_selection(
|
||||
&mut self,
|
||||
editor: &View<Editor>,
|
||||
cx: &mut ViewContext<'_, Self>,
|
||||
) {
|
||||
fn reveal_entry_for_selection(&mut self, editor: View<Editor>, cx: &mut ViewContext<'_, Self>) {
|
||||
if !self.active {
|
||||
return;
|
||||
}
|
||||
if !OutlinePanelSettings::get_global(cx).auto_reveal_entries {
|
||||
return;
|
||||
}
|
||||
let Some(entry_with_selection) = self.location_for_editor_selection(editor, cx) else {
|
||||
self.selected_entry = SelectedEntry::None;
|
||||
cx.notify();
|
||||
return;
|
||||
};
|
||||
|
||||
let project = self.project.clone();
|
||||
self.reveal_selection_task = cx.spawn(|outline_panel, mut cx| async move {
|
||||
cx.background_executor().timer(UPDATE_DEBOUNCE).await;
|
||||
let entry_with_selection = outline_panel.update(&mut cx, |outline_panel, cx| {
|
||||
outline_panel.location_for_editor_selection(&editor, cx)
|
||||
})?;
|
||||
let Some(entry_with_selection) = entry_with_selection else {
|
||||
outline_panel.update(&mut cx, |outline_panel, cx| {
|
||||
outline_panel.selected_entry = SelectedEntry::None;
|
||||
cx.notify();
|
||||
})?;
|
||||
return Ok(());
|
||||
};
|
||||
let related_buffer_entry = match &entry_with_selection {
|
||||
PanelEntry::Fs(FsEntry::File(worktree_id, _, buffer_id, _)) => {
|
||||
project.update(&mut cx, |project, cx| {
|
||||
@@ -2406,7 +2406,7 @@ impl OutlinePanel {
|
||||
}
|
||||
|
||||
fn location_for_editor_selection(
|
||||
&mut self,
|
||||
&self,
|
||||
editor: &View<Editor>,
|
||||
cx: &mut ViewContext<Self>,
|
||||
) -> Option<PanelEntry> {
|
||||
@@ -2470,7 +2470,7 @@ impl OutlinePanel {
|
||||
}
|
||||
|
||||
fn outline_location(
|
||||
&mut self,
|
||||
&self,
|
||||
buffer_id: BufferId,
|
||||
excerpt_id: ExcerptId,
|
||||
multi_buffer_snapshot: editor::MultiBufferSnapshot,
|
||||
@@ -3785,7 +3785,7 @@ impl Render for OutlinePanel {
|
||||
}
|
||||
}),
|
||||
)
|
||||
.track_focus(&self.focus_handle(cx));
|
||||
.track_focus(&self.focus_handle);
|
||||
|
||||
if self.cached_entries.is_empty() {
|
||||
let header = if self.updating_fs_entries {
|
||||
@@ -4054,7 +4054,7 @@ fn subscribe_for_editor_events(
|
||||
editor,
|
||||
move |outline_panel, editor, e: &EditorEvent, cx| match e {
|
||||
EditorEvent::SelectionsChanged { local: true } => {
|
||||
outline_panel.reveal_entry_for_selection(&editor, cx);
|
||||
outline_panel.reveal_entry_for_selection(editor, cx);
|
||||
cx.notify();
|
||||
}
|
||||
EditorEvent::ExcerptsAdded { excerpts, .. } => {
|
||||
@@ -4197,7 +4197,13 @@ mod tests {
|
||||
cx.executor()
|
||||
.advance_clock(UPDATE_DEBOUNCE + Duration::from_millis(100));
|
||||
cx.run_until_parked();
|
||||
outline_panel.update(cx, |outline_panel, _| {
|
||||
outline_panel.update(cx, |outline_panel, cx| {
|
||||
// Project search re-adds items to the buffer, removing the caret from it.
|
||||
// Select the first entry and move 4 elements down.
|
||||
for _ in 0..6 {
|
||||
outline_panel.select_next(&SelectNext, cx);
|
||||
}
|
||||
|
||||
assert_eq!(
|
||||
display_entries(
|
||||
&outline_panel.cached_entries,
|
||||
@@ -4513,7 +4519,7 @@ mod tests {
|
||||
r#"/
|
||||
public/lottie/
|
||||
syntax-tree.json
|
||||
search: { "something": "static" } <==== selected
|
||||
search: { "something": "static" }
|
||||
src/
|
||||
app/(site)/
|
||||
(about)/jobs/[slug]/
|
||||
@@ -4529,8 +4535,11 @@ mod tests {
|
||||
});
|
||||
|
||||
outline_panel.update(cx, |outline_panel, cx| {
|
||||
outline_panel.select_next(&SelectNext, cx);
|
||||
outline_panel.select_next(&SelectNext, cx);
|
||||
// After the search is done, we have updated the outline panel contents and the caret is not in any excerpt, so there are no selections.
|
||||
// Move to the 5th element in the list (the 0th action will select the first element).
|
||||
for _ in 0..6 {
|
||||
outline_panel.select_next(&SelectNext, cx);
|
||||
}
|
||||
outline_panel.collapse_selected_entry(&CollapseSelectedEntry, cx);
|
||||
});
|
||||
cx.run_until_parked();
|
||||
|
||||
@@ -2214,7 +2214,7 @@ impl Project {
|
||||
match event {
|
||||
BufferEvent::ReloadNeeded => {
|
||||
if !self.is_via_collab() {
|
||||
self.reload_buffers([buffer.clone()].into_iter().collect(), false, cx)
|
||||
self.reload_buffers([buffer.clone()].into_iter().collect(), true, cx)
|
||||
.detach_and_log_err(cx);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -4,9 +4,7 @@ use futures::{future, StreamExt};
|
||||
use gpui::{AppContext, SemanticVersion, UpdateGlobal};
|
||||
use http_client::Url;
|
||||
use language::{
|
||||
language_settings::{
|
||||
language_settings, AllLanguageSettings, LanguageSettingsContent, SoftWrap,
|
||||
},
|
||||
language_settings::{language_settings, AllLanguageSettings, LanguageSettingsContent},
|
||||
tree_sitter_rust, tree_sitter_typescript, Diagnostic, DiagnosticSet, FakeLspAdapter,
|
||||
LanguageConfig, LanguageMatcher, LanguageName, LineEnding, OffsetRangeExt, Point, ToPoint,
|
||||
};
|
||||
@@ -106,7 +104,6 @@ async fn test_editorconfig_support(cx: &mut gpui::TestAppContext) {
|
||||
end_of_line = lf
|
||||
insert_final_newline = true
|
||||
trim_trailing_whitespace = true
|
||||
max_line_length = 80
|
||||
[*.js]
|
||||
tab_width = 10
|
||||
"#,
|
||||
@@ -116,7 +113,6 @@ async fn test_editorconfig_support(cx: &mut gpui::TestAppContext) {
|
||||
"hard_tabs": false,
|
||||
"ensure_final_newline_on_save": false,
|
||||
"remove_trailing_whitespace_on_save": false,
|
||||
"preferred_line_length": 64,
|
||||
"soft_wrap": "editor_width"
|
||||
}"#,
|
||||
},
|
||||
@@ -125,7 +121,6 @@ async fn test_editorconfig_support(cx: &mut gpui::TestAppContext) {
|
||||
".editorconfig": r#"
|
||||
[*.rs]
|
||||
indent_size = 2
|
||||
max_line_length = off
|
||||
"#,
|
||||
"b.rs": "fn b() {\n B\n}",
|
||||
},
|
||||
@@ -174,10 +169,6 @@ async fn test_editorconfig_support(cx: &mut gpui::TestAppContext) {
|
||||
assert_eq!(settings_a.hard_tabs, true);
|
||||
assert_eq!(settings_a.ensure_final_newline_on_save, true);
|
||||
assert_eq!(settings_a.remove_trailing_whitespace_on_save, true);
|
||||
assert_eq!(settings_a.preferred_line_length, 80);
|
||||
|
||||
// "max_line_length" also sets "soft_wrap"
|
||||
assert_eq!(settings_a.soft_wrap, SoftWrap::PreferredLineLength);
|
||||
|
||||
// .editorconfig in b/ overrides .editorconfig in root
|
||||
assert_eq!(Some(settings_b.tab_size), NonZeroU32::new(2));
|
||||
@@ -185,10 +176,6 @@ async fn test_editorconfig_support(cx: &mut gpui::TestAppContext) {
|
||||
// "indent_size" is not set, so "tab_width" is used
|
||||
assert_eq!(Some(settings_c.tab_size), NonZeroU32::new(10));
|
||||
|
||||
// When max_line_length is "off", default to .zed/settings.json
|
||||
assert_eq!(settings_b.preferred_line_length, 64);
|
||||
assert_eq!(settings_b.soft_wrap, SoftWrap::EditorWidth);
|
||||
|
||||
// README.md should not be affected by .editorconfig's glob "*.rs"
|
||||
assert_eq!(Some(settings_readme.tab_size), NonZeroU32::new(8));
|
||||
});
|
||||
|
||||
@@ -16,6 +16,7 @@ use gpui::{
|
||||
};
|
||||
use picker::Picker;
|
||||
use project::Project;
|
||||
use remote::ssh_session::ConnectionIdentifier;
|
||||
use remote::SshConnectionOptions;
|
||||
use remote::SshRemoteClient;
|
||||
use settings::update_settings_file;
|
||||
@@ -413,7 +414,7 @@ impl RemoteServerProjects {
|
||||
let ssh_prompt = cx.new_view(|cx| SshPrompt::new(&connection_options, cx));
|
||||
|
||||
let connection = connect_over_ssh(
|
||||
connection_options.remote_server_identifier(),
|
||||
ConnectionIdentifier::Setup,
|
||||
connection_options.clone(),
|
||||
ssh_prompt.clone(),
|
||||
cx,
|
||||
@@ -501,7 +502,7 @@ impl RemoteServerProjects {
|
||||
.clone();
|
||||
|
||||
let connect = connect_over_ssh(
|
||||
connection_options.remote_server_identifier(),
|
||||
ConnectionIdentifier::Setup,
|
||||
connection_options.clone(),
|
||||
prompt,
|
||||
cx,
|
||||
|
||||
@@ -13,8 +13,8 @@ use gpui::{AppContext, Model};
|
||||
|
||||
use language::CursorShape;
|
||||
use markdown::{Markdown, MarkdownStyle};
|
||||
use release_channel::{AppVersion, ReleaseChannel};
|
||||
use remote::ssh_session::{ServerBinary, ServerVersion};
|
||||
use release_channel::ReleaseChannel;
|
||||
use remote::ssh_session::ConnectionIdentifier;
|
||||
use remote::{SshConnectionOptions, SshPlatform, SshRemoteClient};
|
||||
use schemars::JsonSchema;
|
||||
use serde::{Deserialize, Serialize};
|
||||
@@ -441,23 +441,66 @@ impl remote::SshClientDelegate for SshClientDelegate {
|
||||
self.update_status(status, cx)
|
||||
}
|
||||
|
||||
fn get_server_binary(
|
||||
fn download_server_binary_locally(
|
||||
&self,
|
||||
platform: SshPlatform,
|
||||
upload_binary_over_ssh: bool,
|
||||
release_channel: ReleaseChannel,
|
||||
version: Option<SemanticVersion>,
|
||||
cx: &mut AsyncAppContext,
|
||||
) -> oneshot::Receiver<Result<(ServerBinary, ServerVersion)>> {
|
||||
let (tx, rx) = oneshot::channel();
|
||||
let this = self.clone();
|
||||
) -> Task<anyhow::Result<PathBuf>> {
|
||||
cx.spawn(|mut cx| async move {
|
||||
tx.send(
|
||||
this.get_server_binary_impl(platform, upload_binary_over_ssh, &mut cx)
|
||||
.await,
|
||||
let binary_path = AutoUpdater::download_remote_server_release(
|
||||
platform.os,
|
||||
platform.arch,
|
||||
release_channel,
|
||||
version,
|
||||
&mut cx,
|
||||
)
|
||||
.ok();
|
||||
.await
|
||||
.map_err(|e| {
|
||||
anyhow!(
|
||||
"Failed to download remote server binary (version: {}, os: {}, arch: {}): {}",
|
||||
version
|
||||
.map(|v| format!("{}", v))
|
||||
.unwrap_or("unknown".to_string()),
|
||||
platform.os,
|
||||
platform.arch,
|
||||
e
|
||||
)
|
||||
})?;
|
||||
Ok(binary_path)
|
||||
})
|
||||
.detach();
|
||||
rx
|
||||
}
|
||||
|
||||
fn get_download_params(
|
||||
&self,
|
||||
platform: SshPlatform,
|
||||
release_channel: ReleaseChannel,
|
||||
version: Option<SemanticVersion>,
|
||||
cx: &mut AsyncAppContext,
|
||||
) -> Task<Result<(String, String)>> {
|
||||
cx.spawn(|mut cx| async move {
|
||||
let (release, request_body) = AutoUpdater::get_remote_server_release_url(
|
||||
platform.os,
|
||||
platform.arch,
|
||||
release_channel,
|
||||
version,
|
||||
&mut cx,
|
||||
)
|
||||
.await
|
||||
.map_err(|e| {
|
||||
anyhow!(
|
||||
"Failed to get remote server binary download url (version: {}, os: {}, arch: {}): {}",
|
||||
version.map(|v| format!("{}", v)).unwrap_or("unknown".to_string()),
|
||||
platform.os,
|
||||
platform.arch,
|
||||
e
|
||||
)
|
||||
})?;
|
||||
|
||||
Ok((release.url, request_body))
|
||||
}
|
||||
)
|
||||
}
|
||||
|
||||
fn remote_server_binary_path(
|
||||
@@ -485,208 +528,6 @@ impl SshClientDelegate {
|
||||
})
|
||||
.ok();
|
||||
}
|
||||
|
||||
async fn get_server_binary_impl(
|
||||
&self,
|
||||
platform: SshPlatform,
|
||||
upload_binary_via_ssh: bool,
|
||||
cx: &mut AsyncAppContext,
|
||||
) -> Result<(ServerBinary, ServerVersion)> {
|
||||
let (version, release_channel) = cx.update(|cx| {
|
||||
let version = AppVersion::global(cx);
|
||||
let channel = ReleaseChannel::global(cx);
|
||||
|
||||
(version, channel)
|
||||
})?;
|
||||
|
||||
// In dev mode, build the remote server binary from source
|
||||
#[cfg(debug_assertions)]
|
||||
if release_channel == ReleaseChannel::Dev {
|
||||
let result = self.build_local(cx, platform, version).await?;
|
||||
// Fall through to a remote binary if we're not able to compile a local binary
|
||||
if let Some((path, version)) = result {
|
||||
return Ok((
|
||||
ServerBinary::LocalBinary(path),
|
||||
ServerVersion::Semantic(version),
|
||||
));
|
||||
}
|
||||
}
|
||||
|
||||
// For nightly channel, always get latest
|
||||
let current_version = if release_channel == ReleaseChannel::Nightly {
|
||||
None
|
||||
} else {
|
||||
Some(version)
|
||||
};
|
||||
|
||||
self.update_status(
|
||||
Some(&format!("Checking remote server release {}", version)),
|
||||
cx,
|
||||
);
|
||||
|
||||
if upload_binary_via_ssh {
|
||||
let binary_path = AutoUpdater::download_remote_server_release(
|
||||
platform.os,
|
||||
platform.arch,
|
||||
release_channel,
|
||||
current_version,
|
||||
cx,
|
||||
)
|
||||
.await
|
||||
.map_err(|e| {
|
||||
anyhow!(
|
||||
"Failed to download remote server binary (version: {}, os: {}, arch: {}): {}",
|
||||
version,
|
||||
platform.os,
|
||||
platform.arch,
|
||||
e
|
||||
)
|
||||
})?;
|
||||
|
||||
Ok((
|
||||
ServerBinary::LocalBinary(binary_path),
|
||||
ServerVersion::Semantic(version),
|
||||
))
|
||||
} else {
|
||||
let (release, request_body) = AutoUpdater::get_remote_server_release_url(
|
||||
platform.os,
|
||||
platform.arch,
|
||||
release_channel,
|
||||
current_version,
|
||||
cx,
|
||||
)
|
||||
.await
|
||||
.map_err(|e| {
|
||||
anyhow!(
|
||||
"Failed to get remote server binary download url (version: {}, os: {}, arch: {}): {}",
|
||||
version,
|
||||
platform.os,
|
||||
platform.arch,
|
||||
e
|
||||
)
|
||||
})?;
|
||||
|
||||
let version = release
|
||||
.version
|
||||
.parse::<SemanticVersion>()
|
||||
.map(ServerVersion::Semantic)
|
||||
.unwrap_or_else(|_| ServerVersion::Commit(release.version));
|
||||
Ok((
|
||||
ServerBinary::ReleaseUrl {
|
||||
url: release.url,
|
||||
body: request_body,
|
||||
},
|
||||
version,
|
||||
))
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(debug_assertions)]
|
||||
async fn build_local(
|
||||
&self,
|
||||
cx: &mut AsyncAppContext,
|
||||
platform: SshPlatform,
|
||||
version: gpui::SemanticVersion,
|
||||
) -> Result<Option<(PathBuf, gpui::SemanticVersion)>> {
|
||||
use smol::process::{Command, Stdio};
|
||||
|
||||
async fn run_cmd(command: &mut Command) -> Result<()> {
|
||||
let output = command
|
||||
.kill_on_drop(true)
|
||||
.stderr(Stdio::inherit())
|
||||
.output()
|
||||
.await?;
|
||||
if !output.status.success() {
|
||||
Err(anyhow!("Failed to run command: {:?}", command))?;
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
if platform.arch == std::env::consts::ARCH && platform.os == std::env::consts::OS {
|
||||
self.update_status(Some("Building remote server binary from source"), cx);
|
||||
log::info!("building remote server binary from source");
|
||||
run_cmd(Command::new("cargo").args([
|
||||
"build",
|
||||
"--package",
|
||||
"remote_server",
|
||||
"--features",
|
||||
"debug-embed",
|
||||
"--target-dir",
|
||||
"target/remote_server",
|
||||
]))
|
||||
.await?;
|
||||
|
||||
self.update_status(Some("Compressing binary"), cx);
|
||||
|
||||
run_cmd(Command::new("gzip").args([
|
||||
"-9",
|
||||
"-f",
|
||||
"target/remote_server/debug/remote_server",
|
||||
]))
|
||||
.await?;
|
||||
|
||||
let path = std::env::current_dir()?.join("target/remote_server/debug/remote_server.gz");
|
||||
return Ok(Some((path, version)));
|
||||
} else if let Some(triple) = platform.triple() {
|
||||
smol::fs::create_dir_all("target/remote_server").await?;
|
||||
|
||||
self.update_status(Some("Installing cross.rs for cross-compilation"), cx);
|
||||
log::info!("installing cross");
|
||||
run_cmd(Command::new("cargo").args([
|
||||
"install",
|
||||
"cross",
|
||||
"--git",
|
||||
"https://github.com/cross-rs/cross",
|
||||
]))
|
||||
.await?;
|
||||
|
||||
self.update_status(
|
||||
Some(&format!(
|
||||
"Building remote server binary from source for {} with Docker",
|
||||
&triple
|
||||
)),
|
||||
cx,
|
||||
);
|
||||
log::info!("building remote server binary from source for {}", &triple);
|
||||
run_cmd(
|
||||
Command::new("cross")
|
||||
.args([
|
||||
"build",
|
||||
"--package",
|
||||
"remote_server",
|
||||
"--features",
|
||||
"debug-embed",
|
||||
"--target-dir",
|
||||
"target/remote_server",
|
||||
"--target",
|
||||
&triple,
|
||||
])
|
||||
.env(
|
||||
"CROSS_CONTAINER_OPTS",
|
||||
"--mount type=bind,src=./target,dst=/app/target",
|
||||
),
|
||||
)
|
||||
.await?;
|
||||
|
||||
self.update_status(Some("Compressing binary"), cx);
|
||||
|
||||
run_cmd(Command::new("gzip").args([
|
||||
"-9",
|
||||
"-f",
|
||||
&format!("target/remote_server/{}/debug/remote_server", triple),
|
||||
]))
|
||||
.await?;
|
||||
|
||||
let path = std::env::current_dir()?.join(format!(
|
||||
"target/remote_server/{}/debug/remote_server.gz",
|
||||
triple
|
||||
));
|
||||
|
||||
return Ok(Some((path, version)));
|
||||
} else {
|
||||
return Ok(None);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub fn is_connecting_over_ssh(workspace: &Workspace, cx: &AppContext) -> bool {
|
||||
@@ -694,7 +535,7 @@ pub fn is_connecting_over_ssh(workspace: &Workspace, cx: &AppContext) -> bool {
|
||||
}
|
||||
|
||||
pub fn connect_over_ssh(
|
||||
unique_identifier: String,
|
||||
unique_identifier: ConnectionIdentifier,
|
||||
connection_options: SshConnectionOptions,
|
||||
ui: View<SshPrompt>,
|
||||
cx: &mut WindowContext,
|
||||
|
||||
@@ -24,6 +24,7 @@ collections.workspace = true
|
||||
fs.workspace = true
|
||||
futures.workspace = true
|
||||
gpui.workspace = true
|
||||
itertools.workspace = true
|
||||
log.workspace = true
|
||||
parking_lot.workspace = true
|
||||
prost.workspace = true
|
||||
@@ -35,6 +36,7 @@ smol.workspace = true
|
||||
tempfile.workspace = true
|
||||
thiserror.workspace = true
|
||||
util.workspace = true
|
||||
release_channel.workspace = true
|
||||
|
||||
[dev-dependencies]
|
||||
gpui = { workspace = true, features = ["test-support"] }
|
||||
|
||||
@@ -20,7 +20,9 @@ use gpui::{
|
||||
AppContext, AsyncAppContext, BorrowAppContext, Context, EventEmitter, Global, Model,
|
||||
ModelContext, SemanticVersion, Task, WeakModel,
|
||||
};
|
||||
use itertools::Itertools;
|
||||
use parking_lot::Mutex;
|
||||
use release_channel::{AppCommitSha, AppVersion, ReleaseChannel};
|
||||
use rpc::{
|
||||
proto::{self, build_typed_envelope, Envelope, EnvelopedMessage, PeerId, RequestMessage},
|
||||
AnyProtoClient, EntityMessageSubscriber, ErrorExt, ProtoClient, ProtoMessageHandlerSet,
|
||||
@@ -33,8 +35,7 @@ use smol::{
|
||||
use std::{
|
||||
any::TypeId,
|
||||
collections::VecDeque,
|
||||
ffi::OsStr,
|
||||
fmt,
|
||||
fmt, iter,
|
||||
ops::ControlFlow,
|
||||
path::{Path, PathBuf},
|
||||
sync::{
|
||||
@@ -69,6 +70,18 @@ pub struct SshConnectionOptions {
pub upload_binary_over_ssh: bool,
}

#[macro_export]
macro_rules! shell_script {
($fmt:expr, $($name:ident = $arg:expr),+ $(,)?) => {{
format!(
$fmt,
$(
$name = shlex::try_quote($arg).unwrap()
),+
)
}};
}

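The new `shell_script!` macro runs every named argument through `shlex::try_quote` before substituting it into the format string, so values survive the extra word-splitting that `ssh <host> <command>` performs on the remote side. An illustrative use, with a made-up path that is not taken from the diff:

```rust
// Hypothetical value, only to show the quoting behaviour.
let lock_file = "/tmp/zed server/update.lock";
let cmd = shell_script!(
    r#"[ -f {lock_file} ] && echo "exists" || echo "missing""#,
    lock_file = lock_file,
);
// `shlex::try_quote` wraps the path in single quotes because it contains a space:
// [ -f '/tmp/zed server/update.lock' ] && echo "exists" || echo "missing"
assert!(cmd.contains("'/tmp/zed server/update.lock'"));
```
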
impl SshConnectionOptions {
|
||||
pub fn parse_command_line(input: &str) -> Result<Self> {
|
||||
let input = input.trim_start_matches("ssh ");
|
||||
@@ -189,17 +202,6 @@ impl SshConnectionOptions {
|
||||
host
|
||||
}
|
||||
}
|
||||
|
||||
// Uniquely identifies dev server projects on a remote host. Needs to be
|
||||
// stable for the same dev server project.
|
||||
pub fn remote_server_identifier(&self) -> String {
|
||||
let mut identifier = format!("dev-server-{:?}", self.host);
|
||||
if let Some(username) = self.username.as_ref() {
|
||||
identifier.push('-');
|
||||
identifier.push_str(&username);
|
||||
}
|
||||
identifier
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone, Debug)]
|
||||
@@ -227,10 +229,19 @@ pub enum ServerBinary {
|
||||
ReleaseUrl { url: String, body: String },
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, PartialEq, Eq)]
|
||||
pub enum ServerVersion {
|
||||
Semantic(SemanticVersion),
|
||||
Commit(String),
|
||||
}
|
||||
impl ServerVersion {
|
||||
pub fn semantic_version(&self) -> Option<SemanticVersion> {
|
||||
match self {
|
||||
Self::Semantic(version) => Some(*version),
|
||||
_ => None,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl std::fmt::Display for ServerVersion {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
@@ -252,24 +263,45 @@ pub trait SshClientDelegate: Send + Sync {
|
||||
platform: SshPlatform,
|
||||
cx: &mut AsyncAppContext,
|
||||
) -> Result<PathBuf>;
|
||||
fn get_server_binary(
|
||||
fn get_download_params(
|
||||
&self,
|
||||
platform: SshPlatform,
|
||||
upload_binary_over_ssh: bool,
|
||||
release_channel: ReleaseChannel,
|
||||
version: Option<SemanticVersion>,
|
||||
cx: &mut AsyncAppContext,
|
||||
) -> oneshot::Receiver<Result<(ServerBinary, ServerVersion)>>;
|
||||
) -> Task<Result<(String, String)>>;
|
||||
|
||||
fn download_server_binary_locally(
|
||||
&self,
|
||||
platform: SshPlatform,
|
||||
release_channel: ReleaseChannel,
|
||||
version: Option<SemanticVersion>,
|
||||
cx: &mut AsyncAppContext,
|
||||
) -> Task<Result<PathBuf>>;
|
||||
fn set_status(&self, status: Option<&str>, cx: &mut AsyncAppContext);
|
||||
}
|
||||
|
||||
impl SshSocket {
fn ssh_command<S: AsRef<OsStr>>(&self, program: S) -> process::Command {
// :WARNING: ssh unquotes arguments when executing on the remote :WARNING:
// e.g. $ ssh host sh -c 'ls -l' is equivalent to $ ssh host sh -c ls -l
// and passes -l as an argument to sh, not to ls.
// You need to do it like this: $ ssh host "sh -c 'ls -l /tmp'"
fn ssh_command(&self, program: &str, args: &[&str]) -> process::Command {
let mut command = process::Command::new("ssh");
let to_run = iter::once(&program)
.chain(args.iter())
.map(|token| shlex::try_quote(token).unwrap())
.join(" ");
self.ssh_options(&mut command)
.arg(self.connection_options.ssh_url())
.arg(program);
.arg(to_run);
command
}

fn shell_script(&self, script: impl AsRef<str>) -> process::Command {
return self.ssh_command("sh", &["-c", script.as_ref()]);
}

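The reworked `ssh_command` exists because of the warning above: quoting each token locally with `shlex::try_quote` and joining them into a single argument preserves the intended word boundaries when the remote shell re-splits the command. A small stand-alone illustration of that joining step; the program and arguments are invented for the example:

```rust
use itertools::Itertools as _; // for `join`, as in the surrounding code

// `ssh host sh -c 'ls -l /tmp'` would be re-split on the remote into `sh -c ls -l /tmp`;
// quoting locally produces the equivalent of `ssh host "sh -c 'ls -l /tmp'"`.
let program = "sh";
let args = ["-c", "ls -l /tmp"];
let to_run = std::iter::once(program)
    .chain(args)
    .map(|token| shlex::try_quote(token).unwrap())
    .join(" ");
assert_eq!(to_run, "sh -c 'ls -l /tmp'");
```
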
fn ssh_options<'a>(&self, command: &'a mut process::Command) -> &'a mut process::Command {
|
||||
command
|
||||
.stdin(Stdio::piped())
|
||||
@@ -290,7 +322,7 @@ impl SshSocket {
|
||||
}
|
||||
}
|
||||
|
||||
async fn run_cmd(command: &mut process::Command) -> Result<String> {
|
||||
async fn run_cmd(mut command: process::Command) -> Result<String> {
|
||||
let output = command.output().await?;
|
||||
if output.status.success() {
|
||||
Ok(String::from_utf8_lossy(&output.stdout).to_string())
|
||||
@@ -477,14 +509,43 @@ pub enum SshRemoteEvent {

impl EventEmitter<SshRemoteEvent> for SshRemoteClient {}

// Identifies the socket on the remote server so that reconnects
// can re-join the same project.
pub enum ConnectionIdentifier {
Setup,
Workspace(i64),
}

impl ConnectionIdentifier {
// This string gets used in a socket name, and so must be relatively short.
// The total length of:
// /home/{username}/.local/share/zed/server_state/{name}/stdout.sock
// Must be less than about 100 characters
// https://unix.stackexchange.com/questions/367008/why-is-socket-path-length-limited-to-a-hundred-chars
// So our strings should be at most 20 characters or so.
fn to_string(&self, cx: &AppContext) -> String {
let identifier_prefix = match ReleaseChannel::global(cx) {
ReleaseChannel::Stable => "".to_string(),
release_channel => format!("{}-", release_channel.dev_name()),
};
match self {
Self::Setup => format!("{identifier_prefix}setup"),
Self::Workspace(workspace_id) => {
format!("{identifier_prefix}workspace-{workspace_id}",)
}
}
}
}

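A quick sanity check of the length budget described in the comment above, using an invented username and workspace id (neither value is from the diff): the resulting socket path stays well under the ~100-character limit the comment cites.

```rust
// Hypothetical values, only to illustrate the budget.
let username = "someuser";
let name = "nightly-workspace-42"; // e.g. Workspace(42) on a non-stable channel
let socket_path =
    format!("/home/{username}/.local/share/zed/server_state/{name}/stdout.sock");
// Roughly 77 characters here; identifiers around 20 characters keep typical
// paths safely below the ~100-character unix socket path limit.
assert!(socket_path.len() < 100);
```
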
impl SshRemoteClient {
|
||||
pub fn new(
|
||||
unique_identifier: String,
|
||||
unique_identifier: ConnectionIdentifier,
|
||||
connection_options: SshConnectionOptions,
|
||||
cancellation: oneshot::Receiver<()>,
|
||||
delegate: Arc<dyn SshClientDelegate>,
|
||||
cx: &mut AppContext,
|
||||
) -> Task<Result<Option<Model<Self>>>> {
|
||||
let unique_identifier = unique_identifier.to_string(cx);
|
||||
cx.spawn(|mut cx| async move {
|
||||
let success = Box::pin(async move {
|
||||
let (outgoing_tx, outgoing_rx) = mpsc::unbounded::<Envelope>();
|
||||
@@ -1053,7 +1114,15 @@ impl SshRemoteClient {
|
||||
) -> Model<Self> {
|
||||
let (_tx, rx) = oneshot::channel();
|
||||
client_cx
|
||||
.update(|cx| Self::new("fake".to_string(), opts, rx, Arc::new(fake::Delegate), cx))
|
||||
.update(|cx| {
|
||||
Self::new(
|
||||
ConnectionIdentifier::Setup,
|
||||
opts,
|
||||
rx,
|
||||
Arc::new(fake::Delegate),
|
||||
cx,
|
||||
)
|
||||
})
|
||||
.await
|
||||
.unwrap()
|
||||
.unwrap()
|
||||
@@ -1217,7 +1286,7 @@ impl RemoteConnection for SshRemoteConnection {
|
||||
}
|
||||
|
||||
let socket = self.socket.clone();
|
||||
run_cmd(socket.ssh_command(&remote_binary_path).arg("version")).await?;
|
||||
run_cmd(socket.ssh_command(&remote_binary_path.to_string_lossy(), &["version"])).await?;
|
||||
Ok(remote_binary_path)
|
||||
}
|
||||
|
||||
@@ -1234,22 +1303,33 @@ impl RemoteConnection for SshRemoteConnection {
|
||||
) -> Task<Result<i32>> {
|
||||
delegate.set_status(Some("Starting proxy"), cx);
|
||||
|
||||
let mut start_proxy_command = format!(
|
||||
"RUST_LOG={} {} {:?} proxy --identifier {}",
|
||||
std::env::var("RUST_LOG").unwrap_or_default(),
|
||||
std::env::var("RUST_BACKTRACE")
|
||||
.map(|b| { format!("RUST_BACKTRACE={}", b) })
|
||||
.unwrap_or_default(),
|
||||
remote_binary_path,
|
||||
unique_identifier,
|
||||
let mut start_proxy_command = shell_script!(
|
||||
"exec {binary_path} proxy --identifier {identifier}",
|
||||
binary_path = &remote_binary_path.to_string_lossy(),
|
||||
identifier = &unique_identifier,
|
||||
);
|
||||
|
||||
if let Some(rust_log) = std::env::var("RUST_LOG").ok() {
|
||||
start_proxy_command = format!(
|
||||
"RUST_LOG={} {}",
|
||||
shlex::try_quote(&rust_log).unwrap(),
|
||||
start_proxy_command
|
||||
)
|
||||
}
|
||||
if let Some(rust_backtrace) = std::env::var("RUST_BACKTRACE").ok() {
|
||||
start_proxy_command = format!(
|
||||
"RUST_BACKTRACE={} {}",
|
||||
shlex::try_quote(&rust_backtrace).unwrap(),
|
||||
start_proxy_command
|
||||
)
|
||||
}
|
||||
if reconnect {
|
||||
start_proxy_command.push_str(" --reconnect");
|
||||
}
|
||||
|
||||
let ssh_proxy_process = match self
|
||||
.socket
|
||||
.ssh_command(start_proxy_command)
|
||||
.shell_script(start_proxy_command)
|
||||
// IMPORTANT: we kill this process when we drop the task that uses it.
|
||||
.kill_on_drop(true)
|
||||
.spawn()
|
||||
@@ -1431,8 +1511,8 @@ impl SshRemoteConnection {
|
||||
socket_path,
|
||||
};
|
||||
|
||||
let os = run_cmd(socket.ssh_command("uname").arg("-s")).await?;
|
||||
let arch = run_cmd(socket.ssh_command("uname").arg("-m")).await?;
|
||||
let os = run_cmd(socket.ssh_command("uname", &["-s"])).await?;
|
||||
let arch = run_cmd(socket.ssh_command("uname", &["-m"])).await?;
|
||||
|
||||
let os = match os.trim() {
|
||||
"Darwin" => "macos",
|
||||
@@ -1630,14 +1710,9 @@ impl SshRemoteConnection {
|
||||
}
|
||||
|
||||
async fn get_ssh_source_port(&self) -> Result<String> {
|
||||
let output = run_cmd(
|
||||
self.socket
|
||||
.ssh_command("sh")
|
||||
.arg("-c")
|
||||
.arg(r#""echo $SSH_CLIENT | cut -d' ' -f2""#),
|
||||
)
|
||||
.await
|
||||
.context("failed to get source port from SSH_CLIENT on host")?;
|
||||
let output = run_cmd(self.socket.shell_script("echo $SSH_CLIENT | cut -d' ' -f2"))
|
||||
.await
|
||||
.context("failed to get source port from SSH_CLIENT on host")?;
|
||||
|
||||
Ok(output.trim().to_string())
|
||||
}
|
||||
@@ -1648,13 +1723,13 @@ impl SshRemoteConnection {
|
||||
.ok_or_else(|| anyhow!("Lock file path has no parent directory"))?;
|
||||
|
||||
let script = format!(
|
||||
r#"'mkdir -p "{parent_dir}" && [ ! -f "{lock_file}" ] && echo "{content}" > "{lock_file}" && echo "created" || echo "exists"'"#,
|
||||
r#"mkdir -p "{parent_dir}" && [ ! -f "{lock_file}" ] && echo "{content}" > "{lock_file}" && echo "created" || echo "exists""#,
|
||||
parent_dir = parent_dir.display(),
|
||||
lock_file = lock_file.display(),
|
||||
content = content,
|
||||
);
|
||||
|
||||
let output = run_cmd(self.socket.ssh_command("sh").arg("-c").arg(&script))
|
||||
let output = run_cmd(self.socket.shell_script(&script))
|
||||
.await
|
||||
.with_context(|| format!("failed to create a lock file at {:?}", lock_file))?;
|
||||
|
||||
@@ -1662,7 +1737,7 @@ impl SshRemoteConnection {
|
||||
}
|
||||
|
||||
fn generate_stale_check_script(lock_file: &Path, max_age: u64) -> String {
|
||||
format!(
|
||||
shell_script!(
|
||||
r#"
|
||||
if [ ! -f "{lock_file}" ]; then
|
||||
echo "lock file does not exist"
|
||||
@@ -1690,18 +1765,15 @@ impl SshRemoteConnection {
|
||||
else
|
||||
echo "recent"
|
||||
fi"#,
|
||||
lock_file = lock_file.display(),
|
||||
max_age = max_age
|
||||
lock_file = &lock_file.to_string_lossy(),
|
||||
max_age = &max_age.to_string()
|
||||
)
|
||||
}
|
||||
|
||||
async fn is_lock_stale(&self, lock_file: &Path, max_age: &Duration) -> Result<bool> {
|
||||
let script = format!(
|
||||
"'{}'",
|
||||
Self::generate_stale_check_script(lock_file, max_age.as_secs())
|
||||
);
|
||||
let script = Self::generate_stale_check_script(lock_file, max_age.as_secs());
|
||||
|
||||
let output = run_cmd(self.socket.ssh_command("sh").arg("-c").arg(&script))
|
||||
let output = run_cmd(self.socket.shell_script(script))
|
||||
.await
|
||||
.with_context(|| {
|
||||
format!("failed to check whether lock file {:?} is stale", lock_file)
|
||||
@@ -1714,9 +1786,12 @@ impl SshRemoteConnection {
|
||||
}
|
||||
|
||||
async fn remove_lock_file(&self, lock_file: &Path) -> Result<()> {
|
||||
run_cmd(self.socket.ssh_command("rm").arg("-f").arg(lock_file))
|
||||
.await
|
||||
.context("failed to remove lock file")?;
|
||||
run_cmd(
|
||||
self.socket
|
||||
.ssh_command("rm", &["-f", &lock_file.to_string_lossy()]),
|
||||
)
|
||||
.await
|
||||
.context("failed to remove lock file")?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
@@ -1727,109 +1802,149 @@ impl SshRemoteConnection {
|
||||
platform: SshPlatform,
|
||||
cx: &mut AsyncAppContext,
|
||||
) -> Result<()> {
|
||||
if std::env::var("ZED_USE_CACHED_REMOTE_SERVER").is_ok() {
|
||||
if let Ok(installed_version) =
|
||||
run_cmd(self.socket.ssh_command(dst_path).arg("version")).await
|
||||
{
|
||||
log::info!("using cached server binary version {}", installed_version);
|
||||
return Ok(());
|
||||
}
|
||||
}
|
||||
|
||||
if cfg!(not(debug_assertions)) {
|
||||
// When we're not in dev mode, we don't want to switch out the binary if it's
|
||||
// still open.
|
||||
// In dev mode, that's fine, since we often kill Zed processes with Ctrl-C and want
|
||||
// to still replace the binary.
|
||||
if self.is_binary_in_use(dst_path).await? {
|
||||
log::info!("server binary is opened by another process. not updating");
|
||||
delegate.set_status(
|
||||
Some("Skipping update of remote development server, since it's still in use"),
|
||||
cx,
|
||||
);
|
||||
return Ok(());
|
||||
}
|
||||
}
|
||||
|
||||
let upload_binary_over_ssh = self.socket.connection_options.upload_binary_over_ssh;
|
||||
let (binary, new_server_version) = delegate
|
||||
.get_server_binary(platform, upload_binary_over_ssh, cx)
|
||||
.await??;
|
||||
|
||||
if cfg!(not(debug_assertions)) {
|
||||
let installed_version = if let Ok(version_output) =
|
||||
run_cmd(self.socket.ssh_command(dst_path).arg("version")).await
|
||||
{
|
||||
let current_version = match run_cmd(
|
||||
self.socket
|
||||
.ssh_command(&dst_path.to_string_lossy(), &["version"]),
|
||||
)
|
||||
.await
|
||||
{
|
||||
Ok(version_output) => {
|
||||
if let Ok(version) = version_output.trim().parse::<SemanticVersion>() {
|
||||
Some(ServerVersion::Semantic(version))
|
||||
} else {
|
||||
Some(ServerVersion::Commit(version_output.trim().to_string()))
|
||||
}
|
||||
} else {
|
||||
None
|
||||
}
|
||||
Err(_) => None,
|
||||
};
|
||||
let (release_channel, wanted_version) = cx.update(|cx| {
|
||||
let release_channel = ReleaseChannel::global(cx);
|
||||
let wanted_version = match release_channel {
|
||||
ReleaseChannel::Nightly => {
|
||||
AppCommitSha::try_global(cx).map(|sha| ServerVersion::Commit(sha.0))
|
||||
}
|
||||
ReleaseChannel::Dev => None,
|
||||
_ => Some(ServerVersion::Semantic(AppVersion::global(cx))),
|
||||
};
|
||||
(release_channel, wanted_version)
|
||||
})?;
|
||||
|
||||
if let Some(installed_version) = installed_version {
|
||||
use ServerVersion::*;
|
||||
match (installed_version, new_server_version) {
|
||||
(Semantic(installed), Semantic(new)) if installed == new => {
|
||||
log::info!("remote development server present and matching client version");
|
||||
return Ok(());
|
||||
}
|
||||
(Semantic(installed), Semantic(new)) if installed > new => {
|
||||
let error = anyhow!("The version of the remote server ({}) is newer than the Zed version ({}). Please update Zed.", installed, new);
|
||||
return Err(error);
|
||||
}
|
||||
(Commit(installed), Commit(new)) if installed == new => {
|
||||
log::info!(
|
||||
"remote development server present and matching client version {}",
|
||||
installed
|
||||
);
|
||||
return Ok(());
|
||||
}
|
||||
(installed, _) => {
|
||||
log::info!(
|
||||
"remote development server has version: {}. updating...",
|
||||
installed
|
||||
);
|
||||
}
|
||||
match (&current_version, &wanted_version) {
|
||||
(Some(current), Some(wanted)) if current == wanted => {
|
||||
log::info!("remote development server present and matching client version");
|
||||
return Ok(());
|
||||
}
|
||||
(Some(ServerVersion::Semantic(current)), Some(ServerVersion::Semantic(wanted)))
|
||||
if current > wanted =>
|
||||
{
|
||||
anyhow::bail!("The version of the remote server ({}) is newer than the Zed version ({}). Please update Zed.", current, wanted);
|
||||
}
|
||||
_ => {
|
||||
log::info!("Installing remote development server");
|
||||
}
|
||||
}
|
||||
|
||||
if self.is_binary_in_use(dst_path).await? {
|
||||
// When we're not in dev mode, we don't want to switch out the binary if it's
|
||||
// still open.
|
||||
// In dev mode, that's fine, since we often kill Zed processes with Ctrl-C and want
|
||||
// to still replace the binary.
|
||||
if cfg!(not(debug_assertions)) {
|
||||
anyhow::bail!("The remote server version ({:?}) does not match the wanted version ({:?}), but is in use by another Zed client so cannot be upgraded.", ¤t_version, &wanted_version)
|
||||
} else {
|
||||
log::info!("Binary is currently in use, ignoring because this is a dev build")
|
||||
}
|
||||
}
|
||||
|
||||
if wanted_version.is_none() {
|
||||
if std::env::var("ZED_BUILD_REMOTE_SERVER").is_err() {
|
||||
if let Some(current_version) = current_version {
|
||||
log::warn!(
|
||||
"In development, using cached remote server binary version ({})",
|
||||
current_version
|
||||
);
|
||||
|
||||
return Ok(());
|
||||
} else {
|
||||
anyhow::bail!(
|
||||
"ZED_BUILD_REMOTE_SERVER is not set, but no remote server exists at ({:?})",
|
||||
dst_path
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(debug_assertions)]
|
||||
{
|
||||
let src_path = self.build_local(platform, delegate, cx).await?;
|
||||
|
||||
return self
|
||||
.upload_local_server_binary(&src_path, dst_path, delegate, cx)
|
||||
.await;
|
||||
}
|
||||
|
||||
#[cfg(not(debug_assertions))]
|
||||
anyhow::bail!("Running development build in release mode, cannot cross compile (unset ZED_BUILD_REMOTE_SERVER)")
|
||||
}
|
||||
|
||||
let upload_binary_over_ssh = self.socket.connection_options.upload_binary_over_ssh;
|
||||
|
||||
if !upload_binary_over_ssh {
|
||||
let (url, body) = delegate
|
||||
.get_download_params(
|
||||
platform,
|
||||
release_channel,
|
||||
wanted_version.clone().and_then(|v| v.semantic_version()),
|
||||
cx,
|
||||
)
|
||||
.await?;
|
||||
|
||||
match self
|
||||
.download_binary_on_server(&url, &body, dst_path, delegate, cx)
|
||||
.await
|
||||
{
|
||||
Ok(_) => return Ok(()),
|
||||
Err(e) => {
|
||||
log::error!(
|
||||
"Failed to download binary on server, attempting to upload server: {}",
|
||||
e
|
||||
)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
match binary {
|
||||
ServerBinary::LocalBinary(src_path) => {
|
||||
self.upload_local_server_binary(&src_path, dst_path, delegate, cx)
|
||||
.await
|
||||
}
|
||||
ServerBinary::ReleaseUrl { url, body } => {
|
||||
self.download_binary_on_server(&url, &body, dst_path, delegate, cx)
|
||||
.await
|
||||
}
|
||||
}
|
||||
let src_path = delegate
|
||||
.download_server_binary_locally(
|
||||
platform,
|
||||
release_channel,
|
||||
wanted_version.and_then(|v| v.semantic_version()),
|
||||
cx,
|
||||
)
|
||||
.await?;
|
||||
|
||||
self.upload_local_server_binary(&src_path, dst_path, delegate, cx)
|
||||
.await
|
||||
}
|
||||
|
||||
async fn is_binary_in_use(&self, binary_path: &Path) -> Result<bool> {
|
||||
let script = format!(
|
||||
r#"'
|
||||
let script = shell_script!(
|
||||
r#"
|
||||
if command -v lsof >/dev/null 2>&1; then
|
||||
if lsof "{}" >/dev/null 2>&1; then
|
||||
if lsof "{binary_path}" >/dev/null 2>&1; then
|
||||
echo "in_use"
|
||||
exit 0
|
||||
fi
|
||||
elif command -v fuser >/dev/null 2>&1; then
|
||||
if fuser "{}" >/dev/null 2>&1; then
|
||||
if fuser "{binary_path}" >/dev/null 2>&1; then
|
||||
echo "in_use"
|
||||
exit 0
|
||||
fi
|
||||
fi
|
||||
echo "not_in_use"
|
||||
'"#,
|
||||
binary_path.display(),
|
||||
binary_path.display(),
|
||||
"#,
|
||||
binary_path = &binary_path.to_string_lossy(),
|
||||
);
|
||||
|
||||
let output = run_cmd(self.socket.ssh_command("sh").arg("-c").arg(script))
|
||||
let output = run_cmd(self.socket.shell_script(script))
|
||||
.await
|
||||
.context("failed to check if binary is in use")?;
|
||||
|
||||
@@ -1848,31 +1963,32 @@ impl SshRemoteConnection {
|
||||
dst_path_gz.set_extension("gz");
|
||||
|
||||
if let Some(parent) = dst_path.parent() {
|
||||
run_cmd(self.socket.ssh_command("mkdir").arg("-p").arg(parent)).await?;
|
||||
run_cmd(
|
||||
self.socket
|
||||
.ssh_command("mkdir", &["-p", &parent.to_string_lossy()]),
|
||||
)
|
||||
.await?;
|
||||
}
|
||||
|
||||
delegate.set_status(Some("Downloading remote development server on host"), cx);
|
||||
|
||||
let script = format!(
|
||||
let script = shell_script!(
|
||||
r#"
|
||||
if command -v wget >/dev/null 2>&1; then
|
||||
wget --max-redirect=5 --method=GET --header="Content-Type: application/json" --body-data='{}' '{}' -O '{}' && echo "wget"
|
||||
elif command -v curl >/dev/null 2>&1; then
|
||||
curl -L -X GET -H "Content-Type: application/json" -d '{}' '{}' -o '{}' && echo "curl"
|
||||
if command -v curl >/dev/null 2>&1; then
|
||||
curl -f -L -X GET -H "Content-Type: application/json" -d {body} {url} -o {dst_path} && echo "curl"
|
||||
elif command -v wget >/dev/null 2>&1; then
|
||||
wget --max-redirect=5 --method=GET --header="Content-Type: application/json" --body-data={body} {url} -O {dst_path} && echo "wget"
|
||||
else
|
||||
echo "Neither curl nor wget is available" >&2
|
||||
exit 1
|
||||
fi
|
||||
"#,
|
||||
body.replace("'", r#"\'"#),
|
||||
url,
|
||||
dst_path_gz.display(),
|
||||
body.replace("'", r#"\'"#),
|
||||
url,
|
||||
dst_path_gz.display(),
|
||||
body = body,
|
||||
url = url,
|
||||
dst_path = &dst_path_gz.to_string_lossy(),
|
||||
);
|
||||
|
||||
let output = run_cmd(self.socket.ssh_command("bash").arg("-c").arg(script))
|
||||
let output = run_cmd(self.socket.shell_script(script))
|
||||
.await
|
||||
.context("Failed to download server binary")?;
|
||||
|
||||
@@ -1895,7 +2011,11 @@ impl SshRemoteConnection {
|
||||
dst_path_gz.set_extension("gz");
|
||||
|
||||
if let Some(parent) = dst_path.parent() {
|
||||
run_cmd(self.socket.ssh_command("mkdir").arg("-p").arg(parent)).await?;
|
||||
run_cmd(
|
||||
self.socket
|
||||
.ssh_command("mkdir", &["-p", &parent.to_string_lossy()]),
|
||||
)
|
||||
.await?;
|
||||
}
|
||||
|
||||
let src_stat = fs::metadata(&src_path).await?;
|
||||
@@ -1923,20 +2043,16 @@ impl SshRemoteConnection {
delegate.set_status(Some("Extracting remote development server"), cx);
run_cmd(
self.socket
.ssh_command("gunzip")
.arg("--force")
.arg(&dst_path_gz),
.ssh_command("gunzip", &["-f", &dst_path_gz.to_string_lossy()]),
)
.await?;

let server_mode = 0o755;
delegate.set_status(Some("Marking remote development server executable"), cx);
run_cmd(
self.socket
.ssh_command("chmod")
.arg(format!("{:o}", server_mode))
.arg(dst_path),
)
run_cmd(self.socket.ssh_command(
"chmod",
&[&format!("{:o}", server_mode), &dst_path.to_string_lossy()],
))
.await?;

Ok(())
@@ -1974,6 +2090,113 @@ impl SshRemoteConnection {
))
}
}

#[cfg(debug_assertions)]
async fn build_local(
&self,
platform: SshPlatform,
delegate: &Arc<dyn SshClientDelegate>,
cx: &mut AsyncAppContext,
) -> Result<PathBuf> {
use smol::process::{Command, Stdio};

async fn run_cmd(command: &mut Command) -> Result<()> {
let output = command
.kill_on_drop(true)
.stderr(Stdio::inherit())
.output()
.await?;
if !output.status.success() {
Err(anyhow!("Failed to run command: {:?}", command))?;
}
Ok(())
}

if platform.arch == std::env::consts::ARCH && platform.os == std::env::consts::OS {
delegate.set_status(Some("Building remote server binary from source"), cx);
log::info!("building remote server binary from source");
run_cmd(Command::new("cargo").args([
"build",
"--package",
"remote_server",
"--features",
"debug-embed",
"--target-dir",
"target/remote_server",
]))
.await?;

delegate.set_status(Some("Compressing binary"), cx);

run_cmd(Command::new("gzip").args([
"-9",
"-f",
"target/remote_server/debug/remote_server",
]))
.await?;

let path = std::env::current_dir()?.join("target/remote_server/debug/remote_server.gz");
return Ok(path);
}
let Some(triple) = platform.triple() else {
anyhow::bail!("can't cross compile for: {:?}", platform);
};
smol::fs::create_dir_all("target/remote_server").await?;

delegate.set_status(Some("Installing cross.rs for cross-compilation"), cx);
log::info!("installing cross");
run_cmd(Command::new("cargo").args([
"install",
"cross",
"--git",
"https://github.com/cross-rs/cross",
]))
.await?;

delegate.set_status(
Some(&format!(
"Building remote server binary from source for {} with Docker",
&triple
)),
cx,
);
log::info!("building remote server binary from source for {}", &triple);
run_cmd(
Command::new("cross")
.args([
"build",
"--package",
"remote_server",
"--features",
"debug-embed",
"--target-dir",
"target/remote_server",
"--target",
&triple,
])
.env(
"CROSS_CONTAINER_OPTS",
"--mount type=bind,src=./target,dst=/app/target",
),
)
.await?;

delegate.set_status(Some("Compressing binary"), cx);

run_cmd(Command::new("gzip").args([
"-9",
"-f",
&format!("target/remote_server/{}/debug/remote_server", triple),
]))
.await?;

let path = std::env::current_dir()?.join(format!(
"target/remote_server/{}/debug/remote_server.gz",
triple
));

return Ok(path);
}
}

type ResponseChannels = Mutex<HashMap<MessageId, oneshot::Sender<(Envelope, oneshot::Sender<()>)>>>;
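For orientation, the cross-compilation branch of the new `build_local` amounts to roughly the following manual steps. This is a sketch, not the committed code; the target triple is an assumed example, whereas the real code derives it from `platform.triple()`:

```sh
# Assumed example triple; build_local picks the real one from the remote platform.
TRIPLE="x86_64-unknown-linux-gnu"

# Install cross.rs, then build the remote_server package for the target.
cargo install cross --git https://github.com/cross-rs/cross

CROSS_CONTAINER_OPTS="--mount type=bind,src=./target,dst=/app/target" \
  cross build --package remote_server --features debug-embed \
    --target-dir target/remote_server --target "$TRIPLE"

# Compress the debug binary, mirroring the gzip -9 -f step in the diff.
gzip -9 -f "target/remote_server/$TRIPLE/debug/remote_server"
```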
@@ -2295,12 +2518,12 @@ mod fake {
},
select_biased, FutureExt, SinkExt, StreamExt,
};
use gpui::{AsyncAppContext, Task, TestAppContext};
use gpui::{AsyncAppContext, SemanticVersion, Task, TestAppContext};
use release_channel::ReleaseChannel;
use rpc::proto::Envelope;

use super::{
ChannelClient, RemoteConnection, ServerBinary, ServerVersion, SshClientDelegate,
SshConnectionOptions, SshPlatform,
ChannelClient, RemoteConnection, SshClientDelegate, SshConnectionOptions, SshPlatform,
};

pub(super) struct FakeRemoteConnection {
@@ -2412,23 +2635,36 @@ mod fake {
) -> oneshot::Receiver<Result<String>> {
unreachable!()
}
fn remote_server_binary_path(

fn download_server_binary_locally(
&self,
_: SshPlatform,
_: ReleaseChannel,
_: Option<SemanticVersion>,
_: &mut AsyncAppContext,
) -> Result<PathBuf> {
) -> Task<Result<PathBuf>> {
unreachable!()
}
fn get_server_binary(

fn get_download_params(
&self,
_: SshPlatform,
_: bool,
_: &mut AsyncAppContext,
) -> oneshot::Receiver<Result<(ServerBinary, ServerVersion)>> {
_platform: SshPlatform,
_release_channel: ReleaseChannel,
_version: Option<SemanticVersion>,
_cx: &mut AsyncAppContext,
) -> Task<Result<(String, String)>> {
unreachable!()
}

fn set_status(&self, _: Option<&str>, _: &mut AsyncAppContext) {}

fn remote_server_binary_path(
&self,
_platform: SshPlatform,
_cx: &mut AsyncAppContext,
) -> Result<PathBuf> {
unreachable!()
}
}
}
@@ -2,7 +2,7 @@ use crate::headless_project::HeadlessProject;
use client::{Client, UserStore};
use clock::FakeSystemClock;
use fs::{FakeFs, Fs};
use gpui::{Context, Model, TestAppContext};
use gpui::{Context, Model, SemanticVersion, TestAppContext};
use http_client::{BlockedHttpClient, FakeHttpClient};
use language::{
language_settings::{language_settings, AllLanguageSettings},
@@ -1184,6 +1184,9 @@ pub async fn init_test(
server_cx: &mut TestAppContext,
) -> (Model<Project>, Model<HeadlessProject>) {
let server_fs = server_fs.clone();
cx.update(|cx| {
release_channel::init(SemanticVersion::default(), cx);
});
init_logger();

let (opts, ssh_server_client) = SshRemoteClient::fake_server(cx, server_cx);
@@ -16,7 +16,7 @@ pub struct ImageView {

impl ImageView {
pub fn from(base64_encoded_data: &str) -> Result<Self> {
let bytes = BASE64_STANDARD.decode(base64_encoded_data)?;
let bytes = BASE64_STANDARD.decode(base64_encoded_data.trim())?;

let format = image::guess_format(&bytes)?;
let mut data = image::load_from_memory_with_format(&bytes, format)?.into_rgba8();
@@ -50,7 +50,6 @@ parking_lot.workspace = true
postage.workspace = true
project.workspace = true
task.workspace = true
release_channel.workspace = true
remote.workspace = true
schemars.workspace = true
serde.workspace = true
@@ -63,8 +63,7 @@ use postage::stream::Stream;
use project::{
DirectoryLister, Project, ProjectEntryId, ProjectPath, ResolvedPath, Worktree, WorktreeId,
};
use release_channel::ReleaseChannel;
use remote::{SshClientDelegate, SshConnectionOptions};
use remote::{ssh_session::ConnectionIdentifier, SshClientDelegate, SshConnectionOptions};
use serde::Deserialize;
use session::AppSession;
use settings::Settings;
@@ -4665,7 +4664,7 @@ enum ActivateInDirectionTarget {
}

fn notify_if_database_failed(workspace: WindowHandle<Workspace>, cx: &mut AsyncAppContext) {
const REPORT_ISSUE_URL: &str = "https://github.com/zed-industries/zed/issues/new?assignees=&labels=defect%2Ctriage&template=2_bug_report.yml";
const REPORT_ISSUE_URL: &str = "https://github.com/zed-industries/zed/issues/new?assignees=&labels=admin+read%2Ctriage%2Cbug&projects=&template=1_bug_report.yml";

workspace
.update(cx, |workspace, cx| {
@@ -5494,26 +5493,14 @@ pub fn open_ssh_project(
paths: Vec<PathBuf>,
cx: &mut AppContext,
) -> Task<Result<()>> {
let release_channel = ReleaseChannel::global(cx);

cx.spawn(|mut cx| async move {
let (serialized_ssh_project, workspace_id, serialized_workspace) =
serialize_ssh_project(connection_options.clone(), paths.clone(), &cx).await?;

let identifier_prefix = match release_channel {
ReleaseChannel::Stable => None,
_ => Some(format!("{}-", release_channel.dev_name())),
};
let unique_identifier = format!(
"{}workspace-{}",
identifier_prefix.unwrap_or_default(),
workspace_id.0
);

let session = match cx
.update(|cx| {
remote::SshRemoteClient::new(
unique_identifier,
ConnectionIdentifier::Workspace(workspace_id.0),
connection_options,
cancel_rx,
delegate,
@@ -2,7 +2,7 @@
description = "The fast, collaborative code editor."
edition = "2021"
name = "zed"
version = "0.160.0"
version = "0.160.7"
publish = false
license = "GPL-3.0-or-later"
authors = ["Zed Team <hi@zed.dev>"]

@@ -1 +1 @@
dev
stable
@@ -47,6 +47,9 @@ impl OpenRequest {
this.parse_file_path(file)
} else if let Some(file) = url.strip_prefix("zed://file") {
this.parse_file_path(file)
} else if let Some(file) = url.strip_prefix("zed://ssh") {
let ssh_url = "ssh:/".to_string() + file;
this.parse_ssh_file_path(&ssh_url, cx)?
} else if url.starts_with("ssh://") {
this.parse_ssh_file_path(&url, cx)?
} else if let Some(request_path) = parse_zed_link(&url, cx) {
@@ -23,7 +23,7 @@ On your local machine, Zed runs its UI, talks to language models, uses Tree-sitt
1. Once the Zed server is running, you will be prompted to choose a path to open on the remote server.
> **Note:** Zed does not currently handle opening very large directories (for example, `/` or `~` that may have >100,000 files) very well. We are working on improving this, but suggest in the meantime opening only specific projects, or subfolders of very large mono-repos.

For simple cases where you don't need any SSH arguments, you can run `zed ssh://[<user>@]<host>[:<port>]/<path>` to open a remote folder/file directly.
For simple cases where you don't need any SSH arguments, you can run `zed ssh://[<user>@]<host>[:<port>]/<path>` to open a remote folder/file directly. If you'd like to hotlink into an SSH project, use a link of the format: `zed://ssh/[<user>@]<host>[:<port>]/<path>`.

## Supported platforms
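To make the two URL forms described above concrete, here is a usage sketch; the user, host, port, and path are made-up placeholders:

```sh
# Open a remote folder directly over SSH from the command line.
zed ssh://me@example-host:2222/home/me/my-project

# The same project as a hotlink (e.g. pasted into docs or chat):
# zed://ssh/me@example-host:2222/home/me/my-project
```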
@@ -1,6 +1,6 @@
{
"core_labels": [
"defect",
"bug",
"design",
"documentation",
"duplicate",