Compare commits
41 Commits
arena-conc ... remote-ker

Commits (SHA1):
473bc89d3a
118e7a66b3
6469500330
694231afd1
566c93a0f5
1430718d1a
e31f44450e
e0761db62d
8c342ef706
7e67753d51
1475a7000f
41fd9189e3
973498e075
b63394f4bd
743165fa6c
e03968f538
3c57a4071c
ad6a07e574
c2668bc953
705a06c3dd
f77b6ab79c
ea5131ce0a
1c2b3ad782
496dae968b
5c6565a9e0
7853e32f80
f5cbfa718e
6a2c712990
9454f0f1c7
5b0c15d8c4
aae39071ef
a35b73e63e
c0d11be75f
0e26d22fea
bd0f197415
343c88574a
e7a0890086
d4c5c0f05e
f0c7e62adc
80d50f56f3
fb6c987e3e
7  .github/workflows/ci.yml  (vendored)
@@ -245,6 +245,7 @@ jobs:
# 25 was chosen arbitrarily.
fetch-depth: 25
clean: false
ref: ${{ github.ref }}

- name: Limit target directory size
run: script/clear-target-dir-if-larger-than 100
@@ -261,6 +262,9 @@ jobs:
mkdir -p target/
# Ignore any errors that occur while drafting release notes to not fail the build.
script/draft-release-notes "$RELEASE_VERSION" "$RELEASE_CHANNEL" > target/release-notes.md || true
script/create-draft-release target/release-notes.md
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}

- name: Generate license file
run: script/generate-licenses
@@ -306,7 +310,6 @@ jobs:
target/aarch64-apple-darwin/release/Zed-aarch64.dmg
target/x86_64-apple-darwin/release/Zed-x86_64.dmg
target/release/Zed.dmg
body_path: target/release-notes.md
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}

@@ -353,7 +356,6 @@ jobs:
files: |
target/zed-remote-server-linux-x86_64.gz
target/release/zed-linux-x86_64.tar.gz
body: ""
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}

@@ -400,6 +402,5 @@ jobs:
files: |
target/zed-remote-server-linux-aarch64.gz
target/release/zed-linux-aarch64.tar.gz
body: ""
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
1992  Cargo.lock  (generated)
File diff suppressed because it is too large
@@ -368,12 +368,14 @@ indexmap = { version = "1.6.2", features = ["serde"] }
indoc = "2"
itertools = "0.13.0"
jsonwebtoken = "9.3"
jupyter-protocol = { version = "0.2.0" }
jupyter-websocket-client = { version = "0.4.1" }
libc = "0.2"
linkify = "0.10.0"
log = { version = "0.4.16", features = ["kv_unstable_serde", "serde"] }
markup5ever_rcdom = "0.3.0"
nanoid = "0.4"
nbformat = "0.5.0"
nbformat = "0.6.0"
nix = "0.29"
num-format = "0.4.4"
once_cell = "1.19.0"
@@ -407,7 +409,7 @@ reqwest = { git = "https://github.com/zed-industries/reqwest.git", rev = "fd110f
"stream",
] }
rsa = "0.9.6"
runtimelib = { version = "0.19.0", default-features = false, features = [
runtimelib = { version = "0.21.0", default-features = false, features = [
"async-dispatcher-runtime",
] }
rustc-demangle = "0.1.23"
@@ -16,6 +16,7 @@
"ctrl-shift-l": "editor::SplitSelectionIntoLines",
"ctrl-shift-a": "editor::SelectLargerSyntaxNode",
"ctrl-shift-d": "editor::DuplicateLineDown",
"alt-f3": "editor::SelectAllMatches", // find_all_under
"f12": "editor::GoToDefinition",
"ctrl-f12": "editor::GoToDefinitionSplit",
"shift-f12": "editor::FindAllReferences",

@@ -19,6 +19,7 @@
"cmd-shift-l": "editor::SplitSelectionIntoLines",
"cmd-shift-a": "editor::SelectLargerSyntaxNode",
"cmd-shift-d": "editor::DuplicateLineDown",
"ctrl-cmd-g": "editor::SelectAllMatches", // find_all_under
"shift-f12": "editor::FindAllReferences",
"alt-cmd-down": "editor::GoToDefinition",
"ctrl-alt-cmd-down": "editor::GoToDefinitionSplit",

@@ -381,8 +381,7 @@
"shift-b": "vim::CurlyBrackets",
"<": "vim::AngleBrackets",
">": "vim::AngleBrackets",
"a": "vim::AngleBrackets",
"g": "vim::Argument"
"a": "vim::Argument"
}
},
{
@@ -490,6 +490,9 @@
"version": "2",
// Whether the assistant is enabled.
"enabled": true,
// Whether to show inline hints showing the keybindings to use the inline assistant and the
// assistant panel.
"show_hints": true,
// Whether to show the assistant panel button in the status bar.
"button": true,
// Where to dock the assistant panel. Can be 'left', 'right' or 'bottom'.
@@ -873,15 +876,8 @@
//
"file_types": {
"Plain Text": ["txt"],
"JSON": ["flake.lock"],
"JSONC": [
"**/.zed/**/*.json",
"**/zed/**/*.json",
"**/Zed/**/*.json",
"tsconfig.json",
"pyrightconfig.json"
],
"TOML": ["uv.lock"]
"JSONC": ["**/.zed/**/*.json", "**/zed/**/*.json", "**/Zed/**/*.json"],
"Shell Script": [".env.*"]
},
/// By default use a recent system version of node, or install our own.
/// You can override this to use a version of node that is not in $PATH with:
@@ -2050,30 +2050,6 @@ impl ContextEditor {
ContextEvent::SlashCommandOutputSectionAdded { section } => {
self.insert_slash_command_output_sections([section.clone()], false, cx);
}
ContextEvent::SlashCommandFinished {
output_range: _output_range,
run_commands_in_ranges,
} => {
for range in run_commands_in_ranges {
let commands = self.context.update(cx, |context, cx| {
context.reparse(cx);
context
.pending_commands_for_range(range.clone(), cx)
.to_vec()
});

for command in commands {
self.run_command(
command.source_range,
&command.name,
&command.arguments,
false,
self.workspace.clone(),
cx,
);
}
}
}
ContextEvent::UsePendingTools => {
let pending_tool_uses = self
.context
@@ -2152,6 +2128,37 @@ impl ContextEditor {
command_id: InvokedSlashCommandId,
cx: &mut ViewContext<Self>,
) {
if let Some(invoked_slash_command) =
self.context.read(cx).invoked_slash_command(&command_id)
{
if let InvokedSlashCommandStatus::Finished = invoked_slash_command.status {
let run_commands_in_ranges = invoked_slash_command
.run_commands_in_ranges
.iter()
.cloned()
.collect::<Vec<_>>();
for range in run_commands_in_ranges {
let commands = self.context.update(cx, |context, cx| {
context.reparse(cx);
context
.pending_commands_for_range(range.clone(), cx)
.to_vec()
});

for command in commands {
self.run_command(
command.source_range,
&command.name,
&command.arguments,
false,
self.workspace.clone(),
cx,
);
}
}
}
}

self.editor.update(cx, |editor, cx| {
if let Some(invoked_slash_command) =
self.context.read(cx).invoked_slash_command(&command_id)
@@ -60,6 +60,7 @@ pub struct AssistantSettings {
pub inline_alternatives: Vec<LanguageModelSelection>,
pub using_outdated_settings_version: bool,
pub enable_experimental_live_diffs: bool,
pub show_hints: bool,
}

impl AssistantSettings {
@@ -202,6 +203,7 @@ impl AssistantSettingsContent {
AssistantSettingsContent::Versioned(settings) => match settings {
VersionedAssistantSettingsContent::V1(settings) => AssistantSettingsContentV2 {
enabled: settings.enabled,
show_hints: None,
button: settings.button,
dock: settings.dock,
default_width: settings.default_width,
@@ -242,6 +244,7 @@ impl AssistantSettingsContent {
},
AssistantSettingsContent::Legacy(settings) => AssistantSettingsContentV2 {
enabled: None,
show_hints: None,
button: settings.button,
dock: settings.dock,
default_width: settings.default_width,
@@ -354,6 +357,7 @@ impl Default for VersionedAssistantSettingsContent {
fn default() -> Self {
Self::V2(AssistantSettingsContentV2 {
enabled: None,
show_hints: None,
button: None,
dock: None,
default_width: None,
@@ -371,6 +375,11 @@ pub struct AssistantSettingsContentV2 {
///
/// Default: true
enabled: Option<bool>,
/// Whether to show inline hints that show keybindings for inline assistant
/// and assistant panel.
///
/// Default: true
show_hints: Option<bool>,
/// Whether to show the assistant panel button in the status bar.
///
/// Default: true
@@ -505,6 +514,7 @@ impl Settings for AssistantSettings {

let value = value.upgrade();
merge(&mut settings.enabled, value.enabled);
merge(&mut settings.show_hints, value.show_hints);
merge(&mut settings.button, value.button);
merge(&mut settings.dock, value.dock);
merge(
@@ -575,6 +585,7 @@ mod tests {
}),
inline_alternatives: None,
enabled: None,
show_hints: None,
button: None,
dock: None,
default_width: None,
@@ -381,10 +381,6 @@ pub enum ContextEvent {
SlashCommandOutputSectionAdded {
section: SlashCommandOutputSection<language::Anchor>,
},
SlashCommandFinished {
output_range: Range<language::Anchor>,
run_commands_in_ranges: Vec<Range<language::Anchor>>,
},
UsePendingTools,
ToolFinished {
tool_use_id: Arc<str>,
@@ -916,6 +912,7 @@ impl Context {
InvokedSlashCommand {
name: name.into(),
range: output_range,
run_commands_in_ranges: Vec::new(),
status: InvokedSlashCommandStatus::Running(Task::ready(())),
transaction: None,
timestamp: id.0,
@@ -1914,7 +1911,6 @@ impl Context {
}

let mut pending_section_stack: Vec<PendingSection> = Vec::new();
let mut run_commands_in_ranges: Vec<Range<language::Anchor>> = Vec::new();
let mut last_role: Option<Role> = None;
let mut last_section_range = None;

@@ -1980,7 +1976,13 @@ impl Context {

let end = this.buffer.read(cx).anchor_before(insert_position);
if run_commands_in_text {
run_commands_in_ranges.push(start..end);
if let Some(invoked_slash_command) =
this.invoked_slash_commands.get_mut(&command_id)
{
invoked_slash_command
.run_commands_in_ranges
.push(start..end);
}
}
}
SlashCommandEvent::EndSection => {
@@ -2100,6 +2102,7 @@ impl Context {
InvokedSlashCommand {
name: name.to_string().into(),
range: command_range.clone(),
run_commands_in_ranges: Vec::new(),
status: InvokedSlashCommandStatus::Running(insert_output_task),
transaction: Some(first_transaction),
timestamp: command_id.0,
@@ -3176,6 +3179,7 @@ pub struct ParsedSlashCommand {
pub struct InvokedSlashCommand {
pub name: SharedString,
pub range: Range<language::Anchor>,
pub run_commands_in_ranges: Vec<Range<language::Anchor>>,
pub status: InvokedSlashCommandStatus,
pub transaction: Option<language::TransactionId>,
timestamp: clock::Lamport,
@@ -69,6 +69,10 @@ impl SlashCommand for DefaultSlashCommand {
text.push('\n');
}

if !text.ends_with('\n') {
text.push('\n');
}

Ok(SlashCommandOutput {
sections: vec![SlashCommandOutputSection {
range: 0..text.len(),
@@ -343,7 +343,7 @@ fn init_test(cx: &mut AppContext) -> Model<ChannelStore> {
release_channel::init(SemanticVersion::default(), cx);
client::init_settings(cx);

let clock = Arc::new(FakeSystemClock::default());
let clock = Arc::new(FakeSystemClock::new());
let http = FakeHttpClient::with_404_response();
let client = Client::new(clock, http.clone(), cx);
let user_store = cx.new_model(|cx| UserStore::new(client.clone(), cx));
@@ -42,7 +42,6 @@ serde_json.workspace = true
settings.workspace = true
sha2.workspace = true
smol.workspace = true
sysinfo.workspace = true
telemetry_events.workspace = true
text.workspace = true
thiserror.workspace = true
@@ -1780,7 +1780,7 @@ mod tests {
let user_id = 5;
let client = cx.update(|cx| {
Client::new(
Arc::new(FakeSystemClock::default()),
Arc::new(FakeSystemClock::new()),
FakeHttpClient::with_404_response(),
cx,
)
@@ -1821,7 +1821,7 @@ mod tests {
let user_id = 5;
let client = cx.update(|cx| {
Client::new(
Arc::new(FakeSystemClock::default()),
Arc::new(FakeSystemClock::new()),
FakeHttpClient::with_404_response(),
cx,
)
@@ -1900,7 +1900,7 @@ mod tests {
let dropped_auth_count = Arc::new(Mutex::new(0));
let client = cx.update(|cx| {
Client::new(
Arc::new(FakeSystemClock::default()),
Arc::new(FakeSystemClock::new()),
FakeHttpClient::with_404_response(),
cx,
)
@@ -1943,7 +1943,7 @@ mod tests {
let user_id = 5;
let client = cx.update(|cx| {
Client::new(
Arc::new(FakeSystemClock::default()),
Arc::new(FakeSystemClock::new()),
FakeHttpClient::with_404_response(),
cx,
)
@@ -2003,7 +2003,7 @@ mod tests {
let user_id = 5;
let client = cx.update(|cx| {
Client::new(
Arc::new(FakeSystemClock::default()),
Arc::new(FakeSystemClock::new()),
FakeHttpClient::with_404_response(),
cx,
)
@@ -2038,7 +2038,7 @@ mod tests {
let user_id = 5;
let client = cx.update(|cx| {
Client::new(
Arc::new(FakeSystemClock::default()),
Arc::new(FakeSystemClock::new()),
FakeHttpClient::with_404_response(),
cx,
)
@@ -2,7 +2,6 @@ mod event_coalescer;

use crate::{ChannelId, TelemetrySettings};
use anyhow::Result;
use chrono::{DateTime, Utc};
use clock::SystemClock;
use collections::{HashMap, HashSet};
use futures::Future;
@@ -15,12 +14,11 @@ use settings::{Settings, SettingsStore};
use sha2::{Digest, Sha256};
use std::fs::File;
use std::io::Write;
use std::time::Instant;
use std::{env, mem, path::PathBuf, sync::Arc, time::Duration};
use sysinfo::{CpuRefreshKind, Pid, ProcessRefreshKind, RefreshKind, System};
use telemetry_events::{
ActionEvent, AppEvent, AssistantEvent, CallEvent, CpuEvent, EditEvent, EditorEvent, Event,
EventRequestBody, EventWrapper, ExtensionEvent, InlineCompletionEvent, MemoryEvent, ReplEvent,
SettingEvent,
ActionEvent, AppEvent, AssistantEvent, CallEvent, EditEvent, EditorEvent, Event,
EventRequestBody, EventWrapper, ExtensionEvent, InlineCompletionEvent, ReplEvent, SettingEvent,
};
use util::{ResultExt, TryFutureExt};
use worktree::{UpdatedEntriesSet, WorktreeId};
@@ -46,7 +44,7 @@ struct TelemetryState {
flush_events_task: Option<Task<()>>,
log_file: Option<File>,
is_staff: Option<bool>,
first_event_date_time: Option<DateTime<Utc>>,
first_event_date_time: Option<Instant>,
event_coalescer: EventCoalescer,
max_queue_size: usize,
worktree_id_map: WorktreeIdMap,
@@ -293,48 +291,6 @@ impl Telemetry {
state.session_id = Some(session_id);
state.app_version = release_channel::AppVersion::global(cx).to_string();
state.os_name = os_name();

drop(state);

let this = self.clone();
cx.background_executor()
.spawn(async move {
let mut system = System::new_with_specifics(
RefreshKind::new().with_cpu(CpuRefreshKind::everything()),
);

let refresh_kind = ProcessRefreshKind::new().with_cpu().with_memory();
let current_process = Pid::from_u32(std::process::id());
system.refresh_processes_specifics(
sysinfo::ProcessesToUpdate::Some(&[current_process]),
refresh_kind,
);

// Waiting some amount of time before the first query is important to get a reasonable value
// https://docs.rs/sysinfo/0.29.10/sysinfo/trait.ProcessExt.html#tymethod.cpu_usage
const DURATION_BETWEEN_SYSTEM_EVENTS: Duration = Duration::from_secs(4 * 60);

loop {
smol::Timer::after(DURATION_BETWEEN_SYSTEM_EVENTS).await;

let current_process = Pid::from_u32(std::process::id());
system.refresh_processes_specifics(
sysinfo::ProcessesToUpdate::Some(&[current_process]),
refresh_kind,
);
let Some(process) = system.process(current_process) else {
log::error!(
"Failed to find own process {current_process:?} in system process table"
);
// TODO: Fire an error telemetry event
return;
};

this.report_memory_event(process.memory(), process.virtual_memory());
this.report_cpu_event(process.cpu_usage(), system.cpus().len() as u32);
}
})
.detach();
}

pub fn metrics_enabled(self: &Arc<Self>) -> bool {
@@ -416,28 +372,6 @@ impl Telemetry {
self.report_event(event)
}

pub fn report_cpu_event(self: &Arc<Self>, usage_as_percentage: f32, core_count: u32) {
let event = Event::Cpu(CpuEvent {
usage_as_percentage,
core_count,
});

self.report_event(event)
}

pub fn report_memory_event(
self: &Arc<Self>,
memory_in_bytes: u64,
virtual_memory_in_bytes: u64,
) {
let event = Event::Memory(MemoryEvent {
memory_in_bytes,
virtual_memory_in_bytes,
});

self.report_event(event)
}

pub fn report_app_event(self: &Arc<Self>, operation: String) -> Event {
let event = Event::App(AppEvent { operation });

@@ -469,7 +403,10 @@ impl Telemetry {

if let Some((start, end, environment)) = period_data {
let event = Event::Edit(EditEvent {
duration: end.timestamp_millis() - start.timestamp_millis(),
duration: end
.saturating_duration_since(start)
.min(Duration::from_secs(60 * 60 * 24))
.as_millis() as i64,
environment: environment.to_string(),
is_via_ssh,
});
@@ -567,9 +504,10 @@ impl Telemetry {
let date_time = self.clock.utc_now();

let milliseconds_since_first_event = match state.first_event_date_time {
Some(first_event_date_time) => {
date_time.timestamp_millis() - first_event_date_time.timestamp_millis()
}
Some(first_event_date_time) => date_time
.saturating_duration_since(first_event_date_time)
.min(Duration::from_secs(60 * 60 * 24))
.as_millis() as i64,
None => {
state.first_event_date_time = Some(date_time);
0
@@ -702,7 +640,6 @@ pub fn calculate_json_checksum(json: &impl AsRef<[u8]>) -> Option<String> {
#[cfg(test)]
mod tests {
use super::*;
use chrono::TimeZone;
use clock::FakeSystemClock;
use gpui::TestAppContext;
use http_client::FakeHttpClient;
@@ -710,9 +647,7 @@ mod tests {
#[gpui::test]
fn test_telemetry_flush_on_max_queue_size(cx: &mut TestAppContext) {
init_test(cx);
let clock = Arc::new(FakeSystemClock::new(
Utc.with_ymd_and_hms(1990, 4, 12, 12, 0, 0).unwrap(),
));
let clock = Arc::new(FakeSystemClock::new());
let http = FakeHttpClient::with_200_response();
let system_id = Some("system_id".to_string());
let installation_id = Some("installation_id".to_string());
@@ -743,7 +678,7 @@ mod tests {
Some(first_date_time)
);

clock.advance(chrono::Duration::milliseconds(100));
clock.advance(Duration::from_millis(100));

let event = telemetry.report_app_event(operation.clone());
assert_eq!(
@@ -759,7 +694,7 @@ mod tests {
Some(first_date_time)
);

clock.advance(chrono::Duration::milliseconds(100));
clock.advance(Duration::from_millis(100));

let event = telemetry.report_app_event(operation.clone());
assert_eq!(
@@ -775,7 +710,7 @@ mod tests {
Some(first_date_time)
);

clock.advance(chrono::Duration::milliseconds(100));
clock.advance(Duration::from_millis(100));

// Adding a 4th event should cause a flush
let event = telemetry.report_app_event(operation.clone());
@@ -796,9 +731,7 @@ mod tests {
cx: &mut TestAppContext,
) {
init_test(cx);
let clock = Arc::new(FakeSystemClock::new(
Utc.with_ymd_and_hms(1990, 4, 12, 12, 0, 0).unwrap(),
));
let clock = Arc::new(FakeSystemClock::new());
let http = FakeHttpClient::with_200_response();
let system_id = Some("system_id".to_string());
let installation_id = Some("installation_id".to_string());
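The telemetry hunks above replace chrono timestamp subtraction with Instant-based durations that saturate at zero and are capped at 24 hours. A minimal standalone sketch of that capped computation, assuming an Instant-returning clock; the function name and the assertions are illustrative, not taken from the diff:

```rust
use std::time::{Duration, Instant};

/// Milliseconds elapsed since the first event, saturating at zero and
/// clamped to 24 hours, mirroring the capped-duration pattern in the diff.
fn millis_since_first_event(first_event: Instant, now: Instant) -> i64 {
    now.saturating_duration_since(first_event)
        .min(Duration::from_secs(60 * 60 * 24))
        .as_millis() as i64
}

fn main() {
    let first = Instant::now();
    // In the real code `now` comes from the SystemClock; here we simulate it.
    let later = first + Duration::from_millis(250);
    assert_eq!(millis_since_first_event(first, later), 250);
    // An earlier "now" saturates to zero instead of going negative.
    assert_eq!(millis_since_first_event(later, first), 0);
}
```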
@@ -1,7 +1,6 @@
use std::sync::Arc;
use std::time;
use std::{sync::Arc, time::Instant};

use chrono::{DateTime, Duration, Utc};
use clock::SystemClock;

const COALESCE_TIMEOUT: time::Duration = time::Duration::from_secs(20);
@@ -10,8 +9,8 @@ const SIMULATED_DURATION_FOR_SINGLE_EVENT: time::Duration = time::Duration::from
#[derive(Debug, PartialEq)]
struct PeriodData {
environment: &'static str,
start: DateTime<Utc>,
end: Option<DateTime<Utc>>,
start: Instant,
end: Option<Instant>,
}

pub struct EventCoalescer {
@@ -27,9 +26,8 @@ impl EventCoalescer {
pub fn log_event(
&mut self,
environment: &'static str,
) -> Option<(DateTime<Utc>, DateTime<Utc>, &'static str)> {
) -> Option<(Instant, Instant, &'static str)> {
let log_time = self.clock.utc_now();
let coalesce_timeout = Duration::from_std(COALESCE_TIMEOUT).unwrap();

let Some(state) = &mut self.state else {
self.state = Some(PeriodData {
@@ -43,7 +41,7 @@ impl EventCoalescer {
let period_end = state
.end
.unwrap_or(state.start + SIMULATED_DURATION_FOR_SINGLE_EVENT);
let within_timeout = log_time - period_end < coalesce_timeout;
let within_timeout = log_time - period_end < COALESCE_TIMEOUT;
let environment_is_same = state.environment == environment;
let should_coaelesce = !within_timeout || !environment_is_same;

@@ -70,16 +68,13 @@ impl EventCoalescer {

#[cfg(test)]
mod tests {
use chrono::TimeZone;
use clock::FakeSystemClock;

use super::*;

#[test]
fn test_same_context_exceeding_timeout() {
let clock = Arc::new(FakeSystemClock::new(
Utc.with_ymd_and_hms(1990, 4, 12, 0, 0, 0).unwrap(),
));
let clock = Arc::new(FakeSystemClock::new());
let environment_1 = "environment_1";
let mut event_coalescer = EventCoalescer::new(clock.clone());

@@ -98,7 +93,7 @@ mod tests {
})
);

let within_timeout_adjustment = Duration::from_std(COALESCE_TIMEOUT / 2).unwrap();
let within_timeout_adjustment = COALESCE_TIMEOUT / 2;

// Ensure that many calls within the timeout don't start a new period
for _ in 0..100 {
@@ -118,7 +113,7 @@ mod tests {
}

let period_end = clock.utc_now();
let exceed_timeout_adjustment = Duration::from_std(COALESCE_TIMEOUT * 2).unwrap();
let exceed_timeout_adjustment = COALESCE_TIMEOUT * 2;
// Logging an event exceeding the timeout should start a new period
clock.advance(exceed_timeout_adjustment);
let new_period_start = clock.utc_now();
@@ -137,9 +132,7 @@ mod tests {

#[test]
fn test_different_environment_under_timeout() {
let clock = Arc::new(FakeSystemClock::new(
Utc.with_ymd_and_hms(1990, 4, 12, 0, 0, 0).unwrap(),
));
let clock = Arc::new(FakeSystemClock::new());
let environment_1 = "environment_1";
let mut event_coalescer = EventCoalescer::new(clock.clone());

@@ -158,7 +151,7 @@ mod tests {
})
);

let within_timeout_adjustment = Duration::from_std(COALESCE_TIMEOUT / 2).unwrap();
let within_timeout_adjustment = COALESCE_TIMEOUT / 2;
clock.advance(within_timeout_adjustment);
let period_end = clock.utc_now();
let period_data = event_coalescer.log_event(environment_1);
@@ -193,9 +186,7 @@ mod tests {

#[test]
fn test_switching_environment_while_within_timeout() {
let clock = Arc::new(FakeSystemClock::new(
Utc.with_ymd_and_hms(1990, 4, 12, 0, 0, 0).unwrap(),
));
let clock = Arc::new(FakeSystemClock::new());
let environment_1 = "environment_1";
let mut event_coalescer = EventCoalescer::new(clock.clone());

@@ -214,7 +205,7 @@ mod tests {
})
);

let within_timeout_adjustment = Duration::from_std(COALESCE_TIMEOUT / 2).unwrap();
let within_timeout_adjustment = COALESCE_TIMEOUT / 2;
clock.advance(within_timeout_adjustment);
let period_end = clock.utc_now();
let environment_2 = "environment_2";
@@ -240,9 +231,7 @@ mod tests {

#[test]
fn test_switching_environment_while_exceeding_timeout() {
let clock = Arc::new(FakeSystemClock::new(
Utc.with_ymd_and_hms(1990, 4, 12, 0, 0, 0).unwrap(),
));
let clock = Arc::new(FakeSystemClock::new());
let environment_1 = "environment_1";
let mut event_coalescer = EventCoalescer::new(clock.clone());

@@ -261,7 +250,7 @@ mod tests {
})
);

let exceed_timeout_adjustment = Duration::from_std(COALESCE_TIMEOUT * 2).unwrap();
let exceed_timeout_adjustment = COALESCE_TIMEOUT * 2;
clock.advance(exceed_timeout_adjustment);
let period_end = clock.utc_now();
let environment_2 = "environment_2";
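With Instants, the coalescer's timeout check no longer round-trips through chrono::Duration; it compares an elapsed std Duration against COALESCE_TIMEOUT directly. A small self-contained sketch of that check, with illustrative names and values:

```rust
use std::time::{Duration, Instant};

// Mirrors the constant in event_coalescer.rs.
const COALESCE_TIMEOUT: Duration = Duration::from_secs(20);

/// True when an event logged at `log_time` still belongs to the period that
/// ended at `period_end`, i.e. it arrived within the coalesce timeout.
fn within_timeout(period_end: Instant, log_time: Instant) -> bool {
    log_time - period_end < COALESCE_TIMEOUT
}

fn main() {
    let period_end = Instant::now();
    assert!(within_timeout(period_end, period_end + Duration::from_secs(5)));
    assert!(!within_timeout(period_end, period_end + COALESCE_TIMEOUT * 2));
}
```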
@@ -16,7 +16,6 @@ doctest = false
test-support = ["dep:parking_lot"]

[dependencies]
chrono.workspace = true
parking_lot = { workspace = true, optional = true }
serde.workspace = true
smallvec.workspace = true
@@ -1,21 +1,21 @@
use chrono::{DateTime, Utc};
use std::time::Instant;

pub trait SystemClock: Send + Sync {
/// Returns the current date and time in UTC.
fn utc_now(&self) -> DateTime<Utc>;
fn utc_now(&self) -> Instant;
}

pub struct RealSystemClock;

impl SystemClock for RealSystemClock {
fn utc_now(&self) -> DateTime<Utc> {
Utc::now()
fn utc_now(&self) -> Instant {
Instant::now()
}
}

#[cfg(any(test, feature = "test-support"))]
pub struct FakeSystemClockState {
now: DateTime<Utc>,
now: Instant,
}

#[cfg(any(test, feature = "test-support"))]
@@ -24,36 +24,30 @@ pub struct FakeSystemClock {
state: parking_lot::Mutex<FakeSystemClockState>,
}

#[cfg(any(test, feature = "test-support"))]
impl Default for FakeSystemClock {
fn default() -> Self {
Self::new(Utc::now())
}
}

#[cfg(any(test, feature = "test-support"))]
impl FakeSystemClock {
pub fn new(now: DateTime<Utc>) -> Self {
let state = FakeSystemClockState { now };
pub fn new() -> Self {
let state = FakeSystemClockState {
now: Instant::now(),
};

Self {
state: parking_lot::Mutex::new(state),
}
}

pub fn set_now(&self, now: DateTime<Utc>) {
pub fn set_now(&self, now: Instant) {
self.state.lock().now = now;
}

/// Advances the [`FakeSystemClock`] by the specified [`Duration`](chrono::Duration).
pub fn advance(&self, duration: chrono::Duration) {
pub fn advance(&self, duration: std::time::Duration) {
self.state.lock().now += duration;
}
}

#[cfg(any(test, feature = "test-support"))]
impl SystemClock for FakeSystemClock {
fn utc_now(&self) -> DateTime<Utc> {
fn utc_now(&self) -> Instant {
self.state.lock().now
}
}
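The clock crate's SystemClock trait now hands out std::time::Instant, and the fake clock is constructed without an explicit start time. A simplified, dependency-free sketch of the same idea; std::sync::Mutex stands in for parking_lot, and the names here are illustrative rather than the crate's exact API:

```rust
use std::sync::Mutex;
use std::time::{Duration, Instant};

// Simplified stand-in for FakeSystemClock after the chrono -> Instant switch.
struct FakeClock {
    now: Mutex<Instant>,
}

impl FakeClock {
    fn new() -> Self {
        Self { now: Mutex::new(Instant::now()) }
    }

    // Corresponds to SystemClock::utc_now, which now returns an Instant.
    fn utc_now(&self) -> Instant {
        *self.now.lock().unwrap()
    }

    // Advance the fake clock by a std Duration (previously chrono::Duration).
    fn advance(&self, duration: Duration) {
        *self.now.lock().unwrap() += duration;
    }
}

fn main() {
    let clock = FakeClock::new();
    let start = clock.utc_now();
    clock.advance(Duration::from_millis(100));
    assert_eq!(clock.utc_now() - start, Duration::from_millis(100));
}
```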
@@ -418,7 +418,7 @@ pub async fn post_events(
if let Some(kinesis_client) = app.kinesis_client.clone() {
if let Some(stream) = app.config.kinesis_stream.clone() {
let mut request = kinesis_client.put_records().stream_name(stream);
for row in for_snowflake(request_body.clone(), first_event_at) {
for row in for_snowflake(request_body.clone(), first_event_at, country_code.clone()) {
if let Some(data) = serde_json::to_vec(&row).log_err() {
request = request.records(
aws_sdk_kinesis::types::PutRecordsRequestEntry::builder()
@@ -483,20 +483,7 @@ pub async fn post_events(
checksum_matched,
))
}
Event::Cpu(event) => to_upload.cpu_events.push(CpuEventRow::from_event(
event.clone(),
wrapper,
&request_body,
first_event_at,
checksum_matched,
)),
Event::Memory(event) => to_upload.memory_events.push(MemoryEventRow::from_event(
event.clone(),
wrapper,
&request_body,
first_event_at,
checksum_matched,
)),
Event::Cpu(_) | Event::Memory(_) => continue,
Event::App(event) => to_upload.app_events.push(AppEventRow::from_event(
event.clone(),
wrapper,
@@ -947,6 +934,7 @@ pub struct CpuEventRow {
}

impl CpuEventRow {
#[allow(unused)]
fn from_event(
event: CpuEvent,
wrapper: &EventWrapper,
@@ -1001,6 +989,7 @@ pub struct MemoryEventRow {
}

impl MemoryEventRow {
#[allow(unused)]
fn from_event(
event: MemoryEvent,
wrapper: &EventWrapper,
@@ -1392,8 +1381,9 @@ pub fn calculate_json_checksum(app: Arc<AppState>, json: &impl AsRef<[u8]>) -> O
fn for_snowflake(
body: EventRequestBody,
first_event_at: chrono::DateTime<chrono::Utc>,
country_code: Option<String>,
) -> impl Iterator<Item = SnowflakeRow> {
body.events.into_iter().map(move |event| {
body.events.into_iter().flat_map(move |event| {
let timestamp =
first_event_at + Duration::milliseconds(event.milliseconds_since_first_event);
let (event_type, mut event_properties) = match &event.event {
@@ -1450,14 +1440,7 @@ fn for_snowflake(
},
serde_json::to_value(e).unwrap(),
),
Event::Cpu(e) => (
"System CPU Sampled".to_string(),
serde_json::to_value(e).unwrap(),
),
Event::Memory(e) => (
"System Memory Sampled".to_string(),
serde_json::to_value(e).unwrap(),
),
Event::Cpu(_) | Event::Memory(_) => return None,
Event::App(e) => {
let mut properties = json!({});
let event_type = match e.operation.trim() {
@@ -1571,13 +1554,19 @@ fn for_snowflake(
body.release_channel.clone().into(),
);
map.insert("signed_in".to_string(), event.signed_in.into());
if let Some(country_code) = country_code.as_ref() {
map.insert("country_code".to_string(), country_code.clone().into());
}
}

let user_properties = Some(serde_json::json!({
"is_staff": body.is_staff,
"Country": country_code.clone(),
"OS": format!("{} {}", body.os_name, body.os_version.clone().unwrap_or_default()),
"Version": body.app_version.clone(),
}));

SnowflakeRow {
Some(SnowflakeRow {
time: timestamp,
user_id: body.metrics_id.clone(),
device_id: body.system_id.clone(),
@@ -1585,7 +1574,7 @@ fn for_snowflake(
event_properties,
user_properties,
insert_id: Some(Uuid::new_v4().to_string()),
}
})
})
}
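for_snowflake switches from map to flat_map and returns None for Cpu and Memory events so they no longer produce rows. A tiny standalone sketch of that filter-while-mapping pattern; the enum and the row type here are placeholders, not the real telemetry types:

```rust
// Illustrative only: flat_map over a function returning Option drops the
// variants we no longer want to upload, mirroring
// `Event::Cpu(_) | Event::Memory(_) => return None` in the diff.
enum Event {
    Cpu,
    Memory,
    App(String),
}

fn to_row(event: Event) -> Option<String> {
    match event {
        // Skipped entirely: no row is produced for these events.
        Event::Cpu | Event::Memory => None,
        Event::App(operation) => Some(format!("App Event: {operation}")),
    }
}

fn main() {
    let events = vec![Event::Cpu, Event::App("open".into()), Event::Memory];
    // Option is IntoIterator, so flat_map flattens away the Nones.
    let rows: Vec<String> = events.into_iter().flat_map(to_row).collect();
    assert_eq!(rows, vec!["App Event: open".to_string()]);
}
```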
@@ -4031,7 +4031,10 @@ async fn get_llm_api_token(
}

let has_llm_subscription = session.has_llm_subscription(&db).await?;
if !has_llm_subscription {

let bypass_account_age_check =
has_llm_subscription || flags.iter().any(|flag| flag == "bypass-account-age-check");
if !bypass_account_age_check {
let mut account_created_at = user.created_at;
if let Some(github_created_at) = user.github_user_created_at {
account_created_at = account_created_at.min(github_created_at);
@@ -168,7 +168,7 @@ impl TestServer {
client::init_settings(cx);
});

let clock = Arc::new(FakeSystemClock::default());
let clock = Arc::new(FakeSystemClock::new());
let http = FakeHttpClient::with_404_response();
let user_id = if let Ok(Some(user)) = self.app_state.db.get_user_by_github_login(name).await
{
@@ -24,6 +24,8 @@ use gpui::{AsyncAppContext, EventEmitter, Model, ModelContext, Subscription, Tas
use log;
use parking_lot::RwLock;
use project::Project;
use schemars::gen::SchemaGenerator;
use schemars::schema::{InstanceType, Schema, SchemaObject};
use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
use settings::{Settings, SettingsSources, SettingsStore};
@@ -36,16 +38,32 @@ use crate::{

#[derive(Deserialize, Serialize, Default, Clone, PartialEq, Eq, JsonSchema, Debug)]
pub struct ContextServerSettings {
/// Settings for context servers used in the Assistant.
#[serde(default)]
pub context_servers: HashMap<Arc<str>, ServerConfig>,
}

#[derive(Deserialize, Serialize, Clone, PartialEq, Eq, JsonSchema, Debug, Default)]
pub struct ServerConfig {
/// The command to run this context server.
///
/// This will override the command set by an extension.
pub command: Option<ServerCommand>,
/// The settings for this context server.
///
/// Consult the documentation for the context server to see what settings
/// are supported.
#[schemars(schema_with = "server_config_settings_json_schema")]
pub settings: Option<serde_json::Value>,
}

fn server_config_settings_json_schema(_generator: &mut SchemaGenerator) -> Schema {
Schema::Object(SchemaObject {
instance_type: Some(InstanceType::Object.into()),
..Default::default()
})
}

#[derive(Deserialize, Serialize, Clone, PartialEq, Eq, JsonSchema, Debug)]
pub struct ServerCommand {
pub path: String,
@@ -1,5 +1,3 @@
use std::path::PathBuf;

use anyhow::Context as _;
use gpui::{View, ViewContext, WindowContext};
use language::Language;
@@ -54,9 +52,9 @@ pub fn switch_source_header(
cx.spawn(|_editor, mut cx| async move {
let switch_source_header = switch_source_header_task
.await
.with_context(|| format!("Switch source/header LSP request for path \"{}\" failed", source_file))?;
.with_context(|| format!("Switch source/header LSP request for path \"{source_file}\" failed"))?;
if switch_source_header.0.is_empty() {
log::info!("Clangd returned an empty string when requesting to switch source/header from \"{}\"", source_file);
log::info!("Clangd returned an empty string when requesting to switch source/header from \"{source_file}\"" );
return Ok(());
}

@@ -67,14 +65,17 @@ pub fn switch_source_header(
)
})?;

let path = goto.to_file_path().map_err(|()| {
anyhow::anyhow!("URL conversion to file path failed for \"{goto}\"")
})?;

workspace
.update(&mut cx, |workspace, view_cx| {
workspace.open_abs_path(PathBuf::from(goto.path()), false, view_cx)
workspace.open_abs_path(path, false, view_cx)
})
.with_context(|| {
format!(
"Switch source/header could not open \"{}\" in workspace",
goto.path()
"Switch source/header could not open \"{goto}\" in workspace"
)
})?
.await
@@ -66,7 +66,7 @@ use std::{
use sum_tree::{Bias, TreeMap};
use tab_map::{TabMap, TabSnapshot};
use text::LineIndent;
use ui::{div, px, IntoElement, ParentElement, SharedString, Styled, WindowContext};
use ui::{px, SharedString, WindowContext};
use unicode_segmentation::UnicodeSegmentation;
use wrap_map::{WrapMap, WrapSnapshot};

@@ -541,11 +541,17 @@ pub struct HighlightStyles {
pub suggestion: Option<HighlightStyle>,
}

#[derive(Clone)]
pub enum ChunkReplacement {
Renderer(ChunkRenderer),
Str(SharedString),
}

pub struct HighlightedChunk<'a> {
pub text: &'a str,
pub style: Option<HighlightStyle>,
pub is_tab: bool,
pub renderer: Option<ChunkRenderer>,
pub replacement: Option<ChunkReplacement>,
}

impl<'a> HighlightedChunk<'a> {
@@ -557,7 +563,7 @@ impl<'a> HighlightedChunk<'a> {
let mut text = self.text;
let style = self.style;
let is_tab = self.is_tab;
let renderer = self.renderer;
let renderer = self.replacement;
iter::from_fn(move || {
let mut prefix_len = 0;
while let Some(&ch) = chars.peek() {
@@ -573,30 +579,33 @@ impl<'a> HighlightedChunk<'a> {
text: prefix,
style,
is_tab,
renderer: renderer.clone(),
replacement: renderer.clone(),
});
}
chars.next();
let (prefix, suffix) = text.split_at(ch.len_utf8());
text = suffix;
if let Some(replacement) = replacement(ch) {
let background = editor_style.status.hint_background;
let underline = editor_style.status.hint;
let invisible_highlight = HighlightStyle {
background_color: Some(editor_style.status.hint_background),
underline: Some(UnderlineStyle {
color: Some(editor_style.status.hint),
thickness: px(1.),
wavy: false,
}),
..Default::default()
};
let invisible_style = if let Some(mut style) = style {
style.highlight(invisible_highlight);
style
} else {
invisible_highlight
};
return Some(HighlightedChunk {
text: prefix,
style: None,
style: Some(invisible_style),
is_tab: false,
renderer: Some(ChunkRenderer {
render: Arc::new(move |_| {
div()
.child(replacement)
.bg(background)
.text_decoration_1()
.text_decoration_color(underline)
.into_any_element()
}),
constrain_width: false,
}),
replacement: Some(ChunkReplacement::Str(replacement.into())),
});
} else {
let invisible_highlight = HighlightStyle {
@@ -619,7 +628,7 @@ impl<'a> HighlightedChunk<'a> {
text: prefix,
style: Some(invisible_style),
is_tab: false,
renderer: renderer.clone(),
replacement: renderer.clone(),
});
}
}
@@ -631,7 +640,7 @@ impl<'a> HighlightedChunk<'a> {
text: remainder,
style,
is_tab,
renderer: renderer.clone(),
replacement: renderer.clone(),
})
} else {
None
@@ -895,7 +904,7 @@ impl DisplaySnapshot {
text: chunk.text,
style: highlight_style,
is_tab: chunk.is_tab,
renderer: chunk.renderer,
replacement: chunk.renderer.map(ChunkReplacement::Renderer),
}
.highlight_invisibles(editor_style)
})
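HighlightedChunk now carries an optional ChunkReplacement, which is either a custom renderer or a plain replacement string (the path now used for invisible characters). A simplified sketch of that two-variant shape and how a caller might branch on it; Element and the closure type are placeholders for the real gpui types:

```rust
use std::sync::Arc;

// Placeholder for gpui's AnyElement in this sketch.
type Element = String;

enum ChunkReplacement {
    // An arbitrary renderer that produces an element.
    Renderer(Arc<dyn Fn() -> Element>),
    // A plain string that is shaped as ordinary styled text.
    Str(String),
}

fn layout(replacement: ChunkReplacement) -> Element {
    match replacement {
        ChunkReplacement::Renderer(render) => render(),
        ChunkReplacement::Str(text) => format!("shaped text: {text}"),
    }
}

fn main() {
    let a = layout(ChunkReplacement::Renderer(Arc::new(|| "rendered".to_string())));
    let b = layout(ChunkReplacement::Str("→".to_string()));
    println!("{a}\n{b}");
}
```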
@@ -540,6 +540,15 @@ pub enum IsVimMode {
No,
}

pub trait ActiveLineTrailerProvider {
fn render_active_line_trailer(
&mut self,
style: &EditorStyle,
focus_handle: &FocusHandle,
cx: &mut WindowContext,
) -> Option<AnyElement>;
}

/// Zed's primary text input `View`, allowing users to edit a [`MultiBuffer`]
///
/// See the [module level documentation](self) for more information.
@@ -667,6 +676,7 @@ pub struct Editor {
next_scroll_position: NextScrollCursorCenterTopBottom,
addons: HashMap<TypeId, Box<dyn Addon>>,
_scroll_cursor_center_top_bottom_task: Task<()>,
active_line_trailer_provider: Option<Box<dyn ActiveLineTrailerProvider>>,
}

#[derive(Copy, Clone, Debug, PartialEq, Eq, Default)]
@@ -1026,7 +1036,12 @@ impl CompletionsMenu {
let match_candidates = completions
.iter()
.enumerate()
.map(|(id, completion)| StringMatchCandidate::new(id, completion.label.text.clone()))
.map(|(id, completion)| {
StringMatchCandidate::new(
id,
completion.label.text[completion.label.filter_range.clone()].into(),
)
})
.collect();

Self {
@@ -2200,6 +2215,7 @@ impl Editor {
addons: HashMap::default(),
_scroll_cursor_center_top_bottom_task: Task::ready(()),
text_style_refinement: None,
active_line_trailer_provider: None,
};
this.tasks_update_task = Some(this.refresh_runnables(cx));
this._subscriptions.extend(project_subscriptions);
@@ -2488,6 +2504,16 @@ impl Editor {
self.refresh_inline_completion(false, false, cx);
}

pub fn set_active_line_trailer_provider<T>(
&mut self,
provider: Option<T>,
_cx: &mut ViewContext<Self>,
) where
T: ActiveLineTrailerProvider + 'static,
{
self.active_line_trailer_provider = provider.map(|provider| Box::new(provider) as Box<_>);
}

pub fn placeholder_text(&self, _cx: &WindowContext) -> Option<&str> {
self.placeholder_text.as_deref()
}
@@ -11844,6 +11870,29 @@ impl Editor {
&& self.has_blame_entries(cx)
}

pub fn render_active_line_trailer(
&mut self,
style: &EditorStyle,
cx: &mut WindowContext,
) -> Option<AnyElement> {
let selection = self.selections.newest::<Point>(cx);
if !selection.is_empty() {
return None;
};

let snapshot = self.buffer.read(cx).snapshot(cx);
let buffer_row = MultiBufferRow(selection.head().row);

if snapshot.line_len(buffer_row) != 0 || self.has_active_inline_completion(cx) {
return None;
}

let focus_handle = self.focus_handle.clone();
self.active_line_trailer_provider
.as_mut()?
.render_active_line_trailer(style, &focus_handle, cx)
}

fn has_blame_entries(&self, cx: &mut WindowContext) -> bool {
self.blame()
.map_or(false, |blame| blame.read(cx).has_generated_entries())
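The editor gains an ActiveLineTrailerProvider hook: an optional boxed provider the editor can ask for an element to render at the end of the active line. A stripped-down sketch of the trait, the setter, and a caller, omitting the selection and empty-line checks and using placeholder types; the hint text in the example provider is invented for illustration:

```rust
// Placeholder for gpui's AnyElement in this sketch.
type AnyElement = String;

trait ActiveLineTrailerProvider {
    fn render_active_line_trailer(&mut self) -> Option<AnyElement>;
}

struct Editor {
    active_line_trailer_provider: Option<Box<dyn ActiveLineTrailerProvider>>,
}

impl Editor {
    // Mirrors set_active_line_trailer_provider: store the provider boxed as a trait object.
    fn set_active_line_trailer_provider<T>(&mut self, provider: Option<T>)
    where
        T: ActiveLineTrailerProvider + 'static,
    {
        self.active_line_trailer_provider =
            provider.map(|p| Box::new(p) as Box<dyn ActiveLineTrailerProvider>);
    }

    // Mirrors render_active_line_trailer: delegate to the provider if one is set.
    fn render_active_line_trailer(&mut self) -> Option<AnyElement> {
        self.active_line_trailer_provider
            .as_mut()?
            .render_active_line_trailer()
    }
}

struct HintProvider;

impl ActiveLineTrailerProvider for HintProvider {
    fn render_active_line_trailer(&mut self) -> Option<AnyElement> {
        Some("example trailer element".to_string())
    }
}

fn main() {
    let mut editor = Editor { active_line_trailer_provider: None };
    assert!(editor.render_active_line_trailer().is_none());
    editor.set_active_line_trailer_provider(Some(HintProvider));
    assert_eq!(
        editor.render_active_line_trailer().as_deref(),
        Some("example trailer element")
    );
}
```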
@@ -1398,6 +1398,15 @@ fn test_move_cursor_different_line_lengths(cx: &mut TestAppContext) {
view.change_selections(None, cx, |s| {
s.select_display_ranges([empty_range(0, "ⓐⓑⓒⓓⓔ".len())]);
});

// moving above start of document should move selection to start of document,
// but the next move down should still be at the original goal_x
view.move_up(&MoveUp, cx);
assert_eq!(
view.selections.display_ranges(cx),
&[empty_range(0, "".len())]
);

view.move_down(&MoveDown, cx);
assert_eq!(
view.selections.display_ranges(cx),
@@ -1422,6 +1431,25 @@ fn test_move_cursor_different_line_lengths(cx: &mut TestAppContext) {
&[empty_range(4, "ⓐⓑⓒⓓⓔ".len())]
);

// moving past end of document should not change goal_x
view.move_down(&MoveDown, cx);
assert_eq!(
view.selections.display_ranges(cx),
&[empty_range(5, "".len())]
);

view.move_down(&MoveDown, cx);
assert_eq!(
view.selections.display_ranges(cx),
&[empty_range(5, "".len())]
);

view.move_up(&MoveUp, cx);
assert_eq!(
view.selections.display_ranges(cx),
&[empty_range(4, "ⓐⓑⓒⓓⓔ".len())]
);

view.move_up(&MoveUp, cx);
assert_eq!(
view.selections.display_ranges(cx),
@@ -16,8 +16,8 @@ use crate::{
items::BufferSearchHighlights,
mouse_context_menu::{self, MenuPosition, MouseContextMenu},
scroll::scroll_amount::ScrollAmount,
BlockId, CodeActionsMenu, CursorShape, CustomBlockId, DisplayPoint, DisplayRow,
DocumentHighlightRead, DocumentHighlightWrite, Editor, EditorMode, EditorSettings,
BlockId, ChunkReplacement, CodeActionsMenu, CursorShape, CustomBlockId, DisplayPoint,
DisplayRow, DocumentHighlightRead, DocumentHighlightWrite, Editor, EditorMode, EditorSettings,
EditorSnapshot, EditorStyle, ExpandExcerpts, FocusedBlock, GutterDimensions, HalfPageDown,
HalfPageUp, HandleInput, HoveredCursor, HoveredHunk, JumpData, LineDown, LineUp, OpenExcerpts,
PageDown, PageUp, Point, RowExt, RowRangeExt, SelectPhase, Selection, SoftWrap, ToPoint,
@@ -34,8 +34,8 @@ use gpui::{
FontId, GlobalElementId, Hitbox, Hsla, InteractiveElement, IntoElement, Length,
ModifiersChangedEvent, MouseButton, MouseDownEvent, MouseMoveEvent, MouseUpEvent, PaintQuad,
ParentElement, Pixels, ScrollDelta, ScrollWheelEvent, ShapedLine, SharedString, Size,
StatefulInteractiveElement, Style, Styled, TextRun, TextStyle, TextStyleRefinement, View,
ViewContext, WeakView, WindowContext,
StatefulInteractiveElement, Style, Styled, TextRun, TextStyleRefinement, View, ViewContext,
WeakView, WindowContext,
};
use gpui::{ClickEvent, Subscription};
use itertools::Itertools;
@@ -1412,7 +1412,7 @@ impl EditorElement {
}

#[allow(clippy::too_many_arguments)]
fn layout_inline_blame(
fn layout_active_line_trailer(
&self,
display_row: DisplayRow,
display_snapshot: &DisplaySnapshot,
@@ -1424,61 +1424,71 @@ impl EditorElement {
line_height: Pixels,
cx: &mut WindowContext,
) -> Option<AnyElement> {
if !self
let render_inline_blame = self
.editor
.update(cx, |editor, cx| editor.render_git_blame_inline(cx))
{
return None;
}
.update(cx, |editor, cx| editor.render_git_blame_inline(cx));
if render_inline_blame {
let workspace = self
.editor
.read(cx)
.workspace
.as_ref()
.map(|(w, _)| w.clone());

let workspace = self
.editor
.read(cx)
.workspace
.as_ref()
.map(|(w, _)| w.clone());
let display_point = DisplayPoint::new(display_row, 0);
let buffer_row = MultiBufferRow(display_point.to_point(display_snapshot).row);

let display_point = DisplayPoint::new(display_row, 0);
let buffer_row = MultiBufferRow(display_point.to_point(display_snapshot).row);
let blame = self.editor.read(cx).blame.clone()?;
let blame_entry = blame
.update(cx, |blame, cx| {
blame.blame_for_rows([Some(buffer_row)], cx).next()
})
.flatten()?;

let blame = self.editor.read(cx).blame.clone()?;
let blame_entry = blame
.update(cx, |blame, cx| {
blame.blame_for_rows([Some(buffer_row)], cx).next()
})
.flatten()?;
let mut element =
render_inline_blame_entry(&blame, blame_entry, &self.style, workspace, cx);

let mut element =
render_inline_blame_entry(&blame, blame_entry, &self.style, workspace, cx);
let start_y = content_origin.y
+ line_height * (display_row.as_f32() - scroll_pixel_position.y / line_height);

let start_y = content_origin.y
+ line_height * (display_row.as_f32() - scroll_pixel_position.y / line_height);
let start_x = {
const INLINE_BLAME_PADDING_EM_WIDTHS: f32 = 6.;

let start_x = {
const INLINE_BLAME_PADDING_EM_WIDTHS: f32 = 6.;
let line_end = if let Some(crease_trailer) = crease_trailer {
crease_trailer.bounds.right()
} else {
content_origin.x - scroll_pixel_position.x + line_layout.width
};
let padded_line_end = line_end + em_width * INLINE_BLAME_PADDING_EM_WIDTHS;

let line_end = if let Some(crease_trailer) = crease_trailer {
crease_trailer.bounds.right()
} else {
content_origin.x - scroll_pixel_position.x + line_layout.width
let min_column_in_pixels = ProjectSettings::get_global(cx)
.git
.inline_blame
.and_then(|settings| settings.min_column)
.map(|col| self.column_pixels(col as usize, cx))
.unwrap_or(px(0.));
let min_start = content_origin.x - scroll_pixel_position.x + min_column_in_pixels;

cmp::max(padded_line_end, min_start)
};
let padded_line_end = line_end + em_width * INLINE_BLAME_PADDING_EM_WIDTHS;

let min_column_in_pixels = ProjectSettings::get_global(cx)
.git
.inline_blame
.and_then(|settings| settings.min_column)
.map(|col| self.column_pixels(col as usize, cx))
.unwrap_or(px(0.));
let min_start = content_origin.x - scroll_pixel_position.x + min_column_in_pixels;
let absolute_offset = point(start_x, start_y);
element.prepaint_as_root(absolute_offset, AvailableSpace::min_size(), cx);

cmp::max(padded_line_end, min_start)
};
Some(element)
} else if let Some(mut element) = self.editor.update(cx, |editor, cx| {
editor.render_active_line_trailer(&self.style, cx)
}) {
let start_y = content_origin.y
+ line_height * (display_row.as_f32() - scroll_pixel_position.y / line_height);
let start_x = content_origin.x - scroll_pixel_position.x + em_width;
let absolute_offset = point(start_x, start_y);
element.prepaint_as_root(absolute_offset, AvailableSpace::min_size(), cx);

let absolute_offset = point(start_x, start_y);
element.prepaint_as_root(absolute_offset, AvailableSpace::min_size(), cx);

Some(element)
Some(element)
} else {
None
}
}

#[allow(clippy::too_many_arguments)]
@@ -2019,7 +2029,7 @@ impl EditorElement {
let chunks = snapshot.highlighted_chunks(rows.clone(), true, style);
LineWithInvisibles::from_chunks(
chunks,
&style.text,
&style,
MAX_LINE_LEN,
rows.len(),
snapshot.mode,
@@ -3454,7 +3464,7 @@ impl EditorElement {
self.paint_lines(&invisible_display_ranges, layout, cx);
self.paint_redactions(layout, cx);
self.paint_cursors(layout, cx);
self.paint_inline_blame(layout, cx);
self.paint_active_line_trailer(layout, cx);
cx.with_element_namespace("crease_trailers", |cx| {
for trailer in layout.crease_trailers.iter_mut().flatten() {
trailer.element.paint(cx);
@@ -3936,10 +3946,10 @@ impl EditorElement {
}
}

fn paint_inline_blame(&mut self, layout: &mut EditorLayout, cx: &mut WindowContext) {
if let Some(mut inline_blame) = layout.inline_blame.take() {
fn paint_active_line_trailer(&mut self, layout: &mut EditorLayout, cx: &mut WindowContext) {
if let Some(mut element) = layout.active_line_trailer.take() {
cx.paint_layer(layout.text_hitbox.bounds, |cx| {
inline_blame.paint(cx);
element.paint(cx);
})
}
}
@@ -4372,7 +4382,7 @@ impl LineWithInvisibles {
#[allow(clippy::too_many_arguments)]
fn from_chunks<'a>(
chunks: impl Iterator<Item = HighlightedChunk<'a>>,
text_style: &TextStyle,
editor_style: &EditorStyle,
max_line_len: usize,
max_line_count: usize,
editor_mode: EditorMode,
@@ -4380,6 +4390,7 @@ impl LineWithInvisibles {
is_row_soft_wrapped: impl Copy + Fn(usize) -> bool,
cx: &mut WindowContext,
) -> Vec<Self> {
let text_style = &editor_style.text;
let mut layouts = Vec::with_capacity(max_line_count);
let mut fragments: SmallVec<[LineFragment; 1]> = SmallVec::new();
let mut line = String::new();
@@ -4398,9 +4409,9 @@ impl LineWithInvisibles {
text: "\n",
style: None,
is_tab: false,
renderer: None,
replacement: None,
}]) {
if let Some(renderer) = highlighted_chunk.renderer {
if let Some(replacement) = highlighted_chunk.replacement {
if !line.is_empty() {
let shaped_line = cx
.text_system()
@@ -4413,42 +4424,71 @@ impl LineWithInvisibles {
styles.clear();
}

let available_width = if renderer.constrain_width {
let chunk = if highlighted_chunk.text == ellipsis.as_ref() {
ellipsis.clone()
} else {
SharedString::from(Arc::from(highlighted_chunk.text))
};
let shaped_line = cx
.text_system()
.shape_line(
chunk,
font_size,
&[text_style.to_run(highlighted_chunk.text.len())],
)
.unwrap();
AvailableSpace::Definite(shaped_line.width)
} else {
AvailableSpace::MinContent
};
match replacement {
ChunkReplacement::Renderer(renderer) => {
let available_width = if renderer.constrain_width {
let chunk = if highlighted_chunk.text == ellipsis.as_ref() {
ellipsis.clone()
} else {
SharedString::from(Arc::from(highlighted_chunk.text))
};
let shaped_line = cx
.text_system()
.shape_line(
chunk,
font_size,
&[text_style.to_run(highlighted_chunk.text.len())],
)
.unwrap();
AvailableSpace::Definite(shaped_line.width)
} else {
AvailableSpace::MinContent
};

let mut element = (renderer.render)(&mut ChunkRendererContext {
context: cx,
max_width: text_width,
});
let line_height = text_style.line_height_in_pixels(cx.rem_size());
let size = element.layout_as_root(
size(available_width, AvailableSpace::Definite(line_height)),
cx,
);
let mut element = (renderer.render)(&mut ChunkRendererContext {
context: cx,
max_width: text_width,
});
let line_height = text_style.line_height_in_pixels(cx.rem_size());
let size = element.layout_as_root(
size(available_width, AvailableSpace::Definite(line_height)),
cx,
);

width += size.width;
len += highlighted_chunk.text.len();
fragments.push(LineFragment::Element {
element: Some(element),
size,
len: highlighted_chunk.text.len(),
});
width += size.width;
len += highlighted_chunk.text.len();
fragments.push(LineFragment::Element {
element: Some(element),
size,
len: highlighted_chunk.text.len(),
});
}
ChunkReplacement::Str(x) => {
let text_style = if let Some(style) = highlighted_chunk.style {
Cow::Owned(text_style.clone().highlight(style))
} else {
Cow::Borrowed(text_style)
};

let run = TextRun {
len: x.len(),
font: text_style.font(),
color: text_style.color,
background_color: text_style.background_color,
underline: text_style.underline,
strikethrough: text_style.strikethrough,
};
let line_layout = cx
.text_system()
.shape_line(x, font_size, &[run])
.unwrap()
.with_len(highlighted_chunk.text.len());

width += line_layout.width;
len += highlighted_chunk.text.len();
fragments.push(LineFragment::Text(line_layout))
}
}
} else {
for (ix, mut line_chunk) in highlighted_chunk.text.split('\n').enumerate() {
if ix > 0 {
@@ -5301,14 +5341,14 @@ impl Element for EditorElement {
)
});

let mut inline_blame = None;
let mut active_line_trailer = None;
if let Some(newest_selection_head) = newest_selection_head {
let display_row = newest_selection_head.row();
if (start_row..end_row).contains(&display_row) {
let line_ix = display_row.minus(start_row) as usize;
let line_layout = &line_layouts[line_ix];
let crease_trailer_layout = crease_trailers[line_ix].as_ref();
inline_blame = self.layout_inline_blame(
active_line_trailer = self.layout_active_line_trailer(
display_row,
&snapshot.display_snapshot,
line_layout,
@@ -5627,7 +5667,7 @@ impl Element for EditorElement {
line_elements,
line_numbers,
blamed_display_rows,
inline_blame,
active_line_trailer,
blocks,
cursors,
visible_cursors,
@@ -5764,7 +5804,7 @@ pub struct EditorLayout {
line_numbers: Vec<Option<ShapedLine>>,
display_hunks: Vec<(DisplayDiffHunk, Option<Hitbox>)>,
blamed_display_rows: Option<Vec<AnyElement>>,
inline_blame: Option<AnyElement>,
active_line_trailer: Option<AnyElement>,
blocks: Vec<BlockLayout>,
highlighted_ranges: Vec<(Range<DisplayPoint>, Hsla)>,
highlighted_gutter_ranges: Vec<(Range<DisplayPoint>, Hsla)>,
@@ -5992,7 +6032,7 @@ fn layout_line(
let chunks = snapshot.highlighted_chunks(row..row + DisplayRow(1), true, style);
LineWithInvisibles::from_chunks(
chunks,
&style.text,
&style,
MAX_LINE_LEN,
1,
snapshot.mode,
@@ -3,7 +3,7 @@
|
||||
|
||||
use super::{Bias, DisplayPoint, DisplaySnapshot, SelectionGoal, ToDisplayPoint};
|
||||
use crate::{scroll::ScrollAnchor, CharKind, DisplayRow, EditorStyle, RowExt, ToOffset, ToPoint};
|
||||
use gpui::{px, Pixels, WindowTextSystem};
|
||||
use gpui::{Pixels, WindowTextSystem};
|
||||
use language::Point;
|
||||
use multi_buffer::{MultiBufferRow, MultiBufferSnapshot};
|
||||
use serde::Deserialize;
|
||||
@@ -120,7 +120,7 @@ pub(crate) fn up_by_rows(
|
||||
preserve_column_at_start: bool,
|
||||
text_layout_details: &TextLayoutDetails,
|
||||
) -> (DisplayPoint, SelectionGoal) {
|
||||
let mut goal_x = match goal {
|
||||
let goal_x = match goal {
|
||||
SelectionGoal::HorizontalPosition(x) => x.into(),
|
||||
SelectionGoal::WrappedHorizontalPosition((_, x)) => x.into(),
|
||||
SelectionGoal::HorizontalRange { end, .. } => end.into(),
|
||||
@@ -138,7 +138,6 @@ pub(crate) fn up_by_rows(
|
||||
return (start, goal);
|
||||
} else {
|
||||
point = DisplayPoint::new(DisplayRow(0), 0);
|
||||
goal_x = px(0.);
|
||||
}
|
||||
|
||||
let mut clipped_point = map.clip_point(point, Bias::Left);
|
||||
@@ -159,7 +158,7 @@ pub(crate) fn down_by_rows(
|
||||
preserve_column_at_end: bool,
|
||||
text_layout_details: &TextLayoutDetails,
|
||||
) -> (DisplayPoint, SelectionGoal) {
|
||||
let mut goal_x = match goal {
|
||||
let goal_x = match goal {
|
||||
SelectionGoal::HorizontalPosition(x) => x.into(),
|
||||
SelectionGoal::WrappedHorizontalPosition((_, x)) => x.into(),
|
||||
SelectionGoal::HorizontalRange { end, .. } => end.into(),
|
||||
@@ -174,7 +173,6 @@ pub(crate) fn down_by_rows(
|
||||
return (start, goal);
|
||||
} else {
|
||||
point = map.max_point();
|
||||
goal_x = map.x_for_display_point(point, text_layout_details)
|
||||
}
|
||||
|
||||
let mut clipped_point = map.clip_point(point, Bias::Right);
|
||||
@@ -610,7 +608,7 @@ mod tests {
|
||||
test::{editor_test_context::EditorTestContext, marked_display_snapshot},
|
||||
Buffer, DisplayMap, DisplayRow, ExcerptRange, FoldPlaceholder, InlayId, MultiBuffer,
|
||||
};
|
||||
use gpui::{font, Context as _};
|
||||
use gpui::{font, px, Context as _};
|
||||
use language::Capability;
|
||||
use project::Project;
|
||||
use settings::SettingsStore;
|
||||
@@ -977,7 +975,7 @@ mod tests {
|
||||
),
|
||||
(
|
||||
DisplayPoint::new(DisplayRow(2), 0),
|
||||
SelectionGoal::HorizontalPosition(0.0)
|
||||
SelectionGoal::HorizontalPosition(col_2_x.0),
|
||||
),
|
||||
);
|
||||
assert_eq!(
|
||||
@@ -990,7 +988,7 @@ mod tests {
|
||||
),
|
||||
(
|
||||
DisplayPoint::new(DisplayRow(2), 0),
|
||||
SelectionGoal::HorizontalPosition(0.0)
|
||||
SelectionGoal::HorizontalPosition(0.0),
|
||||
),
|
||||
);
|
||||
|
||||
@@ -1059,7 +1057,7 @@ mod tests {
|
||||
let max_point_x = snapshot
|
||||
.x_for_display_point(DisplayPoint::new(DisplayRow(7), 2), &text_layout_details);
|
||||
|
||||
// Can't move down off the end
|
||||
// Can't move down off the end, and attempting to do so leaves the selection goal unchanged
|
||||
assert_eq!(
|
||||
down(
|
||||
&snapshot,
|
||||
@@ -1070,7 +1068,7 @@ mod tests {
|
||||
),
|
||||
(
|
||||
DisplayPoint::new(DisplayRow(7), 2),
|
||||
SelectionGoal::HorizontalPosition(max_point_x.0)
|
||||
SelectionGoal::HorizontalPosition(0.0)
|
||||
),
|
||||
);
|
||||
assert_eq!(
|
||||
|
||||
@@ -44,6 +44,7 @@ util.workspace = true
|
||||
vim.workspace = true
|
||||
wasmtime-wasi.workspace = true
|
||||
workspace.workspace = true
|
||||
zed_actions.workspace = true
|
||||
|
||||
[dev-dependencies]
|
||||
editor = { workspace = true, features = ["test-support"] }
|
||||
|
||||
@@ -38,12 +38,12 @@ use crate::extension_version_selector::{
|
||||
ExtensionVersionSelector, ExtensionVersionSelectorDelegate,
|
||||
};
|
||||
|
||||
actions!(zed, [Extensions, InstallDevExtension]);
|
||||
actions!(zed, [InstallDevExtension]);
|
||||
|
||||
pub fn init(cx: &mut AppContext) {
|
||||
cx.observe_new_views(move |workspace: &mut Workspace, cx| {
|
||||
workspace
|
||||
.register_action(move |workspace, _: &Extensions, cx| {
|
||||
.register_action(move |workspace, _: &zed_actions::Extensions, cx| {
|
||||
let existing = workspace
|
||||
.active_pane()
|
||||
.read(cx)
|
||||
|
||||
@@ -260,7 +260,10 @@ unsafe fn parse_keystroke(native_event: id) -> Keystroke {
|
||||
|
||||
#[allow(non_upper_case_globals)]
|
||||
let key = match first_char {
|
||||
Some(SPACE_KEY) => "space".to_string(),
|
||||
Some(SPACE_KEY) => {
|
||||
ime_key = Some(" ".to_string());
|
||||
"space".to_string()
|
||||
}
|
||||
Some(BACKSPACE_KEY) => "backspace".to_string(),
|
||||
Some(ENTER_KEY) | Some(NUMPAD_ENTER_KEY) => "enter".to_string(),
|
||||
Some(ESCAPE_KEY) => "escape".to_string(),
|
||||
|
||||
@@ -343,8 +343,10 @@ impl MacPlatform {
|
||||
ns_string(key_to_native(&keystroke.key).as_ref()),
|
||||
)
|
||||
.autorelease();
|
||||
let _: () =
|
||||
msg_send![item, setAllowsAutomaticKeyEquivalentLocalization: NO];
|
||||
if MacPlatform::os_version().unwrap() >= SemanticVersion::new(12, 0, 0) {
|
||||
let _: () =
|
||||
msg_send![item, setAllowsAutomaticKeyEquivalentLocalization: NO];
|
||||
}
|
||||
item.setKeyEquivalentModifierMask_(mask);
|
||||
}
|
||||
// For multi-keystroke bindings, render the keystroke as part of the title.
|
||||
|
||||
@@ -44,6 +44,21 @@ impl ShapedLine {
|
||||
self.layout.len
|
||||
}
|
||||
|
||||
/// Override the length, which is useful when rendering one piece of text
/// in place of another (e.g. rendering invisibles).
|
||||
pub fn with_len(mut self, len: usize) -> Self {
|
||||
let layout = self.layout.as_ref();
|
||||
self.layout = Arc::new(LineLayout {
|
||||
font_size: layout.font_size,
|
||||
width: layout.width,
|
||||
ascent: layout.ascent,
|
||||
descent: layout.descent,
|
||||
runs: layout.runs.clone(),
|
||||
len,
|
||||
});
|
||||
self
|
||||
}
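As a hedged sketch of the intended call pattern (assuming a `WindowContext` `cx`, a `font_size`, and a `TextStyle` in scope, as at the replacement call site in `element.rs`): shape the replacement glyph, then report the original byte length so downstream offset math still refers to the source text.

```rust
// Illustration only: render a tab as a visible "→" while keeping its logical length of 1.
let shaped = cx
    .text_system()
    .shape_line("→".into(), font_size, &[text_style.to_run("→".len())])
    .unwrap()
    .with_len("\t".len());
assert_eq!(shaped.len(), 1); // callers still see the original length
```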
/// Paint the line of text to the window.
|
||||
pub fn paint(
|
||||
&self,
|
||||
|
||||
@@ -29,7 +29,7 @@ pub struct LineLayout {
|
||||
}
|
||||
|
||||
/// A run of text that has been shaped.
|
||||
#[derive(Debug)]
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct ShapedRun {
|
||||
/// The font id for this run
|
||||
pub font_id: FontId,
|
||||
|
||||
@@ -3038,7 +3038,7 @@ impl<'a> WindowContext<'a> {
|
||||
return true;
|
||||
}
|
||||
|
||||
if let Some(input) = keystroke.ime_key {
|
||||
if let Some(input) = keystroke.with_simulated_ime().ime_key {
|
||||
if let Some(mut input_handler) = self.window.platform_window.take_input_handler() {
|
||||
input_handler.dispatch_input(&input, self);
|
||||
self.window.platform_window.set_input_handler(input_handler);
|
||||
@@ -3050,7 +3050,7 @@ impl<'a> WindowContext<'a> {
|
||||
}
|
||||
|
||||
/// Represent this action as a key binding string, to display in the UI.
|
||||
pub fn keystroke_text_for(&self, action: &dyn Action) -> String {
|
||||
pub fn keystroke_text_for_action(&self, action: &dyn Action) -> String {
|
||||
self.bindings_for_action(action)
|
||||
.into_iter()
|
||||
.next()
|
||||
@@ -3065,6 +3065,26 @@ impl<'a> WindowContext<'a> {
|
||||
.unwrap_or_else(|| action.name().to_string())
|
||||
}
|
||||
|
||||
/// Represent this action as a key binding string scoped to the given focus handle, to display in the UI.
|
||||
pub fn keystroke_text_for_action_in(
|
||||
&self,
|
||||
action: &dyn Action,
|
||||
focus_handle: &FocusHandle,
|
||||
) -> String {
|
||||
self.bindings_for_action_in(action, focus_handle)
|
||||
.into_iter()
|
||||
.next()
|
||||
.map(|binding| {
|
||||
binding
|
||||
.keystrokes()
|
||||
.iter()
|
||||
.map(ToString::to_string)
|
||||
.collect::<Vec<_>>()
|
||||
.join(" ")
|
||||
})
|
||||
.unwrap_or_else(|| action.name().to_string())
|
||||
}
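A minimal usage sketch (the helper and its call site are hypothetical; only `keystroke_text_for_action_in` itself comes from this change): resolve the binding that is active for a particular focus handle rather than the first global match.

```rust
// Hypothetical tooltip helper, for illustration only.
fn confirm_hint(cx: &WindowContext, focus_handle: &FocusHandle) -> String {
    // Falls back to the action's name when no binding is active for this focus.
    format!(
        "Confirm ({})",
        cx.keystroke_text_for_action_in(&menu::Confirm, focus_handle)
    )
}
```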
/// Dispatch a mouse or keyboard event on the window.
|
||||
#[profiling::function]
|
||||
pub fn dispatch_event(&mut self, event: PlatformInput) -> DispatchEventResult {
|
||||
@@ -3462,7 +3482,13 @@ impl<'a> WindowContext<'a> {
|
||||
if !self.propagate_event {
|
||||
continue 'replay;
|
||||
}
|
||||
if let Some(input) = replay.keystroke.ime_key.as_ref().cloned() {
|
||||
if let Some(input) = replay
|
||||
.keystroke
|
||||
.with_simulated_ime()
|
||||
.ime_key
|
||||
.as_ref()
|
||||
.cloned()
|
||||
{
|
||||
if let Some(mut input_handler) = self.window.platform_window.take_input_handler() {
|
||||
input_handler.dispatch_input(&input, self);
|
||||
self.window.platform_window.set_input_handler(input_handler)
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
name = "JSON"
|
||||
grammar = "json"
|
||||
path_suffixes = ["json"]
|
||||
path_suffixes = ["json", "flake.lock"]
|
||||
line_comments = ["// "]
|
||||
autoclose_before = ",]}"
|
||||
brackets = [
|
||||
|
||||
@@ -139,7 +139,7 @@
|
||||
}
|
||||
},
|
||||
"patternProperties": {
|
||||
"^(?![\\.0-9]).": {
|
||||
"^[^.0-9]+$": {
|
||||
"$ref": "#/definitions/packageExportsEntryOrFallback",
|
||||
"description": "The module path that is resolved when this environment matches the property name."
|
||||
}
|
||||
@@ -616,7 +616,7 @@
|
||||
}
|
||||
}
|
||||
},
|
||||
"bundledDependencies": {
|
||||
"bundleDependencies": {
|
||||
"description": "Array of package names that will be bundled when publishing the package.",
|
||||
"oneOf": [
|
||||
{
|
||||
@@ -630,8 +630,8 @@
|
||||
}
|
||||
]
|
||||
},
|
||||
"bundleDependencies": {
|
||||
"description": "DEPRECATED: This field is honored, but \"bundledDependencies\" is the correct field name.",
|
||||
"bundledDependencies": {
|
||||
"description": "DEPRECATED: This field is honored, but \"bundleDependencies\" is the correct field name.",
|
||||
"oneOf": [
|
||||
{
|
||||
"type": "array",
|
||||
@@ -734,6 +734,9 @@
|
||||
"registry": {
|
||||
"type": "string",
|
||||
"format": "uri"
|
||||
},
|
||||
"provenance": {
|
||||
"type": "boolean"
|
||||
}
|
||||
},
|
||||
"additionalProperties": true
|
||||
|
||||
@@ -232,7 +232,7 @@
|
||||
"$comment": "The value of 'null' is UNDOCUMENTED (https://github.com/microsoft/TypeScript/pull/18058).",
|
||||
"description": "Enable importing files with any extension, provided a declaration file is present.",
|
||||
"type": ["boolean", "null"],
|
||||
"markdownDescription": "Enable importing files with any extension, provided a declaration file is present.\n\nSee more: https://www.typescriptlang.org/tsconfig#allowImportingTsExtensions"
|
||||
"markdownDescription": "Enable importing files with any extension, provided a declaration file is present.\n\nSee more: https://www.typescriptlang.org/tsconfig#allowArbitraryExtensions"
|
||||
},
|
||||
"allowImportingTsExtensions": {
|
||||
"$comment": "The value of 'null' is UNDOCUMENTED (https://github.com/microsoft/TypeScript/pull/18058).",
|
||||
@@ -426,17 +426,17 @@
|
||||
"anyOf": [
|
||||
{
|
||||
"enum": [
|
||||
"Classic",
|
||||
"Node",
|
||||
"Node10",
|
||||
"Node16",
|
||||
"NodeNext",
|
||||
"Bundler"
|
||||
"classic",
|
||||
"node",
|
||||
"node10",
|
||||
"node16",
|
||||
"nodenext",
|
||||
"bundler"
|
||||
],
|
||||
"markdownEnumDescriptions": [
|
||||
"It’s recommended to use `\"Node16\"` instead",
|
||||
"Deprecated, use `\"Node10\"` in TypeScript 5.0+ instead",
|
||||
"It’s recommended to use `\"Node16\"` instead",
|
||||
"It’s recommended to use `\"node16\"` instead",
|
||||
"Deprecated, use `\"node10\"` in TypeScript 5.0+ instead",
|
||||
"It’s recommended to use `\"node16\"` instead",
|
||||
"This is the recommended setting for libraries and Node.js applications",
|
||||
"This is the recommended setting for libraries and Node.js applications",
|
||||
"This is the recommended setting in TypeScript 5.0+ for applications that use a bundler"
|
||||
@@ -497,10 +497,10 @@
|
||||
},
|
||||
"noUnusedLocals": {
|
||||
"$comment": "The value of 'null' is UNDOCUMENTED (https://github.com/microsoft/TypeScript/pull/18058).",
|
||||
"description": "Enable error reporting when a local variables aren't read.",
|
||||
"description": "Enable error reporting when a local variable isn't read.",
|
||||
"type": ["boolean", "null"],
|
||||
"default": false,
|
||||
"markdownDescription": "Enable error reporting when a local variables aren't read.\n\nSee more: https://www.typescriptlang.org/tsconfig#noUnusedLocals"
|
||||
"markdownDescription": "Enable error reporting when a local variable isn't read.\n\nSee more: https://www.typescriptlang.org/tsconfig#noUnusedLocals"
|
||||
},
|
||||
"noUnusedParameters": {
|
||||
"$comment": "The value of 'null' is UNDOCUMENTED (https://github.com/microsoft/TypeScript/pull/18058).",
|
||||
@@ -949,14 +949,19 @@
|
||||
"ESNext.Array",
|
||||
"ESNext.AsyncIterable",
|
||||
"ESNext.BigInt",
|
||||
"ESNext.Collection",
|
||||
"ESNext.Intl",
|
||||
"ESNext.Object",
|
||||
"ESNext.Promise",
|
||||
"ESNext.Regexp",
|
||||
"ESNext.String",
|
||||
"ESNext.Symbol",
|
||||
"DOM",
|
||||
"DOM.AsyncIterable",
|
||||
"DOM.Iterable",
|
||||
"ScriptHost",
|
||||
"WebWorker",
|
||||
"WebWorker.AsyncIterable",
|
||||
"WebWorker.ImportScripts",
|
||||
"Webworker.Iterable",
|
||||
"ES7",
|
||||
@@ -1022,13 +1027,13 @@
|
||||
"pattern": "^[Ee][Ss][Nn][Ee][Xx][Tt](\\.([Aa][Rr][Rr][Aa][Yy]|[Aa][Ss][Yy][Nn][Cc][Ii][Tt][Ee][Rr][Aa][Bb][Ll][Ee]|[Bb][Ii][Gg][Ii][Nn][Tt]|[Ii][Nn][Tt][Ll]|[Pp][Rr][Oo][Mm][Ii][Ss][Ee]|[Ss][Tt][Rr][Ii][Nn][Gg]|[Ss][Yy][Mm][Bb][Oo][Ll]|[Ww][Ee][Aa][Kk][Rr][Ee][Ff]|[Dd][Ee][Cc][Oo][Rr][Aa][Tt][Oo][Rr][Ss]|[Dd][Ii][Ss][Pp][Oo][Ss][Aa][Bb][Ll][Ee]))?$"
|
||||
},
|
||||
{
|
||||
"pattern": "^[Dd][Oo][Mm](\\.[Ii][Tt][Ee][Rr][Aa][Bb][Ll][Ee])?$"
|
||||
"pattern": "^[Dd][Oo][Mm](\\.([Aa][Ss][Yy][Nn][Cc])?[Ii][Tt][Ee][Rr][Aa][Bb][Ll][Ee])?$"
|
||||
},
|
||||
{
|
||||
"pattern": "^[Ss][Cc][Rr][Ii][Pp][Tt][Hh][Oo][Ss][Tt]$"
|
||||
},
|
||||
{
|
||||
"pattern": "^[Ww][Ee][Bb][Ww][Oo][Rr][Kk][Ee][Rr](\\.([Ii][Mm][Pp][Oo][Rr][Tt][Ss][Cc][Rr][Ii][Pp][Tt][Ss]|[Ii][Tt][Ee][Rr][Aa][Bb][Ll][Ee]))?$"
|
||||
"pattern": "^[Ww][Ee][Bb][Ww][Oo][Rr][Kk][Ee][Rr](\\.([Ii][Mm][Pp][Oo][Rr][Tt][Ss][Cc][Rr][Ii][Pp][Tt][Ss]|([Aa][Ss][Yy][Nn][Cc])?[Ii][Tt][Ee][Rr][Aa][Bb][Ll][Ee]))?$"
|
||||
},
|
||||
{
|
||||
"pattern": "^[Dd][Ee][Cc][Oo][Rr][Aa][Tt][Oo][Rr][Ss](\\.([Ll][Ee][Gg][Aa][Cc][Yy]))?$"
|
||||
@@ -1203,6 +1208,34 @@
|
||||
"description": "Do not transform or elide any imports or exports not marked as type-only, ensuring they are written in the output file's format based on the 'module' setting.",
|
||||
"type": ["boolean", "null"],
|
||||
"markdownDescription": "Do not transform or elide any imports or exports not marked as type-only, ensuring they are written in the output file's format based on the 'module' setting.\n\nSee more: https://www.typescriptlang.org/tsconfig#verbatimModuleSyntax"
|
||||
},
|
||||
"noCheck": {
|
||||
"$comment": "The value of 'null' is UNDOCUMENTED (https://github.com/microsoft/TypeScript/pull/18058).",
|
||||
"description": "Disable full type checking (only critical parse and emit errors will be reported)",
|
||||
"type": ["boolean", "null"],
|
||||
"default": false,
|
||||
"markdownDescription": "Disable full type checking (only critical parse and emit errors will be reported)\n\nSee more: https://www.typescriptlang.org/tsconfig#noCheck"
|
||||
},
|
||||
"isolatedDeclarations": {
|
||||
"$comment": "The value of 'null' is UNDOCUMENTED (https://github.com/microsoft/TypeScript/pull/18058).",
|
||||
"description": "Require sufficient annotation on exports so other tools can trivially generate declaration files.",
|
||||
"type": ["boolean", "null"],
|
||||
"default": false,
|
||||
"markdownDescription": "Require sufficient annotation on exports so other tools can trivially generate declaration files.\n\nSee more: https://www.typescriptlang.org/tsconfig#isolatedDeclarations"
|
||||
},
|
||||
"noUncheckedSideEffectImports": {
|
||||
"$comment": "The value of 'null' is UNDOCUMENTED (https://github.com/microsoft/TypeScript/pull/18058).",
|
||||
"description": "Check side effect imports.",
|
||||
"type": ["boolean", "null"],
|
||||
"default": false,
|
||||
"markdownDescription": "Check side effect imports.\n\nSee more: https://www.typescriptlang.org/tsconfig#noUncheckedSideEffectImports"
|
||||
},
|
||||
"strictBuiltinIteratorReturn": {
|
||||
"$comment": "The value of 'null' is UNDOCUMENTED (https://github.com/microsoft/TypeScript/pull/18058).",
|
||||
"description": "Built-in iterators are instantiated with a 'TReturn' type of 'undefined' instead of 'any'.",
|
||||
"type": ["boolean", "null"],
|
||||
"default": false,
|
||||
"markdownDescription": "Built-in iterators are instantiated with a 'TReturn' type of 'undefined' instead of 'any'.\n\nSee more: https://www.typescriptlang.org/tsconfig#strictBuiltinIteratorReturn"
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1423,4 +1456,3 @@
|
||||
"title": "JSON schema for the TypeScript compiler's configuration file",
|
||||
"type": "object"
|
||||
}
|
||||
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
name = "JSONC"
|
||||
grammar = "jsonc"
|
||||
path_suffixes = ["jsonc"]
|
||||
path_suffixes = ["jsonc", "tsconfig.json", "pyrightconfig.json"]
|
||||
line_comments = ["// "]
|
||||
autoclose_before = ",]}"
|
||||
brackets = [
|
||||
|
||||
@@ -5,3 +5,6 @@
|
||||
|
||||
((inline) @content
|
||||
(#set! "language" "markdown-inline"))
|
||||
|
||||
((html_block) @content
|
||||
(#set! "language" "html"))
|
||||
|
||||
@@ -4,6 +4,7 @@ use async_trait::async_trait;
|
||||
use collections::HashMap;
|
||||
use gpui::AsyncAppContext;
|
||||
use gpui::{AppContext, Task};
|
||||
use language::language_settings::language_settings;
|
||||
use language::LanguageName;
|
||||
use language::LanguageToolchainStore;
|
||||
use language::Toolchain;
|
||||
@@ -21,6 +22,7 @@ use serde_json::{json, Value};
|
||||
use smol::{lock::OnceCell, process::Command};
|
||||
use std::cmp::Ordering;
|
||||
|
||||
use std::str::FromStr;
|
||||
use std::sync::Mutex;
|
||||
use std::{
|
||||
any::Any,
|
||||
@@ -35,6 +37,23 @@ use util::ResultExt;
|
||||
const SERVER_PATH: &str = "node_modules/pyright/langserver.index.js";
|
||||
const NODE_MODULE_RELATIVE_SERVER_PATH: &str = "pyright/langserver.index.js";
|
||||
|
||||
enum TestRunner {
|
||||
UNITTEST,
|
||||
PYTEST,
|
||||
}
|
||||
|
||||
impl FromStr for TestRunner {
|
||||
type Err = ();
|
||||
|
||||
fn from_str(s: &str) -> std::result::Result<Self, Self::Err> {
|
||||
match s {
|
||||
"unittest" => Ok(Self::UNITTEST),
|
||||
"pytest" => Ok(Self::PYTEST),
|
||||
_ => Err(()),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn server_binary_arguments(server_path: &Path) -> Vec<OsString> {
|
||||
vec![server_path.into(), "--stdio".into()]
|
||||
}
|
||||
@@ -265,8 +284,8 @@ async fn get_cached_server_binary(
|
||||
|
||||
pub(crate) struct PythonContextProvider;
|
||||
|
||||
const PYTHON_UNITTEST_TARGET_TASK_VARIABLE: VariableName =
|
||||
VariableName::Custom(Cow::Borrowed("PYTHON_UNITTEST_TARGET"));
|
||||
const PYTHON_TEST_TARGET_TASK_VARIABLE: VariableName =
|
||||
VariableName::Custom(Cow::Borrowed("PYTHON_TEST_TARGET"));
|
||||
|
||||
const PYTHON_ACTIVE_TOOLCHAIN_PATH: VariableName =
|
||||
VariableName::Custom(Cow::Borrowed("PYTHON_ACTIVE_ZED_TOOLCHAIN"));
|
||||
@@ -279,28 +298,16 @@ impl ContextProvider for PythonContextProvider {
|
||||
toolchains: Arc<dyn LanguageToolchainStore>,
|
||||
cx: &mut gpui::AppContext,
|
||||
) -> Task<Result<task::TaskVariables>> {
|
||||
let python_module_name = python_module_name_from_relative_path(
|
||||
variables.get(&VariableName::RelativeFile).unwrap_or(""),
|
||||
);
|
||||
let unittest_class_name =
|
||||
variables.get(&VariableName::Custom(Cow::Borrowed("_unittest_class_name")));
|
||||
let unittest_method_name = variables.get(&VariableName::Custom(Cow::Borrowed(
|
||||
"_unittest_method_name",
|
||||
)));
|
||||
let test_target = {
|
||||
let test_runner = selected_test_runner(location.buffer.read(cx).file(), cx);
|
||||
|
||||
let unittest_target_str = match (unittest_class_name, unittest_method_name) {
|
||||
(Some(class_name), Some(method_name)) => {
|
||||
format!("{}.{}.{}", python_module_name, class_name, method_name)
|
||||
}
|
||||
(Some(class_name), None) => format!("{}.{}", python_module_name, class_name),
|
||||
(None, None) => python_module_name,
|
||||
(None, Some(_)) => return Task::ready(Ok(task::TaskVariables::default())), // should never happen, a TestCase class is the unit of testing
|
||||
let runner = match test_runner {
|
||||
TestRunner::UNITTEST => self.build_unittest_target(variables),
|
||||
TestRunner::PYTEST => self.build_pytest_target(variables),
|
||||
};
|
||||
runner
|
||||
};
|
||||
|
||||
let unittest_target = (
|
||||
PYTHON_UNITTEST_TARGET_TASK_VARIABLE.clone(),
|
||||
unittest_target_str,
|
||||
);
|
||||
let worktree_id = location.buffer.read(cx).file().map(|f| f.worktree_id(cx));
|
||||
cx.spawn(move |mut cx| async move {
|
||||
let active_toolchain = if let Some(worktree_id) = worktree_id {
|
||||
@@ -312,53 +319,174 @@ impl ContextProvider for PythonContextProvider {
|
||||
String::from("python3")
|
||||
};
|
||||
let toolchain = (PYTHON_ACTIVE_TOOLCHAIN_PATH, active_toolchain);
|
||||
Ok(task::TaskVariables::from_iter([unittest_target, toolchain]))
|
||||
Ok(task::TaskVariables::from_iter([test_target?, toolchain]))
|
||||
})
|
||||
}
|
||||
|
||||
fn associated_tasks(
|
||||
&self,
|
||||
_: Option<Arc<dyn language::File>>,
|
||||
_: &AppContext,
|
||||
file: Option<Arc<dyn language::File>>,
|
||||
cx: &AppContext,
|
||||
) -> Option<TaskTemplates> {
|
||||
Some(TaskTemplates(vec![
|
||||
let test_runner = selected_test_runner(file.as_ref(), cx);
|
||||
|
||||
let mut tasks = vec![
|
||||
// Execute a selection
|
||||
TaskTemplate {
|
||||
label: "execute selection".to_owned(),
|
||||
command: PYTHON_ACTIVE_TOOLCHAIN_PATH.template_value(),
|
||||
args: vec!["-c".to_owned(), VariableName::SelectedText.template_value()],
|
||||
..TaskTemplate::default()
|
||||
},
|
||||
// Execute an entire file
|
||||
TaskTemplate {
|
||||
label: format!("run '{}'", VariableName::File.template_value()),
|
||||
command: PYTHON_ACTIVE_TOOLCHAIN_PATH.template_value(),
|
||||
args: vec![VariableName::File.template_value()],
|
||||
..TaskTemplate::default()
|
||||
},
|
||||
TaskTemplate {
|
||||
label: format!("unittest '{}'", VariableName::File.template_value()),
|
||||
command: PYTHON_ACTIVE_TOOLCHAIN_PATH.template_value(),
|
||||
args: vec![
|
||||
"-m".to_owned(),
|
||||
"unittest".to_owned(),
|
||||
VariableName::File.template_value(),
|
||||
],
|
||||
..TaskTemplate::default()
|
||||
},
|
||||
TaskTemplate {
|
||||
label: "unittest $ZED_CUSTOM_PYTHON_UNITTEST_TARGET".to_owned(),
|
||||
command: PYTHON_ACTIVE_TOOLCHAIN_PATH.template_value(),
|
||||
args: vec![
|
||||
"-m".to_owned(),
|
||||
"unittest".to_owned(),
|
||||
"$ZED_CUSTOM_PYTHON_UNITTEST_TARGET".to_owned(),
|
||||
],
|
||||
tags: vec![
|
||||
"python-unittest-class".to_owned(),
|
||||
"python-unittest-method".to_owned(),
|
||||
],
|
||||
..TaskTemplate::default()
|
||||
},
|
||||
]))
|
||||
];
|
||||
|
||||
tasks.extend(match test_runner {
|
||||
TestRunner::UNITTEST => {
|
||||
[
|
||||
// Run tests for an entire file
|
||||
TaskTemplate {
|
||||
label: format!("unittest '{}'", VariableName::File.template_value()),
|
||||
command: PYTHON_ACTIVE_TOOLCHAIN_PATH.template_value(),
|
||||
args: vec![
|
||||
"-m".to_owned(),
|
||||
"unittest".to_owned(),
|
||||
VariableName::File.template_value(),
|
||||
],
|
||||
..TaskTemplate::default()
|
||||
},
|
||||
// Run test(s) for a specific target within a file
|
||||
TaskTemplate {
|
||||
label: "unittest $ZED_CUSTOM_PYTHON_TEST_TARGET".to_owned(),
|
||||
command: PYTHON_ACTIVE_TOOLCHAIN_PATH.template_value(),
|
||||
args: vec![
|
||||
"-m".to_owned(),
|
||||
"unittest".to_owned(),
|
||||
"$ZED_CUSTOM_PYTHON_TEST_TARGET".to_owned(),
|
||||
],
|
||||
tags: vec![
|
||||
"python-unittest-class".to_owned(),
|
||||
"python-unittest-method".to_owned(),
|
||||
],
|
||||
..TaskTemplate::default()
|
||||
},
|
||||
]
|
||||
}
|
||||
TestRunner::PYTEST => {
|
||||
[
|
||||
// Run tests for an entire file
|
||||
TaskTemplate {
|
||||
label: format!("pytest '{}'", VariableName::File.template_value()),
|
||||
command: PYTHON_ACTIVE_TOOLCHAIN_PATH.template_value(),
|
||||
args: vec![
|
||||
"-m".to_owned(),
|
||||
"pytest".to_owned(),
|
||||
VariableName::File.template_value(),
|
||||
],
|
||||
..TaskTemplate::default()
|
||||
},
|
||||
// Run test(s) for a specific target within a file
|
||||
TaskTemplate {
|
||||
label: "pytest $ZED_CUSTOM_PYTHON_TEST_TARGET".to_owned(),
|
||||
command: PYTHON_ACTIVE_TOOLCHAIN_PATH.template_value(),
|
||||
args: vec![
|
||||
"-m".to_owned(),
|
||||
"pytest".to_owned(),
|
||||
"$ZED_CUSTOM_PYTHON_TEST_TARGET".to_owned(),
|
||||
],
|
||||
tags: vec![
|
||||
"python-pytest-class".to_owned(),
|
||||
"python-pytest-method".to_owned(),
|
||||
],
|
||||
..TaskTemplate::default()
|
||||
},
|
||||
]
|
||||
}
|
||||
});
|
||||
|
||||
Some(TaskTemplates(tasks))
|
||||
}
|
||||
}
|
||||
|
||||
fn selected_test_runner(location: Option<&Arc<dyn language::File>>, cx: &AppContext) -> TestRunner {
|
||||
const TEST_RUNNER_VARIABLE: &str = "TEST_RUNNER";
|
||||
language_settings(Some(LanguageName::new("Python")), location, cx)
|
||||
.tasks
|
||||
.variables
|
||||
.get(TEST_RUNNER_VARIABLE)
|
||||
.and_then(|val| TestRunner::from_str(val).ok())
|
||||
.unwrap_or(TestRunner::PYTEST)
|
||||
}
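A hypothetical test (not part of the change) to make the selection rule explicit: the `TEST_RUNNER` task variable only accepts the two spellings handled by `from_str`, and anything else falls back to pytest via the `unwrap_or` above.

```rust
#[cfg(test)]
mod test_runner_selection {
    use super::*;
    use std::str::FromStr as _;

    #[test]
    fn parses_known_runners() {
        assert!(matches!(TestRunner::from_str("unittest"), Ok(TestRunner::UNITTEST)));
        assert!(matches!(TestRunner::from_str("pytest"), Ok(TestRunner::PYTEST)));
        // Unrecognized values error out, so selected_test_runner defaults to pytest.
        assert!(TestRunner::from_str("nose").is_err());
    }
}
```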
impl PythonContextProvider {
|
||||
fn build_unittest_target(
|
||||
&self,
|
||||
variables: &task::TaskVariables,
|
||||
) -> Result<(VariableName, String)> {
|
||||
let python_module_name = python_module_name_from_relative_path(
|
||||
variables.get(&VariableName::RelativeFile).unwrap_or(""),
|
||||
);
|
||||
|
||||
let unittest_class_name =
|
||||
variables.get(&VariableName::Custom(Cow::Borrowed("_unittest_class_name")));
|
||||
|
||||
let unittest_method_name = variables.get(&VariableName::Custom(Cow::Borrowed(
|
||||
"_unittest_method_name",
|
||||
)));
|
||||
|
||||
let unittest_target_str = match (unittest_class_name, unittest_method_name) {
|
||||
(Some(class_name), Some(method_name)) => {
|
||||
format!("{}.{}.{}", python_module_name, class_name, method_name)
|
||||
}
|
||||
(Some(class_name), None) => format!("{}.{}", python_module_name, class_name),
|
||||
(None, None) => python_module_name,
|
||||
(None, Some(_)) => return Ok((VariableName::Custom(Cow::Borrowed("")), String::new())), // should never happen, a TestCase class is the unit of testing
|
||||
};
|
||||
|
||||
let unittest_target = (
|
||||
PYTHON_TEST_TARGET_TASK_VARIABLE.clone(),
|
||||
unittest_target_str,
|
||||
);
|
||||
|
||||
Ok(unittest_target)
|
||||
}
|
||||
|
||||
fn build_pytest_target(
|
||||
&self,
|
||||
variables: &task::TaskVariables,
|
||||
) -> Result<(VariableName, String)> {
|
||||
let file_path = variables
|
||||
.get(&VariableName::RelativeFile)
|
||||
.ok_or_else(|| anyhow!("No file path given"))?;
|
||||
|
||||
let pytest_class_name =
|
||||
variables.get(&VariableName::Custom(Cow::Borrowed("_pytest_class_name")));
|
||||
|
||||
let pytest_method_name =
|
||||
variables.get(&VariableName::Custom(Cow::Borrowed("_pytest_method_name")));
|
||||
|
||||
let pytest_target_str = match (pytest_class_name, pytest_method_name) {
|
||||
(Some(class_name), Some(method_name)) => {
|
||||
format!("{}::{}::{}", file_path, class_name, method_name)
|
||||
}
|
||||
(Some(class_name), None) => {
|
||||
format!("{}::{}", file_path, class_name)
|
||||
}
|
||||
(None, Some(method_name)) => {
|
||||
format!("{}::{}", file_path, method_name)
|
||||
}
|
||||
(None, None) => file_path.to_string(),
|
||||
};
|
||||
|
||||
let pytest_target = (PYTHON_TEST_TARGET_TASK_VARIABLE.clone(), pytest_target_str);
|
||||
|
||||
Ok(pytest_target)
|
||||
}
|
||||
}
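For illustration, the shapes of the two target strings this provider produces (the file, class, and method names below are made up): unittest gets a dotted module path, pytest gets a `::`-separated node id.

```rust
// Illustration only; values are hypothetical.
fn example_targets() {
    let pytest_target = format!(
        "{}::{}::{}",
        "tests/test_example.py", "TestExample", "test_addition"
    );
    assert_eq!(pytest_target, "tests/test_example.py::TestExample::test_addition");

    let unittest_target = format!(
        "{}.{}.{}",
        "tests.test_example", "TestExample", "test_addition"
    );
    assert_eq!(unittest_target, "tests.test_example.TestExample.test_addition");
}
```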
@@ -5,6 +5,14 @@
|
||||
; Type alias
|
||||
(type_alias_statement "type" @keyword)
|
||||
|
||||
; Identifier naming conventions
|
||||
|
||||
((identifier) @type.class
|
||||
(#match? @type.class "^[A-Z]"))
|
||||
|
||||
((identifier) @constant
|
||||
(#match? @constant "^_*[A-Z][A-Z\\d_]*$"))
|
||||
|
||||
; TypeVar with constraints in type parameters
|
||||
(type
|
||||
(tuple (identifier) @type)
|
||||
@@ -12,25 +20,28 @@
|
||||
|
||||
; Function calls
|
||||
|
||||
(decorator) @function
|
||||
(decorator
|
||||
"@" @punctuation.special
|
||||
(identifier) @function.decorator)
|
||||
|
||||
(call
|
||||
function: (attribute attribute: (identifier) @function.method))
|
||||
function: (attribute attribute: (identifier) @function.method.call))
|
||||
(call
|
||||
function: (identifier) @function)
|
||||
function: (identifier) @function.call)
|
||||
|
||||
; Function definitions
|
||||
; Function and class definitions
|
||||
|
||||
(function_definition
|
||||
name: (identifier) @function)
|
||||
name: (identifier) @function.definition)
|
||||
|
||||
; Identifier naming conventions
|
||||
; Class definitions and calling: needs to come after the regex matching above
|
||||
|
||||
((identifier) @type
|
||||
(#match? @type "^[A-Z]"))
|
||||
(class_definition
|
||||
name: (identifier) @type.class.definition)
|
||||
|
||||
((identifier) @constant
|
||||
(#match? @constant "^_*[A-Z][A-Z\\d_]*$"))
|
||||
(call
|
||||
function: (identifier) @type.class.call
|
||||
(#match? @type.class.call "^[A-Z][A-Z0-9_]*[a-z]"))
|
||||
|
||||
; Builtin functions
|
||||
|
||||
@@ -46,6 +57,7 @@
|
||||
(none)
|
||||
(true)
|
||||
(false)
|
||||
(ellipsis)
|
||||
] @constant.builtin
|
||||
|
||||
[
|
||||
@@ -58,7 +70,7 @@
|
||||
[
|
||||
(parameters (identifier) @variable.special)
|
||||
(attribute (identifier) @variable.special)
|
||||
(#match? @variable.special "^self$")
|
||||
(#match? @variable.special "^self|cls$")
|
||||
]
|
||||
|
||||
(comment) @comment
|
||||
@@ -86,6 +98,25 @@
|
||||
(parameters)?
|
||||
body: (block (expression_statement (string) @string.doc)))
|
||||
|
||||
(module
|
||||
(expression_statement (assignment))
|
||||
. (expression_statement (string) @string.doc))
|
||||
|
||||
(class_definition
|
||||
body: (block
|
||||
(expression_statement (assignment))
|
||||
. (expression_statement (string) @string.doc)))
|
||||
|
||||
(class_definition
|
||||
body: (block
|
||||
(function_definition
|
||||
name: (identifier) @function.method.constructor
|
||||
(#eq? @function.method.constructor "__init__")
|
||||
body: (block
|
||||
(expression_statement (assignment))
|
||||
. (expression_statement (string) @string.doc)))))
|
||||
|
||||
|
||||
[
|
||||
"-"
|
||||
"-="
|
||||
|
||||
@@ -29,3 +29,42 @@
|
||||
)
|
||||
)
|
||||
)
|
||||
|
||||
; pytest functions
|
||||
(
|
||||
(module
|
||||
(function_definition
|
||||
name: (identifier) @run @_pytest_method_name
|
||||
(#match? @_pytest_method_name "^test_")
|
||||
) @python-pytest-method
|
||||
)
|
||||
(#set! tag python-pytest-method)
|
||||
)
|
||||
|
||||
; pytest classes
|
||||
(
|
||||
(module
|
||||
(class_definition
|
||||
name: (identifier) @run @_pytest_class_name
|
||||
(#match? @_pytest_class_name "^Test")
|
||||
)
|
||||
(#set! tag python-pytest-class)
|
||||
)
|
||||
)
|
||||
|
||||
; pytest class methods
|
||||
(
|
||||
(module
|
||||
(class_definition
|
||||
name: (identifier) @_pytest_class_name
|
||||
(#match? @_pytest_class_name "^Test")
|
||||
body: (block
|
||||
(function_definition
|
||||
name: (identifier) @run @_pytest_method_name
|
||||
(#match? @_pytest_method_name "^test")
|
||||
) @python-pytest-method
|
||||
(#set! tag python-pytest-method)
|
||||
)
|
||||
)
|
||||
)
|
||||
)
|
||||
|
||||
@@ -3875,13 +3875,13 @@ impl OutlinePanel {
|
||||
.child({
|
||||
let keystroke = match self.position(cx) {
|
||||
DockPosition::Left => {
|
||||
cx.keystroke_text_for(&workspace::ToggleLeftDock)
|
||||
cx.keystroke_text_for_action(&workspace::ToggleLeftDock)
|
||||
}
|
||||
DockPosition::Bottom => {
|
||||
cx.keystroke_text_for(&workspace::ToggleBottomDock)
|
||||
cx.keystroke_text_for_action(&workspace::ToggleBottomDock)
|
||||
}
|
||||
DockPosition::Right => {
|
||||
cx.keystroke_text_for(&workspace::ToggleRightDock)
|
||||
cx.keystroke_text_for_action(&workspace::ToggleRightDock)
|
||||
}
|
||||
};
|
||||
Label::new(format!("Toggle this panel with {keystroke}"))
|
||||
|
||||
@@ -1133,7 +1133,7 @@ impl Project {
|
||||
|
||||
let fs = Arc::new(RealFs::default());
|
||||
let languages = LanguageRegistry::test(cx.background_executor().clone());
|
||||
let clock = Arc::new(FakeSystemClock::default());
|
||||
let clock = Arc::new(FakeSystemClock::new());
|
||||
let http_client = http_client::FakeHttpClient::with_404_response();
|
||||
let client = cx
|
||||
.update(|cx| client::Client::new(clock, http_client.clone(), cx))
|
||||
@@ -1179,7 +1179,7 @@ impl Project {
|
||||
use gpui::Context;
|
||||
|
||||
let languages = LanguageRegistry::test(cx.executor());
|
||||
let clock = Arc::new(FakeSystemClock::default());
|
||||
let clock = Arc::new(FakeSystemClock::new());
|
||||
let http_client = http_client::FakeHttpClient::with_404_response();
|
||||
let client = cx.update(|cx| client::Client::new(clock, http_client.clone(), cx));
|
||||
let user_store = cx.new_model(|cx| UserStore::new(client.clone(), cx));
|
||||
|
||||
@@ -402,7 +402,7 @@ fn session_state(session: View<Session>, cx: &WindowContext) -> ReplMenuState {
|
||||
status: session.kernel.status(),
|
||||
..fill_fields()
|
||||
},
|
||||
Kernel::RunningKernel(kernel) => match &kernel.execution_state {
|
||||
Kernel::RunningKernel(kernel) => match &kernel.execution_state() {
|
||||
ExecutionState::Idle => ReplMenuState {
|
||||
tooltip: format!("Run code on {} ({})", kernel_name, kernel_language).into(),
|
||||
indicator: Some(Indicator::dot().color(Color::Success)),
|
||||
|
||||
@@ -185,13 +185,13 @@ impl PickerDelegate for RecentProjectsDelegate {
|
||||
fn placeholder_text(&self, cx: &mut WindowContext) -> Arc<str> {
|
||||
let (create_window, reuse_window) = if self.create_new_window {
|
||||
(
|
||||
cx.keystroke_text_for(&menu::Confirm),
|
||||
cx.keystroke_text_for(&menu::SecondaryConfirm),
|
||||
cx.keystroke_text_for_action(&menu::Confirm),
|
||||
cx.keystroke_text_for_action(&menu::SecondaryConfirm),
|
||||
)
|
||||
} else {
|
||||
(
|
||||
cx.keystroke_text_for(&menu::SecondaryConfirm),
|
||||
cx.keystroke_text_for(&menu::Confirm),
|
||||
cx.keystroke_text_for_action(&menu::SecondaryConfirm),
|
||||
cx.keystroke_text_for_action(&menu::Confirm),
|
||||
)
|
||||
};
|
||||
Arc::from(format!(
|
||||
|
||||
@@ -1277,7 +1277,7 @@ fn build_project(ssh: Model<SshRemoteClient>, cx: &mut TestAppContext) -> Model<
|
||||
|
||||
let client = cx.update(|cx| {
|
||||
Client::new(
|
||||
Arc::new(FakeSystemClock::default()),
|
||||
Arc::new(FakeSystemClock::new()),
|
||||
FakeHttpClient::with_404_response(),
|
||||
cx,
|
||||
)
|
||||
|
||||
@@ -25,6 +25,8 @@ feature_flags.workspace = true
|
||||
futures.workspace = true
|
||||
gpui.workspace = true
|
||||
image.workspace = true
|
||||
jupyter-websocket-client.workspace = true
|
||||
jupyter-protocol.workspace = true
|
||||
language.workspace = true
|
||||
log.workspace = true
|
||||
markdown_preview.workspace = true
|
||||
|
||||
crates/repl/src/kernels/mod.rs (new file, 227 lines)
@@ -0,0 +1,227 @@
|
||||
mod native_kernel;
|
||||
use std::{fmt::Debug, future::Future, path::PathBuf};
|
||||
|
||||
use futures::{
|
||||
channel::mpsc::{self, Receiver},
|
||||
future::Shared,
|
||||
stream,
|
||||
};
|
||||
use gpui::{AppContext, Model, Task};
|
||||
use language::LanguageName;
|
||||
pub use native_kernel::*;
|
||||
|
||||
mod remote_kernels;
|
||||
use project::{Project, WorktreeId};
|
||||
pub use remote_kernels::*;
|
||||
|
||||
use anyhow::Result;
|
||||
use runtimelib::{ExecutionState, JupyterKernelspec, JupyterMessage, KernelInfoReply};
|
||||
use smol::process::Command;
|
||||
use ui::SharedString;
|
||||
|
||||
pub type JupyterMessageChannel = stream::SelectAll<Receiver<JupyterMessage>>;
|
||||
|
||||
#[derive(Debug, Clone, PartialEq, Eq)]
|
||||
pub enum KernelSpecification {
|
||||
Remote(RemoteKernelSpecification),
|
||||
Jupyter(LocalKernelSpecification),
|
||||
PythonEnv(LocalKernelSpecification),
|
||||
}
|
||||
|
||||
impl KernelSpecification {
|
||||
pub fn name(&self) -> SharedString {
|
||||
match self {
|
||||
Self::Jupyter(spec) => spec.name.clone().into(),
|
||||
Self::PythonEnv(spec) => spec.name.clone().into(),
|
||||
Self::Remote(spec) => spec.name.clone().into(),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn type_name(&self) -> SharedString {
|
||||
match self {
|
||||
Self::Jupyter(_) => "Jupyter".into(),
|
||||
Self::PythonEnv(_) => "Python Environment".into(),
|
||||
Self::Remote(_) => "Remote".into(),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn path(&self) -> SharedString {
|
||||
SharedString::from(match self {
|
||||
Self::Jupyter(spec) => spec.path.to_string_lossy().to_string(),
|
||||
Self::PythonEnv(spec) => spec.path.to_string_lossy().to_string(),
|
||||
Self::Remote(spec) => spec.url.to_string(),
|
||||
})
|
||||
}
|
||||
|
||||
pub fn language(&self) -> SharedString {
|
||||
SharedString::from(match self {
|
||||
Self::Jupyter(spec) => spec.kernelspec.language.clone(),
|
||||
Self::PythonEnv(spec) => spec.kernelspec.language.clone(),
|
||||
Self::Remote(spec) => spec.kernelspec.language.clone(),
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
pub fn python_env_kernel_specifications(
|
||||
project: &Model<Project>,
|
||||
worktree_id: WorktreeId,
|
||||
cx: &mut AppContext,
|
||||
) -> impl Future<Output = Result<Vec<KernelSpecification>>> {
|
||||
let python_language = LanguageName::new("Python");
|
||||
let toolchains = project
|
||||
.read(cx)
|
||||
.available_toolchains(worktree_id, python_language, cx);
|
||||
let background_executor = cx.background_executor().clone();
|
||||
|
||||
async move {
|
||||
let toolchains = if let Some(toolchains) = toolchains.await {
|
||||
toolchains
|
||||
} else {
|
||||
return Ok(Vec::new());
|
||||
};
|
||||
|
||||
let kernelspecs = toolchains.toolchains.into_iter().map(|toolchain| {
|
||||
background_executor.spawn(async move {
|
||||
let python_path = toolchain.path.to_string();
|
||||
|
||||
// Check if ipykernel is installed
|
||||
let ipykernel_check = Command::new(&python_path)
|
||||
.args(&["-c", "import ipykernel"])
|
||||
.output()
|
||||
.await;
|
||||
|
||||
if ipykernel_check.is_ok() && ipykernel_check.unwrap().status.success() {
|
||||
// Create a default kernelspec for this environment
|
||||
let default_kernelspec = JupyterKernelspec {
|
||||
argv: vec![
|
||||
python_path.clone(),
|
||||
"-m".to_string(),
|
||||
"ipykernel_launcher".to_string(),
|
||||
"-f".to_string(),
|
||||
"{connection_file}".to_string(),
|
||||
],
|
||||
display_name: toolchain.name.to_string(),
|
||||
language: "python".to_string(),
|
||||
interrupt_mode: None,
|
||||
metadata: None,
|
||||
env: None,
|
||||
};
|
||||
|
||||
Some(KernelSpecification::PythonEnv(LocalKernelSpecification {
|
||||
name: toolchain.name.to_string(),
|
||||
path: PathBuf::from(&python_path),
|
||||
kernelspec: default_kernelspec,
|
||||
}))
|
||||
} else {
|
||||
None
|
||||
}
|
||||
})
|
||||
});
|
||||
|
||||
let kernel_specs = futures::future::join_all(kernelspecs)
|
||||
.await
|
||||
.into_iter()
|
||||
.flatten()
|
||||
.collect();
|
||||
|
||||
anyhow::Ok(kernel_specs)
|
||||
}
|
||||
}
|
||||
|
||||
pub trait RunningKernel: Send + Debug {
|
||||
fn request_tx(&self) -> mpsc::Sender<JupyterMessage>;
|
||||
fn working_directory(&self) -> &PathBuf;
|
||||
fn execution_state(&self) -> &ExecutionState;
|
||||
fn set_execution_state(&mut self, state: ExecutionState);
|
||||
fn kernel_info(&self) -> Option<&KernelInfoReply>;
|
||||
fn set_kernel_info(&mut self, info: KernelInfoReply);
|
||||
fn force_shutdown(&mut self) -> anyhow::Result<()>;
|
||||
}
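To make the trait surface concrete, here is a hypothetical in-memory stub (not part of this change, imports assumed from the module above) that would satisfy `RunningKernel`, e.g. for exercising `Kernel::RunningKernel` without spawning a process:

```rust
// Hypothetical stub, for illustration only.
#[derive(Debug)]
struct StubKernel {
    request_tx: mpsc::Sender<JupyterMessage>,
    working_directory: PathBuf,
    execution_state: ExecutionState,
    kernel_info: Option<KernelInfoReply>,
}

impl RunningKernel for StubKernel {
    fn request_tx(&self) -> mpsc::Sender<JupyterMessage> {
        self.request_tx.clone()
    }
    fn working_directory(&self) -> &PathBuf {
        &self.working_directory
    }
    fn execution_state(&self) -> &ExecutionState {
        &self.execution_state
    }
    fn set_execution_state(&mut self, state: ExecutionState) {
        self.execution_state = state;
    }
    fn kernel_info(&self) -> Option<&KernelInfoReply> {
        self.kernel_info.as_ref()
    }
    fn set_kernel_info(&mut self, info: KernelInfoReply) {
        self.kernel_info = Some(info);
    }
    fn force_shutdown(&mut self) -> anyhow::Result<()> {
        Ok(()) // nothing to shut down
    }
}
```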
#[derive(Debug, Clone)]
|
||||
pub enum KernelStatus {
|
||||
Idle,
|
||||
Busy,
|
||||
Starting,
|
||||
Error,
|
||||
ShuttingDown,
|
||||
Shutdown,
|
||||
Restarting,
|
||||
}
|
||||
|
||||
impl KernelStatus {
|
||||
pub fn is_connected(&self) -> bool {
|
||||
match self {
|
||||
KernelStatus::Idle | KernelStatus::Busy => true,
|
||||
_ => false,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl ToString for KernelStatus {
|
||||
fn to_string(&self) -> String {
|
||||
match self {
|
||||
KernelStatus::Idle => "Idle".to_string(),
|
||||
KernelStatus::Busy => "Busy".to_string(),
|
||||
KernelStatus::Starting => "Starting".to_string(),
|
||||
KernelStatus::Error => "Error".to_string(),
|
||||
KernelStatus::ShuttingDown => "Shutting Down".to_string(),
|
||||
KernelStatus::Shutdown => "Shutdown".to_string(),
|
||||
KernelStatus::Restarting => "Restarting".to_string(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub enum Kernel {
|
||||
RunningKernel(Box<dyn RunningKernel>),
|
||||
StartingKernel(Shared<Task<()>>),
|
||||
ErroredLaunch(String),
|
||||
ShuttingDown,
|
||||
Shutdown,
|
||||
Restarting,
|
||||
}
|
||||
|
||||
impl From<&Kernel> for KernelStatus {
|
||||
fn from(kernel: &Kernel) -> Self {
|
||||
match kernel {
|
||||
Kernel::RunningKernel(kernel) => match kernel.execution_state() {
|
||||
ExecutionState::Idle => KernelStatus::Idle,
|
||||
ExecutionState::Busy => KernelStatus::Busy,
|
||||
},
|
||||
Kernel::StartingKernel(_) => KernelStatus::Starting,
|
||||
Kernel::ErroredLaunch(_) => KernelStatus::Error,
|
||||
Kernel::ShuttingDown => KernelStatus::ShuttingDown,
|
||||
Kernel::Shutdown => KernelStatus::Shutdown,
|
||||
Kernel::Restarting => KernelStatus::Restarting,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Kernel {
|
||||
pub fn status(&self) -> KernelStatus {
|
||||
self.into()
|
||||
}
|
||||
|
||||
pub fn set_execution_state(&mut self, status: &ExecutionState) {
|
||||
if let Kernel::RunningKernel(running_kernel) = self {
|
||||
running_kernel.set_execution_state(status.clone());
|
||||
}
|
||||
}
|
||||
|
||||
pub fn set_kernel_info(&mut self, kernel_info: &KernelInfoReply) {
|
||||
if let Kernel::RunningKernel(running_kernel) = self {
|
||||
running_kernel.set_kernel_info(kernel_info.clone());
|
||||
}
|
||||
}
|
||||
|
||||
pub fn is_shutting_down(&self) -> bool {
|
||||
match self {
|
||||
Kernel::Restarting | Kernel::ShuttingDown => true,
|
||||
Kernel::RunningKernel(_)
|
||||
| Kernel::StartingKernel(_)
|
||||
| Kernel::ErroredLaunch(_)
|
||||
| Kernel::Shutdown => false,
|
||||
}
|
||||
}
|
||||
}
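A small consumer sketch (the helper below is hypothetical) showing how the `From<&Kernel>` mapping and `KernelStatus::is_connected` compose:

```rust
// Hypothetical helper, for illustration only.
fn status_label(kernel: &Kernel) -> String {
    let status: KernelStatus = kernel.into();
    if status.is_connected() {
        format!("{} (connected)", status.to_string())
    } else {
        status.to_string()
    }
}
```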
@@ -1,69 +1,27 @@
|
||||
use anyhow::{Context as _, Result};
|
||||
use futures::{
|
||||
channel::mpsc::{self, Receiver},
|
||||
future::Shared,
|
||||
stream::{self, SelectAll, StreamExt},
|
||||
SinkExt as _,
|
||||
};
|
||||
use gpui::{AppContext, EntityId, Model, Task};
|
||||
use language::LanguageName;
|
||||
use project::{Fs, Project, WorktreeId};
|
||||
use runtimelib::{
|
||||
dirs, ConnectionInfo, ExecutionState, JupyterKernelspec, JupyterMessage, JupyterMessageContent,
|
||||
KernelInfoReply,
|
||||
channel::mpsc::{self},
|
||||
io::BufReader,
|
||||
stream::{SelectAll, StreamExt},
|
||||
AsyncBufReadExt as _, SinkExt as _,
|
||||
};
|
||||
use gpui::{EntityId, Task, View, WindowContext};
|
||||
use jupyter_protocol::{JupyterMessage, JupyterMessageContent, KernelInfoReply};
|
||||
use project::Fs;
|
||||
use runtimelib::{dirs, ConnectionInfo, ExecutionState, JupyterKernelspec};
|
||||
use smol::{net::TcpListener, process::Command};
|
||||
use std::{
|
||||
env,
|
||||
fmt::Debug,
|
||||
future::Future,
|
||||
net::{IpAddr, Ipv4Addr, SocketAddr},
|
||||
path::PathBuf,
|
||||
sync::Arc,
|
||||
};
|
||||
use ui::SharedString;
|
||||
use uuid::Uuid;
|
||||
|
||||
#[derive(Debug, Clone, PartialEq, Eq)]
|
||||
pub enum KernelSpecification {
|
||||
Remote(RemoteKernelSpecification),
|
||||
Jupyter(LocalKernelSpecification),
|
||||
PythonEnv(LocalKernelSpecification),
|
||||
}
|
||||
use crate::Session;
|
||||
|
||||
impl KernelSpecification {
|
||||
pub fn name(&self) -> SharedString {
|
||||
match self {
|
||||
Self::Jupyter(spec) => spec.name.clone().into(),
|
||||
Self::PythonEnv(spec) => spec.name.clone().into(),
|
||||
Self::Remote(spec) => spec.name.clone().into(),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn type_name(&self) -> SharedString {
|
||||
match self {
|
||||
Self::Jupyter(_) => "Jupyter".into(),
|
||||
Self::PythonEnv(_) => "Python Environment".into(),
|
||||
Self::Remote(_) => "Remote".into(),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn path(&self) -> SharedString {
|
||||
SharedString::from(match self {
|
||||
Self::Jupyter(spec) => spec.path.to_string_lossy().to_string(),
|
||||
Self::PythonEnv(spec) => spec.path.to_string_lossy().to_string(),
|
||||
Self::Remote(spec) => spec.url.to_string(),
|
||||
})
|
||||
}
|
||||
|
||||
pub fn language(&self) -> SharedString {
|
||||
SharedString::from(match self {
|
||||
Self::Jupyter(spec) => spec.kernelspec.language.clone(),
|
||||
Self::PythonEnv(spec) => spec.kernelspec.language.clone(),
|
||||
Self::Remote(spec) => spec.kernelspec.language.clone(),
|
||||
})
|
||||
}
|
||||
}
|
||||
use super::RunningKernel;
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct LocalKernelSpecification {
|
||||
@@ -80,22 +38,6 @@ impl PartialEq for LocalKernelSpecification {
|
||||
|
||||
impl Eq for LocalKernelSpecification {}
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct RemoteKernelSpecification {
|
||||
pub name: String,
|
||||
pub url: String,
|
||||
pub token: String,
|
||||
pub kernelspec: JupyterKernelspec,
|
||||
}
|
||||
|
||||
impl PartialEq for RemoteKernelSpecification {
|
||||
fn eq(&self, other: &Self) -> bool {
|
||||
self.name == other.name && self.url == other.url
|
||||
}
|
||||
}
|
||||
|
||||
impl Eq for RemoteKernelSpecification {}
|
||||
|
||||
impl LocalKernelSpecification {
|
||||
#[must_use]
|
||||
fn command(&self, connection_path: &PathBuf) -> Result<Command> {
|
||||
@@ -147,110 +89,20 @@ async fn peek_ports(ip: IpAddr) -> Result<[u16; 5]> {
|
||||
Ok(ports)
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
pub enum KernelStatus {
|
||||
Idle,
|
||||
Busy,
|
||||
Starting,
|
||||
Error,
|
||||
ShuttingDown,
|
||||
Shutdown,
|
||||
Restarting,
|
||||
}
|
||||
|
||||
impl KernelStatus {
|
||||
pub fn is_connected(&self) -> bool {
|
||||
match self {
|
||||
KernelStatus::Idle | KernelStatus::Busy => true,
|
||||
_ => false,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl ToString for KernelStatus {
|
||||
fn to_string(&self) -> String {
|
||||
match self {
|
||||
KernelStatus::Idle => "Idle".to_string(),
|
||||
KernelStatus::Busy => "Busy".to_string(),
|
||||
KernelStatus::Starting => "Starting".to_string(),
|
||||
KernelStatus::Error => "Error".to_string(),
|
||||
KernelStatus::ShuttingDown => "Shutting Down".to_string(),
|
||||
KernelStatus::Shutdown => "Shutdown".to_string(),
|
||||
KernelStatus::Restarting => "Restarting".to_string(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl From<&Kernel> for KernelStatus {
|
||||
fn from(kernel: &Kernel) -> Self {
|
||||
match kernel {
|
||||
Kernel::RunningKernel(kernel) => match kernel.execution_state {
|
||||
ExecutionState::Idle => KernelStatus::Idle,
|
||||
ExecutionState::Busy => KernelStatus::Busy,
|
||||
},
|
||||
Kernel::StartingKernel(_) => KernelStatus::Starting,
|
||||
Kernel::ErroredLaunch(_) => KernelStatus::Error,
|
||||
Kernel::ShuttingDown => KernelStatus::ShuttingDown,
|
||||
Kernel::Shutdown => KernelStatus::Shutdown,
|
||||
Kernel::Restarting => KernelStatus::Restarting,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub enum Kernel {
|
||||
RunningKernel(RunningKernel),
|
||||
StartingKernel(Shared<Task<()>>),
|
||||
ErroredLaunch(String),
|
||||
ShuttingDown,
|
||||
Shutdown,
|
||||
Restarting,
|
||||
}
|
||||
|
||||
impl Kernel {
|
||||
pub fn status(&self) -> KernelStatus {
|
||||
self.into()
|
||||
}
|
||||
|
||||
pub fn set_execution_state(&mut self, status: &ExecutionState) {
|
||||
if let Kernel::RunningKernel(running_kernel) = self {
|
||||
running_kernel.execution_state = status.clone();
|
||||
}
|
||||
}
|
||||
|
||||
pub fn set_kernel_info(&mut self, kernel_info: &KernelInfoReply) {
|
||||
if let Kernel::RunningKernel(running_kernel) = self {
|
||||
running_kernel.kernel_info = Some(kernel_info.clone());
|
||||
}
|
||||
}
|
||||
|
||||
pub fn is_shutting_down(&self) -> bool {
|
||||
match self {
|
||||
Kernel::Restarting | Kernel::ShuttingDown => true,
|
||||
Kernel::RunningKernel(_)
|
||||
| Kernel::StartingKernel(_)
|
||||
| Kernel::ErroredLaunch(_)
|
||||
| Kernel::Shutdown => false,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub struct RunningKernel {
|
||||
pub struct NativeRunningKernel {
|
||||
pub process: smol::process::Child,
|
||||
_shell_task: Task<Result<()>>,
|
||||
_iopub_task: Task<Result<()>>,
|
||||
_control_task: Task<Result<()>>,
|
||||
_routing_task: Task<Result<()>>,
|
||||
connection_path: PathBuf,
|
||||
_process_status_task: Option<Task<()>>,
|
||||
pub working_directory: PathBuf,
|
||||
pub request_tx: mpsc::Sender<JupyterMessage>,
|
||||
pub execution_state: ExecutionState,
|
||||
pub kernel_info: Option<KernelInfoReply>,
|
||||
}
|
||||
|
||||
type JupyterMessageChannel = stream::SelectAll<Receiver<JupyterMessage>>;
|
||||
|
||||
impl Debug for RunningKernel {
|
||||
impl Debug for NativeRunningKernel {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
f.debug_struct("RunningKernel")
|
||||
.field("process", &self.process)
|
||||
@@ -258,25 +110,16 @@ impl Debug for RunningKernel {
|
||||
}
|
||||
}
|
||||
|
||||
impl RunningKernel {
|
||||
impl NativeRunningKernel {
|
||||
pub fn new(
|
||||
kernel_specification: KernelSpecification,
|
||||
kernel_specification: LocalKernelSpecification,
|
||||
entity_id: EntityId,
|
||||
working_directory: PathBuf,
|
||||
fs: Arc<dyn Fs>,
|
||||
cx: &mut AppContext,
|
||||
) -> Task<Result<(Self, JupyterMessageChannel)>> {
|
||||
let kernel_specification = match kernel_specification {
|
||||
KernelSpecification::Jupyter(spec) => spec,
|
||||
KernelSpecification::PythonEnv(spec) => spec,
|
||||
KernelSpecification::Remote(_spec) => {
|
||||
// todo!(): Implement remote kernel specification
|
||||
return Task::ready(Err(anyhow::anyhow!(
|
||||
"Running remote kernels is not supported"
|
||||
)));
|
||||
}
|
||||
};
|
||||
|
||||
// todo: convert to weak view
|
||||
session: View<Session>,
|
||||
cx: &mut WindowContext,
|
||||
) -> Task<Result<Box<dyn RunningKernel>>> {
|
||||
cx.spawn(|cx| async move {
|
||||
let ip = IpAddr::V4(Ipv4Addr::new(127, 0, 0, 1));
|
||||
let ports = peek_ports(ip).await?;
|
||||
@@ -304,7 +147,7 @@ impl RunningKernel {
|
||||
|
||||
let mut cmd = kernel_specification.command(&connection_path)?;
|
||||
|
||||
let process = cmd
|
||||
let mut process = cmd
|
||||
.current_dir(&working_directory)
|
||||
.stdout(std::process::Stdio::piped())
|
||||
.stderr(std::process::Stdio::piped())
|
||||
@@ -315,17 +158,13 @@ impl RunningKernel {
|
||||
|
||||
let session_id = Uuid::new_v4().to_string();
|
||||
|
||||
let mut iopub_socket = connection_info
|
||||
.create_client_iopub_connection("", &session_id)
|
||||
.await?;
|
||||
let mut shell_socket = connection_info
|
||||
.create_client_shell_connection(&session_id)
|
||||
.await?;
|
||||
let mut control_socket = connection_info
|
||||
.create_client_control_connection(&session_id)
|
||||
.await?;
|
||||
|
||||
let (mut iopub, iosub) = futures::channel::mpsc::channel(100);
|
||||
let mut iopub_socket =
|
||||
runtimelib::create_client_iopub_connection(&connection_info, "", &session_id)
|
||||
.await?;
|
||||
let mut shell_socket =
|
||||
runtimelib::create_client_shell_connection(&connection_info, &session_id).await?;
|
||||
let mut control_socket =
|
||||
runtimelib::create_client_control_connection(&connection_info, &session_id).await?;
|
||||
|
||||
let (request_tx, mut request_rx) =
|
||||
futures::channel::mpsc::channel::<JupyterMessage>(100);
|
||||
@@ -334,18 +173,41 @@ impl RunningKernel {
|
||||
let (mut shell_reply_tx, shell_reply_rx) = futures::channel::mpsc::channel(100);
|
||||
|
||||
let mut messages_rx = SelectAll::new();
|
||||
messages_rx.push(iosub);
|
||||
messages_rx.push(control_reply_rx);
|
||||
messages_rx.push(shell_reply_rx);
|
||||
|
||||
let iopub_task = cx.background_executor().spawn({
|
||||
async move {
|
||||
while let Ok(message) = iopub_socket.read().await {
|
||||
iopub.send(message).await?;
|
||||
cx.spawn({
|
||||
let session = session.clone();
|
||||
|
||||
|mut cx| async move {
|
||||
while let Some(message) = messages_rx.next().await {
|
||||
session
|
||||
.update(&mut cx, |session, cx| {
|
||||
session.route(&message, cx);
|
||||
})
|
||||
.ok();
|
||||
}
|
||||
anyhow::Ok(())
|
||||
}
|
||||
});
|
||||
})
|
||||
.detach();
|
||||
|
||||
// iopub task
|
||||
cx.spawn({
|
||||
let session = session.clone();
|
||||
|
||||
|mut cx| async move {
|
||||
while let Ok(message) = iopub_socket.read().await {
|
||||
session
|
||||
.update(&mut cx, |session, cx| {
|
||||
session.route(&message, cx);
|
||||
})
|
||||
.ok();
|
||||
}
|
||||
anyhow::Ok(())
|
||||
}
|
||||
})
|
||||
.detach();
|
||||
|
||||
let (mut control_request_tx, mut control_request_rx) =
|
||||
futures::channel::mpsc::channel(100);
|
||||
@@ -391,26 +253,115 @@ impl RunningKernel {
|
||||
}
|
||||
});
|
||||
|
||||
anyhow::Ok((
|
||||
Self {
|
||||
process,
|
||||
request_tx,
|
||||
working_directory,
|
||||
_shell_task: shell_task,
|
||||
_iopub_task: iopub_task,
|
||||
_control_task: control_task,
|
||||
_routing_task: routing_task,
|
||||
connection_path,
|
||||
execution_state: ExecutionState::Idle,
|
||||
kernel_info: None,
|
||||
},
|
||||
messages_rx,
|
||||
))
|
||||
let stderr = process.stderr.take();
|
||||
|
||||
cx.spawn(|mut _cx| async move {
|
||||
if stderr.is_none() {
|
||||
return;
|
||||
}
|
||||
let reader = BufReader::new(stderr.unwrap());
|
||||
let mut lines = reader.lines();
|
||||
while let Some(Ok(line)) = lines.next().await {
|
||||
log::error!("kernel: {}", line);
|
||||
}
|
||||
})
|
||||
.detach();
|
||||
|
||||
let stdout = process.stdout.take();
|
||||
|
||||
cx.spawn(|mut _cx| async move {
|
||||
if stdout.is_none() {
|
||||
return;
|
||||
}
|
||||
let reader = BufReader::new(stdout.unwrap());
|
||||
let mut lines = reader.lines();
|
||||
while let Some(Ok(line)) = lines.next().await {
|
||||
log::info!("kernel: {}", line);
|
||||
}
|
||||
})
|
||||
.detach();
|
||||
|
||||
let status = process.status();
|
||||
|
||||
let process_status_task = cx.spawn(|mut cx| async move {
|
||||
let error_message = match status.await {
|
||||
Ok(status) => {
|
||||
if status.success() {
|
||||
log::info!("kernel process exited successfully");
|
||||
return;
|
||||
}
|
||||
|
||||
format!("kernel process exited with status: {:?}", status)
|
||||
}
|
||||
Err(err) => {
|
||||
format!("kernel process exited with error: {:?}", err)
|
||||
}
|
||||
};
|
||||
|
||||
log::error!("{}", error_message);
|
||||
|
||||
session
|
||||
.update(&mut cx, |session, cx| {
|
||||
session.kernel_errored(error_message, cx);
|
||||
|
||||
cx.notify();
|
||||
})
|
||||
.ok();
|
||||
});
|
||||
|
||||
anyhow::Ok(Box::new(Self {
|
||||
process,
|
||||
request_tx,
|
||||
working_directory,
|
||||
_process_status_task: Some(process_status_task),
|
||||
_shell_task: shell_task,
|
||||
_control_task: control_task,
|
||||
_routing_task: routing_task,
|
||||
connection_path,
|
||||
execution_state: ExecutionState::Idle,
|
||||
kernel_info: None,
|
||||
}) as Box<dyn RunningKernel>)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
impl Drop for RunningKernel {
|
||||
impl RunningKernel for NativeRunningKernel {
|
||||
fn request_tx(&self) -> mpsc::Sender<JupyterMessage> {
|
||||
self.request_tx.clone()
|
||||
}
|
||||
|
||||
fn working_directory(&self) -> &PathBuf {
|
||||
&self.working_directory
|
||||
}
|
||||
|
||||
fn execution_state(&self) -> &ExecutionState {
|
||||
&self.execution_state
|
||||
}
|
||||
|
||||
fn set_execution_state(&mut self, state: ExecutionState) {
|
||||
self.execution_state = state;
|
||||
}
|
||||
|
||||
fn kernel_info(&self) -> Option<&KernelInfoReply> {
|
||||
self.kernel_info.as_ref()
|
||||
}
|
||||
|
||||
fn set_kernel_info(&mut self, info: KernelInfoReply) {
|
||||
self.kernel_info = Some(info);
|
||||
}
|
||||
|
||||
fn force_shutdown(&mut self) -> anyhow::Result<()> {
|
||||
match self.process.kill() {
|
||||
Ok(_) => Ok(()),
|
||||
Err(error) => Err(anyhow::anyhow!(
|
||||
"Failed to kill the kernel process: {}",
|
||||
error
|
||||
)),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Drop for NativeRunningKernel {
|
||||
fn drop(&mut self) {
|
||||
std::fs::remove_file(&self.connection_path).ok();
|
||||
self.request_tx.close_channel();
|
||||
@@ -467,72 +418,6 @@ async fn read_kernels_dir(path: PathBuf, fs: &dyn Fs) -> Result<Vec<LocalKernelS
|
||||
Ok(valid_kernelspecs)
|
||||
}
|
||||
|
||||
pub fn python_env_kernel_specifications(
|
||||
project: &Model<Project>,
|
||||
worktree_id: WorktreeId,
|
||||
cx: &mut AppContext,
|
||||
) -> impl Future<Output = Result<Vec<KernelSpecification>>> {
|
||||
let python_language = LanguageName::new("Python");
|
||||
let toolchains = project
|
||||
.read(cx)
|
||||
.available_toolchains(worktree_id, python_language, cx);
|
||||
let background_executor = cx.background_executor().clone();
|
||||
|
||||
async move {
|
||||
let toolchains = if let Some(toolchains) = toolchains.await {
|
||||
toolchains
|
||||
} else {
|
||||
return Ok(Vec::new());
|
||||
};
|
||||
|
||||
let kernelspecs = toolchains.toolchains.into_iter().map(|toolchain| {
|
||||
background_executor.spawn(async move {
|
||||
let python_path = toolchain.path.to_string();
|
||||
|
||||
// Check if ipykernel is installed
|
||||
let ipykernel_check = Command::new(&python_path)
|
||||
.args(&["-c", "import ipykernel"])
|
||||
.output()
|
||||
.await;
|
||||
|
||||
if ipykernel_check.is_ok() && ipykernel_check.unwrap().status.success() {
|
||||
// Create a default kernelspec for this environment
|
||||
let default_kernelspec = JupyterKernelspec {
|
||||
argv: vec![
|
||||
python_path.clone(),
|
||||
"-m".to_string(),
|
||||
"ipykernel_launcher".to_string(),
|
||||
"-f".to_string(),
|
||||
"{connection_file}".to_string(),
|
||||
],
|
||||
display_name: toolchain.name.to_string(),
|
||||
language: "python".to_string(),
|
||||
interrupt_mode: None,
|
||||
metadata: None,
|
||||
env: None,
|
||||
};
|
||||
|
||||
Some(KernelSpecification::PythonEnv(LocalKernelSpecification {
|
||||
name: toolchain.name.to_string(),
|
||||
path: PathBuf::from(&python_path),
|
||||
kernelspec: default_kernelspec,
|
||||
}))
|
||||
} else {
|
||||
None
|
||||
}
|
||||
})
|
||||
});
|
||||
|
||||
let kernel_specs = futures::future::join_all(kernelspecs)
|
||||
.await
|
||||
.into_iter()
|
||||
.flatten()
|
||||
.collect();
|
||||
|
||||
anyhow::Ok(kernel_specs)
|
||||
}
|
||||
}
|
||||
|
||||
pub async fn local_kernel_specifications(fs: Arc<dyn Fs>) -> Result<Vec<LocalKernelSpecification>> {
|
||||
let mut data_dirs = dirs::data_dirs();
|
||||
|
||||
148
crates/repl/src/kernels/remote_kernels.rs
Normal file
@@ -0,0 +1,148 @@
use futures::{channel::mpsc, SinkExt as _, StreamExt as _};
use gpui::{Task, View, WindowContext};
use jupyter_protocol::{ExecutionState, JupyterMessage, KernelInfoReply};
use runtimelib::JupyterKernelspec;

use crate::Session;

use super::RunningKernel;
use anyhow::Result;
use jupyter_websocket_client::{JupyterWebSocketReader, JupyterWebSocketWriter, RemoteServer};
use std::fmt::Debug;

#[derive(Debug, Clone)]
pub struct RemoteKernelSpecification {
    pub name: String,
    pub url: String,
    pub token: String,
    pub kernelspec: JupyterKernelspec,
}

impl PartialEq for RemoteKernelSpecification {
    fn eq(&self, other: &Self) -> bool {
        self.name == other.name && self.url == other.url
    }
}

impl Eq for RemoteKernelSpecification {}

pub struct RemoteRunningKernel {
    remote_server: RemoteServer,
    _receiving_task: Task<Result<()>>,
    _routing_task: Task<Result<()>>,
    pub working_directory: std::path::PathBuf,
    pub request_tx: mpsc::Sender<JupyterMessage>,
    pub execution_state: ExecutionState,
    pub kernel_info: Option<KernelInfoReply>,
}

impl RemoteRunningKernel {
    pub fn new(
        kernelspec: RemoteKernelSpecification,
        working_directory: std::path::PathBuf,
        session: View<Session>,
        cx: &mut WindowContext,
    ) -> Task<Result<Box<dyn RunningKernel>>> {
        let remote_server = RemoteServer {
            base_url: kernelspec.url,
            token: kernelspec.token,
        };
        cx.spawn(|cx| async move {
            // todo: launch a kernel to get a kernel ID
            let kernel_id = "d77b481b-2f14-4528-af0a-6c4c9ca98085";

            let kernel_socket = remote_server.connect_to_kernel(kernel_id).await?;

            let (mut w, mut r): (JupyterWebSocketWriter, JupyterWebSocketReader) =
                kernel_socket.split();

            let (request_tx, mut request_rx) =
                futures::channel::mpsc::channel::<JupyterMessage>(100);

            let routing_task = cx.background_executor().spawn({
                async move {
                    while let Some(message) = request_rx.next().await {
                        w.send(message).await.ok();
                    }
                    Ok(())
                }
            });

            let receiving_task = cx.spawn({
                let session = session.clone();

                |mut cx| async move {
                    while let Some(message) = r.next().await {
                        match message {
                            Ok(message) => {
                                session
                                    .update(&mut cx, |session, cx| {
                                        session.route(&message, cx);
                                    })
                                    .ok();
                            }
                            Err(e) => {
                                log::error!("Error receiving message: {:?}", e);
                            }
                        }
                    }
                    Ok(())
                }
            });

            anyhow::Ok(Box::new(Self {
                _routing_task: routing_task,
                _receiving_task: receiving_task,
                remote_server,
                working_directory,
                request_tx,
                // todo(kyle): pull this from the kernel API to start with
                execution_state: ExecutionState::Idle,
                kernel_info: None,
            }) as Box<dyn RunningKernel>)
        })
    }
}

impl Debug for RemoteRunningKernel {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.debug_struct("RemoteRunningKernel")
            // custom debug that keeps tokens out of logs
            .field("remote_server url", &self.remote_server.base_url)
            .field("working_directory", &self.working_directory)
            .field("request_tx", &self.request_tx)
            .field("execution_state", &self.execution_state)
            .field("kernel_info", &self.kernel_info)
            .finish()
    }
}

impl RunningKernel for RemoteRunningKernel {
    fn request_tx(&self) -> futures::channel::mpsc::Sender<runtimelib::JupyterMessage> {
        self.request_tx.clone()
    }

    fn working_directory(&self) -> &std::path::PathBuf {
        &self.working_directory
    }

    fn execution_state(&self) -> &runtimelib::ExecutionState {
        &self.execution_state
    }

    fn set_execution_state(&mut self, state: runtimelib::ExecutionState) {
        self.execution_state = state;
    }

    fn kernel_info(&self) -> Option<&runtimelib::KernelInfoReply> {
        self.kernel_info.as_ref()
    }

    fn set_kernel_info(&mut self, info: runtimelib::KernelInfoReply) {
        self.kernel_info = Some(info);
    }

    fn force_shutdown(&mut self) -> anyhow::Result<()> {
        unimplemented!("force_shutdown")
    }
}
@@ -1,6 +1,6 @@
pub mod components;
mod jupyter_settings;
mod kernels;
pub mod kernels;
pub mod notebook;
mod outputs;
mod repl_editor;

@@ -1,7 +1,8 @@
|
||||
use crate::components::KernelListItem;
|
||||
use crate::kernels::{RemoteKernelSpecification, RemoteRunningKernel};
|
||||
use crate::setup_editor_session_actions;
|
||||
use crate::{
|
||||
kernels::{Kernel, KernelSpecification, RunningKernel},
|
||||
kernels::{Kernel, KernelSpecification, NativeRunningKernel},
|
||||
outputs::{ExecutionStatus, ExecutionView},
|
||||
KernelStatus,
|
||||
};
|
||||
@@ -15,16 +16,15 @@ use editor::{
|
||||
scroll::Autoscroll,
|
||||
Anchor, AnchorRangeExt as _, Editor, MultiBuffer, ToPoint,
|
||||
};
|
||||
use futures::io::BufReader;
|
||||
use futures::{AsyncBufReadExt as _, FutureExt as _, StreamExt as _};
|
||||
use futures::FutureExt as _;
|
||||
use gpui::{
|
||||
div, prelude::*, EventEmitter, Model, Render, Subscription, Task, View, ViewContext, WeakView,
|
||||
};
|
||||
use language::Point;
|
||||
use project::Fs;
|
||||
use runtimelib::{
|
||||
ExecuteRequest, ExecutionState, InterruptRequest, JupyterMessage, JupyterMessageContent,
|
||||
ShutdownRequest,
|
||||
ExecuteRequest, ExecutionState, InterruptRequest, JupyterKernelspec, JupyterMessage,
|
||||
JupyterMessageContent, ShutdownRequest,
|
||||
};
|
||||
use std::{env::temp_dir, ops::Range, sync::Arc, time::Duration};
|
||||
use theme::ActiveTheme;
|
||||
@@ -35,8 +35,6 @@ pub struct Session {
|
||||
editor: WeakView<Editor>,
|
||||
pub kernel: Kernel,
|
||||
blocks: HashMap<String, EditorBlock>,
|
||||
messaging_task: Option<Task<()>>,
|
||||
process_status_task: Option<Task<()>>,
|
||||
pub kernel_specification: KernelSpecification,
|
||||
telemetry: Arc<Telemetry>,
|
||||
_buffer_subscription: Subscription,
|
||||
@@ -215,12 +213,31 @@ impl Session {
|
||||
})
|
||||
.ok();
|
||||
|
||||
// Creating a baked in kernel specification to see if remoting is working
|
||||
let kernel_specification = KernelSpecification::Remote(RemoteKernelSpecification {
|
||||
name: "todo".to_string(),
|
||||
url: "http://localhost:8888/".to_string(),
|
||||
token: std::env::var("JUPYTER_TOKEN").expect("JUPYTER_TOKEN not set"),
|
||||
kernelspec: JupyterKernelspec {
|
||||
argv: vec![
|
||||
"python".to_string(),
|
||||
"-m".to_string(),
|
||||
"ipykernel_launcher".to_string(),
|
||||
"-f".to_string(),
|
||||
"{connection_file}".to_string(),
|
||||
],
|
||||
env: None,
|
||||
display_name: "Python 3 (ipykernel)".to_string(),
|
||||
language: "python".to_string(),
|
||||
interrupt_mode: Some("signal".to_string()),
|
||||
metadata: None,
|
||||
},
|
||||
});
|
||||
|
||||
let mut session = Self {
|
||||
fs,
|
||||
editor,
|
||||
kernel: Kernel::StartingKernel(Task::ready(()).shared()),
|
||||
messaging_task: None,
|
||||
process_status_task: None,
|
||||
blocks: HashMap::default(),
|
||||
kernel_specification,
|
||||
_buffer_subscription: subscription,
|
||||
@@ -246,132 +263,40 @@ impl Session {
|
||||
cx.entity_id().to_string(),
|
||||
);
|
||||
|
||||
let kernel = RunningKernel::new(
|
||||
self.kernel_specification.clone(),
|
||||
entity_id,
|
||||
working_directory,
|
||||
self.fs.clone(),
|
||||
cx,
|
||||
);
|
||||
let session_view = cx.view().clone();
|
||||
|
||||
let kernel = match self.kernel_specification.clone() {
|
||||
KernelSpecification::Jupyter(kernel_specification)
|
||||
| KernelSpecification::PythonEnv(kernel_specification) => NativeRunningKernel::new(
|
||||
kernel_specification,
|
||||
entity_id,
|
||||
working_directory,
|
||||
self.fs.clone(),
|
||||
session_view,
|
||||
cx,
|
||||
),
|
||||
KernelSpecification::Remote(remote_kernel_specification) => RemoteRunningKernel::new(
|
||||
remote_kernel_specification,
|
||||
working_directory,
|
||||
session_view,
|
||||
cx,
|
||||
),
|
||||
};
|
||||
|
||||
let pending_kernel = cx
|
||||
.spawn(|this, mut cx| async move {
|
||||
let kernel = kernel.await;
|
||||
|
||||
match kernel {
|
||||
Ok((mut kernel, mut messages_rx)) => {
|
||||
Ok(kernel) => {
|
||||
this.update(&mut cx, |session, cx| {
|
||||
let stderr = kernel.process.stderr.take();
|
||||
|
||||
cx.spawn(|_session, mut _cx| async move {
|
||||
if stderr.is_none() {
|
||||
return;
|
||||
}
|
||||
let reader = BufReader::new(stderr.unwrap());
|
||||
let mut lines = reader.lines();
|
||||
while let Some(Ok(line)) = lines.next().await {
|
||||
// todo!(): Log stdout and stderr to something the session can show
|
||||
log::error!("kernel: {}", line);
|
||||
}
|
||||
})
|
||||
.detach();
|
||||
|
||||
let stdout = kernel.process.stdout.take();
|
||||
|
||||
cx.spawn(|_session, mut _cx| async move {
|
||||
if stdout.is_none() {
|
||||
return;
|
||||
}
|
||||
let reader = BufReader::new(stdout.unwrap());
|
||||
let mut lines = reader.lines();
|
||||
while let Some(Ok(line)) = lines.next().await {
|
||||
log::info!("kernel: {}", line);
|
||||
}
|
||||
})
|
||||
.detach();
|
||||
|
||||
let status = kernel.process.status();
|
||||
session.kernel(Kernel::RunningKernel(kernel), cx);
|
||||
|
||||
let process_status_task = cx.spawn(|session, mut cx| async move {
|
||||
let error_message = match status.await {
|
||||
Ok(status) => {
|
||||
if status.success() {
|
||||
log::info!("kernel process exited successfully");
|
||||
return;
|
||||
}
|
||||
|
||||
format!("kernel process exited with status: {:?}", status)
|
||||
}
|
||||
Err(err) => {
|
||||
format!("kernel process exited with error: {:?}", err)
|
||||
}
|
||||
};
|
||||
|
||||
log::error!("{}", error_message);
|
||||
|
||||
session
|
||||
.update(&mut cx, |session, cx| {
|
||||
session.kernel(
|
||||
Kernel::ErroredLaunch(error_message.clone()),
|
||||
cx,
|
||||
);
|
||||
|
||||
session.blocks.values().for_each(|block| {
|
||||
block.execution_view.update(
|
||||
cx,
|
||||
|execution_view, cx| {
|
||||
match execution_view.status {
|
||||
ExecutionStatus::Finished => {
|
||||
// Do nothing when the output was good
|
||||
}
|
||||
_ => {
|
||||
// All other cases, set the status to errored
|
||||
execution_view.status =
|
||||
ExecutionStatus::KernelErrored(
|
||||
error_message.clone(),
|
||||
)
|
||||
}
|
||||
}
|
||||
cx.notify();
|
||||
},
|
||||
);
|
||||
});
|
||||
|
||||
cx.notify();
|
||||
})
|
||||
.ok();
|
||||
});
|
||||
|
||||
session.process_status_task = Some(process_status_task);
|
||||
|
||||
session.messaging_task = Some(cx.spawn(|session, mut cx| async move {
|
||||
while let Some(message) = messages_rx.next().await {
|
||||
session
|
||||
.update(&mut cx, |session, cx| {
|
||||
session.route(&message, cx);
|
||||
})
|
||||
.ok();
|
||||
}
|
||||
}));
|
||||
|
||||
// todo!(@rgbkrk): send KernelInfoRequest once our shell channel read/writes are split
|
||||
// cx.spawn(|this, mut cx| async move {
|
||||
// cx.background_executor()
|
||||
// .timer(Duration::from_millis(120))
|
||||
// .await;
|
||||
// this.update(&mut cx, |this, cx| {
|
||||
// this.send(KernelInfoRequest {}.into(), cx).ok();
|
||||
// })
|
||||
// .ok();
|
||||
// })
|
||||
// .detach();
|
||||
})
|
||||
.ok();
|
||||
}
|
||||
Err(err) => {
|
||||
this.update(&mut cx, |session, cx| {
|
||||
session.kernel(Kernel::ErroredLaunch(err.to_string()), cx);
|
||||
session.kernel_errored(err.to_string(), cx);
|
||||
})
|
||||
.ok();
|
||||
}
|
||||
@@ -383,6 +308,26 @@ impl Session {
|
||||
cx.notify();
|
||||
}
|
||||
|
||||
pub fn kernel_errored(&mut self, error_message: String, cx: &mut ViewContext<Self>) {
|
||||
self.kernel(Kernel::ErroredLaunch(error_message.clone()), cx);
|
||||
|
||||
self.blocks.values().for_each(|block| {
|
||||
block.execution_view.update(cx, |execution_view, cx| {
|
||||
match execution_view.status {
|
||||
ExecutionStatus::Finished => {
|
||||
// Do nothing when the output was good
|
||||
}
|
||||
_ => {
|
||||
// All other cases, set the status to errored
|
||||
execution_view.status =
|
||||
ExecutionStatus::KernelErrored(error_message.clone())
|
||||
}
|
||||
}
|
||||
cx.notify();
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
fn on_buffer_event(
|
||||
&mut self,
|
||||
buffer: Model<MultiBuffer>,
|
||||
@@ -416,7 +361,7 @@ impl Session {
|
||||
|
||||
fn send(&mut self, message: JupyterMessage, _cx: &mut ViewContext<Self>) -> anyhow::Result<()> {
|
||||
if let Kernel::RunningKernel(kernel) = &mut self.kernel {
|
||||
kernel.request_tx.try_send(message).ok();
|
||||
kernel.request_tx().try_send(message).ok();
|
||||
}
|
||||
|
||||
anyhow::Ok(())
|
||||
@@ -553,7 +498,7 @@ impl Session {
|
||||
}
|
||||
}
|
||||
|
||||
fn route(&mut self, message: &JupyterMessage, cx: &mut ViewContext<Self>) {
|
||||
pub fn route(&mut self, message: &JupyterMessage, cx: &mut ViewContext<Self>) {
|
||||
let parent_message_id = match message.parent_header.as_ref() {
|
||||
Some(header) => &header.msg_id,
|
||||
None => return,
|
||||
@@ -631,7 +576,7 @@ impl Session {
|
||||
|
||||
match kernel {
|
||||
Kernel::RunningKernel(mut kernel) => {
|
||||
let mut request_tx = kernel.request_tx.clone();
|
||||
let mut request_tx = kernel.request_tx().clone();
|
||||
|
||||
cx.spawn(|this, mut cx| async move {
|
||||
let message: JupyterMessage = ShutdownRequest { restart: false }.into();
|
||||
@@ -640,13 +585,7 @@ impl Session {
|
||||
// Give the kernel a bit of time to clean up
|
||||
cx.background_executor().timer(Duration::from_secs(3)).await;
|
||||
|
||||
this.update(&mut cx, |session, _cx| {
|
||||
session.messaging_task.take();
|
||||
session.process_status_task.take();
|
||||
})
|
||||
.ok();
|
||||
|
||||
kernel.process.kill().ok();
|
||||
kernel.force_shutdown().ok();
|
||||
|
||||
this.update(&mut cx, |session, cx| {
|
||||
session.clear_outputs(cx);
|
||||
@@ -658,8 +597,6 @@ impl Session {
|
||||
.detach();
|
||||
}
|
||||
_ => {
|
||||
self.messaging_task.take();
|
||||
self.process_status_task.take();
|
||||
self.kernel(Kernel::Shutdown, cx);
|
||||
}
|
||||
}
|
||||
@@ -674,7 +611,7 @@ impl Session {
|
||||
// Do nothing if already restarting
|
||||
}
|
||||
Kernel::RunningKernel(mut kernel) => {
|
||||
let mut request_tx = kernel.request_tx.clone();
|
||||
let mut request_tx = kernel.request_tx().clone();
|
||||
|
||||
cx.spawn(|this, mut cx| async move {
|
||||
// Send shutdown request with restart flag
|
||||
@@ -682,17 +619,11 @@ impl Session {
|
||||
let message: JupyterMessage = ShutdownRequest { restart: true }.into();
|
||||
request_tx.try_send(message).ok();
|
||||
|
||||
this.update(&mut cx, |session, _cx| {
|
||||
session.messaging_task.take();
|
||||
session.process_status_task.take();
|
||||
})
|
||||
.ok();
|
||||
|
||||
// Wait for kernel to shutdown
|
||||
cx.background_executor().timer(Duration::from_secs(1)).await;
|
||||
|
||||
// Force kill the kernel if it hasn't shut down
|
||||
kernel.process.kill().ok();
|
||||
kernel.force_shutdown().ok();
|
||||
|
||||
// Start a new kernel
|
||||
this.update(&mut cx, |session, cx| {
|
||||
@@ -705,9 +636,6 @@ impl Session {
|
||||
.detach();
|
||||
}
|
||||
_ => {
|
||||
// If it's not already running, we can just clean up and start a new kernel
|
||||
self.messaging_task.take();
|
||||
self.process_status_task.take();
|
||||
self.clear_outputs(cx);
|
||||
self.start_kernel(cx);
|
||||
}
|
||||
@@ -727,7 +655,7 @@ impl Render for Session {
|
||||
let (status_text, interrupt_button) = match &self.kernel {
|
||||
Kernel::RunningKernel(kernel) => (
|
||||
kernel
|
||||
.kernel_info
|
||||
.kernel_info()
|
||||
.as_ref()
|
||||
.map(|info| info.language_info.name.clone()),
|
||||
Some(
|
||||
@@ -747,7 +675,7 @@ impl Render for Session {
|
||||
|
||||
KernelListItem::new(self.kernel_specification.clone())
|
||||
.status_color(match &self.kernel {
|
||||
Kernel::RunningKernel(kernel) => match kernel.execution_state {
|
||||
Kernel::RunningKernel(kernel) => match kernel.execution_state() {
|
||||
ExecutionState::Idle => Color::Success,
|
||||
ExecutionState::Busy => Color::Modified,
|
||||
},
|
||||
|
||||
@@ -25,7 +25,7 @@ fn main() {
        store.update_user_settings::<AllLanguageSettings>(cx, |_| {});
    });

    let clock = Arc::new(FakeSystemClock::default());
    let clock = Arc::new(FakeSystemClock::new());

    let http = Arc::new(HttpClientWithUrl::new(
        Arc::new(
@@ -32,7 +32,6 @@ auto_update.workspace = true
call.workspace = true
client.workspace = true
command_palette.workspace = true
extensions_ui.workspace = true
feedback.workspace = true
feature_flags.workspace = true
gpui.workspace = true

@@ -581,7 +581,7 @@ impl TitleBar {
            .action("Settings", zed_actions::OpenSettings.boxed_clone())
            .action("Key Bindings", Box::new(zed_actions::OpenKeymap))
            .action("Themes…", theme_selector::Toggle::default().boxed_clone())
            .action("Extensions", extensions_ui::Extensions.boxed_clone())
            .action("Extensions", zed_actions::Extensions.boxed_clone())
            .separator()
            .link(
                "Book Onboarding",
@@ -617,7 +617,7 @@ impl TitleBar {
        menu.action("Settings", zed_actions::OpenSettings.boxed_clone())
            .action("Key Bindings", Box::new(zed_actions::OpenKeymap))
            .action("Themes…", theme_selector::Toggle::default().boxed_clone())
            .action("Extensions", extensions_ui::Extensions.boxed_clone())
            .action("Extensions", zed_actions::Extensions.boxed_clone())
            .separator()
            .link(
                "Book Onboarding",

@@ -1407,7 +1407,7 @@ mod test {
|
||||
|
||||
// Generic arguments
|
||||
cx.set_state("fn boop<A: ˇDebug, B>() {}", Mode::Normal);
|
||||
cx.simulate_keystrokes("v i g");
|
||||
cx.simulate_keystrokes("v i a");
|
||||
cx.assert_state("fn boop<«A: Debugˇ», B>() {}", Mode::Visual);
|
||||
|
||||
// Function arguments
|
||||
@@ -1415,11 +1415,11 @@ mod test {
|
||||
"fn boop(ˇarg_a: (Tuple, Of, Types), arg_b: String) {}",
|
||||
Mode::Normal,
|
||||
);
|
||||
cx.simulate_keystrokes("d a g");
|
||||
cx.simulate_keystrokes("d a a");
|
||||
cx.assert_state("fn boop(ˇarg_b: String) {}", Mode::Normal);
|
||||
|
||||
cx.set_state("std::namespace::test(\"strinˇg\", a.b.c())", Mode::Normal);
|
||||
cx.simulate_keystrokes("v a g");
|
||||
cx.simulate_keystrokes("v a a");
|
||||
cx.assert_state("std::namespace::test(«\"string\", ˇ»a.b.c())", Mode::Visual);
|
||||
|
||||
// Tuple, vec, and array arguments
|
||||
@@ -1427,34 +1427,34 @@ mod test {
|
||||
"fn boop(arg_a: (Tuple, Ofˇ, Types), arg_b: String) {}",
|
||||
Mode::Normal,
|
||||
);
|
||||
cx.simulate_keystrokes("c i g");
|
||||
cx.simulate_keystrokes("c i a");
|
||||
cx.assert_state(
|
||||
"fn boop(arg_a: (Tuple, ˇ, Types), arg_b: String) {}",
|
||||
Mode::Insert,
|
||||
);
|
||||
|
||||
cx.set_state("let a = (test::call(), 'p', my_macro!{ˇ});", Mode::Normal);
|
||||
cx.simulate_keystrokes("c a g");
|
||||
cx.simulate_keystrokes("c a a");
|
||||
cx.assert_state("let a = (test::call(), 'p'ˇ);", Mode::Insert);
|
||||
|
||||
cx.set_state("let a = [test::call(ˇ), 300];", Mode::Normal);
|
||||
cx.simulate_keystrokes("c i g");
|
||||
cx.simulate_keystrokes("c i a");
|
||||
cx.assert_state("let a = [ˇ, 300];", Mode::Insert);
|
||||
|
||||
cx.set_state(
|
||||
"let a = vec![Vec::new(), vecˇ![test::call(), 300]];",
|
||||
Mode::Normal,
|
||||
);
|
||||
cx.simulate_keystrokes("c a g");
|
||||
cx.simulate_keystrokes("c a a");
|
||||
cx.assert_state("let a = vec![Vec::new()ˇ];", Mode::Insert);
|
||||
|
||||
// Cursor immediately before / after brackets
|
||||
cx.set_state("let a = [test::call(first_arg)ˇ]", Mode::Normal);
|
||||
cx.simulate_keystrokes("v i g");
|
||||
cx.simulate_keystrokes("v i a");
|
||||
cx.assert_state("let a = [«test::call(first_arg)ˇ»]", Mode::Visual);
|
||||
|
||||
cx.set_state("let a = [test::callˇ(first_arg)]", Mode::Normal);
|
||||
cx.simulate_keystrokes("v i g");
|
||||
cx.simulate_keystrokes("v i a");
|
||||
cx.assert_state("let a = [«test::call(first_arg)ˇ»]", Mode::Visual);
|
||||
}
|
||||
|
||||
|
||||
@@ -18,7 +18,6 @@ test-support = []
anyhow.workspace = true
client.workspace = true
db.workspace = true
extensions_ui.workspace = true
fuzzy.workspace = true
gpui.workspace = true
inline_completion_button.workspace = true

@@ -250,7 +250,7 @@ impl Render for WelcomePage {
                            "welcome page: open extensions".to_string(),
                        );
                        cx.dispatch_action(Box::new(
                            extensions_ui::Extensions,
                            zed_actions::Extensions,
                        ));
                    })),
            )

@@ -2455,6 +2455,8 @@ impl Pane {
                    to_pane = workspace.split_pane(to_pane, split_direction, cx);
                }
                let old_ix = from_pane.read(cx).index_for_item_id(item_id);
                let old_len = to_pane.read(cx).items.len();
                move_item(&from_pane, &to_pane, item_id, ix, cx);
                if to_pane == from_pane {
                    if let Some(old_index) = old_ix {
                        to_pane.update(cx, |this, _| {
@@ -2472,7 +2474,10 @@
                    }
                } else {
                    to_pane.update(cx, |this, _| {
                        if this.has_pinned_tabs() && ix < this.pinned_tab_count {
                        if this.items.len() > old_len // Did we not deduplicate on drag?
                            && this.has_pinned_tabs()
                            && ix < this.pinned_tab_count
                        {
                            this.pinned_tab_count += 1;
                        }
                    });
@@ -2484,7 +2489,6 @@
                    }
                })
            }
            move_item(&from_pane, &to_pane, item_id, ix, cx);
        });
    })
    .log_err();

@@ -606,7 +606,7 @@ impl AppState {

        let fs = fs::FakeFs::new(cx.background_executor().clone());
        let languages = Arc::new(LanguageRegistry::test(cx.background_executor().clone()));
        let clock = Arc::new(clock::FakeSystemClock::default());
        let clock = Arc::new(clock::FakeSystemClock::new());
        let http_client = http_client::FakeHttpClient::with_404_response();
        let client = Client::new(clock, http_client.clone(), cx);
        let session = cx.new_model(|cx| AppSession::new(Session::test(), cx));

@@ -2,7 +2,7 @@
description = "The fast, collaborative code editor."
edition = "2021"
name = "zed"
version = "0.163.0"
version = "0.164.0"
publish = false
license = "GPL-3.0-or-later"
authors = ["Zed Team <hi@zed.dev>"]

@@ -66,7 +66,7 @@ use zed::{
    OpenRequest,
};

use crate::zed::inline_completion_registry;
use crate::zed::{assistant_hints, inline_completion_registry};

#[cfg(feature = "mimalloc")]
#[global_allocator]
@@ -401,6 +401,7 @@ fn main() {
            stdout_is_a_pty(),
            cx,
        );
        assistant_hints::init(cx);
        repl::init(
            app_state.fs.clone(),
            app_state.client.telemetry().clone(),

@@ -1,4 +1,5 @@
mod app_menus;
pub mod assistant_hints;
pub mod inline_completion_registry;
#[cfg(any(target_os = "linux", target_os = "freebsd"))]
pub(crate) mod linux_prompts;
@@ -823,8 +824,13 @@ pub fn handle_keymap_file_changes(
    })
    .detach();

    cx.on_keyboard_layout_change(move |_| {
        keyboard_layout_tx.unbounded_send(()).ok();
    let mut current_mapping = settings::get_key_equivalents(cx.keyboard_layout());
    cx.on_keyboard_layout_change(move |cx| {
        let next_mapping = settings::get_key_equivalents(cx.keyboard_layout());
        if next_mapping != current_mapping {
            current_mapping = next_mapping;
            keyboard_layout_tx.unbounded_send(()).ok();
        }
    })
    .detach();


@@ -32,7 +32,7 @@ pub fn app_menus() -> Vec<Menu> {
            items: vec![],
        }),
        MenuItem::separator(),
        MenuItem::action("Extensions", extensions_ui::Extensions),
        MenuItem::action("Extensions", zed_actions::Extensions),
        MenuItem::action("Install CLI", install_cli::Install),
        MenuItem::separator(),
        MenuItem::action("Hide Zed", super::Hide),

115
crates/zed/src/zed/assistant_hints.rs
Normal file
@@ -0,0 +1,115 @@
use assistant::assistant_settings::AssistantSettings;
use collections::HashMap;
use editor::{ActiveLineTrailerProvider, Editor, EditorMode};
use gpui::{AnyWindowHandle, AppContext, ViewContext, WeakView, WindowContext};
use settings::{Settings, SettingsStore};
use std::{cell::RefCell, rc::Rc};
use theme::ActiveTheme;
use ui::prelude::*;
use workspace::Workspace;

pub fn init(cx: &mut AppContext) {
    let editors: Rc<RefCell<HashMap<WeakView<Editor>, AnyWindowHandle>>> = Rc::default();

    cx.observe_new_views({
        let editors = editors.clone();
        move |_: &mut Workspace, cx: &mut ViewContext<Workspace>| {
            let workspace_handle = cx.view().clone();
            cx.subscribe(&workspace_handle, {
                let editors = editors.clone();
                move |_, _, event, cx| match event {
                    workspace::Event::ItemAdded { item } => {
                        if let Some(editor) = item.act_as::<Editor>(cx) {
                            if editor.read(cx).mode() != EditorMode::Full {
                                return;
                            }

                            cx.on_release({
                                let editor_handle = editor.downgrade();
                                let editors = editors.clone();
                                move |_, _, _| {
                                    editors.borrow_mut().remove(&editor_handle);
                                }
                            })
                            .detach();
                            editors
                                .borrow_mut()
                                .insert(editor.downgrade(), cx.window_handle());

                            let show_hints = should_show_hints(cx);
                            editor.update(cx, |editor, cx| {
                                assign_active_line_trailer_provider(editor, show_hints, cx)
                            })
                        }
                    }
                    _ => {}
                }
            })
            .detach();
        }
    })
    .detach();

    let mut show_hints = AssistantSettings::get_global(cx).show_hints;
    cx.observe_global::<SettingsStore>(move |cx| {
        let new_show_hints = should_show_hints(cx);
        if new_show_hints != show_hints {
            show_hints = new_show_hints;
            for (editor, window) in editors.borrow().iter() {
                _ = window.update(cx, |_window, cx| {
                    _ = editor.update(cx, |editor, cx| {
                        assign_active_line_trailer_provider(editor, show_hints, cx);
                    })
                });
            }
        }
    })
    .detach();
}

struct AssistantHintsProvider;

impl ActiveLineTrailerProvider for AssistantHintsProvider {
    fn render_active_line_trailer(
        &mut self,
        style: &editor::EditorStyle,
        focus_handle: &gpui::FocusHandle,
        cx: &mut WindowContext,
    ) -> Option<gpui::AnyElement> {
        if !focus_handle.is_focused(cx) {
            return None;
        }

        let chat_keybinding =
            cx.keystroke_text_for_action_in(&assistant::ToggleFocus, focus_handle);
        let generate_keybinding =
            cx.keystroke_text_for_action_in(&zed_actions::InlineAssist::default(), focus_handle);

        Some(
            h_flex()
                .id("inline-assistant-instructions")
                .w_full()
                .font_family(style.text.font().family)
                .text_color(cx.theme().status().hint)
                .line_height(style.text.line_height)
                .child(format!(
                    "{chat_keybinding} to chat, {generate_keybinding} to generate"
                ))
                .into_any(),
        )
    }
}

fn assign_active_line_trailer_provider(
    editor: &mut Editor,
    show_hints: bool,
    cx: &mut ViewContext<Editor>,
) {
    let provider = show_hints.then_some(AssistantHintsProvider);
    editor.set_active_line_trailer_provider(provider, cx);
}

fn should_show_hints(cx: &AppContext) -> bool {
    let assistant_settings = AssistantSettings::get_global(cx);
    assistant_settings.enabled && assistant_settings.show_hints
}
@@ -32,6 +32,7 @@ actions!(
    Quit,
    OpenKeymap,
    About,
    Extensions,
    OpenLicenses,
    OpenTelemetryLog,
    DecreaseBufferFontSize,

@@ -200,18 +200,28 @@ You must provide the model's Context Window in the `max_tokens` parameter, this
{
  "assistant": {
    "enabled": true,
    "show_hints": true,
    "button": true,
    "dock": "right"
    "default_width": 480,
    "default_model": {
      "provider": "zed.dev",
      "model": "claude-3-5-sonnet"
    },
    "version": "2",
    "button": true,
    "default_width": 480,
    "dock": "right"
  }
}
```

| key | type | default | description |
| -------------- | ------- | ------- | ------------------------------------------------------------------------------------- |
| enabled | boolean | true | Setting this to `false` will completely disable the assistant |
| show_hints | boolean | true | Whether to show hints in the editor explaining how to use the assistant |
| button | boolean | true | Show the assistant icon in the status bar |
| dock | string | "right" | The default dock position for the assistant panel. Can be ["left", "right", "bottom"] |
| default_height | string | null | The pixel height of the assistant panel when docked to the bottom |
| default_width | string | null | The pixel width of the assistant panel when docked to the left or right |

#### Custom endpoints {#custom-endpoint}

You can use a custom API endpoint for different providers, as long as it's compatible with the provider's API structure.
@@ -271,13 +281,3 @@ will generate two outputs for every assist. One with Claude 3.5 Sonnet, and one
}
}
```

#### Common Panel Settings

| key | type | default | description |
| -------------- | ------- | ------- | ------------------------------------------------------------------------------------- |
| enabled | boolean | true | Setting this to `false` will completely disable the assistant |
| button | boolean | true | Show the assistant icon in the status bar |
| dock | string | "right" | The default dock position for the assistant panel. Can be ["left", "right", "bottom"] |
| default_height | string | null | The pixel height of the assistant panel when docked to the bottom |
| default_width | string | null | The pixel width of the assistant panel when docked to the left or right |

@@ -2327,15 +2327,18 @@ Run the `theme selector: toggle` action in the command palette to see a current
- Default:

```json
"assistant": {
  "enabled": true,
  "button": true,
  "dock": "right",
  "default_width": 640,
  "default_height": 320,
  "provider": "openai",
  "version": "1",
},
{
  "assistant": {
    "enabled": true,
    "button": true,
    "dock": "right",
    "default_width": 640,
    "default_height": 320,
    "provider": "openai",
    "version": "1",
    "show_hints": true
  }
}
```

## Outline Panel

@@ -1,6 +1,6 @@
name = "TOML"
grammar = "toml"
path_suffixes = ["Cargo.lock", "toml", "Pipfile"]
path_suffixes = ["Cargo.lock", "toml", "Pipfile", "uv.lock"]
line_comments = ["# "]
autoclose_before = ",]}"
brackets = [

8
script/create-draft-release
Executable file
@@ -0,0 +1,8 @@
#!/usr/bin/env bash

preview=""
if [[ "$GITHUB_REF_NAME" == *"-pre" ]]; then
    preview="-p"
fi

gh release create -d "$GITHUB_REF_NAME" -F "$1" $preview
@@ -64,10 +64,6 @@ async function main() {
  }

  console.log(releaseNotes.join("\n") + "\n");
  console.log("<!-- ");
  console.log(missing.join("\n"));
  console.log(skipped.join("\n"));
  console.log("-->");
}

function getCommits(oldTag, newTag) {

25
script/update-json-schemas
Executable file
@@ -0,0 +1,25 @@
#!/usr/bin/env bash

set -euo pipefail

cd "$(dirname "$0")/.." || exit 1
cd crates/languages/src/json/schemas
files=(
    "tsconfig.json"
    "package.json"
)
for file in "${files[@]}"; do
    curl -sL -o "$file" "https://raw.githubusercontent.com/SchemaStore/schemastore/master/src/schemas/json/$file"
done

HASH="$(curl -s 'https://api.github.com/repos/SchemaStore/schemastore/commits/HEAD' | jq -r '.sha')"
SHORT_HASH="${HASH:0:7}"
DATE="$(curl -s 'https://api.github.com/repos/SchemaStore/schemastore/commits/HEAD' | jq -r .commit.author.date | cut -c1-10)"
echo
echo "Updated JSON schemas to [SchemaStore/schemastore@$SHORT_HASH](https://github.com/SchemaStore/schemastore/tree/$HASH) ($DATE)"
echo
for file in "${files[@]}"; do
    echo "- [$file](https://github.com/SchemaStore/schemastore/commits/master/src/schemas/json/$file)" \
        "@ [$SHORT_HASH](https://raw.githubusercontent.com/SchemaStore/schemastore/$HASH/src/schemas/json/$file)"
done
echo