Compare commits

..

6 Commits

Author SHA1 Message Date
Mikayla
f2a05789c8 Add dependency review action 2024-07-22 11:22:52 -07:00
Rashid Almheiri
a5a7a835fe Set the default Starlark LSP for zaucy/zed-starlark (#14972)
Given that zaucy/zed-starlark#4 was merged, zed-starlark now has multiple
LSPs and requires additional configuration which isn't available
directly to extensions.

cc @zaucy 

Release Notes:

- N/A
2024-07-22 12:58:28 -04:00
Marshall Bowers
28baa56e3d repl: Factor out ReplStore (#14970)
This PR factors a `ReplStore` out of the `RuntimePanel`.

Since we're planning to remove the `RuntimePanel` and replace it with an
ephemeral tab that can be opened, we need the kernel specifications and
sessions to have somewhere long-lived that they can reside in.

Release Notes:

- N/A
2024-07-22 12:46:33 -04:00
Conrad Irwin
2e23527e09 Refactor key dispatch (#14942)
Simplify key dispatch code.

Previously we would maintain a cache of key matchers for each context
that
would store the pending input. For the last while we've also stored the
typed prefix on the window. This is redundant, we only need one copy, so
now
it's just stored on the window, which lets us avoid the boilerplate of
keeping
all the matchers in sync.

This stops us from losing multikey bindings when the context on a node
changes
(#11009) (though we still interrupt multikey bindings if the focus
changes).

While in the code, I fixed up a few other things with multi-key bindings
that
were causing problems:

Previously we assumed that all multi-key bindings took precedence over
any
single-key binding, now this is done such that if a user binds a
single-key
binding, it will take precedence over all system-defined multi-key
bindings
(irrespective of the depth in the context tree). This was a common cause
of
confusion for new users trying to bind to `cmd-k` or `ctrl-w` in vim
mode
(#13543).

Previously after a pending multi-key keystroke failed to match, we would
drop
the prefix if it was an input event. Now we correctly replay it
(#14725).

Release Notes:

- Fixed multi-key shortcuts not working across completion menu changes
([#11009](https://github.com/zed-industries/zed/issues/11009))
- Fixed multi-key shortcuts discarding earlier input
([#14445](https://github.com/zed-industries/zed/pull/14445))
- vim: Fixed `jk` binding preventing you from repeating `j`
([#14725](https://github.com/zed-industries/zed/issues/14725))
- vim: Fixed `escape` in normal mode to also clear the selected
register.
- Fixed key maps so user-defined mappings take precedence over builtin
multi-key mappings
([#13543](https://github.com/zed-industries/zed/issues/13543))
- Fixed a bug where overridden shortcuts would still show in the Command
Palette
2024-07-22 10:46:16 -06:00
Piotr Osiewicz
865904a0c9 lsp: Pass back diagnostic .data when querying code actions for it (#14962)
Per the LSP spec, we should pass .data field of diagnostics into code
action request:
```
	/**
	 * A data entry field that is preserved between a
	 * `textDocument/publishDiagnostics` notification and
	 * `textDocument/codeAction` request.
	 *
	 * @since 3.16.0
	 */
	data?: LSPAny;
```


Release Notes:

- Fixed rare cases where a code action triggered by a diagnostic might
not be available for use.
2024-07-22 17:49:11 +02:00
张小白
10d2353e07 windows: Treat settings.json as JSONC (#14944)
Before this PR, comments in `settings.json` are marked with red lines,
indicating that `"comments are not allowed in JSON."`

![Screenshot 2024-07-22
153951](https://github.com/user-attachments/assets/fbb631e8-43cf-4473-97c0-50c83c4d6ab1)


After this PR, this issue is resolved.

![Screenshot 2024-07-22
153527](https://github.com/user-attachments/assets/ee0f7877-c623-4caa-94cd-97e82f9b8945)

Release Notes:

- N/A
2024-07-22 10:46:52 -04:00
40 changed files with 1741 additions and 2339 deletions

14
.github/workflows/check_deps.yml vendored Normal file
View File

@@ -0,0 +1,14 @@
name: "Dependency Review"
on: [pull_request]
permissions:
contents: read
jobs:
dependency-review:
runs-on: ubuntu-latest
steps:
- name: "Checkout Repository"
uses: actions/checkout@v4
- name: "Dependency Review"
uses: actions/dependency-review-action@v4

26
Cargo.lock generated
View File

@@ -306,9 +306,6 @@ name = "arrayvec"
version = "0.7.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "96d30a06541fbafbc7f82ed10c06164cfbd2c401138f6addd8404629c4b16711"
dependencies = [
"serde",
]
[[package]]
name = "as-raw-xcb-connection"
@@ -1703,19 +1700,6 @@ dependencies = [
"profiling",
]
[[package]]
name = "blake3"
version = "1.5.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e9ec96fe9a81b5e365f9db71fe00edc4fe4ca2cc7dcb7861f0603012a7caa210"
dependencies = [
"arrayref",
"arrayvec",
"cc",
"cfg-if",
"constant_time_eq",
]
[[package]]
name = "block"
version = "0.1.6"
@@ -2780,12 +2764,6 @@ dependencies = [
"tiny-keccak",
]
[[package]]
name = "constant_time_eq"
version = "0.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f7144d30dcf0fafbce74250a3963025d8d52177934239851c917d29f1df280c2"
[[package]]
name = "convert_case"
version = "0.4.0"
@@ -9590,8 +9568,6 @@ name = "semantic_index"
version = "0.1.0"
dependencies = [
"anyhow",
"arrayvec",
"blake3",
"client",
"clock",
"collections",
@@ -9604,7 +9580,6 @@ dependencies = [
"heed",
"http 0.1.0",
"language",
"language_model",
"languages",
"log",
"open_ai",
@@ -13778,7 +13753,6 @@ dependencies = [
"audio",
"auto_update",
"backtrace",
"blake3",
"breadcrumbs",
"call",
"channel",

View File

@@ -284,7 +284,6 @@ zed_actions = { path = "crates/zed_actions" }
alacritty_terminal = "0.23"
any_vec = "0.13"
anyhow = "1.0.57"
arrayvec = { version = "0.7.4", features = ["serde"] }
ashpd = "0.9.1"
async-compression = { version = "0.4", features = ["gzip", "futures-io"] }
async-dispatcher = { version = "0.1" }
@@ -299,7 +298,6 @@ bitflags = "2.6.0"
blade-graphics = { git = "https://github.com/zed-industries/blade", rev = "a477c2008db27db0b9f745715e119b3ee7ab7818" }
blade-macros = { git = "https://github.com/zed-industries/blade", rev = "a477c2008db27db0b9f745715e119b3ee7ab7818" }
blade-util = { git = "https://github.com/zed-industries/blade", rev = "a477c2008db27db0b9f745715e119b3ee7ab7818" }
blake3 = "1.5.3"
cap-std = "3.0"
cargo_toml = "0.20"
chrono = { version = "0.4", features = ["serde"] }

View File

@@ -705,7 +705,7 @@
//
"file_types": {
"JSON": ["flake.lock"],
"JSONC": ["**/.zed/**/*.json", "**/zed/**/*.json", "tsconfig.json"]
"JSONC": ["**/.zed/**/*.json", "**/zed/**/*.json", "**/Zed/**/*.json", "tsconfig.json"]
},
// The extensions that Zed should automatically install on startup.
//
@@ -815,6 +815,9 @@
"plugins": ["prettier-plugin-sql"]
}
},
"Starlark": {
"language_servers": ["starpls", "!buck2-lsp", "..."]
},
"Svelte": {
"prettier": {
"allowed": true,

View File

@@ -131,26 +131,6 @@ impl CompletionProvider {
})
}
pub fn stream_completion_bg(
// TODO consider unifying this with stream_completion, since they share so much code
// (design question: do we want to make a Spawn or perhaps Executor trait to abstract over bg and fg executors?)
&self,
request: LanguageModelRequest,
cx: &AppContext,
) -> Task<Result<CompletionResponse>> {
let rate_limiter = self.request_limiter.clone();
let provider = self.provider.clone();
cx.background_executor().spawn(async move {
let lock = rate_limiter.acquire_arc().await;
let response = provider.read().stream_completion(request);
let response = response.await?;
Ok(CompletionResponse {
inner: response,
_lock: lock,
})
})
}
pub fn complete(&self, request: LanguageModelRequest, cx: &AppContext) -> Task<Result<String>> {
let response = self.stream_completion(request, cx);
cx.foreground_executor().spawn(async move {

View File

@@ -954,6 +954,7 @@ fn random_diagnostic(
is_primary,
is_disk_based: false,
is_unnecessary: false,
data: None,
},
}
}

View File

@@ -1130,14 +1130,7 @@ impl AppContext {
for window in self.windows() {
window
.update(self, |_, cx| {
cx.window
.rendered_frame
.dispatch_tree
.clear_pending_keystrokes();
cx.window
.next_frame
.dispatch_tree
.clear_pending_keystrokes();
cx.clear_pending_keystrokes();
})
.ok();
}

View File

@@ -51,7 +51,7 @@
///
use crate::{
Action, ActionRegistry, DispatchPhase, EntityId, FocusId, KeyBinding, KeyContext, Keymap,
KeymatchResult, Keystroke, KeystrokeMatcher, ModifiersChangedEvent, WindowContext,
Keystroke, ModifiersChangedEvent, WindowContext,
};
use collections::FxHashMap;
use smallvec::SmallVec;
@@ -73,7 +73,6 @@ pub(crate) struct DispatchTree {
nodes: Vec<DispatchNode>,
focusable_node_ids: FxHashMap<FocusId, DispatchNodeId>,
view_node_ids: FxHashMap<EntityId, DispatchNodeId>,
keystroke_matchers: FxHashMap<SmallVec<[KeyContext; 4]>, KeystrokeMatcher>,
keymap: Rc<RefCell<Keymap>>,
action_registry: Rc<ActionRegistry>,
}
@@ -111,6 +110,19 @@ impl ReusedSubtree {
}
}
#[derive(Default, Debug)]
pub(crate) struct Replay {
pub(crate) keystroke: Keystroke,
pub(crate) bindings: SmallVec<[KeyBinding; 1]>,
}
#[derive(Default, Debug)]
pub(crate) struct DispatchResult {
pub(crate) pending: SmallVec<[Keystroke; 1]>,
pub(crate) bindings: SmallVec<[KeyBinding; 1]>,
pub(crate) to_replay: SmallVec<[Replay; 1]>,
}
type KeyListener = Rc<dyn Fn(&dyn Any, DispatchPhase, &mut WindowContext)>;
type ModifiersChangedListener = Rc<dyn Fn(&ModifiersChangedEvent, &mut WindowContext)>;
@@ -129,7 +141,6 @@ impl DispatchTree {
nodes: Vec::new(),
focusable_node_ids: FxHashMap::default(),
view_node_ids: FxHashMap::default(),
keystroke_matchers: FxHashMap::default(),
keymap,
action_registry,
}
@@ -142,7 +153,6 @@ impl DispatchTree {
self.nodes.clear();
self.focusable_node_ids.clear();
self.view_node_ids.clear();
self.keystroke_matchers.clear();
}
pub fn len(&self) -> usize {
@@ -310,33 +320,6 @@ impl DispatchTree {
self.nodes.truncate(index);
}
pub fn clear_pending_keystrokes(&mut self) {
self.keystroke_matchers.clear();
}
/// Preserve keystroke matchers from previous frames to support multi-stroke
/// bindings across multiple frames.
pub fn preserve_pending_keystrokes(&mut self, old_tree: &mut Self, focus_id: Option<FocusId>) {
if let Some(node_id) = focus_id.and_then(|focus_id| self.focusable_node_id(focus_id)) {
let dispatch_path = self.dispatch_path(node_id);
self.context_stack.clear();
for node_id in dispatch_path {
let node = self.node(node_id);
if let Some(context) = node.context.clone() {
self.context_stack.push(context);
}
if let Some((context_stack, matcher)) = old_tree
.keystroke_matchers
.remove_entry(self.context_stack.as_slice())
{
self.keystroke_matchers.insert(context_stack, matcher);
}
}
}
}
pub fn on_key_event(&mut self, listener: KeyListener) {
self.active_node().key_listeners.push(listener);
}
@@ -419,74 +402,110 @@ impl DispatchTree {
keymap
.bindings_for_action(action)
.filter(|binding| {
for i in 0..context_stack.len() {
let context = &context_stack[0..=i];
if keymap.binding_enabled(binding, context) {
return true;
}
}
false
let (bindings, _) = keymap.bindings_for_input(&binding.keystrokes, &context_stack);
bindings
.iter()
.next()
.is_some_and(|b| b.action.partial_eq(action))
})
.cloned()
.collect()
}
// dispatch_key pushes the next keystroke into any key binding matchers.
// any matching bindings are returned in the order that they should be dispatched:
// * First by length of binding (so if you have a binding for "b" and "ab", the "ab" binding fires first)
// * Secondly by depth in the tree (so if Editor has a binding for "b" and workspace a
// binding for "b", the Editor action fires first).
pub fn dispatch_key(
&mut self,
keystroke: &Keystroke,
fn bindings_for_input(
&self,
input: &[Keystroke],
dispatch_path: &SmallVec<[DispatchNodeId; 32]>,
) -> KeymatchResult {
let mut bindings = SmallVec::<[KeyBinding; 1]>::new();
let mut pending = false;
) -> (SmallVec<[KeyBinding; 1]>, bool) {
let context_stack: SmallVec<[KeyContext; 4]> = dispatch_path
.iter()
.filter_map(|node_id| self.node(*node_id).context.clone())
.collect();
let mut context_stack: SmallVec<[KeyContext; 4]> = SmallVec::new();
for node_id in dispatch_path {
let node = self.node(*node_id);
if let Some(context) = node.context.clone() {
context_stack.push(context);
}
}
while !context_stack.is_empty() {
let keystroke_matcher = self
.keystroke_matchers
.entry(context_stack.clone())
.or_insert_with(|| KeystrokeMatcher::new(self.keymap.clone()));
let result = keystroke_matcher.match_keystroke(keystroke, &context_stack);
if result.pending && !pending && !bindings.is_empty() {
context_stack.pop();
continue;
}
pending = result.pending || pending;
for new_binding in result.bindings {
match bindings
.iter()
.position(|el| el.keystrokes.len() < new_binding.keystrokes.len())
{
Some(idx) => {
bindings.insert(idx, new_binding);
}
None => bindings.push(new_binding),
}
}
context_stack.pop();
}
KeymatchResult { bindings, pending }
self.keymap
.borrow()
.bindings_for_input(&input, &context_stack)
}
pub fn has_pending_keystrokes(&self) -> bool {
self.keystroke_matchers
.iter()
.any(|(_, matcher)| matcher.has_pending_keystrokes())
/// dispatch_key processes the keystroke
/// input should be set to the value of `pending` from the previous call to dispatch_key.
/// This returns three instructions to the input handler:
/// - bindings: any bindings to execute before processing this keystroke
/// - pending: the new set of pending keystrokes to store
/// - to_replay: any keystrokes that had been pushed to pending but are no longer matched;
/// these should be replayed first.
pub fn dispatch_key(
&mut self,
mut input: SmallVec<[Keystroke; 1]>,
keystroke: Keystroke,
dispatch_path: &SmallVec<[DispatchNodeId; 32]>,
) -> DispatchResult {
input.push(keystroke.clone());
let (bindings, pending) = self.bindings_for_input(&input, dispatch_path);
if pending {
return DispatchResult {
pending: input,
..Default::default()
};
} else if !bindings.is_empty() {
return DispatchResult {
bindings,
..Default::default()
};
} else if input.len() == 1 {
return DispatchResult::default();
}
input.pop();
let (suffix, mut to_replay) = self.replay_prefix(input, dispatch_path);
let mut result = self.dispatch_key(suffix, keystroke, dispatch_path);
to_replay.extend(result.to_replay);
result.to_replay = to_replay;
return result;
}
/// If the user types a matching prefix of a binding and then waits for a timeout
/// flush_dispatch() converts any previously pending input to replay events.
pub fn flush_dispatch(
&mut self,
input: SmallVec<[Keystroke; 1]>,
dispatch_path: &SmallVec<[DispatchNodeId; 32]>,
) -> SmallVec<[Replay; 1]> {
let (suffix, mut to_replay) = self.replay_prefix(input, dispatch_path);
if suffix.len() > 0 {
to_replay.extend(self.flush_dispatch(suffix, dispatch_path))
}
to_replay
}
/// Converts the longest prefix of input to a replay event and returns the rest.
fn replay_prefix(
&mut self,
mut input: SmallVec<[Keystroke; 1]>,
dispatch_path: &SmallVec<[DispatchNodeId; 32]>,
) -> (SmallVec<[Keystroke; 1]>, SmallVec<[Replay; 1]>) {
let mut to_replay: SmallVec<[Replay; 1]> = Default::default();
for last in (0..input.len()).rev() {
let (bindings, _) = self.bindings_for_input(&input[0..=last], dispatch_path);
if !bindings.is_empty() {
to_replay.push(Replay {
keystroke: input.drain(0..=last).last().unwrap(),
bindings,
});
break;
}
}
if to_replay.is_empty() {
to_replay.push(Replay {
keystroke: input.remove(0),
..Default::default()
});
}
(input, to_replay)
}
pub fn dispatch_path(&self, target: DispatchNodeId) -> SmallVec<[DispatchNodeId; 32]> {

View File

@@ -1,13 +1,11 @@
mod binding;
mod context;
mod matcher;
pub use binding::*;
pub use context::*;
pub(crate) use matcher::*;
use crate::{Action, Keystroke, NoAction};
use collections::{HashMap, HashSet};
use collections::HashMap;
use smallvec::SmallVec;
use std::any::{Any, TypeId};
@@ -21,8 +19,6 @@ pub struct KeymapVersion(usize);
pub struct Keymap {
bindings: Vec<KeyBinding>,
binding_indices_by_action_id: HashMap<TypeId, SmallVec<[usize; 3]>>,
disabled_keystrokes:
HashMap<SmallVec<[Keystroke; 2]>, HashSet<Option<KeyBindingContextPredicate>>>,
version: KeymapVersion,
}
@@ -41,22 +37,13 @@ impl Keymap {
/// Add more bindings to the keymap.
pub fn add_bindings<T: IntoIterator<Item = KeyBinding>>(&mut self, bindings: T) {
let no_action_id = (NoAction {}).type_id();
for binding in bindings {
let action_id = binding.action().as_any().type_id();
if action_id == no_action_id {
self.disabled_keystrokes
.entry(binding.keystrokes)
.or_default()
.insert(binding.context_predicate);
} else {
self.binding_indices_by_action_id
.entry(action_id)
.or_default()
.push(self.bindings.len());
self.bindings.push(binding);
}
self.binding_indices_by_action_id
.entry(action_id)
.or_default()
.push(self.bindings.len());
self.bindings.push(binding);
}
self.version.0 += 1;
@@ -66,7 +53,6 @@ impl Keymap {
pub fn clear(&mut self) {
self.bindings.clear();
self.binding_indices_by_action_id.clear();
self.disabled_keystrokes.clear();
self.version.0 += 1;
}
@@ -89,8 +75,66 @@ impl Keymap {
.filter(move |binding| binding.action().partial_eq(action))
}
/// bindings_for_input returns a list of bindings that match the given input,
/// and a boolean indicating whether or not more bindings might match if
/// the input was longer.
///
/// Precedence is defined by the depth in the tree (matches on the Editor take
/// precedence over matches on the Pane, then the Workspace, etc.). Bindings with
/// no context are treated as the same as the deepest context.
///
/// In the case of multiple bindings at the same depth, the ones defined later in the
/// keymap take precedence (so user bindings take precedence over built-in bindings).
///
/// If a user has disabled a binding with `"x": null` it will not be returned. Disabled
/// bindings are evaluated with the same precedence rules so you can disable a rule in
/// a given context only.
///
/// In the case of multi-key bindings, the returned boolean is true when the input is a
/// prefix of at least one enabled binding, i.e. a longer input could still match.
pub fn bindings_for_input(
&self,
input: &[Keystroke],
context_stack: &[KeyContext],
) -> (SmallVec<[KeyBinding; 1]>, bool) {
let possibilities = self.bindings().rev().filter_map(|binding| {
binding
.match_keystrokes(input)
.map(|pending| (binding, pending))
});
let mut bindings: SmallVec<[(KeyBinding, usize); 1]> = SmallVec::new();
let mut is_pending = None;
'outer: for (binding, pending) in possibilities {
for depth in (0..=context_stack.len()).rev() {
if self.binding_enabled(binding, &context_stack[0..depth]) {
if is_pending.is_none() {
is_pending = Some(pending);
}
if !pending {
bindings.push((binding.clone(), depth));
continue 'outer;
}
}
}
}
bindings.sort_by(|a, b| a.1.cmp(&b.1).reverse());
let bindings = bindings
.into_iter()
.map_while(|(binding, _)| {
if binding.action.as_any().type_id() == (NoAction {}).type_id() {
None
} else {
Some(binding)
}
})
.collect();
return (bindings, is_pending.unwrap_or_default());
}
/// Check if the given binding is enabled, given a certain key context.
pub fn binding_enabled(&self, binding: &KeyBinding, context: &[KeyContext]) -> bool {
fn binding_enabled(&self, binding: &KeyBinding, context: &[KeyContext]) -> bool {
// If binding has a context predicate, it must match the current context,
if let Some(predicate) = &binding.context_predicate {
if !predicate.eval(context) {
@@ -98,22 +142,6 @@ impl Keymap {
}
}
if let Some(disabled_predicates) = self.disabled_keystrokes.get(&binding.keystrokes) {
for disabled_predicate in disabled_predicates {
match disabled_predicate {
// The binding must not be globally disabled.
None => return false,
// The binding must not be disabled in the current context.
Some(predicate) => {
if predicate.eval(context) {
return false;
}
}
}
}
}
true
}
}
@@ -168,16 +196,37 @@ mod tests {
keymap.add_bindings(bindings.clone());
// binding is only enabled in a specific context
assert!(!keymap.binding_enabled(&bindings[0], &[KeyContext::parse("barf").unwrap()]));
assert!(keymap.binding_enabled(&bindings[0], &[KeyContext::parse("editor").unwrap()]));
assert!(keymap
.bindings_for_input(
&[Keystroke::parse("ctrl-a").unwrap()],
&[KeyContext::parse("barf").unwrap()],
)
.0
.is_empty());
assert!(!keymap
.bindings_for_input(
&[Keystroke::parse("ctrl-a").unwrap()],
&[KeyContext::parse("editor").unwrap()],
)
.0
.is_empty());
// binding is disabled in a more specific context
assert!(!keymap.binding_enabled(
&bindings[0],
&[KeyContext::parse("editor mode=full").unwrap()]
));
assert!(keymap
.bindings_for_input(
&[Keystroke::parse("ctrl-a").unwrap()],
&[KeyContext::parse("editor mode=full").unwrap()],
)
.0
.is_empty());
// binding is globally disabled
assert!(!keymap.binding_enabled(&bindings[1], &[KeyContext::parse("barf").unwrap()]));
assert!(keymap
.bindings_for_input(
&[Keystroke::parse("ctrl-b").unwrap()],
&[KeyContext::parse("barf").unwrap()],
)
.0
.is_empty());
}
}

View File

@@ -1,4 +1,4 @@
use crate::{Action, KeyBindingContextPredicate, KeyMatch, Keystroke};
use crate::{Action, KeyBindingContextPredicate, Keystroke};
use anyhow::Result;
use smallvec::SmallVec;
@@ -46,17 +46,18 @@ impl KeyBinding {
}
/// Check if the given keystrokes match this binding.
pub fn match_keystrokes(&self, pending_keystrokes: &[Keystroke]) -> KeyMatch {
if self.keystrokes.as_ref().starts_with(pending_keystrokes) {
// If the binding is completed, push it onto the matches list
if self.keystrokes.as_ref().len() == pending_keystrokes.len() {
KeyMatch::Matched
} else {
KeyMatch::Pending
}
} else {
KeyMatch::None
pub fn match_keystrokes(&self, typed: &[Keystroke]) -> Option<bool> {
if self.keystrokes.len() < typed.len() {
return None;
}
for (target, typed) in self.keystrokes.iter().zip(typed.iter()) {
if !typed.should_match(target) {
return None;
}
}
return Some(self.keystrokes.len() > typed.len());
}
/// Get the keystrokes associated with this binding

View File

@@ -1,102 +0,0 @@
use crate::{KeyBinding, KeyContext, Keymap, KeymapVersion, Keystroke};
use smallvec::SmallVec;
use std::{cell::RefCell, rc::Rc};
pub(crate) struct KeystrokeMatcher {
pending_keystrokes: Vec<Keystroke>,
keymap: Rc<RefCell<Keymap>>,
keymap_version: KeymapVersion,
}
pub struct KeymatchResult {
pub bindings: SmallVec<[KeyBinding; 1]>,
pub pending: bool,
}
impl KeystrokeMatcher {
pub fn new(keymap: Rc<RefCell<Keymap>>) -> Self {
let keymap_version = keymap.borrow().version();
Self {
pending_keystrokes: Vec::new(),
keymap_version,
keymap,
}
}
pub fn has_pending_keystrokes(&self) -> bool {
!self.pending_keystrokes.is_empty()
}
/// Pushes a keystroke onto the matcher.
/// The result of the new keystroke is returned:
/// - KeyMatch::None =>
/// No match is valid for this key given any pending keystrokes.
/// - KeyMatch::Pending =>
/// There exist bindings which are still waiting for more keys.
/// - KeyMatch::Complete(matches) =>
/// One or more bindings have received the necessary key presses.
/// Bindings added later will take precedence over earlier bindings.
pub(crate) fn match_keystroke(
&mut self,
keystroke: &Keystroke,
context_stack: &[KeyContext],
) -> KeymatchResult {
let keymap = self.keymap.borrow();
// Clear pending keystrokes if the keymap has changed since the last matched keystroke.
if keymap.version() != self.keymap_version {
self.keymap_version = keymap.version();
self.pending_keystrokes.clear();
}
let mut pending_key = None;
let mut bindings = SmallVec::new();
for binding in keymap.bindings().rev() {
if !keymap.binding_enabled(binding, context_stack) {
continue;
}
for candidate in keystroke.match_candidates() {
self.pending_keystrokes.push(candidate.clone());
match binding.match_keystrokes(&self.pending_keystrokes) {
KeyMatch::Matched => {
bindings.push(binding.clone());
}
KeyMatch::Pending => {
pending_key.get_or_insert(candidate);
}
KeyMatch::None => {}
}
self.pending_keystrokes.pop();
}
}
if bindings.is_empty() && pending_key.is_none() && !self.pending_keystrokes.is_empty() {
drop(keymap);
self.pending_keystrokes.remove(0);
return self.match_keystroke(keystroke, context_stack);
}
let pending = if let Some(pending_key) = pending_key {
self.pending_keystrokes.push(pending_key);
true
} else {
self.pending_keystrokes.clear();
false
};
KeymatchResult { bindings, pending }
}
}
/// The result of matching a keystroke against a given keybinding.
/// - KeyMatch::None => No match is valid for this key given any pending keystrokes.
/// - KeyMatch::Pending => There exist bindings that is still waiting for more keys.
/// - KeyMatch::Some(matches) => One or more bindings have received the necessary key presses.
#[derive(Debug, PartialEq)]
pub enum KeyMatch {
None,
Pending,
Matched,
}

View File

@@ -1,6 +1,5 @@
use anyhow::anyhow;
use serde::Deserialize;
use smallvec::SmallVec;
use std::fmt::Write;
/// A keystroke and associated metadata generated by the platform
@@ -25,33 +24,25 @@ impl Keystroke {
/// and on some keyboards the IME handler converts a sequence of keys into a
/// specific character (for example `"` is typed as `" space` on a brazilian keyboard).
///
/// This method generates a list of potential keystroke candidates that could be matched
/// against when resolving a keybinding.
pub(crate) fn match_candidates(&self) -> SmallVec<[Keystroke; 2]> {
let mut possibilities = SmallVec::new();
match self.ime_key.as_ref() {
Some(ime_key) => {
if ime_key != &self.key {
possibilities.push(Keystroke {
modifiers: Modifiers {
control: self.modifiers.control,
alt: false,
shift: false,
platform: false,
function: false,
},
key: ime_key.to_string(),
ime_key: None,
});
}
possibilities.push(Keystroke {
ime_key: None,
..self.clone()
});
/// This method assumes that `self` was typed and `target` is in the keymap, and checks
/// both possibilities for `self` against the target.
pub(crate) fn should_match(&self, target: &Keystroke) -> bool {
if let Some(ime_key) = self
.ime_key
.as_ref()
.filter(|ime_key| ime_key != &&self.key)
{
let ime_modifiers = Modifiers {
control: self.modifiers.control,
..Default::default()
};
if &target.key == ime_key && target.modifiers == ime_modifiers {
return true;
}
None => possibilities.push(self.clone()),
}
possibilities
target.modifiers == self.modifiers && target.key == self.key
}
/// key syntax is:

View File

@@ -4,14 +4,14 @@ use crate::{
Context, Corners, CursorStyle, Decorations, DevicePixels, DispatchActionListener,
DispatchNodeId, DispatchTree, DisplayId, Edges, Effect, Entity, EntityId, EventEmitter,
FileDropEvent, Flatten, FontId, Global, GlobalElementId, GlyphId, Hsla, ImageData,
InputHandler, IsZero, KeyBinding, KeyContext, KeyDownEvent, KeyEvent, KeyMatch, KeymatchResult,
Keystroke, KeystrokeEvent, LayoutId, LineLayoutIndex, Model, ModelContext, Modifiers,
InputHandler, IsZero, KeyBinding, KeyContext, KeyDownEvent, KeyEvent, Keystroke,
KeystrokeEvent, LayoutId, LineLayoutIndex, Model, ModelContext, Modifiers,
ModifiersChangedEvent, MonochromeSprite, MouseButton, MouseEvent, MouseMoveEvent, MouseUpEvent,
Path, Pixels, PlatformAtlas, PlatformDisplay, PlatformInput, PlatformInputHandler,
PlatformWindow, Point, PolychromeSprite, PromptLevel, Quad, Render, RenderGlyphParams,
RenderImageParams, RenderSvgParams, ResizeEdge, ScaledPixels, Scene, Shadow, SharedString,
Size, StrikethroughStyle, Style, SubscriberSet, Subscription, TaffyLayoutEngine, Task,
TextStyle, TextStyleRefinement, TransformationMatrix, Underline, UnderlineStyle, View,
RenderImageParams, RenderSvgParams, Replay, ResizeEdge, ScaledPixels, Scene, Shadow,
SharedString, Size, StrikethroughStyle, Style, SubscriberSet, Subscription, TaffyLayoutEngine,
Task, TextStyle, TextStyleRefinement, TransformationMatrix, Underline, UnderlineStyle, View,
VisualContext, WeakView, WindowAppearance, WindowBackgroundAppearance, WindowBounds,
WindowControls, WindowDecorations, WindowOptions, WindowParams, WindowTextSystem,
SUBPIXEL_VARIANTS,
@@ -574,34 +574,10 @@ pub(crate) enum DrawPhase {
#[derive(Default, Debug)]
struct PendingInput {
keystrokes: SmallVec<[Keystroke; 1]>,
bindings: SmallVec<[KeyBinding; 1]>,
focus: Option<FocusId>,
timer: Option<Task<()>>,
}
impl PendingInput {
fn input(&self) -> String {
self.keystrokes
.iter()
.flat_map(|k| k.ime_key.clone())
.collect::<Vec<String>>()
.join("")
}
fn used_by_binding(&self, binding: &KeyBinding) -> bool {
if self.keystrokes.is_empty() {
return true;
}
let keystroke = &self.keystrokes[0];
for candidate in keystroke.match_candidates() {
if binding.match_keystrokes(&[candidate]) == KeyMatch::Pending {
return true;
}
}
false
}
}
pub(crate) struct ElementStateBox {
pub(crate) inner: Box<dyn Any>,
#[cfg(debug_assertions)]
@@ -969,10 +945,7 @@ impl<'a> WindowContext<'a> {
}
self.window.focus = Some(handle.id);
self.window
.rendered_frame
.dispatch_tree
.clear_pending_keystrokes();
self.clear_pending_keystrokes();
self.refresh();
}
@@ -1074,17 +1047,6 @@ impl<'a> WindowContext<'a> {
});
}
pub(crate) fn clear_pending_keystrokes(&mut self) {
self.window
.rendered_frame
.dispatch_tree
.clear_pending_keystrokes();
self.window
.next_frame
.dispatch_tree
.clear_pending_keystrokes();
}
/// Schedules the given function to be run at the end of the current effect cycle, allowing entities
/// that are currently on the stack to be returned to the app.
pub fn defer(&mut self, f: impl FnOnce(&mut WindowContext) + 'static) {
@@ -1453,14 +1415,6 @@ impl<'a> WindowContext<'a> {
self.draw_roots();
self.window.dirty_views.clear();
self.window
.next_frame
.dispatch_tree
.preserve_pending_keystrokes(
&mut self.window.rendered_frame.dispatch_tree,
self.window.focus,
);
self.window.next_frame.window_active = self.window.active.get();
// Register requested input handler with the platform window.
@@ -3253,8 +3207,6 @@ impl<'a> WindowContext<'a> {
.dispatch_tree
.dispatch_path(node_id);
let mut bindings: SmallVec<[KeyBinding; 1]> = SmallVec::new();
let mut pending = false;
let mut keystroke: Option<Keystroke> = None;
if let Some(event) = event.downcast_ref::<ModifiersChangedEvent>() {
@@ -3272,23 +3224,11 @@ impl<'a> WindowContext<'a> {
_ => None,
};
if let Some(key) = key {
let key = Keystroke {
keystroke = Some(Keystroke {
key: key.to_string(),
ime_key: None,
modifiers: Modifiers::default(),
};
let KeymatchResult {
bindings: modifier_bindings,
pending: pending_bindings,
} = self
.window
.rendered_frame
.dispatch_tree
.dispatch_key(&key, &dispatch_path);
keystroke = Some(key);
bindings = modifier_bindings;
pending = pending_bindings;
});
}
}
}
@@ -3300,73 +3240,68 @@ impl<'a> WindowContext<'a> {
self.window.pending_modifier.modifiers = event.modifiers
} else if let Some(key_down_event) = event.downcast_ref::<KeyDownEvent>() {
self.window.pending_modifier.saw_keystroke = true;
let KeymatchResult {
bindings: key_down_bindings,
pending: key_down_pending,
} = self
.window
.rendered_frame
.dispatch_tree
.dispatch_key(&key_down_event.keystroke, &dispatch_path);
keystroke = Some(key_down_event.keystroke.clone());
bindings = key_down_bindings;
pending = key_down_pending;
}
if keystroke.is_none() {
let Some(keystroke) = keystroke else {
self.finish_dispatch_key_event(event, dispatch_path);
return;
};
let mut currently_pending = self.window.pending_input.take().unwrap_or_default();
if currently_pending.focus.is_some() && currently_pending.focus != self.window.focus {
currently_pending = PendingInput::default();
}
if pending {
let mut currently_pending = self.window.pending_input.take().unwrap_or_default();
if currently_pending.focus.is_some() && currently_pending.focus != self.window.focus {
currently_pending = PendingInput::default();
}
currently_pending.focus = self.window.focus;
if let Some(keystroke) = keystroke {
currently_pending.keystrokes.push(keystroke.clone());
}
for binding in bindings {
currently_pending.bindings.push(binding);
}
let match_result = self.window.rendered_frame.dispatch_tree.dispatch_key(
currently_pending.keystrokes,
keystroke,
&dispatch_path,
);
if !match_result.to_replay.is_empty() {
self.replay_pending_input(match_result.to_replay)
}
if !match_result.pending.is_empty() {
currently_pending.keystrokes = match_result.pending;
currently_pending.focus = self.window.focus;
currently_pending.timer = Some(self.spawn(|mut cx| async move {
cx.background_executor.timer(Duration::from_secs(1)).await;
cx.update(move |cx| {
cx.clear_pending_keystrokes();
let Some(currently_pending) = cx.window.pending_input.take() else {
let Some(currently_pending) = cx
.window
.pending_input
.take()
.filter(|pending| pending.focus == cx.window.focus)
else {
return;
};
cx.replay_pending_input(currently_pending);
cx.pending_input_changed();
let dispatch_path = cx
.window
.rendered_frame
.dispatch_tree
.dispatch_path(node_id);
let to_replay = cx
.window
.rendered_frame
.dispatch_tree
.flush_dispatch(currently_pending.keystrokes, &dispatch_path);
cx.replay_pending_input(to_replay)
})
.log_err();
}));
self.window.pending_input = Some(currently_pending);
self.pending_input_changed();
self.propagate_event = false;
return;
} else if let Some(currently_pending) = self.window.pending_input.take() {
self.pending_input_changed();
if bindings
.iter()
.all(|binding| !currently_pending.used_by_binding(binding))
{
self.replay_pending_input(currently_pending)
}
}
if !bindings.is_empty() {
self.clear_pending_keystrokes();
}
self.pending_input_changed();
self.propagate_event = true;
for binding in bindings {
for binding in match_result.bindings {
self.dispatch_action_on_node(node_id, binding.action.as_ref());
if !self.propagate_event {
self.dispatch_keystroke_observers(event, Some(binding.action));
@@ -3453,10 +3388,11 @@ impl<'a> WindowContext<'a> {
/// Determine whether a potential multi-stroke key binding is in progress on this window.
pub fn has_pending_keystrokes(&self) -> bool {
self.window
.rendered_frame
.dispatch_tree
.has_pending_keystrokes()
self.window.pending_input.is_some()
}
fn clear_pending_keystrokes(&mut self) {
self.window.pending_input.take();
}
/// Returns the currently pending input keystrokes that might result in a multi-stroke key binding.
@@ -3467,7 +3403,7 @@ impl<'a> WindowContext<'a> {
.map(|pending_input| pending_input.keystrokes.as_slice())
}
fn replay_pending_input(&mut self, currently_pending: PendingInput) {
fn replay_pending_input(&mut self, replays: SmallVec<[Replay; 1]>) {
let node_id = self
.window
.focus
@@ -3479,42 +3415,36 @@ impl<'a> WindowContext<'a> {
})
.unwrap_or_else(|| self.window.rendered_frame.dispatch_tree.root_node_id());
if self.window.focus != currently_pending.focus {
return;
}
let input = currently_pending.input();
self.propagate_event = true;
for binding in currently_pending.bindings {
self.dispatch_action_on_node(node_id, binding.action.as_ref());
if !self.propagate_event {
return;
}
}
let dispatch_path = self
.window
.rendered_frame
.dispatch_tree
.dispatch_path(node_id);
for keystroke in currently_pending.keystrokes {
'replay: for replay in replays {
let event = KeyDownEvent {
keystroke,
keystroke: replay.keystroke.clone(),
is_held: false,
};
self.propagate_event = true;
for binding in replay.bindings {
self.dispatch_action_on_node(node_id, binding.action.as_ref());
if !self.propagate_event {
self.dispatch_keystroke_observers(&event, Some(binding.action));
continue 'replay;
}
}
self.dispatch_key_down_up_event(&event, &dispatch_path);
if !self.propagate_event {
return;
continue 'replay;
}
}
if !input.is_empty() {
if let Some(mut input_handler) = self.window.platform_window.take_input_handler() {
input_handler.dispatch_input(&input, self);
self.window.platform_window.set_input_handler(input_handler)
if let Some(input) = replay.keystroke.ime_key.as_ref().cloned() {
if let Some(mut input_handler) = self.window.platform_window.take_input_handler() {
input_handler.dispatch_input(&input, self);
self.window.platform_window.set_input_handler(input_handler)
}
}
}
}

View File

@@ -27,6 +27,7 @@ use gpui::{
use lazy_static::lazy_static;
use lsp::LanguageServerId;
use parking_lot::Mutex;
use serde_json::Value;
use similar::{ChangeTag, TextDiff};
use smallvec::SmallVec;
use smol::future::yield_now;
@@ -213,6 +214,8 @@ pub struct Diagnostic {
pub is_disk_based: bool,
/// Whether this diagnostic marks unnecessary code.
pub is_unnecessary: bool,
/// Data from language server that produced this diagnostic. Passed back to the LS when we request code actions for this diagnostic.
pub data: Option<Value>,
}
/// TODO - move this into the `project` crate and make it private.
@@ -3844,6 +3847,7 @@ impl Default for Diagnostic {
is_primary: false,
is_disk_based: false,
is_unnecessary: false,
data: None,
}
}
}

View File

@@ -69,6 +69,7 @@ impl DiagnosticEntry<PointUtf16> {
severity: Some(self.diagnostic.severity),
source: self.diagnostic.source.clone(),
message: self.diagnostic.message.clone(),
data: self.diagnostic.data.clone(),
..Default::default()
}
}

View File

@@ -5,7 +5,8 @@ use anyhow::{anyhow, Context as _, Result};
use clock::ReplicaId;
use lsp::{DiagnosticSeverity, LanguageServerId};
use rpc::proto;
use std::{ops::Range, sync::Arc};
use serde_json::Value;
use std::{ops::Range, str::FromStr, sync::Arc};
use text::*;
pub use proto::{BufferState, Operation};
@@ -213,6 +214,7 @@ pub fn serialize_diagnostics<'a>(
code: entry.diagnostic.code.clone(),
is_disk_based: entry.diagnostic.is_disk_based,
is_unnecessary: entry.diagnostic.is_unnecessary,
data: entry.diagnostic.data.as_ref().map(|data| data.to_string()),
})
.collect()
}
@@ -396,6 +398,11 @@ pub fn deserialize_diagnostics(
diagnostics
.into_iter()
.filter_map(|diagnostic| {
let data = if let Some(data) = diagnostic.data {
Some(Value::from_str(&data).ok()?)
} else {
None
};
Some(DiagnosticEntry {
range: deserialize_anchor(diagnostic.start?)?..deserialize_anchor(diagnostic.end?)?,
diagnostic: Diagnostic {
@@ -413,6 +420,7 @@ pub fn deserialize_diagnostics(
is_primary: diagnostic.is_primary,
is_disk_based: diagnostic.is_disk_based,
is_unnecessary: diagnostic.is_unnecessary,
data,
},
})
})

View File

@@ -4595,6 +4595,7 @@ impl Project {
is_primary: true,
is_disk_based,
is_unnecessary,
data: diagnostic.data.clone(),
},
});
if let Some(infos) = &diagnostic.related_information {
@@ -4612,6 +4613,7 @@ impl Project {
is_primary: false,
is_disk_based,
is_unnecessary: false,
data: diagnostic.data.clone(),
},
});
}

View File

@@ -1890,6 +1890,7 @@ message Diagnostic {
Information = 3;
Hint = 4;
}
optional string data = 12;
}
message Operation {

View File

@@ -29,7 +29,7 @@ pub struct KernelSpecification {
impl KernelSpecification {
#[must_use]
fn command(&self, connection_path: &PathBuf) -> anyhow::Result<Command> {
fn command(&self, connection_path: &PathBuf) -> Result<Command> {
let argv = &self.kernelspec.argv;
anyhow::ensure!(!argv.is_empty(), "Empty argv in kernelspec {}", self.name);
@@ -60,7 +60,7 @@ impl KernelSpecification {
// Find a set of open ports. This creates a listener with port set to 0. The listener will be closed at the end when it goes out of scope.
// There's a race condition between closing the ports and usage by a kernel, but it's inherent to the Jupyter protocol.
async fn peek_ports(ip: IpAddr) -> anyhow::Result<[u16; 5]> {
async fn peek_ports(ip: IpAddr) -> Result<[u16; 5]> {
let mut addr_zeroport: SocketAddr = SocketAddr::new(ip, 0);
addr_zeroport.set_port(0);
let mut ports: [u16; 5] = [0; 5];
@@ -166,10 +166,10 @@ impl Kernel {
pub struct RunningKernel {
pub process: smol::process::Child,
_shell_task: Task<anyhow::Result<()>>,
_iopub_task: Task<anyhow::Result<()>>,
_control_task: Task<anyhow::Result<()>>,
_routing_task: Task<anyhow::Result<()>>,
_shell_task: Task<Result<()>>,
_iopub_task: Task<Result<()>>,
_control_task: Task<Result<()>>,
_routing_task: Task<Result<()>>,
connection_path: PathBuf,
pub working_directory: PathBuf,
pub request_tx: mpsc::Sender<JupyterMessage>,
@@ -194,7 +194,7 @@ impl RunningKernel {
working_directory: PathBuf,
fs: Arc<dyn Fs>,
cx: &mut AppContext,
) -> Task<anyhow::Result<(Self, JupyterMessageChannel)>> {
) -> Task<Result<(Self, JupyterMessageChannel)>> {
cx.spawn(|cx| async move {
let ip = IpAddr::V4(Ipv4Addr::new(127, 0, 0, 1));
let ports = peek_ports(ip).await?;
@@ -332,7 +332,7 @@ async fn read_kernelspec_at(
// /usr/local/share/jupyter/kernels/python3
kernel_dir: PathBuf,
fs: &dyn Fs,
) -> anyhow::Result<KernelSpecification> {
) -> Result<KernelSpecification> {
let path = kernel_dir;
let kernel_name = if let Some(kernel_name) = path.file_name() {
kernel_name.to_string_lossy().to_string()
@@ -356,7 +356,7 @@ async fn read_kernelspec_at(
}
/// Read a directory of kernelspec directories
async fn read_kernels_dir(path: PathBuf, fs: &dyn Fs) -> anyhow::Result<Vec<KernelSpecification>> {
async fn read_kernels_dir(path: PathBuf, fs: &dyn Fs) -> Result<Vec<KernelSpecification>> {
let mut kernelspec_dirs = fs.read_dir(&path).await?;
let mut valid_kernelspecs = Vec::new();
@@ -376,7 +376,7 @@ async fn read_kernels_dir(path: PathBuf, fs: &dyn Fs) -> anyhow::Result<Vec<Kern
Ok(valid_kernelspecs)
}
pub async fn kernel_specifications(fs: Arc<dyn Fs>) -> anyhow::Result<Vec<KernelSpecification>> {
pub async fn kernel_specifications(fs: Arc<dyn Fs>) -> Result<Vec<KernelSpecification>> {
let data_dirs = dirs::data_dirs();
let kernel_dirs = data_dirs
.iter()

View File

@@ -1,11 +1,13 @@
use async_dispatcher::{set_dispatcher, Dispatcher, Runnable};
use gpui::{AppContext, PlatformDispatcher};
use project::Fs;
use settings::Settings as _;
use std::{sync::Arc, time::Duration};
mod jupyter_settings;
mod kernels;
mod outputs;
mod repl_store;
mod runtime_panel;
mod session;
mod stdio;
@@ -17,6 +19,8 @@ pub use runtime_panel::{RuntimePanel, SessionSupport};
pub use runtimelib::ExecutionState;
pub use session::Session;
use crate::repl_store::ReplStore;
fn zed_dispatcher(cx: &mut AppContext) -> impl Dispatcher {
struct ZedDispatcher {
dispatcher: Arc<dyn PlatformDispatcher>,
@@ -41,8 +45,10 @@ fn zed_dispatcher(cx: &mut AppContext) -> impl Dispatcher {
}
}
pub fn init(cx: &mut AppContext) {
pub fn init(fs: Arc<dyn Fs>, cx: &mut AppContext) {
set_dispatcher(zed_dispatcher(cx));
JupyterSettings::register(cx);
runtime_panel::init(cx)
editor::init_settings(cx);
runtime_panel::init(cx);
ReplStore::init(fs, cx);
}

View File

@@ -0,0 +1,118 @@
use std::sync::Arc;
use anyhow::Result;
use collections::HashMap;
use gpui::{
prelude::*, AppContext, EntityId, Global, Model, ModelContext, Subscription, Task, View,
};
use language::Language;
use project::Fs;
use settings::{Settings, SettingsStore};
use crate::kernels::kernel_specifications;
use crate::{JupyterSettings, KernelSpecification, Session};
/// Newtype wrapper that stores the app-wide [`ReplStore`] model as a gpui global.
struct GlobalReplStore(Model<ReplStore>);

impl Global for GlobalReplStore {}
/// Long-lived store for REPL state: the discovered Jupyter kernel
/// specifications and the running sessions, keyed by the owning editor.
pub struct ReplStore {
    fs: Arc<dyn Fs>,
    // Mirrors `JupyterSettings::enabled`; kept in sync by a settings observer.
    enabled: bool,
    // One session per editor, keyed by the editor's `EntityId`.
    sessions: HashMap<EntityId, View<Session>>,
    kernel_specifications: Vec<KernelSpecification>,
    _subscriptions: Vec<Subscription>,
}
impl ReplStore {
    /// Creates the store and registers it as a gpui global.
    pub(crate) fn init(fs: Arc<dyn Fs>, cx: &mut AppContext) {
        let store = cx.new_model(move |cx| Self::new(fs, cx));
        cx.set_global(GlobalReplStore(store))
    }

    /// Returns a handle to the globally registered store.
    pub fn global(cx: &AppContext) -> Model<Self> {
        cx.global::<GlobalReplStore>().0.clone()
    }

    pub fn new(fs: Arc<dyn Fs>, cx: &mut ModelContext<Self>) -> Self {
        // Follow the Jupyter "enabled" setting for the store's lifetime.
        let settings_subscription = cx.observe_global::<SettingsStore>(move |store, cx| {
            store.set_enabled(JupyterSettings::enabled(cx), cx);
        });

        Self {
            fs,
            enabled: JupyterSettings::enabled(cx),
            sessions: HashMap::default(),
            kernel_specifications: Vec::new(),
            _subscriptions: vec![settings_subscription],
        }
    }

    /// Whether the REPL feature is currently enabled in settings.
    pub fn is_enabled(&self) -> bool {
        self.enabled
    }

    /// Iterates over every kernel specification discovered so far.
    pub fn kernel_specifications(&self) -> impl Iterator<Item = &KernelSpecification> {
        self.kernel_specifications.iter()
    }

    /// Iterates over every live session.
    pub fn sessions(&self) -> impl Iterator<Item = &View<Session>> {
        self.sessions.values()
    }

    fn set_enabled(&mut self, enabled: bool, cx: &mut ModelContext<Self>) {
        if self.enabled == enabled {
            return;
        }
        self.enabled = enabled;
        cx.notify();
    }

    /// Re-scans the filesystem for kernelspecs and replaces the cached list.
    pub fn refresh_kernelspecs(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
        let scan = kernel_specifications(self.fs.clone());
        cx.spawn(|this, mut cx| async move {
            let specs = scan.await?;
            this.update(&mut cx, |this, cx| {
                this.kernel_specifications = specs;
                cx.notify();
            })
        })
    }

    /// Picks the kernel specification to use for `language`.
    ///
    /// A kernel explicitly selected in settings takes priority; otherwise the
    /// first spec whose declared language matches is chosen.
    pub fn kernelspec(
        &self,
        language: &Language,
        cx: &mut ModelContext<Self>,
    ) -> Option<KernelSpecification> {
        let settings = JupyterSettings::get_global(cx);
        let language_name = language.code_fence_block_name();
        let selection = settings.kernel_selections.get(language_name.as_ref());

        self.kernel_specifications
            .iter()
            .find(|spec| match selection {
                // Top priority is the kernel the user selected in settings.
                Some(selected) => spec.name.to_lowercase() == selected.to_lowercase(),
                // Otherwise fall back to matching on the kernel's language.
                None => spec.kernelspec.language.to_lowercase() == language_name.to_lowercase(),
            })
            .cloned()
    }

    pub fn get_session(&self, entity_id: EntityId) -> Option<&View<Session>> {
        self.sessions.get(&entity_id)
    }

    pub fn insert_session(&mut self, entity_id: EntityId, session: View<Session>) {
        self.sessions.insert(entity_id, session);
    }

    pub fn remove_session(&mut self, entity_id: EntityId) {
        self.sessions.remove(&entity_id);
    }
}

View File

@@ -1,19 +1,19 @@
use crate::repl_store::ReplStore;
use crate::{
jupyter_settings::{JupyterDockPosition, JupyterSettings},
kernels::{kernel_specifications, KernelSpecification},
kernels::KernelSpecification,
session::{Session, SessionEvent},
};
use anyhow::{Context as _, Result};
use collections::HashMap;
use editor::{Anchor, Editor, RangeToAnchorExt};
use gpui::{
actions, prelude::*, AppContext, AsyncWindowContext, EntityId, EventEmitter, FocusHandle,
FocusOutEvent, FocusableView, Subscription, Task, View, WeakView,
actions, prelude::*, AppContext, AsyncWindowContext, EventEmitter, FocusHandle, FocusOutEvent,
FocusableView, Subscription, Task, View, WeakView,
};
use language::{Language, Point};
use multi_buffer::MultiBufferRow;
use project::Fs;
use settings::{Settings as _, SettingsStore};
use settings::Settings as _;
use std::{ops::Range, sync::Arc};
use ui::{prelude::*, ButtonLike, ElevationIndex, KeyBinding};
use util::ResultExt as _;
@@ -28,6 +28,13 @@ actions!(
);
actions!(repl_panel, [ToggleFocus]);
pub enum SessionSupport {
ActiveSession(View<Session>),
Inactive(Box<KernelSpecification>),
RequiresSetup(Arc<str>),
Unsupported,
}
pub fn init(cx: &mut AppContext) {
cx.observe_new_views(
|workspace: &mut Workspace, _cx: &mut ViewContext<Workspace>| {
@@ -35,12 +42,11 @@ pub fn init(cx: &mut AppContext) {
workspace.toggle_panel_focus::<RuntimePanel>(cx);
});
workspace.register_action(|workspace, _: &RefreshKernelspecs, cx| {
if let Some(panel) = workspace.panel::<RuntimePanel>(cx) {
panel.update(cx, |panel, cx| {
panel.refresh_kernelspecs(cx).detach();
});
}
workspace.register_action(|_workspace, _: &RefreshKernelspecs, cx| {
let store = ReplStore::global(cx);
store.update(cx, |store, cx| {
store.refresh_kernelspecs(cx).detach();
});
});
},
)
@@ -145,11 +151,8 @@ pub fn init(cx: &mut AppContext) {
pub struct RuntimePanel {
fs: Arc<dyn Fs>,
enabled: bool,
focus_handle: FocusHandle,
width: Option<Pixels>,
sessions: HashMap<EntityId, View<Session>>,
kernel_specifications: Vec<KernelSpecification>,
_subscriptions: Vec<Subscription>,
}
@@ -168,39 +171,29 @@ impl RuntimePanel {
let subscriptions = vec![
cx.on_focus_in(&focus_handle, Self::focus_in),
cx.on_focus_out(&focus_handle, Self::focus_out),
cx.observe_global::<SettingsStore>(move |this, cx| {
this.set_enabled(JupyterSettings::enabled(cx), cx);
}),
];
let runtime_panel = Self {
fs: fs.clone(),
fs,
width: None,
focus_handle,
kernel_specifications: Vec::new(),
sessions: Default::default(),
_subscriptions: subscriptions,
enabled: JupyterSettings::enabled(cx),
};
runtime_panel
})
})?;
view.update(&mut cx, |this, cx| this.refresh_kernelspecs(cx))?
.await?;
view.update(&mut cx, |_panel, cx| {
let store = ReplStore::global(cx);
store.update(cx, |store, cx| store.refresh_kernelspecs(cx))
})?
.await?;
Ok(view)
})
}
fn set_enabled(&mut self, enabled: bool, cx: &mut ViewContext<Self>) {
if self.enabled != enabled {
self.enabled = enabled;
cx.notify();
}
}
fn focus_in(&mut self, cx: &mut ViewContext<Self>) {
cx.notify();
}
@@ -209,8 +202,7 @@ impl RuntimePanel {
cx.notify();
}
pub fn snippet(
&self,
fn snippet(
editor: WeakView<Editor>,
cx: &mut ViewContext<Self>,
) -> Option<(String, Arc<Language>, Range<Anchor>)> {
@@ -255,93 +247,59 @@ impl RuntimePanel {
Some((selected_text, start_language.clone(), anchor_range))
}
pub fn language(
&self,
editor: WeakView<Editor>,
cx: &mut ViewContext<Self>,
) -> Option<Arc<Language>> {
fn language(editor: WeakView<Editor>, cx: &mut ViewContext<Self>) -> Option<Arc<Language>> {
let editor = editor.upgrade()?;
let selection = editor.read(cx).selections.newest::<usize>(cx);
let buffer = editor.read(cx).buffer().read(cx).snapshot(cx);
buffer.language_at(selection.head()).cloned()
}
pub fn refresh_kernelspecs(&mut self, cx: &mut ViewContext<Self>) -> Task<anyhow::Result<()>> {
let kernel_specifications = kernel_specifications(self.fs.clone());
cx.spawn(|this, mut cx| async move {
let kernel_specifications = kernel_specifications.await?;
pub fn run(&mut self, editor: WeakView<Editor>, cx: &mut ViewContext<Self>) -> Result<()> {
let store = ReplStore::global(cx);
this.update(&mut cx, |this, cx| {
this.kernel_specifications = kernel_specifications;
cx.notify();
})
})
}
pub fn kernelspec(
&self,
language: &Language,
cx: &mut ViewContext<Self>,
) -> Option<KernelSpecification> {
let settings = JupyterSettings::get_global(cx);
let language_name = language.code_fence_block_name();
let selected_kernel = settings.kernel_selections.get(language_name.as_ref());
self.kernel_specifications
.iter()
.find(|runtime_specification| {
if let Some(selected) = selected_kernel {
// Top priority is the selected kernel
runtime_specification.name.to_lowercase() == selected.to_lowercase()
} else {
// Otherwise, we'll try to find a kernel that matches the language
runtime_specification.kernelspec.language.to_lowercase()
== language_name.to_lowercase()
}
})
.cloned()
}
pub fn run(
&mut self,
editor: WeakView<Editor>,
cx: &mut ViewContext<Self>,
) -> anyhow::Result<()> {
if !self.enabled {
if !store.read(cx).is_enabled() {
return Ok(());
}
let (selected_text, language, anchor_range) = match self.snippet(editor.clone(), cx) {
let (selected_text, language, anchor_range) = match Self::snippet(editor.clone(), cx) {
Some(snippet) => snippet,
None => return Ok(()),
};
let entity_id = editor.entity_id();
let kernel_specification = self
.kernelspec(&language, cx)
.with_context(|| format!("No kernel found for language: {}", language.name()))?;
let kernel_specification = store.update(cx, |store, cx| {
store
.kernelspec(&language, cx)
.with_context(|| format!("No kernel found for language: {}", language.name()))
})?;
let session = self.sessions.entry(entity_id).or_insert_with(|| {
let view =
let session = if let Some(session) = store.read(cx).get_session(entity_id).cloned() {
session
} else {
let session =
cx.new_view(|cx| Session::new(editor, self.fs.clone(), kernel_specification, cx));
cx.notify();
let subscription = cx.subscribe(
&view,
|panel: &mut RuntimePanel, _session: View<Session>, event: &SessionEvent, _cx| {
match event {
SessionEvent::Shutdown(shutdown_event) => {
panel.sessions.remove(&shutdown_event.entity_id());
}
let subscription = cx.subscribe(&session, {
let store = store.clone();
move |_this, _session, event, cx| match event {
SessionEvent::Shutdown(shutdown_event) => {
store.update(cx, |store, _cx| {
store.remove_session(shutdown_event.entity_id());
});
}
},
);
}
});
subscription.detach();
view
});
store.update(cx, |store, _cx| {
store.insert_session(entity_id, session.clone());
});
session
};
session.update(cx, |session, cx| {
session.execute(&selected_text, anchor_range, cx);
@@ -350,9 +308,38 @@ impl RuntimePanel {
anyhow::Ok(())
}
pub fn clear_outputs(&mut self, editor: WeakView<Editor>, cx: &mut ViewContext<Self>) {
pub fn session(
&mut self,
editor: WeakView<Editor>,
cx: &mut ViewContext<Self>,
) -> SessionSupport {
let store = ReplStore::global(cx);
let entity_id = editor.entity_id();
if let Some(session) = self.sessions.get_mut(&entity_id) {
if let Some(session) = store.read(cx).get_session(entity_id).cloned() {
return SessionSupport::ActiveSession(session);
};
let language = Self::language(editor, cx);
let language = match language {
Some(language) => language,
None => return SessionSupport::Unsupported,
};
let kernelspec = store.update(cx, |store, cx| store.kernelspec(&language, cx));
match kernelspec {
Some(kernelspec) => SessionSupport::Inactive(Box::new(kernelspec)),
None => match language.name().as_ref() {
"TypeScript" | "Python" => SessionSupport::RequiresSetup(language.name()),
_ => SessionSupport::Unsupported,
},
}
}
pub fn clear_outputs(&mut self, editor: WeakView<Editor>, cx: &mut ViewContext<Self>) {
let store = ReplStore::global(cx);
let entity_id = editor.entity_id();
if let Some(session) = store.read(cx).get_session(entity_id).cloned() {
session.update(cx, |session, cx| {
session.clear_outputs(cx);
});
@@ -361,8 +348,9 @@ impl RuntimePanel {
}
pub fn interrupt(&mut self, editor: WeakView<Editor>, cx: &mut ViewContext<Self>) {
let store = ReplStore::global(cx);
let entity_id = editor.entity_id();
if let Some(session) = self.sessions.get_mut(&entity_id) {
if let Some(session) = store.read(cx).get_session(entity_id).cloned() {
session.update(cx, |session, cx| {
session.interrupt(cx);
});
@@ -370,9 +358,10 @@ impl RuntimePanel {
}
}
pub fn shutdown(&self, editor: WeakView<Editor>, cx: &mut ViewContext<RuntimePanel>) {
pub fn shutdown(&self, editor: WeakView<Editor>, cx: &mut ViewContext<Self>) {
let store = ReplStore::global(cx);
let entity_id = editor.entity_id();
if let Some(session) = self.sessions.get(&entity_id) {
if let Some(session) = store.read(cx).get_session(entity_id).cloned() {
session.update(cx, |session, cx| {
session.shutdown(cx);
});
@@ -381,51 +370,6 @@ impl RuntimePanel {
}
}
pub enum SessionSupport {
ActiveSession(View<Session>),
Inactive(Box<KernelSpecification>),
RequiresSetup(Arc<str>),
Unsupported,
}
impl RuntimePanel {
pub fn session(
&mut self,
editor: WeakView<Editor>,
cx: &mut ViewContext<Self>,
) -> SessionSupport {
let entity_id = editor.entity_id();
let session = self.sessions.get(&entity_id).cloned();
match session {
Some(session) => SessionSupport::ActiveSession(session),
None => {
let language = self.language(editor, cx);
let language = match language {
Some(language) => language,
None => return SessionSupport::Unsupported,
};
// Check for kernelspec
let kernelspec = self.kernelspec(&language, cx);
match kernelspec {
Some(kernelspec) => SessionSupport::Inactive(Box::new(kernelspec)),
None => {
// If no kernelspec but language is one of typescript or python
// then we return RequiresSetup
match language.name().as_ref() {
"TypeScript" | "Python" => {
SessionSupport::RequiresSetup(language.name())
}
_ => SessionSupport::Unsupported,
}
}
}
}
}
}
}
impl Panel for RuntimePanel {
fn persistent_name() -> &'static str {
"RuntimePanel"
@@ -468,8 +412,10 @@ impl Panel for RuntimePanel {
self.width = size;
}
fn icon(&self, _cx: &ui::WindowContext) -> Option<ui::IconName> {
if !self.enabled {
fn icon(&self, cx: &ui::WindowContext) -> Option<ui::IconName> {
let store = ReplStore::global(cx);
if !store.read(cx).is_enabled() {
return None;
}
@@ -495,38 +441,47 @@ impl FocusableView for RuntimePanel {
impl Render for RuntimePanel {
fn render(&mut self, cx: &mut ViewContext<Self>) -> impl IntoElement {
let store = ReplStore::global(cx);
let (kernel_specifications, sessions) = store.update(cx, |store, _cx| {
(
store.kernel_specifications().cloned().collect::<Vec<_>>(),
store.sessions().cloned().collect::<Vec<_>>(),
)
});
// When there are no kernel specifications, show a link to the Zed docs explaining how to
// install kernels. It can be assumed they don't have a running kernel if we have no
// specifications.
if self.kernel_specifications.is_empty() {
if kernel_specifications.is_empty() {
return v_flex()
.p_4()
.size_full()
.gap_2()
.child(Label::new("No Jupyter Kernels Available").size(LabelSize::Large))
.child(
Label::new("To start interactively running code in your editor, you need to install and configure Jupyter kernels.")
.size(LabelSize::Default),
)
.child(
h_flex().w_full().p_4().justify_center().gap_2().child(
ButtonLike::new("install-kernels")
.style(ButtonStyle::Filled)
.size(ButtonSize::Large)
.layer(ElevationIndex::ModalSurface)
.child(Label::new("Install Kernels"))
.on_click(move |_, cx| {
cx.open_url(
"https://docs.jupyter.org/en/latest/install/kernels.html",
)
}),
),
)
.child(Label::new("No Jupyter Kernels Available").size(LabelSize::Large))
.child(
Label::new("To start interactively running code in your editor, you need to install and configure Jupyter kernels.")
.size(LabelSize::Default),
)
.child(
h_flex().w_full().p_4().justify_center().gap_2().child(
ButtonLike::new("install-kernels")
.style(ButtonStyle::Filled)
.size(ButtonSize::Large)
.layer(ElevationIndex::ModalSurface)
.child(Label::new("Install Kernels"))
.on_click(move |_, cx| {
cx.open_url(
"https://docs.jupyter.org/en/latest/install/kernels.html",
)
}),
),
)
.into_any_element();
}
// When there are no sessions, show the command to run code in an editor
if self.sessions.is_empty() {
if sessions.is_empty() {
return v_flex()
.p_4()
.size_full()
@@ -546,7 +501,7 @@ impl Render for RuntimePanel {
)
.child(Label::new("Kernels available").size(LabelSize::Large))
.children(
self.kernel_specifications.iter().map(|spec| {
kernel_specifications.into_iter().map(|spec| {
h_flex().gap_2().child(Label::new(spec.name.clone()))
.child(Label::new(spec.kernelspec.language.clone()).color(Color::Muted))
})
@@ -559,8 +514,8 @@ impl Render for RuntimePanel {
.p_4()
.child(Label::new("Jupyter Kernel Sessions").size(LabelSize::Large))
.children(
self.sessions
.values()
sessions
.into_iter()
.map(|session| session.clone().into_any_element()),
)
.into_any_element()

View File

@@ -19,8 +19,6 @@ crate-type = ["bin"]
[dependencies]
anyhow.workspace = true
arrayvec.workspace = true
blake3.workspace = true
client.workspace = true
clock.workspace = true
collections.workspace = true
@@ -30,7 +28,6 @@ futures.workspace = true
futures-batch.workspace = true
gpui.workspace = true
language.workspace = true
language_model.workspace = true
log.workspace = true
heed.workspace = true
http.workspace = true

View File

@@ -12,12 +12,6 @@ use futures::{future::BoxFuture, FutureExt};
use serde::{Deserialize, Serialize};
use std::{fmt, future};
/// Trait for embedding providers. Texts in, vectors out.
pub trait EmbeddingProvider: Sync + Send {
fn embed<'a>(&'a self, texts: &'a [TextToEmbed<'a>]) -> BoxFuture<'a, Result<Vec<Embedding>>>;
fn batch_size(&self) -> usize;
}
#[derive(Debug, Default, Clone, PartialEq, Serialize, Deserialize)]
pub struct Embedding(Vec<f32>);
@@ -74,6 +68,12 @@ impl fmt::Display for Embedding {
}
}
/// Trait for embedding providers. Texts in, vectors out.
pub trait EmbeddingProvider: Sync + Send {
fn embed<'a>(&'a self, texts: &'a [TextToEmbed<'a>]) -> BoxFuture<'a, Result<Vec<Embedding>>>;
fn batch_size(&self) -> usize;
}
#[derive(Debug)]
pub struct TextToEmbed<'a> {
pub text: &'a str,

View File

@@ -1,470 +0,0 @@
use crate::{
chunking::{self, Chunk},
embedding::{Embedding, EmbeddingProvider, TextToEmbed},
indexing::{IndexingEntryHandle, IndexingEntrySet},
};
use anyhow::{anyhow, Context as _, Result};
use collections::Bound;
use fs::Fs;
use futures::stream::StreamExt;
use futures_batch::ChunksTimeoutStreamExt;
use gpui::{AppContext, Model, Task};
use heed::types::{SerdeBincode, Str};
use language::LanguageRegistry;
use log;
use project::{Entry, UpdatedEntriesSet, Worktree};
use serde::{Deserialize, Serialize};
use smol::channel;
use std::{
cmp::Ordering,
future::Future,
iter,
path::Path,
sync::Arc,
time::{Duration, SystemTime},
};
use util::ResultExt;
use worktree::Snapshot;
pub struct EmbeddingIndex {
worktree: Model<Worktree>,
db_connection: heed::Env,
db: heed::Database<Str, SerdeBincode<EmbeddedFile>>,
fs: Arc<dyn Fs>,
language_registry: Arc<LanguageRegistry>,
embedding_provider: Arc<dyn EmbeddingProvider>,
entry_ids_being_indexed: Arc<IndexingEntrySet>,
}
impl EmbeddingIndex {
pub fn new(
worktree: Model<Worktree>,
fs: Arc<dyn Fs>,
db_connection: heed::Env,
embedding_db: heed::Database<Str, SerdeBincode<EmbeddedFile>>,
language_registry: Arc<LanguageRegistry>,
embedding_provider: Arc<dyn EmbeddingProvider>,
entry_ids_being_indexed: Arc<IndexingEntrySet>,
) -> Self {
Self {
worktree,
fs,
db_connection,
db: embedding_db,
language_registry,
embedding_provider,
entry_ids_being_indexed,
}
}
pub fn db(&self) -> &heed::Database<Str, SerdeBincode<EmbeddedFile>> {
&self.db
}
pub fn index_entries_changed_on_disk(
&self,
cx: &AppContext,
) -> impl Future<Output = Result<()>> {
let worktree = self.worktree.read(cx).snapshot();
let worktree_abs_path = worktree.abs_path().clone();
let scan = self.scan_entries(worktree, cx);
let chunk = self.chunk_files(worktree_abs_path, scan.updated_entries, cx);
let embed = Self::embed_files(self.embedding_provider.clone(), chunk.files, cx);
let persist = self.persist_embeddings(scan.deleted_entry_ranges, embed.files, cx);
async move {
futures::try_join!(scan.task, chunk.task, embed.task, persist)?;
Ok(())
}
}
pub fn index_updated_entries(
&self,
updated_entries: UpdatedEntriesSet,
cx: &AppContext,
) -> impl Future<Output = Result<()>> {
let worktree = self.worktree.read(cx).snapshot();
let worktree_abs_path = worktree.abs_path().clone();
let scan = self.scan_updated_entries(worktree, updated_entries.clone(), cx);
let chunk = self.chunk_files(worktree_abs_path, scan.updated_entries, cx);
let embed = Self::embed_files(self.embedding_provider.clone(), chunk.files, cx);
let persist = self.persist_embeddings(scan.deleted_entry_ranges, embed.files, cx);
async move {
futures::try_join!(scan.task, chunk.task, embed.task, persist)?;
Ok(())
}
}
fn scan_entries(&self, worktree: Snapshot, cx: &AppContext) -> ScanEntries {
let (updated_entries_tx, updated_entries_rx) = channel::bounded(512);
let (deleted_entry_ranges_tx, deleted_entry_ranges_rx) = channel::bounded(128);
let db_connection = self.db_connection.clone();
let db = self.db;
let entries_being_indexed = self.entry_ids_being_indexed.clone();
let task = cx.background_executor().spawn(async move {
let txn = db_connection
.read_txn()
.context("failed to create read transaction")?;
let mut db_entries = db
.iter(&txn)
.context("failed to create iterator")?
.move_between_keys()
.peekable();
let mut deletion_range: Option<(Bound<&str>, Bound<&str>)> = None;
for entry in worktree.files(false, 0) {
let entry_db_key = db_key_for_path(&entry.path);
let mut saved_mtime = None;
while let Some(db_entry) = db_entries.peek() {
match db_entry {
Ok((db_path, db_embedded_file)) => match (*db_path).cmp(&entry_db_key) {
Ordering::Less => {
if let Some(deletion_range) = deletion_range.as_mut() {
deletion_range.1 = Bound::Included(db_path);
} else {
deletion_range =
Some((Bound::Included(db_path), Bound::Included(db_path)));
}
db_entries.next();
}
Ordering::Equal => {
if let Some(deletion_range) = deletion_range.take() {
deleted_entry_ranges_tx
.send((
deletion_range.0.map(ToString::to_string),
deletion_range.1.map(ToString::to_string),
))
.await?;
}
saved_mtime = db_embedded_file.mtime;
db_entries.next();
break;
}
Ordering::Greater => {
break;
}
},
Err(_) => return Err(db_entries.next().unwrap().unwrap_err())?,
}
}
if entry.mtime != saved_mtime {
let handle = entries_being_indexed.insert(entry.id);
updated_entries_tx.send((entry.clone(), handle)).await?;
}
}
if let Some(db_entry) = db_entries.next() {
let (db_path, _) = db_entry?;
deleted_entry_ranges_tx
.send((Bound::Included(db_path.to_string()), Bound::Unbounded))
.await?;
}
Ok(())
});
ScanEntries {
updated_entries: updated_entries_rx,
deleted_entry_ranges: deleted_entry_ranges_rx,
task,
}
}
/// Converts a batch of worktree change events into indexing work.
///
/// Added/updated files are registered in `entry_ids_being_indexed` and sent on
/// the `updated_entries` channel; removed paths are sent as single-key deletion
/// ranges so their embeddings can be purged from the database.
fn scan_updated_entries(
    &self,
    worktree: Snapshot,
    updated_entries: UpdatedEntriesSet,
    cx: &AppContext,
) -> ScanEntries {
    // Bounded channels provide backpressure against the downstream stages.
    let (updated_entries_tx, updated_entries_rx) = channel::bounded(512);
    let (deleted_entry_ranges_tx, deleted_entry_ranges_rx) = channel::bounded(128);
    let entries_being_indexed = self.entry_ids_being_indexed.clone();
    let task = cx.background_executor().spawn(async move {
        for (path, entry_id, status) in updated_entries.iter() {
            match status {
                project::PathChange::Added
                | project::PathChange::Updated
                | project::PathChange::AddedOrUpdated => {
                    if let Some(entry) = worktree.entry_for_id(*entry_id) {
                        if entry.is_file() {
                            // The handle marks the entry as in-flight until dropped.
                            let handle = entries_being_indexed.insert(entry.id);
                            updated_entries_tx.send((entry.clone(), handle)).await?;
                        }
                    }
                }
                project::PathChange::Removed => {
                    let db_path = db_key_for_path(path);
                    // A degenerate [key, key] range deletes exactly this one path.
                    deleted_entry_ranges_tx
                        .send((Bound::Included(db_path.clone()), Bound::Included(db_path)))
                        .await?;
                }
                project::PathChange::Loaded => {
                    // Do nothing.
                }
            }
        }
        Ok(())
    });
    ScanEntries {
        updated_entries: updated_entries_rx,
        deleted_entry_ranges: deleted_entry_ranges_rx,
        task,
    }
}
/// Spawns one worker per CPU that loads each updated entry from disk,
/// resolves its language, splits the text into chunks, and forwards the
/// resulting `ChunkedFile` to the embedding stage.
fn chunk_files(
    &self,
    worktree_abs_path: Arc<Path>,
    entries: channel::Receiver<(Entry, IndexingEntryHandle)>,
    cx: &AppContext,
) -> ChunkFiles {
    let language_registry = self.language_registry.clone();
    let fs = self.fs.clone();
    let (chunked_files_tx, chunked_files_rx) = channel::bounded(2048);
    let task = cx.spawn(|cx| async move {
        // `scoped` lets the workers borrow `entries`/`fs` without 'static bounds;
        // it completes once all workers have exited.
        cx.background_executor()
            .scoped(|cx| {
                for _ in 0..cx.num_cpus() {
                    cx.spawn(async {
                        while let Ok((entry, handle)) = entries.recv().await {
                            let entry_abs_path = worktree_abs_path.join(&entry.path);
                            // Unreadable files are logged and skipped rather than
                            // aborting the whole scan.
                            let Some(text) = fs
                                .load(&entry_abs_path)
                                .await
                                .with_context(|| {
                                    format!("failed to read path {entry_abs_path:?}")
                                })
                                .log_err()
                            else {
                                continue;
                            };
                            let language = language_registry
                                .language_for_file_path(&entry.path)
                                .await
                                .ok();
                            let chunked_file = ChunkedFile {
                                chunks: chunking::chunk_text(
                                    &text,
                                    language.as_ref(),
                                    &entry.path,
                                ),
                                handle,
                                path: entry.path,
                                mtime: entry.mtime,
                                text,
                            };
                            // A closed receiver means the pipeline was dropped;
                            // stop this worker quietly.
                            if chunked_files_tx.send(chunked_file).await.is_err() {
                                return;
                            }
                        }
                    });
                }
            })
            .await;
        Ok(())
    });
    ChunkFiles {
        files: chunked_files_rx,
        task,
    }
}
/// Embeds chunked files in provider-sized batches.
///
/// Files are batched (up to 512 files or 2 seconds per batch), their chunks
/// flattened and embedded, then reassembled per file. If any chunk of a file
/// fails to embed, the whole file is discarded so the database never holds a
/// partially-embedded file.
pub fn embed_files(
    embedding_provider: Arc<dyn EmbeddingProvider>,
    chunked_files: channel::Receiver<ChunkedFile>,
    cx: &AppContext,
) -> EmbedFiles {
    // NOTE: `embedding_provider` is already owned; the previous
    // `embedding_provider.clone()` here was redundant and has been removed.
    let (embedded_files_tx, embedded_files_rx) = channel::bounded(512);
    let task = cx.background_executor().spawn(async move {
        let mut chunked_file_batches =
            chunked_files.chunks_timeout(512, Duration::from_secs(2));
        while let Some(chunked_files) = chunked_file_batches.next().await {
            // View the batch of files as a vec of chunks
            // Flatten out to a vec of chunks that we can subdivide into batch sized pieces
            // Once those are done, reassemble them back into the files in which they belong
            // If any embeddings fail for a file, the entire file is discarded
            let chunks: Vec<TextToEmbed> = chunked_files
                .iter()
                .flat_map(|file| {
                    file.chunks.iter().map(|chunk| TextToEmbed {
                        text: &file.text[chunk.range.clone()],
                        digest: chunk.digest,
                    })
                })
                .collect();
            // One slot per chunk; `None` marks chunks whose embedding failed.
            let mut embeddings: Vec<Option<Embedding>> = Vec::new();
            for embedding_batch in chunks.chunks(embedding_provider.batch_size()) {
                if let Some(batch_embeddings) =
                    embedding_provider.embed(embedding_batch).await.log_err()
                {
                    if batch_embeddings.len() == embedding_batch.len() {
                        embeddings.extend(batch_embeddings.into_iter().map(Some));
                        continue;
                    }
                    log::error!(
                        "embedding provider returned unexpected embedding count {}, expected {}",
                        batch_embeddings.len(), embedding_batch.len()
                    );
                }
                // Provider failed or returned the wrong count: poison every
                // chunk of this batch so their files are dropped below.
                embeddings.extend(iter::repeat(None).take(embedding_batch.len()));
            }
            // Walk the flat embedding list back into per-file chunk lists.
            let mut embeddings = embeddings.into_iter();
            for chunked_file in chunked_files {
                let mut embedded_file = EmbeddedFile {
                    path: chunked_file.path,
                    mtime: chunked_file.mtime,
                    chunks: Vec::new(),
                };
                let mut embedded_all_chunks = true;
                for (chunk, embedding) in
                    chunked_file.chunks.into_iter().zip(embeddings.by_ref())
                {
                    if let Some(embedding) = embedding {
                        embedded_file
                            .chunks
                            .push(EmbeddedChunk { chunk, embedding });
                    } else {
                        embedded_all_chunks = false;
                    }
                }
                if embedded_all_chunks {
                    embedded_files_tx
                        .send((embedded_file, chunked_file.handle))
                        .await?;
                }
            }
        }
        Ok(())
    });
    EmbedFiles {
        files: embedded_files_rx,
        task,
    }
}
/// Persists deletion ranges and embedded files to the database.
///
/// Runs two phases in sequence: first drains `deleted_entry_ranges` (this
/// loop only ends once that channel's sender side is closed), then batches
/// `embedded_files` into write transactions.
fn persist_embeddings(
    &self,
    mut deleted_entry_ranges: channel::Receiver<(Bound<String>, Bound<String>)>,
    embedded_files: channel::Receiver<(EmbeddedFile, IndexingEntryHandle)>,
    cx: &AppContext,
) -> Task<Result<()>> {
    let db_connection = self.db_connection.clone();
    let db = self.db;
    cx.background_executor().spawn(async move {
        // NOTE(review): while this phase waits for the deletion channel to
        // close, embedded files accumulate in their bounded channel — confirm
        // upstream channel capacities are sufficient to avoid a stall.
        while let Some(deletion_range) = deleted_entry_ranges.next().await {
            let mut txn = db_connection.write_txn()?;
            let start = deletion_range.0.as_ref().map(|start| start.as_str());
            let end = deletion_range.1.as_ref().map(|end| end.as_str());
            log::debug!("deleting embeddings in range {:?}", &(start, end));
            db.delete_range(&mut txn, &(start, end))?;
            txn.commit()?;
        }
        // Batch writes: up to 4096 files or 2 seconds per transaction.
        let mut embedded_files = embedded_files.chunks_timeout(4096, Duration::from_secs(2));
        while let Some(embedded_files) = embedded_files.next().await {
            let mut txn = db_connection.write_txn()?;
            for (file, _) in &embedded_files {
                log::debug!("saving embedding for file {:?}", file.path);
                let key = db_key_for_path(&file.path);
                db.put(&mut txn, &key, file)?;
            }
            txn.commit()?;
            // Dropping the batch releases its IndexingEntryHandles only after
            // the transaction has committed.
            drop(embedded_files);
            log::debug!("committed");
        }
        Ok(())
    })
}
/// Returns the paths of every file currently stored in the embedding
/// database, read on the background executor in a single read transaction.
pub fn paths(&self, cx: &AppContext) -> Task<Result<Vec<Arc<Path>>>> {
    let connection = self.db_connection.clone();
    let db = self.db;
    cx.background_executor().spawn(async move {
        let tx = connection
            .read_txn()
            .context("failed to create read transaction")?;
        let result = db
            .iter(&tx)?
            .map(|entry| Ok(entry?.1.path.clone()))
            .collect::<Result<Vec<Arc<Path>>>>();
        // Collect fully before dropping the transaction the iterator borrows.
        drop(tx);
        result
    })
}
/// Returns the stored embedded chunks for `path`, or an error if the path
/// has no entry in the embedding database.
pub fn chunks_for_path(
    &self,
    path: Arc<Path>,
    cx: &AppContext,
) -> Task<Result<Vec<EmbeddedChunk>>> {
    let connection = self.db_connection.clone();
    let db = self.db;
    cx.background_executor().spawn(async move {
        let tx = connection
            .read_txn()
            .context("failed to create read transaction")?;
        Ok(db
            .get(&tx, &db_key_for_path(&path))?
            .ok_or_else(|| anyhow!("no such path"))?
            .chunks
            .clone())
    })
}
}
/// Output of the scan stage: changed entries to (re)index, key ranges to
/// delete, and the task driving the scan.
struct ScanEntries {
    updated_entries: channel::Receiver<(Entry, IndexingEntryHandle)>,
    deleted_entry_ranges: channel::Receiver<(Bound<String>, Bound<String>)>,
    task: Task<Result<()>>,
}
/// Output of the chunking stage: chunked files plus the task producing them.
struct ChunkFiles {
    files: channel::Receiver<ChunkedFile>,
    task: Task<Result<()>>,
}
/// A file that has been loaded from disk and split into chunks, awaiting
/// embedding. `handle` keeps the entry marked as in-flight until dropped.
pub struct ChunkedFile {
    pub path: Arc<Path>,
    pub mtime: Option<SystemTime>,
    pub handle: IndexingEntryHandle,
    pub text: String,
    pub chunks: Vec<Chunk>,
}
/// Output of the embedding stage: fully-embedded files plus the driving task.
pub struct EmbedFiles {
    pub files: channel::Receiver<(EmbeddedFile, IndexingEntryHandle)>,
    pub task: Task<Result<()>>,
}
/// The database value stored per file: its path, the mtime captured when it
/// was indexed (used for change detection), and all embedded chunks.
#[derive(Debug, Serialize, Deserialize)]
pub struct EmbeddedFile {
    pub path: Arc<Path>,
    pub mtime: Option<SystemTime>,
    pub chunks: Vec<EmbeddedChunk>,
}
/// A text chunk paired with its embedding vector.
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct EmbeddedChunk {
    pub chunk: Chunk,
    pub embedding: Embedding,
}
/// Converts a worktree-relative path into a database key by replacing `/`
/// with NUL (a byte that cannot appear inside a path component), presumably
/// so that keys under a directory sort contiguously for range deletions —
/// confirm against the deletion-range logic.
///
/// Takes `&Path` rather than `&Arc<Path>` (the `&Arc<T>` parameter is an
/// anti-pattern); existing `db_key_for_path(&entry.path)` call sites still
/// compile via deref coercion.
fn db_key_for_path(path: &Path) -> String {
    path.to_string_lossy().replace('/', "\0")
}

View File

@@ -1,49 +0,0 @@
use collections::HashSet;
use parking_lot::Mutex;
use project::ProjectEntryId;
use smol::channel;
use std::sync::{Arc, Weak};
/// The set of entries that are currently being indexed.
pub struct IndexingEntrySet {
    // Ids of entries with at least one live `IndexingEntryHandle`.
    entry_ids: Mutex<HashSet<ProjectEntryId>>,
    // Signaled (best-effort) whenever the set's membership changes.
    tx: channel::Sender<()>,
}
/// When dropped, removes the entry from the set of entries that are being indexed.
#[derive(Clone)]
pub(crate) struct IndexingEntryHandle {
    entry_id: ProjectEntryId,
    // Weak so a forgotten handle cannot keep the whole set alive.
    set: Weak<IndexingEntrySet>,
}
impl IndexingEntrySet {
    /// Creates an empty set; `tx` is signaled whenever membership changes.
    pub fn new(tx: channel::Sender<()>) -> Self {
        Self {
            entry_ids: Default::default(),
            tx,
        }
    }

    /// Marks `entry_id` as being indexed and returns a guard that removes it
    /// (and re-notifies observers) when dropped.
    pub fn insert(self: &Arc<Self>, entry_id: ProjectEntryId) -> IndexingEntryHandle {
        self.entry_ids.lock().insert(entry_id);
        // Best-effort notification: if all receivers are gone, nobody is
        // watching status anymore, so the send error is ignored.
        self.tx.send_blocking(()).ok();
        IndexingEntryHandle {
            entry_id,
            set: Arc::downgrade(self),
        }
    }

    /// Returns the number of entries currently being indexed.
    pub fn len(&self) -> usize {
        self.entry_ids.lock().len()
    }

    /// Returns true when nothing is currently being indexed.
    /// Added as the companion to `len` (clippy::len_without_is_empty).
    pub fn is_empty(&self) -> bool {
        self.entry_ids.lock().is_empty()
    }
}
impl Drop for IndexingEntryHandle {
    fn drop(&mut self) {
        // If the set is already gone there is nothing to clean up.
        if let Some(set) = self.set.upgrade() {
            // Notify observers and remove this entry from the in-flight set.
            // NOTE(review): the notification is sent before the removal, so an
            // observer woken by it may still see the old count — confirm
            // whether observers tolerate that.
            set.tx.send_blocking(()).ok();
            set.entry_ids.lock().remove(&self.entry_id);
        }
    }
}

View File

@@ -1,387 +0,0 @@
use crate::{
embedding::{EmbeddingProvider, TextToEmbed},
worktree_index::{WorktreeIndex, WorktreeIndexHandle},
};
use anyhow::{anyhow, Context, Result};
use collections::HashMap;
use fs::Fs;
use futures::{stream::StreamExt, FutureExt};
use gpui::{
AppContext, Entity, EntityId, EventEmitter, Model, ModelContext, Subscription, Task, WeakModel,
};
use language::LanguageRegistry;
use log;
use project::{Project, Worktree, WorktreeId};
use serde::{Deserialize, Serialize};
use smol::channel;
use std::{cmp::Ordering, num::NonZeroUsize, ops::Range, path::Path, sync::Arc};
use util::ResultExt;
/// A semantic-search hit, resolved to a live worktree model.
pub struct SearchResult {
    pub worktree: Model<Worktree>,
    pub path: Arc<Path>,
    // Byte range of the matching chunk within the file's text.
    pub range: Range<usize>,
    pub score: f32,
}
/// An intermediate search hit identified by worktree id, produced by the
/// search workers before resolution into a `SearchResult`.
pub struct WorktreeSearchResult {
    pub worktree_id: WorktreeId,
    pub path: Arc<Path>,
    pub range: Range<usize>,
    pub score: f32,
}
/// Aggregate indexing status across all worktree indices; emitted as an
/// event whenever it changes.
#[derive(Copy, Clone, Debug, Eq, PartialEq, Serialize, Deserialize)]
pub enum Status {
    Idle,
    // At least one worktree index is still loading its databases.
    Loading,
    // NonZeroUsize: a count of zero is represented by `Idle` instead.
    Scanning { remaining_count: NonZeroUsize },
}
/// Owns one `WorktreeIndexHandle` per visible local worktree of a project
/// and aggregates their indexing status.
pub struct ProjectIndex {
    db_connection: heed::Env,
    // Weak: the index must not keep the project alive.
    project: WeakModel<Project>,
    worktree_indices: HashMap<EntityId, WorktreeIndexHandle>,
    language_registry: Arc<LanguageRegistry>,
    fs: Arc<dyn Fs>,
    last_status: Status,
    // Cloned into each worktree index so they can signal status changes.
    status_tx: channel::Sender<()>,
    embedding_provider: Arc<dyn EmbeddingProvider>,
    _maintain_status: Task<()>,
    _subscription: Subscription,
}
impl ProjectIndex {
    /// Creates an index for `project`, subscribing to project events and
    /// spawning a task that recomputes the aggregate status whenever a
    /// worktree index signals `status_tx`.
    pub fn new(
        project: Model<Project>,
        db_connection: heed::Env,
        embedding_provider: Arc<dyn EmbeddingProvider>,
        cx: &mut ModelContext<Self>,
    ) -> Self {
        let language_registry = project.read(cx).languages().clone();
        let fs = project.read(cx).fs().clone();
        let (status_tx, mut status_rx) = channel::unbounded();
        let mut this = ProjectIndex {
            db_connection,
            project: project.downgrade(),
            worktree_indices: HashMap::default(),
            language_registry,
            fs,
            status_tx,
            last_status: Status::Idle,
            embedding_provider,
            _subscription: cx.subscribe(&project, Self::handle_project_event),
            _maintain_status: cx.spawn(|this, mut cx| async move {
                while status_rx.next().await.is_some() {
                    // An update error means `this` was dropped; stop the loop.
                    if this
                        .update(&mut cx, |this, cx| this.update_status(cx))
                        .is_err()
                    {
                        break;
                    }
                }
            }),
        };
        this.update_worktree_indices(cx);
        this
    }

    /// Returns the most recently computed aggregate status.
    pub fn status(&self) -> Status {
        self.last_status
    }

    pub fn project(&self) -> WeakModel<Project> {
        self.project.clone()
    }

    pub fn fs(&self) -> Arc<dyn Fs> {
        self.fs.clone()
    }

    /// Keeps the worktree index map in sync as worktrees come and go.
    fn handle_project_event(
        &mut self,
        _: Model<Project>,
        event: &project::Event,
        cx: &mut ModelContext<Self>,
    ) {
        match event {
            project::Event::WorktreeAdded | project::Event::WorktreeRemoved(_) => {
                self.update_worktree_indices(cx);
            }
            _ => {}
        }
    }

    /// Drops indices for removed worktrees and starts loading indices for
    /// newly visible local worktrees.
    fn update_worktree_indices(&mut self, cx: &mut ModelContext<Self>) {
        let Some(project) = self.project.upgrade() else {
            return;
        };
        // Only local worktrees are indexed.
        let worktrees = project
            .read(cx)
            .visible_worktrees(cx)
            .filter_map(|worktree| {
                if worktree.read(cx).is_local() {
                    Some((worktree.entity_id(), worktree))
                } else {
                    None
                }
            })
            .collect::<HashMap<_, _>>();
        self.worktree_indices
            .retain(|worktree_id, _| worktrees.contains_key(worktree_id));
        for (worktree_id, worktree) in worktrees {
            self.worktree_indices.entry(worktree_id).or_insert_with(|| {
                let worktree_index = WorktreeIndex::load(
                    worktree.clone(),
                    self.db_connection.clone(),
                    self.language_registry.clone(),
                    self.fs.clone(),
                    self.status_tx.clone(),
                    self.embedding_provider.clone(),
                    cx,
                );
                let load_worktree = cx.spawn(|this, mut cx| async move {
                    let result = match worktree_index.await {
                        Ok(worktree_index) => {
                            // Promote the handle from Loading to Loaded.
                            this.update(&mut cx, |this, _| {
                                this.worktree_indices.insert(
                                    worktree_id,
                                    WorktreeIndexHandle::Loaded {
                                        index: worktree_index.clone(),
                                    },
                                );
                            })?;
                            Ok(worktree_index)
                        }
                        Err(error) => {
                            // Loading failed: forget the handle entirely.
                            this.update(&mut cx, |this, _cx| {
                                this.worktree_indices.remove(&worktree_id)
                            })?;
                            Err(Arc::new(error))
                        }
                    };
                    this.update(&mut cx, |this, cx| this.update_status(cx))?;
                    result
                });
                // Shared so `search` can also await the in-flight load.
                WorktreeIndexHandle::Loading {
                    index: load_worktree.shared(),
                }
            });
        }
        self.update_status(cx);
    }

    /// Recomputes the aggregate status and emits it if it changed.
    fn update_status(&mut self, cx: &mut ModelContext<Self>) {
        let mut indexing_count = 0;
        let mut any_loading = false;
        for index in self.worktree_indices.values_mut() {
            match index {
                WorktreeIndexHandle::Loading { .. } => {
                    any_loading = true;
                    break;
                }
                WorktreeIndexHandle::Loaded { index, .. } => {
                    indexing_count += index.read(cx).entry_ids_being_indexed().len();
                }
            }
        }
        // Loading dominates; otherwise a nonzero in-flight count means Scanning.
        let status = if any_loading {
            Status::Loading
        } else if let Some(remaining_count) = NonZeroUsize::new(indexing_count) {
            Status::Scanning { remaining_count }
        } else {
            Status::Idle
        };
        if status != self.last_status {
            self.last_status = status;
            cx.emit(status);
        }
    }

    /// Searches all worktree indices for the chunks most similar to `query`,
    /// returning at most `limit` results ordered by descending score.
    pub fn search(
        &self,
        query: String,
        limit: usize,
        cx: &AppContext,
    ) -> Task<Result<Vec<SearchResult>>> {
        let (chunks_tx, chunks_rx) = channel::bounded(1024);
        let mut worktree_scan_tasks = Vec::new();
        for worktree_index in self.worktree_indices.values() {
            let worktree_index = worktree_index.clone();
            let chunks_tx = chunks_tx.clone();
            worktree_scan_tasks.push(cx.spawn(|cx| async move {
                // Wait for indices that are still loading before scanning them.
                let index = match worktree_index {
                    WorktreeIndexHandle::Loading { index } => {
                        index.clone().await.map_err(|error| anyhow!(error))?
                    }
                    WorktreeIndexHandle::Loaded { index } => index.clone(),
                };
                index
                    .read_with(&cx, |index, cx| {
                        let worktree_id = index.worktree().read(cx).id();
                        let db_connection = index.db_connection().clone();
                        let db = index.embedding_index().db().clone();
                        cx.background_executor().spawn(async move {
                            let txn = db_connection
                                .read_txn()
                                .context("failed to create read transaction")?;
                            let db_entries = db.iter(&txn).context("failed to iterate database")?;
                            for db_entry in db_entries {
                                let (_key, db_embedded_file) = db_entry?;
                                for chunk in db_embedded_file.chunks {
                                    chunks_tx
                                        .send((worktree_id, db_embedded_file.path.clone(), chunk))
                                        .await?;
                                }
                            }
                            anyhow::Ok(())
                        })
                    })?
                    .await
            }));
        }
        // Drop the original sender so the channel closes once all scan tasks
        // have finished sending.
        drop(chunks_tx);
        let project = self.project.clone();
        let embedding_provider = self.embedding_provider.clone();
        cx.spawn(|cx| async move {
            #[cfg(debug_assertions)]
            let embedding_query_start = std::time::Instant::now();
            log::info!("Searching for {query}");
            let query_embeddings = embedding_provider
                .embed(&[TextToEmbed::new(&query)])
                .await?;
            let query_embedding = query_embeddings
                .into_iter()
                .next()
                .ok_or_else(|| anyhow!("no embedding for query"))?;
            // One result list per worker to avoid shared-state contention.
            let mut results_by_worker = Vec::new();
            for _ in 0..cx.background_executor().num_cpus() {
                results_by_worker.push(Vec::<WorktreeSearchResult>::new());
            }
            #[cfg(debug_assertions)]
            let search_start = std::time::Instant::now();
            cx.background_executor()
                .scoped(|cx| {
                    for results in results_by_worker.iter_mut() {
                        cx.spawn(async {
                            while let Ok((worktree_id, path, chunk)) = chunks_rx.recv().await {
                                let score = chunk.embedding.similarity(&query_embedding);
                                // Keep each worker's list sorted (descending)
                                // and truncated to `limit`.
                                let ix = match results.binary_search_by(|probe| {
                                    score.partial_cmp(&probe.score).unwrap_or(Ordering::Equal)
                                }) {
                                    Ok(ix) | Err(ix) => ix,
                                };
                                results.insert(
                                    ix,
                                    WorktreeSearchResult {
                                        worktree_id,
                                        path: path.clone(),
                                        range: chunk.chunk.range.clone(),
                                        score,
                                    },
                                );
                                results.truncate(limit);
                            }
                        });
                    }
                })
                .await;
            // Surface (but don't propagate) scan errors after the workers drain.
            for scan_task in futures::future::join_all(worktree_scan_tasks).await {
                scan_task.log_err();
            }
            project.read_with(&cx, |project, cx| {
                let mut search_results = Vec::with_capacity(results_by_worker.len() * limit);
                for worker_results in results_by_worker {
                    search_results.extend(worker_results.into_iter().filter_map(|result| {
                        Some(SearchResult {
                            worktree: project.worktree_for_id(result.worktree_id, cx)?,
                            path: result.path,
                            range: result.range,
                            score: result.score,
                        })
                    }));
                }
                // Merge the per-worker lists into a single top-`limit` list.
                search_results.sort_unstable_by(|a, b| {
                    b.score.partial_cmp(&a.score).unwrap_or(Ordering::Equal)
                });
                search_results.truncate(limit);
                #[cfg(debug_assertions)]
                {
                    let search_elapsed = search_start.elapsed();
                    log::debug!(
                        "searched {} entries in {:?}",
                        search_results.len(),
                        search_elapsed
                    );
                    let embedding_query_elapsed = embedding_query_start.elapsed();
                    log::debug!("embedding query took {:?}", embedding_query_elapsed);
                }
                search_results
            })
        })
    }

    /// Test helper: total number of indexed paths across loaded worktrees.
    #[cfg(test)]
    pub fn path_count(&self, cx: &AppContext) -> Result<u64> {
        let mut result = 0;
        for worktree_index in self.worktree_indices.values() {
            if let WorktreeIndexHandle::Loaded { index, .. } = worktree_index {
                result += index.read(cx).path_count()?;
            }
        }
        Ok(result)
    }

    /// Returns the loaded index for `worktree_id`, if any.
    pub(crate) fn worktree_index(
        &self,
        worktree_id: WorktreeId,
        cx: &AppContext,
    ) -> Option<Model<WorktreeIndex>> {
        for index in self.worktree_indices.values() {
            if let WorktreeIndexHandle::Loaded { index, .. } = index {
                if index.read(cx).worktree().read(cx).id() == worktree_id {
                    return Some(index.clone());
                }
            }
        }
        None
    }

    /// Returns all loaded worktree indices, ordered by worktree id.
    pub(crate) fn worktree_indices(&self, cx: &AppContext) -> Vec<Model<WorktreeIndex>> {
        let mut result = self
            .worktree_indices
            .values()
            .filter_map(|index| {
                if let WorktreeIndexHandle::Loaded { index, .. } = index {
                    Some(index.clone())
                } else {
                    None
                }
            })
            .collect::<Vec<_>>();
        result.sort_by_key(|index| index.read(cx).worktree().read(cx).id());
        result
    }
}
impl EventEmitter<Status> for ProjectIndex {}

View File

@@ -55,12 +55,8 @@ impl ProjectIndexDebugView {
for index in worktree_indices {
let (root_path, worktree_id, worktree_paths) =
index.read_with(&cx, |index, cx| {
let worktree = index.worktree().read(cx);
(
worktree.abs_path(),
worktree.id(),
index.embedding_index().paths(cx),
)
let worktree = index.worktree.read(cx);
(worktree.abs_path(), worktree.id(), index.paths(cx))
})?;
rows.push(Row::Worktree(root_path));
rows.extend(
@@ -86,12 +82,10 @@ impl ProjectIndexDebugView {
cx: &mut ViewContext<Self>,
) -> Option<()> {
let project_index = self.index.read(cx);
let fs = project_index.fs().clone();
let fs = project_index.fs.clone();
let worktree_index = project_index.worktree_index(worktree_id, cx)?.read(cx);
let root_path = worktree_index.worktree().read(cx).abs_path();
let chunks = worktree_index
.embedding_index()
.chunks_for_path(file_path.clone(), cx);
let root_path = worktree_index.worktree.read(cx).abs_path();
let chunks = worktree_index.chunks_for_path(file_path.clone(), cx);
cx.spawn(|this, mut cx| async move {
let chunks = chunks.await?;

File diff suppressed because it is too large Load Diff

View File

@@ -1,457 +0,0 @@
use anyhow::{anyhow, Context as _, Result};
use arrayvec::ArrayString;
use completion::CompletionProvider;
use fs::Fs;
use futures::{stream::StreamExt, TryFutureExt};
use futures_batch::ChunksTimeoutStreamExt;
use gpui::{AppContext, Model, Task};
use heed::types::{SerdeBincode, Str};
use language_model::{LanguageModel, LanguageModelRequest, LanguageModelRequestMessage, Role};
use log;
use project::{Entry, UpdatedEntriesSet, Worktree};
use serde::{Deserialize, Serialize};
use smol::channel;
use std::{
future::Future,
path::Path,
sync::Arc,
time::{Duration, Instant, SystemTime},
};
use util::ResultExt;
use worktree::Snapshot;
use crate::indexing::{IndexingEntryHandle, IndexingEntrySet};
/// This model should be good for summarizing code - fast, low price, and good at outputting English.
///
/// It's called "preferred" because if the model isn't available (e.g. due to lacking the necessary API key),
/// we fall back on the global CompletionProvider's selected model.
///
/// Used by `SummaryIndex::summarize_code` when building the summarization request.
const PREFERRED_SUMMARIZATION_MODEL: LanguageModel =
    LanguageModel::OpenAi(open_ai::Model::FourOmniMini);
/// A file whose contents have been loaded and hashed but not yet summarized.
#[derive(Debug, Serialize, Deserialize)]
struct UnsummarizedFile {
    // BLAKE3 hash of the source file's contents
    content_hash: String,
    // The source file's contents
    contents: String,
}
/// A completed summary, keyed by the content hash it was generated from.
#[derive(Debug, Serialize, Deserialize)]
struct SummarizedFile {
    // BLAKE3 hash of the source file's contents
    content_hash: String,
    // The LLM's summary of the file's contents
    summary: String,
}
/// This is what blake3's to_hex() method returns - see https://docs.rs/blake3/1.5.3/src/blake3/lib.rs.html#246
/// (a fixed-capacity stack string of the hex-encoded hash, so no heap allocation).
type Blake3Digest = ArrayString<{ blake3::OUT_LEN * 2 }>;
/// The per-file record stored in `file_digest_db`: path, the mtime captured
/// when the digest was taken, and the BLAKE3 digest of the file's contents.
#[derive(Debug, Serialize, Deserialize)]
pub struct FileDigest {
    path: Arc<Path>,
    mtime: Option<SystemTime>,
    digest: Blake3Digest,
}
/// Output of the summarization stage: summarized files plus the driving task.
struct SummarizeFiles {
    files: channel::Receiver<SummarizedFile>,
    task: Task<Result<()>>,
}
/// Output of the cache-check stage: files absent from the summary cache.
struct NeedsSummary {
    files: channel::Receiver<UnsummarizedFile>,
    task: Task<Result<()>>,
}
/// Maintains LLM-generated summaries for the files of one worktree.
pub struct SummaryIndex {
    worktree: Model<Worktree>,
    fs: Arc<dyn Fs>,
    db_connection: heed::Env,
    file_digest_db: heed::Database<Str, SerdeBincode<FileDigest>>, // Key: file path. Val: BLAKE3 digest of its contents.
    summary_db: heed::Database<Str, Str>, // Key: BLAKE3 digest of a file's contents. Val: LLM summary of those contents.
    entry_ids_being_indexed: Arc<IndexingEntrySet>,
}
/// Output of the scan stage: changed entries plus the task driving the scan.
struct ScanEntries {
    updated_entries: channel::Receiver<(Entry, IndexingEntryHandle)>,
    task: Task<Result<()>>,
}
/// Output of the digest stage: loaded+hashed files that may need summarizing.
struct MightNeedSummaryFiles {
    files: channel::Receiver<UnsummarizedFile>,
    task: Task<Result<()>>,
}
impl SummaryIndex {
    /// Bundles the worktree, filesystem, and databases used by the pipeline.
    pub fn new(
        worktree: Model<Worktree>,
        fs: Arc<dyn Fs>,
        db_connection: heed::Env,
        file_digest_db: heed::Database<Str, SerdeBincode<FileDigest>>,
        summary_db: heed::Database<Str, Str>,
        entry_ids_being_indexed: Arc<IndexingEntrySet>,
    ) -> Self {
        Self {
            worktree,
            fs,
            db_connection,
            file_digest_db,
            summary_db,
            entry_ids_being_indexed,
        }
    }

    /// Summarizes every file whose mtime differs from its stored digest,
    /// running the scan → digest → cache-check → summarize → persist stages
    /// concurrently.
    pub fn index_entries_changed_on_disk(
        &self,
        cx: &AppContext,
    ) -> impl Future<Output = Result<()>> {
        let start = Instant::now();
        let worktree = self.worktree.read(cx).snapshot();
        let worktree_abs_path = worktree.abs_path().clone();
        let scan = self.scan_entries(worktree, cx);
        let digest = self.digest_files(worktree_abs_path, scan.updated_entries, cx);
        let needs_summary = self.check_summary_cache(digest.files, cx);
        let summaries = self.summarize_files(needs_summary.files, cx);
        let persist = self.persist_summaries(summaries.files, cx);
        async move {
            // try_join: the whole pipeline fails fast if any stage fails.
            futures::try_join!(
                scan.task,
                digest.task,
                needs_summary.task,
                summaries.task,
                persist
            )?;
            log::info!(
                "Summarizing everything that changed on disk took {:?}",
                start.elapsed()
            );
            Ok(())
        }
    }

    /// Same pipeline as `index_entries_changed_on_disk`, but driven by an
    /// explicit set of updated entries instead of a full worktree scan.
    pub fn index_updated_entries(
        &self,
        updated_entries: UpdatedEntriesSet,
        cx: &AppContext,
    ) -> impl Future<Output = Result<()>> {
        let start = Instant::now();
        let worktree = self.worktree.read(cx).snapshot();
        let worktree_abs_path = worktree.abs_path().clone();
        let scan = self.scan_updated_entries(worktree, updated_entries.clone(), cx);
        let digest = self.digest_files(worktree_abs_path, scan.updated_entries, cx);
        let needs_summary = self.check_summary_cache(digest.files, cx);
        let summaries = self.summarize_files(needs_summary.files, cx);
        let persist = self.persist_summaries(summaries.files, cx);
        async move {
            futures::try_join!(
                scan.task,
                digest.task,
                needs_summary.task,
                summaries.task,
                persist
            )?;
            log::info!("Summarizing updated entries took {:?}", start.elapsed());
            Ok(())
        }
    }

    /// Filters out files whose content hash is already in the summary cache,
    /// forwarding only those that actually need a new summary.
    fn check_summary_cache(
        &self,
        mut might_need_summary: channel::Receiver<UnsummarizedFile>,
        cx: &AppContext,
    ) -> NeedsSummary {
        let db_connection = self.db_connection.clone();
        let db = self.summary_db;
        let (needs_summary_tx, needs_summary_rx) = channel::bounded(512);
        let task = cx.background_executor().spawn(async move {
            while let Some(file) = might_need_summary.next().await {
                let tx = db_connection
                    .read_txn()
                    .context("Failed to create read transaction for checking which hashes are in summary cache")?;
                match db.get(&tx, &file.content_hash) {
                    Ok(opt_answer) => {
                        if opt_answer.is_none() {
                            // It's not in the summary cache db, so we need to summarize it.
                            log::debug!("{:?} was NOT in the db cache and needs to be resummarized.", &file.content_hash);
                            needs_summary_tx.send(file).await?;
                        } else {
                            log::debug!("{:?} was in the db cache and does not need to be resummarized.", &file.content_hash);
                        }
                    }
                    Err(err) => {
                        // A read failure skips the file rather than failing the pipeline.
                        log::error!("Reading from the summaries database failed: {:?}", err);
                    }
                }
            }
            Ok(())
        });
        NeedsSummary {
            files: needs_summary_rx,
            task,
        }
    }

    /// Scans the whole worktree, forwarding files whose mtime differs from
    /// the stored digest's mtime.
    fn scan_entries(&self, worktree: Snapshot, cx: &AppContext) -> ScanEntries {
        let (updated_entries_tx, updated_entries_rx) = channel::bounded(512);
        let db_connection = self.db_connection.clone();
        let digest_db = self.file_digest_db;
        let entries_being_indexed = self.entry_ids_being_indexed.clone();
        let task = cx.background_executor().spawn(async move {
            let txn = db_connection
                .read_txn()
                .context("failed to create read transaction")?;
            // let mut db_entries = digest_db
            //     .iter(&txn)
            //     .context("failed to create iterator")?
            //     .move_between_keys()
            //     .peekable();
            for entry in worktree.files(false, 0) {
                let entry_db_key = db_key_for_path(&entry.path);
                match digest_db.get(&txn, &entry_db_key) {
                    Ok(opt_saved_digest) => {
                        // The file path is the same, but the mtime is different. Update it!
                        // (A missing digest also mismatches, so new files are included.)
                        if entry.mtime != opt_saved_digest.and_then(|digest| digest.mtime) {
                            let handle = entries_being_indexed.insert(entry.id);
                            updated_entries_tx.send((entry.clone(), handle)).await?;
                        }
                    }
                    Err(err) => {
                        // Lookup failure skips this entry rather than failing the scan.
                        log::error!(
                            "Error trying to get file digest db entry {:?}: {:?}",
                            &entry_db_key,
                            err
                        );
                    }
                }
            }
            // TODO delete db entries for deleted files
            Ok(())
        });
        ScanEntries {
            updated_entries: updated_entries_rx,
            task,
        }
    }

    /// Converts a batch of worktree change events into summarization work.
    /// Removals are currently a no-op (see TODO below).
    fn scan_updated_entries(
        &self,
        worktree: Snapshot,
        updated_entries: UpdatedEntriesSet,
        cx: &AppContext,
    ) -> ScanEntries {
        let (updated_entries_tx, updated_entries_rx) = channel::bounded(512);
        // let (deleted_entry_ranges_tx, deleted_entry_ranges_rx) = channel::bounded(128);
        let entries_being_indexed = self.entry_ids_being_indexed.clone();
        let task = cx.background_executor().spawn(async move {
            for (path, entry_id, status) in updated_entries.iter() {
                match status {
                    project::PathChange::Added
                    | project::PathChange::Updated
                    | project::PathChange::AddedOrUpdated => {
                        if let Some(entry) = worktree.entry_for_id(*entry_id) {
                            if entry.is_file() {
                                // The handle marks the entry as in-flight until dropped.
                                let handle = entries_being_indexed.insert(entry.id);
                                updated_entries_tx.send((entry.clone(), handle)).await?;
                            }
                        }
                    }
                    project::PathChange::Removed => {
                        let _db_path = db_key_for_path(path);
                        // TODO delete db entries for deleted files
                        // deleted_entry_ranges_tx
                        //     .send((Bound::Included(db_path.clone()), Bound::Included(db_path)))
                        //     .await?;
                    }
                    project::PathChange::Loaded => {
                        // Do nothing.
                    }
                }
            }
            Ok(())
        });
        ScanEntries {
            updated_entries: updated_entries_rx,
            // deleted_entry_ranges: deleted_entry_ranges_rx,
            task,
        }
    }

    /// Spawns one worker per CPU that loads each updated entry from disk and
    /// computes the BLAKE3 hash of its contents.
    fn digest_files(
        &self,
        worktree_abs_path: Arc<Path>,
        entries: channel::Receiver<(Entry, IndexingEntryHandle)>,
        cx: &AppContext,
    ) -> MightNeedSummaryFiles {
        let fs = self.fs.clone();
        let (might_need_summary_tx, might_need_summary_rx) = channel::bounded(2048);
        let task = cx.spawn(|cx| async move {
            cx.background_executor()
                .scoped(|cx| {
                    for _ in 0..cx.num_cpus() {
                        cx.spawn(async {
                            while let Ok((entry, handle)) = entries.recv().await {
                                let entry_abs_path = worktree_abs_path.join(&entry.path);
                                // Unreadable files are logged and skipped.
                                let Some(text) = fs
                                    .load(&entry_abs_path)
                                    .await
                                    .with_context(|| {
                                        format!("failed to read path {entry_abs_path:?}")
                                    })
                                    .log_err()
                                else {
                                    continue;
                                };
                                let content_hash = {
                                    let mut hasher = blake3::Hasher::new();
                                    hasher.update(text.as_bytes());
                                    hasher.finalize().to_hex().to_string()
                                };
                                let unsummarized_file = UnsummarizedFile {
                                    content_hash,
                                    contents: text,
                                };
                                // A send error means the pipeline was dropped;
                                // log it and stop this worker.
                                if let Err(err) = might_need_summary_tx
                                    .send(unsummarized_file)
                                    .map_err(|error| anyhow!(error))
                                    .await
                                {
                                    log::error!("Error: {:?}", err);
                                    return;
                                }
                            }
                        });
                    }
                })
                .await;
            Ok(())
        });
        MightNeedSummaryFiles {
            files: might_need_summary_rx,
            task,
        }
    }

    /// Summarizes each incoming file via `summarize_code`, one at a time,
    /// forwarding the results keyed by content hash.
    fn summarize_files(
        &self,
        mut unsummarized_files: channel::Receiver<UnsummarizedFile>,
        cx: &AppContext,
    ) -> SummarizeFiles {
        let (summarized_tx, summarized_rx) = channel::bounded(512);
        let task = cx.spawn(|cx| async move {
            while let Some(file) = unsummarized_files.next().await {
                log::debug!("Summarizing {:?}", file);
                // The request must be built on the main context; the LLM call
                // itself then runs in the background.
                let summary = cx.update(|cx| Self::summarize_code(&file.contents, cx))?;
                summarized_tx
                    .send(SummarizedFile {
                        content_hash: file.content_hash,
                        summary: summary.await?,
                    })
                    .await?
            }
            Ok(())
        });
        SummarizeFiles {
            files: summarized_rx,
            task,
        }
    }

    /// Sends `code` to the summarization model and collects the streamed
    /// response into a single string.
    fn summarize_code(code: &str, cx: &AppContext) -> impl Future<Output = Result<String>> {
        let start = Instant::now();
        let provider = CompletionProvider::global(cx);
        let model = PREFERRED_SUMMARIZATION_MODEL;
        const PROMPT_BEFORE_CODE: &str = "Summarize this code in 3 sentences, using no newlines or bullet points in the summary:";
        let prompt = format!("{PROMPT_BEFORE_CODE}\n{code}");
        log::debug!(
            "Summarizing code by sending this prompt to {:?}: {:?}",
            &model,
            &prompt
        );
        let request = LanguageModelRequest {
            model,
            messages: vec![LanguageModelRequestMessage {
                role: Role::User,
                content: prompt,
            }],
            stop: Vec::new(),
            temperature: 1.0,
        };
        let response = provider.stream_completion_bg(request, cx);
        cx.background_executor().spawn(async move {
            let mut chunks = response.await?;
            let mut answer = String::new();
            while let Some(chunk) = chunks.next().await {
                answer.push_str(chunk?.as_str());
            }
            log::info!("Code summarization took {:?}", start.elapsed());
            Ok(answer)
        })
    }

    /// Writes completed summaries to the summary database in batches of up to
    /// 4096 summaries or 2 seconds per transaction.
    fn persist_summaries(
        &self,
        summaries: channel::Receiver<SummarizedFile>,
        cx: &AppContext,
    ) -> Task<Result<()>> {
        let db_connection = self.db_connection.clone();
        let db = self.summary_db;
        cx.background_executor().spawn(async move {
            let mut summaries = summaries.chunks_timeout(4096, Duration::from_secs(2));
            while let Some(summaries) = summaries.next().await {
                let mut txn = db_connection.write_txn()?;
                for file in &summaries {
                    log::debug!(
                        "Saving {} bytes of summary for content hash {:?}",
                        file.summary.len(),
                        file.content_hash
                    );
                    db.put(&mut txn, &file.content_hash, &file.summary)?;
                }
                txn.commit()?;
                // Release the batch only after the transaction has committed.
                drop(summaries);
                log::debug!("committed summaries");
            }
            Ok(())
        })
    }
}
/// Converts a worktree-relative path into a database key by replacing `/`
/// with NUL (a byte that cannot appear inside a path component).
///
/// Takes `&Path` rather than `&Arc<Path>` (the `&Arc<T>` parameter is an
/// anti-pattern); existing `db_key_for_path(&entry.path)` call sites still
/// compile via deref coercion.
fn db_key_for_path(path: &Path) -> String {
    path.to_string_lossy().replace('/', "\0")
}

View File

@@ -1,279 +0,0 @@
use crate::embedding::EmbeddingProvider;
use crate::embedding_index::EmbeddingIndex;
use crate::indexing::IndexingEntrySet;
use crate::summary_index::SummaryIndex;
use anyhow::Result;
use fs::Fs;
use futures::future::Shared;
use gpui::{
AppContext, AsyncAppContext, Context, Model, ModelContext, Subscription, Task, WeakModel,
};
use language::LanguageRegistry;
use log;
use project::{UpdatedEntriesSet, Worktree};
use smol::channel;
use std::sync::Arc;
use util::ResultExt;
/// A worktree index that is either still loading its databases (a shared
/// future that can be awaited by multiple callers) or fully loaded.
#[derive(Clone)]
pub enum WorktreeIndexHandle {
    Loading {
        index: Shared<Task<Result<Model<WorktreeIndex>, Arc<anyhow::Error>>>>,
    },
    Loaded {
        index: Model<WorktreeIndex>,
    },
}
/// Combines the embedding and summary indices for a single worktree and
/// keeps them updated as the worktree changes.
pub struct WorktreeIndex {
    worktree: Model<Worktree>,
    db_connection: heed::Env,
    embedding_index: EmbeddingIndex,
    summary_index: SummaryIndex,
    entry_ids_being_indexed: Arc<IndexingEntrySet>,
    // Background task that reacts to worktree entry updates.
    _index_entries: Task<Result<()>>,
    _subscription: Subscription,
}
impl WorktreeIndex {
/// Creates (or opens) the on-disk databases for `worktree` and constructs
/// the `WorktreeIndex` model once they are ready.
pub fn load(
    worktree: Model<Worktree>,
    db_connection: heed::Env,
    language_registry: Arc<LanguageRegistry>,
    fs: Arc<dyn Fs>,
    status_tx: channel::Sender<()>,
    embedding_provider: Arc<dyn EmbeddingProvider>,
    cx: &mut AppContext,
) -> Task<Result<Model<Self>>> {
    let worktree_for_index = worktree.clone();
    let worktree_for_summary = worktree.clone();
    let worktree_abs_path = worktree.read(cx).abs_path();
    let embedding_fs = Arc::clone(&fs);
    let summary_fs = fs;
    cx.spawn(|mut cx| async move {
        let entries_being_indexed = Arc::new(IndexingEntrySet::new(status_tx));
        // Database creation runs on the background executor within a single
        // write transaction shared by all three databases.
        let (embedding_index, summary_index) = cx
            .background_executor()
            .spawn({
                let entries_being_indexed = Arc::clone(&entries_being_indexed);
                let db_connection = db_connection.clone();
                async move {
                    let mut txn = db_connection.write_txn()?;
                    let embedding_index = {
                        // The embedding database is named by the worktree's
                        // absolute path.
                        let db_name = worktree_abs_path.to_string_lossy();
                        let db = db_connection.create_database(&mut txn, Some(&db_name))?;
                        EmbeddingIndex::new(
                            worktree_for_index,
                            embedding_fs,
                            db_connection.clone(),
                            db,
                            language_registry,
                            embedding_provider,
                            Arc::clone(&entries_being_indexed),
                        )
                    };
                    let summary_index = {
                        let file_digest_db = {
                            let db_name =
                                // Prepend something that wouldn't be found at the beginning of an
                                // absolute path, so we don't get db key namespace conflicts with
                                // embeddings, which use the abs path as a key.
                                format!("digests-{}", worktree_abs_path.to_string_lossy());
                            db_connection.create_database(&mut txn, Some(&db_name))?
                        };
                        let summary_db = {
                            let db_name =
                                // Prepend something that wouldn't be found at the beginning of an
                                // absolute path, so we don't get db key namespace conflicts with
                                // embeddings, which use the abs path as a key.
                                format!("summaries-{}", worktree_abs_path.to_string_lossy());
                            db_connection.create_database(&mut txn, Some(&db_name))?
                        };
                        SummaryIndex::new(
                            worktree_for_summary,
                            summary_fs,
                            db_connection.clone(),
                            file_digest_db,
                            summary_db,
                            Arc::clone(&entries_being_indexed),
                        )
                    };
                    txn.commit()?;
                    anyhow::Ok((embedding_index, summary_index))
                }
            })
            .await?;
        cx.new_model(|cx| {
            Self::new(
                worktree,
                db_connection,
                embedding_index,
                summary_index,
                entries_being_indexed,
                cx,
            )
        })
    })
}
#[allow(clippy::too_many_arguments)]
pub fn new(
worktree: Model<Worktree>,
db_connection: heed::Env,
embedding_index: EmbeddingIndex,
summary_index: SummaryIndex,
entry_ids_being_indexed: Arc<IndexingEntrySet>,
cx: &mut ModelContext<Self>,
) -> Self {
let (updated_entries_tx, updated_entries_rx) = channel::unbounded();
let _subscription = cx.subscribe(&worktree, move |_this, _worktree, event, _cx| {
if let worktree::Event::UpdatedEntries(update) = event {
dbg!(&update);
log::debug!("Updating entries...");
_ = updated_entries_tx.try_send(update.clone());
} else {
dbg!("non-update event");
}
});
Self {
db_connection,
embedding_index,
summary_index,
worktree,
entry_ids_being_indexed,
_index_entries: cx.spawn(|this, cx| Self::index_entries(this, updated_entries_rx, cx)),
_subscription,
}
}
pub fn entry_ids_being_indexed(&self) -> &IndexingEntrySet {
self.entry_ids_being_indexed.as_ref()
}
pub fn worktree(&self) -> &Model<Worktree> {
&self.worktree
}
pub fn db_connection(&self) -> &heed::Env {
&self.db_connection
}
pub fn embedding_index(&self) -> &EmbeddingIndex {
&self.embedding_index
}
async fn index_entries(
this: WeakModel<Self>,
updated_entries: channel::Receiver<UpdatedEntriesSet>,
mut cx: AsyncAppContext,
) -> Result<()> {
let index = this.update(&mut cx, |this, cx| {
futures::future::try_join(
this.embedding_index.index_entries_changed_on_disk(cx),
this.summary_index.index_entries_changed_on_disk(cx),
)
})?;
index.await.log_err();
while let Ok(updated_entries) = updated_entries.recv().await {
let index = this.update(&mut cx, |this, cx| {
futures::future::try_join(
this.embedding_index
.index_updated_entries(updated_entries.clone(), cx),
this.summary_index
.index_updated_entries(updated_entries, cx),
)
})?;
index.await.log_err();
}
Ok(())
}
#[cfg(test)]
pub fn path_count(&self) -> Result<u64> {
use anyhow::Context;
let txn = self
.db_connection
.read_txn()
.context("failed to create read transaction")?;
Ok(self.embedding_index().db().len(&txn)?)
}
// fn index_updated_entries(
// &self,
// updated_entries: UpdatedEntriesSet,
// cx: &AppContext,
// ) -> impl Future<Output = Result<()>> {
// log::debug!("index_updated_entries({:?})", &updated_entries);
// let worktree = self.worktree.read(cx).snapshot();
// let worktree_abs_path = worktree.abs_path().clone();
// let scan = self.scan_updated_entries(worktree, updated_entries.clone(), cx);
// let processed = self.process_files(worktree_abs_path, scan.updated_entries, cx);
// let embed = Self::embed_files(self.embedding_provider.clone(), processed.chunked_files, cx);
// let might_need_summary = self.check_summary_cache(processed.might_need_summary, cx);
// let summarized = self.summarize_files(might_need_summary.files, cx);
// let persist_summaries = self.persist_summaries(summarized.files, cx);
// let persist_embeds = self.persist_embeddings(scan.deleted_entry_ranges, embed.files, cx);
// async move {
// futures::try_join!(
// scan.task,
// processed.task,
// embed.task,
// might_need_summary.task,
// summarized.task,
// persist_embeds,
// persist_summaries
// )?;
// Ok(())
// }
// }
// fn scan_updated_entries(
// &self,
// worktree: Snapshot,
// updated_entries: UpdatedEntriesSet,
// cx: &AppContext,
// ) -> ScanEntries {
// let (updated_entries_tx, updated_entries_rx) = channel::bounded(512);
// let (deleted_entry_ranges_tx, deleted_entry_ranges_rx) = channel::bounded(128);
// let entries_being_indexed = self.entry_ids_being_indexed.clone();
// let task = cx.background_executor().spawn(async move {
// for (path, entry_id, status) in updated_entries.iter() {
// match status {
// project::PathChange::Added
// | project::PathChange::Updated
// | project::PathChange::AddedOrUpdated => {
// if let Some(entry) = worktree.entry_for_id(*entry_id) {
// if entry.is_file() {
// let handle = entries_being_indexed.insert(entry.id);
// updated_entries_tx.send((entry.clone(), handle)).await?;
// }
// }
// }
// project::PathChange::Removed => {
// let db_path = db_key_for_path(path);
// deleted_entry_ranges_tx
// .send((Bound::Included(db_path.clone()), Bound::Included(db_path)))
// .await?;
// }
// project::PathChange::Loaded => {
// // Do nothing.
// }
// }
// }
// Ok(())
// });
// ScanEntries {
// updated_entries: updated_entries_rx,
// deleted_entry_ranges: deleted_entry_ranges_rx,
// task,
// }
// }
}

View File

@@ -6,7 +6,7 @@ use std::time::Duration;
use collections::HashMap;
use command_palette::CommandPalette;
use editor::{display_map::DisplayRow, DisplayPoint};
use editor::{actions::DeleteLine, display_map::DisplayRow, DisplayPoint};
use futures::StreamExt;
use gpui::{KeyBinding, Modifiers, MouseButton, TestAppContext};
pub use neovim_backed_test_context::*;
@@ -1317,3 +1317,99 @@ async fn test_command_alias(cx: &mut gpui::TestAppContext) {
cx.simulate_keystrokes(": Q");
cx.set_state("ˇHello world", Mode::Normal);
}
// Multi-key insert-mode remaps ("d o g" -> 🐶, "c a t" -> 🐱) should behave
// like neovim's `imap`: a full match expands, a failed prefix ("d o" before
// "d o g" or "c a t") replays the typed characters literally, and a pending
// prefix can still complete a *different* mapping ("d o" then "c a t").
#[gpui::test]
async fn test_remap_adjacent_dog_cat(cx: &mut gpui::TestAppContext) {
    let mut cx = NeovimBackedTestContext::new(cx).await;
    cx.update(|cx| {
        cx.bind_keys([
            KeyBinding::new(
                "d o g",
                workspace::SendKeystrokes("🐶".to_string()),
                Some("vim_mode == insert"),
            ),
            KeyBinding::new(
                "c a t",
                workspace::SendKeystrokes("🐱".to_string()),
                Some("vim_mode == insert"),
            ),
        ])
    });
    // Mirror the bindings in neovim so shared-state assertions compare like
    // for like.
    cx.neovim.exec("imap dog 🐶").await;
    cx.neovim.exec("imap cat 🐱").await;
    cx.set_shared_state("ˇ").await;
    cx.simulate_shared_keystrokes("i d o g").await;
    cx.shared_state().await.assert_eq("🐶ˇ");
    cx.set_shared_state("ˇ").await;
    cx.simulate_shared_keystrokes("i d o d o g").await;
    cx.shared_state().await.assert_eq("do🐶ˇ");
    cx.set_shared_state("ˇ").await;
    cx.simulate_shared_keystrokes("i d o c a t").await;
    cx.shared_state().await.assert_eq("do🐱ˇ");
}
// Nested insert-mode remaps where each mapping is a prefix of the next
// ("pin" ⊂ "pine" ⊂ "pineapple"). A shorter match must fire only after the
// pending-keystroke timeout (hence the 1s clock advance); typing the full
// longest sequence matches it directly.
#[gpui::test]
async fn test_remap_nested_pineapple(cx: &mut gpui::TestAppContext) {
    let mut cx = NeovimBackedTestContext::new(cx).await;
    cx.update(|cx| {
        cx.bind_keys([
            KeyBinding::new(
                "p i n",
                workspace::SendKeystrokes("📌".to_string()),
                Some("vim_mode == insert"),
            ),
            KeyBinding::new(
                "p i n e",
                workspace::SendKeystrokes("🌲".to_string()),
                Some("vim_mode == insert"),
            ),
            KeyBinding::new(
                "p i n e a p p l e",
                workspace::SendKeystrokes("🍍".to_string()),
                Some("vim_mode == insert"),
            ),
        ])
    });
    // Mirror the bindings in neovim for the shared-state comparison.
    cx.neovim.exec("imap pin 📌").await;
    cx.neovim.exec("imap pine 🌲").await;
    cx.neovim.exec("imap pineapple 🍍").await;
    cx.set_shared_state("ˇ").await;
    cx.simulate_shared_keystrokes("i p i n").await;
    // "pin" is a prefix of longer mappings, so it only resolves after the
    // multi-key timeout elapses.
    cx.executor().advance_clock(Duration::from_millis(1000));
    cx.run_until_parked();
    cx.shared_state().await.assert_eq("📌ˇ");
    cx.set_shared_state("ˇ").await;
    cx.simulate_shared_keystrokes("i p i n e").await;
    cx.executor().advance_clock(Duration::from_millis(1000));
    cx.run_until_parked();
    cx.shared_state().await.assert_eq("🌲ˇ");
    cx.set_shared_state("ˇ").await;
    cx.simulate_shared_keystrokes("i p i n e a p p l e").await;
    cx.shared_state().await.assert_eq("🍍ˇ");
}
// Pressing escape while a multi-key sequence is pending (`"` `+` selects a
// register) should cancel the pending input; the following `x` then acts as a
// fresh normal-mode delete, matching neovim.
#[gpui::test]
async fn test_escape_while_waiting(cx: &mut gpui::TestAppContext) {
    let mut cx = NeovimBackedTestContext::new(cx).await;
    cx.set_shared_state("ˇhi").await;
    cx.simulate_shared_keystrokes("\" + escape x").await;
    cx.shared_state().await.assert_eq("ˇi");
}
// A user-defined single-key `ctrl-w` binding should take precedence over
// built-in multi-key bindings that start with `ctrl-w`, mirroring
// neovim's `map <c-w> D` (delete to end of line).
#[gpui::test]
async fn test_ctrl_w_override(cx: &mut gpui::TestAppContext) {
    let mut cx = NeovimBackedTestContext::new(cx).await;
    cx.update(|cx| {
        cx.bind_keys([KeyBinding::new("ctrl-w", DeleteLine, None)]);
    });
    cx.neovim.exec("map <c-w> D").await;
    cx.set_shared_state("ˇhi").await;
    cx.simulate_shared_keystrokes("ctrl-w").await;
    cx.shared_state().await.assert_eq("ˇ");
}

View File

@@ -409,6 +409,7 @@ impl Vim {
state.last_mode = last_mode;
state.mode = mode;
state.operator_stack.clear();
state.selected_register.take();
if mode == Mode::Normal || mode != last_mode {
state.current_tx.take();
state.current_anchor.take();

View File

@@ -0,0 +1,4 @@
{"Exec":{"command":"map <c-w> D"}}
{"Put":{"state":"ˇhi"}}
{"Key":"ctrl-w"}
{"Get":{"state":"ˇ","mode":"Normal"}}

View File

@@ -0,0 +1,6 @@
{"Put":{"state":"ˇhi"}}
{"Key":"\""}
{"Key":"+"}
{"Key":"escape"}
{"Key":"x"}
{"Get":{"state":"ˇi","mode":"Normal"}}

View File

@@ -0,0 +1,24 @@
{"Exec":{"command":"imap dog 🐶"}}
{"Exec":{"command":"imap cat 🐱"}}
{"Put":{"state":"ˇ"}}
{"Key":"i"}
{"Key":"d"}
{"Key":"o"}
{"Key":"g"}
{"Get":{"state":"🐶ˇ","mode":"Insert"}}
{"Put":{"state":"ˇ"}}
{"Key":"i"}
{"Key":"d"}
{"Key":"o"}
{"Key":"d"}
{"Key":"o"}
{"Key":"g"}
{"Get":{"state":"do🐶ˇ","mode":"Insert"}}
{"Put":{"state":"ˇ"}}
{"Key":"i"}
{"Key":"d"}
{"Key":"o"}
{"Key":"c"}
{"Key":"a"}
{"Key":"t"}
{"Get":{"state":"do🐱ˇ","mode":"Insert"}}

View File

@@ -0,0 +1,28 @@
{"Exec":{"command":"imap pin 📌"}}
{"Exec":{"command":"imap pine 🌲"}}
{"Exec":{"command":"imap pineapple 🍍"}}
{"Put":{"state":"ˇ"}}
{"Key":"i"}
{"Key":"p"}
{"Key":"i"}
{"Key":"n"}
{"Get":{"state":"📌ˇ","mode":"Insert"}}
{"Put":{"state":"ˇ"}}
{"Key":"i"}
{"Key":"p"}
{"Key":"i"}
{"Key":"n"}
{"Key":"e"}
{"Get":{"state":"🌲ˇ","mode":"Insert"}}
{"Put":{"state":"ˇ"}}
{"Key":"i"}
{"Key":"p"}
{"Key":"i"}
{"Key":"n"}
{"Key":"e"}
{"Key":"a"}
{"Key":"p"}
{"Key":"p"}
{"Key":"l"}
{"Key":"e"}
{"Get":{"state":"🍍ˇ","mode":"Insert"}}

View File

@@ -106,7 +106,6 @@ vim.workspace = true
welcome.workspace = true
workspace.workspace = true
zed_actions.workspace = true
blake3 = "1.5.3"
[target.'cfg(target_os = "windows")'.build-dependencies]
winresource = "0.1"

View File

@@ -167,7 +167,7 @@ fn init_common(app_state: Arc<AppState>, cx: &mut AppContext) {
supermaven::init(app_state.client.clone(), cx);
inline_completion_registry::init(app_state.client.telemetry().clone(), cx);
assistant::init(app_state.fs.clone(), app_state.client.clone(), cx);
repl::init(cx);
repl::init(app_state.fs.clone(), cx);
extension::init(
app_state.fs.clone(),
app_state.client.clone(),

View File

@@ -3417,7 +3417,7 @@ mod tests {
outline_panel::init((), cx);
terminal_view::init(cx);
assistant::init(app_state.fs.clone(), app_state.client.clone(), cx);
repl::init(cx);
repl::init(app_state.fs.clone(), cx);
tasks_ui::init(cx);
initialize_workspace(app_state.clone(), cx);
app_state