Compare commits
101 Commits
prompt-syn...vim-syntax
| Author | SHA1 | Date |
|---|---|---|
| | 0e6f44d87c | |
| | 707a4c7f20 | |
| | 854076f96d | |
| | cf931247d0 | |
| | b74477d12e | |
| | 3077abf9cf | |
| | 07dab4e94a | |
| | 59686f1f44 | |
| | a60bea8a3d | |
| | b820aa1fcd | |
| | 55d91bce53 | |
| | b798392050 | |
| | 657c8b1084 | |
| | 2bb8aa2f73 | |
| | beeb42da29 | |
| | 6d66ff1d95 | |
| | e0b818af62 | |
| | 58a400b1ee | |
| | 8ab7d44d51 | |
| | 56d4c0af9f | |
| | feeda7fa37 | |
| | 4a5c55a8f2 | |
| | 7c1ae9bcc3 | |
| | 6f97da3435 | |
| | 63c1033448 | |
| | b16911e756 | |
| | b14401f817 | |
| | 17cf865d1e | |
| | b7ec437b13 | |
| | f1aab1120d | |
| | 3f90bc81bd | |
| | 9d5fb3c3f3 | |
| | 864767ad35 | |
| | ec69b68e72 | |
| | 9dd18e5ee1 | |
| | 2ebe16a52f | |
| | 1ed4647203 | |
| | ebed567adb | |
| | a6544c70c5 | |
| | b363e1a482 | |
| | 65e3e84cbc | |
| | 1e1d4430c2 | |
| | c874f1fa9d | |
| | 9a9e96ed5a | |
| | 8c46e290df | |
| | aacbb9c2f4 | |
| | f90333f92e | |
| | b24f614ca3 | |
| | cefa0cbed8 | |
| | 3fb1023667 | |
| | 9c715b470e | |
| | ae219e9e99 | |
| | 6d99c12796 | |
| | 8fb7fa941a | |
| | 22d75b798e | |
| | 06a199da4d | |
| | ab6125ddde | |
| | d3bc561f26 | |
| | f13f2dfb70 | |
| | 24e4446cd3 | |
| | cc536655a1 | |
| | 2a9e73c65d | |
| | 4f1728e5ee | |
| | 40c91d5df0 | |
| | fe1b36671d | |
| | bb9e2b0403 | |
| | 4f8d7f0a6b | |
| | caf3d30bf6 | |
| | df0cf22347 | |
| | a305eda8d1 | |
| | ba7b1db054 | |
| | 019c8ded77 | |
| | 1704dbea7e | |
| | eefa6c4882 | |
| | 1f17df7fb0 | |
| | 6d687a2c2c | |
| | 32214abb64 | |
| | a78563b80b | |
| | f881cacd8a | |
| | a539a38f13 | |
| | ca6fd101c1 | |
| | f8097c7c98 | |
| | c1427ea802 | |
| | 1e83022f03 | |
| | 0ee900e8fb | |
| | f9f4be1fc4 | |
| | a00b07371a | |
| | f725b5e248 | |
| | 07436b4284 | |
| | 8bec4cbecb | |
| | 047e7eacec | |
| | 1d5d3de85c | |
| | c4dbaa91f0 | |
| | 97c01c6720 | |
| | 310ea43048 | |
| | 6bb4b5fa64 | |
| | e0fa3032ec | |
| | 9cf6be2057 | |
| | 5462e199fb | |
| | 3a60420b41 | |
| | 89c184a26f | |
.github/workflows/ci.yml (4 changes, vendored)
@@ -482,7 +482,9 @@ jobs:
- macos_tests
- windows_clippy
- windows_tests
if: always()
if: |
github.repository_owner == 'zed-industries' &&
always()
steps:
- name: Check all tests passed
run: |

Cargo.lock (6 changes, generated)
@@ -114,6 +114,7 @@ dependencies = [
"serde_json_lenient",
"settings",
"smol",
"sqlez",
"streaming_diff",
"telemetry",
"telemetry_events",
@@ -133,6 +134,7 @@ dependencies = [
"workspace-hack",
"zed_actions",
"zed_llm_client",
"zstd",
]

[[package]]
@@ -525,6 +527,7 @@ dependencies = [
"fuzzy",
"gpui",
"indexed_docs",
"indoc",
"language",
"language_model",
"languages",
@@ -2200,6 +2203,7 @@ dependencies = [
"editor",
"gpui",
"itertools 0.14.0",
"settings",
"theme",
"ui",
"workspace",
@@ -7069,6 +7073,7 @@ dependencies = [
"image",
"inventory",
"itertools 0.14.0",
"libc",
"log",
"lyon",
"media",
@@ -8757,6 +8762,7 @@ dependencies = [
"serde",
"serde_json",
"settings",
"shellexpand 2.1.2",
"smallvec",
"smol",
"streaming-iterator",

@@ -1,5 +1,4 @@
<svg width="24" height="24" viewBox="0 0 24 24" fill="none" xmlns="http://www.w3.org/2000/svg">
<path d="M17 20H16C14.9391 20 13.9217 19.6629 13.1716 19.0627C12.4214 18.4626 12 17.6487 12 16.8V7.2C12 6.35131 12.4214 5.53737 13.1716 4.93726C13.9217 4.33714 14.9391 4 16 4H17" stroke="black" stroke-width="2" stroke-linecap="round" stroke-linejoin="round"/>
<path d="M7 20H8C9.06087 20 10.0783 19.5786 10.8284 18.8284C11.5786 18.0783 12 17.0609 12 16V15" stroke="black" stroke-width="2" stroke-linecap="round" stroke-linejoin="round"/>
<path d="M7 4H8C9.06087 4 10.0783 4.42143 10.8284 5.17157C11.5786 5.92172 12 6.93913 12 8V9" stroke="black" stroke-width="2" stroke-linecap="round" stroke-linejoin="round"/>
<svg width="16" height="16" viewBox="0 0 16 16" fill="none" xmlns="http://www.w3.org/2000/svg">
<path d="M11 13H10.4C9.76346 13 9.15302 12.7893 8.70296 12.4142C8.25284 12.0391 8 11.5304 8 11V5C8 4.46957 8.25284 3.96086 8.70296 3.58579C9.15302 3.21071 9.76346 3 10.4 3H11" stroke="black" stroke-width="1.5" stroke-linecap="round" stroke-linejoin="round"/>
<path d="M5 13H5.6C6.23654 13 6.84698 12.7893 7.29704 12.4142C7.74716 12.0391 8 11.5304 8 11V5C8 4.46957 7.74716 3.96086 7.29704 3.58579C6.84698 3.21071 6.23654 3 5.6 3H5" stroke="black" stroke-width="1.5" stroke-linecap="round" stroke-linejoin="round"/>
</svg>
Before Size: 715 B, After Size: 617 B

assets/icons/play_alt.svg (3 changes, new file)
@@ -0,0 +1,3 @@
<svg width="16" height="16" viewBox="0 0 16 16" fill="none" xmlns="http://www.w3.org/2000/svg">
<path d="M4 3L13 8L4 13V3Z" stroke="black" stroke-width="1.5" stroke-linecap="round" stroke-linejoin="round"/>
</svg>
After Size: 214 B

assets/icons/play_bug.svg (8 changes, new file)
@@ -0,0 +1,8 @@
<svg width="16" height="16" viewBox="0 0 16 16" fill="none" xmlns="http://www.w3.org/2000/svg">
<path d="M4 12C2.35977 11.85 1 10.575 1 9" stroke="black" stroke-width="1.5" stroke-linecap="round" stroke-linejoin="round"/>
<path d="M1.00875 15.2C1.00875 13.625 0.683456 12.275 4.00001 12.2" stroke="black" stroke-width="1.5" stroke-linecap="round" stroke-linejoin="round"/>
<path d="M7 9C7 10.575 5.62857 11.85 4 12" stroke="black" stroke-width="1.5" stroke-linecap="round" stroke-linejoin="round"/>
<path d="M4 12.2C6.98117 12.2 7 13.625 7 15.2" stroke="black" stroke-width="1.5" stroke-linecap="round" stroke-linejoin="round"/>
<rect x="2.5" y="9" width="3" height="6" rx="1.5" fill="black"/>
<path d="M9 10L13 8L4 3V7.5" stroke="black" stroke-width="1.5" stroke-linecap="round" stroke-linejoin="round"/>
</svg>
After Size: 813 B

@@ -1,3 +1,8 @@
<svg width="17" height="17" viewBox="0 0 17 17" fill="none" xmlns="http://www.w3.org/2000/svg">
<path fill-rule="evenodd" clip-rule="evenodd" d="M6.36667 3.79167C5.53364 3.79167 4.85833 4.46697 4.85833 5.3C4.85833 6.13303 5.53364 6.80833 6.36667 6.80833C7.1997 6.80833 7.875 6.13303 7.875 5.3C7.875 4.46697 7.1997 3.79167 6.36667 3.79167ZM2.1 5.925H3.67944C3.9626 7.14732 5.05824 8.05833 6.36667 8.05833C7.67509 8.05833 8.77073 7.14732 9.05389 5.925H14.9C15.2452 5.925 15.525 5.64518 15.525 5.3C15.525 4.95482 15.2452 4.675 14.9 4.675H9.05389C8.77073 3.45268 7.67509 2.54167 6.36667 2.54167C5.05824 2.54167 3.9626 3.45268 3.67944 4.675H2.1C1.75482 4.675 1.475 4.95482 1.475 5.3C1.475 5.64518 1.75482 5.925 2.1 5.925ZM13.3206 12.325C13.0374 13.5473 11.9418 14.4583 10.6333 14.4583C9.32491 14.4583 8.22927 13.5473 7.94611 12.325H2.1C1.75482 12.325 1.475 12.0452 1.475 11.7C1.475 11.3548 1.75482 11.075 2.1 11.075H7.94611C8.22927 9.85268 9.32491 8.94167 10.6333 8.94167C11.9418 8.94167 13.0374 9.85268 13.3206 11.075H14.9C15.2452 11.075 15.525 11.3548 15.525 11.7C15.525 12.0452 15.2452 12.325 14.9 12.325H13.3206ZM9.125 11.7C9.125 10.867 9.8003 10.1917 10.6333 10.1917C11.4664 10.1917 12.1417 10.867 12.1417 11.7C12.1417 12.533 11.4664 13.2083 10.6333 13.2083C9.8003 13.2083 9.125 12.533 9.125 11.7Z" fill="black"/>
<svg width="16" height="16" viewBox="0 0 16 16" fill="none" xmlns="http://www.w3.org/2000/svg">
<path d="M2 5H4" stroke="black" stroke-width="1.5" stroke-linecap="round"/>
<path d="M8 5L14 5" stroke="black" stroke-width="1.5" stroke-linecap="round"/>
<path d="M12 11L14 11" stroke="black" stroke-width="1.5" stroke-linecap="round"/>
<path d="M2 11H8" stroke="black" stroke-width="1.5" stroke-linecap="round"/>
<circle cx="6" cy="5" r="2" fill="black" fill-opacity="0.1" stroke="black" stroke-width="1.5" stroke-linecap="round"/>
<circle cx="10" cy="11" r="2" fill="black" fill-opacity="0.1" stroke="black" stroke-width="1.5" stroke-linecap="round"/>
</svg>
Before Size: 1.3 KiB, After Size: 657 B

@@ -1,5 +1,5 @@
<svg width="14" height="14" viewBox="0 0 14 14" fill="none" xmlns="http://www.w3.org/2000/svg">
<path d="M7 1.75L5.88467 5.14092C5.82759 5.31446 5.73055 5.47218 5.60136 5.60136C5.47218 5.73055 5.31446 5.82759 5.14092 5.88467L1.75 7L5.14092 8.11533C5.31446 8.17241 5.47218 8.26945 5.60136 8.39864C5.73055 8.52782 5.82759 8.68554 5.88467 8.85908L7 12.25L8.11533 8.85908C8.17241 8.68554 8.26945 8.52782 8.39864 8.39864C8.52782 8.26945 8.68554 8.17241 8.85908 8.11533L12.25 7L8.85908 5.88467C8.68554 5.82759 8.52782 5.73055 8.39864 5.60136C8.26945 5.47218 8.17241 5.31446 8.11533 5.14092L7 1.75Z" fill="black" fill-opacity="0.15" stroke="black" stroke-width="1.25" stroke-linecap="round" stroke-linejoin="round"/>
<path d="M2.91667 1.75V4.08333M1.75 2.91667H4.08333" stroke="black" stroke-opacity="0.75" stroke-width="1.25" stroke-linecap="round" stroke-linejoin="round"/>
<path d="M11.0833 9.91667V12.25M9.91667 11.0833H12.25" stroke="black" stroke-opacity="0.75" stroke-width="1.25" stroke-linecap="round" stroke-linejoin="round"/>
<svg width="16" height="16" viewBox="0 0 16 16" fill="none" xmlns="http://www.w3.org/2000/svg">
<path d="M8 2L6.72534 5.87534C6.6601 6.07367 6.5492 6.25392 6.40155 6.40155C6.25392 6.5492 6.07367 6.6601 5.87534 6.72534L2 8L5.87534 9.27466C6.07367 9.3399 6.25392 9.4508 6.40155 9.59845C6.5492 9.74608 6.6601 9.92633 6.72534 10.1247L8 14L9.27466 10.1247C9.3399 9.92633 9.4508 9.74608 9.59845 9.59845C9.74608 9.4508 9.92633 9.3399 10.1247 9.27466L14 8L10.1247 6.72534C9.92633 6.6601 9.74608 6.5492 9.59845 6.40155C9.4508 6.25392 9.3399 6.07367 9.27466 5.87534L8 2Z" fill="black" fill-opacity="0.15" stroke="black" stroke-width="1.5" stroke-linecap="round" stroke-linejoin="round"/>
<path d="M3.33334 2V4.66666M2 3.33334H4.66666" stroke="black" stroke-opacity="0.75" stroke-width="1.25" stroke-linecap="round" stroke-linejoin="round"/>
<path d="M12.6665 11.3333V14M11.3333 12.6666H13.9999" stroke="black" stroke-opacity="0.75" stroke-width="1.25" stroke-linecap="round" stroke-linejoin="round"/>
</svg>
Before Size: 1.0 KiB, After Size: 998 B

@@ -31,8 +31,6 @@
"ctrl-,": "zed::OpenSettings",
"ctrl-q": "zed::Quit",
"f4": "debugger::Start",
"alt-f4": "debugger::RerunLastSession",
"f5": "debugger::Continue",
"shift-f5": "debugger::Stop",
"ctrl-shift-f5": "debugger::Restart",
"f6": "debugger::Pause",
@@ -583,11 +581,24 @@
"ctrl-alt-r": "task::Rerun",
"alt-t": "task::Rerun",
"alt-shift-t": "task::Spawn",
"alt-shift-r": ["task::Spawn", { "reveal_target": "center" }]
"alt-shift-r": ["task::Spawn", { "reveal_target": "center" }],
// also possible to spawn tasks by name:
// "foo-bar": ["task::Spawn", { "task_name": "MyTask", "reveal_target": "dock" }]
// or by tag:
// "foo-bar": ["task::Spawn", { "task_tag": "MyTag" }],
"f5": "debugger::RerunLastSession"
}
},
{
"context": "Workspace && debugger_running",
"bindings": {
"f5": "zed::NoAction"
}
},
{
"context": "Workspace && debugger_stopped",
"bindings": {
"f5": "debugger::Continue"
}
},
{
@@ -873,7 +884,8 @@
"context": "DebugPanel",
"bindings": {
"ctrl-t": "debugger::ToggleThreadPicker",
"ctrl-i": "debugger::ToggleSessionPicker"
"ctrl-i": "debugger::ToggleSessionPicker",
"shift-alt-escape": "debugger::ToggleExpandItem"
}
},
{
@@ -928,6 +940,13 @@
"tab": "channel_modal::ToggleMode"
}
},
{
"context": "FileFinder",
"bindings": {
"ctrl-shift-a": "file_finder::ToggleSplitMenu",
"ctrl-shift-i": "file_finder::ToggleFilterMenu"
}
},
{
"context": "FileFinder || (FileFinder > Picker > Editor) || (FileFinder > Picker > menu)",
"bindings": {

@@ -4,8 +4,6 @@
"use_key_equivalents": true,
"bindings": {
"f4": "debugger::Start",
"alt-f4": "debugger::RerunLastSession",
"f5": "debugger::Continue",
"shift-f5": "debugger::Stop",
"shift-cmd-f5": "debugger::Restart",
"f6": "debugger::Pause",
@@ -635,7 +633,8 @@
"cmd-k shift-right": "workspace::SwapPaneRight",
"cmd-k shift-up": "workspace::SwapPaneUp",
"cmd-k shift-down": "workspace::SwapPaneDown",
"cmd-shift-x": "zed::Extensions"
"cmd-shift-x": "zed::Extensions",
"f5": "debugger::RerunLastSession"
}
},
{
@@ -652,6 +651,20 @@
// "foo-bar": ["task::Spawn", { "task_tag": "MyTag" }],
}
},
{
"context": "Workspace && debugger_running",
"use_key_equivalents": true,
"bindings": {
"f5": "zed::NoAction"
}
},
{
"context": "Workspace && debugger_stopped",
"use_key_equivalents": true,
"bindings": {
"f5": "debugger::Continue"
}
},
// Bindings from Sublime Text
{
"context": "Editor",
@@ -936,7 +949,8 @@
"context": "DebugPanel",
"bindings": {
"cmd-t": "debugger::ToggleThreadPicker",
"cmd-i": "debugger::ToggleSessionPicker"
"cmd-i": "debugger::ToggleSessionPicker",
"shift-alt-escape": "debugger::ToggleExpandItem"
}
},
{
@@ -987,6 +1001,14 @@
"tab": "channel_modal::ToggleMode"
}
},
{
"context": "FileFinder",
"use_key_equivalents": true,
"bindings": {
"cmd-shift-a": "file_finder::ToggleSplitMenu",
"cmd-shift-i": "file_finder::ToggleFilterMenu"
}
},
{
"context": "FileFinder || (FileFinder > Picker > Editor) || (FileFinder > Picker > menu)",
"use_key_equivalents": true,

@@ -51,7 +51,11 @@
"ctrl-k ctrl-l": "editor::ConvertToLowerCase",
"shift-alt-m": "markdown::OpenPreviewToTheSide",
"ctrl-backspace": "editor::DeleteToPreviousWordStart",
"ctrl-delete": "editor::DeleteToNextWordEnd"
"ctrl-delete": "editor::DeleteToNextWordEnd",
"alt-right": "editor::MoveToNextSubwordEnd",
"alt-left": "editor::MoveToPreviousSubwordStart",
"alt-shift-right": "editor::SelectToNextSubwordEnd",
"alt-shift-left": "editor::SelectToPreviousSubwordStart"
}
},
{

@@ -53,7 +53,11 @@
"cmd-shift-j": "editor::JoinLines",
"shift-alt-m": "markdown::OpenPreviewToTheSide",
"ctrl-backspace": "editor::DeleteToPreviousWordStart",
"ctrl-delete": "editor::DeleteToNextWordEnd"
"ctrl-delete": "editor::DeleteToNextWordEnd",
"ctrl-right": "editor::MoveToNextSubwordEnd",
"ctrl-left": "editor::MoveToPreviousSubwordStart",
"ctrl-shift-right": "editor::SelectToNextSubwordEnd",
"ctrl-shift-left": "editor::SelectToPreviousSubwordStart"
}
},
{

@@ -838,6 +838,19 @@
"tab": "editor::AcceptEditPrediction"
}
},
{
"context": "MessageEditor > Editor && VimControl",
"bindings": {
"enter": "agent::Chat",
// TODO: Implement search
"/": null,
"?": null,
"#": null,
"*": null,
"n": null,
"shift-n": null
}
},
{
"context": "os != macos && Editor && edit_prediction_conflict",
"bindings": {

@@ -128,6 +128,8 @@
//
// Default: true
"restore_on_file_reopen": true,
// Whether to automatically close files that have been deleted on disk.
"close_on_file_delete": false,
// Size of the drop target in the editor.
"drop_target_size": 0.2,
// Whether the window should be closed when using 'close active item' on a window with no tabs.
@@ -731,13 +733,6 @@
// The model to use.
"model": "claude-sonnet-4"
},
// The model to use when applying edits from the agent.
"editor_model": {
// The provider to use.
"provider": "zed.dev",
// The model to use.
"model": "claude-sonnet-4"
},
// Additional parameters for language model requests. When making a request to a model, parameters will be taken
// from the last entry in this list that matches the model's provider and name. In each entry, both provider
// and model are optional, so that you can specify parameters for either one.

@@ -1,3 +1,7 @@
// Some example tasks for common languages.
//
// For more documentation on how to configure debug tasks,
// see: https://zed.dev/docs/debugger
[
{
"label": "Debug active PHP file",

assets/settings/initial_local_debug_tasks.json (5 changes, new file)
@@ -0,0 +1,5 @@
// Project-local debug tasks
//
// For more documentation on how to configure debug tasks,
// see: https://zed.dev/docs/debugger
[]

@@ -46,6 +46,7 @@ git.workspace = true
gpui.workspace = true
heed.workspace = true
html_to_markdown.workspace = true
indoc.workspace = true
http_client.workspace = true
indexed_docs.workspace = true
inventory.workspace = true
@@ -78,6 +79,7 @@ serde_json.workspace = true
serde_json_lenient.workspace = true
settings.workspace = true
smol.workspace = true
sqlez.workspace = true
streaming_diff.workspace = true
telemetry.workspace = true
telemetry_events.workspace = true
@@ -97,6 +99,7 @@ workspace-hack.workspace = true
workspace.workspace = true
zed_actions.workspace = true
zed_llm_client.workspace = true
zstd.workspace = true

[dev-dependencies]
buffer_diff = { workspace = true, features = ["test-support"] }

@@ -1017,6 +1017,15 @@ impl ActiveThread {
self.play_notification_sound(cx);
self.show_notification("Waiting for tool confirmation", IconName::Info, window, cx);
}
ThreadEvent::ToolUseLimitReached => {
self.play_notification_sound(cx);
self.show_notification(
"Consecutive tool use limit reached.",
IconName::Warning,
window,
cx,
);
}
ThreadEvent::StreamedAssistantText(message_id, text) => {
if let Some(rendered_message) = self.rendered_messages_by_id.get_mut(&message_id) {
rendered_message.append_text(text, cx);

@@ -1372,6 +1372,7 @@ impl AgentDiff {
| ThreadEvent::ToolFinished { .. }
| ThreadEvent::CheckpointChanged
| ThreadEvent::ToolConfirmationNeeded
| ThreadEvent::ToolUseLimitReached
| ThreadEvent::CancelEditing => {}
}
}
@@ -1464,7 +1465,10 @@ impl AgentDiff {
if !AgentSettings::get_global(cx).single_file_review {
for (editor, _) in self.reviewing_editors.drain() {
editor
.update(cx, |editor, cx| editor.end_temporary_diff_override(cx))
.update(cx, |editor, cx| {
editor.end_temporary_diff_override(cx);
editor.unregister_addon::<EditorAgentDiffAddon>();
})
.ok();
}
return;
@@ -1560,7 +1564,10 @@ impl AgentDiff {
if in_workspace {
editor
.update(cx, |editor, cx| editor.end_temporary_diff_override(cx))
.update(cx, |editor, cx| {
editor.end_temporary_diff_override(cx);
editor.unregister_addon::<EditorAgentDiffAddon>();
})
.ok();
self.reviewing_editors.remove(&editor);
}

@@ -734,6 +734,7 @@ impl Display for RulesContext {
#[derive(Debug, Clone)]
pub struct ImageContext {
pub project_path: Option<ProjectPath>,
pub full_path: Option<Arc<Path>>,
pub original_image: Arc<gpui::Image>,
// TODO: handle this elsewhere and remove `ignore-interior-mutability` opt-out in clippy.toml
// needed due to a false positive of `clippy::mutable_key_type`.

@@ -14,7 +14,7 @@ use http_client::HttpClientWithUrl;
use itertools::Itertools;
use language::{Buffer, CodeLabel, HighlightId};
use lsp::CompletionContext;
use project::{Completion, CompletionIntent, ProjectPath, Symbol, WorktreeId};
use project::{Completion, CompletionIntent, CompletionResponse, ProjectPath, Symbol, WorktreeId};
use prompt_store::PromptStore;
use rope::Point;
use text::{Anchor, OffsetRangeExt, ToPoint};
@@ -746,7 +746,7 @@ impl CompletionProvider for ContextPickerCompletionProvider {
_trigger: CompletionContext,
_window: &mut Window,
cx: &mut Context<Editor>,
) -> Task<Result<Option<Vec<Completion>>>> {
) -> Task<Result<Vec<CompletionResponse>>> {
let state = buffer.update(cx, |buffer, _cx| {
let position = buffer_position.to_point(buffer);
let line_start = Point::new(position.row, 0);
@@ -756,13 +756,13 @@ impl CompletionProvider for ContextPickerCompletionProvider {
MentionCompletion::try_parse(line, offset_to_line)
});
let Some(state) = state else {
return Task::ready(Ok(None));
return Task::ready(Ok(Vec::new()));
};
let Some((workspace, context_store)) =
self.workspace.upgrade().zip(self.context_store.upgrade())
else {
return Task::ready(Ok(None));
return Task::ready(Ok(Vec::new()));
};
let snapshot = buffer.read(cx).snapshot();
@@ -815,10 +815,10 @@ impl CompletionProvider for ContextPickerCompletionProvider {
cx.spawn(async move |_, cx| {
let matches = search_task.await;
let Some(editor) = editor.upgrade() else {
return Ok(None);
return Ok(Vec::new());
};
Ok(Some(cx.update(|cx| {
let completions = cx.update(|cx| {
matches
.into_iter()
.filter_map(|mat| match mat {
@@ -901,7 +901,14 @@ impl CompletionProvider for ContextPickerCompletionProvider {
),
})
.collect()
})?))
})?;
Ok(vec![CompletionResponse {
completions,
// Since this does its own filtering (see `filter_completions()` returns false),
// there is no benefit to computing whether this set of completions is incomplete.
is_incomplete: true,
}])
})
}

@@ -7,7 +7,7 @@ use assistant_context_editor::AssistantContext;
use collections::{HashSet, IndexSet};
use futures::{self, FutureExt};
use gpui::{App, Context, Entity, EventEmitter, Image, SharedString, Task, WeakEntity};
use language::Buffer;
use language::{Buffer, File as _};
use language_model::LanguageModelImage;
use project::image_store::is_image_file;
use project::{Project, ProjectItem, ProjectPath, Symbol};
@@ -304,11 +304,13 @@ impl ContextStore {
project.open_image(project_path.clone(), cx)
})?;
let image_item = open_image_task.await?;
let image = image_item.read_with(cx, |image_item, _| image_item.image.clone())?;
this.update(cx, |this, cx| {
let item = image_item.read(cx);
this.insert_image(
Some(image_item.read(cx).project_path(cx)),
image,
Some(item.project_path(cx)),
Some(item.file.full_path(cx).into()),
item.image.clone(),
remove_if_exists,
cx,
)
@@ -317,12 +319,13 @@ impl ContextStore {
}
pub fn add_image_instance(&mut self, image: Arc<Image>, cx: &mut Context<ContextStore>) {
self.insert_image(None, image, false, cx);
self.insert_image(None, None, image, false, cx);
}
fn insert_image(
&mut self,
project_path: Option<ProjectPath>,
full_path: Option<Arc<Path>>,
image: Arc<Image>,
remove_if_exists: bool,
cx: &mut Context<ContextStore>,
@@ -330,6 +333,7 @@ impl ContextStore {
let image_task = LanguageModelImage::from_image(image.clone(), cx).shared();
let context = AgentContextHandle::Image(ImageContext {
project_path,
full_path,
original_image: image,
image_task,
context_id: self.next_context_id.post_inc(),

@@ -152,7 +152,7 @@ impl HistoryStore {
let entries = join_all(entries)
.await
.into_iter()
.filter_map(|result| result.log_err())
.filter_map(|result| result.log_with_level(log::Level::Debug))
.collect::<VecDeque<_>>();
this.update(cx, |this, _| {

@@ -112,6 +112,7 @@ pub(crate) fn create_editor(
editor.set_placeholder_text("Message the agent – @ to include context", cx);
editor.set_show_indent_guides(false, cx);
editor.set_soft_wrap();
editor.set_use_modal_editing(true);
editor.set_context_menu_options(ContextMenuOptions {
min_entries_visible: 12,
max_entries_visible: 12,

crates/agent/src/prompts/stale_files_prompt_header.txt (1 change, new file)
@@ -0,0 +1 @@
These files changed since last read:

@@ -0,0 +1,6 @@
Generate a detailed summary of this conversation. Include:
1. A brief overview of what was discussed
2. Key facts or information discovered
3. Outcomes or conclusions reached
4. Any action items or next steps if any
Format it in Markdown with headings and bullet points.

crates/agent/src/prompts/summarize_thread_prompt.txt (4 changes, new file)
@@ -0,0 +1,4 @@
Generate a concise 3-7 word title for this conversation, omitting punctuation.
Go straight to the title, without any preamble and prefix like `Here's a concise suggestion:...` or `Title:`.
If the conversation is about a specific subject, include it in the title.
Be descriptive. DO NOT speak in the first person.

@@ -179,18 +179,17 @@ impl TerminalTransaction {
// Ensure that the assistant cannot accidentally execute commands that are streamed into the terminal
let input = Self::sanitize_input(hunk);
self.terminal
.update(cx, |terminal, _| terminal.input(input));
.update(cx, |terminal, _| terminal.input(input.into_bytes()));
}
pub fn undo(&self, cx: &mut App) {
self.terminal
.update(cx, |terminal, _| terminal.input(CLEAR_INPUT.to_string()));
.update(cx, |terminal, _| terminal.input(CLEAR_INPUT.as_bytes()));
}
pub fn complete(&self, cx: &mut App) {
self.terminal.update(cx, |terminal, _| {
terminal.input(CARRIAGE_RETURN.to_string())
});
self.terminal
.update(cx, |terminal, _| terminal.input(CARRIAGE_RETURN.as_bytes()));
}
fn sanitize_input(mut input: String) -> String {

@@ -106,7 +106,7 @@ impl TerminalInlineAssistant {
});
let prompt_editor_render = prompt_editor.clone();
let block = terminal_view::BlockProperties {
height: 2,
height: 4,
render: Box::new(move |_| prompt_editor_render.clone().into_any_element()),
};
terminal_view.update(cx, |terminal_view, cx| {
@@ -202,7 +202,7 @@ impl TerminalInlineAssistant {
.update(cx, |terminal, cx| {
terminal
.terminal()
.update(cx, |terminal, _| terminal.input(CLEAR_INPUT.to_string()));
.update(cx, |terminal, _| terminal.input(CLEAR_INPUT.as_bytes()));
})
.log_err();

@@ -1428,7 +1428,7 @@ impl Thread {
messages: &mut Vec<LanguageModelRequestMessage>,
cx: &App,
) {
const STALE_FILES_HEADER: &str = "These files changed since last read:";
const STALE_FILES_HEADER: &str = include_str!("./prompts/stale_files_prompt_header.txt");
let mut stale_message = String::new();
@@ -1440,7 +1440,7 @@ impl Thread {
};
if stale_message.is_empty() {
write!(&mut stale_message, "{}\n", STALE_FILES_HEADER).ok();
write!(&mut stale_message, "{}\n", STALE_FILES_HEADER.trim()).ok();
}
writeln!(&mut stale_message, "- {}", file.path().display()).ok();
@@ -1673,6 +1673,7 @@ impl Thread {
}
CompletionRequestStatus::ToolUseLimitReached => {
thread.tool_use_limit_reached = true;
cx.emit(ThreadEvent::ToolUseLimitReached);
}
}
}
@@ -1854,10 +1855,7 @@ impl Thread {
return;
}
let added_user_message = "Generate a concise 3-7 word title for this conversation, omitting punctuation. \
Go straight to the title, without any preamble and prefix like `Here's a concise suggestion:...` or `Title:`. \
If the conversation is about a specific subject, include it in the title. \
Be descriptive. DO NOT speak in the first person.";
let added_user_message = include_str!("./prompts/summarize_thread_prompt.txt");
let request = self.to_summarize_request(
&model.model,
@@ -1958,12 +1956,7 @@ impl Thread {
return;
}
let added_user_message = "Generate a detailed summary of this conversation. Include:\n\
1. A brief overview of what was discussed\n\
2. Key facts or information discovered\n\
3. Outcomes or conclusions reached\n\
4. Any action items or next steps if any\n\
Format it in Markdown with headings and bullet points.";
let added_user_message = include_str!("./prompts/summarize_thread_detailed_prompt.txt");
let request = self.to_summarize_request(
&model,
@@ -2851,6 +2844,7 @@ pub enum ThreadEvent {
},
CheckpointChanged,
ToolConfirmationNeeded,
ToolUseLimitReached,
CancelEditing,
CompletionCanceled,
}

@@ -1,8 +1,7 @@
use std::borrow::Cow;
use std::cell::{Ref, RefCell};
use std::path::{Path, PathBuf};
use std::rc::Rc;
use std::sync::Arc;
use std::sync::{Arc, Mutex};

use agent_settings::{AgentProfile, AgentProfileId, AgentSettings, CompletionMode};
use anyhow::{Context as _, Result, anyhow};
@@ -17,8 +16,7 @@ use gpui::{
App, BackgroundExecutor, Context, Entity, EventEmitter, Global, ReadGlobal, SharedString,
Subscription, Task, prelude::*,
};
use heed::Database;
use heed::types::SerdeBincode;
use language_model::{LanguageModelToolResultContent, LanguageModelToolUseId, Role, TokenUsage};
use project::context_server_store::{ContextServerStatus, ContextServerStore};
use project::{Project, ProjectItem, ProjectPath, Worktree};
@@ -35,6 +33,42 @@ use crate::context_server_tool::ContextServerTool;
use crate::thread::{
DetailedSummaryState, ExceededWindowError, MessageId, ProjectSnapshot, Thread, ThreadId,
};
use indoc::indoc;
use sqlez::{
bindable::{Bind, Column},
connection::Connection,
statement::Statement,
};

#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
pub enum DataType {
#[serde(rename = "json")]
Json,
#[serde(rename = "zstd")]
Zstd,
}

impl Bind for DataType {
fn bind(&self, statement: &Statement, start_index: i32) -> Result<i32> {
let value = match self {
DataType::Json => "json",
DataType::Zstd => "zstd",
};
value.bind(statement, start_index)
}
}

impl Column for DataType {
fn column(statement: &mut Statement, start_index: i32) -> Result<(Self, i32)> {
let (value, next_index) = String::column(statement, start_index)?;
let data_type = match value.as_str() {
"json" => DataType::Json,
"zstd" => DataType::Zstd,
_ => anyhow::bail!("Unknown data type: {}", value),
};
Ok((data_type, next_index))
}
}

const RULES_FILE_NAMES: [&'static str; 6] = [
".rules",
@@ -866,25 +900,27 @@ impl Global for GlobalThreadsDatabase {}
pub(crate) struct ThreadsDatabase {
executor: BackgroundExecutor,
env: heed::Env,
threads: Database<SerdeBincode<ThreadId>, SerializedThread>,
connection: Arc<Mutex<Connection>>,
}

impl heed::BytesEncode<'_> for SerializedThread {
type EItem = SerializedThread;
impl ThreadsDatabase {
fn connection(&self) -> Arc<Mutex<Connection>> {
self.connection.clone()
}

fn bytes_encode(item: &Self::EItem) -> Result<Cow<[u8]>, heed::BoxedError> {
serde_json::to_vec(item).map(Cow::Owned).map_err(Into::into)
const COMPRESSION_LEVEL: i32 = 3;
}

impl Bind for ThreadId {
fn bind(&self, statement: &Statement, start_index: i32) -> Result<i32> {
self.to_string().bind(statement, start_index)
}
}

impl<'a> heed::BytesDecode<'a> for SerializedThread {
type DItem = SerializedThread;

fn bytes_decode(bytes: &'a [u8]) -> Result<Self::DItem, heed::BoxedError> {
// We implement this type manually because we want to call `SerializedThread::from_json`,
// instead of the Deserialize trait implementation for `SerializedThread`.
SerializedThread::from_json(bytes).map_err(Into::into)
impl Column for ThreadId {
fn column(statement: &mut Statement, start_index: i32) -> Result<(Self, i32)> {
let (id_str, next_index) = String::column(statement, start_index)?;
Ok((ThreadId::from(id_str.as_str()), next_index))
}
}

@@ -900,8 +936,8 @@ impl ThreadsDatabase {
let database_future = executor
.spawn({
let executor = executor.clone();
let database_path = paths::data_dir().join("threads/threads-db.1.mdb");
async move { ThreadsDatabase::new(database_path, executor) }
let threads_dir = paths::data_dir().join("threads");
async move { ThreadsDatabase::new(threads_dir, executor) }
})
.then(|result| future::ready(result.map(Arc::new).map_err(Arc::new)))
.boxed()
@@ -910,41 +946,144 @@ impl ThreadsDatabase {
cx.set_global(GlobalThreadsDatabase(database_future));
}

pub fn new(path: PathBuf, executor: BackgroundExecutor) -> Result<Self> {
std::fs::create_dir_all(&path)?;
pub fn new(threads_dir: PathBuf, executor: BackgroundExecutor) -> Result<Self> {
std::fs::create_dir_all(&threads_dir)?;

let sqlite_path = threads_dir.join("threads.db");
let mdb_path = threads_dir.join("threads-db.1.mdb");

let needs_migration_from_heed = mdb_path.exists();

let connection = Connection::open_file(&sqlite_path.to_string_lossy());

connection.exec(indoc! {"
CREATE TABLE IF NOT EXISTS threads (
id TEXT PRIMARY KEY,
summary TEXT NOT NULL,
updated_at TEXT NOT NULL,
data_type TEXT NOT NULL,
data BLOB NOT NULL
)
"})?()
.map_err(|e| anyhow!("Failed to create threads table: {}", e))?;

let db = Self {
executor: executor.clone(),
connection: Arc::new(Mutex::new(connection)),
};

if needs_migration_from_heed {
let db_connection = db.connection();
let executor_clone = executor.clone();
executor
.spawn(async move {
log::info!("Starting threads.db migration");
Self::migrate_from_heed(&mdb_path, db_connection, executor_clone)?;
std::fs::remove_dir_all(mdb_path)?;
log::info!("threads.db migrated to sqlite");
Ok::<(), anyhow::Error>(())
})
.detach();
}

Ok(db)
}

// Remove this migration after 2025-09-01
fn migrate_from_heed(
mdb_path: &Path,
connection: Arc<Mutex<Connection>>,
_executor: BackgroundExecutor,
) -> Result<()> {
use heed::types::SerdeBincode;
struct SerializedThreadHeed(SerializedThread);

impl heed::BytesEncode<'_> for SerializedThreadHeed {
type EItem = SerializedThreadHeed;

fn bytes_encode(
item: &Self::EItem,
) -> Result<std::borrow::Cow<[u8]>, heed::BoxedError> {
serde_json::to_vec(&item.0)
.map(std::borrow::Cow::Owned)
.map_err(Into::into)
}
}

impl<'a> heed::BytesDecode<'a> for SerializedThreadHeed {
type DItem = SerializedThreadHeed;

fn bytes_decode(bytes: &'a [u8]) -> Result<Self::DItem, heed::BoxedError> {
SerializedThread::from_json(bytes)
.map(SerializedThreadHeed)
.map_err(Into::into)
}
}

const ONE_GB_IN_BYTES: usize = 1024 * 1024 * 1024;

let env = unsafe {
heed::EnvOpenOptions::new()
.map_size(ONE_GB_IN_BYTES)
.max_dbs(1)
.open(path)?
.open(mdb_path)?
};

let mut txn = env.write_txn()?;
let threads = env.create_database(&mut txn, Some("threads"))?;
txn.commit()?;
let txn = env.write_txn()?;
let threads: heed::Database<SerdeBincode<ThreadId>, SerializedThreadHeed> = env
.open_database(&txn, Some("threads"))?
.ok_or_else(|| anyhow!("threads database not found"))?;

Ok(Self {
executor,
env,
threads,
})
for result in threads.iter(&txn)? {
let (thread_id, thread_heed) = result?;
Self::save_thread_sync(&connection, thread_id, thread_heed.0)?;
}

Ok(())
}

fn save_thread_sync(
connection: &Arc<Mutex<Connection>>,
id: ThreadId,
thread: SerializedThread,
) -> Result<()> {
let json_data = serde_json::to_string(&thread)?;
let summary = thread.summary.to_string();
let updated_at = thread.updated_at.to_rfc3339();

let connection = connection.lock().unwrap();

let compressed = zstd::encode_all(json_data.as_bytes(), Self::COMPRESSION_LEVEL)?;
let data_type = DataType::Zstd;
let data = compressed;

let mut insert = connection.exec_bound::<(ThreadId, String, String, DataType, Vec<u8>)>(indoc! {"
INSERT OR REPLACE INTO threads (id, summary, updated_at, data_type, data) VALUES (?, ?, ?, ?, ?)
"})?;

insert((id, summary, updated_at, data_type, data))?;

Ok(())
}

pub fn list_threads(&self) -> Task<Result<Vec<SerializedThreadMetadata>>> {
let env = self.env.clone();
let threads = self.threads;
let connection = self.connection.clone();

self.executor.spawn(async move {
let txn = env.read_txn()?;
let mut iter = threads.iter(&txn)?;
let connection = connection.lock().unwrap();
let mut select =
connection.select_bound::<(), (ThreadId, String, String)>(indoc! {"
SELECT id, summary, updated_at FROM threads ORDER BY updated_at DESC
"})?;

let rows = select(())?;
let mut threads = Vec::new();
while let Some((key, value)) = iter.next().transpose()? {

for (id, summary, updated_at) in rows {
threads.push(SerializedThreadMetadata {
id: key,
summary: value.summary,
updated_at: value.updated_at,
id,
summary: summary.into(),
updated_at: DateTime::parse_from_rfc3339(&updated_at)?.with_timezone(&Utc),
});
}

@@ -953,36 +1092,51 @@ impl ThreadsDatabase {
}

pub fn try_find_thread(&self, id: ThreadId) -> Task<Result<Option<SerializedThread>>> {
let env = self.env.clone();
let threads = self.threads;
let connection = self.connection.clone();

self.executor.spawn(async move {
let txn = env.read_txn()?;
let thread = threads.get(&txn, &id)?;
Ok(thread)
let connection = connection.lock().unwrap();
let mut select = connection.select_bound::<ThreadId, (DataType, Vec<u8>)>(indoc! {"
SELECT data_type, data FROM threads WHERE id = ? LIMIT 1
"})?;

let rows = select(id)?;
if let Some((data_type, data)) = rows.into_iter().next() {
let json_data = match data_type {
DataType::Zstd => {
let decompressed = zstd::decode_all(&data[..])?;
String::from_utf8(decompressed)?
}
DataType::Json => String::from_utf8(data)?,
};

let thread = SerializedThread::from_json(json_data.as_bytes())?;
Ok(Some(thread))
} else {
Ok(None)
}
})
}

pub fn save_thread(&self, id: ThreadId, thread: SerializedThread) -> Task<Result<()>> {
let env = self.env.clone();
let threads = self.threads;
let connection = self.connection.clone();

self.executor.spawn(async move {
let mut txn = env.write_txn()?;
threads.put(&mut txn, &id, &thread)?;
txn.commit()?;
Ok(())
})
self.executor
.spawn(async move { Self::save_thread_sync(&connection, id, thread) })
}

pub fn delete_thread(&self, id: ThreadId) -> Task<Result<()>> {
let env = self.env.clone();
let threads = self.threads;
let connection = self.connection.clone();

self.executor.spawn(async move {
let mut txn = env.write_txn()?;
threads.delete(&mut txn, &id)?;
txn.commit()?;
let connection = connection.lock().unwrap();

let mut delete = connection.exec_bound::<ThreadId>(indoc! {"
DELETE FROM threads WHERE id = ?
"})?;

delete(id)?;

Ok(())
})
}

@@ -304,7 +304,7 @@ impl AddedContext {
AgentContextHandle::Thread(handle) => Some(Self::pending_thread(handle, cx)),
AgentContextHandle::TextThread(handle) => Some(Self::pending_text_thread(handle, cx)),
AgentContextHandle::Rules(handle) => Self::pending_rules(handle, prompt_store, cx),
AgentContextHandle::Image(handle) => Some(Self::image(handle)),
AgentContextHandle::Image(handle) => Some(Self::image(handle, cx)),
}
}
@@ -318,7 +318,7 @@ impl AddedContext {
AgentContext::Thread(context) => Self::attached_thread(context),
AgentContext::TextThread(context) => Self::attached_text_thread(context),
AgentContext::Rules(context) => Self::attached_rules(context),
AgentContext::Image(context) => Self::image(context.clone()),
AgentContext::Image(context) => Self::image(context.clone(), cx),
}
}
@@ -333,14 +333,8 @@ impl AddedContext {
fn file(handle: FileContextHandle, full_path: &Path, cx: &App) -> AddedContext {
let full_path_string: SharedString = full_path.to_string_lossy().into_owned().into();
let name = full_path
.file_name()
.map(|n| n.to_string_lossy().into_owned().into())
.unwrap_or_else(|| full_path_string.clone());
let parent = full_path
.parent()
.and_then(|p| p.file_name())
.map(|n| n.to_string_lossy().into_owned().into());
let (name, parent) =
extract_file_name_and_directory_from_full_path(full_path, &full_path_string);
AddedContext {
kind: ContextKind::File,
name,
@@ -370,14 +364,8 @@ impl AddedContext {
fn directory(handle: DirectoryContextHandle, full_path: &Path) -> AddedContext {
let full_path_string: SharedString = full_path.to_string_lossy().into_owned().into();
let name = full_path
.file_name()
.map(|n| n.to_string_lossy().into_owned().into())
.unwrap_or_else(|| full_path_string.clone());
let parent = full_path
.parent()
.and_then(|p| p.file_name())
.map(|n| n.to_string_lossy().into_owned().into());
let (name, parent) =
extract_file_name_and_directory_from_full_path(full_path, &full_path_string);
AddedContext {
kind: ContextKind::Directory,
name,
@@ -605,13 +593,23 @@ impl AddedContext {
}
}
fn image(context: ImageContext) -> AddedContext {
fn image(context: ImageContext, cx: &App) -> AddedContext {
let (name, parent, icon_path) = if let Some(full_path) = context.full_path.as_ref() {
let full_path_string: SharedString = full_path.to_string_lossy().into_owned().into();
let (name, parent) =
extract_file_name_and_directory_from_full_path(full_path, &full_path_string);
let icon_path = FileIcons::get_icon(&full_path, cx);
(name, parent, icon_path)
} else {
("Image".into(), None, None)
};
AddedContext {
kind: ContextKind::Image,
name: "Image".into(),
parent: None,
name,
parent,
tooltip: None,
icon_path: None,
icon_path,
status: match context.status() {
ImageStatus::Loading => ContextStatus::Loading {
message: "Loading…".into(),
@@ -639,6 +637,22 @@ impl AddedContext {
}
}
fn extract_file_name_and_directory_from_full_path(
path: &Path,
name_fallback: &SharedString,
) -> (SharedString, Option<SharedString>) {
let name = path
.file_name()
.map(|n| n.to_string_lossy().into_owned().into())
.unwrap_or_else(|| name_fallback.clone());
let parent = path
.parent()
.and_then(|p| p.file_name())
.map(|n| n.to_string_lossy().into_owned().into());
(name, parent)
}
#[derive(Debug, Clone)]
struct ContextFileExcerpt {
pub file_name_and_range: SharedString,
@@ -765,37 +779,49 @@ impl Component for AddedContext {
let mut next_context_id = ContextId::zero();
let image_ready = (
"Ready",
AddedContext::image(ImageContext {
context_id: next_context_id.post_inc(),
project_path: None,
original_image: Arc::new(Image::empty()),
image_task: Task::ready(Some(LanguageModelImage::empty())).shared(),
}),
AddedContext::image(
ImageContext {
context_id: next_context_id.post_inc(),
project_path: None,
full_path: None,
original_image: Arc::new(Image::empty()),
image_task: Task::ready(Some(LanguageModelImage::empty())).shared(),
},
cx,
),
);
let image_loading = (
"Loading",
AddedContext::image(ImageContext {
context_id: next_context_id.post_inc(),
project_path: None,
original_image: Arc::new(Image::empty()),
image_task: cx
.background_spawn(async move {
smol::Timer::after(Duration::from_secs(60 * 5)).await;
Some(LanguageModelImage::empty())
})
.shared(),
}),
AddedContext::image(
ImageContext {
context_id: next_context_id.post_inc(),
project_path: None,
full_path: None,
original_image: Arc::new(Image::empty()),
image_task: cx
.background_spawn(async move {
smol::Timer::after(Duration::from_secs(60 * 5)).await;
Some(LanguageModelImage::empty())
})
.shared(),
},
cx,
),
);
let image_error = (
"Error",
AddedContext::image(ImageContext {
context_id: next_context_id.post_inc(),
project_path: None,
original_image: Arc::new(Image::empty()),
image_task: Task::ready(None).shared(),
}),
AddedContext::image(
ImageContext {
context_id: next_context_id.post_inc(),
project_path: None,
full_path: None,
original_image: Arc::new(Image::empty()),
image_task: Task::ready(None).shared(),
},
cx,
),
);
Some(

@@ -372,6 +372,8 @@ impl AgentSettingsContent {
None,
None,
Some(language_model.supports_tools()),
Some(language_model.supports_images()),
None,
)),
api_url,
});
@@ -689,6 +691,7 @@ pub struct AgentSettingsContentV2 {
pub enum CompletionMode {
#[default]
Normal,
#[serde(alias = "max")]
Burn,
}

@@ -60,6 +60,7 @@ zed_actions.workspace = true
zed_llm_client.workspace = true

[dev-dependencies]
indoc.workspace = true
language_model = { workspace = true, features = ["test-support"] }
languages = { workspace = true, features = ["test-support"] }
pretty_assertions.workspace = true

@@ -1646,34 +1646,35 @@ impl ContextEditor {
let context = self.context.read(cx);

let mut text = String::new();
for message in context.messages(cx) {
if message.offset_range.start >= selection.range().end {
break;
} else if message.offset_range.end >= selection.range().start {
let range = cmp::max(message.offset_range.start, selection.range().start)
..cmp::min(message.offset_range.end, selection.range().end);
if range.is_empty() {
let snapshot = context.buffer().read(cx).snapshot();
let point = snapshot.offset_to_point(range.start);
selection.start = snapshot.point_to_offset(Point::new(point.row, 0));
selection.end = snapshot.point_to_offset(cmp::min(
Point::new(point.row + 1, 0),
snapshot.max_point(),
));
for chunk in context.buffer().read(cx).text_for_range(selection.range()) {
text.push_str(chunk);
}
} else {
for chunk in context.buffer().read(cx).text_for_range(range) {
text.push_str(chunk);
}
if message.offset_range.end < selection.range().end {
text.push('\n');

// If selection is empty, we want to copy the entire line
if selection.range().is_empty() {
let snapshot = context.buffer().read(cx).snapshot();
let point = snapshot.offset_to_point(selection.range().start);
selection.start = snapshot.point_to_offset(Point::new(point.row, 0));
selection.end = snapshot
.point_to_offset(cmp::min(Point::new(point.row + 1, 0), snapshot.max_point()));
for chunk in context.buffer().read(cx).text_for_range(selection.range()) {
text.push_str(chunk);
}
} else {
for message in context.messages(cx) {
if message.offset_range.start >= selection.range().end {
break;
} else if message.offset_range.end >= selection.range().start {
let range = cmp::max(message.offset_range.start, selection.range().start)
..cmp::min(message.offset_range.end, selection.range().end);
if !range.is_empty() {
for chunk in context.buffer().read(cx).text_for_range(range) {
text.push_str(chunk);
}
if message.offset_range.end < selection.range().end {
text.push('\n');
}
}
}
}
}

(text, CopyMetadata { creases }, vec![selection])
}
@@ -3264,74 +3265,92 @@ mod tests {
use super::*;
use fs::FakeFs;
use gpui::{App, TestAppContext, VisualTestContext};
use indoc::indoc;
use language::{Buffer, LanguageRegistry};
use pretty_assertions::assert_eq;
use prompt_store::PromptBuilder;
use text::OffsetRangeExt;
use unindent::Unindent;
use util::path;

#[gpui::test]
async fn test_copy_paste_whole_message(cx: &mut TestAppContext) {
let (context, context_editor, mut cx) = setup_context_editor_text(vec![
(Role::User, "What is the Zed editor?"),
(
Role::Assistant,
"Zed is a modern, high-performance code editor designed from the ground up for speed and collaboration.",
),
(Role::User, ""),
],cx).await;

// Select & Copy whole user message
assert_copy_paste_context_editor(
&context_editor,
message_range(&context, 0, &mut cx),
indoc! {"
What is the Zed editor?
Zed is a modern, high-performance code editor designed from the ground up for speed and collaboration.
What is the Zed editor?
"},
&mut cx,
);

// Select & Copy whole assistant message
assert_copy_paste_context_editor(
&context_editor,
message_range(&context, 1, &mut cx),
indoc! {"
What is the Zed editor?
Zed is a modern, high-performance code editor designed from the ground up for speed and collaboration.
What is the Zed editor?
Zed is a modern, high-performance code editor designed from the ground up for speed and collaboration.
"},
&mut cx,
);
}

#[gpui::test]
async fn test_copy_paste_no_selection(cx: &mut TestAppContext) {
cx.update(init_test);
let (context, context_editor, mut cx) = setup_context_editor_text(
vec![
(Role::User, "user1"),
(Role::Assistant, "assistant1"),
(Role::Assistant, "assistant2"),
(Role::User, ""),
],
cx,
)
.await;

let fs = FakeFs::new(cx.executor());
let registry = Arc::new(LanguageRegistry::test(cx.executor()));
let prompt_builder = Arc::new(PromptBuilder::new(None).unwrap());
let context = cx.new(|cx| {
AssistantContext::local(
registry,
None,
None,
prompt_builder.clone(),
Arc::new(SlashCommandWorkingSet::default()),
cx,
)
});
let project = Project::test(fs.clone(), [path!("/test").as_ref()], cx).await;
let window = cx.add_window(|window, cx| Workspace::test_new(project.clone(), window, cx));
let workspace = window.root(cx).unwrap();
let cx = &mut VisualTestContext::from_window(*window, cx);
// Copy and paste first assistant message
let message_2_range = message_range(&context, 1, &mut cx);
assert_copy_paste_context_editor(
&context_editor,
message_2_range.start..message_2_range.start,
indoc! {"
user1
assistant1
assistant2
assistant1
"},
&mut cx,
);

let context_editor = window
.update(cx, |_, window, cx| {
cx.new(|cx| {
ContextEditor::for_context(
context,
fs,
workspace.downgrade(),
project,
None,
window,
cx,
)
})
})
.unwrap();

context_editor.update_in(cx, |context_editor, window, cx| {
context_editor.editor.update(cx, |editor, cx| {
editor.set_text("abc\ndef\nghi", window, cx);
editor.move_to_beginning(&Default::default(), window, cx);
})
});

context_editor.update_in(cx, |context_editor, window, cx| {
context_editor.editor.update(cx, |editor, cx| {
editor.copy(&Default::default(), window, cx);
editor.paste(&Default::default(), window, cx);

assert_eq!(editor.text(cx), "abc\nabc\ndef\nghi");
})
});

context_editor.update_in(cx, |context_editor, window, cx| {
context_editor.editor.update(cx, |editor, cx| {
editor.cut(&Default::default(), window, cx);
assert_eq!(editor.text(cx), "abc\ndef\nghi");

editor.paste(&Default::default(), window, cx);
assert_eq!(editor.text(cx), "abc\nabc\ndef\nghi");
})
});
// Copy and cut second assistant message
let message_3_range = message_range(&context, 2, &mut cx);
assert_copy_paste_context_editor(
&context_editor,
message_3_range.start..message_3_range.start,
indoc! {"
user1
assistant1
assistant2
assistant1
assistant2
"},
&mut cx,
);
}

#[gpui::test]
@@ -3408,6 +3427,129 @@ mod tests {
}
}

async fn setup_context_editor_text(
messages: Vec<(Role, &str)>,
cx: &mut TestAppContext,
) -> (
Entity<AssistantContext>,
Entity<ContextEditor>,
VisualTestContext,
) {
cx.update(init_test);

let fs = FakeFs::new(cx.executor());
let context = create_context_with_messages(messages, cx);

let project = Project::test(fs.clone(), [path!("/test").as_ref()], cx).await;
let window = cx.add_window(|window, cx| Workspace::test_new(project.clone(), window, cx));
let workspace = window.root(cx).unwrap();
let mut cx = VisualTestContext::from_window(*window, cx);

let context_editor = window
.update(&mut cx, |_, window, cx| {
cx.new(|cx| {
let editor = ContextEditor::for_context(
context.clone(),
fs,
workspace.downgrade(),
project,
None,
window,
cx,
);
editor
})
})
.unwrap();

(context, context_editor, cx)
}

fn message_range(
context: &Entity<AssistantContext>,
message_ix: usize,
cx: &mut TestAppContext,
) -> Range<usize> {
context.update(cx, |context, cx| {
context
.messages(cx)
.nth(message_ix)
.unwrap()
.anchor_range
.to_offset(&context.buffer().read(cx).snapshot())
})
}

fn assert_copy_paste_context_editor<T: editor::ToOffset>(
context_editor: &Entity<ContextEditor>,
range: Range<T>,
expected_text: &str,
cx: &mut VisualTestContext,
) {
context_editor.update_in(cx, |context_editor, window, cx| {
context_editor.editor.update(cx, |editor, cx| {
editor.change_selections(None, window, cx, |s| s.select_ranges([range]));
});

context_editor.copy(&Default::default(), window, cx);

context_editor.editor.update(cx, |editor, cx| {
editor.move_to_end(&Default::default(), window, cx);
});

context_editor.paste(&Default::default(), window, cx);

context_editor.editor.update(cx, |editor, cx| {
assert_eq!(editor.text(cx), expected_text);
});
});
}

fn create_context_with_messages(
mut messages: Vec<(Role, &str)>,
cx: &mut TestAppContext,
) -> Entity<AssistantContext> {
let registry = Arc::new(LanguageRegistry::test(cx.executor()));
let prompt_builder = Arc::new(PromptBuilder::new(None).unwrap());
cx.new(|cx| {
let mut context = AssistantContext::local(
registry,
None,
None,
prompt_builder.clone(),
Arc::new(SlashCommandWorkingSet::default()),
cx,
);
let mut message_1 = context.messages(cx).next().unwrap();
let (role, text) = messages.remove(0);

loop {
if role == message_1.role {
context.buffer().update(cx, |buffer, cx| {
buffer.edit([(message_1.offset_range, text)], None, cx);
});
break;
}
let mut ids = HashSet::default();
ids.insert(message_1.id);
context.cycle_message_roles(ids, cx);
message_1 = context.messages(cx).next().unwrap();
}

let mut last_message_id = message_1.id;
for (role, text) in messages {
context.insert_message_after(last_message_id, role, MessageStatus::Done, cx);
|
||||
let message = context.messages(cx).last().unwrap();
|
||||
last_message_id = message.id;
|
||||
context.buffer().update(cx, |buffer, cx| {
|
||||
buffer.edit([(message.offset_range, text)], None, cx);
|
||||
})
|
||||
}
|
||||
|
||||
context
|
||||
})
|
||||
}
|
||||
|
||||
fn init_test(cx: &mut App) {
|
||||
let settings_store = SettingsStore::test(cx);
|
||||
prompt_store::init(cx);
|
||||
|
||||
@@ -46,53 +46,35 @@ pub fn language_model_selector(
|
||||
}
|
||||
|
||||
fn all_models(cx: &App) -> GroupedModels {
|
||||
let mut recommended = Vec::new();
|
||||
let mut recommended_set = HashSet::default();
|
||||
for provider in LanguageModelRegistry::global(cx)
|
||||
.read(cx)
|
||||
.providers()
|
||||
let providers = LanguageModelRegistry::global(cx).read(cx).providers();
|
||||
|
||||
let recommended = providers
|
||||
.iter()
|
||||
{
|
||||
let models = provider.recommended_models(cx);
|
||||
recommended_set.extend(models.iter().map(|model| (model.provider_id(), model.id())));
|
||||
recommended.extend(
|
||||
.flat_map(|provider| {
|
||||
provider
|
||||
.recommended_models(cx)
|
||||
.into_iter()
|
||||
.map(move |model| ModelInfo {
|
||||
model: model.clone(),
|
||||
.map(|model| ModelInfo {
|
||||
model,
|
||||
icon: provider.icon(),
|
||||
}),
|
||||
);
|
||||
}
|
||||
|
||||
let other_models = LanguageModelRegistry::global(cx)
|
||||
.read(cx)
|
||||
.providers()
|
||||
.iter()
|
||||
.map(|provider| {
|
||||
(
|
||||
provider.id(),
|
||||
provider
|
||||
.provided_models(cx)
|
||||
.into_iter()
|
||||
.filter_map(|model| {
|
||||
let not_included =
|
||||
!recommended_set.contains(&(model.provider_id(), model.id()));
|
||||
not_included.then(|| ModelInfo {
|
||||
model: model.clone(),
|
||||
icon: provider.icon(),
|
||||
})
|
||||
})
|
||||
.collect::<Vec<_>>(),
|
||||
)
|
||||
})
|
||||
})
|
||||
.collect::<IndexMap<_, _>>();
|
||||
.collect();
|
||||
|
||||
GroupedModels {
|
||||
recommended,
|
||||
other: other_models,
|
||||
}
|
||||
let other = providers
|
||||
.iter()
|
||||
.flat_map(|provider| {
|
||||
provider
|
||||
.provided_models(cx)
|
||||
.into_iter()
|
||||
.map(|model| ModelInfo {
|
||||
model,
|
||||
icon: provider.icon(),
|
||||
})
|
||||
})
|
||||
.collect();
|
||||
|
||||
GroupedModels::new(other, recommended)
|
||||
}
|
||||
|
||||
#[derive(Clone)]
|
||||
@@ -234,11 +216,14 @@ struct GroupedModels {

impl GroupedModels {
    pub fn new(other: Vec<ModelInfo>, recommended: Vec<ModelInfo>) -> Self {
        let recommended_ids: HashSet<_> = recommended.iter().map(|info| info.model.id()).collect();
        let recommended_ids = recommended
            .iter()
            .map(|info| (info.model.provider_id(), info.model.id()))
            .collect::<HashSet<_>>();

        let mut other_by_provider: IndexMap<_, Vec<ModelInfo>> = IndexMap::default();
        for model in other {
            if recommended_ids.contains(&model.model.id()) {
            if recommended_ids.contains(&(model.model.provider_id(), model.model.id())) {
                continue;
            }

@@ -823,4 +808,26 @@ mod tests {
        // Recommended models should not appear in "other"
        assert_models_eq(actual_other_models, vec!["zed/gemini", "copilot/o3"]);
    }

    #[gpui::test]
    fn test_dont_exclude_models_from_other_providers(_cx: &mut TestAppContext) {
        let recommended_models = create_models(vec![("zed", "claude")]);
        let all_models = create_models(vec![
            ("zed", "claude"),     // Should be filtered out from "other"
            ("zed", "gemini"),
            ("copilot", "claude"), // Should not be filtered out from "other"
        ]);

        let grouped_models = GroupedModels::new(all_models, recommended_models);

        let actual_other_models = grouped_models
            .other
            .values()
            .flatten()
            .cloned()
            .collect::<Vec<_>>();

        // Only exact (provider, model) pairs that are recommended are excluded from "other";
        // the same model name under a different provider stays.
        assert_models_eq(actual_other_models, vec!["zed/gemini", "copilot/claude"]);
    }
}
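
The `GroupedModels::new` change above dedupes recommended models by the `(provider_id, model_id)` pair rather than by model id alone, which is what the new `test_dont_exclude_models_from_other_providers` test pins down. A standalone sketch of that idea, with simplified stand-in types rather than Zed's `ModelInfo`/`IndexMap`:

```rust
use std::collections::{BTreeMap, HashSet};

// Hypothetical stand-ins for Zed's model/provider types, for illustration only.
#[derive(Clone, Debug, PartialEq)]
struct Model {
    provider: String,
    id: String,
}

/// Group models per provider, skipping only the exact (provider, id) pairs
/// that are already recommended.
fn group_other(all: Vec<Model>, recommended: &[Model]) -> BTreeMap<String, Vec<Model>> {
    let recommended_keys: HashSet<(&str, &str)> = recommended
        .iter()
        .map(|m| (m.provider.as_str(), m.id.as_str()))
        .collect();

    let mut by_provider: BTreeMap<String, Vec<Model>> = BTreeMap::new();
    for model in all {
        if recommended_keys.contains(&(model.provider.as_str(), model.id.as_str())) {
            continue; // this exact pair is recommended; leave it out of "other"
        }
        by_provider.entry(model.provider.clone()).or_default().push(model);
    }
    by_provider
}

fn main() {
    let recommended = vec![Model { provider: "zed".into(), id: "claude".into() }];
    let all = vec![
        Model { provider: "zed".into(), id: "claude".into() },
        Model { provider: "zed".into(), id: "gemini".into() },
        Model { provider: "copilot".into(), id: "claude".into() },
    ];
    let other = group_other(all, &recommended);
    // "zed/claude" is filtered out; "copilot/claude" survives under its own provider.
    assert_eq!(other["zed"], vec![Model { provider: "zed".into(), id: "gemini".into() }]);
    assert_eq!(other["copilot"].len(), 1);
}
```

Keying the set on the pair is the whole fix: `copilot/claude` no longer disappears just because `zed/claude` happens to be recommended.
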
@@ -48,7 +48,7 @@ impl SlashCommandCompletionProvider {
|
||||
name_range: Range<Anchor>,
|
||||
window: &mut Window,
|
||||
cx: &mut App,
|
||||
) -> Task<Result<Option<Vec<project::Completion>>>> {
|
||||
) -> Task<Result<Vec<project::CompletionResponse>>> {
|
||||
let slash_commands = self.slash_commands.clone();
|
||||
let candidates = slash_commands
|
||||
.command_names(cx)
|
||||
@@ -71,28 +71,27 @@ impl SlashCommandCompletionProvider {
|
||||
.await;
|
||||
|
||||
cx.update(|_, cx| {
|
||||
Some(
|
||||
matches
|
||||
.into_iter()
|
||||
.filter_map(|mat| {
|
||||
let command = slash_commands.command(&mat.string, cx)?;
|
||||
let mut new_text = mat.string.clone();
|
||||
let requires_argument = command.requires_argument();
|
||||
let accepts_arguments = command.accepts_arguments();
|
||||
if requires_argument || accepts_arguments {
|
||||
new_text.push(' ');
|
||||
}
|
||||
let completions = matches
|
||||
.into_iter()
|
||||
.filter_map(|mat| {
|
||||
let command = slash_commands.command(&mat.string, cx)?;
|
||||
let mut new_text = mat.string.clone();
|
||||
let requires_argument = command.requires_argument();
|
||||
let accepts_arguments = command.accepts_arguments();
|
||||
if requires_argument || accepts_arguments {
|
||||
new_text.push(' ');
|
||||
}
|
||||
|
||||
let confirm =
|
||||
editor
|
||||
.clone()
|
||||
.zip(workspace.clone())
|
||||
.map(|(editor, workspace)| {
|
||||
let command_name = mat.string.clone();
|
||||
let command_range = command_range.clone();
|
||||
let editor = editor.clone();
|
||||
let workspace = workspace.clone();
|
||||
Arc::new(
|
||||
let confirm =
|
||||
editor
|
||||
.clone()
|
||||
.zip(workspace.clone())
|
||||
.map(|(editor, workspace)| {
|
||||
let command_name = mat.string.clone();
|
||||
let command_range = command_range.clone();
|
||||
let editor = editor.clone();
|
||||
let workspace = workspace.clone();
|
||||
Arc::new(
|
||||
move |intent: CompletionIntent,
|
||||
window: &mut Window,
|
||||
cx: &mut App| {
|
||||
@@ -118,22 +117,27 @@ impl SlashCommandCompletionProvider {
|
||||
}
|
||||
},
|
||||
) as Arc<_>
|
||||
});
|
||||
Some(project::Completion {
|
||||
replace_range: name_range.clone(),
|
||||
documentation: Some(CompletionDocumentation::SingleLine(
|
||||
command.description().into(),
|
||||
)),
|
||||
new_text,
|
||||
label: command.label(cx),
|
||||
icon_path: None,
|
||||
insert_text_mode: None,
|
||||
confirm,
|
||||
source: CompletionSource::Custom,
|
||||
})
|
||||
});
|
||||
|
||||
Some(project::Completion {
|
||||
replace_range: name_range.clone(),
|
||||
documentation: Some(CompletionDocumentation::SingleLine(
|
||||
command.description().into(),
|
||||
)),
|
||||
new_text,
|
||||
label: command.label(cx),
|
||||
icon_path: None,
|
||||
insert_text_mode: None,
|
||||
confirm,
|
||||
source: CompletionSource::Custom,
|
||||
})
|
||||
.collect(),
|
||||
)
|
||||
})
|
||||
.collect();
|
||||
|
||||
            vec![project::CompletionResponse {
                completions,
                is_incomplete: false,
            }]
        })
    })
}
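
The surrounding hunks switch the slash-command completion provider from returning `Option<Vec<project::Completion>>` to `Vec<project::CompletionResponse>`, where each response carries an `is_incomplete` flag. A rough sketch of that wrapping pattern, using simplified stand-in types rather than the real `project` crate:

```rust
// Illustrative only: simplified stand-ins for project::Completion / CompletionResponse.
struct Completion {
    new_text: String,
}

struct CompletionResponse {
    completions: Vec<Completion>,
    is_incomplete: bool,
}

/// Old shape: Option<Vec<Completion>>, where None meant "nothing to offer".
/// New shape: always a (possibly empty) batch, each carrying an `is_incomplete`
/// flag so the editor knows whether to re-query as the user keeps typing.
fn wrap(completions: Vec<Completion>) -> Vec<CompletionResponse> {
    vec![CompletionResponse {
        completions,
        is_incomplete: false, // the full candidate set was produced in one pass
    }]
}

fn empty_response() -> Vec<CompletionResponse> {
    vec![CompletionResponse { completions: Vec::new(), is_incomplete: false }]
}

fn main() {
    let responses = wrap(vec![Completion { new_text: "/file ".into() }]);
    assert_eq!(responses.len(), 1);
    assert!(!responses[0].is_incomplete);
    assert!(empty_response()[0].completions.is_empty());
}
```
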
@@ -147,7 +151,7 @@ impl SlashCommandCompletionProvider {
|
||||
last_argument_range: Range<Anchor>,
|
||||
window: &mut Window,
|
||||
cx: &mut App,
|
||||
) -> Task<Result<Option<Vec<project::Completion>>>> {
|
||||
) -> Task<Result<Vec<project::CompletionResponse>>> {
|
||||
let new_cancel_flag = Arc::new(AtomicBool::new(false));
|
||||
let mut flag = self.cancel_flag.lock();
|
||||
flag.store(true, SeqCst);
|
||||
@@ -165,28 +169,27 @@ impl SlashCommandCompletionProvider {
|
||||
let workspace = self.workspace.clone();
|
||||
let arguments = arguments.to_vec();
|
||||
cx.background_spawn(async move {
|
||||
Ok(Some(
|
||||
completions
|
||||
.await?
|
||||
.into_iter()
|
||||
.map(|new_argument| {
|
||||
let confirm =
|
||||
editor
|
||||
.clone()
|
||||
.zip(workspace.clone())
|
||||
.map(|(editor, workspace)| {
|
||||
Arc::new({
|
||||
let mut completed_arguments = arguments.clone();
|
||||
if new_argument.replace_previous_arguments {
|
||||
completed_arguments.clear();
|
||||
} else {
|
||||
completed_arguments.pop();
|
||||
}
|
||||
completed_arguments.push(new_argument.new_text.clone());
|
||||
let completions = completions
|
||||
.await?
|
||||
.into_iter()
|
||||
.map(|new_argument| {
|
||||
let confirm =
|
||||
editor
|
||||
.clone()
|
||||
.zip(workspace.clone())
|
||||
.map(|(editor, workspace)| {
|
||||
Arc::new({
|
||||
let mut completed_arguments = arguments.clone();
|
||||
if new_argument.replace_previous_arguments {
|
||||
completed_arguments.clear();
|
||||
} else {
|
||||
completed_arguments.pop();
|
||||
}
|
||||
completed_arguments.push(new_argument.new_text.clone());
|
||||
|
||||
let command_range = command_range.clone();
|
||||
let command_name = command_name.clone();
|
||||
move |intent: CompletionIntent,
|
||||
let command_range = command_range.clone();
|
||||
let command_name = command_name.clone();
|
||||
move |intent: CompletionIntent,
|
||||
window: &mut Window,
|
||||
cx: &mut App| {
|
||||
if new_argument.after_completion.run()
|
||||
@@ -210,34 +213,41 @@ impl SlashCommandCompletionProvider {
|
||||
!new_argument.after_completion.run()
|
||||
}
|
||||
}
|
||||
}) as Arc<_>
|
||||
});
|
||||
}) as Arc<_>
|
||||
});
|
||||
|
||||
let mut new_text = new_argument.new_text.clone();
|
||||
if new_argument.after_completion == AfterCompletion::Continue {
|
||||
new_text.push(' ');
|
||||
}
|
||||
let mut new_text = new_argument.new_text.clone();
|
||||
if new_argument.after_completion == AfterCompletion::Continue {
|
||||
new_text.push(' ');
|
||||
}
|
||||
|
||||
project::Completion {
|
||||
replace_range: if new_argument.replace_previous_arguments {
|
||||
argument_range.clone()
|
||||
} else {
|
||||
last_argument_range.clone()
|
||||
},
|
||||
label: new_argument.label,
|
||||
icon_path: None,
|
||||
new_text,
|
||||
documentation: None,
|
||||
confirm,
|
||||
insert_text_mode: None,
|
||||
source: CompletionSource::Custom,
|
||||
}
|
||||
})
|
||||
.collect(),
|
||||
))
|
||||
project::Completion {
|
||||
replace_range: if new_argument.replace_previous_arguments {
|
||||
argument_range.clone()
|
||||
} else {
|
||||
last_argument_range.clone()
|
||||
},
|
||||
label: new_argument.label,
|
||||
icon_path: None,
|
||||
new_text,
|
||||
documentation: None,
|
||||
confirm,
|
||||
insert_text_mode: None,
|
||||
source: CompletionSource::Custom,
|
||||
}
|
||||
})
|
||||
.collect();
|
||||
|
||||
Ok(vec![project::CompletionResponse {
|
||||
completions,
|
||||
is_incomplete: false,
|
||||
}])
|
||||
})
|
||||
} else {
|
||||
Task::ready(Ok(Some(Vec::new())))
|
||||
Task::ready(Ok(vec![project::CompletionResponse {
|
||||
completions: Vec::new(),
|
||||
is_incomplete: false,
|
||||
}]))
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -251,7 +261,7 @@ impl CompletionProvider for SlashCommandCompletionProvider {
|
||||
_: editor::CompletionContext,
|
||||
window: &mut Window,
|
||||
cx: &mut Context<Editor>,
|
||||
) -> Task<Result<Option<Vec<project::Completion>>>> {
|
||||
) -> Task<Result<Vec<project::CompletionResponse>>> {
|
||||
let Some((name, arguments, command_range, last_argument_range)) =
|
||||
buffer.update(cx, |buffer, _cx| {
|
||||
let position = buffer_position.to_point(buffer);
|
||||
@@ -295,7 +305,10 @@ impl CompletionProvider for SlashCommandCompletionProvider {
|
||||
Some((name, arguments, command_range, last_argument_range))
|
||||
})
|
||||
else {
|
||||
return Task::ready(Ok(Some(Vec::new())));
|
||||
return Task::ready(Ok(vec![project::CompletionResponse {
|
||||
completions: Vec::new(),
|
||||
is_incomplete: false,
|
||||
}]));
|
||||
};
|
||||
|
||||
if let Some((arguments, argument_range)) = arguments {
|
||||
|
||||
@@ -1,6 +1,6 @@
use std::cell::RefCell;
use std::rc::Rc;
use std::sync::Arc;
use std::{borrow::Cow, cell::RefCell};

use crate::schema::json_schema_for;
use anyhow::{Context as _, Result, anyhow, bail};
@@ -39,10 +39,11 @@ impl FetchTool {
    }

    async fn build_message(http_client: Arc<HttpClientWithUrl>, url: &str) -> Result<String> {
        let mut url = url.to_owned();
        if !url.starts_with("https://") && !url.starts_with("http://") {
            url = format!("https://{url}");
        }
        let url = if !url.starts_with("https://") && !url.starts_with("http://") {
            Cow::Owned(format!("https://{url}"))
        } else {
            Cow::Borrowed(url)
        };

        let mut response = http_client.get(&url, AsyncBody::default(), true).await?;

@@ -156,8 +157,7 @@ impl Tool for FetchTool {

        let text = cx.background_spawn({
            let http_client = self.http_client.clone();
            let url = input.url.clone();
            async move { Self::build_message(http_client, &url).await }
            async move { Self::build_message(http_client, &input.url).await }
        });

        cx.foreground_executor()

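
The `build_message` change replaces an unconditional `to_owned()` with a `Cow<str>`, so the URL is only allocated when a scheme actually has to be prepended; and because `Cow<str>` derefs to `str`, the later `http_client.get(&url, ...)` call can stay unchanged. A minimal sketch of the same pattern in isolation (plain function, no HTTP client):

```rust
use std::borrow::Cow;

/// Prepend "https://" only when the input has no scheme; borrow otherwise.
fn normalize_url(url: &str) -> Cow<'_, str> {
    if url.starts_with("https://") || url.starts_with("http://") {
        Cow::Borrowed(url) // no allocation for the already-qualified case
    } else {
        Cow::Owned(format!("https://{url}"))
    }
}

fn main() {
    // Already has a scheme: no allocation happens.
    assert!(matches!(normalize_url("https://zed.dev"), Cow::Borrowed(_)));
    // Missing scheme: an owned, prefixed string is produced.
    assert_eq!(normalize_url("zed.dev"), "https://zed.dev");
}
```
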
@@ -119,14 +119,16 @@ impl Tool for FindPathTool {
|
||||
)
|
||||
.unwrap();
|
||||
}
|
||||
let output = FindPathToolOutput {
|
||||
glob,
|
||||
paths: matches.clone(),
|
||||
};
|
||||
|
||||
for mat in matches.into_iter().skip(offset).take(RESULTS_PER_PAGE) {
|
||||
for mat in matches.iter().skip(offset).take(RESULTS_PER_PAGE) {
|
||||
write!(&mut message, "\n{}", mat.display()).unwrap();
|
||||
}
|
||||
|
||||
let output = FindPathToolOutput {
|
||||
glob,
|
||||
paths: matches,
|
||||
};
|
||||
|
||||
Ok(ToolResultOutput {
|
||||
content: ToolResultContent::Text(message),
|
||||
output: Some(serde_json::to_value(output)?),
|
||||
@@ -235,8 +237,6 @@ impl ToolCard for FindPathToolCard {
|
||||
format!("{} matches", self.paths.len()).into()
|
||||
};
|
||||
|
||||
let glob_label = self.glob.to_string();
|
||||
|
||||
let content = if !self.paths.is_empty() && self.expanded {
|
||||
Some(
|
||||
v_flex()
|
||||
@@ -310,7 +310,7 @@ impl ToolCard for FindPathToolCard {
|
||||
.gap_1()
|
||||
.child(
|
||||
ToolCallCardHeader::new(IconName::SearchCode, matches_label)
|
||||
.with_code_path(glob_label)
|
||||
.with_code_path(&self.glob)
|
||||
.disclosure_slot(
|
||||
Disclosure::new("path-search-disclosure", self.expanded)
|
||||
.opened_icon(IconName::ChevronUp)
|
||||
|
||||
@@ -182,9 +182,8 @@ impl Tool for TerminalTool {
|
||||
let mut child = pair.slave.spawn_command(cmd)?;
|
||||
let mut reader = pair.master.try_clone_reader()?;
|
||||
drop(pair);
|
||||
let mut content = Vec::new();
|
||||
reader.read_to_end(&mut content)?;
|
||||
let mut content = String::from_utf8(content)?;
|
||||
let mut content = String::new();
|
||||
reader.read_to_string(&mut content)?;
|
||||
// Massage the pty output a bit to try to match what the terminal codepath gives us
|
||||
LineEnding::normalize(&mut content);
|
||||
content = content
|
||||
|
||||
@@ -166,7 +166,7 @@ impl ToolCard for WebSearchToolCard {
|
||||
.gap_1()
|
||||
.children(response.results.iter().enumerate().map(|(index, result)| {
|
||||
let title = result.title.clone();
|
||||
let url = result.url.clone();
|
||||
let url = SharedString::from(result.url.clone());
|
||||
|
||||
Button::new(("result", index), title)
|
||||
.label_size(LabelSize::Small)
|
||||
|
||||
@@ -91,7 +91,7 @@ fn view_release_notes_locally(
|
||||
|
||||
let buffer = cx.new(|cx| MultiBuffer::singleton(buffer, cx));
|
||||
|
||||
let tab_content = SharedString::from(body.title.to_string());
|
||||
let tab_content = Some(SharedString::from(body.title.to_string()));
|
||||
let editor = cx.new(|cx| {
|
||||
Editor::for_multibuffer(buffer, Some(project), window, cx)
|
||||
});
|
||||
|
||||
@@ -16,6 +16,7 @@ doctest = false
|
||||
editor.workspace = true
|
||||
gpui.workspace = true
|
||||
itertools.workspace = true
|
||||
settings.workspace = true
|
||||
theme.workspace = true
|
||||
ui.workspace = true
|
||||
workspace.workspace = true
|
||||
|
||||
@@ -1,14 +1,15 @@
|
||||
use editor::Editor;
|
||||
use gpui::{
|
||||
Context, Element, EventEmitter, Focusable, IntoElement, ParentElement, Render, StyledText,
|
||||
Subscription, Window,
|
||||
Context, Element, EventEmitter, Focusable, FontWeight, IntoElement, ParentElement, Render,
|
||||
StyledText, Subscription, Window,
|
||||
};
|
||||
use itertools::Itertools;
|
||||
use settings::Settings;
|
||||
use std::cmp;
|
||||
use theme::ActiveTheme;
|
||||
use ui::{ButtonLike, ButtonStyle, Label, Tooltip, prelude::*};
|
||||
use workspace::{
|
||||
ToolbarItemEvent, ToolbarItemLocation, ToolbarItemView,
|
||||
TabBarSettings, ToolbarItemEvent, ToolbarItemLocation, ToolbarItemView,
|
||||
item::{BreadcrumbText, ItemEvent, ItemHandle},
|
||||
};
|
||||
|
||||
@@ -71,16 +72,23 @@ impl Render for Breadcrumbs {
|
||||
);
|
||||
}
|
||||
|
||||
let highlighted_segments = segments.into_iter().map(|segment| {
|
||||
let highlighted_segments = segments.into_iter().enumerate().map(|(index, segment)| {
|
||||
let mut text_style = window.text_style();
|
||||
if let Some(font) = segment.font {
|
||||
text_style.font_family = font.family;
|
||||
text_style.font_features = font.features;
|
||||
if let Some(ref font) = segment.font {
|
||||
text_style.font_family = font.family.clone();
|
||||
text_style.font_features = font.features.clone();
|
||||
text_style.font_style = font.style;
|
||||
text_style.font_weight = font.weight;
|
||||
}
|
||||
text_style.color = Color::Muted.color(cx);
|
||||
|
||||
if index == 0 && !TabBarSettings::get_global(cx).show && active_item.is_dirty(cx) {
|
||||
if let Some(styled_element) = apply_dirty_filename_style(&segment, &text_style, cx)
|
||||
{
|
||||
return styled_element;
|
||||
}
|
||||
}
|
||||
|
||||
StyledText::new(segment.text.replace('\n', "⏎"))
|
||||
.with_default_highlights(&text_style, segment.highlights.unwrap_or_default())
|
||||
.into_any()
|
||||
@@ -184,3 +192,46 @@ impl ToolbarItemView for Breadcrumbs {
|
||||
self.pane_focused = pane_focused;
|
||||
}
|
||||
}
|
||||
|
||||
fn apply_dirty_filename_style(
    segment: &BreadcrumbText,
    text_style: &gpui::TextStyle,
    cx: &mut Context<Breadcrumbs>,
) -> Option<gpui::AnyElement> {
    let text = segment.text.replace('\n', "⏎");

    let filename_position = std::path::Path::new(&segment.text)
        .file_name()
        .and_then(|f| {
            let filename_str = f.to_string_lossy();
            segment.text.rfind(filename_str.as_ref())
        })?;

    let bold_weight = FontWeight::BOLD;
    let default_color = Color::Default.color(cx);

    if filename_position == 0 {
        let mut filename_style = text_style.clone();
        filename_style.font_weight = bold_weight;
        filename_style.color = default_color;

        return Some(
            StyledText::new(text)
                .with_default_highlights(&filename_style, [])
                .into_any(),
        );
    }

    let highlight_style = gpui::HighlightStyle {
        font_weight: Some(bold_weight),
        color: Some(default_color),
        ..Default::default()
    };

    let highlight = vec![(filename_position..text.len(), highlight_style)];
    Some(
        StyledText::new(text)
            .with_default_highlights(&text_style, highlight)
            .into_any(),
    )
}
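
`apply_dirty_filename_style` bolds only the filename portion of the first breadcrumb segment when the tab bar is hidden and the buffer is dirty, by applying a `HighlightStyle` over the filename's byte range. A reduced sketch of just the range computation, independent of gpui (the function name here is illustrative, not part of the PR):

```rust
use std::ops::Range;
use std::path::Path;

/// Return the byte range of the final path component within `segment`,
/// i.e. the span a renderer would bold for the "dirty filename" treatment.
fn filename_range(segment: &str) -> Option<Range<usize>> {
    let file_name = Path::new(segment).file_name()?.to_string_lossy().into_owned();
    // rfind (not find) so a directory with the same name earlier in the
    // path doesn't steal the match.
    let start = segment.rfind(file_name.as_str())?;
    Some(start..segment.len())
}

fn main() {
    assert_eq!(filename_range("crates/editor/src/editor.rs"), Some(18..27));
    // A bare filename starts at 0, which the Zed code special-cases by
    // styling the whole segment instead of building a highlight range.
    assert_eq!(filename_range("editor.rs"), Some(0..9));
}
```
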
@@ -35,6 +35,7 @@ pub struct ChannelBuffer {
|
||||
pub enum ChannelBufferEvent {
|
||||
CollaboratorsChanged,
|
||||
Disconnected,
|
||||
Connected,
|
||||
BufferEdited,
|
||||
ChannelChanged,
|
||||
}
|
||||
@@ -103,6 +104,17 @@ impl ChannelBuffer {
|
||||
}
|
||||
}
|
||||
|
||||
pub fn connected(&mut self, cx: &mut Context<Self>) {
|
||||
self.connected = true;
|
||||
if self.subscription.is_none() {
|
||||
let Ok(subscription) = self.client.subscribe_to_entity(self.channel_id.0) else {
|
||||
return;
|
||||
};
|
||||
self.subscription = Some(subscription.set_entity(&cx.entity(), &mut cx.to_async()));
|
||||
cx.emit(ChannelBufferEvent::Connected);
|
||||
}
|
||||
}
|
||||
|
||||
pub fn remote_id(&self, cx: &App) -> BufferId {
|
||||
self.buffer.read(cx).remote_id()
|
||||
}
|
||||
|
||||
@@ -972,6 +972,7 @@ impl ChannelStore {
|
||||
.log_err();
|
||||
|
||||
if let Some(operations) = operations {
|
||||
channel_buffer.connected(cx);
|
||||
let client = this.client.clone();
|
||||
cx.background_spawn(async move {
|
||||
let operations = operations.await;
|
||||
@@ -1012,8 +1013,8 @@ impl ChannelStore {
|
||||
|
||||
if let Some(this) = this.upgrade() {
|
||||
this.update(cx, |this, cx| {
|
||||
for (_, buffer) in this.opened_buffers.drain() {
|
||||
if let OpenEntityHandle::Open(buffer) = buffer {
|
||||
for (_, buffer) in &this.opened_buffers {
|
||||
if let OpenEntityHandle::Open(buffer) = &buffer {
|
||||
if let Some(buffer) = buffer.upgrade() {
|
||||
buffer.update(cx, |buffer, cx| buffer.disconnect(cx));
|
||||
}
|
||||
|
||||
@@ -17,8 +17,8 @@ use stripe::{
|
||||
CreateBillingPortalSessionFlowDataAfterCompletionRedirect,
|
||||
CreateBillingPortalSessionFlowDataSubscriptionUpdateConfirm,
|
||||
CreateBillingPortalSessionFlowDataSubscriptionUpdateConfirmItems,
|
||||
CreateBillingPortalSessionFlowDataType, Customer, CustomerId, EventObject, EventType,
|
||||
Expandable, ListEvents, PaymentMethod, Subscription, SubscriptionId, SubscriptionStatus,
|
||||
CreateBillingPortalSessionFlowDataType, CustomerId, EventObject, EventType, ListEvents,
|
||||
PaymentMethod, Subscription, SubscriptionId, SubscriptionStatus,
|
||||
};
|
||||
use util::{ResultExt, maybe};
|
||||
|
||||
@@ -29,7 +29,10 @@ use crate::db::billing_subscription::{
|
||||
use crate::llm::db::subscription_usage_meter::CompletionMode;
|
||||
use crate::llm::{AGENT_EXTENDED_TRIAL_FEATURE_FLAG, DEFAULT_MAX_MONTHLY_SPEND};
|
||||
use crate::rpc::{ResultExt as _, Server};
|
||||
use crate::stripe_client::{StripeCustomerId, StripeSubscriptionId};
|
||||
use crate::stripe_client::{
|
||||
StripeCancellationDetailsReason, StripeClient, StripeCustomerId, StripeSubscription,
|
||||
StripeSubscriptionId,
|
||||
};
|
||||
use crate::{AppState, Error, Result};
|
||||
use crate::{db::UserId, llm::db::LlmDatabase};
|
||||
use crate::{
|
||||
@@ -55,10 +58,6 @@ pub fn router() -> Router {
|
||||
"/billing/subscriptions/manage",
|
||||
post(manage_billing_subscription),
|
||||
)
|
||||
.route(
|
||||
"/billing/subscriptions/migrate",
|
||||
post(migrate_to_new_billing),
|
||||
)
|
||||
.route(
|
||||
"/billing/subscriptions/sync",
|
||||
post(sync_billing_subscription),
|
||||
@@ -426,7 +425,7 @@ async fn manage_billing_subscription(
|
||||
.await?
|
||||
.context("user not found")?;
|
||||
|
||||
let Some(stripe_client) = app.stripe_client.clone() else {
|
||||
let Some(stripe_client) = app.real_stripe_client.clone() else {
|
||||
log::error!("failed to retrieve Stripe client");
|
||||
Err(Error::http(
|
||||
StatusCode::NOT_IMPLEMENTED,
|
||||
@@ -629,86 +628,6 @@ async fn manage_billing_subscription(
|
||||
}))
|
||||
}
|
||||
|
||||
#[derive(Debug, Deserialize)]
|
||||
struct MigrateToNewBillingBody {
|
||||
github_user_id: i32,
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize)]
|
||||
struct MigrateToNewBillingResponse {
|
||||
/// The ID of the subscription that was canceled.
|
||||
canceled_subscription_id: Option<String>,
|
||||
}
|
||||
|
||||
async fn migrate_to_new_billing(
|
||||
Extension(app): Extension<Arc<AppState>>,
|
||||
extract::Json(body): extract::Json<MigrateToNewBillingBody>,
|
||||
) -> Result<Json<MigrateToNewBillingResponse>> {
|
||||
let Some(stripe_client) = app.stripe_client.clone() else {
|
||||
log::error!("failed to retrieve Stripe client");
|
||||
Err(Error::http(
|
||||
StatusCode::NOT_IMPLEMENTED,
|
||||
"not supported".into(),
|
||||
))?
|
||||
};
|
||||
|
||||
let user = app
|
||||
.db
|
||||
.get_user_by_github_user_id(body.github_user_id)
|
||||
.await?
|
||||
.context("user not found")?;
|
||||
|
||||
let old_billing_subscriptions_by_user = app
|
||||
.db
|
||||
.get_active_billing_subscriptions(HashSet::from_iter([user.id]))
|
||||
.await?;
|
||||
|
||||
let canceled_subscription_id = if let Some((_billing_customer, billing_subscription)) =
|
||||
old_billing_subscriptions_by_user.get(&user.id)
|
||||
{
|
||||
let stripe_subscription_id = billing_subscription
|
||||
.stripe_subscription_id
|
||||
.parse::<stripe::SubscriptionId>()
|
||||
.context("failed to parse Stripe subscription ID from database")?;
|
||||
|
||||
Subscription::cancel(
|
||||
&stripe_client,
|
||||
&stripe_subscription_id,
|
||||
stripe::CancelSubscription {
|
||||
invoice_now: Some(true),
|
||||
..Default::default()
|
||||
},
|
||||
)
|
||||
.await?;
|
||||
|
||||
Some(stripe_subscription_id)
|
||||
} else {
|
||||
None
|
||||
};
|
||||
|
||||
let all_feature_flags = app.db.list_feature_flags().await?;
|
||||
let user_feature_flags = app.db.get_user_flags(user.id).await?;
|
||||
|
||||
for feature_flag in ["new-billing", "assistant2"] {
|
||||
let already_in_feature_flag = user_feature_flags.iter().any(|flag| flag == feature_flag);
|
||||
if already_in_feature_flag {
|
||||
continue;
|
||||
}
|
||||
|
||||
let feature_flag = all_feature_flags
|
||||
.iter()
|
||||
.find(|flag| flag.flag == feature_flag)
|
||||
.context("failed to find feature flag: {feature_flag:?}")?;
|
||||
|
||||
app.db.add_user_flag(user.id, feature_flag.id).await?;
|
||||
}
|
||||
|
||||
Ok(Json(MigrateToNewBillingResponse {
|
||||
canceled_subscription_id: canceled_subscription_id
|
||||
.map(|subscription_id| subscription_id.to_string()),
|
||||
}))
|
||||
}
|
||||
|
||||
#[derive(Debug, Deserialize)]
|
||||
struct SyncBillingSubscriptionBody {
|
||||
github_user_id: i32,
|
||||
@@ -742,23 +661,13 @@ async fn sync_billing_subscription(
|
||||
.get_billing_customer_by_user_id(user.id)
|
||||
.await?
|
||||
.context("billing customer not found")?;
|
||||
let stripe_customer_id = billing_customer
|
||||
.stripe_customer_id
|
||||
.parse::<stripe::CustomerId>()
|
||||
.context("failed to parse Stripe customer ID from database")?;
|
||||
let stripe_customer_id = StripeCustomerId(billing_customer.stripe_customer_id.clone().into());
|
||||
|
||||
let subscriptions = Subscription::list(
|
||||
&stripe_client,
|
||||
&stripe::ListSubscriptions {
|
||||
customer: Some(stripe_customer_id),
|
||||
// Sync all non-canceled subscriptions.
|
||||
status: None,
|
||||
..Default::default()
|
||||
},
|
||||
)
|
||||
.await?;
|
||||
let subscriptions = stripe_client
|
||||
.list_subscriptions_for_customer(&stripe_customer_id)
|
||||
.await?;
|
||||
|
||||
for subscription in subscriptions.data {
|
||||
for subscription in subscriptions {
|
||||
let subscription_id = subscription.id.clone();
|
||||
|
||||
sync_subscription(&app, &stripe_client, subscription)
|
||||
@@ -806,6 +715,10 @@ const NUMBER_OF_ALREADY_PROCESSED_PAGES_BEFORE_WE_STOP: usize = 4;
|
||||
/// Polls the Stripe events API periodically to reconcile the records in our
|
||||
/// database with the data in Stripe.
|
||||
pub fn poll_stripe_events_periodically(app: Arc<AppState>, rpc_server: Arc<Server>) {
|
||||
let Some(real_stripe_client) = app.real_stripe_client.clone() else {
|
||||
log::warn!("failed to retrieve Stripe client");
|
||||
return;
|
||||
};
|
||||
let Some(stripe_client) = app.stripe_client.clone() else {
|
||||
log::warn!("failed to retrieve Stripe client");
|
||||
return;
|
||||
@@ -816,7 +729,7 @@ pub fn poll_stripe_events_periodically(app: Arc<AppState>, rpc_server: Arc<Serve
|
||||
let executor = executor.clone();
|
||||
async move {
|
||||
loop {
|
||||
poll_stripe_events(&app, &rpc_server, &stripe_client)
|
||||
poll_stripe_events(&app, &rpc_server, &stripe_client, &real_stripe_client)
|
||||
.await
|
||||
.log_err();
|
||||
|
||||
@@ -829,7 +742,8 @@ pub fn poll_stripe_events_periodically(app: Arc<AppState>, rpc_server: Arc<Serve
|
||||
async fn poll_stripe_events(
|
||||
app: &Arc<AppState>,
|
||||
rpc_server: &Arc<Server>,
|
||||
stripe_client: &stripe::Client,
|
||||
stripe_client: &Arc<dyn StripeClient>,
|
||||
real_stripe_client: &stripe::Client,
|
||||
) -> anyhow::Result<()> {
|
||||
fn event_type_to_string(event_type: EventType) -> String {
|
||||
// Calling `to_string` on `stripe::EventType` members gives us a quoted string,
|
||||
@@ -861,7 +775,7 @@ async fn poll_stripe_events(
|
||||
params.types = Some(event_types.clone());
|
||||
params.limit = Some(EVENTS_LIMIT_PER_PAGE);
|
||||
|
||||
let mut event_pages = stripe::Event::list(&stripe_client, ¶ms)
|
||||
let mut event_pages = stripe::Event::list(&real_stripe_client, ¶ms)
|
||||
.await?
|
||||
.paginate(params);
|
||||
|
||||
@@ -905,7 +819,7 @@ async fn poll_stripe_events(
|
||||
break;
|
||||
} else {
|
||||
log::info!("Stripe events: retrieving next page");
|
||||
event_pages = event_pages.next(&stripe_client).await?;
|
||||
event_pages = event_pages.next(&real_stripe_client).await?;
|
||||
}
|
||||
} else {
|
||||
break;
|
||||
@@ -945,7 +859,7 @@ async fn poll_stripe_events(
|
||||
|
||||
let process_result = match event.type_ {
|
||||
EventType::CustomerCreated | EventType::CustomerUpdated => {
|
||||
handle_customer_event(app, stripe_client, event).await
|
||||
handle_customer_event(app, real_stripe_client, event).await
|
||||
}
|
||||
EventType::CustomerSubscriptionCreated
|
||||
| EventType::CustomerSubscriptionUpdated
|
||||
@@ -1020,8 +934,8 @@ async fn handle_customer_event(
|
||||
|
||||
async fn sync_subscription(
|
||||
app: &Arc<AppState>,
|
||||
stripe_client: &stripe::Client,
|
||||
subscription: stripe::Subscription,
|
||||
stripe_client: &Arc<dyn StripeClient>,
|
||||
subscription: StripeSubscription,
|
||||
) -> anyhow::Result<billing_customer::Model> {
|
||||
let subscription_kind = if let Some(stripe_billing) = &app.stripe_billing {
|
||||
stripe_billing
|
||||
@@ -1032,7 +946,7 @@ async fn sync_subscription(
|
||||
};
|
||||
|
||||
let billing_customer =
|
||||
find_or_create_billing_customer(app, stripe_client, subscription.customer)
|
||||
find_or_create_billing_customer(app, stripe_client.as_ref(), &subscription.customer)
|
||||
.await?
|
||||
.context("billing customer not found")?;
|
||||
|
||||
@@ -1060,7 +974,7 @@ async fn sync_subscription(
|
||||
.as_ref()
|
||||
.and_then(|details| details.reason)
|
||||
.map_or(false, |reason| {
|
||||
reason == CancellationDetailsReason::PaymentFailed
|
||||
reason == StripeCancellationDetailsReason::PaymentFailed
|
||||
});
|
||||
|
||||
if was_canceled_due_to_payment_failure {
|
||||
@@ -1077,7 +991,7 @@ async fn sync_subscription(
|
||||
|
||||
if let Some(existing_subscription) = app
|
||||
.db
|
||||
.get_billing_subscription_by_stripe_subscription_id(&subscription.id)
|
||||
.get_billing_subscription_by_stripe_subscription_id(subscription.id.0.as_ref())
|
||||
.await?
|
||||
{
|
||||
app.db
|
||||
@@ -1118,20 +1032,13 @@ async fn sync_subscription(
|
||||
if existing_subscription.kind == Some(SubscriptionKind::ZedFree)
|
||||
&& subscription_kind == Some(SubscriptionKind::ZedProTrial)
|
||||
{
|
||||
let stripe_subscription_id = existing_subscription
|
||||
.stripe_subscription_id
|
||||
.parse::<stripe::SubscriptionId>()
|
||||
.context("failed to parse Stripe subscription ID from database")?;
|
||||
let stripe_subscription_id = StripeSubscriptionId(
|
||||
existing_subscription.stripe_subscription_id.clone().into(),
|
||||
);
|
||||
|
||||
Subscription::cancel(
|
||||
&stripe_client,
|
||||
&stripe_subscription_id,
|
||||
stripe::CancelSubscription {
|
||||
invoice_now: None,
|
||||
..Default::default()
|
||||
},
|
||||
)
|
||||
.await?;
|
||||
stripe_client
|
||||
.cancel_subscription(&stripe_subscription_id)
|
||||
.await?;
|
||||
} else {
|
||||
// If the user already has an active billing subscription, ignore the
|
||||
// event and return an `Ok` to signal that it was processed
|
||||
@@ -1198,7 +1105,7 @@ async fn sync_subscription(
|
||||
async fn handle_customer_subscription_event(
|
||||
app: &Arc<AppState>,
|
||||
rpc_server: &Arc<Server>,
|
||||
stripe_client: &stripe::Client,
|
||||
stripe_client: &Arc<dyn StripeClient>,
|
||||
event: stripe::Event,
|
||||
) -> anyhow::Result<()> {
|
||||
let EventObject::Subscription(subscription) = event.data.object else {
|
||||
@@ -1207,7 +1114,7 @@ async fn handle_customer_subscription_event(
|
||||
|
||||
log::info!("handling Stripe {} event: {}", event.type_, event.id);
|
||||
|
||||
let billing_customer = sync_subscription(app, stripe_client, subscription).await?;
|
||||
let billing_customer = sync_subscription(app, stripe_client, subscription.into()).await?;
|
||||
|
||||
// When the user's subscription changes, push down any changes to their plan.
|
||||
rpc_server
|
||||
@@ -1403,30 +1310,20 @@ impl From<CancellationDetailsReason> for StripeCancellationReason {
|
||||
/// Finds or creates a billing customer using the provided customer.
|
||||
pub async fn find_or_create_billing_customer(
|
||||
app: &Arc<AppState>,
|
||||
stripe_client: &stripe::Client,
|
||||
customer_or_id: Expandable<Customer>,
|
||||
stripe_client: &dyn StripeClient,
|
||||
customer_id: &StripeCustomerId,
|
||||
) -> anyhow::Result<Option<billing_customer::Model>> {
|
||||
let customer_id = match &customer_or_id {
|
||||
Expandable::Id(id) => id,
|
||||
Expandable::Object(customer) => customer.id.as_ref(),
|
||||
};
|
||||
|
||||
// If we already have a billing customer record associated with the Stripe customer,
|
||||
// there's nothing more we need to do.
|
||||
if let Some(billing_customer) = app
|
||||
.db
|
||||
.get_billing_customer_by_stripe_customer_id(customer_id)
|
||||
.get_billing_customer_by_stripe_customer_id(customer_id.0.as_ref())
|
||||
.await?
|
||||
{
|
||||
return Ok(Some(billing_customer));
|
||||
}
|
||||
|
||||
// If all we have is a customer ID, resolve it to a full customer record by
|
||||
// hitting the Stripe API.
|
||||
let customer = match customer_or_id {
|
||||
Expandable::Id(id) => Customer::retrieve(stripe_client, &id, &[]).await?,
|
||||
Expandable::Object(customer) => *customer,
|
||||
};
|
||||
let customer = stripe_client.get_customer(customer_id).await?;
|
||||
|
||||
let Some(email) = customer.email else {
|
||||
return Ok(None);
|
||||
|
||||
@@ -1,4 +1,5 @@
|
||||
use crate::db::{BillingCustomerId, BillingSubscriptionId};
|
||||
use crate::stripe_client;
|
||||
use chrono::{Datelike as _, NaiveDate, Utc};
|
||||
use sea_orm::entity::prelude::*;
|
||||
use serde::Serialize;
|
||||
@@ -159,3 +160,17 @@ pub enum StripeCancellationReason {
|
||||
#[sea_orm(string_value = "payment_failed")]
|
||||
PaymentFailed,
|
||||
}
|
||||
|
||||
impl From<stripe_client::StripeCancellationDetailsReason> for StripeCancellationReason {
|
||||
fn from(value: stripe_client::StripeCancellationDetailsReason) -> Self {
|
||||
match value {
|
||||
stripe_client::StripeCancellationDetailsReason::CancellationRequested => {
|
||||
Self::CancellationRequested
|
||||
}
|
||||
stripe_client::StripeCancellationDetailsReason::PaymentDisputed => {
|
||||
Self::PaymentDisputed
|
||||
}
|
||||
stripe_client::StripeCancellationDetailsReason::PaymentFailed => Self::PaymentFailed,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -30,6 +30,7 @@ use std::{path::PathBuf, sync::Arc};
|
||||
use util::ResultExt;
|
||||
|
||||
use crate::stripe_billing::StripeBilling;
|
||||
use crate::stripe_client::{RealStripeClient, StripeClient};
|
||||
|
||||
pub type Result<T, E = Error> = std::result::Result<T, E>;
|
||||
|
||||
@@ -270,7 +271,10 @@ pub struct AppState {
|
||||
pub llm_db: Option<Arc<LlmDatabase>>,
|
||||
pub livekit_client: Option<Arc<dyn livekit_api::Client>>,
|
||||
pub blob_store_client: Option<aws_sdk_s3::Client>,
|
||||
pub stripe_client: Option<Arc<stripe::Client>>,
|
||||
/// This is a real instance of the Stripe client; we're working to replace references to this with the
|
||||
/// [`StripeClient`] trait.
|
||||
pub real_stripe_client: Option<Arc<stripe::Client>>,
|
||||
pub stripe_client: Option<Arc<dyn StripeClient>>,
|
||||
pub stripe_billing: Option<Arc<StripeBilling>>,
|
||||
pub executor: Executor,
|
||||
pub kinesis_client: Option<::aws_sdk_kinesis::Client>,
|
||||
@@ -323,7 +327,9 @@ impl AppState {
|
||||
stripe_billing: stripe_client
|
||||
.clone()
|
||||
.map(|stripe_client| Arc::new(StripeBilling::new(stripe_client))),
|
||||
stripe_client,
|
||||
real_stripe_client: stripe_client.clone(),
|
||||
stripe_client: stripe_client
|
||||
.map(|stripe_client| Arc::new(RealStripeClient::new(stripe_client)) as _),
|
||||
executor,
|
||||
kinesis_client: if config.kinesis_access_key.is_some() {
|
||||
build_kinesis_client(&config).await.log_err()
|
||||
|
||||
@@ -4034,23 +4034,19 @@ async fn get_llm_api_token(
|
||||
.as_ref()
|
||||
.context("failed to retrieve Stripe billing object")?;
|
||||
|
||||
let billing_customer =
|
||||
if let Some(billing_customer) = db.get_billing_customer_by_user_id(user.id).await? {
|
||||
billing_customer
|
||||
} else {
|
||||
let customer_id = stripe_billing
|
||||
.find_or_create_customer_by_email(user.email_address.as_deref())
|
||||
.await?
|
||||
.try_into()?;
|
||||
let billing_customer = if let Some(billing_customer) =
|
||||
db.get_billing_customer_by_user_id(user.id).await?
|
||||
{
|
||||
billing_customer
|
||||
} else {
|
||||
let customer_id = stripe_billing
|
||||
.find_or_create_customer_by_email(user.email_address.as_deref())
|
||||
.await?;
|
||||
|
||||
find_or_create_billing_customer(
|
||||
&session.app_state,
|
||||
&stripe_client,
|
||||
stripe::Expandable::Id(customer_id),
|
||||
)
|
||||
find_or_create_billing_customer(&session.app_state, stripe_client.as_ref(), &customer_id)
|
||||
.await?
|
||||
.context("billing customer not found")?
|
||||
};
|
||||
};
|
||||
|
||||
let billing_subscription =
|
||||
if let Some(billing_subscription) = db.get_active_billing_subscription(user.id).await? {
|
||||
|
||||
@@ -111,14 +111,12 @@ impl StripeBilling {
|
||||
|
||||
pub async fn determine_subscription_kind(
|
||||
&self,
|
||||
subscription: &stripe::Subscription,
|
||||
subscription: &StripeSubscription,
|
||||
) -> Option<SubscriptionKind> {
|
||||
let zed_pro_price_id: stripe::PriceId =
|
||||
self.zed_pro_price_id().await.ok()?.try_into().ok()?;
|
||||
let zed_free_price_id: stripe::PriceId =
|
||||
self.zed_free_price_id().await.ok()?.try_into().ok()?;
|
||||
let zed_pro_price_id = self.zed_pro_price_id().await.ok()?;
|
||||
let zed_free_price_id = self.zed_free_price_id().await.ok()?;
|
||||
|
||||
subscription.items.data.iter().find_map(|item| {
|
||||
subscription.items.iter().find_map(|item| {
|
||||
let price = item.price.as_ref()?;
|
||||
|
||||
if price.id == zed_pro_price_id {
|
||||
|
||||
@@ -39,6 +39,8 @@ pub struct StripeSubscription {
|
||||
pub current_period_end: i64,
|
||||
pub current_period_start: i64,
|
||||
pub items: Vec<StripeSubscriptionItem>,
|
||||
pub cancel_at: Option<i64>,
|
||||
pub cancellation_details: Option<StripeCancellationDetails>,
|
||||
}
|
||||
|
||||
#[derive(Debug, PartialEq, Eq, Hash, Clone, derive_more::Display)]
|
||||
@@ -50,6 +52,18 @@ pub struct StripeSubscriptionItem {
|
||||
pub price: Option<StripePrice>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, PartialEq)]
|
||||
pub struct StripeCancellationDetails {
|
||||
pub reason: Option<StripeCancellationDetailsReason>,
|
||||
}
|
||||
|
||||
#[derive(Debug, PartialEq, Eq, Clone, Copy)]
|
||||
pub enum StripeCancellationDetailsReason {
|
||||
CancellationRequested,
|
||||
PaymentDisputed,
|
||||
PaymentFailed,
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct StripeCreateSubscriptionParams {
|
||||
pub customer: StripeCustomerId,
|
||||
@@ -175,6 +189,8 @@ pub struct StripeCheckoutSession {
|
||||
pub trait StripeClient: Send + Sync {
|
||||
async fn list_customers_by_email(&self, email: &str) -> Result<Vec<StripeCustomer>>;
|
||||
|
||||
async fn get_customer(&self, customer_id: &StripeCustomerId) -> Result<StripeCustomer>;
|
||||
|
||||
async fn create_customer(&self, params: CreateCustomerParams<'_>) -> Result<StripeCustomer>;
|
||||
|
||||
async fn list_subscriptions_for_customer(
|
||||
@@ -198,6 +214,8 @@ pub trait StripeClient: Send + Sync {
|
||||
params: UpdateSubscriptionParams,
|
||||
) -> Result<()>;
|
||||
|
||||
async fn cancel_subscription(&self, subscription_id: &StripeSubscriptionId) -> Result<()>;
|
||||
|
||||
async fn list_prices(&self) -> Result<Vec<StripePrice>>;
|
||||
|
||||
async fn list_meters(&self) -> Result<Vec<StripeMeter>>;
|
||||
|
||||
@@ -74,6 +74,14 @@ impl StripeClient for FakeStripeClient {
|
||||
.collect())
|
||||
}
|
||||
|
||||
async fn get_customer(&self, customer_id: &StripeCustomerId) -> Result<StripeCustomer> {
|
||||
self.customers
|
||||
.lock()
|
||||
.get(customer_id)
|
||||
.cloned()
|
||||
.ok_or_else(|| anyhow!("no customer found for {customer_id:?}"))
|
||||
}
|
||||
|
||||
async fn create_customer(&self, params: CreateCustomerParams<'_>) -> Result<StripeCustomer> {
|
||||
let customer = StripeCustomer {
|
||||
id: StripeCustomerId(format!("cus_{}", Uuid::new_v4()).into()),
|
||||
@@ -135,6 +143,8 @@ impl StripeClient for FakeStripeClient {
|
||||
.and_then(|price_id| self.prices.lock().get(&price_id).cloned()),
|
||||
})
|
||||
.collect(),
|
||||
cancel_at: None,
|
||||
cancellation_details: None,
|
||||
};
|
||||
|
||||
self.subscriptions
|
||||
@@ -158,6 +168,13 @@ impl StripeClient for FakeStripeClient {
|
||||
Ok(())
|
||||
}
|
||||
|
||||
    async fn cancel_subscription(&self, subscription_id: &StripeSubscriptionId) -> Result<()> {
        // TODO: Implement fake subscription cancellation.
        let _ = subscription_id;

        Ok(())
    }

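
The fake client leaves cancellation as a TODO above. If a test ever needs to observe a cancellation, one possible way to flesh it out is sketched below; the `subscriptions` map layout and the `canceled` flag are assumptions made for illustration, not the real `FakeStripeClient` internals:

```rust
use std::collections::HashMap;
use std::sync::Mutex;

use anyhow::{Result, anyhow};

// Hypothetical, simplified stand-ins for the test-support types.
#[derive(Clone, Debug, PartialEq, Eq, Hash)]
struct StripeSubscriptionId(String);

#[derive(Clone, Debug)]
struct FakeSubscription {
    canceled: bool,
}

struct FakeStripe {
    subscriptions: Mutex<HashMap<StripeSubscriptionId, FakeSubscription>>,
}

impl FakeStripe {
    /// One way to fill in the TODO: mark the stored subscription as canceled
    /// so later assertions in a test can observe the state change.
    fn cancel_subscription(&self, id: &StripeSubscriptionId) -> Result<()> {
        let mut subscriptions = self.subscriptions.lock().unwrap();
        let subscription = subscriptions
            .get_mut(id)
            .ok_or_else(|| anyhow!("no subscription found for {id:?}"))?;
        subscription.canceled = true;
        Ok(())
    }
}

fn main() -> Result<()> {
    let id = StripeSubscriptionId("sub_test".into());
    let fake = FakeStripe {
        subscriptions: Mutex::new(HashMap::from([(
            id.clone(),
            FakeSubscription { canceled: false },
        )])),
    };
    fake.cancel_subscription(&id)?;
    assert!(fake.subscriptions.lock().unwrap()[&id].canceled);
    Ok(())
}
```
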
async fn list_prices(&self) -> Result<Vec<StripePrice>> {
|
||||
let prices = self.prices.lock().values().cloned().collect();
|
||||
|
||||
|
||||
@@ -5,9 +5,9 @@ use anyhow::{Context as _, Result, anyhow};
|
||||
use async_trait::async_trait;
|
||||
use serde::Serialize;
|
||||
use stripe::{
|
||||
CheckoutSession, CheckoutSessionMode, CheckoutSessionPaymentMethodCollection,
|
||||
CreateCheckoutSession, CreateCheckoutSessionLineItems, CreateCheckoutSessionSubscriptionData,
|
||||
CreateCheckoutSessionSubscriptionDataTrialSettings,
|
||||
CancellationDetails, CancellationDetailsReason, CheckoutSession, CheckoutSessionMode,
|
||||
CheckoutSessionPaymentMethodCollection, CreateCheckoutSession, CreateCheckoutSessionLineItems,
|
||||
CreateCheckoutSessionSubscriptionData, CreateCheckoutSessionSubscriptionDataTrialSettings,
|
||||
CreateCheckoutSessionSubscriptionDataTrialSettingsEndBehavior,
|
||||
CreateCheckoutSessionSubscriptionDataTrialSettingsEndBehaviorMissingPaymentMethod,
|
||||
CreateCustomer, Customer, CustomerId, ListCustomers, Price, PriceId, Recurring, Subscription,
|
||||
@@ -17,9 +17,9 @@ use stripe::{
|
||||
};
|
||||
|
||||
use crate::stripe_client::{
|
||||
CreateCustomerParams, StripeCheckoutSession, StripeCheckoutSessionMode,
|
||||
StripeCheckoutSessionPaymentMethodCollection, StripeClient,
|
||||
StripeCreateCheckoutSessionLineItems, StripeCreateCheckoutSessionParams,
|
||||
CreateCustomerParams, StripeCancellationDetails, StripeCancellationDetailsReason,
|
||||
StripeCheckoutSession, StripeCheckoutSessionMode, StripeCheckoutSessionPaymentMethodCollection,
|
||||
StripeClient, StripeCreateCheckoutSessionLineItems, StripeCreateCheckoutSessionParams,
|
||||
StripeCreateCheckoutSessionSubscriptionData, StripeCreateMeterEventParams,
|
||||
StripeCreateSubscriptionParams, StripeCustomer, StripeCustomerId, StripeMeter, StripePrice,
|
||||
StripePriceId, StripePriceRecurring, StripeSubscription, StripeSubscriptionId,
|
||||
@@ -57,6 +57,14 @@ impl StripeClient for RealStripeClient {
|
||||
.collect())
|
||||
}
|
||||
|
||||
async fn get_customer(&self, customer_id: &StripeCustomerId) -> Result<StripeCustomer> {
|
||||
let customer_id = customer_id.try_into()?;
|
||||
|
||||
let customer = Customer::retrieve(&self.client, &customer_id, &[]).await?;
|
||||
|
||||
Ok(StripeCustomer::from(customer))
|
||||
}
|
||||
|
||||
async fn create_customer(&self, params: CreateCustomerParams<'_>) -> Result<StripeCustomer> {
|
||||
let customer = Customer::create(
|
||||
&self.client,
|
||||
@@ -157,6 +165,22 @@ impl StripeClient for RealStripeClient {
|
||||
Ok(())
|
||||
}
|
||||
|
||||
async fn cancel_subscription(&self, subscription_id: &StripeSubscriptionId) -> Result<()> {
|
||||
let subscription_id = subscription_id.try_into()?;
|
||||
|
||||
Subscription::cancel(
|
||||
&self.client,
|
||||
&subscription_id,
|
||||
stripe::CancelSubscription {
|
||||
invoice_now: None,
|
||||
..Default::default()
|
||||
},
|
||||
)
|
||||
.await?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
async fn list_prices(&self) -> Result<Vec<StripePrice>> {
|
||||
let response = stripe::Price::list(
|
||||
&self.client,
|
||||
@@ -273,6 +297,26 @@ impl From<Subscription> for StripeSubscription {
|
||||
current_period_start: value.current_period_start,
|
||||
current_period_end: value.current_period_end,
|
||||
items: value.items.data.into_iter().map(Into::into).collect(),
|
||||
cancel_at: value.cancel_at,
|
||||
cancellation_details: value.cancellation_details.map(Into::into),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl From<CancellationDetails> for StripeCancellationDetails {
|
||||
fn from(value: CancellationDetails) -> Self {
|
||||
Self {
|
||||
reason: value.reason.map(Into::into),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl From<CancellationDetailsReason> for StripeCancellationDetailsReason {
|
||||
fn from(value: CancellationDetailsReason) -> Self {
|
||||
match value {
|
||||
CancellationDetailsReason::CancellationRequested => Self::CancellationRequested,
|
||||
CancellationDetailsReason::PaymentDisputed => Self::PaymentDisputed,
|
||||
CancellationDetailsReason::PaymentFailed => Self::PaymentFailed,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1010,7 +1010,6 @@ async fn test_peers_following_each_other(cx_a: &mut TestAppContext, cx_b: &mut T
|
||||
workspace_b.update_in(cx_b, |workspace, window, cx| {
|
||||
workspace.active_pane().update(cx, |pane, cx| {
|
||||
pane.close_inactive_items(&Default::default(), window, cx)
|
||||
.unwrap()
|
||||
.detach();
|
||||
});
|
||||
});
|
||||
|
||||
@@ -172,6 +172,8 @@ async fn test_subscribe_to_price() {
|
||||
current_period_start: now.timestamp(),
|
||||
current_period_end: (now + Duration::days(30)).timestamp(),
|
||||
items: vec![],
|
||||
cancel_at: None,
|
||||
cancellation_details: None,
|
||||
};
|
||||
stripe_client
|
||||
.subscriptions
|
||||
@@ -211,6 +213,8 @@ async fn test_subscribe_to_price() {
|
||||
id: StripeSubscriptionItemId("si_test".into()),
|
||||
price: Some(price.clone()),
|
||||
}],
|
||||
cancel_at: None,
|
||||
cancellation_details: None,
|
||||
};
|
||||
stripe_client
|
||||
.subscriptions
|
||||
@@ -280,6 +284,8 @@ async fn test_subscribe_to_zed_free() {
|
||||
id: StripeSubscriptionItemId("si_test".into()),
|
||||
price: Some(zed_pro_price.clone()),
|
||||
}],
|
||||
cancel_at: None,
|
||||
cancellation_details: None,
|
||||
};
|
||||
stripe_client.subscriptions.lock().insert(
|
||||
existing_subscription.id.clone(),
|
||||
@@ -309,6 +315,8 @@ async fn test_subscribe_to_zed_free() {
|
||||
id: StripeSubscriptionItemId("si_test".into()),
|
||||
price: Some(zed_pro_price.clone()),
|
||||
}],
|
||||
cancel_at: None,
|
||||
cancellation_details: None,
|
||||
};
|
||||
stripe_client.subscriptions.lock().insert(
|
||||
existing_subscription.id.clone(),
|
||||
|
||||
@@ -1,3 +1,4 @@
|
||||
use crate::stripe_client::FakeStripeClient;
|
||||
use crate::{
|
||||
AppState, Config,
|
||||
db::{NewUserParams, UserId, tests::TestDb},
|
||||
@@ -522,7 +523,8 @@ impl TestServer {
|
||||
llm_db: None,
|
||||
livekit_client: Some(Arc::new(livekit_test_server.create_api_client())),
|
||||
blob_store_client: None,
|
||||
stripe_client: None,
|
||||
real_stripe_client: None,
|
||||
stripe_client: Some(Arc::new(FakeStripeClient::new())),
|
||||
stripe_billing: None,
|
||||
executor,
|
||||
kinesis_client: None,
|
||||
|
||||
@@ -354,6 +354,10 @@ impl ChannelView {
|
||||
editor.set_read_only(true);
|
||||
cx.notify();
|
||||
}),
|
||||
ChannelBufferEvent::Connected => self.editor.update(cx, |editor, cx| {
|
||||
editor.set_read_only(false);
|
||||
cx.notify();
|
||||
}),
|
||||
ChannelBufferEvent::ChannelChanged => {
|
||||
self.editor.update(cx, |_, cx| {
|
||||
cx.emit(editor::EditorEvent::TitleChanged);
|
||||
|
||||
@@ -12,7 +12,7 @@ use language::{
|
||||
Anchor, Buffer, BufferSnapshot, CodeLabel, LanguageRegistry, ToOffset,
|
||||
language_settings::SoftWrap,
|
||||
};
|
||||
use project::{Completion, CompletionSource, search::SearchQuery};
|
||||
use project::{Completion, CompletionResponse, CompletionSource, search::SearchQuery};
|
||||
use settings::Settings;
|
||||
use std::{
|
||||
cell::RefCell,
|
||||
@@ -64,9 +64,9 @@ impl CompletionProvider for MessageEditorCompletionProvider {
|
||||
_: editor::CompletionContext,
|
||||
_window: &mut Window,
|
||||
cx: &mut Context<Editor>,
|
||||
) -> Task<Result<Option<Vec<Completion>>>> {
|
||||
) -> Task<Result<Vec<CompletionResponse>>> {
|
||||
let Some(handle) = self.0.upgrade() else {
|
||||
return Task::ready(Ok(None));
|
||||
return Task::ready(Ok(Vec::new()));
|
||||
};
|
||||
handle.update(cx, |message_editor, cx| {
|
||||
message_editor.completions(buffer, buffer_position, cx)
|
||||
@@ -248,22 +248,21 @@ impl MessageEditor {
|
||||
buffer: &Entity<Buffer>,
|
||||
end_anchor: Anchor,
|
||||
cx: &mut Context<Self>,
|
||||
) -> Task<Result<Option<Vec<Completion>>>> {
|
||||
) -> Task<Result<Vec<CompletionResponse>>> {
|
||||
if let Some((start_anchor, query, candidates)) =
|
||||
self.collect_mention_candidates(buffer, end_anchor, cx)
|
||||
{
|
||||
if !candidates.is_empty() {
|
||||
return cx.spawn(async move |_, cx| {
|
||||
Ok(Some(
|
||||
Self::resolve_completions_for_candidates(
|
||||
&cx,
|
||||
query.as_str(),
|
||||
&candidates,
|
||||
start_anchor..end_anchor,
|
||||
Self::completion_for_mention,
|
||||
)
|
||||
.await,
|
||||
))
|
||||
let completion_response = Self::resolve_completions_for_candidates(
|
||||
&cx,
|
||||
query.as_str(),
|
||||
&candidates,
|
||||
start_anchor..end_anchor,
|
||||
Self::completion_for_mention,
|
||||
)
|
||||
.await;
|
||||
Ok(vec![completion_response])
|
||||
});
|
||||
}
|
||||
}
|
||||
@@ -273,21 +272,23 @@ impl MessageEditor {
|
||||
{
|
||||
if !candidates.is_empty() {
|
||||
return cx.spawn(async move |_, cx| {
|
||||
Ok(Some(
|
||||
Self::resolve_completions_for_candidates(
|
||||
&cx,
|
||||
query.as_str(),
|
||||
candidates,
|
||||
start_anchor..end_anchor,
|
||||
Self::completion_for_emoji,
|
||||
)
|
||||
.await,
|
||||
))
|
||||
let completion_response = Self::resolve_completions_for_candidates(
|
||||
&cx,
|
||||
query.as_str(),
|
||||
candidates,
|
||||
start_anchor..end_anchor,
|
||||
Self::completion_for_emoji,
|
||||
)
|
||||
.await;
|
||||
Ok(vec![completion_response])
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
Task::ready(Ok(Some(Vec::new())))
|
||||
Task::ready(Ok(vec![CompletionResponse {
|
||||
completions: Vec::new(),
|
||||
is_incomplete: false,
|
||||
}]))
|
||||
}
|
||||
|
||||
async fn resolve_completions_for_candidates(
|
||||
@@ -296,18 +297,19 @@ impl MessageEditor {
|
||||
candidates: &[StringMatchCandidate],
|
||||
range: Range<Anchor>,
|
||||
completion_fn: impl Fn(&StringMatch) -> (String, CodeLabel),
|
||||
) -> Vec<Completion> {
|
||||
) -> CompletionResponse {
|
||||
const LIMIT: usize = 10;
|
||||
let matches = fuzzy::match_strings(
|
||||
candidates,
|
||||
query,
|
||||
true,
|
||||
10,
|
||||
LIMIT,
|
||||
&Default::default(),
|
||||
cx.background_executor().clone(),
|
||||
)
|
||||
.await;
|
||||
|
||||
matches
|
||||
let completions = matches
|
||||
.into_iter()
|
||||
.map(|mat| {
|
||||
let (new_text, label) = completion_fn(&mat);
|
||||
@@ -322,7 +324,12 @@ impl MessageEditor {
|
||||
source: CompletionSource::Custom,
|
||||
}
|
||||
})
|
||||
.collect()
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
        CompletionResponse {
            is_incomplete: completions.len() >= LIMIT,
            completions,
        }
    }

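
Reporting `is_incomplete` whenever the fuzzy matcher was capped at `LIMIT` tells the editor that narrowing the query may surface different results, so it should re-query instead of just filtering the cached list. A tiny sketch of that contract with generic types (not Zed's actual `CompletionResponse`):

```rust
struct CompletionResponse<T> {
    completions: Vec<T>,
    is_incomplete: bool,
}

/// Truncate candidates to `limit`; report the batch as incomplete whenever
/// the cap was hit, since better matches may exist for a longer query.
fn respond<T>(mut candidates: Vec<T>, limit: usize) -> CompletionResponse<T> {
    let is_incomplete = candidates.len() >= limit;
    candidates.truncate(limit);
    CompletionResponse { completions: candidates, is_incomplete }
}

fn main() {
    assert!(respond(vec![1, 2, 3], 2).is_incomplete);
    assert!(!respond(vec![1], 2).is_incomplete);
}
```
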
fn completion_for_mention(mat: &StringMatch) -> (String, CodeLabel) {
|
||||
|
||||
@@ -298,6 +298,7 @@ pub async fn download_adapter_from_github(
|
||||
response.status().to_string()
|
||||
);
|
||||
|
||||
delegate.output_to_console("Download complete".to_owned());
|
||||
match file_type {
|
||||
DownloadedFileType::GzipTar => {
|
||||
let decompressed_bytes = GzipDecoder::new(BufReader::new(response.body_mut()));
|
||||
@@ -369,21 +370,19 @@ pub trait DebugAdapter: 'static + Send + Sync {
|
||||
None
|
||||
}
|
||||
|
||||
    fn validate_config(
    /// Extracts the kind (attach/launch) of debug configuration from the given JSON config.
    /// This method should only return an error when the kind cannot be determined for a given configuration;
    /// in particular, it *should not* validate whether the request as a whole is valid, because that's best left to the debug adapter itself to decide.
    fn request_kind(
        &self,
        config: &serde_json::Value,
    ) -> Result<StartDebuggingRequestArgumentsRequest> {
        let map = config.as_object().context("Config isn't an object")?;

        let request_variant = map
            .get("request")
            .and_then(|val| val.as_str())
            .context("request argument is not found or invalid")?;

        match request_variant {
            "launch" => Ok(StartDebuggingRequestArgumentsRequest::Launch),
            "attach" => Ok(StartDebuggingRequestArgumentsRequest::Attach),
            _ => Err(anyhow!("request must be either 'launch' or 'attach'")),
        match config.get("request") {
            Some(val) if val == "launch" => Ok(StartDebuggingRequestArgumentsRequest::Launch),
            Some(val) if val == "attach" => Ok(StartDebuggingRequestArgumentsRequest::Attach),
            _ => Err(anyhow!(
                "missing or invalid `request` field in config. Expected 'launch' or 'attach'"
            )),
        }
    }

@@ -413,7 +412,7 @@ impl DebugAdapter for FakeAdapter {
|
||||
serde_json::Value::Null
|
||||
}
|
||||
|
||||
fn validate_config(
|
||||
fn request_kind(
|
||||
&self,
|
||||
config: &serde_json::Value,
|
||||
) -> Result<StartDebuggingRequestArgumentsRequest> {
|
||||
@@ -458,7 +457,7 @@ impl DebugAdapter for FakeAdapter {
|
||||
envs: HashMap::default(),
|
||||
cwd: None,
|
||||
request_args: StartDebuggingRequestArguments {
|
||||
request: self.validate_config(&task_definition.config)?,
|
||||
request: self.request_kind(&task_definition.config)?,
|
||||
configuration: task_definition.config.clone(),
|
||||
},
|
||||
})
|
||||
|
||||
@@ -52,7 +52,7 @@ pub fn send_telemetry(scenario: &DebugScenario, location: TelemetrySpawnLocation
|
||||
return;
|
||||
};
|
||||
let kind = adapter
|
||||
.validate_config(&scenario.config)
|
||||
.request_kind(&scenario.config)
|
||||
.ok()
|
||||
.map(serde_json::to_value)
|
||||
.and_then(Result::ok);
|
||||
|
||||
@@ -4,7 +4,7 @@ use dap_types::{
|
||||
messages::{Message, Response},
|
||||
};
|
||||
use futures::{AsyncRead, AsyncReadExt as _, AsyncWrite, FutureExt as _, channel::oneshot, select};
|
||||
use gpui::AsyncApp;
|
||||
use gpui::{AppContext as _, AsyncApp, Task};
|
||||
use settings::Settings as _;
|
||||
use smallvec::SmallVec;
|
||||
use smol::{
|
||||
@@ -22,7 +22,7 @@ use std::{
|
||||
time::Duration,
|
||||
};
|
||||
use task::TcpArgumentsTemplate;
|
||||
use util::{ResultExt as _, TryFutureExt};
|
||||
use util::{ConnectionResult, ResultExt as _};
|
||||
|
||||
use crate::{adapters::DebugAdapterBinary, debugger_settings::DebuggerSettings};
|
||||
|
||||
@@ -126,7 +126,7 @@ pub(crate) struct TransportDelegate {
|
||||
pending_requests: Requests,
|
||||
transport: Transport,
|
||||
server_tx: Arc<Mutex<Option<Sender<Message>>>>,
|
||||
_tasks: Vec<gpui::Task<Option<()>>>,
|
||||
_tasks: Vec<Task<()>>,
|
||||
}
|
||||
|
||||
impl TransportDelegate {
|
||||
@@ -141,7 +141,7 @@ impl TransportDelegate {
|
||||
log_handlers: Default::default(),
|
||||
current_requests: Default::default(),
|
||||
pending_requests: Default::default(),
|
||||
_tasks: Default::default(),
|
||||
_tasks: Vec::new(),
|
||||
};
|
||||
let messages = this.start_handlers(transport_pipes, cx).await?;
|
||||
Ok((messages, this))
|
||||
@@ -166,45 +166,76 @@ impl TransportDelegate {
None
};

let adapter_log_handler = log_handler.clone();
cx.update(|cx| {
if let Some(stdout) = params.stdout.take() {
self._tasks.push(
cx.background_executor()
.spawn(Self::handle_adapter_log(stdout, log_handler.clone()).log_err()),
);
self._tasks.push(cx.background_spawn(async move {
match Self::handle_adapter_log(stdout, adapter_log_handler).await {
ConnectionResult::Timeout => {
log::error!("Timed out when handling debugger log");
}
ConnectionResult::ConnectionReset => {
log::info!("Debugger logs connection closed");
}
ConnectionResult::Result(Ok(())) => {}
ConnectionResult::Result(Err(e)) => {
log::error!("Error handling debugger log: {e}");
}
}
}));
}

self._tasks.push(
cx.background_executor().spawn(
Self::handle_output(
params.output,
client_tx,
self.pending_requests.clone(),
log_handler.clone(),
)
.log_err(),
),
);
let pending_requests = self.pending_requests.clone();
let output_log_handler = log_handler.clone();
self._tasks.push(cx.background_spawn(async move {
match Self::handle_output(
params.output,
client_tx,
pending_requests,
output_log_handler,
)
.await
{
Ok(()) => {}
Err(e) => log::error!("Error handling debugger output: {e}"),
}
}));

if let Some(stderr) = params.stderr.take() {
self._tasks.push(
cx.background_executor()
.spawn(Self::handle_error(stderr, self.log_handlers.clone()).log_err()),
);
let log_handlers = self.log_handlers.clone();
self._tasks.push(cx.background_spawn(async move {
match Self::handle_error(stderr, log_handlers).await {
ConnectionResult::Timeout => {
log::error!("Timed out reading debugger error stream")
}
ConnectionResult::ConnectionReset => {
log::info!("Debugger closed its error stream")
}
ConnectionResult::Result(Ok(())) => {}
ConnectionResult::Result(Err(e)) => {
log::error!("Error handling debugger error: {e}")
}
}
}));
}

self._tasks.push(
cx.background_executor().spawn(
Self::handle_input(
params.input,
client_rx,
self.current_requests.clone(),
self.pending_requests.clone(),
log_handler.clone(),
)
.log_err(),
),
);
let current_requests = self.current_requests.clone();
let pending_requests = self.pending_requests.clone();
let log_handler = log_handler.clone();
self._tasks.push(cx.background_spawn(async move {
match Self::handle_input(
params.input,
client_rx,
current_requests,
pending_requests,
log_handler,
)
.await
{
Ok(()) => {}
Err(e) => log::error!("Error handling debugger input: {e}"),
}
}));
})?;

{
@@ -235,7 +266,7 @@ impl TransportDelegate {
async fn handle_adapter_log<Stdout>(
stdout: Stdout,
log_handlers: Option<LogHandlers>,
) -> Result<()>
) -> ConnectionResult<()>
where
Stdout: AsyncRead + Unpin + Send + 'static,
{
@@ -245,13 +276,14 @@ impl TransportDelegate {
let result = loop {
line.truncate(0);

let bytes_read = match reader.read_line(&mut line).await {
Ok(bytes_read) => bytes_read,
Err(e) => break Err(e.into()),
};

if bytes_read == 0 {
anyhow::bail!("Debugger log stream closed");
match reader
.read_line(&mut line)
.await
.context("reading adapter log line")
{
Ok(0) => break ConnectionResult::ConnectionReset,
Ok(_) => {}
Err(e) => break ConnectionResult::Result(Err(e)),
}

if let Some(log_handlers) = log_handlers.as_ref() {
@@ -337,35 +369,35 @@ impl TransportDelegate {
let mut reader = BufReader::new(server_stdout);

let result = loop {
let message =
Self::receive_server_message(&mut reader, &mut recv_buffer, log_handlers.as_ref())
.await;

match message {
Ok(Message::Response(res)) => {
match Self::receive_server_message(&mut reader, &mut recv_buffer, log_handlers.as_ref())
.await
{
ConnectionResult::Timeout => anyhow::bail!("Timed out when connecting to debugger"),
ConnectionResult::ConnectionReset => {
log::info!("Debugger closed the connection");
return Ok(());
}
ConnectionResult::Result(Ok(Message::Response(res))) => {
if let Some(tx) = pending_requests.lock().await.remove(&res.request_seq) {
if let Err(e) = tx.send(Self::process_response(res)) {
log::trace!("Did not send response `{:?}` for a cancelled", e);
}
} else {
client_tx.send(Message::Response(res)).await?;
};
}
}
Ok(message) => {
client_tx.send(message).await?;
}
Err(e) => break Err(e),
ConnectionResult::Result(Ok(message)) => client_tx.send(message).await?,
ConnectionResult::Result(Err(e)) => break Err(e),
}
};

drop(client_tx);

log::debug!("Handle adapter output dropped");

result
}

async fn handle_error<Stderr>(stderr: Stderr, log_handlers: LogHandlers) -> Result<()>
async fn handle_error<Stderr>(stderr: Stderr, log_handlers: LogHandlers) -> ConnectionResult<()>
where
Stderr: AsyncRead + Unpin + Send + 'static,
{
@@ -375,8 +407,12 @@ impl TransportDelegate {
let mut reader = BufReader::new(stderr);

let result = loop {
match reader.read_line(&mut buffer).await {
Ok(0) => anyhow::bail!("debugger error stream closed"),
match reader
.read_line(&mut buffer)
.await
.context("reading error log line")
{
Ok(0) => break ConnectionResult::ConnectionReset,
Ok(_) => {
for (kind, log_handler) in log_handlers.lock().iter_mut() {
if matches!(kind, LogKind::Adapter) {
@@ -386,7 +422,7 @@ impl TransportDelegate {

buffer.truncate(0);
}
Err(error) => break Err(error.into()),
Err(error) => break ConnectionResult::Result(Err(error)),
}
};

@@ -420,7 +456,7 @@ impl TransportDelegate {
reader: &mut BufReader<Stdout>,
buffer: &mut String,
log_handlers: Option<&LogHandlers>,
) -> Result<Message>
) -> ConnectionResult<Message>
where
Stdout: AsyncRead + Unpin + Send + 'static,
{
@@ -428,48 +464,58 @@ impl TransportDelegate {
loop {
buffer.truncate(0);

if reader
match reader
.read_line(buffer)
.await
.with_context(|| "reading a message from server")?
== 0
.with_context(|| "reading a message from server")
{
anyhow::bail!("debugger reader stream closed");
Ok(0) => return ConnectionResult::ConnectionReset,
Ok(_) => {}
Err(e) => return ConnectionResult::Result(Err(e)),
};

if buffer == "\r\n" {
break;
}

let parts = buffer.trim().split_once(": ");

match parts {
Some(("Content-Length", value)) => {
content_length = Some(value.parse().context("invalid content length")?);
if let Some(("Content-Length", value)) = buffer.trim().split_once(": ") {
match value.parse().context("invalid content length") {
Ok(length) => content_length = Some(length),
Err(e) => return ConnectionResult::Result(Err(e)),
}
_ => {}
}
}

let content_length = content_length.context("missing content length")?;
let content_length = match content_length.context("missing content length") {
Ok(length) => length,
Err(e) => return ConnectionResult::Result(Err(e)),
};

let mut content = vec![0; content_length];
reader
if let Err(e) = reader
.read_exact(&mut content)
.await
.with_context(|| "reading after a loop")?;
.with_context(|| "reading after a loop")
{
return ConnectionResult::Result(Err(e));
}

let message = std::str::from_utf8(&content).context("invalid utf8 from server")?;
let message_str = match std::str::from_utf8(&content).context("invalid utf8 from server") {
Ok(str) => str,
Err(e) => return ConnectionResult::Result(Err(e)),
};

if let Some(log_handlers) = log_handlers {
for (kind, log_handler) in log_handlers.lock().iter_mut() {
if matches!(kind, LogKind::Rpc) {
log_handler(IoKind::StdOut, &message);
log_handler(IoKind::StdOut, message_str);
}
}
}

Ok(serde_json::from_str::<Message>(message)?)
ConnectionResult::Result(
serde_json::from_str::<Message>(message_str).context("deserializing server message"),
)
}

pub async fn shutdown(&self) -> Result<()> {
@@ -777,71 +823,31 @@ impl FakeTransport {
let response_handlers = this.response_handlers.clone();
let stdout_writer = Arc::new(Mutex::new(stdout_writer));

cx.background_executor()
.spawn(async move {
let mut reader = BufReader::new(stdin_reader);
let mut buffer = String::new();
cx.background_spawn(async move {
let mut reader = BufReader::new(stdin_reader);
let mut buffer = String::new();

loop {
let message =
TransportDelegate::receive_server_message(&mut reader, &mut buffer, None)
.await;

match message {
Err(error) => {
break anyhow::anyhow!(error);
}
Ok(message) => {
match message {
Message::Request(request) => {
// redirect reverse requests to stdout writer/reader
if request.command == RunInTerminal::COMMAND
|| request.command == StartDebugging::COMMAND
{
let message =
serde_json::to_string(&Message::Request(request))
.unwrap();

let mut writer = stdout_writer.lock().await;
writer
.write_all(
TransportDelegate::build_rpc_message(message)
.as_bytes(),
)
.await
.unwrap();
writer.flush().await.unwrap();
} else {
let response = if let Some(handle) = request_handlers
.lock()
.get_mut(request.command.as_str())
{
handle(
request.seq,
request.arguments.unwrap_or(json!({})),
)
} else {
panic!("No request handler for {}", request.command);
};
let message =
serde_json::to_string(&Message::Response(response))
.unwrap();

let mut writer = stdout_writer.lock().await;

writer
.write_all(
TransportDelegate::build_rpc_message(message)
.as_bytes(),
)
.await
.unwrap();
writer.flush().await.unwrap();
}
}
Message::Event(event) => {
loop {
match TransportDelegate::receive_server_message(&mut reader, &mut buffer, None)
.await
{
ConnectionResult::Timeout => {
anyhow::bail!("Timed out when connecting to debugger");
}
ConnectionResult::ConnectionReset => {
log::info!("Debugger closed the connection");
break Ok(());
}
ConnectionResult::Result(Err(e)) => break Err(e),
ConnectionResult::Result(Ok(message)) => {
match message {
Message::Request(request) => {
// redirect reverse requests to stdout writer/reader
if request.command == RunInTerminal::COMMAND
|| request.command == StartDebugging::COMMAND
{
let message =
serde_json::to_string(&Message::Event(event)).unwrap();
serde_json::to_string(&Message::Request(request)).unwrap();

let mut writer = stdout_writer.lock().await;
writer
@@ -852,22 +858,58 @@ impl FakeTransport {
.await
.unwrap();
writer.flush().await.unwrap();
}
Message::Response(response) => {
if let Some(handle) =
response_handlers.lock().get(response.command.as_str())
} else {
let response = if let Some(handle) =
request_handlers.lock().get_mut(request.command.as_str())
{
handle(response);
handle(request.seq, request.arguments.unwrap_or(json!({})))
} else {
log::error!("No response handler for {}", response.command);
}
panic!("No request handler for {}", request.command);
};
let message =
serde_json::to_string(&Message::Response(response))
.unwrap();

let mut writer = stdout_writer.lock().await;

writer
.write_all(
TransportDelegate::build_rpc_message(message)
.as_bytes(),
)
.await
.unwrap();
writer.flush().await.unwrap();
}
}
Message::Event(event) => {
let message =
serde_json::to_string(&Message::Event(event)).unwrap();

let mut writer = stdout_writer.lock().await;
writer
.write_all(
TransportDelegate::build_rpc_message(message).as_bytes(),
)
.await
.unwrap();
writer.flush().await.unwrap();
}
Message::Response(response) => {
if let Some(handle) =
response_handlers.lock().get(response.command.as_str())
{
handle(response);
} else {
log::error!("No response handler for {}", response.command);
}
}
}
}
}
})
.detach();
}
})
.detach();

Ok((
TransportPipe::new(Box::new(stdin_writer), Box::new(stdout_reader), None, None),
@@ -1,11 +1,8 @@
use std::{collections::HashMap, path::PathBuf, sync::OnceLock};

use anyhow::{Context as _, Result, anyhow};
use anyhow::{Context as _, Result};
use async_trait::async_trait;
use dap::{
StartDebuggingRequestArgumentsRequest,
adapters::{DebugTaskDefinition, latest_github_release},
};
use dap::adapters::{DebugTaskDefinition, latest_github_release};
use futures::StreamExt;
use gpui::AsyncApp;
use serde_json::Value;
@@ -37,7 +34,7 @@ impl CodeLldbDebugAdapter {
Value::String(String::from(task_definition.label.as_ref())),
);

let request = self.validate_config(&configuration)?;
let request = self.request_kind(&configuration)?;

Ok(dap::StartDebuggingRequestArguments {
request,
@@ -89,48 +86,6 @@ impl DebugAdapter for CodeLldbDebugAdapter {
DebugAdapterName(Self::ADAPTER_NAME.into())
}

fn validate_config(
&self,
config: &serde_json::Value,
) -> Result<StartDebuggingRequestArgumentsRequest> {
let map = config
.as_object()
.ok_or_else(|| anyhow!("Config isn't an object"))?;

let request_variant = map
.get("request")
.and_then(|r| r.as_str())
.ok_or_else(|| anyhow!("request field is required and must be a string"))?;

match request_variant {
"launch" => {
// For launch, verify that one of the required configs exists
if !(map.contains_key("program")
|| map.contains_key("targetCreateCommands")
|| map.contains_key("cargo"))
{
return Err(anyhow!(
"launch request requires either 'program', 'targetCreateCommands', or 'cargo' field"
));
}
Ok(StartDebuggingRequestArgumentsRequest::Launch)
}
"attach" => {
// For attach, verify that either pid or program exists
if !(map.contains_key("pid") || map.contains_key("program")) {
return Err(anyhow!(
"attach request requires either 'pid' or 'program' field"
));
}
Ok(StartDebuggingRequestArgumentsRequest::Attach)
}
_ => Err(anyhow!(
"request must be either 'launch' or 'attach', got '{}'",
request_variant
)),
}
}

fn config_from_zed_format(&self, zed_scenario: ZedDebugConfig) -> Result<DebugScenario> {
let mut configuration = json!({
"request": match zed_scenario.request {

@@ -178,7 +178,7 @@ impl DebugAdapter for GdbDebugAdapter {
let gdb_path = user_setting_path.unwrap_or(gdb_path?);

let request_args = StartDebuggingRequestArguments {
request: self.validate_config(&config.config)?,
request: self.request_kind(&config.config)?,
configuration: config.config.clone(),
};

@@ -1,6 +1,6 @@
use anyhow::{Context as _, anyhow, bail};
use anyhow::{Context as _, bail};
use dap::{
StartDebuggingRequestArguments, StartDebuggingRequestArgumentsRequest,
StartDebuggingRequestArguments,
adapters::{
DebugTaskDefinition, DownloadedFileType, download_adapter_from_github,
latest_github_release,
@@ -76,8 +76,8 @@ impl GoDebugAdapter {

let path = paths::debug_adapters_dir()
.join("delve-shim-dap")
.join(format!("delve-shim-dap{}", asset.tag_name))
.join("delve-shim-dap");
.join(format!("delve-shim-dap_{}", asset.tag_name))
.join(format!("delve-shim-dap{}", std::env::consts::EXE_SUFFIX));
self.shim_path.set(path.clone()).ok();

Ok(path)
@@ -350,24 +350,6 @@ impl DebugAdapter for GoDebugAdapter {
})
}

fn validate_config(
&self,
config: &serde_json::Value,
) -> Result<StartDebuggingRequestArgumentsRequest> {
let map = config.as_object().context("Config isn't an object")?;

let request_variant = map
.get("request")
.and_then(|val| val.as_str())
.context("request argument is not found or invalid")?;

match request_variant {
"launch" => Ok(StartDebuggingRequestArgumentsRequest::Launch),
"attach" => Ok(StartDebuggingRequestArgumentsRequest::Attach),
_ => Err(anyhow!("request must be either 'launch' or 'attach'")),
}
}

fn config_from_zed_format(&self, zed_scenario: ZedDebugConfig) -> Result<DebugScenario> {
let mut args = match &zed_scenario.request {
dap::DebugRequest::Attach(attach_config) => {
@@ -414,13 +396,15 @@ impl DebugAdapter for GoDebugAdapter {
&self,
delegate: &Arc<dyn DapDelegate>,
task_definition: &DebugTaskDefinition,
_user_installed_path: Option<PathBuf>,
user_installed_path: Option<PathBuf>,
_cx: &mut AsyncApp,
) -> Result<DebugAdapterBinary> {
let adapter_path = paths::debug_adapters_dir().join(&Self::ADAPTER_NAME);
let dlv_path = adapter_path.join("dlv");

let delve_path = if let Some(path) = delegate.which(OsStr::new("dlv")).await {
let delve_path = if let Some(path) = user_installed_path {
path.to_string_lossy().to_string()
} else if let Some(path) = delegate.which(OsStr::new("dlv")).await {
path.to_string_lossy().to_string()
} else if delegate.fs().is_file(&dlv_path).await {
dlv_path.to_string_lossy().to_string()
@@ -486,7 +470,7 @@ impl DebugAdapter for GoDebugAdapter {
connection: None,
request_args: StartDebuggingRequestArguments {
configuration: task_definition.config.clone(),
request: self.validate_config(&task_definition.config)?,
request: self.request_kind(&task_definition.config)?,
},
})
}
@@ -1,9 +1,6 @@
use adapters::latest_github_release;
use anyhow::{Context as _, anyhow};
use dap::{
StartDebuggingRequestArguments, StartDebuggingRequestArgumentsRequest,
adapters::DebugTaskDefinition,
};
use anyhow::Context as _;
use dap::{StartDebuggingRequestArguments, adapters::DebugTaskDefinition};
use gpui::AsyncApp;
use std::{collections::HashMap, path::PathBuf, sync::OnceLock};
use task::DebugRequest;
@@ -26,7 +23,7 @@ impl JsDebugAdapter {
delegate: &Arc<dyn DapDelegate>,
) -> Result<AdapterVersion> {
let release = latest_github_release(
&format!("{}/{}", "microsoft", Self::ADAPTER_NPM_NAME),
&format!("microsoft/{}", Self::ADAPTER_NPM_NAME),
true,
false,
delegate.http_client(),
@@ -95,7 +92,7 @@ impl JsDebugAdapter {
}),
request_args: StartDebuggingRequestArguments {
configuration: task_definition.config.clone(),
request: self.validate_config(&task_definition.config)?,
request: self.request_kind(&task_definition.config)?,
},
})
}
@@ -107,29 +104,6 @@ impl DebugAdapter for JsDebugAdapter {
DebugAdapterName(Self::ADAPTER_NAME.into())
}

fn validate_config(
&self,
config: &serde_json::Value,
) -> Result<dap::StartDebuggingRequestArgumentsRequest> {
match config.get("request") {
Some(val) if val == "launch" => {
if config.get("program").is_none() && config.get("url").is_none() {
return Err(anyhow!(
"either program or url is required for launch request"
));
}
Ok(StartDebuggingRequestArgumentsRequest::Launch)
}
Some(val) if val == "attach" => {
if !config.get("processId").is_some_and(|val| val.is_u64()) {
return Err(anyhow!("processId must be a number"));
}
Ok(StartDebuggingRequestArgumentsRequest::Attach)
}
_ => Err(anyhow!("missing or invalid request field in config")),
}
}

fn config_from_zed_format(&self, zed_scenario: ZedDebugConfig) -> Result<DebugScenario> {
let mut args = json!({
"type": "pwa-node",
@@ -449,6 +423,8 @@ impl DebugAdapter for JsDebugAdapter {
delegate.as_ref(),
)
.await?;
} else {
delegate.output_to_console(format!("{} debug adapter is up to date", self.name()));
}
}

@@ -94,7 +94,7 @@ impl PhpDebugAdapter {
envs: HashMap::default(),
request_args: StartDebuggingRequestArguments {
configuration: task_definition.config.clone(),
request: <Self as DebugAdapter>::validate_config(self, &task_definition.config)?,
request: <Self as DebugAdapter>::request_kind(self, &task_definition.config)?,
},
})
}
@@ -282,10 +282,7 @@ impl DebugAdapter for PhpDebugAdapter {
Some(SharedString::new_static("PHP").into())
}

fn validate_config(
&self,
_: &serde_json::Value,
) -> Result<StartDebuggingRequestArgumentsRequest> {
fn request_kind(&self, _: &serde_json::Value) -> Result<StartDebuggingRequestArgumentsRequest> {
Ok(StartDebuggingRequestArgumentsRequest::Launch)
}

@@ -1,9 +1,6 @@
use crate::*;
use anyhow::{Context as _, anyhow};
use dap::{
DebugRequest, StartDebuggingRequestArguments, StartDebuggingRequestArgumentsRequest,
adapters::DebugTaskDefinition,
};
use anyhow::Context as _;
use dap::{DebugRequest, StartDebuggingRequestArguments, adapters::DebugTaskDefinition};
use gpui::{AsyncApp, SharedString};
use json_dotpath::DotPaths;
use language::{LanguageName, Toolchain};
@@ -86,7 +83,7 @@ impl PythonDebugAdapter {
&self,
task_definition: &DebugTaskDefinition,
) -> Result<StartDebuggingRequestArguments> {
let request = self.validate_config(&task_definition.config)?;
let request = self.request_kind(&task_definition.config)?;

let mut configuration = task_definition.config.clone();
if let Ok(console) = configuration.dot_get_mut("console") {
@@ -254,24 +251,6 @@ impl DebugAdapter for PythonDebugAdapter {
})
}

fn validate_config(
&self,
config: &serde_json::Value,
) -> Result<StartDebuggingRequestArgumentsRequest> {
let map = config.as_object().context("Config isn't an object")?;

let request_variant = map
.get("request")
.and_then(|val| val.as_str())
.context("request is not valid")?;

match request_variant {
"launch" => Ok(StartDebuggingRequestArgumentsRequest::Launch),
"attach" => Ok(StartDebuggingRequestArgumentsRequest::Attach),
_ => Err(anyhow!("request must be either 'launch' or 'attach'")),
}
}

async fn dap_schema(&self) -> serde_json::Value {
json!({
"properties": {

@@ -265,7 +265,7 @@ impl DebugAdapter for RubyDebugAdapter {
cwd: None,
envs: std::collections::HashMap::default(),
request_args: StartDebuggingRequestArguments {
request: self.validate_config(&definition.config)?,
request: self.request_kind(&definition.config)?,
configuration: definition.config.clone(),
},
})

@@ -50,6 +50,7 @@ project.workspace = true
rpc.workspace = true
serde.workspace = true
serde_json.workspace = true
# serde_json_lenient.workspace = true
settings.workspace = true
shlex.workspace = true
sysinfo.workspace = true
@@ -3,11 +3,12 @@ use crate::session::DebugSession;
use crate::session::running::RunningState;
use crate::{
ClearAllBreakpoints, Continue, Detach, FocusBreakpointList, FocusConsole, FocusFrames,
FocusLoadedSources, FocusModules, FocusTerminal, FocusVariables, Pause, Restart,
ShowStackTrace, StepBack, StepInto, StepOut, StepOver, Stop, ToggleIgnoreBreakpoints,
ToggleSessionPicker, ToggleThreadPicker, persistence, spawn_task_or_modal,
FocusLoadedSources, FocusModules, FocusTerminal, FocusVariables, NewProcessModal,
NewProcessMode, Pause, Restart, ShowStackTrace, StepBack, StepInto, StepOut, StepOver, Stop,
ToggleExpandItem, ToggleIgnoreBreakpoints, ToggleSessionPicker, ToggleThreadPicker,
persistence, spawn_task_or_modal,
};
use anyhow::{Context as _, Result, anyhow};
use anyhow::Result;
use command_palette_hooks::CommandPaletteFilter;
use dap::StartDebuggingRequestArguments;
use dap::adapters::DebugAdapterName;
@@ -24,7 +25,7 @@ use gpui::{

use language::Buffer;
use project::debugger::session::{Session, SessionStateEvent};
use project::{Fs, ProjectPath, WorktreeId};
use project::{Fs, WorktreeId};
use project::{Project, debugger::session::ThreadStatus};
use rpc::proto::{self};
use settings::Settings;
@@ -69,6 +70,7 @@ pub struct DebugPanel {
pub(crate) thread_picker_menu_handle: PopoverMenuHandle<ContextMenu>,
pub(crate) session_picker_menu_handle: PopoverMenuHandle<ContextMenu>,
fs: Arc<dyn Fs>,
is_zoomed: bool,
_subscriptions: [Subscription; 1],
}

@@ -103,6 +105,7 @@ impl DebugPanel {
fs: workspace.app_state().fs.clone(),
thread_picker_menu_handle,
session_picker_menu_handle,
is_zoomed: false,
_subscriptions: [focus_subscription],
debug_scenario_scheduled_last: true,
}
@@ -334,10 +337,17 @@ impl DebugPanel {
let Some(task_inventory) = task_store.read(cx).task_inventory() else {
return;
};
let workspace = self.workspace.clone();
let Some(scenario) = task_inventory.read(cx).last_scheduled_scenario().cloned() else {
window.defer(cx, move |window, cx| {
workspace
.update(cx, |workspace, cx| {
NewProcessModal::show(workspace, window, NewProcessMode::Launch, None, cx);
})
.ok();
});
return;
};
let workspace = self.workspace.clone();

cx.spawn_in(window, async move |this, cx| {
let task_contexts = workspace
@@ -942,68 +952,69 @@ impl DebugPanel {
cx.notify();
}

pub(crate) fn save_scenario(
&self,
scenario: &DebugScenario,
worktree_id: WorktreeId,
window: &mut Window,
cx: &mut App,
) -> Task<Result<ProjectPath>> {
self.workspace
.update(cx, |workspace, cx| {
let Some(mut path) = workspace.absolute_path_of_worktree(worktree_id, cx) else {
return Task::ready(Err(anyhow!("Couldn't get worktree path")));
};
// TODO: restore once we have proper comment preserving file edits
// pub(crate) fn save_scenario(
// &self,
// scenario: &DebugScenario,
// worktree_id: WorktreeId,
// window: &mut Window,
// cx: &mut App,
// ) -> Task<Result<ProjectPath>> {
// self.workspace
// .update(cx, |workspace, cx| {
// let Some(mut path) = workspace.absolute_path_of_worktree(worktree_id, cx) else {
// return Task::ready(Err(anyhow!("Couldn't get worktree path")));
// };

let serialized_scenario = serde_json::to_value(scenario);
// let serialized_scenario = serde_json::to_value(scenario);

cx.spawn_in(window, async move |workspace, cx| {
let serialized_scenario = serialized_scenario?;
let fs =
workspace.read_with(cx, |workspace, _| workspace.app_state().fs.clone())?;
// cx.spawn_in(window, async move |workspace, cx| {
// let serialized_scenario = serialized_scenario?;
// let fs =
// workspace.read_with(cx, |workspace, _| workspace.app_state().fs.clone())?;

path.push(paths::local_settings_folder_relative_path());
if !fs.is_dir(path.as_path()).await {
fs.create_dir(path.as_path()).await?;
}
path.pop();
// path.push(paths::local_settings_folder_relative_path());
// if !fs.is_dir(path.as_path()).await {
// fs.create_dir(path.as_path()).await?;
// }
// path.pop();

path.push(paths::local_debug_file_relative_path());
let path = path.as_path();
// path.push(paths::local_debug_file_relative_path());
// let path = path.as_path();

if !fs.is_file(path).await {
let content =
serde_json::to_string_pretty(&serde_json::Value::Array(vec![
serialized_scenario,
]))?;
// if !fs.is_file(path).await {
// fs.create_file(path, Default::default()).await?;
// fs.write(
// path,
// initial_local_debug_tasks_content().to_string().as_bytes(),
// )
// .await?;
// }

fs.create_file(path, Default::default()).await?;
fs.save(path, &content.into(), Default::default()).await?;
} else {
let content = fs.load(path).await?;
let mut values = serde_json::from_str::<Vec<serde_json::Value>>(&content)?;
values.push(serialized_scenario);
fs.save(
path,
&serde_json::to_string_pretty(&values).map(Into::into)?,
Default::default(),
)
.await?;
}
// let content = fs.load(path).await?;
// let mut values =
// serde_json_lenient::from_str::<Vec<serde_json::Value>>(&content)?;
// values.push(serialized_scenario);
// fs.save(
// path,
// &serde_json_lenient::to_string_pretty(&values).map(Into::into)?,
// Default::default(),
// )
// .await?;

workspace.update(cx, |workspace, cx| {
workspace
.project()
.read(cx)
.project_path_for_absolute_path(&path, cx)
.context(
"Couldn't get project path for .zed/debug.json in active worktree",
)
})?
})
})
.unwrap_or_else(|err| Task::ready(Err(err)))
}
// workspace.update(cx, |workspace, cx| {
// workspace
// .project()
// .read(cx)
// .project_path_for_absolute_path(&path, cx)
// .context(
// "Couldn't get project path for .zed/debug.json in active worktree",
// )
// })?
// })
// })
// .unwrap_or_else(|err| Task::ready(Err(err)))
// }

pub(crate) fn toggle_thread_picker(&mut self, window: &mut Window, cx: &mut Context<Self>) {
self.thread_picker_menu_handle.toggle(window, cx);
@@ -1012,6 +1023,22 @@ impl DebugPanel {
pub(crate) fn toggle_session_picker(&mut self, window: &mut Window, cx: &mut Context<Self>) {
self.session_picker_menu_handle.toggle(window, cx);
}

fn toggle_zoom(
&mut self,
_: &workspace::ToggleZoom,
window: &mut Window,
cx: &mut Context<Self>,
) {
if self.is_zoomed {
cx.emit(PanelEvent::ZoomOut);
} else {
if !self.focus_handle(cx).contains_focused(window, cx) {
cx.focus_self(window);
}
cx.emit(PanelEvent::ZoomIn);
}
}
}

async fn register_session_inner(
@@ -1167,6 +1194,15 @@ impl Panel for DebugPanel {
}

fn set_active(&mut self, _: bool, _: &mut Window, _: &mut Context<Self>) {}

fn is_zoomed(&self, _window: &Window, _cx: &App) -> bool {
self.is_zoomed
}

fn set_zoomed(&mut self, zoomed: bool, _window: &mut Window, cx: &mut Context<Self>) {
self.is_zoomed = zoomed;
cx.notify();
}
}

impl Render for DebugPanel {
@@ -1307,6 +1343,23 @@ impl Render for DebugPanel {
.ok();
}
})
.on_action(cx.listener(Self::toggle_zoom))
.on_action(cx.listener(|panel, _: &ToggleExpandItem, _, cx| {
let Some(session) = panel.active_session() else {
return;
};
let active_pane = session
.read(cx)
.running_state()
.read(cx)
.active_pane()
.clone();
active_pane.update(cx, |pane, cx| {
let is_zoomed = pane.is_zoomed();
pane.set_zoomed(!is_zoomed, cx);
});
cx.notify();
}))
.when(self.active_session.is_some(), |this| {
this.on_mouse_down(
MouseButton::Right,
@@ -1410,4 +1463,10 @@ impl workspace::DebuggerProvider for DebuggerProvider {
fn debug_scenario_scheduled_last(&self, cx: &App) -> bool {
self.0.read(cx).debug_scenario_scheduled_last
}

fn active_thread_state(&self, cx: &App) -> Option<ThreadStatus> {
let session = self.0.read(cx).active_session()?;
let thread = session.read(cx).running_state().read(cx).thread_id()?;
session.read(cx).session(cx).read(cx).thread_state(thread)
}
}
@@ -3,7 +3,7 @@ use debugger_panel::{DebugPanel, ToggleFocus};
use editor::Editor;
use feature_flags::{DebuggerFeatureFlag, FeatureFlagViewExt};
use gpui::{App, EntityInputHandler, actions};
use new_session_modal::{NewSessionModal, NewSessionMode};
use new_process_modal::{NewProcessModal, NewProcessMode};
use project::debugger::{self, breakpoint_store::SourceBreakpoint};
use session::DebugSession;
use settings::Settings;
@@ -15,7 +15,7 @@ use workspace::{ItemHandle, ShutdownDebugAdapters, Workspace};
pub mod attach_modal;
pub mod debugger_panel;
mod dropdown_menus;
mod new_session_modal;
mod new_process_modal;
mod persistence;
pub(crate) mod session;
mod stack_trace_view;
@@ -49,6 +49,7 @@ actions!(
ToggleThreadPicker,
ToggleSessionPicker,
RerunLastSession,
ToggleExpandItem,
]
);

@@ -210,7 +211,7 @@ pub fn init(cx: &mut App) {
},
)
.register_action(|workspace: &mut Workspace, _: &Start, window, cx| {
NewSessionModal::show(workspace, window, NewSessionMode::Launch, None, cx);
NewProcessModal::show(workspace, window, NewProcessMode::Debug, None, cx);
})
.register_action(
|workspace: &mut Workspace, _: &RerunLastSession, window, cx| {
@@ -352,7 +353,7 @@ fn spawn_task_or_modal(
.detach_and_log_err(cx)
}
Spawn::ViaModal { reveal_target } => {
NewSessionModal::show(workspace, window, NewSessionMode::Task, *reveal_target, cx);
NewProcessModal::show(workspace, window, NewProcessMode::Task, *reveal_target, cx);
}
}
}

@@ -61,6 +61,28 @@ impl DebuggerPaneItem {
DebuggerPaneItem::Terminal => SharedString::new_static("Terminal"),
}
}
pub(crate) fn tab_tooltip(self) -> SharedString {
let tooltip = match self {
DebuggerPaneItem::Console => {
"Displays program output and allows manual input of debugger commands."
}
DebuggerPaneItem::Variables => {
"Shows current values of local and global variables in the current stack frame."
}
DebuggerPaneItem::BreakpointList => "Lists all active breakpoints set in the code.",
DebuggerPaneItem::Frames => {
"Displays the call stack, letting you navigate between function calls."
}
DebuggerPaneItem::Modules => "Shows all modules or libraries loaded by the program.",
DebuggerPaneItem::LoadedSources => {
"Lists all source files currently loaded and used by the debugger."
}
DebuggerPaneItem::Terminal => {
"Provides an interactive terminal session within the debugging environment."
}
};
SharedString::new_static(tooltip)
}
}

impl From<DebuggerPaneItem> for SharedString {

@@ -8,7 +8,8 @@ pub mod variable_list;
use std::{any::Any, ops::ControlFlow, path::PathBuf, sync::Arc, time::Duration};

use crate::{
new_session_modal::resolve_path,
ToggleExpandItem,
new_process_modal::resolve_path,
persistence::{self, DebuggerPaneItem, SerializedLayout},
};

@@ -173,6 +174,10 @@ impl Item for SubView {
self.kind.to_shared_string()
}

fn tab_tooltip_text(&self, _: &App) -> Option<SharedString> {
Some(self.kind.tab_tooltip())
}

fn tab_content(
&self,
params: workspace::item::TabContentParams,
@@ -343,6 +348,7 @@ pub(crate) fn new_debugger_pane(
false
}
})));
pane.set_can_toggle_zoom(false, cx);
pane.display_nav_history_buttons(None);
pane.set_custom_drop_handle(cx, custom_drop_handle);
pane.set_should_display_tab_bar(|_, _| true);
@@ -399,6 +405,9 @@ pub(crate) fn new_debugger_pane(
.p_1()
.rounded_md()
.cursor_pointer()
.when_some(item.tab_tooltip_text(cx), |this, tooltip| {
this.tooltip(Tooltip::text(tooltip))
})
.map(|this| {
let theme = cx.theme();
if selected {
@@ -465,17 +474,19 @@ pub(crate) fn new_debugger_pane(
},
)
.icon_size(IconSize::XSmall)
.on_click(cx.listener(move |pane, _, window, cx| {
pane.toggle_zoom(&workspace::ToggleZoom, window, cx);
.on_click(cx.listener(move |pane, _, _, cx| {
let is_zoomed = pane.is_zoomed();
pane.set_zoomed(!is_zoomed, cx);
cx.notify();
}))
.tooltip({
let focus_handle = focus_handle.clone();
move |window, cx| {
let zoomed_text =
if zoomed { "Zoom Out" } else { "Zoom In" };
if zoomed { "Minimize" } else { "Expand" };
Tooltip::for_action_in(
zoomed_text,
&workspace::ToggleZoom,
&ToggleExpandItem,
&focus_handle,
window,
cx,
@@ -559,7 +570,7 @@ impl RunningState {
}
}

pub(crate) fn relativlize_paths(
pub(crate) fn relativize_paths(
key: Option<&str>,
config: &mut serde_json::Value,
context: &TaskContext,
@@ -567,12 +578,12 @@ impl RunningState {
match config {
serde_json::Value::Object(obj) => {
obj.iter_mut()
.for_each(|(key, value)| Self::relativlize_paths(Some(key), value, context));
.for_each(|(key, value)| Self::relativize_paths(Some(key), value, context));
}
serde_json::Value::Array(array) => {
array
.iter_mut()
.for_each(|value| Self::relativlize_paths(None, value, context));
.for_each(|value| Self::relativize_paths(None, value, context));
}
serde_json::Value::String(s) if key == Some("program") || key == Some("cwd") => {
// Some built-in zed tasks wrap their arguments in quotes as they might contain spaces.
@@ -799,13 +810,13 @@ impl RunningState {
mut config,
tcp_connection,
} = scenario;
Self::relativlize_paths(None, &mut config, &task_context);
Self::relativize_paths(None, &mut config, &task_context);
Self::substitute_variables_in_config(&mut config, &task_context);

let request_type = dap_registry
.adapter(&adapter)
.ok_or_else(|| anyhow!("{}: is not a valid adapter name", &adapter))
.and_then(|adapter| adapter.validate_config(&config));
.and_then(|adapter| adapter.request_kind(&config));

let config_is_valid = request_type.is_ok();

@@ -874,7 +885,6 @@ impl RunningState {
args,
..task.resolved.clone()
};

let terminal = project
.update_in(cx, |project, window, cx| {
project.create_terminal(
@@ -919,6 +929,12 @@ impl RunningState {
};

if config_is_valid {
// Ok(DebugTaskDefinition {
// label,
// adapter: DebugAdapterName(adapter),
// config,
// tcp_connection,
// })
} else if let Some((task, locator_name)) = build_output {
let locator_name =
locator_name.context("Could not find a valid locator for a build task")?;
@@ -937,12 +953,15 @@ impl RunningState {

let scenario = dap_registry
.adapter(&adapter)
.context(format!("{}: is not a valid adapter name", &adapter))
.ok_or_else(|| anyhow!("{}: is not a valid adapter name", &adapter))
.map(|adapter| adapter.config_from_zed_format(zed_config))??;
config = scenario.config;
Self::substitute_variables_in_config(&mut config, &task_context);
} else {
anyhow::bail!("No request or build provided");
let Err(e) = request_type else {
unreachable!();
};
anyhow::bail!("Zed cannot determine how to run this debug scenario. `build` field was not provided and Debug Adapter won't accept provided configuration because: {e}");
};

Ok(DebugTaskDefinition {
@@ -1245,18 +1264,6 @@ impl RunningState {
Event::Focus => {
this.active_pane = source_pane.clone();
}
Event::ZoomIn => {
source_pane.update(cx, |pane, cx| {
pane.set_zoomed(true, cx);
});
cx.notify();
}
Event::ZoomOut => {
source_pane.update(cx, |pane, cx| {
pane.set_zoomed(false, cx);
});
cx.notify();
}
_ => {}
}
}
@@ -13,7 +13,7 @@ use gpui::{
use language::{Buffer, CodeLabel, ToOffset};
use menu::Confirm;
use project::{
Completion,
Completion, CompletionResponse,
debugger::session::{CompletionsQuery, OutputToken, Session, SessionEvent},
};
use settings::Settings;
@@ -176,16 +176,18 @@ impl Console {
}

fn render_console(&self, cx: &Context<Self>) -> impl IntoElement {
EditorElement::new(&self.console, self.editor_style(cx))
EditorElement::new(&self.console, Self::editor_style(&self.console, cx))
}

fn editor_style(&self, cx: &Context<Self>) -> EditorStyle {
fn editor_style(editor: &Entity<Editor>, cx: &Context<Self>) -> EditorStyle {
let is_read_only = editor.read(cx).read_only(cx);
let settings = ThemeSettings::get_global(cx);
let theme = cx.theme();
let text_style = TextStyle {
color: if self.console.read(cx).read_only(cx) {
cx.theme().colors().text_disabled
color: if is_read_only {
theme.colors().text_muted
} else {
cx.theme().colors().text
theme.colors().text
},
font_family: settings.buffer_font.family.clone(),
font_features: settings.buffer_font.features.clone(),
@@ -195,15 +197,15 @@ impl Console {
..Default::default()
};
EditorStyle {
background: cx.theme().colors().editor_background,
local_player: cx.theme().players().local(),
background: theme.colors().editor_background,
local_player: theme.players().local(),
text: text_style,
..Default::default()
}
}

fn render_query_bar(&self, cx: &Context<Self>) -> impl IntoElement {
EditorElement::new(&self.query_bar, self.editor_style(cx))
EditorElement::new(&self.query_bar, Self::editor_style(&self.query_bar, cx))
}

fn update_output(&mut self, window: &mut Window, cx: &mut Context<Self>) {
@@ -260,9 +262,9 @@ impl CompletionProvider for ConsoleQueryBarCompletionProvider {
_trigger: editor::CompletionContext,
_window: &mut Window,
cx: &mut Context<Editor>,
) -> Task<Result<Option<Vec<Completion>>>> {
) -> Task<Result<Vec<CompletionResponse>>> {
let Some(console) = self.0.upgrade() else {
return Task::ready(Ok(None));
return Task::ready(Ok(Vec::new()));
};

let support_completions = console
@@ -320,7 +322,7 @@ impl ConsoleQueryBarCompletionProvider {
buffer: &Entity<Buffer>,
buffer_position: language::Anchor,
cx: &mut Context<Editor>,
) -> Task<Result<Option<Vec<Completion>>>> {
) -> Task<Result<Vec<CompletionResponse>>> {
let (variables, string_matches) = console.update(cx, |console, cx| {
let mut variables = HashMap::default();
let mut string_matches = Vec::default();
@@ -352,39 +354,43 @@ impl ConsoleQueryBarCompletionProvider {
let query = buffer.read(cx).text();

cx.spawn(async move |_, cx| {
const LIMIT: usize = 10;
let matches = fuzzy::match_strings(
&string_matches,
&query,
true,
10,
LIMIT,
&Default::default(),
cx.background_executor().clone(),
)
.await;

Ok(Some(
matches
.iter()
.filter_map(|string_match| {
let variable_value = variables.get(&string_match.string)?;
let completions = matches
.iter()
.filter_map(|string_match| {
let variable_value = variables.get(&string_match.string)?;

Some(project::Completion {
replace_range: buffer_position..buffer_position,
new_text: string_match.string.clone(),
label: CodeLabel {
filter_range: 0..string_match.string.len(),
text: format!("{} {}", string_match.string, variable_value),
runs: Vec::new(),
},
icon_path: None,
documentation: None,
confirm: None,
source: project::CompletionSource::Custom,
insert_text_mode: None,
})
Some(project::Completion {
replace_range: buffer_position..buffer_position,
new_text: string_match.string.clone(),
label: CodeLabel {
filter_range: 0..string_match.string.len(),
text: format!("{} {}", string_match.string, variable_value),
runs: Vec::new(),
},
icon_path: None,
documentation: None,
confirm: None,
source: project::CompletionSource::Custom,
insert_text_mode: None,
})
.collect(),
))
})
.collect::<Vec<_>>();

Ok(vec![project::CompletionResponse {
is_incomplete: completions.len() >= LIMIT,
completions,
}])
})
}
@@ -394,7 +400,7 @@ impl ConsoleQueryBarCompletionProvider {
buffer: &Entity<Buffer>,
buffer_position: language::Anchor,
cx: &mut Context<Editor>,
) -> Task<Result<Option<Vec<Completion>>>> {
) -> Task<Result<Vec<CompletionResponse>>> {
let completion_task = console.update(cx, |console, cx| {
console.session.update(cx, |state, cx| {
let frame_id = console.stack_frame_list.read(cx).opened_stack_frame_id();
@@ -409,53 +415,56 @@ impl ConsoleQueryBarCompletionProvider {
cx.background_executor().spawn(async move {
let completions = completion_task.await?;

Ok(Some(
completions
.into_iter()
.map(|completion| {
let new_text = completion
.text
.as_ref()
.unwrap_or(&completion.label)
.to_owned();
let buffer_text = snapshot.text();
let buffer_bytes = buffer_text.as_bytes();
let new_bytes = new_text.as_bytes();
let completions = completions
.into_iter()
.map(|completion| {
let new_text = completion
.text
.as_ref()
.unwrap_or(&completion.label)
.to_owned();
let buffer_text = snapshot.text();
let buffer_bytes = buffer_text.as_bytes();
let new_bytes = new_text.as_bytes();

let mut prefix_len = 0;
for i in (0..new_bytes.len()).rev() {
if buffer_bytes.ends_with(&new_bytes[0..i]) {
prefix_len = i;
break;
}
let mut prefix_len = 0;
for i in (0..new_bytes.len()).rev() {
if buffer_bytes.ends_with(&new_bytes[0..i]) {
prefix_len = i;
break;
}
}

let buffer_offset = buffer_position.to_offset(&snapshot);
let start = buffer_offset - prefix_len;
let start = snapshot.clip_offset(start, Bias::Left);
let start = snapshot.anchor_before(start);
let replace_range = start..buffer_position;
let buffer_offset = buffer_position.to_offset(&snapshot);
let start = buffer_offset - prefix_len;
let start = snapshot.clip_offset(start, Bias::Left);
let start = snapshot.anchor_before(start);
let replace_range = start..buffer_position;

project::Completion {
replace_range,
new_text,
label: CodeLabel {
filter_range: 0..completion.label.len(),
text: completion.label,
runs: Vec::new(),
},
icon_path: None,
documentation: None,
confirm: None,
source: project::CompletionSource::BufferWord {
word_range: buffer_position..language::Anchor::MAX,
resolved: false,
},
insert_text_mode: None,
}
})
.collect(),
))
project::Completion {
replace_range,
new_text,
label: CodeLabel {
filter_range: 0..completion.label.len(),
text: completion.label,
runs: Vec::new(),
},
icon_path: None,
documentation: None,
confirm: None,
source: project::CompletionSource::BufferWord {
word_range: buffer_position..language::Anchor::MAX,
resolved: false,
},
insert_text_mode: None,
}
})
.collect();

Ok(vec![project::CompletionResponse {
completions,
is_incomplete: false,
}])
})
}
}
@@ -25,7 +25,7 @@ mod inline_values;
#[cfg(test)]
mod module_list;
#[cfg(test)]
mod new_session_modal;
mod new_process_modal;
#[cfg(test)]
mod persistence;
#[cfg(test)]

@@ -1,13 +1,13 @@
use dap::DapRegistry;
use gpui::{BackgroundExecutor, TestAppContext, VisualTestContext};
use project::{FakeFs, Fs, Project};
use project::{FakeFs, Project};
use serde_json::json;
use std::sync::Arc;
use std::sync::atomic::{AtomicBool, Ordering};
use task::{DebugRequest, DebugScenario, LaunchRequest, TaskContext, VariableName, ZedDebugConfig};
use util::path;

use crate::new_session_modal::NewSessionMode;
// use crate::new_process_modal::NewProcessMode;
use crate::tests::{init_test, init_test_workspace};

#[gpui::test]
@@ -152,111 +152,111 @@ async fn test_debug_session_substitutes_variables_and_relativizes_paths(
}
}

#[gpui::test]
async fn test_save_debug_scenario_to_file(executor: BackgroundExecutor, cx: &mut TestAppContext) {
init_test(cx);
// #[gpui::test]
// async fn test_save_debug_scenario_to_file(executor: BackgroundExecutor, cx: &mut TestAppContext) {
// init_test(cx);

let fs = FakeFs::new(executor.clone());
fs.insert_tree(
path!("/project"),
json!({
"main.rs": "fn main() {}"
}),
)
.await;
// let fs = FakeFs::new(executor.clone());
// fs.insert_tree(
// path!("/project"),
// json!({
// "main.rs": "fn main() {}"
// }),
// )
// .await;

let project = Project::test(fs.clone(), [path!("/project").as_ref()], cx).await;
let workspace = init_test_workspace(&project, cx).await;
let cx = &mut VisualTestContext::from_window(*workspace, cx);
// let project = Project::test(fs.clone(), [path!("/project").as_ref()], cx).await;
// let workspace = init_test_workspace(&project, cx).await;
// let cx = &mut VisualTestContext::from_window(*workspace, cx);

workspace
.update(cx, |workspace, window, cx| {
crate::new_session_modal::NewSessionModal::show(
workspace,
window,
NewSessionMode::Launch,
None,
cx,
);
})
.unwrap();
// workspace
// .update(cx, |workspace, window, cx| {
// crate::new_process_modal::NewProcessModal::show(
// workspace,
// window,
// NewProcessMode::Debug,
// None,
// cx,
// );
// })
// .unwrap();

cx.run_until_parked();
// cx.run_until_parked();

let modal = workspace
.update(cx, |workspace, _, cx| {
workspace.active_modal::<crate::new_session_modal::NewSessionModal>(cx)
})
.unwrap()
.expect("Modal should be active");
// let modal = workspace
// .update(cx, |workspace, _, cx| {
// workspace.active_modal::<crate::new_process_modal::NewProcessModal>(cx)
// })
// .unwrap()
// .expect("Modal should be active");

modal.update_in(cx, |modal, window, cx| {
modal.set_configure("/project/main", "/project", false, window, cx);
modal.save_scenario(window, cx);
});
// modal.update_in(cx, |modal, window, cx| {
// modal.set_configure("/project/main", "/project", false, window, cx);
// modal.save_scenario(window, cx);
// });

cx.executor().run_until_parked();
// cx.executor().run_until_parked();

let debug_json_content = fs
.load(path!("/project/.zed/debug.json").as_ref())
.await
.expect("debug.json should exist");
// let debug_json_content = fs
// .load(path!("/project/.zed/debug.json").as_ref())
// .await
// .expect("debug.json should exist");

let expected_content = vec![
"[",
" {",
r#" "adapter": "fake-adapter","#,
r#" "label": "main (fake-adapter)","#,
r#" "request": "launch","#,
r#" "program": "/project/main","#,
r#" "cwd": "/project","#,
r#" "args": [],"#,
r#" "env": {}"#,
" }",
"]",
];
// let expected_content = vec![
// "[",
// " {",
// r#" "adapter": "fake-adapter","#,
// r#" "label": "main (fake-adapter)","#,
// r#" "request": "launch","#,
// r#" "program": "/project/main","#,
// r#" "cwd": "/project","#,
// r#" "args": [],"#,
// r#" "env": {}"#,
// " }",
// "]",
// ];

let actual_lines: Vec<&str> = debug_json_content.lines().collect();
pretty_assertions::assert_eq!(expected_content, actual_lines);
// let actual_lines: Vec<&str> = debug_json_content.lines().collect();
// pretty_assertions::assert_eq!(expected_content, actual_lines);

modal.update_in(cx, |modal, window, cx| {
modal.set_configure("/project/other", "/project", true, window, cx);
modal.save_scenario(window, cx);
});
// modal.update_in(cx, |modal, window, cx| {
// modal.set_configure("/project/other", "/project", true, window, cx);
// modal.save_scenario(window, cx);
// });
cx.executor().run_until_parked();
|
||||
// cx.executor().run_until_parked();
|
||||
|
||||
let debug_json_content = fs
|
||||
.load(path!("/project/.zed/debug.json").as_ref())
|
||||
.await
|
||||
.expect("debug.json should exist after second save");
|
||||
// let debug_json_content = fs
|
||||
// .load(path!("/project/.zed/debug.json").as_ref())
|
||||
// .await
|
||||
// .expect("debug.json should exist after second save");
|
||||
|
||||
let expected_content = vec![
|
||||
"[",
|
||||
" {",
|
||||
r#" "adapter": "fake-adapter","#,
|
||||
r#" "label": "main (fake-adapter)","#,
|
||||
r#" "request": "launch","#,
|
||||
r#" "program": "/project/main","#,
|
||||
r#" "cwd": "/project","#,
|
||||
r#" "args": [],"#,
|
||||
r#" "env": {}"#,
|
||||
" },",
|
||||
" {",
|
||||
r#" "adapter": "fake-adapter","#,
|
||||
r#" "label": "other (fake-adapter)","#,
|
||||
r#" "request": "launch","#,
|
||||
r#" "program": "/project/other","#,
|
||||
r#" "cwd": "/project","#,
|
||||
r#" "args": [],"#,
|
||||
r#" "env": {}"#,
|
||||
" }",
|
||||
"]",
|
||||
];
|
||||
// let expected_content = vec![
|
||||
// "[",
|
||||
// " {",
|
||||
// r#" "adapter": "fake-adapter","#,
|
||||
// r#" "label": "main (fake-adapter)","#,
|
||||
// r#" "request": "launch","#,
|
||||
// r#" "program": "/project/main","#,
|
||||
// r#" "cwd": "/project","#,
|
||||
// r#" "args": [],"#,
|
||||
// r#" "env": {}"#,
|
||||
// " },",
|
||||
// " {",
|
||||
// r#" "adapter": "fake-adapter","#,
|
||||
// r#" "label": "other (fake-adapter)","#,
|
||||
// r#" "request": "launch","#,
|
||||
// r#" "program": "/project/other","#,
|
||||
// r#" "cwd": "/project","#,
|
||||
// r#" "args": [],"#,
|
||||
// r#" "env": {}"#,
|
||||
// " }",
|
||||
// "]",
|
||||
// ];
|
||||
|
||||
let actual_lines: Vec<&str> = debug_json_content.lines().collect();
|
||||
pretty_assertions::assert_eq!(expected_content, actual_lines);
|
||||
}
|
||||
// let actual_lines: Vec<&str> = debug_json_content.lines().collect();
|
||||
// pretty_assertions::assert_eq!(expected_content, actual_lines);
|
||||
// }
|
||||
|
||||
#[gpui::test]
async fn test_dap_adapter_config_conversion_and_validation(cx: &mut TestAppContext) {
@@ -322,7 +322,7 @@ async fn test_dap_adapter_config_conversion_and_validation(cx: &mut TestAppConte
);

let request_type = adapter
.validate_config(&debug_scenario.config)
.request_kind(&debug_scenario.config)
.unwrap_or_else(|_| {
panic!(
"Adapter {} should validate the config successfully",
@@ -1,9 +1,8 @@
use fuzzy::{StringMatch, StringMatchCandidate};
use gpui::{
AnyElement, Entity, Focusable, FontWeight, ListSizingBehavior, ScrollStrategy, SharedString,
Size, StrikethroughStyle, StyledText, UniformListScrollHandle, div, px, uniform_list,
Size, StrikethroughStyle, StyledText, Task, UniformListScrollHandle, div, px, uniform_list,
};
use gpui::{AsyncWindowContext, WeakEntity};
use itertools::Itertools;
use language::CodeLabel;
use language::{Buffer, LanguageName, LanguageRegistry};
@@ -18,6 +17,7 @@ use task::TaskContext;

use std::collections::VecDeque;
use std::sync::Arc;
use std::sync::atomic::{AtomicBool, Ordering};
use std::{
cell::RefCell,
cmp::{Reverse, min},
@@ -47,15 +47,10 @@ pub const MENU_ASIDE_MAX_WIDTH: Pixels = px(500.);
// Constants for the markdown cache. The purpose of this cache is to reduce flickering due to
// documentation not yet being parsed.
//
// The size of the cache is set to the number of items fetched around the current selection plus one
// for the current selection and another to avoid cases where an adjacent selection exits the
// cache. The only current benefit of a larger cache would be doing less markdown parsing when the
// selection revisits items.
//
// One future benefit of a larger cache would be reducing flicker on backspace. This would require
// not recreating the menu on every change, by not re-querying the language server when
// `is_incomplete = false`.
const MARKDOWN_CACHE_MAX_SIZE: usize = MARKDOWN_CACHE_BEFORE_ITEMS + MARKDOWN_CACHE_AFTER_ITEMS + 2;
// The size of the cache is set to 16, which is roughly 3 times more than the number of items
// fetched around the current selection. This way documentation is more often ready for render when
// revisiting previous entries, such as when pressing backspace.
const MARKDOWN_CACHE_MAX_SIZE: usize = 16;
const MARKDOWN_CACHE_BEFORE_ITEMS: usize = 2;
const MARKDOWN_CACHE_AFTER_ITEMS: usize = 2;

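The constants above describe a small ring cache. A minimal standalone sketch of that reuse strategy — plain `String` values stand in for `Entity<Markdown>`, and the capacity mirrors `MARKDOWN_CACHE_MAX_SIZE`; this is an illustration, not the editor's code:

use std::collections::VecDeque;

const CACHE_MAX_SIZE: usize = 16;

// Keyed cache that, once full, recycles its oldest slot instead of growing:
// `rotate_right(1)` moves the back (oldest) entry to the front without copying
// the other entries, and the new value overwrites it in place.
fn insert_front(cache: &mut VecDeque<(usize, String)>, key: usize, value: String) {
    if cache.len() < CACHE_MAX_SIZE {
        cache.push_front((key, value));
    } else {
        cache.rotate_right(1);
        cache[0] = (key, value);
    }
}

fn main() {
    let mut cache = VecDeque::with_capacity(CACHE_MAX_SIZE);
    for id in 0..20 {
        insert_front(&mut cache, id, format!("docs for completion {id}"));
    }
    assert_eq!(cache.len(), CACHE_MAX_SIZE);
    assert_eq!(cache[0].0, 19); // the most recent entry is always at the front
}
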
@@ -197,27 +192,48 @@ pub enum ContextMenuOrigin {
QuickActionBar,
}

#[derive(Clone)]
pub struct CompletionsMenu {
pub id: CompletionId,
sort_completions: bool,
pub initial_position: Anchor,
pub initial_query: Option<Arc<String>>,
pub is_incomplete: bool,
pub buffer: Entity<Buffer>,
pub completions: Rc<RefCell<Box<[Completion]>>>,
match_candidates: Rc<[StringMatchCandidate]>,
pub entries: Rc<RefCell<Vec<StringMatch>>>,
match_candidates: Arc<[StringMatchCandidate]>,
pub entries: Rc<RefCell<Box<[StringMatch]>>>,
pub selected_item: usize,
filter_task: Task<()>,
cancel_filter: Arc<AtomicBool>,
scroll_handle: UniformListScrollHandle,
resolve_completions: bool,
show_completion_documentation: bool,
pub(super) ignore_completion_provider: bool,
last_rendered_range: Rc<RefCell<Option<Range<usize>>>>,
markdown_cache: Rc<RefCell<VecDeque<(usize, Entity<Markdown>)>>>,
markdown_cache: Rc<RefCell<VecDeque<(MarkdownCacheKey, Entity<Markdown>)>>>,
language_registry: Option<Arc<LanguageRegistry>>,
language: Option<LanguageName>,
snippet_sort_order: SnippetSortOrder,
}

#[derive(Clone, Debug, PartialEq)]
enum MarkdownCacheKey {
ForCandidate {
candidate_id: usize,
},
ForCompletionMatch {
new_text: String,
markdown_source: SharedString,
},
}

// TODO: There should really be a wrapper around fuzzy match tasks that does this.
impl Drop for CompletionsMenu {
fn drop(&mut self) {
self.cancel_filter.store(true, Ordering::Relaxed);
}
}

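The `Drop` impl above exists to cancel an in-flight fuzzy-filter task. A standalone sketch of that cancellation pattern, using plain threads instead of GPUI background tasks (the names here are illustrative):

use std::sync::Arc;
use std::sync::atomic::{AtomicBool, Ordering};
use std::thread;
use std::time::Duration;

struct Menu {
    cancel_filter: Arc<AtomicBool>,
}

impl Drop for Menu {
    fn drop(&mut self) {
        // Closing the menu signals the background filter pass to stop early.
        self.cancel_filter.store(true, Ordering::Relaxed);
    }
}

fn main() {
    let menu = Menu { cancel_filter: Arc::new(AtomicBool::new(false)) };
    let cancel = menu.cancel_filter.clone();
    let worker = thread::spawn(move || {
        let mut scanned = 0u32;
        for _candidate in 0..1_000_000 {
            if cancel.load(Ordering::Relaxed) {
                break; // the menu was dropped; abandon the rest of the scan
            }
            scanned += 1;
            thread::sleep(Duration::from_micros(10));
        }
        scanned
    });
    drop(menu); // e.g. the completions menu was dismissed
    println!("scanned {} candidates before cancellation", worker.join().unwrap());
}
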
impl CompletionsMenu {
|
||||
pub fn new(
|
||||
id: CompletionId,
|
||||
@@ -225,6 +241,8 @@ impl CompletionsMenu {
|
||||
show_completion_documentation: bool,
|
||||
ignore_completion_provider: bool,
|
||||
initial_position: Anchor,
|
||||
initial_query: Option<Arc<String>>,
|
||||
is_incomplete: bool,
|
||||
buffer: Entity<Buffer>,
|
||||
completions: Box<[Completion]>,
|
||||
snippet_sort_order: SnippetSortOrder,
|
||||
@@ -242,17 +260,21 @@ impl CompletionsMenu {
|
||||
id,
|
||||
sort_completions,
|
||||
initial_position,
|
||||
initial_query,
|
||||
is_incomplete,
|
||||
buffer,
|
||||
show_completion_documentation,
|
||||
ignore_completion_provider,
|
||||
completions: RefCell::new(completions).into(),
|
||||
match_candidates,
|
||||
entries: RefCell::new(Vec::new()).into(),
|
||||
entries: Rc::new(RefCell::new(Box::new([]))),
|
||||
selected_item: 0,
|
||||
filter_task: Task::ready(()),
|
||||
cancel_filter: Arc::new(AtomicBool::new(false)),
|
||||
scroll_handle: UniformListScrollHandle::new(),
|
||||
resolve_completions: true,
|
||||
last_rendered_range: RefCell::new(None).into(),
|
||||
markdown_cache: RefCell::new(VecDeque::with_capacity(MARKDOWN_CACHE_MAX_SIZE)).into(),
|
||||
markdown_cache: RefCell::new(VecDeque::new()).into(),
|
||||
language_registry,
|
||||
language,
|
||||
snippet_sort_order,
|
||||
@@ -303,16 +325,20 @@ impl CompletionsMenu {
|
||||
positions: vec![],
|
||||
string: completion.clone(),
|
||||
})
|
||||
.collect::<Vec<_>>();
|
||||
.collect();
|
||||
Self {
|
||||
id,
|
||||
sort_completions,
|
||||
initial_position: selection.start,
|
||||
initial_query: None,
|
||||
is_incomplete: false,
|
||||
buffer,
|
||||
completions: RefCell::new(completions).into(),
|
||||
match_candidates,
|
||||
entries: RefCell::new(entries).into(),
|
||||
selected_item: 0,
|
||||
filter_task: Task::ready(()),
|
||||
cancel_filter: Arc::new(AtomicBool::new(false)),
|
||||
scroll_handle: UniformListScrollHandle::new(),
|
||||
resolve_completions: false,
|
||||
show_completion_documentation: false,
|
||||
@@ -390,14 +416,7 @@ impl CompletionsMenu {
|
||||
) {
|
||||
if self.selected_item != match_index {
|
||||
self.selected_item = match_index;
|
||||
self.scroll_handle
|
||||
.scroll_to_item(self.selected_item, ScrollStrategy::Top);
|
||||
self.resolve_visible_completions(provider, cx);
|
||||
self.start_markdown_parse_for_nearby_entries(cx);
|
||||
if let Some(provider) = provider {
|
||||
self.handle_selection_changed(provider, window, cx);
|
||||
}
|
||||
cx.notify();
|
||||
self.handle_selection_changed(provider, window, cx);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -418,18 +437,25 @@ impl CompletionsMenu {
|
||||
}
|
||||
|
||||
fn handle_selection_changed(
|
||||
&self,
|
||||
provider: &dyn CompletionProvider,
|
||||
&mut self,
|
||||
provider: Option<&dyn CompletionProvider>,
|
||||
window: &mut Window,
|
||||
cx: &mut App,
|
||||
cx: &mut Context<Editor>,
|
||||
) {
|
||||
let entries = self.entries.borrow();
|
||||
let entry = if self.selected_item < entries.len() {
|
||||
Some(&entries[self.selected_item])
|
||||
} else {
|
||||
None
|
||||
};
|
||||
provider.selection_changed(entry, window, cx);
|
||||
self.scroll_handle
|
||||
.scroll_to_item(self.selected_item, ScrollStrategy::Top);
|
||||
if let Some(provider) = provider {
|
||||
let entries = self.entries.borrow();
|
||||
let entry = if self.selected_item < entries.len() {
|
||||
Some(&entries[self.selected_item])
|
||||
} else {
|
||||
None
|
||||
};
|
||||
provider.selection_changed(entry, window, cx);
|
||||
}
|
||||
self.resolve_visible_completions(provider, cx);
|
||||
self.start_markdown_parse_for_nearby_entries(cx);
|
||||
cx.notify();
|
||||
}
|
||||
|
||||
pub fn resolve_visible_completions(
|
||||
@@ -444,6 +470,19 @@ impl CompletionsMenu {
|
||||
return;
|
||||
};
|
||||
|
||||
let entries = self.entries.borrow();
|
||||
if entries.is_empty() {
|
||||
return;
|
||||
}
|
||||
if self.selected_item >= entries.len() {
|
||||
log::error!(
|
||||
"bug: completion selected_item >= entries.len(): {} >= {}",
|
||||
self.selected_item,
|
||||
entries.len()
|
||||
);
|
||||
self.selected_item = entries.len() - 1;
|
||||
}
|
||||
|
||||
// Attempt to resolve completions for every item that will be displayed. This matters
|
||||
// because single line documentation may be displayed inline with the completion.
|
||||
//
|
||||
@@ -455,7 +494,6 @@ impl CompletionsMenu {
|
||||
let visible_count = last_rendered_range
|
||||
.clone()
|
||||
.map_or(APPROXIMATE_VISIBLE_COUNT, |range| range.count());
|
||||
let entries = self.entries.borrow();
|
||||
let entry_range = if self.selected_item == 0 {
|
||||
0..min(visible_count, entries.len())
|
||||
} else if self.selected_item == entries.len() - 1 {
|
||||
@@ -508,11 +546,11 @@ impl CompletionsMenu {
|
||||
.update(cx, |editor, cx| {
|
||||
// `resolve_completions` modified state affecting display.
|
||||
cx.notify();
|
||||
editor.with_completions_menu_matching_id(
|
||||
completion_id,
|
||||
|| (),
|
||||
|this| this.start_markdown_parse_for_nearby_entries(cx),
|
||||
);
|
||||
editor.with_completions_menu_matching_id(completion_id, |menu| {
|
||||
if let Some(menu) = menu {
|
||||
menu.start_markdown_parse_for_nearby_entries(cx)
|
||||
}
|
||||
});
|
||||
})
|
||||
.ok();
|
||||
}
|
||||
@@ -548,11 +586,11 @@ impl CompletionsMenu {
|
||||
return None;
|
||||
}
|
||||
let candidate_id = entries[index].candidate_id;
|
||||
match &self.completions.borrow()[candidate_id].documentation {
|
||||
Some(CompletionDocumentation::MultiLineMarkdown(source)) if !source.is_empty() => Some(
|
||||
self.get_or_create_markdown(candidate_id, source.clone(), false, cx)
|
||||
.1,
|
||||
),
|
||||
let completions = self.completions.borrow();
|
||||
match &completions[candidate_id].documentation {
|
||||
Some(CompletionDocumentation::MultiLineMarkdown(source)) if !source.is_empty() => self
|
||||
.get_or_create_markdown(candidate_id, Some(source), false, &completions, cx)
|
||||
.map(|(_, markdown)| markdown),
|
||||
Some(_) => None,
|
||||
_ => None,
|
||||
}
|
||||
@@ -561,38 +599,75 @@ impl CompletionsMenu {
|
||||
fn get_or_create_markdown(
|
||||
&self,
|
||||
candidate_id: usize,
|
||||
source: SharedString,
|
||||
source: Option<&SharedString>,
|
||||
is_render: bool,
|
||||
completions: &[Completion],
|
||||
cx: &mut Context<Editor>,
|
||||
) -> (bool, Entity<Markdown>) {
|
||||
) -> Option<(bool, Entity<Markdown>)> {
|
||||
let mut markdown_cache = self.markdown_cache.borrow_mut();
|
||||
if let Some((cache_index, (_, markdown))) = markdown_cache
|
||||
.iter()
|
||||
.find_position(|(id, _)| *id == candidate_id)
|
||||
{
|
||||
let markdown = if is_render && cache_index != 0 {
|
||||
|
||||
let mut has_completion_match_cache_entry = false;
let mut matching_entry = markdown_cache.iter().find_position(|(key, _)| match key {
MarkdownCacheKey::ForCandidate { candidate_id: id } => *id == candidate_id,
MarkdownCacheKey::ForCompletionMatch { .. } => {
has_completion_match_cache_entry = true;
false
}
});

if has_completion_match_cache_entry && matching_entry.is_none() {
if let Some(source) = source {
matching_entry = markdown_cache.iter().find_position(|(key, _)| {
matches!(key, MarkdownCacheKey::ForCompletionMatch { markdown_source, .. }
if markdown_source == source)
});
} else {
// Heuristic guess that documentation can be reused when new_text matches. This is
// to mitigate documentation flicker while typing. If this is wrong, then resolution
// should cause the correct documentation to be displayed soon.
let completion = &completions[candidate_id];
matching_entry = markdown_cache.iter().find_position(|(key, _)| {
matches!(key, MarkdownCacheKey::ForCompletionMatch { new_text, .. }
if new_text == &completion.new_text)
});
}
}
||||
|
||||
if let Some((cache_index, (key, markdown))) = matching_entry {
|
||||
let markdown = markdown.clone();
|
||||
|
||||
// Since the markdown source matches, the key can now be ForCandidate.
|
||||
if source.is_some() && matches!(key, MarkdownCacheKey::ForCompletionMatch { .. }) {
|
||||
markdown_cache[cache_index].0 = MarkdownCacheKey::ForCandidate { candidate_id };
|
||||
}
|
||||
|
||||
if is_render && cache_index != 0 {
|
||||
// Move the current selection's cache entry to the front.
|
||||
markdown_cache.rotate_right(1);
|
||||
let cache_len = markdown_cache.len();
|
||||
markdown_cache.swap(0, (cache_index + 1) % cache_len);
|
||||
&markdown_cache[0].1
|
||||
} else {
|
||||
markdown
|
||||
};
|
||||
}
|
||||
|
||||
let is_parsing = markdown.update(cx, |markdown, cx| {
|
||||
// `reset` is called as it's possible for documentation to change due to resolve
|
||||
// requests. It does nothing if `source` is unchanged.
|
||||
markdown.reset(source, cx);
|
||||
if let Some(source) = source {
|
||||
// `reset` is called as it's possible for documentation to change due to resolve
|
||||
// requests. It does nothing if `source` is unchanged.
|
||||
markdown.reset(source.clone(), cx);
|
||||
}
|
||||
markdown.is_parsing()
|
||||
});
|
||||
return (is_parsing, markdown.clone());
|
||||
return Some((is_parsing, markdown));
|
||||
}
|
||||
|
||||
let Some(source) = source else {
|
||||
// Can't create markdown as there is no source.
|
||||
return None;
|
||||
};
|
||||
|
||||
if markdown_cache.len() < MARKDOWN_CACHE_MAX_SIZE {
|
||||
let markdown = cx.new(|cx| {
|
||||
Markdown::new(
|
||||
source,
|
||||
source.clone(),
|
||||
self.language_registry.clone(),
|
||||
self.language.clone(),
|
||||
cx,
|
||||
@@ -601,17 +676,20 @@ impl CompletionsMenu {
|
||||
// Handles redraw when the markdown is done parsing. The current render is for a
|
||||
// deferred draw, and so without this did not redraw when `markdown` notified.
|
||||
cx.observe(&markdown, |_, _, cx| cx.notify()).detach();
|
||||
markdown_cache.push_front((candidate_id, markdown.clone()));
|
||||
(true, markdown)
|
||||
markdown_cache.push_front((
|
||||
MarkdownCacheKey::ForCandidate { candidate_id },
|
||||
markdown.clone(),
|
||||
));
|
||||
Some((true, markdown))
|
||||
} else {
|
||||
debug_assert_eq!(markdown_cache.capacity(), MARKDOWN_CACHE_MAX_SIZE);
|
||||
// Moves the last cache entry to the start. The ring buffer is full, so this does no
|
||||
// copying and just shifts indexes.
|
||||
markdown_cache.rotate_right(1);
|
||||
markdown_cache[0].0 = candidate_id;
|
||||
markdown_cache[0].0 = MarkdownCacheKey::ForCandidate { candidate_id };
|
||||
let markdown = &markdown_cache[0].1;
|
||||
markdown.update(cx, |markdown, cx| markdown.reset(source, cx));
|
||||
(true, markdown.clone())
|
||||
markdown.update(cx, |markdown, cx| markdown.reset(source.clone(), cx));
|
||||
Some((true, markdown.clone()))
|
||||
}
|
||||
}
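A simplified model of the lookup order implemented above, with plain strings standing in for parsed markdown: prefer an entry cached for this candidate id, and only otherwise fall back to an entry whose completion text matches, accepting that a later resolve may correct the documentation. The types and data below are illustrative, not the editor's actual ones:

#[derive(Clone, Debug, PartialEq)]
enum CacheKey {
    ForCandidate { candidate_id: usize },
    ForCompletionMatch { new_text: String, markdown_source: String },
}

fn lookup<'a>(
    cache: &'a [(CacheKey, String)],
    candidate_id: usize,
    new_text: &str,
) -> Option<&'a String> {
    // 1. Exact hit: markdown already cached for this candidate id.
    if let Some((_, md)) = cache.iter().find(|(key, _)| {
        matches!(key, CacheKey::ForCandidate { candidate_id: id } if *id == candidate_id)
    }) {
        return Some(md);
    }
    // 2. Heuristic hit: an entry carried over from a previous menu whose
    //    completion text matches; good enough to avoid flicker while typing.
    cache
        .iter()
        .find(|(key, _)| {
            matches!(key, CacheKey::ForCompletionMatch { new_text: cached, .. } if cached == new_text)
        })
        .map(|(_, md)| md)
}

fn main() {
    let cache = vec![(
        CacheKey::ForCompletionMatch {
            new_text: "push_str".to_string(),
            markdown_source: "Appends a string slice.".to_string(),
        },
        "Appends a string slice.".to_string(),
    )];
    assert_eq!(lookup(&cache, 7, "push_str"), Some(&"Appends a string slice.".to_string()));
    assert_eq!(lookup(&cache, 7, "push"), None);
}
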
|
||||
|
||||
@@ -774,37 +852,46 @@ impl CompletionsMenu {
|
||||
}
|
||||
|
||||
let mat = &self.entries.borrow()[self.selected_item];
|
||||
let multiline_docs = match self.completions.borrow_mut()[mat.candidate_id]
|
||||
.documentation
|
||||
.as_ref()?
|
||||
{
|
||||
CompletionDocumentation::MultiLinePlainText(text) => div().child(text.clone()),
|
||||
CompletionDocumentation::SingleLineAndMultiLinePlainText {
|
||||
let completions = self.completions.borrow_mut();
|
||||
let multiline_docs = match completions[mat.candidate_id].documentation.as_ref() {
|
||||
Some(CompletionDocumentation::MultiLinePlainText(text)) => div().child(text.clone()),
|
||||
Some(CompletionDocumentation::SingleLineAndMultiLinePlainText {
|
||||
plain_text: Some(text),
|
||||
..
|
||||
} => div().child(text.clone()),
|
||||
CompletionDocumentation::MultiLineMarkdown(source) if !source.is_empty() => {
|
||||
let (is_parsing, markdown) =
|
||||
self.get_or_create_markdown(mat.candidate_id, source.clone(), true, cx);
|
||||
if is_parsing {
|
||||
}) => div().child(text.clone()),
|
||||
Some(CompletionDocumentation::MultiLineMarkdown(source)) if !source.is_empty() => {
|
||||
let Some((false, markdown)) = self.get_or_create_markdown(
|
||||
mat.candidate_id,
|
||||
Some(source),
|
||||
true,
|
||||
&completions,
|
||||
cx,
|
||||
) else {
|
||||
return None;
|
||||
}
|
||||
div().child(
|
||||
MarkdownElement::new(markdown, hover_markdown_style(window, cx))
|
||||
.code_block_renderer(markdown::CodeBlockRenderer::Default {
|
||||
copy_button: false,
|
||||
copy_button_on_hover: false,
|
||||
border: false,
|
||||
})
|
||||
.on_url_click(open_markdown_url),
|
||||
)
|
||||
};
|
||||
Self::render_markdown(markdown, window, cx)
|
||||
}
|
||||
CompletionDocumentation::MultiLineMarkdown(_) => return None,
|
||||
CompletionDocumentation::SingleLine(_) => return None,
|
||||
CompletionDocumentation::Undocumented => return None,
|
||||
CompletionDocumentation::SingleLineAndMultiLinePlainText {
|
||||
plain_text: None, ..
|
||||
} => {
|
||||
None => {
|
||||
// Handle the case where documentation hasn't yet been resolved but there's a
|
||||
// `new_text` match in the cache.
|
||||
//
|
||||
// TODO: It's inconsistent that documentation caching based on matching `new_text`
|
||||
// only works for markdown. Consider generally caching the results of resolving
|
||||
// completions.
|
||||
let Some((false, markdown)) =
|
||||
self.get_or_create_markdown(mat.candidate_id, None, true, &completions, cx)
|
||||
else {
|
||||
return None;
|
||||
};
|
||||
Self::render_markdown(markdown, window, cx)
|
||||
}
|
||||
Some(CompletionDocumentation::MultiLineMarkdown(_)) => return None,
|
||||
Some(CompletionDocumentation::SingleLine(_)) => return None,
|
||||
Some(CompletionDocumentation::Undocumented) => return None,
|
||||
Some(CompletionDocumentation::SingleLineAndMultiLinePlainText {
|
||||
plain_text: None,
|
||||
..
|
||||
}) => {
|
||||
return None;
|
||||
}
|
||||
};
|
||||
@@ -824,6 +911,177 @@ impl CompletionsMenu {
|
||||
)
|
||||
}
|
||||
|
||||
fn render_markdown(
|
||||
markdown: Entity<Markdown>,
|
||||
window: &mut Window,
|
||||
cx: &mut Context<Editor>,
|
||||
) -> Div {
|
||||
div().child(
|
||||
MarkdownElement::new(markdown, hover_markdown_style(window, cx))
|
||||
.code_block_renderer(markdown::CodeBlockRenderer::Default {
|
||||
copy_button: false,
|
||||
copy_button_on_hover: false,
|
||||
border: false,
|
||||
})
|
||||
.on_url_click(open_markdown_url),
|
||||
)
|
||||
}
|
||||
|
||||
pub fn filter(
|
||||
&mut self,
|
||||
query: Option<Arc<String>>,
|
||||
provider: Option<Rc<dyn CompletionProvider>>,
|
||||
window: &mut Window,
|
||||
cx: &mut Context<Editor>,
|
||||
) {
|
||||
self.cancel_filter.store(true, Ordering::Relaxed);
|
||||
if let Some(query) = query {
|
||||
self.cancel_filter = Arc::new(AtomicBool::new(false));
|
||||
let matches = self.do_async_filtering(query, cx);
|
||||
let id = self.id;
|
||||
self.filter_task = cx.spawn_in(window, async move |editor, cx| {
|
||||
let matches = matches.await;
|
||||
editor
|
||||
.update_in(cx, |editor, window, cx| {
|
||||
editor.with_completions_menu_matching_id(id, |this| {
|
||||
if let Some(this) = this {
|
||||
this.set_filter_results(matches, provider, window, cx);
|
||||
}
|
||||
});
|
||||
})
|
||||
.ok();
|
||||
});
|
||||
} else {
|
||||
self.filter_task = Task::ready(());
|
||||
let matches = self.unfiltered_matches();
|
||||
self.set_filter_results(matches, provider, window, cx);
|
||||
}
|
||||
}
|
||||
|
||||
pub fn do_async_filtering(
|
||||
&self,
|
||||
query: Arc<String>,
|
||||
cx: &Context<Editor>,
|
||||
) -> Task<Vec<StringMatch>> {
|
||||
let matches_task = cx.background_spawn({
|
||||
let query = query.clone();
|
||||
let match_candidates = self.match_candidates.clone();
|
||||
let cancel_filter = self.cancel_filter.clone();
|
||||
let background_executor = cx.background_executor().clone();
|
||||
async move {
|
||||
fuzzy::match_strings(
|
||||
&match_candidates,
|
||||
&query,
|
||||
query.chars().any(|c| c.is_uppercase()),
|
||||
100,
|
||||
&cancel_filter,
|
||||
background_executor,
|
||||
)
|
||||
.await
|
||||
}
|
||||
});
|
||||
|
||||
let completions = self.completions.clone();
|
||||
let sort_completions = self.sort_completions;
|
||||
let snippet_sort_order = self.snippet_sort_order;
|
||||
cx.foreground_executor().spawn(async move {
|
||||
let mut matches = matches_task.await;
|
||||
|
||||
if sort_completions {
|
||||
matches = Self::sort_string_matches(
|
||||
matches,
|
||||
Some(&query),
|
||||
snippet_sort_order,
|
||||
completions.borrow().as_ref(),
|
||||
);
|
||||
}
|
||||
|
||||
matches
|
||||
})
|
||||
}
|
||||
|
||||
/// Like `do_async_filtering` but there is no filter query, so no need to spawn tasks.
|
||||
pub fn unfiltered_matches(&self) -> Vec<StringMatch> {
|
||||
let mut matches = self
|
||||
.match_candidates
|
||||
.iter()
|
||||
.enumerate()
|
||||
.map(|(candidate_id, candidate)| StringMatch {
|
||||
candidate_id,
|
||||
score: Default::default(),
|
||||
positions: Default::default(),
|
||||
string: candidate.string.clone(),
|
||||
})
|
||||
.collect();
|
||||
|
||||
if self.sort_completions {
|
||||
matches = Self::sort_string_matches(
|
||||
matches,
|
||||
None,
|
||||
self.snippet_sort_order,
|
||||
self.completions.borrow().as_ref(),
|
||||
);
|
||||
}
|
||||
|
||||
matches
|
||||
}
|
||||
|
||||
pub fn set_filter_results(
|
||||
&mut self,
|
||||
matches: Vec<StringMatch>,
|
||||
provider: Option<Rc<dyn CompletionProvider>>,
|
||||
window: &mut Window,
|
||||
cx: &mut Context<Editor>,
|
||||
) {
|
||||
*self.entries.borrow_mut() = matches.into_boxed_slice();
|
||||
self.selected_item = 0;
|
||||
self.handle_selection_changed(provider.as_deref(), window, cx);
|
||||
}
|
||||
|
||||
fn sort_string_matches(
|
||||
matches: Vec<StringMatch>,
|
||||
query: Option<&str>,
|
||||
snippet_sort_order: SnippetSortOrder,
|
||||
completions: &[Completion],
|
||||
) -> Vec<StringMatch> {
|
||||
let mut sortable_items: Vec<SortableMatch<'_>> = matches
|
||||
.into_iter()
|
||||
.map(|string_match| {
|
||||
let completion = &completions[string_match.candidate_id];
|
||||
|
||||
let is_snippet = matches!(
|
||||
&completion.source,
|
||||
CompletionSource::Lsp { lsp_completion, .. }
|
||||
if lsp_completion.kind == Some(CompletionItemKind::SNIPPET)
|
||||
);
|
||||
|
||||
let sort_text =
|
||||
if let CompletionSource::Lsp { lsp_completion, .. } = &completion.source {
|
||||
lsp_completion.sort_text.as_deref()
|
||||
} else {
|
||||
None
|
||||
};
|
||||
|
||||
let (sort_kind, sort_label) = completion.sort_key();
|
||||
|
||||
SortableMatch {
|
||||
string_match,
|
||||
is_snippet,
|
||||
sort_text,
|
||||
sort_kind,
|
||||
sort_label,
|
||||
}
|
||||
})
|
||||
.collect();
|
||||
|
||||
Self::sort_matches(&mut sortable_items, query, snippet_sort_order);
|
||||
|
||||
sortable_items
|
||||
.into_iter()
|
||||
.map(|sortable| sortable.string_match)
|
||||
.collect()
|
||||
}
|
||||
|
||||
pub fn sort_matches(
|
||||
matches: &mut Vec<SortableMatch<'_>>,
|
||||
query: Option<&str>,
|
||||
@@ -857,6 +1115,7 @@ impl CompletionsMenu {
|
||||
let fuzzy_bracket_threshold = max_score * (3.0 / 5.0);
|
||||
|
||||
let query_start_lower = query
|
||||
.as_ref()
|
||||
.and_then(|q| q.chars().next())
|
||||
.and_then(|c| c.to_lowercase().next());
|
||||
|
||||
@@ -890,6 +1149,7 @@ impl CompletionsMenu {
|
||||
};
|
||||
let sort_mixed_case_prefix_length = Reverse(
|
||||
query
|
||||
.as_ref()
|
||||
.map(|q| {
|
||||
q.chars()
|
||||
.zip(mat.string_match.string.chars())
|
||||
@@ -920,97 +1180,32 @@ impl CompletionsMenu {
|
||||
});
|
||||
}
|
||||
|
||||
pub async fn filter(
|
||||
&mut self,
|
||||
query: Option<&str>,
|
||||
provider: Option<Rc<dyn CompletionProvider>>,
|
||||
editor: WeakEntity<Editor>,
|
||||
cx: &mut AsyncWindowContext,
|
||||
) {
|
||||
let mut matches = if let Some(query) = query {
|
||||
fuzzy::match_strings(
|
||||
&self.match_candidates,
|
||||
query,
|
||||
query.chars().any(|c| c.is_uppercase()),
|
||||
100,
|
||||
&Default::default(),
|
||||
cx.background_executor().clone(),
|
||||
)
|
||||
.await
|
||||
} else {
|
||||
self.match_candidates
|
||||
.iter()
|
||||
.enumerate()
|
||||
.map(|(candidate_id, candidate)| StringMatch {
|
||||
candidate_id,
|
||||
score: Default::default(),
|
||||
positions: Default::default(),
|
||||
string: candidate.string.clone(),
|
||||
})
|
||||
.collect()
|
||||
};
|
||||
pub fn preserve_markdown_cache(&mut self, prev_menu: CompletionsMenu) {
|
||||
self.markdown_cache = prev_menu.markdown_cache.clone();
|
||||
|
||||
if self.sort_completions {
|
||||
let completions = self.completions.borrow();
|
||||
|
||||
let mut sortable_items: Vec<SortableMatch<'_>> = matches
|
||||
.into_iter()
|
||||
.map(|string_match| {
|
||||
let completion = &completions[string_match.candidate_id];
|
||||
|
||||
let is_snippet = matches!(
|
||||
&completion.source,
|
||||
CompletionSource::Lsp { lsp_completion, .. }
|
||||
if lsp_completion.kind == Some(CompletionItemKind::SNIPPET)
|
||||
);
|
||||
|
||||
let sort_text =
|
||||
if let CompletionSource::Lsp { lsp_completion, .. } = &completion.source {
|
||||
lsp_completion.sort_text.as_deref()
|
||||
} else {
|
||||
None
|
||||
};
|
||||
|
||||
let (sort_kind, sort_label) = completion.sort_key();
|
||||
|
||||
SortableMatch {
|
||||
string_match,
|
||||
is_snippet,
|
||||
sort_text,
|
||||
sort_kind,
|
||||
sort_label,
|
||||
// Convert ForCandidate cache keys to ForCompletionMatch keys.
|
||||
let prev_completions = prev_menu.completions.borrow();
|
||||
self.markdown_cache
|
||||
.borrow_mut()
|
||||
.retain_mut(|(key, _markdown)| match key {
|
||||
MarkdownCacheKey::ForCompletionMatch { .. } => true,
|
||||
MarkdownCacheKey::ForCandidate { candidate_id } => {
|
||||
if let Some(completion) = prev_completions.get(*candidate_id) {
|
||||
match &completion.documentation {
|
||||
Some(CompletionDocumentation::MultiLineMarkdown(source)) => {
|
||||
*key = MarkdownCacheKey::ForCompletionMatch {
|
||||
new_text: completion.new_text.clone(),
|
||||
markdown_source: source.clone(),
|
||||
};
|
||||
true
|
||||
}
|
||||
_ => false,
|
||||
}
|
||||
} else {
|
||||
false
|
||||
}
|
||||
})
|
||||
.collect();
|
||||
|
||||
Self::sort_matches(&mut sortable_items, query, self.snippet_sort_order);
|
||||
|
||||
matches = sortable_items
|
||||
.into_iter()
|
||||
.map(|sortable| sortable.string_match)
|
||||
.collect();
|
||||
}
|
||||
|
||||
*self.entries.borrow_mut() = matches;
|
||||
self.selected_item = 0;
|
||||
// This keeps the display consistent when y_flipped.
|
||||
self.scroll_handle.scroll_to_item(0, ScrollStrategy::Top);
|
||||
|
||||
if let Some(provider) = provider {
|
||||
cx.update(|window, cx| {
|
||||
// Since this is async, it's possible the menu has been closed and possibly even
|
||||
// another opened. `provider.selection_changed` should not be called in this case.
|
||||
let this_menu_still_active = editor
|
||||
.read_with(cx, |editor, _cx| {
|
||||
editor.with_completions_menu_matching_id(self.id, || false, |_| true)
|
||||
})
|
||||
.unwrap_or(false);
|
||||
if this_menu_still_active {
|
||||
self.handle_selection_changed(&*provider, window, cx);
|
||||
}
|
||||
})
|
||||
.ok();
|
||||
}
|
||||
});
|
||||
}
|
||||
}
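A reduced, standalone sketch of the re-keying that `preserve_markdown_cache` performs above when a new menu replaces an old one: entries keyed by the old menu's candidate indices are re-keyed by completion text, and entries without markdown documentation are dropped. The types and sample data are illustrative only:

#[derive(Debug, PartialEq)]
enum Key {
    ForCandidate { candidate_id: usize },
    ForCompletionMatch { new_text: String },
}

fn main() {
    // The previous menu's completions: (inserted text, markdown documentation).
    let prev_completions: Vec<(&str, Option<&str>)> =
        vec![("push_str", Some("Appends a string slice.")), ("pop", None)];

    // Markdown cache keyed by the previous menu's candidate ids.
    let mut cache = vec![
        (Key::ForCandidate { candidate_id: 0 }, "Appends a string slice.".to_string()),
        (Key::ForCandidate { candidate_id: 1 }, "stale entry".to_string()),
    ];

    // Re-key entries by completion text so a rebuilt menu, whose candidate ids
    // differ, can still find them; drop entries that had no documentation.
    cache.retain_mut(|(key, _markdown)| match key {
        Key::ForCompletionMatch { .. } => true,
        Key::ForCandidate { candidate_id } => match prev_completions.get(*candidate_id) {
            Some((new_text, Some(_source))) => {
                *key = Key::ForCompletionMatch { new_text: new_text.to_string() };
                true
            }
            _ => false,
        },
    });

    assert_eq!(cache.len(), 1);
    assert!(matches!(&cache[0].0, Key::ForCompletionMatch { new_text } if new_text == "push_str"));
}
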
|
||||
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
use super::*;
|
||||
use crate::{
|
||||
JoinLines,
|
||||
code_context_menus::CodeContextMenu,
|
||||
inline_completion_tests::FakeInlineCompletionProvider,
|
||||
linked_editing_ranges::LinkedEditingRanges,
|
||||
scroll::scroll_amount::ScrollAmount,
|
||||
@@ -8512,108 +8513,123 @@ async fn test_snippet_placeholder_choices(cx: &mut TestAppContext) {
|
||||
async fn test_snippets(cx: &mut TestAppContext) {
|
||||
init_test(cx, |_| {});
|
||||
|
||||
let (text, insertion_ranges) = marked_text_ranges(
|
||||
indoc! {"
|
||||
a.ˇ b
|
||||
a.ˇ b
|
||||
a.ˇ b
|
||||
"},
|
||||
false,
|
||||
);
|
||||
let mut cx = EditorTestContext::new(cx).await;
|
||||
|
||||
let buffer = cx.update(|cx| MultiBuffer::build_simple(&text, cx));
|
||||
let (editor, cx) = cx.add_window_view(|window, cx| build_editor(buffer, window, cx));
|
||||
cx.set_state(indoc! {"
|
||||
a.ˇ b
|
||||
a.ˇ b
|
||||
a.ˇ b
|
||||
"});
|
||||
|
||||
editor.update_in(cx, |editor, window, cx| {
|
||||
cx.update_editor(|editor, window, cx| {
|
||||
let snippet = Snippet::parse("f(${1:one}, ${2:two}, ${1:three})$0").unwrap();
|
||||
|
||||
let insertion_ranges = editor
|
||||
.selections
|
||||
.all(cx)
|
||||
.iter()
|
||||
.map(|s| s.range().clone())
|
||||
.collect::<Vec<_>>();
|
||||
editor
|
||||
.insert_snippet(&insertion_ranges, snippet, window, cx)
|
||||
.unwrap();
|
||||
|
||||
fn assert(editor: &mut Editor, cx: &mut Context<Editor>, marked_text: &str) {
|
||||
let (expected_text, selection_ranges) = marked_text_ranges(marked_text, false);
|
||||
assert_eq!(editor.text(cx), expected_text);
|
||||
assert_eq!(editor.selections.ranges::<usize>(cx), selection_ranges);
|
||||
}
|
||||
|
||||
assert(
|
||||
editor,
|
||||
cx,
|
||||
indoc! {"
|
||||
a.f(«one», two, «three») b
|
||||
a.f(«one», two, «three») b
|
||||
a.f(«one», two, «three») b
|
||||
"},
|
||||
);
|
||||
|
||||
// Can't move earlier than the first tab stop
|
||||
assert!(!editor.move_to_prev_snippet_tabstop(window, cx));
|
||||
assert(
|
||||
editor,
|
||||
cx,
|
||||
indoc! {"
|
||||
a.f(«one», two, «three») b
|
||||
a.f(«one», two, «three») b
|
||||
a.f(«one», two, «three») b
|
||||
"},
|
||||
);
|
||||
|
||||
assert!(editor.move_to_next_snippet_tabstop(window, cx));
|
||||
assert(
|
||||
editor,
|
||||
cx,
|
||||
indoc! {"
|
||||
a.f(one, «two», three) b
|
||||
a.f(one, «two», three) b
|
||||
a.f(one, «two», three) b
|
||||
"},
|
||||
);
|
||||
|
||||
editor.move_to_prev_snippet_tabstop(window, cx);
|
||||
assert(
|
||||
editor,
|
||||
cx,
|
||||
indoc! {"
|
||||
a.f(«one», two, «three») b
|
||||
a.f(«one», two, «three») b
|
||||
a.f(«one», two, «three») b
|
||||
"},
|
||||
);
|
||||
|
||||
assert!(editor.move_to_next_snippet_tabstop(window, cx));
|
||||
assert(
|
||||
editor,
|
||||
cx,
|
||||
indoc! {"
|
||||
a.f(one, «two», three) b
|
||||
a.f(one, «two», three) b
|
||||
a.f(one, «two», three) b
|
||||
"},
|
||||
);
|
||||
assert!(editor.move_to_next_snippet_tabstop(window, cx));
|
||||
assert(
|
||||
editor,
|
||||
cx,
|
||||
indoc! {"
|
||||
a.f(one, two, three)ˇ b
|
||||
a.f(one, two, three)ˇ b
|
||||
a.f(one, two, three)ˇ b
|
||||
"},
|
||||
);
|
||||
|
||||
// As soon as the last tab stop is reached, snippet state is gone
|
||||
editor.move_to_prev_snippet_tabstop(window, cx);
|
||||
assert(
|
||||
editor,
|
||||
cx,
|
||||
indoc! {"
|
||||
a.f(one, two, three)ˇ b
|
||||
a.f(one, two, three)ˇ b
|
||||
a.f(one, two, three)ˇ b
|
||||
"},
|
||||
);
|
||||
});
|
||||
|
||||
cx.assert_editor_state(indoc! {"
|
||||
a.f(«oneˇ», two, «threeˇ») b
|
||||
a.f(«oneˇ», two, «threeˇ») b
|
||||
a.f(«oneˇ», two, «threeˇ») b
|
||||
"});
|
||||
|
||||
// Can't move earlier than the first tab stop
|
||||
cx.update_editor(|editor, window, cx| {
|
||||
assert!(!editor.move_to_prev_snippet_tabstop(window, cx))
|
||||
});
|
||||
cx.assert_editor_state(indoc! {"
|
||||
a.f(«oneˇ», two, «threeˇ») b
|
||||
a.f(«oneˇ», two, «threeˇ») b
|
||||
a.f(«oneˇ», two, «threeˇ») b
|
||||
"});
|
||||
|
||||
cx.update_editor(|editor, window, cx| assert!(editor.move_to_next_snippet_tabstop(window, cx)));
|
||||
cx.assert_editor_state(indoc! {"
|
||||
a.f(one, «twoˇ», three) b
|
||||
a.f(one, «twoˇ», three) b
|
||||
a.f(one, «twoˇ», three) b
|
||||
"});
|
||||
|
||||
cx.update_editor(|editor, window, cx| assert!(editor.move_to_prev_snippet_tabstop(window, cx)));
|
||||
cx.assert_editor_state(indoc! {"
|
||||
a.f(«oneˇ», two, «threeˇ») b
|
||||
a.f(«oneˇ», two, «threeˇ») b
|
||||
a.f(«oneˇ», two, «threeˇ») b
|
||||
"});
|
||||
|
||||
cx.update_editor(|editor, window, cx| assert!(editor.move_to_next_snippet_tabstop(window, cx)));
|
||||
cx.assert_editor_state(indoc! {"
|
||||
a.f(one, «twoˇ», three) b
|
||||
a.f(one, «twoˇ», three) b
|
||||
a.f(one, «twoˇ», three) b
|
||||
"});
|
||||
cx.update_editor(|editor, window, cx| assert!(editor.move_to_next_snippet_tabstop(window, cx)));
|
||||
cx.assert_editor_state(indoc! {"
|
||||
a.f(one, two, three)ˇ b
|
||||
a.f(one, two, three)ˇ b
|
||||
a.f(one, two, three)ˇ b
|
||||
"});
|
||||
|
||||
// As soon as the last tab stop is reached, snippet state is gone
|
||||
cx.update_editor(|editor, window, cx| {
|
||||
assert!(!editor.move_to_prev_snippet_tabstop(window, cx))
|
||||
});
|
||||
cx.assert_editor_state(indoc! {"
|
||||
a.f(one, two, three)ˇ b
|
||||
a.f(one, two, three)ˇ b
|
||||
a.f(one, two, three)ˇ b
|
||||
"});
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_snippet_indentation(cx: &mut TestAppContext) {
|
||||
init_test(cx, |_| {});
|
||||
|
||||
let mut cx = EditorTestContext::new(cx).await;
|
||||
|
||||
cx.update_editor(|editor, window, cx| {
|
||||
let snippet = Snippet::parse(indoc! {"
|
||||
/*
|
||||
* Multiline comment with leading indentation
|
||||
*
|
||||
* $1
|
||||
*/
|
||||
$0"})
|
||||
.unwrap();
|
||||
let insertion_ranges = editor
|
||||
.selections
|
||||
.all(cx)
|
||||
.iter()
|
||||
.map(|s| s.range().clone())
|
||||
.collect::<Vec<_>>();
|
||||
editor
|
||||
.insert_snippet(&insertion_ranges, snippet, window, cx)
|
||||
.unwrap();
|
||||
});
|
||||
|
||||
cx.assert_editor_state(indoc! {"
|
||||
/*
|
||||
* Multiline comment with leading indentation
|
||||
*
|
||||
* ˇ
|
||||
*/
|
||||
"});
|
||||
|
||||
cx.update_editor(|editor, window, cx| assert!(editor.move_to_next_snippet_tabstop(window, cx)));
|
||||
cx.assert_editor_state(indoc! {"
|
||||
/*
|
||||
* Multiline comment with leading indentation
|
||||
*
|
||||
*•
|
||||
*/
|
||||
ˇ"});
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
@@ -10479,6 +10495,7 @@ async fn test_completion_mode(cx: &mut TestAppContext) {
|
||||
run_description: &'static str,
|
||||
initial_state: String,
|
||||
buffer_marked_text: String,
|
||||
completion_label: &'static str,
|
||||
completion_text: &'static str,
|
||||
expected_with_insert_mode: String,
|
||||
expected_with_replace_mode: String,
|
||||
@@ -10491,6 +10508,7 @@ async fn test_completion_mode(cx: &mut TestAppContext) {
|
||||
run_description: "Start of word matches completion text",
|
||||
initial_state: "before ediˇ after".into(),
|
||||
buffer_marked_text: "before <edi|> after".into(),
|
||||
completion_label: "editor",
|
||||
completion_text: "editor",
|
||||
expected_with_insert_mode: "before editorˇ after".into(),
|
||||
expected_with_replace_mode: "before editorˇ after".into(),
|
||||
@@ -10501,6 +10519,7 @@ async fn test_completion_mode(cx: &mut TestAppContext) {
|
||||
run_description: "Accept same text at the middle of the word",
|
||||
initial_state: "before ediˇtor after".into(),
|
||||
buffer_marked_text: "before <edi|tor> after".into(),
|
||||
completion_label: "editor",
|
||||
completion_text: "editor",
|
||||
expected_with_insert_mode: "before editorˇtor after".into(),
|
||||
expected_with_replace_mode: "before editorˇ after".into(),
|
||||
@@ -10511,6 +10530,7 @@ async fn test_completion_mode(cx: &mut TestAppContext) {
|
||||
run_description: "End of word matches completion text -- cursor at end",
|
||||
initial_state: "before torˇ after".into(),
|
||||
buffer_marked_text: "before <tor|> after".into(),
|
||||
completion_label: "editor",
|
||||
completion_text: "editor",
|
||||
expected_with_insert_mode: "before editorˇ after".into(),
|
||||
expected_with_replace_mode: "before editorˇ after".into(),
|
||||
@@ -10521,6 +10541,7 @@ async fn test_completion_mode(cx: &mut TestAppContext) {
|
||||
run_description: "End of word matches completion text -- cursor at start",
|
||||
initial_state: "before ˇtor after".into(),
|
||||
buffer_marked_text: "before <|tor> after".into(),
|
||||
completion_label: "editor",
|
||||
completion_text: "editor",
|
||||
expected_with_insert_mode: "before editorˇtor after".into(),
|
||||
expected_with_replace_mode: "before editorˇ after".into(),
|
||||
@@ -10531,6 +10552,7 @@ async fn test_completion_mode(cx: &mut TestAppContext) {
|
||||
run_description: "Prepend text containing whitespace",
|
||||
initial_state: "pˇfield: bool".into(),
|
||||
buffer_marked_text: "<p|field>: bool".into(),
|
||||
completion_label: "pub ",
|
||||
completion_text: "pub ",
|
||||
expected_with_insert_mode: "pub ˇfield: bool".into(),
|
||||
expected_with_replace_mode: "pub ˇ: bool".into(),
|
||||
@@ -10541,6 +10563,7 @@ async fn test_completion_mode(cx: &mut TestAppContext) {
|
||||
run_description: "Add element to start of list",
|
||||
initial_state: "[element_ˇelement_2]".into(),
|
||||
buffer_marked_text: "[<element_|element_2>]".into(),
|
||||
completion_label: "element_1",
|
||||
completion_text: "element_1",
|
||||
expected_with_insert_mode: "[element_1ˇelement_2]".into(),
|
||||
expected_with_replace_mode: "[element_1ˇ]".into(),
|
||||
@@ -10551,6 +10574,7 @@ async fn test_completion_mode(cx: &mut TestAppContext) {
|
||||
run_description: "Add element to start of list -- first and second elements are equal",
|
||||
initial_state: "[elˇelement]".into(),
|
||||
buffer_marked_text: "[<el|element>]".into(),
|
||||
completion_label: "element",
|
||||
completion_text: "element",
|
||||
expected_with_insert_mode: "[elementˇelement]".into(),
|
||||
expected_with_replace_mode: "[elementˇ]".into(),
|
||||
@@ -10561,6 +10585,7 @@ async fn test_completion_mode(cx: &mut TestAppContext) {
|
||||
run_description: "Ends with matching suffix",
|
||||
initial_state: "SubˇError".into(),
|
||||
buffer_marked_text: "<Sub|Error>".into(),
|
||||
completion_label: "SubscriptionError",
|
||||
completion_text: "SubscriptionError",
|
||||
expected_with_insert_mode: "SubscriptionErrorˇError".into(),
|
||||
expected_with_replace_mode: "SubscriptionErrorˇ".into(),
|
||||
@@ -10571,6 +10596,7 @@ async fn test_completion_mode(cx: &mut TestAppContext) {
|
||||
run_description: "Suffix is a subsequence -- contiguous",
|
||||
initial_state: "SubˇErr".into(),
|
||||
buffer_marked_text: "<Sub|Err>".into(),
|
||||
completion_label: "SubscriptionError",
|
||||
completion_text: "SubscriptionError",
|
||||
expected_with_insert_mode: "SubscriptionErrorˇErr".into(),
|
||||
expected_with_replace_mode: "SubscriptionErrorˇ".into(),
|
||||
@@ -10581,6 +10607,7 @@ async fn test_completion_mode(cx: &mut TestAppContext) {
|
||||
run_description: "Suffix is a subsequence -- non-contiguous -- replace intended",
|
||||
initial_state: "Suˇscrirr".into(),
|
||||
buffer_marked_text: "<Su|scrirr>".into(),
|
||||
completion_label: "SubscriptionError",
|
||||
completion_text: "SubscriptionError",
|
||||
expected_with_insert_mode: "SubscriptionErrorˇscrirr".into(),
|
||||
expected_with_replace_mode: "SubscriptionErrorˇ".into(),
|
||||
@@ -10591,12 +10618,46 @@ async fn test_completion_mode(cx: &mut TestAppContext) {
|
||||
run_description: "Suffix is a subsequence -- non-contiguous -- replace unintended",
|
||||
initial_state: "foo(indˇix)".into(),
|
||||
buffer_marked_text: "foo(<ind|ix>)".into(),
|
||||
completion_label: "node_index",
|
||||
completion_text: "node_index",
|
||||
expected_with_insert_mode: "foo(node_indexˇix)".into(),
|
||||
expected_with_replace_mode: "foo(node_indexˇ)".into(),
|
||||
expected_with_replace_subsequence_mode: "foo(node_indexˇix)".into(),
|
||||
expected_with_replace_suffix_mode: "foo(node_indexˇix)".into(),
|
||||
},
|
||||
Run {
|
||||
run_description: "Replace range ends before cursor - should extend to cursor",
|
||||
initial_state: "before editˇo after".into(),
|
||||
buffer_marked_text: "before <{ed}>it|o after".into(),
|
||||
completion_label: "editor",
|
||||
completion_text: "editor",
|
||||
expected_with_insert_mode: "before editorˇo after".into(),
|
||||
expected_with_replace_mode: "before editorˇo after".into(),
|
||||
expected_with_replace_subsequence_mode: "before editorˇo after".into(),
|
||||
expected_with_replace_suffix_mode: "before editorˇo after".into(),
|
||||
},
|
||||
Run {
|
||||
run_description: "Uses label for suffix matching",
|
||||
initial_state: "before ediˇtor after".into(),
|
||||
buffer_marked_text: "before <edi|tor> after".into(),
|
||||
completion_label: "editor",
|
||||
completion_text: "editor()",
|
||||
expected_with_insert_mode: "before editor()ˇtor after".into(),
|
||||
expected_with_replace_mode: "before editor()ˇ after".into(),
|
||||
expected_with_replace_subsequence_mode: "before editor()ˇ after".into(),
|
||||
expected_with_replace_suffix_mode: "before editor()ˇ after".into(),
|
||||
},
|
||||
Run {
|
||||
run_description: "Case insensitive subsequence and suffix matching",
|
||||
initial_state: "before EDiˇtoR after".into(),
|
||||
buffer_marked_text: "before <EDi|toR> after".into(),
|
||||
completion_label: "editor",
|
||||
completion_text: "editor",
|
||||
expected_with_insert_mode: "before editorˇtoR after".into(),
|
||||
expected_with_replace_mode: "before editorˇ after".into(),
|
||||
expected_with_replace_subsequence_mode: "before editorˇ after".into(),
|
||||
expected_with_replace_suffix_mode: "before editorˇ after".into(),
|
||||
},
|
||||
];
|
||||
|
||||
for run in runs {
|
||||
@@ -10637,7 +10698,7 @@ async fn test_completion_mode(cx: &mut TestAppContext) {
|
||||
handle_completion_request_with_insert_and_replace(
|
||||
&mut cx,
|
||||
&run.buffer_marked_text,
|
||||
vec![run.completion_text],
|
||||
vec![(run.completion_label, run.completion_text)],
|
||||
counter.clone(),
|
||||
)
|
||||
.await;
|
||||
@@ -10697,7 +10758,7 @@ async fn test_completion_with_mode_specified_by_action(cx: &mut TestAppContext)
|
||||
handle_completion_request_with_insert_and_replace(
|
||||
&mut cx,
|
||||
&buffer_marked_text,
|
||||
vec![completion_text],
|
||||
vec![(completion_text, completion_text)],
|
||||
counter.clone(),
|
||||
)
|
||||
.await;
|
||||
@@ -10731,7 +10792,7 @@ async fn test_completion_with_mode_specified_by_action(cx: &mut TestAppContext)
|
||||
handle_completion_request_with_insert_and_replace(
|
||||
&mut cx,
|
||||
&buffer_marked_text,
|
||||
vec![completion_text],
|
||||
vec![(completion_text, completion_text)],
|
||||
counter.clone(),
|
||||
)
|
||||
.await;
|
||||
@@ -10818,7 +10879,7 @@ async fn test_completion_replacing_surrounding_text_with_multicursors(cx: &mut T
|
||||
handle_completion_request_with_insert_and_replace(
|
||||
&mut cx,
|
||||
completion_marked_buffer,
|
||||
vec![completion_text],
|
||||
vec![(completion_text, completion_text)],
|
||||
Arc::new(AtomicUsize::new(0)),
|
||||
)
|
||||
.await;
|
||||
@@ -10872,7 +10933,7 @@ async fn test_completion_replacing_surrounding_text_with_multicursors(cx: &mut T
|
||||
handle_completion_request_with_insert_and_replace(
|
||||
&mut cx,
|
||||
completion_marked_buffer,
|
||||
vec![completion_text],
|
||||
vec![(completion_text, completion_text)],
|
||||
Arc::new(AtomicUsize::new(0)),
|
||||
)
|
||||
.await;
|
||||
@@ -10921,7 +10982,7 @@ async fn test_completion_replacing_surrounding_text_with_multicursors(cx: &mut T
|
||||
handle_completion_request_with_insert_and_replace(
|
||||
&mut cx,
|
||||
completion_marked_buffer,
|
||||
vec![completion_text],
|
||||
vec![(completion_text, completion_text)],
|
||||
Arc::new(AtomicUsize::new(0)),
|
||||
)
|
||||
.await;
|
||||
@@ -11139,14 +11200,15 @@ async fn test_completion(cx: &mut TestAppContext) {
|
||||
"});
|
||||
cx.simulate_keystroke(".");
|
||||
handle_completion_request(
|
||||
&mut cx,
|
||||
indoc! {"
|
||||
one.|<>
|
||||
two
|
||||
three
|
||||
"},
|
||||
vec!["first_completion", "second_completion"],
|
||||
true,
|
||||
counter.clone(),
|
||||
&mut cx,
|
||||
)
|
||||
.await;
|
||||
cx.condition(|editor, _| editor.context_menu_visible())
|
||||
@@ -11246,7 +11308,6 @@ async fn test_completion(cx: &mut TestAppContext) {
|
||||
additional edit
|
||||
"});
|
||||
handle_completion_request(
|
||||
&mut cx,
|
||||
indoc! {"
|
||||
one.second_completion
|
||||
two s
|
||||
@@ -11254,7 +11315,9 @@ async fn test_completion(cx: &mut TestAppContext) {
|
||||
additional edit
|
||||
"},
|
||||
vec!["fourth_completion", "fifth_completion", "sixth_completion"],
|
||||
true,
|
||||
counter.clone(),
|
||||
&mut cx,
|
||||
)
|
||||
.await;
|
||||
cx.condition(|editor, _| editor.context_menu_visible())
|
||||
@@ -11264,7 +11327,6 @@ async fn test_completion(cx: &mut TestAppContext) {
|
||||
cx.simulate_keystroke("i");
|
||||
|
||||
handle_completion_request(
|
||||
&mut cx,
|
||||
indoc! {"
|
||||
one.second_completion
|
||||
two si
|
||||
@@ -11272,7 +11334,9 @@ async fn test_completion(cx: &mut TestAppContext) {
|
||||
additional edit
|
||||
"},
|
||||
vec!["fourth_completion", "fifth_completion", "sixth_completion"],
|
||||
true,
|
||||
counter.clone(),
|
||||
&mut cx,
|
||||
)
|
||||
.await;
|
||||
cx.condition(|editor, _| editor.context_menu_visible())
|
||||
@@ -11306,10 +11370,11 @@ async fn test_completion(cx: &mut TestAppContext) {
|
||||
editor.show_completions(&ShowCompletions { trigger: None }, window, cx);
|
||||
});
|
||||
handle_completion_request(
|
||||
&mut cx,
|
||||
"editor.<clo|>",
|
||||
vec!["close", "clobber"],
|
||||
true,
|
||||
counter.clone(),
|
||||
&mut cx,
|
||||
)
|
||||
.await;
|
||||
cx.condition(|editor, _| editor.context_menu_visible())
|
||||
@@ -11326,6 +11391,128 @@ async fn test_completion(cx: &mut TestAppContext) {
|
||||
apply_additional_edits.await.unwrap();
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_completion_reuse(cx: &mut TestAppContext) {
|
||||
init_test(cx, |_| {});
|
||||
|
||||
let mut cx = EditorLspTestContext::new_rust(
|
||||
lsp::ServerCapabilities {
|
||||
completion_provider: Some(lsp::CompletionOptions {
|
||||
trigger_characters: Some(vec![".".to_string()]),
|
||||
..Default::default()
|
||||
}),
|
||||
..Default::default()
|
||||
},
|
||||
cx,
|
||||
)
|
||||
.await;
|
||||
|
||||
let counter = Arc::new(AtomicUsize::new(0));
|
||||
cx.set_state("objˇ");
|
||||
cx.simulate_keystroke(".");
|
||||
|
||||
// Initial completion request returns complete results
|
||||
let is_incomplete = false;
|
||||
handle_completion_request(
|
||||
"obj.|<>",
|
||||
vec!["a", "ab", "abc"],
|
||||
is_incomplete,
|
||||
counter.clone(),
|
||||
&mut cx,
|
||||
)
|
||||
.await;
|
||||
cx.run_until_parked();
|
||||
assert_eq!(counter.load(atomic::Ordering::Acquire), 1);
|
||||
cx.assert_editor_state("obj.ˇ");
|
||||
check_displayed_completions(vec!["a", "ab", "abc"], &mut cx);
|
||||
|
||||
// Type "a" - filters existing completions
|
||||
cx.simulate_keystroke("a");
|
||||
cx.run_until_parked();
|
||||
assert_eq!(counter.load(atomic::Ordering::Acquire), 1);
|
||||
cx.assert_editor_state("obj.aˇ");
|
||||
check_displayed_completions(vec!["a", "ab", "abc"], &mut cx);
|
||||
|
||||
// Type "b" - filters existing completions
|
||||
cx.simulate_keystroke("b");
|
||||
cx.run_until_parked();
|
||||
assert_eq!(counter.load(atomic::Ordering::Acquire), 1);
|
||||
cx.assert_editor_state("obj.abˇ");
|
||||
check_displayed_completions(vec!["ab", "abc"], &mut cx);
|
||||
|
||||
// Type "c" - filters existing completions
|
||||
cx.simulate_keystroke("c");
|
||||
cx.run_until_parked();
|
||||
assert_eq!(counter.load(atomic::Ordering::Acquire), 1);
|
||||
cx.assert_editor_state("obj.abcˇ");
|
||||
check_displayed_completions(vec!["abc"], &mut cx);
|
||||
|
||||
// Backspace to delete "c" - filters existing completions
|
||||
cx.update_editor(|editor, window, cx| {
|
||||
editor.backspace(&Backspace, window, cx);
|
||||
});
|
||||
cx.run_until_parked();
|
||||
assert_eq!(counter.load(atomic::Ordering::Acquire), 1);
|
||||
cx.assert_editor_state("obj.abˇ");
|
||||
check_displayed_completions(vec!["ab", "abc"], &mut cx);
|
||||
|
||||
// Moving cursor to the left dismisses menu.
|
||||
cx.update_editor(|editor, window, cx| {
|
||||
editor.move_left(&MoveLeft, window, cx);
|
||||
});
|
||||
cx.run_until_parked();
|
||||
assert_eq!(counter.load(atomic::Ordering::Acquire), 1);
|
||||
cx.assert_editor_state("obj.aˇb");
|
||||
cx.update_editor(|editor, _, _| {
|
||||
assert_eq!(editor.context_menu_visible(), false);
|
||||
});
|
||||
|
||||
// Type "b" - new request
|
||||
cx.simulate_keystroke("b");
|
||||
let is_incomplete = false;
|
||||
handle_completion_request(
|
||||
"obj.<ab|>a",
|
||||
vec!["ab", "abc"],
|
||||
is_incomplete,
|
||||
counter.clone(),
|
||||
&mut cx,
|
||||
)
|
||||
.await;
|
||||
cx.run_until_parked();
|
||||
assert_eq!(counter.load(atomic::Ordering::Acquire), 2);
|
||||
cx.assert_editor_state("obj.abˇb");
|
||||
check_displayed_completions(vec!["ab", "abc"], &mut cx);
|
||||
|
||||
// Backspace to delete "b" - since query was "ab" and is now "a", new request is made.
|
||||
cx.update_editor(|editor, window, cx| {
|
||||
editor.backspace(&Backspace, window, cx);
|
||||
});
|
||||
let is_incomplete = false;
|
||||
handle_completion_request(
|
||||
"obj.<a|>b",
|
||||
vec!["a", "ab", "abc"],
|
||||
is_incomplete,
|
||||
counter.clone(),
|
||||
&mut cx,
|
||||
)
|
||||
.await;
|
||||
cx.run_until_parked();
|
||||
assert_eq!(counter.load(atomic::Ordering::Acquire), 3);
|
||||
cx.assert_editor_state("obj.aˇb");
|
||||
check_displayed_completions(vec!["a", "ab", "abc"], &mut cx);
|
||||
|
||||
// Backspace to delete "a" - dismisses menu.
|
||||
cx.update_editor(|editor, window, cx| {
|
||||
editor.backspace(&Backspace, window, cx);
|
||||
});
|
||||
cx.run_until_parked();
|
||||
assert_eq!(counter.load(atomic::Ordering::Acquire), 3);
|
||||
cx.assert_editor_state("obj.ˇb");
|
||||
cx.update_editor(|editor, _, _| {
|
||||
assert_eq!(editor.context_menu_visible(), false);
|
||||
});
|
||||
}
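A simplified statement of the reuse rule this test exercises: cached completions are only re-filtered when the previous response was complete and the new query extends the one the cache was produced for; otherwise the language server is asked again. The function and enum below are illustrative, not the editor's actual API:

#[derive(Debug, PartialEq)]
enum CompletionQueryAction {
    FilterCached,
    RequestNew,
}

// Simplified rule: cached results can be re-filtered only if the server said the
// previous response was complete and the new query still starts with the query
// the cached results were produced for.
fn on_query_changed(is_incomplete: bool, cached_query: &str, new_query: &str) -> CompletionQueryAction {
    if !is_incomplete && new_query.starts_with(cached_query) {
        CompletionQueryAction::FilterCached
    } else {
        CompletionQueryAction::RequestNew
    }
}

fn main() {
    // Mirrors the test: a complete response for "" can be filtered as "a", "ab", "abc" are typed...
    assert_eq!(on_query_changed(false, "", "ab"), CompletionQueryAction::FilterCached);
    // ...but deleting back past the cached query forces a new request,
    assert_eq!(on_query_changed(false, "ab", "a"), CompletionQueryAction::RequestNew);
    // and an incomplete response is re-requested as the query grows.
    assert_eq!(on_query_changed(true, "a", "ab"), CompletionQueryAction::RequestNew);
}
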
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_word_completion(cx: &mut TestAppContext) {
|
||||
let lsp_fetch_timeout_ms = 10;
|
||||
@@ -12006,9 +12193,11 @@ async fn test_no_duplicated_completion_requests(cx: &mut TestAppContext) {
let task_completion_item = closure_completion_item.clone();
counter_clone.fetch_add(1, atomic::Ordering::Release);
async move {
Ok(Some(lsp::CompletionResponse::Array(vec![
task_completion_item,
])))
Ok(Some(lsp::CompletionResponse::List(lsp::CompletionList {
is_incomplete: true,
item_defaults: None,
items: vec![task_completion_item],
})))
}
});
|
||||
@@ -17082,6 +17271,64 @@ async fn test_indent_guide_ends_before_empty_line(cx: &mut TestAppContext) {
);
}

#[gpui::test]
async fn test_indent_guide_ignored_only_whitespace_lines(cx: &mut TestAppContext) {
let (buffer_id, mut cx) = setup_indent_guides_editor(
&"
function component() {
\treturn (
\t\t\t
\t\t<div>
\t\t\t<abc></abc>
\t\t</div>
\t)
}"
.unindent(),
cx,
)
.await;

assert_indent_guides(
0..8,
vec![
indent_guide(buffer_id, 1, 6, 0),
indent_guide(buffer_id, 2, 5, 1),
indent_guide(buffer_id, 4, 4, 2),
],
None,
&mut cx,
);
}

#[gpui::test]
async fn test_indent_guide_fallback_to_next_non_entirely_whitespace_line(cx: &mut TestAppContext) {
let (buffer_id, mut cx) = setup_indent_guides_editor(
&"
function component() {
\treturn (
\t
\t\t<div>
\t\t\t<abc></abc>
\t\t</div>
\t)
}"
.unindent(),
cx,
)
.await;

assert_indent_guides(
0..8,
vec![
indent_guide(buffer_id, 1, 6, 0),
indent_guide(buffer_id, 2, 5, 1),
indent_guide(buffer_id, 4, 4, 2),
],
None,
&mut cx,
);
}

#[gpui::test]
async fn test_indent_guide_continuing_off_screen(cx: &mut TestAppContext) {
let (buffer_id, mut cx) = setup_indent_guides_editor(
@@ -20016,7 +20263,6 @@ println!("5");
pane_1
.update_in(cx, |pane, window, cx| {
pane.close_inactive_items(&CloseInactiveItems::default(), window, cx)
.unwrap()
})
.await
.unwrap();
@@ -20053,7 +20299,6 @@ println!("5");
pane_2
.update_in(cx, |pane, window, cx| {
pane.close_inactive_items(&CloseInactiveItems::default(), window, cx)
.unwrap()
})
.await
.unwrap();
@@ -20229,7 +20474,6 @@ println!("5");
});
pane.update_in(cx, |pane, window, cx| {
pane.close_all_items(&CloseAllItems::default(), window, cx)
.unwrap()
})
.await
.unwrap();
@@ -20583,7 +20827,6 @@ async fn test_invisible_worktree_servers(cx: &mut TestAppContext) {
pane.update_in(cx, |pane, window, cx| {
pane.close_active_item(&CloseActiveItem::default(), window, cx)
})
.unwrap()
.await
.unwrap();
pane.update_in(cx, |pane, window, cx| {
@@ -21010,6 +21253,22 @@ pub fn handle_signature_help_request(
}
}

#[track_caller]
pub fn check_displayed_completions(expected: Vec<&'static str>, cx: &mut EditorLspTestContext) {
cx.update_editor(|editor, _, _| {
if let Some(CodeContextMenu::Completions(menu)) = editor.context_menu.borrow().as_ref() {
let entries = menu.entries.borrow();
let entries = entries
.iter()
.map(|entry| entry.string.as_str())
.collect::<Vec<_>>();
assert_eq!(entries, expected);
} else {
panic!("Expected completions menu");
}
});
}

/// Handle completion request passing a marked string specifying where the completion
/// should be triggered from using '|' character, what range should be replaced, and what completions
/// should be returned using '<' and '>' to delimit the range.
@@ -21017,10 +21276,11 @@ pub fn handle_signature_help_request(
/// Also see `handle_completion_request_with_insert_and_replace`.
#[track_caller]
pub fn handle_completion_request(
cx: &mut EditorLspTestContext,
marked_string: &str,
completions: Vec<&'static str>,
is_incomplete: bool,
counter: Arc<AtomicUsize>,
cx: &mut EditorLspTestContext,
) -> impl Future<Output = ()> {
let complete_from_marker: TextRangeMarker = '|'.into();
let replace_range_marker: TextRangeMarker = ('<', '>').into();
@@ -21044,8 +21304,10 @@ pub fn handle_completion_request(
params.text_document_position.position,
complete_from_position
);
Ok(Some(lsp::CompletionResponse::Array(
completions
Ok(Some(lsp::CompletionResponse::List(lsp::CompletionList {
is_incomplete: is_incomplete,
item_defaults: None,
items: completions
.iter()
.map(|completion_text| lsp::CompletionItem {
label: completion_text.to_string(),
@@ -21056,7 +21318,7 @@ pub fn handle_completion_request(
..Default::default()
})
.collect(),
)))
})))
}
});

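With the context now passed first and the `is_incomplete` flag threaded through to the fake server, a call site reads roughly as below. The buffer text and completion labels here are illustrative only; the parameter order is the one introduced by this hunk.

handle_completion_request(
    &mut cx,
    "obj.<ab|>",          // `|` marks the cursor, `<` and `>` delimit the replace range
    vec!["ab", "abc"],    // labels the fake language server returns
    false,                // is_incomplete: the returned list is complete
    counter.clone(),      // counts how many completion requests were issued
)
.await;
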
@@ -21068,19 +21330,27 @@ pub fn handle_completion_request(
/// Similar to `handle_completion_request`, but a [`CompletionTextEdit::InsertAndReplace`] will be
/// given instead, which also contains an `insert` range.
///
/// This function uses the cursor position to mimic what Rust-Analyzer provides as the `insert` range,
/// that is, `replace_range.start..cursor_pos`.
/// This function uses markers to define ranges:
/// - `|` marks the cursor position
/// - `<>` marks the replace range
/// - `{}` marks the insert range (optional, defaults to `replace_range.start..cursor_pos`, which is what Rust-Analyzer provides)
pub fn handle_completion_request_with_insert_and_replace(
cx: &mut EditorLspTestContext,
marked_string: &str,
completions: Vec<&'static str>,
completions: Vec<(&'static str, &'static str)>, // (label, new_text)
counter: Arc<AtomicUsize>,
) -> impl Future<Output = ()> {
let complete_from_marker: TextRangeMarker = '|'.into();
let replace_range_marker: TextRangeMarker = ('<', '>').into();
let insert_range_marker: TextRangeMarker = ('{', '}').into();

let (_, mut marked_ranges) = marked_text_ranges_by(
marked_string,
vec![complete_from_marker.clone(), replace_range_marker.clone()],
vec![
complete_from_marker.clone(),
replace_range_marker.clone(),
insert_range_marker.clone(),
],
);

let complete_from_position =
@@ -21088,6 +21358,14 @@ pub fn handle_completion_request_with_insert_and_replace(
let replace_range =
cx.to_lsp_range(marked_ranges.remove(&replace_range_marker).unwrap()[0].clone());

let insert_range = match marked_ranges.remove(&insert_range_marker) {
Some(ranges) if !ranges.is_empty() => cx.to_lsp_range(ranges[0].clone()),
_ => lsp::Range {
start: replace_range.start,
end: complete_from_position,
},
};

let mut request =
cx.set_request_handler::<lsp::request::Completion, _, _>(move |url, params, _| {
let completions = completions.clone();
@@ -21101,16 +21379,13 @@ pub fn handle_completion_request_with_insert_and_replace(
Ok(Some(lsp::CompletionResponse::Array(
completions
.iter()
.map(|completion_text| lsp::CompletionItem {
label: completion_text.to_string(),
.map(|(label, new_text)| lsp::CompletionItem {
label: label.to_string(),
text_edit: Some(lsp::CompletionTextEdit::InsertAndReplace(
lsp::InsertReplaceEdit {
insert: lsp::Range {
start: replace_range.start,
end: complete_from_position,
},
insert: insert_range,
replace: replace_range,
new_text: completion_text.to_string(),
new_text: new_text.to_string(),
},
)),
..Default::default()

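A hedged usage sketch of the marker convention above (the buffer text and the (label, new_text) pairs are illustrative; when the `{}` range is omitted, `insert` defaults to `replace_range.start..cursor_pos`):

handle_completion_request_with_insert_and_replace(
    &mut cx,
    "value.<unw|>rap",              // `|` cursor, `<>` replace range, no `{}` insert range
    vec![("unwrap", "unwrap()")],   // (label, new_text)
    counter.clone(),
)
.await;
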
@@ -682,7 +682,7 @@ impl EditorElement {
editor.select(
SelectPhase::BeginColumnar {
position,
reset: false,
reset: true,
goal_column: point_for_position.exact_unclipped.column(),
},
window,

@@ -1095,14 +1095,15 @@ mod tests {
//prompt autocompletion menu
cx.simulate_keystroke(".");
handle_completion_request(
&mut cx,
indoc! {"
one.|<>
two
three
"},
vec!["first_completion", "second_completion"],
true,
counter.clone(),
&mut cx,
)
.await;
cx.condition(|editor, _| editor.context_menu_visible()) // wait until completion menu is visible

@@ -600,7 +600,7 @@ pub(crate) fn handle_from(
})
.collect::<Vec<_>>();
this.update_in(cx, |this, window, cx| {
this.change_selections_without_showing_completions(None, window, cx, |s| {
this.change_selections_without_updating_completions(None, window, cx, |s| {
s.select(base_selections);
});
})

@@ -22,6 +22,7 @@ use smol::stream::StreamExt;
use task::ResolvedTask;
use task::TaskContext;
use text::BufferId;
use ui::SharedString;
use util::ResultExt as _;

pub(crate) fn find_specific_language_server_in_selection<F>(
@@ -133,13 +134,22 @@ pub fn lsp_tasks(

cx.spawn(async move |cx| {
cx.spawn(async move |cx| {
let mut lsp_tasks = Vec::new();
let mut lsp_tasks = HashMap::default();
while let Some(server_to_query) = lsp_task_sources.next().await {
if let Some((server_id, buffers)) = server_to_query {
let source_kind = TaskSourceKind::Lsp(server_id);
let id_base = source_kind.to_id_base();
let mut new_lsp_tasks = Vec::new();
for buffer in buffers {
let source_kind = match buffer.update(cx, |buffer, _| {
buffer.language().map(|language| language.name())
}) {
Ok(Some(language_name)) => TaskSourceKind::Lsp {
server: server_id,
language_name: SharedString::from(language_name),
},
Ok(None) => continue,
Err(_) => return Vec::new(),
};
let id_base = source_kind.to_id_base();
let lsp_buffer_context = lsp_task_context(&project, &buffer, cx)
.await
.unwrap_or_default();
@@ -168,11 +178,14 @@ pub fn lsp_tasks(
);
}
}
lsp_tasks
.entry(source_kind)
.or_insert_with(Vec::new)
.append(&mut new_lsp_tasks);
}
lsp_tasks.push((source_kind, new_lsp_tasks));
}
}
lsp_tasks
lsp_tasks.into_iter().collect()
})
.race({
// `lsp::LSP_REQUEST_TIMEOUT` is larger than we want for the modal to open fast

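The hunk above switches the accumulator from a `Vec` of `(source_kind, tasks)` pairs to a `HashMap` keyed by `TaskSourceKind`, so tasks coming from several buffers that share the same server and language are merged into a single bucket instead of producing duplicate entries. A minimal sketch of that grouping pattern with generic names (not the project's types):

use std::collections::HashMap;

fn group<K: std::hash::Hash + Eq, V>(items: Vec<(K, Vec<V>)>) -> HashMap<K, Vec<V>> {
    let mut grouped: HashMap<K, Vec<V>> = HashMap::new();
    for (key, mut values) in items {
        // Same shape as `.entry(source_kind).or_insert_with(Vec::new).append(...)` above.
        grouped.entry(key).or_insert_with(Vec::new).append(&mut values);
    }
    grouped
}
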
@@ -532,7 +532,9 @@ impl EditorTestContext {
#[track_caller]
pub fn assert_editor_selections(&mut self, expected_selections: Vec<Range<usize>>) {
let expected_marked_text =
generate_marked_text(&self.buffer_text(), &expected_selections, true);
generate_marked_text(&self.buffer_text(), &expected_selections, true)
.replace(" \n", "•\n");

self.assert_selections(expected_selections, expected_marked_text)
}

@@ -561,7 +563,8 @@ impl EditorTestContext {
) {
let actual_selections = self.editor_selections();
let actual_marked_text =
generate_marked_text(&self.buffer_text(), &actual_selections, true);
generate_marked_text(&self.buffer_text(), &actual_selections, true)
.replace(" \n", "•\n");
if expected_selections != actual_selections {
pretty_assertions::assert_eq!(
actual_marked_text,

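The `•` substitution only affects the text used for assertion output: a line that ends in a space is rendered with a visible bullet, so trailing-whitespace differences show up in test failures instead of being invisible. Roughly:

// Illustrative only: a trailing space before '\n' becomes '•' in the marked text.
assert_eq!("foo \nbar".replace(" \n", "•\n"), "foo•\nbar");
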
@@ -246,6 +246,7 @@ impl ExampleContext {
| ThreadEvent::StreamedAssistantThinking(_, _)
| ThreadEvent::UsePendingTools { .. }
| ThreadEvent::CompletionCanceled => {}
ThreadEvent::ToolUseLimitReached => {}
ThreadEvent::ToolFinished {
tool_use_id,
pending_tool_use,

@@ -759,8 +759,8 @@ async fn test_extension_store_with_test_extension(cx: &mut TestAppContext) {
})
.await
.unwrap()
.unwrap()
.into_iter()
.flat_map(|response| response.completions)
.map(|c| c.label.text)
.collect::<Vec<_>>();
assert_eq!(

@@ -38,8 +38,8 @@ use std::{
};
use text::Point;
use ui::{
ContextMenu, HighlightedLabel, IconButtonShape, ListItem, ListItemSpacing, PopoverMenu,
PopoverMenuHandle, Tooltip, prelude::*,
ButtonLike, ContextMenu, HighlightedLabel, Indicator, KeyBinding, ListItem, ListItemSpacing,
PopoverMenu, PopoverMenuHandle, TintColor, Tooltip, prelude::*,
};
use util::{ResultExt, maybe, paths::PathWithPosition, post_inc};
use workspace::{
@@ -47,7 +47,10 @@ use workspace::{
notifications::NotifyResultExt, pane,
};

actions!(file_finder, [SelectPrevious, ToggleMenu]);
actions!(
file_finder,
[SelectPrevious, ToggleFilterMenu, ToggleSplitMenu]
);

impl ModalView for FileFinder {
fn on_before_dismiss(
@@ -56,7 +59,14 @@ impl ModalView for FileFinder {
cx: &mut Context<Self>,
) -> workspace::DismissDecision {
let submenu_focused = self.picker.update(cx, |picker, cx| {
picker.delegate.popover_menu_handle.is_focused(window, cx)
picker
.delegate
.filter_popover_menu_handle
.is_focused(window, cx)
|| picker
.delegate
.split_popover_menu_handle
.is_focused(window, cx)
});
workspace::DismissDecision::Dismiss(!submenu_focused)
}
@@ -212,9 +222,30 @@ impl FileFinder {
window.dispatch_action(Box::new(menu::SelectPrevious), cx);
}

fn handle_toggle_menu(&mut self, _: &ToggleMenu, window: &mut Window, cx: &mut Context<Self>) {
fn handle_filter_toggle_menu(
&mut self,
_: &ToggleFilterMenu,
window: &mut Window,
cx: &mut Context<Self>,
) {
self.picker.update(cx, |picker, cx| {
let menu_handle = &picker.delegate.popover_menu_handle;
let menu_handle = &picker.delegate.filter_popover_menu_handle;
if menu_handle.is_deployed() {
menu_handle.hide(cx);
} else {
menu_handle.show(window, cx);
}
});
}

fn handle_split_toggle_menu(
&mut self,
_: &ToggleSplitMenu,
window: &mut Window,
cx: &mut Context<Self>,
) {
self.picker.update(cx, |picker, cx| {
let menu_handle = &picker.delegate.split_popover_menu_handle;
if menu_handle.is_deployed() {
menu_handle.hide(cx);
} else {
@@ -345,7 +376,8 @@ impl Render for FileFinder {
.w(modal_max_width)
.on_modifiers_changed(cx.listener(Self::handle_modifiers_changed))
.on_action(cx.listener(Self::handle_select_prev))
.on_action(cx.listener(Self::handle_toggle_menu))
.on_action(cx.listener(Self::handle_filter_toggle_menu))
.on_action(cx.listener(Self::handle_split_toggle_menu))
.on_action(cx.listener(Self::handle_toggle_ignored))
.on_action(cx.listener(Self::go_to_file_split_left))
.on_action(cx.listener(Self::go_to_file_split_right))
@@ -371,7 +403,8 @@ pub struct FileFinderDelegate {
history_items: Vec<FoundPath>,
separate_history: bool,
first_update: bool,
popover_menu_handle: PopoverMenuHandle<ContextMenu>,
filter_popover_menu_handle: PopoverMenuHandle<ContextMenu>,
split_popover_menu_handle: PopoverMenuHandle<ContextMenu>,
focus_handle: FocusHandle,
include_ignored: Option<bool>,
include_ignored_refresh: Task<()>,
@@ -758,7 +791,8 @@ impl FileFinderDelegate {
history_items,
separate_history,
first_update: true,
popover_menu_handle: PopoverMenuHandle::default(),
filter_popover_menu_handle: PopoverMenuHandle::default(),
split_popover_menu_handle: PopoverMenuHandle::default(),
focus_handle: cx.focus_handle(),
include_ignored: FileFinderSettings::get_global(cx).include_ignored,
include_ignored_refresh: Task::ready(()),
@@ -1137,8 +1171,13 @@ impl FileFinderDelegate {
fn key_context(&self, window: &Window, cx: &App) -> KeyContext {
let mut key_context = KeyContext::new_with_defaults();
key_context.add("FileFinder");
if self.popover_menu_handle.is_focused(window, cx) {
key_context.add("menu_open");

if self.filter_popover_menu_handle.is_focused(window, cx) {
key_context.add("filter_menu_open");
}

if self.split_popover_menu_handle.is_focused(window, cx) {
key_context.add("split_menu_open");
}
key_context
}
@@ -1492,62 +1531,112 @@ impl PickerDelegate for FileFinderDelegate {
)
}

fn render_footer(&self, _: &mut Window, cx: &mut Context<Picker<Self>>) -> Option<AnyElement> {
let context = self.focus_handle.clone();
fn render_footer(
&self,
window: &mut Window,
cx: &mut Context<Picker<Self>>,
) -> Option<AnyElement> {
let focus_handle = self.focus_handle.clone();

Some(
h_flex()
.w_full()
.p_2()
.p_1p5()
.justify_between()
.border_t_1()
.border_color(cx.theme().colors().border_variant)
.child(
IconButton::new("toggle-ignored", IconName::Sliders)
.on_click({
let focus_handle = self.focus_handle.clone();
move |_, window, cx| {
focus_handle.dispatch_action(&ToggleIncludeIgnored, window, cx);
}
PopoverMenu::new("filter-menu-popover")
.with_handle(self.filter_popover_menu_handle.clone())
.attach(gpui::Corner::BottomRight)
.anchor(gpui::Corner::BottomLeft)
.offset(gpui::Point {
x: px(1.0),
y: px(1.0),
})
.style(ButtonStyle::Subtle)
.shape(IconButtonShape::Square)
.toggle_state(self.include_ignored.unwrap_or(false))
.tooltip({
let focus_handle = self.focus_handle.clone();
.trigger_with_tooltip(
IconButton::new("filter-trigger", IconName::Sliders)
.icon_size(IconSize::Small)
.icon_size(IconSize::Small)
.toggle_state(self.include_ignored.unwrap_or(false))
.when(self.include_ignored.is_some(), |this| {
this.indicator(Indicator::dot().color(Color::Info))
}),
{
let focus_handle = focus_handle.clone();
move |window, cx| {
Tooltip::for_action_in(
"Filter Options",
&ToggleFilterMenu,
&focus_handle,
window,
cx,
)
}
},
)
.menu({
let focus_handle = focus_handle.clone();
let include_ignored = self.include_ignored;

move |window, cx| {
Tooltip::for_action_in(
"Use ignored files",
&ToggleIncludeIgnored,
&focus_handle,
window,
cx,
)
Some(ContextMenu::build(window, cx, {
let focus_handle = focus_handle.clone();
move |menu, _, _| {
menu.context(focus_handle.clone())
.header("Filter Options")
.toggleable_entry(
"Include Ignored Files",
include_ignored.unwrap_or(false),
ui::IconPosition::End,
Some(ToggleIncludeIgnored.boxed_clone()),
move |window, cx| {
window.focus(&focus_handle);
window.dispatch_action(
ToggleIncludeIgnored.boxed_clone(),
cx,
);
},
)
}
}))
}
}),
)
.child(
h_flex()
.gap_2()
.gap_0p5()
.child(
Button::new("open-selection", "Open").on_click(|_, window, cx| {
window.dispatch_action(menu::Confirm.boxed_clone(), cx)
}),
)
.child(
PopoverMenu::new("menu-popover")
.with_handle(self.popover_menu_handle.clone())
.attach(gpui::Corner::TopRight)
.anchor(gpui::Corner::BottomRight)
PopoverMenu::new("split-menu-popover")
.with_handle(self.split_popover_menu_handle.clone())
.attach(gpui::Corner::BottomRight)
.anchor(gpui::Corner::BottomLeft)
.offset(gpui::Point {
x: px(1.0),
y: px(1.0),
})
.trigger(
Button::new("actions-trigger", "Split…")
.selected_label_color(Color::Accent),
ButtonLike::new("split-trigger")
.child(Label::new("Split…"))
.selected_style(ButtonStyle::Tinted(TintColor::Accent))
.children(
KeyBinding::for_action_in(
&ToggleSplitMenu,
&focus_handle,
window,
cx,
)
.map(|kb| kb.size(rems_from_px(12.))),
),
)
.menu({
let focus_handle = focus_handle.clone();

move |window, cx| {
Some(ContextMenu::build(window, cx, {
let context = context.clone();
let focus_handle = focus_handle.clone();
move |menu, _, _| {
menu.context(context)
menu.context(focus_handle.clone())
.action(
"Split Left",
pane::SplitLeft.boxed_clone(),
@@ -1565,6 +1654,21 @@ impl PickerDelegate for FileFinderDelegate {
}))
}
}),
)
.child(
Button::new("open-selection", "Open")
.key_binding(
KeyBinding::for_action_in(
&menu::Confirm,
&focus_handle,
window,
cx,
)
.map(|kb| kb.size(rems_from_px(12.))),
)
.on_click(|_, window, cx| {
window.dispatch_action(menu::Confirm.boxed_clone(), cx)
}),
),
)
.into_any(),

@@ -739,7 +739,6 @@ async fn test_ignored_root(cx: &mut TestAppContext) {
.update_in(cx, |workspace, window, cx| {
workspace.active_pane().update(cx, |pane, cx| {
pane.close_active_item(&CloseActiveItem::default(), window, cx)
.unwrap()
})
})
.await

@@ -39,15 +39,32 @@ pub struct UserCaretPosition {
}

impl UserCaretPosition {
pub fn at_selection_end(selection: &Selection<Point>, snapshot: &MultiBufferSnapshot) -> Self {
pub(crate) fn at_selection_end(
selection: &Selection<Point>,
snapshot: &MultiBufferSnapshot,
) -> Self {
let selection_end = selection.head();
let line_start = Point::new(selection_end.row, 0);
let chars_to_last_position = snapshot
.text_summary_for_range::<text::TextSummary, _>(line_start..selection_end)
.chars as u32;
let (line, character) = if let Some((buffer_snapshot, point, _)) =
snapshot.point_to_buffer_point(selection_end)
{
let line_start = Point::new(point.row, 0);

let chars_to_last_position = buffer_snapshot
.text_summary_for_range::<text::TextSummary, _>(line_start..point)
.chars as u32;
(line_start.row, chars_to_last_position)
} else {
let line_start = Point::new(selection_end.row, 0);

let chars_to_last_position = snapshot
.text_summary_for_range::<text::TextSummary, _>(line_start..selection_end)
.chars as u32;
(selection_end.row, chars_to_last_position)
};

Self {
line: NonZeroU32::new(selection_end.row + 1).expect("added 1"),
character: NonZeroU32::new(chars_to_last_position + 1).expect("added 1"),
line: NonZeroU32::new(line + 1).expect("added 1"),
character: NonZeroU32::new(character + 1).expect("added 1"),
}
}
}

@@ -202,6 +202,7 @@ pub enum Part {
InlineDataPart(InlineDataPart),
FunctionCallPart(FunctionCallPart),
FunctionResponsePart(FunctionResponsePart),
ThoughtPart(ThoughtPart),
}

#[derive(Debug, Serialize, Deserialize)]
@@ -235,6 +236,13 @@ pub struct FunctionResponsePart {
pub function_response: FunctionResponse,
}

#[derive(Debug, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct ThoughtPart {
pub thought: bool,
pub thought_signature: String,
}

#[derive(Debug, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct CitationSource {
@@ -281,6 +289,22 @@ pub struct UsageMetadata {
pub total_token_count: Option<usize>,
}

#[derive(Debug, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct ThinkingConfig {
pub thinking_budget: u32,
}

#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))]
#[derive(Copy, Clone, Debug, Default, Serialize, Deserialize, PartialEq, Eq)]
pub enum GoogleModelMode {
#[default]
Default,
Thinking {
budget_tokens: Option<u32>,
},
}

#[derive(Debug, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct GenerationConfig {
@@ -296,6 +320,8 @@ pub struct GenerationConfig {
pub top_p: Option<f64>,
#[serde(skip_serializing_if = "Option::is_none")]
pub top_k: Option<usize>,
#[serde(skip_serializing_if = "Option::is_none")]
pub thinking_config: Option<ThinkingConfig>,
}

#[derive(Debug, Serialize, Deserialize)]
@@ -488,6 +514,8 @@ pub enum Model {
/// The name displayed in the UI, such as in the assistant panel model dropdown menu.
display_name: Option<String>,
max_tokens: usize,
#[serde(default)]
mode: GoogleModelMode,
},
}

@@ -544,6 +572,21 @@ impl Model {
Model::Custom { max_tokens, .. } => *max_tokens,
}
}

pub fn mode(&self) -> GoogleModelMode {
match self {
Self::Gemini15Pro
| Self::Gemini15Flash
| Self::Gemini20Pro
| Self::Gemini20Flash
| Self::Gemini20FlashThinking
| Self::Gemini20FlashLite
| Self::Gemini25ProExp0325
| Self::Gemini25ProPreview0325
| Self::Gemini25FlashPreview0417 => GoogleModelMode::Default,
Self::Custom { mode, .. } => *mode,
}
}
}

impl std::fmt::Display for Model {

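Tying the new types above together, a plausible sketch of how a model's `GoogleModelMode` could be translated into a request's `thinking_config` (this mapping and the fallback budget are assumptions for illustration, not necessarily the crate's actual wiring):

fn thinking_config_for(mode: GoogleModelMode) -> Option<ThinkingConfig> {
    match mode {
        GoogleModelMode::Default => None,
        // `budget_tokens` is optional; 8192 here is an arbitrary illustrative fallback.
        GoogleModelMode::Thinking { budget_tokens } => Some(ThinkingConfig {
            thinking_budget: budget_tokens.unwrap_or(8192),
        }),
    }
}
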
@@ -126,6 +126,7 @@ uuid.workspace = true
waker-fn = "1.2.0"
lyon = "1.0"
workspace-hack.workspace = true
libc.workspace = true

[target.'cfg(target_os = "macos")'.dependencies]
block = "0.1"

@@ -1,6 +1,6 @@
use gpui::{
App, Application, Bounds, Context, KeyBinding, SharedString, Timer, Window, WindowBounds,
WindowKind, WindowOptions, actions, div, prelude::*, px, rgb, size,
App, Application, Bounds, Context, KeyBinding, PromptButton, PromptLevel, SharedString, Timer,
Window, WindowBounds, WindowKind, WindowOptions, actions, div, prelude::*, px, rgb, size,
};

struct SubWindow {
@@ -169,6 +169,42 @@ impl Render for WindowDemo {
let content_size = window.bounds().size;
window.resize(size(content_size.height, content_size.width));
}))
.child(button("Prompt", |window, cx| {
let answer = window.prompt(
PromptLevel::Info,
"Are you sure?",
None,
&["Ok", "Cancel"],
cx,
);

cx.spawn(async move |_| {
if answer.await.unwrap() == 0 {
println!("You have clicked Ok");
} else {
println!("You have clicked Cancel");
}
})
.detach();
}))
.child(button("Prompt (non-English)", |window, cx| {
let answer = window.prompt(
PromptLevel::Info,
"Are you sure?",
None,
&[PromptButton::ok("确定"), PromptButton::cancel("取消")],
cx,
);

cx.spawn(async move |_| {
if answer.await.unwrap() == 0 {
println!("You have clicked Ok");
} else {
println!("You have clicked Cancel");
}
})
.detach();
}))
}
}

@@ -195,6 +231,7 @@ fn main() {
},
)
.unwrap();

cx.activate(true);
cx.on_action(|_: &Quit, cx| cx.quit());
cx.bind_keys([KeyBinding::new("cmd-q", Quit, None)]);

@@ -37,10 +37,10 @@ use crate::{
AssetSource, BackgroundExecutor, Bounds, ClipboardItem, CursorStyle, DispatchPhase, DisplayId,
EventEmitter, FocusHandle, FocusMap, ForegroundExecutor, Global, KeyBinding, KeyContext,
Keymap, Keystroke, LayoutId, Menu, MenuItem, OwnedMenu, PathPromptOptions, Pixels, Platform,
PlatformDisplay, PlatformKeyboardLayout, Point, PromptBuilder, PromptHandle, PromptLevel,
Render, RenderImage, RenderablePromptHandle, Reservation, ScreenCaptureSource, SharedString,
SubscriberSet, Subscription, SvgRenderer, Task, TextSystem, Window, WindowAppearance,
WindowHandle, WindowId, WindowInvalidator,
PlatformDisplay, PlatformKeyboardLayout, Point, PromptBuilder, PromptButton, PromptHandle,
PromptLevel, Render, RenderImage, RenderablePromptHandle, Reservation, ScreenCaptureSource,
SharedString, SubscriberSet, Subscription, SvgRenderer, Task, TextSystem, Window,
WindowAppearance, WindowHandle, WindowId, WindowInvalidator,
colors::{Colors, GlobalColors},
current_platform, hash, init_app_menus,
};
@@ -1578,14 +1578,14 @@ impl App {
PromptLevel,
&str,
Option<&str>,
&[&str],
&[PromptButton],
PromptHandle,
&mut Window,
&mut App,
) -> RenderablePromptHandle
+ 'static,
) {
self.prompt_builder = Some(PromptBuilder::Custom(Box::new(renderer)))
self.prompt_builder = Some(PromptBuilder::Custom(Box::new(renderer)));
}

/// Reset the prompt builder to the default implementation.

@@ -1,7 +1,7 @@
use crate::{
AnyView, AnyWindowHandle, App, AppCell, AppContext, BackgroundExecutor, BorrowAppContext,
Entity, EventEmitter, Focusable, ForegroundExecutor, Global, PromptLevel, Render, Reservation,
Result, Subscription, Task, VisualContext, Window, WindowHandle,
Entity, EventEmitter, Focusable, ForegroundExecutor, Global, PromptButton, PromptLevel, Render,
Reservation, Result, Subscription, Task, VisualContext, Window, WindowHandle,
};
use anyhow::Context as _;
use derive_more::{Deref, DerefMut};
@@ -314,13 +314,16 @@ impl AsyncWindowContext {
/// Present a platform dialog.
/// The provided message will be presented, along with buttons for each answer.
/// When a button is clicked, the returned Receiver will receive the index of the clicked button.
pub fn prompt(
pub fn prompt<T>(
&mut self,
level: PromptLevel,
message: &str,
detail: Option<&str>,
answers: &[&str],
) -> oneshot::Receiver<usize> {
answers: &[T],
) -> oneshot::Receiver<usize>
where
T: Clone + Into<PromptButton>,
{
self.window
.update(self, |_, window, cx| {
window.prompt(level, message, detail, answers, cx)

@@ -20,11 +20,11 @@ use std::{
thread::panicking,
};

use super::Context;
use crate::util::atomic_incr_if_not_zero;
#[cfg(any(test, feature = "leak-detection"))]
use collections::HashMap;

use super::Context;

slotmap::new_key_type! {
/// A unique identifier for a entity across the application.
pub struct EntityId;
@@ -529,11 +529,10 @@ impl AnyWeakEntity {
let ref_counts = ref_counts.read();
let ref_count = ref_counts.counts.get(self.entity_id)?;

// entity_id is in dropped_entity_ids
if ref_count.load(SeqCst) == 0 {
if atomic_incr_if_not_zero(ref_count) == 0 {
// entity_id is in dropped_entity_ids
return None;
}
ref_count.fetch_add(1, SeqCst);
drop(ref_counts);

Some(AnyEntity {

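The hunk above replaces a separate zero check followed by `fetch_add` with a single `atomic_incr_if_not_zero` call, closing the window in which the count could drop to zero between the load and the increment. The helper's definition is not part of this diff; a typical compare-and-swap sketch of such a function, offered only as an assumption about its shape, looks like this:

use std::sync::atomic::{AtomicUsize, Ordering::SeqCst};

// Returns the previous value, or 0 if the counter was already zero
// (in which case it is left untouched).
fn atomic_incr_if_not_zero(count: &AtomicUsize) -> usize {
    let mut prev = count.load(SeqCst);
    loop {
        if prev == 0 {
            return 0;
        }
        match count.compare_exchange_weak(prev, prev + 1, SeqCst, SeqCst) {
            Ok(_) => return prev,
            Err(actual) => prev = actual,
        }
    }
}
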
@@ -111,7 +111,7 @@ where
self.root = Some(new_parent);
}

for node_index in self.stack.drain(..) {
for node_index in self.stack.drain(..).rev() {
let Node::Internal {
max_order: max_ordering,
..
@@ -119,7 +119,10 @@ where
else {
unreachable!()
};
*max_ordering = cmp::max(*max_ordering, ordering);
if *max_ordering >= ordering {
break;
}
*max_ordering = ordering;
}

ordering
@@ -237,6 +240,7 @@ where
mod tests {
use super::*;
use crate::{Bounds, Point, Size};
use rand::{Rng, SeedableRng};

#[test]
fn test_insert() {
@@ -294,4 +298,40 @@ mod tests {
assert_eq!(tree.insert(bounds5), 1); // bounds5 does not overlap with any other bounds
assert_eq!(tree.insert(bounds6), 2); // bounds6 overlaps with bounds4, so it should have a different order
}

#[test]
fn test_random_iterations() {
let max_bounds = 100;
for seed in 1..=1000 {
// let seed = 44;
let mut tree = BoundsTree::default();
let mut rng = rand::rngs::StdRng::seed_from_u64(seed as u64);
let mut expected_quads: Vec<(Bounds<f32>, u32)> = Vec::new();

// Insert a random number of random AABBs into the tree.
let num_bounds = rng.gen_range(1..=max_bounds);
for _ in 0..num_bounds {
let min_x: f32 = rng.gen_range(-100.0..100.0);
let min_y: f32 = rng.gen_range(-100.0..100.0);
let width: f32 = rng.gen_range(0.0..50.0);
let height: f32 = rng.gen_range(0.0..50.0);
let bounds = Bounds {
origin: Point { x: min_x, y: min_y },
size: Size { width, height },
};

let expected_ordering = expected_quads
.iter()
.filter_map(|quad| quad.0.intersects(&bounds).then_some(quad.1))
.max()
.unwrap_or(0)
+ 1;
expected_quads.push((bounds, expected_ordering));

// Insert the AABB into the tree and collect intersections.
let actual_ordering = tree.insert(bounds);
assert_eq!(actual_ordering, expected_ordering);
}
}
}
}

@@ -147,14 +147,49 @@ impl Keymap {
});

let mut bindings: SmallVec<[(KeyBinding, usize); 1]> = SmallVec::new();
let mut is_pending = None;

// (pending, is_no_action, depth, keystrokes)
let mut pending_info_opt: Option<(bool, bool, usize, &[Keystroke])> = None;

'outer: for (binding, pending) in possibilities {
for depth in (0..=context_stack.len()).rev() {
if self.binding_enabled(binding, &context_stack[0..depth]) {
if is_pending.is_none() {
is_pending = Some(pending);
let is_no_action = is_no_action(&*binding.action);
// We only want to consider a binding pending if it has an action
// This, however, means that if we have both a NoAction binding and a binding
// with an action at the same depth, we should still set is_pending to true.
if let Some(pending_info) = pending_info_opt.as_mut() {
let (
already_pending,
pending_is_no_action,
pending_depth,
pending_keystrokes,
) = *pending_info;

// We only want to change the pending status if it's not already pending AND if
// the existing pending status was set by a NoAction binding. This avoids a NoAction
// binding erroneously setting the pending status to true when a binding with an action
// already set it to false
//
// We also want to change the pending status if the keystrokes don't match,
// meaning it's different keystrokes than the NoAction that set pending to false
if pending
&& !already_pending
&& pending_is_no_action
&& (pending_depth == depth
|| pending_keystrokes != binding.keystrokes())
{
pending_info.0 = !is_no_action;
}
} else {
pending_info_opt = Some((
pending && !is_no_action,
is_no_action,
depth,
binding.keystrokes(),
));
}

if !pending {
bindings.push((binding.clone(), depth));
continue 'outer;
@@ -174,7 +209,7 @@ impl Keymap {
})
.collect();

(bindings, is_pending.unwrap_or_default())
(bindings, pending_info_opt.unwrap_or_default().0)
}

/// Check if the given binding is enabled, given a certain key context.
@@ -310,6 +345,102 @@ mod tests {
);
}

#[test]
/// Tests for https://github.com/zed-industries/zed/issues/30259
fn test_multiple_keystroke_binding_disabled() {
let bindings = [
KeyBinding::new("space w w", ActionAlpha {}, Some("workspace")),
KeyBinding::new("space w w", NoAction {}, Some("editor")),
];

let mut keymap = Keymap::default();
keymap.add_bindings(bindings.clone());

let space = || Keystroke::parse("space").unwrap();
let w = || Keystroke::parse("w").unwrap();

let space_w = [space(), w()];
let space_w_w = [space(), w(), w()];

let workspace_context = || [KeyContext::parse("workspace").unwrap()];

let editor_workspace_context = || {
[
KeyContext::parse("workspace").unwrap(),
KeyContext::parse("editor").unwrap(),
]
};

// Ensure `space` results in pending input on the workspace, but not editor
let space_workspace = keymap.bindings_for_input(&[space()], &workspace_context());
assert!(space_workspace.0.is_empty());
assert_eq!(space_workspace.1, true);

let space_editor = keymap.bindings_for_input(&[space()], &editor_workspace_context());
assert!(space_editor.0.is_empty());
assert_eq!(space_editor.1, false);

// Ensure `space w` results in pending input on the workspace, but not editor
let space_w_workspace = keymap.bindings_for_input(&space_w, &workspace_context());
assert!(space_w_workspace.0.is_empty());
assert_eq!(space_w_workspace.1, true);

let space_w_editor = keymap.bindings_for_input(&space_w, &editor_workspace_context());
assert!(space_w_editor.0.is_empty());
assert_eq!(space_w_editor.1, false);

// Ensure `space w w` results in the binding in the workspace, but not in the editor
let space_w_w_workspace = keymap.bindings_for_input(&space_w_w, &workspace_context());
assert!(!space_w_w_workspace.0.is_empty());
assert_eq!(space_w_w_workspace.1, false);

let space_w_w_editor = keymap.bindings_for_input(&space_w_w, &editor_workspace_context());
assert!(space_w_w_editor.0.is_empty());
assert_eq!(space_w_w_editor.1, false);

// Now test what happens if we have another binding defined AFTER the NoAction
// that should result in pending
let bindings = [
KeyBinding::new("space w w", ActionAlpha {}, Some("workspace")),
KeyBinding::new("space w w", NoAction {}, Some("editor")),
KeyBinding::new("space w x", ActionAlpha {}, Some("editor")),
];
let mut keymap = Keymap::default();
keymap.add_bindings(bindings.clone());

let space_editor = keymap.bindings_for_input(&[space()], &editor_workspace_context());
assert!(space_editor.0.is_empty());
assert_eq!(space_editor.1, true);

// Now test what happens if we have another binding defined BEFORE the NoAction
// that should result in pending
let bindings = [
KeyBinding::new("space w w", ActionAlpha {}, Some("workspace")),
KeyBinding::new("space w x", ActionAlpha {}, Some("editor")),
KeyBinding::new("space w w", NoAction {}, Some("editor")),
];
let mut keymap = Keymap::default();
keymap.add_bindings(bindings.clone());

let space_editor = keymap.bindings_for_input(&[space()], &editor_workspace_context());
assert!(space_editor.0.is_empty());
assert_eq!(space_editor.1, true);

// Now test what happens if we have another binding defined at a higher context
// that should result in pending
let bindings = [
KeyBinding::new("space w w", ActionAlpha {}, Some("workspace")),
KeyBinding::new("space w x", ActionAlpha {}, Some("workspace")),
KeyBinding::new("space w w", NoAction {}, Some("editor")),
];
let mut keymap = Keymap::default();
keymap.add_bindings(bindings.clone());

let space_editor = keymap.bindings_for_input(&[space()], &editor_workspace_context());
assert!(space_editor.0.is_empty());
assert_eq!(space_editor.1, true);
}

#[test]
fn test_bindings_for_action() {
let bindings = [

@@ -418,7 +418,7 @@ pub(crate) trait PlatformWindow: HasWindowHandle + HasDisplayHandle {
level: PromptLevel,
msg: &str,
detail: Option<&str>,
answers: &[&str],
answers: &[PromptButton],
) -> Option<oneshot::Receiver<usize>>;
fn activate(&self);
fn is_active(&self) -> bool;
@@ -445,6 +445,7 @@ pub(crate) trait PlatformWindow: HasWindowHandle + HasDisplayHandle {
// macOS specific methods
fn set_edited(&mut self, _edited: bool) {}
fn show_character_palette(&self) {}
fn titlebar_double_click(&self) {}

#[cfg(target_os = "windows")]
fn get_raw_handle(&self) -> windows::HWND;
@@ -1244,6 +1245,58 @@ pub enum PromptLevel {
Critical,
}

/// Prompt Button
#[derive(Clone, Debug, PartialEq)]
pub enum PromptButton {
/// Ok button
Ok(SharedString),
/// Cancel button
Cancel(SharedString),
/// Other button
Other(SharedString),
}

impl PromptButton {
/// Create a button with label
pub fn new(label: impl Into<SharedString>) -> Self {
PromptButton::Other(label.into())
}

/// Create an Ok button
pub fn ok(label: impl Into<SharedString>) -> Self {
PromptButton::Ok(label.into())
}

/// Create a Cancel button
pub fn cancel(label: impl Into<SharedString>) -> Self {
PromptButton::Cancel(label.into())
}

#[allow(dead_code)]
pub(crate) fn is_cancel(&self) -> bool {
matches!(self, PromptButton::Cancel(_))
}

/// Returns the label of the button
pub fn label(&self) -> &SharedString {
match self {
PromptButton::Ok(label) => label,
PromptButton::Cancel(label) => label,
PromptButton::Other(label) => label,
}
}
}

impl From<&str> for PromptButton {
fn from(value: &str) -> Self {
match value.to_lowercase().as_str() {
"ok" => PromptButton::Ok("Ok".into()),
"cancel" => PromptButton::Cancel("Cancel".into()),
_ => PromptButton::Other(SharedString::from(value.to_owned())),
}
}
}

/// The style of the cursor (pointer)
#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash, Serialize, Deserialize, JsonSchema)]
pub enum CursorStyle {

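Because `prompt` now accepts any `&[T]` where `T: Clone + Into<PromptButton>`, existing call sites with plain string slices keep compiling through the `From<&str>` impl above, while explicit buttons allow localized labels, as the window demo earlier in this diff shows. A minimal sketch (the message strings are illustrative):

// Plain strings still work; "Ok"/"Cancel" are recognized case-insensitively.
let answer = window.prompt(PromptLevel::Info, "Delete file?", None, &["Ok", "Cancel"], cx);

// Explicit buttons keep Ok/Cancel semantics with non-English labels.
let answer = window.prompt(
    PromptLevel::Info,
    "Discard changes?",
    None,
    &[PromptButton::ok("确定"), PromptButton::cancel("取消")],
    cx,
);
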
@@ -75,7 +75,7 @@ use crate::platform::linux::{
LinuxClient, get_xkb_compose_state, is_within_click_distance, open_uri_internal, read_fd,
reveal_path_internal,
wayland::{
clipboard::{Clipboard, DataOffer, FILE_LIST_MIME_TYPE, TEXT_MIME_TYPE},
clipboard::{Clipboard, DataOffer, FILE_LIST_MIME_TYPE, TEXT_MIME_TYPES},
cursor::Cursor,
serial::{SerialKind, SerialTracker},
window::WaylandWindow,
@@ -778,8 +778,10 @@ impl LinuxClient for WaylandClient {
state.clipboard.set_primary(item);
let serial = state.serial_tracker.get(SerialKind::KeyPress);
let data_source = primary_selection_manager.create_source(&state.globals.qh, ());
for mime_type in TEXT_MIME_TYPES {
data_source.offer(mime_type.to_string());
}
data_source.offer(state.clipboard.self_mime());
data_source.offer(TEXT_MIME_TYPE.to_string());
primary_selection.set_selection(Some(&data_source), serial);
}
}
@@ -796,8 +798,10 @@ impl LinuxClient for WaylandClient {
state.clipboard.set(item);
let serial = state.serial_tracker.get(SerialKind::KeyPress);
let data_source = data_device_manager.create_data_source(&state.globals.qh, ());
for mime_type in TEXT_MIME_TYPES {
data_source.offer(mime_type.to_string());
}
data_source.offer(state.clipboard.self_mime());
data_source.offer(TEXT_MIME_TYPE.to_string());
data_device.set_selection(Some(&data_source), serial);
}
}