Compare commits


14 Commits

Author SHA1 Message Date
Joseph T. Lyons
a4132b939f zed 0.96.2 2023-07-21 12:03:41 -04:00
Conrad Irwin
61bb05b978 Fix shift-enter in search (#2772)
Fixes shift-enter to go to the previous result.

Release Notes:

- To type a newline in search, use `ctrl-enter` (or `ctrl-shift-enter` for a newline below).
2023-07-21 11:54:39 -04:00
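
A minimal sketch of what a user keymap override for this could look like, assuming the `BufferSearchBar > Editor` context that appears in the keymap diff further down and the `editor::Newline` / `editor::NewlineBelow` actions (assumptions, not taken from this commit):

{
  "context": "BufferSearchBar > Editor",
  "bindings": {
    // assumed bindings: shift-enter now means "previous match", so newline moves here
    "ctrl-enter": "editor::Newline",
    "ctrl-shift-enter": "editor::NewlineBelow"
  }
}
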
Conrad Irwin
8213c3328e Fix enter in search (#2768)
Fixes a regression in non-vim search caused by my changes to vim search.

Release Notes:

- N/A
2023-07-21 11:54:12 -04:00
Kirill Bulatov
e65ddbc11c Do not highlight fake URLs in terminal (#2770)
Closes https://github.com/zed-industries/community/issues/1794
See also https://github.com/alacritty/alacritty/pull/7101

Release Notes:

- Fixed terminal incorrectly highlighting certain strings as URLs
2023-07-21 11:59:17 +03:00
Joseph T. Lyons
3722de4506 zed 0.96.1 2023-07-20 17:08:19 -04:00
Derek Briggs
65b565656c Icon adjustments (#2766)
Icon tweaks
2023-07-20 17:07:35 -04:00
Mikayla Maki
d080220e9e Folder icons (#2764)
- Updates icons and adds more
- Adds ability to choose folders or chevrons in user settings
- Adds ability to set indent size in user settings
2023-07-20 17:00:03 -04:00
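
A hedged sketch of what those user settings might look like; the `project_panel` keys `folder_icons` and `indent_size` are assumptions inferred from the commit description, not taken from this compare:

{
  "project_panel": {
    // assumed: true shows folder icons, false falls back to chevrons
    "folder_icons": true,
    // assumed: horizontal indent per nesting level, in pixels
    "indent_size": 20
  }
}
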
Mikayla Maki
0be56d6a30 Add a double click to reset resized splits (#2762)
fixes https://github.com/zed-industries/community/issues/1791

Release Notes:

- Double-clicking on split resize handles now resets the split's dimensions.
2023-07-20 16:55:31 -04:00
Nate Butler
0472a6ff83 Add the local and declare keywords to bash syntax highlighting (#2761)
Release Notes:

- Improved Bash / Shell Script syntax highlighting
2023-07-20 16:55:05 -04:00
Joseph T. Lyons
04a1a96f8c Reuse previously-obtained call object 2023-07-20 16:21:30 -04:00
Joseph T. Lyons
28e04bb412 Add microphone toggle events (#2765)
Release Notes:

- N/A
2023-07-20 16:13:03 -04:00
Joseph T. Lyons
4274ccee62 Fix return type in watch_file_types() 2023-07-19 16:24:38 -04:00
Mikayla Maki
05cd06177c Mute mics by default (#2754)
This adds a setting to mute mics by default.

fixes https://github.com/zed-industries/community/issues/1769

Release notes:

- Fixed a bug with gutter spacing on files that end on a new significant digit
- Added a setting for muting on join, and set it to true by default.
2023-07-19 15:43:40 -04:00
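
As a sketch, the new setting might be toggled like this in user settings (the key name `mute_on_join` is an assumption; the relevant hunk is not shown in this compare):

{
  // assumed key: automatically mute the microphone when joining a call
  "mute_on_join": true
}
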
Joseph T. Lyons
760fece112 v0.96.x preview 2023-07-19 15:33:38 -04:00
141 changed files with 3152 additions and 8782 deletions

View File

@@ -6,23 +6,14 @@ jobs:
discord_release:
runs-on: ubuntu-latest
steps:
- name: Get appropriate URL
id: get-appropriate-url
run: |
if [ "${{ github.event.release.prerelease }}" == "true" ]; then
URL="https://zed.dev/releases/preview/latest"
else
URL="https://zed.dev/releases/stable/latest"
fi
echo "::set-output name=URL::$URL"
- name: Discord Webhook Action
uses: tsickert/discord-webhook@v5.3.0
if: ${{ ! github.event.release.prerelease }}
with:
webhook-url: ${{ secrets.DISCORD_WEBHOOK_URL }}
content: |
📣 Zed ${{ github.event.release.tag_name }} was just released!
Restart your Zed or head to ${{ steps.get-appropriate-url.outputs.URL }} to grab it.
Restart your Zed or head to https://zed.dev/releases/stable/latest to grab it.
${{ github.event.release.body }}

View File

@@ -1,5 +0,0 @@
{
"JSON": {
"tab_size": 4
}
}

Cargo.lock (generated): 1816 lines changed

File diff suppressed because it is too large

View File

@@ -63,7 +63,7 @@ members = [
"crates/theme",
"crates/theme_selector",
"crates/util",
"crates/semantic_index",
"crates/vector_store",
"crates/vim",
"crates/vcs_menu",
"crates/workspace",
@@ -109,12 +109,10 @@ pretty_assertions = "1.3.0"
tree-sitter-bash = { git = "https://github.com/tree-sitter/tree-sitter-bash", rev = "1b0321ee85701d5036c334a6f04761cdc672e64c" }
tree-sitter-c = "0.20.1"
tree-sitter-cpp = { git = "https://github.com/tree-sitter/tree-sitter-cpp", rev="f44509141e7e483323d2ec178f2d2e6c0fc041c1" }
tree-sitter-cpp = "0.20.0"
tree-sitter-css = { git = "https://github.com/tree-sitter/tree-sitter-css", rev = "769203d0f9abe1a9a691ac2b9fe4bb4397a73c51" }
tree-sitter-elixir = { git = "https://github.com/elixir-lang/tree-sitter-elixir", rev = "a2861e88a730287a60c11ea9299c033c7d076e30" }
tree-sitter-elm = { git = "https://github.com/elm-tooling/tree-sitter-elm", rev = "692c50c0b961364c40299e73c1306aecb5d20f40"}
tree-sitter-elixir = { git = "https://github.com/elixir-lang/tree-sitter-elixir", rev = "4ba9dab6e2602960d95b2b625f3386c27e08084e" }
tree-sitter-embedded-template = "0.20.0"
tree-sitter-glsl = { git = "https://github.com/theHamsta/tree-sitter-glsl", rev = "2a56fb7bc8bb03a1892b4741279dd0a8758b7fb3" }
tree-sitter-go = { git = "https://github.com/tree-sitter/tree-sitter-go", rev = "aeb2f33b366fd78d5789ff104956ce23508b85db" }
tree-sitter-heex = { git = "https://github.com/phoenixframework/tree-sitter-heex", rev = "2e1348c3cf2c9323e87c2744796cf3f3868aa82a" }
tree-sitter-json = { git = "https://github.com/tree-sitter/tree-sitter-json", rev = "40a81c01a40ac48744e0c8ccabbaba1920441199" }
@@ -131,10 +129,9 @@ tree-sitter-svelte = { git = "https://github.com/Himujjal/tree-sitter-svelte", r
tree-sitter-racket = { git = "https://github.com/zed-industries/tree-sitter-racket", rev = "eb010cf2c674c6fd9a6316a84e28ef90190fe51a"}
tree-sitter-yaml = { git = "https://github.com/zed-industries/tree-sitter-yaml", rev = "f545a41f57502e1b5ddf2a6668896c1b0620f930"}
tree-sitter-lua = "0.0.14"
tree-sitter-nix = { git = "https://github.com/nix-community/tree-sitter-nix", rev = "66e3e9ce9180ae08fc57372061006ef83f0abde7" }
[patch.crates-io]
tree-sitter = { git = "https://github.com/tree-sitter/tree-sitter", rev = "1c65ca24bc9a734ab70115188f465e12eecf224e" }
tree-sitter = { git = "https://github.com/tree-sitter/tree-sitter", rev = "49226023693107fba9a1191136a4f47f38cdca73" }
async-task = { git = "https://github.com/zed-industries/async-task", rev = "341b57d6de98cdfd7b418567b8de2022ca993a6e" }
# TODO - Remove when a version is released with this PR: https://github.com/servo/core-foundation-rs/pull/457

View File

@@ -1,6 +1,6 @@
# syntax = docker/dockerfile:1.2
FROM rust:1.71-bullseye as builder
FROM rust:1.70-bullseye as builder
WORKDIR app
COPY . .

View File

@@ -1,27 +0,0 @@
<svg width="14" height="16" viewBox="0 0 14 16" fill="none" xmlns="http://www.w3.org/2000/svg">
<path d="M9.375 8.74577V10.375C7.30597 10.375 6.69403 10.375 4.625 10.375V10.1226L9.375 5.87742V5.625H4.625V7.27717" stroke="black" stroke-width="1.25"/>
<circle cx="0.5" cy="8" r="0.5" fill="black" fill-opacity="0.3"/>
<circle cx="1.49976" cy="5.82825" r="0.5" fill="black" fill-opacity="0.6"/>
<circle cx="1.49976" cy="10.1719" r="0.5" fill="black" fill-opacity="0.6"/>
<circle cx="13.5" cy="8.01581" r="0.5" fill="black" fill-opacity="0.3"/>
<circle cx="12.5" cy="5.84387" r="0.5" fill="black" fill-opacity="0.6"/>
<circle cx="12.5" cy="10.1877" r="0.5" fill="black" fill-opacity="0.6"/>
<circle cx="6.99213" cy="1.48438" r="0.5" fill="black" fill-opacity="0.3"/>
<circle cx="4.50391" cy="2.48438" r="0.5" fill="black" fill-opacity="0.6"/>
<circle cx="2.49976" cy="3.48438" r="0.5" fill="black" fill-opacity="0.3"/>
<circle cx="2.49976" cy="12.5" r="0.5" fill="black" fill-opacity="0.3"/>
<circle cx="0.5" cy="12.016" r="0.5" fill="black" fill-opacity="0.3"/>
<circle cx="0.5" cy="3.98438" r="0.5" fill="black" fill-opacity="0.3"/>
<circle cx="13.5" cy="12.016" r="0.5" fill="black" fill-opacity="0.3"/>
<circle cx="13.5" cy="3.98438" r="0.5" fill="black" fill-opacity="0.3"/>
<circle cx="2.49976" cy="14.516" r="0.5" fill="black" fill-opacity="0.3"/>
<circle cx="2.48413" cy="1.48438" r="0.5" fill="black" fill-opacity="0.3"/>
<circle cx="11.5" cy="14.516" r="0.5" fill="black" fill-opacity="0.3"/>
<circle cx="11.5" cy="1.48438" r="0.5" fill="black" fill-opacity="0.3"/>
<circle cx="11.5" cy="3.48438" r="0.5" fill="black" fill-opacity="0.3"/>
<circle cx="11.5" cy="12.516" r="0.5" fill="black" fill-opacity="0.3"/>
<circle cx="9.49609" cy="2.48438" r="0.5" fill="black" fill-opacity="0.6"/>
<circle cx="6.99213" cy="14.5" r="0.5" fill="black" fill-opacity="0.3"/>
<circle cx="4.50391" cy="13.516" r="0.5" fill="black" fill-opacity="0.6"/>
<circle cx="9.49609" cy="13.5" r="0.5" fill="black" fill-opacity="0.6"/>
</svg>

Before: 2.0 KiB

View File

@@ -1,5 +1,5 @@
<svg width="14" height="14" viewBox="0 0 14 14" fill="none" xmlns="http://www.w3.org/2000/svg">
<path d="M6 7.63H8" stroke="black" stroke-opacity="0.6" stroke-width="1.25" stroke-linecap="round" stroke-linejoin="round"/>
<rect x="2" y="2" width="10" height="3" rx="0.5" fill="black" fill-opacity="0.3" stroke="black" stroke-width="1.25"/>
<path d="M6 7.63H8" stroke="black" stroke-opacity="0.66" stroke-width="1.25" stroke-linecap="round" stroke-linejoin="round"/>
<rect x="2" y="2" width="10" height="3" rx="0.5" fill="black" fill-opacity="0.33" stroke="black" stroke-width="1.25"/>
<path d="M2.59375 5H11.4375L10.5581 11.5664C10.5248 11.8146 10.313 12 10.0625 12H3.93944C3.68812 12 3.47585 11.8134 3.44358 11.5642L2.59375 5Z" stroke="black" stroke-width="1.25"/>
</svg>

Before: 527 B → After: 529 B

View File

@@ -1,6 +1,6 @@
<svg width="14" height="14" viewBox="0 0 14 14" fill="none" xmlns="http://www.w3.org/2000/svg">
<path d="M7 11C5.46973 11 4.1268 11.1873 3.31522 11.3327C2.94367 11.3992 2.60079 11.0563 2.66733 10.6848C2.81266 9.8732 3 8.53027 3 7C3 5.8387 2.89211 4.78529 2.77656 3.99011C2.73589 3.71017 3.19546 3.51715 3.36119 3.7464C4.09612 4.76304 5.23301 6.23301 6.5 7.5C7.76699 8.76699 9.23696 9.90388 10.2536 10.6388C10.4828 10.8045 10.2898 11.2641 10.0099 11.2234C9.21472 11.1079 8.1613 11 7 11Z" stroke="black" stroke-width="1.25" stroke-linecap="round" stroke-linejoin="round"/>
<path d="M12.365 3.8478L10.3381 1.82088" stroke="black" stroke-opacity="0.3" stroke-width="1.25" stroke-linecap="round"/>
<path d="M11.3516 7.36803L6.64062 2.64155" stroke="black" stroke-opacity="0.6" stroke-width="1.25" stroke-linecap="round"/>
<path d="M12.3594 3.35938C12.3594 3.35938 12.0146 2.9209 11.5312 2.4375C11.0479 1.9541 10.6406 1.64062 10.6406 1.64062" stroke="black" stroke-opacity="0.33" stroke-width="1.25" stroke-linecap="round"/>
<path d="M11.3516 7.36803C11.3516 7.36803 10.7962 5.88996 9.48438 4.57812C8.17254 3.26629 6.64062 2.64155 6.64062 2.64155" stroke="black" stroke-opacity="0.66" stroke-width="1.25" stroke-linecap="round"/>
<rect x="2.72266" y="8.73828" width="3.58525" height="2.72899" rx="0.5" transform="rotate(45 2.72266 8.73828)" fill="black"/>
</svg>

Before: 950 B → After: 1.1 KiB

View File

@@ -1,4 +1,4 @@
<svg width="14" height="14" viewBox="0 0 14 14" fill="none" xmlns="http://www.w3.org/2000/svg">
<path fill-rule="evenodd" clip-rule="evenodd" d="M12 10.5C12 10.7761 11.7761 11 11.5 11H2.5C2.22386 11 2 10.7761 2 10.5V4.88C2 4.60386 2.22386 4.38 2.5 4.38H4.4342C4.61518 4.38 4.78204 4.2822 4.87046 4.12428L5.35681 3.25572C5.44524 3.0978 5.61209 3 5.79308 3H8.20692C8.38791 3 8.55476 3.0978 8.64319 3.25572L9.12954 4.12428C9.21796 4.2822 9.38482 4.38 9.5658 4.38H11.5C11.7761 4.38 12 4.60386 12 4.88V10.5Z" stroke="black" stroke-width="1.25" stroke-linecap="round" stroke-linejoin="round"/>
<path d="M7.005 9C7.90246 9 8.63 8.27246 8.63 7.375C8.63 6.47754 7.90246 5.75 7.005 5.75C6.10754 5.75 5.38 6.47754 5.38 7.375C5.38 8.27246 6.10754 9 7.005 9Z" fill="black" fill-opacity="0.3" stroke="black" stroke-linecap="round" stroke-linejoin="round"/>
<path d="M7.005 9C7.90246 9 8.63 8.27246 8.63 7.375C8.63 6.47754 7.90246 5.75 7.005 5.75C6.10754 5.75 5.38 6.47754 5.38 7.375C5.38 8.27246 6.10754 9 7.005 9Z" fill="black" fill-opacity="0.33" stroke="black" stroke-linecap="round" stroke-linejoin="round"/>
</svg>

Before: 850 B → After: 851 B

View File

@@ -1,4 +0,0 @@
<svg width="14" height="14" viewBox="0 0 14 14" fill="none" xmlns="http://www.w3.org/2000/svg">
<path d="M5.46115 8.43419C7.30678 8.43419 8.92229 7.43411 8.92229 5.21171C8.92229 2.98933 7.30678 1.98926 5.46115 1.98926C3.61553 1.98926 2 2.98933 2 5.21171C2 6.028 2.21794 6.67935 2.58519 7.17685C2.7184 7.35732 2.69033 7.77795 2.58387 7.97539C2.32908 8.44793 2.81048 8.9657 3.33372 8.84571C3.72539 8.75597 4.13621 8.63447 4.49574 8.4715C4.62736 8.41181 4.7727 8.38777 4.91631 8.40402C5.09471 8.42416 5.27678 8.43419 5.46115 8.43419Z" fill="black" fill-opacity="0.33" stroke="black" stroke-width="0.990499" stroke-linecap="round" stroke-linejoin="round"/>
<path d="M11.6055 5.87971C11.4329 5.66762 11.1208 5.6357 10.9088 5.80842C10.6967 5.98114 10.6648 6.29308 10.8375 6.50518L11.6055 5.87971ZM6.4361 10.4149C6.21522 10.2536 5.90539 10.3018 5.74404 10.5226C5.58268 10.7435 5.6309 11.0533 5.85177 11.2147L6.4361 10.4149ZM12.3808 8.25929C12.3808 7.28754 12.1013 6.48847 11.6055 5.87971L10.8375 6.50518C11.1712 6.91492 11.3903 7.48485 11.3903 8.25929H12.3808ZM11.6988 10.5186C12.137 9.92499 12.3808 9.16705 12.3808 8.25929H11.3903C11.3903 8.98414 11.1982 9.52892 10.9019 9.93034L11.6988 10.5186ZM9.1854 11.9702C9.58603 12.1518 10.0316 12.2822 10.4412 12.3761L10.6625 11.4106C10.2888 11.3249 9.91276 11.2124 9.59435 11.068L9.1854 11.9702ZM8.42443 11.977C8.62663 11.977 8.8273 11.9661 9.02494 11.9437L8.91361 10.9595C8.75447 10.9775 8.59097 10.9865 8.42443 10.9865V11.977ZM5.85177 11.2147C6.5749 11.743 7.49105 11.977 8.42443 11.977V10.9865C7.64656 10.9865 6.9503 10.7906 6.4361 10.4149L5.85177 11.2147ZM9.59435 11.068C9.38377 10.9726 9.14869 10.9329 8.91361 10.9595L9.02494 11.9437C9.07704 11.9378 9.13271 11.9463 9.1854 11.9702L9.59435 11.068ZM10.8658 11.2581C10.8784 11.2813 10.8772 11.2932 10.8762 11.2995C10.8746 11.3097 10.8681 11.3291 10.8481 11.3517C10.8049 11.4004 10.7343 11.4271 10.6625 11.4106L10.4412 12.3761C10.8927 12.4796 11.3244 12.3073 11.5891 12.0089C11.8602 11.7033 11.9778 11.2332 11.7377 10.7879L10.8658 11.2581ZM10.9019 9.93034C10.7358 10.1554 10.7116 10.4435 10.7161 10.6293C10.7209 10.8293 10.7634 11.0682 10.8658 11.2581L11.7377 10.7879C11.739 10.7905 11.7304 10.7736 11.7214 10.7331C11.713 10.6954 11.7074 10.6506 11.7063 10.6054C11.7052 10.5594 11.709 10.5234 11.7139 10.5006C11.7196 10.4738 11.7217 10.4876 11.6988 10.5186L10.9019 9.93034Z" fill="black"/>
</svg>

Before: 2.3 KiB

View File

@@ -1,5 +1,5 @@
<svg width="14" height="14" viewBox="0 0 14 14" fill="none" xmlns="http://www.w3.org/2000/svg">
<ellipse cx="7" cy="4" rx="5" ry="2" fill="black" fill-opacity="0.3" stroke="black" stroke-width="1.25"/>
<ellipse cx="7" cy="4" rx="5" ry="2" fill="black" fill-opacity="0.33" stroke="black" stroke-width="1.25"/>
<path d="M12 4V10C12 11.1046 9.76142 12 7 12C4.23858 12 2 11.1046 2 10V4" stroke="black" stroke-width="1.25"/>
<path d="M12 7C12 8.10457 9.76142 9 7 9C4.23858 9 2 8.10457 2 7" stroke="black" stroke-width="1.25"/>
</svg>

Before: 422 B → After: 423 B

View File

@@ -1,5 +1,5 @@
<svg width="14" height="14" viewBox="0 0 14 14" fill="none" xmlns="http://www.w3.org/2000/svg">
<path d="M2 4H10" stroke="black" stroke-opacity="0.6" stroke-width="1.25" stroke-linecap="round"/>
<path d="M2 4H10" stroke="black" stroke-opacity="0.66" stroke-width="1.25" stroke-linecap="round"/>
<path d="M2 7H12" stroke="black" stroke-width="1.25" stroke-linecap="round"/>
<path d="M2 10H8" stroke="black" stroke-opacity="0.3" stroke-width="1.25" stroke-linecap="round"/>
<path d="M2 10H8" stroke="black" stroke-opacity="0.33" stroke-width="1.25" stroke-linecap="round"/>
</svg>

Before: 379 B → After: 381 B

View File

@@ -1,179 +1,159 @@
{
"suffixes": {
"aac": "audio",
"accdb": "storage",
"bak": "backup",
"bash": "terminal",
"bash_aliases": "terminal",
"bash_logout": "terminal",
"bash_profile": "terminal",
"bashrc": "terminal",
"bmp": "image",
"c": "code",
"cc": "code",
"conf": "settings",
"cpp": "code",
"css": "code",
"csv": "storage",
"dat": "storage",
"db": "storage",
"dbf": "storage",
"dll": "storage",
"doc": "document",
"docx": "document",
"eslintrc": "eslint",
"eslintrc.js": "eslint",
"eslintrc.json": "eslint",
"fmp": "storage",
"fp7": "storage",
"flac": "audio",
"fish": "terminal",
"frm": "storage",
"gdb": "storage",
"gitattributes": "vcs",
"gitignore": "vcs",
"gitmodules": "vcs",
"gif": "image",
"go": "code",
"h": "code",
"handlebars": "code",
"hbs": "template",
"htm": "template",
"html": "template",
"ib": "storage",
"ico": "image",
"ini": "settings",
"java": "code",
"jpeg": "image",
"jpg": "image",
"js": "code",
"json": "storage",
"ldf": "storage",
"lock": "lock",
"log": "log",
"mdb": "storage",
"md": "document",
"mdf": "storage",
"mdx": "document",
"mp3": "audio",
"mp4": "video",
"myd": "storage",
"myi": "storage",
"ods": "document",
"odp": "document",
"odt": "document",
"ogg": "video",
"pdb": "storage",
"pdf": "document",
"php": "code",
"png": "image",
"ppt": "document",
"pptx": "document",
"prettierignore": "prettier",
"prettierrc": "prettier",
"profile": "terminal",
"ps1": "terminal",
"psd": "image",
"py": "code",
"rb": "code",
"rkt": "code",
"rs": "rust",
"rtf": "document",
"sav": "storage",
"scm": "code",
"sh": "terminal",
"sqlite": "storage",
"sdf": "storage",
"svelte": "template",
"svg": "image",
"swift": "code",
"ts": "typescript",
"tsx": "code",
"tiff": "image",
"toml": "toml",
"tsv": "storage",
"txt": "document",
"wav": "audio",
"webm": "video",
"xls": "document",
"xlsx": "document",
"xml": "template",
"yaml": "settings",
"yml": "settings",
"zlogin": "terminal",
"zsh": "terminal",
"zsh_aliases": "terminal",
"zshenv": "terminal",
"zsh_histfile": "terminal",
"zsh_profile": "terminal",
"zshrc": "terminal"
"suffixes": {
"aac": "audio",
"bash": "terminal",
"bmp": "image",
"c": "code",
"conf": "settings",
"cpp": "code",
"cc": "code",
"css": "code",
"doc": "document",
"docx": "document",
"eslintrc": "eslint",
"eslintrc.js": "eslint",
"eslintrc.json": "eslint",
"flac": "audio",
"fish": "terminal",
"gitattributes": "vcs",
"gitignore": "vcs",
"gitmodules": "vcs",
"gif": "image",
"go": "code",
"h": "code",
"handlebars": "code",
"hbs": "template",
"htm": "template",
"html": "template",
"svelte": "template",
"hpp": "code",
"ico": "image",
"ini": "settings",
"java": "code",
"jpeg": "image",
"jpg": "image",
"js": "code",
"json": "storage",
"lock": "lock",
"log": "log",
"md": "document",
"mdx": "document",
"mp3": "audio",
"mp4": "video",
"ods": "document",
"odp": "document",
"odt": "document",
"ogg": "video",
"pdf": "document",
"php": "code",
"png": "image",
"ppt": "document",
"pptx": "document",
"prettierrc": "prettier",
"prettierignore": "prettier",
"ps1": "terminal",
"psd": "image",
"py": "code",
"rb": "code",
"rkt": "code",
"rs": "rust",
"rtf": "document",
"scm": "code",
"sh": "terminal",
"bashrc": "terminal",
"bash_profile": "terminal",
"bash_aliases": "terminal",
"bash_logout": "terminal",
"profile": "terminal",
"zshrc": "terminal",
"zshenv": "terminal",
"zsh_profile": "terminal",
"zsh_aliases": "terminal",
"zsh_histfile": "terminal",
"zlogin": "terminal",
"sql": "code",
"svg": "image",
"swift": "code",
"tiff": "image",
"toml": "toml",
"ts": "typescript",
"tsx": "code",
"txt": "document",
"wav": "audio",
"webm": "video",
"xls": "document",
"xlsx": "document",
"xml": "template",
"yaml": "settings",
"yml": "settings",
"zsh": "terminal"
},
"types": {
"audio": {
"icon": "icons/file_icons/audio.svg"
},
"types": {
"audio": {
"icon": "icons/file_icons/audio.svg"
},
"code": {
"icon": "icons/file_icons/code.svg"
},
"collapsed_chevron": {
"icon": "icons/file_icons/chevron_right.svg"
},
"collapsed_folder": {
"icon": "icons/file_icons/folder.svg"
},
"default": {
"icon": "icons/file_icons/file.svg"
},
"document": {
"icon": "icons/file_icons/book.svg"
},
"eslint": {
"icon": "icons/file_icons/eslint.svg"
},
"expanded_chevron": {
"icon": "icons/file_icons/chevron_down.svg"
},
"expanded_folder": {
"icon": "icons/file_icons/folder_open.svg"
},
"image": {
"icon": "icons/file_icons/image.svg"
},
"lock": {
"icon": "icons/file_icons/lock.svg"
},
"log": {
"icon": "icons/file_icons/info.svg"
},
"prettier": {
"icon": "icons/file_icons/prettier.svg"
},
"rust": {
"icon": "icons/file_icons/rust.svg"
},
"settings": {
"icon": "icons/file_icons/settings.svg"
},
"storage": {
"icon": "icons/file_icons/database.svg"
},
"template": {
"icon": "icons/file_icons/html.svg"
},
"terminal": {
"icon": "icons/file_icons/terminal.svg"
},
"toml": {
"icon": "icons/file_icons/toml.svg"
},
"typescript": {
"icon": "icons/file_icons/typescript.svg"
},
"vcs": {
"icon": "icons/file_icons/git.svg"
},
"video": {
"icon": "icons/file_icons/video.svg"
}
"code": {
"icon": "icons/file_icons/code.svg"
},
"collapsed_chevron": {
"icon": "icons/file_icons/chevron_right.svg"
},
"collapsed_folder": {
"icon": "icons/file_icons/folder.svg"
},
"default": {
"icon": "icons/file_icons/file.svg"
},
"document": {
"icon": "icons/file_icons/book.svg"
},
"eslint": {
"icon": "icons/file_icons/eslint.svg"
},
"expanded_chevron": {
"icon": "icons/file_icons/chevron_down.svg"
},
"expanded_folder": {
"icon": "icons/file_icons/folder_open.svg"
},
"image": {
"icon": "icons/file_icons/image.svg"
},
"lock": {
"icon": "icons/file_icons/lock.svg"
},
"log": {
"icon": "icons/file_icons/info.svg"
},
"prettier": {
"icon": "icons/file_icons/prettier.svg"
},
"rust": {
"icon": "icons/file_icons/rust.svg"
},
"settings": {
"icon": "icons/file_icons/settings.svg"
},
"storage": {
"icon": "icons/file_icons/database.svg"
},
"template": {
"icon": "icons/file_icons/html.svg"
},
"terminal": {
"icon": "icons/file_icons/terminal.svg"
},
"toml": {
"icon": "icons/file_icons/toml.svg"
},
"typescript": {
"icon": "icons/file_icons/typescript.svg"
},
"vcs": {
"icon": "icons/file_icons/git.svg"
},
"video": {
"icon": "icons/file_icons/video.svg"
}
}
}

File diff suppressed because one or more lines are too long

Before: 43 KiB → After: 687 B

View File

@@ -1,4 +1,5 @@
<svg width="15" height="14" viewBox="0 0 15 14" fill="none" xmlns="http://www.w3.org/2000/svg">
<path d="M3.49165 6.13802C3.4991 5.86198 3.72386 5.64062 4 5.64062H13C13.2761 5.64062 13.4991 5.86198 13.4916 6.13802C13.4529 7.57407 13.2341 11.625 12 11.625H2C3.23412 11.625 3.45287 7.57407 3.49165 6.13802Z" fill="black" stroke="black" stroke-width="1.25" stroke-linejoin="round"/>
<path d="M4.00781 11.625H2.42841C2.18186 11.625 1.97212 11.4453 1.93432 11.2017L0.651964 2.93603C0.604944 2.63296 0.839355 2.35938 1.14605 2.35938H4.6164C4.95332 2.35938 5.26759 2.52904 5.45244 2.81072L5.8125 3.35938H8.89008C9.37767 3.35938 9.79418 3.71103 9.87593 4.19171L10.125 5.65625" stroke="black" stroke-width="1.25" stroke-linecap="round" stroke-linejoin="round"/>
<path d="M7 2.53125H2.21875V10.625L4.5 4.59375H7.96875L7 2.53125Z" fill="black"/>
<path d="M4.47293 4.94363C4.54554 4.74743 4.73263 4.61719 4.94184 4.61719H12.8755C13.2237 4.61719 13.4653 4.9642 13.3445 5.29074L11.1208 11.2986C11.0482 11.4948 10.8611 11.625 10.6519 11.625H2.71821C2.37002 11.625 2.12844 11.278 2.2493 10.9514L4.47293 4.94363Z" fill="black" fill-opacity="0.33" stroke="black" stroke-width="1.25"/>
<path d="M8 4.4024L7.27505 2.93264C7.10664 2.59119 6.75894 2.375 6.37821 2.375H2.5C2.22386 2.375 2 2.59886 2 2.875V11.125C2 11.4011 2.22386 11.625 2.5 11.625H4.00781" stroke="black" stroke-width="1.25" stroke-linejoin="round"/>
</svg>

Before: 760 B → After: 745 B

View File

@@ -1,6 +1,6 @@
<svg width="14" height="14" viewBox="0 0 14 14" fill="none" xmlns="http://www.w3.org/2000/svg">
<circle cx="4" cy="10" r="2" stroke="black" stroke-width="1.25"/>
<circle cx="10" cy="4" r="2" fill="black" fill-opacity="0.3" stroke="black" stroke-width="1.25"/>
<circle cx="10" cy="4" r="2" fill="black" fill-opacity="0.33" stroke="black" stroke-width="1.25"/>
<line x1="3.625" y1="2.625" x2="3.625" y2="7.375" stroke="black" stroke-width="1.25" stroke-linecap="round"/>
<path d="M10 6V6C10 8.20914 8.20914 10 6 10V10" stroke="black" stroke-width="1.25"/>
</svg>

Before: 462 B → After: 463 B

View File

@@ -1,6 +1,6 @@
<svg width="14" height="14" viewBox="0 0 14 14" fill="none" xmlns="http://www.w3.org/2000/svg">
<path d="M6.5 3C6.91421 3 7.25 2.66421 7.25 2.25C7.25 1.83579 6.91421 1.5 6.5 1.5C6.08579 1.5 5.75 1.83579 5.75 2.25C5.75 2.66421 6.08579 3 6.5 3Z" fill="black" stroke="black" stroke-linecap="round" stroke-linejoin="round"/>
<path d="M6 8L9 5L12 8H6Z" fill="black" fill-opacity="0.3"/>
<path d="M6 8L9 5L12 8H6Z" fill="black" fill-opacity="0.33"/>
<path d="M2 10L5 7L7.375 9.375" stroke="black" stroke-width="1.25" stroke-linecap="round" stroke-linejoin="round"/>
<path d="M6 8L7.5 6.5L9 5L10.5 6.5L12 8" stroke="black" stroke-width="1.25" stroke-linecap="round" stroke-linejoin="round"/>
<path d="M3.375 2H2.5C2.22386 2 2 2.22386 2 2.5V11.5C2 11.7761 2.22386 12 2.5 12H7.35938M9.64062 2H11.5C11.7761 2 12 2.22386 12 2.5V11.5C12 11.7761 11.7761 12 11.5 12H10.125" stroke="black" stroke-width="1.25" stroke-linecap="round"/>

Before: 865 B → After: 866 B

View File

@@ -1,6 +1,6 @@
<svg width="14" height="14" viewBox="0 0 14 14" fill="none" xmlns="http://www.w3.org/2000/svg">
<rect x="3" y="5" width="8" height="7" rx="0.5" stroke="black" stroke-width="1.25"/>
<path d="M4 4C4 2.89543 4.89543 2 6 2H8C9.10457 2 10 2.89543 10 4V5H4V4Z" stroke="black" stroke-opacity="0.6" stroke-width="1.25"/>
<path d="M4 4C4 2.89543 4.89543 2 6 2H8C9.10457 2 10 2.89543 10 4V5H4V4Z" stroke="black" stroke-opacity="0.66" stroke-width="1.25"/>
<circle cx="7" cy="8" r="1" fill="black"/>
<path d="M7 8V9.375" stroke="black" stroke-width="1.25" stroke-linecap="round"/>
</svg>

Before: 444 B → After: 445 B

View File

@@ -1,3 +0,0 @@
<svg width="14" height="14" viewBox="0 0 14 14" fill="none" xmlns="http://www.w3.org/2000/svg">
<path d="M12 12L9.41379 9.41379M2 6.31034C2 3.92981 3.92981 2 6.31034 2C8.6909 2 10.6207 3.92981 10.6207 6.31034C10.6207 8.6909 8.6909 10.6207 6.31034 10.6207C3.92981 10.6207 2 8.6909 2 6.31034Z" stroke="black" stroke-width="1.25" stroke-linecap="round" stroke-linejoin="round"/>
</svg>

Before: 383 B

View File

@@ -1,8 +1,8 @@
<svg width="14" height="14" viewBox="0 0 14 14" fill="none" xmlns="http://www.w3.org/2000/svg">
<path d="M2.03125 2.96875C2.03125 2.41647 2.47897 1.96875 3.03125 1.96875H5V12H3.03125C2.47897 12 2.03125 11.5523 2.03125 11V2.96875Z" fill="black" fill-opacity="0.3"/>
<path d="M2.03125 2.96875C2.03125 2.41647 2.47897 1.96875 3.03125 1.96875H5V12H3.03125C2.47897 12 2.03125 11.5523 2.03125 11V2.96875Z" fill="black" fill-opacity="0.33"/>
<rect x="2" y="2" width="10" height="10" rx="0.5" stroke="black" stroke-width="1.25"/>
<path d="M9.5 5L7.5 5" stroke="black" stroke-width="1.25" stroke-linecap="round" stroke-linejoin="round"/>
<path d="M9.5 7H7.5" stroke="black" stroke-opacity="0.6" stroke-width="1.25" stroke-linecap="round" stroke-linejoin="round"/>
<path d="M9.5 9H7.5" stroke="black" stroke-opacity="0.3" stroke-width="1.25" stroke-linecap="round" stroke-linejoin="round"/>
<path d="M9.5 7H7.5" stroke="black" stroke-opacity="0.66" stroke-width="1.25" stroke-linecap="round" stroke-linejoin="round"/>
<path d="M9.5 9H7.5" stroke="black" stroke-opacity="0.33" stroke-width="1.25" stroke-linecap="round" stroke-linejoin="round"/>
<path d="M5 2V13" stroke="black" stroke-width="1.25" stroke-linecap="round" stroke-linejoin="round"/>
</svg>

Before: 820 B → After: 823 B

View File

@@ -1,4 +1,4 @@
<svg width="14" height="14" viewBox="0 0 14 14" fill="none" xmlns="http://www.w3.org/2000/svg">
<path d="M1.62677 3.88472L6.99983 6.78517M1.62677 3.88472L1.63137 9.90006L7.00442 12.8005M1.62677 3.88472L4.31117 2.54211M6.99983 6.78517L7.00442 12.8005M6.99983 6.78517L9.68414 5.33084M7.00442 12.8005L12.373 9.89186L12.3684 3.87652M4.31117 2.54211L6.99556 1.1995L12.3684 3.87652M4.31117 2.54211L9.68414 5.33084M12.3684 3.87652L9.68414 5.33084" stroke="black" stroke-width="1.25" stroke-linecap="round" stroke-linejoin="round"/>
<path d="M7.03125 12.5625V6.78125L1.5625 3.9375V9.75L7.03125 12.5625Z" fill="black" fill-opacity="0.3"/>
<path d="M7.03125 12.5625V6.78125L1.5625 3.9375V9.75L7.03125 12.5625Z" fill="black" fill-opacity="0.33"/>
</svg>

Before: 637 B → After: 638 B

View File

@@ -1,3 +0,0 @@
<svg width="14" height="14" viewBox="0 0 14 14" fill="none" xmlns="http://www.w3.org/2000/svg">
<path d="M7 3V11M11 7H3" stroke="black" stroke-width="1.25" stroke-linecap="round"/>
</svg>

Before: 188 B

View File

@@ -1,12 +1,12 @@
<svg width="14" height="14" viewBox="0 0 14 14" fill="none" xmlns="http://www.w3.org/2000/svg">
<path d="M2 2.86328H8.51563" stroke="black" stroke-width="1.25" stroke-linecap="round"/>
<path d="M11 2.86328L12 2.86328" stroke="black" stroke-opacity="0.3" stroke-width="1.25" stroke-linecap="round"/>
<path d="M9.64062 5.6263L12 5.6263" stroke="black" stroke-opacity="0.6" stroke-width="1.25" stroke-linecap="round"/>
<path d="M11 2.86328L12 2.86328" stroke="black" stroke-opacity="0.33" stroke-width="1.25" stroke-linecap="round"/>
<path d="M9.64062 5.6263L12 5.6263" stroke="black" stroke-opacity="0.66" stroke-width="1.25" stroke-linecap="round"/>
<path d="M4.79688 5.6263L7.15625 5.6263" stroke="black" stroke-width="1.25" stroke-linecap="round"/>
<path d="M2 5.6263L2.35937 5.6263" stroke="black" stroke-opacity="0.3" stroke-width="1.25" stroke-linecap="round"/>
<path d="M2 5.6263L2.35937 5.6263" stroke="black" stroke-opacity="0.33" stroke-width="1.25" stroke-linecap="round"/>
<path d="M7.15625 8.3737L12 8.3737" stroke="black" stroke-width="1.25" stroke-linecap="round"/>
<path d="M2 8.3737L4.64062 8.3737" stroke="black" stroke-opacity="0.6" stroke-width="1.25" stroke-linecap="round"/>
<path d="M2 8.3737L4.64062 8.3737" stroke="black" stroke-opacity="0.66" stroke-width="1.25" stroke-linecap="round"/>
<path d="M2 11.1094H3.54687" stroke="black" stroke-width="1.25" stroke-linecap="round"/>
<path d="M5.97656 11.1094H8.35938" stroke="black" stroke-width="1.25" stroke-linecap="round"/>
<path d="M10.8203 11.1094L12 11.1094" stroke="black" stroke-opacity="0.3" stroke-width="1.25" stroke-linecap="round"/>
<path d="M10.8203 11.1094L12 11.1094" stroke="black" stroke-opacity="0.33" stroke-width="1.25" stroke-linecap="round"/>
</svg>

Before: 1.1 KiB → After: 1.1 KiB

View File

@@ -1,5 +0,0 @@
<svg width="14" height="14" viewBox="0 0 14 14" fill="none" xmlns="http://www.w3.org/2000/svg">
<path d="M2.03125 2V2.03125M2.03125 8C2.03125 10 5 10 5 10M2.03125 8V2.03125M2.03125 8L2.03125 11M2.03125 2.03125C2.03125 4 5 4 5 4" stroke="black" stroke-width="1.25" stroke-linecap="round" stroke-linejoin="round"/>
<rect x="7.375" y="2.375" width="4.25" height="3.25" rx="1.125" fill="black" fill-opacity="0.33" stroke="black" stroke-width="1.25"/>
<rect x="7.375" y="8.375" width="4.25" height="3.25" rx="1.125" fill="black" fill-opacity="0.33" stroke="black" stroke-width="1.25"/>
</svg>

Before: 588 B

View File

@@ -1,11 +0,0 @@
<svg width="14" height="14" viewBox="0 0 14 14" fill="none" xmlns="http://www.w3.org/2000/svg">
<path d="M7 12C4.97279 12 3.22735 10.7936 2.4425 9.0595M7 2C9.11228 2 10.9186 3.30981 11.6512 5.16152" stroke="black" stroke-width="1.25" stroke-linecap="round" stroke-linejoin="round"/>
<circle cx="1.65625" cy="1.67188" r="0.625" fill="black" fill-opacity="0.5"/>
<circle cx="3.71094" cy="1.67188" r="0.625" fill="black" fill-opacity="0.5"/>
<circle cx="4.96094" cy="3.36719" r="0.625" fill="black" fill-opacity="0.5"/>
<circle cx="3.71094" cy="4.79688" r="0.625" fill="black" fill-opacity="0.5"/>
<circle cx="4.60156" cy="6.67188" r="0.625" fill="black" fill-opacity="0.5"/>
<circle cx="1.65625" cy="4.17188" r="0.625" fill="black" fill-opacity="0.5"/>
<circle cx="1.65625" cy="6.67188" r="0.625" fill="black" fill-opacity="0.5"/>
<path d="M10.7802 10.8195C10.838 10.8195 10.8906 10.8527 10.9155 10.9048L11.7174 12.5811C11.8088 12.7721 12.0017 12.8938 12.2135 12.8938H12.3394C12.7483 12.8938 13.0142 12.4635 12.8314 12.0978L12.1619 10.7589C12.1232 10.6816 12.1582 10.5823 12.241 10.5349C12.7565 10.2397 13.0695 9.66858 13.0695 9.00391C13.0695 8.43361 12.8777 7.97006 12.5248 7.64951C12.1725 7.3295 11.6652 7.15703 11.043 7.15703H9.49609C9.19234 7.15703 8.94609 7.40327 8.94609 7.70703V12.3438C8.94609 12.6475 9.19234 12.8938 9.49609 12.8938H9.60156C9.90532 12.8938 10.1516 12.6475 10.1516 12.3438V10.9695C10.1516 10.8867 10.2187 10.8195 10.3016 10.8195H10.7802ZM10.1516 8.31328C10.1516 8.23044 10.2187 8.16328 10.3016 8.16328H10.8984C11.2023 8.16328 11.4371 8.2449 11.5954 8.38814C11.7529 8.5308 11.8406 8.73993 11.8406 9.00781C11.8406 9.28155 11.751 9.49461 11.5909 9.63971C11.4302 9.7854 11.1925 9.86797 10.8867 9.86797H10.3016C10.2187 9.86797 10.1516 9.80081 10.1516 9.71797V8.31328Z" fill="black" stroke="black" stroke-width="0.1"/>
</svg>

Before: 1.8 KiB

View File

@@ -1,5 +0,0 @@
<svg width="14" height="14" viewBox="0 0 14 14" fill="none" xmlns="http://www.w3.org/2000/svg">
<path d="M4.10517 5.8012C4.07193 5.73172 4.00176 5.6875 3.92475 5.6875H3.44609C3.33564 5.6875 3.24609 5.77704 3.24609 5.8875V7.26172C3.24609 7.53786 3.02224 7.76172 2.74609 7.76172H2.64062C2.36448 7.76172 2.14062 7.53786 2.14062 7.26172V2.625C2.14062 2.34886 2.36448 2.125 2.64062 2.125H4.1875C5.41406 2.125 6.16406 2.80469 6.16406 3.92188C6.16406 4.57081 5.85885 5.12418 5.36073 5.40943C5.25888 5.46775 5.20921 5.59421 5.2617 5.69918L5.93117 7.03811C6.09739 7.37056 5.85564 7.76172 5.48395 7.76172H5.35806C5.16552 7.76172 4.99009 7.65117 4.907 7.47748L4.10517 5.8012ZM3.44609 3.03125C3.33564 3.03125 3.24609 3.12079 3.24609 3.23125V4.63594C3.24609 4.74639 3.33564 4.83594 3.44609 4.83594H4.03125C4.66016 4.83594 5.03516 4.49609 5.03516 3.92578C5.03516 3.36719 4.66797 3.03125 4.04297 3.03125H3.44609Z" fill="black" fill-opacity="0.5"/>
<path d="M3.92475 5.7375C3.98251 5.7375 4.03514 5.77067 4.06006 5.82277L4.8619 7.49905C4.95329 7.69011 5.14627 7.81172 5.35806 7.81172H5.48395C5.89281 7.81172 6.15873 7.38145 5.97589 7.01575L5.30642 5.67682C5.26778 5.59953 5.30269 5.50028 5.38557 5.45282C5.90107 5.15762 6.21406 4.58655 6.21406 3.92188C6.21406 3.35158 6.02226 2.88803 5.66936 2.56748C5.31705 2.24747 4.80973 2.075 4.1875 2.075H2.64062C2.33687 2.075 2.09062 2.32124 2.09062 2.625V7.26172C2.09062 7.56548 2.33687 7.81172 2.64062 7.81172H2.74609C3.04985 7.81172 3.29609 7.56548 3.29609 7.26172V5.8875C3.29609 5.80466 3.36325 5.7375 3.44609 5.7375H3.92475ZM3.29609 3.23125C3.29609 3.14841 3.36325 3.08125 3.44609 3.08125H4.04297C4.34688 3.08125 4.58164 3.16287 4.73988 3.30611C4.89748 3.44876 4.98516 3.6579 4.98516 3.92578C4.98516 4.19952 4.89553 4.41258 4.73546 4.55768C4.57475 4.70337 4.33706 4.78594 4.03125 4.78594H3.44609C3.36325 4.78594 3.29609 4.71878 3.29609 4.63594V3.23125Z" stroke="black" stroke-opacity="0.5" stroke-width="0.1"/>
<path d="M9.32812 6.65625V9.32812M9.32812 12V9.32812M12 9.32812H9.32812M6.65625 9.32812H9.32812M9.32812 9.32812L11.1094 7.54688M9.32812 9.32812L7.54688 11.1094M9.32812 9.32812L11.1094 11.1094M9.32812 9.32812L7.54688 7.54688" stroke="black" stroke-width="1.25" stroke-linecap="round"/>
</svg>

Before: 2.2 KiB

View File

@@ -1,5 +0,0 @@
<svg width="14" height="14" viewBox="0 0 14 14" fill="none" xmlns="http://www.w3.org/2000/svg">
<path d="M3.96454 5.6762C3.93131 5.60672 3.86114 5.5625 3.78412 5.5625H3.30547C3.19501 5.5625 3.10547 5.65204 3.10547 5.7625V7.13672C3.10547 7.41286 2.88161 7.63672 2.60547 7.63672H2.5C2.22386 7.63672 2 7.41286 2 7.13672V2.5C2 2.22386 2.22386 2 2.5 2H4.04688C5.27344 2 6.02344 2.67969 6.02344 3.79688C6.02344 4.44581 5.71823 4.99918 5.2201 5.28443C5.11826 5.34275 5.06859 5.46921 5.12107 5.57418L5.79054 6.91311C5.95677 7.24556 5.71502 7.63672 5.34333 7.63672H5.21743C5.02489 7.63672 4.84946 7.52617 4.76638 7.35248L3.96454 5.6762ZM3.30547 2.90625C3.19501 2.90625 3.10547 2.99579 3.10547 3.10625V4.51094C3.10547 4.62139 3.19501 4.71094 3.30547 4.71094H3.89062C4.51953 4.71094 4.89453 4.37109 4.89453 3.80078C4.89453 3.24219 4.52734 2.90625 3.90234 2.90625H3.30547Z" fill="black" fill-opacity="0.5"/>
<path d="M3.78412 5.6125C3.84188 5.6125 3.89451 5.64567 3.91944 5.69777L4.72127 7.37405C4.81266 7.56511 5.00564 7.68672 5.21743 7.68672H5.34333C5.75219 7.68672 6.01811 7.25645 5.83526 6.89075L5.1658 5.55182C5.12715 5.47453 5.16207 5.37528 5.24495 5.32782C5.76044 5.03262 6.07344 4.46155 6.07344 3.79688C6.07344 3.22658 5.88164 2.76303 5.52873 2.44248C5.17642 2.12247 4.6691 1.95 4.04688 1.95H2.5C2.19624 1.95 1.95 2.19624 1.95 2.5V7.13672C1.95 7.44048 2.19624 7.68672 2.5 7.68672H2.60547C2.90923 7.68672 3.15547 7.44048 3.15547 7.13672V5.7625C3.15547 5.67966 3.22263 5.6125 3.30547 5.6125H3.78412ZM3.15547 3.10625C3.15547 3.02341 3.22263 2.95625 3.30547 2.95625H3.90234C4.20626 2.95625 4.44101 3.03787 4.59926 3.18111C4.75686 3.32376 4.84453 3.5329 4.84453 3.80078C4.84453 4.07452 4.75491 4.28758 4.59484 4.43268C4.43413 4.57837 4.19643 4.66094 3.89062 4.66094H3.30547C3.22263 4.66094 3.15547 4.59378 3.15547 4.51094V3.10625Z" stroke="black" stroke-opacity="0.5" stroke-width="0.1"/>
<path d="M7.5 5.88672C9.433 5.88672 11 7.45372 11 9.38672V12M11 12L13 10M11 12L9 10" stroke="black" stroke-width="1.25" stroke-linecap="round" stroke-linejoin="round"/>
</svg>

Before: 2.0 KiB

View File

@@ -1,4 +1,4 @@
<svg width="14" height="14" viewBox="0 0 14 14" fill="none" xmlns="http://www.w3.org/2000/svg">
<path d="M4.27935 9.98207C4.32063 9.4038 3.9204 8.89049 3.35998 8.80276L2.60081 8.68387C2.37979 8.64945 2.20167 8.48001 2.15225 8.25614L2.01378 7.63511C1.96382 7.41235 2.05233 7.1807 2.23696 7.05125L2.8631 6.61242C3.33337 6.28297 3.47456 5.6369 3.18621 5.13364L2.79467 4.45092C2.68118 4.25261 2.69801 4.00374 2.83757 3.82321L3.22314 3.32436C3.3627 3.14438 3.59621 3.06994 3.81071 3.13772L4.57531 3.37769C5.11944 3.54879 5.70048 3.26159 5.90683 2.71886L6.1811 1.99782C6.26255 1.78395 6.46345 1.64285 6.68772 1.6423L7.31007 1.64063C7.53434 1.64007 7.73579 1.78006 7.81834 1.99337L8.09965 2.72275C8.30821 3.26214 8.88655 3.54712 9.42903 3.37714L10.1632 3.14716C10.3772 3.07994 10.6096 3.15382 10.7492 3.3327L11.1374 3.83099" stroke="black" stroke-opacity="0.6" stroke-width="1.25" stroke-linecap="round" stroke-linejoin="round"/>
<path fill-rule="evenodd" clip-rule="evenodd" d="M7.76988 10.5933C7.76988 10.6595 7.8236 10.7133 7.88988 10.7133H7.97588C8.32602 10.7133 8.60988 10.9971 8.60988 11.3472C8.60988 11.6974 8.32602 11.9812 7.97588 11.9812H6.05587C5.70573 11.9812 5.42188 11.6974 5.42188 11.3472C5.42188 10.9971 5.70573 10.7133 6.05587 10.7133H6.14188C6.20815 10.7133 6.26188 10.6595 6.26188 10.5933V6.66925C6.26188 6.60298 6.20815 6.54925 6.14188 6.54925H6.05588C5.70573 6.54925 5.42188 6.2654 5.42188 5.91525C5.42188 5.5651 5.70573 5.28125 6.05588 5.28125H8.89988C10.0518 5.28125 11.8619 5.71487 11.8619 7.15185C11.8619 7.67078 11.7284 8.10362 11.4642 8.45348C11.1981 8.79765 10.8458 9.05637 10.4056 9.22931C10.3782 9.24007 10.3673 9.27304 10.3829 9.29801L11.2163 10.6342C11.247 10.6834 11.3008 10.7133 11.3588 10.7133H11.7319C12.082 10.7133 12.3659 10.9971 12.3659 11.3472C12.3659 11.6974 12.082 11.9812 11.7319 11.9812H10.5637C10.4955 11.9812 10.432 11.9465 10.3952 11.889L8.96523 9.65406C8.92847 9.59661 8.86496 9.56185 8.79676 9.56185H7.96988C7.85942 9.56185 7.76988 9.65139 7.76988 9.76185V10.5933ZM8.61188 6.54925C9.02963 6.54925 10.125 6.54925 10.2339 7.18785C10.2975 7.56123 10.1181 7.86557 9.88118 8.07715C9.64227 8.29046 9.20527 8.38985 8.58788 8.38985H7.86988C7.81465 8.38985 7.76988 8.34508 7.76988 8.28985V6.64925C7.76988 6.59402 7.81465 6.54925 7.86988 6.54925H8.61188Z" fill="black"/>
<path d="M4.27935 9.98207C4.32063 9.4038 3.9204 8.89049 3.35998 8.80276L2.60081 8.68387C2.37979 8.64945 2.20167 8.48001 2.15225 8.25614L2.01378 7.63511C1.96382 7.41235 2.05233 7.1807 2.23696 7.05125L2.8631 6.61242C3.33337 6.28297 3.47456 5.6369 3.18621 5.13364L2.79467 4.45092C2.68118 4.25261 2.69801 4.00374 2.83757 3.82321L3.22314 3.32436C3.3627 3.14438 3.59621 3.06994 3.81071 3.13772L4.57531 3.37769C5.11944 3.54879 5.70048 3.26159 5.90683 2.71886L6.1811 1.99782C6.26255 1.78395 6.46345 1.64285 6.68772 1.6423L7.31007 1.64063C7.53434 1.64007 7.73579 1.78006 7.81834 1.99337L8.09965 2.72275C8.30821 3.26214 8.88655 3.54712 9.42903 3.37714L10.1632 3.14716C10.3772 3.07994 10.6096 3.15382 10.7492 3.3327L11.1374 3.83099" stroke="black" stroke-opacity="0.66" stroke-width="1.25" stroke-linecap="round" stroke-linejoin="round"/>
<path fill-rule="evenodd" clip-rule="evenodd" d="M7.76988 10.5933C7.76988 10.6595 7.8236 10.7133 7.88988 10.7133H7.97588C8.32602 10.7133 8.60988 10.9971 8.60988 11.3472V11.3472C8.60988 11.6974 8.32602 11.9812 7.97588 11.9812H6.05587C5.70573 11.9812 5.42188 11.6974 5.42188 11.3472V11.3472C5.42188 10.9971 5.70573 10.7133 6.05587 10.7133H6.14188C6.20815 10.7133 6.26188 10.6595 6.26188 10.5933V6.66925C6.26188 6.60298 6.20815 6.54925 6.14188 6.54925H6.05588C5.70573 6.54925 5.42188 6.2654 5.42188 5.91525V5.91525C5.42188 5.5651 5.70573 5.28125 6.05588 5.28125H8.89988C10.0518 5.28125 11.8619 5.71487 11.8619 7.15185C11.8619 7.67078 11.7284 8.10362 11.4642 8.45348C11.1981 8.79765 10.8458 9.05637 10.4056 9.22931V9.22931C10.3782 9.24007 10.3673 9.27304 10.3829 9.29801L11.2163 10.6342C11.247 10.6834 11.3008 10.7133 11.3588 10.7133H11.7319C12.082 10.7133 12.3659 10.9971 12.3659 11.3472V11.3472C12.3659 11.6974 12.082 11.9812 11.7319 11.9812H10.5637C10.4955 11.9812 10.432 11.9465 10.3952 11.889L8.96523 9.65406C8.92847 9.59661 8.86496 9.56185 8.79676 9.56185H7.96988C7.85942 9.56185 7.76988 9.65139 7.76988 9.76185V10.5933ZM8.61188 6.54925C9.02963 6.54925 10.125 6.54925 10.2339 7.18785C10.2975 7.56123 10.1181 7.86557 9.88118 8.07715C9.64227 8.29046 9.20527 8.38985 8.58788 8.38985H7.86988C7.81465 8.38985 7.76988 8.34508 7.76988 8.28985V6.64925C7.76988 6.59402 7.81465 6.54925 7.86988 6.54925H8.61188Z" fill="black"/>
</svg>

Before: 2.3 KiB → After: 2.3 KiB

View File

@@ -1,4 +1,4 @@
<svg width="14" height="14" viewBox="0 0 14 14" fill="none" xmlns="http://www.w3.org/2000/svg">
<path d="M2.60081 8.94324L3.35998 9.06214C3.9204 9.14986 4.32063 9.66317 4.27935 10.2414L4.22342 11.0252C4.20713 11.2536 4.32877 11.4686 4.53024 11.568L5.09174 11.8446C5.29321 11.9441 5.53379 11.9068 5.69834 11.7519L6.26255 11.2186C6.67855 10.8253 7.32041 10.8253 7.7369 11.2186L8.3011 11.7519C8.46565 11.9074 8.70572 11.9441 8.90772 11.8446L9.47027 11.5674C9.67124 11.4686 9.79234 11.2541 9.77607 11.0264L9.72007 10.2414C9.67883 9.66317 10.079 9.14986 10.6394 9.06214L11.3986 8.94324C11.6197 8.90883 11.7978 8.73938 11.8477 8.51607L11.9862 7.89504C12.0362 7.67172 11.9477 7.44007 11.763 7.31117L11.1293 6.86731C10.6617 6.53959 10.5189 5.89966 10.8013 5.3969L11.1841 4.71586C11.2954 4.51754 11.277 4.26923 11.1374 4.09036L10.7492 3.59207C10.6096 3.41319 10.3772 3.33932 10.1632 3.40653L9.42903 3.63651C8.88655 3.80649 8.30821 3.52152 8.09965 2.98213L7.81834 2.25275C7.73579 2.03944 7.53434 1.89945 7.31007 1.9L6.68772 1.90167C6.46345 1.90222 6.26255 2.04333 6.1811 2.25719L5.90683 2.97824C5.70048 3.52097 5.11944 3.80816 4.57531 3.63706L3.81071 3.39709C3.59621 3.32932 3.3627 3.40375 3.22314 3.58374L2.83757 4.08258C2.69801 4.26312 2.68118 4.51199 2.79467 4.7103L3.18621 5.39302C3.47456 5.89628 3.33337 6.54235 2.8631 6.87179L2.23696 7.31062C2.05233 7.44007 1.96382 7.67173 2.01378 7.89448L2.15225 8.51552C2.20167 8.73938 2.37979 8.90883 2.60081 8.94324Z" stroke="black" stroke-width="1.25" stroke-linecap="round" stroke-linejoin="round"/>
<path d="M8.14913 5.85093L8.14909 5.85089C7.51453 5.21637 6.48549 5.21637 5.85092 5.85089L5.85089 5.85092C5.21637 6.48549 5.21637 7.51453 5.85089 8.14909L5.85093 8.14913C6.48549 8.78362 7.51452 8.78362 8.14908 8.14913L8.14913 8.14908C8.78362 7.51452 8.78362 6.48549 8.14913 5.85093Z" fill="black" fill-opacity="0.3" stroke="black" stroke-width="1.25" stroke-linecap="round" stroke-linejoin="round"/>
<path d="M8.14913 5.85093L8.14909 5.85089C7.51453 5.21637 6.48549 5.21637 5.85092 5.85089L5.85089 5.85092C5.21637 6.48549 5.21637 7.51453 5.85089 8.14909L5.85093 8.14913C6.48549 8.78362 7.51452 8.78362 8.14908 8.14913L8.14913 8.14908C8.78362 7.51452 8.78362 6.48549 8.14913 5.85093Z" fill="black" fill-opacity="0.33" stroke="black" stroke-width="1.25" stroke-linecap="round" stroke-linejoin="round"/>
</svg>

Before: 1.9 KiB → After: 1.9 KiB

View File

@@ -1,5 +1,5 @@
<svg width="14" height="14" viewBox="0 0 14 14" fill="none" xmlns="http://www.w3.org/2000/svg">
<path d="M5 5H9" stroke="black" stroke-width="1.25" stroke-linecap="round"/>
<path d="M7 5L7 10" stroke="black" stroke-width="1.25" stroke-linecap="round"/>
<path d="M4 2H2.5C2.22386 2 2 2.22386 2 2.5V11.5C2 11.7761 2.22386 12 2.5 12H4M10 2H11.5C11.7761 2 12 2.22386 12 2.5V11.5C12 11.7761 11.7761 12 11.5 12H10" stroke="black" stroke-opacity="0.6" stroke-width="1.25" stroke-linecap="round"/>
<path d="M4 2H2.5C2.22386 2 2 2.22386 2 2.5V11.5C2 11.7761 2.22386 12 2.5 12H4M10 2H11.5C11.7761 2 12 2.22386 12 2.5V11.5C12 11.7761 11.7761 12 11.5 12H10" stroke="black" stroke-opacity="0.66" stroke-width="1.25" stroke-linecap="round"/>
</svg>

Before: 497 B → After: 498 B

View File

@@ -1,5 +1,5 @@
<svg width="14" height="14" viewBox="0 0 14 14" fill="none" xmlns="http://www.w3.org/2000/svg">
<path d="M12 4.375V2.5C12 2.22386 11.7761 2 11.5 2H2.5C2.22386 2 2 2.22386 2 2.5V11.5C2 11.7761 2.22386 12 2.5 12H3.375" stroke="black" stroke-opacity="0.6" stroke-width="1.25" stroke-linecap="round"/>
<path d="M12 4.375V2.5C12 2.22386 11.7761 2 11.5 2H2.5C2.22386 2 2 2.22386 2 2.5V11.5C2 11.7761 2.22386 12 2.5 12H3.375" stroke="black" stroke-opacity="0.66" stroke-width="1.25" stroke-linecap="round"/>
<path d="M10.6836 7.82805C10.7933 7.65392 10.9823 7.57377 11.174 7.57377C11.2904 7.57377 11.4019 7.59384 11.5092 7.62792C11.8324 7.73069 12.2148 7.63925 12.3392 7.32368L12.3773 7.22707C12.4703 6.99131 12.3823 6.71761 12.1522 6.61154C11.8328 6.46436 11.4984 6.375 11.1262 6.375C9.87708 6.375 8.91935 7.60671 9.4239 8.84869C9.54205 9.13951 9.74219 9.36166 9.9515 9.54337C10.1061 9.6776 10.2858 9.80516 10.4475 9.92002C10.4972 9.95529 10.5452 9.98936 10.5903 10.0221C11.0283 10.34 11.2526 10.5876 11.2526 10.9466C11.2526 11.1518 11.1622 11.3133 11.016 11.4128C10.8777 11.5071 10.7055 11.5357 10.5454 11.5222C10.3931 11.5093 10.2529 11.4717 10.1214 11.4196C9.81633 11.2989 9.45533 11.4015 9.33641 11.7073L9.2814 11.8487C9.19162 12.0796 9.2749 12.3463 9.49799 12.4539C10.0894 12.7391 10.7377 12.8279 11.3915 12.5872C12.0569 12.3423 12.595 11.7708 12.595 10.9068C12.595 10.1301 12.1336 9.69583 11.6966 9.36109C11.606 9.29163 11.5259 9.23292 11.4493 9.17682C11.3259 9.08638 11.1964 8.99109 11.0734 8.88536C10.8937 8.73082 10.7518 8.57274 10.6595 8.38613C10.5746 8.21464 10.5815 7.99013 10.6836 7.82805Z" fill="black"/>
<path d="M6.98644 7.70936H7.69396C7.98162 7.70936 8.21481 7.47617 8.21481 7.18851V7.02346C8.21481 6.73581 7.98162 6.50261 7.69396 6.50261H4.96848C4.68082 6.50261 4.44763 6.73581 4.44763 7.02346V7.18851C4.44763 7.47617 4.68082 7.70936 4.96848 7.70936H5.676V12.102C5.676 12.3896 5.90919 12.6228 6.19685 12.6228H6.46559C6.75325 12.6228 6.98644 12.3896 6.98644 12.102V7.70936Z" fill="black"/>
</svg>

Before: 1.8 KiB → After: 1.8 KiB

View File

@@ -1,4 +1,4 @@
<svg width="14" height="14" viewBox="0 0 14 14" fill="none" xmlns="http://www.w3.org/2000/svg">
<path d="M1.65625 2H11.8437C12.1199 2 12.3438 2.22386 12.3438 2.5V9.34375M12.3438 12H2.15625C1.88011 12 1.65625 11.7761 1.65625 11.5V4.65625" stroke="black" stroke-width="1.25" stroke-linecap="round"/>
<path d="M9 7.01562L5.65624 9.3125L5.65624 4.6875L9 7.01562Z" fill="black" fill-opacity="0.3" stroke="black" stroke-width="1.25" stroke-linecap="round" stroke-linejoin="round"/>
<path d="M9 7.01562L5.65624 9.3125L5.65624 4.6875L9 7.01562Z" fill="black" fill-opacity="0.33" stroke="black" stroke-width="1.25" stroke-linecap="round" stroke-linejoin="round"/>
</svg>

Before: 483 B → After: 484 B

View File

@@ -22,7 +22,6 @@
"alt-cmd-right": "pane::ActivateNextItem",
"cmd-w": "pane::CloseActiveItem",
"alt-cmd-t": "pane::CloseInactiveItems",
"ctrl-alt-cmd-w": "workspace::CloseInactiveTabsAndPanes",
"cmd-k u": "pane::CloseCleanItems",
"cmd-k cmd-w": "pane::CloseAllItems",
"cmd-shift-w": "workspace::CloseWindow",
@@ -227,26 +226,12 @@
"alt-enter": "search::SelectAllMatches"
}
},
{
"context": "BufferSearchBar > Editor",
"bindings": {
"up": "search::PreviousHistoryQuery",
"down": "search::NextHistoryQuery"
}
},
{
"context": "ProjectSearchBar",
"bindings": {
"escape": "project_search::ToggleFocus"
}
},
{
"context": "ProjectSearchBar > Editor",
"bindings": {
"up": "search::PreviousHistoryQuery",
"down": "search::NextHistoryQuery"
}
},
{
"context": "ProjectSearchView",
"bindings": {
@@ -421,11 +406,11 @@
"cmd-b": "workspace::ToggleLeftDock",
"cmd-r": "workspace::ToggleRightDock",
"cmd-j": "workspace::ToggleBottomDock",
"alt-cmd-y": "workspace::CloseAllDocks",
"cmd-shift-f": "workspace::NewSearch",
"cmd-k cmd-t": "theme_selector::Toggle",
"cmd-k cmd-s": "zed::OpenKeymap",
"cmd-t": "project_symbols::Toggle",
"cmd-ctrl-t": "semantic_search::Toggle",
"cmd-p": "file_finder::Toggle",
"cmd-shift-p": "command_palette::Toggle",
"cmd-shift-m": "diagnostics::Deploy",
@@ -461,22 +446,8 @@
},
{
"bindings": {
"cmd-k cmd-left": [
"workspace::ActivatePaneInDirection",
"Left"
],
"cmd-k cmd-right": [
"workspace::ActivatePaneInDirection",
"Right"
],
"cmd-k cmd-up": [
"workspace::ActivatePaneInDirection",
"Up"
],
"cmd-k cmd-down": [
"workspace::ActivatePaneInDirection",
"Down"
]
"cmd-k cmd-left": "workspace::ActivatePreviousPane",
"cmd-k cmd-right": "workspace::ActivateNextPane"
}
},
// Bindings from Atom
@@ -542,11 +513,8 @@
"cmd-alt-c": "project_panel::CopyPath",
"alt-cmd-shift-c": "project_panel::CopyRelativePath",
"f2": "project_panel::Rename",
"enter": "project_panel::Rename",
"space": "project_panel::Open",
"backspace": "project_panel::Delete",
"alt-cmd-r": "project_panel::RevealInFinder",
"alt-shift-f": "project_panel::NewSearchInDirectory"
"alt-cmd-r": "project_panel::RevealInFinder"
}
},
{

View File

@@ -2,6 +2,12 @@
{
"context": "Editor && VimControl && !VimWaiting && !menu",
"bindings": {
"g": [
"vim::PushOperator",
{
"Namespace": "G"
}
],
"i": [
"vim::PushOperator",
{
@@ -24,8 +30,6 @@
"j": "vim::Down",
"down": "vim::Down",
"enter": "vim::NextLineStart",
"tab": "vim::Tab",
"shift-tab": "vim::Tab",
"k": "vim::Up",
"up": "vim::Up",
"l": "vim::Right",
@@ -104,32 +108,6 @@
"*": "vim::MoveToNext",
"#": "vim::MoveToPrev",
"0": "vim::StartOfLine", // When no number operator present, use start of line motion
// "g" commands
"g g": "vim::StartOfDocument",
"g h": "editor::Hover",
"g t": "pane::ActivateNextItem",
"g shift-t": "pane::ActivatePrevItem",
"g d": "editor::GoToDefinition",
"g shift-d": "editor::GoToTypeDefinition",
"g .": "editor::ToggleCodeActions", // zed specific
"g shift-a": "editor::FindAllReferences", // zed specific
"g *": [
"vim::MoveToNext",
{
"partialWord": true
}
],
"g #": [
"vim::MoveToPrev",
{
"partialWord": true
}
],
// z commands
"z t": "editor::ScrollCursorTop",
"z z": "editor::ScrollCursorCenter",
"z b": "editor::ScrollCursorBottom",
// Count support
"1": [
"vim::Number",
1
@@ -165,75 +143,7 @@
"9": [
"vim::Number",
9
],
// window related commands (ctrl-w X)
"ctrl-w left": [
"workspace::ActivatePaneInDirection",
"Left"
],
"ctrl-w right": [
"workspace::ActivatePaneInDirection",
"Right"
],
"ctrl-w up": [
"workspace::ActivatePaneInDirection",
"Up"
],
"ctrl-w down": [
"workspace::ActivatePaneInDirection",
"Down"
],
"ctrl-w h": [
"workspace::ActivatePaneInDirection",
"Left"
],
"ctrl-w l": [
"workspace::ActivatePaneInDirection",
"Right"
],
"ctrl-w k": [
"workspace::ActivatePaneInDirection",
"Up"
],
"ctrl-w j": [
"workspace::ActivatePaneInDirection",
"Down"
],
"ctrl-w ctrl-h": [
"workspace::ActivatePaneInDirection",
"Left"
],
"ctrl-w ctrl-l": [
"workspace::ActivatePaneInDirection",
"Right"
],
"ctrl-w ctrl-k": [
"workspace::ActivatePaneInDirection",
"Up"
],
"ctrl-w ctrl-j": [
"workspace::ActivatePaneInDirection",
"Down"
],
"ctrl-w g t": "pane::ActivateNextItem",
"ctrl-w ctrl-g t": "pane::ActivateNextItem",
"ctrl-w g shift-t": "pane::ActivatePrevItem",
"ctrl-w ctrl-g shift-t": "pane::ActivatePrevItem",
"ctrl-w w": "workspace::ActivateNextPane",
"ctrl-w ctrl-w": "workspace::ActivateNextPane",
"ctrl-w p": "workspace::ActivatePreviousPane",
"ctrl-w ctrl-p": "workspace::ActivatePreviousPane",
"ctrl-w shift-w": "workspace::ActivatePreviousPane",
"ctrl-w ctrl-shift-w": "workspace::ActivatePreviousPane",
"ctrl-w v": "pane::SplitLeft",
"ctrl-w ctrl-v": "pane::SplitLeft",
"ctrl-w s": "pane::SplitUp",
"ctrl-w shift-s": "pane::SplitUp",
"ctrl-w ctrl-s": "pane::SplitUp",
"ctrl-w c": "pane::CloseAllItems",
"ctrl-w ctrl-c": "pane::CloseAllItems",
"ctrl-w q": "pane::CloseAllItems",
"ctrl-w ctrl-q": "pane::CloseAllItems"
]
}
},
{
@@ -254,6 +164,12 @@
"vim::PushOperator",
"Yank"
],
"z": [
"vim::PushOperator",
{
"Namespace": "Z"
}
],
"i": [
"vim::SwitchMode",
"Insert"
@@ -289,14 +205,7 @@
"?": [
"vim::Search",
{
"backwards": true
}
],
";": "vim::RepeatFind",
",": [
"vim::RepeatFind",
{
"backwards": true
"backwards": true,
}
],
"ctrl-f": "vim::PageDown",
@@ -327,11 +236,33 @@
]
}
},
{
"context": "Editor && vim_operator == g",
"bindings": {
"g": "vim::StartOfDocument",
"h": "editor::Hover",
"t": "pane::ActivateNextItem",
"shift-t": "pane::ActivatePrevItem",
"d": "editor::GoToDefinition",
"shift-d": "editor::GoToTypeDefinition",
"*": [
"vim::MoveToNext",
{
"partialWord": true
}
],
"#": [
"vim::MoveToPrev",
{
"partialWord": true
}
]
}
},
{
"context": "Editor && vim_operator == c",
"bindings": {
"c": "vim::CurrentLine",
"d": "editor::Rename" // zed specific
"c": "vim::CurrentLine"
}
},
{
@@ -346,6 +277,14 @@
"y": "vim::CurrentLine"
}
},
{
"context": "Editor && vim_operator == z",
"bindings": {
"t": "editor::ScrollCursorTop",
"z": "editor::ScrollCursorCenter",
"b": "editor::ScrollCursorBottom",
}
},
{
"context": "Editor && VimObject",
"bindings": {
@@ -398,7 +337,7 @@
"bindings": {
"escape": "vim::NormalBefore",
"ctrl-c": "vim::NormalBefore",
"ctrl-[": "vim::NormalBefore"
"ctrl-[": "vim::NormalBefore",
}
},
{

View File

@@ -50,13 +50,6 @@
// Whether to pop the completions menu while typing in an editor without
// explicitly requesting it.
"show_completions_on_input": true,
// Whether to show wrap guides in the editor. Setting this to true will
// show a guide at the 'preferred_line_length' value if softwrap is set to
// 'preferred_line_length', and will show any additional guides as specified
// by the 'wrap_guides' setting.
"show_wrap_guides": true,
// Character counts at which to show wrap guides in the editor.
"wrap_guides": [],
// Whether to use additional LSP queries to format (and amend) the code after
// every "trigger" symbol input, defined by LSP server capabilities.
"use_on_type_format": true,
@@ -324,8 +317,8 @@
// the terminal will default to matching the buffer's font family.
// "font_family": "Zed Mono"
},
// Difference settings for semantic_index
"semantic_index": {
// Difference settings for vector_store
"vector_store": {
"enabled": false,
"reindexing_delay_seconds": 600
},
@@ -363,6 +356,12 @@
// LSP Specific settings.
"lsp": {
// Specify the LSP name as a key here.
// As of 8/10/22, supported LSPs are:
// pyright
// gopls
// rust-analyzer
// typescript-language-server
// vscode-json-languageserver
// "rust-analyzer": {
// //These initialization options are merged into Zed's defaults
// "initialization_options": {
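
Taken together, the wrap-guide comments in the hunk above imply a configuration along these lines (a sketch with illustrative values, not part of the diff):

{
  // show a guide at preferred_line_length when soft wrap uses it
  "show_wrap_guides": true,
  "soft_wrap": "preferred_line_length",
  "preferred_line_length": 80,
  // additional guides at these column positions
  "wrap_guides": [100, 120]
}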

View File

@@ -1637,7 +1637,6 @@ impl ConversationEditor {
let mut editor = Editor::for_buffer(conversation.read(cx).buffer.clone(), None, cx);
editor.set_soft_wrap_mode(SoftWrap::EditorWidth, cx);
editor.set_show_gutter(false, cx);
editor.set_show_wrap_guides(false, cx);
editor
});

View File

@@ -20,7 +20,7 @@ use live_kit_client::{
};
use postage::stream::Stream;
use project::Project;
use std::{future::Future, mem, pin::Pin, sync::Arc, time::Duration};
use std::{future::Future, mem, panic::Location, pin::Pin, sync::Arc, time::Duration};
use util::{post_inc, ResultExt, TryFutureExt};
pub const RECONNECT_TIMEOUT: Duration = Duration::from_secs(30);
@@ -1089,6 +1089,7 @@ impl Room {
#[track_caller]
pub fn share_microphone(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
dbg!(Location::caller());
if self.status.is_offline() {
return Task::ready(Err(anyhow!("room is offline")));
} else if self.is_sharing_mic() {

View File

@@ -183,7 +183,7 @@ async fn apply_server_operation(
let username;
{
let mut plan = plan.lock();
let user = plan.user(user_id);
let mut user = plan.user(user_id);
if user.online {
return false;
}

View File

@@ -374,7 +374,7 @@ impl CollabTitlebarItem {
"Share Feedback",
feedback::feedback_editor::GiveFeedback,
),
ContextMenuItem::action("Sign Out", SignOut),
ContextMenuItem::action("Sign out", SignOut),
]
} else {
vec![

View File

@@ -338,9 +338,9 @@ impl Copilot {
let (server, fake_server) =
LanguageServer::fake("copilot".into(), Default::default(), cx.to_async());
let http = util::http::FakeHttpClient::create(|_| async { unreachable!() });
let this = cx.add_model(|_| Self {
let this = cx.add_model(|cx| Self {
http: http.clone(),
node_runtime: NodeRuntime::instance(http),
node_runtime: NodeRuntime::instance(http, cx.background().clone()),
server: CopilotServer::Running(RunningCopilotServer {
lsp: Arc::new(server),
sign_in_status: SignInStatus::Authorized,

View File

@@ -10,6 +10,7 @@ doctest = false
[features]
test-support = [
"rand",
"copilot/test-support",
"text/test-support",
"language/test-support",
@@ -61,8 +62,8 @@ serde.workspace = true
serde_derive.workspace = true
smallvec.workspace = true
smol.workspace = true
rand.workspace = true
rand = { workspace = true, optional = true }
tree-sitter-rust = { workspace = true, optional = true }
tree-sitter-html = { workspace = true, optional = true }
tree-sitter-typescript = { workspace = true, optional = true }

View File

@@ -397,7 +397,7 @@ impl InlayMap {
buffer_snapshot: MultiBufferSnapshot,
mut buffer_edits: Vec<text::Edit<usize>>,
) -> (InlaySnapshot, Vec<InlayEdit>) {
let snapshot = &mut self.snapshot;
let mut snapshot = &mut self.snapshot;
if buffer_edits.is_empty() {
if snapshot.buffer.trailing_excerpt_update_count()
@@ -572,6 +572,7 @@ impl InlayMap {
})
.collect();
let buffer_snapshot = snapshot.buffer.clone();
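// Dropping `snapshot` releases the &mut borrow of self.snapshot so that
// `self.sync(...)` below can take `self` mutably again.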
drop(snapshot);
let (snapshot, edits) = self.sync(buffer_snapshot, buffer_edits);
(snapshot, edits)
}
@@ -634,6 +635,7 @@ impl InlayMap {
}
log::info!("removing inlays: {:?}", to_remove);
drop(snapshot);
let (snapshot, edits) = self.splice(to_remove, to_insert);
(snapshot, edits)
}

View File

@@ -74,7 +74,6 @@ pub use multi_buffer::{
};
use ordered_float::OrderedFloat;
use project::{FormatTrigger, Location, LocationLink, Project, ProjectPath, ProjectTransaction};
use rand::{seq::SliceRandom, thread_rng};
use scroll::{
autoscroll::Autoscroll, OngoingScroll, ScrollAnchor, ScrollManager, ScrollbarAutoHide,
};
@@ -89,7 +88,7 @@ use std::{
cmp::{self, Ordering, Reverse},
mem,
num::NonZeroU32,
ops::{ControlFlow, Deref, DerefMut, Range, RangeInclusive},
ops::{ControlFlow, Deref, DerefMut, Range},
path::Path,
sync::Arc,
time::{Duration, Instant},
@@ -227,10 +226,6 @@ actions!(
MoveLineUp,
MoveLineDown,
JoinLines,
SortLinesCaseSensitive,
SortLinesCaseInsensitive,
ReverseLines,
ShuffleLines,
Transpose,
Cut,
Copy,
@@ -349,10 +344,6 @@ pub fn init(cx: &mut AppContext) {
cx.add_action(Editor::outdent);
cx.add_action(Editor::delete_line);
cx.add_action(Editor::join_lines);
cx.add_action(Editor::sort_lines_case_sensitive);
cx.add_action(Editor::sort_lines_case_insensitive);
cx.add_action(Editor::reverse_lines);
cx.add_action(Editor::shuffle_lines);
cx.add_action(Editor::delete_to_previous_word_start);
cx.add_action(Editor::delete_to_previous_subword_start);
cx.add_action(Editor::delete_to_next_word_end);
@@ -543,7 +534,6 @@ pub struct Editor {
show_local_selections: bool,
mode: EditorMode,
show_gutter: bool,
show_wrap_guides: Option<bool>,
placeholder_text: Option<Arc<str>>,
highlighted_rows: Option<Range<u32>>,
#[allow(clippy::type_complexity)]
@@ -573,7 +563,6 @@ pub struct Editor {
inlay_hint_cache: InlayHintCache,
next_inlay_id: usize,
_subscriptions: Vec<Subscription>,
pixel_position_of_newest_cursor: Option<Vector2F>,
}
pub struct EditorSnapshot {
@@ -1376,7 +1365,6 @@ impl Editor {
show_local_selections: true,
mode,
show_gutter: mode == EditorMode::Full,
show_wrap_guides: None,
placeholder_text: None,
highlighted_rows: None,
background_highlights: Default::default(),
@@ -1406,7 +1394,6 @@ impl Editor {
copilot_state: Default::default(),
inlay_hint_cache: InlayHintCache::new(inlay_hint_settings),
gutter_hovered: false,
pixel_position_of_newest_cursor: None,
_subscriptions: vec![
cx.observe(&buffer, Self::on_buffer_changed),
cx.subscribe(&buffer, Self::on_buffer_event),
@@ -1539,7 +1526,7 @@ impl Editor {
self.collapse_matches = collapse_matches;
}
pub fn range_for_match<T: std::marker::Copy>(&self, range: &Range<T>) -> Range<T> {
fn range_for_match<T: std::marker::Copy>(&self, range: &Range<T>) -> Range<T> {
if self.collapse_matches {
return range.start..range.start;
}
@@ -4216,96 +4203,6 @@ impl Editor {
});
}
pub fn sort_lines_case_sensitive(
&mut self,
_: &SortLinesCaseSensitive,
cx: &mut ViewContext<Self>,
) {
self.manipulate_lines(cx, |lines| lines.sort())
}
pub fn sort_lines_case_insensitive(
&mut self,
_: &SortLinesCaseInsensitive,
cx: &mut ViewContext<Self>,
) {
self.manipulate_lines(cx, |lines| lines.sort_by_key(|line| line.to_lowercase()))
}
pub fn reverse_lines(&mut self, _: &ReverseLines, cx: &mut ViewContext<Self>) {
self.manipulate_lines(cx, |lines| lines.reverse())
}
pub fn shuffle_lines(&mut self, _: &ShuffleLines, cx: &mut ViewContext<Self>) {
self.manipulate_lines(cx, |lines| lines.shuffle(&mut thread_rng()))
}
fn manipulate_lines<Fn>(&mut self, cx: &mut ViewContext<Self>, mut callback: Fn)
where
Fn: FnMut(&mut [&str]),
{
let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx));
let buffer = self.buffer.read(cx).snapshot(cx);
let mut edits = Vec::new();
let selections = self.selections.all::<Point>(cx);
let mut selections = selections.iter().peekable();
let mut contiguous_row_selections = Vec::new();
let mut new_selections = Vec::new();
while let Some(selection) = selections.next() {
let (start_row, end_row) = consume_contiguous_rows(
&mut contiguous_row_selections,
selection,
&display_map,
&mut selections,
);
let start_point = Point::new(start_row, 0);
let end_point = Point::new(end_row - 1, buffer.line_len(end_row - 1));
let text = buffer
.text_for_range(start_point..end_point)
.collect::<String>();
let mut lines = text.split("\n").collect_vec();
let lines_len = lines.len();
callback(&mut lines);
// This is a current limitation with selections.
// If we wanted to support removing or adding lines, we'd need to fix the logic associated with selections.
debug_assert!(
lines.len() == lines_len,
"callback should not change the number of lines"
);
edits.push((start_point..end_point, lines.join("\n")));
let start_anchor = buffer.anchor_after(start_point);
let end_anchor = buffer.anchor_before(end_point);
// Make selection and push
new_selections.push(Selection {
id: selection.id,
start: start_anchor.to_offset(&buffer),
end: end_anchor.to_offset(&buffer),
goal: SelectionGoal::None,
reversed: selection.reversed,
});
}
self.transact(cx, |this, cx| {
this.buffer.update(cx, |buffer, cx| {
buffer.edit(edits, None, cx);
});
this.change_selections(Some(Autoscroll::fit()), cx, |s| {
s.select(new_selections);
});
this.request_autoscroll(Autoscroll::fit(), cx);
});
}
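
// A hedged sketch, not part of this diff: because the callback must keep the
// line count unchanged (see the debug_assert above), any in-place reordering
// composes with manipulate_lines. `RotateLines` is a hypothetical action
// introduced here only for illustration.
pub fn rotate_lines(&mut self, _: &RotateLines, cx: &mut ViewContext<Self>) {
    self.manipulate_lines(cx, |lines| {
        if lines.len() > 1 {
            lines.rotate_left(1);
        }
    })
}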
pub fn duplicate_line(&mut self, _: &DuplicateLine, cx: &mut ViewContext<Self>) {
let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx));
let buffer = &display_map.buffer_snapshot;
@@ -5399,7 +5296,7 @@ impl Editor {
pub fn select_all(&mut self, _: &SelectAll, cx: &mut ViewContext<Self>) {
let end = self.buffer.read(cx).read(cx).len();
self.change_selections(None, cx, |s| {
self.change_selections(Some(Autoscroll::fit()), cx, |s| {
s.select_ranges(vec![0..end]);
});
}
@@ -6376,8 +6273,8 @@ impl Editor {
.range
.to_offset(definition.target.buffer.read(cx));
let range = self.range_for_match(&range);
if Some(&definition.target.buffer) == self.buffer.read(cx).as_singleton().as_ref() {
let range = self.range_for_match(&range);
self.change_selections(Some(Autoscroll::fit()), cx, |s| {
s.select_ranges([range]);
});
@@ -6394,6 +6291,7 @@ impl Editor {
// When selecting a definition in a different buffer, disable the nav history
// to avoid creating a history entry at the previous cursor location.
pane.update(cx, |pane, _| pane.disable_history());
let range = target_editor.range_for_match(&range);
target_editor.change_selections(Some(Autoscroll::fit()), cx, |s| {
s.select_ranges([range]);
});
@@ -7186,24 +7084,6 @@ impl Editor {
.text()
}
pub fn wrap_guides(&self, cx: &AppContext) -> SmallVec<[(usize, bool); 2]> {
let mut wrap_guides = smallvec::smallvec![];
if self.show_wrap_guides == Some(false) {
return wrap_guides;
}
let settings = self.buffer.read(cx).settings_at(0, cx);
if settings.show_wrap_guides {
if let SoftWrap::Column(soft_wrap) = self.soft_wrap_mode(cx) {
wrap_guides.push((soft_wrap as usize, true));
}
wrap_guides.extend(settings.wrap_guides.iter().map(|guide| (*guide, false)))
}
wrap_guides
}
pub fn soft_wrap_mode(&self, cx: &AppContext) -> SoftWrap {
let settings = self.buffer.read(cx).settings_at(0, cx);
let mode = self
@@ -7250,11 +7130,6 @@ impl Editor {
cx.notify();
}
pub fn set_show_wrap_guides(&mut self, show_wrap_guides: bool, cx: &mut ViewContext<Self>) {
self.show_wrap_guides = Some(show_wrap_guides);
cx.notify();
}
pub fn reveal_in_finder(&mut self, _: &RevealInFinder, cx: &mut ViewContext<Self>) {
if let Some(buffer) = self.buffer().read(cx).as_singleton() {
if let Some(file) = buffer.read(cx).file().and_then(|f| f.as_local()) {
@@ -7443,78 +7318,6 @@ impl Editor {
results
}
pub fn background_highlight_row_ranges<T: 'static>(
&self,
search_range: Range<Anchor>,
display_snapshot: &DisplaySnapshot,
count: usize,
) -> Vec<RangeInclusive<DisplayPoint>> {
let mut results = Vec::new();
let buffer = &display_snapshot.buffer_snapshot;
let Some((_, ranges)) = self.background_highlights
.get(&TypeId::of::<T>()) else {
return vec![];
};
let start_ix = match ranges.binary_search_by(|probe| {
let cmp = probe.end.cmp(&search_range.start, buffer);
if cmp.is_gt() {
Ordering::Greater
} else {
Ordering::Less
}
}) {
Ok(i) | Err(i) => i,
};
let mut push_region = |start: Option<Point>, end: Option<Point>| {
if let (Some(start_display), Some(end_display)) = (start, end) {
results.push(
start_display.to_display_point(display_snapshot)
..=end_display.to_display_point(display_snapshot),
);
}
};
let mut start_row: Option<Point> = None;
let mut end_row: Option<Point> = None;
if ranges.len() > count {
return vec![];
}
for range in &ranges[start_ix..] {
if range.start.cmp(&search_range.end, buffer).is_ge() {
break;
}
let end = range.end.to_point(buffer);
if let Some(current_row) = &end_row {
if end.row == current_row.row {
continue;
}
}
let start = range.start.to_point(buffer);
if start_row.is_none() {
assert_eq!(end_row, None);
start_row = Some(start);
end_row = Some(end);
continue;
}
if let Some(current_end) = end_row.as_mut() {
if start.row > current_end.row + 1 {
push_region(start_row, end_row);
start_row = Some(start);
end_row = Some(end);
} else {
// Merge two hunks.
*current_end = end;
}
} else {
unreachable!();
}
}
// We might still have a hunk that was not rendered (if there was a search hit on the last line)
push_region(start_row, end_row);
results
}
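
// A reduced sketch (an assumption-labeled restatement, not in the diff) of the
// coalescing rule above, over plain row numbers: a range whose start lies
// within one row of the previous region's end is merged into that region.
fn merge_contiguous_rows(rows: &[(u32, u32)]) -> Vec<(u32, u32)> {
    let mut regions: Vec<(u32, u32)> = Vec::new();
    for &(start, end) in rows {
        match regions.last_mut() {
            // Same test as `start.row > current_end.row + 1` above, inverted.
            Some((_, prev_end)) if start <= *prev_end + 1 => *prev_end = (*prev_end).max(end),
            _ => regions.push((start, end)),
        }
    }
    regions
}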
pub fn highlight_text<T: 'static>(
&mut self,
ranges: Vec<Range<Anchor>>,

View File

@@ -2500,156 +2500,6 @@ fn test_join_lines_with_multi_selection(cx: &mut TestAppContext) {
});
}
#[gpui::test]
async fn test_manipulate_lines_with_single_selection(cx: &mut TestAppContext) {
init_test(cx, |_| {});
let mut cx = EditorTestContext::new(cx).await;
// Test sort_lines_case_insensitive()
cx.set_state(indoc! {"
«z
y
x
Z
Y
Xˇ»
"});
cx.update_editor(|e, cx| e.sort_lines_case_insensitive(&SortLinesCaseInsensitive, cx));
cx.assert_editor_state(indoc! {"
«x
X
y
Y
z
Zˇ»
"});
// Test reverse_lines()
cx.set_state(indoc! {"
«5
4
3
2
1ˇ»
"});
cx.update_editor(|e, cx| e.reverse_lines(&ReverseLines, cx));
cx.assert_editor_state(indoc! {"
«1
2
3
4
5ˇ»
"});
// Skip testing shuffle_lines()
// From here on out, test more complex cases of manipulate_lines() with a single driver method: sort_lines_case_sensitive()
// Since all methods calling manipulate_lines() are doing the exact same general thing (reordering lines)
// Don't manipulate when cursor is on single line, but expand the selection
cx.set_state(indoc! {"
ddˇdd
ccc
bb
a
"});
cx.update_editor(|e, cx| e.sort_lines_case_sensitive(&SortLinesCaseSensitive, cx));
cx.assert_editor_state(indoc! {"
«ddddˇ»
ccc
bb
a
"});
// Basic manipulate case
// Start selection moves to column 0
// End of selection shrinks to fit shorter line
cx.set_state(indoc! {"
dd«d
ccc
bb
aaaaaˇ»
"});
cx.update_editor(|e, cx| e.sort_lines_case_sensitive(&SortLinesCaseSensitive, cx));
cx.assert_editor_state(indoc! {"
«aaaaa
bb
ccc
dddˇ»
"});
// Manipulate case with newlines
cx.set_state(indoc! {"
dd«d
ccc
bb
aaaaa
ˇ»
"});
cx.update_editor(|e, cx| e.sort_lines_case_sensitive(&SortLinesCaseSensitive, cx));
cx.assert_editor_state(indoc! {"
«
aaaaa
bb
ccc
dddˇ»
"});
}
#[gpui::test]
async fn test_manipulate_lines_with_multi_selection(cx: &mut TestAppContext) {
init_test(cx, |_| {});
let mut cx = EditorTestContext::new(cx).await;
// Manipulate with multiple selections on a single line
cx.set_state(indoc! {"
dd«dd
cˇ»c«c
bb
aaaˇ»aa
"});
cx.update_editor(|e, cx| e.sort_lines_case_sensitive(&SortLinesCaseSensitive, cx));
cx.assert_editor_state(indoc! {"
«aaaaa
bb
ccc
ddddˇ»
"});
// Manipulate with multiple disjoint selections
cx.set_state(indoc! {"
4
3
2
1ˇ»
dd«dd
ccc
bb
aaaˇ»aa
"});
cx.update_editor(|e, cx| e.sort_lines_case_sensitive(&SortLinesCaseSensitive, cx));
cx.assert_editor_state(indoc! {"
«1
2
3
4
5ˇ»
«aaaaa
bb
ccc
ddddˇ»
"});
}
#[gpui::test]
fn test_duplicate_line(cx: &mut TestAppContext) {
init_test(cx, |_| {});

View File

@@ -61,7 +61,6 @@ enum FoldMarkers {}
struct SelectionLayout {
head: DisplayPoint,
cursor_shape: CursorShape,
is_newest: bool,
range: Range<DisplayPoint>,
}
@@ -71,7 +70,6 @@ impl SelectionLayout {
line_mode: bool,
cursor_shape: CursorShape,
map: &DisplaySnapshot,
is_newest: bool,
) -> Self {
if line_mode {
let selection = selection.map(|p| p.to_point(&map.buffer_snapshot));
@@ -79,7 +77,6 @@ impl SelectionLayout {
Self {
head: selection.head().to_display_point(map),
cursor_shape,
is_newest,
range: point_range.start.to_display_point(map)
..point_range.end.to_display_point(map),
}
@@ -88,7 +85,6 @@ impl SelectionLayout {
Self {
head: selection.head(),
cursor_shape,
is_newest,
range: selection.range(),
}
}
@@ -172,10 +168,6 @@ impl EditorElement {
.on_drag(MouseButton::Left, {
let position_map = position_map.clone();
move |event, editor, cx| {
if event.end {
return;
}
if !Self::mouse_dragged(
editor,
event.platform_event,
@@ -545,36 +537,6 @@ impl EditorElement {
corner_radius: 0.,
});
}
let scroll_left =
layout.position_map.snapshot.scroll_position().x() * layout.position_map.em_width;
for (wrap_position, active) in layout.wrap_guides.iter() {
let x =
(text_bounds.origin_x() + wrap_position + layout.position_map.em_width / 2.)
- scroll_left;
if x < text_bounds.origin_x()
|| (layout.show_scrollbars && x > self.scrollbar_left(&bounds))
{
continue;
}
let color = if *active {
self.style.active_wrap_guide
} else {
self.style.wrap_guide
};
scene.push_quad(Quad {
bounds: RectF::new(
vec2f(x, text_bounds.origin_y()),
vec2f(1., text_bounds.height()),
),
background: Some(color),
border: Border::new(0., Color::transparent_black()),
corner_radius: 0.,
});
}
}
}
@@ -902,12 +864,6 @@ impl EditorElement {
let x = cursor_character_x - scroll_left;
let y = cursor_position.row() as f32 * layout.position_map.line_height
- scroll_top;
if selection.is_newest {
editor.pixel_position_of_newest_cursor = Some(vec2f(
bounds.origin_x() + x + block_width / 2.,
bounds.origin_y() + y + layout.position_map.line_height / 2.,
));
}
cursors.push(Cursor {
color: selection_style.cursor,
block_width,
@@ -1048,10 +1004,6 @@ impl EditorElement {
scene.pop_layer();
}
fn scrollbar_left(&self, bounds: &RectF) -> f32 {
bounds.max_x() - self.style.theme.scrollbar.width
}
fn paint_scrollbar(
&mut self,
scene: &mut SceneBuilder,
@@ -1070,7 +1022,7 @@ impl EditorElement {
let top = bounds.min_y();
let bottom = bounds.max_y();
let right = bounds.max_x();
let left = self.scrollbar_left(&bounds);
let left = right - style.width;
let row_range = &layout.scrollbar_row_range;
let max_row = layout.max_row as f32 + (row_range.end - row_range.start);
@@ -1107,6 +1059,8 @@ impl EditorElement {
if layout.is_singleton && scrollbar_settings.selections {
let start_anchor = Anchor::min();
let end_anchor = Anchor::max();
let mut start_row = None;
let mut end_row = None;
let color = scrollbar_theme.selections;
let border = Border {
width: 1.,
@@ -1117,32 +1071,54 @@ impl EditorElement {
bottom: false,
left: true,
};
let mut push_region = |start: DisplayPoint, end: DisplayPoint| {
let start_y = y_for_row(start.row() as f32);
let mut end_y = y_for_row(end.row() as f32);
if end_y - start_y < 1. {
end_y = start_y + 1.;
}
let bounds = RectF::from_points(vec2f(left, start_y), vec2f(right, end_y));
let mut push_region = |start, end| {
if let (Some(start_display), Some(end_display)) = (start, end) {
let start_y = y_for_row(start_display as f32);
let mut end_y = y_for_row(end_display as f32);
if end_y - start_y < 1. {
end_y = start_y + 1.;
}
let bounds = RectF::from_points(vec2f(left, start_y), vec2f(right, end_y));
scene.push_quad(Quad {
bounds,
background: Some(color),
border,
corner_radius: style.thumb.corner_radius,
})
scene.push_quad(Quad {
bounds,
background: Some(color),
border,
corner_radius: style.thumb.corner_radius,
})
}
};
let background_ranges = editor
.background_highlight_row_ranges::<crate::items::BufferSearchHighlights>(
for (row, _) in &editor
.background_highlights_in_range_for::<crate::items::BufferSearchHighlights>(
start_anchor..end_anchor,
&layout.position_map.snapshot,
50000,
);
for row in background_ranges {
let start = row.start();
let end = row.end();
push_region(*start, *end);
&theme,
)
{
let start_display = row.start;
let end_display = row.end;
if start_row.is_none() {
assert_eq!(end_row, None);
start_row = Some(start_display.row());
end_row = Some(end_display.row());
continue;
}
if let Some(current_end) = end_row.as_mut() {
if start_display.row() > *current_end + 1 {
push_region(start_row, end_row);
start_row = Some(start_display.row());
end_row = Some(end_display.row());
} else {
// Merge two hunks.
*current_end = end_display.row();
}
} else {
unreachable!();
}
}
// We might still have a hunk that was not rendered (if there was a search hit on the last line)
push_region(start_row, end_row);
}
if layout.is_singleton && scrollbar_settings.git_diff {
@@ -1231,10 +1207,6 @@ impl EditorElement {
})
.on_drag(MouseButton::Left, {
move |event, editor: &mut Editor, cx| {
if event.end {
return;
}
let y = event.prev_mouse_position.y();
let new_y = event.position.y();
if thumb_top < y && y < thumb_bottom {
@@ -1338,15 +1310,16 @@ impl EditorElement {
}
}
fn column_pixels(&self, column: usize, cx: &ViewContext<Editor>) -> f32 {
fn max_line_number_width(&self, snapshot: &EditorSnapshot, cx: &ViewContext<Editor>) -> f32 {
let digit_count = (snapshot.max_buffer_row() as f32 + 1.).log10().floor() as usize + 1;
let style = &self.style;
cx.text_layout_cache()
.layout_str(
" ".repeat(column).as_str(),
"1".repeat(digit_count).as_str(),
style.text.font_size,
&[(
column,
digit_count,
RunStyle {
font_id: style.text.font_id,
color: Color::black(),
@@ -1357,11 +1330,6 @@ impl EditorElement {
.width()
}
fn max_line_number_width(&self, snapshot: &EditorSnapshot, cx: &ViewContext<Editor>) -> f32 {
let digit_count = (snapshot.max_buffer_row() as f32 + 1.).log10().floor() as usize + 1;
self.column_pixels(digit_count, cx)
}
//Folds contained in a hunk are ignored apart from shrinking visual size
//If a fold contains any hunks then that fold line is marked as modified
fn layout_git_gutters(
@@ -2009,7 +1977,6 @@ impl Element<Editor> for EditorElement {
let snapshot = editor.snapshot(cx);
let style = self.style.clone();
let line_height = (style.text.font_size * style.line_height_scalar).round();
let gutter_padding;
@@ -2047,12 +2014,6 @@ impl Element<Editor> for EditorElement {
}
};
let wrap_guides = editor
.wrap_guides(cx)
.iter()
.map(|(guide, active)| (self.column_pixels(*guide, cx), *active))
.collect();
let scroll_height = (snapshot.max_point().row() + 1) as f32 * line_height;
if let EditorMode::AutoHeight { max_lines } = snapshot.mode {
size.set_y(
@@ -2147,7 +2108,6 @@ impl Element<Editor> for EditorElement {
line_mode,
cursor_shape,
&snapshot.display_snapshot,
false,
));
}
selections.extend(remote_selections);
@@ -2157,7 +2117,6 @@ impl Element<Editor> for EditorElement {
.selections
.disjoint_in_range(start_anchor..end_anchor, cx);
local_selections.extend(editor.selections.pending(cx));
let newest = editor.selections.newest(cx);
for selection in &local_selections {
let is_empty = selection.start == selection.end;
let selection_start = snapshot.prev_line_boundary(selection.start).1;
@@ -2180,13 +2139,11 @@ impl Element<Editor> for EditorElement {
local_selections
.into_iter()
.map(|selection| {
let is_newest = selection == newest;
SelectionLayout::new(
selection,
editor.selections.line_mode,
editor.cursor_shape,
&snapshot.display_snapshot,
is_newest,
)
})
.collect(),
@@ -2413,7 +2370,6 @@ impl Element<Editor> for EditorElement {
snapshot,
}),
visible_display_row_range: start_row..end_row,
wrap_guides,
gutter_size,
gutter_padding,
text_size,
@@ -2564,7 +2520,6 @@ pub struct LayoutState {
gutter_margin: f32,
text_size: Vector2F,
mode: EditorMode,
wrap_guides: SmallVec<[(f32, bool); 2]>,
visible_display_row_range: Range<u32>,
active_rows: BTreeMap<u32, bool>,
highlighted_rows: Option<Range<u32>>,

View File

@@ -571,6 +571,7 @@ fn new_update_task(
if let Some(buffer) =
refresh_multi_buffer.buffer(pending_refresh_query.buffer_id)
{
drop(refresh_multi_buffer);
editor.inlay_hint_cache.update_tasks.insert(
pending_refresh_query.excerpt_id,
UpdateTask {

View File

@@ -7,10 +7,8 @@ use anyhow::{Context, Result};
use collections::HashSet;
use futures::future::try_join_all;
use gpui::{
elements::*,
geometry::vector::{vec2f, Vector2F},
AppContext, AsyncAppContext, Entity, ModelHandle, Subscription, Task, View, ViewContext,
ViewHandle, WeakViewHandle,
elements::*, geometry::vector::vec2f, AppContext, AsyncAppContext, Entity, ModelHandle,
Subscription, Task, View, ViewContext, ViewHandle, WeakViewHandle,
};
use language::{
proto::serialize_anchor as serialize_text_anchor, Bias, Buffer, OffsetRangeExt, Point,
@@ -752,10 +750,6 @@ impl Item for Editor {
Some(Box::new(handle.clone()))
}
fn pixel_position_of_cursor(&self) -> Option<Vector2F> {
self.pixel_position_of_newest_cursor
}
fn breadcrumb_location(&self) -> ToolbarItemLocation {
ToolbarItemLocation::PrimaryLeft { flex: None }
}

View File

@@ -138,7 +138,7 @@ impl SelectionsCollection {
.collect()
}
/// Returns all of the selections, adjusted to take into account the selection line_mode
// Returns all of the selections, adjusted to take into account the selection line_mode
pub fn all_adjusted(&self, cx: &mut AppContext) -> Vec<Selection<Point>> {
let mut selections = self.all::<Point>(cx);
if self.line_mode {

View File

@@ -1,6 +1,6 @@
use anyhow::Result;
use collections::HashMap;
use git2::{BranchType, StatusShow};
use git2::{BranchType, ErrorCode};
use parking_lot::Mutex;
use rpc::proto;
use serde_derive::{Deserialize, Serialize};
@@ -10,7 +10,6 @@ use std::{
os::unix::prelude::OsStrExt,
path::{Component, Path, PathBuf},
sync::Arc,
time::SystemTime,
};
use sum_tree::{MapSeekTarget, TreeMap};
use util::ResultExt;
@@ -26,30 +25,24 @@ pub struct Branch {
#[async_trait::async_trait]
pub trait GitRepository: Send {
fn reload_index(&self);
fn load_index_text(&self, relative_file_path: &Path) -> Option<String>;
fn branch_name(&self) -> Option<String>;
/// Get the statuses of all of the files in the index that start with the given
/// path and have changes with respect to the HEAD commit. This is fast because
/// the index stores hashes of trees, so that unchanged directories can be skipped.
fn staged_statuses(&self, path_prefix: &Path) -> TreeMap<RepoPath, GitFileStatus>;
fn statuses(&self) -> Option<TreeMap<RepoPath, GitFileStatus>>;
/// Get the status of a given file in the working directory with respect to
/// the index. In the common case, when there are no changes, this only requires
/// an index lookup. The index stores the mtime of each file when it was added,
/// so there's no work to do if the mtime matches.
fn unstaged_status(&self, path: &RepoPath, mtime: SystemTime) -> Option<GitFileStatus>;
fn status(&self, path: &RepoPath) -> Result<Option<GitFileStatus>>;
/// Get the status of a given file in the working directory with respect to
/// the HEAD commit. In the common case, when there are no changes, this only
/// requires an index lookup and blob comparison between the index and the HEAD
/// commit. The index stores the mtime of each file when it was added, so there's
/// no need to consider the working directory file if the mtime matches.
fn status(&self, path: &RepoPath, mtime: SystemTime) -> Option<GitFileStatus>;
fn branches(&self) -> Result<Vec<Branch>>;
fn change_branch(&self, _: &str) -> Result<()>;
fn create_branch(&self, _: &str) -> Result<()>;
fn branches(&self) -> Result<Vec<Branch>> {
Ok(vec![])
}
fn change_branch(&self, _: &str) -> Result<()> {
Ok(())
}
fn create_branch(&self, _: &str) -> Result<()> {
Ok(())
}
}
impl std::fmt::Debug for dyn GitRepository {
@@ -58,6 +51,7 @@ impl std::fmt::Debug for dyn GitRepository {
}
}
#[async_trait::async_trait]
impl GitRepository for LibGitRepository {
fn reload_index(&self) {
if let Ok(mut index) = self.index() {
@@ -95,67 +89,39 @@ impl GitRepository for LibGitRepository {
Some(branch.to_string())
}
fn staged_statuses(&self, path_prefix: &Path) -> TreeMap<RepoPath, GitFileStatus> {
fn statuses(&self) -> Option<TreeMap<RepoPath, GitFileStatus>> {
let statuses = self.statuses(None).log_err()?;
let mut map = TreeMap::default();
let mut options = git2::StatusOptions::new();
options.pathspec(path_prefix);
options.show(StatusShow::Index);
for status in statuses
.iter()
.filter(|status| !status.status().contains(git2::Status::IGNORED))
{
let path = RepoPath(PathBuf::from(OsStr::from_bytes(status.path_bytes())));
let Some(status) = read_status(status.status()) else {
continue
};
if let Some(statuses) = self.statuses(Some(&mut options)).log_err() {
for status in statuses.iter() {
let path = RepoPath(PathBuf::from(OsStr::from_bytes(status.path_bytes())));
let status = status.status();
if !status.contains(git2::Status::IGNORED) {
if let Some(status) = read_status(status) {
map.insert(path, status)
}
map.insert(path, status)
}
Some(map)
}
fn status(&self, path: &RepoPath) -> Result<Option<GitFileStatus>> {
let status = self.status_file(path);
match status {
Ok(status) => Ok(read_status(status)),
Err(e) => {
if e.code() == ErrorCode::NotFound {
Ok(None)
} else {
Err(e.into())
}
}
}
map
}
fn unstaged_status(&self, path: &RepoPath, mtime: SystemTime) -> Option<GitFileStatus> {
// If the file has not changed since it was added to the index, then
// there can't be any changes.
if matches_index(self, path, mtime) {
return None;
}
let mut options = git2::StatusOptions::new();
options.pathspec(&path.0);
options.disable_pathspec_match(true);
options.include_untracked(true);
options.recurse_untracked_dirs(true);
options.include_unmodified(true);
options.show(StatusShow::Workdir);
let statuses = self.statuses(Some(&mut options)).log_err()?;
let status = statuses.get(0).and_then(|s| read_status(s.status()));
status
}
fn status(&self, path: &RepoPath, mtime: SystemTime) -> Option<GitFileStatus> {
let mut options = git2::StatusOptions::new();
options.pathspec(&path.0);
options.disable_pathspec_match(true);
options.include_untracked(true);
options.recurse_untracked_dirs(true);
options.include_unmodified(true);
// If the file has not changed since it was added to the index, then
// there's no need to examine the working directory file: just compare
// the blob in the index to the one in the HEAD commit.
if matches_index(self, path, mtime) {
options.show(StatusShow::Index);
}
let statuses = self.statuses(Some(&mut options)).log_err()?;
let status = statuses.get(0).and_then(|s| read_status(s.status()));
status
}
fn branches(&self) -> Result<Vec<Branch>> {
let local_branches = self.branches(Some(BranchType::Local))?;
let valid_branches = local_branches
@@ -198,21 +164,6 @@ impl GitRepository for LibGitRepository {
}
}
fn matches_index(repo: &LibGitRepository, path: &RepoPath, mtime: SystemTime) -> bool {
if let Some(index) = repo.index().log_err() {
if let Some(entry) = index.get_path(&path, 0) {
if let Some(mtime) = mtime.duration_since(SystemTime::UNIX_EPOCH).log_err() {
if entry.mtime.seconds() == mtime.as_secs() as i32
&& entry.mtime.nanoseconds() == mtime.subsec_nanos()
{
return true;
}
}
}
}
false
}
fn read_status(status: git2::Status) -> Option<GitFileStatus> {
if status.contains(git2::Status::CONFLICTED) {
Some(GitFileStatus::Conflict)
@@ -262,40 +213,18 @@ impl GitRepository for FakeGitRepository {
state.branch_name.clone()
}
fn staged_statuses(&self, path_prefix: &Path) -> TreeMap<RepoPath, GitFileStatus> {
fn statuses(&self) -> Option<TreeMap<RepoPath, GitFileStatus>> {
let state = self.state.lock();
let mut map = TreeMap::default();
let state = self.state.lock();
for (repo_path, status) in state.worktree_statuses.iter() {
if repo_path.0.starts_with(path_prefix) {
map.insert(repo_path.to_owned(), status.to_owned());
}
map.insert(repo_path.to_owned(), status.to_owned());
}
map
Some(map)
}
fn unstaged_status(&self, _path: &RepoPath, _mtime: SystemTime) -> Option<GitFileStatus> {
None
}
fn status(&self, path: &RepoPath, _mtime: SystemTime) -> Option<GitFileStatus> {
fn status(&self, path: &RepoPath) -> Result<Option<GitFileStatus>> {
let state = self.state.lock();
state.worktree_statuses.get(path).cloned()
}
fn branches(&self) -> Result<Vec<Branch>> {
Ok(vec![])
}
fn change_branch(&self, name: &str) -> Result<()> {
let mut state = self.state.lock();
state.branch_name = Some(name.to_owned());
Ok(())
}
fn create_branch(&self, name: &str) -> Result<()> {
let mut state = self.state.lock();
state.branch_name = Some(name.to_owned());
Ok(())
Ok(state.worktree_statuses.get(path).cloned())
}
}
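
A minimal sketch of the fast path described in the doc comments above; the names here are hypothetical stand-ins, and matches_index in this hunk is the real git2-backed version. Git records each index entry's mtime as a seconds/nanoseconds pair, so an unchanged working file can be detected without reading its contents:

use std::time::{SystemTime, UNIX_EPOCH};

fn mtime_matches_index_entry(file_mtime: SystemTime, entry_secs: i32, entry_nanos: u32) -> bool {
    // Equal mtimes mean the file was not touched since `git add`, so only
    // index-vs-HEAD differences remain to be reported.
    file_mtime
        .duration_since(UNIX_EPOCH)
        .map(|t| t.as_secs() as i32 == entry_secs && t.subsec_nanos() == entry_nanos)
        .unwrap_or(false)
}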

View File

@@ -1128,12 +1128,6 @@ impl AppContext {
self.keystroke_matcher.clear_bindings();
}
pub fn binding_for_action(&self, action: &dyn Action) -> Option<&Binding> {
self.keystroke_matcher
.bindings_for_action(action.id())
.find(|binding| binding.action().eq(action))
}
pub fn default_global<T: 'static + Default>(&mut self) -> &T {
let type_id = TypeId::of::<T>();
self.update(|this| {
@@ -3417,14 +3411,18 @@ impl<'a, 'b, 'c, V: View> LayoutContext<'a, 'b, 'c, V> {
handler_depth = Some(contexts.len())
}
let action_contexts = if let Some(depth) = handler_depth {
&contexts[depth..]
} else {
&contexts
};
self.keystroke_matcher
.keystrokes_for_action(action, action_contexts)
.bindings_for_action(action.id())
.find_map(|b| {
let highest_handler = handler_depth?;
if action.eq(b.action())
&& (0..=highest_handler).any(|depth| b.match_context(&contexts[depth..]))
{
Some(b.keystrokes().into())
} else {
None
}
})
}
fn notify_if_view_ancestors_change(&mut self, view_id: usize) {

View File

@@ -518,18 +518,6 @@ impl<'a> WindowContext<'a> {
// NOTE: The order of event pushes is important! MouseUp events MUST be fired
// before click events, and so the MouseUp events need to be pushed before
// MouseClick events.
// Synthesize one last drag event to end the drag
mouse_events.push(MouseEvent::Drag(MouseDrag {
region: Default::default(),
prev_mouse_position: self.window.mouse_position,
platform_event: MouseMovedEvent {
position: e.position,
pressed_button: Some(e.button),
modifiers: e.modifiers,
},
end: true,
}));
mouse_events.push(MouseEvent::Up(MouseUp {
region: Default::default(),
platform_event: e.clone(),
@@ -577,16 +565,8 @@ impl<'a> WindowContext<'a> {
region: Default::default(),
prev_mouse_position: self.window.mouse_position,
platform_event: e.clone(),
end: false,
}));
} else if let Some((_, clicked_button)) = self.window.clicked_region {
mouse_events.push(MouseEvent::Drag(MouseDrag {
region: Default::default(),
prev_mouse_position: self.window.mouse_position,
platform_event: e.clone(),
end: true,
}));
// Mouse up event happened outside the current window. Simulate mouse up button event
let button_event = e.to_button_event(clicked_button);
mouse_events.push(MouseEvent::Up(MouseUp {

View File

@@ -147,9 +147,6 @@ impl<V: View> Element<V> for Resizable<V> {
let max_size = side.relevant_component(constraint.max);
let on_resize = self.on_resize.clone();
move |event, view: &mut V, cx| {
if event.end {
return;
}
let new_size = min_size
.max(prev_size + side.compute_delta(event))
.min(max_size)

View File

@@ -10,8 +10,8 @@ use crate::{
mac::{
platform::NSViewLayerContentsRedrawDuringViewResize, renderer::Renderer, screen::Screen,
},
Event, InputHandler, KeyDownEvent, Modifiers, ModifiersChangedEvent, MouseButton,
MouseButtonEvent, MouseMovedEvent, Scene, WindowBounds, WindowKind,
Event, InputHandler, KeyDownEvent, ModifiersChangedEvent, MouseButton, MouseButtonEvent,
MouseMovedEvent, Scene, WindowBounds, WindowKind,
},
};
use block::ConcreteBlock;
@@ -1053,44 +1053,7 @@ extern "C" fn handle_view_event(this: &Object, _: Sel, native_event: id) {
let window_height = window_state_borrow.content_size().y();
let event = unsafe { Event::from_native(native_event, Some(window_height)) };
if let Some(mut event) = event {
let synthesized_second_event = match &mut event {
Event::MouseDown(
event @ MouseButtonEvent {
button: MouseButton::Left,
modifiers: Modifiers { ctrl: true, .. },
..
},
) => {
*event = MouseButtonEvent {
button: MouseButton::Right,
modifiers: Modifiers {
ctrl: false,
..event.modifiers
},
click_count: 1,
..*event
};
Some(Event::MouseUp(MouseButtonEvent {
button: MouseButton::Right,
..*event
}))
}
// Because we map a ctrl-left_down to a right_down -> right_up, let's ignore
// the ctrl-left_up to avoid having a mismatch in button down/up events if the
// user is still holding ctrl when releasing the left mouse button
Event::MouseUp(MouseButtonEvent {
button: MouseButton::Left,
modifiers: Modifiers { ctrl: true, .. },
..
}) => return,
_ => None,
};
if let Some(event) = event {
match &event {
Event::MouseMoved(
event @ MouseMovedEvent {
@@ -1142,9 +1105,6 @@ extern "C" fn handle_view_event(this: &Object, _: Sel, native_event: id) {
if let Some(mut callback) = window_state_borrow.event_callback.take() {
drop(window_state_borrow);
callback(event);
if let Some(event) = synthesized_second_event {
callback(event);
}
window_state.borrow_mut().event_callback = Some(callback);
}
}

View File

@@ -32,7 +32,6 @@ pub struct MouseDrag {
pub region: RectF,
pub prev_mouse_position: Vector2F,
pub platform_event: MouseMovedEvent,
pub end: bool,
}
impl Deref for MouseDrag {

View File

@@ -182,8 +182,8 @@ impl CachedLspAdapter {
self.adapter.workspace_configuration(cx)
}
pub fn process_diagnostics(&self, params: &mut lsp::PublishDiagnosticsParams) {
self.adapter.process_diagnostics(params)
pub async fn process_diagnostics(&self, params: &mut lsp::PublishDiagnosticsParams) {
self.adapter.process_diagnostics(params).await
}
pub async fn process_completion(&self, completion_item: &mut lsp::CompletionItem) {
@@ -262,7 +262,7 @@ pub trait LspAdapter: 'static + Send + Sync {
container_dir: PathBuf,
) -> Option<LanguageServerBinary>;
fn process_diagnostics(&self, _: &mut lsp::PublishDiagnosticsParams) {}
async fn process_diagnostics(&self, _: &mut lsp::PublishDiagnosticsParams) {}
async fn process_completion(&self, _: &mut lsp::CompletionItem) {}
@@ -339,8 +339,6 @@ pub struct LanguageConfig {
#[serde(default)]
pub line_comment: Option<Arc<str>>,
#[serde(default)]
pub collapsed_placeholder: String,
#[serde(default)]
pub block_comment: Option<(Arc<str>, Arc<str>)>,
#[serde(default)]
pub overrides: HashMap<String, LanguageConfigOverride>,
@@ -410,7 +408,6 @@ impl Default for LanguageConfig {
line_comment: Default::default(),
block_comment: Default::default(),
overrides: Default::default(),
collapsed_placeholder: Default::default(),
}
}
}
@@ -526,10 +523,9 @@ pub struct OutlineConfig {
pub struct EmbeddingConfig {
pub query: Query,
pub item_capture_ix: u32,
pub name_capture_ix: Option<u32>,
pub name_capture_ix: u32,
pub context_capture_ix: Option<u32>,
pub collapse_capture_ix: Option<u32>,
pub keep_capture_ix: Option<u32>,
pub extra_context_capture_ix: Option<u32>,
}
struct InjectionConfig {
@@ -844,8 +840,8 @@ impl LanguageRegistry {
}
}
}
Err(e) => {
log::error!("failed to load language {name}:\n{:?}", e);
Err(err) => {
log::error!("failed to load language {name} - {err}");
let mut state = this.state.write();
state.mark_language_loaded(id);
if let Some(mut txs) = state.loading_languages.remove(&id) {
@@ -853,7 +849,7 @@ impl LanguageRegistry {
let _ = tx.send(Err(anyhow!(
"failed to load language {}: {}",
name,
e
err
)));
}
}
@@ -1188,39 +1184,25 @@ impl Language {
pub fn with_queries(mut self, queries: LanguageQueries) -> Result<Self> {
if let Some(query) = queries.highlights {
self = self
.with_highlights_query(query.as_ref())
.context("Error loading highlights query")?;
self = self.with_highlights_query(query.as_ref())?;
}
if let Some(query) = queries.brackets {
self = self
.with_brackets_query(query.as_ref())
.context("Error loading brackets query")?;
self = self.with_brackets_query(query.as_ref())?;
}
if let Some(query) = queries.indents {
self = self
.with_indents_query(query.as_ref())
.context("Error loading indents query")?;
self = self.with_indents_query(query.as_ref())?;
}
if let Some(query) = queries.outline {
self = self
.with_outline_query(query.as_ref())
.context("Error loading outline query")?;
self = self.with_outline_query(query.as_ref())?;
}
if let Some(query) = queries.embedding {
self = self
.with_embedding_query(query.as_ref())
.context("Error loading embedding query")?;
self = self.with_embedding_query(query.as_ref())?;
}
if let Some(query) = queries.injections {
self = self
.with_injection_query(query.as_ref())
.context("Error loading injection query")?;
self = self.with_injection_query(query.as_ref())?;
}
if let Some(query) = queries.overrides {
self = self
.with_override_query(query.as_ref())
.context("Error loading override query")?;
self = self.with_override_query(query.as_ref())?;
}
Ok(self)
}
@@ -1265,26 +1247,23 @@ impl Language {
let mut item_capture_ix = None;
let mut name_capture_ix = None;
let mut context_capture_ix = None;
let mut collapse_capture_ix = None;
let mut keep_capture_ix = None;
let mut extra_context_capture_ix = None;
get_capture_indices(
&query,
&mut [
("item", &mut item_capture_ix),
("name", &mut name_capture_ix),
("context", &mut context_capture_ix),
("keep", &mut keep_capture_ix),
("collapse", &mut collapse_capture_ix),
("context.extra", &mut extra_context_capture_ix),
],
);
if let Some(item_capture_ix) = item_capture_ix {
if let Some((item_capture_ix, name_capture_ix)) = item_capture_ix.zip(name_capture_ix) {
grammar.embedding_config = Some(EmbeddingConfig {
query,
item_capture_ix,
name_capture_ix,
context_capture_ix,
collapse_capture_ix,
keep_capture_ix,
extra_context_capture_ix,
});
}
Ok(self)
@@ -1487,6 +1466,12 @@ impl Language {
None
}
pub async fn process_diagnostics(&self, diagnostics: &mut lsp::PublishDiagnosticsParams) {
for adapter in &self.adapters {
adapter.process_diagnostics(diagnostics).await;
}
}
pub async fn process_completion(self: &Arc<Self>, completion: &mut lsp::CompletionItem) {
for adapter in &self.adapters {
adapter.process_completion(completion).await;
@@ -1563,20 +1548,9 @@ impl Language {
pub fn grammar(&self) -> Option<&Arc<Grammar>> {
self.grammar.as_ref()
}
pub fn default_scope(self: &Arc<Self>) -> LanguageScope {
LanguageScope {
language: self.clone(),
override_id: None,
}
}
}
impl LanguageScope {
pub fn collapsed_placeholder(&self) -> &str {
self.language.config.collapsed_placeholder.as_ref()
}
pub fn line_comment_prefix(&self) -> Option<&Arc<str>> {
Override::as_option(
self.config_override().map(|o| &o.line_comment),
@@ -1750,7 +1724,7 @@ impl LspAdapter for Arc<FakeLspAdapter> {
unreachable!();
}
fn process_diagnostics(&self, _: &mut lsp::PublishDiagnosticsParams) {}
async fn process_diagnostics(&self, _: &mut lsp::PublishDiagnosticsParams) {}
async fn disk_based_diagnostic_sources(&self) -> Vec<String> {
self.disk_based_diagnostics_sources.clone()
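
A reduced sketch of the signature change running through this file (assuming async_trait is in scope, with Params standing in for lsp::PublishDiagnosticsParams): the diagnostics hook becomes an async trait method with a default no-op body, so adapters may await inside it.

use async_trait::async_trait;

struct Params;

#[async_trait]
trait DiagnosticsAdapter: Send + Sync {
    // Default no-op, mirroring `async fn process_diagnostics(&self, _: &mut ...) {}` above.
    async fn process_diagnostics(&self, _params: &mut Params) {}
}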

View File

@@ -44,8 +44,6 @@ pub struct LanguageSettings {
pub hard_tabs: bool,
pub soft_wrap: SoftWrap,
pub preferred_line_length: u32,
pub show_wrap_guides: bool,
pub wrap_guides: Vec<usize>,
pub format_on_save: FormatOnSave,
pub remove_trailing_whitespace_on_save: bool,
pub ensure_final_newline_on_save: bool,
@@ -86,10 +84,6 @@ pub struct LanguageSettingsContent {
#[serde(default)]
pub preferred_line_length: Option<u32>,
#[serde(default)]
pub show_wrap_guides: Option<bool>,
#[serde(default)]
pub wrap_guides: Option<Vec<usize>>,
#[serde(default)]
pub format_on_save: Option<FormatOnSave>,
#[serde(default)]
pub remove_trailing_whitespace_on_save: Option<bool>,
@@ -384,9 +378,6 @@ fn merge_settings(settings: &mut LanguageSettings, src: &LanguageSettingsContent
merge(&mut settings.tab_size, src.tab_size);
merge(&mut settings.hard_tabs, src.hard_tabs);
merge(&mut settings.soft_wrap, src.soft_wrap);
merge(&mut settings.show_wrap_guides, src.show_wrap_guides);
merge(&mut settings.wrap_guides, src.wrap_guides.clone());
merge(
&mut settings.preferred_line_length,
src.preferred_line_length,

View File

@@ -58,14 +58,11 @@ fn build_bridge(swift_target: &SwiftTarget) {
"cargo:rerun-if-changed={}/Package.resolved",
SWIFT_PACKAGE_NAME
);
let swift_package_root = swift_package_root();
let swift_target_folder = swift_target_folder();
if !Command::new("swift")
.arg("build")
.args(["--configuration", &env::var("PROFILE").unwrap()])
.args(["--triple", &swift_target.target.triple])
.args(["--build-path".into(), swift_target_folder])
.current_dir(&swift_package_root)
.status()
.unwrap()
@@ -131,12 +128,6 @@ fn swift_package_root() -> PathBuf {
env::current_dir().unwrap().join(SWIFT_PACKAGE_NAME)
}
fn swift_target_folder() -> PathBuf {
env::current_dir()
.unwrap()
.join(format!("../../target/{SWIFT_PACKAGE_NAME}"))
}
fn copy_dir(source: &Path, destination: &Path) {
assert!(
Command::new("rm")
@@ -164,7 +155,8 @@ fn copy_dir(source: &Path, destination: &Path) {
impl SwiftTarget {
fn out_dir_path(&self) -> PathBuf {
swift_target_folder()
swift_package_root()
.join(".build")
.join(&self.target.unversioned_triple)
.join(env::var("PROFILE").unwrap())
}

View File

@@ -1,6 +1,9 @@
use anyhow::{anyhow, bail, Context, Result};
use async_compression::futures::bufread::GzipDecoder;
use async_tar::Archive;
use futures::lock::Mutex;
use futures::{future::Shared, FutureExt};
use gpui::{executor::Background, Task};
use serde::Deserialize;
use smol::{fs, io::BufReader, process::Command};
use std::process::{Output, Stdio};
@@ -30,12 +33,20 @@ pub struct NpmInfoDistTags {
pub struct NodeRuntime {
http: Arc<dyn HttpClient>,
background: Arc<Background>,
installation_path: Mutex<Option<Shared<Task<Result<PathBuf, Arc<anyhow::Error>>>>>>,
}
impl NodeRuntime {
pub fn instance(http: Arc<dyn HttpClient>) -> Arc<NodeRuntime> {
pub fn instance(http: Arc<dyn HttpClient>, background: Arc<Background>) -> Arc<NodeRuntime> {
RUNTIME_INSTANCE
.get_or_init(|| Arc::new(NodeRuntime { http }))
.get_or_init(|| {
Arc::new(NodeRuntime {
http,
background,
installation_path: Mutex::new(None),
})
})
.clone()
}
@@ -50,17 +61,7 @@ impl NodeRuntime {
subcommand: &str,
args: &[&str],
) -> Result<Output> {
let attempt = || async move {
let installation_path = self.install_if_needed().await?;
let mut env_path = installation_path.join("bin").into_os_string();
if let Some(existing_path) = std::env::var_os("PATH") {
if !existing_path.is_empty() {
env_path.push(":");
env_path.push(&existing_path);
}
}
let attempt = |installation_path: PathBuf| async move {
let node_binary = installation_path.join("bin/node");
let npm_file = installation_path.join("bin/npm");
@@ -73,7 +74,6 @@ impl NodeRuntime {
}
let mut command = Command::new(node_binary);
command.env("PATH", env_path);
command.arg(npm_file).arg(subcommand).args(args);
if let Some(directory) = directory {
@@ -83,9 +83,10 @@ impl NodeRuntime {
command.output().await.map_err(|e| anyhow!("{e}"))
};
let mut output = attempt().await;
let installation_path = self.install_if_needed().await?;
let mut output = attempt(installation_path.clone()).await;
if output.is_err() {
output = attempt().await;
output = attempt(installation_path).await;
if output.is_err() {
return Err(anyhow!(
"failed to launch npm subcommand {subcommand} subcommand"
@@ -157,8 +158,23 @@ impl NodeRuntime {
}
async fn install_if_needed(&self) -> Result<PathBuf> {
log::info!("Node runtime install_if_needed");
let task = self
.installation_path
.lock()
.await
.get_or_insert_with(|| {
let http = self.http.clone();
self.background
.spawn(async move { Self::install(http).await.map_err(Arc::new) })
.shared()
})
.clone();
task.await.map_err(|e| anyhow!("{}", e))
}
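
// A generic sketch (not from this diff) of the memoization used above: the
// first caller stores a Shared future under the async Mutex; later callers
// clone and await the same handle, so the work runs at most once. The error
// is Arc-wrapped so the shared output stays cloneable.
struct OnceTask {
    slot: futures::lock::Mutex<
        Option<
            futures::future::Shared<
                futures::future::BoxFuture<'static, Result<PathBuf, Arc<anyhow::Error>>>,
            >,
        >,
    >,
}

impl OnceTask {
    async fn get_or_run(&self) -> Result<PathBuf, Arc<anyhow::Error>> {
        use futures::FutureExt;
        let task = self
            .slot
            .lock()
            .await
            .get_or_insert_with(|| {
                async move {
                    // Hypothetical one-time work; the real code spawns Self::install(http).
                    Ok(PathBuf::from("node"))
                }
                .boxed()
                .shared()
            })
            .clone();
        task.await
    }
}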
async fn install(http: Arc<dyn HttpClient>) -> Result<PathBuf> {
log::info!("installing Node runtime");
let arch = match consts::ARCH {
"x86_64" => "x64",
"aarch64" => "arm64",
@@ -189,8 +205,7 @@ impl NodeRuntime {
let file_name = format!("node-{VERSION}-darwin-{arch}.tar.gz");
let url = format!("https://nodejs.org/dist/{VERSION}/{file_name}");
let mut response = self
.http
let mut response = http
.get(&url, Default::default(), true)
.await
.context("error downloading Node binary tarball")?;

View File

@@ -259,7 +259,6 @@ pub enum Event {
LanguageServerLog(LanguageServerId, String),
Notification(String),
ActiveEntryChanged(Option<ProjectEntryId>),
ActivateProjectPanel,
WorktreeAdded,
WorktreeRemoved(WorktreeId),
WorktreeUpdatedEntries(WorktreeId, UpdatedEntriesSet),
@@ -2769,21 +2768,24 @@ impl Project {
language_server
.on_notification::<lsp::notification::PublishDiagnostics, _>({
let adapter = adapter.clone();
move |mut params, mut cx| {
move |mut params, cx| {
let this = this;
let adapter = adapter.clone();
adapter.process_diagnostics(&mut params);
if let Some(this) = this.upgrade(&cx) {
this.update(&mut cx, |this, cx| {
this.update_diagnostics(
server_id,
params,
&adapter.disk_based_diagnostic_sources,
cx,
)
.log_err();
});
}
cx.spawn(|mut cx| async move {
adapter.process_diagnostics(&mut params).await;
if let Some(this) = this.upgrade(&cx) {
this.update(&mut cx, |this, cx| {
this.update_diagnostics(
server_id,
params,
&adapter.disk_based_diagnostic_sources,
cx,
)
.log_err();
});
}
})
.detach();
}
})
.detach();

View File

@@ -1,6 +1,7 @@
use crate::{search::PathMatcher, worktree::WorktreeHandle, Event, *};
use crate::{worktree::WorktreeHandle, Event, *};
use fs::{FakeFs, LineEnding, RealFs};
use futures::{future, StreamExt};
use globset::Glob;
use gpui::{executor::Deterministic, test::subscribe, AppContext};
use language::{
language_settings::{AllLanguageSettings, LanguageSettingsContent},
@@ -3640,7 +3641,7 @@ async fn test_search_with_inclusions(cx: &mut gpui::TestAppContext) {
search_query,
false,
true,
vec![PathMatcher::new("*.odd").unwrap()],
vec![Glob::new("*.odd").unwrap().compile_matcher()],
Vec::new()
),
cx
@@ -3658,7 +3659,7 @@ async fn test_search_with_inclusions(cx: &mut gpui::TestAppContext) {
search_query,
false,
true,
vec![PathMatcher::new("*.rs").unwrap()],
vec![Glob::new("*.rs").unwrap().compile_matcher()],
Vec::new()
),
cx
@@ -3680,8 +3681,8 @@ async fn test_search_with_inclusions(cx: &mut gpui::TestAppContext) {
false,
true,
vec![
PathMatcher::new("*.ts").unwrap(),
PathMatcher::new("*.odd").unwrap(),
Glob::new("*.ts").unwrap().compile_matcher(),
Glob::new("*.odd").unwrap().compile_matcher(),
],
Vec::new()
),
@@ -3704,9 +3705,9 @@ async fn test_search_with_inclusions(cx: &mut gpui::TestAppContext) {
false,
true,
vec![
PathMatcher::new("*.rs").unwrap(),
PathMatcher::new("*.ts").unwrap(),
PathMatcher::new("*.odd").unwrap(),
Glob::new("*.rs").unwrap().compile_matcher(),
Glob::new("*.ts").unwrap().compile_matcher(),
Glob::new("*.odd").unwrap().compile_matcher(),
],
Vec::new()
),
@@ -3751,7 +3752,7 @@ async fn test_search_with_exclusions(cx: &mut gpui::TestAppContext) {
false,
true,
Vec::new(),
vec![PathMatcher::new("*.odd").unwrap()],
vec![Glob::new("*.odd").unwrap().compile_matcher()],
),
cx
)
@@ -3774,7 +3775,7 @@ async fn test_search_with_exclusions(cx: &mut gpui::TestAppContext) {
false,
true,
Vec::new(),
vec![PathMatcher::new("*.rs").unwrap()],
vec![Glob::new("*.rs").unwrap().compile_matcher()],
),
cx
)
@@ -3796,8 +3797,8 @@ async fn test_search_with_exclusions(cx: &mut gpui::TestAppContext) {
true,
Vec::new(),
vec![
PathMatcher::new("*.ts").unwrap(),
PathMatcher::new("*.odd").unwrap(),
Glob::new("*.ts").unwrap().compile_matcher(),
Glob::new("*.odd").unwrap().compile_matcher(),
],
),
cx
@@ -3820,9 +3821,9 @@ async fn test_search_with_exclusions(cx: &mut gpui::TestAppContext) {
true,
Vec::new(),
vec![
PathMatcher::new("*.rs").unwrap(),
PathMatcher::new("*.ts").unwrap(),
PathMatcher::new("*.odd").unwrap(),
Glob::new("*.rs").unwrap().compile_matcher(),
Glob::new("*.ts").unwrap().compile_matcher(),
Glob::new("*.odd").unwrap().compile_matcher(),
],
),
cx
@@ -3859,8 +3860,8 @@ async fn test_search_with_exclusions_and_inclusions(cx: &mut gpui::TestAppContex
search_query,
false,
true,
vec![PathMatcher::new("*.odd").unwrap()],
vec![PathMatcher::new("*.odd").unwrap()],
vec![Glob::new("*.odd").unwrap().compile_matcher()],
vec![Glob::new("*.odd").unwrap().compile_matcher()],
),
cx
)
@@ -3877,8 +3878,8 @@ async fn test_search_with_exclusions_and_inclusions(cx: &mut gpui::TestAppContex
search_query,
false,
true,
vec![PathMatcher::new("*.ts").unwrap()],
vec![PathMatcher::new("*.ts").unwrap()],
vec![Glob::new("*.ts").unwrap().compile_matcher()],
vec![Glob::new("*.ts").unwrap().compile_matcher()],
),
cx
)
@@ -3896,12 +3897,12 @@ async fn test_search_with_exclusions_and_inclusions(cx: &mut gpui::TestAppContex
false,
true,
vec![
PathMatcher::new("*.ts").unwrap(),
PathMatcher::new("*.odd").unwrap()
Glob::new("*.ts").unwrap().compile_matcher(),
Glob::new("*.odd").unwrap().compile_matcher()
],
vec![
PathMatcher::new("*.ts").unwrap(),
PathMatcher::new("*.odd").unwrap()
Glob::new("*.ts").unwrap().compile_matcher(),
Glob::new("*.odd").unwrap().compile_matcher()
],
),
cx
@@ -3920,12 +3921,12 @@ async fn test_search_with_exclusions_and_inclusions(cx: &mut gpui::TestAppContex
false,
true,
vec![
PathMatcher::new("*.ts").unwrap(),
PathMatcher::new("*.odd").unwrap()
Glob::new("*.ts").unwrap().compile_matcher(),
Glob::new("*.odd").unwrap().compile_matcher()
],
vec![
PathMatcher::new("*.rs").unwrap(),
PathMatcher::new("*.odd").unwrap()
Glob::new("*.rs").unwrap().compile_matcher(),
Glob::new("*.odd").unwrap().compile_matcher()
],
),
cx

View File

@@ -1,5 +1,5 @@
use aho_corasick::{AhoCorasick, AhoCorasickBuilder};
use anyhow::{Context, Result};
use anyhow::Result;
use client::proto;
use globset::{Glob, GlobMatcher};
use itertools::Itertools;
@@ -9,7 +9,7 @@ use smol::future::yield_now;
use std::{
io::{BufRead, BufReader, Read},
ops::Range,
path::{Path, PathBuf},
path::Path,
sync::Arc,
};
@@ -20,8 +20,8 @@ pub enum SearchQuery {
query: Arc<str>,
whole_word: bool,
case_sensitive: bool,
files_to_include: Vec<PathMatcher>,
files_to_exclude: Vec<PathMatcher>,
files_to_include: Vec<GlobMatcher>,
files_to_exclude: Vec<GlobMatcher>,
},
Regex {
regex: Regex,
@@ -29,43 +29,18 @@ pub enum SearchQuery {
multiline: bool,
whole_word: bool,
case_sensitive: bool,
files_to_include: Vec<PathMatcher>,
files_to_exclude: Vec<PathMatcher>,
files_to_include: Vec<GlobMatcher>,
files_to_exclude: Vec<GlobMatcher>,
},
}
#[derive(Clone, Debug)]
pub struct PathMatcher {
maybe_path: PathBuf,
glob: GlobMatcher,
}
impl std::fmt::Display for PathMatcher {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
self.maybe_path.to_string_lossy().fmt(f)
}
}
impl PathMatcher {
pub fn new(maybe_glob: &str) -> Result<Self, globset::Error> {
Ok(PathMatcher {
glob: Glob::new(&maybe_glob)?.compile_matcher(),
maybe_path: PathBuf::from(maybe_glob),
})
}
pub fn is_match<P: AsRef<Path>>(&self, other: P) -> bool {
other.as_ref().starts_with(&self.maybe_path) || self.glob.is_match(other)
}
}
impl SearchQuery {
pub fn text(
query: impl ToString,
whole_word: bool,
case_sensitive: bool,
files_to_include: Vec<PathMatcher>,
files_to_exclude: Vec<PathMatcher>,
files_to_include: Vec<GlobMatcher>,
files_to_exclude: Vec<GlobMatcher>,
) -> Self {
let query = query.to_string();
let search = AhoCorasickBuilder::new()
@@ -86,8 +61,8 @@ impl SearchQuery {
query: impl ToString,
whole_word: bool,
case_sensitive: bool,
files_to_include: Vec<PathMatcher>,
files_to_exclude: Vec<PathMatcher>,
files_to_include: Vec<GlobMatcher>,
files_to_exclude: Vec<GlobMatcher>,
) -> Result<Self> {
let mut query = query.to_string();
let initial_query = Arc::from(query.as_str());
@@ -121,16 +96,16 @@ impl SearchQuery {
message.query,
message.whole_word,
message.case_sensitive,
deserialize_path_matches(&message.files_to_include)?,
deserialize_path_matches(&message.files_to_exclude)?,
deserialize_globs(&message.files_to_include)?,
deserialize_globs(&message.files_to_exclude)?,
)
} else {
Ok(Self::text(
message.query,
message.whole_word,
message.case_sensitive,
deserialize_path_matches(&message.files_to_include)?,
deserialize_path_matches(&message.files_to_exclude)?,
deserialize_globs(&message.files_to_include)?,
deserialize_globs(&message.files_to_exclude)?,
))
}
}
@@ -145,12 +120,12 @@ impl SearchQuery {
files_to_include: self
.files_to_include()
.iter()
.map(|matcher| matcher.to_string())
.map(|g| g.glob().to_string())
.join(","),
files_to_exclude: self
.files_to_exclude()
.iter()
.map(|matcher| matcher.to_string())
.map(|g| g.glob().to_string())
.join(","),
}
}
@@ -291,7 +266,7 @@ impl SearchQuery {
matches!(self, Self::Regex { .. })
}
pub fn files_to_include(&self) -> &[PathMatcher] {
pub fn files_to_include(&self) -> &[GlobMatcher] {
match self {
Self::Text {
files_to_include, ..
@@ -302,7 +277,7 @@ impl SearchQuery {
}
}
pub fn files_to_exclude(&self) -> &[PathMatcher] {
pub fn files_to_exclude(&self) -> &[GlobMatcher] {
match self {
Self::Text {
files_to_exclude, ..
@@ -331,63 +306,11 @@ impl SearchQuery {
}
}
fn deserialize_path_matches(glob_set: &str) -> anyhow::Result<Vec<PathMatcher>> {
fn deserialize_globs(glob_set: &str) -> Result<Vec<GlobMatcher>> {
glob_set
.split(',')
.map(str::trim)
.filter(|glob_str| !glob_str.is_empty())
.map(|glob_str| {
PathMatcher::new(glob_str)
.with_context(|| format!("deserializing path match glob {glob_str}"))
})
.map(|glob_str| Ok(Glob::new(glob_str)?.compile_matcher()))
.collect()
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn path_matcher_creation_for_valid_paths() {
for valid_path in [
"file",
"Cargo.toml",
".DS_Store",
"~/dir/another_dir/",
"./dir/file",
"dir/[a-z].txt",
"../dir/filé",
] {
let path_matcher = PathMatcher::new(valid_path).unwrap_or_else(|e| {
panic!("Valid path {valid_path} should be accepted, but got: {e}")
});
assert!(
path_matcher.is_match(valid_path),
"Path matcher for valid path {valid_path} should match itself"
)
}
}
#[test]
fn path_matcher_creation_for_globs() {
for invalid_glob in ["dir/[].txt", "dir/[a-z.txt", "dir/{file"] {
match PathMatcher::new(invalid_glob) {
Ok(_) => panic!("Invalid glob {invalid_glob} should not be accepted"),
Err(_expected) => {}
}
}
for valid_glob in [
"dir/?ile",
"dir/*.txt",
"dir/**/file",
"dir/[a-z].txt",
"{dir,file}",
] {
match PathMatcher::new(valid_glob) {
Ok(_expected) => {}
Err(e) => panic!("Valid glob {valid_glob} should be accepted, but got: {e}"),
}
}
}
}
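
A small illustrative sketch (not part of the diff) of the behavioral difference between the two sides of this change: PathMatcher treats its pattern as a literal path prefix or a glob, while a bare GlobMatcher matches only what the glob itself describes.

#[test]
fn literal_prefix_vs_bare_glob() {
    let path_matcher = PathMatcher::new("src").unwrap();
    assert!(path_matcher.is_match("src/main.rs")); // literal-prefix rule

    let glob = globset::Glob::new("src").unwrap().compile_matcher();
    assert!(!glob.is_match("src/main.rs")); // a bare glob would need "src/**"
}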

View File

@@ -2015,6 +2015,37 @@ impl LocalSnapshot {
entry
}
#[must_use = "Changed paths must be used for diffing later"]
fn scan_statuses(
&mut self,
repo_ptr: &dyn GitRepository,
work_directory: &RepositoryWorkDirectory,
) -> Vec<Arc<Path>> {
let mut changes = vec![];
let mut edits = vec![];
let statuses = repo_ptr.statuses();
for mut entry in self
.descendent_entries(false, false, &work_directory.0)
.cloned()
{
let Ok(repo_path) = entry.path.strip_prefix(&work_directory.0) else {
continue;
};
let repo_path = RepoPath(repo_path.to_path_buf());
let git_file_status = statuses.as_ref().and_then(|s| s.get(&repo_path).copied());
if entry.git_status != git_file_status {
entry.git_status = git_file_status;
changes.push(entry.path.clone());
edits.push(Edit::Insert(entry));
}
}
self.entries_by_path.edit(edits, &());
changes
}
fn ancestor_inodes_for_path(&self, path: &Path) -> TreeSet<u64> {
let mut inodes = TreeSet::default();
for ancestor in path.ancestors().skip(1) {
@@ -2158,38 +2189,6 @@ impl BackgroundScannerState {
.any(|p| entry.path.starts_with(p))
}
fn enqueue_scan_dir(&self, abs_path: Arc<Path>, entry: &Entry, scan_job_tx: &Sender<ScanJob>) {
let path = entry.path.clone();
let ignore_stack = self.snapshot.ignore_stack_for_abs_path(&abs_path, true);
let mut ancestor_inodes = self.snapshot.ancestor_inodes_for_path(&path);
let mut containing_repository = None;
if !ignore_stack.is_all() {
if let Some((workdir_path, repo)) = self.snapshot.local_repo_for_path(&path) {
if let Ok(repo_path) = path.strip_prefix(&workdir_path.0) {
containing_repository = Some((
workdir_path,
repo.repo_ptr.clone(),
repo.repo_ptr.lock().staged_statuses(repo_path),
));
}
}
}
if !ancestor_inodes.contains(&entry.inode) {
ancestor_inodes.insert(entry.inode);
scan_job_tx
.try_send(ScanJob {
abs_path,
path,
ignore_stack,
scan_queue: scan_job_tx.clone(),
ancestor_inodes,
is_external: entry.is_external,
containing_repository,
})
.unwrap();
}
}
fn reuse_entry_id(&mut self, entry: &mut Entry) {
if let Some(removed_entry_id) = self.removed_entry_ids.remove(&entry.inode) {
entry.id = removed_entry_id;
@@ -2202,7 +2201,7 @@ impl BackgroundScannerState {
self.reuse_entry_id(&mut entry);
let entry = self.snapshot.insert_entry(entry, fs);
if entry.path.file_name() == Some(&DOT_GIT) {
self.build_git_repository(entry.path.clone(), fs);
self.build_repository(entry.path.clone(), fs);
}
#[cfg(test)]
@@ -2216,6 +2215,7 @@ impl BackgroundScannerState {
parent_path: &Arc<Path>,
entries: impl IntoIterator<Item = Entry>,
ignore: Option<Arc<Gitignore>>,
fs: &dyn Fs,
) {
let mut parent_entry = if let Some(parent_entry) = self
.snapshot
@@ -2244,12 +2244,16 @@ impl BackgroundScannerState {
.insert(abs_parent_path, (ignore, false));
}
let parent_entry_id = parent_entry.id;
self.scanned_dirs.insert(parent_entry_id);
self.scanned_dirs.insert(parent_entry.id);
let mut entries_by_path_edits = vec![Edit::Insert(parent_entry)];
let mut entries_by_id_edits = Vec::new();
let mut dotgit_path = None;
for entry in entries {
if entry.path.file_name() == Some(&DOT_GIT) {
dotgit_path = Some(entry.path.clone());
}
entries_by_id_edits.push(Edit::Insert(PathEntry {
id: entry.id,
path: entry.path.clone(),
@@ -2264,6 +2268,9 @@ impl BackgroundScannerState {
.edit(entries_by_path_edits, &());
self.snapshot.entries_by_id.edit(entries_by_id_edits, &());
if let Some(dotgit_path) = dotgit_path {
self.build_repository(dotgit_path, fs);
}
if let Err(ix) = self.changed_paths.binary_search(parent_path) {
self.changed_paths.insert(ix, parent_path.clone());
}
@@ -2339,7 +2346,7 @@ impl BackgroundScannerState {
});
match repository {
None => {
self.build_git_repository(dot_git_dir.into(), fs);
self.build_repository(dot_git_dir.into(), fs);
}
Some((entry_id, repository)) => {
if repository.git_dir_scan_id == scan_id {
@@ -2363,13 +2370,19 @@ impl BackgroundScannerState {
.repository_entries
.update(&work_dir, |entry| entry.branch = branch.map(Into::into));
self.update_git_statuses(&work_dir, &*repository);
let changed_paths = self.snapshot.scan_statuses(&*repository, &work_dir);
util::extend_sorted(
&mut self.changed_paths,
changed_paths,
usize::MAX,
Ord::cmp,
)
}
}
}
// Remove any git repositories whose .git entry no longer exists.
let snapshot = &mut self.snapshot;
let mut snapshot = &mut self.snapshot;
let mut repositories = mem::take(&mut snapshot.git_repositories);
let mut repository_entries = mem::take(&mut snapshot.repository_entries);
repositories.retain(|work_directory_id, _| {
@@ -2384,15 +2397,7 @@ impl BackgroundScannerState {
snapshot.repository_entries = repository_entries;
}
fn build_git_repository(
&mut self,
dot_git_path: Arc<Path>,
fs: &dyn Fs,
) -> Option<(
RepositoryWorkDirectory,
Arc<Mutex<dyn GitRepository>>,
TreeMap<RepoPath, GitFileStatus>,
)> {
fn build_repository(&mut self, dot_git_path: Arc<Path>, fs: &dyn Fs) -> Option<()> {
log::info!("build git repository {:?}", dot_git_path);
let work_dir_path: Arc<Path> = dot_git_path.parent().unwrap().into();
@@ -2424,54 +2429,22 @@ impl BackgroundScannerState {
},
);
let staged_statuses = self.update_git_statuses(&work_directory, &*repo_lock);
let changed_paths = self
.snapshot
.scan_statuses(repo_lock.deref(), &work_directory);
drop(repo_lock);
self.snapshot.git_repositories.insert(
work_dir_id,
LocalRepositoryEntry {
git_dir_scan_id: 0,
repo_ptr: repository.clone(),
repo_ptr: repository,
git_dir_path: dot_git_path.clone(),
},
);
Some((work_directory, repository, staged_statuses))
}
fn update_git_statuses(
&mut self,
work_directory: &RepositoryWorkDirectory,
repo: &dyn GitRepository,
) -> TreeMap<RepoPath, GitFileStatus> {
let staged_statuses = repo.staged_statuses(Path::new(""));
let mut changes = vec![];
let mut edits = vec![];
for mut entry in self
.snapshot
.descendent_entries(false, false, &work_directory.0)
.cloned()
{
let Ok(repo_path) = entry.path.strip_prefix(&work_directory.0) else {
continue;
};
let repo_path = RepoPath(repo_path.to_path_buf());
let git_file_status = combine_git_statuses(
staged_statuses.get(&repo_path).copied(),
repo.unstaged_status(&repo_path, entry.mtime),
);
if entry.git_status != git_file_status {
entry.git_status = git_file_status;
changes.push(entry.path.clone());
edits.push(Edit::Insert(entry));
}
}
self.snapshot.entries_by_path.edit(edits, &());
util::extend_sorted(&mut self.changed_paths, changes, usize::MAX, Ord::cmp);
staged_statuses
util::extend_sorted(&mut self.changed_paths, changed_paths, usize::MAX, Ord::cmp);
Some(())
}
}
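
Both the scan_statuses call site above and the one in build_repository funnel their changed paths through util::extend_sorted. A hedged sketch of the contract those calls appear to rely on — merge new items into an already-sorted Vec, keep it deduplicated, cap the length — with the caveat that Zed's actual helper in the util crate may differ:

use std::cmp::Ordering;

// Hedged sketch, not Zed's util crate: splice each new item into its sorted
// position, skip exact duplicates, and enforce an upper bound on the length.
fn extend_sorted<T>(
    vec: &mut Vec<T>,
    new_items: impl IntoIterator<Item = T>,
    limit: usize,
    mut cmp: impl FnMut(&T, &T) -> Ordering,
) {
    for item in new_items {
        match vec.binary_search_by(|probe| cmp(probe, &item)) {
            Ok(_) => {}                      // already present; stay deduplicated
            Err(ix) => vec.insert(ix, item), // insert at the sorted position
        }
    }
    vec.truncate(limit);
}

fn main() {
    let mut changed_paths = vec!["a.rs".to_string(), "m.rs".to_string()];
    extend_sorted(
        &mut changed_paths,
        ["z.rs".to_string(), "b.rs".to_string(), "m.rs".to_string()],
        usize::MAX, // the call sites above pass usize::MAX, i.e. no cap
        Ord::cmp,
    );
    assert_eq!(changed_paths, ["a.rs", "b.rs", "m.rs", "z.rs"]);
}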
@@ -3058,8 +3031,16 @@ impl BackgroundScanner {
) {
use futures::FutureExt as _;
let (root_abs_path, root_inode) = {
let snapshot = &self.state.lock().snapshot;
(
snapshot.abs_path.clone(),
snapshot.root_entry().map(|e| e.inode),
)
};
// Populate ignores above the root.
let root_abs_path = self.state.lock().snapshot.abs_path.clone();
let ignore_stack;
for ancestor in root_abs_path.ancestors().skip(1) {
if let Ok(ignore) = build_gitignore(&ancestor.join(&*GITIGNORE), self.fs.as_ref()).await
{
@@ -3070,24 +3051,31 @@ impl BackgroundScanner {
.insert(ancestor.into(), (ignore.into(), false));
}
}
let (scan_job_tx, scan_job_rx) = channel::unbounded();
{
let mut state = self.state.lock();
state.snapshot.scan_id += 1;
if let Some(mut root_entry) = state.snapshot.root_entry().cloned() {
let ignore_stack = state
.snapshot
.ignore_stack_for_abs_path(&root_abs_path, true);
if ignore_stack.is_all() {
ignore_stack = state
.snapshot
.ignore_stack_for_abs_path(&root_abs_path, true);
if ignore_stack.is_all() {
if let Some(mut root_entry) = state.snapshot.root_entry().cloned() {
root_entry.is_ignored = true;
state.insert_entry(root_entry.clone(), self.fs.as_ref());
state.insert_entry(root_entry, self.fs.as_ref());
}
state.enqueue_scan_dir(root_abs_path, &root_entry, &scan_job_tx);
}
};
// Perform an initial scan of the directory.
let (scan_job_tx, scan_job_rx) = channel::unbounded();
smol::block_on(scan_job_tx.send(ScanJob {
abs_path: root_abs_path,
path: Arc::from(Path::new("")),
ignore_stack,
ancestor_inodes: TreeSet::from_ordered_entries(root_inode),
is_external: false,
scan_queue: scan_job_tx.clone(),
}))
.unwrap();
drop(scan_job_tx);
self.scan_dirs(true, scan_job_rx).await;
{
@@ -3275,7 +3263,20 @@ impl BackgroundScanner {
if let Some(entry) = state.snapshot.entry_for_path(ancestor) {
if entry.kind == EntryKind::UnloadedDir {
let abs_path = root_path.join(ancestor);
state.enqueue_scan_dir(abs_path.into(), entry, &scan_job_tx);
let ignore_stack =
state.snapshot.ignore_stack_for_abs_path(&abs_path, true);
let ancestor_inodes =
state.snapshot.ancestor_inodes_for_path(&ancestor);
scan_job_tx
.try_send(ScanJob {
abs_path: abs_path.into(),
path: ancestor.into(),
ignore_stack,
scan_queue: scan_job_tx.clone(),
ancestor_inodes,
is_external: entry.is_external,
})
.unwrap();
state.paths_to_scan.insert(path.clone());
break;
}
@@ -3390,16 +3391,18 @@ impl BackgroundScanner {
let mut ignore_stack = job.ignore_stack.clone();
let mut new_ignore = None;
let (root_abs_path, root_char_bag, next_entry_id) = {
let (root_abs_path, root_char_bag, next_entry_id, repository) = {
let snapshot = &self.state.lock().snapshot;
(
snapshot.abs_path().clone(),
snapshot.root_char_bag,
self.next_entry_id.clone(),
snapshot
.local_repo_for_path(&job.path)
.map(|(work_dir, repo)| (work_dir, repo.clone())),
)
};
let mut dotgit_path = None;
let mut root_canonical_path = None;
let mut new_entries: Vec<Entry> = Vec::new();
let mut new_jobs: Vec<Option<ScanJob>> = Vec::new();
@@ -3462,10 +3465,6 @@ impl BackgroundScanner {
}
}
}
// If we find a .git, we'll need to load the repository.
else if child_name == *DOT_GIT {
dotgit_path = Some(child_path.clone());
}
let mut child_entry = Entry::new(
child_path.clone(),
@@ -3526,7 +3525,6 @@ impl BackgroundScanner {
},
ancestor_inodes,
scan_queue: job.scan_queue.clone(),
containing_repository: job.containing_repository.clone(),
}));
} else {
new_jobs.push(None);
@@ -3534,17 +3532,14 @@ impl BackgroundScanner {
} else {
child_entry.is_ignored = ignore_stack.is_abs_path_ignored(&child_abs_path, false);
if !child_entry.is_ignored {
if let Some((repository_dir, repository, staged_statuses)) =
&job.containing_repository
{
if let Ok(repo_path) = child_entry.path.strip_prefix(&repository_dir.0) {
let repo_path = RepoPath(repo_path.into());
child_entry.git_status = combine_git_statuses(
staged_statuses.get(&repo_path).copied(),
repository
.lock()
.unstaged_status(&repo_path, child_entry.mtime),
);
if let Some((repo_path, repo)) = &repository {
if let Ok(path) = child_path.strip_prefix(&repo_path.0) {
child_entry.git_status = repo
.repo_ptr
.lock()
.status(&RepoPath(path.into()))
.log_err()
.flatten();
}
}
}
@@ -3554,39 +3549,27 @@ impl BackgroundScanner {
}
let mut state = self.state.lock();
// Identify any subdirectories that should not be scanned.
let mut job_ix = 0;
let mut new_jobs = new_jobs.into_iter();
for entry in &mut new_entries {
state.reuse_entry_id(entry);
if entry.is_dir() {
let new_job = new_jobs.next().expect("missing scan job for entry");
if state.should_scan_directory(&entry) {
job_ix += 1;
if let Some(new_job) = new_job {
job.scan_queue
.try_send(new_job)
.expect("channel is unbounded");
}
} else {
log::debug!("defer scanning directory {:?}", entry.path);
entry.kind = EntryKind::UnloadedDir;
new_jobs.remove(job_ix);
}
}
}
assert!(new_jobs.next().is_none());
state.populate_dir(&job.path, new_entries, new_ignore);
let repository =
dotgit_path.and_then(|path| state.build_git_repository(path, self.fs.as_ref()));
for new_job in new_jobs {
if let Some(mut new_job) = new_job {
if let Some(containing_repository) = &repository {
new_job.containing_repository = Some(containing_repository.clone());
}
job.scan_queue
.try_send(new_job)
.expect("channel is unbounded");
}
}
state.populate_dir(&job.path, new_entries, new_ignore, self.fs.as_ref());
Ok(())
}
@@ -3655,10 +3638,13 @@ impl BackgroundScanner {
if let Some((work_dir, repo)) =
state.snapshot.local_repo_for_path(&path)
{
if let Ok(repo_path) = path.strip_prefix(work_dir.0) {
let repo_path = RepoPath(repo_path.into());
let repo = repo.repo_ptr.lock();
fs_entry.git_status = repo.status(&repo_path, fs_entry.mtime);
if let Ok(path) = path.strip_prefix(work_dir.0) {
fs_entry.git_status = repo
.repo_ptr
.lock()
.status(&RepoPath(path.into()))
.log_err()
.flatten()
}
}
}
@@ -3666,7 +3652,20 @@ impl BackgroundScanner {
if let (Some(scan_queue_tx), true) = (&scan_queue_tx, fs_entry.is_dir()) {
if state.should_scan_directory(&fs_entry) {
state.enqueue_scan_dir(abs_path, &fs_entry, scan_queue_tx);
let mut ancestor_inodes =
state.snapshot.ancestor_inodes_for_path(&path);
if !ancestor_inodes.contains(&metadata.inode) {
ancestor_inodes.insert(metadata.inode);
smol::block_on(scan_queue_tx.send(ScanJob {
abs_path,
path: path.clone(),
ignore_stack,
ancestor_inodes,
is_external: fs_entry.is_external,
scan_queue: scan_queue_tx.clone(),
}))
.unwrap();
}
} else {
fs_entry.kind = EntryKind::UnloadedDir;
}
@@ -3823,7 +3822,18 @@ impl BackgroundScanner {
if was_ignored && !entry.is_ignored && entry.kind.is_unloaded() {
let state = self.state.lock();
if state.should_scan_directory(&entry) {
state.enqueue_scan_dir(abs_path.clone(), &entry, &job.scan_queue);
job.scan_queue
.try_send(ScanJob {
abs_path: abs_path.clone(),
path: entry.path.clone(),
ignore_stack: child_ignore_stack.clone(),
scan_queue: job.scan_queue.clone(),
ancestor_inodes: state
.snapshot
.ancestor_inodes_for_path(&entry.path),
is_external: false,
})
.unwrap();
}
}
@@ -4012,11 +4022,6 @@ struct ScanJob {
scan_queue: Sender<ScanJob>,
ancestor_inodes: TreeSet<u64>,
is_external: bool,
containing_repository: Option<(
RepositoryWorkDirectory,
Arc<Mutex<dyn GitRepository>>,
TreeMap<RepoPath, GitFileStatus>,
)>,
}
struct UpdateIgnoreStatusJob {
@@ -4343,22 +4348,3 @@ impl<'a> TryFrom<(&'a CharBag, proto::Entry)> for Entry {
}
}
}
fn combine_git_statuses(
staged: Option<GitFileStatus>,
unstaged: Option<GitFileStatus>,
) -> Option<GitFileStatus> {
if let Some(staged) = staged {
if let Some(unstaged) = unstaged {
if unstaged != staged {
Some(GitFileStatus::Modified)
} else {
Some(staged)
}
} else {
Some(staged)
}
} else {
unstaged
}
}
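
Since combine_git_statuses is removed in this diff (the new code asks the repository for a single combined status instead), its truth table is worth pinning down. A sketch of the behavior implemented by the function above, assuming Added is a GitFileStatus variant alongside the Modified one the helper itself references:

#[test]
fn combine_git_statuses_truth_table() {
    // `Added` is assumed here; only `Modified` appears in the helper itself.
    use GitFileStatus::{Added, Modified};

    assert_eq!(combine_git_statuses(None, None), None); // clean file
    assert_eq!(combine_git_statuses(Some(Added), None), Some(Added)); // staged change only
    assert_eq!(combine_git_statuses(None, Some(Modified)), Some(Modified)); // unstaged change only
    assert_eq!(combine_git_statuses(Some(Added), Some(Added)), Some(Added)); // both sides agree
    // When the staged and unstaged sides disagree, the helper reports Modified.
    assert_eq!(combine_git_statuses(Some(Added), Some(Modified)), Some(Modified));
}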

View File

@@ -4,7 +4,7 @@ mod project_panel_settings;
use context_menu::{ContextMenu, ContextMenuItem};
use db::kvp::KEY_VALUE_STORE;
use drag_and_drop::{DragAndDrop, Draggable};
use editor::{scroll::autoscroll::Autoscroll, Cancel, Editor};
use editor::{Cancel, Editor};
use file_associations::FileAssociations;
use futures::stream::StreamExt;
@@ -115,7 +115,6 @@ actions!(
[
ExpandSelectedEntry,
CollapseSelectedEntry,
CollapseAllEntries,
NewDirectory,
NewFile,
Copy,
@@ -126,9 +125,7 @@ actions!(
Paste,
Delete,
Rename,
Open,
ToggleFocus,
NewSearchInDirectory,
ToggleFocus
]
);
@@ -141,7 +138,6 @@ pub fn init(assets: impl AssetSource, cx: &mut AppContext) {
file_associations::init(assets, cx);
cx.add_action(ProjectPanel::expand_selected_entry);
cx.add_action(ProjectPanel::collapse_selected_entry);
cx.add_action(ProjectPanel::collapse_all_entries);
cx.add_action(ProjectPanel::select_prev);
cx.add_action(ProjectPanel::select_next);
cx.add_action(ProjectPanel::new_file);
@@ -149,14 +145,12 @@ pub fn init(assets: impl AssetSource, cx: &mut AppContext) {
cx.add_action(ProjectPanel::rename);
cx.add_async_action(ProjectPanel::delete);
cx.add_async_action(ProjectPanel::confirm);
cx.add_async_action(ProjectPanel::open_file);
cx.add_action(ProjectPanel::cancel);
cx.add_action(ProjectPanel::cut);
cx.add_action(ProjectPanel::copy);
cx.add_action(ProjectPanel::copy_path);
cx.add_action(ProjectPanel::copy_relative_path);
cx.add_action(ProjectPanel::reveal_in_finder);
cx.add_action(ProjectPanel::new_search_in_directory);
cx.add_action(
|this: &mut ProjectPanel, action: &Paste, cx: &mut ViewContext<ProjectPanel>| {
this.paste(action, cx);
@@ -175,10 +169,6 @@ pub enum Event {
},
DockPositionChanged,
Focus,
NewSearchInDirectory {
dir_entry: Entry,
},
ActivatePanel,
}
#[derive(Serialize, Deserialize)]
@@ -205,9 +195,6 @@ impl ProjectPanel {
cx.notify();
}
}
project::Event::ActivateProjectPanel => {
cx.emit(Event::ActivatePanel);
}
project::Event::WorktreeRemoved(id) => {
this.expanded_dir_ids.remove(id);
this.update_visible_entries(None, cx);
@@ -430,12 +417,6 @@ impl ProjectPanel {
CopyRelativePath,
));
menu_entries.push(ContextMenuItem::action("Reveal in Finder", RevealInFinder));
if entry.is_dir() {
menu_entries.push(ContextMenuItem::action(
"Search Inside",
NewSearchInDirectory,
));
}
if let Some(clipboard_entry) = self.clipboard_entry {
if clipboard_entry.worktree_id() == worktree.id() {
menu_entries.push(ContextMenuItem::action("Paste", Paste));
@@ -516,12 +497,6 @@ impl ProjectPanel {
}
}
pub fn collapse_all_entries(&mut self, _: &CollapseAllEntries, cx: &mut ViewContext<Self>) {
self.expanded_dir_ids.clear();
self.update_visible_entries(None, cx);
cx.notify();
}
fn toggle_expanded(&mut self, entry_id: ProjectEntryId, cx: &mut ViewContext<Self>) {
if let Some(worktree_id) = self.project.read(cx).worktree_id_for_entry(entry_id, cx) {
if let Some(expanded_dir_ids) = self.expanded_dir_ids.get_mut(&worktree_id) {
@@ -570,20 +545,15 @@ impl ProjectPanel {
fn confirm(&mut self, _: &Confirm, cx: &mut ViewContext<Self>) -> Option<Task<Result<()>>> {
if let Some(task) = self.confirm_edit(cx) {
return Some(task);
}
None
}
fn open_file(&mut self, _: &Open, cx: &mut ViewContext<Self>) -> Option<Task<Result<()>>> {
if let Some((_, entry)) = self.selected_entry(cx) {
Some(task)
} else if let Some((_, entry)) = self.selected_entry(cx) {
if entry.is_file() {
self.open_entry(entry.id, true, cx);
}
None
} else {
None
}
None
}
fn confirm_edit(&mut self, cx: &mut ViewContext<Self>) -> Option<Task<Result<()>>> {
@@ -760,20 +730,13 @@ impl ProjectPanel {
is_dir: entry.is_dir(),
processing_filename: None,
});
let file_name = entry
let filename = entry
.path
.file_name()
.map(|s| s.to_string_lossy())
.unwrap_or_default()
.to_string();
let file_stem = entry.path.file_stem().map(|s| s.to_string_lossy());
let selection_end =
file_stem.map_or(file_name.len(), |file_stem| file_stem.len());
.map_or(String::new(), |s| s.to_string_lossy().to_string());
self.filename_editor.update(cx, |editor, cx| {
editor.set_text(file_name, cx);
editor.change_selections(Some(Autoscroll::fit()), cx, |s| {
s.select_ranges([0..selection_end])
})
editor.set_text(filename, cx);
editor.select_all(&Default::default(), cx);
});
cx.focus(&self.filename_editor);
self.update_visible_entries(None, cx);
@@ -965,20 +928,6 @@ impl ProjectPanel {
}
}
pub fn new_search_in_directory(
&mut self,
_: &NewSearchInDirectory,
cx: &mut ViewContext<Self>,
) {
if let Some((_, entry)) = self.selected_entry(cx) {
if entry.is_dir() {
cx.emit(Event::NewSearchInDirectory {
dir_entry: entry.clone(),
});
}
}
}
fn move_entry(
&mut self,
entry_to_move: ProjectEntryId,
@@ -1033,10 +982,7 @@ impl ProjectPanel {
None
}
pub fn selected_entry<'a>(
&self,
cx: &'a AppContext,
) -> Option<(&'a Worktree, &'a project::Entry)> {
fn selected_entry<'a>(&self, cx: &'a AppContext) -> Option<(&'a Worktree, &'a project::Entry)> {
let (worktree, entry) = self.selected_entry_handle(cx)?;
Some((worktree.read(cx), entry))
}
@@ -1731,11 +1677,7 @@ mod tests {
use project::FakeFs;
use serde_json::json;
use settings::SettingsStore;
use std::{
collections::HashSet,
path::Path,
sync::atomic::{self, AtomicUsize},
};
use std::{collections::HashSet, path::Path};
use workspace::{pane, AppState};
#[gpui::test]
@@ -1971,7 +1913,7 @@ mod tests {
.update(cx, |panel, cx| {
panel
.filename_editor
.update(cx, |editor, cx| editor.set_text("another-filename.txt", cx));
.update(cx, |editor, cx| editor.set_text("another-filename", cx));
panel.confirm(&Confirm, cx).unwrap()
})
.await
@@ -1985,14 +1927,14 @@ mod tests {
" v b",
" > 3",
" > 4",
" another-filename.txt <== selected",
" another-filename <== selected",
" > C",
" .dockerignore",
" the-new-filename",
]
);
select_path(&panel, "root1/b/another-filename.txt", cx);
select_path(&panel, "root1/b/another-filename", cx);
panel.update(cx, |panel, cx| panel.rename(&Rename, cx));
assert_eq!(
visible_entries_as_strings(&panel, 0..10, cx),
@@ -2003,7 +1945,7 @@ mod tests {
" v b",
" > 3",
" > 4",
" [EDITOR: 'another-filename.txt'] <== selected",
" [EDITOR: 'another-filename'] <== selected",
" > C",
" .dockerignore",
" the-new-filename",
@@ -2011,15 +1953,9 @@ mod tests {
);
let confirm = panel.update(cx, |panel, cx| {
panel.filename_editor.update(cx, |editor, cx| {
let file_name_selections = editor.selections.all::<usize>(cx);
assert_eq!(file_name_selections.len(), 1, "File editing should have a single selection, but got: {file_name_selections:?}");
let file_name_selection = &file_name_selections[0];
assert_eq!(file_name_selection.start, 0, "Should select the file name from the start");
assert_eq!(file_name_selection.end, "another-filename".len(), "Should not select file extension");
editor.set_text("a-different-filename.tar.gz", cx)
});
panel
.filename_editor
.update(cx, |editor, cx| editor.set_text("a-different-filename", cx));
panel.confirm(&Confirm, cx).unwrap()
});
assert_eq!(
@@ -2031,7 +1967,7 @@ mod tests {
" v b",
" > 3",
" > 4",
" [PROCESSING: 'a-different-filename.tar.gz'] <== selected",
" [PROCESSING: 'a-different-filename'] <== selected",
" > C",
" .dockerignore",
" the-new-filename",
@@ -2048,42 +1984,13 @@ mod tests {
" v b",
" > 3",
" > 4",
" a-different-filename.tar.gz <== selected",
" a-different-filename <== selected",
" > C",
" .dockerignore",
" the-new-filename",
]
);
panel.update(cx, |panel, cx| panel.rename(&Rename, cx));
assert_eq!(
visible_entries_as_strings(&panel, 0..10, cx),
&[
"v root1",
" > .git",
" > a",
" v b",
" > 3",
" > 4",
" [EDITOR: 'a-different-filename.tar.gz'] <== selected",
" > C",
" .dockerignore",
" the-new-filename",
]
);
panel.update(cx, |panel, cx| {
panel.filename_editor.update(cx, |editor, cx| {
let file_name_selections = editor.selections.all::<usize>(cx);
assert_eq!(file_name_selections.len(), 1, "File editing should have a single selection, but got: {file_name_selections:?}");
let file_name_selection = &file_name_selections[0];
assert_eq!(file_name_selection.start, 0, "Should select the file name from the start");
assert_eq!(file_name_selection.end, "a-different-filename.tar".len(), "Should not select file extension, but still may select anything up to the last dot");
});
panel.cancel(&Cancel, cx)
});
panel.update(cx, |panel, cx| panel.new_directory(&NewDirectory, cx));
assert_eq!(
visible_entries_as_strings(&panel, 0..10, cx),
@@ -2095,7 +2002,7 @@ mod tests {
" > [EDITOR: ''] <== selected",
" > 3",
" > 4",
" a-different-filename.tar.gz",
" a-different-filename",
" > C",
" .dockerignore",
]
@@ -2118,7 +2025,7 @@ mod tests {
" > [PROCESSING: 'new-dir']",
" > 3 <== selected",
" > 4",
" a-different-filename.tar.gz",
" a-different-filename",
" > C",
" .dockerignore",
]
@@ -2135,7 +2042,7 @@ mod tests {
" > 3 <== selected",
" > 4",
" > new-dir",
" a-different-filename.tar.gz",
" a-different-filename",
" > C",
" .dockerignore",
]
@@ -2152,7 +2059,7 @@ mod tests {
" > [EDITOR: '3'] <== selected",
" > 4",
" > new-dir",
" a-different-filename.tar.gz",
" a-different-filename",
" > C",
" .dockerignore",
]
@@ -2170,7 +2077,7 @@ mod tests {
" > 3 <== selected",
" > 4",
" > new-dir",
" a-different-filename.tar.gz",
" a-different-filename",
" > C",
" .dockerignore",
]
@@ -2397,7 +2304,7 @@ mod tests {
toggle_expand_dir(&panel, "src/test", cx);
select_path(&panel, "src/test/first.rs", cx);
panel.update(cx, |panel, cx| panel.open_file(&Open, cx));
panel.update(cx, |panel, cx| panel.confirm(&Confirm, cx));
cx.foreground().run_until_parked();
assert_eq!(
visible_entries_as_strings(&panel, 0..10, cx),
@@ -2425,7 +2332,7 @@ mod tests {
ensure_no_open_items_and_panes(window_id, &workspace, cx);
select_path(&panel, "src/test/second.rs", cx);
panel.update(cx, |panel, cx| panel.open_file(&Open, cx));
panel.update(cx, |panel, cx| panel.confirm(&Confirm, cx));
cx.foreground().run_until_parked();
assert_eq!(
visible_entries_as_strings(&panel, 0..10, cx),
@@ -2609,140 +2516,6 @@ mod tests {
);
}
#[gpui::test]
async fn test_new_search_in_directory_trigger(cx: &mut gpui::TestAppContext) {
init_test_with_editor(cx);
let fs = FakeFs::new(cx.background());
fs.insert_tree(
"/src",
json!({
"test": {
"first.rs": "// First Rust file",
"second.rs": "// Second Rust file",
"third.rs": "// Third Rust file",
}
}),
)
.await;
let project = Project::test(fs.clone(), ["/src".as_ref()], cx).await;
let (_, workspace) = cx.add_window(|cx| Workspace::test_new(project.clone(), cx));
let panel = workspace.update(cx, |workspace, cx| ProjectPanel::new(workspace, cx));
let new_search_events_count = Arc::new(AtomicUsize::new(0));
let _subscription = panel.update(cx, |_, cx| {
            let subscription_count = Arc::clone(&new_search_events_count);
            cx.subscribe(&cx.handle(), move |_, _, event, _| {
                if matches!(event, Event::NewSearchInDirectory { .. }) {
                    subscription_count.fetch_add(1, atomic::Ordering::SeqCst);
}
})
});
toggle_expand_dir(&panel, "src/test", cx);
select_path(&panel, "src/test/first.rs", cx);
panel.update(cx, |panel, cx| panel.confirm(&Confirm, cx));
cx.foreground().run_until_parked();
assert_eq!(
visible_entries_as_strings(&panel, 0..10, cx),
&[
"v src",
" v test",
" first.rs <== selected",
" second.rs",
" third.rs"
]
);
panel.update(cx, |panel, cx| {
panel.new_search_in_directory(&NewSearchInDirectory, cx)
});
assert_eq!(
new_search_events_count.load(atomic::Ordering::SeqCst),
0,
"Should not trigger new search in directory when called on a file"
);
select_path(&panel, "src/test", cx);
panel.update(cx, |panel, cx| panel.confirm(&Confirm, cx));
cx.foreground().run_until_parked();
assert_eq!(
visible_entries_as_strings(&panel, 0..10, cx),
&[
"v src",
" v test <== selected",
" first.rs",
" second.rs",
" third.rs"
]
);
panel.update(cx, |panel, cx| {
panel.new_search_in_directory(&NewSearchInDirectory, cx)
});
assert_eq!(
new_search_events_count.load(atomic::Ordering::SeqCst),
1,
"Should trigger new search in directory when called on a directory"
);
}
#[gpui::test]
async fn test_collapse_all_entries(cx: &mut gpui::TestAppContext) {
init_test_with_editor(cx);
let fs = FakeFs::new(cx.background());
fs.insert_tree(
"/project_root",
json!({
"dir_1": {
"nested_dir": {
"file_a.py": "# File contents",
"file_b.py": "# File contents",
"file_c.py": "# File contents",
},
"file_1.py": "# File contents",
"file_2.py": "# File contents",
"file_3.py": "# File contents",
},
"dir_2": {
"file_1.py": "# File contents",
"file_2.py": "# File contents",
"file_3.py": "# File contents",
}
}),
)
.await;
let project = Project::test(fs.clone(), ["/project_root".as_ref()], cx).await;
let (_, workspace) = cx.add_window(|cx| Workspace::test_new(project.clone(), cx));
let panel = workspace.update(cx, |workspace, cx| ProjectPanel::new(workspace, cx));
panel.update(cx, |panel, cx| {
panel.collapse_all_entries(&CollapseAllEntries, cx)
});
cx.foreground().run_until_parked();
assert_eq!(
visible_entries_as_strings(&panel, 0..10, cx),
&["v project_root", " > dir_1", " > dir_2",]
);
// Open dir_1 and make sure nested_dir was collapsed when running collapse_all_entries
toggle_expand_dir(&panel, "project_root/dir_1", cx);
cx.foreground().run_until_parked();
assert_eq!(
visible_entries_as_strings(&panel, 0..10, cx),
&[
"v project_root",
" v dir_1 <== selected",
" > nested_dir",
" file_1.py",
" file_2.py",
" file_3.py",
" > dir_2",
]
);
}
fn toggle_expand_dir(
panel: &ViewHandle<ProjectPanel>,
path: impl AsRef<Path>,
@@ -2943,4 +2716,3 @@ mod tests {
});
}
}
// TODO - a workspace command?
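
One pattern this file's diff touches repeatedly is worth seeing in one place: a gpui action is declared in the actions! macro list, registered in init, and handled by a method taking the action plus a ViewContext. Condensed from the hunks above (every name here appears in the diff; the real init also takes an assets argument):

// Declare the action type (one entry in the actions! list above).
actions!(project_panel, [CollapseAllEntries]);

// Register the handler once at startup.
pub fn init(cx: &mut AppContext) {
    cx.add_action(ProjectPanel::collapse_all_entries);
}

impl ProjectPanel {
    // The handler: dropping every expanded-directory id collapses the whole
    // tree; the visible-entry cache is rebuilt and the view re-rendered.
    pub fn collapse_all_entries(&mut self, _: &CollapseAllEntries, cx: &mut ViewContext<Self>) {
        self.expanded_dir_ids.clear();
        self.update_visible_entries(None, cx);
        cx.notify();
    }
}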

View File

@@ -20,7 +20,6 @@ settings = { path = "../settings" }
theme = { path = "../theme" }
util = { path = "../util" }
workspace = { path = "../workspace" }
semantic_index = { path = "../semantic_index" }
anyhow.workspace = true
futures.workspace = true
log.workspace = true

View File

@@ -1,6 +1,6 @@
use crate::{
NextHistoryQuery, PreviousHistoryQuery, SearchHistory, SearchOptions, SelectAllMatches,
SelectNextMatch, SelectPrevMatch, ToggleCaseSensitive, ToggleRegex, ToggleWholeWord,
SearchOptions, SelectAllMatches, SelectNextMatch, SelectPrevMatch, ToggleCaseSensitive,
ToggleRegex, ToggleWholeWord,
};
use collections::HashMap;
use editor::Editor;
@@ -46,8 +46,6 @@ pub fn init(cx: &mut AppContext) {
cx.add_action(BufferSearchBar::select_prev_match_on_pane);
cx.add_action(BufferSearchBar::select_all_matches_on_pane);
cx.add_action(BufferSearchBar::handle_editor_cancel);
cx.add_action(BufferSearchBar::next_history_query);
cx.add_action(BufferSearchBar::previous_history_query);
add_toggle_option_action::<ToggleCaseSensitive>(SearchOptions::CASE_SENSITIVE, cx);
add_toggle_option_action::<ToggleWholeWord>(SearchOptions::WHOLE_WORD, cx);
add_toggle_option_action::<ToggleRegex>(SearchOptions::REGEX, cx);
@@ -67,7 +65,7 @@ fn add_toggle_option_action<A: Action>(option: SearchOptions, cx: &mut AppContex
}
pub struct BufferSearchBar {
query_editor: ViewHandle<Editor>,
pub query_editor: ViewHandle<Editor>,
active_searchable_item: Option<Box<dyn SearchableItemHandle>>,
active_match_index: Option<usize>,
active_searchable_item_subscription: Option<Subscription>,
@@ -78,7 +76,6 @@ pub struct BufferSearchBar {
default_options: SearchOptions,
query_contains_error: bool,
dismissed: bool,
search_history: SearchHistory,
}
impl Entity for BufferSearchBar {
@@ -109,48 +106,6 @@ impl View for BufferSearchBar {
.map(|active_searchable_item| active_searchable_item.supported_options())
.unwrap_or_default();
let previous_query_keystrokes =
cx.binding_for_action(&PreviousHistoryQuery {})
.map(|binding| {
binding
.keystrokes()
.iter()
.map(|k| k.to_string())
.collect::<Vec<_>>()
});
let next_query_keystrokes = cx.binding_for_action(&NextHistoryQuery {}).map(|binding| {
binding
.keystrokes()
.iter()
.map(|k| k.to_string())
.collect::<Vec<_>>()
});
let new_placeholder_text = match (previous_query_keystrokes, next_query_keystrokes) {
(Some(previous_query_keystrokes), Some(next_query_keystrokes)) => {
format!(
"Search ({}/{} for previous/next query)",
previous_query_keystrokes.join(" "),
next_query_keystrokes.join(" ")
)
}
(None, Some(next_query_keystrokes)) => {
format!(
"Search ({} for next query)",
next_query_keystrokes.join(" ")
)
}
(Some(previous_query_keystrokes), None) => {
format!(
"Search ({} for previous query)",
previous_query_keystrokes.join(" ")
)
}
(None, None) => String::new(),
};
self.query_editor.update(cx, |editor, cx| {
editor.set_placeholder_text(new_placeholder_text, cx);
});
Flex::row()
.with_child(
Flex::row()
@@ -303,7 +258,6 @@ impl BufferSearchBar {
pending_search: None,
query_contains_error: false,
dismissed: true,
search_history: SearchHistory::default(),
}
}
@@ -387,7 +341,7 @@ impl BufferSearchBar {
cx: &mut ViewContext<Self>,
) -> oneshot::Receiver<()> {
let options = options.unwrap_or(self.default_options);
if query != self.query(cx) || self.search_options != options {
if query != self.query_editor.read(cx).text(cx) || self.search_options != options {
self.query_editor.update(cx, |query_editor, cx| {
query_editor.buffer().update(cx, |query_buffer, cx| {
let len = query_buffer.len(cx);
@@ -614,7 +568,7 @@ impl BufferSearchBar {
}
fn select_all_matches(&mut self, _: &SelectAllMatches, cx: &mut ViewContext<Self>) {
if !self.dismissed && self.active_match_index.is_some() {
if !self.dismissed {
if let Some(searchable_item) = self.active_searchable_item.as_ref() {
if let Some(matches) = self
.searchable_items_with_matches
@@ -720,7 +674,7 @@ impl BufferSearchBar {
fn update_matches(&mut self, cx: &mut ViewContext<Self>) -> oneshot::Receiver<()> {
let (done_tx, done_rx) = oneshot::channel();
let query = self.query(cx);
let query = self.query_editor.read(cx).text(cx);
self.pending_search.take();
if let Some(active_searchable_item) = self.active_searchable_item.as_ref() {
if query.is_empty() {
@@ -753,7 +707,6 @@ impl BufferSearchBar {
)
};
let query_text = query.as_str().to_string();
let matches = active_searchable_item.find_matches(query, cx);
let active_searchable_item = active_searchable_item.downgrade();
@@ -767,7 +720,6 @@ impl BufferSearchBar {
.insert(active_searchable_item.downgrade(), matches);
this.update_match_index(cx);
this.search_history.add(query_text);
if !this.dismissed {
let matches = this
.searchable_items_with_matches
@@ -801,28 +753,6 @@ impl BufferSearchBar {
cx.notify();
}
}
fn next_history_query(&mut self, _: &NextHistoryQuery, cx: &mut ViewContext<Self>) {
if let Some(new_query) = self.search_history.next().map(str::to_string) {
let _ = self.search(&new_query, Some(self.search_options), cx);
} else {
self.search_history.reset_selection();
let _ = self.search("", Some(self.search_options), cx);
}
}
fn previous_history_query(&mut self, _: &PreviousHistoryQuery, cx: &mut ViewContext<Self>) {
if self.query(cx).is_empty() {
if let Some(new_query) = self.search_history.current().map(str::to_string) {
let _ = self.search(&new_query, Some(self.search_options), cx);
return;
}
}
if let Some(new_query) = self.search_history.previous().map(str::to_string) {
let _ = self.search(&new_query, Some(self.search_options), cx);
}
}
}
#[cfg(test)]
@@ -1245,16 +1175,9 @@ mod tests {
.await
.unwrap();
search_bar.update(cx, |search_bar, cx| {
cx.focus(search_bar.query_editor.as_any());
search_bar.activate_current_match(cx);
});
cx.read_window(window_id, |cx| {
assert!(
!editor.is_focused(cx),
"Initially, the editor should not be focused"
);
});
let initial_selections = editor.update(cx, |editor, cx| {
let initial_selections = editor.selections.display_ranges(cx);
assert_eq!(
@@ -1268,16 +1191,7 @@ mod tests {
});
search_bar.update(cx, |search_bar, cx| {
cx.focus(search_bar.query_editor.as_any());
search_bar.select_all_matches(&SelectAllMatches, cx);
});
cx.read_window(window_id, |cx| {
assert!(
editor.is_focused(cx),
"Should focus editor after successful SelectAllMatches"
);
});
search_bar.update(cx, |search_bar, cx| {
let all_selections =
editor.update(cx, |editor, cx| editor.selections.display_ranges(cx));
assert_eq!(
@@ -1285,6 +1199,8 @@ mod tests {
expected_query_matches_count,
"Should select all `a` characters in the buffer, but got: {all_selections:?}"
);
});
search_bar.update(cx, |search_bar, _| {
assert_eq!(
search_bar.active_match_index,
Some(0),
@@ -1294,14 +1210,6 @@ mod tests {
search_bar.update(cx, |search_bar, cx| {
search_bar.select_next_match(&SelectNextMatch, cx);
});
cx.read_window(window_id, |cx| {
assert!(
editor.is_focused(cx),
"Should still have editor focused after SelectNextMatch"
);
});
search_bar.update(cx, |search_bar, cx| {
let all_selections =
editor.update(cx, |editor, cx| editor.selections.display_ranges(cx));
assert_eq!(
@@ -1313,6 +1221,8 @@ mod tests {
all_selections, initial_selections,
"Next match should be different from the first selection"
);
});
search_bar.update(cx, |search_bar, _| {
assert_eq!(
search_bar.active_match_index,
Some(1),
@@ -1321,16 +1231,7 @@ mod tests {
});
search_bar.update(cx, |search_bar, cx| {
cx.focus(search_bar.query_editor.as_any());
search_bar.select_all_matches(&SelectAllMatches, cx);
});
cx.read_window(window_id, |cx| {
assert!(
editor.is_focused(cx),
"Should focus editor after successful SelectAllMatches"
);
});
search_bar.update(cx, |search_bar, cx| {
let all_selections =
editor.update(cx, |editor, cx| editor.selections.display_ranges(cx));
assert_eq!(
@@ -1338,6 +1239,8 @@ mod tests {
expected_query_matches_count,
"Should select all `a` characters in the buffer, but got: {all_selections:?}"
);
});
search_bar.update(cx, |search_bar, _| {
assert_eq!(
search_bar.active_match_index,
Some(1),
@@ -1347,14 +1250,6 @@ mod tests {
search_bar.update(cx, |search_bar, cx| {
search_bar.select_prev_match(&SelectPrevMatch, cx);
});
cx.read_window(window_id, |cx| {
assert!(
editor.is_focused(cx),
"Should still have editor focused after SelectPrevMatch"
);
});
let last_match_selections = search_bar.update(cx, |search_bar, cx| {
let all_selections =
editor.update(cx, |editor, cx| editor.selections.display_ranges(cx));
assert_eq!(
@@ -1366,191 +1261,13 @@ mod tests {
all_selections, initial_selections,
"Previous match should be the same as the first selection"
);
});
search_bar.update(cx, |search_bar, _| {
assert_eq!(
search_bar.active_match_index,
Some(0),
"Match index should be updated to the previous one"
);
all_selections
});
search_bar
.update(cx, |search_bar, cx| {
cx.focus(search_bar.query_editor.as_any());
search_bar.search("abas_nonexistent_match", None, cx)
})
.await
.unwrap();
search_bar.update(cx, |search_bar, cx| {
search_bar.select_all_matches(&SelectAllMatches, cx);
});
cx.read_window(window_id, |cx| {
assert!(
!editor.is_focused(cx),
"Should not switch focus to editor if SelectAllMatches does not find any matches"
);
});
search_bar.update(cx, |search_bar, cx| {
let all_selections =
editor.update(cx, |editor, cx| editor.selections.display_ranges(cx));
assert_eq!(
all_selections, last_match_selections,
"Should not select anything new if there are no matches"
);
assert!(
search_bar.active_match_index.is_none(),
"For no matches, there should be no active match index"
);
});
}
#[gpui::test]
async fn test_search_query_history(cx: &mut TestAppContext) {
crate::project_search::tests::init_test(cx);
let buffer_text = r#"
A regular expression (shortened as regex or regexp;[1] also referred to as
rational expression[2][3]) is a sequence of characters that specifies a search
pattern in text. Usually such patterns are used by string-searching algorithms
for "find" or "find and replace" operations on strings, or for input validation.
"#
.unindent();
let buffer = cx.add_model(|cx| Buffer::new(0, buffer_text, cx));
let (window_id, _root_view) = cx.add_window(|_| EmptyView);
let editor = cx.add_view(window_id, |cx| Editor::for_buffer(buffer.clone(), None, cx));
let search_bar = cx.add_view(window_id, |cx| {
let mut search_bar = BufferSearchBar::new(cx);
search_bar.set_active_pane_item(Some(&editor), cx);
search_bar.show(cx);
search_bar
});
// Add 3 search items into the history.
search_bar
.update(cx, |search_bar, cx| search_bar.search("a", None, cx))
.await
.unwrap();
search_bar
.update(cx, |search_bar, cx| search_bar.search("b", None, cx))
.await
.unwrap();
search_bar
.update(cx, |search_bar, cx| {
search_bar.search("c", Some(SearchOptions::CASE_SENSITIVE), cx)
})
.await
.unwrap();
// Ensure that the latest search is active.
search_bar.read_with(cx, |search_bar, cx| {
assert_eq!(search_bar.query(cx), "c");
assert_eq!(search_bar.search_options, SearchOptions::CASE_SENSITIVE);
});
// Next history query after the latest should set the query to the empty string.
search_bar.update(cx, |search_bar, cx| {
search_bar.next_history_query(&NextHistoryQuery, cx);
});
search_bar.read_with(cx, |search_bar, cx| {
assert_eq!(search_bar.query(cx), "");
assert_eq!(search_bar.search_options, SearchOptions::CASE_SENSITIVE);
});
search_bar.update(cx, |search_bar, cx| {
search_bar.next_history_query(&NextHistoryQuery, cx);
});
search_bar.read_with(cx, |search_bar, cx| {
assert_eq!(search_bar.query(cx), "");
assert_eq!(search_bar.search_options, SearchOptions::CASE_SENSITIVE);
});
// First previous query for empty current query should set the query to the latest.
search_bar.update(cx, |search_bar, cx| {
search_bar.previous_history_query(&PreviousHistoryQuery, cx);
});
search_bar.read_with(cx, |search_bar, cx| {
assert_eq!(search_bar.query(cx), "c");
assert_eq!(search_bar.search_options, SearchOptions::CASE_SENSITIVE);
});
// Further previous items should go over the history in reverse order.
search_bar.update(cx, |search_bar, cx| {
search_bar.previous_history_query(&PreviousHistoryQuery, cx);
});
search_bar.read_with(cx, |search_bar, cx| {
assert_eq!(search_bar.query(cx), "b");
assert_eq!(search_bar.search_options, SearchOptions::CASE_SENSITIVE);
});
// Previous items should never go behind the first history item.
search_bar.update(cx, |search_bar, cx| {
search_bar.previous_history_query(&PreviousHistoryQuery, cx);
});
search_bar.read_with(cx, |search_bar, cx| {
assert_eq!(search_bar.query(cx), "a");
assert_eq!(search_bar.search_options, SearchOptions::CASE_SENSITIVE);
});
search_bar.update(cx, |search_bar, cx| {
search_bar.previous_history_query(&PreviousHistoryQuery, cx);
});
search_bar.read_with(cx, |search_bar, cx| {
assert_eq!(search_bar.query(cx), "a");
assert_eq!(search_bar.search_options, SearchOptions::CASE_SENSITIVE);
});
// Next items should go over the history in the original order.
search_bar.update(cx, |search_bar, cx| {
search_bar.next_history_query(&NextHistoryQuery, cx);
});
search_bar.read_with(cx, |search_bar, cx| {
assert_eq!(search_bar.query(cx), "b");
assert_eq!(search_bar.search_options, SearchOptions::CASE_SENSITIVE);
});
search_bar
.update(cx, |search_bar, cx| search_bar.search("ba", None, cx))
.await
.unwrap();
search_bar.read_with(cx, |search_bar, cx| {
assert_eq!(search_bar.query(cx), "ba");
assert_eq!(search_bar.search_options, SearchOptions::NONE);
});
// New search input should add another entry to history and move the selection to the end of the history.
search_bar.update(cx, |search_bar, cx| {
search_bar.previous_history_query(&PreviousHistoryQuery, cx);
});
search_bar.read_with(cx, |search_bar, cx| {
assert_eq!(search_bar.query(cx), "c");
assert_eq!(search_bar.search_options, SearchOptions::NONE);
});
search_bar.update(cx, |search_bar, cx| {
search_bar.previous_history_query(&PreviousHistoryQuery, cx);
});
search_bar.read_with(cx, |search_bar, cx| {
assert_eq!(search_bar.query(cx), "b");
assert_eq!(search_bar.search_options, SearchOptions::NONE);
});
search_bar.update(cx, |search_bar, cx| {
search_bar.next_history_query(&NextHistoryQuery, cx);
});
search_bar.read_with(cx, |search_bar, cx| {
assert_eq!(search_bar.query(cx), "c");
assert_eq!(search_bar.search_options, SearchOptions::NONE);
});
search_bar.update(cx, |search_bar, cx| {
search_bar.next_history_query(&NextHistoryQuery, cx);
});
search_bar.read_with(cx, |search_bar, cx| {
assert_eq!(search_bar.query(cx), "ba");
assert_eq!(search_bar.search_options, SearchOptions::NONE);
});
search_bar.update(cx, |search_bar, cx| {
search_bar.next_history_query(&NextHistoryQuery, cx);
});
search_bar.read_with(cx, |search_bar, cx| {
assert_eq!(search_bar.query(cx), "");
assert_eq!(search_bar.search_options, SearchOptions::NONE);
});
}
}
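
The removed history test above pins the SearchHistory contract down tightly: add() appends and selects the newest entry, previous()/next() walk the list and return None at either end, and reset_selection() clears the cursor so the next previous() starts again from the latest query. A minimal sketch that satisfies exactly the behavior the test exercises (Zed's real SearchHistory in the search crate may differ, e.g. in capacity limits):

// Hedged sketch of the SearchHistory semantics exercised by the removed
// test above; not the actual implementation from the search crate.
#[derive(Clone, Debug, Default)]
pub struct SearchHistory {
    history: Vec<String>,
    selected: Option<usize>,
}

impl SearchHistory {
    pub fn add(&mut self, query: String) {
        // Skip consecutive duplicates, then select the newest entry.
        if self.history.last() != Some(&query) {
            self.history.push(query);
        }
        self.selected = Some(self.history.len() - 1);
    }

    pub fn current(&self) -> Option<&str> {
        Some(self.history[self.selected?].as_str())
    }

    pub fn previous(&mut self) -> Option<&str> {
        let ix = match self.selected {
            Some(0) => return None, // already at the oldest entry
            Some(ix) => ix - 1,
            None if !self.history.is_empty() => self.history.len() - 1,
            None => return None,
        };
        self.selected = Some(ix);
        Some(self.history[ix].as_str())
    }

    pub fn next(&mut self) -> Option<&str> {
        let ix = self.selected? + 1;
        if ix < self.history.len() {
            self.selected = Some(ix);
            Some(self.history[ix].as_str())
        } else {
            None // stepping past the newest entry, as the test expects
        }
    }

    pub fn reset_selection(&mut self) {
        self.selected = None;
    }
}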

View File

@@ -1,14 +1,15 @@
use crate::{
NextHistoryQuery, PreviousHistoryQuery, SearchHistory, SearchOptions, SelectNextMatch,
SelectPrevMatch, ToggleCaseSensitive, ToggleRegex, ToggleWholeWord,
SearchOptions, SelectNextMatch, SelectPrevMatch, ToggleCaseSensitive, ToggleRegex,
ToggleWholeWord,
};
use anyhow::Context;
use anyhow::Result;
use collections::HashMap;
use editor::{
items::active_match_index, scroll::autoscroll::Autoscroll, Anchor, Editor, MultiBuffer,
SelectAll, MAX_TAB_TITLE_LEN,
};
use futures::StreamExt;
use globset::{Glob, GlobMatcher};
use gpui::{
actions,
elements::*,
@@ -17,12 +18,7 @@ use gpui::{
Task, View, ViewContext, ViewHandle, WeakModelHandle, WeakViewHandle,
};
use menu::Confirm;
use postage::stream::Stream;
use project::{
search::{PathMatcher, SearchQuery},
Entry, Project,
};
use semantic_index::SemanticIndex;
use project::{search::SearchQuery, Project};
use smallvec::SmallVec;
use std::{
any::{Any, TypeId},
@@ -40,10 +36,7 @@ use workspace::{
ItemNavHistory, Pane, ToolbarItemLocation, ToolbarItemView, Workspace, WorkspaceId,
};
actions!(
project_search,
[SearchInNew, ToggleFocus, NextField, ToggleSemanticSearch]
);
actions!(project_search, [SearchInNew, ToggleFocus, NextField]);
#[derive(Default)]
struct ActiveSearches(HashMap<WeakModelHandle<Project>, WeakViewHandle<ProjectSearchView>>);
@@ -56,8 +49,6 @@ pub fn init(cx: &mut AppContext) {
cx.add_action(ProjectSearchBar::search_in_new);
cx.add_action(ProjectSearchBar::select_next_match);
cx.add_action(ProjectSearchBar::select_prev_match);
cx.add_action(ProjectSearchBar::next_history_query);
cx.add_action(ProjectSearchBar::previous_history_query);
cx.capture_action(ProjectSearchBar::tab);
cx.capture_action(ProjectSearchBar::tab_previous);
add_toggle_option_action::<ToggleCaseSensitive>(SearchOptions::CASE_SENSITIVE, cx);
@@ -85,7 +76,6 @@ struct ProjectSearch {
match_ranges: Vec<Range<Anchor>>,
active_query: Option<SearchQuery>,
search_id: usize,
search_history: SearchHistory,
}
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
@@ -99,7 +89,6 @@ pub struct ProjectSearchView {
model: ModelHandle<ProjectSearch>,
query_editor: ViewHandle<Editor>,
results_editor: ViewHandle<Editor>,
semantic: Option<SemanticSearchState>,
search_options: SearchOptions,
panels_with_errors: HashSet<InputPanel>,
active_match_index: Option<usize>,
@@ -109,12 +98,6 @@ pub struct ProjectSearchView {
excluded_files_editor: ViewHandle<Editor>,
}
struct SemanticSearchState {
file_count: usize,
outstanding_file_count: usize,
_progress_task: Task<()>,
}
pub struct ProjectSearchBar {
active_project_search: Option<ViewHandle<ProjectSearchView>>,
subscription: Option<Subscription>,
@@ -134,7 +117,6 @@ impl ProjectSearch {
match_ranges: Default::default(),
active_query: None,
search_id: 0,
search_history: SearchHistory::default(),
}
}
@@ -148,7 +130,6 @@ impl ProjectSearch {
match_ranges: self.match_ranges.clone(),
active_query: self.active_query.clone(),
search_id: self.search_id,
search_history: self.search_history.clone(),
})
}
@@ -157,7 +138,6 @@ impl ProjectSearch {
.project
.update(cx, |project, cx| project.search(query.clone(), cx));
self.search_id += 1;
self.search_history.add(query.as_str().to_string());
self.active_query = Some(query);
self.match_ranges.clear();
self.pending_search = Some(cx.spawn_weak(|this, mut cx| async move {
@@ -192,58 +172,6 @@ impl ProjectSearch {
}));
cx.notify();
}
fn semantic_search(&mut self, query: SearchQuery, cx: &mut ModelContext<Self>) {
let search = SemanticIndex::global(cx).map(|index| {
index.update(cx, |semantic_index, cx| {
semantic_index.search_project(
self.project.clone(),
query.as_str().to_owned(),
10,
query.files_to_include().to_vec(),
query.files_to_exclude().to_vec(),
cx,
)
})
});
self.search_id += 1;
self.match_ranges.clear();
self.search_history.add(query.as_str().to_string());
self.pending_search = Some(cx.spawn(|this, mut cx| async move {
let results = search?.await.log_err()?;
let (_task, mut match_ranges) = this.update(&mut cx, |this, cx| {
this.excerpts.update(cx, |excerpts, cx| {
excerpts.clear(cx);
let matches = results
.into_iter()
.map(|result| (result.buffer, vec![result.range.start..result.range.start]))
.collect();
excerpts.stream_excerpts_with_context_lines(matches, 3, cx)
})
});
while let Some(match_range) = match_ranges.next().await {
this.update(&mut cx, |this, cx| {
this.match_ranges.push(match_range);
while let Ok(Some(match_range)) = match_ranges.try_next() {
this.match_ranges.push(match_range);
}
cx.notify();
});
}
this.update(&mut cx, |this, cx| {
this.pending_search.take();
cx.notify();
});
None
}));
cx.notify();
}
}
pub enum ViewEvent {
@@ -267,67 +195,13 @@ impl View for ProjectSearchView {
enum Status {}
let theme = theme::current(cx).clone();
let text = if model.pending_search.is_some() {
Cow::Borrowed("Searching...")
} else if let Some(semantic) = &self.semantic {
if semantic.outstanding_file_count > 0 {
Cow::Owned(format!(
"Indexing. {} of {}...",
semantic.file_count - semantic.outstanding_file_count,
semantic.file_count
))
} else {
Cow::Borrowed("Indexing complete")
}
} else if self.query_editor.read(cx).text(cx).is_empty() {
Cow::Borrowed("")
let text = if self.query_editor.read(cx).text(cx).is_empty() {
""
} else if model.pending_search.is_some() {
"Searching..."
} else {
Cow::Borrowed("No results")
"No results"
};
let previous_query_keystrokes =
cx.binding_for_action(&PreviousHistoryQuery {})
.map(|binding| {
binding
.keystrokes()
.iter()
.map(|k| k.to_string())
.collect::<Vec<_>>()
});
let next_query_keystrokes =
cx.binding_for_action(&NextHistoryQuery {}).map(|binding| {
binding
.keystrokes()
.iter()
.map(|k| k.to_string())
.collect::<Vec<_>>()
});
let new_placeholder_text = match (previous_query_keystrokes, next_query_keystrokes) {
(Some(previous_query_keystrokes), Some(next_query_keystrokes)) => {
format!(
"Search ({}/{} for previous/next query)",
previous_query_keystrokes.join(" "),
next_query_keystrokes.join(" ")
)
}
(None, Some(next_query_keystrokes)) => {
format!(
"Search ({} for next query)",
next_query_keystrokes.join(" ")
)
}
(Some(previous_query_keystrokes), None) => {
format!(
"Search ({} for previous query)",
previous_query_keystrokes.join(" ")
)
}
(None, None) => String::new(),
};
self.query_editor.update(cx, |editor, cx| {
editor.set_placeholder_text(new_placeholder_text, cx);
});
MouseEventHandler::<Status, _>::new(0, cx, |_, _| {
Label::new(text, theme.search.results_status.clone())
.aligned()
@@ -616,7 +490,6 @@ impl ProjectSearchView {
model,
query_editor,
results_editor,
semantic: None,
search_options: options,
panels_with_errors: HashSet::new(),
active_match_index: None,
@@ -628,27 +501,6 @@ impl ProjectSearchView {
this
}
pub fn new_search_in_directory(
workspace: &mut Workspace,
dir_entry: &Entry,
cx: &mut ViewContext<Workspace>,
) {
if !dir_entry.is_dir() {
return;
}
let Some(filter_str) = dir_entry.path.to_str() else { return; };
let model = cx.add_model(|cx| ProjectSearch::new(workspace.project().clone(), cx));
let search = cx.add_view(|cx| ProjectSearchView::new(model, cx));
workspace.add_item(Box::new(search.clone()), cx);
search.update(cx, |search, cx| {
search
.included_files_editor
.update(cx, |editor, cx| editor.set_text(filter_str, cx));
search.focus_query_editor(cx)
});
}
    // Re-activate the most recently activated search, falling back to the most recent one if it has been closed.
// If no search exists in the workspace, create a new one.
fn deploy(
@@ -703,16 +555,6 @@ impl ProjectSearchView {
}
fn search(&mut self, cx: &mut ViewContext<Self>) {
if let Some(semantic) = &mut self.semantic {
if semantic.outstanding_file_count > 0 {
return;
}
if let Some(query) = self.build_search_query(cx) {
self.model
.update(cx, |model, cx| model.semantic_search(query, cx));
}
}
if let Some(query) = self.build_search_query(cx) {
self.model.update(cx, |model, cx| model.search(query, cx));
}
@@ -721,7 +563,7 @@ impl ProjectSearchView {
fn build_search_query(&mut self, cx: &mut ViewContext<Self>) -> Option<SearchQuery> {
let text = self.query_editor.read(cx).text(cx);
let included_files =
match Self::parse_path_matches(&self.included_files_editor.read(cx).text(cx)) {
match Self::load_glob_set(&self.included_files_editor.read(cx).text(cx)) {
Ok(included_files) => {
self.panels_with_errors.remove(&InputPanel::Include);
included_files
@@ -733,7 +575,7 @@ impl ProjectSearchView {
}
};
let excluded_files =
match Self::parse_path_matches(&self.excluded_files_editor.read(cx).text(cx)) {
match Self::load_glob_set(&self.excluded_files_editor.read(cx).text(cx)) {
Ok(excluded_files) => {
self.panels_with_errors.remove(&InputPanel::Exclude);
excluded_files
@@ -773,14 +615,11 @@ impl ProjectSearchView {
}
}
fn parse_path_matches(text: &str) -> anyhow::Result<Vec<PathMatcher>> {
fn load_glob_set(text: &str) -> Result<Vec<GlobMatcher>> {
text.split(',')
.map(str::trim)
.filter(|maybe_glob_str| !maybe_glob_str.is_empty())
.map(|maybe_glob_str| {
PathMatcher::new(maybe_glob_str)
.with_context(|| format!("parsing {maybe_glob_str} as path matcher"))
})
.filter(|glob_str| !glob_str.is_empty())
.map(|glob_str| anyhow::Ok(Glob::new(glob_str)?.compile_matcher()))
.collect()
}
@@ -793,7 +632,6 @@ impl ProjectSearchView {
let range_to_select = match_ranges[new_index].clone();
self.results_editor.update(cx, |editor, cx| {
let range_to_select = editor.range_for_match(&range_to_select);
editor.unfold_ranges([range_to_select.clone()], false, true, cx);
editor.change_selections(Some(Autoscroll::fit()), cx, |s| {
s.select_ranges([range_to_select])
@@ -835,12 +673,8 @@ impl ProjectSearchView {
let is_new_search = self.search_id != prev_search_id;
self.results_editor.update(cx, |editor, cx| {
if is_new_search {
let range_to_select = match_ranges
.first()
.clone()
.map(|range| editor.range_for_match(range));
editor.change_selections(Some(Autoscroll::fit()), cx, |s| {
s.select_ranges(range_to_select)
s.select_ranges(match_ranges.first().cloned())
});
}
editor.highlight_background::<Self>(
@@ -1017,7 +851,6 @@ impl ProjectSearchBar {
if let Some(search_view) = self.active_project_search.as_ref() {
search_view.update(cx, |search_view, cx| {
search_view.search_options.toggle(option);
search_view.semantic = None;
search_view.search(cx);
});
cx.notify();
@@ -1027,61 +860,6 @@ impl ProjectSearchBar {
}
}
fn toggle_semantic_search(&mut self, cx: &mut ViewContext<Self>) -> bool {
if let Some(search_view) = self.active_project_search.as_ref() {
search_view.update(cx, |search_view, cx| {
if search_view.semantic.is_some() {
search_view.semantic = None;
} else if let Some(semantic_index) = SemanticIndex::global(cx) {
// TODO: confirm that it's ok to send this project
search_view.search_options = SearchOptions::none();
let project = search_view.model.read(cx).project.clone();
let index_task = semantic_index.update(cx, |semantic_index, cx| {
semantic_index.index_project(project, cx)
});
cx.spawn(|search_view, mut cx| async move {
let (files_to_index, mut files_remaining_rx) = index_task.await?;
search_view.update(&mut cx, |search_view, cx| {
cx.notify();
search_view.semantic = Some(SemanticSearchState {
file_count: files_to_index,
outstanding_file_count: files_to_index,
_progress_task: cx.spawn(|search_view, mut cx| async move {
while let Some(count) = files_remaining_rx.recv().await {
search_view
.update(&mut cx, |search_view, cx| {
if let Some(semantic_search_state) =
&mut search_view.semantic
{
semantic_search_state.outstanding_file_count =
count;
cx.notify();
if count == 0 {
return;
}
}
})
.ok();
}
}),
});
})?;
anyhow::Ok(())
})
.detach_and_log_err(cx);
}
cx.notify();
});
cx.notify();
true
} else {
false
}
}
fn render_nav_button(
&self,
icon: &'static str,
@@ -1159,42 +937,6 @@ impl ProjectSearchBar {
.into_any()
}
fn render_semantic_search_button(&self, cx: &mut ViewContext<Self>) -> AnyElement<Self> {
let tooltip_style = theme::current(cx).tooltip.clone();
let is_active = if let Some(search) = self.active_project_search.as_ref() {
let search = search.read(cx);
search.semantic.is_some()
} else {
false
};
let region_id = 3;
MouseEventHandler::<Self, _>::new(region_id, cx, |state, cx| {
let theme = theme::current(cx);
let style = theme
.search
.option_button
.in_state(is_active)
.style_for(state);
Label::new("Semantic", style.text.clone())
.contained()
.with_style(style.container)
})
.on_click(MouseButton::Left, move |_, this, cx| {
this.toggle_semantic_search(cx);
})
.with_cursor_style(CursorStyle::PointingHand)
.with_tooltip::<Self>(
region_id,
format!("Toggle Semantic Search"),
Some(Box::new(ToggleSemanticSearch)),
tooltip_style,
cx,
)
.into_any()
}
fn is_option_enabled(&self, option: SearchOptions, cx: &AppContext) -> bool {
if let Some(search) = self.active_project_search.as_ref() {
search.read(cx).search_options.contains(option)
@@ -1202,47 +944,6 @@ impl ProjectSearchBar {
false
}
}
fn next_history_query(&mut self, _: &NextHistoryQuery, cx: &mut ViewContext<Self>) {
if let Some(search_view) = self.active_project_search.as_ref() {
search_view.update(cx, |search_view, cx| {
let new_query = search_view.model.update(cx, |model, _| {
if let Some(new_query) = model.search_history.next().map(str::to_string) {
new_query
} else {
model.search_history.reset_selection();
String::new()
}
});
search_view.set_query(&new_query, cx);
});
}
}
fn previous_history_query(&mut self, _: &PreviousHistoryQuery, cx: &mut ViewContext<Self>) {
if let Some(search_view) = self.active_project_search.as_ref() {
search_view.update(cx, |search_view, cx| {
if search_view.query_editor.read(cx).text(cx).is_empty() {
if let Some(new_query) = search_view
.model
.read(cx)
.search_history
.current()
.map(str::to_string)
{
search_view.set_query(&new_query, cx);
return;
}
}
if let Some(new_query) = search_view.model.update(cx, |model, _| {
model.search_history.previous().map(str::to_string)
}) {
search_view.set_query(&new_query, cx);
}
});
}
}
}
impl Entity for ProjectSearchBar {
@@ -1325,14 +1026,8 @@ impl View for ProjectSearchBar {
.with_child(self.render_nav_button(">", Direction::Next, cx))
.aligned(),
)
.with_child({
let row = if SemanticIndex::enabled(cx) {
Flex::row().with_child(self.render_semantic_search_button(cx))
} else {
Flex::row()
};
let row = row
.with_child(
Flex::row()
.with_child(self.render_option_button(
"Case",
SearchOptions::CASE_SENSITIVE,
@@ -1350,10 +1045,8 @@ impl View for ProjectSearchBar {
))
.contained()
.with_style(theme.search.option_button_group)
.aligned();
row
})
.aligned(),
)
.contained()
.with_margin_bottom(row_spacing),
)
@@ -1424,7 +1117,6 @@ pub mod tests {
use editor::DisplayPoint;
use gpui::{color::Color, executor::Deterministic, TestAppContext};
use project::FakeFs;
use semantic_index::semantic_index_settings::SemanticIndexSettings;
use serde_json::json;
use settings::SettingsStore;
use std::sync::Arc;
@@ -1722,320 +1414,6 @@ pub mod tests {
});
}
#[gpui::test]
async fn test_new_project_search_in_directory(
deterministic: Arc<Deterministic>,
cx: &mut TestAppContext,
) {
init_test(cx);
let fs = FakeFs::new(cx.background());
fs.insert_tree(
"/dir",
json!({
"a": {
"one.rs": "const ONE: usize = 1;",
"two.rs": "const TWO: usize = one::ONE + one::ONE;",
},
"b": {
"three.rs": "const THREE: usize = one::ONE + two::TWO;",
"four.rs": "const FOUR: usize = one::ONE + three::THREE;",
},
}),
)
.await;
let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
let worktree_id = project.read_with(cx, |project, cx| {
project.worktrees(cx).next().unwrap().read(cx).id()
});
let (_, workspace) = cx.add_window(|cx| Workspace::test_new(project, cx));
let active_item = cx.read(|cx| {
workspace
.read(cx)
.active_pane()
.read(cx)
.active_item()
.and_then(|item| item.downcast::<ProjectSearchView>())
});
assert!(
active_item.is_none(),
"Expected no search panel to be active, but got: {active_item:?}"
);
let one_file_entry = cx.update(|cx| {
workspace
.read(cx)
.project()
.read(cx)
.entry_for_path(&(worktree_id, "a/one.rs").into(), cx)
.expect("no entry for /a/one.rs file")
});
assert!(one_file_entry.is_file());
workspace.update(cx, |workspace, cx| {
ProjectSearchView::new_search_in_directory(workspace, &one_file_entry, cx)
});
let active_search_entry = cx.read(|cx| {
workspace
.read(cx)
.active_pane()
.read(cx)
.active_item()
.and_then(|item| item.downcast::<ProjectSearchView>())
});
assert!(
active_search_entry.is_none(),
"Expected no search panel to be active for file entry"
);
let a_dir_entry = cx.update(|cx| {
workspace
.read(cx)
.project()
.read(cx)
.entry_for_path(&(worktree_id, "a").into(), cx)
.expect("no entry for /a/ directory")
});
assert!(a_dir_entry.is_dir());
workspace.update(cx, |workspace, cx| {
ProjectSearchView::new_search_in_directory(workspace, &a_dir_entry, cx)
});
let Some(search_view) = cx.read(|cx| {
workspace
.read(cx)
.active_pane()
.read(cx)
.active_item()
.and_then(|item| item.downcast::<ProjectSearchView>())
}) else {
panic!("Search view expected to appear after new search in directory event trigger")
};
deterministic.run_until_parked();
search_view.update(cx, |search_view, cx| {
assert!(
search_view.query_editor.is_focused(cx),
"On new search in directory, focus should be moved into query editor"
);
search_view.excluded_files_editor.update(cx, |editor, cx| {
assert!(
editor.display_text(cx).is_empty(),
"New search in directory should not have any excluded files"
);
});
search_view.included_files_editor.update(cx, |editor, cx| {
assert_eq!(
editor.display_text(cx),
a_dir_entry.path.to_str().unwrap(),
"New search in directory should have included dir entry path"
);
});
});
search_view.update(cx, |search_view, cx| {
search_view
.query_editor
.update(cx, |query_editor, cx| query_editor.set_text("const", cx));
search_view.search(cx);
});
deterministic.run_until_parked();
search_view.update(cx, |search_view, cx| {
assert_eq!(
search_view
.results_editor
.update(cx, |editor, cx| editor.display_text(cx)),
"\n\nconst ONE: usize = 1;\n\n\nconst TWO: usize = one::ONE + one::ONE;",
"New search in directory should have a filter that matches a certain directory"
);
});
}
#[gpui::test]
async fn test_search_query_history(cx: &mut TestAppContext) {
init_test(cx);
let fs = FakeFs::new(cx.background());
fs.insert_tree(
"/dir",
json!({
"one.rs": "const ONE: usize = 1;",
"two.rs": "const TWO: usize = one::ONE + one::ONE;",
"three.rs": "const THREE: usize = one::ONE + two::TWO;",
"four.rs": "const FOUR: usize = one::ONE + three::THREE;",
}),
)
.await;
let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
let (window_id, workspace) = cx.add_window(|cx| Workspace::test_new(project, cx));
workspace.update(cx, |workspace, cx| {
ProjectSearchView::deploy(workspace, &workspace::NewSearch, cx)
});
let search_view = cx.read(|cx| {
workspace
.read(cx)
.active_pane()
.read(cx)
.active_item()
.and_then(|item| item.downcast::<ProjectSearchView>())
.expect("Search view expected to appear after new search event trigger")
});
let search_bar = cx.add_view(window_id, |cx| {
let mut search_bar = ProjectSearchBar::new();
search_bar.set_active_pane_item(Some(&search_view), cx);
// search_bar.show(cx);
search_bar
});
// Add 3 search items into the history + another unsubmitted one.
search_view.update(cx, |search_view, cx| {
search_view.search_options = SearchOptions::CASE_SENSITIVE;
search_view
.query_editor
.update(cx, |query_editor, cx| query_editor.set_text("ONE", cx));
search_view.search(cx);
});
cx.foreground().run_until_parked();
search_view.update(cx, |search_view, cx| {
search_view
.query_editor
.update(cx, |query_editor, cx| query_editor.set_text("TWO", cx));
search_view.search(cx);
});
cx.foreground().run_until_parked();
search_view.update(cx, |search_view, cx| {
search_view
.query_editor
.update(cx, |query_editor, cx| query_editor.set_text("THREE", cx));
search_view.search(cx);
});
cx.foreground().run_until_parked();
search_view.update(cx, |search_view, cx| {
search_view.query_editor.update(cx, |query_editor, cx| {
query_editor.set_text("JUST_TEXT_INPUT", cx)
});
});
cx.foreground().run_until_parked();
// Ensure that the latest input with search settings is active.
search_view.update(cx, |search_view, cx| {
assert_eq!(
search_view.query_editor.read(cx).text(cx),
"JUST_TEXT_INPUT"
);
assert_eq!(search_view.search_options, SearchOptions::CASE_SENSITIVE);
});
// Next history query after the latest should set the query to the empty string.
search_bar.update(cx, |search_bar, cx| {
search_bar.next_history_query(&NextHistoryQuery, cx);
});
search_view.update(cx, |search_view, cx| {
assert_eq!(search_view.query_editor.read(cx).text(cx), "");
assert_eq!(search_view.search_options, SearchOptions::CASE_SENSITIVE);
});
search_bar.update(cx, |search_bar, cx| {
search_bar.next_history_query(&NextHistoryQuery, cx);
});
search_view.update(cx, |search_view, cx| {
assert_eq!(search_view.query_editor.read(cx).text(cx), "");
assert_eq!(search_view.search_options, SearchOptions::CASE_SENSITIVE);
});
// First previous query for empty current query should set the query to the latest submitted one.
search_bar.update(cx, |search_bar, cx| {
search_bar.previous_history_query(&PreviousHistoryQuery, cx);
});
search_view.update(cx, |search_view, cx| {
assert_eq!(search_view.query_editor.read(cx).text(cx), "THREE");
assert_eq!(search_view.search_options, SearchOptions::CASE_SENSITIVE);
});
// Further previous items should go over the history in reverse order.
search_bar.update(cx, |search_bar, cx| {
search_bar.previous_history_query(&PreviousHistoryQuery, cx);
});
search_view.update(cx, |search_view, cx| {
assert_eq!(search_view.query_editor.read(cx).text(cx), "TWO");
assert_eq!(search_view.search_options, SearchOptions::CASE_SENSITIVE);
});
// Previous items should never go behind the first history item.
search_bar.update(cx, |search_bar, cx| {
search_bar.previous_history_query(&PreviousHistoryQuery, cx);
});
search_view.update(cx, |search_view, cx| {
assert_eq!(search_view.query_editor.read(cx).text(cx), "ONE");
assert_eq!(search_view.search_options, SearchOptions::CASE_SENSITIVE);
});
search_bar.update(cx, |search_bar, cx| {
search_bar.previous_history_query(&PreviousHistoryQuery, cx);
});
search_view.update(cx, |search_view, cx| {
assert_eq!(search_view.query_editor.read(cx).text(cx), "ONE");
assert_eq!(search_view.search_options, SearchOptions::CASE_SENSITIVE);
});
// Next items should go over the history in the original order.
search_bar.update(cx, |search_bar, cx| {
search_bar.next_history_query(&NextHistoryQuery, cx);
});
search_view.update(cx, |search_view, cx| {
assert_eq!(search_view.query_editor.read(cx).text(cx), "TWO");
assert_eq!(search_view.search_options, SearchOptions::CASE_SENSITIVE);
});
search_view.update(cx, |search_view, cx| {
search_view
.query_editor
.update(cx, |query_editor, cx| query_editor.set_text("TWO_NEW", cx));
search_view.search(cx);
});
cx.foreground().run_until_parked();
search_view.update(cx, |search_view, cx| {
assert_eq!(search_view.query_editor.read(cx).text(cx), "TWO_NEW");
assert_eq!(search_view.search_options, SearchOptions::CASE_SENSITIVE);
});
// New search input should add another entry to history and move the selection to the end of the history.
search_bar.update(cx, |search_bar, cx| {
search_bar.previous_history_query(&PreviousHistoryQuery, cx);
});
search_view.update(cx, |search_view, cx| {
assert_eq!(search_view.query_editor.read(cx).text(cx), "THREE");
assert_eq!(search_view.search_options, SearchOptions::CASE_SENSITIVE);
});
search_bar.update(cx, |search_bar, cx| {
search_bar.previous_history_query(&PreviousHistoryQuery, cx);
});
search_view.update(cx, |search_view, cx| {
assert_eq!(search_view.query_editor.read(cx).text(cx), "TWO");
assert_eq!(search_view.search_options, SearchOptions::CASE_SENSITIVE);
});
search_bar.update(cx, |search_bar, cx| {
search_bar.next_history_query(&NextHistoryQuery, cx);
});
search_view.update(cx, |search_view, cx| {
assert_eq!(search_view.query_editor.read(cx).text(cx), "THREE");
assert_eq!(search_view.search_options, SearchOptions::CASE_SENSITIVE);
});
search_bar.update(cx, |search_bar, cx| {
search_bar.next_history_query(&NextHistoryQuery, cx);
});
search_view.update(cx, |search_view, cx| {
assert_eq!(search_view.query_editor.read(cx).text(cx), "TWO_NEW");
assert_eq!(search_view.search_options, SearchOptions::CASE_SENSITIVE);
});
search_bar.update(cx, |search_bar, cx| {
search_bar.next_history_query(&NextHistoryQuery, cx);
});
search_view.update(cx, |search_view, cx| {
assert_eq!(search_view.query_editor.read(cx).text(cx), "");
assert_eq!(search_view.search_options, SearchOptions::CASE_SENSITIVE);
});
}
pub fn init_test(cx: &mut TestAppContext) {
cx.foreground().forbid_parking();
let fonts = cx.font_cache();
@@ -2045,7 +1423,6 @@ pub mod tests {
cx.update(|cx| {
cx.set_global(SettingsStore::test(cx));
cx.set_global(ActiveSearches::default());
settings::register::<SemanticIndexSettings>(cx);
theme::init((), cx);
cx.update_global::<SettingsStore, _, _>(|store, _| {

View File

@@ -3,7 +3,6 @@ pub use buffer_search::BufferSearchBar;
use gpui::{actions, Action, AppContext};
use project::search::SearchQuery;
pub use project_search::{ProjectSearchBar, ProjectSearchView};
use smallvec::SmallVec;
pub mod buffer_search;
pub mod project_search;
@@ -22,8 +21,6 @@ actions!(
SelectNextMatch,
SelectPrevMatch,
SelectAllMatches,
NextHistoryQuery,
PreviousHistoryQuery,
]
);
@@ -56,10 +53,6 @@ impl SearchOptions {
}
}
pub fn none() -> SearchOptions {
SearchOptions::NONE
}
pub fn from_query(query: &SearchQuery) -> SearchOptions {
let mut options = SearchOptions::NONE;
options.set(SearchOptions::WHOLE_WORD, query.whole_word());
@@ -68,187 +61,3 @@ impl SearchOptions {
options
}
}
const SEARCH_HISTORY_LIMIT: usize = 20;
#[derive(Default, Debug, Clone)]
pub struct SearchHistory {
history: SmallVec<[String; SEARCH_HISTORY_LIMIT]>,
selected: Option<usize>,
}
impl SearchHistory {
pub fn add(&mut self, search_string: String) {
if let Some(i) = self.selected {
if search_string == self.history[i] {
return;
}
}
if let Some(previously_searched) = self.history.last_mut() {
            if search_string.contains(previously_searched.as_str()) {
*previously_searched = search_string;
self.selected = Some(self.history.len() - 1);
return;
}
}
self.history.push(search_string);
if self.history.len() > SEARCH_HISTORY_LIMIT {
self.history.remove(0);
}
self.selected = Some(self.history.len() - 1);
}
pub fn next(&mut self) -> Option<&str> {
let history_size = self.history.len();
if history_size == 0 {
return None;
}
let selected = self.selected?;
if selected == history_size - 1 {
return None;
}
let next_index = selected + 1;
self.selected = Some(next_index);
Some(&self.history[next_index])
}
pub fn current(&self) -> Option<&str> {
Some(&self.history[self.selected?])
}
pub fn previous(&mut self) -> Option<&str> {
let history_size = self.history.len();
if history_size == 0 {
return None;
}
let prev_index = match self.selected {
Some(selected_index) => {
if selected_index == 0 {
return None;
} else {
selected_index - 1
}
}
None => history_size - 1,
};
self.selected = Some(prev_index);
Some(&self.history[prev_index])
}
pub fn reset_selection(&mut self) {
self.selected = None;
}
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_add() {
let mut search_history = SearchHistory::default();
assert_eq!(
search_history.current(),
None,
"No current selection should be set fo the default search history"
);
search_history.add("rust".to_string());
assert_eq!(
search_history.current(),
Some("rust"),
"Newly added item should be selected"
);
// check if duplicates are not added
search_history.add("rust".to_string());
assert_eq!(
search_history.history.len(),
1,
"Should not add a duplicate"
);
assert_eq!(search_history.current(), Some("rust"));
// check if new string containing the previous string replaces it
search_history.add("rustlang".to_string());
assert_eq!(
search_history.history.len(),
1,
"Should replace previous item if it's a substring"
);
assert_eq!(search_history.current(), Some("rustlang"));
// push enough items to test SEARCH_HISTORY_LIMIT
for i in 0..SEARCH_HISTORY_LIMIT * 2 {
search_history.add(format!("item{i}"));
}
assert!(search_history.history.len() <= SEARCH_HISTORY_LIMIT);
}
#[test]
fn test_next_and_previous() {
let mut search_history = SearchHistory::default();
assert_eq!(
search_history.next(),
None,
"Default search history should not have a next item"
);
search_history.add("Rust".to_string());
assert_eq!(search_history.next(), None);
search_history.add("JavaScript".to_string());
assert_eq!(search_history.next(), None);
search_history.add("TypeScript".to_string());
assert_eq!(search_history.next(), None);
assert_eq!(search_history.current(), Some("TypeScript"));
assert_eq!(search_history.previous(), Some("JavaScript"));
assert_eq!(search_history.current(), Some("JavaScript"));
assert_eq!(search_history.previous(), Some("Rust"));
assert_eq!(search_history.current(), Some("Rust"));
assert_eq!(search_history.previous(), None);
assert_eq!(search_history.current(), Some("Rust"));
assert_eq!(search_history.next(), Some("JavaScript"));
assert_eq!(search_history.current(), Some("JavaScript"));
assert_eq!(search_history.next(), Some("TypeScript"));
assert_eq!(search_history.current(), Some("TypeScript"));
assert_eq!(search_history.next(), None);
assert_eq!(search_history.current(), Some("TypeScript"));
}
#[test]
fn test_reset_selection() {
let mut search_history = SearchHistory::default();
search_history.add("Rust".to_string());
search_history.add("JavaScript".to_string());
search_history.add("TypeScript".to_string());
assert_eq!(search_history.current(), Some("TypeScript"));
search_history.reset_selection();
assert_eq!(search_history.current(), None);
assert_eq!(
search_history.previous(),
Some("TypeScript"),
"Should start from the end after reset on previous item query"
);
search_history.previous();
assert_eq!(search_history.current(), Some("JavaScript"));
search_history.previous();
assert_eq!(search_history.current(), Some("Rust"));
search_history.reset_selection();
assert_eq!(search_history.current(), None);
}
}

View File

@@ -1,321 +0,0 @@
use anyhow::{anyhow, Ok, Result};
use language::{Grammar, Language};
use std::{
cmp::{self, Reverse},
collections::HashSet,
ops::Range,
path::Path,
sync::Arc,
};
use tree_sitter::{Parser, QueryCursor};
#[derive(Debug, PartialEq, Clone)]
pub struct Document {
pub name: String,
pub range: Range<usize>,
pub content: String,
pub embedding: Vec<f32>,
}
const CODE_CONTEXT_TEMPLATE: &str =
"The below code snippet is from file '<path>'\n\n```<language>\n<item>\n```";
const ENTIRE_FILE_TEMPLATE: &str =
"The below snippet is from file '<path>'\n\n```<language>\n<item>\n```";
const MARKDOWN_CONTEXT_TEMPLATE: &str = "The below file contents is from file '<path>'\n\n<item>";
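// For illustration (hypothetical input, not from this repo): a Rust snippet
// from `src/main.rs` rendered through CODE_CONTEXT_TEMPLATE becomes:
//
//     The below code snippet is from file 'src/main.rs'
//
//     ```Rust
//     fn main() {}
//     ```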
pub const PARSEABLE_ENTIRE_FILE_TYPES: &[&str] =
&["TOML", "YAML", "CSS", "HEEX", "ERB", "SVELTE", "HTML"];
pub struct CodeContextRetriever {
pub parser: Parser,
pub cursor: QueryCursor,
}
// Every match has an item; this represents the fundamental treesitter symbol and anchors the search
// Every match has one or more 'name' captures. These indicate the display range of the item for deduplication.
// If there are preceding comments, we track this with a context capture
// If there is a piece that should be collapsed in hierarchical queries, we capture it with a collapse capture
// If there is a piece that should be kept inside a collapsed node, we capture it with a keep capture
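// For illustration only, a hypothetical embedding query for Rust functions
// (not the one shipped in the language config) could capture like this:
//
//     (function_item
//         name: (identifier) @name
//         body: (block) @collapse) @item
//
// Here `@item` anchors the snippet, `@name` deduplicates it, and `@collapse`
// marks the body to fold into a placeholder; `@context` and `@keep` captures
// would carry preceding comments and preserved pieces, respectively.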
#[derive(Debug, Clone)]
pub struct CodeContextMatch {
pub start_col: usize,
pub item_range: Option<Range<usize>>,
pub name_range: Option<Range<usize>>,
pub context_ranges: Vec<Range<usize>>,
pub collapse_ranges: Vec<Range<usize>>,
}
impl CodeContextRetriever {
pub fn new() -> Self {
Self {
parser: Parser::new(),
cursor: QueryCursor::new(),
}
}
fn parse_entire_file(
&self,
relative_path: &Path,
language_name: Arc<str>,
content: &str,
) -> Result<Vec<Document>> {
let document_span = ENTIRE_FILE_TEMPLATE
.replace("<path>", relative_path.to_string_lossy().as_ref())
.replace("<language>", language_name.as_ref())
.replace("<item>", &content);
Ok(vec![Document {
range: 0..content.len(),
content: document_span,
embedding: Vec::new(),
name: language_name.to_string(),
}])
}
fn parse_markdown_file(&self, relative_path: &Path, content: &str) -> Result<Vec<Document>> {
let document_span = MARKDOWN_CONTEXT_TEMPLATE
.replace("<path>", relative_path.to_string_lossy().as_ref())
.replace("<item>", &content);
Ok(vec![Document {
range: 0..content.len(),
content: document_span,
embedding: Vec::new(),
name: "Markdown".to_string(),
}])
}
fn get_matches_in_file(
&mut self,
content: &str,
grammar: &Arc<Grammar>,
) -> Result<Vec<CodeContextMatch>> {
let embedding_config = grammar
.embedding_config
.as_ref()
.ok_or_else(|| anyhow!("no embedding queries"))?;
self.parser.set_language(grammar.ts_language).unwrap();
let tree = self
.parser
.parse(&content, None)
.ok_or_else(|| anyhow!("parsing failed"))?;
let mut captures: Vec<CodeContextMatch> = Vec::new();
let mut collapse_ranges: Vec<Range<usize>> = Vec::new();
let mut keep_ranges: Vec<Range<usize>> = Vec::new();
for mat in self.cursor.matches(
&embedding_config.query,
tree.root_node(),
content.as_bytes(),
) {
let mut start_col = 0;
let mut item_range: Option<Range<usize>> = None;
let mut name_range: Option<Range<usize>> = None;
let mut context_ranges: Vec<Range<usize>> = Vec::new();
collapse_ranges.clear();
keep_ranges.clear();
for capture in mat.captures {
if capture.index == embedding_config.item_capture_ix {
item_range = Some(capture.node.byte_range());
start_col = capture.node.start_position().column;
} else if Some(capture.index) == embedding_config.name_capture_ix {
name_range = Some(capture.node.byte_range());
} else if Some(capture.index) == embedding_config.context_capture_ix {
context_ranges.push(capture.node.byte_range());
} else if Some(capture.index) == embedding_config.collapse_capture_ix {
collapse_ranges.push(capture.node.byte_range());
} else if Some(capture.index) == embedding_config.keep_capture_ix {
keep_ranges.push(capture.node.byte_range());
}
}
captures.push(CodeContextMatch {
start_col,
item_range,
name_range,
context_ranges,
collapse_ranges: subtract_ranges(&collapse_ranges, &keep_ranges),
});
}
Ok(captures)
}
pub fn parse_file_with_template(
&mut self,
relative_path: &Path,
content: &str,
language: Arc<Language>,
) -> Result<Vec<Document>> {
let language_name = language.name();
if PARSEABLE_ENTIRE_FILE_TYPES.contains(&language_name.as_ref()) {
return self.parse_entire_file(relative_path, language_name, &content);
} else if &language_name.to_string() == &"Markdown".to_string() {
return self.parse_markdown_file(relative_path, &content);
}
let mut documents = self.parse_file(content, language)?;
for document in &mut documents {
document.content = CODE_CONTEXT_TEMPLATE
.replace("<path>", relative_path.to_string_lossy().as_ref())
.replace("<language>", language_name.as_ref())
.replace("item", &document.content);
}
Ok(documents)
}
pub fn parse_file(&mut self, content: &str, language: Arc<Language>) -> Result<Vec<Document>> {
let grammar = language
.grammar()
.ok_or_else(|| anyhow!("no grammar for language"))?;
// Iterate through query matches
let matches = self.get_matches_in_file(content, grammar)?;
let language_scope = language.default_scope();
let placeholder = language_scope.collapsed_placeholder();
let mut documents = Vec::new();
let mut collapsed_ranges_within = Vec::new();
let mut parsed_name_ranges = HashSet::new();
for (i, context_match) in matches.iter().enumerate() {
// Items which are collapsible but not embeddable have no item range
let item_range = if let Some(item_range) = context_match.item_range.clone() {
item_range
} else {
continue;
};
// Checks for deduplication
let name;
if let Some(name_range) = context_match.name_range.clone() {
name = content
.get(name_range.clone())
.map_or(String::new(), |s| s.to_string());
if parsed_name_ranges.contains(&name_range) {
continue;
}
parsed_name_ranges.insert(name_range);
} else {
name = String::new();
}
collapsed_ranges_within.clear();
'outer: for remaining_match in &matches[(i + 1)..] {
for collapsed_range in &remaining_match.collapse_ranges {
if item_range.start <= collapsed_range.start
&& item_range.end >= collapsed_range.end
{
collapsed_ranges_within.push(collapsed_range.clone());
} else {
break 'outer;
}
}
}
collapsed_ranges_within.sort_by_key(|r| (r.start, Reverse(r.end)));
let mut document_content = String::new();
for context_range in &context_match.context_ranges {
add_content_from_range(
&mut document_content,
content,
context_range.clone(),
context_match.start_col,
);
document_content.push_str("\n");
}
let mut offset = item_range.start;
for collapsed_range in &collapsed_ranges_within {
if collapsed_range.start > offset {
add_content_from_range(
&mut document_content,
content,
offset..collapsed_range.start,
context_match.start_col,
);
offset = collapsed_range.start;
}
if collapsed_range.end > offset {
document_content.push_str(placeholder);
offset = collapsed_range.end;
}
}
if offset < item_range.end {
add_content_from_range(
&mut document_content,
content,
offset..item_range.end,
context_match.start_col,
);
}
documents.push(Document {
name,
content: document_content,
range: item_range.clone(),
embedding: vec![],
})
}
return Ok(documents);
}
}
pub(crate) fn subtract_ranges(
ranges: &[Range<usize>],
ranges_to_subtract: &[Range<usize>],
) -> Vec<Range<usize>> {
let mut result = Vec::new();
let mut ranges_to_subtract = ranges_to_subtract.iter().peekable();
for range in ranges {
let mut offset = range.start;
while offset < range.end {
if let Some(range_to_subtract) = ranges_to_subtract.peek() {
if offset < range_to_subtract.start {
let next_offset = cmp::min(range_to_subtract.start, range.end);
result.push(offset..next_offset);
offset = next_offset;
} else {
let next_offset = cmp::min(range_to_subtract.end, range.end);
offset = next_offset;
}
if offset >= range_to_subtract.end {
ranges_to_subtract.next();
}
} else {
result.push(offset..range.end);
offset = range.end;
}
}
}
result
}
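// Illustrative example: subtract_ranges(&[0..10], &[3..5]) yields [0..3, 5..10].
// Above, this cuts the keep-ranges out of the collapse-ranges so that kept
// pieces survive inside an otherwise collapsed region.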
fn add_content_from_range(
output: &mut String,
content: &str,
range: Range<usize>,
start_col: usize,
) {
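    // Dedents each line by up to `start_col` leading spaces so nested items read
    // as if written at the top level, then trims the trailing newline. E.g. with
    // `start_col == 4`, "        foo();" becomes "    foo();".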
for mut line in content.get(range.clone()).unwrap_or("").lines() {
for _ in 0..start_col {
if line.starts_with(' ') {
line = &line[1..];
} else {
break;
}
}
output.push_str(line);
output.push('\n');
}
output.pop();
}

View File

@@ -1,817 +0,0 @@
mod db;
mod embedding;
mod parsing;
pub mod semantic_index_settings;
#[cfg(test)]
mod semantic_index_tests;
use crate::semantic_index_settings::SemanticIndexSettings;
use anyhow::{anyhow, Result};
use db::VectorDatabase;
use embedding::{EmbeddingProvider, OpenAIEmbeddings};
use futures::{channel::oneshot, Future};
use gpui::{AppContext, AsyncAppContext, Entity, ModelContext, ModelHandle, Task, WeakModelHandle};
use language::{Anchor, Buffer, Language, LanguageRegistry};
use parking_lot::Mutex;
use parsing::{CodeContextRetriever, Document, PARSEABLE_ENTIRE_FILE_TYPES};
use postage::watch;
use project::{search::PathMatcher, Fs, Project, WorktreeId};
use smol::channel;
use std::{
cmp::Ordering,
collections::HashMap,
mem,
ops::Range,
path::{Path, PathBuf},
sync::{Arc, Weak},
time::{Instant, SystemTime},
};
use util::{
channel::{ReleaseChannel, RELEASE_CHANNEL, RELEASE_CHANNEL_NAME},
http::HttpClient,
paths::EMBEDDINGS_DIR,
ResultExt,
};
const SEMANTIC_INDEX_VERSION: usize = 6;
const EMBEDDINGS_BATCH_SIZE: usize = 80;
pub fn init(
fs: Arc<dyn Fs>,
http_client: Arc<dyn HttpClient>,
language_registry: Arc<LanguageRegistry>,
cx: &mut AppContext,
) {
settings::register::<SemanticIndexSettings>(cx);
let db_file_path = EMBEDDINGS_DIR
.join(Path::new(RELEASE_CHANNEL_NAME.as_str()))
.join("embeddings_db");
if *RELEASE_CHANNEL == ReleaseChannel::Stable
|| !settings::get::<SemanticIndexSettings>(cx).enabled
{
return;
}
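    // A minimal sketch of the user-settings entry this reads (assuming the
    // settings key is registered as "semantic_index"):
    //
    //     { "semantic_index": { "enabled": true } }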
cx.spawn(move |mut cx| async move {
let semantic_index = SemanticIndex::new(
fs,
db_file_path,
Arc::new(OpenAIEmbeddings {
client: http_client,
executor: cx.background(),
}),
language_registry,
cx.clone(),
)
.await?;
cx.update(|cx| {
cx.set_global(semantic_index.clone());
});
anyhow::Ok(())
})
.detach();
}
pub struct SemanticIndex {
fs: Arc<dyn Fs>,
database_url: Arc<PathBuf>,
embedding_provider: Arc<dyn EmbeddingProvider>,
language_registry: Arc<LanguageRegistry>,
db_update_tx: channel::Sender<DbOperation>,
parsing_files_tx: channel::Sender<PendingFile>,
_db_update_task: Task<()>,
_embed_batch_tasks: Vec<Task<()>>,
_batch_files_task: Task<()>,
_parsing_files_tasks: Vec<Task<()>>,
projects: HashMap<WeakModelHandle<Project>, ProjectState>,
}
struct ProjectState {
worktree_db_ids: Vec<(WorktreeId, i64)>,
outstanding_job_count_rx: watch::Receiver<usize>,
_outstanding_job_count_tx: Arc<Mutex<watch::Sender<usize>>>,
}
struct JobHandle {
tx: Weak<Mutex<watch::Sender<usize>>>,
}
impl ProjectState {
fn db_id_for_worktree_id(&self, id: WorktreeId) -> Option<i64> {
self.worktree_db_ids
.iter()
.find_map(|(worktree_id, db_id)| {
if *worktree_id == id {
Some(*db_id)
} else {
None
}
})
}
fn worktree_id_for_db_id(&self, id: i64) -> Option<WorktreeId> {
self.worktree_db_ids
.iter()
.find_map(|(worktree_id, db_id)| {
if *db_id == id {
Some(*worktree_id)
} else {
None
}
})
}
}
pub struct PendingFile {
worktree_db_id: i64,
relative_path: PathBuf,
absolute_path: PathBuf,
language: Arc<Language>,
modified_time: SystemTime,
job_handle: JobHandle,
}
pub struct SearchResult {
pub buffer: ModelHandle<Buffer>,
pub range: Range<Anchor>,
}
enum DbOperation {
InsertFile {
worktree_id: i64,
documents: Vec<Document>,
path: PathBuf,
mtime: SystemTime,
job_handle: JobHandle,
},
Delete {
worktree_id: i64,
path: PathBuf,
},
FindOrCreateWorktree {
path: PathBuf,
sender: oneshot::Sender<Result<i64>>,
},
FileMTimes {
worktree_id: i64,
sender: oneshot::Sender<Result<HashMap<PathBuf, SystemTime>>>,
},
WorktreePreviouslyIndexed {
path: Arc<Path>,
sender: oneshot::Sender<Result<bool>>,
},
}
enum EmbeddingJob {
Enqueue {
worktree_id: i64,
path: PathBuf,
mtime: SystemTime,
documents: Vec<Document>,
job_handle: JobHandle,
},
Flush,
}
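// Indexing pipeline, as wired up in `SemanticIndex::new` below:
//   parsing_files_tx -> worker tasks parse pending files into `Document`s
//   batch_files_tx   -> one task groups documents into embedding batches
//   embed_batch_tx   -> worker tasks fetch embeddings for each batch
//   db_update_tx     -> one task applies `DbOperation`s to the vector database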
impl SemanticIndex {
pub fn global(cx: &AppContext) -> Option<ModelHandle<SemanticIndex>> {
if cx.has_global::<ModelHandle<Self>>() {
Some(cx.global::<ModelHandle<SemanticIndex>>().clone())
} else {
None
}
}
pub fn enabled(cx: &AppContext) -> bool {
settings::get::<SemanticIndexSettings>(cx).enabled
&& *RELEASE_CHANNEL != ReleaseChannel::Stable
}
async fn new(
fs: Arc<dyn Fs>,
database_url: PathBuf,
embedding_provider: Arc<dyn EmbeddingProvider>,
language_registry: Arc<LanguageRegistry>,
mut cx: AsyncAppContext,
) -> Result<ModelHandle<Self>> {
let t0 = Instant::now();
let database_url = Arc::new(database_url);
let db = cx
.background()
.spawn(VectorDatabase::new(fs.clone(), database_url.clone()))
.await?;
log::trace!(
"db initialization took {:?} milliseconds",
t0.elapsed().as_millis()
);
Ok(cx.add_model(|cx| {
let t0 = Instant::now();
// Perform database operations
let (db_update_tx, db_update_rx) = channel::unbounded();
let _db_update_task = cx.background().spawn({
async move {
while let Ok(job) = db_update_rx.recv().await {
Self::run_db_operation(&db, job)
}
}
});
            // Compute embeddings for each batch of documents and hand the results to the database writer.
let (embed_batch_tx, embed_batch_rx) =
channel::unbounded::<Vec<(i64, Vec<Document>, PathBuf, SystemTime, JobHandle)>>();
let mut _embed_batch_tasks = Vec::new();
for _ in 0..cx.background().num_cpus() {
let embed_batch_rx = embed_batch_rx.clone();
_embed_batch_tasks.push(cx.background().spawn({
let db_update_tx = db_update_tx.clone();
let embedding_provider = embedding_provider.clone();
async move {
while let Ok(embeddings_queue) = embed_batch_rx.recv().await {
Self::compute_embeddings_for_batch(
embeddings_queue,
&embedding_provider,
&db_update_tx,
)
.await;
}
}
}));
}
// Group documents into batches and send them to the embedding provider.
let (batch_files_tx, batch_files_rx) = channel::unbounded::<EmbeddingJob>();
let _batch_files_task = cx.background().spawn(async move {
let mut queue_len = 0;
let mut embeddings_queue = vec![];
while let Ok(job) = batch_files_rx.recv().await {
Self::enqueue_documents_to_embed(
job,
&mut queue_len,
&mut embeddings_queue,
&embed_batch_tx,
);
}
});
// Parse files into embeddable documents.
let (parsing_files_tx, parsing_files_rx) = channel::unbounded::<PendingFile>();
let mut _parsing_files_tasks = Vec::new();
for _ in 0..cx.background().num_cpus() {
let fs = fs.clone();
let parsing_files_rx = parsing_files_rx.clone();
let batch_files_tx = batch_files_tx.clone();
let db_update_tx = db_update_tx.clone();
_parsing_files_tasks.push(cx.background().spawn(async move {
let mut retriever = CodeContextRetriever::new();
while let Ok(pending_file) = parsing_files_rx.recv().await {
Self::parse_file(
&fs,
pending_file,
&mut retriever,
&batch_files_tx,
&parsing_files_rx,
&db_update_tx,
)
.await;
}
}));
}
log::trace!(
"semantic index task initialization took {:?} milliseconds",
t0.elapsed().as_millis()
);
Self {
fs,
database_url,
embedding_provider,
language_registry,
db_update_tx,
parsing_files_tx,
_db_update_task,
_embed_batch_tasks,
_batch_files_task,
_parsing_files_tasks,
projects: HashMap::new(),
}
}))
}
fn run_db_operation(db: &VectorDatabase, job: DbOperation) {
match job {
DbOperation::InsertFile {
worktree_id,
documents,
path,
mtime,
job_handle,
} => {
db.insert_file(worktree_id, path, mtime, documents)
.log_err();
drop(job_handle)
}
DbOperation::Delete { worktree_id, path } => {
db.delete_file(worktree_id, path).log_err();
}
DbOperation::FindOrCreateWorktree { path, sender } => {
let id = db.find_or_create_worktree(&path);
sender.send(id).ok();
}
DbOperation::FileMTimes {
worktree_id: worktree_db_id,
sender,
} => {
let file_mtimes = db.get_file_mtimes(worktree_db_id);
sender.send(file_mtimes).ok();
}
DbOperation::WorktreePreviouslyIndexed { path, sender } => {
let worktree_indexed = db.worktree_previously_indexed(path.as_ref());
sender.send(worktree_indexed).ok();
}
}
}
async fn compute_embeddings_for_batch(
mut embeddings_queue: Vec<(i64, Vec<Document>, PathBuf, SystemTime, JobHandle)>,
embedding_provider: &Arc<dyn EmbeddingProvider>,
db_update_tx: &channel::Sender<DbOperation>,
) {
let mut batch_documents = vec![];
for (_, documents, _, _, _) in embeddings_queue.iter() {
batch_documents.extend(documents.iter().map(|document| document.content.as_str()));
}
if let Ok(embeddings) = embedding_provider.embed_batch(batch_documents).await {
log::trace!(
"created {} embeddings for {} files",
embeddings.len(),
embeddings_queue.len(),
);
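            // The provider returns one flat list of embeddings for the whole
            // batch; `i` walks the queued files and `j` the documents within
            // each file, re-associating every embedding with its document.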
let mut i = 0;
let mut j = 0;
for embedding in embeddings.iter() {
while embeddings_queue[i].1.len() == j {
i += 1;
j = 0;
}
embeddings_queue[i].1[j].embedding = embedding.to_owned();
j += 1;
}
for (worktree_id, documents, path, mtime, job_handle) in embeddings_queue.into_iter() {
db_update_tx
.send(DbOperation::InsertFile {
worktree_id,
documents,
path,
mtime,
job_handle,
})
.await
.unwrap();
}
}
}
fn enqueue_documents_to_embed(
job: EmbeddingJob,
queue_len: &mut usize,
embeddings_queue: &mut Vec<(i64, Vec<Document>, PathBuf, SystemTime, JobHandle)>,
embed_batch_tx: &channel::Sender<Vec<(i64, Vec<Document>, PathBuf, SystemTime, JobHandle)>>,
) {
let should_flush = match job {
EmbeddingJob::Enqueue {
documents,
worktree_id,
path,
mtime,
job_handle,
} => {
*queue_len += &documents.len();
embeddings_queue.push((worktree_id, documents, path, mtime, job_handle));
*queue_len >= EMBEDDINGS_BATCH_SIZE
}
EmbeddingJob::Flush => true,
};
if should_flush {
embed_batch_tx
.try_send(mem::take(embeddings_queue))
.unwrap();
*queue_len = 0;
}
}
async fn parse_file(
fs: &Arc<dyn Fs>,
pending_file: PendingFile,
retriever: &mut CodeContextRetriever,
batch_files_tx: &channel::Sender<EmbeddingJob>,
parsing_files_rx: &channel::Receiver<PendingFile>,
db_update_tx: &channel::Sender<DbOperation>,
) {
if let Some(content) = fs.load(&pending_file.absolute_path).await.log_err() {
if let Some(documents) = retriever
.parse_file_with_template(
&pending_file.relative_path,
&content,
pending_file.language,
)
.log_err()
{
log::trace!(
"parsed path {:?}: {} documents",
pending_file.relative_path,
documents.len()
);
                if documents.is_empty() {
db_update_tx
.send(DbOperation::InsertFile {
worktree_id: pending_file.worktree_db_id,
documents,
path: pending_file.relative_path,
mtime: pending_file.modified_time,
job_handle: pending_file.job_handle,
})
.await
.unwrap();
} else {
batch_files_tx
.try_send(EmbeddingJob::Enqueue {
worktree_id: pending_file.worktree_db_id,
path: pending_file.relative_path,
mtime: pending_file.modified_time,
job_handle: pending_file.job_handle,
documents,
})
.unwrap();
}
}
}
        if parsing_files_rx.is_empty() {
batch_files_tx.try_send(EmbeddingJob::Flush).unwrap();
}
}
fn find_or_create_worktree(&self, path: PathBuf) -> impl Future<Output = Result<i64>> {
let (tx, rx) = oneshot::channel();
self.db_update_tx
.try_send(DbOperation::FindOrCreateWorktree { path, sender: tx })
.unwrap();
async move { rx.await? }
}
fn get_file_mtimes(
&self,
worktree_id: i64,
) -> impl Future<Output = Result<HashMap<PathBuf, SystemTime>>> {
let (tx, rx) = oneshot::channel();
self.db_update_tx
.try_send(DbOperation::FileMTimes {
worktree_id,
sender: tx,
})
.unwrap();
async move { rx.await? }
}
fn worktree_previously_indexed(&self, path: Arc<Path>) -> impl Future<Output = Result<bool>> {
let (tx, rx) = oneshot::channel();
self.db_update_tx
.try_send(DbOperation::WorktreePreviouslyIndexed { path, sender: tx })
.unwrap();
async move { rx.await? }
}
pub fn project_previously_indexed(
&mut self,
project: ModelHandle<Project>,
cx: &mut ModelContext<Self>,
) -> Task<Result<bool>> {
let worktree_scans_complete = project
.read(cx)
.worktrees(cx)
.map(|worktree| {
let scan_complete = worktree.read(cx).as_local().unwrap().scan_complete();
async move {
scan_complete.await;
}
})
.collect::<Vec<_>>();
let worktrees_indexed_previously = project
.read(cx)
.worktrees(cx)
.map(|worktree| self.worktree_previously_indexed(worktree.read(cx).abs_path()))
.collect::<Vec<_>>();
cx.spawn(|_, _cx| async move {
futures::future::join_all(worktree_scans_complete).await;
let worktree_indexed_previously =
futures::future::join_all(worktrees_indexed_previously).await;
Ok(worktree_indexed_previously
.iter()
.filter(|worktree| worktree.is_ok())
.all(|v| v.as_ref().log_err().is_some_and(|v| v.to_owned())))
})
}
pub fn index_project(
&mut self,
project: ModelHandle<Project>,
cx: &mut ModelContext<Self>,
) -> Task<Result<(usize, watch::Receiver<usize>)>> {
let t0 = Instant::now();
let worktree_scans_complete = project
.read(cx)
.worktrees(cx)
.map(|worktree| {
let scan_complete = worktree.read(cx).as_local().unwrap().scan_complete();
async move {
scan_complete.await;
}
})
.collect::<Vec<_>>();
let worktree_db_ids = project
.read(cx)
.worktrees(cx)
.map(|worktree| {
self.find_or_create_worktree(worktree.read(cx).abs_path().to_path_buf())
})
.collect::<Vec<_>>();
let language_registry = self.language_registry.clone();
let db_update_tx = self.db_update_tx.clone();
let parsing_files_tx = self.parsing_files_tx.clone();
cx.spawn(|this, mut cx| async move {
futures::future::join_all(worktree_scans_complete).await;
let worktree_db_ids = futures::future::join_all(worktree_db_ids).await;
let worktrees = project.read_with(&cx, |project, cx| {
project
.worktrees(cx)
.map(|worktree| worktree.read(cx).snapshot())
.collect::<Vec<_>>()
});
let mut worktree_file_mtimes = HashMap::new();
let mut db_ids_by_worktree_id = HashMap::new();
for (worktree, db_id) in worktrees.iter().zip(worktree_db_ids) {
let db_id = db_id?;
db_ids_by_worktree_id.insert(worktree.id(), db_id);
worktree_file_mtimes.insert(
worktree.id(),
this.read_with(&cx, |this, _| this.get_file_mtimes(db_id))
.await?,
);
}
let (job_count_tx, job_count_rx) = watch::channel_with(0);
let job_count_tx = Arc::new(Mutex::new(job_count_tx));
this.update(&mut cx, |this, _| {
this.projects.insert(
project.downgrade(),
ProjectState {
worktree_db_ids: db_ids_by_worktree_id
.iter()
.map(|(a, b)| (*a, *b))
.collect(),
outstanding_job_count_rx: job_count_rx.clone(),
_outstanding_job_count_tx: job_count_tx.clone(),
},
);
});
cx.background()
.spawn(async move {
let mut count = 0;
for worktree in worktrees.into_iter() {
let mut file_mtimes = worktree_file_mtimes.remove(&worktree.id()).unwrap();
for file in worktree.files(false, 0) {
let absolute_path = worktree.absolutize(&file.path);
if let Ok(language) = language_registry
.language_for_file(&absolute_path, None)
.await
{
if !PARSEABLE_ENTIRE_FILE_TYPES.contains(&language.name().as_ref())
&& &language.name().as_ref() != &"Markdown"
&& language
.grammar()
.and_then(|grammar| grammar.embedding_config.as_ref())
.is_none()
{
continue;
}
let path_buf = file.path.to_path_buf();
let stored_mtime = file_mtimes.remove(&file.path.to_path_buf());
let already_stored = stored_mtime
.map_or(false, |existing_mtime| existing_mtime == file.mtime);
if !already_stored {
count += 1;
*job_count_tx.lock().borrow_mut() += 1;
let job_handle = JobHandle {
tx: Arc::downgrade(&job_count_tx),
};
parsing_files_tx
.try_send(PendingFile {
worktree_db_id: db_ids_by_worktree_id[&worktree.id()],
relative_path: path_buf,
absolute_path,
language,
job_handle,
modified_time: file.mtime,
})
.unwrap();
}
}
}
for file in file_mtimes.keys() {
db_update_tx
.try_send(DbOperation::Delete {
worktree_id: db_ids_by_worktree_id[&worktree.id()],
path: file.to_owned(),
})
.unwrap();
}
}
log::trace!(
"walking worktree took {:?} milliseconds",
t0.elapsed().as_millis()
);
anyhow::Ok((count, job_count_rx))
})
.await
})
}
pub fn outstanding_job_count_rx(
&self,
project: &ModelHandle<Project>,
) -> Option<watch::Receiver<usize>> {
Some(
self.projects
.get(&project.downgrade())?
.outstanding_job_count_rx
.clone(),
)
}
pub fn search_project(
&mut self,
project: ModelHandle<Project>,
phrase: String,
limit: usize,
includes: Vec<PathMatcher>,
excludes: Vec<PathMatcher>,
cx: &mut ModelContext<Self>,
) -> Task<Result<Vec<SearchResult>>> {
let project_state = if let Some(state) = self.projects.get(&project.downgrade()) {
state
} else {
return Task::ready(Err(anyhow!("project not added")));
};
let worktree_db_ids = project
.read(cx)
.worktrees(cx)
.filter_map(|worktree| {
let worktree_id = worktree.read(cx).id();
project_state.db_id_for_worktree_id(worktree_id)
})
.collect::<Vec<_>>();
let embedding_provider = self.embedding_provider.clone();
let database_url = self.database_url.clone();
let fs = self.fs.clone();
cx.spawn(|this, mut cx| async move {
let database = VectorDatabase::new(fs.clone(), database_url.clone()).await?;
let phrase_embedding = embedding_provider
.embed_batch(vec![&phrase])
.await?
.into_iter()
.next()
.unwrap();
let file_ids =
database.retrieve_included_file_ids(&worktree_db_ids, &includes, &excludes)?;
let batch_n = cx.background().num_cpus();
            let ids_len = file_ids.len();
let batch_size = if ids_len <= batch_n {
ids_len
} else {
ids_len / batch_n
};
let mut result_tasks = Vec::new();
for batch in file_ids.chunks(batch_size) {
                let batch = batch.to_vec();
let limit = limit.clone();
let fs = fs.clone();
let database_url = database_url.clone();
let phrase_embedding = phrase_embedding.clone();
let task = cx.background().spawn(async move {
let database = VectorDatabase::new(fs, database_url).await.log_err();
if database.is_none() {
return Err(anyhow!("failed to acquire database connection"));
} else {
database
.unwrap()
.top_k_search(&phrase_embedding, limit, batch.as_slice())
}
});
result_tasks.push(task);
}
let batch_results = futures::future::join_all(result_tasks).await;
let mut results = Vec::new();
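            // Merge the per-batch results while keeping `results` sorted by
            // descending similarity; truncating to `limit` after every insert
            // retains only the best matches seen so far.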
for batch_result in batch_results {
if batch_result.is_ok() {
for (id, similarity) in batch_result.unwrap() {
let ix = match results.binary_search_by(|(_, s)| {
similarity.partial_cmp(&s).unwrap_or(Ordering::Equal)
}) {
Ok(ix) => ix,
Err(ix) => ix,
};
results.insert(ix, (id, similarity));
results.truncate(limit);
}
}
}
let ids = results.into_iter().map(|(id, _)| id).collect::<Vec<i64>>();
let documents = database.get_documents_by_ids(ids.as_slice())?;
let mut tasks = Vec::new();
let mut ranges = Vec::new();
let weak_project = project.downgrade();
project.update(&mut cx, |project, cx| {
for (worktree_db_id, file_path, byte_range) in documents {
let project_state =
if let Some(state) = this.read(cx).projects.get(&weak_project) {
state
} else {
return Err(anyhow!("project not added"));
};
if let Some(worktree_id) = project_state.worktree_id_for_db_id(worktree_db_id) {
tasks.push(project.open_buffer((worktree_id, file_path), cx));
ranges.push(byte_range);
}
}
Ok(())
})?;
let buffers = futures::future::join_all(tasks).await;
Ok(buffers
.into_iter()
.zip(ranges)
.filter_map(|(buffer, range)| {
let buffer = buffer.log_err()?;
let range = buffer.read_with(&cx, |buffer, _| {
buffer.anchor_before(range.start)..buffer.anchor_after(range.end)
});
Some(SearchResult { buffer, range })
})
.collect::<Vec<_>>())
})
}
}
impl Entity for SemanticIndex {
type Event = ();
}
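// Each `JobHandle` weakly references the outstanding-job counter for its
// project; dropping the handle (when a file finishes indexing, or its job is
// abandoned) decrements the count observed through the watch channel.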
impl Drop for JobHandle {
fn drop(&mut self) {
if let Some(tx) = self.tx.upgrade() {
let mut tx = tx.lock();
*tx.borrow_mut() -= 1;
}
}
}

File diff suppressed because it is too large

View File

@@ -202,7 +202,7 @@ where
self.position = D::default();
}
let entry = self.stack.last_mut().unwrap();
let mut entry = self.stack.last_mut().unwrap();
if !descending {
if entry.index == 0 {
self.stack.pop();
@@ -438,7 +438,6 @@ where
} => {
if ascending {
entry.index += 1;
entry.position = self.position.clone();
}
for (child_tree, child_summary) in child_trees[entry.index..]

View File

@@ -738,7 +738,7 @@ mod tests {
for _ in 0..num_operations {
let splice_end = rng.gen_range(0..tree.extent::<Count>(&()).0 + 1);
let splice_start = rng.gen_range(0..splice_end + 1);
let count = rng.gen_range(0..10);
let count = rng.gen_range(0..3);
let tree_end = tree.extent::<Count>(&());
let new_items = rng
.sample_iter(distributions::Standard)
@@ -805,12 +805,10 @@ mod tests {
}
assert_eq!(filter_cursor.item(), None);
let mut pos = rng.gen_range(0..tree.extent::<Count>(&()).0 + 1);
let mut before_start = false;
let mut cursor = tree.cursor::<Count>();
let start_pos = rng.gen_range(0..=reference_items.len());
cursor.seek(&Count(start_pos), Bias::Right, &());
let mut pos = rng.gen_range(start_pos..=reference_items.len());
cursor.seek_forward(&Count(pos), Bias::Right, &());
cursor.seek(&Count(pos), Bias::Right, &());
for i in 0..10 {
assert_eq!(cursor.start().0, pos);

View File

@@ -16,7 +16,7 @@ db = { path = "../db" }
theme = { path = "../theme" }
util = { path = "../util" }
alacritty_terminal = { git = "https://github.com/alacritty/alacritty", rev = "7b9f32300ee0a249c0872302c97635b460e45ba5" }
alacritty_terminal = { git = "https://github.com/zed-industries/alacritty", rev = "a51dbe25d67e84d6ed4261e640d3954fbdd9be45" }
procinfo = { git = "https://github.com/zed-industries/wezterm", rev = "5cd757e5f2eb039ed0c6bb6512223e69d5efc64d", default-features = false }
smallvec.workspace = true
smol.workspace = true

View File

@@ -114,7 +114,11 @@ fn rgb_for_index(i: &u8) -> (u8, u8, u8) {
//Convenience method to convert from a GPUI color to an alacritty Rgb
pub fn to_alac_rgb(color: Color) -> AlacRgb {
    AlacRgb::new(color.r, color.g, color.b)
AlacRgb {
r: color.r,
g: color.g,
        b: color.b,
}
}
#[cfg(test)]

View File

@@ -73,12 +73,10 @@ const DEBUG_CELL_WIDTH: f32 = 5.;
const DEBUG_LINE_HEIGHT: f32 = 5.;
lazy_static! {
    // Regex copied from alacritty's ui_config.rs, with its declaration slightly modified:
// * avoid Rust-specific escaping.
// * use more strict regex for `file://` protocol matching: original regex has `file:` inside, but we want to avoid matching `some::file::module` strings.
// Regex Copied from alacritty's ui_config.rs
static ref URL_REGEX: RegexSearch = RegexSearch::new(r#"(ipfs:|ipns:|magnet:|mailto:|gemini://|gopher://|https://|http://|news:|file://|git://|ssh:|ftp://)[^\u{0000}-\u{001F}\u{007F}-\u{009F}<>"\s{-}\^⟨⟩`]+"#).unwrap();
static ref WORD_REGEX: RegexSearch = RegexSearch::new(r#"[\w.:/@\-~]+"#).unwrap();
static ref WORD_REGEX: RegexSearch = RegexSearch::new("[\\w.:/@-]+").unwrap();
}
///Upward flowing events, for changing the title and such

View File

@@ -411,10 +411,6 @@ impl TerminalElement {
})
// Update drag selections
.on_drag(MouseButton::Left, move |event, _: &mut TerminalView, cx| {
if event.end {
return;
}
if cx.is_self_focused() {
if let Some(conn_handle) = connection.upgrade(cx) {
conn_handle.update(cx, |terminal, cx| {

View File

@@ -187,56 +187,37 @@ impl TerminalView {
}
let potential_abs_paths = possible_open_targets(&workspace, maybe_path, cx);
if let Some(path) = potential_abs_paths.into_iter().next() {
let is_dir = path.path_like.is_dir();
let visible = path.path_like.is_dir();
let task_workspace = workspace.clone();
cx.spawn(|_, mut cx| async move {
let opened_items = task_workspace
let opened_item = task_workspace
.update(&mut cx, |workspace, cx| {
workspace.open_paths(vec![path.path_like], is_dir, cx)
workspace.open_abs_path(path.path_like, visible, cx)
})
.context("workspace update")?
.await;
anyhow::ensure!(
opened_items.len() == 1,
"For a single path open, expected single opened item"
);
let opened_item = opened_items
.into_iter()
.next()
.unwrap()
.transpose()
.context("path open")?;
if is_dir {
task_workspace.update(&mut cx, |workspace, cx| {
workspace.project().update(cx, |_, cx| {
cx.emit(project::Event::ActivateProjectPanel);
})
})?;
} else {
if let Some(row) = path.row {
let col = path.column.unwrap_or(0);
if let Some(active_editor) =
opened_item.and_then(|item| item.downcast::<Editor>())
{
active_editor
.downgrade()
.update(&mut cx, |editor, cx| {
let snapshot = editor.snapshot(cx).display_snapshot;
let point = snapshot.buffer_snapshot.clip_point(
language::Point::new(
row.saturating_sub(1),
col.saturating_sub(1),
),
Bias::Left,
);
editor.change_selections(
Some(Autoscroll::center()),
cx,
|s| s.select_ranges([point..point]),
);
})
.log_err();
}
.await
.context("workspace update")?;
if let Some(row) = path.row {
let col = path.column.unwrap_or(0);
if let Some(active_editor) = opened_item.downcast::<Editor>() {
active_editor
.downgrade()
.update(&mut cx, |editor, cx| {
let snapshot = editor.snapshot(cx).display_snapshot;
let point = snapshot.buffer_snapshot.clip_point(
language::Point::new(
row.saturating_sub(1),
col.saturating_sub(1),
),
Bias::Left,
);
editor.change_selections(
Some(Autoscroll::center()),
cx,
|s| s.select_ranges([point..point]),
);
})
.log_err();
}
}
anyhow::Ok(())
@@ -444,16 +425,6 @@ fn possible_open_targets(
let maybe_path = path_like.path_like;
let potential_abs_paths = if maybe_path.is_absolute() {
vec![maybe_path]
} else if maybe_path.starts_with("~") {
if let Some(abs_path) = maybe_path
.strip_prefix("~")
.ok()
.and_then(|maybe_path| Some(dirs::home_dir()?.join(maybe_path)))
{
vec![abs_path]
} else {
Vec::new()
}
} else if let Some(workspace) = workspace.upgrade(cx) {
workspace.update(cx, |workspace, cx| {
workspace

View File

@@ -402,7 +402,6 @@ pub struct StatusBar {
pub height: f32,
pub item_spacing: f32,
pub cursor_position: TextStyle,
pub vim_mode_indicator: ContainedText,
pub active_language: Interactive<ContainedText>,
pub auto_update_progress_message: TextStyle,
pub auto_update_done_message: TextStyle,
@@ -692,8 +691,6 @@ pub struct Editor {
pub document_highlight_read_background: Color,
pub document_highlight_write_background: Color,
pub diff: DiffStyle,
pub wrap_guide: Color,
pub active_wrap_guide: Color,
pub line_number: Color,
pub line_number_active: Color,
pub guest_selections: Vec<SelectionStyle>,

View File

@@ -1,11 +1,11 @@
[package]
name = "semantic_index"
name = "vector_store"
version = "0.1.0"
edition = "2021"
publish = false
[lib]
path = "src/semantic_index.rs"
path = "src/vector_store.rs"
doctest = false
[dependencies]
@@ -20,7 +20,6 @@ editor = { path = "../editor" }
rpc = { path = "../rpc" }
settings = { path = "../settings" }
anyhow.workspace = true
postage.workspace = true
futures.workspace = true
smol.workspace = true
rusqlite = { version = "0.27.0", features = ["blob", "array", "modern_sqlite"] }
@@ -34,10 +33,8 @@ async-trait.workspace = true
bincode = "1.3.3"
matrixmultiply = "0.3.7"
tiktoken-rs = "0.5.0"
parking_lot.workspace = true
rand.workspace = true
schemars.workspace = true
globset.workspace = true
[dev-dependencies]
gpui = { path = "../gpui", features = ["test-support"] }
@@ -46,20 +43,7 @@ project = { path = "../project", features = ["test-support"] }
rpc = { path = "../rpc", features = ["test-support"] }
workspace = { path = "../workspace", features = ["test-support"] }
settings = { path = "../settings", features = ["test-support"]}
pretty_assertions.workspace = true
tree-sitter-rust = "*"
rand.workspace = true
unindent.workspace = true
tempdir.workspace = true
ctor.workspace = true
env_logger.workspace = true
tree-sitter-typescript.workspace = true
tree-sitter-json.workspace = true
tree-sitter-rust.workspace = true
tree-sitter-toml.workspace = true
tree-sitter-cpp.workspace = true
tree-sitter-elixir.workspace = true
tree-sitter-lua.workspace = true
tree-sitter-ruby.workspace = true
tree-sitter-php.workspace = true

View File

@@ -1,20 +1,20 @@
use crate::{parsing::Document, SEMANTIC_INDEX_VERSION};
use anyhow::{anyhow, Context, Result};
use project::{search::PathMatcher, Fs};
use std::{
cmp::Ordering,
collections::HashMap,
path::{Path, PathBuf},
rc::Rc,
time::SystemTime,
};
use anyhow::{anyhow, Result};
use crate::parsing::ParsedFile;
use crate::VECTOR_STORE_VERSION;
use rpc::proto::Timestamp;
use rusqlite::{
params,
types::{FromSql, FromSqlResult, ValueRef},
};
use std::{
cmp::Ordering,
collections::HashMap,
ops::Range,
path::{Path, PathBuf},
rc::Rc,
sync::Arc,
time::SystemTime,
};
#[derive(Debug)]
pub struct FileRecord {
@@ -42,94 +42,48 @@ pub struct VectorDatabase {
}
impl VectorDatabase {
pub async fn new(fs: Arc<dyn Fs>, path: Arc<PathBuf>) -> Result<Self> {
if let Some(db_directory) = path.parent() {
fs.create_dir(db_directory).await?;
}
pub fn new(path: String) -> Result<Self> {
let this = Self {
db: rusqlite::Connection::open(path.as_path())?,
db: rusqlite::Connection::open(path)?,
};
this.initialize_database()?;
Ok(this)
}
fn get_existing_version(&self) -> Result<i64> {
let mut version_query = self
.db
.prepare("SELECT version from semantic_index_config")?;
version_query
.query_row([], |row| Ok(row.get::<_, i64>(0)?))
.map_err(|err| anyhow!("version query failed: {err}"))
}
fn initialize_database(&self) -> Result<()> {
rusqlite::vtab::array::load_module(&self.db)?;
// Delete existing tables, if SEMANTIC_INDEX_VERSION is bumped
if self
.get_existing_version()
.map_or(false, |version| version == SEMANTIC_INDEX_VERSION as i64)
{
log::trace!("vector database schema up to date");
return Ok(());
}
log::trace!("vector database schema out of date. updating...");
self.db
.execute("DROP TABLE IF EXISTS documents", [])
.context("failed to drop 'documents' table")?;
self.db
.execute("DROP TABLE IF EXISTS files", [])
.context("failed to drop 'files' table")?;
self.db
.execute("DROP TABLE IF EXISTS worktrees", [])
.context("failed to drop 'worktrees' table")?;
self.db
.execute("DROP TABLE IF EXISTS semantic_index_config", [])
.context("failed to drop 'semantic_index_config' table")?;
        // This will create the database if it doesn't exist
        // Initialize vector database tables
self.db.execute(
"CREATE TABLE semantic_index_config (
version INTEGER NOT NULL
)",
[],
)?;
self.db.execute(
"INSERT INTO semantic_index_config (version) VALUES (?1)",
params![SEMANTIC_INDEX_VERSION],
)?;
self.db.execute(
"CREATE TABLE worktrees (
"CREATE TABLE IF NOT EXISTS worktrees (
id INTEGER PRIMARY KEY AUTOINCREMENT,
absolute_path VARCHAR NOT NULL
);
CREATE UNIQUE INDEX worktrees_absolute_path ON worktrees (absolute_path);
CREATE UNIQUE INDEX IF NOT EXISTS worktrees_absolute_path ON worktrees (absolute_path);
",
[],
)?;
self.db.execute(
"CREATE TABLE files (
"CREATE TABLE IF NOT EXISTS files (
id INTEGER PRIMARY KEY AUTOINCREMENT,
worktree_id INTEGER NOT NULL,
relative_path VARCHAR NOT NULL,
mtime_seconds INTEGER NOT NULL,
mtime_nanos INTEGER NOT NULL,
vector_store_version INTEGER NOT NULL,
FOREIGN KEY(worktree_id) REFERENCES worktrees(id) ON DELETE CASCADE
)",
[],
)?;
self.db.execute(
"CREATE TABLE documents (
"CREATE TABLE IF NOT EXISTS documents (
id INTEGER PRIMARY KEY AUTOINCREMENT,
file_id INTEGER NOT NULL,
start_byte INTEGER NOT NULL,
end_byte INTEGER NOT NULL,
offset INTEGER NOT NULL,
name VARCHAR NOT NULL,
embedding BLOB NOT NULL,
FOREIGN KEY(file_id) REFERENCES files(id) ON DELETE CASCADE
@@ -137,7 +91,6 @@ impl VectorDatabase {
[],
)?;
log::trace!("vector database initialized with updated schema.");
Ok(())
}
@@ -149,44 +102,43 @@ impl VectorDatabase {
Ok(())
}
pub fn insert_file(
&self,
worktree_id: i64,
path: PathBuf,
mtime: SystemTime,
documents: Vec<Document>,
) -> Result<()> {
pub fn insert_file(&self, worktree_id: i64, indexed_file: ParsedFile) -> Result<()> {
// Write to files table, and return generated id.
self.db.execute(
"
DELETE FROM files WHERE worktree_id = ?1 AND relative_path = ?2;
",
params![worktree_id, path.to_str()],
params![worktree_id, indexed_file.path.to_str()],
)?;
let mtime = Timestamp::from(mtime);
let mtime = Timestamp::from(indexed_file.mtime);
self.db.execute(
"
INSERT INTO files
(worktree_id, relative_path, mtime_seconds, mtime_nanos)
(worktree_id, relative_path, mtime_seconds, mtime_nanos, vector_store_version)
VALUES
(?1, ?2, $3, $4);
(?1, ?2, $3, $4, $5);
",
params![worktree_id, path.to_str(), mtime.seconds, mtime.nanos],
params![
worktree_id,
indexed_file.path.to_str(),
mtime.seconds,
mtime.nanos,
VECTOR_STORE_VERSION
],
)?;
let file_id = self.db.last_insert_rowid();
// Currently inserting at approximately 3400 documents a second
// I imagine we can speed this up with a bulk insert of some kind.
for document in documents {
for document in indexed_file.documents {
let embedding_blob = bincode::serialize(&document.embedding)?;
self.db.execute(
"INSERT INTO documents (file_id, start_byte, end_byte, name, embedding) VALUES (?1, ?2, ?3, ?4, $5)",
"INSERT INTO documents (file_id, offset, name, embedding) VALUES (?1, ?2, ?3, ?4)",
params![
file_id,
document.range.start.to_string(),
document.range.end.to_string(),
document.offset.to_string(),
document.name,
embedding_blob
],
@@ -196,23 +148,6 @@ impl VectorDatabase {
Ok(())
}
pub fn worktree_previously_indexed(&self, worktree_root_path: &Path) -> Result<bool> {
let mut worktree_query = self
.db
.prepare("SELECT id FROM worktrees WHERE absolute_path = ?1")?;
let worktree_id = worktree_query
.query_row(params![worktree_root_path.to_string_lossy()], |row| {
Ok(row.get::<_, i64>(0)?)
})
.map_err(|err| anyhow!(err));
if worktree_id.is_ok() {
return Ok(true);
} else {
return Ok(false);
}
}
pub fn find_or_create_worktree(&self, worktree_root_path: &Path) -> Result<i64> {
        // Check whether the absolute path already exists in the worktrees table
let mut worktree_query = self
@@ -266,12 +201,12 @@ impl VectorDatabase {
pub fn top_k_search(
&self,
worktree_ids: &[i64],
query_embedding: &Vec<f32>,
limit: usize,
file_ids: &[i64],
) -> Result<Vec<(i64, f32)>> {
) -> Result<Vec<(i64, PathBuf, usize, String)>> {
let mut results = Vec::<(i64, f32)>::with_capacity(limit + 1);
self.for_each_document(file_ids, |id, embedding| {
self.for_each_document(&worktree_ids, |id, embedding| {
let similarity = dot(&embedding, &query_embedding);
let ix = match results
.binary_search_by(|(_, s)| similarity.partial_cmp(&s).unwrap_or(Ordering::Equal))
@@ -283,57 +218,29 @@ impl VectorDatabase {
results.truncate(limit);
})?;
Ok(results)
let ids = results.into_iter().map(|(id, _)| id).collect::<Vec<_>>();
self.get_documents_by_ids(&ids)
}
pub fn retrieve_included_file_ids(
fn for_each_document(
&self,
worktree_ids: &[i64],
includes: &[PathMatcher],
excludes: &[PathMatcher],
) -> Result<Vec<i64>> {
let mut file_query = self.db.prepare(
"
SELECT
id, relative_path
FROM
files
WHERE
worktree_id IN rarray(?)
",
)?;
let mut file_ids = Vec::<i64>::new();
let mut rows = file_query.query([ids_to_sql(worktree_ids)])?;
while let Some(row) = rows.next()? {
let file_id = row.get(0)?;
let relative_path = row.get_ref(1)?.as_str()?;
let included =
includes.is_empty() || includes.iter().any(|glob| glob.is_match(relative_path));
let excluded = excludes.iter().any(|glob| glob.is_match(relative_path));
if included && !excluded {
file_ids.push(file_id);
}
}
Ok(file_ids)
}
fn for_each_document(&self, file_ids: &[i64], mut f: impl FnMut(i64, Vec<f32>)) -> Result<()> {
mut f: impl FnMut(i64, Vec<f32>),
) -> Result<()> {
let mut query_statement = self.db.prepare(
"
SELECT
id, embedding
documents.id, documents.embedding
FROM
documents
documents, files
WHERE
file_id IN rarray(?)
documents.file_id = files.id AND
files.worktree_id IN rarray(?)
",
)?;
query_statement
.query_map(params![ids_to_sql(&file_ids)], |row| {
.query_map(params![ids_to_sql(worktree_ids)], |row| {
Ok((row.get(0)?, row.get::<_, Embedding>(1)?))
})?
.filter_map(|row| row.ok())
@@ -341,15 +248,11 @@ impl VectorDatabase {
Ok(())
}
pub fn get_documents_by_ids(&self, ids: &[i64]) -> Result<Vec<(i64, PathBuf, Range<usize>)>> {
fn get_documents_by_ids(&self, ids: &[i64]) -> Result<Vec<(i64, PathBuf, usize, String)>> {
let mut statement = self.db.prepare(
"
SELECT
documents.id,
files.worktree_id,
files.relative_path,
documents.start_byte,
documents.end_byte
documents.id, files.worktree_id, files.relative_path, documents.offset, documents.name
FROM
documents, files
WHERE
@@ -363,14 +266,15 @@ impl VectorDatabase {
row.get::<_, i64>(0)?,
row.get::<_, i64>(1)?,
row.get::<_, String>(2)?.into(),
row.get(3)?..row.get(4)?,
row.get(3)?,
row.get(4)?,
))
})?;
let mut values_by_id = HashMap::<i64, (i64, PathBuf, Range<usize>)>::default();
let mut values_by_id = HashMap::<i64, (i64, PathBuf, usize, String)>::default();
for row in result_iter {
let (id, worktree_id, path, range) = row?;
values_by_id.insert(id, (worktree_id, path, range));
let (id, worktree_id, path, offset, name) = row?;
values_by_id.insert(id, (worktree_id, path, offset, name));
}
let mut results = Vec::with_capacity(ids.len());

View File

@@ -67,16 +67,17 @@ impl EmbeddingProvider for DummyEmbeddings {
}
}
const OPENAI_INPUT_LIMIT: usize = 8190;
const INPUT_LIMIT: usize = 8190;
impl OpenAIEmbeddings {
fn truncate(span: String) -> String {
let mut tokens = OPENAI_BPE_TOKENIZER.encode_with_special_tokens(span.as_ref());
if tokens.len() > OPENAI_INPUT_LIMIT {
tokens.truncate(OPENAI_INPUT_LIMIT);
if tokens.len() > INPUT_LIMIT {
tokens.truncate(INPUT_LIMIT);
if let Ok(transformed) = OPENAI_BPE_TOKENIZER.decode(tokens.clone()) {
return transformed;
}
}
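// [Editor's sketch] Truncation operates on BPE tokens rather than bytes, so a
// span is only cut where it still decodes cleanly. A standalone sketch with
// tiktoken-rs; using cl100k_base() here is an assumption, and the encoding
// should match whichever embedding model is in use.
fn sketch_truncate_to_token_limit(span: &str, limit: usize) -> String {
    let bpe = tiktoken_rs::cl100k_base().unwrap();
    let mut tokens = bpe.encode_with_special_tokens(span);
    if tokens.len() <= limit {
        return span.to_string();
    }
    tokens.truncate(limit);
    // If the truncated token sequence somehow fails to decode, fall back to the
    // original span rather than sending garbage.
    bpe.decode(tokens).unwrap_or_else(|_| span.to_string())
}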
@@ -87,7 +88,6 @@ impl OpenAIEmbeddings {
async fn send_request(&self, api_key: &str, spans: Vec<&str>) -> Result<Response<AsyncBody>> {
let request = Request::post("https://api.openai.com/v1/embeddings")
.redirect_policy(isahc::config::RedirectPolicy::Follow)
.timeout(Duration::from_secs(4))
.header("Content-Type", "application/json")
.header("Authorization", format!("Bearer {}", api_key))
.body(
@@ -106,7 +106,7 @@ impl OpenAIEmbeddings {
#[async_trait]
impl EmbeddingProvider for OpenAIEmbeddings {
async fn embed_batch(&self, spans: Vec<&str>) -> Result<Vec<Vec<f32>>> {
const BACKOFF_SECONDS: [usize; 3] = [45, 75, 125];
const BACKOFF_SECONDS: [usize; 3] = [65, 180, 360];
const MAX_RETRIES: usize = 3;
let api_key = OPENAI_API_KEY
@@ -114,7 +114,6 @@ impl EmbeddingProvider for OpenAIEmbeddings {
.ok_or_else(|| anyhow!("no api key"))?;
let mut request_number = 0;
let mut truncated = false;
let mut response: Response<AsyncBody>;
let mut spans: Vec<String> = spans.iter().map(|x| x.to_string()).collect();
while request_number < MAX_RETRIES {
@@ -133,25 +132,14 @@ impl EmbeddingProvider for OpenAIEmbeddings {
match response.status() {
StatusCode::TOO_MANY_REQUESTS => {
let delay = Duration::from_secs(BACKOFF_SECONDS[request_number - 1] as u64);
log::trace!(
"open ai rate limiting, delaying request by {:?} seconds",
delay.as_secs()
);
self.executor.timer(delay).await;
}
StatusCode::BAD_REQUEST => {
// Only truncate if it hasn't been truncated before
if !truncated {
for span in spans.iter_mut() {
*span = Self::truncate(span.clone());
}
truncated = true;
} else {
// If it still fails after having truncated once, log the error and break the loop
let mut body = String::new();
response.body_mut().read_to_string(&mut body).await?;
log::trace!("open ai bad request: {:?} {:?}", &response.status(), body);
break;
log::info!("BAD REQUEST: {:?}", &response.status());
// Don't worry about delaying bad request, as we can assume
// we haven't been rate limited yet.
for span in spans.iter_mut() {
*span = Self::truncate(span.to_string());
}
}
StatusCode::OK => {
@@ -159,7 +147,7 @@ impl EmbeddingProvider for OpenAIEmbeddings {
response.body_mut().read_to_string(&mut body).await?;
let response: OpenAIEmbeddingResponse = serde_json::from_str(&body)?;
log::trace!(
log::info!(
"openai embedding completed. tokens: {:?}",
response.usage.total_tokens
);
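// [Editor's sketch] The retry policy above, condensed: on 429 sleep through the
// backoff table, on 400 truncate the spans once and retry immediately, and on
// 200 stop. Replies are simulated here so the sketch is self-contained.
#[derive(Clone, Copy)]
enum SketchReply {
    Ok,
    RateLimited,
    BadRequest,
}

fn sketch_retry_plan(replies: &[SketchReply]) -> Vec<String> {
    const BACKOFF_SECONDS: [u64; 3] = [45, 75, 125];
    let mut plan = Vec::new();
    let mut truncated = false;
    for (attempt, reply) in replies.iter().enumerate() {
        match reply {
            SketchReply::Ok => {
                plan.push("done".to_string());
                break;
            }
            // The real code awaits an async timer instead of recording a string.
            SketchReply::RateLimited => {
                plan.push(format!("sleep {}s", BACKOFF_SECONDS[attempt.min(2)]))
            }
            SketchReply::BadRequest if !truncated => {
                truncated = true;
                plan.push("truncate spans, retry".to_string());
            }
            // A 400 after truncating once is fatal.
            SketchReply::BadRequest => {
                plan.push("give up".to_string());
                break;
            }
        }
    }
    plan
}

#[test]
fn sketch_retry_backs_off_then_succeeds() {
    assert_eq!(
        sketch_retry_plan(&[
            SketchReply::RateLimited,
            SketchReply::RateLimited,
            SketchReply::Ok
        ]),
        vec!["sleep 45s", "sleep 75s", "done"]
    );
}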

View File

@@ -0,0 +1,172 @@
use crate::{SearchResult, VectorStore};
use editor::{scroll::autoscroll::Autoscroll, Editor};
use gpui::{
actions, elements::*, AnyElement, AppContext, ModelHandle, MouseState, Task, ViewContext,
WeakViewHandle,
};
use picker::{Picker, PickerDelegate, PickerEvent};
use project::{Project, ProjectPath};
use std::{collections::HashMap, sync::Arc, time::Duration};
use util::ResultExt;
use workspace::Workspace;
const MIN_QUERY_LEN: usize = 5;
const EMBEDDING_DEBOUNCE_INTERVAL: Duration = Duration::from_millis(500);
actions!(semantic_search, [Toggle]);
pub type SemanticSearch = Picker<SemanticSearchDelegate>;
pub struct SemanticSearchDelegate {
workspace: WeakViewHandle<Workspace>,
project: ModelHandle<Project>,
vector_store: ModelHandle<VectorStore>,
selected_match_index: usize,
matches: Vec<SearchResult>,
history: HashMap<String, Vec<SearchResult>>,
}
impl SemanticSearchDelegate {
// This currently searches on every keystroke, which is wildly
// overkill and has the potential to get expensive.
// We will need to update this to throttle searching.
pub fn new(
workspace: WeakViewHandle<Workspace>,
project: ModelHandle<Project>,
vector_store: ModelHandle<VectorStore>,
) -> Self {
Self {
workspace,
project,
vector_store,
selected_match_index: 0,
matches: vec![],
history: HashMap::new(),
}
}
}
impl PickerDelegate for SemanticSearchDelegate {
fn placeholder_text(&self) -> Arc<str> {
"Search repository in natural language...".into()
}
fn confirm(&mut self, _: bool, cx: &mut ViewContext<SemanticSearch>) {
if let Some(search_result) = self.matches.get(self.selected_match_index) {
// Open Buffer
let search_result = search_result.clone();
let buffer = self.project.update(cx, |project, cx| {
project.open_buffer(
ProjectPath {
worktree_id: search_result.worktree_id,
path: search_result.file_path.clone().into(),
},
cx,
)
});
let workspace = self.workspace.clone();
let position = search_result.offset;
cx.spawn(|_, mut cx| async move {
let buffer = buffer.await?;
workspace.update(&mut cx, |workspace, cx| {
let editor = workspace.open_project_item::<Editor>(buffer, cx);
editor.update(cx, |editor, cx| {
editor.change_selections(Some(Autoscroll::center()), cx, |s| {
s.select_ranges([position..position])
});
});
})?;
Ok::<_, anyhow::Error>(())
})
.detach_and_log_err(cx);
cx.emit(PickerEvent::Dismiss);
}
}
fn dismissed(&mut self, _cx: &mut ViewContext<SemanticSearch>) {}
fn match_count(&self) -> usize {
self.matches.len()
}
fn selected_index(&self) -> usize {
self.selected_match_index
}
fn set_selected_index(&mut self, ix: usize, _cx: &mut ViewContext<SemanticSearch>) {
self.selected_match_index = ix;
}
fn update_matches(&mut self, query: String, cx: &mut ViewContext<SemanticSearch>) -> Task<()> {
log::info!("Searching for {:?}...", query);
if query.len() < MIN_QUERY_LEN {
log::info!("Query below minimum length");
return Task::ready(());
}
let vector_store = self.vector_store.clone();
let project = self.project.clone();
cx.spawn(|this, mut cx| async move {
cx.background().timer(EMBEDDING_DEBOUNCE_INTERVAL).await;
let retrieved_cached = this.update(&mut cx, |this, _| {
let delegate = this.delegate_mut();
if let Some(historic_results) = delegate.history.get(&query) {
    delegate.matches = historic_results.clone();
    true
} else {
    false
}
});
if let Some(retrieved) = retrieved_cached.log_err() {
if !retrieved {
let task = vector_store.update(&mut cx, |store, cx| {
store.search(project.clone(), query.to_string(), 10, cx)
});
if let Some(results) = task.await.log_err() {
log::info!("Not queried previously, searching...");
this.update(&mut cx, |this, _| {
let delegate = this.delegate_mut();
delegate.matches = results.clone();
delegate.history.insert(query, results);
})
.ok();
}
} else {
log::info!("Already queried, retrieved directly from cached history");
}
}
})
}
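// [Editor's sketch] Two guards keep this affordable: the debounce timer above,
// so only the last keystroke in a burst reaches the embedding provider, and the
// history map, so repeating a query never re-embeds it. The cache half in
// isolation (names are illustrative):
struct SketchSearchCache {
    history: std::collections::HashMap<String, Vec<String>>,
}

impl SketchSearchCache {
    fn get_or_search(
        &mut self,
        query: &str,
        search: impl FnOnce(&str) -> Vec<String>,
    ) -> Vec<String> {
        if let Some(hit) = self.history.get(query) {
            return hit.clone(); // served from cached history
        }
        let results = search(query);
        self.history.insert(query.to_string(), results.clone());
        results
    }
}

#[test]
fn sketch_cache_skips_the_second_search() {
    let mut cache = SketchSearchCache { history: Default::default() };
    let first = cache.get_or_search("parse files", |q| vec![format!("result for {q}")]);
    let second = cache.get_or_search("parse files", |_| unreachable!("expected a cache hit"));
    assert_eq!(first, second);
}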
fn render_match(
&self,
ix: usize,
mouse_state: &mut MouseState,
selected: bool,
cx: &AppContext,
) -> AnyElement<Picker<Self>> {
let theme = theme::current(cx);
let style = &theme.picker.item;
let current_style = style.in_state(selected).style_for(mouse_state);
let search_result = &self.matches[ix];
let path = search_result.file_path.to_string_lossy();
let name = search_result.name.clone();
Flex::column()
.with_child(Text::new(name, current_style.label.text.clone()).with_soft_wrap(false))
.with_child(Label::new(
path.to_string(),
style.inactive_state().default.label.clone(),
))
.contained()
.with_style(current_style.container)
.into_any()
}
}

View File

@@ -0,0 +1,115 @@
use std::{path::PathBuf, sync::Arc, time::SystemTime};
use anyhow::{anyhow, Ok, Result};
use project::Fs;
use tree_sitter::{Parser, QueryCursor};
use crate::PendingFile;
#[derive(Debug, PartialEq, Clone)]
pub struct Document {
pub offset: usize,
pub name: String,
pub embedding: Vec<f32>,
}
#[derive(Debug, PartialEq, Clone)]
pub struct ParsedFile {
pub path: PathBuf,
pub mtime: SystemTime,
pub documents: Vec<Document>,
}
const CODE_CONTEXT_TEMPLATE: &str =
"The below code snippet is from file '<path>'\n\n```<language>\n<item>\n```";
pub struct CodeContextRetriever {
pub parser: Parser,
pub cursor: QueryCursor,
pub fs: Arc<dyn Fs>,
}
impl CodeContextRetriever {
pub async fn parse_file(
&mut self,
pending_file: PendingFile,
) -> Result<(ParsedFile, Vec<String>)> {
let grammar = pending_file
.language
.grammar()
.ok_or_else(|| anyhow!("no grammar for language"))?;
let embedding_config = grammar
.embedding_config
.as_ref()
.ok_or_else(|| anyhow!("no embedding queries"))?;
let content = self.fs.load(&pending_file.absolute_path).await?;
self.parser.set_language(grammar.ts_language).unwrap();
let tree = self
.parser
.parse(&content, None)
.ok_or_else(|| anyhow!("parsing failed"))?;
let mut documents = Vec::new();
let mut context_spans = Vec::new();
// Iterate through query matches
for mat in self.cursor.matches(
&embedding_config.query,
tree.root_node(),
content.as_bytes(),
) {
// log::info!("-----MATCH-----");
let mut name = Vec::new();
let mut item: Option<&str> = None;
let mut offset: Option<usize> = None;
for capture in mat.captures {
if capture.index == embedding_config.item_capture_ix {
offset = Some(capture.node.byte_range().start);
item = content.get(capture.node.byte_range());
} else if capture.index == embedding_config.name_capture_ix {
if let Some(name_content) = content.get(capture.node.byte_range()) {
name.push(name_content);
}
}
if let Some(context_capture_ix) = embedding_config.context_capture_ix {
if capture.index == context_capture_ix {
if let Some(context) = content.get(capture.node.byte_range()) {
name.push(context);
}
}
}
}
if item.is_some() && offset.is_some() && !name.is_empty() {
let context_span = CODE_CONTEXT_TEMPLATE
.replace("<path>", pending_file.relative_path.to_str().unwrap())
.replace("<language>", &pending_file.language.name().to_lowercase())
.replace("<item>", item.unwrap());
// log::info!("Name: {:?}", name);
// log::info!("Span: {:?}", util::truncate(&context_span, 100));
context_spans.push(context_span);
documents.push(Document {
name: name.join(" "),
offset: offset.unwrap(),
embedding: Vec::new(),
})
}
}
return Ok((
ParsedFile {
path: pending_file.relative_path,
mtime: pending_file.modified_time,
documents,
},
context_spans,
));
}
}
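// [Editor's sketch] The retriever is driven by a tree-sitter query whose @name
// and @item captures mark what to embed (the Rust query itself appears in the
// tests below). The same capture walk in miniature, assuming the tree-sitter and
// tree-sitter-rust crates:
fn sketch_extract_items(source: &str) -> Vec<(String, usize)> {
    use tree_sitter::{Parser, Query, QueryCursor};

    let language = tree_sitter_rust::language();
    let mut parser = Parser::new();
    parser.set_language(language).unwrap();
    let tree = parser.parse(source, None).unwrap();

    let query = Query::new(
        language,
        "(function_item name: (identifier) @name body: (block)) @item",
    )
    .unwrap();
    let name_ix = query.capture_index_for_name("name").unwrap();
    let item_ix = query.capture_index_for_name("item").unwrap();

    let mut items = Vec::new();
    let mut cursor = QueryCursor::new();
    for mat in cursor.matches(&query, tree.root_node(), source.as_bytes()) {
        let (mut name, mut offset) = (None, None);
        for capture in mat.captures {
            let range = capture.node.byte_range();
            if capture.index == name_ix {
                name = Some(source[range].to_string());
            } else if capture.index == item_ix {
                offset = Some(range.start);
            }
        }
        if let (Some(name), Some(offset)) = (name, offset) {
            items.push((name, offset));
        }
    }
    items
}

#[test]
fn sketch_extracts_function_names_and_offsets() {
    let items = sketch_extract_items("fn aaa() {}\nfn bbb() {}");
    assert_eq!(items, vec![("aaa".to_string(), 0), ("bbb".to_string(), 12)]);
}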

View File

@@ -0,0 +1,770 @@
mod db;
mod embedding;
mod modal;
mod parsing;
mod vector_store_settings;
#[cfg(test)]
mod vector_store_tests;
use crate::vector_store_settings::VectorStoreSettings;
use anyhow::{anyhow, Result};
use db::VectorDatabase;
use embedding::{EmbeddingProvider, OpenAIEmbeddings};
use futures::{channel::oneshot, Future};
use gpui::{
AppContext, AsyncAppContext, Entity, ModelContext, ModelHandle, Task, ViewContext,
WeakModelHandle,
};
use language::{Language, LanguageRegistry};
use modal::{SemanticSearch, SemanticSearchDelegate, Toggle};
use parsing::{CodeContextRetriever, ParsedFile};
use project::{Fs, PathChange, Project, ProjectEntryId, WorktreeId};
use smol::channel;
use std::{
collections::HashMap,
path::{Path, PathBuf},
sync::Arc,
time::{Duration, Instant, SystemTime},
};
use tree_sitter::{Parser, QueryCursor};
use util::{
channel::{ReleaseChannel, RELEASE_CHANNEL, RELEASE_CHANNEL_NAME},
http::HttpClient,
paths::EMBEDDINGS_DIR,
ResultExt,
};
use workspace::{Workspace, WorkspaceCreated};
const VECTOR_STORE_VERSION: usize = 0;
const EMBEDDINGS_BATCH_SIZE: usize = 150;
pub fn init(
fs: Arc<dyn Fs>,
http_client: Arc<dyn HttpClient>,
language_registry: Arc<LanguageRegistry>,
cx: &mut AppContext,
) {
settings::register::<VectorStoreSettings>(cx);
let db_file_path = EMBEDDINGS_DIR
.join(Path::new(RELEASE_CHANNEL_NAME.as_str()))
.join("embeddings_db");
SemanticSearch::init(cx);
cx.add_action(
|workspace: &mut Workspace, _: &Toggle, cx: &mut ViewContext<Workspace>| {
if cx.has_global::<ModelHandle<VectorStore>>() {
let vector_store = cx.global::<ModelHandle<VectorStore>>().clone();
workspace.toggle_modal(cx, |workspace, cx| {
let project = workspace.project().clone();
let workspace = cx.weak_handle();
cx.add_view(|cx| {
SemanticSearch::new(
SemanticSearchDelegate::new(workspace, project, vector_store),
cx,
)
})
});
}
},
);
if *RELEASE_CHANNEL == ReleaseChannel::Stable
|| !settings::get::<VectorStoreSettings>(cx).enabled
{
return;
}
cx.spawn(move |mut cx| async move {
let vector_store = VectorStore::new(
fs,
db_file_path,
// Arc::new(embedding::DummyEmbeddings {}),
Arc::new(OpenAIEmbeddings {
client: http_client,
executor: cx.background(),
}),
language_registry,
cx.clone(),
)
.await?;
cx.update(|cx| {
cx.set_global(vector_store.clone());
cx.subscribe_global::<WorkspaceCreated, _>({
let vector_store = vector_store.clone();
move |event, cx| {
let workspace = &event.0;
if let Some(workspace) = workspace.upgrade(cx) {
let project = workspace.read(cx).project().clone();
if project.read(cx).is_local() {
vector_store.update(cx, |store, cx| {
store.add_project(project, cx).detach();
});
}
}
}
})
.detach();
});
anyhow::Ok(())
})
.detach();
}
pub struct VectorStore {
fs: Arc<dyn Fs>,
database_url: Arc<PathBuf>,
embedding_provider: Arc<dyn EmbeddingProvider>,
language_registry: Arc<LanguageRegistry>,
db_update_tx: channel::Sender<DbOperation>,
parsing_files_tx: channel::Sender<PendingFile>,
_db_update_task: Task<()>,
_embed_batch_task: Task<()>,
_batch_files_task: Task<()>,
_parsing_files_tasks: Vec<Task<()>>,
projects: HashMap<WeakModelHandle<Project>, ProjectState>,
}
struct ProjectState {
worktree_db_ids: Vec<(WorktreeId, i64)>,
pending_files: HashMap<PathBuf, (PendingFile, SystemTime)>,
_subscription: gpui::Subscription,
}
impl ProjectState {
fn db_id_for_worktree_id(&self, id: WorktreeId) -> Option<i64> {
self.worktree_db_ids
.iter()
.find_map(|(worktree_id, db_id)| {
if *worktree_id == id {
Some(*db_id)
} else {
None
}
})
}
fn worktree_id_for_db_id(&self, id: i64) -> Option<WorktreeId> {
self.worktree_db_ids
.iter()
.find_map(|(worktree_id, db_id)| {
if *db_id == id {
Some(*worktree_id)
} else {
None
}
})
}
fn update_pending_files(&mut self, pending_file: PendingFile, indexing_time: SystemTime) {
// If a pending file already exists, replace it with the new one,
// but keep the old indexing time.
if let Some(old_file) = self
.pending_files
.remove(&pending_file.relative_path.clone())
{
self.pending_files.insert(
pending_file.relative_path.clone(),
(pending_file, old_file.1),
);
} else {
self.pending_files.insert(
pending_file.relative_path.clone(),
(pending_file, indexing_time),
);
};
}
fn get_outstanding_files(&mut self) -> Vec<PendingFile> {
let mut outstanding_files = vec![];
let mut remove_keys = vec![];
for key in self.pending_files.keys() {
if let Some(pending_details) = self.pending_files.get(key) {
let (pending_file, index_time) = pending_details;
if index_time <= &SystemTime::now() {
outstanding_files.push(pending_file.clone());
remove_keys.push(key.clone());
}
}
}
for key in remove_keys.iter() {
self.pending_files.remove(key);
}
return outstanding_files;
}
}
#[derive(Clone, Debug)]
pub struct PendingFile {
worktree_db_id: i64,
relative_path: PathBuf,
absolute_path: PathBuf,
language: Arc<Language>,
modified_time: SystemTime,
}
#[derive(Debug, Clone)]
pub struct SearchResult {
pub worktree_id: WorktreeId,
pub name: String,
pub offset: usize,
pub file_path: PathBuf,
}
enum DbOperation {
InsertFile {
worktree_id: i64,
indexed_file: ParsedFile,
},
Delete {
worktree_id: i64,
path: PathBuf,
},
FindOrCreateWorktree {
path: PathBuf,
sender: oneshot::Sender<Result<i64>>,
},
FileMTimes {
worktree_id: i64,
sender: oneshot::Sender<Result<HashMap<PathBuf, SystemTime>>>,
},
}
enum EmbeddingJob {
Enqueue {
worktree_id: i64,
parsed_file: ParsedFile,
document_spans: Vec<String>,
},
Flush,
}
impl VectorStore {
async fn new(
fs: Arc<dyn Fs>,
database_url: PathBuf,
embedding_provider: Arc<dyn EmbeddingProvider>,
language_registry: Arc<LanguageRegistry>,
mut cx: AsyncAppContext,
) -> Result<ModelHandle<Self>> {
let database_url = Arc::new(database_url);
let db = cx
.background()
.spawn({
let fs = fs.clone();
let database_url = database_url.clone();
async move {
if let Some(db_directory) = database_url.parent() {
fs.create_dir(db_directory).await.log_err();
}
let db = VectorDatabase::new(database_url.to_string_lossy().to_string())?;
anyhow::Ok(db)
}
})
.await?;
Ok(cx.add_model(|cx| {
// Pipeline: parsing_files_tx -> batch_files_tx -> embed_batch_tx -> db_update_tx
// db_update_tx/rx: updates the database
let (db_update_tx, db_update_rx) = channel::unbounded();
let _db_update_task = cx.background().spawn(async move {
while let Ok(job) = db_update_rx.recv().await {
match job {
DbOperation::InsertFile {
worktree_id,
indexed_file,
} => {
db.insert_file(worktree_id, indexed_file).log_err();
}
DbOperation::Delete { worktree_id, path } => {
db.delete_file(worktree_id, path).log_err();
}
DbOperation::FindOrCreateWorktree { path, sender } => {
let id = db.find_or_create_worktree(&path);
sender.send(id).ok();
}
DbOperation::FileMTimes {
worktree_id: worktree_db_id,
sender,
} => {
let file_mtimes = db.get_file_mtimes(worktree_db_id);
sender.send(file_mtimes).ok();
}
}
}
});
// embed_tx/rx: Embed Batch and Send to Database
let (embed_batch_tx, embed_batch_rx) =
channel::unbounded::<Vec<(i64, ParsedFile, Vec<String>)>>();
let _embed_batch_task = cx.background().spawn({
let db_update_tx = db_update_tx.clone();
let embedding_provider = embedding_provider.clone();
async move {
while let Ok(mut embeddings_queue) = embed_batch_rx.recv().await {
// Construct Batch
let mut document_spans = vec![];
for (_, _, document_span) in embeddings_queue.iter() {
document_spans.extend(document_span.iter().map(|s| s.as_str()));
}
if let Ok(embeddings) = embedding_provider.embed_batch(document_spans).await
{
let mut i = 0;
let mut j = 0;
for embedding in embeddings.iter() {
while embeddings_queue[i].1.documents.len() == j {
i += 1;
j = 0;
}
embeddings_queue[i].1.documents[j].embedding = embedding.to_owned();
j += 1;
}
for (worktree_id, indexed_file, _) in embeddings_queue.into_iter() {
for document in indexed_file.documents.iter() {
// TODO: Update this so it doesn't panic
assert!(
document.embedding.len() > 0,
"Document Embedding Not Complete"
);
}
db_update_tx
.send(DbOperation::InsertFile {
worktree_id,
indexed_file,
})
.await
.unwrap();
}
}
}
}
});
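// [Editor's sketch] The i/j walk above fans the provider's flat batch of
// embeddings back out to the per-file documents they were built from: `i`
// indexes the queued files and `j` the documents within the current file.
// The same walk in isolation:
fn sketch_assign_embeddings(files: &mut [Vec<Option<u32>>], embeddings: Vec<u32>) {
    let (mut i, mut j) = (0, 0);
    for embedding in embeddings {
        // Skip past files whose document slots are full (including empty files).
        while files[i].len() == j {
            i += 1;
            j = 0;
        }
        files[i][j] = Some(embedding);
        j += 1;
    }
}

#[test]
fn sketch_flat_batch_maps_back_to_files() {
    // Files with 2, 0, and 1 documents; the flat batch holds 3 embeddings in order.
    let mut files = vec![vec![None, None], vec![], vec![None]];
    sketch_assign_embeddings(&mut files, vec![10, 20, 30]);
    assert_eq!(files, vec![vec![Some(10), Some(20)], vec![], vec![Some(30)]]);
}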
// batch_tx/rx: Batch Files to Send for Embeddings
let (batch_files_tx, batch_files_rx) = channel::unbounded::<EmbeddingJob>();
let _batch_files_task = cx.background().spawn(async move {
let mut queue_len = 0;
let mut embeddings_queue = vec![];
while let Ok(job) = batch_files_rx.recv().await {
let should_flush = match job {
EmbeddingJob::Enqueue {
document_spans,
worktree_id,
parsed_file,
} => {
queue_len += &document_spans.len();
embeddings_queue.push((worktree_id, parsed_file, document_spans));
queue_len >= EMBEDDINGS_BATCH_SIZE
}
EmbeddingJob::Flush => true,
};
if should_flush {
embed_batch_tx.try_send(embeddings_queue).unwrap();
embeddings_queue = vec![];
queue_len = 0;
}
}
});
// parsing_files_tx/rx: Parsing Files to Embeddable Documents
let (parsing_files_tx, parsing_files_rx) = channel::unbounded::<PendingFile>();
let mut _parsing_files_tasks = Vec::new();
// for _ in 0..cx.background().num_cpus() {
for _ in 0..1 {
let fs = fs.clone();
let parsing_files_rx = parsing_files_rx.clone();
let batch_files_tx = batch_files_tx.clone();
_parsing_files_tasks.push(cx.background().spawn(async move {
let parser = Parser::new();
let cursor = QueryCursor::new();
let mut retriever = CodeContextRetriever { parser, cursor, fs };
while let Ok(pending_file) = parsing_files_rx.recv().await {
if let Some((indexed_file, document_spans)) =
retriever.parse_file(pending_file.clone()).await.log_err()
{
batch_files_tx
.try_send(EmbeddingJob::Enqueue {
worktree_id: pending_file.worktree_db_id,
parsed_file: indexed_file,
document_spans,
})
.unwrap();
}
if parsing_files_rx.len() == 0 {
batch_files_tx.try_send(EmbeddingJob::Flush).unwrap();
}
}
}));
}
Self {
fs,
database_url,
embedding_provider,
language_registry,
db_update_tx,
parsing_files_tx,
_db_update_task,
_embed_batch_task,
_batch_files_task,
_parsing_files_tasks,
projects: HashMap::new(),
}
}))
}
fn find_or_create_worktree(&self, path: PathBuf) -> impl Future<Output = Result<i64>> {
let (tx, rx) = oneshot::channel();
self.db_update_tx
.try_send(DbOperation::FindOrCreateWorktree { path, sender: tx })
.unwrap();
async move { rx.await? }
}
fn get_file_mtimes(
&self,
worktree_id: i64,
) -> impl Future<Output = Result<HashMap<PathBuf, SystemTime>>> {
let (tx, rx) = oneshot::channel();
self.db_update_tx
.try_send(DbOperation::FileMTimes {
worktree_id,
sender: tx,
})
.unwrap();
async move { rx.await? }
}
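// [Editor's sketch] Both helpers above use the same request/response shape: the
// database is owned by a single background task, and each caller sends a oneshot
// sender along with its request, then awaits the reply. The pattern in miniature
// with smol and futures (the query itself is a stand-in):
#[test]
fn sketch_oneshot_request_response() {
    enum SketchRequest {
        FileCount {
            sender: futures::channel::oneshot::Sender<usize>,
        },
    }

    let (tx, rx) = smol::channel::unbounded::<SketchRequest>();
    smol::block_on(async {
        // Single-owner worker: all database access would happen here.
        let worker = smol::spawn(async move {
            while let Ok(SketchRequest::FileCount { sender }) = rx.recv().await {
                sender.send(42).ok(); // stand-in for a real query
            }
        });

        let (reply_tx, reply_rx) = futures::channel::oneshot::channel();
        tx.try_send(SketchRequest::FileCount { sender: reply_tx }).unwrap();
        assert_eq!(reply_rx.await.unwrap(), 42);

        drop(tx); // closing the channel lets the worker loop end
        worker.await;
    });
}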
fn add_project(
&mut self,
project: ModelHandle<Project>,
cx: &mut ModelContext<Self>,
) -> Task<Result<()>> {
let worktree_scans_complete = project
.read(cx)
.worktrees(cx)
.map(|worktree| {
let scan_complete = worktree.read(cx).as_local().unwrap().scan_complete();
async move {
scan_complete.await;
}
})
.collect::<Vec<_>>();
let worktree_db_ids = project
.read(cx)
.worktrees(cx)
.map(|worktree| {
self.find_or_create_worktree(worktree.read(cx).abs_path().to_path_buf())
})
.collect::<Vec<_>>();
let fs = self.fs.clone();
let language_registry = self.language_registry.clone();
let database_url = self.database_url.clone();
let db_update_tx = self.db_update_tx.clone();
let parsing_files_tx = self.parsing_files_tx.clone();
cx.spawn(|this, mut cx| async move {
futures::future::join_all(worktree_scans_complete).await;
let worktree_db_ids = futures::future::join_all(worktree_db_ids).await;
if let Some(db_directory) = database_url.parent() {
fs.create_dir(db_directory).await.log_err();
}
let worktrees = project.read_with(&cx, |project, cx| {
project
.worktrees(cx)
.map(|worktree| worktree.read(cx).snapshot())
.collect::<Vec<_>>()
});
let mut worktree_file_times = HashMap::new();
let mut db_ids_by_worktree_id = HashMap::new();
for (worktree, db_id) in worktrees.iter().zip(worktree_db_ids) {
let db_id = db_id?;
db_ids_by_worktree_id.insert(worktree.id(), db_id);
worktree_file_times.insert(
worktree.id(),
this.read_with(&cx, |this, _| this.get_file_mtimes(db_id))
.await?,
);
}
cx.background()
.spawn({
let db_ids_by_worktree_id = db_ids_by_worktree_id.clone();
let db_update_tx = db_update_tx.clone();
let language_registry = language_registry.clone();
let parsing_files_tx = parsing_files_tx.clone();
async move {
let t0 = Instant::now();
for worktree in worktrees.into_iter() {
let mut file_mtimes =
worktree_file_times.remove(&worktree.id()).unwrap();
for file in worktree.files(false, 0) {
let absolute_path = worktree.absolutize(&file.path);
if let Ok(language) = language_registry
.language_for_file(&absolute_path, None)
.await
{
if language
.grammar()
.and_then(|grammar| grammar.embedding_config.as_ref())
.is_none()
{
continue;
}
let path_buf = file.path.to_path_buf();
let stored_mtime = file_mtimes.remove(&file.path.to_path_buf());
let already_stored = stored_mtime
.map_or(false, |existing_mtime| {
existing_mtime == file.mtime
});
if !already_stored {
parsing_files_tx
.try_send(PendingFile {
worktree_db_id: db_ids_by_worktree_id
[&worktree.id()],
relative_path: path_buf,
absolute_path,
language,
modified_time: file.mtime,
})
.unwrap();
}
}
}
for file in file_mtimes.keys() {
db_update_tx
.try_send(DbOperation::Delete {
worktree_id: db_ids_by_worktree_id[&worktree.id()],
path: file.to_owned(),
})
.unwrap();
}
}
log::info!(
"Parsing Worktree Completed in {:?}",
t0.elapsed().as_millis()
);
}
})
.detach();
this.update(&mut cx, |this, cx| {
// The below manages reindexing on save. Each time a file is saved this code runs,
// and for every file that changed, if the current time exceeds the previously
// recorded time by the reindexing delay setting, we send the file off to be indexed.
let _subscription = cx.subscribe(&project, |this, project, event, cx| {
if let project::Event::WorktreeUpdatedEntries(worktree_id, changes) = event {
this.project_entries_changed(project, changes.clone(), cx, worktree_id);
}
});
this.projects.insert(
project.downgrade(),
ProjectState {
pending_files: HashMap::new(),
worktree_db_ids: db_ids_by_worktree_id.into_iter().collect(),
_subscription,
},
);
});
anyhow::Ok(())
})
}
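// [Editor's sketch] Incremental indexing hinges on the mtime comparison above: a
// file is skipped only when the database already holds an entry with exactly the
// same modified time, and whatever is left in the mtime map afterwards no longer
// exists on disk, so its rows are deleted. The diffing step in isolation:
fn sketch_diff_against_db(
    on_disk: &[(&str, std::time::SystemTime)],
    mut in_db: std::collections::HashMap<String, std::time::SystemTime>,
) -> (Vec<String>, Vec<String>) {
    let mut to_index = Vec::new();
    for (path, mtime) in on_disk {
        let already_stored = in_db.remove(*path).map_or(false, |stored| stored == *mtime);
        if !already_stored {
            to_index.push(path.to_string());
        }
    }
    // Entries that remain exist in the database but not on disk anymore.
    let to_delete = in_db.into_keys().collect();
    (to_index, to_delete)
}

#[test]
fn sketch_reindexes_changed_files_and_prunes_deleted_ones() {
    use std::time::{Duration, SystemTime};
    let t0 = SystemTime::UNIX_EPOCH;
    let t1 = t0 + Duration::from_secs(1);
    let in_db = std::collections::HashMap::from([
        ("unchanged.rs".to_string(), t0),
        ("changed.rs".to_string(), t0),
        ("deleted.rs".to_string(), t0),
    ]);
    let on_disk = [("unchanged.rs", t0), ("changed.rs", t1), ("new.rs", t1)];
    let (to_index, to_delete) = sketch_diff_against_db(&on_disk, in_db);
    assert_eq!(to_index, vec!["changed.rs", "new.rs"]);
    assert_eq!(to_delete, vec!["deleted.rs"]);
}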
pub fn search(
&mut self,
project: ModelHandle<Project>,
phrase: String,
limit: usize,
cx: &mut ModelContext<Self>,
) -> Task<Result<Vec<SearchResult>>> {
let project_state = if let Some(state) = self.projects.get(&project.downgrade()) {
state
} else {
return Task::ready(Err(anyhow!("project not added")));
};
let worktree_db_ids = project
.read(cx)
.worktrees(cx)
.filter_map(|worktree| {
let worktree_id = worktree.read(cx).id();
project_state.db_id_for_worktree_id(worktree_id)
})
.collect::<Vec<_>>();
let embedding_provider = self.embedding_provider.clone();
let database_url = self.database_url.clone();
cx.spawn(|this, cx| async move {
let documents = cx
.background()
.spawn(async move {
let database = VectorDatabase::new(database_url.to_string_lossy().into())?;
let phrase_embedding = embedding_provider
.embed_batch(vec![&phrase])
.await?
.into_iter()
.next()
.unwrap();
database.top_k_search(&worktree_db_ids, &phrase_embedding, limit)
})
.await?;
this.read_with(&cx, |this, _| {
let project_state = if let Some(state) = this.projects.get(&project.downgrade()) {
state
} else {
return Err(anyhow!("project not added"));
};
Ok(documents
.into_iter()
.filter_map(|(worktree_db_id, file_path, offset, name)| {
let worktree_id = project_state.worktree_id_for_db_id(worktree_db_id)?;
Some(SearchResult {
worktree_id,
name,
offset,
file_path,
})
})
.collect())
})
})
}
fn project_entries_changed(
&mut self,
project: ModelHandle<Project>,
changes: Arc<[(Arc<Path>, ProjectEntryId, PathChange)]>,
cx: &mut ModelContext<'_, VectorStore>,
worktree_id: &WorktreeId,
) -> Option<()> {
let reindexing_delay = settings::get::<VectorStoreSettings>(cx).reindexing_delay_seconds;
let worktree = project
.read(cx)
.worktree_for_id(worktree_id.clone(), cx)?
.read(cx)
.snapshot();
let worktree_db_id = self
.projects
.get(&project.downgrade())?
.db_id_for_worktree_id(worktree.id())?;
let file_mtimes = self.get_file_mtimes(worktree_db_id);
let language_registry = self.language_registry.clone();
cx.spawn(|this, mut cx| async move {
let file_mtimes = file_mtimes.await.log_err()?;
for change in changes.into_iter() {
let change_path = change.0.clone();
let absolute_path = worktree.absolutize(&change_path);
// Skip if git-ignored, a symlink, or external
if let Some(entry) = worktree.entry_for_id(change.1) {
if entry.is_ignored || entry.is_symlink || entry.is_external {
continue;
}
}
match change.2 {
PathChange::Removed => this.update(&mut cx, |this, _| {
this.db_update_tx
.try_send(DbOperation::Delete {
worktree_id: worktree_db_id,
path: absolute_path,
})
.unwrap();
}),
_ => {
if let Ok(language) = language_registry
.language_for_file(&change_path.to_path_buf(), None)
.await
{
if language
.grammar()
.and_then(|grammar| grammar.embedding_config.as_ref())
.is_none()
{
continue;
}
let modified_time =
change_path.metadata().log_err()?.modified().log_err()?;
let existing_time = file_mtimes.get(&change_path.to_path_buf());
let already_stored = existing_time
.map_or(false, |existing_time| &modified_time == existing_time);
if !already_stored {
this.update(&mut cx, |this, _| {
let reindex_time = modified_time
+ Duration::from_secs(reindexing_delay as u64);
let project_state =
this.projects.get_mut(&project.downgrade())?;
project_state.update_pending_files(
PendingFile {
relative_path: change_path.to_path_buf(),
absolute_path,
modified_time,
worktree_db_id,
language: language.clone(),
},
reindex_time,
);
for file in project_state.get_outstanding_files() {
this.parsing_files_tx.try_send(file).unwrap();
}
Some(())
});
}
}
}
}
}
Some(())
})
.detach();
Some(())
}
}
impl Entity for VectorStore {
type Event = ();
}

View File

@@ -4,21 +4,21 @@ use serde::{Deserialize, Serialize};
use settings::Setting;
#[derive(Deserialize, Debug)]
pub struct SemanticIndexSettings {
pub struct VectorStoreSettings {
pub enabled: bool,
pub reindexing_delay_seconds: usize,
}
#[derive(Clone, Default, Serialize, Deserialize, JsonSchema, Debug)]
pub struct SemanticIndexSettingsContent {
pub struct VectorStoreSettingsContent {
pub enabled: Option<bool>,
pub reindexing_delay_seconds: Option<usize>,
}
impl Setting for SemanticIndexSettings {
const KEY: Option<&'static str> = Some("semantic_index");
impl Setting for VectorStoreSettings {
const KEY: Option<&'static str> = Some("vector_store");
type FileContent = SemanticIndexSettingsContent;
type FileContent = VectorStoreSettingsContent;
fn load(
default_value: &Self::FileContent,
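// [Editor's sketch] The Option fields in the content struct let user values
// overlay defaults. The real `load` merges a default plus a stack of user files;
// this sketch merges just one layer, and the fallback values are illustrative:
fn sketch_load(
    default: &VectorStoreSettingsContent,
    user: &VectorStoreSettingsContent,
) -> VectorStoreSettings {
    VectorStoreSettings {
        enabled: user.enabled.or(default.enabled).unwrap_or(false),
        reindexing_delay_seconds: user
            .reindexing_delay_seconds
            .or(default.reindexing_delay_seconds)
            .unwrap_or(600),
    }
}

#[test]
fn sketch_user_value_overrides_default() {
    let default = VectorStoreSettingsContent {
        enabled: Some(false),
        reindexing_delay_seconds: Some(600),
    };
    let user = VectorStoreSettingsContent {
        enabled: Some(true),
        ..Default::default()
    };
    let merged = sketch_load(&default, &user);
    assert!(merged.enabled);
    assert_eq!(merged.reindexing_delay_seconds, 600);
}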

View File

@@ -0,0 +1,161 @@
use crate::{
db::dot, embedding::EmbeddingProvider, vector_store_settings::VectorStoreSettings, VectorStore,
};
use anyhow::Result;
use async_trait::async_trait;
use gpui::{Task, TestAppContext};
use language::{Language, LanguageConfig, LanguageRegistry};
use project::{project_settings::ProjectSettings, FakeFs, Project};
use rand::{rngs::StdRng, Rng};
use serde_json::json;
use settings::SettingsStore;
use std::sync::Arc;
use unindent::Unindent;
#[gpui::test]
async fn test_vector_store(cx: &mut TestAppContext) {
cx.update(|cx| {
cx.set_global(SettingsStore::test(cx));
settings::register::<VectorStoreSettings>(cx);
settings::register::<ProjectSettings>(cx);
});
let fs = FakeFs::new(cx.background());
fs.insert_tree(
"/the-root",
json!({
"src": {
"file1.rs": "
fn aaa() {
println!(\"aaaa!\");
}
fn zzzzzzzzz() {
println!(\"SLEEPING\");
}
".unindent(),
"file2.rs": "
fn bbb() {
println!(\"bbbb!\");
}
".unindent(),
}
}),
)
.await;
let languages = Arc::new(LanguageRegistry::new(Task::ready(())));
let rust_language = Arc::new(
Language::new(
LanguageConfig {
name: "Rust".into(),
path_suffixes: vec!["rs".into()],
..Default::default()
},
Some(tree_sitter_rust::language()),
)
.with_embedding_query(
r#"
(function_item
name: (identifier) @name
body: (block)) @item
"#,
)
.unwrap(),
);
languages.add(rust_language);
let db_dir = tempdir::TempDir::new("vector-store").unwrap();
let db_path = db_dir.path().join("db.sqlite");
let store = VectorStore::new(
fs.clone(),
db_path,
Arc::new(FakeEmbeddingProvider),
languages,
cx.to_async(),
)
.await
.unwrap();
let project = Project::test(fs, ["/the-root".as_ref()], cx).await;
let worktree_id = project.read_with(cx, |project, cx| {
project.worktrees(cx).next().unwrap().read(cx).id()
});
store
.update(cx, |store, cx| store.add_project(project.clone(), cx))
.await
.unwrap();
cx.foreground().run_until_parked();
let search_results = store
.update(cx, |store, cx| {
store.search(project.clone(), "aaaa".to_string(), 5, cx)
})
.await
.unwrap();
assert_eq!(search_results[0].offset, 0);
assert_eq!(search_results[0].name, "aaa");
assert_eq!(search_results[0].worktree_id, worktree_id);
}
#[gpui::test]
fn test_dot_product(mut rng: StdRng) {
assert_eq!(dot(&[1., 0., 0., 0., 0.], &[0., 1., 0., 0., 0.]), 0.);
assert_eq!(dot(&[2., 0., 0., 0., 0.], &[3., 1., 0., 0., 0.]), 6.);
for _ in 0..100 {
let size = 1536;
let mut a = vec![0.; size];
let mut b = vec![0.; size];
for (a, b) in a.iter_mut().zip(b.iter_mut()) {
*a = rng.gen();
*b = rng.gen();
}
assert_eq!(
round_to_decimals(dot(&a, &b), 1),
round_to_decimals(reference_dot(&a, &b), 1)
);
}
fn round_to_decimals(n: f32, decimal_places: i32) -> f32 {
let factor = (10.0 as f32).powi(decimal_places);
(n * factor).round() / factor
}
fn reference_dot(a: &[f32], b: &[f32]) -> f32 {
a.iter().zip(b.iter()).map(|(a, b)| a * b).sum()
}
}
struct FakeEmbeddingProvider;
#[async_trait]
impl EmbeddingProvider for FakeEmbeddingProvider {
async fn embed_batch(&self, spans: Vec<&str>) -> Result<Vec<Vec<f32>>> {
Ok(spans
.iter()
.map(|span| {
let mut result = vec![1.0; 26];
for letter in span.chars() {
let letter = letter.to_ascii_lowercase();
if letter as u32 >= 'a' as u32 {
let ix = (letter as u32) - ('a' as u32);
if ix < 26 {
result[ix as usize] += 1.0;
}
}
}
let norm = result.iter().map(|x| x * x).sum::<f32>().sqrt();
for x in &mut result {
*x /= norm;
}
result
})
.collect())
}
}

View File

@@ -32,8 +32,6 @@ language = { path = "../language" }
search = { path = "../search" }
settings = { path = "../settings" }
workspace = { path = "../workspace" }
theme = { path = "../theme" }
language_selector = { path = "../language_selector"}
[dev-dependencies]
indoc.workspace = true
@@ -46,4 +44,3 @@ project = { path = "../project", features = ["test-support"] }
util = { path = "../util", features = ["test-support"] }
settings = { path = "../settings" }
workspace = { path = "../workspace", features = ["test-support"] }
theme = { path = "../theme", features = ["test-support"] }

View File

@@ -1,107 +0,0 @@
use gpui::{
elements::{Empty, Label},
AnyElement, Element, Entity, Subscription, View, ViewContext,
};
use settings::SettingsStore;
use workspace::{item::ItemHandle, StatusItemView};
use crate::{state::Mode, Vim, VimEvent, VimModeSetting};
pub struct ModeIndicator {
pub mode: Option<Mode>,
_subscription: Subscription,
}
impl ModeIndicator {
pub fn new(cx: &mut ViewContext<Self>) -> Self {
let handle = cx.handle().downgrade();
let _subscription = cx.subscribe_global::<VimEvent, _>(move |&event, cx| {
if let Some(mode_indicator) = handle.upgrade(cx) {
match event {
VimEvent::ModeChanged { mode } => {
cx.update_window(mode_indicator.window_id(), |cx| {
mode_indicator.update(cx, move |mode_indicator, cx| {
mode_indicator.set_mode(mode, cx);
})
});
}
}
}
});
cx.observe_global::<SettingsStore, _>(move |mode_indicator, cx| {
if settings::get::<VimModeSetting>(cx).0 {
mode_indicator.mode = cx
.has_global::<Vim>()
.then(|| cx.global::<Vim>().state.mode);
} else {
mode_indicator.mode.take();
}
})
.detach();
// Vim doesn't exist in some tests
let mode = cx
.has_global::<Vim>()
.then(|| {
let vim = cx.global::<Vim>();
vim.enabled.then(|| vim.state.mode)
})
.flatten();
Self {
mode,
_subscription,
}
}
pub fn set_mode(&mut self, mode: Mode, cx: &mut ViewContext<Self>) {
if self.mode != Some(mode) {
self.mode = Some(mode);
cx.notify();
}
}
}
impl Entity for ModeIndicator {
type Event = ();
}
impl View for ModeIndicator {
fn ui_name() -> &'static str {
"ModeIndicatorView"
}
fn render(&mut self, cx: &mut ViewContext<Self>) -> AnyElement<Self> {
let Some(mode) = self.mode.as_ref() else {
return Empty::new().into_any();
};
let theme = &theme::current(cx).workspace.status_bar;
// We always choose text that is 12 monospace characters wide,
// so that as the mode indicator changes, the rest of the
// UI stays still.
let text = match mode {
Mode::Normal => "-- NORMAL --",
Mode::Insert => "-- INSERT --",
Mode::Visual { line: false } => "-- VISUAL --",
Mode::Visual { line: true } => "VISUAL LINE ",
};
Label::new(text, theme.vim_mode_indicator.text.clone())
.contained()
.with_style(theme.vim_mode_indicator.container)
.into_any()
}
}
impl StatusItemView for ModeIndicator {
fn set_active_pane_item(
&mut self,
_active_pane_item: Option<&dyn ItemHandle>,
_cx: &mut ViewContext<Self>,
) {
// nothing to do.
}
}

View File

@@ -62,12 +62,6 @@ struct PreviousWordStart {
ignore_punctuation: bool,
}
#[derive(Clone, Deserialize, PartialEq)]
struct RepeatFind {
#[serde(default)]
backwards: bool,
}
actions!(
vim,
[
@@ -88,10 +82,7 @@ actions!(
NextLineStart,
]
);
impl_actions!(
vim,
[NextWordStart, NextWordEnd, PreviousWordStart, RepeatFind]
);
impl_actions!(vim, [NextWordStart, NextWordEnd, PreviousWordStart]);
pub fn init(cx: &mut AppContext) {
cx.add_action(|_: &mut Workspace, _: &Left, cx: _| motion(Motion::Left, cx));
@@ -132,15 +123,13 @@ pub fn init(cx: &mut AppContext) {
&PreviousWordStart { ignore_punctuation }: &PreviousWordStart,
cx: _| { motion(Motion::PreviousWordStart { ignore_punctuation }, cx) },
);
cx.add_action(|_: &mut Workspace, &NextLineStart, cx: _| motion(Motion::NextLineStart, cx));
cx.add_action(|_: &mut Workspace, action: &RepeatFind, cx: _| {
repeat_motion(action.backwards, cx)
})
cx.add_action(|_: &mut Workspace, &NextLineStart, cx: _| motion(Motion::NextLineStart, cx))
}
pub(crate) fn motion(motion: Motion, cx: &mut WindowContext) {
if let Some(Operator::FindForward { .. }) | Some(Operator::FindBackward { .. }) =
Vim::read(cx).active_operator()
if let Some(Operator::Namespace(_))
| Some(Operator::FindForward { .. })
| Some(Operator::FindBackward { .. }) = Vim::read(cx).active_operator()
{
Vim::update(cx, |vim, cx| vim.pop_operator(cx));
}
@@ -157,35 +146,6 @@ pub(crate) fn motion(motion: Motion, cx: &mut WindowContext) {
Vim::update(cx, |vim, cx| vim.clear_operator(cx));
}
fn repeat_motion(backwards: bool, cx: &mut WindowContext) {
let find = match Vim::read(cx).state.last_find.clone() {
Some(Motion::FindForward { before, text }) => {
if backwards {
Motion::FindBackward {
after: before,
text,
}
} else {
Motion::FindForward { before, text }
}
}
Some(Motion::FindBackward { after, text }) => {
if backwards {
Motion::FindForward {
before: after,
text,
}
} else {
Motion::FindBackward { after, text }
}
}
_ => return,
};
motion(find, cx)
}
// Motion handling is specified here:
// https://github.com/vim/vim/blob/master/runtime/doc/motion.txt
impl Motion {
@@ -783,23 +743,4 @@ mod test {
cx.simulate_shared_keystrokes(["%"]).await;
cx.assert_shared_state("func boop(ˇ) {\n}").await;
}
#[gpui::test]
async fn test_comma_semicolon(cx: &mut gpui::TestAppContext) {
let mut cx = NeovimBackedTestContext::new(cx).await;
cx.set_shared_state("ˇone two three four").await;
cx.simulate_shared_keystrokes(["f", "o"]).await;
cx.assert_shared_state("one twˇo three four").await;
cx.simulate_shared_keystrokes([","]).await;
cx.assert_shared_state("ˇone two three four").await;
cx.simulate_shared_keystrokes(["2", ";"]).await;
cx.assert_shared_state("one two three fˇour").await;
cx.simulate_shared_keystrokes(["shift-t", "e"]).await;
cx.assert_shared_state("one two threeˇ four").await;
cx.simulate_shared_keystrokes(["3", ";"]).await;
cx.assert_shared_state("oneˇ two three four").await;
cx.simulate_shared_keystrokes([","]).await;
cx.assert_shared_state("one two thˇree four").await;
}
}

View File

@@ -107,7 +107,7 @@ pub fn normal_motion(
Some(Operator::Delete) => delete_motion(vim, motion, times, cx),
Some(Operator::Yank) => yank_motion(vim, motion, times, cx),
Some(operator) => {
// Can't do anything for text objects, Ignoring
// Can't do anything for text objects or namespace operators. Ignoring
error!("Unexpected normal mode motion operator: {:?}", operator)
}
}
@@ -441,8 +441,11 @@ mod test {
use indoc::indoc;
use crate::{
state::Mode::{self},
test::{ExemptionFeatures, NeovimBackedTestContext},
state::{
Mode::{self, *},
Namespace, Operator,
},
test::{ExemptionFeatures, NeovimBackedTestContext, VimTestContext},
};
#[gpui::test]
@@ -607,6 +610,22 @@ mod test {
.await;
}
#[gpui::test]
async fn test_g_prefix_and_abort(cx: &mut gpui::TestAppContext) {
let mut cx = VimTestContext::new(cx, true).await;
// Can abort with escape to get back to normal mode
cx.simulate_keystroke("g");
assert_eq!(cx.mode(), Normal);
assert_eq!(
cx.active_operator(),
Some(Operator::Namespace(Namespace::G))
);
cx.simulate_keystroke("escape");
assert_eq!(cx.mode(), Normal);
assert_eq!(cx.active_operator(), None);
}
#[gpui::test]
async fn test_gg(cx: &mut gpui::TestAppContext) {
let mut cx = NeovimBackedTestContext::new(cx).await;

View File

@@ -93,7 +93,7 @@ fn search_submit(workspace: &mut Workspace, _: &SearchSubmit, cx: &mut ViewConte
pane.update(cx, |pane, cx| {
if let Some(search_bar) = pane.toolbar().read(cx).item_of_type::<BufferSearchBar>() {
search_bar.update(cx, |search_bar, cx| {
let state = &mut vim.state.search;
let mut state = &mut vim.state.search;
let mut count = state.count;
// in the case that the query has changed, the search bar
@@ -222,7 +222,7 @@ mod test {
});
search_bar.read_with(cx.cx, |bar, cx| {
assert_eq!(bar.query(cx), "cc");
assert_eq!(bar.query_editor.read(cx).text(cx), "cc");
});
deterministic.run_until_parked();

View File

@@ -3,8 +3,6 @@ use language::CursorShape;
use serde::{Deserialize, Serialize};
use workspace::searchable::Direction;
use crate::motion::Motion;
#[derive(Clone, Copy, Debug, PartialEq, Eq, Deserialize, Serialize)]
pub enum Mode {
Normal,
@@ -18,9 +16,16 @@ impl Default for Mode {
}
}
#[derive(Copy, Clone, Debug, PartialEq, Eq, Deserialize)]
pub enum Namespace {
G,
Z,
}
#[derive(Copy, Clone, Debug, PartialEq, Eq, Deserialize)]
pub enum Operator {
Number(usize),
Namespace(Namespace),
Change,
Delete,
Yank,
@@ -35,8 +40,6 @@ pub struct VimState {
pub mode: Mode,
pub operator_stack: Vec<Operator>,
pub search: SearchState,
pub last_find: Option<Motion>,
}
pub struct SearchState {
@@ -123,6 +126,8 @@ impl Operator {
pub fn id(&self) -> &'static str {
match self {
Operator::Number(_) => "n",
Operator::Namespace(Namespace::G) => "g",
Operator::Namespace(Namespace::Z) => "z",
Operator::Object { around: false } => "i",
Operator::Object { around: true } => "a",
Operator::Change => "c",

View File

@@ -4,8 +4,6 @@ mod neovim_connection;
mod vim_binding_test_context;
mod vim_test_context;
use std::sync::Arc;
use command_palette::CommandPalette;
use editor::DisplayPoint;
pub use neovim_backed_binding_test_context::*;
@@ -16,7 +14,7 @@ pub use vim_test_context::*;
use indoc::indoc;
use search::BufferSearchBar;
use crate::{state::Mode, ModeIndicator};
use crate::state::Mode;
#[gpui::test]
async fn test_initially_disabled(cx: &mut gpui::TestAppContext) {
@@ -99,7 +97,7 @@ async fn test_buffer_search(cx: &mut gpui::TestAppContext) {
});
search_bar.read_with(cx.cx, |bar, cx| {
assert_eq!(bar.query(cx), "");
assert_eq!(bar.query_editor.read(cx).text(cx), "");
})
}
@@ -175,7 +173,7 @@ async fn test_selection_on_search(cx: &mut gpui::TestAppContext) {
});
search_bar.read_with(cx.cx, |bar, cx| {
assert_eq!(bar.query(cx), "cc");
assert_eq!(bar.query_editor.read(cx).text(cx), "cc");
});
// wait for the query editor change event to fire.
@@ -197,57 +195,3 @@ async fn test_selection_on_search(cx: &mut gpui::TestAppContext) {
cx.simulate_keystrokes(["shift-n"]);
cx.assert_state(indoc! {"aa\nbb\nˇcc\ncc\ncc\n"}, Mode::Normal);
}
#[gpui::test]
async fn test_status_indicator(
cx: &mut gpui::TestAppContext,
deterministic: Arc<gpui::executor::Deterministic>,
) {
let mut cx = VimTestContext::new(cx, true).await;
deterministic.run_until_parked();
let mode_indicator = cx.workspace(|workspace, cx| {
let status_bar = workspace.status_bar().read(cx);
let mode_indicator = status_bar.item_of_type::<ModeIndicator>();
assert!(mode_indicator.is_some());
mode_indicator.unwrap()
});
assert_eq!(
cx.workspace(|_, cx| mode_indicator.read(cx).mode),
Some(Mode::Normal)
);
// shows the correct mode
cx.simulate_keystrokes(["i"]);
deterministic.run_until_parked();
assert_eq!(
cx.workspace(|_, cx| mode_indicator.read(cx).mode),
Some(Mode::Insert)
);
// shows even in search
cx.simulate_keystrokes(["escape", "v", "/"]);
deterministic.run_until_parked();
assert_eq!(
cx.workspace(|_, cx| mode_indicator.read(cx).mode),
Some(Mode::Visual { line: false })
);
// hides if vim mode is disabled
cx.disable_vim();
deterministic.run_until_parked();
cx.workspace(|workspace, cx| {
let status_bar = workspace.status_bar().read(cx);
let mode_indicator = status_bar.item_of_type::<ModeIndicator>().unwrap();
assert!(mode_indicator.read(cx).mode.is_none());
});
cx.enable_vim();
deterministic.run_until_parked();
cx.workspace(|workspace, cx| {
let status_bar = workspace.status_bar().read(cx);
let mode_indicator = status_bar.item_of_type::<ModeIndicator>().unwrap();
assert!(mode_indicator.read(cx).mode.is_some());
});
}

View File

@@ -43,10 +43,6 @@ impl<'a> VimTestContext<'a> {
toolbar.add_item(project_search_bar, cx);
})
});
workspace.status_bar().update(cx, |status_bar, cx| {
let vim_mode_indicator = cx.add_view(ModeIndicator::new);
status_bar.add_right_item(vim_mode_indicator, cx);
});
});
Self { cx }

View File

@@ -3,7 +3,6 @@ mod test;
mod editor_events;
mod insert;
mod mode_indicator;
mod motion;
mod normal;
mod object;
@@ -15,11 +14,10 @@ use anyhow::Result;
use collections::CommandPaletteFilter;
use editor::{Bias, Editor, EditorMode, Event};
use gpui::{
actions, impl_actions, keymap_matcher::KeymapContext, keymap_matcher::MatchResult, AppContext,
Subscription, ViewContext, ViewHandle, WeakViewHandle, WindowContext,
actions, impl_actions, keymap_matcher::KeymapContext, AppContext, Subscription, ViewContext,
ViewHandle, WeakViewHandle, WindowContext,
};
use language::CursorShape;
pub use mode_indicator::ModeIndicator;
use motion::Motion;
use normal::normal_replace;
use serde::Deserialize;
@@ -43,11 +41,6 @@ struct Number(u8);
actions!(vim, [Tab, Enter]);
impl_actions!(vim, [Number, SwitchMode, PushOperator]);
#[derive(Copy, Clone, Debug)]
enum VimEvent {
ModeChanged { mode: Mode },
}
pub fn init(cx: &mut AppContext) {
settings::register::<VimModeSetting>(cx);
@@ -97,10 +90,7 @@ pub fn init(cx: &mut AppContext) {
}
pub fn observe_keystrokes(cx: &mut WindowContext) {
cx.observe_keystrokes(|_keystroke, result, handled_by, cx| {
if result == &MatchResult::Pending {
return true;
}
cx.observe_keystrokes(|_keystroke, _result, handled_by, cx| {
if let Some(handled_by) = handled_by {
// Keystroke is handled by the vim system, so continue forward
if handled_by.namespace() == "vim" {
@@ -126,6 +116,7 @@ pub fn observe_keystrokes(cx: &mut WindowContext) {
pub struct Vim {
active_editor: Option<WeakViewHandle<Editor>>,
editor_subscription: Option<Subscription>,
enabled: bool,
state: VimState,
}
@@ -184,8 +175,6 @@ impl Vim {
self.state.mode = mode;
self.state.operator_stack.clear();
cx.emit_global(VimEvent::ModeChanged { mode });
// Sync editor settings like clip mode
self.sync_vim_settings(cx);
@@ -254,14 +243,10 @@ impl Vim {
match Vim::read(cx).active_operator() {
Some(Operator::FindForward { before }) => {
let find = Motion::FindForward { before, text };
Vim::update(cx, |vim, _| vim.state.last_find = Some(find.clone()));
motion::motion(find, cx)
motion::motion(Motion::FindForward { before, text }, cx)
}
Some(Operator::FindBackward { after }) => {
let find = Motion::FindBackward { after, text };
Vim::update(cx, |vim, _| vim.state.last_find = Some(find.clone()));
motion::motion(find, cx)
motion::motion(Motion::FindBackward { after, text }, cx)
}
Some(Operator::Replace) => match Vim::read(cx).state.mode {
Mode::Normal => normal_replace(text, cx),

View File

@@ -58,9 +58,7 @@ pub fn visual_motion(motion: Motion, times: Option<usize>, cx: &mut WindowContex
pub fn visual_object(object: Object, cx: &mut WindowContext) {
Vim::update(cx, |vim, cx| {
if let Some(Operator::Object { around }) = vim.active_operator() {
vim.pop_operator(cx);
if let Operator::Object { around } = vim.pop_operator(cx) {
vim.update_active_editor(cx, |editor, cx| {
editor.change_selections(Some(Autoscroll::fit()), cx, |s| {
s.move_with(|map, selection| {

View File

@@ -1,17 +0,0 @@
{"Put":{"state":"ˇone two three four"}}
{"Key":"f"}
{"Key":"o"}
{"Get":{"state":"one twˇo three four","mode":"Normal"}}
{"Key":","}
{"Get":{"state":"ˇone two three four","mode":"Normal"}}
{"Key":"2"}
{"Key":";"}
{"Get":{"state":"one two three fˇour","mode":"Normal"}}
{"Key":"shift-t"}
{"Key":"e"}
{"Get":{"state":"one two threeˇ four","mode":"Normal"}}
{"Key":"3"}
{"Key":";"}
{"Get":{"state":"oneˇ two three four","mode":"Normal"}}
{"Key":","}
{"Get":{"state":"one two thˇree four","mode":"Normal"}}

View File

@@ -5,7 +5,6 @@ use crate::{
use crate::{AutosaveSetting, DelayedDebouncedEditAction, WorkspaceSettings};
use anyhow::Result;
use client::{proto, Client};
use gpui::geometry::vector::Vector2F;
use gpui::{
fonts::HighlightStyle, AnyElement, AnyViewHandle, AppContext, ModelHandle, Task, View,
ViewContext, ViewHandle, WeakViewHandle, WindowContext,
@@ -204,9 +203,6 @@ pub trait Item: View {
fn show_toolbar(&self) -> bool {
true
}
fn pixel_position_of_cursor(&self) -> Option<Vector2F> {
None
}
}
pub trait ItemHandle: 'static + fmt::Debug {
@@ -275,7 +271,6 @@ pub trait ItemHandle: 'static + fmt::Debug {
fn breadcrumbs(&self, theme: &Theme, cx: &AppContext) -> Option<Vec<BreadcrumbText>>;
fn serialized_item_kind(&self) -> Option<&'static str>;
fn show_toolbar(&self, cx: &AppContext) -> bool;
fn pixel_position_of_cursor(&self, cx: &AppContext) -> Option<Vector2F>;
}
pub trait WeakItemHandle {
@@ -620,10 +615,6 @@ impl<T: Item> ItemHandle for ViewHandle<T> {
fn show_toolbar(&self, cx: &AppContext) -> bool {
self.read(cx).show_toolbar()
}
fn pixel_position_of_cursor(&self, cx: &AppContext) -> Option<Vector2F> {
self.read(cx).pixel_position_of_cursor()
}
}
impl From<Box<dyn ItemHandle>> for AnyViewHandle {

View File

@@ -542,12 +542,6 @@ impl Pane {
self.items.get(self.active_item_index).cloned()
}
pub fn pixel_position_of_cursor(&self, cx: &AppContext) -> Option<Vector2F> {
self.items
.get(self.active_item_index)?
.pixel_position_of_cursor(cx)
}
pub fn item_for_entry(
&self,
entry_id: ProjectEntryId,
@@ -746,10 +740,6 @@ impl Pane {
_: &CloseAllItems,
cx: &mut ViewContext<Self>,
) -> Option<Task<Result<()>>> {
if self.items.is_empty() {
return None;
}
Some(self.close_items(cx, move |_| true))
}

Some files were not shown because too many files have changed in this diff