Compare commits
196 Commits
v0.182.10
...
improve-re
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
d42ce71772 | ||
|
|
82511d4300 | ||
|
|
46a7cd93d9 | ||
|
|
d253889fe3 | ||
|
|
29149c2eb5 | ||
|
|
853f47b9a2 | ||
|
|
86dbbdc921 | ||
|
|
d78cf50efb | ||
|
|
0182e09e33 | ||
|
|
6f6e207eb5 | ||
|
|
149cdeca29 | ||
|
|
92dc812aea | ||
|
|
c7e80c80c6 | ||
|
|
c381a500f8 | ||
|
|
ff4334efc7 | ||
|
|
b1e4e6048a | ||
|
|
d0f806456c | ||
|
|
b6cce1ed91 | ||
|
|
05fc9ee396 | ||
|
|
8f52bb92b6 | ||
|
|
144fd0b00d | ||
|
|
cd4a3fd679 | ||
|
|
42c3f4e7cf | ||
|
|
90dec1d451 | ||
|
|
afabcd1547 | ||
|
|
ccf9aef767 | ||
|
|
aef78dcffd | ||
|
|
6f0951ff77 | ||
|
|
5e094553fa | ||
|
|
32829d9f12 | ||
|
|
e4cf7fe8f5 | ||
|
|
2b89b97cd1 | ||
|
|
e26f0a331f | ||
|
|
7e1b419243 | ||
|
|
98d001bad5 | ||
|
|
d4a985a6e3 | ||
|
|
616d17f517 | ||
|
|
e1c42315dc | ||
|
|
cfc848d24b | ||
|
|
d4761cea47 | ||
|
|
b794919842 | ||
|
|
fc1252b0cd | ||
|
|
12b012eab3 | ||
|
|
77f32582e2 | ||
|
|
0d6e455bf6 | ||
|
|
5f897b0e00 | ||
|
|
d74f0735c2 | ||
|
|
a8b1ef3531 | ||
|
|
c8ccc472b5 | ||
|
|
26b9c32e96 | ||
|
|
db56254517 | ||
|
|
9d91908256 | ||
|
|
6b80eb556c | ||
|
|
2603f36737 | ||
|
|
6c93d107c2 | ||
|
|
5b6efa4c02 | ||
|
|
84aa480344 | ||
|
|
6db29eb90a | ||
|
|
ff41be30dc | ||
|
|
47b663a8df | ||
|
|
1d9915f88a | ||
|
|
584fa3db53 | ||
|
|
a051194195 | ||
|
|
78ecc3cef0 | ||
|
|
ac8a4ba5d4 | ||
|
|
9863b48dd7 | ||
|
|
fddaa31655 | ||
|
|
b45230784d | ||
|
|
4a57664c7f | ||
|
|
0eb0a3c7dc | ||
|
|
6278761460 | ||
|
|
f2ce183286 | ||
|
|
98891e4c70 | ||
|
|
5e57f148ac | ||
|
|
128779f615 | ||
|
|
b25c3334cc | ||
|
|
77544f42b1 | ||
|
|
b864a9b0ae | ||
|
|
e4844b281d | ||
|
|
d1ffda9bfe | ||
|
|
8ffa58414d | ||
|
|
fb78cbbd45 | ||
|
|
17719f9f87 | ||
|
|
055df30757 | ||
|
|
429d4580cf | ||
|
|
62ebae96e3 | ||
|
|
0036a33263 | ||
|
|
b22faf96e0 | ||
|
|
dafe994eef | ||
|
|
5994ac5cec | ||
|
|
97a9a5de10 | ||
|
|
730f2e7083 | ||
|
|
141ad72d97 | ||
|
|
a5fe6d1e61 | ||
|
|
5734ffbb18 | ||
|
|
932a7c6440 | ||
|
|
6a60bb189b | ||
|
|
5909d1258b | ||
|
|
78662f8fea | ||
|
|
c2e3134963 | ||
|
|
66b3e03baa | ||
|
|
7caa2c2ea0 | ||
|
|
08ce230bae | ||
|
|
1df01eabfe | ||
|
|
2f5c662c42 | ||
|
|
a03fb3791e | ||
|
|
dd7bc5f199 | ||
|
|
c7d3fbcac1 | ||
|
|
1164829cad | ||
|
|
e09eeb7446 | ||
|
|
cdcad708f6 | ||
|
|
bd4c9b45b6 | ||
|
|
d4736a5427 | ||
|
|
353ae2335b | ||
|
|
ad39d3226f | ||
|
|
c35238bd72 | ||
|
|
5757e352b0 | ||
|
|
c124838a73 | ||
|
|
5ebac7e30c | ||
|
|
c143846e42 | ||
|
|
71c2a11bd9 | ||
|
|
2440faf4b2 | ||
|
|
c0262cf62f | ||
|
|
fd256d159d | ||
|
|
a2a3d1a4bd | ||
|
|
294a1b63c0 | ||
|
|
ffdf725f32 | ||
|
|
8ee6a2b454 | ||
|
|
cf65d9437a | ||
|
|
66dd6726df | ||
|
|
44cb8e582b | ||
|
|
73305ce45e | ||
|
|
94b75f3ad9 | ||
|
|
384868e597 | ||
|
|
d88694f8da | ||
|
|
90f30b5c20 | ||
|
|
24d4f8ca18 | ||
|
|
804066a047 | ||
|
|
4a356466b1 | ||
|
|
0921762b59 | ||
|
|
46b1df2e2d | ||
|
|
986da332db | ||
|
|
dad33f7cc2 | ||
|
|
64241f7d2f | ||
|
|
fbbc23bec3 | ||
|
|
26f4705198 | ||
|
|
3abf95216c | ||
|
|
b0b52f299c | ||
|
|
53cde329da | ||
|
|
b55b310ad0 | ||
|
|
8ab25e2bac | ||
|
|
c10b1f7c61 | ||
|
|
cb1ee01a66 | ||
|
|
90bcde116f | ||
|
|
8ac378b86e | ||
|
|
55760295d9 | ||
|
|
9dfb907f97 | ||
|
|
e20daa7639 | ||
|
|
b46ab367ef | ||
|
|
12212dc329 | ||
|
|
324e4658ba | ||
|
|
ed500dacb6 | ||
|
|
2f4b48129b | ||
|
|
ed7c55a04e | ||
|
|
6db4ab381c | ||
|
|
0e72a7e6ce | ||
|
|
3dc3ab062d | ||
|
|
ed63f216e3 | ||
|
|
ba767a1998 | ||
|
|
23c3f5f410 | ||
|
|
b3be294c90 | ||
|
|
af5318df98 | ||
|
|
60c420a2da | ||
|
|
ee6c33ffb3 | ||
|
|
9ae4f4b158 | ||
|
|
915a1cb116 | ||
|
|
aead0e11ff | ||
|
|
2752c08810 | ||
|
|
780143298a | ||
|
|
088d7c1342 | ||
|
|
64de6bd2a8 | ||
|
|
6aa0248ab3 | ||
|
|
342134fbab | ||
|
|
b47aa33459 | ||
|
|
9f6c5e2877 | ||
|
|
7bf6cd4ccf | ||
|
|
c7963c8a93 | ||
|
|
dd4629433b | ||
|
|
2e56935997 | ||
|
|
e43a397f1d | ||
|
|
9d0fe164a7 | ||
|
|
6d7fef6fd3 | ||
|
|
b67d3fd21b | ||
|
|
1cb4f8288d | ||
|
|
3a8fe4d973 | ||
|
|
9d6d152918 |
19
.github/ISSUE_TEMPLATE/99_other.yml
vendored
Normal file
19
.github/ISSUE_TEMPLATE/99_other.yml
vendored
Normal file
@@ -0,0 +1,19 @@
|
||||
name: Other [Staff Only]
|
||||
description: Zed Staff Only
|
||||
body:
|
||||
- type: textarea
|
||||
attributes:
|
||||
label: Summary
|
||||
value: |
|
||||
<!-- Please insert a one line summary of the issue below -->
|
||||
SUMMARY_SENTENCE_HERE
|
||||
|
||||
### Description
|
||||
|
||||
IF YOU DO NOT WORK FOR ZED INDUSTRIES DO NOT CREATE ISSUES WITH THIS TEMPLATE.
|
||||
THEY WILL BE AUTO-CLOSED AND MAY RESULT IN YOU BEING BANNED FROM THE ZED ISSUE TRACKER.
|
||||
|
||||
FEATURE REQUESTS / SUPPORT REQUESTS SHOULD BE OPENED AS DISCUSSIONS:
|
||||
https://github.com/zed-industries/zed/discussions/new/choose
|
||||
validations:
|
||||
required: true
|
||||
42
.github/workflows/ci.yml
vendored
42
.github/workflows/ci.yml
vendored
@@ -225,7 +225,7 @@ jobs:
|
||||
|
||||
- name: Check for new vulnerable dependencies
|
||||
if: github.event_name == 'pull_request'
|
||||
uses: actions/dependency-review-action@3b139cfc5fae8b618d3eae3675e383bb1769c019 # v4
|
||||
uses: actions/dependency-review-action@67d4f4bd7a9b17a0db54d2a7519187c65e339de8 # v4
|
||||
with:
|
||||
license-check: false
|
||||
|
||||
@@ -465,6 +465,7 @@ jobs:
|
||||
- job_spec
|
||||
- style
|
||||
- migration_checks
|
||||
# run_tests: If adding required tests, add them here and to script below.
|
||||
- workspace_hack
|
||||
- linux_tests
|
||||
- build_remote_server
|
||||
@@ -482,11 +483,14 @@ jobs:
|
||||
|
||||
# Only check test jobs if they were supposed to run
|
||||
if [[ "${{ needs.job_spec.outputs.run_tests }}" == "true" ]]; then
|
||||
[[ "${{ needs.workspace_hack.result }}" != 'success' ]] && { RET_CODE=1; echo "Workspace Hack failed"; }
|
||||
[[ "${{ needs.macos_tests.result }}" != 'success' ]] && { RET_CODE=1; echo "macOS tests failed"; }
|
||||
[[ "${{ needs.linux_tests.result }}" != 'success' ]] && { RET_CODE=1; echo "Linux tests failed"; }
|
||||
[[ "${{ needs.windows_tests.result }}" != 'success' ]] && { RET_CODE=1; echo "Windows tests failed"; }
|
||||
[[ "${{ needs.windows_clippy.result }}" != 'success' ]] && { RET_CODE=1; echo "Windows clippy failed"; }
|
||||
[[ "${{ needs.build_remote_server.result }}" != 'success' ]] && { RET_CODE=1; echo "Remote server build failed"; }
|
||||
# This check is intentionally disabled. See: https://github.com/zed-industries/zed/pull/28431
|
||||
# [[ "${{ needs.migration_checks.result }}" != 'success' ]] && { RET_CODE=1; echo "Migration Checks failed"; }
|
||||
fi
|
||||
if [[ "$RET_CODE" -eq 0 ]]; then
|
||||
echo "All tests passed successfully!"
|
||||
@@ -590,7 +594,7 @@ jobs:
|
||||
timeout-minutes: 60
|
||||
name: Linux x86_x64 release bundle
|
||||
runs-on:
|
||||
- buildjet-16vcpu-ubuntu-2004
|
||||
- buildjet-16vcpu-ubuntu-2004 # ubuntu 20.04 for minimal glibc
|
||||
if: |
|
||||
startsWith(github.ref, 'refs/tags/v')
|
||||
|| contains(github.event.pull_request.labels.*.name, 'run-bundling')
|
||||
@@ -618,26 +622,23 @@ jobs:
|
||||
- name: Create Linux .tar.gz bundle
|
||||
run: script/bundle-linux
|
||||
|
||||
- name: Upload Linux bundle to workflow run if main branch or specific label
|
||||
- name: Upload Artifact to Workflow - zed (run-bundling)
|
||||
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4
|
||||
if: |
|
||||
github.ref == 'refs/heads/main'
|
||||
|| contains(github.event.pull_request.labels.*.name, 'run-bundling')
|
||||
if: contains(github.event.pull_request.labels.*.name, 'run-bundling')
|
||||
with:
|
||||
name: zed-${{ github.event.pull_request.head.sha || github.sha }}-x86_64-unknown-linux-gnu.tar.gz
|
||||
path: target/release/zed-*.tar.gz
|
||||
|
||||
- name: Upload Linux remote server to workflow run if main branch or specific label
|
||||
- name: Upload Artifact to Workflow - zed-remote-server (run-bundling)
|
||||
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4
|
||||
if: |
|
||||
github.ref == 'refs/heads/main'
|
||||
|| contains(github.event.pull_request.labels.*.name, 'run-bundling')
|
||||
if: contains(github.event.pull_request.labels.*.name, 'run-bundling')
|
||||
with:
|
||||
name: zed-remote-server-${{ github.event.pull_request.head.sha || github.sha }}-x86_64-unknown-linux-gnu.gz
|
||||
path: target/zed-remote-server-linux-x86_64.gz
|
||||
|
||||
- name: Upload app bundle to release
|
||||
- name: Upload Artifacts to release
|
||||
uses: softprops/action-gh-release@de2c0eb89ae2a093876385947365aca7b0e5f844 # v1
|
||||
if: ${{ !(contains(github.event.pull_request.labels.*.name, 'run-bundling')) }}
|
||||
with:
|
||||
draft: true
|
||||
prerelease: ${{ env.RELEASE_CHANNEL == 'preview' }}
|
||||
@@ -676,29 +677,26 @@ jobs:
|
||||
# This exports RELEASE_CHANNEL into env (GITHUB_ENV)
|
||||
script/determine-release-channel
|
||||
|
||||
- name: Create and upload Linux .tar.gz bundle
|
||||
- name: Create and upload Linux .tar.gz bundles
|
||||
run: script/bundle-linux
|
||||
|
||||
- name: Upload Linux bundle to workflow run if main branch or specific label
|
||||
- name: Upload Artifact to Workflow - zed (run-bundling)
|
||||
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4
|
||||
if: |
|
||||
github.ref == 'refs/heads/main'
|
||||
|| contains(github.event.pull_request.labels.*.name, 'run-bundling')
|
||||
if: contains(github.event.pull_request.labels.*.name, 'run-bundling')
|
||||
with:
|
||||
name: zed-${{ github.event.pull_request.head.sha || github.sha }}-aarch64-unknown-linux-gnu.tar.gz
|
||||
path: target/release/zed-*.tar.gz
|
||||
|
||||
- name: Upload Linux remote server to workflow run if main branch or specific label
|
||||
- name: Upload Artifact to Workflow - zed-remote-server (run-bundling)
|
||||
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4
|
||||
if: |
|
||||
github.ref == 'refs/heads/main'
|
||||
|| contains(github.event.pull_request.labels.*.name, 'run-bundling')
|
||||
if: contains(github.event.pull_request.labels.*.name, 'run-bundling')
|
||||
with:
|
||||
name: zed-remote-server-${{ github.event.pull_request.head.sha || github.sha }}-aarch64-unknown-linux-gnu.gz
|
||||
path: target/zed-remote-server-linux-aarch64.gz
|
||||
|
||||
- name: Upload app bundle to release
|
||||
- name: Upload Artifacts to release
|
||||
uses: softprops/action-gh-release@de2c0eb89ae2a093876385947365aca7b0e5f844 # v1
|
||||
if: ${{ !(contains(github.event.pull_request.labels.*.name, 'run-bundling')) }}
|
||||
with:
|
||||
draft: true
|
||||
prerelease: ${{ env.RELEASE_CHANNEL == 'preview' }}
|
||||
@@ -739,7 +737,7 @@ jobs:
|
||||
echo "/nix/var/nix/profiles/default/bin" >> $GITHUB_PATH
|
||||
echo "/Users/administrator/.nix-profile/bin" >> $GITHUB_PATH
|
||||
|
||||
- uses: cachix/install-nix-action@02a151ada4993995686f9ed4f1be7cfbb229e56f # v31
|
||||
- uses: cachix/install-nix-action@d1ca217b388ee87b2507a9a93bf01368bde7cec2 # v31
|
||||
if: ${{ matrix.system.install_nix }}
|
||||
with:
|
||||
github_access_token: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
8
.github/workflows/deploy_collab.yml
vendored
8
.github/workflows/deploy_collab.yml
vendored
@@ -117,12 +117,10 @@ jobs:
|
||||
export ZED_KUBE_NAMESPACE=production
|
||||
export ZED_COLLAB_LOAD_BALANCER_SIZE_UNIT=10
|
||||
export ZED_API_LOAD_BALANCER_SIZE_UNIT=2
|
||||
export ZED_LLM_LOAD_BALANCER_SIZE_UNIT=2
|
||||
elif [[ $GITHUB_REF_NAME = "collab-staging" ]]; then
|
||||
export ZED_KUBE_NAMESPACE=staging
|
||||
export ZED_COLLAB_LOAD_BALANCER_SIZE_UNIT=1
|
||||
export ZED_API_LOAD_BALANCER_SIZE_UNIT=1
|
||||
export ZED_LLM_LOAD_BALANCER_SIZE_UNIT=1
|
||||
else
|
||||
echo "cowardly refusing to deploy from an unknown branch"
|
||||
exit 1
|
||||
@@ -147,9 +145,3 @@ jobs:
|
||||
envsubst < crates/collab/k8s/collab.template.yml | kubectl apply -f -
|
||||
kubectl -n "$ZED_KUBE_NAMESPACE" rollout status deployment/$ZED_SERVICE_NAME --watch
|
||||
echo "deployed ${ZED_SERVICE_NAME} to ${ZED_KUBE_NAMESPACE}"
|
||||
|
||||
export ZED_SERVICE_NAME=llm
|
||||
export ZED_LOAD_BALANCER_SIZE_UNIT=$ZED_LLM_LOAD_BALANCER_SIZE_UNIT
|
||||
envsubst < crates/collab/k8s/collab.template.yml | kubectl apply -f -
|
||||
kubectl -n "$ZED_KUBE_NAMESPACE" rollout status deployment/$ZED_SERVICE_NAME --watch
|
||||
echo "deployed ${ZED_SERVICE_NAME} to ${ZED_KUBE_NAMESPACE}"
|
||||
|
||||
2
.github/workflows/release_nightly.yml
vendored
2
.github/workflows/release_nightly.yml
vendored
@@ -206,7 +206,7 @@ jobs:
|
||||
echo "/nix/var/nix/profiles/default/bin" >> $GITHUB_PATH
|
||||
echo "/Users/administrator/.nix-profile/bin" >> $GITHUB_PATH
|
||||
|
||||
- uses: cachix/install-nix-action@02a151ada4993995686f9ed4f1be7cfbb229e56f # v31
|
||||
- uses: cachix/install-nix-action@d1ca217b388ee87b2507a9a93bf01368bde7cec2 # v31
|
||||
if: ${{ matrix.system.install_nix }}
|
||||
with:
|
||||
github_access_token: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
@@ -1,13 +1,13 @@
|
||||
[
|
||||
{
|
||||
"label": "Debug Zed with LLDB",
|
||||
"adapter": "LLDB",
|
||||
"label": "Debug Zed (CodeLLDB)",
|
||||
"adapter": "CodeLLDB",
|
||||
"program": "$ZED_WORKTREE_ROOT/target/debug/zed",
|
||||
"request": "launch",
|
||||
"cwd": "$ZED_WORKTREE_ROOT"
|
||||
},
|
||||
{
|
||||
"label": "Debug Zed with GDB",
|
||||
"label": "Debug Zed (GDB)",
|
||||
"adapter": "GDB",
|
||||
"program": "$ZED_WORKTREE_ROOT/target/debug/zed",
|
||||
"request": "launch",
|
||||
|
||||
102
Cargo.lock
generated
102
Cargo.lock
generated
@@ -326,7 +326,6 @@ dependencies = [
|
||||
"serde_json",
|
||||
"strum",
|
||||
"thiserror 2.0.12",
|
||||
"util",
|
||||
"workspace-hack",
|
||||
]
|
||||
|
||||
@@ -719,6 +718,7 @@ dependencies = [
|
||||
"schemars",
|
||||
"serde",
|
||||
"serde_json",
|
||||
"settings",
|
||||
"ui",
|
||||
"unindent",
|
||||
"util",
|
||||
@@ -1182,6 +1182,18 @@ dependencies = [
|
||||
"workspace-hack",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "auto_update_helper"
|
||||
version = "0.1.0"
|
||||
dependencies = [
|
||||
"anyhow",
|
||||
"log",
|
||||
"simplelog",
|
||||
"windows 0.61.1",
|
||||
"winresource",
|
||||
"workspace-hack",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "auto_update_ui"
|
||||
version = "0.1.0"
|
||||
@@ -2931,7 +2943,6 @@ dependencies = [
|
||||
name = "collab"
|
||||
version = "0.44.0"
|
||||
dependencies = [
|
||||
"anthropic",
|
||||
"anyhow",
|
||||
"assistant",
|
||||
"assistant_context_editor",
|
||||
@@ -3175,14 +3186,18 @@ dependencies = [
|
||||
name = "component_preview"
|
||||
version = "0.1.0"
|
||||
dependencies = [
|
||||
"anyhow",
|
||||
"client",
|
||||
"collections",
|
||||
"component",
|
||||
"db",
|
||||
"gpui",
|
||||
"languages",
|
||||
"notifications",
|
||||
"project",
|
||||
"serde",
|
||||
"ui",
|
||||
"ui_input",
|
||||
"workspace",
|
||||
"workspace-hack",
|
||||
]
|
||||
@@ -3987,7 +4002,6 @@ dependencies = [
|
||||
"node_runtime",
|
||||
"parking_lot",
|
||||
"paths",
|
||||
"regex",
|
||||
"schemars",
|
||||
"serde",
|
||||
"serde_json",
|
||||
@@ -4019,7 +4033,6 @@ dependencies = [
|
||||
"gpui",
|
||||
"language",
|
||||
"paths",
|
||||
"regex",
|
||||
"serde",
|
||||
"serde_json",
|
||||
"task",
|
||||
@@ -4163,6 +4176,7 @@ dependencies = [
|
||||
"collections",
|
||||
"command_palette_hooks",
|
||||
"dap",
|
||||
"db",
|
||||
"editor",
|
||||
"env_logger 0.11.8",
|
||||
"feature_flags",
|
||||
@@ -4181,6 +4195,7 @@ dependencies = [
|
||||
"settings",
|
||||
"sysinfo",
|
||||
"task",
|
||||
"tasks_ui",
|
||||
"terminal_view",
|
||||
"theme",
|
||||
"ui",
|
||||
@@ -4301,19 +4316,24 @@ dependencies = [
|
||||
"anyhow",
|
||||
"client",
|
||||
"collections",
|
||||
"component",
|
||||
"ctor",
|
||||
"editor",
|
||||
"env_logger 0.11.8",
|
||||
"gpui",
|
||||
"indoc",
|
||||
"language",
|
||||
"linkme",
|
||||
"log",
|
||||
"lsp",
|
||||
"markdown",
|
||||
"pretty_assertions",
|
||||
"project",
|
||||
"rand 0.8.5",
|
||||
"serde",
|
||||
"serde_json",
|
||||
"settings",
|
||||
"text",
|
||||
"theme",
|
||||
"ui",
|
||||
"unindent",
|
||||
@@ -4551,6 +4571,7 @@ dependencies = [
|
||||
"client",
|
||||
"clock",
|
||||
"collections",
|
||||
"command_palette_hooks",
|
||||
"convert_case 0.8.0",
|
||||
"ctor",
|
||||
"db",
|
||||
@@ -4857,28 +4878,40 @@ version = "0.1.0"
|
||||
dependencies = [
|
||||
"agent",
|
||||
"anyhow",
|
||||
"assistant_settings",
|
||||
"assistant_tool",
|
||||
"assistant_tools",
|
||||
"async-watch",
|
||||
"chrono",
|
||||
"clap",
|
||||
"client",
|
||||
"collections",
|
||||
"context_server",
|
||||
"dap",
|
||||
"env_logger 0.11.8",
|
||||
"extension",
|
||||
"fs",
|
||||
"futures 0.3.31",
|
||||
"gpui",
|
||||
"gpui_tokio",
|
||||
"handlebars 4.5.0",
|
||||
"language",
|
||||
"language_extension",
|
||||
"language_model",
|
||||
"language_models",
|
||||
"languages",
|
||||
"node_runtime",
|
||||
"paths",
|
||||
"project",
|
||||
"prompt_store",
|
||||
"release_channel",
|
||||
"reqwest_client",
|
||||
"serde",
|
||||
"serde_json",
|
||||
"settings",
|
||||
"shellexpand 2.1.2",
|
||||
"toml 0.8.20",
|
||||
"unindent",
|
||||
"util",
|
||||
"workspace-hack",
|
||||
]
|
||||
|
||||
@@ -4973,10 +5006,10 @@ dependencies = [
|
||||
"async-tar",
|
||||
"async-trait",
|
||||
"collections",
|
||||
"convert_case 0.8.0",
|
||||
"fs",
|
||||
"futures 0.3.31",
|
||||
"gpui",
|
||||
"heck 0.5.0",
|
||||
"http_client",
|
||||
"language",
|
||||
"log",
|
||||
@@ -7061,9 +7094,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "indexmap"
|
||||
version = "2.8.0"
|
||||
version = "2.9.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "3954d50fe15b02142bf25d3b8bdadb634ec3948f103d04ffe3031bc8fe9d7058"
|
||||
checksum = "cea70ddb795996207ad57735b50c5982d8844f38ba9ee5f1aedcfb708a2aa11e"
|
||||
dependencies = [
|
||||
"equivalent",
|
||||
"hashbrown 0.15.2",
|
||||
@@ -7169,9 +7202,12 @@ name = "install_cli"
|
||||
version = "0.1.0"
|
||||
dependencies = [
|
||||
"anyhow",
|
||||
"client",
|
||||
"gpui",
|
||||
"release_channel",
|
||||
"smol",
|
||||
"util",
|
||||
"workspace",
|
||||
"workspace-hack",
|
||||
]
|
||||
|
||||
@@ -7920,9 +7956,9 @@ checksum = "8355be11b20d696c8f18f6cc018c4e372165b1fa8126cef092399c9951984ffa"
|
||||
|
||||
[[package]]
|
||||
name = "libmimalloc-sys"
|
||||
version = "0.1.41"
|
||||
version = "0.1.42"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "6b20daca3a4ac14dbdc753c5e90fc7b490a48a9131daed3c9a9ced7b2defd37b"
|
||||
checksum = "ec9d6fac27761dabcd4ee73571cdb06b7022dc99089acbe5435691edffaac0f4"
|
||||
dependencies = [
|
||||
"cc",
|
||||
"libc",
|
||||
@@ -8593,9 +8629,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "mimalloc"
|
||||
version = "0.1.45"
|
||||
version = "0.1.46"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "03cb1f88093fe50061ca1195d336ffec131347c7b833db31f9ab62a2d1b7925f"
|
||||
checksum = "995942f432bbb4822a7e9c3faa87a695185b0d09273ba85f097b54f4e458f2af"
|
||||
dependencies = [
|
||||
"libmimalloc-sys",
|
||||
]
|
||||
@@ -10933,9 +10969,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "prometheus"
|
||||
version = "0.13.4"
|
||||
version = "0.14.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "3d33c28a30771f7f96db69893f78b857f7450d7e0237e9c8fc6427a81bae7ed1"
|
||||
checksum = "3ca5326d8d0b950a9acd87e6a3f94745394f62e4dae1b1ee22b2bc0c394af43a"
|
||||
dependencies = [
|
||||
"cfg-if",
|
||||
"fnv",
|
||||
@@ -10943,7 +10979,7 @@ dependencies = [
|
||||
"memchr",
|
||||
"parking_lot",
|
||||
"protobuf",
|
||||
"thiserror 1.0.69",
|
||||
"thiserror 2.0.12",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -11118,9 +11154,23 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "protobuf"
|
||||
version = "2.28.0"
|
||||
version = "3.7.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "106dd99e98437432fed6519dedecfade6a06a73bb7b2a1e019fdd2bee5778d94"
|
||||
checksum = "d65a1d4ddae7d8b5de68153b48f6aa3bba8cb002b243dbdbc55a5afbc98f99f4"
|
||||
dependencies = [
|
||||
"once_cell",
|
||||
"protobuf-support",
|
||||
"thiserror 1.0.69",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "protobuf-support"
|
||||
version = "3.7.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "3e36c2f31e0a47f9280fb347ef5e461ffcd2c52dd520d8e216b52f93b0b0d7d6"
|
||||
dependencies = [
|
||||
"thiserror 1.0.69",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "psm"
|
||||
@@ -13205,9 +13255,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "smallvec"
|
||||
version = "1.14.0"
|
||||
version = "1.15.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "7fcf8323ef1faaee30a44a340193b1ac6814fd9b7b4e88e9d4519a3e4abe1cfd"
|
||||
checksum = "8917285742e9f3e1683f0a9c4e6b57960b7314d0b08d30d1ecd426713ee2eee9"
|
||||
dependencies = [
|
||||
"serde",
|
||||
]
|
||||
@@ -14173,9 +14223,7 @@ version = "0.1.0"
|
||||
dependencies = [
|
||||
"anyhow",
|
||||
"collections",
|
||||
"debugger_ui",
|
||||
"editor",
|
||||
"feature_flags",
|
||||
"file_icons",
|
||||
"fuzzy",
|
||||
"gpui",
|
||||
@@ -17583,6 +17631,7 @@ dependencies = [
|
||||
"ui",
|
||||
"util",
|
||||
"uuid",
|
||||
"windows 0.61.1",
|
||||
"workspace-hack",
|
||||
"zed_actions",
|
||||
]
|
||||
@@ -17745,6 +17794,8 @@ dependencies = [
|
||||
"wasmtime-cranelift",
|
||||
"wasmtime-environ",
|
||||
"winapi",
|
||||
"windows-core 0.61.0",
|
||||
"windows-numerics",
|
||||
"windows-sys 0.48.0",
|
||||
"windows-sys 0.52.0",
|
||||
"windows-sys 0.59.0",
|
||||
@@ -18089,7 +18140,7 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "zed"
|
||||
version = "0.182.10"
|
||||
version = "0.183.0"
|
||||
dependencies = [
|
||||
"activity_indicator",
|
||||
"agent",
|
||||
@@ -18185,7 +18236,6 @@ dependencies = [
|
||||
"settings",
|
||||
"settings_ui",
|
||||
"shellexpand 2.1.2",
|
||||
"simplelog",
|
||||
"smol",
|
||||
"snippet_provider",
|
||||
"snippets_ui",
|
||||
@@ -18220,6 +18270,7 @@ dependencies = [
|
||||
"workspace-hack",
|
||||
"zed_actions",
|
||||
"zeta",
|
||||
"zlog",
|
||||
"zlog_settings",
|
||||
]
|
||||
|
||||
@@ -18269,7 +18320,7 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "zed_html"
|
||||
version = "0.2.0"
|
||||
version = "0.2.1"
|
||||
dependencies = [
|
||||
"zed_extension_api 0.1.0",
|
||||
]
|
||||
@@ -18532,7 +18583,10 @@ dependencies = [
|
||||
name = "zlog"
|
||||
version = "0.1.0"
|
||||
dependencies = [
|
||||
"anyhow",
|
||||
"chrono",
|
||||
"log",
|
||||
"tempfile",
|
||||
"workspace-hack",
|
||||
]
|
||||
|
||||
|
||||
25
Cargo.toml
25
Cargo.toml
@@ -15,6 +15,7 @@ members = [
|
||||
"crates/assistant_tools",
|
||||
"crates/audio",
|
||||
"crates/auto_update",
|
||||
"crates/auto_update_helper",
|
||||
"crates/auto_update_ui",
|
||||
"crates/aws_http_client",
|
||||
"crates/bedrock",
|
||||
@@ -222,6 +223,7 @@ assistant_tool = { path = "crates/assistant_tool" }
|
||||
assistant_tools = { path = "crates/assistant_tools" }
|
||||
audio = { path = "crates/audio" }
|
||||
auto_update = { path = "crates/auto_update" }
|
||||
auto_update_helper = { path = "crates/auto_update_helper" }
|
||||
auto_update_ui = { path = "crates/auto_update_ui" }
|
||||
aws_http_client = { path = "crates/aws_http_client" }
|
||||
bedrock = { path = "crates/bedrock" }
|
||||
@@ -399,8 +401,12 @@ async-tungstenite = "0.29.1"
|
||||
async-watch = "0.3.1"
|
||||
async_zip = { version = "0.0.17", features = ["deflate", "deflate64"] }
|
||||
aws-config = { version = "1.6.1", features = ["behavior-version-latest"] }
|
||||
aws-credential-types = { version = "1.2.2", features = ["hardcoded-credentials"] }
|
||||
aws-sdk-bedrockruntime = { version = "1.80.0", features = ["behavior-version-latest"] }
|
||||
aws-credential-types = { version = "1.2.2", features = [
|
||||
"hardcoded-credentials",
|
||||
] }
|
||||
aws-sdk-bedrockruntime = { version = "1.80.0", features = [
|
||||
"behavior-version-latest",
|
||||
] }
|
||||
aws-smithy-runtime-api = { version = "1.7.4", features = ["http-1x", "client"] }
|
||||
aws-smithy-types = { version = "1.3.0", features = ["http-body-1-x"] }
|
||||
base64 = "0.22"
|
||||
@@ -439,6 +445,7 @@ futures-lite = "1.13"
|
||||
git2 = { version = "0.20.1", default-features = false }
|
||||
globset = "0.4"
|
||||
handlebars = "4.3"
|
||||
heck = "0.5"
|
||||
heed = { version = "0.21.0", features = ["read-txn-no-tls"] }
|
||||
hex = "0.4.3"
|
||||
html5ever = "0.27.0"
|
||||
@@ -615,12 +622,10 @@ features = [
|
||||
[workspace.dependencies.windows]
|
||||
version = "0.61"
|
||||
features = [
|
||||
"Foundation_Collections",
|
||||
"Foundation_Numerics",
|
||||
"Storage_Search",
|
||||
"Storage_Streams",
|
||||
"System_Threading",
|
||||
"UI_StartScreen",
|
||||
"UI_ViewManagement",
|
||||
"Wdk_System_SystemServices",
|
||||
"Win32_Globalization",
|
||||
@@ -647,6 +652,7 @@ features = [
|
||||
"Win32_System_SystemInformation",
|
||||
"Win32_System_SystemServices",
|
||||
"Win32_System_Threading",
|
||||
"Win32_System_Variant",
|
||||
"Win32_System_WinRT",
|
||||
"Win32_UI_Controls",
|
||||
"Win32_UI_HiDpi",
|
||||
@@ -654,6 +660,7 @@ features = [
|
||||
"Win32_UI_Input_KeyboardAndMouse",
|
||||
"Win32_UI_Shell",
|
||||
"Win32_UI_Shell_Common",
|
||||
"Win32_UI_Shell_PropertiesSystem",
|
||||
"Win32_UI_WindowsAndMessaging",
|
||||
]
|
||||
|
||||
@@ -777,4 +784,12 @@ let_underscore_future = "allow"
|
||||
too_many_arguments = "allow"
|
||||
|
||||
[workspace.metadata.cargo-machete]
|
||||
ignored = ["bindgen", "cbindgen", "prost_build", "serde", "component", "linkme", "workspace-hack"]
|
||||
ignored = [
|
||||
"bindgen",
|
||||
"cbindgen",
|
||||
"prost_build",
|
||||
"serde",
|
||||
"component",
|
||||
"linkme",
|
||||
"workspace-hack",
|
||||
]
|
||||
|
||||
1
assets/icons/binary.svg
Normal file
1
assets/icons/binary.svg
Normal file
@@ -0,0 +1 @@
|
||||
<svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round" class="lucide lucide-binary-icon lucide-binary"><rect x="14" y="14" width="4" height="6" rx="2"/><rect x="6" y="4" width="4" height="6" rx="2"/><path d="M6 20h4"/><path d="M14 10h4"/><path d="M6 14h2v6"/><path d="M14 4h2v6"/></svg>
|
||||
|
After Width: | Height: | Size: 413 B |
1
assets/icons/file_icons/vyper.svg
Normal file
1
assets/icons/file_icons/vyper.svg
Normal file
@@ -0,0 +1 @@
|
||||
<svg width="16" height="16" fill="none" xml:space="preserve" xmlns="http://www.w3.org/2000/svg"><g style="fill:#000;fill-opacity:1" fill="#180c25"><path d="m-116.1-101.4-28.9-28.9a6.7 6.7 0 0 1-1.8-4.7v-41.2c0-2.4-2.4-4.8-4.8-4.8h-9.6a5.2 5.2 0 0 0-4.8 4.8v48c0 2.5 1 5 2.7 6.8l33.6 33.6a9.6 9.6 0 0 0 6.8 2.8h4.8c2.7 0 4.8-2.2 4.8-4.8v-4.8c0-2.5-1-5-2.8-6.8zM-79.6-176.2c0-2.4-2.4-4.8-4.8-4.8h-9.7a5.2 5.2 0 0 0-4.7 4.8v41.2c0 1.8-.8 3.5-2 4.7l-9.6 9.7a9.5 9.5 0 0 0-2.8 6.8v4.8c0 2.6 2.1 4.7 4.8 4.7h4.8c2.4 0 4.9-.9 6.7-2.8l14.4-14.3a9.6 9.6 0 0 0 2.8-6.8v-48z" style="fill:#000;fill-opacity:1;stroke-width:.255894" transform="translate(21.6 22.7) scale(.11067)"/></g></svg>
|
||||
|
After Width: | Height: | Size: 677 B |
1
assets/icons/flame.svg
Normal file
1
assets/icons/flame.svg
Normal file
@@ -0,0 +1 @@
|
||||
<svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round" class="lucide lucide-flame-icon lucide-flame"><path d="M8.5 14.5A2.5 2.5 0 0 0 11 12c0-1.38-.5-2-1-3-1.072-2.143-.224-4.054 2-6 .5 2.5 2 4.9 4 6.5 2 1.6 3 3.5 3 5.5a7 7 0 1 1-14 0c0-1.153.433-2.294 1-3a2.5 2.5 0 0 0 2.5 2.5z"/></svg>
|
||||
|
After Width: | Height: | Size: 415 B |
1
assets/icons/function.svg
Normal file
1
assets/icons/function.svg
Normal file
@@ -0,0 +1 @@
|
||||
<svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round" class="lucide lucide-square-function-icon lucide-square-function"><rect width="18" height="18" x="3" y="3" rx="2" ry="2"/><path d="M9 17c2 0 2.8-1 2.8-2.8V10c0-2 1-3.3 3.2-3"/><path d="M9 11.2h5.7"/></svg>
|
||||
|
After Width: | Height: | Size: 387 B |
5
assets/icons/layout.svg
Normal file
5
assets/icons/layout.svg
Normal file
@@ -0,0 +1,5 @@
|
||||
<svg width="24" height="24" viewBox="0 0 24 24" fill="none" xmlns="http://www.w3.org/2000/svg">
|
||||
<path d="M20 14H4C3.44772 14 3 14.4477 3 15V20C3 20.5523 3.44772 21 4 21H20C20.5523 21 21 20.5523 21 20V15C21 14.4477 20.5523 14 20 14Z" stroke="black" stroke-width="2" stroke-linecap="round" stroke-linejoin="round"/>
|
||||
<path d="M11 3H4C3.44772 3 3 3.44772 3 4V9C3 9.55228 3.44772 10 4 10H11C11.5523 10 12 9.55228 12 9V4C12 3.44772 11.5523 3 11 3Z" stroke="black" stroke-width="2" stroke-linecap="round" stroke-linejoin="round"/>
|
||||
<path d="M20 3H17C16.4477 3 16 3.44772 16 4V9C16 9.55228 16.4477 10 17 10H20C20.5523 10 21 9.55228 21 9V4C21 3.44772 20.5523 3 20 3Z" stroke="black" stroke-width="2" stroke-linecap="round" stroke-linejoin="round"/>
|
||||
</svg>
|
||||
|
After Width: | Height: | Size: 746 B |
@@ -354,11 +354,11 @@
|
||||
"alt-shift-left": "editor::SelectSmallerSyntaxNode", // Shrink Selection
|
||||
"ctrl-shift-l": "editor::SelectAllMatches", // Select all occurrences of current selection
|
||||
"ctrl-f2": "editor::SelectAllMatches", // Select all occurrences of current word
|
||||
"ctrl-d": ["editor::SelectNext", { "replace_newest": false }],
|
||||
"ctrl-shift-down": ["editor::SelectNext", { "replace_newest": false }], // Add selection to Next Find Match
|
||||
"ctrl-shift-up": ["editor::SelectPrevious", { "replace_newest": false }],
|
||||
"ctrl-k ctrl-d": ["editor::SelectNext", { "replace_newest": true }],
|
||||
"ctrl-k ctrl-shift-d": ["editor::SelectPrevious", { "replace_newest": true }],
|
||||
"ctrl-d": ["editor::SelectNext", { "replace_newest": false }], // editor.action.addSelectionToNextFindMatch / find_under_expand
|
||||
"ctrl-shift-down": ["editor::SelectNext", { "replace_newest": false }], // editor.action.addSelectionToNextFindMatch
|
||||
"ctrl-shift-up": ["editor::SelectPrevious", { "replace_newest": false }], // editor.action.addSelectionToPreviousFindMatch
|
||||
"ctrl-k ctrl-d": ["editor::SelectNext", { "replace_newest": true }], // editor.action.moveSelectionToNextFindMatch / find_under_expand_skip
|
||||
"ctrl-k ctrl-shift-d": ["editor::SelectPrevious", { "replace_newest": true }], // editor.action.moveSelectionToPreviousFindMatch
|
||||
"ctrl-k ctrl-i": "editor::Hover",
|
||||
"ctrl-/": ["editor::ToggleComments", { "advance_downwards": false }],
|
||||
"ctrl-u": "editor::UndoSelection",
|
||||
@@ -647,7 +647,6 @@
|
||||
},
|
||||
{
|
||||
"context": "AgentPanel && prompt_editor",
|
||||
"use_key_equivalents": true,
|
||||
"bindings": {
|
||||
"cmd-n": "agent::NewTextThread",
|
||||
"cmd-alt-t": "agent::NewThread"
|
||||
@@ -663,7 +662,6 @@
|
||||
},
|
||||
{
|
||||
"context": "EditMessageEditor > Editor",
|
||||
"use_key_equivalents": true,
|
||||
"bindings": {
|
||||
"escape": "menu::Cancel",
|
||||
"enter": "menu::Confirm",
|
||||
@@ -672,7 +670,6 @@
|
||||
},
|
||||
{
|
||||
"context": "AgentFeedbackMessageEditor > Editor",
|
||||
"use_key_equivalents": true,
|
||||
"bindings": {
|
||||
"escape": "menu::Cancel",
|
||||
"enter": "menu::Confirm",
|
||||
@@ -785,6 +782,7 @@
|
||||
"shift-tab": "git_panel::FocusEditor",
|
||||
"escape": "git_panel::ToggleFocus",
|
||||
"ctrl-enter": "git::Commit",
|
||||
"ctrl-shift-enter": "git::Amend",
|
||||
"alt-enter": "menu::SecondaryConfirm",
|
||||
"delete": ["git::RestoreFile", { "skip_prompt": false }],
|
||||
"backspace": ["git::RestoreFile", { "skip_prompt": false }],
|
||||
@@ -793,18 +791,25 @@
|
||||
"ctrl-delete": ["git::RestoreFile", { "skip_prompt": false }]
|
||||
}
|
||||
},
|
||||
{
|
||||
"context": "GitPanel && CommitEditor",
|
||||
"use_key_equivalents": true,
|
||||
"bindings": {
|
||||
"escape": "git::Cancel"
|
||||
}
|
||||
},
|
||||
{
|
||||
"context": "GitCommit > Editor",
|
||||
"bindings": {
|
||||
"escape": "menu::Cancel",
|
||||
"enter": "editor::Newline",
|
||||
"ctrl-enter": "git::Commit",
|
||||
"ctrl-shift-enter": "git::Amend",
|
||||
"alt-l": "git::GenerateCommitMessage"
|
||||
}
|
||||
},
|
||||
{
|
||||
"context": "GitPanel",
|
||||
"use_key_equivalents": true,
|
||||
"bindings": {
|
||||
"ctrl-g ctrl-g": "git::Fetch",
|
||||
"ctrl-g up": "git::Push",
|
||||
@@ -821,6 +826,7 @@
|
||||
"context": "GitDiff > Editor",
|
||||
"bindings": {
|
||||
"ctrl-enter": "git::Commit",
|
||||
"ctrl-shift-enter": "git::Amend",
|
||||
"ctrl-space": "git::StageAll",
|
||||
"ctrl-shift-space": "git::UnstageAll"
|
||||
}
|
||||
@@ -839,6 +845,7 @@
|
||||
"shift-tab": "git_panel::FocusChanges",
|
||||
"enter": "editor::Newline",
|
||||
"ctrl-enter": "git::Commit",
|
||||
"ctrl-shift-enter": "git::Amend",
|
||||
"alt-up": "git_panel::FocusChanges",
|
||||
"alt-l": "git::GenerateCommitMessage"
|
||||
}
|
||||
|
||||
@@ -491,12 +491,15 @@
|
||||
"alt-shift-down": "editor::DuplicateLineDown",
|
||||
"ctrl-shift-right": "editor::SelectLargerSyntaxNode", // Expand Selection
|
||||
"ctrl-shift-left": "editor::SelectSmallerSyntaxNode", // Shrink Selection
|
||||
"cmd-d": ["editor::SelectNext", { "replace_newest": false }], // Add selection to Next Find Match
|
||||
"cmd-d": ["editor::SelectNext", { "replace_newest": false }], // editor.action.addSelectionToNextFindMatch / find_under_expand
|
||||
"cmd-shift-l": "editor::SelectAllMatches", // Select all occurrences of current selection
|
||||
"cmd-f2": "editor::SelectAllMatches", // Select all occurrences of current word
|
||||
"ctrl-cmd-d": ["editor::SelectPrevious", { "replace_newest": false }],
|
||||
"cmd-k cmd-d": ["editor::SelectNext", { "replace_newest": true }],
|
||||
"cmd-k ctrl-cmd-d": ["editor::SelectPrevious", { "replace_newest": true }],
|
||||
"cmd-k cmd-d": ["editor::SelectNext", { "replace_newest": true }], // editor.action.moveSelectionToNextFindMatch / find_under_expand_skip
|
||||
// macOS binds `ctrl-cmd-d` to Show Dictionary which breaks these two binds
|
||||
// To use `ctrl-cmd-d` or `ctrl-k ctrl-cmd-d` in Zed you must execute this command and then restart:
|
||||
// defaults write com.apple.symbolichotkeys AppleSymbolicHotKeys -dict-add 70 '<dict><key>enabled</key><false/></dict>'
|
||||
"ctrl-cmd-d": ["editor::SelectPrevious", { "replace_newest": false }], // editor.action.addSelectionToPreviousFindMatch
|
||||
"cmd-k ctrl-cmd-d": ["editor::SelectPrevious", { "replace_newest": true }], // editor.action.moveSelectionToPreviousFindMatch
|
||||
"cmd-k cmd-i": "editor::Hover",
|
||||
"cmd-/": ["editor::ToggleComments", { "advance_downwards": false }],
|
||||
"cmd-u": "editor::UndoSelection",
|
||||
@@ -852,17 +855,26 @@
|
||||
"shift-tab": "git_panel::FocusEditor",
|
||||
"escape": "git_panel::ToggleFocus",
|
||||
"cmd-enter": "git::Commit",
|
||||
"cmd-shift-enter": "git::Amend",
|
||||
"backspace": ["git::RestoreFile", { "skip_prompt": false }],
|
||||
"delete": ["git::RestoreFile", { "skip_prompt": false }],
|
||||
"cmd-backspace": ["git::RestoreFile", { "skip_prompt": true }],
|
||||
"cmd-delete": ["git::RestoreFile", { "skip_prompt": true }]
|
||||
}
|
||||
},
|
||||
{
|
||||
"context": "GitPanel && CommitEditor",
|
||||
"use_key_equivalents": true,
|
||||
"bindings": {
|
||||
"escape": "git::Cancel"
|
||||
}
|
||||
},
|
||||
{
|
||||
"context": "GitDiff > Editor",
|
||||
"use_key_equivalents": true,
|
||||
"bindings": {
|
||||
"cmd-enter": "git::Commit",
|
||||
"cmd-shift-enter": "git::Amend",
|
||||
"cmd-ctrl-y": "git::StageAll",
|
||||
"cmd-ctrl-shift-y": "git::UnstageAll"
|
||||
}
|
||||
@@ -873,6 +885,7 @@
|
||||
"bindings": {
|
||||
"enter": "editor::Newline",
|
||||
"cmd-enter": "git::Commit",
|
||||
"cmd-shift-enter": "git::Amend",
|
||||
"tab": "git_panel::FocusChanges",
|
||||
"shift-tab": "git_panel::FocusChanges",
|
||||
"alt-up": "git_panel::FocusChanges",
|
||||
@@ -902,6 +915,7 @@
|
||||
"enter": "editor::Newline",
|
||||
"escape": "menu::Cancel",
|
||||
"cmd-enter": "git::Commit",
|
||||
"cmd-shift-enter": "git::Amend",
|
||||
"alt-tab": "git::GenerateCommitMessage"
|
||||
}
|
||||
},
|
||||
|
||||
@@ -37,6 +37,8 @@
|
||||
"ctrl-shift-a": "editor::SelectLargerSyntaxNode",
|
||||
"ctrl-shift-d": "editor::DuplicateSelection",
|
||||
"alt-f3": "editor::SelectAllMatches", // find_all_under
|
||||
// "ctrl-f3": "", // find_under (cancels any selections)
|
||||
// "cmd-alt-shift-g": "" // find_under_prev (cancels any selections)
|
||||
"f9": "editor::SortLinesCaseSensitive",
|
||||
"ctrl-f9": "editor::SortLinesCaseInsensitive",
|
||||
"f12": "editor::GoToDefinition",
|
||||
@@ -49,7 +51,9 @@
|
||||
"ctrl-k ctrl-l": "editor::ConvertToLowerCase",
|
||||
"shift-alt-m": "markdown::OpenPreviewToTheSide",
|
||||
"ctrl-backspace": "editor::DeleteToPreviousWordStart",
|
||||
"ctrl-delete": "editor::DeleteToNextWordEnd"
|
||||
"ctrl-delete": "editor::DeleteToNextWordEnd",
|
||||
"f3": "editor::FindNextMatch",
|
||||
"shift-f3": "editor::FindPreviousMatch"
|
||||
}
|
||||
},
|
||||
{
|
||||
@@ -58,6 +62,12 @@
|
||||
"ctrl-r": "outline::Toggle"
|
||||
}
|
||||
},
|
||||
{
|
||||
"context": "Editor && !agent_diff",
|
||||
"bindings": {
|
||||
"ctrl-k ctrl-z": "git::Restore"
|
||||
}
|
||||
},
|
||||
{
|
||||
"context": "Pane",
|
||||
"bindings": {
|
||||
|
||||
@@ -38,6 +38,8 @@
|
||||
"cmd-shift-a": "editor::SelectLargerSyntaxNode",
|
||||
"cmd-shift-d": "editor::DuplicateSelection",
|
||||
"ctrl-cmd-g": "editor::SelectAllMatches", // find_all_under
|
||||
// "cmd-alt-g": "", // find_under (cancels any selections)
|
||||
// "cmd-alt-shift-g": "" // find_under_prev (cancels any selections)
|
||||
"f5": "editor::SortLinesCaseSensitive",
|
||||
"ctrl-f5": "editor::SortLinesCaseInsensitive",
|
||||
"shift-f12": "editor::FindAllReferences",
|
||||
@@ -51,7 +53,9 @@
|
||||
"cmd-shift-j": "editor::JoinLines",
|
||||
"shift-alt-m": "markdown::OpenPreviewToTheSide",
|
||||
"ctrl-backspace": "editor::DeleteToPreviousWordStart",
|
||||
"ctrl-delete": "editor::DeleteToNextWordEnd"
|
||||
"ctrl-delete": "editor::DeleteToNextWordEnd",
|
||||
"cmd-g": "editor::FindNextMatch",
|
||||
"cmd-shift-g": "editor::FindPreviousMatch"
|
||||
}
|
||||
},
|
||||
{
|
||||
@@ -60,6 +64,12 @@
|
||||
"cmd-r": "outline::Toggle"
|
||||
}
|
||||
},
|
||||
{
|
||||
"context": "Editor && !agent_diff",
|
||||
"bindings": {
|
||||
"cmd-k cmd-z": "git::Restore"
|
||||
}
|
||||
},
|
||||
{
|
||||
"context": "Pane",
|
||||
"bindings": {
|
||||
|
||||
@@ -203,6 +203,7 @@
|
||||
"c": "vim::PushChange",
|
||||
"shift-c": "vim::ChangeToEndOfLine",
|
||||
"d": "vim::PushDelete",
|
||||
"delete": "vim::DeleteRight",
|
||||
"shift-d": "vim::DeleteToEndOfLine",
|
||||
"shift-j": "vim::JoinLines",
|
||||
"g shift-j": "vim::JoinLinesNoWhitespace",
|
||||
@@ -538,6 +539,7 @@
|
||||
"bindings": {
|
||||
"d": "vim::CurrentLine",
|
||||
"s": "vim::PushDeleteSurrounds",
|
||||
"v": "vim::PushForcedMotion", // "d v"
|
||||
"o": "editor::ToggleSelectedDiffHunks", // "d o"
|
||||
"shift-o": "git::ToggleStaged",
|
||||
"p": "git::Restore", // "d p"
|
||||
@@ -586,6 +588,7 @@
|
||||
"context": "vim_operator == y",
|
||||
"bindings": {
|
||||
"y": "vim::CurrentLine",
|
||||
"v": "vim::PushForcedMotion",
|
||||
"s": ["vim::PushAddSurrounds", {}]
|
||||
}
|
||||
},
|
||||
|
||||
@@ -1,148 +1,65 @@
|
||||
You are an AI assistant integrated into a code editor. You have the programming ability of an expert programmer who takes pride in writing high-quality code and is driven to the point of obsession about solving problems effectively. Your goal is to do one of the following two things:
|
||||
You are a powerful agentic AI coding assistant. You operate exclusively in Zed, the world's best IDE.
|
||||
|
||||
1. Help users answer questions and perform tasks related to their codebase.
|
||||
2. Answer general-purpose questions unrelated to their particular codebase.
|
||||
You are pair programming with a USER to solve their coding task.
|
||||
The task may require creating a new codebase, modifying or debugging an existing codebase, or simply answering a question.
|
||||
Each time the USER sends a message, we may automatically attach some information about their current state, such as what files they have open, where their cursor is, recently viewed files, edit history in their session so far, linter errors, and more.
|
||||
This information may or may not be relevant to the coding task, it is up for you to decide.
|
||||
Your main goal is to follow the USER's instructions at each message.
|
||||
|
||||
It will be up to you to decide which of these you are doing based on what the user has told you. When unclear, ask clarifying questions to understand the user's intent before proceeding.
|
||||
<communication>
|
||||
1. Be conversational but professional.
|
||||
2. Refer to the USER in the second person and yourself in the first person.
|
||||
3. Format your responses in markdown. Use backticks to format file, directory, function, and class names. Use \( and \) for inline math, \[ and \] for block math.
|
||||
4. NEVER lie or make things up.
|
||||
5. Refrain from apologizing all the time when results are unexpected. Instead, just try your best to proceed or explain the circumstances to the user without apologizing.
|
||||
</communication>
|
||||
|
||||
You should only perform actions that modify the user's system if explicitly requested by the user:
|
||||
- If the user asks a question about how to accomplish a task, provide guidance or information, and use read-only tools (e.g., search) to assist. You may suggest potential actions, but do not directly modify the user's system without explicit instruction.
|
||||
- If the user clearly requests that you perform an action, carry out the action directly without explaining why you are doing so.
|
||||
<tool_calling>
|
||||
You have tools at your disposal to solve the coding task. Follow these rules regarding tool calls:
|
||||
1. ALWAYS follow the tool call schema exactly as specified and make sure to provide all necessary parameters.
|
||||
2. The conversation may reference tools that are no longer available. NEVER call tools that are not explicitly provided.
|
||||
3. **NEVER refer to tool names when speaking to the USER.** For example, instead of saying 'I need to use the edit_file tool to edit your file', just say 'I will edit your file'.
|
||||
4. Only calls tools when they are necessary. If the USER's task is general or you already know the answer, just respond without calling tools.
|
||||
5. Before calling each tool, first explain to the USER why you are calling it.
|
||||
</tool_calling>
|
||||
|
||||
When answering questions, it's okay to give incomplete examples containing comments about what would go there in a real version. When being asked to directly perform tasks on the code base, you must ALWAYS make fully working code. You may never "simplify" the code by omitting or deleting functionality you know the user has requested, and you must NEVER write comments like "in a full version, this would..." - instead, you must actually implement the real version. Don't be lazy!
|
||||
<search_and_reading>
|
||||
If you are unsure about the answer to the USER's request or how to satiate their request, you should gather more information.
|
||||
This can be done with additional tool calls, asking clarifying questions, etc...
|
||||
|
||||
Note that project files are automatically backed up. The user can always get them back later if anything goes wrong, so there's
|
||||
no need to create backup files (e.g. `.bak` files) because these files will just take up unnecessary space on the user's disk.
|
||||
For example, if you've performed a semantic search, and the results may not fully answer the USER's request, or merit gathering more information, feel free to call more tools.
|
||||
Similarly, if you've performed an edit that may partially satiate the USER's query, but you're not confident, gather more information or use more tools
|
||||
before ending your turn.
|
||||
|
||||
When attempting to resolve issues around failing tests, never simply remove the failing tests. Unless the user explicitly asks you to remove tests, ALWAYS attempt to fix the code causing the tests to fail.
|
||||
Bias towards not asking the user for help if you can find the answer yourself.
|
||||
</search_and_reading>
|
||||
|
||||
Ignore "TODO"-type comments unless they're relevant to the user's explicit request or the user specifically asks you to address them. It is, however, okay to include them in codebase summaries.
|
||||
<making_code_changes>
|
||||
When making code changes, NEVER output code to the USER, unless requested. Instead use one of the code edit tools to implement the change.
|
||||
Use the code edit tools at most once per turn.
|
||||
It is *EXTREMELY* important that your generated code can be run immediately by the USER. To ensure this, follow these instructions carefully:
|
||||
1. Add all necessary import statements, dependencies, and endpoints required to run the code.
|
||||
2. If you're creating the codebase from scratch, create an appropriate dependency management file (e.g. requirements.txt) with package versions and a helpful README.
|
||||
3. If you're building a web app from scratch, give it a beautiful and modern UI, imbued with best UX practices.
|
||||
4. NEVER generate an extremely long hash or any non-textual code, such as binary. These are not helpful to the USER and are very expensive.
|
||||
5. Unless you are appending some small easy to apply edit to a file, or creating a new file, you MUST read the the contents or section of what you're editing before editing it.
|
||||
6. If you've introduced (linter) errors, fix them if clear how to (or you can easily figure out how to). Do not make uneducated guesses. And DO NOT loop more than 3 times on fixing linter errors on the same file. On the third time, you should stop and ask the user what to do next.
|
||||
7. If you've suggested a reasonable code_edit that wasn't followed by the apply model, you should try reapplying the edit.
|
||||
</making_code_changes>
|
||||
|
||||
<style>
|
||||
Editing code:
|
||||
- Make sure to take previous edits into account.
|
||||
- The edits you perform might lead to errors or warnings. At the end of your changes, check whether you introduced any problems, and fix them before providing a summary of the changes you made.
|
||||
- You may only attempt to fix these up to 3 times. If you have tried 3 times to fix them, and there are still problems remaining, you must not continue trying to fix them, and must instead tell the user that there are problems remaining - and ask if the user would like you to attempt to solve them further.
|
||||
- Do not fix errors unrelated to your changes unless the user explicitly asks you to do so.
|
||||
- Prefer to move files over recreating them. The move can be followed by minor edits if required.
|
||||
- If you seem to be stuck, never go back and "simplify the implementation" by deleting the parts of the implementation you're stuck on and replacing them with comments. If you ever feel the urge to do this, instead immediately stop whatever you're doing (even if the code is in a broken state), report that you are stuck, explain what you're stuck on, and ask the user how to proceed.
|
||||
<debugging>
|
||||
When debugging, only make code changes if you are certain that you can solve the problem.
|
||||
Otherwise, follow debugging best practices:
|
||||
1. Address the root cause instead of the symptoms.
|
||||
2. Add descriptive logging statements and error messages to track variable and code state.
|
||||
3. Add test functions and statements to isolate the problem.
|
||||
</debugging>
|
||||
|
||||
Tool use:
|
||||
- Make sure to adhere to the tools schema.
|
||||
- Provide every required argument.
|
||||
- DO NOT use tools to access items that are already available in the context section.
|
||||
- Use only the tools that are currently available.
|
||||
- DO NOT use a tool that is not available just because it appears in the conversation. This means the user turned it off.
|
||||
|
||||
Responding:
|
||||
- Be concise and direct in your responses.
|
||||
- Never apologize or thank the user.
|
||||
- Don't comment that you have just realized or understood something.
|
||||
- When you are going to make a tool call, tersely explain your reasoning for choosing to use that tool, with no flourishes or commentary beyond that information.
|
||||
For example, rather than saying "You're absolutely right! Thank you for providing that context. Now I understand that we're missing a dependency, and I need to add it:" say "I'll add that missing dependency:" instead.
|
||||
- Also, don't restate what a tool call is about to do (or just did).
|
||||
For example, don't say "Now I'm going to check diagnostics to see if there are any warnings or errors," followed by running a tool which checks diagnostics and reports warnings or errors; instead, just request the tool call without saying anything.
|
||||
- All tool results are provided to you automatically, so DO NOT thank the user when this happens.
|
||||
|
||||
Whenever you mention a code block, you MUST use ONLY the following format:
|
||||
|
||||
```language path/to/Something.blah#L123-456
|
||||
(code goes here)
|
||||
```
|
||||
|
||||
The `#L123-456` means the line number range 123 through 456, and the path/to/Something.blah
|
||||
is a path in the project. (If there is no valid path in the project, then you can use
|
||||
/dev/null/path.extension for its path.) This is the ONLY valid way to format code blocks, because the Markdown parser
|
||||
does not understand the more common ```language syntax, or bare ``` blocks. It only
|
||||
understands this path-based syntax, and if the path is missing, then it will error and you will have to do it over again.
|
||||
|
||||
Just to be really clear about this, if you ever find yourself writing three backticks followed by a language name, STOP!
|
||||
You have made a mistake. You can only ever put paths after triple backticks!
|
||||
|
||||
<example>
|
||||
Based on all the information I've gathered, here's a summary of how this system works:
|
||||
1. The README file is loaded into the system.
|
||||
2. The system finds the first two headers, including everything in between. In this case, that would be:
|
||||
|
||||
```path/to/README.md#L8-12
|
||||
# First Header
|
||||
|
||||
This is the info under the first header.
|
||||
|
||||
## Sub-header
|
||||
```
|
||||
|
||||
3. Then the system finds the last header in the README:
|
||||
|
||||
```path/to/README.md#L27-29
|
||||
## Last Header
|
||||
|
||||
This is the last header in the README.
|
||||
```
|
||||
|
||||
4. Finally, it passes this information on to the next process.
|
||||
</example>
|
||||
|
||||
<example>
|
||||
In Markdown, hash marks signify headings. For example:
|
||||
|
||||
```/dev/null/example.md#L1-3
|
||||
# Level 1 heading
|
||||
## Level 2 heading
|
||||
### Level 3 heading
|
||||
```
|
||||
</example>
|
||||
|
||||
Here are examples of ways you must never render code blocks:
|
||||
|
||||
<bad_example_do_not_do_this>
|
||||
In Markdown, hash marks signify headings. For example:
|
||||
|
||||
```
|
||||
# Level 1 heading
|
||||
## Level 2 heading
|
||||
### Level 3 heading
|
||||
```
|
||||
</bad_example_do_not_do_this>
|
||||
|
||||
This example is unacceptable because it does not include the path.
|
||||
|
||||
<bad_example_do_not_do_this>
|
||||
In Markdown, hash marks signify headings. For example:
|
||||
|
||||
```markdown
|
||||
# Level 1 heading
|
||||
## Level 2 heading
|
||||
### Level 3 heading
|
||||
```
|
||||
</bad_example_do_not_do_this>
|
||||
|
||||
This example is unacceptable because it has the language instead of the path.
|
||||
|
||||
<bad_example_do_not_do_this>
|
||||
In Markdown, hash marks signify headings. For example:
|
||||
|
||||
# Level 1 heading
|
||||
## Level 2 heading
|
||||
### Level 3 heading
|
||||
</bad_example_do_not_do_this>
|
||||
|
||||
This example is unacceptable because it uses indentation to mark the code block
|
||||
instead of backticks with a path.
|
||||
|
||||
<bad_example_do_not_do_this>
|
||||
In Markdown, hash marks signify headings. For example:
|
||||
|
||||
```markdown
|
||||
/dev/null/example.md#L1-3
|
||||
# Level 1 heading
|
||||
## Level 2 heading
|
||||
### Level 3 heading
|
||||
```
|
||||
</bad_example_do_not_do_this>
|
||||
|
||||
This example is unacceptable because the path is in the wrong place. The path must be directly after the opening backticks.
|
||||
</style>
|
||||
<calling_external_apis>
|
||||
1. Unless explicitly requested by the USER, use the best suited external APIs and packages to solve the task. There is no need to ask the USER for permission.
|
||||
2. When selecting which version of an API or package to use, choose one that is compatible with the USER's dependency management file. If no such file exists or if the package is not present, use the latest version that is in your training data.
|
||||
3. If an external API requires an API Key, be sure to point this out to the USER. Adhere to best security practices (e.g. DO NOT hardcode an API key in a place where it can be exposed)
|
||||
</calling_external_apis>
|
||||
|
||||
The user has opened a project that contains the following root directories/files. Whenever you specify a path in the project, it must be a relative path which begins with one of these root directories/files:
|
||||
|
||||
|
||||
@@ -80,6 +80,8 @@
|
||||
// Values are clamped to the [0.0, 1.0] range.
|
||||
"inactive_opacity": 1.0
|
||||
},
|
||||
// Layout mode of the bottom dock. Defaults to "contained"
|
||||
"bottom_dock_layout": "contained",
|
||||
// The direction that you want to split panes horizontally. Defaults to "up"
|
||||
"pane_split_direction_horizontal": "up",
|
||||
// The direction that you want to split panes horizontally. Defaults to "left"
|
||||
@@ -657,25 +659,25 @@
|
||||
"name": "Write",
|
||||
"enable_all_context_servers": true,
|
||||
"tools": {
|
||||
"terminal": true,
|
||||
"batch_tool": true,
|
||||
"code_actions": true,
|
||||
"code_symbols": true,
|
||||
"contents": true,
|
||||
"batch_tool": false,
|
||||
"code_actions": false,
|
||||
"code_symbols": false,
|
||||
"contents": false,
|
||||
"copy_path": false,
|
||||
"create_file": true,
|
||||
"delete_path": false,
|
||||
"diagnostics": true,
|
||||
"find_replace_file": true,
|
||||
"edit_file": true,
|
||||
"fetch": true,
|
||||
"list_directory": false,
|
||||
"list_directory": true,
|
||||
"move_path": false,
|
||||
"now": true,
|
||||
"now": false,
|
||||
"path_search": true,
|
||||
"read_file": true,
|
||||
"regex_search": true,
|
||||
"rename": true,
|
||||
"symbol_info": true,
|
||||
"rename": false,
|
||||
"symbol_info": false,
|
||||
"terminal": true,
|
||||
"thinking": true
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
use crate::{Keep, KeepAll, Reject, RejectAll, Thread, ThreadEvent};
|
||||
use anyhow::Result;
|
||||
use buffer_diff::DiffHunkStatus;
|
||||
use collections::{HashMap, HashSet};
|
||||
use collections::HashSet;
|
||||
use editor::{
|
||||
Direction, Editor, EditorEvent, MultiBuffer, ToPoint,
|
||||
actions::{GoToHunk, GoToPreviousHunk},
|
||||
@@ -355,24 +355,16 @@ impl AgentDiff {
|
||||
self.update_selection(&diff_hunks_in_ranges, window, cx);
|
||||
}
|
||||
|
||||
let mut ranges_by_buffer = HashMap::default();
|
||||
for hunk in &diff_hunks_in_ranges {
|
||||
let buffer = self.multibuffer.read(cx).buffer(hunk.buffer_id);
|
||||
if let Some(buffer) = buffer {
|
||||
ranges_by_buffer
|
||||
.entry(buffer.clone())
|
||||
.or_insert_with(Vec::new)
|
||||
.push(hunk.buffer_range.clone());
|
||||
self.thread
|
||||
.update(cx, |thread, cx| {
|
||||
thread.reject_edits_in_range(buffer, hunk.buffer_range.clone(), cx)
|
||||
})
|
||||
.detach_and_log_err(cx);
|
||||
}
|
||||
}
|
||||
|
||||
for (buffer, ranges) in ranges_by_buffer {
|
||||
self.thread
|
||||
.update(cx, |thread, cx| {
|
||||
thread.reject_edits_in_ranges(buffer, ranges, cx)
|
||||
})
|
||||
.detach_and_log_err(cx);
|
||||
}
|
||||
}
|
||||
|
||||
fn update_selection(
|
||||
|
||||
@@ -265,6 +265,9 @@ pub struct Thread {
|
||||
feedback: Option<ThreadFeedback>,
|
||||
message_feedback: HashMap<MessageId, ThreadFeedback>,
|
||||
last_auto_capture_at: Option<Instant>,
|
||||
request_callback: Option<
|
||||
Box<dyn FnMut(&LanguageModelRequest, &[Result<LanguageModelCompletionEvent, String>])>,
|
||||
>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
@@ -315,6 +318,7 @@ impl Thread {
|
||||
feedback: None,
|
||||
message_feedback: HashMap::default(),
|
||||
last_auto_capture_at: None,
|
||||
request_callback: None,
|
||||
}
|
||||
}
|
||||
|
||||
@@ -382,9 +386,18 @@ impl Thread {
|
||||
feedback: None,
|
||||
message_feedback: HashMap::default(),
|
||||
last_auto_capture_at: None,
|
||||
request_callback: None,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn set_request_callback(
|
||||
&mut self,
|
||||
callback: impl 'static
|
||||
+ FnMut(&LanguageModelRequest, &[Result<LanguageModelCompletionEvent, String>]),
|
||||
) {
|
||||
self.request_callback = Some(Box::new(callback));
|
||||
}
|
||||
|
||||
pub fn id(&self) -> &ThreadId {
|
||||
&self.id
|
||||
}
|
||||
@@ -827,7 +840,7 @@ impl Thread {
|
||||
})
|
||||
.collect(),
|
||||
initial_project_snapshot,
|
||||
cumulative_token_usage: this.cumulative_token_usage.clone(),
|
||||
cumulative_token_usage: this.cumulative_token_usage,
|
||||
detailed_summary_state: this.detailed_summary_state.clone(),
|
||||
exceeded_window_error: this.exceeded_window_error.clone(),
|
||||
})
|
||||
@@ -1013,17 +1026,28 @@ impl Thread {
|
||||
cx: &mut Context<Self>,
|
||||
) {
|
||||
let pending_completion_id = post_inc(&mut self.completion_count);
|
||||
let request_callback_parameters = if self.request_callback.is_some() {
|
||||
Some((request.clone(), Vec::new()))
|
||||
} else {
|
||||
None
|
||||
};
|
||||
|
||||
let task = cx.spawn(async move |thread, cx| {
|
||||
let stream = model.stream_completion(request, &cx);
|
||||
let initial_token_usage =
|
||||
thread.read_with(cx, |thread, _cx| thread.cumulative_token_usage.clone());
|
||||
thread.read_with(cx, |thread, _cx| thread.cumulative_token_usage);
|
||||
let stream_completion = async {
|
||||
let mut request_callback_parameters = request_callback_parameters;
|
||||
let mut events = stream.await?;
|
||||
let mut stop_reason = StopReason::EndTurn;
|
||||
let mut current_token_usage = TokenUsage::default();
|
||||
|
||||
while let Some(event) = events.next().await {
|
||||
if let Some((_, response_events)) = request_callback_parameters.as_mut() {
|
||||
response_events
|
||||
.push(event.as_ref().map_err(|error| error.to_string()).cloned());
|
||||
}
|
||||
|
||||
let event = event?;
|
||||
|
||||
thread.update(cx, |thread, cx| {
|
||||
@@ -1039,9 +1063,9 @@ impl Thread {
|
||||
stop_reason = reason;
|
||||
}
|
||||
LanguageModelCompletionEvent::UsageUpdate(token_usage) => {
|
||||
thread.cumulative_token_usage =
|
||||
thread.cumulative_token_usage.clone() + token_usage.clone()
|
||||
- current_token_usage.clone();
|
||||
thread.cumulative_token_usage = thread.cumulative_token_usage
|
||||
+ token_usage
|
||||
- current_token_usage;
|
||||
current_token_usage = token_usage;
|
||||
}
|
||||
LanguageModelCompletionEvent::Text(chunk) => {
|
||||
@@ -1126,7 +1150,7 @@ impl Thread {
|
||||
}
|
||||
})?;
|
||||
|
||||
anyhow::Ok(stop_reason)
|
||||
anyhow::Ok((stop_reason, request_callback_parameters))
|
||||
};
|
||||
|
||||
let result = stream_completion.await;
|
||||
@@ -1135,14 +1159,24 @@ impl Thread {
|
||||
.update(cx, |thread, cx| {
|
||||
thread.finalize_pending_checkpoint(cx);
|
||||
match result.as_ref() {
|
||||
Ok(stop_reason) => match stop_reason {
|
||||
StopReason::ToolUse => {
|
||||
let tool_uses = thread.use_pending_tools(cx);
|
||||
cx.emit(ThreadEvent::UsePendingTools { tool_uses });
|
||||
Ok((stop_reason, request_callback_parameters)) => {
|
||||
match stop_reason {
|
||||
StopReason::ToolUse => {
|
||||
let tool_uses = thread.use_pending_tools(cx);
|
||||
cx.emit(ThreadEvent::UsePendingTools { tool_uses });
|
||||
}
|
||||
StopReason::EndTurn => {}
|
||||
StopReason::MaxTokens => {}
|
||||
}
|
||||
StopReason::EndTurn => {}
|
||||
StopReason::MaxTokens => {}
|
||||
},
|
||||
|
||||
if let Some((request_callback, (request, response_events))) = thread
|
||||
.request_callback
|
||||
.as_mut()
|
||||
.zip(request_callback_parameters.as_ref())
|
||||
{
|
||||
request_callback(request, response_events);
|
||||
}
|
||||
}
|
||||
Err(error) => {
|
||||
if error.is::<PaymentRequiredError>() {
|
||||
cx.emit(ThreadEvent::ShowError(ThreadError::PaymentRequired));
|
||||
@@ -1179,12 +1213,14 @@ impl Thread {
|
||||
thread.cancel_last_completion(cx);
|
||||
}
|
||||
}
|
||||
cx.emit(ThreadEvent::Stopped(result.map_err(Arc::new)));
|
||||
cx.emit(ThreadEvent::Stopped(
|
||||
result.map(|result| result.0).map_err(Arc::new),
|
||||
));
|
||||
|
||||
thread.auto_capture_telemetry(cx);
|
||||
|
||||
if let Ok(initial_usage) = initial_token_usage {
|
||||
let usage = thread.cumulative_token_usage.clone() - initial_usage;
|
||||
let usage = thread.cumulative_token_usage - initial_usage;
|
||||
|
||||
telemetry::event!(
|
||||
"Assistant Thread Completion",
|
||||
@@ -1801,14 +1837,14 @@ impl Thread {
|
||||
.update(cx, |action_log, cx| action_log.keep_all_edits(cx));
|
||||
}
|
||||
|
||||
pub fn reject_edits_in_ranges(
|
||||
pub fn reject_edits_in_range(
|
||||
&mut self,
|
||||
buffer: Entity<language::Buffer>,
|
||||
buffer_ranges: Vec<Range<language::Anchor>>,
|
||||
buffer_range: Range<language::Anchor>,
|
||||
cx: &mut Context<Self>,
|
||||
) -> Task<Result<()>> {
|
||||
self.action_log.update(cx, |action_log, cx| {
|
||||
action_log.reject_edits_in_ranges(buffer, buffer_ranges, cx)
|
||||
action_log.reject_edits_in_range(buffer, buffer_range, cx)
|
||||
})
|
||||
}
|
||||
|
||||
@@ -1864,6 +1900,10 @@ impl Thread {
|
||||
.detach();
|
||||
}
|
||||
|
||||
pub fn cumulative_token_usage(&self) -> TokenUsage {
|
||||
self.cumulative_token_usage
|
||||
}
|
||||
|
||||
pub fn total_token_usage(&self, cx: &App) -> TotalTokenUsage {
|
||||
let model_registry = LanguageModelRegistry::read_global(cx);
|
||||
let Some(model) = model_registry.default_model() else {
|
||||
|
||||
@@ -25,5 +25,4 @@ serde.workspace = true
|
||||
serde_json.workspace = true
|
||||
strum.workspace = true
|
||||
thiserror.workspace = true
|
||||
util.workspace = true
|
||||
workspace-hack.workspace = true
|
||||
|
||||
@@ -10,7 +10,6 @@ use http_client::{AsyncBody, HttpClient, Method, Request as HttpRequest};
|
||||
use serde::{Deserialize, Serialize};
|
||||
use strum::{EnumIter, EnumString};
|
||||
use thiserror::Error;
|
||||
use util::ResultExt as _;
|
||||
|
||||
pub use supported_countries::*;
|
||||
|
||||
@@ -363,11 +362,25 @@ pub struct RateLimitInfo {
|
||||
|
||||
impl RateLimitInfo {
|
||||
fn from_headers(headers: &HeaderMap<HeaderValue>) -> Self {
|
||||
// Check if any rate limit headers exist
|
||||
let has_rate_limit_headers = headers
|
||||
.keys()
|
||||
.any(|k| k.as_str().starts_with("anthropic-ratelimit-"));
|
||||
|
||||
if !has_rate_limit_headers {
|
||||
return Self {
|
||||
requests: None,
|
||||
tokens: None,
|
||||
input_tokens: None,
|
||||
output_tokens: None,
|
||||
};
|
||||
}
|
||||
|
||||
Self {
|
||||
requests: RateLimit::from_headers("requests", headers).log_err(),
|
||||
tokens: RateLimit::from_headers("tokens", headers).log_err(),
|
||||
input_tokens: RateLimit::from_headers("input-tokens", headers).log_err(),
|
||||
output_tokens: RateLimit::from_headers("output-tokens", headers).log_err(),
|
||||
requests: RateLimit::from_headers("requests", headers).ok(),
|
||||
tokens: RateLimit::from_headers("tokens", headers).ok(),
|
||||
input_tokens: RateLimit::from_headers("input-tokens", headers).ok(),
|
||||
output_tokens: RateLimit::from_headers("output-tokens", headers).ok(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -10,7 +10,7 @@ use collections::{BTreeSet, HashMap, HashSet, hash_map};
|
||||
use editor::{
|
||||
Anchor, Editor, EditorEvent, MenuInlineCompletionsPolicy, ProposedChangeLocation,
|
||||
ProposedChangesEditor, RowExt, ToOffset as _, ToPoint,
|
||||
actions::{FoldAt, MoveToEndOfLine, Newline, ShowCompletions, UnfoldAt},
|
||||
actions::{MoveToEndOfLine, Newline, ShowCompletions},
|
||||
display_map::{
|
||||
BlockContext, BlockId, BlockPlacement, BlockProperties, BlockStyle, Crease, CreaseMetadata,
|
||||
CustomBlockId, FoldId, RenderBlock, ToDisplayPoint,
|
||||
@@ -1053,7 +1053,7 @@ impl ContextEditor {
|
||||
let creases = editor.insert_creases(creases, cx);
|
||||
|
||||
for buffer_row in buffer_rows_to_fold.into_iter().rev() {
|
||||
editor.fold_at(&FoldAt { buffer_row }, window, cx);
|
||||
editor.fold_at(buffer_row, window, cx);
|
||||
}
|
||||
|
||||
creases
|
||||
@@ -1109,7 +1109,7 @@ impl ContextEditor {
|
||||
buffer_rows_to_fold.clear();
|
||||
}
|
||||
for buffer_row in buffer_rows_to_fold.into_iter().rev() {
|
||||
editor.fold_at(&FoldAt { buffer_row }, window, cx);
|
||||
editor.fold_at(buffer_row, window, cx);
|
||||
}
|
||||
});
|
||||
}
|
||||
@@ -1844,13 +1844,7 @@ impl ContextEditor {
|
||||
|_, _, _, _| Empty.into_any(),
|
||||
);
|
||||
editor.insert_creases(vec![crease], cx);
|
||||
editor.fold_at(
|
||||
&FoldAt {
|
||||
buffer_row: start_row,
|
||||
},
|
||||
window,
|
||||
cx,
|
||||
);
|
||||
editor.fold_at(start_row, window, cx);
|
||||
}
|
||||
})
|
||||
}
|
||||
@@ -2042,7 +2036,7 @@ impl ContextEditor {
|
||||
cx,
|
||||
);
|
||||
for buffer_row in buffer_rows_to_fold.into_iter().rev() {
|
||||
editor.fold_at(&FoldAt { buffer_row }, window, cx);
|
||||
editor.fold_at(buffer_row, window, cx);
|
||||
}
|
||||
}
|
||||
});
|
||||
@@ -2820,7 +2814,7 @@ fn render_thought_process_fold_icon_button(
|
||||
.start
|
||||
.to_point(&editor.buffer().read(cx).read(cx));
|
||||
let buffer_row = MultiBufferRow(buffer_start.row);
|
||||
editor.unfold_at(&UnfoldAt { buffer_row }, window, cx);
|
||||
editor.unfold_at(buffer_row, window, cx);
|
||||
})
|
||||
.ok();
|
||||
})
|
||||
@@ -2847,7 +2841,7 @@ fn render_fold_icon_button(
|
||||
.start
|
||||
.to_point(&editor.buffer().read(cx).read(cx));
|
||||
let buffer_row = MultiBufferRow(buffer_start.row);
|
||||
editor.unfold_at(&UnfoldAt { buffer_row }, window, cx);
|
||||
editor.unfold_at(buffer_row, window, cx);
|
||||
})
|
||||
.ok();
|
||||
})
|
||||
@@ -2907,7 +2901,7 @@ fn quote_selection_fold_placeholder(title: String, editor: WeakEntity<Editor>) -
|
||||
.start
|
||||
.to_point(&editor.buffer().read(cx).read(cx));
|
||||
let buffer_row = MultiBufferRow(buffer_start.row);
|
||||
editor.unfold_at(&UnfoldAt { buffer_row }, window, cx);
|
||||
editor.unfold_at(buffer_row, window, cx);
|
||||
})
|
||||
.ok();
|
||||
})
|
||||
|
||||
@@ -3,7 +3,7 @@ use buffer_diff::BufferDiff;
|
||||
use collections::BTreeMap;
|
||||
use futures::{StreamExt, channel::mpsc};
|
||||
use gpui::{App, AppContext, AsyncApp, Context, Entity, Subscription, Task, WeakEntity};
|
||||
use language::{Anchor, Buffer, BufferEvent, DiskState, Point, ToPoint};
|
||||
use language::{Anchor, Buffer, BufferEvent, DiskState, Point};
|
||||
use project::{Project, ProjectItem, lsp_store::OpenLspBufferHandle};
|
||||
use std::{cmp, ops::Range, sync::Arc};
|
||||
use text::{Edit, Patch, Rope};
|
||||
@@ -240,7 +240,7 @@ impl ActionLog {
|
||||
.await;
|
||||
|
||||
diff.update(cx, |diff, cx| {
|
||||
diff.set_snapshot(diff_snapshot, &buffer_snapshot, None, cx)
|
||||
diff.set_snapshot(diff_snapshot, &buffer_snapshot, cx)
|
||||
})?;
|
||||
}
|
||||
this.update(cx, |this, cx| {
|
||||
@@ -363,10 +363,10 @@ impl ActionLog {
|
||||
}
|
||||
}
|
||||
|
||||
pub fn reject_edits_in_ranges(
|
||||
pub fn reject_edits_in_range(
|
||||
&mut self,
|
||||
buffer: Entity<Buffer>,
|
||||
buffer_ranges: Vec<Range<impl language::ToPoint>>,
|
||||
buffer_range: Range<impl language::ToPoint>,
|
||||
cx: &mut Context<Self>,
|
||||
) -> Task<Result<()>> {
|
||||
let Some(tracked_buffer) = self.tracked_buffers.get_mut(&buffer) else {
|
||||
@@ -403,15 +403,29 @@ impl ActionLog {
|
||||
}
|
||||
TrackedBufferStatus::Modified => {
|
||||
buffer.update(cx, |buffer, cx| {
|
||||
let mut buffer_row_ranges = buffer_ranges
|
||||
.into_iter()
|
||||
.map(|range| {
|
||||
range.start.to_point(buffer).row..range.end.to_point(buffer).row
|
||||
})
|
||||
.peekable();
|
||||
let buffer_range =
|
||||
buffer_range.start.to_point(buffer)..buffer_range.end.to_point(buffer);
|
||||
|
||||
let mut edits_to_revert = Vec::new();
|
||||
for edit in tracked_buffer.unreviewed_changes.edits() {
|
||||
if buffer_range.end.row < edit.new.start {
|
||||
break;
|
||||
} else if buffer_range.start.row > edit.new.end {
|
||||
continue;
|
||||
}
|
||||
|
||||
let old_range = tracked_buffer
|
||||
.base_text
|
||||
.point_to_offset(Point::new(edit.old.start, 0))
|
||||
..tracked_buffer.base_text.point_to_offset(cmp::min(
|
||||
Point::new(edit.old.end, 0),
|
||||
tracked_buffer.base_text.max_point(),
|
||||
));
|
||||
let old_text = tracked_buffer
|
||||
.base_text
|
||||
.chunks_in_range(old_range)
|
||||
.collect::<String>();
|
||||
|
||||
let new_range = tracked_buffer
|
||||
.snapshot
|
||||
.anchor_before(Point::new(edit.new.start, 0))
|
||||
@@ -419,35 +433,7 @@ impl ActionLog {
|
||||
Point::new(edit.new.end, 0),
|
||||
tracked_buffer.snapshot.max_point(),
|
||||
));
|
||||
let new_row_range = new_range.start.to_point(buffer).row
|
||||
..new_range.end.to_point(buffer).row;
|
||||
|
||||
let mut revert = false;
|
||||
while let Some(buffer_row_range) = buffer_row_ranges.peek() {
|
||||
if buffer_row_range.end < new_row_range.start {
|
||||
buffer_row_ranges.next();
|
||||
} else if buffer_row_range.start > new_row_range.end {
|
||||
break;
|
||||
} else {
|
||||
revert = true;
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
if revert {
|
||||
let old_range = tracked_buffer
|
||||
.base_text
|
||||
.point_to_offset(Point::new(edit.old.start, 0))
|
||||
..tracked_buffer.base_text.point_to_offset(cmp::min(
|
||||
Point::new(edit.old.end, 0),
|
||||
tracked_buffer.base_text.max_point(),
|
||||
));
|
||||
let old_text = tracked_buffer
|
||||
.base_text
|
||||
.chunks_in_range(old_range)
|
||||
.collect::<String>();
|
||||
edits_to_revert.push((new_range, old_text));
|
||||
}
|
||||
edits_to_revert.push((new_range, old_text));
|
||||
}
|
||||
|
||||
buffer.edit(edits_to_revert, None, cx);
|
||||
@@ -613,7 +599,6 @@ fn point_to_row_edit(edit: Edit<Point>, old_text: &Rope, new_text: &Rope) -> Edi
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone, Debug)]
|
||||
enum ChangeAuthor {
|
||||
User,
|
||||
Agent,
|
||||
@@ -1150,48 +1135,9 @@ mod tests {
|
||||
)]
|
||||
);
|
||||
|
||||
// If the rejected range doesn't overlap with any hunk, we ignore it.
|
||||
action_log
|
||||
.update(cx, |log, cx| {
|
||||
log.reject_edits_in_ranges(
|
||||
buffer.clone(),
|
||||
vec![Point::new(4, 0)..Point::new(4, 0)],
|
||||
cx,
|
||||
)
|
||||
})
|
||||
.await
|
||||
.unwrap();
|
||||
cx.run_until_parked();
|
||||
assert_eq!(
|
||||
buffer.read_with(cx, |buffer, _| buffer.text()),
|
||||
"abc\ndE\nXYZf\nghi\njkl\nmnO"
|
||||
);
|
||||
assert_eq!(
|
||||
unreviewed_hunks(&action_log, cx),
|
||||
vec![(
|
||||
buffer.clone(),
|
||||
vec![
|
||||
HunkStatus {
|
||||
range: Point::new(1, 0)..Point::new(3, 0),
|
||||
diff_status: DiffHunkStatusKind::Modified,
|
||||
old_text: "def\n".into(),
|
||||
},
|
||||
HunkStatus {
|
||||
range: Point::new(5, 0)..Point::new(5, 3),
|
||||
diff_status: DiffHunkStatusKind::Modified,
|
||||
old_text: "mno".into(),
|
||||
}
|
||||
],
|
||||
)]
|
||||
);
|
||||
|
||||
action_log
|
||||
.update(cx, |log, cx| {
|
||||
log.reject_edits_in_ranges(
|
||||
buffer.clone(),
|
||||
vec![Point::new(0, 0)..Point::new(1, 0)],
|
||||
cx,
|
||||
)
|
||||
log.reject_edits_in_range(buffer.clone(), Point::new(0, 0)..Point::new(1, 0), cx)
|
||||
})
|
||||
.await
|
||||
.unwrap();
|
||||
@@ -1214,11 +1160,7 @@ mod tests {
|
||||
|
||||
action_log
|
||||
.update(cx, |log, cx| {
|
||||
log.reject_edits_in_ranges(
|
||||
buffer.clone(),
|
||||
vec![Point::new(4, 0)..Point::new(4, 0)],
|
||||
cx,
|
||||
)
|
||||
log.reject_edits_in_range(buffer.clone(), Point::new(4, 0)..Point::new(4, 0), cx)
|
||||
})
|
||||
.await
|
||||
.unwrap();
|
||||
@@ -1230,82 +1172,6 @@ mod tests {
|
||||
assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
|
||||
}
|
||||
|
||||
#[gpui::test(iterations = 10)]
|
||||
async fn test_reject_multiple_edits(cx: &mut TestAppContext) {
|
||||
init_test(cx);
|
||||
|
||||
let fs = FakeFs::new(cx.executor());
|
||||
fs.insert_tree(path!("/dir"), json!({"file": "abc\ndef\nghi\njkl\nmno"}))
|
||||
.await;
|
||||
let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
|
||||
let action_log = cx.new(|_| ActionLog::new(project.clone()));
|
||||
let file_path = project
|
||||
.read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
|
||||
.unwrap();
|
||||
let buffer = project
|
||||
.update(cx, |project, cx| project.open_buffer(file_path, cx))
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
cx.update(|cx| {
|
||||
action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
|
||||
buffer.update(cx, |buffer, cx| {
|
||||
buffer
|
||||
.edit([(Point::new(1, 1)..Point::new(1, 2), "E\nXYZ")], None, cx)
|
||||
.unwrap()
|
||||
});
|
||||
buffer.update(cx, |buffer, cx| {
|
||||
buffer
|
||||
.edit([(Point::new(5, 2)..Point::new(5, 3), "O")], None, cx)
|
||||
.unwrap()
|
||||
});
|
||||
action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
|
||||
});
|
||||
cx.run_until_parked();
|
||||
assert_eq!(
|
||||
buffer.read_with(cx, |buffer, _| buffer.text()),
|
||||
"abc\ndE\nXYZf\nghi\njkl\nmnO"
|
||||
);
|
||||
assert_eq!(
|
||||
unreviewed_hunks(&action_log, cx),
|
||||
vec![(
|
||||
buffer.clone(),
|
||||
vec![
|
||||
HunkStatus {
|
||||
range: Point::new(1, 0)..Point::new(3, 0),
|
||||
diff_status: DiffHunkStatusKind::Modified,
|
||||
old_text: "def\n".into(),
|
||||
},
|
||||
HunkStatus {
|
||||
range: Point::new(5, 0)..Point::new(5, 3),
|
||||
diff_status: DiffHunkStatusKind::Modified,
|
||||
old_text: "mno".into(),
|
||||
}
|
||||
],
|
||||
)]
|
||||
);
|
||||
|
||||
action_log.update(cx, |log, cx| {
|
||||
let range_1 = buffer.read(cx).anchor_before(Point::new(0, 0))
|
||||
..buffer.read(cx).anchor_before(Point::new(1, 0));
|
||||
let range_2 = buffer.read(cx).anchor_before(Point::new(5, 0))
|
||||
..buffer.read(cx).anchor_before(Point::new(5, 3));
|
||||
|
||||
log.reject_edits_in_ranges(buffer.clone(), vec![range_1, range_2], cx)
|
||||
.detach();
|
||||
assert_eq!(
|
||||
buffer.read_with(cx, |buffer, _| buffer.text()),
|
||||
"abc\ndef\nghi\njkl\nmno"
|
||||
);
|
||||
});
|
||||
cx.run_until_parked();
|
||||
assert_eq!(
|
||||
buffer.read_with(cx, |buffer, _| buffer.text()),
|
||||
"abc\ndef\nghi\njkl\nmno"
|
||||
);
|
||||
assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
|
||||
}
|
||||
|
||||
#[gpui::test(iterations = 10)]
|
||||
async fn test_reject_deleted_file(cx: &mut TestAppContext) {
|
||||
init_test(cx);
|
||||
@@ -1349,11 +1215,7 @@ mod tests {
|
||||
|
||||
action_log
|
||||
.update(cx, |log, cx| {
|
||||
log.reject_edits_in_ranges(
|
||||
buffer.clone(),
|
||||
vec![Point::new(0, 0)..Point::new(0, 0)],
|
||||
cx,
|
||||
)
|
||||
log.reject_edits_in_range(buffer.clone(), Point::new(0, 0)..Point::new(0, 0), cx)
|
||||
})
|
||||
.await
|
||||
.unwrap();
|
||||
@@ -1404,11 +1266,7 @@ mod tests {
|
||||
|
||||
action_log
|
||||
.update(cx, |log, cx| {
|
||||
log.reject_edits_in_ranges(
|
||||
buffer.clone(),
|
||||
vec![Point::new(0, 0)..Point::new(0, 11)],
|
||||
cx,
|
||||
)
|
||||
log.reject_edits_in_range(buffer.clone(), Point::new(0, 0)..Point::new(0, 11), cx)
|
||||
})
|
||||
.await
|
||||
.unwrap();
|
||||
@@ -1454,7 +1312,7 @@ mod tests {
|
||||
.update(cx, |log, cx| {
|
||||
let range = buffer.read(cx).random_byte_range(0, &mut rng);
|
||||
log::info!("rejecting edits in range {:?}", range);
|
||||
log.reject_edits_in_ranges(buffer.clone(), vec![range], cx)
|
||||
log.reject_edits_in_range(buffer.clone(), range, cx)
|
||||
})
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
@@ -40,5 +40,6 @@ gpui = { workspace = true, features = ["test-support"] }
|
||||
language = { workspace = true, features = ["test-support"] }
|
||||
project = { workspace = true, features = ["test-support"] }
|
||||
rand.workspace = true
|
||||
settings = { workspace = true, features = ["test-support"] }
|
||||
workspace = { workspace = true, features = ["test-support"] }
|
||||
unindent.workspace = true
|
||||
|
||||
@@ -7,8 +7,8 @@ mod create_directory_tool;
|
||||
mod create_file_tool;
|
||||
mod delete_path_tool;
|
||||
mod diagnostics_tool;
|
||||
mod edit_file_tool;
|
||||
mod fetch_tool;
|
||||
mod find_replace_file_tool;
|
||||
mod list_directory_tool;
|
||||
mod move_path_tool;
|
||||
mod now_tool;
|
||||
@@ -39,8 +39,8 @@ use crate::create_directory_tool::CreateDirectoryTool;
|
||||
use crate::create_file_tool::CreateFileTool;
|
||||
use crate::delete_path_tool::DeletePathTool;
|
||||
use crate::diagnostics_tool::DiagnosticsTool;
|
||||
use crate::edit_file_tool::EditFileTool;
|
||||
use crate::fetch_tool::FetchTool;
|
||||
use crate::find_replace_file_tool::FindReplaceFileTool;
|
||||
use crate::list_directory_tool::ListDirectoryTool;
|
||||
use crate::now_tool::NowTool;
|
||||
use crate::open_tool::OpenTool;
|
||||
@@ -62,7 +62,7 @@ pub fn init(http_client: Arc<HttpClientWithUrl>, cx: &mut App) {
|
||||
registry.register_tool(CreateFileTool);
|
||||
registry.register_tool(CopyPathTool);
|
||||
registry.register_tool(DeletePathTool);
|
||||
registry.register_tool(FindReplaceFileTool);
|
||||
registry.register_tool(EditFileTool);
|
||||
registry.register_tool(SymbolInfoTool);
|
||||
registry.register_tool(CodeActionTool);
|
||||
registry.register_tool(MovePathTool);
|
||||
|
||||
183
crates/assistant_tools/src/edit_file_tool.rs
Normal file
183
crates/assistant_tools/src/edit_file_tool.rs
Normal file
@@ -0,0 +1,183 @@
|
||||
use crate::{replace::replace_with_flexible_indent, schema::json_schema_for};
|
||||
use anyhow::{Context as _, Result, anyhow};
|
||||
use assistant_tool::{ActionLog, Tool, ToolResult};
|
||||
use gpui::{App, AppContext, AsyncApp, Entity, Task};
|
||||
use language_model::{LanguageModelRequestMessage, LanguageModelToolSchemaFormat};
|
||||
use project::Project;
|
||||
use schemars::JsonSchema;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::{path::PathBuf, sync::Arc};
|
||||
use ui::IconName;
|
||||
|
||||
use crate::replace::replace_exact;
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize, JsonSchema)]
|
||||
pub struct EditFileToolInput {
|
||||
/// The full path of the file to modify in the project.
|
||||
///
|
||||
/// WARNING: When specifying which file path need changing, you MUST
|
||||
/// start each path with one of the project's root directories.
|
||||
///
|
||||
/// The following examples assume we have two root directories in the project:
|
||||
/// - backend
|
||||
/// - frontend
|
||||
///
|
||||
/// <example>
|
||||
/// `backend/src/main.rs`
|
||||
///
|
||||
/// Notice how the file path starts with root-1. Without that, the path
|
||||
/// would be ambiguous and the call would fail!
|
||||
/// </example>
|
||||
///
|
||||
/// <example>
|
||||
/// `frontend/db.js`
|
||||
/// </example>
|
||||
pub path: PathBuf,
|
||||
|
||||
/// A user-friendly markdown description of what's being replaced. This will be shown in the UI.
|
||||
///
|
||||
/// <example>Fix API endpoint URLs</example>
|
||||
/// <example>Update copyright year in `page_footer`</example>
|
||||
pub display_description: String,
|
||||
|
||||
/// The text to replace.
|
||||
pub old_string: String,
|
||||
|
||||
/// The text to replace it with.
|
||||
pub new_string: String,
|
||||
}
|
||||
|
||||
pub struct EditFileTool;
|
||||
|
||||
impl Tool for EditFileTool {
|
||||
fn name(&self) -> String {
|
||||
"edit_file".into()
|
||||
}
|
||||
|
||||
fn needs_confirmation(&self, _: &serde_json::Value, _: &App) -> bool {
|
||||
false
|
||||
}
|
||||
|
||||
fn description(&self) -> String {
|
||||
include_str!("edit_file_tool/description.md").to_string()
|
||||
}
|
||||
|
||||
fn icon(&self) -> IconName {
|
||||
IconName::Pencil
|
||||
}
|
||||
|
||||
fn input_schema(&self, format: LanguageModelToolSchemaFormat) -> Result<serde_json::Value> {
|
||||
json_schema_for::<EditFileToolInput>(format)
|
||||
}
|
||||
|
||||
fn ui_text(&self, input: &serde_json::Value) -> String {
|
||||
match serde_json::from_value::<EditFileToolInput>(input.clone()) {
|
||||
Ok(input) => input.display_description,
|
||||
Err(_) => "Edit file".to_string(),
|
||||
}
|
||||
}
|
||||
|
||||
fn run(
|
||||
self: Arc<Self>,
|
||||
input: serde_json::Value,
|
||||
_messages: &[LanguageModelRequestMessage],
|
||||
project: Entity<Project>,
|
||||
action_log: Entity<ActionLog>,
|
||||
cx: &mut App,
|
||||
) -> ToolResult {
|
||||
let input = match serde_json::from_value::<EditFileToolInput>(input) {
|
||||
Ok(input) => input,
|
||||
Err(err) => return Task::ready(Err(anyhow!(err))).into(),
|
||||
};
|
||||
|
||||
cx.spawn(async move |cx: &mut AsyncApp| {
|
||||
let project_path = project.read_with(cx, |project, cx| {
|
||||
project
|
||||
.find_project_path(&input.path, cx)
|
||||
.context("Path not found in project")
|
||||
})??;
|
||||
|
||||
let buffer = project
|
||||
.update(cx, |project, cx| project.open_buffer(project_path, cx))?
|
||||
.await?;
|
||||
|
||||
let snapshot = buffer.read_with(cx, |buffer, _cx| buffer.snapshot())?;
|
||||
|
||||
if input.old_string.is_empty() {
|
||||
return Err(anyhow!("`old_string` cannot be empty. Use a different tool if you want to create a file."));
|
||||
}
|
||||
|
||||
if input.old_string == input.new_string {
|
||||
return Err(anyhow!("The `old_string` and `new_string` are identical, so no changes would be made."));
|
||||
}
|
||||
|
||||
let result = cx
|
||||
.background_spawn(async move {
|
||||
// Try to match exactly
|
||||
let diff = replace_exact(&input.old_string, &input.new_string, &snapshot)
|
||||
.await
|
||||
// If that fails, try being flexible about indentation
|
||||
.or_else(|| replace_with_flexible_indent(&input.old_string, &input.new_string, &snapshot))?;
|
||||
|
||||
if diff.edits.is_empty() {
|
||||
return None;
|
||||
}
|
||||
|
||||
let old_text = snapshot.text();
|
||||
|
||||
Some((old_text, diff))
|
||||
})
|
||||
.await;
|
||||
|
||||
let Some((old_text, diff)) = result else {
|
||||
let err = buffer.read_with(cx, |buffer, _cx| {
|
||||
let file_exists = buffer
|
||||
.file()
|
||||
.map_or(false, |file| file.disk_state().exists());
|
||||
|
||||
if !file_exists {
|
||||
anyhow!("{} does not exist", input.path.display())
|
||||
} else if buffer.is_empty() {
|
||||
anyhow!(
|
||||
"{} is empty, so the provided `old_string` wasn't found.",
|
||||
input.path.display()
|
||||
)
|
||||
} else {
|
||||
anyhow!("Failed to match the provided `old_string`")
|
||||
}
|
||||
})?;
|
||||
|
||||
return Err(err)
|
||||
};
|
||||
|
||||
let snapshot = cx.update(|cx| {
|
||||
action_log.update(cx, |log, cx| {
|
||||
log.buffer_read(buffer.clone(), cx)
|
||||
});
|
||||
let snapshot = buffer.update(cx, |buffer, cx| {
|
||||
buffer.finalize_last_transaction();
|
||||
buffer.apply_diff(diff, cx);
|
||||
buffer.finalize_last_transaction();
|
||||
buffer.snapshot()
|
||||
});
|
||||
action_log.update(cx, |log, cx| {
|
||||
log.buffer_edited(buffer.clone(), cx)
|
||||
});
|
||||
snapshot
|
||||
})?;
|
||||
|
||||
project.update( cx, |project, cx| {
|
||||
project.save_buffer(buffer, cx)
|
||||
})?.await?;
|
||||
|
||||
let diff_str = cx.background_spawn(async move {
|
||||
let new_text = snapshot.text();
|
||||
language::unified_diff(&old_text, &new_text)
|
||||
}).await;
|
||||
|
||||
|
||||
Ok(format!("Edited {}:\n\n```diff\n{}\n```", input.path.display(), diff_str))
|
||||
|
||||
}).into()
|
||||
}
|
||||
}
|
||||
45
crates/assistant_tools/src/edit_file_tool/description.md
Normal file
45
crates/assistant_tools/src/edit_file_tool/description.md
Normal file
@@ -0,0 +1,45 @@
|
||||
This is a tool for editing files. For moving or renaming files, you should generally use the `terminal` tool with the 'mv' command instead. For larger edits, use the `create_file` tool to overwrite files.
|
||||
|
||||
Before using this tool:
|
||||
|
||||
1. Use the `read_file` tool to understand the file's contents and context
|
||||
|
||||
2. Verify the directory path is correct (only applicable when creating new files):
|
||||
- Use the `list_directory` tool to verify the parent directory exists and is the correct location
|
||||
|
||||
To make a file edit, provide the following:
|
||||
1. path: The full path to the file you wish to modify in the project. This path must include the root directory in the project.
|
||||
2. old_string: The text to replace (must be unique within the file, and must match the file contents exactly, including all whitespace and indentation)
|
||||
3. new_string: The edited text, which will replace the old_string in the file.
|
||||
|
||||
The tool will replace ONE occurrence of old_string with new_string in the specified file.
|
||||
|
||||
CRITICAL REQUIREMENTS FOR USING THIS TOOL:
|
||||
|
||||
1. UNIQUENESS: The old_string MUST uniquely identify the specific instance you want to change. This means:
|
||||
- Include AT LEAST 3-5 lines of context BEFORE the change point
|
||||
- Include AT LEAST 3-5 lines of context AFTER the change point
|
||||
- Include all whitespace, indentation, and surrounding code exactly as it appears in the file
|
||||
|
||||
2. SINGLE INSTANCE: This tool can only change ONE instance at a time. If you need to change multiple instances:
|
||||
- Make separate calls to this tool for each instance
|
||||
- Each call must uniquely identify its specific instance using extensive context
|
||||
|
||||
3. VERIFICATION: Before using this tool:
|
||||
- Check how many instances of the target text exist in the file
|
||||
- If multiple instances exist, gather enough context to uniquely identify each one
|
||||
- Plan separate tool calls for each instance
|
||||
|
||||
WARNING: If you do not follow these requirements:
|
||||
- The tool will fail if old_string matches multiple locations
|
||||
- The tool will fail if old_string doesn't match exactly (including whitespace)
|
||||
- You may change the wrong instance if you don't include enough context
|
||||
|
||||
When making edits:
|
||||
- Ensure the edit results in idiomatic, correct code
|
||||
- Do not leave the code in a broken state
|
||||
- Always use fully-qualified project paths (starting with the name of one of the project's root directories)
|
||||
|
||||
If you want to create a new file, use the `create_file` tool instead of this tool. Don't pass an empty `old_string`.
|
||||
|
||||
Remember: when making multiple file edits in a row to the same file, you should prefer to send all edits in a single message with multiple calls to this tool, rather than multiple messages with a single call each.
|
||||
@@ -1,268 +0,0 @@
|
||||
use crate::{replace::replace_with_flexible_indent, schema::json_schema_for};
|
||||
use anyhow::{Context as _, Result, anyhow};
|
||||
use assistant_tool::{ActionLog, Tool, ToolResult};
|
||||
use gpui::{App, AppContext, AsyncApp, Entity, Task};
|
||||
use language_model::{LanguageModelRequestMessage, LanguageModelToolSchemaFormat};
|
||||
use project::Project;
|
||||
use schemars::JsonSchema;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::{path::PathBuf, sync::Arc};
|
||||
use ui::IconName;
|
||||
|
||||
use crate::replace::replace_exact;
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize, JsonSchema)]
|
||||
pub struct FindReplaceFileToolInput {
|
||||
/// The path of the file to modify.
|
||||
///
|
||||
/// WARNING: When specifying which file path need changing, you MUST
|
||||
/// start each path with one of the project's root directories.
|
||||
///
|
||||
/// The following examples assume we have two root directories in the project:
|
||||
/// - backend
|
||||
/// - frontend
|
||||
///
|
||||
/// <example>
|
||||
/// `backend/src/main.rs`
|
||||
///
|
||||
/// Notice how the file path starts with root-1. Without that, the path
|
||||
/// would be ambiguous and the call would fail!
|
||||
/// </example>
|
||||
///
|
||||
/// <example>
|
||||
/// `frontend/db.js`
|
||||
/// </example>
|
||||
pub path: PathBuf,
|
||||
|
||||
/// A user-friendly markdown description of what's being replaced. This will be shown in the UI.
|
||||
///
|
||||
/// <example>Fix API endpoint URLs</example>
|
||||
/// <example>Update copyright year in `page_footer`</example>
|
||||
pub display_description: String,
|
||||
|
||||
/// The unique string to find in the file. This string cannot be empty;
|
||||
/// if the string is empty, the tool call will fail. Remember, do not use this tool
|
||||
/// to create new files from scratch, or to overwrite existing files! Use a different
|
||||
/// approach if you want to do that.
|
||||
///
|
||||
/// If this string appears more than once in the file, this tool call will fail,
|
||||
/// so it is absolutely critical that you verify ahead of time that the string
|
||||
/// is unique. You can search within the file to verify this.
|
||||
///
|
||||
/// To make the string more likely to be unique, include a minimum of 3 lines of context
|
||||
/// before the string you actually want to find, as well as a minimum of 3 lines of
|
||||
/// context after the string you want to find. (These lines of context should appear
|
||||
/// in the `replace` string as well.) If 3 lines of context is not enough to obtain
|
||||
/// a string that appears only once in the file, then double the number of context lines
|
||||
/// until the string becomes unique. (Start with 3 lines before and 3 lines after
|
||||
/// though, because too much context is needlessly costly.)
|
||||
///
|
||||
/// Do not alter the context lines of code in any way, and make sure to preserve all
|
||||
/// whitespace and indentation for all lines of code. This string must be exactly as
|
||||
/// it appears in the file, because this tool will do a literal find/replace, and if
|
||||
/// even one character in this string is different in any way from how it appears
|
||||
/// in the file, then the tool call will fail.
|
||||
///
|
||||
/// If you get an error that the `find` string was not found, this means that either
|
||||
/// you made a mistake, or that the file has changed since you last looked at it.
|
||||
/// Either way, when this happens, you should retry doing this tool call until it
|
||||
/// succeeds, up to 3 times. Each time you retry, you should take another look at
|
||||
/// the exact text of the file in question, to make sure that you are searching for
|
||||
/// exactly the right string. Regardless of whether it was because you made a mistake
|
||||
/// or because the file changed since you last looked at it, you should be extra
|
||||
/// careful when retrying in this way. It's a bad experience for the user if
|
||||
/// this `find` string isn't found, so be super careful to get it exactly right!
|
||||
///
|
||||
/// <example>
|
||||
/// If a file contains this code:
|
||||
///
|
||||
/// ```ignore
|
||||
/// fn check_user_permissions(user_id: &str) -> Result<bool> {
|
||||
/// // Check if user exists first
|
||||
/// let user = database.find_user(user_id)?;
|
||||
///
|
||||
/// // This is the part we want to modify
|
||||
/// if user.role == "admin" {
|
||||
/// return Ok(true);
|
||||
/// }
|
||||
///
|
||||
/// // Check other permissions
|
||||
/// check_custom_permissions(user_id)
|
||||
/// }
|
||||
/// ```
|
||||
///
|
||||
/// Your find string should include at least 3 lines of context before and after the part
|
||||
/// you want to change:
|
||||
///
|
||||
/// ```ignore
|
||||
/// fn check_user_permissions(user_id: &str) -> Result<bool> {
|
||||
/// // Check if user exists first
|
||||
/// let user = database.find_user(user_id)?;
|
||||
///
|
||||
/// // This is the part we want to modify
|
||||
/// if user.role == "admin" {
|
||||
/// return Ok(true);
|
||||
/// }
|
||||
///
|
||||
/// // Check other permissions
|
||||
/// check_custom_permissions(user_id)
|
||||
/// }
|
||||
/// ```
|
||||
///
|
||||
/// And your replace string might look like:
|
||||
///
|
||||
/// ```ignore
|
||||
/// fn check_user_permissions(user_id: &str) -> Result<bool> {
|
||||
/// // Check if user exists first
|
||||
/// let user = database.find_user(user_id)?;
|
||||
///
|
||||
/// // This is the part we want to modify
|
||||
/// if user.role == "admin" || user.role == "superuser" {
|
||||
/// return Ok(true);
|
||||
/// }
|
||||
///
|
||||
/// // Check other permissions
|
||||
/// check_custom_permissions(user_id)
|
||||
/// }
|
||||
/// ```
|
||||
/// </example>
|
||||
pub find: String,
|
||||
|
||||
/// The string to replace the one unique occurrence of the find string with.
|
||||
pub replace: String,
|
||||
}
|
||||
|
||||
pub struct FindReplaceFileTool;
|
||||
|
||||
impl Tool for FindReplaceFileTool {
|
||||
fn name(&self) -> String {
|
||||
"find_replace_file".into()
|
||||
}
|
||||
|
||||
fn needs_confirmation(&self, _: &serde_json::Value, _: &App) -> bool {
|
||||
false
|
||||
}
|
||||
|
||||
fn description(&self) -> String {
|
||||
include_str!("find_replace_tool/description.md").to_string()
|
||||
}
|
||||
|
||||
fn icon(&self) -> IconName {
|
||||
IconName::Pencil
|
||||
}
|
||||
|
||||
fn input_schema(&self, format: LanguageModelToolSchemaFormat) -> Result<serde_json::Value> {
|
||||
json_schema_for::<FindReplaceFileToolInput>(format)
|
||||
}
|
||||
|
||||
fn ui_text(&self, input: &serde_json::Value) -> String {
|
||||
match serde_json::from_value::<FindReplaceFileToolInput>(input.clone()) {
|
||||
Ok(input) => input.display_description,
|
||||
Err(_) => "Edit file".to_string(),
|
||||
}
|
||||
}
|
||||
|
||||
fn run(
|
||||
self: Arc<Self>,
|
||||
input: serde_json::Value,
|
||||
_messages: &[LanguageModelRequestMessage],
|
||||
project: Entity<Project>,
|
||||
action_log: Entity<ActionLog>,
|
||||
cx: &mut App,
|
||||
) -> ToolResult {
|
||||
let input = match serde_json::from_value::<FindReplaceFileToolInput>(input) {
|
||||
Ok(input) => input,
|
||||
Err(err) => return Task::ready(Err(anyhow!(err))).into(),
|
||||
};
|
||||
|
||||
cx.spawn(async move |cx: &mut AsyncApp| {
|
||||
let project_path = project.read_with(cx, |project, cx| {
|
||||
project
|
||||
.find_project_path(&input.path, cx)
|
||||
.context("Path not found in project")
|
||||
})??;
|
||||
|
||||
let buffer = project
|
||||
.update(cx, |project, cx| project.open_buffer(project_path, cx))?
|
||||
.await?;
|
||||
|
||||
let snapshot = buffer.read_with(cx, |buffer, _cx| buffer.snapshot())?;
|
||||
|
||||
if input.find.is_empty() {
|
||||
return Err(anyhow!("`find` string cannot be empty. Use a different tool if you want to create a file."));
|
||||
}
|
||||
|
||||
if input.find == input.replace {
|
||||
return Err(anyhow!("The `find` and `replace` strings are identical, so no changes would be made."));
|
||||
}
|
||||
|
||||
let result = cx
|
||||
.background_spawn(async move {
|
||||
// Try to match exactly
|
||||
let diff = replace_exact(&input.find, &input.replace, &snapshot)
|
||||
.await
|
||||
// If that fails, try being flexible about indentation
|
||||
.or_else(|| replace_with_flexible_indent(&input.find, &input.replace, &snapshot))?;
|
||||
|
||||
if diff.edits.is_empty() {
|
||||
return None;
|
||||
}
|
||||
|
||||
let old_text = snapshot.text();
|
||||
|
||||
Some((old_text, diff))
|
||||
})
|
||||
.await;
|
||||
|
||||
let Some((old_text, diff)) = result else {
|
||||
let err = buffer.read_with(cx, |buffer, _cx| {
|
||||
let file_exists = buffer
|
||||
.file()
|
||||
.map_or(false, |file| file.disk_state().exists());
|
||||
|
||||
if !file_exists {
|
||||
anyhow!("{} does not exist", input.path.display())
|
||||
} else if buffer.is_empty() {
|
||||
anyhow!(
|
||||
"{} is empty, so the provided `find` string wasn't found.",
|
||||
input.path.display()
|
||||
)
|
||||
} else {
|
||||
anyhow!("Failed to match the provided `find` string")
|
||||
}
|
||||
})?;
|
||||
|
||||
return Err(err)
|
||||
};
|
||||
|
||||
let snapshot = cx.update(|cx| {
|
||||
action_log.update(cx, |log, cx| {
|
||||
log.buffer_read(buffer.clone(), cx)
|
||||
});
|
||||
let snapshot = buffer.update(cx, |buffer, cx| {
|
||||
buffer.finalize_last_transaction();
|
||||
buffer.apply_diff(diff, cx);
|
||||
buffer.finalize_last_transaction();
|
||||
buffer.snapshot()
|
||||
});
|
||||
action_log.update(cx, |log, cx| {
|
||||
log.buffer_edited(buffer.clone(), cx)
|
||||
});
|
||||
snapshot
|
||||
})?;
|
||||
|
||||
project.update( cx, |project, cx| {
|
||||
project.save_buffer(buffer, cx)
|
||||
})?.await?;
|
||||
|
||||
let diff_str = cx.background_spawn(async move {
|
||||
let new_text = snapshot.text();
|
||||
language::unified_diff(&old_text, &new_text)
|
||||
}).await;
|
||||
|
||||
|
||||
Ok(format!("Edited {}:\n\n```diff\n{}\n```", input.path.display(), diff_str))
|
||||
|
||||
}).into()
|
||||
}
|
||||
}
|
||||
@@ -12,7 +12,7 @@ use util::markdown::MarkdownString;
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize, JsonSchema)]
|
||||
pub struct ListDirectoryToolInput {
|
||||
/// The relative path of the directory to list.
|
||||
/// The fully-qualified path of the directory to list in the project.
|
||||
///
|
||||
/// This path should never be absolute, and the first component
|
||||
/// of the path should always be a root directory in a project.
|
||||
|
||||
@@ -1 +1 @@
|
||||
Lists files and directories in a given path.
|
||||
Lists files and directories in a given path. Prefer the `regex_search` or `path_search` tools when searching the codebase.
|
||||
|
||||
@@ -6,14 +6,14 @@ use language_model::{LanguageModelRequestMessage, LanguageModelToolSchemaFormat}
|
||||
use project::Project;
|
||||
use schemars::JsonSchema;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::{path::PathBuf, sync::Arc};
|
||||
use std::{cmp, fmt::Write as _, path::PathBuf, sync::Arc};
|
||||
use ui::IconName;
|
||||
use util::paths::PathMatcher;
|
||||
use worktree::Snapshot;
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize, JsonSchema)]
|
||||
pub struct PathSearchToolInput {
|
||||
/// The glob to search all project paths for.
|
||||
/// The glob to match against every path in the project.
|
||||
///
|
||||
/// <example>
|
||||
/// If the project has the following root directories:
|
||||
@@ -76,66 +76,114 @@ impl Tool for PathSearchTool {
|
||||
Ok(input) => (input.offset, input.glob),
|
||||
Err(err) => return Task::ready(Err(anyhow!(err))).into(),
|
||||
};
|
||||
|
||||
let path_matcher = match PathMatcher::new([
|
||||
// Sometimes models try to search for "". In this case, return all paths in the project.
|
||||
if glob.is_empty() { "*" } else { &glob },
|
||||
]) {
|
||||
Ok(matcher) => matcher,
|
||||
Err(err) => return Task::ready(Err(anyhow!("Invalid glob: {err}"))).into(),
|
||||
};
|
||||
let snapshots: Vec<Snapshot> = project
|
||||
.read(cx)
|
||||
.worktrees(cx)
|
||||
.map(|worktree| worktree.read(cx).snapshot())
|
||||
.collect();
|
||||
|
||||
let offset = offset as usize;
|
||||
let task = search_paths(&glob, project, cx);
|
||||
cx.background_spawn(async move {
|
||||
let mut matches = Vec::new();
|
||||
|
||||
for worktree in snapshots {
|
||||
let root_name = worktree.root_name();
|
||||
|
||||
// Don't consider ignored entries.
|
||||
for entry in worktree.entries(false, 0) {
|
||||
if path_matcher.is_match(&entry.path) {
|
||||
matches.push(
|
||||
PathBuf::from(root_name)
|
||||
.join(&entry.path)
|
||||
.to_string_lossy()
|
||||
.to_string(),
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if matches.is_empty() {
|
||||
Ok(format!("No paths in the project matched the glob {glob:?}"))
|
||||
} else {
|
||||
// Sort to group entries in the same directory together.
|
||||
matches.sort();
|
||||
|
||||
let total_matches = matches.len();
|
||||
let response = if total_matches > RESULTS_PER_PAGE + offset as usize {
|
||||
let paginated_matches: Vec<_> = matches
|
||||
.into_iter()
|
||||
.skip(offset as usize)
|
||||
.take(RESULTS_PER_PAGE)
|
||||
.collect();
|
||||
|
||||
format!(
|
||||
"Found {} total matches. Showing results {}-{} (provide 'offset' parameter for more results):\n\n{}",
|
||||
total_matches,
|
||||
offset + 1,
|
||||
offset as usize + paginated_matches.len(),
|
||||
paginated_matches.join("\n")
|
||||
)
|
||||
} else {
|
||||
matches.join("\n")
|
||||
};
|
||||
|
||||
Ok(response)
|
||||
let matches = task.await?;
|
||||
let paginated_matches = &matches[cmp::min(offset, matches.len())..cmp::min(offset + RESULTS_PER_PAGE, matches.len())];
|
||||
let mut message = format!(
|
||||
"Found {} total matches. Showing results {}-{} (provide 'offset' parameter for more results):\n",
|
||||
matches.len(),
|
||||
offset + 1,
|
||||
offset as usize + paginated_matches.len(),
|
||||
);
|
||||
for mat in matches.into_iter().skip(offset).take(RESULTS_PER_PAGE) {
|
||||
write!(&mut message, "\n{}", mat.display()).unwrap();
|
||||
}
|
||||
Ok(message)
|
||||
}).into()
|
||||
}
|
||||
}
|
||||
|
||||
fn search_paths(glob: &str, project: Entity<Project>, cx: &mut App) -> Task<Result<Vec<PathBuf>>> {
|
||||
let path_matcher = match PathMatcher::new([
|
||||
// Sometimes models try to search for "". In this case, return all paths in the project.
|
||||
if glob.is_empty() { "*" } else { glob },
|
||||
]) {
|
||||
Ok(matcher) => matcher,
|
||||
Err(err) => return Task::ready(Err(anyhow!("Invalid glob: {err}"))).into(),
|
||||
};
|
||||
let snapshots: Vec<Snapshot> = project
|
||||
.read(cx)
|
||||
.worktrees(cx)
|
||||
.map(|worktree| worktree.read(cx).snapshot())
|
||||
.collect();
|
||||
|
||||
cx.background_spawn(async move {
|
||||
Ok(snapshots
|
||||
.iter()
|
||||
.flat_map(|snapshot| {
|
||||
let root_name = PathBuf::from(snapshot.root_name());
|
||||
snapshot
|
||||
.entries(false, 0)
|
||||
.map(move |entry| root_name.join(&entry.path))
|
||||
.filter(|path| path_matcher.is_match(&path))
|
||||
})
|
||||
.collect())
|
||||
})
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod test {
|
||||
use super::*;
|
||||
use gpui::TestAppContext;
|
||||
use project::{FakeFs, Project};
|
||||
use settings::SettingsStore;
|
||||
use util::path;
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_path_search_tool(cx: &mut TestAppContext) {
|
||||
init_test(cx);
|
||||
|
||||
let fs = FakeFs::new(cx.executor());
|
||||
fs.insert_tree(
|
||||
"/root",
|
||||
serde_json::json!({
|
||||
"apple": {
|
||||
"banana": {
|
||||
"carrot": "1",
|
||||
},
|
||||
"bandana": {
|
||||
"carbonara": "2",
|
||||
},
|
||||
"endive": "3"
|
||||
}
|
||||
}),
|
||||
)
|
||||
.await;
|
||||
let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
|
||||
|
||||
let matches = cx
|
||||
.update(|cx| search_paths("root/**/car*", project.clone(), cx))
|
||||
.await
|
||||
.unwrap();
|
||||
assert_eq!(
|
||||
matches,
|
||||
&[
|
||||
PathBuf::from("root/apple/banana/carrot"),
|
||||
PathBuf::from("root/apple/bandana/carbonara")
|
||||
]
|
||||
);
|
||||
|
||||
let matches = cx
|
||||
.update(|cx| search_paths("**/car*", project.clone(), cx))
|
||||
.await
|
||||
.unwrap();
|
||||
assert_eq!(
|
||||
matches,
|
||||
&[
|
||||
PathBuf::from("root/apple/banana/carrot"),
|
||||
PathBuf::from("root/apple/bandana/carbonara")
|
||||
]
|
||||
);
|
||||
}
|
||||
|
||||
fn init_test(cx: &mut TestAppContext) {
|
||||
cx.update(|cx| {
|
||||
let settings_store = SettingsStore::test(cx);
|
||||
cx.set_global(settings_store);
|
||||
language::init(cx);
|
||||
Project::init_settings(cx);
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,3 +1,7 @@
|
||||
Returns paths in the project which match the given glob.
|
||||
Fast file pattern matching tool that works with any codebase size
|
||||
|
||||
Results are paginated with 50 matches per page. Use the optional 'offset' parameter to request subsequent pages.
|
||||
- Supports glob patterns like "**/*.js" or "src/**/*.ts"
|
||||
- Returns matching file paths sorted alphabetically
|
||||
- Prefer the `regex_search` tool to this tool when searching for symbols unless you have specific information about paths.
|
||||
- Use this tool when you need to find files by name patterns
|
||||
- Results are paginated with 50 matches per page. Use the optional 'offset' parameter to request subsequent pages.
|
||||
|
||||
@@ -106,6 +106,7 @@ impl Tool for RegexSearchTool {
|
||||
false,
|
||||
case_sensitive,
|
||||
false,
|
||||
false,
|
||||
PathMatcher::default(),
|
||||
PathMatcher::default(),
|
||||
None,
|
||||
|
||||
@@ -1,7 +1,6 @@
|
||||
Searches the entire project for the given regular expression.
|
||||
|
||||
Returns a list of paths that matched the query. For each path, it returns some excerpts of the matched text.
|
||||
|
||||
Results are paginated with 20 matches per page. Use the optional 'offset' parameter to request subsequent pages.
|
||||
|
||||
This tool is not aware of semantics and does not use any information from language servers, so it should only be used when no available semantic tool (e.g. one that uses language servers) could fit a particular use case instead.
|
||||
- Prefer this tool when searching for files containing symbols in the project.
|
||||
- Supports full regex syntax (eg. "log.*Error", "function\\s+\\w+", etc.)
|
||||
- Use this tool when you need to find files containing specific patterns
|
||||
- Results are paginated with 20 matches per page. Use the optional 'offset' parameter to request subsequent pages.
|
||||
|
||||
@@ -27,6 +27,8 @@ serde_json.workspace = true
|
||||
settings.workspace = true
|
||||
smol.workspace = true
|
||||
tempfile.workspace = true
|
||||
which.workspace = true
|
||||
workspace.workspace = true
|
||||
workspace-hack.workspace = true
|
||||
|
||||
[target.'cfg(not(target_os = "windows"))'.dependencies]
|
||||
which.workspace = true
|
||||
|
||||
@@ -23,7 +23,6 @@ use std::{
|
||||
sync::Arc,
|
||||
time::Duration,
|
||||
};
|
||||
use which::which;
|
||||
use workspace::Workspace;
|
||||
|
||||
const SHOULD_SHOW_UPDATE_NOTIFICATION_KEY: &str = "auto-updater-should-show-updated-notification";
|
||||
@@ -63,7 +62,7 @@ pub struct AutoUpdater {
|
||||
pending_poll: Option<Task<Option<()>>>,
|
||||
}
|
||||
|
||||
#[derive(Deserialize)]
|
||||
#[derive(Deserialize, Debug)]
|
||||
pub struct JsonRelease {
|
||||
pub version: String,
|
||||
pub url: String,
|
||||
@@ -237,6 +236,46 @@ pub fn view_release_notes(_: &ViewReleaseNotes, cx: &mut App) -> Option<()> {
|
||||
None
|
||||
}
|
||||
|
||||
#[cfg(not(target_os = "windows"))]
|
||||
struct InstallerDir(tempfile::TempDir);
|
||||
|
||||
#[cfg(not(target_os = "windows"))]
|
||||
impl InstallerDir {
|
||||
async fn new() -> Result<Self> {
|
||||
Ok(Self(
|
||||
tempfile::Builder::new()
|
||||
.prefix("zed-auto-update")
|
||||
.tempdir()?,
|
||||
))
|
||||
}
|
||||
|
||||
fn path(&self) -> &Path {
|
||||
self.0.path()
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(target_os = "windows")]
|
||||
struct InstallerDir(PathBuf);
|
||||
|
||||
#[cfg(target_os = "windows")]
|
||||
impl InstallerDir {
|
||||
async fn new() -> Result<Self> {
|
||||
let installer_dir = std::env::current_exe()?
|
||||
.parent()
|
||||
.context("No parent dir for Zed.exe")?
|
||||
.join("updates");
|
||||
if smol::fs::metadata(&installer_dir).await.is_ok() {
|
||||
smol::fs::remove_dir_all(&installer_dir).await?;
|
||||
}
|
||||
smol::fs::create_dir(&installer_dir).await?;
|
||||
Ok(Self(installer_dir))
|
||||
}
|
||||
|
||||
fn path(&self) -> &Path {
|
||||
self.0.as_path()
|
||||
}
|
||||
}
|
||||
|
||||
impl AutoUpdater {
|
||||
pub fn get(cx: &mut App) -> Option<Entity<Self>> {
|
||||
cx.default_global::<GlobalAutoUpdate>().0.clone()
|
||||
@@ -469,22 +508,21 @@ impl AutoUpdater {
|
||||
cx.notify();
|
||||
})?;
|
||||
|
||||
let temp_dir = tempfile::Builder::new()
|
||||
.prefix("zed-auto-update")
|
||||
.tempdir()?;
|
||||
|
||||
let installer_dir = InstallerDir::new().await?;
|
||||
let filename = match OS {
|
||||
"macos" => Ok("Zed.dmg"),
|
||||
"linux" => Ok("zed.tar.gz"),
|
||||
"windows" => Ok("ZedUpdateInstaller.exe"),
|
||||
_ => Err(anyhow!("not supported: {:?}", OS)),
|
||||
}?;
|
||||
|
||||
#[cfg(not(target_os = "windows"))]
|
||||
anyhow::ensure!(
|
||||
which("rsync").is_ok(),
|
||||
which::which("rsync").is_ok(),
|
||||
"Aborting. Could not find rsync which is required for auto-updates."
|
||||
);
|
||||
|
||||
let downloaded_asset = temp_dir.path().join(filename);
|
||||
let downloaded_asset = installer_dir.path().join(filename);
|
||||
download_release(&downloaded_asset, release, client, &cx).await?;
|
||||
|
||||
this.update(&mut cx, |this, cx| {
|
||||
@@ -493,8 +531,9 @@ impl AutoUpdater {
|
||||
})?;
|
||||
|
||||
let binary_path = match OS {
|
||||
"macos" => install_release_macos(&temp_dir, downloaded_asset, &cx).await,
|
||||
"linux" => install_release_linux(&temp_dir, downloaded_asset, &cx).await,
|
||||
"macos" => install_release_macos(&installer_dir, downloaded_asset, &cx).await,
|
||||
"linux" => install_release_linux(&installer_dir, downloaded_asset, &cx).await,
|
||||
"windows" => install_release_windows(downloaded_asset).await,
|
||||
_ => Err(anyhow!("not supported: {:?}", OS)),
|
||||
}?;
|
||||
|
||||
@@ -629,7 +668,7 @@ async fn download_release(
|
||||
}
|
||||
|
||||
async fn install_release_linux(
|
||||
temp_dir: &tempfile::TempDir,
|
||||
temp_dir: &InstallerDir,
|
||||
downloaded_tar_gz: PathBuf,
|
||||
cx: &AsyncApp,
|
||||
) -> Result<PathBuf> {
|
||||
@@ -696,7 +735,7 @@ async fn install_release_linux(
|
||||
}
|
||||
|
||||
async fn install_release_macos(
|
||||
temp_dir: &tempfile::TempDir,
|
||||
temp_dir: &InstallerDir,
|
||||
downloaded_dmg: PathBuf,
|
||||
cx: &AsyncApp,
|
||||
) -> Result<PathBuf> {
|
||||
@@ -743,3 +782,41 @@ async fn install_release_macos(
|
||||
|
||||
Ok(running_app_path)
|
||||
}
|
||||
|
||||
async fn install_release_windows(downloaded_installer: PathBuf) -> Result<PathBuf> {
|
||||
let output = Command::new(downloaded_installer)
|
||||
.arg("/verysilent")
|
||||
.arg("/update=true")
|
||||
.arg("!desktopicon")
|
||||
.arg("!quicklaunchicon")
|
||||
.output()
|
||||
.await?;
|
||||
anyhow::ensure!(
|
||||
output.status.success(),
|
||||
"failed to start installer: {:?}",
|
||||
String::from_utf8_lossy(&output.stderr)
|
||||
);
|
||||
Ok(std::env::current_exe()?)
|
||||
}
|
||||
|
||||
pub fn check_pending_installation() -> bool {
|
||||
let Some(installer_path) = std::env::current_exe()
|
||||
.ok()
|
||||
.and_then(|p| p.parent().map(|p| p.join("updates")))
|
||||
else {
|
||||
return false;
|
||||
};
|
||||
|
||||
// The installer will create a flag file after it finishes updating
|
||||
let flag_file = installer_path.join("versions.txt");
|
||||
if flag_file.exists() {
|
||||
if let Some(helper) = installer_path
|
||||
.parent()
|
||||
.map(|p| p.join("tools\\auto_update_helper.exe"))
|
||||
{
|
||||
let _ = std::process::Command::new(helper).spawn();
|
||||
return true;
|
||||
}
|
||||
}
|
||||
false
|
||||
}
|
||||
|
||||
29
crates/auto_update_helper/Cargo.toml
Normal file
29
crates/auto_update_helper/Cargo.toml
Normal file
@@ -0,0 +1,29 @@
|
||||
[package]
|
||||
name = "auto_update_helper"
|
||||
version = "0.1.0"
|
||||
edition.workspace = true
|
||||
publish.workspace = true
|
||||
license = "GPL-3.0-or-later"
|
||||
|
||||
[lints]
|
||||
workspace = true
|
||||
|
||||
[[bin]]
|
||||
name = "auto_update_helper"
|
||||
path = "src/auto_update_helper.rs"
|
||||
doctest = false
|
||||
|
||||
[dependencies]
|
||||
anyhow.workspace = true
|
||||
log.workspace = true
|
||||
simplelog.workspace = true
|
||||
workspace-hack.workspace = true
|
||||
|
||||
[target.'cfg(target_os = "windows")'.dependencies]
|
||||
windows.workspace = true
|
||||
|
||||
[target.'cfg(target_os = "windows")'.build-dependencies]
|
||||
winresource = "0.1"
|
||||
|
||||
[package.metadata.docs.rs]
|
||||
targets = ["x86_64-pc-windows-msvc"]
|
||||
1
crates/auto_update_helper/LICENSE-GPL
Symbolic link
1
crates/auto_update_helper/LICENSE-GPL
Symbolic link
@@ -0,0 +1 @@
|
||||
../../LICENSE-GPL
|
||||
15
crates/auto_update_helper/build.rs
Normal file
15
crates/auto_update_helper/build.rs
Normal file
@@ -0,0 +1,15 @@
|
||||
fn main() {
|
||||
#[cfg(target_os = "windows")]
|
||||
{
|
||||
println!("cargo:rerun-if-changed=manifest.xml");
|
||||
|
||||
let mut res = winresource::WindowsResource::new();
|
||||
res.set_manifest_file("manifest.xml");
|
||||
res.set_icon("app-icon.ico");
|
||||
|
||||
if let Err(e) = res.compile() {
|
||||
eprintln!("{}", e);
|
||||
std::process::exit(1);
|
||||
}
|
||||
}
|
||||
}
|
||||
16
crates/auto_update_helper/manifest.xml
Normal file
16
crates/auto_update_helper/manifest.xml
Normal file
@@ -0,0 +1,16 @@
|
||||
<assembly xmlns="urn:schemas-microsoft-com:asm.v1" manifestVersion="1.0" xmlns:asmv3="urn:schemas-microsoft-com:asm.v3">
|
||||
<asmv3:application>
|
||||
<asmv3:windowsSettings>
|
||||
<dpiAware xmlns="http://schemas.microsoft.com/SMI/2005/WindowsSettings">true</dpiAware>
|
||||
<dpiAwareness xmlns="http://schemas.microsoft.com/SMI/2016/WindowsSettings">PerMonitorV2</dpiAwareness>
|
||||
</asmv3:windowsSettings>
|
||||
</asmv3:application>
|
||||
<dependency>
|
||||
<dependentAssembly>
|
||||
<assemblyIdentity type='win32'
|
||||
name='Microsoft.Windows.Common-Controls'
|
||||
version='6.0.0.0' processorArchitecture='*'
|
||||
publicKeyToken='6595b64144ccf1df' />
|
||||
</dependentAssembly>
|
||||
</dependency>
|
||||
</assembly>
|
||||
94
crates/auto_update_helper/src/auto_update_helper.rs
Normal file
94
crates/auto_update_helper/src/auto_update_helper.rs
Normal file
@@ -0,0 +1,94 @@
|
||||
#![cfg_attr(not(debug_assertions), windows_subsystem = "windows")]
|
||||
|
||||
#[cfg(target_os = "windows")]
|
||||
mod dialog;
|
||||
#[cfg(target_os = "windows")]
|
||||
mod updater;
|
||||
|
||||
#[cfg(target_os = "windows")]
|
||||
fn main() {
|
||||
if let Err(e) = windows_impl::run() {
|
||||
log::error!("Error: Zed update failed, {:?}", e);
|
||||
windows_impl::show_error(format!("Error: {:?}", e));
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(not(target_os = "windows"))]
|
||||
fn main() {}
|
||||
|
||||
#[cfg(target_os = "windows")]
|
||||
mod windows_impl {
|
||||
use std::path::Path;
|
||||
|
||||
use super::dialog::create_dialog_window;
|
||||
use super::updater::perform_update;
|
||||
use anyhow::{Context, Result};
|
||||
use windows::{
|
||||
Win32::{
|
||||
Foundation::{HWND, LPARAM, WPARAM},
|
||||
UI::WindowsAndMessaging::{
|
||||
DispatchMessageW, GetMessageW, MB_ICONERROR, MB_SYSTEMMODAL, MSG, MessageBoxW,
|
||||
PostMessageW, WM_USER,
|
||||
},
|
||||
},
|
||||
core::HSTRING,
|
||||
};
|
||||
|
||||
pub(crate) const WM_JOB_UPDATED: u32 = WM_USER + 1;
|
||||
pub(crate) const WM_TERMINATE: u32 = WM_USER + 2;
|
||||
|
||||
pub(crate) fn run() -> Result<()> {
|
||||
let helper_dir = std::env::current_exe()?
|
||||
.parent()
|
||||
.context("No parent directory")?
|
||||
.to_path_buf();
|
||||
init_log(&helper_dir)?;
|
||||
let app_dir = helper_dir
|
||||
.parent()
|
||||
.context("No parent directory")?
|
||||
.to_path_buf();
|
||||
|
||||
log::info!("======= Starting Zed update =======");
|
||||
let (tx, rx) = std::sync::mpsc::channel();
|
||||
let hwnd = create_dialog_window(rx)?.0 as isize;
|
||||
std::thread::spawn(move || {
|
||||
let result = perform_update(app_dir.as_path(), Some(hwnd));
|
||||
tx.send(result).ok();
|
||||
unsafe { PostMessageW(Some(HWND(hwnd as _)), WM_TERMINATE, WPARAM(0), LPARAM(0)) }.ok();
|
||||
});
|
||||
unsafe {
|
||||
let mut message = MSG::default();
|
||||
while GetMessageW(&mut message, None, 0, 0).as_bool() {
|
||||
DispatchMessageW(&message);
|
||||
}
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn init_log(helper_dir: &Path) -> Result<()> {
|
||||
simplelog::WriteLogger::init(
|
||||
simplelog::LevelFilter::Info,
|
||||
simplelog::Config::default(),
|
||||
std::fs::File::options()
|
||||
.append(true)
|
||||
.create(true)
|
||||
.open(helper_dir.join("auto_update_helper.log"))?,
|
||||
)?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub(crate) fn show_error(mut content: String) {
|
||||
if content.len() > 600 {
|
||||
content.truncate(600);
|
||||
content.push_str("...\n");
|
||||
}
|
||||
let _ = unsafe {
|
||||
MessageBoxW(
|
||||
None,
|
||||
&HSTRING::from(content),
|
||||
windows::core::w!("Error: Zed update failed."),
|
||||
MB_ICONERROR | MB_SYSTEMMODAL,
|
||||
)
|
||||
};
|
||||
}
|
||||
}
|
||||
236
crates/auto_update_helper/src/dialog.rs
Normal file
236
crates/auto_update_helper/src/dialog.rs
Normal file
@@ -0,0 +1,236 @@
|
||||
use std::{cell::RefCell, sync::mpsc::Receiver};
|
||||
|
||||
use anyhow::{Context as _, Result};
|
||||
use windows::{
|
||||
Win32::{
|
||||
Foundation::{HWND, LPARAM, LRESULT, RECT, WPARAM},
|
||||
Graphics::Gdi::{
|
||||
BeginPaint, CLEARTYPE_QUALITY, CLIP_DEFAULT_PRECIS, CreateFontW, DEFAULT_CHARSET,
|
||||
DeleteObject, EndPaint, FW_NORMAL, LOGFONTW, OUT_TT_ONLY_PRECIS, PAINTSTRUCT,
|
||||
ReleaseDC, SelectObject, TextOutW,
|
||||
},
|
||||
System::LibraryLoader::GetModuleHandleW,
|
||||
UI::{
|
||||
Controls::{PBM_SETRANGE, PBM_SETSTEP, PBM_STEPIT, PROGRESS_CLASS},
|
||||
WindowsAndMessaging::{
|
||||
CREATESTRUCTW, CS_HREDRAW, CS_VREDRAW, CreateWindowExW, DefWindowProcW,
|
||||
GWLP_USERDATA, GetDesktopWindow, GetWindowLongPtrW, GetWindowRect, HICON,
|
||||
IMAGE_ICON, LR_DEFAULTSIZE, LR_SHARED, LoadImageW, PostQuitMessage, RegisterClassW,
|
||||
SPI_GETICONTITLELOGFONT, SYSTEM_PARAMETERS_INFO_UPDATE_FLAGS, SendMessageW,
|
||||
SetWindowLongPtrW, SystemParametersInfoW, WINDOW_EX_STYLE, WM_CLOSE, WM_CREATE,
|
||||
WM_DESTROY, WM_NCCREATE, WM_PAINT, WNDCLASSW, WS_CAPTION, WS_CHILD, WS_EX_TOPMOST,
|
||||
WS_POPUP, WS_VISIBLE,
|
||||
},
|
||||
},
|
||||
},
|
||||
core::HSTRING,
|
||||
};
|
||||
|
||||
use crate::{
|
||||
updater::JOBS,
|
||||
windows_impl::{WM_JOB_UPDATED, WM_TERMINATE, show_error},
|
||||
};
|
||||
|
||||
#[repr(C)]
|
||||
#[derive(Debug)]
|
||||
struct DialogInfo {
|
||||
rx: Receiver<Result<()>>,
|
||||
progress_bar: isize,
|
||||
}
|
||||
|
||||
pub(crate) fn create_dialog_window(receiver: Receiver<Result<()>>) -> Result<HWND> {
|
||||
unsafe {
|
||||
let class_name = windows::core::w!("Zed-Auto-Updater-Dialog-Class");
|
||||
let module = GetModuleHandleW(None).context("unable to get module handle")?;
|
||||
let handle = LoadImageW(
|
||||
Some(module.into()),
|
||||
windows::core::PCWSTR(1 as _),
|
||||
IMAGE_ICON,
|
||||
0,
|
||||
0,
|
||||
LR_DEFAULTSIZE | LR_SHARED,
|
||||
)
|
||||
.context("unable to load icon file")?;
|
||||
let wc = WNDCLASSW {
|
||||
lpfnWndProc: Some(wnd_proc),
|
||||
lpszClassName: class_name,
|
||||
style: CS_HREDRAW | CS_VREDRAW,
|
||||
hIcon: HICON(handle.0),
|
||||
..Default::default()
|
||||
};
|
||||
RegisterClassW(&wc);
|
||||
let mut rect = RECT::default();
|
||||
GetWindowRect(GetDesktopWindow(), &mut rect)
|
||||
.context("unable to get desktop window rect")?;
|
||||
let width = 400;
|
||||
let height = 150;
|
||||
let info = Box::new(RefCell::new(DialogInfo {
|
||||
rx: receiver,
|
||||
progress_bar: 0,
|
||||
}));
|
||||
|
||||
let hwnd = CreateWindowExW(
|
||||
WS_EX_TOPMOST,
|
||||
class_name,
|
||||
windows::core::w!("Zed Editor"),
|
||||
WS_VISIBLE | WS_POPUP | WS_CAPTION,
|
||||
rect.right / 2 - width / 2,
|
||||
rect.bottom / 2 - height / 2,
|
||||
width,
|
||||
height,
|
||||
None,
|
||||
None,
|
||||
None,
|
||||
Some(Box::into_raw(info) as _),
|
||||
)
|
||||
.context("unable to create dialog window")?;
|
||||
Ok(hwnd)
|
||||
}
|
||||
}
|
||||
|
||||
macro_rules! return_if_failed {
|
||||
($e:expr) => {
|
||||
match $e {
|
||||
Ok(v) => v,
|
||||
Err(e) => {
|
||||
return LRESULT(e.code().0 as _);
|
||||
}
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
macro_rules! make_lparam {
|
||||
($l:expr, $h:expr) => {
|
||||
LPARAM(($l as u32 | ($h as u32) << 16) as isize)
|
||||
};
|
||||
}
|
||||
|
||||
unsafe extern "system" fn wnd_proc(
|
||||
hwnd: HWND,
|
||||
msg: u32,
|
||||
wparam: WPARAM,
|
||||
lparam: LPARAM,
|
||||
) -> LRESULT {
|
||||
match msg {
|
||||
WM_NCCREATE => unsafe {
|
||||
let create_struct = lparam.0 as *const CREATESTRUCTW;
|
||||
let info = (*create_struct).lpCreateParams as *mut RefCell<DialogInfo>;
|
||||
let info = Box::from_raw(info);
|
||||
SetWindowLongPtrW(hwnd, GWLP_USERDATA, Box::into_raw(info) as _);
|
||||
DefWindowProcW(hwnd, msg, wparam, lparam)
|
||||
},
|
||||
WM_CREATE => unsafe {
|
||||
// Create progress bar
|
||||
let mut rect = RECT::default();
|
||||
return_if_failed!(GetWindowRect(hwnd, &mut rect));
|
||||
let progress_bar = return_if_failed!(CreateWindowExW(
|
||||
WINDOW_EX_STYLE(0),
|
||||
PROGRESS_CLASS,
|
||||
None,
|
||||
WS_CHILD | WS_VISIBLE,
|
||||
20,
|
||||
50,
|
||||
340,
|
||||
35,
|
||||
Some(hwnd),
|
||||
None,
|
||||
None,
|
||||
None,
|
||||
));
|
||||
SendMessageW(
|
||||
progress_bar,
|
||||
PBM_SETRANGE,
|
||||
None,
|
||||
Some(make_lparam!(0, JOBS.len() * 10)),
|
||||
);
|
||||
SendMessageW(progress_bar, PBM_SETSTEP, Some(WPARAM(10)), None);
|
||||
with_dialog_data(hwnd, |data| {
|
||||
data.borrow_mut().progress_bar = progress_bar.0 as isize
|
||||
});
|
||||
LRESULT(0)
|
||||
},
|
||||
WM_PAINT => unsafe {
|
||||
let mut ps = PAINTSTRUCT::default();
|
||||
let hdc = BeginPaint(hwnd, &mut ps);
|
||||
|
||||
let font_name = get_system_ui_font_name();
|
||||
let font = CreateFontW(
|
||||
24,
|
||||
0,
|
||||
0,
|
||||
0,
|
||||
FW_NORMAL.0 as _,
|
||||
0,
|
||||
0,
|
||||
0,
|
||||
DEFAULT_CHARSET,
|
||||
OUT_TT_ONLY_PRECIS,
|
||||
CLIP_DEFAULT_PRECIS,
|
||||
CLEARTYPE_QUALITY,
|
||||
0,
|
||||
&HSTRING::from(font_name),
|
||||
);
|
||||
let temp = SelectObject(hdc, font.into());
|
||||
let string = HSTRING::from("Zed Editor is updating...");
|
||||
return_if_failed!(TextOutW(hdc, 20, 15, &string).ok());
|
||||
return_if_failed!(DeleteObject(temp).ok());
|
||||
|
||||
return_if_failed!(EndPaint(hwnd, &ps).ok());
|
||||
ReleaseDC(Some(hwnd), hdc);
|
||||
|
||||
LRESULT(0)
|
||||
},
|
||||
WM_JOB_UPDATED => with_dialog_data(hwnd, |data| {
|
||||
let progress_bar = data.borrow().progress_bar;
|
||||
unsafe { SendMessageW(HWND(progress_bar as _), PBM_STEPIT, None, None) }
|
||||
}),
|
||||
WM_TERMINATE => {
|
||||
with_dialog_data(hwnd, |data| {
|
||||
if let Ok(result) = data.borrow_mut().rx.recv() {
|
||||
if let Err(e) = result {
|
||||
log::error!("Failed to update Zed: {:?}", e);
|
||||
show_error(format!("Error: {:?}", e));
|
||||
}
|
||||
}
|
||||
});
|
||||
unsafe { PostQuitMessage(0) };
|
||||
LRESULT(0)
|
||||
}
|
||||
WM_CLOSE => LRESULT(0), // Prevent user occasionally closing the window
|
||||
WM_DESTROY => {
|
||||
unsafe { PostQuitMessage(0) };
|
||||
LRESULT(0)
|
||||
}
|
||||
_ => unsafe { DefWindowProcW(hwnd, msg, wparam, lparam) },
|
||||
}
|
||||
}
|
||||
|
||||
fn with_dialog_data<F, T>(hwnd: HWND, f: F) -> T
|
||||
where
|
||||
F: FnOnce(&RefCell<DialogInfo>) -> T,
|
||||
{
|
||||
let raw = unsafe { GetWindowLongPtrW(hwnd, GWLP_USERDATA) as *mut RefCell<DialogInfo> };
|
||||
let data = unsafe { Box::from_raw(raw) };
|
||||
let result = f(data.as_ref());
|
||||
unsafe { SetWindowLongPtrW(hwnd, GWLP_USERDATA, Box::into_raw(data) as _) };
|
||||
result
|
||||
}
|
||||
|
||||
fn get_system_ui_font_name() -> String {
|
||||
unsafe {
|
||||
let mut info: LOGFONTW = std::mem::zeroed();
|
||||
if SystemParametersInfoW(
|
||||
SPI_GETICONTITLELOGFONT,
|
||||
std::mem::size_of::<LOGFONTW>() as u32,
|
||||
Some(&mut info as *mut _ as _),
|
||||
SYSTEM_PARAMETERS_INFO_UPDATE_FLAGS(0),
|
||||
)
|
||||
.is_ok()
|
||||
{
|
||||
let font_name = String::from_utf16_lossy(&info.lfFaceName);
|
||||
font_name.trim_matches(char::from(0)).to_owned()
|
||||
} else {
|
||||
"MS Shell Dlg".to_owned()
|
||||
}
|
||||
}
|
||||
}
|
||||
171
crates/auto_update_helper/src/updater.rs
Normal file
171
crates/auto_update_helper/src/updater.rs
Normal file
@@ -0,0 +1,171 @@
|
||||
use std::{
|
||||
os::windows::process::CommandExt,
|
||||
path::Path,
|
||||
time::{Duration, Instant},
|
||||
};
|
||||
|
||||
use anyhow::{Context, Result};
|
||||
use windows::Win32::{
|
||||
Foundation::{HWND, LPARAM, WPARAM},
|
||||
System::Threading::CREATE_NEW_PROCESS_GROUP,
|
||||
UI::WindowsAndMessaging::PostMessageW,
|
||||
};
|
||||
|
||||
use crate::windows_impl::WM_JOB_UPDATED;
|
||||
|
||||
type Job = fn(&Path) -> Result<()>;
|
||||
|
||||
#[cfg(not(test))]
|
||||
pub(crate) const JOBS: [Job; 6] = [
|
||||
// Delete old files
|
||||
|app_dir| {
|
||||
let zed_executable = app_dir.join("Zed.exe");
|
||||
log::info!("Removing old file: {}", zed_executable.display());
|
||||
std::fs::remove_file(&zed_executable).context(format!(
|
||||
"Failed to remove old file {}",
|
||||
zed_executable.display()
|
||||
))
|
||||
},
|
||||
|app_dir| {
|
||||
let zed_cli = app_dir.join("bin\\zed.exe");
|
||||
log::info!("Removing old file: {}", zed_cli.display());
|
||||
std::fs::remove_file(&zed_cli)
|
||||
.context(format!("Failed to remove old file {}", zed_cli.display()))
|
||||
},
|
||||
// Copy new files
|
||||
|app_dir| {
|
||||
let zed_executable_source = app_dir.join("install\\Zed.exe");
|
||||
let zed_executable_dest = app_dir.join("Zed.exe");
|
||||
log::info!(
|
||||
"Copying new file {} to {}",
|
||||
zed_executable_source.display(),
|
||||
zed_executable_dest.display()
|
||||
);
|
||||
std::fs::copy(&zed_executable_source, &zed_executable_dest)
|
||||
.map(|_| ())
|
||||
.context(format!(
|
||||
"Failed to copy new file {} to {}",
|
||||
zed_executable_source.display(),
|
||||
zed_executable_dest.display()
|
||||
))
|
||||
},
|
||||
|app_dir| {
|
||||
let zed_cli_source = app_dir.join("install\\bin\\zed.exe");
|
||||
let zed_cli_dest = app_dir.join("bin\\zed.exe");
|
||||
log::info!(
|
||||
"Copying new file {} to {}",
|
||||
zed_cli_source.display(),
|
||||
zed_cli_dest.display()
|
||||
);
|
||||
std::fs::copy(&zed_cli_source, &zed_cli_dest)
|
||||
.map(|_| ())
|
||||
.context(format!(
|
||||
"Failed to copy new file {} to {}",
|
||||
zed_cli_source.display(),
|
||||
zed_cli_dest.display()
|
||||
))
|
||||
},
|
||||
// Clean up installer folder and updates folder
|
||||
|app_dir| {
|
||||
let updates_folder = app_dir.join("updates");
|
||||
log::info!("Cleaning up: {}", updates_folder.display());
|
||||
std::fs::remove_dir_all(&updates_folder).context(format!(
|
||||
"Failed to remove updates folder {}",
|
||||
updates_folder.display()
|
||||
))
|
||||
},
|
||||
|app_dir| {
|
||||
let installer_folder = app_dir.join("install");
|
||||
log::info!("Cleaning up: {}", installer_folder.display());
|
||||
std::fs::remove_dir_all(&installer_folder).context(format!(
|
||||
"Failed to remove installer folder {}",
|
||||
installer_folder.display()
|
||||
))
|
||||
},
|
||||
];
|
||||
|
||||
#[cfg(test)]
|
||||
pub(crate) const JOBS: [Job; 2] = [
|
||||
|_| {
|
||||
std::thread::sleep(Duration::from_millis(1000));
|
||||
if let Ok(config) = std::env::var("ZED_AUTO_UPDATE") {
|
||||
match config.as_str() {
|
||||
"err" => Err(std::io::Error::new(
|
||||
std::io::ErrorKind::Other,
|
||||
"Simulated error",
|
||||
))
|
||||
.context("Anyhow!"),
|
||||
_ => panic!("Unknown ZED_AUTO_UPDATE value: {}", config),
|
||||
}
|
||||
} else {
|
||||
Ok(())
|
||||
}
|
||||
},
|
||||
|_| {
|
||||
std::thread::sleep(Duration::from_millis(1000));
|
||||
if let Ok(config) = std::env::var("ZED_AUTO_UPDATE") {
|
||||
match config.as_str() {
|
||||
"err" => Err(std::io::Error::new(
|
||||
std::io::ErrorKind::Other,
|
||||
"Simulated error",
|
||||
))
|
||||
.context("Anyhow!"),
|
||||
_ => panic!("Unknown ZED_AUTO_UPDATE value: {}", config),
|
||||
}
|
||||
} else {
|
||||
Ok(())
|
||||
}
|
||||
},
|
||||
];
|
||||
|
||||
pub(crate) fn perform_update(app_dir: &Path, hwnd: Option<isize>) -> Result<()> {
|
||||
let hwnd = hwnd.map(|ptr| HWND(ptr as _));
|
||||
|
||||
for job in JOBS.iter() {
|
||||
let start = Instant::now();
|
||||
loop {
|
||||
if start.elapsed().as_secs() > 2 {
|
||||
return Err(anyhow::anyhow!("Timed out"));
|
||||
}
|
||||
match (*job)(app_dir) {
|
||||
Ok(_) => {
|
||||
unsafe { PostMessageW(hwnd, WM_JOB_UPDATED, WPARAM(0), LPARAM(0))? };
|
||||
break;
|
||||
}
|
||||
Err(err) => {
|
||||
// Check if it's a "not found" error
|
||||
let io_err = err.downcast_ref::<std::io::Error>().unwrap();
|
||||
if io_err.kind() == std::io::ErrorKind::NotFound {
|
||||
log::warn!("File or folder not found.");
|
||||
unsafe { PostMessageW(hwnd, WM_JOB_UPDATED, WPARAM(0), LPARAM(0))? };
|
||||
break;
|
||||
}
|
||||
|
||||
log::error!("Operation failed: {}", err);
|
||||
std::thread::sleep(Duration::from_millis(50));
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
let _ = std::process::Command::new(app_dir.join("Zed.exe"))
|
||||
.creation_flags(CREATE_NEW_PROCESS_GROUP.0)
|
||||
.spawn();
|
||||
log::info!("Update completed successfully");
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod test {
|
||||
use super::perform_update;
|
||||
|
||||
#[test]
|
||||
fn test_perform_update() {
|
||||
let app_dir = std::path::Path::new("C:/");
|
||||
assert!(perform_update(app_dir, None).is_ok());
|
||||
|
||||
// Simulate a timeout
|
||||
unsafe { std::env::set_var("ZED_AUTO_UPDATE", "err") };
|
||||
let ret = perform_update(app_dir, None);
|
||||
assert!(ret.is_err_and(|e| e.to_string().as_str() == "Timed out"));
|
||||
}
|
||||
}
|
||||
@@ -1,13 +1,21 @@
|
||||
use futures::channel::oneshot;
|
||||
use git2::{DiffLineType as GitDiffLineType, DiffOptions as GitOptions, Patch as GitPatch};
|
||||
use gpui::{App, AppContext as _, AsyncApp, Context, Entity, EventEmitter, Task};
|
||||
use gpui::{App, AppContext as _, AsyncApp, Context, Entity, EventEmitter, Task, TaskLabel};
|
||||
use language::{Language, LanguageRegistry};
|
||||
use rope::Rope;
|
||||
use std::{cmp::Ordering, future::Future, iter, mem, ops::Range, sync::Arc};
|
||||
use std::{
|
||||
cmp::Ordering,
|
||||
future::Future,
|
||||
iter,
|
||||
ops::Range,
|
||||
sync::{Arc, LazyLock},
|
||||
};
|
||||
use sum_tree::SumTree;
|
||||
use text::{Anchor, Bias, BufferId, OffsetRangeExt, Point, ToOffset as _};
|
||||
use util::ResultExt;
|
||||
|
||||
pub static CALCULATE_DIFF_TASK: LazyLock<TaskLabel> = LazyLock::new(TaskLabel::new);
|
||||
|
||||
pub struct BufferDiff {
|
||||
pub buffer_id: BufferId,
|
||||
inner: BufferDiffInner,
|
||||
@@ -181,10 +189,12 @@ impl BufferDiffSnapshot {
|
||||
base_text_exists = false;
|
||||
};
|
||||
|
||||
let hunks = cx.background_spawn({
|
||||
let buffer = buffer.clone();
|
||||
async move { compute_hunks(base_text_pair, buffer) }
|
||||
});
|
||||
let hunks = cx
|
||||
.background_executor()
|
||||
.spawn_labeled(*CALCULATE_DIFF_TASK, {
|
||||
let buffer = buffer.clone();
|
||||
async move { compute_hunks(base_text_pair, buffer) }
|
||||
});
|
||||
|
||||
async move {
|
||||
let (base_text, hunks) = futures::join!(base_text_snapshot, hunks);
|
||||
@@ -208,17 +218,18 @@ impl BufferDiffSnapshot {
|
||||
) -> impl Future<Output = Self> + use<> {
|
||||
let base_text_exists = base_text.is_some();
|
||||
let base_text_pair = base_text.map(|text| (text, base_text_snapshot.as_rope().clone()));
|
||||
cx.background_spawn(async move {
|
||||
Self {
|
||||
inner: BufferDiffInner {
|
||||
base_text: base_text_snapshot,
|
||||
pending_hunks: SumTree::new(&buffer),
|
||||
hunks: compute_hunks(base_text_pair, buffer),
|
||||
base_text_exists,
|
||||
},
|
||||
secondary_diff: None,
|
||||
}
|
||||
})
|
||||
cx.background_executor()
|
||||
.spawn_labeled(*CALCULATE_DIFF_TASK, async move {
|
||||
Self {
|
||||
inner: BufferDiffInner {
|
||||
base_text: base_text_snapshot,
|
||||
pending_hunks: SumTree::new(&buffer),
|
||||
hunks: compute_hunks(base_text_pair, buffer),
|
||||
base_text_exists,
|
||||
},
|
||||
secondary_diff: None,
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
@@ -381,6 +392,7 @@ impl BufferDiffInner {
|
||||
while let Some(PendingHunk {
|
||||
buffer_range,
|
||||
diff_base_byte_range,
|
||||
new_status,
|
||||
..
|
||||
}) = pending_hunks_iter.next()
|
||||
{
|
||||
@@ -439,16 +451,23 @@ impl BufferDiffInner {
|
||||
let index_end = prev_unstaged_hunk_base_text_end + end_overshoot;
|
||||
let index_byte_range = index_start..index_end;
|
||||
|
||||
let replacement_text = if stage {
|
||||
log::debug!("stage hunk {:?}", buffer_offset_range);
|
||||
buffer
|
||||
.text_for_range(buffer_offset_range)
|
||||
.collect::<String>()
|
||||
} else {
|
||||
log::debug!("unstage hunk {:?}", buffer_offset_range);
|
||||
head_text
|
||||
.chunks_in_range(diff_base_byte_range.clone())
|
||||
.collect::<String>()
|
||||
let replacement_text = match new_status {
|
||||
DiffHunkSecondaryStatus::SecondaryHunkRemovalPending => {
|
||||
log::debug!("staging hunk {:?}", buffer_offset_range);
|
||||
buffer
|
||||
.text_for_range(buffer_offset_range)
|
||||
.collect::<String>()
|
||||
}
|
||||
DiffHunkSecondaryStatus::SecondaryHunkAdditionPending => {
|
||||
log::debug!("unstaging hunk {:?}", buffer_offset_range);
|
||||
head_text
|
||||
.chunks_in_range(diff_base_byte_range.clone())
|
||||
.collect::<String>()
|
||||
}
|
||||
_ => {
|
||||
debug_assert!(false);
|
||||
continue;
|
||||
}
|
||||
};
|
||||
|
||||
edits.push((index_byte_range, replacement_text));
|
||||
@@ -631,28 +650,6 @@ impl BufferDiffInner {
|
||||
})
|
||||
}
|
||||
|
||||
fn set_state(
|
||||
&mut self,
|
||||
new_state: Self,
|
||||
buffer: &text::BufferSnapshot,
|
||||
) -> Option<Range<Anchor>> {
|
||||
let (base_text_changed, changed_range) =
|
||||
match (self.base_text_exists, new_state.base_text_exists) {
|
||||
(false, false) => (true, None),
|
||||
(true, true) if self.base_text.remote_id() == new_state.base_text.remote_id() => {
|
||||
(false, new_state.compare(&self, buffer))
|
||||
}
|
||||
_ => (true, Some(text::Anchor::MIN..text::Anchor::MAX)),
|
||||
};
|
||||
|
||||
let pending_hunks = mem::replace(&mut self.pending_hunks, SumTree::new(buffer));
|
||||
*self = new_state;
|
||||
if !base_text_changed {
|
||||
self.pending_hunks = pending_hunks;
|
||||
}
|
||||
changed_range
|
||||
}
|
||||
|
||||
fn compare(&self, old: &Self, new_snapshot: &text::BufferSnapshot) -> Option<Range<Anchor>> {
|
||||
let mut new_cursor = self.hunks.cursor::<()>(new_snapshot);
|
||||
let mut old_cursor = old.hunks.cursor::<()>(new_snapshot);
|
||||
@@ -1011,26 +1008,61 @@ impl BufferDiff {
|
||||
&mut self,
|
||||
new_snapshot: BufferDiffSnapshot,
|
||||
buffer: &text::BufferSnapshot,
|
||||
secondary_changed_range: Option<Range<Anchor>>,
|
||||
cx: &mut Context<Self>,
|
||||
) -> Option<Range<Anchor>> {
|
||||
let changed_range = self.inner.set_state(new_snapshot.inner, buffer);
|
||||
self.set_snapshot_with_secondary(new_snapshot, buffer, None, false, cx)
|
||||
}
|
||||
|
||||
let changed_range = match (secondary_changed_range, changed_range) {
|
||||
(None, None) => None,
|
||||
(Some(unstaged_range), None) => self.range_to_hunk_range(unstaged_range, &buffer, cx),
|
||||
(None, Some(uncommitted_range)) => Some(uncommitted_range),
|
||||
(Some(unstaged_range), Some(uncommitted_range)) => {
|
||||
let mut start = uncommitted_range.start;
|
||||
let mut end = uncommitted_range.end;
|
||||
if let Some(unstaged_range) = self.range_to_hunk_range(unstaged_range, &buffer, cx)
|
||||
{
|
||||
start = unstaged_range.start.min(&uncommitted_range.start, &buffer);
|
||||
end = unstaged_range.end.max(&uncommitted_range.end, &buffer);
|
||||
pub fn set_snapshot_with_secondary(
|
||||
&mut self,
|
||||
new_snapshot: BufferDiffSnapshot,
|
||||
buffer: &text::BufferSnapshot,
|
||||
secondary_diff_change: Option<Range<Anchor>>,
|
||||
clear_pending_hunks: bool,
|
||||
cx: &mut Context<Self>,
|
||||
) -> Option<Range<Anchor>> {
|
||||
log::debug!("set snapshot with secondary {secondary_diff_change:?}");
|
||||
|
||||
let state = &mut self.inner;
|
||||
let new_state = new_snapshot.inner;
|
||||
let (base_text_changed, mut changed_range) =
|
||||
match (state.base_text_exists, new_state.base_text_exists) {
|
||||
(false, false) => (true, None),
|
||||
(true, true) if state.base_text.remote_id() == new_state.base_text.remote_id() => {
|
||||
(false, new_state.compare(&state, buffer))
|
||||
}
|
||||
_ => (true, Some(text::Anchor::MIN..text::Anchor::MAX)),
|
||||
};
|
||||
|
||||
if let Some(secondary_changed_range) = secondary_diff_change {
|
||||
if let Some(secondary_hunk_range) =
|
||||
self.range_to_hunk_range(secondary_changed_range, &buffer, cx)
|
||||
{
|
||||
if let Some(range) = &mut changed_range {
|
||||
range.start = secondary_hunk_range.start.min(&range.start, &buffer);
|
||||
range.end = secondary_hunk_range.end.max(&range.end, &buffer);
|
||||
} else {
|
||||
changed_range = Some(secondary_hunk_range);
|
||||
}
|
||||
Some(start..end)
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
let state = &mut self.inner;
|
||||
state.base_text_exists = new_state.base_text_exists;
|
||||
state.base_text = new_state.base_text;
|
||||
state.hunks = new_state.hunks;
|
||||
if base_text_changed || clear_pending_hunks {
|
||||
if let Some((first, last)) = state.pending_hunks.first().zip(state.pending_hunks.last())
|
||||
{
|
||||
if let Some(range) = &mut changed_range {
|
||||
range.start = range.start.min(&first.buffer_range.start, &buffer);
|
||||
range.end = range.end.max(&last.buffer_range.end, &buffer);
|
||||
} else {
|
||||
changed_range = Some(first.buffer_range.start..last.buffer_range.end);
|
||||
}
|
||||
}
|
||||
state.pending_hunks = SumTree::new(buffer);
|
||||
}
|
||||
|
||||
cx.emit(BufferDiffEvent::DiffChanged {
|
||||
changed_range: changed_range.clone(),
|
||||
@@ -1138,7 +1170,7 @@ impl BufferDiff {
|
||||
return;
|
||||
};
|
||||
this.update(cx, |this, cx| {
|
||||
this.set_snapshot(snapshot, &buffer, None, cx);
|
||||
this.set_snapshot(snapshot, &buffer, cx);
|
||||
})
|
||||
.log_err();
|
||||
drop(complete_on_drop)
|
||||
@@ -1163,7 +1195,7 @@ impl BufferDiff {
|
||||
cx,
|
||||
);
|
||||
let snapshot = cx.background_executor().block(snapshot);
|
||||
self.set_snapshot(snapshot, &buffer, None, cx);
|
||||
self.set_snapshot(snapshot, &buffer, cx);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1752,13 +1784,13 @@ mod tests {
|
||||
|
||||
let unstaged_diff = cx.new(|cx| {
|
||||
let mut diff = BufferDiff::new(&buffer, cx);
|
||||
diff.set_snapshot(unstaged, &buffer, None, cx);
|
||||
diff.set_snapshot(unstaged, &buffer, cx);
|
||||
diff
|
||||
});
|
||||
|
||||
let uncommitted_diff = cx.new(|cx| {
|
||||
let mut diff = BufferDiff::new(&buffer, cx);
|
||||
diff.set_snapshot(uncommitted, &buffer, None, cx);
|
||||
diff.set_snapshot(uncommitted, &buffer, cx);
|
||||
diff.set_secondary_diff(unstaged_diff);
|
||||
diff
|
||||
});
|
||||
@@ -1819,12 +1851,12 @@ mod tests {
|
||||
let uncommitted = BufferDiffSnapshot::new_sync(buffer.clone(), head_text.clone(), cx);
|
||||
let unstaged_diff = cx.new(|cx| {
|
||||
let mut diff = BufferDiff::new(&buffer, cx);
|
||||
diff.set_snapshot(unstaged, &buffer, None, cx);
|
||||
diff.set_snapshot(unstaged, &buffer, cx);
|
||||
diff
|
||||
});
|
||||
let uncommitted_diff = cx.new(|cx| {
|
||||
let mut diff = BufferDiff::new(&buffer, cx);
|
||||
diff.set_snapshot(uncommitted, &buffer, None, cx);
|
||||
diff.set_snapshot(uncommitted, &buffer, cx);
|
||||
diff.set_secondary_diff(unstaged_diff.clone());
|
||||
diff
|
||||
});
|
||||
|
||||
@@ -18,7 +18,6 @@ sqlite = ["sea-orm/sqlx-sqlite", "sqlx/sqlite"]
|
||||
test-support = ["sqlite"]
|
||||
|
||||
[dependencies]
|
||||
anthropic.workspace = true
|
||||
anyhow.workspace = true
|
||||
async-stripe.workspace = true
|
||||
async-tungstenite.workspace = true
|
||||
@@ -44,7 +43,7 @@ log.workspace = true
|
||||
nanoid.workspace = true
|
||||
open_ai.workspace = true
|
||||
parking_lot.workspace = true
|
||||
prometheus = "0.13"
|
||||
prometheus = "0.14"
|
||||
prost.workspace = true
|
||||
rand.workspace = true
|
||||
reqwest = { version = "0.11", features = ["json"] }
|
||||
|
||||
@@ -505,7 +505,10 @@ CREATE TABLE IF NOT EXISTS billing_subscriptions (
|
||||
stripe_subscription_id TEXT NOT NULL,
|
||||
stripe_subscription_status TEXT NOT NULL,
|
||||
stripe_cancel_at TIMESTAMP,
|
||||
stripe_cancellation_reason TEXT
|
||||
stripe_cancellation_reason TEXT,
|
||||
kind TEXT,
|
||||
stripe_current_period_start BIGINT,
|
||||
stripe_current_period_end BIGINT
|
||||
);
|
||||
|
||||
CREATE INDEX "ix_billing_subscriptions_on_billing_customer_id" ON billing_subscriptions (billing_customer_id);
|
||||
|
||||
@@ -0,0 +1,4 @@
|
||||
alter table billing_subscriptions
|
||||
add column kind text,
|
||||
add column stripe_current_period_start bigint,
|
||||
add column stripe_current_period_end bigint;
|
||||
@@ -21,7 +21,9 @@ use stripe::{
|
||||
use util::ResultExt;
|
||||
|
||||
use crate::api::events::SnowflakeRow;
|
||||
use crate::db::billing_subscription::{StripeCancellationReason, StripeSubscriptionStatus};
|
||||
use crate::db::billing_subscription::{
|
||||
StripeCancellationReason, StripeSubscriptionStatus, SubscriptionKind,
|
||||
};
|
||||
use crate::llm::{DEFAULT_MAX_MONTHLY_SPEND, FREE_TIER_MONTHLY_SPENDING_LIMIT};
|
||||
use crate::rpc::{ResultExt as _, Server};
|
||||
use crate::{AppState, Cents, Error, Result};
|
||||
@@ -184,7 +186,10 @@ async fn list_billing_subscriptions(
|
||||
.into_iter()
|
||||
.map(|subscription| BillingSubscriptionJson {
|
||||
id: subscription.id,
|
||||
name: "Zed LLM Usage".to_string(),
|
||||
name: match subscription.kind {
|
||||
Some(SubscriptionKind::ZedPro) => "Zed Pro".to_string(),
|
||||
None => "Zed LLM Usage".to_string(),
|
||||
},
|
||||
status: subscription.stripe_subscription_status,
|
||||
cancel_at: subscription.stripe_cancel_at.map(|cancel_at| {
|
||||
cancel_at
|
||||
@@ -198,9 +203,16 @@ async fn list_billing_subscriptions(
|
||||
}))
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Copy, Deserialize)]
|
||||
#[serde(rename_all = "snake_case")]
|
||||
enum ProductCode {
|
||||
ZedPro,
|
||||
}
|
||||
|
||||
#[derive(Debug, Deserialize)]
|
||||
struct CreateBillingSubscriptionBody {
|
||||
github_user_id: i32,
|
||||
product: Option<ProductCode>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize)]
|
||||
@@ -274,15 +286,30 @@ async fn create_billing_subscription(
|
||||
customer.id
|
||||
};
|
||||
|
||||
let default_model = llm_db.model(rpc::LanguageModelProvider::Anthropic, "claude-3-7-sonnet")?;
|
||||
let stripe_model = stripe_billing.register_model(default_model).await?;
|
||||
let success_url = format!(
|
||||
"{}/account?checkout_complete=1",
|
||||
app.config.zed_dot_dev_url()
|
||||
);
|
||||
let checkout_session_url = stripe_billing
|
||||
.checkout(customer_id, &user.github_login, &stripe_model, &success_url)
|
||||
.await?;
|
||||
let checkout_session_url = match body.product {
|
||||
Some(ProductCode::ZedPro) => {
|
||||
let success_url = format!(
|
||||
"{}/account?checkout_complete=1",
|
||||
app.config.zed_dot_dev_url()
|
||||
);
|
||||
stripe_billing
|
||||
.checkout_with_zed_pro(customer_id, &user.github_login, &success_url)
|
||||
.await?
|
||||
}
|
||||
None => {
|
||||
let default_model =
|
||||
llm_db.model(rpc::LanguageModelProvider::Anthropic, "claude-3-7-sonnet")?;
|
||||
let stripe_model = stripe_billing.register_model(default_model).await?;
|
||||
let success_url = format!(
|
||||
"{}/account?checkout_complete=1",
|
||||
app.config.zed_dot_dev_url()
|
||||
);
|
||||
stripe_billing
|
||||
.checkout(customer_id, &user.github_login, &stripe_model, &success_url)
|
||||
.await?
|
||||
}
|
||||
};
|
||||
|
||||
Ok(Json(CreateBillingSubscriptionResponse {
|
||||
checkout_session_url,
|
||||
}))
|
||||
@@ -291,6 +318,10 @@ async fn create_billing_subscription(
|
||||
#[derive(Debug, PartialEq, Deserialize)]
|
||||
#[serde(rename_all = "snake_case")]
|
||||
enum ManageSubscriptionIntent {
|
||||
/// The user intends to manage their subscription.
|
||||
///
|
||||
/// This will open the Stripe billing portal without putting the user in a specific flow.
|
||||
ManageSubscription,
|
||||
/// The user intends to cancel their subscription.
|
||||
Cancel,
|
||||
/// The user intends to stop the cancellation of their subscription.
|
||||
@@ -378,7 +409,8 @@ async fn manage_billing_subscription(
|
||||
}
|
||||
|
||||
let flow = match body.intent {
|
||||
ManageSubscriptionIntent::Cancel => CreateBillingPortalSessionFlowData {
|
||||
ManageSubscriptionIntent::ManageSubscription => None,
|
||||
ManageSubscriptionIntent::Cancel => Some(CreateBillingPortalSessionFlowData {
|
||||
type_: CreateBillingPortalSessionFlowDataType::SubscriptionCancel,
|
||||
after_completion: Some(CreateBillingPortalSessionFlowDataAfterCompletion {
|
||||
type_: stripe::CreateBillingPortalSessionFlowDataAfterCompletionType::Redirect,
|
||||
@@ -394,12 +426,12 @@ async fn manage_billing_subscription(
|
||||
},
|
||||
),
|
||||
..Default::default()
|
||||
},
|
||||
}),
|
||||
ManageSubscriptionIntent::StopCancellation => unreachable!(),
|
||||
};
|
||||
|
||||
let mut params = CreateBillingPortalSession::new(customer_id);
|
||||
params.flow_data = Some(flow);
|
||||
params.flow_data = flow;
|
||||
let return_url = format!("{}/account", app.config.zed_dot_dev_url());
|
||||
params.return_url = Some(&return_url);
|
||||
|
||||
@@ -664,6 +696,23 @@ async fn handle_customer_subscription_event(
|
||||
|
||||
log::info!("handling Stripe {} event: {}", event.type_, event.id);
|
||||
|
||||
let subscription_kind =
|
||||
if let Some(zed_pro_price_id) = app.config.stripe_zed_pro_price_id.as_deref() {
|
||||
let has_zed_pro_price = subscription.items.data.iter().any(|item| {
|
||||
item.price
|
||||
.as_ref()
|
||||
.map_or(false, |price| price.id.as_str() == zed_pro_price_id)
|
||||
});
|
||||
|
||||
if has_zed_pro_price {
|
||||
Some(SubscriptionKind::ZedPro)
|
||||
} else {
|
||||
None
|
||||
}
|
||||
} else {
|
||||
None
|
||||
};
|
||||
|
||||
let billing_customer =
|
||||
find_or_create_billing_customer(app, stripe_client, subscription.customer)
|
||||
.await?
|
||||
@@ -700,6 +749,7 @@ async fn handle_customer_subscription_event(
|
||||
existing_subscription.id,
|
||||
&UpdateBillingSubscriptionParams {
|
||||
billing_customer_id: ActiveValue::set(billing_customer.id),
|
||||
kind: ActiveValue::set(subscription_kind),
|
||||
stripe_subscription_id: ActiveValue::set(subscription.id.to_string()),
|
||||
stripe_subscription_status: ActiveValue::set(subscription.status.into()),
|
||||
stripe_cancel_at: ActiveValue::set(
|
||||
@@ -714,6 +764,12 @@ async fn handle_customer_subscription_event(
|
||||
.and_then(|details| details.reason)
|
||||
.map(|reason| reason.into()),
|
||||
),
|
||||
stripe_current_period_start: ActiveValue::set(Some(
|
||||
subscription.current_period_start,
|
||||
)),
|
||||
stripe_current_period_end: ActiveValue::set(Some(
|
||||
subscription.current_period_end,
|
||||
)),
|
||||
},
|
||||
)
|
||||
.await?;
|
||||
@@ -748,12 +804,15 @@ async fn handle_customer_subscription_event(
|
||||
app.db
|
||||
.create_billing_subscription(&CreateBillingSubscriptionParams {
|
||||
billing_customer_id: billing_customer.id,
|
||||
kind: subscription_kind,
|
||||
stripe_subscription_id: subscription.id.to_string(),
|
||||
stripe_subscription_status: subscription.status.into(),
|
||||
stripe_cancellation_reason: subscription
|
||||
.cancellation_details
|
||||
.and_then(|details| details.reason)
|
||||
.map(|reason| reason.into()),
|
||||
stripe_current_period_start: Some(subscription.current_period_start),
|
||||
stripe_current_period_end: Some(subscription.current_period_end),
|
||||
})
|
||||
.await?;
|
||||
}
|
||||
|
||||
@@ -1,22 +1,30 @@
|
||||
use crate::db::billing_subscription::{StripeCancellationReason, StripeSubscriptionStatus};
|
||||
use crate::db::billing_subscription::{
|
||||
StripeCancellationReason, StripeSubscriptionStatus, SubscriptionKind,
|
||||
};
|
||||
|
||||
use super::*;
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct CreateBillingSubscriptionParams {
|
||||
pub billing_customer_id: BillingCustomerId,
|
||||
pub kind: Option<SubscriptionKind>,
|
||||
pub stripe_subscription_id: String,
|
||||
pub stripe_subscription_status: StripeSubscriptionStatus,
|
||||
pub stripe_cancellation_reason: Option<StripeCancellationReason>,
|
||||
pub stripe_current_period_start: Option<i64>,
|
||||
pub stripe_current_period_end: Option<i64>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Default)]
|
||||
pub struct UpdateBillingSubscriptionParams {
|
||||
pub billing_customer_id: ActiveValue<BillingCustomerId>,
|
||||
pub kind: ActiveValue<Option<SubscriptionKind>>,
|
||||
pub stripe_subscription_id: ActiveValue<String>,
|
||||
pub stripe_subscription_status: ActiveValue<StripeSubscriptionStatus>,
|
||||
pub stripe_cancel_at: ActiveValue<Option<DateTime>>,
|
||||
pub stripe_cancellation_reason: ActiveValue<Option<StripeCancellationReason>>,
|
||||
pub stripe_current_period_start: ActiveValue<Option<i64>>,
|
||||
pub stripe_current_period_end: ActiveValue<Option<i64>>,
|
||||
}
|
||||
|
||||
impl Database {
|
||||
@@ -28,9 +36,12 @@ impl Database {
|
||||
self.transaction(|tx| async move {
|
||||
billing_subscription::Entity::insert(billing_subscription::ActiveModel {
|
||||
billing_customer_id: ActiveValue::set(params.billing_customer_id),
|
||||
kind: ActiveValue::set(params.kind),
|
||||
stripe_subscription_id: ActiveValue::set(params.stripe_subscription_id.clone()),
|
||||
stripe_subscription_status: ActiveValue::set(params.stripe_subscription_status),
|
||||
stripe_cancellation_reason: ActiveValue::set(params.stripe_cancellation_reason),
|
||||
stripe_current_period_start: ActiveValue::set(params.stripe_current_period_start),
|
||||
stripe_current_period_end: ActiveValue::set(params.stripe_current_period_end),
|
||||
..Default::default()
|
||||
})
|
||||
.exec_without_returning(&*tx)
|
||||
|
||||
@@ -9,10 +9,13 @@ pub struct Model {
|
||||
#[sea_orm(primary_key)]
|
||||
pub id: BillingSubscriptionId,
|
||||
pub billing_customer_id: BillingCustomerId,
|
||||
pub kind: Option<SubscriptionKind>,
|
||||
pub stripe_subscription_id: String,
|
||||
pub stripe_subscription_status: StripeSubscriptionStatus,
|
||||
pub stripe_cancel_at: Option<DateTime>,
|
||||
pub stripe_cancellation_reason: Option<StripeCancellationReason>,
|
||||
pub stripe_current_period_start: Option<i64>,
|
||||
pub stripe_current_period_end: Option<i64>,
|
||||
pub created_at: DateTime,
|
||||
}
|
||||
|
||||
@@ -34,6 +37,14 @@ impl Related<super::billing_customer::Entity> for Entity {
|
||||
|
||||
impl ActiveModelBehavior for ActiveModel {}
|
||||
|
||||
#[derive(Eq, PartialEq, Copy, Clone, Debug, EnumIter, DeriveActiveEnum, Hash, Serialize)]
|
||||
#[sea_orm(rs_type = "String", db_type = "String(StringLen::None)")]
|
||||
#[serde(rename_all = "snake_case")]
|
||||
pub enum SubscriptionKind {
|
||||
#[sea_orm(string_value = "zed_pro")]
|
||||
ZedPro,
|
||||
}
|
||||
|
||||
/// The status of a Stripe subscription.
|
||||
///
|
||||
/// [Stripe docs](https://docs.stripe.com/api/subscriptions/object#subscription_object-status)
|
||||
|
||||
@@ -39,9 +39,12 @@ async fn test_get_active_billing_subscriptions(db: &Arc<Database>) {
|
||||
|
||||
db.create_billing_subscription(&CreateBillingSubscriptionParams {
|
||||
billing_customer_id: customer.id,
|
||||
kind: None,
|
||||
stripe_subscription_id: "sub_active_user".into(),
|
||||
stripe_subscription_status: StripeSubscriptionStatus::Active,
|
||||
stripe_cancellation_reason: None,
|
||||
stripe_current_period_start: None,
|
||||
stripe_current_period_end: None,
|
||||
})
|
||||
.await
|
||||
.unwrap();
|
||||
@@ -74,9 +77,12 @@ async fn test_get_active_billing_subscriptions(db: &Arc<Database>) {
|
||||
|
||||
db.create_billing_subscription(&CreateBillingSubscriptionParams {
|
||||
billing_customer_id: customer.id,
|
||||
kind: None,
|
||||
stripe_subscription_id: "sub_past_due_user".into(),
|
||||
stripe_subscription_status: StripeSubscriptionStatus::PastDue,
|
||||
stripe_cancellation_reason: None,
|
||||
stripe_current_period_start: None,
|
||||
stripe_current_period_end: None,
|
||||
})
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
@@ -182,6 +182,7 @@ pub struct Config {
|
||||
pub slack_panics_webhook: Option<String>,
|
||||
pub auto_join_channel_id: Option<ChannelId>,
|
||||
pub stripe_api_key: Option<String>,
|
||||
pub stripe_zed_pro_price_id: Option<String>,
|
||||
pub supermaven_admin_api_key: Option<Arc<str>>,
|
||||
pub user_backfiller_github_access_token: Option<Arc<str>>,
|
||||
}
|
||||
@@ -237,6 +238,7 @@ impl Config {
|
||||
migrations_path: None,
|
||||
seed_path: None,
|
||||
stripe_api_key: None,
|
||||
stripe_zed_pro_price_id: None,
|
||||
supermaven_admin_api_key: None,
|
||||
user_backfiller_github_access_token: None,
|
||||
kinesis_region: None,
|
||||
@@ -253,7 +255,6 @@ impl Config {
|
||||
pub enum ServiceMode {
|
||||
Api,
|
||||
Collab,
|
||||
Llm,
|
||||
All,
|
||||
}
|
||||
|
||||
@@ -265,10 +266,6 @@ impl ServiceMode {
|
||||
pub fn is_api(&self) -> bool {
|
||||
matches!(self, Self::Api | Self::All)
|
||||
}
|
||||
|
||||
pub fn is_llm(&self) -> bool {
|
||||
matches!(self, Self::Llm | Self::All)
|
||||
}
|
||||
}
|
||||
|
||||
pub struct AppState {
|
||||
@@ -327,9 +324,12 @@ impl AppState {
|
||||
llm_db,
|
||||
livekit_client,
|
||||
blob_store_client: build_blob_store_client(&config).await.log_err(),
|
||||
stripe_billing: stripe_client
|
||||
.clone()
|
||||
.map(|stripe_client| Arc::new(StripeBilling::new(stripe_client))),
|
||||
stripe_billing: stripe_client.clone().map(|stripe_client| {
|
||||
Arc::new(StripeBilling::new(
|
||||
stripe_client,
|
||||
config.stripe_zed_pro_price_id.clone(),
|
||||
))
|
||||
}),
|
||||
stripe_client,
|
||||
rate_limiter: Arc::new(RateLimiter::new(db)),
|
||||
executor,
|
||||
|
||||
@@ -1,448 +1,10 @@
|
||||
mod authorization;
|
||||
pub mod db;
|
||||
mod token;
|
||||
|
||||
use crate::api::CloudflareIpCountryHeader;
|
||||
use crate::api::events::SnowflakeRow;
|
||||
use crate::build_kinesis_client;
|
||||
use crate::rpc::MIN_ACCOUNT_AGE_FOR_LLM_USE;
|
||||
use crate::{Cents, Config, Error, Result, db::UserId, executor::Executor};
|
||||
use anyhow::{Context as _, anyhow};
|
||||
use authorization::authorize_access_to_language_model;
|
||||
use axum::routing::get;
|
||||
use axum::{
|
||||
Extension, Json, Router, TypedHeader,
|
||||
body::Body,
|
||||
http::{self, HeaderName, HeaderValue, Request, StatusCode},
|
||||
middleware::{self, Next},
|
||||
response::{IntoResponse, Response},
|
||||
routing::post,
|
||||
};
|
||||
use chrono::{DateTime, Duration, Utc};
|
||||
use collections::HashMap;
|
||||
use db::TokenUsage;
|
||||
use db::{ActiveUserCount, LlmDatabase, usage_measure::UsageMeasure};
|
||||
use futures::{Stream, StreamExt as _};
|
||||
use reqwest_client::ReqwestClient;
|
||||
use rpc::{
|
||||
EXPIRED_LLM_TOKEN_HEADER_NAME, LanguageModelProvider, PerformCompletionParams, proto::Plan,
|
||||
};
|
||||
use rpc::{ListModelsResponse, MAX_LLM_MONTHLY_SPEND_REACHED_HEADER_NAME};
|
||||
use serde_json::json;
|
||||
use std::{
|
||||
pin::Pin,
|
||||
sync::Arc,
|
||||
task::{Context, Poll},
|
||||
};
|
||||
use strum::IntoEnumIterator;
|
||||
use tokio::sync::RwLock;
|
||||
use util::ResultExt;
|
||||
use crate::Cents;
|
||||
|
||||
pub use token::*;
|
||||
|
||||
const ACTIVE_USER_COUNT_CACHE_DURATION: Duration = Duration::seconds(30);
|
||||
|
||||
pub struct LlmState {
|
||||
pub config: Config,
|
||||
pub executor: Executor,
|
||||
pub db: Arc<LlmDatabase>,
|
||||
pub http_client: ReqwestClient,
|
||||
pub kinesis_client: Option<aws_sdk_kinesis::Client>,
|
||||
active_user_count_by_model:
|
||||
RwLock<HashMap<(LanguageModelProvider, String), (DateTime<Utc>, ActiveUserCount)>>,
|
||||
}
|
||||
|
||||
impl LlmState {
|
||||
pub async fn new(config: Config, executor: Executor) -> Result<Arc<Self>> {
|
||||
let database_url = config
|
||||
.llm_database_url
|
||||
.as_ref()
|
||||
.ok_or_else(|| anyhow!("missing LLM_DATABASE_URL"))?;
|
||||
let max_connections = config
|
||||
.llm_database_max_connections
|
||||
.ok_or_else(|| anyhow!("missing LLM_DATABASE_MAX_CONNECTIONS"))?;
|
||||
|
||||
let mut db_options = db::ConnectOptions::new(database_url);
|
||||
db_options.max_connections(max_connections);
|
||||
let mut db = LlmDatabase::new(db_options, executor.clone()).await?;
|
||||
db.initialize().await?;
|
||||
|
||||
let db = Arc::new(db);
|
||||
|
||||
let user_agent = format!("Zed Server/{}", env!("CARGO_PKG_VERSION"));
|
||||
let http_client =
|
||||
ReqwestClient::user_agent(&user_agent).context("failed to construct http client")?;
|
||||
|
||||
let this = Self {
|
||||
executor,
|
||||
db,
|
||||
http_client,
|
||||
kinesis_client: if config.kinesis_access_key.is_some() {
|
||||
build_kinesis_client(&config).await.log_err()
|
||||
} else {
|
||||
None
|
||||
},
|
||||
active_user_count_by_model: RwLock::new(HashMap::default()),
|
||||
config,
|
||||
};
|
||||
|
||||
Ok(Arc::new(this))
|
||||
}
|
||||
|
||||
pub async fn get_active_user_count(
|
||||
&self,
|
||||
provider: LanguageModelProvider,
|
||||
model: &str,
|
||||
) -> Result<ActiveUserCount> {
|
||||
let now = Utc::now();
|
||||
|
||||
{
|
||||
let active_user_count_by_model = self.active_user_count_by_model.read().await;
|
||||
if let Some((last_updated, count)) =
|
||||
active_user_count_by_model.get(&(provider, model.to_string()))
|
||||
{
|
||||
if now - *last_updated < ACTIVE_USER_COUNT_CACHE_DURATION {
|
||||
return Ok(*count);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
let mut cache = self.active_user_count_by_model.write().await;
|
||||
let new_count = self.db.get_active_user_count(provider, model, now).await?;
|
||||
cache.insert((provider, model.to_string()), (now, new_count));
|
||||
Ok(new_count)
|
||||
}
|
||||
}
|
||||
|
||||
pub fn routes() -> Router<(), Body> {
|
||||
Router::new()
|
||||
.route("/models", get(list_models))
|
||||
.route("/completion", post(perform_completion))
|
||||
.layer(middleware::from_fn(validate_api_token))
|
||||
}
|
||||
|
||||
async fn validate_api_token<B>(mut req: Request<B>, next: Next<B>) -> impl IntoResponse {
|
||||
let token = req
|
||||
.headers()
|
||||
.get(http::header::AUTHORIZATION)
|
||||
.and_then(|header| header.to_str().ok())
|
||||
.ok_or_else(|| {
|
||||
Error::http(
|
||||
StatusCode::BAD_REQUEST,
|
||||
"missing authorization header".to_string(),
|
||||
)
|
||||
})?
|
||||
.strip_prefix("Bearer ")
|
||||
.ok_or_else(|| {
|
||||
Error::http(
|
||||
StatusCode::BAD_REQUEST,
|
||||
"invalid authorization header".to_string(),
|
||||
)
|
||||
})?;
|
||||
|
||||
let state = req.extensions().get::<Arc<LlmState>>().unwrap();
|
||||
match LlmTokenClaims::validate(token, &state.config) {
|
||||
Ok(claims) => {
|
||||
if state.db.is_access_token_revoked(&claims.jti).await? {
|
||||
return Err(Error::http(
|
||||
StatusCode::UNAUTHORIZED,
|
||||
"unauthorized".to_string(),
|
||||
));
|
||||
}
|
||||
|
||||
tracing::Span::current()
|
||||
.record("user_id", claims.user_id)
|
||||
.record("login", claims.github_user_login.clone())
|
||||
.record("authn.jti", &claims.jti)
|
||||
.record("is_staff", claims.is_staff);
|
||||
|
||||
req.extensions_mut().insert(claims);
|
||||
Ok::<_, Error>(next.run(req).await.into_response())
|
||||
}
|
||||
Err(ValidateLlmTokenError::Expired) => Err(Error::Http(
|
||||
StatusCode::UNAUTHORIZED,
|
||||
"unauthorized".to_string(),
|
||||
[(
|
||||
HeaderName::from_static(EXPIRED_LLM_TOKEN_HEADER_NAME),
|
||||
HeaderValue::from_static("true"),
|
||||
)]
|
||||
.into_iter()
|
||||
.collect(),
|
||||
)),
|
||||
Err(_err) => Err(Error::http(
|
||||
StatusCode::UNAUTHORIZED,
|
||||
"unauthorized".to_string(),
|
||||
)),
|
||||
}
|
||||
}
|
||||
|
||||
/// Handler returning the language models the authenticated user may access.
///
/// Runs the full authorization check (plan/claims plus the caller's
/// Cloudflare-reported country) against every model known to the database and
/// returns only the models for which authorization succeeds.
async fn list_models(
    Extension(state): Extension<Arc<LlmState>>,
    Extension(claims): Extension<LlmTokenClaims>,
    country_code_header: Option<TypedHeader<CloudflareIpCountryHeader>>,
) -> Result<Json<ListModelsResponse>> {
    let country_code = country_code_header.map(|header| header.to_string());

    let mut accessible_models = Vec::new();

    // Probe authorization per model; failures are simply filtered out rather
    // than surfaced to the client.
    for (provider, model) in state.db.all_models() {
        let authorize_result = authorize_access_to_language_model(
            &state.config,
            &claims,
            country_code.as_deref(),
            provider,
            &model.name,
        );

        if authorize_result.is_ok() {
            accessible_models.push(rpc::LanguageModel {
                provider,
                name: model.name,
            });
        }
    }

    Ok(Json(ListModelsResponse {
        models: accessible_models,
    }))
}
|
||||
|
||||
/// Handler that proxies a completion request to the upstream LLM provider and
/// streams the response back to the client.
///
/// Pipeline: normalize the requested model name → enforce the minimum account
/// age (unless bypassed) → authorize by plan/country → enforce usage limits →
/// dispatch to the provider-specific streaming client. The resulting stream is
/// wrapped in a [`TokenCountingStream`] so token usage is recorded when the
/// response completes (or is abandoned).
async fn perform_completion(
    Extension(state): Extension<Arc<LlmState>>,
    Extension(claims): Extension<LlmTokenClaims>,
    country_code_header: Option<TypedHeader<CloudflareIpCountryHeader>>,
    Json(params): Json<PerformCompletionParams>,
) -> Result<impl IntoResponse> {
    // Map the client-requested model onto the canonical known model name.
    let model = normalize_model_name(
        state.db.model_names_for_provider(params.provider),
        params.model,
    );

    // Abuse mitigation: very young accounts can't use LLMs unless the user
    // has a subscription or an explicit bypass on their claims.
    let bypass_account_age_check = claims.has_llm_subscription || claims.bypass_account_age_check;
    if !bypass_account_age_check {
        if Utc::now().naive_utc() - claims.account_created_at < MIN_ACCOUNT_AGE_FOR_LLM_USE {
            Err(anyhow!("account too young"))?
        }
    }

    authorize_access_to_language_model(
        &state.config,
        &claims,
        country_code_header
            .map(|header| header.to_string())
            .as_deref(),
        params.provider,
        &model,
    )?;

    check_usage_limit(&state, params.provider, &model, &claims).await?;

    let stream = match params.provider {
        LanguageModelProvider::Anthropic => {
            // Staff traffic runs on a separate API key so it doesn't consume
            // the production key's upstream quota.
            let api_key = if claims.is_staff {
                state
                    .config
                    .anthropic_staff_api_key
                    .as_ref()
                    .context("no Anthropic AI staff API key configured on the server")?
            } else {
                state
                    .config
                    .anthropic_api_key
                    .as_ref()
                    .context("no Anthropic AI API key configured on the server")?
            };

            let mut request: anthropic::Request =
                serde_json::from_str(params.provider_request.get())?;

            // Override the model on the request with the latest version of the model that is
            // known to the server.
            //
            // Right now, we use the version that's defined in `model.id()`, but we will likely
            // want to change this code once a new version of an Anthropic model is released,
            // so that users can use the new version, without having to update Zed.
            request.model = match model.as_str() {
                "claude-3-5-sonnet" => anthropic::Model::Claude3_5Sonnet.id().to_string(),
                "claude-3-7-sonnet" => anthropic::Model::Claude3_7Sonnet.id().to_string(),
                "claude-3-opus" => anthropic::Model::Claude3Opus.id().to_string(),
                "claude-3-haiku" => anthropic::Model::Claude3Haiku.id().to_string(),
                "claude-3-sonnet" => anthropic::Model::Claude3Sonnet.id().to_string(),
                _ => request.model,
            };

            let (chunks, rate_limit_info) = anthropic::stream_completion_with_rate_limit_info(
                &state.http_client,
                anthropic::ANTHROPIC_API_URL,
                api_key,
                request,
            )
            .await
            // Translate upstream Anthropic API errors into our HTTP error
            // space, preserving the most useful status per error code.
            .map_err(|err| match err {
                anthropic::AnthropicError::ApiError(ref api_error) => match api_error.code() {
                    Some(anthropic::ApiErrorCode::RateLimitError) => {
                        tracing::info!(
                            target: "upstream rate limit exceeded",
                            user_id = claims.user_id,
                            login = claims.github_user_login,
                            authn.jti = claims.jti,
                            is_staff = claims.is_staff,
                            provider = params.provider.to_string(),
                            model = model
                        );

                        Error::http(
                            StatusCode::TOO_MANY_REQUESTS,
                            "Upstream Anthropic rate limit exceeded.".to_string(),
                        )
                    }
                    Some(anthropic::ApiErrorCode::InvalidRequestError) => {
                        Error::http(StatusCode::BAD_REQUEST, api_error.message.clone())
                    }
                    Some(anthropic::ApiErrorCode::OverloadedError) => {
                        Error::http(StatusCode::SERVICE_UNAVAILABLE, api_error.message.clone())
                    }
                    Some(_) => {
                        Error::http(StatusCode::INTERNAL_SERVER_ERROR, api_error.message.clone())
                    }
                    None => Error::Internal(anyhow!(err)),
                },
                anthropic::AnthropicError::Other(err) => Error::Internal(err),
            })?;

            // Log upstream rate-limit headroom so operators can watch how
            // close we are to Anthropic's account-wide limits.
            if let Some(rate_limit_info) = rate_limit_info {
                tracing::info!(
                    target: "upstream rate limit",
                    is_staff = claims.is_staff,
                    provider = params.provider.to_string(),
                    model = model,
                    tokens_remaining = rate_limit_info.tokens.as_ref().map(|limits| limits.remaining),
                    input_tokens_remaining = rate_limit_info.input_tokens.as_ref().map(|limits| limits.remaining),
                    output_tokens_remaining = rate_limit_info.output_tokens.as_ref().map(|limits| limits.remaining),
                    requests_remaining = rate_limit_info.requests.as_ref().map(|limits| limits.remaining),
                    requests_reset = ?rate_limit_info.requests.as_ref().map(|limits| limits.reset),
                    tokens_reset = ?rate_limit_info.tokens.as_ref().map(|limits| limits.reset),
                    input_tokens_reset = ?rate_limit_info.input_tokens.as_ref().map(|limits| limits.reset),
                    output_tokens_reset = ?rate_limit_info.output_tokens.as_ref().map(|limits| limits.reset),
                );
            }

            // Token counts only appear on MessageStart/MessageDelta events;
            // all other event kinds contribute zero.
            chunks
                .map(move |event| {
                    let chunk = event?;
                    let (
                        input_tokens,
                        output_tokens,
                        cache_creation_input_tokens,
                        cache_read_input_tokens,
                    ) = match &chunk {
                        anthropic::Event::MessageStart {
                            message: anthropic::Response { usage, .. },
                        }
                        | anthropic::Event::MessageDelta { usage, .. } => (
                            usage.input_tokens.unwrap_or(0) as usize,
                            usage.output_tokens.unwrap_or(0) as usize,
                            usage.cache_creation_input_tokens.unwrap_or(0) as usize,
                            usage.cache_read_input_tokens.unwrap_or(0) as usize,
                        ),
                        _ => (0, 0, 0, 0),
                    };

                    anyhow::Ok(CompletionChunk {
                        bytes: serde_json::to_vec(&chunk).unwrap(),
                        input_tokens,
                        output_tokens,
                        cache_creation_input_tokens,
                        cache_read_input_tokens,
                    })
                })
                .boxed()
        }
        LanguageModelProvider::OpenAi => {
            let api_key = state
                .config
                .openai_api_key
                .as_ref()
                .context("no OpenAI API key configured on the server")?;
            let chunks = open_ai::stream_completion(
                &state.http_client,
                open_ai::OPEN_AI_API_URL,
                api_key,
                serde_json::from_str(params.provider_request.get())?,
            )
            .await?;

            // OpenAI reports usage on the chunk's optional `usage` field;
            // cache token counts are not available from this provider.
            chunks
                .map(|event| {
                    event.map(|chunk| {
                        let input_tokens =
                            chunk.usage.as_ref().map_or(0, |u| u.prompt_tokens) as usize;
                        let output_tokens =
                            chunk.usage.as_ref().map_or(0, |u| u.completion_tokens) as usize;
                        CompletionChunk {
                            bytes: serde_json::to_vec(&chunk).unwrap(),
                            input_tokens,
                            output_tokens,
                            cache_creation_input_tokens: 0,
                            cache_read_input_tokens: 0,
                        }
                    })
                })
                .boxed()
        }
        LanguageModelProvider::Google => {
            let api_key = state
                .config
                .google_ai_api_key
                .as_ref()
                .context("no Google AI API key configured on the server")?;
            let chunks = google_ai::stream_generate_content(
                &state.http_client,
                google_ai::API_URL,
                api_key,
                serde_json::from_str(params.provider_request.get())?,
            )
            .await?;

            chunks
                .map(|event| {
                    event.map(|chunk| {
                        // TODO - implement token counting for Google AI
                        CompletionChunk {
                            bytes: serde_json::to_vec(&chunk).unwrap(),
                            input_tokens: 0,
                            output_tokens: 0,
                            cache_creation_input_tokens: 0,
                            cache_read_input_tokens: 0,
                        }
                    })
                })
                .boxed()
        }
    };

    // Wrap the provider stream so token usage is accumulated per chunk and
    // persisted when the stream is dropped.
    Ok(Response::new(Body::wrap_stream(TokenCountingStream {
        state,
        claims,
        provider: params.provider,
        model,
        tokens: TokenUsage::default(),
        inner_stream: stream,
    })))
}
|
||||
|
||||
/// Maps a requested model `name` onto the canonical known model name.
///
/// Picks the longest known model name that is a prefix of `name` (so a dated
/// identifier like `claude-3-5-sonnet-20240620` normalizes to
/// `claude-3-5-sonnet`). When no known name matches, the requested name is
/// returned unchanged.
fn normalize_model_name(known_models: Vec<String>, name: String) -> String {
    let mut best_match: Option<&String> = None;
    for known in &known_models {
        if !name.starts_with(known.as_str()) {
            continue;
        }
        // Keep the longest matching prefix so more specific known names win.
        if best_match.map_or(true, |current| known.len() > current.len()) {
            best_match = Some(known);
        }
    }
    match best_match {
        Some(known) => known.clone(),
        None => name,
    }
}
|
||||
|
||||
/// The maximum monthly spending an individual user can reach on the free tier
/// before they have to pay.
///
/// Enforced in `check_usage_limit`; can be overridden per-user via
/// `LlmTokenClaims::free_tier_monthly_spending_limit`.
pub const FREE_TIER_MONTHLY_SPENDING_LIMIT: Cents = Cents::from_dollars(10);
|
||||
@@ -452,330 +14,3 @@ pub const FREE_TIER_MONTHLY_SPENDING_LIMIT: Cents = Cents::from_dollars(10);
|
||||
/// The default maximum amount a subscribed user may spend per month.
///
/// Used to prevent surprise bills.
pub const DEFAULT_MAX_MONTHLY_SPEND: Cents = Cents::from_dollars(10);
|
||||
|
||||
async fn check_usage_limit(
|
||||
state: &Arc<LlmState>,
|
||||
provider: LanguageModelProvider,
|
||||
model_name: &str,
|
||||
claims: &LlmTokenClaims,
|
||||
) -> Result<()> {
|
||||
if claims.is_staff {
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
let user_id = UserId::from_proto(claims.user_id);
|
||||
let model = state.db.model(provider, model_name)?;
|
||||
let free_tier = claims.free_tier_monthly_spending_limit();
|
||||
|
||||
let spending_this_month = state
|
||||
.db
|
||||
.get_user_spending_for_month(user_id, Utc::now())
|
||||
.await?;
|
||||
if spending_this_month >= free_tier {
|
||||
if !claims.has_llm_subscription {
|
||||
return Err(Error::http(
|
||||
StatusCode::PAYMENT_REQUIRED,
|
||||
"Maximum spending limit reached for this month.".to_string(),
|
||||
));
|
||||
}
|
||||
|
||||
let monthly_spend = spending_this_month.saturating_sub(free_tier);
|
||||
if monthly_spend >= Cents(claims.max_monthly_spend_in_cents) {
|
||||
return Err(Error::Http(
|
||||
StatusCode::FORBIDDEN,
|
||||
"Maximum spending limit reached for this month.".to_string(),
|
||||
[(
|
||||
HeaderName::from_static(MAX_LLM_MONTHLY_SPEND_REACHED_HEADER_NAME),
|
||||
HeaderValue::from_static("true"),
|
||||
)]
|
||||
.into_iter()
|
||||
.collect(),
|
||||
));
|
||||
}
|
||||
}
|
||||
|
||||
let active_users = state.get_active_user_count(provider, model_name).await?;
|
||||
|
||||
let users_in_recent_minutes = active_users.users_in_recent_minutes.max(1);
|
||||
let users_in_recent_days = active_users.users_in_recent_days.max(1);
|
||||
|
||||
let per_user_max_requests_per_minute =
|
||||
model.max_requests_per_minute as usize / users_in_recent_minutes;
|
||||
let per_user_max_tokens_per_minute =
|
||||
model.max_tokens_per_minute as usize / users_in_recent_minutes;
|
||||
let per_user_max_input_tokens_per_minute =
|
||||
model.max_input_tokens_per_minute as usize / users_in_recent_minutes;
|
||||
let per_user_max_output_tokens_per_minute =
|
||||
model.max_output_tokens_per_minute as usize / users_in_recent_minutes;
|
||||
let per_user_max_tokens_per_day = model.max_tokens_per_day as usize / users_in_recent_days;
|
||||
|
||||
let usage = state
|
||||
.db
|
||||
.get_usage(user_id, provider, model_name, Utc::now())
|
||||
.await?;
|
||||
|
||||
let checks = match (provider, model_name) {
|
||||
(LanguageModelProvider::Anthropic, "claude-3-7-sonnet") => vec![
|
||||
(
|
||||
usage.requests_this_minute,
|
||||
per_user_max_requests_per_minute,
|
||||
UsageMeasure::RequestsPerMinute,
|
||||
),
|
||||
(
|
||||
usage.input_tokens_this_minute,
|
||||
per_user_max_tokens_per_minute,
|
||||
UsageMeasure::InputTokensPerMinute,
|
||||
),
|
||||
(
|
||||
usage.output_tokens_this_minute,
|
||||
per_user_max_tokens_per_minute,
|
||||
UsageMeasure::OutputTokensPerMinute,
|
||||
),
|
||||
(
|
||||
usage.tokens_this_day,
|
||||
per_user_max_tokens_per_day,
|
||||
UsageMeasure::TokensPerDay,
|
||||
),
|
||||
],
|
||||
_ => vec![
|
||||
(
|
||||
usage.requests_this_minute,
|
||||
per_user_max_requests_per_minute,
|
||||
UsageMeasure::RequestsPerMinute,
|
||||
),
|
||||
(
|
||||
usage.tokens_this_minute,
|
||||
per_user_max_tokens_per_minute,
|
||||
UsageMeasure::TokensPerMinute,
|
||||
),
|
||||
(
|
||||
usage.tokens_this_day,
|
||||
per_user_max_tokens_per_day,
|
||||
UsageMeasure::TokensPerDay,
|
||||
),
|
||||
],
|
||||
};
|
||||
|
||||
for (used, limit, usage_measure) in checks {
|
||||
if used > limit {
|
||||
let resource = match usage_measure {
|
||||
UsageMeasure::RequestsPerMinute => "requests_per_minute",
|
||||
UsageMeasure::TokensPerMinute => "tokens_per_minute",
|
||||
UsageMeasure::InputTokensPerMinute => "input_tokens_per_minute",
|
||||
UsageMeasure::OutputTokensPerMinute => "output_tokens_per_minute",
|
||||
UsageMeasure::TokensPerDay => "tokens_per_day",
|
||||
};
|
||||
|
||||
tracing::info!(
|
||||
target: "user rate limit",
|
||||
user_id = claims.user_id,
|
||||
login = claims.github_user_login,
|
||||
authn.jti = claims.jti,
|
||||
is_staff = claims.is_staff,
|
||||
provider = provider.to_string(),
|
||||
model = model.name,
|
||||
usage_measure = resource,
|
||||
requests_this_minute = usage.requests_this_minute,
|
||||
tokens_this_minute = usage.tokens_this_minute,
|
||||
input_tokens_this_minute = usage.input_tokens_this_minute,
|
||||
output_tokens_this_minute = usage.output_tokens_this_minute,
|
||||
tokens_this_day = usage.tokens_this_day,
|
||||
users_in_recent_minutes = users_in_recent_minutes,
|
||||
users_in_recent_days = users_in_recent_days,
|
||||
max_requests_per_minute = per_user_max_requests_per_minute,
|
||||
max_tokens_per_minute = per_user_max_tokens_per_minute,
|
||||
max_input_tokens_per_minute = per_user_max_input_tokens_per_minute,
|
||||
max_output_tokens_per_minute = per_user_max_output_tokens_per_minute,
|
||||
max_tokens_per_day = per_user_max_tokens_per_day,
|
||||
);
|
||||
|
||||
SnowflakeRow::new(
|
||||
"Language Model Rate Limited",
|
||||
Some(claims.metrics_id),
|
||||
claims.is_staff,
|
||||
claims.system_id.clone(),
|
||||
json!({
|
||||
"usage": usage,
|
||||
"users_in_recent_minutes": users_in_recent_minutes,
|
||||
"users_in_recent_days": users_in_recent_days,
|
||||
"max_requests_per_minute": per_user_max_requests_per_minute,
|
||||
"max_tokens_per_minute": per_user_max_tokens_per_minute,
|
||||
"max_input_tokens_per_minute": per_user_max_input_tokens_per_minute,
|
||||
"max_output_tokens_per_minute": per_user_max_output_tokens_per_minute,
|
||||
"max_tokens_per_day": per_user_max_tokens_per_day,
|
||||
"plan": match claims.plan {
|
||||
Plan::Free => "free".to_string(),
|
||||
Plan::ZedPro => "zed_pro".to_string(),
|
||||
},
|
||||
"model": model.name.clone(),
|
||||
"provider": provider.to_string(),
|
||||
"usage_measure": resource.to_string(),
|
||||
}),
|
||||
)
|
||||
.write(&state.kinesis_client, &state.config.kinesis_stream)
|
||||
.await
|
||||
.log_err();
|
||||
|
||||
return Err(Error::http(
|
||||
StatusCode::TOO_MANY_REQUESTS,
|
||||
format!("Rate limit exceeded. Maximum {} reached.", resource),
|
||||
));
|
||||
}
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// A single streamed completion chunk, already serialized for the client,
/// together with the token counts the upstream provider reported for it.
struct CompletionChunk {
    // Serialized JSON payload for this chunk; a trailing newline is appended
    // when it's forwarded to the client (see `TokenCountingStream::poll_next`).
    bytes: Vec<u8>,
    input_tokens: usize,
    output_tokens: usize,
    // Cache token counts are only populated by Anthropic; other providers
    // report zero.
    cache_creation_input_tokens: usize,
    cache_read_input_tokens: usize,
}
|
||||
|
||||
/// Wraps a provider completion stream, forwarding each chunk's serialized
/// bytes to the client while accumulating token usage. The accumulated usage
/// is persisted and reported to telemetry when the stream is dropped (see the
/// `Drop` impl), so usage is recorded even if the client disconnects early.
struct TokenCountingStream<S> {
    state: Arc<LlmState>,
    claims: LlmTokenClaims,
    provider: LanguageModelProvider,
    model: String,
    // Running totals across all chunks seen so far.
    tokens: TokenUsage,
    inner_stream: S,
}
|
||||
|
||||
impl<S> Stream for TokenCountingStream<S>
where
    S: Stream<Item = Result<CompletionChunk, anyhow::Error>> + Unpin,
{
    type Item = Result<Vec<u8>, anyhow::Error>;

    // Passes chunks through from the inner stream, tallying token counts and
    // newline-delimiting the serialized payloads for the client.
    fn poll_next(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Option<Self::Item>> {
        match Pin::new(&mut self.inner_stream).poll_next(cx) {
            Poll::Ready(Some(Ok(mut chunk))) => {
                // Newline-delimit chunks so the client can split the stream.
                chunk.bytes.push(b'\n');
                self.tokens.input += chunk.input_tokens;
                self.tokens.output += chunk.output_tokens;
                self.tokens.input_cache_creation += chunk.cache_creation_input_tokens;
                self.tokens.input_cache_read += chunk.cache_read_input_tokens;
                Poll::Ready(Some(Ok(chunk.bytes)))
            }
            Poll::Ready(Some(Err(e))) => Poll::Ready(Some(Err(e))),
            Poll::Ready(None) => Poll::Ready(None),
            Poll::Pending => Poll::Pending,
        }
    }
}
|
||||
|
||||
impl<S> Drop for TokenCountingStream<S> {
    // Persists the accumulated token usage when the stream ends — whether the
    // response completed or the client disconnected mid-stream. The work is
    // spawned as a detached task because `drop` cannot await.
    fn drop(&mut self) {
        let state = self.state.clone();
        let claims = self.claims.clone();
        let provider = self.provider;
        // Move the model name out cheaply; `self` is being destroyed anyway.
        let model = std::mem::take(&mut self.model);
        let tokens = self.tokens;
        self.state.executor.spawn_detached(async move {
            // Record usage; errors are logged rather than propagated since
            // there is no caller to report them to.
            let usage = state
                .db
                .record_usage(
                    UserId::from_proto(claims.user_id),
                    claims.is_staff,
                    provider,
                    &model,
                    tokens,
                    claims.has_llm_subscription,
                    Cents(claims.max_monthly_spend_in_cents),
                    claims.free_tier_monthly_spending_limit(),
                    Utc::now(),
                )
                .await
                .log_err();

            if let Some(usage) = usage {
                tracing::info!(
                    target: "user usage",
                    user_id = claims.user_id,
                    login = claims.github_user_login,
                    authn.jti = claims.jti,
                    is_staff = claims.is_staff,
                    provider = provider.to_string(),
                    model = model,
                    requests_this_minute = usage.requests_this_minute,
                    tokens_this_minute = usage.tokens_this_minute,
                    input_tokens_this_minute = usage.input_tokens_this_minute,
                    output_tokens_this_minute = usage.output_tokens_this_minute,
                );

                // Report the completed request to the telemetry pipeline.
                let properties = json!({
                    "has_llm_subscription": claims.has_llm_subscription,
                    "max_monthly_spend_in_cents": claims.max_monthly_spend_in_cents,
                    "plan": match claims.plan {
                        Plan::Free => "free".to_string(),
                        Plan::ZedPro => "zed_pro".to_string(),
                    },
                    "model": model,
                    "provider": provider,
                    "usage": usage,
                    "tokens": tokens
                });
                SnowflakeRow::new(
                    "Language Model Used",
                    Some(claims.metrics_id),
                    claims.is_staff,
                    claims.system_id.clone(),
                    properties,
                )
                .write(&state.kinesis_client, &state.config.kinesis_stream)
                .await
                .log_err();
            }
        })
    }
}
|
||||
|
||||
/// Spawns a detached background task that, every 30 seconds, logs per-model
/// active user counts and application-wide usage metrics for observability.
pub fn log_usage_periodically(state: Arc<LlmState>) {
    state.executor.clone().spawn_detached(async move {
        loop {
            state
                .executor
                .sleep(std::time::Duration::from_secs(30))
                .await;

            // Active user counts feed the per-user rate-limit divisors used
            // in `check_usage_limit`; errors are logged and skipped.
            for provider in LanguageModelProvider::iter() {
                for model in state.db.model_names_for_provider(provider) {
                    if let Some(active_user_count) = state
                        .get_active_user_count(provider, &model)
                        .await
                        .log_err()
                    {
                        tracing::info!(
                            target: "active user counts",
                            provider = provider.to_string(),
                            model = model,
                            users_in_recent_minutes = active_user_count.users_in_recent_minutes,
                            users_in_recent_days = active_user_count.users_in_recent_days,
                        );
                    }
                }
            }

            if let Some(usages) = state
                .db
                .get_application_wide_usages_by_model(Utc::now())
                .await
                .log_err()
            {
                for usage in usages {
                    tracing::info!(
                        target: "computed usage",
                        provider = usage.provider.to_string(),
                        model = usage.model,
                        requests_this_minute = usage.requests_this_minute,
                        tokens_this_minute = usage.tokens_this_minute,
                        input_tokens_this_minute = usage.input_tokens_this_minute,
                        output_tokens_this_minute = usage.output_tokens_this_minute,
                    );
                }
            }
        }
    })
}
|
||||
|
||||
@@ -1,330 +0,0 @@
|
||||
use reqwest::StatusCode;
|
||||
use rpc::LanguageModelProvider;
|
||||
|
||||
use crate::llm::LlmTokenClaims;
|
||||
use crate::{Config, Error, Result};
|
||||
|
||||
/// Authorizes a request to use the given language model.
///
/// Two independent gates must both pass: the caller's country (from the
/// Cloudflare `CF-IPCountry` header) must be supported by the provider, and
/// the caller's claims/plan must grant access to the specific model.
pub fn authorize_access_to_language_model(
    config: &Config,
    claims: &LlmTokenClaims,
    country_code: Option<&str>,
    provider: LanguageModelProvider,
    model: &str,
) -> Result<()> {
    authorize_access_for_country(config, country_code, provider)?;
    authorize_access_to_model(config, claims, provider, model)?;
    Ok(())
}
|
||||
|
||||
fn authorize_access_to_model(
|
||||
config: &Config,
|
||||
claims: &LlmTokenClaims,
|
||||
provider: LanguageModelProvider,
|
||||
model: &str,
|
||||
) -> Result<()> {
|
||||
if claims.is_staff {
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
if provider == LanguageModelProvider::Anthropic {
|
||||
if model == "claude-3-5-sonnet" || model == "claude-3-7-sonnet" {
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
if claims.has_llm_closed_beta_feature_flag
|
||||
&& Some(model) == config.llm_closed_beta_model_name.as_deref()
|
||||
{
|
||||
return Ok(());
|
||||
}
|
||||
}
|
||||
|
||||
Err(Error::http(
|
||||
StatusCode::FORBIDDEN,
|
||||
format!("access to model {model:?} is not included in your plan"),
|
||||
))
|
||||
}
|
||||
|
||||
/// Rejects requests from countries a provider does not serve, from the Tor
/// network, and from clients with no usable country code.
///
/// `country_code` comes from Cloudflare's `CF-IPCountry` header; development
/// deployments skip the check entirely since that header is absent.
fn authorize_access_for_country(
    config: &Config,
    country_code: Option<&str>,
    provider: LanguageModelProvider,
) -> Result<()> {
    // In development we won't have the `CF-IPCountry` header, so we can't check
    // the country code.
    //
    // This shouldn't be necessary, as anyone running in development will need to provide
    // their own API credentials in order to use an LLM provider.
    if config.is_development() {
        return Ok(());
    }

    // https://developers.cloudflare.com/fundamentals/reference/http-request-headers/#cf-ipcountry
    let country_code = match country_code {
        // `XX` - Used for clients without country code data.
        None | Some("XX") => Err(Error::http(
            StatusCode::BAD_REQUEST,
            "no country code".to_string(),
        ))?,
        // `T1` - Used for clients using the Tor network.
        Some("T1") => Err(Error::http(
            StatusCode::FORBIDDEN,
            format!("access to {provider:?} models is not available over Tor"),
        ))?,
        Some(country_code) => country_code,
    };

    // Each provider publishes its own list of supported countries.
    let is_country_supported_by_provider = match provider {
        LanguageModelProvider::Anthropic => anthropic::is_supported_country(country_code),
        LanguageModelProvider::OpenAi => open_ai::is_supported_country(country_code),
        LanguageModelProvider::Google => google_ai::is_supported_country(country_code),
    };
    if !is_country_supported_by_provider {
        // 451: blocked for legal/compliance reasons rather than a user error.
        Err(Error::http(
            StatusCode::UNAVAILABLE_FOR_LEGAL_REASONS,
            format!(
                "access to {provider:?} models is not available in your region ({country_code})"
            ),
        ))?
    }

    Ok(())
}
|
||||
|
||||
#[cfg(test)]
mod tests {
    use axum::response::IntoResponse;
    use pretty_assertions::assert_eq;
    use rpc::proto::Plan;

    use super::*;

    /// Requests from supported countries should be authorized for every
    /// provider. (Claims are staff so the model gate never interferes.)
    #[gpui::test]
    async fn test_authorize_access_to_language_model_with_supported_country(
        _cx: &mut gpui::TestAppContext,
    ) {
        let config = Config::test();

        let claims = LlmTokenClaims {
            user_id: 99,
            plan: Plan::ZedPro,
            is_staff: true,
            ..Default::default()
        };

        let cases = vec![
            (LanguageModelProvider::Anthropic, "US"), // United States
            (LanguageModelProvider::Anthropic, "GB"), // United Kingdom
            (LanguageModelProvider::OpenAi, "US"),    // United States
            (LanguageModelProvider::OpenAi, "GB"),    // United Kingdom
            (LanguageModelProvider::Google, "US"),    // United States
            (LanguageModelProvider::Google, "GB"),    // United Kingdom
        ];

        for (provider, country_code) in cases {
            authorize_access_to_language_model(
                &config,
                &claims,
                Some(country_code),
                provider,
                "the-model",
            )
            .unwrap_or_else(|_| {
                panic!("expected authorization to return Ok for {provider:?}: {country_code}")
            })
        }
    }

    /// Requests from provider-unsupported countries should fail with
    /// 451 Unavailable For Legal Reasons and a region-specific message.
    #[gpui::test]
    async fn test_authorize_access_to_language_model_with_unsupported_country(
        _cx: &mut gpui::TestAppContext,
    ) {
        let config = Config::test();

        let claims = LlmTokenClaims {
            user_id: 99,
            plan: Plan::ZedPro,
            ..Default::default()
        };

        let cases = vec![
            (LanguageModelProvider::Anthropic, "AF"), // Afghanistan
            (LanguageModelProvider::Anthropic, "BY"), // Belarus
            (LanguageModelProvider::Anthropic, "CF"), // Central African Republic
            (LanguageModelProvider::Anthropic, "CN"), // China
            (LanguageModelProvider::Anthropic, "CU"), // Cuba
            (LanguageModelProvider::Anthropic, "ER"), // Eritrea
            (LanguageModelProvider::Anthropic, "ET"), // Ethiopia
            (LanguageModelProvider::Anthropic, "IR"), // Iran
            (LanguageModelProvider::Anthropic, "KP"), // North Korea
            (LanguageModelProvider::Anthropic, "XK"), // Kosovo
            (LanguageModelProvider::Anthropic, "LY"), // Libya
            (LanguageModelProvider::Anthropic, "MM"), // Myanmar
            (LanguageModelProvider::Anthropic, "RU"), // Russia
            (LanguageModelProvider::Anthropic, "SO"), // Somalia
            (LanguageModelProvider::Anthropic, "SS"), // South Sudan
            (LanguageModelProvider::Anthropic, "SD"), // Sudan
            (LanguageModelProvider::Anthropic, "SY"), // Syria
            (LanguageModelProvider::Anthropic, "VE"), // Venezuela
            (LanguageModelProvider::Anthropic, "YE"), // Yemen
            (LanguageModelProvider::OpenAi, "KP"),    // North Korea
            (LanguageModelProvider::Google, "KP"),    // North Korea
        ];

        for (provider, country_code) in cases {
            let error_response = authorize_access_to_language_model(
                &config,
                &claims,
                Some(country_code),
                provider,
                "the-model",
            )
            .expect_err(&format!(
                "expected authorization to return an error for {provider:?}: {country_code}"
            ))
            .into_response();

            assert_eq!(
                error_response.status(),
                StatusCode::UNAVAILABLE_FOR_LEGAL_REASONS
            );
            let response_body = hyper::body::to_bytes(error_response.into_body())
                .await
                .unwrap()
                .to_vec();
            assert_eq!(
                String::from_utf8(response_body).unwrap(),
                format!(
                    "access to {provider:?} models is not available in your region ({country_code})"
                )
            );
        }
    }

    /// Tor exit traffic (`T1`) should be rejected with 403 Forbidden for
    /// every provider.
    #[gpui::test]
    async fn test_authorize_access_to_language_model_with_tor(_cx: &mut gpui::TestAppContext) {
        let config = Config::test();

        let claims = LlmTokenClaims {
            user_id: 99,
            plan: Plan::ZedPro,
            ..Default::default()
        };

        let cases = vec![
            (LanguageModelProvider::Anthropic, "T1"), // Tor
            (LanguageModelProvider::OpenAi, "T1"),    // Tor
            (LanguageModelProvider::Google, "T1"),    // Tor
        ];

        for (provider, country_code) in cases {
            let error_response = authorize_access_to_language_model(
                &config,
                &claims,
                Some(country_code),
                provider,
                "the-model",
            )
            .expect_err(&format!(
                "expected authorization to return an error for {provider:?}: {country_code}"
            ))
            .into_response();

            assert_eq!(error_response.status(), StatusCode::FORBIDDEN);
            let response_body = hyper::body::to_bytes(error_response.into_body())
                .await
                .unwrap()
                .to_vec();
            assert_eq!(
                String::from_utf8(response_body).unwrap(),
                format!("access to {provider:?} models is not available over Tor")
            );
        }
    }

    /// Model access by plan: both plans get claude-3-5-sonnet, neither plan
    /// gets other Anthropic models.
    #[gpui::test]
    async fn test_authorize_access_to_language_model_based_on_plan() {
        let config = Config::test();

        let test_cases = vec![
            // Pro plan should have access to claude-3.5-sonnet
            (
                Plan::ZedPro,
                LanguageModelProvider::Anthropic,
                "claude-3-5-sonnet",
                true,
            ),
            // Free plan should have access to claude-3.5-sonnet
            (
                Plan::Free,
                LanguageModelProvider::Anthropic,
                "claude-3-5-sonnet",
                true,
            ),
            // Pro plan should NOT have access to other Anthropic models
            (
                Plan::ZedPro,
                LanguageModelProvider::Anthropic,
                "claude-3-opus",
                false,
            ),
        ];

        for (plan, provider, model, expected_access) in test_cases {
            let claims = LlmTokenClaims {
                plan,
                ..Default::default()
            };

            let result =
                authorize_access_to_language_model(&config, &claims, Some("US"), provider, model);

            if expected_access {
                assert!(
                    result.is_ok(),
                    "Expected access to be granted for plan {:?}, provider {:?}, model {}",
                    plan,
                    provider,
                    model
                );
            } else {
                let error = result.expect_err(&format!(
                    "Expected access to be denied for plan {:?}, provider {:?}, model {}",
                    plan, provider, model
                ));
                let response = error.into_response();
                assert_eq!(response.status(), StatusCode::FORBIDDEN);
            }
        }
    }

    /// Staff claims should bypass the model gate entirely, regardless of
    /// provider or model name.
    #[gpui::test]
    async fn test_authorize_access_to_language_model_for_staff() {
        let config = Config::test();

        let claims = LlmTokenClaims {
            is_staff: true,
            ..Default::default()
        };

        // Staff should have access to all models
        let test_cases = vec![
            (LanguageModelProvider::Anthropic, "claude-3-5-sonnet"),
            (LanguageModelProvider::Anthropic, "claude-2"),
            (LanguageModelProvider::Anthropic, "claude-123-agi"),
            (LanguageModelProvider::OpenAi, "gpt-4"),
            (LanguageModelProvider::Google, "gemini-pro"),
        ];

        for (provider, model) in test_cases {
            let result =
                authorize_access_to_language_model(&config, &claims, Some("US"), provider, model);

            assert!(
                result.is_ok(),
                "Expected staff to have access to provider {:?}, model {}",
                provider,
                model
            );
        }
    }
}
|
||||
@@ -20,7 +20,6 @@ use std::future::Future;
|
||||
use std::sync::Arc;
|
||||
|
||||
use anyhow::anyhow;
|
||||
pub use queries::usages::{ActiveUserCount, TokenUsage};
|
||||
pub use sea_orm::ConnectOptions;
|
||||
use sea_orm::prelude::*;
|
||||
use sea_orm::{
|
||||
|
||||
@@ -2,5 +2,4 @@ use super::*;
|
||||
|
||||
pub mod billing_events;
|
||||
pub mod providers;
|
||||
pub mod revoked_access_tokens;
|
||||
pub mod usages;
|
||||
|
||||
@@ -1,15 +0,0 @@
|
||||
use super::*;
|
||||
|
||||
impl LlmDatabase {
    /// Returns whether the access token with the given `jti` has been revoked.
    ///
    /// Revocation is determined by the presence of a matching row in the
    /// `revoked_access_tokens` table; an absent row means the token is still
    /// valid from the database's perspective.
    pub async fn is_access_token_revoked(&self, jti: &str) -> Result<bool> {
        self.transaction(|tx| async move {
            Ok(revoked_access_token::Entity::find()
                .filter(revoked_access_token::Column::Jti.eq(jti))
                .one(&*tx)
                .await?
                .is_some())
        })
        .await
    }
}
|
||||
@@ -1,56 +1,12 @@
|
||||
use crate::db::UserId;
|
||||
use crate::llm::Cents;
|
||||
use chrono::{Datelike, Duration};
|
||||
use chrono::Datelike;
|
||||
use futures::StreamExt as _;
|
||||
use rpc::LanguageModelProvider;
|
||||
use sea_orm::QuerySelect;
|
||||
use std::{iter, str::FromStr};
|
||||
use std::str::FromStr;
|
||||
use strum::IntoEnumIterator as _;
|
||||
|
||||
use super::*;
|
||||
|
||||
/// Token counts broken down by category, for a single request or an
/// aggregated period.
#[derive(Debug, PartialEq, Clone, Copy, Default, serde::Serialize)]
pub struct TokenUsage {
    /// Regular (non-cached) input tokens.
    pub input: usize,
    /// Input tokens counted toward cache creation.
    pub input_cache_creation: usize,
    /// Input tokens served from cache reads.
    pub input_cache_read: usize,
    /// Output tokens.
    pub output: usize,
}
|
||||
|
||||
impl TokenUsage {
|
||||
pub fn total(&self) -> usize {
|
||||
self.input + self.input_cache_creation + self.input_cache_read + self.output
|
||||
}
|
||||
}
|
||||
|
||||
/// Per-user usage snapshot for a single model, evaluated at a given instant.
#[derive(Debug, PartialEq, Clone, Copy, serde::Serialize)]
pub struct Usage {
    pub requests_this_minute: usize,
    pub tokens_this_minute: usize,
    pub input_tokens_this_minute: usize,
    pub output_tokens_this_minute: usize,
    pub tokens_this_day: usize,
    /// Month-to-date token totals, by category.
    pub tokens_this_month: TokenUsage,
    pub spending_this_month: Cents,
    pub lifetime_spending: Cents,
}
|
||||
|
||||
/// Aggregated non-staff usage across all users for one provider/model pair.
#[derive(Debug, PartialEq, Clone)]
pub struct ApplicationWideUsage {
    pub provider: LanguageModelProvider,
    pub model: String,
    pub requests_this_minute: usize,
    pub tokens_this_minute: usize,
    pub input_tokens_this_minute: usize,
    pub output_tokens_this_minute: usize,
}
|
||||
|
||||
/// Distinct-user activity counts for a model over recent time windows.
#[derive(Clone, Copy, Debug, Default)]
pub struct ActiveUserCount {
    /// Distinct users with usage in the recent-minutes window.
    pub users_in_recent_minutes: usize,
    /// Distinct users with usage in the recent-days window.
    pub users_in_recent_days: usize,
}
|
||||
|
||||
impl LlmDatabase {
|
||||
pub async fn initialize_usage_measures(&mut self) -> Result<()> {
|
||||
let all_measures = self
|
||||
@@ -90,100 +46,6 @@ impl LlmDatabase {
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub async fn get_application_wide_usages_by_model(
|
||||
&self,
|
||||
now: DateTimeUtc,
|
||||
) -> Result<Vec<ApplicationWideUsage>> {
|
||||
self.transaction(|tx| async move {
|
||||
let past_minute = now - Duration::minutes(1);
|
||||
let requests_per_minute = self.usage_measure_ids[&UsageMeasure::RequestsPerMinute];
|
||||
let tokens_per_minute = self.usage_measure_ids[&UsageMeasure::TokensPerMinute];
|
||||
let input_tokens_per_minute =
|
||||
self.usage_measure_ids[&UsageMeasure::InputTokensPerMinute];
|
||||
let output_tokens_per_minute =
|
||||
self.usage_measure_ids[&UsageMeasure::OutputTokensPerMinute];
|
||||
|
||||
let mut results = Vec::new();
|
||||
for ((provider, model_name), model) in self.models.iter() {
|
||||
let mut usages = usage::Entity::find()
|
||||
.filter(
|
||||
usage::Column::Timestamp
|
||||
.gte(past_minute.naive_utc())
|
||||
.and(usage::Column::IsStaff.eq(false))
|
||||
.and(usage::Column::ModelId.eq(model.id))
|
||||
.and(
|
||||
usage::Column::MeasureId
|
||||
.eq(requests_per_minute)
|
||||
.or(usage::Column::MeasureId.eq(tokens_per_minute)),
|
||||
),
|
||||
)
|
||||
.stream(&*tx)
|
||||
.await?;
|
||||
|
||||
let mut requests_this_minute = 0;
|
||||
let mut tokens_this_minute = 0;
|
||||
let mut input_tokens_this_minute = 0;
|
||||
let mut output_tokens_this_minute = 0;
|
||||
while let Some(usage) = usages.next().await {
|
||||
let usage = usage?;
|
||||
if usage.measure_id == requests_per_minute {
|
||||
requests_this_minute += Self::get_live_buckets(
|
||||
&usage,
|
||||
now.naive_utc(),
|
||||
UsageMeasure::RequestsPerMinute,
|
||||
)
|
||||
.0
|
||||
.iter()
|
||||
.copied()
|
||||
.sum::<i64>() as usize;
|
||||
} else if usage.measure_id == tokens_per_minute {
|
||||
tokens_this_minute += Self::get_live_buckets(
|
||||
&usage,
|
||||
now.naive_utc(),
|
||||
UsageMeasure::TokensPerMinute,
|
||||
)
|
||||
.0
|
||||
.iter()
|
||||
.copied()
|
||||
.sum::<i64>() as usize;
|
||||
} else if usage.measure_id == input_tokens_per_minute {
|
||||
input_tokens_this_minute += Self::get_live_buckets(
|
||||
&usage,
|
||||
now.naive_utc(),
|
||||
UsageMeasure::InputTokensPerMinute,
|
||||
)
|
||||
.0
|
||||
.iter()
|
||||
.copied()
|
||||
.sum::<i64>() as usize;
|
||||
} else if usage.measure_id == output_tokens_per_minute {
|
||||
output_tokens_this_minute += Self::get_live_buckets(
|
||||
&usage,
|
||||
now.naive_utc(),
|
||||
UsageMeasure::OutputTokensPerMinute,
|
||||
)
|
||||
.0
|
||||
.iter()
|
||||
.copied()
|
||||
.sum::<i64>() as usize;
|
||||
}
|
||||
}
|
||||
|
||||
results.push(ApplicationWideUsage {
|
||||
provider: *provider,
|
||||
model: model_name.clone(),
|
||||
requests_this_minute,
|
||||
tokens_this_minute,
|
||||
input_tokens_this_minute,
|
||||
output_tokens_this_minute,
|
||||
})
|
||||
}
|
||||
|
||||
Ok(results)
|
||||
})
|
||||
.await
|
||||
}
|
||||
|
||||
pub async fn get_user_spending_for_month(
|
||||
&self,
|
||||
user_id: UserId,
|
||||
@@ -223,499 +85,6 @@ impl LlmDatabase {
|
||||
})
|
||||
.await
|
||||
}
|
||||
|
||||
    /// Returns the usage snapshot for `user_id` on the given provider/model
    /// pair, evaluated at `now`.
    ///
    /// Errors if the (provider, model) pair is not registered in `self.models`.
    pub async fn get_usage(
        &self,
        user_id: UserId,
        provider: LanguageModelProvider,
        model_name: &str,
        now: DateTimeUtc,
    ) -> Result<Usage> {
        self.transaction(|tx| async move {
            let model = self
                .models
                .get(&(provider, model_name.to_string()))
                .ok_or_else(|| anyhow!("unknown model {provider}:{model_name}"))?;

            // All rolling-window usage rows for this user/model.
            let usages = usage::Entity::find()
                .filter(
                    usage::Column::UserId
                        .eq(user_id)
                        .and(usage::Column::ModelId.eq(model.id)),
                )
                .all(&*tx)
                .await?;

            // Calendar-month aggregate row for the current month, if any.
            let month = now.date_naive().month() as i32;
            let year = now.date_naive().year();
            let monthly_usage = monthly_usage::Entity::find()
                .filter(
                    monthly_usage::Column::UserId
                        .eq(user_id)
                        .and(monthly_usage::Column::ModelId.eq(model.id))
                        .and(monthly_usage::Column::Month.eq(month))
                        .and(monthly_usage::Column::Year.eq(year)),
                )
                .one(&*tx)
                .await?;
            // Lifetime aggregate row, if any.
            let lifetime_usage = lifetime_usage::Entity::find()
                .filter(
                    lifetime_usage::Column::UserId
                        .eq(user_id)
                        .and(lifetime_usage::Column::ModelId.eq(model.id)),
                )
                .one(&*tx)
                .await?;

            // Rolling-window tallies: only buckets still inside each window
            // contribute (see `get_usage_for_measure`).
            let requests_this_minute =
                self.get_usage_for_measure(&usages, now, UsageMeasure::RequestsPerMinute)?;
            let tokens_this_minute =
                self.get_usage_for_measure(&usages, now, UsageMeasure::TokensPerMinute)?;
            let input_tokens_this_minute =
                self.get_usage_for_measure(&usages, now, UsageMeasure::InputTokensPerMinute)?;
            let output_tokens_this_minute =
                self.get_usage_for_measure(&usages, now, UsageMeasure::OutputTokensPerMinute)?;
            let tokens_this_day =
                self.get_usage_for_measure(&usages, now, UsageMeasure::TokensPerDay)?;
            // Spending is derived from the aggregate token counts; a missing
            // row means zero spend.
            let spending_this_month = if let Some(monthly_usage) = &monthly_usage {
                calculate_spending(
                    model,
                    monthly_usage.input_tokens as usize,
                    monthly_usage.cache_creation_input_tokens as usize,
                    monthly_usage.cache_read_input_tokens as usize,
                    monthly_usage.output_tokens as usize,
                )
            } else {
                Cents::ZERO
            };
            let lifetime_spending = if let Some(lifetime_usage) = &lifetime_usage {
                calculate_spending(
                    model,
                    lifetime_usage.input_tokens as usize,
                    lifetime_usage.cache_creation_input_tokens as usize,
                    lifetime_usage.cache_read_input_tokens as usize,
                    lifetime_usage.output_tokens as usize,
                )
            } else {
                Cents::ZERO
            };

            Ok(Usage {
                requests_this_minute,
                tokens_this_minute,
                input_tokens_this_minute,
                output_tokens_this_minute,
                tokens_this_day,
                tokens_this_month: TokenUsage {
                    input: monthly_usage
                        .as_ref()
                        .map_or(0, |usage| usage.input_tokens as usize),
                    input_cache_creation: monthly_usage
                        .as_ref()
                        .map_or(0, |usage| usage.cache_creation_input_tokens as usize),
                    input_cache_read: monthly_usage
                        .as_ref()
                        .map_or(0, |usage| usage.cache_read_input_tokens as usize),
                    output: monthly_usage
                        .as_ref()
                        .map_or(0, |usage| usage.output_tokens as usize),
                },
                spending_this_month,
                lifetime_spending,
            })
        })
        .await
    }
|
||||
|
||||
    /// Records `tokens` of usage for `user_id` on the given model at `now`:
    /// updates the rolling-window measures, upserts the monthly and lifetime
    /// aggregates, and — for subscribed, non-staff users past the free tier
    /// but within `max_monthly_spend` — inserts a billing event. Returns the
    /// updated usage snapshot.
    pub async fn record_usage(
        &self,
        user_id: UserId,
        is_staff: bool,
        provider: LanguageModelProvider,
        model_name: &str,
        tokens: TokenUsage,
        has_llm_subscription: bool,
        max_monthly_spend: Cents,
        free_tier_monthly_spending_limit: Cents,
        now: DateTimeUtc,
    ) -> Result<Usage> {
        self.transaction(|tx| async move {
            let model = self.model(provider, model_name)?;

            // Existing rolling-window rows for this user/model (one per measure).
            let usages = usage::Entity::find()
                .filter(
                    usage::Column::UserId
                        .eq(user_id)
                        .and(usage::Column::ModelId.eq(model.id)),
                )
                .all(&*tx)
                .await?;

            // Bump each rolling-window measure; each call returns the new
            // total over that measure's live buckets.
            let requests_this_minute = self
                .update_usage_for_measure(
                    user_id,
                    is_staff,
                    model.id,
                    &usages,
                    UsageMeasure::RequestsPerMinute,
                    now,
                    1,
                    &tx,
                )
                .await?;
            let tokens_this_minute = self
                .update_usage_for_measure(
                    user_id,
                    is_staff,
                    model.id,
                    &usages,
                    UsageMeasure::TokensPerMinute,
                    now,
                    tokens.total(),
                    &tx,
                )
                .await?;
            let input_tokens_this_minute = self
                .update_usage_for_measure(
                    user_id,
                    is_staff,
                    model.id,
                    &usages,
                    UsageMeasure::InputTokensPerMinute,
                    now,
                    // Cache read input tokens are not counted for the purposes of rate limits (but they are still billed).
                    tokens.input + tokens.input_cache_creation,
                    &tx,
                )
                .await?;
            let output_tokens_this_minute = self
                .update_usage_for_measure(
                    user_id,
                    is_staff,
                    model.id,
                    &usages,
                    UsageMeasure::OutputTokensPerMinute,
                    now,
                    tokens.output,
                    &tx,
                )
                .await?;
            let tokens_this_day = self
                .update_usage_for_measure(
                    user_id,
                    is_staff,
                    model.id,
                    &usages,
                    UsageMeasure::TokensPerDay,
                    now,
                    tokens.total(),
                    &tx,
                )
                .await?;

            let month = now.date_naive().month() as i32;
            let year = now.date_naive().year();

            // Update monthly usage: add to the current month's row, or insert
            // a fresh one if this is the first usage this month.
            let monthly_usage = monthly_usage::Entity::find()
                .filter(
                    monthly_usage::Column::UserId
                        .eq(user_id)
                        .and(monthly_usage::Column::ModelId.eq(model.id))
                        .and(monthly_usage::Column::Month.eq(month))
                        .and(monthly_usage::Column::Year.eq(year)),
                )
                .one(&*tx)
                .await?;

            let monthly_usage = match monthly_usage {
                Some(usage) => {
                    monthly_usage::Entity::update(monthly_usage::ActiveModel {
                        id: ActiveValue::unchanged(usage.id),
                        input_tokens: ActiveValue::set(usage.input_tokens + tokens.input as i64),
                        cache_creation_input_tokens: ActiveValue::set(
                            usage.cache_creation_input_tokens + tokens.input_cache_creation as i64,
                        ),
                        cache_read_input_tokens: ActiveValue::set(
                            usage.cache_read_input_tokens + tokens.input_cache_read as i64,
                        ),
                        output_tokens: ActiveValue::set(usage.output_tokens + tokens.output as i64),
                        ..Default::default()
                    })
                    .exec(&*tx)
                    .await?
                }
                None => {
                    monthly_usage::ActiveModel {
                        user_id: ActiveValue::set(user_id),
                        model_id: ActiveValue::set(model.id),
                        month: ActiveValue::set(month),
                        year: ActiveValue::set(year),
                        input_tokens: ActiveValue::set(tokens.input as i64),
                        cache_creation_input_tokens: ActiveValue::set(
                            tokens.input_cache_creation as i64,
                        ),
                        cache_read_input_tokens: ActiveValue::set(tokens.input_cache_read as i64),
                        output_tokens: ActiveValue::set(tokens.output as i64),
                        ..Default::default()
                    }
                    .insert(&*tx)
                    .await?
                }
            };

            let spending_this_month = calculate_spending(
                model,
                monthly_usage.input_tokens as usize,
                monthly_usage.cache_creation_input_tokens as usize,
                monthly_usage.cache_read_input_tokens as usize,
                monthly_usage.output_tokens as usize,
            );

            // Bill only subscribed, non-staff users who are past the free
            // tier but still within their monthly cap. NOTE(review): the cap
            // check uses the month total that already includes this request's
            // tokens — confirm this inclusive cutoff is intended.
            if !is_staff
                && spending_this_month > free_tier_monthly_spending_limit
                && has_llm_subscription
                && (spending_this_month - free_tier_monthly_spending_limit) <= max_monthly_spend
            {
                billing_event::ActiveModel {
                    id: ActiveValue::not_set(),
                    idempotency_key: ActiveValue::not_set(),
                    user_id: ActiveValue::set(user_id),
                    model_id: ActiveValue::set(model.id),
                    input_tokens: ActiveValue::set(tokens.input as i64),
                    input_cache_creation_tokens: ActiveValue::set(
                        tokens.input_cache_creation as i64,
                    ),
                    input_cache_read_tokens: ActiveValue::set(tokens.input_cache_read as i64),
                    output_tokens: ActiveValue::set(tokens.output as i64),
                }
                .insert(&*tx)
                .await?;
            }

            // Update lifetime usage with the same upsert pattern as monthly.
            let lifetime_usage = lifetime_usage::Entity::find()
                .filter(
                    lifetime_usage::Column::UserId
                        .eq(user_id)
                        .and(lifetime_usage::Column::ModelId.eq(model.id)),
                )
                .one(&*tx)
                .await?;

            let lifetime_usage = match lifetime_usage {
                Some(usage) => {
                    lifetime_usage::Entity::update(lifetime_usage::ActiveModel {
                        id: ActiveValue::unchanged(usage.id),
                        input_tokens: ActiveValue::set(usage.input_tokens + tokens.input as i64),
                        cache_creation_input_tokens: ActiveValue::set(
                            usage.cache_creation_input_tokens + tokens.input_cache_creation as i64,
                        ),
                        cache_read_input_tokens: ActiveValue::set(
                            usage.cache_read_input_tokens + tokens.input_cache_read as i64,
                        ),
                        output_tokens: ActiveValue::set(usage.output_tokens + tokens.output as i64),
                        ..Default::default()
                    })
                    .exec(&*tx)
                    .await?
                }
                None => {
                    lifetime_usage::ActiveModel {
                        user_id: ActiveValue::set(user_id),
                        model_id: ActiveValue::set(model.id),
                        input_tokens: ActiveValue::set(tokens.input as i64),
                        cache_creation_input_tokens: ActiveValue::set(
                            tokens.input_cache_creation as i64,
                        ),
                        cache_read_input_tokens: ActiveValue::set(tokens.input_cache_read as i64),
                        output_tokens: ActiveValue::set(tokens.output as i64),
                        ..Default::default()
                    }
                    .insert(&*tx)
                    .await?
                }
            };

            let lifetime_spending = calculate_spending(
                model,
                lifetime_usage.input_tokens as usize,
                lifetime_usage.cache_creation_input_tokens as usize,
                lifetime_usage.cache_read_input_tokens as usize,
                lifetime_usage.output_tokens as usize,
            );

            Ok(Usage {
                requests_this_minute,
                tokens_this_minute,
                input_tokens_this_minute,
                output_tokens_this_minute,
                tokens_this_day,
                tokens_this_month: TokenUsage {
                    input: monthly_usage.input_tokens as usize,
                    input_cache_creation: monthly_usage.cache_creation_input_tokens as usize,
                    input_cache_read: monthly_usage.cache_read_input_tokens as usize,
                    output: monthly_usage.output_tokens as usize,
                },
                spending_this_month,
                lifetime_spending,
            })
        })
        .await
    }
|
||||
|
||||
/// Returns the active user count for the specified model.
|
||||
pub async fn get_active_user_count(
|
||||
&self,
|
||||
provider: LanguageModelProvider,
|
||||
model_name: &str,
|
||||
now: DateTimeUtc,
|
||||
) -> Result<ActiveUserCount> {
|
||||
self.transaction(|tx| async move {
|
||||
let minute_since = now - Duration::minutes(5);
|
||||
let day_since = now - Duration::days(5);
|
||||
|
||||
let model = self
|
||||
.models
|
||||
.get(&(provider, model_name.to_string()))
|
||||
.ok_or_else(|| anyhow!("unknown model {provider}:{model_name}"))?;
|
||||
|
||||
let tokens_per_minute = self.usage_measure_ids[&UsageMeasure::TokensPerMinute];
|
||||
|
||||
let users_in_recent_minutes = usage::Entity::find()
|
||||
.filter(
|
||||
usage::Column::ModelId
|
||||
.eq(model.id)
|
||||
.and(usage::Column::MeasureId.eq(tokens_per_minute))
|
||||
.and(usage::Column::Timestamp.gte(minute_since.naive_utc()))
|
||||
.and(usage::Column::IsStaff.eq(false)),
|
||||
)
|
||||
.select_only()
|
||||
.column(usage::Column::UserId)
|
||||
.group_by(usage::Column::UserId)
|
||||
.count(&*tx)
|
||||
.await? as usize;
|
||||
|
||||
let users_in_recent_days = usage::Entity::find()
|
||||
.filter(
|
||||
usage::Column::ModelId
|
||||
.eq(model.id)
|
||||
.and(usage::Column::MeasureId.eq(tokens_per_minute))
|
||||
.and(usage::Column::Timestamp.gte(day_since.naive_utc()))
|
||||
.and(usage::Column::IsStaff.eq(false)),
|
||||
)
|
||||
.select_only()
|
||||
.column(usage::Column::UserId)
|
||||
.group_by(usage::Column::UserId)
|
||||
.count(&*tx)
|
||||
.await? as usize;
|
||||
|
||||
Ok(ActiveUserCount {
|
||||
users_in_recent_minutes,
|
||||
users_in_recent_days,
|
||||
})
|
||||
})
|
||||
.await
|
||||
}
|
||||
|
||||
    /// Adds `usage_to_add` to the rolling-window row for (`user_id`,
    /// `model_id`, `usage_measure`), creating the row if it doesn't exist,
    /// and returns the new total across the window's live buckets.
    async fn update_usage_for_measure(
        &self,
        user_id: UserId,
        is_staff: bool,
        model_id: ModelId,
        usages: &[usage::Model],
        usage_measure: UsageMeasure,
        now: DateTimeUtc,
        usage_to_add: usize,
        tx: &DatabaseTransaction,
    ) -> Result<usize> {
        let now = now.naive_utc();
        let measure_id = *self
            .usage_measure_ids
            .get(&usage_measure)
            .ok_or_else(|| anyhow!("usage measure {usage_measure} not found"))?;

        // Default: a brand-new row with a single empty bucket stamped `now`.
        let mut id = None;
        let mut timestamp = now;
        let mut buckets = vec![0_i64];

        if let Some(old_usage) = usages.iter().find(|usage| usage.measure_id == measure_id) {
            id = Some(old_usage.id);
            let (live_buckets, buckets_since) =
                Self::get_live_buckets(old_usage, now, usage_measure);
            if !live_buckets.is_empty() {
                // Keep the still-live buckets and append one zero bucket per
                // elapsed interval; the last bucket becomes "current".
                buckets.clear();
                buckets.extend_from_slice(live_buckets);
                buckets.extend(iter::repeat(0).take(buckets_since));
                // Advance the row timestamp by the elapsed whole intervals.
                timestamp =
                    old_usage.timestamp + (usage_measure.bucket_duration() * buckets_since as i32);
            }
        }

        // Record the new usage in the current (last) bucket.
        *buckets.last_mut().unwrap() += usage_to_add as i64;
        let total_usage = buckets.iter().sum::<i64>() as usize;

        let mut model = usage::ActiveModel {
            user_id: ActiveValue::set(user_id),
            is_staff: ActiveValue::set(is_staff),
            model_id: ActiveValue::set(model_id),
            measure_id: ActiveValue::set(measure_id),
            timestamp: ActiveValue::set(timestamp),
            buckets: ActiveValue::set(buckets),
            ..Default::default()
        };

        // Update in place when the row already existed; otherwise insert.
        if let Some(id) = id {
            model.id = ActiveValue::unchanged(id);
            model.update(tx).await?;
        } else {
            usage::Entity::insert(model)
                .exec_without_returning(tx)
                .await?;
        }

        Ok(total_usage)
    }
|
||||
|
||||
fn get_usage_for_measure(
|
||||
&self,
|
||||
usages: &[usage::Model],
|
||||
now: DateTimeUtc,
|
||||
usage_measure: UsageMeasure,
|
||||
) -> Result<usize> {
|
||||
let now = now.naive_utc();
|
||||
let measure_id = *self
|
||||
.usage_measure_ids
|
||||
.get(&usage_measure)
|
||||
.ok_or_else(|| anyhow!("usage measure {usage_measure} not found"))?;
|
||||
let Some(usage) = usages.iter().find(|usage| usage.measure_id == measure_id) else {
|
||||
return Ok(0);
|
||||
};
|
||||
|
||||
let (live_buckets, _) = Self::get_live_buckets(usage, now, usage_measure);
|
||||
Ok(live_buckets.iter().sum::<i64>() as _)
|
||||
}
|
||||
|
||||
fn get_live_buckets(
|
||||
usage: &usage::Model,
|
||||
now: chrono::NaiveDateTime,
|
||||
measure: UsageMeasure,
|
||||
) -> (&[i64], usize) {
|
||||
let seconds_since_usage = (now - usage.timestamp).num_seconds().max(0);
|
||||
let buckets_since_usage =
|
||||
seconds_since_usage as f32 / measure.bucket_duration().num_seconds() as f32;
|
||||
let buckets_since_usage = buckets_since_usage.ceil() as usize;
|
||||
let mut live_buckets = &[] as &[i64];
|
||||
if buckets_since_usage < measure.bucket_count() {
|
||||
let expired_bucket_count =
|
||||
(usage.buckets.len() + buckets_since_usage).saturating_sub(measure.bucket_count());
|
||||
live_buckets = &usage.buckets[expired_bucket_count..];
|
||||
while live_buckets.first() == Some(&0) {
|
||||
live_buckets = &live_buckets[1..];
|
||||
}
|
||||
}
|
||||
(live_buckets, buckets_since_usage)
|
||||
}
|
||||
}
|
||||
|
||||
fn calculate_spending(
|
||||
@@ -741,32 +110,3 @@ fn calculate_spending(
|
||||
+ output_token_cost;
|
||||
Cents::new(spending as u32)
|
||||
}
|
||||
|
||||
const MINUTE_BUCKET_COUNT: usize = 12;
|
||||
const DAY_BUCKET_COUNT: usize = 48;
|
||||
|
||||
impl UsageMeasure {
|
||||
fn bucket_count(&self) -> usize {
|
||||
match self {
|
||||
UsageMeasure::RequestsPerMinute => MINUTE_BUCKET_COUNT,
|
||||
UsageMeasure::TokensPerMinute
|
||||
| UsageMeasure::InputTokensPerMinute
|
||||
| UsageMeasure::OutputTokensPerMinute => MINUTE_BUCKET_COUNT,
|
||||
UsageMeasure::TokensPerDay => DAY_BUCKET_COUNT,
|
||||
}
|
||||
}
|
||||
|
||||
fn total_duration(&self) -> Duration {
|
||||
match self {
|
||||
UsageMeasure::RequestsPerMinute => Duration::minutes(1),
|
||||
UsageMeasure::TokensPerMinute
|
||||
| UsageMeasure::InputTokensPerMinute
|
||||
| UsageMeasure::OutputTokensPerMinute => Duration::minutes(1),
|
||||
UsageMeasure::TokensPerDay => Duration::hours(24),
|
||||
}
|
||||
}
|
||||
|
||||
fn bucket_duration(&self) -> Duration {
|
||||
self.total_duration() / self.bucket_count() as i32
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,8 +1,6 @@
|
||||
pub mod billing_event;
|
||||
pub mod lifetime_usage;
|
||||
pub mod model;
|
||||
pub mod monthly_usage;
|
||||
pub mod provider;
|
||||
pub mod revoked_access_token;
|
||||
pub mod usage;
|
||||
pub mod usage_measure;
|
||||
|
||||
@@ -1,20 +0,0 @@
|
||||
use crate::{db::UserId, llm::db::ModelId};
use sea_orm::entity::prelude::*;

/// Cumulative per-user, per-model token usage over all time.
#[derive(Clone, Debug, PartialEq, DeriveEntityModel)]
#[sea_orm(table_name = "lifetime_usages")]
pub struct Model {
    #[sea_orm(primary_key)]
    pub id: i32,
    pub user_id: UserId,
    pub model_id: ModelId,
    /// Total input tokens.
    pub input_tokens: i64,
    /// Total input tokens counted toward cache creation.
    pub cache_creation_input_tokens: i64,
    /// Total input tokens served from cache reads.
    pub cache_read_input_tokens: i64,
    /// Total output tokens.
    pub output_tokens: i64,
}

#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
pub enum Relation {}

impl ActiveModelBehavior for ActiveModel {}
|
||||
@@ -1,19 +0,0 @@
|
||||
use chrono::NaiveDateTime;
use sea_orm::entity::prelude::*;

use crate::llm::db::RevokedAccessTokenId;

/// A revoked access token.
#[derive(Clone, Debug, PartialEq, DeriveEntityModel)]
#[sea_orm(table_name = "revoked_access_tokens")]
pub struct Model {
    #[sea_orm(primary_key)]
    pub id: RevokedAccessTokenId,
    /// JWT ID of the revoked token.
    pub jti: String,
    /// When the token was revoked.
    pub revoked_at: NaiveDateTime,
}

#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
pub enum Relation {}

impl ActiveModelBehavior for ActiveModel {}
|
||||
@@ -1,6 +1,4 @@
|
||||
mod billing_tests;
|
||||
mod provider_tests;
|
||||
mod usage_tests;
|
||||
|
||||
use gpui::BackgroundExecutor;
|
||||
use parking_lot::Mutex;
|
||||
|
||||
@@ -1,152 +0,0 @@
|
||||
use crate::{
|
||||
Cents,
|
||||
db::UserId,
|
||||
llm::{
|
||||
FREE_TIER_MONTHLY_SPENDING_LIMIT,
|
||||
db::{LlmDatabase, TokenUsage, queries::providers::ModelParams},
|
||||
},
|
||||
test_llm_db,
|
||||
};
|
||||
use chrono::{DateTime, Utc};
|
||||
use pretty_assertions::assert_eq;
|
||||
use rpc::LanguageModelProvider;
|
||||
|
||||
test_llm_db!(
|
||||
test_billing_limit_exceeded,
|
||||
test_billing_limit_exceeded_postgres
|
||||
);
|
||||
|
||||
async fn test_billing_limit_exceeded(db: &mut LlmDatabase) {
|
||||
let provider = LanguageModelProvider::Anthropic;
|
||||
let model = "fake-claude-limerick";
|
||||
const PRICE_PER_MILLION_INPUT_TOKENS: i32 = 5;
|
||||
const PRICE_PER_MILLION_OUTPUT_TOKENS: i32 = 5;
|
||||
|
||||
// Initialize the database and insert the model
|
||||
db.initialize().await.unwrap();
|
||||
db.insert_models(&[ModelParams {
|
||||
provider,
|
||||
name: model.to_string(),
|
||||
max_requests_per_minute: 5,
|
||||
max_tokens_per_minute: 10_000,
|
||||
max_tokens_per_day: 50_000,
|
||||
price_per_million_input_tokens: PRICE_PER_MILLION_INPUT_TOKENS,
|
||||
price_per_million_output_tokens: PRICE_PER_MILLION_OUTPUT_TOKENS,
|
||||
}])
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
// Set a fixed datetime for consistent testing
|
||||
let now = DateTime::parse_from_rfc3339("2024-08-08T22:46:33Z")
|
||||
.unwrap()
|
||||
.with_timezone(&Utc);
|
||||
|
||||
let user_id = UserId::from_proto(123);
|
||||
|
||||
let max_monthly_spend = Cents::from_dollars(11);
|
||||
|
||||
// Record usage that brings us close to the limit but doesn't exceed it
|
||||
// Let's say we use $10.50 worth of tokens
|
||||
let tokens_to_use = 210_000_000; // This will cost $10.50 at $0.05 per 1 million tokens
|
||||
let usage = TokenUsage {
|
||||
input: tokens_to_use,
|
||||
input_cache_creation: 0,
|
||||
input_cache_read: 0,
|
||||
output: 0,
|
||||
};
|
||||
|
||||
// Verify that before we record any usage, there are 0 billing events
|
||||
let billing_events = db.get_billing_events().await.unwrap();
|
||||
assert_eq!(billing_events.len(), 0);
|
||||
|
||||
db.record_usage(
|
||||
user_id,
|
||||
false,
|
||||
provider,
|
||||
model,
|
||||
usage,
|
||||
true,
|
||||
max_monthly_spend,
|
||||
FREE_TIER_MONTHLY_SPENDING_LIMIT,
|
||||
now,
|
||||
)
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
// Verify the recorded usage and spending
|
||||
let recorded_usage = db.get_usage(user_id, provider, model, now).await.unwrap();
|
||||
// Verify that we exceeded the free tier usage
|
||||
assert_eq!(recorded_usage.spending_this_month, Cents::new(1050));
|
||||
assert!(recorded_usage.spending_this_month > FREE_TIER_MONTHLY_SPENDING_LIMIT);
|
||||
|
||||
// Verify that there is one `billing_event` record
|
||||
let billing_events = db.get_billing_events().await.unwrap();
|
||||
assert_eq!(billing_events.len(), 1);
|
||||
|
||||
let (billing_event, _model) = &billing_events[0];
|
||||
assert_eq!(billing_event.user_id, user_id);
|
||||
assert_eq!(billing_event.input_tokens, tokens_to_use as i64);
|
||||
assert_eq!(billing_event.input_cache_creation_tokens, 0);
|
||||
assert_eq!(billing_event.input_cache_read_tokens, 0);
|
||||
assert_eq!(billing_event.output_tokens, 0);
|
||||
|
||||
// Record usage that puts us at $20.50
|
||||
let usage_2 = TokenUsage {
|
||||
input: 200_000_000, // This will cost $10 more, pushing us from $10.50 to $20.50,
|
||||
input_cache_creation: 0,
|
||||
input_cache_read: 0,
|
||||
output: 0,
|
||||
};
|
||||
db.record_usage(
|
||||
user_id,
|
||||
false,
|
||||
provider,
|
||||
model,
|
||||
usage_2,
|
||||
true,
|
||||
max_monthly_spend,
|
||||
FREE_TIER_MONTHLY_SPENDING_LIMIT,
|
||||
now,
|
||||
)
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
// Verify the updated usage and spending
|
||||
let updated_usage = db.get_usage(user_id, provider, model, now).await.unwrap();
|
||||
assert_eq!(updated_usage.spending_this_month, Cents::new(2050));
|
||||
|
||||
// Verify that there are now two billing events
|
||||
let billing_events = db.get_billing_events().await.unwrap();
|
||||
assert_eq!(billing_events.len(), 2);
|
||||
|
||||
let tokens_to_exceed = 20_000_000; // This will cost $1.00 more, pushing us from $20.50 to $21.50, which is over the $11 monthly maximum limit
|
||||
let usage_exceeding = TokenUsage {
|
||||
input: tokens_to_exceed,
|
||||
input_cache_creation: 0,
|
||||
input_cache_read: 0,
|
||||
output: 0,
|
||||
};
|
||||
|
||||
// This should still create a billing event as it's the first request that exceeds the limit
|
||||
db.record_usage(
|
||||
user_id,
|
||||
false,
|
||||
provider,
|
||||
model,
|
||||
usage_exceeding,
|
||||
true,
|
||||
FREE_TIER_MONTHLY_SPENDING_LIMIT,
|
||||
max_monthly_spend,
|
||||
now,
|
||||
)
|
||||
.await
|
||||
.unwrap();
|
||||
// Verify the updated usage and spending
|
||||
let updated_usage = db.get_usage(user_id, provider, model, now).await.unwrap();
|
||||
assert_eq!(updated_usage.spending_this_month, Cents::new(2150));
|
||||
|
||||
// Verify that we never exceed the user max spending for the user
|
||||
// and avoid charging them.
|
||||
let billing_events = db.get_billing_events().await.unwrap();
|
||||
assert_eq!(billing_events.len(), 2);
|
||||
}
|
||||
@@ -1,306 +0,0 @@
|
||||
use crate::llm::FREE_TIER_MONTHLY_SPENDING_LIMIT;
|
||||
use crate::{
|
||||
Cents,
|
||||
db::UserId,
|
||||
llm::db::{
|
||||
LlmDatabase, TokenUsage,
|
||||
queries::{providers::ModelParams, usages::Usage},
|
||||
},
|
||||
test_llm_db,
|
||||
};
|
||||
use chrono::{DateTime, Duration, Utc};
|
||||
use pretty_assertions::assert_eq;
|
||||
use rpc::LanguageModelProvider;
|
||||
|
||||
test_llm_db!(test_tracking_usage, test_tracking_usage_postgres);
|
||||
|
||||
async fn test_tracking_usage(db: &mut LlmDatabase) {
|
||||
let provider = LanguageModelProvider::Anthropic;
|
||||
let model = "claude-3-5-sonnet";
|
||||
|
||||
db.initialize().await.unwrap();
|
||||
db.insert_models(&[ModelParams {
|
||||
provider,
|
||||
name: model.to_string(),
|
||||
max_requests_per_minute: 5,
|
||||
max_tokens_per_minute: 10_000,
|
||||
max_tokens_per_day: 50_000,
|
||||
price_per_million_input_tokens: 50,
|
||||
price_per_million_output_tokens: 50,
|
||||
}])
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
// We're using a fixed datetime to prevent flakiness based on the clock.
|
||||
let t0 = DateTime::parse_from_rfc3339("2024-08-08T22:46:33Z")
|
||||
.unwrap()
|
||||
.with_timezone(&Utc);
|
||||
let user_id = UserId::from_proto(123);
|
||||
|
||||
let now = t0;
|
||||
db.record_usage(
|
||||
user_id,
|
||||
false,
|
||||
provider,
|
||||
model,
|
||||
TokenUsage {
|
||||
input: 1000,
|
||||
input_cache_creation: 0,
|
||||
input_cache_read: 0,
|
||||
output: 0,
|
||||
},
|
||||
false,
|
||||
Cents::ZERO,
|
||||
FREE_TIER_MONTHLY_SPENDING_LIMIT,
|
||||
now,
|
||||
)
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
let now = t0 + Duration::seconds(10);
|
||||
db.record_usage(
|
||||
user_id,
|
||||
false,
|
||||
provider,
|
||||
model,
|
||||
TokenUsage {
|
||||
input: 2000,
|
||||
input_cache_creation: 0,
|
||||
input_cache_read: 0,
|
||||
output: 0,
|
||||
},
|
||||
false,
|
||||
Cents::ZERO,
|
||||
FREE_TIER_MONTHLY_SPENDING_LIMIT,
|
||||
now,
|
||||
)
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
let usage = db.get_usage(user_id, provider, model, now).await.unwrap();
|
||||
assert_eq!(
|
||||
usage,
|
||||
Usage {
|
||||
requests_this_minute: 2,
|
||||
tokens_this_minute: 3000,
|
||||
input_tokens_this_minute: 3000,
|
||||
output_tokens_this_minute: 0,
|
||||
tokens_this_day: 3000,
|
||||
tokens_this_month: TokenUsage {
|
||||
input: 3000,
|
||||
input_cache_creation: 0,
|
||||
input_cache_read: 0,
|
||||
output: 0,
|
||||
},
|
||||
spending_this_month: Cents::ZERO,
|
||||
lifetime_spending: Cents::ZERO,
|
||||
}
|
||||
);
|
||||
|
||||
let now = t0 + Duration::seconds(60);
|
||||
let usage = db.get_usage(user_id, provider, model, now).await.unwrap();
|
||||
assert_eq!(
|
||||
usage,
|
||||
Usage {
|
||||
requests_this_minute: 1,
|
||||
tokens_this_minute: 2000,
|
||||
input_tokens_this_minute: 2000,
|
||||
output_tokens_this_minute: 0,
|
||||
tokens_this_day: 3000,
|
||||
tokens_this_month: TokenUsage {
|
||||
input: 3000,
|
||||
input_cache_creation: 0,
|
||||
input_cache_read: 0,
|
||||
output: 0,
|
||||
},
|
||||
spending_this_month: Cents::ZERO,
|
||||
lifetime_spending: Cents::ZERO,
|
||||
}
|
||||
);
|
||||
|
||||
let now = t0 + Duration::seconds(60);
|
||||
db.record_usage(
|
||||
user_id,
|
||||
false,
|
||||
provider,
|
||||
model,
|
||||
TokenUsage {
|
||||
input: 3000,
|
||||
input_cache_creation: 0,
|
||||
input_cache_read: 0,
|
||||
output: 0,
|
||||
},
|
||||
false,
|
||||
Cents::ZERO,
|
||||
FREE_TIER_MONTHLY_SPENDING_LIMIT,
|
||||
now,
|
||||
)
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
let usage = db.get_usage(user_id, provider, model, now).await.unwrap();
|
||||
assert_eq!(
|
||||
usage,
|
||||
Usage {
|
||||
requests_this_minute: 2,
|
||||
tokens_this_minute: 5000,
|
||||
input_tokens_this_minute: 5000,
|
||||
output_tokens_this_minute: 0,
|
||||
tokens_this_day: 6000,
|
||||
tokens_this_month: TokenUsage {
|
||||
input: 6000,
|
||||
input_cache_creation: 0,
|
||||
input_cache_read: 0,
|
||||
output: 0,
|
||||
},
|
||||
spending_this_month: Cents::ZERO,
|
||||
lifetime_spending: Cents::ZERO,
|
||||
}
|
||||
);
|
||||
|
||||
let t1 = t0 + Duration::hours(24);
|
||||
let now = t1;
|
||||
let usage = db.get_usage(user_id, provider, model, now).await.unwrap();
|
||||
assert_eq!(
|
||||
usage,
|
||||
Usage {
|
||||
requests_this_minute: 0,
|
||||
tokens_this_minute: 0,
|
||||
input_tokens_this_minute: 0,
|
||||
output_tokens_this_minute: 0,
|
||||
tokens_this_day: 5000,
|
||||
tokens_this_month: TokenUsage {
|
||||
input: 6000,
|
||||
input_cache_creation: 0,
|
||||
input_cache_read: 0,
|
||||
output: 0,
|
||||
},
|
||||
spending_this_month: Cents::ZERO,
|
||||
lifetime_spending: Cents::ZERO,
|
||||
}
|
||||
);
|
||||
|
||||
db.record_usage(
|
||||
user_id,
|
||||
false,
|
||||
provider,
|
||||
model,
|
||||
TokenUsage {
|
||||
input: 4000,
|
||||
input_cache_creation: 0,
|
||||
input_cache_read: 0,
|
||||
output: 0,
|
||||
},
|
||||
false,
|
||||
Cents::ZERO,
|
||||
FREE_TIER_MONTHLY_SPENDING_LIMIT,
|
||||
now,
|
||||
)
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
let usage = db.get_usage(user_id, provider, model, now).await.unwrap();
|
||||
assert_eq!(
|
||||
usage,
|
||||
Usage {
|
||||
requests_this_minute: 1,
|
||||
tokens_this_minute: 4000,
|
||||
input_tokens_this_minute: 4000,
|
||||
output_tokens_this_minute: 0,
|
||||
tokens_this_day: 9000,
|
||||
tokens_this_month: TokenUsage {
|
||||
input: 10000,
|
||||
input_cache_creation: 0,
|
||||
input_cache_read: 0,
|
||||
output: 0,
|
||||
},
|
||||
spending_this_month: Cents::ZERO,
|
||||
lifetime_spending: Cents::ZERO,
|
||||
}
|
||||
);
|
||||
|
||||
// We're using a fixed datetime to prevent flakiness based on the clock.
|
||||
let now = DateTime::parse_from_rfc3339("2024-10-08T22:15:58Z")
|
||||
.unwrap()
|
||||
.with_timezone(&Utc);
|
||||
|
||||
// Test cache creation input tokens
|
||||
db.record_usage(
|
||||
user_id,
|
||||
false,
|
||||
provider,
|
||||
model,
|
||||
TokenUsage {
|
||||
input: 1000,
|
||||
input_cache_creation: 500,
|
||||
input_cache_read: 0,
|
||||
output: 0,
|
||||
},
|
||||
false,
|
||||
Cents::ZERO,
|
||||
FREE_TIER_MONTHLY_SPENDING_LIMIT,
|
||||
now,
|
||||
)
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
let usage = db.get_usage(user_id, provider, model, now).await.unwrap();
|
||||
assert_eq!(
|
||||
usage,
|
||||
Usage {
|
||||
requests_this_minute: 1,
|
||||
tokens_this_minute: 1500,
|
||||
input_tokens_this_minute: 1500,
|
||||
output_tokens_this_minute: 0,
|
||||
tokens_this_day: 1500,
|
||||
tokens_this_month: TokenUsage {
|
||||
input: 1000,
|
||||
input_cache_creation: 500,
|
||||
input_cache_read: 0,
|
||||
output: 0,
|
||||
},
|
||||
spending_this_month: Cents::ZERO,
|
||||
lifetime_spending: Cents::ZERO,
|
||||
}
|
||||
);
|
||||
|
||||
// Test cache read input tokens
|
||||
db.record_usage(
|
||||
user_id,
|
||||
false,
|
||||
provider,
|
||||
model,
|
||||
TokenUsage {
|
||||
input: 1000,
|
||||
input_cache_creation: 0,
|
||||
input_cache_read: 300,
|
||||
output: 0,
|
||||
},
|
||||
false,
|
||||
Cents::ZERO,
|
||||
FREE_TIER_MONTHLY_SPENDING_LIMIT,
|
||||
now,
|
||||
)
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
let usage = db.get_usage(user_id, provider, model, now).await.unwrap();
|
||||
assert_eq!(
|
||||
usage,
|
||||
Usage {
|
||||
requests_this_minute: 2,
|
||||
tokens_this_minute: 2800,
|
||||
input_tokens_this_minute: 2500,
|
||||
output_tokens_this_minute: 0,
|
||||
tokens_this_day: 2800,
|
||||
tokens_this_month: TokenUsage {
|
||||
input: 2000,
|
||||
input_cache_creation: 500,
|
||||
input_cache_read: 300,
|
||||
output: 0,
|
||||
},
|
||||
spending_this_month: Cents::ZERO,
|
||||
lifetime_spending: Cents::ZERO,
|
||||
}
|
||||
);
|
||||
}
|
||||
@@ -9,14 +9,14 @@ use axum::{
|
||||
|
||||
use collab::api::CloudflareIpCountryHeader;
|
||||
use collab::api::billing::sync_llm_usage_with_stripe_periodically;
|
||||
use collab::llm::{db::LlmDatabase, log_usage_periodically};
|
||||
use collab::llm::db::LlmDatabase;
|
||||
use collab::migrations::run_database_migrations;
|
||||
use collab::user_backfiller::spawn_user_backfiller;
|
||||
use collab::{
|
||||
AppState, Config, RateLimiter, Result, api::fetch_extensions_from_blob_store_periodically, db,
|
||||
env, executor::Executor, rpc::ResultExt,
|
||||
};
|
||||
use collab::{ServiceMode, api::billing::poll_stripe_events_periodically, llm::LlmState};
|
||||
use collab::{ServiceMode, api::billing::poll_stripe_events_periodically};
|
||||
use db::Database;
|
||||
use std::{
|
||||
env::args,
|
||||
@@ -74,11 +74,10 @@ async fn main() -> Result<()> {
|
||||
let mode = match args.next().as_deref() {
|
||||
Some("collab") => ServiceMode::Collab,
|
||||
Some("api") => ServiceMode::Api,
|
||||
Some("llm") => ServiceMode::Llm,
|
||||
Some("all") => ServiceMode::All,
|
||||
_ => {
|
||||
return Err(anyhow!(
|
||||
"usage: collab <version | migrate | seed | serve <api|collab|llm|all>>"
|
||||
"usage: collab <version | migrate | seed | serve <api|collab|all>>"
|
||||
))?;
|
||||
}
|
||||
};
|
||||
@@ -97,20 +96,9 @@ async fn main() -> Result<()> {
|
||||
|
||||
let mut on_shutdown = None;
|
||||
|
||||
if mode.is_llm() {
|
||||
setup_llm_database(&config).await?;
|
||||
|
||||
let state = LlmState::new(config.clone(), Executor::Production).await?;
|
||||
|
||||
log_usage_periodically(state.clone());
|
||||
|
||||
app = app
|
||||
.merge(collab::llm::routes())
|
||||
.layer(Extension(state.clone()));
|
||||
}
|
||||
|
||||
if mode.is_collab() || mode.is_api() {
|
||||
setup_app_database(&config).await?;
|
||||
setup_llm_database(&config).await?;
|
||||
|
||||
let state = AppState::new(config, Executor::Production).await?;
|
||||
|
||||
@@ -336,18 +324,11 @@ async fn handle_root(Extension(mode): Extension<ServiceMode>) -> String {
|
||||
format!("zed:{mode} v{VERSION} ({})", REVISION.unwrap_or("unknown"))
|
||||
}
|
||||
|
||||
async fn handle_liveness_probe(
|
||||
app_state: Option<Extension<Arc<AppState>>>,
|
||||
llm_state: Option<Extension<Arc<LlmState>>>,
|
||||
) -> Result<String> {
|
||||
async fn handle_liveness_probe(app_state: Option<Extension<Arc<AppState>>>) -> Result<String> {
|
||||
if let Some(state) = app_state {
|
||||
state.db.get_all_users(0, 1).await?;
|
||||
}
|
||||
|
||||
if let Some(llm_state) = llm_state {
|
||||
llm_state.db.list_providers().await?;
|
||||
}
|
||||
|
||||
Ok("ok".to_string())
|
||||
}
|
||||
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
use std::sync::Arc;
|
||||
|
||||
use crate::{Cents, Result, llm};
|
||||
use anyhow::Context as _;
|
||||
use anyhow::{Context as _, anyhow};
|
||||
use chrono::{Datelike, Utc};
|
||||
use collections::HashMap;
|
||||
use serde::{Deserialize, Serialize};
|
||||
@@ -10,6 +10,7 @@ use tokio::sync::RwLock;
|
||||
pub struct StripeBilling {
|
||||
state: RwLock<StripeBillingState>,
|
||||
client: Arc<stripe::Client>,
|
||||
zed_pro_price_id: Option<String>,
|
||||
}
|
||||
|
||||
#[derive(Default)]
|
||||
@@ -31,10 +32,11 @@ struct StripeBillingPrice {
|
||||
}
|
||||
|
||||
impl StripeBilling {
|
||||
pub fn new(client: Arc<stripe::Client>) -> Self {
|
||||
pub fn new(client: Arc<stripe::Client>, zed_pro_price_id: Option<String>) -> Self {
|
||||
Self {
|
||||
client,
|
||||
state: RwLock::default(),
|
||||
zed_pro_price_id,
|
||||
}
|
||||
}
|
||||
|
||||
@@ -382,6 +384,32 @@ impl StripeBilling {
|
||||
let session = stripe::CheckoutSession::create(&self.client, params).await?;
|
||||
Ok(session.url.context("no checkout session URL")?)
|
||||
}
|
||||
|
||||
pub async fn checkout_with_zed_pro(
|
||||
&self,
|
||||
customer_id: stripe::CustomerId,
|
||||
github_login: &str,
|
||||
success_url: &str,
|
||||
) -> Result<String> {
|
||||
let zed_pro_price_id = self
|
||||
.zed_pro_price_id
|
||||
.as_ref()
|
||||
.ok_or_else(|| anyhow!("Zed Pro price ID not set"))?;
|
||||
|
||||
let mut params = stripe::CreateCheckoutSession::new();
|
||||
params.mode = Some(stripe::CheckoutSessionMode::Subscription);
|
||||
params.customer = Some(customer_id);
|
||||
params.client_reference_id = Some(github_login);
|
||||
params.line_items = Some(vec![stripe::CreateCheckoutSessionLineItems {
|
||||
price: Some(zed_pro_price_id.clone()),
|
||||
quantity: Some(1),
|
||||
..Default::default()
|
||||
}]);
|
||||
params.success_url = Some(success_url);
|
||||
|
||||
let session = stripe::CheckoutSession::create(&self.client, params).await?;
|
||||
Ok(session.url.context("no checkout session URL")?)
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Serialize)]
|
||||
|
||||
@@ -694,7 +694,15 @@ async fn test_collaborating_with_code_actions(
|
||||
// Confirming the code action will trigger a resolve request.
|
||||
let confirm_action = editor_b
|
||||
.update_in(cx_b, |editor, window, cx| {
|
||||
Editor::confirm_code_action(editor, &ConfirmCodeAction { item_ix: Some(0) }, window, cx)
|
||||
Editor::confirm_code_action(
|
||||
editor,
|
||||
&ConfirmCodeAction {
|
||||
item_ix: Some(0),
|
||||
from_mouse_context_menu: false,
|
||||
},
|
||||
window,
|
||||
cx,
|
||||
)
|
||||
})
|
||||
.unwrap();
|
||||
fake_language_server.set_request_handler::<lsp::request::CodeActionResolveRequest, _, _>(
|
||||
|
||||
@@ -557,6 +557,7 @@ impl TestServer {
|
||||
migrations_path: None,
|
||||
seed_path: None,
|
||||
stripe_api_key: None,
|
||||
stripe_zed_pro_price_id: None,
|
||||
supermaven_admin_api_key: None,
|
||||
user_backfiller_github_access_token: None,
|
||||
kinesis_region: None,
|
||||
|
||||
@@ -34,6 +34,7 @@ static MENTIONS_SEARCH: LazyLock<SearchQuery> = LazyLock::new(|| {
|
||||
false,
|
||||
false,
|
||||
false,
|
||||
false,
|
||||
Default::default(),
|
||||
Default::default(),
|
||||
None,
|
||||
|
||||
@@ -191,6 +191,14 @@ pub fn components() -> AllComponents {
|
||||
all_components
|
||||
}
|
||||
|
||||
// #[derive(Debug, Clone, PartialEq, Eq, Hash)]
|
||||
// pub enum ComponentStatus {
|
||||
// WorkInProgress,
|
||||
// EngineeringReady,
|
||||
// Live,
|
||||
// Deprecated,
|
||||
// }
|
||||
|
||||
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
|
||||
pub enum ComponentScope {
|
||||
Collaboration,
|
||||
@@ -241,24 +249,30 @@ pub struct ComponentExample {
|
||||
impl RenderOnce for ComponentExample {
|
||||
fn render(self, _window: &mut Window, cx: &mut App) -> impl IntoElement {
|
||||
div()
|
||||
.pt_2()
|
||||
.w_full()
|
||||
.flex()
|
||||
.flex_col()
|
||||
.gap_3()
|
||||
.child(
|
||||
div()
|
||||
.child(self.variant_name.clone())
|
||||
.text_size(rems(1.25))
|
||||
.text_color(cx.theme().colors().text),
|
||||
.flex()
|
||||
.flex_col()
|
||||
.child(
|
||||
div()
|
||||
.child(self.variant_name.clone())
|
||||
.text_size(rems(1.0))
|
||||
.text_color(cx.theme().colors().text),
|
||||
)
|
||||
.when_some(self.description, |this, description| {
|
||||
this.child(
|
||||
div()
|
||||
.text_size(rems(0.875))
|
||||
.text_color(cx.theme().colors().text_muted)
|
||||
.child(description.clone()),
|
||||
)
|
||||
}),
|
||||
)
|
||||
.when_some(self.description, |this, description| {
|
||||
this.child(
|
||||
div()
|
||||
.text_size(rems(0.9375))
|
||||
.text_color(cx.theme().colors().text_muted)
|
||||
.child(description.clone()),
|
||||
)
|
||||
})
|
||||
.child(
|
||||
div()
|
||||
.flex()
|
||||
@@ -268,11 +282,11 @@ impl RenderOnce for ComponentExample {
|
||||
.justify_center()
|
||||
.p_8()
|
||||
.border_1()
|
||||
.border_color(cx.theme().colors().border)
|
||||
.border_color(cx.theme().colors().border.opacity(0.5))
|
||||
.bg(pattern_slash(
|
||||
cx.theme().colors().surface_background.opacity(0.5),
|
||||
24.0,
|
||||
24.0,
|
||||
12.0,
|
||||
12.0,
|
||||
))
|
||||
.shadow_sm()
|
||||
.child(self.element),
|
||||
|
||||
@@ -16,12 +16,16 @@ default = []
|
||||
|
||||
[dependencies]
|
||||
client.workspace = true
|
||||
collections.workspace = true
|
||||
component.workspace = true
|
||||
gpui.workspace = true
|
||||
languages.workspace = true
|
||||
notifications.workspace = true
|
||||
project.workspace = true
|
||||
ui.workspace = true
|
||||
workspace.workspace = true
|
||||
notifications.workspace = true
|
||||
collections.workspace = true
|
||||
ui_input.workspace = true
|
||||
workspace-hack.workspace = true
|
||||
workspace.workspace = true
|
||||
db.workspace = true
|
||||
anyhow.workspace = true
|
||||
serde.workspace = true
|
||||
|
||||
@@ -2,6 +2,8 @@
|
||||
//!
|
||||
//! A view for exploring Zed components.
|
||||
|
||||
mod persistence;
|
||||
|
||||
use std::iter::Iterator;
|
||||
use std::sync::Arc;
|
||||
|
||||
@@ -9,24 +11,27 @@ use client::UserStore;
|
||||
use component::{ComponentId, ComponentMetadata, components};
|
||||
use gpui::{
|
||||
App, Entity, EventEmitter, FocusHandle, Focusable, Task, WeakEntity, Window, list, prelude::*,
|
||||
uniform_list,
|
||||
};
|
||||
|
||||
use collections::HashMap;
|
||||
|
||||
use gpui::{ListState, ScrollHandle, UniformListScrollHandle};
|
||||
use gpui::{ListState, ScrollHandle, ScrollStrategy, UniformListScrollHandle};
|
||||
use languages::LanguageRegistry;
|
||||
use notifications::status_toast::{StatusToast, ToastIcon};
|
||||
use persistence::COMPONENT_PREVIEW_DB;
|
||||
use project::Project;
|
||||
use ui::{Divider, ListItem, ListSubHeader, prelude::*};
|
||||
use ui::{Divider, HighlightedLabel, ListItem, ListSubHeader, prelude::*};
|
||||
|
||||
use ui_input::SingleLineInput;
|
||||
use workspace::{AppState, ItemId, SerializableItem};
|
||||
use workspace::{Item, Workspace, WorkspaceId, item::ItemEvent};
|
||||
|
||||
pub fn init(app_state: Arc<AppState>, cx: &mut App) {
|
||||
workspace::register_serializable_item::<ComponentPreview>(cx);
|
||||
|
||||
let app_state = app_state.clone();
|
||||
|
||||
cx.observe_new(move |workspace: &mut Workspace, _, cx| {
|
||||
cx.observe_new(move |workspace: &mut Workspace, _window, cx| {
|
||||
let app_state = app_state.clone();
|
||||
let weak_workspace = cx.entity().downgrade();
|
||||
|
||||
@@ -44,6 +49,7 @@ pub fn init(app_state: Arc<AppState>, cx: &mut App) {
|
||||
user_store,
|
||||
None,
|
||||
None,
|
||||
window,
|
||||
cx,
|
||||
)
|
||||
});
|
||||
@@ -64,13 +70,13 @@ pub fn init(app_state: Arc<AppState>, cx: &mut App) {
|
||||
enum PreviewEntry {
|
||||
AllComponents,
|
||||
Separator,
|
||||
Component(ComponentMetadata),
|
||||
Component(ComponentMetadata, Option<Vec<usize>>),
|
||||
SectionHeader(SharedString),
|
||||
}
|
||||
|
||||
impl From<ComponentMetadata> for PreviewEntry {
|
||||
fn from(component: ComponentMetadata) -> Self {
|
||||
PreviewEntry::Component(component)
|
||||
PreviewEntry::Component(component, None)
|
||||
}
|
||||
}
|
||||
|
||||
@@ -88,6 +94,7 @@ enum PreviewPage {
|
||||
}
|
||||
|
||||
struct ComponentPreview {
|
||||
workspace_id: Option<WorkspaceId>,
|
||||
focus_handle: FocusHandle,
|
||||
_view_scroll_handle: ScrollHandle,
|
||||
nav_scroll_handle: UniformListScrollHandle,
|
||||
@@ -99,6 +106,8 @@ struct ComponentPreview {
|
||||
language_registry: Arc<LanguageRegistry>,
|
||||
workspace: WeakEntity<Workspace>,
|
||||
user_store: Entity<UserStore>,
|
||||
filter_editor: Entity<SingleLineInput>,
|
||||
filter_text: String,
|
||||
}
|
||||
|
||||
impl ComponentPreview {
|
||||
@@ -108,11 +117,14 @@ impl ComponentPreview {
|
||||
user_store: Entity<UserStore>,
|
||||
selected_index: impl Into<Option<usize>>,
|
||||
active_page: Option<PreviewPage>,
|
||||
window: &mut Window,
|
||||
cx: &mut Context<Self>,
|
||||
) -> Self {
|
||||
let sorted_components = components().all_sorted();
|
||||
let selected_index = selected_index.into().unwrap_or(0);
|
||||
let active_page = active_page.unwrap_or(PreviewPage::AllComponents);
|
||||
let filter_editor =
|
||||
cx.new(|cx| SingleLineInput::new(window, cx, "Find components or usages…"));
|
||||
|
||||
let component_list = ListState::new(
|
||||
sorted_components.len(),
|
||||
@@ -132,6 +144,7 @@ impl ComponentPreview {
|
||||
);
|
||||
|
||||
let mut component_preview = Self {
|
||||
workspace_id: None,
|
||||
focus_handle: cx.focus_handle(),
|
||||
_view_scroll_handle: ScrollHandle::new(),
|
||||
nav_scroll_handle: UniformListScrollHandle::new(),
|
||||
@@ -143,6 +156,8 @@ impl ComponentPreview {
|
||||
components: sorted_components,
|
||||
component_list,
|
||||
cursor_index: selected_index,
|
||||
filter_editor,
|
||||
filter_text: String::new(),
|
||||
};
|
||||
|
||||
if component_preview.cursor_index > 0 {
|
||||
@@ -154,6 +169,13 @@ impl ComponentPreview {
|
||||
component_preview
|
||||
}
|
||||
|
||||
pub fn active_page_id(&self, _cx: &App) -> ActivePageId {
|
||||
match &self.active_page {
|
||||
PreviewPage::AllComponents => ActivePageId::default(),
|
||||
PreviewPage::Component(component_id) => ActivePageId(component_id.0.to_string()),
|
||||
}
|
||||
}
|
||||
|
||||
fn scroll_to_preview(&mut self, ix: usize, cx: &mut Context<Self>) {
|
||||
self.component_list.scroll_to_reveal_item(ix);
|
||||
self.cursor_index = ix;
|
||||
@@ -162,6 +184,7 @@ impl ComponentPreview {
|
||||
|
||||
fn set_active_page(&mut self, page: PreviewPage, cx: &mut Context<Self>) {
|
||||
self.active_page = page;
|
||||
cx.emit(ItemEvent::UpdateTab);
|
||||
cx.notify();
|
||||
}
|
||||
|
||||
@@ -169,20 +192,94 @@ impl ComponentPreview {
|
||||
self.components[ix].clone()
|
||||
}
|
||||
|
||||
fn filtered_components(&self) -> Vec<ComponentMetadata> {
|
||||
if self.filter_text.is_empty() {
|
||||
return self.components.clone();
|
||||
}
|
||||
|
||||
let filter = self.filter_text.to_lowercase();
|
||||
self.components
|
||||
.iter()
|
||||
.filter(|component| {
|
||||
let component_name = component.name().to_lowercase();
|
||||
let scope_name = component.scope().to_string().to_lowercase();
|
||||
let description = component
|
||||
.description()
|
||||
.map(|d| d.to_lowercase())
|
||||
.unwrap_or_default();
|
||||
|
||||
component_name.contains(&filter)
|
||||
|| scope_name.contains(&filter)
|
||||
|| description.contains(&filter)
|
||||
})
|
||||
.cloned()
|
||||
.collect()
|
||||
}
|
||||
|
||||
fn scope_ordered_entries(&self) -> Vec<PreviewEntry> {
|
||||
use std::collections::HashMap;
|
||||
|
||||
let mut scope_groups: HashMap<ComponentScope, Vec<ComponentMetadata>> = HashMap::default();
|
||||
let mut scope_groups: HashMap<
|
||||
ComponentScope,
|
||||
Vec<(ComponentMetadata, Option<Vec<usize>>)>,
|
||||
> = HashMap::default();
|
||||
let lowercase_filter = self.filter_text.to_lowercase();
|
||||
|
||||
for component in &self.components {
|
||||
scope_groups
|
||||
.entry(component.scope())
|
||||
.or_insert_with(Vec::new)
|
||||
.push(component.clone());
|
||||
if self.filter_text.is_empty() {
|
||||
scope_groups
|
||||
.entry(component.scope())
|
||||
.or_insert_with(Vec::new)
|
||||
.push((component.clone(), None));
|
||||
continue;
|
||||
}
|
||||
|
||||
// let full_component_name = component.name();
|
||||
let scopeless_name = component.scopeless_name();
|
||||
let scope_name = component.scope().to_string();
|
||||
let description = component.description().unwrap_or_default();
|
||||
|
||||
let lowercase_scopeless = scopeless_name.to_lowercase();
|
||||
let lowercase_scope = scope_name.to_lowercase();
|
||||
let lowercase_desc = description.to_lowercase();
|
||||
|
||||
if lowercase_scopeless.contains(&lowercase_filter) {
|
||||
if let Some(index) = lowercase_scopeless.find(&lowercase_filter) {
|
||||
let end = index + lowercase_filter.len();
|
||||
|
||||
if end <= scopeless_name.len() {
|
||||
let mut positions = Vec::new();
|
||||
for i in index..end {
|
||||
if scopeless_name.is_char_boundary(i) {
|
||||
positions.push(i);
|
||||
}
|
||||
}
|
||||
|
||||
if !positions.is_empty() {
|
||||
scope_groups
|
||||
.entry(component.scope())
|
||||
.or_insert_with(Vec::new)
|
||||
.push((component.clone(), Some(positions)));
|
||||
continue;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if lowercase_scopeless.contains(&lowercase_filter)
|
||||
|| lowercase_scope.contains(&lowercase_filter)
|
||||
|| lowercase_desc.contains(&lowercase_filter)
|
||||
{
|
||||
scope_groups
|
||||
.entry(component.scope())
|
||||
.or_insert_with(Vec::new)
|
||||
.push((component.clone(), None));
|
||||
}
|
||||
}
|
||||
|
||||
// Sort the components in each group
|
||||
for components in scope_groups.values_mut() {
|
||||
components.sort_by_key(|c| c.name().to_lowercase());
|
||||
components.sort_by_key(|(c, _)| c.sort_name());
|
||||
}
|
||||
|
||||
let mut entries = Vec::new();
|
||||
@@ -204,10 +301,10 @@ impl ComponentPreview {
|
||||
if !components.is_empty() {
|
||||
entries.push(PreviewEntry::SectionHeader(scope.to_string().into()));
|
||||
let mut sorted_components = components;
|
||||
sorted_components.sort_by_key(|component| component.sort_name());
|
||||
sorted_components.sort_by_key(|(component, _)| component.sort_name());
|
||||
|
||||
for component in sorted_components {
|
||||
entries.push(PreviewEntry::Component(component));
|
||||
for (component, positions) in sorted_components {
|
||||
entries.push(PreviewEntry::Component(component, positions));
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -219,10 +316,10 @@ impl ComponentPreview {
|
||||
entries.push(PreviewEntry::Separator);
|
||||
entries.push(PreviewEntry::SectionHeader("Uncategorized".into()));
|
||||
let mut sorted_components = components.clone();
|
||||
sorted_components.sort_by_key(|c| c.sort_name());
|
||||
sorted_components.sort_by_key(|(c, _)| c.sort_name());
|
||||
|
||||
for component in sorted_components {
|
||||
entries.push(PreviewEntry::Component(component.clone()));
|
||||
for (component, positions) in sorted_components {
|
||||
entries.push(PreviewEntry::Component(component, positions));
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -237,14 +334,33 @@ impl ComponentPreview {
|
||||
cx: &Context<Self>,
|
||||
) -> impl IntoElement + use<> {
|
||||
match entry {
|
||||
PreviewEntry::Component(component_metadata) => {
|
||||
PreviewEntry::Component(component_metadata, highlight_positions) => {
|
||||
let id = component_metadata.id();
|
||||
let selected = self.active_page == PreviewPage::Component(id.clone());
|
||||
let name = component_metadata.scopeless_name();
|
||||
|
||||
ListItem::new(ix)
|
||||
.child(
|
||||
Label::new(component_metadata.scopeless_name().clone())
|
||||
.color(Color::Default),
|
||||
)
|
||||
.child(if let Some(_positions) = highlight_positions {
|
||||
let name_lower = name.to_lowercase();
|
||||
let filter_lower = self.filter_text.to_lowercase();
|
||||
let valid_positions = if let Some(start) = name_lower.find(&filter_lower) {
|
||||
let end = start + filter_lower.len();
|
||||
(start..end).collect()
|
||||
} else {
|
||||
Vec::new()
|
||||
};
|
||||
if valid_positions.is_empty() {
|
||||
Label::new(name.clone())
|
||||
.color(Color::Default)
|
||||
.into_any_element()
|
||||
} else {
|
||||
HighlightedLabel::new(name.clone(), valid_positions).into_any_element()
|
||||
}
|
||||
} else {
|
||||
Label::new(name.clone())
|
||||
.color(Color::Default)
|
||||
.into_any_element()
|
||||
})
|
||||
.selectable(true)
|
||||
.toggle_state(selected)
|
||||
.inset(true)
|
||||
@@ -282,20 +398,70 @@ impl ComponentPreview {
|
||||
}
|
||||
|
||||
fn update_component_list(&mut self, cx: &mut Context<Self>) {
|
||||
let new_len = self.scope_ordered_entries().len();
|
||||
let entries = self.scope_ordered_entries();
|
||||
let new_len = entries.len();
|
||||
let weak_entity = cx.entity().downgrade();
|
||||
|
||||
if new_len > 0 {
|
||||
self.nav_scroll_handle
|
||||
.scroll_to_item(0, ScrollStrategy::Top);
|
||||
}
|
||||
|
||||
let filtered_components = self.filtered_components();
|
||||
|
||||
if !self.filter_text.is_empty() && !matches!(self.active_page, PreviewPage::AllComponents) {
|
||||
if let PreviewPage::Component(ref component_id) = self.active_page {
|
||||
let component_still_visible = filtered_components
|
||||
.iter()
|
||||
.any(|component| component.id() == *component_id);
|
||||
|
||||
if !component_still_visible {
|
||||
if !filtered_components.is_empty() {
|
||||
let first_component = &filtered_components[0];
|
||||
self.set_active_page(PreviewPage::Component(first_component.id()), cx);
|
||||
} else {
|
||||
self.set_active_page(PreviewPage::AllComponents, cx);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
self.component_list = ListState::new(
|
||||
filtered_components.len(),
|
||||
gpui::ListAlignment::Top,
|
||||
px(1500.0),
|
||||
{
|
||||
let components = filtered_components.clone();
|
||||
let this = cx.entity().downgrade();
|
||||
move |ix, window: &mut Window, cx: &mut App| {
|
||||
if ix >= components.len() {
|
||||
return div().w_full().h_0().into_any_element();
|
||||
}
|
||||
|
||||
this.update(cx, |this, cx| {
|
||||
let component = &components[ix];
|
||||
this.render_preview(component, window, cx)
|
||||
.into_any_element()
|
||||
})
|
||||
.unwrap()
|
||||
}
|
||||
},
|
||||
);
|
||||
|
||||
let new_list = ListState::new(
|
||||
new_len,
|
||||
gpui::ListAlignment::Top,
|
||||
px(1500.0),
|
||||
move |ix, window, cx| {
|
||||
if ix >= entries.len() {
|
||||
return div().w_full().h_0().into_any_element();
|
||||
}
|
||||
|
||||
let entry = &entries[ix];
|
||||
|
||||
weak_entity
|
||||
.update(cx, |this, cx| match entry {
|
||||
PreviewEntry::Component(component) => this
|
||||
PreviewEntry::Component(component, _) => this
|
||||
.render_preview(component, window, cx)
|
||||
.into_any_element(),
|
||||
PreviewEntry::SectionHeader(shared_string) => this
|
||||
@@ -309,6 +475,7 @@ impl ComponentPreview {
|
||||
);
|
||||
|
||||
self.component_list = new_list;
|
||||
cx.emit(ItemEvent::UpdateTab);
|
||||
}
|
||||
|
||||
fn render_scope_header(
|
||||
@@ -377,16 +544,27 @@ impl ComponentPreview {
|
||||
.into_any_element()
|
||||
}
|
||||
|
||||
fn render_all_components(&self) -> impl IntoElement {
|
||||
fn render_all_components(&self, cx: &Context<Self>) -> impl IntoElement {
|
||||
v_flex()
|
||||
.id("component-list")
|
||||
.px_8()
|
||||
.pt_4()
|
||||
.size_full()
|
||||
.child(
|
||||
list(self.component_list.clone())
|
||||
.flex_grow()
|
||||
.with_sizing_behavior(gpui::ListSizingBehavior::Auto),
|
||||
if self.filtered_components().is_empty() && !self.filter_text.is_empty() {
|
||||
div()
|
||||
.size_full()
|
||||
.items_center()
|
||||
.justify_center()
|
||||
.text_color(cx.theme().colors().text_muted)
|
||||
.child(format!("No components matching '{}'.", self.filter_text))
|
||||
.into_any_element()
|
||||
} else {
|
||||
list(self.component_list.clone())
|
||||
.flex_grow()
|
||||
.with_sizing_behavior(gpui::ListSizingBehavior::Auto)
|
||||
.into_any_element()
|
||||
},
|
||||
)
|
||||
}
|
||||
|
||||
@@ -432,6 +610,19 @@ impl ComponentPreview {
|
||||
|
||||
impl Render for ComponentPreview {
|
||||
fn render(&mut self, window: &mut Window, cx: &mut Context<Self>) -> impl IntoElement {
|
||||
// TODO: move this into the struct
|
||||
let current_filter = self.filter_editor.update(cx, |input, cx| {
|
||||
if input.is_empty(cx) {
|
||||
String::new()
|
||||
} else {
|
||||
input.editor().read(cx).text(cx).to_string()
|
||||
}
|
||||
});
|
||||
|
||||
if current_filter != self.filter_text {
|
||||
self.filter_text = current_filter;
|
||||
self.update_component_list(cx);
|
||||
}
|
||||
let sidebar_entries = self.scope_ordered_entries();
|
||||
let active_page = self.active_page.clone();
|
||||
|
||||
@@ -449,14 +640,22 @@ impl Render for ComponentPreview {
|
||||
.border_color(cx.theme().colors().border)
|
||||
.h_full()
|
||||
.child(
|
||||
uniform_list(
|
||||
gpui::uniform_list(
|
||||
cx.entity().clone(),
|
||||
"component-nav",
|
||||
sidebar_entries.len(),
|
||||
move |this, range, _window, cx| {
|
||||
range
|
||||
.map(|ix| {
|
||||
this.render_sidebar_entry(ix, &sidebar_entries[ix], cx)
|
||||
.filter_map(|ix| {
|
||||
if ix < sidebar_entries.len() {
|
||||
Some(this.render_sidebar_entry(
|
||||
ix,
|
||||
&sidebar_entries[ix],
|
||||
cx,
|
||||
))
|
||||
} else {
|
||||
None
|
||||
}
|
||||
})
|
||||
.collect()
|
||||
},
|
||||
@@ -481,12 +680,29 @@ impl Render for ComponentPreview {
|
||||
),
|
||||
),
|
||||
)
|
||||
.child(match active_page {
|
||||
PreviewPage::AllComponents => self.render_all_components().into_any_element(),
|
||||
PreviewPage::Component(id) => self
|
||||
.render_component_page(&id, window, cx)
|
||||
.into_any_element(),
|
||||
})
|
||||
.child(
|
||||
v_flex()
|
||||
.id("content-area")
|
||||
.flex_1()
|
||||
.size_full()
|
||||
.overflow_hidden()
|
||||
.child(
|
||||
div()
|
||||
.p_2()
|
||||
.w_full()
|
||||
.border_b_1()
|
||||
.border_color(cx.theme().colors().border)
|
||||
.child(self.filter_editor.clone()),
|
||||
)
|
||||
.child(match active_page {
|
||||
PreviewPage::AllComponents => {
|
||||
self.render_all_components(cx).into_any_element()
|
||||
}
|
||||
PreviewPage::Component(id) => self
|
||||
.render_component_page(&id, window, cx)
|
||||
.into_any_element(),
|
||||
}),
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
@@ -498,6 +714,21 @@ impl Focusable for ComponentPreview {
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
|
||||
pub struct ActivePageId(pub String);
|
||||
|
||||
impl Default for ActivePageId {
|
||||
fn default() -> Self {
|
||||
ActivePageId("AllComponents".to_string())
|
||||
}
|
||||
}
|
||||
|
||||
impl From<ComponentId> for ActivePageId {
|
||||
fn from(id: ComponentId) -> Self {
|
||||
ActivePageId(id.0.to_string())
|
||||
}
|
||||
}
|
||||
|
||||
impl Item for ComponentPreview {
|
||||
type Event = ItemEvent;
|
||||
|
||||
@@ -516,7 +747,7 @@ impl Item for ComponentPreview {
|
||||
fn clone_on_split(
|
||||
&self,
|
||||
_workspace_id: Option<WorkspaceId>,
|
||||
_window: &mut Window,
|
||||
window: &mut Window,
|
||||
cx: &mut Context<Self>,
|
||||
) -> Option<gpui::Entity<Self>>
|
||||
where
|
||||
@@ -535,6 +766,7 @@ impl Item for ComponentPreview {
|
||||
user_store,
|
||||
selected_index,
|
||||
Some(active_page),
|
||||
window,
|
||||
cx,
|
||||
)
|
||||
}))
|
||||
@@ -543,6 +775,15 @@ impl Item for ComponentPreview {
|
||||
fn to_item_events(event: &Self::Event, mut f: impl FnMut(workspace::item::ItemEvent)) {
|
||||
f(*event)
|
||||
}
|
||||
|
||||
fn added_to_workspace(
|
||||
&mut self,
|
||||
workspace: &mut Workspace,
|
||||
_window: &mut Window,
|
||||
_cx: &mut Context<Self>,
|
||||
) {
|
||||
self.workspace_id = workspace.database_id();
|
||||
}
|
||||
}
|
||||
|
||||
impl SerializableItem for ComponentPreview {
|
||||
@@ -553,26 +794,53 @@ impl SerializableItem for ComponentPreview {
|
||||
fn deserialize(
|
||||
project: Entity<Project>,
|
||||
workspace: WeakEntity<Workspace>,
|
||||
_workspace_id: WorkspaceId,
|
||||
_item_id: ItemId,
|
||||
workspace_id: WorkspaceId,
|
||||
item_id: ItemId,
|
||||
window: &mut Window,
|
||||
cx: &mut App,
|
||||
) -> Task<gpui::Result<Entity<Self>>> {
|
||||
let deserialized_active_page =
|
||||
match COMPONENT_PREVIEW_DB.get_active_page(item_id, workspace_id) {
|
||||
Ok(page) => {
|
||||
if let Some(page) = page {
|
||||
ActivePageId(page)
|
||||
} else {
|
||||
ActivePageId::default()
|
||||
}
|
||||
}
|
||||
Err(_) => ActivePageId::default(),
|
||||
};
|
||||
|
||||
let user_store = project.read(cx).user_store().clone();
|
||||
let language_registry = project.read(cx).languages().clone();
|
||||
let preview_page = if deserialized_active_page.0 == ActivePageId::default().0 {
|
||||
Some(PreviewPage::default())
|
||||
} else {
|
||||
let component_str = deserialized_active_page.0;
|
||||
let component_registry = components();
|
||||
let all_components = component_registry.all();
|
||||
let found_component = all_components.iter().find(|c| c.id().0 == component_str);
|
||||
|
||||
if let Some(component) = found_component {
|
||||
Some(PreviewPage::Component(component.id().clone()))
|
||||
} else {
|
||||
Some(PreviewPage::default())
|
||||
}
|
||||
};
|
||||
|
||||
window.spawn(cx, async move |cx| {
|
||||
let user_store = user_store.clone();
|
||||
let language_registry = language_registry.clone();
|
||||
let weak_workspace = workspace.clone();
|
||||
cx.update(|_, cx| {
|
||||
cx.update(move |window, cx| {
|
||||
Ok(cx.new(|cx| {
|
||||
ComponentPreview::new(
|
||||
weak_workspace,
|
||||
language_registry,
|
||||
user_store,
|
||||
None,
|
||||
None,
|
||||
preview_page,
|
||||
window,
|
||||
cx,
|
||||
)
|
||||
}))
|
||||
@@ -581,34 +849,41 @@ impl SerializableItem for ComponentPreview {
|
||||
}
|
||||
|
||||
fn cleanup(
|
||||
_workspace_id: WorkspaceId,
|
||||
_alive_items: Vec<ItemId>,
|
||||
workspace_id: WorkspaceId,
|
||||
alive_items: Vec<ItemId>,
|
||||
_window: &mut Window,
|
||||
_cx: &mut App,
|
||||
cx: &mut App,
|
||||
) -> Task<gpui::Result<()>> {
|
||||
Task::ready(Ok(()))
|
||||
// window.spawn(cx, |_| {
|
||||
// ...
|
||||
// })
|
||||
cx.background_spawn(async move {
|
||||
COMPONENT_PREVIEW_DB
|
||||
.delete_unloaded_items(workspace_id, alive_items)
|
||||
.await
|
||||
})
|
||||
}
|
||||
|
||||
fn serialize(
|
||||
&mut self,
|
||||
_workspace: &mut Workspace,
|
||||
_item_id: ItemId,
|
||||
item_id: ItemId,
|
||||
_closing: bool,
|
||||
_window: &mut Window,
|
||||
_cx: &mut Context<Self>,
|
||||
cx: &mut Context<Self>,
|
||||
) -> Option<Task<gpui::Result<()>>> {
|
||||
// TODO: Serialize the active index so we can re-open to the same place
|
||||
None
|
||||
let active_page = self.active_page_id(cx);
|
||||
let workspace_id = self.workspace_id?;
|
||||
Some(cx.background_spawn(async move {
|
||||
COMPONENT_PREVIEW_DB
|
||||
.save_active_page(item_id, workspace_id, active_page.0)
|
||||
.await
|
||||
}))
|
||||
}
|
||||
|
||||
fn should_serialize(&self, _event: &Self::Event) -> bool {
|
||||
false
|
||||
fn should_serialize(&self, event: &Self::Event) -> bool {
|
||||
matches!(event, ItemEvent::UpdateTab)
|
||||
}
|
||||
}
|
||||
|
||||
// TODO: use language registry to allow rendering markdown
|
||||
#[derive(IntoElement)]
|
||||
pub struct ComponentPreviewPage {
|
||||
// languages: Arc<LanguageRegistry>,
|
||||
|
||||
73
crates/component_preview/src/persistence.rs
Normal file
73
crates/component_preview/src/persistence.rs
Normal file
@@ -0,0 +1,73 @@
|
||||
use anyhow::Result;
|
||||
use db::{define_connection, query, sqlez::statement::Statement, sqlez_macros::sql};
|
||||
use workspace::{ItemId, WorkspaceDb, WorkspaceId};
|
||||
|
||||
define_connection! {
|
||||
pub static ref COMPONENT_PREVIEW_DB: ComponentPreviewDb<WorkspaceDb> =
|
||||
&[sql!(
|
||||
CREATE TABLE component_previews (
|
||||
workspace_id INTEGER,
|
||||
item_id INTEGER UNIQUE,
|
||||
active_page_id TEXT,
|
||||
PRIMARY KEY(workspace_id, item_id),
|
||||
FOREIGN KEY(workspace_id) REFERENCES workspaces(workspace_id)
|
||||
ON DELETE CASCADE
|
||||
) STRICT;
|
||||
)];
|
||||
}
|
||||
|
||||
impl ComponentPreviewDb {
|
||||
pub async fn save_active_page(
|
||||
&self,
|
||||
item_id: ItemId,
|
||||
workspace_id: WorkspaceId,
|
||||
active_page_id: String,
|
||||
) -> Result<()> {
|
||||
let query = "INSERT INTO component_previews(item_id, workspace_id, active_page_id)
|
||||
VALUES (?1, ?2, ?3)
|
||||
ON CONFLICT DO UPDATE SET
|
||||
active_page_id = ?3";
|
||||
self.write(move |conn| {
|
||||
let mut statement = Statement::prepare(conn, query)?;
|
||||
let mut next_index = statement.bind(&item_id, 1)?;
|
||||
next_index = statement.bind(&workspace_id, next_index)?;
|
||||
statement.bind(&active_page_id, next_index)?;
|
||||
statement.exec()
|
||||
})
|
||||
.await
|
||||
}
|
||||
|
||||
query! {
|
||||
pub fn get_active_page(item_id: ItemId, workspace_id: WorkspaceId) -> Result<Option<String>> {
|
||||
SELECT active_page_id
|
||||
FROM component_previews
|
||||
WHERE item_id = ? AND workspace_id = ?
|
||||
}
|
||||
}
|
||||
|
||||
pub async fn delete_unloaded_items(
|
||||
&self,
|
||||
workspace: WorkspaceId,
|
||||
alive_items: Vec<ItemId>,
|
||||
) -> Result<()> {
|
||||
let placeholders = alive_items
|
||||
.iter()
|
||||
.map(|_| "?")
|
||||
.collect::<Vec<&str>>()
|
||||
.join(", ");
|
||||
|
||||
let query = format!(
|
||||
"DELETE FROM component_previews WHERE workspace_id = ? AND item_id NOT IN ({placeholders})"
|
||||
);
|
||||
|
||||
self.write(move |conn| {
|
||||
let mut statement = Statement::prepare(conn, query)?;
|
||||
let mut next_index = statement.bind(&workspace, 1)?;
|
||||
for id in alive_items {
|
||||
next_index = statement.bind(&id, next_index)?;
|
||||
}
|
||||
statement.exec()
|
||||
})
|
||||
.await
|
||||
}
|
||||
}
|
||||
@@ -39,7 +39,6 @@ log.workspace = true
|
||||
node_runtime.workspace = true
|
||||
parking_lot.workspace = true
|
||||
paths.workspace = true
|
||||
regex.workspace = true
|
||||
schemars.workspace = true
|
||||
serde.workspace = true
|
||||
serde_json.workspace = true
|
||||
|
||||
@@ -3,13 +3,13 @@ use anyhow::{Context as _, Ok, Result, anyhow};
|
||||
use async_compression::futures::bufread::GzipDecoder;
|
||||
use async_tar::Archive;
|
||||
use async_trait::async_trait;
|
||||
use dap_types::StartDebuggingRequestArguments;
|
||||
use futures::io::BufReader;
|
||||
use gpui::{AsyncApp, SharedString};
|
||||
pub use http_client::{HttpClient, github::latest_github_release};
|
||||
use language::LanguageToolchainStore;
|
||||
use node_runtime::NodeRuntime;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use serde_json::Value;
|
||||
use settings::WorktreeId;
|
||||
use smol::{self, fs::File, lock::Mutex};
|
||||
use std::{
|
||||
@@ -20,9 +20,9 @@ use std::{
|
||||
net::Ipv4Addr,
|
||||
ops::Deref,
|
||||
path::PathBuf,
|
||||
sync::{Arc, LazyLock},
|
||||
sync::Arc,
|
||||
};
|
||||
use task::{DebugAdapterConfig, DebugTaskDefinition};
|
||||
use task::DebugTaskDefinition;
|
||||
use util::ResultExt;
|
||||
|
||||
#[derive(Clone, Debug, PartialEq, Eq)]
|
||||
@@ -93,13 +93,15 @@ pub struct TcpArguments {
|
||||
pub port: u16,
|
||||
pub timeout: Option<u64>,
|
||||
}
|
||||
#[derive(Default, Debug, Clone)]
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct DebugAdapterBinary {
|
||||
pub adapter_name: DebugAdapterName,
|
||||
pub command: String,
|
||||
pub arguments: Option<Vec<OsString>>,
|
||||
pub envs: Option<HashMap<String, String>>,
|
||||
pub cwd: Option<PathBuf>,
|
||||
pub connection: Option<TcpArguments>,
|
||||
pub request_args: StartDebuggingRequestArguments,
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
@@ -220,7 +222,7 @@ pub trait DebugAdapter: 'static + Send + Sync {
|
||||
async fn get_binary(
|
||||
&self,
|
||||
delegate: &dyn DapDelegate,
|
||||
config: &DebugAdapterConfig,
|
||||
config: &DebugTaskDefinition,
|
||||
user_installed_path: Option<PathBuf>,
|
||||
cx: &mut AsyncApp,
|
||||
) -> Result<DebugAdapterBinary> {
|
||||
@@ -284,21 +286,11 @@ pub trait DebugAdapter: 'static + Send + Sync {
|
||||
async fn get_installed_binary(
|
||||
&self,
|
||||
delegate: &dyn DapDelegate,
|
||||
config: &DebugAdapterConfig,
|
||||
config: &DebugTaskDefinition,
|
||||
user_installed_path: Option<PathBuf>,
|
||||
cx: &mut AsyncApp,
|
||||
) -> Result<DebugAdapterBinary>;
|
||||
|
||||
/// Should return base configuration to make the debug adapter work
|
||||
fn request_args(&self, config: &DebugTaskDefinition) -> Value;
|
||||
|
||||
fn attach_processes_filter(&self) -> regex::Regex {
|
||||
EMPTY_REGEX.clone()
|
||||
}
|
||||
}
|
||||
|
||||
static EMPTY_REGEX: LazyLock<regex::Regex> =
|
||||
LazyLock::new(|| regex::Regex::new("").expect("Regex compilation to succeed"));
|
||||
#[cfg(any(test, feature = "test-support"))]
|
||||
pub struct FakeAdapter {}
|
||||
|
||||
@@ -309,6 +301,31 @@ impl FakeAdapter {
|
||||
pub fn new() -> Self {
|
||||
Self {}
|
||||
}
|
||||
|
||||
fn request_args(&self, config: &DebugTaskDefinition) -> StartDebuggingRequestArguments {
|
||||
use serde_json::json;
|
||||
use task::DebugRequestType;
|
||||
|
||||
let value = json!({
|
||||
"request": match config.request {
|
||||
DebugRequestType::Launch(_) => "launch",
|
||||
DebugRequestType::Attach(_) => "attach",
|
||||
},
|
||||
"process_id": if let DebugRequestType::Attach(attach_config) = &config.request {
|
||||
attach_config.process_id
|
||||
} else {
|
||||
None
|
||||
},
|
||||
});
|
||||
let request = match config.request {
|
||||
DebugRequestType::Launch(_) => dap_types::StartDebuggingRequestArgumentsRequest::Launch,
|
||||
DebugRequestType::Attach(_) => dap_types::StartDebuggingRequestArgumentsRequest::Attach,
|
||||
};
|
||||
StartDebuggingRequestArguments {
|
||||
configuration: value,
|
||||
request,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(any(test, feature = "test-support"))]
|
||||
@@ -321,16 +338,18 @@ impl DebugAdapter for FakeAdapter {
|
||||
async fn get_binary(
|
||||
&self,
|
||||
_: &dyn DapDelegate,
|
||||
_: &DebugAdapterConfig,
|
||||
config: &DebugTaskDefinition,
|
||||
_: Option<PathBuf>,
|
||||
_: &mut AsyncApp,
|
||||
) -> Result<DebugAdapterBinary> {
|
||||
Ok(DebugAdapterBinary {
|
||||
adapter_name: Self::ADAPTER_NAME.into(),
|
||||
command: "command".into(),
|
||||
arguments: None,
|
||||
connection: None,
|
||||
envs: None,
|
||||
cwd: None,
|
||||
request_args: self.request_args(config),
|
||||
})
|
||||
}
|
||||
|
||||
@@ -352,33 +371,10 @@ impl DebugAdapter for FakeAdapter {
|
||||
async fn get_installed_binary(
|
||||
&self,
|
||||
_: &dyn DapDelegate,
|
||||
_: &DebugAdapterConfig,
|
||||
_: &DebugTaskDefinition,
|
||||
_: Option<PathBuf>,
|
||||
_: &mut AsyncApp,
|
||||
) -> Result<DebugAdapterBinary> {
|
||||
unimplemented!("get installed binary");
|
||||
}
|
||||
|
||||
fn request_args(&self, config: &DebugTaskDefinition) -> Value {
|
||||
use serde_json::json;
|
||||
use task::DebugRequestType;
|
||||
|
||||
json!({
|
||||
"request": match config.request {
|
||||
DebugRequestType::Launch(_) => "launch",
|
||||
DebugRequestType::Attach(_) => "attach",
|
||||
},
|
||||
"process_id": if let DebugRequestType::Attach(attach_config) = &config.request {
|
||||
attach_config.process_id
|
||||
} else {
|
||||
None
|
||||
},
|
||||
})
|
||||
}
|
||||
|
||||
fn attach_processes_filter(&self) -> regex::Regex {
|
||||
static REGEX: LazyLock<regex::Regex> =
|
||||
LazyLock::new(|| regex::Regex::new("^fake-binary").unwrap());
|
||||
REGEX.clone()
|
||||
}
|
||||
}
|
||||
|
||||
@@ -39,7 +39,6 @@ impl SessionId {
|
||||
/// Represents a connection to the debug adapter process, either via stdout/stdin or a socket.
|
||||
pub struct DebugAdapterClient {
|
||||
id: SessionId,
|
||||
name: DebugAdapterName,
|
||||
sequence_count: AtomicU64,
|
||||
binary: DebugAdapterBinary,
|
||||
executor: BackgroundExecutor,
|
||||
@@ -51,7 +50,6 @@ pub type DapMessageHandler = Box<dyn FnMut(Message) + 'static + Send + Sync>;
|
||||
impl DebugAdapterClient {
|
||||
pub async fn start(
|
||||
id: SessionId,
|
||||
name: DebugAdapterName,
|
||||
binary: DebugAdapterBinary,
|
||||
message_handler: DapMessageHandler,
|
||||
cx: AsyncApp,
|
||||
@@ -60,7 +58,6 @@ impl DebugAdapterClient {
|
||||
TransportDelegate::start(&binary, cx.clone()).await?;
|
||||
let this = Self {
|
||||
id,
|
||||
name,
|
||||
binary,
|
||||
transport_delegate,
|
||||
sequence_count: AtomicU64::new(1),
|
||||
@@ -91,6 +88,7 @@ impl DebugAdapterClient {
|
||||
) -> Result<Self> {
|
||||
let binary = match self.transport_delegate.transport() {
|
||||
crate::transport::Transport::Tcp(tcp_transport) => DebugAdapterBinary {
|
||||
adapter_name: binary.adapter_name,
|
||||
command: binary.command,
|
||||
arguments: binary.arguments,
|
||||
envs: binary.envs,
|
||||
@@ -100,11 +98,12 @@ impl DebugAdapterClient {
|
||||
port: tcp_transport.port,
|
||||
timeout: Some(tcp_transport.timeout),
|
||||
}),
|
||||
request_args: binary.request_args,
|
||||
},
|
||||
_ => self.binary.clone(),
|
||||
};
|
||||
|
||||
Self::start(session_id, self.name(), binary, message_handler, cx).await
|
||||
Self::start(session_id, binary, message_handler, cx).await
|
||||
}
|
||||
|
||||
async fn handle_receive_messages(
|
||||
@@ -189,7 +188,17 @@ impl DebugAdapterClient {
|
||||
|
||||
let response = response??;
|
||||
match response.success {
|
||||
true => Ok(serde_json::from_value(response.body.unwrap_or_default())?),
|
||||
true => {
|
||||
if let Some(json) = response.body {
|
||||
Ok(serde_json::from_value(json)?)
|
||||
// Note: dap types configure themselves to return `None` when an empty object is received,
|
||||
// which then fails here...
|
||||
} else if let Ok(result) = serde_json::from_value(serde_json::Value::Object(Default::default())) {
|
||||
Ok(result)
|
||||
} else {
|
||||
Ok(serde_json::from_value(Default::default())?)
|
||||
}
|
||||
}
|
||||
false => Err(anyhow!("Request failed: {}", response.message.unwrap_or_default())),
|
||||
}
|
||||
}
|
||||
@@ -211,7 +220,7 @@ impl DebugAdapterClient {
|
||||
}
|
||||
|
||||
pub fn name(&self) -> DebugAdapterName {
|
||||
self.name.clone()
|
||||
self.binary.adapter_name.clone()
|
||||
}
|
||||
pub fn binary(&self) -> &DebugAdapterBinary {
|
||||
&self.binary
|
||||
@@ -238,14 +247,14 @@ impl DebugAdapterClient {
|
||||
}
|
||||
|
||||
#[cfg(any(test, feature = "test-support"))]
|
||||
pub async fn on_request<R: dap_types::requests::Request, F>(&self, handler: F)
|
||||
pub fn on_request<R: dap_types::requests::Request, F>(&self, handler: F)
|
||||
where
|
||||
F: 'static
|
||||
+ Send
|
||||
+ FnMut(u64, R::Arguments) -> Result<R::Response, dap_types::ErrorResponse>,
|
||||
{
|
||||
let transport = self.transport_delegate.transport().as_fake();
|
||||
transport.on_request::<R, F>(handler).await;
|
||||
transport.on_request::<R, F>(handler);
|
||||
}
|
||||
|
||||
#[cfg(any(test, feature = "test-support"))]
|
||||
@@ -282,7 +291,7 @@ mod tests {
|
||||
use crate::{client::DebugAdapterClient, debugger_settings::DebuggerSettings};
|
||||
use dap_types::{
|
||||
Capabilities, InitializeRequestArguments, InitializeRequestArgumentsPathFormat,
|
||||
RunInTerminalRequestArguments,
|
||||
RunInTerminalRequestArguments, StartDebuggingRequestArguments,
|
||||
messages::Events,
|
||||
requests::{Initialize, Request, RunInTerminal},
|
||||
};
|
||||
@@ -312,13 +321,17 @@ mod tests {
|
||||
|
||||
let client = DebugAdapterClient::start(
|
||||
crate::client::SessionId(1),
|
||||
DebugAdapterName("adapter".into()),
|
||||
DebugAdapterBinary {
|
||||
adapter_name: "adapter".into(),
|
||||
command: "command".into(),
|
||||
arguments: Default::default(),
|
||||
envs: Default::default(),
|
||||
connection: None,
|
||||
cwd: None,
|
||||
request_args: StartDebuggingRequestArguments {
|
||||
configuration: serde_json::Value::Null,
|
||||
request: dap_types::StartDebuggingRequestArgumentsRequest::Launch,
|
||||
},
|
||||
},
|
||||
Box::new(|_| panic!("Did not expect to hit this code path")),
|
||||
cx.to_async(),
|
||||
@@ -326,14 +339,12 @@ mod tests {
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
client
|
||||
.on_request::<Initialize, _>(move |_, _| {
|
||||
Ok(dap_types::Capabilities {
|
||||
supports_configuration_done_request: Some(true),
|
||||
..Default::default()
|
||||
})
|
||||
client.on_request::<Initialize, _>(move |_, _| {
|
||||
Ok(dap_types::Capabilities {
|
||||
supports_configuration_done_request: Some(true),
|
||||
..Default::default()
|
||||
})
|
||||
.await;
|
||||
});
|
||||
|
||||
cx.run_until_parked();
|
||||
|
||||
@@ -381,13 +392,17 @@ mod tests {
|
||||
|
||||
let client = DebugAdapterClient::start(
|
||||
crate::client::SessionId(1),
|
||||
DebugAdapterName("adapter".into()),
|
||||
DebugAdapterBinary {
|
||||
adapter_name: "adapter".into(),
|
||||
command: "command".into(),
|
||||
arguments: Default::default(),
|
||||
envs: Default::default(),
|
||||
connection: None,
|
||||
cwd: None,
|
||||
request_args: StartDebuggingRequestArguments {
|
||||
configuration: serde_json::Value::Null,
|
||||
request: dap_types::StartDebuggingRequestArgumentsRequest::Launch,
|
||||
},
|
||||
},
|
||||
Box::new({
|
||||
let called_event_handler = called_event_handler.clone();
|
||||
@@ -431,13 +446,17 @@ mod tests {
|
||||
|
||||
let client = DebugAdapterClient::start(
|
||||
crate::client::SessionId(1),
|
||||
DebugAdapterName("test-adapter".into()),
|
||||
DebugAdapterBinary {
|
||||
adapter_name: "test-adapter".into(),
|
||||
command: "command".into(),
|
||||
arguments: Default::default(),
|
||||
envs: Default::default(),
|
||||
connection: None,
|
||||
cwd: None,
|
||||
request_args: dap_types::StartDebuggingRequestArguments {
|
||||
configuration: serde_json::Value::Null,
|
||||
request: dap_types::StartDebuggingRequestArgumentsRequest::Launch,
|
||||
},
|
||||
},
|
||||
Box::new({
|
||||
let called_event_handler = called_event_handler.clone();
|
||||
|
||||
@@ -8,7 +8,7 @@ struct DapRegistryState {
|
||||
adapters: BTreeMap<DebugAdapterName, Arc<dyn DebugAdapter>>,
|
||||
}
|
||||
|
||||
#[derive(Default)]
|
||||
#[derive(Clone, Default)]
|
||||
/// Stores available debug adapters.
|
||||
pub struct DapRegistry(Arc<RwLock<DapRegistryState>>);
|
||||
|
||||
|
||||
@@ -699,14 +699,8 @@ impl StdioTransport {
|
||||
}
|
||||
|
||||
#[cfg(any(test, feature = "test-support"))]
|
||||
type RequestHandler = Box<
|
||||
dyn Send
|
||||
+ FnMut(
|
||||
u64,
|
||||
serde_json::Value,
|
||||
Arc<Mutex<async_pipe::PipeWriter>>,
|
||||
) -> std::pin::Pin<Box<dyn std::future::Future<Output = ()> + Send>>,
|
||||
>;
|
||||
type RequestHandler =
|
||||
Box<dyn Send + FnMut(u64, serde_json::Value) -> dap_types::messages::Response>;
|
||||
|
||||
#[cfg(any(test, feature = "test-support"))]
|
||||
type ResponseHandler = Box<dyn Send + Fn(Response)>;
|
||||
@@ -714,45 +708,41 @@ type ResponseHandler = Box<dyn Send + Fn(Response)>;
|
||||
#[cfg(any(test, feature = "test-support"))]
|
||||
pub struct FakeTransport {
|
||||
// for sending fake response back from adapter side
|
||||
request_handlers: Arc<Mutex<HashMap<&'static str, RequestHandler>>>,
|
||||
request_handlers: Arc<parking_lot::Mutex<HashMap<&'static str, RequestHandler>>>,
|
||||
// for reverse request responses
|
||||
response_handlers: Arc<Mutex<HashMap<&'static str, ResponseHandler>>>,
|
||||
response_handlers: Arc<parking_lot::Mutex<HashMap<&'static str, ResponseHandler>>>,
|
||||
}
|
||||
|
||||
#[cfg(any(test, feature = "test-support"))]
|
||||
impl FakeTransport {
|
||||
pub async fn on_request<R: dap_types::requests::Request, F>(&self, mut handler: F)
|
||||
pub fn on_request<R: dap_types::requests::Request, F>(&self, mut handler: F)
|
||||
where
|
||||
F: 'static + Send + FnMut(u64, R::Arguments) -> Result<R::Response, ErrorResponse>,
|
||||
{
|
||||
self.request_handlers.lock().await.insert(
|
||||
self.request_handlers.lock().insert(
|
||||
R::COMMAND,
|
||||
Box::new(
|
||||
move |seq, args, writer: Arc<Mutex<async_pipe::PipeWriter>>| {
|
||||
let response = handler(seq, serde_json::from_value(args).unwrap());
|
||||
|
||||
let message = serde_json::to_string(&Message::Response(Response {
|
||||
Box::new(move |seq, args| {
|
||||
let result = handler(seq, serde_json::from_value(args).unwrap());
|
||||
let response = match result {
|
||||
Ok(response) => Response {
|
||||
seq: seq + 1,
|
||||
request_seq: seq,
|
||||
success: response.as_ref().is_ok(),
|
||||
success: true,
|
||||
command: R::COMMAND.into(),
|
||||
body: util::maybe!({ serde_json::to_value(response.ok()?).ok() }),
|
||||
body: Some(serde_json::to_value(response).unwrap()),
|
||||
message: None,
|
||||
}))
|
||||
.unwrap();
|
||||
|
||||
let writer = writer.clone();
|
||||
|
||||
Box::pin(async move {
|
||||
let mut writer = writer.lock().await;
|
||||
writer
|
||||
.write_all(TransportDelegate::build_rpc_message(message).as_bytes())
|
||||
.await
|
||||
.unwrap();
|
||||
writer.flush().await.unwrap();
|
||||
})
|
||||
},
|
||||
),
|
||||
},
|
||||
Err(response) => Response {
|
||||
seq: seq + 1,
|
||||
request_seq: seq,
|
||||
success: false,
|
||||
command: R::COMMAND.into(),
|
||||
body: Some(serde_json::to_value(response).unwrap()),
|
||||
message: None,
|
||||
},
|
||||
};
|
||||
response
|
||||
}),
|
||||
);
|
||||
}
|
||||
|
||||
@@ -762,14 +752,13 @@ impl FakeTransport {
|
||||
{
|
||||
self.response_handlers
|
||||
.lock()
|
||||
.await
|
||||
.insert(R::COMMAND, Box::new(handler));
|
||||
}
|
||||
|
||||
async fn start(cx: AsyncApp) -> Result<(TransportPipe, Self)> {
|
||||
let this = Self {
|
||||
request_handlers: Arc::new(Mutex::new(HashMap::default())),
|
||||
response_handlers: Arc::new(Mutex::new(HashMap::default())),
|
||||
request_handlers: Arc::new(parking_lot::Mutex::new(HashMap::default())),
|
||||
response_handlers: Arc::new(parking_lot::Mutex::new(HashMap::default())),
|
||||
};
|
||||
use dap_types::requests::{Request, RunInTerminal, StartDebugging};
|
||||
use serde_json::json;
|
||||
@@ -816,23 +805,31 @@ impl FakeTransport {
|
||||
.unwrap();
|
||||
writer.flush().await.unwrap();
|
||||
} else {
|
||||
if let Some(handle) = request_handlers
|
||||
let response = if let Some(handle) = request_handlers
|
||||
.lock()
|
||||
.await
|
||||
.get_mut(request.command.as_str())
|
||||
{
|
||||
handle(
|
||||
request.seq,
|
||||
request.arguments.unwrap_or(json!({})),
|
||||
stdout_writer.clone(),
|
||||
)
|
||||
.await;
|
||||
} else {
|
||||
log::error!(
|
||||
"No request handler for {}",
|
||||
request.command
|
||||
);
|
||||
}
|
||||
panic!("No request handler for {}", request.command);
|
||||
};
|
||||
let message =
|
||||
serde_json::to_string(&Message::Response(response))
|
||||
.unwrap();
|
||||
|
||||
let mut writer = stdout_writer.lock().await;
|
||||
|
||||
writer
|
||||
.write_all(
|
||||
TransportDelegate::build_rpc_message(message)
|
||||
.as_bytes(),
|
||||
)
|
||||
.await
|
||||
.unwrap();
|
||||
writer.flush().await.unwrap();
|
||||
}
|
||||
}
|
||||
Message::Event(event) => {
|
||||
@@ -850,10 +847,8 @@ impl FakeTransport {
|
||||
writer.flush().await.unwrap();
|
||||
}
|
||||
Message::Response(response) => {
|
||||
if let Some(handle) = response_handlers
|
||||
.lock()
|
||||
.await
|
||||
.get(response.command.as_str())
|
||||
if let Some(handle) =
|
||||
response_handlers.lock().get(response.command.as_str())
|
||||
{
|
||||
handle(response);
|
||||
} else {
|
||||
|
||||
@@ -27,7 +27,6 @@ dap.workspace = true
|
||||
gpui.workspace = true
|
||||
language.workspace = true
|
||||
paths.workspace = true
|
||||
regex.workspace = true
|
||||
serde.workspace = true
|
||||
serde_json.workspace = true
|
||||
task.workspace = true
|
||||
|
||||
@@ -4,7 +4,7 @@ use anyhow::{Result, bail};
|
||||
use async_trait::async_trait;
|
||||
use dap::adapters::latest_github_release;
|
||||
use gpui::AsyncApp;
|
||||
use task::{DebugAdapterConfig, DebugRequestType, DebugTaskDefinition};
|
||||
use task::{DebugRequestType, DebugTaskDefinition};
|
||||
|
||||
use crate::*;
|
||||
|
||||
@@ -15,6 +15,42 @@ pub(crate) struct CodeLldbDebugAdapter {
|
||||
|
||||
impl CodeLldbDebugAdapter {
|
||||
const ADAPTER_NAME: &'static str = "CodeLLDB";
|
||||
|
||||
fn request_args(&self, config: &DebugTaskDefinition) -> dap::StartDebuggingRequestArguments {
|
||||
let mut configuration = json!({
|
||||
"request": match config.request {
|
||||
DebugRequestType::Launch(_) => "launch",
|
||||
DebugRequestType::Attach(_) => "attach",
|
||||
},
|
||||
});
|
||||
let map = configuration.as_object_mut().unwrap();
|
||||
// CodeLLDB uses `name` for a terminal label.
|
||||
map.insert("name".into(), Value::String(config.label.clone()));
|
||||
let request = config.request.to_dap();
|
||||
match &config.request {
|
||||
DebugRequestType::Attach(attach) => {
|
||||
map.insert("pid".into(), attach.process_id.into());
|
||||
}
|
||||
DebugRequestType::Launch(launch) => {
|
||||
map.insert("program".into(), launch.program.clone().into());
|
||||
|
||||
if !launch.args.is_empty() {
|
||||
map.insert("args".into(), launch.args.clone().into());
|
||||
}
|
||||
|
||||
if let Some(stop_on_entry) = config.stop_on_entry {
|
||||
map.insert("stopOnEntry".into(), stop_on_entry.into());
|
||||
}
|
||||
if let Some(cwd) = launch.cwd.as_ref() {
|
||||
map.insert("cwd".into(), cwd.to_string_lossy().into_owned().into());
|
||||
}
|
||||
}
|
||||
}
|
||||
dap::StartDebuggingRequestArguments {
|
||||
request,
|
||||
configuration,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[async_trait(?Send)]
|
||||
@@ -86,7 +122,7 @@ impl DebugAdapter for CodeLldbDebugAdapter {
|
||||
async fn get_installed_binary(
|
||||
&self,
|
||||
_: &dyn DapDelegate,
|
||||
_: &DebugAdapterConfig,
|
||||
config: &DebugTaskDefinition,
|
||||
_: Option<PathBuf>,
|
||||
_: &mut AsyncApp,
|
||||
) -> Result<DebugAdapterBinary> {
|
||||
@@ -105,37 +141,16 @@ impl DebugAdapter for CodeLldbDebugAdapter {
|
||||
Ok(DebugAdapterBinary {
|
||||
command,
|
||||
cwd: Some(adapter_dir),
|
||||
..Default::default()
|
||||
arguments: Some(vec![
|
||||
"--settings".into(),
|
||||
json!({"sourceLanguages": ["cpp", "rust"]})
|
||||
.to_string()
|
||||
.into(),
|
||||
]),
|
||||
request_args: self.request_args(config),
|
||||
adapter_name: "test".into(),
|
||||
envs: None,
|
||||
connection: None,
|
||||
})
|
||||
}
|
||||
|
||||
fn request_args(&self, config: &DebugTaskDefinition) -> Value {
|
||||
let mut args = json!({
|
||||
"request": match config.request {
|
||||
DebugRequestType::Launch(_) => "launch",
|
||||
DebugRequestType::Attach(_) => "attach",
|
||||
},
|
||||
});
|
||||
let map = args.as_object_mut().unwrap();
|
||||
match &config.request {
|
||||
DebugRequestType::Attach(attach) => {
|
||||
map.insert("pid".into(), attach.process_id.into());
|
||||
}
|
||||
DebugRequestType::Launch(launch) => {
|
||||
map.insert("program".into(), launch.program.clone().into());
|
||||
|
||||
if !launch.args.is_empty() {
|
||||
map.insert("args".into(), launch.args.clone().into());
|
||||
}
|
||||
|
||||
if let Some(stop_on_entry) = config.stop_on_entry {
|
||||
map.insert("stopOnEntry".into(), stop_on_entry.into());
|
||||
}
|
||||
if let Some(cwd) = launch.cwd.as_ref() {
|
||||
map.insert("cwd".into(), cwd.to_string_lossy().into_owned().into());
|
||||
}
|
||||
}
|
||||
}
|
||||
args
|
||||
}
|
||||
}
|
||||
|
||||
@@ -2,7 +2,6 @@ mod codelldb;
|
||||
mod gdb;
|
||||
mod go;
|
||||
mod javascript;
|
||||
mod lldb;
|
||||
mod php;
|
||||
mod python;
|
||||
|
||||
@@ -12,7 +11,7 @@ use anyhow::{Result, anyhow};
|
||||
use async_trait::async_trait;
|
||||
use codelldb::CodeLldbDebugAdapter;
|
||||
use dap::{
|
||||
DapRegistry,
|
||||
DapRegistry, DebugRequestType,
|
||||
adapters::{
|
||||
self, AdapterVersion, DapDelegate, DebugAdapter, DebugAdapterBinary, DebugAdapterName,
|
||||
GithubRepo,
|
||||
@@ -21,18 +20,16 @@ use dap::{
|
||||
use gdb::GdbDebugAdapter;
|
||||
use go::GoDebugAdapter;
|
||||
use javascript::JsDebugAdapter;
|
||||
use lldb::LldbDebugAdapter;
|
||||
use php::PhpDebugAdapter;
|
||||
use python::PythonDebugAdapter;
|
||||
use serde_json::{Value, json};
|
||||
use task::{DebugAdapterConfig, TCPHost};
|
||||
use task::TCPHost;
|
||||
|
||||
pub fn init(registry: Arc<DapRegistry>) {
|
||||
registry.add_adapter(Arc::from(CodeLldbDebugAdapter::default()));
|
||||
registry.add_adapter(Arc::from(PythonDebugAdapter));
|
||||
registry.add_adapter(Arc::from(PhpDebugAdapter));
|
||||
registry.add_adapter(Arc::from(JsDebugAdapter::default()));
|
||||
registry.add_adapter(Arc::from(LldbDebugAdapter));
|
||||
registry.add_adapter(Arc::from(JsDebugAdapter));
|
||||
registry.add_adapter(Arc::from(GoDebugAdapter));
|
||||
registry.add_adapter(Arc::from(GdbDebugAdapter));
|
||||
}
|
||||
@@ -51,3 +48,16 @@ pub(crate) async fn configure_tcp_connection(
|
||||
|
||||
Ok((host, port, timeout))
|
||||
}
|
||||
|
||||
trait ToDap {
|
||||
fn to_dap(&self) -> dap::StartDebuggingRequestArgumentsRequest;
|
||||
}
|
||||
|
||||
impl ToDap for DebugRequestType {
|
||||
fn to_dap(&self) -> dap::StartDebuggingRequestArgumentsRequest {
|
||||
match self {
|
||||
Self::Launch(_) => dap::StartDebuggingRequestArgumentsRequest::Launch,
|
||||
Self::Attach(_) => dap::StartDebuggingRequestArgumentsRequest::Attach,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -2,8 +2,9 @@ use std::ffi::OsStr;
|
||||
|
||||
use anyhow::{Result, bail};
|
||||
use async_trait::async_trait;
|
||||
use dap::StartDebuggingRequestArguments;
|
||||
use gpui::AsyncApp;
|
||||
use task::{DebugAdapterConfig, DebugRequestType, DebugTaskDefinition};
|
||||
use task::{DebugRequestType, DebugTaskDefinition};
|
||||
|
||||
use crate::*;
|
||||
|
||||
@@ -12,68 +13,8 @@ pub(crate) struct GdbDebugAdapter;
|
||||
|
||||
impl GdbDebugAdapter {
|
||||
const ADAPTER_NAME: &'static str = "GDB";
|
||||
}
|
||||
|
||||
#[async_trait(?Send)]
|
||||
impl DebugAdapter for GdbDebugAdapter {
|
||||
fn name(&self) -> DebugAdapterName {
|
||||
DebugAdapterName(Self::ADAPTER_NAME.into())
|
||||
}
|
||||
|
||||
async fn get_binary(
|
||||
&self,
|
||||
delegate: &dyn DapDelegate,
|
||||
_: &DebugAdapterConfig,
|
||||
user_installed_path: Option<std::path::PathBuf>,
|
||||
_: &mut AsyncApp,
|
||||
) -> Result<DebugAdapterBinary> {
|
||||
let user_setting_path = user_installed_path
|
||||
.filter(|p| p.exists())
|
||||
.and_then(|p| p.to_str().map(|s| s.to_string()));
|
||||
|
||||
let gdb_path = delegate
|
||||
.which(OsStr::new("gdb"))
|
||||
.and_then(|p| p.to_str().map(|s| s.to_string()))
|
||||
.ok_or(anyhow!("Could not find gdb in path"));
|
||||
|
||||
if gdb_path.is_err() && user_setting_path.is_none() {
|
||||
bail!("Could not find gdb path or it's not installed");
|
||||
}
|
||||
|
||||
let gdb_path = user_setting_path.unwrap_or(gdb_path?);
|
||||
|
||||
Ok(DebugAdapterBinary {
|
||||
command: gdb_path,
|
||||
arguments: Some(vec!["-i=dap".into()]),
|
||||
envs: None,
|
||||
cwd: None,
|
||||
connection: None,
|
||||
})
|
||||
}
|
||||
|
||||
async fn install_binary(
|
||||
&self,
|
||||
_version: AdapterVersion,
|
||||
_delegate: &dyn DapDelegate,
|
||||
) -> Result<()> {
|
||||
unimplemented!("GDB debug adapter cannot be installed by Zed (yet)")
|
||||
}
|
||||
|
||||
async fn fetch_latest_adapter_version(&self, _: &dyn DapDelegate) -> Result<AdapterVersion> {
|
||||
unimplemented!("Fetch latest GDB version not implemented (yet)")
|
||||
}
|
||||
|
||||
async fn get_installed_binary(
|
||||
&self,
|
||||
_: &dyn DapDelegate,
|
||||
_: &DebugAdapterConfig,
|
||||
_: Option<std::path::PathBuf>,
|
||||
_: &mut AsyncApp,
|
||||
) -> Result<DebugAdapterBinary> {
|
||||
unimplemented!("GDB cannot be installed by Zed (yet)")
|
||||
}
|
||||
|
||||
fn request_args(&self, config: &DebugTaskDefinition) -> Value {
|
||||
fn request_args(&self, config: &DebugTaskDefinition) -> StartDebuggingRequestArguments {
|
||||
let mut args = json!({
|
||||
"request": match config.request {
|
||||
DebugRequestType::Launch(_) => "launch",
|
||||
@@ -105,6 +46,71 @@ impl DebugAdapter for GdbDebugAdapter {
|
||||
}
|
||||
}
|
||||
}
|
||||
args
|
||||
StartDebuggingRequestArguments {
|
||||
configuration: args,
|
||||
request: config.request.to_dap(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[async_trait(?Send)]
|
||||
impl DebugAdapter for GdbDebugAdapter {
|
||||
fn name(&self) -> DebugAdapterName {
|
||||
DebugAdapterName(Self::ADAPTER_NAME.into())
|
||||
}
|
||||
|
||||
async fn get_binary(
|
||||
&self,
|
||||
delegate: &dyn DapDelegate,
|
||||
config: &DebugTaskDefinition,
|
||||
user_installed_path: Option<std::path::PathBuf>,
|
||||
_: &mut AsyncApp,
|
||||
) -> Result<DebugAdapterBinary> {
|
||||
let user_setting_path = user_installed_path
|
||||
.filter(|p| p.exists())
|
||||
.and_then(|p| p.to_str().map(|s| s.to_string()));
|
||||
|
||||
let gdb_path = delegate
|
||||
.which(OsStr::new("gdb"))
|
||||
.and_then(|p| p.to_str().map(|s| s.to_string()))
|
||||
.ok_or(anyhow!("Could not find gdb in path"));
|
||||
|
||||
if gdb_path.is_err() && user_setting_path.is_none() {
|
||||
bail!("Could not find gdb path or it's not installed");
|
||||
}
|
||||
|
||||
let gdb_path = user_setting_path.unwrap_or(gdb_path?);
|
||||
|
||||
Ok(DebugAdapterBinary {
|
||||
adapter_name: Self::ADAPTER_NAME.into(),
|
||||
command: gdb_path,
|
||||
arguments: Some(vec!["-i=dap".into()]),
|
||||
envs: None,
|
||||
cwd: None,
|
||||
connection: None,
|
||||
request_args: self.request_args(config),
|
||||
})
|
||||
}
|
||||
|
||||
async fn install_binary(
|
||||
&self,
|
||||
_version: AdapterVersion,
|
||||
_delegate: &dyn DapDelegate,
|
||||
) -> Result<()> {
|
||||
unimplemented!("GDB debug adapter cannot be installed by Zed (yet)")
|
||||
}
|
||||
|
||||
async fn fetch_latest_adapter_version(&self, _: &dyn DapDelegate) -> Result<AdapterVersion> {
|
||||
unimplemented!("Fetch latest GDB version not implemented (yet)")
|
||||
}
|
||||
|
||||
async fn get_installed_binary(
|
||||
&self,
|
||||
_: &dyn DapDelegate,
|
||||
_: &DebugTaskDefinition,
|
||||
_: Option<std::path::PathBuf>,
|
||||
_: &mut AsyncApp,
|
||||
) -> Result<DebugAdapterBinary> {
|
||||
unimplemented!("GDB cannot be installed by Zed (yet)")
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,3 +1,4 @@
|
||||
use dap::StartDebuggingRequestArguments;
|
||||
use gpui::AsyncApp;
|
||||
use std::{ffi::OsStr, path::PathBuf};
|
||||
use task::DebugTaskDefinition;
|
||||
@@ -9,6 +10,31 @@ pub(crate) struct GoDebugAdapter;
|
||||
|
||||
impl GoDebugAdapter {
|
||||
const ADAPTER_NAME: &'static str = "Delve";
|
||||
fn request_args(&self, config: &DebugTaskDefinition) -> StartDebuggingRequestArguments {
|
||||
let mut args = match &config.request {
|
||||
dap::DebugRequestType::Attach(attach_config) => {
|
||||
json!({
|
||||
"processId": attach_config.process_id,
|
||||
})
|
||||
}
|
||||
dap::DebugRequestType::Launch(launch_config) => json!({
|
||||
"program": launch_config.program,
|
||||
"cwd": launch_config.cwd,
|
||||
"args": launch_config.args
|
||||
}),
|
||||
};
|
||||
|
||||
let map = args.as_object_mut().unwrap();
|
||||
|
||||
if let Some(stop_on_entry) = config.stop_on_entry {
|
||||
map.insert("stopOnEntry".into(), stop_on_entry.into());
|
||||
}
|
||||
|
||||
StartDebuggingRequestArguments {
|
||||
configuration: args,
|
||||
request: config.request.to_dap(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[async_trait(?Send)]
|
||||
@@ -20,7 +46,7 @@ impl DebugAdapter for GoDebugAdapter {
|
||||
async fn get_binary(
|
||||
&self,
|
||||
delegate: &dyn DapDelegate,
|
||||
config: &DebugAdapterConfig,
|
||||
config: &DebugTaskDefinition,
|
||||
user_installed_path: Option<PathBuf>,
|
||||
cx: &mut AsyncApp,
|
||||
) -> Result<DebugAdapterBinary> {
|
||||
@@ -53,7 +79,7 @@ impl DebugAdapter for GoDebugAdapter {
|
||||
async fn get_installed_binary(
|
||||
&self,
|
||||
delegate: &dyn DapDelegate,
|
||||
config: &DebugAdapterConfig,
|
||||
config: &DebugTaskDefinition,
|
||||
_: Option<PathBuf>,
|
||||
_: &mut AsyncApp,
|
||||
) -> Result<DebugAdapterBinary> {
|
||||
@@ -66,6 +92,7 @@ impl DebugAdapter for GoDebugAdapter {
|
||||
let (host, port, timeout) = crate::configure_tcp_connection(tcp_connection).await?;
|
||||
|
||||
Ok(DebugAdapterBinary {
|
||||
adapter_name: self.name(),
|
||||
command: delve_path,
|
||||
arguments: Some(vec![
|
||||
"dap".into(),
|
||||
@@ -79,29 +106,7 @@ impl DebugAdapter for GoDebugAdapter {
|
||||
port,
|
||||
timeout,
|
||||
}),
|
||||
request_args: self.request_args(config),
|
||||
})
|
||||
}
|
||||
|
||||
fn request_args(&self, config: &DebugTaskDefinition) -> Value {
|
||||
let mut args = match &config.request {
|
||||
dap::DebugRequestType::Attach(attach_config) => {
|
||||
json!({
|
||||
"processId": attach_config.process_id,
|
||||
})
|
||||
}
|
||||
dap::DebugRequestType::Launch(launch_config) => json!({
|
||||
"program": launch_config.program,
|
||||
"cwd": launch_config.cwd,
|
||||
"args": launch_config.args
|
||||
}),
|
||||
};
|
||||
|
||||
let map = args.as_object_mut().unwrap();
|
||||
|
||||
if let Some(stop_on_entry) = config.stop_on_entry {
|
||||
map.insert("stopOnEntry".into(), stop_on_entry.into());
|
||||
}
|
||||
|
||||
args
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,28 +1,52 @@
|
||||
use adapters::latest_github_release;
|
||||
use dap::StartDebuggingRequestArguments;
|
||||
use gpui::AsyncApp;
|
||||
use regex::Regex;
|
||||
use std::path::PathBuf;
|
||||
use task::{DebugRequestType, DebugTaskDefinition};
|
||||
|
||||
use crate::*;
|
||||
|
||||
#[derive(Debug)]
|
||||
pub(crate) struct JsDebugAdapter {
|
||||
attach_processes: Regex,
|
||||
}
|
||||
pub(crate) struct JsDebugAdapter;
|
||||
|
||||
impl Default for JsDebugAdapter {
|
||||
fn default() -> Self {
|
||||
Self {
|
||||
attach_processes: Regex::new(r"(?i)^(?:node|bun|iojs)(?:$|\b)")
|
||||
.expect("Regex compilation to succeed"),
|
||||
}
|
||||
}
|
||||
}
|
||||
impl JsDebugAdapter {
|
||||
const ADAPTER_NAME: &'static str = "JavaScript";
|
||||
const ADAPTER_NPM_NAME: &'static str = "vscode-js-debug";
|
||||
const ADAPTER_PATH: &'static str = "js-debug/src/dapDebugServer.js";
|
||||
|
||||
fn request_args(&self, config: &DebugTaskDefinition) -> StartDebuggingRequestArguments {
|
||||
let mut args = json!({
|
||||
"type": "pwa-node",
|
||||
"request": match config.request {
|
||||
DebugRequestType::Launch(_) => "launch",
|
||||
DebugRequestType::Attach(_) => "attach",
|
||||
},
|
||||
});
|
||||
let map = args.as_object_mut().unwrap();
|
||||
match &config.request {
|
||||
DebugRequestType::Attach(attach) => {
|
||||
map.insert("processId".into(), attach.process_id.into());
|
||||
}
|
||||
DebugRequestType::Launch(launch) => {
|
||||
map.insert("program".into(), launch.program.clone().into());
|
||||
|
||||
if !launch.args.is_empty() {
|
||||
map.insert("args".into(), launch.args.clone().into());
|
||||
}
|
||||
|
||||
if let Some(stop_on_entry) = config.stop_on_entry {
|
||||
map.insert("stopOnEntry".into(), stop_on_entry.into());
|
||||
}
|
||||
if let Some(cwd) = launch.cwd.as_ref() {
|
||||
map.insert("cwd".into(), cwd.to_string_lossy().into_owned().into());
|
||||
}
|
||||
}
|
||||
}
|
||||
StartDebuggingRequestArguments {
|
||||
configuration: args,
|
||||
request: config.request.to_dap(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[async_trait(?Send)]
|
||||
@@ -60,7 +84,7 @@ impl DebugAdapter for JsDebugAdapter {
|
||||
async fn get_installed_binary(
|
||||
&self,
|
||||
delegate: &dyn DapDelegate,
|
||||
config: &DebugAdapterConfig,
|
||||
config: &DebugTaskDefinition,
|
||||
user_installed_path: Option<PathBuf>,
|
||||
_: &mut AsyncApp,
|
||||
) -> Result<DebugAdapterBinary> {
|
||||
@@ -82,6 +106,7 @@ impl DebugAdapter for JsDebugAdapter {
|
||||
let (host, port, timeout) = crate::configure_tcp_connection(tcp_connection).await?;
|
||||
|
||||
Ok(DebugAdapterBinary {
|
||||
adapter_name: self.name(),
|
||||
command: delegate
|
||||
.node_runtime()
|
||||
.binary_path()
|
||||
@@ -100,6 +125,7 @@ impl DebugAdapter for JsDebugAdapter {
|
||||
port,
|
||||
timeout,
|
||||
}),
|
||||
request_args: self.request_args(config),
|
||||
})
|
||||
}
|
||||
|
||||
@@ -118,39 +144,4 @@ impl DebugAdapter for JsDebugAdapter {
|
||||
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
fn request_args(&self, config: &DebugTaskDefinition) -> Value {
|
||||
let mut args = json!({
|
||||
"type": "pwa-node",
|
||||
"request": match config.request {
|
||||
DebugRequestType::Launch(_) => "launch",
|
||||
DebugRequestType::Attach(_) => "attach",
|
||||
},
|
||||
});
|
||||
let map = args.as_object_mut().unwrap();
|
||||
match &config.request {
|
||||
DebugRequestType::Attach(attach) => {
|
||||
map.insert("processId".into(), attach.process_id.into());
|
||||
}
|
||||
DebugRequestType::Launch(launch) => {
|
||||
map.insert("program".into(), launch.program.clone().into());
|
||||
|
||||
if !launch.args.is_empty() {
|
||||
map.insert("args".into(), launch.args.clone().into());
|
||||
}
|
||||
|
||||
if let Some(stop_on_entry) = config.stop_on_entry {
|
||||
map.insert("stopOnEntry".into(), stop_on_entry.into());
|
||||
}
|
||||
if let Some(cwd) = launch.cwd.as_ref() {
|
||||
map.insert("cwd".into(), cwd.to_string_lossy().into_owned().into());
|
||||
}
|
||||
}
|
||||
}
|
||||
args
|
||||
}
|
||||
|
||||
fn attach_processes_filter(&self) -> Regex {
|
||||
self.attach_processes.clone()
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,99 +0,0 @@
|
||||
use std::{ffi::OsStr, path::PathBuf};
|
||||
|
||||
use anyhow::Result;
|
||||
use async_trait::async_trait;
|
||||
use gpui::AsyncApp;
|
||||
use task::{DebugAdapterConfig, DebugRequestType, DebugTaskDefinition};
|
||||
|
||||
use crate::*;
|
||||
|
||||
#[derive(Default)]
|
||||
pub(crate) struct LldbDebugAdapter;
|
||||
|
||||
impl LldbDebugAdapter {
|
||||
const ADAPTER_NAME: &'static str = "LLDB";
|
||||
}
|
||||
|
||||
#[async_trait(?Send)]
|
||||
impl DebugAdapter for LldbDebugAdapter {
|
||||
fn name(&self) -> DebugAdapterName {
|
||||
DebugAdapterName(Self::ADAPTER_NAME.into())
|
||||
}
|
||||
|
||||
async fn get_binary(
|
||||
&self,
|
||||
delegate: &dyn DapDelegate,
|
||||
_: &DebugAdapterConfig,
|
||||
user_installed_path: Option<PathBuf>,
|
||||
_: &mut AsyncApp,
|
||||
) -> Result<DebugAdapterBinary> {
|
||||
let lldb_dap_path = if let Some(user_installed_path) = user_installed_path {
|
||||
user_installed_path.to_string_lossy().into()
|
||||
} else {
|
||||
delegate
|
||||
.which(OsStr::new("lldb-dap"))
|
||||
.and_then(|p| p.to_str().map(|s| s.to_string()))
|
||||
.ok_or(anyhow!("Could not find lldb-dap in path"))?
|
||||
};
|
||||
|
||||
Ok(DebugAdapterBinary {
|
||||
command: lldb_dap_path,
|
||||
arguments: None,
|
||||
envs: None,
|
||||
cwd: None,
|
||||
connection: None,
|
||||
})
|
||||
}
|
||||
|
||||
async fn install_binary(
|
||||
&self,
|
||||
_version: AdapterVersion,
|
||||
_delegate: &dyn DapDelegate,
|
||||
) -> Result<()> {
|
||||
unimplemented!("LLDB debug adapter cannot be installed by Zed (yet)")
|
||||
}
|
||||
|
||||
async fn fetch_latest_adapter_version(&self, _: &dyn DapDelegate) -> Result<AdapterVersion> {
|
||||
unimplemented!("Fetch latest adapter version not implemented for lldb (yet)")
|
||||
}
|
||||
|
||||
async fn get_installed_binary(
|
||||
&self,
|
||||
_: &dyn DapDelegate,
|
||||
_: &DebugAdapterConfig,
|
||||
_: Option<PathBuf>,
|
||||
_: &mut AsyncApp,
|
||||
) -> Result<DebugAdapterBinary> {
|
||||
unimplemented!("LLDB debug adapter cannot be installed by Zed (yet)")
|
||||
}
|
||||
|
||||
fn request_args(&self, config: &DebugTaskDefinition) -> Value {
|
||||
let mut args = json!({
|
||||
"request": match config.request {
|
||||
DebugRequestType::Launch(_) => "launch",
|
||||
DebugRequestType::Attach(_) => "attach",
|
||||
},
|
||||
});
|
||||
let map = args.as_object_mut().unwrap();
|
||||
match &config.request {
|
||||
DebugRequestType::Attach(attach) => {
|
||||
map.insert("pid".into(), attach.process_id.into());
|
||||
}
|
||||
DebugRequestType::Launch(launch) => {
|
||||
map.insert("program".into(), launch.program.clone().into());
|
||||
|
||||
if !launch.args.is_empty() {
|
||||
map.insert("args".into(), launch.args.clone().into());
|
||||
}
|
||||
|
||||
if let Some(stop_on_entry) = config.stop_on_entry {
|
||||
map.insert("stopOnEntry".into(), stop_on_entry.into());
|
||||
}
|
||||
if let Some(cwd) = launch.cwd.as_ref() {
|
||||
map.insert("cwd".into(), cwd.to_string_lossy().into_owned().into());
|
||||
}
|
||||
}
|
||||
}
|
||||
args
|
||||
}
|
||||
}
|
||||
@@ -13,6 +13,28 @@ impl PhpDebugAdapter {
|
||||
const ADAPTER_NAME: &'static str = "PHP";
|
||||
const ADAPTER_PACKAGE_NAME: &'static str = "vscode-php-debug";
|
||||
const ADAPTER_PATH: &'static str = "extension/out/phpDebug.js";
|
||||
|
||||
fn request_args(
|
||||
&self,
|
||||
config: &DebugTaskDefinition,
|
||||
) -> Result<dap::StartDebuggingRequestArguments> {
|
||||
match &config.request {
|
||||
dap::DebugRequestType::Attach(_) => {
|
||||
anyhow::bail!("php adapter does not support attaching")
|
||||
}
|
||||
dap::DebugRequestType::Launch(launch_config) => {
|
||||
Ok(dap::StartDebuggingRequestArguments {
|
||||
configuration: json!({
|
||||
"program": launch_config.program,
|
||||
"cwd": launch_config.cwd,
|
||||
"args": launch_config.args,
|
||||
"stopOnEntry": config.stop_on_entry.unwrap_or_default(),
|
||||
}),
|
||||
request: config.request.to_dap(),
|
||||
})
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[async_trait(?Send)]
|
||||
@@ -50,7 +72,7 @@ impl DebugAdapter for PhpDebugAdapter {
|
||||
async fn get_installed_binary(
|
||||
&self,
|
||||
delegate: &dyn DapDelegate,
|
||||
config: &DebugAdapterConfig,
|
||||
config: &DebugTaskDefinition,
|
||||
user_installed_path: Option<PathBuf>,
|
||||
_: &mut AsyncApp,
|
||||
) -> Result<DebugAdapterBinary> {
|
||||
@@ -72,6 +94,7 @@ impl DebugAdapter for PhpDebugAdapter {
|
||||
let (host, port, timeout) = crate::configure_tcp_connection(tcp_connection).await?;
|
||||
|
||||
Ok(DebugAdapterBinary {
|
||||
adapter_name: self.name(),
|
||||
command: delegate
|
||||
.node_runtime()
|
||||
.binary_path()
|
||||
@@ -89,6 +112,7 @@ impl DebugAdapter for PhpDebugAdapter {
|
||||
}),
|
||||
cwd: None,
|
||||
envs: None,
|
||||
request_args: self.request_args(config)?,
|
||||
})
|
||||
}
|
||||
|
||||
@@ -107,21 +131,4 @@ impl DebugAdapter for PhpDebugAdapter {
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn request_args(&self, config: &DebugTaskDefinition) -> Value {
|
||||
match &config.request {
|
||||
dap::DebugRequestType::Attach(_) => {
|
||||
// php adapter does not support attaching
|
||||
json!({})
|
||||
}
|
||||
dap::DebugRequestType::Launch(launch_config) => {
|
||||
json!({
|
||||
"program": launch_config.program,
|
||||
"cwd": launch_config.cwd,
|
||||
"args": launch_config.args,
|
||||
"stopOnEntry": config.stop_on_entry.unwrap_or_default(),
|
||||
})
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
use crate::*;
|
||||
use dap::DebugRequestType;
|
||||
use dap::{DebugRequestType, StartDebuggingRequestArguments};
|
||||
use gpui::AsyncApp;
|
||||
use std::{ffi::OsStr, path::PathBuf};
|
||||
use task::DebugTaskDefinition;
|
||||
@@ -12,6 +12,38 @@ impl PythonDebugAdapter {
|
||||
const ADAPTER_PACKAGE_NAME: &'static str = "debugpy";
|
||||
const ADAPTER_PATH: &'static str = "src/debugpy/adapter";
|
||||
const LANGUAGE_NAME: &'static str = "Python";
|
||||
|
||||
fn request_args(&self, config: &DebugTaskDefinition) -> StartDebuggingRequestArguments {
|
||||
let mut args = json!({
|
||||
"request": match config.request {
|
||||
DebugRequestType::Launch(_) => "launch",
|
||||
DebugRequestType::Attach(_) => "attach",
|
||||
},
|
||||
"subProcess": true,
|
||||
"redirectOutput": true,
|
||||
});
|
||||
let map = args.as_object_mut().unwrap();
|
||||
match &config.request {
|
||||
DebugRequestType::Attach(attach) => {
|
||||
map.insert("processId".into(), attach.process_id.into());
|
||||
}
|
||||
DebugRequestType::Launch(launch) => {
|
||||
map.insert("program".into(), launch.program.clone().into());
|
||||
map.insert("args".into(), launch.args.clone().into());
|
||||
|
||||
if let Some(stop_on_entry) = config.stop_on_entry {
|
||||
map.insert("stopOnEntry".into(), stop_on_entry.into());
|
||||
}
|
||||
if let Some(cwd) = launch.cwd.as_ref() {
|
||||
map.insert("cwd".into(), cwd.to_string_lossy().into_owned().into());
|
||||
}
|
||||
}
|
||||
}
|
||||
StartDebuggingRequestArguments {
|
||||
configuration: args,
|
||||
request: config.request.to_dap(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[async_trait(?Send)]
|
||||
@@ -64,7 +96,7 @@ impl DebugAdapter for PythonDebugAdapter {
|
||||
async fn get_installed_binary(
|
||||
&self,
|
||||
delegate: &dyn DapDelegate,
|
||||
config: &DebugAdapterConfig,
|
||||
config: &DebugTaskDefinition,
|
||||
user_installed_path: Option<PathBuf>,
|
||||
cx: &mut AsyncApp,
|
||||
) -> Result<DebugAdapterBinary> {
|
||||
@@ -76,7 +108,7 @@ impl DebugAdapter for PythonDebugAdapter {
|
||||
user_installed_path
|
||||
} else {
|
||||
let adapter_path = paths::debug_adapters_dir().join(self.name().as_ref());
|
||||
let file_name_prefix = format!("{}_", Self::ADAPTER_PACKAGE_NAME);
|
||||
let file_name_prefix = format!("{}_", Self::ADAPTER_NAME);
|
||||
|
||||
util::fs::find_file_name_in_dir(adapter_path.as_path(), |file_name| {
|
||||
file_name.starts_with(&file_name_prefix)
|
||||
@@ -109,6 +141,7 @@ impl DebugAdapter for PythonDebugAdapter {
|
||||
};
|
||||
|
||||
Ok(DebugAdapterBinary {
|
||||
adapter_name: self.name(),
|
||||
command: python_path.ok_or(anyhow!("failed to find binary path for python"))?,
|
||||
arguments: Some(vec![
|
||||
debugpy_dir.join(Self::ADAPTER_PATH).into(),
|
||||
@@ -122,35 +155,7 @@ impl DebugAdapter for PythonDebugAdapter {
|
||||
}),
|
||||
cwd: None,
|
||||
envs: None,
|
||||
request_args: self.request_args(config),
|
||||
})
|
||||
}
|
||||
|
||||
fn request_args(&self, config: &DebugTaskDefinition) -> Value {
|
||||
let mut args = json!({
|
||||
"request": match config.request {
|
||||
DebugRequestType::Launch(_) => "launch",
|
||||
DebugRequestType::Attach(_) => "attach",
|
||||
},
|
||||
"subProcess": true,
|
||||
"redirectOutput": true,
|
||||
});
|
||||
let map = args.as_object_mut().unwrap();
|
||||
match &config.request {
|
||||
DebugRequestType::Attach(attach) => {
|
||||
map.insert("processId".into(), attach.process_id.into());
|
||||
}
|
||||
DebugRequestType::Launch(launch) => {
|
||||
map.insert("program".into(), launch.program.clone().into());
|
||||
map.insert("args".into(), launch.args.clone().into());
|
||||
|
||||
if let Some(stop_on_entry) = config.stop_on_entry {
|
||||
map.insert("stopOnEntry".into(), stop_on_entry.into());
|
||||
}
|
||||
if let Some(cwd) = launch.cwd.as_ref() {
|
||||
map.insert("cwd".into(), cwd.to_string_lossy().into_owned().into());
|
||||
}
|
||||
}
|
||||
}
|
||||
args
|
||||
}
|
||||
}
|
||||
|
||||
@@ -28,6 +28,7 @@ client.workspace = true
|
||||
collections.workspace = true
|
||||
command_palette_hooks.workspace = true
|
||||
dap.workspace = true
|
||||
db.workspace = true
|
||||
editor.workspace = true
|
||||
feature_flags.workspace = true
|
||||
futures.workspace = true
|
||||
@@ -45,6 +46,7 @@ serde_json.workspace = true
|
||||
settings.workspace = true
|
||||
sysinfo.workspace = true
|
||||
task.workspace = true
|
||||
tasks_ui.workspace = true
|
||||
terminal_view.workspace = true
|
||||
theme.workspace = true
|
||||
ui.workspace = true
|
||||
|
||||
@@ -4,7 +4,6 @@ use gpui::Subscription;
|
||||
use gpui::{DismissEvent, Entity, EventEmitter, Focusable, Render};
|
||||
use picker::{Picker, PickerDelegate};
|
||||
|
||||
use std::cell::LazyCell;
|
||||
use std::sync::Arc;
|
||||
use sysinfo::System;
|
||||
use ui::{Context, Tooltip, prelude::*};
|
||||
@@ -24,7 +23,7 @@ pub(crate) struct AttachModalDelegate {
|
||||
matches: Vec<StringMatch>,
|
||||
placeholder_text: Arc<str>,
|
||||
project: Entity<project::Project>,
|
||||
debug_config: task::DebugTaskDefinition,
|
||||
pub(crate) debug_config: task::DebugTaskDefinition,
|
||||
candidates: Arc<[Candidate]>,
|
||||
}
|
||||
|
||||
@@ -58,7 +57,7 @@ impl AttachModal {
|
||||
window: &mut Window,
|
||||
cx: &mut Context<Self>,
|
||||
) -> Self {
|
||||
let mut processes: Vec<_> = System::new_all()
|
||||
let mut processes: Box<[_]> = System::new_all()
|
||||
.processes()
|
||||
.values()
|
||||
.map(|process| {
|
||||
@@ -75,30 +74,18 @@ impl AttachModal {
|
||||
})
|
||||
.collect();
|
||||
processes.sort_by_key(|k| k.name.clone());
|
||||
let processes = processes.into_iter().collect();
|
||||
Self::with_processes(project, debug_config, processes, modal, window, cx)
|
||||
}
|
||||
|
||||
pub(super) fn with_processes(
|
||||
project: Entity<project::Project>,
|
||||
debug_config: task::DebugTaskDefinition,
|
||||
processes: Vec<Candidate>,
|
||||
processes: Arc<[Candidate]>,
|
||||
modal: bool,
|
||||
window: &mut Window,
|
||||
cx: &mut Context<Self>,
|
||||
) -> Self {
|
||||
let adapter = project
|
||||
.read(cx)
|
||||
.debug_adapters()
|
||||
.adapter(&debug_config.adapter);
|
||||
let filter = LazyCell::new(|| adapter.map(|adapter| adapter.attach_processes_filter()));
|
||||
let processes = processes
|
||||
.into_iter()
|
||||
.filter(|process| {
|
||||
filter
|
||||
.as_ref()
|
||||
.map_or(false, |filter| filter.is_match(&process.name))
|
||||
})
|
||||
.collect();
|
||||
let picker = cx.new(|cx| {
|
||||
Picker::uniform_list(
|
||||
AttachModalDelegate::new(project, debug_config, processes),
|
||||
@@ -117,9 +104,10 @@ impl AttachModal {
|
||||
}
|
||||
|
||||
impl Render for AttachModal {
|
||||
fn render(&mut self, _window: &mut Window, _: &mut Context<Self>) -> impl ui::IntoElement {
|
||||
fn render(&mut self, _window: &mut Window, cx: &mut Context<Self>) -> impl ui::IntoElement {
|
||||
v_flex()
|
||||
.key_context("AttachModal")
|
||||
.track_focus(&self.focus_handle(cx))
|
||||
.w(rems(34.))
|
||||
.child(self.picker.clone())
|
||||
}
|
||||
@@ -240,10 +228,7 @@ impl PickerDelegate for AttachModalDelegate {
|
||||
let config = self.debug_config.clone();
|
||||
self.project
|
||||
.update(cx, |project, cx| {
|
||||
#[cfg(any(test, feature = "test-support"))]
|
||||
let ret = project.fake_debug_session(config.request, None, false, cx);
|
||||
#[cfg(not(any(test, feature = "test-support")))]
|
||||
let ret = project.start_debug_session(config.into(), cx);
|
||||
let ret = project.start_debug_session(config, cx);
|
||||
ret
|
||||
})
|
||||
.detach_and_log_err(cx);
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
use crate::{
|
||||
ClearAllBreakpoints, Continue, CreateDebuggingSession, Disconnect, Pause, Restart, StepBack,
|
||||
StepInto, StepOut, StepOver, Stop, ToggleIgnoreBreakpoints,
|
||||
StepInto, StepOut, StepOver, Stop, ToggleIgnoreBreakpoints, persistence,
|
||||
};
|
||||
use crate::{new_session_modal::NewSessionModal, session::DebugSession};
|
||||
use anyhow::{Result, anyhow};
|
||||
@@ -15,6 +15,7 @@ use gpui::{
|
||||
Action, App, AsyncWindowContext, Context, Entity, EntityId, EventEmitter, FocusHandle,
|
||||
Focusable, Subscription, Task, WeakEntity, actions,
|
||||
};
|
||||
|
||||
use project::{
|
||||
Project,
|
||||
debugger::{
|
||||
@@ -76,8 +77,45 @@ impl DebugPanel {
|
||||
let project = workspace.project().clone();
|
||||
let dap_store = project.read(cx).dap_store();
|
||||
|
||||
let _subscriptions =
|
||||
vec![cx.subscribe_in(&dap_store, window, Self::handle_dap_store_event)];
|
||||
let weak = cx.weak_entity();
|
||||
|
||||
let modal_subscription =
|
||||
cx.observe_new::<tasks_ui::TasksModal>(move |_, window, cx| {
|
||||
let modal_entity = cx.entity();
|
||||
|
||||
weak.update(cx, |_: &mut DebugPanel, cx| {
|
||||
let Some(window) = window else {
|
||||
log::error!("Debug panel couldn't subscribe to tasks modal because there was no window");
|
||||
return;
|
||||
};
|
||||
|
||||
cx.subscribe_in(
|
||||
&modal_entity,
|
||||
window,
|
||||
|panel, _, event: &tasks_ui::ShowAttachModal, window, cx| {
|
||||
panel.workspace.update(cx, |workspace, cx| {
|
||||
let project = workspace.project().clone();
|
||||
workspace.toggle_modal(window, cx, |window, cx| {
|
||||
crate::attach_modal::AttachModal::new(
|
||||
project,
|
||||
event.debug_config.clone(),
|
||||
true,
|
||||
window,
|
||||
cx,
|
||||
)
|
||||
});
|
||||
}).ok();
|
||||
},
|
||||
)
|
||||
.detach();
|
||||
})
|
||||
.ok();
|
||||
});
|
||||
|
||||
let _subscriptions = vec![
|
||||
cx.subscribe_in(&dap_store, window, Self::handle_dap_store_event),
|
||||
modal_subscription,
|
||||
];
|
||||
|
||||
let debug_panel = Self {
|
||||
size: px(300.),
|
||||
@@ -94,6 +132,87 @@ impl DebugPanel {
|
||||
})
|
||||
}
|
||||
|
||||
fn filter_action_types(&self, cx: &mut App) {
|
||||
let (has_active_session, supports_restart, support_step_back, status) = self
|
||||
.active_session()
|
||||
.map(|item| {
|
||||
let running = item.read(cx).mode().as_running().cloned();
|
||||
|
||||
match running {
|
||||
Some(running) => {
|
||||
let caps = running.read(cx).capabilities(cx);
|
||||
(
|
||||
!running.read(cx).session().read(cx).is_terminated(),
|
||||
caps.supports_restart_request.unwrap_or_default(),
|
||||
caps.supports_step_back.unwrap_or_default(),
|
||||
running.read(cx).thread_status(cx),
|
||||
)
|
||||
}
|
||||
None => (false, false, false, None),
|
||||
}
|
||||
})
|
||||
.unwrap_or((false, false, false, None));
|
||||
|
||||
let filter = CommandPaletteFilter::global_mut(cx);
|
||||
let debugger_action_types = [
|
||||
TypeId::of::<Disconnect>(),
|
||||
TypeId::of::<Stop>(),
|
||||
TypeId::of::<ToggleIgnoreBreakpoints>(),
|
||||
];
|
||||
|
||||
let running_action_types = [TypeId::of::<Pause>()];
|
||||
|
||||
let stopped_action_type = [
|
||||
TypeId::of::<Continue>(),
|
||||
TypeId::of::<StepOver>(),
|
||||
TypeId::of::<StepInto>(),
|
||||
TypeId::of::<StepOut>(),
|
||||
TypeId::of::<editor::actions::DebuggerRunToCursor>(),
|
||||
TypeId::of::<editor::actions::DebuggerEvaluateSelectedText>(),
|
||||
];
|
||||
|
||||
let step_back_action_type = [TypeId::of::<StepBack>()];
|
||||
let restart_action_type = [TypeId::of::<Restart>()];
|
||||
|
||||
if has_active_session {
|
||||
filter.show_action_types(debugger_action_types.iter());
|
||||
|
||||
if supports_restart {
|
||||
filter.show_action_types(restart_action_type.iter());
|
||||
} else {
|
||||
filter.hide_action_types(&restart_action_type);
|
||||
}
|
||||
|
||||
if support_step_back {
|
||||
filter.show_action_types(step_back_action_type.iter());
|
||||
} else {
|
||||
filter.hide_action_types(&step_back_action_type);
|
||||
}
|
||||
|
||||
match status {
|
||||
Some(ThreadStatus::Running) => {
|
||||
filter.show_action_types(running_action_types.iter());
|
||||
filter.hide_action_types(&stopped_action_type);
|
||||
}
|
||||
Some(ThreadStatus::Stopped) => {
|
||||
filter.show_action_types(stopped_action_type.iter());
|
||||
filter.hide_action_types(&running_action_types);
|
||||
}
|
||||
_ => {
|
||||
filter.hide_action_types(&running_action_types);
|
||||
filter.hide_action_types(&stopped_action_type);
|
||||
}
|
||||
}
|
||||
} else {
|
||||
// show only the `debug: start`
|
||||
filter.hide_action_types(&debugger_action_types);
|
||||
filter.hide_action_types(&step_back_action_type);
|
||||
filter.hide_action_types(&restart_action_type);
|
||||
filter.hide_action_types(&running_action_types);
|
||||
filter.hide_action_types(&stopped_action_type);
|
||||
}
|
||||
}
|
||||
|
||||
pub fn load(
|
||||
workspace: WeakEntity<Workspace>,
|
||||
cx: AsyncWindowContext,
|
||||
@@ -111,63 +230,15 @@ impl DebugPanel {
|
||||
)
|
||||
});
|
||||
|
||||
cx.observe_new::<DebugPanel>(|debug_panel, _, cx| {
|
||||
Self::filter_action_types(debug_panel, cx);
|
||||
})
|
||||
.detach();
|
||||
|
||||
cx.observe(&debug_panel, |_, debug_panel, cx| {
|
||||
let (has_active_session, supports_restart, support_step_back) = debug_panel
|
||||
.update(cx, |this, cx| {
|
||||
this.active_session()
|
||||
.map(|item| {
|
||||
let running = item.read(cx).mode().as_running().cloned();
|
||||
|
||||
match running {
|
||||
Some(running) => {
|
||||
let caps = running.read(cx).capabilities(cx);
|
||||
(
|
||||
true,
|
||||
caps.supports_restart_request.unwrap_or_default(),
|
||||
caps.supports_step_back.unwrap_or_default(),
|
||||
)
|
||||
}
|
||||
None => (false, false, false),
|
||||
}
|
||||
})
|
||||
.unwrap_or((false, false, false))
|
||||
});
|
||||
|
||||
let filter = CommandPaletteFilter::global_mut(cx);
|
||||
let debugger_action_types = [
|
||||
TypeId::of::<Continue>(),
|
||||
TypeId::of::<StepOver>(),
|
||||
TypeId::of::<StepInto>(),
|
||||
TypeId::of::<StepOut>(),
|
||||
TypeId::of::<Stop>(),
|
||||
TypeId::of::<Disconnect>(),
|
||||
TypeId::of::<Pause>(),
|
||||
TypeId::of::<ToggleIgnoreBreakpoints>(),
|
||||
];
|
||||
|
||||
let step_back_action_type = [TypeId::of::<StepBack>()];
|
||||
let restart_action_type = [TypeId::of::<Restart>()];
|
||||
|
||||
if has_active_session {
|
||||
filter.show_action_types(debugger_action_types.iter());
|
||||
|
||||
if supports_restart {
|
||||
filter.show_action_types(restart_action_type.iter());
|
||||
} else {
|
||||
filter.hide_action_types(&restart_action_type);
|
||||
}
|
||||
|
||||
if support_step_back {
|
||||
filter.show_action_types(step_back_action_type.iter());
|
||||
} else {
|
||||
filter.hide_action_types(&step_back_action_type);
|
||||
}
|
||||
} else {
|
||||
// show only the `debug: start`
|
||||
filter.hide_action_types(&debugger_action_types);
|
||||
filter.hide_action_types(&step_back_action_type);
|
||||
filter.hide_action_types(&restart_action_type);
|
||||
}
|
||||
debug_panel.update(cx, |debug_panel, cx| {
|
||||
Self::filter_action_types(debug_panel, cx);
|
||||
});
|
||||
})
|
||||
.detach();
|
||||
|
||||
@@ -222,29 +293,49 @@ impl DebugPanel {
|
||||
);
|
||||
};
|
||||
|
||||
let Some(project) = self.project.upgrade() else {
|
||||
return log::error!("Debug Panel out lived it's weak reference to Project");
|
||||
};
|
||||
let adapter_name = session.read(cx).adapter_name();
|
||||
|
||||
if self
|
||||
.sessions
|
||||
.iter()
|
||||
.any(|item| item.read(cx).session_id(cx) == *session_id)
|
||||
{
|
||||
// We already have an item for this session.
|
||||
return;
|
||||
}
|
||||
let session_item = DebugSession::running(
|
||||
project,
|
||||
self.workspace.clone(),
|
||||
session,
|
||||
cx.weak_entity(),
|
||||
window,
|
||||
cx,
|
||||
);
|
||||
let session_id = *session_id;
|
||||
cx.spawn_in(window, async move |this, cx| {
|
||||
let serialized_layout =
|
||||
persistence::get_serialized_pane_layout(adapter_name).await;
|
||||
|
||||
self.sessions.push(session_item.clone());
|
||||
self.activate_session(session_item, window, cx);
|
||||
this.update_in(cx, |this, window, cx| {
|
||||
let Some(project) = this.project.upgrade() else {
|
||||
return log::error!(
|
||||
"Debug Panel out lived it's weak reference to Project"
|
||||
);
|
||||
};
|
||||
|
||||
if this
|
||||
.sessions
|
||||
.iter()
|
||||
.any(|item| item.read(cx).session_id(cx) == session_id)
|
||||
{
|
||||
// We already have an item for this session.
|
||||
return;
|
||||
}
|
||||
let session_item = DebugSession::running(
|
||||
project,
|
||||
this.workspace.clone(),
|
||||
session,
|
||||
cx.weak_entity(),
|
||||
serialized_layout,
|
||||
window,
|
||||
cx,
|
||||
);
|
||||
|
||||
if let Some(running) = session_item.read(cx).mode().as_running().cloned() {
|
||||
// We might want to make this an event subscription and only notify when a new thread is selected
|
||||
// This is used to filter the command menu correctly
|
||||
cx.observe(&running, |_, _, cx| cx.notify()).detach();
|
||||
}
|
||||
|
||||
this.sessions.push(session_item.clone());
|
||||
this.activate_session(session_item, window, cx);
|
||||
})
|
||||
})
|
||||
.detach();
|
||||
}
|
||||
dap_store::DapStoreEvent::RunInTerminal {
|
||||
title,
|
||||
@@ -338,30 +429,58 @@ impl DebugPanel {
|
||||
})
|
||||
}
|
||||
|
||||
fn close_session(&mut self, entity_id: EntityId, cx: &mut Context<Self>) {
|
||||
fn close_session(&mut self, entity_id: EntityId, window: &mut Window, cx: &mut Context<Self>) {
|
||||
let Some(session) = self
|
||||
.sessions
|
||||
.iter()
|
||||
.find(|other| entity_id == other.entity_id())
|
||||
.cloned()
|
||||
else {
|
||||
return;
|
||||
};
|
||||
|
||||
session.update(cx, |session, cx| session.shutdown(cx));
|
||||
let session_id = session.update(cx, |this, cx| this.session_id(cx));
|
||||
let should_prompt = self
|
||||
.project
|
||||
.update(cx, |this, cx| {
|
||||
let session = this.dap_store().read(cx).session_by_id(session_id);
|
||||
session.map(|session| !session.read(cx).is_terminated())
|
||||
})
|
||||
.ok()
|
||||
.flatten()
|
||||
.unwrap_or_default();
|
||||
|
||||
self.sessions.retain(|other| entity_id != other.entity_id());
|
||||
|
||||
if let Some(active_session_id) = self
|
||||
.active_session
|
||||
.as_ref()
|
||||
.map(|session| session.entity_id())
|
||||
{
|
||||
if active_session_id == entity_id {
|
||||
self.active_session = self.sessions.first().cloned();
|
||||
cx.spawn_in(window, async move |this, cx| {
|
||||
if should_prompt {
|
||||
let response = cx.prompt(
|
||||
gpui::PromptLevel::Warning,
|
||||
"This Debug Session is still running. Are you sure you want to terminate it?",
|
||||
None,
|
||||
&["Yes", "No"],
|
||||
);
|
||||
if response.await == Ok(1) {
|
||||
return;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
session.update(cx, |session, cx| session.shutdown(cx)).ok();
|
||||
this.update(cx, |this, cx| {
|
||||
this.sessions.retain(|other| entity_id != other.entity_id());
|
||||
|
||||
if let Some(active_session_id) = this
|
||||
.active_session
|
||||
.as_ref()
|
||||
.map(|session| session.entity_id())
|
||||
{
|
||||
if active_session_id == entity_id {
|
||||
this.active_session = this.sessions.first().cloned();
|
||||
}
|
||||
}
|
||||
cx.notify()
|
||||
})
|
||||
.ok();
|
||||
})
|
||||
.detach();
|
||||
}
|
||||
fn sessions_drop_down_menu(
|
||||
&self,
|
||||
active_session: &Entity<DebugSession>,
|
||||
@@ -375,19 +494,26 @@ impl DebugPanel {
|
||||
DropdownMenu::new_with_element(
|
||||
"debugger-session-list",
|
||||
label,
|
||||
ContextMenu::build(window, cx, move |mut this, _, _| {
|
||||
ContextMenu::build(window, cx, move |mut this, _, cx| {
|
||||
let context_menu = cx.weak_entity();
|
||||
for session in sessions.into_iter() {
|
||||
let weak_session = session.downgrade();
|
||||
let weak_id = weak_session.entity_id();
|
||||
let weak_session_id = weak_session.entity_id();
|
||||
|
||||
this = this.custom_entry(
|
||||
{
|
||||
let weak = weak.clone();
|
||||
let context_menu = context_menu.clone();
|
||||
move |_, cx| {
|
||||
weak_session
|
||||
.read_with(cx, |session, cx| {
|
||||
let context_menu = context_menu.clone();
|
||||
let id: SharedString =
|
||||
format!("debug-session-{}", session.session_id(cx).0)
|
||||
.into();
|
||||
h_flex()
|
||||
.w_full()
|
||||
.group(id.clone())
|
||||
.justify_between()
|
||||
.child(session.label_element(cx))
|
||||
.child(
|
||||
@@ -395,14 +521,28 @@ impl DebugPanel {
|
||||
"close-debug-session",
|
||||
IconName::Close,
|
||||
)
|
||||
.visible_on_hover(id.clone())
|
||||
.icon_size(IconSize::Small)
|
||||
.on_click({
|
||||
let weak = weak.clone();
|
||||
move |_, _, cx| {
|
||||
move |_, window, cx| {
|
||||
weak.update(cx, |panel, cx| {
|
||||
panel.close_session(weak_id, cx);
|
||||
panel.close_session(
|
||||
weak_session_id,
|
||||
window,
|
||||
cx,
|
||||
);
|
||||
})
|
||||
.ok();
|
||||
context_menu
|
||||
.update(cx, |this, cx| {
|
||||
this.cancel(
|
||||
&Default::default(),
|
||||
window,
|
||||
cx,
|
||||
);
|
||||
})
|
||||
.ok();
|
||||
}
|
||||
}),
|
||||
)
|
||||
|
||||
@@ -1,15 +1,19 @@
|
||||
use dap::debugger_settings::DebuggerSettings;
|
||||
use debugger_panel::{DebugPanel, ToggleFocus};
|
||||
use editor::Editor;
|
||||
use feature_flags::{Debugger, FeatureFlagViewExt};
|
||||
use gpui::{App, actions};
|
||||
use gpui::{App, EntityInputHandler, actions};
|
||||
use new_session_modal::NewSessionModal;
|
||||
use project::debugger::{self, breakpoint_store::SourceBreakpoint};
|
||||
use session::DebugSession;
|
||||
use settings::Settings;
|
||||
use util::maybe;
|
||||
use workspace::{ShutdownDebugAdapters, Workspace};
|
||||
|
||||
pub mod attach_modal;
|
||||
pub mod debugger_panel;
|
||||
mod new_session_modal;
|
||||
mod persistence;
|
||||
pub(crate) mod session;
|
||||
|
||||
#[cfg(test)]
|
||||
@@ -110,7 +114,9 @@ pub fn init(cx: &mut App) {
|
||||
.active_session()
|
||||
.and_then(|session| session.read(cx).mode().as_running().cloned())
|
||||
}) {
|
||||
active_item.update(cx, |item, cx| item.stop_thread(cx))
|
||||
cx.defer(move |cx| {
|
||||
active_item.update(cx, |item, cx| item.stop_thread(cx))
|
||||
})
|
||||
}
|
||||
}
|
||||
})
|
||||
@@ -151,8 +157,105 @@ pub fn init(cx: &mut App) {
|
||||
});
|
||||
}
|
||||
},
|
||||
);
|
||||
)
|
||||
.register_action(|workspace: &mut Workspace, _: &Start, window, cx| {
|
||||
tasks_ui::toggle_modal(
|
||||
workspace,
|
||||
None,
|
||||
task::TaskModal::DebugModal,
|
||||
window,
|
||||
cx,
|
||||
)
|
||||
.detach();
|
||||
});
|
||||
})
|
||||
})
|
||||
.detach();
|
||||
|
||||
cx.observe_new({
|
||||
move |editor: &mut Editor, _, cx| {
|
||||
editor
|
||||
.register_action(cx.listener(
|
||||
move |editor, _: &editor::actions::DebuggerRunToCursor, _, cx| {
|
||||
maybe!({
|
||||
let debug_panel =
|
||||
editor.workspace()?.read(cx).panel::<DebugPanel>(cx)?;
|
||||
let cursor_point: language::Point = editor.selections.newest(cx).head();
|
||||
let active_session = debug_panel.read(cx).active_session()?;
|
||||
|
||||
let (buffer, position, _) = editor
|
||||
.buffer()
|
||||
.read(cx)
|
||||
.point_to_buffer_point(cursor_point, cx)?;
|
||||
|
||||
let path =
|
||||
debugger::breakpoint_store::BreakpointStore::abs_path_from_buffer(
|
||||
&buffer, cx,
|
||||
)?;
|
||||
|
||||
let source_breakpoint = SourceBreakpoint {
|
||||
row: position.row,
|
||||
path,
|
||||
message: None,
|
||||
condition: None,
|
||||
hit_condition: None,
|
||||
state: debugger::breakpoint_store::BreakpointState::Enabled,
|
||||
};
|
||||
|
||||
active_session
|
||||
.update(cx, |session_item, _| {
|
||||
session_item.mode().as_running().cloned()
|
||||
})?
|
||||
.update(cx, |state, cx| {
|
||||
if let Some(thread_id) = state.selected_thread_id() {
|
||||
state.session().update(cx, |session, cx| {
|
||||
session.run_to_position(
|
||||
source_breakpoint,
|
||||
thread_id,
|
||||
cx,
|
||||
);
|
||||
})
|
||||
}
|
||||
});
|
||||
|
||||
Some(())
|
||||
});
|
||||
},
|
||||
))
|
||||
.detach();
|
||||
|
||||
editor
|
||||
.register_action(cx.listener(
|
||||
move |editor, _: &editor::actions::DebuggerEvaluateSelectedText, window, cx| {
|
||||
maybe!({
|
||||
let debug_panel =
|
||||
editor.workspace()?.read(cx).panel::<DebugPanel>(cx)?;
|
||||
let active_session = debug_panel.read(cx).active_session()?;
|
||||
|
||||
let text = editor.text_for_range(
|
||||
editor.selections.newest(cx).range(),
|
||||
&mut None,
|
||||
window,
|
||||
cx,
|
||||
)?;
|
||||
|
||||
active_session
|
||||
.update(cx, |session_item, _| {
|
||||
session_item.mode().as_running().cloned()
|
||||
})?
|
||||
.update(cx, |state, cx| {
|
||||
let stack_id = state.selected_stack_frame_id(cx);
|
||||
|
||||
state.session().update(cx, |session, cx| {
|
||||
session.evaluate(text, None, stack_id, None, cx);
|
||||
});
|
||||
});
|
||||
Some(())
|
||||
});
|
||||
},
|
||||
))
|
||||
.detach();
|
||||
}
|
||||
})
|
||||
.detach();
|
||||
}
|
||||
|
||||
@@ -11,6 +11,7 @@ use gpui::{
|
||||
App, AppContext, DismissEvent, Entity, EventEmitter, FocusHandle, Focusable, Render, TextStyle,
|
||||
WeakEntity,
|
||||
};
|
||||
use project::Project;
|
||||
use settings::Settings;
|
||||
use task::{DebugTaskDefinition, LaunchConfig};
|
||||
use theme::ThemeSettings;
|
||||
@@ -59,7 +60,7 @@ impl NewSessionModal {
|
||||
debug_panel: WeakEntity<DebugPanel>,
|
||||
workspace: WeakEntity<Workspace>,
|
||||
window: &mut Window,
|
||||
cx: &mut App,
|
||||
cx: &mut Context<Self>,
|
||||
) -> Self {
|
||||
let debugger = past_debug_definition
|
||||
.as_ref()
|
||||
@@ -102,7 +103,8 @@ impl NewSessionModal {
|
||||
},
|
||||
})
|
||||
}
|
||||
fn start_new_session(&self, cx: &mut Context<Self>) -> Result<()> {
|
||||
|
||||
fn start_new_session(&self, window: &mut Window, cx: &mut Context<Self>) -> Result<()> {
|
||||
let workspace = self.workspace.clone();
|
||||
let config = self
|
||||
.debug_config(cx)
|
||||
@@ -112,10 +114,41 @@ impl NewSessionModal {
|
||||
panel.past_debug_definition = Some(config.clone());
|
||||
});
|
||||
|
||||
let task_contexts = workspace
|
||||
.update(cx, |workspace, cx| {
|
||||
tasks_ui::task_contexts(workspace, window, cx)
|
||||
})
|
||||
.ok();
|
||||
|
||||
cx.spawn(async move |this, cx| {
|
||||
let task_context = if let Some(task) = task_contexts {
|
||||
task.await
|
||||
.active_worktree_context
|
||||
.map_or(task::TaskContext::default(), |context| context.1)
|
||||
} else {
|
||||
task::TaskContext::default()
|
||||
};
|
||||
let project = workspace.update(cx, |workspace, _| workspace.project().clone())?;
|
||||
let task =
|
||||
project.update(cx, |this, cx| this.start_debug_session(config.into(), cx))?;
|
||||
|
||||
let task = project.update(cx, |this, cx| {
|
||||
if let Some(debug_config) =
|
||||
config
|
||||
.clone()
|
||||
.to_zed_format()
|
||||
.ok()
|
||||
.and_then(|task_template| {
|
||||
task_template
|
||||
.resolve_task("debug_task", &task_context)
|
||||
.and_then(|resolved_task| {
|
||||
resolved_task.resolved_debug_adapter_config()
|
||||
})
|
||||
})
|
||||
{
|
||||
this.start_debug_session(debug_config, cx)
|
||||
} else {
|
||||
this.start_debug_session(config, cx)
|
||||
}
|
||||
})?;
|
||||
let spawn_result = task.await;
|
||||
if spawn_result.is_ok() {
|
||||
this.update(cx, |_, cx| {
|
||||
@@ -139,25 +172,13 @@ impl NewSessionModal {
|
||||
attach.update(cx, |this, cx| {
|
||||
if selected_debugger != this.debug_definition.adapter {
|
||||
this.debug_definition.adapter = selected_debugger.into();
|
||||
if let Some(project) = this
|
||||
.workspace
|
||||
.read_with(cx, |workspace, _| workspace.project().clone())
|
||||
.ok()
|
||||
{
|
||||
this.attach_picker = Some(cx.new(|cx| {
|
||||
let modal = AttachModal::new(
|
||||
project,
|
||||
this.debug_definition.clone(),
|
||||
false,
|
||||
window,
|
||||
cx,
|
||||
);
|
||||
|
||||
window.focus(&modal.focus_handle(cx));
|
||||
|
||||
modal
|
||||
}));
|
||||
}
|
||||
this.attach_picker.update(cx, |this, cx| {
|
||||
this.picker.update(cx, |this, cx| {
|
||||
this.delegate.debug_config.adapter = selected_debugger.into();
|
||||
this.focus(window, cx);
|
||||
})
|
||||
});
|
||||
}
|
||||
|
||||
cx.notify();
|
||||
@@ -224,7 +245,6 @@ impl NewSessionModal {
|
||||
ContextMenu::build(window, cx, move |mut menu, _, cx| {
|
||||
let setter_for_name = |task: DebugTaskDefinition| {
|
||||
let weak = weak.clone();
|
||||
let workspace = workspace.clone();
|
||||
move |window: &mut Window, cx: &mut App| {
|
||||
weak.update(cx, |this, cx| {
|
||||
this.last_selected_profile_name = Some(SharedString::from(&task.label));
|
||||
@@ -239,12 +259,19 @@ impl NewSessionModal {
|
||||
);
|
||||
}
|
||||
DebugRequestType::Attach(_) => {
|
||||
let Ok(project) = this
|
||||
.workspace
|
||||
.read_with(cx, |this, _| this.project().clone())
|
||||
else {
|
||||
return;
|
||||
};
|
||||
this.mode = NewSessionMode::attach(
|
||||
this.debugger.clone(),
|
||||
workspace.clone(),
|
||||
project,
|
||||
window,
|
||||
cx,
|
||||
);
|
||||
this.mode.focus_handle(cx).focus(window);
|
||||
if let Some((debugger, attach)) =
|
||||
this.debugger.as_ref().zip(this.mode.as_attach())
|
||||
{
|
||||
@@ -333,18 +360,16 @@ impl LaunchMode {
|
||||
|
||||
#[derive(Clone)]
|
||||
struct AttachMode {
|
||||
workspace: WeakEntity<Workspace>,
|
||||
debug_definition: DebugTaskDefinition,
|
||||
attach_picker: Option<Entity<AttachModal>>,
|
||||
focus_handle: FocusHandle,
|
||||
attach_picker: Entity<AttachModal>,
|
||||
}
|
||||
|
||||
impl AttachMode {
|
||||
fn new(
|
||||
debugger: Option<SharedString>,
|
||||
workspace: WeakEntity<Workspace>,
|
||||
project: Entity<Project>,
|
||||
window: &mut Window,
|
||||
cx: &mut App,
|
||||
cx: &mut Context<NewSessionModal>,
|
||||
) -> Entity<Self> {
|
||||
let debug_definition = DebugTaskDefinition {
|
||||
label: "Attach New Session Setup".into(),
|
||||
@@ -355,27 +380,15 @@ impl AttachMode {
|
||||
initialize_args: None,
|
||||
stop_on_entry: Some(false),
|
||||
};
|
||||
let attach_picker = cx.new(|cx| {
|
||||
let modal = AttachModal::new(project, debug_definition.clone(), false, window, cx);
|
||||
window.focus(&modal.focus_handle(cx));
|
||||
|
||||
let attach_picker = if let Some(project) = debugger.and(
|
||||
workspace
|
||||
.read_with(cx, |workspace, _| workspace.project().clone())
|
||||
.ok(),
|
||||
) {
|
||||
Some(cx.new(|cx| {
|
||||
let modal = AttachModal::new(project, debug_definition.clone(), false, window, cx);
|
||||
window.focus(&modal.focus_handle(cx));
|
||||
|
||||
modal
|
||||
}))
|
||||
} else {
|
||||
None
|
||||
};
|
||||
|
||||
cx.new(|cx| Self {
|
||||
workspace,
|
||||
modal
|
||||
});
|
||||
cx.new(|_| Self {
|
||||
debug_definition,
|
||||
attach_picker,
|
||||
focus_handle: cx.focus_handle(),
|
||||
})
|
||||
}
|
||||
fn debug_task(&self) -> task::AttachConfig {
|
||||
@@ -412,7 +425,7 @@ impl Focusable for NewSessionMode {
|
||||
fn focus_handle(&self, cx: &App) -> FocusHandle {
|
||||
match &self {
|
||||
NewSessionMode::Launch(entity) => entity.read(cx).program.focus_handle(cx),
|
||||
NewSessionMode::Attach(entity) => entity.read(cx).focus_handle.clone(),
|
||||
NewSessionMode::Attach(entity) => entity.read(cx).attach_picker.focus_handle(cx),
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -444,8 +457,11 @@ impl RenderOnce for LaunchMode {
|
||||
}
|
||||
|
||||
impl RenderOnce for AttachMode {
|
||||
fn render(self, _: &mut Window, _: &mut App) -> impl IntoElement {
|
||||
v_flex().w_full().children(self.attach_picker.clone())
|
||||
fn render(self, _: &mut Window, cx: &mut App) -> impl IntoElement {
|
||||
v_flex()
|
||||
.w_full()
|
||||
.track_focus(&self.attach_picker.focus_handle(cx))
|
||||
.child(self.attach_picker.clone())
|
||||
}
|
||||
}
|
||||
|
||||
@@ -465,13 +481,17 @@ impl RenderOnce for NewSessionMode {
|
||||
impl NewSessionMode {
|
||||
fn attach(
|
||||
debugger: Option<SharedString>,
|
||||
workspace: WeakEntity<Workspace>,
|
||||
project: Entity<Project>,
|
||||
window: &mut Window,
|
||||
cx: &mut App,
|
||||
cx: &mut Context<NewSessionModal>,
|
||||
) -> Self {
|
||||
Self::Attach(AttachMode::new(debugger, workspace, window, cx))
|
||||
Self::Attach(AttachMode::new(debugger, project, window, cx))
|
||||
}
|
||||
fn launch(past_launch_config: Option<LaunchConfig>, window: &mut Window, cx: &mut App) -> Self {
|
||||
fn launch(
|
||||
past_launch_config: Option<LaunchConfig>,
|
||||
window: &mut Window,
|
||||
cx: &mut Context<NewSessionModal>,
|
||||
) -> Self {
|
||||
Self::Launch(LaunchMode::new(past_launch_config, window, cx))
|
||||
}
|
||||
}
|
||||
@@ -560,18 +580,25 @@ impl Render for NewSessionModal {
|
||||
.toggle_state(matches!(self.mode, NewSessionMode::Attach(_)))
|
||||
.style(ui::ButtonStyle::Subtle)
|
||||
.on_click(cx.listener(|this, _, window, cx| {
|
||||
let Ok(project) = this
|
||||
.workspace
|
||||
.read_with(cx, |this, _| this.project().clone())
|
||||
else {
|
||||
return;
|
||||
};
|
||||
this.mode = NewSessionMode::attach(
|
||||
this.debugger.clone(),
|
||||
this.workspace.clone(),
|
||||
project,
|
||||
window,
|
||||
cx,
|
||||
);
|
||||
this.mode.focus_handle(cx).focus(window);
|
||||
if let Some((debugger, attach)) =
|
||||
this.debugger.as_ref().zip(this.mode.as_attach())
|
||||
{
|
||||
Self::update_attach_picker(&attach, &debugger, window, cx);
|
||||
}
|
||||
this.mode.focus_handle(cx).focus(window);
|
||||
|
||||
cx.notify();
|
||||
}))
|
||||
.last(),
|
||||
@@ -614,8 +641,8 @@ impl Render for NewSessionModal {
|
||||
})
|
||||
.child(
|
||||
Button::new("debugger-spawn", "Start")
|
||||
.on_click(cx.listener(|this, _, _, cx| {
|
||||
this.start_new_session(cx).log_err();
|
||||
.on_click(cx.listener(|this, _, window, cx| {
|
||||
this.start_new_session(window, cx).log_err();
|
||||
}))
|
||||
.disabled(self.debugger.is_none()),
|
||||
),
|
||||
|
||||
257
crates/debugger_ui/src/persistence.rs
Normal file
257
crates/debugger_ui/src/persistence.rs
Normal file
@@ -0,0 +1,257 @@
|
||||
use collections::HashMap;
|
||||
use db::kvp::KEY_VALUE_STORE;
|
||||
use gpui::{Axis, Context, Entity, EntityId, Focusable, Subscription, WeakEntity, Window};
|
||||
use project::Project;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use ui::{App, SharedString};
|
||||
use util::ResultExt;
|
||||
use workspace::{Member, Pane, PaneAxis, Workspace};
|
||||
|
||||
use crate::session::running::{
|
||||
self, RunningState, SubView, breakpoint_list::BreakpointList, console::Console,
|
||||
module_list::ModuleList, stack_frame_list::StackFrameList, variable_list::VariableList,
|
||||
};
|
||||
|
||||
#[derive(Clone, Copy, Debug, Serialize, Deserialize, PartialEq, Eq)]
|
||||
pub(crate) enum DebuggerPaneItem {
|
||||
Console,
|
||||
Variables,
|
||||
BreakpointList,
|
||||
Frames,
|
||||
Modules,
|
||||
}
|
||||
|
||||
impl DebuggerPaneItem {
|
||||
pub(crate) fn to_shared_string(self) -> SharedString {
|
||||
match self {
|
||||
DebuggerPaneItem::Console => SharedString::new_static("Console"),
|
||||
DebuggerPaneItem::Variables => SharedString::new_static("Variables"),
|
||||
DebuggerPaneItem::BreakpointList => SharedString::new_static("Breakpoints"),
|
||||
DebuggerPaneItem::Frames => SharedString::new_static("Frames"),
|
||||
DebuggerPaneItem::Modules => SharedString::new_static("Modules"),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize)]
|
||||
pub(crate) struct SerializedAxis(pub Axis);
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize)]
|
||||
pub(crate) enum SerializedPaneLayout {
|
||||
Pane(SerializedPane),
|
||||
Group {
|
||||
axis: SerializedAxis,
|
||||
flexes: Option<Vec<f32>>,
|
||||
children: Vec<SerializedPaneLayout>,
|
||||
},
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize)]
|
||||
pub(crate) struct SerializedPane {
|
||||
pub children: Vec<DebuggerPaneItem>,
|
||||
pub active_item: Option<DebuggerPaneItem>,
|
||||
}
|
||||
|
||||
const DEBUGGER_PANEL_PREFIX: &str = "debugger_panel_";
|
||||
|
||||
pub(crate) async fn serialize_pane_layout(
|
||||
adapter_name: SharedString,
|
||||
pane_group: SerializedPaneLayout,
|
||||
) -> anyhow::Result<()> {
|
||||
if let Ok(serialized_pane_group) = serde_json::to_string(&pane_group) {
|
||||
KEY_VALUE_STORE
|
||||
.write_kvp(
|
||||
format!("{DEBUGGER_PANEL_PREFIX}-{adapter_name}"),
|
||||
serialized_pane_group,
|
||||
)
|
||||
.await
|
||||
} else {
|
||||
Err(anyhow::anyhow!(
|
||||
"Failed to serialize pane group with serde_json as a string"
|
||||
))
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) fn build_serialized_pane_layout(
|
||||
pane_group: &Member,
|
||||
cx: &mut App,
|
||||
) -> SerializedPaneLayout {
|
||||
match pane_group {
|
||||
Member::Axis(PaneAxis {
|
||||
axis,
|
||||
members,
|
||||
flexes,
|
||||
bounding_boxes: _,
|
||||
}) => SerializedPaneLayout::Group {
|
||||
axis: SerializedAxis(*axis),
|
||||
children: members
|
||||
.iter()
|
||||
.map(|member| build_serialized_pane_layout(member, cx))
|
||||
.collect::<Vec<_>>(),
|
||||
flexes: Some(flexes.lock().clone()),
|
||||
},
|
||||
Member::Pane(pane_handle) => SerializedPaneLayout::Pane(serialize_pane(pane_handle, cx)),
|
||||
}
|
||||
}
|
||||
|
||||
fn serialize_pane(pane: &Entity<Pane>, cx: &mut App) -> SerializedPane {
|
||||
let pane = pane.read(cx);
|
||||
let children = pane
|
||||
.items()
|
||||
.filter_map(|item| {
|
||||
item.act_as::<SubView>(cx)
|
||||
.map(|view| view.read(cx).view_kind())
|
||||
})
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
let active_item = pane
|
||||
.active_item()
|
||||
.and_then(|item| item.act_as::<SubView>(cx))
|
||||
.map(|view| view.read(cx).view_kind());
|
||||
|
||||
SerializedPane {
|
||||
children,
|
||||
active_item,
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) async fn get_serialized_pane_layout(
|
||||
adapter_name: impl AsRef<str>,
|
||||
) -> Option<SerializedPaneLayout> {
|
||||
let key = format!("{DEBUGGER_PANEL_PREFIX}-{}", adapter_name.as_ref());
|
||||
|
||||
KEY_VALUE_STORE
|
||||
.read_kvp(&key)
|
||||
.log_err()
|
||||
.flatten()
|
||||
.and_then(|value| serde_json::from_str::<SerializedPaneLayout>(&value).ok())
|
||||
}
|
||||
|
||||
pub(crate) fn deserialize_pane_layout(
|
||||
serialized: SerializedPaneLayout,
|
||||
workspace: &WeakEntity<Workspace>,
|
||||
project: &Entity<Project>,
|
||||
stack_frame_list: &Entity<StackFrameList>,
|
||||
variable_list: &Entity<VariableList>,
|
||||
module_list: &Entity<ModuleList>,
|
||||
console: &Entity<Console>,
|
||||
breakpoint_list: &Entity<BreakpointList>,
|
||||
subscriptions: &mut HashMap<EntityId, Subscription>,
|
||||
window: &mut Window,
|
||||
cx: &mut Context<RunningState>,
|
||||
) -> Option<Member> {
|
||||
match serialized {
|
||||
SerializedPaneLayout::Group {
|
||||
axis,
|
||||
flexes,
|
||||
children,
|
||||
} => {
|
||||
let mut members = Vec::new();
|
||||
for child in children {
|
||||
if let Some(new_member) = deserialize_pane_layout(
|
||||
child,
|
||||
workspace,
|
||||
project,
|
||||
stack_frame_list,
|
||||
variable_list,
|
||||
module_list,
|
||||
console,
|
||||
breakpoint_list,
|
||||
subscriptions,
|
||||
window,
|
||||
cx,
|
||||
) {
|
||||
members.push(new_member);
|
||||
}
|
||||
}
|
||||
|
||||
if members.is_empty() {
|
||||
return None;
|
||||
}
|
||||
|
||||
if members.len() == 1 {
|
||||
return Some(members.remove(0));
|
||||
}
|
||||
|
||||
Some(Member::Axis(PaneAxis::load(
|
||||
axis.0,
|
||||
members,
|
||||
flexes.clone(),
|
||||
)))
|
||||
}
|
||||
SerializedPaneLayout::Pane(serialized_pane) => {
|
||||
let pane = running::new_debugger_pane(workspace.clone(), project.clone(), window, cx);
|
||||
subscriptions.insert(
|
||||
pane.entity_id(),
|
||||
cx.subscribe_in(&pane, window, RunningState::handle_pane_event),
|
||||
);
|
||||
|
||||
let sub_views: Vec<_> = serialized_pane
|
||||
.children
|
||||
.iter()
|
||||
.map(|child| match child {
|
||||
DebuggerPaneItem::Frames => Box::new(SubView::new(
|
||||
pane.focus_handle(cx),
|
||||
stack_frame_list.clone().into(),
|
||||
DebuggerPaneItem::Frames,
|
||||
None,
|
||||
cx,
|
||||
)),
|
||||
DebuggerPaneItem::Variables => Box::new(SubView::new(
|
||||
variable_list.focus_handle(cx),
|
||||
variable_list.clone().into(),
|
||||
DebuggerPaneItem::Variables,
|
||||
None,
|
||||
cx,
|
||||
)),
|
||||
DebuggerPaneItem::BreakpointList => Box::new(SubView::new(
|
||||
breakpoint_list.focus_handle(cx),
|
||||
breakpoint_list.clone().into(),
|
||||
DebuggerPaneItem::BreakpointList,
|
||||
None,
|
||||
cx,
|
||||
)),
|
||||
DebuggerPaneItem::Modules => Box::new(SubView::new(
|
||||
pane.focus_handle(cx),
|
||||
module_list.clone().into(),
|
||||
DebuggerPaneItem::Modules,
|
||||
None,
|
||||
cx,
|
||||
)),
|
||||
|
||||
DebuggerPaneItem::Console => Box::new(SubView::new(
|
||||
pane.focus_handle(cx),
|
||||
console.clone().into(),
|
||||
DebuggerPaneItem::Console,
|
||||
Some(Box::new({
|
||||
let console = console.clone().downgrade();
|
||||
move |cx| {
|
||||
console
|
||||
.read_with(cx, |console, cx| console.show_indicator(cx))
|
||||
.unwrap_or_default()
|
||||
}
|
||||
})),
|
||||
cx,
|
||||
)),
|
||||
})
|
||||
.collect();
|
||||
|
||||
pane.update(cx, |pane, cx| {
|
||||
let mut active_idx = 0;
|
||||
for (idx, sub_view) in sub_views.into_iter().enumerate() {
|
||||
if serialized_pane
|
||||
.active_item
|
||||
.is_some_and(|active| active == sub_view.read(cx).view_kind())
|
||||
{
|
||||
active_idx = idx;
|
||||
}
|
||||
pane.add_item(sub_view, false, false, None, window, cx);
|
||||
}
|
||||
|
||||
pane.activate_item(active_idx, false, false, window, cx);
|
||||
});
|
||||
|
||||
Some(Member::Pane(pane.clone()))
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1,5 +1,7 @@
|
||||
pub mod running;
|
||||
|
||||
use std::sync::OnceLock;
|
||||
|
||||
use dap::client::SessionId;
|
||||
use gpui::{App, Entity, EventEmitter, FocusHandle, Focusable, Subscription, Task, WeakEntity};
|
||||
use project::Project;
|
||||
@@ -14,6 +16,7 @@ use workspace::{
|
||||
};
|
||||
|
||||
use crate::debugger_panel::DebugPanel;
|
||||
use crate::persistence::SerializedPaneLayout;
|
||||
|
||||
pub(crate) enum DebugSessionState {
|
||||
Running(Entity<running::RunningState>),
|
||||
@@ -30,6 +33,7 @@ impl DebugSessionState {
|
||||
pub struct DebugSession {
|
||||
remote_id: Option<workspace::ViewId>,
|
||||
mode: DebugSessionState,
|
||||
label: OnceLock<String>,
|
||||
dap_store: WeakEntity<DapStore>,
|
||||
_debug_panel: WeakEntity<DebugPanel>,
|
||||
_worktree_store: WeakEntity<WorktreeStore>,
|
||||
@@ -49,6 +53,7 @@ impl DebugSession {
|
||||
workspace: WeakEntity<Workspace>,
|
||||
session: Entity<Session>,
|
||||
_debug_panel: WeakEntity<DebugPanel>,
|
||||
serialized_pane_layout: Option<SerializedPaneLayout>,
|
||||
window: &mut Window,
|
||||
cx: &mut App,
|
||||
) -> Entity<Self> {
|
||||
@@ -57,6 +62,7 @@ impl DebugSession {
|
||||
session.clone(),
|
||||
project.clone(),
|
||||
workspace.clone(),
|
||||
serialized_pane_layout,
|
||||
window,
|
||||
cx,
|
||||
)
|
||||
@@ -68,6 +74,7 @@ impl DebugSession {
|
||||
})],
|
||||
remote_id: None,
|
||||
mode: DebugSessionState::Running(mode),
|
||||
label: OnceLock::new(),
|
||||
dap_store: project.read(cx).dap_store().downgrade(),
|
||||
_debug_panel,
|
||||
_worktree_store: project.read(cx).worktree_store().downgrade(),
|
||||
@@ -92,36 +99,45 @@ impl DebugSession {
|
||||
}
|
||||
|
||||
pub(crate) fn label(&self, cx: &App) -> String {
|
||||
if let Some(label) = self.label.get() {
|
||||
return label.to_owned();
|
||||
}
|
||||
|
||||
let session_id = match &self.mode {
|
||||
DebugSessionState::Running(running_state) => running_state.read(cx).session_id(),
|
||||
};
|
||||
|
||||
let Ok(Some(session)) = self
|
||||
.dap_store
|
||||
.read_with(cx, |store, _| store.session_by_id(session_id))
|
||||
else {
|
||||
return "".to_owned();
|
||||
};
|
||||
session
|
||||
.read(cx)
|
||||
.as_local()
|
||||
.expect("Remote Debug Sessions are not implemented yet")
|
||||
.label()
|
||||
|
||||
self.label
|
||||
.get_or_init(|| {
|
||||
session
|
||||
.read(cx)
|
||||
.as_local()
|
||||
.expect("Remote Debug Sessions are not implemented yet")
|
||||
.label()
|
||||
})
|
||||
.to_owned()
|
||||
}
|
||||
|
||||
pub(crate) fn label_element(&self, cx: &App) -> AnyElement {
|
||||
let label = self.label(cx);
|
||||
|
||||
let (icon, color) = match &self.mode {
|
||||
let icon = match &self.mode {
|
||||
DebugSessionState::Running(state) => {
|
||||
if state.read(cx).session().read(cx).is_terminated() {
|
||||
(Some(Indicator::dot().color(Color::Error)), Color::Error)
|
||||
Some(Indicator::dot().color(Color::Error))
|
||||
} else {
|
||||
match state.read(cx).thread_status(cx).unwrap_or_default() {
|
||||
project::debugger::session::ThreadStatus::Stopped => (
|
||||
Some(Indicator::dot().color(Color::Conflict)),
|
||||
Color::Conflict,
|
||||
),
|
||||
_ => (Some(Indicator::dot().color(Color::Success)), Color::Success),
|
||||
project::debugger::session::ThreadStatus::Stopped => {
|
||||
Some(Indicator::dot().color(Color::Conflict))
|
||||
}
|
||||
_ => Some(Indicator::dot().color(Color::Success)),
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -131,7 +147,7 @@ impl DebugSession {
|
||||
.gap_2()
|
||||
.when_some(icon, |this, indicator| this.child(indicator))
|
||||
.justify_between()
|
||||
.child(Label::new(label).color(color))
|
||||
.child(Label::new(label))
|
||||
.into_any_element()
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,18 +1,22 @@
|
||||
mod console;
|
||||
mod loaded_source_list;
|
||||
mod module_list;
|
||||
pub(crate) mod breakpoint_list;
|
||||
pub(crate) mod console;
|
||||
pub(crate) mod loaded_source_list;
|
||||
pub(crate) mod module_list;
|
||||
pub mod stack_frame_list;
|
||||
pub mod variable_list;
|
||||
|
||||
use std::{any::Any, ops::ControlFlow, sync::Arc};
|
||||
use std::{any::Any, ops::ControlFlow, sync::Arc, time::Duration};
|
||||
|
||||
use crate::persistence::{self, DebuggerPaneItem, SerializedPaneLayout};
|
||||
|
||||
use super::DebugPanelItemEvent;
|
||||
use breakpoint_list::BreakpointList;
|
||||
use collections::HashMap;
|
||||
use console::Console;
|
||||
use dap::{Capabilities, Thread, client::SessionId, debugger_settings::DebuggerSettings};
|
||||
use gpui::{
|
||||
Action as _, AnyView, AppContext, Entity, EntityId, EventEmitter, FocusHandle, Focusable,
|
||||
NoAction, Subscription, WeakEntity,
|
||||
NoAction, Subscription, Task, WeakEntity,
|
||||
};
|
||||
use loaded_source_list::LoadedSourceList;
|
||||
use module_list::ModuleList;
|
||||
@@ -24,13 +28,15 @@ use rpc::proto::ViewId;
|
||||
use settings::Settings;
|
||||
use stack_frame_list::StackFrameList;
|
||||
use ui::{
|
||||
App, Context, ContextMenu, DropdownMenu, InteractiveElement, IntoElement, ParentElement,
|
||||
Render, SharedString, Styled, Window, div, h_flex, v_flex,
|
||||
ActiveTheme, AnyElement, App, Context, ContextMenu, DropdownMenu, FluentBuilder,
|
||||
InteractiveElement, IntoElement, Label, LabelCommon as _, ParentElement, Render, SharedString,
|
||||
StatefulInteractiveElement, Styled, Tab, Window, div, h_flex, v_flex,
|
||||
};
|
||||
use util::ResultExt;
|
||||
use variable_list::VariableList;
|
||||
use workspace::{
|
||||
ActivePaneDecorator, DraggedTab, Item, Pane, PaneGroup, Workspace, move_item, pane::Event,
|
||||
ActivePaneDecorator, DraggedTab, Item, Member, Pane, PaneGroup, Workspace,
|
||||
item::TabContentParams, move_item, pane::Event,
|
||||
};
|
||||
|
||||
pub struct RunningState {
|
||||
@@ -47,6 +53,7 @@ pub struct RunningState {
|
||||
_console: Entity<Console>,
|
||||
panes: PaneGroup,
|
||||
pane_close_subscriptions: HashMap<EntityId, Subscription>,
|
||||
_schedule_serialize: Option<Task<()>>,
|
||||
}
|
||||
|
||||
impl Render for RunningState {
|
||||
@@ -80,25 +87,32 @@ impl Render for RunningState {
|
||||
}
|
||||
}
|
||||
|
||||
struct SubView {
|
||||
pub(crate) struct SubView {
|
||||
inner: AnyView,
|
||||
pane_focus_handle: FocusHandle,
|
||||
tab_name: SharedString,
|
||||
kind: DebuggerPaneItem,
|
||||
show_indicator: Box<dyn Fn(&App) -> bool>,
|
||||
}
|
||||
|
||||
impl SubView {
|
||||
fn new(
|
||||
pub(crate) fn new(
|
||||
pane_focus_handle: FocusHandle,
|
||||
view: AnyView,
|
||||
tab_name: SharedString,
|
||||
kind: DebuggerPaneItem,
|
||||
show_indicator: Option<Box<dyn Fn(&App) -> bool>>,
|
||||
cx: &mut App,
|
||||
) -> Entity<Self> {
|
||||
cx.new(|_| Self {
|
||||
tab_name,
|
||||
kind,
|
||||
inner: view,
|
||||
pane_focus_handle,
|
||||
show_indicator: show_indicator.unwrap_or(Box::new(|_| false)),
|
||||
})
|
||||
}
|
||||
|
||||
pub(crate) fn view_kind(&self) -> DebuggerPaneItem {
|
||||
self.kind
|
||||
}
|
||||
}
|
||||
impl Focusable for SubView {
|
||||
fn focus_handle(&self, _: &App) -> FocusHandle {
|
||||
@@ -108,8 +122,34 @@ impl Focusable for SubView {
|
||||
impl EventEmitter<()> for SubView {}
|
||||
impl Item for SubView {
|
||||
type Event = ();
|
||||
|
||||
/// This is used to serialize debugger pane layouts
|
||||
/// A SharedString gets converted to a enum and back during serialization/deserialization.
|
||||
fn tab_content_text(&self, _window: &Window, _cx: &App) -> Option<SharedString> {
|
||||
Some(self.tab_name.clone())
|
||||
Some(self.kind.to_shared_string())
|
||||
}
|
||||
|
||||
fn tab_content(
|
||||
&self,
|
||||
params: workspace::item::TabContentParams,
|
||||
_: &Window,
|
||||
cx: &App,
|
||||
) -> AnyElement {
|
||||
let label = Label::new(self.kind.to_shared_string())
|
||||
.size(ui::LabelSize::Small)
|
||||
.color(params.text_color())
|
||||
.line_height_style(ui::LineHeightStyle::UiLabel);
|
||||
|
||||
if !params.selected && self.show_indicator.as_ref()(cx) {
|
||||
return h_flex()
|
||||
.justify_between()
|
||||
.child(ui::Indicator::dot())
|
||||
.gap_2()
|
||||
.child(label)
|
||||
.into_any_element();
|
||||
}
|
||||
|
||||
label.into_any_element()
|
||||
}
|
||||
}
|
||||
|
||||
@@ -119,7 +159,7 @@ impl Render for SubView {
|
||||
}
|
||||
}
|
||||
|
||||
fn new_debugger_pane(
|
||||
pub(crate) fn new_debugger_pane(
|
||||
workspace: WeakEntity<Workspace>,
|
||||
project: Entity<Project>,
|
||||
window: &mut Window,
|
||||
@@ -158,7 +198,7 @@ fn new_debugger_pane(
|
||||
new_debugger_pane(workspace.clone(), project.clone(), window, cx);
|
||||
let _previous_subscription = running.pane_close_subscriptions.insert(
|
||||
new_pane.entity_id(),
|
||||
cx.subscribe(&new_pane, RunningState::handle_pane_event),
|
||||
cx.subscribe_in(&new_pane, window, RunningState::handle_pane_event),
|
||||
);
|
||||
debug_assert!(_previous_subscription.is_none());
|
||||
running
|
||||
@@ -242,7 +282,81 @@ fn new_debugger_pane(
|
||||
})));
|
||||
pane.display_nav_history_buttons(None);
|
||||
pane.set_custom_drop_handle(cx, custom_drop_handle);
|
||||
pane.set_should_display_tab_bar(|_, _| true);
|
||||
pane.set_render_tab_bar_buttons(cx, |_, _, _| (None, None));
|
||||
pane.set_render_tab_bar(cx, |pane, window, cx| {
|
||||
let active_pane_item = pane.active_item();
|
||||
h_flex()
|
||||
.w_full()
|
||||
.px_2()
|
||||
.gap_1()
|
||||
.h(Tab::container_height(cx))
|
||||
.drag_over::<DraggedTab>(|bar, _, _, cx| {
|
||||
bar.bg(cx.theme().colors().drop_target_background)
|
||||
})
|
||||
.on_drop(
|
||||
cx.listener(move |this, dragged_tab: &DraggedTab, window, cx| {
|
||||
this.drag_split_direction = None;
|
||||
this.handle_tab_drop(dragged_tab, this.items_len(), window, cx)
|
||||
}),
|
||||
)
|
||||
.bg(cx.theme().colors().tab_bar_background)
|
||||
.border_b_1()
|
||||
.border_color(cx.theme().colors().border)
|
||||
.children(pane.items().enumerate().map(|(ix, item)| {
|
||||
let selected = active_pane_item
|
||||
.as_ref()
|
||||
.map_or(false, |active| active.item_id() == item.item_id());
|
||||
let item_ = item.boxed_clone();
|
||||
div()
|
||||
.id(SharedString::from(format!(
|
||||
"debugger_tab_{}",
|
||||
item.item_id().as_u64()
|
||||
)))
|
||||
.p_1()
|
||||
.rounded_md()
|
||||
.cursor_pointer()
|
||||
.map(|this| {
|
||||
if selected {
|
||||
this.bg(cx.theme().colors().tab_active_background)
|
||||
} else {
|
||||
let hover_color = cx.theme().colors().element_hover;
|
||||
this.hover(|style| style.bg(hover_color))
|
||||
}
|
||||
})
|
||||
.on_click(cx.listener(move |this, _, window, cx| {
|
||||
let index = this.index_for_item(&*item_);
|
||||
if let Some(index) = index {
|
||||
this.activate_item(index, true, true, window, cx);
|
||||
}
|
||||
}))
|
||||
.child(item.tab_content(
|
||||
TabContentParams {
|
||||
selected,
|
||||
..Default::default()
|
||||
},
|
||||
window,
|
||||
cx,
|
||||
))
|
||||
.on_drop(
|
||||
cx.listener(move |this, dragged_tab: &DraggedTab, window, cx| {
|
||||
this.drag_split_direction = None;
|
||||
this.handle_tab_drop(dragged_tab, ix, window, cx)
|
||||
}),
|
||||
)
|
||||
.on_drag(
|
||||
DraggedTab {
|
||||
item: item.boxed_clone(),
|
||||
pane: cx.entity().clone(),
|
||||
detail: 0,
|
||||
is_active: selected,
|
||||
ix,
|
||||
},
|
||||
|tab, _, _, cx| cx.new(|_| tab.clone()),
|
||||
)
|
||||
}))
|
||||
.into_any_element()
|
||||
});
|
||||
pane
|
||||
});
|
||||
|
||||
@@ -253,6 +367,7 @@ impl RunningState {
|
||||
session: Entity<Session>,
|
||||
project: Entity<Project>,
|
||||
workspace: WeakEntity<Workspace>,
|
||||
serialized_pane_layout: Option<SerializedPaneLayout>,
|
||||
window: &mut Window,
|
||||
cx: &mut Context<Self>,
|
||||
) -> Self {
|
||||
@@ -281,6 +396,8 @@ impl RunningState {
|
||||
)
|
||||
});
|
||||
|
||||
let breakpoints = BreakpointList::new(session.clone(), workspace.clone(), &project, cx);
|
||||
|
||||
let _subscriptions = vec![
|
||||
cx.observe(&module_list, |_, _, cx| cx.notify()),
|
||||
cx.subscribe_in(&session, window, |this, _, event, window, cx| {
|
||||
@@ -306,87 +423,40 @@ impl RunningState {
|
||||
}),
|
||||
];
|
||||
|
||||
let leftmost_pane = new_debugger_pane(workspace.clone(), project.clone(), window, cx);
|
||||
leftmost_pane.update(cx, |this, cx| {
|
||||
this.add_item(
|
||||
Box::new(SubView::new(
|
||||
this.focus_handle(cx),
|
||||
stack_frame_list.clone().into(),
|
||||
SharedString::new_static("Frames"),
|
||||
cx,
|
||||
)),
|
||||
true,
|
||||
false,
|
||||
None,
|
||||
let mut pane_close_subscriptions = HashMap::default();
|
||||
let panes = if let Some(root) = serialized_pane_layout.and_then(|serialized_layout| {
|
||||
persistence::deserialize_pane_layout(
|
||||
serialized_layout,
|
||||
&workspace,
|
||||
&project,
|
||||
&stack_frame_list,
|
||||
&variable_list,
|
||||
&module_list,
|
||||
&console,
|
||||
&breakpoints,
|
||||
&mut pane_close_subscriptions,
|
||||
window,
|
||||
cx,
|
||||
)
|
||||
}) {
|
||||
workspace::PaneGroup::with_root(root)
|
||||
} else {
|
||||
pane_close_subscriptions.clear();
|
||||
let root = Self::default_pane_layout(
|
||||
project,
|
||||
&workspace,
|
||||
&stack_frame_list,
|
||||
&variable_list,
|
||||
&module_list,
|
||||
&console,
|
||||
breakpoints,
|
||||
&mut pane_close_subscriptions,
|
||||
window,
|
||||
cx,
|
||||
);
|
||||
});
|
||||
let center_pane = new_debugger_pane(workspace.clone(), project.clone(), window, cx);
|
||||
center_pane.update(cx, |this, cx| {
|
||||
this.add_item(
|
||||
Box::new(SubView::new(
|
||||
variable_list.focus_handle(cx),
|
||||
variable_list.clone().into(),
|
||||
SharedString::new_static("Variables"),
|
||||
cx,
|
||||
)),
|
||||
true,
|
||||
false,
|
||||
None,
|
||||
window,
|
||||
cx,
|
||||
);
|
||||
this.add_item(
|
||||
Box::new(SubView::new(
|
||||
this.focus_handle(cx),
|
||||
module_list.clone().into(),
|
||||
SharedString::new_static("Modules"),
|
||||
cx,
|
||||
)),
|
||||
false,
|
||||
false,
|
||||
None,
|
||||
window,
|
||||
cx,
|
||||
);
|
||||
this.activate_item(0, false, false, window, cx);
|
||||
});
|
||||
let rightmost_pane = new_debugger_pane(workspace.clone(), project.clone(), window, cx);
|
||||
rightmost_pane.update(cx, |this, cx| {
|
||||
this.add_item(
|
||||
Box::new(SubView::new(
|
||||
this.focus_handle(cx),
|
||||
console.clone().into(),
|
||||
SharedString::new_static("Console"),
|
||||
cx,
|
||||
)),
|
||||
true,
|
||||
false,
|
||||
None,
|
||||
window,
|
||||
cx,
|
||||
);
|
||||
});
|
||||
let pane_close_subscriptions = HashMap::from_iter(
|
||||
[&leftmost_pane, ¢er_pane, &rightmost_pane]
|
||||
.into_iter()
|
||||
.map(|entity| {
|
||||
(
|
||||
entity.entity_id(),
|
||||
cx.subscribe(entity, Self::handle_pane_event),
|
||||
)
|
||||
}),
|
||||
);
|
||||
let group_root = workspace::PaneAxis::new(
|
||||
gpui::Axis::Horizontal,
|
||||
[leftmost_pane, center_pane, rightmost_pane]
|
||||
.into_iter()
|
||||
.map(workspace::Member::Pane)
|
||||
.collect(),
|
||||
);
|
||||
|
||||
let panes = PaneGroup::with_root(workspace::Member::Axis(group_root));
|
||||
workspace::PaneGroup::with_root(root)
|
||||
};
|
||||
|
||||
Self {
|
||||
session,
|
||||
@@ -402,21 +472,57 @@ impl RunningState {
|
||||
_module_list: module_list,
|
||||
_console: console,
|
||||
pane_close_subscriptions,
|
||||
_schedule_serialize: None,
|
||||
}
|
||||
}
|
||||
|
||||
fn handle_pane_event(
|
||||
fn serialize_layout(&mut self, window: &mut Window, cx: &mut Context<Self>) {
|
||||
if self._schedule_serialize.is_none() {
|
||||
self._schedule_serialize = Some(cx.spawn_in(window, async move |this, cx| {
|
||||
cx.background_executor()
|
||||
.timer(Duration::from_millis(100))
|
||||
.await;
|
||||
|
||||
let Some((adapter_name, pane_group)) = this
|
||||
.update(cx, |this, cx| {
|
||||
let adapter_name = this.session.read(cx).adapter_name();
|
||||
(
|
||||
adapter_name,
|
||||
persistence::build_serialized_pane_layout(&this.panes.root, cx),
|
||||
)
|
||||
})
|
||||
.ok()
|
||||
else {
|
||||
return;
|
||||
};
|
||||
|
||||
persistence::serialize_pane_layout(adapter_name, pane_group)
|
||||
.await
|
||||
.log_err();
|
||||
|
||||
this.update(cx, |this, _| {
|
||||
this._schedule_serialize.take();
|
||||
})
|
||||
.ok();
|
||||
}));
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) fn handle_pane_event(
|
||||
this: &mut RunningState,
|
||||
source_pane: Entity<Pane>,
|
||||
source_pane: &Entity<Pane>,
|
||||
event: &Event,
|
||||
window: &mut Window,
|
||||
cx: &mut Context<RunningState>,
|
||||
) {
|
||||
this.serialize_layout(window, cx);
|
||||
if let Event::Remove { .. } = event {
|
||||
let _did_find_pane = this.panes.remove(&source_pane).is_ok();
|
||||
debug_assert!(_did_find_pane);
|
||||
cx.notify();
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) fn go_to_selected_stack_frame(&self, window: &Window, cx: &mut Context<Self>) {
|
||||
if self.thread_id.is_some() {
|
||||
self.stack_frame_list
|
||||
@@ -432,6 +538,10 @@ impl RunningState {
|
||||
self.session_id
|
||||
}
|
||||
|
||||
pub(crate) fn selected_stack_frame_id(&self, cx: &App) -> Option<dap::StackFrameId> {
|
||||
self.stack_frame_list.read(cx).selected_stack_frame_id()
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
pub fn stack_frame_list(&self) -> &Entity<StackFrameList> {
|
||||
&self.stack_frame_list
|
||||
@@ -456,7 +566,7 @@ impl RunningState {
|
||||
.find_map(|pane| {
|
||||
pane.read(cx)
|
||||
.items_of_type::<SubView>()
|
||||
.position(|view| view.read(cx).tab_name == *"Modules")
|
||||
.position(|view| view.read(cx).view_kind().to_shared_string() == *"Modules")
|
||||
.map(|view| (view, pane))
|
||||
})
|
||||
.unwrap();
|
||||
@@ -492,7 +602,6 @@ impl RunningState {
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
pub(crate) fn selected_thread_id(&self) -> Option<ThreadId> {
|
||||
self.thread_id
|
||||
}
|
||||
@@ -659,7 +768,7 @@ impl RunningState {
|
||||
DropdownMenu::new(
|
||||
("thread-list", self.session_id.0),
|
||||
selected_thread_name,
|
||||
ContextMenu::build(window, cx, move |mut this, _, _| {
|
||||
ContextMenu::build_eager(window, cx, move |mut this, _, _| {
|
||||
for (thread, _) in threads {
|
||||
let state = state.clone();
|
||||
let thread_id = thread.id;
|
||||
@@ -673,6 +782,127 @@ impl RunningState {
|
||||
}),
|
||||
)
|
||||
}
|
||||
|
||||
fn default_pane_layout(
|
||||
project: Entity<Project>,
|
||||
workspace: &WeakEntity<Workspace>,
|
||||
stack_frame_list: &Entity<StackFrameList>,
|
||||
variable_list: &Entity<VariableList>,
|
||||
module_list: &Entity<ModuleList>,
|
||||
console: &Entity<Console>,
|
||||
breakpoints: Entity<BreakpointList>,
|
||||
subscriptions: &mut HashMap<EntityId, Subscription>,
|
||||
window: &mut Window,
|
||||
cx: &mut Context<'_, RunningState>,
|
||||
) -> Member {
|
||||
let leftmost_pane = new_debugger_pane(workspace.clone(), project.clone(), window, cx);
|
||||
leftmost_pane.update(cx, |this, cx| {
|
||||
this.add_item(
|
||||
Box::new(SubView::new(
|
||||
this.focus_handle(cx),
|
||||
stack_frame_list.clone().into(),
|
||||
DebuggerPaneItem::Frames,
|
||||
None,
|
||||
cx,
|
||||
)),
|
||||
true,
|
||||
false,
|
||||
None,
|
||||
window,
|
||||
cx,
|
||||
);
|
||||
this.add_item(
|
||||
Box::new(SubView::new(
|
||||
breakpoints.focus_handle(cx),
|
||||
breakpoints.into(),
|
||||
DebuggerPaneItem::BreakpointList,
|
||||
None,
|
||||
cx,
|
||||
)),
|
||||
true,
|
||||
false,
|
||||
None,
|
||||
window,
|
||||
cx,
|
||||
);
|
||||
this.activate_item(0, false, false, window, cx);
|
||||
});
|
||||
let center_pane = new_debugger_pane(workspace.clone(), project.clone(), window, cx);
|
||||
center_pane.update(cx, |this, cx| {
|
||||
this.add_item(
|
||||
Box::new(SubView::new(
|
||||
variable_list.focus_handle(cx),
|
||||
variable_list.clone().into(),
|
||||
DebuggerPaneItem::Variables,
|
||||
None,
|
||||
cx,
|
||||
)),
|
||||
true,
|
||||
false,
|
||||
None,
|
||||
window,
|
||||
cx,
|
||||
);
|
||||
this.add_item(
|
||||
Box::new(SubView::new(
|
||||
this.focus_handle(cx),
|
||||
module_list.clone().into(),
|
||||
DebuggerPaneItem::Modules,
|
||||
None,
|
||||
cx,
|
||||
)),
|
||||
false,
|
||||
false,
|
||||
None,
|
||||
window,
|
||||
cx,
|
||||
);
|
||||
this.activate_item(0, false, false, window, cx);
|
||||
});
|
||||
let rightmost_pane = new_debugger_pane(workspace.clone(), project.clone(), window, cx);
|
||||
rightmost_pane.update(cx, |this, cx| {
|
||||
let weak_console = console.downgrade();
|
||||
this.add_item(
|
||||
Box::new(SubView::new(
|
||||
this.focus_handle(cx),
|
||||
console.clone().into(),
|
||||
DebuggerPaneItem::Console,
|
||||
Some(Box::new(move |cx| {
|
||||
weak_console
|
||||
.read_with(cx, |console, cx| console.show_indicator(cx))
|
||||
.unwrap_or_default()
|
||||
})),
|
||||
cx,
|
||||
)),
|
||||
true,
|
||||
false,
|
||||
None,
|
||||
window,
|
||||
cx,
|
||||
);
|
||||
});
|
||||
|
||||
subscriptions.extend(
|
||||
[&leftmost_pane, ¢er_pane, &rightmost_pane]
|
||||
.into_iter()
|
||||
.map(|entity| {
|
||||
(
|
||||
entity.entity_id(),
|
||||
cx.subscribe_in(entity, window, Self::handle_pane_event),
|
||||
)
|
||||
}),
|
||||
);
|
||||
|
||||
let group_root = workspace::PaneAxis::new(
|
||||
gpui::Axis::Horizontal,
|
||||
[leftmost_pane, center_pane, rightmost_pane]
|
||||
.into_iter()
|
||||
.map(workspace::Member::Pane)
|
||||
.collect(),
|
||||
);
|
||||
|
||||
Member::Axis(group_root)
|
||||
}
|
||||
}
|
||||
|
||||
impl EventEmitter<DebugPanelItemEvent> for RunningState {}
|
||||
|
||||
482
crates/debugger_ui/src/session/running/breakpoint_list.rs
Normal file
482
crates/debugger_ui/src/session/running/breakpoint_list.rs
Normal file
@@ -0,0 +1,482 @@
|
||||
use std::{
|
||||
path::{Path, PathBuf},
|
||||
time::Duration,
|
||||
};
|
||||
|
||||
use dap::ExceptionBreakpointsFilter;
|
||||
use editor::Editor;
|
||||
use gpui::{
|
||||
AppContext, Entity, FocusHandle, Focusable, ListState, MouseButton, Stateful, Task, WeakEntity,
|
||||
list,
|
||||
};
|
||||
use language::Point;
|
||||
use project::{
|
||||
Project,
|
||||
debugger::{
|
||||
breakpoint_store::{BreakpointEditAction, BreakpointStore, SourceBreakpoint},
|
||||
session::Session,
|
||||
},
|
||||
worktree_store::WorktreeStore,
|
||||
};
|
||||
use ui::{
|
||||
App, Clickable, Color, Context, Div, Icon, IconButton, IconName, Indicator, InteractiveElement,
|
||||
IntoElement, Label, LabelCommon, LabelSize, ListItem, ParentElement, Render, RenderOnce,
|
||||
Scrollbar, ScrollbarState, SharedString, StatefulInteractiveElement, Styled, Window, div,
|
||||
h_flex, px, v_flex,
|
||||
};
|
||||
use util::{ResultExt, maybe};
|
||||
use workspace::Workspace;
|
||||
|
||||
pub(crate) struct BreakpointList {
|
||||
workspace: WeakEntity<Workspace>,
|
||||
breakpoint_store: Entity<BreakpointStore>,
|
||||
worktree_store: Entity<WorktreeStore>,
|
||||
list_state: ListState,
|
||||
scrollbar_state: ScrollbarState,
|
||||
breakpoints: Vec<BreakpointEntry>,
|
||||
session: Entity<Session>,
|
||||
hide_scrollbar_task: Option<Task<()>>,
|
||||
show_scrollbar: bool,
|
||||
focus_handle: FocusHandle,
|
||||
}
|
||||
|
||||
impl Focusable for BreakpointList {
|
||||
fn focus_handle(&self, _: &App) -> gpui::FocusHandle {
|
||||
self.focus_handle.clone()
|
||||
}
|
||||
}
|
||||
impl BreakpointList {
|
||||
pub(super) fn new(
|
||||
session: Entity<Session>,
|
||||
workspace: WeakEntity<Workspace>,
|
||||
project: &Entity<Project>,
|
||||
cx: &mut App,
|
||||
) -> Entity<Self> {
|
||||
let project = project.read(cx);
|
||||
let breakpoint_store = project.breakpoint_store();
|
||||
let worktree_store = project.worktree_store();
|
||||
|
||||
cx.new(|cx| {
|
||||
let weak: gpui::WeakEntity<Self> = cx.weak_entity();
|
||||
let list_state = ListState::new(
|
||||
0,
|
||||
gpui::ListAlignment::Top,
|
||||
px(1000.),
|
||||
move |ix, window, cx| {
|
||||
let Ok(Some(breakpoint)) =
|
||||
weak.update(cx, |this, _| this.breakpoints.get(ix).cloned())
|
||||
else {
|
||||
return div().into_any_element();
|
||||
};
|
||||
|
||||
breakpoint.render(window, cx).into_any_element()
|
||||
},
|
||||
);
|
||||
Self {
|
||||
breakpoint_store,
|
||||
worktree_store,
|
||||
scrollbar_state: ScrollbarState::new(list_state.clone()),
|
||||
list_state,
|
||||
breakpoints: Default::default(),
|
||||
hide_scrollbar_task: None,
|
||||
show_scrollbar: false,
|
||||
workspace,
|
||||
session,
|
||||
focus_handle: cx.focus_handle(),
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
fn hide_scrollbar(&mut self, window: &mut Window, cx: &mut Context<Self>) {
|
||||
const SCROLLBAR_SHOW_INTERVAL: Duration = Duration::from_secs(1);
|
||||
self.hide_scrollbar_task = Some(cx.spawn_in(window, async move |panel, cx| {
|
||||
cx.background_executor()
|
||||
.timer(SCROLLBAR_SHOW_INTERVAL)
|
||||
.await;
|
||||
panel
|
||||
.update(cx, |panel, cx| {
|
||||
panel.show_scrollbar = false;
|
||||
cx.notify();
|
||||
})
|
||||
.log_err();
|
||||
}))
|
||||
}
|
||||
|
||||
fn render_vertical_scrollbar(&self, cx: &mut Context<Self>) -> Option<Stateful<Div>> {
|
||||
if !(self.show_scrollbar || self.scrollbar_state.is_dragging()) {
|
||||
return None;
|
||||
}
|
||||
Some(
|
||||
div()
|
||||
.occlude()
|
||||
.id("breakpoint-list-vertical-scrollbar")
|
||||
.on_mouse_move(cx.listener(|_, _, _, cx| {
|
||||
cx.notify();
|
||||
cx.stop_propagation()
|
||||
}))
|
||||
.on_hover(|_, _, cx| {
|
||||
cx.stop_propagation();
|
||||
})
|
||||
.on_any_mouse_down(|_, _, cx| {
|
||||
cx.stop_propagation();
|
||||
})
|
||||
.on_mouse_up(
|
||||
MouseButton::Left,
|
||||
cx.listener(|_, _, _, cx| {
|
||||
cx.stop_propagation();
|
||||
}),
|
||||
)
|
||||
.on_scroll_wheel(cx.listener(|_, _, _, cx| {
|
||||
cx.notify();
|
||||
}))
|
||||
.h_full()
|
||||
.absolute()
|
||||
.right_1()
|
||||
.top_1()
|
||||
.bottom_0()
|
||||
.w(px(12.))
|
||||
.cursor_default()
|
||||
.children(Scrollbar::vertical(self.scrollbar_state.clone())),
|
||||
)
|
||||
}
|
||||
}
|
||||
impl Render for BreakpointList {
|
||||
fn render(
|
||||
&mut self,
|
||||
_window: &mut ui::Window,
|
||||
cx: &mut ui::Context<Self>,
|
||||
) -> impl ui::IntoElement {
|
||||
let old_len = self.breakpoints.len();
|
||||
let breakpoints = self.breakpoint_store.read(cx).all_breakpoints(cx);
|
||||
self.breakpoints.clear();
|
||||
let weak = cx.weak_entity();
|
||||
let breakpoints = breakpoints.into_iter().flat_map(|(path, mut breakpoints)| {
|
||||
let relative_worktree_path = self
|
||||
.worktree_store
|
||||
.read(cx)
|
||||
.find_worktree(&path, cx)
|
||||
.and_then(|(worktree, relative_path)| {
|
||||
worktree
|
||||
.read(cx)
|
||||
.is_visible()
|
||||
.then(|| Path::new(worktree.read(cx).root_name()).join(relative_path))
|
||||
});
|
||||
breakpoints.sort_by_key(|breakpoint| breakpoint.row);
|
||||
let weak = weak.clone();
|
||||
breakpoints.into_iter().filter_map(move |breakpoint| {
|
||||
debug_assert_eq!(&path, &breakpoint.path);
|
||||
let file_name = breakpoint.path.file_name()?;
|
||||
|
||||
let dir = relative_worktree_path
|
||||
.clone()
|
||||
.unwrap_or_else(|| PathBuf::from(&*breakpoint.path))
|
||||
.parent()
|
||||
.and_then(|parent| {
|
||||
parent
|
||||
.to_str()
|
||||
.map(ToOwned::to_owned)
|
||||
.map(SharedString::from)
|
||||
});
|
||||
let name = file_name
|
||||
.to_str()
|
||||
.map(ToOwned::to_owned)
|
||||
.map(SharedString::from)?;
|
||||
let weak = weak.clone();
|
||||
let line = format!("Line {}", breakpoint.row + 1).into();
|
||||
Some(BreakpointEntry {
|
||||
kind: BreakpointEntryKind::LineBreakpoint(LineBreakpoint {
|
||||
name,
|
||||
dir,
|
||||
line,
|
||||
breakpoint,
|
||||
}),
|
||||
weak,
|
||||
})
|
||||
})
|
||||
});
|
||||
let exception_breakpoints =
|
||||
self.session
|
||||
.read(cx)
|
||||
.exception_breakpoints()
|
||||
.map(|(data, is_enabled)| BreakpointEntry {
|
||||
kind: BreakpointEntryKind::ExceptionBreakpoint(ExceptionBreakpoint {
|
||||
id: data.filter.clone(),
|
||||
data: data.clone(),
|
||||
is_enabled: *is_enabled,
|
||||
}),
|
||||
weak: weak.clone(),
|
||||
});
|
||||
self.breakpoints
|
||||
.extend(breakpoints.chain(exception_breakpoints));
|
||||
if self.breakpoints.len() != old_len {
|
||||
self.list_state.reset(self.breakpoints.len());
|
||||
}
|
||||
v_flex()
|
||||
.id("breakpoint-list")
|
||||
.on_hover(cx.listener(|this, hovered, window, cx| {
|
||||
if *hovered {
|
||||
this.show_scrollbar = true;
|
||||
this.hide_scrollbar_task.take();
|
||||
cx.notify();
|
||||
} else if !this.focus_handle.contains_focused(window, cx) {
|
||||
this.hide_scrollbar(window, cx);
|
||||
}
|
||||
}))
|
||||
.size_full()
|
||||
.m_0p5()
|
||||
.child(list(self.list_state.clone()).flex_grow())
|
||||
.children(self.render_vertical_scrollbar(cx))
|
||||
}
|
||||
}
|
||||
#[derive(Clone, Debug)]
|
||||
struct LineBreakpoint {
|
||||
name: SharedString,
|
||||
dir: Option<SharedString>,
|
||||
line: SharedString,
|
||||
breakpoint: SourceBreakpoint,
|
||||
}
|
||||
|
||||
impl LineBreakpoint {
|
||||
fn render(self, weak: WeakEntity<BreakpointList>) -> ListItem {
|
||||
let LineBreakpoint {
|
||||
name,
|
||||
dir,
|
||||
line,
|
||||
breakpoint,
|
||||
} = self;
|
||||
let icon_name = if breakpoint.state.is_enabled() {
|
||||
IconName::DebugBreakpoint
|
||||
} else {
|
||||
IconName::DebugDisabledBreakpoint
|
||||
};
|
||||
let path = breakpoint.path;
|
||||
let row = breakpoint.row;
|
||||
let indicator = div()
|
||||
.id(SharedString::from(format!(
|
||||
"breakpoint-ui-toggle-{:?}/{}:{}",
|
||||
dir, name, line
|
||||
)))
|
||||
.cursor_pointer()
|
||||
.on_click({
|
||||
let weak = weak.clone();
|
||||
let path = path.clone();
|
||||
move |_, _, cx| {
|
||||
weak.update(cx, |this, cx| {
|
||||
this.breakpoint_store.update(cx, |this, cx| {
|
||||
if let Some((buffer, breakpoint)) =
|
||||
this.breakpoint_at_row(&path, row, cx)
|
||||
{
|
||||
this.toggle_breakpoint(
|
||||
buffer,
|
||||
breakpoint,
|
||||
BreakpointEditAction::InvertState,
|
||||
cx,
|
||||
);
|
||||
} else {
|
||||
log::error!("Couldn't find breakpoint at row event though it exists: row {row}")
|
||||
}
|
||||
})
|
||||
})
|
||||
.ok();
|
||||
}
|
||||
})
|
||||
.child(Indicator::icon(Icon::new(icon_name)).color(Color::Debugger))
|
||||
.on_mouse_down(MouseButton::Left, move |_, _, _| {});
|
||||
ListItem::new(SharedString::from(format!(
|
||||
"breakpoint-ui-item-{:?}/{}:{}",
|
||||
dir, name, line
|
||||
)))
|
||||
.start_slot(indicator)
|
||||
.rounded()
|
||||
.end_hover_slot(
|
||||
IconButton::new(
|
||||
SharedString::from(format!(
|
||||
"breakpoint-ui-on-click-go-to-line-remove-{:?}/{}:{}",
|
||||
dir, name, line
|
||||
)),
|
||||
IconName::Close,
|
||||
)
|
||||
.on_click({
|
||||
let weak = weak.clone();
|
||||
let path = path.clone();
|
||||
move |_, _, cx| {
|
||||
weak.update(cx, |this, cx| {
|
||||
this.breakpoint_store.update(cx, |this, cx| {
|
||||
if let Some((buffer, breakpoint)) =
|
||||
this.breakpoint_at_row(&path, row, cx)
|
||||
{
|
||||
this.toggle_breakpoint(
|
||||
buffer,
|
||||
breakpoint,
|
||||
BreakpointEditAction::Toggle,
|
||||
cx,
|
||||
);
|
||||
} else {
|
||||
log::error!("Couldn't find breakpoint at row event though it exists: row {row}")
|
||||
}
|
||||
})
|
||||
})
|
||||
.ok();
|
||||
}
|
||||
})
|
||||
.icon_size(ui::IconSize::XSmall),
|
||||
)
|
||||
.child(
|
||||
v_flex()
|
||||
.id(SharedString::from(format!(
|
||||
"breakpoint-ui-on-click-go-to-line-{:?}/{}:{}",
|
||||
dir, name, line
|
||||
)))
|
||||
.on_click(move |_, window, cx| {
|
||||
let path = path.clone();
|
||||
let weak = weak.clone();
|
||||
let row = breakpoint.row;
|
||||
maybe!({
|
||||
let task = weak
|
||||
.update(cx, |this, cx| {
|
||||
this.worktree_store.update(cx, |this, cx| {
|
||||
this.find_or_create_worktree(path, false, cx)
|
||||
})
|
||||
})
|
||||
.ok()?;
|
||||
window
|
||||
.spawn(cx, async move |cx| {
|
||||
let (worktree, relative_path) = task.await?;
|
||||
let worktree_id = worktree.update(cx, |this, _| this.id())?;
|
||||
let item = weak
|
||||
.update_in(cx, |this, window, cx| {
|
||||
this.workspace.update(cx, |this, cx| {
|
||||
this.open_path(
|
||||
(worktree_id, relative_path),
|
||||
None,
|
||||
true,
|
||||
window,
|
||||
cx,
|
||||
)
|
||||
})
|
||||
})??
|
||||
.await?;
|
||||
if let Some(editor) = item.downcast::<Editor>() {
|
||||
editor
|
||||
.update_in(cx, |this, window, cx| {
|
||||
this.go_to_singleton_buffer_point(
|
||||
Point { row, column: 0 },
|
||||
window,
|
||||
cx,
|
||||
);
|
||||
})
|
||||
.ok();
|
||||
}
|
||||
Result::<_, anyhow::Error>::Ok(())
|
||||
})
|
||||
.detach();
|
||||
|
||||
Some(())
|
||||
});
|
||||
})
|
||||
.cursor_pointer()
|
||||
.py_1()
|
||||
.items_center()
|
||||
.child(
|
||||
h_flex()
|
||||
.gap_1()
|
||||
.child(
|
||||
Label::new(name)
|
||||
.size(LabelSize::Small)
|
||||
.line_height_style(ui::LineHeightStyle::UiLabel),
|
||||
)
|
||||
.children(dir.map(|dir| {
|
||||
Label::new(dir)
|
||||
.color(Color::Muted)
|
||||
.size(LabelSize::Small)
|
||||
.line_height_style(ui::LineHeightStyle::UiLabel)
|
||||
})),
|
||||
)
|
||||
.child(
|
||||
Label::new(line)
|
||||
.size(LabelSize::XSmall)
|
||||
.color(Color::Muted)
|
||||
.line_height_style(ui::LineHeightStyle::UiLabel),
|
||||
),
|
||||
)
|
||||
}
|
||||
}
|
||||
#[derive(Clone, Debug)]
|
||||
struct ExceptionBreakpoint {
|
||||
id: String,
|
||||
data: ExceptionBreakpointsFilter,
|
||||
is_enabled: bool,
|
||||
}
|
||||
|
||||
impl ExceptionBreakpoint {
|
||||
fn render(self, list: WeakEntity<BreakpointList>) -> ListItem {
|
||||
let color = if self.is_enabled {
|
||||
Color::Debugger
|
||||
} else {
|
||||
Color::Muted
|
||||
};
|
||||
let id = SharedString::from(&self.id);
|
||||
ListItem::new(SharedString::from(format!(
|
||||
"exception-breakpoint-ui-item-{}",
|
||||
self.id
|
||||
)))
|
||||
.rounded()
|
||||
.start_slot(
|
||||
div()
|
||||
.id(SharedString::from(format!(
|
||||
"exception-breakpoint-ui-item-{}-click-handler",
|
||||
self.id
|
||||
)))
|
||||
.on_click(move |_, _, cx| {
|
||||
list.update(cx, |this, cx| {
|
||||
this.session.update(cx, |this, cx| {
|
||||
this.toggle_exception_breakpoint(&id, cx);
|
||||
});
|
||||
cx.notify();
|
||||
})
|
||||
.ok();
|
||||
})
|
||||
.cursor_pointer()
|
||||
.child(Indicator::icon(Icon::new(IconName::Flame)).color(color)),
|
||||
)
|
||||
.child(
|
||||
div()
|
||||
.py_1()
|
||||
.gap_1()
|
||||
.child(
|
||||
Label::new(self.data.label)
|
||||
.size(LabelSize::Small)
|
||||
.line_height_style(ui::LineHeightStyle::UiLabel),
|
||||
)
|
||||
.children(self.data.description.map(|description| {
|
||||
Label::new(description)
|
||||
.size(LabelSize::XSmall)
|
||||
.line_height_style(ui::LineHeightStyle::UiLabel)
|
||||
.color(Color::Muted)
|
||||
})),
|
||||
)
|
||||
}
|
||||
}
|
||||
#[derive(Clone, Debug)]
|
||||
enum BreakpointEntryKind {
|
||||
LineBreakpoint(LineBreakpoint),
|
||||
ExceptionBreakpoint(ExceptionBreakpoint),
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug)]
|
||||
struct BreakpointEntry {
|
||||
kind: BreakpointEntryKind,
|
||||
weak: WeakEntity<BreakpointList>,
|
||||
}
|
||||
impl RenderOnce for BreakpointEntry {
|
||||
fn render(self, _: &mut ui::Window, _: &mut App) -> impl ui::IntoElement {
|
||||
match self.kind {
|
||||
BreakpointEntryKind::LineBreakpoint(line_breakpoint) => {
|
||||
line_breakpoint.render(self.weak)
|
||||
}
|
||||
BreakpointEntryKind::ExceptionBreakpoint(exception_breakpoint) => {
|
||||
exception_breakpoint.render(self.weak)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -17,7 +17,7 @@ use project::{
|
||||
use settings::Settings;
|
||||
use std::{cell::RefCell, rc::Rc, usize};
|
||||
use theme::ThemeSettings;
|
||||
use ui::prelude::*;
|
||||
use ui::{Divider, prelude::*};
|
||||
|
||||
pub struct Console {
|
||||
console: Entity<Editor>,
|
||||
@@ -105,6 +105,10 @@ impl Console {
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) fn show_indicator(&self, cx: &App) -> bool {
|
||||
self.session.read(cx).has_new_output(self.last_token)
|
||||
}
|
||||
|
||||
pub fn add_messages<'a>(
|
||||
&mut self,
|
||||
events: impl Iterator<Item = &'a OutputEvent>,
|
||||
@@ -141,7 +145,7 @@ impl Console {
|
||||
state.evaluate(
|
||||
expression,
|
||||
Some(dap::EvaluateArgumentsContext::Variables),
|
||||
self.stack_frame_list.read(cx).current_stack_frame_id(),
|
||||
self.stack_frame_list.read(cx).selected_stack_frame_id(),
|
||||
None,
|
||||
cx,
|
||||
);
|
||||
@@ -229,8 +233,8 @@ impl Render for Console {
|
||||
.size_full()
|
||||
.child(self.render_console(cx))
|
||||
.when(self.is_local(cx), |this| {
|
||||
this.child(self.render_query_bar(cx))
|
||||
.pt(DynamicSpacing::Base04.rems(cx))
|
||||
this.child(Divider::horizontal())
|
||||
.child(self.render_query_bar(cx))
|
||||
})
|
||||
.border_2()
|
||||
}
|
||||
@@ -384,7 +388,7 @@ impl ConsoleQueryBarCompletionProvider {
|
||||
) -> Task<Result<Option<Vec<Completion>>>> {
|
||||
let completion_task = console.update(cx, |console, cx| {
|
||||
console.session.update(cx, |state, cx| {
|
||||
let frame_id = console.stack_frame_list.read(cx).current_stack_frame_id();
|
||||
let frame_id = console.stack_frame_list.read(cx).selected_stack_frame_id();
|
||||
|
||||
state.completions(
|
||||
CompletionsQuery::new(buffer.read(cx), buffer_position, frame_id),
|
||||
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user