Compare commits

..

17 Commits

Author SHA1 Message Date
morgankrey
7a462ca07b Quote Escape 2025-12-22 10:39:59 -06:00
morgankrey
ba75c7b0c4 Fix Excaping 2025-12-22 10:10:12 -06:00
morgankrey
79390a7d22 Speed up 2025-12-22 10:01:12 -06:00
morgankrey
baac7ce3dc adding local testing and removing step 2 2025-12-21 20:28:35 -06:00
morgankrey
c0918dba8e Merge branch 'main' of https://github.com/zed-industries/zed 2025-12-21 13:41:35 -06:00
morgankrey
f9bf1fc9ae Merge branch 'main' of https://github.com/zed-industries/zed 2025-12-19 11:37:24 -06:00
morgankrey
ba8583c8e5 Merge branch 'main' of https://github.com/zed-industries/zed 2025-12-19 05:55:10 -06:00
morgankrey
1798498ec4 Merge branch 'main' of https://github.com/zed-industries/zed 2025-11-24 13:25:05 -06:00
morgankrey
15e327b2c2 Merge branch 'main' of https://github.com/zed-industries/zed 2025-11-21 10:37:14 -06:00
morgankrey
d0e9243cdf Merge branch 'main' of https://github.com/zed-industries/zed 2025-11-14 00:09:21 -06:00
morgankrey
94dcf3fa6e Merge branch 'main' of https://github.com/zed-industries/zed 2025-11-07 13:04:31 -06:00
morgankrey
279364340e Merge branch 'main' of https://github.com/zed-industries/zed 2025-11-05 11:46:34 -06:00
morgankrey
8e2727721b Merge branch 'main' of https://github.com/zed-industries/zed 2025-11-05 05:42:57 -06:00
morgankrey
57d5fd0c0b undo 2025-10-10 10:23:55 -05:00
morgankrey
817e486ec0 Merge branch 'main' of https://github.com/zed-industries/zed 2025-10-10 10:21:27 -05:00
morgankrey
887570d852 Merge branch 'main' of https://github.com/zed-industries/zed 2025-10-10 08:08:53 -05:00
morgankrey
5fb4109309 Add Grok 2025-10-10 08:08:51 -05:00
83 changed files with 1632 additions and 2446 deletions

View File

@@ -1,55 +0,0 @@
# Phase 2: Explore Repository
You are analyzing a codebase to understand its structure before reviewing documentation impact.
## Objective
Produce a structured overview of the repository to inform subsequent documentation analysis.
## Instructions
1. **Identify Primary Languages and Frameworks**
- Scan for Cargo.toml, package.json, or other manifest files
- Note the primary language(s) and key dependencies
2. **Map Documentation Structure**
- This project uses **mdBook** (https://rust-lang.github.io/mdBook/)
- Documentation is in `docs/src/`
- Table of contents: `docs/src/SUMMARY.md` (mdBook format: https://rust-lang.github.io/mdBook/format/summary.html)
- Style guide: `docs/.rules`
- Agent guidelines: `docs/AGENTS.md`
- Formatting: Prettier (config in `docs/.prettierrc`)
3. **Identify Build and Tooling**
- Note build systems (cargo, npm, etc.)
- Identify documentation tooling (mdbook, etc.)
4. **Output Format**
Produce a JSON summary:
```json
{
"primary_language": "Rust",
"frameworks": ["GPUI"],
"documentation": {
"system": "mdBook",
"location": "docs/src/",
"toc_file": "docs/src/SUMMARY.md",
"toc_format": "https://rust-lang.github.io/mdBook/format/summary.html",
"style_guide": "docs/.rules",
"agent_guidelines": "docs/AGENTS.md",
"formatter": "prettier",
"formatter_config": "docs/.prettierrc",
"custom_preprocessor": "docs_preprocessor (handles {#kb action::Name} syntax)"
},
"key_directories": {
"source": "crates/",
"docs": "docs/src/",
"extensions": "extensions/"
}
}
```
## Constraints
- Read-only: Do not modify any files
- Focus on structure, not content details
- Complete within 2 minutes

View File

@@ -25,7 +25,6 @@ self-hosted-runner:
- namespace-profile-32x64-ubuntu-2204
# Namespace Ubuntu 24.04 (like ubuntu-latest)
- namespace-profile-2x4-ubuntu-2404
- namespace-profile-8x32-ubuntu-2404
# Namespace Limited Preview
- namespace-profile-8x16-ubuntu-2004-arm-m4
- namespace-profile-8x32-ubuntu-2004-arm-m4

View File

@@ -23,7 +23,8 @@ permissions:
env:
FACTORY_API_KEY: ${{ secrets.FACTORY_API_KEY }}
DROID_MODEL: claude-opus-4-5-20251101
ANALYSIS_MODEL: gemini-3-flash-preview
WRITING_MODEL: claude-opus-4-5-20251101
jobs:
docs-automation:
@@ -83,111 +84,103 @@ jobs:
env:
GH_TOKEN: ${{ github.token }}
# Phase 0: Guardrails are loaded via AGENTS.md in each phase
# Phase 2: Explore Repository (Read-Only - default)
- name: "Phase 2: Explore Repository"
id: phase2
# Filter for docs-relevant files
- name: "Filter docs-relevant files"
id: filter
run: |
"$DROID_BIN" exec \
-m "$DROID_MODEL" \
-f .factory/prompts/docs-automation/phase2-explore.md \
> /tmp/phase2-output.txt 2>&1 || true
echo "Repository exploration complete"
cat /tmp/phase2-output.txt
# Phase 3: Analyze Changes (Read-Only - default)
- name: "Phase 3: Analyze Changes"
id: phase3
run: |
CHANGED_FILES=$(tr '\n' ' ' < /tmp/changed_files.txt)
echo "Analyzing changes in: $CHANGED_FILES"
# Build prompt with context
cat > /tmp/phase3-prompt.md << 'EOF'
$(cat .factory/prompts/docs-automation/phase3-analyze.md)
## Context
### Changed Files
$CHANGED_FILES
### Phase 2 Output
$(cat /tmp/phase2-output.txt)
EOF
"$DROID_BIN" exec \
-m "$DROID_MODEL" \
"$(cat .factory/prompts/docs-automation/phase3-analyze.md)
Changed files: $CHANGED_FILES" \
> /tmp/phase3-output.md 2>&1 || true
echo "Change analysis complete"
cat /tmp/phase3-output.md
# Phase 4: Plan Documentation Impact (Read-Only - default)
- name: "Phase 4: Plan Documentation Impact"
id: phase4
run: |
"$DROID_BIN" exec \
-m "$DROID_MODEL" \
-f .factory/prompts/docs-automation/phase4-plan.md \
> /tmp/phase4-plan.md 2>&1 || true
echo "Documentation plan complete"
cat /tmp/phase4-plan.md
# Check if updates are required
if grep -q "NO_UPDATES_REQUIRED" /tmp/phase4-plan.md; then
echo "updates_required=false" >> "$GITHUB_OUTPUT"
# Patterns for files that could affect documentation
PATTERNS="crates/.*/src/.*\.rs|assets/settings/.*|assets/keymaps/.*|extensions/.*|docs/.*"
RELEVANT=$(grep -E "$PATTERNS" /tmp/changed_files.txt || true)
if [ -z "$RELEVANT" ]; then
echo "No docs-relevant files changed"
echo "has_relevant=false" >> "$GITHUB_OUTPUT"
else
echo "updates_required=true" >> "$GITHUB_OUTPUT"
echo "Docs-relevant files found:"
echo "$RELEVANT"
echo "has_relevant=true" >> "$GITHUB_OUTPUT"
fi
# Phase 5: Apply Plan (Write-Enabled with --auto medium)
- name: "Phase 5: Apply Documentation Plan"
id: phase5
if: steps.phase4.outputs.updates_required == 'true'
# Combined: Analyze + Plan (using fast model)
- name: "Analyze & Plan"
id: analyze
if: steps.filter.outputs.has_relevant == 'true'
run: |
CHANGED_FILES=$(tr '\n' ' ' < /tmp/changed_files.txt)
GUIDELINES='## Documentation Guidelines
### Requires Update: New features, changed keybindings, modified settings, deprecated functionality
### No Update: Internal refactoring, performance fixes, bug fixes, test/CI changes
### Output JSON: {"updates_required": bool, "summary": str, "planned_changes": [{file, section, change_type, description}]}'
"$DROID_BIN" exec \
-m "$DROID_MODEL" \
-m "$ANALYSIS_MODEL" \
--auto low \
"Analyze code changes for documentation impact.
$GUIDELINES
Changed files: $CHANGED_FILES
Output the JSON structure. Be conservative - only flag user-visible changes." \
> /tmp/analysis.json 2>&1 || true
echo "Analysis complete:"
cat /tmp/analysis.json
# Check if updates required
if grep -q '"updates_required":\s*true' /tmp/analysis.json; then
echo "updates_required=true" >> "$GITHUB_OUTPUT"
else
echo "updates_required=false" >> "$GITHUB_OUTPUT"
fi
# Combined: Apply + Summarize (using writing model)
- name: "Apply Documentation Changes"
id: apply
if: steps.analyze.outputs.updates_required == 'true'
run: |
ANALYSIS=$(cat /tmp/analysis.json)
"$DROID_BIN" exec \
-m "$WRITING_MODEL" \
--auto medium \
-f .factory/prompts/docs-automation/phase5-apply.md \
> /tmp/phase5-report.md 2>&1 || true
echo "Documentation updates applied"
cat /tmp/phase5-report.md
"Apply documentation changes from this analysis:
$ANALYSIS
Instructions:
1. Edit each specified file
2. Follow mdBook format, use {#kb action::Name} for keybindings
3. Output summary:
## Changes Applied
- [file]: [change]
## Summary for PR
[2-3 sentences]" \
> /tmp/apply-report.md 2>&1 || true
echo "Changes applied:"
cat /tmp/apply-report.md
cp /tmp/apply-report.md /tmp/phase6-summary.md
# Phase 5b: Format with Prettier
- name: "Phase 5b: Format with Prettier"
id: phase5b
if: steps.phase4.outputs.updates_required == 'true'
# Format with Prettier (only changed files)
- name: "Format with Prettier"
id: format
if: steps.analyze.outputs.updates_required == 'true'
run: |
echo "Formatting documentation with Prettier..."
cd docs && prettier --write src/
CHANGED_DOCS=$(git diff --name-only docs/src/ | sed 's|^docs/||' | tr '\n' ' ')
if [ -n "$CHANGED_DOCS" ]; then
echo "Formatting: $CHANGED_DOCS"
cd docs && prettier --write "$CHANGED_DOCS"
fi
echo "Verifying Prettier formatting passes..."
cd docs && prettier --check src/
echo "Prettier formatting complete"
# Phase 6: Summarize Changes (Read-Only - default)
- name: "Phase 6: Summarize Changes"
id: phase6
if: steps.phase4.outputs.updates_required == 'true'
run: |
# Get git diff of docs
git diff docs/src/ > /tmp/docs-diff.txt || true
"$DROID_BIN" exec \
-m "$DROID_MODEL" \
-f .factory/prompts/docs-automation/phase6-summarize.md \
> /tmp/phase6-summary.md 2>&1 || true
echo "Summary generated"
cat /tmp/phase6-summary.md
# Phase 7: Commit and Open PR
- name: "Phase 7: Create PR"
id: phase7
if: steps.phase4.outputs.updates_required == 'true'
# Create PR
- name: "Create PR"
id: create_pr
if: steps.analyze.outputs.updates_required == 'true'
run: |
# Check if there are actual changes
if git diff --quiet docs/src/; then
@@ -202,6 +195,21 @@ jobs:
# Daily batch branch - one branch per day, multiple commits accumulate
BRANCH_NAME="docs/auto-update-$(date +%Y-%m-%d)"
# Get source PR info for attribution
SOURCE_PR_INFO=""
if [ "${{ steps.changed.outputs.source }}" == "pr" ]; then
PR_NUM="${{ steps.changed.outputs.ref }}"
PR_DETAILS=$(gh pr view "$PR_NUM" --json title,author,url 2>/dev/null || echo "{}")
SOURCE_TITLE=$(echo "$PR_DETAILS" | jq -r '.title // "Unknown"')
SOURCE_AUTHOR=$(echo "$PR_DETAILS" | jq -r '.author.login // "Unknown"')
SOURCE_URL=$(echo "$PR_DETAILS" | jq -r '.url // ""')
SOURCE_PR_INFO="
---
**Source**: [#$PR_NUM]($SOURCE_URL) - $SOURCE_TITLE
**Author**: @$SOURCE_AUTHOR
"
fi
# Stash local changes from phase 5
git stash push -m "docs-automation-changes" -- docs/src/
@@ -232,16 +240,37 @@ jobs:
# Push
git push -u origin "$BRANCH_NAME"
# Check if PR already exists for this branch
EXISTING_PR=$(gh pr list --head "$BRANCH_NAME" --json number --jq '.[0].number' || echo "")
# Build the PR body section for this update
PR_BODY_SECTION="## Update from $(date '+%Y-%m-%d %H:%M')
$SOURCE_PR_INFO
$(cat /tmp/phase6-summary.md)
"
if [ -n "$EXISTING_PR" ]; then
echo "PR #$EXISTING_PR already exists for branch $BRANCH_NAME, updated with new commit"
# Check if PR already exists for this branch
EXISTING_PR=$(gh pr list --head "$BRANCH_NAME" --json number,url,body --jq '.[0]' || echo "")
if [ -n "$EXISTING_PR" ] && [ "$EXISTING_PR" != "null" ]; then
PR_NUM=$(echo "$EXISTING_PR" | jq -r '.number')
PR_URL=$(echo "$EXISTING_PR" | jq -r '.url')
EXISTING_BODY=$(echo "$EXISTING_PR" | jq -r '.body // ""')
# Append new summary to existing PR body
NEW_BODY="${EXISTING_BODY}
---
${PR_BODY_SECTION}"
echo "$NEW_BODY" > /tmp/updated-pr-body.md
gh pr edit "$PR_NUM" --body-file /tmp/updated-pr-body.md
echo "PR #$PR_NUM updated: $PR_URL"
else
# Create new PR
echo "$PR_BODY_SECTION" > /tmp/new-pr-body.md
gh pr create \
--title "docs: automated documentation update ($(date +%Y-%m-%d))" \
--body-file /tmp/phase6-summary.md \
--body-file /tmp/new-pr-body.md \
--base main || true
echo "PR created on branch: $BRANCH_NAME"
fi
@@ -255,10 +284,12 @@ jobs:
echo "## Documentation Automation Summary" >> "$GITHUB_STEP_SUMMARY"
echo "" >> "$GITHUB_STEP_SUMMARY"
if [ "${{ steps.phase4.outputs.updates_required }}" == "false" ]; then
if [ "${{ steps.filter.outputs.has_relevant }}" == "false" ]; then
echo "No docs-relevant files changed. Skipped analysis." >> "$GITHUB_STEP_SUMMARY"
elif [ "${{ steps.analyze.outputs.updates_required }}" == "false" ]; then
echo "No documentation updates required for this change." >> "$GITHUB_STEP_SUMMARY"
elif [ -f /tmp/phase6-summary.md ]; then
cat /tmp/phase6-summary.md >> "$GITHUB_STEP_SUMMARY"
else
echo "Workflow completed. Check individual phase outputs for details." >> "$GITHUB_STEP_SUMMARY"
echo "Workflow completed. Check individual step outputs for details." >> "$GITHUB_STEP_SUMMARY"
fi

View File

@@ -51,7 +51,7 @@ jobs:
needs:
- orchestrate
if: needs.orchestrate.outputs.check_rust == 'true'
runs-on: namespace-profile-4x8-ubuntu-2204
runs-on: namespace-profile-16x32-ubuntu-2204
steps:
- name: steps::checkout_repo
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
@@ -79,7 +79,7 @@ jobs:
needs:
- orchestrate
if: needs.orchestrate.outputs.check_extension == 'true'
runs-on: namespace-profile-8x32-ubuntu-2404
runs-on: namespace-profile-2x4-ubuntu-2404
steps:
- name: steps::checkout_repo
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683

View File

@@ -23,6 +23,7 @@ In particular we love PRs that are:
If you're looking for concrete ideas:
- [Curated board of issues](https://github.com/orgs/zed-industries/projects/69) suitable for everyone from first-time contributors to seasoned community champions.
- [Triaged bugs with confirmed steps to reproduce](https://github.com/zed-industries/zed/issues?q=is%3Aissue%20state%3Aopen%20type%3ABug%20label%3Astate%3Areproducible).
- [Area labels](https://github.com/zed-industries/zed/labels?q=area%3A*) to browse bugs in a specific part of the product you care about (after clicking on an area label, add `type:Bug` to the search query to filter down to bugs).

14
Cargo.lock generated
View File

@@ -5212,7 +5212,6 @@ dependencies = [
"anyhow",
"arrayvec",
"brotli",
"buffer_diff",
"client",
"clock",
"cloud_api_types",
@@ -5250,9 +5249,7 @@ dependencies = [
"strum 0.27.2",
"telemetry",
"telemetry_events",
"text",
"thiserror 2.0.17",
"time",
"ui",
"util",
"uuid",
@@ -5357,10 +5354,8 @@ dependencies = [
"anyhow",
"buffer_diff",
"client",
"clock",
"cloud_llm_client",
"codestral",
"collections",
"command_palette_hooks",
"copilot",
"edit_prediction",
@@ -5369,20 +5364,18 @@ dependencies = [
"feature_flags",
"fs",
"futures 0.3.31",
"git",
"gpui",
"indoc",
"language",
"language_model",
"log",
"lsp",
"markdown",
"menu",
"multi_buffer",
"paths",
"pretty_assertions",
"project",
"regex",
"release_channel",
"semver",
"serde_json",
"settings",
"supermaven",
@@ -5395,7 +5388,6 @@ dependencies = [
"workspace",
"zed_actions",
"zeta_prompt",
"zlog",
]
[[package]]
@@ -20978,7 +20970,7 @@ dependencies = [
[[package]]
name = "zed_proto"
version = "0.3.1"
version = "0.3.0"
dependencies = [
"zed_extension_api 0.7.0",
]

View File

@@ -241,7 +241,6 @@
"ctrl-alt-l": "agent::OpenRulesLibrary",
"ctrl-i": "agent::ToggleProfileSelector",
"ctrl-alt-/": "agent::ToggleModelSelector",
"alt-tab": "agent::CycleFavoriteModels",
"ctrl-shift-j": "agent::ToggleNavigationMenu",
"ctrl-alt-i": "agent::ToggleOptionsMenu",
"ctrl-alt-shift-n": "agent::ToggleNewThreadMenu",
@@ -254,6 +253,7 @@
"ctrl-y": "agent::AllowOnce",
"ctrl-alt-y": "agent::AllowAlways",
"ctrl-alt-z": "agent::RejectOnce",
"alt-tab": "agent::CycleFavoriteModels",
},
},
{
@@ -285,6 +285,38 @@
"ctrl-alt-t": "agent::NewThread",
},
},
{
"context": "MessageEditor && !Picker > Editor && !use_modifier_to_send",
"bindings": {
"enter": "agent::Chat",
"ctrl-enter": "agent::ChatWithFollow",
"ctrl-i": "agent::ToggleProfileSelector",
"shift-ctrl-r": "agent::OpenAgentDiff",
"ctrl-shift-y": "agent::KeepAll",
"ctrl-shift-n": "agent::RejectAll",
"ctrl-shift-v": "agent::PasteRaw",
},
},
{
"context": "MessageEditor && !Picker > Editor && use_modifier_to_send",
"bindings": {
"ctrl-enter": "agent::Chat",
"enter": "editor::Newline",
"ctrl-i": "agent::ToggleProfileSelector",
"shift-ctrl-r": "agent::OpenAgentDiff",
"ctrl-shift-y": "agent::KeepAll",
"ctrl-shift-n": "agent::RejectAll",
"ctrl-shift-v": "agent::PasteRaw",
},
},
{
"context": "EditMessageEditor > Editor",
"bindings": {
"escape": "menu::Cancel",
"enter": "menu::Confirm",
"alt-enter": "editor::Newline",
},
},
{
"context": "AgentFeedbackMessageEditor > Editor",
"bindings": {
@@ -299,25 +331,14 @@
"ctrl-enter": "menu::Confirm",
},
},
{
"context": "AcpThread > Editor",
"use_key_equivalents": true,
"bindings": {
"ctrl-enter": "agent::ChatWithFollow",
"ctrl-i": "agent::ToggleProfileSelector",
"ctrl-shift-r": "agent::OpenAgentDiff",
"ctrl-shift-y": "agent::KeepAll",
"ctrl-shift-n": "agent::RejectAll",
"ctrl-shift-v": "agent::PasteRaw",
"shift-tab": "agent::CycleModeSelector",
"alt-tab": "agent::CycleFavoriteModels",
},
},
{
"context": "AcpThread > Editor && !use_modifier_to_send",
"use_key_equivalents": true,
"bindings": {
"enter": "agent::Chat",
"shift-ctrl-r": "agent::OpenAgentDiff",
"ctrl-shift-y": "agent::KeepAll",
"ctrl-shift-n": "agent::RejectAll",
},
},
{
@@ -325,7 +346,11 @@
"use_key_equivalents": true,
"bindings": {
"ctrl-enter": "agent::Chat",
"enter": "editor::Newline",
"shift-ctrl-r": "agent::OpenAgentDiff",
"ctrl-shift-y": "agent::KeepAll",
"ctrl-shift-n": "agent::RejectAll",
"shift-tab": "agent::CycleModeSelector",
"alt-tab": "agent::CycleFavoriteModels",
},
},
{
@@ -792,7 +817,7 @@
},
},
{
"context": "InlineAssistant",
"context": "PromptEditor",
"bindings": {
"ctrl-[": "agent::CyclePreviousInlineAssist",
"ctrl-]": "agent::CycleNextInlineAssist",

View File

@@ -282,7 +282,6 @@
"cmd-alt-p": "agent::ManageProfiles",
"cmd-i": "agent::ToggleProfileSelector",
"cmd-alt-/": "agent::ToggleModelSelector",
"alt-tab": "agent::CycleFavoriteModels",
"cmd-shift-j": "agent::ToggleNavigationMenu",
"cmd-alt-m": "agent::ToggleOptionsMenu",
"cmd-alt-shift-n": "agent::ToggleNewThreadMenu",
@@ -295,6 +294,7 @@
"cmd-y": "agent::AllowOnce",
"cmd-alt-y": "agent::AllowAlways",
"cmd-alt-z": "agent::RejectOnce",
"alt-tab": "agent::CycleFavoriteModels",
},
},
{
@@ -326,6 +326,41 @@
"cmd-alt-t": "agent::NewThread",
},
},
{
"context": "MessageEditor && !Picker > Editor && !use_modifier_to_send",
"use_key_equivalents": true,
"bindings": {
"enter": "agent::Chat",
"cmd-enter": "agent::ChatWithFollow",
"cmd-i": "agent::ToggleProfileSelector",
"shift-ctrl-r": "agent::OpenAgentDiff",
"cmd-shift-y": "agent::KeepAll",
"cmd-shift-n": "agent::RejectAll",
"cmd-shift-v": "agent::PasteRaw",
},
},
{
"context": "MessageEditor && !Picker > Editor && use_modifier_to_send",
"use_key_equivalents": true,
"bindings": {
"cmd-enter": "agent::Chat",
"enter": "editor::Newline",
"cmd-i": "agent::ToggleProfileSelector",
"shift-ctrl-r": "agent::OpenAgentDiff",
"cmd-shift-y": "agent::KeepAll",
"cmd-shift-n": "agent::RejectAll",
"cmd-shift-v": "agent::PasteRaw",
},
},
{
"context": "EditMessageEditor > Editor",
"use_key_equivalents": true,
"bindings": {
"escape": "menu::Cancel",
"enter": "menu::Confirm",
"alt-enter": "editor::Newline",
},
},
{
"context": "AgentFeedbackMessageEditor > Editor",
"use_key_equivalents": true,
@@ -347,25 +382,16 @@
"cmd-enter": "menu::Confirm",
},
},
{
"context": "AcpThread > Editor",
"use_key_equivalents": true,
"bindings": {
"shift-ctrl-r": "agent::OpenAgentDiff",
"cmd-shift-y": "agent::KeepAll",
"cmd-shift-n": "agent::RejectAll",
"cmd-enter": "agent::ChatWithFollow",
"cmd-shift-v": "agent::PasteRaw",
"cmd-i": "agent::ToggleProfileSelector",
"shift-tab": "agent::CycleModeSelector",
"alt-tab": "agent::CycleFavoriteModels",
},
},
{
"context": "AcpThread > Editor && !use_modifier_to_send",
"use_key_equivalents": true,
"bindings": {
"enter": "agent::Chat",
"shift-ctrl-r": "agent::OpenAgentDiff",
"cmd-shift-y": "agent::KeepAll",
"cmd-shift-n": "agent::RejectAll",
"shift-tab": "agent::CycleModeSelector",
"alt-tab": "agent::CycleFavoriteModels",
},
},
{
@@ -373,7 +399,11 @@
"use_key_equivalents": true,
"bindings": {
"cmd-enter": "agent::Chat",
"enter": "editor::Newline",
"shift-ctrl-r": "agent::OpenAgentDiff",
"cmd-shift-y": "agent::KeepAll",
"cmd-shift-n": "agent::RejectAll",
"shift-tab": "agent::CycleModeSelector",
"alt-tab": "agent::CycleFavoriteModels",
},
},
{
@@ -853,7 +883,7 @@
},
},
{
"context": "InlineAssistant > Editor",
"context": "PromptEditor",
"use_key_equivalents": true,
"bindings": {
"cmd-alt-/": "agent::ToggleModelSelector",

View File

@@ -241,7 +241,6 @@
"shift-alt-l": "agent::OpenRulesLibrary",
"shift-alt-p": "agent::ManageProfiles",
"ctrl-i": "agent::ToggleProfileSelector",
"alt-tab": "agent::CycleFavoriteModels",
"shift-alt-/": "agent::ToggleModelSelector",
"shift-alt-j": "agent::ToggleNavigationMenu",
"shift-alt-i": "agent::ToggleOptionsMenu",
@@ -255,6 +254,7 @@
"shift-alt-a": "agent::AllowOnce",
"ctrl-alt-y": "agent::AllowAlways",
"shift-alt-z": "agent::RejectOnce",
"alt-tab": "agent::CycleFavoriteModels",
},
},
{
@@ -287,6 +287,41 @@
"ctrl-alt-t": "agent::NewThread",
},
},
{
"context": "MessageEditor && !Picker > Editor && !use_modifier_to_send",
"use_key_equivalents": true,
"bindings": {
"enter": "agent::Chat",
"ctrl-enter": "agent::ChatWithFollow",
"ctrl-i": "agent::ToggleProfileSelector",
"ctrl-shift-r": "agent::OpenAgentDiff",
"ctrl-shift-y": "agent::KeepAll",
"ctrl-shift-n": "agent::RejectAll",
"ctrl-shift-v": "agent::PasteRaw",
},
},
{
"context": "MessageEditor && !Picker > Editor && use_modifier_to_send",
"use_key_equivalents": true,
"bindings": {
"ctrl-enter": "agent::Chat",
"enter": "editor::Newline",
"ctrl-i": "agent::ToggleProfileSelector",
"ctrl-shift-r": "agent::OpenAgentDiff",
"ctrl-shift-y": "agent::KeepAll",
"ctrl-shift-n": "agent::RejectAll",
"ctrl-shift-v": "agent::PasteRaw",
},
},
{
"context": "EditMessageEditor > Editor",
"use_key_equivalents": true,
"bindings": {
"escape": "menu::Cancel",
"enter": "menu::Confirm",
"alt-enter": "editor::Newline",
},
},
{
"context": "AgentFeedbackMessageEditor > Editor",
"use_key_equivalents": true,
@@ -302,25 +337,16 @@
"ctrl-enter": "menu::Confirm",
},
},
{
"context": "AcpThread > Editor",
"use_key_equivalents": true,
"bindings": {
"ctrl-enter": "agent::ChatWithFollow",
"ctrl-i": "agent::ToggleProfileSelector",
"ctrl-shift-r": "agent::OpenAgentDiff",
"ctrl-shift-y": "agent::KeepAll",
"ctrl-shift-n": "agent::RejectAll",
"ctrl-shift-v": "agent::PasteRaw",
"shift-tab": "agent::CycleModeSelector",
"alt-tab": "agent::CycleFavoriteModels",
},
},
{
"context": "AcpThread > Editor && !use_modifier_to_send",
"use_key_equivalents": true,
"bindings": {
"enter": "agent::Chat",
"ctrl-shift-r": "agent::OpenAgentDiff",
"ctrl-shift-y": "agent::KeepAll",
"ctrl-shift-n": "agent::RejectAll",
"shift-tab": "agent::CycleModeSelector",
"alt-tab": "agent::CycleFavoriteModels",
},
},
{
@@ -328,7 +354,11 @@
"use_key_equivalents": true,
"bindings": {
"ctrl-enter": "agent::Chat",
"enter": "editor::Newline",
"ctrl-shift-r": "agent::OpenAgentDiff",
"ctrl-shift-y": "agent::KeepAll",
"ctrl-shift-n": "agent::RejectAll",
"shift-tab": "agent::CycleModeSelector",
"alt-tab": "agent::CycleFavoriteModels",
},
},
{
@@ -796,7 +826,7 @@
},
},
{
"context": "InlineAssistant",
"context": "PromptEditor",
"use_key_equivalents": true,
"bindings": {
"ctrl-[": "agent::CyclePreviousInlineAssist",

View File

@@ -24,7 +24,7 @@
},
},
{
"context": "InlineAssistant > Editor",
"context": "InlineAssistEditor",
"use_key_equivalents": true,
"bindings": {
"ctrl-shift-backspace": "editor::Cancel",

View File

@@ -24,7 +24,7 @@
},
},
{
"context": "InlineAssistant > Editor",
"context": "InlineAssistEditor",
"use_key_equivalents": true,
"bindings": {
"cmd-shift-backspace": "editor::Cancel",

View File

@@ -202,6 +202,12 @@ pub trait AgentModelSelector: 'static {
fn should_render_footer(&self) -> bool {
false
}
/// Whether this selector supports the favorites feature.
/// Only the native agent uses the model ID format that maps to settings.
fn supports_favorites(&self) -> bool {
false
}
}
/// Icon for a model in the model selector.

View File

@@ -1167,6 +1167,10 @@ impl acp_thread::AgentModelSelector for NativeAgentModelSelector {
fn should_render_footer(&self) -> bool {
true
}
fn supports_favorites(&self) -> bool {
true
}
}
impl acp_thread::AgentConnection for NativeAgentConnection {

View File

@@ -1,14 +1,10 @@
use std::{any::Any, path::Path, rc::Rc, sync::Arc};
use agent_client_protocol as acp;
use agent_servers::{AgentServer, AgentServerDelegate};
use agent_settings::AgentSettings;
use anyhow::Result;
use collections::HashSet;
use fs::Fs;
use gpui::{App, Entity, SharedString, Task};
use prompt_store::PromptStore;
use settings::{LanguageModelSelection, Settings as _, update_settings_file};
use crate::{HistoryStore, NativeAgent, NativeAgentConnection, templates::Templates};
@@ -75,38 +71,6 @@ impl AgentServer for NativeAgentServer {
fn into_any(self: Rc<Self>) -> Rc<dyn Any> {
self
}
fn favorite_model_ids(&self, cx: &mut App) -> HashSet<acp::ModelId> {
AgentSettings::get_global(cx).favorite_model_ids()
}
fn toggle_favorite_model(
&self,
model_id: acp::ModelId,
should_be_favorite: bool,
fs: Arc<dyn Fs>,
cx: &App,
) {
let selection = model_id_to_selection(&model_id);
update_settings_file(fs, cx, move |settings, _| {
let agent = settings.agent.get_or_insert_default();
if should_be_favorite {
agent.add_favorite_model(selection.clone());
} else {
agent.remove_favorite_model(&selection);
}
});
}
}
/// Convert a ModelId (e.g. "anthropic/claude-3-5-sonnet") to a LanguageModelSelection.
fn model_id_to_selection(model_id: &acp::ModelId) -> LanguageModelSelection {
let id = model_id.0.as_ref();
let (provider, model) = id.split_once('/').unwrap_or(("", id));
LanguageModelSelection {
provider: provider.to_owned().into(),
model: model.to_owned(),
}
}
#[cfg(test)]

View File

@@ -4,8 +4,6 @@ mod codex;
mod custom;
mod gemini;
use collections::HashSet;
#[cfg(any(test, feature = "test-support"))]
pub mod e2e_tests;
@@ -58,19 +56,9 @@ impl AgentServerDelegate {
pub trait AgentServer: Send {
fn logo(&self) -> ui::IconName;
fn name(&self) -> SharedString;
fn connect(
&self,
root_dir: Option<&Path>,
delegate: AgentServerDelegate,
cx: &mut App,
) -> Task<Result<(Rc<dyn AgentConnection>, Option<task::SpawnInTerminal>)>>;
fn into_any(self: Rc<Self>) -> Rc<dyn Any>;
fn default_mode(&self, _cx: &mut App) -> Option<agent_client_protocol::SessionModeId> {
None
}
fn set_default_mode(
&self,
_mode_id: Option<agent_client_protocol::SessionModeId>,
@@ -91,18 +79,14 @@ pub trait AgentServer: Send {
) {
}
fn favorite_model_ids(&self, _cx: &mut App) -> HashSet<agent_client_protocol::ModelId> {
HashSet::default()
}
fn toggle_favorite_model(
fn connect(
&self,
_model_id: agent_client_protocol::ModelId,
_should_be_favorite: bool,
_fs: Arc<dyn Fs>,
_cx: &App,
) {
}
root_dir: Option<&Path>,
delegate: AgentServerDelegate,
cx: &mut App,
) -> Task<Result<(Rc<dyn AgentConnection>, Option<task::SpawnInTerminal>)>>;
fn into_any(self: Rc<Self>) -> Rc<dyn Any>;
}
impl dyn AgentServer {

View File

@@ -1,5 +1,4 @@
use agent_client_protocol as acp;
use collections::HashSet;
use fs::Fs;
use settings::{SettingsStore, update_settings_file};
use std::path::Path;
@@ -73,48 +72,6 @@ impl AgentServer for ClaudeCode {
});
}
fn favorite_model_ids(&self, cx: &mut App) -> HashSet<acp::ModelId> {
let settings = cx.read_global(|settings: &SettingsStore, _| {
settings.get::<AllAgentServersSettings>(None).claude.clone()
});
settings
.as_ref()
.map(|s| {
s.favorite_models
.iter()
.map(|id| acp::ModelId::new(id.clone()))
.collect()
})
.unwrap_or_default()
}
fn toggle_favorite_model(
&self,
model_id: acp::ModelId,
should_be_favorite: bool,
fs: Arc<dyn Fs>,
cx: &App,
) {
update_settings_file(fs, cx, move |settings, _| {
let favorite_models = &mut settings
.agent_servers
.get_or_insert_default()
.claude
.get_or_insert_default()
.favorite_models;
let model_id_str = model_id.to_string();
if should_be_favorite {
if !favorite_models.contains(&model_id_str) {
favorite_models.push(model_id_str);
}
} else {
favorite_models.retain(|id| id != &model_id_str);
}
});
}
fn connect(
&self,
root_dir: Option<&Path>,

View File

@@ -5,7 +5,6 @@ use std::{any::Any, path::Path};
use acp_thread::AgentConnection;
use agent_client_protocol as acp;
use anyhow::{Context as _, Result};
use collections::HashSet;
use fs::Fs;
use gpui::{App, AppContext as _, SharedString, Task};
use project::agent_server_store::{AllAgentServersSettings, CODEX_NAME};
@@ -74,48 +73,6 @@ impl AgentServer for Codex {
});
}
fn favorite_model_ids(&self, cx: &mut App) -> HashSet<acp::ModelId> {
let settings = cx.read_global(|settings: &SettingsStore, _| {
settings.get::<AllAgentServersSettings>(None).codex.clone()
});
settings
.as_ref()
.map(|s| {
s.favorite_models
.iter()
.map(|id| acp::ModelId::new(id.clone()))
.collect()
})
.unwrap_or_default()
}
fn toggle_favorite_model(
&self,
model_id: acp::ModelId,
should_be_favorite: bool,
fs: Arc<dyn Fs>,
cx: &App,
) {
update_settings_file(fs, cx, move |settings, _| {
let favorite_models = &mut settings
.agent_servers
.get_or_insert_default()
.codex
.get_or_insert_default()
.favorite_models;
let model_id_str = model_id.to_string();
if should_be_favorite {
if !favorite_models.contains(&model_id_str) {
favorite_models.push(model_id_str);
}
} else {
favorite_models.retain(|id| id != &model_id_str);
}
});
}
fn connect(
&self,
root_dir: Option<&Path>,

View File

@@ -2,7 +2,6 @@ use crate::{AgentServer, AgentServerDelegate, load_proxy_env};
use acp_thread::AgentConnection;
use agent_client_protocol as acp;
use anyhow::{Context as _, Result};
use collections::HashSet;
use fs::Fs;
use gpui::{App, AppContext as _, SharedString, Task};
use project::agent_server_store::{AllAgentServersSettings, ExternalAgentServerName};
@@ -55,7 +54,6 @@ impl AgentServer for CustomAgentServer {
.or_insert_with(|| settings::CustomAgentServerSettings::Extension {
default_model: None,
default_mode: None,
favorite_models: Vec::new(),
});
match settings {
@@ -92,7 +90,6 @@ impl AgentServer for CustomAgentServer {
.or_insert_with(|| settings::CustomAgentServerSettings::Extension {
default_model: None,
default_mode: None,
favorite_models: Vec::new(),
});
match settings {
@@ -104,66 +101,6 @@ impl AgentServer for CustomAgentServer {
});
}
fn favorite_model_ids(&self, cx: &mut App) -> HashSet<acp::ModelId> {
let settings = cx.read_global(|settings: &SettingsStore, _| {
settings
.get::<AllAgentServersSettings>(None)
.custom
.get(&self.name())
.cloned()
});
settings
.as_ref()
.map(|s| {
s.favorite_models()
.iter()
.map(|id| acp::ModelId::new(id.clone()))
.collect()
})
.unwrap_or_default()
}
fn toggle_favorite_model(
&self,
model_id: acp::ModelId,
should_be_favorite: bool,
fs: Arc<dyn Fs>,
cx: &App,
) {
let name = self.name();
update_settings_file(fs, cx, move |settings, _| {
let settings = settings
.agent_servers
.get_or_insert_default()
.custom
.entry(name.clone())
.or_insert_with(|| settings::CustomAgentServerSettings::Extension {
default_model: None,
default_mode: None,
favorite_models: Vec::new(),
});
let favorite_models = match settings {
settings::CustomAgentServerSettings::Custom {
favorite_models, ..
}
| settings::CustomAgentServerSettings::Extension {
favorite_models, ..
} => favorite_models,
};
let model_id_str = model_id.to_string();
if should_be_favorite {
if !favorite_models.contains(&model_id_str) {
favorite_models.push(model_id_str);
}
} else {
favorite_models.retain(|id| id != &model_id_str);
}
});
}
fn connect(
&self,
root_dir: Option<&Path>,

View File

@@ -460,7 +460,6 @@ pub async fn init_test(cx: &mut TestAppContext) -> Arc<FakeFs> {
ignore_system_version: None,
default_mode: None,
default_model: None,
favorite_models: vec![],
}),
gemini: Some(crate::gemini::tests::local_command().into()),
codex: Some(BuiltinAgentServerSettings {
@@ -470,7 +469,6 @@ pub async fn init_test(cx: &mut TestAppContext) -> Arc<FakeFs> {
ignore_system_version: None,
default_mode: None,
default_model: None,
favorite_models: vec![],
}),
custom: collections::HashMap::default(),
},

View File

@@ -31,7 +31,7 @@ use rope::Point;
use settings::Settings;
use std::{cell::RefCell, fmt::Write, rc::Rc, sync::Arc};
use theme::ThemeSettings;
use ui::{ContextMenu, prelude::*};
use ui::prelude::*;
use util::{ResultExt, debug_panic};
use workspace::{CollaboratorId, Workspace};
use zed_actions::agent::{Chat, PasteRaw};
@@ -132,21 +132,6 @@ impl MessageEditor {
placement: Some(ContextMenuPlacement::Above),
});
editor.register_addon(MessageEditorAddon::new());
editor.set_custom_context_menu(|editor, _point, window, cx| {
let has_selection = editor.has_non_empty_selection(&editor.display_snapshot(cx));
Some(ContextMenu::build(window, cx, |menu, _, _| {
menu.action("Cut", Box::new(editor::actions::Cut))
.action_disabled_when(
!has_selection,
"Copy",
Box::new(editor::actions::Copy),
)
.action("Paste", Box::new(editor::actions::Paste))
}))
});
editor
});
let mention_set =

View File

@@ -3,19 +3,19 @@ use std::{cmp::Reverse, rc::Rc, sync::Arc};
use acp_thread::{AgentModelIcon, AgentModelInfo, AgentModelList, AgentModelSelector};
use agent_client_protocol::ModelId;
use agent_servers::AgentServer;
use agent_settings::AgentSettings;
use anyhow::Result;
use collections::{HashSet, IndexMap};
use fs::Fs;
use futures::FutureExt;
use fuzzy::{StringMatchCandidate, match_strings};
use gpui::{
Action, AsyncWindowContext, BackgroundExecutor, DismissEvent, FocusHandle, Subscription, Task,
WeakEntity,
Action, AsyncWindowContext, BackgroundExecutor, DismissEvent, FocusHandle, Task, WeakEntity,
};
use itertools::Itertools;
use ordered_float::OrderedFloat;
use picker::{Picker, PickerDelegate};
use settings::SettingsStore;
use settings::Settings;
use ui::{DocumentationAside, DocumentationEdge, DocumentationSide, IntoElement, prelude::*};
use util::ResultExt;
use zed_actions::agent::OpenSettings;
@@ -54,9 +54,7 @@ pub struct AcpModelPickerDelegate {
selected_index: usize,
selected_description: Option<(usize, SharedString, bool)>,
selected_model: Option<AgentModelInfo>,
favorites: HashSet<ModelId>,
_refresh_models_task: Task<()>,
_settings_subscription: Subscription,
focus_handle: FocusHandle,
}
@@ -104,19 +102,6 @@ impl AcpModelPickerDelegate {
})
};
let agent_server_for_subscription = agent_server.clone();
let settings_subscription =
cx.observe_global_in::<SettingsStore>(window, move |picker, window, cx| {
// Only refresh if the favorites actually changed to avoid redundant work
// when other settings are modified (e.g., user editing settings.json)
let new_favorites = agent_server_for_subscription.favorite_model_ids(cx);
if new_favorites != picker.delegate.favorites {
picker.delegate.favorites = new_favorites;
picker.refresh(window, cx);
}
});
let favorites = agent_server.favorite_model_ids(cx);
Self {
selector,
agent_server,
@@ -126,9 +111,7 @@ impl AcpModelPickerDelegate {
selected_model: None,
selected_index: 0,
selected_description: None,
favorites,
_refresh_models_task: refresh_models_task,
_settings_subscription: settings_subscription,
focus_handle,
}
}
@@ -137,37 +120,40 @@ impl AcpModelPickerDelegate {
self.selected_model.as_ref()
}
pub fn favorites_count(&self) -> usize {
self.favorites.len()
}
pub fn cycle_favorite_models(&mut self, window: &mut Window, cx: &mut Context<Picker<Self>>) {
if self.favorites.is_empty() {
if !self.selector.supports_favorites() {
return;
}
let Some(models) = &self.models else {
let favorites = AgentSettings::get_global(cx).favorite_model_ids();
if favorites.is_empty() {
return;
}
let Some(models) = self.models.clone() else {
return;
};
let all_models: Vec<&AgentModelInfo> = match models {
AgentModelList::Flat(list) => list.iter().collect(),
AgentModelList::Grouped(index_map) => index_map.values().flatten().collect(),
let all_models: Vec<AgentModelInfo> = match models {
AgentModelList::Flat(list) => list,
AgentModelList::Grouped(index_map) => index_map
.into_values()
.flatten()
.collect::<Vec<AgentModelInfo>>(),
};
let favorite_models: Vec<_> = all_models
.into_iter()
.filter(|model| self.favorites.contains(&model.id))
let favorite_models = all_models
.iter()
.filter(|model| favorites.contains(&model.id))
.unique_by(|model| &model.id)
.collect();
.cloned()
.collect::<Vec<_>>();
if favorite_models.is_empty() {
return;
}
let current_id = self.selected_model.as_ref().map(|m| &m.id);
let current_id = self.selected_model.as_ref().map(|m| m.id.clone());
let current_index_in_favorites = current_id
.as_ref()
.and_then(|id| favorite_models.iter().position(|m| &m.id == id))
.unwrap_or(usize::MAX);
@@ -234,7 +220,11 @@ impl PickerDelegate for AcpModelPickerDelegate {
window: &mut Window,
cx: &mut Context<Picker<Self>>,
) -> Task<()> {
let favorites = self.favorites.clone();
let favorites = if self.selector.supports_favorites() {
AgentSettings::get_global(cx).favorite_model_ids()
} else {
Default::default()
};
cx.spawn_in(window, async move |this, cx| {
let filtered_models = match this
@@ -327,20 +317,21 @@ impl PickerDelegate for AcpModelPickerDelegate {
let default_model = self.agent_server.default_model(cx);
let is_default = default_model.as_ref() == Some(&model_info.id);
let supports_favorites = self.selector.supports_favorites();
let is_favorite = *is_favorite;
let handle_action_click = {
let model_id = model_info.id.clone();
let fs = self.fs.clone();
let agent_server = self.agent_server.clone();
cx.listener(move |_, _, _, cx| {
agent_server.toggle_favorite_model(
move |cx: &App| {
crate::favorite_models::toggle_model_id_in_settings(
model_id.clone(),
!is_favorite,
fs.clone(),
cx,
);
})
}
};
Some(
@@ -366,8 +357,10 @@ impl PickerDelegate for AcpModelPickerDelegate {
})
.is_selected(is_selected)
.is_focused(selected)
.is_favorite(is_favorite)
.on_toggle_favorite(handle_action_click),
.when(supports_favorites, |this| {
this.is_favorite(is_favorite)
.on_toggle_favorite(handle_action_click)
}),
)
.into_any_element(),
)
@@ -610,46 +603,6 @@ mod tests {
.collect()
}
#[gpui::test]
async fn test_fuzzy_match(cx: &mut TestAppContext) {
    // Three providers with overlapping model names, chosen to exercise both
    // result-ordering preservation and non-contiguous fuzzy matching.
    let models = create_model_list(vec![
        (
            "zed",
            vec![
                "Claude 3.7 Sonnet",
                "Claude 3.7 Sonnet Thinking",
                "gpt-4.1",
                "gpt-4.1-nano",
            ],
        ),
        ("openai", vec!["gpt-3.5-turbo", "gpt-4.1", "gpt-4.1-nano"]),
        ("ollama", vec!["mistral", "deepseek"]),
    ]);
    // Results should preserve models order whenever possible.
    // In the case below, `zed/gpt-4.1` and `openai/gpt-4.1` have identical
    // similarity scores, but `zed/gpt-4.1` was higher in the models list,
    // so it should appear first in the results.
    let results = fuzzy_search(models.clone(), "41".into(), cx.executor()).await;
    assert_models_eq(
        results,
        vec![
            ("zed", vec!["gpt-4.1", "gpt-4.1-nano"]),
            ("openai", vec!["gpt-4.1", "gpt-4.1-nano"]),
        ],
    );
    // Fuzzy search: "4n" matches non-adjacent characters, so only the
    // "-nano" variants should qualify.
    let results = fuzzy_search(models.clone(), "4n".into(), cx.executor()).await;
    assert_models_eq(
        results,
        vec![
            ("zed", vec!["gpt-4.1-nano"]),
            ("openai", vec!["gpt-4.1-nano"]),
        ],
    );
}
#[gpui::test]
fn test_favorites_section_appears_when_favorites_exist(_cx: &mut TestAppContext) {
let models = create_model_list(vec![
@@ -786,48 +739,42 @@ mod tests {
}
#[gpui::test]
fn test_favorites_count_returns_correct_count(_cx: &mut TestAppContext) {
    // An empty favorites set reports a count of zero.
    let no_favorites: HashSet<ModelId> = HashSet::default();
    assert_eq!(no_favorites.len(), 0);

    // Each (ids, expected) pair checks that the favorites set size equals the
    // number of *distinct* model ids, including the duplicate-collapsing case.
    let cases = [
        (vec!["model-a"], 1),
        (vec!["model-a", "model-b", "model-c"], 3),
        (vec!["model-a", "model-a", "model-b"], 2),
    ];
    for (ids, expected) in cases {
        assert_eq!(create_favorites(ids).len(), expected);
    }
}
#[gpui::test]
fn test_is_favorite_flag_set_correctly_in_entries(_cx: &mut TestAppContext) {
let models = AgentModelList::Flat(vec![
acp_thread::AgentModelInfo {
id: acp::ModelId::new("favorite-model".to_string()),
name: "Favorite".into(),
description: None,
icon: None,
},
acp_thread::AgentModelInfo {
id: acp::ModelId::new("regular-model".to_string()),
name: "Regular".into(),
description: None,
icon: None,
},
async fn test_fuzzy_match(cx: &mut TestAppContext) {
let models = create_model_list(vec![
(
"zed",
vec![
"Claude 3.7 Sonnet",
"Claude 3.7 Sonnet Thinking",
"gpt-4.1",
"gpt-4.1-nano",
],
),
("openai", vec!["gpt-3.5-turbo", "gpt-4.1", "gpt-4.1-nano"]),
("ollama", vec!["mistral", "deepseek"]),
]);
let favorites = create_favorites(vec!["favorite-model"]);
let entries = info_list_to_picker_entries(models, &favorites);
// Results should preserve models order whenever possible.
// In the case below, `zed/gpt-4.1` and `openai/gpt-4.1` have identical
// similarity scores, but `zed/gpt-4.1` was higher in the models list,
// so it should appear first in the results.
let results = fuzzy_search(models.clone(), "41".into(), cx.executor()).await;
assert_models_eq(
results,
vec![
("zed", vec!["gpt-4.1", "gpt-4.1-nano"]),
("openai", vec!["gpt-4.1", "gpt-4.1-nano"]),
],
);
for entry in &entries {
if let AcpModelPickerEntry::Model(info, is_favorite) = entry {
if info.id.0.as_ref() == "favorite-model" {
assert!(*is_favorite, "favorite-model should have is_favorite=true");
} else if info.id.0.as_ref() == "regular-model" {
assert!(!*is_favorite, "regular-model should have is_favorite=false");
}
}
}
// Fuzzy search
let results = fuzzy_search(models.clone(), "4n".into(), cx.executor()).await;
assert_models_eq(
results,
vec![
("zed", vec!["gpt-4.1-nano"]),
("openai", vec!["gpt-4.1-nano"]),
],
);
}
}

View File

@@ -2,13 +2,17 @@ use std::rc::Rc;
use std::sync::Arc;
use acp_thread::{AgentModelIcon, AgentModelInfo, AgentModelSelector};
use agent_servers::AgentServer;
use agent_settings::AgentSettings;
use fs::Fs;
use gpui::{Entity, FocusHandle};
use picker::popover_menu::PickerPopoverMenu;
use ui::{ButtonLike, PopoverMenuHandle, TintColor, Tooltip, prelude::*};
use settings::Settings as _;
use ui::{ButtonLike, KeyBinding, PopoverMenuHandle, TintColor, Tooltip, prelude::*};
use zed_actions::agent::ToggleModelSelector;
use crate::CycleFavoriteModels;
use crate::acp::{AcpModelSelector, model_selector::acp_model_selector};
use crate::ui::ModelSelectorTooltip;
pub struct AcpModelSelectorPopover {
selector: Entity<AcpModelSelector>,
@@ -19,7 +23,7 @@ pub struct AcpModelSelectorPopover {
impl AcpModelSelectorPopover {
pub(crate) fn new(
selector: Rc<dyn AgentModelSelector>,
agent_server: Rc<dyn agent_servers::AgentServer>,
agent_server: Rc<dyn AgentServer>,
fs: Arc<dyn Fs>,
menu_handle: PopoverMenuHandle<AcpModelSelector>,
focus_handle: FocusHandle,
@@ -60,8 +64,7 @@ impl AcpModelSelectorPopover {
impl Render for AcpModelSelectorPopover {
fn render(&mut self, window: &mut Window, cx: &mut Context<Self>) -> impl IntoElement {
let selector = self.selector.read(cx);
let model = selector.delegate.active_model();
let model = self.selector.read(cx).delegate.active_model();
let model_name = model
.as_ref()
.map(|model| model.name.clone())
@@ -77,13 +80,43 @@ impl Render for AcpModelSelectorPopover {
(Color::Muted, IconName::ChevronDown)
};
let show_cycle_row = selector.delegate.favorites_count() > 1;
let tooltip = Tooltip::element({
move |_, _cx| {
ModelSelectorTooltip::new(focus_handle.clone())
.show_cycle_row(show_cycle_row)
.into_any_element()
move |_, cx| {
let focus_handle = focus_handle.clone();
let should_show_cycle_row = !AgentSettings::get_global(cx)
.favorite_model_ids()
.is_empty();
v_flex()
.gap_1()
.child(
h_flex()
.gap_2()
.justify_between()
.child(Label::new("Change Model"))
.child(KeyBinding::for_action_in(
&ToggleModelSelector,
&focus_handle,
cx,
)),
)
.when(should_show_cycle_row, |this| {
this.child(
h_flex()
.pt_1()
.gap_2()
.border_t_1()
.border_color(cx.theme().colors().border_variant)
.justify_between()
.child(Label::new("Cycle Favorited Models"))
.child(KeyBinding::for_action_in(
&CycleFavoriteModels,
&focus_handle,
cx,
)),
)
})
.into_any()
}
});

View File

@@ -47,9 +47,8 @@ use terminal_view::terminal_panel::TerminalPanel;
use text::Anchor;
use theme::{AgentFontSize, ThemeSettings};
use ui::{
Callout, CommonAnimationExt, ContextMenu, ContextMenuEntry, Disclosure, Divider, DividerColor,
ElevationIndex, KeyBinding, PopoverMenuHandle, SpinnerLabel, TintColor, Tooltip, WithScrollbar,
prelude::*, right_click_menu,
Callout, CommonAnimationExt, Disclosure, Divider, DividerColor, ElevationIndex, KeyBinding,
PopoverMenuHandle, SpinnerLabel, TintColor, Tooltip, WithScrollbar, prelude::*,
};
use util::{ResultExt, size::format_file_size, time::duration_alt_display};
use workspace::{CollaboratorId, NewTerminal, Workspace};
@@ -2039,7 +2038,7 @@ impl AcpThreadView {
}
})
.text_xs()
.child(editor.clone().into_any_element())
.child(editor.clone().into_any_element()),
)
.when(editor_focus, |this| {
let base_container = h_flex()
@@ -2155,6 +2154,7 @@ impl AcpThreadView {
if this_is_blank {
return None;
}
Some(
self.render_thinking_block(
entry_ix,
@@ -2180,7 +2180,7 @@ impl AcpThreadView {
.when(is_last, |this| this.pb_4())
.w_full()
.text_ui(cx)
.child(self.render_message_context_menu(entry_ix, message_body, cx))
.child(message_body)
.into_any()
}
}
@@ -2287,70 +2287,6 @@ impl AcpThreadView {
}
}
/// Wraps a rendered message element in a right-click context menu offering
/// copy-as-markdown, scroll-to-top/bottom, and open-thread-as-markdown
/// actions. `entry_ix` keys the menu so each message gets its own instance.
fn render_message_context_menu(
    &self,
    entry_ix: usize,
    message_body: AnyElement,
    cx: &Context<Self>,
) -> AnyElement {
    let entity = cx.entity();
    let workspace = self.workspace.clone();
    right_click_menu(format!("agent_context_menu-{}", entry_ix))
        .trigger(move |_, _, _| message_body)
        .menu(move |window, cx| {
            let focus = window.focused(cx);
            let entity = entity.clone();
            let workspace = workspace.clone();
            ContextMenu::build(window, cx, move |menu, _, cx| {
                // Offer the opposite of the current scroll position: jump to
                // bottom when already at the top, otherwise jump to top.
                let is_at_top = entity.read(cx).list_state.logical_scroll_top().item_ix == 0;
                let scroll_item = if is_at_top {
                    ContextMenuEntry::new("Scroll to Bottom").handler({
                        let entity = entity.clone();
                        move |_, cx| {
                            entity.update(cx, |this, cx| {
                                this.scroll_to_bottom(cx);
                            });
                        }
                    })
                } else {
                    ContextMenuEntry::new("Scroll to Top").handler({
                        let entity = entity.clone();
                        move |_, cx| {
                            entity.update(cx, |this, cx| {
                                this.scroll_to_top(cx);
                            });
                        }
                    })
                };
                let open_thread_as_markdown = ContextMenuEntry::new("Open Thread as Markdown")
                    .handler({
                        let entity = entity.clone();
                        let workspace = workspace.clone();
                        move |window, cx| {
                            // Only act while the workspace is still alive.
                            if let Some(workspace) = workspace.upgrade() {
                                entity
                                    .update(cx, |this, cx| {
                                        this.open_thread_as_markdown(workspace, window, cx)
                                    })
                                    .detach_and_log_err(cx);
                            }
                        }
                    });
                menu.when_some(focus, |menu, focus| menu.context(focus))
                    .action("Copy", Box::new(markdown::CopyAsMarkdown))
                    .separator()
                    .item(scroll_item)
                    .item(open_thread_as_markdown)
            })
        })
        .into_any_element()
}
fn tool_card_header_bg(&self, cx: &Context<Self>) -> Hsla {
cx.theme()
.colors()
@@ -4352,6 +4288,37 @@ impl AcpThreadView {
v_flex()
.on_action(cx.listener(Self::expand_message_editor))
.on_action(cx.listener(|this, _: &ToggleProfileSelector, window, cx| {
if let Some(profile_selector) = this.profile_selector.as_ref() {
profile_selector.read(cx).menu_handle().toggle(window, cx);
} else if let Some(mode_selector) = this.mode_selector() {
mode_selector.read(cx).menu_handle().toggle(window, cx);
}
}))
.on_action(cx.listener(|this, _: &CycleModeSelector, window, cx| {
if let Some(profile_selector) = this.profile_selector.as_ref() {
profile_selector.update(cx, |profile_selector, cx| {
profile_selector.cycle_profile(cx);
});
} else if let Some(mode_selector) = this.mode_selector() {
mode_selector.update(cx, |mode_selector, cx| {
mode_selector.cycle_mode(window, cx);
});
}
}))
.on_action(cx.listener(|this, _: &ToggleModelSelector, window, cx| {
if let Some(model_selector) = this.model_selector.as_ref() {
model_selector
.update(cx, |model_selector, cx| model_selector.toggle(window, cx));
}
}))
.on_action(cx.listener(|this, _: &CycleFavoriteModels, window, cx| {
if let Some(model_selector) = this.model_selector.as_ref() {
model_selector.update(cx, |model_selector, cx| {
model_selector.cycle_favorite_models(window, cx);
});
}
}))
.p_2()
.gap_2()
.border_t_1()
@@ -6038,37 +6005,6 @@ impl Render for AcpThreadView {
.on_action(cx.listener(Self::allow_always))
.on_action(cx.listener(Self::allow_once))
.on_action(cx.listener(Self::reject_once))
.on_action(cx.listener(|this, _: &ToggleProfileSelector, window, cx| {
if let Some(profile_selector) = this.profile_selector.as_ref() {
profile_selector.read(cx).menu_handle().toggle(window, cx);
} else if let Some(mode_selector) = this.mode_selector() {
mode_selector.read(cx).menu_handle().toggle(window, cx);
}
}))
.on_action(cx.listener(|this, _: &CycleModeSelector, window, cx| {
if let Some(profile_selector) = this.profile_selector.as_ref() {
profile_selector.update(cx, |profile_selector, cx| {
profile_selector.cycle_profile(cx);
});
} else if let Some(mode_selector) = this.mode_selector() {
mode_selector.update(cx, |mode_selector, cx| {
mode_selector.cycle_mode(window, cx);
});
}
}))
.on_action(cx.listener(|this, _: &ToggleModelSelector, window, cx| {
if let Some(model_selector) = this.model_selector.as_ref() {
model_selector
.update(cx, |model_selector, cx| model_selector.toggle(window, cx));
}
}))
.on_action(cx.listener(|this, _: &CycleFavoriteModels, window, cx| {
if let Some(model_selector) = this.model_selector.as_ref() {
model_selector.update(cx, |model_selector, cx| {
model_selector.cycle_favorite_models(window, cx);
});
}
}))
.track_focus(&self.focus_handle)
.bg(cx.theme().colors().panel_background)
.child(match &self.thread_state {

View File

@@ -1370,7 +1370,6 @@ async fn open_new_agent_servers_entry_in_settings_editor(
env: Some(HashMap::default()),
default_mode: None,
default_model: None,
favorite_models: vec![],
},
);
}

View File

@@ -1,7 +1,6 @@
use crate::{
ModelUsageContext,
language_model_selector::{LanguageModelSelector, language_model_selector},
ui::ModelSelectorTooltip,
};
use fs::Fs;
use gpui::{Entity, FocusHandle, SharedString};
@@ -10,6 +9,7 @@ use picker::popover_menu::PickerPopoverMenu;
use settings::update_settings_file;
use std::sync::Arc;
use ui::{ButtonLike, PopoverMenuHandle, TintColor, Tooltip, prelude::*};
use zed_actions::agent::ToggleModelSelector;
pub struct AgentModelSelector {
selector: Entity<LanguageModelSelector>,
@@ -81,12 +81,6 @@ impl AgentModelSelector {
pub fn active_model(&self, cx: &App) -> Option<language_model::ConfiguredModel> {
self.selector.read(cx).delegate.active_model(cx)
}
pub fn cycle_favorite_models(&self, window: &mut Window, cx: &mut Context<Self>) {
self.selector.update(cx, |selector, cx| {
selector.delegate.cycle_favorite_models(window, cx);
});
}
}
impl Render for AgentModelSelector {
@@ -104,18 +98,8 @@ impl Render for AgentModelSelector {
Color::Muted
};
let show_cycle_row = self.selector.read(cx).delegate.favorites_count() > 1;
let focus_handle = self.focus_handle.clone();
let tooltip = Tooltip::element({
move |_, _cx| {
ModelSelectorTooltip::new(focus_handle.clone())
.show_cycle_row(show_cycle_row)
.into_any_element()
}
});
PickerPopoverMenu::new(
self.selector.clone(),
ButtonLike::new("active-model")
@@ -141,7 +125,9 @@ impl Render for AgentModelSelector {
.color(color)
.size(IconSize::XSmall),
),
tooltip,
move |_window, cx| {
Tooltip::for_action_in("Change Model", &ToggleModelSelector, &focus_handle, cx)
},
gpui::Corner::TopRight,
cx,
)

View File

@@ -1,5 +1,6 @@
use std::sync::Arc;
use agent_client_protocol::ModelId;
use fs::Fs;
use language_model::LanguageModel;
use settings::{LanguageModelSelection, update_settings_file};
@@ -12,11 +13,20 @@ fn language_model_to_selection(model: &Arc<dyn LanguageModel>) -> LanguageModelS
}
}
/// Splits an ACP model id of the form `provider/model` into a settings
/// `LanguageModelSelection`. Ids without a `/` yield an empty provider and
/// the whole id as the model name.
fn model_id_to_selection(model_id: &ModelId) -> LanguageModelSelection {
    let raw = model_id.0.as_ref();
    // Only the first '/' separates provider from model; any later slashes
    // remain part of the model name.
    let (provider, model) = match raw.split_once('/') {
        Some(parts) => parts,
        None => ("", raw),
    };
    LanguageModelSelection {
        provider: provider.to_string().into(),
        model: model.to_string(),
    }
}
pub fn toggle_in_settings(
model: Arc<dyn LanguageModel>,
should_be_favorite: bool,
fs: Arc<dyn Fs>,
cx: &mut App,
cx: &App,
) {
let selection = language_model_to_selection(&model);
update_settings_file(fs, cx, move |settings, _| {
@@ -28,3 +38,20 @@ pub fn toggle_in_settings(
}
});
}
/// Adds or removes `model_id` from the agent's favorite models in the user's
/// settings file, depending on `should_be_favorite`.
pub fn toggle_model_id_in_settings(
    model_id: ModelId,
    should_be_favorite: bool,
    fs: Arc<dyn Fs>,
    cx: &App,
) {
    // Convert the ACP id into the provider/model form the settings store uses.
    let selection = model_id_to_selection(&model_id);
    update_settings_file(fs, cx, move |settings, _| {
        let agent = settings.agent.get_or_insert_default();
        match should_be_favorite {
            true => agent.add_favorite_model(selection.clone()),
            false => agent.remove_favorite_model(&selection),
        }
    });
}

View File

@@ -40,9 +40,7 @@ use crate::completion_provider::{
use crate::mention_set::paste_images_as_context;
use crate::mention_set::{MentionSet, crease_for_mention};
use crate::terminal_codegen::TerminalCodegen;
use crate::{
CycleFavoriteModels, CycleNextInlineAssist, CyclePreviousInlineAssist, ModelUsageContext,
};
use crate::{CycleNextInlineAssist, CyclePreviousInlineAssist, ModelUsageContext};
actions!(inline_assistant, [ThumbsUpResult, ThumbsDownResult]);
@@ -150,7 +148,7 @@ impl<T: 'static> Render for PromptEditor<T> {
.into_any_element();
v_flex()
.key_context("InlineAssistant")
.key_context("PromptEditor")
.capture_action(cx.listener(Self::paste))
.block_mouse_except_scroll()
.size_full()
@@ -164,6 +162,10 @@ impl<T: 'static> Render for PromptEditor<T> {
.bg(cx.theme().colors().editor_background)
.child(
h_flex()
.on_action(cx.listener(|this, _: &ToggleModelSelector, window, cx| {
this.model_selector
.update(cx, |model_selector, cx| model_selector.toggle(window, cx));
}))
.on_action(cx.listener(Self::confirm))
.on_action(cx.listener(Self::cancel))
.on_action(cx.listener(Self::move_up))
@@ -172,15 +174,6 @@ impl<T: 'static> Render for PromptEditor<T> {
.on_action(cx.listener(Self::thumbs_down))
.capture_action(cx.listener(Self::cycle_prev))
.capture_action(cx.listener(Self::cycle_next))
.on_action(cx.listener(|this, _: &ToggleModelSelector, window, cx| {
this.model_selector
.update(cx, |model_selector, cx| model_selector.toggle(window, cx));
}))
.on_action(cx.listener(|this, _: &CycleFavoriteModels, window, cx| {
this.model_selector.update(cx, |model_selector, cx| {
model_selector.cycle_favorite_models(window, cx);
});
}))
.child(
WithRemSize::new(ui_font_size)
.h_full()
@@ -862,7 +855,7 @@ impl<T: 'static> PromptEditor<T> {
.map(|this| {
if rated {
this.disabled(true)
.icon_color(Color::Disabled)
.icon_color(Color::Ignored)
.tooltip(move |_, cx| {
Tooltip::with_meta(
"Good Result",
@@ -872,15 +865,8 @@ impl<T: 'static> PromptEditor<T> {
)
})
} else {
this.icon_color(Color::Muted).tooltip(
move |_, cx| {
Tooltip::for_action(
"Good Result",
&ThumbsUpResult,
cx,
)
},
)
this.icon_color(Color::Muted)
.tooltip(Tooltip::text("Good Result"))
}
})
.on_click(cx.listener(|this, _, window, cx| {
@@ -893,7 +879,7 @@ impl<T: 'static> PromptEditor<T> {
.map(|this| {
if rated {
this.disabled(true)
.icon_color(Color::Disabled)
.icon_color(Color::Ignored)
.tooltip(move |_, cx| {
Tooltip::with_meta(
"Bad Result",
@@ -903,15 +889,8 @@ impl<T: 'static> PromptEditor<T> {
)
})
} else {
this.icon_color(Color::Muted).tooltip(
move |_, cx| {
Tooltip::for_action(
"Bad Result",
&ThumbsDownResult,
cx,
)
},
)
this.icon_color(Color::Muted)
.tooltip(Tooltip::text("Bad Result"))
}
})
.on_click(cx.listener(|this, _, window, cx| {
@@ -1109,6 +1088,7 @@ impl<T: 'static> PromptEditor<T> {
let colors = cx.theme().colors();
div()
.key_context("InlineAssistEditor")
.size_full()
.p_2()
.pl_1()

View File

@@ -20,14 +20,14 @@ use crate::ui::{ModelSelectorFooter, ModelSelectorHeader, ModelSelectorListItem}
type OnModelChanged = Arc<dyn Fn(Arc<dyn LanguageModel>, &mut App) + 'static>;
type GetActiveModel = Arc<dyn Fn(&App) -> Option<ConfiguredModel> + 'static>;
type OnToggleFavorite = Arc<dyn Fn(Arc<dyn LanguageModel>, bool, &mut App) + 'static>;
type OnToggleFavorite = Arc<dyn Fn(Arc<dyn LanguageModel>, bool, &App) + 'static>;
pub type LanguageModelSelector = Picker<LanguageModelPickerDelegate>;
pub fn language_model_selector(
get_active_model: impl Fn(&App) -> Option<ConfiguredModel> + 'static,
on_model_changed: impl Fn(Arc<dyn LanguageModel>, &mut App) + 'static,
on_toggle_favorite: impl Fn(Arc<dyn LanguageModel>, bool, &mut App) + 'static,
on_toggle_favorite: impl Fn(Arc<dyn LanguageModel>, bool, &App) + 'static,
popover_styles: bool,
focus_handle: FocusHandle,
window: &mut Window,
@@ -133,7 +133,7 @@ impl LanguageModelPickerDelegate {
fn new(
get_active_model: impl Fn(&App) -> Option<ConfiguredModel> + 'static,
on_model_changed: impl Fn(Arc<dyn LanguageModel>, &mut App) + 'static,
on_toggle_favorite: impl Fn(Arc<dyn LanguageModel>, bool, &mut App) + 'static,
on_toggle_favorite: impl Fn(Arc<dyn LanguageModel>, bool, &App) + 'static,
popover_styles: bool,
focus_handle: FocusHandle,
window: &mut Window,
@@ -250,10 +250,6 @@ impl LanguageModelPickerDelegate {
(self.get_active_model)(cx)
}
pub fn favorites_count(&self) -> usize {
self.all_models.favorites.len()
}
pub fn cycle_favorite_models(&mut self, window: &mut Window, cx: &mut Context<Picker<Self>>) {
if self.all_models.favorites.is_empty() {
return;
@@ -565,10 +561,7 @@ impl PickerDelegate for LanguageModelPickerDelegate {
let handle_action_click = {
let model = model_info.model.clone();
let on_toggle_favorite = self.on_toggle_favorite.clone();
cx.listener(move |picker, _, window, cx| {
on_toggle_favorite(model.clone(), !is_favorite, cx);
picker.refresh(window, cx);
})
move |cx: &App| on_toggle_favorite(model.clone(), !is_favorite, cx)
};
Some(

View File

@@ -1,8 +1,8 @@
use crate::{
language_model_selector::{LanguageModelSelector, language_model_selector},
ui::{BurnModeTooltip, ModelSelectorTooltip},
ui::BurnModeTooltip,
};
use agent_settings::CompletionMode;
use agent_settings::{AgentSettings, CompletionMode};
use anyhow::Result;
use assistant_slash_command::{SlashCommand, SlashCommandOutputSection, SlashCommandWorkingSet};
use assistant_slash_commands::{DefaultSlashCommand, FileSlashCommand, selections_creases};
@@ -2252,18 +2252,43 @@ impl TextThreadEditor {
.color(color)
.size(IconSize::XSmall);
let show_cycle_row = self
.language_model_selector
.read(cx)
.delegate
.favorites_count()
> 1;
let tooltip = Tooltip::element({
move |_, _cx| {
ModelSelectorTooltip::new(focus_handle.clone())
.show_cycle_row(show_cycle_row)
.into_any_element()
move |_, cx| {
let focus_handle = focus_handle.clone();
let should_show_cycle_row = !AgentSettings::get_global(cx)
.favorite_model_ids()
.is_empty();
v_flex()
.gap_1()
.child(
h_flex()
.gap_2()
.justify_between()
.child(Label::new("Change Model"))
.child(KeyBinding::for_action_in(
&ToggleModelSelector,
&focus_handle,
cx,
)),
)
.when(should_show_cycle_row, |this| {
this.child(
h_flex()
.pt_1()
.gap_2()
.border_t_1()
.border_color(cx.theme().colors().border_variant)
.justify_between()
.child(Label::new("Cycle Favorited Models"))
.child(KeyBinding::for_action_in(
&CycleFavoriteModels,
&focus_handle,
cx,
)),
)
})
.into_any()
}
});

View File

@@ -1,8 +1,5 @@
use gpui::{Action, ClickEvent, FocusHandle, prelude::*};
use gpui::{Action, FocusHandle, prelude::*};
use ui::{ElevationIndex, KeyBinding, ListItem, ListItemSpacing, Tooltip, prelude::*};
use zed_actions::agent::ToggleModelSelector;
use crate::CycleFavoriteModels;
enum ModelIcon {
Name(IconName),
@@ -51,7 +48,7 @@ pub struct ModelSelectorListItem {
is_selected: bool,
is_focused: bool,
is_favorite: bool,
on_toggle_favorite: Option<Box<dyn Fn(&ClickEvent, &mut Window, &mut App) + 'static>>,
on_toggle_favorite: Option<Box<dyn Fn(&App) + 'static>>,
}
impl ModelSelectorListItem {
@@ -92,10 +89,7 @@ impl ModelSelectorListItem {
self
}
pub fn on_toggle_favorite(
mut self,
handler: impl Fn(&ClickEvent, &mut Window, &mut App) + 'static,
) -> Self {
pub fn on_toggle_favorite(mut self, handler: impl Fn(&App) + 'static) -> Self {
self.on_toggle_favorite = Some(Box::new(handler));
self
}
@@ -147,7 +141,7 @@ impl RenderOnce for ModelSelectorListItem {
.icon_color(color)
.icon_size(IconSize::Small)
.tooltip(Tooltip::text(tooltip))
.on_click(move |event, window, cx| (handle_click)(event, window, cx)),
.on_click(move |_, _, cx| (handle_click)(cx)),
)
}
}))
@@ -193,57 +187,3 @@ impl RenderOnce for ModelSelectorFooter {
)
}
}
/// Tooltip for the model-selector button: a "Change Model" row with its
/// keybinding, plus an optional "Cycle Favorited Models" row.
#[derive(IntoElement)]
pub struct ModelSelectorTooltip {
    // Focus context used to resolve the keybindings displayed in each row.
    focus_handle: FocusHandle,
    // Whether the "Cycle Favorited Models" row is rendered.
    show_cycle_row: bool,
}
impl ModelSelectorTooltip {
    /// Creates a tooltip that shows the cycle row by default.
    pub fn new(focus_handle: FocusHandle) -> Self {
        Self {
            focus_handle,
            show_cycle_row: true,
        }
    }
    /// Builder-style setter controlling the cycle-favorites row visibility.
    pub fn show_cycle_row(mut self, show: bool) -> Self {
        self.show_cycle_row = show;
        self
    }
}
impl RenderOnce for ModelSelectorTooltip {
    fn render(self, _window: &mut Window, cx: &mut App) -> impl IntoElement {
        v_flex()
            .gap_1()
            .child(
                h_flex()
                    .gap_2()
                    .justify_between()
                    .child(Label::new("Change Model"))
                    .child(KeyBinding::for_action_in(
                        &ToggleModelSelector,
                        &self.focus_handle,
                        cx,
                    )),
            )
            // Second row only when requested by the caller.
            .when(self.show_cycle_row, |this| {
                this.child(
                    h_flex()
                        .pt_1()
                        .gap_2()
                        .border_t_1()
                        .border_color(cx.theme().colors().border_variant)
                        .justify_between()
                        .child(Label::new("Cycle Favorited Models"))
                        .child(KeyBinding::for_action_in(
                            &CycleFavoriteModels,
                            &self.focus_handle,
                            cx,
                        )),
                )
            })
    }
}

View File

@@ -314,12 +314,6 @@ impl BufferDiffSnapshot {
self.inner.hunks.is_empty()
}
pub fn base_text_string(&self) -> Option<String> {
self.inner
.base_text_exists
.then(|| self.inner.base_text.text())
}
pub fn secondary_diff(&self) -> Option<&BufferDiffSnapshot> {
self.secondary_diff.as_deref()
}

View File

@@ -113,7 +113,7 @@ impl CopilotSweAgentBot {
const USER_ID: i32 = 198982749;
/// The alias of the GitHub copilot user. Although https://api.github.com/users/copilot
/// yields a 404, GitHub still refers to the copilot bot user as @Copilot in some cases.
const NAME_ALIAS: &'static str = "Copilot";
const NAME_ALIAS: &'static str = "copilot";
/// Returns the `created_at` timestamp for the Dependabot bot user.
fn created_at() -> &'static NaiveDateTime {

View File

@@ -6745,13 +6745,8 @@ async fn test_preview_tabs(cx: &mut TestAppContext) {
});
// Split pane to the right
pane.update_in(cx, |pane, window, cx| {
pane.split(
workspace::SplitDirection::Right,
workspace::SplitMode::default(),
window,
cx,
);
pane.update(cx, |pane, cx| {
pane.split(workspace::SplitDirection::Right, cx);
});
cx.run_until_parked();
let right_pane = workspace.read_with(cx, |workspace, _| workspace.active_pane().clone());

View File

@@ -1579,10 +1579,8 @@ impl Panel for DebugPanel {
Some(proto::PanelId::DebugPanel)
}
fn icon(&self, _window: &Window, cx: &App) -> Option<IconName> {
DebuggerSettings::get_global(cx)
.button
.then_some(IconName::Debug)
fn icon(&self, _window: &Window, _cx: &App) -> Option<IconName> {
Some(IconName::Debug)
}
fn icon_tooltip(&self, _window: &Window, cx: &App) -> Option<&'static str> {

View File

@@ -19,7 +19,6 @@ ai_onboarding.workspace = true
anyhow.workspace = true
arrayvec.workspace = true
brotli.workspace = true
buffer_diff.workspace = true
client.workspace = true
cloud_llm_client.workspace = true
collections.workspace = true
@@ -53,9 +52,7 @@ settings.workspace = true
strum.workspace = true
telemetry.workspace = true
telemetry_events.workspace = true
text.workspace = true
thiserror.workspace = true
time.workspace = true
ui.workspace = true
util.workspace = true
uuid.workspace = true

View File

@@ -1,375 +0,0 @@
use crate::{
EditPredictionStore, StoredEvent,
cursor_excerpt::editable_and_context_ranges_for_cursor_position, example_spec::ExampleSpec,
};
use anyhow::Result;
use buffer_diff::BufferDiffSnapshot;
use collections::HashMap;
use gpui::{App, Entity, Task};
use language::{Buffer, ToPoint as _};
use project::Project;
use std::{collections::hash_map, fmt::Write as _, path::Path, sync::Arc};
use text::{BufferSnapshot as TextBufferSnapshot, ToOffset as _};
/// Captures an edit-prediction example from the current project state:
/// the text around the cursor, the uncommitted diff, and the recent edit
/// history, bundled into an [`ExampleSpec`].
///
/// Returns `None` when any prerequisite is missing: no global
/// `EditPredictionStore`, an unsaved buffer, no active repository, a worktree
/// that isn't the repository's work directory, no remote URL, or no head
/// commit. Otherwise returns a task that finishes assembling the spec.
pub fn capture_example(
    project: Entity<Project>,
    buffer: Entity<Buffer>,
    cursor_anchor: language::Anchor,
    last_event_is_expected_patch: bool,
    cx: &mut App,
) -> Option<Task<Result<ExampleSpec>>> {
    let ep_store = EditPredictionStore::try_global(cx)?;
    let snapshot = buffer.read(cx).snapshot();
    let file = snapshot.file()?;
    let worktree_id = file.worktree_id(cx);
    let repository = project.read(cx).active_repository(cx)?;
    let repository_snapshot = repository.read(cx).snapshot();
    let worktree = project.read(cx).worktree_for_id(worktree_id, cx)?;
    let cursor_path = worktree.read(cx).root_name().join(file.path());
    // Only capture when the worktree root coincides with the repository's
    // work directory, so recorded paths resolve within the repo checkout.
    if worktree.read(cx).abs_path() != repository_snapshot.work_directory_abs_path {
        return None;
    }
    // Prefer the origin remote; fall back to upstream. Without a remote URL
    // the example cannot reference its source repository.
    let repository_url = repository_snapshot
        .remote_origin_url
        .clone()
        .or_else(|| repository_snapshot.remote_upstream_url.clone())?;
    let revision = repository_snapshot.head_commit.as_ref()?.sha.to_string();
    let mut events = ep_store.update(cx, |store, cx| {
        store.edit_history_for_project_with_pause_split_last_event(&project, cx)
    });
    let git_store = project.read(cx).git_store().clone();
    Some(cx.spawn(async move |mut cx| {
        let snapshots_by_path = collect_snapshots(&project, &git_store, &events, &mut cx).await?;
        // Heavy text work runs on the background executor.
        let cursor_excerpt = cx
            .background_executor()
            .spawn(async move { compute_cursor_excerpt(&snapshot, cursor_anchor) })
            .await;
        let uncommitted_diff = cx
            .background_executor()
            .spawn(async move { compute_uncommitted_diff(snapshots_by_path) })
            .await;
        let mut edit_history = String::new();
        let mut expected_patch = String::new();
        // Optionally peel off the most recent event and record it as the
        // expected patch instead of part of the history.
        if last_event_is_expected_patch {
            if let Some(stored_event) = events.pop() {
                zeta_prompt::write_event(&mut expected_patch, &stored_event.event);
            }
        }
        for stored_event in &events {
            zeta_prompt::write_event(&mut edit_history, &stored_event.event);
            // Keep events newline-separated even when write_event's output
            // doesn't end with one.
            if !edit_history.ends_with('\n') {
                edit_history.push('\n');
            }
        }
        let name = generate_timestamp_name();
        Ok(ExampleSpec {
            name,
            repository_url,
            revision,
            uncommitted_diff,
            cursor_path: cursor_path.as_std_path().into(),
            cursor_position: cursor_excerpt,
            edit_history,
            expected_patch,
        })
    }))
}
/// Builds the cursor excerpt: the context window around the cursor position
/// with `zeta_prompt::CURSOR_MARKER` spliced in at the cursor's offset.
fn compute_cursor_excerpt(
    snapshot: &language::BufferSnapshot,
    cursor_anchor: language::Anchor,
) -> String {
    let point = cursor_anchor.to_point(snapshot);
    let (_editable_range, context_range) =
        editable_and_context_ranges_for_cursor_position(point, snapshot, 100, 50);
    // Translate the absolute cursor offset into an offset within the excerpt.
    let excerpt_start = context_range.start.to_offset(snapshot);
    let marker_offset = cursor_anchor.to_offset(snapshot).saturating_sub(excerpt_start);
    let mut text: String = snapshot.text_for_range(context_range).collect();
    if marker_offset <= text.len() {
        text.insert_str(marker_offset, zeta_prompt::CURSOR_MARKER);
    }
    text
}
/// Collects, for every file mentioned in `events`, the buffer snapshot taken
/// before the event's edit together with that buffer's uncommitted-diff
/// snapshot, keyed by the file's full (worktree-name-rooted) path.
///
/// Only the first event per path wins (`Vacant` entry check), so the stored
/// text snapshot is the oldest pre-edit state seen in the history.
async fn collect_snapshots(
    project: &Entity<Project>,
    git_store: &Entity<project::git_store::GitStore>,
    events: &[StoredEvent],
    cx: &mut gpui::AsyncApp,
) -> Result<HashMap<Arc<Path>, (TextBufferSnapshot, BufferDiffSnapshot)>> {
    let mut snapshots_by_path = HashMap::default();
    for stored_event in events {
        // Event is currently always a BufferChange (irrefutable pattern).
        let zeta_prompt::Event::BufferChange { path, .. } = stored_event.event.as_ref();
        if let Some((project_path, full_path)) = project.read_with(cx, |project, cx| {
            let project_path = project.find_project_path(path, cx)?;
            let full_path = project
                .worktree_for_id(project_path.worktree_id, cx)?
                .read(cx)
                .root_name()
                .join(&project_path.path)
                .as_std_path()
                .into();
            Some((project_path, full_path))
        })? {
            // Skip paths we've already captured; keep the earliest snapshot.
            if let hash_map::Entry::Vacant(entry) = snapshots_by_path.entry(full_path) {
                let buffer = project
                    .update(cx, |project, cx| {
                        project.open_buffer(project_path.clone(), cx)
                    })?
                    .await?;
                let diff = git_store
                    .update(cx, |git_store, cx| {
                        git_store.open_uncommitted_diff(buffer.clone(), cx)
                    })?
                    .await?;
                let diff_snapshot = diff.update(cx, |diff, cx| diff.snapshot(cx))?;
                entry.insert((stored_event.old_snapshot.clone(), diff_snapshot));
            }
        }
    }
    Ok(snapshots_by_path)
}
/// Renders the uncommitted changes for every captured buffer as one unified
/// diff string, with `--- a/…` / `+++ b/…` headers per file.
///
/// Files are emitted in sorted path order so the output is deterministic;
/// iterating the `HashMap` directly would produce a different file order on
/// every run, making captured example specs unstable.
fn compute_uncommitted_diff(
    snapshots_by_path: HashMap<Arc<Path>, (TextBufferSnapshot, BufferDiffSnapshot)>,
) -> String {
    // HashMap iteration order is unspecified; sort by path for stable output.
    let mut entries: Vec<_> = snapshots_by_path.into_iter().collect();
    entries.sort_by(|(a, _), (b, _)| a.cmp(b));

    let mut uncommitted_diff = String::new();
    for (full_path, (before_text, diff_snapshot)) in entries {
        // `base_text_string` is the committed (HEAD) text; without it there
        // is nothing to diff against.
        let Some(head_text) = diff_snapshot.base_text_string() else {
            continue;
        };
        let file_diff = language::unified_diff(&head_text, &before_text.text());
        if file_diff.is_empty() {
            continue;
        }
        let path_str = full_path.to_string_lossy();
        writeln!(uncommitted_diff, "--- a/{path_str}").ok();
        writeln!(uncommitted_diff, "+++ b/{path_str}").ok();
        uncommitted_diff.push_str(&file_diff);
        // Ensure each file's hunk block ends with a newline before the next
        // file's headers.
        if !uncommitted_diff.ends_with('\n') {
            uncommitted_diff.push('\n');
        }
    }
    uncommitted_diff
}
/// Generates a human-readable example name from the current wall-clock time
/// (local time when available, otherwise UTC), formatted as
/// `YYYY-MM-DD HH:MM:SS`; falls back to `"unknown-time"` if formatting fails.
fn generate_timestamp_name() -> String {
    time::format_description::parse("[year]-[month]-[day] [hour]:[minute]:[second]")
        .ok()
        .and_then(|format| {
            // Local offset lookup can fail (e.g. in sandboxed environments);
            // fall back to UTC rather than erroring out.
            let now = time::OffsetDateTime::now_local()
                .unwrap_or_else(|_| time::OffsetDateTime::now_utc());
            now.format(&format).ok()
        })
        .unwrap_or_else(|| "unknown-time".to_string())
}
#[cfg(test)]
mod tests {
    use super::*;
    use client::{Client, UserStore};
    use clock::FakeSystemClock;
    use gpui::{AppContext as _, TestAppContext, http_client::FakeHttpClient};
    use indoc::indoc;
    use language::{Anchor, Point};
    use project::{FakeFs, Project};
    use serde_json::json;
    use settings::SettingsStore;
    use std::path::Path;

    /// End-to-end test of `capture_example`: sets up a fake repo with a
    /// committed state, an uncommitted on-disk state, and live buffer edits,
    /// then checks that the captured `ExampleSpec` reflects all three layers.
    #[gpui::test]
    async fn test_capture_example(cx: &mut TestAppContext) {
        init_test(cx);
        let fs = FakeFs::new(cx.executor());
        // The state at HEAD (what git considers committed).
        let committed_contents = indoc! {"
            fn main() {
                one();
                two();
                three();
                four();
                five();
                six();
                seven();
                eight();
                nine();
            }
        "};
        // The state on disk: two comments added relative to HEAD, which
        // should surface as the uncommitted diff.
        let disk_contents = indoc! {"
            fn main() {
                // comment 1
                one();
                two();
                three();
                four();
                five();
                six();
                seven();
                eight();
                // comment 2
                nine();
            }
        "};
        fs.insert_tree(
            "/project",
            json!({
                ".git": {},
                "src": {
                    "main.rs": disk_contents,
                }
            }),
        )
        .await;
        fs.set_head_for_repo(
            Path::new("/project/.git"),
            &[("src/main.rs", committed_contents.to_string())],
            "abc123def456",
        );
        // A remote URL is required; capture_example returns None without one.
        fs.set_remote_for_repo(
            Path::new("/project/.git"),
            "origin",
            "https://github.com/test/repo.git",
        );
        let project = Project::test(fs.clone(), ["/project".as_ref()], cx).await;
        let buffer = project
            .update(cx, |project, cx| {
                project.open_local_buffer("/project/src/main.rs", cx)
            })
            .await
            .unwrap();
        // Register the buffer so the store starts recording edit history.
        let ep_store = cx.read(|cx| EditPredictionStore::try_global(cx).unwrap());
        ep_store.update(cx, |ep_store, cx| {
            ep_store.register_buffer(&buffer, &project, cx)
        });
        cx.run_until_parked();
        // Make two in-buffer edits (comments 3 and 4); these should end up in
        // the captured edit history, not in the uncommitted diff.
        buffer.update(cx, |buffer, cx| {
            let point = Point::new(6, 0);
            buffer.edit([(point..point, "    // comment 3\n")], None, cx);
            let point = Point::new(4, 0);
            buffer.edit([(point..point, "    // comment 4\n")], None, cx);
            pretty_assertions::assert_eq!(
                buffer.text(),
                indoc! {"
                    fn main() {
                        // comment 1
                        one();
                        two();
                        // comment 4
                        three();
                        four();
                        // comment 3
                        five();
                        six();
                        seven();
                        eight();
                        // comment 2
                        nine();
                    }
                "}
            );
        });
        cx.run_until_parked();
        let mut example = cx
            .update(|cx| {
                capture_example(project.clone(), buffer.clone(), Anchor::MIN, false, cx).unwrap()
            })
            .await
            .unwrap();
        // The name is a timestamp; pin it so the struct equality is stable.
        example.name = "test".to_string();
        pretty_assertions::assert_eq!(
            example,
            ExampleSpec {
                name: "test".to_string(),
                repository_url: "https://github.com/test/repo.git".to_string(),
                revision: "abc123def456".to_string(),
                // Disk-vs-HEAD changes only (comments 1 and 2).
                uncommitted_diff: indoc! {"
                    --- a/project/src/main.rs
                    +++ b/project/src/main.rs
                    @@ -1,4 +1,5 @@
                     fn main() {
                    +    // comment 1
                         one();
                         two();
                         three();
                    @@ -7,5 +8,6 @@
                         six();
                         seven();
                         eight();
                    +    // comment 2
                         nine();
                    }
                "}
                .to_string(),
                cursor_path: Path::new("project/src/main.rs").into(),
                // Cursor at Anchor::MIN => marker at the very start.
                cursor_position: indoc! {"
                    <|user_cursor|>fn main() {
                        // comment 1
                        one();
                        two();
                        // comment 4
                        three();
                        four();
                        // comment 3
                        five();
                        six();
                        seven();
                        eight();
                        // comment 2
                        nine();
                    }
                "}
                .to_string(),
                // In-buffer edits only (comments 3 and 4).
                edit_history: indoc! {"
                    --- a/project/src/main.rs
                    +++ b/project/src/main.rs
                    @@ -2,8 +2,10 @@
                         // comment 1
                         one();
                         two();
                    +    // comment 4
                         three();
                         four();
                    +    // comment 3
                         five();
                         six();
                         seven();
                "}
                .to_string(),
                // last_event_is_expected_patch was false, so no patch split.
                expected_patch: "".to_string(),
            }
        );
    }

    /// Minimal app-state setup: settings, logging, a fake HTTP client, and a
    /// global EditPredictionStore.
    fn init_test(cx: &mut TestAppContext) {
        cx.update(|cx| {
            let settings_store = SettingsStore::test(cx);
            cx.set_global(settings_store);
            zlog::init_test();
            let http_client = FakeHttpClient::with_404_response();
            let client = Client::new(Arc::new(FakeSystemClock::new()), http_client, cx);
            language_model::init(client.clone(), cx);
            let user_store = cx.new(|cx| UserStore::new(client.clone(), cx));
            EditPredictionStore::global(&client, &user_store, cx);
        })
    }
}

View File

@@ -35,7 +35,6 @@ use semver::Version;
use serde::de::DeserializeOwned;
use settings::{EditPredictionProvider, SettingsStore, update_settings_file};
use std::collections::{VecDeque, hash_map};
use text::Edit;
use workspace::Workspace;
use std::ops::Range;
@@ -58,9 +57,9 @@ pub mod open_ai_response;
mod prediction;
pub mod sweep_ai;
#[cfg(any(test, feature = "test-support", feature = "cli-support"))]
pub mod udiff;
mod capture_example;
mod zed_edit_prediction_delegate;
pub mod zeta1;
pub mod zeta2;
@@ -75,7 +74,6 @@ pub use crate::prediction::EditPrediction;
pub use crate::prediction::EditPredictionId;
use crate::prediction::EditPredictionResult;
pub use crate::sweep_ai::SweepAi;
pub use capture_example::capture_example;
pub use language_model::ApiKeyState;
pub use telemetry_events::EditPredictionRating;
pub use zed_edit_prediction_delegate::ZedEditPredictionDelegate;
@@ -233,15 +231,8 @@ pub struct EditPredictionFinishedDebugEvent {
pub type RequestDebugInfo = predict_edits_v3::DebugInfo;
/// An event with associated metadata for reconstructing buffer state.
#[derive(Clone)]
pub struct StoredEvent {
pub event: Arc<zeta_prompt::Event>,
pub old_snapshot: TextBufferSnapshot,
}
struct ProjectState {
events: VecDeque<StoredEvent>,
events: VecDeque<Arc<zeta_prompt::Event>>,
last_event: Option<LastEvent>,
recent_paths: VecDeque<ProjectPath>,
registered_buffers: HashMap<gpui::EntityId, RegisteredBuffer>,
@@ -257,7 +248,7 @@ struct ProjectState {
}
impl ProjectState {
pub fn events(&self, cx: &App) -> Vec<StoredEvent> {
pub fn events(&self, cx: &App) -> Vec<Arc<zeta_prompt::Event>> {
self.events
.iter()
.cloned()
@@ -269,7 +260,7 @@ impl ProjectState {
.collect()
}
pub fn events_split_by_pause(&self, cx: &App) -> Vec<StoredEvent> {
pub fn events_split_by_pause(&self, cx: &App) -> Vec<Arc<zeta_prompt::Event>> {
self.events
.iter()
.cloned()
@@ -424,7 +415,7 @@ impl LastEvent {
&self,
license_detection_watchers: &HashMap<WorktreeId, Rc<LicenseDetectionWatcher>>,
cx: &App,
) -> Option<StoredEvent> {
) -> Option<Arc<zeta_prompt::Event>> {
let path = buffer_path_with_id_fallback(self.new_file.as_ref(), &self.new_snapshot, cx);
let old_path = buffer_path_with_id_fallback(self.old_file.as_ref(), &self.old_snapshot, cx);
@@ -439,22 +430,19 @@ impl LastEvent {
})
});
let diff = compute_diff_between_snapshots(&self.old_snapshot, &self.new_snapshot)?;
let diff = language::unified_diff(&self.old_snapshot.text(), &self.new_snapshot.text());
if path == old_path && diff.is_empty() {
None
} else {
Some(StoredEvent {
event: Arc::new(zeta_prompt::Event::BufferChange {
old_path,
path,
diff,
in_open_source_repo,
// TODO: Actually detect if this edit was predicted or not
predicted: false,
}),
old_snapshot: self.old_snapshot.clone(),
})
Some(Arc::new(zeta_prompt::Event::BufferChange {
old_path,
path,
diff,
in_open_source_repo,
// TODO: Actually detect if this edit was predicted or not
predicted: false,
}))
}
}
@@ -487,52 +475,6 @@ impl LastEvent {
}
}
pub(crate) fn compute_diff_between_snapshots(
old_snapshot: &TextBufferSnapshot,
new_snapshot: &TextBufferSnapshot,
) -> Option<String> {
let edits: Vec<Edit<usize>> = new_snapshot
.edits_since::<usize>(&old_snapshot.version)
.collect();
let (first_edit, last_edit) = edits.first().zip(edits.last())?;
let old_start_point = old_snapshot.offset_to_point(first_edit.old.start);
let old_end_point = old_snapshot.offset_to_point(last_edit.old.end);
let new_start_point = new_snapshot.offset_to_point(first_edit.new.start);
let new_end_point = new_snapshot.offset_to_point(last_edit.new.end);
const CONTEXT_LINES: u32 = 3;
let old_context_start_row = old_start_point.row.saturating_sub(CONTEXT_LINES);
let new_context_start_row = new_start_point.row.saturating_sub(CONTEXT_LINES);
let old_context_end_row =
(old_end_point.row + 1 + CONTEXT_LINES).min(old_snapshot.max_point().row);
let new_context_end_row =
(new_end_point.row + 1 + CONTEXT_LINES).min(new_snapshot.max_point().row);
let old_start_line_offset = old_snapshot.point_to_offset(Point::new(old_context_start_row, 0));
let new_start_line_offset = new_snapshot.point_to_offset(Point::new(new_context_start_row, 0));
let old_end_line_offset = old_snapshot
.point_to_offset(Point::new(old_context_end_row + 1, 0).min(old_snapshot.max_point()));
let new_end_line_offset = new_snapshot
.point_to_offset(Point::new(new_context_end_row + 1, 0).min(new_snapshot.max_point()));
let old_edit_range = old_start_line_offset..old_end_line_offset;
let new_edit_range = new_start_line_offset..new_end_line_offset;
let old_region_text: String = old_snapshot.text_for_range(old_edit_range).collect();
let new_region_text: String = new_snapshot.text_for_range(new_edit_range).collect();
let diff = language::unified_diff_with_offsets(
&old_region_text,
&new_region_text,
old_context_start_row,
new_context_start_row,
);
Some(diff)
}
fn buffer_path_with_id_fallback(
file: Option<&Arc<dyn File>>,
snapshot: &TextBufferSnapshot,
@@ -701,7 +643,7 @@ impl EditPredictionStore {
&self,
project: &Entity<Project>,
cx: &App,
) -> Vec<StoredEvent> {
) -> Vec<Arc<zeta_prompt::Event>> {
self.projects
.get(&project.entity_id())
.map(|project_state| project_state.events(cx))
@@ -712,7 +654,7 @@ impl EditPredictionStore {
&self,
project: &Entity<Project>,
cx: &App,
) -> Vec<StoredEvent> {
) -> Vec<Arc<zeta_prompt::Event>> {
self.projects
.get(&project.entity_id())
.map(|project_state| project_state.events_split_by_pause(cx))
@@ -1594,10 +1536,8 @@ impl EditPredictionStore {
self.get_or_init_project(&project, cx);
let project_state = self.projects.get(&project.entity_id()).unwrap();
let stored_events = project_state.events(cx);
let has_events = !stored_events.is_empty();
let events: Vec<Arc<zeta_prompt::Event>> =
stored_events.into_iter().map(|e| e.event).collect();
let events = project_state.events(cx);
let has_events = !events.is_empty();
let debug_tx = project_state.debug_tx.clone();
let snapshot = active_buffer.read(cx).snapshot();

View File

@@ -1,5 +1,5 @@
use super::*;
use crate::{compute_diff_between_snapshots, udiff::apply_diff_to_string, zeta1::MAX_EVENT_TOKENS};
use crate::{udiff::apply_diff_to_string, zeta1::MAX_EVENT_TOKENS};
use client::{UserStore, test::FakeServer};
use clock::{FakeSystemClock, ReplicaId};
use cloud_api_types::{CreateLlmTokenResponse, LlmToken};
@@ -360,7 +360,7 @@ async fn test_edit_history_getter_pause_splits_last_event(cx: &mut TestAppContex
ep_store.edit_history_for_project(&project, cx)
});
assert_eq!(events.len(), 1);
let zeta_prompt::Event::BufferChange { diff, .. } = events[0].event.as_ref();
let zeta_prompt::Event::BufferChange { diff, .. } = events[0].as_ref();
assert_eq!(
diff.as_str(),
indoc! {"
@@ -377,7 +377,7 @@ async fn test_edit_history_getter_pause_splits_last_event(cx: &mut TestAppContex
ep_store.edit_history_for_project_with_pause_split_last_event(&project, cx)
});
assert_eq!(events.len(), 2);
let zeta_prompt::Event::BufferChange { diff, .. } = events[0].event.as_ref();
let zeta_prompt::Event::BufferChange { diff, .. } = events[0].as_ref();
assert_eq!(
diff.as_str(),
indoc! {"
@@ -389,7 +389,7 @@ async fn test_edit_history_getter_pause_splits_last_event(cx: &mut TestAppContex
"}
);
let zeta_prompt::Event::BufferChange { diff, .. } = events[1].event.as_ref();
let zeta_prompt::Event::BufferChange { diff, .. } = events[1].as_ref();
assert_eq!(
diff.as_str(),
indoc! {"
@@ -2082,74 +2082,6 @@ async fn test_unauthenticated_with_custom_url_allows_prediction_impl(cx: &mut Te
);
}
#[gpui::test]
fn test_compute_diff_between_snapshots(cx: &mut TestAppContext) {
let buffer = cx.new(|cx| {
Buffer::local(
indoc! {"
zero
one
two
three
four
five
six
seven
eight
nine
ten
eleven
twelve
thirteen
fourteen
fifteen
sixteen
seventeen
eighteen
nineteen
twenty
twenty-one
twenty-two
twenty-three
twenty-four
"},
cx,
)
});
let old_snapshot = buffer.read_with(cx, |buffer, _| buffer.text_snapshot());
buffer.update(cx, |buffer, cx| {
let point = Point::new(12, 0);
buffer.edit([(point..point, "SECOND INSERTION\n")], None, cx);
let point = Point::new(8, 0);
buffer.edit([(point..point, "FIRST INSERTION\n")], None, cx);
});
let new_snapshot = buffer.read_with(cx, |buffer, _| buffer.text_snapshot());
let diff = compute_diff_between_snapshots(&old_snapshot, &new_snapshot).unwrap();
assert_eq!(
diff,
indoc! {"
@@ -6,10 +6,12 @@
five
six
seven
+FIRST INSERTION
eight
nine
ten
eleven
+SECOND INSERTION
twelve
thirteen
fourteen
"}
);
}
#[ctor::ctor]
fn init_logger() {
zlog::init_test();

View File

@@ -1,7 +1,7 @@
use serde::{Deserialize, Serialize};
use std::{fmt::Write as _, mem, path::Path, sync::Arc};
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct ExampleSpec {
#[serde(default)]
pub name: String,

View File

@@ -45,11 +45,6 @@ pub async fn run_format_prompt(
let snapshot = state.buffer.read_with(&cx, |buffer, _| buffer.snapshot())?;
let project = state.project.clone();
let (_, input) = ep_store.update(&mut cx, |ep_store, cx| {
let events = ep_store
.edit_history_for_project(&project, cx)
.into_iter()
.map(|e| e.event)
.collect();
anyhow::Ok(zeta2_prompt_input(
&snapshot,
example
@@ -58,7 +53,7 @@ pub async fn run_format_prompt(
.context("context must be set")?
.files
.clone(),
events,
ep_store.edit_history_for_project(&project, cx),
example.spec.cursor_path.clone(),
example
.buffer

View File

@@ -15,7 +15,8 @@ doctest = false
[dependencies]
anyhow.workspace = true
buffer_diff.workspace = true
collections.workspace = true
git.workspace = true
log.workspace = true
time.workspace = true
client.workspace = true
cloud_llm_client.workspace = true
@@ -49,18 +50,11 @@ zed_actions.workspace = true
zeta_prompt.workspace = true
[dev-dependencies]
clock.workspace = true
copilot = { workspace = true, features = ["test-support"] }
editor = { workspace = true, features = ["test-support"] }
futures.workspace = true
indoc.workspace = true
language_model.workspace = true
lsp = { workspace = true, features = ["test-support"] }
pretty_assertions.workspace = true
project = { workspace = true, features = ["test-support"] }
release_channel.workspace = true
semver.workspace = true
serde_json.workspace = true
theme = { workspace = true, features = ["test-support"] }
workspace = { workspace = true, features = ["test-support"] }
zlog.workspace = true

View File

@@ -915,8 +915,11 @@ impl EditPredictionButton {
.when(
cx.has_flag::<PredictEditsRatePredictionsFeatureFlag>(),
|this| {
this.action("Capture Prediction Example", CaptureExample.boxed_clone())
.action("Rate Predictions", RatePredictions.boxed_clone())
this.action(
"Capture Edit Prediction Example",
CaptureExample.boxed_clone(),
)
.action("Rate Predictions", RatePredictions.boxed_clone())
},
);
}

View File

@@ -2,17 +2,25 @@ mod edit_prediction_button;
mod edit_prediction_context_view;
mod rate_prediction_modal;
use std::any::{Any as _, TypeId};
use std::path::Path;
use std::sync::Arc;
use command_palette_hooks::CommandPaletteFilter;
use edit_prediction::{ResetOnboarding, Zeta2FeatureFlag, capture_example};
use edit_prediction::{
EditPredictionStore, ResetOnboarding, Zeta2FeatureFlag, example_spec::ExampleSpec,
};
use edit_prediction_context_view::EditPredictionContextView;
use editor::Editor;
use feature_flags::FeatureFlagAppExt as _;
use gpui::actions;
use language::language_settings::AllLanguageSettings;
use git::repository::DiffType;
use gpui::{Window, actions};
use language::ToPoint as _;
use log;
use project::DisableAiSettings;
use rate_prediction_modal::RatePredictionsModal;
use settings::{Settings as _, SettingsStore};
use std::any::{Any as _, TypeId};
use text::ToOffset as _;
use ui::{App, prelude::*};
use workspace::{SplitDirection, Workspace};
@@ -48,9 +56,7 @@ pub fn init(cx: &mut App) {
}
});
workspace.register_action(|workspace, _: &CaptureExample, window, cx| {
capture_example_as_markdown(workspace, window, cx);
});
workspace.register_action(capture_edit_prediction_example);
workspace.register_action_renderer(|div, _, _, cx| {
let has_flag = cx.has_flag::<Zeta2FeatureFlag>();
div.when(has_flag, |div| {
@@ -132,48 +138,182 @@ fn feature_gate_predict_edits_actions(cx: &mut App) {
.detach();
}
fn capture_example_as_markdown(
fn capture_edit_prediction_example(
workspace: &mut Workspace,
_: &CaptureExample,
window: &mut Window,
cx: &mut Context<Workspace>,
) -> Option<()> {
) {
let Some(ep_store) = EditPredictionStore::try_global(cx) else {
return;
};
let project = workspace.project().clone();
let (worktree_root, repository) = {
let project_ref = project.read(cx);
let worktree_root = project_ref
.visible_worktrees(cx)
.next()
.map(|worktree| worktree.read(cx).abs_path());
let repository = project_ref.active_repository(cx);
(worktree_root, repository)
};
let (Some(worktree_root), Some(repository)) = (worktree_root, repository) else {
log::error!("CaptureExampleSpec: missing worktree or active repository");
return;
};
let repository_snapshot = repository.read(cx).snapshot();
if worktree_root.as_ref() != repository_snapshot.work_directory_abs_path.as_ref() {
log::error!(
"repository is not at worktree root (repo={:?}, worktree={:?})",
repository_snapshot.work_directory_abs_path,
worktree_root
);
return;
}
let Some(repository_url) = repository_snapshot
.remote_origin_url
.clone()
.or_else(|| repository_snapshot.remote_upstream_url.clone())
else {
log::error!("active repository has no origin/upstream remote url");
return;
};
let Some(revision) = repository_snapshot
.head_commit
.as_ref()
.map(|commit| commit.sha.to_string())
else {
log::error!("active repository has no head commit");
return;
};
let mut events = ep_store.update(cx, |store, cx| {
store.edit_history_for_project_with_pause_split_last_event(&project, cx)
});
let Some(editor) = workspace.active_item_as::<Editor>(cx) else {
log::error!("no active editor");
return;
};
let Some(project_path) = editor.read(cx).project_path(cx) else {
log::error!("active editor has no project path");
return;
};
let Some((buffer, cursor_anchor)) = editor
.read(cx)
.buffer()
.read(cx)
.text_anchor_for_position(editor.read(cx).selections.newest_anchor().head(), cx)
else {
log::error!("failed to resolve cursor buffer/anchor");
return;
};
let snapshot = buffer.read(cx).snapshot();
let cursor_point = cursor_anchor.to_point(&snapshot);
let (_editable_range, context_range) =
edit_prediction::cursor_excerpt::editable_and_context_ranges_for_cursor_position(
cursor_point,
&snapshot,
100,
50,
);
let cursor_path: Arc<Path> = repository
.read(cx)
.project_path_to_repo_path(&project_path, cx)
.map(|repo_path| Path::new(repo_path.as_unix_str()).into())
.unwrap_or_else(|| Path::new(project_path.path.as_unix_str()).into());
let cursor_position = {
let context_start_offset = context_range.start.to_offset(&snapshot);
let cursor_offset = cursor_anchor.to_offset(&snapshot);
let cursor_offset_in_excerpt = cursor_offset.saturating_sub(context_start_offset);
let mut excerpt = snapshot.text_for_range(context_range).collect::<String>();
if cursor_offset_in_excerpt <= excerpt.len() {
excerpt.insert_str(cursor_offset_in_excerpt, zeta_prompt::CURSOR_MARKER);
}
excerpt
};
let markdown_language = workspace
.app_state()
.languages
.language_for_name("Markdown");
let fs = workspace.app_state().fs.clone();
let project = workspace.project().clone();
let editor = workspace.active_item_as::<Editor>(cx)?;
let editor = editor.read(cx);
let (buffer, cursor_anchor) = editor
.buffer()
.read(cx)
.text_anchor_for_position(editor.selections.newest_anchor().head(), cx)?;
let example = capture_example(project.clone(), buffer, cursor_anchor, true, cx)?;
let examples_dir = AllLanguageSettings::get_global(cx)
.edit_predictions
.examples_dir
.clone();
cx.spawn_in(window, async move |workspace_entity, cx| {
let markdown_language = markdown_language.await?;
let example_spec = example.await?;
let buffer = if let Some(dir) = examples_dir {
fs.create_dir(&dir).await.ok();
let mut path = dir.join(&example_spec.name.replace(' ', "--").replace(':', "-"));
path.set_extension("md");
project.update(cx, |project, cx| project.open_local_buffer(&path, cx))
} else {
project.update(cx, |project, cx| project.create_buffer(false, cx))
}?
.await?;
let uncommitted_diff_rx = repository.update(cx, |repository, cx| {
repository.diff(DiffType::HeadToWorktree, cx)
})?;
let uncommitted_diff = match uncommitted_diff_rx.await {
Ok(Ok(diff)) => diff,
Ok(Err(error)) => {
log::error!("failed to compute uncommitted diff: {error:#}");
return Ok(());
}
Err(error) => {
log::error!("uncommitted diff channel dropped: {error:#}");
return Ok(());
}
};
let mut edit_history = String::new();
let mut expected_patch = String::new();
if let Some(last_event) = events.pop() {
for event in &events {
zeta_prompt::write_event(&mut edit_history, event);
if !edit_history.ends_with('\n') {
edit_history.push('\n');
}
edit_history.push('\n');
}
zeta_prompt::write_event(&mut expected_patch, &last_event);
}
let format =
time::format_description::parse("[year]-[month]-[day] [hour]:[minute]:[second]");
let name = match format {
Ok(format) => {
let now = time::OffsetDateTime::now_local()
.unwrap_or_else(|_| time::OffsetDateTime::now_utc());
now.format(&format)
.unwrap_or_else(|_| "unknown-time".to_string())
}
Err(_) => "unknown-time".to_string(),
};
let markdown = ExampleSpec {
name,
repository_url,
revision,
uncommitted_diff,
cursor_path,
cursor_position,
edit_history,
expected_patch,
}
.to_markdown();
let buffer = project
.update(cx, |project, cx| project.create_buffer(false, cx))?
.await?;
buffer.update(cx, |buffer, cx| {
buffer.set_text(example_spec.to_markdown(), cx);
buffer.set_text(markdown, cx);
buffer.set_language(Some(markdown_language), cx);
})?;
workspace_entity.update_in(cx, |workspace, window, cx| {
workspace.add_item_to_active_pane(
Box::new(
@@ -187,5 +327,4 @@ fn capture_example_as_markdown(
})
})
.detach_and_log_err(cx);
None
}

View File

@@ -29602,17 +29602,6 @@ async fn test_newline_task_list_continuation(cx: &mut TestAppContext) {
- [ ] ˇ
"});
// Case 2.1: Works with uppercase checked marker too
cx.set_state(indoc! {"
- [X] completed taskˇ
"});
cx.update_editor(|e, window, cx| e.newline(&Newline, window, cx));
cx.wait_for_autoindent_applied().await;
cx.assert_editor_state(indoc! {"
- [X] completed task
- [ ] ˇ
"});
// Case 3: Cursor position doesn't matter - content after marker is what counts
cx.set_state(indoc! {"
- [ ] taˇsk

View File

@@ -164,6 +164,11 @@ pub fn deploy_context_menu(
window.focus(&editor.focus_handle(cx), cx);
}
// Don't show context menu for inline editors
if !editor.mode().is_full() {
return;
}
let display_map = editor.display_snapshot(cx);
let source_anchor = display_map.display_point_to_anchor(point, text::Bias::Right);
let context_menu = if let Some(custom) = editor.custom_context_menu.take() {
@@ -174,11 +179,6 @@ pub fn deploy_context_menu(
};
menu
} else {
// Don't show context menu for inline editors (only applies to default menu)
if !editor.mode().is_full() {
return;
}
// Don't show the context menu if there isn't a project associated with this editor
let Some(project) = editor.project.clone() else {
return;

View File

@@ -1760,19 +1760,16 @@ impl PickerDelegate for FileFinderDelegate {
menu.context(focus_handle)
.action(
"Split Left",
pane::SplitLeft::default().boxed_clone(),
pane::SplitLeft.boxed_clone(),
)
.action(
"Split Right",
pane::SplitRight::default().boxed_clone(),
)
.action(
"Split Up",
pane::SplitUp::default().boxed_clone(),
pane::SplitRight.boxed_clone(),
)
.action("Split Up", pane::SplitUp.boxed_clone())
.action(
"Split Down",
pane::SplitDown::default().boxed_clone(),
pane::SplitDown.boxed_clone(),
)
}
}))

View File

@@ -156,16 +156,8 @@ impl GitRepository for FakeGitRepository {
})
}
fn remote_url(&self, name: &str) -> BoxFuture<'_, Option<String>> {
let name = name.to_string();
let fut = self.with_state_async(false, move |state| {
state
.remotes
.get(&name)
.context("remote not found")
.cloned()
});
async move { fut.await.ok() }.boxed()
fn remote_url(&self, _name: &str) -> BoxFuture<'_, Option<String>> {
async move { None }.boxed()
}
fn diff_tree(&self, _request: DiffTreeType) -> BoxFuture<'_, Result<TreeDiff>> {

View File

@@ -1857,18 +1857,6 @@ impl FakeFs {
.unwrap();
}
pub fn set_remote_for_repo(
&self,
dot_git: &Path,
name: impl Into<String>,
url: impl Into<String>,
) {
self.with_git_state(dot_git, true, |state| {
state.remotes.insert(name.into(), url.into());
})
.unwrap();
}
pub fn insert_branches(&self, dot_git: &Path, branches: &[&str]) {
self.with_git_state(dot_git, true, |state| {
if let Some(first) = branches.first()

View File

@@ -8,9 +8,9 @@ use git::{
parse_git_remote_url,
};
use gpui::{
AnyElement, App, AppContext as _, AsyncApp, AsyncWindowContext, ClipboardItem, Context,
Element, Entity, EventEmitter, FocusHandle, Focusable, InteractiveElement, IntoElement,
ParentElement, PromptLevel, Render, Styled, Task, WeakEntity, Window, actions,
AnyElement, App, AppContext as _, AsyncApp, AsyncWindowContext, Context, Element, Entity,
EventEmitter, FocusHandle, Focusable, InteractiveElement, IntoElement, ParentElement,
PromptLevel, Render, Styled, Task, WeakEntity, Window, actions,
};
use language::{
Anchor, Buffer, Capability, DiskState, File, LanguageRegistry, LineEnding, OffsetRangeExt as _,
@@ -24,7 +24,7 @@ use std::{
sync::Arc,
};
use theme::ActiveTheme;
use ui::{ButtonLike, DiffStat, Tooltip, prelude::*};
use ui::{DiffStat, Tooltip, prelude::*};
use util::{ResultExt, paths::PathStyle, rel_path::RelPath, truncate_and_trailoff};
use workspace::item::TabTooltipContent;
use workspace::{
@@ -383,7 +383,6 @@ impl CommitView {
fn render_header(&self, window: &mut Window, cx: &mut Context<Self>) -> impl IntoElement {
let commit = &self.commit;
let author_name = commit.author_name.clone();
let commit_sha = commit.sha.clone();
let commit_date = time::OffsetDateTime::from_unix_timestamp(commit.commit_timestamp)
.unwrap_or_else(|_| time::OffsetDateTime::now_utc());
let local_offset = time::UtcOffset::current_local_offset().unwrap_or(time::UtcOffset::UTC);
@@ -430,19 +429,6 @@ impl CommitView {
.full_width()
});
let clipboard_has_link = cx
.read_from_clipboard()
.and_then(|entry| entry.text())
.map_or(false, |clipboard_text| {
clipboard_text.trim() == commit_sha.as_ref()
});
let (copy_icon, copy_icon_color) = if clipboard_has_link {
(IconName::Check, Color::Success)
} else {
(IconName::Copy, Color::Muted)
};
h_flex()
.border_b_1()
.border_color(cx.theme().colors().border_variant)
@@ -468,47 +454,13 @@ impl CommitView {
h_flex()
.gap_1()
.child(Label::new(author_name).color(Color::Default))
.child({
ButtonLike::new("sha")
.child(
h_flex()
.group("sha_btn")
.size_full()
.max_w_32()
.gap_0p5()
.child(
Label::new(commit_sha.clone())
.color(Color::Muted)
.size(LabelSize::Small)
.truncate()
.buffer_font(cx),
)
.child(
div().visible_on_hover("sha_btn").child(
Icon::new(copy_icon)
.color(copy_icon_color)
.size(IconSize::Small),
),
),
)
.tooltip({
let commit_sha = commit_sha.clone();
move |_, cx| {
Tooltip::with_meta(
"Copy Commit SHA",
None,
commit_sha.clone(),
cx,
)
}
})
.on_click(move |_, _, cx| {
cx.stop_propagation();
cx.write_to_clipboard(ClipboardItem::new_string(
commit_sha.to_string(),
));
})
}),
.child(
Label::new(format!("Commit:{}", commit.sha))
.color(Color::Muted)
.size(LabelSize::Small)
.truncate()
.buffer_font(cx),
),
)
.child(
h_flex()

View File

@@ -3638,7 +3638,7 @@ impl GitPanel {
self.entry_count += 1;
let is_staging_or_staged = GitPanel::stage_status_for_entry(status_entry, repo)
.as_bool()
.unwrap_or(true);
.unwrap_or(false);
if repo.had_conflict_on_last_merge_head_change(&status_entry.repo_path) {
self.conflicted_count += 1;

View File

@@ -2154,6 +2154,7 @@ impl Interactivity {
|| cx.active_drag.is_some() && !self.drag_over_styles.is_empty()
{
let hitbox = hitbox.clone();
let was_hovered = hitbox.is_hovered(window);
let hover_state = self.hover_style.as_ref().and_then(|_| {
element_state
.as_ref()
@@ -2161,12 +2162,8 @@ impl Interactivity {
.cloned()
});
let current_view = window.current_view();
window.on_mouse_event(move |_: &MouseMoveEvent, phase, window, cx| {
let hovered = hitbox.is_hovered(window);
let was_hovered = hover_state
.as_ref()
.is_some_and(|state| state.borrow().element);
if phase == DispatchPhase::Capture && hovered != was_hovered {
if let Some(hover_state) = &hover_state {
hover_state.borrow_mut().element = hovered;
@@ -2182,13 +2179,12 @@ impl Interactivity {
.as_ref()
.and_then(|element| element.hover_state.as_ref())
.cloned();
let was_group_hovered = group_hitbox_id.is_hovered(window);
let current_view = window.current_view();
window.on_mouse_event(move |_: &MouseMoveEvent, phase, window, cx| {
let group_hovered = group_hitbox_id.is_hovered(window);
let was_group_hovered = hover_state
.as_ref()
.is_some_and(|state| state.borrow().group);
if phase == DispatchPhase::Capture && group_hovered != was_group_hovered {
if let Some(hover_state) = &hover_state {
hover_state.borrow_mut().group = group_hovered;

View File

@@ -67,7 +67,7 @@ use task::RunnableTag;
pub use task_context::{ContextLocation, ContextProvider, RunnableRange};
pub use text_diff::{
DiffOptions, apply_diff_patch, line_diff, text_diff, text_diff_with_options, unified_diff,
unified_diff_with_offsets, word_diff_ranges,
word_diff_ranges,
};
use theme::SyntaxTheme;
pub use toolchain::{

View File

@@ -392,7 +392,6 @@ pub struct EditPredictionSettings {
/// Whether edit predictions are enabled in the assistant panel.
/// This setting has no effect if globally disabled.
pub enabled_in_text_threads: bool,
pub examples_dir: Option<Arc<Path>>,
}
impl EditPredictionSettings {
@@ -700,7 +699,6 @@ impl settings::Settings for AllLanguageSettings {
copilot: copilot_settings,
codestral: codestral_settings,
enabled_in_text_threads,
examples_dir: edit_predictions.examples_dir,
},
defaults: default_language_settings,
languages,

View File

@@ -1,139 +1,25 @@
use crate::{CharClassifier, CharKind, CharScopeContext, LanguageScope};
use anyhow::{Context, anyhow};
use imara_diff::{
Algorithm, Sink, diff,
intern::{InternedInput, Interner, Token},
Algorithm, UnifiedDiffBuilder, diff,
intern::{InternedInput, Token},
sources::lines_with_terminator,
};
use std::{fmt::Write, iter, ops::Range, sync::Arc};
use std::{iter, ops::Range, sync::Arc};
const MAX_WORD_DIFF_LEN: usize = 512;
const MAX_WORD_DIFF_LINE_COUNT: usize = 8;
/// Computes a diff between two strings, returning a unified diff string.
pub fn unified_diff(old_text: &str, new_text: &str) -> String {
unified_diff_with_offsets(old_text, new_text, 0, 0)
}
/// Computes a diff between two strings, returning a unified diff string with
/// hunk headers adjusted to reflect the given starting line numbers (1-indexed).
pub fn unified_diff_with_offsets(
old_text: &str,
new_text: &str,
old_start_line: u32,
new_start_line: u32,
) -> String {
let input = InternedInput::new(old_text, new_text);
diff(
Algorithm::Histogram,
&input,
OffsetUnifiedDiffBuilder::new(&input, old_start_line, new_start_line),
UnifiedDiffBuilder::new(&input),
)
}
/// A unified diff builder that applies line number offsets to hunk headers.
struct OffsetUnifiedDiffBuilder<'a> {
before: &'a [Token],
after: &'a [Token],
interner: &'a Interner<&'a str>,
pos: u32,
before_hunk_start: u32,
after_hunk_start: u32,
before_hunk_len: u32,
after_hunk_len: u32,
old_line_offset: u32,
new_line_offset: u32,
buffer: String,
dst: String,
}
impl<'a> OffsetUnifiedDiffBuilder<'a> {
fn new(input: &'a InternedInput<&'a str>, old_line_offset: u32, new_line_offset: u32) -> Self {
Self {
before_hunk_start: 0,
after_hunk_start: 0,
before_hunk_len: 0,
after_hunk_len: 0,
old_line_offset,
new_line_offset,
buffer: String::with_capacity(8),
dst: String::new(),
interner: &input.interner,
before: &input.before,
after: &input.after,
pos: 0,
}
}
fn print_tokens(&mut self, tokens: &[Token], prefix: char) {
for &token in tokens {
writeln!(&mut self.buffer, "{prefix}{}", self.interner[token]).unwrap();
}
}
fn flush(&mut self) {
if self.before_hunk_len == 0 && self.after_hunk_len == 0 {
return;
}
let end = (self.pos + 3).min(self.before.len() as u32);
self.update_pos(end, end);
writeln!(
&mut self.dst,
"@@ -{},{} +{},{} @@",
self.before_hunk_start + 1 + self.old_line_offset,
self.before_hunk_len,
self.after_hunk_start + 1 + self.new_line_offset,
self.after_hunk_len,
)
.unwrap();
write!(&mut self.dst, "{}", &self.buffer).unwrap();
self.buffer.clear();
self.before_hunk_len = 0;
self.after_hunk_len = 0;
}
fn update_pos(&mut self, print_to: u32, move_to: u32) {
self.print_tokens(&self.before[self.pos as usize..print_to as usize], ' ');
let len = print_to - self.pos;
self.pos = move_to;
self.before_hunk_len += len;
self.after_hunk_len += len;
}
}
impl Sink for OffsetUnifiedDiffBuilder<'_> {
type Out = String;
fn process_change(&mut self, before: Range<u32>, after: Range<u32>) {
if before.start - self.pos > 6 {
self.flush();
}
if self.before_hunk_len == 0 && self.after_hunk_len == 0 {
self.pos = before.start.saturating_sub(3);
self.before_hunk_start = self.pos;
self.after_hunk_start = after.start.saturating_sub(3);
}
self.update_pos(before.start, before.end);
self.before_hunk_len += before.end - before.start;
self.after_hunk_len += after.end - after.start;
self.print_tokens(
&self.before[before.start as usize..before.end as usize],
'-',
);
self.print_tokens(&self.after[after.start as usize..after.end as usize], '+');
}
fn finish(mut self) -> Self::Out {
self.flush();
self.dst
}
}
/// Computes a diff between two strings, returning a vector of old and new row
/// ranges.
pub fn line_diff(old_text: &str, new_text: &str) -> Vec<(Range<u32>, Range<u32>)> {
@@ -441,30 +327,4 @@ mod tests {
let patch = unified_diff(old_text, new_text);
assert_eq!(apply_diff_patch(old_text, &patch).unwrap(), new_text);
}
#[test]
fn test_unified_diff_with_offsets() {
let old_text = "foo\nbar\nbaz\n";
let new_text = "foo\nBAR\nbaz\n";
let expected_diff_body = " foo\n-bar\n+BAR\n baz\n";
let diff_no_offset = unified_diff(old_text, new_text);
assert_eq!(
diff_no_offset,
format!("@@ -1,3 +1,3 @@\n{}", expected_diff_body)
);
let diff_with_offset = unified_diff_with_offsets(old_text, new_text, 9, 11);
assert_eq!(
diff_with_offset,
format!("@@ -10,3 +12,3 @@\n{}", expected_diff_body)
);
let diff_with_offset = unified_diff_with_offsets(old_text, new_text, 99, 104);
assert_eq!(
diff_with_offset,
format!("@@ -100,3 +105,3 @@\n{}", expected_diff_body)
);
}
}

View File

@@ -22,7 +22,7 @@ rewrap_prefixes = [
]
unordered_list = ["- ", "* ", "+ "]
ordered_list = [{ pattern = "(\\d+)\\. ", format = "{1}. " }]
task_list = { prefixes = ["- [ ] ", "- [x] ", "- [X] "], continuation = "- [ ] " }
task_list = { prefixes = ["- [ ] ", "- [x] "], continuation = "- [ ] " }
auto_indent_on_paste = false
auto_indent_using_last_non_empty_line = false

View File

@@ -22,9 +22,9 @@ use collections::{HashMap, HashSet};
use gpui::{
AnyElement, App, BorderStyle, Bounds, ClipboardItem, CursorStyle, DispatchPhase, Edges, Entity,
FocusHandle, Focusable, FontStyle, FontWeight, GlobalElementId, Hitbox, Hsla, Image,
ImageFormat, KeyContext, Length, MouseButton, MouseDownEvent, MouseEvent, MouseMoveEvent,
MouseUpEvent, Point, ScrollHandle, Stateful, StrikethroughStyle, StyleRefinement, StyledText,
Task, TextLayout, TextRun, TextStyle, TextStyleRefinement, actions, img, point, quad,
ImageFormat, KeyContext, Length, MouseDownEvent, MouseEvent, MouseMoveEvent, MouseUpEvent,
Point, ScrollHandle, Stateful, StrikethroughStyle, StyleRefinement, StyledText, Task,
TextLayout, TextRun, TextStyle, TextStyleRefinement, actions, img, point, quad,
};
use language::{Language, LanguageRegistry, Rope};
use parser::CodeBlockMetadata;
@@ -112,7 +112,6 @@ pub struct Markdown {
options: Options,
copied_code_blocks: HashSet<ElementId>,
code_block_scroll_handles: HashMap<usize, ScrollHandle>,
context_menu_selected_text: Option<String>,
}
struct Options {
@@ -182,7 +181,6 @@ impl Markdown {
},
copied_code_blocks: HashSet::default(),
code_block_scroll_handles: HashMap::default(),
context_menu_selected_text: None,
};
this.parse(cx);
this
@@ -207,7 +205,6 @@ impl Markdown {
},
copied_code_blocks: HashSet::default(),
code_block_scroll_handles: HashMap::default(),
context_menu_selected_text: None,
};
this.parse(cx);
this
@@ -292,14 +289,6 @@ impl Markdown {
}
}
pub fn selected_text(&self) -> Option<String> {
if self.selection.end <= self.selection.start {
None
} else {
Some(self.source[self.selection.start..self.selection.end].to_string())
}
}
fn copy(&self, text: &RenderedText, _: &mut Window, cx: &mut Context<Self>) {
if self.selection.end <= self.selection.start {
return;
@@ -308,11 +297,7 @@ impl Markdown {
cx.write_to_clipboard(ClipboardItem::new_string(text));
}
fn copy_as_markdown(&mut self, _: &mut Window, cx: &mut Context<Self>) {
if let Some(text) = self.context_menu_selected_text.take() {
cx.write_to_clipboard(ClipboardItem::new_string(text));
return;
}
fn copy_as_markdown(&self, _: &mut Window, cx: &mut Context<Self>) {
if self.selection.end <= self.selection.start {
return;
}
@@ -320,10 +305,6 @@ impl Markdown {
cx.write_to_clipboard(ClipboardItem::new_string(text));
}
fn capture_selection_for_context_menu(&mut self) {
self.context_menu_selected_text = self.selected_text();
}
fn parse(&mut self, cx: &mut Context<Self>) {
if self.source.is_empty() {
return;
@@ -684,19 +665,6 @@ impl MarkdownElement {
let on_open_url = self.on_url_click.take();
self.on_mouse_event(window, cx, {
let hitbox = hitbox.clone();
move |markdown, event: &MouseDownEvent, phase, window, _| {
if phase.capture()
&& event.button == MouseButton::Right
&& hitbox.is_hovered(window)
{
// Capture selected text so it survives until menu item is clicked
markdown.capture_selection_for_context_menu();
}
}
});
self.on_mouse_event(window, cx, {
let rendered_text = rendered_text.clone();
let hitbox = hitbox.clone();
@@ -745,7 +713,7 @@ impl MarkdownElement {
window.prevent_default();
cx.notify();
}
} else if phase.capture() && event.button == MouseButton::Left {
} else if phase.capture() {
markdown.selection = Selection::default();
markdown.pressed_link = None;
cx.notify();

View File

@@ -1868,7 +1868,6 @@ pub struct BuiltinAgentServerSettings {
pub ignore_system_version: Option<bool>,
pub default_mode: Option<String>,
pub default_model: Option<String>,
pub favorite_models: Vec<String>,
}
impl BuiltinAgentServerSettings {
@@ -1892,7 +1891,6 @@ impl From<settings::BuiltinAgentServerSettings> for BuiltinAgentServerSettings {
ignore_system_version: value.ignore_system_version,
default_mode: value.default_mode,
default_model: value.default_model,
favorite_models: value.favorite_models,
}
}
}
@@ -1924,10 +1922,6 @@ pub enum CustomAgentServerSettings {
///
/// Default: None
default_model: Option<String>,
/// The favorite models for this agent.
///
/// Default: []
favorite_models: Vec<String>,
},
Extension {
/// The default mode to use for this agent.
@@ -1942,10 +1936,6 @@ pub enum CustomAgentServerSettings {
///
/// Default: None
default_model: Option<String>,
/// The favorite models for this agent.
///
/// Default: []
favorite_models: Vec<String>,
},
}
@@ -1972,17 +1962,6 @@ impl CustomAgentServerSettings {
}
}
}
pub fn favorite_models(&self) -> &[String] {
match self {
CustomAgentServerSettings::Custom {
favorite_models, ..
}
| CustomAgentServerSettings::Extension {
favorite_models, ..
} => favorite_models,
}
}
}
impl From<settings::CustomAgentServerSettings> for CustomAgentServerSettings {
@@ -1994,7 +1973,6 @@ impl From<settings::CustomAgentServerSettings> for CustomAgentServerSettings {
env,
default_mode,
default_model,
favorite_models,
} => CustomAgentServerSettings::Custom {
command: AgentServerCommand {
path: PathBuf::from(shellexpand::tilde(&path.to_string_lossy()).as_ref()),
@@ -2003,16 +1981,13 @@ impl From<settings::CustomAgentServerSettings> for CustomAgentServerSettings {
},
default_mode,
default_model,
favorite_models,
},
settings::CustomAgentServerSettings::Extension {
default_mode,
default_model,
favorite_models,
} => CustomAgentServerSettings::Extension {
default_mode,
default_model,
favorite_models,
},
}
}
@@ -2338,7 +2313,6 @@ mod extension_agent_tests {
ignore_system_version: None,
default_mode: None,
default_model: None,
favorite_models: vec![],
};
let BuiltinAgentServerSettings { path, .. } = settings.into();
@@ -2355,7 +2329,6 @@ mod extension_agent_tests {
env: None,
default_mode: None,
default_model: None,
favorite_models: vec![],
};
let converted: CustomAgentServerSettings = settings.into();

View File

@@ -5756,7 +5756,6 @@ impl Repository {
cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
}
fn load_blob_content(&mut self, oid: Oid, cx: &App) -> Task<Result<String>> {
let repository_id = self.snapshot.id;
let rx = self.send_job(None, move |state, _| async move {

View File

@@ -1293,33 +1293,17 @@ impl Project {
cx.subscribe(&worktree_store, Self::on_worktree_store_event)
.detach();
if init_worktree_trust {
let trust_remote_project = match &connection_options {
RemoteConnectionOptions::Ssh(..) | RemoteConnectionOptions::Wsl(..) => false,
RemoteConnectionOptions::Docker(..) => true,
};
let remote_host = RemoteHostLocation::from(connection_options);
trusted_worktrees::track_worktree_trust(
worktree_store.clone(),
Some(remote_host.clone()),
None,
Some((remote_proto.clone(), REMOTE_SERVER_PROJECT_ID)),
cx,
);
if trust_remote_project {
if let Some(trusted_worktres) = TrustedWorktrees::try_get_global(cx) {
trusted_worktres.update(cx, |trusted_worktres, cx| {
trusted_worktres.trust(
worktree_store
.read(cx)
.worktrees()
.map(|worktree| worktree.read(cx).id())
.map(PathTrust::Worktree)
.collect(),
Some(remote_host),
cx,
);
})
match &connection_options {
RemoteConnectionOptions::Wsl(..) | RemoteConnectionOptions::Ssh(..) => {
trusted_worktrees::track_worktree_trust(
worktree_store.clone(),
Some(RemoteHostLocation::from(connection_options)),
None,
Some((remote_proto.clone(), REMOTE_SERVER_PROJECT_ID)),
cx,
);
}
RemoteConnectionOptions::Docker(..) => {}
}
}

View File

@@ -337,13 +337,6 @@ impl TrustedWorktreesStore {
if restricted_host != remote_host {
return true;
}
// When trusting an abs path on the host, we transitively trust all single file worktrees on this host too.
if is_file && !new_trusted_abs_paths.is_empty() {
trusted_paths.insert(PathTrust::Worktree(*restricted_worktree));
return false;
}
let retain = (!is_file || new_trusted_other_worktrees.is_empty())
&& new_trusted_abs_paths.iter().all(|new_trusted_path| {
!restricted_worktree_path.starts_with(new_trusted_path)
@@ -1052,13 +1045,6 @@ mod tests {
"single-file worktree should be restricted initially"
);
let can_trust_directory =
trusted_worktrees.update(cx, |store, cx| store.can_trust(dir_worktree_id, cx));
assert!(
!can_trust_directory,
"directory worktree should be restricted initially"
);
trusted_worktrees.update(cx, |store, cx| {
store.trust(
HashSet::from_iter([PathTrust::Worktree(dir_worktree_id)]),
@@ -1078,78 +1064,6 @@ mod tests {
);
}
#[gpui::test]
async fn test_parent_path_trust_enables_single_file(cx: &mut TestAppContext) {
init_test(cx);
let fs = FakeFs::new(cx.executor());
fs.insert_tree(
path!("/"),
json!({
"project": { "main.rs": "fn main() {}" },
"standalone.rs": "fn standalone() {}"
}),
)
.await;
let project = Project::test(
fs,
[path!("/project").as_ref(), path!("/standalone.rs").as_ref()],
cx,
)
.await;
let worktree_store = project.read_with(cx, |project, _| project.worktree_store());
let (dir_worktree_id, file_worktree_id) = worktree_store.read_with(cx, |store, cx| {
let worktrees: Vec<_> = store.worktrees().collect();
assert_eq!(worktrees.len(), 2);
let (dir_worktree, file_worktree) = if worktrees[0].read(cx).is_single_file() {
(&worktrees[1], &worktrees[0])
} else {
(&worktrees[0], &worktrees[1])
};
assert!(!dir_worktree.read(cx).is_single_file());
assert!(file_worktree.read(cx).is_single_file());
(dir_worktree.read(cx).id(), file_worktree.read(cx).id())
});
let trusted_worktrees = init_trust_global(worktree_store, cx);
let can_trust_file =
trusted_worktrees.update(cx, |store, cx| store.can_trust(file_worktree_id, cx));
assert!(
!can_trust_file,
"single-file worktree should be restricted initially"
);
let can_trust_directory =
trusted_worktrees.update(cx, |store, cx| store.can_trust(dir_worktree_id, cx));
assert!(
!can_trust_directory,
"directory worktree should be restricted initially"
);
trusted_worktrees.update(cx, |store, cx| {
store.trust(
HashSet::from_iter([PathTrust::AbsPath(PathBuf::from(path!("/project")))]),
None,
cx,
);
});
let can_trust_dir =
trusted_worktrees.update(cx, |store, cx| store.can_trust(dir_worktree_id, cx));
let can_trust_file_after =
trusted_worktrees.update(cx, |store, cx| store.can_trust(file_worktree_id, cx));
assert!(
can_trust_dir,
"directory worktree should be trusted after its parent is trusted"
);
assert!(
can_trust_file_after,
"single-file worktree should be trusted after directory worktree trust via its parent directory trust"
);
}
#[gpui::test]
async fn test_abs_path_trust_covers_multiple_worktrees(cx: &mut TestAppContext) {
init_test(cx);

View File

@@ -53,9 +53,7 @@ async fn check_for_docker() -> Result<(), DevContainerError> {
}
}
async fn ensure_devcontainer_cli(
node_runtime: &NodeRuntime,
) -> Result<(PathBuf, bool), DevContainerError> {
async fn ensure_devcontainer_cli(node_runtime: NodeRuntime) -> Result<PathBuf, DevContainerError> {
let mut command = util::command::new_smol_command(&dev_container_cli());
command.arg("--version");
@@ -65,42 +63,23 @@ async fn ensure_devcontainer_cli(
e
);
let Ok(node_runtime_path) = node_runtime.binary_path().await else {
return Err(DevContainerError::NodeRuntimeNotAvailable);
};
let datadir_cli_path = paths::devcontainer_dir()
.join("node_modules")
.join("@devcontainers")
.join("cli")
.join(format!("{}.js", &dev_container_cli()));
log::debug!(
"devcontainer not found in path, using local location: ${}",
datadir_cli_path.display()
);
.join(".bin")
.join(&dev_container_cli());
let mut command =
util::command::new_smol_command(node_runtime_path.as_os_str().display().to_string());
command.arg(datadir_cli_path.display().to_string());
util::command::new_smol_command(&datadir_cli_path.as_os_str().display().to_string());
command.arg("--version");
match command.output().await {
Err(e) => log::error!(
if let Err(e) = command.output().await {
log::error!(
"Unable to find devcontainer CLI in Data dir. Will try to install. Error: {:?}",
e
),
Ok(output) => {
if output.status.success() {
log::info!("Found devcontainer CLI in Data dir");
return Ok((datadir_cli_path.clone(), false));
} else {
log::error!(
"Could not run devcontainer CLI from data_dir. Will try once more to install. Output: {:?}",
output
);
}
}
);
} else {
log::info!("Found devcontainer CLI in Data dir");
return Ok(datadir_cli_path.clone());
}
if let Err(e) = fs::create_dir_all(paths::devcontainer_dir()).await {
@@ -122,9 +101,7 @@ async fn ensure_devcontainer_cli(
return Err(DevContainerError::DevContainerCliNotAvailable);
};
let mut command =
util::command::new_smol_command(node_runtime_path.as_os_str().display().to_string());
command.arg(datadir_cli_path.display().to_string());
let mut command = util::command::new_smol_command(&datadir_cli_path.display().to_string());
command.arg("--version");
if let Err(e) = command.output().await {
log::error!(
@@ -133,42 +110,22 @@ async fn ensure_devcontainer_cli(
);
Err(DevContainerError::DevContainerCliNotAvailable)
} else {
Ok((datadir_cli_path, false))
Ok(datadir_cli_path)
}
} else {
log::info!("Found devcontainer cli on $PATH, using it");
Ok((PathBuf::from(&dev_container_cli()), true))
Ok(PathBuf::from(&dev_container_cli()))
}
}
async fn devcontainer_up(
path_to_cli: &PathBuf,
found_in_path: bool,
node_runtime: &NodeRuntime,
path: Arc<Path>,
) -> Result<DevContainerUp, DevContainerError> {
let Ok(node_runtime_path) = node_runtime.binary_path().await else {
log::error!("Unable to find node runtime path");
return Err(DevContainerError::NodeRuntimeNotAvailable);
};
let mut command = if found_in_path {
let mut command = util::command::new_smol_command(path_to_cli.display().to_string());
command.arg("up");
command.arg("--workspace-folder");
command.arg(path.display().to_string());
command
} else {
let mut command =
util::command::new_smol_command(node_runtime_path.as_os_str().display().to_string());
command.arg(path_to_cli.display().to_string());
command.arg("up");
command.arg("--workspace-folder");
command.arg(path.display().to_string());
command
};
log::debug!("Running full devcontainer up command: {:?}", command);
let mut command = util::command::new_smol_command(path_to_cli.display().to_string());
command.arg("up");
command.arg("--workspace-folder");
command.arg(path.display().to_string());
match command.output().await {
Ok(output) => {
@@ -278,7 +235,7 @@ pub(crate) async fn start_dev_container(
) -> Result<(Connection, String), DevContainerError> {
check_for_docker().await?;
let (path_to_devcontainer_cli, found_in_path) = ensure_devcontainer_cli(&node_runtime).await?;
let path_to_devcontainer_cli = ensure_devcontainer_cli(node_runtime).await?;
let Some(directory) = project_directory(cx) else {
return Err(DevContainerError::DevContainerNotFound);
@@ -288,13 +245,7 @@ pub(crate) async fn start_dev_container(
container_id,
remote_workspace_folder,
..
}) = devcontainer_up(
&path_to_devcontainer_cli,
found_in_path,
&node_runtime,
directory.clone(),
)
.await
}) = devcontainer_up(&path_to_devcontainer_cli, directory.clone()).await
{
let project_name = get_project_name(
&path_to_devcontainer_cli,
@@ -322,7 +273,6 @@ pub(crate) enum DevContainerError {
DevContainerUpFailed,
DevContainerNotFound,
DevContainerParseFailed,
NodeRuntimeNotAvailable,
}
#[cfg(test)]

View File

@@ -158,9 +158,6 @@ fn handle_rpc_messages_over_child_process_stdio(
}
};
let status = remote_proxy_process.status().await?.code().unwrap_or(1);
if status != 0 {
anyhow::bail!("Remote server exited with status {status}");
}
match result {
Ok(_) => Ok(status),
Err(error) => Err(error),

View File

@@ -582,21 +582,19 @@ impl RemoteConnection for DockerExecConnection {
return Task::ready(Err(anyhow!("Remote binary path not set")));
};
let mut docker_args = vec!["exec".to_string()];
let mut docker_args = vec![
"exec".to_string(),
"-w".to_string(),
self.remote_dir_for_server.clone(),
"-i".to_string(),
self.connection_options.container_id.to_string(),
];
for env_var in ["RUST_LOG", "RUST_BACKTRACE", "ZED_GENERATE_MINIDUMPS"] {
if let Some(value) = std::env::var(env_var).ok() {
docker_args.push("-e".to_string());
docker_args.push(format!("{}='{}'", env_var, value));
}
}
docker_args.extend([
"-w".to_string(),
self.remote_dir_for_server.clone(),
"-i".to_string(),
self.connection_options.container_id.to_string(),
]);
let val = remote_binary_relpath
.display(self.path_style())
.into_owned();

View File

@@ -56,7 +56,6 @@ merge_from_overwrites!(
std::sync::Arc<str>,
gpui::SharedString,
std::path::PathBuf,
std::sync::Arc<std::path::Path>,
gpui::Modifiers,
gpui::FontFeatures,
gpui::FontWeight

View File

@@ -363,13 +363,6 @@ pub struct BuiltinAgentServerSettings {
///
/// Default: None
pub default_model: Option<String>,
/// The favorite models for this agent.
///
/// These are the model IDs as reported by the agent.
///
/// Default: []
#[serde(default)]
pub favorite_models: Vec<String>,
}
#[with_fallible_options]
@@ -394,13 +387,6 @@ pub enum CustomAgentServerSettings {
///
/// Default: None
default_model: Option<String>,
/// The favorite models for this agent.
///
/// These are the model IDs as reported by the agent.
///
/// Default: []
#[serde(default)]
favorite_models: Vec<String>,
},
Extension {
/// The default mode to use for this agent.
@@ -415,12 +401,5 @@ pub enum CustomAgentServerSettings {
///
/// Default: None
default_model: Option<String>,
/// The favorite models for this agent.
///
/// These are the model IDs as reported by the agent.
///
/// Default: []
#[serde(default)]
favorite_models: Vec<String>,
},
}

View File

@@ -1,4 +1,4 @@
use std::{num::NonZeroU32, path::Path};
use std::num::NonZeroU32;
use collections::{HashMap, HashSet};
use gpui::{Modifiers, SharedString};
@@ -167,8 +167,6 @@ pub struct EditPredictionSettingsContent {
/// Whether edit predictions are enabled in the assistant prompt editor.
/// This has no effect if globally disabled.
pub enabled_in_text_threads: Option<bool>,
/// The directory where manually captured edit prediction examples are stored.
pub examples_dir: Option<Arc<Path>>,
}
#[with_fallible_options]

View File

@@ -30,8 +30,8 @@ use workspace::{
ActivateNextPane, ActivatePane, ActivatePaneDown, ActivatePaneLeft, ActivatePaneRight,
ActivatePaneUp, ActivatePreviousPane, DraggedSelection, DraggedTab, ItemId, MoveItemToPane,
MoveItemToPaneInDirection, MovePaneDown, MovePaneLeft, MovePaneRight, MovePaneUp, NewTerminal,
Pane, PaneGroup, SplitDirection, SplitDown, SplitLeft, SplitMode, SplitRight, SplitUp,
SwapPaneDown, SwapPaneLeft, SwapPaneRight, SwapPaneUp, ToggleZoom, Workspace,
Pane, PaneGroup, SplitDirection, SplitDown, SplitLeft, SplitRight, SplitUp, SwapPaneDown,
SwapPaneLeft, SwapPaneRight, SwapPaneUp, ToggleZoom, Workspace,
dock::{DockPosition, Panel, PanelEvent, PanelHandle},
item::SerializableItem,
move_active_item, move_item, pane,
@@ -192,10 +192,10 @@ impl TerminalPanel {
split_context.clone(),
|menu, split_context| menu.context(split_context),
)
.action("Split Right", SplitRight::default().boxed_clone())
.action("Split Left", SplitLeft::default().boxed_clone())
.action("Split Up", SplitUp::default().boxed_clone())
.action("Split Down", SplitDown::default().boxed_clone())
.action("Split Right", SplitRight.boxed_clone())
.action("Split Left", SplitLeft.boxed_clone())
.action("Split Up", SplitUp.boxed_clone())
.action("Split Down", SplitDown.boxed_clone())
})
.into()
}
@@ -380,49 +380,47 @@ impl TerminalPanel {
}
self.serialize(cx);
}
&pane::Event::Split { direction, mode } => {
match mode {
SplitMode::ClonePane | SplitMode::EmptyPane => {
let clone = matches!(mode, SplitMode::ClonePane);
let new_pane = self.new_pane_with_active_terminal(clone, window, cx);
let pane = pane.clone();
cx.spawn_in(window, async move |panel, cx| {
let Some(new_pane) = new_pane.await else {
return;
};
panel
.update_in(cx, |panel, window, cx| {
panel
.center
.split(&pane, &new_pane, direction, cx)
.log_err();
window.focus(&new_pane.focus_handle(cx), cx);
})
.ok();
})
.detach();
}
SplitMode::MovePane => {
let Some(item) =
pane.update(cx, |pane, cx| pane.take_active_item(window, cx))
else {
&pane::Event::Split {
direction,
clone_active_item,
} => {
if clone_active_item {
let fut = self.new_pane_with_cloned_active_terminal(window, cx);
let pane = pane.clone();
cx.spawn_in(window, async move |panel, cx| {
let Some(new_pane) = fut.await else {
return;
};
let Ok(project) = self
.workspace
.update(cx, |workspace, _| workspace.project().clone())
else {
return;
};
let new_pane =
new_terminal_pane(self.workspace.clone(), project, false, window, cx);
new_pane.update(cx, |pane, cx| {
pane.add_item(item, true, true, None, window, cx);
});
self.center.split(&pane, &new_pane, direction, cx).log_err();
window.focus(&new_pane.focus_handle(cx), cx);
}
};
panel
.update_in(cx, |panel, window, cx| {
panel
.center
.split(&pane, &new_pane, direction, cx)
.log_err();
window.focus(&new_pane.focus_handle(cx), cx);
})
.ok();
})
.detach();
} else {
let Some(item) = pane.update(cx, |pane, cx| pane.take_active_item(window, cx))
else {
return;
};
let Ok(project) = self
.workspace
.update(cx, |workspace, _| workspace.project().clone())
else {
return;
};
let new_pane =
new_terminal_pane(self.workspace.clone(), project, false, window, cx);
new_pane.update(cx, |pane, cx| {
pane.add_item(item, true, true, None, window, cx);
});
self.center.split(&pane, &new_pane, direction, cx).log_err();
window.focus(&new_pane.focus_handle(cx), cx);
}
}
pane::Event::Focus => {
self.active_pane = pane.clone();
@@ -435,9 +433,8 @@ impl TerminalPanel {
}
}
fn new_pane_with_active_terminal(
fn new_pane_with_cloned_active_terminal(
&mut self,
clone: bool,
window: &mut Window,
cx: &mut Context<Self>,
) -> Task<Option<Entity<Pane>>> {
@@ -449,34 +446,21 @@ impl TerminalPanel {
let weak_workspace = self.workspace.clone();
let project = workspace.project().clone();
let active_pane = &self.active_pane;
let terminal_view = if clone {
active_pane
.read(cx)
.active_item()
.and_then(|item| item.downcast::<TerminalView>())
} else {
None
};
let working_directory = if clone {
terminal_view
.as_ref()
.and_then(|terminal_view| {
terminal_view
.read(cx)
.terminal()
.read(cx)
.working_directory()
})
.or_else(|| default_working_directory(workspace, cx))
} else {
default_working_directory(workspace, cx)
};
let is_zoomed = if clone {
active_pane.read(cx).is_zoomed()
} else {
false
};
let terminal_view = active_pane
.read(cx)
.active_item()
.and_then(|item| item.downcast::<TerminalView>());
let working_directory = terminal_view
.as_ref()
.and_then(|terminal_view| {
terminal_view
.read(cx)
.terminal()
.read(cx)
.working_directory()
})
.or_else(|| default_working_directory(workspace, cx));
let is_zoomed = active_pane.read(cx).is_zoomed();
cx.spawn_in(window, async move |panel, cx| {
let terminal = project
.update(cx, |project, cx| match terminal_view {
@@ -1498,7 +1482,7 @@ impl Render for TerminalPanel {
window.focus(&pane.read(cx).focus_handle(cx), cx);
} else {
let future =
terminal_panel.new_pane_with_active_terminal(true, window, cx);
terminal_panel.new_pane_with_cloned_active_terminal(window, cx);
cx.spawn_in(window, async move |terminal_panel, cx| {
if let Some(new_pane) = future.await {
_ = terminal_panel.update_in(

View File

@@ -109,9 +109,7 @@ pub async fn extract_seekable_zip<R: AsyncRead + AsyncSeek + Unpin>(
.await
.with_context(|| format!("extracting into file {path:?}"))?;
if let Some(perms) = entry.unix_permissions()
&& perms != 0o000
{
if let Some(perms) = entry.unix_permissions() {
use std::os::unix::fs::PermissionsExt;
let permissions = std::fs::Permissions::from_mode(u32::from(perms));
file.set_permissions(permissions)
@@ -134,8 +132,7 @@ mod tests {
use super::*;
#[allow(unused_variables)]
async fn compress_zip(src_dir: &Path, dst: &Path, keep_file_permissions: bool) -> Result<()> {
async fn compress_zip(src_dir: &Path, dst: &Path) -> Result<()> {
let mut out = smol::fs::File::create(dst).await?;
let mut writer = ZipFileWriter::new(&mut out);
@@ -158,8 +155,8 @@ mod tests {
ZipEntryBuilder::new(filename.into(), async_zip::Compression::Deflate);
use std::os::unix::fs::PermissionsExt;
let metadata = std::fs::metadata(path)?;
let perms = keep_file_permissions.then(|| metadata.permissions().mode() as u16);
builder = builder.unix_permissions(perms.unwrap_or_default());
let perms = metadata.permissions().mode() as u16;
builder = builder.unix_permissions(perms);
writer.write_entry_whole(builder, &data).await?;
}
#[cfg(not(unix))]
@@ -209,9 +206,7 @@ mod tests {
let zip_file = test_dir.path().join("test.zip");
smol::block_on(async {
compress_zip(test_dir.path(), &zip_file, true)
.await
.unwrap();
compress_zip(test_dir.path(), &zip_file).await.unwrap();
let reader = read_archive(&zip_file).await;
let dir = tempfile::tempdir().unwrap();
@@ -242,9 +237,7 @@ mod tests {
// Create zip
let zip_file = test_dir.path().join("test.zip");
compress_zip(test_dir.path(), &zip_file, true)
.await
.unwrap();
compress_zip(test_dir.path(), &zip_file).await.unwrap();
// Extract to new location
let extract_dir = tempfile::tempdir().unwrap();
@@ -258,39 +251,4 @@ mod tests {
assert_eq!(extracted_perms.mode() & 0o777, 0o755);
});
}
#[cfg(unix)]
#[test]
fn test_extract_zip_sets_default_permissions() {
    use std::os::unix::fs::PermissionsExt;
    smol::block_on(async {
        let test_dir = tempfile::tempdir().unwrap();
        let executable_path = test_dir.path().join("my_script");
        // Write a script file; only its contents are written here, no
        // executable bit is explicitly set on it.
        std::fs::write(&executable_path, "#!/bin/bash\necho 'Hello'").unwrap();
        // Create a zip WITHOUT preserving file permissions (third arg: false),
        // so no mode information is stored in the archive entries.
        let zip_file = test_dir.path().join("test.zip");
        compress_zip(test_dir.path(), &zip_file, false)
            .await
            .unwrap();
        // Extract to new location
        let extract_dir = tempfile::tempdir().unwrap();
        let reader = read_archive(&zip_file).await;
        extract_zip(extract_dir.path(), reader).await.unwrap();
        // Since the archive carried no permissions, extraction should fall
        // back to the default 0o644 mode (not preserve the source's mode).
        let extracted_path = extract_dir.path().join("my_script");
        assert!(extracted_path.exists());
        let extracted_perms = std::fs::metadata(&extracted_path).unwrap().permissions();
        assert_eq!(
            extracted_perms.mode() & 0o777,
            0o644,
            "Expected default set of permissions for unzipped file with no permissions set."
        );
    });
}
}

View File

@@ -1468,28 +1468,24 @@ fn generate_commands(_: &App) -> Vec<VimCommand> {
action.range.replace(range.clone());
Some(Box::new(action))
}),
VimCommand::new(("sp", "lit"), workspace::SplitHorizontal::default()).filename(
|_, filename| {
Some(
VimSplit {
vertical: false,
filename,
}
.boxed_clone(),
)
},
),
VimCommand::new(("vs", "plit"), workspace::SplitVertical::default()).filename(
|_, filename| {
Some(
VimSplit {
vertical: true,
filename,
}
.boxed_clone(),
)
},
),
VimCommand::new(("sp", "lit"), workspace::SplitHorizontal).filename(|_, filename| {
Some(
VimSplit {
vertical: false,
filename,
}
.boxed_clone(),
)
}),
VimCommand::new(("vs", "plit"), workspace::SplitVertical).filename(|_, filename| {
Some(
VimSplit {
vertical: true,
filename,
}
.boxed_clone(),
)
}),
VimCommand::new(("tabe", "dit"), workspace::NewFile)
.filename(|_action, filename| Some(VimEdit { filename }.boxed_clone())),
VimCommand::new(("tabnew", ""), workspace::NewFile)

View File

@@ -1037,9 +1037,7 @@ impl Render for PanelButtons {
.anchor(menu_anchor)
.attach(menu_attach)
.trigger(move |is_active, _window, _cx| {
// Include active state in element ID to invalidate the cached
// tooltip when panel state changes (e.g., via keyboard shortcut)
IconButton::new((name, is_active_button as u64), icon)
IconButton::new(name, icon)
.icon_size(IconSize::Small)
.toggle_state(is_active_button)
.on_click({

View File

@@ -197,41 +197,6 @@ pub struct DeploySearch {
pub excluded_files: Option<String>,
}
/// Selects what the newly created pane contains after a split action.
#[derive(Clone, Copy, PartialEq, Debug, Deserialize, JsonSchema, Default)]
#[serde(deny_unknown_fields)]
pub enum SplitMode {
    /// Clone the current pane.
    #[default]
    ClonePane,
    /// Create an empty new pane.
    EmptyPane,
    /// Move the item into a new pane. This will map to nop if only one pane exists.
    MovePane,
}
// Generates one split action struct per entry. Each generated struct carries
// a `SplitMode` field (defaulted via `#[serde(default)]`) selecting how the
// newly created pane is populated.
macro_rules! split_structs {
    ($($name:ident => $doc:literal),* $(,)?) => {
        $(
            #[doc = $doc]
            #[derive(Clone, PartialEq, Debug, Deserialize, JsonSchema, Default, Action)]
            #[action(namespace = pane)]
            #[serde(deny_unknown_fields, default)]
            pub struct $name {
                pub mode: SplitMode,
            }
        )*
    };
}

split_structs!(
    SplitLeft => "Splits the pane to the left.",
    SplitRight => "Splits the pane to the right.",
    SplitUp => "Splits the pane upward.",
    SplitDown => "Splits the pane downward.",
    SplitHorizontal => "Splits the pane horizontally.",
    SplitVertical => "Splits the pane vertically."
);
actions!(
pane,
[
@@ -253,6 +218,14 @@ actions!(
JoinAll,
/// Reopens the most recently closed item.
ReopenClosedItem,
/// Splits the pane to the left, cloning the current item.
SplitLeft,
/// Splits the pane upward, cloning the current item.
SplitUp,
/// Splits the pane to the right, cloning the current item.
SplitRight,
/// Splits the pane downward, cloning the current item.
SplitDown,
/// Splits the pane to the left, moving the current item.
SplitAndMoveLeft,
/// Splits the pane upward, moving the current item.
@@ -261,6 +234,10 @@ actions!(
SplitAndMoveRight,
/// Splits the pane downward, moving the current item.
SplitAndMoveDown,
/// Splits the pane horizontally.
SplitHorizontal,
/// Splits the pane vertically.
SplitVertical,
/// Swaps the current item with the one to the left.
SwapItemLeft,
/// Swaps the current item with the one to the right.
@@ -302,7 +279,7 @@ pub enum Event {
},
Split {
direction: SplitDirection,
mode: SplitMode,
clone_active_item: bool,
},
ItemPinned,
ItemUnpinned,
@@ -334,10 +311,13 @@ impl fmt::Debug for Event {
.debug_struct("RemovedItem")
.field("item", &item.item_id())
.finish(),
Event::Split { direction, mode } => f
Event::Split {
direction,
clone_active_item,
} => f
.debug_struct("Split")
.field("direction", direction)
.field("mode", mode)
.field("clone_active_item", clone_active_item)
.finish(),
Event::JoinAll => f.write_str("JoinAll"),
Event::JoinIntoNext => f.write_str("JoinIntoNext"),
@@ -2315,7 +2295,10 @@ impl Pane {
let save_task = if let Some(project_path) = project_path {
let (worktree, path) = project_path.await?;
let worktree_id = worktree.read_with(cx, |worktree, _| worktree.id())?;
let new_path = ProjectPath { worktree_id, path };
let new_path = ProjectPath {
worktree_id,
path: path,
};
pane.update_in(cx, |pane, window, cx| {
if let Some(item) = pane.item_for_path(new_path.clone(), cx) {
@@ -2374,30 +2357,19 @@ impl Pane {
}
}
pub fn split(
&mut self,
direction: SplitDirection,
mode: SplitMode,
window: &mut Window,
cx: &mut Context<Self>,
) {
if self.items.len() <= 1 && mode == SplitMode::MovePane {
// MovePane with only one pane present behaves like a SplitEmpty in the opposite direction
let active_item = self.active_item();
pub fn split(&mut self, direction: SplitDirection, cx: &mut Context<Self>) {
cx.emit(Event::Split {
direction,
clone_active_item: true,
});
}
pub fn split_and_move(&mut self, direction: SplitDirection, cx: &mut Context<Self>) {
if self.items.len() > 1 {
cx.emit(Event::Split {
direction: direction.opposite(),
mode: SplitMode::EmptyPane,
direction,
clone_active_item: false,
});
// ensure that we focus the moved pane
// in this case we know that the window is the same as the active_item
if let Some(active_item) = active_item {
cx.defer_in(window, move |_, window, cx| {
let focus_handle = active_item.item_focus_handle(cx);
window.focus(&focus_handle, cx);
});
}
} else {
cx.emit(Event::Split { direction, mode });
}
}
@@ -3852,17 +3824,16 @@ fn default_render_tab_bar_buttons(
.with_handle(pane.split_item_context_menu_handle.clone())
.menu(move |window, cx| {
ContextMenu::build(window, cx, |menu, _, _| {
let mode = SplitMode::MovePane;
if can_split_move {
menu.action("Split Right", SplitRight { mode }.boxed_clone())
.action("Split Left", SplitLeft { mode }.boxed_clone())
.action("Split Up", SplitUp { mode }.boxed_clone())
.action("Split Down", SplitDown { mode }.boxed_clone())
menu.action("Split Right", SplitAndMoveRight.boxed_clone())
.action("Split Left", SplitAndMoveLeft.boxed_clone())
.action("Split Up", SplitAndMoveUp.boxed_clone())
.action("Split Down", SplitAndMoveDown.boxed_clone())
} else {
menu.action("Split Right", SplitRight::default().boxed_clone())
.action("Split Left", SplitLeft::default().boxed_clone())
.action("Split Up", SplitUp::default().boxed_clone())
.action("Split Down", SplitDown::default().boxed_clone())
menu.action("Split Right", SplitRight.boxed_clone())
.action("Split Left", SplitLeft.boxed_clone())
.action("Split Up", SplitUp.boxed_clone())
.action("Split Down", SplitDown.boxed_clone())
}
})
.into()
@@ -3921,35 +3892,33 @@ impl Render for Pane {
.size_full()
.flex_none()
.overflow_hidden()
.on_action(cx.listener(|pane, split: &SplitLeft, window, cx| {
pane.split(SplitDirection::Left, split.mode, window, cx)
.on_action(
cx.listener(|pane, _: &SplitLeft, _, cx| pane.split(SplitDirection::Left, cx)),
)
.on_action(cx.listener(|pane, _: &SplitUp, _, cx| pane.split(SplitDirection::Up, cx)))
.on_action(cx.listener(|pane, _: &SplitHorizontal, _, cx| {
pane.split(SplitDirection::horizontal(cx), cx)
}))
.on_action(cx.listener(|pane, split: &SplitUp, window, cx| {
pane.split(SplitDirection::Up, split.mode, window, cx)
.on_action(cx.listener(|pane, _: &SplitVertical, _, cx| {
pane.split(SplitDirection::vertical(cx), cx)
}))
.on_action(cx.listener(|pane, split: &SplitHorizontal, window, cx| {
pane.split(SplitDirection::horizontal(cx), split.mode, window, cx)
.on_action(
cx.listener(|pane, _: &SplitRight, _, cx| pane.split(SplitDirection::Right, cx)),
)
.on_action(
cx.listener(|pane, _: &SplitDown, _, cx| pane.split(SplitDirection::Down, cx)),
)
.on_action(cx.listener(|pane, _: &SplitAndMoveUp, _, cx| {
pane.split_and_move(SplitDirection::Up, cx)
}))
.on_action(cx.listener(|pane, split: &SplitVertical, window, cx| {
pane.split(SplitDirection::vertical(cx), split.mode, window, cx)
.on_action(cx.listener(|pane, _: &SplitAndMoveDown, _, cx| {
pane.split_and_move(SplitDirection::Down, cx)
}))
.on_action(cx.listener(|pane, split: &SplitRight, window, cx| {
pane.split(SplitDirection::Right, split.mode, window, cx)
.on_action(cx.listener(|pane, _: &SplitAndMoveLeft, _, cx| {
pane.split_and_move(SplitDirection::Left, cx)
}))
.on_action(cx.listener(|pane, split: &SplitDown, window, cx| {
pane.split(SplitDirection::Down, split.mode, window, cx)
}))
.on_action(cx.listener(|pane, _: &SplitAndMoveUp, window, cx| {
pane.split(SplitDirection::Up, SplitMode::MovePane, window, cx)
}))
.on_action(cx.listener(|pane, _: &SplitAndMoveDown, window, cx| {
pane.split(SplitDirection::Down, SplitMode::MovePane, window, cx)
}))
.on_action(cx.listener(|pane, _: &SplitAndMoveLeft, window, cx| {
pane.split(SplitDirection::Left, SplitMode::MovePane, window, cx)
}))
.on_action(cx.listener(|pane, _: &SplitAndMoveRight, window, cx| {
pane.split(SplitDirection::Right, SplitMode::MovePane, window, cx)
.on_action(cx.listener(|pane, _: &SplitAndMoveRight, _, cx| {
pane.split_and_move(SplitDirection::Right, cx)
}))
.on_action(cx.listener(|_, _: &JoinIntoNext, _, cx| {
cx.emit(Event::JoinIntoNext);
@@ -4474,14 +4443,11 @@ impl Render for DraggedTab {
#[cfg(test)]
mod tests {
use std::{iter::zip, num::NonZero};
use std::num::NonZero;
use super::*;
use crate::{
Member,
item::test::{TestItem, TestProjectItem},
};
use gpui::{AppContext, Axis, TestAppContext, VisualTestContext, size};
use crate::item::test::{TestItem, TestProjectItem};
use gpui::{TestAppContext, VisualTestContext, size};
use project::FakeFs;
use settings::SettingsStore;
use theme::LoadThemes;
@@ -7159,32 +7125,6 @@ mod tests {
assert_item_labels(&pane, ["A", "C*", "B"], cx);
}
// Exercises SplitMode::EmptyPane in every split direction.
#[gpui::test]
async fn test_split_empty(cx: &mut TestAppContext) {
    for split_direction in SplitDirection::all() {
        test_single_pane_split(["A"], split_direction, SplitMode::EmptyPane, cx).await;
    }
}
// Exercises SplitMode::ClonePane in every split direction.
#[gpui::test]
async fn test_split_clone(cx: &mut TestAppContext) {
    for split_direction in SplitDirection::all() {
        test_single_pane_split(["A"], split_direction, SplitMode::ClonePane, cx).await;
    }
}
// MovePane with a single item takes a special-cased path (see the
// `num_labels == 1` branch in `test_single_pane_split`); cover it explicitly.
#[gpui::test]
async fn test_split_move_right_on_single_pane(cx: &mut TestAppContext) {
    test_single_pane_split(["A"], SplitDirection::Right, SplitMode::MovePane, cx).await;
}
// Exercises SplitMode::MovePane in every direction with two items present.
#[gpui::test]
async fn test_split_move(cx: &mut TestAppContext) {
    for split_direction in SplitDirection::all() {
        test_single_pane_split(["A", "B"], split_direction, SplitMode::MovePane, cx).await;
    }
}
fn init_test(cx: &mut TestAppContext) {
cx.update(|cx| {
let settings_store = SettingsStore::test(cx);
@@ -7280,163 +7220,4 @@ mod tests {
"pane items do not match expectation"
);
}
// Assert the item labels, and that the pane's active item index matches
// `expected_active_idx`.
//
// The active-index assertion used to live inside the per-item mapping closure,
// which obscured it; it is hoisted out here. As before, it only fires when
// `active_item_index` refers to an existing item.
#[track_caller]
fn assert_item_labels_active_index(
    pane: &Entity<Pane>,
    expected_states: &[&str],
    expected_active_idx: usize,
    cx: &mut VisualTestContext,
) {
    let actual_states = pane.update(cx, |pane, cx| {
        // Equivalent to the old in-loop check: some item index equals
        // `active_item_index` exactly when it is within bounds.
        if pane.active_item_index < pane.items.len() {
            assert_eq!(
                pane.active_item_index, expected_active_idx,
                "active item index does not match expectation"
            );
        }
        pane.items
            .iter()
            .enumerate()
            .map(|(ix, item)| {
                let mut state = item
                    .to_any_view()
                    .downcast::<TestItem>()
                    .unwrap()
                    .read(cx)
                    .label
                    .clone();
                // '^' marks dirty items, '!' marks pinned tabs, matching the
                // notation used by `assert_item_labels`.
                if item.is_dirty(cx) {
                    state.push('^');
                }
                if pane.is_tab_pinned(ix) {
                    state.push('!');
                }
                state
            })
            .collect::<Vec<_>>()
    });
    assert_eq!(
        actual_states, expected_states,
        "pane items do not match expectation"
    );
}
// Asserts that the workspace center root is an axis of `expected_axis`
// orientation whose members are exactly the panes with `expected_ids`,
// in order. Panics if the root is a single pane rather than an axis.
#[track_caller]
fn assert_pane_ids_on_axis<const COUNT: usize>(
    workspace: &Entity<Workspace>,
    expected_ids: [&EntityId; COUNT],
    expected_axis: Axis,
    cx: &mut VisualTestContext,
) {
    workspace.read_with(cx, |workspace, _| match &workspace.center.root {
        Member::Axis(axis) => {
            assert_eq!(axis.axis, expected_axis);
            assert_eq!(axis.members.len(), expected_ids.len());
            // Pairwise check: every member must be a pane whose entity id
            // matches the expected id at the same position (nested axes fail).
            assert!(
                zip(expected_ids, &axis.members).all(|(e, a)| {
                    if let Member::Pane(p) = a {
                        p.entity_id() == *e
                    } else {
                        false
                    }
                }),
                "pane ids do not match expectation: {expected_ids:?} != {actual_ids:?}",
                actual_ids = axis.members
            );
        }
        Member::Pane(_) => panic!("expected axis"),
    });
}
// Drives a split of the active pane in `direction` using `operation`, then
// verifies the resulting item labels, focus, and pane layout.
//
// `pane_labels` are added to the initial pane before splitting; the last
// label is the active item when the split happens.
async fn test_single_pane_split<const COUNT: usize>(
    pane_labels: [&str; COUNT],
    direction: SplitDirection,
    operation: SplitMode,
    cx: &mut TestAppContext,
) {
    init_test(cx);
    let fs = FakeFs::new(cx.executor());
    let project = Project::test(fs, None, cx).await;
    let (workspace, cx) =
        cx.add_window_view(|window, cx| Workspace::test_new(project, window, cx));
    let mut pane_before =
        workspace.read_with(cx, |workspace, _| workspace.active_pane().clone());
    for label in pane_labels {
        add_labeled_item(&pane_before, label, false, cx);
    }
    pane_before.update_in(cx, |pane, window, cx| {
        pane.split(direction, operation, window, cx)
    });
    cx.executor().run_until_parked();
    let pane_after = workspace.read_with(cx, |workspace, _| workspace.active_pane().clone());
    let num_labels = pane_labels.len();
    let last_as_active = format!("{}*", pane_labels[num_labels - 1]);
    // check labels for all split operations
    match operation {
        SplitMode::EmptyPane => {
            assert_item_labels_active_index(&pane_before, &pane_labels, num_labels - 1, cx);
            assert_item_labels(&pane_after, [], cx);
        }
        SplitMode::ClonePane => {
            assert_item_labels_active_index(&pane_before, &pane_labels, num_labels - 1, cx);
            assert_item_labels(&pane_after, [&last_as_active], cx);
        }
        SplitMode::MovePane => {
            let head = &pane_labels[..(num_labels - 1)];
            if num_labels == 1 {
                // We special-case this behavior and actually execute an empty pane command
                // followed by a refocus of the old pane for this case.
                pane_before = workspace.read_with(cx, |workspace, _cx| {
                    workspace
                        .panes()
                        .into_iter()
                        .find(|pane| *pane != &pane_after)
                        .unwrap()
                        .clone()
                });
            };
            assert_item_labels_active_index(&pane_before, head, head.len().saturating_sub(1), cx);
            assert_item_labels(&pane_after, [&last_as_active], cx);
            // The moved item must actually receive focus; previously this
            // check's boolean result was computed and silently discarded.
            pane_after.update_in(cx, |pane, window, cx| {
                assert!(
                    window.focused(cx).is_some_and(|focus_handle| {
                        focus_handle == pane.active_item().unwrap().item_focus_handle(cx)
                    }),
                    "moved item should be focused after the split"
                );
            });
        }
    }
    // expected axis depends on split direction
    let expected_axis = match direction {
        SplitDirection::Right | SplitDirection::Left => Axis::Horizontal,
        SplitDirection::Up | SplitDirection::Down => Axis::Vertical,
    };
    // expected ids depends on split direction
    let expected_ids = match direction {
        SplitDirection::Right | SplitDirection::Down => {
            [&pane_before.entity_id(), &pane_after.entity_id()]
        }
        SplitDirection::Left | SplitDirection::Up => {
            [&pane_after.entity_id(), &pane_before.entity_id()]
        }
    };
    // Both arms of the former `match operation` here made the identical call,
    // so the layout is checked unconditionally for every operation.
    assert_pane_ids_on_axis(&workspace, expected_ids, expected_axis, cx);
}
}

View File

@@ -4262,19 +4262,16 @@ impl Workspace {
item: item.boxed_clone(),
});
}
pane::Event::Split { direction, mode } => {
match mode {
SplitMode::ClonePane => {
self.split_and_clone(pane.clone(), *direction, window, cx)
.detach();
}
SplitMode::EmptyPane => {
self.split_pane(pane.clone(), *direction, window, cx);
}
SplitMode::MovePane => {
self.split_and_move(pane.clone(), *direction, window, cx);
}
};
pane::Event::Split {
direction,
clone_active_item,
} => {
if *clone_active_item {
self.split_and_clone(pane.clone(), *direction, window, cx)
.detach();
} else {
self.split_and_move(pane.clone(), *direction, window, cx);
}
}
pane::Event::JoinIntoNext => {
self.join_pane_into_next(pane.clone(), window, cx);

View File

@@ -3817,7 +3817,7 @@ mod tests {
})
.unwrap();
cx.dispatch_action(window.into(), pane::SplitRight::default());
cx.dispatch_action(window.into(), pane::SplitRight);
let editor_2 = cx.update(|cx| {
let pane_2 = workspace.read(cx).active_pane().clone();
assert_ne!(pane_1, pane_2);

View File

@@ -32,10 +32,10 @@ pub fn app_menus(cx: &mut App) -> Vec<Menu> {
MenuItem::submenu(Menu {
name: "Editor Layout".into(),
items: vec![
MenuItem::action("Split Up", workspace::SplitUp::default()),
MenuItem::action("Split Down", workspace::SplitDown::default()),
MenuItem::action("Split Left", workspace::SplitLeft::default()),
MenuItem::action("Split Right", workspace::SplitRight::default()),
MenuItem::action("Split Up", workspace::SplitUp),
MenuItem::action("Split Down", workspace::SplitDown),
MenuItem::action("Split Left", workspace::SplitLeft),
MenuItem::action("Split Right", workspace::SplitRight),
],
}),
MenuItem::separator(),

View File

@@ -2,6 +2,10 @@
This file governs automated documentation updates triggered by code changes. All automation phases must comply with these rules.
## Repository Context
This is the **Zed code editor** repository, a Rust-based application using the custom **GPUI** UI framework. The project is a large monorepo with ~200 crates organized under `crates/`. Documentation is built with **mdBook** and uses a custom preprocessor (`docs_preprocessor`) that handles special syntax like `{#kb action::Name}` for keybindings. The documentation source is in `docs/src/` with a table of contents in `SUMMARY.md`, and all docs must pass Prettier formatting (80 char line width). The style guide (`docs/.rules`) and agent guidelines (`docs/AGENTS.md`) provide specific conventions for documentation writing.
## Documentation System
This documentation uses **mdBook** (https://rust-lang.github.io/mdBook/).

View File

@@ -85,8 +85,6 @@ You can type `@` to mention files, directories, symbols, previous threads, and r
Copying images and pasting them in the panel's message editor is also supported.
When you paste multi-line code selections copied from an editor buffer, Zed automatically formats them as @mentions with the file context. To paste content without this automatic formatting, use {#kb agent::PasteRaw} to paste raw text directly.
### Selection as Context
Additionally, you can also select text in a buffer and add it as context by using the {#kb agent::AddSelectionToThread} keybinding, running the {#action agent::AddSelectionToThread} action, or choosing the "Selection" item in the `@` menu.
@@ -102,8 +100,6 @@ You can also do this at any time with an ongoing thread via the "Agent Options"
After you've configured your LLM providers—either via [a custom API key](./llm-providers.md) or through [Zed's hosted models](./models.md)—you can switch between them by clicking on the model selector on the message editor or by using the {#kb agent::ToggleModelSelector} keybinding.
If you have favorited models configured, you can cycle through them with {#kb agent::CycleFavoriteModels} without opening the model selector.
> The same model can be offered via multiple providers - for example, Claude Sonnet 4 is available via Zed Pro, OpenRouter, Anthropic directly, and more.
> Make sure you've selected the correct model **_provider_** for the model you'd like to use, delineated by the logo to the left of the model in the model selector.

View File

@@ -305,7 +305,7 @@ To use GitHub Copilot as your provider, set this within `settings.json`:
}
```
To sign in to GitHub Copilot, click on the Copilot icon in the status bar and follow the setup instructions: a popup window appears displaying a device code. Click the copy button to copy the code, then click "Connect to GitHub" to open the GitHub verification page in your browser. Paste the code when prompted. The popup window closes automatically after successful authorization.
#### Using GitHub Copilot Enterprise
@@ -348,17 +348,10 @@ You should be able to sign-in to Supermaven by clicking on the Supermaven icon i
### Codestral {#codestral}
To use Mistral's Codestral as your provider:

1. Open the Settings Editor (`Cmd+,` on macOS, `Ctrl+,` on Linux/Windows)
2. Search for "Edit Predictions" and click **Configure Providers**
3. Find the Codestral section and enter your API key from the
   [Codestral dashboard](https://console.mistral.ai/codestral)

Alternatively, click the edit prediction icon in the status bar and select
**Configure Providers** from the menu, or open the Agent Panel settings view by
running the {#action agent::OpenSettings} action and add a Codestral API key in
the Mistral item's text input.

After adding your API key, set Codestral as your provider in `settings.json`:
```json [settings]
{

View File

@@ -62,7 +62,7 @@ The `download_file` capability grants extensions the ability to download files u
To allow any file to be downloaded:

```toml
{ kind = "download_file", host = "*", path = ["**"] }
```
To allow any file to be downloaded from `github.com`:

View File

@@ -1,6 +1,6 @@
[package]
name = "zed_proto"
version = "0.3.1"
version = "0.3.0"
edition.workspace = true
publish.workspace = true
license = "Apache-2.0"

View File

@@ -1,7 +1,7 @@
id = "proto"
name = "Proto"
description = "Protocol Buffers support."
version = "0.3.1"
version = "0.3.0"
schema_version = 1
authors = ["Zed Industries <support@zed.dev>"]
repository = "https://github.com/zed-industries/zed"

628
script/test-docs-automation Executable file
View File

@@ -0,0 +1,628 @@
#!/usr/bin/env bash
set -euo pipefail
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
REPO_ROOT="$(cd "$SCRIPT_DIR/.." && pwd)"
PROMPTS_DIR="$REPO_ROOT/.factory/prompts/docs-automation"
OUTPUT_DIR="${TMPDIR:-/tmp}/docs-automation-test"
# Default values
BASE_BRANCH="main"
# Use fast model for analysis, powerful model for writing
ANALYSIS_MODEL="${ANALYSIS_MODEL:-gemini-3-flash-preview}"
WRITING_MODEL="${WRITING_MODEL:-claude-opus-4-5-20251101}"
DRY_RUN=false
VERBOSE=false
PR_NUMBER=""
SOURCE_BRANCH=""
# Patterns for files that could affect documentation
DOCS_RELEVANT_PATTERNS=(
"crates/.*/src/.*\.rs" # Rust source files
"assets/settings/.*" # Settings schemas
"assets/keymaps/.*" # Keymaps
"extensions/.*" # Extensions
"docs/.*" # Docs themselves
)
# Print CLI usage and exit 0. The heredoc is unquoted so $(basename "$0") and
# $OUTPUT_DIR expand at print time. NOTE: the defaults shown here must stay in
# sync with the ANALYSIS_MODEL/WRITING_MODEL defaults at the top of the script
# (the previous text claimed "gemini-2.0-flash", which was stale).
usage() {
    cat << EOF
Usage: $(basename "$0") [OPTIONS]

Test the documentation automation workflow locally.

OPTIONS:
    -p, --pr NUMBER       PR number to analyze (uses gh pr diff)
    -r, --branch BRANCH   Remote branch to compare (e.g., origin/feature-branch)
    -b, --base BRANCH     Base branch to compare against (default: main)
    -d, --dry-run         Preview changes without modifying files
    -s, --skip-apply      Alias for --dry-run
    -v, --verbose         Show full output from each phase
    -o, --output DIR      Output directory for phase artifacts (default: $OUTPUT_DIR)
    -h, --help            Show this help message

EXAMPLES:
    # Analyze a PR (most common use case)
    $(basename "$0") --pr 12345

    # Analyze a PR with dry run (no file changes)
    $(basename "$0") --pr 12345 --dry-run

    # Analyze a remote branch against main
    $(basename "$0") --branch origin/feature-branch

ENVIRONMENT:
    FACTORY_API_KEY       Required: Your Factory API key
    ANALYSIS_MODEL        Model for analysis (default: gemini-3-flash-preview)
    WRITING_MODEL         Model for writing (default: claude-opus-4-5-20251101)
    GH_TOKEN              Required for --pr option (or gh auth login)
EOF
    exit 0
}
while [[ $# -gt 0 ]]; do
case $1 in
-p|--pr)
PR_NUMBER="$2"
shift 2
;;
-r|--branch)
SOURCE_BRANCH="$2"
shift 2
;;
-b|--base)
BASE_BRANCH="$2"
shift 2
;;
-d|--dry-run|-s|--skip-apply)
DRY_RUN=true
shift
;;
-v|--verbose)
VERBOSE=true
shift
;;
-o|--output)
OUTPUT_DIR="$2"
shift 2
;;
-h|--help)
usage
;;
*)
echo "Unknown option: $1"
usage
;;
esac
done
# Cleanup function for restoring the original branch.
# Installed as an EXIT trap: if an earlier step recorded the branch we started
# on (original-branch.txt), check it out again, and delete the throwaway
# temp-analysis-* branch if this script created one. All git failures are
# swallowed so cleanup never changes the script's exit status.
cleanup_on_exit() {
    if [[ -f "$OUTPUT_DIR/original-branch.txt" ]]; then
        ORIGINAL_BRANCH=$(cat "$OUTPUT_DIR/original-branch.txt")
        # Current branch name; empty if HEAD cannot be resolved.
        CURRENT=$(git -C "$REPO_ROOT" rev-parse --abbrev-ref HEAD 2>/dev/null || echo "")
        if [[ "$CURRENT" != "$ORIGINAL_BRANCH" && -n "$ORIGINAL_BRANCH" ]]; then
            echo ""
            echo "Restoring original branch: $ORIGINAL_BRANCH"
            git -C "$REPO_ROOT" checkout "$ORIGINAL_BRANCH" 2>/dev/null || true
            # Only delete branches this script itself created.
            if [[ "$CURRENT" == temp-analysis-* ]]; then
                git -C "$REPO_ROOT" branch -D "$CURRENT" 2>/dev/null || true
            fi
        fi
    fi
}
trap cleanup_on_exit EXIT
# Check for required tools
if ! command -v droid &> /dev/null; then
echo "Error: droid CLI not found. Install from https://app.factory.ai/cli"
exit 1
fi
if [[ -z "${FACTORY_API_KEY:-}" ]]; then
echo "Error: FACTORY_API_KEY environment variable is not set"
exit 1
fi
# Check gh CLI if PR mode
if [[ -n "$PR_NUMBER" ]]; then
if ! command -v gh &> /dev/null; then
echo "Error: gh CLI not found. Install from https://cli.github.com/"
echo "Required for --pr option"
exit 1
fi
fi
# Create output directory
mkdir -p "$OUTPUT_DIR"
echo "========================================"
echo "Documentation Automation Test"
echo "========================================"
echo "Output directory: $OUTPUT_DIR"
echo "Analysis model: $ANALYSIS_MODEL"
echo "Writing model: $WRITING_MODEL"
echo "Started at: $(date '+%Y-%m-%d %H:%M:%S')"
echo ""
cd "$REPO_ROOT"
# Get changed files based on mode
echo "=== Getting changed files ==="
if [[ -n "$PR_NUMBER" ]]; then
# PR mode: use gh pr diff like the workflow does
echo "Analyzing PR #$PR_NUMBER"
# Get PR info for context
echo "Fetching PR details..."
gh pr view "$PR_NUMBER" --json title,headRefName,baseRefName,state > "$OUTPUT_DIR/pr-info.json" 2>/dev/null || true
if [[ -f "$OUTPUT_DIR/pr-info.json" ]]; then
PR_TITLE=$(jq -r '.title // "Unknown"' "$OUTPUT_DIR/pr-info.json")
PR_HEAD=$(jq -r '.headRefName // "Unknown"' "$OUTPUT_DIR/pr-info.json")
PR_BASE=$(jq -r '.baseRefName // "Unknown"' "$OUTPUT_DIR/pr-info.json")
PR_STATE=$(jq -r '.state // "Unknown"' "$OUTPUT_DIR/pr-info.json")
echo " Title: $PR_TITLE"
echo " Branch: $PR_HEAD -> $PR_BASE"
echo " State: $PR_STATE"
fi
echo ""
# Get the list of changed files
gh pr diff "$PR_NUMBER" --name-only > "$OUTPUT_DIR/changed_files.txt"
# Also save the full diff for analysis
gh pr diff "$PR_NUMBER" > "$OUTPUT_DIR/pr-diff.patch" 2>/dev/null || true
# Checkout the PR branch to have the code available for analysis
echo "Checking out PR branch for analysis..."
ORIGINAL_BRANCH=$(git rev-parse --abbrev-ref HEAD)
echo "$ORIGINAL_BRANCH" > "$OUTPUT_DIR/original-branch.txt"
gh pr checkout "$PR_NUMBER" --force 2>/dev/null || {
echo "Warning: Could not checkout PR branch. Analysis will use current branch state."
}
elif [[ -n "$SOURCE_BRANCH" ]]; then
# Remote branch mode
echo "Analyzing branch: $SOURCE_BRANCH"
echo "Base branch: $BASE_BRANCH"
# Fetch the branches
git fetch origin 2>/dev/null || true
# Resolve branch refs
SOURCE_REF="$SOURCE_BRANCH"
BASE_REF="origin/$BASE_BRANCH"
# Get merge base
MERGE_BASE=$(git merge-base "$BASE_REF" "$SOURCE_REF" 2>/dev/null) || {
echo "Error: Could not find merge base between $BASE_REF and $SOURCE_REF"
exit 1
}
echo "Merge base: $MERGE_BASE"
# Get changed files
git diff --name-only "$MERGE_BASE" "$SOURCE_REF" > "$OUTPUT_DIR/changed_files.txt"
# Checkout the source branch for analysis
echo "Checking out $SOURCE_BRANCH for analysis..."
ORIGINAL_BRANCH=$(git rev-parse --abbrev-ref HEAD)
echo "$ORIGINAL_BRANCH" > "$OUTPUT_DIR/original-branch.txt"
git checkout "$SOURCE_BRANCH" 2>/dev/null || git checkout -b "temp-analysis-$$" "$SOURCE_REF" || {
echo "Warning: Could not checkout branch. Analysis will use current branch state."
}
else
# Current branch mode (original behavior)
CURRENT_BRANCH=$(git rev-parse --abbrev-ref HEAD)
echo "Analyzing current branch: $CURRENT_BRANCH"
echo "Base branch: $BASE_BRANCH"
# Fetch the base branch
git fetch origin "$BASE_BRANCH" 2>/dev/null || true
# Get merge base
MERGE_BASE=$(git merge-base "origin/$BASE_BRANCH" HEAD 2>/dev/null || git merge-base "$BASE_BRANCH" HEAD)
echo "Merge base: $MERGE_BASE"
git diff --name-only "$MERGE_BASE" HEAD > "$OUTPUT_DIR/changed_files.txt"
fi
if [[ ! -s "$OUTPUT_DIR/changed_files.txt" ]]; then
echo "No changed files found."
exit 0
fi
echo ""
echo "Changed files ($(wc -l < "$OUTPUT_DIR/changed_files.txt" | tr -d ' ') files):"
cat "$OUTPUT_DIR/changed_files.txt"
echo ""
# Early exit: Filter for docs-relevant files only
echo "=== Filtering for docs-relevant files ==="
DOCS_RELEVANT_FILES=""
while IFS= read -r file; do
for pattern in "${DOCS_RELEVANT_PATTERNS[@]}"; do
if [[ "$file" =~ $pattern ]]; then
DOCS_RELEVANT_FILES="$DOCS_RELEVANT_FILES $file"
break
fi
done
done < "$OUTPUT_DIR/changed_files.txt"
# Trim leading space
DOCS_RELEVANT_FILES="${DOCS_RELEVANT_FILES# }"
if [[ -z "$DOCS_RELEVANT_FILES" ]]; then
echo "No docs-relevant files changed (only tests, configs, CI, etc.)"
echo "Skipping documentation analysis."
exit 0
fi
echo "Docs-relevant files: $(echo "$DOCS_RELEVANT_FILES" | wc -w | tr -d ' ')"
echo "$DOCS_RELEVANT_FILES" | tr ' ' '\n' | head -20
echo ""
# Combined Phase: Analyze + Plan (using fast model)
echo "=== Analyzing Changes & Planning Documentation Impact ==="
echo "Model: $ANALYSIS_MODEL"
echo "Started at: $(date '+%H:%M:%S')"
echo ""
CHANGED_FILES=$(tr '\n' ' ' < "$OUTPUT_DIR/changed_files.txt")
# Write prompt to temp file to avoid escaping issues
cat > "$OUTPUT_DIR/analysis-prompt.txt" << 'PROMPT_EOF'
Analyze these code changes and determine if documentation updates are needed.
## Documentation Guidelines
### Requires Documentation Update
- New user-facing features or commands
- Changed keybindings or default behaviors
- Modified settings schema or options
- Deprecated or removed functionality
### Does NOT Require Documentation Update
- Internal refactoring without behavioral changes
- Performance optimizations (unless user-visible)
- Bug fixes that restore documented behavior
- Test changes, CI/CD changes
### In-Scope: docs/src/**/*.md
### Out-of-Scope: CHANGELOG.md, README.md, code comments, rustdoc
### Output Format Required
You MUST output a JSON object with this exact structure:
{
"updates_required": true or false,
"summary": "Brief description of changes",
"planned_changes": [
{
"file": "docs/src/path/to/file.md",
"section": "Section name",
"change_type": "update or add or deprecate",
"description": "What to change"
}
],
"skipped_files": ["reason1", "reason2"]
}
Be conservative - only flag documentation updates for user-visible changes.
## Changed Files
PROMPT_EOF
echo "$CHANGED_FILES" >> "$OUTPUT_DIR/analysis-prompt.txt"
ANALYSIS_START=$(date +%s)
droid exec \
-m "$ANALYSIS_MODEL" \
--auto low \
-f "$OUTPUT_DIR/analysis-prompt.txt" \
> "$OUTPUT_DIR/analysis.json" 2>&1 || true
ANALYSIS_END=$(date +%s)
ANALYSIS_DURATION=$((ANALYSIS_END - ANALYSIS_START))
echo "Completed in ${ANALYSIS_DURATION}s"
echo ""
echo "--- Analysis Result ---"
cat "$OUTPUT_DIR/analysis.json"
echo ""
echo "-----------------------"
echo ""
# Check if updates are required (parse JSON output)
UPDATES_REQUIRED=$(grep -o '"updates_required":\s*true' "$OUTPUT_DIR/analysis.json" || echo "")
if [[ -z "$UPDATES_REQUIRED" ]]; then
echo "=== No documentation updates required ==="
echo "Analysis determined no documentation changes are needed."
cat "$OUTPUT_DIR/analysis.json"
exit 0
fi
echo "Documentation updates ARE required."
echo ""
# Extract planned changes for the next phase
ANALYSIS_OUTPUT=$(cat "$OUTPUT_DIR/analysis.json")
# --- Dry-run path: preview proposed edits without touching any files, then exit. ---
if [[ "$DRY_RUN" == "true" ]]; then
# Combined Preview Phase (dry-run): Show what would change
echo "=== Preview: Generating Proposed Changes ==="
echo "Model: $WRITING_MODEL"
echo "Started at: $(date '+%H:%M:%S')"
echo ""
PREVIEW_START=$(date +%s)
# Write preview prompt to temp file
# (heredoc delimiter is unquoted, so $ANALYSIS_OUTPUT expands into the prompt)
cat > "$OUTPUT_DIR/preview-prompt.txt" << PREVIEW_EOF
Generate a PREVIEW of the documentation changes. Do NOT modify any files.
Based on this analysis:
$ANALYSIS_OUTPUT
For each planned change:
1. Read the current file
2. Show the CURRENT section that would be modified
3. Show the PROPOSED new content
4. Generate a unified diff
Output format:
---
## File: [path]
### Current:
(paste exact current content)
### Proposed:
(paste proposed new content)
### Diff:
(unified diff with - and + lines)
---
Show the ACTUAL content, not summaries.
PREVIEW_EOF
# Run the writing model with low autonomy (read-only preview); `|| true` so a
# model failure still falls through to display whatever output was produced.
droid exec \
-m "$WRITING_MODEL" \
--auto low \
-f "$OUTPUT_DIR/preview-prompt.txt" \
> "$OUTPUT_DIR/preview.md" 2>&1 || true
PREVIEW_END=$(date +%s)
PREVIEW_DURATION=$((PREVIEW_END - PREVIEW_START))
echo "Completed in ${PREVIEW_DURATION}s"
echo ""
echo "--- Proposed Changes ---"
cat "$OUTPUT_DIR/preview.md"
echo "------------------------"
echo ""
echo "=== Dry run complete ==="
echo "Total time: Analysis ${ANALYSIS_DURATION}s + Preview ${PREVIEW_DURATION}s = $((ANALYSIS_DURATION + PREVIEW_DURATION))s"
echo "To apply changes, run without --dry-run flag."
echo "Output saved to: $OUTPUT_DIR/"
exit 0
fi
# Combined Phase: Apply Changes + Generate Summary (using writing model)
echo "=== Applying Documentation Changes ==="
echo "Model: $WRITING_MODEL"
echo "Started at: $(date '+%H:%M:%S')"
echo ""
APPLY_START=$(date +%s)
# Write apply prompt to temp file
# (heredoc delimiter is unquoted, so $ANALYSIS_OUTPUT expands into the prompt)
cat > "$OUTPUT_DIR/apply-prompt.txt" << APPLY_EOF
Apply the documentation changes specified in this analysis:
$ANALYSIS_OUTPUT
Instructions:
1. For each planned change, edit the specified file
2. Follow the mdBook format and style from docs/AGENTS.md
3. Use {#kb action::Name} syntax for keybindings
4. After making changes, output a brief summary
Output format:
## Changes Applied
- [file]: [what was changed]
## Summary for PR
[2-3 sentence summary suitable for a PR description]
APPLY_EOF
# `--auto medium` grants the model file-editing autonomy; `|| true` keeps the
# script alive so the report (and later git diff checks) still run on failure.
droid exec \
-m "$WRITING_MODEL" \
--auto medium \
-f "$OUTPUT_DIR/apply-prompt.txt" \
> "$OUTPUT_DIR/apply-report.md" 2>&1 || true
APPLY_END=$(date +%s)
APPLY_DURATION=$((APPLY_END - APPLY_START))
echo "Completed in ${APPLY_DURATION}s"
echo ""
echo "--- Apply Report ---"
cat "$OUTPUT_DIR/apply-report.md"
echo "--------------------"
echo ""
# Format with Prettier (only changed files).
# Collect the changed docs paths into a bash array instead of a single
# space-joined string: the previous `tr '\n' ' '` + unquoted expansion broke
# on any filename containing whitespace. Paths are made docs/-relative because
# prettier is invoked from inside the docs/ directory.
echo "=== Formatting with Prettier ==="
cd "$REPO_ROOT"
CHANGED_DOCS=()
while IFS= read -r doc_path; do
if [[ -n "$doc_path" ]]; then
CHANGED_DOCS+=("${doc_path#docs/}")
fi
done < <(git diff --name-only docs/src/ 2>/dev/null)
if [[ ${#CHANGED_DOCS[@]} -gt 0 ]]; then
echo "Formatting: ${CHANGED_DOCS[*]}"
# Prefer pnpm (pinned prettier version) and fall back to a global prettier;
# formatting is best-effort, so failures are ignored.
if command -v pnpm &> /dev/null; then
(cd docs && pnpm dlx prettier@3.5.0 "${CHANGED_DOCS[@]}" --write) 2>/dev/null || true
elif command -v prettier &> /dev/null; then
(cd docs && prettier --write "${CHANGED_DOCS[@]}") 2>/dev/null || true
fi
echo "Done"
else
echo "No changed docs files to format"
fi
echo ""
# Generate summary from the apply report
# (reused below for the commit message body and the PR description section).
cp "$OUTPUT_DIR/apply-report.md" "$OUTPUT_DIR/phase6-summary.md"
# Phase 7: Create Branch and PR
echo "=== Phase 7: Create Branch and PR ==="
# Check if there are actual changes
# (the writing model may legitimately decide nothing needed editing).
# NOTE(review): the message below says "Phase 5" but the apply step above is
# now a combined phase — the text may be stale; confirm intended wording.
if git -C "$REPO_ROOT" diff --quiet docs/src/; then
echo "No documentation changes detected after Phase 5"
echo ""
echo "=== Test Complete (no changes to commit) ==="
exit 0
fi
# Check if gh CLI is available
# Without `gh` the script degrades gracefully: the edits stay in the working
# tree and the operator is told how to review or discard them.
if ! command -v gh &> /dev/null; then
echo "Warning: gh CLI not found. Skipping PR creation."
echo "Install from https://cli.github.com/ to enable automatic PR creation."
echo ""
echo "Documentation changes (git status):"
git -C "$REPO_ROOT" status --short docs/src/
echo ""
echo "To review the diff:"
echo " git diff docs/src/"
echo ""
echo "To discard changes:"
echo " git checkout docs/src/"
exit 0
fi
cd "$REPO_ROOT"
# Daily batch branch - one branch per day, multiple commits accumulate
BRANCH_NAME="docs/auto-update-$(date +%Y-%m-%d)"
# Stash local changes from phase 5
# The stash survives the branch switch below and is re-applied onto the
# (possibly pre-existing) daily batch branch.
echo "Stashing documentation changes..."
git stash push -m "docs-automation-changes" -- docs/src/
# Check if branch already exists on remote
# If it does, continue the day's batch on top of it; otherwise branch off main.
if git ls-remote --exit-code --heads origin "$BRANCH_NAME" > /dev/null 2>&1; then
echo "Branch $BRANCH_NAME exists, checking out and updating..."
git fetch origin "$BRANCH_NAME"
git checkout -B "$BRANCH_NAME" "origin/$BRANCH_NAME"
else
echo "Creating new branch $BRANCH_NAME from main..."
git fetch origin main
git checkout -B "$BRANCH_NAME" origin/main
fi
# Apply stashed changes
# NOTE(review): `|| true` means a conflicting `stash pop` is silently ignored,
# which could lead to committing a partial or empty change set — consider
# failing loudly (or leaving the stash intact) on conflict instead.
echo "Applying documentation changes..."
git stash pop || true
# Stage and commit
git add docs/src/
# Get source PR info for attribution
# These feed the commit message trailer (TRIGGER_INFO) and the PR body
# section (SOURCE_PR_INFO) assembled further below.
SOURCE_PR_INFO=""
TRIGGER_INFO=""
if [[ -n "$PR_NUMBER" ]]; then
# Fetch PR details: title, author, url
# `|| echo "{}"` keeps jq happy when the lookup fails (the `//` defaults apply).
PR_DETAILS=$(gh pr view "$PR_NUMBER" --json title,author,url 2>/dev/null || echo "{}")
SOURCE_TITLE=$(echo "$PR_DETAILS" | jq -r '.title // "Unknown"')
SOURCE_AUTHOR=$(echo "$PR_DETAILS" | jq -r '.author.login // "Unknown"')
SOURCE_URL=$(echo "$PR_DETAILS" | jq -r '.url // ""')
TRIGGER_INFO="Triggered by: PR #$PR_NUMBER"
SOURCE_PR_INFO="
---
**Source**: [#$PR_NUMBER]($SOURCE_URL) - $SOURCE_TITLE
**Author**: @$SOURCE_AUTHOR
"
elif [[ -n "$SOURCE_BRANCH" ]]; then
TRIGGER_INFO="Triggered by: branch $SOURCE_BRANCH"
SOURCE_PR_INFO="
---
**Source**: Branch \`$SOURCE_BRANCH\`
"
fi
# Build commit message.
# Cap the summary at the first 50 lines so the commit body stays readable.
# `head -n 50` is the POSIX form; the previous `head -50` is obsolescent
# syntax that some implementations reject. If the summary file is missing,
# fall back to a generic message.
SUMMARY=$(head -n 50 < "$OUTPUT_DIR/phase6-summary.md" 2>/dev/null || echo "Automated documentation update")
# A failed commit here means the stash produced no staged changes; treat that
# as a clean no-op exit rather than an error.
git commit -m "docs: auto-update documentation
${SUMMARY}
${TRIGGER_INFO}
Co-authored-by: factory-droid[bot] <138933559+factory-droid[bot]@users.noreply.github.com>" || {
echo "Nothing to commit"
exit 0
}
# Push
echo "Pushing to origin/$BRANCH_NAME..."
git push -u origin "$BRANCH_NAME"
# Build the PR body section for this update
PR_BODY_SECTION="## Update from $(date '+%Y-%m-%d %H:%M')
$SOURCE_PR_INFO
$(cat "$OUTPUT_DIR/phase6-summary.md")
"
# Check if PR already exists for this branch
# One PR per daily branch: later runs on the same day append their section
# to the existing PR body instead of opening a new PR.
EXISTING_PR=$(gh pr list --head "$BRANCH_NAME" --json number,url,body --jq '.[0]' 2>/dev/null || echo "")
if [[ -n "$EXISTING_PR" && "$EXISTING_PR" != "null" ]]; then
PR_NUM=$(echo "$EXISTING_PR" | jq -r '.number')
PR_URL=$(echo "$EXISTING_PR" | jq -r '.url')
EXISTING_BODY=$(echo "$EXISTING_PR" | jq -r '.body // ""')
# Append new summary to existing PR body
echo "Updating PR body with new summary..."
NEW_BODY="${EXISTING_BODY}
---
${PR_BODY_SECTION}"
echo "$NEW_BODY" > "$OUTPUT_DIR/updated-pr-body.md"
gh pr edit "$PR_NUM" --body-file "$OUTPUT_DIR/updated-pr-body.md"
echo ""
echo "=== Updated existing PR ==="
echo "PR #$PR_NUM: $PR_URL"
echo "New commit added and PR description updated."
else
# Create new PR with full body
echo "Creating new PR..."
echo "$PR_BODY_SECTION" > "$OUTPUT_DIR/new-pr-body.md"
# On failure, PR_URL holds gh's error output (stderr merged via 2>&1).
PR_URL=$(gh pr create \
--title "docs: automated documentation update ($(date +%Y-%m-%d))" \
--body-file "$OUTPUT_DIR/new-pr-body.md" \
--base main 2>&1) || {
echo "Failed to create PR: $PR_URL"
exit 1
}
echo ""
echo "=== PR Created ==="
echo "$PR_URL"
fi
echo ""
echo "=== Test Complete ==="
echo "Total time: Analysis ${ANALYSIS_DURATION}s + Apply ${APPLY_DURATION}s = $((ANALYSIS_DURATION + APPLY_DURATION))s"
echo "All outputs saved to: $OUTPUT_DIR/"

View File

@@ -48,7 +48,7 @@ fn run_clippy() -> Step<Run> {
fn check_rust() -> NamedJob {
let job = Job::default()
.with_repository_owner_guard()
.runs_on(runners::LINUX_MEDIUM)
.runs_on(runners::LINUX_DEFAULT)
.timeout_minutes(3u32)
.add_step(steps::checkout_repo())
.add_step(steps::cache_rust_dependencies_namespace())
@@ -66,7 +66,7 @@ pub(crate) fn check_extension() -> NamedJob {
let (cache_download, cache_hit) = cache_zed_extension_cli();
let job = Job::default()
.with_repository_owner_guard()
.runs_on(runners::LINUX_LARGE_RAM)
.runs_on(runners::LINUX_SMALL)
.timeout_minutes(2u32)
.add_step(steps::checkout_repo())
.add_step(cache_download)

View File

@@ -8,9 +8,6 @@ pub const LINUX_MEDIUM: Runner = Runner("namespace-profile-4x8-ubuntu-2204");
pub const LINUX_X86_BUNDLER: Runner = Runner("namespace-profile-32x64-ubuntu-2004");
pub const LINUX_ARM_BUNDLER: Runner = Runner("namespace-profile-8x32-ubuntu-2004-arm-m4");
// Larger Ubuntu runner with glibc 2.39 for extension bundling
pub const LINUX_LARGE_RAM: Runner = Runner("namespace-profile-8x32-ubuntu-2404");
pub const MAC_DEFAULT: Runner = Runner("self-mini-macos");
pub const WINDOWS_DEFAULT: Runner = Runner("self-32vcpu-windows-2022");