Compare commits

...

2 Commits

Author SHA1 Message Date
Bennet Bo Fenner
f202af131f Reword docs 2025-12-16 09:07:37 +01:00
Patrick Elsen
a2309c1888 language_models: adds 'auto_discover' setting for Ollama
This adds a setting flag named 'auto_discover' for the Ollama LLM
executor, which prevents Zed from automatically picking up all of the
models Ollama has pulled, so that only manually listed models are shown.

By default, auto discovery is enabled, preserving the current behaviour.

Release Notes:

- Adds an 'auto_discover' field for the language_models.ollama setting.
2025-11-07 17:17:04 +01:00
4 changed files with 33 additions and 2 deletions

View File

@@ -43,6 +43,7 @@ static API_KEY_ENV_VAR: LazyLock<EnvVar> = env_var!(API_KEY_ENV_VAR_NAME);
#[derive(Default, Debug, Clone, PartialEq)]
pub struct OllamaSettings {
pub api_url: String,
pub auto_discover: bool,
pub available_models: Vec<AvailableModel>,
}
@@ -241,10 +242,13 @@ impl LanguageModelProvider for OllamaLanguageModelProvider {
fn provided_models(&self, cx: &App) -> Vec<Arc<dyn LanguageModel>> {
let mut models: HashMap<String, ollama::Model> = HashMap::new();
let settings = OllamaLanguageModelProvider::settings(cx);
// Add models from the Ollama API
for model in self.state.read(cx).fetched_models.iter() {
models.insert(model.name.clone(), model.clone());
if settings.auto_discover {
for model in self.state.read(cx).fetched_models.iter() {
models.insert(model.name.clone(), model.clone());
}
}
// Override with available models from settings

View File

@@ -82,6 +82,7 @@ impl settings::Settings for AllLanguageModelSettings {
},
ollama: OllamaSettings {
api_url: ollama.api_url.unwrap(),
auto_discover: ollama.auto_discover.unwrap_or(true),
available_models: ollama.available_models.unwrap_or_default(),
},
open_router: OpenRouterSettings {

View File

@@ -92,6 +92,7 @@ pub enum BedrockAuthMethodContent {
#[derive(Default, Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema, MergeFrom)]
pub struct OllamaSettingsContent {
pub api_url: Option<String>,
pub auto_discover: Option<bool>,
pub available_models: Option<Vec<OllamaAvailableModel>>,
}

View File

@@ -327,6 +327,31 @@ Download and install Ollama from [ollama.com/download](https://ollama.com/downlo
3. In the Agent Panel, select one of the Ollama models using the model dropdown.
#### Autodiscovery
Zed automatically discovers models that Ollama has pulled. To disable this behavior, set the `auto_discover` field to `false` in the Ollama settings. When auto-discovery is disabled, you must manually specify which models are available.
```json [settings]
{
"language_models": {
"ollama": {
"api_url": "http://localhost:11434",
"auto_discover": false,
"available_models": [
{
"name": "qwen2.5-coder",
"display_name": "qwen 2.5 coder",
"max_tokens": 32768,
"supports_tools": true,
"supports_thinking": true,
"supports_images": true
}
]
}
}
}
```
#### Ollama Context Length {#ollama-context}
Zed has pre-configured maximum context lengths (`max_tokens`) to match the capabilities of common models.