feat(chat): add dynamic provider catalog and zen support

This commit is contained in:
2026-03-08 21:29:48 +00:00
parent 3526c963a4
commit d35feed98c
7 changed files with 5880 additions and 68 deletions

View File

@@ -7,15 +7,61 @@ from integrations.models import UserAPIKey
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
PROVIDER_MODELS = { CHAT_PROVIDER_CONFIG = {
"openai": "gpt-4o", "openai": {
"anthropic": "anthropic/claude-sonnet-4-20250514", "label": "OpenAI",
"gemini": "gemini/gemini-2.0-flash", "needs_api_key": True,
"ollama": "ollama/llama3.1", "default_model": "gpt-4o",
"groq": "groq/llama-3.3-70b-versatile", "api_base": None,
"mistral": "mistral/mistral-large-latest", },
"github_models": "github/gpt-4o", "anthropic": {
"openrouter": "openrouter/auto", "label": "Anthropic",
"needs_api_key": True,
"default_model": "anthropic/claude-sonnet-4-20250514",
"api_base": None,
},
"gemini": {
"label": "Google Gemini",
"needs_api_key": True,
"default_model": "gemini/gemini-2.0-flash",
"api_base": None,
},
"ollama": {
"label": "Ollama",
"needs_api_key": True,
"default_model": "ollama/llama3.1",
"api_base": None,
},
"groq": {
"label": "Groq",
"needs_api_key": True,
"default_model": "groq/llama-3.3-70b-versatile",
"api_base": None,
},
"mistral": {
"label": "Mistral",
"needs_api_key": True,
"default_model": "mistral/mistral-large-latest",
"api_base": None,
},
"github_models": {
"label": "GitHub Models",
"needs_api_key": True,
"default_model": "github/gpt-4o",
"api_base": None,
},
"openrouter": {
"label": "OpenRouter",
"needs_api_key": True,
"default_model": "openrouter/auto",
"api_base": None,
},
"opencode_zen": {
"label": "OpenCode Zen",
"needs_api_key": True,
"default_model": "openai/gpt-4o-mini",
"api_base": "https://opencode.ai/zen/v1",
},
} }
@@ -27,9 +73,82 @@ def _safe_get(obj, key, default=None):
return getattr(obj, key, default) return getattr(obj, key, default)
def _normalize_provider_id(provider_id):
value = str(provider_id or "").strip()
if value.startswith("LlmProviders."):
value = value.split(".", 1)[1]
return value.lower()
def _default_provider_label(provider_id):
return provider_id.replace("_", " ").title()
def is_chat_provider_available(provider_id):
    """Return ``True`` when *provider_id* maps to a chat-capable provider.

    The id is normalized first, so enum-style and mixed-case inputs match.
    """
    return _normalize_provider_id(provider_id) in CHAT_PROVIDER_CONFIG
def get_provider_catalog():
    """Return the merged provider catalog for the chat UI.

    Combines LiteLLM's native ``provider_list`` with the app-level
    ``CHAT_PROVIDER_CONFIG``:

    * Providers present in ``CHAT_PROVIDER_CONFIG`` are marked
      ``available_for_chat`` and carry their key/model/endpoint metadata.
    * Other LiteLLM providers are listed as unavailable placeholders so the
      frontend can still show them (e.g. in a settings dropdown).
    * App-only OpenAI-compatible aliases (for example OpenCode Zen) that
      LiteLLM does not list natively are appended at the end.

    Returns a list of dicts with keys ``id``, ``label``,
    ``available_for_chat``, ``needs_api_key``, ``default_model``,
    ``api_base``. Ids are normalized and de-duplicated.
    """

    def _chat_entry(provider_id, provider_config):
        # Catalog entry for a provider the app can actually use for chat.
        return {
            "id": provider_id,
            "label": provider_config["label"],
            "available_for_chat": True,
            "needs_api_key": provider_config["needs_api_key"],
            "default_model": provider_config["default_model"],
            "api_base": provider_config["api_base"],
        }

    seen = set()
    catalog = []
    # getattr guards against older LiteLLM versions without provider_list.
    for provider_id in getattr(litellm, "provider_list", []):
        normalized_provider = _normalize_provider_id(provider_id)
        if not normalized_provider or normalized_provider in seen:
            continue
        seen.add(normalized_provider)
        provider_config = CHAT_PROVIDER_CONFIG.get(normalized_provider)
        if provider_config:
            catalog.append(_chat_entry(normalized_provider, provider_config))
        else:
            catalog.append(
                {
                    "id": normalized_provider,
                    "label": _default_provider_label(normalized_provider),
                    "available_for_chat": False,
                    "needs_api_key": None,
                    "default_model": None,
                    "api_base": None,
                }
            )
    # Include app-supported OpenAI-compatible aliases that are not part of
    # LiteLLM's native provider list (for example OpenCode Zen).
    for provider_id, provider_config in CHAT_PROVIDER_CONFIG.items():
        normalized_provider = _normalize_provider_id(provider_id)
        if not normalized_provider or normalized_provider in seen:
            continue
        seen.add(normalized_provider)
        catalog.append(_chat_entry(normalized_provider, provider_config))
    return catalog
def get_llm_api_key(user, provider): def get_llm_api_key(user, provider):
"""Get the user's API key for the given provider.""" """Get the user's API key for the given provider."""
normalized_provider = (provider or "").strip().lower() normalized_provider = _normalize_provider_id(provider)
try: try:
key_obj = UserAPIKey.objects.get(user=user, provider=normalized_provider) key_obj = UserAPIKey.objects.get(user=user, provider=normalized_provider)
return key_obj.get_api_key() return key_obj.get_api_key()
@@ -85,26 +204,36 @@ async def stream_chat_completion(user, messages, provider, tools=None):
Yields SSE-formatted strings. Yields SSE-formatted strings.
""" """
normalized_provider = (provider or "").strip().lower() normalized_provider = _normalize_provider_id(provider)
provider_config = CHAT_PROVIDER_CONFIG.get(normalized_provider)
if not provider_config:
payload = {
"error": f"Provider is not available for chat: {normalized_provider}."
}
yield f"data: {json.dumps(payload)}\n\n"
return
api_key = get_llm_api_key(user, normalized_provider) api_key = get_llm_api_key(user, normalized_provider)
if not api_key: if provider_config["needs_api_key"] and not api_key:
payload = { payload = {
"error": f"No API key found for provider: {normalized_provider}. Please add one in Settings." "error": f"No API key found for provider: {normalized_provider}. Please add one in Settings."
} }
yield f"data: {json.dumps(payload)}\n\n" yield f"data: {json.dumps(payload)}\n\n"
return return
model = PROVIDER_MODELS.get(normalized_provider, "gpt-4o") completion_kwargs = {
"model": provider_config["default_model"],
"messages": messages,
"tools": tools,
"tool_choice": "auto" if tools else None,
"stream": True,
"api_key": api_key,
}
if provider_config["api_base"]:
completion_kwargs["api_base"] = provider_config["api_base"]
try: try:
response = await litellm.acompletion( response = await litellm.acompletion(**completion_kwargs)
model=model,
messages=messages,
tools=tools,
tool_choice="auto" if tools else None,
stream=True,
api_key=api_key,
)
async for chunk in response: async for chunk in response:
choices = _safe_get(chunk, "choices", []) or [] choices = _safe_get(chunk, "choices", []) or []

View File

@@ -1,10 +1,13 @@
from django.urls import include, path from django.urls import include, path
from rest_framework.routers import DefaultRouter from rest_framework.routers import DefaultRouter
from .views import ChatViewSet from .views import ChatProviderCatalogViewSet, ChatViewSet
router = DefaultRouter() router = DefaultRouter()
router.register(r"conversations", ChatViewSet, basename="chat-conversation") router.register(r"conversations", ChatViewSet, basename="chat-conversation")
router.register(
r"providers", ChatProviderCatalogViewSet, basename="chat-provider-catalog"
)
urlpatterns = [ urlpatterns = [
path("", include(router.urls)), path("", include(router.urls)),

View File

@@ -9,7 +9,12 @@ from rest_framework.permissions import IsAuthenticated
from rest_framework.response import Response from rest_framework.response import Response
from .agent_tools import AGENT_TOOLS, execute_tool, serialize_tool_result from .agent_tools import AGENT_TOOLS, execute_tool, serialize_tool_result
from .llm_client import get_system_prompt, stream_chat_completion from .llm_client import (
get_provider_catalog,
get_system_prompt,
is_chat_provider_available,
stream_chat_completion,
)
from .models import ChatConversation, ChatMessage from .models import ChatConversation, ChatMessage
from .serializers import ChatConversationSerializer from .serializers import ChatConversationSerializer
@@ -106,6 +111,11 @@ class ChatViewSet(viewsets.ModelViewSet):
) )
provider = (request.data.get("provider") or "openai").strip().lower() provider = (request.data.get("provider") or "openai").strip().lower()
if not is_chat_provider_available(provider):
return Response(
{"error": f"Provider is not available for chat: {provider}."},
status=status.HTTP_400_BAD_REQUEST,
)
ChatMessage.objects.create( ChatMessage.objects.create(
conversation=conversation, conversation=conversation,
@@ -262,3 +272,10 @@ class ChatViewSet(viewsets.ModelViewSet):
response["Cache-Control"] = "no-cache" response["Cache-Control"] = "no-cache"
response["X-Accel-Buffering"] = "no" response["X-Accel-Buffering"] = "no"
return response return response
class ChatProviderCatalogViewSet(viewsets.ViewSet):
    """Read-only API endpoint that serves the chat provider catalog."""

    # Catalog reveals which providers/models the app supports; require login.
    permission_classes = [IsAuthenticated]

    def list(self, request):
        """Return the provider catalog from ``get_provider_catalog`` as a JSON list."""
        return Response(get_provider_catalog())

5628
frontend/package-lock.json generated Normal file

File diff suppressed because it is too large Load Diff

View File

@@ -566,6 +566,15 @@ export type RecommendationResponse = {
}; };
}; };
// One entry of the chat provider catalog returned by GET /api/chat/providers/.
export type ChatProviderCatalogEntry = {
	id: string;
	label: string;
	// True when the backend can stream chat completions via this provider.
	available_for_chat: boolean;
	// The three fields below are null for providers not available for chat.
	needs_api_key: boolean | null;
	default_model: string | null;
	api_base: string | null;
};
export type CollectionItineraryDay = { export type CollectionItineraryDay = {
id: string; id: string;
collection: string; // UUID of the collection collection: string; // UUID of the collection

View File

@@ -2,11 +2,7 @@
import { onMount } from 'svelte'; import { onMount } from 'svelte';
import { t } from 'svelte-i18n'; import { t } from 'svelte-i18n';
import { mdiRobot, mdiSend, mdiPlus, mdiDelete, mdiMenu, mdiClose } from '@mdi/js'; import { mdiRobot, mdiSend, mdiPlus, mdiDelete, mdiMenu, mdiClose } from '@mdi/js';
import type { ChatProviderCatalogEntry } from '$lib/types.js';
type Provider = {
value: string;
label: string;
};
type Conversation = { type Conversation = {
id: string; id: string;
@@ -29,18 +25,30 @@
let streamingContent = ''; let streamingContent = '';
let selectedProvider = 'openai'; let selectedProvider = 'openai';
const providers: Provider[] = [ let providerCatalog: ChatProviderCatalogEntry[] = [];
{ value: 'openai', label: 'OpenAI' }, $: chatProviders = providerCatalog.filter((provider) => provider.available_for_chat);
{ value: 'anthropic', label: 'Anthropic' },
{ value: 'gemini', label: 'Google Gemini' },
{ value: 'ollama', label: 'Ollama' },
{ value: 'groq', label: 'Groq' },
{ value: 'mistral', label: 'Mistral' },
{ value: 'github_models', label: 'GitHub Models' },
{ value: 'openrouter', label: 'OpenRouter' }
];
onMount(loadConversations); onMount(async () => {
await Promise.all([loadConversations(), loadProviderCatalog()]);
});
// Fetch the provider catalog from the backend and keep the current provider
// selection valid: if the selected id is not chat-capable, fall back to the
// first available provider.
async function loadProviderCatalog() {
	const res = await fetch('/api/chat/providers/');
	if (!res.ok) {
		// Best-effort: leave the catalog empty so chat controls stay disabled.
		return;
	}
	const catalog = (await res.json()) as ChatProviderCatalogEntry[];
	providerCatalog = catalog;
	const availableProviders = catalog.filter((provider) => provider.available_for_chat);
	if (!availableProviders.length) {
		return;
	}
	if (!availableProviders.some((provider) => provider.id === selectedProvider)) {
		selectedProvider = availableProviders[0].id;
	}
}
async function loadConversations() { async function loadConversations() {
const res = await fetch('/api/chat/conversations/'); const res = await fetch('/api/chat/conversations/');
@@ -86,6 +94,7 @@
async function sendMessage() { async function sendMessage() {
if (!inputMessage.trim() || isStreaming) return; if (!inputMessage.trim() || isStreaming) return;
if (!chatProviders.some((provider) => provider.id === selectedProvider)) return;
let conversation = activeConversation; let conversation = activeConversation;
if (!conversation) { if (!conversation) {
@@ -258,9 +267,13 @@
</svg> </svg>
<h1 class="text-lg font-semibold">{$t('chat.title')}</h1> <h1 class="text-lg font-semibold">{$t('chat.title')}</h1>
<div class="ml-auto"> <div class="ml-auto">
<select class="select select-bordered select-sm" bind:value={selectedProvider}> <select
{#each providers as provider} class="select select-bordered select-sm"
<option value={provider.value}>{provider.label}</option> bind:value={selectedProvider}
disabled={chatProviders.length === 0}
>
{#each chatProviders as provider}
<option value={provider.id}>{provider.label}</option>
{/each} {/each}
</select> </select>
</div> </div>
@@ -325,7 +338,7 @@
<button <button
class="btn btn-primary" class="btn btn-primary"
on:click={sendMessage} on:click={sendMessage}
disabled={isStreaming || !inputMessage.trim()} disabled={isStreaming || !inputMessage.trim() || chatProviders.length === 0}
title={$t('chat.send')} title={$t('chat.send')}
> >
{#if isStreaming} {#if isStreaming}

View File

@@ -3,7 +3,7 @@
import { page } from '$app/stores'; import { page } from '$app/stores';
import { addToast } from '$lib/toasts'; import { addToast } from '$lib/toasts';
import { CURRENCY_LABELS, CURRENCY_OPTIONS } from '$lib/money'; import { CURRENCY_LABELS, CURRENCY_OPTIONS } from '$lib/money';
import type { ImmichIntegration, User } from '$lib/types.js'; import type { ChatProviderCatalogEntry, ImmichIntegration, User } from '$lib/types.js';
import type { PageData } from './$types'; import type { PageData } from './$types';
import { onMount } from 'svelte'; import { onMount } from 'svelte';
import { browser } from '$app/environment'; import { browser } from '$app/environment';
@@ -46,6 +46,7 @@
let userApiKeys: UserAPIKey[] = data.props.apiKeys ?? []; let userApiKeys: UserAPIKey[] = data.props.apiKeys ?? [];
let apiKeysConfigError: string | null = data.props.apiKeysConfigError ?? null; let apiKeysConfigError: string | null = data.props.apiKeysConfigError ?? null;
let newApiKeyProvider = 'anthropic'; let newApiKeyProvider = 'anthropic';
let providerCatalog: ChatProviderCatalogEntry[] = [];
let newApiKeyValue = ''; let newApiKeyValue = '';
let isSavingApiKey = false; let isSavingApiKey = false;
let deletingApiKeyId: string | null = null; let deletingApiKeyId: string | null = null;
@@ -53,21 +54,26 @@
let isLoadingMcpToken = false; let isLoadingMcpToken = false;
let activeSection: string = 'profile'; let activeSection: string = 'profile';
const API_KEY_PROVIDER_OPTIONS = [ async function loadProviderCatalog() {
{ value: 'anthropic', labelKey: 'settings.api_key_provider_anthropic' }, const res = await fetch('/api/chat/providers/');
{ value: 'openai', labelKey: 'settings.api_key_provider_openai' }, if (!res.ok) {
{ value: 'gemini', labelKey: 'settings.api_key_provider_gemini' }, return;
{ value: 'ollama', labelKey: 'settings.api_key_provider_ollama' }, }
{ value: 'groq', labelKey: 'settings.api_key_provider_groq' },
{ value: 'mistral', labelKey: 'settings.api_key_provider_mistral' }, providerCatalog = await res.json();
{ value: 'github_models', labelKey: 'settings.api_key_provider_github_models' }, if (!providerCatalog.length) {
{ value: 'openrouter', labelKey: 'settings.api_key_provider_openrouter' } return;
]; }
if (!providerCatalog.some((provider) => provider.id === newApiKeyProvider)) {
newApiKeyProvider = providerCatalog[0].id;
}
}
function getApiKeyProviderLabel(provider: string): string { function getApiKeyProviderLabel(provider: string): string {
const option = API_KEY_PROVIDER_OPTIONS.find((entry) => entry.value === provider); const catalogProvider = providerCatalog.find((entry) => entry.id === provider);
if (option) { if (catalogProvider) {
return $t(option.labelKey); return catalogProvider.label;
} }
if (provider === 'google_maps') { if (provider === 'google_maps') {
@@ -127,6 +133,8 @@
]; ];
onMount(async () => { onMount(async () => {
void loadProviderCatalog();
if (browser) { if (browser) {
const queryParams = new URLSearchParams($page.url.search); const queryParams = new URLSearchParams($page.url.search);
const pageParam = queryParams.get('page'); const pageParam = queryParams.get('page');
@@ -1642,9 +1650,10 @@
id="api-key-provider" id="api-key-provider"
class="select select-bordered select-primary w-full" class="select select-bordered select-primary w-full"
bind:value={newApiKeyProvider} bind:value={newApiKeyProvider}
disabled={providerCatalog.length === 0}
> >
{#each API_KEY_PROVIDER_OPTIONS as option} {#each providerCatalog as provider}
<option value={option.value}>{$t(option.labelKey)}</option> <option value={provider.id}>{provider.label}</option>
{/each} {/each}
</select> </select>
</div> </div>
@@ -1665,7 +1674,11 @@
{$t('settings.api_key_write_only_hint')} {$t('settings.api_key_write_only_hint')}
</p> </p>
</div> </div>
<button class="btn btn-primary" type="submit" disabled={isSavingApiKey}> <button
class="btn btn-primary"
type="submit"
disabled={isSavingApiKey || providerCatalog.length === 0}
>
{#if isSavingApiKey} {#if isSavingApiKey}
<span class="loading loading-spinner loading-sm"></span> <span class="loading loading-spinner loading-sm"></span>
{/if} {/if}