fix(chat): add saved AI defaults and harden suggestions

This commit is contained in:
2026-03-09 20:32:13 +00:00
parent 21954df3ee
commit bb54503235
38 changed files with 3949 additions and 105 deletions

View File

@@ -1,7 +1,9 @@
import logging
import json
import re
import litellm
from django.conf import settings
from django.shortcuts import get_object_or_404
from rest_framework import status
from rest_framework.permissions import IsAuthenticated
@@ -11,10 +13,17 @@ from rest_framework.views import APIView
from adventures.models import Collection
from chat.agent_tools import search_places
from chat.llm_client import (
CHAT_PROVIDER_CONFIG,
_safe_error_payload,
get_llm_api_key,
get_system_prompt,
is_chat_provider_available,
normalize_gateway_model,
)
from integrations.models import UserAISettings
logger = logging.getLogger(__name__)
class DaySuggestionsView(APIView):
@@ -52,7 +61,7 @@ class DaySuggestionsView(APIView):
location = location_context or self._get_collection_location(collection)
system_prompt = get_system_prompt(request.user, collection)
provider = "openai"
provider, model = self._resolve_provider_and_model(request)
if not is_chat_provider_available(provider):
return Response(
@@ -78,12 +87,22 @@ class DaySuggestionsView(APIView):
user_prompt=prompt,
user=request.user,
provider=provider,
model=model,
)
return Response({"suggestions": suggestions}, status=status.HTTP_200_OK)
except Exception:
except Exception as exc:
logger.exception("Failed to generate day suggestions")
payload = _safe_error_payload(exc)
status_code = {
"model_not_found": status.HTTP_400_BAD_REQUEST,
"authentication_failed": status.HTTP_401_UNAUTHORIZED,
"rate_limited": status.HTTP_429_TOO_MANY_REQUESTS,
"invalid_request": status.HTTP_400_BAD_REQUEST,
"provider_unreachable": status.HTTP_503_SERVICE_UNAVAILABLE,
}.get(payload.get("error_category"), status.HTTP_500_INTERNAL_SERVER_ERROR)
return Response(
{"error": "Failed to generate suggestions. Please try again."},
status=status.HTTP_500_INTERNAL_SERVER_ERROR,
payload,
status=status_code,
)
def _get_collection_location(self, collection):
@@ -174,31 +193,98 @@ class DaySuggestionsView(APIView):
category=tool_category_map.get(category, "tourism"),
radius=8,
)
if not isinstance(result, dict):
return ""
if result.get("error"):
return ""
raw_results = result.get("results")
if not isinstance(raw_results, list):
return ""
entries = []
for place in result.get("results", [])[:5]:
for place in raw_results[:5]:
if not isinstance(place, dict):
continue
name = place.get("name")
address = place.get("address") or ""
if name:
entries.append(f"{name} ({address})" if address else name)
return "; ".join(entries)
def _get_suggestions_from_llm(self, system_prompt, user_prompt, user, provider):
def _resolve_provider_and_model(self, request):
    """Pick the chat provider and model to use for this request.

    Provider precedence: explicit request payload, then the user's saved
    defaults (``UserAISettings``), then the server-wide
    ``VOYAGE_AI_PROVIDER`` setting — each candidate must pass
    ``is_chat_provider_available`` before it is kept. The model follows the
    same precedence, except a saved default model is only honoured when the
    provider actually chosen matches the saved default provider, and the
    server-wide ``VOYAGE_AI_MODEL`` only applies when the chosen provider is
    the server-configured one.

    Returns:
        tuple[str | None, str | None]: ``(provider, model)``; either element
        may be ``None`` when nothing usable could be resolved.
    """

    def _clean(value, lower=False):
        # Normalize to a stripped (optionally lowercased) string, or None
        # when empty/missing — keeps the precedence chains below simple.
        text = (value or "").strip()
        if lower:
            text = text.lower()
        return text or None

    requested_provider = _clean(request.data.get("provider"), lower=True)
    requested_model = _clean(request.data.get("model"))

    saved = UserAISettings.objects.filter(user=request.user).first()  # type: ignore[attr-defined]
    saved_provider = _clean(saved.preferred_provider, lower=True) if saved else None
    saved_model = _clean(saved.preferred_model) if saved else None

    configured_provider = _clean(settings.VOYAGE_AI_PROVIDER, lower=True)

    # First choice by precedence; fall back to the server-configured
    # provider, then to "openai", each time only if actually available.
    provider = requested_provider or saved_provider or configured_provider
    if not (provider and is_chat_provider_available(provider)):
        if is_chat_provider_available(configured_provider):
            provider = configured_provider
        else:
            provider = None
    if not (provider and is_chat_provider_available(provider)):
        if is_chat_provider_available("openai"):
            provider = "openai"

    config = CHAT_PROVIDER_CONFIG.get(provider or "", {})

    # Server-wide model only counts when the chosen provider is the
    # server-configured one; otherwise use the provider's built-in default.
    configured_model = None
    if provider == configured_provider and settings.VOYAGE_AI_MODEL:
        configured_model = (settings.VOYAGE_AI_MODEL or "").strip()
    fallback_model = configured_model or config.get("default_model")

    # Saved default model applies only when its provider won the selection.
    saved_default = (
        saved_model if saved_provider and provider == saved_provider else None
    )

    return provider, requested_model or saved_default or fallback_model
def _get_suggestions_from_llm(
self, system_prompt, user_prompt, user, provider, model
):
api_key = get_llm_api_key(user, provider)
if not api_key:
raise ValueError("No API key available")
response = litellm.completion(
model="gpt-4o-mini",
messages=[
provider_config = CHAT_PROVIDER_CONFIG.get(provider, {})
resolved_model = normalize_gateway_model(
provider,
model or provider_config.get("default_model"),
)
if not resolved_model:
raise ValueError("No model configured for provider")
completion_kwargs = {
"model": resolved_model,
"messages": [
{"role": "system", "content": system_prompt},
{"role": "user", "content": user_prompt},
],
api_key=api_key,
temperature=0.7,
max_tokens=1000,
"api_key": api_key,
"max_tokens": 1000,
}
if provider_config.get("api_base"):
completion_kwargs["api_base"] = provider_config["api_base"]
response = litellm.completion(
**completion_kwargs,
)
content = (response.choices[0].message.content or "").strip()