fix(ai): critical fixes for agent-redesign - provider selection and auto-learn
Fix 1: Provider/Model Selection (Critical - unblocks LLM)
- Add /api/chat/providers/{id}/models/ endpoint to fetch available models
- Auto-select first configured provider instead of hardcoded 'openai'
- Add model dropdown populated from provider API
- Filter provider list to only show configured providers
- Show helpful error when no providers configured
Fix 2: Auto-Learn Preferences (Replaces manual input)
- Create auto_profile.py utility to infer preferences from user data
- Learn interests from Activity sport types and Location categories
- Learn trip style from Lodging types (hostel=budget, resort=luxury, etc.)
- Learn geographic preferences from VisitedRegion/VisitedCity
- Call auto-learn on every chat start (send_message)
- System prompt now indicates preferences are auto-inferred
Fix 3: Remove Manual Preference UI
- Remove travel_preferences section from Settings
- Remove preference form fields and save logic
- Remove preference fetch from server-side load
- Keep UserRecommendationPreferenceProfile type for backend use
The LLM should now work correctly:
- Users with any configured provider will have it auto-selected
- Model list is fetched dynamically from provider API
- Preferences are learned from actual travel history
This commit is contained in:
@@ -335,25 +335,22 @@ Be conversational, helpful, and enthusiastic about travel. Keep responses concis
|
||||
else:
|
||||
try:
|
||||
profile = UserRecommendationPreferenceProfile.objects.get(user=user)
|
||||
preference_lines = []
|
||||
|
||||
if profile.cuisines:
|
||||
preference_lines.append(
|
||||
f"🍽️ **Cuisine Preferences**: {profile.cuisines}"
|
||||
)
|
||||
if profile.interests:
|
||||
preference_lines.append(
|
||||
f"🎯 **Interests**: {_format_interests(profile.interests)}"
|
||||
)
|
||||
if profile.trip_style:
|
||||
preference_lines.append(f"✈️ **Travel Style**: {profile.trip_style}")
|
||||
if profile.notes:
|
||||
preference_lines.append(f"📝 **Additional Notes**: {profile.notes}")
|
||||
if profile.interests or profile.trip_style or profile.notes:
|
||||
base_prompt += "\n\n## Traveler Preferences\n"
|
||||
base_prompt += "*(Automatically inferred from travel history)*\n\n"
|
||||
|
||||
if preference_lines:
|
||||
base_prompt += "\n\n## Traveler Preferences\n" + "\n".join(
|
||||
preference_lines
|
||||
)
|
||||
if profile.interests:
|
||||
interests_str = (
|
||||
", ".join(profile.interests)
|
||||
if isinstance(profile.interests, list)
|
||||
else str(profile.interests)
|
||||
)
|
||||
base_prompt += f"🎯 **Interests**: {interests_str}\n"
|
||||
if profile.trip_style:
|
||||
base_prompt += f"✈️ **Travel Style**: {profile.trip_style}\n"
|
||||
if profile.notes:
|
||||
base_prompt += f"📍 **Patterns**: {profile.notes}\n"
|
||||
except UserRecommendationPreferenceProfile.DoesNotExist:
|
||||
pass
|
||||
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
import asyncio
|
||||
import json
|
||||
import logging
|
||||
|
||||
from asgiref.sync import sync_to_async
|
||||
from adventures.models import Collection
|
||||
@@ -19,6 +20,8 @@ from ..llm_client import (
|
||||
from ..models import ChatConversation, ChatMessage
|
||||
from ..serializers import ChatConversationSerializer
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class ChatViewSet(viewsets.ModelViewSet):
|
||||
serializer_class = ChatConversationSerializer
|
||||
@@ -108,6 +111,15 @@ class ChatViewSet(viewsets.ModelViewSet):
|
||||
|
||||
@action(detail=True, methods=["post"])
|
||||
def send_message(self, request, pk=None):
|
||||
# Auto-learn preferences from user's travel history
|
||||
from integrations.utils.auto_profile import update_auto_preference_profile
|
||||
|
||||
try:
|
||||
update_auto_preference_profile(request.user)
|
||||
except Exception as exc:
|
||||
logger.warning("Auto-profile update failed: %s", exc)
|
||||
# Continue anyway - not critical
|
||||
|
||||
conversation = self.get_object()
|
||||
user_content = (request.data.get("message") or "").strip()
|
||||
if not user_content:
|
||||
@@ -323,6 +335,93 @@ class ChatProviderCatalogViewSet(viewsets.ViewSet):
|
||||
def list(self, request):
    """Return the chat provider catalog scoped to the requesting user."""
    catalog = get_provider_catalog(user=request.user)
    return Response(catalog)
|
||||
|
||||
@action(detail=True, methods=["get"])
def models(self, request, pk=None):
    """Fetch available chat models for the provider identified by *pk*.

    OpenAI and Ollama are queried live; Anthropic/Claude, Gemini/Google,
    and Groq expose no usable listing endpoint here, so curated static
    lists are returned for them. Unknown providers yield an empty list.

    Returns:
        200 with ``{"models": [...]}`` on success (possibly empty),
        403 when the user has no API key configured for the provider,
        500 when a live model fetch fails unexpectedly.
    """
    from chat.llm_client import get_llm_api_key

    provider = (pk or "").lower()

    api_key = get_llm_api_key(request.user, provider)
    if not api_key:
        return Response(
            {"error": "No API key configured for this provider"},
            status=status.HTTP_403_FORBIDDEN,
        )

    # Providers without a queryable model API: one curated list per
    # provider, with aliases pointing at the same catalog.
    _ANTHROPIC_MODELS = [
        "claude-sonnet-4-20250514",
        "claude-opus-4-20250514",
        "claude-3-5-sonnet-20241022",
        "claude-3-5-haiku-20241022",
        "claude-3-haiku-20240307",
    ]
    _GEMINI_MODELS = [
        "gemini-2.0-flash",
        "gemini-1.5-pro",
        "gemini-1.5-flash",
        "gemini-1.5-flash-8b",
    ]
    _GROQ_MODELS = [
        "llama-3.3-70b-versatile",
        "llama-3.1-70b-versatile",
        "llama-3.1-8b-instant",
        "mixtral-8x7b-32768",
    ]
    static_catalog = {
        "anthropic": _ANTHROPIC_MODELS,
        "claude": _ANTHROPIC_MODELS,
        "gemini": _GEMINI_MODELS,
        "google": _GEMINI_MODELS,
        "groq": _GROQ_MODELS,
    }

    try:
        if provider == "openai":
            import openai

            client = openai.OpenAI(api_key=api_key)
            # Keep only chat-capable model families.
            chat_models = [
                model.id
                for model in client.models.list()
                if any(prefix in model.id for prefix in ("gpt-", "o1-", "chatgpt"))
            ]
            return Response({"models": sorted(set(chat_models), reverse=True)})

        if provider in static_catalog:
            return Response({"models": list(static_catalog[provider])})

        if provider == "ollama":
            import requests

            # NOTE(review): daemon host is hardcoded; consider making it
            # configurable (e.g. OLLAMA_HOST) — TODO confirm deployment model.
            try:
                resp = requests.get("http://localhost:11434/api/tags", timeout=5)
                if resp.ok:
                    data = resp.json()
                    names = [item["name"] for item in data.get("models", [])]
                    return Response({"models": names})
            except Exception:
                # Best effort: an unreachable or malformed daemon response
                # simply means no models are available.
                pass
            return Response({"models": []})

        # Unknown provider: nothing to offer.
        return Response({"models": []})
    except Exception as exc:
        logger.error("Failed to fetch models for %s: %s", provider, exc)
        return Response(
            {"error": f"Failed to fetch models: {str(exc)}"},
            status=status.HTTP_500_INTERNAL_SERVER_ERROR,
        )
|
||||
|
||||
|
||||
from .capabilities import CapabilitiesView
|
||||
from .day_suggestions import DaySuggestionsView
|
||||
|
||||
Reference in New Issue
Block a user