feat(chat): add LLM-powered travel agent with multi-provider support

Implement a full chat-based travel agent using LiteLLM for multi-provider
LLM support (OpenAI, Anthropic, Gemini, Ollama, Groq, Mistral, etc.).

Backend:
- New 'chat' Django app with ChatConversation and ChatMessage models
- Streaming SSE endpoint via StreamingHttpResponse
- 5 agent tools: search_places, list_trips, get_trip_details,
  add_to_itinerary, get_weather
- LiteLLM client wrapper with per-user API key retrieval
- System prompt with user preference context injection

Frontend:
- New /chat route with full-page chat UI (DaisyUI + Tailwind)
- Collapsible conversation sidebar with CRUD
- SSE streaming response display with tool call visualization
- Provider selector dropdown
- SSE proxy fix to stream text/event-stream without buffering
- Navbar link and i18n keys
This commit is contained in:
2026-03-08 18:44:44 +00:00
parent d4e0ef14b8
commit 757140ec70
20 changed files with 1518 additions and 6 deletions

View File

View File

@@ -0,0 +1,17 @@
from django.contrib import admin
from .models import ChatConversation, ChatMessage
@admin.register(ChatConversation)
class ChatConversationAdmin(admin.ModelAdmin):
    """Admin list/search configuration for chat conversations."""

    list_display = ("id", "user", "title", "updated_at", "created_at")
    search_fields = ("title", "user__username")
    list_filter = ("created_at", "updated_at")
@admin.register(ChatMessage)
class ChatMessageAdmin(admin.ModelAdmin):
    """Admin list/search configuration for individual chat messages."""

    list_display = ("id", "conversation", "role", "name", "created_at")
    search_fields = ("conversation__id", "content", "name")
    list_filter = ("role", "created_at")

View File

@@ -0,0 +1,527 @@
import json
from datetime import date as date_cls
import requests
from django.contrib.contenttypes.models import ContentType
from django.db import models
from adventures.models import Collection, CollectionItineraryItem, Location
# OpenAI-style function-tool schemas advertised to the LLM on every chat
# completion. Each entry's "name" maps to a same-named callable in this
# module, dispatched via execute_tool().
AGENT_TOOLS = [
    # Geocode a place name, then find nearby POIs via Overpass.
    {
        "type": "function",
        "function": {
            "name": "search_places",
            "description": "Search for places of interest near a location. Returns tourist attractions, restaurants, hotels, etc.",
            "parameters": {
                "type": "object",
                "properties": {
                    "location": {
                        "type": "string",
                        "description": "Location name or address to search near",
                    },
                    "category": {
                        "type": "string",
                        "enum": ["tourism", "food", "lodging"],
                        "description": "Category of places",
                    },
                    "radius": {
                        "type": "number",
                        "description": "Search radius in km (default 10)",
                    },
                },
                "required": ["location"],
            },
        },
    },
    # Summarize the user's trip collections.
    {
        "type": "function",
        "function": {
            "name": "list_trips",
            "description": "List the user's trip collections with dates and descriptions",
            "parameters": {"type": "object", "properties": {}},
        },
    },
    # Full dump of one trip (locations, transport, lodging, itinerary).
    {
        "type": "function",
        "function": {
            "name": "get_trip_details",
            "description": "Get full details of a trip including all itinerary items, locations, transportation, and lodging",
            "parameters": {
                "type": "object",
                "properties": {
                    "collection_id": {
                        "type": "string",
                        "description": "UUID of the collection/trip",
                    }
                },
                "required": ["collection_id"],
            },
        },
    },
    # Create a Location and schedule it on a trip's itinerary.
    {
        "type": "function",
        "function": {
            "name": "add_to_itinerary",
            "description": "Add a new location to a trip's itinerary on a specific date",
            "parameters": {
                "type": "object",
                "properties": {
                    "collection_id": {
                        "type": "string",
                        "description": "UUID of the collection/trip",
                    },
                    "name": {"type": "string", "description": "Name of the location"},
                    "description": {
                        "type": "string",
                        "description": "Description of why to visit",
                    },
                    "latitude": {
                        "type": "number",
                        "description": "Latitude coordinate",
                    },
                    "longitude": {
                        "type": "number",
                        "description": "Longitude coordinate",
                    },
                    "date": {
                        "type": "string",
                        "description": "Date in YYYY-MM-DD format",
                    },
                    "location_address": {
                        "type": "string",
                        "description": "Full address of the location",
                    },
                },
                "required": ["collection_id", "name", "latitude", "longitude"],
            },
        },
    },
    # Daily temperature lookups via Open-Meteo.
    {
        "type": "function",
        "function": {
            "name": "get_weather",
            "description": "Get temperature/weather data for a location on specific dates",
            "parameters": {
                "type": "object",
                "properties": {
                    "latitude": {"type": "number", "description": "Latitude"},
                    "longitude": {"type": "number", "description": "Longitude"},
                    "dates": {
                        "type": "array",
                        "items": {"type": "string"},
                        "description": "List of dates in YYYY-MM-DD format",
                    },
                },
                "required": ["latitude", "longitude", "dates"],
            },
        },
    },
]

# External service endpoints used by the tools below.
NOMINATIM_URL = "https://nominatim.openstreetmap.org/search"
OVERPASS_URL = "https://overpass-api.de/api/interpreter"
OPEN_METEO_ARCHIVE_URL = "https://archive-api.open-meteo.com/v1/archive"
OPEN_METEO_FORECAST_URL = "https://api.open-meteo.com/v1/forecast"
# Nominatim's usage policy requires an identifying User-Agent.
REQUEST_HEADERS = {"User-Agent": "Voyage/1.0"}
def _build_overpass_query(latitude, longitude, radius_meters, category):
if category == "food":
node_filter = '["amenity"~"restaurant|cafe|bar|fast_food"]'
elif category == "lodging":
node_filter = '["tourism"~"hotel|hostel|guest_house|motel|apartment"]'
else:
node_filter = '["tourism"~"attraction|museum|viewpoint|gallery|theme_park"]'
return f"""
[out:json][timeout:25];
(
node{node_filter}(around:{int(radius_meters)},{latitude},{longitude});
way{node_filter}(around:{int(radius_meters)},{latitude},{longitude});
relation{node_filter}(around:{int(radius_meters)},{latitude},{longitude});
);
out center 20;
"""
def _parse_address(tags):
if not tags:
return ""
if tags.get("addr:full"):
return tags["addr:full"]
street = tags.get("addr:street", "")
house = tags.get("addr:housenumber", "")
city = (
tags.get("addr:city") or tags.get("addr:town") or tags.get("addr:village") or ""
)
parts = [f"{street} {house}".strip(), city]
return ", ".join([p for p in parts if p])
def search_places(user, **kwargs):
    """Geocode a free-text location, then return nearby POIs from Overpass.

    kwargs: location (required), category (tourism|food|lodging, default
    tourism), radius in km (default 10, clamped to 0.5-50 km).
    Returns up to 5 named places with coordinates, or {"error": ...}.
    """
    try:
        location_name = kwargs.get("location")
        if not location_name:
            return {"error": "location is required"}
        category = kwargs.get("category") or "tourism"
        radius_km = float(kwargs.get("radius") or 10)
        radius_meters = max(500, min(int(radius_km * 1000), 50000))

        # Step 1: resolve the location name to a coordinate via Nominatim.
        geocode_response = requests.get(
            NOMINATIM_URL,
            params={"q": location_name, "format": "json", "limit": 1},
            headers=REQUEST_HEADERS,
            timeout=10,
        )
        geocode_response.raise_for_status()
        matches = geocode_response.json()
        if not matches:
            return {"error": f"Could not geocode location: {location_name}"}
        center_lat = float(matches[0]["lat"])
        center_lon = float(matches[0]["lon"])

        # Step 2: fetch POIs around that coordinate via Overpass.
        overpass_response = requests.post(
            OVERPASS_URL,
            data={
                "data": _build_overpass_query(
                    center_lat, center_lon, radius_meters, category
                )
            },
            headers=REQUEST_HEADERS,
            timeout=20,
        )
        overpass_response.raise_for_status()
        elements = (overpass_response.json().get("elements") or [])[:20]

        results = []
        for element in elements:
            tags = element.get("tags") or {}
            name = tags.get("name")
            if not name:
                continue  # unnamed elements are useless as suggestions
            # Nodes carry lat/lon directly; ways/relations only expose a
            # computed "center" (requested via `out center` in the query).
            lat = element.get("lat")
            lon = element.get("lon")
            if lat is None or lon is None:
                center = element.get("center") or {}
                lat = center.get("lat")
                lon = center.get("lon")
            if lat is None or lon is None:
                continue
            results.append(
                {
                    "name": name,
                    "address": _parse_address(tags),
                    "latitude": lat,
                    "longitude": lon,
                    "category": category,
                }
            )
            if len(results) >= 5:
                break
        return {
            "location": location_name,
            "category": category,
            "results": results,
        }
    except requests.RequestException as exc:
        return {"error": f"Places API request failed: {exc}"}
    except (TypeError, ValueError) as exc:
        return {"error": f"Invalid search parameters: {exc}"}
    except Exception as exc:
        return {"error": str(exc)}
def list_trips(user, **kwargs):
    """Return a summary of every trip collection owned by *user*."""

    def _iso_or_none(value):
        return value.isoformat() if value else None

    try:
        owned = Collection.objects.filter(user=user).prefetch_related("locations")
        return {
            "trips": [
                {
                    "id": str(trip.id),
                    "name": trip.name,
                    "start_date": _iso_or_none(trip.start_date),
                    "end_date": _iso_or_none(trip.end_date),
                    "description": trip.description or "",
                    "location_count": trip.locations.count(),
                }
                for trip in owned
            ]
        }
    except Exception as exc:
        return {"error": str(exc)}
def get_trip_details(user, **kwargs):
    """Return a full serialization of one trip the user owns.

    Expects kwargs["collection_id"] (UUID string). Includes locations,
    transportation, lodging, and the ordered itinerary items. Returns
    {"error": ...} when the id is missing, the trip doesn't exist, or the
    trip belongs to another user (ownership enforced by the queryset).
    """
    try:
        collection_id = kwargs.get("collection_id")
        if not collection_id:
            return {"error": "collection_id is required"}
        # Filter by user BEFORE .get() so another user's trip id raises
        # DoesNotExist rather than leaking data.
        collection = (
            Collection.objects.filter(user=user)
            .prefetch_related(
                "locations",
                "transportation_set",
                "lodging_set",
                "itinerary_items__content_type",
            )
            .get(id=collection_id)
        )
        # Itinerary items are generic relations; item.item resolves the
        # concrete object (Location, Transportation, ...) to read its name.
        itinerary = []
        for item in collection.itinerary_items.all().order_by("date", "order"):
            content_obj = item.item
            itinerary.append(
                {
                    "id": str(item.id),
                    "date": item.date.isoformat() if item.date else None,
                    "order": item.order,
                    "is_global": item.is_global,
                    "content_type": item.content_type.model,
                    "object_id": str(item.object_id),
                    "name": getattr(content_obj, "name", ""),
                }
            )
        return {
            "trip": {
                "id": str(collection.id),
                "name": collection.name,
                "description": collection.description or "",
                "start_date": collection.start_date.isoformat()
                if collection.start_date
                else None,
                "end_date": collection.end_date.isoformat()
                if collection.end_date
                else None,
                "locations": [
                    {
                        "id": str(location.id),
                        "name": location.name,
                        "description": location.description or "",
                        "location": location.location or "",
                        # Decimal -> float for JSON serialization.
                        "latitude": float(location.latitude)
                        if location.latitude is not None
                        else None,
                        "longitude": float(location.longitude)
                        if location.longitude is not None
                        else None,
                    }
                    for location in collection.locations.all()
                ],
                "transportation": [
                    {
                        "id": str(t.id),
                        "name": t.name,
                        "type": t.type,
                        "date": t.date.isoformat() if t.date else None,
                        "end_date": t.end_date.isoformat() if t.end_date else None,
                    }
                    for t in collection.transportation_set.all()
                ],
                "lodging": [
                    {
                        "id": str(l.id),
                        "name": l.name,
                        "type": l.type,
                        "check_in": l.check_in.isoformat() if l.check_in else None,
                        "check_out": l.check_out.isoformat() if l.check_out else None,
                        "location": l.location or "",
                    }
                    for l in collection.lodging_set.all()
                ],
                "itinerary": itinerary,
            }
        }
    except Collection.DoesNotExist:
        return {"error": "Trip not found"}
    except Exception as exc:
        return {"error": str(exc)}
def add_to_itinerary(user, **kwargs):
    """Create a Location and schedule it on a trip's itinerary.

    Expected kwargs:
        collection_id, name, latitude, longitude -- required
        description, location_address            -- optional strings
        date -- optional YYYY-MM-DD; defaults to the trip's start date,
                or today when the trip has no start date.

    Returns a dict describing the created location and itinerary item,
    or {"error": ...} on any failure.
    """
    # Local import keeps this module's top-level imports unchanged.
    from django.db import transaction

    try:
        collection_id = kwargs.get("collection_id")
        name = kwargs.get("name")
        latitude = kwargs.get("latitude")
        longitude = kwargs.get("longitude")
        description = kwargs.get("description")
        location_address = kwargs.get("location_address")
        date = kwargs.get("date")
        if not collection_id or not name or latitude is None or longitude is None:
            return {
                "error": "collection_id, name, latitude, and longitude are required"
            }
        collection = Collection.objects.get(id=collection_id, user=user)
        # Validate the date BEFORE creating any rows. The original created
        # the Location first, so a malformed date string returned an error
        # while leaving an orphaned Location attached to the collection.
        itinerary_date = date
        if not itinerary_date:
            if collection.start_date:
                itinerary_date = collection.start_date.isoformat()
            else:
                itinerary_date = date_cls.today().isoformat()
        try:
            itinerary_date_obj = date_cls.fromisoformat(itinerary_date)
        except ValueError:
            return {"error": "date must be in YYYY-MM-DD format"}
        # All three writes (location, M2M link, itinerary item) succeed or
        # fail together so a partial failure cannot corrupt the trip.
        with transaction.atomic():
            location = Location.objects.create(
                user=user,
                name=name,
                latitude=latitude,
                longitude=longitude,
                description=description or "",
                location=location_address or "",
            )
            collection.locations.add(location)
            content_type = ContentType.objects.get_for_model(Location)
            # Append after the highest existing order for that day.
            max_order = (
                CollectionItineraryItem.objects.filter(
                    collection=collection,
                    date=itinerary_date_obj,
                    is_global=False,
                ).aggregate(models.Max("order"))["order__max"]
                or 0
            )
            itinerary_item = CollectionItineraryItem.objects.create(
                collection=collection,
                content_type=content_type,
                object_id=location.id,
                date=itinerary_date_obj,
                order=max_order + 1,
            )
        return {
            "success": True,
            "location": {
                "id": str(location.id),
                "name": location.name,
                "latitude": float(location.latitude),
                "longitude": float(location.longitude),
            },
            "itinerary_item": {
                "id": str(itinerary_item.id),
                "date": itinerary_date_obj.isoformat(),
                "order": itinerary_item.order,
            },
        }
    except Collection.DoesNotExist:
        return {"error": "Trip not found"}
    except Exception as exc:
        return {"error": str(exc)}
def _fetch_temperature_for_date(latitude, longitude, date_value):
    """Return {date, available, temperature_c} for one day at a coordinate.

    Tries the Open-Meteo archive endpoint first (covers past dates), then
    the forecast endpoint (covers near-future dates). The reported value is
    the midpoint of the daily min/max. Any failure on both endpoints yields
    available=False rather than raising.
    """
    query = {
        "latitude": latitude,
        "longitude": longitude,
        "start_date": date_value,
        "end_date": date_value,
        "daily": "temperature_2m_max,temperature_2m_min",
        "timezone": "UTC",
    }
    for endpoint in (OPEN_METEO_ARCHIVE_URL, OPEN_METEO_FORECAST_URL):
        try:
            reply = requests.get(endpoint, params=query, timeout=8)
            reply.raise_for_status()
            payload = reply.json()
        except (requests.RequestException, ValueError):
            continue  # network error or non-JSON body: try next endpoint
        daily = payload.get("daily") or {}
        highs = daily.get("temperature_2m_max") or []
        lows = daily.get("temperature_2m_min") or []
        if not highs or not lows:
            continue
        try:
            midpoint = (float(highs[0]) + float(lows[0])) / 2
        except (TypeError, ValueError, IndexError):
            continue
        return {
            "date": date_value,
            "available": True,
            "temperature_c": round(midpoint, 1),
        }
    return {
        "date": date_value,
        "available": False,
        "temperature_c": None,
    }
def get_weather(user, **kwargs):
    """Return daily temperature midpoints for a coordinate over given dates.

    kwargs: latitude, longitude (numeric, required), dates (non-empty list
    of YYYY-MM-DD strings). Each date gets its own entry in "results".
    """
    try:
        if kwargs.get("latitude") is None or kwargs.get("longitude") is None:
            return {"error": "latitude and longitude are required"}
        latitude = float(kwargs["latitude"])
        longitude = float(kwargs["longitude"])
        dates = kwargs.get("dates") or []
        if not isinstance(dates, list) or not dates:
            return {"error": "dates must be a non-empty list"}
        daily_results = []
        for date_value in dates:
            daily_results.append(
                _fetch_temperature_for_date(latitude, longitude, date_value)
            )
        return {
            "latitude": latitude,
            "longitude": longitude,
            "results": daily_results,
        }
    except (TypeError, ValueError):
        return {"error": "latitude and longitude must be numeric"}
    except Exception as exc:
        return {"error": str(exc)}
def execute_tool(tool_name, user, **kwargs):
    """Dispatch an agent tool invocation by name.

    Always returns a dict: the tool's own result, or {"error": ...} for an
    unknown tool or an unexpected exception (the LLM sees errors as data).
    """
    registry = {
        "search_places": search_places,
        "list_trips": list_trips,
        "get_trip_details": get_trip_details,
        "add_to_itinerary": add_to_itinerary,
        "get_weather": get_weather,
    }
    if tool_name not in registry:
        return {"error": f"Unknown tool: {tool_name}"}
    try:
        return registry[tool_name](user, **kwargs)
    except Exception as exc:
        return {"error": str(exc)}
def serialize_tool_result(result):
    """Serialize a tool result dict to a JSON string for the LLM transcript.

    json.dumps raises TypeError for unsupported types but ValueError for
    circular references; the original caught only TypeError, so a circular
    structure would escape and break the stream. Both now degrade to an
    error payload the model can read.
    """
    try:
        return json.dumps(result)
    except (TypeError, ValueError):
        return json.dumps({"error": "Tool returned non-serializable data"})

View File

@@ -0,0 +1,6 @@
from django.apps import AppConfig
class ChatConfig(AppConfig):
    """Django app configuration for the LLM-powered chat app."""

    default_auto_field = "django.db.models.BigAutoField"
    name = "chat"

View File

@@ -0,0 +1,142 @@
import json
import litellm
from integrations.models import UserAPIKey
# Default LiteLLM model id per provider key. The key is the normalized
# provider name stored with the user's API key; unknown providers fall
# back to "gpt-4o" in stream_chat_completion.
PROVIDER_MODELS = {
    "openai": "gpt-4o",
    "anthropic": "anthropic/claude-sonnet-4-20250514",
    "gemini": "gemini/gemini-2.0-flash",
    "ollama": "ollama/llama3.1",
    "groq": "groq/llama-3.3-70b-versatile",
    "mistral": "mistral/mistral-large-latest",
    "github_models": "github/gpt-4o",
    "openrouter": "openrouter/auto",
}
def _safe_get(obj, key, default=None):
if obj is None:
return default
if isinstance(obj, dict):
return obj.get(key, default)
return getattr(obj, key, default)
def get_llm_api_key(user, provider):
    """Get the user's API key for the given provider.

    Provider names are normalized (trimmed, lowercased) to match how keys
    are stored. Returns the decrypted key string, or None when the user
    has not saved a key for that provider.
    """
    normalized_provider = (provider or "").strip().lower()
    try:
        key_obj = UserAPIKey.objects.get(user=user, provider=normalized_provider)
        return key_obj.get_api_key()
    except UserAPIKey.DoesNotExist:
        return None
def get_system_prompt(user, collection=None):
    """Compose the agent's system prompt, appending the user's saved travel
    preferences when a recommendation profile exists."""
    # Imported here to avoid a module-level import cycle with integrations.
    from integrations.models import UserRecommendationPreferenceProfile

    base_prompt = """You are a helpful travel planning assistant for the Voyage travel app. You help users discover places, plan trips, and organize their itineraries.
Your capabilities:
- Search for interesting places (restaurants, tourist attractions, hotels) near any location
- View and manage the user's trip collections and itineraries
- Add new locations to trip itineraries
- Check weather/temperature data for travel dates
When suggesting places:
- Be specific with names, addresses, and why a place is worth visiting
- Consider the user's travel dates and weather conditions
- Group suggestions logically (by area, by type, by day)
When modifying itineraries:
- Always confirm with the user before adding items
- Suggest logical ordering based on geography
- Consider travel time between locations
Be conversational, helpful, and enthusiastic about travel. Keep responses concise but informative."""
    try:
        profile = UserRecommendationPreferenceProfile.objects.get(user=user)
    except UserRecommendationPreferenceProfile.DoesNotExist:
        return base_prompt
    labeled_values = [
        ("Cuisine preferences", profile.cuisines),
        ("Interests", profile.interests),
        ("Travel style", profile.trip_style),
        ("Additional notes", profile.notes),
    ]
    prefs = [f"{label}: {value}" for label, value in labeled_values if value]
    if prefs:
        base_prompt += "\n\nUser preferences:\n" + "\n".join(prefs)
    return base_prompt
async def stream_chat_completion(user, messages, provider, tools=None):
    """Stream a chat completion using LiteLLM.

    Yields SSE-formatted strings. Each event is a "data: <json>" frame:
    {"content": str} for incremental assistant text, {"tool_calls": [...]}
    for incremental tool-call deltas, {"error": str} for a missing key or
    provider failure, and the literal "[DONE]" sentinel on success.
    """
    normalized_provider = (provider or "").strip().lower()
    api_key = get_llm_api_key(user, normalized_provider)
    # Fail fast as a stream event (not an exception) so the frontend can
    # render the problem inline in the chat.
    if not api_key:
        payload = {
            "error": f"No API key found for provider: {normalized_provider}. Please add one in Settings."
        }
        yield f"data: {json.dumps(payload)}\n\n"
        return
    model = PROVIDER_MODELS.get(normalized_provider, "gpt-4o")
    try:
        response = await litellm.acompletion(
            model=model,
            messages=messages,
            tools=tools,
            tool_choice="auto" if tools else None,
            stream=True,
            api_key=api_key,
        )
        async for chunk in response:
            # Chunks may be dicts or typed objects depending on the
            # provider; _safe_get handles both shapes.
            choices = _safe_get(chunk, "choices", []) or []
            if not choices:
                continue
            delta = _safe_get(choices[0], "delta")
            if not delta:
                continue
            chunk_data = {}
            content = _safe_get(delta, "content")
            if content:
                chunk_data["content"] = content
            tool_calls = _safe_get(delta, "tool_calls") or []
            if tool_calls:
                # Normalize tool-call deltas to plain dicts; "arguments"
                # fragments are merged by the caller across chunks.
                serialized = []
                for tool_call in tool_calls:
                    function = _safe_get(tool_call, "function")
                    serialized.append(
                        {
                            "id": _safe_get(tool_call, "id"),
                            "type": _safe_get(tool_call, "type"),
                            "function": {
                                "name": _safe_get(function, "name", "") or "",
                                "arguments": _safe_get(function, "arguments", "") or "",
                            },
                        }
                    )
                chunk_data["tool_calls"] = serialized
            if chunk_data:
                yield f"data: {json.dumps(chunk_data)}\n\n"
        yield "data: [DONE]\n\n"
    except Exception as exc:
        # Surface provider/network errors to the client as a stream event.
        yield f"data: {json.dumps({'error': str(exc)})}\n\n"

View File

@@ -0,0 +1,90 @@
# Generated by Django 5.2.12 on 2026-03-08
import django.db.models.deletion
import uuid
from django.conf import settings
from django.db import migrations, models
# Auto-generated initial migration for the chat app; do not hand-edit
# field definitions (they must stay in sync with chat/models.py).
class Migration(migrations.Migration):
    initial = True
    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]
    operations = [
        migrations.CreateModel(
            name="ChatConversation",
            fields=[
                (
                    "id",
                    models.UUIDField(
                        default=uuid.uuid4,
                        editable=False,
                        primary_key=True,
                        serialize=False,
                    ),
                ),
                ("title", models.CharField(blank=True, default="", max_length=255)),
                ("created_at", models.DateTimeField(auto_now_add=True)),
                ("updated_at", models.DateTimeField(auto_now=True)),
                (
                    "user",
                    models.ForeignKey(
                        on_delete=django.db.models.deletion.CASCADE,
                        related_name="chat_conversations",
                        to=settings.AUTH_USER_MODEL,
                    ),
                ),
            ],
            options={
                "ordering": ["-updated_at"],
            },
        ),
        migrations.CreateModel(
            name="ChatMessage",
            fields=[
                (
                    "id",
                    models.UUIDField(
                        default=uuid.uuid4,
                        editable=False,
                        primary_key=True,
                        serialize=False,
                    ),
                ),
                (
                    "role",
                    models.CharField(
                        choices=[
                            ("user", "User"),
                            ("assistant", "Assistant"),
                            ("system", "System"),
                            ("tool", "Tool"),
                        ],
                        max_length=20,
                    ),
                ),
                ("content", models.TextField(blank=True, default="")),
                ("tool_calls", models.JSONField(blank=True, null=True)),
                (
                    "tool_call_id",
                    models.CharField(blank=True, max_length=255, null=True),
                ),
                ("name", models.CharField(blank=True, max_length=255, null=True)),
                ("created_at", models.DateTimeField(auto_now_add=True)),
                (
                    "conversation",
                    models.ForeignKey(
                        on_delete=django.db.models.deletion.CASCADE,
                        related_name="messages",
                        to="chat.chatconversation",
                    ),
                ),
            ],
            options={
                "ordering": ["created_at"],
            },
        ),
    ]

View File

@@ -0,0 +1,47 @@
import uuid
from django.conf import settings
from django.db import models
class ChatConversation(models.Model):
    """A chat thread owned by one user; ChatMessage rows hang off it."""

    id = models.UUIDField(primary_key=True, default=uuid.uuid4, editable=False)
    user = models.ForeignKey(
        settings.AUTH_USER_MODEL,
        on_delete=models.CASCADE,
        related_name="chat_conversations",
    )
    # May be blank; the view fills it from the first user message.
    title = models.CharField(max_length=255, blank=True, default="")
    created_at = models.DateTimeField(auto_now_add=True)
    updated_at = models.DateTimeField(auto_now=True)

    class Meta:
        # Most recently active conversation first (sidebar order).
        ordering = ["-updated_at"]

    def __str__(self):
        return f"{self.title or 'Untitled'} ({self.user.username})"
class ChatMessage(models.Model):
    """One message in a conversation, mirroring the LLM wire format.

    Besides plain user/assistant/system text, a message can carry the
    tool_calls an assistant requested, or (role="tool") the serialized
    result of one tool invocation linked back via tool_call_id/name.
    """

    ROLE_CHOICES = [
        ("user", "User"),
        ("assistant", "Assistant"),
        ("system", "System"),
        ("tool", "Tool"),
    ]
    id = models.UUIDField(primary_key=True, default=uuid.uuid4, editable=False)
    conversation = models.ForeignKey(
        ChatConversation,
        on_delete=models.CASCADE,
        related_name="messages",
    )
    role = models.CharField(max_length=20, choices=ROLE_CHOICES)
    content = models.TextField(blank=True, default="")
    # Raw tool_calls list from an assistant turn, if any.
    tool_calls = models.JSONField(null=True, blank=True)
    # For role="tool": which assistant tool call this message answers.
    tool_call_id = models.CharField(max_length=255, blank=True, null=True)
    # For role="tool": name of the function that was executed.
    name = models.CharField(max_length=255, blank=True, null=True)
    created_at = models.DateTimeField(auto_now_add=True)

    class Meta:
        ordering = ["created_at"]

    def __str__(self):
        # Consistency fix: ChatConversation defines __str__ but ChatMessage
        # did not, leaving unreadable default reprs in the admin.
        return f"{self.role} message {self.id}"

View File

@@ -0,0 +1,25 @@
from rest_framework import serializers
from .models import ChatConversation, ChatMessage
class ChatMessageSerializer(serializers.ModelSerializer):
    """Serializes one persisted chat message for the API."""

    class Meta:
        model = ChatMessage
        fields = [
            "id",
            "role",
            "content",
            "tool_calls",
            "tool_call_id",
            "name",
            "created_at",
        ]
class ChatConversationSerializer(serializers.ModelSerializer):
    """Serializes a conversation with its full (read-only) message history."""

    messages = ChatMessageSerializer(many=True, read_only=True)

    class Meta:
        model = ChatConversation
        fields = ["id", "title", "created_at", "updated_at", "messages"]

View File

@@ -0,0 +1,11 @@
from django.urls import include, path
from rest_framework.routers import DefaultRouter
from .views import ChatViewSet
# Exposes conversation CRUD plus the send_message @action under
# /conversations/ (mounted at api/chat/ by the project urls).
router = DefaultRouter()
router.register(r"conversations", ChatViewSet, basename="chat-conversation")

urlpatterns = [
    path("", include(router.urls)),
]

View File

@@ -0,0 +1,260 @@
import asyncio
import json
from asgiref.sync import sync_to_async
from django.http import StreamingHttpResponse
from rest_framework import status, viewsets
from rest_framework.decorators import action
from rest_framework.permissions import IsAuthenticated
from rest_framework.response import Response
from .agent_tools import AGENT_TOOLS, execute_tool, serialize_tool_result
from .llm_client import get_system_prompt, stream_chat_completion
from .models import ChatConversation, ChatMessage
from .serializers import ChatConversationSerializer
class ChatViewSet(viewsets.ModelViewSet):
    """Conversation CRUD plus the streaming send_message action.

    send_message runs the full agent loop: stream the LLM reply, execute
    any requested tools, feed results back, and repeat until the model
    answers without tool calls. All events are relayed to the client as
    SSE frames via StreamingHttpResponse.
    """

    serializer_class = ChatConversationSerializer
    permission_classes = [IsAuthenticated]

    def get_queryset(self):
        # Scope to the requesting user; prefetch messages for detail views.
        return ChatConversation.objects.filter(user=self.request.user).prefetch_related(
            "messages"
        )

    def list(self, request, *args, **kwargs):
        # Lightweight sidebar payload: no messages, just id/title/recency.
        conversations = self.get_queryset().only("id", "title", "updated_at")
        data = [
            {
                "id": str(conversation.id),
                "title": conversation.title,
                "updated_at": conversation.updated_at,
            }
            for conversation in conversations
        ]
        return Response(data)

    def create(self, request, *args, **kwargs):
        # Title is optional; it gets auto-filled from the first message.
        conversation = ChatConversation.objects.create(
            user=request.user,
            title=(request.data.get("title") or "").strip(),
        )
        serializer = self.get_serializer(conversation)
        return Response(serializer.data, status=status.HTTP_201_CREATED)

    def _build_llm_messages(self, conversation, user):
        """Rebuild the LLM message list from persisted history, prefixed
        with the per-user system prompt."""
        messages = [{"role": "system", "content": get_system_prompt(user)}]
        for message in conversation.messages.all().order_by("created_at"):
            payload = {
                "role": message.role,
                "content": message.content,
            }
            # Assistant turns may carry tool calls; tool turns must carry
            # the id/name linking them to the call they answer.
            if message.role == "assistant" and message.tool_calls:
                payload["tool_calls"] = message.tool_calls
            if message.role == "tool":
                payload["tool_call_id"] = message.tool_call_id
                payload["name"] = message.name
            messages.append(payload)
        return messages

    def _async_to_sync_generator(self, async_gen):
        """Drive an async generator from sync code, one item at a time.

        StreamingHttpResponse consumes a sync iterable here, so each SSE
        frame is pulled by running the event loop until the next item.
        """
        loop = asyncio.new_event_loop()
        try:
            while True:
                try:
                    yield loop.run_until_complete(async_gen.__anext__())
                except StopAsyncIteration:
                    break
        finally:
            # Always finalize pending async generators before closing.
            loop.run_until_complete(loop.shutdown_asyncgens())
            loop.close()

    @staticmethod
    def _merge_tool_call_delta(accumulator, tool_calls_delta):
        """Fold streamed tool-call fragments into complete call dicts.

        Providers stream tool calls in pieces: id/type/name arrive once,
        while "arguments" arrives as string fragments that must be
        concatenated. Entries are matched by positional index.
        """
        for idx, tool_call in enumerate(tool_calls_delta or []):
            while len(accumulator) <= idx:
                accumulator.append(
                    {
                        "id": None,
                        "type": "function",
                        "function": {"name": "", "arguments": ""},
                    }
                )
            current = accumulator[idx]
            if tool_call.get("id"):
                current["id"] = tool_call.get("id")
            if tool_call.get("type"):
                current["type"] = tool_call.get("type")
            function_data = tool_call.get("function") or {}
            if function_data.get("name"):
                current["function"]["name"] = function_data.get("name")
            if function_data.get("arguments"):
                # Arguments are streamed as JSON-string fragments: append.
                current["function"]["arguments"] += function_data.get("arguments")

    @action(detail=True, methods=["post"])
    def send_message(self, request, pk=None):
        """POST {message, provider?} -> SSE stream of the agent's reply."""
        conversation = self.get_object()
        user_content = (request.data.get("message") or "").strip()
        if not user_content:
            return Response(
                {"error": "message is required"},
                status=status.HTTP_400_BAD_REQUEST,
            )
        provider = (request.data.get("provider") or "openai").strip().lower()
        ChatMessage.objects.create(
            conversation=conversation,
            role="user",
            content=user_content,
        )
        conversation.save(update_fields=["updated_at"])
        # First message doubles as the conversation title (truncated).
        if not conversation.title:
            conversation.title = user_content[:120]
            conversation.save(update_fields=["title", "updated_at"])
        llm_messages = self._build_llm_messages(conversation, request.user)

        async def event_stream():
            # Agent loop: each iteration is one LLM turn. If the turn ends
            # in tool calls, execute them, append results, and loop again;
            # otherwise persist the final answer and stop.
            current_messages = list(llm_messages)
            encountered_error = False
            while True:
                content_chunks = []
                tool_calls_accumulator = []
                async for chunk in stream_chat_completion(
                    request.user,
                    current_messages,
                    provider,
                    tools=AGENT_TOOLS,
                ):
                    if not chunk.startswith("data: "):
                        yield chunk
                        continue
                    payload = chunk[len("data: ") :].strip()
                    # Swallow the inner [DONE]; we emit our own at the end
                    # of the whole agent loop, not per LLM turn.
                    if payload == "[DONE]":
                        continue
                    yield chunk
                    try:
                        data = json.loads(payload)
                    except json.JSONDecodeError:
                        continue
                    if data.get("error"):
                        encountered_error = True
                        break
                    if data.get("content"):
                        content_chunks.append(data["content"])
                    if data.get("tool_calls"):
                        self._merge_tool_call_delta(
                            tool_calls_accumulator,
                            data["tool_calls"],
                        )
                if encountered_error:
                    break
                assistant_content = "".join(content_chunks)
                if tool_calls_accumulator:
                    # Persist the assistant turn (with its tool calls),
                    # then execute each tool and persist/emit its result.
                    assistant_with_tools = {
                        "role": "assistant",
                        "content": assistant_content,
                        "tool_calls": tool_calls_accumulator,
                    }
                    current_messages.append(assistant_with_tools)
                    await sync_to_async(
                        ChatMessage.objects.create, thread_sensitive=True
                    )(
                        conversation=conversation,
                        role="assistant",
                        content=assistant_content,
                        tool_calls=tool_calls_accumulator,
                    )
                    await sync_to_async(conversation.save, thread_sensitive=True)(
                        update_fields=["updated_at"]
                    )
                    for tool_call in tool_calls_accumulator:
                        function_payload = tool_call.get("function") or {}
                        function_name = function_payload.get("name") or ""
                        raw_arguments = function_payload.get("arguments") or "{}"
                        # Arguments come from the model; tolerate bad JSON.
                        try:
                            arguments = json.loads(raw_arguments)
                        except json.JSONDecodeError:
                            arguments = {}
                        if not isinstance(arguments, dict):
                            arguments = {}
                        # ORM work must run in a thread from async code.
                        result = await sync_to_async(
                            execute_tool, thread_sensitive=True
                        )(
                            function_name,
                            request.user,
                            **arguments,
                        )
                        result_content = serialize_tool_result(result)
                        current_messages.append(
                            {
                                "role": "tool",
                                "tool_call_id": tool_call.get("id"),
                                "name": function_name,
                                "content": result_content,
                            }
                        )
                        await sync_to_async(
                            ChatMessage.objects.create, thread_sensitive=True
                        )(
                            conversation=conversation,
                            role="tool",
                            content=result_content,
                            tool_call_id=tool_call.get("id"),
                            name=function_name,
                        )
                        await sync_to_async(conversation.save, thread_sensitive=True)(
                            update_fields=["updated_at"]
                        )
                        # Let the frontend render the tool result inline.
                        tool_event = {
                            "tool_result": {
                                "tool_call_id": tool_call.get("id"),
                                "name": function_name,
                                "result": result,
                            }
                        }
                        yield f"data: {json.dumps(tool_event)}\n\n"
                    continue
                # No tool calls: this is the final answer. Persist and end.
                await sync_to_async(ChatMessage.objects.create, thread_sensitive=True)(
                    conversation=conversation,
                    role="assistant",
                    content=assistant_content,
                )
                await sync_to_async(conversation.save, thread_sensitive=True)(
                    update_fields=["updated_at"]
                )
                yield "data: [DONE]\n\n"
                break

        response = StreamingHttpResponse(
            streaming_content=self._async_to_sync_generator(event_stream()),
            content_type="text/event-stream",
        )
        # Disable proxy/server buffering so SSE frames flush immediately.
        response["Cache-Control"] = "no-cache"
        response["X-Accel-Buffering"] = "no"
        return response

View File

@@ -69,6 +69,7 @@ INSTALLED_APPS = (
"worldtravel",
"users",
"integrations",
"chat",
"mcp_server",
"django.contrib.gis",
# 'achievements', # Not done yet, will be added later in a future update

View File

@@ -31,6 +31,7 @@ schema_view = get_schema_view(
urlpatterns = [
path("api/", include("adventures.urls")),
path("api/", include("worldtravel.urls")),
path("api/chat/", include("chat.urls")),
path(
getattr(settings, "DJANGO_MCP_ENDPOINT", "api/mcp"),
MCPServerStreamableHttpView.as_view(

View File

@@ -33,3 +33,4 @@ legacy-cgi==2.6.4
requests>=2.32.5
cryptography>=46.0.5
django-mcp-server>=0.5.7
litellm>=1.72.3