fix(chat): sanitize error responses and add tool kwargs allowlist

Prevent API key and sensitive info leakage through exception messages:
- Replace str(exc) with generic error messages in all catch-all handlers
- Add server-side exception logging via logger.exception()
- Add ALLOWED_KWARGS per-tool allowlist to filter untrusted LLM kwargs
- Bound tool execution loop to MAX_TOOL_ITERATIONS=10
- Fix tool_call delta merge to use tool_call index
This commit is contained in:
2026-03-08 18:54:35 +00:00
parent 757140ec70
commit fd3ca360de
3 changed files with 53 additions and 16 deletions

View File

@@ -1,4 +1,5 @@
import json
import logging
from datetime import date as date_cls
import requests
@@ -7,6 +8,8 @@ from django.db import models
from adventures.models import Collection, CollectionItineraryItem, Location
logger = logging.getLogger(__name__)
AGENT_TOOLS = [
{
"type": "function",
@@ -232,8 +235,9 @@ def search_places(user, **kwargs):
return {"error": f"Places API request failed: {exc}"}
except (TypeError, ValueError) as exc:
return {"error": f"Invalid search parameters: {exc}"}
except Exception as exc:
return {"error": str(exc)}
except Exception:
logger.exception("search_places failed")
return {"error": "An unexpected error occurred during place search"}
def list_trips(user, **kwargs):
@@ -256,8 +260,9 @@ def list_trips(user, **kwargs):
}
)
return {"trips": trips}
except Exception as exc:
return {"error": str(exc)}
except Exception:
logger.exception("list_trips failed")
return {"error": "An unexpected error occurred while listing trips"}
def get_trip_details(user, **kwargs):
@@ -344,8 +349,9 @@ def get_trip_details(user, **kwargs):
}
except Collection.DoesNotExist:
return {"error": "Trip not found"}
except Exception as exc:
return {"error": str(exc)}
except Exception:
logger.exception("get_trip_details failed")
return {"error": "An unexpected error occurred while fetching trip details"}
def add_to_itinerary(user, **kwargs):
@@ -422,8 +428,9 @@ def add_to_itinerary(user, **kwargs):
}
except Collection.DoesNotExist:
return {"error": "Trip not found"}
except Exception as exc:
return {"error": str(exc)}
except Exception:
logger.exception("add_to_itinerary failed")
return {"error": "An unexpected error occurred while adding to itinerary"}
def _fetch_temperature_for_date(latitude, longitude, date_value):
@@ -497,8 +504,26 @@ def get_weather(user, **kwargs):
}
except (TypeError, ValueError):
return {"error": "latitude and longitude must be numeric"}
except Exception as exc:
return {"error": str(exc)}
except Exception:
logger.exception("get_weather failed")
return {"error": "An unexpected error occurred while fetching weather data"}
# Per-tool allowlist of keyword arguments that may be forwarded from the
# (untrusted) LLM tool-call payload. execute_tool filters incoming kwargs
# against this mapping before dispatching, so any key not listed here is
# silently dropped rather than passed to the tool function.
ALLOWED_KWARGS = {
    "search_places": {"location", "category", "radius"},
    # list_trips accepts no tool-supplied arguments.
    "list_trips": set(),
    "get_trip_details": {"collection_id"},
    "add_to_itinerary": {
        "collection_id",
        "name",
        "description",
        "latitude",
        "longitude",
        "date",
        "location_address",
    },
    "get_weather": {"latitude", "longitude", "dates"},
}
def execute_tool(tool_name, user, **kwargs):
@@ -514,10 +539,14 @@ def execute_tool(tool_name, user, **kwargs):
if not tool_fn:
return {"error": f"Unknown tool: {tool_name}"}
allowed = ALLOWED_KWARGS.get(tool_name, set())
filtered_kwargs = {k: v for k, v in kwargs.items() if k in allowed}
try:
return tool_fn(user, **kwargs)
except Exception as exc:
return {"error": str(exc)}
return tool_fn(user, **filtered_kwargs)
except Exception:
logger.exception("Tool execution failed: %s", tool_name)
return {"error": "An unexpected error occurred while executing the tool"}
def serialize_tool_result(result):

View File

@@ -1,9 +1,12 @@
import json
import logging
import litellm
from integrations.models import UserAPIKey
logger = logging.getLogger(__name__)
PROVIDER_MODELS = {
"openai": "gpt-4o",
"anthropic": "anthropic/claude-sonnet-4-20250514",
@@ -138,5 +141,6 @@ async def stream_chat_completion(user, messages, provider, tools=None):
yield f"data: {json.dumps(chunk_data)}\n\n"
yield "data: [DONE]\n\n"
except Exception as exc:
yield f"data: {json.dumps({'error': str(exc)})}\n\n"
except Exception:
logger.exception("LLM streaming error")
yield f"data: {json.dumps({'error': 'An error occurred while processing your request. Please try again.'})}\n\n"

View File

@@ -73,6 +73,7 @@ class ChatViewSet(viewsets.ModelViewSet):
@staticmethod
def _merge_tool_call_delta(accumulator, tool_calls_delta):
for idx, tool_call in enumerate(tool_calls_delta or []):
idx = tool_call.get("index", idx)
while len(accumulator) <= idx:
accumulator.append(
{
@@ -119,11 +120,14 @@ class ChatViewSet(viewsets.ModelViewSet):
llm_messages = self._build_llm_messages(conversation, request.user)
MAX_TOOL_ITERATIONS = 10
async def event_stream():
current_messages = list(llm_messages)
encountered_error = False
tool_iterations = 0
while True:
while tool_iterations < MAX_TOOL_ITERATIONS:
content_chunks = []
tool_calls_accumulator = []