Generate a chat response using the language model and return a ChatResponse object.
**Parameters:**

- `messages` (`List[Message]`) – The chat history/messages.

**Returns:**

- `ChatResponse` – The response object containing the AI's reply as a `Message`.
Source code in llm_utils/aiweb_common/generate/ChatResponse.py
(lines 11–37)
def generate_response(self, messages):
    """
    Generate a chat response using the language model and return a ChatResponse object.

    Args:
        messages (List[Message]): The chat history/messages.

    Returns:
        ChatResponse: The response object containing the AI's reply as a Message.
    """
    # Function-scope imports, grouped together: json is stdlib; ChatSchemas is
    # imported lazily here (presumably to avoid a circular import — TODO confirm).
    import json

    from aiweb_common.generate.ChatSchemas import ChatResponse, Message

    # Get the raw response and metadata from the chat service.
    # NOTE(review): response_meta is currently unused; kept because the
    # service returns a (content, meta) pair.
    response_content, response_meta = self.chat_service.generate_langchain_response(messages)

    # Convention: if the response is a dict carrying a "text" key (e.g.
    # {"text": "...", "image_update": {...}}), serialize the whole dict to a
    # JSON string so any image update survives inside Message.content.
    if isinstance(response_content, dict) and "text" in response_content:
        content = json.dumps(response_content)
    else:
        content = response_content

    # Wrap the reply as an AI-role Message and return it in a ChatResponse.
    message = Message(role="ai", content=content)
    return ChatResponse(response=message)