ChatServicer

Bases: QueryInterface

Source code in llm_utils/aiweb_common/generate/ChatServicer.py
 7
 8
 9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
class ChatServicer(QueryInterface):
    """Chat-oriented query servicer that pipes an assembled chat template
    into the language model interface supplied by the base class."""

    def __init__(self, language_model_interface, prompt):
        super().__init__(language_model_interface)
        # Build the system chat template once, up front, so every call to
        # generate_langchain_response reuses the same assembled template.
        self.assembled_system_chat_template = self.preparer.assemble_chat_template(
            prompt=prompt
        )

    def generate_langchain_response(self, messages):
        """
        Generate a response from the language model, supporting image update triggers.

        Args:
            messages (List[Message]): The chat history/messages.

        Returns:
            Tuple[str|dict, Any]: The response content (str or dict if image update)
            and metadata.
        """
        pipeline = self.assembled_system_chat_template | self.language_model_interface
        with get_openai_callback() as response_meta:
            result = pipeline.invoke({"messages": messages})

        # Convention: a non-None 'image_update' attribute on the response means
        # the caller gets a dict carrying both the text and the image payload.
        image_update = getattr(result, "image_update", None)
        if image_update is not None:
            return {"text": result.content, "image_update": image_update}, response_meta
        return result.content, response_meta

    def update_history(self, message, chat_history):
        """Append *message* to *chat_history*, wrapped in the LangChain
        message type matching its role; other roles are ignored."""
        wrapper_by_role = {"ai": AIMessage, "human": HumanMessage}
        wrapper = wrapper_by_role.get(message.role)
        if wrapper is not None:
            chat_history.append(wrapper(content=message.content))
        return chat_history

generate_langchain_response(messages)

Generate a response from the language model, supporting image update triggers.

Parameters:
  • messages (List[Message]) –

    The chat history/messages.

Returns:
  • Tuple[str|dict, Any]: The response content (str or dict if image update) and metadata.

Source code in llm_utils/aiweb_common/generate/ChatServicer.py
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
def generate_langchain_response(self, messages):
    """
    Generate a response from the language model, supporting image update triggers.

    Args:
        messages (List[Message]): The chat history/messages.

    Returns:
        Tuple[str|dict, Any]: The response content (str or dict if image update)
        and metadata.
    """
    pipeline = self.assembled_system_chat_template | self.language_model_interface
    with get_openai_callback() as response_meta:
        result = pipeline.invoke({"messages": messages})

    # Convention: a non-None 'image_update' attribute on the response means
    # the caller gets a dict carrying both the text and the image payload.
    image_update = getattr(result, "image_update", None)
    if image_update is not None:
        return {"text": result.content, "image_update": image_update}, response_meta
    return result.content, response_meta