Skip to content

Commit f411682

Browse files
committed
--no-edit
1 parent 5ae771f commit f411682

File tree

8 files changed

+237
-20
lines changed

8 files changed

+237
-20
lines changed

frontend/public/wechat-qr.png

194 KB
Loading

langgraph_integration.py

Lines changed: 87 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,87 @@
1+
import os
2+
3+
from typing import Annotated, TypedDict, List
4+
from langgraph.graph import StateGraph, START
5+
from langgraph.graph.message import add_messages
6+
from langchain_openai import ChatOpenAI
7+
from mirix import Mirix
8+
from langchain_core.messages import SystemMessage, HumanMessage, AIMessage
9+
from dotenv import load_dotenv
10+
load_dotenv()
11+
12+
# Configuration
13+
# OPENAI_API_KEY = 'sk-xxx' # Replace with your actual OpenAI API key
14+
# MEM0_API_KEY = 'your-mem0-key' # Replace with your actual Mem0 API key
15+
16+
# Initialize LangChain and Mem0
17+
llm = ChatOpenAI(model="gpt-4o-mini")
18+
API_KEY = os.getenv("AZURE_OPENAI_API_KEY")
19+
20+
mirix_agent = Mirix(
21+
api_key=API_KEY,
22+
model_provider="azure_opena",
23+
config_path="mirix/configs/mirix_azure_example.yaml",
24+
)
25+
26+
class State(TypedDict):
    """Shared LangGraph state flowing between graph nodes."""

    # Conversation transcript; the add_messages reducer appends new
    # messages instead of overwriting the list.
    messages: Annotated[List[HumanMessage | AIMessage], add_messages]
    # Mirix user whose memory this conversation reads from and writes to.
    user_id: str
29+
30+
# Graph builder parameterized by the shared State schema.
graph = StateGraph(State)
32+
def chatbot(state: State):
    """Graph node: answer the newest user message with memory context.

    Builds a Mirix memory-aware system prompt from the latest message,
    invokes the LLM, and (best effort) stores the exchange back into
    memory. Falls back to a memory-free answer on any error.
    """
    history = state["messages"]
    uid = state["user_id"]

    try:
        # System prompt enriched with memories relevant to the latest turn.
        memory_prompt = mirix_agent.construct_system_message(history[-1].content, user_id=uid)
        reply = llm.invoke([memory_prompt] + history)

        # Store the interaction with Mirix; a save failure only logs and
        # never blocks the reply.
        try:
            mirix_agent.add(
                f"User: {history[-1].content}\n\nAssistant: {reply.content}",
                user_id=uid,
            )
        except Exception as e:
            print(f"Error saving memory: {e}")

        return {"messages": [reply]}

    except Exception as e:
        print(f"Error in chatbot: {e}")
        # Fallback response without memory context
        reply = llm.invoke(history)
        return {"messages": [reply]}
58+
59+
# Terminal marker; imported here so this wiring block is self-contained.
from langgraph.graph import END

graph.add_node("chatbot", chatbot)
graph.add_edge(START, "chatbot")
# BUG FIX: was graph.add_edge("chatbot", "chatbot") — a self-loop that
# re-invoked the node on its own output until LangGraph's recursion limit
# aborted the run. A single-turn graph must terminate after the node.
graph.add_edge("chatbot", END)

compiled_graph = graph.compile()
64+
65+
def run_conversation(user_input: str, mem0_user_id: str):
    """Run one user turn through the graph and print the assistant reply.

    Args:
        user_input: Raw text the user typed.
        mem0_user_id: Stable identifier for this user's memory store.
    """
    # BUG FIX: the user name was hard-coded to 'alice' and a brand-new user
    # was created on EVERY call, ignoring mem0_user_id — so memories never
    # accumulated for the caller-supplied identity. Reuse the existing user
    # when one exists.
    # NOTE(review): assumes get_user_by_name returns a falsy value for an
    # unknown name — confirm against the Mirix SDK.
    user = mirix_agent.get_user_by_name(mem0_user_id) or mirix_agent.create_user(user_name=mem0_user_id)

    config = {"configurable": {"thread_id": user.id}}
    state = {"messages": [HumanMessage(content=user_input)], "user_id": user.id}

    for event in compiled_graph.stream(state, config):
        for value in event.values():
            if value.get("messages"):
                print("Customer Support:", value["messages"][-1].content)
                return
77+
78+
if __name__ == "__main__":
    print("Welcome to Customer Support! How can I assist you today?")
    mem0_user_id = "alice"  # You can generate or retrieve this based on your user management system
    while True:
        # ROBUSTNESS: input() raises EOFError when stdin closes (piped
        # input, Ctrl-D) and KeyboardInterrupt on Ctrl-C; exit cleanly
        # instead of crashing with a traceback.
        try:
            user_input = input("You: ")
        except (EOFError, KeyboardInterrupt):
            print()
            print("Customer Support: Thank you for contacting us. Have a great day!")
            break
        if user_input.lower() in ['quit', 'exit', 'bye']:
            print("Customer Support: Thank you for contacting us. Have a great day!")
            break
        run_conversation(user_input, mem0_user_id)
87+

mirix/agent/agent.py

Lines changed: 91 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1437,6 +1437,97 @@ def build_system_prompt(self, retrieved_memories: dict) -> str:
14371437

14381438
return system_prompt
14391439

1440+
def construct_system_message(self, message: str) -> str:
    """Build the full system prompt for *message*, enriched with memories.

    Two-phase process:
      1. Ask the LLM — via a forced `update_topic` tool call — to extract
         the topic(s) the user is focusing on in *message*; multiple topics
         come back as one ';'-separated string. Any failure degrades
         gracefully to ``topics = None``.
      2. Read the agent's raw system prompt from its in-context messages and
         combine it with topic-relevant memories via
         ``build_system_prompt_with_memories``.

    Args:
        message: The latest user message to ground the prompt on.

    Returns:
        The complete system prompt string (raw system prompt + memories).
    """
    # Step 1: extract topic from "message"
    topics = None

    try:
        # Create temporary messages for topic extraction
        temporary_messages = [prepare_input_message_create(MessageCreate(
            role=MessageRole.user,
            content=message,
        ), self.agent_state.id, wrap_user_message=False, wrap_system_message=True)]

        # Instruction turn telling the model to call `update_topic`.
        temporary_messages.append(prepare_input_message_create(MessageCreate(
            role=MessageRole.user,
            content="The above are the inputs from the user, please look at these content and extract the topic (brief description of what the user is focusing on) from these content. If there are multiple focuses in these content, then extract them all and put them into one string separated by ';'. Call the function `update_topic` to update the topic with the extracted topics.",
        ), self.agent_state.id, wrap_user_message=False, wrap_system_message=True))

        # Prepend a minimal system message for this throwaway extraction call.
        temporary_messages = [
            prepare_input_message_create(MessageCreate(
                role=MessageRole.system,
                content="You are a helpful assistant that extracts the topic from the user's input.",
            ), self.agent_state.id, wrap_user_message=False, wrap_system_message=True),
        ] + temporary_messages

        # Define the function for topic extraction
        functions = [{
            'name': 'update_topic',
            'description': "Update the topic of the conversation/content. The topic will be used for retrieving relevant information from the database",
            'parameters': {
                'type': 'object',
                'properties': {
                    'topic': {
                        'type': 'string',
                        'description': 'The topic of the current conversation/content. If there are multiple topics then separate them with ";".'}
                },
                'required': ['topic']
            },
        }]

        # Use LLMClient to extract topics
        llm_client = LLMClient.create(
            llm_config=self.agent_state.llm_config,
            put_inner_thoughts_first=True,
        )

        if llm_client:
            # force_tool_call pins the response to an `update_topic` call
            # rather than free-form text.
            response = llm_client.send_llm_request(
                messages=temporary_messages,
                tools=functions,
                stream=False,
                force_tool_call='update_topic',
            )
        else:
            # Fallback to existing create function
            response = create(
                llm_config=self.agent_state.llm_config,
                messages=temporary_messages,
                functions=functions,
                force_tool_call='update_topic',
            )

        # Extract topics from the response
        for choice in response.choices:
            if hasattr(choice.message, 'tool_calls') and choice.message.tool_calls is not None and len(choice.message.tool_calls) > 0:
                try:
                    # Tool-call arguments arrive as a JSON string; take the
                    # first tool call's 'topic' field and stop.
                    function_args = json.loads(choice.message.tool_calls[0].function.arguments)
                    topics = function_args.get('topic')
                    self.logger.info(f"Extracted topics: {topics}")
                    break
                except (json.JSONDecodeError, KeyError) as parse_error:
                    # Malformed arguments on this choice; try the next one.
                    self.logger.warning(f"Failed to parse topic extraction response: {parse_error}")
                    continue

    except Exception as e:
        # Best effort: topic extraction is an enhancement, not a requirement.
        self.logger.info(f"Error in extracting the topic from the message: {e}")
        topics = None

    # Step 2: build system prompt with topic
    # Get the raw system prompt
    in_context_messages = self.agent_manager.get_in_context_messages(agent_id=self.agent_state.id, actor=self.user)
    raw_system = in_context_messages[0].content[0].text if in_context_messages and in_context_messages[0].role == MessageRole.system else ""

    # Build the complete system prompt with memories
    complete_system_prompt, _ = self.build_system_prompt_with_memories(
        raw_system=raw_system,
        topics=topics
    )

    return complete_system_prompt
1530+
14401531
def inner_step(
14411532
self,
14421533
first_input_messge: Message,

mirix/agent/agent_wrapper.py

Lines changed: 13 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -262,7 +262,17 @@ def __init__(self, agent_config_file, load_from=None):
262262

263263
# For GEMINI models, extract all unprocessed images and fill temporary_messages
264264
if self.model_name in GEMINI_MODELS and self.google_client is not None:
265-
self._process_existing_uploaded_files()
265+
self._process_existing_uploaded_files(user_id=self.client.user.id)
266+
267+
def construct_system_message(self, message: str, user_id: str) -> str:
    """Build a memory-grounded system prompt for *message* on behalf of *user_id*.

    Delegates to the underlying client, targeting this wrapper's chat agent.
    """
    chat_agent_id = self.agent_states.agent_state.id
    return self.client.construct_system_message(
        agent_id=chat_agent_id,
        message=message,
        user_id=user_id,
    )
266276

267277
def update_chat_agent_system_prompt(self, is_screen_monitoring: bool):
268278
'''
@@ -641,7 +651,7 @@ def _check_and_connect_gmail(self):
641651
self.logger.warning(f"Error checking Gmail credentials: {e}")
642652
return False
643653

644-
def _process_existing_uploaded_files(self):
654+
def _process_existing_uploaded_files(self, user_id: str):
645655
"""Process any existing uploaded files for Gemini models."""
646656
uploaded_mappings = self.client.server.cloud_file_mapping_manager.list_files_with_status(status='uploaded')
647657

@@ -2028,7 +2038,7 @@ def _complete_gemini_initialization(self) -> bool:
20282038
self.temp_message_accumulator.uri_to_create_time = self.uri_to_create_time
20292039

20302040
# Process existing uploaded files
2031-
self._process_existing_uploaded_files()
2041+
self._process_existing_uploaded_files(user_id=self.client.user.id)
20322042

20332043
self.logger.info("Gemini initialization completed successfully!")
20342044
return True

mirix/client/client.py

Lines changed: 6 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1059,6 +1059,12 @@ def get_in_context_messages(self, agent_id: str) -> List[Message]:
10591059

10601060
# agent interactions
10611061

1062+
def construct_system_message(self, agent_id: str, message: str, user_id: str) -> str:
    """Ask the server to build a memory-aware system prompt for *message*.

    Resolves *user_id* to a full User record first, since the server expects
    the acting user object rather than a bare id.
    """
    actor = self.server.user_manager.get_user_by_id(user_id)
    return self.server.construct_system_message(
        agent_id=agent_id,
        message=message,
        actor=actor,
    )
1067+
10621068
def send_messages(
10631069
self,
10641070
agent_id: str,

mirix/functions/function_sets/memory_tools.py

Lines changed: 22 additions & 17 deletions
Original file line numberDiff line numberDiff line change
@@ -524,24 +524,29 @@ def trigger_memory_update(self: "Agent", user_message: object, memory_types: Lis
524524
responses = []
525525
overall_start = time.time()
526526

527-
# Use ThreadPoolExecutor for parallel processing
528-
with ThreadPoolExecutor(max_workers=len(valid_agent_types)) as pool:
529-
futures = []
530-
for agent_type in valid_agent_types:
531-
matching_agents = [agent for agent in agents if agent.agent_type == agent_type]
532-
if not matching_agents:
533-
raise ValueError(f"No agent found with type '{agent_type}'")
534-
futures.append(
535-
pool.submit(message_queue.send_message_in_queue,
536-
client, matching_agents[0].id, payloads, agent_type)
537-
)
538-
539-
for future in tqdm(as_completed(futures), total=len(futures)):
540-
response, agent_type = future.result()
541-
responses.append(response)
527+
if len(valid_agent_types) > 0:
528+
529+
# Use ThreadPoolExecutor for parallel processing
530+
with ThreadPoolExecutor(max_workers=len(valid_agent_types)) as pool:
531+
futures = []
532+
for agent_type in valid_agent_types:
533+
matching_agents = [agent for agent in agents if agent.agent_type == agent_type]
534+
if not matching_agents:
535+
raise ValueError(f"No agent found with type '{agent_type}'")
536+
futures.append(
537+
pool.submit(message_queue.send_message_in_queue,
538+
client, matching_agents[0].id, payloads, agent_type)
539+
)
540+
541+
for future in tqdm(as_completed(futures), total=len(futures)):
542+
response, agent_type = future.result()
543+
responses.append(response)
544+
545+
overall_end = time.time()
546+
response_message = f'[System Message] {len(valid_agent_types)} memory agents have been triggered in parallel to update the memory. Total time: {overall_end - overall_start:.2f} seconds.'
547+
else:
548+
response_message = '[System Message] Valid agent types are empty. No memory agents have been triggered to update the memory.'
542549

543-
overall_end = time.time()
544-
response_message = f'[System Message] {len(valid_agent_types)} memory agents have been triggered in parallel to update the memory. Total time: {overall_end - overall_start:.2f} seconds.'
545550
return response_message
546551

547552
else:

mirix/sdk.py

Lines changed: 7 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -162,6 +162,13 @@ def list_users(self) -> Dict[str, Any]:
162162
users = self._agent.client.server.user_manager.list_users()
163163
return users
164164

165+
166+
def construct_system_message(self, message: str, user_id: str) -> str:
    """Return a memory-enriched system prompt for *message*, scoped to *user_id*."""
    wrapped_agent = self._agent
    return wrapped_agent.construct_system_message(message, user_id)
171+
165172
def get_user_by_name(self, user_name: str):
166173
"""
167174
Get a user by their name.

mirix/server/server.py

Lines changed: 11 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -831,6 +831,17 @@ def system_message(
831831
# Run the agent state forward
832832
return self._step(actor=actor, agent_id=agent_id, input_messages=message)
833833

834+
def construct_system_message(self, agent_id: str, message: str, actor: User) -> str:
    """Build the memory-aware system prompt for *message* via the target agent.

    Args:
        agent_id: Id of the agent whose memory/context should be used.
        message: The latest user message to ground the prompt on.
        actor: The User on whose behalf the agent is loaded.

    Returns:
        The fully constructed system prompt string.

    Raises:
        KeyError: If the agent cannot be loaded for this actor.
    """
    logger.debug(f"Got message: {message}")
    # DEAD CODE REMOVED: `mirix_agent = None` was assigned and then
    # immediately overwritten by load_agent() on the next line.
    mirix_agent = self.load_agent(agent_id=agent_id, actor=actor)
    if mirix_agent is None:
        raise KeyError(f"Agent (user={actor.id}, agent={agent_id}) is not loaded")
    return mirix_agent.construct_system_message(message=message)
844+
834845
def send_messages(
835846
self,
836847
actor: User,

0 commit comments

Comments
 (0)