openai-agents Memory Persistence (Neo4j)
Table of Contents
- Environment Setup
- Model Configuration
- Memory Configuration
- Testing
Environment Setup

mem0ai[graph]

Install:

```bash
uv pip install "mem0ai[graph]"
```
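The code below also imports the openai-agents SDK and python-dotenv, which are not pulled in by mem0. If they are not already in your environment, they can be installed the same way (package names assumed to be the standard PyPI ones):

```bash
uv pip install openai-agents python-dotenv
```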
Start the Neo4j database with Docker:

```bash
docker run \
  -p 7474:7474 -p 7687:7687 \
  -e NEO4J_AUTH=neo4j/password \
  neo4j:5
```
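Before wiring the database into mem0, a quick connectivity check can save debugging time. This is a minimal sketch using the official neo4j Python driver (assumed to be installed, e.g. `uv pip install neo4j`) and the credentials from the command above:

```python
from neo4j import GraphDatabase

# Credentials match the NEO4J_AUTH value passed to docker run above.
driver = GraphDatabase.driver("bolt://localhost:7687", auth=("neo4j", "password"))
driver.verify_connectivity()  # raises an exception if the database is unreachable
print("Neo4j is up")
driver.close()
```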
Model Configuration
```python
from dotenv import load_dotenv
import os
from openai import AsyncOpenAI
from agents import OpenAIChatCompletionsModel, set_tracing_disabled, Agent, Runner

load_dotenv()

# DashScope (Qwen) credentials and model name, read from the .env file
QWEN_API_KEY = os.getenv("QWEN_API_KEY")
QWEN_BASE_URL = os.getenv("QWEN_BASE_URL")
QWEN_MODEL_NAME = os.getenv("QWEN_MODEL_NAME")

# OpenAI-compatible async client pointed at the Qwen endpoint
client = AsyncOpenAI(base_url=QWEN_BASE_URL, api_key=QWEN_API_KEY)
set_tracing_disabled(disabled=True)
qwen_model = OpenAIChatCompletionsModel(model=QWEN_MODEL_NAME, openai_client=client)
```
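The snippet above expects QWEN_API_KEY, QWEN_BASE_URL, and QWEN_MODEL_NAME in a local .env file. A minimal example is sketched below; the key is a placeholder, and the base URL and model name simply reuse the values that appear in the Memory configuration later in this post:

```
QWEN_API_KEY=sk-xxxxxxxx
QWEN_BASE_URL=https://dashscope.aliyuncs.com/compatible-mode/v1
QWEN_MODEL_NAME=qwen-plus
```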
Memory Configuration
```python
from mem0 import Memory

config = {
    # Embedding model served through DashScope's OpenAI-compatible endpoint
    "embedder": {
        "provider": "openai",
        "config": {
            "model": "text-embedding-v2",
            "embedding_dims": 1536,
            "api_key": QWEN_API_KEY,
            "openai_base_url": "https://dashscope.aliyuncs.com/compatible-mode/v1",
        },
    },
    # Neo4j instance started above, used as the graph store
    "graph_store": {
        "provider": "neo4j",
        "config": {
            "url": "bolt://localhost:7687",
            "username": "neo4j",
            "password": "password",
        },
    },
    # LLM used by mem0 to extract and update memories
    "llm": {
        "provider": "openai",
        "config": {
            "model": "qwen-plus",
            "api_key": QWEN_API_KEY,
            "openai_base_url": "https://dashscope.aliyuncs.com/compatible-mode/v1",
        },
    },
}

memory = Memory.from_config(config_dict=config)
```
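Before connecting memory to an agent, a quick standalone round trip (my own sketch, not part of the original notebook) confirms that add and search work against the Neo4j-backed store; it assumes the container and API key configured above:

```python
# Store a fact for a user, then retrieve it with a semantic query.
memory.add("Bob lives in Shanghai and enjoys hiking.", user_id="bob")

results = memory.search("Where does Bob live?", user_id="bob", limit=3)
for entry in results["results"]:
    print("-", entry["memory"])
```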
Testing
```python
import asyncio


async def chat_with_memories(message: str, user_id: str = "default_user") -> str:
    # Retrieve relevant memories
    relevant_memories = memory.search(query=message, user_id=user_id, limit=3)
    memories_str = "\n".join(f"- {entry['memory']}" for entry in relevant_memories["results"])

    # Generate assistant response
    system_prompt = (
        "You are a helpful AI. Answer the question based on query and memories.\n"
        f"User Memories:\n{memories_str}"
    )
    messages = [
        {"role": "system", "content": system_prompt},
        {"role": "user", "content": message},
    ]
    agent = Agent(
        name="Assistant",
        instructions=system_prompt,
        model=OpenAIChatCompletionsModel(model=QWEN_MODEL_NAME, openai_client=client),
    )
    assistant_response = await Runner.run(agent, message)

    # Create new memories from the conversation
    messages.append({"role": "assistant", "content": assistant_response.final_output})
    memory.add(messages, user_id=user_id)

    return assistant_response.final_output


async def run_example_conversations():
    # Example 1: Simple greeting
    print("=== Example 1: Simple Greeting ===")
    response1 = await chat_with_memories("Hello, how are you? My name is Bob")
    print("User: Hello, how are you?")
    print(f"AI: {response1}\n")

    # Example 2: Follow-up question
    print("=== Example 2: Follow-up Question ===")
    response2 = await chat_with_memories("What's my name?")
    print("User: What's my name?")
    print(f"AI: {response2}\n")


await run_example_conversations()
```
Sample output:

```text
=== Example 1: Simple Greeting ===
User: Hello, how are you?
AI: Hello, Bob! I'm just a virtual assistant, so I don't have feelings, but I'm here and ready to help you with whatever you need. How can I assist you today?

=== Example 2: Follow-up Question ===
User: What's my name?
AI: Your name is Bob.
```
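To see what was actually persisted for the user after the test run, the stored memories can be listed directly. This is a sketch: depending on the mem0 version, get_all may return a plain list instead of a dict with a "results" key. The Neo4j Browser at http://localhost:7474 also lets you inspect the extracted entities and relations in the graph.

```python
stored = memory.get_all(user_id="default_user")
for entry in stored["results"]:
    print("-", entry["memory"])
```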
Code: https://github.com/zhouruiliangxian/Awesome-demo/blob/main/AgentsTest/openai-agents-test/ipynbs/memory_men0.ipynb