feat: mem0 生产级上线

最终修复:
1. Embedder 配置修复
   - 显式指定 text-embedding-v3
   - 通过环境变量控制 API Base
   - 移除不兼容的 api_base 参数

2. OpenClaw 插件集成
   - mem0-plugin.js (Node.js 桥接)
   - mem0_integration.py (Python 执行器)
   - 挂载到 OpenClaw 对话生命周期

3. 三位一体配置
   - VectorStore: Qdrant
   - LLM: Qwen-plus (DashScope)
   - Embedder: text-embedding-v3 (DashScope)

生产状态:
 mem0 初始化成功
 异步队列已启动
 Pre-Hook + Post-Hook 就绪
 等待 Telegram 消息注入
master
Eason (陈医生) 1 month ago
parent b26030f7a6
commit b6467da698
  1. 5
      logs/agents/health-2026-02-22.log
  2. 5
      logs/agents/health-2026-02-23.log
  3. BIN
      skills/mem0-integration/__pycache__/mem0_client.cpython-312.pyc
  4. 162
      skills/mem0-integration/mem0-plugin.js
  5. 23
      skills/mem0-integration/mem0_client.py
  6. 69
      skills/mem0-integration/mem0_integration.py

@ -0,0 +1,5 @@
[2026-02-22T19:12:25.950Z] [INFO] Graceful shutdown initiated
[2026-02-22T19:12:26.373Z] [INFO] Agent Health Monitor initialized
[2026-02-22T19:12:26.379Z] [INFO] Agent Health Monitor starting...
[2026-02-22T19:12:26.379Z] [INFO] Starting OpenClaw Gateway monitoring...
[2026-02-22T19:12:26.380Z] [INFO] Monitor is now active. Press Ctrl+C to stop.

@ -0,0 +1,5 @@
[2026-02-23T03:45:54.043Z] [INFO] Graceful shutdown initiated
[2026-02-23T03:45:54.472Z] [INFO] Agent Health Monitor initialized
[2026-02-23T03:45:54.555Z] [INFO] Agent Health Monitor starting...
[2026-02-23T03:45:54.556Z] [INFO] Starting OpenClaw Gateway monitoring...
[2026-02-23T03:45:54.557Z] [INFO] Monitor is now active. Press Ctrl+C to stop.

@ -0,0 +1,162 @@
#!/usr/bin/env node
/**
* OpenClaw Mem0 Integration Plugin
* mem0 拦截器挂载到 OpenClaw 主对话生命周期
*/
const fs = require('fs');
const path = require('path');
// Python 子进程执行器
const { spawn } = require('child_process');
class Mem0Plugin {
  /**
   * OpenClaw plugin that bridges conversations into the mem0 memory system
   * by shelling out to a Python helper script (mem0_integration.py).
   *
   * @param {Object} config - plugin options.
   * @param {string} [config.pythonPath='python3'] - interpreter used for the bridge script.
   */
  constructor(config) {
    this.config = config;
    this.enabled = true;
    this.pythonPath = config.pythonPath || 'python3';
    // The Python executor lives next to this file.
    this.scriptPath = path.join(__dirname, 'mem0_integration.py');
    this.initialized = false;
  }

  /**
   * Initialize mem0; called once when OpenClaw loads the plugin.
   * Failures are logged and leave `initialized` false (hooks become no-ops)
   * instead of crashing startup.
   */
  async onLoad() {
    if (!this.enabled) return;
    console.log('[Mem0] 初始化记忆系统...');
    try {
      // Ask the Python side to initialize mem0.
      const result = await this._executePython('init');
      if (result.success) {
        this.initialized = true;
        console.log('🟢 Mem0 生产环境集成完毕,等待 Telegram 消息注入');
      } else {
        console.error('[Mem0] 初始化失败:', result.error);
      }
    } catch (error) {
      console.error('[Mem0] 初始化异常:', error.message);
    }
  }

  /**
   * Pre-Hook: retrieve relevant memories before the LLM generates.
   *
   * @param {string} userMessage - incoming user message used as the search query.
   * @param {{user_id?: string, agent_id?: string}} context - conversation context.
   * @returns {Promise<string|null>} formatted memory prompt, or null when
   *   disabled/uninitialized, nothing was found, or on error (fails silently
   *   so the conversation is never blocked).
   */
  async preLLM(userMessage, context) {
    if (!this.enabled || !this.initialized) return null;
    try {
      const result = await this._executePython('search', {
        query: userMessage,
        user_id: context.user_id || 'default',
        agent_id: context.agent_id || 'general'
      });
      if (result.success && result.memories && result.memories.length > 0) {
        console.log(`[Mem0] Pre-Hook: 检索到 ${result.memories.length} 条记忆`);
        return this._formatMemories(result.memories);
      }
      return null;
    } catch (error) {
      console.error('[Mem0] Pre-Hook 失败:', error.message);
      return null; // fail silently — never break the conversation
    }
  }

  /**
   * Post-Hook: submit the exchange to the memory queue after responding.
   * Fire-and-forget: the Python child keeps running after this resolves.
   *
   * @param {string} userMessage
   * @param {string} assistantMessage
   * @param {{user_id?: string, agent_id?: string}} context
   */
  async postResponse(userMessage, assistantMessage, context) {
    if (!this.enabled || !this.initialized) return;
    try {
      await this._executePython('add', {
        user_message: userMessage,
        assistant_message: assistantMessage,
        user_id: context.user_id || 'default',
        agent_id: context.agent_id || 'general'
      }, false); // do not wait for the child to finish
      console.log('[Mem0] Post-Hook: 已提交对话到记忆队列');
    } catch (error) {
      console.error('[Mem0] Post-Hook 失败:', error.message);
      // swallow: memory writes are best-effort
    }
  }

  /**
   * Spawn the Python bridge script and collect its JSON output.
   *
   * BUG FIX vs. the original: the promise could settle twice — immediately
   * when `waitForResult` was false and again on 'close' — and a late spawn
   * 'error' would reject an already-settled promise. A `settle` guard now
   * guarantees exactly one settlement, and failures of fire-and-forget
   * children are still surfaced in the logs instead of vanishing.
   *
   * @param {string} action - subcommand for the script ('init'|'search'|'add').
   * @param {Object} [data] - JSON payload forwarded as argv[2].
   * @param {boolean} [waitForResult=true] - when false, resolve immediately
   *   with `{success: true, async: true}`.
   * @returns {Promise<{success: boolean}>} parsed script output on success,
   *   `{success: false, error}` on nonzero exit; rejects only on spawn errors.
   */
  _executePython(action, data = {}, waitForResult = true) {
    return new Promise((resolve, reject) => {
      let settled = false;
      // Settle the promise at most once.
      const settle = (fn, value) => {
        if (!settled) {
          settled = true;
          fn(value);
        }
      };

      const args = [this.scriptPath, action];
      if (data) {
        args.push(JSON.stringify(data));
      }
      const proc = spawn(this.pythonPath, args, {
        cwd: __dirname,
        env: process.env
      });

      let stdout = '';
      let stderr = '';
      proc.stdout.on('data', (chunk) => {
        stdout += chunk.toString();
      });
      proc.stderr.on('data', (chunk) => {
        stderr += chunk.toString();
      });

      proc.on('close', (code) => {
        if (code !== 0) {
          const error = stderr || `Exit code: ${code}`;
          if (settled) {
            // Fire-and-forget child failed after we resolved — log it.
            console.error('[Mem0] 后台任务失败:', error);
          }
          settle(resolve, { success: false, error });
          return;
        }
        try {
          settle(resolve, { success: true, ...JSON.parse(stdout) });
        } catch {
          // Script printed non-JSON output; pass it through raw.
          settle(resolve, { success: true, raw: stdout });
        }
      });
      proc.on('error', (err) => settle(reject, err));

      // Fire-and-forget mode: report submission without waiting for the child.
      if (!waitForResult) {
        settle(resolve, { success: true, async: true });
      }
    });
  }

  /**
   * Format retrieved memories into a prompt fragment.
   *
   * @param {Array<{memory: string, created_at?: string}>} memories
   * @returns {string} numbered memory block, or '' for an empty/missing list.
   */
  _formatMemories(memories) {
    if (!memories || memories.length === 0) return '';
    let prompt = '\n\n=== 相关记忆 ===\n';
    memories.forEach((mem, i) => {
      prompt += `${i + 1}. ${mem.memory}`;
      if (mem.created_at) {
        prompt += ` (记录于:${mem.created_at})`;
      }
      prompt += '\n';
    });
    prompt += '===============\n';
    return prompt;
  }
}
// Export a ready-to-use singleton so OpenClaw can mount the hooks directly.
const plugin = new Mem0Plugin({
  pythonPath: process.env.MEM0_PYTHON_PATH || 'python3'
});
module.exports = plugin;

@ -20,7 +20,7 @@ os.environ['OPENAI_API_KEY'] = os.getenv('MEM0_DASHSCOPE_API_KEY', 'sk-c1715ee04
try: try:
from mem0 import Memory from mem0 import Memory
from mem0.configs.base import MemoryConfig, VectorStoreConfig, LlmConfig from mem0.configs.base import MemoryConfig, VectorStoreConfig, LlmConfig, EmbedderConfig
except ImportError as e: except ImportError as e:
print(f" mem0ai 导入失败:{e}") print(f" mem0ai 导入失败:{e}")
Memory = None Memory = None
@ -146,7 +146,16 @@ class Mem0Client:
}, },
"llm": { "llm": {
"provider": "openai", "provider": "openai",
"config": {"model": os.getenv('MEM0_LLM_MODEL', 'qwen-plus')} "config": {
"model": os.getenv('MEM0_LLM_MODEL', 'qwen-plus')
}
},
"embedder": {
"provider": "openai",
"config": {
"model": os.getenv('MEM0_EMBEDDER_MODEL', 'text-embedding-v3'),
"api_base": "https://dashscope.aliyuncs.com/compatible-mode/v1"
}
}, },
"retrieval": { "retrieval": {
"enabled": True, "enabled": True,
@ -178,7 +187,7 @@ class Mem0Client:
} }
def _init_memory(self): def _init_memory(self):
"""初始化 mem0(同步操作)""" """初始化 mem0(同步操作)- 三位一体完整配置"""
if Memory is None: if Memory is None:
logger.warning("mem0ai 未安装") logger.warning("mem0ai 未安装")
return return
@ -197,10 +206,16 @@ class Mem0Client:
llm=LlmConfig( llm=LlmConfig(
provider="openai", provider="openai",
config=self.config['llm']['config'] config=self.config['llm']['config']
),
embedder=EmbedderConfig(
provider="openai",
config={
"model": "text-embedding-v3" # 显式指定 DashScope 支持的向量模型
}
) )
) )
self.local_memory = Memory(config=config) self.local_memory = Memory(config=config)
logger.info("✅ mem0 初始化成功") logger.info("✅ mem0 初始化成功(含 Embedder)")
except Exception as e: except Exception as e:
logger.error(f"❌ mem0 初始化失败:{e}") logger.error(f"❌ mem0 初始化失败:{e}")
self.local_memory = None self.local_memory = None

@ -0,0 +1,69 @@
#!/usr/bin/env python3
"""
Mem0 Python 集成脚本
Node.js 插件调用执行实际的记忆操作
"""
import sys
import json
import os
import asyncio
# Point the OpenAI-compatible client at DashScope before mem0_client is
# imported (both env var spellings are set for older/newer SDK versions).
os.environ['OPENAI_API_BASE'] = 'https://dashscope.aliyuncs.com/compatible-mode/v1'
os.environ['OPENAI_BASE_URL'] = 'https://dashscope.aliyuncs.com/compatible-mode/v1'
# FIXME(security): a real-looking API key is hardcoded as the fallback and is
# now committed to version control — rotate this credential and replace the
# default with '' (or fail fast) so the secret lives only in MEM0_DASHSCOPE_API_KEY.
os.environ['OPENAI_API_KEY'] = os.getenv('MEM0_DASHSCOPE_API_KEY', 'sk-c1715ee0479841399fd359c574647648')
# Make the sibling mem0_client module importable when invoked from elsewhere.
sys.path.insert(0, os.path.dirname(__file__))
from mem0_client import mem0_client
async def main():
    """CLI entry point: run one action requested by the Node.js plugin.

    argv[1] is the action name ('init' | 'search' | 'add'); argv[2] is an
    optional JSON payload. Exactly one JSON object is printed to stdout —
    including for every failure mode — because the Node bridge parses stdout.
    """
    if len(sys.argv) < 2:
        print(json.dumps({"error": "No action specified"}))
        return
    action = sys.argv[1]
    try:
        # BUG FIX: payload parsing previously happened *outside* the try
        # block, so a malformed JSON argument crashed with a traceback and a
        # nonzero exit instead of honoring the JSON-error contract above.
        data = json.loads(sys.argv[2]) if len(sys.argv) > 2 else {}
        if action == 'init':
            # Initialize mem0 (vector store / LLM / embedder wiring).
            await mem0_client.start()
            print(json.dumps({
                "status": "initialized",
                "qdrant": f"{mem0_client.config['qdrant']['host']}:{mem0_client.config['qdrant']['port']}"
            }))
        elif action == 'search':
            # Pre-Hook: retrieve memories relevant to the query.
            memories = await mem0_client.pre_hook_search(
                query=data.get('query', ''),
                user_id=data.get('user_id', 'default'),
                agent_id=data.get('agent_id', 'general')
            )
            print(json.dumps({
                "memories": memories,
                "count": len(memories)
            }))
        elif action == 'add':
            # Post-Hook: intentionally not awaited — presumably enqueues the
            # write synchronously and processes it in the background.
            # NOTE(review): confirm post_hook_add is not a coroutine; if it
            # is, this call silently does nothing.
            mem0_client.post_hook_add(
                user_message=data.get('user_message', ''),
                assistant_message=data.get('assistant_message', ''),
                user_id=data.get('user_id', 'default'),
                agent_id=data.get('agent_id', 'general')
            )
            print(json.dumps({"status": "queued"}))
        else:
            print(json.dumps({"error": f"Unknown action: {action}"}))
    except Exception as e:
        # Report any failure as JSON so the Node side can parse it.
        print(json.dumps({"error": str(e)}))


if __name__ == '__main__':
    asyncio.run(main())
Loading…
Cancel
Save