# NOTE(review): nesting restored — the original file had lost all indentation,
# which makes a Compose file unparseable.
version: '3.8'

services:
  graphiti:
    image: python:3.11-alpine
    container_name: graphiti-mcp
    ports:
      # Host port 999 -> container port 8000 (uvicorn listens on 8000 inside).
      - "999:8000"
    environment:
      # ==========================================================================
      # AI provider configuration - uncomment exactly ONE pair and add your key.
      # ==========================================================================
      # OpenAI (official)
      # - OPENAI_API_KEY=sk-your-openai-api-key-here
      # - OPENAI_BASE_URL=https://api.openai.com/v1
      # Anthropic Claude (needs an OpenAI-compatible translation layer)
      # - ANTHROPIC_API_KEY=your-anthropic-api-key-here
      # Locally deployed Ollama
      # NOTE(review): inside a container, localhost is the container itself —
      # to reach Ollama on the host this likely needs host.docker.internal; confirm.
      # - OPENAI_API_KEY=ollama
      # - OPENAI_BASE_URL=http://localhost:11434/v1
      # DeepSeek (recommended - cheap and capable) — currently active
      - OPENAI_API_KEY=sk-your-deepseek-api-key-here
      - OPENAI_BASE_URL=https://api.deepseek.com
      # Moonshot Kimi
      # - OPENAI_API_KEY=your-kimi-api-key-here
      # - OPENAI_BASE_URL=https://api.moonshot.cn/v1
      # Alibaba Cloud DashScope (Qwen)
      # - OPENAI_API_KEY=sk-your-dashscope-api-key-here
      # - OPENAI_BASE_URL=https://dashscope.aliyuncs.com/compatible-mode/v1
      # Tencent Cloud Hunyuan
      # - OPENAI_API_KEY=your-hunyuan-api-key-here
      # - OPENAI_BASE_URL=https://hunyuan.tencentcloudapi.com/v1
    volumes:
      # Named volume for persisted memory data (declared at the bottom of the file).
      - graphiti_data:/app/data
    working_dir: /app
command: |
sh -c "
echo '🚀 Graphiti MCP Server 安裝中...' &&
pip install fastapi uvicorn graphiti-core requests &&
cat > app.py << 'EOF'
from fastapi import FastAPI, HTTPException, Body
from fastapi.middleware.cors import CORSMiddleware
import uvicorn
import os
import json
from datetime import datetime
from typing import Dict, Any
# FastAPI application object; serves auto-generated OpenAPI docs at /docs.
app = FastAPI(
title='Graphiti MCP Server',
version='1.0.0',
description='個人 AI 記憶庫 - 支持多個 AI 提供商'
)
# Allow cross-origin requests from any origin.
# NOTE(review): per the CORS spec, browsers reject credentialed responses that
# use a wildcard Access-Control-Allow-Origin; allow_origins=['*'] together with
# allow_credentials=True may not behave as intended — confirm whether
# credentials are actually required here.
app.add_middleware(
CORSMiddleware,
allow_origins=['*'],
allow_credentials=True,
allow_methods=['*'],
allow_headers=['*'],
)
def get_ai_provider():
    """Identify the configured AI provider from the OPENAI_BASE_URL env var.

    Returns a human-readable provider name; falls back to '未知提供商'
    (unknown provider) when the URL matches none of the known patterns.
    """
    base_url = os.getenv('OPENAI_BASE_URL', '')
    # Ordered (substring, name) pairs — first match wins, mirroring the
    # original if/elif chain.
    known_providers = (
        ('deepseek', 'DeepSeek'),
        ('openai.com', 'OpenAI'),
        ('bigmodel.cn', '智譜 GLM'),
        ('moonshot.cn', '月之暗面 Kimi'),
        ('dashscope', '阿里雲 通義千問'),
        ('hunyuan', '騰訊雲 混元'),
    )
    for needle, name in known_providers:
        if needle in base_url:
            return name
    if 'localhost' in base_url or 'ollama' in base_url:
        return 'Ollama (本地)'
    return '未知提供商'
@app.get('/')
def root():
    """Landing endpoint: service status summary, feature list and endpoint map."""
    return {
        'message': '🎉 Graphiti MCP Server 運行中!',
        'status': 'running',
        'version': '1.0.0',
        'ai_provider': get_ai_provider(),
        # True when the env var is present — does not validate the key itself.
        'api_key_configured': bool(os.getenv('OPENAI_API_KEY')),
        'features': [
            '個人記憶存儲',
            '智能記憶搜索',
            '跨設備同步',
            '多 AI 提供商支持'
        ],
        'endpoints': {
            'docs': '/docs',
            'health': '/health',
            'add_memory': '/graphiti/add_memory',
            'search': '/graphiti/search',
            'status': '/graphiti/status'
        }
    }
@app.get('/health')
def health():
    """Liveness probe: status, current timestamp and detected AI provider."""
    return {
        'status': 'healthy',
        'timestamp': datetime.now().isoformat(),
        'ai_provider': get_ai_provider()
    }
@app.post('/graphiti/add_memory')
async def add_memory(request: Dict[str, Any] = Body(...)):
    """Store a memory item (placeholder until real Graphiti storage is wired in).

    Body: JSON object with a non-empty 'content' string.
    Returns: success flag, message and the stored memory stub.
    Raises: HTTPException 400 when 'content' is empty, 500 on other failures.
    """
    import hashlib  # local import: stdlib, used only for the stable id below
    try:
        content = request.get('content', '')
        if not content:
            raise HTTPException(status_code=400, detail='內容不能為空')
        # TODO: integrate real Graphiti memory storage here.
        # Derive a stable id from the content: built-in hash() is salted per
        # process (PYTHONHASHSEED), so ids would differ on every restart.
        digest = hashlib.sha256(content.encode('utf-8')).hexdigest()
        memory = {
            'id': int(digest[:8], 16) % 100000,
            'content': content,
            'timestamp': datetime.now().isoformat(),
            'ai_provider': get_ai_provider()
        }
        return {
            'success': True,
            'message': '記憶已成功添加!',
            'memory': memory
        }
    except HTTPException:
        # Re-raise deliberate 4xx responses; the bare `except Exception`
        # below used to swallow the 400 and convert it into a 500.
        raise
    except Exception as e:
        raise HTTPException(status_code=500, detail=f'添加記憶失敗: {str(e)}')
@app.get('/graphiti/search')
async def search_memories(query: str):
    """Search stored memories (returns placeholder results for now).

    Query param: 'query' — search text; must be non-empty.
    Returns: the query, matching results with scores, and result count.
    Raises: HTTPException 400 on empty query, 500 on other failures.
    """
    try:
        if not query:
            raise HTTPException(status_code=400, detail='搜索查詢不能為空')
        # TODO: integrate real Graphiti memory search here.
        results = [
            {
                'content': f'找到與「{query}」相關的記憶內容示例',
                'score': 0.95,
                'timestamp': '2024-01-15T10:30:00'
            },
            {
                'content': f'另一個匹配「{query}」的記憶片段',
                'score': 0.87,
                'timestamp': '2024-01-14T15:20:00'
            }
        ]
        return {
            'query': query,
            'results': results,
            'count': len(results),
            'ai_provider': get_ai_provider()
        }
    except HTTPException:
        # Re-raise deliberate 4xx responses; the bare `except Exception`
        # below used to swallow the 400 and convert it into a 500.
        raise
    except Exception as e:
        raise HTTPException(status_code=500, detail=f'搜索失敗: {str(e)}')
@app.get('/graphiti/status')
def graphiti_status():
    """Configuration overview: provider, key presence, base URL and endpoints."""
    return {
        'graphiti': 'installed',
        'ai_provider': get_ai_provider(),
        # Presence check only — the key is never validated against the provider.
        'api_key_configured': bool(os.getenv('OPENAI_API_KEY')),
        'base_url': os.getenv('OPENAI_BASE_URL', 'not_set'),
        'supported_providers': [
            'OpenAI', 'DeepSeek', '智譜 GLM', '月之暗面 Kimi',
            '阿里雲 通義千問', '騰訊雲 混元', 'Anthropic Claude', 'Ollama'
        ],
        'endpoints': {
            'add_memory': 'POST /graphiti/add_memory',
            'search': 'GET /graphiti/search?query=搜索詞',
            'status': 'GET /graphiti/status',
            'health': 'GET /health',
            'docs': 'GET /docs'
        }
    }
if __name__ == '__main__':
    # Entry point when run directly (as the container's `python app.py` does).
    print('🚀 啟動 Graphiti MCP Server...')
    print(f'🤖 AI 提供商: {get_ai_provider()}')
    # 999 is the host-side port from the compose mapping "999:8000";
    # inside the container the server listens on 8000.
    print('📖 API 文檔: http://localhost:999/docs')
    uvicorn.run(app, host='0.0.0.0', port=8000)
EOF
# Heredoc closed above; run the server in the foreground so it stays PID 1.
echo '✅ 安裝完成!啟動服務器...' &&
python app.py
"
# Named volume persisting /app/data across container recreations.
# (graphiti_data must be nested under volumes; flush-left it becomes a
# bogus top-level key and leaves `volumes:` null.)
volumes:
  graphiti_data:
# 📋 使用說明:
# - 選擇一個 AI 提供商,取消對應的環境變數註釋
# - 填入你的 API Key
# - 運行: docker-compose up -d
# - 訪問: http://localhost:999/docs 查看 API 文檔
#
# 🌟 功能特點:
# - 個人記憶存儲和搜索
# - 支持多個 AI 提供商
# - 跨設備數據同步
# - Web API 接口
# - 自動生成 API 文檔