凌晨2点,机房断网了。Agent还在跑——因为它根本不需要网。
离线模式是OpenClaw面向企业私有化部署和边缘计算场景的核心功能。通过集成本地模型、预加载技能包和本地知识库,Agent可以在完全断网的环境中运行。这对于金融、医疗、军工等对数据安全有极高要求的行业来说,是刚需。
# offline-config.yaml
# Top-level OpenClaw offline-mode configuration: local chat/embedding
# models served by Ollama, preloaded skills, and an on-disk knowledge base.
mode: "offline" # online | offline | hybrid

models:
  local:
    # Primary local chat model (large, for full-quality answers).
    primary:
      name: "llama-3-70b"
      provider: "ollama"
      endpoint: "http://localhost:11434"
      quantization: "q4_K_M"
      contextWindow: 8192
    # Lightweight fallback used when the primary model is unavailable.
    fallback:
      name: "phi-3-mini"
      provider: "ollama"
      endpoint: "http://localhost:11434"
      quantization: "q4_K_M"
  embedding:
    # Local embedding model backing the vector store below.
    local:
      name: "nomic-embed-text"
      provider: "ollama"
      endpoint: "http://localhost:11434"
      dimensions: 768

skills:
  # Skills cached to disk ahead of time so they resolve without network.
  preload:
    - "file-ops"
    - "code-analysis"
    - "data-processing"
  cacheDir: "/var/lib/openclaw/skills-cache"

knowledge:
  local:
    vectorStore: "chromadb"
    persistDir: "/var/lib/openclaw/knowledge"
    # Directories indexed into the local vector store.
    indexFiles:
      - "/data/company-docs/"
      - "/data/product-manuals/"
# 1. Install Ollama
curl -fsSL https://ollama.ai/install.sh | sh
# 2. Pull the models (download these ahead of time on a machine with network access)
ollama pull llama3:70b-instruct-q4_K_M
ollama pull nomic-embed-text
# 3. Verify the models are available locally
ollama list
# 4. Point OpenClaw at the local model
openclaw config set model.provider ollama
openclaw config set model.endpoint http://localhost:11434
openclaw config set model.name llama3:70b-instruct-q4_K_M
# 5. Switch to offline mode and restart the gateway to apply it
openclaw config set mode offline
openclaw gateway restart
# Deploy the agent to an edge device (small model + minimal skill set)
openclaw deploy edge \
--target "raspberry-pi-01" \
--model "phi-3-mini:q4_K_M" \
--skills "sensor-read,local-alert,data-log" \
--storage "/mnt/edge/openclaw"
# Edge node configuration
# edge-config.yaml
# Constrained edge deployment: small local model, resource caps, and
# eventual-consistency sync back to the parent gateway.
edge:
  nodeId: "edge-rpi-01"
  parentGateway: "https://gateway.company.com"
  syncInterval: "5m"
  model:
    name: "phi-3-mini"
    endpoint: "http://localhost:11434"
  constraints:
    maxMemory: "4GB"
    maxCPU: "80%"
    batteryAware: true # automatically degrade when battery is low
  sync:
    mode: "eventual" # eventual consistency
    conflictResolution: "server-wins"
    queueSize: 1000
| 场景 | 模型推荐 | 最低配置 |
|---|---|---|
| 边缘IoT | Phi-3 Mini / Qwen-1.8B | 4GB RAM, ARM64 |
| 企业内网 | Llama-3-70B / Qwen-72B | 64GB RAM, GPU |
| 笔记本开发 | Llama-3-8B / Mistral-7B | 16GB RAM, CPU |
const { OpenClawClient, OfflineMode } = require('@openclaw/sdk');
/**
 * Run a fully offline analysis session against local Ollama endpoints,
 * then queue the result for cloud sync once connectivity returns.
 * @returns {Promise<object>} the analysis result produced by the local model
 */
async function runOfflineAgent() {
  // Both chat and embedding traffic stay on the local Ollama server.
  const localEndpoint = 'http://localhost:11434';
  const offlineClient = new OpenClawClient({
    mode: OfflineMode.FULL,
    modelEndpoint: localEndpoint,
    modelName: 'llama3:70b-instruct-q4_K_M',
    embeddingEndpoint: localEndpoint,
    embeddingModel: 'nomic-embed-text'
  });

  // Session backed by skills preloaded into the on-disk cache.
  const session = await offlineClient.sessions.create({
    name: 'offline-analysis',
    skills: ['file-ops', 'code-analysis', 'data-processing']
  });

  // Query answered from the local knowledge base — no network required.
  const result = await session.send(
    '根据公司安全规范文档,分析这段代码是否有安全漏洞。'
  );
  console.log('分析结果:', result.content);

  // Defer pushing the result to the cloud until the network is back.
  await session.queueTask({
    type: 'sync-to-cloud',
    payload: result,
    priority: 'high'
  });

  return result;
}
/**
 * Route a query to a local or cloud model based on data sensitivity.
 * - 'high'   → local model only, external access disabled
 * - 'normal' → local model first, cloud fallback on failure
 * - other    → straight to the cloud model
 * @param {string} query - the user query to answer
 * @param {string} [sensitivity='normal'] - data-sensitivity level
 * @returns {Promise<object>} the chat response
 */
async function smartRouter(query, sensitivity = 'normal') {
  const client = new OpenClawClient({ mode: 'hybrid' });

  // High-sensitivity data must never leave the machine.
  if (sensitivity === 'high') {
    return client.chat(query, {
      model: 'local:llama3:70b',
      noExternalAccess: true
    });
  }

  // Low-sensitivity, high-capability requests go directly to the cloud.
  if (sensitivity !== 'normal') {
    return client.chat(query, { model: 'cloud:gpt-4o' });
  }

  // Normal sensitivity: prefer local; fall back to cloud if local fails.
  try {
    return await client.chat(query, { model: 'local:llama3:70b' });
  } catch (e) {
    console.log('本地模型不可用,回退云端');
    return client.chat(query, { model: 'cloud:claude-3-5-sonnet' });
  }
}
📅 更新时间:2026-05-11 | 📖 更多OpenClaw教程请访问 工具教程索引