Compare commits
182 Commits
| SHA1 | Author | Date | |
|---|---|---|---|
| feaca20582 | |||
| 40af505e90 | |||
| f812dbcb85 | |||
| 59225d97b3 | |||
| 483f773e1d | |||
| b4bbe820a2 | |||
| 1702bb2747 | |||
| b6f661c3c5 | |||
| ac543cff20 | |||
| c88affa020 | |||
| 67998ad702 | |||
| c104b23ddb | |||
| 698adca707 | |||
| 50a877b4c1 | |||
| fa7b615508 | |||
| ea9eccfe8b | |||
| eb036b4d95 | |||
| 1c07d5b411 | |||
| 0fe3834349 | |||
| 33f9d66b54 | |||
| 368f39829f | |||
| 9376c26018 | |||
| 08e131d7c6 | |||
| 36db977b35 | |||
| 92b0ebb61a | |||
| 9c312180a2 | |||
| a433c37c53 | |||
| af8e805016 | |||
| 9f127f896d | |||
| b98971c635 | |||
| 2ee0229740 | |||
| 0d2b57ee2e | |||
| b85a445955 | |||
| dbd8c77519 | |||
| 34db67428f | |||
| 01d0c6b23a | |||
| 79f0a5ae30 | |||
| 5bdeeba213 | |||
| b5447175ff | |||
| 0dc05771ba | |||
| 10f9ea3454 | |||
| 3ec532bc29 | |||
| f1688c5910 | |||
| fd9f140268 | |||
| b2dccf86eb | |||
| f0c106a938 | |||
| e276e66c05 | |||
| 2300f21315 | |||
| 2575edb1d2 | |||
| d31f2c2d97 | |||
| 9d7f6c5aaf | |||
| f9081fcfa7 | |||
| a4d95dec0e | |||
| 31508b8ec7 | |||
| 3d9069552c | |||
| 3b074041bf | |||
| 9a95318b85 | |||
| ccd572b827 | |||
| 41dd23175f | |||
| 864d754b56 | |||
| ccd52f3394 | |||
| eb01aa451d | |||
| c785b45f2d | |||
| ffb8b81f90 | |||
| 65f856d710 | |||
| 1682620377 | |||
| aa455ae89b | |||
| a9ffd38912 | |||
| 86a0584513 | |||
| abef4c5719 | |||
| 483b2336c4 | |||
| 14cda3bc9a | |||
| 6e8f0fa43c | |||
| a965b129f8 | |||
| c135de41b7 | |||
| 2d2c2ac9e6 | |||
| 5e774bbd70 | |||
| 33015067eb | |||
| 6b10c0b891 | |||
| bf817e30d2 | |||
| 0051a0c296 | |||
| d753de91f1 | |||
| f6b2f61a01 | |||
| 70e7910cb9 | |||
| a8868768e8 | |||
| 67293c50df | |||
| 1646079d25 | |||
| 25b639435f | |||
| 77779844e5 | |||
| f658d5806a | |||
| 7134fe0824 | |||
| 263802b3df | |||
| 3c25fddb2a | |||
| a6a46bdd25 | |||
| 235d4d2f1c | |||
| bd1e8c8e1a | |||
| f81807bff6 | |||
| bb7006313c | |||
| 9a49626376 | |||
| 8b978a721f | |||
| 75b4c1d4a4 | |||
| b2087e6065 | |||
| ad8f81ad76 | |||
| c58e1c1fb3 | |||
| cb0779d761 | |||
| daca2d9354 | |||
| 3c1e710c38 | |||
| 0aefde95f2 | |||
| a84aa60554 | |||
| edd4b37325 | |||
| c5f0155061 | |||
| 9ee06ed6fc | |||
| ac6b43e9f4 | |||
| 6c5573ad96 | |||
| 1d57a0d1e5 | |||
| 9780c7d797 | |||
| 35a5451a17 | |||
| 8e81d44d54 | |||
| 86ad0c6a2b | |||
| 6ecf89d6a9 | |||
| 691efa4d8c | |||
| d1e3f435b4 | |||
| 44c3e264ad | |||
| f2b6013329 | |||
| 05d3c51a30 | |||
| 2ceda31ce2 | |||
| 9069bc3c1f | |||
| 9319fe18da | |||
| cc454a86c8 | |||
| 256e8ccebf | |||
| 72c9e6b6ca | |||
| 755a129ca2 | |||
| 8b0d3684c5 | |||
| cdb5ac1471 | |||
| 67acb1a0bb | |||
| 9eac6bafef | |||
| a12f2ff439 | |||
| a38a4d132e | |||
| 48aba73d3a | |||
| a1ab1e1a11 | |||
| f394abf35c | |||
| 52e0271bd5 | |||
| 6c0a48efff | |||
| 87b5bca449 | |||
| be40c0c5a5 | |||
| 6527871928 | |||
| 0bda80de9c | |||
| 02f57f4d98 | |||
| ef83dd44d7 | |||
| a986b6b912 | |||
| b6b1186e3b | |||
| 00dc0c8670 | |||
| 43f2a0a815 | |||
| 50b5bd4d73 | |||
| 8c074870a1 | |||
| 61d1841ce3 | |||
| eb396cf38f | |||
| 9f1657b9be | |||
| 8fecd4286c | |||
| df21d92da3 | |||
| 8d65924704 | |||
| 756c3cadff | |||
| ee870028ff | |||
| 83183a39a5 | |||
| 7a941fb753 | |||
| bcdbce0bee | |||
| abb844d7f8 | |||
| 48733d5ee2 | |||
| 2d118af78f | |||
| 8d7e7e994e | |||
| d38d706c8e | |||
| 523188da08 | |||
| 82f7fbbe0f | |||
| c1b2fceca5 | |||
| be6e9fca5d | |||
| 75c11dfb92 | |||
| 48270fbbf3 | |||
| 18a456b24e | |||
| 71e89801b5 | |||
| 46f6e79557 | |||
| c301b1d4d0 | |||
| 981a93d942 |
@@ -1,97 +0,0 @@
# Mem0 Integration: Dual-Scope Recall + Per-Turn Memory

## Context

Mem0 auto-save works, but the integration is missing key features from mem0 best practices: per-turn recall, multi-level scoping, and proper context injection. This causes the bot to "forget" on follow-up turns and to not differentiate users.

## What's Missing (vs mem0 docs)

1. **Per-turn recall** — only the first turn gets memory context; follow-ups get nothing
2. **Dual-scope** — no sender vs group distinction. All memories use a single hardcoded `user_id`
3. **System prompt injection** — memory is prepended to the user message (pollutes session history)
4. **`agent_id` scoping** — mem0 supports agent-level patterns, not used

## Changes

### 1. `src/memory/mem0.rs` — Use session_id for multi-level scoping

Map zeroclaw's `session_id` param to mem0's `user_id`. This enables per-user and per-group memory namespaces without changing the `Memory` trait.

```rust
// Add helper:
fn effective_user_id(&self, session_id: Option<&str>) -> &str {
    session_id.filter(|s| !s.is_empty()).unwrap_or(&self.user_id)
}

// In store(): use effective_user_id(session_id) as mem0 user_id
// In recall(): use effective_user_id(session_id) as mem0 user_id
// In list(): use effective_user_id(session_id) as mem0 user_id
```

### 2. `src/channels/mod.rs` ~line 2229 — Per-turn dual-scope recall

Remove the `if !had_prior_history` gate. Always recall from both the sender scope and the group scope (for group chats).

```rust
// Detect group chat
let is_group = msg.reply_target.contains("@g.us")
    || msg.reply_target.starts_with("group:");

// Sender-scope recall (always)
let sender_context = build_memory_context(
    ctx.memory.as_ref(), &msg.content, ctx.min_relevance_score,
    Some(&msg.sender),
).await;

// Group-scope recall (groups only)
let group_context = if is_group {
    build_memory_context(
        ctx.memory.as_ref(), &msg.content, ctx.min_relevance_score,
        Some(&history_key),
    ).await
} else {
    String::new()
};

// Merge (deduplicate by checking substring overlap)
let memory_context = merge_memory_contexts(&sender_context, &group_context);
```
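
`merge_memory_contexts` is a new helper introduced by this plan. A minimal sketch, assuming both contexts are newline-delimited snippets and that dropping lines already present verbatim is enough deduplication:

```rust
/// Merge sender- and group-scope memory contexts, skipping group lines
/// that already appear in the sender context (substring overlap).
fn merge_memory_contexts(sender: &str, group: &str) -> String {
    let mut merged = sender.trim_end().to_string();
    for line in group.lines().map(str::trim).filter(|l| !l.is_empty()) {
        if !merged.contains(line) {
            if !merged.is_empty() {
                merged.push('\n');
            }
            merged.push_str(line);
        }
    }
    merged
}
```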

### 3. `src/channels/mod.rs` ~line 2244 — Inject into system prompt

Move the memory context from the user message to the system prompt. It is re-fetched each turn and doesn't pollute the session history.

```rust
let mut system_prompt = build_channel_system_prompt(...);
if !memory_context.is_empty() {
    system_prompt.push_str(&format!("\n\n{memory_context}"));
}
let mut history = vec![ChatMessage::system(system_prompt)];
```

### 4. `src/channels/mod.rs` — Dual-scope auto-save

Find the existing auto-save call. For group messages, store twice (a sketch follows below):

- `store(key, content, category, Some(&msg.sender))` — personal facts
- `store(key, content, category, Some(&history_key))` — group context

Both async, non-blocking. DMs only store to the sender scope.
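
A hedged sketch of the dual-scope save, assuming `ctx.memory` is a cloneable `Arc<dyn Memory>` handle and that `store(key, content, category, scope)` matches the existing call shape; `key` and `category` are illustrative locals:

```rust
// Fire-and-forget: spawn each store so the reply path is never blocked.
for scope in std::iter::once(msg.sender.clone())
    .chain(is_group.then(|| history_key.clone()))
{
    let mem = ctx.memory.clone();
    let (key, content, category) = (key.clone(), msg.content.clone(), category.clone());
    tokio::spawn(async move {
        if let Err(e) = mem.store(&key, &content, &category, Some(&scope)).await {
            tracing::warn!("mem0 auto-save failed: {e}");
        }
    });
}
```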

### 5. `src/memory/mem0.rs` — Add `agent_id` support (optional)

Pass `self.app_name` as the `agent_id` param to the mem0 API for agent behavior tracking.
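
A hedged sketch of the request change, assuming the backend builds a JSON body for mem0's add-memory endpoint (the exact request struct and field layout in `src/memory/mem0.rs` may differ):

```rust
// Include agent_id alongside the scoped user_id when adding memories.
// Assumption: app_name doubles as the agent identifier.
let payload = serde_json::json!({
    "messages": [{ "role": "user", "content": content }],
    "user_id": self.effective_user_id(session_id),
    "agent_id": self.app_name,
});
```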

## Files to Modify

1. `src/memory/mem0.rs` — session_id → user_id mapping
2. `src/channels/mod.rs` — per-turn recall, dual-scope, system prompt injection, dual-scope save

## Verification

1. `cargo check --features whatsapp-web,memory-mem0`
2. `cargo test --features whatsapp-web,memory-mem0`
3. Deploy to Synology
4. Test DM: "我鍾意食壽司" ("I like sushi") → next turn "我鍾意食咩" ("What do I like to eat?") → should recall
5. Test group: Joe says "我鍾意食壽司" ("I like sushi") → someone else asks "Joe 鍾意食咩" ("What does Joe like to eat?") → should recall from group scope
6. Check mem0 server logs: GET with `user_id=sender` AND `user_id=group_key`
7. Check mem0 server logs: POST with both user_ids for group messages
@@ -118,3 +118,7 @@ PROVIDER=openrouter
# Optional: Brave Search (requires API key from https://brave.com/search/api)
# WEB_SEARCH_PROVIDER=brave
# BRAVE_API_KEY=your-brave-search-api-key
#
# Optional: SearXNG (self-hosted, requires instance URL)
# WEB_SEARCH_PROVIDER=searxng
# SEARXNG_INSTANCE_URL=https://searx.example.com
@@ -36,6 +36,145 @@
      - any-glob-to-any-file:
          - "src/channels/**"

"channel:bluesky":
  - changed-files:
      - any-glob-to-any-file:
          - "src/channels/bluesky.rs"

"channel:clawdtalk":
  - changed-files:
      - any-glob-to-any-file:
          - "src/channels/clawdtalk.rs"

"channel:cli":
  - changed-files:
      - any-glob-to-any-file:
          - "src/channels/cli.rs"

"channel:dingtalk":
  - changed-files:
      - any-glob-to-any-file:
          - "src/channels/dingtalk.rs"

"channel:discord":
  - changed-files:
      - any-glob-to-any-file:
          - "src/channels/discord.rs"
          - "src/channels/discord_history.rs"

"channel:email":
  - changed-files:
      - any-glob-to-any-file:
          - "src/channels/email_channel.rs"
          - "src/channels/gmail_push.rs"

"channel:imessage":
  - changed-files:
      - any-glob-to-any-file:
          - "src/channels/imessage.rs"

"channel:irc":
  - changed-files:
      - any-glob-to-any-file:
          - "src/channels/irc.rs"

"channel:lark":
  - changed-files:
      - any-glob-to-any-file:
          - "src/channels/lark.rs"

"channel:linq":
  - changed-files:
      - any-glob-to-any-file:
          - "src/channels/linq.rs"

"channel:matrix":
  - changed-files:
      - any-glob-to-any-file:
          - "src/channels/matrix.rs"

"channel:mattermost":
  - changed-files:
      - any-glob-to-any-file:
          - "src/channels/mattermost.rs"

"channel:mochat":
  - changed-files:
      - any-glob-to-any-file:
          - "src/channels/mochat.rs"

"channel:mqtt":
  - changed-files:
      - any-glob-to-any-file:
          - "src/channels/mqtt.rs"

"channel:nextcloud-talk":
  - changed-files:
      - any-glob-to-any-file:
          - "src/channels/nextcloud_talk.rs"

"channel:nostr":
  - changed-files:
      - any-glob-to-any-file:
          - "src/channels/nostr.rs"

"channel:notion":
  - changed-files:
      - any-glob-to-any-file:
          - "src/channels/notion.rs"

"channel:qq":
  - changed-files:
      - any-glob-to-any-file:
          - "src/channels/qq.rs"

"channel:reddit":
  - changed-files:
      - any-glob-to-any-file:
          - "src/channels/reddit.rs"

"channel:signal":
  - changed-files:
      - any-glob-to-any-file:
          - "src/channels/signal.rs"

"channel:slack":
  - changed-files:
      - any-glob-to-any-file:
          - "src/channels/slack.rs"

"channel:telegram":
  - changed-files:
      - any-glob-to-any-file:
          - "src/channels/telegram.rs"

"channel:twitter":
  - changed-files:
      - any-glob-to-any-file:
          - "src/channels/twitter.rs"

"channel:wati":
  - changed-files:
      - any-glob-to-any-file:
          - "src/channels/wati.rs"

"channel:webhook":
  - changed-files:
      - any-glob-to-any-file:
          - "src/channels/webhook.rs"

"channel:wecom":
  - changed-files:
      - any-glob-to-any-file:
          - "src/channels/wecom.rs"

"channel:whatsapp":
  - changed-files:
      - any-glob-to-any-file:
          - "src/channels/whatsapp.rs"
          - "src/channels/whatsapp_storage.rs"
          - "src/channels/whatsapp_web.rs"

"gateway":
  - changed-files:
      - any-glob-to-any-file:
@@ -101,6 +240,73 @@
      - any-glob-to-any-file:
          - "src/providers/**"

"provider:anthropic":
  - changed-files:
      - any-glob-to-any-file:
          - "src/providers/anthropic.rs"

"provider:azure-openai":
  - changed-files:
      - any-glob-to-any-file:
          - "src/providers/azure_openai.rs"

"provider:bedrock":
  - changed-files:
      - any-glob-to-any-file:
          - "src/providers/bedrock.rs"

"provider:claude-code":
  - changed-files:
      - any-glob-to-any-file:
          - "src/providers/claude_code.rs"

"provider:compatible":
  - changed-files:
      - any-glob-to-any-file:
          - "src/providers/compatible.rs"

"provider:copilot":
  - changed-files:
      - any-glob-to-any-file:
          - "src/providers/copilot.rs"

"provider:gemini":
  - changed-files:
      - any-glob-to-any-file:
          - "src/providers/gemini.rs"
          - "src/providers/gemini_cli.rs"

"provider:glm":
  - changed-files:
      - any-glob-to-any-file:
          - "src/providers/glm.rs"

"provider:kilocli":
  - changed-files:
      - any-glob-to-any-file:
          - "src/providers/kilocli.rs"

"provider:ollama":
  - changed-files:
      - any-glob-to-any-file:
          - "src/providers/ollama.rs"

"provider:openai":
  - changed-files:
      - any-glob-to-any-file:
          - "src/providers/openai.rs"
          - "src/providers/openai_codex.rs"

"provider:openrouter":
  - changed-files:
      - any-glob-to-any-file:
          - "src/providers/openrouter.rs"

"provider:telnyx":
  - changed-files:
      - any-glob-to-any-file:
          - "src/providers/telnyx.rs"

"service":
  - changed-files:
      - any-glob-to-any-file:
@@ -121,6 +327,101 @@
      - any-glob-to-any-file:
          - "src/tools/**"

"tool:browser":
  - changed-files:
      - any-glob-to-any-file:
          - "src/tools/browser.rs"
          - "src/tools/browser_delegate.rs"
          - "src/tools/browser_open.rs"
          - "src/tools/text_browser.rs"
          - "src/tools/screenshot.rs"

"tool:composio":
  - changed-files:
      - any-glob-to-any-file:
          - "src/tools/composio.rs"

"tool:cron":
  - changed-files:
      - any-glob-to-any-file:
          - "src/tools/cron_add.rs"
          - "src/tools/cron_list.rs"
          - "src/tools/cron_remove.rs"
          - "src/tools/cron_run.rs"
          - "src/tools/cron_runs.rs"
          - "src/tools/cron_update.rs"

"tool:file":
  - changed-files:
      - any-glob-to-any-file:
          - "src/tools/file_edit.rs"
          - "src/tools/file_read.rs"
          - "src/tools/file_write.rs"
          - "src/tools/glob_search.rs"
          - "src/tools/content_search.rs"

"tool:google-workspace":
  - changed-files:
      - any-glob-to-any-file:
          - "src/tools/google_workspace.rs"

"tool:mcp":
  - changed-files:
      - any-glob-to-any-file:
          - "src/tools/mcp_client.rs"
          - "src/tools/mcp_deferred.rs"
          - "src/tools/mcp_protocol.rs"
          - "src/tools/mcp_tool.rs"
          - "src/tools/mcp_transport.rs"

"tool:memory":
  - changed-files:
      - any-glob-to-any-file:
          - "src/tools/memory_forget.rs"
          - "src/tools/memory_recall.rs"
          - "src/tools/memory_store.rs"

"tool:microsoft365":
  - changed-files:
      - any-glob-to-any-file:
          - "src/tools/microsoft365/**"

"tool:shell":
  - changed-files:
      - any-glob-to-any-file:
          - "src/tools/shell.rs"
          - "src/tools/node_tool.rs"
          - "src/tools/cli_discovery.rs"

"tool:sop":
  - changed-files:
      - any-glob-to-any-file:
          - "src/tools/sop_advance.rs"
          - "src/tools/sop_approve.rs"
          - "src/tools/sop_execute.rs"
          - "src/tools/sop_list.rs"
          - "src/tools/sop_status.rs"

"tool:web":
  - changed-files:
      - any-glob-to-any-file:
          - "src/tools/web_fetch.rs"
          - "src/tools/web_search_tool.rs"
          - "src/tools/web_search_provider_routing.rs"
          - "src/tools/http_request.rs"

"tool:security":
  - changed-files:
      - any-glob-to-any-file:
          - "src/tools/security_ops.rs"
          - "src/tools/verifiable_intent.rs"

"tool:cloud":
  - changed-files:
      - any-glob-to-any-file:
          - "src/tools/cloud_ops.rs"
          - "src/tools/cloud_patterns.rs"

"tunnel":
  - changed-files:
      - any-glob-to-any-file:
@@ -7,7 +7,7 @@ on:
    branches: [master]

concurrency:
-  group: ci-${{ github.event.pull_request.number || github.sha }}
+  group: ci-${{ github.event.pull_request.number || 'push-master' }}
  cancel-in-progress: true

permissions:

@@ -154,7 +154,7 @@ jobs:
        run: mkdir -p web/dist && touch web/dist/.gitkeep

      - name: Check all features
-        run: cargo check --all-features --locked
+        run: cargo check --features ci-all --locked

  docs-quality:
    name: Docs Quality
@@ -0,0 +1,19 @@
name: PR Path Labeler

on:
  pull_request_target:
    types: [opened, synchronize, reopened]

permissions:
  contents: read
  pull-requests: write

jobs:
  label:
    name: Apply path labels
    runs-on: ubuntu-latest
    timeout-minutes: 5
    steps:
      - uses: actions/labeler@8558fd74291d67161a8a78ce36a881fa63b766a9 # v5
        with:
          sync-labels: true
@@ -1,6 +1,22 @@
name: Pub Homebrew Core

on:
  workflow_call:
    inputs:
      release_tag:
        description: "Existing release tag to publish (vX.Y.Z)"
        required: true
        type: string
      dry_run:
        description: "Patch formula only (no push/PR)"
        required: false
        default: false
        type: boolean
    secrets:
      HOMEBREW_UPSTREAM_PR_TOKEN:
        required: false
      HOMEBREW_CORE_BOT_TOKEN:
        required: false
  workflow_dispatch:
    inputs:
      release_tag:
@@ -19,6 +19,7 @@ env:
jobs:
  detect-version-change:
    name: Detect Version Bump
    if: github.repository == 'zeroclaw-labs/zeroclaw'
    runs-on: ubuntu-latest
    outputs:
      changed: ${{ steps.check.outputs.changed }}

@@ -40,6 +41,14 @@ jobs:
          echo "Current version: ${current}"
          echo "Previous version: ${previous}"

          # Skip if stable release workflow will handle this version
          # (indicated by an existing or imminent stable tag)
          if git ls-remote --exit-code --tags origin "refs/tags/v${current}" >/dev/null 2>&1; then
            echo "Stable tag v${current} exists — stable release workflow handles crates.io"
            echo "changed=false" >> "$GITHUB_OUTPUT"
            exit 0
          fi

          if [[ "$current" != "$previous" && -n "$current" ]]; then
            echo "changed=true" >> "$GITHUB_OUTPUT"
            echo "version=${current}" >> "$GITHUB_OUTPUT"

@@ -102,6 +111,22 @@ jobs:
      - name: Clean web build artifacts
        run: rm -rf web/node_modules web/src web/package.json web/package-lock.json web/tsconfig*.json web/vite.config.ts web/index.html

      - name: Publish aardvark-sys to crates.io
        shell: bash
        env:
          CARGO_REGISTRY_TOKEN: ${{ secrets.CARGO_REGISTRY_TOKEN }}
        run: |
          OUTPUT=$(cargo publish --locked --allow-dirty --no-verify -p aardvark-sys 2>&1) && exit 0
          echo "$OUTPUT"
          if echo "$OUTPUT" | grep -q 'already exists'; then
            echo "::notice::aardvark-sys already on crates.io — skipping"
            exit 0
          fi
          exit 1

      - name: Wait for aardvark-sys to index
        run: sleep 15

      - name: Publish to crates.io
        shell: bash
        env:
@@ -67,6 +67,24 @@ jobs:
      - name: Clean web build artifacts
        run: rm -rf web/node_modules web/src web/package.json web/package-lock.json web/tsconfig*.json web/vite.config.ts web/index.html

      - name: Publish aardvark-sys to crates.io
        if: "!inputs.dry_run"
        shell: bash
        env:
          CARGO_REGISTRY_TOKEN: ${{ secrets.CARGO_REGISTRY_TOKEN }}
        run: |
          OUTPUT=$(cargo publish --locked --allow-dirty --no-verify -p aardvark-sys 2>&1) && exit 0
          echo "$OUTPUT"
          if echo "$OUTPUT" | grep -q 'already exists'; then
            echo "::notice::aardvark-sys already on crates.io — skipping"
            exit 0
          fi
          exit 1

      - name: Wait for aardvark-sys to index
        if: "!inputs.dry_run"
        run: sleep 15

      - name: Publish (dry run)
        if: inputs.dry_run
        run: cargo publish --dry-run --locked --allow-dirty --no-verify
@@ -21,25 +21,48 @@ env:
jobs:
  version:
    name: Resolve Version
    if: github.repository == 'zeroclaw-labs/zeroclaw'
    runs-on: ubuntu-latest
    outputs:
      version: ${{ steps.ver.outputs.version }}
      tag: ${{ steps.ver.outputs.tag }}
      skip: ${{ steps.ver.outputs.skip }}
    steps:
      - uses: actions/checkout@34e114876b0b11c390a56381ad16ebd13914f8d5 # v4
        with:
          fetch-depth: 2
      - name: Compute beta version
        id: ver
        shell: bash
        run: |
          set -euo pipefail
          base_version=$(sed -n 's/^version = "\([^"]*\)"/\1/p' Cargo.toml | head -1)

          # Skip beta if this is a version bump commit (stable release handles it)
          commit_msg=$(git log -1 --pretty=format:"%s")
          if [[ "$commit_msg" =~ ^chore:\ bump\ version ]]; then
            echo "Version bump commit detected — skipping beta release"
            echo "skip=true" >> "$GITHUB_OUTPUT"
            exit 0
          fi

          # Skip beta if a stable tag already exists for this version
          if git ls-remote --exit-code --tags origin "refs/tags/v${base_version}" >/dev/null 2>&1; then
            echo "Stable tag v${base_version} exists — skipping beta release"
            echo "skip=true" >> "$GITHUB_OUTPUT"
            exit 0
          fi

          beta_tag="v${base_version}-beta.${GITHUB_RUN_NUMBER}"
          echo "version=${base_version}" >> "$GITHUB_OUTPUT"
          echo "tag=${beta_tag}" >> "$GITHUB_OUTPUT"
          echo "skip=false" >> "$GITHUB_OUTPUT"
          echo "Beta release: ${beta_tag}"

  release-notes:
    name: Generate Release Notes
    needs: [version]
    if: github.repository == 'zeroclaw-labs/zeroclaw' && needs.version.outputs.skip != 'true'
    runs-on: ubuntu-latest
    outputs:
      notes: ${{ steps.notes.outputs.body }}
@@ -130,6 +153,8 @@ jobs:

  web:
    name: Build Web Dashboard
    needs: [version]
    if: github.repository == 'zeroclaw-labs/zeroclaw' && needs.version.outputs.skip != 'true'
    runs-on: ubuntu-latest
    timeout-minutes: 10
    steps:
@@ -241,9 +266,65 @@ jobs:
          path: zeroclaw-${{ matrix.target }}.${{ matrix.ext }}
          retention-days: 7

  build-desktop:
    name: Build Desktop App (macOS Universal)
    needs: [version]
    if: needs.version.outputs.skip != 'true'
    runs-on: macos-14
    timeout-minutes: 40
    steps:
      - uses: actions/checkout@34e114876b0b11c390a56381ad16ebd13914f8d5 # v4

      - uses: dtolnay/rust-toolchain@631a55b12751854ce901bb631d5902ceb48146f7 # stable
        with:
          toolchain: 1.92.0
          targets: aarch64-apple-darwin,x86_64-apple-darwin

      - uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v2
        with:
          prefix-key: macos-tauri

      - uses: actions/setup-node@v4
        with:
          node-version: 22

      - name: Install Tauri CLI
        run: cargo install tauri-cli --locked

      - name: Sync Tauri version with Cargo.toml
        shell: bash
        run: |
          VERSION=$(sed -n 's/^version = "\([^"]*\)"/\1/p' Cargo.toml | head -1)
          cd apps/tauri
          if command -v jq >/dev/null 2>&1; then
            jq --arg v "$VERSION" '.version = $v' tauri.conf.json > tmp.json && mv tmp.json tauri.conf.json
          else
            sed -i '' "s/\"version\": \"[^\"]*\"/\"version\": \"$VERSION\"/" tauri.conf.json
          fi
          echo "Tauri version set to: $VERSION"

      - name: Build Tauri app (universal binary)
        working-directory: apps/tauri
        run: cargo tauri build --target universal-apple-darwin

      - name: Prepare desktop release assets
        run: |
          mkdir -p desktop-assets
          find target -name '*.dmg' -exec cp {} desktop-assets/ZeroClaw.dmg \; 2>/dev/null || true
          find target -name '*.app.tar.gz' -exec cp {} desktop-assets/ZeroClaw-macos.app.tar.gz \; 2>/dev/null || true
          find target -name '*.app.tar.gz.sig' -exec cp {} desktop-assets/ZeroClaw-macos.app.tar.gz.sig \; 2>/dev/null || true
          echo "--- Desktop assets ---"
          ls -lh desktop-assets/

      - uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4
        with:
          name: desktop-macos
          path: desktop-assets/*
          retention-days: 7

  publish:
    name: Publish Beta Release
-    needs: [version, release-notes, build]
+    needs: [version, release-notes, build, build-desktop]
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@34e114876b0b11c390a56381ad16ebd13914f8d5 # v4
@@ -253,16 +334,21 @@
          pattern: zeroclaw-*
          path: artifacts

      - uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093 # v4
        with:
          name: desktop-macos
          path: artifacts/desktop-macos

      - name: Generate checksums
        run: |
          cd artifacts
-          find . -type f \( -name '*.tar.gz' -o -name '*.zip' \) -exec sha256sum {} + | sed 's| \./[^/]*/| |' > SHA256SUMS
+          find . -type f \( -name '*.tar.gz' -o -name '*.zip' -o -name '*.dmg' \) -exec sha256sum {} + | sed 's| \./[^/]*/| |' > SHA256SUMS
          cat SHA256SUMS

      - name: Collect release assets
        run: |
          mkdir -p release-assets
-          find artifacts -type f \( -name '*.tar.gz' -o -name '*.zip' -o -name 'SHA256SUMS' \) -exec cp {} release-assets/ \;
+          find artifacts -type f \( -name '*.tar.gz' -o -name '*.zip' -o -name '*.dmg' -o -name 'SHA256SUMS' \) -exec cp {} release-assets/ \;
          cp install.sh release-assets/
          echo "--- Assets ---"
          ls -lh release-assets/
@@ -1,6 +1,9 @@
name: Release Stable

on:
  push:
    tags:
      - "v[0-9]+.[0-9]+.[0-9]+" # stable tags only (no -beta suffix)
  workflow_dispatch:
    inputs:
      version:
@@ -33,11 +36,22 @@ jobs:
      - name: Validate semver and Cargo.toml match
        id: check
        shell: bash
+        env:
+          INPUT_VERSION: ${{ inputs.version || '' }}
+          REF_NAME: ${{ github.ref_name }}
+          EVENT_NAME: ${{ github.event_name }}
        run: |
          set -euo pipefail
-          input_version="${{ inputs.version }}"
          cargo_version=$(sed -n 's/^version = "\([^"]*\)"/\1/p' Cargo.toml | head -1)

+          # Resolve version from tag push or manual input
+          if [[ "$EVENT_NAME" == "push" ]]; then
+            # Tag push: extract version from tag name (v0.5.9 -> 0.5.9)
+            input_version="${REF_NAME#v}"
+          else
+            input_version="$INPUT_VERSION"
+          fi

          if [[ ! "$input_version" =~ ^[0-9]+\.[0-9]+\.[0-9]+$ ]]; then
            echo "::error::Version must be semver (X.Y.Z). Got: ${input_version}"
            exit 1
@@ -49,9 +63,13 @@ jobs:
          fi

          tag="v${input_version}"
-          if git ls-remote --exit-code --tags origin "refs/tags/${tag}" >/dev/null 2>&1; then
-            echo "::error::Tag ${tag} already exists."
-            exit 1

+          # Only check tag existence for manual dispatch (tag push means it already exists)
+          if [[ "$EVENT_NAME" != "push" ]]; then
+            if git ls-remote --exit-code --tags origin "refs/tags/${tag}" >/dev/null 2>&1; then
+              echo "::error::Tag ${tag} already exists."
+              exit 1
+            fi
+          fi

          echo "tag=${tag}" >> "$GITHUB_OUTPUT"
@@ -255,9 +273,64 @@ jobs:
          path: zeroclaw-${{ matrix.target }}.${{ matrix.ext }}
          retention-days: 14

  build-desktop:
    name: Build Desktop App (macOS Universal)
    needs: [validate]
    runs-on: macos-14
    timeout-minutes: 40
    steps:
      - uses: actions/checkout@34e114876b0b11c390a56381ad16ebd13914f8d5 # v4

      - uses: dtolnay/rust-toolchain@631a55b12751854ce901bb631d5902ceb48146f7 # stable
        with:
          toolchain: 1.92.0
          targets: aarch64-apple-darwin,x86_64-apple-darwin

      - uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v2
        with:
          prefix-key: macos-tauri

      - uses: actions/setup-node@v4
        with:
          node-version: 22

      - name: Install Tauri CLI
        run: cargo install tauri-cli --locked

      - name: Sync Tauri version with Cargo.toml
        shell: bash
        run: |
          VERSION=$(sed -n 's/^version = "\([^"]*\)"/\1/p' Cargo.toml | head -1)
          cd apps/tauri
          if command -v jq >/dev/null 2>&1; then
            jq --arg v "$VERSION" '.version = $v' tauri.conf.json > tmp.json && mv tmp.json tauri.conf.json
          else
            sed -i '' "s/\"version\": \"[^\"]*\"/\"version\": \"$VERSION\"/" tauri.conf.json
          fi
          echo "Tauri version set to: $VERSION"

      - name: Build Tauri app (universal binary)
        working-directory: apps/tauri
        run: cargo tauri build --target universal-apple-darwin

      - name: Prepare desktop release assets
        run: |
          mkdir -p desktop-assets
          find target -name '*.dmg' -exec cp {} desktop-assets/ZeroClaw.dmg \; 2>/dev/null || true
          find target -name '*.app.tar.gz' -exec cp {} desktop-assets/ZeroClaw-macos.app.tar.gz \; 2>/dev/null || true
          find target -name '*.app.tar.gz.sig' -exec cp {} desktop-assets/ZeroClaw-macos.app.tar.gz.sig \; 2>/dev/null || true
          echo "--- Desktop assets ---"
          ls -lh desktop-assets/

      - uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4
        with:
          name: desktop-macos
          path: desktop-assets/*
          retention-days: 14

  publish:
    name: Publish Stable Release
-    needs: [validate, release-notes, build]
+    needs: [validate, release-notes, build, build-desktop]
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@34e114876b0b11c390a56381ad16ebd13914f8d5 # v4
@@ -267,16 +340,21 @@
          pattern: zeroclaw-*
          path: artifacts

      - uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093 # v4
        with:
          name: desktop-macos
          path: artifacts/desktop-macos

      - name: Generate checksums
        run: |
          cd artifacts
-          find . -type f \( -name '*.tar.gz' -o -name '*.zip' \) -exec sha256sum {} + | sed 's| \./[^/]*/| |' > SHA256SUMS
+          find . -type f \( -name '*.tar.gz' -o -name '*.zip' -o -name '*.dmg' \) -exec sha256sum {} + | sed 's| \./[^/]*/| |' > SHA256SUMS
          cat SHA256SUMS

      - name: Collect release assets
        run: |
          mkdir -p release-assets
-          find artifacts -type f \( -name '*.tar.gz' -o -name '*.zip' -o -name 'SHA256SUMS' \) -exec cp {} release-assets/ \;
+          find artifacts -type f \( -name '*.tar.gz' -o -name '*.zip' -o -name '*.dmg' -o -name 'SHA256SUMS' \) -exec cp {} release-assets/ \;
          cp install.sh release-assets/
          echo "--- Assets ---"
          ls -lh release-assets/
@@ -286,6 +364,14 @@ jobs:
          NOTES: ${{ needs.release-notes.outputs.notes }}
        run: printf '%s\n' "$NOTES" > release-notes.md

      - name: Create tag if manual dispatch
        if: github.event_name == 'workflow_dispatch'
        env:
          TAG: ${{ needs.validate.outputs.tag }}
        run: |
          git tag -a "$TAG" -m "zeroclaw $TAG"
          git push origin "$TAG"

      - name: Create GitHub Release
        env:
          GH_TOKEN: ${{ secrets.RELEASE_TOKEN }}
@@ -323,6 +409,21 @@ jobs:
      - name: Clean web build artifacts
        run: rm -rf web/node_modules web/src web/package.json web/package-lock.json web/tsconfig*.json web/vite.config.ts web/index.html

      - name: Publish aardvark-sys to crates.io
        env:
          CARGO_REGISTRY_TOKEN: ${{ secrets.CARGO_REGISTRY_TOKEN }}
        run: |
          OUTPUT=$(cargo publish --locked --allow-dirty --no-verify -p aardvark-sys 2>&1) && exit 0
          echo "$OUTPUT"
          if echo "$OUTPUT" | grep -q 'already exists'; then
            echo "::notice::aardvark-sys already on crates.io — skipping"
            exit 0
          fi
          exit 1

      - name: Wait for aardvark-sys to index
        run: sleep 15

      - name: Publish to crates.io
        env:
          CARGO_REGISTRY_TOKEN: ${{ secrets.CARGO_REGISTRY_TOKEN }}
@@ -446,6 +547,16 @@ jobs:
      dry_run: false
    secrets: inherit

  homebrew:
    name: Update Homebrew Core
    needs: [validate, publish]
    if: ${{ !cancelled() && needs.publish.result == 'success' }}
    uses: ./.github/workflows/pub-homebrew-core.yml
    with:
      release_tag: ${{ needs.validate.outputs.tag }}
      dry_run: false
    secrets: inherit

  # ── Post-publish: tweet after release + website are live ──────────────
  # Docker push can be slow; don't let it block the tweet.
  tweet:
@@ -0,0 +1,92 @@
# AGENTS.md — ZeroClaw

Cross-tool agent instructions for any AI coding assistant working on this repository.

## Commands

```bash
cargo fmt --all -- --check
cargo clippy --all-targets -- -D warnings
cargo test
```

Full pre-PR validation (recommended):

```bash
./dev/ci.sh all
```

Docs-only changes: run markdown lint and link-integrity checks. If touching bootstrap scripts: `bash -n install.sh`.

## Project Snapshot

ZeroClaw is a Rust-first autonomous agent runtime optimized for performance, efficiency, stability, extensibility, sustainability, and security.

Core architecture is trait-driven and modular. Extend by implementing traits and registering in factory modules (a sketch follows the list below).

Key extension points:

- `src/providers/traits.rs` (`Provider`)
- `src/channels/traits.rs` (`Channel`)
- `src/tools/traits.rs` (`Tool`)
- `src/memory/traits.rs` (`Memory`)
- `src/observability/traits.rs` (`Observer`)
- `src/runtime/traits.rs` (`RuntimeAdapter`)
- `src/peripherals/traits.rs` (`Peripheral`) — hardware boards (STM32, RPi GPIO)
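
A hedged illustration of the implement-and-register pattern. The trait shape here is assumed for the sketch; `EchoTool` and `register_tools` are not real repository items (see `src/tools/traits.rs` for the actual `Tool` definition):

```rust
use async_trait::async_trait;

// Assumed minimal shape of the Tool trait, for illustration only.
#[async_trait]
pub trait Tool: Send + Sync {
    fn name(&self) -> &str;
    async fn execute(&self, input: &str) -> anyhow::Result<String>;
}

// A trivial tool implementation.
pub struct EchoTool;

#[async_trait]
impl Tool for EchoTool {
    fn name(&self) -> &str {
        "echo"
    }
    async fn execute(&self, input: &str) -> anyhow::Result<String> {
        Ok(input.to_string())
    }
}

// Registration in a factory module: push the new tool into the shared registry.
pub fn register_tools(registry: &mut Vec<Box<dyn Tool>>) {
    registry.push(Box::new(EchoTool));
}
```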

## Repository Map

- `src/main.rs` — CLI entrypoint and command routing
- `src/lib.rs` — module exports and shared command enums
- `src/config/` — schema + config loading/merging
- `src/agent/` — orchestration loop
- `src/gateway/` — webhook/gateway server
- `src/security/` — policy, pairing, secret store
- `src/memory/` — markdown/sqlite memory backends + embeddings/vector merge
- `src/providers/` — model providers and resilient wrapper
- `src/channels/` — Telegram/Discord/Slack/etc channels
- `src/tools/` — tool execution surface (shell, file, memory, browser)
- `src/peripherals/` — hardware peripherals (STM32, RPi GPIO)
- `src/runtime/` — runtime adapters (currently native)
- `docs/` — topic-based documentation (setup-guides, reference, ops, security, hardware, contributing, maintainers)
- `.github/` — CI, templates, automation workflows

## Risk Tiers

- **Low risk**: docs/chore/tests-only changes
- **Medium risk**: most `src/**` behavior changes without boundary/security impact
- **High risk**: `src/security/**`, `src/runtime/**`, `src/gateway/**`, `src/tools/**`, `.github/workflows/**`, access-control boundaries

When uncertain, classify as higher risk.

## Workflow

1. **Read before write** — inspect existing module, factory wiring, and adjacent tests before editing.
2. **One concern per PR** — avoid mixed feature+refactor+infra patches.
3. **Implement minimal patch** — no speculative abstractions, no config keys without a concrete use case.
4. **Validate by risk tier** — docs-only: lightweight checks. Code changes: full relevant checks.
5. **Document impact** — update PR notes for behavior, risk, side effects, and rollback.
6. **Queue hygiene** — stacked PR: declare `Depends on #...`. Replacing old PR: declare `Supersedes #...`.

Branch/commit/PR rules:

- Work from a non-`master` branch. Open a PR to `master`; do not push directly.
- Use conventional commit titles. Prefer small PRs (`size: XS/S/M`).
- Follow `.github/pull_request_template.md` fully.
- Never commit secrets, personal data, or real identity information (see `@docs/contributing/pr-discipline.md`).

## Anti-Patterns

- Do not add heavy dependencies for minor convenience.
- Do not silently weaken security policy or access constraints.
- Do not add speculative config/feature flags "just in case".
- Do not mix massive formatting-only changes with functional changes.
- Do not modify unrelated modules "while here".
- Do not bypass failing checks without explicit explanation.
- Do not hide behavior-changing side effects in refactor commits.
- Do not include personal identity or sensitive information in test data, examples, docs, or commits.

## Linked References

- `@docs/contributing/change-playbooks.md` — adding providers, channels, tools, peripherals; security/gateway changes; architecture boundaries
- `@docs/contributing/pr-discipline.md` — privacy rules, superseded-PR attribution/templates, handoff template
- `@docs/contributing/docs-contract.md` — docs system contract, i18n rules, locale parity
@@ -1,90 +1,16 @@
-# CLAUDE.md — ZeroClaw
+# CLAUDE.md — ZeroClaw (Claude Code)

-## Commands
+> **Shared instructions live in [`AGENTS.md`](./AGENTS.md).**
+> This file contains only Claude Code-specific directives.

-```bash
-cargo fmt --all -- --check
-cargo clippy --all-targets -- -D warnings
-cargo test
-```
+## Claude Code Settings

-Full pre-PR validation (recommended):
+Claude Code should read and follow all instructions in `AGENTS.md` at the repository root for project conventions, commands, risk tiers, workflow rules, and anti-patterns.

-```bash
-./dev/ci.sh all
-```
+## Hooks

-Docs-only changes: run markdown lint and link-integrity checks. If touching bootstrap scripts: `bash -n install.sh`.
+_No custom hooks defined yet._

-## Project Snapshot
+## Slash Commands

-ZeroClaw is a Rust-first autonomous agent runtime optimized for performance, efficiency, stability, extensibility, sustainability, and security.
-
-Core architecture is trait-driven and modular. Extend by implementing traits and registering in factory modules.
-
-Key extension points:
-
-- `src/providers/traits.rs` (`Provider`)
-- `src/channels/traits.rs` (`Channel`)
-- `src/tools/traits.rs` (`Tool`)
-- `src/memory/traits.rs` (`Memory`)
-- `src/observability/traits.rs` (`Observer`)
-- `src/runtime/traits.rs` (`RuntimeAdapter`)
-- `src/peripherals/traits.rs` (`Peripheral`) — hardware boards (STM32, RPi GPIO)
-
-## Repository Map
-
-- `src/main.rs` — CLI entrypoint and command routing
-- `src/lib.rs` — module exports and shared command enums
-- `src/config/` — schema + config loading/merging
-- `src/agent/` — orchestration loop
-- `src/gateway/` — webhook/gateway server
-- `src/security/` — policy, pairing, secret store
-- `src/memory/` — markdown/sqlite memory backends + embeddings/vector merge
-- `src/providers/` — model providers and resilient wrapper
-- `src/channels/` — Telegram/Discord/Slack/etc channels
-- `src/tools/` — tool execution surface (shell, file, memory, browser)
-- `src/peripherals/` — hardware peripherals (STM32, RPi GPIO)
-- `src/runtime/` — runtime adapters (currently native)
-- `docs/` — topic-based documentation (setup-guides, reference, ops, security, hardware, contributing, maintainers)
-- `.github/` — CI, templates, automation workflows
-
-## Risk Tiers
-
-- **Low risk**: docs/chore/tests-only changes
-- **Medium risk**: most `src/**` behavior changes without boundary/security impact
-- **High risk**: `src/security/**`, `src/runtime/**`, `src/gateway/**`, `src/tools/**`, `.github/workflows/**`, access-control boundaries
-
-When uncertain, classify as higher risk.
-
-## Workflow
-
-1. **Read before write** — inspect existing module, factory wiring, and adjacent tests before editing.
-2. **One concern per PR** — avoid mixed feature+refactor+infra patches.
-3. **Implement minimal patch** — no speculative abstractions, no config keys without a concrete use case.
-4. **Validate by risk tier** — docs-only: lightweight checks. Code changes: full relevant checks.
-5. **Document impact** — update PR notes for behavior, risk, side effects, and rollback.
-6. **Queue hygiene** — stacked PR: declare `Depends on #...`. Replacing old PR: declare `Supersedes #...`.
-
-Branch/commit/PR rules:
-- Work from a non-`master` branch. Open a PR to `master`; do not push directly.
-- Use conventional commit titles. Prefer small PRs (`size: XS/S/M`).
-- Follow `.github/pull_request_template.md` fully.
-- Never commit secrets, personal data, or real identity information (see `@docs/contributing/pr-discipline.md`).
-
-## Anti-Patterns
-
-- Do not add heavy dependencies for minor convenience.
-- Do not silently weaken security policy or access constraints.
-- Do not add speculative config/feature flags "just in case".
-- Do not mix massive formatting-only changes with functional changes.
-- Do not modify unrelated modules "while here".
-- Do not bypass failing checks without explicit explanation.
-- Do not hide behavior-changing side effects in refactor commits.
-- Do not include personal identity or sensitive information in test data, examples, docs, or commits.
-
-## Linked References
-
-- `@docs/contributing/change-playbooks.md` — adding providers, channels, tools, peripherals; security/gateway changes; architecture boundaries
-- `@docs/contributing/pr-discipline.md` — privacy rules, superseded-PR attribution/templates, handoff template
-- `@docs/contributing/docs-contract.md` — docs system contract, i18n rules, locale parity
+_No custom slash commands defined yet._
@@ -1,10 +1,10 @@
[workspace]
-members = [".", "crates/robot-kit"]
+members = [".", "crates/robot-kit", "crates/aardvark-sys", "apps/tauri"]
resolver = "2"

[package]
name = "zeroclawlabs"
-version = "0.5.4"
+version = "0.6.1"
edition = "2021"
authors = ["theonlyhennygod"]
license = "MIT OR Apache-2.0"

@@ -96,6 +96,9 @@ zip = { version = "8.1", default-features = false, features = ["deflate"] }
anyhow = "1.0"
thiserror = "2.0"

# Aardvark I2C/SPI/GPIO USB adapter (Total Phase) — stub when SDK absent
aardvark-sys = { path = "crates/aardvark-sys", version = "0.1.0" }

# UUID generation
uuid = { version = "1.22", default-features = false, features = ["v4", "std"] }

@@ -147,6 +150,7 @@ which = "8.0"

# WebSocket client channels (Discord/Lark/DingTalk/Nostr)
tokio-tungstenite = { version = "0.29", features = ["rustls-tls-webpki-roots"] }
tokio-socks = "0.5"
futures-util = { version = "0.3", default-features = false, features = ["sink"] }
nostr-sdk = { version = "0.44", default-features = false, features = ["nip04", "nip59"], optional = true }
regex = "1.10"

@@ -196,6 +200,9 @@ pdf-extract = { version = "0.10", optional = true }
# WASM plugin runtime (extism)
extism = { version = "1.20", optional = true }

# Cross-platform audio capture for voice wake word detection (optional, enable with --features voice-wake)
cpal = { version = "0.15", optional = true }

# Terminal QR rendering for WhatsApp Web pairing flow.
qrcode = { version = "0.14", optional = true }

@@ -218,15 +225,13 @@ landlock = { version = "0.4", optional = true }
libc = "0.2"

[features]
-default = ["observability-prometheus", "channel-nostr", "skill-creation"]
+default = ["observability-prometheus", "channel-nostr", "channel-lark", "skill-creation"]
channel-nostr = ["dep:nostr-sdk"]
hardware = ["nusb", "tokio-serial"]
channel-matrix = ["dep:matrix-sdk"]
channel-lark = ["dep:prost"]
channel-feishu = ["channel-lark"] # Alias for Feishu users (Lark and Feishu are the same platform)
memory-postgres = ["dep:postgres"]
# memory-mem0 = Mem0 (OpenMemory) memory backend via REST API
memory-mem0 = []
observability-prometheus = ["dep:prometheus"]
observability-otel = ["dep:opentelemetry", "dep:opentelemetry_sdk", "dep:opentelemetry-otlp"]
peripheral-rpi = ["rppal"]

@@ -249,8 +254,30 @@ rag-pdf = ["dep:pdf-extract"]
skill-creation = []
# whatsapp-web = Native WhatsApp Web client with custom rusqlite storage backend
whatsapp-web = ["dep:wa-rs", "dep:wa-rs-core", "dep:wa-rs-binary", "dep:wa-rs-proto", "dep:wa-rs-ureq-http", "dep:wa-rs-tokio-transport", "dep:serde-big-array", "dep:prost", "dep:qrcode"]
# voice-wake = Voice wake word detection via microphone (cpal)
voice-wake = ["dep:cpal"]
# WASM plugin system (extism-based)
plugins-wasm = ["dep:extism"]
# Meta-feature for CI: all features except those requiring system C libraries
# not available on standard CI runners (e.g., voice-wake needs libasound2-dev).
ci-all = [
    "channel-nostr",
    "hardware",
    "channel-matrix",
    "channel-lark",
    "memory-postgres",
    "observability-prometheus",
    "observability-otel",
    "peripheral-rpi",
    "browser-native",
    "sandbox-landlock",
    "sandbox-bubblewrap",
    "probe",
    "rag-pdf",
    "skill-creation",
    "whatsapp-web",
    "plugins-wasm",
]

[profile.release]
opt-level = "z" # Optimize for size
@@ -12,7 +12,7 @@ RUN npm run build
FROM rust:1.94-slim@sha256:da9dab7a6b8dd428e71718402e97207bb3e54167d37b5708616050b1e8f60ed6 AS builder

WORKDIR /app
-ARG ZEROCLAW_CARGO_FEATURES="memory-postgres"
+ARG ZEROCLAW_CARGO_FEATURES="memory-postgres,channel-lark"

# Install build dependencies
RUN --mount=type=cache,target=/var/cache/apt,sharing=locked \

@@ -23,9 +23,11 @@ RUN --mount=type=cache,target=/var/cache/apt,sharing=locked \

# 1. Copy manifests to cache dependencies
COPY Cargo.toml Cargo.lock ./
-# Remove robot-kit from workspace members — it is excluded by .dockerignore
-# and is not needed for the Docker build (hardware-only crate).
-RUN sed -i 's/members = \[".", "crates\/robot-kit"\]/members = ["."]/' Cargo.toml
+# Include every workspace member: Cargo.lock is generated for the full workspace.
+# Previously we used sed to drop `crates/robot-kit`, which made the manifest disagree
+# with the lockfile and caused `cargo --locked` to fail (Cargo refused to rewrite the lock).
+COPY crates/robot-kit/ crates/robot-kit/
+COPY crates/aardvark-sys/ crates/aardvark-sys/
# Create dummy targets declared in Cargo.toml so manifest parsing succeeds.
RUN mkdir -p src benches \
    && echo "fn main() {}" > src/main.rs \

@@ -77,6 +79,10 @@ RUN mkdir -p /zeroclaw-data/.zeroclaw /zeroclaw-data/workspace && \
    'port = 42617' \
    'host = "[::]"' \
    'allow_public_bind = true' \
    '' \
    '[autonomy]' \
    'level = "supervised"' \
    'auto_approve = ["file_read", "file_write", "file_edit", "memory_recall", "memory_store", "web_search_tool", "web_fetch", "calculator", "glob_search", "content_search", "image_info", "weather", "git_operations"]' \
    > /zeroclaw-data/.zeroclaw/config.toml && \
    chown -R 65534:65534 /zeroclaw-data
@@ -27,7 +27,7 @@ RUN npm run build
FROM rust:1.94-bookworm AS builder

WORKDIR /app
-ARG ZEROCLAW_CARGO_FEATURES="memory-postgres"
+ARG ZEROCLAW_CARGO_FEATURES="memory-postgres,channel-lark"

# Install build dependencies
RUN --mount=type=cache,target=/var/cache/apt,sharing=locked \

@@ -38,9 +38,10 @@ RUN --mount=type=cache,target=/var/cache/apt,sharing=locked \

# 1. Copy manifests to cache dependencies
COPY Cargo.toml Cargo.lock ./
-# Remove robot-kit from workspace members — it is excluded by .dockerignore
-# and is not needed for the Docker build (hardware-only crate).
-RUN sed -i 's/members = \[".", "crates\/robot-kit"\]/members = ["."]/' Cargo.toml
+# Include every workspace member: Cargo.lock is generated for the full workspace.
+# Previously we used sed to drop `crates/robot-kit`, which made the manifest disagree
+# with the lockfile and caused `cargo --locked` to fail (Cargo refused to rewrite the lock).
+COPY crates/robot-kit/ crates/robot-kit/
# Create dummy targets declared in Cargo.toml so manifest parsing succeeds.
RUN mkdir -p src benches \
    && echo "fn main() {}" > src/main.rs \

@@ -88,6 +89,10 @@ RUN mkdir -p /zeroclaw-data/.zeroclaw /zeroclaw-data/workspace && \
    'port = 42617' \
    'host = "[::]"' \
    'allow_public_bind = true' \
    '' \
    '[autonomy]' \
    'level = "supervised"' \
    'auto_approve = ["file_read", "file_write", "file_edit", "memory_recall", "memory_store", "web_search_tool", "web_fetch", "calculator", "glob_search", "content_search", "image_info", "weather", "git_operations"]' \
    > /zeroclaw-data/.zeroclaw/config.toml && \
    chown -R 65534:65534 /zeroclaw-data
@@ -300,7 +300,7 @@ React 19 + Vite 6 + Tailwind CSS 4 web dashboard served directly from the Gateway

- **Core:** shell, file read/write/edit, git operations, glob search, content search
- **Web:** browser control, web fetch, web search, screenshot, image info, PDF read
-- **Integrations:** Jira, Notion, Google Workspace, Microsoft 365, LinkedIn, Composio, Pushover
+- **Integrations:** Jira, Notion, Google Workspace, Microsoft 365, LinkedIn, Composio, Pushover, Weather (wttr.in)
- **MCP:** Model Context Protocol tool wrapper + deferred tool sets
- **Scheduling:** cron add/remove/update/run, schedule tool
- **Memory:** recall, store, forget, knowledge, project intel
@@ -0,0 +1,29 @@
[package]
name = "zeroclaw-desktop"
version = "0.1.0"
edition = "2021"
description = "ZeroClaw Desktop — Tauri-powered system tray app"
publish = false

[build-dependencies]
tauri-build = { version = "2.0", features = [] }

[dependencies]
tauri = { version = "2.0", features = ["tray-icon", "image-png"] }
tauri-plugin-shell = "2.0"
tauri-plugin-store = "2.0"
tauri-plugin-single-instance = "2.0"
serde = { version = "1.0", features = ["derive"] }
serde_json = "1.0"
reqwest = { version = "0.12", default-features = false, features = ["json", "rustls-tls"] }
tokio = { version = "1.50", features = ["rt-multi-thread", "macros", "sync", "time"] }
anyhow = "1.0"

[target.'cfg(target_os = "macos")'.dependencies]
objc2 = "0.6"
objc2-app-kit = { version = "0.3", features = ["NSApplication", "NSImage", "NSRunningApplication"] }
objc2-foundation = { version = "0.3", features = ["NSData"] }

[features]
default = ["custom-protocol"]
custom-protocol = ["tauri/custom-protocol"]
@@ -0,0 +1,3 @@
fn main() {
    tauri_build::build();
}
@@ -0,0 +1,14 @@
{
  "$schema": "../gen/schemas/desktop-schema.json",
  "identifier": "default",
  "description": "Default capability set for ZeroClaw Desktop",
  "windows": ["main"],
  "permissions": [
    "core:default",
    "shell:allow-open",
    "store:allow-get",
    "store:allow-set",
    "store:allow-save",
    "store:allow-load"
  ]
}
@@ -0,0 +1,14 @@
{
  "identifier": "desktop",
  "description": "Desktop-specific permissions for ZeroClaw",
  "windows": ["main"],
  "permissions": [
    "core:default",
    "shell:allow-open",
    "shell:allow-execute",
    "store:allow-get",
    "store:allow-set",
    "store:allow-save",
    "store:allow-load"
  ]
}
@@ -0,0 +1,8 @@
{
  "identifier": "mobile",
  "description": "Mobile-specific permissions for ZeroClaw",
  "windows": ["main"],
  "permissions": [
    "core:default"
  ]
}
(Binary image assets added: 1002 B, 243 B, 243 B.)
@@ -0,0 +1,4 @@
<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 128 128">
  <rect width="128" height="128" rx="16" fill="#DC322F"/>
  <text x="64" y="80" font-size="64" font-family="monospace" font-weight="bold" fill="white" text-anchor="middle">Z</text>
</svg>
(Binary image assets added: 251 B, 199 B, 208 B, 168 B, 201 B.)
@@ -0,0 +1,17 @@
use crate::gateway_client::GatewayClient;
use crate::state::SharedState;
use tauri::State;

#[tauri::command]
pub async fn send_message(
    state: State<'_, SharedState>,
    message: String,
) -> Result<serde_json::Value, String> {
    let s = state.read().await;
    let client = GatewayClient::new(&s.gateway_url, s.token.as_deref());
    drop(s);
    client
        .send_webhook_message(&message)
        .await
        .map_err(|e| e.to_string())
}
@@ -0,0 +1,11 @@
use crate::gateway_client::GatewayClient;
use crate::state::SharedState;
use tauri::State;

#[tauri::command]
pub async fn list_channels(state: State<'_, SharedState>) -> Result<serde_json::Value, String> {
    let s = state.read().await;
    let client = GatewayClient::new(&s.gateway_url, s.token.as_deref());
    drop(s);
    client.get_status().await.map_err(|e| e.to_string())
}
@@ -0,0 +1,19 @@
use crate::gateway_client::GatewayClient;
use crate::state::SharedState;
use tauri::State;

#[tauri::command]
pub async fn get_status(state: State<'_, SharedState>) -> Result<serde_json::Value, String> {
    let s = state.read().await;
    let client = GatewayClient::new(&s.gateway_url, s.token.as_deref());
    drop(s);
    client.get_status().await.map_err(|e| e.to_string())
}

#[tauri::command]
pub async fn get_health(state: State<'_, SharedState>) -> Result<bool, String> {
    let s = state.read().await;
    let client = GatewayClient::new(&s.gateway_url, s.token.as_deref());
    drop(s);
    client.get_health().await.map_err(|e| e.to_string())
}
@@ -0,0 +1,4 @@
pub mod agent;
pub mod channels;
pub mod gateway;
pub mod pairing;
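
These command modules are wired into the Tauri builder; a hedged sketch of that wiring (the actual `main.rs`/`lib.rs` setup in `apps/tauri` may differ, and `shared_state` is an assumed local):

```rust
// Register the gateway-backed commands with the Tauri runtime.
tauri::Builder::default()
    .manage(shared_state) // assumption: SharedState constructed earlier
    .invoke_handler(tauri::generate_handler![
        commands::agent::send_message,
        commands::channels::list_channels,
        commands::gateway::get_status,
        commands::gateway::get_health,
        commands::pairing::initiate_pairing,
        commands::pairing::get_devices,
    ])
    .run(tauri::generate_context!())
    .expect("failed to run ZeroClaw Desktop");
```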
@@ -0,0 +1,19 @@
use crate::gateway_client::GatewayClient;
use crate::state::SharedState;
use tauri::State;

#[tauri::command]
pub async fn initiate_pairing(state: State<'_, SharedState>) -> Result<serde_json::Value, String> {
    let s = state.read().await;
    let client = GatewayClient::new(&s.gateway_url, s.token.as_deref());
    drop(s);
    client.initiate_pairing().await.map_err(|e| e.to_string())
}

#[tauri::command]
pub async fn get_devices(state: State<'_, SharedState>) -> Result<serde_json::Value, String> {
    let s = state.read().await;
    let client = GatewayClient::new(&s.gateway_url, s.token.as_deref());
    drop(s);
    client.get_devices().await.map_err(|e| e.to_string())
}
@@ -0,0 +1,213 @@
|
||||
//! HTTP client for communicating with the ZeroClaw gateway.
|
||||
|
||||
use anyhow::{Context, Result};
|
||||
|
||||
pub struct GatewayClient {
|
||||
pub(crate) base_url: String,
|
||||
pub(crate) token: Option<String>,
|
||||
client: reqwest::Client,
|
||||
}
|
||||
|
||||
impl GatewayClient {
|
||||
pub fn new(base_url: &str, token: Option<&str>) -> Self {
|
||||
let client = reqwest::Client::builder()
|
||||
.timeout(std::time::Duration::from_secs(10))
|
||||
.build()
|
||||
.unwrap_or_default();
|
||||
Self {
|
||||
base_url: base_url.to_string(),
|
||||
token: token.map(String::from),
|
||||
client,
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) fn auth_header(&self) -> Option<String> {
|
||||
self.token.as_ref().map(|t| format!("Bearer {t}"))
|
||||
}
|
||||
|
||||
pub async fn get_status(&self) -> Result<serde_json::Value> {
|
||||
let mut req = self.client.get(format!("{}/api/status", self.base_url));
|
||||
if let Some(auth) = self.auth_header() {
|
||||
req = req.header("Authorization", auth);
|
||||
}
|
||||
let resp = req.send().await.context("status request failed")?;
|
||||
Ok(resp.json().await?)
|
||||
}
|
||||
|
||||
pub async fn get_health(&self) -> Result<bool> {
|
||||
match self
|
||||
.client
|
||||
.get(format!("{}/health", self.base_url))
|
||||
.send()
|
||||
.await
|
||||
{
|
||||
Ok(resp) => Ok(resp.status().is_success()),
|
||||
Err(_) => Ok(false),
|
||||
}
|
||||
}
|
||||
|
||||
pub async fn get_devices(&self) -> Result<serde_json::Value> {
|
||||
let mut req = self.client.get(format!("{}/api/devices", self.base_url));
|
||||
if let Some(auth) = self.auth_header() {
|
||||
req = req.header("Authorization", auth);
|
||||
}
|
||||
let resp = req.send().await.context("devices request failed")?;
|
||||
Ok(resp.json().await?)
|
||||
}
|
||||
|
||||
pub async fn initiate_pairing(&self) -> Result<serde_json::Value> {
|
||||
let mut req = self
|
||||
.client
|
||||
.post(format!("{}/api/pairing/initiate", self.base_url));
|
||||
if let Some(auth) = self.auth_header() {
|
||||
req = req.header("Authorization", auth);
|
||||
}
|
||||
let resp = req.send().await.context("pairing request failed")?;
|
||||
Ok(resp.json().await?)
|
||||
}
|
||||
|
||||
/// Check whether the gateway requires pairing.
|
||||
pub async fn requires_pairing(&self) -> Result<bool> {
|
||||
let resp = self
|
||||
.client
|
||||
.get(format!("{}/health", self.base_url))
|
||||
.send()
|
||||
.await
|
||||
.context("health request failed")?;
|
||||
let body: serde_json::Value = resp.json().await?;
|
||||
Ok(body["require_pairing"].as_bool().unwrap_or(false))
|
||||
}
|
||||
|
||||
/// Request a new pairing code from the gateway (localhost-only admin endpoint).
|
||||
pub async fn request_new_paircode(&self) -> Result<String> {
|
||||
let resp = self
|
||||
.client
|
||||
.post(format!("{}/admin/paircode/new", self.base_url))
|
||||
.send()
|
||||
.await
|
||||
.context("paircode request failed")?;
|
||||
let body: serde_json::Value = resp.json().await?;
|
||||
body["pairing_code"]
|
||||
.as_str()
|
||||
.map(String::from)
|
||||
.context("no pairing_code in response")
|
||||
}
|
||||
|
||||
/// Exchange a pairing code for a bearer token.
|
||||
pub async fn pair_with_code(&self, code: &str) -> Result<String> {
|
||||
let resp = self
|
||||
.client
|
||||
.post(format!("{}/pair", self.base_url))
|
||||
.header("X-Pairing-Code", code)
|
||||
.send()
|
||||
.await
|
||||
.context("pair request failed")?;
|
||||
if !resp.status().is_success() {
|
||||
anyhow::bail!("pair request returned {}", resp.status());
|
||||
}
|
||||
let body: serde_json::Value = resp.json().await?;
|
||||
body["token"]
|
||||
.as_str()
|
||||
.map(String::from)
|
||||
.context("no token in pair response")
|
||||
}
|
||||
|
||||
/// Validate an existing token by calling a protected endpoint.
|
||||
pub async fn validate_token(&self) -> Result<bool> {
|
||||
let mut req = self.client.get(format!("{}/api/status", self.base_url));
|
||||
if let Some(auth) = self.auth_header() {
|
||||
req = req.header("Authorization", auth);
|
||||
}
|
||||
match req.send().await {
|
||||
Ok(resp) => Ok(resp.status().is_success()),
|
||||
Err(_) => Ok(false),
|
||||
}
|
||||
}
|
||||
|
||||
/// Auto-pair with the gateway: request a new code and exchange it for a token.
|
||||
pub async fn auto_pair(&self) -> Result<String> {
|
||||
let code = self.request_new_paircode().await?;
|
||||
self.pair_with_code(&code).await
|
||||
}
|
||||
|
||||
pub async fn send_webhook_message(&self, message: &str) -> Result<serde_json::Value> {
|
||||
let mut req = self
|
||||
.client
|
||||
.post(format!("{}/webhook", self.base_url))
|
||||
.json(&serde_json::json!({ "message": message }));
|
||||
if let Some(auth) = self.auth_header() {
|
||||
req = req.header("Authorization", auth);
|
||||
}
|
||||
let resp = req.send().await.context("webhook request failed")?;
|
||||
Ok(resp.json().await?)
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
|
||||
#[test]
|
||||
fn client_creation_no_token() {
|
||||
let client = GatewayClient::new("http://127.0.0.1:42617", None);
|
||||
assert_eq!(client.base_url, "http://127.0.0.1:42617");
|
||||
assert!(client.token.is_none());
|
||||
assert!(client.auth_header().is_none());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn client_creation_with_token() {
|
||||
let client = GatewayClient::new("http://localhost:8080", Some("test-token"));
|
||||
assert_eq!(client.base_url, "http://localhost:8080");
|
||||
assert_eq!(client.token.as_deref(), Some("test-token"));
|
||||
assert_eq!(client.auth_header().unwrap(), "Bearer test-token");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn client_custom_url() {
|
||||
let client = GatewayClient::new("https://zeroclaw.example.com:9999", None);
|
||||
assert_eq!(client.base_url, "https://zeroclaw.example.com:9999");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn auth_header_format() {
|
||||
let client = GatewayClient::new("http://localhost", Some("zc_abc123"));
|
||||
assert_eq!(client.auth_header().unwrap(), "Bearer zc_abc123");
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn health_returns_false_for_unreachable_host() {
|
||||
// Connect to a port that should not be listening.
|
||||
let client = GatewayClient::new("http://127.0.0.1:1", None);
|
||||
let result = client.get_health().await.unwrap();
|
||||
assert!(!result, "health should be false for unreachable host");
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn status_fails_for_unreachable_host() {
|
||||
let client = GatewayClient::new("http://127.0.0.1:1", None);
|
||||
let result = client.get_status().await;
|
||||
assert!(result.is_err(), "status should fail for unreachable host");
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn devices_fails_for_unreachable_host() {
|
||||
let client = GatewayClient::new("http://127.0.0.1:1", None);
|
||||
let result = client.get_devices().await;
|
||||
assert!(result.is_err(), "devices should fail for unreachable host");
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn pairing_fails_for_unreachable_host() {
|
||||
let client = GatewayClient::new("http://127.0.0.1:1", None);
|
||||
let result = client.initiate_pairing().await;
|
||||
assert!(result.is_err(), "pairing should fail for unreachable host");
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn webhook_fails_for_unreachable_host() {
|
||||
let client = GatewayClient::new("http://127.0.0.1:1", None);
|
||||
let result = client.send_webhook_message("hello").await;
|
||||
assert!(result.is_err(), "webhook should fail for unreachable host");
|
||||
}
|
||||
}
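
A minimal end-to-end sketch of the client above (not part of the diff; assumes a gateway on the default localhost port): check whether pairing is required, auto-pair if so, then send a message with the resulting token.

async fn example() -> anyhow::Result<()> {
    let url = "http://127.0.0.1:42617";
    let unauth = GatewayClient::new(url, None);
    // Only pair when the gateway demands it; otherwise stay tokenless.
    let token = if unauth.requires_pairing().await? {
        Some(unauth.auto_pair().await?)
    } else {
        None
    };
    let client = GatewayClient::new(url, token.as_deref());
    let reply = client.send_webhook_message("hello from the desktop app").await?;
    println!("{reply}");
    Ok(())
}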
@@ -0,0 +1,40 @@
//! Background health polling for the ZeroClaw gateway.

use crate::gateway_client::GatewayClient;
use crate::state::SharedState;
use crate::tray::icon;
use std::time::Duration;
use tauri::{AppHandle, Emitter, Runtime};

const POLL_INTERVAL: Duration = Duration::from_secs(5);

/// Spawn a background task that polls gateway health and updates state + tray.
pub fn spawn_health_poller<R: Runtime>(app: AppHandle<R>, state: SharedState) {
    tauri::async_runtime::spawn(async move {
        loop {
            let (url, token) = {
                let s = state.read().await;
                (s.gateway_url.clone(), s.token.clone())
            };

            let client = GatewayClient::new(&url, token.as_deref());
            let healthy = client.get_health().await.unwrap_or(false);

            let (connected, agent_status) = {
                let mut s = state.write().await;
                s.connected = healthy;
                (s.connected, s.agent_status)
            };

            // Update the tray icon and tooltip to reflect current state.
            if let Some(tray) = app.tray_by_id("main") {
                let _ = tray.set_icon(Some(icon::icon_for_state(connected, agent_status)));
                let _ = tray.set_tooltip(Some(icon::tooltip_for_state(connected, agent_status)));
            }

            let _ = app.emit("zeroclaw://status-changed", healthy);

            tokio::time::sleep(POLL_INTERVAL).await;
        }
    });
}
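
The poller emits `zeroclaw://status-changed` app-wide, so Rust components can subscribe alongside the frontend. A sketch (not in the diff; assumes Tauri v2's `Listener` trait, so verify the payload shape against the Tauri version in use):

fn observe_status<R: tauri::Runtime>(app: &tauri::AppHandle<R>) {
    use tauri::Listener;
    app.listen("zeroclaw://status-changed", |event| {
        // The poller emits the raw `healthy` bool, JSON-serialized.
        println!("gateway healthy: {}", event.payload());
    });
}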
@@ -0,0 +1,136 @@
//! ZeroClaw Desktop — Tauri application library.

pub mod commands;
pub mod gateway_client;
pub mod health;
pub mod state;
pub mod tray;

use gateway_client::GatewayClient;
use state::shared_state;
use tauri::{Manager, RunEvent};

/// Attempt to auto-pair with the gateway so the WebView has a valid token
/// before the React frontend mounts. Runs on localhost so the admin endpoints
/// are accessible without auth.
async fn auto_pair(state: &state::SharedState) -> Option<String> {
    let url = {
        let s = state.read().await;
        s.gateway_url.clone()
    };

    let client = GatewayClient::new(&url, None);

    // Check if gateway is reachable and requires pairing.
    if !client.requires_pairing().await.unwrap_or(false) {
        return None; // Pairing disabled — no token needed.
    }

    // Check if we already have a valid token in state.
    {
        let s = state.read().await;
        if let Some(ref token) = s.token {
            let authed = GatewayClient::new(&url, Some(token));
            if authed.validate_token().await.unwrap_or(false) {
                return Some(token.clone()); // Existing token is valid.
            }
        }
    }

    // No valid token — auto-pair by requesting a new code and exchanging it.
    let client = GatewayClient::new(&url, None);
    match client.auto_pair().await {
        Ok(token) => {
            let mut s = state.write().await;
            s.token = Some(token.clone());
            Some(token)
        }
        Err(_) => None, // Gateway may not be ready yet; health poller will retry.
    }
}

/// Inject a bearer token into the WebView's localStorage so the React app
/// skips the pairing dialog. Uses Tauri's WebviewWindow scripting API.
fn inject_token_into_webview<R: tauri::Runtime>(window: &tauri::WebviewWindow<R>, token: &str) {
    let escaped = token.replace('\\', "\\\\").replace('\'', "\\'");
    let script = format!("localStorage.setItem('zeroclaw_token', '{escaped}')");
    // WebviewWindow scripting is the standard Tauri API for running JS in the WebView.
    let _ = window.eval(&script);
}

/// Set the macOS dock icon programmatically so it shows even in dev builds
/// (which don't have a proper .app bundle).
#[cfg(target_os = "macos")]
fn set_dock_icon() {
    use objc2::{AnyThread, MainThreadMarker};
    use objc2_app_kit::NSApplication;
    use objc2_app_kit::NSImage;
    use objc2_foundation::NSData;

    let icon_bytes = include_bytes!("../icons/128x128.png");
    // Safety: setup() runs on the main thread in Tauri.
    let mtm = unsafe { MainThreadMarker::new_unchecked() };
    let data = NSData::with_bytes(icon_bytes);
    if let Some(image) = NSImage::initWithData(NSImage::alloc(), &data) {
        let app = NSApplication::sharedApplication(mtm);
        unsafe { app.setApplicationIconImage(Some(&image)) };
    }
}

/// Configure and run the Tauri application.
pub fn run() {
    let shared = shared_state();

    tauri::Builder::default()
        .plugin(tauri_plugin_shell::init())
        .plugin(tauri_plugin_store::Builder::default().build())
        .plugin(tauri_plugin_single_instance::init(|app, _args, _cwd| {
            // When a second instance launches, focus the existing window.
            if let Some(window) = app.get_webview_window("main") {
                let _ = window.show();
                let _ = window.set_focus();
            }
        }))
        .manage(shared.clone())
        .invoke_handler(tauri::generate_handler![
            commands::gateway::get_status,
            commands::gateway::get_health,
            commands::channels::list_channels,
            commands::pairing::initiate_pairing,
            commands::pairing::get_devices,
            commands::agent::send_message,
        ])
        .setup(move |app| {
            // Set macOS dock icon (needed for dev builds without .app bundle).
            #[cfg(target_os = "macos")]
            set_dock_icon();

            // Set up the system tray.
            let _ = tray::setup_tray(app);

            // Auto-pair with gateway and inject token into the WebView.
            let app_handle = app.handle().clone();
            let pair_state = shared.clone();
            tauri::async_runtime::spawn(async move {
                if let Some(token) = auto_pair(&pair_state).await {
                    if let Some(window) = app_handle.get_webview_window("main") {
                        inject_token_into_webview(&window, &token);
                    }
                }
            });

            // Start background health polling.
            health::spawn_health_poller(app.handle().clone(), shared.clone());

            Ok(())
        })
        .build(tauri::generate_context!())
        .expect("error while building tauri application")
        .run(|_app, event| {
            // Keep the app running in the background when all windows are closed.
            // This is the standard pattern for menu bar / tray apps.
            if let RunEvent::ExitRequested { api, .. } = event {
                api.prevent_exit();
            }
        });
}
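
The single-quote/backslash escaping in inject_token_into_webview is the part most worth a regression test: backslashes must be escaped first, or the quote escapes would themselves be double-escaped. A quick check of the same logic (a sketch, not in the diff):

#[test]
fn token_escaping_handles_quotes_and_backslashes() {
    let token = r"ab'c\d";
    // Same two-step replace as inject_token_into_webview: backslashes first.
    let escaped = token.replace('\\', "\\\\").replace('\'', "\\'");
    assert_eq!(escaped, r"ab\'c\\d");
}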
@@ -0,0 +1,8 @@
//! ZeroClaw Desktop — main entry point.
//!
//! Prevents an additional console window on Windows in release.
#![cfg_attr(not(debug_assertions), windows_subsystem = "windows")]

fn main() {
    zeroclaw_desktop::run();
}
@@ -0,0 +1,6 @@
//! Mobile entry point for ZeroClaw Desktop (iOS/Android).

#[tauri::mobile_entry_point]
fn main() {
    zeroclaw_desktop::run();
}
@@ -0,0 +1,99 @@
//! Shared application state for Tauri.

use std::sync::Arc;
use tokio::sync::RwLock;

/// Agent status as reported by the gateway.
#[derive(Debug, Clone, Copy, PartialEq, Eq, serde::Serialize)]
#[serde(rename_all = "snake_case")]
pub enum AgentStatus {
    Idle,
    Working,
    Error,
}

/// Shared application state behind an `Arc<RwLock<_>>`.
#[derive(Debug, Clone)]
pub struct AppState {
    pub gateway_url: String,
    pub token: Option<String>,
    pub connected: bool,
    pub agent_status: AgentStatus,
}

impl Default for AppState {
    fn default() -> Self {
        Self {
            gateway_url: "http://127.0.0.1:42617".to_string(),
            token: None,
            connected: false,
            agent_status: AgentStatus::Idle,
        }
    }
}

/// Thread-safe wrapper around `AppState`.
pub type SharedState = Arc<RwLock<AppState>>;

/// Create the default shared state.
pub fn shared_state() -> SharedState {
    Arc::new(RwLock::new(AppState::default()))
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn default_state() {
        let state = AppState::default();
        assert_eq!(state.gateway_url, "http://127.0.0.1:42617");
        assert!(state.token.is_none());
        assert!(!state.connected);
        assert_eq!(state.agent_status, AgentStatus::Idle);
    }

    #[test]
    fn shared_state_is_cloneable() {
        let s1 = shared_state();
        let s2 = s1.clone();
        // Both references point to the same allocation.
        assert!(Arc::ptr_eq(&s1, &s2));
    }

    #[tokio::test]
    async fn shared_state_concurrent_read_write() {
        let state = shared_state();

        // Write from one handle.
        {
            let mut s = state.write().await;
            s.connected = true;
            s.agent_status = AgentStatus::Working;
            s.token = Some("zc_test".to_string());
        }

        // Read from cloned handle.
        let state2 = state.clone();
        let s = state2.read().await;
        assert!(s.connected);
        assert_eq!(s.agent_status, AgentStatus::Working);
        assert_eq!(s.token.as_deref(), Some("zc_test"));
    }

    #[test]
    fn agent_status_serialization() {
        assert_eq!(
            serde_json::to_string(&AgentStatus::Idle).unwrap(),
            "\"idle\""
        );
        assert_eq!(
            serde_json::to_string(&AgentStatus::Working).unwrap(),
            "\"working\""
        );
        assert_eq!(
            serde_json::to_string(&AgentStatus::Error).unwrap(),
            "\"error\""
        );
    }
}
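
State is only ever mutated through the write lock, as the concurrency test above exercises. A sketch of how a future command could expose a mutation (hypothetical command, not in the diff):

// Hypothetical command: point the app at a different gateway at runtime.
#[tauri::command]
pub async fn set_gateway_url(
    state: tauri::State<'_, SharedState>,
    url: String,
) -> Result<(), String> {
    let mut s = state.write().await;
    s.gateway_url = url;
    Ok(()) // The health poller picks up the new URL on its next tick.
}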
@@ -0,0 +1,25 @@
//! Tray menu event handling.

use tauri::{menu::MenuEvent, AppHandle, Manager, Runtime};

pub fn handle_menu_event<R: Runtime>(app: &AppHandle<R>, event: MenuEvent) {
    match event.id().as_ref() {
        "show" => show_main_window(app, None),
        "chat" => show_main_window(app, Some("/agent")),
        "quit" => {
            app.exit(0);
        }
        _ => {}
    }
}

fn show_main_window<R: Runtime>(app: &AppHandle<R>, navigate_to: Option<&str>) {
    if let Some(window) = app.get_webview_window("main") {
        let _ = window.show();
        let _ = window.set_focus();
        if let Some(path) = navigate_to {
            let script = format!("window.location.hash = '{path}'");
            let _ = window.eval(&script);
        }
    }
}
@@ -0,0 +1,105 @@
//! Tray icon management — swap icon based on connection/agent status.

use crate::state::AgentStatus;
use tauri::image::Image;

/// Embedded tray icon PNGs (22x22, RGBA).
const ICON_IDLE: &[u8] = include_bytes!("../../icons/tray-idle.png");
const ICON_WORKING: &[u8] = include_bytes!("../../icons/tray-working.png");
const ICON_ERROR: &[u8] = include_bytes!("../../icons/tray-error.png");
const ICON_DISCONNECTED: &[u8] = include_bytes!("../../icons/tray-disconnected.png");

/// Select the appropriate tray icon for the current state.
pub fn icon_for_state(connected: bool, status: AgentStatus) -> Image<'static> {
    let bytes: &[u8] = if !connected {
        ICON_DISCONNECTED
    } else {
        match status {
            AgentStatus::Idle => ICON_IDLE,
            AgentStatus::Working => ICON_WORKING,
            AgentStatus::Error => ICON_ERROR,
        }
    };
    Image::from_bytes(bytes).expect("embedded tray icon is a valid PNG")
}

/// Tooltip text for the current state.
pub fn tooltip_for_state(connected: bool, status: AgentStatus) -> &'static str {
    if !connected {
        return "ZeroClaw — Disconnected";
    }
    match status {
        AgentStatus::Idle => "ZeroClaw — Idle",
        AgentStatus::Working => "ZeroClaw — Working",
        AgentStatus::Error => "ZeroClaw — Error",
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn icon_disconnected_when_not_connected() {
        // Should not panic — icon bytes are valid PNGs.
        let _img = icon_for_state(false, AgentStatus::Idle);
        let _img = icon_for_state(false, AgentStatus::Working);
        let _img = icon_for_state(false, AgentStatus::Error);
    }

    #[test]
    fn icon_connected_variants() {
        let _idle = icon_for_state(true, AgentStatus::Idle);
        let _working = icon_for_state(true, AgentStatus::Working);
        let _error = icon_for_state(true, AgentStatus::Error);
    }

    #[test]
    fn tooltip_disconnected() {
        assert_eq!(
            tooltip_for_state(false, AgentStatus::Idle),
            "ZeroClaw — Disconnected"
        );
        // Agent status is irrelevant when disconnected.
        assert_eq!(
            tooltip_for_state(false, AgentStatus::Working),
            "ZeroClaw — Disconnected"
        );
        assert_eq!(
            tooltip_for_state(false, AgentStatus::Error),
            "ZeroClaw — Disconnected"
        );
    }

    #[test]
    fn tooltip_connected_variants() {
        assert_eq!(
            tooltip_for_state(true, AgentStatus::Idle),
            "ZeroClaw — Idle"
        );
        assert_eq!(
            tooltip_for_state(true, AgentStatus::Working),
            "ZeroClaw — Working"
        );
        assert_eq!(
            tooltip_for_state(true, AgentStatus::Error),
            "ZeroClaw — Error"
        );
    }

    #[test]
    fn embedded_icons_are_valid_png() {
        // Verify the PNG signature (first 8 bytes) of each embedded icon.
        let png_sig: &[u8] = &[0x89, b'P', b'N', b'G', 0x0D, 0x0A, 0x1A, 0x0A];
        assert!(ICON_IDLE.starts_with(png_sig), "idle icon not valid PNG");
        assert!(
            ICON_WORKING.starts_with(png_sig),
            "working icon not valid PNG"
        );
        assert!(ICON_ERROR.starts_with(png_sig), "error icon not valid PNG");
        assert!(
            ICON_DISCONNECTED.starts_with(png_sig),
            "disconnected icon not valid PNG"
        );
    }
}
@@ -0,0 +1,19 @@
//! Tray menu construction.

use tauri::{
    menu::{Menu, MenuItemBuilder, PredefinedMenuItem},
    App, Runtime,
};

pub fn create_tray_menu<R: Runtime>(app: &App<R>) -> Result<Menu<R>, tauri::Error> {
    let show = MenuItemBuilder::with_id("show", "Show Dashboard").build(app)?;
    let chat = MenuItemBuilder::with_id("chat", "Agent Chat").build(app)?;
    let sep1 = PredefinedMenuItem::separator(app)?;
    let status = MenuItemBuilder::with_id("status", "Status: Checking...")
        .enabled(false)
        .build(app)?;
    let sep2 = PredefinedMenuItem::separator(app)?;
    let quit = MenuItemBuilder::with_id("quit", "Quit ZeroClaw").build(app)?;

    Menu::with_items(app, &[&show, &chat, &sep1, &status, &sep2, &quit])
}
@@ -0,0 +1,34 @@
//! System tray integration for ZeroClaw Desktop.

pub mod events;
pub mod icon;
pub mod menu;

use tauri::{
    tray::{TrayIcon, TrayIconBuilder, TrayIconEvent},
    App, Manager, Runtime,
};

/// Set up the system tray icon and menu.
pub fn setup_tray<R: Runtime>(app: &App<R>) -> Result<TrayIcon<R>, tauri::Error> {
    let menu = menu::create_tray_menu(app)?;

    TrayIconBuilder::with_id("main")
        .tooltip("ZeroClaw — Disconnected")
        .icon(icon::icon_for_state(false, crate::state::AgentStatus::Idle))
        .menu(&menu)
        .show_menu_on_left_click(false)
        .on_menu_event(events::handle_menu_event)
        .on_tray_icon_event(|tray, event| {
            if let TrayIconEvent::Click { button, .. } = event {
                if button == tauri::tray::MouseButton::Left {
                    let app = tray.app_handle();
                    if let Some(window) = app.get_webview_window("main") {
                        let _ = window.show();
                        let _ = window.set_focus();
                    }
                }
            }
        })
        .build(app)
}
@@ -0,0 +1,35 @@
{
  "$schema": "https://raw.githubusercontent.com/tauri-apps/tauri/dev/crates/tauri-cli/config.schema.json",
  "productName": "ZeroClaw",
  "version": "0.6.1",
  "identifier": "ai.zeroclawlabs.desktop",
  "build": {
    "devUrl": "http://127.0.0.1:42617/_app/",
    "frontendDist": "http://127.0.0.1:42617/_app/"
  },
  "app": {
    "windows": [
      {
        "title": "ZeroClaw",
        "width": 1200,
        "height": 800,
        "resizable": true,
        "fullscreen": false,
        "visible": false
      }
    ],
    "security": {
      "csp": "default-src 'self' http://127.0.0.1:* ws://127.0.0.1:*; connect-src 'self' http://127.0.0.1:* ws://127.0.0.1:*; script-src 'self' 'unsafe-inline' http://127.0.0.1:*; style-src 'self' 'unsafe-inline' http://127.0.0.1:*; img-src 'self' http://127.0.0.1:* data:"
    }
  },
  "bundle": {
    "active": true,
    "targets": "all",
    "icon": [
      "icons/32x32.png",
      "icons/128x128.png",
      "icons/icon.icns",
      "icons/icon.ico"
    ]
  }
}
@@ -263,7 +263,7 @@ fn bench_memory_operations(c: &mut Criterion) {
    c.bench_function("memory_recall_top10", |b| {
        b.iter(|| {
            rt.block_on(async {
-               mem.recall(black_box("zeroclaw agent"), 10, None)
+               mem.recall(black_box("zeroclaw agent"), 10, None, None, None)
                    .await
                    .unwrap()
            })

@@ -0,0 +1,25 @@
[package]
name = "aardvark-sys"
version = "0.1.0"
edition = "2021"
authors = ["theonlyhennygod"]
license = "MIT OR Apache-2.0"
description = "Low-level bindings for the Total Phase Aardvark I2C/SPI/GPIO USB adapter"
repository = "https://github.com/zeroclaw-labs/zeroclaw"

# NOTE: This crate is the ONLY place in ZeroClaw where unsafe code is permitted.
# The rest of the workspace remains #![forbid(unsafe_code)].
#
# Stub implementation: the Total Phase SDK (aardvark.h + aardvark.so) is NOT
# yet committed. All AardvarkHandle methods return Err(AardvarkError::NotFound)
# at runtime. No unsafe code is needed for the stub.
#
# To enable real hardware (once SDK files are in vendor/):
#   1. Add `bindgen = "0.69"` to [build-dependencies]
#   2. Add `libc = "0.2"` to [dependencies]
#   3. Uncomment the build.rs bindgen call
#   4. Replace stub method bodies with FFI calls via mod bindings

[dependencies]
libloading = "0.8"
thiserror = "2.0"
@@ -0,0 +1,27 @@
//! Build script for aardvark-sys.
//!
//! # SDK present (real hardware)
//! When the Total Phase SDK files are in `vendor/`:
//! - Sets linker search path for aardvark.so
//! - Generates src/bindings.rs via bindgen
//!
//! # SDK absent (stub)
//! Does nothing. All AardvarkHandle methods return errors at runtime.

fn main() {
    // Stub: SDK not yet in vendor/
    // Uncomment and fill in when aardvark.h + aardvark.so are available:
    //
    // println!("cargo:rustc-link-search=native=crates/aardvark-sys/vendor");
    // println!("cargo:rustc-link-lib=dylib=aardvark");
    // println!("cargo:rerun-if-changed=vendor/aardvark.h");
    //
    // let bindings = bindgen::Builder::default()
    //     .header("vendor/aardvark.h")
    //     .parse_callbacks(Box::new(bindgen::CargoCallbacks::new()))
    //     .generate()
    //     .expect("Unable to generate aardvark bindings");
    // bindings
    //     .write_to_file("src/bindings.rs")
    //     .expect("Could not write bindings");
}
@@ -0,0 +1,475 @@
//! Bindings for the Total Phase Aardvark I2C/SPI/GPIO USB adapter.
//!
//! Uses [`libloading`] to load `aardvark.so` at runtime — the same pattern
//! the official Total Phase C stub (`aardvark.c`) uses internally.
//!
//! # Library search order
//!
//! 1. `ZEROCLAW_AARDVARK_LIB` environment variable (full path to `aardvark.so`)
//! 2. `<workspace>/crates/aardvark-sys/vendor/aardvark.so` (development default)
//! 3. `./aardvark.so` (next to the binary, for deployment)
//!
//! If none resolve, every method returns
//! [`Err(AardvarkError::LibraryNotFound)`](AardvarkError::LibraryNotFound).
//!
//! # Safety
//!
//! This crate is the **only** place in ZeroClaw where `unsafe` is permitted.
//! All `unsafe` is confined to `extern "C"` call sites inside this file.
//! The public API is fully safe Rust.

use std::path::PathBuf;
use std::sync::OnceLock;

use libloading::{Library, Symbol};
use thiserror::Error;

// ── Constants from aardvark.h ─────────────────────────────────────────────

/// Bit set on a port returned by `aa_find_devices` when that port is in use.
const AA_PORT_NOT_FREE: u16 = 0x8000;
/// Configure adapter for I2C + GPIO (I2C master mode, SPI disabled).
const AA_CONFIG_GPIO_I2C: i32 = 0x02;
/// Configure adapter for SPI + GPIO (SPI master mode, I2C disabled).
const AA_CONFIG_SPI_GPIO: i32 = 0x01;
/// No I2C flags (standard 7-bit addressing, normal stop condition).
const AA_I2C_NO_FLAGS: i32 = 0x00;
/// Enable both onboard I2C pullup resistors (hardware v2+ only).
const AA_I2C_PULLUP_BOTH: u8 = 0x03;

// ── Library loading ───────────────────────────────────────────────────────

static AARDVARK_LIB: OnceLock<Option<Library>> = OnceLock::new();

fn lib() -> Option<&'static Library> {
    AARDVARK_LIB
        .get_or_init(|| {
            let candidates: Vec<PathBuf> = vec![
                // 1. Explicit env-var override (full path)
                std::env::var("ZEROCLAW_AARDVARK_LIB")
                    .ok()
                    .map(PathBuf::from)
                    .unwrap_or_default(),
                // 2. Vendor directory shipped with this crate (dev default)
                {
                    let mut p = PathBuf::from(env!("CARGO_MANIFEST_DIR"));
                    p.push("vendor/aardvark.so");
                    p
                },
                // 3. Next to the running binary (deployment)
                std::env::current_exe()
                    .ok()
                    .and_then(|e| e.parent().map(|d| d.join("aardvark.so")))
                    .unwrap_or_default(),
                // 4. Current working directory
                PathBuf::from("aardvark.so"),
            ];
            let mut tried_any = false;
            for path in &candidates {
                if path.as_os_str().is_empty() {
                    continue;
                }
                tried_any = true;
                match unsafe { Library::new(path) } {
                    Ok(lib) => {
                        // Verify the .so exports aa_c_version (Total Phase version gate).
                        // The .so exports c_aa_* symbols (not aa_*); aa_c_version is the
                        // one non-prefixed symbol used to confirm library identity.
                        let version_ok = unsafe {
                            lib.get::<unsafe extern "C" fn() -> u32>(b"aa_c_version\0").is_ok()
                        };
                        if !version_ok {
                            eprintln!(
                                "[aardvark-sys] {} loaded but aa_c_version not found — \
                                 not a valid Aardvark library, skipping",
                                path.display()
                            );
                            continue;
                        }
                        eprintln!("[aardvark-sys] loaded library from {}", path.display());
                        return Some(lib);
                    }
                    Err(e) => {
                        let msg = e.to_string();
                        // Surface architecture mismatch explicitly — the most common
                        // failure on Apple Silicon machines with an x86_64 SDK.
                        if msg.contains("incompatible architecture") || msg.contains("mach-o file") {
                            eprintln!(
                                "[aardvark-sys] ARCHITECTURE MISMATCH loading {}: {}\n\
                                 [aardvark-sys] The vendored aardvark.so is x86_64 but this \
                                 binary is {}.\n\
                                 [aardvark-sys] Download the arm64 SDK from https://www.totalphase.com/downloads/ \
                                 or build with --target x86_64-apple-darwin.",
                                path.display(),
                                msg,
                                std::env::consts::ARCH,
                            );
                        } else {
                            eprintln!(
                                "[aardvark-sys] could not load {}: {}",
                                path.display(),
                                msg
                            );
                        }
                    }
                }
            }
            if !tried_any {
                eprintln!("[aardvark-sys] no library candidates found; set ZEROCLAW_AARDVARK_LIB or place aardvark.so next to the binary");
            }
            None
        })
        .as_ref()
}

/// Errors returned by Aardvark hardware operations.
#[derive(Debug, Error)]
pub enum AardvarkError {
    /// No Aardvark adapter found — adapter not plugged in.
    #[error("Aardvark adapter not found — is it plugged in?")]
    NotFound,
    /// `aa_open` returned a non-positive handle.
    #[error("Aardvark open failed (code {0})")]
    OpenFailed(i32),
    /// `aa_i2c_write` returned a negative status code.
    #[error("I2C write failed (code {0})")]
    I2cWriteFailed(i32),
    /// `aa_i2c_read` returned a negative status code.
    #[error("I2C read failed (code {0})")]
    I2cReadFailed(i32),
    /// `aa_spi_write` returned a negative status code.
    #[error("SPI transfer failed (code {0})")]
    SpiTransferFailed(i32),
    /// GPIO operation returned a negative status code.
    #[error("GPIO error (code {0})")]
    GpioError(i32),
    /// `aardvark.so` could not be found or loaded.
    #[error("aardvark.so not found — set ZEROCLAW_AARDVARK_LIB or place it next to the binary")]
    LibraryNotFound,
}

/// Convenience `Result` alias for this crate.
pub type Result<T> = std::result::Result<T, AardvarkError>;

// ── Handle ────────────────────────────────────────────────────────────────

/// Safe RAII handle over the Aardvark C library handle.
///
/// Automatically closes the adapter on `Drop`.
///
/// **Usage pattern:** open a fresh handle per command and let it drop at the
/// end of each operation (lazy-open / eager-close).
pub struct AardvarkHandle {
    handle: i32,
}

impl AardvarkHandle {
    // ── Lifecycle ─────────────────────────────────────────────────────────

    /// Open the first available (free) Aardvark adapter.
    pub fn open() -> Result<Self> {
        let ports = Self::find_devices();
        let port = ports.first().copied().ok_or(AardvarkError::NotFound)?;
        Self::open_port(i32::from(port))
    }

    /// Open a specific Aardvark adapter by port index.
    pub fn open_port(port: i32) -> Result<Self> {
        let lib = lib().ok_or(AardvarkError::LibraryNotFound)?;
        let handle: i32 = unsafe {
            let f: Symbol<unsafe extern "C" fn(i32) -> i32> = lib
                .get(b"c_aa_open\0")
                .map_err(|_| AardvarkError::LibraryNotFound)?;
            f(port)
        };
        if handle <= 0 {
            Err(AardvarkError::OpenFailed(handle))
        } else {
            Ok(Self { handle })
        }
    }

    /// Return the port numbers of all **free** connected adapters.
    ///
    /// Ports in-use by another process are filtered out.
    /// Returns an empty `Vec` when `aardvark.so` cannot be loaded.
    pub fn find_devices() -> Vec<u16> {
        let Some(lib) = lib() else {
            eprintln!("[aardvark-sys] find_devices: library not loaded");
            return Vec::new();
        };
        let mut ports = [0u16; 16];
        let n: i32 = unsafe {
            let f: std::result::Result<Symbol<unsafe extern "C" fn(i32, *mut u16) -> i32>, _> =
                lib.get(b"c_aa_find_devices\0");
            match f {
                Ok(f) => f(16, ports.as_mut_ptr()),
                Err(e) => {
                    eprintln!("[aardvark-sys] find_devices: symbol lookup failed: {e}");
                    return Vec::new();
                }
            }
        };
        eprintln!(
            "[aardvark-sys] find_devices: c_aa_find_devices returned {n}, ports={:?}",
            &ports[..n.max(0) as usize]
        );
        if n <= 0 {
            return Vec::new();
        }
        let free: Vec<u16> = ports[..n as usize]
            .iter()
            .filter(|&&p| (p & AA_PORT_NOT_FREE) == 0)
            .copied()
            .collect();
        eprintln!("[aardvark-sys] find_devices: free ports={free:?}");
        free
    }

    // ── I2C ───────────────────────────────────────────────────────────────

    /// Enable I2C mode and set the bitrate (kHz).
    pub fn i2c_enable(&self, bitrate_khz: u32) -> Result<()> {
        let lib = lib().ok_or(AardvarkError::LibraryNotFound)?;
        unsafe {
            let configure: Symbol<unsafe extern "C" fn(i32, i32) -> i32> = lib
                .get(b"c_aa_configure\0")
                .map_err(|_| AardvarkError::LibraryNotFound)?;
            configure(self.handle, AA_CONFIG_GPIO_I2C);
            let pullup: Symbol<unsafe extern "C" fn(i32, u8) -> i32> = lib
                .get(b"c_aa_i2c_pullup\0")
                .map_err(|_| AardvarkError::LibraryNotFound)?;
            pullup(self.handle, AA_I2C_PULLUP_BOTH);
            let bitrate: Symbol<unsafe extern "C" fn(i32, i32) -> i32> = lib
                .get(b"c_aa_i2c_bitrate\0")
                .map_err(|_| AardvarkError::LibraryNotFound)?;
            bitrate(self.handle, bitrate_khz as i32);
        }
        Ok(())
    }

    /// Write `data` bytes to the I2C device at `addr`.
    pub fn i2c_write(&self, addr: u8, data: &[u8]) -> Result<()> {
        let lib = lib().ok_or(AardvarkError::LibraryNotFound)?;
        let ret: i32 = unsafe {
            let f: Symbol<unsafe extern "C" fn(i32, u16, i32, u16, *const u8) -> i32> = lib
                .get(b"c_aa_i2c_write\0")
                .map_err(|_| AardvarkError::LibraryNotFound)?;
            f(
                self.handle,
                u16::from(addr),
                AA_I2C_NO_FLAGS,
                data.len() as u16,
                data.as_ptr(),
            )
        };
        if ret < 0 {
            Err(AardvarkError::I2cWriteFailed(ret))
        } else {
            Ok(())
        }
    }

    /// Read `len` bytes from the I2C device at `addr`.
    pub fn i2c_read(&self, addr: u8, len: usize) -> Result<Vec<u8>> {
        let lib = lib().ok_or(AardvarkError::LibraryNotFound)?;
        let mut buf = vec![0u8; len];
        let ret: i32 = unsafe {
            let f: Symbol<unsafe extern "C" fn(i32, u16, i32, u16, *mut u8) -> i32> = lib
                .get(b"c_aa_i2c_read\0")
                .map_err(|_| AardvarkError::LibraryNotFound)?;
            f(
                self.handle,
                u16::from(addr),
                AA_I2C_NO_FLAGS,
                len as u16,
                buf.as_mut_ptr(),
            )
        };
        if ret < 0 {
            Err(AardvarkError::I2cReadFailed(ret))
        } else {
            Ok(buf)
        }
    }

    /// Write then read — standard I2C register-read pattern.
    pub fn i2c_write_read(&self, addr: u8, write_data: &[u8], read_len: usize) -> Result<Vec<u8>> {
        self.i2c_write(addr, write_data)?;
        self.i2c_read(addr, read_len)
    }

    /// Scan the I2C bus, returning addresses of all responding devices.
    ///
    /// Probes `0x08–0x77` with a 1-byte read; returns addresses that ACK.
    pub fn i2c_scan(&self) -> Vec<u8> {
        let Some(lib) = lib() else {
            return Vec::new();
        };
        let Ok(f): std::result::Result<
            Symbol<unsafe extern "C" fn(i32, u16, i32, u16, *mut u8) -> i32>,
            _,
        > = (unsafe { lib.get(b"c_aa_i2c_read\0") }) else {
            return Vec::new();
        };
        let mut found = Vec::new();
        let mut buf = [0u8; 1];
        for addr in 0x08u16..=0x77 {
            let ret = unsafe { f(self.handle, addr, AA_I2C_NO_FLAGS, 1, buf.as_mut_ptr()) };
            // ret > 0: bytes received → device ACKed
            // ret == 0: NACK → no device at this address
            // ret < 0: error code → skip
            if ret > 0 {
                found.push(addr as u8);
            }
        }
        found
    }

    // ── SPI ───────────────────────────────────────────────────────────────

    /// Enable SPI mode and set the bitrate (kHz).
    pub fn spi_enable(&self, bitrate_khz: u32) -> Result<()> {
        let lib = lib().ok_or(AardvarkError::LibraryNotFound)?;
        unsafe {
            let configure: Symbol<unsafe extern "C" fn(i32, i32) -> i32> = lib
                .get(b"c_aa_configure\0")
                .map_err(|_| AardvarkError::LibraryNotFound)?;
            configure(self.handle, AA_CONFIG_SPI_GPIO);
            // SPI mode 0: polarity=rising/falling(0), phase=sample/setup(0), MSB first(0)
            let spi_cfg: Symbol<unsafe extern "C" fn(i32, i32, i32, i32) -> i32> = lib
                .get(b"c_aa_spi_configure\0")
                .map_err(|_| AardvarkError::LibraryNotFound)?;
            spi_cfg(self.handle, 0, 0, 0);
            let bitrate: Symbol<unsafe extern "C" fn(i32, i32) -> i32> = lib
                .get(b"c_aa_spi_bitrate\0")
                .map_err(|_| AardvarkError::LibraryNotFound)?;
            bitrate(self.handle, bitrate_khz as i32);
        }
        Ok(())
    }

    /// Full-duplex SPI transfer.
    ///
    /// Sends `send` bytes; returns the simultaneously received bytes (same length).
    pub fn spi_transfer(&self, send: &[u8]) -> Result<Vec<u8>> {
        let lib = lib().ok_or(AardvarkError::LibraryNotFound)?;
        let mut recv = vec![0u8; send.len()];
        // aa_spi_write(aardvark, out_num_bytes, data_out, in_num_bytes, data_in)
        let ret: i32 = unsafe {
            let f: Symbol<unsafe extern "C" fn(i32, u16, *const u8, u16, *mut u8) -> i32> = lib
                .get(b"c_aa_spi_write\0")
                .map_err(|_| AardvarkError::LibraryNotFound)?;
            f(
                self.handle,
                send.len() as u16,
                send.as_ptr(),
                recv.len() as u16,
                recv.as_mut_ptr(),
            )
        };
        if ret < 0 {
            Err(AardvarkError::SpiTransferFailed(ret))
        } else {
            Ok(recv)
        }
    }

    // ── GPIO ──────────────────────────────────────────────────────────────

    /// Set GPIO pin directions and output values.
    ///
    /// `direction`: bitmask — `1` = output, `0` = input.
    /// `value`: output state bitmask.
    pub fn gpio_set(&self, direction: u8, value: u8) -> Result<()> {
        let lib = lib().ok_or(AardvarkError::LibraryNotFound)?;
        unsafe {
            let dir_f: Symbol<unsafe extern "C" fn(i32, u8) -> i32> = lib
                .get(b"c_aa_gpio_direction\0")
                .map_err(|_| AardvarkError::LibraryNotFound)?;
            let d = dir_f(self.handle, direction);
            if d < 0 {
                return Err(AardvarkError::GpioError(d));
            }
            let set_f: Symbol<unsafe extern "C" fn(i32, u8) -> i32> =
                lib.get(b"c_aa_gpio_set\0")
                    .map_err(|_| AardvarkError::LibraryNotFound)?;
            let r = set_f(self.handle, value);
            if r < 0 {
                return Err(AardvarkError::GpioError(r));
            }
        }
        Ok(())
    }

    /// Read the current GPIO pin states as a bitmask.
    pub fn gpio_get(&self) -> Result<u8> {
        let lib = lib().ok_or(AardvarkError::LibraryNotFound)?;
        let ret: i32 = unsafe {
            let f: Symbol<unsafe extern "C" fn(i32) -> i32> = lib
                .get(b"c_aa_gpio_get\0")
                .map_err(|_| AardvarkError::LibraryNotFound)?;
            f(self.handle)
        };
        if ret < 0 {
            Err(AardvarkError::GpioError(ret))
        } else {
            Ok(ret as u8)
        }
    }
}

impl Drop for AardvarkHandle {
    fn drop(&mut self) {
        if let Some(lib) = lib() {
            unsafe {
                if let Ok(f) = lib.get::<unsafe extern "C" fn(i32) -> i32>(b"c_aa_close\0") {
                    f(self.handle);
                }
            }
        }
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn find_devices_does_not_panic() {
        // With no adapter plugged in, must return empty without panicking.
        let _ = AardvarkHandle::find_devices();
    }

    #[test]
    fn open_returns_error_or_ok_depending_on_hardware() {
        // With hardware connected: open() succeeds (Ok).
        // Without hardware: returns LibraryNotFound, NotFound, or OpenFailed — any Err is fine.
        // Both outcomes are valid; the important thing is no panic.
        let _ = AardvarkHandle::open();
    }

    #[test]
    fn open_port_returns_error_when_no_hardware() {
        // Port 99 doesn't exist — must return an error regardless of whether hardware is connected.
        assert!(AardvarkHandle::open_port(99).is_err());
    }

    #[test]
    fn error_display_messages_are_human_readable() {
        assert!(AardvarkError::NotFound
            .to_string()
            .to_lowercase()
            .contains("not found"));
        assert!(AardvarkError::OpenFailed(-1).to_string().contains("-1"));
        assert!(AardvarkError::I2cWriteFailed(-3)
            .to_string()
            .contains("I2C write"));
        assert!(AardvarkError::SpiTransferFailed(-2)
            .to_string()
            .contains("SPI"));
        assert!(AardvarkError::LibraryNotFound
            .to_string()
            .contains("aardvark.so"));
    }
}
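
A usage sketch of the safe API above (not in the diff): open the first free adapter, enable I2C, and perform a register read. The device address 0x48 and register 0x00 are hypothetical placeholders.

fn read_one_register() -> Result<u8> {
    let dev = AardvarkHandle::open()?; // RAII: closed automatically on drop
    dev.i2c_enable(400)?;              // 400 kHz I2C
    // Hypothetical device at 0x48; write register index 0x00, read 1 byte back.
    let bytes = dev.i2c_write_read(0x48, &[0x00], 1)?;
    Ok(bytes[0])
}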
@@ -0,0 +1,919 @@
/*=========================================================================
| Aardvark Interface Library
|--------------------------------------------------------------------------
| Copyright (c) 2003-2024 Total Phase, Inc.
| All rights reserved.
| www.totalphase.com
|
| Redistribution and use of this file in source and binary forms, with
| or without modification, are permitted provided that the following
| conditions are met:
|
| - Redistributions of source code must retain the above copyright
|   notice, this list of conditions, and the following disclaimer.
|
| - Redistributions in binary form must reproduce the above copyright
|   notice, this list of conditions, and the following disclaimer in the
|   documentation or other materials provided with the distribution.
|
| - This file must only be used to interface with Total Phase products.
|   The names of Total Phase and its contributors must not be used to
|   endorse or promote products derived from this software.
|
| THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
| "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING BUT NOT
| LIMITED TO THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
| FOR A PARTICULAR PURPOSE, ARE DISCLAIMED. IN NO EVENT WILL THE
| COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
| INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING
| BUT NOT LIMITED TO PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
| LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
| CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
| LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
| ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
| POSSIBILITY OF SUCH DAMAGE.
|--------------------------------------------------------------------------
| To access Total Phase Aardvark devices through the API:
|
| 1) Use one of the following shared objects:
|      aardvark.so      --  Linux or macOS shared object
|      aardvark.dll     --  Windows dynamic link library
|
| 2) Along with one of the following language modules:
|      aardvark.c/h     --  C/C++ API header file and interface module
|      aardvark_py.py   --  Python API
|      aardvark.cs      --  C# .NET source
|      aardvark_net.dll --  Compiled .NET binding
|      aardvark.bas     --  Visual Basic 6 API
 ========================================================================*/


#ifndef __aardvark_h__
#define __aardvark_h__

#ifdef __cplusplus
extern "C" {
#endif


/*=========================================================================
| TYPEDEFS
 ========================================================================*/
#ifndef TOTALPHASE_DATA_TYPES
#define TOTALPHASE_DATA_TYPES

#ifndef _MSC_VER
/* C99-compliant compilers (GCC) */
#include <stdint.h>
typedef uint8_t  u08;
typedef uint16_t u16;
typedef uint32_t u32;
typedef uint64_t u64;
typedef int8_t   s08;
typedef int16_t  s16;
typedef int32_t  s32;
typedef int64_t  s64;

#else
/* Microsoft compilers (Visual C++) */
typedef unsigned __int8  u08;
typedef unsigned __int16 u16;
typedef unsigned __int32 u32;
typedef unsigned __int64 u64;
typedef signed   __int8  s08;
typedef signed   __int16 s16;
typedef signed   __int32 s32;
typedef signed   __int64 s64;

#endif /* __MSC_VER */

typedef float  f32;
typedef double f64;

#endif /* TOTALPHASE_DATA_TYPES */


/*=========================================================================
| DEBUG
 ========================================================================*/
/* Set the following macro to '1' for debugging */
#define AA_DEBUG 0


/*=========================================================================
| VERSION
 ========================================================================*/
#define AA_HEADER_VERSION 0x0600 /* v6.00 */


/*=========================================================================
| STATUS CODES
 ========================================================================*/
/*
 * All API functions return an integer which is the result of the
 * transaction, or a status code if negative. The status codes are
 * defined as follows:
 */
enum AardvarkStatus {
    /* General codes (0 to -99) */
    AA_OK                       =    0,
    AA_UNABLE_TO_LOAD_LIBRARY   =   -1,
    AA_UNABLE_TO_LOAD_DRIVER    =   -2,
    AA_UNABLE_TO_LOAD_FUNCTION  =   -3,
    AA_INCOMPATIBLE_LIBRARY     =   -4,
    AA_INCOMPATIBLE_DEVICE      =   -5,
    AA_COMMUNICATION_ERROR      =   -6,
    AA_UNABLE_TO_OPEN           =   -7,
    AA_UNABLE_TO_CLOSE          =   -8,
    AA_INVALID_HANDLE           =   -9,
    AA_CONFIG_ERROR             =  -10,

    /* I2C codes (-100 to -199) */
    AA_I2C_NOT_AVAILABLE        = -100,
    AA_I2C_NOT_ENABLED          = -101,
    AA_I2C_READ_ERROR           = -102,
    AA_I2C_WRITE_ERROR          = -103,
    AA_I2C_SLAVE_BAD_CONFIG     = -104,
    AA_I2C_SLAVE_READ_ERROR     = -105,
    AA_I2C_SLAVE_TIMEOUT        = -106,
    AA_I2C_DROPPED_EXCESS_BYTES = -107,
    AA_I2C_BUS_ALREADY_FREE     = -108,

    /* SPI codes (-200 to -299) */
    AA_SPI_NOT_AVAILABLE        = -200,
    AA_SPI_NOT_ENABLED          = -201,
    AA_SPI_WRITE_ERROR          = -202,
    AA_SPI_SLAVE_READ_ERROR     = -203,
    AA_SPI_SLAVE_TIMEOUT        = -204,
    AA_SPI_DROPPED_EXCESS_BYTES = -205,

    /* GPIO codes (-400 to -499) */
    AA_GPIO_NOT_AVAILABLE       = -400
};
#ifndef __cplusplus
typedef enum AardvarkStatus AardvarkStatus;
#endif


/*=========================================================================
| GENERAL TYPE DEFINITIONS
 ========================================================================*/
/* Aardvark handle type definition */
typedef int Aardvark;

/*
 * Deprecated type definitions.
 *
 * These are only for use with legacy code and
 * should not be used for new development.
 */
typedef u08 aa_u08;

typedef u16 aa_u16;

typedef u32 aa_u32;

typedef s08 aa_s08;

typedef s16 aa_s16;

typedef s32 aa_s32;

/*
 * Aardvark version matrix.
 *
 * This matrix describes the various version dependencies
 * of Aardvark components. It can be used to determine
 * which component caused an incompatibility error.
 *
 * All version numbers are of the format:
 *   (major << 8) | minor
 *
 * ex. v1.20 would be encoded as: 0x0114
 */
struct AardvarkVersion {
    /* Software, firmware, and hardware versions. */
    u16 software;
    u16 firmware;
    u16 hardware;

    /* Firmware requires that software must be >= this version. */
    u16 sw_req_by_fw;

    /* Software requires that firmware must be >= this version. */
    u16 fw_req_by_sw;

    /* Software requires that the API interface must be >= this version. */
    u16 api_req_by_sw;
};
#ifndef __cplusplus
typedef struct AardvarkVersion AardvarkVersion;
#endif


/*=========================================================================
| GENERAL API
 ========================================================================*/
/*
 * Get a list of ports to which Aardvark devices are attached.
 *
 * nelem   = maximum number of elements to return
 * devices = array into which the port numbers are returned
 *
 * Each element of the array is written with the port number.
 * Devices that are in-use are ORed with AA_PORT_NOT_FREE (0x8000).
 *
 * ex. devices are attached to ports 0, 1, 2
 *     ports 0 and 2 are available, and port 1 is in-use.
 *     array => 0x0000, 0x8001, 0x0002
 *
 * If the array is NULL, it is not filled with any values.
 * If there are more devices than the array size, only the
 * first nmemb port numbers will be written into the array.
 *
 * Returns the number of devices found, regardless of the
 * array size.
 */
#define AA_PORT_NOT_FREE 0x8000
int aa_find_devices (
    int   num_devices,
    u16 * devices
);


/*
 * Get a list of ports to which Aardvark devices are attached.
 *
 * This function is the same as aa_find_devices() except that
 * it returns the unique IDs of each Aardvark device. The IDs
 * are guaranteed to be non-zero if valid.
 *
 * The IDs are the unsigned integer representation of the 10-digit
 * serial numbers.
 */
int aa_find_devices_ext (
    int   num_devices,
    u16 * devices,
    int   num_ids,
    u32 * unique_ids
);


/*
 * Open the Aardvark port.
 *
 * The port number is a zero-indexed integer.
 *
 * The port number is the same as that obtained from the
 * aa_find_devices() function above.
 *
 * Returns an Aardvark handle, which is guaranteed to be
 * greater than zero if it is valid.
 *
 * This function is recommended for use in simple applications
 * where extended information is not required. For more complex
 * applications, the use of aa_open_ext() is recommended.
 */
Aardvark aa_open (
    int port_number
);


/*
 * Open the Aardvark port, returning extended information
 * in the supplied structure. Behavior is otherwise identical
 * to aa_open() above. If 0 is passed as the pointer to the
 * structure, this function is exactly equivalent to aa_open().
 *
 * The structure is zeroed before the open is attempted.
 * It is filled with whatever information is available.
 *
 * For example, if the firmware version is not filled, then
 * the device could not be queried for its version number.
 *
 * This function is recommended for use in complex applications
 * where extended information is required. For more simple
 * applications, the use of aa_open() is recommended.
 */
struct AardvarkExt {
    /* Version matrix */
    AardvarkVersion version;

    /* Features of this device. */
    int features;
};
#ifndef __cplusplus
typedef struct AardvarkExt AardvarkExt;
#endif

Aardvark aa_open_ext (
    int           port_number,
    AardvarkExt * aa_ext
);


/* Close the Aardvark port. */
int aa_close (
    Aardvark aardvark
);


/*
 * Return the port for this Aardvark handle.
 *
 * The port number is a zero-indexed integer.
 */
int aa_port (
    Aardvark aardvark
);


/*
 * Return the device features as a bit-mask of values, or
 * an error code if the handle is not valid.
 */
#define AA_FEATURE_SPI  0x00000001
#define AA_FEATURE_I2C  0x00000002
#define AA_FEATURE_GPIO 0x00000008
int aa_features (
    Aardvark aardvark
);


/*
 * Return the unique ID for this Aardvark adapter.
 * IDs are guaranteed to be non-zero if valid.
 * The ID is the unsigned integer representation of the
 * 10-digit serial number.
 */
u32 aa_unique_id (
    Aardvark aardvark
);


/*
 * Return the status string for the given status code.
 * If the code is not valid or the library function cannot
 * be loaded, return a NULL string.
 */
const char * aa_status_string (
    int status
);


/*
 * Enable logging to a file. The handle must be standard file
 * descriptor. In C, a file descriptor can be obtained by using
 * the ANSI C function "open" or by using the function "fileno"
 * on a FILE* stream. A FILE* stream can be obtained using "fopen"
 * or can correspond to the common "stdout" or "stderr" --
 * available when including stdlib.h
 */
#define AA_LOG_STDOUT 1
#define AA_LOG_STDERR 2
int aa_log (
    Aardvark aardvark,
    int      level,
    int      handle
);


/*
 * Return the version matrix for the device attached to the
 * given handle. If the handle is 0 or invalid, only the
 * software and required api versions are set.
 */
int aa_version (
    Aardvark          aardvark,
    AardvarkVersion * version
);


/*
 * Configure the device by enabling/disabling I2C, SPI, and
 * GPIO functions.
 */
enum AardvarkConfig {
    AA_CONFIG_GPIO_ONLY = 0x00,
    AA_CONFIG_SPI_GPIO  = 0x01,
    AA_CONFIG_GPIO_I2C  = 0x02,
    AA_CONFIG_SPI_I2C   = 0x03,
    AA_CONFIG_QUERY     = 0x80
};
#ifndef __cplusplus
typedef enum AardvarkConfig AardvarkConfig;
#endif

#define AA_CONFIG_SPI_MASK 0x00000001
#define AA_CONFIG_I2C_MASK 0x00000002
int aa_configure (
    Aardvark       aardvark,
    AardvarkConfig config
);


/*
 * Configure the target power pins.
 * This is only supported on hardware versions >= 2.00
 */
#define AA_TARGET_POWER_NONE  0x00
#define AA_TARGET_POWER_BOTH  0x03
#define AA_TARGET_POWER_QUERY 0x80
int aa_target_power (
    Aardvark aardvark,
    u08      power_mask
);


/*
 * Sleep for the specified number of milliseconds
 * Accuracy depends on the operating system scheduler
 * Returns the number of milliseconds slept
 */
u32 aa_sleep_ms (
    u32 milliseconds
);



/*=========================================================================
| ASYNC MESSAGE POLLING
 ========================================================================*/
/*
 * Polling function to check if there are any asynchronous
 * messages pending for processing. The function takes a timeout
 * value in units of milliseconds. If the timeout is < 0, the
 * function will block until data is received. If the timeout is 0,
|
||||
* the function will perform a non-blocking check.
|
||||
*/
|
||||
#define AA_ASYNC_NO_DATA 0x00000000
|
||||
#define AA_ASYNC_I2C_READ 0x00000001
|
||||
#define AA_ASYNC_I2C_WRITE 0x00000002
|
||||
#define AA_ASYNC_SPI 0x00000004
|
||||
int aa_async_poll (
|
||||
Aardvark aardvark,
|
||||
int timeout
|
||||
);
|
||||
|
||||
|
||||
|
||||
/*=========================================================================
|
||||
| I2C API
|
||||
========================================================================*/
|
||||
/* Free the I2C bus. */
|
||||
int aa_i2c_free_bus (
|
||||
Aardvark aardvark
|
||||
);
|
||||
|
||||
|
||||
/*
|
||||
* Set the I2C bit rate in kilohertz. If a zero is passed as the
|
||||
* bitrate, the bitrate is unchanged and the current bitrate is
|
||||
* returned.
|
||||
*/
|
||||
int aa_i2c_bitrate (
|
||||
Aardvark aardvark,
|
||||
int bitrate_khz
|
||||
);
|
||||
|
||||
|
||||
/*
|
||||
* Set the bus lock timeout. If a zero is passed as the timeout,
|
||||
* the timeout is unchanged and the current timeout is returned.
|
||||
*/
|
||||
int aa_i2c_bus_timeout (
|
||||
Aardvark aardvark,
|
||||
u16 timeout_ms
|
||||
);
|
||||
|
||||
|
||||
enum AardvarkI2cFlags {
|
||||
AA_I2C_NO_FLAGS = 0x00,
|
||||
AA_I2C_10_BIT_ADDR = 0x01,
|
||||
AA_I2C_COMBINED_FMT = 0x02,
|
||||
AA_I2C_NO_STOP = 0x04,
|
||||
AA_I2C_SIZED_READ = 0x10,
|
||||
AA_I2C_SIZED_READ_EXTRA1 = 0x20
|
||||
};
|
||||
#ifndef __cplusplus
|
||||
typedef enum AardvarkI2cFlags AardvarkI2cFlags;
|
||||
#endif
|
||||
|
||||
/* Read a stream of bytes from the I2C slave device. */
|
||||
int aa_i2c_read (
|
||||
Aardvark aardvark,
|
||||
u16 slave_addr,
|
||||
AardvarkI2cFlags flags,
|
||||
u16 num_bytes,
|
||||
u08 * data_in
|
||||
);
|
||||
|
||||
|
||||
enum AardvarkI2cStatus {
|
||||
AA_I2C_STATUS_OK = 0,
|
||||
AA_I2C_STATUS_BUS_ERROR = 1,
|
||||
AA_I2C_STATUS_SLA_ACK = 2,
|
||||
AA_I2C_STATUS_SLA_NACK = 3,
|
||||
AA_I2C_STATUS_DATA_NACK = 4,
|
||||
AA_I2C_STATUS_ARB_LOST = 5,
|
||||
AA_I2C_STATUS_BUS_LOCKED = 6,
|
||||
AA_I2C_STATUS_LAST_DATA_ACK = 7
|
||||
};
|
||||
#ifndef __cplusplus
|
||||
typedef enum AardvarkI2cStatus AardvarkI2cStatus;
|
||||
#endif
|
||||
|
||||
/*
|
||||
* Read a stream of bytes from the I2C slave device.
|
||||
* This API function returns the number of bytes read into
|
||||
* the num_read variable. The return value of the function
|
||||
* is a status code.
|
||||
*/
|
||||
int aa_i2c_read_ext (
|
||||
Aardvark aardvark,
|
||||
u16 slave_addr,
|
||||
AardvarkI2cFlags flags,
|
||||
u16 num_bytes,
|
||||
u08 * data_in,
|
||||
u16 * num_read
|
||||
);
|
||||
|
||||
|
||||
/* Write a stream of bytes to the I2C slave device. */
|
||||
int aa_i2c_write (
|
||||
Aardvark aardvark,
|
||||
u16 slave_addr,
|
||||
AardvarkI2cFlags flags,
|
||||
u16 num_bytes,
|
||||
const u08 * data_out
|
||||
);
|
||||
|
||||
|
||||
/*
|
||||
* Write a stream of bytes to the I2C slave device.
|
||||
* This API function returns the number of bytes written into
|
||||
* the num_written variable. The return value of the function
|
||||
* is a status code.
|
||||
*/
|
||||
int aa_i2c_write_ext (
|
||||
Aardvark aardvark,
|
||||
u16 slave_addr,
|
||||
AardvarkI2cFlags flags,
|
||||
u16 num_bytes,
|
||||
const u08 * data_out,
|
||||
u16 * num_written
|
||||
);
|
||||
|
||||
|
||||
/*
|
||||
* Do an atomic write+read to an I2C slave device by first
|
||||
* writing a stream of bytes to the I2C slave device and then
|
||||
* reading a stream of bytes back from the same slave device.
|
||||
* This API function returns the number of bytes written into
|
||||
* the num_written variable and the number of bytes read into
|
||||
* the num_read variable. The return value of the function is
|
||||
* the status given as (read_status << 8) | (write_status).
|
||||
*/
|
||||
int aa_i2c_write_read (
|
||||
Aardvark aardvark,
|
||||
u16 slave_addr,
|
||||
AardvarkI2cFlags flags,
|
||||
u16 out_num_bytes,
|
||||
const u08 * out_data,
|
||||
u16 * num_written,
|
||||
u16 in_num_bytes,
|
||||
u08 * in_data,
|
||||
u16 * num_read
|
||||
);
|
||||
|
||||
|
||||
/* Enable/Disable the Aardvark as an I2C slave device */
|
||||
int aa_i2c_slave_enable (
|
||||
Aardvark aardvark,
|
||||
u08 addr,
|
||||
u16 maxTxBytes,
|
||||
u16 maxRxBytes
|
||||
);
|
||||
|
||||
|
||||
int aa_i2c_slave_disable (
|
||||
Aardvark aardvark
|
||||
);
|
||||
|
||||
|
||||
/*
|
||||
* Set the slave response in the event the Aardvark is put
|
||||
* into slave mode and contacted by a Master.
|
||||
*/
|
||||
int aa_i2c_slave_set_response (
|
||||
Aardvark aardvark,
|
||||
u08 num_bytes,
|
||||
const u08 * data_out
|
||||
);
|
||||
|
||||
|
||||
/*
|
||||
* Return number of bytes written from a previous
|
||||
* Aardvark->I2C_master transmission. Since the transmission is
|
||||
* happening asynchronously with respect to the PC host
|
||||
* software, there could be responses queued up from many
|
||||
* previous write transactions.
|
||||
*/
|
||||
int aa_i2c_slave_write_stats (
|
||||
Aardvark aardvark
|
||||
);
|
||||
|
||||
|
||||
/* Read the bytes from an I2C slave reception */
|
||||
int aa_i2c_slave_read (
|
||||
Aardvark aardvark,
|
||||
u08 * addr,
|
||||
u16 num_bytes,
|
||||
u08 * data_in
|
||||
);
|
||||
|
||||
|
||||
/* Extended functions that return status code */
|
||||
int aa_i2c_slave_write_stats_ext (
|
||||
Aardvark aardvark,
|
||||
u16 * num_written
|
||||
);
|
||||
|
||||
|
||||
int aa_i2c_slave_read_ext (
|
||||
Aardvark aardvark,
|
||||
u08 * addr,
|
||||
u16 num_bytes,
|
||||
u08 * data_in,
|
||||
u16 * num_read
|
||||
);
|
||||
|
||||
|
||||
/*
|
||||
* Configure the I2C pullup resistors.
|
||||
* This is only supported on hardware versions >= 2.00
|
||||
*/
|
||||
#define AA_I2C_PULLUP_NONE 0x00
|
||||
#define AA_I2C_PULLUP_BOTH 0x03
|
||||
#define AA_I2C_PULLUP_QUERY 0x80
|
||||
int aa_i2c_pullup (
|
||||
Aardvark aardvark,
|
||||
u08 pullup_mask
|
||||
);
|
||||
|
||||
|
||||
|
||||
/*=========================================================================
|
||||
| SPI API
|
||||
========================================================================*/
|
||||
/*
|
||||
* Set the SPI bit rate in kilohertz. If a zero is passed as the
|
||||
* bitrate, the bitrate is unchanged and the current bitrate is
|
||||
* returned.
|
||||
*/
|
||||
int aa_spi_bitrate (
|
||||
Aardvark aardvark,
|
||||
int bitrate_khz
|
||||
);
|
||||
|
||||
|
||||
/*
|
||||
* These configuration parameters specify how to clock the
|
||||
* bits that are sent and received on the Aardvark SPI
|
||||
* interface.
|
||||
*
|
||||
* The polarity option specifies which transition
|
||||
* constitutes the leading edge and which transition is the
|
||||
* falling edge. For example, AA_SPI_POL_RISING_FALLING
|
||||
* would configure the SPI to idle the SCK clock line low.
|
||||
* The clock would then transition low-to-high on the
|
||||
* leading edge and high-to-low on the trailing edge.
|
||||
*
|
||||
* The phase option determines whether to sample or setup on
|
||||
* the leading edge. For example, AA_SPI_PHASE_SAMPLE_SETUP
|
||||
* would configure the SPI to sample on the leading edge and
|
||||
* setup on the trailing edge.
|
||||
*
|
||||
* The bitorder option is used to indicate whether LSB or
|
||||
* MSB is shifted first.
|
||||
*
|
||||
* See the diagrams in the Aardvark datasheet for
|
||||
* more details.
|
||||
*/
|
||||
enum AardvarkSpiPolarity {
|
||||
AA_SPI_POL_RISING_FALLING = 0,
|
||||
AA_SPI_POL_FALLING_RISING = 1
|
||||
};
|
||||
#ifndef __cplusplus
|
||||
typedef enum AardvarkSpiPolarity AardvarkSpiPolarity;
|
||||
#endif
|
||||
|
||||
enum AardvarkSpiPhase {
|
||||
AA_SPI_PHASE_SAMPLE_SETUP = 0,
|
||||
AA_SPI_PHASE_SETUP_SAMPLE = 1
|
||||
};
|
||||
#ifndef __cplusplus
|
||||
typedef enum AardvarkSpiPhase AardvarkSpiPhase;
|
||||
#endif
|
||||
|
||||
enum AardvarkSpiBitorder {
|
||||
AA_SPI_BITORDER_MSB = 0,
|
||||
AA_SPI_BITORDER_LSB = 1
|
||||
};
|
||||
#ifndef __cplusplus
|
||||
typedef enum AardvarkSpiBitorder AardvarkSpiBitorder;
|
||||
#endif
|
||||
|
||||
/* Configure the SPI master or slave interface */
|
||||
int aa_spi_configure (
|
||||
Aardvark aardvark,
|
||||
AardvarkSpiPolarity polarity,
|
||||
AardvarkSpiPhase phase,
|
||||
AardvarkSpiBitorder bitorder
|
||||
);
|
||||
|
||||
|
||||
/* Write a stream of bytes to the downstream SPI slave device. */
|
||||
int aa_spi_write (
|
||||
Aardvark aardvark,
|
||||
u16 out_num_bytes,
|
||||
const u08 * data_out,
|
||||
u16 in_num_bytes,
|
||||
u08 * data_in
|
||||
);
|
||||
|
||||
|
||||
/* Enable/Disable the Aardvark as an SPI slave device */
|
||||
int aa_spi_slave_enable (
|
||||
Aardvark aardvark
|
||||
);
|
||||
|
||||
|
||||
int aa_spi_slave_disable (
|
||||
Aardvark aardvark
|
||||
);
|
||||
|
||||
|
||||
/*
|
||||
* Set the slave response in the event the Aardvark is put
|
||||
* into slave mode and contacted by a Master.
|
||||
*/
|
||||
int aa_spi_slave_set_response (
|
||||
Aardvark aardvark,
|
||||
u08 num_bytes,
|
||||
const u08 * data_out
|
||||
);
|
||||
|
||||
|
||||
/* Read the bytes from an SPI slave reception */
|
||||
int aa_spi_slave_read (
|
||||
Aardvark aardvark,
|
||||
u16 num_bytes,
|
||||
u08 * data_in
|
||||
);
|
||||
|
||||
|
||||
/*
|
||||
* Change the output polarity on the SS line.
|
||||
*
|
||||
* Note: When configured as an SPI slave, the Aardvark will
|
||||
* always be setup with SS as active low. Hence this function
|
||||
* only affects the SPI master functions on the Aardvark.
|
||||
*/
|
||||
enum AardvarkSpiSSPolarity {
|
||||
AA_SPI_SS_ACTIVE_LOW = 0,
|
||||
AA_SPI_SS_ACTIVE_HIGH = 1
|
||||
};
|
||||
#ifndef __cplusplus
|
||||
typedef enum AardvarkSpiSSPolarity AardvarkSpiSSPolarity;
|
||||
#endif
|
||||
|
||||
int aa_spi_master_ss_polarity (
|
||||
Aardvark aardvark,
|
||||
AardvarkSpiSSPolarity polarity
|
||||
);
|
||||
|
||||
|
||||
|
||||
/*=========================================================================
|
||||
| GPIO API
|
||||
========================================================================*/
|
||||
/*
|
||||
* The following enumerated type maps the named lines on the
|
||||
* Aardvark I2C/SPI line to bit positions in the GPIO API.
|
||||
* All GPIO API functions will index these lines through an
|
||||
* 8-bit masked value. Thus, each bit position in the mask
|
||||
* can be referred back its corresponding line through the
|
||||
* enumerated type.
|
||||
*/
|
||||
enum AardvarkGpioBits {
|
||||
AA_GPIO_SCL = 0x01,
|
||||
AA_GPIO_SDA = 0x02,
|
||||
AA_GPIO_MISO = 0x04,
|
||||
AA_GPIO_SCK = 0x08,
|
||||
AA_GPIO_MOSI = 0x10,
|
||||
AA_GPIO_SS = 0x20
|
||||
};
|
||||
#ifndef __cplusplus
|
||||
typedef enum AardvarkGpioBits AardvarkGpioBits;
|
||||
#endif
|
||||
|
||||
/*
|
||||
* Configure the GPIO, specifying the direction of each bit.
|
||||
*
|
||||
* A call to this function will not change the value of the pullup
|
||||
* mask in the Aardvark. This is illustrated by the following
|
||||
* example:
|
||||
* (1) Direction mask is first set to 0x00
|
||||
* (2) Pullup is set to 0x01
|
||||
* (3) Direction mask is set to 0x01
|
||||
* (4) Direction mask is later set back to 0x00.
|
||||
*
|
||||
* The pullup will be active after (4).
|
||||
*
|
||||
* On Aardvark power-up, the default value of the direction
|
||||
* mask is 0x00.
|
||||
*/
|
||||
#define AA_GPIO_DIR_INPUT 0
|
||||
#define AA_GPIO_DIR_OUTPUT 1
|
||||
int aa_gpio_direction (
|
||||
Aardvark aardvark,
|
||||
u08 direction_mask
|
||||
);
|
||||
|
||||
|
||||
/*
|
||||
* Enable an internal pullup on any of the GPIO input lines.
|
||||
*
|
||||
* Note: If a line is configured as an output, the pullup bit
|
||||
* for that line will be ignored, though that pullup bit will
|
||||
* be cached in case the line is later configured as an input.
|
||||
*
|
||||
* By default the pullup mask is 0x00.
|
||||
*/
|
||||
#define AA_GPIO_PULLUP_OFF 0
|
||||
#define AA_GPIO_PULLUP_ON 1
|
||||
int aa_gpio_pullup (
|
||||
Aardvark aardvark,
|
||||
u08 pullup_mask
|
||||
);
|
||||
|
||||
|
||||
/*
|
||||
* Read the current digital values on the GPIO input lines.
|
||||
*
|
||||
* The bits will be ordered as described by AA_GPIO_BITS. If a
|
||||
* line is configured as an output, its corresponding bit
|
||||
* position in the mask will be undefined.
|
||||
*/
|
||||
int aa_gpio_get (
|
||||
Aardvark aardvark
|
||||
);
|
||||
|
||||
|
||||
/*
|
||||
* Set the outputs on the GPIO lines.
|
||||
*
|
||||
* Note: If a line is configured as an input, it will not be
|
||||
* affected by this call, but the output value for that line
|
||||
* will be cached in the event that the line is later
|
||||
* configured as an output.
|
||||
*/
|
||||
int aa_gpio_set (
|
||||
Aardvark aardvark,
|
||||
u08 value
|
||||
);
|
||||
|
||||
|
||||
/*
|
||||
* Block until there is a change on the GPIO input lines.
|
||||
* Pins configured as outputs will be ignored.
|
||||
*
|
||||
* The function will return either when a change has occurred or
|
||||
* the timeout expires. The timeout, specified in millisecods, has
|
||||
* a precision of ~16 ms. The maximum allowable timeout is
|
||||
* approximately 4 seconds. If the timeout expires, this function
|
||||
* will return the current state of the GPIO lines.
|
||||
*
|
||||
* This function will return immediately with the current value
|
||||
* of the GPIO lines for the first invocation after any of the
|
||||
* following functions are called: aa_configure,
|
||||
* aa_gpio_direction, or aa_gpio_pullup.
|
||||
*
|
||||
* If the function aa_gpio_get is called before calling
|
||||
* aa_gpio_change, aa_gpio_change will only register any changes
|
||||
* from the value last returned by aa_gpio_get.
|
||||
*/
|
||||
int aa_gpio_change (
|
||||
Aardvark aardvark,
|
||||
u16 timeout
|
||||
);
|
||||
|
||||
|
||||
|
||||
|
||||
#ifdef __cplusplus
|
||||
}
|
||||
#endif
|
||||
|
||||
#endif /* __aardvark_h__ */
|
||||
@@ -1,80 +0,0 @@
#!/bin/bash
# Start mem0 + reranker GPU container for ZeroClaw memory backend.
#
# Required env vars:
#   MEM0_LLM_API_KEY or ZAI_API_KEY — API key for the LLM used in fact extraction
#
# Optional env vars (with defaults):
#   MEM0_LLM_PROVIDER — mem0 LLM provider (default: "openai" i.e. OpenAI-compatible)
#   MEM0_LLM_MODEL — LLM model for fact extraction (default: "glm-5-turbo")
#   MEM0_LLM_BASE_URL — LLM API base URL (default: "https://api.z.ai/api/coding/paas/v4")
#   MEM0_EMBEDDER_MODEL — embedding model (default: "BAAI/bge-m3")
#   MEM0_EMBEDDER_DIMS — embedding dimensions (default: "1024")
#   MEM0_EMBEDDER_DEVICE — "cuda", "cpu", or "auto" (default: "cuda")
#   MEM0_VECTOR_COLLECTION — Qdrant collection name (default: "zeroclaw_mem0")
#   RERANKER_MODEL — reranker model (default: "BAAI/bge-reranker-v2-m3")
#   RERANKER_DEVICE — "cuda" or "cpu" (default: "cuda")
#   MEM0_PORT — mem0 server port (default: 8765)
#   RERANKER_PORT — reranker server port (default: 8678)
#   CONTAINER_IMAGE — base container image (default: docker.io/kyuz0/amd-strix-halo-comfyui:latest)
#   CONTAINER_NAME — container name (default: mem0-gpu)
#   DATA_DIR — host path for Qdrant data (default: ~/mem0-data)
#   SCRIPT_DIR — host path for server scripts (default: directory of this script)
set -e

# Resolve script directory for mounting server scripts
SCRIPT_DIR="${SCRIPT_DIR:-$(cd "$(dirname "$0")" && pwd)}"

# API key — accept either name
export MEM0_LLM_API_KEY="${MEM0_LLM_API_KEY:-${ZAI_API_KEY:?MEM0_LLM_API_KEY or ZAI_API_KEY must be set}}"

# Defaults
MEM0_LLM_MODEL="${MEM0_LLM_MODEL:-glm-5-turbo}"
MEM0_LLM_BASE_URL="${MEM0_LLM_BASE_URL:-https://api.z.ai/api/coding/paas/v4}"
MEM0_PORT="${MEM0_PORT:-8765}"
RERANKER_PORT="${RERANKER_PORT:-8678}"
CONTAINER_IMAGE="${CONTAINER_IMAGE:-docker.io/kyuz0/amd-strix-halo-comfyui:latest}"
CONTAINER_NAME="${CONTAINER_NAME:-mem0-gpu}"
DATA_DIR="${DATA_DIR:-$HOME/mem0-data}"

# Stop existing CPU services (if any)
kill -9 $(pgrep -f "mem0-server.py") 2>/dev/null || true
kill -9 $(pgrep -f "reranker-server.py") 2>/dev/null || true

# Stop existing container
podman stop "$CONTAINER_NAME" 2>/dev/null || true
podman rm "$CONTAINER_NAME" 2>/dev/null || true

podman run -d --name "$CONTAINER_NAME" \
  --device /dev/dri --device /dev/kfd \
  --group-add video --group-add render \
  --restart unless-stopped \
  -p "$MEM0_PORT:$MEM0_PORT" -p "$RERANKER_PORT:$RERANKER_PORT" \
  -v "$DATA_DIR":/root/mem0-data:Z \
  -v "$SCRIPT_DIR/mem0-server.py":/app/mem0-server.py:ro,Z \
  -v "$SCRIPT_DIR/reranker-server.py":/app/reranker-server.py:ro,Z \
  -v "$HOME/.cache/huggingface":/root/.cache/huggingface:Z \
  -e MEM0_LLM_API_KEY="$MEM0_LLM_API_KEY" \
  -e ZAI_API_KEY="$MEM0_LLM_API_KEY" \
  -e MEM0_LLM_MODEL="$MEM0_LLM_MODEL" \
  -e MEM0_LLM_BASE_URL="$MEM0_LLM_BASE_URL" \
  ${MEM0_LLM_PROVIDER:+-e MEM0_LLM_PROVIDER="$MEM0_LLM_PROVIDER"} \
  ${MEM0_EMBEDDER_MODEL:+-e MEM0_EMBEDDER_MODEL="$MEM0_EMBEDDER_MODEL"} \
  ${MEM0_EMBEDDER_DIMS:+-e MEM0_EMBEDDER_DIMS="$MEM0_EMBEDDER_DIMS"} \
  ${MEM0_EMBEDDER_DEVICE:+-e MEM0_EMBEDDER_DEVICE="$MEM0_EMBEDDER_DEVICE"} \
  ${MEM0_VECTOR_COLLECTION:+-e MEM0_VECTOR_COLLECTION="$MEM0_VECTOR_COLLECTION"} \
  ${RERANKER_MODEL:+-e RERANKER_MODEL="$RERANKER_MODEL"} \
  ${RERANKER_DEVICE:+-e RERANKER_DEVICE="$RERANKER_DEVICE"} \
  -e RERANKER_PORT="$RERANKER_PORT" \
  -e RERANKER_URL="http://127.0.0.1:$RERANKER_PORT/rerank" \
  -e TORCH_ROCM_AOTRITON_ENABLE_EXPERIMENTAL=1 \
  -e HOME=/root \
  "$CONTAINER_IMAGE" \
  bash -c "pip install -q FlagEmbedding mem0ai flask httpx qdrant-client 2>&1 | tail -3; echo '=== Starting reranker (GPU) on :$RERANKER_PORT ==='; python3 /app/reranker-server.py & sleep 3; echo '=== Starting mem0 (GPU) on :$MEM0_PORT ==='; exec python3 /app/mem0-server.py"

echo "Container started, waiting for init..."
sleep 15
echo "=== Container logs ==="
podman logs "$CONTAINER_NAME" 2>&1 | tail -25
echo "=== Port check ==="
ss -tlnp | grep "$MEM0_PORT\|$RERANKER_PORT" || echo "Ports not yet ready, check: podman logs $CONTAINER_NAME"

@@ -1,288 +0,0 @@
"""Minimal OpenMemory-compatible REST server wrapping the mem0 Python SDK."""
import asyncio
import json, os, uuid, httpx
from datetime import datetime, timezone
from fastapi import FastAPI, Query
from pydantic import BaseModel
from typing import Optional
from mem0 import Memory

app = FastAPI()

RERANKER_URL = os.environ.get("RERANKER_URL", "http://127.0.0.1:8678/rerank")

CUSTOM_EXTRACTION_PROMPT = """You are a memory extraction specialist for a Cantonese/Chinese chat assistant.

Extract ONLY important, persistent facts from the conversation. Rules:
1. Extract personal preferences, habits, relationships, names, locations
2. Extract decisions, plans, and commitments people make
3. SKIP small talk, greetings, reactions ("ok", "哈哈", "係呀")
4. SKIP temporary states ("我依家食緊飯") unless they reveal a habit
5. Keep facts in the ORIGINAL language (Cantonese/Chinese/English)
6. For each fact, note WHO it's about (use their name or identifier if known)
7. Merge/update existing facts rather than creating duplicates

Return a list of facts in JSON format: {"facts": ["fact1", "fact2", ...]}
"""

PROCEDURAL_EXTRACTION_PROMPT = """You are a procedural memory specialist for an AI assistant.

Extract HOW-TO patterns and reusable procedures from the conversation trace. Rules:
1. Identify step-by-step procedures the assistant followed to accomplish a task
2. Extract tool usage patterns: which tools were called, in what order, with what arguments
3. Capture decision points: why the assistant chose one approach over another
4. Note error-recovery patterns: what failed, how it was fixed
5. Keep the procedure generic enough to apply to similar future tasks
6. Preserve technical details (commands, file paths, API calls) that are reusable
7. SKIP greetings, small talk, and conversational filler
8. Format each procedure as: "To [goal]: [step1] -> [step2] -> ... -> [result]"

Return a list of procedures in JSON format: {"facts": ["procedure1", "procedure2", ...]}
"""

# ── Configurable via environment variables ─────────────────────────
# LLM (for fact extraction when infer=true)
MEM0_LLM_PROVIDER = os.environ.get("MEM0_LLM_PROVIDER", "openai")  # "openai" (compatible), "anthropic", etc.
MEM0_LLM_MODEL = os.environ.get("MEM0_LLM_MODEL", "glm-5-turbo")
MEM0_LLM_API_KEY = os.environ.get("MEM0_LLM_API_KEY") or os.environ.get("ZAI_API_KEY", "")
MEM0_LLM_BASE_URL = os.environ.get("MEM0_LLM_BASE_URL", "https://api.z.ai/api/coding/paas/v4")

# Embedder
MEM0_EMBEDDER_PROVIDER = os.environ.get("MEM0_EMBEDDER_PROVIDER", "huggingface")  # "huggingface", "openai", etc.
MEM0_EMBEDDER_MODEL = os.environ.get("MEM0_EMBEDDER_MODEL", "BAAI/bge-m3")
MEM0_EMBEDDER_DIMS = int(os.environ.get("MEM0_EMBEDDER_DIMS", "1024"))
MEM0_EMBEDDER_DEVICE = os.environ.get("MEM0_EMBEDDER_DEVICE", "cuda")  # "cuda", "cpu", "auto"

# Vector store
MEM0_VECTOR_PROVIDER = os.environ.get("MEM0_VECTOR_PROVIDER", "qdrant")  # "qdrant", "chroma", etc.
MEM0_VECTOR_COLLECTION = os.environ.get("MEM0_VECTOR_COLLECTION", "zeroclaw_mem0")
MEM0_VECTOR_PATH = os.environ.get("MEM0_VECTOR_PATH", os.path.expanduser("~/mem0-data/qdrant"))

config = {
    "llm": {
        "provider": MEM0_LLM_PROVIDER,
        "config": {
            "model": MEM0_LLM_MODEL,
            "api_key": MEM0_LLM_API_KEY,
            "openai_base_url": MEM0_LLM_BASE_URL,
        },
    },
    "embedder": {
        "provider": MEM0_EMBEDDER_PROVIDER,
        "config": {
            "model": MEM0_EMBEDDER_MODEL,
            "embedding_dims": MEM0_EMBEDDER_DIMS,
            "model_kwargs": {"device": MEM0_EMBEDDER_DEVICE},
        },
    },
    "vector_store": {
        "provider": MEM0_VECTOR_PROVIDER,
        "config": {
            "collection_name": MEM0_VECTOR_COLLECTION,
            "embedding_model_dims": MEM0_EMBEDDER_DIMS,
            "path": MEM0_VECTOR_PATH,
        },
    },
    "custom_fact_extraction_prompt": CUSTOM_EXTRACTION_PROMPT,
}

m = Memory.from_config(config)


def rerank_results(query: str, items: list, top_k: int = 10) -> list:
    """Rerank search results using bge-reranker-v2-m3."""
    if not items:
        return items
    documents = [item.get("memory", "") for item in items]
    try:
        resp = httpx.post(
            RERANKER_URL,
            json={"query": query, "documents": documents, "top_k": top_k},
            timeout=10.0,
        )
        resp.raise_for_status()
        ranked = resp.json().get("results", [])
        return [items[r["index"]] for r in ranked]
    except Exception as e:
        print(f"Reranker failed, using original order: {e}")
        return items


class AddMemoryRequest(BaseModel):
    user_id: str
    text: str
    metadata: Optional[dict] = None
    infer: bool = True
    app: Optional[str] = None
    custom_instructions: Optional[str] = None


@app.post("/api/v1/memories/")
async def add_memory(req: AddMemoryRequest):
    # Use the client-supplied prompt, falling back to the server default
    prompt = req.custom_instructions or CUSTOM_EXTRACTION_PROMPT
    result = await asyncio.to_thread(m.add, req.text, user_id=req.user_id, metadata=req.metadata or {}, prompt=prompt)
    return {"id": str(uuid.uuid4()), "status": "ok", "result": result}


class ProceduralMemoryRequest(BaseModel):
    user_id: str
    messages: list[dict]
    metadata: Optional[dict] = None


@app.post("/api/v1/memories/procedural")
async def add_procedural_memory(req: ProceduralMemoryRequest):
    """Store a conversation trace as procedural memory.

    Accepts a list of messages (role/content dicts) representing a full
    conversation turn including tool calls, then uses mem0's native
    procedural memory extraction to learn reusable "how to" patterns.
    """
    # Build metadata with procedural type marker
    meta = {"type": "procedural"}
    if req.metadata:
        meta.update(req.metadata)

    # Use mem0's native message list support + procedural prompt
    result = await asyncio.to_thread(
        m.add,
        req.messages,
        user_id=req.user_id,
        metadata=meta,
        prompt=PROCEDURAL_EXTRACTION_PROMPT,
    )

    return {"id": str(uuid.uuid4()), "status": "ok", "result": result}


def _parse_mem0_results(raw_results) -> list:
    raw = raw_results.get("results", raw_results) if isinstance(raw_results, dict) else raw_results
    items = []
    for r in raw:
        item = r if isinstance(r, dict) else {"memory": str(r)}
        items.append({
            "id": item.get("id", str(uuid.uuid4())),
            "memory": item.get("memory", item.get("text", "")),
            "created_at": item.get("created_at", datetime.now(timezone.utc).isoformat()),
            "metadata_": item.get("metadata", {}),
        })
    return items


def _parse_iso_timestamp(value: str) -> Optional[datetime]:
    """Parse an ISO 8601 timestamp string, returning None on failure."""
    try:
        dt = datetime.fromisoformat(value)
        if dt.tzinfo is None:
            dt = dt.replace(tzinfo=timezone.utc)
        return dt
    except (ValueError, TypeError):
        return None


def _item_created_at(item: dict) -> Optional[datetime]:
    """Extract created_at from an item as a timezone-aware datetime."""
    raw = item.get("created_at")
    if raw is None:
        return None
    if isinstance(raw, datetime):
        if raw.tzinfo is None:
            raw = raw.replace(tzinfo=timezone.utc)
        return raw
    return _parse_iso_timestamp(str(raw))


def _apply_post_filters(
    items: list,
    created_after: Optional[str],
    created_before: Optional[str],
) -> list:
    """Filter items by created_after / created_before timestamps (post-query)."""
    after_dt = _parse_iso_timestamp(created_after) if created_after else None
    before_dt = _parse_iso_timestamp(created_before) if created_before else None
    if after_dt is None and before_dt is None:
        return items
    filtered = []
    for item in items:
        ts = _item_created_at(item)
        if ts is None:
            # Keep items without a parseable timestamp
            filtered.append(item)
            continue
        if after_dt and ts < after_dt:
            continue
        if before_dt and ts > before_dt:
            continue
        filtered.append(item)
    return filtered


@app.get("/api/v1/memories/")
async def list_or_search_memories(
    user_id: str = Query(...),
    search_query: Optional[str] = Query(None),
    size: int = Query(10),
    rerank: bool = Query(True),
    created_after: Optional[str] = Query(None),
    created_before: Optional[str] = Query(None),
    metadata_filter: Optional[str] = Query(None),
):
    # Build mem0 SDK filters dict from metadata_filter JSON param
    sdk_filters = None
    if metadata_filter:
        try:
            sdk_filters = json.loads(metadata_filter)
        except json.JSONDecodeError:
            sdk_filters = None

    if search_query:
        # Fetch more results than needed so the reranker has candidates to work with
        fetch_size = min(size * 3, 50)
        results = await asyncio.to_thread(
            m.search,
            search_query,
            user_id=user_id,
            limit=fetch_size,
            filters=sdk_filters,
        )
        items = _parse_mem0_results(results)
        items = _apply_post_filters(items, created_after, created_before)
        if rerank and items:
            items = rerank_results(search_query, items, top_k=size)
        else:
            items = items[:size]
        return {"items": items, "total": len(items)}
    else:
        results = await asyncio.to_thread(m.get_all, user_id=user_id, filters=sdk_filters)
        items = _parse_mem0_results(results)
        items = _apply_post_filters(items, created_after, created_before)
        return {"items": items, "total": len(items)}


@app.delete("/api/v1/memories/{memory_id}")
async def delete_memory(memory_id: str):
    try:
        await asyncio.to_thread(m.delete, memory_id)
    except Exception:
        pass
    return {"status": "ok"}


@app.get("/api/v1/memories/{memory_id}/history")
async def get_memory_history(memory_id: str):
    """Return the edit history of a specific memory."""
    try:
        history = await asyncio.to_thread(m.history, memory_id)
        # Normalize to a list of dicts
        entries = []
        raw = history if isinstance(history, list) else history.get("results", history) if isinstance(history, dict) else [history]
        for h in raw:
            entry = h if isinstance(h, dict) else {"event": str(h)}
            entries.append(entry)
        return {"memory_id": memory_id, "history": entries}
    except Exception as e:
        return {"memory_id": memory_id, "history": [], "error": str(e)}


if __name__ == "__main__":
    import uvicorn
    uvicorn.run(app, host="0.0.0.0", port=8765)

@@ -1,50 +0,0 @@
from flask import Flask, request, jsonify
from FlagEmbedding import FlagReranker
import os, torch

app = Flask(__name__)
reranker = None

# ── Configurable via environment variables ─────────────────────────
RERANKER_MODEL = os.environ.get("RERANKER_MODEL", "BAAI/bge-reranker-v2-m3")
RERANKER_DEVICE = os.environ.get("RERANKER_DEVICE", "cuda" if torch.cuda.is_available() else "cpu")
RERANKER_PORT = int(os.environ.get("RERANKER_PORT", "8678"))


def get_reranker():
    global reranker
    if reranker is None:
        reranker = FlagReranker(RERANKER_MODEL, use_fp16=True, device=RERANKER_DEVICE)
    return reranker


@app.route('/rerank', methods=['POST'])
def rerank():
    data = request.json
    query = data.get('query', '')
    documents = data.get('documents', [])
    top_k = data.get('top_k', len(documents))

    if not query or not documents:
        return jsonify({'error': 'query and documents required'}), 400

    pairs = [[query, doc] for doc in documents]
    scores = get_reranker().compute_score(pairs)
    if isinstance(scores, float):
        scores = [scores]

    results = sorted(
        [{'index': i, 'document': doc, 'score': score}
         for i, (doc, score) in enumerate(zip(documents, scores))],
        key=lambda x: x['score'], reverse=True
    )[:top_k]

    return jsonify({'results': results})


@app.route('/health', methods=['GET'])
def health():
    return jsonify({'status': 'ok', 'model': RERANKER_MODEL, 'device': RERANKER_DEVICE})


if __name__ == '__main__':
    print(f'Loading reranker model ({RERANKER_MODEL}) on {RERANKER_DEVICE}...')
    get_reranker()
    print(f'Reranker server ready on :{RERANKER_PORT}')
    app.run(host='0.0.0.0', port=RERANKER_PORT)

@@ -10,3 +10,22 @@ default_temperature = 0.7
port = 42617
host = "[::]"
allow_public_bind = true

# Cost tracking and budget enforcement configuration
# Enable to track API usage costs and enforce spending limits
[cost]
enabled = false
daily_limit_usd = 10.0
monthly_limit_usd = 100.0
warn_at_percent = 80
allow_override = false

# Per-model pricing (USD per 1M tokens)
# Uncomment and customize to override default pricing
# [cost.prices."anthropic/claude-sonnet-4-20250514"]
# input = 3.0
# output = 15.0
#
# [cost.prices."openai/gpt-4o"]
# input = 5.0
# output = 15.0

@@ -1,6 +1,6 @@
pkgbase = zeroclaw
pkgdesc = Zero overhead. Zero compromise. 100% Rust. The fastest, smallest AI assistant.
pkgver = 0.5.4
pkgver = 0.5.9
pkgrel = 1
url = https://github.com/zeroclaw-labs/zeroclaw
arch = x86_64
@@ -10,7 +10,7 @@ pkgbase = zeroclaw
makedepends = git
depends = gcc-libs
depends = openssl
source = zeroclaw-0.5.4.tar.gz::https://github.com/zeroclaw-labs/zeroclaw/archive/refs/tags/v0.5.4.tar.gz
source = zeroclaw-0.5.9.tar.gz::https://github.com/zeroclaw-labs/zeroclaw/archive/refs/tags/v0.5.9.tar.gz
sha256sums = SKIP

pkgname = zeroclaw

@@ -1,6 +1,6 @@
# Maintainer: zeroclaw-labs <bot@zeroclaw.dev>
pkgname=zeroclaw
pkgver=0.5.4
pkgver=0.5.9
pkgrel=1
pkgdesc="Zero overhead. Zero compromise. 100% Rust. The fastest, smallest AI assistant."
arch=('x86_64')

@@ -1,11 +1,11 @@
{
"version": "0.5.4",
"version": "0.5.9",
"description": "Zero overhead. Zero compromise. 100% Rust. The fastest, smallest AI assistant.",
"homepage": "https://github.com/zeroclaw-labs/zeroclaw",
"license": "MIT|Apache-2.0",
"architecture": {
"64bit": {
"url": "https://github.com/zeroclaw-labs/zeroclaw/releases/download/v0.5.4/zeroclaw-x86_64-pc-windows-msvc.zip",
"url": "https://github.com/zeroclaw-labs/zeroclaw/releases/download/v0.5.9/zeroclaw-x86_64-pc-windows-msvc.zip",
"hash": "",
"bin": "zeroclaw.exe"
}

@@ -0,0 +1,325 @@
# Aardvark Integration — How It Works

A plain-language walkthrough of every piece and how they connect.

---

## The Big Picture

```
┌────────────────────────────────────────────────────────┐
│ STARTUP (boot)                                         │
│                                                        │
│ 1. Ask aardvark-sys: "any adapters plugged in?"        │
│ 2. For each one found → register a device + transport  │
│ 3. Load tools only if hardware was found               │
└──────────────────────────────────────────┬─────────────┘
                                           │
                    ┌──────────────────────▼──────────────────────┐
                    │ RUNTIME (agent loop)                        │
                    │                                             │
                    │ User: "scan i2c bus"                        │
                    │   → agent calls i2c_scan tool               │
                    │   → tool builds a ZcCommand                 │
                    │   → AardvarkTransport sends to hardware     │
                    │   → response flows back as text             │
                    └─────────────────────────────────────────────┘
```

---

## Layer by Layer

### Layer 1 — `aardvark-sys` (the USB talker)

**File:** `crates/aardvark-sys/src/lib.rs`

This is the only layer that ever touches the raw C library.
Think of it as a thin translator: it turns C function calls into safe Rust.

**Algorithm:**

```
find_devices()
    → call aa_find_devices(16, buf)    // ask C lib how many adapters
    → return Vec of port numbers       // [0, 1, ...] one per adapter

open_port(port)
    → call aa_open(port)               // open that specific adapter
    → if handle ≤ 0, return OpenFailed
    → else return AardvarkHandle{ _port: handle }

i2c_scan(handle)
    → for addr in 0x08..=0x77          // every valid 7-bit address
        try aa_i2c_read(addr, 1 byte)  // knock on the door
        if ACK → add to list           // device answered
    → return list of live addresses

i2c_read(handle, addr, len)
    → aa_i2c_read(addr, len bytes)
    → return bytes as Vec<u8>

i2c_write(handle, addr, data)
    → aa_i2c_write(addr, data)

spi_transfer(handle, bytes_to_send)
    → aa_spi_write(bytes)              // full-duplex: sends + receives
    → return received bytes

gpio_set(handle, direction, value)
    → aa_gpio_direction(direction)     // which pins are outputs
    → aa_gpio_set(value)               // set output levels

gpio_get(handle)
    → aa_gpio_get()                    // read all pin levels as bitmask

Drop(handle)
    → aa_close(handle._port)           // always close on drop
```

**In stub mode** (no SDK): every method returns `Err(NotFound)` immediately. `find_devices()` returns `[]`. Nothing crashes.
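
In real Rust this stub/real split is just conditional compilation. Here is a minimal sketch, assuming a `real-sdk` cargo feature and a bindgen-generated `aa_find_devices` symbol (the actual feature and error names in the crate may differ):

```rust
use std::os::raw::c_int;

#[cfg(feature = "real-sdk")]
extern "C" {
    // Generated by bindgen from vendor/aardvark.h (assumed symbol name).
    fn aa_find_devices(num_devices: c_int, devices: *mut u16) -> c_int;
}

/// Safe wrapper: ask the C library which ports have adapters attached.
#[cfg(feature = "real-sdk")]
pub fn find_devices() -> Vec<u16> {
    let mut buf = [0u16; 16];
    // SAFETY: buf lives for the whole call, and the C library writes
    // at most buf.len() elements into it.
    let n = unsafe { aa_find_devices(buf.len() as c_int, buf.as_mut_ptr()) };
    buf.iter()
        .take(n.max(0) as usize)
        .map(|p| p & !0x8000u16) // strip the AA_PORT_NOT_FREE bit
        .collect()
}

/// Stub mode: no SDK on the build machine, so report no adapters.
#[cfg(not(feature = "real-sdk"))]
pub fn find_devices() -> Vec<u16> {
    Vec::new()
}
```

The stub branch is what keeps CI green on machines with no vendor SDK installed.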

---

### Layer 2 — `AardvarkTransport` (the bridge)

**File:** `src/hardware/aardvark.rs`

The rest of ZeroClaw speaks a single language: `ZcCommand` → `ZcResponse`.
`AardvarkTransport` translates between that protocol and the aardvark-sys calls above.

**Algorithm:**

```
send(ZcCommand) → ZcResponse

    extract command name from cmd.name
    extract parameters from cmd.params (serde_json values)

    match cmd.name:

        "i2c_scan"     → open handle → call i2c_scan()
                       → format found addresses as hex list
                       → return ZcResponse{ output: "0x48, 0x68" }

        "i2c_read"     → parse addr (hex string) + len (number)
                       → open handle → i2c_enable(bitrate)
                       → call i2c_read(addr, len)
                       → format bytes as hex
                       → return ZcResponse{ output: "0xAB 0xCD" }

        "i2c_write"    → parse addr + data bytes
                       → open handle → i2c_write(addr, data)
                       → return ZcResponse{ output: "ok" }

        "spi_transfer" → parse bytes_hex string → decode to Vec<u8>
                       → open handle → spi_enable(bitrate)
                       → spi_transfer(bytes)
                       → return received bytes as hex

        "gpio_set"     → parse direction + value bitmasks
                       → open handle → gpio_set(dir, val)
                       → return ZcResponse{ output: "ok" }

        "gpio_get"     → open handle → gpio_get()
                       → return bitmask value as string

    on any AardvarkError → return ZcResponse{ error: "..." }
```

**Key design choice — lazy open:** The handle is opened fresh for every command and dropped at the end. This means no held connection, no state to clean up, and no "is it still open?" logic anywhere.
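
In Rust terms, lazy open falls out of ownership: the handle is a local that drops when `send` returns. A sketch with assumed shapes for `ZcCommand`/`ZcResponse` and the error enum (the real definitions live in ZeroClaw core and aardvark-sys):

```rust
pub struct ZcCommand { pub name: String, pub params: serde_json::Value }
pub struct ZcResponse { pub output: String, pub error: Option<String> }

#[derive(Debug)]
pub enum AardvarkError { NotFound, OpenFailed(i32), UnknownCommand(String) }

pub struct AardvarkTransport { pub port: u16 }

impl AardvarkTransport {
    pub fn send(&self, cmd: &ZcCommand) -> ZcResponse {
        // Open a fresh handle per command; it drops (closing the USB
        // connection) before this function returns, so no state lingers.
        let result = AardvarkHandle::open_port(self.port).and_then(|handle| {
            match cmd.name.as_str() {
                "i2c_scan" => {
                    let found = handle.i2c_scan()?;
                    Ok(found
                        .iter()
                        .map(|a| format!("0x{a:02X}"))
                        .collect::<Vec<_>>()
                        .join(", "))
                }
                "gpio_get" => Ok(handle.gpio_get()?.to_string()),
                other => Err(AardvarkError::UnknownCommand(other.to_string())),
            }
        });

        match result {
            Ok(output) => ZcResponse { output, error: None },
            Err(e) => ZcResponse { output: String::new(), error: Some(format!("{e:?}")) },
        }
    }
}
```

`AardvarkHandle::open_port`, `i2c_scan`, and `gpio_get` are the Layer 1 wrappers sketched earlier; only two command arms are shown here.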

---

### Layer 3 — Tools (what the agent calls)

**File:** `src/hardware/aardvark_tools.rs`

Each tool is a thin wrapper. It:
1. Validates the agent's JSON input
2. Resolves which physical device to use
3. Builds a `ZcCommand`
4. Calls `AardvarkTransport.send()`
5. Returns the result as text

```
I2cScanTool.call(args)
    → look up "device" in args (default: "aardvark0")
    → find that device in the registry
    → build ZcCommand{ name: "i2c_scan", params: {} }
    → send to AardvarkTransport
    → return "Found: 0x48, 0x68" (or "No devices found")

I2cReadTool.call(args)
    → require args["addr"] and args["len"]
    → build ZcCommand{ name: "i2c_read", params: {addr, len} }
    → send → return hex bytes

I2cWriteTool.call(args)
    → require args["addr"] and args["data"] (hex or array)
    → build ZcCommand{ name: "i2c_write", params: {addr, data} }
    → send → return "ok" or error

SpiTransferTool.call(args)
    → require args["bytes"] (hex string)
    → build ZcCommand{ name: "spi_transfer", params: {bytes} }
    → send → return received bytes

GpioAardvarkTool.call(args)
    → require args["direction"] + args["value"] (set)
      OR no extra args (get)
    → build appropriate ZcCommand
    → send → return result

DatasheetTool.call(args)
    → action = args["action"]: "search" | "download" | "list" | "read"
    → "search":   return a Google/vendor search URL for the device
    → "download": fetch PDF from args["url"] → save to ~/.zeroclaw/hardware/datasheets/
    → "list":     scan the datasheets directory → return filenames
    → "read":     open a saved PDF and return its text
```
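
One wrapper in sketch form, showing the five steps in order. The registry handle and `resolve` method are assumed names, reusing the `ZcCommand`/`ZcResponse` shapes above:

```rust
use serde_json::{json, Value};

pub struct I2cScanTool {
    registry: DeviceRegistryHandle, // shared lookup table from Layer 4
}

impl I2cScanTool {
    pub fn call(&self, args: &Value) -> Result<String, String> {
        // 1. Validate input and 2. resolve the device (default alias).
        let device = args
            .get("device")
            .and_then(Value::as_str)
            .unwrap_or("aardvark0");
        let ctx = self
            .registry
            .resolve(device)
            .ok_or_else(|| format!("unknown device: {device}"))?;

        // 3. Build the protocol command and 4. hand it to the transport.
        let cmd = ZcCommand { name: "i2c_scan".into(), params: json!({}) };
        let resp = ctx.transport.send(&cmd);

        // 5. Return the result as text for the agent.
        match resp.error {
            Some(e) => Err(e),
            None if resp.output.is_empty() => Ok("No devices found".into()),
            None => Ok(format!("Found: {}", resp.output)),
        }
    }
}
```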

---

### Layer 4 — Device Registry (the address book)

**File:** `src/hardware/device.rs`

The registry is a runtime map of every connected device.
Each entry stores: alias, kind, capabilities, transport handle.

```
register("aardvark", vid=0x2b76, ...)
    → DeviceKind::from_vid(0x2b76) → DeviceKind::Aardvark
    → DeviceRuntime::from_kind() → DeviceRuntime::Aardvark
    → assign alias "aardvark0" (then "aardvark1" for second, etc.)
    → store entry in HashMap

attach_transport("aardvark0", AardvarkTransport, capabilities{i2c,spi,gpio})
    → store Arc<dyn Transport> in the entry

has_aardvark()
    → any entry where kind == Aardvark → true / false

resolve_aardvark_device(args)
    → read "device" param (default: "aardvark0")
    → look up alias in HashMap
    → return (alias, DeviceContext{ transport, capabilities })
```
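
The registry shape, compressed into a sketch (field and method names assumed, not copied from `device.rs`):

```rust
use std::collections::HashMap;
use std::sync::Arc;

pub enum DeviceKind { Aardvark, Pico }
pub struct Capabilities { pub i2c: bool, pub spi: bool, pub gpio: bool }
pub trait Transport: Send + Sync { /* send(&ZcCommand) -> ZcResponse */ }

pub struct DeviceEntry {
    pub kind: DeviceKind,
    pub capabilities: Capabilities,
    pub transport: Option<Arc<dyn Transport>>,
}

pub struct DeviceRegistry {
    entries: HashMap<String, DeviceEntry>, // keyed by alias, e.g. "aardvark0"
}

impl DeviceRegistry {
    /// Register a device and hand back its generated alias.
    pub fn register(&mut self, base: &str, kind: DeviceKind, caps: Capabilities) -> String {
        // "aardvark0", "aardvark1", ... based on how many share the base name.
        let n = self.entries.keys().filter(|k| k.starts_with(base)).count();
        let alias = format!("{base}{n}");
        self.entries.insert(
            alias.clone(),
            DeviceEntry { kind, capabilities: caps, transport: None },
        );
        alias
    }

    pub fn attach_transport(&mut self, alias: &str, t: Arc<dyn Transport>) {
        if let Some(entry) = self.entries.get_mut(alias) {
            entry.transport = Some(t);
        }
    }

    pub fn has_aardvark(&self) -> bool {
        self.entries.values().any(|e| matches!(e.kind, DeviceKind::Aardvark))
    }
}
```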

---

### Layer 5 — `boot()` (startup wiring)

**File:** `src/hardware/mod.rs`

`boot()` runs once at startup. For Aardvark:

```
boot()
    ...
    aardvark_ports = aardvark_sys::AardvarkHandle::find_devices()
        // → [] in stub mode, [0] if one adapter is plugged in

    for (i, port) in aardvark_ports:
        alias = registry.register("aardvark", vid=0x2b76, ...)
            // → "aardvark0", "aardvark1", ...

        transport = AardvarkTransport::new(port, bitrate=100kHz)
        registry.attach_transport(alias, transport, {i2c:true, spi:true, gpio:true})

        log "[registry] aardvark0 ready → Total Phase port 0"
    ...
```

---

### Layer 6 — Tool Registry (the loader)

**File:** `src/hardware/tool_registry.rs`

After `boot()`, the tool registry checks what hardware is present and loads
only the relevant tools:

```
ToolRegistry::load(devices)

    # always loaded (Pico / GPIO)
    register: gpio_write, gpio_read, gpio_toggle, pico_flash, device_list, device_status

    # only loaded if an Aardvark was found at boot
    if devices.has_aardvark():
        register: i2c_scan, i2c_read, i2c_write, spi_transfer, gpio_aardvark, datasheet
```

This is why the `hardware_feature_registers_all_six_tools` test still passes in stub mode — `has_aardvark()` returns false, 0 extra tools load, count stays at 6.
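
The conditional load reduces to a single branch; a sketch with assumed helper names:

```rust
fn load_tools(devices: &DeviceRegistry) -> Vec<Box<dyn Tool>> {
    // Always present: the six Pico/GPIO tools.
    let mut tools = base_pico_tools();

    // Aardvark tools exist only if boot() actually found an adapter,
    // which is why the count stays at 6 in stub mode.
    if devices.has_aardvark() {
        tools.extend(aardvark_tools());
    }
    tools
}
```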

---

## Full Flow Diagram

```
SDK FILES           aardvark-sys            ZeroClaw core
(vendor/)           (crates/)               (src/)
─────────────────────────────────────────────────────────────────

aardvark.h  ──►     build.rs                boot()
aardvark.so         (bindgen)       ──►     find_devices()
                        │                       │
                    bindings.rs                 │  vec![0] (one adapter)
                        │                       ▼
                    lib.rs                  register("aardvark0")
                    AardvarkHandle          attach_transport(AardvarkTransport)
                        │                       │
                        │                       ▼
                        │                   ToolRegistry::load()
                        │                     has_aardvark() == true
                        │                     → load 6 aardvark tools
                        │
─────────────────────────────────────────────────────────────────

USER MESSAGE: "scan the i2c bus"

    agent loop
        │
        ▼
    I2cScanTool.call()
        │
        ▼
    resolve_aardvark_device("aardvark0")
        │   returns transport Arc
        ▼
    AardvarkTransport.send(ZcCommand{ name: "i2c_scan" })
        │
        ▼
    AardvarkHandle::open_port(0)     ← opens USB connection
        │
        ▼
    aa_i2c_read(0x08..0x77)          ← probes each address
        │
        ▼
    AardvarkHandle dropped           ← USB connection closed
        │
        ▼
    ZcResponse{ output: "Found: 0x48, 0x68" }
        │
        ▼
    agent sends reply to user: "I found two I2C devices: 0x48 and 0x68"
```

---

## Stub vs Real Side by Side

| | Stub mode (now) | Real hardware |
|---|---|---|
| `find_devices()` | returns `[]` | returns `[0]` |
| `open_port(0)` | `Err(NotFound)` | opens USB, returns handle |
| `i2c_scan()` | `[]` | probes bus, returns addresses |
| tools loaded | only the 6 Pico tools | 6 Pico + 6 Aardvark tools |
| `has_aardvark()` | `false` | `true` |
| SDK needed | no | yes (`vendor/aardvark.h` + `.so`) |

The only code that changes when you plug in real hardware is inside
`crates/aardvark-sys/src/lib.rs` — every other layer is already wired up
and waiting.

@@ -0,0 +1,202 @@
# ADR-004: Tool Shared State Ownership Contract

**Status:** Accepted

**Date:** 2026-03-22

**Issue:** [#4057](https://github.com/zeroclaw/zeroclaw/issues/4057)

## Context

ZeroClaw tools execute in a multi-client environment where a single daemon
process serves requests from multiple connected clients simultaneously. Several
tools already maintain long-lived shared state:

- **`DelegateParentToolsHandle`** (`src/tools/mod.rs`):
  `Arc<RwLock<Vec<Arc<dyn Tool>>>>` — holds parent tools for delegate agents
  with no per-client isolation.
- **`ChannelMapHandle`** (`src/tools/reaction.rs`):
  `Arc<RwLock<HashMap<String, Arc<dyn Channel>>>>` — global channel map shared
  across all clients.
- **`CanvasStore`** (`src/tools/canvas.rs`):
  `Arc<RwLock<HashMap<String, CanvasEntry>>>` — canvas IDs are plain strings
  with no client namespace.

These patterns emerged organically. As the tool surface grows and more clients
connect concurrently, we need a clear contract governing ownership, identity,
isolation, lifecycle, and reload behavior for tool-held shared state. Without
this contract, new tools risk introducing data leaks between clients, stale
state after config reloads, or inconsistent initialization timing.

Additional context:

- The tool registry is immutable after startup, built once in
  `all_tools_with_runtime()`.
- Client identity is currently derived from IP address only
  (`src/gateway/mod.rs`), which is insufficient for reliable namespacing.
- `SecurityPolicy` is scoped per agent, not per client.
- `WorkspaceManager` provides some isolation but workspace switching is global.

## Decision

### 1. Ownership: May tools own long-lived shared state?

**Yes.** Tools MAY own long-lived shared state, provided they follow the
established **handle pattern**: wrap the state in `Arc<RwLock<T>>` (or
`Arc<parking_lot::RwLock<T>>`) and expose a cloneable handle type.

This pattern is already proven by three independent implementations:

| Handle | Location | Inner type |
|--------|----------|-----------|
| `DelegateParentToolsHandle` | `src/tools/mod.rs` | `Vec<Arc<dyn Tool>>` |
| `ChannelMapHandle` | `src/tools/reaction.rs` | `HashMap<String, Arc<dyn Channel>>` |
| `CanvasStore` | `src/tools/canvas.rs` | `HashMap<String, CanvasEntry>` |

Tools that need shared state MUST:

- Define a named handle type alias (e.g., `pub type FooHandle = Arc<RwLock<T>>`).
- Accept the handle at construction time rather than creating global state.
- Document the concurrency contract in the handle type's doc comment.

Tools MUST NOT use static mutable state (`lazy_static!`, `OnceCell` with
interior mutability) for per-request or per-client data.
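
A minimal sketch of the required shape, using an invented `NoteStore` purely for illustration:

```rust
use std::collections::HashMap;
use std::sync::{Arc, RwLock};

/// Shared note storage for a hypothetical note tool.
///
/// Concurrency contract: the lock is held only to insert or look up;
/// no I/O is performed while the lock is held.
pub type NoteStoreHandle = Arc<RwLock<HashMap<String, String>>>;

pub struct NoteTool {
    notes: NoteStoreHandle,
}

impl NoteTool {
    /// The handle is injected at construction; the tool never creates
    /// global state of its own.
    pub fn new(notes: NoteStoreHandle) -> Self {
        Self { notes }
    }

    pub fn put(&self, key: &str, body: &str) {
        self.notes
            .write()
            .unwrap()
            .insert(key.to_string(), body.to_string());
    }
}
```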

### 2. Identity assignment: Who constructs identity keys?

**The daemon SHOULD provide identity.** Tools MUST NOT construct their own
client identity keys.

A new `ClientId` type should be introduced (opaque, `Clone + Eq + Hash + Send + Sync`)
that the daemon assigns at connection time. This replaces the current approach
of using raw IP addresses (`src/gateway/mod.rs:259-306`), which breaks when
multiple clients share a NAT address or when proxied connections arrive.

`ClientId` is passed to tools that require per-client state namespacing as part
of the tool execution context. Tools that do not need per-client isolation
(e.g., the immutable tool registry) may ignore it.

The `ClientId` contract:

- Generated by the gateway layer at connection establishment.
- Opaque to tools — tools must not parse or derive meaning from the value.
- Stable for the lifetime of a single client session.
- Passed through the execution context, not stored globally.
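
Sketched as a newtype (the concrete representation is an open detail; a randomly generated 128-bit value is one option):

```rust
use std::collections::HashMap;
use std::sync::{Arc, RwLock};

/// Opaque per-connection identity assigned by the gateway.
/// Tools treat it strictly as a map key and never inspect the contents.
#[derive(Clone, Debug, PartialEq, Eq, Hash)]
pub struct ClientId(u128); // e.g. a UUID generated at connection time

/// Example of per-client keying inside the handle pattern:
/// rate-limit counters that must never leak across clients.
pub type RateCounterHandle = Arc<RwLock<HashMap<ClientId, u64>>>;

pub fn charge(counters: &RateCounterHandle, client: &ClientId, cost: u64) {
    let mut map = counters.write().unwrap();
    *map.entry(client.clone()).or_insert(0) += cost;
}
```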

### 3. Lifecycle: When may tools run startup-style validation?

**Validation runs once at first registration, and again when config changes
are detected.**

The lifecycle phases are:

1. **Construction** — tool is instantiated with handles and config. No I/O or
   validation occurs here.
2. **Registration** — tool is registered in the tool registry via
   `all_tools_with_runtime()`. At this point the tool MAY perform one-time
   startup validation (e.g., checking that required credentials exist, verifying
   external service connectivity).
3. **Execution** — tool handles individual requests. No re-validation unless
   the config-change signal fires (see Reload Semantics below).
4. **Shutdown** — daemon is stopping. Tools with open resources SHOULD clean up
   gracefully via `Drop` or an explicit shutdown method.

Tools MUST NOT perform blocking validation during execution-phase calls.
Validation results SHOULD be cached in the tool's handle state and checked
via a fast path during execution.

### 4. Isolation: What must be isolated per client?

State falls into two categories with different isolation requirements:

**MUST be isolated per client:**

- Security-sensitive state: credentials, API keys, quotas, rate-limit counters,
  per-client authorization decisions.
- User-specific session data: conversation context, user preferences,
  workspace-scoped file paths.

Isolation mechanism: tools holding per-client state MUST key their internal
maps by `ClientId`. The handle pattern naturally supports this by using
`HashMap<ClientId, T>` inside the `RwLock`.

**MAY be shared across clients (with namespace prefixing):**

- Broadcast/display state: canvas frames (`CanvasStore`), notification channels
  (`ChannelMapHandle`).
- Read-only reference data: tool registry, static configuration, model
  metadata.

When shared state uses string keys (e.g., canvas IDs, channel names), tools
SHOULD support optional namespace prefixing (e.g., `{client_id}:{canvas_name}`)
to allow per-client isolation when needed without mandating it for broadcast
use cases.

Tools MUST NOT store per-client secrets in shared (non-isolated) state
structures.

### 5. Reload semantics: What invalidates prior shared state on config change?

**Config changes detected via hash comparison MUST invalidate cached
validation state.**

The reload contract:

- The daemon computes a hash of the tool-relevant config section at startup and
  after each config reload event.
- When the hash changes, the daemon signals affected tools to re-run their
  registration-phase validation.
- Tools MUST treat their cached validation result as stale when signaled and
  re-validate before the next execution.

Specific invalidation rules:

| Config change | Invalidation scope |
|--------------|-------------------|
| Credential/secret rotation | Per-tool validation cache; per-client credential state |
| Tool enable/disable | Full tool registry rebuild via `all_tools_with_runtime()` |
| Security policy change | `SecurityPolicy` re-derivation; per-agent policy state |
| Workspace directory change | `WorkspaceManager` state; file-path-dependent tool state |
| Provider config change | Provider-dependent tools re-validate connectivity |

Tools MAY retain non-security shared state (e.g., canvas content, channel
subscriptions) across config reloads unless the reload explicitly affects that
state's validity.
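
A sketch of the hash-compare-and-invalidate cycle; the hasher choice and helper names here are illustrative, not the daemon's actual code:

```rust
use std::collections::hash_map::DefaultHasher;
use std::hash::{Hash, Hasher};

fn section_hash(config_section: &str) -> u64 {
    let mut h = DefaultHasher::new();
    config_section.hash(&mut h);
    h.finish()
}

struct ValidationState {
    last_hash: u64,
    valid: bool,
}

impl ValidationState {
    /// Fast path on every execution: re-run registration-phase
    /// validation only when the tool-relevant config section changed.
    fn ensure_valid(&mut self, config_section: &str, validate: impl FnOnce() -> bool) -> bool {
        let h = section_hash(config_section);
        if h != self.last_hash {
            self.last_hash = h;
            self.valid = validate();
        }
        self.valid
    }
}
```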

## Consequences

### Positive

- **Consistency:** All new tools follow the same handle pattern, making shared
state discoverable and auditable.
- **Safety:** Per-client isolation of security-sensitive state prevents data
leaks in multi-tenant scenarios.
- **Clarity:** Explicit lifecycle phases eliminate ambiguity about when
validation runs.
- **Evolvability:** The `ClientId` abstraction decouples tools from transport
details, supporting future identity mechanisms (tokens, certificates).

### Negative

- **Migration cost:** Existing tools (`CanvasStore`, `ReactionTool`) may need
refactoring to accept `ClientId` and namespace their state.
- **Complexity:** Tools that were simple singletons now need to consider
multi-client semantics even if they currently have one client.
- **Performance:** Per-client keying adds a hash lookup on each access, though
this is negligible compared to I/O costs.

### Neutral

- The tool registry remains immutable after startup; this ADR does not change
that invariant.
- `SecurityPolicy` remains per-agent; this ADR documents that client isolation
is orthogonal to agent-level policy.

## References

- `src/tools/mod.rs` — `DelegateParentToolsHandle`, `all_tools_with_runtime()`
- `src/tools/reaction.rs` — `ChannelMapHandle`, `ReactionTool`
- `src/tools/canvas.rs` — `CanvasStore`, `CanvasEntry`
- `src/tools/traits.rs` — `Tool` trait
- `src/gateway/mod.rs` — client IP extraction (`forwarded_client_ip`, `resolve_client_ip`)
- `src/security/` — `SecurityPolicy`
@@ -0,0 +1,215 @@
# Browser Automation Setup Guide

This guide covers setting up browser automation capabilities in ZeroClaw, including both headless automation and GUI access via VNC.

## Overview

ZeroClaw supports multiple browser access methods:

| Method | Use Case | Requirements |
|--------|----------|--------------|
| **agent-browser CLI** | Headless automation, AI agents | npm, Chrome |
| **VNC + noVNC** | GUI access, debugging | Xvfb, x11vnc, noVNC |
| **Chrome Remote Desktop** | Remote GUI via Google | XFCE, Google account |

## Quick Start: Headless Automation

### 1. Install agent-browser

```bash
# Install CLI
npm install -g agent-browser

# Download Chrome for Testing
agent-browser install --with-deps  # Linux (includes system deps)
agent-browser install              # macOS/Windows
```

### 2. Verify ZeroClaw Config

The browser tool is enabled by default. To verify or customize, edit
`~/.zeroclaw/config.toml`:

```toml
[browser]
enabled = true            # default: true
allowed_domains = ["*"]   # default: ["*"] (all public hosts)
backend = "agent_browser" # default: "agent_browser"
native_headless = true    # default: true
```

To restrict domains or disable the browser tool:

```toml
[browser]
enabled = false # disable entirely
# or restrict to specific domains:
allowed_domains = ["example.com", "docs.example.com"]
```

### 3. Test

```bash
echo "Open https://example.com and tell me what it says" | zeroclaw agent
```

## VNC Setup (GUI Access)

For debugging or when you need visual browser access:

### Install Dependencies

```bash
# Ubuntu/Debian
apt-get install -y xvfb x11vnc fluxbox novnc websockify

# Optional: Desktop environment for Chrome Remote Desktop
apt-get install -y xfce4 xfce4-goodies
```

### Start VNC Server

```bash
#!/bin/bash
# Start virtual display with VNC access

DISPLAY_NUM=99
VNC_PORT=5900
NOVNC_PORT=6080
RESOLUTION=1920x1080x24

# Start Xvfb
Xvfb :$DISPLAY_NUM -screen 0 $RESOLUTION -ac &
sleep 1

# Start window manager
fluxbox -display :$DISPLAY_NUM &
sleep 1

# Start x11vnc
x11vnc -display :$DISPLAY_NUM -rfbport $VNC_PORT -forever -shared -nopw -bg
sleep 1

# Start noVNC (web-based VNC)
websockify --web=/usr/share/novnc $NOVNC_PORT localhost:$VNC_PORT &

echo "VNC available at:"
echo " VNC Client: localhost:$VNC_PORT"
echo " Web Browser: http://localhost:$NOVNC_PORT/vnc.html"
```

### VNC Access

- **VNC Client**: Connect to `localhost:5900`
- **Web Browser**: Open `http://localhost:6080/vnc.html`

### Start Browser on VNC Display

```bash
DISPLAY=:99 google-chrome --no-sandbox https://example.com &
```

## Chrome Remote Desktop

### Install

```bash
# Download and install
wget https://dl.google.com/linux/direct/chrome-remote-desktop_current_amd64.deb
apt-get install -y ./chrome-remote-desktop_current_amd64.deb

# Configure session
echo "xfce4-session" > ~/.chrome-remote-desktop-session
chmod +x ~/.chrome-remote-desktop-session
```

### Setup

1. Visit <https://remotedesktop.google.com/headless>
2. Copy the "Debian Linux" setup command
3. Run it on your server
4. Start the service: `systemctl --user start chrome-remote-desktop`

### Remote Access

Go to <https://remotedesktop.google.com/access> from any device.

## Testing

### CLI Tests

```bash
# Basic open and close
agent-browser open https://example.com
agent-browser get title
agent-browser close

# Snapshot with refs
agent-browser open https://example.com
agent-browser snapshot -i
agent-browser close

# Screenshot
agent-browser open https://example.com
agent-browser screenshot /tmp/test.png
agent-browser close
```

### ZeroClaw Integration Tests

```bash
# Content extraction
echo "Open https://example.com and summarize it" | zeroclaw agent

# Navigation
echo "Go to https://github.com/trending and list the top 3 repos" | zeroclaw agent

# Form interaction
echo "Go to Wikipedia, search for 'Rust programming language', and summarize" | zeroclaw agent
```

## Troubleshooting

### "Element not found"

The page may not be fully loaded. Add a wait:

```bash
agent-browser open https://slow-site.com
agent-browser wait --load networkidle
agent-browser snapshot -i
```

### Cookie dialogs blocking access

Handle cookie consent first:

```bash
agent-browser open https://site-with-cookies.com
agent-browser snapshot -i
agent-browser click @accept_cookies # Click the accept button
agent-browser snapshot -i           # Now get the actual content
```

### Docker sandbox network restrictions

If `web_fetch` fails inside the Docker sandbox, use agent-browser instead:

```bash
# Instead of web_fetch, use:
agent-browser open https://example.com
agent-browser get text body
```

## Security Notes

- `agent-browser` runs Chrome in headless mode with sandboxing
- For sensitive sites, use `--session-name` to persist auth state
- The `allowed_domains` config restricts navigation to specific domains
- VNC ports (5900, 6080) should be behind a firewall or Tailscale

## Related

- [agent-browser Documentation](https://github.com/vercel-labs/agent-browser)
- [ZeroClaw Configuration Reference](./config-reference.md)
- [Skills Documentation](../skills/)
@@ -20,6 +20,7 @@ Selected allowlist (all actions currently used across Quality Gate, Release Beta
| `docker/setup-buildx-action@v3` | release, promote-release | Docker Buildx setup |
| `docker/login-action@v3` | release, promote-release | GHCR authentication |
| `docker/build-push-action@v6` | release, promote-release | Multi-platform Docker image build and push |
| `actions/labeler@v5` | pr-path-labeler | Apply path/scope labels from `labeler.yml` |

Equivalent allowlist patterns:

@@ -36,6 +37,7 @@ Equivalent allowlist patterns:
| Quality Gate | `.github/workflows/checks-on-pr.yml` | Pull requests to `master` |
| Release Beta | `.github/workflows/release-beta-on-push.yml` | Push to `master` |
| Release Stable | `.github/workflows/release-stable-manual.yml` | Manual `workflow_dispatch` |
| PR Path Labeler | `.github/workflows/pr-path-labeler.yml` | `pull_request_target` (opened, synchronize, reopened) |

## Change Control

@@ -62,6 +64,7 @@ gh api repos/zeroclaw-labs/zeroclaw/actions/permissions/selected-actions

## Change Log

- 2026-03-23: Added PR Path Labeler (`pr-path-labeler.yml`) using `actions/labeler@v5`. No allowlist change needed — covered by existing `actions/*` pattern.
- 2026-03-10: Renamed workflows — CI → Quality Gate (`checks-on-pr.yml`), Beta Release → Release Beta (`release-beta-on-push.yml`), Promote Release → Release Stable (`release-stable-manual.yml`). Added `lint` and `security` jobs to Quality Gate. Added Cross-Platform Build (`cross-platform-build-manual.yml`).
- 2026-03-05: Complete workflow overhaul — replaced 22 workflows with 3 (CI, Beta Release, Promote Release)
- Removed patterns no longer in use: `DavidAnson/markdownlint-cli2-action@*`, `lycheeverse/lychee-action@*`, `EmbarkStudios/cargo-deny-action@*`, `rustsec/audit-check@*`, `rhysd/actionlint@*`, `sigstore/cosign-installer@*`, `Checkmarx/vorpal-reviewdog-github-action@*`, `useblacksmith/*`
@@ -45,6 +45,15 @@ For complete code examples of each extension trait, see [extension-examples.md](
- Keep multilingual entry-point parity for all supported locales (`en`, `zh-CN`, `ja`, `ru`, `fr`, `vi`) when nav or key wording changes.
- When shared docs wording changes, sync corresponding localized docs in the same PR (or explicitly document deferral and follow-up PR).

## Tool Shared State

- Follow the `Arc<RwLock<T>>` handle pattern for any tool that owns long-lived shared state.
- Accept handles at construction; do not create global/static mutable state.
- Use `ClientId` (provided by the daemon) to namespace per-client state — never construct identity keys inside the tool.
- Isolate security-sensitive state (credentials, quotas) per client; broadcast/display state may be shared with optional namespace prefixing.
- Cached validation is invalidated on config change — tools must re-validate before the next execution when signaled.
- See [ADR-004: Tool Shared State Ownership](../architecture/adr-004-tool-shared-state-ownership.md) for the full contract.

## Architecture Boundary Rules

- Extend capabilities by adding trait implementations + factory wiring first; avoid cross-module rewrites for isolated features.
@@ -0,0 +1,213 @@
# Label Registry

Single reference for every label used on PRs and issues. Labels are grouped by category. Each entry lists the label name, definition, and how it is applied.

Sources consolidated here:

- `.github/labeler.yml` (path-label config for `actions/labeler`)
- `.github/label-policy.json` (contributor tier thresholds)
- `docs/contributing/pr-workflow.md` (size, risk, and triage label definitions)
- `docs/contributing/ci-map.md` (automation behavior and high-risk path heuristics)

Note: The CI was simplified to 4 workflows (`ci.yml`, `release.yml`, `ci-full.yml`, `promote-release.yml`). Workflows that previously automated size, risk, contributor tier, and triage labels (`pr-labeler.yml`, `pr-auto-response.yml`, `pr-check-stale.yml`, and supporting scripts) were removed. Only path labels via `pr-path-labeler.yml` are currently automated.

---

## Path labels

Applied automatically by `pr-path-labeler.yml` using `actions/labeler`. Matches changed files against glob patterns in `.github/labeler.yml`.

### Base scope labels

| Label | Matches |
|---|---|
| `docs` | `docs/**`, `**/*.md`, `**/*.mdx`, `LICENSE`, `.markdownlint-cli2.yaml` |
| `dependencies` | `Cargo.toml`, `Cargo.lock`, `deny.toml`, `.github/dependabot.yml` |
| `ci` | `.github/**`, `.githooks/**` |
| `core` | `src/*.rs` |
| `agent` | `src/agent/**` |
| `channel` | `src/channels/**` |
| `gateway` | `src/gateway/**` |
| `config` | `src/config/**` |
| `cron` | `src/cron/**` |
| `daemon` | `src/daemon/**` |
| `doctor` | `src/doctor/**` |
| `health` | `src/health/**` |
| `heartbeat` | `src/heartbeat/**` |
| `integration` | `src/integrations/**` |
| `memory` | `src/memory/**` |
| `security` | `src/security/**` |
| `runtime` | `src/runtime/**` |
| `onboard` | `src/onboard/**` |
| `provider` | `src/providers/**` |
| `service` | `src/service/**` |
| `skillforge` | `src/skillforge/**` |
| `skills` | `src/skills/**` |
| `tool` | `src/tools/**` |
| `tunnel` | `src/tunnel/**` |
| `observability` | `src/observability/**` |
| `tests` | `tests/**` |
| `scripts` | `scripts/**` |
| `dev` | `dev/**` |

### Per-component channel labels

Each channel gets a specific label in addition to the base `channel` label.

| Label | Matches |
|---|---|
| `channel:bluesky` | `bluesky.rs` |
| `channel:clawdtalk` | `clawdtalk.rs` |
| `channel:cli` | `cli.rs` |
| `channel:dingtalk` | `dingtalk.rs` |
| `channel:discord` | `discord.rs`, `discord_history.rs` |
| `channel:email` | `email_channel.rs`, `gmail_push.rs` |
| `channel:imessage` | `imessage.rs` |
| `channel:irc` | `irc.rs` |
| `channel:lark` | `lark.rs` |
| `channel:linq` | `linq.rs` |
| `channel:matrix` | `matrix.rs` |
| `channel:mattermost` | `mattermost.rs` |
| `channel:mochat` | `mochat.rs` |
| `channel:mqtt` | `mqtt.rs` |
| `channel:nextcloud-talk` | `nextcloud_talk.rs` |
| `channel:nostr` | `nostr.rs` |
| `channel:notion` | `notion.rs` |
| `channel:qq` | `qq.rs` |
| `channel:reddit` | `reddit.rs` |
| `channel:signal` | `signal.rs` |
| `channel:slack` | `slack.rs` |
| `channel:telegram` | `telegram.rs` |
| `channel:twitter` | `twitter.rs` |
| `channel:wati` | `wati.rs` |
| `channel:webhook` | `webhook.rs` |
| `channel:wecom` | `wecom.rs` |
| `channel:whatsapp` | `whatsapp.rs`, `whatsapp_storage.rs`, `whatsapp_web.rs` |

### Per-component provider labels

| Label | Matches |
|---|---|
| `provider:anthropic` | `anthropic.rs` |
| `provider:azure-openai` | `azure_openai.rs` |
| `provider:bedrock` | `bedrock.rs` |
| `provider:claude-code` | `claude_code.rs` |
| `provider:compatible` | `compatible.rs` |
| `provider:copilot` | `copilot.rs` |
| `provider:gemini` | `gemini.rs`, `gemini_cli.rs` |
| `provider:glm` | `glm.rs` |
| `provider:kilocli` | `kilocli.rs` |
| `provider:ollama` | `ollama.rs` |
| `provider:openai` | `openai.rs`, `openai_codex.rs` |
| `provider:openrouter` | `openrouter.rs` |
| `provider:telnyx` | `telnyx.rs` |

### Per-group tool labels

Tools are grouped by logical function rather than one label per file.

| Label | Matches |
|---|---|
| `tool:browser` | `browser.rs`, `browser_delegate.rs`, `browser_open.rs`, `text_browser.rs`, `screenshot.rs` |
| `tool:cloud` | `cloud_ops.rs`, `cloud_patterns.rs` |
| `tool:composio` | `composio.rs` |
| `tool:cron` | `cron_add.rs`, `cron_list.rs`, `cron_remove.rs`, `cron_run.rs`, `cron_runs.rs`, `cron_update.rs` |
| `tool:file` | `file_edit.rs`, `file_read.rs`, `file_write.rs`, `glob_search.rs`, `content_search.rs` |
| `tool:google-workspace` | `google_workspace.rs` |
| `tool:mcp` | `mcp_client.rs`, `mcp_deferred.rs`, `mcp_protocol.rs`, `mcp_tool.rs`, `mcp_transport.rs` |
| `tool:memory` | `memory_forget.rs`, `memory_recall.rs`, `memory_store.rs` |
| `tool:microsoft365` | `microsoft365/**` |
| `tool:security` | `security_ops.rs`, `verifiable_intent.rs` |
| `tool:shell` | `shell.rs`, `node_tool.rs`, `cli_discovery.rs` |
| `tool:sop` | `sop_advance.rs`, `sop_approve.rs`, `sop_execute.rs`, `sop_list.rs`, `sop_status.rs` |
| `tool:web` | `web_fetch.rs`, `web_search_tool.rs`, `web_search_provider_routing.rs`, `http_request.rs` |

---

## Size labels

Defined in `pr-workflow.md` §6.1. Based on effective changed line count, normalized for docs-only and lockfile-heavy PRs.

| Label | Threshold |
|---|---|
| `size: XS` | <= 80 lines |
| `size: S` | <= 250 lines |
| `size: M` | <= 500 lines |
| `size: L` | <= 1000 lines |
| `size: XL` | > 1000 lines |

**Applied by:** manual. The workflows that previously computed size labels (`pr-labeler.yml` and supporting scripts) were removed during CI simplification.

---

## Risk labels

Defined in `pr-workflow.md` §13.2 and `ci-map.md`. Based on a heuristic combining touched paths and change size.

| Label | Meaning |
|---|---|
| `risk: low` | No high-risk paths touched, small change |
| `risk: medium` | Behavioral `src/**` changes without boundary/security impact |
| `risk: high` | Touches high-risk paths (see below) or large security-adjacent change |
| `risk: manual` | Maintainer override that freezes automated risk recalculation |

High-risk paths: `src/security/**`, `src/runtime/**`, `src/gateway/**`, `src/tools/**`, `.github/workflows/**`.

The boundary between low and medium is not formally defined beyond "no high-risk paths."

**Applied by:** manual. Previously automated via `pr-labeler.yml`; removed during CI simplification.

---

## Contributor tier labels

Defined in `.github/label-policy.json`. Based on the author's merged PR count queried from the GitHub API.

| Label | Minimum merged PRs |
|---|---|
| `trusted contributor` | 5 |
| `experienced contributor` | 10 |
| `principal contributor` | 20 |
| `distinguished contributor` | 50 |

**Applied by:** manual. Previously automated via `pr-labeler.yml` and `pr-auto-response.yml`; removed during CI simplification.

---

## Response and triage labels

Defined in `pr-workflow.md` §8. Applied manually.

| Label | Purpose | Applied by |
|---|---|---|
| `r:needs-repro` | Incomplete bug report; request deterministic repro | Manual |
| `r:support` | Usage/help item better handled outside bug backlog | Manual |
| `invalid` | Not a valid bug/feature request | Manual |
| `duplicate` | Duplicate of existing issue | Manual |
| `stale-candidate` | Dormant PR/issue; candidate for closing | Manual |
| `superseded` | Replaced by a newer PR | Manual |
| `no-stale` | Exempt from stale automation; accepted but blocked work | Manual |

**Automation:** none currently. The workflows that handled label-driven issue closing (`pr-auto-response.yml`) and stale detection (`pr-check-stale.yml`) were removed during CI simplification.

---
## Implementation status

| Category | Count | Automated | Workflow |
|---|---|---|---|
| Path (base scope) | 28 | Yes | `pr-path-labeler.yml` |
| Path (per-component) | 53 | Yes | `pr-path-labeler.yml` |
| Size | 5 | No | Manual |
| Risk | 4 | No | Manual |
| Contributor tier | 4 | No | Manual |
| Response/triage | 7 | No | Manual |
| **Total** | **101** | | |

---

## Maintenance

- **Owner:** maintainers responsible for label policy and PR triage automation.
- **Update trigger:** new channels, providers, or tools added to the source tree; label policy changes; triage workflow changes.
- **Source of truth:** this document consolidates definitions from the four source files listed at the top. When definitions conflict, update the source file first, then sync this registry.
@@ -411,30 +411,6 @@ allowed_roots = [\"~/Desktop/projects\", \"/opt/shared-repo\"]
|
||||
|
||||
- 内存上下文注入忽略旧的 `assistant_resp*` 自动保存键,以防止旧模型生成的摘要被视为事实。
|
||||
|
||||
### `[memory.mem0]`
|
||||
|
||||
Mem0 (OpenMemory) 后端 — 连接自托管 mem0 服务器,提供基于向量的记忆存储和 LLM 事实提取。构建时需要 `memory-mem0` feature flag,配置需设置 `backend = "mem0"`。
|
||||
|
||||
| 键 | 默认值 | 环境变量 | 用途 |
|
||||
|---|---|---|---|
|
||||
| `url` | `http://localhost:8765` | `MEM0_URL` | OpenMemory 服务器地址 |
|
||||
| `user_id` | `zeroclaw` | `MEM0_USER_ID` | 记忆作用域的用户 ID |
|
||||
| `app_name` | `zeroclaw` | `MEM0_APP_NAME` | 在 mem0 中注册的应用名称 |
|
||||
| `infer` | `true` | — | 使用 LLM 从存储文本中提取事实 (`true`) 或原样存储 (`false`) |
|
||||
| `extraction_prompt` | 未设置 | `MEM0_EXTRACTION_PROMPT` | 自定义 LLM 事实提取提示词(如适用于非英文内容) |
|
||||
|
||||
```toml
|
||||
[memory]
|
||||
backend = "mem0"
|
||||
|
||||
[memory.mem0]
|
||||
url = "http://192.168.0.171:8765"
|
||||
user_id = "zeroclaw-bot"
|
||||
extraction_prompt = "用原始语言提取事实..."
|
||||
```
|
||||
|
||||
服务器部署脚本位于 `deploy/mem0/`。
|
||||
|
||||
## `[[model_routes]]` 和 `[[embedding_routes]]`
|
||||
|
||||
使用路由提示,以便集成可以在模型 ID 演变时保持稳定的名称。
|
||||
|
||||
@@ -12,8 +12,6 @@ SOP 审计条目通过 `SopAuditLogger` 持久化到配置的内存后端的 `so
|
||||
- `sop_step_{run_id}_{step_number}`:单步结果
|
||||
- `sop_approval_{run_id}_{step_number}`:操作员审批记录
|
||||
- `sop_timeout_approve_{run_id}_{step_number}`:超时自动审批记录
|
||||
- `sop_gate_decision_{gate_id}_{timestamp_ms}`:门评估器决策记录(启用 `ampersona-gates` 时)
|
||||
- `sop_phase_state`:持久化的信任阶段状态快照(启用 `ampersona-gates` 时)
|
||||
|
||||
## 2. 检查路径
|
||||
|
||||
|
||||
@@ -122,6 +122,34 @@ tools = ["mcp_browser_*"]
keywords = ["browse", "navigate", "open url", "screenshot"]
```

## `[pacing]`

Pacing controls for slow/local LLM workloads (Ollama, llama.cpp, vLLM). All keys are optional; when absent, existing behavior is preserved.

| Key | Default | Purpose |
|---|---|---|
| `step_timeout_secs` | _none_ | Per-step timeout: maximum seconds for a single LLM inference turn. Catches a truly hung model without terminating the overall task loop |
| `loop_detection_min_elapsed_secs` | _none_ | Minimum elapsed seconds before loop detection activates. Tasks completing under this threshold get aggressive loop protection; longer-running tasks receive a grace period |
| `loop_ignore_tools` | `[]` | Tool names excluded from identical-output loop detection. Useful for browser workflows where `browser_screenshot` structurally resembles a loop |
| `message_timeout_scale_max` | `4` | Override for the hardcoded timeout scaling cap. The channel message timeout budget is `message_timeout_secs * min(max_tool_iterations, message_timeout_scale_max)` |

Notes:

- These settings are intended for local/slow LLM deployments. Cloud-provider users typically do not need them.
- `step_timeout_secs` operates independently of the total channel message timeout budget. A step timeout abort does not consume the overall budget; the loop simply stops.
- `loop_detection_min_elapsed_secs` delays loop-detection counting, not the task itself. Loop protection remains fully active for short tasks (the default).
- `loop_ignore_tools` only suppresses tool-output-based loop detection for the listed tools. Other safety features (max iterations, overall timeout) remain active.
- `message_timeout_scale_max` must be >= 1. Setting it higher than `max_tool_iterations` has no additional effect (the formula uses `min()`).
- Example configuration for a slow local Ollama deployment (a worked budget sketch follows the block):

```toml
[pacing]
step_timeout_secs = 120
loop_detection_min_elapsed_secs = 60
loop_ignore_tools = ["browser_screenshot", "browser_navigate"]
message_timeout_scale_max = 8
```
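
A worked sketch of the budget formula above. Only the formula itself comes from this section; the function name and the sample `message_timeout_secs`/`max_tool_iterations` values are assumptions for illustration:

```rust
/// budget = message_timeout_secs * min(max_tool_iterations, scale_cap), scale >= 1
fn message_timeout_budget(message_timeout_secs: u64, max_tool_iterations: u64, scale_cap: u64) -> u64 {
    let scale = max_tool_iterations.min(scale_cap).max(1);
    message_timeout_secs * scale
}

fn main() {
    // With the cap raised to 8 as in the example config, a base timeout of
    // 300s and an assumed max_tool_iterations of 20: 300 * min(20, 8) = 2400s.
    assert_eq!(message_timeout_budget(300, 20, 8), 2400);
    // With the default cap of 4: 300 * min(20, 4) = 1200s.
    assert_eq!(message_timeout_budget(300, 20, 4), 1200);
}
```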

## `[security.otp]`

| Key | Default | Purpose |
@@ -185,12 +213,15 @@ Delegate sub-agent configurations. Each key under `[agents]` defines a named sub
| `max_iterations` | `10` | Max tool-call iterations for agentic mode |
| `timeout_secs` | `120` | Timeout in seconds for non-agentic provider calls (1–3600) |
| `agentic_timeout_secs` | `300` | Timeout in seconds for agentic sub-agent loops (1–3600) |
| `skills_directory` | unset | Optional skills directory path (workspace-relative) for scoped skill loading |

Notes:

- `agentic = false` preserves existing single prompt→response delegate behavior.
- `agentic = true` requires at least one matching entry in `allowed_tools`.
- The `delegate` tool is excluded from sub-agent allowlists to prevent re-entrant delegation loops.
- Sub-agents receive an enriched system prompt containing: tools section (allowed tools with parameters), skills section (from scoped or default directory), workspace path, current date/time, safety constraints, and shell policy when `shell` is in the effective tool list.
- When `skills_directory` is unset or empty, the sub-agent loads skills from the default workspace `skills/` directory. When set, skills are loaded exclusively from that directory (relative to workspace root), enabling per-agent scoped skill sets.

```toml
[agents.researcher]
@@ -208,6 +239,14 @@ provider = "ollama"
model = "qwen2.5-coder:32b"
temperature = 0.2
timeout_secs = 60

[agents.code_reviewer]
provider = "anthropic"
model = "claude-opus-4-5"
system_prompt = "You are an expert code reviewer focused on security and performance."
agentic = true
allowed_tools = ["file_read", "shell"]
skills_directory = "skills/code-review"
```

## `[runtime]`
@@ -414,6 +453,12 @@ Notes:
| `port` | `42617` | gateway listen port |
| `require_pairing` | `true` | require pairing before bearer auth |
| `allow_public_bind` | `false` | block accidental public exposure |
| `path_prefix` | _(none)_ | URL path prefix for reverse-proxy deployments (e.g. `"/zeroclaw"`) |

When deploying behind a reverse proxy that maps ZeroClaw to a sub-path,
set `path_prefix` to that sub-path (e.g. `"/zeroclaw"`). All gateway
routes will be served under this prefix. The value must start with `/`
and must not end with `/`.
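
A sketch of those two rules as a validation function (hypothetical, not the actual gateway code; note that under these rules a bare `"/"` is also rejected):

```rust
/// Enforces the stated path_prefix rules: must start with '/', must not end with '/'.
fn validate_path_prefix(prefix: &str) -> Result<(), String> {
    if !prefix.starts_with('/') {
        return Err(format!("path_prefix {prefix:?} must start with '/'"));
    }
    if prefix.ends_with('/') {
        return Err(format!("path_prefix {prefix:?} must not end with '/'"));
    }
    Ok(())
}
```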

## `[autonomy]`

@@ -463,30 +508,6 @@ Notes:

- Memory context injection ignores legacy `assistant_resp*` auto-save keys to prevent old model-authored summaries from being treated as facts.

### `[memory.mem0]`

Mem0 (OpenMemory) backend — connects to a self-hosted mem0 server for vector-based memory with LLM-powered fact extraction. Requires feature flag `memory-mem0` at build time and `backend = "mem0"` in config.

| Key | Default | Env var | Purpose |
|---|---|---|---|
| `url` | `http://localhost:8765` | `MEM0_URL` | OpenMemory server URL |
| `user_id` | `zeroclaw` | `MEM0_USER_ID` | User ID for scoping memories |
| `app_name` | `zeroclaw` | `MEM0_APP_NAME` | Application name registered in mem0 |
| `infer` | `true` | — | Use LLM to extract facts from stored text (`true`) or store raw (`false`) |
| `extraction_prompt` | unset | `MEM0_EXTRACTION_PROMPT` | Custom prompt for LLM fact extraction (e.g. for non-English content) |

```toml
[memory]
backend = "mem0"

[memory.mem0]
url = "http://192.168.0.171:8765"
user_id = "zeroclaw-bot"
extraction_prompt = "Extract facts in the original language..."
```

Server deployment scripts are in `deploy/mem0/`.

## `[[model_routes]]` and `[[embedding_routes]]`

Use route hints so integrations can keep stable names while model IDs evolve.

@@ -586,7 +607,7 @@ Top-level channel options are configured under `channels_config`.

| Key | Default | Purpose |
|---|---|---|
| `message_timeout_secs` | `300` | Base timeout in seconds for channel message processing; runtime scales this with tool-loop depth (up to 4x, overridable via `[pacing].message_timeout_scale_max`) |

Examples:

@@ -601,7 +622,7 @@ Examples:
Notes:

- Default `300s` is optimized for on-device LLMs (Ollama) which are slower than cloud APIs.
- Runtime timeout budget is `message_timeout_secs * scale`, where `scale = min(max_tool_iterations, cap)` and a minimum of `1`. The default cap is `4`; override with `[pacing].message_timeout_scale_max`.
- This scaling avoids false timeouts when the first LLM turn is slow/retried but later tool-loop turns still need to complete.
- If using cloud APIs (OpenAI, Anthropic, etc.), you can reduce this to `60` or lower.
- Values below `30` are clamped to `30` to avoid immediate timeout churn.
@@ -12,8 +12,6 @@ Common key patterns:
- `sop_step_{run_id}_{step_number}`: per-step result
- `sop_approval_{run_id}_{step_number}`: operator approval record
- `sop_timeout_approve_{run_id}_{step_number}`: timeout auto-approval record
- `sop_gate_decision_{gate_id}_{timestamp_ms}`: gate evaluator decision record (when `ampersona-gates` is enabled)
- `sop_phase_state`: persisted trust-phase state snapshot (when `ampersona-gates` is enabled)

## 2. Inspection Paths

@@ -337,30 +337,6 @@ Lưu ý:
|
||||
|
||||
- Chèn ngữ cảnh memory bỏ qua khóa auto-save `assistant_resp*` kiểu cũ để tránh tóm tắt do model tạo bị coi là sự thật.
|
||||
|
||||
### `[memory.mem0]`
|
||||
|
||||
Backend Mem0 (OpenMemory) — kết nối đến server mem0 tự host, cung cấp bộ nhớ vector với trích xuất sự kiện bằng LLM. Cần feature flag `memory-mem0` khi build và `backend = "mem0"` trong config.
|
||||
|
||||
| Khóa | Mặc định | Biến môi trường | Mục đích |
|
||||
|---|---|---|---|
|
||||
| `url` | `http://localhost:8765` | `MEM0_URL` | URL server OpenMemory |
|
||||
| `user_id` | `zeroclaw` | `MEM0_USER_ID` | User ID để phân vùng memory |
|
||||
| `app_name` | `zeroclaw` | `MEM0_APP_NAME` | Tên ứng dụng đăng ký trong mem0 |
|
||||
| `infer` | `true` | — | Dùng LLM trích xuất sự kiện từ text (`true`) hoặc lưu nguyên (`false`) |
|
||||
| `extraction_prompt` | chưa đặt | `MEM0_EXTRACTION_PROMPT` | Prompt tùy chỉnh cho trích xuất sự kiện LLM (vd: cho nội dung không phải tiếng Anh) |
|
||||
|
||||
```toml
|
||||
[memory]
|
||||
backend = "mem0"
|
||||
|
||||
[memory.mem0]
|
||||
url = "http://192.168.0.171:8765"
|
||||
user_id = "zeroclaw-bot"
|
||||
extraction_prompt = "Trích xuất sự kiện bằng ngôn ngữ gốc..."
|
||||
```
|
||||
|
||||
Script triển khai server nằm trong `deploy/mem0/`.
|
||||
|
||||
## `[[model_routes]]` và `[[embedding_routes]]`
|
||||
|
||||
Route hint giúp tên tích hợp ổn định khi model ID thay đổi.
|
||||
|
||||
@@ -38,3 +38,82 @@ allowed_tools = ["read", "edit", "exec"]
max_iterations = 15
# Optional: use longer timeout for complex coding tasks
agentic_timeout_secs = 600

# ── Cron Configuration ────────────────────────────────────────
[cron]
# Enable the cron subsystem. Default: true
enabled = true
# Run all overdue jobs at scheduler startup. Default: true
catch_up_on_startup = true
# Maximum number of historical cron run records to retain. Default: 50
max_run_history = 50

# ── Declarative Cron Jobs ─────────────────────────────────────
# Define cron jobs directly in config. These are synced to the database
# at scheduler startup. Each job needs a stable `id` for merge semantics.

# Shell job: runs a shell command on a cron schedule
[[cron.jobs]]
id = "daily-backup"
name = "Daily Backup"
job_type = "shell"
command = "tar czf /tmp/backup.tar.gz /data"
schedule = { kind = "cron", expr = "0 2 * * *" }

# Agent job: runs an agent prompt on an interval
[[cron.jobs]]
id = "health-check"
name = "Health Check"
job_type = "agent"
prompt = "Check server health: disk space, memory, CPU load"
model = "anthropic/claude-sonnet-4"
allowed_tools = ["shell", "file_read"]
schedule = { kind = "every", every_ms = 300000 }

# Cron job with timezone and delivery
# [[cron.jobs]]
# id = "morning-report"
# name = "Morning Report"
# job_type = "agent"
# prompt = "Generate a daily summary of system metrics"
# schedule = { kind = "cron", expr = "0 9 * * 1-5", tz = "America/New_York" }
# [cron.jobs.delivery]
# mode = "announce"
# channel = "telegram"
# to = "123456789"

# ── Cost Tracking Configuration ────────────────────────────────
[cost]
# Enable cost tracking and budget enforcement. Default: false
enabled = false
# Daily spending limit in USD. Default: 10.0
daily_limit_usd = 10.0
# Monthly spending limit in USD. Default: 100.0
monthly_limit_usd = 100.0
# Warn when spending reaches this percentage of limit. Default: 80
warn_at_percent = 80
# Allow requests to exceed budget with --override flag. Default: false
allow_override = false

# Per-model pricing (USD per 1M tokens).
# Built-in defaults exist for popular models; add overrides here.
# [cost.prices."anthropic/claude-opus-4-20250514"]
# input = 15.0
# output = 75.0
# [cost.prices."anthropic/claude-sonnet-4-20250514"]
# input = 3.0
# output = 15.0
# [cost.prices."openai/gpt-4o"]
# input = 5.0
# output = 15.0
# [cost.prices."openai/gpt-4o-mini"]
# input = 0.15
# output = 0.60

# ── Voice Transcription ─────────────────────────────────────────
# [transcription]
# enabled = true
# default_provider = "groq"
# Also transcribe non-PTT (forwarded / regular) audio on WhatsApp.
# Default: false (only voice notes are transcribed).
# transcribe_non_ptt_audio = false
@@ -0,0 +1,34 @@
## Aardvark Adapter (aardvark0)

- Protocol: I2C and SPI via Total Phase Aardvark USB
- Bitrate: 100 kHz (standard-mode I2C) by default
- Use `i2c_scan` first to discover connected devices
- Use `i2c_read` / `i2c_write` for register operations
- Use `spi_transfer` for full-duplex SPI
- Use `gpio_aardvark` to control the Aardvark's GPIO expansion pins
- Use the `datasheet` tool when the user identifies a new device

## Tool Selection — Aardvark

| Goal | Tool |
|--------------------------------|-----------------|
| Find devices on the I2C bus | `i2c_scan` |
| Read a register | `i2c_read` |
| Write a register | `i2c_write` |
| Full-duplex SPI transfer | `spi_transfer` |
| Control Aardvark GPIO pins | `gpio_aardvark` |
| User names a new device | `datasheet` |

## I2C Workflow

1. Run `i2c_scan` — find what addresses respond.
2. The user identifies the device (or look up the address in the skill file).
3. Read the relevant register with `i2c_read`.
4. If the datasheet is not yet cached, use `datasheet(action="search", device_name="...")`.

## Notes

- Aardvark has no firmware — it calls the C library directly.
  Do NOT use `device_exec`, `device_read_code`, or `device_write_code` for Aardvark.
- The Aardvark adapter auto-enables I2C pull-ups (3.3 V) — no external resistors needed
  for most sensors.
@@ -0,0 +1,41 @@
# aardvark0 — <Device Name> (<Part Number>)

<!-- Copy this file to ~/.zeroclaw/hardware/devices/aardvark0.md -->
<!-- Fill in the device details from the datasheet. -->

## Connection

- Adapter: Total Phase Aardvark (aardvark0)
- Protocol: I2C <!-- or SPI -->
- I2C Address: 0x48 <!-- change to the actual device address -->
- Bitrate: 100 kHz

## Key Registers (from datasheet)

<!-- Example for LM75 temperature sensor — replace with your device -->
| Register | Address | Description | Notes |
|----------|---------|----------------------------------------|------------------------|
| Temp | 0x00 | Temperature (2 bytes, big-endian) | 0.5 °C per LSB |
| Config | 0x01 | Configuration register | Read/write |
| Thyst | 0x02 | Hysteresis temperature | Read/write |
| Tos | 0x03 | Overtemperature shutdown threshold | Read/write |

## Datasheet

- File: `~/.zeroclaw/hardware/datasheets/<device>.pdf`
- Source: <!-- URL where you downloaded the datasheet -->

## Verified Working Commands

```python
# Read temperature from LM75 at I2C address 0x48, register 0x00
i2c_read(addr=0x48, register=0x00, len=2)

# Convert the two bytes to °C (9-bit two's complement, 0.5 °C per LSB):
# raw = (byte[0] << 1) | (byte[1] >> 7)
# if byte[0] bit 7 is 1 the value is negative: raw -= 512
# temp = raw * 0.5
```

## Notes

<!-- Add any device-specific quirks, power-on sequences, or gotchas here -->
@@ -0,0 +1,63 @@
# Skill: I2C Operations via Aardvark

<!-- Copy to ~/.zeroclaw/hardware/skills/i2c.md -->

## Always scan first

If the I2C address is unknown, run `i2c_scan` before anything else.

## Common device addresses

| Address range | Typical devices |
|---------------|-----------------------------------------------|
| 0x08–0x0F | Reserved / rare |
| 0x3C, 0x3D | SSD1306 OLED display |
| 0x40–0x47 | HTU21D (temp/humidity) |
| 0x42 | Common PSoC6 default |
| 0x48–0x4F | LM75, TMP102, DS1621, ADS1115 (ADC) |
| 0x50–0x57 | AT24Cxx EEPROM |
| 0x68–0x6F | MPU6050 IMU, DS1307 / DS3231 RTC |
| 0x76–0x77 | BME280, BMP280 (pressure + humidity) |

## Reading a register

```text
i2c_read(addr=0x48, register=0x00, len=2)
```

## Writing a register

```text
i2c_write(addr=0x48, bytes=[0x01, 0x60])
```

## Write-then-read (register pointer pattern)

Some devices require you to first write the register address, then read separately:

```text
i2c_write(addr=0x48, bytes=[0x00])
i2c_read(addr=0x48, len=2)
```

The `i2c_read` tool handles this automatically when you specify `register=`.

## Temperature conversion — LM75 / TMP102

Raw bytes from register 0x00 are big-endian, 9-bit or 11-bit:

```python
raw = (byte[0] << 1) | (byte[1] >> 7)   # for LM75 (9-bit)
if raw >= 256: raw -= 512               # handle negative (two's complement)
temp_c = raw * 0.5
```

## Decision table — Aardvark vs Pico tools

| Scenario | Use |
|------------------------------------------------|---------------|
| Talking to an I2C sensor via Aardvark | `i2c_read` |
| Configuring a sensor register | `i2c_write` |
| Discovering what's on the bus | `i2c_scan` |
| Running MicroPython on the connected Pico | `device_exec` |
| Blinking the Pico LED | `device_exec` |
@@ -88,6 +88,7 @@ checksum = "8ec610d8f49840a5b376c69663b6369e71f4b34484b9b2eb29fb918d92516cb9"
dependencies = [
 "bare-metal",
 "bitfield",
 "critical-section",
 "embedded-hal 0.2.7",
 "volatile-register",
]
@@ -837,6 +838,7 @@ dependencies = [
name = "nucleo"
version = "0.1.0"
dependencies = [
 "cortex-m",
 "cortex-m-rt",
 "critical-section",
 "defmt 1.0.1",
@@ -7,6 +7,8 @@
# Flash: probe-rs run --chip STM32F401RETx target/thumbv7em-none-eabihf/release/nucleo
# Or: zeroclaw peripheral flash-nucleo

[workspace]

[package]
name = "nucleo"
version = "0.1.0"
@@ -18,12 +20,13 @@ description = "ZeroClaw Nucleo-F401RE peripheral firmware — GPIO over JSON ser
embassy-executor = { version = "0.9", features = ["arch-cortex-m", "executor-thread", "defmt"] }
embassy-stm32 = { version = "0.5", features = ["defmt", "stm32f401re", "unstable-pac", "memory-x", "time-driver-tim4", "exti"] }
embassy-time = { version = "0.5", features = ["defmt", "defmt-timestamp-uptime", "tick-hz-32_768"] }
cortex-m = { version = "0.7", features = ["inline-asm", "critical-section-single-core"] }
cortex-m-rt = "0.7"
defmt = "1.0"
defmt-rtt = "1.0"
panic-probe = { version = "1.0", features = ["print-defmt"] }
heapless = { version = "0.9", default-features = false }
critical-section = "1.1"

[package.metadata.embassy]
build = [
@@ -34,6 +37,5 @@ build = [
opt-level = "s"
lto = true
codegen-units = 1
strip = true
panic = "abort"
debug = 2
@@ -0,0 +1,2 @@
# ZeroClaw Pico firmware — serial protocol handler
# Placeholder: replace with actual MicroPython firmware for Pico deployment
@@ -0,0 +1,3 @@
[target.thumbv7em-none-eabihf]
rustflags = ["-C", "link-arg=-Tlink.x", "-C", "link-arg=-Tdefmt.x"]
runner = "probe-rs run --chip STM32F401RETx"
@@ -230,6 +230,49 @@ detect_release_target() {
  esac
}

detect_device_class() {
  # Containers are never desktops
  if _is_container_runtime; then
    echo "container"
    return
  fi

  # Termux / Android
  if [[ -n "${TERMUX_VERSION:-}" || -d "/data/data/com.termux" ]]; then
    echo "mobile"
    return
  fi

  local os arch
  os="$(uname -s)"
  arch="$(uname -m)"

  case "$os" in
    Darwin)
      # macOS is always a desktop
      echo "desktop"
      ;;
    Linux)
      # Raspberry Pi / ARM SBCs — treat as embedded (typically headless)
      case "$arch" in
        armv6l|armv7l)
          echo "embedded"
          return
          ;;
      esac
      # Check for a display server (X11 or Wayland)
      if [[ -n "${DISPLAY:-}" || -n "${WAYLAND_DISPLAY:-}" || -n "${XDG_SESSION_TYPE:-}" ]]; then
        echo "desktop"
      else
        echo "server"
      fi
      ;;
    *)
      echo "server"
      ;;
  esac
}

should_attempt_prebuilt_for_resources() {
  local workspace="${1:-.}"
  local min_ram_mb min_disk_mb total_ram_mb free_disk_mb low_resource
@@ -568,6 +611,31 @@ then re-run bootstrap.
MSG
  exit 0
fi
# Detect un-accepted Xcode/CLT license (causes `cc` to exit 69).
# xcrun --show-sdk-path can succeed even without an accepted license,
# so we test-compile a trivial C file which reliably triggers the error.
_xcode_test_file="$(mktemp /tmp/zeroclaw-xcode-check.XXXXXX.c)"
printf 'int main(){return 0;}\n' > "$_xcode_test_file"
if ! cc -x c "$_xcode_test_file" -o /dev/null 2>/dev/null; then
  rm -f "$_xcode_test_file"
  warn "Xcode/CLT license has not been accepted. Attempting to accept it now..."
  _xcode_accept_ok=false
  if [[ "$(id -u)" -eq 0 ]]; then
    xcodebuild -license accept && _xcode_accept_ok=true
  elif [[ -c /dev/tty ]] && have_cmd sudo; then
    sudo xcodebuild -license accept < /dev/tty && _xcode_accept_ok=true
  fi
  if [[ "$_xcode_accept_ok" == true ]]; then
    step_ok "Xcode license accepted"
  else
    error "Could not accept Xcode license. Run manually:"
    error " sudo xcodebuild -license accept"
    error "then re-run this installer."
    exit 1
  fi
else
  rm -f "$_xcode_test_file"
fi
if ! have_cmd git; then
  warn "git is not available. Install git (e.g., Homebrew) and re-run bootstrap."
fi
@@ -1130,6 +1198,9 @@ while [[ $# -gt 0 ]]; do
done

OS_NAME="$(uname -s)"
DEVICE_CLASS="$(detect_device_class)"
step_dot "Device: $OS_NAME/$(uname -m) ($DEVICE_CLASS)"

if [[ "$GUIDED_MODE" == "auto" ]]; then
  if [[ "$OS_NAME" == "Linux" && "$ORIGINAL_ARG_COUNT" -eq 0 && -t 0 && -t 1 ]]; then
    GUIDED_MODE="on"
@@ -1168,6 +1239,43 @@ else
  install_system_deps
fi

# Always check Xcode/CLT license on macOS, regardless of --install-system-deps.
# An un-accepted license causes `cc` to exit 69, breaking all Rust builds.
if [[ "$OS_NAME" == "Darwin" ]]; then
  _xcode_test_file="$(mktemp /tmp/zeroclaw-xcode-check.XXXXXX.c)"
  printf 'int main(){return 0;}\n' > "$_xcode_test_file"
  if ! cc -x c "$_xcode_test_file" -o /dev/null 2>/dev/null; then
    rm -f "$_xcode_test_file"
    warn "Xcode/CLT license has not been accepted. Attempting to accept it now..."
    # Use /dev/tty so sudo can prompt for a password even in a curl|bash pipe.
    _xcode_accept_ok=false
    if [[ "$(id -u)" -eq 0 ]]; then
      xcodebuild -license accept && _xcode_accept_ok=true
    elif [[ -c /dev/tty ]] && have_cmd sudo; then
      sudo xcodebuild -license accept < /dev/tty && _xcode_accept_ok=true
    fi
    if [[ "$_xcode_accept_ok" == true ]]; then
      step_ok "Xcode license accepted"
      # Re-test compilation to confirm it's fixed.
      _xcode_test_file="$(mktemp /tmp/zeroclaw-xcode-check.XXXXXX.c)"
      printf 'int main(){return 0;}\n' > "$_xcode_test_file"
      if ! cc -x c "$_xcode_test_file" -o /dev/null 2>/dev/null; then
        rm -f "$_xcode_test_file"
        error "C compiler still failing after license accept. Check your Xcode/CLT installation."
        exit 1
      fi
      rm -f "$_xcode_test_file"
    else
      error "Could not accept Xcode license. Run manually:"
      error " sudo xcodebuild -license accept"
      error "then re-run this installer."
      exit 1
    fi
  else
    rm -f "$_xcode_test_file"
  fi
fi

if [[ "$INSTALL_RUST" == true ]]; then
  install_rust_toolchain
fi
@@ -1354,8 +1462,20 @@ if [[ "$SKIP_BUILD" == false ]]; then
    step_dot "Cleaning stale build cache (upgrade detected)"
    cargo clean --release 2>/dev/null || true
  fi

  # Determine cargo feature flags — disable prometheus on 32-bit targets
  # (prometheus crate requires AtomicU64, unavailable on armv7l/armv6l)
  CARGO_FEATURE_FLAGS=""
  _build_arch="$(uname -m)"
  case "$_build_arch" in
    armv7l|armv6l|armhf)
      step_dot "32-bit ARM detected ($_build_arch) — disabling prometheus (requires 64-bit atomics)"
      CARGO_FEATURE_FLAGS="--no-default-features --features channel-nostr,skill-creation"
      ;;
  esac

  step_dot "Building release binary"
  cargo build --release --locked $CARGO_FEATURE_FLAGS
  step_ok "Release binary built"
else
  step_dot "Skipping build"
@@ -1374,7 +1494,7 @@ if [[ "$SKIP_INSTALL" == false ]]; then
    fi
  fi

  cargo install --path "$WORK_DIR" --force --locked $CARGO_FEATURE_FLAGS
  step_ok "ZeroClaw installed"

  # Sync binary to ~/.local/bin so PATH lookups find the fresh version
@@ -1386,6 +1506,85 @@ else
  step_dot "Skipping install"
fi

# --- Build web dashboard ---
if [[ "$SKIP_BUILD" == false && -d "$WORK_DIR/web" ]]; then
  if have_cmd node && have_cmd npm; then
    step_dot "Building web dashboard"
    if (cd "$WORK_DIR/web" && npm ci --ignore-scripts 2>/dev/null && npm run build 2>/dev/null); then
      step_ok "Web dashboard built"
    else
      warn "Web dashboard build failed — dashboard will not be available"
    fi
  else
    warn "node/npm not found — skipping web dashboard build"
    warn "Install Node.js (>=18) and re-run, or build manually: cd web && npm ci && npm run build"
  fi
else
  if [[ "$SKIP_BUILD" == true ]]; then
    step_dot "Skipping web dashboard build"
  fi
fi

# --- Companion desktop app (device-class-aware) ---
# The desktop app is a pre-built download from the website, not built from source.
# This keeps the one-liner install fast and the CLI binary small.
DESKTOP_DOWNLOAD_URL="https://www.zeroclawlabs.ai/download"
DESKTOP_APP_DETECTED=false

if [[ "$DEVICE_CLASS" == "desktop" ]]; then
  # Check if the companion app is already installed
  case "$OS_NAME" in
    Darwin)
      if [[ -d "/Applications/ZeroClaw.app" ]] || [[ -d "$HOME/Applications/ZeroClaw.app" ]]; then
        DESKTOP_APP_DETECTED=true
        step_ok "Companion app found (ZeroClaw.app)"
      fi
      ;;
    Linux)
      if have_cmd zeroclaw-desktop; then
        DESKTOP_APP_DETECTED=true
        step_ok "Companion app found (zeroclaw-desktop)"
      elif [[ -x "$HOME/.local/bin/zeroclaw-desktop" ]]; then
        DESKTOP_APP_DETECTED=true
        step_ok "Companion app found (~/.local/bin/zeroclaw-desktop)"
      fi
      ;;
  esac

  if [[ "$DESKTOP_APP_DETECTED" == false ]]; then
    echo
    echo -e "${BOLD}Companion App${RESET}"
    echo -e " Menu bar access to your ZeroClaw agent."
    echo -e " Works alongside the CLI — connects to the same gateway."
    echo
    case "$OS_NAME" in
      Darwin)
        echo -e " ${BOLD}Download for macOS:${RESET} ${BLUE}${DESKTOP_DOWNLOAD_URL}${RESET}"
        ;;
      Linux)
        echo -e " ${BOLD}Download for Linux:${RESET} ${BLUE}${DESKTOP_DOWNLOAD_URL}${RESET}"
        ;;
    esac
    echo -e " ${DIM}Or run: zeroclaw desktop --install${RESET}"
  fi
elif [[ "$DEVICE_CLASS" != "desktop" ]]; then
  # Non-desktop device — explain why companion app is not offered
  case "$DEVICE_CLASS" in
    mobile)
      step_dot "Mobile device — use the web dashboard at http://127.0.0.1:42617"
      ;;
    embedded)
      step_dot "Embedded device ($(uname -m)) — use the web dashboard"
      ;;
    container)
      step_dot "Container runtime — use the web dashboard"
      ;;
    server)
      step_dot "Headless server — use the web dashboard"
      ;;
  esac
fi

ZEROCLAW_BIN=""
if [[ -x "$HOME/.cargo/bin/zeroclaw" ]]; then
  ZEROCLAW_BIN="$HOME/.cargo/bin/zeroclaw"
@@ -1460,25 +1659,6 @@ if [[ -n "$ZEROCLAW_BIN" ]]; then
  if "$ZEROCLAW_BIN" service restart 2>/dev/null; then
    step_ok "Gateway service restarted"

    # Fetch and display pairing code from running gateway
    PAIR_CODE=""
    for i in 1 2 3 4 5; do
      sleep 2
      if PAIR_CODE=$("$ZEROCLAW_BIN" gateway get-paircode 2>/dev/null | grep -oE '[0-9]{6}'); then
        break
      fi
    done
    if [[ -n "$PAIR_CODE" ]]; then
      echo
      echo -e " ${BOLD_BLUE}🔐 Gateway Pairing Code${RESET}"
      echo
      echo -e " ${BOLD_BLUE}┌──────────────┐${RESET}"
      echo -e " ${BOLD_BLUE}│${RESET} ${BOLD}${PAIR_CODE}${RESET} ${BOLD_BLUE}│${RESET}"
      echo -e " ${BOLD_BLUE}└──────────────┘${RESET}"
      echo
      echo -e " ${DIM}Enter this code in the dashboard to pair your device.${RESET}"
      echo -e " ${DIM}Run 'zeroclaw gateway get-paircode --new' anytime to generate a fresh code.${RESET}"
    fi
  else
    step_fail "Gateway service restart failed — re-run with zeroclaw service start"
  fi
@@ -1525,7 +1705,6 @@ GATEWAY_PORT=42617
DASHBOARD_URL="http://127.0.0.1:${GATEWAY_PORT}"
echo
echo -e "${BOLD}Dashboard URL:${RESET} ${BLUE}${DASHBOARD_URL}${RESET}"
echo -e "${DIM} Run 'zeroclaw gateway get-paircode' to get your pairing code.${RESET}"

# --- Copy to clipboard ---
COPIED_TO_CLIPBOARD=false
@@ -1572,6 +1751,13 @@ echo -e "${BOLD}Next steps:${RESET}"
echo -e " ${DIM}zeroclaw status${RESET}"
echo -e " ${DIM}zeroclaw agent -m \"Hello, ZeroClaw!\"${RESET}"
echo -e " ${DIM}zeroclaw gateway${RESET}"
if [[ "$DEVICE_CLASS" == "desktop" ]]; then
  if [[ "$DESKTOP_APP_DETECTED" == true ]]; then
    echo -e " ${DIM}zeroclaw desktop${RESET} ${DIM}# Launch the menu bar app${RESET}"
  else
    echo -e " ${DIM}zeroclaw desktop --install${RESET} ${DIM}# Download the companion app${RESET}"
  fi
fi
echo
echo -e "${BOLD}Docs:${RESET} ${BLUE}https://www.zeroclawlabs.ai/docs${RESET}"
echo
@@ -0,0 +1,10 @@
# Allow the gpio group to control the Raspberry Pi onboard ACT LED
# via the Linux LED subsystem sysfs interface.
#
# Without this rule /sys/class/leds/ACT/{brightness,trigger} are
# root-only writable, which prevents zeroclaw from blinking the LED.
SUBSYSTEM=="leds", KERNEL=="ACT", ACTION=="add", \
RUN+="/bin/chgrp gpio /sys/%p/brightness", \
RUN+="/bin/chmod g+w /sys/%p/brightness", \
RUN+="/bin/chgrp gpio /sys/%p/trigger", \
RUN+="/bin/chmod g+w /sys/%p/trigger"
@@ -0,0 +1,232 @@
|
||||
# scripts/ — Raspberry Pi Deployment Guide
|
||||
|
||||
This directory contains everything needed to cross-compile ZeroClaw and deploy it to a Raspberry Pi over SSH.
|
||||
|
||||
## Contents
|
||||
|
||||
| File | Purpose |
|
||||
|------|---------|
|
||||
| `deploy-rpi.sh` | One-shot cross-compile and deploy script |
|
||||
| `rpi-config.toml` | Production config template deployed to `~/.zeroclaw/config.toml` |
|
||||
| `zeroclaw.service` | systemd unit file installed on the Pi |
|
||||
| `99-act-led.rules` | udev rule for ACT LED sysfs access without sudo |
|
||||
|
||||
---
|
||||
|
||||
## Prerequisites
|
||||
|
||||
### Cross-compilation toolchain (pick one)
|
||||
|
||||
#### Option A — cargo-zigbuild (recommended for Apple Silicon)
|
||||
|
||||
```bash
|
||||
brew install zig
|
||||
cargo install cargo-zigbuild
|
||||
rustup target add aarch64-unknown-linux-gnu
|
||||
```
|
||||
|
||||
#### Option B — cross (Docker-based)
|
||||
|
||||
```bash
|
||||
cargo install cross
|
||||
rustup target add aarch64-unknown-linux-gnu
|
||||
# Docker must be running
|
||||
```
|
||||
|
||||
The deploy script auto-detects which tool is available, preferring `cargo-zigbuild`.
|
||||
Force a specific tool with `CROSS_TOOL=zigbuild` or `CROSS_TOOL=cross`.
|
||||
|
||||
### Optional: non-interactive password auth (sshpass)
|
||||
|
||||
If you can't use SSH key authentication, install `sshpass` and set the `RPI_PASS` environment variable:
|
||||
|
||||
```bash
|
||||
brew install hudochenkov/sshpass/sshpass # macOS (sshpass is not in Homebrew core)
|
||||
sudo apt install sshpass # Linux
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Quick Start
|
||||
|
||||
```bash
|
||||
RPI_HOST=raspberrypi.local RPI_USER=pi ./scripts/deploy-rpi.sh
|
||||
```
|
||||
|
||||
After the first deploy, you must set your API key on the Pi (see [First-Time Setup](#first-time-setup)).
|
||||
|
||||
---
|
||||
|
||||
## Environment Variables
|
||||
|
||||
| Variable | Default | Description |
|
||||
|----------|---------|-------------|
|
||||
| `RPI_HOST` | `raspberrypi.local` | Pi hostname or IP address |
|
||||
| `RPI_USER` | `pi` | SSH username |
|
||||
| `RPI_PORT` | `22` | SSH port |
|
||||
| `RPI_DIR` | `~/zeroclaw` | Remote directory for the binary and `.env` |
|
||||
| `RPI_PASS` | _(unset)_ | SSH password — uses `sshpass` if set; key auth used otherwise |
|
||||
| `CROSS_TOOL` | _(auto-detect)_ | Force `zigbuild` or `cross` |
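For example, deploying to a Pi at a fixed IP on a non-standard SSH port with password auth (the host, port, and password values here are placeholders):

```bash
RPI_HOST=192.168.1.50 RPI_PORT=2222 RPI_USER=pi RPI_PASS='changeme' \
  ./scripts/deploy-rpi.sh
```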
|
||||
|
||||
---
|
||||
|
||||
## What the Deploy Script Does
|
||||
|
||||
1. **Cross-compile** — builds a release binary for `aarch64-unknown-linux-gnu` with `--features hardware,peripheral-rpi`.
|
||||
2. **Stop service** — runs `sudo systemctl stop zeroclaw` on the Pi (continues if not yet installed).
|
||||
3. **Create remote directory** — ensures `$RPI_DIR` exists on the Pi.
|
||||
4. **Copy binary** — SCPs the compiled binary to `$RPI_DIR/zeroclaw`.
|
||||
5. **Create `.env`** — writes an `.env` skeleton with an `ANTHROPIC_API_KEY=` placeholder to `$RPI_DIR/.env` with mode `600`. Skipped if the file already exists so an existing key is not overwritten.
|
||||
6. **Deploy config** — copies `rpi-config.toml` to `~/.zeroclaw/config.toml`, preserving any `api_key` already present in the file.
|
||||
7. **Install systemd service** — copies `zeroclaw.service` to `/etc/systemd/system/`, then enables and restarts it.
|
||||
8. **Hardware permissions** — adds the deploy user to the `gpio` group, copies `99-act-led.rules` to `/etc/udev/rules.d/`, and resets the ACT LED trigger.
|
||||
|
||||
---
|
||||
|
||||
## First-Time Setup
|
||||
|
||||
After the first successful deploy, SSH into the Pi and fill in your API key:
|
||||
|
||||
```bash
|
||||
ssh pi@raspberrypi.local
|
||||
nano ~/zeroclaw/.env
|
||||
# Set: ANTHROPIC_API_KEY=sk-ant-...
|
||||
sudo systemctl restart zeroclaw
|
||||
```
|
||||
|
||||
The `.env` is loaded by the systemd service as an `EnvironmentFile`.
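The relevant lines, excerpted from the `zeroclaw.service` unit later in this diff:

```
[Service]
EnvironmentFile=/home/pi/zeroclaw/.env
Environment=RUST_LOG=info
```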
|
||||
|
||||
---
|
||||
|
||||
## Interacting with ZeroClaw on the Pi
|
||||
|
||||
Once the service is running, the gateway listens on port **8080**.
|
||||
|
||||
### Health check
|
||||
|
||||
```bash
|
||||
curl http://raspberrypi.local:8080/health
|
||||
```
|
||||
|
||||
### Send a message
|
||||
|
||||
```bash
|
||||
curl -s -X POST http://raspberrypi.local:8080/api/chat \
|
||||
-H 'Content-Type: application/json' \
|
||||
-d '{"message": "What is the CPU temperature?"}' | jq .
|
||||
```
|
||||
|
||||
### Stream a conversation
|
||||
|
||||
```bash
|
||||
curl -N -s -X POST http://raspberrypi.local:8080/api/chat \
|
||||
-H 'Content-Type: application/json' \
|
||||
-H 'Accept: text/event-stream' \
|
||||
-d '{"message": "List connected hardware devices", "stream": true}'
|
||||
```
|
||||
|
||||
### Follow service logs
|
||||
|
||||
```bash
|
||||
ssh pi@raspberrypi.local 'journalctl -u zeroclaw -f'
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Hardware Features
|
||||
|
||||
### GPIO tools
|
||||
|
||||
ZeroClaw is deployed with the `peripheral-rpi` feature, which enables two LLM-callable tools:
|
||||
|
||||
- **`gpio_read`** — reads a GPIO pin value via sysfs (`/sys/class/gpio/...`).
|
||||
- **`gpio_write`** — writes a GPIO pin value.
|
||||
|
||||
These tools let the agent directly control hardware in response to natural-language instructions.
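As a rough sketch of what these tools do under the hood (the pin number is arbitrary, and the legacy sysfs GPIO interface must be available on your kernel):

```bash
# Export GPIO 17, configure it as an output, and drive it high.
echo 17 > /sys/class/gpio/export
echo out > /sys/class/gpio/gpio17/direction
echo 1 > /sys/class/gpio/gpio17/value

# Read the value back, then release the pin.
cat /sys/class/gpio/gpio17/value
echo 17 > /sys/class/gpio/unexport
```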
|
||||
|
||||
### ACT LED
|
||||
|
||||
The udev rule `99-act-led.rules` grants the `gpio` group write access to:
|
||||
|
||||
```
|
||||
/sys/class/leds/ACT/trigger
|
||||
/sys/class/leds/ACT/brightness
|
||||
```
|
||||
|
||||
This allows toggling the Pi's green ACT LED without `sudo`.
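For example, once the rule is in place, any member of the `gpio` group can blink the LED by hand:

```bash
# Take manual control, flash once, then hand control back to a kernel trigger.
echo none > /sys/class/leds/ACT/trigger
echo 1 > /sys/class/leds/ACT/brightness
sleep 1
echo 0 > /sys/class/leds/ACT/brightness
echo mmc0 > /sys/class/leds/ACT/trigger  # common default trigger; varies by Pi model
```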
|
||||
|
||||
### Aardvark I2C/SPI adapter
|
||||
|
||||
If a Total Phase Aardvark adapter is connected, the `hardware` feature enables I2C/SPI communication with external devices. No extra setup is needed — the device is auto-detected via USB.
|
||||
|
||||
---
|
||||
|
||||
## Files Deployed to the Pi
|
||||
|
||||
| Remote path | Source | Description |
|
||||
|------------|--------|-------------|
|
||||
| `~/zeroclaw/zeroclaw` | compiled binary | Main agent binary |
|
||||
| `~/zeroclaw/.env` | created on first deploy | API key and environment variables |
|
||||
| `~/.zeroclaw/config.toml` | `rpi-config.toml` | Agent configuration |
|
||||
| `/etc/systemd/system/zeroclaw.service` | `zeroclaw.service` | systemd service unit |
|
||||
| `/etc/udev/rules.d/99-act-led.rules` | `99-act-led.rules` | ACT LED permissions |
|
||||
|
||||
---
|
||||
|
||||
## Configuration
|
||||
|
||||
`rpi-config.toml` is the production config template. Key defaults:
|
||||
|
||||
- **Provider**: `anthropic-custom:https://api.z.ai/api/anthropic`
|
||||
- **Model**: `claude-3-5-sonnet-20241022`
|
||||
- **Autonomy**: `full`
|
||||
- **Allowed shell commands**: `git`, `cargo`, `npm`, `mkdir`, `touch`, `cp`, `mv`, `ls`, `cat`, `grep`, `find`, `echo`, `pwd`, `wc`, `head`, `tail`, `date`
|
||||
|
||||
To customize, edit `~/.zeroclaw/config.toml` directly on the Pi and restart the service.
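For example, from your workstation:

```bash
ssh pi@raspberrypi.local
nano ~/.zeroclaw/config.toml
sudo systemctl restart zeroclaw
```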
|
||||
|
||||
---
|
||||
|
||||
## Troubleshooting
|
||||
|
||||
### Service won't start
|
||||
|
||||
```bash
|
||||
ssh pi@raspberrypi.local 'sudo systemctl status zeroclaw'
|
||||
ssh pi@raspberrypi.local 'journalctl -u zeroclaw -n 50 --no-pager'
|
||||
```
|
||||
|
||||
### GPIO permission denied
|
||||
|
||||
Make sure the deploy user is in the `gpio` group and that a fresh login session has been started:
|
||||
|
||||
```bash
|
||||
ssh pi@raspberrypi.local 'groups'
|
||||
# Should include: gpio
|
||||
```
|
||||
|
||||
If the group was just added, log out and back in, or run `newgrp gpio`.
|
||||
|
||||
### Wrong architecture / binary won't run
|
||||
|
||||
Re-run the deploy script, then confirm the binary was built for the right target:
|
||||
|
||||
```bash
|
||||
ssh pi@raspberrypi.local 'file ~/zeroclaw/zeroclaw'
|
||||
# Expected: ELF 64-bit LSB pie executable, ARM aarch64
|
||||
```
|
||||
|
||||
### Force a specific cross-compilation tool
|
||||
|
||||
```bash
|
||||
CROSS_TOOL=zigbuild RPI_HOST=raspberrypi.local ./scripts/deploy-rpi.sh
|
||||
# or
|
||||
CROSS_TOOL=cross RPI_HOST=raspberrypi.local ./scripts/deploy-rpi.sh
|
||||
```
|
||||
|
||||
### Rebuild locally without deploying
|
||||
|
||||
```bash
|
||||
cargo zigbuild --release \
|
||||
--target aarch64-unknown-linux-gnu \
|
||||
--features hardware,peripheral-rpi
|
||||
```
|
||||
@@ -0,0 +1,21 @@
|
||||
#!/bin/bash
|
||||
# Start a browser on a virtual display
|
||||
# Usage: ./start-browser.sh [display_num] [url]
|
||||
|
||||
set -e
|
||||
|
||||
DISPLAY_NUM=${1:-99}
|
||||
URL=${2:-"https://google.com"}
|
||||
|
||||
export DISPLAY=:$DISPLAY_NUM
|
||||
|
||||
# Check if display is running
|
||||
if ! xdpyinfo -display :$DISPLAY_NUM &>/dev/null; then
|
||||
echo "Error: Display :$DISPLAY_NUM not running."
|
||||
echo "Start VNC first: ./start-vnc.sh"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
google-chrome --no-sandbox --disable-gpu --disable-setuid-sandbox "$URL" &
|
||||
echo "Chrome started on display :$DISPLAY_NUM"
|
||||
echo "View via VNC or noVNC"
|
||||
@@ -0,0 +1,52 @@
|
||||
#!/bin/bash
|
||||
# Start virtual display with VNC access for browser GUI
|
||||
# Usage: ./start-vnc.sh [display_num] [vnc_port] [novnc_port] [resolution]
|
||||
|
||||
set -e
|
||||
|
||||
DISPLAY_NUM=${1:-99}
|
||||
VNC_PORT=${2:-5900}
|
||||
NOVNC_PORT=${3:-6080}
|
||||
RESOLUTION=${4:-1920x1080x24}
|
||||
|
||||
echo "Starting virtual display :$DISPLAY_NUM at $RESOLUTION"
|
||||
|
||||
# Kill any existing sessions
|
||||
pkill -f "Xvfb :$DISPLAY_NUM" 2>/dev/null || true
|
||||
pkill -f "x11vnc.*:$DISPLAY_NUM" 2>/dev/null || true
|
||||
pkill -f "websockify.*$NOVNC_PORT" 2>/dev/null || true
|
||||
sleep 1
|
||||
|
||||
# Start Xvfb (virtual framebuffer)
|
||||
Xvfb :$DISPLAY_NUM -screen 0 $RESOLUTION -ac &
|
||||
XVFB_PID=$!
|
||||
sleep 1
|
||||
|
||||
# Set DISPLAY
|
||||
export DISPLAY=:$DISPLAY_NUM
|
||||
|
||||
# Start window manager
|
||||
fluxbox -display :$DISPLAY_NUM 2>/dev/null &
|
||||
sleep 1
|
||||
|
||||
# Start x11vnc
|
||||
x11vnc -display :$DISPLAY_NUM -rfbport $VNC_PORT -forever -shared -nopw -bg 2>/dev/null
|
||||
sleep 1
|
||||
|
||||
# Start noVNC (web-based VNC client)
|
||||
websockify --web=/usr/share/novnc $NOVNC_PORT localhost:$VNC_PORT &
|
||||
NOVNC_PID=$!
|
||||
|
||||
echo ""
|
||||
echo "==================================="
|
||||
echo "VNC Server started!"
|
||||
echo "==================================="
|
||||
echo "VNC Direct: localhost:$VNC_PORT"
|
||||
echo "noVNC Web: http://localhost:$NOVNC_PORT/vnc.html"
|
||||
echo "Display: :$DISPLAY_NUM"
|
||||
echo "==================================="
|
||||
echo ""
|
||||
echo "To start a browser, run:"
|
||||
echo " DISPLAY=:$DISPLAY_NUM google-chrome &"
|
||||
echo ""
|
||||
echo "To stop, run: pkill -f 'Xvfb :$DISPLAY_NUM'"
|
||||
@@ -0,0 +1,11 @@
|
||||
#!/bin/bash
|
||||
# Stop virtual display and VNC server
|
||||
# Usage: ./stop-vnc.sh [display_num] [novnc_port]
|
||||
|
||||
DISPLAY_NUM=${1:-99}
NOVNC_PORT=${2:-6080}
|
||||
|
||||
pkill -f "Xvfb :$DISPLAY_NUM" 2>/dev/null || true
|
||||
pkill -f "x11vnc.*:$DISPLAY_NUM" 2>/dev/null || true
|
||||
pkill -f "websockify.*6080" 2>/dev/null || true
|
||||
|
||||
echo "VNC server stopped"
|
||||
@@ -0,0 +1,223 @@
|
||||
#!/usr/bin/env bash
|
||||
# deploy-rpi.sh — cross-compile ZeroClaw for Raspberry Pi and deploy via SSH.
|
||||
#
|
||||
# Cross-compilation (pick ONE — the script auto-detects):
|
||||
#
|
||||
# Option A — cargo-zigbuild (recommended; works on Apple Silicon + Intel, no Docker)
|
||||
# brew install zig
|
||||
# cargo install cargo-zigbuild
|
||||
# rustup target add aarch64-unknown-linux-gnu
|
||||
#
|
||||
# Option B — cross (Docker-based; requires Docker Desktop running)
|
||||
# cargo install cross
|
||||
#
|
||||
# Usage:
|
||||
# RPI_HOST=raspberrypi.local RPI_USER=pi ./scripts/deploy-rpi.sh
|
||||
#
|
||||
# Optional env vars:
|
||||
# RPI_HOST — hostname or IP of the Pi (default: raspberrypi.local)
|
||||
# RPI_USER — SSH user on the Pi (default: pi)
|
||||
# RPI_PORT — SSH port (default: 22)
|
||||
# RPI_DIR — remote deployment dir (default: /home/$RPI_USER/zeroclaw)
|
||||
#   RPI_PASS   — SSH password (uses sshpass)      (default: unset; key auth or interactive prompt)
|
||||
# CROSS_TOOL — force "zigbuild" or "cross" (default: auto-detect)
|
||||
|
||||
set -euo pipefail
|
||||
|
||||
RPI_HOST="${RPI_HOST:-raspberrypi.local}"
|
||||
RPI_USER="${RPI_USER:-pi}"
|
||||
RPI_PORT="${RPI_PORT:-22}"
|
||||
RPI_DIR="${RPI_DIR:-/home/${RPI_USER}/zeroclaw}"
|
||||
TARGET="aarch64-unknown-linux-gnu"
|
||||
FEATURES="hardware,peripheral-rpi"
|
||||
BINARY="target/${TARGET}/release/zeroclaw"
|
||||
SSH_OPTS="-p ${RPI_PORT} -o StrictHostKeyChecking=no -o ConnectTimeout=10"
|
||||
# scp uses -P (uppercase) for port; ssh uses -p (lowercase)
|
||||
SCP_OPTS="-P ${RPI_PORT} -o StrictHostKeyChecking=no -o ConnectTimeout=10"
|
||||
|
||||
# If RPI_PASS is set, wrap ssh/scp with sshpass for non-interactive auth.
|
||||
SSH_CMD="ssh"
|
||||
SCP_CMD="scp"
|
||||
if [[ -n "${RPI_PASS:-}" ]]; then
|
||||
if ! command -v sshpass &>/dev/null; then
|
||||
echo "ERROR: RPI_PASS is set but sshpass is not installed."
|
||||
echo " brew install hudochenkov/sshpass/sshpass"
|
||||
exit 1
|
||||
fi
|
||||
SSH_CMD="sshpass -p ${RPI_PASS} ssh"
|
||||
SCP_CMD="sshpass -p ${RPI_PASS} scp"
|
||||
fi
|
||||
|
||||
echo "==> Building ZeroClaw for Raspberry Pi (${TARGET})"
|
||||
echo " Features: ${FEATURES}"
|
||||
echo " Target host: ${RPI_USER}@${RPI_HOST}:${RPI_PORT}"
|
||||
echo ""
|
||||
|
||||
# ── 1. Cross-compile — auto-detect best available tool ───────────────────────
|
||||
# Prefer cargo-zigbuild: it works on Apple Silicon without Docker and avoids
|
||||
# the rustup-toolchain-install errors that affect cross v0.2.x on arm64 Macs.
|
||||
_detect_cross_tool() {
|
||||
if [[ "${CROSS_TOOL:-}" == "cross" ]]; then
|
||||
echo "cross"; return
|
||||
fi
|
||||
if [[ "${CROSS_TOOL:-}" == "zigbuild" ]]; then
|
||||
echo "zigbuild"; return
|
||||
fi
|
||||
if command -v cargo-zigbuild &>/dev/null && command -v zig &>/dev/null; then
|
||||
echo "zigbuild"; return
|
||||
fi
|
||||
if command -v cross &>/dev/null; then
|
||||
echo "cross"; return
|
||||
fi
|
||||
echo "none"
|
||||
}
|
||||
|
||||
TOOL=$(_detect_cross_tool)
|
||||
|
||||
case "${TOOL}" in
|
||||
zigbuild)
|
||||
echo "==> Using cargo-zigbuild (Zig cross-linker)"
|
||||
# Ensure the target sysroot is registered with rustup.
|
||||
rustup target add "${TARGET}" 2>/dev/null || true
|
||||
cargo zigbuild \
|
||||
--target "${TARGET}" \
|
||||
--features "${FEATURES}" \
|
||||
--release
|
||||
;;
|
||||
cross)
|
||||
echo "==> Using cross (Docker-based)"
|
||||
# Verify Docker is running before handing off — gives a clear error message
|
||||
# instead of the confusing rustup-toolchain failure from cross v0.2.x.
|
||||
if ! docker info &>/dev/null; then
|
||||
echo ""
|
||||
echo "ERROR: Docker is not running."
|
||||
echo " Start Docker Desktop and retry, or install cargo-zigbuild instead:"
|
||||
echo " brew install zig && cargo install cargo-zigbuild"
|
||||
echo " rustup target add ${TARGET}"
|
||||
exit 1
|
||||
fi
|
||||
cross build \
|
||||
--target "${TARGET}" \
|
||||
--features "${FEATURES}" \
|
||||
--release
|
||||
;;
|
||||
none)
|
||||
echo ""
|
||||
echo "ERROR: No cross-compilation tool found."
|
||||
echo ""
|
||||
echo "Install one of the following and retry:"
|
||||
echo ""
|
||||
echo " Option A — cargo-zigbuild (recommended; works on Apple Silicon, no Docker):"
|
||||
echo " brew install zig"
|
||||
echo " cargo install cargo-zigbuild"
|
||||
echo " rustup target add ${TARGET}"
|
||||
echo ""
|
||||
echo " Option B — cross (requires Docker Desktop running):"
|
||||
echo " cargo install cross"
|
||||
echo ""
|
||||
exit 1
|
||||
;;
|
||||
esac
|
||||
|
||||
echo ""
|
||||
echo "==> Build complete: ${BINARY}"
|
||||
ls -lh "${BINARY}"
|
||||
|
||||
# ── 2. Stop running service (if any) so binary can be overwritten ─────────────
|
||||
echo ""
|
||||
echo "==> Stopping zeroclaw service (if running)"
|
||||
# shellcheck disable=SC2029
|
||||
${SSH_CMD} ${SSH_OPTS} "${RPI_USER}@${RPI_HOST}" \
|
||||
"sudo systemctl stop zeroclaw 2>/dev/null || true"
|
||||
|
||||
# ── 3. Create remote directory ────────────────────────────────────────────────
|
||||
echo ""
|
||||
echo "==> Creating remote directory ${RPI_DIR}"
|
||||
# shellcheck disable=SC2029
|
||||
${SSH_CMD} ${SSH_OPTS} "${RPI_USER}@${RPI_HOST}" "mkdir -p ${RPI_DIR}"
|
||||
|
||||
# ── 4. Deploy binary ──────────────────────────────────────────────────────────
|
||||
echo ""
|
||||
echo "==> Deploying binary to ${RPI_USER}@${RPI_HOST}:${RPI_DIR}/zeroclaw"
|
||||
${SCP_CMD} ${SCP_OPTS} "${BINARY}" "${RPI_USER}@${RPI_HOST}:${RPI_DIR}/zeroclaw"
|
||||
|
||||
# ── 5. Create .env skeleton (if it doesn't exist) ────────────────────────────
|
||||
ENV_DEST="${RPI_DIR}/.env"
|
||||
echo ""
|
||||
echo "==> Checking for ${ENV_DEST}"
|
||||
# shellcheck disable=SC2029
|
||||
if ${SSH_CMD} ${SSH_OPTS} "${RPI_USER}@${RPI_HOST}" "[ -f ${ENV_DEST} ]"; then
|
||||
echo " .env already exists — skipping"
|
||||
else
|
||||
echo " Creating .env skeleton with 600 permissions"
|
||||
# shellcheck disable=SC2029
|
||||
${SSH_CMD} ${SSH_OPTS} "${RPI_USER}@${RPI_HOST}" \
|
||||
"mkdir -p ${RPI_DIR} && \
|
||||
printf '# Set your API key here\nANTHROPIC_API_KEY=sk-ant-\n' > ${ENV_DEST} && \
|
||||
chmod 600 ${ENV_DEST}"
|
||||
echo " IMPORTANT: edit ${ENV_DEST} on the Pi and set ANTHROPIC_API_KEY"
|
||||
fi
|
||||
|
||||
# ── 6. Deploy config ─────────────────────────────────────────────────────────
|
||||
CONFIG_DEST="/home/${RPI_USER}/.zeroclaw/config.toml"
|
||||
echo ""
|
||||
echo "==> Deploying config to ${CONFIG_DEST}"
|
||||
# shellcheck disable=SC2029
|
||||
${SSH_CMD} ${SSH_OPTS} "${RPI_USER}@${RPI_HOST}" "mkdir -p /home/${RPI_USER}/.zeroclaw"
|
||||
# Preserve existing api_key from the remote config if present.
|
||||
# shellcheck disable=SC2029
|
||||
EXISTING_API_KEY=$(${SSH_CMD} ${SSH_OPTS} "${RPI_USER}@${RPI_HOST}" \
|
||||
"grep -m1 '^api_key' ${CONFIG_DEST} 2>/dev/null || true")
|
||||
${SCP_CMD} ${SCP_OPTS} "scripts/rpi-config.toml" "${RPI_USER}@${RPI_HOST}:${CONFIG_DEST}"
|
||||
if [[ -n "${EXISTING_API_KEY}" ]]; then
|
||||
echo " Restoring existing api_key from previous config"
|
||||
# shellcheck disable=SC2029
|
||||
${SSH_CMD} ${SSH_OPTS} "${RPI_USER}@${RPI_HOST}" \
|
||||
"sed -i 's|^# api_key = .*|${EXISTING_API_KEY}|' ${CONFIG_DEST}"
|
||||
fi
|
||||
|
||||
# ── 7. Deploy and enable systemd service ─────────────────────────────────────
|
||||
SERVICE_DEST="/etc/systemd/system/zeroclaw.service"
|
||||
echo ""
|
||||
echo "==> Installing systemd service (requires sudo on the Pi)"
|
||||
${SCP_CMD} ${SCP_OPTS} "scripts/zeroclaw.service" "${RPI_USER}@${RPI_HOST}:/tmp/zeroclaw.service"
|
||||
# shellcheck disable=SC2029
|
||||
${SSH_CMD} ${SSH_OPTS} "${RPI_USER}@${RPI_HOST}" \
|
||||
"sudo mv /tmp/zeroclaw.service ${SERVICE_DEST} && \
|
||||
sudo systemctl daemon-reload && \
|
||||
sudo systemctl enable zeroclaw && \
|
||||
sudo systemctl restart zeroclaw && \
|
||||
sudo systemctl status zeroclaw --no-pager || true"
|
||||
|
||||
# ── 8. Runtime permissions ───────────────────────────────────────────────────
|
||||
echo ""
|
||||
echo "==> Granting ${RPI_USER} access to GPIO group"
|
||||
# shellcheck disable=SC2029
|
||||
${SSH_CMD} ${SSH_OPTS} "${RPI_USER}@${RPI_HOST}" \
|
||||
"sudo usermod -aG gpio ${RPI_USER} || true"
|
||||
|
||||
# ── 9. Reset ACT LED trigger so ZeroClaw can control it ──────────────────────
|
||||
echo ""
|
||||
echo "==> Installing udev rule for ACT LED sysfs access by gpio group"
|
||||
${SCP_CMD} ${SCP_OPTS} "scripts/99-act-led.rules" "${RPI_USER}@${RPI_HOST}:/tmp/99-act-led.rules"
|
||||
# shellcheck disable=SC2029
|
||||
${SSH_CMD} ${SSH_OPTS} "${RPI_USER}@${RPI_HOST}" \
|
||||
"sudo mv /tmp/99-act-led.rules /etc/udev/rules.d/99-act-led.rules && \
|
||||
sudo udevadm control --reload-rules && \
|
||||
sudo chgrp gpio /sys/class/leds/ACT/brightness /sys/class/leds/ACT/trigger 2>/dev/null || true && \
|
||||
sudo chmod g+w /sys/class/leds/ACT/brightness /sys/class/leds/ACT/trigger 2>/dev/null || true"
|
||||
|
||||
echo ""
|
||||
echo "==> Resetting ACT LED trigger (none)"
|
||||
# shellcheck disable=SC2029
|
||||
${SSH_CMD} ${SSH_OPTS} "${RPI_USER}@${RPI_HOST}" \
|
||||
"echo none | sudo tee /sys/class/leds/ACT/trigger > /dev/null 2>&1 || true"
|
||||
|
||||
echo ""
|
||||
echo "==> Deployment complete!"
|
||||
echo ""
|
||||
echo " ZeroClaw is running at http://${RPI_HOST}:8080"
|
||||
echo " POST /api/chat — chat with the agent"
|
||||
echo " GET /health — health check"
|
||||
echo ""
|
||||
echo " To check logs: ssh ${RPI_USER}@${RPI_HOST} 'journalctl -u zeroclaw -f'"
|
||||
@@ -77,7 +77,9 @@ echo "Created annotated tag: $TAG"
|
||||
if [[ "$PUSH_TAG" == "true" ]]; then
|
||||
git push origin "$TAG"
|
||||
echo "Pushed tag to origin: $TAG"
|
||||
echo "GitHub release pipeline will run via .github/workflows/pub-release.yml"
|
||||
echo "Release Stable workflow will auto-trigger via tag push."
|
||||
echo "Monitor: gh workflow view 'Release Stable' --web"
|
||||
else
|
||||
echo "Next step: git push origin $TAG"
|
||||
echo "This will auto-trigger the Release Stable workflow (builds, Docker, crates.io, website, Scoop, AUR, Homebrew, tweet)."
|
||||
fi
|
||||
|
||||
@@ -0,0 +1,631 @@
|
||||
# ZeroClaw — Raspberry Pi production configuration
|
||||
#
|
||||
# Copy this to ~/.zeroclaw/config.toml on the Pi.
|
||||
# deploy-rpi.sh does this automatically.
|
||||
#
|
||||
# API key is loaded from ~/zeroclaw/.env (the EnvironmentFile in the systemd unit).
|
||||
# Set it there as: ANTHROPIC_API_KEY=your-key-here
|
||||
# Or set api_key directly below (not recommended for version control).
|
||||
|
||||
# api_key = ""
|
||||
default_provider = "anthropic-custom:https://api.z.ai/api/anthropic"
|
||||
default_model = "claude-3-5-sonnet-20241022"
|
||||
default_temperature = 0.4
|
||||
model_routes = []
|
||||
embedding_routes = []
|
||||
|
||||
[model_providers]
|
||||
|
||||
[provider]
|
||||
|
||||
[observability]
|
||||
backend = "none"
|
||||
runtime_trace_mode = "none"
|
||||
runtime_trace_path = "state/runtime-trace.jsonl"
|
||||
runtime_trace_max_entries = 200
|
||||
|
||||
[autonomy]
|
||||
level = "full"
|
||||
workspace_only = false
|
||||
allowed_commands = [
|
||||
"git",
|
||||
"npm",
|
||||
"cargo",
|
||||
"mkdir",
|
||||
"touch",
|
||||
"cp",
|
||||
"mv",
|
||||
"ls",
|
||||
"cat",
|
||||
"grep",
|
||||
"find",
|
||||
"echo",
|
||||
"pwd",
|
||||
"wc",
|
||||
"head",
|
||||
"tail",
|
||||
"date",
|
||||
]
|
||||
command_context_rules = []
|
||||
forbidden_paths = [
|
||||
"/etc",
|
||||
"/root",
|
||||
"/home",
|
||||
"/usr",
|
||||
"/bin",
|
||||
"/sbin",
|
||||
"/lib",
|
||||
"/opt",
|
||||
"/boot",
|
||||
"/dev",
|
||||
"/proc",
|
||||
"/sys",
|
||||
"/var",
|
||||
"/tmp",
|
||||
"/mnt",
|
||||
"~/.ssh",
|
||||
"~/.gnupg",
|
||||
"~/.aws",
|
||||
"~/.config",
|
||||
]
|
||||
max_actions_per_hour = 100
|
||||
max_cost_per_day_cents = 1000
|
||||
require_approval_for_medium_risk = true
|
||||
block_high_risk_commands = true
|
||||
shell_env_passthrough = []
|
||||
allow_sensitive_file_reads = false
|
||||
allow_sensitive_file_writes = false
|
||||
auto_approve = [
|
||||
"file_read",
|
||||
"memory_recall",
|
||||
]
|
||||
always_ask = []
|
||||
allowed_roots = []
|
||||
non_cli_excluded_tools = [
|
||||
"shell",
|
||||
"process",
|
||||
"file_write",
|
||||
"file_edit",
|
||||
"git_operations",
|
||||
"browser",
|
||||
"browser_open",
|
||||
"http_request",
|
||||
"schedule",
|
||||
"cron_add",
|
||||
"cron_remove",
|
||||
"cron_update",
|
||||
"cron_run",
|
||||
"memory_store",
|
||||
"memory_forget",
|
||||
"proxy_config",
|
||||
"web_search_config",
|
||||
"web_access_config",
|
||||
"model_routing_config",
|
||||
"channel_ack_config",
|
||||
"pushover",
|
||||
"composio",
|
||||
"delegate",
|
||||
"screenshot",
|
||||
"image_info",
|
||||
]
|
||||
non_cli_approval_approvers = []
|
||||
non_cli_natural_language_approval_mode = "direct"
|
||||
|
||||
[autonomy.non_cli_natural_language_approval_mode_by_channel]
|
||||
|
||||
[security]
|
||||
roles = []
|
||||
|
||||
[security.sandbox]
|
||||
backend = "auto"
|
||||
firejail_args = []
|
||||
|
||||
[security.resources]
|
||||
max_memory_mb = 512
|
||||
max_cpu_time_seconds = 60
|
||||
max_subprocesses = 10
|
||||
memory_monitoring = true
|
||||
|
||||
[security.audit]
|
||||
enabled = true
|
||||
log_path = "audit.log"
|
||||
max_size_mb = 100
|
||||
sign_events = false
|
||||
|
||||
[security.otp]
|
||||
enabled = true
|
||||
method = "totp"
|
||||
token_ttl_secs = 30
|
||||
cache_valid_secs = 300
|
||||
gated_actions = [
|
||||
"shell",
|
||||
"file_write",
|
||||
"browser_open",
|
||||
"browser",
|
||||
"memory_forget",
|
||||
]
|
||||
gated_domains = []
|
||||
gated_domain_categories = []
|
||||
challenge_delivery = "dm"
|
||||
challenge_timeout_secs = 120
|
||||
challenge_max_attempts = 3
|
||||
|
||||
[security.estop]
|
||||
enabled = false
|
||||
state_file = "~/.zeroclaw/estop-state.json"
|
||||
require_otp_to_resume = true
|
||||
|
||||
[security.syscall_anomaly]
|
||||
enabled = true
|
||||
strict_mode = false
|
||||
alert_on_unknown_syscall = true
|
||||
max_denied_events_per_minute = 5
|
||||
max_total_events_per_minute = 120
|
||||
max_alerts_per_minute = 30
|
||||
alert_cooldown_secs = 20
|
||||
log_path = "syscall-anomalies.log"
|
||||
baseline_syscalls = [
|
||||
"read",
|
||||
"write",
|
||||
"open",
|
||||
"openat",
|
||||
"close",
|
||||
"stat",
|
||||
"fstat",
|
||||
"newfstatat",
|
||||
"lseek",
|
||||
"mmap",
|
||||
"mprotect",
|
||||
"munmap",
|
||||
"brk",
|
||||
"rt_sigaction",
|
||||
"rt_sigprocmask",
|
||||
"ioctl",
|
||||
"fcntl",
|
||||
"access",
|
||||
"pipe2",
|
||||
"dup",
|
||||
"dup2",
|
||||
"dup3",
|
||||
"epoll_create1",
|
||||
"epoll_ctl",
|
||||
"epoll_wait",
|
||||
"poll",
|
||||
"ppoll",
|
||||
"select",
|
||||
"futex",
|
||||
"clock_gettime",
|
||||
"nanosleep",
|
||||
"getpid",
|
||||
"gettid",
|
||||
"set_tid_address",
|
||||
"set_robust_list",
|
||||
"clone",
|
||||
"clone3",
|
||||
"fork",
|
||||
"execve",
|
||||
"wait4",
|
||||
"exit",
|
||||
"exit_group",
|
||||
"socket",
|
||||
"connect",
|
||||
"accept",
|
||||
"accept4",
|
||||
"listen",
|
||||
"sendto",
|
||||
"recvfrom",
|
||||
"sendmsg",
|
||||
"recvmsg",
|
||||
"getsockname",
|
||||
"getpeername",
|
||||
"setsockopt",
|
||||
"getsockopt",
|
||||
"getrandom",
|
||||
"statx",
|
||||
]
|
||||
|
||||
[security.perplexity_filter]
|
||||
enable_perplexity_filter = false
|
||||
perplexity_threshold = 18.0
|
||||
suffix_window_chars = 64
|
||||
min_prompt_chars = 32
|
||||
symbol_ratio_threshold = 0.2
|
||||
|
||||
[security.outbound_leak_guard]
|
||||
enabled = true
|
||||
action = "redact"
|
||||
sensitivity = 0.7
|
||||
|
||||
[security.url_access]
|
||||
block_private_ip = true
|
||||
allow_cidrs = []
|
||||
allow_domains = []
|
||||
allow_loopback = false
|
||||
require_first_visit_approval = false
|
||||
enforce_domain_allowlist = false
|
||||
domain_allowlist = []
|
||||
domain_blocklist = []
|
||||
approved_domains = []
|
||||
|
||||
[runtime]
|
||||
kind = "native"
|
||||
|
||||
[runtime.docker]
|
||||
image = "alpine:3.20"
|
||||
network = "none"
|
||||
memory_limit_mb = 512
|
||||
cpu_limit = 1.0
|
||||
read_only_rootfs = true
|
||||
mount_workspace = true
|
||||
allowed_workspace_roots = []
|
||||
|
||||
[runtime.wasm]
|
||||
tools_dir = "tools/wasm"
|
||||
fuel_limit = 1000000
|
||||
memory_limit_mb = 64
|
||||
max_module_size_mb = 50
|
||||
allow_workspace_read = false
|
||||
allow_workspace_write = false
|
||||
allowed_hosts = []
|
||||
|
||||
[runtime.wasm.security]
|
||||
require_workspace_relative_tools_dir = true
|
||||
reject_symlink_modules = true
|
||||
reject_symlink_tools_dir = true
|
||||
strict_host_validation = true
|
||||
capability_escalation_mode = "deny"
|
||||
module_hash_policy = "warn"
|
||||
|
||||
[runtime.wasm.security.module_sha256]
|
||||
|
||||
[research]
|
||||
enabled = false
|
||||
trigger = "never"
|
||||
keywords = [
|
||||
"find",
|
||||
"search",
|
||||
"check",
|
||||
"investigate",
|
||||
"look",
|
||||
"research",
|
||||
"найди",
|
||||
"проверь",
|
||||
"исследуй",
|
||||
"поищи",
|
||||
]
|
||||
min_message_length = 50
|
||||
max_iterations = 5
|
||||
show_progress = true
|
||||
system_prompt_prefix = ""
|
||||
|
||||
[reliability]
|
||||
provider_retries = 2
|
||||
provider_backoff_ms = 500
|
||||
fallback_providers = []
|
||||
api_keys = []
|
||||
channel_initial_backoff_secs = 2
|
||||
channel_max_backoff_secs = 60
|
||||
scheduler_poll_secs = 15
|
||||
scheduler_retries = 2
|
||||
|
||||
[reliability.model_fallbacks]
|
||||
|
||||
[scheduler]
|
||||
enabled = true
|
||||
max_tasks = 64
|
||||
max_concurrent = 4
|
||||
|
||||
[agent]
|
||||
compact_context = true
|
||||
max_tool_iterations = 20
|
||||
max_history_messages = 50
|
||||
parallel_tools = false
|
||||
tool_dispatcher = "auto"
|
||||
loop_detection_no_progress_threshold = 3
|
||||
loop_detection_ping_pong_cycles = 2
|
||||
loop_detection_failure_streak = 3
|
||||
safety_heartbeat_interval = 5
|
||||
safety_heartbeat_turn_interval = 10
|
||||
|
||||
[agent.session]
|
||||
backend = "none"
|
||||
strategy = "per-sender"
|
||||
ttl_seconds = 3600
|
||||
max_messages = 50
|
||||
|
||||
[agent.teams]
|
||||
enabled = true
|
||||
auto_activate = true
|
||||
max_agents = 32
|
||||
strategy = "adaptive"
|
||||
load_window_secs = 120
|
||||
inflight_penalty = 8
|
||||
recent_selection_penalty = 2
|
||||
recent_failure_penalty = 12
|
||||
|
||||
[agent.subagents]
|
||||
enabled = true
|
||||
auto_activate = true
|
||||
max_concurrent = 10
|
||||
strategy = "adaptive"
|
||||
load_window_secs = 180
|
||||
inflight_penalty = 10
|
||||
recent_selection_penalty = 3
|
||||
recent_failure_penalty = 16
|
||||
queue_wait_ms = 15000
|
||||
queue_poll_ms = 200
|
||||
|
||||
[skills]
|
||||
open_skills_enabled = false
|
||||
trusted_skill_roots = []
|
||||
allow_scripts = false
|
||||
prompt_injection_mode = "full"
|
||||
|
||||
[query_classification]
|
||||
enabled = false
|
||||
rules = []
|
||||
|
||||
[heartbeat]
|
||||
enabled = false
|
||||
interval_minutes = 30
|
||||
|
||||
[cron]
|
||||
enabled = true
|
||||
max_run_history = 50
|
||||
|
||||
[goal_loop]
|
||||
enabled = false
|
||||
interval_minutes = 10
|
||||
step_timeout_secs = 120
|
||||
max_steps_per_cycle = 3
|
||||
|
||||
[channels_config]
|
||||
cli = true
|
||||
message_timeout_secs = 300
|
||||
|
||||
[channels_config.webhook]
|
||||
port = 8080
|
||||
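# Placeholder from the template; replace with a strong random token before exposing the webhook.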
secret = "mytoken123"
|
||||
|
||||
[channels_config.ack_reaction]
|
||||
|
||||
[memory]
|
||||
backend = "sqlite"
|
||||
auto_save = true
|
||||
hygiene_enabled = true
|
||||
archive_after_days = 7
|
||||
purge_after_days = 30
|
||||
conversation_retention_days = 30
|
||||
embedding_provider = "none"
|
||||
embedding_model = "text-embedding-3-small"
|
||||
embedding_dimensions = 1536
|
||||
vector_weight = 0.7
|
||||
keyword_weight = 0.3
|
||||
min_relevance_score = 0.4
|
||||
embedding_cache_size = 10000
|
||||
chunk_max_tokens = 512
|
||||
response_cache_enabled = false
|
||||
response_cache_ttl_minutes = 60
|
||||
response_cache_max_entries = 5000
|
||||
snapshot_enabled = false
|
||||
snapshot_on_hygiene = false
|
||||
auto_hydrate = true
|
||||
sqlite_journal_mode = "wal"
|
||||
|
||||
[memory.qdrant]
|
||||
collection = "zeroclaw_memories"
|
||||
|
||||
[storage.provider.config]
|
||||
provider = ""
|
||||
schema = "public"
|
||||
table = "memories"
|
||||
tls = false
|
||||
|
||||
[tunnel]
|
||||
provider = "none"
|
||||
|
||||
[gateway]
|
||||
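# NOTE: these defaults bind on all interfaces with pairing disabled and an open
# trusted_ips list; tighten them before exposing the gateway beyond a trusted LAN.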
port = 8080
|
||||
host = "0.0.0.0"
|
||||
require_pairing = false
|
||||
trusted_ips = ["0.0.0.0/0"]
|
||||
allow_public_bind = true
|
||||
paired_tokens = []
|
||||
pair_rate_limit_per_minute = 10
|
||||
webhook_rate_limit_per_minute = 60
|
||||
trust_forwarded_headers = false
|
||||
rate_limit_max_keys = 10000
|
||||
idempotency_ttl_secs = 300
|
||||
idempotency_max_keys = 10000
|
||||
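# Same placeholder as [channels_config.webhook]; change both before production use.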
webhook_secret = "mytoken123"
|
||||
|
||||
[gateway.node_control]
|
||||
enabled = false
|
||||
allowed_node_ids = []
|
||||
|
||||
[composio]
|
||||
enabled = false
|
||||
entity_id = "default"
|
||||
|
||||
[secrets]
|
||||
encrypt = true
|
||||
|
||||
[browser]
|
||||
enabled = false
|
||||
allowed_domains = []
|
||||
browser_open = "default"
|
||||
backend = "agent_browser"
|
||||
auto_backend_priority = []
|
||||
agent_browser_command = "agent-browser"
|
||||
agent_browser_extra_args = []
|
||||
agent_browser_timeout_ms = 30000
|
||||
native_headless = true
|
||||
native_webdriver_url = "http://127.0.0.1:9515"
|
||||
|
||||
[browser.computer_use]
|
||||
endpoint = "http://127.0.0.1:8787/v1/actions"
|
||||
timeout_ms = 15000
|
||||
allow_remote_endpoint = false
|
||||
window_allowlist = []
|
||||
|
||||
[http_request]
|
||||
enabled = false
|
||||
allowed_domains = []
|
||||
max_response_size = 1000000
|
||||
timeout_secs = 30
|
||||
user_agent = "ZeroClaw/1.0"
|
||||
|
||||
[http_request.credential_profiles]
|
||||
|
||||
[multimodal]
|
||||
max_images = 4
|
||||
max_image_size_mb = 5
|
||||
allow_remote_fetch = false
|
||||
|
||||
[web_fetch]
|
||||
enabled = false
|
||||
provider = "fast_html2md"
|
||||
allowed_domains = ["*"]
|
||||
blocked_domains = []
|
||||
max_response_size = 500000
|
||||
timeout_secs = 30
|
||||
user_agent = "ZeroClaw/1.0"
|
||||
|
||||
[web_search]
|
||||
enabled = false
|
||||
provider = "duckduckgo"
|
||||
fallback_providers = []
|
||||
retries_per_provider = 0
|
||||
retry_backoff_ms = 250
|
||||
domain_filter = []
|
||||
language_filter = []
|
||||
exa_search_type = "auto"
|
||||
exa_include_text = false
|
||||
jina_site_filters = []
|
||||
max_results = 5
|
||||
timeout_secs = 15
|
||||
user_agent = "ZeroClaw/1.0"
|
||||
|
||||
[proxy]
|
||||
enabled = false
|
||||
no_proxy = []
|
||||
scope = "zeroclaw"
|
||||
services = []
|
||||
|
||||
[identity]
|
||||
format = "openclaw"
|
||||
extra_files = []
|
||||
|
||||
[cost]
|
||||
enabled = false
|
||||
daily_limit_usd = 10.0
|
||||
monthly_limit_usd = 100.0
|
||||
warn_at_percent = 80
|
||||
allow_override = false
|
||||
|
||||
[cost.prices."anthropic/claude-opus-4-20250514"]
|
||||
input = 15.0
|
||||
output = 75.0
|
||||
|
||||
[cost.prices."openai/gpt-4o"]
|
||||
input = 5.0
|
||||
output = 15.0
|
||||
|
||||
[cost.prices."openai/gpt-4o-mini"]
|
||||
input = 0.15
|
||||
output = 0.6
|
||||
|
||||
[cost.prices."anthropic/claude-sonnet-4-20250514"]
|
||||
input = 3.0
|
||||
output = 15.0
|
||||
|
||||
[cost.prices."openai/o1-preview"]
|
||||
input = 15.0
|
||||
output = 60.0
|
||||
|
||||
[cost.prices."anthropic/claude-3-haiku"]
|
||||
input = 0.25
|
||||
output = 1.25
|
||||
|
||||
[cost.prices."google/gemini-2.0-flash"]
|
||||
input = 0.1
|
||||
output = 0.4
|
||||
|
||||
[cost.prices."anthropic/claude-3.5-sonnet"]
|
||||
input = 3.0
|
||||
output = 15.0
|
||||
|
||||
[cost.prices."google/gemini-1.5-pro"]
|
||||
input = 1.25
|
||||
output = 5.0
|
||||
|
||||
[cost.enforcement]
|
||||
mode = "warn"
|
||||
route_down_model = "hint:fast"
|
||||
reserve_percent = 10
|
||||
|
||||
[economic]
|
||||
enabled = false
|
||||
initial_balance = 1000.0
|
||||
min_evaluation_threshold = 0.6
|
||||
|
||||
[economic.token_pricing]
|
||||
input_price_per_million = 3.0
|
||||
output_price_per_million = 15.0
|
||||
|
||||
[peripherals]
|
||||
enabled = true
|
||||
boards = []
|
||||
|
||||
[agents]
|
||||
|
||||
[coordination]
|
||||
enabled = true
|
||||
lead_agent = "delegate-lead"
|
||||
max_inbox_messages_per_agent = 256
|
||||
max_dead_letters = 256
|
||||
max_context_entries = 512
|
||||
max_seen_message_ids = 4096
|
||||
|
||||
[hooks]
|
||||
enabled = true
|
||||
|
||||
[hooks.builtin]
|
||||
boot_script = false
|
||||
command_logger = false
|
||||
session_memory = false
|
||||
|
||||
[plugins]
|
||||
enabled = true
|
||||
allow = []
|
||||
deny = []
|
||||
load_paths = []
|
||||
|
||||
[plugins.entries]
|
||||
|
||||
[hardware]
|
||||
enabled = true
|
||||
transport = "None"
|
||||
baud_rate = 115200
|
||||
workspace_datasheets = false
|
||||
|
||||
[transcription]
|
||||
enabled = false
|
||||
api_url = "https://api.groq.com/openai/v1/audio/transcriptions"
|
||||
model = "whisper-large-v3-turbo"
|
||||
max_duration_secs = 120
|
||||
|
||||
[agents_ipc]
|
||||
enabled = false
|
||||
db_path = "~/.zeroclaw/agents.db"
|
||||
staleness_secs = 300
|
||||
|
||||
[mcp]
|
||||
enabled = false
|
||||
servers = []
|
||||
|
||||
[wasm]
|
||||
enabled = true
|
||||
memory_limit_mb = 64
|
||||
fuel_limit = 1000000000
|
||||
registry_url = "https://zeromarket.vercel.app/api"
|
||||
@@ -0,0 +1,22 @@
|
||||
[Unit]
|
||||
Description=ZeroClaw AI Hardware Agent
|
||||
Documentation=https://github.com/zeroclaw/zeroclaw
|
||||
After=network-online.target
|
||||
Wants=network-online.target
|
||||
|
||||
[Service]
|
||||
Type=simple
|
||||
User=pi
|
||||
SupplementaryGroups=gpio spi i2c
|
||||
WorkingDirectory=/home/pi/zeroclaw
|
||||
ExecStart=/home/pi/zeroclaw/zeroclaw gateway --host 0.0.0.0 --port 8080
|
||||
Restart=on-failure
|
||||
RestartSec=5
|
||||
EnvironmentFile=/home/pi/zeroclaw/.env
|
||||
Environment=RUST_LOG=info
|
||||
|
||||
# Expand ~ in config path
|
||||
Environment=HOME=/home/pi
|
||||
|
||||
[Install]
|
||||
WantedBy=multi-user.target
|
||||
@@ -0,0 +1,122 @@
|
||||
---
|
||||
name: browser
|
||||
description: Headless browser automation using agent-browser CLI
|
||||
metadata: {"zeroclaw":{"emoji":"🌐","requires":{"bins":["agent-browser"]}}}
|
||||
---
|
||||
|
||||
# Browser Skill
|
||||
|
||||
Control a headless browser for web automation, scraping, and testing.
|
||||
|
||||
## Prerequisites
|
||||
|
||||
- `agent-browser` CLI installed globally (`npm install -g agent-browser`)
|
||||
- Chrome downloaded (`agent-browser install`)
|
||||
|
||||
## Installation
|
||||
|
||||
```bash
|
||||
# Install agent-browser CLI
|
||||
npm install -g agent-browser
|
||||
|
||||
# Download Chrome for Testing
|
||||
agent-browser install --with-deps # Linux
|
||||
agent-browser install # macOS/Windows
|
||||
```
|
||||
|
||||
## Usage
|
||||
|
||||
### Navigate and snapshot
|
||||
|
||||
```bash
|
||||
agent-browser open https://example.com
|
||||
agent-browser snapshot -i
|
||||
```
|
||||
|
||||
### Interact with elements
|
||||
|
||||
```bash
|
||||
agent-browser click @e1 # Click by ref
|
||||
agent-browser fill @e2 "text" # Fill input
|
||||
agent-browser press Enter # Press key
|
||||
```
|
||||
|
||||
### Extract data
|
||||
|
||||
```bash
|
||||
agent-browser get text @e1 # Get text content
|
||||
agent-browser get url # Get current URL
|
||||
agent-browser screenshot page.png # Take screenshot
|
||||
```
|
||||
|
||||
### Session management
|
||||
|
||||
```bash
|
||||
agent-browser close # Close browser
|
||||
```
|
||||
|
||||
## Common Workflows
|
||||
|
||||
### Login flow
|
||||
|
||||
```bash
|
||||
agent-browser open https://site.com/login
|
||||
agent-browser snapshot -i
|
||||
agent-browser fill @email "user@example.com"
|
||||
agent-browser fill @password "secretpass"
|
||||
agent-browser click @submit
|
||||
agent-browser wait --text "Welcome"
|
||||
```
|
||||
|
||||
### Scrape page content
|
||||
|
||||
```bash
|
||||
agent-browser open https://news.ycombinator.com
|
||||
agent-browser snapshot -i
|
||||
agent-browser get text @e1
|
||||
```
|
||||
|
||||
### Take screenshots
|
||||
|
||||
```bash
|
||||
agent-browser open https://google.com
|
||||
agent-browser screenshot --full page.png
|
||||
```
|
||||
|
||||
## Options
|
||||
|
||||
- `--json` - JSON output for parsing
|
||||
- `--headed` - Show browser window (for debugging)
|
||||
- `--session-name <name>` - Persist session cookies
|
||||
- `--profile <path>` - Use persistent browser profile
|
||||
|
||||
## Configuration
|
||||
|
||||
The browser tool is enabled by default with `allowed_domains = ["*"]` and
|
||||
`backend = "agent_browser"`. To customize, edit `~/.zeroclaw/config.toml`:
|
||||
|
||||
```toml
|
||||
[browser]
|
||||
enabled = true # default: true
|
||||
allowed_domains = ["*"] # default: ["*"] (all public hosts)
|
||||
backend = "agent_browser" # default: "agent_browser"
|
||||
native_headless = true # default: true
|
||||
```
|
||||
|
||||
To restrict domains or disable the browser tool:
|
||||
|
||||
```toml
|
||||
[browser]
|
||||
enabled = false # disable entirely
|
||||
# or restrict to specific domains:
|
||||
allowed_domains = ["example.com", "docs.example.com"]
|
||||
```
|
||||
|
||||
## Full Command Reference
|
||||
|
||||
Run `agent-browser --help` for all available commands.
|
||||
|
||||
## Related
|
||||
|
||||
- [agent-browser GitHub](https://github.com/vercel-labs/agent-browser)
|
||||
- [VNC Setup Guide](../docs/browser-setup.md)
|
||||
@@ -0,0 +1,3 @@
|
||||
# Browser skill tests
|
||||
# Format: command | expected_exit_code | expected_output_pattern
|
||||
echo "browser skill loaded" | 0 | browser skill loaded
|
||||
@@ -12,11 +12,29 @@ use crate::runtime;
|
||||
use crate::security::SecurityPolicy;
|
||||
use crate::tools::{self, Tool, ToolSpec};
|
||||
use anyhow::Result;
|
||||
use chrono::{Datelike, Timelike};
|
||||
use std::collections::HashMap;
|
||||
use std::io::Write as IoWrite;
|
||||
use std::sync::Arc;
|
||||
use std::time::Instant;
|
||||
|
||||
/// Events emitted during a streamed agent turn.
|
||||
///
|
||||
/// Consumers receive these through a `tokio::sync::mpsc::Sender<TurnEvent>`
|
||||
/// passed to [`Agent::turn_streamed`].
|
||||
#[derive(Debug, Clone)]
|
||||
pub enum TurnEvent {
|
||||
/// A text chunk from the LLM response (may arrive many times).
|
||||
Chunk { delta: String },
|
||||
/// The agent is invoking a tool.
|
||||
ToolCall {
|
||||
name: String,
|
||||
args: serde_json::Value,
|
||||
},
|
||||
/// A tool has returned a result.
|
||||
ToolResult { name: String, output: String },
|
||||
}
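// Illustrative sketch (not part of this change): draining TurnEvents while
// `turn_streamed` runs. The `agent` binding and channel capacity are
// hypothetical; only `TurnEvent` and `Agent::turn_streamed` come from this file.
//
//     let (tx, mut rx) = tokio::sync::mpsc::channel::<TurnEvent>(64);
//     let printer = tokio::spawn(async move {
//         while let Some(ev) = rx.recv().await {
//             match ev {
//                 TurnEvent::Chunk { delta } => print!("{delta}"),
//                 TurnEvent::ToolCall { name, .. } => eprintln!("\n[calling {name}]"),
//                 TurnEvent::ToolResult { name, .. } => eprintln!("[{name} finished]"),
//             }
//         }
//     });
//     let final_text = agent.turn_streamed("hello", tx).await?;
//     printer.await?;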
|
||||
|
||||
pub struct Agent {
|
||||
provider: Box<dyn Provider>,
|
||||
tools: Vec<Box<dyn Tool>>,
|
||||
@@ -330,7 +348,7 @@ impl Agent {
|
||||
}
|
||||
}
|
||||
|
||||
pub fn from_config(config: &Config) -> Result<Self> {
|
||||
pub async fn from_config(config: &Config) -> Result<Self> {
|
||||
let observer: Arc<dyn Observer> =
|
||||
Arc::from(observability::create_observer(&config.observability));
|
||||
let runtime: Arc<dyn runtime::RuntimeAdapter> =
|
||||
@@ -359,21 +377,83 @@ impl Agent {
|
||||
None
|
||||
};
|
||||
|
||||
let (tools, _delegate_handle) = tools::all_tools_with_runtime(
|
||||
Arc::new(config.clone()),
|
||||
&security,
|
||||
runtime,
|
||||
memory.clone(),
|
||||
composio_key,
|
||||
composio_entity_id,
|
||||
&config.browser,
|
||||
&config.http_request,
|
||||
&config.web_fetch,
|
||||
&config.workspace_dir,
|
||||
&config.agents,
|
||||
config.api_key.as_deref(),
|
||||
config,
|
||||
);
|
||||
let (mut tools, delegate_handle, _reaction_handle, _channel_map_handle, _ask_user_handle) =
|
||||
tools::all_tools_with_runtime(
|
||||
Arc::new(config.clone()),
|
||||
&security,
|
||||
runtime,
|
||||
memory.clone(),
|
||||
composio_key,
|
||||
composio_entity_id,
|
||||
&config.browser,
|
||||
&config.http_request,
|
||||
&config.web_fetch,
|
||||
&config.workspace_dir,
|
||||
&config.agents,
|
||||
config.api_key.as_deref(),
|
||||
config,
|
||||
None,
|
||||
);
|
||||
|
||||
// ── Wire MCP tools (non-fatal) ─────────────────────────────
|
||||
// Replicates the same MCP initialization logic used in the CLI
|
||||
// and webhook paths (loop_.rs) so that the WebSocket/daemon UI
|
||||
// path also has access to MCP tools.
|
||||
if config.mcp.enabled && !config.mcp.servers.is_empty() {
|
||||
tracing::info!(
|
||||
"Initializing MCP client — {} server(s) configured",
|
||||
config.mcp.servers.len()
|
||||
);
|
||||
match tools::McpRegistry::connect_all(&config.mcp.servers).await {
|
||||
Ok(registry) => {
|
||||
let registry = std::sync::Arc::new(registry);
|
||||
if config.mcp.deferred_loading {
|
||||
let deferred_set = tools::DeferredMcpToolSet::from_registry(
|
||||
std::sync::Arc::clone(®istry),
|
||||
)
|
||||
.await;
|
||||
tracing::info!(
|
||||
"MCP deferred: {} tool stub(s) from {} server(s)",
|
||||
deferred_set.len(),
|
||||
registry.server_count()
|
||||
);
|
||||
let activated = std::sync::Arc::new(std::sync::Mutex::new(
|
||||
tools::ActivatedToolSet::new(),
|
||||
));
|
||||
tools.push(Box::new(tools::ToolSearchTool::new(
|
||||
deferred_set,
|
||||
activated,
|
||||
)));
|
||||
} else {
|
||||
let names = registry.tool_names();
|
||||
let mut registered = 0usize;
|
||||
for name in names {
|
||||
if let Some(def) = registry.get_tool_def(&name).await {
|
||||
let wrapper: std::sync::Arc<dyn tools::Tool> =
|
||||
std::sync::Arc::new(tools::McpToolWrapper::new(
|
||||
name,
|
||||
def,
|
||||
std::sync::Arc::clone(®istry),
|
||||
));
|
||||
if let Some(ref handle) = delegate_handle {
|
||||
handle.write().push(std::sync::Arc::clone(&wrapper));
|
||||
}
|
||||
tools.push(Box::new(tools::ArcToolRef(wrapper)));
|
||||
registered += 1;
|
||||
}
|
||||
}
|
||||
tracing::info!(
|
||||
"MCP: {} tool(s) registered from {} server(s)",
|
||||
registered,
|
||||
registry.server_count()
|
||||
);
|
||||
}
|
||||
}
|
||||
Err(e) => {
|
||||
tracing::error!("MCP registry failed to initialize: {e:#}");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
let provider_name = config.default_provider.as_deref().unwrap_or("openrouter");
|
||||
|
||||
@@ -573,6 +653,24 @@ impl Agent {
|
||||
return format!("hint:{}", decision.hint);
|
||||
}
|
||||
}
|
||||
|
||||
// Fallback: auto-classify by complexity when no rule matched.
|
||||
if let Some(ref ac) = self.config.auto_classify {
|
||||
let tier = super::eval::estimate_complexity(user_message);
|
||||
if let Some(hint) = ac.hint_for(tier) {
|
||||
if self.available_hints.contains(&hint.to_string()) {
|
||||
tracing::info!(
|
||||
target: "query_classification",
|
||||
hint = hint,
|
||||
complexity = ?tier,
|
||||
message_length = user_message.len(),
|
||||
"Auto-classified by complexity"
|
||||
);
|
||||
return format!("hint:{hint}");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
self.model_name.clone()
|
||||
}
|
||||
|
||||
@@ -607,11 +705,17 @@ impl Agent {
|
||||
.await;
|
||||
}
|
||||
|
||||
let now = chrono::Local::now().format("%Y-%m-%d %H:%M:%S %Z");
|
||||
let now = chrono::Local::now();
|
||||
let (year, month, day) = (now.year(), now.month(), now.day());
|
||||
let (hour, minute, second) = (now.hour(), now.minute(), now.second());
|
||||
let tz = now.format("%Z");
|
||||
let date_str =
|
||||
format!("{year:04}-{month:02}-{day:02} {hour:02}:{minute:02}:{second:02} {tz}");
|
||||
|
||||
let enriched = if context.is_empty() {
|
||||
format!("[{now}] {user_message}")
|
||||
format!("[CURRENT DATE & TIME: {date_str}]\n\n{user_message}")
|
||||
} else {
|
||||
format!("{context}[{now}] {user_message}")
|
||||
format!("[CURRENT DATE & TIME: {date_str}]\n\n{context}\n\n{user_message}")
|
||||
};
|
||||
|
||||
self.history
|
||||
@@ -737,6 +841,254 @@ impl Agent {
|
||||
)
|
||||
}
|
||||
|
||||
/// Execute a single agent turn while streaming intermediate events.
|
||||
///
|
||||
/// Behaves identically to [`turn`](Self::turn) but forwards [`TurnEvent`]s
|
||||
/// through the provided channel so callers (e.g. the WebSocket gateway)
|
||||
/// can relay incremental updates to clients.
|
||||
///
|
||||
/// The returned `String` is the final, complete assistant response — the
|
||||
/// same value that `turn` would return.
|
||||
pub async fn turn_streamed(
|
||||
&mut self,
|
||||
user_message: &str,
|
||||
event_tx: tokio::sync::mpsc::Sender<TurnEvent>,
|
||||
) -> Result<String> {
|
||||
// ── Preamble (identical to turn) ───────────────────────────────
|
||||
if self.history.is_empty() {
|
||||
let system_prompt = self.build_system_prompt()?;
|
||||
self.history
|
||||
.push(ConversationMessage::Chat(ChatMessage::system(
|
||||
system_prompt,
|
||||
)));
|
||||
}
|
||||
|
||||
let context = self
|
||||
.memory_loader
|
||||
.load_context(
|
||||
self.memory.as_ref(),
|
||||
user_message,
|
||||
self.memory_session_id.as_deref(),
|
||||
)
|
||||
.await
|
||||
.unwrap_or_default();
|
||||
|
||||
if self.auto_save {
|
||||
let _ = self
|
||||
.memory
|
||||
.store(
|
||||
"user_msg",
|
||||
user_message,
|
||||
MemoryCategory::Conversation,
|
||||
self.memory_session_id.as_deref(),
|
||||
)
|
||||
.await;
|
||||
}
|
||||
|
||||
let now = chrono::Local::now().format("%Y-%m-%d %H:%M:%S %Z");
|
||||
let enriched = if context.is_empty() {
|
||||
format!("[{now}] {user_message}")
|
||||
} else {
|
||||
format!("{context}[{now}] {user_message}")
|
||||
};
|
||||
|
||||
self.history
|
||||
.push(ConversationMessage::Chat(ChatMessage::user(enriched)));
|
||||
|
||||
let effective_model = self.classify_model(user_message);
|
||||
|
||||
// ── Turn loop ──────────────────────────────────────────────────
|
||||
for _ in 0..self.config.max_tool_iterations {
|
||||
let messages = self.tool_dispatcher.to_provider_messages(&self.history);
|
||||
|
||||
// Response cache check (same as turn)
|
||||
let cache_key = if self.temperature == 0.0 {
|
||||
self.response_cache.as_ref().map(|_| {
|
||||
let last_user = messages
|
||||
.iter()
|
||||
.rfind(|m| m.role == "user")
|
||||
.map(|m| m.content.as_str())
|
||||
.unwrap_or("");
|
||||
let system = messages
|
||||
.iter()
|
||||
.find(|m| m.role == "system")
|
||||
.map(|m| m.content.as_str());
|
||||
crate::memory::response_cache::ResponseCache::cache_key(
|
||||
&effective_model,
|
||||
system,
|
||||
last_user,
|
||||
)
|
||||
})
|
||||
} else {
|
||||
None
|
||||
};
|
||||
|
||||
if let (Some(ref cache), Some(ref key)) = (&self.response_cache, &cache_key) {
|
||||
if let Ok(Some(cached)) = cache.get(key) {
|
||||
self.observer.record_event(&ObserverEvent::CacheHit {
|
||||
cache_type: "response".into(),
|
||||
tokens_saved: 0,
|
||||
});
|
||||
self.history
|
||||
.push(ConversationMessage::Chat(ChatMessage::assistant(
|
||||
cached.clone(),
|
||||
)));
|
||||
self.trim_history();
|
||||
return Ok(cached);
|
||||
}
|
||||
self.observer.record_event(&ObserverEvent::CacheMiss {
|
||||
cache_type: "response".into(),
|
||||
});
|
||||
}
|
||||
|
||||
// ── Streaming LLM call ────────────────────────────────────
|
||||
// Try streaming first; if the provider returns content we
|
||||
// forward deltas. Otherwise fall back to non-streaming chat.
|
||||
use futures_util::StreamExt;
|
||||
|
||||
let stream_opts = crate::providers::traits::StreamOptions::new(true);
|
||||
let mut stream = self.provider.stream_chat_with_history(
|
||||
&messages,
|
||||
&effective_model,
|
||||
self.temperature,
|
||||
stream_opts,
|
||||
);
|
||||
|
||||
let mut streamed_text = String::new();
|
||||
let mut got_stream = false;
|
||||
|
||||
while let Some(item) = stream.next().await {
|
||||
match item {
|
||||
Ok(chunk) => {
|
||||
if !chunk.delta.is_empty() {
|
||||
got_stream = true;
|
||||
streamed_text.push_str(&chunk.delta);
|
||||
let _ = event_tx.send(TurnEvent::Chunk { delta: chunk.delta }).await;
|
||||
}
|
||||
}
|
||||
Err(_) => break,
|
||||
}
|
||||
}
|
||||
// Drop the stream so we release the borrow on provider.
|
||||
drop(stream);
|
||||
|
||||
// If streaming produced text, use it as the response and
|
||||
// check for tool calls via the dispatcher.
|
||||
let response = if got_stream {
|
||||
// Build a synthetic ChatResponse from streamed text
|
||||
crate::providers::ChatResponse {
|
||||
text: Some(streamed_text),
|
||||
tool_calls: Vec::new(),
|
||||
usage: None,
|
||||
reasoning_content: None,
|
||||
}
|
||||
} else {
|
||||
// Fall back to non-streaming chat
|
||||
match self
|
||||
.provider
|
||||
.chat(
|
||||
ChatRequest {
|
||||
messages: &messages,
|
||||
tools: if self.tool_dispatcher.should_send_tool_specs() {
|
||||
Some(&self.tool_specs)
|
||||
} else {
|
||||
None
|
||||
},
|
||||
},
|
||||
&effective_model,
|
||||
self.temperature,
|
||||
)
|
||||
.await
|
||||
{
|
||||
Ok(resp) => resp,
|
||||
Err(err) => return Err(err),
|
||||
}
|
||||
};
|
||||
|
||||
let (text, calls) = self.tool_dispatcher.parse_response(&response);
|
||||
if calls.is_empty() {
|
||||
let final_text = if text.is_empty() {
|
||||
response.text.unwrap_or_default()
|
||||
} else {
|
||||
text
|
||||
};
|
||||
|
||||
// Store in response cache
|
||||
if let (Some(ref cache), Some(ref key)) = (&self.response_cache, &cache_key) {
|
||||
let token_count = response
|
||||
.usage
|
||||
.as_ref()
|
||||
.and_then(|u| u.output_tokens)
|
||||
.unwrap_or(0);
|
||||
#[allow(clippy::cast_possible_truncation)]
|
||||
let _ = cache.put(key, &effective_model, &final_text, token_count as u32);
|
||||
}
|
||||
|
||||
// If we didn't stream, send the full response as a single chunk
|
||||
if !got_stream && !final_text.is_empty() {
|
||||
let _ = event_tx
|
||||
.send(TurnEvent::Chunk {
|
||||
delta: final_text.clone(),
|
||||
})
|
||||
.await;
|
||||
}
|
||||
|
||||
self.history
|
||||
.push(ConversationMessage::Chat(ChatMessage::assistant(
|
||||
final_text.clone(),
|
||||
)));
|
||||
self.trim_history();
|
||||
|
||||
return Ok(final_text);
|
||||
}
|
||||
|
||||
// ── Tool calls ─────────────────────────────────────────────
|
||||
if !text.is_empty() {
|
||||
self.history
|
||||
.push(ConversationMessage::Chat(ChatMessage::assistant(
|
||||
text.clone(),
|
||||
)));
|
||||
}
|
||||
|
||||
self.history.push(ConversationMessage::AssistantToolCalls {
|
||||
text: response.text.clone(),
|
||||
tool_calls: response.tool_calls.clone(),
|
||||
reasoning_content: response.reasoning_content.clone(),
|
||||
});
|
||||
|
||||
// Notify about each tool call
|
||||
for call in &calls {
|
||||
let _ = event_tx
|
||||
.send(TurnEvent::ToolCall {
|
||||
name: call.name.clone(),
|
||||
args: call.arguments.clone(),
|
||||
})
|
||||
.await;
|
||||
}
|
||||
|
||||
let results = self.execute_tools(&calls).await;
|
||||
|
||||
// Notify about each tool result
|
||||
for result in &results {
|
||||
let _ = event_tx
|
||||
.send(TurnEvent::ToolResult {
|
||||
name: result.name.clone(),
|
||||
output: result.output.clone(),
|
||||
})
|
||||
.await;
|
||||
}
|
||||
|
||||
let formatted = self.tool_dispatcher.format_results(&results);
|
||||
self.history.push(formatted);
|
||||
self.trim_history();
|
||||
}
|
||||
|
||||
anyhow::bail!(
|
||||
"Agent exceeded maximum tool iterations ({})",
|
||||
self.config.max_tool_iterations
|
||||
)
|
||||
}
|
||||
|
||||
pub async fn run_single(&mut self, message: &str) -> Result<String> {
|
||||
self.turn(message).await
|
||||
}
|
||||
@@ -786,7 +1138,7 @@ pub async fn run(
|
||||
}
|
||||
effective_config.default_temperature = temperature;
|
||||
|
||||
let mut agent = Agent::from_config(&effective_config)?;
|
||||
let mut agent = Agent::from_config(&effective_config).await?;
|
||||
|
||||
let provider_name = effective_config
|
||||
.default_provider
|
||||
@@ -1130,7 +1482,9 @@ mod tests {
|
||||
.extra_headers
|
||||
.insert("X-Title".to_string(), "zeroclaw-web".to_string());
|
||||
|
||||
let mut agent = Agent::from_config(&config).expect("agent from config");
|
||||
let mut agent = Agent::from_config(&config)
|
||||
.await
|
||||
.expect("agent from config");
|
||||
let response = agent.turn("hello").await.expect("agent turn");
|
||||
|
||||
assert_eq!(response, "hello from mock");
|
||||
|
||||
@@ -0,0 +1,155 @@
use crate::providers::traits::ChatMessage;
use std::collections::HashSet;

/// Signals extracted from conversation context to guide tool filtering.
#[derive(Debug, Clone)]
pub struct ContextSignals {
    /// Tool names likely needed. Empty vec means no filtering.
    pub suggested_tools: Vec<String>,
    /// Whether full history is relevant.
    pub history_relevant: bool,
}

/// Analyze context to determine which tools are likely needed.
pub fn analyze_turn_context(
    history: &[ChatMessage],
    _user_message: &str,
    iteration: usize,
    last_tool_calls: &[String],
) -> ContextSignals {
    if iteration == 0 {
        return ContextSignals {
            suggested_tools: Vec::new(),
            history_relevant: true,
        };
    }

    let mut tools: HashSet<String> = HashSet::new();
    for tool in last_tool_calls {
        tools.insert(tool.clone());
    }

    if let Some(last_assistant) = history.iter().rev().find(|m| m.role == "assistant") {
        for word in last_assistant.content.split_whitespace() {
            for tool_name in tools_for_keyword(word) {
                tools.insert(tool_name.to_string());
            }
        }
    }

    let mut suggested: Vec<String> = tools.into_iter().collect();
    suggested.sort();

    ContextSignals {
        suggested_tools: suggested,
        history_relevant: true,
    }
}

fn tools_for_keyword(keyword: &str) -> &'static [&'static str] {
    match keyword.to_lowercase().as_str() {
        "file" | "read" | "write" | "edit" | "path" | "directory" => {
            &["file_read", "file_write", "file_edit", "glob_search"]
        }
        "shell" | "command" | "run" | "execute" | "install" | "build" => &["shell"],
        "memory" | "remember" | "recall" | "store" | "forget" => &["memory_store", "memory_recall"],
        "search" | "find" | "grep" | "look" => {
            &["content_search", "glob_search", "web_search_tool"]
        }
        "browser" | "website" | "url" | "http" | "fetch" => &["web_fetch", "web_search_tool"],
        "image" | "screenshot" | "picture" => &["image_info"],
        "git" | "commit" | "branch" | "push" | "pull" => &["git_operations", "shell"],
        _ => &[],
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    fn make_message(role: &str, content: &str) -> ChatMessage {
        ChatMessage {
            role: role.to_string(),
            content: content.to_string(),
        }
    }

    #[test]
    fn iteration_zero_returns_empty_suggestions() {
        let history = vec![make_message("user", "hello")];
        let signals = analyze_turn_context(&history, "do something", 0, &[]);
        assert!(signals.suggested_tools.is_empty());
        assert!(signals.history_relevant);
    }

    #[test]
    fn iteration_one_includes_last_tools() {
        let history = vec![
            make_message("user", "hello"),
            make_message("assistant", "sure"),
        ];
        let last_tools = vec!["shell".to_string(), "file_read".to_string()];
        let signals = analyze_turn_context(&history, "next step", 1, &last_tools);
        assert!(signals.suggested_tools.contains(&"shell".to_string()));
        assert!(signals.suggested_tools.contains(&"file_read".to_string()));
    }

    #[test]
    fn keyword_extraction_from_assistant_message() {
        let history = vec![
            make_message("user", "help me"),
            make_message("assistant", "I will read the file at that path"),
        ];
        let signals = analyze_turn_context(&history, "ok", 1, &[]);
        assert!(signals.suggested_tools.contains(&"file_read".to_string()));
    }

    #[test]
    fn shell_keywords_suggest_shell_tool() {
        let history = vec![
            make_message("user", "build the project"),
            make_message("assistant", "I will run the build command"),
        ];
        let signals = analyze_turn_context(&history, "go", 1, &[]);
        assert!(signals.suggested_tools.contains(&"shell".to_string()));
    }

    #[test]
    fn memory_keywords_suggest_memory_tools() {
        let history = vec![
            make_message("user", "save this"),
            make_message("assistant", "I will store that in memory"),
        ];
        let signals = analyze_turn_context(&history, "ok", 1, &[]);
        assert!(signals
            .suggested_tools
            .contains(&"memory_store".to_string()));
        assert!(signals
            .suggested_tools
            .contains(&"memory_recall".to_string()));
    }

    #[test]
    fn combined_keywords_merge_tools() {
        let history = vec![
            make_message("user", "do stuff"),
            make_message(
                "assistant",
                "I need to read the file and run a shell command to search",
            ),
        ];
        let signals = analyze_turn_context(&history, "go", 1, &[]);
        assert!(signals.suggested_tools.contains(&"file_read".to_string()));
        assert!(signals.suggested_tools.contains(&"shell".to_string()));
        assert!(signals
            .suggested_tools
            .contains(&"content_search".to_string()));
    }

    #[test]
    fn empty_history_iteration_one() {
        let history: Vec<ChatMessage> = vec![];
        let signals = analyze_turn_context(&history, "hello", 1, &[]);
        assert!(signals.suggested_tools.is_empty());
    }
}
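
The signals are advisory: an empty `suggested_tools` means "send everything". A minimal sketch of how a turn loop might apply them, where `ToolSpec` and `filter_tools` are hypothetical helpers (only `analyze_turn_context` and `ContextSignals` come from this diff):

```rust
// Hypothetical tool filtering driven by ContextSignals.
struct ToolSpec {
    name: String,
}

fn filter_tools<'a>(all: &'a [ToolSpec], signals: &ContextSignals) -> Vec<&'a ToolSpec> {
    if signals.suggested_tools.is_empty() {
        // No signal: expose the full tool list.
        all.iter().collect()
    } else {
        // Otherwise only advertise the suggested tools to the model.
        all.iter()
            .filter(|t| signals.suggested_tools.contains(&t.name))
            .collect()
    }
}
```

Because `suggested_tools` is deduplicated and sorted before it is returned, the advertised tool set is deterministic for a given history.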
@@ -0,0 +1,415 @@
use serde::{Deserialize, Serialize};

use schemars::JsonSchema;

// ── Complexity estimation ───────────────────────────────────────

/// Coarse complexity tier for a user message.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum ComplexityTier {
    /// Short, simple query (greetings, yes/no, lookups).
    Simple,
    /// Typical request — not trivially simple, not deeply complex.
    Standard,
    /// Long or reasoning-heavy request (code, multi-step, analysis).
    Complex,
}

/// Heuristic keywords that signal reasoning complexity.
const REASONING_KEYWORDS: &[&str] = &[
    "explain",
    "why",
    "analyze",
    "compare",
    "design",
    "implement",
    "refactor",
    "debug",
    "optimize",
    "architecture",
    "trade-off",
    "tradeoff",
    "reasoning",
    "step by step",
    "think through",
    "evaluate",
    "critique",
    "pros and cons",
];

/// Estimate the complexity of a user message without an LLM call.
///
/// Rules (applied in order):
/// - **Complex**: message > 200 chars, OR contains a code fence, OR ≥ 2
///   reasoning keywords.
/// - **Simple**: message < 50 chars AND no reasoning keywords.
/// - **Standard**: everything else.
pub fn estimate_complexity(message: &str) -> ComplexityTier {
    let lower = message.to_lowercase();
    let len = message.len();

    let keyword_count = REASONING_KEYWORDS
        .iter()
        .filter(|kw| lower.contains(**kw))
        .count();

    let has_code_fence = message.contains("```");

    if len > 200 || has_code_fence || keyword_count >= 2 {
        return ComplexityTier::Complex;
    }

    if len < 50 && keyword_count == 0 {
        return ComplexityTier::Simple;
    }

    ComplexityTier::Standard
}
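
// Worked example: "explain this" is 12 bytes with one reasoning keyword, so
// it is not Complex (needs > 200 chars, a code fence, or >= 2 keywords) and
// not Simple (needs < 50 chars AND zero keywords); it falls through to
// Standard.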

// ── Auto-classify config ────────────────────────────────────────

/// Configuration for automatic complexity-based classification.
///
/// When the rule-based classifier in `QueryClassificationConfig` produces no
/// match, the eval layer can fall back to `estimate_complexity` and map the
/// resulting tier to a routing hint.
#[derive(Debug, Clone, Default, Serialize, Deserialize, JsonSchema)]
pub struct AutoClassifyConfig {
    /// Hint to use for `Simple` complexity tier (e.g. `"fast"`).
    #[serde(default)]
    pub simple_hint: Option<String>,
    /// Hint to use for `Standard` complexity tier.
    #[serde(default)]
    pub standard_hint: Option<String>,
    /// Hint to use for `Complex` complexity tier (e.g. `"reasoning"`).
    #[serde(default)]
    pub complex_hint: Option<String>,
}

impl AutoClassifyConfig {
    /// Map a complexity tier to the configured hint, if any.
    pub fn hint_for(&self, tier: ComplexityTier) -> Option<&str> {
        match tier {
            ComplexityTier::Simple => self.simple_hint.as_deref(),
            ComplexityTier::Standard => self.standard_hint.as_deref(),
            ComplexityTier::Complex => self.complex_hint.as_deref(),
        }
    }
}
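
// Example config shape (assumed from the serde derives; the section name is
// hypothetical, field names match the struct):
//
//   [auto_classify]
//   simple_hint = "fast"
//   complex_hint = "reasoning"
//
// Any omitted hint defaults to None via `#[serde(default)]`.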

// ── Post-response eval ──────────────────────────────────────────

/// Configuration for the post-response quality evaluator.
#[derive(Debug, Clone, Serialize, Deserialize, JsonSchema)]
pub struct EvalConfig {
    /// Enable the eval quality gate.
    #[serde(default)]
    pub enabled: bool,
    /// Minimum quality score (0.0–1.0) to accept a response.
    /// Below this threshold, a retry with a higher-tier model is suggested.
    #[serde(default = "default_min_quality_score")]
    pub min_quality_score: f64,
    /// Maximum retries with escalated models before accepting whatever we get.
    #[serde(default = "default_max_retries")]
    pub max_retries: u32,
}

fn default_min_quality_score() -> f64 {
    0.5
}

fn default_max_retries() -> u32 {
    1
}

impl Default for EvalConfig {
    fn default() -> Self {
        Self {
            enabled: false,
            min_quality_score: default_min_quality_score(),
            max_retries: default_max_retries(),
        }
    }
}

/// Result of evaluating a response against quality heuristics.
#[derive(Debug, Clone)]
pub struct EvalResult {
    /// Aggregate quality score from 0.0 (terrible) to 1.0 (excellent).
    pub score: f64,
    /// Individual check outcomes (for observability).
    pub checks: Vec<EvalCheck>,
    /// If score < threshold, the suggested higher-tier hint for retry.
    pub retry_hint: Option<String>,
}

#[derive(Debug, Clone)]
pub struct EvalCheck {
    pub name: &'static str,
    pub passed: bool,
    pub weight: f64,
}

/// Code-related keywords in user queries.
const CODE_KEYWORDS: &[&str] = &[
    "code",
    "function",
    "implement",
    "class",
    "struct",
    "module",
    "script",
    "program",
    "bug",
    "error",
    "compile",
    "syntax",
    "refactor",
];

/// Evaluate a response against heuristic quality checks. No LLM call.
///
/// Checks:
/// 1. **Non-empty**: response must not be empty.
/// 2. **Not a cop-out**: response must not be just "I don't know" or similar.
/// 3. **Sufficient length**: response length should be proportional to query complexity.
/// 4. **Code presence**: if the query mentions code keywords, the response should
///    contain a code block.
pub fn evaluate_response(
    query: &str,
    response: &str,
    complexity: ComplexityTier,
    auto_classify: Option<&AutoClassifyConfig>,
) -> EvalResult {
    let mut checks = Vec::new();

    // Check 1: Non-empty
    let non_empty = !response.trim().is_empty();
    checks.push(EvalCheck {
        name: "non_empty",
        passed: non_empty,
        weight: 0.3,
    });

    // Check 2: Not a cop-out
    let lower_resp = response.to_lowercase();
    let cop_out_phrases = [
        "i don't know",
        "i'm not sure",
        "i cannot",
        "i can't help",
        "as an ai",
    ];
    let is_cop_out = cop_out_phrases
        .iter()
        .any(|phrase| lower_resp.starts_with(phrase));
    let not_cop_out = !is_cop_out || response.len() > 200; // long responses with caveats are fine
    checks.push(EvalCheck {
        name: "not_cop_out",
        passed: not_cop_out,
        weight: 0.25,
    });

    // Check 3: Sufficient length for complexity
    let min_len = match complexity {
        ComplexityTier::Simple => 5,
        ComplexityTier::Standard => 20,
        ComplexityTier::Complex => 50,
    };
    let sufficient_length = response.len() >= min_len;
    checks.push(EvalCheck {
        name: "sufficient_length",
        passed: sufficient_length,
        weight: 0.2,
    });

    // Check 4: Code presence when expected
    let query_lower = query.to_lowercase();
    let expects_code = CODE_KEYWORDS.iter().any(|kw| query_lower.contains(kw));
    let has_code = response.contains("```") || response.contains("    "); // code fence or 4-space indent
    let code_check_passed = !expects_code || has_code;
    checks.push(EvalCheck {
        name: "code_presence",
        passed: code_check_passed,
        weight: 0.25,
    });

    // Compute weighted score
    let total_weight: f64 = checks.iter().map(|c| c.weight).sum();
    let earned: f64 = checks.iter().filter(|c| c.passed).map(|c| c.weight).sum();
    let score = if total_weight > 0.0 {
        earned / total_weight
    } else {
        1.0
    };

    // Determine retry hint: if score is low, suggest escalating
    let retry_hint = if score <= default_min_quality_score() {
        // Try to escalate: Simple→Standard→Complex
        let next_tier = match complexity {
            ComplexityTier::Simple => Some(ComplexityTier::Standard),
            ComplexityTier::Standard => Some(ComplexityTier::Complex),
            ComplexityTier::Complex => None, // already at max
        };
        next_tier.and_then(|tier| {
            auto_classify
                .and_then(|ac| ac.hint_for(tier))
                .map(String::from)
        })
    } else {
        None
    };

    EvalResult {
        score,
        checks,
        retry_hint,
    }
}
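
// Worked example: the four weights sum to 1.0, so the score is simply the sum
// of the weights of the passed checks. An empty response to a non-code query
// fails non_empty (0.3) and sufficient_length (0.2) but passes the other two,
// giving score = 0.25 + 0.25 = 0.5, exactly the default threshold; that is
// why `empty_response_scores_low` below asserts `<= 0.5`.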

#[cfg(test)]
mod tests {
    use super::*;

    // ── estimate_complexity ─────────────────────────────────────

    #[test]
    fn simple_short_message() {
        assert_eq!(estimate_complexity("hi"), ComplexityTier::Simple);
        assert_eq!(estimate_complexity("hello"), ComplexityTier::Simple);
        assert_eq!(estimate_complexity("yes"), ComplexityTier::Simple);
    }

    #[test]
    fn complex_long_message() {
        let long = "a".repeat(201);
        assert_eq!(estimate_complexity(&long), ComplexityTier::Complex);
    }

    #[test]
    fn complex_code_fence() {
        let msg = "Here is some code:\n```rust\nfn main() {}\n```";
        assert_eq!(estimate_complexity(msg), ComplexityTier::Complex);
    }

    #[test]
    fn complex_multiple_reasoning_keywords() {
        let msg = "Please explain why this design is better and analyze the trade-off";
        assert_eq!(estimate_complexity(msg), ComplexityTier::Complex);
    }

    #[test]
    fn standard_medium_message() {
        // 50+ chars but no code fence, < 2 reasoning keywords
        let msg = "Can you help me find a good restaurant in this area please?";
        assert_eq!(estimate_complexity(msg), ComplexityTier::Standard);
    }

    #[test]
    fn standard_short_with_one_keyword() {
        // < 50 chars but has 1 reasoning keyword → still not Simple
        let msg = "explain this";
        assert_eq!(estimate_complexity(msg), ComplexityTier::Standard);
    }

    // ── auto_classify ───────────────────────────────────────────

    #[test]
    fn auto_classify_maps_tiers_to_hints() {
        let ac = AutoClassifyConfig {
            simple_hint: Some("fast".into()),
            standard_hint: None,
            complex_hint: Some("reasoning".into()),
        };
        assert_eq!(ac.hint_for(ComplexityTier::Simple), Some("fast"));
        assert_eq!(ac.hint_for(ComplexityTier::Standard), None);
        assert_eq!(ac.hint_for(ComplexityTier::Complex), Some("reasoning"));
    }

    // ── evaluate_response ───────────────────────────────────────

    #[test]
    fn empty_response_scores_low() {
        let result = evaluate_response("hello", "", ComplexityTier::Simple, None);
        assert!(result.score <= 0.5, "empty response should score low");
    }

    #[test]
    fn good_response_scores_high() {
        let result = evaluate_response(
            "what is 2+2?",
            "The answer is 4.",
            ComplexityTier::Simple,
            None,
        );
        assert!(
            result.score >= 0.9,
            "good simple response should score high, got {}",
            result.score
        );
    }

    #[test]
    fn cop_out_response_penalized() {
        let result = evaluate_response(
            "explain quantum computing",
            "I don't know much about that.",
            ComplexityTier::Standard,
            None,
        );
        assert!(
            result.score < 1.0,
            "cop-out should be penalized, got {}",
            result.score
        );
    }

    #[test]
    fn code_query_without_code_response_penalized() {
        let result = evaluate_response(
            "write a function to sort an array",
            "You should use a sorting algorithm.",
            ComplexityTier::Standard,
            None,
        );
        // "code_presence" check should fail
        let code_check = result.checks.iter().find(|c| c.name == "code_presence");
        assert!(
            code_check.is_some() && !code_check.unwrap().passed,
            "code check should fail"
        );
    }

    #[test]
    fn retry_hint_escalation() {
        let ac = AutoClassifyConfig {
            simple_hint: Some("fast".into()),
            standard_hint: Some("default".into()),
            complex_hint: Some("reasoning".into()),
        };
        // Empty response for a Simple query → should suggest Standard hint
        let result = evaluate_response("hello", "", ComplexityTier::Simple, Some(&ac));
        assert_eq!(result.retry_hint, Some("default".into()));
    }

    #[test]
    fn no_retry_when_already_complex() {
        let ac = AutoClassifyConfig {
            simple_hint: Some("fast".into()),
            standard_hint: Some("default".into()),
            complex_hint: Some("reasoning".into()),
        };
        // Empty response for Complex → no escalation possible
        let result =
            evaluate_response("explain everything", "", ComplexityTier::Complex, Some(&ac));
        assert_eq!(result.retry_hint, None);
    }

    #[test]
    fn max_retries_defaults() {
        let config = EvalConfig::default();
        assert!(!config.enabled);
        assert_eq!(config.max_retries, 1);
        assert!((config.min_quality_score - 0.5).abs() < f64::EPSILON);
    }
}
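
Taken together, these pieces imply a classify → answer → evaluate → maybe-escalate loop that the diff itself does not show. A minimal sketch of that wiring, assuming a `call_model(hint, query)` stand-in for the actual provider call (that function and its signature are illustrative, not part of this change):

```rust
// Hypothetical quality-gate loop built from the new eval primitives.
fn answer_with_quality_gate(
    query: &str,
    eval: &EvalConfig,
    auto: &AutoClassifyConfig,
    call_model: impl Fn(Option<&str>, &str) -> String,
) -> String {
    let complexity = estimate_complexity(query);
    let mut hint = auto.hint_for(complexity).map(String::from);
    let mut response = call_model(hint.as_deref(), query);

    if !eval.enabled {
        return response;
    }
    for _ in 0..eval.max_retries {
        let result = evaluate_response(query, &response, complexity, Some(auto));
        match (result.score < eval.min_quality_score, result.retry_hint) {
            // Low score and a higher tier is configured: retry with the new hint.
            (true, Some(next)) => {
                hint = Some(next);
                response = call_model(hint.as_deref(), query);
            }
            // Either quality is acceptable or there is nothing left to escalate to.
            _ => break,
        }
    }
    response
}
```

One wrinkle worth noting: `retry_hint` is computed inside `evaluate_response` against the *default* threshold (`default_min_quality_score()`), not the configured `min_quality_score`, so a caller using a non-default threshold has to reconcile the two.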
@@ -0,0 +1,283 @@
use crate::providers::traits::ChatMessage;
use schemars::JsonSchema;
use serde::{Deserialize, Serialize};

// ---------------------------------------------------------------------------
// Config
// ---------------------------------------------------------------------------

fn default_max_tokens() -> usize {
    8192
}

fn default_keep_recent() -> usize {
    4
}

fn default_collapse() -> bool {
    true
}

#[derive(Debug, Clone, Serialize, Deserialize, JsonSchema)]
pub struct HistoryPrunerConfig {
    /// Enable history pruning. Default: false.
    #[serde(default)]
    pub enabled: bool,
    /// Maximum estimated tokens for message history. Default: 8192.
    #[serde(default = "default_max_tokens")]
    pub max_tokens: usize,
    /// Keep the N most recent messages untouched. Default: 4.
    #[serde(default = "default_keep_recent")]
    pub keep_recent: usize,
    /// Collapse old tool call/result pairs into short summaries. Default: true.
    #[serde(default = "default_collapse")]
    pub collapse_tool_results: bool,
}

impl Default for HistoryPrunerConfig {
    fn default() -> Self {
        Self {
            enabled: false,
            max_tokens: 8192,
            keep_recent: 4,
            collapse_tool_results: true,
        }
    }
}

// ---------------------------------------------------------------------------
// Stats
// ---------------------------------------------------------------------------

#[derive(Debug, Clone, PartialEq, Eq)]
pub struct PruneStats {
    pub messages_before: usize,
    pub messages_after: usize,
    pub collapsed_pairs: usize,
    pub dropped_messages: usize,
}

// ---------------------------------------------------------------------------
// Token estimation
// ---------------------------------------------------------------------------

fn estimate_tokens(messages: &[ChatMessage]) -> usize {
    messages.iter().map(|m| m.content.len() / 4).sum()
}
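
// Worked example: len / 4 is the usual rough "four characters per token"
// heuristic for English text, so the default max_tokens of 8192 corresponds
// to roughly 32 KiB of message content.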

// ---------------------------------------------------------------------------
// Protected-index helpers
// ---------------------------------------------------------------------------

fn protected_indices(messages: &[ChatMessage], keep_recent: usize) -> Vec<bool> {
    let len = messages.len();
    let mut protected = vec![false; len];
    for (i, msg) in messages.iter().enumerate() {
        if msg.role == "system" {
            protected[i] = true;
        }
    }
    let recent_start = len.saturating_sub(keep_recent);
    for p in protected.iter_mut().skip(recent_start) {
        *p = true;
    }
    protected
}

// ---------------------------------------------------------------------------
// Public entry point
// ---------------------------------------------------------------------------

pub fn prune_history(messages: &mut Vec<ChatMessage>, config: &HistoryPrunerConfig) -> PruneStats {
    let messages_before = messages.len();
    if !config.enabled || messages.is_empty() {
        return PruneStats {
            messages_before,
            messages_after: messages_before,
            collapsed_pairs: 0,
            dropped_messages: 0,
        };
    }

    let mut collapsed_pairs: usize = 0;

    // Phase 1 – collapse assistant+tool pairs
    if config.collapse_tool_results {
        let mut i = 0;
        while i + 1 < messages.len() {
            let protected = protected_indices(messages, config.keep_recent);
            if messages[i].role == "assistant"
                && messages[i + 1].role == "tool"
                && !protected[i]
                && !protected[i + 1]
            {
                let tool_content = &messages[i + 1].content;
                let truncated: String = tool_content.chars().take(100).collect();
                let summary = format!("[Tool result: {truncated}...]");
                messages[i] = ChatMessage {
                    role: "assistant".to_string(),
                    content: summary,
                };
                messages.remove(i + 1);
                collapsed_pairs += 1;
            } else {
                i += 1;
            }
        }
    }

    // Phase 2 – budget enforcement
    let mut dropped_messages: usize = 0;
    while estimate_tokens(messages) > config.max_tokens {
        let protected = protected_indices(messages, config.keep_recent);
        if let Some(idx) = protected
            .iter()
            .enumerate()
            .find(|(_, &p)| !p)
            .map(|(i, _)| i)
        {
            messages.remove(idx);
            dropped_messages += 1;
        } else {
            break;
        }
    }

    PruneStats {
        messages_before,
        messages_after: messages.len(),
        collapsed_pairs,
        dropped_messages,
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    fn msg(role: &str, content: &str) -> ChatMessage {
        ChatMessage {
            role: role.to_string(),
            content: content.to_string(),
        }
    }

    #[test]
    fn prune_disabled_is_noop() {
        let mut messages = vec![
            msg("system", "You are helpful."),
            msg("user", "Hello"),
            msg("assistant", "Hi there!"),
        ];
        let config = HistoryPrunerConfig {
            enabled: false,
            ..Default::default()
        };
        let stats = prune_history(&mut messages, &config);
        assert_eq!(messages.len(), 3);
        assert_eq!(messages[0].content, "You are helpful.");
        assert_eq!(stats.messages_before, 3);
        assert_eq!(stats.messages_after, 3);
        assert_eq!(stats.collapsed_pairs, 0);
    }

    #[test]
    fn prune_under_budget_no_change() {
        let mut messages = vec![
            msg("system", "You are helpful."),
            msg("user", "Hello"),
            msg("assistant", "Hi!"),
        ];
        let config = HistoryPrunerConfig {
            enabled: true,
            max_tokens: 8192,
            keep_recent: 2,
            collapse_tool_results: false,
        };
        let stats = prune_history(&mut messages, &config);
        assert_eq!(messages.len(), 3);
        assert_eq!(stats.collapsed_pairs, 0);
        assert_eq!(stats.dropped_messages, 0);
    }

    #[test]
    fn prune_collapses_tool_pairs() {
        let tool_result = "a".repeat(160);
        let mut messages = vec![
            msg("system", "sys"),
            msg("assistant", "calling tool X"),
            msg("tool", &tool_result),
            msg("user", "thanks"),
            msg("assistant", "done"),
        ];
        let config = HistoryPrunerConfig {
            enabled: true,
            max_tokens: 100_000,
            keep_recent: 2,
            collapse_tool_results: true,
        };
        let stats = prune_history(&mut messages, &config);
        assert_eq!(stats.collapsed_pairs, 1);
        assert_eq!(messages.len(), 4);
        assert_eq!(messages[1].role, "assistant");
        assert!(messages[1].content.starts_with("[Tool result: "));
    }

    #[test]
    fn prune_preserves_system_and_recent() {
        let big = "x".repeat(40_000);
        let mut messages = vec![
            msg("system", "system prompt"),
            msg("user", &big),
            msg("assistant", "old reply"),
            msg("user", "recent1"),
            msg("assistant", "recent2"),
        ];
        let config = HistoryPrunerConfig {
            enabled: true,
            max_tokens: 100,
            keep_recent: 2,
            collapse_tool_results: false,
        };
        let stats = prune_history(&mut messages, &config);
        assert!(messages.iter().any(|m| m.role == "system"));
        assert!(messages.iter().any(|m| m.content == "recent1"));
        assert!(messages.iter().any(|m| m.content == "recent2"));
        assert!(stats.dropped_messages > 0);
    }

    #[test]
    fn prune_drops_oldest_when_over_budget() {
        let filler = "y".repeat(400);
        let mut messages = vec![
            msg("system", "sys"),
            msg("user", &filler),
            msg("assistant", &filler),
            msg("user", "recent-user"),
            msg("assistant", "recent-assistant"),
        ];
        let config = HistoryPrunerConfig {
            enabled: true,
            max_tokens: 150,
            keep_recent: 2,
            collapse_tool_results: false,
        };
        let stats = prune_history(&mut messages, &config);
        assert!(stats.dropped_messages >= 1);
        assert_eq!(messages[0].role, "system");
        assert!(messages.iter().any(|m| m.content == "recent-user"));
        assert!(messages.iter().any(|m| m.content == "recent-assistant"));
    }

    #[test]
    fn prune_empty_messages() {
        let mut messages: Vec<ChatMessage> = vec![];
        let config = HistoryPrunerConfig {
            enabled: true,
            ..Default::default()
        };
        let stats = prune_history(&mut messages, &config);
        assert_eq!(stats.messages_before, 0);
        assert_eq!(stats.messages_after, 0);
    }
}
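
In practice the pruner runs once per turn, just before the message list is handed to the provider. A minimal sketch of a call site, where the config values, the `history` variable, and the `log::debug!` reporting are illustrative assumptions (only `HistoryPrunerConfig`, `prune_history`, and `ChatMessage` come from this diff):

```rust
// Hypothetical call site: prune before every provider request.
let config = HistoryPrunerConfig {
    enabled: true,
    max_tokens: 8192, // ~32 KiB of content under the len/4 estimate
    keep_recent: 4,
    collapse_tool_results: true,
};

let stats = prune_history(&mut history, &config);
log::debug!(
    "history pruned: {} -> {} messages ({} tool pairs collapsed, {} dropped)",
    stats.messages_before,
    stats.messages_after,
    stats.collapsed_pairs,
    stats.dropped_messages
);
```

Because Phase 1 runs before Phase 2, tool results are summarized first, which often brings the token estimate under budget without dropping any messages outright.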