Compare commits
33 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
| a4cb0b460f | |||
| 48733d5ee2 | |||
| 2d118af78f | |||
| 8d7e7e994e | |||
| d38d706c8e | |||
| 523188da08 | |||
| 82f7fbbe0f | |||
| c1b2fceca5 | |||
| be6e9fca5d | |||
| 75c11dfb92 | |||
| 48270fbbf3 | |||
| 18a456b24e | |||
| 71e89801b5 | |||
| 46f6e79557 | |||
| c301b1d4d0 | |||
| 981a93d942 | |||
| 34f0b38e42 | |||
| 00209dd899 | |||
| 9e8a478254 | |||
| 96f25ac701 | |||
| fb5c8cb620 | |||
| 9f5543e046 | |||
| 05c9b8180b | |||
| f96a0471b5 | |||
| 072f5f1170 | |||
| 28f94ae48c | |||
| 8a217a77f9 | |||
| 79a7f08b04 | |||
| de12055364 | |||
| 65c34966bb | |||
| 4a2be7c2e5 | |||
| 2bd141aa07 | |||
| cc470601de |||
@@ -0,0 +1,97 @@
|
||||
# Mem0 Integration: Dual-Scope Recall + Per-Turn Memory
|
||||
|
||||
## Context
|
||||
|
||||
Mem0 auto-save works but the integration is missing key features from mem0 best practices: per-turn recall, multi-level scoping, and proper context injection. This causes the bot to "forget" on follow-up turns and not differentiate users.
|
||||
|
||||
## What's Missing (vs mem0 docs)
|
||||
|
||||
1. **Per-turn recall** — only first turn gets memory context, follow-ups get nothing
|
||||
2. **Dual-scope** — no sender vs group distinction. All memories use a single hardcoded `user_id`
|
||||
3. **System prompt injection** — memory prepended to user message (pollutes session history)
|
||||
4. **`agent_id` scoping** — mem0 supports agent-level patterns, not used
|
||||
|
||||
## Changes
|
||||
|
||||
### 1. `src/memory/mem0.rs` — Use session_id for multi-level scoping
|
||||
|
||||
Map zeroclaw's `session_id` param to mem0's `user_id`. This enables per-user and per-group memory namespaces without changing the `Memory` trait.
|
||||
|
||||
```rust
|
||||
// Add helper:
|
||||
fn effective_user_id(&self, session_id: Option<&str>) -> &str {
|
||||
session_id.filter(|s| !s.is_empty()).unwrap_or(&self.user_id)
|
||||
}
|
||||
|
||||
// In store(): use effective_user_id(session_id) as mem0 user_id
|
||||
// In recall(): use effective_user_id(session_id) as mem0 user_id
|
||||
// In list(): use effective_user_id(session_id) as mem0 user_id
|
||||
```
|
||||
|
||||
### 2. `src/channels/mod.rs` ~line 2229 — Per-turn dual-scope recall
|
||||
|
||||
Remove `if !had_prior_history` gate. Always recall from both sender scope and group scope (for group chats).
|
||||
|
||||
```rust
|
||||
// Detect group chat
|
||||
let is_group = msg.reply_target.contains("@g.us")
|
||||
|| msg.reply_target.starts_with("group:");
|
||||
|
||||
// Sender-scope recall (always)
|
||||
let sender_context = build_memory_context(
|
||||
ctx.memory.as_ref(), &msg.content, ctx.min_relevance_score,
|
||||
Some(&msg.sender),
|
||||
).await;
|
||||
|
||||
// Group-scope recall (groups only)
|
||||
let group_context = if is_group {
|
||||
build_memory_context(
|
||||
ctx.memory.as_ref(), &msg.content, ctx.min_relevance_score,
|
||||
Some(&history_key),
|
||||
).await
|
||||
} else {
|
||||
String::new()
|
||||
};
|
||||
|
||||
// Merge (deduplicate by checking substring overlap)
|
||||
let memory_context = merge_memory_contexts(&sender_context, &group_context);
|
||||
```
|
||||
|
||||
### 3. `src/channels/mod.rs` ~line 2244 — Inject into system prompt
|
||||
|
||||
Move memory context from user message to system prompt. Re-fetched each turn, doesn't pollute session.
|
||||
|
||||
```rust
|
||||
let mut system_prompt = build_channel_system_prompt(...);
|
||||
if !memory_context.is_empty() {
|
||||
system_prompt.push_str(&format!("\n\n{memory_context}"));
|
||||
}
|
||||
let mut history = vec![ChatMessage::system(system_prompt)];
|
||||
```
|
||||
|
||||
### 4. `src/channels/mod.rs` — Dual-scope auto-save
|
||||
|
||||
Find existing auto-save call. For group messages, store twice:
|
||||
- `store(key, content, category, Some(&msg.sender))` — personal facts
|
||||
- `store(key, content, category, Some(&history_key))` — group context
|
||||
|
||||
Both async, non-blocking. DMs only store to sender scope.
|
||||
|
||||
### 5. `src/memory/mem0.rs` — Add `agent_id` support (optional)
|
||||
|
||||
Pass `self.app_name` as `agent_id` param to mem0 API for agent behavior tracking.
|
||||
|
||||
## Files to Modify
|
||||
|
||||
1. `src/memory/mem0.rs` — session_id → user_id mapping
|
||||
2. `src/channels/mod.rs` — per-turn recall, dual-scope, system prompt injection, dual-scope save
|
||||
|
||||
## Verification
|
||||
|
||||
1. `cargo check --features whatsapp-web,memory-mem0`
|
||||
2. `cargo test --features whatsapp-web,memory-mem0`
|
||||
3. Deploy to Synology
|
||||
4. Test DM: "我鍾意食壽司" → next turn "我鍾意食咩" → should recall
|
||||
5. Test group: Joe says "我鍾意食壽司" → someone else asks "Joe 鍾意食咩" → should recall from group scope
|
||||
6. Check mem0 server logs: GET with `user_id=sender` AND `user_id=group_key`
|
||||
7. Check mem0 server logs: POST with both user_ids for group messages
|
||||
@@ -1,3 +1,44 @@
|
||||
# EditorConfig is awesome: https://EditorConfig.org
|
||||
|
||||
# top-most EditorConfig file
|
||||
root = true
|
||||
|
||||
# All files
|
||||
[*]
|
||||
indent_style = space
|
||||
indent_size = 2
|
||||
end_of_line = lf
|
||||
charset = utf-8
|
||||
trim_trailing_whitespace = true
|
||||
insert_final_newline = true
|
||||
|
||||
# Rust files - match rustfmt.toml
|
||||
[*.rs]
|
||||
indent_size = 4
|
||||
max_line_length = 100
|
||||
|
||||
# Markdown files
|
||||
[*.md]
|
||||
trim_trailing_whitespace = false
|
||||
max_line_length = 80
|
||||
|
||||
# TOML files
|
||||
[*.toml]
|
||||
indent_size = 2
|
||||
|
||||
# YAML files
|
||||
[*.{yml,yaml}]
|
||||
indent_size = 2
|
||||
|
||||
# Python files
|
||||
[*.py]
|
||||
indent_size = 4
|
||||
max_line_length = 100
|
||||
|
||||
# Shell scripts
|
||||
[*.{sh,bash}]
|
||||
indent_size = 2
|
||||
|
||||
# JSON files
|
||||
[*.json]
|
||||
indent_size = 2
|
||||
|
||||
Generated
+21
-2
@@ -2,6 +2,14 @@
|
||||
# It is not intended for manual editing.
|
||||
version = 4
|
||||
|
||||
[[package]]
|
||||
name = "aardvark-sys"
|
||||
version = "0.1.0"
|
||||
dependencies = [
|
||||
"libloading",
|
||||
"thiserror 2.0.18",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "accessory"
|
||||
version = "2.1.0"
|
||||
@@ -3510,6 +3518,16 @@ version = "0.2.183"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "b5b646652bf6661599e1da8901b3b9522896f01e736bad5f723fe7a3a27f899d"
|
||||
|
||||
[[package]]
|
||||
name = "libloading"
|
||||
version = "0.8.9"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "d7c4b02199fee7c5d21a5ae7d8cfa79a6ef5bb2fc834d6e9058e89c825efdc55"
|
||||
dependencies = [
|
||||
"cfg-if",
|
||||
"windows-link",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "libm"
|
||||
version = "0.2.16"
|
||||
@@ -6032,9 +6050,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "rustls-webpki"
|
||||
version = "0.103.9"
|
||||
version = "0.103.10"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "d7df23109aa6c1567d1c575b9952556388da57401e4ace1d15f79eedad0d8f53"
|
||||
checksum = "df33b2b81ac578cabaf06b89b0631153a3f416b0a886e8a7a1707fb51abbd1ef"
|
||||
dependencies = [
|
||||
"aws-lc-rs",
|
||||
"ring",
|
||||
@@ -9187,6 +9205,7 @@ dependencies = [
|
||||
name = "zeroclawlabs"
|
||||
version = "0.5.4"
|
||||
dependencies = [
|
||||
"aardvark-sys",
|
||||
"anyhow",
|
||||
"async-imap",
|
||||
"async-trait",
|
||||
|
||||
+6
-1
@@ -1,5 +1,5 @@
|
||||
[workspace]
|
||||
members = [".", "crates/robot-kit"]
|
||||
members = [".", "crates/robot-kit", "crates/aardvark-sys"]
|
||||
resolver = "2"
|
||||
|
||||
[package]
|
||||
@@ -96,6 +96,9 @@ zip = { version = "8.1", default-features = false, features = ["deflate"] }
|
||||
anyhow = "1.0"
|
||||
thiserror = "2.0"
|
||||
|
||||
# Aardvark I2C/SPI/GPIO USB adapter (Total Phase) — stub when SDK absent
|
||||
aardvark-sys = { path = "crates/aardvark-sys" }
|
||||
|
||||
# UUID generation
|
||||
uuid = { version = "1.22", default-features = false, features = ["v4", "std"] }
|
||||
|
||||
@@ -225,6 +228,8 @@ channel-matrix = ["dep:matrix-sdk"]
|
||||
channel-lark = ["dep:prost"]
|
||||
channel-feishu = ["channel-lark"] # Alias for Feishu users (Lark and Feishu are the same platform)
|
||||
memory-postgres = ["dep:postgres"]
|
||||
# memory-mem0 = Mem0 (OpenMemory) memory backend via REST API
|
||||
memory-mem0 = []
|
||||
observability-prometheus = ["dep:prometheus"]
|
||||
observability-otel = ["dep:opentelemetry", "dep:opentelemetry_sdk", "dep:opentelemetry-otlp"]
|
||||
peripheral-rpi = ["rppal"]
|
||||
|
||||
+4
-3
@@ -23,9 +23,10 @@ RUN --mount=type=cache,target=/var/cache/apt,sharing=locked \
|
||||
|
||||
# 1. Copy manifests to cache dependencies
|
||||
COPY Cargo.toml Cargo.lock ./
|
||||
# Remove robot-kit from workspace members — it is excluded by .dockerignore
|
||||
# and is not needed for the Docker build (hardware-only crate).
|
||||
RUN sed -i 's/members = \[".", "crates\/robot-kit"\]/members = ["."]/' Cargo.toml
|
||||
# Include every workspace member: Cargo.lock is generated for the full workspace.
|
||||
# Previously we used sed to drop `crates/robot-kit`, which made the manifest disagree
|
||||
# with the lockfile and caused `cargo --locked` to fail (Cargo refused to rewrite the lock).
|
||||
COPY crates/robot-kit/ crates/robot-kit/
|
||||
# Create dummy targets declared in Cargo.toml so manifest parsing succeeds.
|
||||
RUN mkdir -p src benches \
|
||||
&& echo "fn main() {}" > src/main.rs \
|
||||
|
||||
+4
-3
@@ -38,9 +38,10 @@ RUN --mount=type=cache,target=/var/cache/apt,sharing=locked \
|
||||
|
||||
# 1. Copy manifests to cache dependencies
|
||||
COPY Cargo.toml Cargo.lock ./
|
||||
# Remove robot-kit from workspace members — it is excluded by .dockerignore
|
||||
# and is not needed for the Docker build (hardware-only crate).
|
||||
RUN sed -i 's/members = \[".", "crates\/robot-kit"\]/members = ["."]/' Cargo.toml
|
||||
# Include every workspace member: Cargo.lock is generated for the full workspace.
|
||||
# Previously we used sed to drop `crates/robot-kit`, which made the manifest disagree
|
||||
# with the lockfile and caused `cargo --locked` to fail (Cargo refused to rewrite the lock).
|
||||
COPY crates/robot-kit/ crates/robot-kit/
|
||||
# Create dummy targets declared in Cargo.toml so manifest parsing succeeds.
|
||||
RUN mkdir -p src benches \
|
||||
&& echo "fn main() {}" > src/main.rs \
|
||||
|
||||
@@ -41,3 +41,18 @@ This project uses third-party libraries and components,
|
||||
each licensed under their respective terms.
|
||||
|
||||
See Cargo.lock for a complete dependency list.
|
||||
|
||||
Verifiable Intent Specification
|
||||
================================
|
||||
|
||||
The src/verifiable_intent/ module is a Rust-native reimplementation based on
|
||||
the Verifiable Intent open specification and reference implementation:
|
||||
|
||||
Project: Verifiable Intent (VI)
|
||||
Author: agent-intent
|
||||
Source: https://github.com/agent-intent/verifiable-intent
|
||||
License: Apache License, Version 2.0
|
||||
|
||||
This implementation follows the VI specification design (SD-JWT layered
|
||||
credentials, constraint model, three-layer chain). No source code was copied
|
||||
from the reference implementation.
|
||||
|
||||
@@ -324,47 +324,6 @@ ls -lh target/release/zeroclaw
|
||||
- CI/CD: تجريبي (تلقائي عند الدفع) → مستقر (إرسال يدوي) → Docker، crates.io، Scoop، AUR، Homebrew، تغريدة.
|
||||
- ملفات ثنائية مُعدة مسبقًا لـ Linux (x86_64، aarch64، armv7)، macOS (x86_64، aarch64)، Windows (x86_64).
|
||||
|
||||
## كيف يعمل (باختصار)
|
||||
|
||||
```
|
||||
WhatsApp / Telegram / Slack / Discord / Signal / iMessage / Matrix / IRC / Email
|
||||
Bluesky / Nostr / Mattermost / DingTalk / Lark / QQ / Reddit / MQTT / WebSocket
|
||||
│
|
||||
▼
|
||||
┌───────────────────────────────┐
|
||||
│ Gateway │
|
||||
│ (control plane) │
|
||||
│ http://127.0.0.1:42617 │
|
||||
├───────────────────────────────┤
|
||||
│ Web Dashboard (React 19) │
|
||||
│ REST API + WebSocket + SSE │
|
||||
│ Pairing + Rate Limiting │
|
||||
└──────────────┬────────────────┘
|
||||
│
|
||||
┌──────────┼──────────┐
|
||||
│ │ │
|
||||
▼ ▼ ▼
|
||||
┌────────┐ ┌────────┐ ┌────────┐
|
||||
│ Agent │ │ Cron │ │ Hands │
|
||||
│ Loop │ │Scheduler│ │ Swarm │
|
||||
└───┬────┘ └───┬────┘ └───┬────┘
|
||||
│ │ │
|
||||
└──────────┼──────────┘
|
||||
│
|
||||
┌──────────┼──────────┐
|
||||
│ │ │
|
||||
▼ ▼ ▼
|
||||
┌────────┐ ┌────────┐ ┌────────┐
|
||||
│Provider│ │ Tools │ │ Memory │
|
||||
│ (LLM) │ │ (70+) │ │(md/sql)│
|
||||
└────────┘ └────────┘ └────────┘
|
||||
│ │
|
||||
▼ ▼
|
||||
┌────────┐ ┌────────────┐
|
||||
│Security│ │ Peripherals│
|
||||
│ Policy │ │(ESP32/STM32)│
|
||||
└────────┘ └────────────┘
|
||||
```
|
||||
|
||||
## التكوين
|
||||
|
||||
|
||||
@@ -324,47 +324,6 @@ React 19 + Vite 6 + Tailwind CSS 4 ওয়েব ড্যাশবোর্
|
||||
- CI/CD: বেটা (পুশে অটো) → স্টেবল (ম্যানুয়াল ডিসপ্যাচ) → Docker, crates.io, Scoop, AUR, Homebrew, টুইট।
|
||||
- Linux (x86_64, aarch64, armv7), macOS (x86_64, aarch64), Windows (x86_64) এর জন্য প্রি-বিল্ট বাইনারি।
|
||||
|
||||
## এটি কিভাবে কাজ করে (সংক্ষিপ্ত)
|
||||
|
||||
```
|
||||
WhatsApp / Telegram / Slack / Discord / Signal / iMessage / Matrix / IRC / Email
|
||||
Bluesky / Nostr / Mattermost / DingTalk / Lark / QQ / Reddit / MQTT / WebSocket
|
||||
│
|
||||
▼
|
||||
┌───────────────────────────────┐
|
||||
│ Gateway │
|
||||
│ (control plane) │
|
||||
│ http://127.0.0.1:42617 │
|
||||
├───────────────────────────────┤
|
||||
│ Web Dashboard (React 19) │
|
||||
│ REST API + WebSocket + SSE │
|
||||
│ Pairing + Rate Limiting │
|
||||
└──────────────┬────────────────┘
|
||||
│
|
||||
┌──────────┼──────────┐
|
||||
│ │ │
|
||||
▼ ▼ ▼
|
||||
┌────────┐ ┌────────┐ ┌────────┐
|
||||
│ Agent │ │ Cron │ │ Hands │
|
||||
│ Loop │ │Scheduler│ │ Swarm │
|
||||
└───┬────┘ └───┬────┘ └───┬────┘
|
||||
│ │ │
|
||||
└──────────┼──────────┘
|
||||
│
|
||||
┌──────────┼──────────┐
|
||||
│ │ │
|
||||
▼ ▼ ▼
|
||||
┌────────┐ ┌────────┐ ┌────────┐
|
||||
│Provider│ │ Tools │ │ Memory │
|
||||
│ (LLM) │ │ (70+) │ │(md/sql)│
|
||||
└────────┘ └────────┘ └────────┘
|
||||
│ │
|
||||
▼ ▼
|
||||
┌────────┐ ┌────────────┐
|
||||
│Security│ │ Peripherals│
|
||||
│ Policy │ │(ESP32/STM32)│
|
||||
└────────┘ └────────────┘
|
||||
```
|
||||
|
||||
## কনফিগারেশন
|
||||
|
||||
|
||||
@@ -324,47 +324,6 @@ Webový panel React 19 + Vite 6 + Tailwind CSS 4 servírovaný přímo z Gateway
|
||||
- CI/CD: beta (auto na push) → stable (ruční dispatch) → Docker, crates.io, Scoop, AUR, Homebrew, tweet.
|
||||
- Předpřipravené binárky pro Linux (x86_64, aarch64, armv7), macOS (x86_64, aarch64), Windows (x86_64).
|
||||
|
||||
## Jak to funguje (krátce)
|
||||
|
||||
```
|
||||
WhatsApp / Telegram / Slack / Discord / Signal / iMessage / Matrix / IRC / Email
|
||||
Bluesky / Nostr / Mattermost / DingTalk / Lark / QQ / Reddit / MQTT / WebSocket
|
||||
│
|
||||
▼
|
||||
┌───────────────────────────────┐
|
||||
│ Gateway │
|
||||
│ (control plane) │
|
||||
│ http://127.0.0.1:42617 │
|
||||
├───────────────────────────────┤
|
||||
│ Web Dashboard (React 19) │
|
||||
│ REST API + WebSocket + SSE │
|
||||
│ Pairing + Rate Limiting │
|
||||
└──────────────┬────────────────┘
|
||||
│
|
||||
┌──────────┼──────────┐
|
||||
│ │ │
|
||||
▼ ▼ ▼
|
||||
┌────────┐ ┌────────┐ ┌────────┐
|
||||
│ Agent │ │ Cron │ │ Hands │
|
||||
│ Loop │ │Scheduler│ │ Swarm │
|
||||
└───┬────┘ └───┬────┘ └───┬────┘
|
||||
│ │ │
|
||||
└──────────┼──────────┘
|
||||
│
|
||||
┌──────────┼──────────┐
|
||||
│ │ │
|
||||
▼ ▼ ▼
|
||||
┌────────┐ ┌────────┐ ┌────────┐
|
||||
│Provider│ │ Tools │ │ Memory │
|
||||
│ (LLM) │ │ (70+) │ │(md/sql)│
|
||||
└────────┘ └────────┘ └────────┘
|
||||
│ │
|
||||
▼ ▼
|
||||
┌────────┐ ┌────────────┐
|
||||
│Security│ │ Peripherals│
|
||||
│ Policy │ │(ESP32/STM32)│
|
||||
└────────┘ └────────────┘
|
||||
```
|
||||
|
||||
## Konfigurace
|
||||
|
||||
|
||||
@@ -324,47 +324,6 @@ React 19 + Vite 6 + Tailwind CSS 4 web-dashboard serveret direkte fra Gateway'en
|
||||
- CI/CD: beta (auto on push) → stable (manual dispatch) → Docker, crates.io, Scoop, AUR, Homebrew, tweet.
|
||||
- Forhåndsbyggede binærer til Linux (x86_64, aarch64, armv7), macOS (x86_64, aarch64), Windows (x86_64).
|
||||
|
||||
## Sådan virker det (kort)
|
||||
|
||||
```
|
||||
WhatsApp / Telegram / Slack / Discord / Signal / iMessage / Matrix / IRC / Email
|
||||
Bluesky / Nostr / Mattermost / DingTalk / Lark / QQ / Reddit / MQTT / WebSocket
|
||||
│
|
||||
▼
|
||||
┌───────────────────────────────┐
|
||||
│ Gateway │
|
||||
│ (control plane) │
|
||||
│ http://127.0.0.1:42617 │
|
||||
├───────────────────────────────┤
|
||||
│ Web Dashboard (React 19) │
|
||||
│ REST API + WebSocket + SSE │
|
||||
│ Pairing + Rate Limiting │
|
||||
└──────────────┬────────────────┘
|
||||
│
|
||||
┌──────────┼──────────┐
|
||||
│ │ │
|
||||
▼ ▼ ▼
|
||||
┌────────┐ ┌────────┐ ┌────────┐
|
||||
│ Agent │ │ Cron │ │ Hands │
|
||||
│ Loop │ │Scheduler│ │ Swarm │
|
||||
└───┬────┘ └───┬────┘ └───┬────┘
|
||||
│ │ │
|
||||
└──────────┼──────────┘
|
||||
│
|
||||
┌──────────┼──────────┐
|
||||
│ │ │
|
||||
▼ ▼ ▼
|
||||
┌────────┐ ┌────────┐ ┌────────┐
|
||||
│Provider│ │ Tools │ │ Memory │
|
||||
│ (LLM) │ │ (70+) │ │(md/sql)│
|
||||
└────────┘ └────────┘ └────────┘
|
||||
│ │
|
||||
▼ ▼
|
||||
┌────────┐ ┌────────────┐
|
||||
│Security│ │ Peripherals│
|
||||
│ Policy │ │(ESP32/STM32)│
|
||||
└────────┘ └────────────┘
|
||||
```
|
||||
|
||||
## Konfiguration
|
||||
|
||||
|
||||
@@ -324,47 +324,6 @@ React 19 + Vite 6 + Tailwind CSS 4 Web-Dashboard, direkt vom Gateway bereitgeste
|
||||
- CI/CD: beta (automatisch bei Push) → stable (manueller Dispatch) → Docker, crates.io, Scoop, AUR, Homebrew, Tweet.
|
||||
- Vorgefertigte Binaries für Linux (x86_64, aarch64, armv7), macOS (x86_64, aarch64), Windows (x86_64).
|
||||
|
||||
## Wie es funktioniert (kurz)
|
||||
|
||||
```
|
||||
WhatsApp / Telegram / Slack / Discord / Signal / iMessage / Matrix / IRC / Email
|
||||
Bluesky / Nostr / Mattermost / DingTalk / Lark / QQ / Reddit / MQTT / WebSocket
|
||||
│
|
||||
▼
|
||||
┌───────────────────────────────┐
|
||||
│ Gateway │
|
||||
│ (Steuerungsebene) │
|
||||
│ http://127.0.0.1:42617 │
|
||||
├───────────────────────────────┤
|
||||
│ Web-Dashboard (React 19) │
|
||||
│ REST API + WebSocket + SSE │
|
||||
│ Pairing + Ratenbegrenzung │
|
||||
└──────────────┬────────────────┘
|
||||
│
|
||||
┌──────────┼──────────┐
|
||||
│ │ │
|
||||
▼ ▼ ▼
|
||||
┌────────┐ ┌────────┐ ┌────────┐
|
||||
│ Agent │ │ Cron │ │ Hands │
|
||||
│ Loop │ │Scheduler│ │ Swarm │
|
||||
└───┬────┘ └───┬────┘ └───┬────┘
|
||||
│ │ │
|
||||
└──────────┼──────────┘
|
||||
│
|
||||
┌──────────┼──────────┐
|
||||
│ │ │
|
||||
▼ ▼ ▼
|
||||
┌────────┐ ┌────────┐ ┌────────┐
|
||||
│Provider│ │ Tools │ │ Memory │
|
||||
│ (LLM) │ │ (70+) │ │(md/sql)│
|
||||
└────────┘ └────────┘ └────────┘
|
||||
│ │
|
||||
▼ ▼
|
||||
┌────────┐ ┌────────────┐
|
||||
│Security│ │ Peripherals│
|
||||
│ Policy │ │(ESP32/STM32)│
|
||||
└────────┘ └────────────┘
|
||||
```
|
||||
|
||||
## Konfiguration
|
||||
|
||||
|
||||
@@ -324,47 +324,6 @@ ls -lh target/release/zeroclaw
|
||||
- CI/CD: beta (auto on push) → stable (manual dispatch) → Docker, crates.io, Scoop, AUR, Homebrew, tweet.
|
||||
- Προκατασκευασμένα δυαδικά για Linux (x86_64, aarch64, armv7), macOS (x86_64, aarch64), Windows (x86_64).
|
||||
|
||||
## Πώς λειτουργεί (σύντομα)
|
||||
|
||||
```
|
||||
WhatsApp / Telegram / Slack / Discord / Signal / iMessage / Matrix / IRC / Email
|
||||
Bluesky / Nostr / Mattermost / DingTalk / Lark / QQ / Reddit / MQTT / WebSocket
|
||||
│
|
||||
▼
|
||||
┌───────────────────────────────┐
|
||||
│ Gateway │
|
||||
│ (control plane) │
|
||||
│ http://127.0.0.1:42617 │
|
||||
├───────────────────────────────┤
|
||||
│ Web Dashboard (React 19) │
|
||||
│ REST API + WebSocket + SSE │
|
||||
│ Pairing + Rate Limiting │
|
||||
└──────────────┬────────────────┘
|
||||
│
|
||||
┌──────────┼──────────┐
|
||||
│ │ │
|
||||
▼ ▼ ▼
|
||||
┌────────┐ ┌────────┐ ┌────────┐
|
||||
│ Agent │ │ Cron │ │ Hands │
|
||||
│ Loop │ │Scheduler│ │ Swarm │
|
||||
└───┬────┘ └───┬────┘ └───┬────┘
|
||||
│ │ │
|
||||
└──────────┼──────────┘
|
||||
│
|
||||
┌──────────┼──────────┐
|
||||
│ │ │
|
||||
▼ ▼ ▼
|
||||
┌────────┐ ┌────────┐ ┌────────┐
|
||||
│Provider│ │ Tools │ │ Memory │
|
||||
│ (LLM) │ │ (70+) │ │(md/sql)│
|
||||
└────────┘ └────────┘ └────────┘
|
||||
│ │
|
||||
▼ ▼
|
||||
┌────────┐ ┌────────────┐
|
||||
│Security│ │ Peripherals│
|
||||
│ Policy │ │(ESP32/STM32)│
|
||||
└────────┘ └────────────┘
|
||||
```
|
||||
|
||||
## Ρύθμιση παραμέτρων
|
||||
|
||||
|
||||
@@ -324,47 +324,6 @@ Panel web React 19 + Vite 6 + Tailwind CSS 4 servido directamente desde el Gatew
|
||||
- CI/CD: beta (automático al hacer push) → stable (dispatch manual) → Docker, crates.io, Scoop, AUR, Homebrew, tweet.
|
||||
- Binarios preconstruidos para Linux (x86_64, aarch64, armv7), macOS (x86_64, aarch64), Windows (x86_64).
|
||||
|
||||
## Cómo funciona (resumen)
|
||||
|
||||
```
|
||||
WhatsApp / Telegram / Slack / Discord / Signal / iMessage / Matrix / IRC / Email
|
||||
Bluesky / Nostr / Mattermost / DingTalk / Lark / QQ / Reddit / MQTT / WebSocket
|
||||
│
|
||||
▼
|
||||
┌───────────────────────────────┐
|
||||
│ Gateway │
|
||||
│ (plano de control) │
|
||||
│ http://127.0.0.1:42617 │
|
||||
├───────────────────────────────┤
|
||||
│ Panel Web (React 19) │
|
||||
│ REST API + WebSocket + SSE │
|
||||
│ Emparejamiento + Limitación │
|
||||
└──────────────┬────────────────┘
|
||||
│
|
||||
┌──────────┼──────────┐
|
||||
│ │ │
|
||||
▼ ▼ ▼
|
||||
┌────────┐ ┌────────┐ ┌────────┐
|
||||
│ Agent │ │ Cron │ │ Hands │
|
||||
│ Loop │ │Scheduler│ │ Swarm │
|
||||
└───┬────┘ └───┬────┘ └───┬────┘
|
||||
│ │ │
|
||||
└──────────┼──────────┘
|
||||
│
|
||||
┌──────────┼──────────┐
|
||||
│ │ │
|
||||
▼ ▼ ▼
|
||||
┌────────┐ ┌────────┐ ┌────────┐
|
||||
│Provider│ │ Tools │ │ Memory │
|
||||
│ (LLM) │ │ (70+) │ │(md/sql)│
|
||||
└────────┘ └────────┘ └────────┘
|
||||
│ │
|
||||
▼ ▼
|
||||
┌────────┐ ┌────────────┐
|
||||
│Security│ │ Peripherals│
|
||||
│ Policy │ │(ESP32/STM32)│
|
||||
└────────┘ └────────────┘
|
||||
```
|
||||
|
||||
## Configuración
|
||||
|
||||
|
||||
@@ -324,47 +324,6 @@ React 19 + Vite 6 + Tailwind CSS 4 web-hallintapaneeli, jota tarjoillaan suoraan
|
||||
- CI/CD: beta (auto on push) → stable (manual dispatch) → Docker, crates.io, Scoop, AUR, Homebrew, tweet.
|
||||
- Valmiit binäärit Linux (x86_64, aarch64, armv7), macOS (x86_64, aarch64), Windows (x86_64).
|
||||
|
||||
## Miten se toimii (lyhyesti)
|
||||
|
||||
```
|
||||
WhatsApp / Telegram / Slack / Discord / Signal / iMessage / Matrix / IRC / Email
|
||||
Bluesky / Nostr / Mattermost / DingTalk / Lark / QQ / Reddit / MQTT / WebSocket
|
||||
│
|
||||
▼
|
||||
┌───────────────────────────────┐
|
||||
│ Gateway │
|
||||
│ (control plane) │
|
||||
│ http://127.0.0.1:42617 │
|
||||
├───────────────────────────────┤
|
||||
│ Web Dashboard (React 19) │
|
||||
│ REST API + WebSocket + SSE │
|
||||
│ Pairing + Rate Limiting │
|
||||
└──────────────┬────────────────┘
|
||||
│
|
||||
┌──────────┼──────────┐
|
||||
│ │ │
|
||||
▼ ▼ ▼
|
||||
┌────────┐ ┌────────┐ ┌────────┐
|
||||
│ Agent │ │ Cron │ │ Hands │
|
||||
│ Loop │ │Scheduler│ │ Swarm │
|
||||
└───┬────┘ └───┬────┘ └───┬────┘
|
||||
│ │ │
|
||||
└──────────┼──────────┘
|
||||
│
|
||||
┌──────────┼──────────┐
|
||||
│ │ │
|
||||
▼ ▼ ▼
|
||||
┌────────┐ ┌────────┐ ┌────────┐
|
||||
│Provider│ │ Tools │ │ Memory │
|
||||
│ (LLM) │ │ (70+) │ │(md/sql)│
|
||||
└────────┘ └────────┘ └────────┘
|
||||
│ │
|
||||
▼ ▼
|
||||
┌────────┐ ┌────────────┐
|
||||
│Security│ │ Peripherals│
|
||||
│ Policy │ │(ESP32/STM32)│
|
||||
└────────┘ └────────────┘
|
||||
```
|
||||
|
||||
## Määritykset
|
||||
|
||||
|
||||
@@ -324,47 +324,6 @@ Tableau de bord web React 19 + Vite 6 + Tailwind CSS 4 servi directement depuis
|
||||
- CI/CD : beta (automatique au push) → stable (dispatch manuel) → Docker, crates.io, Scoop, AUR, Homebrew, tweet.
|
||||
- Binaires précompilés pour Linux (x86_64, aarch64, armv7), macOS (x86_64, aarch64), Windows (x86_64).
|
||||
|
||||
## Comment ça fonctionne (résumé)
|
||||
|
||||
```
|
||||
WhatsApp / Telegram / Slack / Discord / Signal / iMessage / Matrix / IRC / Email
|
||||
Bluesky / Nostr / Mattermost / DingTalk / Lark / QQ / Reddit / MQTT / WebSocket
|
||||
│
|
||||
▼
|
||||
┌───────────────────────────────┐
|
||||
│ Gateway │
|
||||
│ (plan de contrôle) │
|
||||
│ http://127.0.0.1:42617 │
|
||||
├───────────────────────────────┤
|
||||
│ Tableau de bord (React 19) │
|
||||
│ REST API + WebSocket + SSE │
|
||||
│ Appairage + Limitation │
|
||||
└──────────────┬────────────────┘
|
||||
│
|
||||
┌──────────┼──────────┐
|
||||
│ │ │
|
||||
▼ ▼ ▼
|
||||
┌────────┐ ┌────────┐ ┌────────┐
|
||||
│ Agent │ │ Cron │ │ Hands │
|
||||
│ Loop │ │Scheduler│ │ Swarm │
|
||||
└───┬────┘ └───┬────┘ └───┬────┘
|
||||
│ │ │
|
||||
└──────────┼──────────┘
|
||||
│
|
||||
┌──────────┼──────────┐
|
||||
│ │ │
|
||||
▼ ▼ ▼
|
||||
┌────────┐ ┌────────┐ ┌────────┐
|
||||
│Provider│ │ Tools │ │ Memory │
|
||||
│ (LLM) │ │ (70+) │ │(md/sql)│
|
||||
└────────┘ └────────┘ └────────┘
|
||||
│ │
|
||||
▼ ▼
|
||||
┌────────┐ ┌────────────┐
|
||||
│Security│ │ Peripherals│
|
||||
│ Policy │ │(ESP32/STM32)│
|
||||
└────────┘ └────────────┘
|
||||
```
|
||||
|
||||
## Configuration
|
||||
|
||||
|
||||
@@ -324,47 +324,6 @@ ls -lh target/release/zeroclaw
|
||||
- CI/CD: בטא (אוטומטי בדחיפה) → יציב (שליחה ידנית) → Docker, crates.io, Scoop, AUR, Homebrew, ציוץ.
|
||||
- בינאריים מוכנים מראש ל-Linux (x86_64, aarch64, armv7), macOS (x86_64, aarch64), Windows (x86_64).
|
||||
|
||||
## איך זה עובד (בקצרה)
|
||||
|
||||
```
|
||||
WhatsApp / Telegram / Slack / Discord / Signal / iMessage / Matrix / IRC / Email
|
||||
Bluesky / Nostr / Mattermost / DingTalk / Lark / QQ / Reddit / MQTT / WebSocket
|
||||
│
|
||||
▼
|
||||
┌───────────────────────────────┐
|
||||
│ Gateway │
|
||||
│ (control plane) │
|
||||
│ http://127.0.0.1:42617 │
|
||||
├───────────────────────────────┤
|
||||
│ Web Dashboard (React 19) │
|
||||
│ REST API + WebSocket + SSE │
|
||||
│ Pairing + Rate Limiting │
|
||||
└──────────────┬────────────────┘
|
||||
│
|
||||
┌──────────┼──────────┐
|
||||
│ │ │
|
||||
▼ ▼ ▼
|
||||
┌────────┐ ┌────────┐ ┌────────┐
|
||||
│ Agent │ │ Cron │ │ Hands │
|
||||
│ Loop │ │Scheduler│ │ Swarm │
|
||||
└───┬────┘ └───┬────┘ └───┬────┘
|
||||
│ │ │
|
||||
└──────────┼──────────┘
|
||||
│
|
||||
┌──────────┼──────────┐
|
||||
│ │ │
|
||||
▼ ▼ ▼
|
||||
┌────────┐ ┌────────┐ ┌────────┐
|
||||
│Provider│ │ Tools │ │ Memory │
|
||||
│ (LLM) │ │ (70+) │ │(md/sql)│
|
||||
└────────┘ └────────┘ └────────┘
|
||||
│ │
|
||||
▼ ▼
|
||||
┌────────┐ ┌────────────┐
|
||||
│Security│ │ Peripherals│
|
||||
│ Policy │ │(ESP32/STM32)│
|
||||
└────────┘ └────────────┘
|
||||
```
|
||||
|
||||
## הגדרות
|
||||
|
||||
|
||||
@@ -324,47 +324,6 @@ React 19 + Vite 6 + Tailwind CSS 4 वेब डैशबोर्ड सीध
|
||||
- CI/CD: बीटा (पुश पर ऑटो) → स्टेबल (मैनुअल डिस्पैच) → Docker, crates.io, Scoop, AUR, Homebrew, ट्वीट।
|
||||
- Linux (x86_64, aarch64, armv7), macOS (x86_64, aarch64), Windows (x86_64) के लिए प्री-बिल्ट बाइनरी।
|
||||
|
||||
## यह कैसे काम करता है (संक्षिप्त)
|
||||
|
||||
```
|
||||
WhatsApp / Telegram / Slack / Discord / Signal / iMessage / Matrix / IRC / Email
|
||||
Bluesky / Nostr / Mattermost / DingTalk / Lark / QQ / Reddit / MQTT / WebSocket
|
||||
│
|
||||
▼
|
||||
┌───────────────────────────────┐
|
||||
│ Gateway │
|
||||
│ (control plane) │
|
||||
│ http://127.0.0.1:42617 │
|
||||
├───────────────────────────────┤
|
||||
│ Web Dashboard (React 19) │
|
||||
│ REST API + WebSocket + SSE │
|
||||
│ Pairing + Rate Limiting │
|
||||
└──────────────┬────────────────┘
|
||||
│
|
||||
┌──────────┼──────────┐
|
||||
│ │ │
|
||||
▼ ▼ ▼
|
||||
┌────────┐ ┌────────┐ ┌────────┐
|
||||
│ Agent │ │ Cron │ │ Hands │
|
||||
│ Loop │ │Scheduler│ │ Swarm │
|
||||
└───┬────┘ └───┬────┘ └───┬────┘
|
||||
│ │ │
|
||||
└──────────┼──────────┘
|
||||
│
|
||||
┌──────────┼──────────┐
|
||||
│ │ │
|
||||
▼ ▼ ▼
|
||||
┌────────┐ ┌────────┐ ┌────────┐
|
||||
│Provider│ │ Tools │ │ Memory │
|
||||
│ (LLM) │ │ (70+) │ │(md/sql)│
|
||||
└────────┘ └────────┘ └────────┘
|
||||
│ │
|
||||
▼ ▼
|
||||
┌────────┐ ┌────────────┐
|
||||
│Security│ │ Peripherals│
|
||||
│ Policy │ │(ESP32/STM32)│
|
||||
└────────┘ └────────────┘
|
||||
```
|
||||
|
||||
## कॉन्फ़िगरेशन
|
||||
|
||||
|
||||
@@ -324,47 +324,6 @@ React 19 + Vite 6 + Tailwind CSS 4 webes vezerlopult, amelyet kozvetlenul a Gate
|
||||
- CI/CD: beta (auto on push) → stable (manual dispatch) → Docker, crates.io, Scoop, AUR, Homebrew, tweet.
|
||||
- Előre elkészített binárisok Linux (x86_64, aarch64, armv7), macOS (x86_64, aarch64), Windows (x86_64) rendszerekhez.
|
||||
|
||||
## Hogyan működik (röviden)
|
||||
|
||||
```
|
||||
WhatsApp / Telegram / Slack / Discord / Signal / iMessage / Matrix / IRC / Email
|
||||
Bluesky / Nostr / Mattermost / DingTalk / Lark / QQ / Reddit / MQTT / WebSocket
|
||||
│
|
||||
▼
|
||||
┌───────────────────────────────┐
|
||||
│ Gateway │
|
||||
│ (control plane) │
|
||||
│ http://127.0.0.1:42617 │
|
||||
├───────────────────────────────┤
|
||||
│ Web Dashboard (React 19) │
|
||||
│ REST API + WebSocket + SSE │
|
||||
│ Pairing + Rate Limiting │
|
||||
└──────────────┬────────────────┘
|
||||
│
|
||||
┌──────────┼──────────┐
|
||||
│ │ │
|
||||
▼ ▼ ▼
|
||||
┌────────┐ ┌────────┐ ┌────────┐
|
||||
│ Agent │ │ Cron │ │ Hands │
|
||||
│ Loop │ │Scheduler│ │ Swarm │
|
||||
└───┬────┘ └───┬────┘ └───┬────┘
|
||||
│ │ │
|
||||
└──────────┼──────────┘
|
||||
│
|
||||
┌──────────┼──────────┐
|
||||
│ │ │
|
||||
▼ ▼ ▼
|
||||
┌────────┐ ┌────────┐ ┌────────┐
|
||||
│Provider│ │ Tools │ │ Memory │
|
||||
│ (LLM) │ │ (70+) │ │(md/sql)│
|
||||
└────────┘ └────────┘ └────────┘
|
||||
│ │
|
||||
▼ ▼
|
||||
┌────────┐ ┌────────────┐
|
||||
│Security│ │ Peripherals│
|
||||
│ Policy │ │(ESP32/STM32)│
|
||||
└────────┘ └────────────┘
|
||||
```
|
||||
|
||||
## Konfiguráció
|
||||
|
||||
|
||||
@@ -324,47 +324,6 @@ Dasbor web React 19 + Vite 6 + Tailwind CSS 4 yang disajikan langsung dari Gatew
|
||||
- CI/CD: beta (otomatis saat push) → stable (dispatch manual) → Docker, crates.io, Scoop, AUR, Homebrew, tweet.
|
||||
- Biner pre-built untuk Linux (x86_64, aarch64, armv7), macOS (x86_64, aarch64), Windows (x86_64).
|
||||
|
||||
## Cara kerjanya (singkat)
|
||||
|
||||
```
|
||||
WhatsApp / Telegram / Slack / Discord / Signal / iMessage / Matrix / IRC / Email
|
||||
Bluesky / Nostr / Mattermost / DingTalk / Lark / QQ / Reddit / MQTT / WebSocket
|
||||
│
|
||||
▼
|
||||
┌───────────────────────────────┐
|
||||
│ Gateway │
|
||||
│ (control plane) │
|
||||
│ http://127.0.0.1:42617 │
|
||||
├───────────────────────────────┤
|
||||
│ Web Dashboard (React 19) │
|
||||
│ REST API + WebSocket + SSE │
|
||||
│ Pairing + Rate Limiting │
|
||||
└──────────────┬────────────────┘
|
||||
│
|
||||
┌──────────┼──────────┐
|
||||
│ │ │
|
||||
▼ ▼ ▼
|
||||
┌────────┐ ┌────────┐ ┌────────┐
|
||||
│ Agent │ │ Cron │ │ Hands │
|
||||
│ Loop │ │Scheduler│ │ Swarm │
|
||||
└───┬────┘ └───┬────┘ └───┬────┘
|
||||
│ │ │
|
||||
└──────────┼──────────┘
|
||||
│
|
||||
┌──────────┼──────────┐
|
||||
│ │ │
|
||||
▼ ▼ ▼
|
||||
┌────────┐ ┌────────┐ ┌────────┐
|
||||
│Provider│ │ Tools │ │ Memory │
|
||||
│ (LLM) │ │ (70+) │ │(md/sql)│
|
||||
└────────┘ └────────┘ └────────┘
|
||||
│ │
|
||||
▼ ▼
|
||||
┌────────┐ ┌────────────┐
|
||||
│Security│ │ Peripherals│
|
||||
│ Policy │ │(ESP32/STM32)│
|
||||
└────────┘ └────────────┘
|
||||
```
|
||||
|
||||
## Konfigurasi
|
||||
|
||||
|
||||
@@ -324,47 +324,6 @@ Dashboard web React 19 + Vite 6 + Tailwind CSS 4 servita direttamente dal Gatewa
|
||||
- CI/CD: beta (automatico al push) → stable (dispatch manuale) → Docker, crates.io, Scoop, AUR, Homebrew, tweet.
|
||||
- Binari precompilati per Linux (x86_64, aarch64, armv7), macOS (x86_64, aarch64), Windows (x86_64).
|
||||
|
||||
## Come funziona (sintesi)
|
||||
|
||||
```
|
||||
WhatsApp / Telegram / Slack / Discord / Signal / iMessage / Matrix / IRC / Email
|
||||
Bluesky / Nostr / Mattermost / DingTalk / Lark / QQ / Reddit / MQTT / WebSocket
|
||||
│
|
||||
▼
|
||||
┌───────────────────────────────┐
|
||||
│ Gateway │
|
||||
│ (piano di controllo) │
|
||||
│ http://127.0.0.1:42617 │
|
||||
├───────────────────────────────┤
|
||||
│ Dashboard Web (React 19) │
|
||||
│ REST API + WebSocket + SSE │
|
||||
│ Accoppiamento + Limitazione │
|
||||
└──────────────┬────────────────┘
|
||||
│
|
||||
┌──────────┼──────────┐
|
||||
│ │ │
|
||||
▼ ▼ ▼
|
||||
┌────────┐ ┌────────┐ ┌────────┐
|
||||
│ Agent │ │ Cron │ │ Hands │
|
||||
│ Loop │ │Scheduler│ │ Swarm │
|
||||
└───┬────┘ └───┬────┘ └───┬────┘
|
||||
│ │ │
|
||||
└──────────┼──────────┘
|
||||
│
|
||||
┌──────────┼──────────┐
|
||||
│ │ │
|
||||
▼ ▼ ▼
|
||||
┌────────┐ ┌────────┐ ┌────────┐
|
||||
│Provider│ │ Tools │ │ Memory │
|
||||
│ (LLM) │ │ (70+) │ │(md/sql)│
|
||||
└────────┘ └────────┘ └────────┘
|
||||
│ │
|
||||
▼ ▼
|
||||
┌────────┐ ┌────────────┐
|
||||
│Security│ │ Peripherals│
|
||||
│ Policy │ │(ESP32/STM32)│
|
||||
└────────┘ └────────────┘
|
||||
```
|
||||
|
||||
## Configurazione
|
||||
|
||||
|
||||
@@ -324,47 +324,6 @@ React 19 + Vite 6 + Tailwind CSS 4 ウェブダッシュボード、Gatewayか
|
||||
- CI/CD:beta(プッシュ時自動)→ stable(手動ディスパッチ)→ Docker、crates.io、Scoop、AUR、Homebrew、tweet。
|
||||
- プリビルドバイナリ:Linux(x86_64、aarch64、armv7)、macOS(x86_64、aarch64)、Windows(x86_64)。
|
||||
|
||||
## 仕組み(概要)
|
||||
|
||||
```
|
||||
WhatsApp / Telegram / Slack / Discord / Signal / iMessage / Matrix / IRC / Email
|
||||
Bluesky / Nostr / Mattermost / DingTalk / Lark / QQ / Reddit / MQTT / WebSocket
|
||||
│
|
||||
▼
|
||||
┌───────────────────────────────┐
|
||||
│ Gateway │
|
||||
│ (control plane) │
|
||||
│ http://127.0.0.1:42617 │
|
||||
├───────────────────────────────┤
|
||||
│ Web Dashboard (React 19) │
|
||||
│ REST API + WebSocket + SSE │
|
||||
│ Pairing + Rate Limiting │
|
||||
└──────────────┬────────────────┘
|
||||
│
|
||||
┌──────────┼──────────┐
|
||||
│ │ │
|
||||
▼ ▼ ▼
|
||||
┌────────┐ ┌────────┐ ┌────────┐
|
||||
│ Agent │ │ Cron │ │ Hands │
|
||||
│ Loop │ │Scheduler│ │ Swarm │
|
||||
└───┬────┘ └───┬────┘ └───┬────┘
|
||||
│ │ │
|
||||
└──────────┼──────────┘
|
||||
│
|
||||
┌──────────┼──────────┐
|
||||
│ │ │
|
||||
▼ ▼ ▼
|
||||
┌────────┐ ┌────────┐ ┌────────┐
|
||||
│Provider│ │ Tools │ │ Memory │
|
||||
│ (LLM) │ │ (70+) │ │(md/sql)│
|
||||
└────────┘ └────────┘ └────────┘
|
||||
│ │
|
||||
▼ ▼
|
||||
┌────────┐ ┌────────────┐
|
||||
│Security│ │ Peripherals│
|
||||
│ Policy │ │(ESP32/STM32)│
|
||||
└────────┘ └────────────┘
|
||||
```
|
||||
|
||||
## 設定
|
||||
|
||||
|
||||
@@ -324,47 +324,6 @@ Gateway에서 직접 제공하는 React 19 + Vite 6 + Tailwind CSS 4 웹 대시
|
||||
- CI/CD: beta (push 시 자동) → stable (수동 디스패치) → Docker, crates.io, Scoop, AUR, Homebrew, tweet.
|
||||
- Linux (x86_64, aarch64, armv7), macOS (x86_64, aarch64), Windows (x86_64)용 사전 빌드 바이너리.
|
||||
|
||||
## 작동 방식 (요약)
|
||||
|
||||
```
|
||||
WhatsApp / Telegram / Slack / Discord / Signal / iMessage / Matrix / IRC / Email
|
||||
Bluesky / Nostr / Mattermost / DingTalk / Lark / QQ / Reddit / MQTT / WebSocket
|
||||
│
|
||||
▼
|
||||
┌───────────────────────────────┐
|
||||
│ Gateway │
|
||||
│ (control plane) │
|
||||
│ http://127.0.0.1:42617 │
|
||||
├───────────────────────────────┤
|
||||
│ Web Dashboard (React 19) │
|
||||
│ REST API + WebSocket + SSE │
|
||||
│ Pairing + Rate Limiting │
|
||||
└──────────────┬────────────────┘
|
||||
│
|
||||
┌──────────┼──────────┐
|
||||
│ │ │
|
||||
▼ ▼ ▼
|
||||
┌────────┐ ┌────────┐ ┌────────┐
|
||||
│ Agent │ │ Cron │ │ Hands │
|
||||
│ Loop │ │Scheduler│ │ Swarm │
|
||||
└───┬────┘ └───┬────┘ └───┬────┘
|
||||
│ │ │
|
||||
└──────────┼──────────┘
|
||||
│
|
||||
┌──────────┼──────────┐
|
||||
│ │ │
|
||||
▼ ▼ ▼
|
||||
┌────────┐ ┌────────┐ ┌────────┐
|
||||
│Provider│ │ Tools │ │ Memory │
|
||||
│ (LLM) │ │ (70+) │ │(md/sql)│
|
||||
└────────┘ └────────┘ └────────┘
|
||||
│ │
|
||||
▼ ▼
|
||||
┌────────┐ ┌────────────┐
|
||||
│Security│ │ Peripherals│
|
||||
│ Policy │ │(ESP32/STM32)│
|
||||
└────────┘ └────────────┘
|
||||
```
|
||||
|
||||
## 구성
|
||||
|
||||
|
||||
@@ -300,7 +300,7 @@ React 19 + Vite 6 + Tailwind CSS 4 web dashboard served directly from the Gatewa
|
||||
|
||||
- **Core:** shell, file read/write/edit, git operations, glob search, content search
|
||||
- **Web:** browser control, web fetch, web search, screenshot, image info, PDF read
|
||||
- **Integrations:** Jira, Notion, Google Workspace, Microsoft 365, LinkedIn, Composio, Pushover
|
||||
- **Integrations:** Jira, Notion, Google Workspace, Microsoft 365, LinkedIn, Composio, Pushover, Weather (wttr.in)
|
||||
- **MCP:** Model Context Protocol tool wrapper + deferred tool sets
|
||||
- **Scheduling:** cron add/remove/update/run, schedule tool
|
||||
- **Memory:** recall, store, forget, knowledge, project intel
|
||||
@@ -324,47 +324,6 @@ React 19 + Vite 6 + Tailwind CSS 4 web dashboard served directly from the Gatewa
|
||||
- CI/CD: beta (auto on push) → stable (manual dispatch) → Docker, crates.io, Scoop, AUR, Homebrew, tweet.
|
||||
- Pre-built binaries for Linux (x86_64, aarch64, armv7), macOS (x86_64, aarch64), Windows (x86_64).
|
||||
|
||||
## How it works (short)
|
||||
|
||||
```
|
||||
WhatsApp / Telegram / Slack / Discord / Signal / iMessage / Matrix / IRC / Email
|
||||
Bluesky / Nostr / Mattermost / DingTalk / Lark / QQ / Reddit / MQTT / WebSocket
|
||||
│
|
||||
▼
|
||||
┌───────────────────────────────┐
|
||||
│ Gateway │
|
||||
│ (control plane) │
|
||||
│ http://127.0.0.1:42617 │
|
||||
├───────────────────────────────┤
|
||||
│ Web Dashboard (React 19) │
|
||||
│ REST API + WebSocket + SSE │
|
||||
│ Pairing + Rate Limiting │
|
||||
└──────────────┬────────────────┘
|
||||
│
|
||||
┌──────────┼──────────┐
|
||||
│ │ │
|
||||
▼ ▼ ▼
|
||||
┌────────┐ ┌────────┐ ┌────────┐
|
||||
│ Agent │ │ Cron │ │ Hands │
|
||||
│ Loop │ │Scheduler│ │ Swarm │
|
||||
└───┬────┘ └───┬────┘ └───┬────┘
|
||||
│ │ │
|
||||
└──────────┼──────────┘
|
||||
│
|
||||
┌──────────┼──────────┐
|
||||
│ │ │
|
||||
▼ ▼ ▼
|
||||
┌────────┐ ┌────────┐ ┌────────┐
|
||||
│Provider│ │ Tools │ │ Memory │
|
||||
│ (LLM) │ │ (70+) │ │(md/sql)│
|
||||
└────────┘ └────────┘ └────────┘
|
||||
│ │
|
||||
▼ ▼
|
||||
┌────────┐ ┌────────────┐
|
||||
│Security│ │ Peripherals│
|
||||
│ Policy │ │(ESP32/STM32)│
|
||||
└────────┘ └────────────┘
|
||||
```
|
||||
|
||||
## Configuration
|
||||
|
||||
|
||||
@@ -324,47 +324,6 @@ React 19 + Vite 6 + Tailwind CSS 4 nettbasert dashbord servert direkte fra Gatew
|
||||
- CI/CD: beta (auto pa push) -> stabil (manuell utsendelse) -> Docker, crates.io, Scoop, AUR, Homebrew, tweet.
|
||||
- Forhandsbygde binarfiler for Linux (x86_64, aarch64, armv7), macOS (x86_64, aarch64), Windows (x86_64).
|
||||
|
||||
## Slik fungerer det (kort)
|
||||
|
||||
```
|
||||
WhatsApp / Telegram / Slack / Discord / Signal / iMessage / Matrix / IRC / Email
|
||||
Bluesky / Nostr / Mattermost / DingTalk / Lark / QQ / Reddit / MQTT / WebSocket
|
||||
│
|
||||
▼
|
||||
┌───────────────────────────────┐
|
||||
│ Gateway │
|
||||
│ (kontrollplan) │
|
||||
│ http://127.0.0.1:42617 │
|
||||
├───────────────────────────────┤
|
||||
│ Nettbasert dashbord (React 19)│
|
||||
│ REST API + WebSocket + SSE │
|
||||
│ Paring + Hastighetsbegrensning│
|
||||
└──────────────┬────────────────┘
|
||||
│
|
||||
┌──────────┼──────────┐
|
||||
│ │ │
|
||||
▼ ▼ ▼
|
||||
┌────────┐ ┌────────┐ ┌────────┐
|
||||
│ Agent │ │ Cron │ │ Hands │
|
||||
│ Sloyfe │ │Planleg.│ │ Sverm │
|
||||
└───┬────┘ └───┬────┘ └───┬────┘
|
||||
│ │ │
|
||||
└──────────┼──────────┘
|
||||
│
|
||||
┌──────────┼──────────┐
|
||||
│ │ │
|
||||
▼ ▼ ▼
|
||||
┌────────┐ ┌────────┐ ┌────────┐
|
||||
│Leveran.│ │Verktoy │ │ Minne │
|
||||
│ (LLM) │ │ (70+) │ │(md/sql)│
|
||||
└────────┘ └────────┘ └────────┘
|
||||
│ │
|
||||
▼ ▼
|
||||
┌────────┐ ┌────────────┐
|
||||
│Sikker- │ │Periferiutst│
|
||||
│ het │ │(ESP32/STM32)│
|
||||
└────────┘ └────────────┘
|
||||
```
|
||||
|
||||
## Konfigurasjon
|
||||
|
||||
|
||||
@@ -324,47 +324,6 @@ React 19 + Vite 6 + Tailwind CSS 4 webdashboard geserveerd direct vanuit de Gate
|
||||
- CI/CD: beta (auto bij push) → stable (handmatige dispatch) → Docker, crates.io, Scoop, AUR, Homebrew, tweet.
|
||||
- Voorgebouwde binaries voor Linux (x86_64, aarch64, armv7), macOS (x86_64, aarch64), Windows (x86_64).
|
||||
|
||||
## Hoe het werkt (kort)
|
||||
|
||||
```
|
||||
WhatsApp / Telegram / Slack / Discord / Signal / iMessage / Matrix / IRC / Email
|
||||
Bluesky / Nostr / Mattermost / DingTalk / Lark / QQ / Reddit / MQTT / WebSocket
|
||||
│
|
||||
▼
|
||||
┌───────────────────────────────┐
|
||||
│ Gateway │
|
||||
│ (control plane) │
|
||||
│ http://127.0.0.1:42617 │
|
||||
├───────────────────────────────┤
|
||||
│ Web Dashboard (React 19) │
|
||||
│ REST API + WebSocket + SSE │
|
||||
│ Pairing + Rate Limiting │
|
||||
└──────────────┬────────────────┘
|
||||
│
|
||||
┌──────────┼──────────┐
|
||||
│ │ │
|
||||
▼ ▼ ▼
|
||||
┌────────┐ ┌────────┐ ┌────────┐
|
||||
│ Agent │ │ Cron │ │ Hands │
|
||||
│ Loop │ │Scheduler│ │ Swarm │
|
||||
└───┬────┘ └───┬────┘ └───┬────┘
|
||||
│ │ │
|
||||
└──────────┼──────────┘
|
||||
│
|
||||
┌──────────┼──────────┐
|
||||
│ │ │
|
||||
▼ ▼ ▼
|
||||
┌────────┐ ┌────────┐ ┌────────┐
|
||||
│Provider│ │ Tools │ │ Memory │
|
||||
│ (LLM) │ │ (70+) │ │(md/sql)│
|
||||
└────────┘ └────────┘ └────────┘
|
||||
│ │
|
||||
▼ ▼
|
||||
┌────────┐ ┌────────────┐
|
||||
│Security│ │ Peripherals│
|
||||
│ Policy │ │(ESP32/STM32)│
|
||||
└────────┘ └────────────┘
|
||||
```
|
||||
|
||||
## Configuratie
|
||||
|
||||
|
||||
@@ -324,47 +324,6 @@ Panel webowy React 19 + Vite 6 + Tailwind CSS 4 serwowany bezpośrednio z Gatewa
|
||||
- CI/CD: beta (auto na push) → stable (ręczny dispatch) → Docker, crates.io, Scoop, AUR, Homebrew, tweet.
|
||||
- Gotowe pliki binarne dla Linux (x86_64, aarch64, armv7), macOS (x86_64, aarch64), Windows (x86_64).
|
||||
|
||||
## Jak to działa (w skrócie)
|
||||
|
||||
```
|
||||
WhatsApp / Telegram / Slack / Discord / Signal / iMessage / Matrix / IRC / Email
|
||||
Bluesky / Nostr / Mattermost / DingTalk / Lark / QQ / Reddit / MQTT / WebSocket
|
||||
│
|
||||
▼
|
||||
┌───────────────────────────────┐
|
||||
│ Gateway │
|
||||
│ (control plane) │
|
||||
│ http://127.0.0.1:42617 │
|
||||
├───────────────────────────────┤
|
||||
│ Web Dashboard (React 19) │
|
||||
│ REST API + WebSocket + SSE │
|
||||
│ Pairing + Rate Limiting │
|
||||
└──────────────┬────────────────┘
|
||||
│
|
||||
┌──────────┼──────────┐
|
||||
│ │ │
|
||||
▼ ▼ ▼
|
||||
┌────────┐ ┌────────┐ ┌────────┐
|
||||
│ Agent │ │ Cron │ │ Hands │
|
||||
│ Loop │ │Scheduler│ │ Swarm │
|
||||
└───┬────┘ └───┬────┘ └───┬────┘
|
||||
│ │ │
|
||||
└──────────┼──────────┘
|
||||
│
|
||||
┌──────────┼──────────┐
|
||||
│ │ │
|
||||
▼ ▼ ▼
|
||||
┌────────┐ ┌────────┐ ┌────────┐
|
||||
│Provider│ │ Tools │ │ Memory │
|
||||
│ (LLM) │ │ (70+) │ │(md/sql)│
|
||||
└────────┘ └────────┘ └────────┘
|
||||
│ │
|
||||
▼ ▼
|
||||
┌────────┐ ┌────────────┐
|
||||
│Security│ │ Peripherals│
|
||||
│ Policy │ │(ESP32/STM32)│
|
||||
└────────┘ └────────────┘
|
||||
```
|
||||
|
||||
## Konfiguracja
|
||||
|
||||
|
||||
@@ -324,47 +324,6 @@ Painel web React 19 + Vite 6 + Tailwind CSS 4 servido diretamente pelo Gateway:
|
||||
- CI/CD: beta (automático no push) → stable (dispatch manual) → Docker, crates.io, Scoop, AUR, Homebrew, tweet.
|
||||
- Binários pré-construídos para Linux (x86_64, aarch64, armv7), macOS (x86_64, aarch64), Windows (x86_64).
|
||||
|
||||
## Como funciona (resumo)
|
||||
|
||||
```
|
||||
WhatsApp / Telegram / Slack / Discord / Signal / iMessage / Matrix / IRC / Email
|
||||
Bluesky / Nostr / Mattermost / DingTalk / Lark / QQ / Reddit / MQTT / WebSocket
|
||||
│
|
||||
▼
|
||||
┌───────────────────────────────┐
|
||||
│ Gateway │
|
||||
│ (plano de controle) │
|
||||
│ http://127.0.0.1:42617 │
|
||||
├───────────────────────────────┤
|
||||
│ Painel Web (React 19) │
|
||||
│ REST API + WebSocket + SSE │
|
||||
│ Pareamento + Limitação │
|
||||
└──────────────┬────────────────┘
|
||||
│
|
||||
┌──────────┼──────────┐
|
||||
│ │ │
|
||||
▼ ▼ ▼
|
||||
┌────────┐ ┌────────┐ ┌────────┐
|
||||
│ Agent │ │ Cron │ │ Hands │
|
||||
│ Loop │ │Scheduler│ │ Swarm │
|
||||
└───┬────┘ └───┬────┘ └───┬────┘
|
||||
│ │ │
|
||||
└──────────┼──────────┘
|
||||
│
|
||||
┌──────────┼──────────┐
|
||||
│ │ │
|
||||
▼ ▼ ▼
|
||||
┌────────┐ ┌────────┐ ┌────────┐
|
||||
│Provider│ │ Tools │ │ Memory │
|
||||
│ (LLM) │ │ (70+) │ │(md/sql)│
|
||||
└────────┘ └────────┘ └────────┘
|
||||
│ │
|
||||
▼ ▼
|
||||
┌────────┐ ┌────────────┐
|
||||
│Security│ │ Peripherals│
|
||||
│ Policy │ │(ESP32/STM32)│
|
||||
└────────┘ └────────────┘
|
||||
```
|
||||
|
||||
## Configuração
|
||||
|
||||
|
||||
@@ -324,47 +324,6 @@ Panou web React 19 + Vite 6 + Tailwind CSS 4 servit direct din Gateway:
|
||||
- CI/CD: beta (automat la push) → stable (dispatch manual) → Docker, crates.io, Scoop, AUR, Homebrew, tweet.
|
||||
- Binare pre-construite pentru Linux (x86_64, aarch64, armv7), macOS (x86_64, aarch64), Windows (x86_64).
|
||||
|
||||
## Cum funcționează (pe scurt)
|
||||
|
||||
```
|
||||
WhatsApp / Telegram / Slack / Discord / Signal / iMessage / Matrix / IRC / Email
|
||||
Bluesky / Nostr / Mattermost / DingTalk / Lark / QQ / Reddit / MQTT / WebSocket
|
||||
│
|
||||
▼
|
||||
┌───────────────────────────────┐
|
||||
│ Gateway │
|
||||
│ (control plane) │
|
||||
│ http://127.0.0.1:42617 │
|
||||
├───────────────────────────────┤
|
||||
│ Web Dashboard (React 19) │
|
||||
│ REST API + WebSocket + SSE │
|
||||
│ Pairing + Rate Limiting │
|
||||
└──────────────┬────────────────┘
|
||||
│
|
||||
┌──────────┼──────────┐
|
||||
│ │ │
|
||||
▼ ▼ ▼
|
||||
┌────────┐ ┌────────┐ ┌────────┐
|
||||
│ Agent │ │ Cron │ │ Hands │
|
||||
│ Loop │ │Scheduler│ │ Swarm │
|
||||
└───┬────┘ └───┬────┘ └───┬────┘
|
||||
│ │ │
|
||||
└──────────┼──────────┘
|
||||
│
|
||||
┌──────────┼──────────┐
|
||||
│ │ │
|
||||
▼ ▼ ▼
|
||||
┌────────┐ ┌────────┐ ┌────────┐
|
||||
│Provider│ │ Tools │ │ Memory │
|
||||
│ (LLM) │ │ (70+) │ │(md/sql)│
|
||||
└────────┘ └────────┘ └────────┘
|
||||
│ │
|
||||
▼ ▼
|
||||
┌────────┐ ┌────────────┐
|
||||
│Security│ │ Peripherals│
|
||||
│ Policy │ │(ESP32/STM32)│
|
||||
└────────┘ └────────────┘
|
||||
```
|
||||
|
||||
## Configurare
|
||||
|
||||
|
||||
@@ -324,47 +324,6 @@ ls -lh target/release/zeroclaw
|
||||
- CI/CD: бета (авто при push) → стабильный (ручной запуск) → Docker, crates.io, Scoop, AUR, Homebrew, твит.
|
||||
- Предсобранные бинарные файлы для Linux (x86_64, aarch64, armv7), macOS (x86_64, aarch64), Windows (x86_64).
|
||||
|
||||
## Как это работает (кратко)
|
||||
|
||||
```
|
||||
WhatsApp / Telegram / Slack / Discord / Signal / iMessage / Matrix / IRC / Email
|
||||
Bluesky / Nostr / Mattermost / DingTalk / Lark / QQ / Reddit / MQTT / WebSocket
|
||||
│
|
||||
▼
|
||||
┌───────────────────────────────┐
|
||||
│ Gateway │
|
||||
│ (control plane) │
|
||||
│ http://127.0.0.1:42617 │
|
||||
├───────────────────────────────┤
|
||||
│ Web Dashboard (React 19) │
|
||||
│ REST API + WebSocket + SSE │
|
||||
│ Pairing + Rate Limiting │
|
||||
└──────────────┬────────────────┘
|
||||
│
|
||||
┌──────────┼──────────┐
|
||||
│ │ │
|
||||
▼ ▼ ▼
|
||||
┌────────┐ ┌────────┐ ┌────────┐
|
||||
│ Agent │ │ Cron │ │ Hands │
|
||||
│ Loop │ │Scheduler│ │ Swarm │
|
||||
└───┬────┘ └───┬────┘ └───┬────┘
|
||||
│ │ │
|
||||
└──────────┼──────────┘
|
||||
│
|
||||
┌──────────┼──────────┐
|
||||
│ │ │
|
||||
▼ ▼ ▼
|
||||
┌────────┐ ┌────────┐ ┌────────┐
|
||||
│Provider│ │ Tools │ │ Memory │
|
||||
│ (LLM) │ │ (70+) │ │(md/sql)│
|
||||
└────────┘ └────────┘ └────────┘
|
||||
│ │
|
||||
▼ ▼
|
||||
┌────────┐ ┌────────────┐
|
||||
│Security│ │ Peripherals│
|
||||
│ Policy │ │(ESP32/STM32)│
|
||||
└────────┘ └────────────┘
|
||||
```
|
||||
|
||||
## Конфигурация
|
||||
|
||||
|
||||
@@ -324,47 +324,6 @@ React 19 + Vite 6 + Tailwind CSS 4 webbpanel serverad direkt från Gateway:
|
||||
- CI/CD: beta (automatiskt vid push) → stable (manuell dispatch) → Docker, crates.io, Scoop, AUR, Homebrew, tweet.
|
||||
- Förbyggda binärer för Linux (x86_64, aarch64, armv7), macOS (x86_64, aarch64), Windows (x86_64).
|
||||
|
||||
## Hur det fungerar (kort)
|
||||
|
||||
```
|
||||
WhatsApp / Telegram / Slack / Discord / Signal / iMessage / Matrix / IRC / Email
|
||||
Bluesky / Nostr / Mattermost / DingTalk / Lark / QQ / Reddit / MQTT / WebSocket
|
||||
│
|
||||
▼
|
||||
┌───────────────────────────────┐
|
||||
│ Gateway │
|
||||
│ (kontrollplan) │
|
||||
│ http://127.0.0.1:42617 │
|
||||
├───────────────────────────────┤
|
||||
│ Webbpanel (React 19) │
|
||||
│ REST API + WebSocket + SSE │
|
||||
│ Parkoppling + Hastighetsbegränsning │
|
||||
└──────────────┬────────────────┘
|
||||
│
|
||||
┌──────────┼──────────┐
|
||||
│ │ │
|
||||
▼ ▼ ▼
|
||||
┌────────┐ ┌────────┐ ┌────────┐
|
||||
│ Agent │ │ Cron │ │ Hands │
|
||||
│ Loop │ │Scheduler│ │ Swarm │
|
||||
└───┬────┘ └───┬────┘ └───┬────┘
|
||||
│ │ │
|
||||
└──────────┼──────────┘
|
||||
│
|
||||
┌──────────┼──────────┐
|
||||
│ │ │
|
||||
▼ ▼ ▼
|
||||
┌────────┐ ┌────────┐ ┌────────┐
|
||||
│Provider│ │ Tools │ │ Memory │
|
||||
│ (LLM) │ │ (70+) │ │(md/sql)│
|
||||
└────────┘ └────────┘ └────────┘
|
||||
│ │
|
||||
▼ ▼
|
||||
┌────────┐ ┌────────────┐
|
||||
│Security│ │ Peripherals│
|
||||
│ Policy │ │(ESP32/STM32)│
|
||||
└────────┘ └────────────┘
|
||||
```
|
||||
|
||||
## Konfiguration
|
||||
|
||||
|
||||
@@ -324,47 +324,6 @@ Feature-gated: Matrix (`channel-matrix`), Lark (`channel-lark`), Nostr (`channel
|
||||
- CI/CD: beta (อัตโนมัติเมื่อ push) → stable (dispatch แบบ manual) → Docker, crates.io, Scoop, AUR, Homebrew, tweet
|
||||
- ไบนารี pre-built สำหรับ Linux (x86_64, aarch64, armv7), macOS (x86_64, aarch64), Windows (x86_64)
|
||||
|
||||
## วิธีการทำงาน (สั้น)
|
||||
|
||||
```
|
||||
WhatsApp / Telegram / Slack / Discord / Signal / iMessage / Matrix / IRC / Email
|
||||
Bluesky / Nostr / Mattermost / DingTalk / Lark / QQ / Reddit / MQTT / WebSocket
|
||||
│
|
||||
▼
|
||||
┌───────────────────────────────┐
|
||||
│ Gateway │
|
||||
│ (control plane) │
|
||||
│ http://127.0.0.1:42617 │
|
||||
├───────────────────────────────┤
|
||||
│ Web Dashboard (React 19) │
|
||||
│ REST API + WebSocket + SSE │
|
||||
│ Pairing + Rate Limiting │
|
||||
└──────────────┬────────────────┘
|
||||
│
|
||||
┌──────────┼──────────┐
|
||||
│ │ │
|
||||
▼ ▼ ▼
|
||||
┌────────┐ ┌────────┐ ┌────────┐
|
||||
│ Agent │ │ Cron │ │ Hands │
|
||||
│ Loop │ │Scheduler│ │ Swarm │
|
||||
└───┬────┘ └───┬────┘ └───┬────┘
|
||||
│ │ │
|
||||
└──────────┼──────────┘
|
||||
│
|
||||
┌──────────┼──────────┐
|
||||
│ │ │
|
||||
▼ ▼ ▼
|
||||
┌────────┐ ┌────────┐ ┌────────┐
|
||||
│Provider│ │ Tools │ │ Memory │
|
||||
│ (LLM) │ │ (70+) │ │(md/sql)│
|
||||
└────────┘ └────────┘ └────────┘
|
||||
│ │
|
||||
▼ ▼
|
||||
┌────────┐ ┌────────────┐
|
||||
│Security│ │ Peripherals│
|
||||
│ Policy │ │(ESP32/STM32)│
|
||||
└────────┘ └────────────┘
|
||||
```
|
||||
|
||||
## การกำหนดค่า
|
||||
|
||||
|
||||
@@ -324,47 +324,6 @@ React 19 + Vite 6 + Tailwind CSS 4 web dashboard na direktang inihahatid mula sa
|
||||
- CI/CD: beta (auto sa push) → stable (manual dispatch) → Docker, crates.io, Scoop, AUR, Homebrew, tweet.
|
||||
- Pre-built binaries para sa Linux (x86_64, aarch64, armv7), macOS (x86_64, aarch64), Windows (x86_64).
|
||||
|
||||
## Paano gumagana (maikli)
|
||||
|
||||
```
|
||||
WhatsApp / Telegram / Slack / Discord / Signal / iMessage / Matrix / IRC / Email
|
||||
Bluesky / Nostr / Mattermost / DingTalk / Lark / QQ / Reddit / MQTT / WebSocket
|
||||
│
|
||||
▼
|
||||
┌───────────────────────────────┐
|
||||
│ Gateway │
|
||||
│ (control plane) │
|
||||
│ http://127.0.0.1:42617 │
|
||||
├───────────────────────────────┤
|
||||
│ Web Dashboard (React 19) │
|
||||
│ REST API + WebSocket + SSE │
|
||||
│ Pairing + Rate Limiting │
|
||||
└──────────────┬────────────────┘
|
||||
│
|
||||
┌──────────┼──────────┐
|
||||
│ │ │
|
||||
▼ ▼ ▼
|
||||
┌────────┐ ┌────────┐ ┌────────┐
|
||||
│ Agent │ │ Cron │ │ Hands │
|
||||
│ Loop │ │Scheduler│ │ Swarm │
|
||||
└───┬────┘ └───┬────┘ └───┬────┘
|
||||
│ │ │
|
||||
└──────────┼──────────┘
|
||||
│
|
||||
┌──────────┼──────────┐
|
||||
│ │ │
|
||||
▼ ▼ ▼
|
||||
┌────────┐ ┌────────┐ ┌────────┐
|
||||
│Provider│ │ Tools │ │ Memory │
|
||||
│ (LLM) │ │ (70+) │ │(md/sql)│
|
||||
└────────┘ └────────┘ └────────┘
|
||||
│ │
|
||||
▼ ▼
|
||||
┌────────┐ ┌────────────┐
|
||||
│Security│ │ Peripherals│
|
||||
│ Policy │ │(ESP32/STM32)│
|
||||
└────────┘ └────────────┘
|
||||
```
|
||||
|
||||
## Configuration
|
||||
|
||||
|
||||
@@ -324,47 +324,6 @@ Gateway'den doğrudan sunulan React 19 + Vite 6 + Tailwind CSS 4 web paneli:
|
||||
- CI/CD: beta (push'ta otomatik) → stable (manuel dispatch) → Docker, crates.io, Scoop, AUR, Homebrew, tweet.
|
||||
- Linux (x86_64, aarch64, armv7), macOS (x86_64, aarch64), Windows (x86_64) için önceden derlenmiş ikili dosyalar.
|
||||
|
||||
## Nasıl çalışır (kısaca)
|
||||
|
||||
```
|
||||
WhatsApp / Telegram / Slack / Discord / Signal / iMessage / Matrix / IRC / Email
|
||||
Bluesky / Nostr / Mattermost / DingTalk / Lark / QQ / Reddit / MQTT / WebSocket
|
||||
│
|
||||
▼
|
||||
┌───────────────────────────────┐
|
||||
│ Gateway │
|
||||
│ (control plane) │
|
||||
│ http://127.0.0.1:42617 │
|
||||
├───────────────────────────────┤
|
||||
│ Web Dashboard (React 19) │
|
||||
│ REST API + WebSocket + SSE │
|
||||
│ Pairing + Rate Limiting │
|
||||
└──────────────┬────────────────┘
|
||||
│
|
||||
┌──────────┼──────────┐
|
||||
│ │ │
|
||||
▼ ▼ ▼
|
||||
┌────────┐ ┌────────┐ ┌────────┐
|
||||
│ Agent │ │ Cron │ │ Hands │
|
||||
│ Loop │ │Scheduler│ │ Swarm │
|
||||
└───┬────┘ └───┬────┘ └───┬────┘
|
||||
│ │ │
|
||||
└──────────┼──────────┘
|
||||
│
|
||||
┌──────────┼──────────┐
|
||||
│ │ │
|
||||
▼ ▼ ▼
|
||||
┌────────┐ ┌────────┐ ┌────────┐
|
||||
│Provider│ │ Tools │ │ Memory │
|
||||
│ (LLM) │ │ (70+) │ │(md/sql)│
|
||||
└────────┘ └────────┘ └────────┘
|
||||
│ │
|
||||
▼ ▼
|
||||
┌────────┐ ┌────────────┐
|
||||
│Security│ │ Peripherals│
|
||||
│ Policy │ │(ESP32/STM32)│
|
||||
└────────┘ └────────────┘
|
||||
```
|
||||
|
||||
## Yapılandırma
|
||||
|
||||
|
||||
@@ -324,47 +324,6 @@ ls -lh target/release/zeroclaw
|
||||
- CI/CD: beta (автоматично при push) → stable (ручний запуск) → Docker, crates.io, Scoop, AUR, Homebrew, tweet.
|
||||
- Попередньо зібрані бінарні файли для Linux (x86_64, aarch64, armv7), macOS (x86_64, aarch64), Windows (x86_64).
|
||||
|
||||
## Як це працює (коротко)
|
||||
|
||||
```
|
||||
WhatsApp / Telegram / Slack / Discord / Signal / iMessage / Matrix / IRC / Email
|
||||
Bluesky / Nostr / Mattermost / DingTalk / Lark / QQ / Reddit / MQTT / WebSocket
|
||||
│
|
||||
▼
|
||||
┌───────────────────────────────┐
|
||||
│ Gateway │
|
||||
│ (control plane) │
|
||||
│ http://127.0.0.1:42617 │
|
||||
├───────────────────────────────┤
|
||||
│ Web Dashboard (React 19) │
|
||||
│ REST API + WebSocket + SSE │
|
||||
│ Pairing + Rate Limiting │
|
||||
└──────────────┬────────────────┘
|
||||
│
|
||||
┌──────────┼──────────┐
|
||||
│ │ │
|
||||
▼ ▼ ▼
|
||||
┌────────┐ ┌────────┐ ┌────────┐
|
||||
│ Agent │ │ Cron │ │ Hands │
|
||||
│ Loop │ │Scheduler│ │ Swarm │
|
||||
└───┬────┘ └───┬────┘ └───┬────┘
|
||||
│ │ │
|
||||
└──────────┼──────────┘
|
||||
│
|
||||
┌──────────┼──────────┐
|
||||
│ │ │
|
||||
▼ ▼ ▼
|
||||
┌────────┐ ┌────────┐ ┌────────┐
|
||||
│Provider│ │ Tools │ │ Memory │
|
||||
│ (LLM) │ │ (70+) │ │(md/sql)│
|
||||
└────────┘ └────────┘ └────────┘
|
||||
│ │
|
||||
▼ ▼
|
||||
┌────────┐ ┌────────────┐
|
||||
│Security│ │ Peripherals│
|
||||
│ Policy │ │(ESP32/STM32)│
|
||||
└────────┘ └────────────┘
|
||||
```
|
||||
|
||||
## Конфігурація
|
||||
|
||||
|
||||
@@ -324,47 +324,6 @@ Gateway سے براہ راست فراہم کردہ React 19 + Vite 6 + Tailwind
|
||||
- CI/CD: beta (push پر خودکار) → stable (دستی dispatch) → Docker، crates.io، Scoop، AUR، Homebrew، tweet۔
|
||||
- Linux (x86_64، aarch64، armv7)، macOS (x86_64، aarch64)، Windows (x86_64) کے لیے پری بلٹ بائنریز۔
|
||||
|
||||
## یہ کیسے کام کرتا ہے (مختصر)
|
||||
|
||||
```
|
||||
WhatsApp / Telegram / Slack / Discord / Signal / iMessage / Matrix / IRC / Email
|
||||
Bluesky / Nostr / Mattermost / DingTalk / Lark / QQ / Reddit / MQTT / WebSocket
|
||||
│
|
||||
▼
|
||||
┌───────────────────────────────┐
|
||||
│ Gateway │
|
||||
│ (control plane) │
|
||||
│ http://127.0.0.1:42617 │
|
||||
├───────────────────────────────┤
|
||||
│ Web Dashboard (React 19) │
|
||||
│ REST API + WebSocket + SSE │
|
||||
│ Pairing + Rate Limiting │
|
||||
└──────────────┬────────────────┘
|
||||
│
|
||||
┌──────────┼──────────┐
|
||||
│ │ │
|
||||
▼ ▼ ▼
|
||||
┌────────┐ ┌────────┐ ┌────────┐
|
||||
│ Agent │ │ Cron │ │ Hands │
|
||||
│ Loop │ │Scheduler│ │ Swarm │
|
||||
└───┬────┘ └───┬────┘ └───┬────┘
|
||||
│ │ │
|
||||
└──────────┼──────────┘
|
||||
│
|
||||
┌──────────┼──────────┐
|
||||
│ │ │
|
||||
▼ ▼ ▼
|
||||
┌────────┐ ┌────────┐ ┌────────┐
|
||||
│Provider│ │ Tools │ │ Memory │
|
||||
│ (LLM) │ │ (70+) │ │(md/sql)│
|
||||
└────────┘ └────────┘ └────────┘
|
||||
│ │
|
||||
▼ ▼
|
||||
┌────────┐ ┌────────────┐
|
||||
│Security│ │ Peripherals│
|
||||
│ Policy │ │(ESP32/STM32)│
|
||||
└────────┘ └────────────┘
|
||||
```
|
||||
|
||||
## کنفیگریشن
|
||||
|
||||
|
||||
@@ -324,47 +324,6 @@ Bảng điều khiển web React 19 + Vite 6 + Tailwind CSS 4 được phục v
|
||||
- CI/CD: beta (tự động khi push) → stable (dispatch thủ công) → Docker, crates.io, Scoop, AUR, Homebrew, tweet.
|
||||
- Binary dựng sẵn cho Linux (x86_64, aarch64, armv7), macOS (x86_64, aarch64), Windows (x86_64).
|
||||
|
||||
## Cách hoạt động (tóm tắt)
|
||||
|
||||
```
|
||||
WhatsApp / Telegram / Slack / Discord / Signal / iMessage / Matrix / IRC / Email
|
||||
Bluesky / Nostr / Mattermost / DingTalk / Lark / QQ / Reddit / MQTT / WebSocket
|
||||
│
|
||||
▼
|
||||
┌───────────────────────────────┐
|
||||
│ Gateway │
|
||||
│ (control plane) │
|
||||
│ http://127.0.0.1:42617 │
|
||||
├───────────────────────────────┤
|
||||
│ Web Dashboard (React 19) │
|
||||
│ REST API + WebSocket + SSE │
|
||||
│ Pairing + Rate Limiting │
|
||||
└──────────────┬────────────────┘
|
||||
│
|
||||
┌──────────┼──────────┐
|
||||
│ │ │
|
||||
▼ ▼ ▼
|
||||
┌────────┐ ┌────────┐ ┌────────┐
|
||||
│ Agent │ │ Cron │ │ Hands │
|
||||
│ Loop │ │Scheduler│ │ Swarm │
|
||||
└───┬────┘ └───┬────┘ └───┬────┘
|
||||
│ │ │
|
||||
└──────────┼──────────┘
|
||||
│
|
||||
┌──────────┼──────────┐
|
||||
│ │ │
|
||||
▼ ▼ ▼
|
||||
┌────────┐ ┌────────┐ ┌────────┐
|
||||
│Provider│ │ Tools │ │ Memory │
|
||||
│ (LLM) │ │ (70+) │ │(md/sql)│
|
||||
└────────┘ └────────┘ └────────┘
|
||||
│ │
|
||||
▼ ▼
|
||||
┌────────┐ ┌────────────┐
|
||||
│Security│ │ Peripherals│
|
||||
│ Policy │ │(ESP32/STM32)│
|
||||
└────────┘ └────────────┘
|
||||
```
|
||||
|
||||
## Cấu hình
|
||||
|
||||
|
||||
@@ -324,47 +324,6 @@ React 19 + Vite 6 + Tailwind CSS 4 网页仪表板直接从 Gateway 提供:
|
||||
- CI/CD:beta(推送时自动)→ stable(手动触发)→ Docker、crates.io、Scoop、AUR、Homebrew、tweet。
|
||||
- 预构建二进制文件支持 Linux(x86_64、aarch64、armv7)、macOS(x86_64、aarch64)、Windows(x86_64)。
|
||||
|
||||
## 工作原理(简述)
|
||||
|
||||
```
|
||||
WhatsApp / Telegram / Slack / Discord / Signal / iMessage / Matrix / IRC / Email
|
||||
Bluesky / Nostr / Mattermost / DingTalk / Lark / QQ / Reddit / MQTT / WebSocket
|
||||
│
|
||||
▼
|
||||
┌───────────────────────────────┐
|
||||
│ Gateway │
|
||||
│ (control plane) │
|
||||
│ http://127.0.0.1:42617 │
|
||||
├───────────────────────────────┤
|
||||
│ Web Dashboard (React 19) │
|
||||
│ REST API + WebSocket + SSE │
|
||||
│ Pairing + Rate Limiting │
|
||||
└──────────────┬────────────────┘
|
||||
│
|
||||
┌──────────┼──────────┐
|
||||
│ │ │
|
||||
▼ ▼ ▼
|
||||
┌────────┐ ┌────────┐ ┌────────┐
|
||||
│ Agent │ │ Cron │ │ Hands │
|
||||
│ Loop │ │Scheduler│ │ Swarm │
|
||||
└───┬────┘ └───┬────┘ └───┬────┘
|
||||
│ │ │
|
||||
└──────────┼──────────┘
|
||||
│
|
||||
┌──────────┼──────────┐
|
||||
│ │ │
|
||||
▼ ▼ ▼
|
||||
┌────────┐ ┌────────┐ ┌────────┐
|
||||
│Provider│ │ Tools │ │ Memory │
|
||||
│ (LLM) │ │ (70+) │ │(md/sql)│
|
||||
└────────┘ └────────┘ └────────┘
|
||||
│ │
|
||||
▼ ▼
|
||||
┌────────┐ ┌────────────┐
|
||||
│Security│ │ Peripherals│
|
||||
│ Policy │ │(ESP32/STM32)│
|
||||
└────────┘ └────────────┘
|
||||
```
|
||||
|
||||
## 配置
|
||||
|
||||
|
||||
@@ -0,0 +1,25 @@
|
||||
[package]
|
||||
name = "aardvark-sys"
|
||||
version = "0.1.0"
|
||||
edition = "2021"
|
||||
authors = ["theonlyhennygod"]
|
||||
license = "MIT OR Apache-2.0"
|
||||
description = "Low-level bindings for the Total Phase Aardvark I2C/SPI/GPIO USB adapter"
|
||||
repository = "https://github.com/zeroclaw-labs/zeroclaw"
|
||||
|
||||
# NOTE: This crate is the ONLY place in ZeroClaw where unsafe code is permitted.
|
||||
# The rest of the workspace remains #![forbid(unsafe_code)].
|
||||
#
|
||||
# Stub implementation: the Total Phase SDK (aardvark.h + aardvark.so) is NOT
|
||||
# yet committed. All AardvarkHandle methods return Err(AardvarkError::NotFound)
|
||||
# at runtime. No unsafe code is needed for the stub.
|
||||
#
|
||||
# To enable real hardware (once SDK files are in vendor/):
|
||||
# 1. Add `bindgen = "0.69"` to [build-dependencies]
|
||||
# 2. Add `libc = "0.2"` to [dependencies]
|
||||
# 3. Uncomment the build.rs bindgen call
|
||||
# 4. Replace stub method bodies with FFI calls via mod bindings
|
||||
|
||||
[dependencies]
|
||||
libloading = "0.8"
|
||||
thiserror = "2.0"
|
||||
@@ -0,0 +1,27 @@
|
||||
//! Build script for aardvark-sys.
|
||||
//!
|
||||
//! # SDK present (real hardware)
|
||||
//! When the Total Phase SDK files are in `vendor/`:
|
||||
//! - Sets linker search path for aardvark.so
|
||||
//! - Generates src/bindings.rs via bindgen
|
||||
//!
|
||||
//! # SDK absent (stub)
|
||||
//! Does nothing. All AardvarkHandle methods return errors at runtime.
|
||||
|
||||
fn main() {
|
||||
// Stub: SDK not yet in vendor/
|
||||
// Uncomment and fill in when aardvark.h + aardvark.so are available:
|
||||
//
|
||||
// println!("cargo:rustc-link-search=native=crates/aardvark-sys/vendor");
|
||||
// println!("cargo:rustc-link-lib=dylib=aardvark");
|
||||
// println!("cargo:rerun-if-changed=vendor/aardvark.h");
|
||||
//
|
||||
// let bindings = bindgen::Builder::default()
|
||||
// .header("vendor/aardvark.h")
|
||||
// .parse_callbacks(Box::new(bindgen::CargoCallbacks::new()))
|
||||
// .generate()
|
||||
// .expect("Unable to generate aardvark bindings");
|
||||
// bindings
|
||||
// .write_to_file("src/bindings.rs")
|
||||
// .expect("Could not write bindings");
|
||||
}
|
||||
@@ -0,0 +1,475 @@
|
||||
//! Bindings for the Total Phase Aardvark I2C/SPI/GPIO USB adapter.
|
||||
//!
|
||||
//! Uses [`libloading`] to load `aardvark.so` at runtime — the same pattern
|
||||
//! the official Total Phase C stub (`aardvark.c`) uses internally.
|
||||
//!
|
||||
//! # Library search order
|
||||
//!
|
||||
//! 1. `ZEROCLAW_AARDVARK_LIB` environment variable (full path to `aardvark.so`)
|
||||
//! 2. `<workspace>/crates/aardvark-sys/vendor/aardvark.so` (development default)
|
||||
//! 3. `./aardvark.so` (next to the binary, for deployment)
|
||||
//!
|
||||
//! If none resolve, every method returns
|
||||
//! [`Err(AardvarkError::LibraryNotFound)`](AardvarkError::LibraryNotFound).
|
||||
//!
|
||||
//! # Safety
|
||||
//!
|
||||
//! This crate is the **only** place in ZeroClaw where `unsafe` is permitted.
|
||||
//! All `unsafe` is confined to `extern "C"` call sites inside this file.
|
||||
//! The public API is fully safe Rust.
|
||||
|
||||
use std::path::PathBuf;
|
||||
use std::sync::OnceLock;
|
||||
|
||||
use libloading::{Library, Symbol};
|
||||
use thiserror::Error;
|
||||
|
||||
// ── Constants from aardvark.h ─────────────────────────────────────────────
|
||||
|
||||
/// Bit set on a port returned by `aa_find_devices` when that port is in use.
const AA_PORT_NOT_FREE: u16 = 0x8000;
/// Configure adapter for I2C + GPIO (I2C master mode, SPI disabled).
/// Value matches `AA_CONFIG_GPIO_I2C` in the vendored aardvark.h.
const AA_CONFIG_GPIO_I2C: i32 = 0x02;
/// Configure adapter for SPI + GPIO (SPI master mode, I2C disabled).
/// Value matches `AA_CONFIG_SPI_GPIO` in the vendored aardvark.h.
const AA_CONFIG_SPI_GPIO: i32 = 0x01;
/// No I2C flags (standard 7-bit addressing, normal stop condition).
const AA_I2C_NO_FLAGS: i32 = 0x00;
/// Enable both onboard I2C pullup resistors (hardware v2+ only).
const AA_I2C_PULLUP_BOTH: u8 = 0x03;

// ── Library loading ───────────────────────────────────────────────────────

/// Process-wide cache for the dynamically loaded `aardvark.so`.
///
/// `Some(lib)` once a candidate library loads and passes the identity
/// check in `lib()`; `None` if every candidate failed. Initialized at most
/// once per process via `OnceLock::get_or_init`.
static AARDVARK_LIB: OnceLock<Option<Library>> = OnceLock::new();
|
||||
|
||||
fn lib() -> Option<&'static Library> {
|
||||
AARDVARK_LIB
|
||||
.get_or_init(|| {
|
||||
let candidates: Vec<PathBuf> = vec![
|
||||
// 1. Explicit env-var override (full path)
|
||||
std::env::var("ZEROCLAW_AARDVARK_LIB")
|
||||
.ok()
|
||||
.map(PathBuf::from)
|
||||
.unwrap_or_default(),
|
||||
// 2. Vendor directory shipped with this crate (dev default)
|
||||
{
|
||||
let mut p = PathBuf::from(env!("CARGO_MANIFEST_DIR"));
|
||||
p.push("vendor/aardvark.so");
|
||||
p
|
||||
},
|
||||
// 3. Next to the running binary (deployment)
|
||||
std::env::current_exe()
|
||||
.ok()
|
||||
.and_then(|e| e.parent().map(|d| d.join("aardvark.so")))
|
||||
.unwrap_or_default(),
|
||||
// 4. Current working directory
|
||||
PathBuf::from("aardvark.so"),
|
||||
];
|
||||
let mut tried_any = false;
|
||||
for path in &candidates {
|
||||
if path.as_os_str().is_empty() {
|
||||
continue;
|
||||
}
|
||||
tried_any = true;
|
||||
match unsafe { Library::new(path) } {
|
||||
Ok(lib) => {
|
||||
// Verify the .so exports aa_c_version (Total Phase version gate).
|
||||
// The .so exports c_aa_* symbols (not aa_*); aa_c_version is the
|
||||
// one non-prefixed symbol used to confirm library identity.
|
||||
let version_ok = unsafe {
|
||||
lib.get::<unsafe extern "C" fn() -> u32>(b"aa_c_version\0").is_ok()
|
||||
};
|
||||
if !version_ok {
|
||||
eprintln!(
|
||||
"[aardvark-sys] {} loaded but aa_c_version not found — \
|
||||
not a valid Aardvark library, skipping",
|
||||
path.display()
|
||||
);
|
||||
continue;
|
||||
}
|
||||
eprintln!("[aardvark-sys] loaded library from {}", path.display());
|
||||
return Some(lib);
|
||||
}
|
||||
Err(e) => {
|
||||
let msg = e.to_string();
|
||||
// Surface architecture mismatch explicitly — the most common
|
||||
// failure on Apple Silicon machines with an x86_64 SDK.
|
||||
if msg.contains("incompatible architecture") || msg.contains("mach-o file") {
|
||||
eprintln!(
|
||||
"[aardvark-sys] ARCHITECTURE MISMATCH loading {}: {}\n\
|
||||
[aardvark-sys] The vendored aardvark.so is x86_64 but this \
|
||||
binary is {}.\n\
|
||||
[aardvark-sys] Download the arm64 SDK from https://www.totalphase.com/downloads/ \
|
||||
or build with --target x86_64-apple-darwin.",
|
||||
path.display(),
|
||||
msg,
|
||||
std::env::consts::ARCH,
|
||||
);
|
||||
} else {
|
||||
eprintln!(
|
||||
"[aardvark-sys] could not load {}: {}",
|
||||
path.display(),
|
||||
msg
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
if !tried_any {
|
||||
eprintln!("[aardvark-sys] no library candidates found; set ZEROCLAW_AARDVARK_LIB or place aardvark.so next to the binary");
|
||||
}
|
||||
None
|
||||
})
|
||||
.as_ref()
|
||||
}
|
||||
|
||||
/// Errors returned by Aardvark hardware operations.
///
/// `Display` messages are derived via `thiserror`'s `#[error]` attributes;
/// the unit tests below assert on fragments of these strings, so treat the
/// message text as part of the contract.
#[derive(Debug, Error)]
pub enum AardvarkError {
    /// No Aardvark adapter found — adapter not plugged in.
    #[error("Aardvark adapter not found — is it plugged in?")]
    NotFound,
    /// `aa_open` returned a non-positive handle.
    #[error("Aardvark open failed (code {0})")]
    OpenFailed(i32),
    /// `aa_i2c_write` returned a negative status code.
    #[error("I2C write failed (code {0})")]
    I2cWriteFailed(i32),
    /// `aa_i2c_read` returned a negative status code.
    #[error("I2C read failed (code {0})")]
    I2cReadFailed(i32),
    /// `aa_spi_write` returned a negative status code.
    #[error("SPI transfer failed (code {0})")]
    SpiTransferFailed(i32),
    /// GPIO operation returned a negative status code.
    #[error("GPIO error (code {0})")]
    GpioError(i32),
    /// `aardvark.so` could not be found or loaded.
    #[error("aardvark.so not found — set ZEROCLAW_AARDVARK_LIB or place it next to the binary")]
    LibraryNotFound,
}

/// Convenience `Result` alias for this crate.
pub type Result<T> = std::result::Result<T, AardvarkError>;
|
||||
|
||||
// ── Handle ────────────────────────────────────────────────────────────────
|
||||
|
||||
/// Safe RAII handle over the Aardvark C library handle.
///
/// Automatically closes the adapter on `Drop`.
///
/// **Usage pattern:** open a fresh handle per command and let it drop at the
/// end of each operation (lazy-open / eager-close).
pub struct AardvarkHandle {
    // Raw handle returned by c_aa_open. Always strictly positive for a live
    // handle — open_port() rejects non-positive values before construction.
    handle: i32,
}
|
||||
|
||||
impl AardvarkHandle {
|
||||
// ── Lifecycle ─────────────────────────────────────────────────────────
|
||||
|
||||
/// Open the first available (free) Aardvark adapter.
|
||||
pub fn open() -> Result<Self> {
|
||||
let ports = Self::find_devices();
|
||||
let port = ports.first().copied().ok_or(AardvarkError::NotFound)?;
|
||||
Self::open_port(i32::from(port))
|
||||
}
|
||||
|
||||
/// Open a specific Aardvark adapter by port index.
|
||||
pub fn open_port(port: i32) -> Result<Self> {
|
||||
let lib = lib().ok_or(AardvarkError::LibraryNotFound)?;
|
||||
let handle: i32 = unsafe {
|
||||
let f: Symbol<unsafe extern "C" fn(i32) -> i32> = lib
|
||||
.get(b"c_aa_open\0")
|
||||
.map_err(|_| AardvarkError::LibraryNotFound)?;
|
||||
f(port)
|
||||
};
|
||||
if handle <= 0 {
|
||||
Err(AardvarkError::OpenFailed(handle))
|
||||
} else {
|
||||
Ok(Self { handle })
|
||||
}
|
||||
}
|
||||
|
||||
/// Return the port numbers of all **free** connected adapters.
|
||||
///
|
||||
/// Ports in-use by another process are filtered out.
|
||||
/// Returns an empty `Vec` when `aardvark.so` cannot be loaded.
|
||||
pub fn find_devices() -> Vec<u16> {
|
||||
let Some(lib) = lib() else {
|
||||
eprintln!("[aardvark-sys] find_devices: library not loaded");
|
||||
return Vec::new();
|
||||
};
|
||||
let mut ports = [0u16; 16];
|
||||
let n: i32 = unsafe {
|
||||
let f: std::result::Result<Symbol<unsafe extern "C" fn(i32, *mut u16) -> i32>, _> =
|
||||
lib.get(b"c_aa_find_devices\0");
|
||||
match f {
|
||||
Ok(f) => f(16, ports.as_mut_ptr()),
|
||||
Err(e) => {
|
||||
eprintln!("[aardvark-sys] find_devices: symbol lookup failed: {e}");
|
||||
return Vec::new();
|
||||
}
|
||||
}
|
||||
};
|
||||
eprintln!(
|
||||
"[aardvark-sys] find_devices: c_aa_find_devices returned {n}, ports={:?}",
|
||||
&ports[..n.max(0) as usize]
|
||||
);
|
||||
if n <= 0 {
|
||||
return Vec::new();
|
||||
}
|
||||
let free: Vec<u16> = ports[..n as usize]
|
||||
.iter()
|
||||
.filter(|&&p| (p & AA_PORT_NOT_FREE) == 0)
|
||||
.copied()
|
||||
.collect();
|
||||
eprintln!("[aardvark-sys] find_devices: free ports={free:?}");
|
||||
free
|
||||
}
|
||||
|
||||
// ── I2C ───────────────────────────────────────────────────────────────
|
||||
|
||||
/// Enable I2C mode and set the bitrate (kHz).
|
||||
pub fn i2c_enable(&self, bitrate_khz: u32) -> Result<()> {
|
||||
let lib = lib().ok_or(AardvarkError::LibraryNotFound)?;
|
||||
unsafe {
|
||||
let configure: Symbol<unsafe extern "C" fn(i32, i32) -> i32> = lib
|
||||
.get(b"c_aa_configure\0")
|
||||
.map_err(|_| AardvarkError::LibraryNotFound)?;
|
||||
configure(self.handle, AA_CONFIG_GPIO_I2C);
|
||||
let pullup: Symbol<unsafe extern "C" fn(i32, u8) -> i32> = lib
|
||||
.get(b"c_aa_i2c_pullup\0")
|
||||
.map_err(|_| AardvarkError::LibraryNotFound)?;
|
||||
pullup(self.handle, AA_I2C_PULLUP_BOTH);
|
||||
let bitrate: Symbol<unsafe extern "C" fn(i32, i32) -> i32> = lib
|
||||
.get(b"c_aa_i2c_bitrate\0")
|
||||
.map_err(|_| AardvarkError::LibraryNotFound)?;
|
||||
bitrate(self.handle, bitrate_khz as i32);
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Write `data` bytes to the I2C device at `addr`.
|
||||
pub fn i2c_write(&self, addr: u8, data: &[u8]) -> Result<()> {
|
||||
let lib = lib().ok_or(AardvarkError::LibraryNotFound)?;
|
||||
let ret: i32 = unsafe {
|
||||
let f: Symbol<unsafe extern "C" fn(i32, u16, i32, u16, *const u8) -> i32> = lib
|
||||
.get(b"c_aa_i2c_write\0")
|
||||
.map_err(|_| AardvarkError::LibraryNotFound)?;
|
||||
f(
|
||||
self.handle,
|
||||
u16::from(addr),
|
||||
AA_I2C_NO_FLAGS,
|
||||
data.len() as u16,
|
||||
data.as_ptr(),
|
||||
)
|
||||
};
|
||||
if ret < 0 {
|
||||
Err(AardvarkError::I2cWriteFailed(ret))
|
||||
} else {
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
/// Read `len` bytes from the I2C device at `addr`.
|
||||
pub fn i2c_read(&self, addr: u8, len: usize) -> Result<Vec<u8>> {
|
||||
let lib = lib().ok_or(AardvarkError::LibraryNotFound)?;
|
||||
let mut buf = vec![0u8; len];
|
||||
let ret: i32 = unsafe {
|
||||
let f: Symbol<unsafe extern "C" fn(i32, u16, i32, u16, *mut u8) -> i32> = lib
|
||||
.get(b"c_aa_i2c_read\0")
|
||||
.map_err(|_| AardvarkError::LibraryNotFound)?;
|
||||
f(
|
||||
self.handle,
|
||||
u16::from(addr),
|
||||
AA_I2C_NO_FLAGS,
|
||||
len as u16,
|
||||
buf.as_mut_ptr(),
|
||||
)
|
||||
};
|
||||
if ret < 0 {
|
||||
Err(AardvarkError::I2cReadFailed(ret))
|
||||
} else {
|
||||
Ok(buf)
|
||||
}
|
||||
}
|
||||
|
||||
/// Write then read — standard I2C register-read pattern.
|
||||
pub fn i2c_write_read(&self, addr: u8, write_data: &[u8], read_len: usize) -> Result<Vec<u8>> {
|
||||
self.i2c_write(addr, write_data)?;
|
||||
self.i2c_read(addr, read_len)
|
||||
}
|
||||
|
||||
/// Scan the I2C bus, returning addresses of all responding devices.
|
||||
///
|
||||
/// Probes `0x08–0x77` with a 1-byte read; returns addresses that ACK.
|
||||
pub fn i2c_scan(&self) -> Vec<u8> {
|
||||
let Some(lib) = lib() else {
|
||||
return Vec::new();
|
||||
};
|
||||
let Ok(f): std::result::Result<
|
||||
Symbol<unsafe extern "C" fn(i32, u16, i32, u16, *mut u8) -> i32>,
|
||||
_,
|
||||
> = (unsafe { lib.get(b"c_aa_i2c_read\0") }) else {
|
||||
return Vec::new();
|
||||
};
|
||||
let mut found = Vec::new();
|
||||
let mut buf = [0u8; 1];
|
||||
for addr in 0x08u16..=0x77 {
|
||||
let ret = unsafe { f(self.handle, addr, AA_I2C_NO_FLAGS, 1, buf.as_mut_ptr()) };
|
||||
// ret > 0: bytes received → device ACKed
|
||||
// ret == 0: NACK → no device at this address
|
||||
// ret < 0: error code → skip
|
||||
if ret > 0 {
|
||||
found.push(addr as u8);
|
||||
}
|
||||
}
|
||||
found
|
||||
}
|
||||
|
||||
// ── SPI ───────────────────────────────────────────────────────────────
|
||||
|
||||
/// Enable SPI mode and set the bitrate (kHz).
|
||||
pub fn spi_enable(&self, bitrate_khz: u32) -> Result<()> {
|
||||
let lib = lib().ok_or(AardvarkError::LibraryNotFound)?;
|
||||
unsafe {
|
||||
let configure: Symbol<unsafe extern "C" fn(i32, i32) -> i32> = lib
|
||||
.get(b"c_aa_configure\0")
|
||||
.map_err(|_| AardvarkError::LibraryNotFound)?;
|
||||
configure(self.handle, AA_CONFIG_SPI_GPIO);
|
||||
// SPI mode 0: polarity=rising/falling(0), phase=sample/setup(0), MSB first(0)
|
||||
let spi_cfg: Symbol<unsafe extern "C" fn(i32, i32, i32, i32) -> i32> = lib
|
||||
.get(b"c_aa_spi_configure\0")
|
||||
.map_err(|_| AardvarkError::LibraryNotFound)?;
|
||||
spi_cfg(self.handle, 0, 0, 0);
|
||||
let bitrate: Symbol<unsafe extern "C" fn(i32, i32) -> i32> = lib
|
||||
.get(b"c_aa_spi_bitrate\0")
|
||||
.map_err(|_| AardvarkError::LibraryNotFound)?;
|
||||
bitrate(self.handle, bitrate_khz as i32);
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Full-duplex SPI transfer.
|
||||
///
|
||||
/// Sends `send` bytes; returns the simultaneously received bytes (same length).
|
||||
pub fn spi_transfer(&self, send: &[u8]) -> Result<Vec<u8>> {
|
||||
let lib = lib().ok_or(AardvarkError::LibraryNotFound)?;
|
||||
let mut recv = vec![0u8; send.len()];
|
||||
// aa_spi_write(aardvark, out_num_bytes, data_out, in_num_bytes, data_in)
|
||||
let ret: i32 = unsafe {
|
||||
let f: Symbol<unsafe extern "C" fn(i32, u16, *const u8, u16, *mut u8) -> i32> = lib
|
||||
.get(b"c_aa_spi_write\0")
|
||||
.map_err(|_| AardvarkError::LibraryNotFound)?;
|
||||
f(
|
||||
self.handle,
|
||||
send.len() as u16,
|
||||
send.as_ptr(),
|
||||
recv.len() as u16,
|
||||
recv.as_mut_ptr(),
|
||||
)
|
||||
};
|
||||
if ret < 0 {
|
||||
Err(AardvarkError::SpiTransferFailed(ret))
|
||||
} else {
|
||||
Ok(recv)
|
||||
}
|
||||
}
|
||||
|
||||
// ── GPIO ──────────────────────────────────────────────────────────────
|
||||
|
||||
/// Set GPIO pin directions and output values.
|
||||
///
|
||||
/// `direction`: bitmask — `1` = output, `0` = input.
|
||||
/// `value`: output state bitmask.
|
||||
pub fn gpio_set(&self, direction: u8, value: u8) -> Result<()> {
|
||||
let lib = lib().ok_or(AardvarkError::LibraryNotFound)?;
|
||||
unsafe {
|
||||
let dir_f: Symbol<unsafe extern "C" fn(i32, u8) -> i32> = lib
|
||||
.get(b"c_aa_gpio_direction\0")
|
||||
.map_err(|_| AardvarkError::LibraryNotFound)?;
|
||||
let d = dir_f(self.handle, direction);
|
||||
if d < 0 {
|
||||
return Err(AardvarkError::GpioError(d));
|
||||
}
|
||||
let set_f: Symbol<unsafe extern "C" fn(i32, u8) -> i32> =
|
||||
lib.get(b"c_aa_gpio_set\0")
|
||||
.map_err(|_| AardvarkError::LibraryNotFound)?;
|
||||
let r = set_f(self.handle, value);
|
||||
if r < 0 {
|
||||
return Err(AardvarkError::GpioError(r));
|
||||
}
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Read the current GPIO pin states as a bitmask.
|
||||
pub fn gpio_get(&self) -> Result<u8> {
|
||||
let lib = lib().ok_or(AardvarkError::LibraryNotFound)?;
|
||||
let ret: i32 = unsafe {
|
||||
let f: Symbol<unsafe extern "C" fn(i32) -> i32> = lib
|
||||
.get(b"c_aa_gpio_get\0")
|
||||
.map_err(|_| AardvarkError::LibraryNotFound)?;
|
||||
f(self.handle)
|
||||
};
|
||||
if ret < 0 {
|
||||
Err(AardvarkError::GpioError(ret))
|
||||
} else {
|
||||
Ok(ret as u8)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Drop for AardvarkHandle {
|
||||
fn drop(&mut self) {
|
||||
if let Some(lib) = lib() {
|
||||
unsafe {
|
||||
if let Ok(f) = lib.get::<unsafe extern "C" fn(i32) -> i32>(b"c_aa_close\0") {
|
||||
f(self.handle);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
mod tests {
    use super::*;

    /// Enumerating adapters with nothing plugged in must be a clean no-op.
    #[test]
    fn find_devices_does_not_panic() {
        let _ = AardvarkHandle::find_devices();
    }

    /// `open()` may succeed (adapter present) or fail with LibraryNotFound /
    /// NotFound / OpenFailed (adapter absent). Either outcome is valid —
    /// this test only guards against panics.
    #[test]
    fn open_returns_error_or_ok_depending_on_hardware() {
        let _ = AardvarkHandle::open();
    }

    /// Port 99 never exists, so opening it must fail whether or not real
    /// hardware is connected.
    #[test]
    fn open_port_returns_error_when_no_hardware() {
        assert!(AardvarkHandle::open_port(99).is_err());
    }

    /// Every error variant should render a recognizable human message.
    #[test]
    fn error_display_messages_are_human_readable() {
        let not_found = AardvarkError::NotFound.to_string().to_lowercase();
        assert!(not_found.contains("not found"));

        let open_failed = AardvarkError::OpenFailed(-1).to_string();
        assert!(open_failed.contains("-1"));

        let i2c = AardvarkError::I2cWriteFailed(-3).to_string();
        assert!(i2c.contains("I2C write"));

        let spi = AardvarkError::SpiTransferFailed(-2).to_string();
        assert!(spi.contains("SPI"));

        let lib_missing = AardvarkError::LibraryNotFound.to_string();
        assert!(lib_missing.contains("aardvark.so"));
    }
}
|
||||
+919
@@ -0,0 +1,919 @@
|
||||
/*=========================================================================
|
||||
| Aardvark Interface Library
|
||||
|--------------------------------------------------------------------------
|
||||
| Copyright (c) 2003-2024 Total Phase, Inc.
|
||||
| All rights reserved.
|
||||
| www.totalphase.com
|
||||
|
|
||||
| Redistribution and use of this file in source and binary forms, with
|
||||
| or without modification, are permitted provided that the following
|
||||
| conditions are met:
|
||||
|
|
||||
| - Redistributions of source code must retain the above copyright
|
||||
| notice, this list of conditions, and the following disclaimer.
|
||||
|
|
||||
| - Redistributions in binary form must reproduce the above copyright
|
||||
| notice, this list of conditions, and the following disclaimer in the
|
||||
| documentation or other materials provided with the distribution.
|
||||
|
|
||||
| - This file must only be used to interface with Total Phase products.
|
||||
| The names of Total Phase and its contributors must not be used to
|
||||
| endorse or promote products derived from this software.
|
||||
|
|
||||
| THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
| "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING BUT NOT
|
||||
| LIMITED TO THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
|
||||
| FOR A PARTICULAR PURPOSE, ARE DISCLAIMED. IN NO EVENT WILL THE
|
||||
| COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
|
||||
| INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING
|
||||
| BUT NOT LIMITED TO PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
|
||||
| LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
|
||||
| CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
|
||||
| LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
|
||||
| ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
|
||||
| POSSIBILITY OF SUCH DAMAGE.
|
||||
|--------------------------------------------------------------------------
|
||||
| To access Total Phase Aardvark devices through the API:
|
||||
|
|
||||
| 1) Use one of the following shared objects:
|
||||
| aardvark.so -- Linux or macOS shared object
|
||||
| aardvark.dll -- Windows dynamic link library
|
||||
|
|
||||
| 2) Along with one of the following language modules:
|
||||
| aardvark.c/h -- C/C++ API header file and interface module
|
||||
| aardvark_py.py -- Python API
|
||||
| aardvark.cs -- C# .NET source
|
||||
| aardvark_net.dll -- Compiled .NET binding
|
||||
| aardvark.bas -- Visual Basic 6 API
|
||||
========================================================================*/
|
||||
|
||||
|
||||
#ifndef __aardvark_h__
|
||||
#define __aardvark_h__
|
||||
|
||||
#ifdef __cplusplus
|
||||
extern "C" {
|
||||
#endif
|
||||
|
||||
|
||||
/*=========================================================================
|
||||
| TYPEDEFS
|
||||
========================================================================*/
|
||||
#ifndef TOTALPHASE_DATA_TYPES
|
||||
#define TOTALPHASE_DATA_TYPES
|
||||
|
||||
#ifndef _MSC_VER
|
||||
/* C99-compliant compilers (GCC) */
|
||||
#include <stdint.h>
|
||||
typedef uint8_t u08;
|
||||
typedef uint16_t u16;
|
||||
typedef uint32_t u32;
|
||||
typedef uint64_t u64;
|
||||
typedef int8_t s08;
|
||||
typedef int16_t s16;
|
||||
typedef int32_t s32;
|
||||
typedef int64_t s64;
|
||||
|
||||
#else
|
||||
/* Microsoft compilers (Visual C++) */
|
||||
typedef unsigned __int8 u08;
|
||||
typedef unsigned __int16 u16;
|
||||
typedef unsigned __int32 u32;
|
||||
typedef unsigned __int64 u64;
|
||||
typedef signed __int8 s08;
|
||||
typedef signed __int16 s16;
|
||||
typedef signed __int32 s32;
|
||||
typedef signed __int64 s64;
|
||||
|
||||
#endif /* __MSC_VER */
|
||||
|
||||
typedef float f32;
|
||||
typedef double f64;
|
||||
|
||||
#endif /* TOTALPHASE_DATA_TYPES */
|
||||
|
||||
|
||||
/*=========================================================================
|
||||
| DEBUG
|
||||
========================================================================*/
|
||||
/* Set the following macro to '1' for debugging */
|
||||
#define AA_DEBUG 0
|
||||
|
||||
|
||||
/*=========================================================================
|
||||
| VERSION
|
||||
========================================================================*/
|
||||
#define AA_HEADER_VERSION 0x0600 /* v6.00 */
|
||||
|
||||
|
||||
/*=========================================================================
|
||||
| STATUS CODES
|
||||
========================================================================*/
|
||||
/*
|
||||
* All API functions return an integer which is the result of the
|
||||
* transaction, or a status code if negative. The status codes are
|
||||
* defined as follows:
|
||||
*/
|
||||
enum AardvarkStatus {
|
||||
/* General codes (0 to -99) */
|
||||
AA_OK = 0,
|
||||
AA_UNABLE_TO_LOAD_LIBRARY = -1,
|
||||
AA_UNABLE_TO_LOAD_DRIVER = -2,
|
||||
AA_UNABLE_TO_LOAD_FUNCTION = -3,
|
||||
AA_INCOMPATIBLE_LIBRARY = -4,
|
||||
AA_INCOMPATIBLE_DEVICE = -5,
|
||||
AA_COMMUNICATION_ERROR = -6,
|
||||
AA_UNABLE_TO_OPEN = -7,
|
||||
AA_UNABLE_TO_CLOSE = -8,
|
||||
AA_INVALID_HANDLE = -9,
|
||||
AA_CONFIG_ERROR = -10,
|
||||
|
||||
/* I2C codes (-100 to -199) */
|
||||
AA_I2C_NOT_AVAILABLE = -100,
|
||||
AA_I2C_NOT_ENABLED = -101,
|
||||
AA_I2C_READ_ERROR = -102,
|
||||
AA_I2C_WRITE_ERROR = -103,
|
||||
AA_I2C_SLAVE_BAD_CONFIG = -104,
|
||||
AA_I2C_SLAVE_READ_ERROR = -105,
|
||||
AA_I2C_SLAVE_TIMEOUT = -106,
|
||||
AA_I2C_DROPPED_EXCESS_BYTES = -107,
|
||||
AA_I2C_BUS_ALREADY_FREE = -108,
|
||||
|
||||
/* SPI codes (-200 to -299) */
|
||||
AA_SPI_NOT_AVAILABLE = -200,
|
||||
AA_SPI_NOT_ENABLED = -201,
|
||||
AA_SPI_WRITE_ERROR = -202,
|
||||
AA_SPI_SLAVE_READ_ERROR = -203,
|
||||
AA_SPI_SLAVE_TIMEOUT = -204,
|
||||
AA_SPI_DROPPED_EXCESS_BYTES = -205,
|
||||
|
||||
/* GPIO codes (-400 to -499) */
|
||||
AA_GPIO_NOT_AVAILABLE = -400
|
||||
};
|
||||
#ifndef __cplusplus
|
||||
typedef enum AardvarkStatus AardvarkStatus;
|
||||
#endif
|
||||
|
||||
|
||||
/*=========================================================================
|
||||
| GENERAL TYPE DEFINITIONS
|
||||
========================================================================*/
|
||||
/* Aardvark handle type definition */
|
||||
typedef int Aardvark;
|
||||
|
||||
/*
|
||||
* Deprecated type definitions.
|
||||
*
|
||||
* These are only for use with legacy code and
|
||||
* should not be used for new development.
|
||||
*/
|
||||
typedef u08 aa_u08;
|
||||
|
||||
typedef u16 aa_u16;
|
||||
|
||||
typedef u32 aa_u32;
|
||||
|
||||
typedef s08 aa_s08;
|
||||
|
||||
typedef s16 aa_s16;
|
||||
|
||||
typedef s32 aa_s32;
|
||||
|
||||
/*
|
||||
* Aardvark version matrix.
|
||||
*
|
||||
* This matrix describes the various version dependencies
|
||||
* of Aardvark components. It can be used to determine
|
||||
* which component caused an incompatibility error.
|
||||
*
|
||||
* All version numbers are of the format:
|
||||
* (major << 8) | minor
|
||||
*
|
||||
* ex. v1.20 would be encoded as: 0x0114
|
||||
*/
|
||||
struct AardvarkVersion {
|
||||
/* Software, firmware, and hardware versions. */
|
||||
u16 software;
|
||||
u16 firmware;
|
||||
u16 hardware;
|
||||
|
||||
/* Firmware requires that software must be >= this version. */
|
||||
u16 sw_req_by_fw;
|
||||
|
||||
/* Software requires that firmware must be >= this version. */
|
||||
u16 fw_req_by_sw;
|
||||
|
||||
/* Software requires that the API interface must be >= this version. */
|
||||
u16 api_req_by_sw;
|
||||
};
|
||||
#ifndef __cplusplus
|
||||
typedef struct AardvarkVersion AardvarkVersion;
|
||||
#endif
|
||||
|
||||
|
||||
/*=========================================================================
|
||||
| GENERAL API
|
||||
========================================================================*/
|
||||
/*
|
||||
* Get a list of ports to which Aardvark devices are attached.
|
||||
*
|
||||
* nelem = maximum number of elements to return
|
||||
* devices = array into which the port numbers are returned
|
||||
*
|
||||
* Each element of the array is written with the port number.
|
||||
* Devices that are in-use are ORed with AA_PORT_NOT_FREE (0x8000).
|
||||
*
|
||||
* ex. devices are attached to ports 0, 1, 2
|
||||
* ports 0 and 2 are available, and port 1 is in-use.
|
||||
* array => 0x0000, 0x8001, 0x0002
|
||||
*
|
||||
* If the array is NULL, it is not filled with any values.
|
||||
* If there are more devices than the array size, only the
|
||||
* first nmemb port numbers will be written into the array.
|
||||
*
|
||||
* Returns the number of devices found, regardless of the
|
||||
* array size.
|
||||
*/
|
||||
#define AA_PORT_NOT_FREE 0x8000
|
||||
int aa_find_devices (
|
||||
int num_devices,
|
||||
u16 * devices
|
||||
);
|
||||
|
||||
|
||||
/*
|
||||
* Get a list of ports to which Aardvark devices are attached.
|
||||
*
|
||||
* This function is the same as aa_find_devices() except that
|
||||
* it returns the unique IDs of each Aardvark device. The IDs
|
||||
* are guaranteed to be non-zero if valid.
|
||||
*
|
||||
* The IDs are the unsigned integer representation of the 10-digit
|
||||
* serial numbers.
|
||||
*/
|
||||
int aa_find_devices_ext (
|
||||
int num_devices,
|
||||
u16 * devices,
|
||||
int num_ids,
|
||||
u32 * unique_ids
|
||||
);
|
||||
|
||||
|
||||
/*
|
||||
* Open the Aardvark port.
|
||||
*
|
||||
* The port number is a zero-indexed integer.
|
||||
*
|
||||
* The port number is the same as that obtained from the
|
||||
* aa_find_devices() function above.
|
||||
*
|
||||
* Returns an Aardvark handle, which is guaranteed to be
|
||||
* greater than zero if it is valid.
|
||||
*
|
||||
* This function is recommended for use in simple applications
|
||||
* where extended information is not required. For more complex
|
||||
* applications, the use of aa_open_ext() is recommended.
|
||||
*/
|
||||
Aardvark aa_open (
|
||||
int port_number
|
||||
);
|
||||
|
||||
|
||||
/*
|
||||
* Open the Aardvark port, returning extended information
|
||||
* in the supplied structure. Behavior is otherwise identical
|
||||
* to aa_open() above. If 0 is passed as the pointer to the
|
||||
* structure, this function is exactly equivalent to aa_open().
|
||||
*
|
||||
* The structure is zeroed before the open is attempted.
|
||||
* It is filled with whatever information is available.
|
||||
*
|
||||
* For example, if the firmware version is not filled, then
|
||||
* the device could not be queried for its version number.
|
||||
*
|
||||
* This function is recommended for use in complex applications
|
||||
* where extended information is required. For more simple
|
||||
* applications, the use of aa_open() is recommended.
|
||||
*/
|
||||
struct AardvarkExt {
|
||||
/* Version matrix */
|
||||
AardvarkVersion version;
|
||||
|
||||
/* Features of this device. */
|
||||
int features;
|
||||
};
|
||||
#ifndef __cplusplus
|
||||
typedef struct AardvarkExt AardvarkExt;
|
||||
#endif
|
||||
|
||||
Aardvark aa_open_ext (
|
||||
int port_number,
|
||||
AardvarkExt * aa_ext
|
||||
);
|
||||
|
||||
|
||||
/* Close the Aardvark port. */
|
||||
int aa_close (
|
||||
Aardvark aardvark
|
||||
);
|
||||
|
||||
|
||||
/*
|
||||
* Return the port for this Aardvark handle.
|
||||
*
|
||||
* The port number is a zero-indexed integer.
|
||||
*/
|
||||
int aa_port (
|
||||
Aardvark aardvark
|
||||
);
|
||||
|
||||
|
||||
/*
|
||||
* Return the device features as a bit-mask of values, or
|
||||
* an error code if the handle is not valid.
|
||||
*/
|
||||
#define AA_FEATURE_SPI 0x00000001
|
||||
#define AA_FEATURE_I2C 0x00000002
|
||||
#define AA_FEATURE_GPIO 0x00000008
|
||||
int aa_features (
|
||||
Aardvark aardvark
|
||||
);
|
||||
|
||||
|
||||
/*
|
||||
* Return the unique ID for this Aardvark adapter.
|
||||
* IDs are guaranteed to be non-zero if valid.
|
||||
* The ID is the unsigned integer representation of the
|
||||
* 10-digit serial number.
|
||||
*/
|
||||
u32 aa_unique_id (
|
||||
Aardvark aardvark
|
||||
);
|
||||
|
||||
|
||||
/*
|
||||
* Return the status string for the given status code.
|
||||
* If the code is not valid or the library function cannot
|
||||
* be loaded, return a NULL string.
|
||||
*/
|
||||
const char * aa_status_string (
|
||||
int status
|
||||
);
|
||||
|
||||
|
||||
/*
|
||||
* Enable logging to a file. The handle must be standard file
|
||||
* descriptor. In C, a file descriptor can be obtained by using
|
||||
* the ANSI C function "open" or by using the function "fileno"
|
||||
* on a FILE* stream. A FILE* stream can be obtained using "fopen"
|
||||
* or can correspond to the common "stdout" or "stderr" --
|
||||
* available when including stdlib.h
|
||||
*/
|
||||
#define AA_LOG_STDOUT 1
|
||||
#define AA_LOG_STDERR 2
|
||||
int aa_log (
|
||||
Aardvark aardvark,
|
||||
int level,
|
||||
int handle
|
||||
);
|
||||
|
||||
|
||||
/*
|
||||
* Return the version matrix for the device attached to the
|
||||
* given handle. If the handle is 0 or invalid, only the
|
||||
* software and required api versions are set.
|
||||
*/
|
||||
int aa_version (
|
||||
Aardvark aardvark,
|
||||
AardvarkVersion * version
|
||||
);
|
||||
|
||||
|
||||
/*
|
||||
* Configure the device by enabling/disabling I2C, SPI, and
|
||||
* GPIO functions.
|
||||
*/
|
||||
enum AardvarkConfig {
|
||||
AA_CONFIG_GPIO_ONLY = 0x00,
|
||||
AA_CONFIG_SPI_GPIO = 0x01,
|
||||
AA_CONFIG_GPIO_I2C = 0x02,
|
||||
AA_CONFIG_SPI_I2C = 0x03,
|
||||
AA_CONFIG_QUERY = 0x80
|
||||
};
|
||||
#ifndef __cplusplus
|
||||
typedef enum AardvarkConfig AardvarkConfig;
|
||||
#endif
|
||||
|
||||
#define AA_CONFIG_SPI_MASK 0x00000001
|
||||
#define AA_CONFIG_I2C_MASK 0x00000002
|
||||
int aa_configure (
|
||||
Aardvark aardvark,
|
||||
AardvarkConfig config
|
||||
);
|
||||
|
||||
|
||||
/*
|
||||
* Configure the target power pins.
|
||||
* This is only supported on hardware versions >= 2.00
|
||||
*/
|
||||
#define AA_TARGET_POWER_NONE 0x00
|
||||
#define AA_TARGET_POWER_BOTH 0x03
|
||||
#define AA_TARGET_POWER_QUERY 0x80
|
||||
int aa_target_power (
|
||||
Aardvark aardvark,
|
||||
u08 power_mask
|
||||
);
|
||||
|
||||
|
||||
/*
|
||||
* Sleep for the specified number of milliseconds
|
||||
* Accuracy depends on the operating system scheduler
|
||||
* Returns the number of milliseconds slept
|
||||
*/
|
||||
u32 aa_sleep_ms (
|
||||
u32 milliseconds
|
||||
);
|
||||
|
||||
|
||||
|
||||
/*=========================================================================
|
||||
| ASYNC MESSAGE POLLING
|
||||
========================================================================*/
|
||||
/*
|
||||
* Polling function to check if there are any asynchronous
|
||||
* messages pending for processing. The function takes a timeout
|
||||
* value in units of milliseconds. If the timeout is < 0, the
|
||||
* function will block until data is received. If the timeout is 0,
|
||||
* the function will perform a non-blocking check.
|
||||
*/
|
||||
#define AA_ASYNC_NO_DATA 0x00000000
|
||||
#define AA_ASYNC_I2C_READ 0x00000001
|
||||
#define AA_ASYNC_I2C_WRITE 0x00000002
|
||||
#define AA_ASYNC_SPI 0x00000004
|
||||
int aa_async_poll (
|
||||
Aardvark aardvark,
|
||||
int timeout
|
||||
);
|
||||
|
||||
|
||||
|
||||
/*=========================================================================
| I2C API
 ========================================================================*/
/* Free the I2C bus. */
int aa_i2c_free_bus (
    Aardvark  aardvark
);

/*
 * Set the I2C bit rate in kilohertz. If a zero is passed as the
 * bitrate, the bitrate is unchanged and the current bitrate is
 * returned.
 */
int aa_i2c_bitrate (
    Aardvark  aardvark,
    int       bitrate_khz
);

/*
 * Set the bus lock timeout. If a zero is passed as the timeout,
 * the timeout is unchanged and the current timeout is returned.
 */
int aa_i2c_bus_timeout (
    Aardvark  aardvark,
    u16       timeout_ms
);

/* Per-transaction flags for the I2C master/slave functions below. */
enum AardvarkI2cFlags {
    AA_I2C_NO_FLAGS          = 0x00,
    AA_I2C_10_BIT_ADDR       = 0x01,
    AA_I2C_COMBINED_FMT      = 0x02,
    AA_I2C_NO_STOP           = 0x04,
    AA_I2C_SIZED_READ        = 0x10,
    AA_I2C_SIZED_READ_EXTRA1 = 0x20
};
#ifndef __cplusplus
typedef enum AardvarkI2cFlags AardvarkI2cFlags;
#endif

/* Read a stream of bytes from the I2C slave device. */
int aa_i2c_read (
    Aardvark          aardvark,
    u16               slave_addr,
    AardvarkI2cFlags  flags,
    u16               num_bytes,
    u08 *             data_in
);

/* Status codes returned by the extended (_ext) I2C functions below. */
enum AardvarkI2cStatus {
    AA_I2C_STATUS_OK            = 0,
    AA_I2C_STATUS_BUS_ERROR     = 1,
    AA_I2C_STATUS_SLA_ACK       = 2,
    AA_I2C_STATUS_SLA_NACK      = 3,
    AA_I2C_STATUS_DATA_NACK     = 4,
    AA_I2C_STATUS_ARB_LOST      = 5,
    AA_I2C_STATUS_BUS_LOCKED    = 6,
    AA_I2C_STATUS_LAST_DATA_ACK = 7
};
#ifndef __cplusplus
typedef enum AardvarkI2cStatus AardvarkI2cStatus;
#endif

/*
 * Read a stream of bytes from the I2C slave device.
 * This API function returns the number of bytes read into
 * the num_read variable. The return value of the function
 * is a status code.
 */
int aa_i2c_read_ext (
    Aardvark          aardvark,
    u16               slave_addr,
    AardvarkI2cFlags  flags,
    u16               num_bytes,
    u08 *             data_in,
    u16 *             num_read
);

/* Write a stream of bytes to the I2C slave device. */
int aa_i2c_write (
    Aardvark          aardvark,
    u16               slave_addr,
    AardvarkI2cFlags  flags,
    u16               num_bytes,
    const u08 *       data_out
);

/*
 * Write a stream of bytes to the I2C slave device.
 * This API function returns the number of bytes written into
 * the num_written variable. The return value of the function
 * is a status code.
 */
int aa_i2c_write_ext (
    Aardvark          aardvark,
    u16               slave_addr,
    AardvarkI2cFlags  flags,
    u16               num_bytes,
    const u08 *       data_out,
    u16 *             num_written
);

/*
 * Do an atomic write+read to an I2C slave device by first
 * writing a stream of bytes to the I2C slave device and then
 * reading a stream of bytes back from the same slave device.
 * This API function returns the number of bytes written into
 * the num_written variable and the number of bytes read into
 * the num_read variable. The return value of the function is
 * the status given as (read_status << 8) | (write_status).
 */
int aa_i2c_write_read (
    Aardvark          aardvark,
    u16               slave_addr,
    AardvarkI2cFlags  flags,
    u16               out_num_bytes,
    const u08 *       out_data,
    u16 *             num_written,
    u16               in_num_bytes,
    u08 *             in_data,
    u16 *             num_read
);

/* Enable/Disable the Aardvark as an I2C slave device */
int aa_i2c_slave_enable (
    Aardvark  aardvark,
    u08       addr,
    u16       maxTxBytes,
    u16       maxRxBytes
);

/* Disable I2C slave mode. */
int aa_i2c_slave_disable (
    Aardvark  aardvark
);

/*
 * Set the slave response in the event the Aardvark is put
 * into slave mode and contacted by a Master.
 */
int aa_i2c_slave_set_response (
    Aardvark     aardvark,
    u08          num_bytes,
    const u08 *  data_out
);

/*
 * Return number of bytes written from a previous
 * Aardvark->I2C_master transmission. Since the transmission is
 * happening asynchronously with respect to the PC host
 * software, there could be responses queued up from many
 * previous write transactions.
 */
int aa_i2c_slave_write_stats (
    Aardvark  aardvark
);

/* Read the bytes from an I2C slave reception */
int aa_i2c_slave_read (
    Aardvark  aardvark,
    u08 *     addr,
    u16       num_bytes,
    u08 *     data_in
);

/* Extended functions that return status code */
int aa_i2c_slave_write_stats_ext (
    Aardvark  aardvark,
    u16 *     num_written
);

int aa_i2c_slave_read_ext (
    Aardvark  aardvark,
    u08 *     addr,
    u16       num_bytes,
    u08 *     data_in,
    u16 *     num_read
);

/*
 * Configure the I2C pullup resistors.
 * This is only supported on hardware versions >= 2.00
 */
#define AA_I2C_PULLUP_NONE  0x00
#define AA_I2C_PULLUP_BOTH  0x03
#define AA_I2C_PULLUP_QUERY 0x80
int aa_i2c_pullup (
    Aardvark  aardvark,
    u08       pullup_mask
);
|
||||
|
||||
|
||||
|
||||
/*=========================================================================
| SPI API
 ========================================================================*/
/*
 * Set the SPI bit rate in kilohertz. If a zero is passed as the
 * bitrate, the bitrate is unchanged and the current bitrate is
 * returned.
 */
int aa_spi_bitrate (
    Aardvark  aardvark,
    int       bitrate_khz
);

/*
 * These configuration parameters specify how to clock the
 * bits that are sent and received on the Aardvark SPI
 * interface.
 *
 * The polarity option specifies which transition
 * constitutes the leading edge and which transition is the
 * falling edge. For example, AA_SPI_POL_RISING_FALLING
 * would configure the SPI to idle the SCK clock line low.
 * The clock would then transition low-to-high on the
 * leading edge and high-to-low on the trailing edge.
 *
 * The phase option determines whether to sample or setup on
 * the leading edge. For example, AA_SPI_PHASE_SAMPLE_SETUP
 * would configure the SPI to sample on the leading edge and
 * setup on the trailing edge.
 *
 * The bitorder option is used to indicate whether LSB or
 * MSB is shifted first.
 *
 * See the diagrams in the Aardvark datasheet for
 * more details.
 */
enum AardvarkSpiPolarity {
    AA_SPI_POL_RISING_FALLING = 0,
    AA_SPI_POL_FALLING_RISING = 1
};
#ifndef __cplusplus
typedef enum AardvarkSpiPolarity AardvarkSpiPolarity;
#endif

enum AardvarkSpiPhase {
    AA_SPI_PHASE_SAMPLE_SETUP = 0,
    AA_SPI_PHASE_SETUP_SAMPLE = 1
};
#ifndef __cplusplus
typedef enum AardvarkSpiPhase AardvarkSpiPhase;
#endif

enum AardvarkSpiBitorder {
    AA_SPI_BITORDER_MSB = 0,
    AA_SPI_BITORDER_LSB = 1
};
#ifndef __cplusplus
typedef enum AardvarkSpiBitorder AardvarkSpiBitorder;
#endif

/* Configure the SPI master or slave interface */
int aa_spi_configure (
    Aardvark             aardvark,
    AardvarkSpiPolarity  polarity,
    AardvarkSpiPhase     phase,
    AardvarkSpiBitorder  bitorder
);

/* Write a stream of bytes to the downstream SPI slave device. */
int aa_spi_write (
    Aardvark     aardvark,
    u16          out_num_bytes,
    const u08 *  data_out,
    u16          in_num_bytes,
    u08 *        data_in
);

/* Enable/Disable the Aardvark as an SPI slave device */
int aa_spi_slave_enable (
    Aardvark  aardvark
);

int aa_spi_slave_disable (
    Aardvark  aardvark
);

/*
 * Set the slave response in the event the Aardvark is put
 * into slave mode and contacted by a Master.
 */
int aa_spi_slave_set_response (
    Aardvark     aardvark,
    u08          num_bytes,
    const u08 *  data_out
);

/* Read the bytes from an SPI slave reception */
int aa_spi_slave_read (
    Aardvark  aardvark,
    u16       num_bytes,
    u08 *     data_in
);

/*
 * Change the output polarity on the SS line.
 *
 * Note: When configured as an SPI slave, the Aardvark will
 * always be setup with SS as active low. Hence this function
 * only affects the SPI master functions on the Aardvark.
 */
enum AardvarkSpiSSPolarity {
    AA_SPI_SS_ACTIVE_LOW  = 0,
    AA_SPI_SS_ACTIVE_HIGH = 1
};
#ifndef __cplusplus
typedef enum AardvarkSpiSSPolarity AardvarkSpiSSPolarity;
#endif

int aa_spi_master_ss_polarity (
    Aardvark               aardvark,
    AardvarkSpiSSPolarity  polarity
);
|
||||
|
||||
|
||||
|
||||
/*=========================================================================
| GPIO API
 ========================================================================*/
/*
 * The following enumerated type maps the named lines on the
 * Aardvark I2C/SPI line to bit positions in the GPIO API.
 * All GPIO API functions will index these lines through an
 * 8-bit masked value. Thus, each bit position in the mask
 * can be referred back to its corresponding line through the
 * enumerated type.
 */
enum AardvarkGpioBits {
    AA_GPIO_SCL  = 0x01,
    AA_GPIO_SDA  = 0x02,
    AA_GPIO_MISO = 0x04,
    AA_GPIO_SCK  = 0x08,
    AA_GPIO_MOSI = 0x10,
    AA_GPIO_SS   = 0x20
};
#ifndef __cplusplus
typedef enum AardvarkGpioBits AardvarkGpioBits;
#endif

/*
 * Configure the GPIO, specifying the direction of each bit.
 *
 * A call to this function will not change the value of the pullup
 * mask in the Aardvark. This is illustrated by the following
 * example:
 *   (1) Direction mask is first set to 0x00
 *   (2) Pullup is set to 0x01
 *   (3) Direction mask is set to 0x01
 *   (4) Direction mask is later set back to 0x00.
 *
 * The pullup will be active after (4).
 *
 * On Aardvark power-up, the default value of the direction
 * mask is 0x00.
 */
#define AA_GPIO_DIR_INPUT  0
#define AA_GPIO_DIR_OUTPUT 1
int aa_gpio_direction (
    Aardvark  aardvark,
    u08       direction_mask
);

/*
 * Enable an internal pullup on any of the GPIO input lines.
 *
 * Note: If a line is configured as an output, the pullup bit
 * for that line will be ignored, though that pullup bit will
 * be cached in case the line is later configured as an input.
 *
 * By default the pullup mask is 0x00.
 */
#define AA_GPIO_PULLUP_OFF 0
#define AA_GPIO_PULLUP_ON  1
int aa_gpio_pullup (
    Aardvark  aardvark,
    u08       pullup_mask
);

/*
 * Read the current digital values on the GPIO input lines.
 *
 * The bits will be ordered as described by AA_GPIO_BITS. If a
 * line is configured as an output, its corresponding bit
 * position in the mask will be undefined.
 */
int aa_gpio_get (
    Aardvark  aardvark
);

/*
 * Set the outputs on the GPIO lines.
 *
 * Note: If a line is configured as an input, it will not be
 * affected by this call, but the output value for that line
 * will be cached in the event that the line is later
 * configured as an output.
 */
int aa_gpio_set (
    Aardvark  aardvark,
    u08       value
);

/*
 * Block until there is a change on the GPIO input lines.
 * Pins configured as outputs will be ignored.
 *
 * The function will return either when a change has occurred or
 * the timeout expires. The timeout, specified in milliseconds, has
 * a precision of ~16 ms. The maximum allowable timeout is
 * approximately 4 seconds. If the timeout expires, this function
 * will return the current state of the GPIO lines.
 *
 * This function will return immediately with the current value
 * of the GPIO lines for the first invocation after any of the
 * following functions are called: aa_configure,
 * aa_gpio_direction, or aa_gpio_pullup.
 *
 * If the function aa_gpio_get is called before calling
 * aa_gpio_change, aa_gpio_change will only register any changes
 * from the value last returned by aa_gpio_get.
 */
int aa_gpio_change (
    Aardvark  aardvark,
    u16       timeout
);
|
||||
|
||||
|
||||
|
||||
|
||||
#ifdef __cplusplus
|
||||
}
|
||||
#endif
|
||||
|
||||
#endif /* __aardvark_h__ */
|
||||
BIN
Binary file not shown.
Executable
+80
@@ -0,0 +1,80 @@
|
||||
#!/bin/bash
# Start mem0 + reranker GPU container for ZeroClaw memory backend.
#
# Required env vars:
#   MEM0_LLM_API_KEY or ZAI_API_KEY — API key for the LLM used in fact extraction
#
# Optional env vars (with defaults):
#   MEM0_LLM_PROVIDER      — mem0 LLM provider (default: "openai" i.e. OpenAI-compatible)
#   MEM0_LLM_MODEL         — LLM model for fact extraction (default: "glm-5-turbo")
#   MEM0_LLM_BASE_URL      — LLM API base URL (default: "https://api.z.ai/api/coding/paas/v4")
#   MEM0_EMBEDDER_MODEL    — embedding model (default: "BAAI/bge-m3")
#   MEM0_EMBEDDER_DIMS     — embedding dimensions (default: "1024")
#   MEM0_EMBEDDER_DEVICE   — "cuda", "cpu", or "auto" (default: "cuda")
#   MEM0_VECTOR_COLLECTION — Qdrant collection name (default: "zeroclaw_mem0")
#   RERANKER_MODEL         — reranker model (default: "BAAI/bge-reranker-v2-m3")
#   RERANKER_DEVICE        — "cuda" or "cpu" (default: "cuda")
#   MEM0_PORT              — mem0 server port (default: 8765)
#   RERANKER_PORT          — reranker server port (default: 8678)
#   CONTAINER_IMAGE        — base container image (default: docker.io/kyuz0/amd-strix-halo-comfyui:latest)
#   CONTAINER_NAME         — container name (default: mem0-gpu)
#   DATA_DIR               — host path for Qdrant data (default: ~/mem0-data)
#   SCRIPT_DIR             — host path for server scripts (default: directory of this script)
set -e

# Resolve script directory for mounting server scripts
SCRIPT_DIR="${SCRIPT_DIR:-$(cd "$(dirname "$0")" && pwd)}"

# API key — accept either name; abort with a clear message if neither is set
export MEM0_LLM_API_KEY="${MEM0_LLM_API_KEY:-${ZAI_API_KEY:?MEM0_LLM_API_KEY or ZAI_API_KEY must be set}}"

# Defaults
MEM0_LLM_MODEL="${MEM0_LLM_MODEL:-glm-5-turbo}"
MEM0_LLM_BASE_URL="${MEM0_LLM_BASE_URL:-https://api.z.ai/api/coding/paas/v4}"
MEM0_PORT="${MEM0_PORT:-8765}"
RERANKER_PORT="${RERANKER_PORT:-8678}"
CONTAINER_IMAGE="${CONTAINER_IMAGE:-docker.io/kyuz0/amd-strix-halo-comfyui:latest}"
CONTAINER_NAME="${CONTAINER_NAME:-mem0-gpu}"
DATA_DIR="${DATA_DIR:-$HOME/mem0-data}"

# Stop existing CPU services (if any) — "|| true" keeps set -e from aborting
# when no matching process exists
kill -9 $(pgrep -f "mem0-server.py") 2>/dev/null || true
kill -9 $(pgrep -f "reranker-server.py") 2>/dev/null || true

# Stop existing container
podman stop "$CONTAINER_NAME" 2>/dev/null || true
podman rm "$CONTAINER_NAME" 2>/dev/null || true

# GPU access: /dev/dri + /dev/kfd and the video/render groups (ROCm).
# Optional env vars are only forwarded into the container when set, so the
# in-container defaults still apply otherwise.
podman run -d --name "$CONTAINER_NAME" \
    --device /dev/dri --device /dev/kfd \
    --group-add video --group-add render \
    --restart unless-stopped \
    -p "$MEM0_PORT:$MEM0_PORT" -p "$RERANKER_PORT:$RERANKER_PORT" \
    -v "$DATA_DIR":/root/mem0-data:Z \
    -v "$SCRIPT_DIR/mem0-server.py":/app/mem0-server.py:ro,Z \
    -v "$SCRIPT_DIR/reranker-server.py":/app/reranker-server.py:ro,Z \
    -v "$HOME/.cache/huggingface":/root/.cache/huggingface:Z \
    -e MEM0_LLM_API_KEY="$MEM0_LLM_API_KEY" \
    -e ZAI_API_KEY="$MEM0_LLM_API_KEY" \
    -e MEM0_LLM_MODEL="$MEM0_LLM_MODEL" \
    -e MEM0_LLM_BASE_URL="$MEM0_LLM_BASE_URL" \
    ${MEM0_LLM_PROVIDER:+-e MEM0_LLM_PROVIDER="$MEM0_LLM_PROVIDER"} \
    ${MEM0_EMBEDDER_MODEL:+-e MEM0_EMBEDDER_MODEL="$MEM0_EMBEDDER_MODEL"} \
    ${MEM0_EMBEDDER_DIMS:+-e MEM0_EMBEDDER_DIMS="$MEM0_EMBEDDER_DIMS"} \
    ${MEM0_EMBEDDER_DEVICE:+-e MEM0_EMBEDDER_DEVICE="$MEM0_EMBEDDER_DEVICE"} \
    ${MEM0_VECTOR_COLLECTION:+-e MEM0_VECTOR_COLLECTION="$MEM0_VECTOR_COLLECTION"} \
    ${RERANKER_MODEL:+-e RERANKER_MODEL="$RERANKER_MODEL"} \
    ${RERANKER_DEVICE:+-e RERANKER_DEVICE="$RERANKER_DEVICE"} \
    -e RERANKER_PORT="$RERANKER_PORT" \
    -e RERANKER_URL="http://127.0.0.1:$RERANKER_PORT/rerank" \
    -e TORCH_ROCM_AOTRITON_ENABLE_EXPERIMENTAL=1 \
    -e HOME=/root \
    "$CONTAINER_IMAGE" \
    bash -c "pip install -q FlagEmbedding mem0ai flask httpx qdrant-client 2>&1 | tail -3; echo '=== Starting reranker (GPU) on :$RERANKER_PORT ==='; python3 /app/reranker-server.py & sleep 3; echo '=== Starting mem0 (GPU) on :$MEM0_PORT ==='; exec python3 /app/mem0-server.py"

echo "Container started, waiting for init..."
sleep 15
echo "=== Container logs ==="
podman logs "$CONTAINER_NAME" 2>&1 | tail -25
echo "=== Port check ==="
ss -tlnp | grep "$MEM0_PORT\|$RERANKER_PORT" || echo "Ports not yet ready, check: podman logs $CONTAINER_NAME"
|
||||
@@ -0,0 +1,288 @@
|
||||
"""Minimal OpenMemory-compatible REST server wrapping mem0 Python SDK."""
|
||||
import asyncio
|
||||
import json, os, uuid, httpx
|
||||
from datetime import datetime, timezone
|
||||
from fastapi import FastAPI, Query
|
||||
from pydantic import BaseModel
|
||||
from typing import Optional
|
||||
from mem0 import Memory
|
||||
|
||||
app = FastAPI()

# Endpoint of the sidecar reranker service (see reranker-server.py).
RERANKER_URL = os.environ.get("RERANKER_URL", "http://127.0.0.1:8678/rerank")

# Default fact-extraction prompt used when a request supplies none.
CUSTOM_EXTRACTION_PROMPT = """You are a memory extraction specialist for a Cantonese/Chinese chat assistant.

Extract ONLY important, persistent facts from the conversation. Rules:
1. Extract personal preferences, habits, relationships, names, locations
2. Extract decisions, plans, and commitments people make
3. SKIP small talk, greetings, reactions ("ok", "哈哈", "係呀")
4. SKIP temporary states ("我依家食緊飯") unless they reveal a habit
5. Keep facts in the ORIGINAL language (Cantonese/Chinese/English)
6. For each fact, note WHO it's about (use their name or identifier if known)
7. Merge/update existing facts rather than creating duplicates

Return a list of facts in JSON format: {"facts": ["fact1", "fact2", ...]}
"""

# Prompt used by the /api/v1/memories/procedural endpoint to extract
# reusable "how to" patterns from full conversation traces.
PROCEDURAL_EXTRACTION_PROMPT = """You are a procedural memory specialist for an AI assistant.

Extract HOW-TO patterns and reusable procedures from the conversation trace. Rules:
1. Identify step-by-step procedures the assistant followed to accomplish a task
2. Extract tool usage patterns: which tools were called, in what order, with what arguments
3. Capture decision points: why the assistant chose one approach over another
4. Note error-recovery patterns: what failed, how it was fixed
5. Keep the procedure generic enough to apply to similar future tasks
6. Preserve technical details (commands, file paths, API calls) that are reusable
7. SKIP greetings, small talk, and conversational filler
8. Format each procedure as: "To [goal]: [step1] -> [step2] -> ... -> [result]"

Return a list of procedures in JSON format: {"facts": ["procedure1", "procedure2", ...]}
"""

# ── Configurable via environment variables ─────────────────────────
# LLM (for fact extraction when infer=true)
MEM0_LLM_PROVIDER = os.environ.get("MEM0_LLM_PROVIDER", "openai")  # "openai" (compatible), "anthropic", etc.
MEM0_LLM_MODEL = os.environ.get("MEM0_LLM_MODEL", "glm-5-turbo")
# Accept either MEM0_LLM_API_KEY or the legacy ZAI_API_KEY name.
MEM0_LLM_API_KEY = os.environ.get("MEM0_LLM_API_KEY") or os.environ.get("ZAI_API_KEY", "")
MEM0_LLM_BASE_URL = os.environ.get("MEM0_LLM_BASE_URL", "https://api.z.ai/api/coding/paas/v4")

# Embedder
MEM0_EMBEDDER_PROVIDER = os.environ.get("MEM0_EMBEDDER_PROVIDER", "huggingface")  # "huggingface", "openai", etc.
MEM0_EMBEDDER_MODEL = os.environ.get("MEM0_EMBEDDER_MODEL", "BAAI/bge-m3")
MEM0_EMBEDDER_DIMS = int(os.environ.get("MEM0_EMBEDDER_DIMS", "1024"))
MEM0_EMBEDDER_DEVICE = os.environ.get("MEM0_EMBEDDER_DEVICE", "cuda")  # "cuda", "cpu", "auto"

# Vector store
MEM0_VECTOR_PROVIDER = os.environ.get("MEM0_VECTOR_PROVIDER", "qdrant")  # "qdrant", "chroma", etc.
MEM0_VECTOR_COLLECTION = os.environ.get("MEM0_VECTOR_COLLECTION", "zeroclaw_mem0")
MEM0_VECTOR_PATH = os.environ.get("MEM0_VECTOR_PATH", os.path.expanduser("~/mem0-data/qdrant"))

# mem0 SDK configuration assembled from the env settings above.
# NOTE(review): "openai_base_url" is passed regardless of provider — presumably
# ignored by non-OpenAI-compatible providers; confirm against the mem0 SDK.
config = {
    "llm": {
        "provider": MEM0_LLM_PROVIDER,
        "config": {
            "model": MEM0_LLM_MODEL,
            "api_key": MEM0_LLM_API_KEY,
            "openai_base_url": MEM0_LLM_BASE_URL,
        },
    },
    "embedder": {
        "provider": MEM0_EMBEDDER_PROVIDER,
        "config": {
            "model": MEM0_EMBEDDER_MODEL,
            "embedding_dims": MEM0_EMBEDDER_DIMS,
            "model_kwargs": {"device": MEM0_EMBEDDER_DEVICE},
        },
    },
    "vector_store": {
        "provider": MEM0_VECTOR_PROVIDER,
        "config": {
            "collection_name": MEM0_VECTOR_COLLECTION,
            "embedding_model_dims": MEM0_EMBEDDER_DIMS,
            "path": MEM0_VECTOR_PATH,
        },
    },
    "custom_fact_extraction_prompt": CUSTOM_EXTRACTION_PROMPT,
}

# Single shared Memory instance; loads the embedder model at import time.
m = Memory.from_config(config)
|
||||
|
||||
|
||||
def rerank_results(query: str, items: list, top_k: int = 10) -> list:
    """Rerank search results using the external reranker service.

    Posts the query plus each item's "memory" text to RERANKER_URL and
    returns the items reordered by reranker score, truncated to ``top_k``.

    Degrades gracefully: if the reranker is unreachable or returns an
    unusable payload, the items are returned in their original order,
    still truncated to ``top_k`` (callers over-fetch candidates and rely
    on the result being at most ``top_k`` long).
    """
    if not items:
        return items
    documents = [item.get("memory", "") for item in items]
    try:
        resp = httpx.post(
            RERANKER_URL,
            json={"query": query, "documents": documents, "top_k": top_k},
            timeout=10.0,
        )
        resp.raise_for_status()
        ranked = resp.json().get("results", [])
        # Guard against out-of-range indices from a misbehaving reranker.
        return [
            items[r["index"]]
            for r in ranked
            if isinstance(r.get("index"), int) and 0 <= r["index"] < len(items)
        ]
    except Exception as e:
        print(f"Reranker failed, using original order: {e}")
        # Fix: previously the fallback returned ALL candidates (up to 3x the
        # requested size); honor top_k here as well.
        return items[:top_k]
|
||||
|
||||
|
||||
class AddMemoryRequest(BaseModel):
    # Request body for POST /api/v1/memories/.
    user_id: str                                # memory namespace (mem0 user_id)
    text: str                                   # raw text to extract memories from
    metadata: Optional[dict] = None             # arbitrary metadata stored with the memory
    infer: bool = True                          # NOTE(review): accepted but currently unused by add_memory — confirm
    app: Optional[str] = None                   # NOTE(review): accepted but currently unused — confirm
    custom_instructions: Optional[str] = None   # overrides the default extraction prompt
|
||||
|
||||
|
||||
@app.post("/api/v1/memories/")
async def add_memory(req: AddMemoryRequest):
    """Store a memory for ``req.user_id``, extracting facts via the LLM.

    Runs the (blocking) mem0 SDK call in a worker thread so the event
    loop stays responsive.
    """
    # Client-supplied prompt wins; otherwise fall back to the server default.
    extraction_prompt = req.custom_instructions or CUSTOM_EXTRACTION_PROMPT
    result = await asyncio.to_thread(
        m.add,
        req.text,
        user_id=req.user_id,
        metadata=req.metadata or {},
        prompt=extraction_prompt,
    )
    return {"id": str(uuid.uuid4()), "status": "ok", "result": result}
|
||||
|
||||
|
||||
class ProceduralMemoryRequest(BaseModel):
    # Request body for POST /api/v1/memories/procedural.
    user_id: str                     # memory namespace (mem0 user_id)
    messages: list[dict]             # role/content dicts for a full conversation turn
    metadata: Optional[dict] = None  # extra metadata merged onto the "procedural" marker
|
||||
|
||||
|
||||
@app.post("/api/v1/memories/procedural")
async def add_procedural_memory(req: ProceduralMemoryRequest):
    """Store a conversation trace as procedural memory.

    Accepts a list of role/content message dicts representing a full
    conversation turn (including tool calls) and feeds them to mem0's
    native message-list support with the procedural extraction prompt,
    so reusable "how to" patterns are learned.
    """
    # Tag stored memories as procedural; caller-supplied metadata is merged
    # on top and may extend (or override) the marker.
    metadata = {"type": "procedural", **(req.metadata or {})}

    result = await asyncio.to_thread(
        m.add,
        req.messages,
        user_id=req.user_id,
        metadata=metadata,
        prompt=PROCEDURAL_EXTRACTION_PROMPT,
    )
    return {"id": str(uuid.uuid4()), "status": "ok", "result": result}
|
||||
|
||||
|
||||
def _parse_mem0_results(raw_results) -> list:
|
||||
raw = raw_results.get("results", raw_results) if isinstance(raw_results, dict) else raw_results
|
||||
items = []
|
||||
for r in raw:
|
||||
item = r if isinstance(r, dict) else {"memory": str(r)}
|
||||
items.append({
|
||||
"id": item.get("id", str(uuid.uuid4())),
|
||||
"memory": item.get("memory", item.get("text", "")),
|
||||
"created_at": item.get("created_at", datetime.now(timezone.utc).isoformat()),
|
||||
"metadata_": item.get("metadata", {}),
|
||||
})
|
||||
return items
|
||||
|
||||
|
||||
def _parse_iso_timestamp(value: str) -> Optional[datetime]:
|
||||
"""Parse an ISO 8601 timestamp string, returning None on failure."""
|
||||
try:
|
||||
dt = datetime.fromisoformat(value)
|
||||
if dt.tzinfo is None:
|
||||
dt = dt.replace(tzinfo=timezone.utc)
|
||||
return dt
|
||||
except (ValueError, TypeError):
|
||||
return None
|
||||
|
||||
|
||||
def _item_created_at(item: dict) -> Optional[datetime]:
|
||||
"""Extract created_at from an item as a timezone-aware datetime."""
|
||||
raw = item.get("created_at")
|
||||
if raw is None:
|
||||
return None
|
||||
if isinstance(raw, datetime):
|
||||
if raw.tzinfo is None:
|
||||
raw = raw.replace(tzinfo=timezone.utc)
|
||||
return raw
|
||||
return _parse_iso_timestamp(str(raw))
|
||||
|
||||
|
||||
def _apply_post_filters(
|
||||
items: list,
|
||||
created_after: Optional[str],
|
||||
created_before: Optional[str],
|
||||
) -> list:
|
||||
"""Filter items by created_after / created_before timestamps (post-query)."""
|
||||
after_dt = _parse_iso_timestamp(created_after) if created_after else None
|
||||
before_dt = _parse_iso_timestamp(created_before) if created_before else None
|
||||
if after_dt is None and before_dt is None:
|
||||
return items
|
||||
filtered = []
|
||||
for item in items:
|
||||
ts = _item_created_at(item)
|
||||
if ts is None:
|
||||
# Keep items without a parseable timestamp
|
||||
filtered.append(item)
|
||||
continue
|
||||
if after_dt and ts < after_dt:
|
||||
continue
|
||||
if before_dt and ts > before_dt:
|
||||
continue
|
||||
filtered.append(item)
|
||||
return filtered
|
||||
|
||||
|
||||
@app.get("/api/v1/memories/")
async def list_or_search_memories(
    user_id: str = Query(...),
    search_query: Optional[str] = Query(None),
    size: int = Query(10),
    rerank: bool = Query(True),
    created_after: Optional[str] = Query(None),
    created_before: Optional[str] = Query(None),
    metadata_filter: Optional[str] = Query(None),
):
    """List all memories for a user, or semantically search them.

    With ``search_query``: vector search via mem0, optional time-window
    post-filtering, then optional reranking down to ``size`` items.
    Without it: return the user's memories (post-filtered only).

    NOTE(review): ``size`` is NOT applied in the no-query (list) branch —
    all of the user's memories are returned; confirm this is intended.
    """
    # Build mem0 SDK filters dict from metadata_filter JSON param.
    # NOTE(review): invalid JSON is silently ignored (no 400) — confirm.
    sdk_filters = None
    if metadata_filter:
        try:
            sdk_filters = json.loads(metadata_filter)
        except json.JSONDecodeError:
            sdk_filters = None

    if search_query:
        # Fetch more results than needed so reranker has candidates to work with
        fetch_size = min(size * 3, 50)
        results = await asyncio.to_thread(m.search,
            search_query,
            user_id=user_id,
            limit=fetch_size,
            filters=sdk_filters,
        )
        items = _parse_mem0_results(results)
        items = _apply_post_filters(items, created_after, created_before)
        if rerank and items:
            # rerank_results truncates to size (top_k) itself.
            items = rerank_results(search_query, items, top_k=size)
        else:
            items = items[:size]
        return {"items": items, "total": len(items)}
    else:
        results = await asyncio.to_thread(m.get_all,user_id=user_id, filters=sdk_filters)
        items = _parse_mem0_results(results)
        items = _apply_post_filters(items, created_after, created_before)
        return {"items": items, "total": len(items)}
|
||||
|
||||
|
||||
@app.delete("/api/v1/memories/{memory_id}")
async def delete_memory(memory_id: str):
    """Delete a memory by id.

    Always reports success: deletion is deliberately best-effort, so an
    unknown id (or any SDK error) is not surfaced to the client.
    """
    try:
        await asyncio.to_thread(m.delete, memory_id)
    except Exception:
        pass  # best-effort by design — never fail the client on delete
    return {"status": "ok"}
|
||||
|
||||
|
||||
@app.get("/api/v1/memories/{memory_id}/history")
async def get_memory_history(memory_id: str):
    """Return the edit history of a specific memory.

    On any SDK failure, responds with an empty history and the error
    message instead of raising.
    """
    try:
        history = await asyncio.to_thread(m.history, memory_id)
        # The SDK may hand back a list, a {"results": [...]} dict, or a
        # bare value — normalize everything to a list of dicts.
        if isinstance(history, list):
            raw = history
        elif isinstance(history, dict):
            raw = history.get("results", history)
        else:
            raw = [history]
        entries = [h if isinstance(h, dict) else {"event": str(h)} for h in raw]
        return {"memory_id": memory_id, "history": entries}
    except Exception as e:
        return {"memory_id": memory_id, "history": [], "error": str(e)}
|
||||
|
||||
|
||||
if __name__ == "__main__":
    import uvicorn
    # Honor MEM0_PORT so the port the server binds matches the port the
    # start script maps into the container (previously hardcoded to 8765,
    # silently breaking any non-default MEM0_PORT).
    uvicorn.run(app, host="0.0.0.0", port=int(os.environ.get("MEM0_PORT", "8765")))
|
||||
@@ -0,0 +1,50 @@
|
||||
from flask import Flask, request, jsonify
|
||||
from FlagEmbedding import FlagReranker
|
||||
import os, torch
|
||||
|
||||
app = Flask(__name__)
# Lazily-initialized shared FlagReranker instance (see get_reranker()).
reranker = None

# ── Configurable via environment variables ─────────────────────────
RERANKER_MODEL = os.environ.get("RERANKER_MODEL", "BAAI/bge-reranker-v2-m3")
# Default to GPU when available, otherwise CPU.
RERANKER_DEVICE = os.environ.get("RERANKER_DEVICE", "cuda" if torch.cuda.is_available() else "cpu")
RERANKER_PORT = int(os.environ.get("RERANKER_PORT", "8678"))
|
||||
|
||||
def get_reranker():
    """Return the shared FlagReranker, constructing it on first use.

    Model loading is expensive, so the instance is created lazily and
    cached in the module-level ``reranker`` global.
    """
    global reranker
    if reranker is not None:
        return reranker
    reranker = FlagReranker(RERANKER_MODEL, use_fp16=True, device=RERANKER_DEVICE)
    return reranker
|
||||
|
||||
@app.route('/rerank', methods=['POST'])
def rerank():
    """Score candidate documents against a query with the cross-encoder.

    Request JSON: {"query": str, "documents": [str, ...], "top_k": int?}
    Response JSON: {"results": [{"index", "document", "score"}, ...]}
    sorted by descending score and truncated to top_k.
    """
    # Fix: request.json raises on a missing/malformed body or wrong
    # Content-Type, producing a 415/500; get_json(silent=True) lets the
    # validation below answer with the intended 400 instead.
    data = request.get_json(silent=True) or {}
    query = data.get('query', '')
    documents = data.get('documents', [])
    top_k = data.get('top_k', len(documents))

    if not query or not documents:
        return jsonify({'error': 'query and documents required'}), 400

    pairs = [[query, doc] for doc in documents]
    scores = get_reranker().compute_score(pairs)
    # compute_score returns a bare float for a single pair; normalize.
    if isinstance(scores, float):
        scores = [scores]

    results = sorted(
        [{'index': i, 'document': doc, 'score': score}
         for i, (doc, score) in enumerate(zip(documents, scores))],
        key=lambda x: x['score'], reverse=True
    )[:top_k]

    return jsonify({'results': results})
|
||||
|
||||
@app.route('/health', methods=['GET'])
def health():
    # Liveness probe: reports which model/device this instance serves.
    # Note: does NOT force model loading (see get_reranker()).
    return jsonify({'status': 'ok', 'model': RERANKER_MODEL, 'device': RERANKER_DEVICE})
|
||||
|
||||
if __name__ == '__main__':
    print(f'Loading reranker model ({RERANKER_MODEL}) on {RERANKER_DEVICE}...')
    # Eagerly load the model before serving so the first /rerank request
    # does not pay the (slow) model-load cost.
    get_reranker()
    print(f'Reranker server ready on :{RERANKER_PORT}')
    app.run(host='0.0.0.0', port=RERANKER_PORT)
|
||||
@@ -0,0 +1,325 @@
|
||||
# Aardvark Integration — How It Works
|
||||
|
||||
A plain-language walkthrough of every piece and how they connect.
|
||||
|
||||
---
|
||||
|
||||
## The Big Picture
|
||||
|
||||
```
|
||||
┌──────────────────────────────────────────────────────────────┐
|
||||
│ STARTUP (boot) │
|
||||
│ │
|
||||
│ 1. Ask aardvark-sys: "any adapters plugged in?" │
|
||||
│ 2. For each one found → register a device + transport │
|
||||
│ 3. Load tools only if hardware was found │
|
||||
└──────────────────────────────────────────┬───────────────────┘
|
||||
│
|
||||
┌──────────────────────▼──────────────────────┐
|
||||
│ RUNTIME (agent loop) │
|
||||
│ │
|
||||
│ User: "scan i2c bus" │
|
||||
│ → agent calls i2c_scan tool │
|
||||
│ → tool builds a ZcCommand │
|
||||
│ → AardvarkTransport sends to hardware │
|
||||
│ → response flows back as text │
|
||||
└──────────────────────────────────────────────┘
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Layer by Layer
|
||||
|
||||
### Layer 1 — `aardvark-sys` (the USB talker)
|
||||
|
||||
**File:** `crates/aardvark-sys/src/lib.rs`
|
||||
|
||||
This is the only layer that ever touches the raw C library.
|
||||
Think of it as a thin translator: it turns C function calls into safe Rust.
|
||||
|
||||
**Algorithm:**
|
||||
|
||||
```
|
||||
find_devices()
|
||||
→ call aa_find_devices(16, buf) // ask C lib how many adapters
|
||||
→ return Vec of port numbers // [0, 1, ...] one per adapter
|
||||
|
||||
open_port(port)
|
||||
→ call aa_open(port) // open that specific adapter
|
||||
→ if handle ≤ 0, return OpenFailed
|
||||
→ else return AardvarkHandle{ _port: handle }
|
||||
|
||||
i2c_scan(handle)
|
||||
→ for addr in 0x08..=0x77 // every valid 7-bit address
|
||||
try aa_i2c_read(addr, 1 byte) // knock on the door
|
||||
if ACK → add to list // device answered
|
||||
→ return list of live addresses
|
||||
|
||||
i2c_read(handle, addr, len)
|
||||
→ aa_i2c_read(addr, len bytes)
|
||||
→ return bytes as Vec<u8>
|
||||
|
||||
i2c_write(handle, addr, data)
|
||||
→ aa_i2c_write(addr, data)
|
||||
|
||||
spi_transfer(handle, bytes_to_send)
|
||||
→ aa_spi_write(bytes) // full-duplex: sends + receives
|
||||
→ return received bytes
|
||||
|
||||
gpio_set(handle, direction, value)
|
||||
→ aa_gpio_direction(direction) // which pins are outputs
|
||||
→ aa_gpio_put(value) // set output levels
|
||||
|
||||
gpio_get(handle)
|
||||
→ aa_gpio_get() // read all pin levels as bitmask
|
||||
|
||||
Drop(handle)
|
||||
→ aa_close(handle._port) // always close on drop
|
||||
```
|
||||
|
||||
**In stub mode** (no SDK): every method returns `Err(NotFound)` immediately. `find_devices()` returns `[]`. Nothing crashes.
|
||||
|
||||
---
|
||||
|
||||
### Layer 2 — `AardvarkTransport` (the bridge)
|
||||
|
||||
**File:** `src/hardware/aardvark.rs`
|
||||
|
||||
The rest of ZeroClaw speaks a single language: `ZcCommand` → `ZcResponse`.
|
||||
`AardvarkTransport` translates between that protocol and the aardvark-sys calls above.
|
||||
|
||||
**Algorithm:**
|
||||
|
||||
```
|
||||
send(ZcCommand) → ZcResponse
|
||||
|
||||
extract command name from cmd.name
|
||||
extract parameters from cmd.params (serde_json values)
|
||||
|
||||
match cmd.name:
|
||||
|
||||
"i2c_scan" → open handle → call i2c_scan()
|
||||
→ format found addresses as hex list
|
||||
→ return ZcResponse{ output: "0x48, 0x68" }
|
||||
|
||||
"i2c_read" → parse addr (hex string) + len (number)
|
||||
→ open handle → i2c_enable(bitrate)
|
||||
→ call i2c_read(addr, len)
|
||||
→ format bytes as hex
|
||||
→ return ZcResponse{ output: "0xAB 0xCD" }
|
||||
|
||||
"i2c_write" → parse addr + data bytes
|
||||
→ open handle → i2c_write(addr, data)
|
||||
→ return ZcResponse{ output: "ok" }
|
||||
|
||||
"spi_transfer" → parse bytes_hex string → decode to Vec<u8>
|
||||
→ open handle → spi_enable(bitrate)
|
||||
→ spi_transfer(bytes)
|
||||
→ return received bytes as hex
|
||||
|
||||
"gpio_set" → parse direction + value bitmasks
|
||||
→ open handle → gpio_set(dir, val)
|
||||
→ return ZcResponse{ output: "ok" }
|
||||
|
||||
"gpio_get" → open handle → gpio_get()
|
||||
→ return bitmask value as string
|
||||
|
||||
on any AardvarkError → return ZcResponse{ error: "..." }
|
||||
```
|
||||
|
||||
**Key design choice — lazy open:** The handle is opened fresh for every command and dropped at the end. This means no held connection, no state to clean up, and no "is it still open?" logic anywhere.
|
||||
|
||||
---
|
||||
|
||||
### Layer 3 — Tools (what the agent calls)
|
||||
|
||||
**File:** `src/hardware/aardvark_tools.rs`
|
||||
|
||||
Each tool is a thin wrapper. It:
|
||||
1. Validates the agent's JSON input
|
||||
2. Resolves which physical device to use
|
||||
3. Builds a `ZcCommand`
|
||||
4. Calls `AardvarkTransport.send()`
|
||||
5. Returns the result as text
|
||||
|
||||
```
|
||||
I2cScanTool.call(args)
|
||||
→ look up "device" in args (default: "aardvark0")
|
||||
→ find that device in the registry
|
||||
→ build ZcCommand{ name: "i2c_scan", params: {} }
|
||||
→ send to AardvarkTransport
|
||||
→ return "Found: 0x48, 0x68" (or "No devices found")
|
||||
|
||||
I2cReadTool.call(args)
|
||||
→ require args["addr"] and args["len"]
|
||||
→ build ZcCommand{ name: "i2c_read", params: {addr, len} }
|
||||
→ send → return hex bytes
|
||||
|
||||
I2cWriteTool.call(args)
|
||||
→ require args["addr"] and args["data"] (hex or array)
|
||||
→ build ZcCommand{ name: "i2c_write", params: {addr, data} }
|
||||
→ send → return "ok" or error
|
||||
|
||||
SpiTransferTool.call(args)
|
||||
→ require args["bytes"] (hex string)
|
||||
→ build ZcCommand{ name: "spi_transfer", params: {bytes} }
|
||||
→ send → return received bytes
|
||||
|
||||
GpioAardvarkTool.call(args)
|
||||
→ require args["direction"] + args["value"] (set)
|
||||
OR no extra args (get)
|
||||
→ build appropriate ZcCommand
|
||||
→ send → return result
|
||||
|
||||
DatasheetTool.call(args)
|
||||
→ action = args["action"]: "search" | "download" | "list" | "read"
|
||||
→ "search": return a Google/vendor search URL for the device
|
||||
→ "download": fetch PDF from args["url"] → save to ~/.zeroclaw/hardware/datasheets/
|
||||
→ "list": scan the datasheets directory → return filenames
|
||||
→ "read": open a saved PDF and return its text
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
### Layer 4 — Device Registry (the address book)
|
||||
|
||||
**File:** `src/hardware/device.rs`
|
||||
|
||||
The registry is a runtime map of every connected device.
|
||||
Each entry stores: alias, kind, capabilities, transport handle.
|
||||
|
||||
```
|
||||
register("aardvark", vid=0x2b76, ...)
|
||||
→ DeviceKind::from_vid(0x2b76) → DeviceKind::Aardvark
|
||||
→ DeviceRuntime::from_kind() → DeviceRuntime::Aardvark
|
||||
→ assign alias "aardvark0" (then "aardvark1" for second, etc.)
|
||||
→ store entry in HashMap
|
||||
|
||||
attach_transport("aardvark0", AardvarkTransport, capabilities{i2c,spi,gpio})
|
||||
→ store Arc<dyn Transport> in the entry
|
||||
|
||||
has_aardvark()
|
||||
→ any entry where kind == Aardvark → true / false
|
||||
|
||||
resolve_aardvark_device(args)
|
||||
→ read "device" param (default: "aardvark0")
|
||||
→ look up alias in HashMap
|
||||
→ return (alias, DeviceContext{ transport, capabilities })
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
### Layer 5 — `boot()` (startup wiring)
|
||||
|
||||
**File:** `src/hardware/mod.rs`
|
||||
|
||||
`boot()` runs once at startup. For Aardvark:
|
||||
|
||||
```
|
||||
boot()
|
||||
...
|
||||
aardvark_ports = aardvark_sys::AardvarkHandle::find_devices()
|
||||
// → [] in stub mode, [0] if one adapter is plugged in
|
||||
|
||||
for (i, port) in aardvark_ports:
|
||||
alias = registry.register("aardvark", vid=0x2b76, ...)
|
||||
// → "aardvark0", "aardvark1", ...
|
||||
|
||||
transport = AardvarkTransport::new(port, bitrate=100kHz)
|
||||
registry.attach_transport(alias, transport, {i2c:true, spi:true, gpio:true})
|
||||
|
||||
log "[registry] aardvark0 ready → Total Phase port 0"
|
||||
...
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
### Layer 6 — Tool Registry (the loader)
|
||||
|
||||
**File:** `src/hardware/tool_registry.rs`
|
||||
|
||||
After `boot()`, the tool registry checks what hardware is present and loads
|
||||
only the relevant tools:
|
||||
|
||||
```
|
||||
ToolRegistry::load(devices)
|
||||
|
||||
# always loaded (Pico / GPIO)
|
||||
register: gpio_write, gpio_read, gpio_toggle, pico_flash, device_list, device_status
|
||||
|
||||
# only loaded if an Aardvark was found at boot
|
||||
if devices.has_aardvark():
|
||||
register: i2c_scan, i2c_read, i2c_write, spi_transfer, gpio_aardvark, datasheet
|
||||
```
|
||||
|
||||
This is why the `hardware_feature_registers_all_six_tools` test still passes in stub mode — `has_aardvark()` returns false, 0 extra tools load, count stays at 6.
|
||||
|
||||
---
|
||||
|
||||
## Full Flow Diagram
|
||||
|
||||
```
|
||||
SDK FILES aardvark-sys ZeroClaw core
|
||||
(vendor/) (crates/) (src/)
|
||||
─────────────────────────────────────────────────────────────────
|
||||
|
||||
aardvark.h ──► build.rs boot()
|
||||
aardvark.so (bindgen) ──► find_devices()
|
||||
│ │
|
||||
bindings.rs │ vec![0] (one adapter)
|
||||
│ ▼
|
||||
lib.rs register("aardvark0")
|
||||
AardvarkHandle attach_transport(AardvarkTransport)
|
||||
│ │
|
||||
│ ▼
|
||||
│ ToolRegistry::load()
|
||||
│ has_aardvark() == true
|
||||
│ → load 6 aardvark tools
|
||||
│
|
||||
─────────────────────────────────────────────────────────────────
|
||||
|
||||
USER MESSAGE: "scan the i2c bus"
|
||||
|
||||
agent loop
|
||||
│
|
||||
▼
|
||||
I2cScanTool.call()
|
||||
│
|
||||
▼
|
||||
resolve_aardvark_device("aardvark0")
|
||||
│ returns transport Arc
|
||||
▼
|
||||
AardvarkTransport.send(ZcCommand{ name: "i2c_scan" })
|
||||
│
|
||||
▼
|
||||
AardvarkHandle::open_port(0) ← opens USB connection
|
||||
│
|
||||
▼
|
||||
aa_i2c_read(0x08..0x77) ← probes each address
|
||||
│
|
||||
▼
|
||||
AardvarkHandle dropped ← USB connection closed
|
||||
│
|
||||
▼
|
||||
ZcResponse{ output: "Found: 0x48, 0x68" }
|
||||
│
|
||||
▼
|
||||
agent sends reply to user: "I found two I2C devices: 0x48 and 0x68"
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Stub vs Real Side by Side
|
||||
|
||||
| | Stub mode (now) | Real hardware |
|
||||
|---|---|---|
|
||||
| `find_devices()` | returns `[]` | returns `[0]` |
|
||||
| `open_port(0)` | `Err(NotFound)` | opens USB, returns handle |
|
||||
| `i2c_scan()` | `[]` | probes bus, returns addresses |
|
||||
| tools loaded | only the 6 Pico tools | 6 Pico + 6 Aardvark tools |
|
||||
| `has_aardvark()` | `false` | `true` |
|
||||
| SDK needed | no | yes (`vendor/aardvark.h` + `.so`) |
|
||||
|
||||
The only code that changes when you plug in real hardware is inside
|
||||
`crates/aardvark-sys/src/lib.rs` — every other layer is already wired up
|
||||
and waiting.
|
||||
+15
-1
@@ -4,8 +4,22 @@ Localized documentation trees live here and under `docs/`.
|
||||
|
||||
## Locales
|
||||
|
||||
- العربية (Arabic): [ar/README.md](ar/README.md)
|
||||
- বাংলা (Bengali): [bn/README.md](bn/README.md)
|
||||
- Deutsch (German): [de/README.md](de/README.md)
|
||||
- Ελληνικά (Greek): [el/README.md](el/README.md)
|
||||
- Español (Spanish): [es/README.md](es/README.md)
|
||||
- Français (French): [fr/README.md](fr/README.md)
|
||||
- हिन्दी (Hindi): [hi/README.md](hi/README.md)
|
||||
- Italiano (Italian): [it/README.md](it/README.md)
|
||||
- 日本語 (Japanese): [ja/README.md](ja/README.md)
|
||||
- 한국어 (Korean): [ko/README.md](ko/README.md)
|
||||
- Português (Portuguese): [pt/README.md](pt/README.md)
|
||||
- Русский (Russian): [ru/README.md](ru/README.md)
|
||||
- Tagalog: [tl/README.md](tl/README.md)
|
||||
- Tiếng Việt (Vietnamese): [vi/README.md](vi/README.md)
|
||||
- Vietnamese (canonical): [`docs/vi/`](../vi/)
|
||||
- Chinese (Simplified): [`docs/i18n/zh-CN/`](zh-CN/)
|
||||
- 简体中文 (Chinese): [zh-CN/README.md](zh-CN/README.md)
|
||||
|
||||
## Structure
|
||||
|
||||
|
||||
@@ -0,0 +1,35 @@
|
||||
# ZeroClaw Documentation Hub (Arabic)
|
||||
|
||||
This locale hub is enabled for Arabic community support.
|
||||
|
||||
Last synchronized: **March 6, 2026**.
|
||||
|
||||
## Quick Links
|
||||
|
||||
- Arabic docs hub: [README.md](README.md)
|
||||
- Arabic summary: [SUMMARY.md](SUMMARY.md)
|
||||
- English docs hub: [../../README.md](../../README.md)
|
||||
- English summary: [../../SUMMARY.md](../../SUMMARY.md)
|
||||
|
||||
## Coverage Status
|
||||
|
||||
Current status: **hub-level support enabled**. Full document translation is in progress.
|
||||
|
||||
## Other Languages
|
||||
|
||||
- English: [../../../README.md](../../../README.md)
|
||||
- 简体中文: [../zh-CN/README.md](../zh-CN/README.md)
|
||||
- 日本語: [../ja/README.md](../ja/README.md)
|
||||
- 한국어: [../ko/README.md](../ko/README.md)
|
||||
- Tiếng Việt: [../vi/README.md](../vi/README.md)
|
||||
- Tagalog: [../tl/README.md](../tl/README.md)
|
||||
- Español: [../es/README.md](../es/README.md)
|
||||
- Português: [../pt/README.md](../pt/README.md)
|
||||
- Italiano: [../it/README.md](../it/README.md)
|
||||
- Deutsch: [../de/README.md](../de/README.md)
|
||||
- Français: [../fr/README.md](../fr/README.md)
|
||||
- العربية: [README.md](README.md)
|
||||
- हिन्दी: [../hi/README.md](../hi/README.md)
|
||||
- Русский: [../ru/README.md](../ru/README.md)
|
||||
- বাংলা: [../bn/README.md](../bn/README.md)
|
||||
- Ελληνικά: [../el/README.md](../el/README.md)
|
||||
@@ -0,0 +1,20 @@
|
||||
# ZeroClaw Docs Summary (Arabic)
|
||||
|
||||
This is the Arabic locale summary entry point.
|
||||
|
||||
Last synchronized: **March 6, 2026**.
|
||||
|
||||
## Entry Points
|
||||
|
||||
- Arabic docs hub: [README.md](README.md)
|
||||
- English docs hub: [../../README.md](../../README.md)
|
||||
- English unified summary: [../../SUMMARY.md](../../SUMMARY.md)
|
||||
|
||||
## Operator References (English Source)
|
||||
|
||||
- [../../commands-reference.md](../../commands-reference.md)
|
||||
- [../../config-reference.md](../../config-reference.md)
|
||||
- [../../providers-reference.md](../../providers-reference.md)
|
||||
- [../../channels-reference.md](../../channels-reference.md)
|
||||
- [../../operations-runbook.md](../../operations-runbook.md)
|
||||
- [../../troubleshooting.md](../../troubleshooting.md)
|
||||
@@ -0,0 +1,35 @@
|
||||
# ZeroClaw Documentation Hub (Bengali)
|
||||
|
||||
This locale hub is enabled for Bengali community support.
|
||||
|
||||
Last synchronized: **March 6, 2026**.
|
||||
|
||||
## Quick Links
|
||||
|
||||
- Bengali docs hub: [README.md](README.md)
|
||||
- Bengali summary: [SUMMARY.md](SUMMARY.md)
|
||||
- English docs hub: [../../README.md](../../README.md)
|
||||
- English summary: [../../SUMMARY.md](../../SUMMARY.md)
|
||||
|
||||
## Coverage Status
|
||||
|
||||
Current status: **hub-level support enabled**. Full document translation is in progress.
|
||||
|
||||
## Other Languages
|
||||
|
||||
- English: [../../../README.md](../../../README.md)
|
||||
- 简体中文: [../zh-CN/README.md](../zh-CN/README.md)
|
||||
- 日本語: [../ja/README.md](../ja/README.md)
|
||||
- 한국어: [../ko/README.md](../ko/README.md)
|
||||
- Tiếng Việt: [../vi/README.md](../vi/README.md)
|
||||
- Tagalog: [../tl/README.md](../tl/README.md)
|
||||
- Español: [../es/README.md](../es/README.md)
|
||||
- Português: [../pt/README.md](../pt/README.md)
|
||||
- Italiano: [../it/README.md](../it/README.md)
|
||||
- Deutsch: [../de/README.md](../de/README.md)
|
||||
- Français: [../fr/README.md](../fr/README.md)
|
||||
- العربية: [../ar/README.md](../ar/README.md)
|
||||
- हिन्दी: [../hi/README.md](../hi/README.md)
|
||||
- Русский: [../ru/README.md](../ru/README.md)
|
||||
- বাংলা: [README.md](README.md)
|
||||
- Ελληνικά: [../el/README.md](../el/README.md)
|
||||
@@ -0,0 +1,20 @@
|
||||
# ZeroClaw Docs Summary (Bengali)
|
||||
|
||||
This is the Bengali locale summary entry point.
|
||||
|
||||
Last synchronized: **March 6, 2026**.
|
||||
|
||||
## Entry Points
|
||||
|
||||
- Bengali docs hub: [README.md](README.md)
|
||||
- English docs hub: [../../README.md](../../README.md)
|
||||
- English unified summary: [../../SUMMARY.md](../../SUMMARY.md)
|
||||
|
||||
## Operator References (English Source)
|
||||
|
||||
- [../../commands-reference.md](../../commands-reference.md)
|
||||
- [../../config-reference.md](../../config-reference.md)
|
||||
- [../../providers-reference.md](../../providers-reference.md)
|
||||
- [../../channels-reference.md](../../channels-reference.md)
|
||||
- [../../operations-runbook.md](../../operations-runbook.md)
|
||||
- [../../troubleshooting.md](../../troubleshooting.md)
|
||||
@@ -0,0 +1,35 @@
|
||||
# ZeroClaw Documentation Hub (German)
|
||||
|
||||
This locale hub is enabled for German community support.
|
||||
|
||||
Last synchronized: **March 6, 2026**.
|
||||
|
||||
## Quick Links
|
||||
|
||||
- German docs hub: [README.md](README.md)
|
||||
- German summary: [SUMMARY.md](SUMMARY.md)
|
||||
- English docs hub: [../../README.md](../../README.md)
|
||||
- English summary: [../../SUMMARY.md](../../SUMMARY.md)
|
||||
|
||||
## Coverage Status
|
||||
|
||||
Current status: **hub-level support enabled**. Full document translation is in progress.
|
||||
|
||||
## Other Languages
|
||||
|
||||
- English: [../../../README.md](../../../README.md)
|
||||
- 简体中文: [../zh-CN/README.md](../zh-CN/README.md)
|
||||
- 日本語: [../ja/README.md](../ja/README.md)
|
||||
- 한국어: [../ko/README.md](../ko/README.md)
|
||||
- Tiếng Việt: [../vi/README.md](../vi/README.md)
|
||||
- Tagalog: [../tl/README.md](../tl/README.md)
|
||||
- Español: [../es/README.md](../es/README.md)
|
||||
- Português: [../pt/README.md](../pt/README.md)
|
||||
- Italiano: [../it/README.md](../it/README.md)
|
||||
- Deutsch: [README.md](README.md)
|
||||
- Français: [../fr/README.md](../fr/README.md)
|
||||
- العربية: [../ar/README.md](../ar/README.md)
|
||||
- हिन्दी: [../hi/README.md](../hi/README.md)
|
||||
- Русский: [../ru/README.md](../ru/README.md)
|
||||
- বাংলা: [../bn/README.md](../bn/README.md)
|
||||
- Ελληνικά: [../el/README.md](../el/README.md)
|
||||
@@ -0,0 +1,20 @@
|
||||
# ZeroClaw Docs Summary (German)
|
||||
|
||||
This is the German locale summary entry point.
|
||||
|
||||
Last synchronized: **March 6, 2026**.
|
||||
|
||||
## Entry Points
|
||||
|
||||
- German docs hub: [README.md](README.md)
|
||||
- English docs hub: [../../README.md](../../README.md)
|
||||
- English unified summary: [../../SUMMARY.md](../../SUMMARY.md)
|
||||
|
||||
## Operator References (English Source)
|
||||
|
||||
- [../../commands-reference.md](../../commands-reference.md)
|
||||
- [../../config-reference.md](../../config-reference.md)
|
||||
- [../../providers-reference.md](../../providers-reference.md)
|
||||
- [../../channels-reference.md](../../channels-reference.md)
|
||||
- [../../operations-runbook.md](../../operations-runbook.md)
|
||||
- [../../troubleshooting.md](../../troubleshooting.md)
|
||||
@@ -0,0 +1,35 @@
|
||||
# ZeroClaw Documentation Hub (Hindi)
|
||||
|
||||
This locale hub is enabled for Hindi community support.
|
||||
|
||||
Last synchronized: **March 6, 2026**.
|
||||
|
||||
## Quick Links
|
||||
|
||||
- Hindi docs hub: [README.md](README.md)
|
||||
- Hindi summary: [SUMMARY.md](SUMMARY.md)
|
||||
- English docs hub: [../../README.md](../../README.md)
|
||||
- English summary: [../../SUMMARY.md](../../SUMMARY.md)
|
||||
|
||||
## Coverage Status
|
||||
|
||||
Current status: **hub-level support enabled**. Full document translation is in progress.
|
||||
|
||||
## Other Languages
|
||||
|
||||
- English: [../../../README.md](../../../README.md)
|
||||
- 简体中文: [../zh-CN/README.md](../zh-CN/README.md)
|
||||
- 日本語: [../ja/README.md](../ja/README.md)
|
||||
- 한국어: [../ko/README.md](../ko/README.md)
|
||||
- Tiếng Việt: [../vi/README.md](../vi/README.md)
|
||||
- Tagalog: [../tl/README.md](../tl/README.md)
|
||||
- Español: [../es/README.md](../es/README.md)
|
||||
- Português: [../pt/README.md](../pt/README.md)
|
||||
- Italiano: [../it/README.md](../it/README.md)
|
||||
- Deutsch: [../de/README.md](../de/README.md)
|
||||
- Français: [../fr/README.md](../fr/README.md)
|
||||
- العربية: [../ar/README.md](../ar/README.md)
|
||||
- हिन्दी: [README.md](README.md)
|
||||
- Русский: [../ru/README.md](../ru/README.md)
|
||||
- বাংলা: [../bn/README.md](../bn/README.md)
|
||||
- Ελληνικά: [../el/README.md](../el/README.md)
|
||||
@@ -0,0 +1,20 @@
|
||||
# ZeroClaw Docs Summary (Hindi)
|
||||
|
||||
This is the Hindi locale summary entry point.
|
||||
|
||||
Last synchronized: **March 6, 2026**.
|
||||
|
||||
## Entry Points
|
||||
|
||||
- Hindi docs hub: [README.md](README.md)
|
||||
- English docs hub: [../../README.md](../../README.md)
|
||||
- English unified summary: [../../SUMMARY.md](../../SUMMARY.md)
|
||||
|
||||
## Operator References (English Source)
|
||||
|
||||
- [../../commands-reference.md](../../commands-reference.md)
|
||||
- [../../config-reference.md](../../config-reference.md)
|
||||
- [../../providers-reference.md](../../providers-reference.md)
|
||||
- [../../channels-reference.md](../../channels-reference.md)
|
||||
- [../../operations-runbook.md](../../operations-runbook.md)
|
||||
- [../../troubleshooting.md](../../troubleshooting.md)
|
||||
@@ -0,0 +1,35 @@
|
||||
# ZeroClaw Documentation Hub (Korean)
|
||||
|
||||
This locale hub is enabled for Korean community support.
|
||||
|
||||
Last synchronized: **March 6, 2026**.
|
||||
|
||||
## Quick Links
|
||||
|
||||
- Korean docs hub: [README.md](README.md)
|
||||
- Korean summary: [SUMMARY.md](SUMMARY.md)
|
||||
- English docs hub: [../../README.md](../../README.md)
|
||||
- English summary: [../../SUMMARY.md](../../SUMMARY.md)
|
||||
|
||||
## Coverage Status
|
||||
|
||||
Current status: **hub-level support enabled**. Full document translation is in progress.
|
||||
|
||||
## Other Languages
|
||||
|
||||
- English: [../../../README.md](../../../README.md)
|
||||
- 简体中文: [../zh-CN/README.md](../zh-CN/README.md)
|
||||
- 日本語: [../ja/README.md](../ja/README.md)
|
||||
- 한국어: [README.md](README.md)
|
||||
- Tiếng Việt: [../vi/README.md](../vi/README.md)
|
||||
- Tagalog: [../tl/README.md](../tl/README.md)
|
||||
- Español: [../es/README.md](../es/README.md)
|
||||
- Português: [../pt/README.md](../pt/README.md)
|
||||
- Italiano: [../it/README.md](../it/README.md)
|
||||
- Deutsch: [../de/README.md](../de/README.md)
|
||||
- Français: [../fr/README.md](../fr/README.md)
|
||||
- العربية: [../ar/README.md](../ar/README.md)
|
||||
- हिन्दी: [../hi/README.md](../hi/README.md)
|
||||
- Русский: [../ru/README.md](../ru/README.md)
|
||||
- বাংলা: [../bn/README.md](../bn/README.md)
|
||||
- Ελληνικά: [../el/README.md](../el/README.md)
|
||||
@@ -0,0 +1,20 @@
|
||||
# ZeroClaw Docs Summary (Korean)
|
||||
|
||||
This is the Korean locale summary entry point.
|
||||
|
||||
Last synchronized: **March 6, 2026**.
|
||||
|
||||
## Entry Points
|
||||
|
||||
- Korean docs hub: [README.md](README.md)
|
||||
- English docs hub: [../../README.md](../../README.md)
|
||||
- English unified summary: [../../SUMMARY.md](../../SUMMARY.md)
|
||||
|
||||
## Operator References (English Source)
|
||||
|
||||
- [../../commands-reference.md](../../commands-reference.md)
|
||||
- [../../config-reference.md](../../config-reference.md)
|
||||
- [../../providers-reference.md](../../providers-reference.md)
|
||||
- [../../channels-reference.md](../../channels-reference.md)
|
||||
- [../../operations-runbook.md](../../operations-runbook.md)
|
||||
- [../../troubleshooting.md](../../troubleshooting.md)
|
||||
@@ -0,0 +1,35 @@
|
||||
# ZeroClaw Documentation Hub (Tagalog)
|
||||
|
||||
This locale hub is enabled for Tagalog community support.
|
||||
|
||||
Last synchronized: **March 6, 2026**.
|
||||
|
||||
## Quick Links
|
||||
|
||||
- Tagalog docs hub: [README.md](README.md)
|
||||
- Tagalog summary: [SUMMARY.md](SUMMARY.md)
|
||||
- English docs hub: [../../README.md](../../README.md)
|
||||
- English summary: [../../SUMMARY.md](../../SUMMARY.md)
|
||||
|
||||
## Coverage Status
|
||||
|
||||
Current status: **hub-level support enabled**. Full document translation is in progress.
|
||||
|
||||
## Other Languages
|
||||
|
||||
- English: [../../../README.md](../../../README.md)
|
||||
- 简体中文: [../zh-CN/README.md](../zh-CN/README.md)
|
||||
- 日本語: [../ja/README.md](../ja/README.md)
|
||||
- 한국어: [../ko/README.md](../ko/README.md)
|
||||
- Tiếng Việt: [../vi/README.md](../vi/README.md)
|
||||
- Tagalog: [README.md](README.md)
|
||||
- Español: [../es/README.md](../es/README.md)
|
||||
- Português: [../pt/README.md](../pt/README.md)
|
||||
- Italiano: [../it/README.md](../it/README.md)
|
||||
- Deutsch: [../de/README.md](../de/README.md)
|
||||
- Français: [../fr/README.md](../fr/README.md)
|
||||
- العربية: [../ar/README.md](../ar/README.md)
|
||||
- हिन्दी: [../hi/README.md](../hi/README.md)
|
||||
- Русский: [../ru/README.md](../ru/README.md)
|
||||
- বাংলা: [../bn/README.md](../bn/README.md)
|
||||
- Ελληνικά: [../el/README.md](../el/README.md)
|
||||
@@ -0,0 +1,20 @@
|
||||
# ZeroClaw Docs Summary (Tagalog)
|
||||
|
||||
This is the Tagalog locale summary entry point.
|
||||
|
||||
Last synchronized: **March 6, 2026**.
|
||||
|
||||
## Entry Points
|
||||
|
||||
- Tagalog docs hub: [README.md](README.md)
|
||||
- English docs hub: [../../README.md](../../README.md)
|
||||
- English unified summary: [../../SUMMARY.md](../../SUMMARY.md)
|
||||
|
||||
## Operator References (English Source)
|
||||
|
||||
- [../../commands-reference.md](../../commands-reference.md)
|
||||
- [../../config-reference.md](../../config-reference.md)
|
||||
- [../../providers-reference.md](../../providers-reference.md)
|
||||
- [../../channels-reference.md](../../channels-reference.md)
|
||||
- [../../operations-runbook.md](../../operations-runbook.md)
|
||||
- [../../troubleshooting.md](../../troubleshooting.md)
|
||||
@@ -411,6 +411,30 @@ allowed_roots = [\"~/Desktop/projects\", \"/opt/shared-repo\"]
|
||||
|
||||
- 内存上下文注入忽略旧的 `assistant_resp*` 自动保存键,以防止旧模型生成的摘要被视为事实。
|
||||
|
||||
### `[memory.mem0]`
|
||||
|
||||
Mem0 (OpenMemory) 后端 — 连接自托管 mem0 服务器,提供基于向量的记忆存储和 LLM 事实提取。构建时需要 `memory-mem0` feature flag,配置需设置 `backend = "mem0"`。
|
||||
|
||||
| 键 | 默认值 | 环境变量 | 用途 |
|
||||
|---|---|---|---|
|
||||
| `url` | `http://localhost:8765` | `MEM0_URL` | OpenMemory 服务器地址 |
|
||||
| `user_id` | `zeroclaw` | `MEM0_USER_ID` | 记忆作用域的用户 ID |
|
||||
| `app_name` | `zeroclaw` | `MEM0_APP_NAME` | 在 mem0 中注册的应用名称 |
|
||||
| `infer` | `true` | — | 使用 LLM 从存储文本中提取事实 (`true`) 或原样存储 (`false`) |
|
||||
| `extraction_prompt` | 未设置 | `MEM0_EXTRACTION_PROMPT` | 自定义 LLM 事实提取提示词(如适用于非英文内容) |
|
||||
|
||||
```toml
|
||||
[memory]
|
||||
backend = "mem0"
|
||||
|
||||
[memory.mem0]
|
||||
url = "http://192.168.0.171:8765"
|
||||
user_id = "zeroclaw-bot"
|
||||
extraction_prompt = "用原始语言提取事实..."
|
||||
```
|
||||
|
||||
服务器部署脚本位于 `deploy/mem0/`。
|
||||
|
||||
## `[[model_routes]]` 和 `[[embedding_routes]]`
|
||||
|
||||
使用路由提示,以便集成可以在模型 ID 演变时保持稳定的名称。
|
||||
|
||||
@@ -463,6 +463,30 @@ Notes:
|
||||
|
||||
- Memory context injection ignores legacy `assistant_resp*` auto-save keys to prevent old model-authored summaries from being treated as facts.
|
||||
|
||||
### `[memory.mem0]`
|
||||
|
||||
Mem0 (OpenMemory) backend — connects to a self-hosted mem0 server for vector-based memory with LLM-powered fact extraction. Requires feature flag `memory-mem0` at build time and `backend = "mem0"` in config.
|
||||
|
||||
| Key | Default | Env var | Purpose |
|
||||
|---|---|---|---|
|
||||
| `url` | `http://localhost:8765` | `MEM0_URL` | OpenMemory server URL |
|
||||
| `user_id` | `zeroclaw` | `MEM0_USER_ID` | User ID for scoping memories |
|
||||
| `app_name` | `zeroclaw` | `MEM0_APP_NAME` | Application name registered in mem0 |
|
||||
| `infer` | `true` | — | Use LLM to extract facts from stored text (`true`) or store raw (`false`) |
|
||||
| `extraction_prompt` | unset | `MEM0_EXTRACTION_PROMPT` | Custom prompt for LLM fact extraction (e.g. for non-English content) |
|
||||
|
||||
```toml
|
||||
[memory]
|
||||
backend = "mem0"
|
||||
|
||||
[memory.mem0]
|
||||
url = "http://192.168.0.171:8765"
|
||||
user_id = "zeroclaw-bot"
|
||||
extraction_prompt = "Extract facts in the original language..."
|
||||
```
|
||||
|
||||
Server deployment scripts are in `deploy/mem0/`.
|
||||
|
||||
## `[[model_routes]]` and `[[embedding_routes]]`
|
||||
|
||||
Use route hints so integrations can keep stable names while model IDs evolve.
|
||||
|
||||
@@ -337,6 +337,30 @@ Lưu ý:
|
||||
|
||||
- Chèn ngữ cảnh memory bỏ qua khóa auto-save `assistant_resp*` kiểu cũ để tránh tóm tắt do model tạo bị coi là sự thật.
|
||||
|
||||
### `[memory.mem0]`
|
||||
|
||||
Backend Mem0 (OpenMemory) — kết nối đến server mem0 tự host, cung cấp bộ nhớ vector với trích xuất sự kiện bằng LLM. Cần feature flag `memory-mem0` khi build và `backend = "mem0"` trong config.
|
||||
|
||||
| Khóa | Mặc định | Biến môi trường | Mục đích |
|
||||
|---|---|---|---|
|
||||
| `url` | `http://localhost:8765` | `MEM0_URL` | URL server OpenMemory |
|
||||
| `user_id` | `zeroclaw` | `MEM0_USER_ID` | User ID để phân vùng memory |
|
||||
| `app_name` | `zeroclaw` | `MEM0_APP_NAME` | Tên ứng dụng đăng ký trong mem0 |
|
||||
| `infer` | `true` | — | Dùng LLM trích xuất sự kiện từ text (`true`) hoặc lưu nguyên (`false`) |
|
||||
| `extraction_prompt` | chưa đặt | `MEM0_EXTRACTION_PROMPT` | Prompt tùy chỉnh cho trích xuất sự kiện LLM (vd: cho nội dung không phải tiếng Anh) |
|
||||
|
||||
```toml
|
||||
[memory]
|
||||
backend = "mem0"
|
||||
|
||||
[memory.mem0]
|
||||
url = "http://192.168.0.171:8765"
|
||||
user_id = "zeroclaw-bot"
|
||||
extraction_prompt = "Trích xuất sự kiện bằng ngôn ngữ gốc..."
|
||||
```
|
||||
|
||||
Script triển khai server nằm trong `deploy/mem0/`.
|
||||
|
||||
## `[[model_routes]]` và `[[embedding_routes]]`
|
||||
|
||||
Route hint giúp tên tích hợp ổn định khi model ID thay đổi.
|
||||
|
||||
@@ -0,0 +1,34 @@
|
||||
## Aardvark Adapter (aardvark0)
|
||||
|
||||
- Protocol: I2C and SPI via Total Phase Aardvark USB
|
||||
- Bitrate: 100 kHz (standard-mode I2C) by default
|
||||
- Use `i2c_scan` first to discover connected devices
|
||||
- Use `i2c_read` / `i2c_write` for register operations
|
||||
- Use `spi_transfer` for full-duplex SPI
|
||||
- Use `gpio_aardvark` to control the Aardvark's GPIO expansion pins
|
||||
- Use `datasheet` tool when user identifies a new device
|
||||
|
||||
## Tool Selection — Aardvark
|
||||
|
||||
| Goal | Tool |
|
||||
|--------------------------------|-----------------|
|
||||
| Find devices on the I2C bus | `i2c_scan` |
|
||||
| Read a register | `i2c_read` |
|
||||
| Write a register | `i2c_write` |
|
||||
| Full-duplex SPI transfer | `spi_transfer` |
|
||||
| Control Aardvark GPIO pins | `gpio_aardvark` |
|
||||
| User names a new device | `datasheet` |
|
||||
|
||||
## I2C Workflow
|
||||
|
||||
1. Run `i2c_scan` — find what addresses respond.
|
||||
2. User identifies the device (or look up the address in the skill file).
|
||||
3. Read the relevant register with `i2c_read`.
|
||||
4. If datasheet is not yet cached, use `datasheet(action="search", device_name="...")`.
|
||||
|
||||
## Notes
|
||||
|
||||
- Aardvark has no firmware — it calls the C library directly.
|
||||
Do NOT use `device_exec`, `device_read_code`, or `device_write_code` for Aardvark.
|
||||
- The Aardvark adapter auto-enables I2C pull-ups (3.3 V) — no external resistors needed
|
||||
for most sensors.
|
||||
@@ -0,0 +1,41 @@
|
||||
# aardvark0 — <Device Name> (<Part Number>)
|
||||
|
||||
<!-- Copy this file to ~/.zeroclaw/hardware/devices/aardvark0.md -->
|
||||
<!-- Fill in the device details from the datasheet. -->
|
||||
|
||||
## Connection
|
||||
|
||||
- Adapter: Total Phase Aardvark (aardvark0)
|
||||
- Protocol: I2C <!-- or SPI -->
|
||||
- I2C Address: 0x48 <!-- change to the actual device address -->
|
||||
- Bitrate: 100 kHz
|
||||
|
||||
## Key Registers (from datasheet)
|
||||
|
||||
<!-- Example for LM75 temperature sensor — replace with your device -->
|
||||
| Register | Address | Description | Notes |
|
||||
|----------|---------|----------------------------------------|------------------------|
|
||||
| Temp | 0x00 | Temperature (2 bytes, big-endian) | MSB × 0.5 °C per LSB |
|
||||
| Config | 0x01 | Configuration register | Read/write |
|
||||
| Thyst | 0x02 | Hysteresis temperature | Read/write |
|
||||
| Tos | 0x03 | Overtemperature shutdown threshold | Read/write |
|
||||
|
||||
## Datasheet
|
||||
|
||||
- File: `~/.zeroclaw/hardware/datasheets/<device>.pdf`
|
||||
- Source: <!-- URL where you downloaded the datasheet -->
|
||||
|
||||
## Verified Working Commands
|
||||
|
||||
```python
|
||||
# Read temperature from LM75 at I2C address 0x48, register 0x00
|
||||
i2c_read(addr=0x48, register=0x00, len=2)
|
||||
|
||||
# Convert two bytes to °C:
|
||||
# raw = (byte[0] << 1) | (byte[1] >> 7)
|
||||
# temp = raw * 0.5 (9-bit two's complement: if raw >= 256 it's negative — use raw - 512)
|
||||
```
|
||||
|
||||
## Notes
|
||||
|
||||
<!-- Add any device-specific quirks, power-on sequences, or gotchas here -->
|
||||
@@ -0,0 +1,63 @@
|
||||
# Skill: I2C Operations via Aardvark
|
||||
|
||||
<!-- Copy to ~/.zeroclaw/hardware/skills/i2c.md -->
|
||||
|
||||
## Always scan first
|
||||
|
||||
If the I2C address is unknown, run `i2c_scan` before anything else.
|
||||
|
||||
## Common device addresses
|
||||
|
||||
| Address range | Typical devices |
|
||||
|---------------|-----------------------------------------------|
|
||||
| 0x08–0x0F | Reserved / rare |
|
||||
| 0x40–0x47 | HTU21D, Si7021 (temp/humidity) |
|
||||
| 0x48–0x4F | LM75, DS1621, ADS1115 (ADC) |
|
||||
| 0x50–0x57 | AT24Cxx EEPROM |
|
||||
| 0x68–0x6F | MPU6050 IMU, DS1307 / DS3231 RTC |
|
||||
| 0x76–0x77 | BME280, BMP280 (pressure + humidity) |
|
||||
| 0x42 | Common PSoC6 default |
|
||||
| 0x3C, 0x3D | SSD1306 OLED display |
|
||||
|
||||
## Reading a register
|
||||
|
||||
```text
|
||||
i2c_read(addr=0x48, register=0x00, len=2)
|
||||
```
|
||||
|
||||
## Writing a register
|
||||
|
||||
```text
|
||||
i2c_write(addr=0x48, bytes=[0x01, 0x60])
|
||||
```
|
||||
|
||||
## Write-then-read (register pointer pattern)
|
||||
|
||||
Some devices require you to first write the register address, then read separately:
|
||||
|
||||
```text
|
||||
i2c_write(addr=0x48, bytes=[0x00])
|
||||
i2c_read(addr=0x48, len=2)
|
||||
```
|
||||
|
||||
The `i2c_read` tool handles this automatically when you specify `register=`.
|
||||
|
||||
## Temperature conversion — LM75 / TMP102
|
||||
|
||||
Raw bytes from register 0x00 are big-endian, 9-bit or 11-bit:
|
||||
|
||||
```
|
||||
raw = (byte[0] << 1) | (byte[1] >> 7) # for LM75 (9-bit)
|
||||
if raw >= 256: raw -= 512 # handle negative (two's complement)
|
||||
temp_c = raw * 0.5
|
||||
```
|
||||
|
||||
## Decision table — Aardvark vs Pico tools
|
||||
|
||||
| Scenario | Use |
|
||||
|------------------------------------------------|---------------|
|
||||
| Talking to an I2C sensor via Aardvark | `i2c_read` |
|
||||
| Configuring a sensor register | `i2c_write` |
|
||||
| Discovering what's on the bus | `i2c_scan` |
|
||||
| Running MicroPython on the connected Pico | `device_exec` |
|
||||
| Blinking Pico LED | `device_exec` |
|
||||
Generated
+2
@@ -88,6 +88,7 @@ checksum = "8ec610d8f49840a5b376c69663b6369e71f4b34484b9b2eb29fb918d92516cb9"
|
||||
dependencies = [
|
||||
"bare-metal",
|
||||
"bitfield",
|
||||
"critical-section",
|
||||
"embedded-hal 0.2.7",
|
||||
"volatile-register",
|
||||
]
|
||||
@@ -837,6 +838,7 @@ dependencies = [
|
||||
name = "nucleo"
|
||||
version = "0.1.0"
|
||||
dependencies = [
|
||||
"cortex-m",
|
||||
"cortex-m-rt",
|
||||
"critical-section",
|
||||
"defmt 1.0.1",
|
||||
|
||||
@@ -7,6 +7,8 @@
|
||||
# Flash: probe-rs run --chip STM32F401RETx target/thumbv7em-none-eabihf/release/nucleo
|
||||
# Or: zeroclaw peripheral flash-nucleo
|
||||
|
||||
[workspace]
|
||||
|
||||
[package]
|
||||
name = "nucleo"
|
||||
version = "0.1.0"
|
||||
@@ -18,12 +20,13 @@ description = "ZeroClaw Nucleo-F401RE peripheral firmware — GPIO over JSON ser
|
||||
embassy-executor = { version = "0.9", features = ["arch-cortex-m", "executor-thread", "defmt"] }
|
||||
embassy-stm32 = { version = "0.5", features = ["defmt", "stm32f401re", "unstable-pac", "memory-x", "time-driver-tim4", "exti"] }
|
||||
embassy-time = { version = "0.5", features = ["defmt", "defmt-timestamp-uptime", "tick-hz-32_768"] }
|
||||
cortex-m = { version = "0.7", features = ["inline-asm", "critical-section-single-core"] }
|
||||
cortex-m-rt = "0.7"
|
||||
defmt = "1.0"
|
||||
defmt-rtt = "1.0"
|
||||
panic-probe = { version = "1.0", features = ["print-defmt"] }
|
||||
heapless = { version = "0.9", default-features = false }
|
||||
critical-section = "1.1"
|
||||
cortex-m-rt = "0.7"
|
||||
|
||||
[package.metadata.embassy]
|
||||
build = [
|
||||
@@ -34,6 +37,5 @@ build = [
|
||||
opt-level = "s"
|
||||
lto = true
|
||||
codegen-units = 1
|
||||
strip = true
|
||||
panic = "abort"
|
||||
debug = 1
|
||||
debug = 2
|
||||
|
||||
@@ -0,0 +1,2 @@
|
||||
# ZeroClaw Pico firmware — serial protocol handler
|
||||
# Placeholder: replace with actual MicroPython firmware for Pico deployment
|
||||
Binary file not shown.
@@ -0,0 +1,3 @@
|
||||
[target.thumbv7em-none-eabihf]
|
||||
rustflags = ["-C", "link-arg=-Tlink.x", "-C", "link-arg=-Tdefmt.x"]
|
||||
runner = "probe-rs run --chip STM32F401RETx"
|
||||
@@ -0,0 +1,10 @@
|
||||
# Allow the gpio group to control the Raspberry Pi onboard ACT LED
|
||||
# via the Linux LED subsystem sysfs interface.
|
||||
#
|
||||
# Without this rule /sys/class/leds/ACT/{brightness,trigger} are
|
||||
# root-only writable, which prevents zeroclaw from blinking the LED.
|
||||
SUBSYSTEM=="leds", KERNEL=="ACT", ACTION=="add", \
|
||||
RUN+="/bin/chgrp gpio /sys/%p/brightness", \
|
||||
RUN+="/bin/chmod g+w /sys/%p/brightness", \
|
||||
RUN+="/bin/chgrp gpio /sys/%p/trigger", \
|
||||
RUN+="/bin/chmod g+w /sys/%p/trigger"
|
||||
@@ -0,0 +1,232 @@
|
||||
# scripts/ — Raspberry Pi Deployment Guide
|
||||
|
||||
This directory contains everything needed to cross-compile ZeroClaw and deploy it to a Raspberry Pi over SSH.
|
||||
|
||||
## Contents
|
||||
|
||||
| File | Purpose |
|
||||
|------|---------|
|
||||
| `deploy-rpi.sh` | One-shot cross-compile and deploy script |
|
||||
| `rpi-config.toml` | Production config template deployed to `~/.zeroclaw/config.toml` |
|
||||
| `zeroclaw.service` | systemd unit file installed on the Pi |
|
||||
| `99-act-led.rules` | udev rule for ACT LED sysfs access without sudo |
|
||||
|
||||
---
|
||||
|
||||
## Prerequisites
|
||||
|
||||
### Cross-compilation toolchain (pick one)
|
||||
|
||||
#### Option A — cargo-zigbuild (recommended for Apple Silicon)
|
||||
|
||||
```bash
|
||||
brew install zig
|
||||
cargo install cargo-zigbuild
|
||||
rustup target add aarch64-unknown-linux-gnu
|
||||
```
|
||||
|
||||
#### Option B — cross (Docker-based)
|
||||
|
||||
```bash
|
||||
cargo install cross
|
||||
rustup target add aarch64-unknown-linux-gnu
|
||||
# Docker must be running
|
||||
```
|
||||
|
||||
The deploy script auto-detects which tool is available, preferring `cargo-zigbuild`.
|
||||
Force a specific tool with `CROSS_TOOL=zigbuild` or `CROSS_TOOL=cross`.
|
||||
|
||||
### Optional: passwordless SSH
|
||||
|
||||
If you can't use SSH key authentication, install `sshpass` and set the `RPI_PASS` environment variable:
|
||||
|
||||
```bash
|
||||
brew install sshpass # macOS
|
||||
sudo apt install sshpass # Linux
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Quick Start
|
||||
|
||||
```bash
|
||||
RPI_HOST=raspberrypi.local RPI_USER=pi ./scripts/deploy-rpi.sh
|
||||
```
|
||||
|
||||
After the first deploy, you must set your API key on the Pi (see [First-Time Setup](#first-time-setup)).
|
||||
|
||||
---
|
||||
|
||||
## Environment Variables
|
||||
|
||||
| Variable | Default | Description |
|
||||
|----------|---------|-------------|
|
||||
| `RPI_HOST` | `raspberrypi.local` | Pi hostname or IP address |
|
||||
| `RPI_USER` | `pi` | SSH username |
|
||||
| `RPI_PORT` | `22` | SSH port |
|
||||
| `RPI_DIR` | `~/zeroclaw` | Remote directory for the binary and `.env` |
|
||||
| `RPI_PASS` | _(unset)_ | SSH password — uses `sshpass` if set; key auth used otherwise |
|
||||
| `CROSS_TOOL` | _(auto-detect)_ | Force `zigbuild` or `cross` |
|
||||
|
||||
---
|
||||
|
||||
## What the Deploy Script Does
|
||||
|
||||
1. **Cross-compile** — builds a release binary for `aarch64-unknown-linux-gnu` with `--features hardware,peripheral-rpi`.
|
||||
2. **Stop service** — runs `sudo systemctl stop zeroclaw` on the Pi (continues if not yet installed).
|
||||
3. **Create remote directory** — ensures `$RPI_DIR` exists on the Pi.
|
||||
4. **Copy binary** — SCPs the compiled binary to `$RPI_DIR/zeroclaw`.
|
||||
5. **Create `.env`** — writes an `.env` skeleton with an `ANTHROPIC_API_KEY=` placeholder to `$RPI_DIR/.env` with mode `600`. Skipped if the file already exists so an existing key is not overwritten.
|
||||
6. **Deploy config** — copies `rpi-config.toml` to `~/.zeroclaw/config.toml`, preserving any `api_key` already present in the file.
|
||||
7. **Install systemd service** — copies `zeroclaw.service` to `/etc/systemd/system/`, then enables and restarts it.
|
||||
8. **Hardware permissions** — adds the deploy user to the `gpio` group, copies `99-act-led.rules` to `/etc/udev/rules.d/`, and resets the ACT LED trigger.
|
||||
|
||||
---
|
||||
|
||||
## First-Time Setup
|
||||
|
||||
After the first successful deploy, SSH into the Pi and fill in your API key:
|
||||
|
||||
```bash
|
||||
ssh pi@raspberrypi.local
|
||||
nano ~/zeroclaw/.env
|
||||
# Set: ANTHROPIC_API_KEY=sk-ant-...
|
||||
sudo systemctl restart zeroclaw
|
||||
```
|
||||
|
||||
The `.env` is loaded by the systemd service as an `EnvironmentFile`.
|
||||
|
||||
---
|
||||
|
||||
## Interacting with ZeroClaw on the Pi
|
||||
|
||||
Once the service is running the gateway listens on port **8080**.
|
||||
|
||||
### Health check
|
||||
|
||||
```bash
|
||||
curl http://raspberrypi.local:8080/health
|
||||
```
|
||||
|
||||
### Send a message
|
||||
|
||||
```bash
|
||||
curl -s -X POST http://raspberrypi.local:8080/api/chat \
|
||||
-H 'Content-Type: application/json' \
|
||||
-d '{"message": "What is the CPU temperature?"}' | jq .
|
||||
```
|
||||
|
||||
### Stream a conversation
|
||||
|
||||
```bash
|
||||
curl -N -s -X POST http://raspberrypi.local:8080/api/chat \
|
||||
-H 'Content-Type: application/json' \
|
||||
-H 'Accept: text/event-stream' \
|
||||
-d '{"message": "List connected hardware devices", "stream": true}'
|
||||
```
|
||||
|
||||
### Follow service logs
|
||||
|
||||
```bash
|
||||
ssh pi@raspberrypi.local 'journalctl -u zeroclaw -f'
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Hardware Features
|
||||
|
||||
### GPIO tools
|
||||
|
||||
ZeroClaw is deployed with the `peripheral-rpi` feature, which enables two LLM-callable tools:
|
||||
|
||||
- **`gpio_read`** — reads a GPIO pin value via sysfs (`/sys/class/gpio/...`).
|
||||
- **`gpio_write`** — writes a GPIO pin value.
|
||||
|
||||
These tools let the agent directly control hardware in response to natural-language instructions.
|
||||
|
||||
### ACT LED
|
||||
|
||||
The udev rule `99-act-led.rules` grants the `gpio` group write access to:
|
||||
|
||||
```
|
||||
/sys/class/leds/ACT/trigger
|
||||
/sys/class/leds/ACT/brightness
|
||||
```
|
||||
|
||||
This allows toggling the Pi's green ACT LED without `sudo`.
|
||||
|
||||
### Aardvark I2C/SPI adapter
|
||||
|
||||
If a Total Phase Aardvark adapter is connected, the `hardware` feature enables I2C/SPI communication with external devices. No extra setup is needed — the device is auto-detected via USB.
|
||||
|
||||
---
|
||||
|
||||
## Files Deployed to the Pi
|
||||
|
||||
| Remote path | Source | Description |
|
||||
|------------|--------|-------------|
|
||||
| `~/zeroclaw/zeroclaw` | compiled binary | Main agent binary |
|
||||
| `~/zeroclaw/.env` | created on first deploy | API key and environment variables |
|
||||
| `~/.zeroclaw/config.toml` | `rpi-config.toml` | Agent configuration |
|
||||
| `/etc/systemd/system/zeroclaw.service` | `zeroclaw.service` | systemd service unit |
|
||||
| `/etc/udev/rules.d/99-act-led.rules` | `99-act-led.rules` | ACT LED permissions |
|
||||
|
||||
---
|
||||
|
||||
## Configuration
|
||||
|
||||
`rpi-config.toml` is the production config template. Key defaults:
|
||||
|
||||
- **Provider**: `anthropic-custom:https://api.z.ai/api/anthropic`
|
||||
- **Model**: `claude-3-5-sonnet-20241022`
|
||||
- **Autonomy**: `full`
|
||||
- **Allowed shell commands**: `git`, `cargo`, `npm`, `mkdir`, `touch`, `cp`, `mv`, `ls`, `cat`, `grep`, `find`, `echo`, `pwd`, `wc`, `head`, `tail`, `date`
|
||||
|
||||
To customise, edit `~/.zeroclaw/config.toml` directly on the Pi and restart the service.
|
||||
|
||||
---
|
||||
|
||||
## Troubleshooting
|
||||
|
||||
### Service won't start
|
||||
|
||||
```bash
|
||||
ssh pi@raspberrypi.local 'sudo systemctl status zeroclaw'
|
||||
ssh pi@raspberrypi.local 'journalctl -u zeroclaw -n 50 --no-pager'
|
||||
```
|
||||
|
||||
### GPIO permission denied
|
||||
|
||||
Make sure the deploy user is in the `gpio` group and that a fresh login session has been started:
|
||||
|
||||
```bash
|
||||
ssh pi@raspberrypi.local 'groups'
|
||||
# Should include: gpio
|
||||
```
|
||||
|
||||
If the group was just added, log out and back in, or run `newgrp gpio`.
|
||||
|
||||
### Wrong architecture / binary won't run
|
||||
|
||||
Re-run the deploy script. Confirm the target:
|
||||
|
||||
```bash
|
||||
ssh pi@raspberrypi.local 'file ~/zeroclaw/zeroclaw'
|
||||
# Expected: ELF 64-bit LSB pie executable, ARM aarch64
|
||||
```
|
||||
|
||||
### Force a specific cross-compilation tool
|
||||
|
||||
```bash
|
||||
CROSS_TOOL=zigbuild RPI_HOST=raspberrypi.local ./scripts/deploy-rpi.sh
|
||||
# or
|
||||
CROSS_TOOL=cross RPI_HOST=raspberrypi.local ./scripts/deploy-rpi.sh
|
||||
```
|
||||
|
||||
### Rebuild locally without deploying
|
||||
|
||||
```bash
|
||||
cargo zigbuild --release \
|
||||
--target aarch64-unknown-linux-gnu \
|
||||
--features hardware,peripheral-rpi
|
||||
```
|
||||
Executable
+223
@@ -0,0 +1,223 @@
|
||||
#!/usr/bin/env bash
|
||||
# deploy-rpi.sh — cross-compile ZeroClaw for Raspberry Pi and deploy via SSH.
|
||||
#
|
||||
# Cross-compilation (pick ONE — the script auto-detects):
|
||||
#
|
||||
# Option A — cargo-zigbuild (recommended; works on Apple Silicon + Intel, no Docker)
|
||||
# brew install zig
|
||||
# cargo install cargo-zigbuild
|
||||
# rustup target add aarch64-unknown-linux-gnu
|
||||
#
|
||||
# Option B — cross (Docker-based; requires Docker Desktop running)
|
||||
# cargo install cross
|
||||
#
|
||||
# Usage:
|
||||
# RPI_HOST=raspberrypi.local RPI_USER=pi ./scripts/deploy-rpi.sh
|
||||
#
|
||||
# Optional env vars:
|
||||
# RPI_HOST — hostname or IP of the Pi (default: raspberrypi.local)
|
||||
# RPI_USER — SSH user on the Pi (default: pi)
|
||||
# RPI_PORT — SSH port (default: 22)
|
||||
# RPI_DIR — remote deployment dir (default: /home/$RPI_USER/zeroclaw)
|
||||
# RPI_PASS — SSH password (uses sshpass) (default: prompt interactively)
|
||||
# CROSS_TOOL — force "zigbuild" or "cross" (default: auto-detect)
|
||||
|
||||
set -euo pipefail
|
||||
|
||||
RPI_HOST="${RPI_HOST:-raspberrypi.local}"
|
||||
RPI_USER="${RPI_USER:-pi}"
|
||||
RPI_PORT="${RPI_PORT:-22}"
|
||||
RPI_DIR="${RPI_DIR:-/home/${RPI_USER}/zeroclaw}"
|
||||
TARGET="aarch64-unknown-linux-gnu"
|
||||
FEATURES="hardware,peripheral-rpi"
|
||||
BINARY="target/${TARGET}/release/zeroclaw"
|
||||
SSH_OPTS="-p ${RPI_PORT} -o StrictHostKeyChecking=no -o ConnectTimeout=10"
|
||||
# scp uses -P (uppercase) for port; ssh uses -p (lowercase)
|
||||
SCP_OPTS="-P ${RPI_PORT} -o StrictHostKeyChecking=no -o ConnectTimeout=10"
|
||||
|
||||
# If RPI_PASS is set, wrap ssh/scp with sshpass for non-interactive auth.
|
||||
SSH_CMD="ssh"
|
||||
SCP_CMD="scp"
|
||||
if [[ -n "${RPI_PASS:-}" ]]; then
|
||||
if ! command -v sshpass &>/dev/null; then
|
||||
echo "ERROR: RPI_PASS is set but sshpass is not installed."
|
||||
echo " brew install hudochenkov/sshpass/sshpass"
|
||||
exit 1
|
||||
fi
|
||||
SSH_CMD="sshpass -p ${RPI_PASS} ssh"
|
||||
SCP_CMD="sshpass -p ${RPI_PASS} scp"
|
||||
fi
|
||||
|
||||
echo "==> Building ZeroClaw for Raspberry Pi (${TARGET})"
|
||||
echo " Features: ${FEATURES}"
|
||||
echo " Target host: ${RPI_USER}@${RPI_HOST}:${RPI_PORT}"
|
||||
echo ""
|
||||
|
||||
# ── 1. Cross-compile — auto-detect best available tool ───────────────────────
|
||||
# Prefer cargo-zigbuild: it works on Apple Silicon without Docker and avoids
|
||||
# the rustup-toolchain-install errors that affect cross v0.2.x on arm64 Macs.
|
||||
_detect_cross_tool() {
|
||||
if [[ "${CROSS_TOOL:-}" == "cross" ]]; then
|
||||
echo "cross"; return
|
||||
fi
|
||||
if [[ "${CROSS_TOOL:-}" == "zigbuild" ]]; then
|
||||
echo "zigbuild"; return
|
||||
fi
|
||||
if command -v cargo-zigbuild &>/dev/null && command -v zig &>/dev/null; then
|
||||
echo "zigbuild"; return
|
||||
fi
|
||||
if command -v cross &>/dev/null; then
|
||||
echo "cross"; return
|
||||
fi
|
||||
echo "none"
|
||||
}
|
||||
|
||||
TOOL=$(_detect_cross_tool)
|
||||
|
||||
case "${TOOL}" in
|
||||
zigbuild)
|
||||
echo "==> Using cargo-zigbuild (Zig cross-linker)"
|
||||
# Ensure the target sysroot is registered with rustup.
|
||||
rustup target add "${TARGET}" 2>/dev/null || true
|
||||
cargo zigbuild \
|
||||
--target "${TARGET}" \
|
||||
--features "${FEATURES}" \
|
||||
--release
|
||||
;;
|
||||
cross)
|
||||
echo "==> Using cross (Docker-based)"
|
||||
# Verify Docker is running before handing off — gives a clear error message
|
||||
# instead of the confusing rustup-toolchain failure from cross v0.2.x.
|
||||
if ! docker info &>/dev/null; then
|
||||
echo ""
|
||||
echo "ERROR: Docker is not running."
|
||||
echo " Start Docker Desktop and retry, or install cargo-zigbuild instead:"
|
||||
echo " brew install zig && cargo install cargo-zigbuild"
|
||||
echo " rustup target add ${TARGET}"
|
||||
exit 1
|
||||
fi
|
||||
cross build \
|
||||
--target "${TARGET}" \
|
||||
--features "${FEATURES}" \
|
||||
--release
|
||||
;;
|
||||
none)
|
||||
echo ""
|
||||
echo "ERROR: No cross-compilation tool found."
|
||||
echo ""
|
||||
echo "Install one of the following and retry:"
|
||||
echo ""
|
||||
echo " Option A — cargo-zigbuild (recommended; works on Apple Silicon, no Docker):"
|
||||
echo " brew install zig"
|
||||
echo " cargo install cargo-zigbuild"
|
||||
echo " rustup target add ${TARGET}"
|
||||
echo ""
|
||||
echo " Option B — cross (requires Docker Desktop running):"
|
||||
echo " cargo install cross"
|
||||
echo ""
|
||||
exit 1
|
||||
;;
|
||||
esac
|
||||
|
||||
echo ""
|
||||
echo "==> Build complete: ${BINARY}"
|
||||
ls -lh "${BINARY}"
|
||||
|
||||
# ── 2. Stop running service (if any) so binary can be overwritten ─────────────
|
||||
echo ""
|
||||
echo "==> Stopping zeroclaw service (if running)"
|
||||
# shellcheck disable=SC2029
|
||||
${SSH_CMD} ${SSH_OPTS} "${RPI_USER}@${RPI_HOST}" \
|
||||
"sudo systemctl stop zeroclaw 2>/dev/null || true"
|
||||
|
||||
# ── 3. Create remote directory ────────────────────────────────────────────────
|
||||
echo ""
|
||||
echo "==> Creating remote directory ${RPI_DIR}"
|
||||
# shellcheck disable=SC2029
|
||||
${SSH_CMD} ${SSH_OPTS} "${RPI_USER}@${RPI_HOST}" "mkdir -p ${RPI_DIR}"
|
||||
|
||||
# ── 4. Deploy binary ──────────────────────────────────────────────────────────
|
||||
echo ""
|
||||
echo "==> Deploying binary to ${RPI_USER}@${RPI_HOST}:${RPI_DIR}/zeroclaw"
|
||||
${SCP_CMD} ${SCP_OPTS} "${BINARY}" "${RPI_USER}@${RPI_HOST}:${RPI_DIR}/zeroclaw"
|
||||
|
||||
# ── 4. Create .env skeleton (if it doesn't exist) ────────────────────────────
|
||||
ENV_DEST="${RPI_DIR}/.env"
|
||||
echo ""
|
||||
echo "==> Checking for ${ENV_DEST}"
|
||||
# shellcheck disable=SC2029
|
||||
if ${SSH_CMD} ${SSH_OPTS} "${RPI_USER}@${RPI_HOST}" "[ -f ${ENV_DEST} ]"; then
|
||||
echo " .env already exists — skipping"
|
||||
else
|
||||
echo " Creating .env skeleton with 600 permissions"
|
||||
# shellcheck disable=SC2029
|
||||
${SSH_CMD} ${SSH_OPTS} "${RPI_USER}@${RPI_HOST}" \
|
||||
"mkdir -p ${RPI_DIR} && \
|
||||
printf '# Set your API key here\nANTHROPIC_API_KEY=sk-ant-\n' > ${ENV_DEST} && \
|
||||
chmod 600 ${ENV_DEST}"
|
||||
echo " IMPORTANT: edit ${ENV_DEST} on the Pi and set ANTHROPIC_API_KEY"
|
||||
fi
|
||||
|
||||
# ── 5. Deploy config ─────────────────────────────────────────────────────────
|
||||
CONFIG_DEST="/home/${RPI_USER}/.zeroclaw/config.toml"
|
||||
echo ""
|
||||
echo "==> Deploying config to ${CONFIG_DEST}"
|
||||
# shellcheck disable=SC2029
|
||||
${SSH_CMD} ${SSH_OPTS} "${RPI_USER}@${RPI_HOST}" "mkdir -p /home/${RPI_USER}/.zeroclaw"
|
||||
# Preserve existing api_key from the remote config if present.
|
||||
# shellcheck disable=SC2029
|
||||
EXISTING_API_KEY=$(${SSH_CMD} ${SSH_OPTS} "${RPI_USER}@${RPI_HOST}" \
|
||||
"grep -m1 '^api_key' ${CONFIG_DEST} 2>/dev/null || true")
|
||||
${SCP_CMD} ${SCP_OPTS} "scripts/rpi-config.toml" "${RPI_USER}@${RPI_HOST}:${CONFIG_DEST}"
|
||||
if [[ -n "${EXISTING_API_KEY}" ]]; then
|
||||
echo " Restoring existing api_key from previous config"
|
||||
# shellcheck disable=SC2029
|
||||
${SSH_CMD} ${SSH_OPTS} "${RPI_USER}@${RPI_HOST}" \
|
||||
"sed -i 's|^# api_key = .*|${EXISTING_API_KEY}|' ${CONFIG_DEST}"
|
||||
fi
|
||||
|
||||
# ── 6. Deploy and enable systemd service ─────────────────────────────────────
|
||||
SERVICE_DEST="/etc/systemd/system/zeroclaw.service"
|
||||
echo ""
|
||||
echo "==> Installing systemd service (requires sudo on the Pi)"
|
||||
${SCP_CMD} ${SCP_OPTS} "scripts/zeroclaw.service" "${RPI_USER}@${RPI_HOST}:/tmp/zeroclaw.service"
|
||||
# shellcheck disable=SC2029
|
||||
${SSH_CMD} ${SSH_OPTS} "${RPI_USER}@${RPI_HOST}" \
|
||||
"sudo mv /tmp/zeroclaw.service ${SERVICE_DEST} && \
|
||||
sudo systemctl daemon-reload && \
|
||||
sudo systemctl enable zeroclaw && \
|
||||
sudo systemctl restart zeroclaw && \
|
||||
sudo systemctl status zeroclaw --no-pager || true"
|
||||
|
||||
# ── 7. Runtime permissions ───────────────────────────────────────────────────
|
||||
echo ""
|
||||
echo "==> Granting ${RPI_USER} access to GPIO group"
|
||||
# shellcheck disable=SC2029
|
||||
${SSH_CMD} ${SSH_OPTS} "${RPI_USER}@${RPI_HOST}" \
|
||||
"sudo usermod -aG gpio ${RPI_USER} || true"
|
||||
|
||||
# ── 8. Reset ACT LED trigger so ZeroClaw can control it ──────────────────────
|
||||
echo ""
|
||||
echo "==> Installing udev rule for ACT LED sysfs access by gpio group"
|
||||
${SCP_CMD} ${SCP_OPTS} "scripts/99-act-led.rules" "${RPI_USER}@${RPI_HOST}:/tmp/99-act-led.rules"
|
||||
# shellcheck disable=SC2029
|
||||
${SSH_CMD} ${SSH_OPTS} "${RPI_USER}@${RPI_HOST}" \
|
||||
"sudo mv /tmp/99-act-led.rules /etc/udev/rules.d/99-act-led.rules && \
|
||||
sudo udevadm control --reload-rules && \
|
||||
sudo chgrp gpio /sys/class/leds/ACT/brightness /sys/class/leds/ACT/trigger 2>/dev/null || true && \
|
||||
sudo chmod g+w /sys/class/leds/ACT/brightness /sys/class/leds/ACT/trigger 2>/dev/null || true"
|
||||
|
||||
echo ""
|
||||
echo "==> Resetting ACT LED trigger (none)"
|
||||
# shellcheck disable=SC2029
|
||||
${SSH_CMD} ${SSH_OPTS} "${RPI_USER}@${RPI_HOST}" \
|
||||
"echo none | sudo tee /sys/class/leds/ACT/trigger > /dev/null 2>&1 || true"
|
||||
|
||||
echo ""
|
||||
echo "==> Deployment complete!"
|
||||
echo ""
|
||||
echo " ZeroClaw is running at http://${RPI_HOST}:8080"
|
||||
echo " POST /api/chat — chat with the agent"
|
||||
echo " GET /health — health check"
|
||||
echo ""
|
||||
echo " To check logs: ssh ${RPI_USER}@${RPI_HOST} 'journalctl -u zeroclaw -f'"
|
||||
@@ -0,0 +1,631 @@
|
||||
# ZeroClaw — Raspberry Pi production configuration
|
||||
#
|
||||
# Copy this to ~/.zeroclaw/config.toml on the Pi.
|
||||
# deploy-rpi.sh does this automatically.
|
||||
#
|
||||
# API key is loaded from ~/.zeroclaw/.env (EnvironmentFile in systemd).
|
||||
# Set it there as: ANTHROPIC_API_KEY=your-key-here
|
||||
# Or set api_key directly below (not recommended for version control).
|
||||
|
||||
# api_key = ""
|
||||
default_provider = "anthropic-custom:https://api.z.ai/api/anthropic"
|
||||
default_model = "claude-3-5-sonnet-20241022"
|
||||
default_temperature = 0.4
|
||||
model_routes = []
|
||||
embedding_routes = []
|
||||
|
||||
[model_providers]
|
||||
|
||||
[provider]
|
||||
|
||||
[observability]
|
||||
backend = "none"
|
||||
runtime_trace_mode = "none"
|
||||
runtime_trace_path = "state/runtime-trace.jsonl"
|
||||
runtime_trace_max_entries = 200
|
||||
|
||||
[autonomy]
|
||||
level = "full"
|
||||
workspace_only = false
|
||||
allowed_commands = [
|
||||
"git",
|
||||
"npm",
|
||||
"cargo",
|
||||
"mkdir",
|
||||
"touch",
|
||||
"cp",
|
||||
"mv",
|
||||
"ls",
|
||||
"cat",
|
||||
"grep",
|
||||
"find",
|
||||
"echo",
|
||||
"pwd",
|
||||
"wc",
|
||||
"head",
|
||||
"tail",
|
||||
"date",
|
||||
]
|
||||
command_context_rules = []
|
||||
forbidden_paths = [
|
||||
"/etc",
|
||||
"/root",
|
||||
"/home",
|
||||
"/usr",
|
||||
"/bin",
|
||||
"/sbin",
|
||||
"/lib",
|
||||
"/opt",
|
||||
"/boot",
|
||||
"/dev",
|
||||
"/proc",
|
||||
"/sys",
|
||||
"/var",
|
||||
"/tmp",
|
||||
"/mnt",
|
||||
"~/.ssh",
|
||||
"~/.gnupg",
|
||||
"~/.aws",
|
||||
"~/.config",
|
||||
]
|
||||
max_actions_per_hour = 100
|
||||
max_cost_per_day_cents = 1000
|
||||
require_approval_for_medium_risk = true
|
||||
block_high_risk_commands = true
|
||||
shell_env_passthrough = []
|
||||
allow_sensitive_file_reads = false
|
||||
allow_sensitive_file_writes = false
|
||||
auto_approve = [
|
||||
"file_read",
|
||||
"memory_recall",
|
||||
]
|
||||
always_ask = []
|
||||
allowed_roots = []
|
||||
non_cli_excluded_tools = [
|
||||
"shell",
|
||||
"process",
|
||||
"file_write",
|
||||
"file_edit",
|
||||
"git_operations",
|
||||
"browser",
|
||||
"browser_open",
|
||||
"http_request",
|
||||
"schedule",
|
||||
"cron_add",
|
||||
"cron_remove",
|
||||
"cron_update",
|
||||
"cron_run",
|
||||
"memory_store",
|
||||
"memory_forget",
|
||||
"proxy_config",
|
||||
"web_search_config",
|
||||
"web_access_config",
|
||||
"model_routing_config",
|
||||
"channel_ack_config",
|
||||
"pushover",
|
||||
"composio",
|
||||
"delegate",
|
||||
"screenshot",
|
||||
"image_info",
|
||||
]
|
||||
non_cli_approval_approvers = []
|
||||
non_cli_natural_language_approval_mode = "direct"
|
||||
|
||||
[autonomy.non_cli_natural_language_approval_mode_by_channel]
|
||||
|
||||
[security]
|
||||
roles = []
|
||||
|
||||
[security.sandbox]
|
||||
backend = "auto"
|
||||
firejail_args = []
|
||||
|
||||
[security.resources]
|
||||
max_memory_mb = 512
|
||||
max_cpu_time_seconds = 60
|
||||
max_subprocesses = 10
|
||||
memory_monitoring = true
|
||||
|
||||
[security.audit]
|
||||
enabled = true
|
||||
log_path = "audit.log"
|
||||
max_size_mb = 100
|
||||
sign_events = false
|
||||
|
||||
[security.otp]
|
||||
enabled = true
|
||||
method = "totp"
|
||||
token_ttl_secs = 30
|
||||
cache_valid_secs = 300
|
||||
gated_actions = [
|
||||
"shell",
|
||||
"file_write",
|
||||
"browser_open",
|
||||
"browser",
|
||||
"memory_forget",
|
||||
]
|
||||
gated_domains = []
|
||||
gated_domain_categories = []
|
||||
challenge_delivery = "dm"
|
||||
challenge_timeout_secs = 120
|
||||
challenge_max_attempts = 3
|
||||
|
||||
[security.estop]
|
||||
enabled = false
|
||||
state_file = "~/.zeroclaw/estop-state.json"
|
||||
require_otp_to_resume = true
|
||||
|
||||
[security.syscall_anomaly]
|
||||
enabled = true
|
||||
strict_mode = false
|
||||
alert_on_unknown_syscall = true
|
||||
max_denied_events_per_minute = 5
|
||||
max_total_events_per_minute = 120
|
||||
max_alerts_per_minute = 30
|
||||
alert_cooldown_secs = 20
|
||||
log_path = "syscall-anomalies.log"
|
||||
baseline_syscalls = [
|
||||
"read",
|
||||
"write",
|
||||
"open",
|
||||
"openat",
|
||||
"close",
|
||||
"stat",
|
||||
"fstat",
|
||||
"newfstatat",
|
||||
"lseek",
|
||||
"mmap",
|
||||
"mprotect",
|
||||
"munmap",
|
||||
"brk",
|
||||
"rt_sigaction",
|
||||
"rt_sigprocmask",
|
||||
"ioctl",
|
||||
"fcntl",
|
||||
"access",
|
||||
"pipe2",
|
||||
"dup",
|
||||
"dup2",
|
||||
"dup3",
|
||||
"epoll_create1",
|
||||
"epoll_ctl",
|
||||
"epoll_wait",
|
||||
"poll",
|
||||
"ppoll",
|
||||
"select",
|
||||
"futex",
|
||||
"clock_gettime",
|
||||
"nanosleep",
|
||||
"getpid",
|
||||
"gettid",
|
||||
"set_tid_address",
|
||||
"set_robust_list",
|
||||
"clone",
|
||||
"clone3",
|
||||
"fork",
|
||||
"execve",
|
||||
"wait4",
|
||||
"exit",
|
||||
"exit_group",
|
||||
"socket",
|
||||
"connect",
|
||||
"accept",
|
||||
"accept4",
|
||||
"listen",
|
||||
"sendto",
|
||||
"recvfrom",
|
||||
"sendmsg",
|
||||
"recvmsg",
|
||||
"getsockname",
|
||||
"getpeername",
|
||||
"setsockopt",
|
||||
"getsockopt",
|
||||
"getrandom",
|
||||
"statx",
|
||||
]
|
||||
|
||||
[security.perplexity_filter]
|
||||
enable_perplexity_filter = false
|
||||
perplexity_threshold = 18.0
|
||||
suffix_window_chars = 64
|
||||
min_prompt_chars = 32
|
||||
symbol_ratio_threshold = 0.2
|
||||
|
||||
[security.outbound_leak_guard]
|
||||
enabled = true
|
||||
action = "redact"
|
||||
sensitivity = 0.7
|
||||
|
||||
[security.url_access]
|
||||
block_private_ip = true
|
||||
allow_cidrs = []
|
||||
allow_domains = []
|
||||
allow_loopback = false
|
||||
require_first_visit_approval = false
|
||||
enforce_domain_allowlist = false
|
||||
domain_allowlist = []
|
||||
domain_blocklist = []
|
||||
approved_domains = []
|
||||
|
||||
[runtime]
|
||||
kind = "native"
|
||||
|
||||
[runtime.docker]
|
||||
image = "alpine:3.20"
|
||||
network = "none"
|
||||
memory_limit_mb = 512
|
||||
cpu_limit = 1.0
|
||||
read_only_rootfs = true
|
||||
mount_workspace = true
|
||||
allowed_workspace_roots = []
|
||||
|
||||
[runtime.wasm]
|
||||
tools_dir = "tools/wasm"
|
||||
fuel_limit = 1000000
|
||||
memory_limit_mb = 64
|
||||
max_module_size_mb = 50
|
||||
allow_workspace_read = false
|
||||
allow_workspace_write = false
|
||||
allowed_hosts = []
|
||||
|
||||
[runtime.wasm.security]
|
||||
require_workspace_relative_tools_dir = true
|
||||
reject_symlink_modules = true
|
||||
reject_symlink_tools_dir = true
|
||||
strict_host_validation = true
|
||||
capability_escalation_mode = "deny"
|
||||
module_hash_policy = "warn"
|
||||
|
||||
[runtime.wasm.security.module_sha256]
|
||||
|
||||
[research]
|
||||
enabled = false
|
||||
trigger = "never"
|
||||
keywords = [
|
||||
"find",
|
||||
"search",
|
||||
"check",
|
||||
"investigate",
|
||||
"look",
|
||||
"research",
|
||||
"найди",
|
||||
"проверь",
|
||||
"исследуй",
|
||||
"поищи",
|
||||
]
|
||||
min_message_length = 50
|
||||
max_iterations = 5
|
||||
show_progress = true
|
||||
system_prompt_prefix = ""
|
||||
|
||||
[reliability]
|
||||
provider_retries = 2
|
||||
provider_backoff_ms = 500
|
||||
fallback_providers = []
|
||||
api_keys = []
|
||||
channel_initial_backoff_secs = 2
|
||||
channel_max_backoff_secs = 60
|
||||
scheduler_poll_secs = 15
|
||||
scheduler_retries = 2
|
||||
|
||||
[reliability.model_fallbacks]
|
||||
|
||||
[scheduler]
|
||||
enabled = true
|
||||
max_tasks = 64
|
||||
max_concurrent = 4
|
||||
|
||||
[agent]
|
||||
compact_context = true
|
||||
max_tool_iterations = 20
|
||||
max_history_messages = 50
|
||||
parallel_tools = false
|
||||
tool_dispatcher = "auto"
|
||||
loop_detection_no_progress_threshold = 3
|
||||
loop_detection_ping_pong_cycles = 2
|
||||
loop_detection_failure_streak = 3
|
||||
safety_heartbeat_interval = 5
|
||||
safety_heartbeat_turn_interval = 10
|
||||
|
||||
[agent.session]
|
||||
backend = "none"
|
||||
strategy = "per-sender"
|
||||
ttl_seconds = 3600
|
||||
max_messages = 50
|
||||
|
||||
[agent.teams]
|
||||
enabled = true
|
||||
auto_activate = true
|
||||
max_agents = 32
|
||||
strategy = "adaptive"
|
||||
load_window_secs = 120
|
||||
inflight_penalty = 8
|
||||
recent_selection_penalty = 2
|
||||
recent_failure_penalty = 12
|
||||
|
||||
[agent.subagents]
|
||||
enabled = true
|
||||
auto_activate = true
|
||||
max_concurrent = 10
|
||||
strategy = "adaptive"
|
||||
load_window_secs = 180
|
||||
inflight_penalty = 10
|
||||
recent_selection_penalty = 3
|
||||
recent_failure_penalty = 16
|
||||
queue_wait_ms = 15000
|
||||
queue_poll_ms = 200
|
||||
|
||||
[skills]
|
||||
open_skills_enabled = false
|
||||
trusted_skill_roots = []
|
||||
allow_scripts = false
|
||||
prompt_injection_mode = "full"
|
||||
|
||||
[query_classification]
|
||||
enabled = false
|
||||
rules = []
|
||||
|
||||
[heartbeat]
|
||||
enabled = false
|
||||
interval_minutes = 30
|
||||
|
||||
[cron]
|
||||
enabled = true
|
||||
max_run_history = 50
|
||||
|
||||
[goal_loop]
|
||||
enabled = false
|
||||
interval_minutes = 10
|
||||
step_timeout_secs = 120
|
||||
max_steps_per_cycle = 3
|
||||
|
||||
[channels_config]
|
||||
cli = true
|
||||
message_timeout_secs = 300
|
||||
|
||||
[channels_config.webhook]
|
||||
port = 8080
|
||||
secret = "mytoken123"
|
||||
|
||||
[channels_config.ack_reaction]
|
||||
|
||||
[memory]
|
||||
backend = "sqlite"
|
||||
auto_save = true
|
||||
hygiene_enabled = true
|
||||
archive_after_days = 7
|
||||
purge_after_days = 30
|
||||
conversation_retention_days = 30
|
||||
embedding_provider = "none"
|
||||
embedding_model = "text-embedding-3-small"
|
||||
embedding_dimensions = 1536
|
||||
vector_weight = 0.7
|
||||
keyword_weight = 0.3
|
||||
min_relevance_score = 0.4
|
||||
embedding_cache_size = 10000
|
||||
chunk_max_tokens = 512
|
||||
response_cache_enabled = false
|
||||
response_cache_ttl_minutes = 60
|
||||
response_cache_max_entries = 5000
|
||||
snapshot_enabled = false
|
||||
snapshot_on_hygiene = false
|
||||
auto_hydrate = true
|
||||
sqlite_journal_mode = "wal"
|
||||
|
||||
[memory.qdrant]
|
||||
collection = "zeroclaw_memories"
|
||||
|
||||
[storage.provider.config]
|
||||
provider = ""
|
||||
schema = "public"
|
||||
table = "memories"
|
||||
tls = false
|
||||
|
||||
[tunnel]
|
||||
provider = "none"
|
||||
|
||||
[gateway]
|
||||
port = 8080
|
||||
host = "0.0.0.0"
|
||||
require_pairing = false
|
||||
trusted_ips = ["0.0.0.0/0"]
|
||||
allow_public_bind = true
|
||||
paired_tokens = []
|
||||
pair_rate_limit_per_minute = 10
|
||||
webhook_rate_limit_per_minute = 60
|
||||
trust_forwarded_headers = false
|
||||
rate_limit_max_keys = 10000
|
||||
idempotency_ttl_secs = 300
|
||||
idempotency_max_keys = 10000
|
||||
webhook_secret = "mytoken123"
|
||||
|
||||
[gateway.node_control]
|
||||
enabled = false
|
||||
allowed_node_ids = []
|
||||
|
||||
[composio]
|
||||
enabled = false
|
||||
entity_id = "default"
|
||||
|
||||
[secrets]
|
||||
encrypt = true
|
||||
|
||||
[browser]
|
||||
enabled = false
|
||||
allowed_domains = []
|
||||
browser_open = "default"
|
||||
backend = "agent_browser"
|
||||
auto_backend_priority = []
|
||||
agent_browser_command = "agent-browser"
|
||||
agent_browser_extra_args = []
|
||||
agent_browser_timeout_ms = 30000
|
||||
native_headless = true
|
||||
native_webdriver_url = "http://127.0.0.1:9515"
|
||||
|
||||
[browser.computer_use]
|
||||
endpoint = "http://127.0.0.1:8787/v1/actions"
|
||||
timeout_ms = 15000
|
||||
allow_remote_endpoint = false
|
||||
window_allowlist = []
|
||||
|
||||
[http_request]
|
||||
enabled = false
|
||||
allowed_domains = []
|
||||
max_response_size = 1000000
|
||||
timeout_secs = 30
|
||||
user_agent = "ZeroClaw/1.0"
|
||||
|
||||
[http_request.credential_profiles]
|
||||
|
||||
[multimodal]
|
||||
max_images = 4
|
||||
max_image_size_mb = 5
|
||||
allow_remote_fetch = false
|
||||
|
||||
[web_fetch]
|
||||
enabled = false
|
||||
provider = "fast_html2md"
|
||||
allowed_domains = ["*"]
|
||||
blocked_domains = []
|
||||
max_response_size = 500000
|
||||
timeout_secs = 30
|
||||
user_agent = "ZeroClaw/1.0"
|
||||
|
||||
[web_search]
|
||||
enabled = false
|
||||
provider = "duckduckgo"
|
||||
fallback_providers = []
|
||||
retries_per_provider = 0
|
||||
retry_backoff_ms = 250
|
||||
domain_filter = []
|
||||
language_filter = []
|
||||
exa_search_type = "auto"
|
||||
exa_include_text = false
|
||||
jina_site_filters = []
|
||||
max_results = 5
|
||||
timeout_secs = 15
|
||||
user_agent = "ZeroClaw/1.0"
|
||||
|
||||
[proxy]
|
||||
enabled = false
|
||||
no_proxy = []
|
||||
scope = "zeroclaw"
|
||||
services = []
|
||||
|
||||
[identity]
|
||||
format = "openclaw"
|
||||
extra_files = []
|
||||
|
||||
[cost]
|
||||
enabled = false
|
||||
daily_limit_usd = 10.0
|
||||
monthly_limit_usd = 100.0
|
||||
warn_at_percent = 80
|
||||
allow_override = false
|
||||
|
||||
[cost.prices."anthropic/claude-opus-4-20250514"]
|
||||
input = 15.0
|
||||
output = 75.0
|
||||
|
||||
[cost.prices."openai/gpt-4o"]
|
||||
input = 5.0
|
||||
output = 15.0
|
||||
|
||||
[cost.prices."openai/gpt-4o-mini"]
|
||||
input = 0.15
|
||||
output = 0.6
|
||||
|
||||
[cost.prices."anthropic/claude-sonnet-4-20250514"]
|
||||
input = 3.0
|
||||
output = 15.0
|
||||
|
||||
[cost.prices."openai/o1-preview"]
|
||||
input = 15.0
|
||||
output = 60.0
|
||||
|
||||
[cost.prices."anthropic/claude-3-haiku"]
|
||||
input = 0.25
|
||||
output = 1.25
|
||||
|
||||
[cost.prices."google/gemini-2.0-flash"]
|
||||
input = 0.1
|
||||
output = 0.4
|
||||
|
||||
[cost.prices."anthropic/claude-3.5-sonnet"]
|
||||
input = 3.0
|
||||
output = 15.0
|
||||
|
||||
[cost.prices."google/gemini-1.5-pro"]
|
||||
input = 1.25
|
||||
output = 5.0
|
||||
|
||||
[cost.enforcement]
|
||||
mode = "warn"
|
||||
route_down_model = "hint:fast"
|
||||
reserve_percent = 10
|
||||
|
||||
[economic]
|
||||
enabled = false
|
||||
initial_balance = 1000.0
|
||||
min_evaluation_threshold = 0.6
|
||||
|
||||
[economic.token_pricing]
|
||||
input_price_per_million = 3.0
|
||||
output_price_per_million = 15.0
|
||||
|
||||
[peripherals]
|
||||
enabled = true
|
||||
boards = []
|
||||
|
||||
[agents]
|
||||
|
||||
[coordination]
|
||||
enabled = true
|
||||
lead_agent = "delegate-lead"
|
||||
max_inbox_messages_per_agent = 256
|
||||
max_dead_letters = 256
|
||||
max_context_entries = 512
|
||||
max_seen_message_ids = 4096
|
||||
|
||||
[hooks]
|
||||
enabled = true
|
||||
|
||||
[hooks.builtin]
|
||||
boot_script = false
|
||||
command_logger = false
|
||||
session_memory = false
|
||||
|
||||
[plugins]
|
||||
enabled = true
|
||||
allow = []
|
||||
deny = []
|
||||
load_paths = []
|
||||
|
||||
[plugins.entries]
|
||||
|
||||
[hardware]
|
||||
enabled = true
|
||||
transport = "None"
|
||||
baud_rate = 115200
|
||||
workspace_datasheets = false
|
||||
|
||||
[transcription]
|
||||
enabled = false
|
||||
api_url = "https://api.groq.com/openai/v1/audio/transcriptions"
|
||||
model = "whisper-large-v3-turbo"
|
||||
max_duration_secs = 120
|
||||
|
||||
[agents_ipc]
|
||||
enabled = false
|
||||
db_path = "~/.zeroclaw/agents.db"
|
||||
staleness_secs = 300
|
||||
|
||||
[mcp]
|
||||
enabled = false
|
||||
servers = []
|
||||
|
||||
[wasm]
|
||||
enabled = true
|
||||
memory_limit_mb = 64
|
||||
fuel_limit = 1000000000
|
||||
registry_url = "https://zeromarket.vercel.app/api"
|
||||
@@ -0,0 +1,22 @@
|
||||
[Unit]
|
||||
Description=ZeroClaw AI Hardware Agent
|
||||
Documentation=https://github.com/zeroclaw/zeroclaw
|
||||
After=network-online.target
|
||||
Wants=network-online.target
|
||||
|
||||
[Service]
|
||||
Type=simple
|
||||
User=pi
|
||||
SupplementaryGroups=gpio spi i2c
|
||||
WorkingDirectory=/home/pi/zeroclaw
|
||||
ExecStart=/home/pi/zeroclaw/zeroclaw gateway --host 0.0.0.0 --port 8080
|
||||
Restart=on-failure
|
||||
RestartSec=5
|
||||
EnvironmentFile=/home/pi/zeroclaw/.env
|
||||
Environment=RUST_LOG=info
|
||||
|
||||
# Expand ~ in config path
|
||||
Environment=HOME=/home/pi
|
||||
|
||||
[Install]
|
||||
WantedBy=multi-user.target
|
||||
+80
-4
@@ -45,6 +45,8 @@ pub struct Agent {
|
||||
/// Pre-rendered security policy summary injected into the system prompt
|
||||
/// so the LLM knows the concrete constraints before making tool calls.
|
||||
security_summary: Option<String>,
|
||||
/// Autonomy level from config; controls safety prompt instructions.
|
||||
autonomy_level: crate::security::AutonomyLevel,
|
||||
}
|
||||
|
||||
pub struct AgentBuilder {
|
||||
@@ -71,6 +73,7 @@ pub struct AgentBuilder {
|
||||
response_cache: Option<Arc<crate::memory::response_cache::ResponseCache>>,
|
||||
tool_descriptions: Option<ToolDescriptions>,
|
||||
security_summary: Option<String>,
|
||||
autonomy_level: Option<crate::security::AutonomyLevel>,
|
||||
}
|
||||
|
||||
impl AgentBuilder {
|
||||
@@ -99,6 +102,7 @@ impl AgentBuilder {
|
||||
response_cache: None,
|
||||
tool_descriptions: None,
|
||||
security_summary: None,
|
||||
autonomy_level: None,
|
||||
}
|
||||
}
|
||||
|
||||
@@ -226,6 +230,11 @@ impl AgentBuilder {
|
||||
self
|
||||
}
|
||||
|
||||
pub fn autonomy_level(mut self, level: crate::security::AutonomyLevel) -> Self {
|
||||
self.autonomy_level = Some(level);
|
||||
self
|
||||
}
|
||||
|
||||
pub fn build(self) -> Result<Agent> {
|
||||
let mut tools = self
|
||||
.tools
|
||||
@@ -278,6 +287,9 @@ impl AgentBuilder {
|
||||
response_cache: self.response_cache,
|
||||
tool_descriptions: self.tool_descriptions,
|
||||
security_summary: self.security_summary,
|
||||
autonomy_level: self
|
||||
.autonomy_level
|
||||
.unwrap_or(crate::security::AutonomyLevel::Supervised),
|
||||
})
|
||||
}
|
||||
}
|
||||
@@ -318,7 +330,7 @@ impl Agent {
|
||||
}
|
||||
}
|
||||
|
||||
pub fn from_config(config: &Config) -> Result<Self> {
|
||||
pub async fn from_config(config: &Config) -> Result<Self> {
|
||||
let observer: Arc<dyn Observer> =
|
||||
Arc::from(observability::create_observer(&config.observability));
|
||||
let runtime: Arc<dyn runtime::RuntimeAdapter> =
|
||||
@@ -347,7 +359,7 @@ impl Agent {
|
||||
None
|
||||
};
|
||||
|
||||
let (tools, _delegate_handle) = tools::all_tools_with_runtime(
|
||||
let (mut tools, delegate_handle) = tools::all_tools_with_runtime(
|
||||
Arc::new(config.clone()),
|
||||
&security,
|
||||
runtime,
|
||||
@@ -363,6 +375,66 @@ impl Agent {
|
||||
config,
|
||||
);
|
||||
|
||||
// ── Wire MCP tools (non-fatal) ─────────────────────────────
|
||||
// Replicates the same MCP initialization logic used in the CLI
|
||||
// and webhook paths (loop_.rs) so that the WebSocket/daemon UI
|
||||
// path also has access to MCP tools.
|
||||
if config.mcp.enabled && !config.mcp.servers.is_empty() {
|
||||
tracing::info!(
|
||||
"Initializing MCP client — {} server(s) configured",
|
||||
config.mcp.servers.len()
|
||||
);
|
||||
match tools::McpRegistry::connect_all(&config.mcp.servers).await {
|
||||
Ok(registry) => {
|
||||
let registry = std::sync::Arc::new(registry);
|
||||
if config.mcp.deferred_loading {
|
||||
let deferred_set = tools::DeferredMcpToolSet::from_registry(
|
||||
std::sync::Arc::clone(®istry),
|
||||
)
|
||||
.await;
|
||||
tracing::info!(
|
||||
"MCP deferred: {} tool stub(s) from {} server(s)",
|
||||
deferred_set.len(),
|
||||
registry.server_count()
|
||||
);
|
||||
let activated = std::sync::Arc::new(std::sync::Mutex::new(
|
||||
tools::ActivatedToolSet::new(),
|
||||
));
|
||||
tools.push(Box::new(tools::ToolSearchTool::new(
|
||||
deferred_set,
|
||||
activated,
|
||||
)));
|
||||
} else {
|
||||
let names = registry.tool_names();
|
||||
let mut registered = 0usize;
|
||||
for name in names {
|
||||
if let Some(def) = registry.get_tool_def(&name).await {
|
||||
let wrapper: std::sync::Arc<dyn tools::Tool> =
|
||||
std::sync::Arc::new(tools::McpToolWrapper::new(
|
||||
name,
|
||||
def,
|
||||
std::sync::Arc::clone(®istry),
|
||||
));
|
||||
if let Some(ref handle) = delegate_handle {
|
||||
handle.write().push(std::sync::Arc::clone(&wrapper));
|
||||
}
|
||||
tools.push(Box::new(tools::ArcToolRef(wrapper)));
|
||||
registered += 1;
|
||||
}
|
||||
}
|
||||
tracing::info!(
|
||||
"MCP: {} tool(s) registered from {} server(s)",
|
||||
registered,
|
||||
registry.server_count()
|
||||
);
|
||||
}
|
||||
}
|
||||
Err(e) => {
|
||||
tracing::error!("MCP registry failed to initialize: {e:#}");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
let provider_name = config.default_provider.as_deref().unwrap_or("openrouter");
|
||||
|
||||
let model_name = config
|
||||
@@ -438,6 +510,7 @@ impl Agent {
|
||||
.skills_prompt_mode(config.skills.prompt_injection_mode)
|
||||
.auto_save(config.memory.auto_save)
|
||||
.security_summary(Some(security.prompt_summary()))
|
||||
.autonomy_level(config.autonomy.level)
|
||||
.build()
|
||||
}
|
||||
|
||||
@@ -480,6 +553,7 @@ impl Agent {
|
||||
dispatcher_instructions: &instructions,
|
||||
tool_descriptions: self.tool_descriptions.as_ref(),
|
||||
security_summary: self.security_summary.clone(),
|
||||
autonomy_level: self.autonomy_level,
|
||||
};
|
||||
self.prompt_builder.build(&ctx)
|
||||
}
|
||||
@@ -772,7 +846,7 @@ pub async fn run(
|
||||
}
|
||||
effective_config.default_temperature = temperature;
|
||||
|
||||
let mut agent = Agent::from_config(&effective_config)?;
|
||||
let mut agent = Agent::from_config(&effective_config).await?;
|
||||
|
||||
let provider_name = effective_config
|
||||
.default_provider
|
||||
@@ -1116,7 +1190,9 @@ mod tests {
|
||||
.extra_headers
|
||||
.insert("X-Title".to_string(), "zeroclaw-web".to_string());
|
||||
|
||||
let mut agent = Agent::from_config(&config).expect("agent from config");
|
||||
let mut agent = Agent::from_config(&config)
|
||||
.await
|
||||
.expect("agent from config");
|
||||
let response = agent.turn("hello").await.expect("agent turn");
|
||||
|
||||
assert_eq!(response, "hello from mock");
|
||||
|
||||
@@ -256,6 +256,10 @@ pub(crate) const PROGRESS_MIN_INTERVAL_MS: u64 = 500;
|
||||
/// Used before streaming the final answer so progress lines are replaced by the clean response.
|
||||
pub(crate) const DRAFT_CLEAR_SENTINEL: &str = "\x00CLEAR\x00";
|
||||
|
||||
tokio::task_local! {
|
||||
pub(crate) static TOOL_CHOICE_OVERRIDE: Option<String>;
|
||||
}
|
||||
|
||||
/// Extract a short hint from tool call arguments for progress display.
|
||||
fn truncate_tool_args_for_progress(name: &str, args: &serde_json::Value, max_len: usize) -> String {
|
||||
let hint = match name {
|
||||
|
||||
+108
-8
@@ -1,6 +1,7 @@
|
||||
use crate::config::IdentityConfig;
|
||||
use crate::i18n::ToolDescriptions;
|
||||
use crate::identity;
|
||||
use crate::security::AutonomyLevel;
|
||||
use crate::skills::Skill;
|
||||
use crate::tools::Tool;
|
||||
use anyhow::Result;
|
||||
@@ -26,6 +27,10 @@ pub struct PromptContext<'a> {
|
||||
/// (allowed commands, forbidden paths, autonomy level) so it can plan
|
||||
/// tool calls without trial-and-error. See issue #2404.
|
||||
pub security_summary: Option<String>,
|
||||
/// Autonomy level from config. Controls whether the safety section
|
||||
/// includes "ask before acting" instructions. Full autonomy omits them
|
||||
/// so the model executes tools directly without simulating approval.
|
||||
pub autonomy_level: AutonomyLevel,
|
||||
}
|
||||
|
||||
pub trait PromptSection: Send + Sync {
|
||||
@@ -177,14 +182,39 @@ impl PromptSection for SafetySection {
|
||||
}
|
||||
|
||||
fn build(&self, ctx: &PromptContext<'_>) -> Result<String> {
|
||||
let mut out = String::from(
|
||||
"## Safety\n\n\
|
||||
- Do not exfiltrate private data.\n\
|
||||
- Do not run destructive commands without asking.\n\
|
||||
- Do not bypass oversight or approval mechanisms.\n\
|
||||
- Prefer `trash` over `rm`.\n\
|
||||
- When in doubt, ask before acting externally.",
|
||||
);
|
||||
let mut out = String::from("## Safety\n\n- Do not exfiltrate private data.\n");
|
||||
|
||||
// Omit "ask before acting" instructions when autonomy is Full —
|
||||
// mirrors build_system_prompt_with_mode_and_autonomy. See #3952.
|
||||
if ctx.autonomy_level != AutonomyLevel::Full {
|
||||
out.push_str(
|
||||
"- Do not run destructive commands without asking.\n\
|
||||
- Do not bypass oversight or approval mechanisms.\n",
|
||||
);
|
||||
}
|
||||
|
||||
out.push_str("- Prefer `trash` over `rm`.\n");
|
||||
out.push_str(match ctx.autonomy_level {
|
||||
AutonomyLevel::Full => {
|
||||
"- Respect the runtime autonomy policy: if a tool or action is allowed, \
|
||||
execute it directly instead of asking the user for extra approval.\n\
|
||||
- If a tool or action is blocked by policy or unavailable, explain that \
|
||||
concrete restriction instead of simulating an approval dialog."
|
||||
}
|
||||
AutonomyLevel::ReadOnly => {
|
||||
"- This runtime is read-only for side effects unless a tool explicitly \
|
||||
reports otherwise.\n\
|
||||
- If a requested action is blocked by policy, explain the restriction \
|
||||
directly instead of simulating an approval dialog."
|
||||
}
|
||||
AutonomyLevel::Supervised => {
|
||||
"- When in doubt, ask before acting externally.\n\
|
||||
- Respect the runtime autonomy policy: ask for approval only when the \
|
||||
current runtime policy actually requires it.\n\
|
||||
- If a tool or action is blocked by policy or unavailable, explain that \
|
||||
concrete restriction instead of simulating an approval dialog."
|
||||
}
|
||||
});
|
||||
|
||||
// Append concrete security policy constraints when available (#2404).
|
||||
// This tells the LLM exactly what commands are allowed, which paths
|
||||
@@ -367,6 +397,7 @@ mod tests {
|
||||
dispatcher_instructions: "",
|
||||
tool_descriptions: None,
|
||||
security_summary: None,
|
||||
autonomy_level: AutonomyLevel::Supervised,
|
||||
};
|
||||
|
||||
let section = IdentitySection;
|
||||
@@ -397,6 +428,7 @@ mod tests {
|
||||
dispatcher_instructions: "instr",
|
||||
tool_descriptions: None,
|
||||
security_summary: None,
|
||||
autonomy_level: AutonomyLevel::Supervised,
|
||||
};
|
||||
let prompt = SystemPromptBuilder::with_defaults().build(&ctx).unwrap();
|
||||
assert!(prompt.contains("## Tools"));
|
||||
@@ -434,6 +466,7 @@ mod tests {
|
||||
dispatcher_instructions: "",
|
||||
tool_descriptions: None,
|
||||
security_summary: None,
|
||||
autonomy_level: AutonomyLevel::Supervised,
|
||||
};
|
||||
|
||||
let output = SkillsSection.build(&ctx).unwrap();
|
||||
@@ -474,6 +507,7 @@ mod tests {
|
||||
dispatcher_instructions: "",
|
||||
tool_descriptions: None,
|
||||
security_summary: None,
|
||||
autonomy_level: AutonomyLevel::Supervised,
|
||||
};
|
||||
|
||||
let output = SkillsSection.build(&ctx).unwrap();
|
||||
@@ -501,6 +535,7 @@ mod tests {
|
||||
dispatcher_instructions: "instr",
|
||||
tool_descriptions: None,
|
||||
security_summary: None,
|
||||
autonomy_level: AutonomyLevel::Supervised,
|
||||
};
|
||||
|
||||
let rendered = DateTimeSection.build(&ctx).unwrap();
|
||||
@@ -541,6 +576,7 @@ mod tests {
|
||||
dispatcher_instructions: "",
|
||||
tool_descriptions: None,
|
||||
security_summary: None,
|
||||
autonomy_level: AutonomyLevel::Supervised,
|
||||
};
|
||||
|
||||
let prompt = SystemPromptBuilder::with_defaults().build(&ctx).unwrap();
|
||||
@@ -574,6 +610,7 @@ mod tests {
|
||||
dispatcher_instructions: "",
|
||||
tool_descriptions: None,
|
||||
security_summary: Some(summary.clone()),
|
||||
autonomy_level: AutonomyLevel::Supervised,
|
||||
};
|
||||
|
||||
let output = SafetySection.build(&ctx).unwrap();
|
||||
@@ -608,6 +645,7 @@ mod tests {
|
||||
dispatcher_instructions: "",
|
||||
tool_descriptions: None,
|
||||
security_summary: None,
|
||||
autonomy_level: AutonomyLevel::Supervised,
|
||||
};
|
||||
|
||||
let output = SafetySection.build(&ctx).unwrap();
|
||||
@@ -620,4 +658,66 @@ mod tests {
|
||||
"should NOT contain security policy header when None"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn safety_section_full_autonomy_omits_approval_instructions() {
|
||||
let tools: Vec<Box<dyn Tool>> = vec![];
|
||||
let ctx = PromptContext {
|
||||
workspace_dir: Path::new("/tmp"),
|
||||
model_name: "test-model",
|
||||
tools: &tools,
|
||||
skills: &[],
|
||||
skills_prompt_mode: crate::config::SkillsPromptInjectionMode::Full,
|
||||
identity_config: None,
|
||||
dispatcher_instructions: "",
|
||||
tool_descriptions: None,
|
||||
security_summary: None,
|
||||
autonomy_level: AutonomyLevel::Full,
|
||||
};
|
||||
|
||||
let output = SafetySection.build(&ctx).unwrap();
|
||||
assert!(
|
||||
!output.contains("without asking"),
|
||||
"full autonomy should NOT include 'ask before acting' instructions"
|
||||
);
|
||||
assert!(
|
||||
!output.contains("bypass oversight"),
|
||||
"full autonomy should NOT include 'bypass oversight' instructions"
|
||||
);
|
||||
assert!(
|
||||
output.contains("execute it directly"),
|
||||
"full autonomy should instruct to execute directly"
|
||||
);
|
||||
assert!(
|
||||
output.contains("Do not exfiltrate"),
|
||||
"full autonomy should still include data exfiltration guard"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn safety_section_supervised_includes_approval_instructions() {
|
||||
let tools: Vec<Box<dyn Tool>> = vec![];
|
||||
let ctx = PromptContext {
|
||||
workspace_dir: Path::new("/tmp"),
|
||||
model_name: "test-model",
|
||||
tools: &tools,
|
||||
skills: &[],
|
||||
skills_prompt_mode: crate::config::SkillsPromptInjectionMode::Full,
|
||||
identity_config: None,
|
||||
dispatcher_instructions: "",
|
||||
tool_descriptions: None,
|
||||
security_summary: None,
|
||||
autonomy_level: AutonomyLevel::Supervised,
|
||||
};
|
||||
|
||||
let output = SafetySection.build(&ctx).unwrap();
|
||||
assert!(
|
||||
output.contains("without asking"),
|
||||
"supervised should include 'ask before acting' instructions"
|
||||
);
|
||||
assert!(
|
||||
output.contains("bypass oversight"),
|
||||
"supervised should include 'bypass oversight' instructions"
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
+157
-21
@@ -217,12 +217,88 @@ const LARK_DEFAULT_TOKEN_TTL: Duration = Duration::from_secs(7200);
|
||||
/// Feishu/Lark API business code for expired/invalid tenant access token.
|
||||
const LARK_INVALID_ACCESS_TOKEN_CODE: i64 = 99_991_663;
|
||||
|
||||
/// Max byte size for a single interactive card's markdown content.
|
||||
/// Lark card payloads have a ~30 KB limit; leave margin for JSON envelope.
|
||||
const LARK_CARD_MARKDOWN_MAX_BYTES: usize = 28_000;
|
||||
|
||||
/// Returns true when the WebSocket frame indicates live traffic that should
|
||||
/// refresh the heartbeat watchdog.
|
||||
fn should_refresh_last_recv(msg: &WsMsg) -> bool {
|
||||
matches!(msg, WsMsg::Binary(_) | WsMsg::Ping(_) | WsMsg::Pong(_))
|
||||
}
|
||||
|
||||
/// Build an interactive card JSON string with a single markdown element.
|
||||
/// Uses Card JSON 2.0 structure so that headings, tables, blockquotes,
|
||||
/// and inline code render correctly.
|
||||
fn build_card_content(markdown: &str) -> String {
|
||||
serde_json::json!({
|
||||
"schema": "2.0",
|
||||
"body": {
|
||||
"elements": [{
|
||||
"tag": "markdown",
|
||||
"content": markdown
|
||||
}]
|
||||
}
|
||||
})
|
||||
.to_string()
|
||||
}
|
||||
|
||||
/// Build the full message body for sending an interactive card message.
|
||||
fn build_interactive_card_body(recipient: &str, markdown: &str) -> serde_json::Value {
|
||||
serde_json::json!({
|
||||
"receive_id": recipient,
|
||||
"msg_type": "interactive",
|
||||
"content": build_card_content(markdown),
|
||||
})
|
||||
}
|
||||
|
||||
/// Split markdown content into chunks that fit within the card size limit.
|
||||
/// Splits on line boundaries to avoid breaking markdown syntax.
|
||||
fn split_markdown_chunks(text: &str, max_bytes: usize) -> Vec<&str> {
|
||||
if text.len() <= max_bytes {
|
||||
return vec![text];
|
||||
}
|
||||
|
||||
let mut chunks = Vec::new();
|
||||
let mut start = 0;
|
||||
|
||||
while start < text.len() {
|
||||
if start + max_bytes >= text.len() {
|
||||
chunks.push(&text[start..]);
|
||||
break;
|
||||
}
|
||||
|
||||
let end = start + max_bytes;
|
||||
let search_region = &text[start..end];
|
||||
let split_at = search_region
|
||||
.rfind('\n')
|
||||
.map(|pos| start + pos + 1)
|
||||
.unwrap_or(end);
|
||||
|
||||
let split_at = if text.is_char_boundary(split_at) {
|
||||
split_at
|
||||
} else {
|
||||
(start..split_at)
|
||||
.rev()
|
||||
.find(|&i| text.is_char_boundary(i))
|
||||
.unwrap_or(start)
|
||||
};
|
||||
|
||||
if split_at <= start {
|
||||
let forced = (end..=text.len())
|
||||
.find(|&i| text.is_char_boundary(i))
|
||||
.unwrap_or(text.len());
|
||||
chunks.push(&text[start..forced]);
|
||||
start = forced;
|
||||
} else {
|
||||
chunks.push(&text[start..split_at]);
|
||||
start = split_at;
|
||||
}
|
||||
}
|
||||
|
||||
chunks
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
struct CachedTenantToken {
|
||||
value: String,
|
||||
@@ -1138,33 +1214,31 @@ impl Channel for LarkChannel {
|
||||
let token = self.get_tenant_access_token().await?;
|
||||
let url = self.send_message_url();
|
||||
|
||||
let content = serde_json::json!({ "text": message.content }).to_string();
|
||||
let body = serde_json::json!({
|
||||
"receive_id": message.recipient,
|
||||
"msg_type": "text",
|
||||
"content": content,
|
||||
});
|
||||
let chunks = split_markdown_chunks(&message.content, LARK_CARD_MARKDOWN_MAX_BYTES);
|
||||
for chunk in &chunks {
|
||||
let body = build_interactive_card_body(&message.recipient, chunk);
|
||||
|
||||
let (status, response) = self.send_text_once(&url, &token, &body).await?;
|
||||
let (status, response) = self.send_text_once(&url, &token, &body).await?;
|
||||
|
||||
if should_refresh_lark_tenant_token(status, &response) {
|
||||
// Token expired/invalid, invalidate and retry once.
|
||||
self.invalidate_token().await;
|
||||
let new_token = self.get_tenant_access_token().await?;
|
||||
let (retry_status, retry_response) =
|
||||
self.send_text_once(&url, &new_token, &body).await?;
|
||||
if should_refresh_lark_tenant_token(status, &response) {
|
||||
// Token expired/invalid, invalidate and retry once.
|
||||
self.invalidate_token().await;
|
||||
let new_token = self.get_tenant_access_token().await?;
|
||||
let (retry_status, retry_response) =
|
||||
self.send_text_once(&url, &new_token, &body).await?;
|
||||
|
||||
if should_refresh_lark_tenant_token(retry_status, &retry_response) {
|
||||
anyhow::bail!(
|
||||
"Lark send failed after token refresh: status={retry_status}, body={retry_response}"
|
||||
);
|
||||
if should_refresh_lark_tenant_token(retry_status, &retry_response) {
|
||||
anyhow::bail!(
|
||||
"Lark send failed after token refresh: status={retry_status}, body={retry_response}"
|
||||
);
|
||||
}
|
||||
|
||||
ensure_lark_send_success(retry_status, &retry_response, "after token refresh")?;
|
||||
} else {
|
||||
ensure_lark_send_success(status, &response, "without token refresh")?;
|
||||
}
|
||||
|
||||
ensure_lark_send_success(retry_status, &retry_response, "after token refresh")?;
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
ensure_lark_send_success(status, &response, "without token refresh")?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
@@ -2416,4 +2490,66 @@ mod tests {
|
||||
let selected = random_lark_ack_reaction(Some(&payload), "hello");
|
||||
assert!(LARK_ACK_REACTIONS_JA.contains(&selected));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn build_interactive_card_body_produces_correct_structure() {
|
||||
let body = build_interactive_card_body("oc_chat123", "**Hello** world");
|
||||
assert_eq!(body["receive_id"], "oc_chat123");
|
||||
assert_eq!(body["msg_type"], "interactive");
|
||||
|
||||
let content: serde_json::Value =
|
||||
serde_json::from_str(body["content"].as_str().unwrap()).unwrap();
|
||||
assert_eq!(content["schema"], "2.0");
|
||||
let elements = content["body"]["elements"].as_array().unwrap();
|
||||
assert_eq!(elements.len(), 1);
|
||||
assert_eq!(elements[0]["tag"], "markdown");
|
||||
assert_eq!(elements[0]["content"], "**Hello** world");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn build_card_content_produces_valid_json() {
|
||||
let content = build_card_content("# Title\n\n**Bold** text");
|
||||
let parsed: serde_json::Value = serde_json::from_str(&content).unwrap();
|
||||
assert_eq!(parsed["schema"], "2.0");
|
||||
assert_eq!(parsed["body"]["elements"][0]["tag"], "markdown");
|
||||
assert_eq!(
|
||||
parsed["body"]["elements"][0]["content"],
|
||||
"# Title\n\n**Bold** text"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn split_markdown_chunks_single_chunk_for_small_content() {
|
||||
let text = "Hello world";
|
||||
let chunks = split_markdown_chunks(text, LARK_CARD_MARKDOWN_MAX_BYTES);
|
||||
assert_eq!(chunks, vec!["Hello world"]);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn split_markdown_chunks_splits_on_newline_boundaries() {
|
||||
let line = "abcdefghij\n"; // 11 bytes per line
|
||||
let text = line.repeat(10); // 110 bytes total
|
||||
let chunks = split_markdown_chunks(&text, 33); // ~3 lines per chunk
|
||||
assert_eq!(chunks.len(), 4);
|
||||
for chunk in &chunks[..3] {
|
||||
assert!(chunk.len() <= 33);
|
||||
assert!(chunk.ends_with('\n'));
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn split_markdown_chunks_handles_no_newlines() {
|
||||
let text = "a".repeat(100);
|
||||
let chunks = split_markdown_chunks(&text, 30);
|
||||
assert!(chunks.len() > 1);
|
||||
let reassembled: String = chunks.concat();
|
||||
assert_eq!(reassembled, text);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn split_markdown_chunks_exact_boundary() {
|
||||
let text = "abc";
|
||||
let chunks = split_markdown_chunks(text, 3);
|
||||
assert_eq!(chunks, vec!["abc"]);
|
||||
}
|
||||
}
|
||||
|
||||
+147
-51
@@ -89,7 +89,10 @@ pub use whatsapp::WhatsAppChannel;
|
||||
#[cfg(feature = "whatsapp-web")]
|
||||
pub use whatsapp_web::WhatsAppWebChannel;
|
||||
|
||||
use crate::agent::loop_::{build_tool_instructions, run_tool_call_loop, scrub_credentials};
|
||||
use crate::agent::loop_::{
|
||||
build_tool_instructions, clear_model_switch_request, get_model_switch_state,
|
||||
is_model_switch_requested, run_tool_call_loop, scrub_credentials,
|
||||
};
|
||||
use crate::approval::ApprovalManager;
|
||||
use crate::config::Config;
|
||||
use crate::identity;
|
||||
@@ -2081,7 +2084,7 @@ async fn process_channel_message(
|
||||
}
|
||||
|
||||
let runtime_defaults = runtime_defaults_snapshot(ctx.as_ref());
|
||||
let active_provider = match get_or_create_provider(
|
||||
let mut active_provider = match get_or_create_provider(
|
||||
ctx.as_ref(),
|
||||
&route.provider,
|
||||
route.api_key.as_deref(),
|
||||
@@ -2203,30 +2206,63 @@ async fn process_channel_message(
|
||||
);
|
||||
}
|
||||
|
||||
// Only enrich with memory context when there is no prior conversation
|
||||
// history. Follow-up turns already include context from previous messages.
|
||||
if !had_prior_history {
|
||||
let memory_context = build_memory_context(
|
||||
// ── Dual-scope memory recall ──────────────────────────────────
|
||||
// Always recall before each LLM call (not just first turn).
|
||||
// For group chats: merge sender-scope + group-scope memories.
|
||||
// For DMs: sender-scope only.
|
||||
let is_group_chat =
|
||||
msg.reply_target.contains("@g.us") || msg.reply_target.starts_with("group:");
|
||||
|
||||
let mem_recall_start = Instant::now();
|
||||
let sender_memory_fut = build_memory_context(
|
||||
ctx.memory.as_ref(),
|
||||
&msg.content,
|
||||
ctx.min_relevance_score,
|
||||
Some(&msg.sender),
|
||||
);
|
||||
|
||||
let (sender_memory, group_memory) = if is_group_chat {
|
||||
let group_memory_fut = build_memory_context(
|
||||
ctx.memory.as_ref(),
|
||||
&msg.content,
|
||||
ctx.min_relevance_score,
|
||||
Some(&history_key),
|
||||
)
|
||||
.await;
|
||||
if let Some(last_turn) = prior_turns.last_mut() {
|
||||
if last_turn.role == "user" && !memory_context.is_empty() {
|
||||
last_turn.content = format!("{memory_context}{}", msg.content);
|
||||
}
|
||||
}
|
||||
}
|
||||
);
|
||||
tokio::join!(sender_memory_fut, group_memory_fut)
|
||||
} else {
|
||||
(sender_memory_fut.await, String::new())
|
||||
};
|
||||
#[allow(clippy::cast_possible_truncation)]
|
||||
let mem_recall_ms = mem_recall_start.elapsed().as_millis() as u64;
|
||||
tracing::info!(
|
||||
mem_recall_ms,
|
||||
sender_empty = sender_memory.is_empty(),
|
||||
group_empty = group_memory.is_empty(),
|
||||
"⏱ Memory recall completed"
|
||||
);
|
||||
|
||||
// Merge sender + group memories, avoiding duplicates
|
||||
let memory_context = if group_memory.is_empty() {
|
||||
sender_memory
|
||||
} else if sender_memory.is_empty() {
|
||||
group_memory
|
||||
} else {
|
||||
format!("{sender_memory}\n{group_memory}")
|
||||
};
|
||||
|
||||
// Use refreshed system prompt for new sessions (master's /new support),
|
||||
// and inject memory into system prompt (not user message) so it
|
||||
// doesn't pollute session history and is re-fetched each turn.
|
||||
let base_system_prompt = if had_prior_history {
|
||||
ctx.system_prompt.as_str().to_string()
|
||||
} else {
|
||||
refreshed_new_session_system_prompt(ctx.as_ref())
|
||||
};
|
||||
let system_prompt =
|
||||
let mut system_prompt =
|
||||
build_channel_system_prompt(&base_system_prompt, &msg.channel, &msg.reply_target);
|
||||
if !memory_context.is_empty() {
|
||||
let _ = write!(system_prompt, "\n\n{memory_context}");
|
||||
}
|
||||
let mut history = vec![ChatMessage::system(system_prompt)];
|
||||
history.extend(prior_turns);
|
||||
let use_streaming = target_channel
|
||||
@@ -2358,41 +2394,92 @@ async fn process_channel_message(
|
||||
Cancelled,
|
||||
}
|
||||
|
||||
let model_switch_callback = get_model_switch_state();
|
||||
let timeout_budget_secs =
|
||||
channel_message_timeout_budget_secs(ctx.message_timeout_secs, ctx.max_tool_iterations);
|
||||
let llm_result = tokio::select! {
|
||||
() = cancellation_token.cancelled() => LlmExecutionResult::Cancelled,
|
||||
result = tokio::time::timeout(
|
||||
Duration::from_secs(timeout_budget_secs),
|
||||
run_tool_call_loop(
|
||||
active_provider.as_ref(),
|
||||
&mut history,
|
||||
ctx.tools_registry.as_ref(),
|
||||
notify_observer.as_ref() as &dyn Observer,
|
||||
route.provider.as_str(),
|
||||
route.model.as_str(),
|
||||
runtime_defaults.temperature,
|
||||
true,
|
||||
Some(&*ctx.approval_manager),
|
||||
msg.channel.as_str(),
|
||||
Some(msg.reply_target.as_str()),
|
||||
&ctx.multimodal,
|
||||
ctx.max_tool_iterations,
|
||||
Some(cancellation_token.clone()),
|
||||
delta_tx,
|
||||
ctx.hooks.as_deref(),
|
||||
if msg.channel == "cli"
|
||||
|| ctx.autonomy_level == AutonomyLevel::Full
|
||||
let llm_call_start = Instant::now();
|
||||
#[allow(clippy::cast_possible_truncation)]
|
||||
let elapsed_before_llm_ms = started_at.elapsed().as_millis() as u64;
|
||||
tracing::info!(elapsed_before_llm_ms, "⏱ Starting LLM call");
|
||||
let llm_result = loop {
|
||||
let loop_result = tokio::select! {
|
||||
() = cancellation_token.cancelled() => LlmExecutionResult::Cancelled,
|
||||
result = tokio::time::timeout(
|
||||
Duration::from_secs(timeout_budget_secs),
|
||||
run_tool_call_loop(
|
||||
active_provider.as_ref(),
|
||||
&mut history,
|
||||
ctx.tools_registry.as_ref(),
|
||||
notify_observer.as_ref() as &dyn Observer,
|
||||
route.provider.as_str(),
|
||||
route.model.as_str(),
|
||||
runtime_defaults.temperature,
|
||||
true,
|
||||
Some(&*ctx.approval_manager),
|
||||
msg.channel.as_str(),
|
||||
Some(msg.reply_target.as_str()),
|
||||
&ctx.multimodal,
|
||||
ctx.max_tool_iterations,
|
||||
Some(cancellation_token.clone()),
|
||||
delta_tx.clone(),
|
||||
ctx.hooks.as_deref(),
|
||||
if msg.channel == "cli"
|
||||
|| ctx.autonomy_level == AutonomyLevel::Full
|
||||
{
|
||||
&[]
|
||||
} else {
|
||||
ctx.non_cli_excluded_tools.as_ref()
|
||||
},
|
||||
ctx.tool_call_dedup_exempt.as_ref(),
|
||||
ctx.activated_tools.as_ref(),
|
||||
Some(model_switch_callback.clone()),
|
||||
),
|
||||
) => LlmExecutionResult::Completed(result),
|
||||
};
|
||||
|
||||
// Handle model switch: re-create the provider and retry
|
||||
if let LlmExecutionResult::Completed(Ok(Err(ref e))) = loop_result {
|
||||
if let Some((new_provider, new_model)) = is_model_switch_requested(e) {
|
||||
tracing::info!(
|
||||
"Model switch requested, switching from {} {} to {} {}",
|
||||
route.provider,
|
||||
route.model,
|
||||
new_provider,
|
||||
new_model
|
||||
);
|
||||
|
||||
match create_resilient_provider_nonblocking(
|
||||
&new_provider,
|
||||
ctx.api_key.clone(),
|
||||
ctx.api_url.clone(),
|
||||
ctx.reliability.as_ref().clone(),
|
||||
ctx.provider_runtime_options.clone(),
|
||||
)
|
||||
.await
|
||||
{
|
||||
&[]
|
||||
} else {
|
||||
ctx.non_cli_excluded_tools.as_ref()
|
||||
},
|
||||
ctx.tool_call_dedup_exempt.as_ref(),
|
||||
ctx.activated_tools.as_ref(),
|
||||
None,
|
||||
),
|
||||
) => LlmExecutionResult::Completed(result),
|
||||
Ok(new_prov) => {
|
||||
active_provider = Arc::from(new_prov);
|
||||
route.provider = new_provider;
|
||||
route.model = new_model;
|
||||
clear_model_switch_request();
|
||||
|
||||
ctx.observer.record_event(&ObserverEvent::AgentStart {
|
||||
provider: route.provider.clone(),
|
||||
model: route.model.clone(),
|
||||
});
|
||||
|
||||
continue;
|
||||
}
|
||||
Err(err) => {
|
||||
tracing::error!("Failed to create provider after model switch: {err}");
|
||||
clear_model_switch_request();
|
||||
// Fall through with the original error
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
break loop_result;
|
||||
};
|
||||
|
||||
if let Some(handle) = draft_updater {
|
||||
@@ -2410,6 +2497,12 @@ async fn process_channel_message(
|
||||
let _ = handle.await;
|
||||
}
|
||||
|
||||
#[allow(clippy::cast_possible_truncation)]
|
||||
let llm_call_ms = llm_call_start.elapsed().as_millis() as u64;
|
||||
#[allow(clippy::cast_possible_truncation)]
|
||||
let total_ms = started_at.elapsed().as_millis() as u64;
|
||||
tracing::info!(llm_call_ms, total_ms, "⏱ LLM call completed");
|
||||
|
||||
if let Some(token) = typing_cancellation.as_ref() {
|
||||
token.cancel();
|
||||
}
|
||||
@@ -2518,6 +2611,7 @@ async fn process_channel_message(
|
||||
} else {
|
||||
sanitized_response
|
||||
};
|
||||
|
||||
runtime_trace::record_event(
|
||||
"channel_message_outbound",
|
||||
Some(msg.channel.as_str()),
|
||||
@@ -2592,7 +2686,7 @@ async fn process_channel_message(
|
||||
}
|
||||
} else if let Err(e) = channel
|
||||
.send(
|
||||
&SendMessage::new(delivered_response, &msg.reply_target)
|
||||
&SendMessage::new(&delivered_response, &msg.reply_target)
|
||||
.in_thread(msg.thread_ts.clone()),
|
||||
)
|
||||
.await
|
||||
@@ -8223,10 +8317,12 @@ BTC is currently around $65,000 based on latest tool output."#
|
||||
.unwrap_or_else(|e| e.into_inner());
|
||||
assert_eq!(calls.len(), 1);
|
||||
assert_eq!(calls[0].len(), 2);
|
||||
// Memory context is injected into the system prompt, not the user message.
|
||||
assert_eq!(calls[0][0].0, "system");
|
||||
assert!(calls[0][0].1.contains("[Memory context]"));
|
||||
assert!(calls[0][0].1.contains("Age is 45"));
|
||||
assert_eq!(calls[0][1].0, "user");
|
||||
assert!(calls[0][1].1.contains("[Memory context]"));
|
||||
assert!(calls[0][1].1.contains("Age is 45"));
|
||||
assert!(calls[0][1].1.contains("hello"));
|
||||
assert_eq!(calls[0][1].1, "hello");
|
||||
|
||||
let histories = runtime_ctx
|
||||
.conversation_histories
|
||||
|
||||
@@ -1034,6 +1034,12 @@ impl Channel for QQChannel {
|
||||
msg = read.next() => {
|
||||
let msg = match msg {
|
||||
Some(Ok(Message::Text(t))) => t,
|
||||
Some(Ok(Message::Ping(payload))) => {
|
||||
if write.send(Message::Pong(payload)).await.is_err() {
|
||||
break;
|
||||
}
|
||||
continue;
|
||||
}
|
||||
Some(Ok(Message::Close(_))) | None => break,
|
||||
_ => continue,
|
||||
};
|
||||
|
||||
+285
-1
@@ -30,6 +30,8 @@ pub struct SlackChannel {
|
||||
group_reply_allowed_sender_ids: Vec<String>,
|
||||
user_display_name_cache: Mutex<HashMap<String, CachedSlackDisplayName>>,
|
||||
workspace_dir: Option<PathBuf>,
|
||||
/// Maps channel_id -> thread_ts for active assistant threads (used for status indicators).
|
||||
active_assistant_thread: Mutex<HashMap<String, String>>,
|
||||
}
|
||||
|
||||
const SLACK_HISTORY_MAX_RETRIES: u32 = 3;
|
||||
@@ -48,6 +50,43 @@ const SLACK_ATTACHMENT_FILENAME_MAX_CHARS: usize = 128;
|
||||
const SLACK_USER_CACHE_MAX_ENTRIES: usize = 1000;
|
||||
const SLACK_ATTACHMENT_SAVE_SUBDIR: &str = "slack_files";
|
||||
const SLACK_ATTACHMENT_MAX_FILES_PER_MESSAGE: usize = 8;
|
||||
|
||||
/// Extract the Slack message timestamp from a ZeroClaw message ID.
|
||||
///
|
||||
/// Message IDs follow the format `slack_{channel_id}_{ts}` where `ts`
|
||||
/// contains a dot (e.g. `"1234567890.123456"`). If the format is
|
||||
/// unrecognised the raw `message_id` is returned as-is.
|
||||
fn extract_slack_ts(message_id: &str) -> &str {
|
||||
message_id
|
||||
.strip_prefix("slack_")
|
||||
.and_then(|rest| {
|
||||
rest.find('.').map(|dot_pos| {
|
||||
let underscore = rest[..dot_pos].rfind('_').unwrap_or(0);
|
||||
&rest[underscore + 1..]
|
||||
})
|
||||
})
|
||||
.unwrap_or(message_id)
|
||||
}
|
||||
|
||||
/// Map a Unicode emoji to its Slack short-name.
|
||||
///
|
||||
/// The orchestration layer passes Unicode characters (e.g. `"\u{1F440}"`).
|
||||
/// Slack's reactions API expects colon-free short-names (`"eyes"`).
|
||||
fn unicode_emoji_to_slack_name(emoji: &str) -> &str {
|
||||
match emoji {
|
||||
"\u{1F440}" => "eyes", // 👀
|
||||
"\u{2705}" => "white_check_mark", // ✅
|
||||
"\u{26A0}\u{FE0F}" | "\u{26A0}" => "warning", // ⚠️
|
||||
"\u{274C}" => "x", // ❌
|
||||
"\u{1F44D}" => "thumbsup", // 👍
|
||||
"\u{1F44E}" => "thumbsdown", // 👎
|
||||
"\u{2B50}" => "star", // ⭐
|
||||
"\u{1F389}" => "tada", // 🎉
|
||||
"\u{1F914}" => "thinking_face", // 🤔
|
||||
"\u{1F525}" => "fire", // 🔥
|
||||
_ => emoji.trim_matches(':'),
|
||||
}
|
||||
}
|
||||
const SLACK_ATTACHMENT_RENDER_CONCURRENCY: usize = 3;
|
||||
const SLACK_POLL_ACTIVE_THREAD_MAX: usize = 50;
|
||||
const SLACK_POLL_THREAD_EXPIRE_SECS: u64 = 24 * 60 * 60;
|
||||
@@ -81,6 +120,7 @@ impl SlackChannel {
|
||||
group_reply_allowed_sender_ids: Vec::new(),
|
||||
user_display_name_cache: Mutex::new(HashMap::new()),
|
||||
workspace_dir: None,
|
||||
active_assistant_thread: Mutex::new(HashMap::new()),
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1747,7 +1787,34 @@ impl SlackChannel {
|
||||
else {
|
||||
continue;
|
||||
};
|
||||
if event.get("type").and_then(|v| v.as_str()) != Some("message") {
|
||||
let event_type = event
|
||||
.get("type")
|
||||
.and_then(|v| v.as_str())
|
||||
.unwrap_or_default();
|
||||
|
||||
// Track assistant thread context for Assistants API status indicators.
|
||||
if event_type == "assistant_thread_started"
|
||||
|| event_type == "assistant_thread_context_changed"
|
||||
{
|
||||
if let Some(thread) = event.get("assistant_thread") {
|
||||
let ch = thread
|
||||
.get("channel_id")
|
||||
.and_then(|v| v.as_str())
|
||||
.unwrap_or_default();
|
||||
let tts = thread
|
||||
.get("thread_ts")
|
||||
.and_then(|v| v.as_str())
|
||||
.unwrap_or_default();
|
||||
if !ch.is_empty() && !tts.is_empty() {
|
||||
if let Ok(mut map) = self.active_assistant_thread.lock() {
|
||||
map.insert(ch.to_string(), tts.to_string());
|
||||
}
|
||||
}
|
||||
}
|
||||
continue;
|
||||
}
|
||||
|
||||
if event_type != "message" {
|
||||
continue;
|
||||
}
|
||||
let subtype = event.get("subtype").and_then(|v| v.as_str());
|
||||
@@ -1827,6 +1894,13 @@ impl SlackChannel {
|
||||
interruption_scope_id: Self::inbound_interruption_scope_id(event, ts),
|
||||
};
|
||||
|
||||
// Track thread context so start_typing can set assistant status.
|
||||
if let Some(ref tts) = channel_msg.thread_ts {
|
||||
if let Ok(mut map) = self.active_assistant_thread.lock() {
|
||||
map.insert(channel_id.clone(), tts.clone());
|
||||
}
|
||||
}
|
||||
|
||||
if tx.send(channel_msg).await.is_err() {
|
||||
return Ok(());
|
||||
}
|
||||
@@ -2234,6 +2308,96 @@ impl Channel for SlackChannel {
|
||||
Ok(())
|
||||
}
|
||||
|
||||
async fn add_reaction(
|
||||
&self,
|
||||
channel_id: &str,
|
||||
message_id: &str,
|
||||
emoji: &str,
|
||||
) -> anyhow::Result<()> {
|
||||
let ts = extract_slack_ts(message_id);
|
||||
let name = unicode_emoji_to_slack_name(emoji);
|
||||
|
||||
let body = serde_json::json!({
|
||||
"channel": channel_id,
|
||||
"timestamp": ts,
|
||||
"name": name
|
||||
});
|
||||
|
||||
let resp = self
|
||||
.http_client()
|
||||
.post("https://slack.com/api/reactions.add")
|
||||
.bearer_auth(&self.bot_token)
|
||||
.json(&body)
|
||||
.send()
|
||||
.await?;
|
||||
|
||||
let status = resp.status();
|
||||
let text = resp.text().await.unwrap_or_default();
|
||||
|
||||
if !status.is_success() {
|
||||
let sanitized = crate::providers::sanitize_api_error(&text);
|
||||
anyhow::bail!("Slack reactions.add failed ({status}): {sanitized}");
|
||||
}
|
||||
|
||||
let parsed: serde_json::Value = serde_json::from_str(&text).unwrap_or_default();
|
||||
if parsed.get("ok") == Some(&serde_json::Value::Bool(false)) {
|
||||
let err = parsed
|
||||
.get("error")
|
||||
.and_then(|e| e.as_str())
|
||||
.unwrap_or("unknown");
|
||||
if err != "already_reacted" {
|
||||
anyhow::bail!("Slack reactions.add failed: {err}");
|
||||
}
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
async fn remove_reaction(
|
||||
&self,
|
||||
channel_id: &str,
|
||||
message_id: &str,
|
||||
emoji: &str,
|
||||
) -> anyhow::Result<()> {
|
||||
let ts = extract_slack_ts(message_id);
|
||||
let name = unicode_emoji_to_slack_name(emoji);
|
||||
|
||||
let body = serde_json::json!({
|
||||
"channel": channel_id,
|
||||
"timestamp": ts,
|
||||
"name": name
|
||||
});
|
||||
|
||||
let resp = self
|
||||
.http_client()
|
||||
.post("https://slack.com/api/reactions.remove")
|
||||
.bearer_auth(&self.bot_token)
|
||||
.json(&body)
|
||||
.send()
|
||||
.await?;
|
||||
|
||||
let status = resp.status();
|
||||
let text = resp.text().await.unwrap_or_default();
|
||||
|
||||
if !status.is_success() {
|
||||
let sanitized = crate::providers::sanitize_api_error(&text);
|
||||
anyhow::bail!("Slack reactions.remove failed ({status}): {sanitized}");
|
||||
}
|
||||
|
||||
let parsed: serde_json::Value = serde_json::from_str(&text).unwrap_or_default();
|
||||
if parsed.get("ok") == Some(&serde_json::Value::Bool(false)) {
|
||||
let err = parsed
|
||||
.get("error")
|
||||
.and_then(|e| e.as_str())
|
||||
.unwrap_or("unknown");
|
||||
if err != "no_reaction" {
|
||||
anyhow::bail!("Slack reactions.remove failed: {err}");
|
||||
}
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
async fn listen(&self, tx: tokio::sync::mpsc::Sender<ChannelMessage>) -> anyhow::Result<()> {
|
||||
let bot_user_id = self.get_bot_user_id().await.unwrap_or_default();
|
||||
let scoped_channels = self.scoped_channel_ids();
|
||||
@@ -2512,6 +2676,49 @@ impl Channel for SlackChannel {
|
||||
};
|
||||
Self::evaluate_health(bot_ok, socket_mode_enabled, socket_mode_ok)
|
||||
}
|
||||
|
||||
async fn start_typing(&self, recipient: &str) -> anyhow::Result<()> {
|
||||
let thread_ts = {
|
||||
let map = self
|
||||
.active_assistant_thread
|
||||
.lock()
|
||||
.map_err(|e| anyhow::anyhow!("lock poisoned: {e}"))?;
|
||||
match map.get(recipient) {
|
||||
Some(ts) => ts.clone(),
|
||||
None => return Ok(()),
|
||||
}
|
||||
};
|
||||
|
||||
let body = serde_json::json!({
|
||||
"channel_id": recipient,
|
||||
"thread_ts": thread_ts,
|
||||
"status": "is thinking...",
|
||||
});
|
||||
|
||||
// Gracefully ignore errors — non-assistant contexts will return errors.
|
||||
if let Ok(resp) = self
|
||||
.http_client()
|
||||
.post("https://slack.com/api/assistant.threads.setStatus")
|
||||
.bearer_auth(&self.bot_token)
|
||||
.json(&body)
|
||||
.send()
|
||||
.await
|
||||
{
|
||||
if !resp.status().is_success() {
|
||||
tracing::debug!(
|
||||
"assistant.threads.setStatus returned {}; ignoring",
|
||||
resp.status()
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
async fn stop_typing(&self, _recipient: &str) -> anyhow::Result<()> {
|
||||
// Status auto-clears when the bot sends a message via chat.postMessage.
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
@@ -3296,6 +3503,48 @@ mod tests {
|
||||
)));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn extract_slack_ts_from_standard_message_id() {
|
||||
assert_eq!(
|
||||
extract_slack_ts("slack_C1234567890_1234567890.123456"),
|
||||
"1234567890.123456"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn extract_slack_ts_from_raw_ts_passthrough() {
|
||||
assert_eq!(extract_slack_ts("1234567890.123456"), "1234567890.123456");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn extract_slack_ts_from_unprefixed_id() {
|
||||
assert_eq!(extract_slack_ts("unknown_format"), "unknown_format");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn unicode_emoji_maps_to_slack_eyes() {
|
||||
assert_eq!(unicode_emoji_to_slack_name("\u{1F440}"), "eyes");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn unicode_emoji_maps_to_slack_check_mark() {
|
||||
assert_eq!(unicode_emoji_to_slack_name("\u{2705}"), "white_check_mark");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn unicode_emoji_maps_to_slack_warning() {
|
||||
assert_eq!(unicode_emoji_to_slack_name("\u{26A0}\u{FE0F}"), "warning");
|
||||
assert_eq!(unicode_emoji_to_slack_name("\u{26A0}"), "warning");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn unicode_emoji_colon_wrapped_passthrough() {
|
||||
assert_eq!(
|
||||
unicode_emoji_to_slack_name(":custom_emoji:"),
|
||||
"custom_emoji"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn inbound_thread_ts_on_thread_reply_uses_thread_ts() {
|
||||
let reply = serde_json::json!({
|
||||
@@ -3380,4 +3629,39 @@ mod tests {
|
||||
let key2 = super::super::conversation_history_key(&msg2);
|
||||
assert_ne!(key1, key2, "session key should differ per thread");
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn start_typing_requires_thread_context() {
|
||||
let ch = SlackChannel::new("xoxb-fake".into(), None, None, vec![], vec![]);
|
||||
// No thread_ts tracked for "C999" — start_typing should be a no-op (Ok).
|
||||
let result = ch.start_typing("C999").await;
|
||||
assert!(
|
||||
result.is_ok(),
|
||||
"start_typing should succeed as no-op without thread context"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn assistant_thread_tracking() {
|
||||
let ch = SlackChannel::new("xoxb-fake".into(), None, None, vec![], vec![]);
|
||||
|
||||
// Initially empty.
|
||||
{
|
||||
let map = ch.active_assistant_thread.lock().unwrap();
|
||||
assert!(map.is_empty());
|
||||
}
|
||||
|
||||
// Simulate storing a thread_ts (as listen_socket_mode would).
|
||||
{
|
||||
let mut map = ch.active_assistant_thread.lock().unwrap();
|
||||
map.insert("C123".to_string(), "1741234567.000100".to_string());
|
||||
}
|
||||
|
||||
// Verify retrieval.
|
||||
{
|
||||
let map = ch.active_assistant_thread.lock().unwrap();
|
||||
assert_eq!(map.get("C123"), Some(&"1741234567.000100".to_string()),);
|
||||
assert_eq!(map.get("C999"), None);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -727,8 +727,41 @@ pub async fn transcribe_audio(
|
||||
// are reported before missing-key errors (preserves original behavior).
|
||||
validate_audio(&audio_data, file_name)?;
|
||||
|
||||
let groq = GroqProvider::from_config(config)?;
|
||||
groq.transcribe(&audio_data, file_name).await
|
||||
match config.default_provider.as_str() {
|
||||
"groq" => {
|
||||
let groq = GroqProvider::from_config(config)?;
|
||||
groq.transcribe(&audio_data, file_name).await
|
||||
}
|
||||
"openai" => {
|
||||
let openai_cfg = config.openai.as_ref().context(
|
||||
"Default transcription provider 'openai' is not configured. Add [transcription.openai]",
|
||||
)?;
|
||||
let openai = OpenAiWhisperProvider::from_config(openai_cfg)?;
|
||||
openai.transcribe(&audio_data, file_name).await
|
||||
}
|
||||
"deepgram" => {
|
||||
let deepgram_cfg = config.deepgram.as_ref().context(
|
||||
"Default transcription provider 'deepgram' is not configured. Add [transcription.deepgram]",
|
||||
)?;
|
||||
let deepgram = DeepgramProvider::from_config(deepgram_cfg)?;
|
||||
deepgram.transcribe(&audio_data, file_name).await
|
||||
}
|
||||
"assemblyai" => {
|
||||
let assemblyai_cfg = config.assemblyai.as_ref().context(
|
||||
"Default transcription provider 'assemblyai' is not configured. Add [transcription.assemblyai]",
|
||||
)?;
|
||||
let assemblyai = AssemblyAiProvider::from_config(assemblyai_cfg)?;
|
||||
assemblyai.transcribe(&audio_data, file_name).await
|
||||
}
|
||||
"google" => {
|
||||
let google_cfg = config.google.as_ref().context(
|
||||
"Default transcription provider 'google' is not configured. Add [transcription.google]",
|
||||
)?;
|
||||
let google = GoogleSttProvider::from_config(google_cfg)?;
|
||||
google.transcribe(&audio_data, file_name).await
|
||||
}
|
||||
other => bail!("Unsupported transcription provider '{other}'"),
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
@@ -786,6 +819,25 @@ mod tests {
|
||||
);
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn openai_default_provider_uses_openai_config() {
|
||||
let data = vec![0u8; 100];
|
||||
let mut config = TranscriptionConfig::default();
|
||||
config.default_provider = "openai".to_string();
|
||||
config.openai = Some(crate::config::OpenAiSttConfig {
|
||||
api_key: None,
|
||||
model: "gpt-4o-mini-transcribe".to_string(),
|
||||
});
|
||||
|
||||
let err = transcribe_audio(data, "test.ogg", &config)
|
||||
.await
|
||||
.unwrap_err();
|
||||
assert!(
|
||||
err.to_string().contains("[transcription.openai].api_key"),
|
||||
"expected openai-specific missing-key error, got: {err}"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn mime_for_audio_maps_accepted_formats() {
|
||||
let cases = [
|
||||
|
||||
@@ -0,0 +1,152 @@
|
||||
use anyhow::{bail, Result};
|
||||
use std::io::{BufRead, Write};
|
||||
|
||||
#[derive(Debug, Clone, Default)]
|
||||
pub struct Input {
|
||||
prompt: String,
|
||||
default: Option<String>,
|
||||
allow_empty: bool,
|
||||
}
|
||||
|
||||
impl Input {
|
||||
#[must_use]
|
||||
pub fn new() -> Self {
|
||||
Self {
|
||||
prompt: String::new(),
|
||||
default: None,
|
||||
allow_empty: false,
|
||||
}
|
||||
}
|
||||
|
||||
#[must_use]
|
||||
pub fn with_prompt<S: Into<String>>(mut self, prompt: S) -> Self {
|
||||
self.prompt = prompt.into();
|
||||
self
|
||||
}
|
||||
|
||||
#[must_use]
|
||||
pub fn allow_empty(mut self, val: bool) -> Self {
|
||||
self.allow_empty = val;
|
||||
self
|
||||
}
|
||||
|
||||
#[must_use]
|
||||
pub fn default<S: Into<String>>(mut self, value: S) -> Self {
|
||||
self.default = Some(value.into());
|
||||
self
|
||||
}
|
||||
|
||||
pub fn interact_text(self) -> Result<String> {
|
||||
let stdin = std::io::stdin();
|
||||
let stdout = std::io::stdout();
|
||||
self.interact_text_with_io(stdin.lock(), stdout.lock())
|
||||
}
|
||||
|
||||
fn interact_text_with_io<R: BufRead, W: Write>(
|
||||
self,
|
||||
mut reader: R,
|
||||
mut writer: W,
|
||||
) -> Result<String> {
|
||||
loop {
|
||||
write!(writer, "{}", self.render_prompt())?;
|
||||
writer.flush()?;
|
||||
|
||||
let mut line = String::new();
|
||||
let bytes_read = reader.read_line(&mut line)?;
|
||||
if bytes_read == 0 {
|
||||
bail!("No input received from stdin");
|
||||
}
|
||||
|
||||
let trimmed = trim_trailing_line_ending(&line);
|
||||
if trimmed.is_empty() {
|
||||
if let Some(default) = &self.default {
|
||||
return Ok(default.clone());
|
||||
}
|
||||
if self.allow_empty {
|
||||
return Ok(String::new());
|
||||
}
|
||||
writeln!(writer, "Input cannot be empty.")?;
|
||||
continue;
|
||||
}
|
||||
|
||||
return Ok(trimmed.to_string());
|
||||
}
|
||||
}
|
||||
|
||||
fn render_prompt(&self) -> String {
|
||||
match &self.default {
|
||||
Some(default) => format!("{} [{}]: ", self.prompt, default),
|
||||
None => format!("{}: ", self.prompt),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn trim_trailing_line_ending(input: &str) -> &str {
|
||||
input.trim_end_matches(['\n', '\r'])
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::{trim_trailing_line_ending, Input};
|
||||
use anyhow::Result;
|
||||
use std::io::Cursor;
|
||||
|
||||
#[test]
|
||||
fn trim_trailing_line_ending_strips_newlines() {
|
||||
assert_eq!(trim_trailing_line_ending("value\n"), "value");
|
||||
assert_eq!(trim_trailing_line_ending("value\r\n"), "value");
|
||||
assert_eq!(trim_trailing_line_ending("value\r"), "value");
|
||||
assert_eq!(trim_trailing_line_ending("value"), "value");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn interact_text_returns_typed_value_without_newline() -> Result<()> {
|
||||
let input = Input::new().with_prompt("Prompt");
|
||||
let mut output = Vec::new();
|
||||
|
||||
let value = input.interact_text_with_io(Cursor::new(b"typed-value\n"), &mut output)?;
|
||||
|
||||
assert_eq!(value, "typed-value");
|
||||
assert_eq!(String::from_utf8(output)?, "Prompt: ");
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn interact_text_returns_default_for_blank_input() -> Result<()> {
|
||||
let input = Input::new().with_prompt("Prompt").default("fallback");
|
||||
let mut output = Vec::new();
|
||||
|
||||
let value = input.interact_text_with_io(Cursor::new(b"\n"), &mut output)?;
|
||||
|
||||
assert_eq!(value, "fallback");
|
||||
assert_eq!(String::from_utf8(output)?, "Prompt [fallback]: ");
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn interact_text_allows_empty_when_requested() -> Result<()> {
|
||||
let input = Input::new().with_prompt("Prompt").allow_empty(true);
|
||||
let mut output = Vec::new();
|
||||
|
||||
let value = input.interact_text_with_io(Cursor::new(b"\n"), &mut output)?;
|
||||
|
||||
assert_eq!(value, "");
|
||||
assert_eq!(String::from_utf8(output)?, "Prompt: ");
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn interact_text_reprompts_when_empty_is_not_allowed() -> Result<()> {
|
||||
let input = Input::new().with_prompt("Prompt");
|
||||
let mut output = Vec::new();
|
||||
|
||||
let value = input.interact_text_with_io(Cursor::new(b"\nsecond-try\n"), &mut output)?;
|
||||
|
||||
assert_eq!(value, "second-try");
|
||||
assert_eq!(
|
||||
String::from_utf8(output)?,
|
||||
"Prompt: Input cannot be empty.\nPrompt: "
|
||||
);
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
+2
-2
@@ -26,8 +26,8 @@ pub use schema::{
|
||||
SecurityOpsConfig, SkillCreationConfig, SkillsConfig, SkillsPromptInjectionMode, SlackConfig,
|
||||
StorageConfig, StorageProviderConfig, StorageProviderSection, StreamMode, SwarmConfig,
|
||||
SwarmStrategy, TelegramConfig, TextBrowserConfig, ToolFilterGroup, ToolFilterGroupMode,
|
||||
TranscriptionConfig, TtsConfig, TunnelConfig, WebFetchConfig, WebSearchConfig, WebhookConfig,
|
||||
WorkspaceConfig, DEFAULT_GWS_SERVICES,
|
||||
TranscriptionConfig, TtsConfig, TunnelConfig, VerifiableIntentConfig, WebFetchConfig,
|
||||
WebSearchConfig, WebhookConfig, WorkspaceConfig, DEFAULT_GWS_SERVICES,
|
||||
};
|
||||
|
||||
pub fn name_and_presence<T: traits::ChannelConfig>(channel: Option<&T>) -> (&'static str, bool) {
|
||||
|
||||
+201
-28
@@ -367,6 +367,10 @@ pub struct Config {
|
||||
/// `LANG`, or `LC_ALL` environment variables (defaulting to `"en"`).
|
||||
#[serde(default)]
|
||||
pub locale: Option<String>,
|
||||
|
||||
/// Verifiable Intent (VI) credential verification and issuance (`[verifiable_intent]`).
|
||||
#[serde(default)]
|
||||
pub verifiable_intent: VerifiableIntentConfig,
|
||||
}
|
||||
|
||||
/// Multi-client workspace isolation configuration.
|
||||
@@ -875,6 +879,33 @@ impl Default for McpConfig {
|
||||
}
|
||||
}
|
||||
|
||||
/// Verifiable Intent (VI) credential verification and issuance (`[verifiable_intent]` section).
|
||||
#[derive(Debug, Clone, Serialize, Deserialize, JsonSchema)]
|
||||
pub struct VerifiableIntentConfig {
|
||||
/// Enable VI credential verification on commerce tool calls (default: false).
|
||||
#[serde(default)]
|
||||
pub enabled: bool,
|
||||
|
||||
/// Strictness mode for constraint evaluation: "strict" (fail-closed on unknown
|
||||
/// constraint types) or "permissive" (skip unknown types with a warning).
|
||||
/// Default: "strict".
|
||||
#[serde(default = "default_vi_strictness")]
|
||||
pub strictness: String,
|
||||
}
|
||||
|
||||
fn default_vi_strictness() -> String {
|
||||
"strict".to_owned()
|
||||
}
|
||||
|
||||
impl Default for VerifiableIntentConfig {
|
||||
fn default() -> Self {
|
||||
Self {
|
||||
enabled: false,
|
||||
strictness: default_vi_strictness(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// ── Nodes (Dynamic Node Discovery) ───────────────────────────────
|
||||
|
||||
/// Configuration for the dynamic node discovery system (`[nodes]`).
|
||||
@@ -3470,6 +3501,77 @@ impl Default for QdrantConfig {
|
||||
}
|
||||
}
|
||||
|
||||
/// Configuration for the mem0 (OpenMemory) memory backend.
|
||||
///
|
||||
/// Connects to a self-hosted OpenMemory server via its REST API.
|
||||
/// Deploy OpenMemory with `docker compose up` from the mem0 repo,
|
||||
/// then point `url` at the API (default `http://localhost:8765`).
|
||||
///
|
||||
/// ```toml
|
||||
/// [memory]
|
||||
/// backend = "mem0"
|
||||
///
|
||||
/// [memory.mem0]
|
||||
/// url = "http://localhost:8765"
|
||||
/// user_id = "zeroclaw"
|
||||
/// app_name = "zeroclaw"
|
||||
/// ```
|
||||
#[derive(Debug, Clone, Serialize, Deserialize, JsonSchema)]
|
||||
pub struct Mem0Config {
|
||||
/// OpenMemory server URL (e.g. `http://localhost:8765`).
|
||||
/// Falls back to `MEM0_URL` env var if not set.
|
||||
#[serde(default = "default_mem0_url")]
|
||||
pub url: String,
|
||||
/// User ID for scoping memories within mem0.
|
||||
/// Falls back to `MEM0_USER_ID` env var, or default `"zeroclaw"`.
|
||||
#[serde(default = "default_mem0_user_id")]
|
||||
pub user_id: String,
|
||||
/// Application name registered in mem0.
|
||||
/// Falls back to `MEM0_APP_NAME` env var, or default `"zeroclaw"`.
|
||||
#[serde(default = "default_mem0_app_name")]
|
||||
pub app_name: String,
|
||||
/// Whether mem0 should use its built-in LLM to extract facts from
|
||||
/// stored text (`infer = true`) or store raw text as-is (`false`).
|
||||
#[serde(default = "default_mem0_infer")]
|
||||
pub infer: bool,
|
||||
/// Custom prompt for guiding LLM-based fact extraction when `infer = true`.
|
||||
/// Useful for non-English content (e.g. Cantonese/Chinese).
|
||||
/// Falls back to `MEM0_EXTRACTION_PROMPT` env var.
|
||||
/// If unset, the mem0 server uses its built-in default prompt.
|
||||
#[serde(default = "default_mem0_extraction_prompt")]
|
||||
pub extraction_prompt: Option<String>,
|
||||
}
|
||||
|
||||
fn default_mem0_url() -> String {
|
||||
std::env::var("MEM0_URL").unwrap_or_else(|_| "http://localhost:8765".into())
|
||||
}
|
||||
fn default_mem0_user_id() -> String {
|
||||
std::env::var("MEM0_USER_ID").unwrap_or_else(|_| "zeroclaw".into())
|
||||
}
|
||||
fn default_mem0_app_name() -> String {
|
||||
std::env::var("MEM0_APP_NAME").unwrap_or_else(|_| "zeroclaw".into())
|
||||
}
|
||||
fn default_mem0_infer() -> bool {
|
||||
true
|
||||
}
|
||||
fn default_mem0_extraction_prompt() -> Option<String> {
|
||||
std::env::var("MEM0_EXTRACTION_PROMPT")
|
||||
.ok()
|
||||
.filter(|s| !s.trim().is_empty())
|
||||
}
|
||||
|
||||
impl Default for Mem0Config {
|
||||
fn default() -> Self {
|
||||
Self {
|
||||
url: default_mem0_url(),
|
||||
user_id: default_mem0_user_id(),
|
||||
app_name: default_mem0_app_name(),
|
||||
infer: default_mem0_infer(),
|
||||
extraction_prompt: default_mem0_extraction_prompt(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Serialize, Deserialize, JsonSchema)]
|
||||
#[allow(clippy::struct_excessive_bools)]
|
||||
pub struct MemoryConfig {
|
||||
@@ -3555,6 +3657,13 @@ pub struct MemoryConfig {
|
||||
/// Only used when `backend = "qdrant"`.
|
||||
#[serde(default)]
|
||||
pub qdrant: QdrantConfig,
|
||||
|
||||
// ── Mem0 backend options ─────────────────────────────────
|
||||
/// Configuration for mem0 (OpenMemory) backend.
|
||||
/// Only used when `backend = "mem0"`.
|
||||
/// Requires `--features memory-mem0` at build time.
|
||||
#[serde(default)]
|
||||
pub mem0: Mem0Config,
|
||||
}
|
||||
|
||||
fn default_embedding_provider() -> String {
|
||||
@@ -3630,6 +3739,7 @@ impl Default for MemoryConfig {
|
||||
auto_hydrate: true,
|
||||
sqlite_open_timeout_secs: None,
|
||||
qdrant: QdrantConfig::default(),
|
||||
mem0: Mem0Config::default(),
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -3829,7 +3939,16 @@ pub struct AutonomyConfig {
|
||||
}
|
||||
|
||||
fn default_auto_approve() -> Vec<String> {
|
||||
vec!["file_read".into(), "memory_recall".into()]
|
||||
vec![
|
||||
"file_read".into(),
|
||||
"memory_recall".into(),
|
||||
"web_search_tool".into(),
|
||||
"web_fetch".into(),
|
||||
"calculator".into(),
|
||||
"glob_search".into(),
|
||||
"content_search".into(),
|
||||
"image_info".into(),
|
||||
]
|
||||
}
|
||||
|
||||
fn default_always_ask() -> Vec<String> {
|
||||
@@ -6345,6 +6464,7 @@ impl Default for Config {
|
||||
linkedin: LinkedInConfig::default(),
|
||||
plugins: PluginsConfig::default(),
|
||||
locale: None,
|
||||
verifiable_intent: VerifiableIntentConfig::default(),
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -6430,14 +6550,23 @@ async fn load_persisted_workspace_dirs(
|
||||
}
|
||||
|
||||
pub(crate) async fn persist_active_workspace_config_dir(config_dir: &Path) -> Result<()> {
|
||||
let default_config_dir = default_config_dir()?;
|
||||
let state_path = active_workspace_state_path(&default_config_dir);
|
||||
persist_active_workspace_config_dir_in(config_dir, &default_config_dir()?).await
|
||||
}
|
||||
|
||||
// Guard: never persist a temp-directory path as the active workspace.
|
||||
// This prevents transient test runs or one-off invocations from hijacking
|
||||
// the daemon's config resolution.
|
||||
#[cfg(not(test))]
|
||||
if is_temp_directory(config_dir) {
|
||||
/// Inner implementation that accepts the default config directory explicitly,
|
||||
/// so callers (including tests) control where the marker is written without
|
||||
/// manipulating process-wide environment variables.
|
||||
async fn persist_active_workspace_config_dir_in(
|
||||
config_dir: &Path,
|
||||
default_config_dir: &Path,
|
||||
) -> Result<()> {
|
||||
let state_path = active_workspace_state_path(default_config_dir);
|
||||
|
||||
// Guard: refuse to write a temp-directory config_dir into a non-temp
|
||||
// default location. This prevents transient test runs or one-off
|
||||
// invocations from hijacking the real user's daemon config resolution.
|
||||
// When both paths are temp (e.g. in tests), the write is harmless.
|
||||
if is_temp_directory(config_dir) && !is_temp_directory(default_config_dir) {
|
||||
tracing::warn!(
|
||||
path = %config_dir.display(),
|
||||
"Refusing to persist temp directory as active workspace marker"
|
||||
@@ -6491,7 +6620,7 @@ pub(crate) async fn persist_active_workspace_config_dir(config_dir: &Path) -> Re
|
||||
);
|
||||
}
|
||||
|
||||
sync_directory(&default_config_dir).await?;
|
||||
sync_directory(default_config_dir).await?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
@@ -6871,8 +7000,45 @@ impl Config {
|
||||
)
|
||||
.context("Failed to deserialize config file")?;
|
||||
|
||||
// Warn about each unknown config key
|
||||
// Warn about each unknown config key.
|
||||
// serde_ignored + #[serde(default)] on nested structs can produce
|
||||
// false positives: parent-level fields get re-reported under the
|
||||
// nested key (e.g. "memory.mem0.auto_hydrate" even though
|
||||
// auto_hydrate belongs to MemoryConfig, not Mem0Config). We
|
||||
// suppress these by checking whether the leaf key is a known field
|
||||
// on the parent struct.
|
||||
let known_memory_fields: &[&str] = &[
|
||||
"backend",
|
||||
"auto_save",
|
||||
"hygiene_enabled",
|
||||
"archive_after_days",
|
||||
"purge_after_days",
|
||||
"conversation_retention_days",
|
||||
"embedding_provider",
|
||||
"embedding_model",
|
||||
"embedding_dimensions",
|
||||
"vector_weight",
|
||||
"keyword_weight",
|
||||
"min_relevance_score",
|
||||
"embedding_cache_size",
|
||||
"chunk_max_tokens",
|
||||
"response_cache_enabled",
|
||||
"response_cache_ttl_minutes",
|
||||
"response_cache_max_entries",
|
||||
"response_cache_hot_entries",
|
||||
"snapshot_enabled",
|
||||
"snapshot_on_hygiene",
|
||||
"auto_hydrate",
|
||||
"sqlite_open_timeout_secs",
|
||||
];
|
||||
for path in ignored_paths {
|
||||
// Skip false positives from nested memory sub-sections
|
||||
if path.starts_with("memory.mem0.") || path.starts_with("memory.qdrant.") {
|
||||
let leaf = path.rsplit('.').next().unwrap_or("");
|
||||
if known_memory_fields.contains(&leaf) {
|
||||
continue;
|
||||
}
|
||||
}
|
||||
tracing::warn!(
|
||||
"Unknown config key ignored: \"{}\". Check config.toml for typos or deprecated options.",
|
||||
path
|
||||
@@ -7386,6 +7552,9 @@ impl Config {
|
||||
"security.otp.cache_valid_secs must be greater than or equal to security.otp.token_ttl_secs"
|
||||
);
|
||||
}
|
||||
if self.security.otp.challenge_max_attempts == 0 {
|
||||
anyhow::bail!("security.otp.challenge_max_attempts must be greater than 0");
|
||||
}
|
||||
for (i, action) in self.security.otp.gated_actions.iter().enumerate() {
|
||||
let normalized = action.trim();
|
||||
if normalized.is_empty() {
|
||||
@@ -9190,6 +9359,7 @@ default_temperature = 0.7
|
||||
linkedin: LinkedInConfig::default(),
|
||||
plugins: PluginsConfig::default(),
|
||||
locale: None,
|
||||
verifiable_intent: VerifiableIntentConfig::default(),
|
||||
};
|
||||
|
||||
let toml_str = toml::to_string_pretty(&config).unwrap();
|
||||
@@ -9527,6 +9697,7 @@ tool_dispatcher = "xml"
|
||||
linkedin: LinkedInConfig::default(),
|
||||
plugins: PluginsConfig::default(),
|
||||
locale: None,
|
||||
verifiable_intent: VerifiableIntentConfig::default(),
|
||||
};
|
||||
|
||||
config.save().await.unwrap();
|
||||
@@ -11233,6 +11404,7 @@ default_model = "legacy-model"
|
||||
let _env_guard = env_override_lock().await;
|
||||
let temp_home =
|
||||
std::env::temp_dir().join(format!("zeroclaw_test_home_{}", uuid::Uuid::new_v4()));
|
||||
let temp_default_dir = temp_home.join(".zeroclaw");
|
||||
let custom_config_dir = temp_home.join("profiles").join("agent-alpha");
|
||||
|
||||
fs::create_dir_all(&custom_config_dir).await.unwrap();
|
||||
@@ -11243,14 +11415,19 @@ default_model = "legacy-model"
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
// Write the marker using the explicit default dir (no HOME manipulation
|
||||
// needed for the persist call itself).
|
||||
persist_active_workspace_config_dir_in(&custom_config_dir, &temp_default_dir)
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
// Config::load_or_init still reads HOME to find the marker, so we
|
||||
// must override HOME here. The persist above already wrote to the
|
||||
// correct temp location, so no stale marker can leak.
|
||||
let original_home = std::env::var("HOME").ok();
|
||||
std::env::set_var("HOME", &temp_home);
|
||||
std::env::remove_var("ZEROCLAW_WORKSPACE");
|
||||
|
||||
persist_active_workspace_config_dir(&custom_config_dir)
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
let config = Box::pin(Config::load_or_init()).await.unwrap();
|
||||
|
||||
assert_eq!(config.config_path, custom_config_dir.join("config.toml"));
|
||||
@@ -11270,6 +11447,7 @@ default_model = "legacy-model"
|
||||
let _env_guard = env_override_lock().await;
|
||||
let temp_home =
|
||||
std::env::temp_dir().join(format!("zeroclaw_test_home_{}", uuid::Uuid::new_v4()));
|
||||
let temp_default_dir = temp_home.join(".zeroclaw");
|
||||
let marker_config_dir = temp_home.join("profiles").join("persisted-profile");
|
||||
let env_workspace_dir = temp_home.join("env-workspace");
|
||||
|
||||
@@ -11281,11 +11459,13 @@ default_model = "legacy-model"
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
let original_home = std::env::var("HOME").ok();
|
||||
std::env::set_var("HOME", &temp_home);
|
||||
persist_active_workspace_config_dir(&marker_config_dir)
|
||||
// Write marker via explicit default dir, then set HOME for load_or_init.
|
||||
persist_active_workspace_config_dir_in(&marker_config_dir, &temp_default_dir)
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
let original_home = std::env::var("HOME").ok();
|
||||
std::env::set_var("HOME", &temp_home);
|
||||
std::env::set_var("ZEROCLAW_WORKSPACE", &env_workspace_dir);
|
||||
|
||||
let config = Box::pin(Config::load_or_init()).await.unwrap();
|
||||
@@ -11304,31 +11484,24 @@ default_model = "legacy-model"
|
||||
|
||||
#[test]
|
||||
async fn persist_active_workspace_marker_is_cleared_for_default_config_dir() {
|
||||
let _env_guard = env_override_lock().await;
|
||||
let temp_home =
|
||||
std::env::temp_dir().join(format!("zeroclaw_test_home_{}", uuid::Uuid::new_v4()));
|
||||
let default_config_dir = temp_home.join(".zeroclaw");
|
||||
let custom_config_dir = temp_home.join("profiles").join("custom-profile");
|
||||
let marker_path = default_config_dir.join(ACTIVE_WORKSPACE_STATE_FILE);
|
||||
|
||||
let original_home = std::env::var("HOME").ok();
|
||||
std::env::set_var("HOME", &temp_home);
|
||||
|
||||
persist_active_workspace_config_dir(&custom_config_dir)
|
||||
// Use the _in variant directly -- no HOME manipulation needed since
|
||||
// this test only exercises persist/clear logic, not Config::load_or_init.
|
||||
persist_active_workspace_config_dir_in(&custom_config_dir, &default_config_dir)
|
||||
.await
|
||||
.unwrap();
|
||||
assert!(marker_path.exists());
|
||||
|
||||
persist_active_workspace_config_dir(&default_config_dir)
|
||||
persist_active_workspace_config_dir_in(&default_config_dir, &default_config_dir)
|
||||
.await
|
||||
.unwrap();
|
||||
assert!(!marker_path.exists());
|
||||
|
||||
if let Some(home) = original_home {
|
||||
std::env::set_var("HOME", home);
|
||||
} else {
|
||||
std::env::remove_var("HOME");
|
||||
}
|
||||
let _ = fs::remove_dir_all(temp_home).await;
|
||||
}
|
||||
|
||||
|
||||
+1
-1
@@ -62,7 +62,7 @@ pub(crate) fn validate_delivery_config(delivery: Option<&DeliveryConfig>) -> Res
|
||||
bail!("delivery.channel is required for announce mode");
|
||||
};
|
||||
match channel.to_ascii_lowercase().as_str() {
|
||||
"telegram" | "discord" | "slack" | "mattermost" | "signal" | "matrix" => {}
|
||||
"telegram" | "discord" | "slack" | "mattermost" | "signal" | "matrix" | "qq" => {}
|
||||
other => bail!("unsupported delivery channel: {other}"),
|
||||
}
|
||||
|
||||
|
||||
+70
-35
@@ -78,6 +78,14 @@ pub struct CronAddBody {
|
||||
pub delete_after_run: Option<bool>,
|
||||
}
|
||||
|
||||
#[derive(Deserialize)]
|
||||
pub struct CronPatchBody {
|
||||
pub name: Option<String>,
|
||||
pub schedule: Option<String>,
|
||||
pub command: Option<String>,
|
||||
pub prompt: Option<String>,
|
||||
}
|
||||
|
||||
// ── Handlers ────────────────────────────────────────────────────
|
||||
|
||||
/// GET /api/status — system status overview
|
||||
@@ -228,27 +236,7 @@ pub async fn handle_api_cron_list(
|
||||
|
||||
let config = state.config.lock().clone();
|
||||
match crate::cron::list_jobs(&config) {
|
||||
Ok(jobs) => {
|
||||
let jobs_json: Vec<serde_json::Value> = jobs
|
||||
.iter()
|
||||
.map(|job| {
|
||||
serde_json::json!({
|
||||
"id": job.id,
|
||||
"name": job.name,
|
||||
"job_type": job.job_type,
|
||||
"command": job.command,
|
||||
"prompt": job.prompt,
|
||||
"schedule": job.schedule,
|
||||
"next_run": job.next_run.to_rfc3339(),
|
||||
"last_run": job.last_run.map(|t| t.to_rfc3339()),
|
||||
"last_status": job.last_status,
|
||||
"enabled": job.enabled,
|
||||
"delivery": job.delivery,
|
||||
})
|
||||
})
|
||||
.collect();
|
||||
Json(serde_json::json!({"jobs": jobs_json})).into_response()
|
||||
}
|
||||
Ok(jobs) => Json(serde_json::json!({"jobs": jobs})).into_response(),
|
||||
Err(e) => (
|
||||
StatusCode::INTERNAL_SERVER_ERROR,
|
||||
Json(serde_json::json!({"error": format!("Failed to list cron jobs: {e}")})),
|
||||
@@ -344,20 +332,7 @@ pub async fn handle_api_cron_add(
|
||||
};
|
||||
|
||||
match result {
|
||||
Ok(job) => Json(serde_json::json!({
|
||||
"status": "ok",
|
||||
"job": {
|
||||
"id": job.id,
|
||||
"name": job.name,
|
||||
"job_type": job.job_type,
|
||||
"command": job.command,
|
||||
"prompt": job.prompt,
|
||||
"schedule": job.schedule,
|
||||
"enabled": job.enabled,
|
||||
"delivery": job.delivery,
|
||||
}
|
||||
}))
|
||||
.into_response(),
|
||||
Ok(job) => Json(serde_json::json!({"status": "ok", "job": job})).into_response(),
|
||||
Err(e) => (
|
||||
StatusCode::INTERNAL_SERVER_ERROR,
|
||||
Json(serde_json::json!({"error": format!("Failed to add cron job: {e}")})),
|
||||
@@ -415,6 +390,66 @@ pub async fn handle_api_cron_runs(
|
||||
}
|
||||
}
|
||||
|
||||
/// PATCH /api/cron/:id — update an existing cron job
|
||||
pub async fn handle_api_cron_patch(
|
||||
State(state): State<AppState>,
|
||||
headers: HeaderMap,
|
||||
Path(id): Path<String>,
|
||||
Json(body): Json<CronPatchBody>,
|
||||
) -> impl IntoResponse {
|
||||
if let Err(e) = require_auth(&state, &headers) {
|
||||
return e.into_response();
|
||||
}
|
||||
|
||||
let config = state.config.lock().clone();
|
||||
|
||||
// Build the schedule from the provided expression string (if any).
|
||||
let schedule = match body.schedule {
|
||||
Some(expr) if !expr.trim().is_empty() => Some(crate::cron::Schedule::Cron {
|
||||
expr: expr.trim().to_string(),
|
||||
tz: None,
|
||||
}),
|
||||
_ => None,
|
||||
};
|
||||
|
||||
// Route the edited text to the correct field based on the job's stored type.
|
||||
// The frontend sends a single textarea value; for agent jobs it is the prompt,
|
||||
// for shell jobs it is the command.
|
||||
let existing = match crate::cron::get_job(&config, &id) {
|
||||
Ok(j) => j,
|
||||
Err(e) => {
|
||||
return (
|
||||
StatusCode::NOT_FOUND,
|
||||
Json(serde_json::json!({"error": format!("Cron job not found: {e}")})),
|
||||
)
|
||||
.into_response();
|
||||
}
|
||||
};
|
||||
let is_agent = matches!(existing.job_type, crate::cron::JobType::Agent);
|
||||
let (patch_command, patch_prompt) = if is_agent {
|
||||
(None, body.command.or(body.prompt))
|
||||
} else {
|
||||
(body.command.or(body.prompt), None)
|
||||
};
|
||||
|
||||
let patch = crate::cron::CronJobPatch {
|
||||
name: body.name,
|
||||
schedule,
|
||||
command: patch_command,
|
||||
prompt: patch_prompt,
|
||||
..crate::cron::CronJobPatch::default()
|
||||
};
|
||||
|
||||
match crate::cron::update_shell_job_with_approval(&config, &id, patch, false) {
|
||||
Ok(job) => Json(serde_json::json!({"status": "ok", "job": job})).into_response(),
|
||||
Err(e) => (
|
||||
StatusCode::INTERNAL_SERVER_ERROR,
|
||||
Json(serde_json::json!({"error": format!("Failed to update cron job: {e}")})),
|
||||
)
|
||||
.into_response(),
|
||||
}
|
||||
}
|
||||
|
||||
/// DELETE /api/cron/:id — remove a cron job
|
||||
pub async fn handle_api_cron_delete(
|
||||
State(state): State<AppState>,
|
||||
|
||||
@@ -0,0 +1,427 @@
|
||||
//! Hardware context management endpoints.
|
||||
//!
|
||||
//! These endpoints let remote callers (phone, laptop) register GPIO pins and
|
||||
//! append context to the running agent's hardware knowledge base without SSH.
|
||||
//!
|
||||
//! ## Endpoints
|
||||
//!
|
||||
//! - `POST /api/hardware/pin` — register a single GPIO pin assignment
|
||||
//! - `POST /api/hardware/context` — append raw markdown to a device file
|
||||
//! - `GET /api/hardware/context` — read all current hardware context files
|
||||
//! - `POST /api/hardware/reload` — verify on-disk context; report what will be
|
||||
//! used on the next chat request
|
||||
//!
|
||||
//! ## Live update semantics
|
||||
//!
|
||||
//! ZeroClaw's agent loop calls [`crate::hardware::boot`] on **every** request,
|
||||
//! which re-reads `~/.zeroclaw/hardware/` from disk. Writing to those files
|
||||
//! therefore takes effect on the very next `/api/chat` call — no daemon restart
|
||||
//! needed. The `/api/hardware/reload` endpoint verifies what is on disk and
|
||||
//! reports what will be injected into the system prompt next time.
|
||||
//!
|
||||
//! ## Security
|
||||
//!
|
||||
//! - **Auth**: same `require_auth` helper used by all `/api/*` routes.
|
||||
//! - **Path traversal**: device aliases are validated to be alphanumeric +
|
||||
//! hyphens/underscores only; they are never used as raw path components.
|
||||
//! - **Append-only**: all writes use `OpenOptions::append(true)` — existing
|
||||
//! content cannot be truncated or overwritten through these endpoints.
|
||||
//! - **Size limit**: individual append payloads are capped at 32 KB.
|
||||
|
||||
use super::AppState;
|
||||
use axum::{
|
||||
extract::{State},
|
||||
http::{HeaderMap, StatusCode},
|
||||
response::{IntoResponse, Json},
|
||||
};
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::path::PathBuf;
|
||||
use tokio::fs;
|
||||
use tokio::io::AsyncWriteExt as _;
|
||||
|
||||
/// Maximum bytes allowed in a single append payload.
|
||||
const MAX_APPEND_BYTES: usize = 32_768; // 32 KB
|
||||
|
||||
// ── Auth helper (re-uses the pattern from api.rs) ─────────────────────────────
|
||||
|
||||
fn require_auth(
|
||||
state: &AppState,
|
||||
headers: &HeaderMap,
|
||||
) -> Result<(), (StatusCode, Json<serde_json::Value>)> {
|
||||
if !state.pairing.require_pairing() {
|
||||
return Ok(());
|
||||
}
|
||||
let token = headers
|
||||
.get(axum::http::header::AUTHORIZATION)
|
||||
.and_then(|v| v.to_str().ok())
|
||||
.and_then(|auth| auth.strip_prefix("Bearer "))
|
||||
.unwrap_or("");
|
||||
if state.pairing.is_authenticated(token) {
|
||||
Ok(())
|
||||
} else {
|
||||
Err((
|
||||
StatusCode::UNAUTHORIZED,
|
||||
Json(serde_json::json!({
|
||||
"error": "Unauthorized — pair first via POST /pair, then send Authorization: Bearer <token>"
|
||||
})),
|
||||
))
|
||||
}
|
||||
}
|
||||
|
||||
// ── Path helpers ──────────────────────────────────────────────────────────────
|
||||
|
||||
/// Return `~/.zeroclaw/hardware/` or an error string.
|
||||
fn hardware_dir() -> Result<PathBuf, String> {
|
||||
directories::BaseDirs::new()
|
||||
.map(|b| b.home_dir().join(".zeroclaw").join("hardware"))
|
||||
.ok_or_else(|| "Cannot determine home directory".to_string())
|
||||
}
|
||||
|
||||
/// Validate a device alias: must be non-empty, ≤64 chars, and consist only of
|
||||
/// alphanumerics, hyphens, and underscores. Returns an error message on failure.
|
||||
fn validate_device_alias(alias: &str) -> Result<(), &'static str> {
|
||||
if alias.is_empty() || alias.len() > 64 {
|
||||
return Err("Device alias must be 1–64 characters");
|
||||
}
|
||||
if !alias.chars().all(|c| c.is_alphanumeric() || c == '-' || c == '_') {
|
||||
return Err("Device alias must contain only alphanumerics, hyphens, and underscores");
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Return the path to a device context file, after validating the alias.
|
||||
fn device_file_path(hw_dir: &std::path::Path, alias: &str) -> Result<PathBuf, &'static str> {
|
||||
validate_device_alias(alias)?;
|
||||
Ok(hw_dir.join("devices").join(format!("{alias}.md")))
|
||||
}
|
||||
|
||||
// ── POST /api/hardware/pin ────────────────────────────────────────────────────
|
||||
|
||||
#[derive(Debug, Deserialize)]
|
||||
pub struct PinRegistrationBody {
|
||||
/// Device alias (default: "rpi0").
|
||||
#[serde(default = "default_device")]
|
||||
pub device: String,
|
||||
/// BCM GPIO number.
|
||||
pub pin: u32,
|
||||
/// Component type/name, e.g. "LED", "Button", "Servo".
|
||||
pub component: String,
|
||||
/// Optional human notes about this pin, e.g. "red LED, active HIGH".
|
||||
#[serde(default)]
|
||||
pub notes: String,
|
||||
}
|
||||
|
||||
fn default_device() -> String {
|
||||
"rpi0".to_string()
|
||||
}
|
||||
|
||||
/// `POST /api/hardware/pin` — register a single GPIO pin assignment.
|
||||
///
|
||||
/// Appends one line to `~/.zeroclaw/hardware/devices/<device>.md`:
|
||||
/// ```text
|
||||
/// - GPIO <pin>: <component> — <notes>
|
||||
/// ```
|
||||
pub async fn handle_hardware_pin(
|
||||
State(state): State<AppState>,
|
||||
headers: HeaderMap,
|
||||
body: Result<Json<PinRegistrationBody>, axum::extract::rejection::JsonRejection>,
|
||||
) -> impl IntoResponse {
|
||||
if let Err(e) = require_auth(&state, &headers) {
|
||||
return e.into_response();
|
||||
}
|
||||
|
||||
let Json(req) = match body {
|
||||
Ok(b) => b,
|
||||
Err(e) => {
|
||||
return (
|
||||
StatusCode::BAD_REQUEST,
|
||||
Json(serde_json::json!({ "error": format!("Invalid JSON: {e}") })),
|
||||
)
|
||||
.into_response()
|
||||
}
|
||||
};
|
||||
|
||||
if req.component.is_empty() {
|
||||
return (
|
||||
StatusCode::BAD_REQUEST,
|
||||
Json(serde_json::json!({ "error": "\"component\" must not be empty" })),
|
||||
)
|
||||
.into_response();
|
||||
}
|
||||
// Sanitize component + notes: strip newlines to prevent line-injection.
|
||||
let component = req.component.replace(['\n', '\r'], " ");
|
||||
let notes = req.notes.replace(['\n', '\r'], " ");
|
||||
|
||||
let hw_dir = match hardware_dir() {
|
||||
Ok(d) => d,
|
||||
Err(e) => {
|
||||
return (
|
||||
StatusCode::INTERNAL_SERVER_ERROR,
|
||||
Json(serde_json::json!({ "error": e })),
|
||||
)
|
||||
.into_response()
|
||||
}
|
||||
};
|
||||
|
||||
let device_path = match device_file_path(&hw_dir, &req.device) {
|
||||
Ok(p) => p,
|
||||
Err(e) => {
|
||||
return (
|
||||
StatusCode::BAD_REQUEST,
|
||||
Json(serde_json::json!({ "error": e })),
|
||||
)
|
||||
.into_response()
|
||||
}
|
||||
};
|
||||
|
||||
// Create devices dir + file if missing, then append.
|
||||
if let Some(parent) = device_path.parent() {
|
||||
if let Err(e) = fs::create_dir_all(parent).await {
|
||||
return (
|
||||
StatusCode::INTERNAL_SERVER_ERROR,
|
||||
Json(serde_json::json!({ "error": format!("Failed to create directory: {e}") })),
|
||||
)
|
||||
.into_response();
|
||||
}
|
||||
}
|
||||
|
||||
let line = if notes.is_empty() {
|
||||
format!("- GPIO {}: {}\n", req.pin, component)
|
||||
} else {
|
||||
format!("- GPIO {}: {} — {}\n", req.pin, component, notes)
|
||||
};
|
||||
|
||||
match append_to_file(&device_path, &line).await {
|
||||
Ok(()) => {
|
||||
let message = format!(
|
||||
"GPIO {} registered as {} on {}",
|
||||
req.pin, component, req.device
|
||||
);
|
||||
tracing::info!(device = %req.device, pin = req.pin, component = %component, "{}", message);
|
||||
(
|
||||
StatusCode::OK,
|
||||
Json(serde_json::json!({ "ok": true, "message": message })),
|
||||
)
|
||||
.into_response()
|
||||
}
|
||||
Err(e) => (
|
||||
StatusCode::INTERNAL_SERVER_ERROR,
|
||||
Json(serde_json::json!({ "error": format!("Failed to write: {e}") })),
|
||||
)
|
||||
.into_response(),
|
||||
}
|
||||
}
|
||||
|
||||
// ── POST /api/hardware/context ────────────────────────────────────────────────
|
||||
|
||||
#[derive(Debug, Deserialize)]
|
||||
pub struct ContextAppendBody {
|
||||
/// Device alias (default: "rpi0").
|
||||
#[serde(default = "default_device")]
|
||||
pub device: String,
|
||||
/// Raw markdown string to append to the device file.
|
||||
pub content: String,
|
||||
}
|
||||
|
||||
/// `POST /api/hardware/context` — append raw markdown to a device file.
|
||||
pub async fn handle_hardware_context_post(
|
||||
State(state): State<AppState>,
|
||||
headers: HeaderMap,
|
||||
body: Result<Json<ContextAppendBody>, axum::extract::rejection::JsonRejection>,
|
||||
) -> impl IntoResponse {
|
||||
if let Err(e) = require_auth(&state, &headers) {
|
||||
return e.into_response();
|
||||
}
|
||||
|
||||
let Json(req) = match body {
|
||||
Ok(b) => b,
|
||||
Err(e) => {
|
||||
return (
|
||||
StatusCode::BAD_REQUEST,
|
||||
Json(serde_json::json!({ "error": format!("Invalid JSON: {e}") })),
|
||||
)
|
||||
.into_response()
|
||||
}
|
||||
};
|
||||
|
||||
if req.content.is_empty() {
|
||||
return (
|
||||
StatusCode::BAD_REQUEST,
|
||||
Json(serde_json::json!({ "error": "\"content\" must not be empty" })),
|
||||
)
|
||||
.into_response();
|
||||
}
|
||||
if req.content.len() > MAX_APPEND_BYTES {
|
||||
return (
|
||||
StatusCode::PAYLOAD_TOO_LARGE,
|
||||
Json(serde_json::json!({
|
||||
"error": format!("Content too large — max {} bytes", MAX_APPEND_BYTES)
|
||||
})),
|
||||
)
|
||||
.into_response();
|
||||
}
|
||||
|
||||
let hw_dir = match hardware_dir() {
|
||||
Ok(d) => d,
|
||||
Err(e) => {
|
||||
return (
|
||||
StatusCode::INTERNAL_SERVER_ERROR,
|
||||
Json(serde_json::json!({ "error": e })),
|
||||
)
|
||||
.into_response()
|
||||
}
|
||||
};
|
||||
|
||||
let device_path = match device_file_path(&hw_dir, &req.device) {
|
||||
Ok(p) => p,
|
||||
Err(e) => {
|
||||
return (
|
||||
StatusCode::BAD_REQUEST,
|
||||
Json(serde_json::json!({ "error": e })),
|
||||
)
|
||||
.into_response()
|
||||
}
|
||||
};
|
||||
|
||||
if let Some(parent) = device_path.parent() {
|
||||
if let Err(e) = fs::create_dir_all(parent).await {
|
||||
return (
|
||||
StatusCode::INTERNAL_SERVER_ERROR,
|
||||
Json(serde_json::json!({ "error": format!("Failed to create directory: {e}") })),
|
||||
)
|
||||
.into_response();
|
||||
}
|
||||
}
|
||||
|
||||
// Ensure content ends with a newline so successive appends don't merge lines.
|
||||
let mut content = req.content.clone();
|
||||
if !content.ends_with('\n') {
|
||||
content.push('\n');
|
||||
}
|
||||
|
||||
match append_to_file(&device_path, &content).await {
|
||||
Ok(()) => {
|
||||
tracing::info!(device = %req.device, bytes = content.len(), "Hardware context appended");
|
||||
(StatusCode::OK, Json(serde_json::json!({ "ok": true }))).into_response()
|
||||
}
|
||||
Err(e) => (
|
||||
StatusCode::INTERNAL_SERVER_ERROR,
|
||||
Json(serde_json::json!({ "error": format!("Failed to write: {e}") })),
|
||||
)
|
||||
.into_response(),
|
||||
}
|
||||
}
|
||||
|
||||
// ── GET /api/hardware/context ─────────────────────────────────────────────────
|
||||
|
||||
#[derive(Debug, Serialize)]
|
||||
struct HardwareContextResponse {
|
||||
hardware_md: String,
|
||||
devices: std::collections::HashMap<String, String>,
|
||||
}
|
||||
|
||||
/// `GET /api/hardware/context` — return all current hardware context file contents.
|
||||
pub async fn handle_hardware_context_get(
|
||||
State(state): State<AppState>,
|
||||
headers: HeaderMap,
|
||||
) -> impl IntoResponse {
|
||||
if let Err(e) = require_auth(&state, &headers) {
|
||||
return e.into_response();
|
||||
}
|
||||
|
||||
let hw_dir = match hardware_dir() {
|
||||
Ok(d) => d,
|
||||
Err(e) => {
|
||||
return (
|
||||
StatusCode::INTERNAL_SERVER_ERROR,
|
||||
Json(serde_json::json!({ "error": e })),
|
||||
)
|
||||
.into_response()
|
||||
}
|
||||
};
|
||||
|
||||
// Read HARDWARE.md
|
||||
let hardware_md = fs::read_to_string(hw_dir.join("HARDWARE.md"))
|
||||
.await
|
||||
.unwrap_or_default();
|
||||
|
||||
// Read all device files
|
||||
let devices_dir = hw_dir.join("devices");
|
||||
let mut devices = std::collections::HashMap::new();
|
||||
if let Ok(mut entries) = fs::read_dir(&devices_dir).await {
|
||||
while let Ok(Some(entry)) = entries.next_entry().await {
|
||||
let path = entry.path();
|
||||
if path.extension().and_then(|e| e.to_str()) == Some("md") {
|
||||
let alias = path
|
||||
.file_stem()
|
||||
.and_then(|s| s.to_str())
|
||||
.unwrap_or("")
|
||||
.to_string();
|
||||
if !alias.is_empty() {
|
||||
let content = fs::read_to_string(&path).await.unwrap_or_default();
|
||||
devices.insert(alias, content);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
let resp = HardwareContextResponse {
|
||||
hardware_md,
|
||||
devices,
|
||||
};
|
||||
(StatusCode::OK, Json(resp)).into_response()
|
||||
}
|
||||
|
||||
// ── POST /api/hardware/reload ─────────────────────────────────────────────────
|
||||
|
||||
/// `POST /api/hardware/reload` — verify on-disk hardware context and report what
|
||||
/// will be loaded on the next chat request.
|
||||
///
|
||||
/// Since [`crate::hardware::boot`] re-reads from disk on every agent invocation,
|
||||
/// writing to the hardware files via the other endpoints already takes effect on
|
||||
/// the next `/api/chat` call. This endpoint reads the same files and reports
|
||||
/// the current state so callers can confirm the update landed.
|
||||
pub async fn handle_hardware_reload(
|
||||
State(state): State<AppState>,
|
||||
headers: HeaderMap,
|
||||
) -> impl IntoResponse {
|
||||
if let Err(e) = require_auth(&state, &headers) {
|
||||
return e.into_response();
|
||||
}
|
||||
|
||||
// Count currently-registered tools in the gateway state
|
||||
let tool_count = state.tools_registry.len();
|
||||
|
||||
// Reload hardware context from disk (same function used by the agent loop)
|
||||
let context = crate::hardware::load_hardware_context_prompt(&[]);
|
||||
let context_length = context.len();
|
||||
|
||||
tracing::info!(
|
||||
context_length,
|
||||
tool_count,
|
||||
"Hardware context reloaded (on-disk read)"
|
||||
);
|
||||
|
||||
(
|
||||
StatusCode::OK,
|
||||
Json(serde_json::json!({
|
||||
"ok": true,
|
||||
"tools": tool_count,
|
||||
"context_length": context_length,
|
||||
})),
|
||||
)
|
||||
.into_response()
|
||||
}
|
||||
|
||||
// ── File I/O helper ───────────────────────────────────────────────────────────
|
||||
|
||||
async fn append_to_file(path: &std::path::Path, content: &str) -> std::io::Result<()> {
|
||||
let mut file = tokio::fs::OpenOptions::new()
|
||||
.create(true)
|
||||
.append(true)
|
||||
.open(path)
|
||||
.await?;
|
||||
file.write_all(content.as_bytes()).await?;
|
||||
file.flush().await?;
|
||||
Ok(())
|
||||
}
|
||||
+66
-3
@@ -365,7 +365,10 @@ pub async fn run_gateway(host: &str, port: u16, config: Config) -> Result<()> {
|
||||
anyhow::bail!(
|
||||
"🛑 Refusing to bind to {host} — gateway would be exposed to the internet.\n\
|
||||
Fix: use --host 127.0.0.1 (default), configure a tunnel, or set\n\
|
||||
[gateway] allow_public_bind = true in config.toml (NOT recommended)."
|
||||
[gateway] allow_public_bind = true in config.toml (NOT recommended).\n\n\
|
||||
Docker: if you need to reach the gateway from a Docker container, set\n\
|
||||
[gateway] host = \"0.0.0.0\" and allow_public_bind = true in config.toml,\n\
|
||||
then connect from the container via ws://host.docker.internal:{port}."
|
||||
);
|
||||
}
|
||||
let config_state = Arc::new(Mutex::new(config.clone()));
|
||||
@@ -427,7 +430,7 @@ pub async fn run_gateway(host: &str, port: u16, config: Config) -> Result<()> {
|
||||
(None, None)
|
||||
};
|
||||
|
||||
let (tools_registry_raw, _delegate_handle_gw) = tools::all_tools_with_runtime(
|
||||
let (mut tools_registry_raw, delegate_handle_gw) = tools::all_tools_with_runtime(
|
||||
Arc::new(config.clone()),
|
||||
&security,
|
||||
runtime,
|
||||
@@ -442,6 +445,63 @@ pub async fn run_gateway(host: &str, port: u16, config: Config) -> Result<()> {
|
||||
config.api_key.as_deref(),
|
||||
&config,
|
||||
);
|
||||
|
||||
// ── Wire MCP tools into the gateway tool registry (non-fatal) ───
|
||||
// Without this, the `/api/tools` endpoint misses MCP tools.
|
||||
if config.mcp.enabled && !config.mcp.servers.is_empty() {
|
||||
tracing::info!(
|
||||
"Gateway: initializing MCP client — {} server(s) configured",
|
||||
config.mcp.servers.len()
|
||||
);
|
||||
match tools::McpRegistry::connect_all(&config.mcp.servers).await {
|
||||
Ok(registry) => {
|
||||
let registry = std::sync::Arc::new(registry);
|
||||
if config.mcp.deferred_loading {
|
||||
let deferred_set =
|
||||
tools::DeferredMcpToolSet::from_registry(std::sync::Arc::clone(®istry))
|
||||
.await;
|
||||
tracing::info!(
|
||||
"Gateway MCP deferred: {} tool stub(s) from {} server(s)",
|
||||
deferred_set.len(),
|
||||
registry.server_count()
|
||||
);
|
||||
let activated =
|
||||
std::sync::Arc::new(std::sync::Mutex::new(tools::ActivatedToolSet::new()));
|
||||
tools_registry_raw.push(Box::new(tools::ToolSearchTool::new(
|
||||
deferred_set,
|
||||
activated,
|
||||
)));
|
||||
} else {
|
||||
let names = registry.tool_names();
|
||||
let mut registered = 0usize;
|
||||
for name in names {
|
||||
if let Some(def) = registry.get_tool_def(&name).await {
|
||||
let wrapper: std::sync::Arc<dyn tools::Tool> =
|
||||
std::sync::Arc::new(tools::McpToolWrapper::new(
|
||||
name,
|
||||
def,
|
||||
std::sync::Arc::clone(®istry),
|
||||
));
|
||||
if let Some(ref handle) = delegate_handle_gw {
|
||||
handle.write().push(std::sync::Arc::clone(&wrapper));
|
||||
}
|
||||
tools_registry_raw.push(Box::new(tools::ArcToolRef(wrapper)));
|
||||
registered += 1;
|
||||
}
|
||||
}
|
||||
tracing::info!(
|
||||
"Gateway MCP: {} tool(s) registered from {} server(s)",
|
||||
registered,
|
||||
registry.server_count()
|
||||
);
|
||||
}
|
||||
}
|
||||
Err(e) => {
|
||||
tracing::error!("Gateway MCP registry failed to initialize: {e:#}");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
let tools_registry: Arc<Vec<ToolSpec>> =
|
||||
Arc::new(tools_registry_raw.iter().map(|t| t.spec()).collect());
|
||||
|
||||
@@ -783,7 +843,10 @@ pub async fn run_gateway(host: &str, port: u16, config: Config) -> Result<()> {
|
||||
"/api/cron/settings",
|
||||
get(api::handle_api_cron_settings_get).patch(api::handle_api_cron_settings_patch),
|
||||
)
|
||||
.route("/api/cron/{id}", delete(api::handle_api_cron_delete))
|
||||
.route(
|
||||
"/api/cron/{id}",
|
||||
delete(api::handle_api_cron_delete).patch(api::handle_api_cron_patch),
|
||||
)
|
||||
.route("/api/cron/{id}/runs", get(api::handle_api_cron_runs))
|
||||
.route("/api/integrations", get(api::handle_api_integrations))
|
||||
.route(
|
||||
|
||||
+1
-1
@@ -150,7 +150,7 @@ async fn handle_socket(socket: WebSocket, state: AppState, session_id: Option<St
|
||||
|
||||
// Build a persistent Agent for this connection so history is maintained across turns.
|
||||
let config = state.config.lock().clone();
|
||||
let mut agent = match crate::agent::Agent::from_config(&config) {
|
||||
let mut agent = match crate::agent::Agent::from_config(&config).await {
|
||||
Ok(a) => a,
|
||||
Err(e) => {
|
||||
let err = serde_json::json!({"type": "error", "message": format!("Failed to initialise agent: {e}")});
|
||||
|
||||
@@ -0,0 +1,225 @@
|
||||
//! AardvarkTransport — implements the Transport trait for Total Phase Aardvark USB adapters.
|
||||
//!
|
||||
//! The Aardvark is NOT a microcontroller firmware target; it is a USB bridge
|
||||
//! that speaks I2C / SPI / GPIO directly. Unlike [`HardwareSerialTransport`],
|
||||
//! this transport interprets [`ZcCommand`] locally and calls the Aardvark C
|
||||
//! library (via [`aardvark_sys`]) rather than forwarding JSON over a serial wire.
|
||||
//!
|
||||
//! Lazy-open strategy: a fresh [`aardvark_sys::AardvarkHandle`] is opened at
|
||||
//! the start of each [`send`](AardvarkTransport::send) call and automatically
|
||||
//! closed (dropped) before the call returns. No persistent handle is held,
|
||||
//! matching the design of [`HardwareSerialTransport`].
|
||||
|
||||
use super::protocol::{ZcCommand, ZcResponse};
|
||||
use super::transport::{Transport, TransportError, TransportKind};
|
||||
use aardvark_sys::AardvarkHandle;
|
||||
use async_trait::async_trait;
|
||||
|
||||
/// Transport implementation for Total Phase Aardvark USB adapters.
|
||||
///
|
||||
/// Supports I2C, SPI, and direct GPIO operations via the Aardvark C library.
|
||||
pub struct AardvarkTransport {
|
||||
/// Aardvark port index (0 = first available adapter).
|
||||
port: i32,
|
||||
/// Default I2C / SPI bitrate in kHz (e.g. 100 for standard-mode I2C).
|
||||
bitrate_khz: u32,
|
||||
}
|
||||
|
||||
impl AardvarkTransport {
|
||||
/// Create a new transport for the given port and bitrate.
|
||||
///
|
||||
/// The port number matches the index returned by
|
||||
/// [`AardvarkHandle::find_devices`].
|
||||
pub fn new(port: i32, bitrate_khz: u32) -> Self {
|
||||
Self { port, bitrate_khz }
|
||||
}
|
||||
|
||||
/// Return `true` when at least one Aardvark adapter is found by the SDK.
|
||||
pub fn probe_connected(&self) -> bool {
|
||||
AardvarkHandle::find_devices()
|
||||
.into_iter()
|
||||
.any(|p| i32::from(p) == self.port || self.port == 0)
|
||||
}
|
||||
|
||||
/// Open a fresh handle for one transaction.
|
||||
fn open_handle(&self) -> Result<AardvarkHandle, TransportError> {
|
||||
AardvarkHandle::open_port(self.port)
|
||||
.map_err(|e| TransportError::Other(format!("aardvark open: {e}")))
|
||||
}
|
||||
}
|
||||
|
||||
#[async_trait]
|
||||
impl Transport for AardvarkTransport {
|
||||
fn kind(&self) -> TransportKind {
|
||||
TransportKind::Aardvark
|
||||
}
|
||||
|
||||
fn is_connected(&self) -> bool {
|
||||
!AardvarkHandle::find_devices().is_empty()
|
||||
}
|
||||
|
||||
async fn send(&self, cmd: &ZcCommand) -> Result<ZcResponse, TransportError> {
|
||||
// Open a fresh handle per command — released when this scope ends.
|
||||
let handle = self.open_handle()?;
|
||||
|
||||
let result: serde_json::Value = match cmd.cmd.as_str() {
|
||||
// ── I2C ──────────────────────────────────────────────────────────
|
||||
"i2c_scan" => {
|
||||
handle
|
||||
.i2c_enable(self.bitrate_khz)
|
||||
.map_err(|e| TransportError::Other(e.to_string()))?;
|
||||
let devices: Vec<String> = handle
|
||||
.i2c_scan()
|
||||
.into_iter()
|
||||
.map(|a| format!("{a:#04x}"))
|
||||
.collect();
|
||||
serde_json::json!({ "ok": true, "data": { "devices": devices } })
|
||||
}
|
||||
|
||||
"i2c_read" => {
|
||||
let addr = required_u8(&cmd.params, "addr")?;
|
||||
let reg = optional_u8(&cmd.params, "register");
|
||||
let len: usize = cmd
|
||||
.params
|
||||
.get("len")
|
||||
.and_then(|v| v.as_u64())
|
||||
.unwrap_or(1)
|
||||
.try_into()
|
||||
.unwrap_or(1);
|
||||
|
||||
handle
|
||||
.i2c_enable(self.bitrate_khz)
|
||||
.map_err(|e| TransportError::Other(e.to_string()))?;
|
||||
|
||||
let data = if let Some(r) = reg {
|
||||
handle.i2c_write_read(addr, &[r], len)
|
||||
} else {
|
||||
handle.i2c_read(addr, len)
|
||||
}
|
||||
.map_err(|e| TransportError::Other(e.to_string()))?;
|
||||
|
||||
let hex: Vec<String> = data.iter().map(|b| format!("{b:#04x}")).collect();
|
||||
serde_json::json!({
|
||||
"ok": true,
|
||||
"data": { "bytes": data, "hex": hex }
|
||||
})
|
||||
}
|
||||
|
||||
"i2c_write" => {
|
||||
let addr = required_u8(&cmd.params, "addr")?;
|
||||
let bytes = required_byte_array(&cmd.params, "bytes")?;
|
||||
|
||||
handle
|
||||
.i2c_enable(self.bitrate_khz)
|
||||
.map_err(|e| TransportError::Other(e.to_string()))?;
|
||||
handle
|
||||
.i2c_write(addr, &bytes)
|
||||
.map_err(|e| TransportError::Other(e.to_string()))?;
|
||||
|
||||
serde_json::json!({
|
||||
"ok": true,
|
||||
"data": { "bytes_written": bytes.len() }
|
||||
})
|
||||
}
|
||||
|
||||
// ── SPI ──────────────────────────────────────────────────────────
|
||||
"spi_transfer" => {
|
||||
let bytes = required_byte_array(&cmd.params, "bytes")?;
|
||||
|
||||
handle
|
||||
.spi_enable(self.bitrate_khz)
|
||||
.map_err(|e| TransportError::Other(e.to_string()))?;
|
||||
let recv = handle
|
||||
.spi_transfer(&bytes)
|
||||
.map_err(|e| TransportError::Other(e.to_string()))?;
|
||||
|
||||
let hex: Vec<String> = recv.iter().map(|b| format!("{b:#04x}")).collect();
|
||||
serde_json::json!({
|
||||
"ok": true,
|
||||
"data": { "received": recv, "hex": hex }
|
||||
})
|
||||
}
|
||||
|
||||
// ── GPIO ─────────────────────────────────────────────────────────
|
||||
"gpio_set" => {
|
||||
let direction = required_u8(&cmd.params, "direction")?;
|
||||
let value = required_u8(&cmd.params, "value")?;
|
||||
|
||||
handle
|
||||
.gpio_set(direction, value)
|
||||
.map_err(|e| TransportError::Other(e.to_string()))?;
|
||||
|
||||
serde_json::json!({
|
||||
"ok": true,
|
||||
"data": { "direction": direction, "value": value }
|
||||
})
|
||||
}
|
||||
|
||||
"gpio_get" => {
|
||||
let val = handle
|
||||
.gpio_get()
|
||||
.map_err(|e| TransportError::Other(e.to_string()))?;
|
||||
|
||||
serde_json::json!({
|
||||
"ok": true,
|
||||
"data": { "value": val }
|
||||
})
|
||||
}
|
||||
|
||||
unknown => serde_json::json!({
|
||||
"ok": false,
|
||||
"error": format!("unknown Aardvark command: {unknown}")
|
||||
}),
|
||||
};
|
||||
|
||||
// Drop handle here (auto-close via Drop).
|
||||
Ok(ZcResponse {
|
||||
ok: result["ok"].as_bool().unwrap_or(false),
|
||||
data: result["data"].clone(),
|
||||
error: result["error"].as_str().map(String::from),
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
// ── Parameter helpers ─────────────────────────────────────────────────────────
|
||||
|
||||
/// Extract a required `u8` field from JSON params, returning a `TransportError`
|
||||
/// if missing or out of range.
|
||||
fn required_u8(params: &serde_json::Value, key: &str) -> Result<u8, TransportError> {
|
||||
params
|
||||
.get(key)
|
||||
.and_then(|v| v.as_u64())
|
||||
.and_then(|n| u8::try_from(n).ok())
|
||||
.ok_or_else(|| {
|
||||
TransportError::Protocol(format!("missing or out-of-range u8 parameter: '{key}'"))
|
||||
})
|
||||
}
|
||||
|
||||
/// Extract an optional `u8` field — returns `None` if absent or not representable as u8.
|
||||
fn optional_u8(params: &serde_json::Value, key: &str) -> Option<u8> {
|
||||
params
|
||||
.get(key)
|
||||
.and_then(|v| v.as_u64())
|
||||
.and_then(|n| u8::try_from(n).ok())
|
||||
}
|
||||
|
||||
/// Extract a required JSON array of integers as `Vec<u8>`.
|
||||
fn required_byte_array(params: &serde_json::Value, key: &str) -> Result<Vec<u8>, TransportError> {
|
||||
let arr = params
|
||||
.get(key)
|
||||
.and_then(|v| v.as_array())
|
||||
.ok_or_else(|| TransportError::Protocol(format!("missing array parameter: '{key}'")))?;
|
||||
|
||||
arr.iter()
|
||||
.enumerate()
|
||||
.map(|(i, v)| {
|
||||
v.as_u64()
|
||||
.and_then(|n| u8::try_from(n).ok())
|
||||
.ok_or_else(|| {
|
||||
TransportError::Protocol(format!(
|
||||
"byte at index {i} in '{key}' is not a valid u8"
|
||||
))
|
||||
})
|
||||
})
|
||||
.collect()
|
||||
}
|
||||
@@ -0,0 +1,574 @@
|
||||
//! Aardvark hardware tools — I2C, SPI, and GPIO operations via the Total Phase
|
||||
//! Aardvark USB adapter.
|
||||
//!
|
||||
//! All tools follow the same pattern as the built-in GPIO tools:
|
||||
//! 1. Accept an optional `device` alias parameter.
|
||||
//! 2. Resolve the Aardvark device from the [`DeviceRegistry`].
|
||||
//! 3. Build a [`ZcCommand`] and send it through the registered transport.
|
||||
//! 4. Return a [`ToolResult`] with human-readable output.
|
||||
//!
|
||||
//! These tools are only registered when at least one Aardvark adapter is
|
||||
//! detected at startup (see [`DeviceRegistry::has_aardvark`]).
|
||||
|
||||
use super::device::DeviceRegistry;
|
||||
use super::protocol::ZcCommand;
|
||||
use crate::tools::traits::{Tool, ToolResult};
|
||||
use async_trait::async_trait;
|
||||
use serde_json::json;
|
||||
use std::sync::Arc;
|
||||
use tokio::sync::RwLock;
|
||||
|
||||
// ── Factory ───────────────────────────────────────────────────────────────────
|
||||
|
||||
/// Build the five Aardvark hardware tools.
|
||||
///
|
||||
/// Called from [`ToolRegistry::load`] when an Aardvark adapter is present.
|
||||
pub fn aardvark_tools(devices: Arc<RwLock<DeviceRegistry>>) -> Vec<Box<dyn Tool>> {
|
||||
vec![
|
||||
Box::new(I2cScanTool::new(devices.clone())),
|
||||
Box::new(I2cReadTool::new(devices.clone())),
|
||||
Box::new(I2cWriteTool::new(devices.clone())),
|
||||
Box::new(SpiTransferTool::new(devices.clone())),
|
||||
Box::new(GpioAardvarkTool::new(devices.clone())),
|
||||
]
|
||||
}
|
||||
|
||||
// ── Helpers ───────────────────────────────────────────────────────────────────
|
||||
|
||||
/// Resolve the Aardvark device from args and return an owned `DeviceContext`.
|
||||
///
|
||||
/// Thin wrapper so individual tool `execute` methods don't duplicate the logic.
|
||||
async fn resolve(
|
||||
registry: &Arc<RwLock<DeviceRegistry>>,
|
||||
args: &serde_json::Value,
|
||||
) -> Result<(String, super::device::DeviceContext), ToolResult> {
|
||||
let reg = registry.read().await;
|
||||
reg.resolve_aardvark_device(args).map_err(|msg| ToolResult {
|
||||
success: false,
|
||||
output: String::new(),
|
||||
error: Some(msg),
|
||||
})
|
||||
}
|
||||
|
||||
// ── I2cScanTool ───────────────────────────────────────────────────────────────
|
||||
|
||||
/// Tool: scan the I2C bus for responding device addresses.
|
||||
pub struct I2cScanTool {
|
||||
registry: Arc<RwLock<DeviceRegistry>>,
|
||||
}
|
||||
|
||||
impl I2cScanTool {
|
||||
pub fn new(registry: Arc<RwLock<DeviceRegistry>>) -> Self {
|
||||
Self { registry }
|
||||
}
|
||||
}
|
||||
|
||||
#[async_trait]
|
||||
impl Tool for I2cScanTool {
|
||||
fn name(&self) -> &str {
|
||||
"i2c_scan"
|
||||
}
|
||||
|
||||
fn description(&self) -> &str {
|
||||
"Scan the I2C bus via the Aardvark USB adapter and return all responding \
|
||||
device addresses in hex (e.g. [0x48, 0x68])"
|
||||
}
|
||||
|
||||
fn parameters_schema(&self) -> serde_json::Value {
|
||||
json!({
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"device": {
|
||||
"type": "string",
|
||||
"description": "Aardvark device alias (e.g. aardvark0). Omit to auto-select."
|
||||
}
|
||||
},
|
||||
"required": []
|
||||
})
|
||||
}
|
||||
|
||||
async fn execute(&self, args: serde_json::Value) -> anyhow::Result<ToolResult> {
|
||||
let (_alias, ctx) = match resolve(&self.registry, &args).await {
|
||||
Ok(v) => v,
|
||||
Err(result) => return Ok(result),
|
||||
};
|
||||
|
||||
let cmd = ZcCommand::simple("i2c_scan");
|
||||
match ctx.transport.send(&cmd).await {
|
||||
Ok(resp) if resp.ok => {
|
||||
let devices = resp
|
||||
.data
|
||||
.get("devices")
|
||||
.and_then(|v| v.as_array())
|
||||
.cloned()
|
||||
.unwrap_or_default();
|
||||
let output = if devices.is_empty() {
|
||||
"I2C scan complete — no devices found on the bus.".to_string()
|
||||
} else {
|
||||
let addrs: Vec<&str> = devices.iter().filter_map(|v| v.as_str()).collect();
|
||||
format!(
|
||||
"I2C scan found {} device(s): {}",
|
||||
addrs.len(),
|
||||
addrs.join(", ")
|
||||
)
|
||||
};
|
||||
Ok(ToolResult {
|
||||
success: true,
|
||||
output,
|
||||
error: None,
|
||||
})
|
||||
}
|
||||
Ok(resp) => Ok(ToolResult {
|
||||
success: false,
|
||||
output: String::new(),
|
||||
error: Some(
|
||||
resp.error
|
||||
.unwrap_or_else(|| "i2c_scan: device returned ok:false".to_string()),
|
||||
),
|
||||
}),
|
||||
Err(e) => Ok(ToolResult {
|
||||
success: false,
|
||||
output: String::new(),
|
||||
error: Some(format!("transport error: {e}")),
|
||||
}),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// ── I2cReadTool ───────────────────────────────────────────────────────────────
|
||||
|
||||
/// Tool: read bytes from an I2C device register.
|
||||
pub struct I2cReadTool {
|
||||
registry: Arc<RwLock<DeviceRegistry>>,
|
||||
}
|
||||
|
||||
impl I2cReadTool {
|
||||
pub fn new(registry: Arc<RwLock<DeviceRegistry>>) -> Self {
|
||||
Self { registry }
|
||||
}
|
||||
}
|
||||
|
||||
#[async_trait]
|
||||
impl Tool for I2cReadTool {
|
||||
fn name(&self) -> &str {
|
||||
"i2c_read"
|
||||
}
|
||||
|
||||
fn description(&self) -> &str {
|
||||
"Read bytes from an I2C device via the Aardvark USB adapter. \
|
||||
Provide the I2C address and optionally a register to read from."
|
||||
}
|
||||
|
||||
fn parameters_schema(&self) -> serde_json::Value {
|
||||
json!({
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"device": {
|
||||
"type": "string",
|
||||
"description": "Aardvark device alias (e.g. aardvark0). Omit to auto-select."
|
||||
},
|
||||
"addr": {
|
||||
"type": "integer",
|
||||
"description": "I2C device address (e.g. 72 for 0x48)"
|
||||
},
|
||||
"register": {
|
||||
"type": "integer",
|
||||
"description": "Register address to read from (optional)"
|
||||
},
|
||||
"len": {
|
||||
"type": "integer",
|
||||
"description": "Number of bytes to read",
|
||||
"default": 1
|
||||
}
|
||||
},
|
||||
"required": ["addr"]
|
||||
})
|
||||
}
|
||||
|
||||
async fn execute(&self, args: serde_json::Value) -> anyhow::Result<ToolResult> {
|
||||
let addr = match args.get("addr").and_then(|v| v.as_u64()) {
|
||||
Some(a) => a,
|
||||
None => {
|
||||
return Ok(ToolResult {
|
||||
success: false,
|
||||
output: String::new(),
|
||||
error: Some("missing required parameter: addr".to_string()),
|
||||
})
|
||||
}
|
||||
};
|
||||
let len = args.get("len").and_then(|v| v.as_u64()).unwrap_or(1);
|
||||
|
||||
let (_alias, ctx) = match resolve(&self.registry, &args).await {
|
||||
Ok(v) => v,
|
||||
Err(result) => return Ok(result),
|
||||
};
|
||||
|
||||
let mut params = json!({ "addr": addr, "len": len });
|
||||
if let Some(reg) = args.get("register").and_then(|v| v.as_u64()) {
|
||||
params["register"] = json!(reg);
|
||||
}
|
||||
let cmd = ZcCommand::new("i2c_read", params);
|
||||
|
||||
match ctx.transport.send(&cmd).await {
|
||||
Ok(resp) if resp.ok => {
|
||||
let hex = resp
|
||||
.data
|
||||
.get("hex")
|
||||
.and_then(|v| v.as_array())
|
||||
.map(|a| {
|
||||
a.iter()
|
||||
.filter_map(|v| v.as_str())
|
||||
.collect::<Vec<_>>()
|
||||
.join(", ")
|
||||
})
|
||||
.unwrap_or_else(|| "?".to_string());
|
||||
Ok(ToolResult {
|
||||
success: true,
|
||||
output: format!("I2C read from addr {addr:#04x}: [{hex}]"),
|
||||
error: None,
|
||||
})
|
||||
}
|
||||
Ok(resp) => Ok(ToolResult {
|
||||
success: false,
|
||||
output: String::new(),
|
||||
error: Some(
|
||||
resp.error
|
||||
.unwrap_or_else(|| "i2c_read: device returned ok:false".to_string()),
|
||||
),
|
||||
}),
|
||||
Err(e) => Ok(ToolResult {
|
||||
success: false,
|
||||
output: String::new(),
|
||||
error: Some(format!("transport error: {e}")),
|
||||
}),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// ── I2cWriteTool ──────────────────────────────────────────────────────────────
|
||||
|
||||
/// Tool: write bytes to an I2C device.
|
||||
pub struct I2cWriteTool {
|
||||
registry: Arc<RwLock<DeviceRegistry>>,
|
||||
}
|
||||
|
||||
impl I2cWriteTool {
|
||||
pub fn new(registry: Arc<RwLock<DeviceRegistry>>) -> Self {
|
||||
Self { registry }
|
||||
}
|
||||
}
|
||||
|
||||
#[async_trait]
|
||||
impl Tool for I2cWriteTool {
|
||||
fn name(&self) -> &str {
|
||||
"i2c_write"
|
||||
}
|
||||
|
||||
fn description(&self) -> &str {
|
||||
"Write bytes to an I2C device via the Aardvark USB adapter"
|
||||
}
|
||||
|
||||
fn parameters_schema(&self) -> serde_json::Value {
|
||||
json!({
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"device": {
|
||||
"type": "string",
|
||||
"description": "Aardvark device alias (e.g. aardvark0). Omit to auto-select."
|
||||
},
|
||||
"addr": {
|
||||
"type": "integer",
|
||||
"description": "I2C device address (e.g. 72 for 0x48)"
|
||||
},
|
||||
"bytes": {
|
||||
"type": "array",
|
||||
"items": { "type": "integer" },
|
||||
"description": "Bytes to write (e.g. [1, 96] for register 0x01 config 0x60)"
|
||||
}
|
||||
},
|
||||
"required": ["addr", "bytes"]
|
||||
})
|
||||
}
|
||||
|
||||
async fn execute(&self, args: serde_json::Value) -> anyhow::Result<ToolResult> {
|
||||
let addr = match args.get("addr").and_then(|v| v.as_u64()) {
|
||||
Some(a) => a,
|
||||
None => {
|
||||
return Ok(ToolResult {
|
||||
success: false,
|
||||
output: String::new(),
|
||||
error: Some("missing required parameter: addr".to_string()),
|
||||
})
|
||||
}
|
||||
};
|
||||
let bytes = match args.get("bytes").and_then(|v| v.as_array()) {
|
||||
Some(b) => b.clone(),
|
||||
None => {
|
||||
return Ok(ToolResult {
|
||||
success: false,
|
||||
output: String::new(),
|
||||
error: Some("missing required parameter: bytes".to_string()),
|
||||
})
|
||||
}
|
||||
};
|
||||
|
||||
let (_alias, ctx) = match resolve(&self.registry, &args).await {
|
||||
Ok(v) => v,
|
||||
Err(result) => return Ok(result),
|
||||
};
|
||||
|
||||
let cmd = ZcCommand::new("i2c_write", json!({ "addr": addr, "bytes": bytes }));
|
||||
|
||||
match ctx.transport.send(&cmd).await {
|
||||
Ok(resp) if resp.ok => {
|
||||
let n = resp
|
||||
.data
|
||||
.get("bytes_written")
|
||||
.and_then(|v| v.as_u64())
|
||||
.unwrap_or(bytes.len() as u64);
|
||||
Ok(ToolResult {
|
||||
success: true,
|
||||
output: format!("I2C write to addr {addr:#04x}: {n} byte(s) written"),
|
||||
error: None,
|
||||
})
|
||||
}
|
||||
Ok(resp) => Ok(ToolResult {
|
||||
success: false,
|
||||
output: String::new(),
|
||||
error: Some(
|
||||
resp.error
|
||||
.unwrap_or_else(|| "i2c_write: device returned ok:false".to_string()),
|
||||
),
|
||||
}),
|
||||
Err(e) => Ok(ToolResult {
|
||||
success: false,
|
||||
output: String::new(),
|
||||
error: Some(format!("transport error: {e}")),
|
||||
}),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// ── SpiTransferTool ───────────────────────────────────────────────────────────
|
||||
|
||||
/// Tool: full-duplex SPI transfer.
|
||||
pub struct SpiTransferTool {
|
||||
registry: Arc<RwLock<DeviceRegistry>>,
|
||||
}
|
||||
|
||||
impl SpiTransferTool {
|
||||
pub fn new(registry: Arc<RwLock<DeviceRegistry>>) -> Self {
|
||||
Self { registry }
|
||||
}
|
||||
}
|
||||
|
||||
#[async_trait]
|
||||
impl Tool for SpiTransferTool {
|
||||
fn name(&self) -> &str {
|
||||
"spi_transfer"
|
||||
}
|
||||
|
||||
fn description(&self) -> &str {
|
||||
"Perform a full-duplex SPI transfer via the Aardvark USB adapter. \
|
||||
Sends the given bytes and returns the received bytes (same length)."
|
||||
}
|
||||
|
||||
fn parameters_schema(&self) -> serde_json::Value {
|
||||
json!({
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"device": {
|
||||
"type": "string",
|
||||
"description": "Aardvark device alias (e.g. aardvark0). Omit to auto-select."
|
||||
},
|
||||
"bytes": {
|
||||
"type": "array",
|
||||
"items": { "type": "integer" },
|
||||
"description": "Bytes to send (received bytes have the same length)"
|
||||
}
|
||||
},
|
||||
"required": ["bytes"]
|
||||
})
|
||||
}
|
||||
|
||||
async fn execute(&self, args: serde_json::Value) -> anyhow::Result<ToolResult> {
|
||||
let bytes = match args.get("bytes").and_then(|v| v.as_array()) {
|
||||
Some(b) => b.clone(),
|
||||
None => {
|
||||
return Ok(ToolResult {
|
||||
success: false,
|
||||
output: String::new(),
|
||||
error: Some("missing required parameter: bytes".to_string()),
|
||||
})
|
||||
}
|
||||
};
|
||||
|
||||
let (_alias, ctx) = match resolve(&self.registry, &args).await {
|
||||
Ok(v) => v,
|
||||
Err(result) => return Ok(result),
|
||||
};
|
||||
|
||||
let cmd = ZcCommand::new("spi_transfer", json!({ "bytes": bytes }));
|
||||
|
||||
match ctx.transport.send(&cmd).await {
|
||||
Ok(resp) if resp.ok => {
|
||||
let hex = resp
|
||||
.data
|
||||
.get("hex")
|
||||
.and_then(|v| v.as_array())
|
||||
.map(|a| {
|
||||
a.iter()
|
||||
.filter_map(|v| v.as_str())
|
||||
.collect::<Vec<_>>()
|
||||
.join(", ")
|
||||
})
|
||||
.unwrap_or_else(|| "?".to_string());
|
||||
Ok(ToolResult {
|
||||
success: true,
|
||||
output: format!("SPI transfer complete. Received: [{hex}]"),
|
||||
error: None,
|
||||
})
|
||||
}
|
||||
Ok(resp) => Ok(ToolResult {
|
||||
success: false,
|
||||
output: String::new(),
|
||||
error: Some(
|
||||
resp.error
|
||||
.unwrap_or_else(|| "spi_transfer: device returned ok:false".to_string()),
|
||||
),
|
||||
}),
|
||||
Err(e) => Ok(ToolResult {
|
||||
success: false,
|
||||
output: String::new(),
|
||||
error: Some(format!("transport error: {e}")),
|
||||
}),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// ── GpioAardvarkTool ──────────────────────────────────────────────────────────
|
||||
|
||||
/// Tool: set or read the Aardvark adapter's GPIO pins.
|
||||
///
|
||||
/// The Aardvark has 8 GPIO pins accessible via the 10-pin expansion header.
|
||||
/// Each pin can be configured as input or output via bitmasks.
|
||||
pub struct GpioAardvarkTool {
|
||||
registry: Arc<RwLock<DeviceRegistry>>,
|
||||
}
|
||||
|
||||
impl GpioAardvarkTool {
|
||||
pub fn new(registry: Arc<RwLock<DeviceRegistry>>) -> Self {
|
||||
Self { registry }
|
||||
}
|
||||
}
|
||||
|
||||
#[async_trait]
|
||||
impl Tool for GpioAardvarkTool {
|
||||
fn name(&self) -> &str {
|
||||
"gpio_aardvark"
|
||||
}
|
||||
|
||||
fn description(&self) -> &str {
|
||||
"Set or read the Aardvark USB adapter GPIO pins via bitmasks. \
|
||||
Use action='set' with direction and value bitmasks to drive output pins, \
|
||||
or action='get' to read current pin states."
|
||||
}
|
||||
|
||||
fn parameters_schema(&self) -> serde_json::Value {
|
||||
json!({
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"device": {
|
||||
"type": "string",
|
||||
"description": "Aardvark device alias (e.g. aardvark0). Omit to auto-select."
|
||||
},
|
||||
"action": {
|
||||
"type": "string",
|
||||
"enum": ["set", "get"],
|
||||
"description": "'set' to write GPIO pins, 'get' to read pin states"
|
||||
},
|
||||
"direction": {
|
||||
"type": "integer",
|
||||
"description": "For action='set': bitmask of output pins (1=output, 0=input)"
|
||||
},
|
||||
"value": {
|
||||
"type": "integer",
|
||||
"description": "For action='set': bitmask of output pin levels (1=high, 0=low)"
|
||||
}
|
||||
},
|
||||
"required": ["action"]
|
||||
})
|
||||
}
|
||||
|
||||
async fn execute(&self, args: serde_json::Value) -> anyhow::Result<ToolResult> {
|
||||
let action = match args.get("action").and_then(|v| v.as_str()) {
|
||||
Some(a) => a.to_string(),
|
||||
None => {
|
||||
return Ok(ToolResult {
|
||||
success: false,
|
||||
output: String::new(),
|
||||
error: Some("missing required parameter: action".to_string()),
|
||||
})
|
||||
}
|
||||
};
|
||||
|
||||
let (_alias, ctx) = match resolve(&self.registry, &args).await {
|
||||
Ok(v) => v,
|
||||
Err(result) => return Ok(result),
|
||||
};
|
||||
|
||||
let cmd = match action.as_str() {
|
||||
"set" => {
|
||||
let direction = args.get("direction").and_then(|v| v.as_u64()).unwrap_or(0);
|
||||
let value = args.get("value").and_then(|v| v.as_u64()).unwrap_or(0);
|
||||
ZcCommand::new(
|
||||
"gpio_set",
|
||||
json!({ "direction": direction, "value": value }),
|
||||
)
|
||||
}
|
||||
"get" => ZcCommand::simple("gpio_get"),
|
||||
other => {
|
||||
return Ok(ToolResult {
|
||||
success: false,
|
||||
output: String::new(),
|
||||
error: Some(format!("unknown action '{other}'; use 'set' or 'get'")),
|
||||
})
|
||||
}
|
||||
};
|
||||
|
||||
match ctx.transport.send(&cmd).await {
|
||||
Ok(resp) if resp.ok => {
|
||||
let output = if action == "get" {
|
||||
let val = resp.data.get("value").and_then(|v| v.as_u64()).unwrap_or(0);
|
||||
format!("Aardvark GPIO pins: {val:#010b} (0x{val:02x})")
|
||||
} else {
|
||||
let dir = resp
|
||||
.data
|
||||
.get("direction")
|
||||
.and_then(|v| v.as_u64())
|
||||
.unwrap_or(0);
|
||||
let val = resp.data.get("value").and_then(|v| v.as_u64()).unwrap_or(0);
|
||||
format!("Aardvark GPIO set — direction: {dir:#010b}, value: {val:#010b}")
|
||||
};
|
||||
Ok(ToolResult {
|
||||
success: true,
|
||||
output,
|
||||
error: None,
|
||||
})
|
||||
}
|
||||
Ok(resp) => Ok(ToolResult {
|
||||
success: false,
|
||||
output: String::new(),
|
||||
error: Some(
|
||||
resp.error
|
||||
.unwrap_or_else(|| "gpio_aardvark: device returned ok:false".to_string()),
|
||||
),
|
||||
}),
|
||||
Err(e) => Ok(ToolResult {
|
||||
success: false,
|
||||
output: String::new(),
|
||||
error: Some(format!("transport error: {e}")),
|
||||
}),
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,355 @@
|
||||
//! Datasheet management for industry devices connected via Aardvark.
|
||||
//!
|
||||
//! When a user identifies a new device (e.g. "I have an LM75 temperature
|
||||
//! sensor"), the [`DatasheetTool`] calls [`DatasheetManager`] to:
|
||||
//!
|
||||
//! 1. **search** — query the web for the device datasheet PDF URL.
|
||||
//! 2. **download** — fetch the PDF and save it to
|
||||
//! `~/.zeroclaw/hardware/datasheets/<device>.pdf`.
|
||||
//! 3. **list** — enumerate all locally cached datasheets.
|
||||
//! 4. **read** — return the local path of a cached datasheet so the LLM can
|
||||
//! reference it with the `read_file` tool or a future RAG pipeline.
|
||||
//!
|
||||
//! # Note on PDF extraction
|
||||
//!
|
||||
//! Full in-process PDF parsing is available when the `rag-pdf` feature is
|
||||
//! enabled (adds `pdf-extract`). Without that feature, the tool returns the
|
||||
//! PDF file path and instructs the LLM to use a future RAG step.
|
||||
|
||||
use crate::tools::traits::{Tool, ToolResult};
|
||||
use async_trait::async_trait;
|
||||
use std::path::PathBuf;
|
||||
|
||||
// ── DatasheetManager ─────────────────────────────────────────────────────────
|
||||
|
||||
/// Manages device datasheet files in `~/.zeroclaw/hardware/datasheets/`.
|
||||
pub struct DatasheetManager {
|
||||
/// Root datasheet storage directory.
|
||||
datasheet_dir: PathBuf,
|
||||
}
|
||||
|
||||
impl DatasheetManager {
|
||||
/// Create a manager rooted at the default ZeroClaw datasheets directory.
|
||||
pub fn new() -> Option<Self> {
|
||||
let home = directories::BaseDirs::new()?.home_dir().to_path_buf();
|
||||
Some(Self {
|
||||
datasheet_dir: home.join(".zeroclaw").join("hardware").join("datasheets"),
|
||||
})
|
||||
}
|
||||
|
||||
/// Check if a datasheet for `device_name` already exists locally.
|
||||
///
|
||||
/// Searches for `<device_name_lower>.pdf` (case-insensitive stem match).
|
||||
pub fn find_local(&self, device_name: &str) -> Option<PathBuf> {
|
||||
let target = format!("{}.pdf", device_name.to_lowercase().replace(' ', "_"));
|
||||
let candidate = self.datasheet_dir.join(&target);
|
||||
if candidate.exists() {
|
||||
return Some(candidate);
|
||||
}
|
||||
// Broader scan: any filename containing the device name.
|
||||
if let Ok(entries) = std::fs::read_dir(&self.datasheet_dir) {
|
||||
for entry in entries.filter_map(|e| e.ok()) {
|
||||
let name = entry.file_name();
|
||||
let name_str = name.to_string_lossy().to_lowercase();
|
||||
let key = device_name.to_lowercase().replace(' ', "_");
|
||||
if name_str.contains(&key) && name_str.ends_with(".pdf") {
|
||||
return Some(entry.path());
|
||||
}
|
||||
}
|
||||
}
|
||||
None
|
||||
}
|
||||
|
||||
/// Download a datasheet PDF from `url` and save it locally.
|
||||
///
|
||||
/// The file is saved as `~/.zeroclaw/hardware/datasheets/<device_name>.pdf`.
|
||||
/// Returns the path to the saved file.
|
||||
pub async fn download_datasheet(
|
||||
&self,
|
||||
url: &str,
|
||||
device_name: &str,
|
||||
) -> anyhow::Result<PathBuf> {
|
||||
std::fs::create_dir_all(&self.datasheet_dir)?;
|
||||
|
||||
let filename = format!("{}.pdf", device_name.to_lowercase().replace(' ', "_"));
|
||||
let dest = self.datasheet_dir.join(&filename);
|
||||
|
||||
let client = reqwest::Client::builder()
|
||||
.user_agent("ZeroClaw/0.1 (datasheet downloader)")
|
||||
.timeout(std::time::Duration::from_secs(30))
|
||||
.build()?;
|
||||
|
||||
let response = client.get(url).send().await?;
|
||||
if !response.status().is_success() {
|
||||
anyhow::bail!(
|
||||
"HTTP {} downloading datasheet from {url}",
|
||||
response.status()
|
||||
);
|
||||
}
|
||||
let bytes = response.bytes().await?;
|
||||
std::fs::write(&dest, &bytes)?;
|
||||
|
||||
tracing::info!(device = %device_name, path = %dest.display(), "datasheet downloaded");
|
||||
Ok(dest)
|
||||
}
|
||||
|
||||
/// List all locally cached datasheet filenames.
|
||||
pub fn list_datasheets(&self) -> Vec<String> {
|
||||
if let Ok(entries) = std::fs::read_dir(&self.datasheet_dir) {
|
||||
let mut names: Vec<String> = entries
|
||||
.filter_map(|e| e.ok())
|
||||
.map(|e| e.file_name().to_string_lossy().to_string())
|
||||
.filter(|n| n.ends_with(".pdf"))
|
||||
.collect();
|
||||
names.sort();
|
||||
return names;
|
||||
}
|
||||
Vec::new()
|
||||
}
|
||||
|
||||
/// Build a web search query for a device datasheet.
|
||||
///
|
||||
/// Returns a suggested search query string the LLM (or a search tool) can
|
||||
/// use to find the datasheet.
|
||||
pub fn search_query(device_name: &str) -> String {
|
||||
format!("{device_name} datasheet filetype:pdf site:ti.com OR site:nxp.com OR site:st.com OR site:microchip.com OR site:infineon.com OR site:analog.com")
|
||||
}
|
||||
}
|
||||
|
||||
impl Default for DatasheetManager {
|
||||
fn default() -> Self {
|
||||
Self::new().unwrap_or_else(|| Self {
|
||||
datasheet_dir: PathBuf::from(".zeroclaw/hardware/datasheets"),
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
// ── DatasheetTool ─────────────────────────────────────────────────────────────

/// Tool: search for, download, and manage device datasheets.
///
/// Invoked by the LLM when a user identifies a new device connected via
/// Aardvark (e.g. "I have an LM75 temperature sensor on the I2C bus").
pub struct DatasheetTool;
|
||||
impl DatasheetTool {
|
||||
pub fn new() -> Self {
|
||||
Self
|
||||
}
|
||||
}
|
||||
|
||||
impl Default for DatasheetTool {
|
||||
fn default() -> Self {
|
||||
Self::new()
|
||||
}
|
||||
}
|
||||
|
||||
#[async_trait]
|
||||
impl Tool for DatasheetTool {
|
||||
fn name(&self) -> &str {
|
||||
"datasheet"
|
||||
}
|
||||
|
||||
fn description(&self) -> &str {
|
||||
"Search for, download, and manage device datasheets. \
|
||||
Use when the user identifies a new device connected via the Aardvark adapter \
|
||||
(e.g. 'I have an LM75 sensor'). \
|
||||
Actions: 'search' returns a web search query; \
|
||||
'download' fetches a PDF from a URL; \
|
||||
'list' shows cached datasheets; \
|
||||
'read' returns the local path of a cached datasheet."
|
||||
}
|
||||
|
||||
fn parameters_schema(&self) -> serde_json::Value {
|
||||
serde_json::json!({
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"action": {
|
||||
"type": "string",
|
||||
"enum": ["search", "download", "list", "read"],
|
||||
"description": "Operation to perform"
|
||||
},
|
||||
"device_name": {
|
||||
"type": "string",
|
||||
"description": "Device name (e.g. 'LM75', 'PSoC6', 'MPU6050')"
|
||||
},
|
||||
"url": {
|
||||
"type": "string",
|
||||
"description": "For action='download': direct URL to the datasheet PDF"
|
||||
}
|
||||
},
|
||||
"required": ["action"]
|
||||
})
|
||||
}
|
||||
|
||||
async fn execute(&self, args: serde_json::Value) -> anyhow::Result<ToolResult> {
|
||||
let action = match args.get("action").and_then(|v| v.as_str()) {
|
||||
Some(a) => a.to_string(),
|
||||
None => {
|
||||
return Ok(ToolResult {
|
||||
success: false,
|
||||
output: String::new(),
|
||||
error: Some("missing required parameter: action".to_string()),
|
||||
})
|
||||
}
|
||||
};
|
||||
|
||||
let mgr = DatasheetManager::default();
|
||||
|
||||
match action.as_str() {
|
||||
"search" => {
|
||||
let device = match args.get("device_name").and_then(|v| v.as_str()) {
|
||||
Some(d) => d.to_string(),
|
||||
None => {
|
||||
return Ok(ToolResult {
|
||||
success: false,
|
||||
output: String::new(),
|
||||
error: Some(
|
||||
"missing required parameter: device_name for action 'search'"
|
||||
.to_string(),
|
||||
),
|
||||
})
|
||||
}
|
||||
};
|
||||
|
||||
// Check if we already have a cached copy.
|
||||
if let Some(path) = mgr.find_local(&device) {
|
||||
return Ok(ToolResult {
|
||||
success: true,
|
||||
output: format!(
|
||||
"Datasheet for '{device}' already cached at: {}\n\
|
||||
Use action='read' to get the local path.",
|
||||
path.display()
|
||||
),
|
||||
error: None,
|
||||
});
|
||||
}
|
||||
|
||||
let query = DatasheetManager::search_query(&device);
|
||||
Ok(ToolResult {
|
||||
success: true,
|
||||
output: format!(
|
||||
"Suggested web search for '{device}' datasheet:\n{query}\n\n\
|
||||
Once you have a direct PDF URL, use:\n\
|
||||
datasheet(action=\"download\", device_name=\"{device}\", url=\"<URL>\")"
|
||||
),
|
||||
error: None,
|
||||
})
|
||||
}
|
||||
|
||||
"download" => {
|
||||
let device = match args.get("device_name").and_then(|v| v.as_str()) {
|
||||
Some(d) => d.to_string(),
|
||||
None => {
|
||||
return Ok(ToolResult {
|
||||
success: false,
|
||||
output: String::new(),
|
||||
error: Some(
|
||||
"missing required parameter: device_name for action 'download'"
|
||||
.to_string(),
|
||||
),
|
||||
})
|
||||
}
|
||||
};
|
||||
let url = match args.get("url").and_then(|v| v.as_str()) {
|
||||
Some(u) => u.to_string(),
|
||||
None => {
|
||||
return Ok(ToolResult {
|
||||
success: false,
|
||||
output: String::new(),
|
||||
error: Some(
|
||||
"missing required parameter: url for action 'download'".to_string(),
|
||||
),
|
||||
})
|
||||
}
|
||||
};
|
||||
|
||||
match mgr.download_datasheet(&url, &device).await {
|
||||
Ok(path) => Ok(ToolResult {
|
||||
success: true,
|
||||
output: format!(
|
||||
"Datasheet for '{device}' downloaded successfully.\n\
|
||||
Saved to: {}\n\n\
|
||||
Next step: create a device profile at \
|
||||
~/.zeroclaw/hardware/devices/aardvark0.md with the key \
|
||||
registers, I2C address, and protocol notes from this datasheet.",
|
||||
path.display()
|
||||
),
|
||||
error: None,
|
||||
}),
|
||||
Err(e) => Ok(ToolResult {
|
||||
success: false,
|
||||
output: String::new(),
|
||||
error: Some(format!("download failed: {e}")),
|
||||
}),
|
||||
}
|
||||
}
|
||||
|
||||
"list" => {
|
||||
let datasheets = mgr.list_datasheets();
|
||||
let output = if datasheets.is_empty() {
|
||||
"No datasheets cached yet.\n\
|
||||
Use datasheet(action=\"search\", device_name=\"...\") to find one."
|
||||
.to_string()
|
||||
} else {
|
||||
format!(
|
||||
"{} cached datasheet(s) in ~/.zeroclaw/hardware/datasheets/:\n{}",
|
||||
datasheets.len(),
|
||||
datasheets
|
||||
.iter()
|
||||
.map(|n| format!(" - {n}"))
|
||||
.collect::<Vec<_>>()
|
||||
.join("\n")
|
||||
)
|
||||
};
|
||||
Ok(ToolResult {
|
||||
success: true,
|
||||
output,
|
||||
error: None,
|
||||
})
|
||||
}
|
||||
|
||||
"read" => {
|
||||
let device = match args.get("device_name").and_then(|v| v.as_str()) {
|
||||
Some(d) => d.to_string(),
|
||||
None => {
|
||||
return Ok(ToolResult {
|
||||
success: false,
|
||||
output: String::new(),
|
||||
error: Some(
|
||||
"missing required parameter: device_name for action 'read'"
|
||||
.to_string(),
|
||||
),
|
||||
})
|
||||
}
|
||||
};
|
||||
match mgr.find_local(&device) {
|
||||
Some(path) => Ok(ToolResult {
|
||||
success: true,
|
||||
output: format!(
|
||||
"Datasheet for '{device}' is available at: {}",
|
||||
path.display()
|
||||
),
|
||||
error: None,
|
||||
}),
|
||||
None => Ok(ToolResult {
|
||||
success: false,
|
||||
output: String::new(),
|
||||
error: Some(format!(
|
||||
"no datasheet found for '{device}'. \
|
||||
Use action='search' to find one."
|
||||
)),
|
||||
}),
|
||||
}
|
||||
}
|
||||
|
||||
other => Ok(ToolResult {
|
||||
success: false,
|
||||
output: String::new(),
|
||||
error: Some(format!(
|
||||
"unknown action '{other}'. Valid: search, download, list, read"
|
||||
)),
|
||||
}),
|
||||
}
|
||||
}
|
||||
}
|
||||
// ─────────────────────────────────────────────────────────────────────────────
||||
//! Device types and registry — stable aliases for discovered hardware.
|
||||
//!
|
||||
//! The LLM always refers to devices by alias (`"pico0"`, `"arduino0"`), never
|
||||
//! by raw `/dev/` paths. The `DeviceRegistry` assigns these aliases at startup
|
||||
//! and provides lookup + context building for tool execution.
|
||||
|
||||
use super::transport::Transport;
|
||||
use std::collections::HashMap;
|
||||
use std::sync::Arc;
|
||||
|
||||
// ── DeviceRuntime ─────────────────────────────────────────────────────────────

/// The software runtime / execution environment of a device.
///
/// Determines which host-side tooling is used for code deployment and execution.
/// Currently only [`MicroPython`](DeviceRuntime::MicroPython) is implemented;
/// other variants return a clear "not yet supported" error.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum DeviceRuntime {
    /// MicroPython — uses `mpremote` for code read/write/exec.
    MicroPython,
    /// CircuitPython — `mpremote`-compatible (future).
    CircuitPython,
    /// Arduino — `arduino-cli` for sketch upload (future).
    Arduino,
    /// STM32 / probe-rs based flashing and debugging (future).
    Nucleus,
    /// Linux / Raspberry Pi — ssh/shell execution (future).
    Linux,
    /// Total Phase Aardvark I2C/SPI/GPIO USB adapter.
    Aardvark,
}
||||
impl DeviceRuntime {
|
||||
/// Derive the default runtime from a [`DeviceKind`].
|
||||
pub fn from_kind(kind: &DeviceKind) -> Self {
|
||||
match kind {
|
||||
DeviceKind::Pico | DeviceKind::Esp32 | DeviceKind::Generic => Self::MicroPython,
|
||||
DeviceKind::Arduino => Self::Arduino,
|
||||
DeviceKind::Nucleo => Self::Nucleus,
|
||||
DeviceKind::Aardvark => Self::Aardvark,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl std::fmt::Display for DeviceRuntime {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
match self {
|
||||
Self::MicroPython => write!(f, "MicroPython"),
|
||||
Self::CircuitPython => write!(f, "CircuitPython"),
|
||||
Self::Arduino => write!(f, "Arduino"),
|
||||
Self::Nucleus => write!(f, "Nucleus"),
|
||||
Self::Linux => write!(f, "Linux"),
|
||||
Self::Aardvark => write!(f, "Aardvark"),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// ── DeviceKind ────────────────────────────────────────────────────────────────

/// The category of a discovered hardware device.
///
/// Derived from USB Vendor ID or, for unknown VIDs, from a successful
/// ping handshake (which yields `Generic`).
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum DeviceKind {
    /// Raspberry Pi Pico / Pico W (VID `0x2E8A`).
    Pico,
    /// Arduino Uno, Mega, etc. (VID `0x2341`).
    Arduino,
    /// ESP32 via CP2102 bridge (VID `0x10C4`).
    Esp32,
    /// STM32 Nucleo (VID `0x0483`).
    Nucleo,
    /// Unknown VID that passed the ZeroClaw firmware ping handshake.
    Generic,
    /// Total Phase Aardvark USB adapter (VID `0x2B76`).
    Aardvark,
}
||||
impl DeviceKind {
|
||||
/// Derive the device kind from a USB Vendor ID.
|
||||
/// Returns `None` if the VID is unknown (0 or unrecognised).
|
||||
pub fn from_vid(vid: u16) -> Option<Self> {
|
||||
match vid {
|
||||
0x2e8a => Some(Self::Pico),
|
||||
0x2341 => Some(Self::Arduino),
|
||||
0x10c4 => Some(Self::Esp32),
|
||||
0x0483 => Some(Self::Nucleo),
|
||||
0x2b76 => Some(Self::Aardvark),
|
||||
_ => None,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl std::fmt::Display for DeviceKind {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
match self {
|
||||
Self::Pico => write!(f, "pico"),
|
||||
Self::Arduino => write!(f, "arduino"),
|
||||
Self::Esp32 => write!(f, "esp32"),
|
||||
Self::Nucleo => write!(f, "nucleo"),
|
||||
Self::Generic => write!(f, "generic"),
|
||||
Self::Aardvark => write!(f, "aardvark"),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Capability flags for a connected device.
///
/// Populated from device handshake or static board metadata.
/// Tools can check capabilities before attempting unsupported operations.
/// `Default` yields all-false (no capabilities assumed).
#[derive(Debug, Clone, Default)]
#[allow(clippy::struct_excessive_bools)] // deliberate: independent feature flags, not a state machine
pub struct DeviceCapabilities {
    pub gpio: bool,
    pub i2c: bool,
    pub spi: bool,
    pub swd: bool,
    pub uart: bool,
    pub adc: bool,
    pub pwm: bool,
}
|
||||
/// A discovered and registered hardware device.
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct Device {
|
||||
/// Stable session alias (e.g. `"pico0"`, `"arduino0"`, `"nucleo0"`).
|
||||
pub alias: String,
|
||||
/// Board name from registry (e.g. `"raspberry-pi-pico"`, `"arduino-uno"`).
|
||||
pub board_name: String,
|
||||
/// Device category derived from VID or ping handshake.
|
||||
pub kind: DeviceKind,
|
||||
/// Software runtime that determines how code is deployed/executed.
|
||||
pub runtime: DeviceRuntime,
|
||||
/// USB Vendor ID (if USB-connected).
|
||||
pub vid: Option<u16>,
|
||||
/// USB Product ID (if USB-connected).
|
||||
pub pid: Option<u16>,
|
||||
/// Raw device path (e.g. `"/dev/ttyACM0"`) — internal use only.
|
||||
/// Tools MUST NOT use this directly; always go through Transport.
|
||||
pub device_path: Option<String>,
|
||||
/// Architecture description (e.g. `"ARM Cortex-M0+"`).
|
||||
pub architecture: Option<String>,
|
||||
/// Firmware identifier reported by device during ping handshake.
|
||||
pub firmware: Option<String>,
|
||||
}
|
||||
|
||||
impl Device {
|
||||
/// Convenience accessor — same as `device_path` (matches the Phase 2 spec naming).
|
||||
pub fn port(&self) -> Option<&str> {
|
||||
self.device_path.as_deref()
|
||||
}
|
||||
}
|
||||
|
||||
/// Context passed to hardware tools during execution.
|
||||
///
|
||||
/// Provides the tool with access to the device identity, transport layer,
|
||||
/// and capability flags without the tool managing connections itself.
|
||||
pub struct DeviceContext {
|
||||
/// The device this tool is operating on.
|
||||
pub device: Arc<Device>,
|
||||
/// Transport for sending commands to the device.
|
||||
pub transport: Arc<dyn Transport>,
|
||||
/// Device capabilities (gpio, i2c, spi, etc.).
|
||||
pub capabilities: DeviceCapabilities,
|
||||
}
|
||||
|
||||
/// A registered device entry with its transport and capabilities.
|
||||
struct RegisteredDevice {
|
||||
device: Arc<Device>,
|
||||
transport: Option<Arc<dyn Transport>>,
|
||||
capabilities: DeviceCapabilities,
|
||||
}
|
||||
|
||||
/// Summary string returned by [`DeviceRegistry::prompt_summary`] when no
/// devices are registered. Exported so callers can compare against it without
/// duplicating the literal.
pub const NO_HW_DEVICES_SUMMARY: &str = "No hardware devices connected.";
|
||||
/// Registry of discovered devices with stable session aliases.
|
||||
///
|
||||
/// - Scans at startup (via `hardware::discover`)
|
||||
/// - Assigns aliases: `pico0`, `pico1`, `arduino0`, `nucleo0`, `device0`, etc.
|
||||
/// - Provides alias-based lookup for tool dispatch
|
||||
/// - Generates prompt summaries for LLM context
|
||||
pub struct DeviceRegistry {
|
||||
devices: HashMap<String, RegisteredDevice>,
|
||||
alias_counters: HashMap<String, u32>,
|
||||
}
|
||||
|
||||
impl DeviceRegistry {
|
||||
/// Create an empty registry.
|
||||
pub fn new() -> Self {
|
||||
Self {
|
||||
devices: HashMap::new(),
|
||||
alias_counters: HashMap::new(),
|
||||
}
|
||||
}
|
||||
|
||||
/// Register a discovered device and assign a stable alias.
|
||||
///
|
||||
/// Returns the assigned alias (e.g. `"pico0"`).
|
||||
pub fn register(
|
||||
&mut self,
|
||||
board_name: &str,
|
||||
vid: Option<u16>,
|
||||
pid: Option<u16>,
|
||||
device_path: Option<String>,
|
||||
architecture: Option<String>,
|
||||
) -> String {
|
||||
let prefix = alias_prefix(board_name);
|
||||
let counter = self.alias_counters.entry(prefix.clone()).or_insert(0);
|
||||
let alias = format!("{}{}", prefix, counter);
|
||||
*counter += 1;
|
||||
|
||||
let kind = vid
|
||||
.and_then(DeviceKind::from_vid)
|
||||
.unwrap_or(DeviceKind::Generic);
|
||||
let runtime = DeviceRuntime::from_kind(&kind);
|
||||
|
||||
let device = Arc::new(Device {
|
||||
alias: alias.clone(),
|
||||
board_name: board_name.to_string(),
|
||||
kind,
|
||||
runtime,
|
||||
vid,
|
||||
pid,
|
||||
device_path,
|
||||
architecture,
|
||||
firmware: None,
|
||||
});
|
||||
|
||||
self.devices.insert(
|
||||
alias.clone(),
|
||||
RegisteredDevice {
|
||||
device,
|
||||
transport: None,
|
||||
capabilities: DeviceCapabilities::default(),
|
||||
},
|
||||
);
|
||||
|
||||
alias
|
||||
}
|
||||
|
||||
/// Attach a transport and capabilities to a previously registered device.
|
||||
///
|
||||
/// Returns `Err` when `alias` is not found in the registry (should not
|
||||
/// happen in normal usage because callers pass aliases from `register`).
|
||||
pub fn attach_transport(
|
||||
&mut self,
|
||||
alias: &str,
|
||||
transport: Arc<dyn Transport>,
|
||||
capabilities: DeviceCapabilities,
|
||||
) -> anyhow::Result<()> {
|
||||
if let Some(entry) = self.devices.get_mut(alias) {
|
||||
entry.transport = Some(transport);
|
||||
entry.capabilities = capabilities;
|
||||
Ok(())
|
||||
} else {
|
||||
Err(anyhow::anyhow!("unknown device alias: {}", alias))
|
||||
}
|
||||
}
|
||||
|
||||
/// Look up a device by alias.
|
||||
pub fn get_device(&self, alias: &str) -> Option<Arc<Device>> {
|
||||
self.devices.get(alias).map(|e| e.device.clone())
|
||||
}
|
||||
|
||||
/// Build a `DeviceContext` for a device by alias.
|
||||
///
|
||||
/// Returns `None` if the alias is unknown or no transport is attached.
|
||||
pub fn context(&self, alias: &str) -> Option<DeviceContext> {
|
||||
self.devices.get(alias).and_then(|e| {
|
||||
e.transport.as_ref().map(|t| DeviceContext {
|
||||
device: e.device.clone(),
|
||||
transport: t.clone(),
|
||||
capabilities: e.capabilities.clone(),
|
||||
})
|
||||
})
|
||||
}
|
||||
|
||||
/// List all registered device aliases.
|
||||
pub fn aliases(&self) -> Vec<&str> {
|
||||
self.devices.keys().map(|s| s.as_str()).collect()
|
||||
}
|
||||
|
||||
/// Return a summary of connected devices for the LLM system prompt.
|
||||
pub fn prompt_summary(&self) -> String {
|
||||
if self.devices.is_empty() {
|
||||
return NO_HW_DEVICES_SUMMARY.to_string();
|
||||
}
|
||||
|
||||
let mut lines = vec!["Connected devices:".to_string()];
|
||||
let mut sorted_aliases: Vec<&String> = self.devices.keys().collect();
|
||||
sorted_aliases.sort();
|
||||
for alias in sorted_aliases {
|
||||
let entry = &self.devices[alias];
|
||||
let status = entry
|
||||
.transport
|
||||
.as_ref()
|
||||
.map(|t| {
|
||||
if t.is_connected() {
|
||||
"connected"
|
||||
} else {
|
||||
"disconnected"
|
||||
}
|
||||
})
|
||||
.unwrap_or("no transport");
|
||||
let arch = entry
|
||||
.device
|
||||
.architecture
|
||||
.as_deref()
|
||||
.unwrap_or("unknown arch");
|
||||
lines.push(format!(
|
||||
" {} — {} ({}) [{}]",
|
||||
alias, entry.device.board_name, arch, status
|
||||
));
|
||||
}
|
||||
lines.join("\n")
|
||||
}
|
||||
|
||||
/// Resolve a GPIO-capable device alias from tool arguments.
|
||||
///
|
||||
/// If `args["device"]` is provided, uses that alias directly.
|
||||
/// Otherwise, auto-selects the single GPIO-capable device, returning an
|
||||
/// error description if zero or multiple GPIO devices are available.
|
||||
///
|
||||
/// On success returns `(alias, DeviceContext)` — both are owned / Arc-based
|
||||
/// so the caller can drop the registry lock before doing async I/O.
|
||||
pub fn resolve_gpio_device(
|
||||
&self,
|
||||
args: &serde_json::Value,
|
||||
) -> Result<(String, DeviceContext), String> {
|
||||
let device_alias: String = match args.get("device").and_then(|v| v.as_str()) {
|
||||
Some(a) => a.to_string(),
|
||||
None => {
|
||||
let gpio_aliases: Vec<String> = self
|
||||
.aliases()
|
||||
.into_iter()
|
||||
.filter(|a| {
|
||||
self.context(a)
|
||||
.map(|c| c.capabilities.gpio)
|
||||
.unwrap_or(false)
|
||||
})
|
||||
.map(|a| a.to_string())
|
||||
.collect();
|
||||
match gpio_aliases.as_slice() {
|
||||
[single] => single.clone(),
|
||||
[] => {
|
||||
return Err("no GPIO-capable device found; specify \"device\" parameter"
|
||||
.to_string());
|
||||
}
|
||||
_ => {
|
||||
return Err(format!(
|
||||
"multiple devices available ({}); specify \"device\" parameter",
|
||||
gpio_aliases.join(", ")
|
||||
));
|
||||
}
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
let ctx = self.context(&device_alias).ok_or_else(|| {
|
||||
format!(
|
||||
"device '{}' not found or has no transport attached",
|
||||
device_alias
|
||||
)
|
||||
})?;
|
||||
|
||||
// Verify the device advertises GPIO capability.
|
||||
if !ctx.capabilities.gpio {
|
||||
return Err(format!(
|
||||
"device '{}' does not support GPIO; specify a GPIO-capable device",
|
||||
device_alias
|
||||
));
|
||||
}
|
||||
|
||||
Ok((device_alias, ctx))
|
||||
}
|
||||
|
||||
/// Return `true` when at least one Aardvark adapter is registered.
|
||||
pub fn has_aardvark(&self) -> bool {
|
||||
self.devices
|
||||
.values()
|
||||
.any(|e| e.device.kind == DeviceKind::Aardvark)
|
||||
}
|
||||
|
||||
/// Resolve an Aardvark device from tool arguments.
|
||||
///
|
||||
/// If `args["device"]` is provided, uses that alias directly.
|
||||
/// Otherwise auto-selects the single Aardvark device, returning an error
|
||||
/// description if zero or multiple Aardvark devices are available.
|
||||
///
|
||||
/// Returns `(alias, DeviceContext)` — both are owned/Arc-based so the
|
||||
/// caller can drop the registry lock before doing async I/O.
|
||||
pub fn resolve_aardvark_device(
|
||||
&self,
|
||||
args: &serde_json::Value,
|
||||
) -> Result<(String, DeviceContext), String> {
|
||||
let device_alias: String = match args.get("device").and_then(|v| v.as_str()) {
|
||||
Some(a) => a.to_string(),
|
||||
None => {
|
||||
let aardvark_aliases: Vec<String> = self
|
||||
.aliases()
|
||||
.into_iter()
|
||||
.filter(|a| {
|
||||
self.devices
|
||||
.get(*a)
|
||||
.map(|e| e.device.kind == DeviceKind::Aardvark)
|
||||
.unwrap_or(false)
|
||||
})
|
||||
.map(|a| a.to_string())
|
||||
.collect();
|
||||
match aardvark_aliases.as_slice() {
|
||||
[single] => single.clone(),
|
||||
[] => {
|
||||
return Err("no Aardvark adapter found; is it plugged in?".to_string());
|
||||
}
|
||||
_ => {
|
||||
return Err(format!(
|
||||
"multiple Aardvark adapters available ({}); \
|
||||
specify \"device\" parameter",
|
||||
aardvark_aliases.join(", ")
|
||||
));
|
||||
}
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
let ctx = self.context(&device_alias).ok_or_else(|| {
|
||||
format!("device '{device_alias}' not found or has no transport attached")
|
||||
})?;
|
||||
|
||||
Ok((device_alias, ctx))
|
||||
}
|
||||
|
||||
/// Number of registered devices.
|
||||
pub fn len(&self) -> usize {
|
||||
self.devices.len()
|
||||
}
|
||||
|
||||
/// Whether the registry is empty.
|
||||
pub fn is_empty(&self) -> bool {
|
||||
self.devices.is_empty()
|
||||
}
|
||||
|
||||
/// Look up a device by alias (alias for `get_device` matching the Phase 2 spec).
|
||||
pub fn get(&self, alias: &str) -> Option<Arc<Device>> {
|
||||
self.get_device(alias)
|
||||
}
|
||||
|
||||
/// Return all registered devices.
|
||||
pub fn all(&self) -> Vec<Arc<Device>> {
|
||||
self.devices.values().map(|e| e.device.clone()).collect()
|
||||
}
|
||||
|
||||
/// One-line summary per device: `"pico0: raspberry-pi-pico /dev/ttyACM0"`.
|
||||
///
|
||||
/// Suitable for CLI output and debug logging.
|
||||
pub fn summary(&self) -> String {
|
||||
if self.devices.is_empty() {
|
||||
return String::new();
|
||||
}
|
||||
let mut lines: Vec<String> = self
|
||||
.devices
|
||||
.values()
|
||||
.map(|e| {
|
||||
let path = e.device.port().unwrap_or("(native)");
|
||||
format!("{}: {} {}", e.device.alias, e.device.board_name, path)
|
||||
})
|
||||
.collect();
|
||||
lines.sort(); // deterministic for tests
|
||||
lines.join("\n")
|
||||
}
|
||||
|
||||
/// Discover all connected serial devices and populate the registry.
|
||||
///
|
||||
/// Steps:
|
||||
/// 1. Call `discover::scan_serial_devices()` to enumerate port paths + VID/PID.
|
||||
/// 2. For each device with a recognised VID: register and attach a transport.
|
||||
/// 3. For unknown VID (`0`): attempt a 300 ms ping handshake; register only
|
||||
/// if the device responds with ZeroClaw firmware.
|
||||
/// 4. Return the populated registry.
|
||||
///
|
||||
/// Returns an empty registry when no devices are found or the `hardware`
|
||||
/// feature is disabled.
|
||||
#[cfg(feature = "hardware")]
|
||||
pub async fn discover() -> Self {
|
||||
use super::{
|
||||
discover::scan_serial_devices,
|
||||
serial::{HardwareSerialTransport, DEFAULT_BAUD},
|
||||
};
|
||||
|
||||
let mut registry = Self::new();
|
||||
|
||||
for info in scan_serial_devices() {
|
||||
let is_known_vid = info.vid != 0;
|
||||
|
||||
// For unknown VIDs, run the ping handshake before registering.
|
||||
// This avoids registering random USB-serial adapters.
|
||||
// If the probe succeeds we reuse the same transport instance below.
|
||||
let probe_transport = if !is_known_vid {
|
||||
let probe = HardwareSerialTransport::new(&info.port_path, DEFAULT_BAUD);
|
||||
if !probe.ping_handshake().await {
|
||||
tracing::debug!(
|
||||
port = %info.port_path,
|
||||
"skipping unknown device: no ZeroClaw firmware response"
|
||||
);
|
||||
continue;
|
||||
}
|
||||
Some(probe)
|
||||
} else {
|
||||
None
|
||||
};
|
||||
|
||||
let board_name = info.board_name.as_deref().unwrap_or("unknown").to_string();
|
||||
|
||||
let alias = registry.register(
|
||||
&board_name,
|
||||
if info.vid != 0 { Some(info.vid) } else { None },
|
||||
if info.pid != 0 { Some(info.pid) } else { None },
|
||||
Some(info.port_path.clone()),
|
||||
info.architecture,
|
||||
);
|
||||
|
||||
// For unknown-VID devices that passed ping: mark as Generic.
|
||||
// (register() will have already set kind = Generic for vid=None)
|
||||
|
||||
let transport: Arc<dyn super::transport::Transport> =
|
||||
if let Some(probe) = probe_transport {
|
||||
Arc::new(probe)
|
||||
} else {
|
||||
Arc::new(HardwareSerialTransport::new(&info.port_path, DEFAULT_BAUD))
|
||||
};
|
||||
let caps = DeviceCapabilities {
|
||||
gpio: true, // assume GPIO; Phase 3 will populate via capabilities handshake
|
||||
..DeviceCapabilities::default()
|
||||
};
|
||||
registry.attach_transport(&alias, transport, caps)
|
||||
.unwrap_or_else(|e| tracing::warn!(alias = %alias, err = %e, "attach_transport: unexpected unknown alias"));
|
||||
|
||||
tracing::info!(
|
||||
alias = %alias,
|
||||
port = %info.port_path,
|
||||
vid = %info.vid,
|
||||
"device registered"
|
||||
);
|
||||
}
|
||||
|
||||
registry
|
||||
}
|
||||
}
|
||||
|
||||
impl DeviceRegistry {
|
||||
/// Reconnect a device after reboot/reflash.
|
||||
///
|
||||
/// Drops the old transport, creates a fresh [`HardwareSerialTransport`] for
|
||||
/// the given (or existing) port path, runs the ping handshake to confirm
|
||||
/// ZeroClaw firmware is alive, and re-attaches the transport.
|
||||
///
|
||||
/// Pass `new_port` when the OS assigned a different path after reboot;
|
||||
/// pass `None` to reuse the device's current path.
|
||||
#[cfg(feature = "hardware")]
|
||||
pub async fn reconnect(&mut self, alias: &str, new_port: Option<&str>) -> anyhow::Result<()> {
|
||||
use super::serial::{HardwareSerialTransport, DEFAULT_BAUD};
|
||||
|
||||
let entry = self
|
||||
.devices
|
||||
.get_mut(alias)
|
||||
.ok_or_else(|| anyhow::anyhow!("unknown device alias: {alias}"))?;
|
||||
|
||||
// Determine the port path — prefer the caller's override.
|
||||
let port_path = match new_port {
|
||||
Some(p) => {
|
||||
// Update the device record with the new path.
|
||||
let mut updated = (*entry.device).clone();
|
||||
updated.device_path = Some(p.to_string());
|
||||
entry.device = Arc::new(updated);
|
||||
p.to_string()
|
||||
}
|
||||
None => entry
|
||||
.device
|
||||
.device_path
|
||||
.clone()
|
||||
.ok_or_else(|| anyhow::anyhow!("device {alias} has no port path"))?,
|
||||
};
|
||||
|
||||
// Drop the stale transport.
|
||||
entry.transport = None;
|
||||
|
||||
// Create a fresh transport and verify firmware is alive.
|
||||
let transport = HardwareSerialTransport::new(&port_path, DEFAULT_BAUD);
|
||||
if !transport.ping_handshake().await {
|
||||
anyhow::bail!(
|
||||
"ping handshake failed after reconnect on {port_path} — \
|
||||
firmware may not be running"
|
||||
);
|
||||
}
|
||||
|
||||
entry.transport = Some(Arc::new(transport) as Arc<dyn super::transport::Transport>);
|
||||
entry.capabilities.gpio = true;
|
||||
|
||||
tracing::info!(alias = %alias, port = %port_path, "device reconnected");
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
impl Default for DeviceRegistry {
|
||||
fn default() -> Self {
|
||||
Self::new()
|
||||
}
|
||||
}
|
||||
|
||||
/// Derive the alias prefix family from a board name.
///
/// Order matters: more specific prefixes (`raspberry-pi-pico`) are matched
/// before the generic `raspberry-pi` fallback. Unknown boards map to `device`.
fn alias_prefix(board_name: &str) -> String {
    match board_name {
        s if s.starts_with("raspberry-pi-pico") || s.starts_with("pico") => "pico".to_string(),
        s if s.starts_with("arduino") => "arduino".to_string(),
        s if s.starts_with("esp32") || s.starts_with("esp") => "esp".to_string(),
        s if s.starts_with("nucleo") || s.starts_with("stm32") => "nucleo".to_string(),
        s if s.starts_with("rpi") || s == "raspberry-pi" => "rpi".to_string(),
        _ => "device".to_string(),
    }
}
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
|
||||
#[test]
|
||||
fn alias_prefix_pico_variants() {
|
||||
assert_eq!(alias_prefix("raspberry-pi-pico"), "pico");
|
||||
assert_eq!(alias_prefix("pico-w"), "pico");
|
||||
assert_eq!(alias_prefix("pico"), "pico");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn alias_prefix_arduino() {
|
||||
assert_eq!(alias_prefix("arduino-uno"), "arduino");
|
||||
assert_eq!(alias_prefix("arduino-mega"), "arduino");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn alias_prefix_esp() {
|
||||
assert_eq!(alias_prefix("esp32"), "esp");
|
||||
assert_eq!(alias_prefix("esp32-s3"), "esp");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn alias_prefix_nucleo() {
|
||||
assert_eq!(alias_prefix("nucleo-f401re"), "nucleo");
|
||||
assert_eq!(alias_prefix("stm32-discovery"), "nucleo");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn alias_prefix_rpi() {
|
||||
assert_eq!(alias_prefix("rpi-gpio"), "rpi");
|
||||
assert_eq!(alias_prefix("raspberry-pi"), "rpi");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn alias_prefix_unknown() {
|
||||
assert_eq!(alias_prefix("custom-board"), "device");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn registry_assigns_sequential_aliases() {
|
||||
let mut reg = DeviceRegistry::new();
|
||||
let a1 = reg.register("raspberry-pi-pico", Some(0x2E8A), Some(0x000A), None, None);
|
||||
let a2 = reg.register("raspberry-pi-pico", Some(0x2E8A), Some(0x000A), None, None);
|
||||
let a3 = reg.register("arduino-uno", Some(0x2341), Some(0x0043), None, None);
|
||||
|
||||
assert_eq!(a1, "pico0");
|
||||
assert_eq!(a2, "pico1");
|
||||
assert_eq!(a3, "arduino0");
|
||||
assert_eq!(reg.len(), 3);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn registry_get_device_by_alias() {
|
||||
let mut reg = DeviceRegistry::new();
|
||||
let alias = reg.register(
|
||||
"nucleo-f401re",
|
||||
Some(0x0483),
|
||||
Some(0x374B),
|
||||
Some("/dev/ttyACM0".to_string()),
|
||||
Some("ARM Cortex-M4".to_string()),
|
||||
);
|
||||
|
||||
let device = reg.get_device(&alias).unwrap();
|
||||
assert_eq!(device.alias, "nucleo0");
|
||||
assert_eq!(device.board_name, "nucleo-f401re");
|
||||
assert_eq!(device.vid, Some(0x0483));
|
||||
assert_eq!(device.architecture.as_deref(), Some("ARM Cortex-M4"));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn registry_unknown_alias_returns_none() {
|
||||
let reg = DeviceRegistry::new();
|
||||
assert!(reg.get_device("nonexistent").is_none());
|
||||
assert!(reg.context("nonexistent").is_none());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn registry_context_none_without_transport() {
|
||||
let mut reg = DeviceRegistry::new();
|
||||
let alias = reg.register("pico", None, None, None, None);
|
||||
// No transport attached → context returns None.
|
||||
assert!(reg.context(&alias).is_none());
|
||||
}
|
||||
|
||||
#[test]
fn registry_prompt_summary_empty() {
    // With no devices, the canned "no hardware" text is returned verbatim.
    assert_eq!(DeviceRegistry::new().prompt_summary(), NO_HW_DEVICES_SUMMARY);
}

#[test]
fn registry_prompt_summary_with_devices() {
    let mut registry = DeviceRegistry::new();
    registry.register(
        "raspberry-pi-pico",
        Some(0x2E8A),
        None,
        None,
        Some("ARM Cortex-M0+".to_string()),
    );

    let summary = registry.prompt_summary();
    // Summary must mention alias, board, architecture, and transport status.
    for needle in ["pico0", "raspberry-pi-pico", "ARM Cortex-M0+", "no transport"] {
        assert!(summary.contains(needle), "summary missing {:?}: {}", needle, summary);
    }
}

#[test]
fn device_capabilities_default_all_false() {
    let caps = DeviceCapabilities::default();
    let flags = [
        caps.gpio, caps.i2c, caps.spi, caps.swd, caps.uart, caps.adc, caps.pwm,
    ];
    // Default capabilities advertise nothing.
    assert!(flags.iter().all(|&enabled| !enabled));
}

#[test]
fn registry_default_is_empty() {
    let registry = DeviceRegistry::default();
    assert_eq!(registry.len(), 0);
    assert!(registry.is_empty());
}

#[test]
fn registry_aliases_returns_all() {
    let mut registry = DeviceRegistry::new();
    registry.register("pico", None, None, None, None);
    registry.register("arduino-uno", None, None, None, None);

    let mut names = registry.aliases();
    names.sort_unstable();
    assert_eq!(names, vec!["arduino0", "pico0"]);
}
|
||||
|
||||
// ── Phase 2 new tests ────────────────────────────────────────────────────
|
||||
|
||||
#[test]
fn device_kind_from_vid_known() {
    // Each well-known USB vendor ID maps to its board family.
    for (vid, kind) in [
        (0x2e8a, DeviceKind::Pico),
        (0x2341, DeviceKind::Arduino),
        (0x10c4, DeviceKind::Esp32),
        (0x0483, DeviceKind::Nucleo),
    ] {
        assert_eq!(DeviceKind::from_vid(vid), Some(kind));
    }
}

#[test]
fn device_kind_from_vid_unknown() {
    for vid in [0x0000, 0xffff] {
        assert_eq!(DeviceKind::from_vid(vid), None);
    }
}

#[test]
fn device_kind_display() {
    let cases = [
        (DeviceKind::Pico, "pico"),
        (DeviceKind::Arduino, "arduino"),
        (DeviceKind::Esp32, "esp32"),
        (DeviceKind::Nucleo, "nucleo"),
        (DeviceKind::Generic, "generic"),
    ];
    for (kind, text) in cases {
        assert_eq!(kind.to_string(), text);
    }
}

#[test]
fn register_sets_kind_from_vid() {
    let mut registry = DeviceRegistry::new();

    let pico = registry.register("raspberry-pi-pico", Some(0x2e8a), Some(0x000a), None, None);
    let uno = registry.register("arduino-uno", Some(0x2341), Some(0x0043), None, None);
    // No VID at all → falls back to the generic kind.
    let other = registry.register("unknown-device", None, None, None, None);

    assert_eq!(registry.get(&pico).unwrap().kind, DeviceKind::Pico);
    assert_eq!(registry.get(&uno).unwrap().kind, DeviceKind::Arduino);
    assert_eq!(registry.get(&other).unwrap().kind, DeviceKind::Generic);
}
|
||||
|
||||
#[test]
fn device_port_returns_device_path() {
    let mut registry = DeviceRegistry::new();
    let alias = registry.register(
        "raspberry-pi-pico",
        Some(0x2e8a),
        None,
        Some("/dev/ttyACM0".to_string()),
        None,
    );
    assert_eq!(
        registry.get(&alias).unwrap().port(),
        Some("/dev/ttyACM0")
    );
}

#[test]
fn device_port_none_without_path() {
    let mut registry = DeviceRegistry::new();
    let alias = registry.register("pico", None, None, None, None);
    assert!(registry.get(&alias).unwrap().port().is_none());
}

#[test]
fn registry_get_is_alias_for_get_device() {
    let mut registry = DeviceRegistry::new();
    let alias = registry.register("raspberry-pi-pico", Some(0x2e8a), None, None, None);

    // `get` and `get_device` must resolve the same entry.
    let via_get = registry.get(&alias).expect("get should find the device");
    let via_get_device = registry
        .get_device(&alias)
        .expect("get_device should find the device");
    assert_eq!(via_get.alias, via_get_device.alias);
}

#[test]
fn registry_all_returns_every_device() {
    let mut registry = DeviceRegistry::new();
    for (board, vid) in [("raspberry-pi-pico", 0x2e8a), ("arduino-uno", 0x2341)] {
        registry.register(board, Some(vid), None, None, None);
    }
    assert_eq!(registry.all().len(), 2);
}

#[test]
fn registry_summary_one_liner_per_device() {
    let mut registry = DeviceRegistry::new();
    registry.register(
        "raspberry-pi-pico",
        Some(0x2e8a),
        None,
        Some("/dev/ttyACM0".to_string()),
        None,
    );

    let summary = registry.summary();
    for needle in ["pico0", "raspberry-pi-pico", "/dev/ttyACM0"] {
        assert!(summary.contains(needle), "summary missing {:?}", needle);
    }
}

#[test]
fn registry_summary_empty_when_no_devices() {
    assert_eq!(DeviceRegistry::new().summary(), "");
}
|
||||
}
|
||||
@@ -10,6 +10,49 @@ use super::registry;
|
||||
use anyhow::Result;
|
||||
use nusb::MaybeFuture;
|
||||
|
||||
/// Serial port with USB VID/PID for device registration.
///
/// Produced by [`scan_serial_devices`]; carries everything the device
/// registry needs to register the board behind a serial port.
#[derive(Debug, Clone)]
pub struct SerialDeviceInfo {
    /// OS path of the serial port, e.g. `/dev/ttyACM0`.
    pub port_path: String,
    /// USB vendor ID; 0 when the port is not a USB port.
    pub vid: u16,
    /// USB product ID; 0 when the port is not a USB port.
    pub pid: u16,
    /// Human-readable board name when the VID/PID pair is known to the registry.
    pub board_name: Option<String>,
    /// CPU architecture string from the board table, when known.
    pub architecture: Option<String>,
}
|
||||
|
||||
/// Enumerate serial ports that correspond to known USB devices.
/// Returns empty when hardware feature is disabled or enumeration fails.
#[cfg(feature = "hardware")]
pub fn scan_serial_devices() -> Vec<SerialDeviceInfo> {
    let ports = match tokio_serial::available_ports() {
        Ok(ports) => ports,
        // Enumeration failure is non-fatal: report no devices.
        Err(_) => return Vec::new(),
    };

    ports
        .into_iter()
        // Honour the serial-path allowlist before touching anything else.
        .filter(|port| crate::util::is_serial_path_allowed(&port.port_name))
        .map(|port| {
            // Non-USB ports carry no VID/PID; encode that as (0, 0).
            let (vid, pid) = if let tokio_serial::SerialPortType::UsbPort(usb) = &port.port_type {
                (usb.vid, usb.pid)
            } else {
                (0, 0)
            };
            // Only consult the board table for real USB ports.
            let board = (vid != 0).then(|| registry::lookup_board(vid, pid)).flatten();
            SerialDeviceInfo {
                port_path: port.port_name,
                vid,
                pid,
                board_name: board.map(|b| b.name.to_string()),
                architecture: board.and_then(|b| b.architecture.map(String::from)),
            }
        })
        .collect()
}
|
||||
|
||||
/// Information about a discovered USB device.
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct UsbDeviceInfo {
|
||||
|
||||
@@ -0,0 +1,628 @@
|
||||
//! GPIO tools — `gpio_read` and `gpio_write` for LLM-driven hardware control.
|
||||
//!
|
||||
//! These are the first built-in hardware tools. They implement the standard
|
||||
//! [`Tool`](crate::tools::Tool) trait so the LLM can call them via function
|
||||
//! calling, and dispatch commands to physical devices via the
|
||||
//! [`Transport`](super::Transport) layer.
|
||||
//!
|
||||
//! Wire protocol (ZeroClaw serial JSON):
|
||||
//! ```text
|
||||
//! gpio_write:
|
||||
//! Host → Device: {"cmd":"gpio_write","params":{"pin":25,"value":1}}\n
|
||||
//! Device → Host: {"ok":true,"data":{"pin":25,"value":1,"state":"HIGH"}}\n
|
||||
//!
|
||||
//! gpio_read:
|
||||
//! Host → Device: {"cmd":"gpio_read","params":{"pin":25}}\n
|
||||
//! Device → Host: {"ok":true,"data":{"pin":25,"value":1,"state":"HIGH"}}\n
|
||||
//! ```
|
||||
|
||||
use super::device::DeviceRegistry;
|
||||
use super::protocol::ZcCommand;
|
||||
use crate::tools::traits::{Tool, ToolResult};
|
||||
use async_trait::async_trait;
|
||||
use serde_json::json;
|
||||
use std::sync::Arc;
|
||||
use tokio::sync::RwLock;
|
||||
|
||||
// ── GpioWriteTool ─────────────────────────────────────────────────────────────
|
||||
|
||||
/// Tool: set a GPIO pin HIGH or LOW on a connected hardware device.
///
/// The LLM provides `device` (alias), `pin`, and `value` (0 or 1).
/// The tool builds a `ZcCommand`, sends it via the device's transport,
/// and returns a human-readable result.
pub struct GpioWriteTool {
    // Shared device registry; read-locked briefly in execute() to resolve
    // the target device, never held across transport I/O.
    registry: Arc<RwLock<DeviceRegistry>>,
}

impl GpioWriteTool {
    /// Create a GPIO-write tool backed by the given device registry.
    pub fn new(registry: Arc<RwLock<DeviceRegistry>>) -> Self {
        Self { registry }
    }
}
|
||||
|
||||
#[async_trait]
|
||||
impl Tool for GpioWriteTool {
|
||||
fn name(&self) -> &str {
|
||||
"gpio_write"
|
||||
}
|
||||
|
||||
fn description(&self) -> &str {
|
||||
"Set a GPIO pin HIGH (1) or LOW (0) on a connected hardware device"
|
||||
}
|
||||
|
||||
fn parameters_schema(&self) -> serde_json::Value {
|
||||
json!({
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"device": {
|
||||
"type": "string",
|
||||
"description": "Device alias e.g. pico0, arduino0"
|
||||
},
|
||||
"pin": {
|
||||
"type": "integer",
|
||||
"description": "GPIO pin number"
|
||||
},
|
||||
"value": {
|
||||
"type": "integer",
|
||||
"enum": [0, 1],
|
||||
"description": "1 = HIGH (on), 0 = LOW (off)"
|
||||
}
|
||||
},
|
||||
"required": ["pin", "value"]
|
||||
})
|
||||
}
|
||||
|
||||
async fn execute(&self, args: serde_json::Value) -> anyhow::Result<ToolResult> {
|
||||
let pin = match args.get("pin").and_then(|v| v.as_u64()) {
|
||||
Some(p) => p,
|
||||
None => {
|
||||
return Ok(ToolResult {
|
||||
success: false,
|
||||
output: String::new(),
|
||||
error: Some("missing required parameter: pin".to_string()),
|
||||
})
|
||||
}
|
||||
};
|
||||
let value = match args.get("value").and_then(|v| v.as_u64()) {
|
||||
Some(v) => v,
|
||||
None => {
|
||||
return Ok(ToolResult {
|
||||
success: false,
|
||||
output: String::new(),
|
||||
error: Some("missing required parameter: value".to_string()),
|
||||
})
|
||||
}
|
||||
};
|
||||
|
||||
if value > 1 {
|
||||
return Ok(ToolResult {
|
||||
success: false,
|
||||
output: String::new(),
|
||||
error: Some("value must be 0 or 1".to_string()),
|
||||
});
|
||||
}
|
||||
|
||||
// Resolve device alias and obtain an owned context (Arc-based) before
|
||||
// dropping the registry read guard — avoids holding the lock across async I/O.
|
||||
let (device_alias, ctx) = {
|
||||
let registry = self.registry.read().await;
|
||||
match registry.resolve_gpio_device(&args) {
|
||||
Ok(resolved) => resolved,
|
||||
Err(msg) => {
|
||||
return Ok(ToolResult {
|
||||
success: false,
|
||||
output: String::new(),
|
||||
error: Some(msg),
|
||||
});
|
||||
}
|
||||
}
|
||||
// registry read guard dropped here
|
||||
};
|
||||
|
||||
let cmd = ZcCommand::new("gpio_write", json!({ "pin": pin, "value": value }));
|
||||
|
||||
match ctx.transport.send(&cmd).await {
|
||||
Ok(resp) if resp.ok => {
|
||||
let state = resp
|
||||
.data
|
||||
.get("state")
|
||||
.and_then(|v| v.as_str())
|
||||
.unwrap_or(if value == 1 { "HIGH" } else { "LOW" });
|
||||
Ok(ToolResult {
|
||||
success: true,
|
||||
output: format!("GPIO {} set {} on {}", pin, state, device_alias),
|
||||
error: None,
|
||||
})
|
||||
}
|
||||
Ok(resp) => Ok(ToolResult {
|
||||
success: false,
|
||||
output: String::new(),
|
||||
error: Some(
|
||||
resp.error
|
||||
.unwrap_or_else(|| "device returned ok:false".to_string()),
|
||||
),
|
||||
}),
|
||||
Err(e) => Ok(ToolResult {
|
||||
success: false,
|
||||
output: String::new(),
|
||||
error: Some(format!("transport error: {}", e)),
|
||||
}),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// ── GpioReadTool ──────────────────────────────────────────────────────────────
|
||||
|
||||
/// Tool: read the current HIGH/LOW state of a GPIO pin on a connected device.
///
/// The LLM provides `device` (alias) and `pin`. The tool builds a `ZcCommand`,
/// sends it via the device's transport, and returns the pin state.
pub struct GpioReadTool {
    // Shared device registry; read-locked briefly in execute() to resolve
    // the target device, never held across transport I/O.
    registry: Arc<RwLock<DeviceRegistry>>,
}

impl GpioReadTool {
    /// Create a GPIO-read tool backed by the given device registry.
    pub fn new(registry: Arc<RwLock<DeviceRegistry>>) -> Self {
        Self { registry }
    }
}
|
||||
|
||||
#[async_trait]
|
||||
impl Tool for GpioReadTool {
|
||||
fn name(&self) -> &str {
|
||||
"gpio_read"
|
||||
}
|
||||
|
||||
fn description(&self) -> &str {
|
||||
"Read the current HIGH/LOW state of a GPIO pin on a connected device"
|
||||
}
|
||||
|
||||
fn parameters_schema(&self) -> serde_json::Value {
|
||||
json!({
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"device": {
|
||||
"type": "string",
|
||||
"description": "Device alias e.g. pico0, arduino0"
|
||||
},
|
||||
"pin": {
|
||||
"type": "integer",
|
||||
"description": "GPIO pin number to read"
|
||||
}
|
||||
},
|
||||
"required": ["pin"]
|
||||
})
|
||||
}
|
||||
|
||||
async fn execute(&self, args: serde_json::Value) -> anyhow::Result<ToolResult> {
|
||||
let pin = match args.get("pin").and_then(|v| v.as_u64()) {
|
||||
Some(p) => p,
|
||||
None => {
|
||||
return Ok(ToolResult {
|
||||
success: false,
|
||||
output: String::new(),
|
||||
error: Some("missing required parameter: pin".to_string()),
|
||||
})
|
||||
}
|
||||
};
|
||||
|
||||
// Resolve device alias and obtain an owned context (Arc-based) before
|
||||
// dropping the registry read guard — avoids holding the lock across async I/O.
|
||||
let (device_alias, ctx) = {
|
||||
let registry = self.registry.read().await;
|
||||
match registry.resolve_gpio_device(&args) {
|
||||
Ok(resolved) => resolved,
|
||||
Err(msg) => {
|
||||
return Ok(ToolResult {
|
||||
success: false,
|
||||
output: String::new(),
|
||||
error: Some(msg),
|
||||
});
|
||||
}
|
||||
}
|
||||
// registry read guard dropped here
|
||||
};
|
||||
|
||||
let cmd = ZcCommand::new("gpio_read", json!({ "pin": pin }));
|
||||
|
||||
match ctx.transport.send(&cmd).await {
|
||||
Ok(resp) if resp.ok => {
|
||||
let value = resp.data.get("value").and_then(|v| v.as_u64()).unwrap_or(0);
|
||||
let state = resp
|
||||
.data
|
||||
.get("state")
|
||||
.and_then(|v| v.as_str())
|
||||
.unwrap_or(if value == 1 { "HIGH" } else { "LOW" });
|
||||
Ok(ToolResult {
|
||||
success: true,
|
||||
output: format!("GPIO {} is {} ({}) on {}", pin, state, value, device_alias),
|
||||
error: None,
|
||||
})
|
||||
}
|
||||
Ok(resp) => Ok(ToolResult {
|
||||
success: false,
|
||||
output: String::new(),
|
||||
error: Some(
|
||||
resp.error
|
||||
.unwrap_or_else(|| "device returned ok:false".to_string()),
|
||||
),
|
||||
}),
|
||||
Err(e) => Ok(ToolResult {
|
||||
success: false,
|
||||
output: String::new(),
|
||||
error: Some(format!("transport error: {}", e)),
|
||||
}),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// ── Factory ───────────────────────────────────────────────────────────────────
|
||||
|
||||
/// Create the built-in GPIO tools for a given device registry.
|
||||
///
|
||||
/// Returns `[GpioWriteTool, GpioReadTool]` ready for registration in the
|
||||
/// agent's tool list or a future `ToolRegistry`.
|
||||
pub fn gpio_tools(registry: Arc<RwLock<DeviceRegistry>>) -> Vec<Box<dyn Tool>> {
|
||||
vec![
|
||||
Box::new(GpioWriteTool::new(registry.clone())),
|
||||
Box::new(GpioReadTool::new(registry)),
|
||||
]
|
||||
}
|
||||
|
||||
// ── Tests ─────────────────────────────────────────────────────────────────────
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
use crate::hardware::{
|
||||
device::{DeviceCapabilities, DeviceRegistry},
|
||||
protocol::ZcResponse,
|
||||
transport::{Transport, TransportError, TransportKind},
|
||||
};
|
||||
use std::sync::atomic::{AtomicBool, Ordering};
|
||||
|
||||
/// Mock transport that returns configurable responses.
struct MockTransport {
    // Response handed back by `send` (cloned on every call).
    response: tokio::sync::Mutex<ZcResponse>,
    // Simulated link state; `send` fails with `Disconnected` when false.
    connected: AtomicBool,
    // Last command received, so tests can assert on the wire protocol.
    last_cmd: tokio::sync::Mutex<Option<ZcCommand>>,
}

impl MockTransport {
    /// Connected mock that answers every `send` with `response`.
    fn new(response: ZcResponse) -> Self {
        Self {
            response: tokio::sync::Mutex::new(response),
            connected: AtomicBool::new(true),
            last_cmd: tokio::sync::Mutex::new(None),
        }
    }

    /// Mock simulating a dead link: every `send` returns `Disconnected`.
    fn disconnected() -> Self {
        let t = Self::new(ZcResponse::error("mock: disconnected"));
        t.connected.store(false, Ordering::SeqCst);
        t
    }

    /// The most recent command passed to `send`, if any.
    async fn last_command(&self) -> Option<ZcCommand> {
        self.last_cmd.lock().await.clone()
    }
}

#[async_trait]
impl Transport for MockTransport {
    async fn send(&self, cmd: &ZcCommand) -> Result<ZcResponse, TransportError> {
        if !self.connected.load(Ordering::SeqCst) {
            return Err(TransportError::Disconnected);
        }
        // Record the command so tests can inspect what was sent.
        *self.last_cmd.lock().await = Some(cmd.clone());
        Ok(self.response.lock().await.clone())
    }

    fn kind(&self) -> TransportKind {
        TransportKind::Serial
    }

    fn is_connected(&self) -> bool {
        self.connected.load(Ordering::SeqCst)
    }
}
|
||||
|
||||
/// Helper: build a registry with one device + mock transport.
|
||||
fn registry_with_mock(transport: Arc<MockTransport>) -> Arc<RwLock<DeviceRegistry>> {
|
||||
let mut reg = DeviceRegistry::new();
|
||||
let alias = reg.register(
|
||||
"raspberry-pi-pico",
|
||||
Some(0x2e8a),
|
||||
Some(0x000a),
|
||||
Some("/dev/ttyACM0".to_string()),
|
||||
Some("ARM Cortex-M0+".to_string()),
|
||||
);
|
||||
reg.attach_transport(
|
||||
&alias,
|
||||
transport as Arc<dyn Transport>,
|
||||
DeviceCapabilities {
|
||||
gpio: true,
|
||||
..Default::default()
|
||||
},
|
||||
)
|
||||
.expect("alias was just registered");
|
||||
Arc::new(RwLock::new(reg))
|
||||
}
|
||||
|
||||
// ── GpioWriteTool tests ──────────────────────────────────────────────
|
||||
|
||||
#[tokio::test]
|
||||
async fn gpio_write_success() {
|
||||
let mock = Arc::new(MockTransport::new(ZcResponse::success(
|
||||
json!({"pin": 25, "value": 1, "state": "HIGH"}),
|
||||
)));
|
||||
let reg = registry_with_mock(mock.clone());
|
||||
let tool = GpioWriteTool::new(reg);
|
||||
|
||||
let result = tool
|
||||
.execute(json!({"device": "pico0", "pin": 25, "value": 1}))
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
assert!(result.success);
|
||||
assert_eq!(result.output, "GPIO 25 set HIGH on pico0");
|
||||
assert!(result.error.is_none());
|
||||
|
||||
// Verify the command sent to the device
|
||||
let cmd = mock.last_command().await.unwrap();
|
||||
assert_eq!(cmd.cmd, "gpio_write");
|
||||
assert_eq!(cmd.params["pin"], 25);
|
||||
assert_eq!(cmd.params["value"], 1);
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn gpio_write_low() {
|
||||
let mock = Arc::new(MockTransport::new(ZcResponse::success(
|
||||
json!({"pin": 13, "value": 0, "state": "LOW"}),
|
||||
)));
|
||||
let reg = registry_with_mock(mock.clone());
|
||||
let tool = GpioWriteTool::new(reg);
|
||||
|
||||
let result = tool
|
||||
.execute(json!({"device": "pico0", "pin": 13, "value": 0}))
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
assert!(result.success);
|
||||
assert_eq!(result.output, "GPIO 13 set LOW on pico0");
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn gpio_write_device_error() {
|
||||
let mock = Arc::new(MockTransport::new(ZcResponse::error(
|
||||
"pin 99 not available",
|
||||
)));
|
||||
let reg = registry_with_mock(mock);
|
||||
let tool = GpioWriteTool::new(reg);
|
||||
|
||||
let result = tool
|
||||
.execute(json!({"device": "pico0", "pin": 99, "value": 1}))
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
assert!(!result.success);
|
||||
assert_eq!(result.error.as_deref(), Some("pin 99 not available"));
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn gpio_write_transport_disconnected() {
|
||||
let mock = Arc::new(MockTransport::disconnected());
|
||||
let reg = registry_with_mock(mock);
|
||||
let tool = GpioWriteTool::new(reg);
|
||||
|
||||
let result = tool
|
||||
.execute(json!({"device": "pico0", "pin": 25, "value": 1}))
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
assert!(!result.success);
|
||||
assert!(result.error.as_deref().unwrap().contains("transport"));
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn gpio_write_unknown_device() {
|
||||
let mock = Arc::new(MockTransport::new(ZcResponse::success(json!({}))));
|
||||
let reg = registry_with_mock(mock);
|
||||
let tool = GpioWriteTool::new(reg);
|
||||
|
||||
let result = tool
|
||||
.execute(json!({"device": "nonexistent", "pin": 25, "value": 1}))
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
assert!(!result.success);
|
||||
assert!(result.error.as_deref().unwrap().contains("not found"));
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn gpio_write_invalid_value() {
|
||||
let mock = Arc::new(MockTransport::new(ZcResponse::success(json!({}))));
|
||||
let reg = registry_with_mock(mock);
|
||||
let tool = GpioWriteTool::new(reg);
|
||||
|
||||
let result = tool
|
||||
.execute(json!({"device": "pico0", "pin": 25, "value": 5}))
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
assert!(!result.success);
|
||||
assert_eq!(result.error.as_deref(), Some("value must be 0 or 1"));
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn gpio_write_missing_params() {
|
||||
let mock = Arc::new(MockTransport::new(ZcResponse::success(json!({}))));
|
||||
let reg = registry_with_mock(mock);
|
||||
let tool = GpioWriteTool::new(reg);
|
||||
|
||||
// Missing pin
|
||||
let result = tool
|
||||
.execute(json!({"device": "pico0", "value": 1}))
|
||||
.await
|
||||
.unwrap();
|
||||
assert!(!result.success);
|
||||
assert!(result
|
||||
.error
|
||||
.as_deref()
|
||||
.unwrap_or("")
|
||||
.contains("missing required parameter: pin"));
|
||||
|
||||
// Missing device with empty registry — auto-select finds no GPIO device → Ok(failure)
|
||||
let empty_reg = Arc::new(RwLock::new(DeviceRegistry::new()));
|
||||
let tool_no_reg = GpioWriteTool::new(empty_reg);
|
||||
let result = tool_no_reg
|
||||
.execute(json!({"pin": 25, "value": 1}))
|
||||
.await
|
||||
.unwrap();
|
||||
assert!(!result.success);
|
||||
assert!(result.error.as_deref().unwrap_or("").contains("no GPIO"));
|
||||
|
||||
// Missing value
|
||||
let result = tool
|
||||
.execute(json!({"device": "pico0", "pin": 25}))
|
||||
.await
|
||||
.unwrap();
|
||||
assert!(!result.success);
|
||||
assert!(result
|
||||
.error
|
||||
.as_deref()
|
||||
.unwrap_or("")
|
||||
.contains("missing required parameter: value"));
|
||||
}
|
||||
|
||||
// ── GpioReadTool tests ───────────────────────────────────────────────
|
||||
|
||||
#[tokio::test]
|
||||
async fn gpio_read_success() {
|
||||
let mock = Arc::new(MockTransport::new(ZcResponse::success(
|
||||
json!({"pin": 25, "value": 1, "state": "HIGH"}),
|
||||
)));
|
||||
let reg = registry_with_mock(mock.clone());
|
||||
let tool = GpioReadTool::new(reg);
|
||||
|
||||
let result = tool
|
||||
.execute(json!({"device": "pico0", "pin": 25}))
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
assert!(result.success);
|
||||
assert_eq!(result.output, "GPIO 25 is HIGH (1) on pico0");
|
||||
assert!(result.error.is_none());
|
||||
|
||||
let cmd = mock.last_command().await.unwrap();
|
||||
assert_eq!(cmd.cmd, "gpio_read");
|
||||
assert_eq!(cmd.params["pin"], 25);
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn gpio_read_low() {
|
||||
let mock = Arc::new(MockTransport::new(ZcResponse::success(
|
||||
json!({"pin": 13, "value": 0, "state": "LOW"}),
|
||||
)));
|
||||
let reg = registry_with_mock(mock);
|
||||
let tool = GpioReadTool::new(reg);
|
||||
|
||||
let result = tool
|
||||
.execute(json!({"device": "pico0", "pin": 13}))
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
assert!(result.success);
|
||||
assert_eq!(result.output, "GPIO 13 is LOW (0) on pico0");
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn gpio_read_device_error() {
|
||||
let mock = Arc::new(MockTransport::new(ZcResponse::error("pin not configured")));
|
||||
let reg = registry_with_mock(mock);
|
||||
let tool = GpioReadTool::new(reg);
|
||||
|
||||
let result = tool
|
||||
.execute(json!({"device": "pico0", "pin": 99}))
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
assert!(!result.success);
|
||||
assert_eq!(result.error.as_deref(), Some("pin not configured"));
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn gpio_read_transport_disconnected() {
|
||||
let mock = Arc::new(MockTransport::disconnected());
|
||||
let reg = registry_with_mock(mock);
|
||||
let tool = GpioReadTool::new(reg);
|
||||
|
||||
let result = tool
|
||||
.execute(json!({"device": "pico0", "pin": 25}))
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
assert!(!result.success);
|
||||
assert!(result.error.as_deref().unwrap().contains("transport"));
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn gpio_read_missing_params() {
|
||||
let mock = Arc::new(MockTransport::new(ZcResponse::success(json!({}))));
|
||||
let reg = registry_with_mock(mock);
|
||||
let tool = GpioReadTool::new(reg);
|
||||
|
||||
// Missing pin
|
||||
let result = tool.execute(json!({"device": "pico0"})).await.unwrap();
|
||||
assert!(!result.success);
|
||||
assert!(result
|
||||
.error
|
||||
.as_deref()
|
||||
.unwrap_or("")
|
||||
.contains("missing required parameter: pin"));
|
||||
|
||||
// Missing device with empty registry — auto-select finds no GPIO device → Ok(failure)
|
||||
let empty_reg = Arc::new(RwLock::new(DeviceRegistry::new()));
|
||||
let tool_no_reg = GpioReadTool::new(empty_reg);
|
||||
let result = tool_no_reg.execute(json!({"pin": 25})).await.unwrap();
|
||||
assert!(!result.success);
|
||||
assert!(result.error.as_deref().unwrap_or("").contains("no GPIO"));
|
||||
}
|
||||
|
||||
// ── Factory / spec tests ─────────────────────────────────────────────
|
||||
|
||||
#[test]
fn gpio_tools_factory_returns_two() {
    let tools = gpio_tools(Arc::new(RwLock::new(DeviceRegistry::new())));
    assert_eq!(tools.len(), 2);
    // Order is part of the factory contract: write first, read second.
    let names: Vec<&str> = tools.iter().map(|t| t.name()).collect();
    assert_eq!(names, ["gpio_write", "gpio_read"]);
}

#[test]
fn gpio_write_spec_is_valid() {
    let tool = GpioWriteTool::new(Arc::new(RwLock::new(DeviceRegistry::new())));
    let spec = tool.spec();
    assert_eq!(spec.name, "gpio_write");
    for field in ["device", "pin", "value"] {
        assert!(spec.parameters["properties"][field].is_object());
    }
    let required = spec.parameters["required"].as_array().unwrap();
    assert_eq!(required.len(), 2, "required should be [pin, value]");
}

#[test]
fn gpio_read_spec_is_valid() {
    let tool = GpioReadTool::new(Arc::new(RwLock::new(DeviceRegistry::new())));
    let spec = tool.spec();
    assert_eq!(spec.name, "gpio_read");
    for field in ["device", "pin"] {
        assert!(spec.parameters["properties"][field].is_object());
    }
    let required = spec.parameters["required"].as_array().unwrap();
    assert_eq!(required.len(), 1, "required should be [pin]");
}
|
||||
}
|
||||
@@ -0,0 +1,327 @@
|
||||
//! Plugin manifest loader — scans `~/.zeroclaw/tools/` at startup.
|
||||
//!
|
||||
//! Layout expected on disk:
|
||||
//! ```text
|
||||
//! ~/.zeroclaw/tools/
|
||||
//! ├── i2c_scan/
|
||||
//! │ ├── tool.toml
|
||||
//! │ └── i2c_scan.py
|
||||
//! └── pwm_set/
|
||||
//! ├── tool.toml
|
||||
//! └── pwm_set
|
||||
//! ```
|
||||
//!
|
||||
//! Rules:
|
||||
//! - The directory is **created** if it does not exist.
|
||||
//! - Each subdirectory is scanned for a `tool.toml`.
|
||||
//! - Manifests that fail to parse or validate are **skipped with a warning**;
|
||||
//! they must not crash startup.
|
||||
//! - Non-directory entries at the top level are silently ignored.
|
||||
|
||||
use super::manifest::ToolManifest;
|
||||
use super::subprocess::SubprocessTool;
|
||||
use crate::tools::traits::Tool;
|
||||
use anyhow::Result;
|
||||
use std::fs;
|
||||
use std::path::{Path, PathBuf};
|
||||
|
||||
/// A successfully loaded plugin, ready for registration.
///
/// Produced by [`scan_plugin_dir`]; `name` and `version` are copied out of
/// the manifest so callers can log and register without touching the boxed
/// tool.
pub struct LoadedPlugin {
    /// Tool name from the manifest (unique key in [`ToolRegistry`]).
    pub name: String,
    /// Semantic version string from the manifest.
    pub version: String,
    /// The constructed tool, boxed for dynamic dispatch.
    pub tool: Box<dyn Tool>,
}
|
||||
|
||||
/// Scan `~/.zeroclaw/tools/` and return all valid plugins.
|
||||
///
|
||||
/// - Creates the directory if absent.
|
||||
/// - Skips broken manifests with a `tracing::warn!` — does not propagate errors.
|
||||
/// - Returns an empty `Vec` when no plugins are installed.
|
||||
pub fn scan_plugin_dir() -> Vec<LoadedPlugin> {
|
||||
let tools_dir = match plugin_tools_dir() {
|
||||
Ok(p) => p,
|
||||
Err(e) => {
|
||||
tracing::warn!("[registry] cannot resolve plugin tools dir: {}", e);
|
||||
return Vec::new();
|
||||
}
|
||||
};
|
||||
|
||||
// Create the directory tree if it is missing.
|
||||
if !tools_dir.exists() {
|
||||
if let Err(e) = fs::create_dir_all(&tools_dir) {
|
||||
tracing::warn!(
|
||||
"[registry] could not create {:?}: {}",
|
||||
tools_dir.display(),
|
||||
e
|
||||
);
|
||||
return Vec::new();
|
||||
}
|
||||
tracing::info!(
|
||||
"[registry] created plugin directory: {}",
|
||||
tools_dir.display()
|
||||
);
|
||||
}
|
||||
|
||||
println!(
|
||||
"[registry] scanning {}...",
|
||||
match dirs_home().as_deref().filter(|s| !s.is_empty()) {
|
||||
Some(home) => tools_dir
|
||||
.to_str()
|
||||
.unwrap_or("~/.zeroclaw/tools")
|
||||
.replace(home, "~"),
|
||||
None => tools_dir
|
||||
.to_str()
|
||||
.unwrap_or("~/.zeroclaw/tools")
|
||||
.to_string(),
|
||||
}
|
||||
);
|
||||
|
||||
let mut plugins = Vec::new();
|
||||
|
||||
let entries = match fs::read_dir(&tools_dir) {
|
||||
Ok(e) => e,
|
||||
Err(e) => {
|
||||
tracing::warn!("[registry] cannot read tools dir: {}", e);
|
||||
return Vec::new();
|
||||
}
|
||||
};
|
||||
|
||||
for entry in entries {
|
||||
let entry = match entry {
|
||||
Ok(e) => e,
|
||||
Err(e) => {
|
||||
tracing::warn!("[registry] skipping unreadable dir entry: {}", e);
|
||||
continue;
|
||||
}
|
||||
};
|
||||
|
||||
let plugin_dir = entry.path();
|
||||
|
||||
// Only descend into subdirectories.
|
||||
if !plugin_dir.is_dir() {
|
||||
continue;
|
||||
}
|
||||
|
||||
let manifest_path = plugin_dir.join("tool.toml");
|
||||
|
||||
if !manifest_path.exists() {
|
||||
tracing::debug!(
|
||||
"[registry] no tool.toml in {:?} — skipping",
|
||||
plugin_dir.file_name().unwrap_or_default()
|
||||
);
|
||||
continue;
|
||||
}
|
||||
|
||||
match load_one_plugin(&plugin_dir, &manifest_path) {
|
||||
Ok(plugin) => plugins.push(plugin),
|
||||
Err(e) => {
|
||||
tracing::warn!(
|
||||
"[registry] skipping plugin in {:?}: {}",
|
||||
plugin_dir.file_name().unwrap_or_default(),
|
||||
e
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
plugins
|
||||
}
|
||||
|
||||
/// Parse and validate a single plugin directory.
///
/// Returns `Err` on any validation failure so the caller can log and continue.
fn load_one_plugin(plugin_dir: &Path, manifest_path: &Path) -> Result<LoadedPlugin> {
    let raw = fs::read_to_string(manifest_path)
        .map_err(|e| anyhow::anyhow!("cannot read tool.toml: {}", e))?;

    let manifest: ToolManifest = toml::from_str(&raw)
        .map_err(|e| anyhow::anyhow!("TOML parse error in tool.toml: {}", e))?;

    // Validate required fields — fail fast with a descriptive error.
    if manifest.tool.name.trim().is_empty() {
        anyhow::bail!("manifest missing [tool] name");
    }
    if manifest.tool.description.trim().is_empty() {
        anyhow::bail!("manifest missing [tool] description");
    }
    if manifest.exec.binary.trim().is_empty() {
        anyhow::bail!("manifest missing [exec] binary");
    }

    // Validate binary path: must exist, be a regular file, and reside within plugin_dir.
    // SECURITY: both sides are canonicalized before the `starts_with` containment
    // check below, so `..` segments and symlinks in the manifest cannot point the
    // exec binary outside the plugin's own directory.
    let canonical_plugin_dir = plugin_dir.canonicalize().map_err(|e| {
        anyhow::anyhow!(
            "cannot canonicalize plugin dir {}: {}",
            plugin_dir.display(),
            e
        )
    })?;
    let raw_binary_path = plugin_dir.join(&manifest.exec.binary);
    // Existence is checked first so a missing file yields a clearer error
    // than the canonicalize() failure it would otherwise produce.
    if !raw_binary_path.exists() {
        anyhow::bail!(
            "manifest exec binary not found: {}",
            raw_binary_path.display()
        );
    }
    let binary_path = raw_binary_path.canonicalize().map_err(|e| {
        anyhow::anyhow!(
            "cannot canonicalize binary path {}: {}",
            raw_binary_path.display(),
            e
        )
    })?;
    if !binary_path.starts_with(&canonical_plugin_dir) {
        anyhow::bail!(
            "manifest exec binary escapes plugin directory: {} is not under {}",
            binary_path.display(),
            canonical_plugin_dir.display()
        );
    }
    if !binary_path.is_file() {
        anyhow::bail!(
            "manifest exec binary is not a regular file: {}",
            binary_path.display()
        );
    }

    // SubprocessTool consumes the manifest, so name/version are cloned out first.
    let name = manifest.tool.name.clone();
    let version = manifest.tool.version.clone();
    let tool: Box<dyn Tool> = Box::new(SubprocessTool::new(manifest, binary_path));

    Ok(LoadedPlugin {
        name,
        version,
        tool,
    })
}
|
||||
|
||||
/// Return the path `~/.zeroclaw/tools/` using the `directories` crate.
|
||||
pub fn plugin_tools_dir() -> Result<PathBuf> {
|
||||
use directories::BaseDirs;
|
||||
let base = BaseDirs::new()
|
||||
.ok_or_else(|| anyhow::anyhow!("cannot determine the user home directory"))?;
|
||||
Ok(base.home_dir().join(".zeroclaw").join("tools"))
|
||||
}
|
||||
|
||||
/// Best-effort home dir string for display purposes only.
|
||||
fn dirs_home() -> Option<String> {
|
||||
use directories::BaseDirs;
|
||||
BaseDirs::new().map(|b| b.home_dir().to_string_lossy().into_owned())
|
||||
}
|
||||
|
||||
#[cfg(test)]
mod tests {
    use super::*;
    use std::fs;

    /// Drop a minimal valid `tool.toml` plus a dummy executable into `dir`.
    fn write_valid_manifest(dir: &Path) {
        let manifest_toml = r#"
[tool]
name = "test_plugin"
version = "1.0.0"
description = "A deterministic test plugin"

[exec]
binary = "tool.sh"

[[parameters]]
name = "device"
type = "string"
description = "Device alias"
required = true
"#;
        fs::write(dir.join("tool.toml"), manifest_toml).unwrap();
        // Write a dummy binary (content doesn't matter for manifest loading).
        fs::write(
            dir.join("tool.sh"),
            "#!/bin/sh\necho '{\"success\":true,\"output\":\"ok\",\"error\":null}'\n",
        )
        .unwrap();
    }

    #[test]
    fn load_one_plugin_succeeds_for_valid_manifest() {
        let dir = tempfile::tempdir().unwrap();
        write_valid_manifest(dir.path());

        let loaded = load_one_plugin(dir.path(), &dir.path().join("tool.toml"))
            .expect("valid manifest should load");

        assert_eq!(loaded.name, "test_plugin");
        assert_eq!(loaded.version, "1.0.0");
        assert_eq!(loaded.tool.name(), "test_plugin");
    }

    #[test]
    fn load_one_plugin_fails_on_missing_name() {
        let dir = tempfile::tempdir().unwrap();
        let manifest_toml = r#"
[tool]
name = ""
version = "1.0.0"
description = "Missing name test"

[exec]
binary = "tool.sh"
"#;
        fs::write(dir.path().join("tool.toml"), manifest_toml).unwrap();

        let err = load_one_plugin(dir.path(), &dir.path().join("tool.toml"))
            .expect_err("expected an error for missing name");
        assert!(err.to_string().contains("name"), "unexpected error: {}", err);
    }

    #[test]
    fn load_one_plugin_fails_on_parse_error() {
        let dir = tempfile::tempdir().unwrap();
        fs::write(dir.path().join("tool.toml"), "not valid toml {{{{").unwrap();

        let err = load_one_plugin(dir.path(), &dir.path().join("tool.toml"))
            .expect_err("expected a parse error");
        assert!(
            err.to_string().contains("TOML parse error"),
            "unexpected error: {}",
            err
        );
    }

    #[test]
    fn scan_plugin_dir_skips_broken_manifests_without_panicking() {
        // scan_plugin_dir is pinned to ~/.zeroclaw/tools, so we exercise the
        // same per-plugin loading path it uses against a temp directory
        // without touching the real one.
        let root = tempfile::tempdir().unwrap();

        // One well-formed plugin…
        let good_dir = root.path().join("good");
        fs::create_dir_all(&good_dir).unwrap();
        write_valid_manifest(&good_dir);

        // …and one whose manifest is unparseable TOML.
        let bad_dir = root.path().join("bad");
        fs::create_dir_all(&bad_dir).unwrap();
        fs::write(bad_dir.join("tool.toml"), "{{broken").unwrap();

        assert!(
            load_one_plugin(&good_dir, &good_dir.join("tool.toml")).is_ok(),
            "good plugin should load"
        );
        assert!(
            load_one_plugin(&bad_dir, &bad_dir.join("tool.toml")).is_err(),
            "bad plugin should error, not panic"
        );
    }

    #[test]
    fn plugin_tools_dir_returns_path_ending_in_zeroclaw_tools() {
        let path = plugin_tools_dir().expect("should resolve");
        let suffix = std::path::Path::new(".zeroclaw").join("tools");
        assert!(
            path.ends_with(&suffix),
            "unexpected path: {}",
            path.to_string_lossy()
        );
    }
}
|
||||
@@ -0,0 +1,194 @@
|
||||
//! Plugin manifest — `~/.zeroclaw/tools/<name>/tool.toml` schema.
|
||||
//!
|
||||
//! Each user plugin lives in its own subdirectory and carries a `tool.toml`
|
||||
//! that describes the tool, how to invoke it, and what parameters it accepts.
|
||||
//!
|
||||
//! Example `tool.toml`:
|
||||
//! ```toml
|
||||
//! [tool]
|
||||
//! name = "i2c_scan"
|
||||
//! version = "1.0.0"
|
||||
//! description = "Scan the I2C bus for connected devices"
|
||||
//!
|
||||
//! [exec]
|
||||
//! binary = "i2c_scan.py"
|
||||
//!
|
||||
//! [transport]
|
||||
//! preferred = "serial"
|
||||
//! device_required = true
|
||||
//!
|
||||
//! [[parameters]]
|
||||
//! name = "device"
|
||||
//! type = "string"
|
||||
//! description = "Device alias e.g. pico0"
|
||||
//! required = true
|
||||
//!
|
||||
//! [[parameters]]
|
||||
//! name = "bus"
|
||||
//! type = "integer"
|
||||
//! description = "I2C bus number (default 0)"
|
||||
//! required = false
|
||||
//! default = 0
|
||||
//! ```
|
||||
|
||||
use serde::Deserialize;
|
||||
|
||||
/// Full plugin manifest — parsed from `tool.toml`.
///
/// See the module-level docs for a complete example manifest.
#[derive(Debug, Deserialize)]
pub struct ToolManifest {
    /// Tool identity and human-readable metadata (`[tool]` table).
    pub tool: ToolMeta,
    /// How to invoke the tool binary (`[exec]` table).
    pub exec: ExecConfig,
    /// Optional transport preference and device requirement (`[transport]` table).
    pub transport: Option<TransportConfig>,
    /// Parameter definitions used to build the JSON Schema for the LLM.
    /// Defaults to an empty list when no `[[parameters]]` tables are present.
    #[serde(default)]
    pub parameters: Vec<ParameterDef>,
}
|
||||
|
||||
/// Tool identity metadata (the `[tool]` table of `tool.toml`).
#[derive(Debug, Deserialize)]
pub struct ToolMeta {
    /// Unique tool name, used as the function-call key by the LLM.
    pub name: String,
    /// Semantic version string (e.g. `"1.0.0"`).
    pub version: String,
    /// Human-readable description injected into the LLM system prompt.
    pub description: String,
}
|
||||
|
||||
/// Execution configuration — how ZeroClaw spawns the tool
/// (the `[exec]` table of `tool.toml`).
#[derive(Debug, Deserialize)]
pub struct ExecConfig {
    /// Path to the binary, relative to the plugin directory.
    ///
    /// Can be a Python script (`"tool.py"`), a shell script (`"run.sh"`),
    /// a compiled binary (`"i2c_scan"`), or any executable.
    pub binary: String,
}
|
||||
|
||||
/// Optional transport hint for the tool (the `[transport]` table).
///
/// When present, ZeroClaw will prefer the named transport kind
/// and can enforce device presence before calling the tool.
#[derive(Debug, Deserialize)]
pub struct TransportConfig {
    /// Preferred transport kind: `"serial"` | `"swd"` | `"native"` | `"any"`.
    pub preferred: String,
    /// Whether the tool requires a hardware device to be connected.
    /// NOTE: this field is mandatory whenever a `[transport]` table exists.
    pub device_required: bool,
}
|
||||
|
||||
/// A single parameter definition for a plugin tool
/// (one `[[parameters]]` table in `tool.toml`).
#[derive(Debug, Deserialize)]
pub struct ParameterDef {
    /// Parameter name (matches the JSON key passed to the tool via stdin).
    pub name: String,
    /// JSON Schema primitive type: `"string"` | `"integer"` | `"boolean"`.
    /// Renamed because `type` is a Rust keyword.
    #[serde(rename = "type")]
    pub r#type: String,
    /// Human-readable description shown to the LLM.
    pub description: String,
    /// Whether the LLM must supply this parameter.
    pub required: bool,
    /// Optional default value serialized as a JSON Value.
    /// `None` when the manifest omits `default`.
    pub default: Option<serde_json::Value>,
}
|
||||
|
||||
#[cfg(test)]
mod tests {
    use super::*;

    const MINIMAL_TOML: &str = r#"
[tool]
name = "i2c_scan"
version = "1.0.0"
description = "Scan the I2C bus"

[exec]
binary = "i2c_scan.py"

[[parameters]]
name = "device"
type = "string"
description = "Device alias"
required = true
"#;

    #[test]
    fn manifest_parses_minimal_toml() {
        let parsed: ToolManifest = toml::from_str(MINIMAL_TOML).expect("parse failed");

        assert_eq!(parsed.tool.name, "i2c_scan");
        assert_eq!(parsed.tool.version, "1.0.0");
        assert_eq!(parsed.exec.binary, "i2c_scan.py");
        assert!(parsed.transport.is_none());

        // Exactly one parameter, with the expected shape.
        assert_eq!(parsed.parameters.len(), 1);
        let device = &parsed.parameters[0];
        assert_eq!(device.name, "device");
        assert!(device.required);
    }

    const FULL_TOML: &str = r#"
[tool]
name = "pwm_set"
version = "1.0.0"
description = "Set PWM duty cycle on a pin"

[exec]
binary = "pwm_set"

[transport]
preferred = "serial"
device_required = true

[[parameters]]
name = "device"
type = "string"
description = "Device alias"
required = true

[[parameters]]
name = "pin"
type = "integer"
description = "PWM pin number"
required = true

[[parameters]]
name = "duty"
type = "integer"
description = "Duty cycle 0–100"
required = false
default = 50
"#;

    #[test]
    fn manifest_parses_full_toml_with_transport_and_defaults() {
        let parsed: ToolManifest = toml::from_str(FULL_TOML).expect("parse failed");
        assert_eq!(parsed.tool.name, "pwm_set");

        let transport = parsed.transport.as_ref().expect("transport missing");
        assert_eq!(transport.preferred, "serial");
        assert!(transport.device_required);

        let duty = match parsed.parameters.iter().find(|p| p.name == "duty") {
            Some(p) => p,
            None => panic!("duty param missing"),
        };
        assert!(!duty.required);
        assert_eq!(duty.default, Some(serde_json::json!(50)));
    }

    #[test]
    fn manifest_empty_parameters_default_to_empty_vec() {
        let raw = r#"
[tool]
name = "noop"
version = "0.1.0"
description = "No-op tool"

[exec]
binary = "noop"
"#;
        let parsed: ToolManifest = toml::from_str(raw).expect("parse failed");
        assert_eq!(parsed.parameters.len(), 0);
    }
}
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user