Merge remote-tracking branch 'origin/main' into pr2093-mainmerge

This commit is contained in:
argenis de la rosa
2026-02-28 17:43:48 -05:00
34 changed files with 3539 additions and 173 deletions
+1 -1
View File
@@ -212,7 +212,7 @@ jobs:
- name: Link check (offline, added links only)
if: steps.collect_links.outputs.count != '0'
uses: lycheeverse/lychee-action@a8c4c7cb88f0c7386610c35eb25108e448569cb0 # v2
uses: lycheeverse/lychee-action@8646ba30535128ac92d33dfc9133794bfdd9b411 # v2
with:
fail: true
args: >-
+56
View File
@@ -0,0 +1,56 @@
# Deploys the static site in ./web to GitHub Pages on pushes to main/dev
# that touch web/**, or on manual dispatch.
# (Reconstructed: the original indentation was destroyed in extraction.)
name: Deploy Web to GitHub Pages

on:
  push:
    branches: [main, dev]
    paths:
      - 'web/**'
  workflow_dispatch:

# Minimum permissions required by actions/deploy-pages (OIDC deployment).
permissions:
  contents: read
  pages: write
  id-token: write

# Only one Pages deployment at a time; queued runs wait instead of
# cancelling an in-flight deployment.
concurrency:
  group: "pages"
  cancel-in-progress: false

jobs:
  build:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@34e114876b0b11c390a56381ad16ebd13914f8d5 # v4
      - name: Setup Node
        uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4
        with:
          node-version: '20'
      - name: Install dependencies
        working-directory: ./web
        run: npm ci
      - name: Build
        working-directory: ./web
        run: npm run build
      - name: Setup Pages
        uses: actions/configure-pages@983d7736d9b0ae728b81ab479565c72886d7745b # v5
      - name: Upload artifact
        uses: actions/upload-pages-artifact@7b1f4a764d45c48632c6b24a0339c27f5614fb0b # v4
        with:
          path: ./web/dist
  deploy:
    environment:
      name: github-pages
      url: ${{ steps.deployment.outputs.page_url }}
    runs-on: ubuntu-latest
    needs: build
    steps:
      - name: Deploy to GitHub Pages
        id: deployment
        uses: actions/deploy-pages@d6db90164ac5ed86f2b6aed7e0febac5b3c0c03e # v4
+1 -1
View File
@@ -184,7 +184,7 @@ jobs:
- name: Link check (added links)
if: github.event_name != 'workflow_dispatch' && steps.links.outputs.count != '0'
uses: lycheeverse/lychee-action@a8c4c7cb88f0c7386610c35eb25108e448569cb0 # v2
uses: lycheeverse/lychee-action@8646ba30535128ac92d33dfc9133794bfdd9b411 # v2
with:
fail: true
args: >-
Generated
+49 -35
View File
@@ -540,16 +540,6 @@ version = "0.1.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5a45f9771ced8a774de5e5ebffbe520f52e3943bf5a9a6baa3a5d14a5de1afe6"
[[package]]
name = "bcder"
version = "0.7.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1f7c42c9913f68cf9390a225e81ad56a5c515347287eb98baa710090ca1de86d"
dependencies = [
"bytes",
"smallvec",
]
[[package]]
name = "bech32"
version = "0.11.1"
@@ -1655,9 +1645,22 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e7c1832837b905bbfb5101e07cc24c8deddf52f93225eee6ead5f4d63d53ddcb"
dependencies = [
"const-oid",
"der_derive",
"flagset",
"zeroize",
]
[[package]]
name = "der_derive"
version = "0.7.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8034092389675178f570469e6c3b0465d3d30b4505c294a6550db47f3c17ad18"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.117",
]
[[package]]
name = "deranged"
version = "0.5.8"
@@ -2204,6 +2207,12 @@ version = "0.5.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1d674e81391d1e1ab681a28d99df07927c6d4aa5b027d7da16ba32d1d21ecd99"
[[package]]
name = "flagset"
version = "0.4.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b7ac824320a75a52197e8f2d787f6a38b6718bb6897a35142d749af3c0e8f4fe"
[[package]]
name = "flate2"
version = "1.1.9"
@@ -4604,16 +4613,6 @@ dependencies = [
"unicode-normalization",
]
[[package]]
name = "pem"
version = "3.0.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1d30c53c26bc5b31a98cd02d20f25a7c8567146caf63ed593a9d87b2775291be"
dependencies = [
"base64",
"serde_core",
]
[[package]]
name = "percent-encoding"
version = "2.3.2"
@@ -6821,6 +6820,27 @@ version = "0.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20"
[[package]]
name = "tls_codec"
version = "0.4.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0de2e01245e2bb89d6f05801c564fa27624dbd7b1846859876c7dad82e90bf6b"
dependencies = [
"tls_codec_derive",
"zeroize",
]
[[package]]
name = "tls_codec_derive"
version = "0.4.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2d2e76690929402faae40aebdda620a2c0e25dd6d3b9afe48867dfd95991f4bd"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.117",
]
[[package]]
name = "tokio"
version = "1.49.0"
@@ -6876,16 +6896,17 @@ dependencies = [
[[package]]
name = "tokio-postgres-rustls"
version = "0.12.0"
version = "0.13.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "04fb792ccd6bbcd4bba408eb8a292f70fc4a3589e5d793626f45190e6454b6ab"
checksum = "27d684bad428a0f2481f42241f821db42c54e2dc81d8c00db8536c506b0a0144"
dependencies = [
"const-oid",
"ring",
"rustls",
"tokio",
"tokio-postgres",
"tokio-rustls",
"x509-certificate",
"x509-cert",
]
[[package]]
@@ -8997,22 +9018,15 @@ dependencies = [
]
[[package]]
name = "x509-certificate"
version = "0.23.1"
name = "x509-cert"
version = "0.2.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "66534846dec7a11d7c50a74b7cdb208b9a581cad890b7866430d438455847c85"
checksum = "1301e935010a701ae5f8655edc0ad17c44bad3ac5ce8c39185f75453b720ae94"
dependencies = [
"bcder",
"bytes",
"chrono",
"const-oid",
"der",
"hex",
"pem",
"ring",
"signature",
"spki",
"thiserror 1.0.69",
"zeroize",
"tls_codec",
]
[[package]]
+1 -1
View File
@@ -110,7 +110,7 @@ prost = { version = "0.14", default-features = false, features = ["derive"], opt
# Memory / persistence
rusqlite = { version = "0.37", features = ["bundled"] }
postgres = { version = "0.19", features = ["with-chrono-0_4"], optional = true }
tokio-postgres-rustls = { version = "0.12", optional = true }
tokio-postgres-rustls = { version = "0.13", optional = true }
chrono = { version = "0.4", default-features = false, features = ["clock", "std", "serde"] }
chrono-tz = "0.10"
cron = "0.15"
+4 -33
View File
@@ -46,7 +46,7 @@ Built by students and members of the Harvard, MIT, and Sundai.Club communities.
</p>
<p align="center">
<strong>Fast, small, and fully autonomous AI assistant infrastructure</strong><br />
<strong>Fast, small, and fully autonomous Operating System</strong><br />
Deploy anywhere. Swap anything.
</p>
@@ -138,16 +138,9 @@ Local machine quick benchmark (macOS arm64, Feb 2026) normalized for 0.8GHz edge
<img src="zero-claw.jpeg" alt="ZeroClaw vs OpenClaw Comparison" width="800" />
</p>
### 🙏 Special Thanks
---
A heartfelt thank you to the communities and institutions that inspire and fuel this open-source work:
- **Harvard University** — for fostering intellectual curiosity and pushing the boundaries of what's possible.
- **MIT** — for championing open knowledge, open source, and the belief that technology should be accessible to everyone.
- **Sundai Club** — for the community, the energy, and the relentless drive to build things that matter.
- **The World & Beyond** 🌍✨ — to every contributor, dreamer, and builder out there making open source a force for good. This is for you.
We're building in the open because the best ideas come from everywhere. If you're reading this, you're part of it. Welcome. 🦀❤️
For full documentation, see [`docs/README.md`](docs/README.md) | [`docs/SUMMARY.md`](docs/SUMMARY.md)
## ⚠️ Official Repository & Impersonation Warning
@@ -172,31 +165,9 @@ ZeroClaw is dual-licensed for maximum openness and contributor protection:
You may choose either license. **Contributors automatically grant rights under both** — see [CLA.md](CLA.md) for the full contributor agreement.
### Trademark
The **ZeroClaw** name and logo are trademarks of ZeroClaw Labs. This license does not grant permission to use them to imply endorsement or affiliation. See [TRADEMARK.md](TRADEMARK.md) for permitted and prohibited uses.
### Contributor Protections
- You **retain copyright** of your contributions
- **Patent grant** (Apache 2.0) shields you from patent claims by other contributors
- Your contributions are **permanently attributed** in commit history and [NOTICE](NOTICE)
- No trademark rights are transferred by contributing
## Contributing
New to ZeroClaw? Look for issues labeled [`good first issue`](https://github.com/zeroclaw-labs/zeroclaw/issues?q=is%3Aissue+is%3Aopen+label%3A%22good+first+issue%22) — see our [Contributing Guide](CONTRIBUTING.md#first-time-contributors) for how to get started.
See [CONTRIBUTING.md](CONTRIBUTING.md) and [CLA.md](CLA.md). Implement a trait, submit a PR:
- CI workflow guide: [docs/ci-map.md](docs/ci-map.md)
- New `Provider` → `src/providers/`
- New `Channel` → `src/channels/`
- New `Observer` → `src/observability/`
- New `Tool` → `src/tools/`
- New `Memory` → `src/memory/`
- New `Tunnel` → `src/tunnel/`
- New `Skill` → `~/.zeroclaw/workspace/skills/<name>/`
See [CONTRIBUTING.md](CONTRIBUTING.md) and [CLA.md](CLA.md). Implement a trait, submit a PR.
---
+23 -2
View File
@@ -146,6 +146,7 @@ If `[channels_config.matrix]`, `[channels_config.lark]`, or `[channels_config.fe
| Napcat | websocket receive + HTTP send (OneBot) | No (typically local/LAN) |
| Linq | webhook (`/linq`) | Yes (public HTTPS callback) |
| iMessage | local integration | No |
| ACP | stdio (JSON-RPC 2.0) | No |
| Nostr | relay websocket (NIP-04 / NIP-17) | No |
---
@@ -160,7 +161,7 @@ For channels with inbound sender allowlists:
Field names differ by channel:
- `allowed_users` (Telegram/Discord/Slack/Mattermost/Matrix/IRC/Lark/Feishu/DingTalk/QQ/Napcat/Nextcloud Talk)
- `allowed_users` (Telegram/Discord/Slack/Mattermost/Matrix/IRC/Lark/Feishu/DingTalk/QQ/Napcat/Nextcloud Talk/ACP)
- `allowed_from` (Signal)
- `allowed_numbers` (WhatsApp)
- `allowed_senders` (Email/Linq)
@@ -540,6 +541,25 @@ Notes:
allowed_contacts = ["*"]
```
### 4.18 ACP
ACP (Agent Client Protocol) enables ZeroClaw to act as a client for the OpenCode ACP server,
allowing remote control of OpenCode behavior through JSON-RPC 2.0 communication over stdio.
```toml
[channels_config.acp]
opencode_path = "opencode" # optional, default: "opencode"
workdir = "/path/to/workspace" # optional
extra_args = [] # optional additional arguments to `opencode acp`
allowed_users = ["*"] # empty = deny all, "*" = allow all
```
Notes:
- ACP uses JSON-RPC 2.0 protocol over stdio with newline-delimited messages.
- Requires `opencode` binary in PATH or specified via `opencode_path`.
- The channel starts OpenCode subprocess via `opencode acp` command.
- Responses from OpenCode can be sent back to the originating channel when configured.
---
## 5. Validation Workflow
@@ -588,7 +608,7 @@ RUST_LOG=info zeroclaw daemon 2>&1 | tee /tmp/zeroclaw.log
Then filter channel/gateway events:
```bash
rg -n "Matrix|Telegram|Discord|Slack|Mattermost|Signal|WhatsApp|Email|IRC|Lark|DingTalk|QQ|iMessage|Nostr|Webhook|Channel" /tmp/zeroclaw.log
rg -n "Matrix|Telegram|Discord|Slack|Mattermost|Signal|WhatsApp|Email|IRC|Lark|DingTalk|QQ|iMessage|Nostr|Webhook|Channel|ACP" /tmp/zeroclaw.log
```
### 7.2 Keyword table
@@ -610,6 +630,7 @@ rg -n "Matrix|Telegram|Discord|Slack|Mattermost|Signal|WhatsApp|Email|IRC|Lark|D
| QQ | `QQ: connected and identified` | `QQ: ignoring C2C message from unauthorized user:` / `QQ: ignoring group message from unauthorized user:` | `QQ: received Reconnect (op 7)` / `QQ: received Invalid Session (op 9)` / `QQ: message channel closed` |
| Nextcloud Talk (gateway) | `POST /nextcloud-talk — Nextcloud Talk bot webhook` | `Nextcloud Talk webhook signature verification failed` / `Nextcloud Talk: ignoring message from unauthorized actor:` | `Nextcloud Talk send failed:` / `LLM error for Nextcloud Talk message:` |
| iMessage | `iMessage channel listening (AppleScript bridge)...` | (contact allowlist enforced by `allowed_contacts`) | `iMessage poll error:` |
| ACP | `ACP channel started` | `ACP: ignoring message from unauthorized user:` | `ACP process exited unexpectedly:` / `ACP JSON-RPC timeout:` / `ACP process spawn failed:` |
| Nostr | `Nostr channel listening as npub1...` | `Nostr: ignoring NIP-04 message from unauthorized pubkey:` / `Nostr: ignoring NIP-17 message from unauthorized pubkey:` | `Failed to decrypt NIP-04 message:` / `Failed to unwrap NIP-17 gift wrap:` / `Nostr relay pool shut down` |
### 7.3 Runtime supervisor keywords
+13
View File
@@ -18,6 +18,8 @@ use std::io::Write as IoWrite;
use std::sync::Arc;
use std::time::Instant;
const AUTOSAVE_MIN_MESSAGE_CHARS: usize = 20;
pub struct Agent {
provider: Box<dyn Provider>,
tools: Vec<Box<dyn Tool>>,
@@ -595,6 +597,17 @@ impl Agent {
.push(ConversationMessage::Chat(ChatMessage::assistant(
final_text.clone(),
)));
if self.auto_save && final_text.chars().count() >= AUTOSAVE_MIN_MESSAGE_CHARS {
let _ = self
.memory
.store(
"assistant_resp",
&final_text,
MemoryCategory::Conversation,
None,
)
.await;
}
self.trim_history();
return Ok(final_text);
+22
View File
@@ -2104,6 +2104,17 @@ pub async fn run(
)
.await?;
final_output = response.clone();
if config.memory.auto_save && response.chars().count() >= AUTOSAVE_MIN_MESSAGE_CHARS {
let assistant_key = autosave_memory_key("assistant_resp");
let _ = mem
.store(
&assistant_key,
&response,
MemoryCategory::Conversation,
None,
)
.await;
}
println!("{response}");
observer.record_event(&ObserverEvent::TurnComplete);
} else {
@@ -2296,6 +2307,17 @@ pub async fn run(
}
};
final_output = response.clone();
if config.memory.auto_save && response.chars().count() >= AUTOSAVE_MIN_MESSAGE_CHARS {
let assistant_key = autosave_memory_key("assistant_resp");
let _ = mem
.store(
&assistant_key,
&response,
MemoryCategory::Conversation,
None,
)
.await;
}
if let Err(e) = crate::channels::Channel::send(
&cli,
&crate::channels::traits::SendMessage::new(format!("\n{response}\n"), "user"),
+11 -6
View File
@@ -636,7 +636,7 @@ async fn history_trims_after_max_messages() {
// ═══════════════════════════════════════════════════════════════════════════
#[tokio::test]
async fn auto_save_stores_only_user_messages_in_memory() {
async fn auto_save_stores_user_and_assistant_messages_in_memory() {
let (mem, _tmp) = make_sqlite_memory();
let provider = Box::new(ScriptedProvider::new(vec![text_response(
"I remember everything",
@@ -651,11 +651,11 @@ async fn auto_save_stores_only_user_messages_in_memory() {
let _ = agent.turn("Remember this fact").await.unwrap();
// Auto-save only persists user-stated input, never assistant-generated summaries.
// Auto-save persists both user input and assistant output for traceability.
let count = mem.count().await.unwrap();
assert_eq!(
count, 1,
"Expected exactly 1 user memory entry, got {count}"
count, 2,
"Expected user + assistant memory entries, got {count}"
);
let stored = mem.get("user_msg").await.unwrap();
@@ -668,8 +668,13 @@ async fn auto_save_stores_only_user_messages_in_memory() {
let assistant = mem.get("assistant_resp").await.unwrap();
assert!(
assistant.is_none(),
"assistant_resp should not be auto-saved anymore"
assistant.is_some(),
"Expected assistant_resp key to be present"
);
assert_eq!(
assistant.unwrap().content,
"I remember everything",
"Assistant response should be persisted when auto-save is enabled"
);
}
+901
View File
@@ -0,0 +1,901 @@
//! ACP (Agent Client Protocol) channel for ZeroClaw.
//!
//! This channel enables ZeroClaw to act as an ACP client, connecting to an OpenCode
//! ACP server via `opencode acp` command for JSON-RPC 2.0 communication over stdio.
//! This allows users to control OpenCode behavior from any channel via social apps.
use super::traits::{Channel, ChannelMessage, SendMessage};
use crate::config::schema::AcpConfig;
use anyhow::{Context, Result};
use async_trait::async_trait;
use serde::{Deserialize, Serialize};
use serde_json::Value;
use std::collections::VecDeque;
use std::sync::atomic::AtomicU64;
use std::sync::Arc;
use tokio::io::{AsyncBufReadExt, AsyncWriteExt, BufReader};
use tokio::process::{Child, Command};
use tokio::sync::mpsc;
use tokio::sync::Mutex;
/// Monotonic counter for message IDs in ACP JSON-RPC requests.
/// NOTE(review): not referenced anywhere in the visible code — request IDs
/// come from the per-process `AcpProcess.message_id` counter instead.
/// Confirm and remove if dead.
static ACP_MESSAGE_ID: AtomicU64 = AtomicU64::new(0);
/// ACP channel implementation for connecting to OpenCode ACP server.
///
/// The channel starts an OpenCode subprocess via `opencode acp` command and
/// communicates using JSON-RPC 2.0 over stdio. Messages from social apps are
/// forwarded as prompts to OpenCode, and responses are sent back through the
/// originating channel.
pub struct AcpChannel {
    /// OpenCode binary path (default: "opencode")
    opencode_path: String,
    /// Working directory for OpenCode process
    workdir: Option<String>,
    /// Additional arguments to pass to `opencode acp`
    extra_args: Vec<String>,
    /// Allowed user identifiers (empty = deny all, "*" = allow all)
    allowed_users: Vec<String>,
    /// Optional pairing guard for authentication
    pairing: Option<crate::security::pairing::PairingGuard>,
    /// HTTP client for potential future HTTP transport support
    client: reqwest::Client,
    /// Active OpenCode subprocess and its I/O handles
    process: Arc<Mutex<Option<AcpProcess>>>,
    /// Serializes ACP send operations to avoid concurrent process take/spawn races.
    send_operation_lock: Arc<Mutex<()>>,
    /// Next message ID for JSON-RPC requests.
    /// NOTE(review): never read in the visible code — `AcpProcess.message_id`
    /// is used for request IDs instead; confirm whether this field is dead.
    next_message_id: Arc<AtomicU64>,
    /// Optional response channel for sending ACP responses back to original channel
    response_channel: Option<Arc<dyn Channel>>,
}
/// Active ACP process with I/O handles and session state.
struct AcpProcess {
    /// Child process handle
    child: Child,
    /// Stdin handle for sending JSON-RPC requests
    stdin: tokio::process::ChildStdin,
    /// Stdout handle for receiving JSON-RPC responses
    stdout: BufReader<tokio::process::ChildStdout>,
    /// Session ID from ACP server (after initialize + session/new)
    session_id: Option<String>,
    /// JSON-RPC message ID counter (per-process, monotonically increasing)
    message_id: u64,
    /// Pending responses keyed by request ID.
    /// NOTE(review): never populated in the visible code; see `PendingResponse`.
    pending_responses: VecDeque<PendingResponse>,
}
/// Pending JSON-RPC response awaiting completion.
/// NOTE(review): never constructed or drained in the visible code
/// (`pending_responses` stays empty); confirm whether this is dead scaffolding.
struct PendingResponse {
    /// ID of the outstanding request.
    request_id: u64,
    /// JSON-RPC method name the request used.
    method: String,
    /// When the request was issued (presumably for timeout bookkeeping).
    created_at: std::time::Instant,
}
/// JSON-RPC 2.0 request structure.
#[derive(Debug, Clone, Serialize)]
struct JsonRpcRequest {
    /// Always "2.0".
    jsonrpc: String,
    /// Request ID; the server echoes it back in the response.
    id: u64,
    /// Method name, e.g. "initialize" or "session/prompt".
    method: String,
    /// Optional parameters object; the key is omitted entirely when None.
    #[serde(skip_serializing_if = "Option::is_none")]
    params: Option<Value>,
}
/// JSON-RPC 2.0 response structure.
///
/// NOTE(review): the `jsonrpc` version field is deserialized but never
/// validated by the visible code.
#[derive(Debug, Clone, Deserialize)]
struct JsonRpcResponse {
    /// Protocol version string reported by the server.
    jsonrpc: String,
    /// ID of the request this response answers.
    id: u64,
    /// Either a `result` or an `error` member, flattened from the envelope.
    #[serde(flatten)]
    result_or_error: JsonRpcResultOrError,
}
/// JSON-RPC result or error.
///
/// `untagged`: serde picks the variant by which key (`result` or `error`)
/// is present in the flattened response envelope.
#[derive(Debug, Clone, Deserialize)]
#[serde(untagged)]
enum JsonRpcResultOrError {
    /// Successful response carrying a `result` value.
    Result { result: Value },
    /// Failure response carrying an `error` object.
    Error { error: JsonRpcError },
}
/// JSON-RPC error object (JSON-RPC 2.0 error member).
///
/// This struct is only ever deserialized. The original carried a
/// `#[serde(skip_serializing_if = ...)]` attribute on `data`, which is a
/// serialization-only attribute and therefore inert on a Deserialize-only
/// derive — removed. Deserialization behavior is unchanged: a missing
/// `data` key still yields `None`.
#[derive(Debug, Clone, Deserialize)]
struct JsonRpcError {
    /// Numeric error code (e.g. -32700 = parse error).
    code: i32,
    /// Human-readable error message.
    message: String,
    /// Optional structured error payload; absent in most responses.
    data: Option<Value>,
}
/// ACP initialization parameters.
#[derive(Debug, Clone, Serialize)]
struct InitializeParams {
protocol_version: u64,
client_capabilities: ClientCapabilities,
client_info: ClientInfo,
}
/// Client capabilities declaration.
#[derive(Debug, Clone, Serialize, Default)]
struct ClientCapabilities {
    /// Filesystem capabilities (Default → all false).
    fs: FsCapabilities,
    /// Whether the client supports terminal requests (Default → false).
    terminal: bool,
    /// Extension metadata; serialized under the spec's "_meta" key.
    /// NOTE(review): a `None` here serializes as `"_meta": null` (no
    /// skip_serializing_if) — confirm the server tolerates an explicit null.
    #[serde(rename = "_meta")]
    meta: Option<Value>,
}
/// Filesystem capabilities.
#[derive(Debug, Clone, Serialize, Default)]
struct FsCapabilities {
read_text_file: bool,
write_text_file: bool,
}
/// Client information.
#[derive(Debug, Clone, Serialize)]
struct ClientInfo {
    /// Machine-readable client name.
    name: String,
    /// Human-readable client title.
    title: String,
    /// Client version (populated from CARGO_PKG_VERSION at the call site).
    version: String,
}
/// ACP session/new parameters.
#[derive(Debug, Clone, Serialize)]
struct SessionNewParams {
cwd: String,
mcp_servers: Vec<Value>,
}
/// ACP session/prompt parameters.
#[derive(Debug, Clone, Serialize)]
struct SessionPromptParams {
session_id: String,
prompt: Vec<PromptItem>,
}
/// Prompt item (text content).
#[derive(Debug, Clone, Serialize)]
struct PromptItem {
    /// Content type discriminator; serialized as "type" ("text" here).
    #[serde(rename = "type")]
    item_type: String,
    /// The prompt text itself.
    text: String,
}
impl AcpChannel {
/// Create a new ACP channel with the given configuration.
///
/// No subprocess is spawned here; the OpenCode process is started lazily
/// on the first send (see `checkout_process_for_send`).
pub fn new(config: AcpConfig) -> Self {
    Self {
        // Fall back to resolving `opencode` from PATH.
        opencode_path: config
            .opencode_path
            .unwrap_or_else(|| "opencode".to_string()),
        workdir: config.workdir,
        extra_args: config.extra_args,
        allowed_users: config.allowed_users,
        pairing: None, // TODO: Implement pairing if needed
        client: reqwest::Client::new(),
        process: Arc::new(Mutex::new(None)),
        send_operation_lock: Arc::new(Mutex::new(())),
        next_message_id: Arc::new(AtomicU64::new(0)),
        response_channel: None,
    }
}
/// Return true when `user_id` is permitted by the allowlist.
///
/// An entry of "*" admits everyone; otherwise matching is exact and
/// case-sensitive. An empty allowlist denies all users.
fn is_user_allowed(&self, user_id: &str) -> bool {
    for entry in &self.allowed_users {
        if entry == "*" || entry == user_id {
            return true;
        }
    }
    false
}
/// Set the response channel used to route ACP responses back to the
/// originating channel after a successful prompt.
pub fn set_response_channel(&mut self, channel: Arc<dyn Channel>) {
    self.response_channel = Some(channel);
}
/// Start the OpenCode ACP subprocess and establish connection.
///
/// Spawns `<opencode_path> acp [extra_args...]` with piped stdin/stdout.
/// The returned process has fresh, uninitialized session state; callers
/// must still run `initialize` + `session/new` (see
/// `initialize_fresh_process`).
///
/// # Errors
/// Fails if the binary cannot be spawned or its stdio pipes cannot be taken.
fn start_process(&self) -> Result<AcpProcess> {
    let mut command = Command::new(&self.opencode_path);
    command.arg("acp");

    if let Some(workdir) = &self.workdir {
        command.current_dir(workdir);
    }

    for arg in &self.extra_args {
        command.arg(arg);
    }

    command.stdin(std::process::Stdio::piped());
    command.stdout(std::process::Stdio::piped());
    // Inherit stderr so the child cannot block on an unread stderr pipe.
    command.stderr(std::process::Stdio::inherit());

    let mut child = command
        .spawn()
        .with_context(|| format!("Failed to start OpenCode process: {}", self.opencode_path))?;

    let stdin = child
        .stdin
        .take()
        .context("Failed to take stdin from child process")?;
    let stdout = child
        .stdout
        .take()
        .context("Failed to take stdout from child process")?;

    let stdout_reader = BufReader::new(stdout);

    let process = AcpProcess {
        child,
        stdin,
        stdout: stdout_reader,
        session_id: None,
        message_id: 0,
        pending_responses: VecDeque::new(),
    };

    Ok(process)
}
/// Send a JSON-RPC request over the process's stdin and wait (up to 30s)
/// for a single response line on stdout.
///
/// # Errors
/// Fails on serialization/IO errors, on timeout, when the child closes
/// stdout (EOF), on malformed JSON, on a response-ID mismatch, or when
/// the server returns a JSON-RPC error object.
///
/// NOTE(review): this assumes the very next stdout line answers this
/// request; a server-initiated notification (e.g. `session/update`)
/// arriving first would trip the ID-mismatch check. Confirm OpenCode's
/// ACP stream ordering.
async fn send_json_rpc_request(
    &self,
    process: &mut AcpProcess,
    method: &str,
    params: Option<Value>,
) -> Result<Value> {
    // Per-process monotonically increasing request ID.
    let request_id = process.message_id;
    process.message_id += 1;

    let request = JsonRpcRequest {
        jsonrpc: "2.0".to_string(),
        id: request_id,
        method: method.to_string(),
        params,
    };

    let json_str = serde_json::to_string(&request).with_context(|| {
        format!(
            "Failed to serialize JSON-RPC request for method: {}",
            method
        )
    })?;

    // Write message with newline delimiter (ACP protocol requirement)
    process.stdin.write_all(json_str.as_bytes()).await?;
    process.stdin.write_all(b"\n").await?;
    process.stdin.flush().await?;

    // Read one response line, bounded by a timeout so a hung child cannot
    // stall the channel forever.
    let mut line = String::new();
    let timeout_duration = std::time::Duration::from_secs(30);
    match tokio::time::timeout(timeout_duration, process.stdout.read_line(&mut line)).await {
        Ok(read_result) => {
            let bytes_read = read_result
                .with_context(|| format!("Failed to read response for method: {}", method))?;
            // Fix: read_line returning Ok(0) means the child closed stdout.
            // Previously this fell through to serde and surfaced as a
            // confusing "Failed to parse JSON-RPC response: " on an empty
            // string; report the EOF explicitly instead.
            if bytes_read == 0 {
                anyhow::bail!(
                    "ACP process closed stdout before responding to method: {}",
                    method
                );
            }
        }
        Err(_) => {
            anyhow::bail!("Timeout waiting for ACP response for method: {}", method);
        }
    }

    // Parse JSON-RPC response
    let response: JsonRpcResponse = serde_json::from_str(&line)
        .with_context(|| format!("Failed to parse JSON-RPC response: {}", line))?;

    // Verify response ID matches request ID
    if response.id != request_id {
        anyhow::bail!(
            "Response ID mismatch: expected {}, got {}",
            request_id,
            response.id
        );
    }

    match response.result_or_error {
        JsonRpcResultOrError::Result { result } => Ok(result),
        JsonRpcResultOrError::Error { error } => {
            anyhow::bail!("ACP JSON-RPC error ({}): {}", error.code, error.message);
        }
    }
}
/// Initialize ACP connection with the server.
///
/// Sends the `initialize` request advertising this client's identity and
/// its (default, all-false) capabilities.
///
/// # Errors
/// Propagates serialization and JSON-RPC transport errors.
async fn initialize_acp(&self, process: &mut AcpProcess) -> Result<()> {
    let params = InitializeParams {
        protocol_version: 1,
        client_capabilities: ClientCapabilities::default(),
        client_info: ClientInfo {
            name: "ZeroClaw".to_string(),
            title: "ZeroClaw ACP Client".to_string(),
            version: env!("CARGO_PKG_VERSION").to_string(),
        },
    };

    let params_value =
        serde_json::to_value(params).context("Failed to serialize initialize params")?;

    let response = self
        .send_json_rpc_request(process, "initialize", Some(params_value))
        .await?;

    // TODO: Parse response and store capabilities
    tracing::info!("ACP initialized successfully: {:?}", response);
    Ok(())
}
/// Create a new ACP session and return its ID.
///
/// Uses the configured `workdir` (or the current directory) as the
/// session cwd.
///
/// Fix: the ACP spec names the field `sessionId` (camelCase); the
/// original only looked up snake_case `session_id` and would fail against
/// a spec-conforming server. Both spellings are now accepted.
async fn create_session(&self, process: &mut AcpProcess) -> Result<String> {
    let cwd = self.workdir.clone().unwrap_or_else(|| {
        std::env::current_dir()
            .unwrap_or_else(|_| ".".into())
            .to_string_lossy()
            .to_string()
    });

    let params = SessionNewParams {
        cwd,
        mcp_servers: vec![],
    };

    let params_value =
        serde_json::to_value(params).context("Failed to serialize session/new params")?;

    let response = self
        .send_json_rpc_request(process, "session/new", Some(params_value))
        .await?;

    // Accept the spec's camelCase key first, then the legacy snake_case
    // spelling for compatibility with nonconforming servers.
    let session_id = response
        .get("sessionId")
        .or_else(|| response.get("session_id"))
        .and_then(|v| v.as_str())
        .map(|s| s.to_string())
        .with_context(|| {
            format!(
                "Invalid session/new response: missing session_id: {:?}",
                response
            )
        })?;

    tracing::info!("ACP session created: {}", session_id);
    Ok(session_id)
}
/// Send a prompt to the ACP session.
///
/// Wraps `prompt_text` in a single text prompt item, issues
/// `session/prompt`, and returns the server's textual reply.
///
/// NOTE(review): this expects a string `response` field in the result.
/// The ACP spec's session/prompt result carries a stop reason, with
/// content delivered via session/update notifications — confirm against
/// the OpenCode server's actual reply shape.
async fn send_prompt(
    &self,
    process: &mut AcpProcess,
    session_id: &str,
    prompt_text: &str,
) -> Result<String> {
    let params = SessionPromptParams {
        session_id: session_id.to_string(),
        prompt: vec![PromptItem {
            item_type: "text".to_string(),
            text: prompt_text.to_string(),
        }],
    };

    let params_value =
        serde_json::to_value(params).context("Failed to serialize session/prompt params")?;

    let response = self
        .send_json_rpc_request(process, "session/prompt", Some(params_value))
        .await?;

    // Parse response to extract the actual response text
    // The response may contain a "response" field with text content
    let response_text = response
        .get("response")
        .and_then(|v| v.as_str())
        .map(|s| s.to_string())
        .with_context(|| {
            format!(
                "Invalid session/prompt response: missing string field `response` for prompt {:?}: {:?}",
                prompt_text, response
            )
        })?;

    Ok(response_text)
}
/// True when the child has not yet exited: `try_wait` reporting `Ok(None)`
/// means no exit status is available, i.e. the process is still alive.
/// Any error or an available status counts as not running.
fn process_is_running(process: &mut AcpProcess) -> bool {
    match process.child.try_wait() {
        Ok(None) => true,
        _ => false,
    }
}
/// Spawn, initialize, and open a session on a brand-new OpenCode process.
///
/// Runs the full startup sequence (spawn → `initialize` → `session/new`)
/// and stores the resulting session ID on the process handle.
async fn initialize_fresh_process(&self) -> Result<AcpProcess> {
    let mut new_process = self.start_process()?;
    self.initialize_acp(&mut new_process).await?;
    let session_id = self.create_session(&mut new_process).await?;
    new_process.session_id = Some(session_id);
    Ok(new_process)
}
/// Take exclusive ownership of the ACP process for one send operation,
/// (re)starting it when it is missing or has exited.
///
/// The process is removed from the shared slot — callers must return it
/// via `restore_process`. The slot's lock is held only for the brief
/// take, never across the slow spawn/initialize path.
async fn checkout_process_for_send(&self) -> Result<AcpProcess> {
    let mut process_opt = {
        let mut process_guard = self.process.lock().await;
        process_guard.take()
    };

    let needs_restart = match process_opt.as_mut() {
        Some(process) => !Self::process_is_running(process),
        None => true,
    };

    if needs_restart {
        process_opt = Some(self.initialize_fresh_process().await?);
    }

    process_opt.context("ACP process disappeared unexpectedly")
}
/// Put a (possibly absent) process back into the shared slot, replacing
/// whatever was there. Passing `None` discards the current process.
async fn restore_process(&self, process: Option<AcpProcess>) {
    *self.process.lock().await = process;
}
}
#[async_trait]
impl Channel for AcpChannel {
/// Channel identifier used in configuration and logs.
fn name(&self) -> &str {
    "acp"
}
/// Forward a message to OpenCode as an ACP prompt.
///
/// Serialized by `send_operation_lock` so only one send owns the process
/// at a time. Unauthorized senders are dropped silently (returns Ok).
/// On a prompt failure the process is discarded and the send retried once
/// on a fresh process; the last error is returned if all attempts fail.
///
/// NOTE(review): the allowlist is checked against `message.recipient` —
/// confirm that `recipient` carries the originating user's ID in this
/// direction of the flow.
async fn send(&self, message: &SendMessage) -> Result<()> {
    const MAX_SEND_ATTEMPTS: usize = 2;
    let _send_guard = self.send_operation_lock.lock().await;

    // Check if user is allowed
    if !self.is_user_allowed(&message.recipient) {
        tracing::warn!(
            "ACP: ignoring message from unauthorized user: {}",
            message.recipient
        );
        return Ok(());
    }

    // Strip tool call tags from outgoing messages
    let content = super::strip_tool_call_tags(&message.content);

    let mut last_error = None;
    for attempt in 0..MAX_SEND_ATTEMPTS {
        let mut process = self.checkout_process_for_send().await?;
        let session_id = process
            .session_id
            .as_ref()
            .context("No active ACP session")?
            .clone();

        match self.send_prompt(&mut process, &session_id, &content).await {
            Ok(response) => {
                // Only return a still-live process to the shared slot.
                if Self::process_is_running(&mut process) {
                    self.restore_process(Some(process)).await;
                } else {
                    self.restore_process(None).await;
                }

                // Send response back through response_channel if set
                if let Some(response_channel) = &self.response_channel {
                    let response_message =
                        SendMessage::new(response, message.recipient.clone());
                    if let Err(e) = response_channel.send(&response_message).await {
                        tracing::warn!(
                            "Failed to send ACP response through response channel: {}",
                            e
                        );
                    }
                } else {
                    // Log if no response channel configured
                    tracing::info!(
                        "ACP response ready (no response channel configured): {}",
                        response
                    );
                }
                return Ok(());
            }
            Err(error) => {
                // Drop unhealthy process on failure and retry once with a fresh process.
                self.restore_process(None).await;
                if attempt + 1 < MAX_SEND_ATTEMPTS {
                    tracing::warn!(
                        "ACP prompt failed (attempt {}/{}), restarting ACP process: {}",
                        attempt + 1,
                        MAX_SEND_ATTEMPTS,
                        error
                    );
                }
                last_error = Some(error);
            }
        }
    }

    Err(last_error.unwrap_or_else(|| anyhow::anyhow!("ACP send failed with unknown error")))
}
/// Keep the channel alive; ACP produces no inbound messages here.
///
/// ACP is primarily a client-side protocol where we send prompts and
/// receive responses, so nothing is ever pushed to `_tx`; this loop only
/// satisfies the `Channel` contract of a long-running listener.
async fn listen(&self, _tx: mpsc::Sender<ChannelMessage>) -> Result<()> {
    // Since ACP is more about sending commands to OpenCode rather than
    // listening for incoming messages, we implement a minimal listener
    // that just keeps the channel alive.
    loop {
        tokio::time::sleep(tokio::time::Duration::from_secs(60)).await;
    }
}
/// Report liveness of the OpenCode subprocess.
///
/// A dead process is evicted from the shared slot so the next send spawns
/// a fresh one; an empty slot reports unhealthy without side effects.
async fn health_check(&self) -> bool {
    let mut process_guard = self.process.lock().await;
    match process_guard.as_mut() {
        Some(process) if Self::process_is_running(process) => true,
        Some(_) => {
            // Reap the dead handle; the next send restarts the process.
            *process_guard = None;
            false
        }
        None => false,
    }
}
}
#[cfg(test)]
mod tests {
use super::*;
use crate::config::schema::AcpConfig;
#[test]
fn acp_channel_name() {
let config = AcpConfig {
opencode_path: None,
workdir: None,
extra_args: vec![],
allowed_users: vec![],
};
let channel = AcpChannel::new(config);
assert_eq!(channel.name(), "acp");
}
#[test]
fn acp_channel_empty_allowlist_denies_all() {
let config = AcpConfig {
opencode_path: None,
workdir: None,
extra_args: vec![],
allowed_users: vec![],
};
let channel = AcpChannel::new(config);
assert!(!channel.is_user_allowed("anyone"));
assert!(!channel.is_user_allowed("user123"));
}
#[test]
fn acp_channel_wildcard_allows_all() {
let config = AcpConfig {
opencode_path: None,
workdir: None,
extra_args: vec![],
allowed_users: vec!["*".to_string()],
};
let channel = AcpChannel::new(config);
assert!(channel.is_user_allowed("anyone"));
assert!(channel.is_user_allowed("user123"));
assert!(channel.is_user_allowed(""));
}
#[test]
fn acp_channel_specific_users() {
let config = AcpConfig {
opencode_path: None,
workdir: None,
extra_args: vec![],
allowed_users: vec!["user1".to_string(), "user2".to_string()],
};
let channel = AcpChannel::new(config);
assert!(channel.is_user_allowed("user1"));
assert!(channel.is_user_allowed("user2"));
assert!(!channel.is_user_allowed("user3"));
assert!(!channel.is_user_allowed("User1")); // case sensitive
assert!(!channel.is_user_allowed("user"));
}
#[test]
fn acp_channel_wildcard_and_specific() {
let config = AcpConfig {
opencode_path: None,
workdir: None,
extra_args: vec![],
allowed_users: vec!["user1".to_string(), "*".to_string()],
};
let channel = AcpChannel::new(config);
assert!(channel.is_user_allowed("user1"));
assert!(channel.is_user_allowed("anyone"));
assert!(channel.is_user_allowed("user2"));
}
#[test]
fn acp_channel_empty_user_id() {
let config = AcpConfig {
opencode_path: None,
workdir: None,
extra_args: vec![],
allowed_users: vec!["user1".to_string()],
};
let channel = AcpChannel::new(config);
assert!(!channel.is_user_allowed(""));
}
#[test]
fn acp_channel_exact_match_not_substring() {
let config = AcpConfig {
opencode_path: None,
workdir: None,
extra_args: vec![],
allowed_users: vec!["user123".to_string()],
};
let channel = AcpChannel::new(config);
assert!(channel.is_user_allowed("user123"));
assert!(!channel.is_user_allowed("user12"));
assert!(!channel.is_user_allowed("user1234"));
assert!(!channel.is_user_allowed("user"));
}
#[test]
fn acp_channel_case_sensitive() {
    // Allow-list comparison preserves case exactly.
    let channel = AcpChannel::new(AcpConfig {
        opencode_path: None,
        workdir: None,
        extra_args: vec![],
        allowed_users: vec![String::from("User")],
    });
    assert!(channel.is_user_allowed("User"));
    for variant in ["user", "USER"] {
        assert!(!channel.is_user_allowed(variant));
    }
}
// JSON-RPC data structure tests
#[test]
fn jsonrpc_request_serialization() {
    // A fully-populated request serializes with fields in declaration order
    // and the params object embedded verbatim.
    let request = JsonRpcRequest {
        jsonrpc: String::from("2.0"),
        id: 42,
        method: String::from("test"),
        params: Some(serde_json::json!({"key": "value"})),
    };
    let serialized = serde_json::to_string(&request).unwrap();
    assert_eq!(
        serialized,
        r#"{"jsonrpc":"2.0","id":42,"method":"test","params":{"key":"value"}}"#
    );
}
#[test]
fn jsonrpc_request_without_params() {
    // When params is None the key must be omitted from the wire format
    // entirely, not serialized as null.
    let request = JsonRpcRequest {
        jsonrpc: String::from("2.0"),
        id: 1,
        method: String::from("ping"),
        params: None,
    };
    assert_eq!(
        serde_json::to_string(&request).unwrap(),
        r#"{"jsonrpc":"2.0","id":1,"method":"ping"}"#
    );
}
#[test]
fn jsonrpc_response_deserialization() {
    // A success payload decodes into the Result variant carrying the raw
    // JSON value untouched.
    let raw = r#"{"jsonrpc":"2.0","id":42,"result":{"status":"ok"}}"#;
    let response: JsonRpcResponse = serde_json::from_str(raw).unwrap();
    assert_eq!(response.jsonrpc, "2.0");
    assert_eq!(response.id, 42);
    let JsonRpcResultOrError::Result { result } = response.result_or_error else {
        panic!("Expected result, got error");
    };
    assert_eq!(result, serde_json::json!({"status": "ok"}));
}
#[test]
fn jsonrpc_error_deserialization() {
    // An error payload decodes into the Error variant; the optional `data`
    // field stays absent when not present on the wire.
    let raw = r#"{"jsonrpc":"2.0","id":42,"error":{"code":-32700,"message":"Parse error"}}"#;
    let response: JsonRpcResponse = serde_json::from_str(raw).unwrap();
    assert_eq!(response.jsonrpc, "2.0");
    assert_eq!(response.id, 42);
    let JsonRpcResultOrError::Error { error } = response.result_or_error else {
        panic!("Expected error, got result");
    };
    assert_eq!(error.code, -32700);
    assert_eq!(error.message, "Parse error");
    assert!(error.data.is_none());
}
#[test]
fn initialize_params_serialization() {
    // Protocol version and nested client_info survive conversion to a
    // serde_json::Value under their expected keys.
    let params = InitializeParams {
        protocol_version: 1,
        client_capabilities: ClientCapabilities::default(),
        client_info: ClientInfo {
            name: String::from("ZeroClaw"),
            title: String::from("ZeroClaw ACP Client"),
            version: String::from("1.0.0"),
        },
    };
    let value = serde_json::to_value(&params).unwrap();
    assert_eq!(value["protocol_version"], 1);
    assert_eq!(value["client_info"]["name"], "ZeroClaw");
}
#[test]
fn session_new_params_serialization() {
    // cwd is carried verbatim and an empty mcp_servers list serializes as [].
    let value = serde_json::to_value(&SessionNewParams {
        cwd: String::from("/tmp"),
        mcp_servers: vec![],
    })
    .unwrap();
    assert_eq!(value["cwd"], "/tmp");
    assert_eq!(value["mcp_servers"], serde_json::json!([]));
}
#[test]
fn session_prompt_params_serialization() {
    // The assertions pin that the struct field `item_type` appears under the
    // wire key "type" alongside the prompt text.
    let params = SessionPromptParams {
        session_id: String::from("session-123"),
        prompt: vec![PromptItem {
            item_type: String::from("text"),
            text: String::from("Hello"),
        }],
    };
    let value = serde_json::to_value(&params).unwrap();
    assert_eq!(value["session_id"], "session-123");
    assert_eq!(value["prompt"][0]["type"], "text");
    assert_eq!(value["prompt"][0]["text"], "Hello");
}
#[test]
fn acp_channel_set_response_channel() {
    use super::Channel;
    use crate::channels::traits::SendMessage;
    use std::sync::Arc;
    // Minimal no-op Channel implementation, used only to exercise the setter.
    struct MockChannel;
    #[async_trait::async_trait]
    impl Channel for MockChannel {
        fn name(&self) -> &str {
            "mock"
        }
        async fn send(&self, _message: &SendMessage) -> Result<()> {
            Ok(())
        }
        async fn listen(
            &self,
            _tx: tokio::sync::mpsc::Sender<crate::channels::traits::ChannelMessage>,
        ) -> Result<()> {
            Ok(())
        }
        async fn health_check(&self) -> bool {
            true
        }
    }
    let config = AcpConfig {
        opencode_path: None,
        workdir: None,
        extra_args: vec![],
        allowed_users: vec![],
    };
    let mut channel = AcpChannel::new(config);
    // The response channel is a private field, so this smoke test can only
    // verify that the setter accepts an Arc<dyn Channel> without panicking.
    // (The original ended with a no-op `assert!(true)` — dropped, since it
    // asserts nothing; the test passes by reaching the end without panic.
    // The Arc clone was also redundant and is removed.)
    channel.set_response_channel(Arc::new(MockChannel));
}
// Note: More comprehensive tests would require mocking the OpenCode process
// which is beyond the scope of basic unit tests.
#[cfg(unix)]
async fn spawn_test_process(command: &str, args: &[&str]) -> AcpProcess {
    // Launch the helper with piped stdin/stdout (stderr discarded) so the
    // test can stand in for a real ACP child process.
    let mut spawned = Command::new(command)
        .args(args)
        .stdin(std::process::Stdio::piped())
        .stdout(std::process::Stdio::piped())
        .stderr(std::process::Stdio::null())
        .spawn()
        .expect("failed to spawn test ACP process");
    let stdin_pipe = spawned.stdin.take().expect("test process stdin");
    let stdout_reader = BufReader::new(spawned.stdout.take().expect("test process stdout"));
    AcpProcess {
        child: spawned,
        stdin: stdin_pipe,
        stdout: stdout_reader,
        session_id: Some("test-session".to_string()),
        message_id: 0,
        pending_responses: VecDeque::new(),
    }
}
#[cfg(unix)]
async fn cleanup_test_process(channel: &AcpChannel) {
    // Detach the process while holding the lock only for the take(), then
    // kill and reap it outside the critical section; errors are best-effort.
    let taken = channel.process.lock().await.take();
    if let Some(mut child_process) = taken {
        let _ = child_process.child.kill().await;
        let _ = child_process.child.wait().await;
    }
}
#[cfg(unix)]
#[tokio::test]
async fn acp_health_check_false_when_no_process() {
    // With no child process attached, the channel reports unhealthy.
    let channel = AcpChannel::new(AcpConfig {
        opencode_path: None,
        workdir: None,
        extra_args: vec![],
        allowed_users: vec![],
    });
    assert!(!channel.health_check().await);
}
#[cfg(unix)]
#[tokio::test]
async fn acp_health_check_true_when_process_running() {
    // A live child (a shell sleeping well past the assertion) makes the
    // channel healthy.
    let channel = AcpChannel::new(AcpConfig {
        opencode_path: None,
        workdir: None,
        extra_args: vec![],
        allowed_users: vec![],
    });
    let spawned = spawn_test_process("sh", &["-c", "sleep 5"]).await;
    *channel.process.lock().await = Some(spawned);
    assert!(channel.health_check().await);
    cleanup_test_process(&channel).await;
}
#[cfg(unix)]
#[tokio::test]
async fn acp_health_check_false_after_process_exit() {
    // Once the short-lived child (`sh -c true`) exits, the channel must
    // report unhealthy.
    let config = AcpConfig {
        opencode_path: None,
        workdir: None,
        extra_args: vec![],
        allowed_users: vec![],
    };
    let channel = AcpChannel::new(config);
    let process = spawn_test_process("sh", &["-c", "true"]).await;
    {
        let mut guard = channel.process.lock().await;
        *guard = Some(process);
    }
    // Poll with a bounded deadline (~2 s) instead of the original single
    // fixed 50 ms sleep: the child exits almost immediately, but a one-shot
    // wait can still race the OS scheduler on loaded CI machines and flake.
    let mut healthy = channel.health_check().await;
    for _ in 0..40 {
        if !healthy {
            break;
        }
        tokio::time::sleep(std::time::Duration::from_millis(50)).await;
        healthy = channel.health_check().await;
    }
    assert!(!healthy, "health_check should go false after the child exits");
}
}
+45 -18
View File
@@ -15,6 +15,7 @@
//! [`start_channels`]. See `AGENTS.md` §7.2 for the full change playbook.
pub mod clawdtalk;
pub mod acp;
pub mod cli;
pub mod dingtalk;
pub mod discord;
@@ -44,6 +45,7 @@ pub mod whatsapp_storage;
#[cfg(feature = "whatsapp-web")]
pub mod whatsapp_web;
pub use acp::AcpChannel;
pub use clawdtalk::ClawdTalkChannel;
pub use cli::CliChannel;
pub use dingtalk::DingTalkChannel;
@@ -345,6 +347,10 @@ fn conversation_memory_key(msg: &traits::ChannelMessage) -> String {
}
}
fn assistant_memory_key(msg: &traits::ChannelMessage) -> String {
format!("assistant_resp_{}", conversation_memory_key(msg))
}
fn conversation_history_key(msg: &traits::ChannelMessage) -> String {
// QQ uses thread_ts as a passive-reply message id, not a thread identifier.
// Using it in history keys would reset context on every incoming message.
@@ -3745,24 +3751,19 @@ or tune thresholds in config.",
&history_key,
ChatMessage::assistant(&history_response),
);
if let Some(session) = session.as_ref() {
let latest = {
let histories = ctx
.conversation_histories
.lock()
.unwrap_or_else(|e| e.into_inner());
histories.get(&history_key).cloned().unwrap_or_default()
};
let filtered: Vec<ChatMessage> = latest
.into_iter()
.filter(|m| crate::providers::is_user_or_assistant_role(m.role.as_str()))
.collect();
let saved_len = filtered.len();
if let Err(err) = session.update_history(filtered).await {
tracing::warn!("Failed to update session history: {err}");
} else {
tracing::debug!(saved_len, "session history saved");
}
if ctx.auto_save_memory
&& delivered_response.chars().count() >= AUTOSAVE_MIN_MESSAGE_CHARS
{
let assistant_key = assistant_memory_key(&msg);
let _ = ctx
.memory
.store(
&assistant_key,
&delivered_response,
crate::memory::MemoryCategory::Conversation,
None,
)
.await;
}
println!(
" 🤖 Reply ({}ms): {}",
@@ -4995,6 +4996,12 @@ fn collect_configured_channels(
});
}
if let Some(ref acp) = config.channels_config.acp {
channels.push(ConfiguredChannel {
display_name: "ACP",
channel: Arc::new(AcpChannel::new(acp.clone())),
});
}
channels
}
@@ -10288,6 +10295,26 @@ BTC is currently around $65,000 based on latest tool output."#
);
}
#[test]
fn assistant_memory_key_is_namespaced_from_user_key() {
let msg = traits::ChannelMessage {
id: "msg_abc123".into(),
sender: "U123".into(),
reply_target: "C456".into(),
content: "hello".into(),
channel: "slack".into(),
timestamp: 1,
thread_ts: None,
};
let user_key = conversation_memory_key(&msg);
let assistant_key = assistant_memory_key(&msg);
assert!(assistant_key.starts_with("assistant_resp_"));
assert!(assistant_key.ends_with(&user_key));
assert_ne!(assistant_key, user_key);
}
#[test]
fn conversation_history_key_ignores_qq_message_id_thread() {
let msg1 = traits::ChannelMessage {
+69
View File
@@ -518,6 +518,7 @@ impl std::fmt::Debug for Config {
self.channels_config.dingtalk.is_some(),
self.channels_config.napcat.is_some(),
self.channels_config.qq.is_some(),
self.channels_config.acp.is_some(),
self.channels_config.nostr.is_some(),
self.channels_config.clawdtalk.is_some(),
]
@@ -4047,6 +4048,8 @@ impl<T: ChannelConfig> crate::config::traits::ConfigHandle for ConfigWrapper<T>
pub struct ChannelsConfig {
/// Enable the CLI interactive channel. Default: `true`.
pub cli: bool,
/// ACP (Agent Client Protocol) channel configuration.
pub acp: Option<AcpConfig>,
/// Telegram bot channel configuration.
pub telegram: Option<TelegramConfig>,
/// Discord bot channel configuration.
@@ -4188,6 +4191,10 @@ impl ChannelsConfig {
Box::new(ConfigWrapper::new(self.nostr.as_ref())),
self.nostr.is_some(),
),
(
Box::new(ConfigWrapper::new(self.acp.as_ref())),
self.acp.is_some(),
),
(
Box::new(ConfigWrapper::new(self.clawdtalk.as_ref())),
self.clawdtalk.is_some(),
@@ -4213,6 +4220,7 @@ impl Default for ChannelsConfig {
fn default() -> Self {
Self {
cli: true,
acp: None,
telegram: None,
discord: None,
slack: None,
@@ -7048,6 +7056,10 @@ impl Config {
/// Called after TOML deserialization and env-override application to catch
/// obviously invalid values early instead of failing at arbitrary runtime points.
pub fn validate(&self) -> Result<()> {
if let Some(acp) = &self.channels_config.acp {
acp.validate()?;
}
// Gateway
if self.gateway.host.trim().is_empty() {
anyhow::bail!("gateway.host must not be empty");
@@ -8485,6 +8497,60 @@ fn sync_directory(path: &Path) -> Result<()> {
Ok(())
}
/// ACP (Agent Client Protocol) channel configuration.
///
/// Enables ZeroClaw to act as an ACP client, connecting to an OpenCode ACP server
/// via `opencode acp` command for JSON-RPC 2.0 communication over stdio.
#[derive(Debug, Clone, Serialize, Deserialize, JsonSchema)]
pub struct AcpConfig {
/// OpenCode binary path (default: "opencode").
#[serde(default = "default_acp_opencode_path")]
pub opencode_path: Option<String>,
/// Working directory for OpenCode process.
pub workdir: Option<String>,
/// Additional arguments to pass to `opencode acp`.
#[serde(default)]
pub extra_args: Vec<String>,
/// Allowed user identifiers (empty = deny all, "*" = allow all).
#[serde(default)]
pub allowed_users: Vec<String>,
}
fn default_acp_opencode_path() -> Option<String> {
Some("opencode".to_string())
}
impl AcpConfig {
fn validate(&self) -> Result<()> {
if self
.opencode_path
.as_deref()
.is_some_and(|path| path.trim().is_empty())
{
anyhow::bail!("channels_config.acp.opencode_path must not be empty when set");
}
if self
.workdir
.as_deref()
.is_some_and(|dir| dir.trim().is_empty())
{
anyhow::bail!("channels_config.acp.workdir must not be empty when set");
}
Ok(())
}
}
impl ChannelConfig for AcpConfig {
fn name() -> &'static str {
"ACP"
}
fn desc() -> &'static str {
"Agent Client Protocol channel for OpenCode integration"
}
}
#[cfg(test)]
mod tests {
use super::*;
@@ -8996,6 +9062,7 @@ ws_url = "ws://127.0.0.1:3002"
goal_loop: GoalLoopConfig::default(),
channels_config: ChannelsConfig {
cli: true,
acp: None,
telegram: Some(TelegramConfig {
bot_token: "123:ABC".into(),
allowed_users: vec!["user1".into()],
@@ -9929,6 +9996,7 @@ allowed_users = ["@ops:matrix.org"]
async fn channels_config_with_imessage_and_matrix() {
let c = ChannelsConfig {
cli: true,
acp: None,
telegram: None,
discord: None,
slack: None,
@@ -10210,6 +10278,7 @@ channel_id = "C123"
async fn channels_config_with_whatsapp() {
let c = ChannelsConfig {
cli: true,
acp: None,
telegram: None,
discord: None,
slack: None,
+35 -4
View File
@@ -3,6 +3,7 @@ pub enum MemoryBackendKind {
Sqlite,
SqliteQdrantHybrid,
Lucid,
CortexMem,
Postgres,
Qdrant,
Markdown,
@@ -39,6 +40,15 @@ const LUCID_PROFILE: MemoryBackendProfile = MemoryBackendProfile {
optional_dependency: true,
};
const CORTEX_MEM_PROFILE: MemoryBackendProfile = MemoryBackendProfile {
key: "cortex-mem",
label: "Cortex-Mem bridge — optional CLI sync with local SQLite fallback",
auto_save_default: true,
uses_sqlite_hygiene: true,
sqlite_based: true,
optional_dependency: true,
};
const MARKDOWN_PROFILE: MemoryBackendProfile = MemoryBackendProfile {
key: "markdown",
label: "Markdown Files — simple, human-readable, no dependencies",
@@ -93,9 +103,10 @@ const CUSTOM_PROFILE: MemoryBackendProfile = MemoryBackendProfile {
optional_dependency: false,
};
const SELECTABLE_MEMORY_BACKENDS: [MemoryBackendProfile; 4] = [
const SELECTABLE_MEMORY_BACKENDS: [MemoryBackendProfile; 5] = [
SQLITE_PROFILE,
LUCID_PROFILE,
CORTEX_MEM_PROFILE,
MARKDOWN_PROFILE,
NONE_PROFILE,
];
@@ -113,6 +124,7 @@ pub fn classify_memory_backend(backend: &str) -> MemoryBackendKind {
"sqlite" => MemoryBackendKind::Sqlite,
"sqlite_qdrant_hybrid" | "hybrid" => MemoryBackendKind::SqliteQdrantHybrid,
"lucid" => MemoryBackendKind::Lucid,
"cortex-mem" | "cortex_mem" | "cortexmem" | "cortex" => MemoryBackendKind::CortexMem,
"postgres" => MemoryBackendKind::Postgres,
"qdrant" => MemoryBackendKind::Qdrant,
"markdown" => MemoryBackendKind::Markdown,
@@ -126,6 +138,7 @@ pub fn memory_backend_profile(backend: &str) -> MemoryBackendProfile {
MemoryBackendKind::Sqlite => SQLITE_PROFILE,
MemoryBackendKind::SqliteQdrantHybrid => SQLITE_QDRANT_HYBRID_PROFILE,
MemoryBackendKind::Lucid => LUCID_PROFILE,
MemoryBackendKind::CortexMem => CORTEX_MEM_PROFILE,
MemoryBackendKind::Postgres => POSTGRES_PROFILE,
MemoryBackendKind::Qdrant => QDRANT_PROFILE,
MemoryBackendKind::Markdown => MARKDOWN_PROFILE,
@@ -146,6 +159,14 @@ mod tests {
MemoryBackendKind::SqliteQdrantHybrid
);
assert_eq!(classify_memory_backend("lucid"), MemoryBackendKind::Lucid);
assert_eq!(
classify_memory_backend("cortex-mem"),
MemoryBackendKind::CortexMem
);
assert_eq!(
classify_memory_backend("cortex_mem"),
MemoryBackendKind::CortexMem
);
assert_eq!(
classify_memory_backend("postgres"),
MemoryBackendKind::Postgres
@@ -173,11 +194,12 @@ mod tests {
#[test]
fn selectable_backends_are_ordered_for_onboarding() {
let backends = selectable_memory_backends();
assert_eq!(backends.len(), 4);
assert_eq!(backends.len(), 5);
assert_eq!(backends[0].key, "sqlite");
assert_eq!(backends[1].key, "lucid");
assert_eq!(backends[2].key, "markdown");
assert_eq!(backends[3].key, "none");
assert_eq!(backends[2].key, "cortex-mem");
assert_eq!(backends[3].key, "markdown");
assert_eq!(backends[4].key, "none");
}
#[test]
@@ -188,6 +210,15 @@ mod tests {
assert!(profile.uses_sqlite_hygiene);
}
#[test]
fn cortex_profile_is_sqlite_based_optional_backend() {
let profile = memory_backend_profile("cortex-mem");
assert_eq!(profile.key, "cortex-mem");
assert!(profile.sqlite_based);
assert!(profile.optional_dependency);
assert!(profile.uses_sqlite_hygiene);
}
#[test]
fn unknown_profile_preserves_extensibility_defaults() {
let profile = memory_backend_profile("custom-memory");
+109
View File
@@ -0,0 +1,109 @@
use super::lucid::LucidMemory;
use super::sqlite::SqliteMemory;
use super::traits::{Memory, MemoryCategory, MemoryEntry};
use async_trait::async_trait;
use std::path::Path;
pub struct CortexMemMemory {
inner: LucidMemory,
}
impl CortexMemMemory {
const DEFAULT_CORTEX_CMD: &'static str = "cortex-mem";
pub fn new(workspace_dir: &Path, local: SqliteMemory) -> Self {
let cortex_cmd = std::env::var("ZEROCLAW_CORTEX_CMD")
.or_else(|_| std::env::var("ZEROCLAW_LUCID_CMD"))
.unwrap_or_else(|_| Self::DEFAULT_CORTEX_CMD.to_string());
let inner = LucidMemory::new_with_command(workspace_dir, local, cortex_cmd);
Self { inner }
}
#[cfg(test)]
fn new_with_command_for_test(workspace_dir: &Path, local: SqliteMemory, command: &str) -> Self {
let inner = LucidMemory::new_with_command(workspace_dir, local, command.to_string());
Self { inner }
}
}
#[async_trait]
impl Memory for CortexMemMemory {
fn name(&self) -> &str {
"cortex-mem"
}
async fn store(
&self,
key: &str,
content: &str,
category: MemoryCategory,
session_id: Option<&str>,
) -> anyhow::Result<()> {
self.inner.store(key, content, category, session_id).await
}
async fn recall(
&self,
query: &str,
limit: usize,
session_id: Option<&str>,
) -> anyhow::Result<Vec<MemoryEntry>> {
self.inner.recall(query, limit, session_id).await
}
async fn get(&self, key: &str) -> anyhow::Result<Option<MemoryEntry>> {
self.inner.get(key).await
}
async fn list(
&self,
category: Option<&MemoryCategory>,
session_id: Option<&str>,
) -> anyhow::Result<Vec<MemoryEntry>> {
self.inner.list(category, session_id).await
}
async fn forget(&self, key: &str) -> anyhow::Result<bool> {
self.inner.forget(key).await
}
async fn count(&self) -> anyhow::Result<usize> {
self.inner.count().await
}
async fn health_check(&self) -> bool {
self.inner.health_check().await
}
}
#[cfg(test)]
mod tests {
use super::*;
use tempfile::TempDir;
#[tokio::test]
async fn cortex_backend_reports_expected_name() {
let tmp = TempDir::new().unwrap();
let sqlite = SqliteMemory::new(tmp.path()).unwrap();
let memory = CortexMemMemory::new(tmp.path(), sqlite);
assert_eq!(memory.name(), "cortex-mem");
}
#[tokio::test]
async fn cortex_backend_keeps_local_store_when_bridge_command_fails() {
let tmp = TempDir::new().unwrap();
let sqlite = SqliteMemory::new(tmp.path()).unwrap();
let memory =
CortexMemMemory::new_with_command_for_test(tmp.path(), sqlite, "missing-cortex-cli");
memory
.store("cortex_key", "local first", MemoryCategory::Conversation, None)
.await
.unwrap();
let stored = memory.get("cortex_key").await.unwrap();
assert!(stored.is_some(), "expected local sqlite entry to be present");
assert_eq!(stored.unwrap().content, "local first");
}
}
+7
View File
@@ -34,7 +34,14 @@ impl LucidMemory {
pub fn new(workspace_dir: &Path, local: SqliteMemory) -> Self {
let lucid_cmd = std::env::var("ZEROCLAW_LUCID_CMD")
.unwrap_or_else(|_| Self::DEFAULT_LUCID_CMD.to_string());
Self::new_with_command(workspace_dir, local, lucid_cmd)
}
pub(crate) fn new_with_command(
workspace_dir: &Path,
local: SqliteMemory,
lucid_cmd: String,
) -> Self {
let token_budget = std::env::var("ZEROCLAW_LUCID_BUDGET")
.ok()
.and_then(|v| v.parse::<usize>().ok())
+27 -1
View File
@@ -1,6 +1,7 @@
pub mod backend;
pub mod chunker;
pub mod cli;
pub mod cortex;
pub mod embeddings;
pub mod hybrid;
pub mod hygiene;
@@ -21,6 +22,7 @@ pub use backend::{
classify_memory_backend, default_memory_backend_key, memory_backend_profile,
selectable_memory_backends, MemoryBackendKind, MemoryBackendProfile,
};
pub use cortex::CortexMemMemory;
pub use hybrid::SqliteQdrantHybridMemory;
pub use lucid::LucidMemory;
pub use markdown::MarkdownMemory;
@@ -58,6 +60,10 @@ where
let local = sqlite_builder()?;
Ok(Box::new(LucidMemory::new(workspace_dir, local)))
}
MemoryBackendKind::CortexMem => {
let local = sqlite_builder()?;
Ok(Box::new(CortexMemMemory::new(workspace_dir, local)))
}
MemoryBackendKind::Postgres => postgres_builder(),
MemoryBackendKind::Qdrant | MemoryBackendKind::Markdown => {
Ok(Box::new(MarkdownMemory::new(workspace_dir)))
@@ -217,6 +223,7 @@ pub fn create_memory_with_storage_and_routes(
MemoryBackendKind::Sqlite
| MemoryBackendKind::SqliteQdrantHybrid
| MemoryBackendKind::Lucid
| MemoryBackendKind::CortexMem
)
{
if let Err(e) = snapshot::export_snapshot(workspace_dir) {
@@ -232,6 +239,7 @@ pub fn create_memory_with_storage_and_routes(
MemoryBackendKind::Sqlite
| MemoryBackendKind::SqliteQdrantHybrid
| MemoryBackendKind::Lucid
| MemoryBackendKind::CortexMem
)
&& snapshot::should_hydrate(workspace_dir)
{
@@ -381,7 +389,7 @@ pub fn create_memory_for_migration(
) -> anyhow::Result<Box<dyn Memory>> {
if matches!(classify_memory_backend(backend), MemoryBackendKind::None) {
anyhow::bail!(
"memory backend 'none' disables persistence; choose sqlite, lucid, or markdown before migration"
"memory backend 'none' disables persistence; choose sqlite, lucid, cortex-mem, or markdown before migration"
);
}
@@ -477,6 +485,17 @@ mod tests {
assert_eq!(mem.name(), "lucid");
}
#[test]
fn factory_cortex_mem() {
let tmp = TempDir::new().unwrap();
let cfg = MemoryConfig {
backend: "cortex-mem".into(),
..MemoryConfig::default()
};
let mem = create_memory(&cfg, tmp.path(), None).unwrap();
assert_eq!(mem.name(), "cortex-mem");
}
#[test]
fn factory_sqlite_qdrant_hybrid() {
let tmp = TempDir::new().unwrap();
@@ -521,6 +540,13 @@ mod tests {
assert_eq!(mem.name(), "lucid");
}
#[test]
fn migration_factory_cortex_mem() {
let tmp = TempDir::new().unwrap();
let mem = create_memory_for_migration("cortex-mem", tmp.path()).unwrap();
assert_eq!(mem.name(), "cortex-mem");
}
#[test]
fn migration_factory_none_is_rejected() {
let tmp = TempDir::new().unwrap();
+10 -3
View File
@@ -381,7 +381,7 @@ fn apply_provider_update(
// ── Quick setup (zero prompts) ───────────────────────────────────
/// Non-interactive setup: generates a sensible default config instantly.
/// Use `zeroclaw onboard` or `zeroclaw onboard --api-key sk-... --provider openrouter --memory sqlite|lucid`.
/// Use `zeroclaw onboard` or `zeroclaw onboard --api-key sk-... --provider openrouter --memory sqlite|lucid|cortex-mem`.
/// Use `zeroclaw onboard --interactive` for the full wizard.
fn backend_key_from_choice(choice: usize) -> &'static str {
selectable_memory_backends()
@@ -8327,8 +8327,9 @@ mod tests {
fn backend_key_from_choice_maps_supported_backends() {
assert_eq!(backend_key_from_choice(0), "sqlite");
assert_eq!(backend_key_from_choice(1), "lucid");
assert_eq!(backend_key_from_choice(2), "markdown");
assert_eq!(backend_key_from_choice(3), "none");
assert_eq!(backend_key_from_choice(2), "cortex-mem");
assert_eq!(backend_key_from_choice(3), "markdown");
assert_eq!(backend_key_from_choice(4), "none");
assert_eq!(backend_key_from_choice(999), "sqlite");
}
@@ -8340,6 +8341,12 @@ mod tests {
assert!(lucid.sqlite_based);
assert!(lucid.optional_dependency);
let cortex_mem = memory_backend_profile("cortex-mem");
assert!(cortex_mem.auto_save_default);
assert!(cortex_mem.uses_sqlite_hygiene);
assert!(cortex_mem.sqlite_based);
assert!(cortex_mem.optional_dependency);
let markdown = memory_backend_profile("markdown");
assert!(markdown.auto_save_default);
assert!(!markdown.uses_sqlite_hygiene);
+140 -1
View File
@@ -11,6 +11,8 @@ pub struct CronAddTool {
security: Arc<SecurityPolicy>,
}
const MIN_AGENT_EVERY_MS: u64 = 5 * 60 * 1000;
impl CronAddTool {
pub fn new(config: Arc<Config>, security: Arc<SecurityPolicy>) -> Self {
Self { config, security }
@@ -56,6 +58,8 @@ impl Tool for CronAddTool {
fn description(&self) -> &str {
"Create a scheduled cron job (shell or agent) with cron/at/every schedules. \
Use job_type='agent' with a prompt to run the AI agent on schedule. \
Use schedule.kind='at' for one-time reminders/delayed sends (recommended). \
Agent jobs with schedule.kind='cron' or schedule.kind='every' are recurring and require explicit recurring confirmation. \
To deliver output to a channel (Discord, Telegram, Slack, Mattermost, QQ, Napcat, Lark, Feishu, Email), set \
delivery={\"mode\":\"announce\",\"channel\":\"discord\",\"to\":\"<channel_id_or_chat_id>\"}. \
This is the preferred tool for sending scheduled/delayed messages to users via channels."
@@ -68,13 +72,18 @@ impl Tool for CronAddTool {
"name": { "type": "string" },
"schedule": {
"type": "object",
"description": "Schedule object: {kind:'cron',expr,tz?} | {kind:'at',at} | {kind:'every',every_ms}"
"description": "Schedule object: {kind:'cron',expr,tz?} recurring | {kind:'at',at} one-time | {kind:'every',every_ms} recurring interval"
},
"job_type": { "type": "string", "enum": ["shell", "agent"] },
"command": { "type": "string" },
"prompt": { "type": "string" },
"session_target": { "type": "string", "enum": ["isolated", "main"] },
"model": { "type": "string" },
"recurring_confirmed": {
"type": "boolean",
"description": "Required for agent recurring schedules (schedule.kind='cron' or 'every'). Set true only when recurring behavior is intentional.",
"default": false
},
"delivery": {
"type": "object",
"description": "Delivery config to send job output to a channel. Example: {\"mode\":\"announce\",\"channel\":\"discord\",\"to\":\"<channel_id>\"}",
@@ -216,6 +225,49 @@ impl Tool for CronAddTool {
.get("model")
.and_then(serde_json::Value::as_str)
.map(str::to_string);
let recurring_confirmed = args
.get("recurring_confirmed")
.and_then(serde_json::Value::as_bool)
.unwrap_or(false);
match &schedule {
Schedule::Every { every_ms } => {
if !recurring_confirmed {
return Ok(ToolResult {
success: false,
output: String::new(),
error: Some(
"Agent jobs with recurring schedules require recurring_confirmed=true. \
For one-time reminders, use schedule.kind='at' with an RFC3339 timestamp."
.to_string(),
),
});
}
if *every_ms < MIN_AGENT_EVERY_MS {
return Ok(ToolResult {
success: false,
output: String::new(),
error: Some(format!(
"Agent schedule.kind='every' must be >= {MIN_AGENT_EVERY_MS} ms (5 minutes)"
)),
});
}
}
Schedule::Cron { .. } => {
if !recurring_confirmed {
return Ok(ToolResult {
success: false,
output: String::new(),
error: Some(
"Agent jobs with recurring schedules require recurring_confirmed=true. \
For one-time reminders, use schedule.kind='at' with an RFC3339 timestamp."
.to_string(),
),
});
}
}
Schedule::At { .. } => {}
}
let delivery = match args.get("delivery") {
Some(v) => match serde_json::from_value::<DeliveryConfig>(v.clone()) {
@@ -482,4 +534,91 @@ mod tests {
.unwrap_or_default()
.contains("Missing 'prompt'"));
}
#[tokio::test]
async fn agent_every_requires_recurring_confirmation() {
let tmp = TempDir::new().unwrap();
let cfg = test_config(&tmp).await;
let tool = CronAddTool::new(cfg.clone(), test_security(&cfg));
let result = tool
.execute(json!({
"schedule": { "kind": "every", "every_ms": 300000 },
"job_type": "agent",
"prompt": "Send me a recurring status update"
}))
.await
.unwrap();
assert!(!result.success);
assert!(result
.error
.unwrap_or_default()
.contains("recurring_confirmed=true"));
}
#[tokio::test]
async fn agent_cron_requires_recurring_confirmation() {
let tmp = TempDir::new().unwrap();
let cfg = test_config(&tmp).await;
let tool = CronAddTool::new(cfg.clone(), test_security(&cfg));
let result = tool
.execute(json!({
"schedule": { "kind": "cron", "expr": "*/5 * * * *" },
"job_type": "agent",
"prompt": "Send recurring reminders"
}))
.await
.unwrap();
assert!(!result.success);
assert!(result
.error
.unwrap_or_default()
.contains("recurring_confirmed=true"));
}
#[tokio::test]
async fn agent_every_rejects_high_frequency_intervals() {
let tmp = TempDir::new().unwrap();
let cfg = test_config(&tmp).await;
let tool = CronAddTool::new(cfg.clone(), test_security(&cfg));
let result = tool
.execute(json!({
"schedule": { "kind": "every", "every_ms": 60000 },
"job_type": "agent",
"prompt": "Send me updates frequently",
"recurring_confirmed": true
}))
.await
.unwrap();
assert!(!result.success);
assert!(result
.error
.unwrap_or_default()
.contains("must be >= 300000 ms"));
}
#[tokio::test]
async fn agent_every_with_explicit_confirmation_succeeds() {
let tmp = TempDir::new().unwrap();
let cfg = test_config(&tmp).await;
let tool = CronAddTool::new(cfg.clone(), test_security(&cfg));
let result = tool
.execute(json!({
"schedule": { "kind": "every", "every_ms": 300000 },
"job_type": "agent",
"prompt": "Share a heartbeat summary",
"recurring_confirmed": true
}))
.await
.unwrap();
assert!(result.success, "{:?}", result.error);
assert!(result.output.contains("next_run"));
}
}
+8
View File
@@ -0,0 +1,8 @@
[build]
command = "npm run build"
publish = "dist"
[[redirects]]
from = "/*"
to = "/index.html"
status = 200
+15 -1
View File
@@ -7,11 +7,13 @@
"": {
"name": "zeroclaw-web",
"version": "0.1.0",
"license": "(MIT OR Apache-2.0)",
"dependencies": {
"lucide-react": "^0.468.0",
"react": "^19.0.0",
"react-dom": "^19.0.0",
"react-router-dom": "^7.1.1"
"react-router-dom": "^7.1.1",
"smol-toml": "^1.3.1"
},
"devDependencies": {
"@tailwindcss/vite": "^4.0.0",
@@ -2327,6 +2329,18 @@
"integrity": "sha512-oeM1lpU/UvhTxw+g3cIfxXHyJRc/uidd3yK1P242gzHds0udQBYzs3y8j4gCCW+ZJ7ad0yctld8RYO+bdurlvw==",
"license": "MIT"
},
"node_modules/smol-toml": {
"version": "1.6.0",
"resolved": "https://registry.npmjs.org/smol-toml/-/smol-toml-1.6.0.tgz",
"integrity": "sha512-4zemZi0HvTnYwLfrpk/CF9LOd9Lt87kAt50GnqhMpyF9U3poDAP2+iukq2bZsO/ufegbYehBkqINbsWxj4l4cw==",
"license": "BSD-3-Clause",
"engines": {
"node": ">= 18"
},
"funding": {
"url": "https://github.com/sponsors/cyyynthia"
}
},
"node_modules/source-map-js": {
"version": "1.2.1",
"resolved": "https://registry.npmjs.org/source-map-js/-/source-map-js-1.2.1.tgz",
+2 -1
View File
@@ -13,7 +13,8 @@
"lucide-react": "^0.468.0",
"react": "^19.0.0",
"react-dom": "^19.0.0",
"react-router-dom": "^7.1.1"
"react-router-dom": "^7.1.1",
"smol-toml": "^1.3.1"
},
"devDependencies": {
"@tailwindcss/vite": "^4.0.0",
@@ -0,0 +1,121 @@
import { useState, useMemo } from 'react';
import { Search } from 'lucide-react';
import { CONFIG_SECTIONS } from './configSections';
import ConfigSection from './ConfigSection';
import type { FieldDef } from './types';
const CATEGORY_ORDER = [
{ key: 'all', label: 'All' },
{ key: 'general', label: 'General' },
{ key: 'security', label: 'Security' },
{ key: 'channels', label: 'Channels' },
{ key: 'runtime', label: 'Runtime' },
{ key: 'tools', label: 'Tools' },
{ key: 'memory', label: 'Memory' },
{ key: 'network', label: 'Network' },
{ key: 'advanced', label: 'Advanced' },
] as const;
interface Props {
getFieldValue: (sectionPath: string, fieldKey: string) => unknown;
setFieldValue: (sectionPath: string, fieldKey: string, value: unknown) => void;
isFieldMasked: (sectionPath: string, fieldKey: string) => boolean;
}
/**
 * Searchable, categorized form editor over all known config sections.
 *
 * Renders a search box, category filter pills (hidden while a search is
 * active), and a list of ConfigSection panels. All reads/writes of config
 * values are delegated to the accessor props, so this component holds only
 * UI state (search text and active category).
 */
export default function ConfigFormEditor({
  getFieldValue,
  setFieldValue,
  isFieldMasked,
}: Props) {
  const [search, setSearch] = useState('');
  const [activeCategory, setActiveCategory] = useState('all');
  const isSearching = search.trim().length > 0;

  // Sections to display, each optionally narrowed to a subset of fields.
  // `fields: undefined` means "render every field of the section".
  const filteredSections = useMemo(() => {
    if (isSearching) {
      const q = search.toLowerCase();
      return CONFIG_SECTIONS.map((section) => {
        // A hit on the section title/description shows the whole section...
        const titleMatch = section.title.toLowerCase().includes(q);
        const descMatch = section.description?.toLowerCase().includes(q);
        if (titleMatch || descMatch) {
          return { section, fields: undefined };
        }
        // ...otherwise only the individual matching fields are shown.
        const matchingFields = section.fields.filter(
          (f: FieldDef) =>
            f.label.toLowerCase().includes(q) ||
            f.key.toLowerCase().includes(q) ||
            f.description?.toLowerCase().includes(q),
        );
        if (matchingFields.length > 0) {
          return { section, fields: matchingFields };
        }
        // No hit anywhere in this section; dropped by filter(Boolean) below.
        return null;
      }).filter(Boolean) as { section: (typeof CONFIG_SECTIONS)[0]; fields: FieldDef[] | undefined }[];
    }
    // Category filter (only applies when not searching).
    const sections = activeCategory === 'all'
      ? CONFIG_SECTIONS
      : CONFIG_SECTIONS.filter((s) => s.category === activeCategory);
    return sections.map((s) => ({ section: s, fields: undefined }));
  }, [search, isSearching, activeCategory]);

  return (
    <div className="space-y-3">
      {/* Search */}
      <div className="relative">
        <Search className="absolute left-3 top-1/2 -translate-y-1/2 h-4 w-4 text-gray-500" />
        <input
          type="text"
          value={search}
          onChange={(e) => setSearch(e.target.value)}
          placeholder="Search config fields..."
          className="w-full bg-gray-800 border border-gray-700 rounded-lg pl-9 pr-3 py-2 text-sm text-white placeholder-gray-500 focus:outline-none focus:ring-2 focus:ring-blue-500"
        />
      </div>
      {/* Category pills — hidden during search */}
      {!isSearching && (
        <div className="flex flex-wrap gap-2">
          {CATEGORY_ORDER.map(({ key, label }) => (
            <button
              key={key}
              onClick={() => setActiveCategory(key)}
              className={`px-3 py-1 rounded-lg text-sm font-medium transition-colors ${
                activeCategory === key
                  ? 'bg-blue-600 text-white'
                  : 'bg-gray-900 text-gray-400 border border-gray-700 hover:bg-gray-800 hover:text-gray-200'
              }`}
            >
              {label}
            </button>
          ))}
        </div>
      )}
      {/* Sections */}
      {filteredSections.length === 0 ? (
        <div className="text-center py-12 text-gray-500 text-sm">
          No matching config fields found.
        </div>
      ) : (
        filteredSections.map(({ section, fields }) => (
          // Field-level search hits force the section open so matches show.
          <ConfigSection
            key={section.path || '_root'}
            section={fields ? { ...section, defaultCollapsed: false } : section}
            getFieldValue={getFieldValue}
            setFieldValue={setFieldValue}
            isFieldMasked={isFieldMasked}
            visibleFields={fields}
          />
        ))
      )}
    </div>
  );
}
@@ -0,0 +1,29 @@
/** Props for the raw-TOML textarea editor. */
interface Props {
  /** Current TOML document text (controlled value). */
  rawToml: string;
  /** Called with the complete new text on every edit. */
  onChange: (raw: string) => void;
  /** Disables editing, e.g. while a save is in flight. */
  disabled?: boolean;
}
export default function ConfigRawEditor({ rawToml, onChange, disabled }: Props) {
return (
<div className="bg-gray-900 rounded-xl border border-gray-800 overflow-hidden">
<div className="flex items-center justify-between px-4 py-2 border-b border-gray-800 bg-gray-800/50">
<span className="text-xs text-gray-400 font-medium uppercase tracking-wider">
TOML Configuration
</span>
<span className="text-xs text-gray-500">
{rawToml.split('\n').length} lines
</span>
</div>
<textarea
value={rawToml}
onChange={(e) => onChange(e.target.value)}
disabled={disabled}
spellCheck={false}
aria-label="Raw TOML configuration editor"
className="w-full min-h-[500px] bg-gray-950 text-gray-200 font-mono text-sm p-4 resize-y focus:outline-none focus:ring-2 focus:ring-blue-500 focus:ring-inset disabled:opacity-50"
style={{ tabSize: 4 }}
/>
</div>
);
}
+131
View File
@@ -0,0 +1,131 @@
import { useEffect, useMemo, useState } from 'react';
import { ChevronRight, ChevronDown } from 'lucide-react';
import type { SectionDef, FieldDef } from './types';
import TextField from './fields/TextField';
import NumberField from './fields/NumberField';
import ToggleField from './fields/ToggleField';
import SelectField from './fields/SelectField';
import TagListField from './fields/TagListField';
/** Props for one collapsible config-section panel. */
interface Props {
  /** Static definition of the section (title, icon, fields, …). */
  section: SectionDef;
  /** Reads the current value at `sectionPath`.`fieldKey`. */
  getFieldValue: (sectionPath: string, fieldKey: string) => unknown;
  /** Writes a new value at `sectionPath`.`fieldKey`. */
  setFieldValue: (sectionPath: string, fieldKey: string, value: unknown) => void;
  /** True when the stored value is a masked secret. */
  isFieldMasked: (sectionPath: string, fieldKey: string) => boolean;
  /** When set (search mode), only these fields are rendered. */
  visibleFields?: FieldDef[];
}
/**
 * Pick the widget component for a field based on its declared type.
 * Unknown types — and the 'text'/'password' pair — fall back to TextField,
 * which handles the password variant itself.
 */
function renderField(
  field: FieldDef,
  value: unknown,
  onChange: (v: unknown) => void,
  isMasked: boolean,
) {
  const shared = { field, value, onChange, isMasked };
  if (field.type === 'number') {
    return <NumberField {...shared} />;
  }
  if (field.type === 'toggle') {
    return <ToggleField {...shared} />;
  }
  if (field.type === 'select') {
    return <SelectField {...shared} />;
  }
  if (field.type === 'tag-list') {
    return <TagListField {...shared} />;
  }
  // 'text', 'password', and anything unrecognized render as a text input.
  return <TextField {...shared} />;
}
/**
 * Collapsible panel rendering one config section's fields in a responsive
 * two-column grid. Collapse state is local, but re-syncs to
 * `section.defaultCollapsed` whenever the section identity or its default
 * changes (e.g. search results pass sections with defaultCollapsed=false).
 */
export default function ConfigSection({
  section,
  getFieldValue,
  setFieldValue,
  isFieldMasked,
  visibleFields,
}: Props) {
  const [collapsed, setCollapsed] = useState(section.defaultCollapsed ?? false);
  // Stable DOM id for aria-controls; section paths may contain dots/other
  // characters that are awkward in ids, so they are normalized to '-'.
  const sectionPanelId = useMemo(
    () =>
      `config-section-${(section.path || 'root').replace(/[^a-zA-Z0-9_-]/g, '-')}`,
    [section.path],
  );
  const Icon = section.icon;
  // Search mode supplies a filtered subset; otherwise render all fields.
  const fields = visibleFields ?? section.fields;
  // Re-apply the default whenever the section (or its default) changes.
  useEffect(() => {
    setCollapsed(section.defaultCollapsed ?? false);
  }, [section.path, section.defaultCollapsed]);
  return (
    <div className="bg-gray-900 rounded-xl border border-gray-800">
      {/* Header doubles as the collapse/expand toggle. */}
      <button
        type="button"
        onClick={() => setCollapsed(!collapsed)}
        aria-expanded={!collapsed}
        aria-controls={sectionPanelId}
        className="w-full flex items-center gap-3 px-4 py-3 hover:bg-gray-800/30 transition-colors rounded-t-xl"
      >
        {collapsed ? (
          <ChevronRight className="h-4 w-4 text-gray-500 flex-shrink-0" />
        ) : (
          <ChevronDown className="h-4 w-4 text-gray-500 flex-shrink-0" />
        )}
        <Icon className="h-4 w-4 text-blue-400 flex-shrink-0" />
        <span className="text-sm font-medium text-white">{section.title}</span>
        {section.description && (
          <span className="text-xs text-gray-500 hidden sm:inline">
            {section.description}
          </span>
        )}
        <span className="ml-auto text-xs text-gray-600">
          {fields.length} {fields.length === 1 ? 'field' : 'fields'}
        </span>
      </button>
      {!collapsed && (
        <div
          id={sectionPanelId}
          className="border-t border-gray-800 px-4 py-4 grid grid-cols-1 sm:grid-cols-2 gap-x-4 gap-y-4"
        >
          {fields.map((field) => {
            const value = getFieldValue(section.path, field.key);
            const masked = isFieldMasked(section.path, field.key);
            // Tag lists are wide; let them span both grid columns.
            const spanFull = field.type === 'tag-list';
            return (
              <div key={field.key} className={`flex flex-col${spanFull ? ' sm:col-span-2' : ''}`}>
                <label className="flex items-center gap-2 text-sm font-medium text-gray-300 mb-1.5">
                  <span>{field.label}</span>
                  {field.sensitive && (
                    <span className="text-[10px] text-yellow-400 bg-yellow-900/30 border border-yellow-800/50 px-1.5 py-0.5 rounded">
                      sensitive
                    </span>
                  )}
                  {masked && (
                    <span className="text-[10px] text-blue-400 bg-blue-900/30 border border-blue-800/50 px-1.5 py-0.5 rounded">
                      masked
                    </span>
                  )}
                </label>
                {/* Text/password/number widgets already show the description
                    as their placeholder, so skip the separate caption. */}
                {field.description && field.type !== 'text' && field.type !== 'password' && field.type !== 'number' && (
                  <p className="text-xs text-gray-500 mb-1.5">{field.description}</p>
                )}
                <div className="mt-auto">
                  {renderField(
                    field,
                    value,
                    (v) => setFieldValue(section.path, field.key, v),
                    masked,
                  )}
                </div>
              </div>
            );
          })}
        </div>
      )}
    </div>
  );
}
File diff suppressed because it is too large Load Diff
@@ -0,0 +1,41 @@
import type { FieldProps } from '../types';
/**
 * Numeric input. Clearing the input stores `undefined` (removes the key);
 * unless the field declares a fractional step (< 1), values are floored to
 * integers when focus leaves the input.
 */
export default function NumberField({ field, value, onChange }: FieldProps) {
  // Unset/empty values render as an empty input rather than NaN or 0.
  const displayValue =
    value === undefined || value === null || value === '' ? '' : Number(value);

  const handleChange = (e: React.ChangeEvent<HTMLInputElement>) => {
    const text = e.target.value;
    if (text === '') {
      onChange(undefined);
      return;
    }
    const parsed = Number(text);
    if (!isNaN(parsed)) {
      onChange(parsed);
    }
  };

  const handleBlur = (e: React.FocusEvent<HTMLInputElement>) => {
    // Fields with a fractional step keep their decimals as typed.
    if (field.step !== undefined && field.step < 1) {
      return;
    }
    const text = e.target.value;
    if (text === '') {
      return;
    }
    const parsed = Number(text);
    if (!isNaN(parsed)) {
      onChange(Math.floor(parsed));
    }
  };

  return (
    <input
      type="number"
      value={displayValue}
      onChange={handleChange}
      onBlur={handleBlur}
      min={field.min}
      max={field.max}
      step={field.step ?? 1}
      placeholder={field.description ?? ''}
      className="w-full bg-gray-800 border border-gray-700 rounded-lg px-3 py-2 text-sm text-white placeholder-gray-500 focus:outline-none focus:ring-2 focus:ring-blue-500"
    />
  );
}
@@ -0,0 +1,20 @@
import type { FieldProps } from '../types';
/**
 * Dropdown over the field's predefined options, with an empty "Select..."
 * placeholder entry representing the unset state.
 */
export default function SelectField({ field, value, onChange }: FieldProps) {
  // null/undefined select the placeholder option.
  const selected = (value as string) ?? '';
  const choices = field.options ?? [];
  return (
    <select
      value={selected}
      onChange={(e) => onChange(e.target.value)}
      className="w-full bg-gray-800 border border-gray-700 rounded-lg px-3 py-2 text-sm text-white focus:outline-none focus:ring-2 focus:ring-blue-500"
    >
      <option value="">Select...</option>
      {choices.map((opt) => (
        <option key={opt.value} value={opt.value}>
          {opt.label}
        </option>
      ))}
    </select>
  );
}
@@ -0,0 +1,60 @@
import { useState } from 'react';
import { X } from 'lucide-react';
import type { FieldProps } from '../types';
/**
 * Editable list of string tags. New tags are committed on Enter, comma, or
 * blur; blanks and duplicates are ignored. Tags are removed via their X
 * button, or with Backspace when the input is empty.
 */
export default function TagListField({ field, value, onChange }: FieldProps) {
  const [draft, setDraft] = useState('');
  // Any non-array stored value (e.g. unset field) is treated as empty.
  const tags: string[] = Array.isArray(value) ? value : [];

  const commitDraft = (text: string) => {
    const candidate = text.trim();
    // Skip blanks and duplicates; the input is cleared either way.
    if (candidate && !tags.includes(candidate)) {
      onChange([...tags, candidate]);
    }
    setDraft('');
  };

  const dropTag = (index: number) => {
    onChange(tags.filter((_, i) => i !== index));
  };

  const handleKeyDown = (e: React.KeyboardEvent<HTMLInputElement>) => {
    if (e.key === 'Enter' || e.key === ',') {
      e.preventDefault();
      commitDraft(draft);
    } else if (e.key === 'Backspace' && draft === '' && tags.length > 0) {
      // Backspace on an empty input pops the last tag.
      dropTag(tags.length - 1);
    }
  };

  return (
    <div>
      <div className="flex flex-wrap gap-1.5 mb-2">
        {tags.map((tag, i) => (
          <span
            key={tag}
            className="inline-flex items-center gap-1 bg-gray-700 text-gray-200 rounded-full px-2.5 py-0.5 text-xs"
          >
            {tag}
            <button
              type="button"
              onClick={() => dropTag(i)}
              className="text-gray-400 hover:text-white transition-colors"
            >
              <X className="h-3 w-3" />
            </button>
          </span>
        ))}
      </div>
      <input
        type="text"
        value={draft}
        onChange={(e) => setDraft(e.target.value)}
        onKeyDown={handleKeyDown}
        onBlur={() => { if (draft.trim()) commitDraft(draft); }}
        placeholder={field.tagPlaceholder ?? 'Type and press Enter to add'}
        className="w-full bg-gray-800 border border-gray-700 rounded-lg px-3 py-2 text-sm text-white placeholder-gray-500 focus:outline-none focus:ring-2 focus:ring-blue-500"
      />
    </div>
  );
}
@@ -0,0 +1,39 @@
import { useState } from 'react';
import { Eye, EyeOff, Lock } from 'lucide-react';
import type { FieldProps } from '../types';
/**
 * Single-line text input, optionally rendered as a password field with a
 * show/hide toggle. When `isMasked` is true the stored secret is never
 * echoed into the DOM: the input stays empty with a "Configured (masked)"
 * placeholder and a lock badge, and typing replaces the stored value.
 *
 * Fix: the icon-only show/hide button previously had no accessible name;
 * it now carries aria-label/aria-pressed (consistent with ToggleField's
 * aria usage), and the decorative Lock icon is hidden from screen readers.
 */
export default function TextField({ field, value, onChange, isMasked }: FieldProps) {
  const [showPassword, setShowPassword] = useState(false);
  const isPassword = field.type === 'password';
  // Masked secrets are represented by the placeholder, not a value.
  const strValue = isMasked ? '' : ((value as string) ?? '');
  return (
    <div className="relative">
      <input
        type={isPassword && !showPassword ? 'password' : 'text'}
        value={strValue}
        onChange={(e) => onChange(e.target.value)}
        placeholder={isMasked ? 'Configured (masked)' : field.description ?? ''}
        className="w-full bg-gray-800 border border-gray-700 rounded-lg px-3 py-2 text-sm text-white placeholder-gray-500 focus:outline-none focus:ring-2 focus:ring-blue-500 pr-16"
      />
      <div className="absolute right-2 top-1/2 -translate-y-1/2 flex items-center gap-1">
        {isMasked && (
          // Decorative: the placeholder text already conveys the state.
          <Lock className="h-3.5 w-3.5 text-yellow-500" aria-hidden="true" />
        )}
        {isPassword && (
          <button
            type="button"
            onClick={() => setShowPassword(!showPassword)}
            aria-label={showPassword ? 'Hide value' : 'Show value'}
            aria-pressed={showPassword}
            className="p-1 text-gray-400 hover:text-gray-200 transition-colors"
          >
            {showPassword ? (
              <EyeOff className="h-3.5 w-3.5" />
            ) : (
              <Eye className="h-3.5 w-3.5" />
            )}
          </button>
        )}
      </div>
    </div>
  );
}
@@ -0,0 +1,27 @@
import type { FieldProps } from '../types';
/** Boolean on/off switch with an "Enabled"/"Disabled" caption. */
export default function ToggleField({ field, value, onChange }: FieldProps) {
  // Any truthy stored value renders as "on".
  const enabled = Boolean(value);
  const trackClass = enabled ? 'bg-blue-600' : 'bg-gray-700';
  const knobClass = enabled ? 'translate-x-6' : 'translate-x-1';
  const flip = () => onChange(!enabled);

  return (
    <div className="flex items-center gap-3">
      <button
        type="button"
        role="switch"
        aria-checked={enabled}
        aria-label={field.label}
        onClick={flip}
        className={`relative inline-flex h-6 w-11 items-center rounded-full transition-colors ${trackClass}`}
      >
        <span
          className={`inline-block h-4 w-4 transform rounded-full bg-white transition-transform ${knobClass}`}
        />
      </button>
      <span className="text-sm text-gray-400">{enabled ? 'Enabled' : 'Disabled'}</span>
    </div>
  );
}
+40
View File
@@ -0,0 +1,40 @@
import type { LucideIcon } from 'lucide-react';
/** Supported input widget kinds for a config field. */
export type FieldType =
  | 'text'
  | 'password'
  | 'number'
  | 'toggle'
  | 'select'
  | 'tag-list';

/** Declarative description of one editable config field. */
export interface FieldDef {
  /** Key within the section's TOML table. */
  key: string;
  /** Human-readable label shown next to the input. */
  label: string;
  /** Widget used to edit the value (see FieldType). */
  type: FieldType;
  /** Help text; text/password/number widgets also use it as placeholder. */
  description?: string;
  /** Marks secret values; rendered with a "sensitive" badge. */
  sensitive?: boolean;
  /** Default value — not read by the visible editors; TODO confirm consumer. */
  defaultValue?: unknown;
  /** Choices for 'select' fields. */
  options?: { value: string; label: string }[];
  /** Lower bound forwarded to 'number' inputs. */
  min?: number;
  /** Upper bound forwarded to 'number' inputs. */
  max?: number;
  /** Step for 'number' fields; steps < 1 allow fractional values. */
  step?: number;
  /** Input placeholder for 'tag-list' fields. */
  tagPlaceholder?: string;
}

/** One collapsible section of related fields. */
export interface SectionDef {
  /** Dotted TOML table path ('' addresses root-level keys). */
  path: string;
  title: string;
  description?: string;
  /** Lucide icon shown in the section header. */
  icon: LucideIcon;
  fields: FieldDef[];
  /** Start collapsed when first rendered. */
  defaultCollapsed?: boolean;
  /** Category key matched by the form editor's filter pills. */
  category?: string;
}

/** Props shared by every field widget component. */
export interface FieldProps {
  field: FieldDef;
  /** Current raw value from the parsed config (may be undefined). */
  value: unknown;
  /** Reports a new value (undefined/'' clears the key upstream). */
  onChange: (value: unknown) => void;
  /** True when the stored value is a masked secret. */
  isMasked: boolean;
}
+307
View File
@@ -0,0 +1,307 @@
import { useState, useCallback, useRef, useEffect } from 'react';
import { parse, stringify } from 'smol-toml';
import { getConfig, putConfig } from '@/lib/api';
// Sentinel string the backend substitutes for secret values; fields holding
// it are "masked" and should not be written back verbatim.
const MASKED = '***MASKED***';

// A parsed TOML document: arbitrarily nested tables and values.
type ParsedConfig = Record<string, unknown>;

/**
 * Deep-clone a parsed config object.
 *
 * Uses `structuredClone` when available so Date values survive the copy —
 * smol-toml parses TOML datetimes into Date subclasses, and the previous
 * JSON round-trip silently turned them into ISO strings, which would then
 * re-serialize as quoted strings on save. Falls back to the JSON round-trip
 * (adequate for plain tables/arrays/scalars) where structuredClone is absent.
 * NOTE(review): structuredClone downgrades Date subclasses to plain Date —
 * confirm smol-toml's stringify accepts plain Date values.
 */
function deepClone<T>(obj: T): T {
  if (typeof structuredClone === 'function') {
    return structuredClone(obj);
  }
  return JSON.parse(JSON.stringify(obj));
}

/**
 * Recursively scan for MASKED strings and collect their dotted paths
 * (array elements use numeric segments, e.g. "tokens.0") into `out`.
 */
function scanMasked(obj: unknown, prefix: string, out: Set<string>) {
  if (obj === null || obj === undefined) return;
  if (typeof obj === 'string' && obj === MASKED) {
    out.add(prefix);
    return;
  }
  if (Array.isArray(obj)) {
    obj.forEach((item, i) => {
      scanMasked(item, `${prefix}.${i}`, out);
    });
    return;
  }
  if (typeof obj === 'object') {
    for (const [k, v] of Object.entries(obj as Record<string, unknown>)) {
      scanMasked(v, prefix ? `${prefix}.${k}` : k, out);
    }
  }
}
/**
 * Navigate into an object by dotted path segments, returning the value.
 * Returns `undefined` as soon as any intermediate is missing or not an
 * object; empty `segments` returns `obj` itself.
 */
function getNestedValue(obj: unknown, segments: string[]): unknown {
  return segments.reduce<unknown>((node, seg) => {
    if (node === null || node === undefined || typeof node !== 'object') {
      return undefined;
    }
    return (node as Record<string, unknown>)[seg];
  }, obj);
}
/**
 * Set a value in an object by dotted path segments, creating intermediate
 * tables as needed. A value of `undefined` or '' deletes the leaf key so
 * cleared fields drop out of the serialized config. Empty `segments` is a
 * no-op.
 */
function setNestedValue(obj: Record<string, unknown>, segments: string[], value: unknown) {
  if (segments.length === 0) return;
  const parents = segments.slice(0, -1);
  const leaf: string = segments[segments.length - 1]!;
  let node: Record<string, unknown> = obj;
  for (const seg of parents) {
    const child = node[seg];
    // Missing or scalar intermediates are replaced with fresh tables.
    if (child === undefined || child === null || typeof child !== 'object') {
      node[seg] = {};
    }
    node = node[seg] as Record<string, unknown>;
  }
  if (value === undefined || value === '') {
    delete node[leaf];
  } else {
    node[leaf] = value;
  }
}
/** Which editor surface is active: the structured form or the raw TOML text. */
export type EditorMode = 'form' | 'raw';

/** State and actions exposed by `useConfigForm`. */
export interface ConfigFormState {
  /** True while the initial fetch or a reload is in flight. */
  loading: boolean;
  /** True while a save is in flight. */
  saving: boolean;
  /** Last load/save/parse error message, or null. */
  error: string | null;
  /** Transient success message (auto-dismissed), or null. */
  success: string | null;
  mode: EditorMode;
  /** Raw TOML text backing the 'raw' editor. */
  rawToml: string;
  /** Parsed TOML document backing the 'form' editor. */
  parsed: ParsedConfig;
  /** Dotted paths whose stored value is the masked sentinel. */
  maskedPaths: Set<string>;
  /** Dotted paths edited since the last load. */
  dirtyPaths: Set<string>;
  /** Switches editor mode; returns false if raw TOML fails to parse. */
  setMode: (mode: EditorMode) => boolean;
  getFieldValue: (sectionPath: string, fieldKey: string) => unknown;
  setFieldValue: (sectionPath: string, fieldKey: string, value: unknown) => void;
  /** Masked AND not yet overwritten by the user. */
  isFieldMasked: (sectionPath: string, fieldKey: string) => boolean;
  isFieldDirty: (sectionPath: string, fieldKey: string) => boolean;
  setRawToml: (raw: string) => void;
  /** Persists the active representation via the config API. */
  save: () => Promise<void>;
  /** Re-fetches the config, discarding local edits. */
  reload: () => Promise<void>;
  /** Clears error/success banners and any pending auto-dismiss timer. */
  clearMessages: () => void;
}
/**
 * Central state hook for the config editor.
 *
 * Owns both representations of the configuration — the raw TOML text ('raw'
 * mode) and the parsed object ('form' mode) — plus load/save status,
 * masked-secret tracking, and dirty-field tracking. Mode switches convert
 * between the two; switching raw → form is refused while the TOML is invalid.
 */
export function useConfigForm(): ConfigFormState {
  const [loading, setLoading] = useState(true);
  const [saving, setSaving] = useState(false);
  const [error, setError] = useState<string | null>(null);
  const [success, setSuccess] = useState<string | null>(null);
  const [mode, setModeState] = useState<EditorMode>('form');
  const [rawToml, setRawTomlState] = useState('');
  const [parsed, setParsed] = useState<ParsedConfig>({});
  // Refs, not state: these sets are mutated in place, so changing them does
  // not re-render by itself (see forceRender below).
  const maskedPathsRef = useRef<Set<string>>(new Set());
  const dirtyPathsRef = useRef<Set<string>>(new Set());
  const successTimeoutRef = useRef<ReturnType<typeof setTimeout> | null>(null);
  // Bumped after ref mutations to force a re-render.
  const [, forceRender] = useState(0);

  // Fetch the config, seed both representations, and rebuild the masked set.
  const loadConfig = useCallback(async () => {
    setLoading(true);
    setError(null);
    try {
      const data = await getConfig();
      // The API may return a raw string or a JSON value.
      const raw = typeof data === 'string' ? data : JSON.stringify(data, null, 2);
      setRawTomlState(raw);
      try {
        const obj = parse(raw) as ParsedConfig;
        setParsed(obj);
        const masked = new Set<string>();
        scanMasked(obj, '', masked);
        maskedPathsRef.current = masked;
      } catch {
        // If TOML parse fails, start in raw mode
        setParsed({});
        maskedPathsRef.current = new Set();
        setModeState('raw');
      }
      dirtyPathsRef.current = new Set();
    } catch (err) {
      setError(err instanceof Error ? err.message : 'Failed to load configuration');
    } finally {
      setLoading(false);
    }
  }, []);

  // Load once on mount.
  const hasLoaded = useRef(false);
  useEffect(() => {
    if (!hasLoaded.current) {
      hasLoaded.current = true;
      void loadConfig();
    }
  }, [loadConfig]);

  // Cancel a pending success auto-dismiss timer on unmount.
  useEffect(() => {
    return () => {
      if (successTimeoutRef.current) {
        clearTimeout(successTimeoutRef.current);
      }
    };
  }, []);

  // 'section.key' dotted path; root-level fields use the bare key.
  const fieldPath = (sectionPath: string, fieldKey: string) =>
    sectionPath ? `${sectionPath}.${fieldKey}` : fieldKey;
  const fieldSegments = (sectionPath: string, fieldKey: string) => {
    const full = fieldPath(sectionPath, fieldKey);
    return full.split('.').filter(Boolean);
  };

  const getFieldValue = useCallback(
    (sectionPath: string, fieldKey: string): unknown => {
      const segments = fieldSegments(sectionPath, fieldKey);
      return getNestedValue(parsed, segments);
    },
    [parsed],
  );

  // Write a field and mark it dirty (which also unmasks it in the UI).
  const setFieldValue = useCallback(
    (sectionPath: string, fieldKey: string, value: unknown) => {
      const fp = fieldPath(sectionPath, fieldKey);
      const segments = fieldSegments(sectionPath, fieldKey);
      setParsed((prev) => {
        const next = deepClone(prev);
        setNestedValue(next, segments, value);
        return next;
      });
      dirtyPathsRef.current.add(fp);
      forceRender((n) => n + 1);
    },
    [],
  );

  // Masked = server sent the sentinel AND the user has not typed over it.
  const isFieldMasked = useCallback(
    (sectionPath: string, fieldKey: string): boolean => {
      const fp = fieldPath(sectionPath, fieldKey);
      return maskedPathsRef.current.has(fp) && !dirtyPathsRef.current.has(fp);
    },
    [],
  );
  const isFieldDirty = useCallback(
    (sectionPath: string, fieldKey: string): boolean => {
      const fp = fieldPath(sectionPath, fieldKey);
      return dirtyPathsRef.current.has(fp);
    },
    [],
  );

  // form → raw: serialize; on failure fall back to the last raw text.
  const syncFormToRaw = useCallback((): string => {
    try {
      const toml = stringify(parsed);
      return toml;
    } catch {
      return rawToml;
    }
  }, [parsed, rawToml]);

  // raw → form: parse; returns false (changing nothing) on invalid TOML.
  const syncRawToForm = useCallback(
    (raw: string): boolean => {
      try {
        const obj = parse(raw) as ParsedConfig;
        setParsed(obj);
        // Re-scan masked paths from fresh parse, preserving dirty overrides
        const masked = new Set<string>();
        scanMasked(obj, '', masked);
        maskedPathsRef.current = masked;
        return true;
      } catch {
        return false;
      }
    },
    [],
  );

  // Switch editor surface, converting the representation. Returns whether
  // the switch happened (raw → form fails on invalid TOML).
  const setMode = useCallback(
    (newMode: EditorMode): boolean => {
      if (newMode === mode) return true;
      if (newMode === 'raw') {
        // form → raw: serialize parsed to TOML
        const toml = syncFormToRaw();
        setRawTomlState(toml);
        setModeState('raw');
        return true;
      } else {
        // raw → form: parse TOML
        if (syncRawToForm(rawToml)) {
          setModeState('form');
          return true;
        } else {
          setError('Invalid TOML syntax. Fix errors before switching to Form view.');
          return false;
        }
      }
    },
    [mode, syncFormToRaw, syncRawToForm, rawToml],
  );

  const setRawToml = useCallback((raw: string) => {
    setRawTomlState(raw);
  }, []);

  // PUT the active representation (serializing the form first if needed).
  const save = useCallback(async () => {
    setSaving(true);
    setError(null);
    setSuccess(null);
    if (successTimeoutRef.current) {
      clearTimeout(successTimeoutRef.current);
    }
    try {
      let toml: string;
      if (mode === 'form') {
        toml = syncFormToRaw();
      } else {
        toml = rawToml;
      }
      await putConfig(toml);
      setSuccess('Configuration saved successfully.');
      // Auto-dismiss success after 4 seconds
      successTimeoutRef.current = setTimeout(() => setSuccess(null), 4000);
    } catch (err) {
      setError(err instanceof Error ? err.message : 'Failed to save configuration');
    } finally {
      setSaving(false);
    }
  }, [mode, syncFormToRaw, rawToml]);

  const reload = useCallback(async () => {
    await loadConfig();
  }, [loadConfig]);

  const clearMessages = useCallback(() => {
    setError(null);
    setSuccess(null);
    if (successTimeoutRef.current) {
      clearTimeout(successTimeoutRef.current);
      successTimeoutRef.current = null;
    }
  }, []);

  return {
    loading,
    saving,
    error,
    success,
    mode,
    rawToml,
    parsed,
    maskedPaths: maskedPathsRef.current,
    dirtyPaths: dirtyPathsRef.current,
    setMode,
    getFieldValue,
    setFieldValue,
    isFieldMasked,
    isFieldDirty,
    setRawToml,
    save,
    reload,
    clearMessages,
  };
}
+93 -65
View File
@@ -1,50 +1,61 @@
import { useState, useEffect } from 'react';
import {
Settings,
Save,
CheckCircle,
AlertTriangle,
ShieldAlert,
FileText,
SlidersHorizontal,
} from 'lucide-react';
import { getConfig, putConfig } from '@/lib/api';
import { useConfigForm, type EditorMode } from '@/components/config/useConfigForm';
import ConfigFormEditor from '@/components/config/ConfigFormEditor';
import ConfigRawEditor from '@/components/config/ConfigRawEditor';
/** One tab in the Form/Raw editor mode switcher. */
function ModeTab({
  mode,
  active,
  icon: Icon,
  label,
  onClick,
}: {
  mode: EditorMode;
  active: boolean;
  icon: React.ComponentType<{ className?: string }>;
  label: string;
  onClick: () => void;
}) {
  // Active tab is highlighted; inactive tabs get hover affordances only.
  const stateClasses = active
    ? 'bg-blue-600 text-white'
    : 'text-gray-400 hover:text-gray-200 hover:bg-gray-800';

  return (
    <button
      aria-pressed={active}
      data-mode={mode}
      onClick={onClick}
      className={`flex items-center gap-1.5 px-3 py-1.5 rounded-lg text-sm font-medium transition-colors ${stateClasses}`}
    >
      <Icon className="h-3.5 w-3.5" />
      {label}
    </button>
  );
}
export default function Config() {
const [config, setConfig] = useState('');
const [loading, setLoading] = useState(true);
const [saving, setSaving] = useState(false);
const [error, setError] = useState<string | null>(null);
const [success, setSuccess] = useState<string | null>(null);
useEffect(() => {
getConfig()
.then((data) => {
// The API may return either a raw string or a JSON string
setConfig(typeof data === 'string' ? data : JSON.stringify(data, null, 2));
})
.catch((err) => setError(err.message))
.finally(() => setLoading(false));
}, []);
const handleSave = async () => {
setSaving(true);
setError(null);
setSuccess(null);
try {
await putConfig(config);
setSuccess('Configuration saved successfully.');
} catch (err: unknown) {
setError(err instanceof Error ? err.message : 'Failed to save configuration');
} finally {
setSaving(false);
}
};
// Auto-dismiss success after 4 seconds
useEffect(() => {
if (!success) return;
const timer = setTimeout(() => setSuccess(null), 4000);
return () => clearTimeout(timer);
}, [success]);
const {
loading,
saving,
error,
success,
mode,
rawToml,
setMode,
getFieldValue,
setFieldValue,
isFieldMasked,
setRawToml,
save,
} = useConfigForm();
if (loading) {
return (
@@ -62,14 +73,34 @@ export default function Config() {
<Settings className="h-5 w-5 text-blue-400" />
<h2 className="text-base font-semibold text-white">Configuration</h2>
</div>
<button
onClick={handleSave}
disabled={saving}
className="flex items-center gap-2 bg-blue-600 hover:bg-blue-700 text-white text-sm font-medium px-4 py-2 rounded-lg transition-colors disabled:opacity-50"
>
<Save className="h-4 w-4" />
{saving ? 'Saving...' : 'Save'}
</button>
<div className="flex items-center gap-3">
{/* Mode toggle */}
<div className="flex items-center gap-1 bg-gray-900 border border-gray-800 rounded-lg p-0.5">
<ModeTab
mode="form"
active={mode === 'form'}
icon={SlidersHorizontal}
label="Form"
onClick={() => setMode('form')}
/>
<ModeTab
mode="raw"
active={mode === 'raw'}
icon={FileText}
label="Raw"
onClick={() => setMode('raw')}
/>
</div>
<button
onClick={save}
disabled={saving}
className="flex items-center gap-2 bg-blue-600 hover:bg-blue-700 text-white text-sm font-medium px-4 py-2 rounded-lg transition-colors disabled:opacity-50"
>
<Save className="h-4 w-4" />
{saving ? 'Saving...' : 'Save'}
</button>
</div>
</div>
{/* Sensitive fields note */}
@@ -80,8 +111,9 @@ export default function Config() {
Sensitive fields are masked
</p>
<p className="text-sm text-yellow-400/70 mt-0.5">
API keys, tokens, and passwords are hidden for security. To update a
masked field, replace the entire masked value with your new value.
{mode === 'form'
? 'Masked fields show "Configured (masked)" as a placeholder. Leave them untouched to preserve existing values, or enter a new value to update.'
: 'API keys, tokens, and passwords are hidden for security. To update a masked field, replace the entire masked value with your new value.'}
</p>
</div>
</div>
@@ -102,24 +134,20 @@ export default function Config() {
</div>
)}
{/* Config Editor */}
<div className="bg-gray-900 rounded-xl border border-gray-800 overflow-hidden">
<div className="flex items-center justify-between px-4 py-2 border-b border-gray-800 bg-gray-800/50">
<span className="text-xs text-gray-400 font-medium uppercase tracking-wider">
TOML Configuration
</span>
<span className="text-xs text-gray-500">
{config.split('\n').length} lines
</span>
</div>
<textarea
value={config}
onChange={(e) => setConfig(e.target.value)}
spellCheck={false}
className="w-full min-h-[500px] bg-gray-950 text-gray-200 font-mono text-sm p-4 resize-y focus:outline-none focus:ring-2 focus:ring-blue-500 focus:ring-inset"
style={{ tabSize: 4 }}
{/* Editor */}
{mode === 'form' ? (
<ConfigFormEditor
getFieldValue={getFieldValue}
setFieldValue={setFieldValue}
isFieldMasked={isFieldMasked}
/>
</div>
) : (
<ConfigRawEditor
rawToml={rawToml}
onChange={setRawToml}
disabled={saving}
/>
)}
</div>
);
}