feat: harden non-CLI approval governance and runtime policy sync
This commit is contained in:
parent
5ac885de7b
commit
1fcf2df28b
@ -78,8 +78,8 @@ default_temperature = 0.7
|
||||
|
||||
[gateway]
|
||||
port = 42617
|
||||
host = "[::]"
|
||||
allow_public_bind = true
|
||||
host = "127.0.0.1"
|
||||
allow_public_bind = false
|
||||
EOF
|
||||
|
||||
# ── Stage 2: Development Runtime (Debian) ────────────────────
|
||||
|
||||
@ -8,5 +8,5 @@ default_temperature = 0.7
|
||||
|
||||
[gateway]
|
||||
port = 42617
|
||||
host = "[::]"
|
||||
allow_public_bind = true
|
||||
host = "127.0.0.1"
|
||||
allow_public_bind = false
|
||||
|
||||
@ -37,22 +37,46 @@ cli = true
|
||||
|
||||
Each channel is enabled by creating its sub-table (for example, `[channels_config.telegram]`).
|
||||
|
||||
## In-Chat Runtime Model Switching (Telegram / Discord)
|
||||
One ZeroClaw runtime can serve multiple channels at once: if you configure several
|
||||
channel sub-tables, `zeroclaw channel start` launches all of them in the same process.
|
||||
Channel startup is best-effort: a single channel init failure is reported and skipped,
|
||||
while remaining channels continue running.
|
||||
|
||||
When running `zeroclaw channel start` (or daemon mode), Telegram and Discord now support sender-scoped runtime switching:
|
||||
## In-Chat Runtime Commands
|
||||
|
||||
When running `zeroclaw channel start` (or daemon mode), runtime commands include:
|
||||
|
||||
Telegram/Discord sender-scoped model routing:
|
||||
- `/models` — show available providers and current selection
|
||||
- `/models <provider>` — switch provider for the current sender session
|
||||
- `/model` — show current model and cached model IDs (if available)
|
||||
- `/model <model-id>` — switch model for the current sender session
|
||||
- `/new` — clear conversation history and start a fresh session
|
||||
|
||||
Supervised tool approvals (all non-CLI channels):
|
||||
- `/approve-request <tool-name>` — create a pending approval request
|
||||
- `/approve-confirm <request-id>` — confirm pending request (same sender + same chat/channel only)
|
||||
- `/approve-pending` — list pending requests for your current sender+chat/channel scope
|
||||
- `/approve <tool-name>` — direct one-step approve + persist (`autonomy.auto_approve`, compatibility path)
|
||||
- `/unapprove <tool-name>` — revoke and remove persisted approval
|
||||
- `/approvals` — inspect runtime grants, persisted approval lists, and excluded tools
|
||||
|
||||
Notes:
|
||||
|
||||
- Switching provider or model clears only that sender's in-memory conversation history to avoid cross-model context contamination.
|
||||
- `/new` clears the sender's conversation history without changing provider or model selection.
|
||||
- Model cache previews come from `zeroclaw models refresh --provider <ID>`.
|
||||
- These are runtime chat commands, not CLI subcommands.
|
||||
- Natural-language approval intents are supported with strict parsing and policy control:
|
||||
- `direct` mode (default): `授权工具 shell` grants immediately.
|
||||
- `request_confirm` mode: `授权工具 shell` creates pending request, then confirm with request ID.
|
||||
- `disabled` mode: approval-management must use slash commands.
|
||||
- You can override natural-language approval mode per channel via `[autonomy].non_cli_natural_language_approval_mode_by_channel`.
|
||||
- Approval commands are intercepted before LLM execution, so the model cannot self-escalate permissions through tool calls.
|
||||
- You can restrict who can use approval-management commands via `[autonomy].non_cli_approval_approvers`.
|
||||
- Configure natural-language approval mode via `[autonomy].non_cli_natural_language_approval_mode`.
|
||||
- `autonomy.non_cli_excluded_tools` is reloaded from `config.toml` at runtime; `/approvals` shows the currently effective list.
|
||||
- Each incoming message injects a runtime tool-availability snapshot into the system prompt, derived from the same exclusion policy used by execution.
|
||||
|
||||
## Inbound Image Marker Protocol
|
||||
|
||||
|
||||
@ -2,7 +2,7 @@
|
||||
|
||||
This reference is derived from the current CLI surface (`zeroclaw --help`).
|
||||
|
||||
Last verified: **February 21, 2026**.
|
||||
Last verified: **February 25, 2026**.
|
||||
|
||||
## Top-Level Commands
|
||||
|
||||
@ -138,13 +138,39 @@ Provider connectivity matrix CI/local helper:
|
||||
- `zeroclaw channel add <type> <json>`
|
||||
- `zeroclaw channel remove <name>`
|
||||
|
||||
Runtime in-chat commands (Telegram/Discord while channel server is running):
|
||||
Runtime in-chat commands while channel server is running:
|
||||
|
||||
- `/models`
|
||||
- `/models <provider>`
|
||||
- `/model`
|
||||
- `/model <model-id>`
|
||||
- `/new`
|
||||
- Telegram/Discord sender-session routing:
|
||||
- `/models`
|
||||
- `/models <provider>`
|
||||
- `/model`
|
||||
- `/model <model-id>`
|
||||
- `/new`
|
||||
- Supervised tool approvals (all non-CLI channels):
|
||||
- `/approve-request <tool-name>` (create pending approval request)
|
||||
- `/approve-confirm <request-id>` (confirm pending request; same sender + same chat/channel only)
|
||||
- `/approve-pending` (list pending requests in current sender+chat/channel scope)
|
||||
- `/approve <tool-name>` (direct one-step grant + persist to `autonomy.auto_approve`, compatibility path)
|
||||
- `/unapprove <tool-name>` (revoke + remove from `autonomy.auto_approve`)
|
||||
- `/approvals` (show runtime + persisted approval state)
|
||||
- Natural-language approval behavior is controlled by `[autonomy].non_cli_natural_language_approval_mode`:
|
||||
- `direct` (default): `授权工具 shell` / `approve tool shell` immediately grants
|
||||
- `request_confirm`: natural-language approval creates pending request, then confirm with request ID
|
||||
- `disabled`: natural-language approval commands are ignored (slash commands only)
|
||||
- Optional per-channel override: `[autonomy].non_cli_natural_language_approval_mode_by_channel`
|
||||
|
||||
Approval safety behavior:
|
||||
|
||||
- Runtime approval commands are parsed and executed **before** LLM inference in the channel loop.
|
||||
- Pending requests are sender+chat/channel scoped and expire automatically.
|
||||
- Confirmation requires the same sender in the same chat/channel that created the request.
|
||||
- Once approved and persisted, the tool remains approved across restarts until revoked.
|
||||
- Optional policy gate: `[autonomy].non_cli_approval_approvers` can restrict who may execute approval-management commands.
|
||||
|
||||
Startup behavior for multiple channels:
|
||||
- `zeroclaw channel start` starts all configured channels in one process.
|
||||
- If one channel fails initialization, other channels continue to start.
|
||||
- If all configured channels fail initialization, startup exits with an error.
|
||||
|
||||
Channel runtime also watches `config.toml` and hot-applies updates to:
|
||||
- `default_provider`
|
||||
|
||||
@ -494,6 +494,10 @@ Notes:
|
||||
| `block_high_risk_commands` | `true` | hard block for high-risk commands |
|
||||
| `auto_approve` | `[]` | tool operations always auto-approved |
|
||||
| `always_ask` | `[]` | tool operations that always require approval |
|
||||
| `non_cli_excluded_tools` | `[]` | tools hidden from non-CLI channel tool specs |
|
||||
| `non_cli_approval_approvers` | `[]` | optional allowlist for who can run non-CLI approval-management commands |
|
||||
| `non_cli_natural_language_approval_mode` | `direct` | natural-language behavior for approval-management commands (`direct`, `request_confirm`, `disabled`) |
|
||||
| `non_cli_natural_language_approval_mode_by_channel` | `{}` | per-channel override map for natural-language approval mode |
|
||||
|
||||
Notes:
|
||||
|
||||
@ -503,6 +507,25 @@ Notes:
|
||||
- `allowed_commands` entries can be command names (for example, `"git"`), explicit executable paths (for example, `"/usr/bin/antigravity"`), or `"*"` to allow any command name/path (risk gates still apply).
|
||||
- Shell separator/operator parsing is quote-aware. Characters like `;` inside quoted arguments are treated as literals, not command separators.
|
||||
- Unquoted shell chaining/operators are still enforced by policy checks (`;`, `|`, `&&`, `||`, background chaining, and redirects).
|
||||
- In supervised mode on non-CLI channels, operators can persist human-approved tools with:
|
||||
- One-step flow: `/approve <tool>`.
|
||||
- Two-step flow: `/approve-request <tool>` then `/approve-confirm <request-id>` (same sender + same chat/channel).
|
||||
Both paths write to `autonomy.auto_approve` and remove the tool from `autonomy.always_ask`.
|
||||
- `non_cli_natural_language_approval_mode` controls how strict natural-language approval intents are:
|
||||
- `direct` (default): natural-language approval grants immediately (private-chat friendly).
|
||||
- `request_confirm`: natural-language approval creates a pending request that needs explicit confirm.
|
||||
- `disabled`: natural-language approval commands are rejected; use slash commands only.
|
||||
- `non_cli_natural_language_approval_mode_by_channel` can override that mode for specific channels (keys are channel names like `telegram`, `discord`, `slack`).
|
||||
- Example: keep global `direct`, but force `discord = "request_confirm"` for team chats.
|
||||
- `non_cli_approval_approvers` can restrict who is allowed to run approval commands (`/approve*`, `/unapprove`, `/approvals`):
|
||||
- `*` allows all channel-admitted senders.
|
||||
- `alice` allows sender `alice` on any channel.
|
||||
- `telegram:alice` allows only that channel+sender pair.
|
||||
- `telegram:*` allows any sender on Telegram.
|
||||
- `*:alice` allows `alice` on any channel.
|
||||
- Use `/unapprove <tool>` to remove persisted approval from `autonomy.auto_approve`.
|
||||
- `/approve-pending` lists pending requests for the current sender+chat/channel scope.
|
||||
- If a tool remains unavailable after approval, check `autonomy.non_cli_excluded_tools` (runtime `/approvals` shows this list). Channel runtime reloads this list from `config.toml` automatically.
|
||||
|
||||
```toml
|
||||
[autonomy]
|
||||
|
||||
@ -982,9 +982,9 @@ pub(crate) async fn run_tool_call_loop(
|
||||
anyhow::bail!("Agent exceeded maximum tool iterations ({max_iterations})")
|
||||
}
|
||||
|
||||
/// Build the tool instruction block for the system prompt so the LLM knows
|
||||
/// how to invoke tools.
|
||||
pub(crate) fn build_tool_instructions(tools_registry: &[Box<dyn Tool>]) -> String {
|
||||
/// Build the tool instruction block for the system prompt from concrete tool
|
||||
/// specs so the LLM knows how to invoke tools.
|
||||
pub(crate) fn build_tool_instructions_from_specs(tool_specs: &[crate::tools::ToolSpec]) -> String {
|
||||
let mut instructions = String::new();
|
||||
instructions.push_str("\n## Tool Use Protocol\n\n");
|
||||
instructions.push_str("To use a tool, wrap a JSON object in <tool_call></tool_call> tags:\n\n");
|
||||
@ -992,20 +992,23 @@ pub(crate) fn build_tool_instructions(tools_registry: &[Box<dyn Tool>]) -> Strin
|
||||
instructions.push_str(
|
||||
"CRITICAL: Output actual <tool_call> tags—never describe steps or give examples.\n\n",
|
||||
);
|
||||
instructions.push_str("Example: User says \"what's the date?\". You MUST respond with:\n<tool_call>\n{\"name\":\"shell\",\"arguments\":{\"command\":\"date\"}}\n</tool_call>\n\n");
|
||||
instructions.push_str(
|
||||
"When a tool is needed, emit a real call (not prose), for example:\n\
|
||||
<tool_call>\n\
|
||||
{\"name\":\"tool_name\",\"arguments\":{}}\n\
|
||||
</tool_call>\n\n",
|
||||
);
|
||||
instructions.push_str("You may use multiple tool calls in a single response. ");
|
||||
instructions.push_str("After tool execution, results appear in <tool_result> tags. ");
|
||||
instructions
|
||||
.push_str("Continue reasoning with the results until you can give a final answer.\n\n");
|
||||
instructions.push_str("### Available Tools\n\n");
|
||||
|
||||
for tool in tools_registry {
|
||||
for tool in tool_specs {
|
||||
let _ = writeln!(
|
||||
instructions,
|
||||
"**{}**: {}\nParameters: `{}`\n",
|
||||
tool.name(),
|
||||
tool.description(),
|
||||
tool.parameters_schema()
|
||||
tool.name, tool.description, tool.parameters
|
||||
);
|
||||
}
|
||||
|
||||
|
||||
@ -3,13 +3,14 @@
|
||||
//! Provides a pre-execution hook that prompts the user before tool calls,
|
||||
//! with session-scoped "Always" allowlists and audit logging.
|
||||
|
||||
use crate::config::AutonomyConfig;
|
||||
use crate::config::{AutonomyConfig, NonCliNaturalLanguageApprovalMode};
|
||||
use crate::security::AutonomyLevel;
|
||||
use chrono::Utc;
|
||||
use parking_lot::Mutex;
|
||||
use chrono::{Duration, Utc};
|
||||
use parking_lot::{Mutex, RwLock};
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::collections::HashSet;
|
||||
use std::collections::{HashMap, HashSet};
|
||||
use std::io::{self, BufRead, Write};
|
||||
use uuid::Uuid;
|
||||
|
||||
// ── Types ────────────────────────────────────────────────────────
|
||||
|
||||
@ -42,6 +43,26 @@ pub struct ApprovalLogEntry {
|
||||
pub channel: String,
|
||||
}
|
||||
|
||||
/// A pending non-CLI approval request that still requires explicit confirmation.
///
/// Created by `/approve-request <tool>` (or a natural-language intent in
/// `request_confirm` mode) and resolved by `/approve-confirm <request-id>`
/// from the same sender in the same chat/channel scope.
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
pub struct PendingNonCliApprovalRequest {
    /// Short unique id (format `apr-XXXXXXXX`) the confirmer echoes back.
    pub request_id: String,
    /// Tool whose approval is being requested.
    pub tool_name: String,
    /// Sender that created the request; confirmation must come from the same sender.
    pub requested_by: String,
    /// Channel the request was created on (e.g. `telegram`).
    pub requested_channel: String,
    /// Chat/channel reply target; confirmation must come from the same target.
    pub requested_reply_target: String,
    /// Optional free-form reason supplied by the requester.
    pub reason: Option<String>,
    /// RFC 3339 creation timestamp.
    pub created_at: String,
    /// RFC 3339 expiry timestamp; expired requests are pruned and cannot confirm.
    pub expires_at: String,
}
|
||||
|
||||
/// Why confirming a pending non-CLI approval request failed.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum PendingApprovalError {
    /// No pending request with that id (never created, already confirmed,
    /// or already pruned).
    NotFound,
    /// The request existed but its `expires_at` deadline has passed.
    Expired,
    /// Confirmation came from a different sender, channel, or reply target
    /// than the one that created the request.
    RequesterMismatch,
}
|
||||
|
||||
// ── ApprovalManager ──────────────────────────────────────────────
|
||||
|
||||
/// Manages the interactive approval workflow.
|
||||
@ -50,26 +71,74 @@ pub struct ApprovalLogEntry {
|
||||
/// - Maintains a session-scoped "always" allowlist
|
||||
/// - Records an audit trail of all decisions
|
||||
pub struct ApprovalManager {
|
||||
/// Tools that never need approval (from config).
|
||||
auto_approve: HashSet<String>,
|
||||
/// Tools that always need approval, ignoring session allowlist.
|
||||
always_ask: HashSet<String>,
|
||||
/// Tools that never need approval (config + runtime updates).
|
||||
auto_approve: RwLock<HashSet<String>>,
|
||||
/// Tools that always need approval, ignoring session allowlist (config + runtime updates).
|
||||
always_ask: RwLock<HashSet<String>>,
|
||||
/// Autonomy level from config.
|
||||
autonomy_level: AutonomyLevel,
|
||||
/// Session-scoped allowlist built from "Always" responses.
|
||||
session_allowlist: Mutex<HashSet<String>>,
|
||||
/// Session-scoped allowlist for non-CLI channels after explicit human approval.
|
||||
non_cli_allowlist: Mutex<HashSet<String>>,
|
||||
/// Optional allowlist of senders allowed to manage non-CLI approvals.
|
||||
non_cli_approval_approvers: RwLock<HashSet<String>>,
|
||||
/// Default natural-language handling mode for non-CLI approval-management commands.
|
||||
non_cli_natural_language_approval_mode: RwLock<NonCliNaturalLanguageApprovalMode>,
|
||||
/// Optional per-channel overrides for natural-language approval mode.
|
||||
non_cli_natural_language_approval_mode_by_channel:
|
||||
RwLock<HashMap<String, NonCliNaturalLanguageApprovalMode>>,
|
||||
/// Pending non-CLI approval requests awaiting explicit human confirmation.
|
||||
pending_non_cli_requests: Mutex<HashMap<String, PendingNonCliApprovalRequest>>,
|
||||
/// Audit trail of approval decisions.
|
||||
audit_log: Mutex<Vec<ApprovalLogEntry>>,
|
||||
}
|
||||
|
||||
impl ApprovalManager {
|
||||
/// Trim whitespace from each approver entry and keep only the non-empty
/// results as a set.
fn normalize_non_cli_approvers(entries: &[String]) -> HashSet<String> {
    let mut normalized = HashSet::new();
    for raw in entries {
        let trimmed = raw.trim();
        if !trimmed.is_empty() {
            normalized.insert(trimmed.to_string());
        }
    }
    normalized
}
|
||||
|
||||
/// Normalize per-channel override keys: trim and lowercase each channel
/// name, dropping entries whose key trims to empty.
fn normalize_non_cli_natural_language_mode_by_channel(
    entries: &HashMap<String, NonCliNaturalLanguageApprovalMode>,
) -> HashMap<String, NonCliNaturalLanguageApprovalMode> {
    let mut normalized = HashMap::new();
    for (channel, mode) in entries {
        let key = channel.trim().to_ascii_lowercase();
        if !key.is_empty() {
            normalized.insert(key, *mode);
        }
    }
    normalized
}
|
||||
|
||||
/// Create from autonomy config.
|
||||
pub fn from_config(config: &AutonomyConfig) -> Self {
|
||||
Self {
|
||||
auto_approve: config.auto_approve.iter().cloned().collect(),
|
||||
always_ask: config.always_ask.iter().cloned().collect(),
|
||||
auto_approve: RwLock::new(config.auto_approve.iter().cloned().collect()),
|
||||
always_ask: RwLock::new(config.always_ask.iter().cloned().collect()),
|
||||
autonomy_level: config.level,
|
||||
session_allowlist: Mutex::new(HashSet::new()),
|
||||
non_cli_allowlist: Mutex::new(HashSet::new()),
|
||||
non_cli_approval_approvers: RwLock::new(Self::normalize_non_cli_approvers(
|
||||
&config.non_cli_approval_approvers,
|
||||
)),
|
||||
non_cli_natural_language_approval_mode: RwLock::new(
|
||||
config.non_cli_natural_language_approval_mode,
|
||||
),
|
||||
non_cli_natural_language_approval_mode_by_channel: RwLock::new(
|
||||
Self::normalize_non_cli_natural_language_mode_by_channel(
|
||||
&config.non_cli_natural_language_approval_mode_by_channel,
|
||||
),
|
||||
),
|
||||
pending_non_cli_requests: Mutex::new(HashMap::new()),
|
||||
audit_log: Mutex::new(Vec::new()),
|
||||
}
|
||||
}
|
||||
@ -89,12 +158,12 @@ impl ApprovalManager {
|
||||
}
|
||||
|
||||
// always_ask overrides everything.
|
||||
if self.always_ask.contains(tool_name) {
|
||||
if self.always_ask.read().contains(tool_name) {
|
||||
return true;
|
||||
}
|
||||
|
||||
// auto_approve skips the prompt.
|
||||
if self.auto_approve.contains(tool_name) {
|
||||
if self.auto_approve.read().contains(tool_name) {
|
||||
return false;
|
||||
}
|
||||
|
||||
@ -145,6 +214,270 @@ impl ApprovalManager {
|
||||
self.session_allowlist.lock().clone()
|
||||
}
|
||||
|
||||
/// Grant session-scoped non-CLI approval for a specific tool.
|
||||
pub fn grant_non_cli_session(&self, tool_name: &str) {
|
||||
let mut allowlist = self.non_cli_allowlist.lock();
|
||||
allowlist.insert(tool_name.to_string());
|
||||
}
|
||||
|
||||
/// Revoke session-scoped non-CLI approval for a specific tool.
|
||||
pub fn revoke_non_cli_session(&self, tool_name: &str) -> bool {
|
||||
let mut allowlist = self.non_cli_allowlist.lock();
|
||||
allowlist.remove(tool_name)
|
||||
}
|
||||
|
||||
/// Check whether non-CLI session approval exists for a tool.
|
||||
pub fn is_non_cli_session_granted(&self, tool_name: &str) -> bool {
|
||||
let allowlist = self.non_cli_allowlist.lock();
|
||||
allowlist.contains(tool_name)
|
||||
}
|
||||
|
||||
/// Get the current non-CLI session allowlist.
|
||||
pub fn non_cli_session_allowlist(&self) -> HashSet<String> {
|
||||
self.non_cli_allowlist.lock().clone()
|
||||
}
|
||||
|
||||
/// Snapshot configured non-CLI approval approver entries.
|
||||
pub fn non_cli_approval_approvers(&self) -> HashSet<String> {
|
||||
self.non_cli_approval_approvers.read().clone()
|
||||
}
|
||||
|
||||
/// Natural-language handling mode for non-CLI approval-management commands.
|
||||
pub fn non_cli_natural_language_approval_mode(&self) -> NonCliNaturalLanguageApprovalMode {
|
||||
*self.non_cli_natural_language_approval_mode.read()
|
||||
}
|
||||
|
||||
/// Snapshot per-channel natural-language approval mode overrides.
|
||||
pub fn non_cli_natural_language_approval_mode_by_channel(
|
||||
&self,
|
||||
) -> HashMap<String, NonCliNaturalLanguageApprovalMode> {
|
||||
self.non_cli_natural_language_approval_mode_by_channel
|
||||
.read()
|
||||
.clone()
|
||||
}
|
||||
|
||||
/// Effective natural-language approval mode for a specific channel.
|
||||
pub fn non_cli_natural_language_approval_mode_for_channel(
|
||||
&self,
|
||||
channel: &str,
|
||||
) -> NonCliNaturalLanguageApprovalMode {
|
||||
let normalized = channel.trim().to_ascii_lowercase();
|
||||
self.non_cli_natural_language_approval_mode_by_channel
|
||||
.read()
|
||||
.get(&normalized)
|
||||
.copied()
|
||||
.unwrap_or_else(|| self.non_cli_natural_language_approval_mode())
|
||||
}
|
||||
|
||||
/// Check whether `sender` on `channel` may manage non-CLI approvals.
|
||||
///
|
||||
/// If no approver entries are configured, this defaults to `true` so
|
||||
/// existing setups continue to behave as before.
|
||||
pub fn is_non_cli_approval_actor_allowed(&self, channel: &str, sender: &str) -> bool {
|
||||
let approvers = self.non_cli_approval_approvers.read();
|
||||
if approvers.is_empty() {
|
||||
return true;
|
||||
}
|
||||
|
||||
if approvers.contains("*") || approvers.contains(sender) {
|
||||
return true;
|
||||
}
|
||||
|
||||
let exact = format!("{channel}:{sender}");
|
||||
if approvers.contains(&exact) {
|
||||
return true;
|
||||
}
|
||||
|
||||
let any_on_channel = format!("{channel}:*");
|
||||
if approvers.contains(&any_on_channel) {
|
||||
return true;
|
||||
}
|
||||
|
||||
let sender_any_channel = format!("*:{sender}");
|
||||
approvers.contains(&sender_any_channel)
|
||||
}
|
||||
|
||||
/// Apply runtime + persisted approval grant semantics:
|
||||
/// add to auto_approve and remove from always_ask.
|
||||
pub fn apply_persistent_runtime_grant(&self, tool_name: &str) {
|
||||
{
|
||||
let mut auto = self.auto_approve.write();
|
||||
auto.insert(tool_name.to_string());
|
||||
}
|
||||
let mut always = self.always_ask.write();
|
||||
always.remove(tool_name);
|
||||
}
|
||||
|
||||
/// Apply runtime + persisted approval revoke semantics:
|
||||
/// remove from auto_approve.
|
||||
pub fn apply_persistent_runtime_revoke(&self, tool_name: &str) -> bool {
|
||||
let mut auto = self.auto_approve.write();
|
||||
auto.remove(tool_name)
|
||||
}
|
||||
|
||||
/// Replace runtime-persistent non-CLI policy from config hot-reload.
///
/// This updates the effective policy sets used by non-CLI approval commands
/// without restarting the daemon. Each field is replaced wholesale (not
/// merged) under its own short-lived write lock, so no two locks are ever
/// held at the same time.
pub fn replace_runtime_non_cli_policy(
    &self,
    auto_approve: &[String],
    always_ask: &[String],
    non_cli_approval_approvers: &[String],
    non_cli_natural_language_approval_mode: NonCliNaturalLanguageApprovalMode,
    non_cli_natural_language_approval_mode_by_channel: &HashMap<
        String,
        NonCliNaturalLanguageApprovalMode,
    >,
) {
    // Replace the auto-approved tool set.
    {
        let mut auto = self.auto_approve.write();
        *auto = auto_approve.iter().cloned().collect();
    }
    // Replace the always-ask tool set.
    {
        let mut always = self.always_ask.write();
        *always = always_ask.iter().cloned().collect();
    }
    // Approver entries are trimmed and blanks dropped, same as at construction.
    {
        let mut approvers = self.non_cli_approval_approvers.write();
        *approvers = Self::normalize_non_cli_approvers(non_cli_approval_approvers);
    }
    // Replace the global natural-language approval mode.
    {
        let mut mode = self.non_cli_natural_language_approval_mode.write();
        *mode = non_cli_natural_language_approval_mode;
    }
    // Per-channel overrides are re-normalized (trimmed, lowercased keys).
    {
        let mut mode_by_channel = self
            .non_cli_natural_language_approval_mode_by_channel
            .write();
        *mode_by_channel = Self::normalize_non_cli_natural_language_mode_by_channel(
            non_cli_natural_language_approval_mode_by_channel,
        );
    }
}
|
||||
|
||||
/// Snapshot runtime auto_approve entries.
|
||||
pub fn auto_approve_tools(&self) -> HashSet<String> {
|
||||
self.auto_approve.read().clone()
|
||||
}
|
||||
|
||||
/// Snapshot runtime always_ask entries.
|
||||
pub fn always_ask_tools(&self) -> HashSet<String> {
|
||||
self.always_ask.read().clone()
|
||||
}
|
||||
|
||||
/// Create a pending non-CLI approval request. If a matching active request
/// already exists for (tool, requester, channel, reply target), returns
/// that existing request instead of creating a duplicate.
///
/// New requests expire 30 minutes after creation; expired entries are
/// pruned before the duplicate check runs.
pub fn create_non_cli_pending_request(
    &self,
    tool_name: &str,
    requested_by: &str,
    requested_channel: &str,
    requested_reply_target: &str,
    reason: Option<String>,
) -> PendingNonCliApprovalRequest {
    let mut pending = self.pending_non_cli_requests.lock();
    prune_expired_pending_requests(&mut pending);

    // Dedupe: re-requesting the same tool from the same scope returns the
    // already-pending request (keeping its original request id and expiry).
    if let Some(existing) = pending
        .values()
        .find(|req| {
            req.tool_name == tool_name
                && req.requested_by == requested_by
                && req.requested_channel == requested_channel
                && req.requested_reply_target == requested_reply_target
        })
        .cloned()
    {
        return existing;
    }

    let now = Utc::now();
    let expires = now + Duration::minutes(30);
    // Id = "apr-" + first 8 hex chars of a v4 UUID; regenerate on the
    // (unlikely) collision with an existing pending id.
    let mut request_id = format!("apr-{}", &Uuid::new_v4().simple().to_string()[..8]);
    while pending.contains_key(&request_id) {
        request_id = format!("apr-{}", &Uuid::new_v4().simple().to_string()[..8]);
    }

    let req = PendingNonCliApprovalRequest {
        request_id: request_id.clone(),
        tool_name: tool_name.to_string(),
        requested_by: requested_by.to_string(),
        requested_channel: requested_channel.to_string(),
        requested_reply_target: requested_reply_target.to_string(),
        reason,
        created_at: now.to_rfc3339(),
        expires_at: expires.to_rfc3339(),
    };
    pending.insert(request_id, req.clone());
    req
}
|
||||
|
||||
/// Confirm a pending non-CLI approval request.
/// Confirmation must come from the same sender in the same channel (and
/// the same reply target) that created the request.
///
/// On success the request is consumed (removed from the pending map).
/// On `RequesterMismatch` the request is put back so the rightful
/// requester can still confirm it later.
pub fn confirm_non_cli_pending_request(
    &self,
    request_id: &str,
    confirmed_by: &str,
    confirmed_channel: &str,
    confirmed_reply_target: &str,
) -> Result<PendingNonCliApprovalRequest, PendingApprovalError> {
    let mut pending = self.pending_non_cli_requests.lock();
    prune_expired_pending_requests(&mut pending);

    // Remove optimistically; it is re-inserted below only on scope mismatch.
    let Some(req) = pending.remove(request_id) else {
        return Err(PendingApprovalError::NotFound);
    };

    // Defensive re-check: prune above should have dropped expired entries,
    // but an entry expiring between checks still reports Expired (and is
    // intentionally not re-inserted).
    if is_pending_request_expired(&req) {
        return Err(PendingApprovalError::Expired);
    }

    if req.requested_by != confirmed_by
        || req.requested_channel != confirmed_channel
        || req.requested_reply_target != confirmed_reply_target
    {
        // Wrong confirmer: restore the request so it remains pending.
        pending.insert(req.request_id.clone(), req);
        return Err(PendingApprovalError::RequesterMismatch);
    }

    Ok(req)
}
|
||||
|
||||
/// List active pending non-CLI approval requests.
///
/// Each filter is optional; `None` means "match any". Expired requests are
/// pruned before listing. Results are ordered by `created_at` (RFC 3339
/// strings, which for the UTC timestamps written here sort chronologically).
pub fn list_non_cli_pending_requests(
    &self,
    requested_by: Option<&str>,
    requested_channel: Option<&str>,
    requested_reply_target: Option<&str>,
) -> Vec<PendingNonCliApprovalRequest> {
    let mut pending = self.pending_non_cli_requests.lock();
    prune_expired_pending_requests(&mut pending);

    let mut rows = pending
        .values()
        .filter(|req| {
            requested_by.map_or(true, |by| req.requested_by == by)
                && requested_channel.map_or(true, |channel| req.requested_channel == channel)
                && requested_reply_target.map_or(true, |reply_target| {
                    req.requested_reply_target == reply_target
                })
        })
        .cloned()
        .collect::<Vec<_>>();
    rows.sort_by(|a, b| a.created_at.cmp(&b.created_at));
    rows
}
|
||||
|
||||
/// Remove all pending requests for a tool.
|
||||
pub fn clear_non_cli_pending_requests_for_tool(&self, tool_name: &str) -> usize {
|
||||
let mut pending = self.pending_non_cli_requests.lock();
|
||||
prune_expired_pending_requests(&mut pending);
|
||||
let before = pending.len();
|
||||
pending.retain(|_, req| req.tool_name != tool_name);
|
||||
before.saturating_sub(pending.len())
|
||||
}
|
||||
|
||||
/// Prompt the user on the CLI and return their decision.
|
||||
///
|
||||
/// For non-CLI channels, returns `Yes` automatically (interactive
|
||||
@ -214,6 +547,20 @@ fn truncate_for_summary(input: &str, max_chars: usize) -> String {
|
||||
}
|
||||
}
|
||||
|
||||
fn is_pending_request_expired(req: &PendingNonCliApprovalRequest) -> bool {
|
||||
chrono::DateTime::parse_from_rfc3339(&req.expires_at)
|
||||
.map(|dt| dt.with_timezone(&Utc) <= Utc::now())
|
||||
.unwrap_or(true)
|
||||
}
|
||||
|
||||
fn prune_expired_pending_requests(
|
||||
pending: &mut HashMap<String, PendingNonCliApprovalRequest>,
|
||||
) -> usize {
|
||||
let before = pending.len();
|
||||
pending.retain(|_, req| !is_pending_request_expired(req));
|
||||
before.saturating_sub(pending.len())
|
||||
}
|
||||
|
||||
// ── Tests ────────────────────────────────────────────────────────
|
||||
|
||||
#[cfg(test)]
|
||||
@ -323,6 +670,248 @@ mod tests {
|
||||
assert!(mgr.needs_approval("file_write"));
|
||||
}
|
||||
|
||||
#[test]
fn non_cli_session_approval_persists_across_checks() {
    let mgr = ApprovalManager::from_config(&supervised_config());
    assert!(!mgr.is_non_cli_session_granted("shell"));

    mgr.grant_non_cli_session("shell");
    // Checked twice: the grant is session state, not consumed on read.
    assert!(mgr.is_non_cli_session_granted("shell"));
    assert!(mgr.is_non_cli_session_granted("shell"));
}
|
||||
|
||||
#[test]
fn non_cli_session_approval_can_be_revoked() {
    let mgr = ApprovalManager::from_config(&supervised_config());
    mgr.grant_non_cli_session("shell");
    assert!(mgr.is_non_cli_session_granted("shell"));

    // First revoke removes the grant; a second revoke reports "not present".
    assert!(mgr.revoke_non_cli_session("shell"));
    assert!(!mgr.is_non_cli_session_granted("shell"));
    assert!(!mgr.revoke_non_cli_session("shell"));
}
|
||||
|
||||
#[test]
fn non_cli_session_allowlist_snapshot_lists_granted_tools() {
    let mgr = ApprovalManager::from_config(&supervised_config());
    mgr.grant_non_cli_session("shell");
    mgr.grant_non_cli_session("file_write");

    // The snapshot reflects every tool granted in this session.
    let allowlist = mgr.non_cli_session_allowlist();
    assert!(allowlist.contains("shell"));
    assert!(allowlist.contains("file_write"));
}
|
||||
|
||||
#[test]
fn persistent_runtime_grant_updates_policy_immediately() {
    let mgr = ApprovalManager::from_config(&supervised_config());
    assert!(mgr.needs_approval("shell"));

    // A runtime grant takes effect without reload: auto_approve gains the
    // tool and always_ask loses it, so approval is no longer needed.
    mgr.apply_persistent_runtime_grant("shell");
    assert!(!mgr.needs_approval("shell"));
    assert!(mgr.auto_approve_tools().contains("shell"));
    assert!(!mgr.always_ask_tools().contains("shell"));
}
|
||||
|
||||
#[test]
fn persistent_runtime_revoke_updates_policy_immediately() {
    let mgr = ApprovalManager::from_config(&supervised_config());
    assert!(!mgr.needs_approval("file_read"));

    // Revoking flips the tool back to needing approval; a second revoke
    // is a no-op and reports false.
    assert!(mgr.apply_persistent_runtime_revoke("file_read"));
    assert!(mgr.needs_approval("file_read"));
    assert!(!mgr.apply_persistent_runtime_revoke("file_read"));
}
|
||||
|
||||
#[test]
fn create_and_confirm_pending_non_cli_approval_request() {
    let mgr = ApprovalManager::from_config(&supervised_config());
    let req = mgr.create_non_cli_pending_request("shell", "alice", "telegram", "chat-1", None);
    assert_eq!(req.tool_name, "shell");
    assert!(req.request_id.starts_with("apr-"));

    let confirmed = mgr
        .confirm_non_cli_pending_request(&req.request_id, "alice", "telegram", "chat-1")
        .expect("request should confirm");
    assert_eq!(confirmed.request_id, req.request_id);
    // Confirmation consumes the request, so confirming again fails.
    assert!(mgr
        .confirm_non_cli_pending_request(&req.request_id, "alice", "telegram", "chat-1")
        .is_err());
}
|
||||
|
||||
#[test]
fn pending_non_cli_approval_requires_same_sender_and_channel() {
    let mgr = ApprovalManager::from_config(&supervised_config());
    let req = mgr.create_non_cli_pending_request("shell", "alice", "telegram", "chat-1", None);

    // Different sender, same channel/target: rejected.
    let err = mgr
        .confirm_non_cli_pending_request(&req.request_id, "bob", "telegram", "chat-1")
        .expect_err("mismatched sender should fail");
    assert_eq!(err, PendingApprovalError::RequesterMismatch);

    // Request remains pending after mismatch.
    let pending =
        mgr.list_non_cli_pending_requests(Some("alice"), Some("telegram"), Some("chat-1"));
    assert_eq!(pending.len(), 1);

    // Same sender, different channel: rejected.
    let err = mgr
        .confirm_non_cli_pending_request(&req.request_id, "alice", "discord", "chat-1")
        .expect_err("mismatched channel should fail");
    assert_eq!(err, PendingApprovalError::RequesterMismatch);

    // Same sender and channel, different reply target: rejected.
    let err = mgr
        .confirm_non_cli_pending_request(&req.request_id, "alice", "telegram", "chat-2")
        .expect_err("mismatched reply target should fail");
    assert_eq!(err, PendingApprovalError::RequesterMismatch);
}
|
||||
|
||||
#[test]
|
||||
fn list_pending_non_cli_approvals_filters_scope() {
|
||||
let mgr = ApprovalManager::from_config(&supervised_config());
|
||||
mgr.create_non_cli_pending_request("shell", "alice", "telegram", "chat-1", None);
|
||||
mgr.create_non_cli_pending_request("file_write", "bob", "telegram", "chat-1", None);
|
||||
mgr.create_non_cli_pending_request("browser_open", "alice", "discord", "chat-9", None);
|
||||
mgr.create_non_cli_pending_request("schedule", "alice", "telegram", "chat-2", None);
|
||||
|
||||
let alice_telegram =
|
||||
mgr.list_non_cli_pending_requests(Some("alice"), Some("telegram"), Some("chat-1"));
|
||||
assert_eq!(alice_telegram.len(), 1);
|
||||
assert_eq!(alice_telegram[0].tool_name, "shell");
|
||||
|
||||
let telegram_chat1 =
|
||||
mgr.list_non_cli_pending_requests(None, Some("telegram"), Some("chat-1"));
|
||||
assert_eq!(telegram_chat1.len(), 2);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn pending_non_cli_approval_expiry_is_pruned() {
|
||||
let mgr = ApprovalManager::from_config(&supervised_config());
|
||||
let req = mgr.create_non_cli_pending_request("shell", "alice", "telegram", "chat-1", None);
|
||||
|
||||
{
|
||||
let mut pending = mgr.pending_non_cli_requests.lock();
|
||||
let row = pending.get_mut(&req.request_id).expect("request row");
|
||||
row.expires_at = (Utc::now() - Duration::minutes(1)).to_rfc3339();
|
||||
}
|
||||
|
||||
let rows = mgr.list_non_cli_pending_requests(None, None, None);
|
||||
assert!(rows.is_empty());
|
||||
let err = mgr
|
||||
.confirm_non_cli_pending_request(&req.request_id, "alice", "telegram", "chat-1")
|
||||
.expect_err("expired request should not confirm");
|
||||
assert_eq!(err, PendingApprovalError::NotFound);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn non_cli_approval_actor_defaults_to_allow_when_not_configured() {
|
||||
let mgr = ApprovalManager::from_config(&supervised_config());
|
||||
assert!(mgr.is_non_cli_approval_actor_allowed("telegram", "alice"));
|
||||
assert!(mgr.is_non_cli_approval_actor_allowed("discord", "bob"));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn non_cli_natural_language_approval_mode_defaults_to_direct() {
|
||||
let mgr = ApprovalManager::from_config(&supervised_config());
|
||||
assert_eq!(
|
||||
mgr.non_cli_natural_language_approval_mode(),
|
||||
NonCliNaturalLanguageApprovalMode::Direct
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn non_cli_approval_actor_allowlist_supports_exact_and_wildcards() {
|
||||
let mut cfg = supervised_config();
|
||||
cfg.non_cli_approval_approvers = vec![
|
||||
"alice".to_string(),
|
||||
"telegram:bob".to_string(),
|
||||
"discord:*".to_string(),
|
||||
"*:carol".to_string(),
|
||||
];
|
||||
let mgr = ApprovalManager::from_config(&cfg);
|
||||
|
||||
assert!(mgr.is_non_cli_approval_actor_allowed("telegram", "alice"));
|
||||
assert!(mgr.is_non_cli_approval_actor_allowed("telegram", "bob"));
|
||||
assert!(mgr.is_non_cli_approval_actor_allowed("discord", "anyone"));
|
||||
assert!(mgr.is_non_cli_approval_actor_allowed("matrix", "carol"));
|
||||
|
||||
assert!(!mgr.is_non_cli_approval_actor_allowed("telegram", "mallory"));
|
||||
assert!(!mgr.is_non_cli_approval_actor_allowed("matrix", "bob"));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn non_cli_natural_language_approval_mode_honors_config_override() {
|
||||
let mut cfg = supervised_config();
|
||||
cfg.non_cli_natural_language_approval_mode =
|
||||
NonCliNaturalLanguageApprovalMode::RequestConfirm;
|
||||
let mgr = ApprovalManager::from_config(&cfg);
|
||||
assert_eq!(
|
||||
mgr.non_cli_natural_language_approval_mode(),
|
||||
NonCliNaturalLanguageApprovalMode::RequestConfirm
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn non_cli_natural_language_approval_mode_supports_per_channel_override() {
|
||||
let mut cfg = supervised_config();
|
||||
cfg.non_cli_natural_language_approval_mode = NonCliNaturalLanguageApprovalMode::Direct;
|
||||
cfg.non_cli_natural_language_approval_mode_by_channel
|
||||
.insert(
|
||||
"discord".to_string(),
|
||||
NonCliNaturalLanguageApprovalMode::RequestConfirm,
|
||||
);
|
||||
let mgr = ApprovalManager::from_config(&cfg);
|
||||
|
||||
assert_eq!(
|
||||
mgr.non_cli_natural_language_approval_mode_for_channel("telegram"),
|
||||
NonCliNaturalLanguageApprovalMode::Direct
|
||||
);
|
||||
assert_eq!(
|
||||
mgr.non_cli_natural_language_approval_mode_for_channel("discord"),
|
||||
NonCliNaturalLanguageApprovalMode::RequestConfirm
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn replace_runtime_non_cli_policy_updates_modes_and_approvers() {
|
||||
let cfg = supervised_config();
|
||||
let mgr = ApprovalManager::from_config(&cfg);
|
||||
|
||||
let mut mode_overrides = HashMap::new();
|
||||
mode_overrides.insert(
|
||||
"telegram".to_string(),
|
||||
NonCliNaturalLanguageApprovalMode::Disabled,
|
||||
);
|
||||
mode_overrides.insert(
|
||||
"discord".to_string(),
|
||||
NonCliNaturalLanguageApprovalMode::RequestConfirm,
|
||||
);
|
||||
|
||||
mgr.replace_runtime_non_cli_policy(
|
||||
&["mock_price".to_string()],
|
||||
&["shell".to_string()],
|
||||
&["telegram:alice".to_string()],
|
||||
NonCliNaturalLanguageApprovalMode::Direct,
|
||||
&mode_overrides,
|
||||
);
|
||||
|
||||
assert!(!mgr.needs_approval("mock_price"));
|
||||
assert!(mgr.needs_approval("shell"));
|
||||
assert!(mgr.is_non_cli_approval_actor_allowed("telegram", "alice"));
|
||||
assert!(!mgr.is_non_cli_approval_actor_allowed("telegram", "bob"));
|
||||
assert_eq!(
|
||||
mgr.non_cli_natural_language_approval_mode_for_channel("telegram"),
|
||||
NonCliNaturalLanguageApprovalMode::Disabled
|
||||
);
|
||||
assert_eq!(
|
||||
mgr.non_cli_natural_language_approval_mode_for_channel("discord"),
|
||||
NonCliNaturalLanguageApprovalMode::RequestConfirm
|
||||
);
|
||||
assert_eq!(
|
||||
mgr.non_cli_natural_language_approval_mode_for_channel("slack"),
|
||||
NonCliNaturalLanguageApprovalMode::Direct
|
||||
);
|
||||
}
|
||||
|
||||
// ── audit log ────────────────────────────────────────────
|
||||
|
||||
#[test]
|
||||
|
||||
2335
src/channels/mod.rs
2335
src/channels/mod.rs
File diff suppressed because it is too large
Load Diff
@ -202,6 +202,128 @@ fn is_http_url(target: &str) -> bool {
|
||||
target.starts_with("http://") || target.starts_with("https://")
|
||||
}
|
||||
|
||||
fn sanitize_attachment_filename(file_name: &str) -> Option<String> {
|
||||
let basename = Path::new(file_name).file_name()?.to_str()?.trim();
|
||||
if basename.is_empty() || basename == "." || basename == ".." {
|
||||
return None;
|
||||
}
|
||||
|
||||
let sanitized: String = basename
|
||||
.replace(['/', '\\'], "_")
|
||||
.chars()
|
||||
.take(128)
|
||||
.collect();
|
||||
if sanitized.is_empty() || sanitized == "." || sanitized == ".." {
|
||||
None
|
||||
} else {
|
||||
Some(sanitized)
|
||||
}
|
||||
}
|
||||
|
||||
fn sanitize_generated_extension(raw_ext: &str) -> String {
|
||||
let cleaned: String = raw_ext
|
||||
.chars()
|
||||
.filter(|c| c.is_ascii_alphanumeric())
|
||||
.take(8)
|
||||
.collect::<String>()
|
||||
.to_ascii_lowercase();
|
||||
if cleaned.is_empty() {
|
||||
"jpg".to_string()
|
||||
} else {
|
||||
cleaned
|
||||
}
|
||||
}
|
||||
|
||||
fn resolve_workspace_attachment_path(workspace: &Path, target: &str) -> anyhow::Result<PathBuf> {
|
||||
if target.contains('\0') {
|
||||
anyhow::bail!("Telegram attachment path contains null byte");
|
||||
}
|
||||
|
||||
let workspace_root = workspace
|
||||
.canonicalize()
|
||||
.unwrap_or_else(|_| workspace.to_path_buf());
|
||||
|
||||
let candidate = if let Some(rel) = target.strip_prefix("/workspace/") {
|
||||
workspace.join(rel)
|
||||
} else if target == "/workspace" {
|
||||
workspace.to_path_buf()
|
||||
} else {
|
||||
let raw = Path::new(target);
|
||||
if raw.is_absolute() {
|
||||
raw.to_path_buf()
|
||||
} else {
|
||||
workspace.join(raw)
|
||||
}
|
||||
};
|
||||
|
||||
let resolved = candidate
|
||||
.canonicalize()
|
||||
.with_context(|| format!("Telegram attachment path not found: {target}"))?;
|
||||
|
||||
if !resolved.starts_with(&workspace_root) {
|
||||
anyhow::bail!("Telegram attachment path escapes workspace: {target}");
|
||||
}
|
||||
if !resolved.is_file() {
|
||||
anyhow::bail!(
|
||||
"Telegram attachment path is not a file: {}",
|
||||
resolved.display()
|
||||
);
|
||||
}
|
||||
|
||||
Ok(resolved)
|
||||
}
|
||||
|
||||
async fn resolve_workspace_attachment_output_path(
|
||||
workspace: &Path,
|
||||
file_name: &str,
|
||||
) -> anyhow::Result<PathBuf> {
|
||||
let safe_name = sanitize_attachment_filename(file_name)
|
||||
.ok_or_else(|| anyhow::anyhow!("invalid attachment filename: {file_name}"))?;
|
||||
|
||||
fs::create_dir_all(workspace).await?;
|
||||
let workspace_root = fs::canonicalize(workspace)
|
||||
.await
|
||||
.unwrap_or_else(|_| workspace.to_path_buf());
|
||||
|
||||
let save_dir = workspace.join("telegram_files");
|
||||
fs::create_dir_all(&save_dir).await?;
|
||||
let resolved_save_dir = fs::canonicalize(&save_dir).await.with_context(|| {
|
||||
format!(
|
||||
"failed to resolve Telegram attachment save directory: {}",
|
||||
save_dir.display()
|
||||
)
|
||||
})?;
|
||||
|
||||
if !resolved_save_dir.starts_with(&workspace_root) {
|
||||
anyhow::bail!(
|
||||
"Telegram attachment save directory escapes workspace: {}",
|
||||
save_dir.display()
|
||||
);
|
||||
}
|
||||
|
||||
let output_path = resolved_save_dir.join(safe_name);
|
||||
match fs::symlink_metadata(&output_path).await {
|
||||
Ok(meta) => {
|
||||
if meta.file_type().is_symlink() {
|
||||
anyhow::bail!(
|
||||
"refusing to write Telegram attachment through symlink: {}",
|
||||
output_path.display()
|
||||
);
|
||||
}
|
||||
if !meta.is_file() {
|
||||
anyhow::bail!(
|
||||
"Telegram attachment output path is not a regular file: {}",
|
||||
output_path.display()
|
||||
);
|
||||
}
|
||||
}
|
||||
Err(e) if e.kind() == std::io::ErrorKind::NotFound => {}
|
||||
Err(e) => return Err(e.into()),
|
||||
}
|
||||
|
||||
Ok(output_path)
|
||||
}
|
||||
|
||||
fn infer_attachment_kind_from_target(target: &str) -> Option<TelegramAttachmentKind> {
|
||||
let normalized = target
|
||||
.split('?')
|
||||
@ -1139,12 +1261,6 @@ Allowlist Telegram username (without '@') or numeric user ID.",
|
||||
None
|
||||
})?;
|
||||
|
||||
let save_dir = workspace.join("telegram_files");
|
||||
if let Err(e) = tokio::fs::create_dir_all(&save_dir).await {
|
||||
tracing::warn!("Failed to create telegram_files directory: {e}");
|
||||
return None;
|
||||
}
|
||||
|
||||
// Download file from Telegram
|
||||
let tg_file_path = match self.get_file_path(&attachment.file_id).await {
|
||||
Ok(p) => p,
|
||||
@ -1164,15 +1280,27 @@ Allowlist Telegram username (without '@') or numeric user ID.",
|
||||
|
||||
// Determine local filename
|
||||
let local_filename = match &attachment.file_name {
|
||||
Some(name) => name.clone(),
|
||||
Some(name) => sanitize_attachment_filename(name)
|
||||
.unwrap_or_else(|| format!("attachment_{chat_id}_{message_id}.bin")),
|
||||
None => {
|
||||
// For photos, derive extension from Telegram file path
|
||||
let ext = tg_file_path.rsplit('.').next().unwrap_or("jpg");
|
||||
let ext =
|
||||
sanitize_generated_extension(tg_file_path.rsplit('.').next().unwrap_or("jpg"));
|
||||
format!("photo_{chat_id}_{message_id}.{ext}")
|
||||
}
|
||||
};
|
||||
|
||||
let local_path = save_dir.join(&local_filename);
|
||||
let local_path =
|
||||
match resolve_workspace_attachment_output_path(workspace, &local_filename).await {
|
||||
Ok(path) => path,
|
||||
Err(e) => {
|
||||
tracing::warn!(
|
||||
"Failed to resolve attachment output path for {}: {e}",
|
||||
local_filename
|
||||
);
|
||||
return None;
|
||||
}
|
||||
};
|
||||
if let Err(e) = tokio::fs::write(&local_path, &file_data).await {
|
||||
tracing::warn!("Failed to save attachment to {}: {e}", local_path.display());
|
||||
return None;
|
||||
@ -1909,34 +2037,19 @@ Allowlist Telegram username (without '@') or numeric user ID.",
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
// Remap Docker container workspace path (/workspace/...) to the host
|
||||
// workspace directory so files written by the containerised runtime
|
||||
// can be found and sent by the host-side Telegram sender.
|
||||
let remapped;
|
||||
let target = if let Some(rel) = target.strip_prefix("/workspace/") {
|
||||
if let Some(ws) = &self.workspace_dir {
|
||||
remapped = ws.join(rel);
|
||||
remapped.to_str().unwrap_or(target)
|
||||
} else {
|
||||
target
|
||||
}
|
||||
} else {
|
||||
target
|
||||
};
|
||||
|
||||
let path = Path::new(target);
|
||||
if !path.exists() {
|
||||
anyhow::bail!("Telegram attachment path not found: {target}");
|
||||
}
|
||||
let workspace = self.workspace_dir.as_ref().ok_or_else(|| {
|
||||
anyhow::anyhow!("workspace_dir is not configured; local file attachments are disabled")
|
||||
})?;
|
||||
let path = resolve_workspace_attachment_path(workspace, target)?;
|
||||
|
||||
match attachment.kind {
|
||||
TelegramAttachmentKind::Image => self.send_photo(chat_id, thread_id, path, None).await,
|
||||
TelegramAttachmentKind::Image => self.send_photo(chat_id, thread_id, &path, None).await,
|
||||
TelegramAttachmentKind::Document => {
|
||||
self.send_document(chat_id, thread_id, path, None).await
|
||||
self.send_document(chat_id, thread_id, &path, None).await
|
||||
}
|
||||
TelegramAttachmentKind::Video => self.send_video(chat_id, thread_id, path, None).await,
|
||||
TelegramAttachmentKind::Audio => self.send_audio(chat_id, thread_id, path, None).await,
|
||||
TelegramAttachmentKind::Voice => self.send_voice(chat_id, thread_id, path, None).await,
|
||||
TelegramAttachmentKind::Video => self.send_video(chat_id, thread_id, &path, None).await,
|
||||
TelegramAttachmentKind::Audio => self.send_audio(chat_id, thread_id, &path, None).await,
|
||||
TelegramAttachmentKind::Voice => self.send_voice(chat_id, thread_id, &path, None).await,
|
||||
}
|
||||
}
|
||||
|
||||
@ -2930,6 +3043,27 @@ Ensure only one `zeroclaw` process is using this bot token."
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
use std::path::Path;
|
||||
|
||||
#[cfg(unix)]
|
||||
fn symlink_file(src: &Path, dst: &Path) {
|
||||
std::os::unix::fs::symlink(src, dst).expect("symlink should be created");
|
||||
}
|
||||
|
||||
#[cfg(windows)]
|
||||
fn symlink_file(src: &Path, dst: &Path) {
|
||||
std::os::windows::fs::symlink_file(src, dst).expect("symlink should be created");
|
||||
}
|
||||
|
||||
#[cfg(unix)]
|
||||
fn symlink_dir(src: &Path, dst: &Path) {
|
||||
std::os::unix::fs::symlink(src, dst).expect("symlink should be created");
|
||||
}
|
||||
|
||||
#[cfg(windows)]
|
||||
fn symlink_dir(src: &Path, dst: &Path) {
|
||||
std::os::windows::fs::symlink_dir(src, dst).expect("symlink should be created");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn telegram_channel_name() {
|
||||
@ -3274,6 +3408,94 @@ mod tests {
|
||||
assert!(parse_path_only_attachment("Screenshot saved to /tmp/snap.png").is_none());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn sanitize_attachment_filename_strips_path_traversal() {
|
||||
assert_eq!(
|
||||
sanitize_attachment_filename("../../tmp/evil.txt").as_deref(),
|
||||
Some("evil.txt")
|
||||
);
|
||||
assert_eq!(
|
||||
sanitize_attachment_filename(r"..\\..\\secrets\\token.env").as_deref(),
|
||||
Some("..__..__secrets__token.env")
|
||||
);
|
||||
assert!(sanitize_attachment_filename("..").is_none());
|
||||
assert!(sanitize_attachment_filename("").is_none());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn resolve_workspace_attachment_path_rejects_escape_and_accepts_workspace_file() {
|
||||
let temp = tempfile::tempdir().expect("tempdir");
|
||||
let workspace = temp.path().join("workspace");
|
||||
std::fs::create_dir_all(&workspace).expect("workspace should exist");
|
||||
|
||||
let in_workspace = workspace.join("report.txt");
|
||||
std::fs::write(&in_workspace, b"ok").expect("workspace fixture should be written");
|
||||
let resolved = resolve_workspace_attachment_path(&workspace, "report.txt")
|
||||
.expect("workspace relative path should resolve");
|
||||
assert!(resolved.starts_with(workspace.canonicalize().unwrap_or(workspace.clone())));
|
||||
|
||||
let outside = temp.path().join("outside.txt");
|
||||
std::fs::write(&outside, b"secret").expect("outside fixture should be written");
|
||||
let escaped =
|
||||
resolve_workspace_attachment_path(&workspace, outside.to_string_lossy().as_ref());
|
||||
assert!(escaped.is_err(), "outside workspace path must be rejected");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn resolve_workspace_attachment_path_accepts_workspace_prefix_mapping() {
|
||||
let temp = tempfile::tempdir().expect("tempdir");
|
||||
let workspace = temp.path().join("workspace");
|
||||
std::fs::create_dir_all(workspace.join("sub")).expect("workspace dir should exist");
|
||||
let nested = workspace.join("sub/file.txt");
|
||||
std::fs::write(&nested, b"content").expect("fixture should be written");
|
||||
|
||||
let resolved = resolve_workspace_attachment_path(&workspace, "/workspace/sub/file.txt")
|
||||
.expect("/workspace prefix should map to workspace root");
|
||||
assert_eq!(
|
||||
resolved,
|
||||
nested
|
||||
.canonicalize()
|
||||
.expect("canonical path should resolve")
|
||||
);
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn resolve_workspace_attachment_output_path_rejects_symlinked_save_dir() {
|
||||
let temp = tempfile::tempdir().expect("tempdir");
|
||||
let workspace = temp.path().join("workspace");
|
||||
tokio::fs::create_dir_all(&workspace)
|
||||
.await
|
||||
.expect("workspace dir should exist");
|
||||
|
||||
let outside = temp.path().join("outside");
|
||||
tokio::fs::create_dir_all(&outside)
|
||||
.await
|
||||
.expect("outside dir should exist");
|
||||
symlink_dir(&outside, &workspace.join("telegram_files"));
|
||||
|
||||
let result = resolve_workspace_attachment_output_path(&workspace, "doc.txt").await;
|
||||
assert!(result.is_err(), "symlinked save dir must be rejected");
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn resolve_workspace_attachment_output_path_rejects_symlink_target_file() {
|
||||
let temp = tempfile::tempdir().expect("tempdir");
|
||||
let workspace = temp.path().join("workspace");
|
||||
let save_dir = workspace.join("telegram_files");
|
||||
tokio::fs::create_dir_all(&save_dir)
|
||||
.await
|
||||
.expect("save dir should exist");
|
||||
|
||||
let outside = temp.path().join("outside.txt");
|
||||
tokio::fs::write(&outside, b"secret")
|
||||
.await
|
||||
.expect("outside fixture should be written");
|
||||
symlink_file(&outside, &save_dir.join("doc.txt"));
|
||||
|
||||
let result = resolve_workspace_attachment_output_path(&workspace, "doc.txt").await;
|
||||
assert!(result.is_err(), "symlink target file must be rejected");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn infer_attachment_kind_from_target_detects_document_extension() {
|
||||
assert_eq!(
|
||||
|
||||
@ -2240,6 +2240,29 @@ pub struct BuiltinHooksConfig {
|
||||
|
||||
// ── Autonomy / Security ──────────────────────────────────────────
|
||||
|
||||
/// Natural-language behavior for non-CLI approval-management commands.
|
||||
#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize, JsonSchema)]
|
||||
#[serde(rename_all = "snake_case")]
|
||||
pub enum NonCliNaturalLanguageApprovalMode {
|
||||
/// Do not treat natural-language text as approval-management commands.
|
||||
/// Operators must use explicit slash commands.
|
||||
Disabled,
|
||||
/// Natural-language approval phrases create a pending request that must be
|
||||
/// confirmed with a request ID.
|
||||
RequestConfirm,
|
||||
/// Natural-language approval phrases directly approve the named tool.
|
||||
///
|
||||
/// This keeps private-chat workflows simple while still requiring a human
|
||||
/// sender and passing the same approver allowlist checks as slash commands.
|
||||
Direct,
|
||||
}
|
||||
|
||||
impl Default for NonCliNaturalLanguageApprovalMode {
|
||||
fn default() -> Self {
|
||||
Self::Direct
|
||||
}
|
||||
}
|
||||
|
||||
/// Autonomy and security policy configuration (`[autonomy]` section).
|
||||
///
|
||||
/// Controls what the agent is allowed to do: shell commands, filesystem access,
|
||||
@ -2295,6 +2318,41 @@ pub struct AutonomyConfig {
|
||||
/// model in tool specs.
|
||||
#[serde(default = "default_non_cli_excluded_tools")]
|
||||
pub non_cli_excluded_tools: Vec<String>,
|
||||
|
||||
/// Optional allowlist for who can manage non-CLI approval commands.
|
||||
///
|
||||
/// When empty, any sender already admitted by the channel allowlist can
|
||||
/// use approval-management commands.
|
||||
///
|
||||
/// Supported entry formats:
|
||||
/// - `"*"`: allow any sender on any channel
|
||||
/// - `"alice"`: allow sender `alice` on any channel
|
||||
/// - `"telegram:alice"`: allow sender `alice` only on `telegram`
|
||||
/// - `"telegram:*"`: allow any sender on `telegram`
|
||||
/// - `"*:alice"`: allow sender `alice` on any channel
|
||||
#[serde(default)]
|
||||
pub non_cli_approval_approvers: Vec<String>,
|
||||
|
||||
/// Natural-language handling mode for non-CLI approval-management commands.
|
||||
///
|
||||
/// Values:
|
||||
/// - `direct` (default): phrases like `授权工具 shell` immediately approve.
|
||||
/// - `request_confirm`: phrases create pending requests requiring confirm.
|
||||
/// - `disabled`: ignore natural-language approval commands (slash only).
|
||||
#[serde(default)]
|
||||
pub non_cli_natural_language_approval_mode: NonCliNaturalLanguageApprovalMode,
|
||||
|
||||
/// Optional per-channel override for natural-language approval mode.
|
||||
///
|
||||
/// Keys are channel names (for example: `telegram`, `discord`, `slack`).
|
||||
/// Values use the same enum as `non_cli_natural_language_approval_mode`.
|
||||
///
|
||||
/// Example:
|
||||
/// - `telegram = "direct"` for private-chat ergonomics
|
||||
/// - `discord = "request_confirm"` for stricter team channels
|
||||
#[serde(default)]
|
||||
pub non_cli_natural_language_approval_mode_by_channel:
|
||||
HashMap<String, NonCliNaturalLanguageApprovalMode>,
|
||||
}
|
||||
|
||||
fn default_auto_approve() -> Vec<String> {
|
||||
@ -5382,6 +5440,21 @@ impl Config {
|
||||
&mut config.composio.api_key,
|
||||
"config.composio.api_key",
|
||||
)?;
|
||||
decrypt_optional_secret(
|
||||
&store,
|
||||
&mut config.proxy.http_proxy,
|
||||
"config.proxy.http_proxy",
|
||||
)?;
|
||||
decrypt_optional_secret(
|
||||
&store,
|
||||
&mut config.proxy.https_proxy,
|
||||
"config.proxy.https_proxy",
|
||||
)?;
|
||||
decrypt_optional_secret(
|
||||
&store,
|
||||
&mut config.proxy.all_proxy,
|
||||
"config.proxy.all_proxy",
|
||||
)?;
|
||||
|
||||
decrypt_optional_secret(
|
||||
&store,
|
||||
@ -6201,6 +6274,21 @@ impl Config {
|
||||
&mut config_to_save.composio.api_key,
|
||||
"config.composio.api_key",
|
||||
)?;
|
||||
encrypt_optional_secret(
|
||||
&store,
|
||||
&mut config_to_save.proxy.http_proxy,
|
||||
"config.proxy.http_proxy",
|
||||
)?;
|
||||
encrypt_optional_secret(
|
||||
&store,
|
||||
&mut config_to_save.proxy.https_proxy,
|
||||
"config.proxy.https_proxy",
|
||||
)?;
|
||||
encrypt_optional_secret(
|
||||
&store,
|
||||
&mut config_to_save.proxy.all_proxy,
|
||||
"config.proxy.all_proxy",
|
||||
)?;
|
||||
|
||||
encrypt_optional_secret(
|
||||
&store,
|
||||
@ -6728,6 +6816,10 @@ default_temperature = 0.7
|
||||
always_ask: vec![],
|
||||
allowed_roots: vec![],
|
||||
non_cli_excluded_tools: vec![],
|
||||
non_cli_approval_approvers: vec![],
|
||||
non_cli_natural_language_approval_mode:
|
||||
NonCliNaturalLanguageApprovalMode::RequestConfirm,
|
||||
non_cli_natural_language_approval_mode_by_channel: HashMap::new(),
|
||||
},
|
||||
security: SecurityConfig::default(),
|
||||
runtime: RuntimeConfig {
|
||||
@ -7206,6 +7298,9 @@ tool_dispatcher = "xml"
|
||||
config.config_path = dir.join("config.toml");
|
||||
config.api_key = Some("root-credential".into());
|
||||
config.composio.api_key = Some("composio-credential".into());
|
||||
config.proxy.http_proxy = Some("http://user:pass@proxy.internal:8080".into());
|
||||
config.proxy.https_proxy = Some("https://user:pass@proxy.internal:8443".into());
|
||||
config.proxy.all_proxy = Some("socks5://user:pass@proxy.internal:1080".into());
|
||||
config.browser.computer_use.api_key = Some("browser-credential".into());
|
||||
config.web_search.brave_api_key = Some("brave-credential".into());
|
||||
config.storage.provider.config.db_url = Some("postgres://user:pw@host/db".into());
|
||||
@ -7257,6 +7352,31 @@ tool_dispatcher = "xml"
|
||||
"composio-credential"
|
||||
);
|
||||
|
||||
let proxy_http_encrypted = stored.proxy.http_proxy.as_deref().unwrap();
|
||||
assert!(crate::security::SecretStore::is_encrypted(
|
||||
proxy_http_encrypted
|
||||
));
|
||||
assert_eq!(
|
||||
store.decrypt(proxy_http_encrypted).unwrap(),
|
||||
"http://user:pass@proxy.internal:8080"
|
||||
);
|
||||
let proxy_https_encrypted = stored.proxy.https_proxy.as_deref().unwrap();
|
||||
assert!(crate::security::SecretStore::is_encrypted(
|
||||
proxy_https_encrypted
|
||||
));
|
||||
assert_eq!(
|
||||
store.decrypt(proxy_https_encrypted).unwrap(),
|
||||
"https://user:pass@proxy.internal:8443"
|
||||
);
|
||||
let proxy_all_encrypted = stored.proxy.all_proxy.as_deref().unwrap();
|
||||
assert!(crate::security::SecretStore::is_encrypted(
|
||||
proxy_all_encrypted
|
||||
));
|
||||
assert_eq!(
|
||||
store.decrypt(proxy_all_encrypted).unwrap(),
|
||||
"socks5://user:pass@proxy.internal:1080"
|
||||
);
|
||||
|
||||
let browser_encrypted = stored.browser.computer_use.api_key.as_deref().unwrap();
|
||||
assert!(crate::security::SecretStore::is_encrypted(
|
||||
browser_encrypted
|
||||
|
||||
@ -316,7 +316,8 @@ pub(crate) async fn deliver_announcement(
|
||||
tg.bot_token.clone(),
|
||||
tg.allowed_users.clone(),
|
||||
tg.mention_only,
|
||||
);
|
||||
)
|
||||
.with_workspace_dir(config.workspace_dir.clone());
|
||||
channel.send(&SendMessage::new(output, target)).await?;
|
||||
}
|
||||
"discord" => {
|
||||
@ -331,7 +332,8 @@ pub(crate) async fn deliver_announcement(
|
||||
dc.allowed_users.clone(),
|
||||
dc.listen_to_bots,
|
||||
dc.mention_only,
|
||||
);
|
||||
)
|
||||
.with_workspace_dir(config.workspace_dir.clone());
|
||||
channel.send(&SendMessage::new(output, target)).await?;
|
||||
}
|
||||
"slack" => {
|
||||
|
||||
@ -604,11 +604,20 @@ fn mask_sensitive_fields(config: &crate::config::Config) -> crate::config::Confi
|
||||
mask_optional_secret(&mut masked.api_key);
|
||||
mask_vec_secrets(&mut masked.reliability.api_keys);
|
||||
mask_optional_secret(&mut masked.composio.api_key);
|
||||
mask_optional_secret(&mut masked.proxy.http_proxy);
|
||||
mask_optional_secret(&mut masked.proxy.https_proxy);
|
||||
mask_optional_secret(&mut masked.proxy.all_proxy);
|
||||
mask_optional_secret(&mut masked.browser.computer_use.api_key);
|
||||
mask_optional_secret(&mut masked.web_fetch.api_key);
|
||||
mask_optional_secret(&mut masked.web_search.api_key);
|
||||
mask_optional_secret(&mut masked.web_search.brave_api_key);
|
||||
mask_optional_secret(&mut masked.storage.provider.config.db_url);
|
||||
if let Some(cloudflare) = masked.tunnel.cloudflare.as_mut() {
|
||||
mask_required_secret(&mut cloudflare.token);
|
||||
}
|
||||
if let Some(ngrok) = masked.tunnel.ngrok.as_mut() {
|
||||
mask_required_secret(&mut ngrok.auth_token);
|
||||
}
|
||||
|
||||
for agent in masked.agents.values_mut() {
|
||||
mask_optional_secret(&mut agent.api_key);
|
||||
@ -642,10 +651,16 @@ fn mask_sensitive_fields(config: &crate::config::Config) -> crate::config::Confi
|
||||
mask_required_secret(&mut linq.api_token);
|
||||
mask_optional_secret(&mut linq.signing_secret);
|
||||
}
|
||||
if let Some(wati) = masked.channels_config.wati.as_mut() {
|
||||
mask_required_secret(&mut wati.api_token);
|
||||
}
|
||||
if let Some(nextcloud) = masked.channels_config.nextcloud_talk.as_mut() {
|
||||
mask_required_secret(&mut nextcloud.app_token);
|
||||
mask_optional_secret(&mut nextcloud.webhook_secret);
|
||||
}
|
||||
if let Some(email) = masked.channels_config.email.as_mut() {
|
||||
mask_required_secret(&mut email.password);
|
||||
}
|
||||
if let Some(irc) = masked.channels_config.irc.as_mut() {
|
||||
mask_optional_secret(&mut irc.server_password);
|
||||
mask_optional_secret(&mut irc.nickserv_password);
|
||||
@ -656,6 +671,11 @@ fn mask_sensitive_fields(config: &crate::config::Config) -> crate::config::Confi
|
||||
mask_optional_secret(&mut lark.encrypt_key);
|
||||
mask_optional_secret(&mut lark.verification_token);
|
||||
}
|
||||
if let Some(feishu) = masked.channels_config.feishu.as_mut() {
|
||||
mask_required_secret(&mut feishu.app_secret);
|
||||
mask_optional_secret(&mut feishu.encrypt_key);
|
||||
mask_optional_secret(&mut feishu.verification_token);
|
||||
}
|
||||
if let Some(dingtalk) = masked.channels_config.dingtalk.as_mut() {
|
||||
mask_required_secret(&mut dingtalk.client_secret);
|
||||
}
|
||||
@ -682,6 +702,9 @@ fn restore_masked_sensitive_fields(
|
||||
¤t.reliability.api_keys,
|
||||
);
|
||||
restore_optional_secret(&mut incoming.composio.api_key, ¤t.composio.api_key);
|
||||
restore_optional_secret(&mut incoming.proxy.http_proxy, ¤t.proxy.http_proxy);
|
||||
restore_optional_secret(&mut incoming.proxy.https_proxy, ¤t.proxy.https_proxy);
|
||||
restore_optional_secret(&mut incoming.proxy.all_proxy, ¤t.proxy.all_proxy);
|
||||
restore_optional_secret(
|
||||
&mut incoming.browser.computer_use.api_key,
|
||||
¤t.browser.computer_use.api_key,
|
||||
@ -699,6 +722,18 @@ fn restore_masked_sensitive_fields(
|
||||
&mut incoming.storage.provider.config.db_url,
|
||||
¤t.storage.provider.config.db_url,
|
||||
);
|
||||
if let (Some(incoming_tunnel), Some(current_tunnel)) = (
|
||||
incoming.tunnel.cloudflare.as_mut(),
|
||||
current.tunnel.cloudflare.as_ref(),
|
||||
) {
|
||||
restore_required_secret(&mut incoming_tunnel.token, ¤t_tunnel.token);
|
||||
}
|
||||
if let (Some(incoming_tunnel), Some(current_tunnel)) = (
|
||||
incoming.tunnel.ngrok.as_mut(),
|
||||
current.tunnel.ngrok.as_ref(),
|
||||
) {
|
||||
restore_required_secret(&mut incoming_tunnel.auth_token, ¤t_tunnel.auth_token);
|
||||
}
|
||||
|
||||
for (name, agent) in &mut incoming.agents {
|
||||
if let Some(current_agent) = current.agents.get(name) {
|
||||
@ -758,6 +793,12 @@ fn restore_masked_sensitive_fields(
|
||||
restore_required_secret(&mut incoming_ch.api_token, ¤t_ch.api_token);
|
||||
restore_optional_secret(&mut incoming_ch.signing_secret, ¤t_ch.signing_secret);
|
||||
}
|
||||
if let (Some(incoming_ch), Some(current_ch)) = (
|
||||
incoming.channels_config.wati.as_mut(),
|
||||
current.channels_config.wati.as_ref(),
|
||||
) {
|
||||
restore_required_secret(&mut incoming_ch.api_token, ¤t_ch.api_token);
|
||||
}
|
||||
if let (Some(incoming_ch), Some(current_ch)) = (
|
||||
incoming.channels_config.nextcloud_talk.as_mut(),
|
||||
current.channels_config.nextcloud_talk.as_ref(),
|
||||
@ -765,6 +806,12 @@ fn restore_masked_sensitive_fields(
|
||||
restore_required_secret(&mut incoming_ch.app_token, ¤t_ch.app_token);
|
||||
restore_optional_secret(&mut incoming_ch.webhook_secret, ¤t_ch.webhook_secret);
|
||||
}
|
||||
if let (Some(incoming_ch), Some(current_ch)) = (
|
||||
incoming.channels_config.email.as_mut(),
|
||||
current.channels_config.email.as_ref(),
|
||||
) {
|
||||
restore_required_secret(&mut incoming_ch.password, ¤t_ch.password);
|
||||
}
|
||||
if let (Some(incoming_ch), Some(current_ch)) = (
|
||||
incoming.channels_config.irc.as_mut(),
|
||||
current.channels_config.irc.as_ref(),
|
||||
@ -790,6 +837,17 @@ fn restore_masked_sensitive_fields(
|
||||
¤t_ch.verification_token,
|
||||
);
|
||||
}
|
||||
if let (Some(incoming_ch), Some(current_ch)) = (
|
||||
incoming.channels_config.feishu.as_mut(),
|
||||
current.channels_config.feishu.as_ref(),
|
||||
) {
|
||||
restore_required_secret(&mut incoming_ch.app_secret, ¤t_ch.app_secret);
|
||||
restore_optional_secret(&mut incoming_ch.encrypt_key, ¤t_ch.encrypt_key);
|
||||
restore_optional_secret(
|
||||
&mut incoming_ch.verification_token,
|
||||
¤t_ch.verification_token,
|
||||
);
|
||||
}
|
||||
if let (Some(incoming_ch), Some(current_ch)) = (
|
||||
incoming.channels_config.dingtalk.as_mut(),
|
||||
current.channels_config.dingtalk.as_ref(),
|
||||
@ -831,6 +889,9 @@ fn hydrate_config_for_save(
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
use crate::config::schema::{
|
||||
CloudflareTunnelConfig, LarkReceiveMode, NgrokTunnelConfig, WatiConfig,
|
||||
};
|
||||
|
||||
#[test]
|
||||
fn masking_keeps_toml_valid_and_preserves_api_keys_type() {
|
||||
@ -896,4 +957,180 @@ mod tests {
|
||||
.expect("normalized toml should parse as Config");
|
||||
assert_eq!(parsed.reliability.api_keys, vec![MASKED_SECRET.to_string()]);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn mask_sensitive_fields_covers_wati_email_and_feishu_secrets() {
|
||||
let mut cfg = crate::config::Config::default();
|
||||
cfg.proxy.http_proxy = Some("http://user:pass@proxy.internal:8080".to_string());
|
||||
cfg.proxy.https_proxy = Some("https://user:pass@proxy.internal:8443".to_string());
|
||||
cfg.proxy.all_proxy = Some("socks5://user:pass@proxy.internal:1080".to_string());
|
||||
cfg.tunnel.cloudflare = Some(CloudflareTunnelConfig {
|
||||
token: "cloudflare-real-token".to_string(),
|
||||
});
|
||||
cfg.tunnel.ngrok = Some(NgrokTunnelConfig {
|
||||
auth_token: "ngrok-real-token".to_string(),
|
||||
domain: Some("zeroclaw.ngrok.app".to_string()),
|
||||
});
|
||||
cfg.channels_config.wati = Some(WatiConfig {
|
||||
api_token: "wati-real-token".to_string(),
|
||||
api_url: "https://live-mt-server.wati.io".to_string(),
|
||||
tenant_id: Some("tenant-1".to_string()),
|
||||
allowed_numbers: vec!["*".to_string()],
|
||||
});
|
||||
let mut email = crate::channels::email_channel::EmailConfig::default();
|
||||
email.password = "email-real-password".to_string();
|
||||
cfg.channels_config.email = Some(email);
|
||||
cfg.channels_config.feishu = Some(crate::config::FeishuConfig {
|
||||
app_id: "cli_app_id".to_string(),
|
||||
app_secret: "feishu-real-secret".to_string(),
|
||||
encrypt_key: Some("feishu-encrypt-key".to_string()),
|
||||
verification_token: Some("feishu-verify-token".to_string()),
|
||||
allowed_users: vec!["*".to_string()],
|
||||
receive_mode: LarkReceiveMode::Webhook,
|
||||
port: Some(42617),
|
||||
});
|
||||
|
||||
let masked = mask_sensitive_fields(&cfg);
|
||||
assert_eq!(masked.proxy.http_proxy.as_deref(), Some(MASKED_SECRET));
|
||||
assert_eq!(masked.proxy.https_proxy.as_deref(), Some(MASKED_SECRET));
|
||||
assert_eq!(masked.proxy.all_proxy.as_deref(), Some(MASKED_SECRET));
|
||||
assert_eq!(
|
||||
masked
|
||||
.tunnel
|
||||
.cloudflare
|
||||
.as_ref()
|
||||
.map(|value| value.token.as_str()),
|
||||
Some(MASKED_SECRET)
|
||||
);
|
||||
assert_eq!(
|
||||
masked
|
||||
.tunnel
|
||||
.ngrok
|
||||
.as_ref()
|
||||
.map(|value| value.auth_token.as_str()),
|
||||
Some(MASKED_SECRET)
|
||||
);
|
||||
assert_eq!(
|
||||
masked
|
||||
.channels_config
|
||||
.wati
|
||||
.as_ref()
|
||||
.map(|value| value.api_token.as_str()),
|
||||
Some(MASKED_SECRET)
|
||||
);
|
||||
assert_eq!(
|
||||
masked
|
||||
.channels_config
|
||||
.email
|
||||
.as_ref()
|
||||
.map(|value| value.password.as_str()),
|
||||
Some(MASKED_SECRET)
|
||||
);
|
||||
let masked_feishu = masked
|
||||
.channels_config
|
||||
.feishu
|
||||
.as_ref()
|
||||
.expect("feishu config should exist");
|
||||
assert_eq!(masked_feishu.app_secret, MASKED_SECRET);
|
||||
assert_eq!(masked_feishu.encrypt_key.as_deref(), Some(MASKED_SECRET));
|
||||
assert_eq!(
|
||||
masked_feishu.verification_token.as_deref(),
|
||||
Some(MASKED_SECRET)
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn hydrate_config_for_save_restores_wati_email_and_feishu_secrets() {
|
||||
let mut current = crate::config::Config::default();
|
||||
current.proxy.http_proxy = Some("http://user:pass@proxy.internal:8080".to_string());
|
||||
current.proxy.https_proxy = Some("https://user:pass@proxy.internal:8443".to_string());
|
||||
current.proxy.all_proxy = Some("socks5://user:pass@proxy.internal:1080".to_string());
|
||||
current.tunnel.cloudflare = Some(CloudflareTunnelConfig {
|
||||
token: "cloudflare-real-token".to_string(),
|
||||
});
|
||||
current.tunnel.ngrok = Some(NgrokTunnelConfig {
|
||||
auth_token: "ngrok-real-token".to_string(),
|
||||
domain: Some("zeroclaw.ngrok.app".to_string()),
|
||||
});
|
||||
current.channels_config.wati = Some(WatiConfig {
|
||||
api_token: "wati-real-token".to_string(),
|
||||
api_url: "https://live-mt-server.wati.io".to_string(),
|
||||
tenant_id: Some("tenant-1".to_string()),
|
||||
allowed_numbers: vec!["*".to_string()],
|
||||
});
|
||||
let mut email = crate::channels::email_channel::EmailConfig::default();
|
||||
email.password = "email-real-password".to_string();
|
||||
current.channels_config.email = Some(email);
|
||||
current.channels_config.feishu = Some(crate::config::FeishuConfig {
|
||||
app_id: "cli_app_id".to_string(),
|
||||
app_secret: "feishu-real-secret".to_string(),
|
||||
encrypt_key: Some("feishu-encrypt-key".to_string()),
|
||||
verification_token: Some("feishu-verify-token".to_string()),
|
||||
allowed_users: vec!["*".to_string()],
|
||||
receive_mode: LarkReceiveMode::Webhook,
|
||||
port: Some(42617),
|
||||
});
|
||||
|
||||
let incoming = mask_sensitive_fields(¤t);
|
||||
let restored = hydrate_config_for_save(incoming, ¤t);
|
||||
|
||||
assert_eq!(
|
||||
restored.proxy.http_proxy.as_deref(),
|
||||
Some("http://user:pass@proxy.internal:8080")
|
||||
);
|
||||
assert_eq!(
|
||||
restored.proxy.https_proxy.as_deref(),
|
||||
Some("https://user:pass@proxy.internal:8443")
|
||||
);
|
||||
assert_eq!(
|
||||
restored.proxy.all_proxy.as_deref(),
|
||||
Some("socks5://user:pass@proxy.internal:1080")
|
||||
);
|
||||
assert_eq!(
|
||||
restored
|
||||
.tunnel
|
||||
.cloudflare
|
||||
.as_ref()
|
||||
.map(|value| value.token.as_str()),
|
||||
Some("cloudflare-real-token")
|
||||
);
|
||||
assert_eq!(
|
||||
restored
|
||||
.tunnel
|
||||
.ngrok
|
||||
.as_ref()
|
||||
.map(|value| value.auth_token.as_str()),
|
||||
Some("ngrok-real-token")
|
||||
);
|
||||
assert_eq!(
|
||||
restored
|
||||
.channels_config
|
||||
.wati
|
||||
.as_ref()
|
||||
.map(|value| value.api_token.as_str()),
|
||||
Some("wati-real-token")
|
||||
);
|
||||
assert_eq!(
|
||||
restored
|
||||
.channels_config
|
||||
.email
|
||||
.as_ref()
|
||||
.map(|value| value.password.as_str()),
|
||||
Some("email-real-password")
|
||||
);
|
||||
let restored_feishu = restored
|
||||
.channels_config
|
||||
.feishu
|
||||
.as_ref()
|
||||
.expect("feishu config should exist");
|
||||
assert_eq!(restored_feishu.app_secret, "feishu-real-secret");
|
||||
assert_eq!(
|
||||
restored_feishu.encrypt_key.as_deref(),
|
||||
Some("feishu-encrypt-key")
|
||||
);
|
||||
assert_eq!(
|
||||
restored_feishu.verification_token.as_deref(),
|
||||
Some("feishu-verify-token")
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
@ -11,7 +11,9 @@ use anyhow::Context;
|
||||
use async_trait::async_trait;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use serde_json::{json, Value};
|
||||
use std::io::ErrorKind;
|
||||
use std::net::ToSocketAddrs;
|
||||
use std::path::{Path, PathBuf};
|
||||
use std::process::Stdio;
|
||||
use std::sync::Arc;
|
||||
use std::time::Duration;
|
||||
@ -797,6 +799,15 @@ impl BrowserTool {
|
||||
params.remove("action");
|
||||
|
||||
self.validate_computer_use_action(action, ¶ms)?;
|
||||
if action == "screen_capture" {
|
||||
if let Some(path) = params.get("path").and_then(Value::as_str) {
|
||||
let resolved = self.resolve_output_path_for_write("path", path).await?;
|
||||
params.insert(
|
||||
"path".to_string(),
|
||||
Value::String(resolved.to_string_lossy().into_owned()),
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
let payload = json!({
|
||||
"action": action,
|
||||
@ -1116,7 +1127,7 @@ impl Tool for BrowserTool {
|
||||
});
|
||||
}
|
||||
|
||||
let action = match parse_browser_action(action_str, &args) {
|
||||
let mut action = match parse_browser_action(action_str, &args) {
|
||||
Ok(a) => a,
|
||||
Err(e) => {
|
||||
return Ok(ToolResult {
|
||||
@ -2350,6 +2361,16 @@ fn host_matches_allowlist(host: &str, allowed: &[String]) -> bool {
|
||||
mod tests {
|
||||
use super::*;
|
||||
|
||||
#[cfg(unix)]
|
||||
fn symlink_dir(src: &Path, dst: &Path) {
|
||||
std::os::unix::fs::symlink(src, dst).expect("symlink should be created");
|
||||
}
|
||||
|
||||
#[cfg(windows)]
|
||||
fn symlink_dir(src: &Path, dst: &Path) {
|
||||
std::os::windows::fs::symlink_dir(src, dst).expect("symlink should be created");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn normalize_domains_works() {
|
||||
let domains = vec![
|
||||
|
||||
@ -3,7 +3,7 @@ use crate::security::SecurityPolicy;
|
||||
use async_trait::async_trait;
|
||||
use serde_json::json;
|
||||
use std::fmt::Write;
|
||||
use std::path::Path;
|
||||
use std::path::{Path, PathBuf};
|
||||
use std::sync::Arc;
|
||||
|
||||
/// Maximum file size we will read and base64-encode (5 MB).
|
||||
@ -116,6 +116,32 @@ impl ImageInfoTool {
|
||||
}
|
||||
None
|
||||
}
|
||||
|
||||
fn resolve_image_path(&self, path_str: &str) -> Result<PathBuf, String> {
|
||||
// Syntax-level checks first.
|
||||
if !self.security.is_path_allowed(path_str) {
|
||||
return Err(format!(
|
||||
"Path not allowed: {path_str} (must be within workspace)"
|
||||
));
|
||||
}
|
||||
|
||||
let raw_path = Path::new(path_str);
|
||||
let candidate = if raw_path.is_absolute() {
|
||||
raw_path.to_path_buf()
|
||||
} else {
|
||||
self.security.workspace_dir.join(raw_path)
|
||||
};
|
||||
|
||||
let resolved = candidate
|
||||
.canonicalize()
|
||||
.map_err(|_| format!("File not found: {path_str}"))?;
|
||||
|
||||
if !self.security.is_resolved_path_allowed(&resolved) {
|
||||
return Err(self.security.resolved_path_violation_message(&resolved));
|
||||
}
|
||||
|
||||
Ok(resolved)
|
||||
}
|
||||
}
|
||||
|
||||
#[async_trait]
|
||||
@ -156,28 +182,26 @@ impl Tool for ImageInfoTool {
|
||||
.and_then(serde_json::Value::as_bool)
|
||||
.unwrap_or(false);
|
||||
|
||||
let path = Path::new(path_str);
|
||||
let resolved_path = match self.resolve_image_path(path_str) {
|
||||
Ok(path) => path,
|
||||
Err(error) => {
|
||||
return Ok(ToolResult {
|
||||
success: false,
|
||||
output: String::new(),
|
||||
error: Some(error),
|
||||
});
|
||||
}
|
||||
};
|
||||
|
||||
// Restrict reads to workspace directory to prevent arbitrary file exfiltration
|
||||
if !self.security.is_path_allowed(path_str) {
|
||||
if !resolved_path.is_file() {
|
||||
return Ok(ToolResult {
|
||||
success: false,
|
||||
output: String::new(),
|
||||
error: Some(format!(
|
||||
"Path not allowed: {path_str} (must be within workspace)"
|
||||
)),
|
||||
error: Some(format!("Not a file: {}", resolved_path.display())),
|
||||
});
|
||||
}
|
||||
|
||||
if !path.exists() {
|
||||
return Ok(ToolResult {
|
||||
success: false,
|
||||
output: String::new(),
|
||||
error: Some(format!("File not found: {path_str}")),
|
||||
});
|
||||
}
|
||||
|
||||
let metadata = tokio::fs::metadata(path)
|
||||
let metadata = tokio::fs::metadata(&resolved_path)
|
||||
.await
|
||||
.map_err(|e| anyhow::anyhow!("Failed to read file metadata: {e}"))?;
|
||||
|
||||
@ -193,7 +217,7 @@ impl Tool for ImageInfoTool {
|
||||
});
|
||||
}
|
||||
|
||||
let bytes = tokio::fs::read(path)
|
||||
let bytes = tokio::fs::read(&resolved_path)
|
||||
.await
|
||||
.map_err(|e| anyhow::anyhow!("Failed to read image file: {e}"))?;
|
||||
|
||||
@ -232,6 +256,17 @@ impl Tool for ImageInfoTool {
|
||||
mod tests {
|
||||
use super::*;
|
||||
use crate::security::{AutonomyLevel, SecurityPolicy};
|
||||
use std::path::{Path, PathBuf};
|
||||
|
||||
#[cfg(unix)]
|
||||
fn symlink_file(src: &Path, dst: &Path) {
|
||||
std::os::unix::fs::symlink(src, dst).expect("symlink should be created");
|
||||
}
|
||||
|
||||
#[cfg(windows)]
|
||||
fn symlink_file(src: &Path, dst: &Path) {
|
||||
std::os::windows::fs::symlink_file(src, dst).expect("symlink should be created");
|
||||
}
|
||||
|
||||
fn test_security() -> Arc<SecurityPolicy> {
|
||||
Arc::new(SecurityPolicy {
|
||||
@ -490,4 +525,36 @@ mod tests {
|
||||
|
||||
let _ = tokio::fs::remove_dir_all(&dir).await;
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn execute_blocks_symlink_escape_outside_workspace() {
|
||||
let temp = tempfile::tempdir().expect("tempdir");
|
||||
let workspace = temp.path().join("workspace");
|
||||
std::fs::create_dir_all(&workspace).expect("workspace should exist");
|
||||
|
||||
let outside = temp.path().join("secret.png");
|
||||
std::fs::write(&outside, b"not-an-image").expect("fixture should be written");
|
||||
|
||||
let link = workspace.join("link.png");
|
||||
symlink_file(&outside, &link);
|
||||
|
||||
let policy = Arc::new(SecurityPolicy {
|
||||
autonomy: AutonomyLevel::Full,
|
||||
workspace_dir: PathBuf::from(&workspace),
|
||||
workspace_only: true,
|
||||
forbidden_paths: vec![],
|
||||
..SecurityPolicy::default()
|
||||
});
|
||||
let tool = ImageInfoTool::new(policy);
|
||||
|
||||
let result = tool.execute(json!({"path": "link.png"})).await.unwrap();
|
||||
assert!(!result.success, "symlink escape must be blocked");
|
||||
let err = result.error.unwrap_or_default();
|
||||
assert!(
|
||||
err.contains("escapes workspace allowlist")
|
||||
|| err.contains("Path not allowed")
|
||||
|| err.contains("outside"),
|
||||
"unexpected error message: {err}"
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
@ -3,7 +3,8 @@ use crate::security::SecurityPolicy;
|
||||
use async_trait::async_trait;
|
||||
use serde_json::json;
|
||||
use std::fmt::Write;
|
||||
use std::path::PathBuf;
|
||||
use std::io::ErrorKind;
|
||||
use std::path::{Path, PathBuf};
|
||||
use std::sync::Arc;
|
||||
use std::time::Duration;
|
||||
|
||||
@ -25,32 +26,87 @@ impl ScreenshotTool {
|
||||
Self { security }
|
||||
}
|
||||
|
||||
/// Determine the screenshot command for the current platform.
|
||||
fn screenshot_command(output_path: &str) -> Option<Vec<String>> {
|
||||
fn sanitize_output_filename(filename: &str, fallback: &str) -> String {
|
||||
let Some(basename) = Path::new(filename)
|
||||
.file_name()
|
||||
.and_then(|name| name.to_str())
|
||||
else {
|
||||
return fallback.to_string();
|
||||
};
|
||||
|
||||
let trimmed = basename.trim();
|
||||
if trimmed.is_empty() || trimmed == "." || trimmed == ".." || trimmed.contains('\0') {
|
||||
return fallback.to_string();
|
||||
}
|
||||
|
||||
trimmed.to_string()
|
||||
}
|
||||
|
||||
/// Resolve screenshot output path and block writes through symlink targets.
|
||||
async fn resolve_output_path_for_write(&self, filename: &str) -> anyhow::Result<PathBuf> {
|
||||
tokio::fs::create_dir_all(&self.security.workspace_dir).await?;
|
||||
|
||||
let workspace_root = tokio::fs::canonicalize(&self.security.workspace_dir)
|
||||
.await
|
||||
.unwrap_or_else(|_| self.security.workspace_dir.clone());
|
||||
let output_path = workspace_root.join(filename);
|
||||
|
||||
// Parent must remain inside workspace after resolution.
|
||||
let parent = output_path
|
||||
.parent()
|
||||
.ok_or_else(|| anyhow::anyhow!("Invalid screenshot output path"))?;
|
||||
let resolved_parent = tokio::fs::canonicalize(parent).await?;
|
||||
if !self.security.is_resolved_path_allowed(&resolved_parent) {
|
||||
anyhow::bail!(
|
||||
"{}",
|
||||
self.security
|
||||
.resolved_path_violation_message(&resolved_parent)
|
||||
);
|
||||
}
|
||||
|
||||
match tokio::fs::symlink_metadata(&output_path).await {
|
||||
Ok(meta) => {
|
||||
if meta.file_type().is_symlink() {
|
||||
anyhow::bail!(
|
||||
"Refusing to write screenshot through symlink: {}",
|
||||
output_path.display()
|
||||
);
|
||||
}
|
||||
if !meta.is_file() {
|
||||
anyhow::bail!(
|
||||
"Screenshot output path is not a regular file: {}",
|
||||
output_path.display()
|
||||
);
|
||||
}
|
||||
}
|
||||
Err(e) if e.kind() == ErrorKind::NotFound => {}
|
||||
Err(e) => return Err(e.into()),
|
||||
}
|
||||
|
||||
Ok(output_path)
|
||||
}
|
||||
|
||||
/// Determine candidate screenshot commands for the current platform.
|
||||
fn screenshot_commands(output_path: &str) -> Vec<Vec<String>> {
|
||||
if cfg!(target_os = "macos") {
|
||||
Some(vec![
|
||||
vec![vec![
|
||||
"screencapture".into(),
|
||||
"-x".into(), // no sound
|
||||
output_path.into(),
|
||||
])
|
||||
]]
|
||||
} else if cfg!(target_os = "linux") {
|
||||
Some(vec![
|
||||
"sh".into(),
|
||||
"-c".into(),
|
||||
format!(
|
||||
"if command -v gnome-screenshot >/dev/null 2>&1; then \
|
||||
gnome-screenshot -f '{output_path}'; \
|
||||
elif command -v scrot >/dev/null 2>&1; then \
|
||||
scrot '{output_path}'; \
|
||||
elif command -v import >/dev/null 2>&1; then \
|
||||
import -window root '{output_path}'; \
|
||||
else \
|
||||
echo 'NO_SCREENSHOT_TOOL' >&2; exit 1; \
|
||||
fi"
|
||||
),
|
||||
])
|
||||
vec![
|
||||
vec!["gnome-screenshot".into(), "-f".into(), output_path.into()],
|
||||
vec!["scrot".into(), output_path.into()],
|
||||
vec![
|
||||
"import".into(),
|
||||
"-window".into(),
|
||||
"root".into(),
|
||||
output_path.into(),
|
||||
],
|
||||
]
|
||||
} else {
|
||||
None
|
||||
Vec::new()
|
||||
}
|
||||
}
|
||||
|
||||
@ -62,13 +118,11 @@ impl ScreenshotTool {
|
||||
.and_then(|v| v.as_str())
|
||||
.map_or_else(|| format!("screenshot_{timestamp}.png"), String::from);
|
||||
|
||||
// Sanitize filename to prevent path traversal
|
||||
let safe_name = PathBuf::from(&filename).file_name().map_or_else(
|
||||
|| format!("screenshot_{timestamp}.png"),
|
||||
|n| n.to_string_lossy().to_string(),
|
||||
);
|
||||
let fallback_name = format!("screenshot_{timestamp}.png");
|
||||
// Keep only a safe basename and reject dot-segment escapes.
|
||||
let safe_name = Self::sanitize_output_filename(&filename, &fallback_name);
|
||||
|
||||
// Reject filenames with shell-breaking characters to prevent injection in sh -c
|
||||
// Keep conservative filtering for unusual shell/control chars.
|
||||
const SHELL_UNSAFE: &[char] = &[
|
||||
'\'', '"', '`', '$', '\\', ';', '|', '&', '\n', '\0', '(', ')',
|
||||
];
|
||||
@ -80,73 +134,103 @@ impl ScreenshotTool {
|
||||
});
|
||||
}
|
||||
|
||||
let output_path = self.security.workspace_dir.join(&safe_name);
|
||||
let output_path = match self.resolve_output_path_for_write(&safe_name).await {
|
||||
Ok(path) => path,
|
||||
Err(e) => {
|
||||
return Ok(ToolResult {
|
||||
success: false,
|
||||
output: String::new(),
|
||||
error: Some(format!("Invalid screenshot output path: {e}")),
|
||||
});
|
||||
}
|
||||
};
|
||||
let output_str = output_path.to_string_lossy().to_string();
|
||||
|
||||
let Some(mut cmd_args) = Self::screenshot_command(&output_str) else {
|
||||
let mut commands = Self::screenshot_commands(&output_str);
|
||||
if commands.is_empty() {
|
||||
return Ok(ToolResult {
|
||||
success: false,
|
||||
output: String::new(),
|
||||
error: Some("Screenshot not supported on this platform".into()),
|
||||
});
|
||||
};
|
||||
}
|
||||
|
||||
// macOS region flags
|
||||
if cfg!(target_os = "macos") {
|
||||
if let Some(region) = args.get("region").and_then(|v| v.as_str()) {
|
||||
match region {
|
||||
"selection" => cmd_args.insert(1, "-s".into()),
|
||||
"window" => cmd_args.insert(1, "-w".into()),
|
||||
"selection" => commands[0].insert(1, "-s".into()),
|
||||
"window" => commands[0].insert(1, "-w".into()),
|
||||
_ => {} // ignore unknown regions
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
let program = cmd_args.remove(0);
|
||||
let result = tokio::time::timeout(
|
||||
Duration::from_secs(SCREENSHOT_TIMEOUT_SECS),
|
||||
tokio::process::Command::new(&program)
|
||||
.args(&cmd_args)
|
||||
.output(),
|
||||
)
|
||||
.await;
|
||||
let mut saw_spawnable_command = false;
|
||||
let mut last_failure: Option<String> = None;
|
||||
|
||||
match result {
|
||||
Ok(Ok(output)) => {
|
||||
if !output.status.success() {
|
||||
let stderr = String::from_utf8_lossy(&output.stderr);
|
||||
if stderr.contains("NO_SCREENSHOT_TOOL") {
|
||||
return Ok(ToolResult {
|
||||
success: false,
|
||||
output: String::new(),
|
||||
error: Some(
|
||||
"No screenshot tool found. Install gnome-screenshot, scrot, or ImageMagick."
|
||||
.into(),
|
||||
),
|
||||
});
|
||||
}
|
||||
return Ok(ToolResult {
|
||||
success: false,
|
||||
output: String::new(),
|
||||
error: Some(format!("Screenshot command failed: {stderr}")),
|
||||
});
|
||||
}
|
||||
|
||||
Self::read_and_encode(&output_path).await
|
||||
for mut cmd_args in commands {
|
||||
if cmd_args.is_empty() {
|
||||
continue;
|
||||
}
|
||||
let program = cmd_args.remove(0);
|
||||
let result = tokio::time::timeout(
|
||||
Duration::from_secs(SCREENSHOT_TIMEOUT_SECS),
|
||||
tokio::process::Command::new(&program)
|
||||
.args(&cmd_args)
|
||||
.output(),
|
||||
)
|
||||
.await;
|
||||
|
||||
match result {
|
||||
Ok(Ok(output)) => {
|
||||
saw_spawnable_command = true;
|
||||
if output.status.success() {
|
||||
return Self::read_and_encode(&output_path).await;
|
||||
}
|
||||
let stderr = String::from_utf8_lossy(&output.stderr).trim().to_string();
|
||||
if stderr.is_empty() {
|
||||
last_failure =
|
||||
Some(format!("{} exited with status {}", program, output.status));
|
||||
} else {
|
||||
last_failure = Some(stderr);
|
||||
}
|
||||
}
|
||||
Ok(Err(e)) if e.kind() == ErrorKind::NotFound => {
|
||||
// Try next candidate command.
|
||||
}
|
||||
Ok(Err(e)) => {
|
||||
saw_spawnable_command = true;
|
||||
last_failure = Some(format!("Failed to execute screenshot command: {e}"));
|
||||
}
|
||||
Err(_) => {
|
||||
saw_spawnable_command = true;
|
||||
last_failure = Some(format!(
|
||||
"Screenshot timed out after {SCREENSHOT_TIMEOUT_SECS}s"
|
||||
));
|
||||
}
|
||||
}
|
||||
Ok(Err(e)) => Ok(ToolResult {
|
||||
success: false,
|
||||
output: String::new(),
|
||||
error: Some(format!("Failed to execute screenshot command: {e}")),
|
||||
}),
|
||||
Err(_) => Ok(ToolResult {
|
||||
success: false,
|
||||
output: String::new(),
|
||||
error: Some(format!(
|
||||
"Screenshot timed out after {SCREENSHOT_TIMEOUT_SECS}s"
|
||||
)),
|
||||
}),
|
||||
}
|
||||
|
||||
if !saw_spawnable_command {
|
||||
return Ok(ToolResult {
|
||||
success: false,
|
||||
output: String::new(),
|
||||
error: Some(
|
||||
"No screenshot tool found. Install gnome-screenshot, scrot, or ImageMagick."
|
||||
.into(),
|
||||
),
|
||||
});
|
||||
}
|
||||
|
||||
Ok(ToolResult {
|
||||
success: false,
|
||||
output: String::new(),
|
||||
error: Some(
|
||||
last_failure
|
||||
.unwrap_or_else(|| "Screenshot command failed for unknown reasons".into()),
|
||||
),
|
||||
})
|
||||
}
|
||||
|
||||
/// Read the screenshot file and return base64-encoded result.
|
||||
@ -257,6 +341,17 @@ impl Tool for ScreenshotTool {
|
||||
mod tests {
|
||||
use super::*;
|
||||
use crate::security::{AutonomyLevel, SecurityPolicy};
|
||||
use std::path::Path;
|
||||
|
||||
#[cfg(unix)]
|
||||
fn symlink_file(src: &Path, dst: &Path) {
|
||||
std::os::unix::fs::symlink(src, dst).expect("symlink should be created");
|
||||
}
|
||||
|
||||
#[cfg(windows)]
|
||||
fn symlink_file(src: &Path, dst: &Path) {
|
||||
std::os::windows::fs::symlink_file(src, dst).expect("symlink should be created");
|
||||
}
|
||||
|
||||
fn test_security() -> Arc<SecurityPolicy> {
|
||||
Arc::new(SecurityPolicy {
|
||||
@ -298,10 +393,26 @@ mod tests {
|
||||
#[test]
|
||||
#[cfg(any(target_os = "macos", target_os = "linux"))]
|
||||
fn screenshot_command_exists() {
|
||||
let cmd = ScreenshotTool::screenshot_command("/tmp/test.png");
|
||||
assert!(cmd.is_some());
|
||||
let args = cmd.unwrap();
|
||||
assert!(!args.is_empty());
|
||||
let commands = ScreenshotTool::screenshot_commands("/tmp/test.png");
|
||||
assert!(!commands.is_empty());
|
||||
assert!(commands.iter().all(|cmd| !cmd.is_empty()));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn screenshot_filename_sanitizes_dot_segments() {
|
||||
let fallback = "fallback.png";
|
||||
assert_eq!(
|
||||
ScreenshotTool::sanitize_output_filename("../outside.png", fallback),
|
||||
"outside.png"
|
||||
);
|
||||
assert_eq!(
|
||||
ScreenshotTool::sanitize_output_filename("..", fallback),
|
||||
fallback
|
||||
);
|
||||
assert_eq!(
|
||||
ScreenshotTool::sanitize_output_filename(".", fallback),
|
||||
fallback
|
||||
);
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
@ -317,11 +428,36 @@ mod tests {
|
||||
|
||||
#[test]
|
||||
fn screenshot_command_contains_output_path() {
|
||||
let cmd = ScreenshotTool::screenshot_command("/tmp/my_screenshot.png").unwrap();
|
||||
let joined = cmd.join(" ");
|
||||
let commands = ScreenshotTool::screenshot_commands("/tmp/my_screenshot.png");
|
||||
assert!(!commands.is_empty());
|
||||
let joined = commands[0].join(" ");
|
||||
assert!(
|
||||
joined.contains("/tmp/my_screenshot.png"),
|
||||
"Command should contain the output path"
|
||||
);
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn screenshot_blocks_symlink_output_target() {
|
||||
let temp = tempfile::tempdir().expect("tempdir");
|
||||
let workspace = temp.path().join("workspace");
|
||||
tokio::fs::create_dir_all(&workspace)
|
||||
.await
|
||||
.expect("workspace should exist");
|
||||
|
||||
let outside = temp.path().join("outside.png");
|
||||
tokio::fs::write(&outside, b"secret")
|
||||
.await
|
||||
.expect("outside fixture should be written");
|
||||
symlink_file(&outside, &workspace.join("screen.png"));
|
||||
|
||||
let tool = ScreenshotTool::new(Arc::new(SecurityPolicy {
|
||||
autonomy: AutonomyLevel::Full,
|
||||
workspace_dir: workspace,
|
||||
..SecurityPolicy::default()
|
||||
}));
|
||||
|
||||
let result = tool.resolve_output_path_for_write("screen.png").await;
|
||||
assert!(result.is_err(), "symlink output target must be rejected");
|
||||
}
|
||||
}
|
||||
|
||||
@ -3,6 +3,10 @@
|
||||
<head>
|
||||
<meta charset="UTF-8" />
|
||||
<meta name="viewport" content="width=device-width, initial-scale=1.0" />
|
||||
<meta
|
||||
http-equiv="Content-Security-Policy"
|
||||
content="default-src 'self'; script-src 'self'; style-src 'self' 'unsafe-inline'; img-src 'self' data: blob: https:; connect-src 'self' ws: wss: https:; font-src 'self' data:; object-src 'none'; base-uri 'self'; frame-ancestors 'none'; form-action 'self'"
|
||||
/>
|
||||
<meta name="color-scheme" content="dark" />
|
||||
<title>ZeroClaw</title>
|
||||
</head>
|
||||
|
||||
@ -12,6 +12,7 @@ import {
|
||||
setToken as writeToken,
|
||||
clearToken as removeToken,
|
||||
isAuthenticated as checkAuth,
|
||||
TOKEN_STORAGE_KEY,
|
||||
} from '../lib/auth';
|
||||
import { pair as apiPair, getPublicHealth } from '../lib/api';
|
||||
|
||||
@ -69,10 +70,10 @@ export function AuthProvider({ children }: AuthProviderProps) {
|
||||
};
|
||||
}, []);
|
||||
|
||||
// Keep state in sync if localStorage is changed in another tab
|
||||
// Keep state in sync if token storage is changed from another browser context.
|
||||
useEffect(() => {
|
||||
const handler = (e: StorageEvent) => {
|
||||
if (e.key === 'zeroclaw_token') {
|
||||
if (e.key === TOKEN_STORAGE_KEY) {
|
||||
const t = readToken();
|
||||
setTokenState(t);
|
||||
setAuthenticated(t !== null && t.length > 0);
|
||||
|
||||
@ -1,36 +1,84 @@
|
||||
const TOKEN_KEY = 'zeroclaw_token';
|
||||
export const TOKEN_STORAGE_KEY = 'zeroclaw_token';
|
||||
let inMemoryToken: string | null = null;
|
||||
|
||||
function readStorage(key: string): string | null {
|
||||
try {
|
||||
return sessionStorage.getItem(key);
|
||||
} catch {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
function writeStorage(key: string, value: string): void {
|
||||
try {
|
||||
sessionStorage.setItem(key, value);
|
||||
} catch {
|
||||
// sessionStorage may be unavailable in some browser privacy modes
|
||||
}
|
||||
}
|
||||
|
||||
function removeStorage(key: string): void {
|
||||
try {
|
||||
sessionStorage.removeItem(key);
|
||||
} catch {
|
||||
// Ignore
|
||||
}
|
||||
}
|
||||
|
||||
function clearLegacyLocalStorageToken(key: string): void {
|
||||
try {
|
||||
localStorage.removeItem(key);
|
||||
} catch {
|
||||
// Ignore
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Retrieve the stored authentication token.
|
||||
*/
|
||||
export function getToken(): string | null {
|
||||
try {
|
||||
return localStorage.getItem(TOKEN_KEY);
|
||||
} catch {
|
||||
return null;
|
||||
if (inMemoryToken && inMemoryToken.length > 0) {
|
||||
return inMemoryToken;
|
||||
}
|
||||
|
||||
const sessionToken = readStorage(TOKEN_STORAGE_KEY);
|
||||
if (sessionToken && sessionToken.length > 0) {
|
||||
inMemoryToken = sessionToken;
|
||||
return sessionToken;
|
||||
}
|
||||
|
||||
// One-time migration from older localStorage-backed sessions.
|
||||
try {
|
||||
const legacy = localStorage.getItem(TOKEN_STORAGE_KEY);
|
||||
if (legacy && legacy.length > 0) {
|
||||
inMemoryToken = legacy;
|
||||
writeStorage(TOKEN_STORAGE_KEY, legacy);
|
||||
localStorage.removeItem(TOKEN_STORAGE_KEY);
|
||||
return legacy;
|
||||
}
|
||||
} catch {
|
||||
// Ignore
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Store an authentication token.
|
||||
*/
|
||||
export function setToken(token: string): void {
|
||||
try {
|
||||
localStorage.setItem(TOKEN_KEY, token);
|
||||
} catch {
|
||||
// localStorage may be unavailable (e.g. in some private browsing modes)
|
||||
}
|
||||
inMemoryToken = token;
|
||||
writeStorage(TOKEN_STORAGE_KEY, token);
|
||||
clearLegacyLocalStorageToken(TOKEN_STORAGE_KEY);
|
||||
}
|
||||
|
||||
/**
|
||||
* Remove the stored authentication token.
|
||||
*/
|
||||
export function clearToken(): void {
|
||||
try {
|
||||
localStorage.removeItem(TOKEN_KEY);
|
||||
} catch {
|
||||
// Ignore
|
||||
}
|
||||
inMemoryToken = null;
|
||||
removeStorage(TOKEN_STORAGE_KEY);
|
||||
clearLegacyLocalStorageToken(TOKEN_STORAGE_KEY);
|
||||
}
|
||||
|
||||
/**
|
||||
|
||||
Loading…
Reference in New Issue
Block a user