Merge branch 'main' into fix/ci-require-chumyin-approval

This commit is contained in:
Chum Yin 2026-02-28 13:49:32 +08:00 committed by GitHub
commit ca8ef10dcf
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
23 changed files with 1043 additions and 1207 deletions

View File

@ -51,7 +51,7 @@ jobs:
needs: [changes]
if: needs.changes.outputs.rust_changed == 'true'
runs-on: [self-hosted, aws-india]
timeout-minutes: 25
timeout-minutes: 40
steps:
- uses: actions/checkout@34e114876b0b11c390a56381ad16ebd13914f8d5 # v4
with:
@ -75,7 +75,7 @@ jobs:
needs: [changes]
if: needs.changes.outputs.rust_changed == 'true'
runs-on: [self-hosted, aws-india]
timeout-minutes: 35
timeout-minutes: 60
steps:
- uses: actions/checkout@34e114876b0b11c390a56381ad16ebd13914f8d5 # v4
- uses: dtolnay/rust-toolchain@631a55b12751854ce901bb631d5902ceb48146f7 # stable
@ -136,7 +136,7 @@ jobs:
needs: [changes]
if: needs.changes.outputs.rust_changed == 'true'
runs-on: [self-hosted, aws-india]
timeout-minutes: 20
timeout-minutes: 35
steps:
- uses: actions/checkout@34e114876b0b11c390a56381ad16ebd13914f8d5 # v4
@ -270,29 +270,6 @@ jobs:
const script = require('./.github/workflows/scripts/ci_workflow_owner_approval.js');
await script({ github, context, core });
human-review-approval:
name: Human Review Approval
needs: [changes]
if: github.event_name == 'pull_request'
runs-on: [self-hosted, aws-india]
permissions:
contents: read
pull-requests: read
steps:
- name: Checkout repository
uses: actions/checkout@34e114876b0b11c390a56381ad16ebd13914f8d5 # v4
with:
ref: ${{ github.event.pull_request.base.sha }}
- name: Require at least one human approving review
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
env:
HUMAN_REVIEW_BOT_LOGINS: ${{ vars.HUMAN_REVIEW_BOT_LOGINS }}
with:
script: |
const script = require('./.github/workflows/scripts/ci_human_review_guard.js');
await script({ github, context, core });
license-file-owner-guard:
name: License File Owner Guard
needs: [changes]
@ -314,7 +291,7 @@ jobs:
ci-required:
name: CI Required Gate
if: always()
needs: [changes, lint, test, build, docs-only, non-rust, docs-quality, lint-feedback, workflow-owner-approval, human-review-approval, license-file-owner-guard]
needs: [changes, lint, test, build, docs-only, non-rust, docs-quality, lint-feedback, workflow-owner-approval, license-file-owner-guard]
runs-on: [self-hosted, aws-india]
steps:
- name: Enforce required status
@ -328,7 +305,6 @@ jobs:
ci_cd_changed="${{ needs.changes.outputs.ci_cd_changed }}"
docs_result="${{ needs.docs-quality.result }}"
workflow_owner_result="${{ needs.workflow-owner-approval.result }}"
human_review_result="${{ needs.human-review-approval.result }}"
license_owner_result="${{ needs.license-file-owner-guard.result }}"
# --- Helper: enforce PR governance gates ---
@ -338,10 +314,6 @@ jobs:
echo "CI/CD related files changed but required @chumyin approval gate did not pass."
exit 1
fi
if [ "$human_review_result" != "success" ]; then
echo "Human review approval guard did not pass."
exit 1
fi
if [ "$license_owner_result" != "success" ]; then
echo "License file owner guard did not pass."
exit 1
@ -380,8 +352,7 @@ jobs:
echo "test=${test_result}"
echo "build=${build_result}"
echo "docs=${docs_result}"
echo "ci_cd_owner_approval=${workflow_owner_result}"
echo "human_review_approval=${human_review_result}"
echo "workflow_owner_approval=${workflow_owner_result}"
echo "license_file_owner_guard=${license_owner_result}"
check_pr_governance

View File

@ -44,7 +44,7 @@ jobs:
codeql:
name: CodeQL Analysis
runs-on: [self-hosted, aws-india]
timeout-minutes: 30
timeout-minutes: 60
steps:
- name: Checkout repository
uses: actions/checkout@34e114876b0b11c390a56381ad16ebd13914f8d5 # v4

1075
README.md

File diff suppressed because it is too large Load Diff

View File

@ -382,6 +382,7 @@ WASM profile templates:
| Key | Default | Purpose |
|---|---|---|
| `reasoning_level` | unset (`None`) | Reasoning effort/level override for providers that support explicit levels (currently OpenAI Codex `/responses`) |
| `transport` | unset (`None`) | Provider transport override (`auto`, `websocket`, `sse`) |
Notes:
@ -389,6 +390,14 @@ Notes:
- When set, overrides `ZEROCLAW_CODEX_REASONING_EFFORT` for OpenAI Codex requests.
- Unset falls back to `ZEROCLAW_CODEX_REASONING_EFFORT` if present, otherwise defaults to `xhigh`.
- If both `provider.reasoning_level` and deprecated `runtime.reasoning_level` are set, provider-level value wins.
- `provider.transport` is normalized case-insensitively (`ws` aliases to `websocket`; `http` aliases to `sse`).
- For OpenAI Codex, default transport mode is `auto` (WebSocket-first with SSE fallback).
- Transport override precedence for OpenAI Codex:
1. `[[model_routes]].transport` (route-specific)
2. `PROVIDER_TRANSPORT` / `ZEROCLAW_PROVIDER_TRANSPORT` / `ZEROCLAW_CODEX_TRANSPORT`
3. `provider.transport`
4. legacy `ZEROCLAW_RESPONSES_WEBSOCKET` (boolean)
- Environment overrides replace configured `provider.transport` when set.
## `[skills]`
@ -668,6 +677,7 @@ Use route hints so integrations can keep stable names while model IDs evolve.
| `model` | _required_ | Model to use with that provider |
| `max_tokens` | unset | Optional per-route output token cap forwarded to provider APIs |
| `api_key` | unset | Optional API key override for this route's provider |
| `transport` | unset | Optional per-route transport override (`auto`, `websocket`, `sse`) |
### `[[embedding_routes]]`

View File

@ -184,6 +184,22 @@ static DEFERRED_ACTION_WITHOUT_TOOL_CALL_REGEX: LazyLock<Regex> = LazyLock::new(
.unwrap()
});
/// Detect common CJK deferred-action phrases (e.g., Chinese "让我…查看")
/// that imply a follow-up tool call should occur.
/// This matches only the "cue" half (让我 / 我来 / 我会 / …); callers pair it
/// with `CJK_DEFERRED_ACTION_VERB_REGEX` so a cue alone does not trigger.
static CJK_DEFERRED_ACTION_CUE_REGEX: LazyLock<Regex> =
    LazyLock::new(|| Regex::new(r"(让我|我来|我会|我们来|我们会|我先|先让我|马上)").unwrap());

/// Action verbs commonly used when promising to perform tool-backed work in CJK text.
/// (e.g. 查看 "view", 检查 "check", 搜索 "search", 运行 "run", 修复 "fix").
/// The pattern is a fixed, valid alternation, so `unwrap()` cannot fail.
static CJK_DEFERRED_ACTION_VERB_REGEX: LazyLock<Regex> = LazyLock::new(|| {
    Regex::new(r"(查看|检查|搜索|查找|浏览|打开|读取|写入|运行|执行|调用|分析|验证|列出|获取|尝试|试试|继续|处理|修复|看看|看一看|看一下)").unwrap()
});

/// Fast check for CJK scripts (Han/Hiragana/Katakana/Hangul) so we only run
/// additional regexes when non-Latin text is present.
/// Uses Unicode script classes rather than code-point ranges.
static CJK_SCRIPT_REGEX: LazyLock<Regex> = LazyLock::new(|| {
    Regex::new(r"[\p{Script=Han}\p{Script=Hiragana}\p{Script=Katakana}\p{Script=Hangul}]").unwrap()
});
/// Scrub credentials from tool output to prevent accidental exfiltration.
/// Replaces known credential patterns with a redacted placeholder while preserving
/// a small prefix for context.
@ -290,7 +306,17 @@ fn truncate_tool_args_for_progress(name: &str, args: &serde_json::Value, max_len
/// Returns `true` when `text` promises a follow-up action (e.g. "Let me
/// check…", or Chinese "让我…查看") without an accompanying tool call.
///
/// English/Latin detection uses `DEFERRED_ACTION_WITHOUT_TOOL_CALL_REGEX`.
/// For CJK text we require BOTH a deferred-action cue (让我 / 我来 / …) and an
/// action verb (查看 / 检查 / …), gated behind a fast CJK-script check so the
/// extra regexes only run when non-Latin text is present.
///
/// NOTE: the stale pre-merge single-expression body was left interleaved with
/// the new body here (an unterminated bool expression statement), which does
/// not compile; this keeps only the merged implementation.
fn looks_like_deferred_action_without_tool_call(text: &str) -> bool {
    let trimmed = text.trim();
    if trimmed.is_empty() {
        return false;
    }
    if DEFERRED_ACTION_WITHOUT_TOOL_CALL_REGEX.is_match(trimmed) {
        return true;
    }
    CJK_SCRIPT_REGEX.is_match(trimmed)
        && CJK_DEFERRED_ACTION_CUE_REGEX.is_match(trimmed)
        && CJK_DEFERRED_ACTION_VERB_REGEX.is_match(trimmed)
}
fn maybe_inject_cron_add_delivery(
@ -1678,6 +1704,7 @@ pub async fn run(
let provider_runtime_options = providers::ProviderRuntimeOptions {
auth_profile_override: None,
provider_api_url: config.api_url.clone(),
provider_transport: config.effective_provider_transport(),
zeroclaw_dir: config.config_path.parent().map(std::path::PathBuf::from),
secrets_encrypt: config.secrets.encrypt,
reasoning_enabled: config.runtime.reasoning_enabled,
@ -2158,6 +2185,7 @@ pub async fn process_message(config: Config, message: &str) -> Result<String> {
let provider_runtime_options = providers::ProviderRuntimeOptions {
auth_profile_override: None,
provider_api_url: config.api_url.clone(),
provider_transport: config.effective_provider_transport(),
zeroclaw_dir: config.config_path.parent().map(std::path::PathBuf::from),
secrets_encrypt: config.secrets.encrypt,
reasoning_enabled: config.runtime.reasoning_enabled,
@ -4276,6 +4304,12 @@ Done."#;
assert!(looks_like_deferred_action_without_tool_call(
"It seems absolute paths are blocked. Let me try using a relative path."
));
assert!(looks_like_deferred_action_without_tool_call(
"看起来绝对路径不可用,让我尝试使用当前目录的相对路径。"
));
assert!(looks_like_deferred_action_without_tool_call(
"页面已打开,让我获取快照查看详细信息。"
));
}
#[test]
@ -4283,6 +4317,9 @@ Done."#;
assert!(!looks_like_deferred_action_without_tool_call(
"The latest update is already shown above."
));
assert!(!looks_like_deferred_action_without_tool_call(
"最新结果已经在上面整理完成。"
));
}
#[test]

View File

@ -115,7 +115,9 @@ impl PromptSection for IdentitySection {
inject_workspace_file(&mut prompt, ctx.workspace_dir, "MEMORY.md");
}
let extra_files = ctx.identity_config.map_or(&[][..], |cfg| cfg.extra_files.as_slice());
let extra_files = ctx
.identity_config
.map_or(&[][..], |cfg| cfg.extra_files.as_slice());
for file in extra_files {
if let Some(safe_relative) = normalize_openclaw_identity_extra_file(file) {
inject_workspace_file(&mut prompt, ctx.workspace_dir, safe_relative);

View File

@ -4805,6 +4805,7 @@ pub async fn start_channels(config: Config) -> Result<()> {
let provider_runtime_options = providers::ProviderRuntimeOptions {
auth_profile_override: None,
provider_api_url: config.api_url.clone(),
provider_transport: config.effective_provider_transport(),
zeroclaw_dir: config.config_path.parent().map(std::path::PathBuf::from),
secrets_encrypt: config.secrets.encrypt,
reasoning_enabled: config.runtime.reasoning_enabled,

View File

@ -11,15 +11,15 @@ pub use schema::{
DockerRuntimeConfig, EmbeddingRouteConfig, EstopConfig, FeishuConfig, GatewayConfig,
GroupReplyConfig, GroupReplyMode, HardwareConfig, HardwareTransport, HeartbeatConfig,
HooksConfig, HttpRequestConfig, IMessageConfig, IdentityConfig, LarkConfig, MatrixConfig,
MemoryConfig, ModelRouteConfig, MultimodalConfig, NextcloudTalkConfig, ObservabilityConfig,
OtpChallengeDelivery, OtpConfig, OtpMethod, PeripheralBoardConfig, PeripheralsConfig,
NonCliNaturalLanguageApprovalMode, PerplexityFilterConfig, PluginEntryConfig, PluginsConfig,
ProviderConfig, ProxyConfig, ProxyScope, QdrantConfig, QueryClassificationConfig,
ReliabilityConfig, ResearchPhaseConfig, ResearchTrigger, ResourceLimitsConfig, RuntimeConfig,
SandboxBackend, SandboxConfig, SchedulerConfig, SecretsConfig, SecurityConfig,
SecurityRoleConfig, SkillsConfig, SkillsPromptInjectionMode, SlackConfig, StorageConfig,
StorageProviderConfig, StorageProviderSection, StreamMode, SyscallAnomalyConfig,
TelegramConfig, TranscriptionConfig, TunnelConfig, UrlAccessConfig,
MemoryConfig, ModelRouteConfig, MultimodalConfig, NextcloudTalkConfig,
NonCliNaturalLanguageApprovalMode, ObservabilityConfig, OtpChallengeDelivery, OtpConfig,
OtpMethod, PeripheralBoardConfig, PeripheralsConfig, PerplexityFilterConfig, PluginEntryConfig,
PluginsConfig, ProviderConfig, ProxyConfig, ProxyScope, QdrantConfig,
QueryClassificationConfig, ReliabilityConfig, ResearchPhaseConfig, ResearchTrigger,
ResourceLimitsConfig, RuntimeConfig, SandboxBackend, SandboxConfig, SchedulerConfig,
SecretsConfig, SecurityConfig, SecurityRoleConfig, SkillsConfig, SkillsPromptInjectionMode,
SlackConfig, StorageConfig, StorageProviderConfig, StorageProviderSection, StreamMode,
SyscallAnomalyConfig, TelegramConfig, TranscriptionConfig, TunnelConfig, UrlAccessConfig,
WasmCapabilityEscalationMode, WasmConfig, WasmModuleHashPolicy, WasmRuntimeConfig,
WasmSecurityConfig, WebFetchConfig, WebSearchConfig, WebhookConfig,
};

View File

@ -309,6 +309,20 @@ pub struct ProviderConfig {
/// (e.g. OpenAI Codex `/responses` reasoning effort).
#[serde(default)]
pub reasoning_level: Option<String>,
/// Optional transport override for providers that support multiple transports.
/// Supported values: "auto", "websocket", "sse".
///
/// Resolution order:
/// 1) `model_routes[].transport` (route-specific)
/// 2) env overrides (`PROVIDER_TRANSPORT`, `ZEROCLAW_PROVIDER_TRANSPORT`, `ZEROCLAW_CODEX_TRANSPORT`)
/// 3) `provider.transport`
/// 4) runtime default (`auto`, WebSocket-first with SSE fallback for OpenAI Codex)
///
/// Note: env overrides replace configured `provider.transport` when set.
///
/// Existing configs that omit `provider.transport` remain valid and fall back to defaults.
#[serde(default)]
pub transport: Option<String>,
}
// ── Delegate Agents ──────────────────────────────────────────────
@ -3195,6 +3209,14 @@ pub struct ModelRouteConfig {
/// Optional API key override for this route's provider
#[serde(default)]
pub api_key: Option<String>,
/// Optional route-specific transport override for this route.
/// Supported values: "auto", "websocket", "sse".
///
/// When `model_routes[].transport` is unset, the route inherits `provider.transport`.
/// If both are unset, runtime defaults are used (`auto` for OpenAI Codex).
/// Existing configs without this field remain valid.
#[serde(default)]
pub transport: Option<String>,
}
// ── Embedding routing ───────────────────────────────────────────
@ -6041,6 +6063,28 @@ impl Config {
}
}
/// Normalize a raw transport override string to one of the canonical
/// identifiers: `auto`, `websocket`, or `sse`.
///
/// Matching is case-insensitive and ignores `-`/`_`; the aliases `ws`
/// (→ `websocket`) and `http` (→ `sse`) are accepted. Returns `None` for
/// unset, blank, or unrecognized input — unrecognized values are logged
/// (tagged with the originating `source`) and otherwise ignored rather
/// than treated as a hard error here; validation rejects them separately.
fn normalize_provider_transport(raw: Option<&str>, source: &str) -> Option<String> {
    // Treat unset and whitespace-only values identically: no override.
    let trimmed = raw.map(str::trim).filter(|v| !v.is_empty())?;
    let canonical = trimmed.to_ascii_lowercase().replace(['-', '_'], "");
    let resolved = match canonical.as_str() {
        "auto" => "auto",
        "websocket" | "ws" => "websocket",
        "sse" | "http" => "sse",
        _ => {
            tracing::warn!(
                transport = %trimmed,
                source,
                "Ignoring invalid provider transport override"
            );
            return None;
        }
    };
    Some(resolved.to_string())
}
/// Resolve provider reasoning level with backward-compatible runtime alias.
///
/// Priority:
@ -6084,6 +6128,16 @@ impl Config {
}
}
/// Resolve the provider transport mode from `provider.transport`.
///
/// Returns the normalized value (`auto`, `websocket`, or `sse`), or `None`
/// when the setting is unset, blank, or invalid.
pub fn effective_provider_transport(&self) -> Option<String> {
    let configured = self.provider.transport.as_deref();
    Self::normalize_provider_transport(configured, "provider.transport")
}
fn lookup_model_provider_profile(
&self,
provider_name: &str,
@ -6447,6 +6501,32 @@ impl Config {
if route.max_tokens == Some(0) {
anyhow::bail!("model_routes[{i}].max_tokens must be greater than 0");
}
if route
.transport
.as_deref()
.is_some_and(|value| !value.trim().is_empty())
&& Self::normalize_provider_transport(
route.transport.as_deref(),
"model_routes[].transport",
)
.is_none()
{
anyhow::bail!("model_routes[{i}].transport must be one of: auto, websocket, sse");
}
}
if self
.provider
.transport
.as_deref()
.is_some_and(|value| !value.trim().is_empty())
&& Self::normalize_provider_transport(
self.provider.transport.as_deref(),
"provider.transport",
)
.is_none()
{
anyhow::bail!("provider.transport must be one of: auto, websocket, sse");
}
if self.provider_api.is_some()
@ -6778,6 +6858,17 @@ impl Config {
}
}
// Provider transport override: ZEROCLAW_PROVIDER_TRANSPORT or PROVIDER_TRANSPORT
if let Ok(transport) = std::env::var("ZEROCLAW_PROVIDER_TRANSPORT")
.or_else(|_| std::env::var("PROVIDER_TRANSPORT"))
{
if let Some(normalized) =
Self::normalize_provider_transport(Some(&transport), "env:provider_transport")
{
self.provider.transport = Some(normalized);
}
}
// Vision support override: ZEROCLAW_MODEL_SUPPORT_VISION or MODEL_SUPPORT_VISION
if let Ok(flag) = std::env::var("ZEROCLAW_MODEL_SUPPORT_VISION")
.or_else(|_| std::env::var("MODEL_SUPPORT_VISION"))
@ -9376,6 +9467,7 @@ provider_api = "not-a-real-mode"
model: "anthropic/claude-sonnet-4.6".to_string(),
max_tokens: Some(0),
api_key: None,
transport: None,
}];
let err = config
@ -9386,6 +9478,48 @@ provider_api = "not-a-real-mode"
.contains("model_routes[0].max_tokens must be greater than 0"));
}
#[test]
async fn provider_transport_normalizes_aliases() {
    // Aliases resolve case-insensitively: "WS" → canonical "websocket".
    let mut cfg = Config::default();
    cfg.provider.transport = Some("WS".into());
    assert_eq!(
        cfg.effective_provider_transport().as_deref(),
        Some("websocket")
    );
}

#[test]
async fn provider_transport_invalid_is_rejected() {
    // Unsupported transport values must fail config validation.
    let mut cfg = Config::default();
    cfg.provider.transport = Some("udp".into());
    let message = cfg
        .validate()
        .expect_err("provider.transport should reject invalid values")
        .to_string();
    assert!(message.contains("provider.transport must be one of: auto, websocket, sse"));
}

#[test]
async fn model_route_transport_invalid_is_rejected() {
    // Route-level transport overrides are validated like provider-level ones,
    // with the route index reported in the error.
    let route = ModelRouteConfig {
        hint: "reasoning".to_string(),
        provider: "openrouter".to_string(),
        model: "anthropic/claude-sonnet-4.6".to_string(),
        max_tokens: None,
        api_key: None,
        transport: Some("udp".to_string()),
    };
    let mut cfg = Config::default();
    cfg.model_routes = vec![route];
    let message = cfg
        .validate()
        .expect_err("model_routes[].transport should reject invalid values")
        .to_string();
    assert!(message.contains("model_routes[0].transport must be one of: auto, websocket, sse"));
}
#[test]
async fn env_override_glm_api_key_for_regional_aliases() {
let _env_guard = env_override_lock().await;
@ -10026,6 +10160,60 @@ default_model = "legacy-model"
std::env::remove_var("ZEROCLAW_REASONING_LEVEL");
}
#[test]
async fn env_override_provider_transport_normalizes_zeroclaw_alias() {
    // Env vars are process-global; hold the shared lock so parallel tests
    // do not observe each other's mutations.
    let _env_guard = env_override_lock().await;
    let mut config = Config::default();
    // Clear the legacy name first so only ZEROCLAW_PROVIDER_TRANSPORT applies.
    std::env::remove_var("PROVIDER_TRANSPORT");
    std::env::set_var("ZEROCLAW_PROVIDER_TRANSPORT", "WS");
    config.apply_env_overrides();
    // "WS" normalizes case-insensitively to the canonical "websocket".
    assert_eq!(config.provider.transport.as_deref(), Some("websocket"));
    std::env::remove_var("ZEROCLAW_PROVIDER_TRANSPORT");
}

#[test]
async fn env_override_provider_transport_normalizes_legacy_alias() {
    let _env_guard = env_override_lock().await;
    let mut config = Config::default();
    // Clear the ZEROCLAW-prefixed name so the legacy PROVIDER_TRANSPORT is used.
    std::env::remove_var("ZEROCLAW_PROVIDER_TRANSPORT");
    std::env::set_var("PROVIDER_TRANSPORT", "HTTP");
    config.apply_env_overrides();
    // "HTTP" is an alias that normalizes to "sse".
    assert_eq!(config.provider.transport.as_deref(), Some("sse"));
    std::env::remove_var("PROVIDER_TRANSPORT");
}

#[test]
async fn env_override_provider_transport_invalid_zeroclaw_does_not_override_existing() {
    let _env_guard = env_override_lock().await;
    let mut config = Config::default();
    // Pre-configure a valid transport; an invalid env value must not clobber it.
    config.provider.transport = Some("sse".to_string());
    std::env::remove_var("PROVIDER_TRANSPORT");
    std::env::set_var("ZEROCLAW_PROVIDER_TRANSPORT", "udp");
    config.apply_env_overrides();
    // Invalid override is ignored; the configured value survives.
    assert_eq!(config.provider.transport.as_deref(), Some("sse"));
    std::env::remove_var("ZEROCLAW_PROVIDER_TRANSPORT");
}

#[test]
async fn env_override_provider_transport_invalid_legacy_does_not_override_existing() {
    let _env_guard = env_override_lock().await;
    let mut config = Config::default();
    // Same guarantee for the legacy PROVIDER_TRANSPORT name.
    config.provider.transport = Some("auto".to_string());
    std::env::remove_var("ZEROCLAW_PROVIDER_TRANSPORT");
    std::env::set_var("PROVIDER_TRANSPORT", "udp");
    config.apply_env_overrides();
    assert_eq!(config.provider.transport.as_deref(), Some("auto"));
    std::env::remove_var("PROVIDER_TRANSPORT");
}
#[test]
async fn env_override_model_support_vision() {
let _env_guard = env_override_lock().await;

View File

@ -1167,6 +1167,7 @@ mod tests {
model: String::new(),
max_tokens: None,
api_key: None,
transport: None,
}];
let mut items = Vec::new();
check_config_semantics(&config, &mut items);

View File

@ -706,7 +706,10 @@ fn restore_masked_sensitive_fields(
restore_optional_secret(&mut incoming.proxy.http_proxy, &current.proxy.http_proxy);
restore_optional_secret(&mut incoming.proxy.https_proxy, &current.proxy.https_proxy);
restore_optional_secret(&mut incoming.proxy.all_proxy, &current.proxy.all_proxy);
restore_optional_secret(&mut incoming.transcription.api_key, &current.transcription.api_key);
restore_optional_secret(
&mut incoming.transcription.api_key,
&current.transcription.api_key,
);
restore_optional_secret(
&mut incoming.browser.computer_use.api_key,
&current.browser.computer_use.api_key,
@ -932,7 +935,10 @@ mod tests {
assert_eq!(hydrated.config_path, current.config_path);
assert_eq!(hydrated.workspace_dir, current.workspace_dir);
assert_eq!(hydrated.api_key, current.api_key);
assert_eq!(hydrated.transcription.api_key, current.transcription.api_key);
assert_eq!(
hydrated.transcription.api_key,
current.transcription.api_key
);
assert_eq!(hydrated.default_model.as_deref(), Some("gpt-4.1-mini"));
assert_eq!(
hydrated.reliability.api_keys,

View File

@ -362,6 +362,7 @@ pub async fn run_gateway(host: &str, port: u16, config: Config) -> Result<()> {
&providers::ProviderRuntimeOptions {
auth_profile_override: None,
provider_api_url: config.api_url.clone(),
provider_transport: config.effective_provider_transport(),
zeroclaw_dir: config.config_path.parent().map(std::path::PathBuf::from),
secrets_encrypt: config.secrets.encrypt,
reasoning_enabled: config.runtime.reasoning_enabled,

View File

@ -95,9 +95,7 @@ pub async fn handle_api_chat(
&& state.webhook_secret_hash.is_none()
&& !peer_addr.ip().is_loopback()
{
tracing::warn!(
"/api/chat: rejected unauthenticated non-loopback request"
);
tracing::warn!("/api/chat: rejected unauthenticated non-loopback request");
let err = serde_json::json!({
"error": "Unauthorized — configure pairing or X-Webhook-Secret for non-local access"
});
@ -152,7 +150,11 @@ pub async fn handle_api_chat(
message.to_string()
} else {
let recent: Vec<&String> = chat_body.context.iter().rev().take(10).rev().collect();
let context_block = recent.iter().map(|s| s.as_str()).collect::<Vec<&str>>().join("\n");
let context_block = recent
.iter()
.map(|s| s.as_str())
.collect::<Vec<&str>>()
.join("\n");
format!(
"Recent conversation context:\n{}\n\nCurrent message:\n{}",
context_block, message
@ -395,7 +397,9 @@ pub async fn handle_v1_chat_completions_with_tools(
.unwrap_or("");
let token = auth.strip_prefix("Bearer ").unwrap_or("");
if !state.pairing.is_authenticated(token) {
tracing::warn!("/v1/chat/completions (compat): rejected — not paired / invalid bearer token");
tracing::warn!(
"/v1/chat/completions (compat): rejected — not paired / invalid bearer token"
);
let err = serde_json::json!({
"error": {
"message": "Invalid API key. Pair first via POST /pair, then use Authorization: Bearer <token>",
@ -481,7 +485,11 @@ pub async fn handle_v1_chat_completions_with_tools(
.rev()
.filter(|m| m.role == "user" || m.role == "assistant")
.map(|m| {
let role_label = if m.role == "user" { "User" } else { "Assistant" };
let role_label = if m.role == "user" {
"User"
} else {
"Assistant"
};
format!("{}: {}", role_label, m.content)
})
.collect();
@ -495,7 +503,11 @@ pub async fn handle_v1_chat_completions_with_tools(
.take(MAX_CONTEXT_MESSAGES)
.rev()
.collect();
let context_block = recent.iter().map(|s| s.as_str()).collect::<Vec<&str>>().join("\n");
let context_block = recent
.iter()
.map(|s| s.as_str())
.collect::<Vec<&str>>()
.join("\n");
format!(
"Recent conversation context:\n{}\n\nCurrent message:\n{}",
context_block, message
@ -617,9 +629,7 @@ pub async fn handle_v1_chat_completions_with_tools(
}
};
let model_name = request
.model
.unwrap_or_else(|| state.model.clone());
let model_name = request.model.unwrap_or_else(|| state.model.clone());
#[allow(clippy::cast_possible_truncation)]
let prompt_tokens = (enriched_message.len() / 4) as u32;
@ -844,14 +854,20 @@ mod tests {
fn api_chat_body_rejects_missing_message() {
let json = r#"{"session_id": "s1"}"#;
let result: Result<ApiChatBody, _> = serde_json::from_str(json);
assert!(result.is_err(), "missing `message` field should fail deserialization");
assert!(
result.is_err(),
"missing `message` field should fail deserialization"
);
}
#[test]
fn oai_request_rejects_empty_messages() {
let json = r#"{"messages": []}"#;
let req: OaiChatRequest = serde_json::from_str(json).unwrap();
assert!(req.messages.is_empty(), "empty messages should parse but be caught by handler");
assert!(
req.messages.is_empty(),
"empty messages should parse but be caught by handler"
);
}
#[test]
@ -892,7 +908,17 @@ mod tests {
.skip(1)
.rev()
.filter(|m| m.role == "user" || m.role == "assistant")
.map(|m| format!("{}: {}", if m.role == "user" { "User" } else { "Assistant" }, m.content))
.map(|m| {
format!(
"{}: {}",
if m.role == "user" {
"User"
} else {
"Assistant"
},
m.content
)
})
.collect();
assert_eq!(context_messages.len(), 2);

View File

@ -57,8 +57,6 @@ pub(crate) mod heartbeat;
pub mod hooks;
pub(crate) mod identity;
// Intentionally unused re-export — public API surface for plugin authors.
#[allow(unused_imports)]
pub(crate) mod plugins;
pub(crate) mod integrations;
pub mod memory;
pub(crate) mod migration;
@ -66,6 +64,8 @@ pub(crate) mod multimodal;
pub mod observability;
pub(crate) mod onboard;
pub mod peripherals;
#[allow(unused_imports)]
pub(crate) mod plugins;
pub mod providers;
pub mod rag;
pub mod runtime;

View File

@ -779,6 +779,7 @@ fn default_model_for_provider(provider: &str) -> String {
"ollama" => "llama3.2".into(),
"llamacpp" => "ggml-org/gpt-oss-20b-GGUF".into(),
"sglang" | "vllm" | "osaurus" => "default".into(),
"copilot" => "default".into(),
"gemini" => "gemini-2.5-pro".into(),
"kimi-code" => "kimi-for-coding".into(),
"bedrock" => "anthropic.claude-sonnet-4-5-20250929-v1:0".into(),
@ -1225,6 +1226,10 @@ fn curated_models_for_provider(provider_name: &str) -> Vec<(String, String)> {
"Gemini 2.5 Flash-Lite (lowest cost)".to_string(),
),
],
"copilot" => vec![(
"default".to_string(),
"Copilot default model (recommended)".to_string(),
)],
_ => vec![("default".to_string(), "Default model".to_string())],
}
}
@ -2213,7 +2218,7 @@ async fn setup_workspace() -> Result<(PathBuf, PathBuf)> {
async fn setup_provider(workspace_dir: &Path) -> Result<(String, String, String, Option<String>)> {
// ── Tier selection ──
let tiers = vec![
"⭐ Recommended (OpenRouter, Venice, Anthropic, OpenAI, Gemini)",
"⭐ Recommended (OpenRouter, Venice, Anthropic, OpenAI, Gemini, GitHub Copilot)",
"⚡ Fast inference (Groq, Fireworks, Together AI, NVIDIA NIM)",
"🌐 Gateway / proxy (Vercel AI, Cloudflare AI, Amazon Bedrock)",
"🔬 Specialized (Moonshot/Kimi, GLM/Zhipu, MiniMax, Qwen/DashScope, Qianfan, Z.AI, Synthetic, OpenCode Zen, Cohere)",
@ -2240,6 +2245,10 @@ async fn setup_provider(workspace_dir: &Path) -> Result<(String, String, String,
"openai-codex",
"OpenAI Codex (ChatGPT subscription OAuth, no API key)",
),
(
"copilot",
"GitHub Copilot — OAuth device flow (Copilot subscription)",
),
("deepseek", "DeepSeek — V3 & R1 (affordable)"),
("mistral", "Mistral — Large & Codestral"),
("xai", "xAI — Grok 3 & 4"),
@ -2536,6 +2545,24 @@ async fn setup_provider(workspace_dir: &Path) -> Result<(String, String, String,
));
}
key
} else if canonical_provider_name(provider_name) == "copilot" {
print_bullet("GitHub Copilot uses GitHub OAuth device flow.");
print_bullet("Press Enter to keep setup keyless and authenticate on first run.");
print_bullet("Optional: paste a GitHub token now to skip the first-run device prompt.");
println!();
let key: String = Input::new()
.with_prompt(" Paste your GitHub token (optional; Enter = device flow)")
.allow_empty(true)
.interact_text()?;
if key.trim().is_empty() {
print_bullet(
"No token provided. ZeroClaw will open the GitHub device login flow on first use.",
);
}
key
} else if canonical_provider_name(provider_name) == "gemini" {
// Special handling for Gemini: check for CLI auth first
@ -6096,8 +6123,19 @@ fn print_summary(config: &Config) {
let mut step = 1u8;
let provider = config.default_provider.as_deref().unwrap_or("openrouter");
let canonical_provider = canonical_provider_name(provider);
if config.api_key.is_none() && !provider_supports_keyless_local_usage(provider) {
if provider == "openai-codex" {
if canonical_provider == "copilot" {
println!(
" {} Authenticate GitHub Copilot:",
style(format!("{step}.")).cyan().bold()
);
println!(" {}", style("zeroclaw agent -m \"Hello!\"").yellow());
println!(
" {}",
style("(device/OAuth prompt appears automatically on first run)").dim()
);
} else if canonical_provider == "openai-codex" {
println!(
" {} Authenticate OpenAI Codex:",
style(format!("{step}.")).cyan().bold()
@ -6106,7 +6144,7 @@ fn print_summary(config: &Config) {
" {}",
style("zeroclaw auth login --provider openai-codex --device-code").yellow()
);
} else if provider == "anthropic" {
} else if canonical_provider == "anthropic" {
println!(
" {} Configure Anthropic auth:",
style(format!("{step}.")).cyan().bold()
@ -7254,6 +7292,7 @@ mod tests {
assert_eq!(default_model_for_provider("zai-cn"), "glm-5");
assert_eq!(default_model_for_provider("gemini"), "gemini-2.5-pro");
assert_eq!(default_model_for_provider("google"), "gemini-2.5-pro");
assert_eq!(default_model_for_provider("copilot"), "default");
assert_eq!(default_model_for_provider("kimi-code"), "kimi-for-coding");
assert_eq!(
default_model_for_provider("bedrock"),
@ -7347,6 +7386,18 @@ mod tests {
assert!(ids.contains(&"gpt-5.2-codex".to_string()));
}
#[test]
fn curated_models_for_copilot_have_default_entry() {
    // Copilot exposes exactly one curated entry: the "default" model.
    let expected = vec![(
        "default".to_string(),
        "Copilot default model (recommended)".to_string(),
    )];
    assert_eq!(curated_models_for_provider("copilot"), expected);
}
#[test]
fn curated_models_for_openrouter_use_valid_anthropic_id() {
let ids: Vec<String> = curated_models_for_provider("openrouter")

View File

@ -5,7 +5,9 @@
use std::path::{Path, PathBuf};
use super::manifest::{load_manifest, ManifestLoadResult, PluginManifest, PLUGIN_MANIFEST_FILENAME};
use super::manifest::{
load_manifest, ManifestLoadResult, PluginManifest, PLUGIN_MANIFEST_FILENAME,
};
use super::registry::{DiagnosticLevel, PluginDiagnostic, PluginOrigin};
/// A discovered plugin before loading.
@ -79,10 +81,7 @@ fn scan_dir(dir: &Path, origin: PluginOrigin) -> (Vec<DiscoveredPlugin>, Vec<Plu
/// 2. Global: `~/.zeroclaw/extensions/`
/// 3. Workspace: `<workspace>/.zeroclaw/extensions/`
/// 4. Extra paths from config `[plugins] load_paths`
pub fn discover_plugins(
workspace_dir: Option<&Path>,
extra_paths: &[PathBuf],
) -> DiscoveryResult {
pub fn discover_plugins(workspace_dir: Option<&Path>, extra_paths: &[PathBuf]) -> DiscoveryResult {
let mut all_plugins = Vec::new();
let mut all_diagnostics = Vec::new();
@ -183,10 +182,7 @@ version = "0.1.0"
make_plugin_dir(&ext_dir, "custom-one");
let result = discover_plugins(None, &[ext_dir]);
assert!(result
.plugins
.iter()
.any(|p| p.manifest.id == "custom-one"));
assert!(result.plugins.iter().any(|p| p.manifest.id == "custom-one"));
}
#[test]

View File

@ -306,7 +306,10 @@ mod tests {
};
let reg = load_plugins(&cfg, None, vec![]);
assert_eq!(reg.active_count(), 0);
assert!(reg.diagnostics.iter().any(|d| d.message.contains("disabled")));
assert!(reg
.diagnostics
.iter()
.any(|d| d.message.contains("disabled")));
}
#[test]

View File

@ -683,6 +683,7 @@ fn zai_base_url(name: &str) -> Option<&'static str> {
pub struct ProviderRuntimeOptions {
pub auth_profile_override: Option<String>,
pub provider_api_url: Option<String>,
pub provider_transport: Option<String>,
pub zeroclaw_dir: Option<PathBuf>,
pub secrets_encrypt: bool,
pub reasoning_enabled: Option<bool>,
@ -697,6 +698,7 @@ impl Default for ProviderRuntimeOptions {
Self {
auth_profile_override: None,
provider_api_url: None,
provider_transport: None,
zeroclaw_dir: None,
secrets_encrypt: true,
reasoning_enabled: None,
@ -1512,7 +1514,15 @@ pub fn create_routed_provider_with_options(
.then_some(api_url)
.flatten();
let route_options = options.clone();
let mut route_options = options.clone();
if let Some(transport) = route
.transport
.as_deref()
.map(str::trim)
.filter(|value| !value.is_empty())
{
route_options.provider_transport = Some(transport.to_string());
}
match create_resilient_provider_with_options(
&route.provider,
@ -1542,19 +1552,8 @@ pub fn create_routed_provider_with_options(
}
}
// Build route table
let routes: Vec<(String, router::Route)> = model_routes
.iter()
.map(|r| {
(
r.hint.clone(),
router::Route {
provider_name: r.provider.clone(),
model: r.model.clone(),
},
)
})
.collect();
// Keep only successfully initialized routed providers and preserve
// their provider-id bindings (e.g. "<provider>#<hint>").
Ok(Box::new(
router::RouterProvider::new(providers, routes, default_model.to_string())
@ -3049,6 +3048,7 @@ mod tests {
model: "anthropic/claude-sonnet-4.6".to_string(),
max_tokens: Some(4096),
api_key: None,
transport: None,
}];
let provider = create_routed_provider_with_options(

View File

@ -4,21 +4,81 @@ use crate::multimodal;
use crate::providers::traits::{ChatMessage, Provider, ProviderCapabilities};
use crate::providers::ProviderRuntimeOptions;
use async_trait::async_trait;
use futures_util::{SinkExt, StreamExt};
use reqwest::Client;
use serde::{Deserialize, Serialize};
use serde_json::Value;
use std::path::PathBuf;
use std::time::Duration;
use tokio::time::timeout;
use tokio_tungstenite::{
connect_async,
tungstenite::{
client::IntoClientRequest,
http::{
header::{AUTHORIZATION, USER_AGENT},
HeaderValue as WsHeaderValue,
},
Message as WsMessage,
},
};
const DEFAULT_CODEX_RESPONSES_URL: &str = "https://chatgpt.com/backend-api/codex/responses";
const CODEX_RESPONSES_URL_ENV: &str = "ZEROCLAW_CODEX_RESPONSES_URL";
const CODEX_BASE_URL_ENV: &str = "ZEROCLAW_CODEX_BASE_URL";
const CODEX_TRANSPORT_ENV: &str = "ZEROCLAW_CODEX_TRANSPORT";
const CODEX_PROVIDER_TRANSPORT_ENV: &str = "ZEROCLAW_PROVIDER_TRANSPORT";
const CODEX_RESPONSES_WEBSOCKET_ENV_LEGACY: &str = "ZEROCLAW_RESPONSES_WEBSOCKET";
const DEFAULT_CODEX_INSTRUCTIONS: &str =
"You are ZeroClaw, a concise and helpful coding assistant.";
const CODEX_WS_CONNECT_TIMEOUT: Duration = Duration::from_secs(20);
const CODEX_WS_SEND_TIMEOUT: Duration = Duration::from_secs(15);
const CODEX_WS_READ_TIMEOUT: Duration = Duration::from_secs(60);
/// Transport selection for the OpenAI Codex responses endpoint.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
enum CodexTransport {
    // Try websocket first, fall back to SSE when the websocket transport is unavailable.
    Auto,
    // Force the websocket transport.
    WebSocket,
    // Force the HTTP SSE transport.
    Sse,
}
/// Failure modes for the websocket transport path.
///
/// The split matters for `CodexTransport::Auto`: a `TransportUnavailable`
/// error triggers an SSE fallback, while a `Stream` error (raised after the
/// websocket was established) is propagated to the caller.
#[derive(Debug)]
enum WebsocketRequestError {
    // Websocket could not be used at all (URL, connect, send, handshake).
    TransportUnavailable(anyhow::Error),
    // Stream was established but failed mid-flight.
    Stream(anyhow::Error),
}
impl WebsocketRequestError {
fn transport_unavailable<E>(error: E) -> Self
where
E: Into<anyhow::Error>,
{
Self::TransportUnavailable(error.into())
}
fn stream<E>(error: E) -> Self
where
E: Into<anyhow::Error>,
{
Self::Stream(error.into())
}
}
impl std::fmt::Display for WebsocketRequestError {
    /// Delegate formatting to the wrapped error. Forwarding via `fmt(f)`
    /// (rather than `write!`) preserves any caller-supplied format flags.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        match self {
            Self::TransportUnavailable(inner) => inner.fmt(f),
            Self::Stream(inner) => inner.fmt(f),
        }
    }
}
impl std::error::Error for WebsocketRequestError {}
pub struct OpenAiCodexProvider {
auth: AuthService,
auth_profile_override: Option<String>,
responses_url: String,
transport: CodexTransport,
custom_endpoint: bool,
gateway_api_key: Option<String>,
reasoning_level: Option<String>,
@ -104,6 +164,7 @@ impl OpenAiCodexProvider {
auth_profile_override: options.auth_profile_override.clone(),
custom_endpoint: !is_default_responses_url(&responses_url),
responses_url,
transport: resolve_transport_mode(options)?,
gateway_api_key: gateway_api_key.map(ToString::to_string),
reasoning_level: normalize_reasoning_level(
options.reasoning_level.as_deref(),
@ -204,6 +265,72 @@ fn first_nonempty(text: Option<&str>) -> Option<String> {
})
}
/// Parse a user-supplied transport override string.
///
/// Returns `Ok(None)` for a missing or blank value, `Ok(Some(mode))` for a
/// recognized value (case-insensitive; `-` and `_` are ignored, so e.g.
/// "Web-Socket" parses), and an error naming `source` for anything else.
fn parse_transport_override(
    raw: Option<&str>,
    source: &str,
) -> anyhow::Result<Option<CodexTransport>> {
    let value = match raw.map(str::trim) {
        None | Some("") => return Ok(None),
        Some(trimmed) => trimmed,
    };
    // Canonicalize: lowercase and drop '-'/'_' separators before matching.
    let normalized: String = value
        .chars()
        .filter(|c| *c != '-' && *c != '_')
        .map(|c| c.to_ascii_lowercase())
        .collect();
    match normalized.as_str() {
        "auto" => Ok(Some(CodexTransport::Auto)),
        "ws" | "websocket" => Ok(Some(CodexTransport::WebSocket)),
        "http" | "sse" => Ok(Some(CodexTransport::Sse)),
        _ => anyhow::bail!(
            "Invalid OpenAI Codex transport override '{value}' from {source}; expected one of: auto, websocket, sse"
        ),
    }
}
/// Interpret the deprecated boolean websocket toggle env value.
///
/// Truthy strings select the websocket transport, falsy strings select SSE,
/// and anything unrecognized yields `None` (the toggle is ignored).
fn parse_legacy_websocket_flag(raw: &str) -> Option<CodexTransport> {
    let flag = raw.trim().to_ascii_lowercase();
    if ["1", "true", "on", "yes"].contains(&flag.as_str()) {
        Some(CodexTransport::WebSocket)
    } else if ["0", "false", "off", "no"].contains(&flag.as_str()) {
        Some(CodexTransport::Sse)
    } else {
        None
    }
}
/// Decide which transport the Codex provider should use.
///
/// Precedence (first match wins):
/// 1. `provider_transport` runtime override in `ProviderRuntimeOptions`
/// 2. `ZEROCLAW_CODEX_TRANSPORT` env var
/// 3. `ZEROCLAW_PROVIDER_TRANSPORT` env var
/// 4. deprecated boolean `ZEROCLAW_RESPONSES_WEBSOCKET` env var (logs a warning)
/// 5. default: `CodexTransport::Auto`
///
/// # Errors
/// Fails when an override is present but not one of auto / websocket / sse
/// (see `parse_transport_override`).
fn resolve_transport_mode(options: &ProviderRuntimeOptions) -> anyhow::Result<CodexTransport> {
    // Explicit runtime override (e.g. from a route config) beats all env vars.
    if let Some(mode) = parse_transport_override(
        options.provider_transport.as_deref(),
        "provider.transport runtime override",
    )? {
        return Ok(mode);
    }
    // Codex-specific env var beats the generic provider-wide one.
    if let Ok(value) = std::env::var(CODEX_TRANSPORT_ENV) {
        if let Some(mode) = parse_transport_override(Some(&value), CODEX_TRANSPORT_ENV)? {
            return Ok(mode);
        }
    }
    if let Ok(value) = std::env::var(CODEX_PROVIDER_TRANSPORT_ENV) {
        if let Some(mode) = parse_transport_override(Some(&value), CODEX_PROVIDER_TRANSPORT_ENV)? {
            return Ok(mode);
        }
    }
    // Legacy boolean toggle: still honored, but deprecated — warn when used.
    if let Some(mode) = std::env::var(CODEX_RESPONSES_WEBSOCKET_ENV_LEGACY)
        .ok()
        .and_then(|value| parse_legacy_websocket_flag(&value))
    {
        tracing::warn!(
            env = CODEX_RESPONSES_WEBSOCKET_ENV_LEGACY,
            "Using deprecated websocket toggle env for OpenAI Codex transport"
        );
        return Ok(mode);
    }
    Ok(CodexTransport::Auto)
}
/// Pick the instructions string for a request: the first non-empty system
/// prompt if one was supplied, otherwise the built-in default instructions.
fn resolve_instructions(system_prompt: Option<&str>) -> String {
    match first_nonempty(system_prompt) {
        Some(prompt) => prompt,
        None => DEFAULT_CODEX_INSTRUCTIONS.to_string(),
    }
}
@ -526,6 +653,283 @@ async fn decode_responses_body(response: reqwest::Response) -> anyhow::Result<St
}
impl OpenAiCodexProvider {
    /// Derive the websocket endpoint from the configured HTTP responses URL.
    ///
    /// Maps https/wss -> wss and http/ws -> ws; any other scheme is rejected.
    /// Appends a `model=<model>` query parameter unless one is already present.
    fn responses_websocket_url(&self, model: &str) -> anyhow::Result<String> {
        let mut url = reqwest::Url::parse(&self.responses_url)?;
        let next_scheme: &'static str = match url.scheme() {
            "https" | "wss" => "wss",
            "http" | "ws" => "ws",
            other => {
                anyhow::bail!(
                    "OpenAI Codex websocket transport does not support URL scheme: {}",
                    other
                );
            }
        };
        // set_scheme returns Err(()) on a disallowed scheme transition; surface it.
        url.set_scheme(next_scheme)
            .map_err(|()| anyhow::anyhow!("failed to set websocket URL scheme"))?;
        // Respect a caller-supplied `model` query param; otherwise add the requested model.
        if !url.query_pairs().any(|(k, _)| k == "model") {
            url.query_pairs_mut().append_pair("model", model);
        }
        Ok(url.into())
    }
    /// Populate auth/identification headers on the websocket upgrade request.
    ///
    /// Mirrors the header set used by the SSE path: bearer auth, the
    /// experimental responses beta flag, originator/user-agent tags, the
    /// optional ChatGPT account id, and — when gateway API-key auth is
    /// active — the x-openai-* passthrough headers.
    ///
    /// # Errors
    /// Fails if any header value contains characters invalid for HTTP headers.
    fn apply_auth_headers_ws(
        &self,
        request: &mut tokio_tungstenite::tungstenite::http::Request<()>,
        bearer_token: &str,
        account_id: Option<&str>,
        access_token: Option<&str>,
        use_gateway_api_key_auth: bool,
    ) -> anyhow::Result<()> {
        let headers = request.headers_mut();
        headers.insert(
            AUTHORIZATION,
            WsHeaderValue::from_str(&format!("Bearer {bearer_token}"))?,
        );
        headers.insert(
            "OpenAI-Beta",
            WsHeaderValue::from_static("responses=experimental"),
        );
        headers.insert("originator", WsHeaderValue::from_static("pi"));
        headers.insert("accept", WsHeaderValue::from_static("text/event-stream"));
        headers.insert(USER_AGENT, WsHeaderValue::from_static("zeroclaw"));
        if let Some(account_id) = account_id {
            headers.insert("chatgpt-account-id", WsHeaderValue::from_str(account_id)?);
        }
        if use_gateway_api_key_auth {
            // Gateway auth puts an API key in the bearer slot; the real ChatGPT
            // credentials travel in the x-openai-* headers instead.
            if let Some(access_token) = access_token {
                headers.insert(
                    "x-openai-access-token",
                    WsHeaderValue::from_str(access_token)?,
                );
            }
            if let Some(account_id) = account_id {
                headers.insert("x-openai-account-id", WsHeaderValue::from_str(account_id)?);
            }
        }
        Ok(())
    }
    /// Issue a responses request over the websocket transport and collect the
    /// final text.
    ///
    /// Connects, sends a single `response.create` payload, then consumes the
    /// event stream: `response.output_text.delta` events are accumulated; the
    /// first other text-bearing event is kept as a fallback; a
    /// `response.completed`/`response.done` event ends the stream (preferring
    /// the full parsed response, then accumulated deltas, then the fallback).
    ///
    /// # Errors
    /// - `TransportUnavailable` for failures before any event arrived
    ///   (URL/connect/send errors and their timeouts) — `Auto` mode falls
    ///   back to SSE on these.
    /// - `Stream` for failures after the stream was established (protocol
    ///   errors, server-reported errors, read timeout with no usable text).
    async fn send_responses_websocket_request(
        &self,
        request: &ResponsesRequest,
        model: &str,
        bearer_token: &str,
        account_id: Option<&str>,
        access_token: Option<&str>,
        use_gateway_api_key_auth: bool,
    ) -> Result<String, WebsocketRequestError> {
        let ws_url = self
            .responses_websocket_url(model)
            .map_err(WebsocketRequestError::transport_unavailable)?;
        let mut ws_request = ws_url.into_client_request().map_err(|error| {
            WebsocketRequestError::transport_unavailable(anyhow::anyhow!(
                "invalid websocket request URL: {error}"
            ))
        })?;
        self.apply_auth_headers_ws(
            &mut ws_request,
            bearer_token,
            account_id,
            access_token,
            use_gateway_api_key_auth,
        )
        .map_err(WebsocketRequestError::transport_unavailable)?;
        // The websocket protocol wraps the request fields in a response.create event.
        let payload = serde_json::json!({
            "type": "response.create",
            "model": &request.model,
            "input": &request.input,
            "instructions": &request.instructions,
            "store": request.store,
            "text": &request.text,
            "reasoning": &request.reasoning,
            "include": &request.include,
            "tool_choice": &request.tool_choice,
            "parallel_tool_calls": request.parallel_tool_calls,
        });
        // Connect and send are bounded by their own timeouts; both phases count
        // as "transport unavailable" so Auto mode can still fall back to SSE.
        let (mut ws_stream, _) = timeout(CODEX_WS_CONNECT_TIMEOUT, connect_async(ws_request))
            .await
            .map_err(|_| {
                WebsocketRequestError::transport_unavailable(anyhow::anyhow!(
                    "OpenAI Codex websocket connect timed out after {}s",
                    CODEX_WS_CONNECT_TIMEOUT.as_secs()
                ))
            })?
            .map_err(WebsocketRequestError::transport_unavailable)?;
        timeout(
            CODEX_WS_SEND_TIMEOUT,
            ws_stream.send(WsMessage::Text(
                serde_json::to_string(&payload)
                    .map_err(WebsocketRequestError::transport_unavailable)?
                    .into(),
            )),
        )
        .await
        .map_err(|_| {
            WebsocketRequestError::transport_unavailable(anyhow::anyhow!(
                "OpenAI Codex websocket send timed out after {}s",
                CODEX_WS_SEND_TIMEOUT.as_secs()
            ))
        })?
        .map_err(WebsocketRequestError::transport_unavailable)?;
        // Stream state: accumulated delta text, a fallback from the first
        // non-delta text event, and whether a read timeout cut the stream short.
        let mut saw_delta = false;
        let mut delta_accumulator = String::new();
        let mut fallback_text: Option<String> = None;
        let mut timed_out = false;
        loop {
            let frame = match timeout(CODEX_WS_READ_TIMEOUT, ws_stream.next()).await {
                Ok(frame) => frame,
                Err(_) => {
                    // Read timeout: if we already have partial text, salvage it
                    // after the loop; otherwise fail the stream outright.
                    let _ = ws_stream.close(None).await;
                    if saw_delta || fallback_text.is_some() {
                        timed_out = true;
                        break;
                    }
                    return Err(WebsocketRequestError::stream(anyhow::anyhow!(
                        "OpenAI Codex websocket stream timed out after {}s waiting for events",
                        CODEX_WS_READ_TIMEOUT.as_secs()
                    )));
                }
            };
            // None means the peer closed the connection.
            let Some(frame) = frame else {
                break;
            };
            let frame = frame.map_err(WebsocketRequestError::stream)?;
            // Decode the frame into a JSON event; answer pings, stop on close,
            // and skip any other non-data frame types.
            let event: Value = match frame {
                WsMessage::Text(text) => {
                    serde_json::from_str(text.as_ref()).map_err(WebsocketRequestError::stream)?
                }
                WsMessage::Binary(binary) => {
                    let text = String::from_utf8(binary.to_vec()).map_err(|error| {
                        WebsocketRequestError::stream(anyhow::anyhow!(
                            "invalid UTF-8 websocket frame from OpenAI Codex: {error}"
                        ))
                    })?;
                    serde_json::from_str(&text).map_err(WebsocketRequestError::stream)?
                }
                WsMessage::Ping(payload) => {
                    ws_stream
                        .send(WsMessage::Pong(payload))
                        .await
                        .map_err(WebsocketRequestError::stream)?;
                    continue;
                }
                WsMessage::Close(_) => break,
                _ => continue,
            };
            // Server-reported errors abort the stream immediately.
            if let Some(message) = extract_stream_error_message(&event) {
                return Err(WebsocketRequestError::stream(anyhow::anyhow!(
                    "OpenAI Codex websocket stream error: {message}"
                )));
            }
            if let Some(text) = extract_stream_event_text(&event, saw_delta) {
                let event_type = event.get("type").and_then(Value::as_str);
                if event_type == Some("response.output_text.delta") {
                    saw_delta = true;
                    delta_accumulator.push_str(&text);
                } else if fallback_text.is_none() {
                    fallback_text = Some(text);
                }
            }
            let event_type = event.get("type").and_then(Value::as_str);
            if event_type == Some("response.completed") || event_type == Some("response.done") {
                // Preference order on completion: full embedded response,
                // then accumulated deltas, then the fallback text.
                if let Some(response_value) = event.get("response").cloned() {
                    if let Ok(parsed) = serde_json::from_value::<ResponsesResponse>(response_value)
                    {
                        if let Some(text) = extract_responses_text(&parsed) {
                            let _ = ws_stream.close(None).await;
                            return Ok(text);
                        }
                    }
                }
                if saw_delta {
                    let _ = ws_stream.close(None).await;
                    return nonempty_preserve(Some(&delta_accumulator)).ok_or_else(|| {
                        WebsocketRequestError::stream(anyhow::anyhow!(
                            "No response from OpenAI Codex"
                        ))
                    });
                }
                if let Some(text) = fallback_text.clone() {
                    let _ = ws_stream.close(None).await;
                    return Ok(text);
                }
            }
        }
        // Stream ended without an explicit completion event: salvage whatever
        // text was collected, otherwise report an empty stream (distinguishing
        // a timeout from a clean close in the message).
        if saw_delta {
            return nonempty_preserve(Some(&delta_accumulator)).ok_or_else(|| {
                WebsocketRequestError::stream(anyhow::anyhow!("No response from OpenAI Codex"))
            });
        }
        if let Some(text) = fallback_text {
            return Ok(text);
        }
        if timed_out {
            return Err(WebsocketRequestError::stream(anyhow::anyhow!(
                "No response from OpenAI Codex websocket stream before timeout"
            )));
        }
        Err(WebsocketRequestError::stream(anyhow::anyhow!(
            "No response from OpenAI Codex websocket stream"
        )))
    }
async fn send_responses_sse_request(
&self,
request: &ResponsesRequest,
bearer_token: &str,
account_id: Option<&str>,
access_token: Option<&str>,
use_gateway_api_key_auth: bool,
) -> anyhow::Result<String> {
let mut request_builder = self
.client
.post(&self.responses_url)
.header("Authorization", format!("Bearer {bearer_token}"))
.header("OpenAI-Beta", "responses=experimental")
.header("originator", "pi")
.header("accept", "text/event-stream")
.header("Content-Type", "application/json");
if let Some(account_id) = account_id {
request_builder = request_builder.header("chatgpt-account-id", account_id);
}
if use_gateway_api_key_auth {
if let Some(access_token) = access_token {
request_builder = request_builder.header("x-openai-access-token", access_token);
}
if let Some(account_id) = account_id {
request_builder = request_builder.header("x-openai-account-id", account_id);
}
}
let response = request_builder.json(request).send().await?;
if !response.status().is_success() {
return Err(super::api_error("OpenAI Codex", response).await);
}
decode_responses_body(response).await
}
async fn send_responses_request(
&self,
input: Vec<ResponsesInput>,
@ -613,35 +1017,59 @@ impl OpenAiCodexProvider {
access_token.as_deref().unwrap_or_default()
};
let mut request_builder = self
.client
.post(&self.responses_url)
.header("Authorization", format!("Bearer {bearer_token}"))
.header("OpenAI-Beta", "responses=experimental")
.header("originator", "pi")
.header("accept", "text/event-stream")
.header("Content-Type", "application/json");
if let Some(account_id) = account_id.as_deref() {
request_builder = request_builder.header("chatgpt-account-id", account_id);
}
if use_gateway_api_key_auth {
if let Some(access_token) = access_token.as_deref() {
request_builder = request_builder.header("x-openai-access-token", access_token);
match self.transport {
CodexTransport::WebSocket => self
.send_responses_websocket_request(
&request,
normalized_model,
bearer_token,
account_id.as_deref(),
access_token.as_deref(),
use_gateway_api_key_auth,
)
.await
.map_err(Into::into),
CodexTransport::Sse => {
self.send_responses_sse_request(
&request,
bearer_token,
account_id.as_deref(),
access_token.as_deref(),
use_gateway_api_key_auth,
)
.await
}
if let Some(account_id) = account_id.as_deref() {
request_builder = request_builder.header("x-openai-account-id", account_id);
CodexTransport::Auto => {
match self
.send_responses_websocket_request(
&request,
normalized_model,
bearer_token,
account_id.as_deref(),
access_token.as_deref(),
use_gateway_api_key_auth,
)
.await
{
Ok(text) => Ok(text),
Err(WebsocketRequestError::TransportUnavailable(error)) => {
tracing::warn!(
error = %error,
"OpenAI Codex websocket request failed; falling back to SSE"
);
self.send_responses_sse_request(
&request,
bearer_token,
account_id.as_deref(),
access_token.as_deref(),
use_gateway_api_key_auth,
)
.await
}
Err(WebsocketRequestError::Stream(error)) => Err(error),
}
}
}
let response = request_builder.json(&request).send().await?;
if !response.status().is_success() {
return Err(super::api_error("OpenAI Codex", response).await);
}
decode_responses_body(response).await
}
}
@ -809,6 +1237,85 @@ mod tests {
);
}
#[test]
fn resolve_transport_mode_defaults_to_auto() {
let _env_lock = env_lock();
let _transport_guard = EnvGuard::set(CODEX_TRANSPORT_ENV, None);
let _legacy_guard = EnvGuard::set(CODEX_RESPONSES_WEBSOCKET_ENV_LEGACY, None);
let _provider_guard = EnvGuard::set("ZEROCLAW_PROVIDER_TRANSPORT", None);
assert_eq!(
resolve_transport_mode(&ProviderRuntimeOptions::default()).unwrap(),
CodexTransport::Auto
);
}
#[test]
fn resolve_transport_mode_accepts_runtime_override() {
let _env_lock = env_lock();
let _transport_guard = EnvGuard::set(CODEX_TRANSPORT_ENV, Some("sse"));
let options = ProviderRuntimeOptions {
provider_transport: Some("websocket".to_string()),
..ProviderRuntimeOptions::default()
};
assert_eq!(
resolve_transport_mode(&options).unwrap(),
CodexTransport::WebSocket
);
}
#[test]
fn resolve_transport_mode_legacy_bool_env_is_supported() {
let _env_lock = env_lock();
let _transport_guard = EnvGuard::set(CODEX_TRANSPORT_ENV, None);
let _provider_guard = EnvGuard::set("ZEROCLAW_PROVIDER_TRANSPORT", None);
let _legacy_guard = EnvGuard::set(CODEX_RESPONSES_WEBSOCKET_ENV_LEGACY, Some("false"));
assert_eq!(
resolve_transport_mode(&ProviderRuntimeOptions::default()).unwrap(),
CodexTransport::Sse
);
}
#[test]
fn resolve_transport_mode_rejects_invalid_runtime_override() {
let _env_lock = env_lock();
let _transport_guard = EnvGuard::set(CODEX_TRANSPORT_ENV, None);
let _provider_guard = EnvGuard::set("ZEROCLAW_PROVIDER_TRANSPORT", None);
let _legacy_guard = EnvGuard::set(CODEX_RESPONSES_WEBSOCKET_ENV_LEGACY, None);
let options = ProviderRuntimeOptions {
provider_transport: Some("udp".to_string()),
..ProviderRuntimeOptions::default()
};
let err =
resolve_transport_mode(&options).expect_err("invalid runtime transport must fail");
assert!(err
.to_string()
.contains("Invalid OpenAI Codex transport override 'udp'"));
}
#[test]
fn websocket_url_uses_ws_scheme_and_model_query() {
let _env_lock = env_lock();
let _endpoint_guard = EnvGuard::set(CODEX_RESPONSES_URL_ENV, None);
let _base_guard = EnvGuard::set(CODEX_BASE_URL_ENV, None);
let options = ProviderRuntimeOptions::default();
let provider = OpenAiCodexProvider::new(&options, None).expect("provider should init");
let ws_url = provider
.responses_websocket_url("gpt-5.3-codex")
.expect("websocket URL should be derived");
assert_eq!(
ws_url,
"wss://chatgpt.com/backend-api/codex/responses?model=gpt-5.3-codex"
);
}
#[test]
fn default_responses_url_detector_handles_equivalent_urls() {
assert!(is_default_responses_url(DEFAULT_CODEX_RESPONSES_URL));
@ -1077,6 +1584,7 @@ data: [DONE]
fn capabilities_includes_vision() {
let options = ProviderRuntimeOptions {
provider_api_url: None,
provider_transport: None,
zeroclaw_dir: None,
secrets_encrypt: false,
auth_profile_override: None,

View File

@ -202,24 +202,23 @@ impl Tool for DocxReadTool {
}
};
let text =
match tokio::task::spawn_blocking(move || extract_docx_text(&bytes)).await {
Ok(Ok(t)) => t,
Ok(Err(e)) => {
return Ok(ToolResult {
success: false,
output: String::new(),
error: Some(format!("DOCX extraction failed: {e}")),
});
}
Err(e) => {
return Ok(ToolResult {
success: false,
output: String::new(),
error: Some(format!("DOCX extraction task panicked: {e}")),
});
}
};
let text = match tokio::task::spawn_blocking(move || extract_docx_text(&bytes)).await {
Ok(Ok(t)) => t,
Ok(Err(e)) => {
return Ok(ToolResult {
success: false,
output: String::new(),
error: Some(format!("DOCX extraction failed: {e}")),
});
}
Err(e) => {
return Ok(ToolResult {
success: false,
output: String::new(),
error: Some(format!("DOCX extraction task panicked: {e}")),
});
}
};
if text.trim().is_empty() {
return Ok(ToolResult {

View File

@ -428,6 +428,7 @@ pub fn all_tools_with_runtime(
let provider_runtime_options = crate::providers::ProviderRuntimeOptions {
auth_profile_override: None,
provider_api_url: root_config.api_url.clone(),
provider_transport: root_config.effective_provider_transport(),
zeroclaw_dir: root_config
.config_path
.parent()

View File

@ -125,6 +125,42 @@ impl ModelRoutingConfigTool {
Ok(output)
}
fn normalize_transport_value(raw: &str, field: &str) -> anyhow::Result<String> {
let normalized = raw.trim().to_ascii_lowercase().replace(['-', '_'], "");
match normalized.as_str() {
"auto" => Ok("auto".to_string()),
"websocket" | "ws" => Ok("websocket".to_string()),
"sse" | "http" => Ok("sse".to_string()),
_ => anyhow::bail!("'{field}' must be one of: auto, websocket, sse"),
}
}
fn parse_optional_transport_update(
args: &Value,
field: &str,
) -> anyhow::Result<MaybeSet<String>> {
let Some(raw) = args.get(field) else {
return Ok(MaybeSet::Unset);
};
if raw.is_null() {
return Ok(MaybeSet::Null);
}
let value = raw
.as_str()
.ok_or_else(|| anyhow::anyhow!("'{field}' must be a string or null"))?
.trim();
if value.is_empty() {
return Ok(MaybeSet::Null);
}
Ok(MaybeSet::Set(Self::normalize_transport_value(
value, field,
)?))
}
fn parse_optional_f64_update(args: &Value, field: &str) -> anyhow::Result<MaybeSet<f64>> {
let Some(raw) = args.get(field) else {
return Ok(MaybeSet::Unset);
@ -217,6 +253,7 @@ impl ModelRoutingConfigTool {
"hint": route.hint,
"provider": route.provider,
"model": route.model,
"transport": route.transport,
"api_key_configured": has_provider_credential(&route.provider, route.api_key.as_deref()),
"classification": classification,
})
@ -429,6 +466,7 @@ impl ModelRoutingConfigTool {
let provider = Self::parse_non_empty_string(args, "provider")?;
let model = Self::parse_non_empty_string(args, "model")?;
let api_key_update = Self::parse_optional_string_update(args, "api_key")?;
let transport_update = Self::parse_optional_transport_update(args, "transport")?;
let keywords_update = if let Some(raw) = args.get("keywords") {
Some(Self::parse_string_list(raw, "keywords")?)
@ -466,6 +504,7 @@ impl ModelRoutingConfigTool {
model: model.clone(),
max_tokens: None,
api_key: None,
transport: None,
});
next_route.hint = hint.clone();
@ -478,6 +517,12 @@ impl ModelRoutingConfigTool {
MaybeSet::Unset => {}
}
match transport_update {
MaybeSet::Set(transport) => next_route.transport = Some(transport),
MaybeSet::Null => next_route.transport = None,
MaybeSet::Unset => {}
}
cfg.model_routes.retain(|route| route.hint != hint);
cfg.model_routes.push(next_route);
Self::normalize_and_sort_routes(&mut cfg.model_routes);
@ -782,6 +827,11 @@ impl Tool for ModelRoutingConfigTool {
"type": ["string", "null"],
"description": "Optional API key override for scenario route or delegate agent"
},
"transport": {
"type": ["string", "null"],
"enum": ["auto", "websocket", "sse", "ws", "http", null],
"description": "Optional route transport override for upsert_scenario (auto, websocket, sse)"
},
"keywords": {
"description": "Classification keywords for upsert_scenario (string or string array)",
"oneOf": [
@ -1004,6 +1054,7 @@ mod tests {
"hint": "coding",
"provider": "openai",
"model": "gpt-5.3-codex",
"transport": "websocket",
"classification_enabled": true,
"keywords": ["code", "bug", "refactor"],
"patterns": ["```"],
@ -1025,9 +1076,58 @@ mod tests {
item["hint"] == json!("coding")
&& item["provider"] == json!("openai")
&& item["model"] == json!("gpt-5.3-codex")
&& item["transport"] == json!("websocket")
}));
}
    #[tokio::test]
    async fn upsert_scenario_transport_alias_is_canonicalized() {
        // Submitting the "WS" alias must be stored canonically as "websocket"
        // in the saved route and reported as such by the "get" action.
        let tmp = TempDir::new().unwrap();
        let tool = ModelRoutingConfigTool::new(test_config(&tmp).await, test_security());
        let result = tool
            .execute(json!({
                "action": "upsert_scenario",
                "hint": "analysis",
                "provider": "openai",
                "model": "gpt-5.3-codex",
                "transport": "WS"
            }))
            .await
            .unwrap();
        assert!(result.success, "{:?}", result.error);

        let get_result = tool.execute(json!({"action": "get"})).await.unwrap();
        assert!(get_result.success);
        let output: Value = serde_json::from_str(&get_result.output).unwrap();
        let scenarios = output["scenarios"].as_array().unwrap();
        assert!(scenarios.iter().any(|item| {
            item["hint"] == json!("analysis") && item["transport"] == json!("websocket")
        }));
    }
    #[tokio::test]
    async fn upsert_scenario_rejects_invalid_transport() {
        // An unrecognized transport value must fail the upsert with the
        // validation message from normalize_transport_value.
        let tmp = TempDir::new().unwrap();
        let tool = ModelRoutingConfigTool::new(test_config(&tmp).await, test_security());
        let result = tool
            .execute(json!({
                "action": "upsert_scenario",
                "hint": "analysis",
                "provider": "openai",
                "model": "gpt-5.3-codex",
                "transport": "udp"
            }))
            .await
            .unwrap();
        assert!(!result.success);
        assert!(result
            .error
            .unwrap_or_default()
            .contains("'transport' must be one of: auto, websocket, sse"));
    }
#[tokio::test]
async fn remove_scenario_also_removes_rule() {
let tmp = TempDir::new().unwrap();
@ -1160,6 +1260,9 @@ mod tests {
.unwrap();
assert_eq!(route["api_key_configured"], json!(true));
assert_eq!(output["agents"]["voice_helper"]["api_key_configured"], json!(true));
assert_eq!(
output["agents"]["voice_helper"]["api_key_configured"],
json!(true)
);
}
}

View File

@ -148,6 +148,7 @@ async fn openai_codex_second_vision_support() -> Result<()> {
let opts = ProviderRuntimeOptions {
auth_profile_override: Some("second".to_string()),
provider_api_url: None,
provider_transport: None,
zeroclaw_dir: None,
secrets_encrypt: false,
reasoning_enabled: None,