feat(providers): forward codex mcp config to responses

This commit is contained in:
argenis de la rosa 2026-03-11 13:35:24 -04:00
parent 8cb38579f9
commit 2edc632c22
3 changed files with 396 additions and 4 deletions

View File

@ -1553,7 +1553,12 @@ impl OpenAiCompatibleProvider {
);
}
let tools = tools.filter(|items| !items.is_empty());
let url = self.responses_url();
let mut merged_tools = tools.unwrap_or_default();
if super::responses_endpoint_supports_codex_mcp(&url) {
merged_tools.extend(super::load_codex_mcp_tools());
}
let tools = (!merged_tools.is_empty()).then_some(merged_tools);
let request = ResponsesRequest {
model: model.to_string(),
input,
@ -1564,8 +1569,6 @@ impl OpenAiCompatibleProvider {
tools,
};
let url = self.responses_url();
let response = self
.apply_auth_header(self.http_client().post(&url).json(&request), credential)
.send()
@ -2701,13 +2704,49 @@ impl Provider for OpenAiCompatibleProvider {
#[cfg(test)]
mod tests {
use super::*;
use crate::providers::{load_codex_mcp_tools, responses_endpoint_supports_codex_mcp};
use std::error::Error as StdError;
use std::fmt;
use std::sync::{Mutex, OnceLock};
use tempfile::tempdir;
/// Test helper: builds an `OpenAiCompatibleProvider` with bearer auth,
/// the only auth style exercised by these tests.
fn make_provider(name: &str, url: &str, key: Option<&str>) -> OpenAiCompatibleProvider {
    OpenAiCompatibleProvider::new(name, url, key, AuthStyle::Bearer)
}
/// RAII guard that sets or clears a process environment variable and
/// restores its previous value (or absence) when dropped.
struct EnvGuard {
    key: &'static str,
    original: Option<String>,
}

impl EnvGuard {
    /// Captures the current value of `key`, then applies `value`
    /// (`Some` sets the variable, `None` removes it).
    fn set(key: &'static str, value: Option<&str>) -> Self {
        let original = std::env::var(key).ok();
        if let Some(next) = value {
            std::env::set_var(key, next);
        } else {
            std::env::remove_var(key);
        }
        Self { key, original }
    }
}

impl Drop for EnvGuard {
    fn drop(&mut self) {
        // Restore the snapshot taken in `set`: re-set the old value if
        // there was one, otherwise make sure the variable is gone.
        match self.original.take() {
            Some(original) => std::env::set_var(self.key, original),
            None => std::env::remove_var(self.key),
        }
    }
}
/// Serializes tests that mutate process-wide environment variables.
/// Environment state is global to the process, so concurrent test threads
/// must take this lock before touching it.
fn env_lock() -> std::sync::MutexGuard<'static, ()> {
    static LOCK: OnceLock<Mutex<()>> = OnceLock::new();
    let lock = LOCK.get_or_init(|| Mutex::new(()));
    lock.lock().expect("env lock poisoned")
}
#[derive(Debug)]
struct NestedTestError {
message: &'static str,
@ -3724,6 +3763,74 @@ mod tests {
assert!(json.contains("\"tool_choice\":\"auto\""));
}
#[test]
fn responses_request_serializes_with_function_and_forwarded_codex_mcp_tools() {
    // Serialize env mutation against the other tests that touch process env.
    let _env_lock = env_lock();
    let temp = tempdir().expect("tempdir should be created");
    let config_path = temp.path().join("config.toml");
    std::fs::write(
        &config_path,
        r#"
[mcp_servers.github]
url = "https://mcp.example.com"
allowed_tools = ["search"]
"#,
    )
    .expect("codex config should be written");
    let config_path_string = config_path.to_string_lossy().into_owned();
    let _config_guard = EnvGuard::set("ZEROCLAW_CODEX_CONFIG_PATH", Some(&config_path_string));
    let provider = OpenAiCompatibleProvider::new_custom_with_mode(
        "Custom",
        "https://api.openai.com/v1",
        Some("test-key"),
        AuthStyle::Bearer,
        true,
        CompatibleApiMode::OpenAiResponses,
        None,
    );
    let url = provider.responses_url();
    // The provider points at api.openai.com, so MCP forwarding must be on.
    // Assert this precondition instead of branching on it: a silent `if`
    // guard would skip the extend() and then fail the len() check below
    // with a confusing message if the host check ever regressed.
    assert!(
        responses_endpoint_supports_codex_mcp(&url),
        "expected {url} to support Codex MCP forwarding"
    );
    let mut merged_tools = vec![serde_json::json!({
        "type": "function",
        "function": {
            "name": "get_weather",
            "description": "Get weather for a location",
            "parameters": {
                "type": "object",
                "properties": {
                    "location": {"type": "string"}
                }
            }
        }
    })];
    merged_tools.extend(load_codex_mcp_tools());
    let request = ResponsesRequest {
        model: "gpt-5.4".to_string(),
        input: vec![ResponsesInput {
            role: "user".to_string(),
            content: "hello".to_string(),
        }],
        instructions: Some("system".to_string()),
        max_output_tokens: None,
        stream: Some(false),
        tools: Some(merged_tools),
        tool_choice: Some("auto".to_string()),
    };
    let json = serde_json::to_value(&request).expect("request should serialize");
    let tools = json["tools"].as_array().expect("tools should serialize");
    // One hand-written function tool plus one forwarded MCP tool.
    assert_eq!(tools.len(), 2);
    assert!(tools.iter().any(|tool| tool["type"] == "function"));
    assert!(tools.iter().any(|tool| {
        tool["type"] == "mcp"
            && tool["server_label"] == "github"
            && tool["server_url"] == "https://mcp.example.com"
    }));
}
#[test]
fn response_with_tool_calls_deserializes() {
let json = r#"{

View File

@ -41,6 +41,9 @@ use crate::auth::AuthService;
use compatible::{AuthStyle, CompatibleApiMode, OpenAiCompatibleProvider};
use reliable::ReliableProvider;
use serde::Deserialize;
use serde_json::{json, Value};
use std::collections::HashMap;
use std::path::Path;
use std::path::PathBuf;
const MAX_API_ERROR_CHARS: usize = 200;
@ -76,6 +79,145 @@ const ZAI_GLOBAL_BASE_URL: &str = "https://api.z.ai/api/coding/paas/v4";
const ZAI_CN_BASE_URL: &str = "https://open.bigmodel.cn/api/coding/paas/v4";
const VERCEL_AI_GATEWAY_BASE_URL: &str = "https://ai-gateway.vercel.sh/v1";
const LITELLM_BASE_URL: &str = "http://localhost:4000/v1";
const CODEX_CONFIG_PATH_ENV: &str = "ZEROCLAW_CODEX_CONFIG_PATH";
/// Minimal projection of Codex's `config.toml`: only the `[mcp_servers.*]`
/// tables are read; all other keys in the file are ignored by serde.
#[derive(Debug, Deserialize, Default)]
struct CodexConfigFile {
    #[serde(default)]
    mcp_servers: HashMap<String, CodexMcpServerEntry>,
}

/// One `[mcp_servers.<name>]` table. Every field is optional/defaulted so
/// partial configs still parse; entries with no remote target (`url`,
/// `server_url`, or `connector_id`) are dropped by
/// `codex_mcp_tool_from_entry`.
#[derive(Debug, Deserialize, Default)]
struct CodexMcpServerEntry {
    /// Remote endpoint; `server_url` takes precedence when both are set.
    #[serde(default)]
    url: Option<String>,
    /// Preferred remote endpoint key.
    #[serde(default)]
    server_url: Option<String>,
    /// Connector identifier; an alternative to a direct server URL.
    #[serde(default)]
    connector_id: Option<String>,
    /// Extra HTTP headers forwarded verbatim (e.g. authorization).
    #[serde(default)]
    headers: HashMap<String, String>,
    /// Optional allow-list restricting which tools the server exposes.
    #[serde(default)]
    allowed_tools: Vec<String>,
    /// Approval policy string forwarded as-is (e.g. "never").
    #[serde(default)]
    require_approval: Option<String>,
    /// Human-readable description forwarded as-is.
    #[serde(default)]
    server_description: Option<String>,
    /// Whether tool loading should be deferred, forwarded as-is.
    #[serde(default)]
    defer_loading: Option<bool>,
}
/// Resolve the Codex config file location.
///
/// A non-empty `ZEROCLAW_CODEX_CONFIG_PATH` override wins; otherwise fall
/// back to `~/.codex/config.toml` when a home directory can be determined.
fn codex_config_path() -> Option<PathBuf> {
    let override_path = std::env::var(CODEX_CONFIG_PATH_ENV)
        .ok()
        .map(|value| value.trim().to_string())
        .filter(|value| !value.is_empty());
    match override_path {
        Some(path) => Some(PathBuf::from(path)),
        None => directories::UserDirs::new()
            .map(|dirs| dirs.home_dir().join(".codex").join("config.toml")),
    }
}
/// Whether Codex MCP servers may be forwarded to this responses endpoint.
///
/// Unparseable URLs are rejected; otherwise the host must exactly equal
/// `api.openai.com` or `chatgpt.com` (case-insensitive, no subdomains).
pub(crate) fn responses_endpoint_supports_codex_mcp(url: &str) -> bool {
    let parsed = match reqwest::Url::parse(url) {
        Ok(parsed) => parsed,
        Err(_) => return false,
    };
    parsed
        .host_str()
        .map(|host| {
            let host = host.to_ascii_lowercase();
            host == "api.openai.com" || host == "chatgpt.com"
        })
        .unwrap_or(false)
}
/// Convert one named Codex MCP server entry into an OpenAI `mcp` tool value.
///
/// Returns `None` for entries that cannot be forwarded: a blank name, or no
/// remote target (neither `connector_id` nor `server_url`/`url`). Optional
/// string fields are trimmed and dropped when empty.
fn codex_mcp_tool_from_entry(name: &str, entry: CodexMcpServerEntry) -> Option<Value> {
    // Normalizes an optional string field: trimmed, empty treated as absent.
    fn non_empty(value: Option<&str>) -> Option<String> {
        value
            .map(str::trim)
            .filter(|value| !value.is_empty())
            .map(str::to_string)
    }

    let server_label = name.trim();
    if server_label.is_empty() {
        return None;
    }

    let connector_id = non_empty(entry.connector_id.as_deref());
    // `server_url` wins over `url` when both keys are present.
    let server_url = non_empty(entry.server_url.as_deref().or(entry.url.as_deref()));
    if connector_id.is_none() && server_url.is_none() {
        // No remote target at all (e.g. a command-based entry): skip it.
        return None;
    }

    let mut tool = json!({
        "type": "mcp",
        "server_label": server_label,
    });
    if let Some(connector_id) = connector_id {
        tool["connector_id"] = Value::String(connector_id);
    }
    if let Some(server_url) = server_url {
        tool["server_url"] = Value::String(server_url);
    }
    if !entry.headers.is_empty() {
        tool["headers"] = serde_json::to_value(entry.headers).ok()?;
    }
    if !entry.allowed_tools.is_empty() {
        tool["allowed_tools"] = serde_json::to_value(entry.allowed_tools).ok()?;
    }
    if let Some(require_approval) = non_empty(entry.require_approval.as_deref()) {
        tool["require_approval"] = Value::String(require_approval);
    }
    if let Some(server_description) = non_empty(entry.server_description.as_deref()) {
        tool["server_description"] = Value::String(server_description);
    }
    if let Some(defer_loading) = entry.defer_loading {
        tool["defer_loading"] = Value::Bool(defer_loading);
    }
    Some(tool)
}
pub(crate) fn load_codex_mcp_tools_from_path(path: &Path) -> Vec<Value> {
let Ok(raw) = std::fs::read_to_string(path) else {
return Vec::new();
};
let Ok(parsed) = toml::from_str::<CodexConfigFile>(&raw) else {
tracing::warn!(
config_path = %path.display(),
"failed to parse Codex config while loading MCP servers"
);
return Vec::new();
};
parsed
.mcp_servers
.into_iter()
.filter_map(|(name, entry)| codex_mcp_tool_from_entry(&name, entry))
.collect()
}
/// Load forwardable MCP tool definitions from the resolved Codex config,
/// or an empty list when no config path can be determined.
pub(crate) fn load_codex_mcp_tools() -> Vec<Value> {
    match codex_config_path() {
        Some(path) => load_codex_mcp_tools_from_path(&path),
        None => Vec::new(),
    }
}
pub(crate) fn is_minimax_intl_alias(name: &str) -> bool {
matches!(
@ -1902,6 +2044,79 @@ mod tests {
assert_eq!(resolved, Some("explicit-key".to_string()));
}
#[test]
fn load_codex_mcp_tools_from_path_parses_remote_servers() {
    // Use tempfile::tempdir() (as the other tests in this change do) so the
    // directory is removed on drop; the previous std::env::temp_dir() +
    // process-id scheme leaked a directory per run.
    let temp = tempfile::tempdir().expect("temp codex dir");
    let config_path = temp.path().join("config.toml");
    std::fs::write(
        &config_path,
        r#"
[mcp_servers.github]
url = "https://mcp.example.com"
allowed_tools = ["search", "fetch"]
require_approval = "never"
server_description = "GitHub MCP"
defer_loading = true

[mcp_servers.github.headers]
Authorization = "Bearer test-token"
"#,
    )
    .expect("write codex config");
    let tools = load_codex_mcp_tools_from_path(&config_path);
    assert_eq!(tools.len(), 1);
    let tool = &tools[0];
    assert_eq!(tool["type"], "mcp");
    assert_eq!(tool["server_label"], "github");
    assert_eq!(tool["server_url"], "https://mcp.example.com");
    assert_eq!(tool["allowed_tools"], json!(["search", "fetch"]));
    assert_eq!(tool["require_approval"], "never");
    assert_eq!(tool["server_description"], "GitHub MCP");
    assert_eq!(tool["defer_loading"], true);
    assert_eq!(tool["headers"]["Authorization"], "Bearer test-token");
}
#[test]
fn load_codex_mcp_tools_from_path_skips_entries_without_remote_target() {
    // tempfile::tempdir() cleans up on drop, unlike the raw
    // std::env::temp_dir() directory this test previously leaked.
    let temp = tempfile::tempdir().expect("temp codex dir");
    let config_path = temp.path().join("config.toml");
    std::fs::write(
        &config_path,
        r#"
[mcp_servers.local_stdio]
command = "npx"
"#,
    )
    .expect("write codex config");
    // A command-only (stdio) entry has no url/server_url/connector_id,
    // so it must not be forwarded as a remote MCP tool.
    let tools = load_codex_mcp_tools_from_path(&config_path);
    assert!(tools.is_empty());
}
#[test]
fn responses_endpoint_supports_codex_mcp_matches_openai_hosts() {
    // Table-driven: first-party OpenAI hosts are allowed, anything else is not.
    let cases = [
        ("https://api.openai.com/v1/responses", true),
        ("https://chatgpt.com/backend-api/codex/responses", true),
        ("https://api.tonsof.blue/v1/responses", false),
    ];
    for (url, expected) in cases {
        assert_eq!(
            responses_endpoint_supports_codex_mcp(url),
            expected,
            "unexpected verdict for {url}"
        );
    }
}
#[test]
fn resolve_provider_credential_uses_minimax_oauth_env_for_placeholder() {
let _env_lock = env_lock();

View File

@ -37,6 +37,8 @@ struct ResponsesRequest {
include: Vec<String>,
tool_choice: String,
parallel_tool_calls: bool,
#[serde(skip_serializing_if = "Vec::is_empty")]
tools: Vec<Value>,
}
#[derive(Debug, Serialize)]
@ -116,7 +118,6 @@ impl OpenAiCodexProvider {
.unwrap_or_else(|_| Client::new()),
})
}
}
fn default_zeroclaw_dir() -> PathBuf {
@ -589,6 +590,11 @@ impl OpenAiCodexProvider {
})?)
};
let normalized_model = normalize_model_id(model);
let mcp_tools = if super::responses_endpoint_supports_codex_mcp(&self.responses_url) {
super::load_codex_mcp_tools()
} else {
Vec::new()
};
let request = ResponsesRequest {
model: normalized_model.to_string(),
@ -606,6 +612,7 @@ impl OpenAiCodexProvider {
include: vec!["reasoning.encrypted_content".to_string()],
tool_choice: "auto".to_string(),
parallel_tool_calls: true,
tools: mcp_tools,
};
let bearer_token = if use_gateway_api_key_auth {
@ -697,7 +704,9 @@ impl Provider for OpenAiCodexProvider {
#[cfg(test)]
mod tests {
use super::*;
use crate::providers::{load_codex_mcp_tools, responses_endpoint_supports_codex_mcp};
use std::sync::{Mutex, OnceLock};
use tempfile::tempdir;
struct EnvGuard {
key: &'static str,
@ -1094,4 +1103,65 @@ data: [DONE]
assert!(!caps.native_tool_calling);
assert!(caps.vision);
}
#[test]
fn responses_request_serializes_forwarded_codex_mcp_tools() {
    // Serialize env mutation against the other env-touching tests.
    let _env_lock = env_lock();
    let temp = tempdir().expect("tempdir should be created");
    let config_path = temp.path().join("config.toml");
    std::fs::write(
        &config_path,
        r#"
[mcp_servers.github]
server_url = "https://mcp.example.com"
allowed_tools = ["search"]
require_approval = "never"
"#,
    )
    .expect("codex config should be written");
    let config_path_string = config_path.to_string_lossy().into_owned();
    let _config_guard = EnvGuard::set("ZEROCLAW_CODEX_CONFIG_PATH", Some(&config_path_string));
    // The default Codex endpoint must allow forwarding. Assert this rather
    // than branching: with the original `if`, a host-check regression left
    // `tools` empty, `skip_serializing_if` dropped the field, and the test
    // failed at the opaque `json["tools"][0]` lookups below instead of here.
    assert!(
        responses_endpoint_supports_codex_mcp(DEFAULT_CODEX_RESPONSES_URL),
        "default Codex responses URL should support MCP forwarding"
    );
    let tools = load_codex_mcp_tools();
    let request = ResponsesRequest {
        model: "gpt-5.4".to_string(),
        input: vec![ResponsesInput {
            role: "user".to_string(),
            content: vec![ResponsesInputContent {
                kind: "input_text".to_string(),
                text: Some("hello".to_string()),
                image_url: None,
            }],
        }],
        instructions: "system".to_string(),
        store: false,
        stream: true,
        text: ResponsesTextOptions {
            verbosity: "medium".to_string(),
        },
        reasoning: ResponsesReasoningOptions {
            effort: "high".to_string(),
            summary: "auto".to_string(),
        },
        include: vec!["reasoning.encrypted_content".to_string()],
        tool_choice: "auto".to_string(),
        parallel_tool_calls: true,
        tools,
    };
    let json = serde_json::to_value(&request).expect("request should serialize");
    assert_eq!(json["tools"][0]["type"], "mcp");
    assert_eq!(json["tools"][0]["server_label"], "github");
    assert_eq!(json["tools"][0]["server_url"], "https://mcp.example.com");
    assert_eq!(
        json["tools"][0]["allowed_tools"],
        serde_json::json!(["search"])
    );
    assert_eq!(json["tools"][0]["require_approval"], "never");
}
}