Merge pull request #2714 from zeroclaw-labs/dev-batch-2682-2679-2669

feat(dev): batch fixes for integrations, audit log, and lmstudio
This commit is contained in:
Argenis 2026-03-05 01:53:55 -05:00 committed by GitHub
commit a331c7341e
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
7 changed files with 707 additions and 8 deletions

View File

@ -63,6 +63,22 @@ credential is not reused for fallback providers.
| `osaurus` | — | Yes | `OSAURUS_API_KEY` (optional; defaults to `"osaurus"`) |
| `nvidia` | `nvidia-nim`, `build.nvidia.com` | No | `NVIDIA_API_KEY` |
### LM Studio Notes
- Provider ID: `lmstudio` (alias: `lm-studio`)
- Default local endpoint: `http://localhost:1234/v1`
- Override endpoint with `api_url` for remote server mode:
```toml
default_provider = "lmstudio"
api_url = "http://10.0.0.20:1234/v1"
default_model = "qwen2.5-coder:7b"
```
- Authentication:
- Optional. If your LM Studio server enforces auth, set `api_key` (or `API_KEY`/`ZEROCLAW_API_KEY`).
- If no key is set, ZeroClaw uses an internal placeholder token for compatibility with OpenAI-style auth headers.
### Vercel AI Gateway Notes
- Provider ID: `vercel` (alias: `vercel-ai`)

View File

@ -8,7 +8,9 @@ use axum::{
http::{header, HeaderMap, StatusCode},
response::{IntoResponse, Json},
};
use serde::Deserialize;
use serde::{Deserialize, Serialize};
use sha2::{Digest, Sha256};
use std::collections::BTreeMap;
const MASKED_SECRET: &str = "***MASKED***";
@ -66,6 +68,380 @@ pub struct CronAddBody {
pub command: String,
}
/// Request body for `PUT /api/integrations/:id/credentials`.
#[derive(Deserialize)]
pub struct IntegrationCredentialsUpdateBody {
    // Optimistic-concurrency token: when present it must match the server's
    // current config revision or the handler rejects the update with 409.
    pub revision: Option<String>,
    // Field key -> new value; `#[serde(default)]` lets clients omit it
    // entirely (treated as an empty map).
    #[serde(default)]
    pub fields: BTreeMap<String, String>,
}
/// One editable credential/config field shown in the dashboard editor.
///
/// This struct is serialized to the dashboard only (never deserialized), so
/// secrets are surfaced via `masked_value` while `current_value` carries
/// non-secret text such as the selected model.
#[derive(Debug, Clone, Serialize)]
struct IntegrationCredentialsField {
    // Stable field key: "api_key", "default_model", or "api_url".
    key: String,
    // Human-readable label rendered by the UI.
    label: String,
    // Whether the integration cannot be considered configured without it.
    required: bool,
    // True when the backing config currently holds a non-empty value.
    has_value: bool,
    // UI widget hint: "secret", "select", or "text".
    input_type: &'static str,
    // Choices for "select" fields; empty for free-form inputs.
    // (A previous `#[serde(default)]` here was removed: `default` only
    // affects Deserialize, and this struct derives Serialize only.)
    options: Vec<String>,
    #[serde(skip_serializing_if = "Option::is_none")]
    current_value: Option<String>,
    #[serde(skip_serializing_if = "Option::is_none")]
    masked_value: Option<String>,
}
/// One dashboard row: an integration's registry metadata plus its editable
/// fields and current configuration state.
#[derive(Debug, Clone, Serialize)]
struct IntegrationSettingsEntry {
    id: String,
    name: String,
    description: String,
    category: crate::integrations::IntegrationCategory,
    status: crate::integrations::IntegrationStatus,
    // True once the integration is the active default provider and (when
    // required) an API key is present — see build_integration_settings_payload.
    configured: bool,
    // Saving credentials for this entry also switches the default provider.
    activates_default_provider: bool,
    fields: Vec<IntegrationCredentialsField>,
}
/// Top-level response for `GET /api/integrations/settings`.
#[derive(Debug, Clone, Serialize)]
struct IntegrationSettingsPayload {
    // Deterministic hash of the current config, used by clients as the
    // optimistic-lock token for subsequent PUTs.
    revision: String,
    #[serde(skip_serializing_if = "Option::is_none")]
    active_default_provider_integration_id: Option<String>,
    integrations: Vec<IntegrationSettingsEntry>,
}
/// Static description of one AI integration editable from the dashboard.
#[derive(Debug, Clone, Copy)]
struct DashboardAiIntegrationSpec {
    // Dashboard-facing identifier (also the URL path segment for updates).
    id: &'static str,
    // Must match the entry name in crate::integrations::registry.
    integration_name: &'static str,
    // Value written to config.default_provider when this spec is saved.
    provider_id: &'static str,
    // Whether "configured" requires a non-empty API key.
    requires_api_key: bool,
    // Whether the spec exposes an editable base-URL field.
    supports_api_url: bool,
    // UI suggestions for the model picker; free-form values still accepted.
    model_options: &'static [&'static str],
}
/// Static catalogue of AI integrations editable from the dashboard.
///
/// `integration_name` joins against the registry in
/// `crate::integrations::registry::all_integrations`; specs without a
/// matching registry entry are skipped when building the settings payload.
/// Non-obvious mappings (e.g. `google` -> provider `gemini`) are reconciled
/// by `provider_alias_matches`.
const DASHBOARD_AI_INTEGRATION_SPECS: &[DashboardAiIntegrationSpec] = &[
    DashboardAiIntegrationSpec {
        id: "openrouter",
        integration_name: "OpenRouter",
        provider_id: "openrouter",
        requires_api_key: true,
        supports_api_url: false,
        model_options: &[
            "anthropic/claude-sonnet-4-6",
            "openai/gpt-5.2",
            "google/gemini-3.1-pro",
        ],
    },
    DashboardAiIntegrationSpec {
        id: "anthropic",
        integration_name: "Anthropic",
        provider_id: "anthropic",
        requires_api_key: true,
        supports_api_url: false,
        model_options: &["claude-sonnet-4-6", "claude-opus-4-6"],
    },
    DashboardAiIntegrationSpec {
        id: "openai",
        integration_name: "OpenAI",
        provider_id: "openai",
        requires_api_key: true,
        supports_api_url: false,
        model_options: &["gpt-5.2", "gpt-5.2-codex", "gpt-4o"],
    },
    // Dashboard id "google" intentionally maps to provider id "gemini";
    // provider_alias_matches accepts "google"/"google-gemini"/"gemini".
    DashboardAiIntegrationSpec {
        id: "google",
        integration_name: "Google",
        provider_id: "gemini",
        requires_api_key: true,
        supports_api_url: false,
        model_options: &["google/gemini-3.1-pro", "google/gemini-3-flash"],
    },
    DashboardAiIntegrationSpec {
        id: "deepseek",
        integration_name: "DeepSeek",
        provider_id: "deepseek",
        requires_api_key: true,
        supports_api_url: false,
        model_options: &["deepseek/deepseek-reasoner", "deepseek/deepseek-chat"],
    },
    DashboardAiIntegrationSpec {
        id: "xai",
        integration_name: "xAI",
        provider_id: "xai",
        requires_api_key: true,
        supports_api_url: false,
        model_options: &["x-ai/grok-4", "x-ai/grok-3"],
    },
    DashboardAiIntegrationSpec {
        id: "mistral",
        integration_name: "Mistral",
        provider_id: "mistral",
        requires_api_key: true,
        supports_api_url: false,
        model_options: &["mistral-large-latest", "codestral-latest"],
    },
    // Ollama is the only local-server spec: no API key required and the
    // base URL is editable (supports_api_url).
    DashboardAiIntegrationSpec {
        id: "ollama",
        integration_name: "Ollama",
        provider_id: "ollama",
        requires_api_key: false,
        supports_api_url: true,
        model_options: &["llama3.2", "qwen2.5-coder:7b", "phi4"],
    },
    DashboardAiIntegrationSpec {
        id: "perplexity",
        integration_name: "Perplexity",
        provider_id: "perplexity",
        requires_api_key: true,
        supports_api_url: false,
        model_options: &["sonar-pro", "sonar-reasoning-pro", "sonar"],
    },
    DashboardAiIntegrationSpec {
        id: "venice",
        integration_name: "Venice",
        provider_id: "venice",
        requires_api_key: true,
        supports_api_url: false,
        model_options: &["zai-org-glm-5", "venice-uncensored"],
    },
    DashboardAiIntegrationSpec {
        id: "vercel",
        integration_name: "Vercel AI",
        provider_id: "vercel",
        requires_api_key: true,
        supports_api_url: false,
        model_options: &[
            "openai/gpt-5.2",
            "anthropic/claude-sonnet-4-6",
            "google/gemini-3.1-pro",
        ],
    },
    DashboardAiIntegrationSpec {
        id: "cloudflare",
        integration_name: "Cloudflare AI",
        provider_id: "cloudflare",
        requires_api_key: true,
        supports_api_url: false,
        model_options: &[
            "@cf/meta/llama-3.3-70b-instruct-fp8-fast",
            "@cf/qwen/qwen3-32b",
        ],
    },
];
/// Look up a dashboard integration spec by id, case-insensitively.
fn find_dashboard_spec(id: &str) -> Option<&'static DashboardAiIntegrationSpec> {
    for candidate in DASHBOARD_AI_INTEGRATION_SPECS {
        if candidate.id.eq_ignore_ascii_case(id) {
            return Some(candidate);
        }
    }
    None
}
/// True when the configured provider string refers to `spec`, accepting
/// the known aliases for google/xai/vercel/cloudflare. Comparison is
/// whitespace-trimmed and ASCII case-insensitive.
fn provider_alias_matches(spec: &DashboardAiIntegrationSpec, provider: &str) -> bool {
    let wanted = provider.trim().to_ascii_lowercase();
    // Specs with aliases list every accepted spelling; all other specs
    // match exactly on their provider id.
    let aliases: &[&str] = match spec.id {
        "google" => &["google", "google-gemini", "gemini"],
        "xai" => &["xai", "grok"],
        "vercel" => &["vercel", "vercel-ai"],
        "cloudflare" => &["cloudflare", "cloudflare-ai"],
        _ => return wanted == spec.provider_id,
    };
    aliases.contains(&wanted.as_str())
}
/// True when the config's default provider names this spec (via any alias).
fn is_spec_active(config: &crate::config::Config, spec: &DashboardAiIntegrationSpec) -> bool {
    match config.default_provider.as_deref() {
        Some(provider) => provider_alias_matches(spec, provider),
        None => false,
    }
}
/// True when `value` holds text that is non-empty after trimming whitespace.
fn has_non_empty(value: Option<&str>) -> bool {
    match value {
        Some(text) => !text.trim().is_empty(),
        None => false,
    }
}
/// Deterministic fingerprint of the whole config, used as the
/// optimistic-lock revision token for dashboard settings updates.
fn config_revision(config: &crate::config::Config) -> String {
    let toml_text = toml::to_string(config).unwrap_or_default();
    format!("{:x}", Sha256::digest(toml_text.as_bytes()))
}
/// Dashboard id of the spec matching the current default provider, if any.
fn active_dashboard_provider_id(config: &crate::config::Config) -> Option<String> {
    DASHBOARD_AI_INTEGRATION_SPECS
        .iter()
        .find(|spec| is_spec_active(config, spec))
        .map(|spec| spec.id.to_string())
}
/// Build the `GET /api/integrations/settings` payload from the live config.
///
/// Walks the static dashboard specs, joins each against the integration
/// registry (specs with no matching registry entry are silently skipped),
/// and reports per-field state. Because the config holds a single global
/// `api_key`/`default_model`/`api_url`, model and URL values are only
/// attributed to the spec that matches the active default provider.
fn build_integration_settings_payload(
    config: &crate::config::Config,
) -> IntegrationSettingsPayload {
    let all_integrations = crate::integrations::registry::all_integrations();
    let mut entries = Vec::new();
    for spec in DASHBOARD_AI_INTEGRATION_SPECS {
        let Some(registry_entry) = all_integrations
            .iter()
            .find(|entry| entry.name == spec.integration_name)
        else {
            continue;
        };
        let status = (registry_entry.status_fn)(config);
        let is_active_provider = is_spec_active(config, spec);
        // NOTE(review): api_key is a single global value, so a key stored
        // for one provider reads as "has_value" on every spec — confirm
        // this is the intended dashboard behavior.
        let has_key = has_non_empty(config.api_key.as_deref());
        let has_model = is_active_provider && has_non_empty(config.default_model.as_deref());
        let has_api_url = is_active_provider && has_non_empty(config.api_url.as_deref());
        let mut fields = vec![
            // Secret field: never echo the raw key, only a fixed mask.
            IntegrationCredentialsField {
                key: "api_key".to_string(),
                label: "API Key".to_string(),
                required: spec.requires_api_key,
                has_value: has_key,
                input_type: "secret",
                options: Vec::new(),
                current_value: None,
                masked_value: has_key.then(|| "••••••••".to_string()),
            },
            // Model picker; current value only shown for the active provider.
            IntegrationCredentialsField {
                key: "default_model".to_string(),
                label: "Default Model".to_string(),
                required: false,
                has_value: has_model,
                input_type: "select",
                options: spec
                    .model_options
                    .iter()
                    .map(|value| (*value).to_string())
                    .collect(),
                current_value: if is_active_provider {
                    config
                        .default_model
                        .as_deref()
                        .filter(|value| !value.trim().is_empty())
                        .map(std::string::ToString::to_string)
                } else {
                    None
                },
                masked_value: None,
            },
        ];
        // Only specs that allow endpoint overrides (e.g. local servers)
        // expose an editable base-URL field.
        if spec.supports_api_url {
            fields.push(IntegrationCredentialsField {
                key: "api_url".to_string(),
                label: "Base URL".to_string(),
                required: false,
                has_value: has_api_url,
                input_type: "text",
                options: Vec::new(),
                current_value: if is_active_provider {
                    config
                        .api_url
                        .as_deref()
                        .filter(|value| !value.trim().is_empty())
                        .map(std::string::ToString::to_string)
                } else {
                    None
                },
                masked_value: None,
            });
        }
        // "Configured" = selected as default provider, plus a key when the
        // spec requires one.
        let configured = if spec.requires_api_key {
            is_active_provider && has_key
        } else {
            is_active_provider
        };
        entries.push(IntegrationSettingsEntry {
            id: spec.id.to_string(),
            name: registry_entry.name.to_string(),
            description: registry_entry.description.to_string(),
            category: registry_entry.category,
            status,
            configured,
            activates_default_provider: true,
            fields,
        });
    }
    IntegrationSettingsPayload {
        revision: config_revision(config),
        active_default_provider_integration_id: active_dashboard_provider_id(config),
        integrations: entries,
    }
}
/// Apply a dashboard credential update for `integration_id` on a copy of
/// `config`, returning the validated new config.
///
/// Errors are strings whose prefixes the HTTP handler maps to status codes
/// ("Unknown integration id:", "Unsupported field", "Invalid integration
/// config update:"); keep those prefixes stable.
fn apply_integration_credentials_update(
    config: &crate::config::Config,
    integration_id: &str,
    fields: &BTreeMap<String, String>,
) -> Result<crate::config::Config, String> {
    let Some(spec) = find_dashboard_spec(integration_id) else {
        return Err(format!("Unknown integration id: {integration_id}"));
    };
    let was_active_provider = is_spec_active(config, spec);
    let mut updated = config.clone();
    for (key, value) in fields {
        // Empty/whitespace values clear the setting; anything else is stored trimmed.
        let trimmed = value.trim();
        let normalized = (!trimmed.is_empty()).then(|| trimmed.to_string());
        match key.as_str() {
            "api_key" => updated.api_key = normalized,
            "default_model" => updated.default_model = normalized,
            "api_url" => {
                if !spec.supports_api_url {
                    return Err(format!(
                        "Integration '{}' does not support api_url",
                        spec.integration_name
                    ));
                }
                updated.api_url = normalized;
            }
            _ => {
                return Err(format!(
                    "Unsupported field '{key}' for integration '{integration_id}'"
                ));
            }
        }
    }
    // Saving an integration always makes it the default provider.
    updated.default_provider = Some(spec.provider_id.to_string());
    if !was_active_provider {
        // Switching providers: seed a sensible model and drop any stale
        // api_url, unless the request explicitly supplied them.
        if !fields.contains_key("default_model") {
            updated.default_model = spec.model_options.first().map(|value| (*value).to_string());
        }
        // When the spec does not support api_url, an explicit "api_url" field
        // has already been rejected above, so this single check replaces the
        // previous pair of redundant branches and covers both cases.
        if !fields.contains_key("api_url") {
            updated.api_url = None;
        }
    }
    updated
        .validate()
        .map_err(|err| format!("Invalid integration config update: {err}"))?;
    Ok(updated)
}
// ── Handlers ────────────────────────────────────────────────────
/// GET /api/status — system status overview
@ -336,6 +712,104 @@ pub async fn handle_api_integrations(
Json(serde_json::json!({"integrations": integrations})).into_response()
}
/// GET /api/integrations/settings — dashboard credential schema + masked state
pub async fn handle_api_integrations_settings(
    State(state): State<AppState>,
    headers: HeaderMap,
) -> impl IntoResponse {
    if let Err(err) = require_auth(&state, &headers) {
        return err.into_response();
    }
    // Clone under the lock so the guard is released before building the payload.
    let snapshot = state.config.lock().clone();
    Json(build_integration_settings_payload(&snapshot)).into_response()
}
/// PUT /api/integrations/:id/credentials — update integration credentials/config
///
/// Flow: auth → optional optimistic-revision check (409 on mismatch) →
/// apply + validate the update → no-op short-circuit → persist → swap the
/// in-memory config. Errors from `apply_integration_credentials_update` are
/// routed to HTTP status codes by string-prefix matching, so the prefixes
/// here must stay in sync with that function.
///
/// NOTE(review): the revision check and the final write are not atomic, so
/// two concurrent PUTs can interleave — presumably acceptable for a
/// single-user dashboard; confirm.
pub async fn handle_api_integration_credentials_put(
    State(state): State<AppState>,
    headers: HeaderMap,
    Path(id): Path<String>,
    Json(body): Json<IntegrationCredentialsUpdateBody>,
) -> impl IntoResponse {
    if let Err(e) = require_auth(&state, &headers) {
        return e.into_response();
    }
    // Clone under the lock so the guard is not held across the `.await` below.
    let current = state.config.lock().clone();
    let current_revision = config_revision(&current);
    // Revision is optional: clients that send one get conflict detection.
    if let Some(revision) = body.revision.as_deref() {
        if revision != current_revision {
            return (
                StatusCode::CONFLICT,
                Json(serde_json::json!({
                    "error": "Integration settings are out of date. Refresh and retry.",
                    "revision": current_revision,
                })),
            )
                .into_response();
        }
    }
    let updated = match apply_integration_credentials_update(&current, &id, &body.fields) {
        Ok(config) => config,
        // Map error-string prefixes to HTTP status codes.
        Err(error) if error.starts_with("Unknown integration id:") => {
            return (
                StatusCode::NOT_FOUND,
                Json(serde_json::json!({ "error": error })),
            )
                .into_response();
        }
        Err(error) if error.starts_with("Unsupported field") => {
            return (
                StatusCode::BAD_REQUEST,
                Json(serde_json::json!({ "error": error })),
            )
                .into_response();
        }
        Err(error) if error.starts_with("Invalid integration config update:") => {
            return (
                StatusCode::BAD_REQUEST,
                Json(serde_json::json!({ "error": error })),
            )
                .into_response();
        }
        Err(error) => {
            return (
                StatusCode::INTERNAL_SERVER_ERROR,
                Json(serde_json::json!({ "error": error })),
            )
                .into_response();
        }
    };
    // No-op updates skip the disk write and report "unchanged".
    let updated_revision = config_revision(&updated);
    if updated_revision == current_revision {
        return Json(serde_json::json!({
            "status": "ok",
            "revision": updated_revision,
            "unchanged": true,
        }))
        .into_response();
    }
    // Persist first; only swap the in-memory config after a successful save.
    if let Err(error) = updated.save().await {
        return (
            StatusCode::INTERNAL_SERVER_ERROR,
            Json(serde_json::json!({"error": format!("Failed to save config: {error}")})),
        )
            .into_response();
    }
    *state.config.lock() = updated;
    Json(serde_json::json!({
        "status": "ok",
        "revision": updated_revision,
    }))
    .into_response()
}
/// POST /api/doctor — run diagnostics
pub async fn handle_api_doctor(
State(state): State<AppState>,
@ -892,6 +1366,7 @@ mod tests {
use crate::config::schema::{
CloudflareTunnelConfig, LarkReceiveMode, NgrokTunnelConfig, WatiConfig,
};
use std::collections::BTreeMap;
#[test]
fn masking_keeps_toml_valid_and_preserves_api_keys_type() {
@ -1141,4 +1616,60 @@ mod tests {
Some("feishu-verify-token")
);
}
#[test]
fn integration_settings_payload_includes_openrouter_and_revision() {
    let payload = build_integration_settings_payload(&crate::config::Config::default());
    assert!(
        !payload.revision.is_empty(),
        "settings payload should include deterministic revision"
    );
    let has_openrouter = payload
        .integrations
        .iter()
        .any(|entry| entry.id == "openrouter" && entry.name == "OpenRouter");
    assert!(
        has_openrouter,
        "dashboard settings payload should expose OpenRouter editor metadata"
    );
}
#[test]
fn apply_integration_credentials_update_switches_provider_with_fallback_model() {
    // Build the starting config in one struct-literal expression instead of
    // mutating a default value field-by-field
    // (clippy: field_reassign_with_default).
    let config = crate::config::Config {
        default_provider: Some("openrouter".to_string()),
        default_model: Some("anthropic/claude-sonnet-4-6".to_string()),
        api_url: Some("https://old.example.com".to_string()),
        ..crate::config::Config::default()
    };
    let updated = apply_integration_credentials_update(&config, "ollama", &BTreeMap::new())
        .expect("ollama update should succeed");
    // Switching providers seeds the spec's first model option.
    assert_eq!(updated.default_provider.as_deref(), Some("ollama"));
    assert_eq!(updated.default_model.as_deref(), Some("llama3.2"));
    assert!(
        updated.api_url.is_none(),
        "switching providers without api_url field should reset stale api_url"
    );
}
#[test]
fn apply_integration_credentials_update_rejects_unknown_fields() {
    let fields: BTreeMap<String, String> = [("unknown".to_string(), "value".to_string())]
        .into_iter()
        .collect();
    let err =
        apply_integration_credentials_update(&crate::config::Config::default(), "openrouter", &fields)
            .expect_err("unknown fields should fail validation");
    assert!(err.contains("Unsupported field 'unknown'"));
}
#[test]
fn config_revision_changes_when_config_changes() {
    let baseline = crate::config::Config::default();
    let initial = config_revision(&baseline);
    let modified = crate::config::Config {
        default_model: Some("gpt-5.2".to_string()),
        ..baseline
    };
    assert_ne!(initial, config_revision(&modified));
}
}

View File

@ -736,6 +736,14 @@ pub async fn run_gateway(host: &str, port: u16, config: Config) -> Result<()> {
.route("/api/cron", post(api::handle_api_cron_add))
.route("/api/cron/{id}", delete(api::handle_api_cron_delete))
.route("/api/integrations", get(api::handle_api_integrations))
.route(
"/api/integrations/settings",
get(api::handle_api_integrations_settings),
)
.route(
"/api/integrations/{id}/credentials",
put(api::handle_api_integration_credentials_put),
)
.route(
"/api/doctor",
get(api::handle_api_doctor).post(api::handle_api_doctor),

View File

@ -290,7 +290,16 @@ pub fn all_integrations() -> Vec<IntegrationEntry> {
name: "LM Studio",
description: "Local model server",
category: IntegrationCategory::AiModel,
status_fn: |_| IntegrationStatus::ComingSoon,
status_fn: |c| {
if c.default_provider.as_deref().is_some_and(|provider| {
provider.eq_ignore_ascii_case("lmstudio")
|| provider.eq_ignore_ascii_case("lm-studio")
}) {
IntegrationStatus::Active
} else {
IntegrationStatus::Available
}
},
},
IntegrationEntry {
name: "Venice",
@ -874,6 +883,38 @@ mod tests {
}
}
#[test]
fn lm_studio_available_when_not_selected_as_default_provider() {
    let entries = all_integrations();
    let lm_studio = entries
        .iter()
        .find(|entry| entry.name == "LM Studio")
        .expect("LM Studio should be registered");
    let status = (lm_studio.status_fn)(&Config::default());
    assert!(matches!(status, IntegrationStatus::Available));
}
#[test]
fn lm_studio_active_for_lmstudio_default_provider_aliases() {
    let entries = all_integrations();
    let lm_studio = entries
        .iter()
        .find(|entry| entry.name == "LM Studio")
        .expect("LM Studio should be registered");
    // Both accepted spellings of the provider id must activate the entry.
    for alias in ["lmstudio", "lm-studio"] {
        let config = Config {
            default_provider: Some(alias.to_string()),
            ..Config::default()
        };
        assert!(
            matches!((lm_studio.status_fn)(&config), IntegrationStatus::Active),
            "alias '{alias}' should mark LM Studio active"
        );
    }
}
#[test]
fn whatsapp_available_when_not_configured() {
let config = Config::default();

View File

@ -1002,6 +1002,20 @@ pub fn create_provider_with_url(
create_provider_with_url_and_options(name, api_key, api_url, &ProviderRuntimeOptions::default())
}
/// Resolve the LM Studio base URL and API key, falling back to the local
/// server default (`http://localhost:1234/v1`) and the conventional
/// placeholder key (`"lm-studio"`) whenever a value is absent or blank.
fn resolve_lmstudio_connection(api_url: Option<&str>, key: Option<&str>) -> (String, String) {
    // Shared fallback logic: use the trimmed value if non-empty, else the default.
    fn or_fallback(value: Option<&str>, fallback: &str) -> String {
        match value.map(str::trim) {
            Some(trimmed) if !trimmed.is_empty() => trimmed.to_string(),
            _ => fallback.to_string(),
        }
    }
    (
        or_fallback(api_url, "http://localhost:1234/v1"),
        or_fallback(key, "lm-studio"),
    )
}
/// Factory: create provider with optional base URL and runtime options.
#[allow(clippy::too_many_lines)]
fn create_provider_with_url_and_options(
@ -1200,14 +1214,11 @@ fn create_provider_with_url_and_options(
))),
"copilot" | "github-copilot" => Ok(Box::new(copilot::CopilotProvider::new(key))),
"lmstudio" | "lm-studio" => {
let lm_studio_key = key
.map(str::trim)
.filter(|value| !value.is_empty())
.unwrap_or("lm-studio");
let (base_url, lm_studio_key) = resolve_lmstudio_connection(api_url, key);
Ok(Box::new(OpenAiCompatibleProvider::new(
"LM Studio",
"http://localhost:1234/v1",
Some(lm_studio_key),
&base_url,
Some(&lm_studio_key),
AuthStyle::Bearer,
)))
}
@ -2323,6 +2334,37 @@ mod tests {
assert!(create_provider("lmstudio", None).is_ok());
}
#[test]
fn lmstudio_connection_prefers_custom_base_url() {
    let resolved =
        resolve_lmstudio_connection(Some("http://10.0.0.15:1234/v1"), Some("custom-key"));
    assert_eq!(
        resolved,
        (
            "http://10.0.0.15:1234/v1".to_string(),
            "custom-key".to_string()
        )
    );
}
#[test]
fn lmstudio_connection_uses_safe_defaults_when_unset() {
    // A whitespace-only URL and a missing key both fall back to defaults.
    let (base_url, key) = resolve_lmstudio_connection(Some(" "), None);
    assert_eq!(
        (base_url.as_str(), key.as_str()),
        ("http://localhost:1234/v1", "lm-studio")
    );
}
#[test]
fn factory_lmstudio_with_custom_url() {
    // Both provider-id spellings must build with a custom endpoint,
    // with or without an explicit key.
    let cases = [
        ("lmstudio", Some("key"), Some("http://10.0.0.22:1234/v1")),
        ("lm-studio", None, Some("http://host.docker.internal:1234")),
    ];
    for (name, key, url) in cases {
        assert!(
            create_provider_with_url(name, key, url).is_ok(),
            "provider '{name}' should build with a custom url"
        );
    }
}
#[test]
fn factory_llamacpp() {
assert!(create_provider("llamacpp", Some("key")).is_ok());

View File

@ -166,6 +166,9 @@ impl AuditLogger {
/// Create a new audit logger
pub fn new(config: AuditConfig, zeroclaw_dir: PathBuf) -> Result<Self> {
let log_path = zeroclaw_dir.join(&config.log_path);
if config.enabled {
initialize_audit_log_file(&log_path)?;
}
Ok(Self {
log_path,
config,
@ -182,6 +185,8 @@ impl AuditLogger {
// Check log size and rotate if needed
self.rotate_if_needed()?;
initialize_audit_log_file(&self.log_path)?;
// Serialize and write
let line = serde_json::to_string(event)?;
let mut file = OpenOptions::new()
@ -258,6 +263,20 @@ impl AuditLogger {
}
}
/// Ensure the audit log file and its parent directories exist.
///
/// Opening in create+append mode touches the file without truncating any
/// existing entries; the returned handle is discarded immediately.
fn initialize_audit_log_file(log_path: &std::path::Path) -> Result<()> {
    // A bare filename has an empty parent component — skip mkdir in that case.
    match log_path.parent() {
        Some(parent) if !parent.as_os_str().is_empty() => std::fs::create_dir_all(parent)?,
        _ => {}
    }
    OpenOptions::new().create(true).append(true).open(log_path)?;
    Ok(())
}
#[cfg(test)]
mod tests {
use super::*;
@ -333,6 +352,39 @@ mod tests {
Ok(())
}
#[test]
fn audit_logger_enabled_creates_file_on_init() -> Result<()> {
    let tmp = TempDir::new()?;
    let enabled_config = AuditConfig {
        enabled: true,
        ..Default::default()
    };
    let _logger = AuditLogger::new(enabled_config, tmp.path().to_path_buf())?;
    let expected_path = tmp.path().join("audit.log");
    assert!(
        expected_path.exists(),
        "audit log file should be created when audit logging is enabled"
    );
    Ok(())
}
#[test]
fn audit_logger_enabled_creates_parent_directories() -> Result<()> {
    let tmp = TempDir::new()?;
    // Configure a nested log path so initialization must mkdir the chain.
    let nested_config = AuditConfig {
        enabled: true,
        log_path: "logs/security/audit.log".to_string(),
        ..Default::default()
    };
    let _logger = AuditLogger::new(nested_config, tmp.path().to_path_buf())?;
    let expected_path = tmp.path().join("logs/security/audit.log");
    assert!(
        expected_path.exists(),
        "audit logger should create nested directories for configured log path"
    );
    Ok(())
}
// ── §8.1 Log rotation tests ─────────────────────────────
#[tokio::test]

View File

@ -60,6 +60,15 @@ export async function apiFetch<T = unknown>(
return undefined as unknown as T;
}
const contentType = response.headers.get('content-type')?.toLowerCase() ?? '';
if (!contentType.includes('application/json')) {
const text = await response.text().catch(() => '');
const preview = text.trim().slice(0, 120);
throw new Error(
`API ${response.status}: expected JSON response, got ${contentType || 'unknown content type'}${preview ? ` (${preview})` : ''}`,
);
}
return response.json() as Promise<T>;
}