Compare commits
1 Commits
master
...
work/micro
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
542f8f94f1 |
@ -12,15 +12,16 @@ pub use schema::{
|
|||||||
ElevenLabsTtsConfig, EmbeddingRouteConfig, EstopConfig, FeishuConfig, GatewayConfig,
|
ElevenLabsTtsConfig, EmbeddingRouteConfig, EstopConfig, FeishuConfig, GatewayConfig,
|
||||||
GoogleTtsConfig, HardwareConfig, HardwareTransport, HeartbeatConfig, HooksConfig,
|
GoogleTtsConfig, HardwareConfig, HardwareTransport, HeartbeatConfig, HooksConfig,
|
||||||
HttpRequestConfig, IMessageConfig, IdentityConfig, LarkConfig, MatrixConfig, McpConfig,
|
HttpRequestConfig, IMessageConfig, IdentityConfig, LarkConfig, MatrixConfig, McpConfig,
|
||||||
McpServerConfig, McpTransport, MemoryConfig, ModelRouteConfig, MultimodalConfig,
|
McpServerConfig, McpTransport, MemoryConfig, Microsoft365Config, ModelRouteConfig,
|
||||||
NextcloudTalkConfig, NodesConfig, NotionConfig, ObservabilityConfig, OpenAiTtsConfig,
|
MultimodalConfig, NextcloudTalkConfig, NodesConfig, NotionConfig, ObservabilityConfig,
|
||||||
OpenVpnTunnelConfig, OtpConfig, OtpMethod, PeripheralBoardConfig, PeripheralsConfig,
|
OpenAiTtsConfig, OpenVpnTunnelConfig, OtpConfig, OtpMethod, PeripheralBoardConfig,
|
||||||
ProxyConfig, ProxyScope, QdrantConfig, QueryClassificationConfig, ReliabilityConfig,
|
PeripheralsConfig, ProxyConfig, ProxyScope, QdrantConfig, QueryClassificationConfig,
|
||||||
ResourceLimitsConfig, RuntimeConfig, SandboxBackend, SandboxConfig, SchedulerConfig,
|
ReliabilityConfig, ResourceLimitsConfig, RuntimeConfig, SandboxBackend, SandboxConfig,
|
||||||
SecretsConfig, SecurityConfig, SkillsConfig, SkillsPromptInjectionMode, SlackConfig,
|
SchedulerConfig, SecretsConfig, SecurityConfig, SkillsConfig, SkillsPromptInjectionMode,
|
||||||
StorageConfig, StorageProviderConfig, StorageProviderSection, StreamMode, SwarmConfig,
|
SlackConfig, StorageConfig, StorageProviderConfig, StorageProviderSection, StreamMode,
|
||||||
SwarmStrategy, TelegramConfig, ToolFilterGroup, ToolFilterGroupMode, TranscriptionConfig,
|
SwarmConfig, SwarmStrategy, TelegramConfig, ToolFilterGroup, ToolFilterGroupMode,
|
||||||
TtsConfig, TunnelConfig, WebFetchConfig, WebSearchConfig, WebhookConfig, WorkspaceConfig,
|
TranscriptionConfig, TtsConfig, TunnelConfig, WebFetchConfig, WebSearchConfig, WebhookConfig,
|
||||||
|
WorkspaceConfig,
|
||||||
};
|
};
|
||||||
|
|
||||||
pub fn name_and_presence<T: traits::ChannelConfig>(channel: Option<&T>) -> (&'static str, bool) {
|
pub fn name_and_presence<T: traits::ChannelConfig>(channel: Option<&T>) -> (&'static str, bool) {
|
||||||
|
|||||||
@ -188,6 +188,10 @@ pub struct Config {
|
|||||||
#[serde(default)]
|
#[serde(default)]
|
||||||
pub composio: ComposioConfig,
|
pub composio: ComposioConfig,
|
||||||
|
|
||||||
|
/// Microsoft 365 Graph API integration (`[microsoft365]`).
|
||||||
|
#[serde(default)]
|
||||||
|
pub microsoft365: Microsoft365Config,
|
||||||
|
|
||||||
/// Secrets encryption configuration (`[secrets]`).
|
/// Secrets encryption configuration (`[secrets]`).
|
||||||
#[serde(default)]
|
#[serde(default)]
|
||||||
pub secrets: SecretsConfig,
|
pub secrets: SecretsConfig,
|
||||||
@ -1380,6 +1384,78 @@ impl Default for ComposioConfig {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// ── Microsoft 365 (Graph API integration) ───────────────────────
|
||||||
|
|
||||||
|
/// Microsoft 365 integration via Microsoft Graph API (`[microsoft365]` section).
|
||||||
|
///
|
||||||
|
/// Provides access to Outlook mail, Teams messages, Calendar events,
|
||||||
|
/// OneDrive files, and SharePoint search.
|
||||||
|
#[derive(Clone, Serialize, Deserialize, JsonSchema)]
|
||||||
|
pub struct Microsoft365Config {
|
||||||
|
/// Enable Microsoft 365 integration
|
||||||
|
#[serde(default, alias = "enable")]
|
||||||
|
pub enabled: bool,
|
||||||
|
/// Azure AD tenant ID
|
||||||
|
#[serde(default)]
|
||||||
|
pub tenant_id: Option<String>,
|
||||||
|
/// Azure AD application (client) ID
|
||||||
|
#[serde(default)]
|
||||||
|
pub client_id: Option<String>,
|
||||||
|
/// Azure AD client secret (stored encrypted when secrets.encrypt = true)
|
||||||
|
#[serde(default)]
|
||||||
|
pub client_secret: Option<String>,
|
||||||
|
/// Authentication flow: "client_credentials" or "device_code"
|
||||||
|
#[serde(default = "default_ms365_auth_flow")]
|
||||||
|
pub auth_flow: String,
|
||||||
|
/// OAuth scopes to request
|
||||||
|
#[serde(default = "default_ms365_scopes")]
|
||||||
|
pub scopes: Vec<String>,
|
||||||
|
/// Encrypt the token cache file on disk
|
||||||
|
#[serde(default = "default_true")]
|
||||||
|
pub token_cache_encrypted: bool,
|
||||||
|
/// User principal name or "me" (for delegated flows)
|
||||||
|
#[serde(default)]
|
||||||
|
pub user_id: Option<String>,
|
||||||
|
}
|
||||||
|
|
||||||
|
fn default_ms365_auth_flow() -> String {
|
||||||
|
"client_credentials".to_string()
|
||||||
|
}
|
||||||
|
|
||||||
|
fn default_ms365_scopes() -> Vec<String> {
|
||||||
|
vec!["https://graph.microsoft.com/.default".to_string()]
|
||||||
|
}
|
||||||
|
|
||||||
|
impl std::fmt::Debug for Microsoft365Config {
|
||||||
|
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||||
|
f.debug_struct("Microsoft365Config")
|
||||||
|
.field("enabled", &self.enabled)
|
||||||
|
.field("tenant_id", &self.tenant_id)
|
||||||
|
.field("client_id", &self.client_id)
|
||||||
|
.field("client_secret", &self.client_secret.as_ref().map(|_| "***"))
|
||||||
|
.field("auth_flow", &self.auth_flow)
|
||||||
|
.field("scopes", &self.scopes)
|
||||||
|
.field("token_cache_encrypted", &self.token_cache_encrypted)
|
||||||
|
.field("user_id", &self.user_id)
|
||||||
|
.finish()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Default for Microsoft365Config {
|
||||||
|
fn default() -> Self {
|
||||||
|
Self {
|
||||||
|
enabled: false,
|
||||||
|
tenant_id: None,
|
||||||
|
client_id: None,
|
||||||
|
client_secret: None,
|
||||||
|
auth_flow: default_ms365_auth_flow(),
|
||||||
|
scopes: default_ms365_scopes(),
|
||||||
|
token_cache_encrypted: true,
|
||||||
|
user_id: None,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
// ── Secrets (encrypted credential store) ────────────────────────
|
// ── Secrets (encrypted credential store) ────────────────────────
|
||||||
|
|
||||||
/// Secrets encryption configuration (`[secrets]` section).
|
/// Secrets encryption configuration (`[secrets]` section).
|
||||||
@ -4549,6 +4625,7 @@ impl Default for Config {
|
|||||||
tunnel: TunnelConfig::default(),
|
tunnel: TunnelConfig::default(),
|
||||||
gateway: GatewayConfig::default(),
|
gateway: GatewayConfig::default(),
|
||||||
composio: ComposioConfig::default(),
|
composio: ComposioConfig::default(),
|
||||||
|
microsoft365: Microsoft365Config::default(),
|
||||||
secrets: SecretsConfig::default(),
|
secrets: SecretsConfig::default(),
|
||||||
browser: BrowserConfig::default(),
|
browser: BrowserConfig::default(),
|
||||||
http_request: HttpRequestConfig::default(),
|
http_request: HttpRequestConfig::default(),
|
||||||
@ -5045,6 +5122,11 @@ impl Config {
|
|||||||
&mut config.composio.api_key,
|
&mut config.composio.api_key,
|
||||||
"config.composio.api_key",
|
"config.composio.api_key",
|
||||||
)?;
|
)?;
|
||||||
|
decrypt_optional_secret(
|
||||||
|
&store,
|
||||||
|
&mut config.microsoft365.client_secret,
|
||||||
|
"config.microsoft365.client_secret",
|
||||||
|
)?;
|
||||||
|
|
||||||
decrypt_optional_secret(
|
decrypt_optional_secret(
|
||||||
&store,
|
&store,
|
||||||
@ -5619,6 +5701,88 @@ impl Config {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// Microsoft 365
|
||||||
|
if self.microsoft365.enabled {
|
||||||
|
let tenant = self
|
||||||
|
.microsoft365
|
||||||
|
.tenant_id
|
||||||
|
.as_deref()
|
||||||
|
.map(str::trim)
|
||||||
|
.filter(|s| !s.is_empty());
|
||||||
|
if tenant.is_none() {
|
||||||
|
anyhow::bail!(
|
||||||
|
"microsoft365.tenant_id must not be empty when microsoft365 is enabled"
|
||||||
|
);
|
||||||
|
}
|
||||||
|
let client = self
|
||||||
|
.microsoft365
|
||||||
|
.client_id
|
||||||
|
.as_deref()
|
||||||
|
.map(str::trim)
|
||||||
|
.filter(|s| !s.is_empty());
|
||||||
|
if client.is_none() {
|
||||||
|
anyhow::bail!(
|
||||||
|
"microsoft365.client_id must not be empty when microsoft365 is enabled"
|
||||||
|
);
|
||||||
|
}
|
||||||
|
let flow = self.microsoft365.auth_flow.trim();
|
||||||
|
if flow != "client_credentials" && flow != "device_code" {
|
||||||
|
anyhow::bail!(
|
||||||
|
"microsoft365.auth_flow must be 'client_credentials' or 'device_code'"
|
||||||
|
);
|
||||||
|
}
|
||||||
|
if flow == "client_credentials"
|
||||||
|
&& self
|
||||||
|
.microsoft365
|
||||||
|
.client_secret
|
||||||
|
.as_deref()
|
||||||
|
.map_or(true, |s| s.trim().is_empty())
|
||||||
|
{
|
||||||
|
anyhow::bail!(
|
||||||
|
"microsoft365.client_secret must not be empty when auth_flow is 'client_credentials'"
|
||||||
|
);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Microsoft 365
|
||||||
|
if self.microsoft365.enabled {
|
||||||
|
let tenant = self
|
||||||
|
.microsoft365
|
||||||
|
.tenant_id
|
||||||
|
.as_deref()
|
||||||
|
.map(str::trim)
|
||||||
|
.filter(|s| !s.is_empty());
|
||||||
|
if tenant.is_none() {
|
||||||
|
anyhow::bail!(
|
||||||
|
"microsoft365.tenant_id must not be empty when microsoft365 is enabled"
|
||||||
|
);
|
||||||
|
}
|
||||||
|
let client = self
|
||||||
|
.microsoft365
|
||||||
|
.client_id
|
||||||
|
.as_deref()
|
||||||
|
.map(str::trim)
|
||||||
|
.filter(|s| !s.is_empty());
|
||||||
|
if client.is_none() {
|
||||||
|
anyhow::bail!(
|
||||||
|
"microsoft365.client_id must not be empty when microsoft365 is enabled"
|
||||||
|
);
|
||||||
|
}
|
||||||
|
let flow = self.microsoft365.auth_flow.trim();
|
||||||
|
if flow != "client_credentials" && flow != "device_code" {
|
||||||
|
anyhow::bail!("microsoft365.auth_flow must be client_credentials or device_code");
|
||||||
|
}
|
||||||
|
if flow == "client_credentials"
|
||||||
|
&& self
|
||||||
|
.microsoft365
|
||||||
|
.client_secret
|
||||||
|
.as_deref()
|
||||||
|
.map_or(true, |s| s.trim().is_empty())
|
||||||
|
{
|
||||||
|
anyhow::bail!("microsoft365.client_secret must not be empty when auth_flow is client_credentials");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
// MCP
|
// MCP
|
||||||
if self.mcp.enabled {
|
if self.mcp.enabled {
|
||||||
validate_mcp_config(&self.mcp)?;
|
validate_mcp_config(&self.mcp)?;
|
||||||
@ -6020,6 +6184,11 @@ impl Config {
|
|||||||
&mut config_to_save.composio.api_key,
|
&mut config_to_save.composio.api_key,
|
||||||
"config.composio.api_key",
|
"config.composio.api_key",
|
||||||
)?;
|
)?;
|
||||||
|
encrypt_optional_secret(
|
||||||
|
&store,
|
||||||
|
&mut config_to_save.microsoft365.client_secret,
|
||||||
|
"config.microsoft365.client_secret",
|
||||||
|
)?;
|
||||||
|
|
||||||
encrypt_optional_secret(
|
encrypt_optional_secret(
|
||||||
&store,
|
&store,
|
||||||
@ -6724,6 +6893,7 @@ default_temperature = 0.7
|
|||||||
tunnel: TunnelConfig::default(),
|
tunnel: TunnelConfig::default(),
|
||||||
gateway: GatewayConfig::default(),
|
gateway: GatewayConfig::default(),
|
||||||
composio: ComposioConfig::default(),
|
composio: ComposioConfig::default(),
|
||||||
|
microsoft365: Microsoft365Config::default(),
|
||||||
secrets: SecretsConfig::default(),
|
secrets: SecretsConfig::default(),
|
||||||
browser: BrowserConfig::default(),
|
browser: BrowserConfig::default(),
|
||||||
http_request: HttpRequestConfig::default(),
|
http_request: HttpRequestConfig::default(),
|
||||||
@ -7018,6 +7188,7 @@ tool_dispatcher = "xml"
|
|||||||
tunnel: TunnelConfig::default(),
|
tunnel: TunnelConfig::default(),
|
||||||
gateway: GatewayConfig::default(),
|
gateway: GatewayConfig::default(),
|
||||||
composio: ComposioConfig::default(),
|
composio: ComposioConfig::default(),
|
||||||
|
microsoft365: Microsoft365Config::default(),
|
||||||
secrets: SecretsConfig::default(),
|
secrets: SecretsConfig::default(),
|
||||||
browser: BrowserConfig::default(),
|
browser: BrowserConfig::default(),
|
||||||
http_request: HttpRequestConfig::default(),
|
http_request: HttpRequestConfig::default(),
|
||||||
|
|||||||
@ -77,7 +77,7 @@ pub async fn run(config: Config, host: String, port: u16) -> Result<()> {
|
|||||||
max_backoff,
|
max_backoff,
|
||||||
move || {
|
move || {
|
||||||
let cfg = channels_cfg.clone();
|
let cfg = channels_cfg.clone();
|
||||||
async move { crate::channels::start_channels(cfg).await }
|
async move { Box::pin(crate::channels::start_channels(cfg)).await }
|
||||||
},
|
},
|
||||||
));
|
));
|
||||||
} else {
|
} else {
|
||||||
|
|||||||
@ -844,7 +844,7 @@ async fn main() -> Result<()> {
|
|||||||
|
|
||||||
// Auto-start channels if user said yes during wizard
|
// Auto-start channels if user said yes during wizard
|
||||||
if std::env::var("ZEROCLAW_AUTOSTART_CHANNELS").as_deref() == Ok("1") {
|
if std::env::var("ZEROCLAW_AUTOSTART_CHANNELS").as_deref() == Ok("1") {
|
||||||
channels::start_channels(config).await?;
|
Box::pin(channels::start_channels(config)).await?;
|
||||||
}
|
}
|
||||||
return Ok(());
|
return Ok(());
|
||||||
}
|
}
|
||||||
@ -1189,7 +1189,7 @@ async fn main() -> Result<()> {
|
|||||||
},
|
},
|
||||||
|
|
||||||
Commands::Channel { channel_command } => match channel_command {
|
Commands::Channel { channel_command } => match channel_command {
|
||||||
ChannelCommands::Start => channels::start_channels(config).await,
|
ChannelCommands::Start => Box::pin(channels::start_channels(config)).await,
|
||||||
ChannelCommands::Doctor => channels::doctor_channels(config).await,
|
ChannelCommands::Doctor => channels::doctor_channels(config).await,
|
||||||
other => channels::handle_command(other, &config).await,
|
other => channels::handle_command(other, &config).await,
|
||||||
},
|
},
|
||||||
|
|||||||
@ -159,6 +159,7 @@ pub async fn run_wizard(force: bool) -> Result<Config> {
|
|||||||
tunnel: tunnel_config,
|
tunnel: tunnel_config,
|
||||||
gateway: crate::config::GatewayConfig::default(),
|
gateway: crate::config::GatewayConfig::default(),
|
||||||
composio: composio_config,
|
composio: composio_config,
|
||||||
|
microsoft365: crate::config::Microsoft365Config::default(),
|
||||||
secrets: secrets_config,
|
secrets: secrets_config,
|
||||||
browser: BrowserConfig::default(),
|
browser: BrowserConfig::default(),
|
||||||
http_request: crate::config::HttpRequestConfig::default(),
|
http_request: crate::config::HttpRequestConfig::default(),
|
||||||
@ -519,6 +520,7 @@ async fn run_quick_setup_with_home(
|
|||||||
tunnel: crate::config::TunnelConfig::default(),
|
tunnel: crate::config::TunnelConfig::default(),
|
||||||
gateway: crate::config::GatewayConfig::default(),
|
gateway: crate::config::GatewayConfig::default(),
|
||||||
composio: ComposioConfig::default(),
|
composio: ComposioConfig::default(),
|
||||||
|
microsoft365: crate::config::Microsoft365Config::default(),
|
||||||
secrets: SecretsConfig::default(),
|
secrets: SecretsConfig::default(),
|
||||||
browser: BrowserConfig::default(),
|
browser: BrowserConfig::default(),
|
||||||
http_request: crate::config::HttpRequestConfig::default(),
|
http_request: crate::config::HttpRequestConfig::default(),
|
||||||
|
|||||||
400
src/tools/microsoft365/auth.rs
Normal file
400
src/tools/microsoft365/auth.rs
Normal file
@ -0,0 +1,400 @@
|
|||||||
|
use anyhow::Context;
|
||||||
|
use parking_lot::RwLock;
|
||||||
|
use serde::{Deserialize, Serialize};
|
||||||
|
use std::collections::hash_map::DefaultHasher;
|
||||||
|
use std::hash::{Hash, Hasher};
|
||||||
|
use std::path::PathBuf;
|
||||||
|
use tokio::sync::Mutex;
|
||||||
|
|
||||||
|
/// Cached OAuth2 token state persisted to disk between runs.
|
||||||
|
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||||
|
pub struct CachedTokenState {
|
||||||
|
pub access_token: String,
|
||||||
|
pub refresh_token: Option<String>,
|
||||||
|
/// Unix timestamp (seconds) when the access token expires.
|
||||||
|
pub expires_at: i64,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl CachedTokenState {
|
||||||
|
/// Returns `true` when the token is expired or will expire within 60 seconds.
|
||||||
|
pub fn is_expired(&self) -> bool {
|
||||||
|
let now = chrono::Utc::now().timestamp();
|
||||||
|
self.expires_at <= now + 60
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Thread-safe token cache with disk persistence.
|
||||||
|
pub struct TokenCache {
|
||||||
|
inner: RwLock<Option<CachedTokenState>>,
|
||||||
|
/// Serialises the slow acquire/refresh path so only one caller performs the
|
||||||
|
/// network round-trip while others wait and then read the updated cache.
|
||||||
|
acquire_lock: Mutex<()>,
|
||||||
|
config: super::types::Microsoft365ResolvedConfig,
|
||||||
|
cache_path: PathBuf,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl TokenCache {
|
||||||
|
pub fn new(
|
||||||
|
config: super::types::Microsoft365ResolvedConfig,
|
||||||
|
zeroclaw_dir: &std::path::Path,
|
||||||
|
) -> anyhow::Result<Self> {
|
||||||
|
if config.token_cache_encrypted {
|
||||||
|
anyhow::bail!(
|
||||||
|
"microsoft365: token_cache_encrypted is enabled but encryption is not yet \
|
||||||
|
implemented; refusing to store tokens in plaintext. Set token_cache_encrypted \
|
||||||
|
to false or wait for encryption support."
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Scope cache file to (tenant_id, client_id, auth_flow) so config
|
||||||
|
// changes never reuse tokens from a different account/flow.
|
||||||
|
let mut hasher = DefaultHasher::new();
|
||||||
|
config.tenant_id.hash(&mut hasher);
|
||||||
|
config.client_id.hash(&mut hasher);
|
||||||
|
config.auth_flow.hash(&mut hasher);
|
||||||
|
let fingerprint = format!("{:016x}", hasher.finish());
|
||||||
|
|
||||||
|
let cache_path = zeroclaw_dir.join(format!("ms365_token_cache_{fingerprint}.json"));
|
||||||
|
let cached = Self::load_from_disk(&cache_path);
|
||||||
|
Ok(Self {
|
||||||
|
inner: RwLock::new(cached),
|
||||||
|
acquire_lock: Mutex::new(()),
|
||||||
|
config,
|
||||||
|
cache_path,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Get a valid access token, refreshing or re-authenticating as needed.
|
||||||
|
pub async fn get_token(&self, client: &reqwest::Client) -> anyhow::Result<String> {
|
||||||
|
// Fast path: cached and not expired.
|
||||||
|
{
|
||||||
|
let guard = self.inner.read();
|
||||||
|
if let Some(ref state) = *guard {
|
||||||
|
if !state.is_expired() {
|
||||||
|
return Ok(state.access_token.clone());
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Slow path: serialise through a mutex so only one caller performs the
|
||||||
|
// network round-trip while concurrent callers wait and re-check.
|
||||||
|
let _lock = self.acquire_lock.lock().await;
|
||||||
|
|
||||||
|
// Re-check after acquiring the lock — another caller may have refreshed
|
||||||
|
// while we were waiting.
|
||||||
|
{
|
||||||
|
let guard = self.inner.read();
|
||||||
|
if let Some(ref state) = *guard {
|
||||||
|
if !state.is_expired() {
|
||||||
|
return Ok(state.access_token.clone());
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
let new_state = self.acquire_token(client).await?;
|
||||||
|
let token = new_state.access_token.clone();
|
||||||
|
self.persist_to_disk(&new_state);
|
||||||
|
*self.inner.write() = Some(new_state);
|
||||||
|
Ok(token)
|
||||||
|
}
|
||||||
|
|
||||||
|
async fn acquire_token(&self, client: &reqwest::Client) -> anyhow::Result<CachedTokenState> {
|
||||||
|
// Try refresh first if we have a refresh token and the flow supports it.
|
||||||
|
// Client credentials flow does not issue refresh tokens, so skip the
|
||||||
|
// attempt entirely to avoid a wasted round-trip.
|
||||||
|
if self.config.auth_flow.as_str() != "client_credentials" {
|
||||||
|
// Clone the token out so the RwLock guard is dropped before the await.
|
||||||
|
let refresh_token_copy = {
|
||||||
|
let guard = self.inner.read();
|
||||||
|
guard.as_ref().and_then(|state| state.refresh_token.clone())
|
||||||
|
};
|
||||||
|
if let Some(refresh_tok) = refresh_token_copy {
|
||||||
|
match self.refresh_token(client, &refresh_tok).await {
|
||||||
|
Ok(new_state) => return Ok(new_state),
|
||||||
|
Err(e) => {
|
||||||
|
tracing::debug!("ms365: refresh token failed, re-authenticating: {e}");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
match self.config.auth_flow.as_str() {
|
||||||
|
"client_credentials" => self.client_credentials_flow(client).await,
|
||||||
|
"device_code" => self.device_code_flow(client).await,
|
||||||
|
other => anyhow::bail!("Unsupported auth flow: {other}"),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
async fn client_credentials_flow(
|
||||||
|
&self,
|
||||||
|
client: &reqwest::Client,
|
||||||
|
) -> anyhow::Result<CachedTokenState> {
|
||||||
|
let client_secret = self
|
||||||
|
.config
|
||||||
|
.client_secret
|
||||||
|
.as_deref()
|
||||||
|
.context("client_credentials flow requires client_secret")?;
|
||||||
|
|
||||||
|
let token_url = format!(
|
||||||
|
"https://login.microsoftonline.com/{}/oauth2/v2.0/token",
|
||||||
|
self.config.tenant_id
|
||||||
|
);
|
||||||
|
|
||||||
|
let scope = self.config.scopes.join(" ");
|
||||||
|
|
||||||
|
let resp = client
|
||||||
|
.post(&token_url)
|
||||||
|
.form(&[
|
||||||
|
("grant_type", "client_credentials"),
|
||||||
|
("client_id", &self.config.client_id),
|
||||||
|
("client_secret", client_secret),
|
||||||
|
("scope", &scope),
|
||||||
|
])
|
||||||
|
.send()
|
||||||
|
.await
|
||||||
|
.context("ms365: failed to request client_credentials token")?;
|
||||||
|
|
||||||
|
if !resp.status().is_success() {
|
||||||
|
let status = resp.status();
|
||||||
|
let body = resp.text().await.unwrap_or_default();
|
||||||
|
tracing::debug!("ms365: client_credentials raw OAuth error: {body}");
|
||||||
|
anyhow::bail!("ms365: client_credentials token request failed ({status})");
|
||||||
|
}
|
||||||
|
|
||||||
|
let token_resp: TokenResponse = resp
|
||||||
|
.json()
|
||||||
|
.await
|
||||||
|
.context("ms365: failed to parse token response")?;
|
||||||
|
|
||||||
|
Ok(CachedTokenState {
|
||||||
|
access_token: token_resp.access_token,
|
||||||
|
refresh_token: token_resp.refresh_token,
|
||||||
|
expires_at: chrono::Utc::now().timestamp() + token_resp.expires_in,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
async fn device_code_flow(&self, client: &reqwest::Client) -> anyhow::Result<CachedTokenState> {
|
||||||
|
let device_code_url = format!(
|
||||||
|
"https://login.microsoftonline.com/{}/oauth2/v2.0/devicecode",
|
||||||
|
self.config.tenant_id
|
||||||
|
);
|
||||||
|
let scope = self.config.scopes.join(" ");
|
||||||
|
|
||||||
|
let resp = client
|
||||||
|
.post(&device_code_url)
|
||||||
|
.form(&[
|
||||||
|
("client_id", self.config.client_id.as_str()),
|
||||||
|
("scope", &scope),
|
||||||
|
])
|
||||||
|
.send()
|
||||||
|
.await
|
||||||
|
.context("ms365: failed to request device code")?;
|
||||||
|
|
||||||
|
if !resp.status().is_success() {
|
||||||
|
let status = resp.status();
|
||||||
|
let body = resp.text().await.unwrap_or_default();
|
||||||
|
tracing::debug!("ms365: device_code initiation raw error: {body}");
|
||||||
|
anyhow::bail!("ms365: device code request failed ({status})");
|
||||||
|
}
|
||||||
|
|
||||||
|
let device_resp: DeviceCodeResponse = resp
|
||||||
|
.json()
|
||||||
|
.await
|
||||||
|
.context("ms365: failed to parse device code response")?;
|
||||||
|
|
||||||
|
// Log only a generic prompt; the full device_resp.message may contain
|
||||||
|
// sensitive verification URIs or codes that should not appear in logs.
|
||||||
|
tracing::info!(
|
||||||
|
"ms365: device code auth required — follow the instructions shown to the user"
|
||||||
|
);
|
||||||
|
// Print the user-facing message to stderr so the operator can act on it
|
||||||
|
// without it being captured in structured log sinks.
|
||||||
|
eprintln!("ms365: {}", device_resp.message);
|
||||||
|
|
||||||
|
let token_url = format!(
|
||||||
|
"https://login.microsoftonline.com/{}/oauth2/v2.0/token",
|
||||||
|
self.config.tenant_id
|
||||||
|
);
|
||||||
|
|
||||||
|
let interval = device_resp.interval.max(5);
|
||||||
|
let max_polls = u32::try_from(
|
||||||
|
(device_resp.expires_in / i64::try_from(interval).unwrap_or(i64::MAX)).max(1),
|
||||||
|
)
|
||||||
|
.unwrap_or(u32::MAX);
|
||||||
|
|
||||||
|
for _ in 0..max_polls {
|
||||||
|
tokio::time::sleep(std::time::Duration::from_secs(interval)).await;
|
||||||
|
|
||||||
|
let poll_resp = client
|
||||||
|
.post(&token_url)
|
||||||
|
.form(&[
|
||||||
|
("grant_type", "urn:ietf:params:oauth:grant-type:device_code"),
|
||||||
|
("client_id", self.config.client_id.as_str()),
|
||||||
|
("device_code", &device_resp.device_code),
|
||||||
|
])
|
||||||
|
.send()
|
||||||
|
.await
|
||||||
|
.context("ms365: failed to poll device code token")?;
|
||||||
|
|
||||||
|
if poll_resp.status().is_success() {
|
||||||
|
let token_resp: TokenResponse = poll_resp
|
||||||
|
.json()
|
||||||
|
.await
|
||||||
|
.context("ms365: failed to parse token response")?;
|
||||||
|
return Ok(CachedTokenState {
|
||||||
|
access_token: token_resp.access_token,
|
||||||
|
refresh_token: token_resp.refresh_token,
|
||||||
|
expires_at: chrono::Utc::now().timestamp() + token_resp.expires_in,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
let body = poll_resp.text().await.unwrap_or_default();
|
||||||
|
if body.contains("authorization_pending") {
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
if body.contains("slow_down") {
|
||||||
|
tokio::time::sleep(std::time::Duration::from_secs(5)).await;
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
tracing::debug!("ms365: device code polling raw error: {body}");
|
||||||
|
anyhow::bail!("ms365: device code polling failed");
|
||||||
|
}
|
||||||
|
|
||||||
|
anyhow::bail!("ms365: device code flow timed out waiting for user authorization")
|
||||||
|
}
|
||||||
|
|
||||||
|
async fn refresh_token(
|
||||||
|
&self,
|
||||||
|
client: &reqwest::Client,
|
||||||
|
refresh_token: &str,
|
||||||
|
) -> anyhow::Result<CachedTokenState> {
|
||||||
|
let token_url = format!(
|
||||||
|
"https://login.microsoftonline.com/{}/oauth2/v2.0/token",
|
||||||
|
self.config.tenant_id
|
||||||
|
);
|
||||||
|
|
||||||
|
let mut params = vec![
|
||||||
|
("grant_type", "refresh_token"),
|
||||||
|
("client_id", self.config.client_id.as_str()),
|
||||||
|
("refresh_token", refresh_token),
|
||||||
|
];
|
||||||
|
|
||||||
|
let secret_ref;
|
||||||
|
if let Some(ref secret) = self.config.client_secret {
|
||||||
|
secret_ref = secret.as_str();
|
||||||
|
params.push(("client_secret", secret_ref));
|
||||||
|
}
|
||||||
|
|
||||||
|
let resp = client
|
||||||
|
.post(&token_url)
|
||||||
|
.form(¶ms)
|
||||||
|
.send()
|
||||||
|
.await
|
||||||
|
.context("ms365: failed to refresh token")?;
|
||||||
|
|
||||||
|
if !resp.status().is_success() {
|
||||||
|
let status = resp.status();
|
||||||
|
let body = resp.text().await.unwrap_or_default();
|
||||||
|
tracing::debug!("ms365: token refresh raw error: {body}");
|
||||||
|
anyhow::bail!("ms365: token refresh failed ({status})");
|
||||||
|
}
|
||||||
|
|
||||||
|
let token_resp: TokenResponse = resp
|
||||||
|
.json()
|
||||||
|
.await
|
||||||
|
.context("ms365: failed to parse refresh token response")?;
|
||||||
|
|
||||||
|
Ok(CachedTokenState {
|
||||||
|
access_token: token_resp.access_token,
|
||||||
|
refresh_token: token_resp
|
||||||
|
.refresh_token
|
||||||
|
.or_else(|| Some(refresh_token.to_string())),
|
||||||
|
expires_at: chrono::Utc::now().timestamp() + token_resp.expires_in,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
fn load_from_disk(path: &std::path::Path) -> Option<CachedTokenState> {
|
||||||
|
let data = std::fs::read_to_string(path).ok()?;
|
||||||
|
serde_json::from_str(&data).ok()
|
||||||
|
}
|
||||||
|
|
||||||
|
fn persist_to_disk(&self, state: &CachedTokenState) {
|
||||||
|
if let Ok(json) = serde_json::to_string_pretty(state) {
|
||||||
|
if let Err(e) = std::fs::write(&self.cache_path, json) {
|
||||||
|
tracing::warn!("ms365: failed to persist token cache: {e}");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Deserialize)]
|
||||||
|
struct TokenResponse {
|
||||||
|
access_token: String,
|
||||||
|
#[serde(default)]
|
||||||
|
refresh_token: Option<String>,
|
||||||
|
#[serde(default = "default_expires_in")]
|
||||||
|
expires_in: i64,
|
||||||
|
}
|
||||||
|
|
||||||
|
fn default_expires_in() -> i64 {
|
||||||
|
3600
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Deserialize)]
|
||||||
|
struct DeviceCodeResponse {
|
||||||
|
device_code: String,
|
||||||
|
message: String,
|
||||||
|
#[serde(default = "default_device_interval")]
|
||||||
|
interval: u64,
|
||||||
|
#[serde(default = "default_device_expires_in")]
|
||||||
|
expires_in: i64,
|
||||||
|
}
|
||||||
|
|
||||||
|
fn default_device_interval() -> u64 {
|
||||||
|
5
|
||||||
|
}
|
||||||
|
|
||||||
|
fn default_device_expires_in() -> i64 {
|
||||||
|
900
|
||||||
|
}
|
||||||
|
|
||||||
|
#[cfg(test)]
|
||||||
|
mod tests {
|
||||||
|
use super::*;
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn token_is_expired_when_past_deadline() {
|
||||||
|
let state = CachedTokenState {
|
||||||
|
access_token: "test".into(),
|
||||||
|
refresh_token: None,
|
||||||
|
expires_at: chrono::Utc::now().timestamp() - 10,
|
||||||
|
};
|
||||||
|
assert!(state.is_expired());
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn token_is_expired_within_buffer() {
|
||||||
|
let state = CachedTokenState {
|
||||||
|
access_token: "test".into(),
|
||||||
|
refresh_token: None,
|
||||||
|
expires_at: chrono::Utc::now().timestamp() + 30,
|
||||||
|
};
|
||||||
|
assert!(state.is_expired());
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn token_is_valid_when_far_from_expiry() {
|
||||||
|
let state = CachedTokenState {
|
||||||
|
access_token: "test".into(),
|
||||||
|
refresh_token: None,
|
||||||
|
expires_at: chrono::Utc::now().timestamp() + 3600,
|
||||||
|
};
|
||||||
|
assert!(!state.is_expired());
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn load_from_disk_returns_none_for_missing_file() {
|
||||||
|
let path = std::path::Path::new("/nonexistent/ms365_token_cache.json");
|
||||||
|
assert!(TokenCache::load_from_disk(path).is_none());
|
||||||
|
}
|
||||||
|
}
|
||||||
495
src/tools/microsoft365/graph_client.rs
Normal file
495
src/tools/microsoft365/graph_client.rs
Normal file
@ -0,0 +1,495 @@
|
|||||||
|
use anyhow::Context;
|
||||||
|
|
||||||
|
const GRAPH_BASE: &str = "https://graph.microsoft.com/v1.0";
|
||||||
|
|
||||||
|
/// Build the user path segment: `/me` or `/users/{user_id}`.
|
||||||
|
/// The user_id is percent-encoded to prevent path-traversal attacks.
|
||||||
|
fn user_path(user_id: &str) -> String {
|
||||||
|
if user_id == "me" {
|
||||||
|
"/me".to_string()
|
||||||
|
} else {
|
||||||
|
format!("/users/{}", urlencoding::encode(user_id))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Percent-encode a single path segment to prevent path-traversal attacks.
|
||||||
|
fn encode_path_segment(segment: &str) -> String {
|
||||||
|
urlencoding::encode(segment).into_owned()
|
||||||
|
}
|
||||||
|
|
||||||
|
/// List mail messages for a user.
|
||||||
|
pub async fn mail_list(
|
||||||
|
client: &reqwest::Client,
|
||||||
|
token: &str,
|
||||||
|
user_id: &str,
|
||||||
|
folder: Option<&str>,
|
||||||
|
top: u32,
|
||||||
|
) -> anyhow::Result<serde_json::Value> {
|
||||||
|
let base = user_path(user_id);
|
||||||
|
let path = match folder {
|
||||||
|
Some(f) => format!(
|
||||||
|
"{GRAPH_BASE}{base}/mailFolders/{}/messages",
|
||||||
|
encode_path_segment(f)
|
||||||
|
),
|
||||||
|
None => format!("{GRAPH_BASE}{base}/messages"),
|
||||||
|
};
|
||||||
|
|
||||||
|
let resp = client
|
||||||
|
.get(&path)
|
||||||
|
.bearer_auth(token)
|
||||||
|
.query(&[("$top", top.to_string())])
|
||||||
|
.send()
|
||||||
|
.await
|
||||||
|
.context("ms365: mail_list request failed")?;
|
||||||
|
|
||||||
|
handle_json_response(resp, "mail_list").await
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Send a mail message.
|
||||||
|
pub async fn mail_send(
|
||||||
|
client: &reqwest::Client,
|
||||||
|
token: &str,
|
||||||
|
user_id: &str,
|
||||||
|
to: &[String],
|
||||||
|
subject: &str,
|
||||||
|
body: &str,
|
||||||
|
) -> anyhow::Result<()> {
|
||||||
|
let base = user_path(user_id);
|
||||||
|
let url = format!("{GRAPH_BASE}{base}/sendMail");
|
||||||
|
|
||||||
|
let to_recipients: Vec<serde_json::Value> = to
|
||||||
|
.iter()
|
||||||
|
.map(|addr| {
|
||||||
|
serde_json::json!({
|
||||||
|
"emailAddress": { "address": addr }
|
||||||
|
})
|
||||||
|
})
|
||||||
|
.collect();
|
||||||
|
|
||||||
|
let payload = serde_json::json!({
|
||||||
|
"message": {
|
||||||
|
"subject": subject,
|
||||||
|
"body": {
|
||||||
|
"contentType": "Text",
|
||||||
|
"content": body
|
||||||
|
},
|
||||||
|
"toRecipients": to_recipients
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
let resp = client
|
||||||
|
.post(&url)
|
||||||
|
.bearer_auth(token)
|
||||||
|
.json(&payload)
|
||||||
|
.send()
|
||||||
|
.await
|
||||||
|
.context("ms365: mail_send request failed")?;
|
||||||
|
|
||||||
|
if !resp.status().is_success() {
|
||||||
|
let status = resp.status();
|
||||||
|
let body = resp.text().await.unwrap_or_default();
|
||||||
|
let code = extract_graph_error_code(&body).unwrap_or_else(|| "unknown".to_string());
|
||||||
|
tracing::debug!("ms365: mail_send raw error body: {body}");
|
||||||
|
anyhow::bail!("ms365: mail_send failed ({status}, code={code})");
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
/// List messages in a Teams channel.
|
||||||
|
pub async fn teams_message_list(
|
||||||
|
client: &reqwest::Client,
|
||||||
|
token: &str,
|
||||||
|
team_id: &str,
|
||||||
|
channel_id: &str,
|
||||||
|
top: u32,
|
||||||
|
) -> anyhow::Result<serde_json::Value> {
|
||||||
|
let url = format!(
|
||||||
|
"{GRAPH_BASE}/teams/{}/channels/{}/messages",
|
||||||
|
encode_path_segment(team_id),
|
||||||
|
encode_path_segment(channel_id)
|
||||||
|
);
|
||||||
|
|
||||||
|
let resp = client
|
||||||
|
.get(&url)
|
||||||
|
.bearer_auth(token)
|
||||||
|
.query(&[("$top", top.to_string())])
|
||||||
|
.send()
|
||||||
|
.await
|
||||||
|
.context("ms365: teams_message_list request failed")?;
|
||||||
|
|
||||||
|
handle_json_response(resp, "teams_message_list").await
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Send a message to a Teams channel.
|
||||||
|
pub async fn teams_message_send(
|
||||||
|
client: &reqwest::Client,
|
||||||
|
token: &str,
|
||||||
|
team_id: &str,
|
||||||
|
channel_id: &str,
|
||||||
|
body: &str,
|
||||||
|
) -> anyhow::Result<()> {
|
||||||
|
let url = format!(
|
||||||
|
"{GRAPH_BASE}/teams/{}/channels/{}/messages",
|
||||||
|
encode_path_segment(team_id),
|
||||||
|
encode_path_segment(channel_id)
|
||||||
|
);
|
||||||
|
|
||||||
|
let payload = serde_json::json!({
|
||||||
|
"body": {
|
||||||
|
"content": body
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
let resp = client
|
||||||
|
.post(&url)
|
||||||
|
.bearer_auth(token)
|
||||||
|
.json(&payload)
|
||||||
|
.send()
|
||||||
|
.await
|
||||||
|
.context("ms365: teams_message_send request failed")?;
|
||||||
|
|
||||||
|
if !resp.status().is_success() {
|
||||||
|
let status = resp.status();
|
||||||
|
let body = resp.text().await.unwrap_or_default();
|
||||||
|
let code = extract_graph_error_code(&body).unwrap_or_else(|| "unknown".to_string());
|
||||||
|
tracing::debug!("ms365: teams_message_send raw error body: {body}");
|
||||||
|
anyhow::bail!("ms365: teams_message_send failed ({status}, code={code})");
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
/// List calendar events in a date range.
|
||||||
|
pub async fn calendar_events_list(
|
||||||
|
client: &reqwest::Client,
|
||||||
|
token: &str,
|
||||||
|
user_id: &str,
|
||||||
|
start: &str,
|
||||||
|
end: &str,
|
||||||
|
top: u32,
|
||||||
|
) -> anyhow::Result<serde_json::Value> {
|
||||||
|
let base = user_path(user_id);
|
||||||
|
let url = format!("{GRAPH_BASE}{base}/calendarView");
|
||||||
|
|
||||||
|
let resp = client
|
||||||
|
.get(&url)
|
||||||
|
.bearer_auth(token)
|
||||||
|
.query(&[
|
||||||
|
("startDateTime", start.to_string()),
|
||||||
|
("endDateTime", end.to_string()),
|
||||||
|
("$top", top.to_string()),
|
||||||
|
])
|
||||||
|
.send()
|
||||||
|
.await
|
||||||
|
.context("ms365: calendar_events_list request failed")?;
|
||||||
|
|
||||||
|
handle_json_response(resp, "calendar_events_list").await
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Create a calendar event.
|
||||||
|
pub async fn calendar_event_create(
|
||||||
|
client: &reqwest::Client,
|
||||||
|
token: &str,
|
||||||
|
user_id: &str,
|
||||||
|
subject: &str,
|
||||||
|
start: &str,
|
||||||
|
end: &str,
|
||||||
|
attendees: &[String],
|
||||||
|
body_text: Option<&str>,
|
||||||
|
) -> anyhow::Result<String> {
|
||||||
|
let base = user_path(user_id);
|
||||||
|
let url = format!("{GRAPH_BASE}{base}/events");
|
||||||
|
|
||||||
|
let attendee_list: Vec<serde_json::Value> = attendees
|
||||||
|
.iter()
|
||||||
|
.map(|email| {
|
||||||
|
serde_json::json!({
|
||||||
|
"emailAddress": { "address": email },
|
||||||
|
"type": "required"
|
||||||
|
})
|
||||||
|
})
|
||||||
|
.collect();
|
||||||
|
|
||||||
|
let mut payload = serde_json::json!({
|
||||||
|
"subject": subject,
|
||||||
|
"start": {
|
||||||
|
"dateTime": start,
|
||||||
|
"timeZone": "UTC"
|
||||||
|
},
|
||||||
|
"end": {
|
||||||
|
"dateTime": end,
|
||||||
|
"timeZone": "UTC"
|
||||||
|
},
|
||||||
|
"attendees": attendee_list
|
||||||
|
});
|
||||||
|
|
||||||
|
if let Some(text) = body_text {
|
||||||
|
payload["body"] = serde_json::json!({
|
||||||
|
"contentType": "Text",
|
||||||
|
"content": text
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
let resp = client
|
||||||
|
.post(&url)
|
||||||
|
.bearer_auth(token)
|
||||||
|
.json(&payload)
|
||||||
|
.send()
|
||||||
|
.await
|
||||||
|
.context("ms365: calendar_event_create request failed")?;
|
||||||
|
|
||||||
|
let value = handle_json_response(resp, "calendar_event_create").await?;
|
||||||
|
let event_id = value["id"].as_str().unwrap_or("unknown").to_string();
|
||||||
|
Ok(event_id)
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Delete a calendar event by ID.
|
||||||
|
pub async fn calendar_event_delete(
|
||||||
|
client: &reqwest::Client,
|
||||||
|
token: &str,
|
||||||
|
user_id: &str,
|
||||||
|
event_id: &str,
|
||||||
|
) -> anyhow::Result<()> {
|
||||||
|
let base = user_path(user_id);
|
||||||
|
let url = format!(
|
||||||
|
"{GRAPH_BASE}{base}/events/{}",
|
||||||
|
encode_path_segment(event_id)
|
||||||
|
);
|
||||||
|
|
||||||
|
let resp = client
|
||||||
|
.delete(&url)
|
||||||
|
.bearer_auth(token)
|
||||||
|
.send()
|
||||||
|
.await
|
||||||
|
.context("ms365: calendar_event_delete request failed")?;
|
||||||
|
|
||||||
|
if !resp.status().is_success() {
|
||||||
|
let status = resp.status();
|
||||||
|
let body = resp.text().await.unwrap_or_default();
|
||||||
|
let code = extract_graph_error_code(&body).unwrap_or_else(|| "unknown".to_string());
|
||||||
|
tracing::debug!("ms365: calendar_event_delete raw error body: {body}");
|
||||||
|
anyhow::bail!("ms365: calendar_event_delete failed ({status}, code={code})");
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
/// List children of a OneDrive folder.
|
||||||
|
pub async fn onedrive_list(
|
||||||
|
client: &reqwest::Client,
|
||||||
|
token: &str,
|
||||||
|
user_id: &str,
|
||||||
|
path: Option<&str>,
|
||||||
|
) -> anyhow::Result<serde_json::Value> {
|
||||||
|
let base = user_path(user_id);
|
||||||
|
let url = match path {
|
||||||
|
Some(p) if !p.is_empty() => {
|
||||||
|
let encoded = urlencoding::encode(p);
|
||||||
|
format!("{GRAPH_BASE}{base}/drive/root:/{encoded}:/children")
|
||||||
|
}
|
||||||
|
_ => format!("{GRAPH_BASE}{base}/drive/root/children"),
|
||||||
|
};
|
||||||
|
|
||||||
|
let resp = client
|
||||||
|
.get(&url)
|
||||||
|
.bearer_auth(token)
|
||||||
|
.send()
|
||||||
|
.await
|
||||||
|
.context("ms365: onedrive_list request failed")?;
|
||||||
|
|
||||||
|
handle_json_response(resp, "onedrive_list").await
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Download a OneDrive item by ID, with a maximum size guard.
|
||||||
|
pub async fn onedrive_download(
|
||||||
|
client: &reqwest::Client,
|
||||||
|
token: &str,
|
||||||
|
user_id: &str,
|
||||||
|
item_id: &str,
|
||||||
|
max_size: usize,
|
||||||
|
) -> anyhow::Result<Vec<u8>> {
|
||||||
|
let base = user_path(user_id);
|
||||||
|
let url = format!(
|
||||||
|
"{GRAPH_BASE}{base}/drive/items/{}/content",
|
||||||
|
encode_path_segment(item_id)
|
||||||
|
);
|
||||||
|
|
||||||
|
let resp = client
|
||||||
|
.get(&url)
|
||||||
|
.bearer_auth(token)
|
||||||
|
.send()
|
||||||
|
.await
|
||||||
|
.context("ms365: onedrive_download request failed")?;
|
||||||
|
|
||||||
|
if !resp.status().is_success() {
|
||||||
|
let status = resp.status();
|
||||||
|
let body = resp.text().await.unwrap_or_default();
|
||||||
|
let code = extract_graph_error_code(&body).unwrap_or_else(|| "unknown".to_string());
|
||||||
|
tracing::debug!("ms365: onedrive_download raw error body: {body}");
|
||||||
|
anyhow::bail!("ms365: onedrive_download failed ({status}, code={code})");
|
||||||
|
}
|
||||||
|
|
||||||
|
let bytes = resp
|
||||||
|
.bytes()
|
||||||
|
.await
|
||||||
|
.context("ms365: failed to read download body")?;
|
||||||
|
if bytes.len() > max_size {
|
||||||
|
anyhow::bail!(
|
||||||
|
"ms365: downloaded file exceeds max_size ({} > {max_size})",
|
||||||
|
bytes.len()
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok(bytes.to_vec())
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Search SharePoint for documents matching a query.
|
||||||
|
pub async fn sharepoint_search(
|
||||||
|
client: &reqwest::Client,
|
||||||
|
token: &str,
|
||||||
|
query: &str,
|
||||||
|
top: u32,
|
||||||
|
) -> anyhow::Result<serde_json::Value> {
|
||||||
|
let url = format!("{GRAPH_BASE}/search/query");
|
||||||
|
|
||||||
|
let payload = serde_json::json!({
|
||||||
|
"requests": [{
|
||||||
|
"entityTypes": ["driveItem", "listItem", "site"],
|
||||||
|
"query": {
|
||||||
|
"queryString": query
|
||||||
|
},
|
||||||
|
"from": 0,
|
||||||
|
"size": top
|
||||||
|
}]
|
||||||
|
});
|
||||||
|
|
||||||
|
let resp = client
|
||||||
|
.post(&url)
|
||||||
|
.bearer_auth(token)
|
||||||
|
.json(&payload)
|
||||||
|
.send()
|
||||||
|
.await
|
||||||
|
.context("ms365: sharepoint_search request failed")?;
|
||||||
|
|
||||||
|
handle_json_response(resp, "sharepoint_search").await
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Extract a short, safe error code from a Graph API JSON error body.
|
||||||
|
/// Returns `None` when the body is not a recognised Graph error envelope.
|
||||||
|
fn extract_graph_error_code(body: &str) -> Option<String> {
|
||||||
|
let parsed: serde_json::Value = serde_json::from_str(body).ok()?;
|
||||||
|
let code = parsed
|
||||||
|
.get("error")
|
||||||
|
.and_then(|e| e.get("code"))
|
||||||
|
.and_then(|c| c.as_str())
|
||||||
|
.map(|s| s.to_string());
|
||||||
|
code
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Parse a JSON response body, returning an error on non-success status.
|
||||||
|
/// Raw Graph API error bodies are not propagated; only the HTTP status and a
|
||||||
|
/// short error code (when available) are surfaced to avoid leaking internal
|
||||||
|
/// API details.
|
||||||
|
async fn handle_json_response(
|
||||||
|
resp: reqwest::Response,
|
||||||
|
operation: &str,
|
||||||
|
) -> anyhow::Result<serde_json::Value> {
|
||||||
|
if !resp.status().is_success() {
|
||||||
|
let status = resp.status();
|
||||||
|
let body = resp.text().await.unwrap_or_default();
|
||||||
|
let code = extract_graph_error_code(&body).unwrap_or_else(|| "unknown".to_string());
|
||||||
|
tracing::debug!("ms365: {operation} raw error body: {body}");
|
||||||
|
anyhow::bail!("ms365: {operation} failed ({status}, code={code})");
|
||||||
|
}
|
||||||
|
|
||||||
|
resp.json()
|
||||||
|
.await
|
||||||
|
.with_context(|| format!("ms365: failed to parse {operation} response"))
|
||||||
|
}
|
||||||
|
|
||||||
|
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn user_path_me() {
        assert_eq!(user_path("me"), "/me");
    }

    #[test]
    fn user_path_specific_user() {
        assert_eq!(user_path("user@contoso.com"), "/users/user%40contoso.com");
    }

    #[test]
    fn mail_list_url_no_folder() {
        let url = format!("{GRAPH_BASE}{}/messages", user_path("me"));
        assert_eq!(url, "https://graph.microsoft.com/v1.0/me/messages");
    }

    #[test]
    fn mail_list_url_with_folder() {
        let url = format!(
            "{GRAPH_BASE}{}/mailFolders/{}/messages",
            user_path("me"),
            encode_path_segment("inbox")
        );
        assert_eq!(
            url,
            "https://graph.microsoft.com/v1.0/me/mailFolders/inbox/messages"
        );
    }

    #[test]
    fn calendar_view_url() {
        let url = format!("{GRAPH_BASE}{}/calendarView", user_path("user@example.com"));
        assert_eq!(
            url,
            "https://graph.microsoft.com/v1.0/users/user%40example.com/calendarView"
        );
    }

    #[test]
    fn teams_message_url() {
        let url = format!(
            "{GRAPH_BASE}/teams/{}/channels/{}/messages",
            encode_path_segment("team-123"),
            encode_path_segment("channel-456")
        );
        assert_eq!(
            url,
            "https://graph.microsoft.com/v1.0/teams/team-123/channels/channel-456/messages"
        );
    }

    #[test]
    fn onedrive_root_url() {
        let url = format!("{GRAPH_BASE}{}/drive/root/children", user_path("me"));
        assert_eq!(url, "https://graph.microsoft.com/v1.0/me/drive/root/children");
    }

    #[test]
    fn onedrive_path_url() {
        let encoded = urlencoding::encode("Documents/Reports");
        let url = format!("{GRAPH_BASE}{}/drive/root:/{encoded}:/children", user_path("me"));
        assert_eq!(
            url,
            "https://graph.microsoft.com/v1.0/me/drive/root:/Documents%2FReports:/children"
        );
    }

    #[test]
    fn sharepoint_search_url() {
        assert_eq!(
            format!("{GRAPH_BASE}/search/query"),
            "https://graph.microsoft.com/v1.0/search/query"
        );
    }
}
|
||||||
567
src/tools/microsoft365/mod.rs
Normal file
567
src/tools/microsoft365/mod.rs
Normal file
@ -0,0 +1,567 @@
|
|||||||
|
//! Microsoft 365 integration tool — Graph API access for Mail, Teams, Calendar,
|
||||||
|
//! OneDrive, and SharePoint via a single action-dispatched tool surface.
|
||||||
|
//!
|
||||||
|
//! Auth is handled through direct HTTP calls to the Microsoft identity platform
|
||||||
|
//! (client credentials or device code flow) with token caching.
|
||||||
|
|
||||||
|
pub mod auth;
|
||||||
|
pub mod graph_client;
|
||||||
|
pub mod types;
|
||||||
|
|
||||||
|
use crate::security::policy::ToolOperation;
|
||||||
|
use crate::security::SecurityPolicy;
|
||||||
|
use crate::tools::traits::{Tool, ToolResult};
|
||||||
|
use async_trait::async_trait;
|
||||||
|
use serde_json::json;
|
||||||
|
use std::sync::Arc;
|
||||||
|
|
||||||
|
/// Maximum download size for OneDrive files (10 MB). Hard upper bound —
/// callers may request less but never more.
const MAX_ONEDRIVE_DOWNLOAD_SIZE: usize = 10 * 1024 * 1024;

/// Default number of items to return in list operations (`$top`).
const DEFAULT_TOP: u32 = 25;
|
||||||
|
|
||||||
|
/// Action-dispatched Microsoft 365 tool backed by the Graph API.
pub struct Microsoft365Tool {
    /// Resolved tenant/client/user settings used for Graph requests.
    config: types::Microsoft365ResolvedConfig,
    /// Policy gate consulted before every read/act operation.
    security: Arc<SecurityPolicy>,
    /// Shared OAuth token cache (refreshes on expiry).
    token_cache: Arc<auth::TokenCache>,
    /// Reused HTTP client built via the runtime proxy helper.
    http_client: reqwest::Client,
}
|
||||||
|
|
||||||
|
impl Microsoft365Tool {
|
||||||
|
pub fn new(
|
||||||
|
config: types::Microsoft365ResolvedConfig,
|
||||||
|
security: Arc<SecurityPolicy>,
|
||||||
|
zeroclaw_dir: &std::path::Path,
|
||||||
|
) -> anyhow::Result<Self> {
|
||||||
|
let http_client =
|
||||||
|
crate::config::build_runtime_proxy_client_with_timeouts("tool.microsoft365", 60, 10);
|
||||||
|
let token_cache = Arc::new(auth::TokenCache::new(config.clone(), zeroclaw_dir)?);
|
||||||
|
Ok(Self {
|
||||||
|
config,
|
||||||
|
security,
|
||||||
|
token_cache,
|
||||||
|
http_client,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
async fn get_token(&self) -> anyhow::Result<String> {
|
||||||
|
self.token_cache.get_token(&self.http_client).await
|
||||||
|
}
|
||||||
|
|
||||||
|
fn user_id(&self) -> &str {
|
||||||
|
&self.config.user_id
|
||||||
|
}
|
||||||
|
|
||||||
|
async fn dispatch(&self, action: &str, args: &serde_json::Value) -> anyhow::Result<ToolResult> {
|
||||||
|
match action {
|
||||||
|
"mail_list" => self.handle_mail_list(args).await,
|
||||||
|
"mail_send" => self.handle_mail_send(args).await,
|
||||||
|
"teams_message_list" => self.handle_teams_message_list(args).await,
|
||||||
|
"teams_message_send" => self.handle_teams_message_send(args).await,
|
||||||
|
"calendar_events_list" => self.handle_calendar_events_list(args).await,
|
||||||
|
"calendar_event_create" => self.handle_calendar_event_create(args).await,
|
||||||
|
"calendar_event_delete" => self.handle_calendar_event_delete(args).await,
|
||||||
|
"onedrive_list" => self.handle_onedrive_list(args).await,
|
||||||
|
"onedrive_download" => self.handle_onedrive_download(args).await,
|
||||||
|
"sharepoint_search" => self.handle_sharepoint_search(args).await,
|
||||||
|
_ => Ok(ToolResult {
|
||||||
|
success: false,
|
||||||
|
output: String::new(),
|
||||||
|
error: Some(format!("Unknown action: {action}")),
|
||||||
|
}),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// ── Read actions ────────────────────────────────────────────────
|
||||||
|
|
||||||
|
async fn handle_mail_list(&self, args: &serde_json::Value) -> anyhow::Result<ToolResult> {
|
||||||
|
self.security
|
||||||
|
.enforce_tool_operation(ToolOperation::Read, "microsoft365.mail_list")
|
||||||
|
.map_err(|e| anyhow::anyhow!(e))?;
|
||||||
|
|
||||||
|
let token = self.get_token().await?;
|
||||||
|
let folder = args["folder"].as_str();
|
||||||
|
let top = u32::try_from(args["top"].as_u64().unwrap_or(u64::from(DEFAULT_TOP)))
|
||||||
|
.unwrap_or(DEFAULT_TOP);
|
||||||
|
|
||||||
|
let result =
|
||||||
|
graph_client::mail_list(&self.http_client, &token, self.user_id(), folder, top).await?;
|
||||||
|
|
||||||
|
Ok(ToolResult {
|
||||||
|
success: true,
|
||||||
|
output: serde_json::to_string_pretty(&result)?,
|
||||||
|
error: None,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
async fn handle_teams_message_list(
|
||||||
|
&self,
|
||||||
|
args: &serde_json::Value,
|
||||||
|
) -> anyhow::Result<ToolResult> {
|
||||||
|
self.security
|
||||||
|
.enforce_tool_operation(ToolOperation::Read, "microsoft365.teams_message_list")
|
||||||
|
.map_err(|e| anyhow::anyhow!(e))?;
|
||||||
|
|
||||||
|
let token = self.get_token().await?;
|
||||||
|
let team_id = args["team_id"]
|
||||||
|
.as_str()
|
||||||
|
.ok_or_else(|| anyhow::anyhow!("team_id is required"))?;
|
||||||
|
let channel_id = args["channel_id"]
|
||||||
|
.as_str()
|
||||||
|
.ok_or_else(|| anyhow::anyhow!("channel_id is required"))?;
|
||||||
|
let top = u32::try_from(args["top"].as_u64().unwrap_or(u64::from(DEFAULT_TOP)))
|
||||||
|
.unwrap_or(DEFAULT_TOP);
|
||||||
|
|
||||||
|
let result =
|
||||||
|
graph_client::teams_message_list(&self.http_client, &token, team_id, channel_id, top)
|
||||||
|
.await?;
|
||||||
|
|
||||||
|
Ok(ToolResult {
|
||||||
|
success: true,
|
||||||
|
output: serde_json::to_string_pretty(&result)?,
|
||||||
|
error: None,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
async fn handle_calendar_events_list(
|
||||||
|
&self,
|
||||||
|
args: &serde_json::Value,
|
||||||
|
) -> anyhow::Result<ToolResult> {
|
||||||
|
self.security
|
||||||
|
.enforce_tool_operation(ToolOperation::Read, "microsoft365.calendar_events_list")
|
||||||
|
.map_err(|e| anyhow::anyhow!(e))?;
|
||||||
|
|
||||||
|
let token = self.get_token().await?;
|
||||||
|
let start = args["start"]
|
||||||
|
.as_str()
|
||||||
|
.ok_or_else(|| anyhow::anyhow!("start datetime is required"))?;
|
||||||
|
let end = args["end"]
|
||||||
|
.as_str()
|
||||||
|
.ok_or_else(|| anyhow::anyhow!("end datetime is required"))?;
|
||||||
|
let top = u32::try_from(args["top"].as_u64().unwrap_or(u64::from(DEFAULT_TOP)))
|
||||||
|
.unwrap_or(DEFAULT_TOP);
|
||||||
|
|
||||||
|
let result = graph_client::calendar_events_list(
|
||||||
|
&self.http_client,
|
||||||
|
&token,
|
||||||
|
self.user_id(),
|
||||||
|
start,
|
||||||
|
end,
|
||||||
|
top,
|
||||||
|
)
|
||||||
|
.await?;
|
||||||
|
|
||||||
|
Ok(ToolResult {
|
||||||
|
success: true,
|
||||||
|
output: serde_json::to_string_pretty(&result)?,
|
||||||
|
error: None,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
async fn handle_onedrive_list(&self, args: &serde_json::Value) -> anyhow::Result<ToolResult> {
|
||||||
|
self.security
|
||||||
|
.enforce_tool_operation(ToolOperation::Read, "microsoft365.onedrive_list")
|
||||||
|
.map_err(|e| anyhow::anyhow!(e))?;
|
||||||
|
|
||||||
|
let token = self.get_token().await?;
|
||||||
|
let path = args["path"].as_str();
|
||||||
|
|
||||||
|
let result =
|
||||||
|
graph_client::onedrive_list(&self.http_client, &token, self.user_id(), path).await?;
|
||||||
|
|
||||||
|
Ok(ToolResult {
|
||||||
|
success: true,
|
||||||
|
output: serde_json::to_string_pretty(&result)?,
|
||||||
|
error: None,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
async fn handle_onedrive_download(
|
||||||
|
&self,
|
||||||
|
args: &serde_json::Value,
|
||||||
|
) -> anyhow::Result<ToolResult> {
|
||||||
|
self.security
|
||||||
|
.enforce_tool_operation(ToolOperation::Read, "microsoft365.onedrive_download")
|
||||||
|
.map_err(|e| anyhow::anyhow!(e))?;
|
||||||
|
|
||||||
|
let token = self.get_token().await?;
|
||||||
|
let item_id = args["item_id"]
|
||||||
|
.as_str()
|
||||||
|
.ok_or_else(|| anyhow::anyhow!("item_id is required"))?;
|
||||||
|
let max_size = args["max_size"]
|
||||||
|
.as_u64()
|
||||||
|
.and_then(|v| usize::try_from(v).ok())
|
||||||
|
.unwrap_or(MAX_ONEDRIVE_DOWNLOAD_SIZE)
|
||||||
|
.min(MAX_ONEDRIVE_DOWNLOAD_SIZE);
|
||||||
|
|
||||||
|
let bytes = graph_client::onedrive_download(
|
||||||
|
&self.http_client,
|
||||||
|
&token,
|
||||||
|
self.user_id(),
|
||||||
|
item_id,
|
||||||
|
max_size,
|
||||||
|
)
|
||||||
|
.await?;
|
||||||
|
|
||||||
|
// Return base64-encoded for binary safety.
|
||||||
|
use base64::Engine;
|
||||||
|
let encoded = base64::engine::general_purpose::STANDARD.encode(&bytes);
|
||||||
|
|
||||||
|
Ok(ToolResult {
|
||||||
|
success: true,
|
||||||
|
output: format!(
|
||||||
|
"Downloaded {} bytes (base64 encoded):\n{encoded}",
|
||||||
|
bytes.len()
|
||||||
|
),
|
||||||
|
error: None,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
async fn handle_sharepoint_search(
|
||||||
|
&self,
|
||||||
|
args: &serde_json::Value,
|
||||||
|
) -> anyhow::Result<ToolResult> {
|
||||||
|
self.security
|
||||||
|
.enforce_tool_operation(ToolOperation::Read, "microsoft365.sharepoint_search")
|
||||||
|
.map_err(|e| anyhow::anyhow!(e))?;
|
||||||
|
|
||||||
|
let token = self.get_token().await?;
|
||||||
|
let query = args["query"]
|
||||||
|
.as_str()
|
||||||
|
.ok_or_else(|| anyhow::anyhow!("query is required"))?;
|
||||||
|
let top = u32::try_from(args["top"].as_u64().unwrap_or(u64::from(DEFAULT_TOP)))
|
||||||
|
.unwrap_or(DEFAULT_TOP);
|
||||||
|
|
||||||
|
let result = graph_client::sharepoint_search(&self.http_client, &token, query, top).await?;
|
||||||
|
|
||||||
|
Ok(ToolResult {
|
||||||
|
success: true,
|
||||||
|
output: serde_json::to_string_pretty(&result)?,
|
||||||
|
error: None,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
// ── Write actions ───────────────────────────────────────────────
|
||||||
|
|
||||||
|
async fn handle_mail_send(&self, args: &serde_json::Value) -> anyhow::Result<ToolResult> {
|
||||||
|
self.security
|
||||||
|
.enforce_tool_operation(ToolOperation::Act, "microsoft365.mail_send")
|
||||||
|
.map_err(|e| anyhow::anyhow!(e))?;
|
||||||
|
|
||||||
|
let token = self.get_token().await?;
|
||||||
|
let to: Vec<String> = args["to"]
|
||||||
|
.as_array()
|
||||||
|
.ok_or_else(|| anyhow::anyhow!("to must be an array of email addresses"))?
|
||||||
|
.iter()
|
||||||
|
.filter_map(|v| v.as_str().map(String::from))
|
||||||
|
.collect();
|
||||||
|
|
||||||
|
if to.is_empty() {
|
||||||
|
anyhow::bail!("to must contain at least one email address");
|
||||||
|
}
|
||||||
|
|
||||||
|
let subject = args["subject"]
|
||||||
|
.as_str()
|
||||||
|
.ok_or_else(|| anyhow::anyhow!("subject is required"))?;
|
||||||
|
let body = args["body"]
|
||||||
|
.as_str()
|
||||||
|
.ok_or_else(|| anyhow::anyhow!("body is required"))?;
|
||||||
|
|
||||||
|
graph_client::mail_send(
|
||||||
|
&self.http_client,
|
||||||
|
&token,
|
||||||
|
self.user_id(),
|
||||||
|
&to,
|
||||||
|
subject,
|
||||||
|
body,
|
||||||
|
)
|
||||||
|
.await?;
|
||||||
|
|
||||||
|
Ok(ToolResult {
|
||||||
|
success: true,
|
||||||
|
output: format!("Email sent to: {}", to.join(", ")),
|
||||||
|
error: None,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
async fn handle_teams_message_send(
|
||||||
|
&self,
|
||||||
|
args: &serde_json::Value,
|
||||||
|
) -> anyhow::Result<ToolResult> {
|
||||||
|
self.security
|
||||||
|
.enforce_tool_operation(ToolOperation::Act, "microsoft365.teams_message_send")
|
||||||
|
.map_err(|e| anyhow::anyhow!(e))?;
|
||||||
|
|
||||||
|
let token = self.get_token().await?;
|
||||||
|
let team_id = args["team_id"]
|
||||||
|
.as_str()
|
||||||
|
.ok_or_else(|| anyhow::anyhow!("team_id is required"))?;
|
||||||
|
let channel_id = args["channel_id"]
|
||||||
|
.as_str()
|
||||||
|
.ok_or_else(|| anyhow::anyhow!("channel_id is required"))?;
|
||||||
|
let body = args["body"]
|
||||||
|
.as_str()
|
||||||
|
.ok_or_else(|| anyhow::anyhow!("body is required"))?;
|
||||||
|
|
||||||
|
graph_client::teams_message_send(&self.http_client, &token, team_id, channel_id, body)
|
||||||
|
.await?;
|
||||||
|
|
||||||
|
Ok(ToolResult {
|
||||||
|
success: true,
|
||||||
|
output: "Teams message sent".to_string(),
|
||||||
|
error: None,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
async fn handle_calendar_event_create(
|
||||||
|
&self,
|
||||||
|
args: &serde_json::Value,
|
||||||
|
) -> anyhow::Result<ToolResult> {
|
||||||
|
self.security
|
||||||
|
.enforce_tool_operation(ToolOperation::Act, "microsoft365.calendar_event_create")
|
||||||
|
.map_err(|e| anyhow::anyhow!(e))?;
|
||||||
|
|
||||||
|
let token = self.get_token().await?;
|
||||||
|
let subject = args["subject"]
|
||||||
|
.as_str()
|
||||||
|
.ok_or_else(|| anyhow::anyhow!("subject is required"))?;
|
||||||
|
let start = args["start"]
|
||||||
|
.as_str()
|
||||||
|
.ok_or_else(|| anyhow::anyhow!("start datetime is required"))?;
|
||||||
|
let end = args["end"]
|
||||||
|
.as_str()
|
||||||
|
.ok_or_else(|| anyhow::anyhow!("end datetime is required"))?;
|
||||||
|
let attendees: Vec<String> = args["attendees"]
|
||||||
|
.as_array()
|
||||||
|
.map(|arr| {
|
||||||
|
arr.iter()
|
||||||
|
.filter_map(|v| v.as_str().map(String::from))
|
||||||
|
.collect()
|
||||||
|
})
|
||||||
|
.unwrap_or_default();
|
||||||
|
let body_text = args["body"].as_str();
|
||||||
|
|
||||||
|
let event_id = graph_client::calendar_event_create(
|
||||||
|
&self.http_client,
|
||||||
|
&token,
|
||||||
|
self.user_id(),
|
||||||
|
subject,
|
||||||
|
start,
|
||||||
|
end,
|
||||||
|
&attendees,
|
||||||
|
body_text,
|
||||||
|
)
|
||||||
|
.await?;
|
||||||
|
|
||||||
|
Ok(ToolResult {
|
||||||
|
success: true,
|
||||||
|
output: format!("Calendar event created (id: {event_id})"),
|
||||||
|
error: None,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
async fn handle_calendar_event_delete(
|
||||||
|
&self,
|
||||||
|
args: &serde_json::Value,
|
||||||
|
) -> anyhow::Result<ToolResult> {
|
||||||
|
self.security
|
||||||
|
.enforce_tool_operation(ToolOperation::Act, "microsoft365.calendar_event_delete")
|
||||||
|
.map_err(|e| anyhow::anyhow!(e))?;
|
||||||
|
|
||||||
|
let token = self.get_token().await?;
|
||||||
|
let event_id = args["event_id"]
|
||||||
|
.as_str()
|
||||||
|
.ok_or_else(|| anyhow::anyhow!("event_id is required"))?;
|
||||||
|
|
||||||
|
graph_client::calendar_event_delete(&self.http_client, &token, self.user_id(), event_id)
|
||||||
|
.await?;
|
||||||
|
|
||||||
|
Ok(ToolResult {
|
||||||
|
success: true,
|
||||||
|
output: format!("Calendar event {event_id} deleted"),
|
||||||
|
error: None,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[async_trait]
|
||||||
|
impl Tool for Microsoft365Tool {
|
||||||
|
fn name(&self) -> &str {
|
||||||
|
"microsoft365"
|
||||||
|
}
|
||||||
|
|
||||||
|
fn description(&self) -> &str {
|
||||||
|
"Microsoft 365 integration: manage Outlook mail, Teams messages, Calendar events, \
|
||||||
|
OneDrive files, and SharePoint search via Microsoft Graph API"
|
||||||
|
}
|
||||||
|
|
||||||
|
fn parameters_schema(&self) -> serde_json::Value {
|
||||||
|
json!({
|
||||||
|
"type": "object",
|
||||||
|
"required": ["action"],
|
||||||
|
"properties": {
|
||||||
|
"action": {
|
||||||
|
"type": "string",
|
||||||
|
"enum": [
|
||||||
|
"mail_list",
|
||||||
|
"mail_send",
|
||||||
|
"teams_message_list",
|
||||||
|
"teams_message_send",
|
||||||
|
"calendar_events_list",
|
||||||
|
"calendar_event_create",
|
||||||
|
"calendar_event_delete",
|
||||||
|
"onedrive_list",
|
||||||
|
"onedrive_download",
|
||||||
|
"sharepoint_search"
|
||||||
|
],
|
||||||
|
"description": "The Microsoft 365 action to perform"
|
||||||
|
},
|
||||||
|
"folder": {
|
||||||
|
"type": "string",
|
||||||
|
"description": "Mail folder ID (for mail_list, e.g. 'inbox', 'sentitems')"
|
||||||
|
},
|
||||||
|
"to": {
|
||||||
|
"type": "array",
|
||||||
|
"items": { "type": "string" },
|
||||||
|
"description": "Recipient email addresses (for mail_send)"
|
||||||
|
},
|
||||||
|
"subject": {
|
||||||
|
"type": "string",
|
||||||
|
"description": "Email subject or calendar event subject"
|
||||||
|
},
|
||||||
|
"body": {
|
||||||
|
"type": "string",
|
||||||
|
"description": "Message body text"
|
||||||
|
},
|
||||||
|
"team_id": {
|
||||||
|
"type": "string",
|
||||||
|
"description": "Teams team ID (for teams_message_list/send)"
|
||||||
|
},
|
||||||
|
"channel_id": {
|
||||||
|
"type": "string",
|
||||||
|
"description": "Teams channel ID (for teams_message_list/send)"
|
||||||
|
},
|
||||||
|
"start": {
|
||||||
|
"type": "string",
|
||||||
|
"description": "Start datetime in ISO 8601 format (for calendar actions)"
|
||||||
|
},
|
||||||
|
"end": {
|
||||||
|
"type": "string",
|
||||||
|
"description": "End datetime in ISO 8601 format (for calendar actions)"
|
||||||
|
},
|
||||||
|
"attendees": {
|
||||||
|
"type": "array",
|
||||||
|
"items": { "type": "string" },
|
||||||
|
"description": "Attendee email addresses (for calendar_event_create)"
|
||||||
|
},
|
||||||
|
"event_id": {
|
||||||
|
"type": "string",
|
||||||
|
"description": "Calendar event ID (for calendar_event_delete)"
|
||||||
|
},
|
||||||
|
"path": {
|
||||||
|
"type": "string",
|
||||||
|
"description": "OneDrive folder path (for onedrive_list)"
|
||||||
|
},
|
||||||
|
"item_id": {
|
||||||
|
"type": "string",
|
||||||
|
"description": "OneDrive item ID (for onedrive_download)"
|
||||||
|
},
|
||||||
|
"max_size": {
|
||||||
|
"type": "integer",
|
||||||
|
"description": "Maximum download size in bytes (for onedrive_download, default 10MB)"
|
||||||
|
},
|
||||||
|
"query": {
|
||||||
|
"type": "string",
|
||||||
|
"description": "Search query (for sharepoint_search)"
|
||||||
|
},
|
||||||
|
"top": {
|
||||||
|
"type": "integer",
|
||||||
|
"description": "Maximum number of items to return (default 25)"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
async fn execute(&self, args: serde_json::Value) -> anyhow::Result<ToolResult> {
|
||||||
|
let action = match args["action"].as_str() {
|
||||||
|
Some(a) => a.to_string(),
|
||||||
|
None => {
|
||||||
|
return Ok(ToolResult {
|
||||||
|
success: false,
|
||||||
|
output: String::new(),
|
||||||
|
error: Some("'action' parameter is required".to_string()),
|
||||||
|
});
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
match self.dispatch(&action, &args).await {
|
||||||
|
Ok(result) => Ok(result),
|
||||||
|
Err(e) => Ok(ToolResult {
|
||||||
|
success: false,
|
||||||
|
output: String::new(),
|
||||||
|
error: Some(format!("microsoft365.{action} failed: {e}")),
|
||||||
|
}),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[cfg(test)]
mod tests {
    use super::*;

    /// The minimal schema skeleton used elsewhere in these tests must parse
    /// as valid JSON.
    ///
    /// Renamed from `tool_name_is_microsoft365`: the old name described a
    /// check this test never performed (it never touched the tool name).
    #[test]
    fn schema_skeleton_parses_as_json() {
        let schema_str = r#"{"type":"object","required":["action"]}"#;
        let _: serde_json::Value = serde_json::from_str(schema_str).unwrap();
    }

    /// Structural self-check of the action enum.
    ///
    /// NOTE(review): this validates a mirrored copy of the schema, not the
    /// live `parameters_schema()` output (constructing the tool requires
    /// config/credentials); keep the list below in sync with the real schema.
    #[test]
    fn parameters_schema_has_action_enum() {
        let schema = json!({
            "type": "object",
            "required": ["action"],
            "properties": {
                "action": {
                    "type": "string",
                    "enum": [
                        "mail_list",
                        "mail_send",
                        "teams_message_list",
                        "teams_message_send",
                        "calendar_events_list",
                        "calendar_event_create",
                        "calendar_event_delete",
                        "onedrive_list",
                        "onedrive_download",
                        "sharepoint_search"
                    ]
                }
            }
        });

        let actions = schema["properties"]["action"]["enum"].as_array().unwrap();
        assert_eq!(actions.len(), 10);
        assert!(actions.contains(&json!("mail_list")));
        assert!(actions.contains(&json!("sharepoint_search")));
    }

    /// Guards the expected size of the dispatch table; update this list when
    /// adding a new action so the count assertion stays meaningful.
    #[test]
    fn action_dispatch_table_is_exhaustive() {
        let valid_actions = [
            "mail_list",
            "mail_send",
            "teams_message_list",
            "teams_message_send",
            "calendar_events_list",
            "calendar_event_create",
            "calendar_event_delete",
            "onedrive_list",
            "onedrive_download",
            "sharepoint_search",
        ];
        assert_eq!(valid_actions.len(), 10);
        assert!(!valid_actions.contains(&"invalid_action"));
    }
}
|
||||||
55
src/tools/microsoft365/types.rs
Normal file
55
src/tools/microsoft365/types.rs
Normal file
@ -0,0 +1,55 @@
|
|||||||
|
use serde::{Deserialize, Serialize};
|
||||||
|
|
||||||
|
/// Resolved Microsoft 365 configuration with all secrets decrypted and defaults applied.
///
/// NOTE: the manual `Debug` impl redacts `client_secret`, but the derived
/// `Serialize` emits it in clear text — serialize only to trusted sinks.
#[derive(Clone, Serialize, Deserialize)]
pub struct Microsoft365ResolvedConfig {
    // Entra ID (Azure AD) tenant the application is registered in.
    pub tenant_id: String,
    // Application (client) ID of the app registration.
    pub client_id: String,
    // App secret; registration code requires this to be non-empty when
    // `auth_flow` is "client_credentials".
    pub client_secret: Option<String>,
    // OAuth2 flow selector, e.g. "client_credentials" — other accepted
    // values not visible here; confirm against the auth module.
    pub auth_flow: String,
    // Scopes requested for Graph API tokens.
    pub scopes: Vec<String>,
    // Whether the on-disk token cache should be stored encrypted.
    pub token_cache_encrypted: bool,
    // Graph user context; defaults to "me" when the raw config leaves it unset.
    pub user_id: String,
}
|
||||||
|
|
||||||
|
impl std::fmt::Debug for Microsoft365ResolvedConfig {
|
||||||
|
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||||
|
f.debug_struct("Microsoft365ResolvedConfig")
|
||||||
|
.field("tenant_id", &self.tenant_id)
|
||||||
|
.field("client_id", &self.client_id)
|
||||||
|
.field("client_secret", &self.client_secret.as_ref().map(|_| "***"))
|
||||||
|
.field("auth_flow", &self.auth_flow)
|
||||||
|
.field("scopes", &self.scopes)
|
||||||
|
.field("token_cache_encrypted", &self.token_cache_encrypted)
|
||||||
|
.field("user_id", &self.user_id)
|
||||||
|
.finish()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[cfg(test)]
mod tests {
    use super::*;

    /// A fully-populated config must survive a serde JSON round trip with
    /// every field intact.
    #[test]
    fn resolved_config_serialization_roundtrip() {
        let original = Microsoft365ResolvedConfig {
            tenant_id: "test-tenant".into(),
            client_id: "test-client".into(),
            client_secret: Some("secret".into()),
            auth_flow: "client_credentials".into(),
            scopes: vec!["https://graph.microsoft.com/.default".into()],
            token_cache_encrypted: false,
            user_id: "me".into(),
        };

        let encoded = serde_json::to_string(&original).unwrap();
        let decoded: Microsoft365ResolvedConfig = serde_json::from_str(&encoded).unwrap();

        assert_eq!(decoded.tenant_id, "test-tenant");
        assert_eq!(decoded.client_id, "test-client");
        assert_eq!(decoded.client_secret.as_deref(), Some("secret"));
        assert_eq!(decoded.auth_flow, "client_credentials");
        assert_eq!(decoded.scopes.len(), 1);
        assert_eq!(decoded.user_id, "me");
    }
}
|
||||||
@ -48,6 +48,7 @@ pub mod mcp_transport;
|
|||||||
pub mod memory_forget;
|
pub mod memory_forget;
|
||||||
pub mod memory_recall;
|
pub mod memory_recall;
|
||||||
pub mod memory_store;
|
pub mod memory_store;
|
||||||
|
pub mod microsoft365;
|
||||||
pub mod model_routing_config;
|
pub mod model_routing_config;
|
||||||
pub mod node_tool;
|
pub mod node_tool;
|
||||||
pub mod notion_tool;
|
pub mod notion_tool;
|
||||||
@ -95,6 +96,7 @@ pub use mcp_tool::McpToolWrapper;
|
|||||||
pub use memory_forget::MemoryForgetTool;
|
pub use memory_forget::MemoryForgetTool;
|
||||||
pub use memory_recall::MemoryRecallTool;
|
pub use memory_recall::MemoryRecallTool;
|
||||||
pub use memory_store::MemoryStoreTool;
|
pub use memory_store::MemoryStoreTool;
|
||||||
|
pub use microsoft365::Microsoft365Tool;
|
||||||
pub use model_routing_config::ModelRoutingConfigTool;
|
pub use model_routing_config::ModelRoutingConfigTool;
|
||||||
#[allow(unused_imports)]
|
#[allow(unused_imports)]
|
||||||
pub use node_tool::NodeTool;
|
pub use node_tool::NodeTool;
|
||||||
@ -379,6 +381,61 @@ pub fn all_tools_with_runtime(
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// Microsoft 365 Graph API integration
|
||||||
|
if root_config.microsoft365.enabled {
|
||||||
|
let ms_cfg = &root_config.microsoft365;
|
||||||
|
let tenant_id = ms_cfg
|
||||||
|
.tenant_id
|
||||||
|
.as_deref()
|
||||||
|
.unwrap_or_default()
|
||||||
|
.trim()
|
||||||
|
.to_string();
|
||||||
|
let client_id = ms_cfg
|
||||||
|
.client_id
|
||||||
|
.as_deref()
|
||||||
|
.unwrap_or_default()
|
||||||
|
.trim()
|
||||||
|
.to_string();
|
||||||
|
if !tenant_id.is_empty() && !client_id.is_empty() {
|
||||||
|
// Fail fast: client_credentials flow requires a client_secret at registration time.
|
||||||
|
if ms_cfg.auth_flow.trim() == "client_credentials"
|
||||||
|
&& ms_cfg
|
||||||
|
.client_secret
|
||||||
|
.as_deref()
|
||||||
|
.map_or(true, |s| s.trim().is_empty())
|
||||||
|
{
|
||||||
|
tracing::error!(
|
||||||
|
"microsoft365: client_credentials auth_flow requires a non-empty client_secret"
|
||||||
|
);
|
||||||
|
return (boxed_registry_from_arcs(tool_arcs), None);
|
||||||
|
}
|
||||||
|
|
||||||
|
let resolved = microsoft365::types::Microsoft365ResolvedConfig {
|
||||||
|
tenant_id,
|
||||||
|
client_id,
|
||||||
|
client_secret: ms_cfg.client_secret.clone(),
|
||||||
|
auth_flow: ms_cfg.auth_flow.clone(),
|
||||||
|
scopes: ms_cfg.scopes.clone(),
|
||||||
|
token_cache_encrypted: ms_cfg.token_cache_encrypted,
|
||||||
|
user_id: ms_cfg.user_id.as_deref().unwrap_or("me").to_string(),
|
||||||
|
};
|
||||||
|
// Store token cache in the config directory (next to config.toml),
|
||||||
|
// not the workspace directory, to keep bearer tokens out of the
|
||||||
|
// project tree.
|
||||||
|
let cache_dir = root_config.config_path.parent().unwrap_or(workspace_dir);
|
||||||
|
match Microsoft365Tool::new(resolved, security.clone(), cache_dir) {
|
||||||
|
Ok(tool) => tool_arcs.push(Arc::new(tool)),
|
||||||
|
Err(e) => {
|
||||||
|
tracing::error!("microsoft365: failed to initialize tool: {e}");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
tracing::warn!(
|
||||||
|
"microsoft365: skipped registration because tenant_id or client_id is empty"
|
||||||
|
);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
// Add delegation tool when agents are configured
|
// Add delegation tool when agents are configured
|
||||||
let delegate_fallback_credential = fallback_api_key.and_then(|value| {
|
let delegate_fallback_credential = fallback_api_key.and_then(|value| {
|
||||||
let trimmed_value = value.trim();
|
let trimmed_value = value.trim();
|
||||||
|
|||||||
750
src/tools/project_intel.rs
Normal file
750
src/tools/project_intel.rs
Normal file
@ -0,0 +1,750 @@
|
|||||||
|
//! Project delivery intelligence tool.
|
||||||
|
//!
|
||||||
|
//! Provides read-only analysis and generation for project management:
|
||||||
|
//! status reports, risk detection, client communication drafting,
|
||||||
|
//! sprint summaries, and effort estimation.
|
||||||
|
|
||||||
|
use super::report_templates;
|
||||||
|
use super::traits::{Tool, ToolResult};
|
||||||
|
use async_trait::async_trait;
|
||||||
|
use serde_json::json;
|
||||||
|
use std::collections::HashMap;
|
||||||
|
use std::fmt::Write as _;
|
||||||
|
|
||||||
|
/// Project intelligence tool for consulting project management.
///
/// All actions are read-only analysis/generation; nothing is modified externally.
pub struct ProjectIntelTool {
    // Default report language (e.g. "en") used when a request omits `language`.
    default_language: String,
    // Sensitivity level that scales the heuristic risk-detection thresholds.
    risk_sensitivity: RiskSensitivity,
}
|
||||||
|
|
||||||
|
/// Risk detection sensitivity level.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum RiskSensitivity {
    /// Fewer findings: thresholds raised by 1.5x.
    Low,
    /// Baseline thresholds.
    Medium,
    /// More findings: thresholds halved.
    High,
}

impl RiskSensitivity {
    /// Parse a sensitivity label, case-insensitively; any unrecognized
    /// value falls back to `Medium`.
    fn from_str(s: &str) -> Self {
        let normalized = s.to_lowercase();
        if normalized == "low" {
            Self::Low
        } else if normalized == "high" {
            Self::High
        } else {
            Self::Medium
        }
    }

    /// Threshold multiplier: higher sensitivity means lower thresholds.
    fn threshold_factor(self) -> f64 {
        if self == Self::Low {
            1.5
        } else if self == Self::High {
            0.5
        } else {
            1.0
        }
    }
}
|
||||||
|
|
||||||
|
impl ProjectIntelTool {
    /// Construct the tool from configuration strings.
    ///
    /// `risk_sensitivity` is parsed leniently: anything other than
    /// "low"/"high" (case-insensitive) becomes `Medium`.
    pub fn new(default_language: String, risk_sensitivity: String) -> Self {
        Self {
            default_language,
            risk_sensitivity: RiskSensitivity::from_str(&risk_sensitivity),
        }
    }

    /// Render a periodic status report from caller-supplied context.
    ///
    /// Requires non-blank `project_name` and `period`; `git_log`,
    /// `jira_summary` and `notes` are optional and fall back to placeholder
    /// text. Output is the rendered weekly-status template.
    fn execute_status_report(&self, args: &serde_json::Value) -> anyhow::Result<ToolResult> {
        let project_name = args
            .get("project_name")
            .and_then(|v| v.as_str())
            .filter(|s| !s.trim().is_empty())
            .ok_or_else(|| anyhow::anyhow!("missing required 'project_name' for status_report"))?;
        let period = args
            .get("period")
            .and_then(|v| v.as_str())
            .filter(|s| !s.trim().is_empty())
            .ok_or_else(|| anyhow::anyhow!("missing required 'period' for status_report"))?;
        let lang = args
            .get("language")
            .and_then(|v| v.as_str())
            .unwrap_or(&self.default_language);
        let git_log = args
            .get("git_log")
            .and_then(|v| v.as_str())
            .unwrap_or("No git data provided");
        let jira_summary = args
            .get("jira_summary")
            .and_then(|v| v.as_str())
            .unwrap_or("No Jira data provided");
        let notes = args.get("notes").and_then(|v| v.as_str()).unwrap_or("");

        let tpl = report_templates::weekly_status_template(lang);
        let mut vars = HashMap::new();
        vars.insert("project_name".into(), project_name.to_string());
        vars.insert("period".into(), period.to_string());
        // NOTE(review): git_log fills the "completed" slot, jira_summary the
        // "in_progress" slot and notes the "blocked" slot — this mapping is
        // template-driven; confirm against report_templates.
        vars.insert("completed".into(), git_log.to_string());
        vars.insert("in_progress".into(), jira_summary.to_string());
        vars.insert("blocked".into(), notes.to_string());
        vars.insert("next_steps".into(), "To be determined".into());

        let rendered = tpl.render(&vars);
        Ok(ToolResult {
            success: true,
            output: rendered,
            error: None,
        })
    }

    /// Scan free-text project signals (`deadlines`, `velocity`, `blockers`)
    /// for risks and render a risk register.
    ///
    /// Detection is keyword/count based and scaled by the configured
    /// sensitivity; when nothing matches, a single low-severity "no risks"
    /// entry is emitted so the register is never empty.
    fn execute_risk_scan(&self, args: &serde_json::Value) -> anyhow::Result<ToolResult> {
        let deadlines = args
            .get("deadlines")
            .and_then(|v| v.as_str())
            .unwrap_or_default();
        let velocity = args
            .get("velocity")
            .and_then(|v| v.as_str())
            .unwrap_or_default();
        let blockers = args
            .get("blockers")
            .and_then(|v| v.as_str())
            .unwrap_or_default();
        let lang = args
            .get("language")
            .and_then(|v| v.as_str())
            .unwrap_or(&self.default_language);

        let mut risks = Vec::new();

        // Heuristic risk detection based on signals.
        // factor > 1 (low sensitivity) raises thresholds; factor < 1 lowers them.
        let factor = self.risk_sensitivity.threshold_factor();

        if !blockers.is_empty() {
            // One blocker per non-blank line.
            let blocker_count = blockers.lines().filter(|l| !l.trim().is_empty()).count();
            let severity = if (blocker_count as f64) > 3.0 * factor {
                "critical"
            } else if (blocker_count as f64) > 1.0 * factor {
                "high"
            } else {
                "medium"
            };
            risks.push(RiskItem {
                title: "Active blockers detected".into(),
                severity: severity.into(),
                detail: format!("{blocker_count} blocker(s) identified"),
                mitigation: "Escalate blockers, assign owners, set resolution deadlines".into(),
            });
        }

        if deadlines.to_lowercase().contains("overdue")
            || deadlines.to_lowercase().contains("missed")
        {
            risks.push(RiskItem {
                title: "Deadline risk".into(),
                severity: "high".into(),
                detail: "Overdue or missed deadlines detected in project context".into(),
                mitigation: "Re-prioritize scope, negotiate timeline, add resources".into(),
            });
        }

        if velocity.to_lowercase().contains("declining") || velocity.to_lowercase().contains("slow")
        {
            risks.push(RiskItem {
                title: "Velocity degradation".into(),
                severity: "medium".into(),
                detail: "Team velocity is declining or below expectations".into(),
                mitigation: "Identify bottlenecks, reduce WIP, address technical debt".into(),
            });
        }

        if risks.is_empty() {
            risks.push(RiskItem {
                title: "No significant risks detected".into(),
                severity: "low".into(),
                detail: "Current project signals within normal parameters".into(),
                mitigation: "Continue monitoring".into(),
            });
        }

        let tpl = report_templates::risk_register_template(lang);
        let risks_text = risks
            .iter()
            .map(|r| {
                format!(
                    "- [{}] {}: {}",
                    r.severity.to_uppercase(),
                    r.title,
                    r.detail
                )
            })
            .collect::<Vec<_>>()
            .join("\n");
        let mitigations_text = risks
            .iter()
            .map(|r| format!("- {}: {}", r.title, r.mitigation))
            .collect::<Vec<_>>()
            .join("\n");

        let mut vars = HashMap::new();
        vars.insert(
            "project_name".into(),
            args.get("project_name")
                .and_then(|v| v.as_str())
                .unwrap_or("Unknown")
                .to_string(),
        );
        vars.insert("risks".into(), risks_text);
        vars.insert("mitigations".into(), mitigations_text);

        Ok(ToolResult {
            success: true,
            output: tpl.render(&vars),
            error: None,
        })
    }

    /// Draft a client/internal update email-style message.
    ///
    /// Requires non-blank `project_name` and `highlights`; `audience`
    /// defaults to "client" and `tone` to "formal". The greeting/closing
    /// are chosen from the (audience, tone) pair.
    fn execute_draft_update(&self, args: &serde_json::Value) -> anyhow::Result<ToolResult> {
        let project_name = args
            .get("project_name")
            .and_then(|v| v.as_str())
            .filter(|s| !s.trim().is_empty())
            .ok_or_else(|| anyhow::anyhow!("missing required 'project_name' for draft_update"))?;
        let audience = args
            .get("audience")
            .and_then(|v| v.as_str())
            .unwrap_or("client");
        let tone = args
            .get("tone")
            .and_then(|v| v.as_str())
            .unwrap_or("formal");
        let highlights = args
            .get("highlights")
            .and_then(|v| v.as_str())
            .filter(|s| !s.trim().is_empty())
            .ok_or_else(|| anyhow::anyhow!("missing required 'highlights' for draft_update"))?;
        let concerns = args.get("concerns").and_then(|v| v.as_str()).unwrap_or("");

        let greeting = match (audience, tone) {
            ("client", "casual") => "Hi there,".to_string(),
            ("client", _) => "Dear valued partner,".to_string(),
            ("internal", "casual") => "Hey team,".to_string(),
            ("internal", _) => "Dear team,".to_string(),
            (_, "casual") => "Hi,".to_string(),
            _ => "Dear reader,".to_string(),
        };

        let closing = match tone {
            "casual" => "Cheers",
            _ => "Best regards",
        };

        let mut body = format!(
            "{greeting}\n\nHere is an update on {project_name}.\n\n**Highlights:**\n{highlights}"
        );
        // Concerns section is only emitted when the caller supplied one.
        if !concerns.is_empty() {
            let _ = write!(body, "\n\n**Items requiring attention:**\n{concerns}");
        }
        let _ = write!(
            body,
            "\n\nPlease do not hesitate to reach out with any questions.\n\n{closing}"
        );

        Ok(ToolResult {
            success: true,
            output: body,
            error: None,
        })
    }

    /// Render a sprint review from per-section free text.
    ///
    /// Every argument is optional; each section falls back to a neutral
    /// placeholder string when absent.
    fn execute_sprint_summary(&self, args: &serde_json::Value) -> anyhow::Result<ToolResult> {
        let sprint_dates = args
            .get("sprint_dates")
            .and_then(|v| v.as_str())
            .unwrap_or("current sprint");
        let completed = args
            .get("completed")
            .and_then(|v| v.as_str())
            .unwrap_or("None specified");
        let in_progress = args
            .get("in_progress")
            .and_then(|v| v.as_str())
            .unwrap_or("None specified");
        let blocked = args
            .get("blocked")
            .and_then(|v| v.as_str())
            .unwrap_or("None");
        let velocity = args
            .get("velocity")
            .and_then(|v| v.as_str())
            .unwrap_or("Not calculated");
        let lang = args
            .get("language")
            .and_then(|v| v.as_str())
            .unwrap_or(&self.default_language);

        let tpl = report_templates::sprint_review_template(lang);
        let mut vars = HashMap::new();
        vars.insert("sprint_dates".into(), sprint_dates.to_string());
        vars.insert("completed".into(), completed.to_string());
        vars.insert("in_progress".into(), in_progress.to_string());
        vars.insert("blocked".into(), blocked.to_string());
        vars.insert("velocity".into(), velocity.to_string());

        Ok(ToolResult {
            success: true,
            output: tpl.render(&vars),
            error: None,
        })
    }

    /// Estimate T-shirt-size effort for each non-blank line of `tasks`.
    ///
    /// Returns an unsuccessful result when `tasks` is missing or blank;
    /// otherwise a markdown list with one estimate + rationale per task.
    fn execute_effort_estimate(&self, args: &serde_json::Value) -> anyhow::Result<ToolResult> {
        let tasks = args.get("tasks").and_then(|v| v.as_str()).unwrap_or("");

        if tasks.trim().is_empty() {
            return Ok(ToolResult {
                success: false,
                output: String::new(),
                error: Some("No task descriptions provided".into()),
            });
        }

        let mut estimates = Vec::new();
        for line in tasks.lines() {
            let line = line.trim();
            if line.is_empty() {
                continue;
            }
            let (size, rationale) = estimate_task_effort(line);
            estimates.push(format!("- **{size}** | {line}\n Rationale: {rationale}"));
        }

        let output = format!(
            "## Effort Estimates\n\n{}\n\n_Sizes: XS (<2h), S (2-4h), M (4-8h), L (1-3d), XL (3-5d), XXL (>5d)_",
            estimates.join("\n")
        );

        Ok(ToolResult {
            success: true,
            output,
            error: None,
        })
    }
}
|
||||||
|
|
||||||
|
/// A single detected risk plus its suggested remediation.
struct RiskItem {
    // Short human-readable risk name.
    title: String,
    // Lowercase severity label ("low", "medium", "high" or "critical");
    // rendered uppercased in the risk register.
    severity: String,
    // One-line explanation of the signal that triggered the risk.
    detail: String,
    // Suggested remediation action(s).
    mitigation: String,
}
|
||||||
|
|
||||||
|
/// Heuristic effort estimation from task description text.
///
/// Classification is purely lexical: keyword buckets choose the base
/// T-shirt size and the description's word count bumps it one step up.
/// Returns `(size, rationale)` as static strings.
fn estimate_task_effort(description: &str) -> (&'static str, &'static str) {
    // Keyword buckets, checked in priority order (structural > feature > minor).
    const COMPLEX_SIGNALS: [&str; 6] = [
        "refactor",
        "rewrite",
        "migrate",
        "redesign",
        "architecture",
        "infrastructure",
    ];
    const MEDIUM_SIGNALS: [&str; 6] = [
        "implement",
        "create",
        "build",
        "integrate",
        "add feature",
        "new module",
    ];
    const SMALL_SIGNALS: [&str; 8] = [
        "fix", "update", "tweak", "adjust", "rename", "typo", "bump", "config",
    ];

    let lower = description.to_lowercase();
    let word_count = description.split_whitespace().count();
    let matches_any = |signals: &[&str]| signals.iter().any(|s| lower.contains(s));

    if matches_any(&COMPLEX_SIGNALS) {
        return if word_count > 15 {
            ("XXL", "Large-scope structural change with extensive description")
        } else {
            ("XL", "Structural change requiring significant effort")
        };
    }

    if matches_any(&MEDIUM_SIGNALS) {
        return if word_count > 12 {
            ("L", "Feature implementation with detailed requirements")
        } else {
            ("M", "Standard feature implementation")
        };
    }

    if matches_any(&SMALL_SIGNALS) {
        return if word_count > 10 {
            ("S", "Small change with additional context")
        } else {
            ("XS", "Minor targeted change")
        };
    }

    // No keyword matched: fall back to description length as a complexity proxy.
    match word_count {
        n if n > 20 => ("L", "Complex task inferred from detailed description"),
        n if n > 10 => ("M", "Moderate task inferred from description length"),
        _ => ("S", "Simple task inferred from brief description"),
    }
}
|
||||||
|
|
||||||
|
#[async_trait]
impl Tool for ProjectIntelTool {
    /// Stable tool identifier used for registration and dispatch.
    fn name(&self) -> &str {
        "project_intel"
    }

    /// One-line capability summary surfaced to the model/tool catalog.
    fn description(&self) -> &str {
        "Project delivery intelligence: generate status reports, detect risks, draft client updates, summarize sprints, and estimate effort. Read-only analysis tool."
    }

    /// JSON schema for the tool arguments. Only `action` is required; every
    /// other property is an optional per-action input (the action it serves
    /// is noted in its description).
    fn parameters_schema(&self) -> serde_json::Value {
        json!({
            "type": "object",
            "properties": {
                "action": {
                    "type": "string",
                    "enum": ["status_report", "risk_scan", "draft_update", "sprint_summary", "effort_estimate"],
                    "description": "The analysis action to perform"
                },
                "project_name": {
                    "type": "string",
                    "description": "Project name (for status_report, risk_scan, draft_update)"
                },
                "period": {
                    "type": "string",
                    "description": "Reporting period: week, sprint, or month (for status_report)"
                },
                "language": {
                    "type": "string",
                    "description": "Report language: en, de, fr, it (default from config)"
                },
                "git_log": {
                    "type": "string",
                    "description": "Git log summary text (for status_report)"
                },
                "jira_summary": {
                    "type": "string",
                    "description": "Jira/issue tracker summary (for status_report)"
                },
                "notes": {
                    "type": "string",
                    "description": "Additional notes or context"
                },
                "deadlines": {
                    "type": "string",
                    "description": "Deadline information (for risk_scan)"
                },
                "velocity": {
                    "type": "string",
                    "description": "Team velocity data (for risk_scan, sprint_summary)"
                },
                "blockers": {
                    "type": "string",
                    "description": "Current blockers (for risk_scan)"
                },
                "audience": {
                    "type": "string",
                    "enum": ["client", "internal"],
                    "description": "Target audience (for draft_update)"
                },
                "tone": {
                    "type": "string",
                    "enum": ["formal", "casual"],
                    "description": "Communication tone (for draft_update)"
                },
                "highlights": {
                    "type": "string",
                    "description": "Key highlights for the update (for draft_update)"
                },
                "concerns": {
                    "type": "string",
                    "description": "Items requiring attention (for draft_update)"
                },
                "sprint_dates": {
                    "type": "string",
                    "description": "Sprint date range (for sprint_summary)"
                },
                "completed": {
                    "type": "string",
                    "description": "Completed items (for sprint_summary)"
                },
                "in_progress": {
                    "type": "string",
                    "description": "In-progress items (for sprint_summary)"
                },
                "blocked": {
                    "type": "string",
                    "description": "Blocked items (for sprint_summary)"
                },
                "tasks": {
                    "type": "string",
                    "description": "Task descriptions, one per line (for effort_estimate)"
                }
            },
            "required": ["action"]
        })
    }

    /// Validate `action` and route to the matching `execute_*` handler.
    ///
    /// Error contract: a MISSING `action` is a hard `Err`, while an
    /// UNRECOGNIZED action is reported as an unsuccessful `ToolResult` —
    /// callers depend on this asymmetry.
    async fn execute(&self, args: serde_json::Value) -> anyhow::Result<ToolResult> {
        let action = args
            .get("action")
            .and_then(|v| v.as_str())
            .ok_or_else(|| anyhow::anyhow!("Missing required 'action' parameter"))?;

        match action {
            "status_report" => self.execute_status_report(&args),
            "risk_scan" => self.execute_risk_scan(&args),
            "draft_update" => self.execute_draft_update(&args),
            "sprint_summary" => self.execute_sprint_summary(&args),
            "effort_estimate" => self.execute_effort_estimate(&args),
            other => Ok(ToolResult {
                success: false,
                output: String::new(),
                error: Some(format!(
                    "Unknown action '{other}'. Valid actions: status_report, risk_scan, draft_update, sprint_summary, effort_estimate"
                )),
            }),
        }
    }
}
|
||||||
|
|
||||||
|
#[cfg(test)]
|
||||||
|
mod tests {
|
||||||
|
use super::*;
|
||||||
|
|
||||||
|
fn tool() -> ProjectIntelTool {
|
||||||
|
ProjectIntelTool::new("en".into(), "medium".into())
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn tool_name_and_description() {
|
||||||
|
let t = tool();
|
||||||
|
assert_eq!(t.name(), "project_intel");
|
||||||
|
assert!(!t.description().is_empty());
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn parameters_schema_has_action() {
|
||||||
|
let t = tool();
|
||||||
|
let schema = t.parameters_schema();
|
||||||
|
assert!(schema["properties"]["action"].is_object());
|
||||||
|
let required = schema["required"].as_array().unwrap();
|
||||||
|
assert!(required.contains(&serde_json::Value::String("action".into())));
|
||||||
|
}
|
||||||
|
|
||||||
|
#[tokio::test]
|
||||||
|
async fn status_report_renders() {
|
||||||
|
let t = tool();
|
||||||
|
let result = t
|
||||||
|
.execute(json!({
|
||||||
|
"action": "status_report",
|
||||||
|
"project_name": "TestProject",
|
||||||
|
"period": "week",
|
||||||
|
"git_log": "- feat: added login"
|
||||||
|
}))
|
||||||
|
.await
|
||||||
|
.unwrap();
|
||||||
|
assert!(result.success);
|
||||||
|
assert!(result.output.contains("TestProject"));
|
||||||
|
assert!(result.output.contains("added login"));
|
||||||
|
}
|
||||||
|
|
||||||
|
#[tokio::test]
|
||||||
|
async fn risk_scan_detects_blockers() {
|
||||||
|
let t = tool();
|
||||||
|
let result = t
|
||||||
|
.execute(json!({
|
||||||
|
"action": "risk_scan",
|
||||||
|
"blockers": "DB migration stuck\nCI pipeline broken\nAPI key expired"
|
||||||
|
}))
|
||||||
|
.await
|
||||||
|
.unwrap();
|
||||||
|
assert!(result.success);
|
||||||
|
assert!(result.output.contains("blocker"));
|
||||||
|
}
|
||||||
|
|
||||||
|
#[tokio::test]
|
||||||
|
async fn risk_scan_detects_deadline_risk() {
|
||||||
|
let t = tool();
|
||||||
|
let result = t
|
||||||
|
.execute(json!({
|
||||||
|
"action": "risk_scan",
|
||||||
|
"deadlines": "Sprint deadline overdue by 3 days"
|
||||||
|
}))
|
||||||
|
.await
|
||||||
|
.unwrap();
|
||||||
|
assert!(result.success);
|
||||||
|
assert!(result.output.contains("Deadline risk"));
|
||||||
|
}
|
||||||
|
|
||||||
|
#[tokio::test]
|
||||||
|
async fn risk_scan_no_signals_returns_low_risk() {
|
||||||
|
let t = tool();
|
||||||
|
let result = t.execute(json!({ "action": "risk_scan" })).await.unwrap();
|
||||||
|
assert!(result.success);
|
||||||
|
assert!(result.output.contains("No significant risks"));
|
||||||
|
}
|
||||||
|
|
||||||
|
#[tokio::test]
|
||||||
|
async fn draft_update_formal_client() {
|
||||||
|
let t = tool();
|
||||||
|
let result = t
|
||||||
|
.execute(json!({
|
||||||
|
"action": "draft_update",
|
||||||
|
"project_name": "Portal",
|
||||||
|
"audience": "client",
|
||||||
|
"tone": "formal",
|
||||||
|
"highlights": "Phase 1 delivered"
|
||||||
|
}))
|
||||||
|
.await
|
||||||
|
.unwrap();
|
||||||
|
assert!(result.success);
|
||||||
|
assert!(result.output.contains("Dear valued partner"));
|
||||||
|
assert!(result.output.contains("Portal"));
|
||||||
|
assert!(result.output.contains("Phase 1 delivered"));
|
||||||
|
}
|
||||||
|
|
||||||
|
#[tokio::test]
|
||||||
|
async fn draft_update_casual_internal() {
|
||||||
|
let t = tool();
|
||||||
|
let result = t
|
||||||
|
.execute(json!({
|
||||||
|
"action": "draft_update",
|
||||||
|
"project_name": "ZeroClaw",
|
||||||
|
"audience": "internal",
|
||||||
|
"tone": "casual",
|
||||||
|
"highlights": "Core loop stabilized"
|
||||||
|
}))
|
||||||
|
.await
|
||||||
|
.unwrap();
|
||||||
|
assert!(result.success);
|
||||||
|
assert!(result.output.contains("Hey team"));
|
||||||
|
assert!(result.output.contains("Cheers"));
|
||||||
|
}
|
||||||
|
|
||||||
|
#[tokio::test]
|
||||||
|
async fn sprint_summary_renders() {
|
||||||
|
let t = tool();
|
||||||
|
let result = t
|
||||||
|
.execute(json!({
|
||||||
|
"action": "sprint_summary",
|
||||||
|
"sprint_dates": "2026-03-01 to 2026-03-14",
|
||||||
|
"completed": "- Login page\n- API endpoints",
|
||||||
|
"in_progress": "- Dashboard",
|
||||||
|
"blocked": "- Payment integration"
|
||||||
|
}))
|
||||||
|
.await
|
||||||
|
.unwrap();
|
||||||
|
assert!(result.success);
|
||||||
|
assert!(result.output.contains("Login page"));
|
||||||
|
assert!(result.output.contains("Dashboard"));
|
||||||
|
}
|
||||||
|
|
||||||
|
#[tokio::test]
|
||||||
|
async fn effort_estimate_basic() {
|
||||||
|
let t = tool();
|
||||||
|
let result = t
|
||||||
|
.execute(json!({
|
||||||
|
"action": "effort_estimate",
|
||||||
|
"tasks": "Fix typo in README\nImplement user authentication\nRefactor database layer"
|
||||||
|
}))
|
||||||
|
.await
|
||||||
|
.unwrap();
|
||||||
|
assert!(result.success);
|
||||||
|
assert!(result.output.contains("XS"));
|
||||||
|
assert!(result.output.contains("Refactor database layer"));
|
||||||
|
}
|
||||||
|
|
||||||
|
#[tokio::test]
|
||||||
|
async fn effort_estimate_empty_tasks_fails() {
|
||||||
|
let t = tool();
|
||||||
|
let result = t
|
||||||
|
.execute(json!({ "action": "effort_estimate", "tasks": "" }))
|
||||||
|
.await
|
||||||
|
.unwrap();
|
||||||
|
assert!(!result.success);
|
||||||
|
assert!(result.error.unwrap().contains("No task descriptions"));
|
||||||
|
}
|
||||||
|
|
||||||
|
#[tokio::test]
|
||||||
|
async fn unknown_action_returns_error() {
|
||||||
|
let t = tool();
|
||||||
|
let result = t
|
||||||
|
.execute(json!({ "action": "invalid_thing" }))
|
||||||
|
.await
|
||||||
|
.unwrap();
|
||||||
|
assert!(!result.success);
|
||||||
|
assert!(result.error.unwrap().contains("Unknown action"));
|
||||||
|
}
|
||||||
|
|
||||||
|
#[tokio::test]
|
||||||
|
async fn missing_action_returns_error() {
|
||||||
|
let t = tool();
|
||||||
|
let result = t.execute(json!({})).await;
|
||||||
|
assert!(result.is_err());
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
fn effort_estimate_heuristics_coverage() {
    // Table-driven check of the sizing heuristics across the bucket range.
    let cases = [
        ("Fix typo", "XS"),
        ("Update config values", "XS"),
        ("Implement new notification system", "M"),
        ("Refactor the entire authentication module", "XL"),
        (
            "Migrate the database schema to support multi-tenancy with data isolation and proper indexing across all services",
            "XXL",
        ),
    ];
    for (task, expected) in cases {
        assert_eq!(estimate_task_effort(task).0, expected, "task: {task}");
    }
}
|
||||||
|
|
||||||
|
#[test]
fn risk_sensitivity_threshold_ordering() {
    // Stricter sensitivity must map to a strictly lower threshold factor.
    let high = RiskSensitivity::High.threshold_factor();
    let medium = RiskSensitivity::Medium.threshold_factor();
    let low = RiskSensitivity::Low.threshold_factor();
    assert!(high < medium);
    assert!(medium < low);
}
|
||||||
|
|
||||||
|
#[test]
fn risk_sensitivity_from_str_variants() {
    // Known names parse to their variant; anything else falls back to Medium.
    let cases = [
        ("low", RiskSensitivity::Low),
        ("high", RiskSensitivity::High),
        ("medium", RiskSensitivity::Medium),
        ("unknown", RiskSensitivity::Medium),
    ];
    for (input, expected) in cases {
        assert_eq!(RiskSensitivity::from_str(input), expected);
    }
}
|
||||||
|
|
||||||
|
#[tokio::test]
|
||||||
|
async fn high_sensitivity_detects_single_blocker_as_high() {
|
||||||
|
let t = ProjectIntelTool::new("en".into(), "high".into());
|
||||||
|
let result = t
|
||||||
|
.execute(json!({
|
||||||
|
"action": "risk_scan",
|
||||||
|
"blockers": "Single blocker"
|
||||||
|
}))
|
||||||
|
.await
|
||||||
|
.unwrap();
|
||||||
|
assert!(result.success);
|
||||||
|
assert!(result.output.contains("[HIGH]") || result.output.contains("[CRITICAL]"));
|
||||||
|
}
|
||||||
|
}
|
||||||
582
src/tools/report_templates.rs
Normal file
582
src/tools/report_templates.rs
Normal file
@ -0,0 +1,582 @@
|
|||||||
|
//! Report template engine for project delivery intelligence.
|
||||||
|
//!
|
||||||
|
//! Provides built-in templates for weekly status, sprint review, risk register,
|
||||||
|
//! and milestone reports with multi-language support (EN, DE, FR, IT).
|
||||||
|
|
||||||
|
use std::collections::HashMap;
|
||||||
|
use std::fmt::Write as _;
|
||||||
|
|
||||||
|
/// Supported report output formats.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum ReportFormat {
    /// Markdown output with `##` section headings.
    Markdown,
    /// Minimal HTML output (`<h2>`/`<p>` pairs); section text is entity-escaped.
    Html,
}
|
||||||
|
|
||||||
|
/// A named section within a report template.
#[derive(Debug, Clone)]
pub struct TemplateSection {
    /// Section title; may itself contain `{{key}}` placeholders.
    pub heading: String,
    /// Section body text; may contain `{{key}}` placeholders.
    pub body: String,
}
|
||||||
|
|
||||||
|
/// A report template with named sections and variable placeholders.
#[derive(Debug, Clone)]
pub struct ReportTemplate {
    /// Human-readable, localized template name.
    pub name: String,
    /// Ordered sections, rendered top to bottom.
    pub sections: Vec<TemplateSection>,
    /// Output format used when rendering this template.
    pub format: ReportFormat,
}
|
||||||
|
|
||||||
|
/// Escape a string for safe inclusion in HTML output.
///
/// Replaces the five HTML-significant characters with their entity forms.
/// `&` is replaced first so entities produced by later replacements are not
/// themselves re-escaped. (The replacement strings here had been corrupted
/// into the raw characters — an identity transform — by entity decoding;
/// this restores the actual escaping.)
fn escape_html(s: &str) -> String {
    s.replace('&', "&amp;")
        .replace('<', "&lt;")
        .replace('>', "&gt;")
        .replace('"', "&quot;")
        .replace('\'', "&#x27;")
}
|
||||||
|
|
||||||
|
impl ReportTemplate {
|
||||||
|
/// Render the template by substituting `{{key}}` placeholders with values.
|
||||||
|
pub fn render(&self, vars: &HashMap<String, String>) -> String {
|
||||||
|
let mut out = String::new();
|
||||||
|
for section in &self.sections {
|
||||||
|
let heading = substitute(§ion.heading, vars);
|
||||||
|
let body = substitute(§ion.body, vars);
|
||||||
|
match self.format {
|
||||||
|
ReportFormat::Markdown => {
|
||||||
|
let _ = write!(out, "## {heading}\n\n{body}\n\n");
|
||||||
|
}
|
||||||
|
ReportFormat::Html => {
|
||||||
|
let heading = escape_html(&heading);
|
||||||
|
let body = escape_html(&body);
|
||||||
|
let _ = write!(out, "<h2>{heading}</h2>\n<p>{body}</p>\n");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
out.trim_end().to_string()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Single-pass placeholder substitution.
///
/// Scans `template` left-to-right for `{{key}}` tokens and replaces them with
/// the corresponding value from `vars`. Because the scan is single-pass,
/// values that themselves contain `{{...}}` sequences are emitted literally
/// and never re-expanded, preventing injection of new placeholders.
/// Unknown placeholders are copied through verbatim, as is an opening `{{`
/// with no matching `}}`.
///
/// Fix: the previous implementation pushed `template.as_bytes()[i] as char`
/// for literal text, which reinterprets each byte of a multi-byte UTF-8
/// character as a separate Latin-1 char and corrupts non-ASCII input. This
/// version copies `&str` slices on character-safe boundaries instead (the
/// only indices used are `find` results and offsets of the ASCII `{`/`}`
/// delimiters, all of which lie on char boundaries).
fn substitute(template: &str, vars: &HashMap<String, String>) -> String {
    let mut result = String::with_capacity(template.len());
    let mut rest = template;

    while let Some(open) = rest.find("{{") {
        // Copy the literal text that precedes the opening braces.
        result.push_str(&rest[..open]);
        let after_open = &rest[open + 2..];
        match after_open.find("}}") {
            Some(close) => {
                let key = &after_open[..close];
                if let Some(value) = vars.get(key) {
                    result.push_str(value);
                } else {
                    // Unknown placeholder: emit the whole `{{key}}` as-is.
                    result.push_str(&rest[open..open + 2 + close + 2]);
                }
                rest = &after_open[close + 2..];
            }
            None => {
                // No closing `}}` anywhere after: copy the remainder verbatim.
                result.push_str(&rest[open..]);
                rest = "";
            }
        }
    }
    result.push_str(rest);
    result
}
|
||||||
|
|
||||||
|
// ── Built-in templates ────────────────────────────────────────────
|
||||||
|
|
||||||
|
/// Return the built-in weekly status template for the given language.
|
||||||
|
pub fn weekly_status_template(lang: &str) -> ReportTemplate {
|
||||||
|
let (name, sections) = match lang {
|
||||||
|
"de" => (
|
||||||
|
"Wochenstatus",
|
||||||
|
vec![
|
||||||
|
TemplateSection {
|
||||||
|
heading: "Zusammenfassung".into(),
|
||||||
|
body: "Projekt: {{project_name}} | Zeitraum: {{period}}".into(),
|
||||||
|
},
|
||||||
|
TemplateSection {
|
||||||
|
heading: "Erledigt".into(),
|
||||||
|
body: "{{completed}}".into(),
|
||||||
|
},
|
||||||
|
TemplateSection {
|
||||||
|
heading: "In Bearbeitung".into(),
|
||||||
|
body: "{{in_progress}}".into(),
|
||||||
|
},
|
||||||
|
TemplateSection {
|
||||||
|
heading: "Blockiert".into(),
|
||||||
|
body: "{{blocked}}".into(),
|
||||||
|
},
|
||||||
|
TemplateSection {
|
||||||
|
heading: "Naechste Schritte".into(),
|
||||||
|
body: "{{next_steps}}".into(),
|
||||||
|
},
|
||||||
|
],
|
||||||
|
),
|
||||||
|
"fr" => (
|
||||||
|
"Statut hebdomadaire",
|
||||||
|
vec![
|
||||||
|
TemplateSection {
|
||||||
|
heading: "Resume".into(),
|
||||||
|
body: "Projet: {{project_name}} | Periode: {{period}}".into(),
|
||||||
|
},
|
||||||
|
TemplateSection {
|
||||||
|
heading: "Termine".into(),
|
||||||
|
body: "{{completed}}".into(),
|
||||||
|
},
|
||||||
|
TemplateSection {
|
||||||
|
heading: "En cours".into(),
|
||||||
|
body: "{{in_progress}}".into(),
|
||||||
|
},
|
||||||
|
TemplateSection {
|
||||||
|
heading: "Bloque".into(),
|
||||||
|
body: "{{blocked}}".into(),
|
||||||
|
},
|
||||||
|
TemplateSection {
|
||||||
|
heading: "Prochaines etapes".into(),
|
||||||
|
body: "{{next_steps}}".into(),
|
||||||
|
},
|
||||||
|
],
|
||||||
|
),
|
||||||
|
"it" => (
|
||||||
|
"Stato settimanale",
|
||||||
|
vec![
|
||||||
|
TemplateSection {
|
||||||
|
heading: "Riepilogo".into(),
|
||||||
|
body: "Progetto: {{project_name}} | Periodo: {{period}}".into(),
|
||||||
|
},
|
||||||
|
TemplateSection {
|
||||||
|
heading: "Completato".into(),
|
||||||
|
body: "{{completed}}".into(),
|
||||||
|
},
|
||||||
|
TemplateSection {
|
||||||
|
heading: "In corso".into(),
|
||||||
|
body: "{{in_progress}}".into(),
|
||||||
|
},
|
||||||
|
TemplateSection {
|
||||||
|
heading: "Bloccato".into(),
|
||||||
|
body: "{{blocked}}".into(),
|
||||||
|
},
|
||||||
|
TemplateSection {
|
||||||
|
heading: "Prossimi passi".into(),
|
||||||
|
body: "{{next_steps}}".into(),
|
||||||
|
},
|
||||||
|
],
|
||||||
|
),
|
||||||
|
_ => (
|
||||||
|
"Weekly Status",
|
||||||
|
vec![
|
||||||
|
TemplateSection {
|
||||||
|
heading: "Summary".into(),
|
||||||
|
body: "Project: {{project_name}} | Period: {{period}}".into(),
|
||||||
|
},
|
||||||
|
TemplateSection {
|
||||||
|
heading: "Completed".into(),
|
||||||
|
body: "{{completed}}".into(),
|
||||||
|
},
|
||||||
|
TemplateSection {
|
||||||
|
heading: "In Progress".into(),
|
||||||
|
body: "{{in_progress}}".into(),
|
||||||
|
},
|
||||||
|
TemplateSection {
|
||||||
|
heading: "Blocked".into(),
|
||||||
|
body: "{{blocked}}".into(),
|
||||||
|
},
|
||||||
|
TemplateSection {
|
||||||
|
heading: "Next Steps".into(),
|
||||||
|
body: "{{next_steps}}".into(),
|
||||||
|
},
|
||||||
|
],
|
||||||
|
),
|
||||||
|
};
|
||||||
|
ReportTemplate {
|
||||||
|
name: name.into(),
|
||||||
|
sections,
|
||||||
|
format: ReportFormat::Markdown,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Return the built-in sprint review template for the given language.
|
||||||
|
pub fn sprint_review_template(lang: &str) -> ReportTemplate {
|
||||||
|
let (name, sections) = match lang {
|
||||||
|
"de" => (
|
||||||
|
"Sprint-Uebersicht",
|
||||||
|
vec![
|
||||||
|
TemplateSection {
|
||||||
|
heading: "Sprint".into(),
|
||||||
|
body: "{{sprint_dates}}".into(),
|
||||||
|
},
|
||||||
|
TemplateSection {
|
||||||
|
heading: "Erledigt".into(),
|
||||||
|
body: "{{completed}}".into(),
|
||||||
|
},
|
||||||
|
TemplateSection {
|
||||||
|
heading: "In Bearbeitung".into(),
|
||||||
|
body: "{{in_progress}}".into(),
|
||||||
|
},
|
||||||
|
TemplateSection {
|
||||||
|
heading: "Blockiert".into(),
|
||||||
|
body: "{{blocked}}".into(),
|
||||||
|
},
|
||||||
|
TemplateSection {
|
||||||
|
heading: "Velocity".into(),
|
||||||
|
body: "{{velocity}}".into(),
|
||||||
|
},
|
||||||
|
],
|
||||||
|
),
|
||||||
|
"fr" => (
|
||||||
|
"Revue de sprint",
|
||||||
|
vec![
|
||||||
|
TemplateSection {
|
||||||
|
heading: "Sprint".into(),
|
||||||
|
body: "{{sprint_dates}}".into(),
|
||||||
|
},
|
||||||
|
TemplateSection {
|
||||||
|
heading: "Termine".into(),
|
||||||
|
body: "{{completed}}".into(),
|
||||||
|
},
|
||||||
|
TemplateSection {
|
||||||
|
heading: "En cours".into(),
|
||||||
|
body: "{{in_progress}}".into(),
|
||||||
|
},
|
||||||
|
TemplateSection {
|
||||||
|
heading: "Bloque".into(),
|
||||||
|
body: "{{blocked}}".into(),
|
||||||
|
},
|
||||||
|
TemplateSection {
|
||||||
|
heading: "Velocite".into(),
|
||||||
|
body: "{{velocity}}".into(),
|
||||||
|
},
|
||||||
|
],
|
||||||
|
),
|
||||||
|
"it" => (
|
||||||
|
"Revisione sprint",
|
||||||
|
vec![
|
||||||
|
TemplateSection {
|
||||||
|
heading: "Sprint".into(),
|
||||||
|
body: "{{sprint_dates}}".into(),
|
||||||
|
},
|
||||||
|
TemplateSection {
|
||||||
|
heading: "Completato".into(),
|
||||||
|
body: "{{completed}}".into(),
|
||||||
|
},
|
||||||
|
TemplateSection {
|
||||||
|
heading: "In corso".into(),
|
||||||
|
body: "{{in_progress}}".into(),
|
||||||
|
},
|
||||||
|
TemplateSection {
|
||||||
|
heading: "Bloccato".into(),
|
||||||
|
body: "{{blocked}}".into(),
|
||||||
|
},
|
||||||
|
TemplateSection {
|
||||||
|
heading: "Velocita".into(),
|
||||||
|
body: "{{velocity}}".into(),
|
||||||
|
},
|
||||||
|
],
|
||||||
|
),
|
||||||
|
_ => (
|
||||||
|
"Sprint Review",
|
||||||
|
vec![
|
||||||
|
TemplateSection {
|
||||||
|
heading: "Sprint".into(),
|
||||||
|
body: "{{sprint_dates}}".into(),
|
||||||
|
},
|
||||||
|
TemplateSection {
|
||||||
|
heading: "Completed".into(),
|
||||||
|
body: "{{completed}}".into(),
|
||||||
|
},
|
||||||
|
TemplateSection {
|
||||||
|
heading: "In Progress".into(),
|
||||||
|
body: "{{in_progress}}".into(),
|
||||||
|
},
|
||||||
|
TemplateSection {
|
||||||
|
heading: "Blocked".into(),
|
||||||
|
body: "{{blocked}}".into(),
|
||||||
|
},
|
||||||
|
TemplateSection {
|
||||||
|
heading: "Velocity".into(),
|
||||||
|
body: "{{velocity}}".into(),
|
||||||
|
},
|
||||||
|
],
|
||||||
|
),
|
||||||
|
};
|
||||||
|
ReportTemplate {
|
||||||
|
name: name.into(),
|
||||||
|
sections,
|
||||||
|
format: ReportFormat::Markdown,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Return the built-in risk register template for the given language.
|
||||||
|
pub fn risk_register_template(lang: &str) -> ReportTemplate {
|
||||||
|
let (name, sections) = match lang {
|
||||||
|
"de" => (
|
||||||
|
"Risikoregister",
|
||||||
|
vec![
|
||||||
|
TemplateSection {
|
||||||
|
heading: "Projekt".into(),
|
||||||
|
body: "{{project_name}}".into(),
|
||||||
|
},
|
||||||
|
TemplateSection {
|
||||||
|
heading: "Risiken".into(),
|
||||||
|
body: "{{risks}}".into(),
|
||||||
|
},
|
||||||
|
TemplateSection {
|
||||||
|
heading: "Massnahmen".into(),
|
||||||
|
body: "{{mitigations}}".into(),
|
||||||
|
},
|
||||||
|
],
|
||||||
|
),
|
||||||
|
"fr" => (
|
||||||
|
"Registre des risques",
|
||||||
|
vec![
|
||||||
|
TemplateSection {
|
||||||
|
heading: "Projet".into(),
|
||||||
|
body: "{{project_name}}".into(),
|
||||||
|
},
|
||||||
|
TemplateSection {
|
||||||
|
heading: "Risques".into(),
|
||||||
|
body: "{{risks}}".into(),
|
||||||
|
},
|
||||||
|
TemplateSection {
|
||||||
|
heading: "Mesures".into(),
|
||||||
|
body: "{{mitigations}}".into(),
|
||||||
|
},
|
||||||
|
],
|
||||||
|
),
|
||||||
|
"it" => (
|
||||||
|
"Registro dei rischi",
|
||||||
|
vec![
|
||||||
|
TemplateSection {
|
||||||
|
heading: "Progetto".into(),
|
||||||
|
body: "{{project_name}}".into(),
|
||||||
|
},
|
||||||
|
TemplateSection {
|
||||||
|
heading: "Rischi".into(),
|
||||||
|
body: "{{risks}}".into(),
|
||||||
|
},
|
||||||
|
TemplateSection {
|
||||||
|
heading: "Mitigazioni".into(),
|
||||||
|
body: "{{mitigations}}".into(),
|
||||||
|
},
|
||||||
|
],
|
||||||
|
),
|
||||||
|
_ => (
|
||||||
|
"Risk Register",
|
||||||
|
vec![
|
||||||
|
TemplateSection {
|
||||||
|
heading: "Project".into(),
|
||||||
|
body: "{{project_name}}".into(),
|
||||||
|
},
|
||||||
|
TemplateSection {
|
||||||
|
heading: "Risks".into(),
|
||||||
|
body: "{{risks}}".into(),
|
||||||
|
},
|
||||||
|
TemplateSection {
|
||||||
|
heading: "Mitigations".into(),
|
||||||
|
body: "{{mitigations}}".into(),
|
||||||
|
},
|
||||||
|
],
|
||||||
|
),
|
||||||
|
};
|
||||||
|
ReportTemplate {
|
||||||
|
name: name.into(),
|
||||||
|
sections,
|
||||||
|
format: ReportFormat::Markdown,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Return the built-in milestone report template for the given language.
|
||||||
|
pub fn milestone_report_template(lang: &str) -> ReportTemplate {
|
||||||
|
let (name, sections) = match lang {
|
||||||
|
"de" => (
|
||||||
|
"Meilensteinbericht",
|
||||||
|
vec![
|
||||||
|
TemplateSection {
|
||||||
|
heading: "Projekt".into(),
|
||||||
|
body: "{{project_name}}".into(),
|
||||||
|
},
|
||||||
|
TemplateSection {
|
||||||
|
heading: "Meilensteine".into(),
|
||||||
|
body: "{{milestones}}".into(),
|
||||||
|
},
|
||||||
|
TemplateSection {
|
||||||
|
heading: "Status".into(),
|
||||||
|
body: "{{status}}".into(),
|
||||||
|
},
|
||||||
|
],
|
||||||
|
),
|
||||||
|
"fr" => (
|
||||||
|
"Rapport de jalons",
|
||||||
|
vec![
|
||||||
|
TemplateSection {
|
||||||
|
heading: "Projet".into(),
|
||||||
|
body: "{{project_name}}".into(),
|
||||||
|
},
|
||||||
|
TemplateSection {
|
||||||
|
heading: "Jalons".into(),
|
||||||
|
body: "{{milestones}}".into(),
|
||||||
|
},
|
||||||
|
TemplateSection {
|
||||||
|
heading: "Statut".into(),
|
||||||
|
body: "{{status}}".into(),
|
||||||
|
},
|
||||||
|
],
|
||||||
|
),
|
||||||
|
"it" => (
|
||||||
|
"Report milestone",
|
||||||
|
vec![
|
||||||
|
TemplateSection {
|
||||||
|
heading: "Progetto".into(),
|
||||||
|
body: "{{project_name}}".into(),
|
||||||
|
},
|
||||||
|
TemplateSection {
|
||||||
|
heading: "Milestone".into(),
|
||||||
|
body: "{{milestones}}".into(),
|
||||||
|
},
|
||||||
|
TemplateSection {
|
||||||
|
heading: "Stato".into(),
|
||||||
|
body: "{{status}}".into(),
|
||||||
|
},
|
||||||
|
],
|
||||||
|
),
|
||||||
|
_ => (
|
||||||
|
"Milestone Report",
|
||||||
|
vec![
|
||||||
|
TemplateSection {
|
||||||
|
heading: "Project".into(),
|
||||||
|
body: "{{project_name}}".into(),
|
||||||
|
},
|
||||||
|
TemplateSection {
|
||||||
|
heading: "Milestones".into(),
|
||||||
|
body: "{{milestones}}".into(),
|
||||||
|
},
|
||||||
|
TemplateSection {
|
||||||
|
heading: "Status".into(),
|
||||||
|
body: "{{status}}".into(),
|
||||||
|
},
|
||||||
|
],
|
||||||
|
),
|
||||||
|
};
|
||||||
|
ReportTemplate {
|
||||||
|
name: name.into(),
|
||||||
|
sections,
|
||||||
|
format: ReportFormat::Markdown,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[cfg(test)]
mod tests {
    use super::*;

    /// Build an owned variable map from borrowed (key, value) pairs.
    fn vars_from(pairs: &[(&str, &str)]) -> HashMap<String, String> {
        pairs
            .iter()
            .map(|&(k, v)| (k.to_string(), v.to_string()))
            .collect()
    }

    #[test]
    fn weekly_status_renders_with_variables() {
        let vars = vars_from(&[
            ("project_name", "ZeroClaw"),
            ("period", "2026-W10"),
            ("completed", "- Task A\n- Task B"),
            ("in_progress", "- Task C"),
            ("blocked", "None"),
            ("next_steps", "- Task D"),
        ]);
        let rendered = weekly_status_template("en").render(&vars);
        for needle in [
            "Project: ZeroClaw",
            "Period: 2026-W10",
            "- Task A",
            "## Completed",
        ] {
            assert!(rendered.contains(needle), "missing: {needle}");
        }
    }

    #[test]
    fn weekly_status_de_renders_german_headings() {
        let rendered = weekly_status_template("de").render(&HashMap::new());
        assert!(rendered.contains("## Zusammenfassung"));
        assert!(rendered.contains("## Erledigt"));
    }

    #[test]
    fn weekly_status_fr_renders_french_headings() {
        let rendered = weekly_status_template("fr").render(&HashMap::new());
        assert!(rendered.contains("## Resume"));
        assert!(rendered.contains("## Termine"));
    }

    #[test]
    fn weekly_status_it_renders_italian_headings() {
        let rendered = weekly_status_template("it").render(&HashMap::new());
        assert!(rendered.contains("## Riepilogo"));
        assert!(rendered.contains("## Completato"));
    }

    #[test]
    fn html_format_renders_tags() {
        let mut tpl = weekly_status_template("en");
        tpl.format = ReportFormat::Html;
        let vars = vars_from(&[
            ("project_name", "Test"),
            ("period", "W1"),
            ("completed", "Done"),
            ("in_progress", "WIP"),
            ("blocked", "None"),
            ("next_steps", "Next"),
        ]);
        let rendered = tpl.render(&vars);
        assert!(rendered.contains("<h2>Summary</h2>"));
        assert!(rendered.contains("<p>Project: Test | Period: W1</p>"));
    }

    #[test]
    fn sprint_review_template_has_velocity_section() {
        let tpl = sprint_review_template("en");
        assert!(tpl.sections.iter().any(|s| s.heading == "Velocity"));
    }

    #[test]
    fn risk_register_template_has_risk_sections() {
        let tpl = risk_register_template("en");
        let headings: Vec<&str> =
            tpl.sections.iter().map(|s| s.heading.as_str()).collect();
        assert!(headings.contains(&"Risks"));
        assert!(headings.contains(&"Mitigations"));
    }

    #[test]
    fn milestone_template_all_languages() {
        // Every supported language (plus the English fallback) must produce a
        // named template with exactly three sections.
        for lang in ["en", "de", "fr", "it"] {
            let tpl = milestone_report_template(lang);
            assert!(!tpl.name.is_empty(), "empty name for {lang}");
            assert_eq!(tpl.sections.len(), 3, "section count for {lang}");
        }
    }

    #[test]
    fn substitute_leaves_unknown_placeholders() {
        let rendered = substitute("Hello {{name}}", &HashMap::new());
        assert_eq!(rendered, "Hello {{name}}");
    }

    #[test]
    fn substitute_replaces_all_occurrences() {
        let vars = vars_from(&[("x", "1")]);
        assert_eq!(substitute("{{x}} and {{x}}", &vars), "1 and 1");
    }
}
|
||||||
Loading…
Reference in New Issue
Block a user