Files
claw-code/rust/crates/tools/src/lib.rs

4132 lines
141 KiB
Rust

use std::collections::{BTreeMap, BTreeSet};
use std::path::{Path, PathBuf};
use std::process::Command;
use std::time::{Duration, Instant};
use api::{
resolve_startup_auth_source, AnthropicClient, ContentBlockDelta, InputContentBlock,
InputMessage, MessageRequest, OutputContentBlock, StreamEvent as ApiStreamEvent, ToolChoice,
ToolDefinition, ToolResultContentBlock,
};
use reqwest::blocking::Client;
use runtime::{
edit_file, execute_bash, glob_search, grep_search, load_system_prompt, read_file, write_file,
ApiClient, ApiRequest, AssistantEvent, BashCommandInput, ConfigLoader, ContentBlock,
ConversationMessage, ConversationRuntime, GrepSearchInput, MessageRole, PermissionMode,
PermissionPolicy, RuntimeError, Session, TokenUsage, ToolError, ToolExecutor,
};
use serde::{Deserialize, Serialize};
use serde_json::{json, Value};
/// A single tool listed in the runtime's tool manifest.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct ToolManifestEntry {
    pub name: String,
    pub source: ToolSource,
}

/// Where a manifest entry comes from: part of the always-available base
/// set, or enabled conditionally.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum ToolSource {
    Base,
    Conditional,
}

/// Ordered collection of manifest entries.
#[derive(Debug, Clone, Default, PartialEq, Eq)]
pub struct ToolRegistry {
    entries: Vec<ToolManifestEntry>,
}

impl ToolRegistry {
    /// Build a registry from a pre-assembled entry list.
    #[must_use]
    pub fn new(entries: Vec<ToolManifestEntry>) -> Self {
        Self { entries }
    }

    /// Borrow the entries in insertion order.
    #[must_use]
    pub fn entries(&self) -> &[ToolManifestEntry] {
        &self.entries
    }
}

/// Static description of one tool: its wire name, human-readable
/// description, JSON-Schema contract for the input payload, and the
/// minimum permission mode required to run it.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct ToolSpec {
    pub name: &'static str,
    pub description: &'static str,
    pub input_schema: Value,
    pub required_permission: PermissionMode,
}
/// Static specifications for every MVP tool: wire name, description,
/// JSON-Schema input contract, and the permission mode needed to run it.
///
/// These schemas must stay in sync with the input structs deserialized by
/// `execute_tool` (e.g. `ReadFileInput`, `WebFetchInput`) — every schema
/// sets `additionalProperties: false` except `StructuredOutput`, which is
/// intentionally open.
#[must_use]
#[allow(clippy::too_many_lines)]
pub fn mvp_tool_specs() -> Vec<ToolSpec> {
    vec![
        // Shell execution: full access required.
        ToolSpec {
            name: "bash",
            description: "Execute a shell command in the current workspace.",
            input_schema: json!({
                "type": "object",
                "properties": {
                    "command": { "type": "string" },
                    "timeout": { "type": "integer", "minimum": 1 },
                    "description": { "type": "string" },
                    "run_in_background": { "type": "boolean" },
                    "dangerouslyDisableSandbox": { "type": "boolean" }
                },
                "required": ["command"],
                "additionalProperties": false
            }),
            required_permission: PermissionMode::DangerFullAccess,
        },
        // Read-only filesystem tools.
        ToolSpec {
            name: "read_file",
            description: "Read a text file from the workspace.",
            input_schema: json!({
                "type": "object",
                "properties": {
                    "path": { "type": "string" },
                    "offset": { "type": "integer", "minimum": 0 },
                    "limit": { "type": "integer", "minimum": 1 }
                },
                "required": ["path"],
                "additionalProperties": false
            }),
            required_permission: PermissionMode::ReadOnly,
        },
        ToolSpec {
            name: "write_file",
            description: "Write a text file in the workspace.",
            input_schema: json!({
                "type": "object",
                "properties": {
                    "path": { "type": "string" },
                    "content": { "type": "string" }
                },
                "required": ["path", "content"],
                "additionalProperties": false
            }),
            required_permission: PermissionMode::WorkspaceWrite,
        },
        ToolSpec {
            name: "edit_file",
            description: "Replace text in a workspace file.",
            input_schema: json!({
                "type": "object",
                "properties": {
                    "path": { "type": "string" },
                    "old_string": { "type": "string" },
                    "new_string": { "type": "string" },
                    "replace_all": { "type": "boolean" }
                },
                "required": ["path", "old_string", "new_string"],
                "additionalProperties": false
            }),
            required_permission: PermissionMode::WorkspaceWrite,
        },
        ToolSpec {
            name: "glob_search",
            description: "Find files by glob pattern.",
            input_schema: json!({
                "type": "object",
                "properties": {
                    "pattern": { "type": "string" },
                    "path": { "type": "string" }
                },
                "required": ["pattern"],
                "additionalProperties": false
            }),
            required_permission: PermissionMode::ReadOnly,
        },
        // ripgrep-style search; flag-named keys ("-B", "-n", …) mirror rg's CLI.
        ToolSpec {
            name: "grep_search",
            description: "Search file contents with a regex pattern.",
            input_schema: json!({
                "type": "object",
                "properties": {
                    "pattern": { "type": "string" },
                    "path": { "type": "string" },
                    "glob": { "type": "string" },
                    "output_mode": { "type": "string" },
                    "-B": { "type": "integer", "minimum": 0 },
                    "-A": { "type": "integer", "minimum": 0 },
                    "-C": { "type": "integer", "minimum": 0 },
                    "context": { "type": "integer", "minimum": 0 },
                    "-n": { "type": "boolean" },
                    "-i": { "type": "boolean" },
                    "type": { "type": "string" },
                    "head_limit": { "type": "integer", "minimum": 1 },
                    "offset": { "type": "integer", "minimum": 0 },
                    "multiline": { "type": "boolean" }
                },
                "required": ["pattern"],
                "additionalProperties": false
            }),
            required_permission: PermissionMode::ReadOnly,
        },
        // Network tools (read-only).
        ToolSpec {
            name: "WebFetch",
            description:
                "Fetch a URL, convert it into readable text, and answer a prompt about it.",
            input_schema: json!({
                "type": "object",
                "properties": {
                    "url": { "type": "string", "format": "uri" },
                    "prompt": { "type": "string" }
                },
                "required": ["url", "prompt"],
                "additionalProperties": false
            }),
            required_permission: PermissionMode::ReadOnly,
        },
        ToolSpec {
            name: "WebSearch",
            description: "Search the web for current information and return cited results.",
            input_schema: json!({
                "type": "object",
                "properties": {
                    "query": { "type": "string", "minLength": 2 },
                    "allowed_domains": {
                        "type": "array",
                        "items": { "type": "string" }
                    },
                    "blocked_domains": {
                        "type": "array",
                        "items": { "type": "string" }
                    }
                },
                "required": ["query"],
                "additionalProperties": false
            }),
            required_permission: PermissionMode::ReadOnly,
        },
        ToolSpec {
            name: "TodoWrite",
            description: "Update the structured task list for the current session.",
            input_schema: json!({
                "type": "object",
                "properties": {
                    "todos": {
                        "type": "array",
                        "items": {
                            "type": "object",
                            "properties": {
                                "content": { "type": "string" },
                                "activeForm": { "type": "string" },
                                "status": {
                                    "type": "string",
                                    "enum": ["pending", "in_progress", "completed"]
                                }
                            },
                            "required": ["content", "activeForm", "status"],
                            "additionalProperties": false
                        }
                    }
                },
                "required": ["todos"],
                "additionalProperties": false
            }),
            required_permission: PermissionMode::WorkspaceWrite,
        },
        ToolSpec {
            name: "Skill",
            description: "Load a local skill definition and its instructions.",
            input_schema: json!({
                "type": "object",
                "properties": {
                    "skill": { "type": "string" },
                    "args": { "type": "string" }
                },
                "required": ["skill"],
                "additionalProperties": false
            }),
            required_permission: PermissionMode::ReadOnly,
        },
        ToolSpec {
            name: "Agent",
            description:
                "Launch and execute a specialized child agent conversation with bounded recursion.",
            input_schema: json!({
                "type": "object",
                "properties": {
                    "description": { "type": "string" },
                    "prompt": { "type": "string" },
                    "subagent_type": { "type": "string" },
                    "name": { "type": "string" },
                    "model": { "type": "string" },
                    "max_depth": { "type": "integer", "minimum": 0 }
                },
                "required": ["description", "prompt"],
                "additionalProperties": false
            }),
            required_permission: PermissionMode::DangerFullAccess,
        },
        ToolSpec {
            name: "ToolSearch",
            description: "Search for deferred or specialized tools by exact name or keywords.",
            input_schema: json!({
                "type": "object",
                "properties": {
                    "query": { "type": "string" },
                    "max_results": { "type": "integer", "minimum": 1 }
                },
                "required": ["query"],
                "additionalProperties": false
            }),
            required_permission: PermissionMode::ReadOnly,
        },
        ToolSpec {
            name: "NotebookEdit",
            description: "Replace, insert, or delete a cell in a Jupyter notebook.",
            input_schema: json!({
                "type": "object",
                "properties": {
                    "notebook_path": { "type": "string" },
                    "cell_id": { "type": "string" },
                    "new_source": { "type": "string" },
                    "cell_type": { "type": "string", "enum": ["code", "markdown"] },
                    "edit_mode": { "type": "string", "enum": ["replace", "insert", "delete"] }
                },
                "required": ["notebook_path"],
                "additionalProperties": false
            }),
            required_permission: PermissionMode::WorkspaceWrite,
        },
        ToolSpec {
            name: "Sleep",
            description: "Wait for a specified duration without holding a shell process.",
            input_schema: json!({
                "type": "object",
                "properties": {
                    "duration_ms": { "type": "integer", "minimum": 0 }
                },
                "required": ["duration_ms"],
                "additionalProperties": false
            }),
            required_permission: PermissionMode::ReadOnly,
        },
        ToolSpec {
            name: "SendUserMessage",
            description: "Send a message to the user.",
            input_schema: json!({
                "type": "object",
                "properties": {
                    "message": { "type": "string" },
                    "attachments": {
                        "type": "array",
                        "items": { "type": "string" }
                    },
                    "status": {
                        "type": "string",
                        "enum": ["normal", "proactive"]
                    }
                },
                "required": ["message", "status"],
                "additionalProperties": false
            }),
            required_permission: PermissionMode::ReadOnly,
        },
        ToolSpec {
            name: "Config",
            description: "Get or set Claude Code settings.",
            input_schema: json!({
                "type": "object",
                "properties": {
                    "setting": { "type": "string" },
                    "value": {
                        "type": ["string", "boolean", "number"]
                    }
                },
                "required": ["setting"],
                "additionalProperties": false
            }),
            required_permission: PermissionMode::WorkspaceWrite,
        },
        // Deliberately schemaless: accepts any object payload.
        ToolSpec {
            name: "StructuredOutput",
            description: "Return structured output in the requested format.",
            input_schema: json!({
                "type": "object",
                "additionalProperties": true
            }),
            required_permission: PermissionMode::ReadOnly,
        },
        ToolSpec {
            name: "REPL",
            description: "Execute code in a REPL-like subprocess.",
            input_schema: json!({
                "type": "object",
                "properties": {
                    "code": { "type": "string" },
                    "language": { "type": "string" },
                    "timeout_ms": { "type": "integer", "minimum": 1 }
                },
                "required": ["code", "language"],
                "additionalProperties": false
            }),
            required_permission: PermissionMode::DangerFullAccess,
        },
        ToolSpec {
            name: "PowerShell",
            description: "Execute a PowerShell command with optional timeout.",
            input_schema: json!({
                "type": "object",
                "properties": {
                    "command": { "type": "string" },
                    "timeout": { "type": "integer", "minimum": 1 },
                    "description": { "type": "string" },
                    "run_in_background": { "type": "boolean" }
                },
                "required": ["command"],
                "additionalProperties": false
            }),
            required_permission: PermissionMode::DangerFullAccess,
        },
    ]
}
pub fn execute_tool(name: &str, input: &Value) -> Result<String, String> {
match name {
"bash" => from_value::<BashCommandInput>(input).and_then(run_bash),
"read_file" => from_value::<ReadFileInput>(input).and_then(run_read_file),
"write_file" => from_value::<WriteFileInput>(input).and_then(run_write_file),
"edit_file" => from_value::<EditFileInput>(input).and_then(run_edit_file),
"glob_search" => from_value::<GlobSearchInputValue>(input).and_then(run_glob_search),
"grep_search" => from_value::<GrepSearchInput>(input).and_then(run_grep_search),
"WebFetch" => from_value::<WebFetchInput>(input).and_then(run_web_fetch),
"WebSearch" => from_value::<WebSearchInput>(input).and_then(run_web_search),
"TodoWrite" => from_value::<TodoWriteInput>(input).and_then(run_todo_write),
"Skill" => from_value::<SkillInput>(input).and_then(run_skill),
"Agent" => from_value::<AgentInput>(input).and_then(run_agent),
"ToolSearch" => from_value::<ToolSearchInput>(input).and_then(run_tool_search),
"NotebookEdit" => from_value::<NotebookEditInput>(input).and_then(run_notebook_edit),
"Sleep" => from_value::<SleepInput>(input).and_then(run_sleep),
"SendUserMessage" | "Brief" => from_value::<BriefInput>(input).and_then(run_brief),
"Config" => from_value::<ConfigInput>(input).and_then(run_config),
"StructuredOutput" => {
from_value::<StructuredOutputInput>(input).and_then(run_structured_output)
}
"REPL" => from_value::<ReplInput>(input).and_then(run_repl),
"PowerShell" => from_value::<PowerShellInput>(input).and_then(run_powershell),
_ => Err(format!("unsupported tool: {name}")),
}
}
/// Deserialize a borrowed JSON value into `T`, stringifying any serde error.
fn from_value<T: for<'de> Deserialize<'de>>(input: &Value) -> Result<T, String> {
    match serde_json::from_value(input.clone()) {
        Ok(parsed) => Ok(parsed),
        Err(error) => Err(error.to_string()),
    }
}
fn run_bash(input: BashCommandInput) -> Result<String, String> {
serde_json::to_string_pretty(&execute_bash(input).map_err(|error| error.to_string())?)
.map_err(|error| error.to_string())
}
#[allow(clippy::needless_pass_by_value)]
fn run_read_file(input: ReadFileInput) -> Result<String, String> {
to_pretty_json(read_file(&input.path, input.offset, input.limit).map_err(io_to_string)?)
}
#[allow(clippy::needless_pass_by_value)]
fn run_write_file(input: WriteFileInput) -> Result<String, String> {
to_pretty_json(write_file(&input.path, &input.content).map_err(io_to_string)?)
}
#[allow(clippy::needless_pass_by_value)]
fn run_edit_file(input: EditFileInput) -> Result<String, String> {
to_pretty_json(
edit_file(
&input.path,
&input.old_string,
&input.new_string,
input.replace_all.unwrap_or(false),
)
.map_err(io_to_string)?,
)
}
#[allow(clippy::needless_pass_by_value)]
fn run_glob_search(input: GlobSearchInputValue) -> Result<String, String> {
to_pretty_json(glob_search(&input.pattern, input.path.as_deref()).map_err(io_to_string)?)
}
#[allow(clippy::needless_pass_by_value)]
fn run_grep_search(input: GrepSearchInput) -> Result<String, String> {
to_pretty_json(grep_search(&input).map_err(io_to_string)?)
}
#[allow(clippy::needless_pass_by_value)]
fn run_web_fetch(input: WebFetchInput) -> Result<String, String> {
to_pretty_json(execute_web_fetch(&input)?)
}
#[allow(clippy::needless_pass_by_value)]
fn run_web_search(input: WebSearchInput) -> Result<String, String> {
to_pretty_json(execute_web_search(&input)?)
}
fn run_todo_write(input: TodoWriteInput) -> Result<String, String> {
to_pretty_json(execute_todo_write(input)?)
}
fn run_skill(input: SkillInput) -> Result<String, String> {
to_pretty_json(execute_skill(input)?)
}
fn run_agent(input: AgentInput) -> Result<String, String> {
to_pretty_json(execute_agent(input)?)
}
fn run_tool_search(input: ToolSearchInput) -> Result<String, String> {
to_pretty_json(execute_tool_search(input))
}
fn run_notebook_edit(input: NotebookEditInput) -> Result<String, String> {
to_pretty_json(execute_notebook_edit(input)?)
}
fn run_sleep(input: SleepInput) -> Result<String, String> {
to_pretty_json(execute_sleep(input))
}
fn run_brief(input: BriefInput) -> Result<String, String> {
to_pretty_json(execute_brief(input)?)
}
fn run_config(input: ConfigInput) -> Result<String, String> {
to_pretty_json(execute_config(input)?)
}
fn run_structured_output(input: StructuredOutputInput) -> Result<String, String> {
to_pretty_json(execute_structured_output(input))
}
fn run_repl(input: ReplInput) -> Result<String, String> {
to_pretty_json(execute_repl(input)?)
}
fn run_powershell(input: PowerShellInput) -> Result<String, String> {
to_pretty_json(execute_powershell(input).map_err(|error| error.to_string())?)
}
fn to_pretty_json<T: serde::Serialize>(value: T) -> Result<String, String> {
serde_json::to_string_pretty(&value).map_err(|error| error.to_string())
}
#[allow(clippy::needless_pass_by_value)]
fn io_to_string(error: std::io::Error) -> String {
error.to_string()
}
/// Input payload for `read_file`.
#[derive(Debug, Deserialize)]
struct ReadFileInput {
    path: String,
    offset: Option<usize>,
    limit: Option<usize>,
}

/// Input payload for `write_file`.
#[derive(Debug, Deserialize)]
struct WriteFileInput {
    path: String,
    content: String,
}

/// Input payload for `edit_file`; `replace_all` is optional.
#[derive(Debug, Deserialize)]
struct EditFileInput {
    path: String,
    old_string: String,
    new_string: String,
    replace_all: Option<bool>,
}

/// Input payload for `glob_search` (named to avoid clashing with the
/// runtime's own glob input type).
#[derive(Debug, Deserialize)]
struct GlobSearchInputValue {
    pattern: String,
    path: Option<String>,
}

/// Input payload for `WebFetch`.
#[derive(Debug, Deserialize)]
struct WebFetchInput {
    url: String,
    prompt: String,
}

/// Input payload for `WebSearch`; domain lists filter the hits.
#[derive(Debug, Deserialize)]
struct WebSearchInput {
    query: String,
    allowed_domains: Option<Vec<String>>,
    blocked_domains: Option<Vec<String>>,
}

/// Input payload for `TodoWrite`: the full replacement todo list.
#[derive(Debug, Deserialize)]
struct TodoWriteInput {
    todos: Vec<TodoItem>,
}

/// One todo entry; wire format uses camelCase `activeForm`.
#[derive(Debug, Deserialize, Serialize, Clone, PartialEq, Eq)]
struct TodoItem {
    content: String,
    #[serde(rename = "activeForm")]
    active_form: String,
    status: TodoStatus,
}

/// Todo lifecycle state; serialized as snake_case strings to match the
/// `TodoWrite` JSON schema enum.
#[derive(Debug, Deserialize, Serialize, Clone, PartialEq, Eq)]
#[serde(rename_all = "snake_case")]
enum TodoStatus {
    Pending,
    InProgress,
    Completed,
}

/// Input payload for `Skill`.
#[derive(Debug, Deserialize)]
struct SkillInput {
    skill: String,
    args: Option<String>,
}

/// Input payload for `Agent`.
#[derive(Debug, Deserialize)]
struct AgentInput {
    description: String,
    prompt: String,
    subagent_type: Option<String>,
    name: Option<String>,
    model: Option<String>,
    max_depth: Option<usize>,
}

/// Input payload for `ToolSearch`.
#[derive(Debug, Deserialize)]
struct ToolSearchInput {
    query: String,
    max_results: Option<usize>,
}

/// Input payload for `NotebookEdit`; only `notebook_path` is required.
#[derive(Debug, Deserialize)]
struct NotebookEditInput {
    notebook_path: String,
    cell_id: Option<String>,
    new_source: Option<String>,
    cell_type: Option<NotebookCellType>,
    edit_mode: Option<NotebookEditMode>,
}

/// Notebook cell kind, serialized lowercase ("code"/"markdown").
#[derive(Debug, Deserialize, Serialize, Clone, Copy, PartialEq, Eq)]
#[serde(rename_all = "lowercase")]
enum NotebookCellType {
    Code,
    Markdown,
}

/// Notebook edit operation, serialized lowercase.
#[derive(Debug, Deserialize, Serialize, Clone, Copy, PartialEq, Eq)]
#[serde(rename_all = "lowercase")]
enum NotebookEditMode {
    Replace,
    Insert,
    Delete,
}

/// Input payload for `Sleep`.
#[derive(Debug, Deserialize)]
struct SleepInput {
    duration_ms: u64,
}

/// Input payload for `SendUserMessage`/`Brief`.
#[derive(Debug, Deserialize)]
struct BriefInput {
    message: String,
    attachments: Option<Vec<String>>,
    status: BriefStatus,
}

/// Delivery status for a user message ("normal"/"proactive").
#[derive(Debug, Deserialize)]
#[serde(rename_all = "lowercase")]
enum BriefStatus {
    Normal,
    Proactive,
}

/// Input payload for `Config`; `value` present means "set", absent "get".
#[derive(Debug, Deserialize)]
struct ConfigInput {
    setting: String,
    value: Option<ConfigValue>,
}

/// Untagged config value: tried as string, bool, then number.
#[derive(Debug, Deserialize)]
#[serde(untagged)]
enum ConfigValue {
    String(String),
    Bool(bool),
    Number(f64),
}

/// Input payload for `StructuredOutput`: an arbitrary JSON object,
/// deserialized transparently into an ordered map.
#[derive(Debug, Deserialize)]
#[serde(transparent)]
struct StructuredOutputInput(BTreeMap<String, Value>);

/// Input payload for `REPL`.
#[derive(Debug, Deserialize)]
struct ReplInput {
    code: String,
    language: String,
    timeout_ms: Option<u64>,
}

/// Input payload for `PowerShell`.
#[derive(Debug, Deserialize)]
struct PowerShellInput {
    command: String,
    timeout: Option<u64>,
    description: Option<String>,
    run_in_background: Option<bool>,
}
/// Result of a `WebFetch` call; wire format uses camelCase field names.
#[derive(Debug, Serialize)]
struct WebFetchOutput {
    bytes: usize,
    code: u16,
    #[serde(rename = "codeText")]
    code_text: String,
    result: String,
    #[serde(rename = "durationMs")]
    duration_ms: u128,
    url: String,
}

/// Result of a `WebSearch` call.
#[derive(Debug, Serialize)]
struct WebSearchOutput {
    query: String,
    results: Vec<WebSearchResultItem>,
    #[serde(rename = "durationSeconds")]
    duration_seconds: f64,
}

/// Result of a `TodoWrite` call: the list before and after the update.
#[derive(Debug, Serialize)]
struct TodoWriteOutput {
    #[serde(rename = "oldTodos")]
    old_todos: Vec<TodoItem>,
    #[serde(rename = "newTodos")]
    new_todos: Vec<TodoItem>,
    #[serde(rename = "verificationNudgeNeeded")]
    verification_nudge_needed: Option<bool>,
}

/// Result of a `Skill` call: the resolved skill file and its prompt text.
#[derive(Debug, Serialize)]
struct SkillOutput {
    skill: String,
    path: String,
    args: Option<String>,
    description: Option<String>,
    prompt: String,
}

/// Result of an `Agent` call; also round-trips via Deserialize so stored
/// agent runs can be read back.
#[derive(Debug, Serialize, Deserialize)]
struct AgentOutput {
    #[serde(rename = "agentId")]
    agent_id: String,
    name: String,
    description: String,
    #[serde(rename = "subagentType")]
    subagent_type: Option<String>,
    model: Option<String>,
    status: String,
    #[serde(rename = "maxDepth")]
    max_depth: usize,
    #[serde(rename = "depth")]
    depth: usize,
    #[serde(rename = "result")]
    result: Option<String>,
    #[serde(rename = "assistantMessages")]
    assistant_messages: Vec<String>,
    #[serde(rename = "toolResults")]
    tool_results: Vec<AgentToolResult>,
    #[serde(rename = "outputFile")]
    output_file: String,
    #[serde(rename = "manifestFile")]
    manifest_file: String,
    #[serde(rename = "createdAt")]
    created_at: String,
}

/// One tool invocation recorded during an agent run.
#[derive(Debug, Clone, Serialize, Deserialize)]
struct AgentToolResult {
    #[serde(rename = "toolName")]
    tool_name: String,
    output: String,
    #[serde(rename = "isError")]
    is_error: bool,
}

/// Result of a `ToolSearch` call.
#[derive(Debug, Serialize)]
struct ToolSearchOutput {
    matches: Vec<String>,
    query: String,
    normalized_query: String,
    #[serde(rename = "total_deferred_tools")]
    total_deferred_tools: usize,
    #[serde(rename = "pending_mcp_servers")]
    pending_mcp_servers: Option<Vec<String>>,
}

/// Result of a `NotebookEdit` call.
#[derive(Debug, Serialize)]
struct NotebookEditOutput {
    new_source: String,
    cell_id: Option<String>,
    cell_type: Option<NotebookCellType>,
    language: String,
    edit_mode: String,
    error: Option<String>,
    notebook_path: String,
    original_file: String,
    updated_file: String,
}

/// Result of a `Sleep` call.
#[derive(Debug, Serialize)]
struct SleepOutput {
    duration_ms: u64,
    message: String,
}

/// Result of a `SendUserMessage`/`Brief` call.
#[derive(Debug, Serialize)]
struct BriefOutput {
    message: String,
    attachments: Option<Vec<ResolvedAttachment>>,
    #[serde(rename = "sentAt")]
    sent_at: String,
}

/// An attachment path resolved on disk.
#[derive(Debug, Serialize)]
struct ResolvedAttachment {
    path: String,
    size: u64,
    #[serde(rename = "isImage")]
    is_image: bool,
}

/// Result of a `Config` get/set operation.
#[derive(Debug, Serialize)]
struct ConfigOutput {
    success: bool,
    operation: Option<String>,
    setting: Option<String>,
    value: Option<Value>,
    #[serde(rename = "previousValue")]
    previous_value: Option<Value>,
    #[serde(rename = "newValue")]
    new_value: Option<Value>,
    error: Option<String>,
}

/// Result of a `StructuredOutput` call.
#[derive(Debug, Serialize)]
struct StructuredOutputResult {
    data: String,
    structured_output: BTreeMap<String, Value>,
}

/// Result of a `REPL` call.
#[derive(Debug, Serialize)]
struct ReplOutput {
    language: String,
    stdout: String,
    stderr: String,
    #[serde(rename = "exitCode")]
    exit_code: i32,
    #[serde(rename = "durationMs")]
    duration_ms: u128,
}

/// A web-search result entry: either a block of hits or free-form
/// commentary; untagged so each variant serializes as its bare shape.
#[derive(Debug, Serialize)]
#[serde(untagged)]
enum WebSearchResultItem {
    SearchResult {
        tool_use_id: String,
        content: Vec<SearchHit>,
    },
    Commentary(String),
}

/// One search hit: title plus resolved URL.
#[derive(Debug, Serialize)]
struct SearchHit {
    title: String,
    url: String,
}
/// Fetch `input.url` and produce a prompt-aware text summary of the page.
///
/// NOTE(review): the response is consumed in a fixed order — status,
/// post-redirect URL, and headers are read *before* `.text()` takes
/// ownership of the response body — so the statement order here matters.
fn execute_web_fetch(input: &WebFetchInput) -> Result<WebFetchOutput, String> {
    let started = Instant::now();
    let client = build_http_client()?;
    // May rewrite plain-http URLs to https for non-local hosts.
    let request_url = normalize_fetch_url(&input.url)?;
    let response = client
        .get(request_url.clone())
        .send()
        .map_err(|error| error.to_string())?;
    let status = response.status();
    // The URL after redirects, not necessarily the one requested.
    let final_url = response.url().to_string();
    let code = status.as_u16();
    let code_text = status.canonical_reason().unwrap_or("Unknown").to_string();
    let content_type = response
        .headers()
        .get(reqwest::header::CONTENT_TYPE)
        .and_then(|value| value.to_str().ok())
        .unwrap_or_default()
        .to_string();
    // Consumes `response`; everything needed from it was captured above.
    let body = response.text().map_err(|error| error.to_string())?;
    let bytes = body.len();
    let normalized = normalize_fetched_content(&body, &content_type);
    let result = summarize_web_fetch(&final_url, &input.prompt, &normalized, &body, &content_type);
    Ok(WebFetchOutput {
        bytes,
        code,
        code_text,
        result,
        duration_ms: started.elapsed().as_millis(),
        url: final_url,
    })
}
/// Run a web search against the configured HTML endpoint and return
/// cited hits plus a rendered summary.
///
/// Parses provider-specific result anchors from the returned HTML, falls
/// back to scraping generic `<a>` links when none are found, applies the
/// caller's allow/deny domain filters, dedupes by URL, and caps at 8 hits.
fn execute_web_search(input: &WebSearchInput) -> Result<WebSearchOutput, String> {
    let started = Instant::now();
    let client = build_http_client()?;
    let search_url = build_search_url(&input.query)?;
    let response = client
        .get(search_url)
        .send()
        .map_err(|error| error.to_string())?;
    // Capture the post-redirect URL before `.text()` consumes the response.
    let final_url = response.url().clone();
    let html = response.text().map_err(|error| error.to_string())?;
    let mut hits = extract_search_hits(&html);
    if hits.is_empty() && final_url.host_str().is_some() {
        // Provider markup absent — scrape plain anchors instead.
        hits = extract_search_hits_from_generic_links(&html);
    }
    // Allow-list first, then deny-list.
    if let Some(allowed) = input.allowed_domains.as_ref() {
        hits.retain(|hit| host_matches_list(&hit.url, allowed));
    }
    if let Some(blocked) = input.blocked_domains.as_ref() {
        hits.retain(|hit| !host_matches_list(&hit.url, blocked));
    }
    dedupe_hits(&mut hits);
    hits.truncate(8);
    let summary = if hits.is_empty() {
        format!("No web search results matched the query {:?}.", input.query)
    } else {
        let rendered_hits = hits
            .iter()
            .map(|hit| format!("- [{}]({})", hit.title, hit.url))
            .collect::<Vec<_>>()
            .join("\n");
        format!(
            "Search results for {:?}. Include a Sources section in the final answer.\n{}",
            input.query, rendered_hits
        )
    };
    Ok(WebSearchOutput {
        query: input.query.clone(),
        results: vec![
            WebSearchResultItem::Commentary(summary),
            WebSearchResultItem::SearchResult {
                tool_use_id: String::from("web_search_1"),
                content: hits,
            },
        ],
        duration_seconds: started.elapsed().as_secs_f64(),
    })
}
/// Build the blocking HTTP client shared by WebFetch/WebSearch:
/// 20-second timeout, at most 10 redirects, and a crate-specific UA.
fn build_http_client() -> Result<Client, String> {
    let builder = Client::builder()
        .timeout(Duration::from_secs(20))
        .redirect(reqwest::redirect::Policy::limited(10))
        .user_agent("clawd-rust-tools/0.1");
    builder.build().map_err(|error| error.to_string())
}
/// Validate a fetch URL and upgrade plain `http` to `https` unless the
/// host is a local loopback address.
///
/// Fix: `Url::host_str` renders IPv6 hosts in brackets, so IPv6 loopback
/// appears as `"[::1]"` — the bare `"::1"` comparison never matched and
/// `http://[::1]/…` was wrongly upgraded to https. Both spellings are now
/// accepted.
///
/// # Errors
/// Returns an error string when the URL cannot be parsed or the scheme
/// cannot be rewritten.
fn normalize_fetch_url(url: &str) -> Result<String, String> {
    let parsed = reqwest::Url::parse(url).map_err(|error| error.to_string())?;
    if parsed.scheme() == "http" {
        let host = parsed.host_str().unwrap_or_default();
        let is_loopback = matches!(host, "localhost" | "127.0.0.1" | "::1" | "[::1]");
        if !is_loopback {
            let mut upgraded = parsed;
            upgraded
                .set_scheme("https")
                .map_err(|()| String::from("failed to upgrade URL to https"))?;
            return Ok(upgraded.to_string());
        }
    }
    Ok(parsed.to_string())
}
/// Build the search-endpoint URL with `q=<query>` appended.
///
/// The base comes from `CLAWD_WEB_SEARCH_BASE_URL` when set, otherwise
/// the DuckDuckGo HTML endpoint.
fn build_search_url(query: &str) -> Result<reqwest::Url, String> {
    let base = std::env::var("CLAWD_WEB_SEARCH_BASE_URL")
        .unwrap_or_else(|_| String::from("https://html.duckduckgo.com/html/"));
    let mut url = reqwest::Url::parse(&base).map_err(|error| error.to_string())?;
    url.query_pairs_mut().append_pair("q", query);
    Ok(url)
}
/// Turn a fetched body into readable text: strip tags for HTML content
/// types, otherwise just trim surrounding whitespace.
fn normalize_fetched_content(body: &str, content_type: &str) -> String {
    if !content_type.contains("html") {
        return body.trim().to_string();
    }
    html_to_text(body)
}
/// Render the WebFetch result text, steering the detail by keywords in
/// the user's prompt ("title" → title extraction, "summary"/"summarize"
/// → longer preview, anything else → prompt echo plus preview).
fn summarize_web_fetch(
    url: &str,
    prompt: &str,
    content: &str,
    raw_body: &str,
    content_type: &str,
) -> String {
    let prompt_lower = prompt.to_lowercase();
    let compact = collapse_whitespace(content);
    let wants_title = prompt_lower.contains("title");
    let wants_summary = prompt_lower.contains("summary") || prompt_lower.contains("summarize");
    let detail = if wants_title {
        match extract_title(content, raw_body, content_type) {
            Some(title) => format!("Title: {title}"),
            None => preview_text(&compact, 600),
        }
    } else if wants_summary {
        preview_text(&compact, 900)
    } else {
        format!(
            "Prompt: {prompt}\nContent preview:\n{}",
            preview_text(&compact, 900)
        )
    };
    format!("Fetched {url}\n{detail}")
}
/// Best-effort title extraction for a fetched document.
///
/// For HTML content, scans for a `<title>` element case-insensitively and
/// slices the original body with the offsets found in the lowercased
/// copy. Fix: the fold must be length-preserving for those offsets to be
/// valid — `str::to_lowercase` is not (some Unicode characters expand,
/// e.g. 'İ'), which could slice out of bounds or mid-character; the
/// ASCII-only `to_ascii_lowercase` keeps byte offsets aligned, and HTML
/// tag names are ASCII anyway. Falls back to the first non-blank line of
/// the normalized text.
fn extract_title(content: &str, raw_body: &str, content_type: &str) -> Option<String> {
    if content_type.contains("html") {
        // ASCII lowercasing preserves byte length, so indices found in
        // `lowered` are valid for `raw_body`.
        let lowered = raw_body.to_ascii_lowercase();
        if let Some(start) = lowered.find("<title>") {
            let after = start + "<title>".len();
            if let Some(end_rel) = lowered[after..].find("</title>") {
                let title =
                    collapse_whitespace(&decode_html_entities(&raw_body[after..after + end_rel]));
                if !title.is_empty() {
                    return Some(title);
                }
            }
        }
    }
    // Non-HTML (or titleless HTML): first non-blank line of the text.
    for line in content.lines() {
        let trimmed = line.trim();
        if !trimmed.is_empty() {
            return Some(trimmed.to_string());
        }
    }
    None
}
/// Strip HTML tags from `html` and return the remaining text with
/// entities decoded and whitespace collapsed to single spaces.
///
/// Characters between `<` and `>` are dropped entirely; a `>` seen
/// outside a tag is also dropped (it only resets the tag flag).
fn html_to_text(html: &str) -> String {
    let mut out = String::with_capacity(html.len());
    let mut inside_tag = false;
    let mut last_was_space = false;
    for ch in html.chars() {
        if ch == '<' {
            inside_tag = true;
        } else if ch == '>' {
            inside_tag = false;
        } else if inside_tag {
            // Tag contents (names, attributes) are discarded.
        } else if ch.is_whitespace() {
            // Runs of whitespace collapse to one space.
            if !last_was_space {
                out.push(' ');
                last_was_space = true;
            }
        } else {
            out.push(ch);
            last_was_space = false;
        }
    }
    collapse_whitespace(&decode_html_entities(&out))
}
/// Decode the small set of HTML entities this module deals with.
///
/// Fix: `&amp;` must be decoded LAST. Decoding it first turned the
/// escaped literal `&amp;lt;` into `&lt;` and then into `<` — a
/// double-decode of content the author had deliberately escaped. With
/// `&amp;` handled last, `&amp;lt;` correctly decodes to the literal
/// text `&lt;`.
fn decode_html_entities(input: &str) -> String {
    input
        .replace("&lt;", "<")
        .replace("&gt;", ">")
        .replace("&quot;", "\"")
        .replace("&#39;", "'")
        .replace("&nbsp;", " ")
        .replace("&amp;", "&")
}
/// Collapse all runs of whitespace to single spaces and trim both ends.
fn collapse_whitespace(input: &str) -> String {
    let mut out = String::with_capacity(input.len());
    for word in input.split_whitespace() {
        if !out.is_empty() {
            out.push(' ');
        }
        out.push_str(word);
    }
    out
}
/// Truncate `input` to at most `max_chars` characters (characters, not
/// bytes), removing any trailing whitespace left at the cut point.
///
/// Returns the input unchanged when it already fits. Fix: the original
/// wrapped the result in a no-op `format!("{}", …)`; the trimmed slice
/// is converted directly instead.
fn preview_text(input: &str, max_chars: usize) -> String {
    if input.chars().count() <= max_chars {
        return input.to_string();
    }
    let shortened: String = input.chars().take(max_chars).collect();
    shortened.trim_end().to_string()
}
/// Extract search hits from DuckDuckGo-style result markup by scanning
/// for anchors carrying the `result__a` class.
///
/// Hand-rolled scanner: on any malformed anchor the cursor advances by
/// one byte past the class marker and scanning resumes, so a broken
/// fragment never stalls the loop. Only URLs that survive
/// `decode_duckduckgo_redirect` (absolute http/https after unwrapping
/// the redirect) are kept.
fn extract_search_hits(html: &str) -> Vec<SearchHit> {
    let mut hits = Vec::new();
    let mut remaining = html;
    while let Some(anchor_start) = remaining.find("result__a") {
        let after_class = &remaining[anchor_start..];
        // Locate the href attribute within this anchor.
        let Some(href_idx) = after_class.find("href=") else {
            remaining = &after_class[1..];
            continue;
        };
        let href_slice = &after_class[href_idx + 5..];
        let Some((url, rest)) = extract_quoted_value(href_slice) else {
            remaining = &after_class[1..];
            continue;
        };
        // Skip past the rest of the opening tag to reach the link text.
        let Some(close_tag_idx) = rest.find('>') else {
            remaining = &after_class[1..];
            continue;
        };
        let after_tag = &rest[close_tag_idx + 1..];
        let Some(end_anchor_idx) = after_tag.find("</a>") else {
            remaining = &after_tag[1..];
            continue;
        };
        let title = html_to_text(&after_tag[..end_anchor_idx]);
        if let Some(decoded_url) = decode_duckduckgo_redirect(&url) {
            hits.push(SearchHit {
                title: title.trim().to_string(),
                url: decoded_url,
            });
        }
        // Resume scanning after the closing </a>.
        remaining = &after_tag[end_anchor_idx + 4..];
    }
    hits
}
/// Fallback hit extraction: scan every `<a>` tag in the page when the
/// provider-specific markup produced nothing.
///
/// Same cursor-advance scanner as `extract_search_hits`, but anchors with
/// empty link text are skipped and URLs are kept as long as they resolve
/// to absolute http/https (redirect decoding is best-effort here — an
/// undecodable URL falls back to the raw href).
fn extract_search_hits_from_generic_links(html: &str) -> Vec<SearchHit> {
    let mut hits = Vec::new();
    let mut remaining = html;
    while let Some(anchor_start) = remaining.find("<a") {
        let after_anchor = &remaining[anchor_start..];
        let Some(href_idx) = after_anchor.find("href=") else {
            remaining = &after_anchor[2..];
            continue;
        };
        let href_slice = &after_anchor[href_idx + 5..];
        let Some((url, rest)) = extract_quoted_value(href_slice) else {
            remaining = &after_anchor[2..];
            continue;
        };
        let Some(close_tag_idx) = rest.find('>') else {
            remaining = &after_anchor[2..];
            continue;
        };
        let after_tag = &rest[close_tag_idx + 1..];
        let Some(end_anchor_idx) = after_tag.find("</a>") else {
            remaining = &after_anchor[2..];
            continue;
        };
        let title = html_to_text(&after_tag[..end_anchor_idx]);
        // Anchors without visible text make useless citations — skip.
        if title.trim().is_empty() {
            remaining = &after_tag[end_anchor_idx + 4..];
            continue;
        }
        let decoded_url = decode_duckduckgo_redirect(&url).unwrap_or(url);
        if decoded_url.starts_with("http://") || decoded_url.starts_with("https://") {
            hits.push(SearchHit {
                title: title.trim().to_string(),
                url: decoded_url,
            });
        }
        remaining = &after_tag[end_anchor_idx + 4..];
    }
    hits
}
/// Parse a leading quoted string (single or double quotes) from `input`.
///
/// Returns the unquoted value plus the remainder of the input after the
/// closing quote, or `None` when the input does not start with a quote
/// or the quote is never closed.
fn extract_quoted_value(input: &str) -> Option<(String, &str)> {
    let mut chars = input.chars();
    let quote = chars.next()?;
    if !matches!(quote, '"' | '\'') {
        return None;
    }
    let body = chars.as_str();
    let end = body.find(quote)?;
    let value = body[..end].to_string();
    Some((value, &body[end + quote.len_utf8()..]))
}
/// Resolve a result href to an absolute URL, unwrapping DuckDuckGo's
/// `/l/?uddg=<target>` redirect wrapper when present.
///
/// Absolute http/https URLs pass straight through (entity-decoded);
/// scheme-relative (`//…`) and site-relative (`/…`) hrefs are joined
/// against https / duckduckgo.com; anything else yields `None`.
fn decode_duckduckgo_redirect(url: &str) -> Option<String> {
    if url.starts_with("http://") || url.starts_with("https://") {
        return Some(html_entity_decode_url(url));
    }
    let joined = if let Some(schemeless) = url.strip_prefix("//") {
        format!("https://{schemeless}")
    } else if url.starts_with('/') {
        format!("https://duckduckgo.com{url}")
    } else {
        return None;
    };
    let parsed = reqwest::Url::parse(&joined).ok()?;
    let path = parsed.path();
    if path == "/l/" || path == "/l" {
        // Redirect wrapper: the real target lives in the `uddg` param.
        if let Some((_, target)) = parsed.query_pairs().find(|(key, _)| key == "uddg") {
            return Some(html_entity_decode_url(target.as_ref()));
        }
    }
    Some(joined)
}
/// Decode HTML entities that appear inside a URL taken from an href
/// attribute (thin alias over `decode_html_entities`).
fn html_entity_decode_url(url: &str) -> String {
    decode_html_entities(url)
}
/// Check whether `url`'s host matches any entry in `domains`.
///
/// A domain matches either exactly or as a parent domain (entry
/// "example.com" matches host "www.example.com"). Unparseable URLs and
/// host-less URLs never match.
fn host_matches_list(url: &str, domains: &[String]) -> bool {
    let parsed = match reqwest::Url::parse(url) {
        Ok(parsed) => parsed,
        Err(_) => return false,
    };
    let host = match parsed.host_str() {
        Some(host) => host.to_ascii_lowercase(),
        None => return false,
    };
    domains.iter().any(|domain| {
        let normalized = normalize_domain_filter(domain);
        if normalized.is_empty() {
            return false;
        }
        host == normalized || host.ends_with(&format!(".{normalized}"))
    })
}
/// Normalize a user-supplied domain filter to a bare lowercase host.
///
/// Full URLs are reduced to their host; otherwise the raw string is used.
/// Leading dots and trailing slashes are stripped so entries like
/// ".example.com" and "example.com/" compare equal.
fn normalize_domain_filter(domain: &str) -> String {
    let trimmed = domain.trim();
    let host_part = match reqwest::Url::parse(trimmed) {
        Ok(url) => url.host_str().map(str::to_string),
        Err(_) => None,
    };
    let candidate = host_part.unwrap_or_else(|| trimmed.to_string());
    candidate
        .trim()
        .trim_start_matches('.')
        .trim_end_matches('/')
        .to_ascii_lowercase()
}
/// Drop hits whose URL was already seen, keeping first occurrences and
/// preserving order.
fn dedupe_hits(hits: &mut Vec<SearchHit>) {
    let mut seen_urls: BTreeSet<String> = BTreeSet::new();
    let mut kept = Vec::with_capacity(hits.len());
    for hit in hits.drain(..) {
        if seen_urls.insert(hit.url.clone()) {
            kept.push(hit);
        }
    }
    *hits = kept;
}
/// Replace the persisted todo list with `input.todos` and report the
/// before/after state.
///
/// Read-modify-write sequence against the markdown store: validate, load
/// the previous list, persist the new one (an all-completed list clears
/// the store), and compute an optional "verification nudge" flag when a
/// non-trivial list was finished without any verification step.
fn execute_todo_write(input: TodoWriteInput) -> Result<TodoWriteOutput, String> {
    validate_todos(&input.todos)?;
    let store_path = todo_store_path()?;
    // Previous state, for the oldTodos field; missing store means empty.
    let old_todos = if store_path.exists() {
        parse_todo_markdown(
            &std::fs::read_to_string(&store_path).map_err(|error| error.to_string())?,
        )?
    } else {
        Vec::new()
    };
    let all_done = input
        .todos
        .iter()
        .all(|todo| matches!(todo.status, TodoStatus::Completed));
    // A fully-completed list resets the store rather than persisting it.
    let persisted = if all_done {
        Vec::new()
    } else {
        input.todos.clone()
    };
    if let Some(parent) = store_path.parent() {
        std::fs::create_dir_all(parent).map_err(|error| error.to_string())?;
    }
    std::fs::write(&store_path, render_todo_markdown(&persisted))
        .map_err(|error| error.to_string())?;
    // Nudge when 3+ items all completed but none mention verification
    // (substring "verif" covers verify/verified/verification).
    let verification_nudge_needed = (all_done
        && input.todos.len() >= 3
        && !input
            .todos
            .iter()
            .any(|todo| todo.content.to_lowercase().contains("verif")))
    .then_some(true);
    Ok(TodoWriteOutput {
        old_todos,
        new_todos: input.todos,
        verification_nudge_needed,
    })
}
fn execute_skill(input: SkillInput) -> Result<SkillOutput, String> {
let skill_path = resolve_skill_path(&input.skill)?;
let prompt = std::fs::read_to_string(&skill_path).map_err(|error| error.to_string())?;
let description = parse_skill_description(&prompt);
Ok(SkillOutput {
skill: input.skill,
path: skill_path.display().to_string(),
args: input.args,
description,
prompt,
})
}
/// Validate a `TodoWrite` payload before persisting it.
///
/// Rules, checked in order: the list must be non-empty; at most one item
/// may be `in_progress`; no item may have blank content; no item may
/// have a blank activeForm.
fn validate_todos(todos: &[TodoItem]) -> Result<(), String> {
    if todos.is_empty() {
        return Err("todos must not be empty".to_string());
    }
    let active_count = todos
        .iter()
        .filter(|todo| matches!(todo.status, TodoStatus::InProgress))
        .count();
    if active_count > 1 {
        return Err("exactly zero or one todo items may be in_progress".to_string());
    }
    for todo in todos {
        if todo.content.trim().is_empty() {
            return Err("todo content must not be empty".to_string());
        }
    }
    for todo in todos {
        if todo.active_form.trim().is_empty() {
            return Err("todo activeForm must not be empty".to_string());
        }
    }
    Ok(())
}
/// Location of the persisted todo markdown file.
///
/// `CLAWD_TODO_STORE` overrides the default of `<cwd>/.claude/todos.md`.
fn todo_store_path() -> Result<std::path::PathBuf, String> {
    match std::env::var("CLAWD_TODO_STORE") {
        Ok(path) => Ok(std::path::PathBuf::from(path)),
        Err(_) => std::env::current_dir()
            .map(|cwd| cwd.join(".claude").join("todos.md"))
            .map_err(|error| error.to_string()),
    }
}
/// Serialize todos into the markdown checklist format used by the store:
/// a header, then one `- [marker] content :: activeForm` line per item.
fn render_todo_markdown(todos: &[TodoItem]) -> String {
    let mut out = String::from("# Todo list\n\n");
    for todo in todos {
        let marker = match todo.status {
            TodoStatus::Pending => "[ ]",
            TodoStatus::InProgress => "[~]",
            TodoStatus::Completed => "[x]",
        };
        out.push_str(&format!(
            "- {marker} {} :: {}\n",
            todo.content, todo.active_form
        ));
    }
    out
}
/// Parse the markdown checklist written by `render_todo_markdown`.
///
/// Headers and blank lines are skipped; any other line must look like
/// `- [m] content :: activeForm` where `m` is a known status marker.
fn parse_todo_markdown(content: &str) -> Result<Vec<TodoItem>, String> {
    let mut items = Vec::new();
    for raw_line in content.lines() {
        let line = raw_line.trim();
        if line.is_empty() || line.starts_with('#') {
            continue;
        }
        let Some(after_bullet) = line.strip_prefix("- [") else {
            continue;
        };
        let mut cursor = after_bullet.chars();
        let status = match cursor.next() {
            Some(' ') => TodoStatus::Pending,
            Some('~') => TodoStatus::InProgress,
            Some('x' | 'X') => TodoStatus::Completed,
            Some(other) => return Err(format!("unsupported todo status marker: {other}")),
            None => return Err(String::from("malformed todo line")),
        };
        let body = cursor
            .as_str()
            .strip_prefix("] ")
            .ok_or_else(|| String::from("malformed todo line"))?;
        let (content, active_form) = body
            .split_once(" :: ")
            .ok_or_else(|| String::from("todo line missing active form separator"))?;
        items.push(TodoItem {
            content: content.trim().to_string(),
            active_form: active_form.trim().to_string(),
            status,
        });
    }
    Ok(items)
}
/// Locate the `SKILL.md` file for a requested skill name.
///
/// Searches `$CODEX_HOME/skills` (when set) and a fixed fallback
/// directory; a skill matches either by exact directory name or by a
/// case-insensitive directory-name comparison.
fn resolve_skill_path(skill: &str) -> Result<std::path::PathBuf, String> {
    let requested = skill.trim().trim_start_matches('/').trim_start_matches('$');
    if requested.is_empty() {
        return Err(String::from("skill must not be empty"));
    }
    let mut roots: Vec<std::path::PathBuf> = Vec::new();
    if let Ok(codex_home) = std::env::var("CODEX_HOME") {
        roots.push(std::path::PathBuf::from(codex_home).join("skills"));
    }
    roots.push(std::path::PathBuf::from("/home/bellman/.codex/skills"));
    for root in roots {
        let exact = root.join(requested).join("SKILL.md");
        if exact.exists() {
            return Ok(exact);
        }
        let Ok(entries) = std::fs::read_dir(&root) else {
            continue;
        };
        for entry in entries.flatten() {
            let candidate = entry.path().join("SKILL.md");
            if candidate.exists()
                && entry
                    .file_name()
                    .to_string_lossy()
                    .eq_ignore_ascii_case(requested)
            {
                return Ok(candidate);
            }
        }
    }
    Err(format!("unknown skill: {requested}"))
}
/// Spawn a synchronous child agent and persist its results.
///
/// Validates the request, enforces the recursion limit read from
/// `CLAWD_AGENT_DEPTH` against `max_depth` (default 3), runs the child
/// conversation with the depth incremented, then writes a markdown report
/// and a JSON manifest into the agent store.
///
/// # Errors
/// Returns a string error for empty description/prompt, exceeded depth,
/// I/O or serialization failures, or a failed child conversation.
fn execute_agent(input: AgentInput) -> Result<AgentOutput, String> {
    if input.description.trim().is_empty() {
        return Err(String::from("description must not be empty"));
    }
    if input.prompt.trim().is_empty() {
        return Err(String::from("prompt must not be empty"));
    }
    let depth = current_agent_depth()?;
    let max_depth = input.max_depth.unwrap_or(3);
    if depth >= max_depth {
        return Err(format!(
            "Agent max_depth exceeded: current depth {depth} reached limit {max_depth}"
        ));
    }
    let agent_id = make_agent_id();
    let output_dir = agent_store_dir()?;
    std::fs::create_dir_all(&output_dir).map_err(|error| error.to_string())?;
    // Two artifacts per run: a human-readable report and a JSON manifest.
    let output_file = output_dir.join(format!("{agent_id}.md"));
    let manifest_file = output_dir.join(format!("{agent_id}.json"));
    let normalized_subagent_type = normalize_subagent_type(input.subagent_type.as_deref());
    // Explicit name wins when it slugifies to something non-empty;
    // otherwise the name is derived from the description.
    let agent_name = input
        .name
        .as_deref()
        .map(slugify_agent_name)
        .filter(|name| !name.is_empty())
        .unwrap_or_else(|| slugify_agent_name(&input.description));
    let created_at = iso8601_now();
    let model = input.model.clone().or_else(agent_default_model);
    // Run the child with CLAWD_AGENT_DEPTH bumped so nested agents see it.
    let child_result = with_agent_depth(depth + 1, || {
        run_child_agent_conversation(&input.prompt, model.clone(), max_depth)
    })?;
    let manifest = AgentOutput {
        agent_id,
        name: agent_name,
        description: input.description,
        subagent_type: Some(normalized_subagent_type),
        model,
        status: String::from("completed"),
        max_depth,
        depth,
        result: child_result.result.clone(),
        assistant_messages: child_result.assistant_messages.clone(),
        tool_results: child_result.tool_results.clone(),
        output_file: output_file.display().to_string(),
        manifest_file: manifest_file.display().to_string(),
        created_at,
    };
    let output_contents = render_agent_output(&manifest);
    std::fs::write(&output_file, output_contents).map_err(|error| error.to_string())?;
    std::fs::write(
        &manifest_file,
        serde_json::to_string_pretty(&manifest).map_err(|error| error.to_string())?,
    )
    .map_err(|error| error.to_string())?;
    Ok(manifest)
}
/// Outcome of a child agent's single conversation turn.
#[derive(Debug, Clone)]
struct ChildConversationResult {
    // Final assistant text, if the child produced any.
    result: Option<String>,
    // Every assistant text message, in order.
    assistant_messages: Vec<String>,
    // Summaries of the tool invocations the child made.
    tool_results: Vec<AgentToolResult>,
}
/// Drive a complete child-agent conversation for a single prompt.
///
/// Builds a fresh runtime capped at 16 assistant iterations with the
/// full-access agent policy, runs one turn, then distills the summary
/// into plain-text assistant messages and tool-result summaries. The last
/// assistant message doubles as the overall result. `_max_depth` is
/// enforced by the caller, not here.
fn run_child_agent_conversation(
    prompt: &str,
    model: Option<String>,
    _max_depth: usize,
) -> Result<ChildConversationResult, String> {
    let mut runtime = ConversationRuntime::new(
        Session::new(),
        build_agent_api_client(model.unwrap_or_else(default_agent_model))?,
        AgentToolExecutor,
        agent_permission_policy(),
        build_agent_system_prompt()?,
    )
    .with_max_iterations(16);
    let summary = runtime
        .run_turn(prompt, None)
        .map_err(|error| error.to_string())?;
    let assistant_messages = summary
        .assistant_messages
        .iter()
        .filter_map(extract_message_text)
        .collect::<Vec<_>>();
    let tool_results = summary
        .tool_results
        .iter()
        .filter_map(extract_agent_tool_result)
        .collect::<Vec<_>>();
    let result = assistant_messages.last().cloned();
    Ok(ChildConversationResult {
        result,
        assistant_messages,
        tool_results,
    })
}
/// Render an agent manifest as the human-readable markdown report stored
/// next to the JSON manifest.
fn render_agent_output(output: &AgentOutput) -> String {
    let mut report: Vec<String> = vec![
        "# Agent Task".to_string(),
        String::new(),
        format!("- id: {}", output.agent_id),
        format!("- name: {}", output.name),
        format!("- description: {}", output.description),
        format!(
            "- subagent_type: {}",
            output.subagent_type.as_deref().unwrap_or("general-purpose")
        ),
        format!("- status: {}", output.status),
        format!("- depth: {}", output.depth),
        format!("- max_depth: {}", output.max_depth),
        format!("- created_at: {}", output.created_at),
        String::new(),
        "## Result".to_string(),
        String::new(),
    ];
    report.push(match &output.result {
        Some(text) => text.clone(),
        None => String::from("<no final assistant text>"),
    });
    if !output.tool_results.is_empty() {
        report.push(String::new());
        report.push("## Tool Results".to_string());
        report.push(String::new());
        for result in &output.tool_results {
            let verdict = if result.is_error { "error" } else { "ok" };
            report.push(format!(
                "- {} [{}]: {}",
                result.tool_name, verdict, result.output
            ));
        }
    }
    report.join("\n")
}
/// Current nesting depth of agents, read from `CLAWD_AGENT_DEPTH`.
///
/// An unset (or non-unicode) variable means depth zero; a set but
/// unparsable value is an error.
fn current_agent_depth() -> Result<usize, String> {
    let Ok(raw) = std::env::var("CLAWD_AGENT_DEPTH") else {
        return Ok(0);
    };
    raw.parse::<usize>()
        .map_err(|error| format!("invalid CLAWD_AGENT_DEPTH: {error}"))
}
/// Run `f` with `CLAWD_AGENT_DEPTH` set to `depth`, restoring the previous
/// value (or unsetting the variable) afterwards. Not panic-safe: a panic
/// inside `f` skips the restore.
fn with_agent_depth<T>(depth: usize, f: impl FnOnce() -> Result<T, String>) -> Result<T, String> {
    let saved = std::env::var("CLAWD_AGENT_DEPTH").ok();
    std::env::set_var("CLAWD_AGENT_DEPTH", depth.to_string());
    let outcome = f();
    match saved {
        Some(value) => std::env::set_var("CLAWD_AGENT_DEPTH", value),
        None => std::env::remove_var("CLAWD_AGENT_DEPTH"),
    }
    outcome
}
/// Model override from `CLAWD_MODEL`, if set to a non-blank value.
fn agent_default_model() -> Option<String> {
    match std::env::var("CLAWD_MODEL") {
        Ok(value) if !value.trim().is_empty() => Some(value),
        _ => None,
    }
}
/// Model used by child agents when `CLAWD_MODEL` provides no override.
fn default_agent_model() -> String {
    match agent_default_model() {
        Some(model) => model,
        None => String::from("claude-sonnet-4-20250514"),
    }
}
/// Assemble the child agent's system prompt for the current workspace.
///
/// `CLAWD_CURRENT_DATE` overrides the date stamp (a fixed default is used
/// otherwise).
fn build_agent_system_prompt() -> Result<Vec<String>, String> {
    let cwd = std::env::current_dir().map_err(|error| error.to_string())?;
    let date = match std::env::var("CLAWD_CURRENT_DATE") {
        Ok(value) => value,
        Err(_) => String::from("2026-04-01"),
    };
    load_system_prompt(cwd, &date, std::env::consts::OS, "unknown")
        .map_err(|error| error.to_string())
}
/// Permission policy for child agents: full access, with every MVP tool's
/// declared requirement registered.
fn agent_permission_policy() -> PermissionPolicy {
    let mut policy = PermissionPolicy::new(PermissionMode::DangerFullAccess);
    for spec in mvp_tool_specs() {
        policy = policy.with_tool_requirement(spec.name, spec.required_permission);
    }
    policy
}
struct AgentToolExecutor;
impl ToolExecutor for AgentToolExecutor {
fn execute(&mut self, tool_name: &str, input: &str) -> Result<String, ToolError> {
let value = serde_json::from_str(input)
.map_err(|error| ToolError::new(format!("invalid tool input JSON: {error}")))?;
execute_tool(tool_name, &value).map_err(ToolError::new)
}
}
/// API client used by child agents: either a scripted fake (driven by
/// `CLAWD_AGENT_TEST_SCRIPT`) or the real Anthropic client.
enum AgentApiClient {
    Scripted(ScriptedAgentApiClient),
    Anthropic(AnthropicAgentApiClient),
}
impl ApiClient for AgentApiClient {
    // Delegate streaming to whichever variant is active.
    fn stream(&mut self, request: ApiRequest) -> Result<Vec<AssistantEvent>, RuntimeError> {
        match self {
            Self::Scripted(client) => client.stream(request),
            Self::Anthropic(client) => client.stream(request),
        }
    }
}
/// Choose the API client for a child agent: a scripted fake when
/// `CLAWD_AGENT_TEST_SCRIPT` is set to a non-blank value (used by tests),
/// otherwise the real Anthropic client.
fn build_agent_api_client(model: String) -> Result<AgentApiClient, String> {
    let script = std::env::var("CLAWD_AGENT_TEST_SCRIPT")
        .ok()
        .filter(|value| !value.trim().is_empty());
    match script {
        Some(script) => Ok(AgentApiClient::Scripted(ScriptedAgentApiClient::new(
            &script,
        )?)),
        None => Ok(AgentApiClient::Anthropic(AnthropicAgentApiClient::new(
            model,
        )?)),
    }
}
/// Blocking wrapper around the async Anthropic client for child agents.
struct AnthropicAgentApiClient {
    // Dedicated Tokio runtime used to block on the async stream.
    runtime: tokio::runtime::Runtime,
    client: AnthropicClient,
    model: String,
}
impl AnthropicAgentApiClient {
    /// Build a client with freshly resolved credentials.
    ///
    /// # Errors
    /// Returns a string error when the Tokio runtime cannot be created or
    /// auth resolution fails.
    fn new(model: String) -> Result<Self, String> {
        Ok(Self {
            runtime: tokio::runtime::Runtime::new().map_err(|error| error.to_string())?,
            client: AnthropicClient::from_auth(resolve_agent_auth_source()?),
            model,
        })
    }
}
impl ApiClient for AnthropicAgentApiClient {
    /// Issue one streaming Messages request and flatten the stream into the
    /// runtime's `AssistantEvent` list.
    ///
    /// Tool-use blocks arrive as a start block plus JSON input deltas; they
    /// are buffered in `pending_tool` and emitted on `ContentBlockStop`.
    /// A trailing `MessageStop` is synthesized if the stream ends without
    /// one.
    fn stream(&mut self, request: ApiRequest) -> Result<Vec<AssistantEvent>, RuntimeError> {
        let message_request = MessageRequest {
            model: self.model.clone(),
            // NOTE(review): a 32-token output cap is very small for an agent
            // turn — confirm this is intentional and not a test leftover.
            max_tokens: 32,
            messages: convert_agent_messages(&request.messages),
            // Join prompt sections with a newline (the string literal below
            // spans two source lines and contains a single "\n").
            system: (!request.system_prompt.is_empty()).then(|| {
                request.system_prompt.join(
                    "
",
                )
            }),
            tools: Some(agent_tool_definitions()),
            tool_choice: Some(ToolChoice::Auto),
            stream: true,
            thinking: None,
        };
        self.runtime.block_on(async {
            let mut stream = self
                .client
                .stream_message(&message_request)
                .await
                .map_err(|error| RuntimeError::new(error.to_string()))?;
            let mut events = Vec::new();
            // (id, name, partially-accumulated JSON input) of an open tool call.
            let mut pending_tool: Option<(String, String, String)> = None;
            let mut saw_stop = false;
            while let Some(event) = stream
                .next_event()
                .await
                .map_err(|error| RuntimeError::new(error.to_string()))?
            {
                match event {
                    ApiStreamEvent::MessageStart(start) => {
                        push_agent_output_blocks(
                            start.message.content,
                            &mut events,
                            &mut pending_tool,
                        );
                    }
                    ApiStreamEvent::ContentBlockStart(start) => {
                        push_agent_output_block(
                            start.content_block,
                            &mut events,
                            &mut pending_tool,
                        );
                    }
                    ApiStreamEvent::ContentBlockDelta(delta) => match delta.delta {
                        ContentBlockDelta::TextDelta { text } => {
                            if !text.is_empty() {
                                events.push(AssistantEvent::TextDelta(text));
                            }
                        }
                        ContentBlockDelta::InputJsonDelta { partial_json } => {
                            // Accumulate streamed JSON fragments on the open
                            // tool call, if any.
                            if let Some((_, _, input)) = &mut pending_tool {
                                input.push_str(&partial_json);
                            }
                        }
                        ContentBlockDelta::ThinkingDelta { .. }
                        | ContentBlockDelta::SignatureDelta { .. } => {}
                    },
                    ApiStreamEvent::ContentBlockStop(_) => {
                        // The tool call's input is now complete; emit it.
                        if let Some((id, name, input)) = pending_tool.take() {
                            events.push(AssistantEvent::ToolUse { id, name, input });
                        }
                    }
                    ApiStreamEvent::MessageDelta(delta) => {
                        events.push(AssistantEvent::Usage(TokenUsage {
                            input_tokens: delta.usage.input_tokens,
                            output_tokens: delta.usage.output_tokens,
                            cache_creation_input_tokens: delta.usage.cache_creation_input_tokens,
                            cache_read_input_tokens: delta.usage.cache_read_input_tokens,
                        }));
                    }
                    ApiStreamEvent::MessageStop(_) => {
                        saw_stop = true;
                        events.push(AssistantEvent::MessageStop);
                    }
                }
            }
            if !saw_stop {
                events.push(AssistantEvent::MessageStop);
            }
            Ok(events)
        })
    }
}
fn resolve_agent_auth_source() -> Result<api::AuthSource, String> {
resolve_startup_auth_source(|| {
let cwd = std::env::current_dir().map_err(api::ApiError::from)?;
let config = ConfigLoader::default_for(&cwd).load().map_err(|error| {
api::ApiError::Auth(format!("failed to load runtime OAuth config: {error}"))
})?;
Ok(config.oauth().cloned())
})
.map_err(|error| error.to_string())
}
/// Advertise every MVP tool to the child agent's API request.
fn agent_tool_definitions() -> Vec<ToolDefinition> {
    let mut definitions = Vec::new();
    for spec in mvp_tool_specs() {
        definitions.push(ToolDefinition {
            name: spec.name.to_string(),
            description: Some(spec.description.to_string()),
            input_schema: spec.input_schema,
        });
    }
    definitions
}
/// Map runtime conversation messages onto the wire `InputMessage` shape.
///
/// System, user, and tool messages all become "user" turns; messages with
/// no content blocks are dropped. Tool-use inputs are re-parsed from their
/// raw JSON string (falling back to `{"raw": …}` when unparsable) and
/// thinking blocks are replaced with empty text.
fn convert_agent_messages(messages: &[ConversationMessage]) -> Vec<InputMessage> {
    messages
        .iter()
        .filter_map(|message| {
            let role = match message.role {
                MessageRole::System | MessageRole::User | MessageRole::Tool => "user",
                MessageRole::Assistant => "assistant",
            };
            let content = message
                .blocks
                .iter()
                .map(|block| match block {
                    ContentBlock::Text { text } => InputContentBlock::Text { text: text.clone() },
                    ContentBlock::ToolUse { id, name, input } => InputContentBlock::ToolUse {
                        id: id.clone(),
                        name: name.clone(),
                        // Tool input is stored as a raw JSON string; wrap it
                        // when it is not valid JSON.
                        input: serde_json::from_str(input)
                            .unwrap_or_else(|_| serde_json::json!({ "raw": input })),
                    },
                    ContentBlock::ToolResult {
                        tool_use_id,
                        output,
                        is_error,
                        ..
                    } => InputContentBlock::ToolResult {
                        tool_use_id: tool_use_id.clone(),
                        content: vec![ToolResultContentBlock::Text {
                            text: output.clone(),
                        }],
                        is_error: *is_error,
                    },
                    ContentBlock::Thinking { .. } => InputContentBlock::Text { text: String::new() },
                })
                .collect::<Vec<_>>();
            (!content.is_empty()).then(|| InputMessage {
                role: role.to_string(),
                content,
            })
        })
        .collect()
}
/// Convert a batch of complete output blocks into runtime events.
///
/// Unlike the streaming path, each block here is already complete, so any
/// tool call parked by `push_agent_output_block` is flushed immediately.
fn push_agent_output_blocks(
    blocks: Vec<OutputContentBlock>,
    events: &mut Vec<AssistantEvent>,
    pending_tool: &mut Option<(String, String, String)>,
) {
    for block in blocks {
        push_agent_output_block(block, events, pending_tool);
        if let Some((id, name, input)) = std::mem::take(pending_tool) {
            events.push(AssistantEvent::ToolUse { id, name, input });
        }
    }
}
/// Convert one output block: non-empty text becomes a delta event, a tool
/// call is parked in `pending_tool` (its input may still be streaming),
/// and thinking blocks are dropped.
fn push_agent_output_block(
    block: OutputContentBlock,
    events: &mut Vec<AssistantEvent>,
    pending_tool: &mut Option<(String, String, String)>,
) {
    match block {
        OutputContentBlock::Text { text } => {
            if text.is_empty() {
                return;
            }
            events.push(AssistantEvent::TextDelta(text));
        }
        OutputContentBlock::ToolUse { id, name, input } => {
            *pending_tool = Some((id, name, input.to_string()));
        }
        OutputContentBlock::Thinking { .. } => {}
    }
}
/// Fake API client driven by a JSON script: an outer array of turns, each
/// an array of scripted events.
#[derive(Debug)]
struct ScriptedAgentApiClient {
    turns: Vec<Vec<ScriptedAgentEvent>>,
    call_count: usize,
}
impl ScriptedAgentApiClient {
    /// Parse the JSON script; parse failures are returned as strings.
    fn new(script: &str) -> Result<Self, String> {
        match serde_json::from_str(script) {
            Ok(turns) => Ok(Self {
                turns,
                call_count: 0,
            }),
            Err(error) => Err(error.to_string()),
        }
    }
}
impl ApiClient for ScriptedAgentApiClient {
    /// Replay the next scripted turn, appending the implicit `MessageStop`.
    /// Errors once every scripted turn has been consumed.
    fn stream(&mut self, _request: ApiRequest) -> Result<Vec<AssistantEvent>, RuntimeError> {
        let Some(turn) = self.turns.get(self.call_count) else {
            return Err(RuntimeError::new("scripted agent client exhausted"));
        };
        let mut events: Vec<AssistantEvent> = turn
            .iter()
            .map(ScriptedAgentEvent::to_runtime_event)
            .collect();
        events.push(AssistantEvent::MessageStop);
        self.call_count += 1;
        Ok(events)
    }
}
/// One event in a scripted test turn, deserialized from the
/// `CLAWD_AGENT_TEST_SCRIPT` JSON (tagged by a `type` field).
#[derive(Debug, Clone, Deserialize)]
#[serde(tag = "type", rename_all = "snake_case")]
enum ScriptedAgentEvent {
    // Plain assistant text.
    Text {
        text: String,
    },
    // A tool invocation with its JSON input.
    ToolUse {
        id: String,
        name: String,
        input: Value,
    },
}
impl ScriptedAgentEvent {
    /// Translate a scripted event into the runtime's event type.
    fn to_runtime_event(&self) -> AssistantEvent {
        match self {
            Self::Text { text } => AssistantEvent::TextDelta(text.clone()),
            Self::ToolUse { id, name, input } => {
                let (id, name) = (id.clone(), name.clone());
                AssistantEvent::ToolUse {
                    id,
                    name,
                    input: input.to_string(),
                }
            }
        }
    }
}
/// Concatenate all text blocks in a message; `None` when nothing textual
/// remains.
fn extract_message_text(message: &ConversationMessage) -> Option<String> {
    let mut text = String::new();
    for block in &message.blocks {
        if let ContentBlock::Text { text: chunk } = block {
            text.push_str(chunk);
        }
    }
    if text.is_empty() {
        None
    } else {
        Some(text)
    }
}
/// First tool-result block carried by a message, converted to the agent
/// summary form; `None` when the message has none.
fn extract_agent_tool_result(message: &ConversationMessage) -> Option<AgentToolResult> {
    for block in &message.blocks {
        if let ContentBlock::ToolResult {
            tool_name,
            output,
            is_error,
            ..
        } = block
        {
            return Some(AgentToolResult {
                tool_name: tool_name.clone(),
                output: output.clone(),
                is_error: *is_error,
            });
        }
    }
    None
}
/// Search the catalog of deferred (not-always-loaded) tools.
#[allow(clippy::needless_pass_by_value)]
fn execute_tool_search(input: ToolSearchInput) -> ToolSearchOutput {
    let catalog = deferred_tool_specs();
    let limit = input.max_results.unwrap_or(5).max(1);
    let query = input.query.trim().to_string();
    ToolSearchOutput {
        matches: search_tool_specs(&query, limit, &catalog),
        normalized_query: normalize_tool_search_query(&query),
        total_deferred_tools: catalog.len(),
        pending_mcp_servers: None,
        query,
    }
}
/// Tools that are not always loaded: everything except the core file and
/// shell tools.
fn deferred_tool_specs() -> Vec<ToolSpec> {
    const BASE_TOOLS: [&str; 6] = [
        "bash",
        "read_file",
        "write_file",
        "edit_file",
        "glob_search",
        "grep_search",
    ];
    mvp_tool_specs()
        .into_iter()
        .filter(|spec| !BASE_TOOLS.contains(&spec.name))
        .collect()
}
/// Rank tool specs against a free-text query and return the best names.
///
/// A `select:a,b,…` query bypasses ranking and returns the named tools in
/// order (canonical-token comparison). Otherwise, terms prefixed with `+`
/// are required — a spec missing any required term is excluded — and every
/// term contributes to a score: substring hits score low, exact or
/// canonical name matches score high. Specs scoring zero are dropped for
/// non-empty queries; ties break alphabetically by name.
fn search_tool_specs(query: &str, max_results: usize, specs: &[ToolSpec]) -> Vec<String> {
    let lowered = query.to_lowercase();
    if let Some(selection) = lowered.strip_prefix("select:") {
        return selection
            .split(',')
            .map(str::trim)
            .filter(|part| !part.is_empty())
            .filter_map(|wanted| {
                let wanted = canonical_tool_token(wanted);
                specs
                    .iter()
                    .find(|spec| canonical_tool_token(spec.name) == wanted)
                    .map(|spec| spec.name.to_string())
            })
            .take(max_results)
            .collect();
    }
    // Split terms into required ("+term") and optional.
    let mut required = Vec::new();
    let mut optional = Vec::new();
    for term in lowered.split_whitespace() {
        if let Some(rest) = term.strip_prefix('+') {
            if !rest.is_empty() {
                required.push(rest);
            }
        } else {
            optional.push(term);
        }
    }
    // All terms participate in scoring; required terms additionally filter.
    let terms = if required.is_empty() {
        optional.clone()
    } else {
        required.iter().chain(optional.iter()).copied().collect()
    };
    let mut scored = specs
        .iter()
        .filter_map(|spec| {
            let name = spec.name.to_lowercase();
            let canonical_name = canonical_tool_token(spec.name);
            let normalized_description = normalize_tool_search_query(spec.description);
            // Raw and canonicalized haystacks for substring scoring.
            let haystack = format!(
                "{name} {} {canonical_name}",
                spec.description.to_lowercase()
            );
            let normalized_haystack = format!("{canonical_name} {normalized_description}");
            if required.iter().any(|term| !haystack.contains(term)) {
                return None;
            }
            let mut score = 0_i32;
            for term in &terms {
                let canonical_term = canonical_tool_token(term);
                if haystack.contains(term) {
                    score += 2;
                }
                if name == *term {
                    score += 8;
                }
                if name.contains(term) {
                    score += 4;
                }
                if canonical_name == canonical_term {
                    score += 12;
                }
                if normalized_haystack.contains(&canonical_term) {
                    score += 3;
                }
            }
            // Keep zero-score entries only for the empty query.
            if score == 0 && !lowered.is_empty() {
                return None;
            }
            Some((score, spec.name.to_string()))
        })
        .collect::<Vec<_>>();
    // Highest score first; alphabetical name as the tiebreaker.
    scored.sort_by(|left, right| right.0.cmp(&left.0).then_with(|| left.1.cmp(&right.1)));
    scored
        .into_iter()
        .map(|(_, name)| name)
        .take(max_results)
        .collect()
}
fn normalize_tool_search_query(query: &str) -> String {
query
.trim()
.split(|ch: char| ch.is_whitespace() || ch == ',')
.filter(|term| !term.is_empty())
.map(canonical_tool_token)
.collect::<Vec<_>>()
.join(" ")
}
/// Canonical form of a tool name: keep only ASCII alphanumerics,
/// lowercased, and drop a trailing "tool" suffix so e.g.
/// "Read_File-Tool" matches "readfile".
fn canonical_tool_token(value: &str) -> String {
    let mut token = String::with_capacity(value.len());
    for ch in value.chars() {
        if ch.is_ascii_alphanumeric() {
            token.push(ch.to_ascii_lowercase());
        }
    }
    if token.ends_with("tool") {
        token.truncate(token.len() - 4);
    }
    token
}
/// Directory where agent manifests and outputs are persisted.
///
/// `CLAWD_AGENT_STORE` overrides; otherwise `.clawd-agents` under the
/// grandparent of the current directory (assumed workspace root), falling
/// back to the current directory itself when no grandparent exists.
fn agent_store_dir() -> Result<std::path::PathBuf, String> {
    if let Ok(explicit) = std::env::var("CLAWD_AGENT_STORE") {
        return Ok(std::path::PathBuf::from(explicit));
    }
    let cwd = std::env::current_dir().map_err(|error| error.to_string())?;
    let base = cwd.ancestors().nth(2).map_or(cwd.clone(), Path::to_path_buf);
    Ok(base.join(".clawd-agents"))
}
/// Unique-enough agent id derived from the wall clock in nanoseconds
/// (falls back to the epoch when the clock is before it).
fn make_agent_id() -> String {
    let since_epoch = std::time::SystemTime::now()
        .duration_since(std::time::UNIX_EPOCH)
        .unwrap_or_default();
    format!("agent-{}", since_epoch.as_nanos())
}
/// Build a short, URL-safe slug from a free-form description.
///
/// ASCII alphanumerics are lowercased and kept; every other character run
/// collapses to a single dash. Leading/trailing dashes are trimmed and the
/// result is capped at 32 characters (with no dangling trailing dash).
///
/// This is a single pass over the input. The previous implementation
/// rescanned and reallocated the whole string once per `--` run
/// (`while contains("--") { replace(...) }`), which is accidentally
/// quadratic; it could also leave a trailing dash when the 32-char cap cut
/// the slug mid-word.
fn slugify_agent_name(description: &str) -> String {
    let mut slug = String::with_capacity(description.len());
    for ch in description.chars() {
        if ch.is_ascii_alphanumeric() {
            slug.push(ch.to_ascii_lowercase());
        } else if !slug.is_empty() && !slug.ends_with('-') {
            // Collapse any run of separators into one dash; never lead with one.
            slug.push('-');
        }
    }
    // Drop a trailing separator, cap the length (slug is pure ASCII, so a
    // byte truncate is char-safe), then drop any dash the cap exposed.
    if slug.ends_with('-') {
        slug.pop();
    }
    slug.truncate(32);
    while slug.ends_with('-') {
        slug.pop();
    }
    slug
}
/// Map a free-form subagent type onto one of the known agent kinds,
/// defaulting to "general-purpose" and passing unknown values through
/// trimmed but otherwise unchanged.
fn normalize_subagent_type(subagent_type: Option<&str>) -> String {
    let Some(trimmed) = subagent_type.map(str::trim).filter(|s| !s.is_empty()) else {
        return String::from("general-purpose");
    };
    let canonical = canonical_tool_token(trimmed);
    let known = match canonical.as_str() {
        "general" | "generalpurpose" | "generalpurposeagent" => "general-purpose",
        "explore" | "explorer" | "exploreagent" => "Explore",
        "plan" | "planagent" => "Plan",
        "verification" | "verificationagent" | "verify" | "verifier" => "Verification",
        "claudecodeguide" | "claudecodeguideagent" | "guide" => "claude-code-guide",
        "statusline" | "statuslinesetup" => "statusline-setup",
        _ => return trimmed.to_string(),
    };
    String::from(known)
}
/// Current UTC time as an ISO-8601 timestamp, e.g. `2026-04-01T12:00:00Z`.
///
/// The previous implementation returned bare epoch seconds despite the
/// function's name; agent manifests now carry a genuine ISO-8601 string.
/// A clock before the epoch degrades to the epoch itself.
fn iso8601_now() -> String {
    let secs = std::time::SystemTime::now()
        .duration_since(std::time::UNIX_EPOCH)
        .unwrap_or_default()
        .as_secs();
    format_iso8601_utc(secs)
}
/// Convert seconds since the Unix epoch to `YYYY-MM-DDTHH:MM:SSZ` (UTC),
/// using Howard Hinnant's civil-from-days algorithm for the date part.
fn format_iso8601_utc(epoch_secs: u64) -> String {
    let days = epoch_secs / 86_400;
    let rem = epoch_secs % 86_400;
    let (hour, minute, second) = (rem / 3600, (rem % 3600) / 60, rem % 60);
    // Shift the epoch to 0000-03-01 so leap days fall at the end of each
    // 400-year era; see "chrono-compatible low-level date algorithms".
    let z = days as i64 + 719_468;
    let era = z.div_euclid(146_097);
    let doe = z.rem_euclid(146_097); // day of era [0, 146096]
    let yoe = (doe - doe / 1460 + doe / 36_524 - doe / 146_096) / 365;
    let y = yoe + era * 400;
    let doy = doe - (365 * yoe + yoe / 4 - yoe / 100);
    let mp = (5 * doy + 2) / 153;
    let day = doy - (153 * mp + 2) / 5 + 1;
    let month = if mp < 10 { mp + 3 } else { mp - 9 };
    let year = if month <= 2 { y + 1 } else { y };
    format!("{year:04}-{month:02}-{day:02}T{hour:02}:{minute:02}:{second:02}Z")
}
/// Apply an insert/replace/delete edit to a Jupyter notebook on disk.
///
/// The notebook is parsed as JSON, mutated in place, and written back
/// pretty-printed; both the original and updated file contents are
/// returned. Replace keeps the target's cell type unless one is supplied;
/// insert places the new cell after the target (or at the end when no cell
/// id is given). Code cells gain `outputs`/`execution_count` fields and
/// markdown cells lose them, matching the nbformat schema.
#[allow(clippy::too_many_lines)]
fn execute_notebook_edit(input: NotebookEditInput) -> Result<NotebookEditOutput, String> {
    let path = std::path::PathBuf::from(&input.notebook_path);
    if path.extension().and_then(|ext| ext.to_str()) != Some("ipynb") {
        return Err(String::from(
            "File must be a Jupyter notebook (.ipynb file).",
        ));
    }
    let original_file = std::fs::read_to_string(&path).map_err(|error| error.to_string())?;
    let mut notebook: serde_json::Value =
        serde_json::from_str(&original_file).map_err(|error| error.to_string())?;
    // Kernel language, reported back to the caller (defaults to python).
    let language = notebook
        .get("metadata")
        .and_then(|metadata| metadata.get("kernelspec"))
        .and_then(|kernelspec| kernelspec.get("language"))
        .and_then(serde_json::Value::as_str)
        .unwrap_or("python")
        .to_string();
    let cells = notebook
        .get_mut("cells")
        .and_then(serde_json::Value::as_array_mut)
        .ok_or_else(|| String::from("Notebook cells array not found"))?;
    let edit_mode = input.edit_mode.unwrap_or(NotebookEditMode::Replace);
    // Replace/delete require a target cell even without an explicit id;
    // insert without an id appends at the end (target stays None).
    let target_index = match input.cell_id.as_deref() {
        Some(cell_id) => Some(resolve_cell_index(cells, Some(cell_id), edit_mode)?),
        None if matches!(
            edit_mode,
            NotebookEditMode::Replace | NotebookEditMode::Delete
        ) =>
        {
            Some(resolve_cell_index(cells, None, edit_mode)?)
        }
        None => None,
    };
    // Replace inherits the existing cell's type unless one was supplied.
    let resolved_cell_type = match edit_mode {
        NotebookEditMode::Delete => None,
        NotebookEditMode::Insert => Some(input.cell_type.unwrap_or(NotebookCellType::Code)),
        NotebookEditMode::Replace => Some(input.cell_type.unwrap_or_else(|| {
            target_index
                .and_then(|index| cells.get(index))
                .and_then(cell_kind)
                .unwrap_or(NotebookCellType::Code)
        })),
    };
    let new_source = require_notebook_source(input.new_source, edit_mode)?;
    let cell_id = match edit_mode {
        NotebookEditMode::Insert => {
            let resolved_cell_type = resolved_cell_type.expect("insert cell type");
            let new_id = make_cell_id(cells.len());
            let new_cell = build_notebook_cell(&new_id, resolved_cell_type, &new_source);
            // Insert after the target cell, or append when there is none.
            let insert_at = target_index.map_or(cells.len(), |index| index + 1);
            cells.insert(insert_at, new_cell);
            cells
                .get(insert_at)
                .and_then(|cell| cell.get("id"))
                .and_then(serde_json::Value::as_str)
                .map(ToString::to_string)
        }
        NotebookEditMode::Delete => {
            let removed = cells.remove(target_index.expect("delete target index"));
            removed
                .get("id")
                .and_then(serde_json::Value::as_str)
                .map(ToString::to_string)
        }
        NotebookEditMode::Replace => {
            let resolved_cell_type = resolved_cell_type.expect("replace cell type");
            let cell = cells
                .get_mut(target_index.expect("replace target index"))
                .ok_or_else(|| String::from("Cell index out of range"))?;
            cell["source"] = serde_json::Value::Array(source_lines(&new_source));
            cell["cell_type"] = serde_json::Value::String(match resolved_cell_type {
                NotebookCellType::Code => String::from("code"),
                NotebookCellType::Markdown => String::from("markdown"),
            });
            // Keep the cell schema-consistent with its (possibly new) type.
            match resolved_cell_type {
                NotebookCellType::Code => {
                    if !cell.get("outputs").is_some_and(serde_json::Value::is_array) {
                        cell["outputs"] = json!([]);
                    }
                    if cell.get("execution_count").is_none() {
                        cell["execution_count"] = serde_json::Value::Null;
                    }
                }
                NotebookCellType::Markdown => {
                    if let Some(object) = cell.as_object_mut() {
                        object.remove("outputs");
                        object.remove("execution_count");
                    }
                }
            }
            cell.get("id")
                .and_then(serde_json::Value::as_str)
                .map(ToString::to_string)
        }
    };
    let updated_file =
        serde_json::to_string_pretty(&notebook).map_err(|error| error.to_string())?;
    std::fs::write(&path, &updated_file).map_err(|error| error.to_string())?;
    Ok(NotebookEditOutput {
        new_source,
        cell_id,
        cell_type: resolved_cell_type,
        language,
        edit_mode: format_notebook_edit_mode(edit_mode),
        error: None,
        notebook_path: path.display().to_string(),
        original_file,
        updated_file,
    })
}
/// Delete edits may omit `new_source` (it defaults to empty); insert and
/// replace edits require it.
fn require_notebook_source(
    source: Option<String>,
    edit_mode: NotebookEditMode,
) -> Result<String, String> {
    if matches!(edit_mode, NotebookEditMode::Delete) {
        return Ok(source.unwrap_or_default());
    }
    source.ok_or_else(|| String::from("new_source is required for insert and replace edits"))
}
/// Construct a fresh notebook cell JSON object of the given type.
///
/// Code cells additionally carry an empty `outputs` array and a null
/// `execution_count`; markdown cells carry neither.
fn build_notebook_cell(cell_id: &str, cell_type: NotebookCellType, source: &str) -> Value {
    let kind = match cell_type {
        NotebookCellType::Code => "code",
        NotebookCellType::Markdown => "markdown",
    };
    let mut cell = json!({
        "cell_type": kind,
        "id": cell_id,
        "metadata": {},
        "source": source_lines(source),
    });
    if matches!(cell_type, NotebookCellType::Code) {
        if let Some(object) = cell.as_object_mut() {
            object.insert(String::from("outputs"), json!([]));
            object.insert(String::from("execution_count"), Value::Null);
        }
    }
    cell
}
/// Read a cell's declared type; anything other than "markdown" counts as
/// code. `None` when `cell_type` is absent or not a string.
fn cell_kind(cell: &serde_json::Value) -> Option<NotebookCellType> {
    let kind = cell.get("cell_type")?.as_str()?;
    Some(if kind == "markdown" {
        NotebookCellType::Markdown
    } else {
        NotebookCellType::Code
    })
}
/// Block the current thread for the requested number of milliseconds.
#[allow(clippy::needless_pass_by_value)]
fn execute_sleep(input: SleepInput) -> SleepOutput {
    let duration_ms = input.duration_ms;
    std::thread::sleep(Duration::from_millis(duration_ms));
    SleepOutput {
        message: format!("Slept for {duration_ms}ms"),
        duration_ms,
    }
}
/// Validate a brief message and resolve any attachment paths before
/// packaging the output with a timestamp.
fn execute_brief(input: BriefInput) -> Result<BriefOutput, String> {
    if input.message.trim().is_empty() {
        return Err(String::from("message must not be empty"));
    }
    let attachments = match &input.attachments {
        None => None,
        Some(paths) => {
            let mut resolved = Vec::with_capacity(paths.len());
            for path in paths {
                resolved.push(resolve_attachment(path)?);
            }
            Some(resolved)
        }
    };
    // Both statuses currently deliver the message unchanged.
    let message = match input.status {
        BriefStatus::Normal | BriefStatus::Proactive => input.message,
    };
    Ok(BriefOutput {
        message,
        attachments,
        sent_at: iso8601_timestamp(),
    })
}
/// Canonicalize an attachment path and record its size and image flag.
fn resolve_attachment(path: &str) -> Result<ResolvedAttachment, String> {
    let canonical = std::fs::canonicalize(path).map_err(|error| error.to_string())?;
    let size = std::fs::metadata(&canonical)
        .map_err(|error| error.to_string())?
        .len();
    Ok(ResolvedAttachment {
        is_image: is_image_path(&canonical),
        path: canonical.display().to_string(),
        size,
    })
}
/// True when the path's extension names a recognized image format
/// (case-insensitive).
fn is_image_path(path: &Path) -> bool {
    const IMAGE_EXTENSIONS: [&str; 7] = ["png", "jpg", "jpeg", "gif", "webp", "bmp", "svg"];
    path.extension()
        .and_then(|ext| ext.to_str())
        .is_some_and(|ext| {
            let lowered = ext.to_ascii_lowercase();
            IMAGE_EXTENSIONS.contains(&lowered.as_str())
        })
}
/// Get or set one supported configuration setting.
///
/// With a `value`, the setting is normalized, validated, and persisted to
/// its scope's JSON file ("set" operation); without one, the stored value
/// is read back ("get"). An unknown setting name produces a non-error
/// output with `success: false` rather than an `Err`.
fn execute_config(input: ConfigInput) -> Result<ConfigOutput, String> {
    let setting = input.setting.trim();
    if setting.is_empty() {
        return Err(String::from("setting must not be empty"));
    }
    let Some(spec) = supported_config_setting(setting) else {
        // Unknown keys are reported in-band, not as a hard error.
        return Ok(ConfigOutput {
            success: false,
            operation: None,
            setting: None,
            value: None,
            previous_value: None,
            new_value: None,
            error: Some(format!("Unknown setting: \"{setting}\"")),
        });
    };
    let path = config_file_for_scope(spec.scope)?;
    let mut document = read_json_object(&path)?;
    if let Some(value) = input.value {
        // "set": normalize, record the previous value, write back.
        let normalized = normalize_config_value(spec, value)?;
        let previous_value = get_nested_value(&document, spec.path).cloned();
        set_nested_value(&mut document, spec.path, normalized.clone());
        write_json_object(&path, &document)?;
        Ok(ConfigOutput {
            success: true,
            operation: Some(String::from("set")),
            setting: Some(setting.to_string()),
            value: Some(normalized.clone()),
            previous_value,
            new_value: Some(normalized),
            error: None,
        })
    } else {
        // "get": report the stored value (None when absent).
        Ok(ConfigOutput {
            success: true,
            operation: Some(String::from("get")),
            setting: Some(setting.to_string()),
            value: get_nested_value(&document, spec.path).cloned(),
            previous_value: None,
            new_value: None,
            error: None,
        })
    }
}
fn execute_structured_output(input: StructuredOutputInput) -> StructuredOutputResult {
StructuredOutputResult {
data: String::from("Structured output provided successfully"),
structured_output: input.0,
}
}
/// Run a one-shot code snippet under the resolved interpreter and capture
/// its stdio, exit code, and wall-clock duration.
fn execute_repl(input: ReplInput) -> Result<ReplOutput, String> {
    if input.code.trim().is_empty() {
        return Err(String::from("code must not be empty"));
    }
    // NOTE(review): timeout_ms is accepted but deliberately discarded — the
    // child process runs to completion with no deadline. Confirm whether a
    // real timeout should be enforced here.
    let _ = input.timeout_ms;
    let runtime = resolve_repl_runtime(&input.language)?;
    let started = Instant::now();
    // The code is passed as the final argument after the runtime's flags
    // (e.g. `python3 -c <code>`).
    let output = Command::new(runtime.program)
        .args(runtime.args)
        .arg(&input.code)
        .output()
        .map_err(|error| error.to_string())?;
    Ok(ReplOutput {
        language: input.language,
        stdout: String::from_utf8_lossy(&output.stdout).into_owned(),
        stderr: String::from_utf8_lossy(&output.stderr).into_owned(),
        // A signal-terminated child has no code; report 1.
        exit_code: output.status.code().unwrap_or(1),
        duration_ms: started.elapsed().as_millis(),
    })
}
/// Interpreter invocation for a REPL language: the executable plus the
/// flag(s) after which the code is passed as the next argument.
struct ReplRuntime {
    program: &'static str,
    args: &'static [&'static str],
}
/// Pick the interpreter command and flags for a REPL language alias,
/// preferring the first candidate binary found on the system.
fn resolve_repl_runtime(language: &str) -> Result<ReplRuntime, String> {
    let normalized = language.trim().to_ascii_lowercase();
    let (candidates, args, missing): (&[&'static str], &'static [&'static str], &str) =
        match normalized.as_str() {
            "python" | "py" => (&["python3", "python"], &["-c"], "python runtime not found"),
            "javascript" | "js" | "node" => (&["node"], &["-e"], "node runtime not found"),
            "sh" | "shell" | "bash" => (&["bash", "sh"], &["-lc"], "shell runtime not found"),
            other => return Err(format!("unsupported REPL language: {other}")),
        };
    let program = detect_first_command(candidates).ok_or_else(|| missing.to_string())?;
    Ok(ReplRuntime { program, args })
}
/// First command from the candidate list that exists on this system.
fn detect_first_command(commands: &[&'static str]) -> Option<&'static str> {
    for &command in commands {
        if command_exists(command) {
            return Some(command);
        }
    }
    None
}
/// Which settings file a config key lives in.
#[derive(Clone, Copy)]
enum ConfigScope {
    // Global user settings: `<config home>/settings.json`.
    Global,
    // Per-project settings: `<cwd>/.claude/settings.local.json`.
    Settings,
}
/// Static description of one supported setting.
#[derive(Clone, Copy)]
struct ConfigSettingSpec {
    scope: ConfigScope,
    // Expected JSON value type for the setting.
    kind: ConfigKind,
    // Nested key path within the settings document.
    path: &'static [&'static str],
    // Allowed string values, when the setting is an enumeration.
    options: Option<&'static [&'static str]>,
}
/// JSON value type a setting accepts.
#[derive(Clone, Copy)]
enum ConfigKind {
    Boolean,
    String,
}
fn supported_config_setting(setting: &str) -> Option<ConfigSettingSpec> {
Some(match setting {
"theme" => ConfigSettingSpec {
scope: ConfigScope::Global,
kind: ConfigKind::String,
path: &["theme"],
options: None,
},
"editorMode" => ConfigSettingSpec {
scope: ConfigScope::Global,
kind: ConfigKind::String,
path: &["editorMode"],
options: Some(&["default", "vim", "emacs"]),
},
"verbose" => ConfigSettingSpec {
scope: ConfigScope::Global,
kind: ConfigKind::Boolean,
path: &["verbose"],
options: None,
},
"preferredNotifChannel" => ConfigSettingSpec {
scope: ConfigScope::Global,
kind: ConfigKind::String,
path: &["preferredNotifChannel"],
options: None,
},
"autoCompactEnabled" => ConfigSettingSpec {
scope: ConfigScope::Global,
kind: ConfigKind::Boolean,
path: &["autoCompactEnabled"],
options: None,
},
"autoMemoryEnabled" => ConfigSettingSpec {
scope: ConfigScope::Settings,
kind: ConfigKind::Boolean,
path: &["autoMemoryEnabled"],
options: None,
},
"autoDreamEnabled" => ConfigSettingSpec {
scope: ConfigScope::Settings,
kind: ConfigKind::Boolean,
path: &["autoDreamEnabled"],
options: None,
},
"fileCheckpointingEnabled" => ConfigSettingSpec {
scope: ConfigScope::Global,
kind: ConfigKind::Boolean,
path: &["fileCheckpointingEnabled"],
options: None,
},
"showTurnDuration" => ConfigSettingSpec {
scope: ConfigScope::Global,
kind: ConfigKind::Boolean,
path: &["showTurnDuration"],
options: None,
},
"terminalProgressBarEnabled" => ConfigSettingSpec {
scope: ConfigScope::Global,
kind: ConfigKind::Boolean,
path: &["terminalProgressBarEnabled"],
options: None,
},
"todoFeatureEnabled" => ConfigSettingSpec {
scope: ConfigScope::Global,
kind: ConfigKind::Boolean,
path: &["todoFeatureEnabled"],
options: None,
},
"model" => ConfigSettingSpec {
scope: ConfigScope::Settings,
kind: ConfigKind::String,
path: &["model"],
options: None,
},
"alwaysThinkingEnabled" => ConfigSettingSpec {
scope: ConfigScope::Settings,
kind: ConfigKind::Boolean,
path: &["alwaysThinkingEnabled"],
options: None,
},
"permissions.defaultMode" => ConfigSettingSpec {
scope: ConfigScope::Settings,
kind: ConfigKind::String,
path: &["permissions", "defaultMode"],
options: Some(&["default", "plan", "acceptEdits", "dontAsk", "auto"]),
},
"language" => ConfigSettingSpec {
scope: ConfigScope::Settings,
kind: ConfigKind::String,
path: &["language"],
options: None,
},
"teammateMode" => ConfigSettingSpec {
scope: ConfigScope::Global,
kind: ConfigKind::String,
path: &["teammateMode"],
options: Some(&["tmux", "in-process", "auto"]),
},
_ => return None,
})
}
/// Coerce a raw CLI-supplied `ConfigValue` into the JSON shape declared by
/// `spec.kind`, then validate it against `spec.options` when present.
///
/// Returns the normalized `serde_json::Value`, or a user-facing error string
/// when the value cannot be coerced or is not one of the allowed options.
fn normalize_config_value(spec: ConfigSettingSpec, value: ConfigValue) -> Result<Value, String> {
    let normalized = match (spec.kind, value) {
        (ConfigKind::Boolean, ConfigValue::Bool(value)) => Value::Bool(value),
        // Accept textual booleans ("true"/"false"), case-insensitively.
        (ConfigKind::Boolean, ConfigValue::String(value)) => {
            match value.trim().to_ascii_lowercase().as_str() {
                "true" => Value::Bool(true),
                "false" => Value::Bool(false),
                _ => return Err(String::from("setting requires true or false")),
            }
        }
        (ConfigKind::Boolean, ConfigValue::Number(_)) => {
            return Err(String::from("setting requires true or false"))
        }
        (ConfigKind::String, ConfigValue::String(value)) => Value::String(value),
        (ConfigKind::String, ConfigValue::Bool(value)) => Value::String(value.to_string()),
        // Fix: render numeric input for a string-kind setting as a string,
        // matching the Bool branch above. Previously this kept a raw JSON
        // number, which stored the wrong type for string-kind settings and
        // made the `options` check below fail with the misleading
        // "requires a string value" message instead of listing the options.
        (ConfigKind::String, ConfigValue::Number(value)) => Value::String(value.to_string()),
    };
    if let Some(options) = spec.options {
        let Some(as_str) = normalized.as_str() else {
            return Err(String::from("setting requires a string value"));
        };
        if !options.iter().any(|option| option == &as_str) {
            return Err(format!(
                "Invalid value \"{as_str}\". Options: {}",
                options.join(", ")
            ));
        }
    }
    Ok(normalized)
}
/// Map a configuration scope to the JSON file that backs it.
///
/// Global settings live under the Claude config home; project-local settings
/// live under `.claude/settings.local.json` in the current working directory.
///
/// The working directory is only consulted for the `Settings` scope, so
/// resolving the global file no longer fails when the cwd is missing or
/// inaccessible (previously `current_dir()` was called unconditionally).
fn config_file_for_scope(scope: ConfigScope) -> Result<PathBuf, String> {
    match scope {
        ConfigScope::Global => Ok(config_home_dir()?.join("settings.json")),
        ConfigScope::Settings => {
            let cwd = std::env::current_dir().map_err(|error| error.to_string())?;
            Ok(cwd.join(".claude").join("settings.local.json"))
        }
    }
}
/// Resolve the Claude configuration directory.
///
/// An explicit `CLAUDE_CONFIG_HOME` override wins; otherwise the directory
/// defaults to `$HOME/.claude`. Fails with a message when neither variable
/// is available.
fn config_home_dir() -> Result<PathBuf, String> {
    match std::env::var("CLAUDE_CONFIG_HOME") {
        Ok(overridden) => Ok(PathBuf::from(overridden)),
        Err(_) => std::env::var("HOME")
            .map(|home| PathBuf::from(home).join(".claude"))
            .map_err(|_| String::from("HOME is not set")),
    }
}
fn read_json_object(path: &Path) -> Result<serde_json::Map<String, Value>, String> {
match std::fs::read_to_string(path) {
Ok(contents) => {
if contents.trim().is_empty() {
return Ok(serde_json::Map::new());
}
serde_json::from_str::<Value>(&contents)
.map_err(|error| error.to_string())?
.as_object()
.cloned()
.ok_or_else(|| String::from("config file must contain a JSON object"))
}
Err(error) if error.kind() == std::io::ErrorKind::NotFound => Ok(serde_json::Map::new()),
Err(error) => Err(error.to_string()),
}
}
fn write_json_object(path: &Path, value: &serde_json::Map<String, Value>) -> Result<(), String> {
if let Some(parent) = path.parent() {
std::fs::create_dir_all(parent).map_err(|error| error.to_string())?;
}
std::fs::write(
path,
serde_json::to_string_pretty(value).map_err(|error| error.to_string())?,
)
.map_err(|error| error.to_string())
}
/// Look up a value by dotted key path inside a JSON object.
///
/// The first path segment indexes the root map; each subsequent segment
/// descends into a nested JSON object. Returns `None` when the path is
/// empty, a key is missing, or an intermediate value is not an object.
fn get_nested_value<'a>(
    value: &'a serde_json::Map<String, Value>,
    path: &[&str],
) -> Option<&'a Value> {
    let (head, tail) = path.split_first()?;
    tail.iter().try_fold(value.get(*head)?, |node, key| {
        node.as_object().and_then(|object| object.get(*key))
    })
}
/// Insert `new_value` at the dotted key `path` inside `root`, creating
/// intermediate objects as needed and overwriting any non-object value
/// found along the way.
///
/// # Panics
/// Panics when `path` is empty — callers always pass a spec-defined path.
fn set_nested_value(root: &mut serde_json::Map<String, Value>, path: &[&str], new_value: Value) {
    let (last, intermediate) = path.split_last().expect("config path must not be empty");
    let mut cursor = root;
    for key in intermediate {
        let slot = cursor
            .entry((*key).to_string())
            .or_insert_with(|| Value::Object(serde_json::Map::new()));
        // Clobber scalars that block the path with a fresh object.
        if !slot.is_object() {
            *slot = Value::Object(serde_json::Map::new());
        }
        cursor = slot.as_object_mut().expect("object inserted");
    }
    cursor.insert((*last).to_string(), new_value);
}
fn iso8601_timestamp() -> String {
if let Ok(output) = Command::new("date")
.args(["-u", "+%Y-%m-%dT%H:%M:%SZ"])
.output()
{
if output.status.success() {
return String::from_utf8_lossy(&output.stdout).trim().to_string();
}
}
iso8601_now()
}
#[allow(clippy::needless_pass_by_value)]
/// Run a PowerShell command by locating a PowerShell binary and delegating
/// to the shared shell-execution helper.
fn execute_powershell(input: PowerShellInput) -> std::io::Result<runtime::BashCommandOutput> {
    // `description` is informational metadata; touch it so the field is not
    // reported as unused.
    let _ = &input.description;
    execute_shell_command(
        detect_powershell_shell()?,
        &input.command,
        input.timeout,
        input.run_in_background,
    )
}
fn detect_powershell_shell() -> std::io::Result<&'static str> {
if command_exists("pwsh") {
Ok("pwsh")
} else if command_exists("powershell") {
Ok("powershell")
} else {
Err(std::io::Error::new(
std::io::ErrorKind::NotFound,
"PowerShell executable not found (expected `pwsh` or `powershell` in PATH)",
))
}
}
/// Report whether `command` resolves on the shell's PATH.
///
/// Probes via `sh -lc 'command -v … >/dev/null 2>&1'`; a zero exit status
/// means found. Any spawn failure is treated as "not found".
fn command_exists(command: &str) -> bool {
    let probe = format!("command -v {command} >/dev/null 2>&1");
    match std::process::Command::new("sh").arg("-lc").arg(probe).status() {
        Ok(status) => status.success(),
        Err(_) => false,
    }
}
#[allow(clippy::too_many_lines)]
/// Execute `command` via the given `shell` binary (invoked with the
/// PowerShell-style `-NoProfile -NonInteractive -Command` flags) and
/// translate the outcome into the shared `BashCommandOutput` shape.
///
/// Modes, in order of precedence:
/// * background (`run_in_background == Some(true)`): spawn detached with all
///   stdio nulled and return immediately; the child's PID becomes the
///   `background_task_id`.
/// * bounded (`timeout == Some(ms)`): poll the child every 10 ms; on expiry
///   kill it and report `interrupted = true` with
///   `return_code_interpretation = "timeout"`.
/// * foreground (default): block until the process exits.
///
/// # Errors
/// Returns the underlying I/O error when the process cannot be spawned or
/// its output cannot be collected.
fn execute_shell_command(
    shell: &str,
    command: &str,
    timeout: Option<u64>,
    run_in_background: Option<bool>,
) -> std::io::Result<runtime::BashCommandOutput> {
    if run_in_background.unwrap_or(false) {
        // Detached run: null all stdio so the child can never block on a
        // pipe after this function returns.
        let child = std::process::Command::new(shell)
            .arg("-NoProfile")
            .arg("-NonInteractive")
            .arg("-Command")
            .arg(command)
            .stdin(std::process::Stdio::null())
            .stdout(std::process::Stdio::null())
            .stderr(std::process::Stdio::null())
            .spawn()?;
        return Ok(runtime::BashCommandOutput {
            stdout: String::new(),
            stderr: String::new(),
            raw_output_path: None,
            interrupted: false,
            is_image: None,
            // The OS PID doubles as the opaque background-task handle.
            background_task_id: Some(child.id().to_string()),
            backgrounded_by_user: Some(true),
            assistant_auto_backgrounded: Some(false),
            dangerously_disable_sandbox: None,
            return_code_interpretation: None,
            no_output_expected: Some(true),
            structured_content: None,
            persisted_output_path: None,
            persisted_output_size: None,
        });
    }
    let mut process = std::process::Command::new(shell);
    process
        .arg("-NoProfile")
        .arg("-NonInteractive")
        .arg("-Command")
        .arg(command);
    // Capture both streams for the foreground/bounded paths.
    process
        .stdout(std::process::Stdio::piped())
        .stderr(std::process::Stdio::piped());
    if let Some(timeout_ms) = timeout {
        let mut child = process.spawn()?;
        let started = Instant::now();
        loop {
            // Non-blocking check: Some(status) means the child has exited.
            if let Some(status) = child.try_wait()? {
                let output = child.wait_with_output()?;
                return Ok(runtime::BashCommandOutput {
                    stdout: String::from_utf8_lossy(&output.stdout).into_owned(),
                    stderr: String::from_utf8_lossy(&output.stderr).into_owned(),
                    raw_output_path: None,
                    interrupted: false,
                    is_image: None,
                    background_task_id: None,
                    backgrounded_by_user: None,
                    assistant_auto_backgrounded: None,
                    dangerously_disable_sandbox: None,
                    // Only non-zero exit codes are surfaced; success is None.
                    return_code_interpretation: status
                        .code()
                        .filter(|code| *code != 0)
                        .map(|code| format!("exit_code:{code}")),
                    no_output_expected: Some(output.stdout.is_empty() && output.stderr.is_empty()),
                    structured_content: None,
                    persisted_output_path: None,
                    persisted_output_size: None,
                });
            }
            if started.elapsed() >= Duration::from_millis(timeout_ms) {
                // Deadline passed: best-effort kill, then collect whatever
                // output the child produced before dying.
                let _ = child.kill();
                let output = child.wait_with_output()?;
                let stderr = String::from_utf8_lossy(&output.stderr).into_owned();
                // Append the timeout notice to captured stderr, or use it
                // alone when the child wrote nothing.
                let stderr = if stderr.trim().is_empty() {
                    format!("Command exceeded timeout of {timeout_ms} ms")
                } else {
                    format!(
                        "{}
Command exceeded timeout of {timeout_ms} ms",
                        stderr.trim_end()
                    )
                };
                return Ok(runtime::BashCommandOutput {
                    stdout: String::from_utf8_lossy(&output.stdout).into_owned(),
                    stderr,
                    raw_output_path: None,
                    interrupted: true,
                    is_image: None,
                    background_task_id: None,
                    backgrounded_by_user: None,
                    assistant_auto_backgrounded: None,
                    dangerously_disable_sandbox: None,
                    return_code_interpretation: Some(String::from("timeout")),
                    no_output_expected: Some(false),
                    structured_content: None,
                    persisted_output_path: None,
                    persisted_output_size: None,
                });
            }
            // Poll interval for the timeout loop.
            std::thread::sleep(Duration::from_millis(10));
        }
    }
    // Unbounded foreground run: block until the process exits.
    let output = process.output()?;
    Ok(runtime::BashCommandOutput {
        stdout: String::from_utf8_lossy(&output.stdout).into_owned(),
        stderr: String::from_utf8_lossy(&output.stderr).into_owned(),
        raw_output_path: None,
        interrupted: false,
        is_image: None,
        background_task_id: None,
        backgrounded_by_user: None,
        assistant_auto_backgrounded: None,
        dangerously_disable_sandbox: None,
        return_code_interpretation: output
            .status
            .code()
            .filter(|code| *code != 0)
            .map(|code| format!("exit_code:{code}")),
        no_output_expected: Some(output.stdout.is_empty() && output.stderr.is_empty()),
        structured_content: None,
        persisted_output_path: None,
        persisted_output_size: None,
    })
}
/// Resolve the target cell index for a notebook edit.
///
/// With an explicit `cell_id`, returns the index of the matching cell or an
/// error when no cell carries that id. Without one, returns the last cell's
/// index (0 for an empty notebook). Replace/Delete on an empty notebook is
/// rejected up front since they require an existing cell.
fn resolve_cell_index(
    cells: &[serde_json::Value],
    cell_id: Option<&str>,
    edit_mode: NotebookEditMode,
) -> Result<usize, String> {
    let needs_existing_cell = matches!(
        edit_mode,
        NotebookEditMode::Replace | NotebookEditMode::Delete
    );
    if needs_existing_cell && cells.is_empty() {
        return Err(String::from("Notebook has no cells to edit"));
    }
    match cell_id {
        Some(wanted) => cells
            .iter()
            .position(|cell| cell.get("id").and_then(serde_json::Value::as_str) == Some(wanted))
            .ok_or_else(|| format!("Cell id not found: {wanted}")),
        None => Ok(cells.len().saturating_sub(1)),
    }
}
/// Split cell source text into the Jupyter on-disk representation: a list
/// of line strings that keep their trailing newline. Empty source becomes a
/// single empty string.
fn source_lines(source: &str) -> Vec<serde_json::Value> {
    if source.is_empty() {
        vec![serde_json::Value::String(String::new())]
    } else {
        source
            .split_inclusive('\n')
            .map(str::to_string)
            .map(serde_json::Value::String)
            .collect()
    }
}
/// Render the notebook edit mode as its lower-case textual name.
fn format_notebook_edit_mode(mode: NotebookEditMode) -> String {
    let name = match mode {
        NotebookEditMode::Replace => "replace",
        NotebookEditMode::Insert => "insert",
        NotebookEditMode::Delete => "delete",
    };
    name.to_string()
}
/// Build a synthetic cell id from a zero-based index; ids are 1-based, so
/// index 0 yields "cell-1".
fn make_cell_id(index: usize) -> String {
    let ordinal = index + 1;
    format!("cell-{ordinal}")
}
/// Extract the first non-empty `description:` value from skill front matter.
///
/// Lightweight line scan (no YAML parsing): only lines that literally start
/// with `description:` match, and lines whose value trims to empty are
/// skipped in favor of a later match. Returns `None` when nothing matches.
fn parse_skill_description(contents: &str) -> Option<String> {
    contents.lines().find_map(|line| {
        let value = line.strip_prefix("description:")?.trim();
        if value.is_empty() {
            None
        } else {
            Some(value.to_string())
        }
    })
}
#[cfg(test)]
mod tests {
use std::fs;
use std::io::{Read, Write};
use std::net::{SocketAddr, TcpListener};
use std::path::PathBuf;
use std::sync::{Arc, Mutex, OnceLock};
use std::thread;
use std::time::Duration;
use super::{execute_tool, mvp_tool_specs};
use serde_json::json;
fn env_lock() -> &'static Mutex<()> {
static LOCK: OnceLock<Mutex<()>> = OnceLock::new();
LOCK.get_or_init(|| Mutex::new(()))
}
fn temp_path(name: &str) -> PathBuf {
let unique = std::time::SystemTime::now()
.duration_since(std::time::UNIX_EPOCH)
.expect("time")
.as_nanos();
std::env::temp_dir().join(format!("clawd-tools-{unique}-{name}"))
}
#[test]
fn exposes_mvp_tools() {
let names = mvp_tool_specs()
.into_iter()
.map(|spec| spec.name)
.collect::<Vec<_>>();
assert!(names.contains(&"bash"));
assert!(names.contains(&"read_file"));
assert!(names.contains(&"WebFetch"));
assert!(names.contains(&"WebSearch"));
assert!(names.contains(&"TodoWrite"));
assert!(names.contains(&"Skill"));
assert!(names.contains(&"Agent"));
assert!(names.contains(&"ToolSearch"));
assert!(names.contains(&"NotebookEdit"));
assert!(names.contains(&"Sleep"));
assert!(names.contains(&"SendUserMessage"));
assert!(names.contains(&"Config"));
assert!(names.contains(&"StructuredOutput"));
assert!(names.contains(&"REPL"));
assert!(names.contains(&"PowerShell"));
}
#[test]
fn rejects_unknown_tool_names() {
let error = execute_tool("nope", &json!({})).expect_err("tool should be rejected");
assert!(error.contains("unsupported tool"));
}
#[test]
fn web_fetch_returns_prompt_aware_summary() {
let server = TestServer::spawn(Arc::new(|request_line: &str| {
assert!(request_line.starts_with("GET /page "));
HttpResponse::html(
200,
"OK",
"<html><head><title>Ignored</title></head><body><h1>Test Page</h1><p>Hello <b>world</b> from local server.</p></body></html>",
)
}));
let result = execute_tool(
"WebFetch",
&json!({
"url": format!("http://{}/page", server.addr()),
"prompt": "Summarize this page"
}),
)
.expect("WebFetch should succeed");
let output: serde_json::Value = serde_json::from_str(&result).expect("valid json");
assert_eq!(output["code"], 200);
let summary = output["result"].as_str().expect("result string");
assert!(summary.contains("Fetched"));
assert!(summary.contains("Test Page"));
assert!(summary.contains("Hello world from local server"));
let titled = execute_tool(
"WebFetch",
&json!({
"url": format!("http://{}/page", server.addr()),
"prompt": "What is the page title?"
}),
)
.expect("WebFetch title query should succeed");
let titled_output: serde_json::Value = serde_json::from_str(&titled).expect("valid json");
let titled_summary = titled_output["result"].as_str().expect("result string");
assert!(titled_summary.contains("Title: Ignored"));
}
#[test]
fn web_fetch_supports_plain_text_and_rejects_invalid_url() {
let server = TestServer::spawn(Arc::new(|request_line: &str| {
assert!(request_line.starts_with("GET /plain "));
HttpResponse::text(200, "OK", "plain text response")
}));
let result = execute_tool(
"WebFetch",
&json!({
"url": format!("http://{}/plain", server.addr()),
"prompt": "Show me the content"
}),
)
.expect("WebFetch should succeed for text content");
let output: serde_json::Value = serde_json::from_str(&result).expect("valid json");
assert_eq!(output["url"], format!("http://{}/plain", server.addr()));
assert!(output["result"]
.as_str()
.expect("result")
.contains("plain text response"));
let error = execute_tool(
"WebFetch",
&json!({
"url": "not a url",
"prompt": "Summarize"
}),
)
.expect_err("invalid URL should fail");
assert!(error.contains("relative URL without a base") || error.contains("invalid"));
}
#[test]
fn web_search_extracts_and_filters_results() {
let server = TestServer::spawn(Arc::new(|request_line: &str| {
assert!(request_line.contains("GET /search?q=rust+web+search "));
HttpResponse::html(
200,
"OK",
r#"
<html><body>
<a class="result__a" href="https://docs.rs/reqwest">Reqwest docs</a>
<a class="result__a" href="https://example.com/blocked">Blocked result</a>
</body></html>
"#,
)
}));
std::env::set_var(
"CLAWD_WEB_SEARCH_BASE_URL",
format!("http://{}/search", server.addr()),
);
let result = execute_tool(
"WebSearch",
&json!({
"query": "rust web search",
"allowed_domains": ["https://DOCS.rs/"],
"blocked_domains": ["HTTPS://EXAMPLE.COM"]
}),
)
.expect("WebSearch should succeed");
std::env::remove_var("CLAWD_WEB_SEARCH_BASE_URL");
let output: serde_json::Value = serde_json::from_str(&result).expect("valid json");
assert_eq!(output["query"], "rust web search");
let results = output["results"].as_array().expect("results array");
let search_result = results
.iter()
.find(|item| item.get("content").is_some())
.expect("search result block present");
let content = search_result["content"].as_array().expect("content array");
assert_eq!(content.len(), 1);
assert_eq!(content[0]["title"], "Reqwest docs");
assert_eq!(content[0]["url"], "https://docs.rs/reqwest");
}
#[test]
fn web_search_handles_generic_links_and_invalid_base_url() {
let _guard = env_lock()
.lock()
.unwrap_or_else(std::sync::PoisonError::into_inner);
let server = TestServer::spawn(Arc::new(|request_line: &str| {
assert!(request_line.contains("GET /fallback?q=generic+links "));
HttpResponse::html(
200,
"OK",
r#"
<html><body>
<a href="https://example.com/one">Example One</a>
<a href="https://example.com/one">Duplicate Example One</a>
<a href="https://docs.rs/tokio">Tokio Docs</a>
</body></html>
"#,
)
}));
std::env::set_var(
"CLAWD_WEB_SEARCH_BASE_URL",
format!("http://{}/fallback", server.addr()),
);
let result = execute_tool(
"WebSearch",
&json!({
"query": "generic links"
}),
)
.expect("WebSearch fallback parsing should succeed");
std::env::remove_var("CLAWD_WEB_SEARCH_BASE_URL");
let output: serde_json::Value = serde_json::from_str(&result).expect("valid json");
let results = output["results"].as_array().expect("results array");
let search_result = results
.iter()
.find(|item| item.get("content").is_some())
.expect("search result block present");
let content = search_result["content"].as_array().expect("content array");
assert_eq!(content.len(), 2);
assert_eq!(content[0]["url"], "https://example.com/one");
assert_eq!(content[1]["url"], "https://docs.rs/tokio");
std::env::set_var("CLAWD_WEB_SEARCH_BASE_URL", "://bad-base-url");
let error = execute_tool("WebSearch", &json!({ "query": "generic links" }))
.expect_err("invalid base URL should fail");
std::env::remove_var("CLAWD_WEB_SEARCH_BASE_URL");
assert!(error.contains("relative URL without a base") || error.contains("empty host"));
}
#[test]
fn todo_write_persists_and_returns_previous_state() {
let _guard = env_lock()
.lock()
.unwrap_or_else(std::sync::PoisonError::into_inner);
let path = temp_path("todos.json");
std::env::set_var("CLAWD_TODO_STORE", &path);
let first = execute_tool(
"TodoWrite",
&json!({
"todos": [
{"content": "Add tool", "activeForm": "Adding tool", "status": "in_progress"},
{"content": "Run tests", "activeForm": "Running tests", "status": "pending"}
]
}),
)
.expect("TodoWrite should succeed");
let first_output: serde_json::Value = serde_json::from_str(&first).expect("valid json");
assert_eq!(first_output["oldTodos"].as_array().expect("array").len(), 0);
let second = execute_tool(
"TodoWrite",
&json!({
"todos": [
{"content": "Add tool", "activeForm": "Adding tool", "status": "completed"},
{"content": "Run tests", "activeForm": "Running tests", "status": "completed"},
{"content": "Verify", "activeForm": "Verifying", "status": "completed"}
]
}),
)
.expect("TodoWrite should succeed");
std::env::remove_var("CLAWD_TODO_STORE");
let _ = std::fs::remove_file(path);
let second_output: serde_json::Value = serde_json::from_str(&second).expect("valid json");
assert_eq!(
second_output["oldTodos"].as_array().expect("array").len(),
2
);
assert_eq!(
second_output["newTodos"].as_array().expect("array").len(),
3
);
assert!(second_output["verificationNudgeNeeded"].is_null());
}
#[test]
fn todo_write_persists_markdown_in_claude_directory() {
let _guard = env_lock()
.lock()
.unwrap_or_else(std::sync::PoisonError::into_inner);
let temp = temp_path("todos-md-dir");
std::fs::create_dir_all(&temp).expect("temp dir");
let previous = std::env::current_dir().expect("cwd");
std::env::set_current_dir(&temp).expect("set cwd");
execute_tool(
"TodoWrite",
&json!({
"todos": [
{"content": "Add tool", "activeForm": "Adding tool", "status": "in_progress"},
{"content": "Run tests", "activeForm": "Running tests", "status": "pending"}
]
}),
)
.expect("TodoWrite should succeed");
let persisted = std::fs::read_to_string(temp.join(".claude").join("todos.md"))
.expect("todo markdown exists");
std::env::set_current_dir(previous).expect("restore cwd");
let _ = std::fs::remove_dir_all(temp);
assert!(persisted.contains("# Todo list"));
assert!(persisted.contains("- [~] Add tool :: Adding tool"));
assert!(persisted.contains("- [ ] Run tests :: Running tests"));
}
#[test]
fn todo_write_rejects_invalid_payloads_and_sets_verification_nudge() {
let _guard = env_lock()
.lock()
.unwrap_or_else(std::sync::PoisonError::into_inner);
let path = temp_path("todos-errors.json");
std::env::set_var("CLAWD_TODO_STORE", &path);
let empty = execute_tool("TodoWrite", &json!({ "todos": [] }))
.expect_err("empty todos should fail");
assert!(empty.contains("todos must not be empty"));
let too_many_active = execute_tool(
"TodoWrite",
&json!({
"todos": [
{"content": "One", "activeForm": "Doing one", "status": "in_progress"},
{"content": "Two", "activeForm": "Doing two", "status": "in_progress"}
]
}),
)
.expect_err("multiple in-progress todos should fail");
assert!(too_many_active.contains("zero or one todo items may be in_progress"));
let blank_content = execute_tool(
"TodoWrite",
&json!({
"todos": [
{"content": " ", "activeForm": "Doing it", "status": "pending"}
]
}),
)
.expect_err("blank content should fail");
assert!(blank_content.contains("todo content must not be empty"));
let nudge = execute_tool(
"TodoWrite",
&json!({
"todos": [
{"content": "Write tests", "activeForm": "Writing tests", "status": "completed"},
{"content": "Fix errors", "activeForm": "Fixing errors", "status": "completed"},
{"content": "Ship branch", "activeForm": "Shipping branch", "status": "completed"}
]
}),
)
.expect("completed todos should succeed");
std::env::remove_var("CLAWD_TODO_STORE");
let _ = fs::remove_file(path);
let output: serde_json::Value = serde_json::from_str(&nudge).expect("valid json");
assert_eq!(output["verificationNudgeNeeded"], true);
}
#[test]
fn skill_loads_local_skill_prompt() {
let result = execute_tool(
"Skill",
&json!({
"skill": "help",
"args": "overview"
}),
)
.expect("Skill should succeed");
let output: serde_json::Value = serde_json::from_str(&result).expect("valid json");
assert_eq!(output["skill"], "help");
assert!(output["path"]
.as_str()
.expect("path")
.ends_with("/help/SKILL.md"));
assert!(output["prompt"]
.as_str()
.expect("prompt")
.contains("Guide on using oh-my-codex plugin"));
let dollar_result = execute_tool(
"Skill",
&json!({
"skill": "$help"
}),
)
.expect("Skill should accept $skill invocation form");
let dollar_output: serde_json::Value =
serde_json::from_str(&dollar_result).expect("valid json");
assert_eq!(dollar_output["skill"], "$help");
assert!(dollar_output["path"]
.as_str()
.expect("path")
.ends_with("/help/SKILL.md"));
}
#[test]
fn tool_search_supports_keyword_and_select_queries() {
let keyword = execute_tool(
"ToolSearch",
&json!({"query": "web current", "max_results": 3}),
)
.expect("ToolSearch should succeed");
let keyword_output: serde_json::Value = serde_json::from_str(&keyword).expect("valid json");
let matches = keyword_output["matches"].as_array().expect("matches");
assert!(matches.iter().any(|value| value == "WebSearch"));
let selected = execute_tool("ToolSearch", &json!({"query": "select:Agent,Skill"}))
.expect("ToolSearch should succeed");
let selected_output: serde_json::Value =
serde_json::from_str(&selected).expect("valid json");
assert_eq!(selected_output["matches"][0], "Agent");
assert_eq!(selected_output["matches"][1], "Skill");
let aliased = execute_tool("ToolSearch", &json!({"query": "AgentTool"}))
.expect("ToolSearch should support tool aliases");
let aliased_output: serde_json::Value = serde_json::from_str(&aliased).expect("valid json");
assert_eq!(aliased_output["matches"][0], "Agent");
assert_eq!(aliased_output["normalized_query"], "agent");
let selected_with_alias =
execute_tool("ToolSearch", &json!({"query": "select:AgentTool,Skill"}))
.expect("ToolSearch alias select should succeed");
let selected_with_alias_output: serde_json::Value =
serde_json::from_str(&selected_with_alias).expect("valid json");
assert_eq!(selected_with_alias_output["matches"][0], "Agent");
assert_eq!(selected_with_alias_output["matches"][1], "Skill");
}
#[test]
fn agent_executes_child_conversation_and_persists_results() {
let _guard = env_lock()
.lock()
.unwrap_or_else(std::sync::PoisonError::into_inner);
let dir = temp_path("agent-store");
std::env::set_var("CLAWD_AGENT_STORE", &dir);
std::env::set_var(
"CLAWD_AGENT_TEST_SCRIPT",
serde_json::to_string(&vec![
vec![json!({
"type": "tool_use",
"id": "tool-1",
"name": "StructuredOutput",
"input": {"ok": true, "items": [1, 2, 3]}
})],
vec![json!({
"type": "text",
"text": "Child agent completed successfully."
})],
])
.expect("script json"),
);
let result = execute_tool(
"Agent",
&json!({
"description": "Audit the branch",
"prompt": "Check tests and outstanding work.",
"subagent_type": "Explore",
"name": "ship-audit"
}),
)
.expect("Agent should succeed");
std::env::remove_var("CLAWD_AGENT_TEST_SCRIPT");
std::env::remove_var("CLAWD_AGENT_STORE");
let output: serde_json::Value = serde_json::from_str(&result).expect("valid json");
assert_eq!(output["name"], "ship-audit");
assert_eq!(output["subagentType"], "Explore");
assert_eq!(output["status"], "completed");
assert_eq!(output["depth"], 0);
assert_eq!(output["maxDepth"], 3);
assert_eq!(output["result"], "Child agent completed successfully.");
assert_eq!(output["toolResults"][0]["toolName"], "StructuredOutput");
assert_eq!(output["toolResults"][0]["isError"], false);
let manifest_file = output["manifestFile"].as_str().expect("manifest file");
let output_file = output["outputFile"].as_str().expect("output file");
let contents = std::fs::read_to_string(output_file).expect("agent file exists");
let manifest_contents =
std::fs::read_to_string(manifest_file).expect("manifest file exists");
assert!(contents.contains("Child agent completed successfully."));
assert!(contents.contains("StructuredOutput [ok]"));
assert!(manifest_contents.contains("\"subagentType\": \"Explore\""));
std::env::set_var(
"CLAWD_AGENT_TEST_SCRIPT",
serde_json::to_string(&vec![vec![json!({
"type": "text",
"text": "Normalized alias check."
})]])
.expect("script json"),
);
let normalized = execute_tool(
"Agent",
&json!({
"description": "Verify the branch",
"prompt": "Check tests.",
"subagent_type": "explorer"
}),
)
.expect("Agent should normalize built-in aliases");
std::env::remove_var("CLAWD_AGENT_TEST_SCRIPT");
let normalized_output: serde_json::Value =
serde_json::from_str(&normalized).expect("valid json");
assert_eq!(normalized_output["subagentType"], "Explore");
std::env::set_var(
"CLAWD_AGENT_TEST_SCRIPT",
serde_json::to_string(&vec![vec![json!({
"type": "text",
"text": "Name normalization check."
})]])
.expect("script json"),
);
let named = execute_tool(
"Agent",
&json!({
"description": "Review the branch",
"prompt": "Inspect diff.",
"name": "Ship Audit!!!"
}),
)
.expect("Agent should normalize explicit names");
std::env::remove_var("CLAWD_AGENT_TEST_SCRIPT");
let named_output: serde_json::Value = serde_json::from_str(&named).expect("valid json");
assert_eq!(named_output["name"], "ship-audit");
let _ = std::fs::remove_dir_all(dir);
}
#[test]
fn agent_rejects_blank_required_fields_and_enforces_max_depth() {
let missing_description = execute_tool(
"Agent",
&json!({
"description": " ",
"prompt": "Inspect"
}),
)
.expect_err("blank description should fail");
assert!(missing_description.contains("description must not be empty"));
let missing_prompt = execute_tool(
"Agent",
&json!({
"description": "Inspect branch",
"prompt": " "
}),
)
.expect_err("blank prompt should fail");
assert!(missing_prompt.contains("prompt must not be empty"));
let _guard = env_lock()
.lock()
.unwrap_or_else(std::sync::PoisonError::into_inner);
std::env::set_var("CLAWD_AGENT_DEPTH", "1");
let depth_error = execute_tool(
"Agent",
&json!({
"description": "Nested agent",
"prompt": "Do nested work.",
"max_depth": 1
}),
)
.expect_err("max depth should fail");
std::env::remove_var("CLAWD_AGENT_DEPTH");
assert!(depth_error.contains("max_depth exceeded"));
}
#[test]
fn notebook_edit_replaces_inserts_and_deletes_cells() {
let path = temp_path("notebook.ipynb");
std::fs::write(
&path,
r#"{
"cells": [
{"cell_type": "code", "id": "cell-a", "metadata": {}, "source": ["print(1)\n"], "outputs": [], "execution_count": null}
],
"metadata": {"kernelspec": {"language": "python"}},
"nbformat": 4,
"nbformat_minor": 5
}"#,
)
.expect("write notebook");
let replaced = execute_tool(
"NotebookEdit",
&json!({
"notebook_path": path.display().to_string(),
"cell_id": "cell-a",
"new_source": "print(2)\n",
"edit_mode": "replace"
}),
)
.expect("NotebookEdit replace should succeed");
let replaced_output: serde_json::Value = serde_json::from_str(&replaced).expect("json");
assert_eq!(replaced_output["cell_id"], "cell-a");
assert_eq!(replaced_output["cell_type"], "code");
let inserted = execute_tool(
"NotebookEdit",
&json!({
"notebook_path": path.display().to_string(),
"cell_id": "cell-a",
"new_source": "# heading\n",
"cell_type": "markdown",
"edit_mode": "insert"
}),
)
.expect("NotebookEdit insert should succeed");
let inserted_output: serde_json::Value = serde_json::from_str(&inserted).expect("json");
assert_eq!(inserted_output["cell_type"], "markdown");
let appended = execute_tool(
"NotebookEdit",
&json!({
"notebook_path": path.display().to_string(),
"new_source": "print(3)\n",
"edit_mode": "insert"
}),
)
.expect("NotebookEdit append should succeed");
let appended_output: serde_json::Value = serde_json::from_str(&appended).expect("json");
assert_eq!(appended_output["cell_type"], "code");
let deleted = execute_tool(
"NotebookEdit",
&json!({
"notebook_path": path.display().to_string(),
"cell_id": "cell-a",
"edit_mode": "delete"
}),
)
.expect("NotebookEdit delete should succeed without new_source");
let deleted_output: serde_json::Value = serde_json::from_str(&deleted).expect("json");
assert!(deleted_output["cell_type"].is_null());
assert_eq!(deleted_output["new_source"], "");
let final_notebook: serde_json::Value =
serde_json::from_str(&std::fs::read_to_string(&path).expect("read notebook"))
.expect("valid notebook json");
let cells = final_notebook["cells"].as_array().expect("cells array");
assert_eq!(cells.len(), 2);
assert_eq!(cells[0]["cell_type"], "markdown");
assert!(cells[0].get("outputs").is_none());
assert_eq!(cells[1]["cell_type"], "code");
assert_eq!(cells[1]["source"][0], "print(3)\n");
let _ = std::fs::remove_file(path);
}
#[test]
fn notebook_edit_rejects_invalid_inputs() {
let text_path = temp_path("notebook.txt");
fs::write(&text_path, "not a notebook").expect("write text file");
let wrong_extension = execute_tool(
"NotebookEdit",
&json!({
"notebook_path": text_path.display().to_string(),
"new_source": "print(1)\n"
}),
)
.expect_err("non-ipynb file should fail");
assert!(wrong_extension.contains("Jupyter notebook"));
let _ = fs::remove_file(&text_path);
let empty_notebook = temp_path("empty.ipynb");
fs::write(
&empty_notebook,
r#"{"cells":[],"metadata":{"kernelspec":{"language":"python"}},"nbformat":4,"nbformat_minor":5}"#,
)
.expect("write empty notebook");
let missing_source = execute_tool(
"NotebookEdit",
&json!({
"notebook_path": empty_notebook.display().to_string(),
"edit_mode": "insert"
}),
)
.expect_err("insert without source should fail");
assert!(missing_source.contains("new_source is required"));
let missing_cell = execute_tool(
"NotebookEdit",
&json!({
"notebook_path": empty_notebook.display().to_string(),
"edit_mode": "delete"
}),
)
.expect_err("delete on empty notebook should fail");
assert!(missing_cell.contains("Notebook has no cells to edit"));
let _ = fs::remove_file(empty_notebook);
}
#[test]
fn bash_tool_reports_success_exit_failure_timeout_and_background() {
let success = execute_tool("bash", &json!({ "command": "printf 'hello'" }))
.expect("bash should succeed");
let success_output: serde_json::Value = serde_json::from_str(&success).expect("json");
assert_eq!(success_output["stdout"], "hello");
assert_eq!(success_output["interrupted"], false);
let failure = execute_tool("bash", &json!({ "command": "printf 'oops' >&2; exit 7" }))
.expect("bash failure should still return structured output");
let failure_output: serde_json::Value = serde_json::from_str(&failure).expect("json");
assert_eq!(failure_output["returnCodeInterpretation"], "exit_code:7");
assert!(failure_output["stderr"]
.as_str()
.expect("stderr")
.contains("oops"));
let timeout = execute_tool("bash", &json!({ "command": "sleep 1", "timeout": 10 }))
.expect("bash timeout should return output");
let timeout_output: serde_json::Value = serde_json::from_str(&timeout).expect("json");
assert_eq!(timeout_output["interrupted"], true);
assert_eq!(timeout_output["returnCodeInterpretation"], "timeout");
assert!(timeout_output["stderr"]
.as_str()
.expect("stderr")
.contains("Command exceeded timeout"));
let background = execute_tool(
"bash",
&json!({ "command": "sleep 1", "run_in_background": true }),
)
.expect("bash background should succeed");
let background_output: serde_json::Value = serde_json::from_str(&background).expect("json");
assert!(background_output["backgroundTaskId"].as_str().is_some());
assert_eq!(background_output["noOutputExpected"], true);
}
#[test]
// End-to-end coverage of the write_file / read_file / edit_file tools:
// create vs. update, full and windowed reads, past-EOF reads, single and
// replace-all edits, and the two edit error paths.
fn file_tools_cover_read_write_and_edit_behaviors() {
    // Serialize with other tests that mutate process-global state (the
    // current working directory); recover the lock even if a previous
    // holder panicked.
    let _guard = env_lock()
        .lock()
        .unwrap_or_else(std::sync::PoisonError::into_inner);
    // Work inside a throwaway directory so the tools' relative paths
    // resolve there and nothing leaks into the real workspace.
    let root = temp_path("fs-suite");
    fs::create_dir_all(&root).expect("create root");
    let original_dir = std::env::current_dir().expect("cwd");
    std::env::set_current_dir(&root).expect("set cwd");
    // Writing a new path reports type "create" and creates parent dirs.
    let write_create = execute_tool(
        "write_file",
        &json!({ "path": "nested/demo.txt", "content": "alpha\nbeta\nalpha\n" }),
    )
    .expect("write create should succeed");
    let write_create_output: serde_json::Value =
        serde_json::from_str(&write_create).expect("json");
    assert_eq!(write_create_output["type"], "create");
    assert!(root.join("nested/demo.txt").exists());
    // Overwriting an existing path reports "update" and echoes the
    // previous content in "originalFile".
    let write_update = execute_tool(
        "write_file",
        &json!({ "path": "nested/demo.txt", "content": "alpha\nbeta\ngamma\n" }),
    )
    .expect("write update should succeed");
    let write_update_output: serde_json::Value =
        serde_json::from_str(&write_update).expect("json");
    assert_eq!(write_update_output["type"], "update");
    assert_eq!(write_update_output["originalFile"], "alpha\nbeta\nalpha\n");
    // Full read: content comes back without the trailing newline and
    // line numbering starts at 1.
    let read_full = execute_tool("read_file", &json!({ "path": "nested/demo.txt" }))
        .expect("read full should succeed");
    let read_full_output: serde_json::Value = serde_json::from_str(&read_full).expect("json");
    assert_eq!(read_full_output["file"]["content"], "alpha\nbeta\ngamma");
    assert_eq!(read_full_output["file"]["startLine"], 1);
    // Windowed read: offset 1 / limit 1 yields just the second line.
    let read_slice = execute_tool(
        "read_file",
        &json!({ "path": "nested/demo.txt", "offset": 1, "limit": 1 }),
    )
    .expect("read slice should succeed");
    let read_slice_output: serde_json::Value = serde_json::from_str(&read_slice).expect("json");
    assert_eq!(read_slice_output["file"]["content"], "beta");
    assert_eq!(read_slice_output["file"]["startLine"], 2);
    // An offset past EOF is not an error: empty content, startLine
    // clamped to one past the last line.
    let read_past_end = execute_tool(
        "read_file",
        &json!({ "path": "nested/demo.txt", "offset": 50 }),
    )
    .expect("read past EOF should succeed");
    let read_past_end_output: serde_json::Value =
        serde_json::from_str(&read_past_end).expect("json");
    assert_eq!(read_past_end_output["file"]["content"], "");
    assert_eq!(read_past_end_output["file"]["startLine"], 4);
    // Reading a missing file is a tool error.
    let read_error = execute_tool("read_file", &json!({ "path": "missing.txt" }))
        .expect_err("missing file should fail");
    assert!(!read_error.is_empty());
    // Default edit replaces only the first occurrence of old_string.
    let edit_once = execute_tool(
        "edit_file",
        &json!({ "path": "nested/demo.txt", "old_string": "alpha", "new_string": "omega" }),
    )
    .expect("single edit should succeed");
    let edit_once_output: serde_json::Value = serde_json::from_str(&edit_once).expect("json");
    assert_eq!(edit_once_output["replaceAll"], false);
    assert_eq!(
        fs::read_to_string(root.join("nested/demo.txt")).expect("read file"),
        "omega\nbeta\ngamma\n"
    );
    // Reset the file so the replace_all case starts from two "alpha"s.
    execute_tool(
        "write_file",
        &json!({ "path": "nested/demo.txt", "content": "alpha\nbeta\nalpha\n" }),
    )
    .expect("reset file");
    // replace_all rewrites every occurrence.
    let edit_all = execute_tool(
        "edit_file",
        &json!({
            "path": "nested/demo.txt",
            "old_string": "alpha",
            "new_string": "omega",
            "replace_all": true
        }),
    )
    .expect("replace all should succeed");
    let edit_all_output: serde_json::Value = serde_json::from_str(&edit_all).expect("json");
    assert_eq!(edit_all_output["replaceAll"], true);
    assert_eq!(
        fs::read_to_string(root.join("nested/demo.txt")).expect("read file"),
        "omega\nbeta\nomega\n"
    );
    // Error path: old_string == new_string is rejected.
    let edit_same = execute_tool(
        "edit_file",
        &json!({ "path": "nested/demo.txt", "old_string": "omega", "new_string": "omega" }),
    )
    .expect_err("identical old/new should fail");
    assert!(edit_same.contains("must differ"));
    // Error path: old_string absent from the file is rejected.
    let edit_missing = execute_tool(
        "edit_file",
        &json!({ "path": "nested/demo.txt", "old_string": "missing", "new_string": "omega" }),
    )
    .expect_err("missing substring should fail");
    assert!(edit_missing.contains("old_string not found"));
    // Restore the process cwd and best-effort clean the scratch dir.
    std::env::set_current_dir(&original_dir).expect("restore cwd");
    let _ = fs::remove_dir_all(root);
}
#[test]
// Covers glob_search (match + invalid pattern) and grep_search in its
// content and count output modes, plus the invalid-regex error path.
fn glob_and_grep_tools_cover_success_and_errors() {
    // Serialize with other tests that change the process cwd.
    let _guard = env_lock()
        .lock()
        .unwrap_or_else(std::sync::PoisonError::into_inner);
    let root = temp_path("search-suite");
    fs::create_dir_all(root.join("nested")).expect("create root");
    let original_dir = std::env::current_dir().expect("cwd");
    std::env::set_current_dir(&root).expect("set cwd");
    // Fixture: a .rs file with two "alpha" lines and a .txt with one,
    // so glob filtering and match counting are distinguishable.
    fs::write(
        root.join("nested/lib.rs"),
        "fn main() {}\nlet alpha = 1;\nlet alpha = 2;\n",
    )
    .expect("write rust file");
    fs::write(root.join("nested/notes.txt"), "alpha\nbeta\n").expect("write txt file");
    // Glob picks up only the .rs file.
    let globbed = execute_tool("glob_search", &json!({ "pattern": "nested/*.rs" }))
        .expect("glob should succeed");
    let globbed_output: serde_json::Value = serde_json::from_str(&globbed).expect("json");
    assert_eq!(globbed_output["numFiles"], 1);
    assert!(globbed_output["filenames"][0]
        .as_str()
        .expect("filename")
        .ends_with("nested/lib.rs"));
    // An unclosed character class is an invalid glob pattern.
    let glob_error = execute_tool("glob_search", &json!({ "pattern": "[" }))
        .expect_err("invalid glob should fail");
    assert!(!glob_error.is_empty());
    // Content mode with glob restriction, line numbers, head_limit 1 and
    // offset 1: the first match is skipped, leaving the second alpha line.
    let grep_content = execute_tool(
        "grep_search",
        &json!({
            "pattern": "alpha",
            "path": "nested",
            "glob": "*.rs",
            "output_mode": "content",
            "-n": true,
            "head_limit": 1,
            "offset": 1
        }),
    )
    .expect("grep content should succeed");
    let grep_content_output: serde_json::Value =
        serde_json::from_str(&grep_content).expect("json");
    assert_eq!(grep_content_output["numFiles"], 0);
    assert!(grep_content_output["appliedLimit"].is_null());
    assert_eq!(grep_content_output["appliedOffset"], 1);
    assert!(grep_content_output["content"]
        .as_str()
        .expect("content")
        .contains("let alpha = 2;"));
    // Count mode with no glob sees all three alpha occurrences (.rs + .txt).
    let grep_count = execute_tool(
        "grep_search",
        &json!({ "pattern": "alpha", "path": "nested", "output_mode": "count" }),
    )
    .expect("grep count should succeed");
    let grep_count_output: serde_json::Value = serde_json::from_str(&grep_count).expect("json");
    assert_eq!(grep_count_output["numMatches"], 3);
    // An unclosed group is an invalid regex.
    let grep_error = execute_tool(
        "grep_search",
        &json!({ "pattern": "(alpha", "path": "nested" }),
    )
    .expect_err("invalid regex should fail");
    assert!(!grep_error.is_empty());
    // Restore the process cwd and best-effort clean the scratch dir.
    std::env::set_current_dir(&original_dir).expect("restore cwd");
    let _ = fs::remove_dir_all(root);
}
#[test]
fn sleep_waits_and_reports_duration() {
    // Time the Sleep tool call so we can verify it actually blocked.
    let clock = std::time::Instant::now();
    let raw = execute_tool("Sleep", &json!({"duration_ms": 20})).expect("Sleep should succeed");
    let wall_time = clock.elapsed();
    let payload: serde_json::Value = serde_json::from_str(&raw).expect("json");
    // The tool echoes the requested duration and a human-readable summary.
    assert_eq!(payload["duration_ms"], 20);
    let message = payload["message"].as_str().expect("message");
    assert!(message.contains("Slept for 20ms"));
    // Allow a little slack below the requested 20ms for timer granularity.
    assert!(wall_time >= Duration::from_millis(15));
}
#[test]
fn brief_returns_sent_message_and_attachment_metadata() {
    // Unique temp file name so concurrent test runs cannot collide.
    let nanos = std::time::SystemTime::now()
        .duration_since(std::time::UNIX_EPOCH)
        .expect("time")
        .as_nanos();
    let image_path = std::env::temp_dir().join(format!("clawd-brief-{nanos}.png"));
    std::fs::write(&image_path, b"png-data").expect("write attachment");
    let request = json!({
        "message": "hello user",
        "attachments": [image_path.display().to_string()],
        "status": "normal"
    });
    let raw = execute_tool("SendUserMessage", &request).expect("SendUserMessage should succeed");
    let payload: serde_json::Value = serde_json::from_str(&raw).expect("json");
    // The tool echoes the message, stamps a send time, and classifies the
    // .png attachment as an image.
    assert_eq!(payload["message"], "hello user");
    assert!(payload["sentAt"].as_str().is_some());
    assert_eq!(payload["attachments"][0]["isImage"], true);
    // Best-effort cleanup of the scratch attachment.
    let _ = std::fs::remove_file(image_path);
}
#[test]
// Exercises the Config tool: reading a global setting, writing a validated
// one, rejecting an invalid value, and the unknown-setting response.
fn config_reads_and_writes_supported_values() {
    // Serialize with other tests: this one rewrites HOME,
    // CLAUDE_CONFIG_HOME, and the process cwd.
    let _guard = env_lock()
        .lock()
        .unwrap_or_else(std::sync::PoisonError::into_inner);
    let root = std::env::temp_dir().join(format!(
        "clawd-config-{}",
        std::time::SystemTime::now()
            .duration_since(std::time::UNIX_EPOCH)
            .expect("time")
            .as_nanos()
    ));
    // Fake home and working directories, each with its own .claude dir,
    // so the tool resolves settings entirely inside the sandbox.
    let home = root.join("home");
    let cwd = root.join("cwd");
    std::fs::create_dir_all(home.join(".claude")).expect("home dir");
    std::fs::create_dir_all(cwd.join(".claude")).expect("cwd dir");
    // Seed a global settings file for the read case below.
    std::fs::write(
        home.join(".claude").join("settings.json"),
        r#"{"verbose":false}"#,
    )
    .expect("write global settings");
    // Save the real environment so it can be restored afterwards.
    let original_home = std::env::var("HOME").ok();
    let original_claude_home = std::env::var("CLAUDE_CONFIG_HOME").ok();
    let original_dir = std::env::current_dir().expect("cwd");
    // Point HOME at the sandbox and drop any CLAUDE_CONFIG_HOME override.
    std::env::set_var("HOME", &home);
    std::env::remove_var("CLAUDE_CONFIG_HOME");
    std::env::set_current_dir(&cwd).expect("set cwd");
    // Read: picks up the seeded global value.
    let get = execute_tool("Config", &json!({"setting": "verbose"})).expect("get config");
    let get_output: serde_json::Value = serde_json::from_str(&get).expect("json");
    assert_eq!(get_output["value"], false);
    // Write: a valid permissions.defaultMode value is accepted and echoed.
    let set = execute_tool(
        "Config",
        &json!({"setting": "permissions.defaultMode", "value": "plan"}),
    )
    .expect("set config");
    let set_output: serde_json::Value = serde_json::from_str(&set).expect("json");
    assert_eq!(set_output["operation"], "set");
    assert_eq!(set_output["newValue"], "plan");
    // Write: an unrecognized mode is rejected with an error.
    let invalid = execute_tool(
        "Config",
        &json!({"setting": "permissions.defaultMode", "value": "bogus"}),
    )
    .expect_err("invalid config value should error");
    assert!(invalid.contains("Invalid value"));
    // An unknown setting is not a hard error; it reports success:false.
    let unknown =
        execute_tool("Config", &json!({"setting": "nope"})).expect("unknown setting result");
    let unknown_output: serde_json::Value = serde_json::from_str(&unknown).expect("json");
    assert_eq!(unknown_output["success"], false);
    // Restore cwd and both environment variables (set vs. remove depending
    // on whether they existed before), then clean the sandbox.
    std::env::set_current_dir(&original_dir).expect("restore cwd");
    match original_home {
        Some(value) => std::env::set_var("HOME", value),
        None => std::env::remove_var("HOME"),
    }
    match original_claude_home {
        Some(value) => std::env::set_var("CLAUDE_CONFIG_HOME", value),
        None => std::env::remove_var("CLAUDE_CONFIG_HOME"),
    }
    let _ = std::fs::remove_dir_all(root);
}
#[test]
fn structured_output_echoes_input_payload() {
    // StructuredOutput should wrap whatever JSON it receives, unchanged.
    let input = json!({"ok": true, "items": [1, 2, 3]});
    let raw = execute_tool("StructuredOutput", &input).expect("StructuredOutput should succeed");
    let payload: serde_json::Value = serde_json::from_str(&raw).expect("json");
    assert_eq!(payload["data"], "Structured output provided successfully");
    let echoed = &payload["structured_output"];
    assert_eq!(echoed["ok"], true);
    assert_eq!(echoed["items"][1], 2);
}
#[test]
fn repl_executes_python_code() {
    // Run a trivial Python snippet through the REPL tool and check that it
    // exits cleanly with the expected printed value.
    let request = json!({"language": "python", "code": "print(1 + 1)", "timeout_ms": 500});
    let raw = execute_tool("REPL", &request).expect("REPL should succeed");
    let payload: serde_json::Value = serde_json::from_str(&raw).expect("json");
    assert_eq!(payload["language"], "python");
    assert_eq!(payload["exitCode"], 0);
    let stdout = payload["stdout"].as_str().expect("stdout");
    assert!(stdout.contains('2'));
}
#[test]
// Runs the PowerShell tool against a fake `pwsh` shim placed on PATH, in
// both foreground and background modes. The shim echoes the -Command
// argument so we can verify what the tool passed through.
fn powershell_runs_via_stub_shell() {
    // Serialize with other tests: this one rewrites the process PATH.
    let _guard = env_lock()
        .lock()
        .unwrap_or_else(std::sync::PoisonError::into_inner);
    let dir = std::env::temp_dir().join(format!(
        "clawd-pwsh-bin-{}",
        std::time::SystemTime::now()
            .duration_since(std::time::UNIX_EPOCH)
            .expect("time")
            .as_nanos()
    ));
    std::fs::create_dir_all(&dir).expect("create dir");
    // Stub shell script: skip arguments up to -Command, then print the
    // command it was given prefixed with "pwsh:".
    let script = dir.join("pwsh");
    std::fs::write(
        &script,
        r#"#!/bin/sh
    while [ "$1" != "-Command" ] && [ $# -gt 0 ]; do shift; done
    shift
    printf 'pwsh:%s' "$1"
    "#,
    )
    .expect("write script");
    // Mark the stub executable so PATH lookup can run it.
    std::process::Command::new("/bin/chmod")
        .arg("+x")
        .arg(&script)
        .status()
        .expect("chmod");
    // Prepend the stub dir to PATH so `pwsh` resolves to our script.
    let original_path = std::env::var("PATH").unwrap_or_default();
    std::env::set_var("PATH", format!("{}:{}", dir.display(), original_path));
    let result = execute_tool(
        "PowerShell",
        &json!({"command": "Write-Output hello", "timeout": 1000}),
    )
    .expect("PowerShell should succeed");
    let background = execute_tool(
        "PowerShell",
        &json!({"command": "Write-Output hello", "run_in_background": true}),
    )
    .expect("PowerShell background should succeed");
    // Restore PATH before asserting so a failed assertion cannot leave the
    // environment polluted for later tests.
    std::env::set_var("PATH", original_path);
    let _ = std::fs::remove_dir_all(dir);
    // Foreground run: stdout is the shim's echo of the command, no stderr.
    let output: serde_json::Value = serde_json::from_str(&result).expect("json");
    assert_eq!(output["stdout"], "pwsh:Write-Output hello");
    assert!(output["stderr"].as_str().expect("stderr").is_empty());
    // Background run: a task id is returned and the backgrounding is
    // attributed to the user, not the assistant.
    let background_output: serde_json::Value = serde_json::from_str(&background).expect("json");
    assert!(background_output["backgroundTaskId"].as_str().is_some());
    assert_eq!(background_output["backgroundedByUser"], true);
    assert_eq!(background_output["assistantAutoBackgrounded"], false);
}
#[test]
fn powershell_errors_when_shell_is_missing() {
    // Take the env lock: this test rewrites PATH, which is process-global.
    let _guard = env_lock()
        .lock()
        .unwrap_or_else(std::sync::PoisonError::into_inner);
    let saved_path = std::env::var("PATH").unwrap_or_default();
    // Point PATH at a freshly created directory that contains no
    // executables, so the pwsh lookup is guaranteed to fail.
    let nanos = std::time::SystemTime::now()
        .duration_since(std::time::UNIX_EPOCH)
        .expect("time")
        .as_nanos();
    let bare_dir = std::env::temp_dir().join(format!("clawd-empty-bin-{nanos}"));
    std::fs::create_dir_all(&bare_dir).expect("create empty dir");
    std::env::set_var("PATH", bare_dir.display().to_string());
    let failure = execute_tool("PowerShell", &json!({"command": "Write-Output hello"}))
        .expect_err("PowerShell should fail when shell is missing");
    // Restore the environment before asserting so later tests see a sane PATH.
    std::env::set_var("PATH", saved_path);
    let _ = std::fs::remove_dir_all(bare_dir);
    assert!(failure.contains("PowerShell executable not found"));
}
/// Minimal one-connection-at-a-time HTTP server for tests, backed by a
/// background accept-loop thread.
struct TestServer {
    // Loopback address (ephemeral port) the listener is bound to.
    addr: SocketAddr,
    // Sending (or dropping after a send) tells the accept loop to exit;
    // taken in Drop.
    shutdown: Option<std::sync::mpsc::Sender<()>>,
    // Join handle for the accept-loop thread; taken and joined in Drop.
    handle: Option<thread::JoinHandle<()>>,
}
impl TestServer {
    /// Bind to an ephemeral loopback port and serve requests on a
    /// background thread, passing each HTTP request line to `handler`
    /// and writing back the response it returns.
    ///
    /// The listener is non-blocking so the loop can poll the shutdown
    /// channel between accepts instead of parking in `accept()` forever.
    fn spawn(handler: Arc<dyn Fn(&str) -> HttpResponse + Send + Sync + 'static>) -> Self {
        let listener = TcpListener::bind("127.0.0.1:0").expect("bind test server");
        listener
            .set_nonblocking(true)
            .expect("set nonblocking listener");
        let addr = listener.local_addr().expect("local addr");
        let (tx, rx) = std::sync::mpsc::channel::<()>();
        let handle = thread::spawn(move || loop {
            // Any message on the channel means Drop wants us to exit.
            if rx.try_recv().is_ok() {
                break;
            }
            match listener.accept() {
                Ok((mut stream, _)) => {
                    // On some platforms the accepted stream inherits the
                    // listener's non-blocking flag; force blocking mode so
                    // the read below cannot spuriously fail with WouldBlock.
                    stream
                        .set_nonblocking(false)
                        .expect("set blocking stream");
                    // Single fixed-size read: ample for the tiny test
                    // requests this server exists to answer.
                    let mut buffer = [0_u8; 4096];
                    let size = stream.read(&mut buffer).expect("read request");
                    let request = String::from_utf8_lossy(&buffer[..size]).into_owned();
                    // Only the request line (method, path, version) is
                    // handed to the handler.
                    let request_line = request.lines().next().unwrap_or_default().to_string();
                    let response = handler(&request_line);
                    stream
                        .write_all(response.to_bytes().as_slice())
                        .expect("write response");
                }
                // No pending connection: sleep briefly, then poll again.
                Err(error) if error.kind() == std::io::ErrorKind::WouldBlock => {
                    thread::sleep(Duration::from_millis(10));
                }
                Err(error) => panic!("server accept failed: {error}"),
            }
        });
        Self {
            addr,
            shutdown: Some(tx),
            handle: Some(handle),
        }
    }
    /// Address the server is listening on.
    fn addr(&self) -> SocketAddr {
        self.addr
    }
}
impl Drop for TestServer {
    /// Signal the accept loop to stop, then wait for its thread to finish
    /// so no server thread outlives the test that spawned it.
    fn drop(&mut self) {
        if let Some(sender) = self.shutdown.take() {
            // The loop may already have exited; ignore a send failure.
            drop(sender.send(()));
        }
        if let Some(worker) = self.handle.take() {
            worker.join().expect("join test server");
        }
    }
}
/// Canned HTTP/1.1 response the test server serializes and writes back.
struct HttpResponse {
    // Numeric status code, e.g. 200.
    status: u16,
    // Reason phrase paired with the status, e.g. "OK".
    reason: &'static str,
    // Value emitted for the Content-Type header.
    content_type: &'static str,
    // Response body; Content-Length is derived from its byte length.
    body: String,
}
impl HttpResponse {
    /// Shared constructor: build a response with an explicit content type.
    fn with_content_type(
        status: u16,
        reason: &'static str,
        content_type: &'static str,
        body: &str,
    ) -> Self {
        Self {
            status,
            reason,
            content_type,
            body: body.to_string(),
        }
    }
    /// Convenience constructor for an HTML response.
    fn html(status: u16, reason: &'static str, body: &str) -> Self {
        Self::with_content_type(status, reason, "text/html; charset=utf-8", body)
    }
    /// Convenience constructor for a plain-text response.
    fn text(status: u16, reason: &'static str, body: &str) -> Self {
        Self::with_content_type(status, reason, "text/plain; charset=utf-8", body)
    }
    /// Serialize as raw HTTP/1.1 bytes: status line, headers
    /// (Content-Type, Content-Length, Connection: close), then the body.
    fn to_bytes(&self) -> Vec<u8> {
        let wire = format!(
            "HTTP/1.1 {} {}\r\nContent-Type: {}\r\nContent-Length: {}\r\nConnection: close\r\n\r\n{}",
            self.status,
            self.reason,
            self.content_type,
            self.body.len(),
            self.body
        );
        wire.into_bytes()
    }
}
}