Mirror of https://github.com/aljazceru/goose.git, synced 2025-12-20 23:54:23 +01:00

feat: work with docs/xls and simple html (#1526)

Changed files:

Cargo.lock (generated): 221 lines changed
@@ -35,6 +35,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e89da841a80418a9b391ebaea17f5c112ffaaa96f621d2c285b5174da76b9011"
dependencies = [
 "cfg-if",
 "getrandom 0.2.15",
 "once_cell",
 "version_check",
 "zerocopy",

@@ -167,6 +168,9 @@ name = "arbitrary"
version = "1.4.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "dde20b3d026af13f561bdd0f15edf01fc734f0dafcedbaf42bba506a9517f223"
dependencies = [
 "derive_arbitrary",
]

[[package]]
name = "arg_enum_proc_macro"

@@ -897,6 +901,21 @@ dependencies = [
 "which",
]

[[package]]
name = "bit-set"
version = "0.8.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "08807e080ed7f9d5433fa9b275196cfc35414f66a0c79d864dc51a0d825231a3"
dependencies = [
 "bit-vec",
]

[[package]]
name = "bit-vec"
version = "0.8.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5e764a1d40d510daf35e07be9eb06e75770908c27d411ee6c92109c9840eaaf7"

[[package]]
name = "bit_field"
version = "0.10.2"

@@ -1097,6 +1116,17 @@ dependencies = [
 "nom",
]

[[package]]
name = "cfb"
version = "0.10.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d8a4f8e55be323b378facfcf1f06aa97f6ec17cf4ac84fb17325093aaf62da41"
dependencies = [
 "byteorder",
 "fnv",
 "uuid",
]

[[package]]
name = "cfg-expr"
version = "0.15.8"

@@ -1642,6 +1672,17 @@ dependencies = [
 "serde",
]

[[package]]
name = "derive_arbitrary"
version = "1.4.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "30542c1ad912e0e3d22a1935c290e12e8a29d704a420177a31faad4a601a0800"
dependencies = [
 "proc-macro2",
 "quote",
 "syn 2.0.99",
]

[[package]]
name = "derive_builder"
version = "0.20.2"

@@ -1746,6 +1787,12 @@ dependencies = [
 "const-random",
]

[[package]]
name = "doc-comment"
version = "0.3.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fea41bba32d969b513997752735605054bc0dfa92b4c56bf1189f2e174be7a10"

[[package]]
name = "document-features"
version = "0.2.11"

@@ -1755,6 +1802,21 @@ dependencies = [
 "litrs",
]

[[package]]
name = "docx-rs"
version = "0.4.17"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e593b51d4fe95d69d70fd40da4b314b029736302c986c3c760826e842fd27dc3"
dependencies = [
 "base64 0.13.1",
 "image 0.24.9",
 "serde",
 "serde_json",
 "thiserror 1.0.69",
 "xml-rs",
 "zip 0.6.6",
]

[[package]]
name = "dotenv"
version = "0.15.0"

@@ -1873,6 +1935,17 @@ dependencies = [
 "zune-inflate",
]

[[package]]
name = "fancy-regex"
version = "0.14.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6e24cb5a94bcae1e5408b0effca5cd7172ea3c5755049c5f3af4cd283a165298"
dependencies = [
 "bit-set",
 "regex-automata 0.4.9",
 "regex-syntax 0.8.5",
]

[[package]]
name = "fastrand"
version = "2.3.0"

@@ -2310,10 +2383,12 @@ dependencies = [
 "async-trait",
 "base64 0.21.7",
 "chrono",
 "docx-rs",
 "etcetera",
 "google-drive3",
 "http-body-util",
 "ignore",
 "image 0.24.9",
 "include_dir",
 "indoc",
 "kill_tree",

@@ -2329,13 +2404,13 @@ dependencies = [
 "serial_test",
 "shellexpand",
 "sysinfo 0.32.1",
 "temp-env",
 "tempfile",
 "thiserror 1.0.69",
 "tokio",
 "tracing",
 "tracing-appender",
 "tracing-subscriber",
 "umya-spreadsheet",
 "url",
 "urlencoding",
 "webbrowser",

@@ -2512,6 +2587,21 @@ dependencies = [
 "windows-sys 0.59.0",
]

[[package]]
name = "html_parser"
version = "0.7.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f6f56db07b6612644f6f7719f8ef944f75fff9d6378fdf3d316fd32194184abd"
dependencies = [
 "doc-comment",
 "pest",
 "pest_derive",
 "serde",
 "serde_derive",
 "serde_json",
 "thiserror 1.0.69",
]

[[package]]
name = "http"
version = "0.2.12"

@@ -2875,6 +2965,24 @@ dependencies = [
 "winapi-util",
]

[[package]]
name = "image"
version = "0.24.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5690139d2f55868e080017335e4b94cb7414274c74f1669c84fb5feba2c9f69d"
dependencies = [
 "bytemuck",
 "byteorder",
 "color_quant",
 "exr",
 "gif",
 "jpeg-decoder",
 "num-traits",
 "png",
 "qoi",
 "tiff",
]

[[package]]
name = "image"
version = "0.25.5"

@@ -3096,6 +3204,9 @@ name = "jpeg-decoder"
version = "0.3.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f5d4a7da358eff58addd2877a45865158f0d78c911d43a5784ceb7bbf52833b0"
dependencies = [
 "rayon",
]

[[package]]
name = "js-sys"

@@ -3280,6 +3391,12 @@ dependencies = [
 "scopeguard",
]

[[package]]
name = "lockfree-object-pool"
version = "0.1.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9374ef4228402d4b7e403e5838cb880d9ee663314b0a900d5a6aabf0c213552e"

[[package]]
name = "log"
version = "0.4.26"

@@ -4220,6 +4337,16 @@ dependencies = [
 "memchr",
]

[[package]]
name = "quick-xml"
version = "0.37.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "165859e9e55f79d67b96c5d96f4e88b6f2695a1972849c15a6a3f5c59fc2c003"
dependencies = [
 "memchr",
 "serde",
]

[[package]]
name = "quinn"
version = "0.11.6"

@@ -5459,6 +5586,12 @@ dependencies = [
 "unicode-width 0.2.0",
]

[[package]]
name = "thin-vec"
version = "0.2.13"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a38c90d48152c236a3ab59271da4f4ae63d678c5d7ad6b7714d7cb9760be5e4b"

[[package]]
name = "thiserror"
version = "1.0.69"

@@ -5499,6 +5632,12 @@ dependencies = [
 "syn 2.0.99",
]

[[package]]
name = "thousands"
version = "0.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3bf63baf9f5039dadc247375c29eb13706706cfde997d0330d05aa63a77d8820"

[[package]]
name = "thread_local"
version = "1.1.8"

@@ -5942,6 +6081,35 @@ version = "0.1.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2896d95c02a80c6d6a5d6e953d479f5ddf2dfdb6a244441010e373ac0fb88971"

[[package]]
name = "umya-spreadsheet"
version = "2.2.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "17ec15f1f191ba42ba0ed0f788999eec910c201cbbd4ae5de7cf0eb0a94b3d1a"
dependencies = [
 "aes",
 "ahash",
 "base64 0.22.1",
 "byteorder",
 "cbc",
 "cfb",
 "chrono",
 "encoding_rs",
 "fancy-regex",
 "getrandom 0.2.15",
 "hmac",
 "html_parser",
 "image 0.25.5",
 "lazy_static",
 "md-5",
 "quick-xml 0.37.2",
 "regex",
 "sha2",
 "thin-vec",
 "thousands",
 "zip 2.2.3",
]

[[package]]
name = "unicode-ident"
version = "1.0.18"

@@ -6783,7 +6951,7 @@ dependencies = [
 "core-foundation 0.10.0",
 "core-graphics",
 "dbus",
 "image",
 "image 0.25.5",
 "log",
 "percent-encoding",
 "sysinfo 0.32.1",

@@ -6803,6 +6971,12 @@ dependencies = [
 "quick-xml 0.30.0",
]

[[package]]
name = "xml-rs"
version = "0.8.25"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c5b940ebc25896e71dd073bad2dbaa2abfe97b0a391415e22ad1326d9c54e3c4"

[[package]]
name = "xmlparser"
version = "0.13.6"

@@ -6964,6 +7138,49 @@ dependencies = [
 "syn 2.0.99",
]

[[package]]
name = "zip"
version = "0.6.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "760394e246e4c28189f19d488c058bf16f564016aefac5d32bb1f3b51d5e9261"
dependencies = [
 "byteorder",
 "crc32fast",
 "crossbeam-utils",
 "flate2",
]

[[package]]
name = "zip"
version = "2.2.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b280484c454e74e5fff658bbf7df8fdbe7a07c6b2de4a53def232c15ef138f3a"
dependencies = [
 "arbitrary",
 "crc32fast",
 "crossbeam-utils",
 "displaydoc",
 "flate2",
 "indexmap 2.7.1",
 "memchr",
 "thiserror 2.0.12",
 "zopfli",
]

[[package]]
name = "zopfli"
version = "0.8.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e5019f391bac5cf252e93bbcc53d039ffd62c7bfb7c150414d61369afe57e946"
dependencies = [
 "bumpalo",
 "crc32fast",
 "lockfree-object-pool",
 "log",
 "once_cell",
 "simd-adler32",
]

[[package]]
name = "zstd"
version = "0.13.3"
@@ -8,4 +8,4 @@ version = "1.0.12"
authors = ["Block <ai-oss-tools@block.xyz>"]
license = "Apache-2.0"
repository = "https://github.com/block/goose"
description = "An AI agent"

@@ -38,9 +38,11 @@ http-body-util = "0.1.2"
regex = "1.11.1"
once_cell = "1.20.2"
ignore = "0.4"
temp-env = "0.3"
lopdf = "0.35.0"
docx-rs = "0.4.7"
image = "0.24.9"
umya-spreadsheet = "2.2.3"

[dev-dependencies]
serial_test = "3.0.0"
sysinfo = "0.32.1"

crates/goose-mcp/src/computercontroller/docx_tool.rs (new file, 871 lines)

@@ -0,0 +1,871 @@
use docx_rs::*;
use image::{self, ImageFormat};
use mcp_core::{Content, ToolError};
use std::{fs, io::Cursor};

#[derive(Debug)]
enum UpdateMode {
    Append,
    Replace {
        old_text: String,
    },
    InsertStructured {
        level: Option<String>, // e.g., "Heading1", "Heading2", etc.
        style: Option<DocxStyle>,
    },
    AddImage {
        image_path: String,
        width: Option<u32>,
        height: Option<u32>,
    },
}

#[derive(Debug, Clone, Default)]
struct DocxStyle {
    bold: bool,
    italic: bool,
    underline: bool,
    size: Option<usize>,
    color: Option<String>,
    alignment: Option<AlignmentType>,
}

impl DocxStyle {
    fn from_json(value: &serde_json::Value) -> Option<Self> {
        let obj = value.as_object()?;
        Some(Self {
            bold: obj.get("bold").and_then(|v| v.as_bool()).unwrap_or(false),
            italic: obj.get("italic").and_then(|v| v.as_bool()).unwrap_or(false),
            underline: obj
                .get("underline")
                .and_then(|v| v.as_bool())
                .unwrap_or(false),
            size: obj.get("size").and_then(|v| v.as_u64()).map(|s| s as usize),
            color: obj.get("color").and_then(|v| v.as_str()).map(String::from),
            alignment: obj
                .get("alignment")
                .and_then(|v| v.as_str())
                .and_then(|a| match a {
                    "left" => Some(AlignmentType::Left),
                    "center" => Some(AlignmentType::Center),
                    "right" => Some(AlignmentType::Right),
                    "justified" => Some(AlignmentType::Both),
                    _ => None,
                }),
        })
    }

    fn apply_to_run(&self, run: Run) -> Run {
        let mut run = run;
        if self.bold {
            run = run.bold();
        }
        if self.italic {
            run = run.italic();
        }
        if self.underline {
            run = run.underline("single");
        }
        if let Some(size) = self.size {
            run = run.size(size);
        }
        if let Some(color) = &self.color {
            run = run.color(color);
        }
        run
    }

    fn apply_to_paragraph(&self, para: Paragraph) -> Paragraph {
        let mut para = para;
        if let Some(alignment) = self.alignment {
            para = para.align(alignment);
        }
        para
    }
}

pub async fn docx_tool(
    path: &str,
    operation: &str,
    content: Option<&str>,
    params: Option<&serde_json::Value>,
) -> Result<Vec<Content>, ToolError> {
    match operation {
        "extract_text" => {
            let file = fs::read(path).map_err(|e| {
                ToolError::ExecutionError(format!("Failed to read DOCX file: {}", e))
            })?;

            let docx = read_docx(&file).map_err(|e| {
                ToolError::ExecutionError(format!("Failed to parse DOCX file: {}", e))
            })?;

            let mut text = String::new();
            let mut structure = Vec::new();
            let mut current_level = None;

            // Extract document structure and text
            for element in docx.document.children.iter() {
                if let DocumentChild::Paragraph(p) = element {
                    // Check for heading style
                    if let Some(style) = p.property.style.as_ref() {
                        if style.val.starts_with("Heading") {
                            current_level = Some(style.val.clone());
                            structure.push(format!("{}: ", style.val));
                        }
                    }

                    // Extract text from runs
                    let para_text: String = p
                        .children
                        .iter()
                        .filter_map(|child| {
                            if let ParagraphChild::Run(run) = child {
                                Some(
                                    run.children
                                        .iter()
                                        .filter_map(|rc| {
                                            if let RunChild::Text(t) = rc {
                                                Some(t.text.clone())
                                            } else {
                                                None
                                            }
                                        })
                                        .collect::<Vec<_>>()
                                        .join(""),
                                )
                            } else {
                                None
                            }
                        })
                        .collect::<Vec<_>>()
                        .join("");

                    if !para_text.trim().is_empty() {
                        if current_level.is_some() {
                            if let Some(s) = structure.last_mut() {
                                s.push_str(&para_text);
                            }
                            current_level = None;
                        }
                        text.push_str(&para_text);
                        text.push('\n');
                    }
                }
            }

            let result = if !structure.is_empty() {
                format!(
                    "Document Structure:\n{}\n\nFull Text:\n{}",
                    structure.join("\n"),
                    text
                )
            } else {
                format!("Extracted Text:\n{}", text)
            };

            Ok(vec![Content::text(result)])
        }

        "update_doc" => {
            let content = content.ok_or_else(|| {
                ToolError::InvalidParameters(
                    "Content parameter required for update_doc".to_string(),
                )
            })?;

            // Parse update mode and style from params
            let (mode, style) = if let Some(params) = params {
                let mode = params
                    .get("mode")
                    .and_then(|v| v.as_str())
                    .unwrap_or("append");
                let style = params.get("style").and_then(DocxStyle::from_json);

                let mode = match mode {
                    "append" => UpdateMode::Append,
                    "replace" => {
                        let old_text =
                            params
                                .get("old_text")
                                .and_then(|v| v.as_str())
                                .ok_or_else(|| {
                                    ToolError::InvalidParameters(
                                        "old_text parameter required for replace mode".to_string(),
                                    )
                                })?;
                        UpdateMode::Replace {
                            old_text: old_text.to_string(),
                        }
                    }
                    "structured" => {
                        let level = params
                            .get("level")
                            .and_then(|v| v.as_str())
                            .map(String::from);
                        UpdateMode::InsertStructured {
                            level,
                            style: style.clone(),
                        }
                    }
                    "add_image" => {
                        let image_path = params
                            .get("image_path")
                            .and_then(|v| v.as_str())
                            .ok_or_else(|| {
                                ToolError::InvalidParameters(
                                    "image_path parameter required for add_image mode".to_string(),
                                )
                            })?
                            .to_string();

                        let width = params
                            .get("width")
                            .and_then(|v| v.as_u64())
                            .map(|w| w as u32);

                        let height = params
                            .get("height")
                            .and_then(|v| v.as_u64())
                            .map(|h| h as u32);

                        UpdateMode::AddImage {
                            image_path,
                            width,
                            height,
                        }
                    }
                    _ => return Err(ToolError::InvalidParameters(
                        "Invalid mode. Must be 'append', 'replace', 'structured', or 'add_image'"
                            .to_string(),
                    )),
                };
                (mode, style)
            } else {
                (UpdateMode::Append, None)
            };

            match mode {
                UpdateMode::Append => {
                    // Read existing document if it exists, or create new one
                    let mut doc = if std::path::Path::new(path).exists() {
                        let file = fs::read(path).map_err(|e| {
                            ToolError::ExecutionError(format!("Failed to read DOCX file: {}", e))
                        })?;
                        read_docx(&file).map_err(|e| {
                            ToolError::ExecutionError(format!("Failed to parse DOCX file: {}", e))
                        })?
                    } else {
                        Docx::new()
                    };

                    // Split content into paragraphs and add them
                    for para in content.split('\n') {
                        if !para.trim().is_empty() {
                            let mut run = Run::new().add_text(para);
                            let mut paragraph = Paragraph::new();

                            if let Some(style) = &style {
                                run = style.apply_to_run(run);
                                paragraph = style.apply_to_paragraph(paragraph);
                            }

                            doc = doc.add_paragraph(paragraph.add_run(run));
                        }
                    }

                    let mut buf = Vec::new();
                    {
                        let mut cursor = Cursor::new(&mut buf);
                        doc.build().pack(&mut cursor).map_err(|e| {
                            ToolError::ExecutionError(format!("Failed to build DOCX: {}", e))
                        })?;
                    }

                    fs::write(path, &buf).map_err(|e| {
                        ToolError::ExecutionError(format!("Failed to write DOCX file: {}", e))
                    })?;

                    Ok(vec![Content::text(format!(
                        "Successfully wrote content to {}",
                        path
                    ))])
                }

                UpdateMode::Replace { old_text } => {
                    // Read existing document
                    let file = fs::read(path).map_err(|e| {
                        ToolError::ExecutionError(format!("Failed to read DOCX file: {}", e))
                    })?;

                    let docx = read_docx(&file).map_err(|e| {
                        ToolError::ExecutionError(format!("Failed to parse DOCX file: {}", e))
                    })?;

                    let mut new_doc = Docx::new();
                    let mut found_text = false;

                    // Process each paragraph
                    for element in docx.document.children.iter() {
                        if let DocumentChild::Paragraph(p) = element {
                            let para_text: String = p
                                .children
                                .iter()
                                .filter_map(|child| {
                                    if let ParagraphChild::Run(run) = child {
                                        Some(
                                            run.children
                                                .iter()
                                                .filter_map(|rc| {
                                                    if let RunChild::Text(t) = rc {
                                                        Some(t.text.clone())
                                                    } else {
                                                        None
                                                    }
                                                })
                                                .collect::<Vec<_>>()
                                                .join(""),
                                        )
                                    } else {
                                        None
                                    }
                                })
                                .collect::<Vec<_>>()
                                .join("");

                            if para_text.contains(&old_text) {
                                // Replace this paragraph with new content
                                found_text = true;
                                for para in content.split('\n') {
                                    if !para.trim().is_empty() {
                                        let mut run = Run::new().add_text(para);
                                        let mut paragraph = Paragraph::new();

                                        if let Some(style) = &style {
                                            run = style.apply_to_run(run);
                                            paragraph = style.apply_to_paragraph(paragraph);
                                        }

                                        new_doc = new_doc.add_paragraph(paragraph.add_run(run));
                                    }
                                }
                            } else {
                                // Create a new paragraph with the same content and style
                                let mut para = Paragraph::new();
                                if let Some(style) = &p.property.style {
                                    para = para.style(&style.val);
                                }
                                for child in p.children.iter() {
                                    if let ParagraphChild::Run(run) = child {
                                        for rc in run.children.iter() {
                                            if let RunChild::Text(t) = rc {
                                                para = para.add_run(Run::new().add_text(&t.text));
                                            }
                                        }
                                    }
                                }
                                new_doc = new_doc.add_paragraph(para);
                            }
                        }
                    }

                    if !found_text {
                        return Err(ToolError::ExecutionError(format!(
                            "Could not find text to replace: {}",
                            old_text
                        )));
                    }

                    let mut buf = Vec::new();
                    {
                        let mut cursor = Cursor::new(&mut buf);
                        new_doc.build().pack(&mut cursor).map_err(|e| {
                            ToolError::ExecutionError(format!("Failed to build DOCX: {}", e))
                        })?;
                    }

                    fs::write(path, &buf).map_err(|e| {
                        ToolError::ExecutionError(format!("Failed to write DOCX file: {}", e))
                    })?;

                    Ok(vec![Content::text(format!(
                        "Successfully replaced content in {}",
                        path
                    ))])
                }

                UpdateMode::InsertStructured { level, style } => {
                    let mut doc = if std::path::Path::new(path).exists() {
                        let file = fs::read(path).map_err(|e| {
                            ToolError::ExecutionError(format!("Failed to read DOCX file: {}", e))
                        })?;
                        read_docx(&file).map_err(|e| {
                            ToolError::ExecutionError(format!("Failed to parse DOCX file: {}", e))
                        })?
                    } else {
                        Docx::new()
                    };

                    // Create the paragraph with heading style if specified
                    for para in content.split('\n') {
                        if !para.trim().is_empty() {
                            let mut run = Run::new().add_text(para);
                            let mut paragraph = Paragraph::new();

                            // Apply heading style if specified
                            if let Some(level) = &level {
                                paragraph = paragraph.style(level);
                            }

                            // Apply custom style if specified
                            if let Some(style) = &style {
                                run = style.apply_to_run(run);
                                paragraph = style.apply_to_paragraph(paragraph);
                            }

                            doc = doc.add_paragraph(paragraph.add_run(run));
                        }
                    }

                    let mut buf = Vec::new();
                    {
                        let mut cursor = Cursor::new(&mut buf);
                        doc.build().pack(&mut cursor).map_err(|e| {
                            ToolError::ExecutionError(format!("Failed to build DOCX: {}", e))
                        })?;
                    }

                    fs::write(path, &buf).map_err(|e| {
                        ToolError::ExecutionError(format!("Failed to write DOCX file: {}", e))
                    })?;

                    Ok(vec![Content::text(format!(
                        "Successfully added structured content to {}",
                        path
                    ))])
                }

                UpdateMode::AddImage {
                    image_path,
                    width,
                    height,
                } => {
                    let mut doc = if std::path::Path::new(path).exists() {
                        let file = fs::read(path).map_err(|e| {
                            ToolError::ExecutionError(format!("Failed to read DOCX file: {}", e))
                        })?;
                        read_docx(&file).map_err(|e| {
                            ToolError::ExecutionError(format!("Failed to parse DOCX file: {}", e))
                        })?
                    } else {
                        Docx::new()
                    };

                    // Read the image file
                    let image_data = fs::read(&image_path).map_err(|e| {
                        ToolError::ExecutionError(format!("Failed to read image file: {}", e))
                    })?;

                    // Get image format and extension
                    let extension = std::path::Path::new(&image_path)
                        .extension()
                        .and_then(|e| e.to_str())
                        .ok_or_else(|| {
                            ToolError::ExecutionError("Invalid image file extension".to_string())
                        })?
                        .to_lowercase();

                    // Convert to PNG if not already PNG
                    let image_data = if extension != "png" {
                        // Try to convert to PNG using the image crate
                        let img = image::load_from_memory(&image_data).map_err(|e| {
                            ToolError::ExecutionError(format!("Failed to load image: {}", e))
                        })?;
                        let mut png_data = Vec::new();
                        img.write_to(&mut Cursor::new(&mut png_data), ImageFormat::Png)
                            .map_err(|e| {
                                ToolError::ExecutionError(format!(
                                    "Failed to convert image to PNG: {}",
                                    e
                                ))
                            })?;
                        png_data
                    } else {
                        image_data
                    };

                    // Add optional caption if provided
                    if !content.trim().is_empty() {
                        let mut caption = Paragraph::new();
                        if let Some(style) = &style {
                            caption = style.apply_to_paragraph(caption);
                            caption =
                                caption.add_run(style.apply_to_run(Run::new().add_text(content)));
                        } else {
                            caption = caption.add_run(Run::new().add_text(content));
                        }
                        doc = doc.add_paragraph(caption);
                    }

                    // Create a paragraph with the image
                    let mut paragraph = Paragraph::new();
                    if let Some(style) = &style {
                        paragraph = style.apply_to_paragraph(paragraph);
                    }

                    // Create and add the image
                    let mut pic = Pic::new(&image_data);
                    if let (Some(w), Some(h)) = (width, height) {
                        pic = pic.size(w, h);
                    }

                    paragraph = paragraph.add_run(Run::new().add_image(pic));
                    doc = doc.add_paragraph(paragraph);

                    let mut buf = Vec::new();
                    {
                        let mut cursor = Cursor::new(&mut buf);
                        doc.build().pack(&mut cursor).map_err(|e| {
                            ToolError::ExecutionError(format!("Failed to build DOCX: {}", e))
                        })?;
                    }

                    fs::write(path, &buf).map_err(|e| {
                        ToolError::ExecutionError(format!("Failed to write DOCX file: {}", e))
                    })?;

                    Ok(vec![Content::text(format!(
                        "Successfully added image to {}",
                        path
                    ))])
                }
            }
        }

        _ => Err(ToolError::InvalidParameters(format!(
            "Invalid operation: {}. Valid operations are: 'extract_text', 'update_doc'",
            operation
        ))),
    }
}

#[cfg(test)]
mod tests {
    use super::*;
    use serde_json::json;
    use std::path::PathBuf;

    #[tokio::test]
    async fn test_docx_text_extraction() {
        let test_docx_path = PathBuf::from(env!("CARGO_MANIFEST_DIR"))
            .join("src/computercontroller/tests/data/sample.docx");

        println!("Testing text extraction from: {}", test_docx_path.display());

        let result = docx_tool(test_docx_path.to_str().unwrap(), "extract_text", None, None).await;

        assert!(result.is_ok(), "DOCX text extraction should succeed");
        let content = result.unwrap();
        assert!(!content.is_empty(), "Extracted text should not be empty");
        let text = content[0].as_text().unwrap();
        println!("Extracted text:\n{}", text);
        assert!(
            !text.trim().is_empty(),
            "Extracted text should not be empty"
        );
    }

    #[tokio::test]
    async fn test_docx_update_append() {
        let test_output_path = PathBuf::from(env!("CARGO_MANIFEST_DIR"))
            .join("src/computercontroller/tests/data/test_output.docx");

        let test_content =
            "Test Heading\nThis is a test paragraph.\n\nAnother paragraph with some content.";

        let result = docx_tool(
            test_output_path.to_str().unwrap(),
            "update_doc",
            Some(test_content),
            None,
        )
        .await;

        assert!(result.is_ok(), "DOCX update should succeed");
        assert!(test_output_path.exists(), "Output file should exist");

        // Now try to read it back
        let result = docx_tool(
            test_output_path.to_str().unwrap(),
            "extract_text",
            None,
            None,
        )
        .await;
        assert!(
            result.is_ok(),
            "Should be able to read back the written file"
        );
        let content = result.unwrap();
        let text = content[0].as_text().unwrap();
        assert!(
            text.contains("Test Heading"),
            "Should contain written content"
        );
        assert!(
            text.contains("test paragraph"),
            "Should contain written content"
        );

        // Clean up
        fs::remove_file(test_output_path).unwrap();
    }

    #[tokio::test]
    async fn test_docx_update_styled() {
        let test_output_path = PathBuf::from(env!("CARGO_MANIFEST_DIR"))
            .join("src/computercontroller/tests/data/test_styled.docx");

        let test_content = "Styled Heading\nThis is a styled paragraph.";
        let params = json!({
            "mode": "structured",
            "level": "Heading1",
            "style": {
                "bold": true,
                "color": "FF0000",
                "size": 24,
                "alignment": "center"
            }
        });

        let result = docx_tool(
            test_output_path.to_str().unwrap(),
            "update_doc",
            Some(test_content),
            Some(&params),
        )
        .await;

        assert!(result.is_ok(), "DOCX styled update should succeed");
        assert!(test_output_path.exists(), "Output file should exist");

        // Clean up
        fs::remove_file(test_output_path).unwrap();
    }

    #[tokio::test]
    async fn test_docx_update_replace() {
        let test_output_path = PathBuf::from(env!("CARGO_MANIFEST_DIR"))
            .join("src/computercontroller/tests/data/test_replace.docx");

        // First create a document
        let initial_content = "Original content\nThis should be replaced.\nKeep this text.";
        let _ = docx_tool(
            test_output_path.to_str().unwrap(),
            "update_doc",
            Some(initial_content),
            None,
        )
        .await;

        // Now replace part of it
        let replacement = "New content here";
        let params = json!({
            "mode": "replace",
            "old_text": "This should be replaced",
            "style": {
                "italic": true
            }
        });

        let result = docx_tool(
            test_output_path.to_str().unwrap(),
            "update_doc",
            Some(replacement),
            Some(&params),
        )
        .await;

        assert!(result.is_ok(), "DOCX replace should succeed");

        // Verify the content
        let result = docx_tool(
            test_output_path.to_str().unwrap(),
            "extract_text",
            None,
            None,
        )
        .await;
        assert!(result.is_ok());
        let content = result.unwrap();
        let text = content[0].as_text().unwrap();
        assert!(
            text.contains("New content here"),
            "Should contain new content"
        );
        assert!(
            text.contains("Keep this text"),
            "Should keep unmodified content"
        );
        assert!(
            !text.contains("This should be replaced"),
            "Should not contain replaced text"
        );

        // Clean up
        fs::remove_file(test_output_path).unwrap();
    }

    #[tokio::test]
    async fn test_docx_add_image() {
        let test_output_path = PathBuf::from(env!("CARGO_MANIFEST_DIR"))
            .join("src/computercontroller/tests/data/test_image.docx");

        // Create a test image file
        let test_image_path = PathBuf::from(env!("CARGO_MANIFEST_DIR"))
            .join("src/computercontroller/tests/data/test_image.png");

        // Create a simple test PNG image using the image crate
        let imgbuf = image::ImageBuffer::from_fn(32, 32, |x, y| {
            let dx = x as f32 - 16.0;
            let dy = y as f32 - 16.0;
            if dx * dx + dy * dy < 16.0 * 16.0 {
                image::Rgb([0u8, 0u8, 255u8]) // Blue circle
            } else {
                image::Rgb([255u8, 255u8, 255u8]) // White background
            }
        });
        imgbuf
            .save(&test_image_path)
            .expect("Failed to create test image");

        let params = json!({
            "mode": "add_image",
            "image_path": test_image_path.to_str().unwrap(),
            "width": 100,
            "height": 100,
            "style": {
                "alignment": "center"
            }
        });

        let result = docx_tool(
            test_output_path.to_str().unwrap(),
            "update_doc",
            Some("Image Caption"),
            Some(&params),
        )
        .await;

        assert!(result.is_ok(), "DOCX image addition should succeed");
        assert!(test_output_path.exists(), "Output file should exist");

        // Clean up
        fs::remove_file(test_output_path).unwrap();
        fs::remove_file(test_image_path).unwrap();
    }

    #[tokio::test]
    async fn test_docx_invalid_path() {
        let result = docx_tool("nonexistent.docx", "extract_text", None, None).await;
        assert!(result.is_err(), "Should fail with invalid path");
    }

    #[tokio::test]
    async fn test_docx_invalid_operation() {
        let test_docx_path = PathBuf::from(env!("CARGO_MANIFEST_DIR"))
            .join("src/computercontroller/tests/data/sample.docx");

        let result = docx_tool(
            test_docx_path.to_str().unwrap(),
            "invalid_operation",
            None,
            None,
        )
        .await;

        assert!(result.is_err(), "Should fail with invalid operation");
    }

    #[tokio::test]
    async fn test_docx_update_without_content() {
        let test_output_path = PathBuf::from(env!("CARGO_MANIFEST_DIR"))
            .join("src/computercontroller/tests/data/test_output.docx");

        let result = docx_tool(test_output_path.to_str().unwrap(), "update_doc", None, None).await;

        assert!(result.is_err(), "Should fail without content");
    }

    #[tokio::test]
    async fn test_docx_update_preserve_content() {
        let test_output_path = PathBuf::from(env!("CARGO_MANIFEST_DIR"))
            .join("src/computercontroller/tests/data/test_preserve.docx");

        // First create a document with initial content
        let initial_content =
            "Initial content\nThis is the first paragraph.\nThis should stay in the document.";
        let result = docx_tool(
            test_output_path.to_str().unwrap(),
            "update_doc",
            Some(initial_content),
            None,
        )
        .await;
        assert!(result.is_ok(), "Initial document creation should succeed");

        // Now append new content
        let new_content = "New content\nThis is an additional paragraph.";
        let params = json!({
            "mode": "append",
            "style": {
                "bold": true
            }
        });

        let result = docx_tool(
            test_output_path.to_str().unwrap(),
            "update_doc",
            Some(new_content),
            Some(&params),
        )
        .await;
        assert!(result.is_ok(), "Content append should succeed");

        // Verify both old and new content exists
        let result = docx_tool(
            test_output_path.to_str().unwrap(),
            "extract_text",
            None,
            None,
        )
        .await;
        assert!(result.is_ok());
        let content = result.unwrap();
        let text = content[0].as_text().unwrap();

        // Check for initial content
        assert!(
            text.contains("Initial content"),
            "Should contain initial content"
        );
        assert!(
            text.contains("first paragraph"),
            "Should contain first paragraph"
        );
        assert!(
            text.contains("should stay in the document"),
            "Should preserve existing content"
        );

        // Check for new content
        assert!(text.contains("New content"), "Should contain new content");
        assert!(
            text.contains("additional paragraph"),
            "Should contain appended paragraph"
        );

        // Clean up
        fs::remove_file(test_output_path).unwrap();
    }
}
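A minimal usage sketch (not part of the commit; the file path and values are made up) showing how the docx_tool helper above can be driven, much as the tests do:

// Hypothetical sketch only: append a bold, centered Heading1 line to a placeholder file.
async fn example() -> Result<(), mcp_core::ToolError> {
    let params = serde_json::json!({
        "mode": "structured",
        "level": "Heading1",
        "style": { "bold": true, "alignment": "center" }
    });
    let _output = docx_tool("report.docx", "update_doc", Some("Quarterly Summary"), Some(&params)).await?;
    Ok(())
}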
@@ -19,7 +19,10 @@ use mcp_core::{
use mcp_server::router::CapabilitiesBuilder;
use mcp_server::Router;

mod docx_tool;
mod pdf_tool;
mod presentation_tool;
mod xlsx_tool;

mod platform;
use platform::{create_system_automation, SystemAutomation};

@@ -261,6 +264,205 @@ impl ComputerControllerRouter {
            }),
        );

        let docx_tool = Tool::new(
            "docx_tool",
            indoc! {r#"
                Process DOCX files to extract text and create/update documents.
                Supports operations:
                - extract_text: Extract all text content and structure (headings, TOC) from the DOCX
                - update_doc: Create a new DOCX or update existing one with provided content
                Modes:
                - append: Add content to end of document (default)
                - replace: Replace specific text with new content
                - structured: Add content with specific heading level and styling
                - add_image: Add an image to the document (with optional caption)

                Use this when there is a .docx file that needs to be processed or created.
            "#},
            json!({
                "type": "object",
                "required": ["path", "operation"],
                "properties": {
                    "path": {
                        "type": "string",
                        "description": "Path to the DOCX file"
                    },
                    "operation": {
                        "type": "string",
                        "enum": ["extract_text", "update_doc"],
                        "description": "Operation to perform on the DOCX"
                    },
                    "content": {
                        "type": "string",
                        "description": "Content to write (required for update_doc operation)"
                    },
                    "params": {
                        "type": "object",
                        "description": "Additional parameters for update_doc operation",
                        "properties": {
                            "mode": {
                                "type": "string",
                                "enum": ["append", "replace", "structured", "add_image"],
                                "description": "Update mode (default: append)"
                            },
                            "old_text": {
                                "type": "string",
                                "description": "Text to replace (required for replace mode)"
                            },
                            "level": {
                                "type": "string",
                                "description": "Heading level for structured mode (e.g., 'Heading1', 'Heading2')"
                            },
                            "image_path": {
                                "type": "string",
                                "description": "Path to the image file (required for add_image mode)"
                            },
                            "width": {
                                "type": "integer",
                                "description": "Image width in pixels (optional)"
                            },
                            "height": {
                                "type": "integer",
                                "description": "Image height in pixels (optional)"
                            },
                            "style": {
                                "type": "object",
                                "description": "Styling options for the text",
                                "properties": {
                                    "bold": {
                                        "type": "boolean",
                                        "description": "Make text bold"
                                    },
                                    "italic": {
                                        "type": "boolean",
                                        "description": "Make text italic"
                                    },
                                    "underline": {
                                        "type": "boolean",
                                        "description": "Make text underlined"
                                    },
                                    "size": {
                                        "type": "integer",
                                        "description": "Font size in points"
                                    },
                                    "color": {
                                        "type": "string",
                                        "description": "Text color in hex format (e.g., 'FF0000' for red)"
                                    },
                                    "alignment": {
                                        "type": "string",
                                        "enum": ["left", "center", "right", "justified"],
                                        "description": "Text alignment"
                                    }
                                }
                            }
                        }
                    }
                }
            }),
        );

        let make_presentation_tool = Tool::new(
            "make_presentation",
            indoc! {r#"
                Create and manage HTML presentations with a simple, modern design.
                Operations:
                - create: Create new presentation with template
                - add_slide: Add a new slide with content

                Open in a browser (using a command) to show the user: open <path>

                For advanced edits, use developer tools to modify the HTML directly.
                A template slide is included in comments for reference.
            "#},
            json!({
                "type": "object",
                "required": ["path", "operation"],
                "properties": {
                    "path": {
                        "type": "string",
                        "description": "Path to the presentation file"
                    },
                    "operation": {
                        "type": "string",
                        "enum": ["create", "add_slide"],
                        "description": "Operation to perform"
                    },
                    "params": {
                        "type": "object",
                        "description": "Parameters for add_slide operation",
                        "properties": {
                            "content": {
                                "type": "string",
                                "description": "Content for the new slide"
                            }
                        }
                    }
                }
            }),
        );

        let xlsx_tool = Tool::new(
            "xlsx_tool",
            indoc! {r#"
                Process Excel (XLSX) files to read and manipulate spreadsheet data.
                Supports operations:
                - list_worksheets: List all worksheets in the workbook (returns name, index, column_count, row_count)
                - get_columns: Get column names from a worksheet (returns values from the first row)
                - get_range: Get values and formulas from a cell range (e.g., "A1:C10") (returns a 2D array organized as [row][column])
                - find_text: Search for text in a worksheet (returns a list of (row, column) coordinates)
                - update_cell: Update a single cell's value (returns confirmation message)
                - get_cell: Get value and formula from a specific cell (returns both value and formula if present)
                - save: Save changes back to the file (returns confirmation message)

                Use this when working with Excel spreadsheets to analyze or modify data.
            "#},
            json!({
                "type": "object",
                "required": ["path", "operation"],
                "properties": {
                    "path": {
                        "type": "string",
                        "description": "Path to the XLSX file"
                    },
                    "operation": {
                        "type": "string",
                        "enum": ["list_worksheets", "get_columns", "get_range", "find_text", "update_cell", "get_cell", "save"],
                        "description": "Operation to perform on the XLSX file"
                    },
                    "worksheet": {
                        "type": "string",
                        "description": "Worksheet name (if not provided, uses first worksheet)"
                    },
                    "range": {
                        "type": "string",
                        "description": "Cell range in A1 notation (e.g., 'A1:C10') for get_range operation"
                    },
                    "search_text": {
                        "type": "string",
                        "description": "Text to search for in find_text operation"
                    },
                    "case_sensitive": {
                        "type": "boolean",
                        "default": false,
                        "description": "Whether search should be case-sensitive"
                    },
                    "row": {
                        "type": "integer",
                        "description": "Row number for update_cell and get_cell operations"
                    },
                    "col": {
                        "type": "integer",
                        "description": "Column number for update_cell and get_cell operations"
                    },
                    "value": {
                        "type": "string",
                        "description": "New value for update_cell operation"
                    }
                }
            }),
        );
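For orientation, a call against the docx_tool schema registered above might carry arguments shaped like the following sketch (the path and text are placeholders, not part of the commit):

// Illustrative arguments object only; values are hypothetical.
let example_args = serde_json::json!({
    "path": "/tmp/notes.docx",
    "operation": "update_doc",
    "content": "Release Notes",
    "params": {
        "mode": "structured",
        "level": "Heading1",
        "style": { "bold": true, "size": 28, "alignment": "center" }
    }
});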
        // choose_app_strategy().cache_dir()
        // - macOS/Linux: ~/.cache/goose/computer_controller/
        // - Windows: ~\AppData\Local\Block\goose\cache\computer_controller\

@@ -389,6 +591,9 @@ impl ComputerControllerRouter {
                computer_control_tool,
                cache_tool,
                pdf_tool,
                docx_tool,
                xlsx_tool,
                make_presentation_tool,
            ],
            cache_dir,
            active_resources: Arc::new(Mutex::new(HashMap::new())),

@@ -682,7 +887,187 @@ impl ComputerControllerRouter {
        Ok(vec![Content::text(result)])
    }

    async fn xlsx_tool(&self, params: Value) -> Result<Vec<Content>, ToolError> {
        let path = params
            .get("path")
            .and_then(|v| v.as_str())
            .ok_or_else(|| ToolError::InvalidParameters("Missing 'path' parameter".into()))?;

        let operation = params
            .get("operation")
            .and_then(|v| v.as_str())
            .ok_or_else(|| ToolError::InvalidParameters("Missing 'operation' parameter".into()))?;

        match operation {
            "list_worksheets" => {
                let xlsx = xlsx_tool::XlsxTool::new(path)
                    .map_err(|e| ToolError::ExecutionError(e.to_string()))?;
                let worksheets = xlsx
                    .list_worksheets()
                    .map_err(|e| ToolError::ExecutionError(e.to_string()))?;
                Ok(vec![Content::text(format!("{:#?}", worksheets))])
            }
            "get_columns" => {
                let xlsx = xlsx_tool::XlsxTool::new(path)
                    .map_err(|e| ToolError::ExecutionError(e.to_string()))?;
                let worksheet = if let Some(name) = params.get("worksheet").and_then(|v| v.as_str())
                {
                    xlsx.get_worksheet_by_name(name)
                        .map_err(|e| ToolError::ExecutionError(e.to_string()))?
                } else {
                    xlsx.get_worksheet_by_index(0)
                        .map_err(|e| ToolError::ExecutionError(e.to_string()))?
                };
                let columns = xlsx
                    .get_column_names(worksheet)
                    .map_err(|e| ToolError::ExecutionError(e.to_string()))?;
                Ok(vec![Content::text(format!("{:#?}", columns))])
            }
            "get_range" => {
                let range = params
                    .get("range")
                    .and_then(|v| v.as_str())
                    .ok_or_else(|| {
                        ToolError::InvalidParameters("Missing 'range' parameter".into())
                    })?;

                let xlsx = xlsx_tool::XlsxTool::new(path)
                    .map_err(|e| ToolError::ExecutionError(e.to_string()))?;
                let worksheet = if let Some(name) = params.get("worksheet").and_then(|v| v.as_str())
                {
                    xlsx.get_worksheet_by_name(name)
                        .map_err(|e| ToolError::ExecutionError(e.to_string()))?
                } else {
                    xlsx.get_worksheet_by_index(0)
                        .map_err(|e| ToolError::ExecutionError(e.to_string()))?
                };
                let range_data = xlsx
                    .get_range(worksheet, range)
                    .map_err(|e| ToolError::ExecutionError(e.to_string()))?;
                Ok(vec![Content::text(format!("{:#?}", range_data))])
            }
            "find_text" => {
                let search_text = params
                    .get("search_text")
                    .and_then(|v| v.as_str())
                    .ok_or_else(|| {
                        ToolError::InvalidParameters("Missing 'search_text' parameter".into())
                    })?;

                let case_sensitive = params
                    .get("case_sensitive")
                    .and_then(|v| v.as_bool())
                    .unwrap_or(false);

                let xlsx = xlsx_tool::XlsxTool::new(path)
                    .map_err(|e| ToolError::ExecutionError(e.to_string()))?;
                let worksheet = if let Some(name) = params.get("worksheet").and_then(|v| v.as_str())
                {
                    xlsx.get_worksheet_by_name(name)
                        .map_err(|e| ToolError::ExecutionError(e.to_string()))?
                } else {
                    xlsx.get_worksheet_by_index(0)
                        .map_err(|e| ToolError::ExecutionError(e.to_string()))?
                };
                let matches = xlsx
                    .find_in_worksheet(worksheet, search_text, case_sensitive)
                    .map_err(|e| ToolError::ExecutionError(e.to_string()))?;
                Ok(vec![Content::text(format!(
                    "Found matches at: {:#?}",
                    matches
                ))])
            }
            "update_cell" => {
                let row = params.get("row").and_then(|v| v.as_u64()).ok_or_else(|| {
                    ToolError::InvalidParameters("Missing 'row' parameter".into())
                })?;

                let col = params.get("col").and_then(|v| v.as_u64()).ok_or_else(|| {
                    ToolError::InvalidParameters("Missing 'col' parameter".into())
                })?;

                let value = params
                    .get("value")
                    .and_then(|v| v.as_str())
                    .ok_or_else(|| {
                        ToolError::InvalidParameters("Missing 'value' parameter".into())
                    })?;

                let worksheet_name = params
                    .get("worksheet")
                    .and_then(|v| v.as_str())
                    .unwrap_or("Sheet1");

                let mut xlsx = xlsx_tool::XlsxTool::new(path)
                    .map_err(|e| ToolError::ExecutionError(e.to_string()))?;
                xlsx.update_cell(worksheet_name, row as u32, col as u32, value)
                    .map_err(|e| ToolError::ExecutionError(e.to_string()))?;
                xlsx.save(path)
                    .map_err(|e| ToolError::ExecutionError(e.to_string()))?;
                Ok(vec![Content::text(format!(
                    "Updated cell ({}, {}) to '{}' in worksheet '{}'",
                    row, col, value, worksheet_name
                ))])
            }
            "save" => {
                let xlsx = xlsx_tool::XlsxTool::new(path)
                    .map_err(|e| ToolError::ExecutionError(e.to_string()))?;
                xlsx.save(path)
                    .map_err(|e| ToolError::ExecutionError(e.to_string()))?;
                Ok(vec![Content::text("File saved successfully.")])
            }
            "get_cell" => {
                let row = params.get("row").and_then(|v| v.as_u64()).ok_or_else(|| {
                    ToolError::InvalidParameters("Missing 'row' parameter".into())
                })?;

                let col = params.get("col").and_then(|v| v.as_u64()).ok_or_else(|| {
                    ToolError::InvalidParameters("Missing 'col' parameter".into())
                })?;

                let xlsx = xlsx_tool::XlsxTool::new(path)
                    .map_err(|e| ToolError::ExecutionError(e.to_string()))?;
                let worksheet = if let Some(name) = params.get("worksheet").and_then(|v| v.as_str())
                {
                    xlsx.get_worksheet_by_name(name)
                        .map_err(|e| ToolError::ExecutionError(e.to_string()))?
                } else {
                    xlsx.get_worksheet_by_index(0)
                        .map_err(|e| ToolError::ExecutionError(e.to_string()))?
                };
                let cell_value = xlsx
                    .get_cell_value(worksheet, row as u32, col as u32)
                    .map_err(|e| ToolError::ExecutionError(e.to_string()))?;
                Ok(vec![Content::text(format!("{:#?}", cell_value))])
            }
            _ => Err(ToolError::InvalidParameters(format!(
                "Invalid operation: {}",
                operation
            ))),
        }
    }
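As a hedged sketch (placeholder file name and values; the indexing base is only assumed here), an update_cell request handled by the xlsx_tool method above would be shaped like this:

// Illustrative arguments only.
let example_args = serde_json::json!({
    "path": "/tmp/budget.xlsx",
    "operation": "update_cell",
    "worksheet": "Sheet1",
    "row": 2,
    "col": 3,
    "value": "42"
});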
    // Implement cache tool functionality
    async fn docx_tool(&self, params: Value) -> Result<Vec<Content>, ToolError> {
        let path = params
            .get("path")
            .and_then(|v| v.as_str())
            .ok_or_else(|| ToolError::InvalidParameters("Missing 'path' parameter".into()))?;

        let operation = params
            .get("operation")
            .and_then(|v| v.as_str())
            .ok_or_else(|| ToolError::InvalidParameters("Missing 'operation' parameter".into()))?;

        crate::computercontroller::docx_tool::docx_tool(
            path,
            operation,
            params.get("content").and_then(|v| v.as_str()),
            params.get("params"),
        )
        .await
    }

    async fn pdf_tool(&self, params: Value) -> Result<Vec<Content>, ToolError> {
        let path = params
            .get("path")

@@ -809,6 +1194,26 @@ impl Router for ComputerControllerRouter {
                "computer_control" => this.computer_control(arguments).await,
                "cache" => this.cache(arguments).await,
                "pdf_tool" => this.pdf_tool(arguments).await,
                "docx_tool" => this.docx_tool(arguments).await,
                "xlsx_tool" => this.xlsx_tool(arguments).await,
                "make_presentation" => {
                    let path = arguments
                        .get("path")
                        .and_then(|v| v.as_str())
                        .ok_or_else(|| {
                            ToolError::InvalidParameters("Missing 'path' parameter".into())
                        })?;

                    let operation = arguments
                        .get("operation")
                        .and_then(|v| v.as_str())
                        .ok_or_else(|| {
                            ToolError::InvalidParameters("Missing 'operation' parameter".into())
                        })?;

                    presentation_tool::make_presentation(path, operation, arguments.get("params"))
                        .await
                }
                _ => Err(ToolError::NotFound(format!("Tool {} not found", tool_name))),
            }
        })
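Similarly, a make_presentation call routed through the dispatch above could look like this sketch (the path and slide text are placeholders):

// Illustrative arguments for the make_presentation branch only.
let example_args = serde_json::json!({
    "path": "/tmp/demo_slides.html",
    "operation": "add_slide",
    "params": { "content": "Roadmap" }
});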
398
crates/goose-mcp/src/computercontroller/presentation_tool.rs
Normal file
398
crates/goose-mcp/src/computercontroller/presentation_tool.rs
Normal file
@@ -0,0 +1,398 @@
|
||||
use mcp_core::{Content, ToolError};
|
||||
use serde_json::Value;
|
||||
use std::fs;
|
||||
|
||||
const TEMPLATE: &str = r#"<html>
|
||||
<head>
|
||||
<title>HTML and CSS Slideshow</title>
|
||||
<style>
|
||||
body {
|
||||
font-family: Helvetica, sans-serif;
|
||||
padding: 5%;
|
||||
text-align: center;
|
||||
font-size: 16px;
|
||||
}
|
||||
|
||||
/* Styling the area of the slides */
|
||||
#slideshow {
|
||||
overflow: hidden;
|
||||
height: 510px;
|
||||
width: 728px;
|
||||
margin: 0 auto;
|
||||
position: relative;
|
||||
}
|
||||
|
||||
/* Style each of the sides with a fixed width and height */
|
||||
.slide {
|
||||
float: left;
|
||||
height: 510px;
|
||||
width: 728px;
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
justify-content: center;
|
||||
align-items: center;
|
||||
padding: 20px;
|
||||
box-sizing: border-box;
|
||||
}
|
||||
|
||||
/* Add animation to the slides */
|
||||
.slide-wrapper {
|
||||
/* Calculate the total width on the basis of number of slides */
|
||||
width: calc(728px * var(--num-slides));
|
||||
transition: margin-left 0.3s ease-in-out;
|
||||
}
|
||||
|
||||
/* Set the background color of each of the slides */
|
||||
.slide:nth-child(1) { background: #4CAF50; } /* Material Green */
|
||||
.slide:nth-child(2) { background: #2196F3; } /* Material Blue */
|
||||
.slide:nth-child(3) { background: #FFC107; } /* Material Amber */
|
||||
|
||||
/* Style slide content */
|
||||
.slide h1 {
|
||||
color: white;
|
||||
font-size: 2.5em;
|
||||
margin-bottom: 0.5em;
|
||||
text-shadow: 2px 2px 4px rgba(0, 0, 0, 0.2);
|
||||
}
|
||||
|
||||
.slide p {
|
||||
color: white;
|
||||
font-size: 1.5em;
|
||||
line-height: 1.4;
|
||||
text-shadow: 1px 1px 2px rgba(0, 0, 0, 0.1);
|
||||
max-width: 90%;
|
||||
margin: 0.5em auto;
|
||||
}
|
||||
|
||||
.slide ul, .slide ol {
|
||||
color: white;
|
||||
font-size: 1.2em;
|
||||
text-align: left;
|
||||
margin: 1em auto;
|
||||
text-shadow: 1px 1px 2px rgba(0, 0, 0, 0.1);
|
||||
max-width: 90%;
|
||||
}
|
||||
|
||||
.slide li {
|
||||
margin-bottom: 0.5em;
|
||||
}
|
||||
|
||||
.slide ul ul, .slide ol ol {
|
||||
font-size: 0.9em;
|
||||
margin: 0.5em 0 0.5em 1em;
|
||||
}
|
||||
|
||||
.slide pre {
|
||||
font-size: 1.1em;
|
||||
text-align: left;
|
||||
background: rgba(255, 255, 255, 0.9);
|
||||
padding: 1em;
|
||||
border-radius: 5px;
|
||||
max-width: 90%;
|
||||
overflow-x: auto;
|
||||
}
|
||||
|
||||
.nav-hint {
|
||||
position: fixed;
|
||||
bottom: 20px;
|
||||
left: 50%;
|
||||
transform: translateX(-50%);
|
||||
background: rgba(0, 0, 0, 0.7);
|
||||
color: white;
|
||||
padding: 10px 20px;
|
||||
border-radius: 5px;
|
||||
font-size: 16px;
|
||||
opacity: 1;
|
||||
transition: opacity 0.5s;
|
||||
}
|
||||
|
||||
.nav-hint.fade {
|
||||
opacity: 0;
|
||||
}
|
||||
</style>
|
||||
</head>
|
||||
<body>
|
||||
<!-- Define the slideshow container -->
|
||||
<div id="slideshow">
|
||||
<div class="slide-wrapper" style="--num-slides: 2">
|
||||
<!-- First slide -->
|
||||
<div class="slide">
|
||||
<h1>Your Presentation</h1>
|
||||
<p>Use arrow keys to navigate</p>
|
||||
</div>
|
||||
|
||||
<!-- SLIDE_TEMPLATE (do not remove this comment)
|
||||
|
||||
<div class="slide">
|
||||
<h1>New Slide Title</h1>
|
||||
<p>Slide content goes here, can use rich like below:</p>
|
||||
<ul>
|
||||
<li>Use make_presentation to:
|
||||
<ul>
|
||||
<li>create - Create new presentation</li>
|
||||
<li>add_slide - Add a new slide with content</li>
|
||||
</ul>
|
||||
</li>
|
||||
<li>For manual edits:
|
||||
<ul>
|
||||
<li>Use developer tools to edit the HTML</li>
|
||||
<li>Update --num-slides in slide-wrapper</li>
|
||||
<li>Copy template below for new slides</li>
|
||||
</ul>
|
||||
</li>
|
||||
</ul>
|
||||
</div>
|
||||
|
||||
END_SLIDE_TEMPLATE -->
|
||||
|
||||
<!-- ADD_SLIDES_HERE (do not remove this comment) -->
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div class="nav-hint">
|
||||
Use ← and → arrow keys to navigate
|
||||
</div>

    <script>
        const slideWrapper = document.querySelector('.slide-wrapper');
        const slideWidth = 728;
        let currentSlide = 0;
        const hint = document.querySelector('.nav-hint');
        const totalSlides = document.querySelectorAll('.slide').length;

        // Hide hint after 5 seconds
        setTimeout(() => {
            hint.classList.add('fade');
        }, 5000);

        document.addEventListener('keydown', (e) => {
            if (e.key === 'ArrowLeft') {
                if (currentSlide > 0) {
                    currentSlide--;
                    updateSlide();
                }
            } else if (e.key === 'ArrowRight') {
                if (currentSlide < totalSlides - 1) {
                    currentSlide++;
                    updateSlide();
                }
            }
        });

        function updateSlide() {
            slideWrapper.style.marginLeft = `-${currentSlide * slideWidth}px`;
        }
    </script>
</body>
</html>"#;

pub async fn make_presentation(
    path: &str,
    operation: &str,
    params: Option<&Value>,
) -> Result<Vec<Content>, ToolError> {
    match operation {
        "create" => {
            // Get title from params or use default
            let title = params
                .and_then(|p| p.get("title"))
                .and_then(|v| v.as_str())
                .unwrap_or("Your Presentation");

            // Replace title in template
            let content = TEMPLATE.replace("Your Presentation", title);

            // Create a new presentation with the template
            fs::write(path, content).map_err(|e| {
                ToolError::ExecutionError(format!("Failed to create presentation file: {}", e))
            })?;

            Ok(vec![Content::text(format!(
                "Created new presentation with title '{}' at: {}\nYou can open it with the command: `open {}` to show the user. Review the HTML and consider asking the user whether they want to adjust it (colours, typeface, and so on).",
                title, path, path
            ))])
        }
        "add_slide" => {
            let content = params
                .and_then(|p| p.get("content"))
                .and_then(|v| v.as_str())
                .ok_or_else(|| {
                    ToolError::InvalidParameters("Missing 'content' parameter for slide".into())
                })?;

            // Read the existing file
            let mut html = fs::read_to_string(path).map_err(|e| {
                ToolError::ExecutionError(format!("Failed to read presentation file: {}", e))
            })?;

            // Find the marker comment
            let marker = "<!-- ADD_SLIDES_HERE";
            let insert_pos = html.find(marker).ok_or_else(|| {
                ToolError::ExecutionError("Invalid presentation file format".into())
            })?;

            // Count actual slides (excluding the commented-out template slide)
            let current_slides = html.matches("class=\"slide\"").count() - 1; // -1 for template
            let new_count = current_slides + 1;

            // Update the num-slides value
            html = html.replace(
                &format!("--num-slides: {}", current_slides),
                &format!("--num-slides: {}", new_count),
            );

            // Create the new slide HTML, keeping the marker after it for future inserts
            let slide_html = format!(
                r#"            <div class="slide">
                <h1>{}</h1>
            </div>

            {}"#,
                content, marker
            );

            // Insert the new slide in place of the marker prefix
            html.replace_range(insert_pos..insert_pos + marker.len(), &slide_html);

            // Save the file
            fs::write(path, html).map_err(|e| {
                ToolError::ExecutionError(format!("Failed to update presentation file: {}", e))
            })?;

            Ok(vec![Content::text(format!(
                "Added new slide to presentation. You can view it with: open {}\nNote: when creating or adding a slide, if the content for a slide is long, edit it so that it uses an appropriate size, formatting, lists, etc. (it can even be split across additional slides if needed).",
                path
            ))])
        }
        _ => Err(ToolError::InvalidParameters(format!(
            "Invalid operation: {}. Valid operations are: create, add_slide",
            operation
        ))),
    }
}

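// Illustrative usage sketch (not part of the original change): this is how a
// caller might drive the tool end to end. The `deck.html` path, the title, and
// the slide content below are hypothetical placeholders.
#[allow(dead_code)]
async fn example_presentation_flow() -> Result<(), ToolError> {
    // Create a new deck with a custom title...
    let create_params = serde_json::json!({ "title": "Quarterly Review" });
    make_presentation("deck.html", "create", Some(&create_params)).await?;

    // ...then append a single slide to it.
    let slide_params = serde_json::json!({ "content": "Agenda" });
    make_presentation("deck.html", "add_slide", Some(&slide_params)).await?;
    Ok(())
}
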
#[cfg(test)]
mod tests {
    use super::*;

    #[tokio::test]
    async fn test_create_presentation() {
        let test_dir = tempfile::tempdir().unwrap();
        let test_path = test_dir.path().join("test_presentation.html");
        let path_str = test_path.to_str().unwrap();

        // Test default title
        let result = make_presentation(path_str, "create", None).await;
        assert!(result.is_ok(), "Should successfully create presentation");

        // Verify the file exists and contains the default title
        assert!(test_path.exists(), "Presentation file should exist");
        let content = fs::read_to_string(&test_path).unwrap();
        assert!(
            content.contains("Your Presentation"),
            "Should contain default title"
        );

        // Test custom title
        let test_path2 = test_dir.path().join("test_presentation2.html");
        let path_str2 = test_path2.to_str().unwrap();
        let params = serde_json::json!({
            "title": "Custom Title Test"
        });
        let result = make_presentation(path_str2, "create", Some(&params)).await;
        assert!(
            result.is_ok(),
            "Should successfully create presentation with custom title"
        );

        // Verify custom title
        let content = fs::read_to_string(&test_path2).unwrap();
        assert!(
            content.contains("Custom Title Test"),
            "Should contain custom title"
        );
        assert!(
            content.contains("SLIDE_TEMPLATE"),
            "Should contain slide template"
        );
        assert!(
            content.contains("ADD_SLIDES_HERE"),
            "Should contain slides marker"
        );

        // Clean up
        test_dir.close().unwrap();
    }

    #[tokio::test]
    async fn test_add_slide() {
        let test_dir = tempfile::tempdir().unwrap();
        let test_path = test_dir.path().join("test_presentation.html");
        let path_str = test_path.to_str().unwrap();

        // First create the presentation
        let result = make_presentation(path_str, "create", None).await;
        assert!(result.is_ok(), "Should successfully create presentation");

        // Add a new slide
        let params = serde_json::json!({
            "content": "New Test Slide"
        });
        let result = make_presentation(path_str, "add_slide", Some(&params)).await;
        assert!(result.is_ok(), "Should successfully add slide");

        // Verify the content
        let content = fs::read_to_string(&test_path).unwrap();
        assert!(
            content.contains("New Test Slide"),
            "Should contain new slide content"
        );
        // Initial template has 1 slide + new slide = 2
        assert!(
            content.contains("--num-slides: 2"),
            "Should have correct slide count"
        );
        assert!(
            content.contains("ADD_SLIDES_HERE"),
            "Should preserve marker"
        );

        // Clean up
        test_dir.close().unwrap();
    }

    #[tokio::test]
    async fn test_add_slide_without_content() {
        let test_dir = tempfile::tempdir().unwrap();
        let test_path = test_dir.path().join("test_presentation.html");
        let path_str = test_path.to_str().unwrap();

        // Create the presentation first
        let _ = make_presentation(path_str, "create", None).await;

        // Try to add slide without content
        let result = make_presentation(path_str, "add_slide", None).await;
        assert!(result.is_err(), "Should fail without content");
        match result {
            Err(ToolError::InvalidParameters(msg)) => {
                assert!(msg.contains("Missing 'content' parameter"));
            }
            _ => panic!("Expected InvalidParameters error"),
        }

        // Clean up
        test_dir.close().unwrap();
    }

    #[tokio::test]
    async fn test_invalid_operation() {
        let result = make_presentation("test.html", "invalid", None).await;
        assert!(result.is_err(), "Should fail with invalid operation");
        match result {
            Err(ToolError::InvalidParameters(msg)) => {
                assert!(msg.contains("Valid operations are: create, add_slide"));
            }
            _ => panic!("Expected InvalidParameters error"),
        }
    }
}
Binary file not shown.
BIN
crates/goose-mcp/src/computercontroller/tests/data/sample.docx
Normal file
Binary file not shown.
331
crates/goose-mcp/src/computercontroller/xlsx_tool.rs
Normal file
@@ -0,0 +1,331 @@
use anyhow::{Context, Result};
use serde::{Deserialize, Serialize};
use std::path::Path;
use umya_spreadsheet::{Spreadsheet, Worksheet};

#[derive(Debug, Serialize, Deserialize)]
pub struct WorksheetInfo {
    name: String,
    index: usize,
    column_count: usize,
    row_count: usize,
}

#[derive(Debug, Serialize, Deserialize)]
pub struct CellValue {
    value: String,
    formula: Option<String>,
}

#[derive(Debug, Serialize, Deserialize)]
pub struct RangeData {
    start_row: u32,
    end_row: u32,
    start_col: u32,
    end_col: u32,
    // First dimension is rows, second dimension is columns: values[row_index][column_index]
    values: Vec<Vec<CellValue>>,
}

pub struct XlsxTool {
    workbook: Spreadsheet,
}

impl XlsxTool {
    pub fn new<P: AsRef<Path>>(path: P) -> Result<Self> {
        let workbook =
            umya_spreadsheet::reader::xlsx::read(path).context("Failed to read Excel file")?;
        Ok(Self { workbook })
    }

    pub fn list_worksheets(&self) -> Result<Vec<WorksheetInfo>> {
        let mut worksheets = Vec::new();
        for (index, worksheet) in self.workbook.get_sheet_collection().iter().enumerate() {
            let (column_count, row_count) = self.get_worksheet_dimensions(worksheet)?;
            worksheets.push(WorksheetInfo {
                name: worksheet.get_name().to_string(),
                index,
                column_count,
                row_count,
            });
        }
        Ok(worksheets)
    }

    pub fn get_worksheet_by_name(&self, name: &str) -> Result<&Worksheet> {
        self.workbook
            .get_sheet_by_name(name)
            .context("Worksheet not found")
    }

    pub fn get_worksheet_by_index(&self, index: usize) -> Result<&Worksheet> {
        self.workbook
            .get_sheet_collection()
            .get(index)
            .context("Worksheet index out of bounds")
    }

    fn get_worksheet_dimensions(&self, worksheet: &Worksheet) -> Result<(usize, usize)> {
        // Returns (column_count, row_count) for the worksheet
        let mut max_col = 0;
        let mut max_row = 0;

        // Iterate through all rows
        for row_num in 1..=worksheet.get_highest_row() {
            for col_num in 1..=worksheet.get_highest_column() {
                if let Some(cell) = worksheet.get_cell((row_num, col_num)) {
                    let coord = cell.get_coordinate();
                    max_col = max_col.max(*coord.get_col_num() as usize);
                    max_row = max_row.max(*coord.get_row_num() as usize);
                }
            }
        }

        Ok((max_col, max_row))
    }

    pub fn get_column_names(&self, worksheet: &Worksheet) -> Result<Vec<String>> {
        let mut names = Vec::new();
        for col_num in 1..=worksheet.get_highest_column() {
            if let Some(cell) = worksheet.get_cell((1, col_num)) {
                names.push(cell.get_value().into_owned());
            } else {
                names.push(String::new());
            }
        }
        Ok(names)
    }

    pub fn get_range(&self, worksheet: &Worksheet, range: &str) -> Result<RangeData> {
        let (start_col, start_row, end_col, end_row) = parse_range(range)?;
        let mut values = Vec::new();

        // Iterate through rows first, then columns
        for row_idx in start_row..=end_row {
            let mut row_values = Vec::new();
            for col_idx in start_col..=end_col {
                let cell_value = if let Some(cell) = worksheet.get_cell((row_idx, col_idx)) {
                    CellValue {
                        value: cell.get_value().into_owned(),
                        formula: if cell.get_formula().is_empty() {
                            None
                        } else {
                            Some(cell.get_formula().to_string())
                        },
                    }
                } else {
                    CellValue {
                        value: String::new(),
                        formula: None,
                    }
                };
                row_values.push(cell_value);
            }
            values.push(row_values);
        }

        Ok(RangeData {
            start_row,
            end_row,
            start_col,
            end_col,
            values,
        })
    }

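    // Shape note for get_range (illustrative; `ws` stands for any worksheet
    // reference): get_range(ws, "A1:C5") yields a RangeData covering rows 1..=5
    // and columns 1..=3, where values[0][2] is cell C1 (the outer Vec is rows,
    // the inner Vec is columns).
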
    pub fn update_cell(
        &mut self,
        worksheet_name: &str,
        row: u32,
        col: u32,
        value: &str,
    ) -> Result<()> {
        let worksheet = self
            .workbook
            .get_sheet_by_name_mut(worksheet_name)
            .context("Worksheet not found")?;

        worksheet
            .get_cell_mut((row, col))
            .set_value(value.to_string());
        Ok(())
    }

    pub fn save<P: AsRef<Path>>(&self, path: P) -> Result<()> {
        umya_spreadsheet::writer::xlsx::write(&self.workbook, path)
            .context("Failed to save Excel file")?;
        Ok(())
    }

    pub fn find_in_worksheet(
        &self,
        worksheet: &Worksheet,
        search_text: &str,
        case_sensitive: bool,
    ) -> Result<Vec<(u32, u32)>> {
        // Returns a vector of (row, column) coordinates where matches are found
        let mut matches = Vec::new();
        let search_text = if !case_sensitive {
            search_text.to_lowercase()
        } else {
            search_text.to_string()
        };

        for row_num in 1..=worksheet.get_highest_row() {
            for col_num in 1..=worksheet.get_highest_column() {
                if let Some(cell) = worksheet.get_cell((row_num, col_num)) {
                    let cell_value = if !case_sensitive {
                        cell.get_value().to_lowercase()
                    } else {
                        cell.get_value().to_string()
                    };

                    if cell_value.contains(&search_text) {
                        let coord = cell.get_coordinate();
                        matches.push((*coord.get_row_num(), *coord.get_col_num()));
                    }
                }
            }
        }

        Ok(matches)
    }

    pub fn get_cell_value(&self, worksheet: &Worksheet, row: u32, col: u32) -> Result<CellValue> {
        let cell = worksheet.get_cell((row, col)).context("Cell not found")?;

        Ok(CellValue {
            value: cell.get_value().into_owned(),
            formula: if cell.get_formula().is_empty() {
                None
            } else {
                Some(cell.get_formula().to_string())
            },
        })
    }
}

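// Illustrative usage sketch (not part of the original change): how the methods
// above compose. The `report.xlsx` path and the `Sheet1` worksheet name are
// hypothetical placeholders.
#[allow(dead_code)]
fn example_xlsx_usage() -> Result<()> {
    let mut xlsx = XlsxTool::new("report.xlsx")?;

    // Inspect the workbook structure.
    for info in xlsx.list_worksheets()? {
        println!("{:?}", info);
    }

    // Read a single cell from the first worksheet.
    let sheet = xlsx.get_worksheet_by_index(0)?;
    let header = xlsx.get_cell_value(sheet, 1, 1)?;
    println!("First cell: {}", header.value);

    // Write a value back and save.
    xlsx.update_cell("Sheet1", 2, 3, "updated")?;
    xlsx.save("report.xlsx")?;
    Ok(())
}
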
fn parse_range(range: &str) -> Result<(u32, u32, u32, u32)> {
    // Handle ranges like "A1:B10"
    let parts: Vec<&str> = range.split(':').collect();
    if parts.len() != 2 {
        anyhow::bail!("Invalid range format. Expected format: 'A1:B10'");
    }

    let start = parse_cell_reference(parts[0])?;
    let end = parse_cell_reference(parts[1])?;

    Ok((start.0, start.1, end.0, end.1))
}

fn parse_cell_reference(reference: &str) -> Result<(u32, u32)> {
    // Parse Excel cell reference (e.g., "A1") and return (column, row)
    let mut col_str = String::new();
    let mut row_str = String::new();
    let mut parsing_row = false;

    for c in reference.chars() {
        if c.is_alphabetic() {
            if parsing_row {
                anyhow::bail!("Invalid cell reference format");
            }
            col_str.push(c.to_ascii_uppercase());
        } else if c.is_numeric() {
            parsing_row = true;
            row_str.push(c);
        } else {
            anyhow::bail!("Invalid character in cell reference");
        }
    }

    let col = column_letter_to_number(&col_str)?;
    let row = row_str.parse::<u32>().context("Invalid row number")?;

    Ok((col, row))
}

fn column_letter_to_number(column: &str) -> Result<u32> {
    let mut result = 0u32;
    for c in column.chars() {
        if !c.is_ascii_alphabetic() {
            anyhow::bail!("Invalid column letter");
        }
        result = result * 26 + (c.to_ascii_uppercase() as u32 - 'A' as u32 + 1);
    }
    Ok(result)
}

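// Worked example for the conversion above (illustrative): letters form a
// base-26 number with A = 1, so
//   "A"  -> 1
//   "Z"  -> 26
//   "AA" -> 1 * 26 + 1 = 27
//   "AB" -> 1 * 26 + 2 = 28
// and a reference such as "AB10" therefore parses to (column 28, row 10).
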
#[cfg(test)]
mod tests {
    use super::*;
    use std::path::PathBuf;

    fn get_test_file() -> PathBuf {
        PathBuf::from(env!("CARGO_MANIFEST_DIR"))
            .join("src")
            .join("computercontroller")
            .join("tests")
            .join("data")
            .join("FinancialSample.xlsx")
    }

    #[test]
    fn test_open_xlsx() -> Result<()> {
        let xlsx = XlsxTool::new(get_test_file())?;
        let worksheets = xlsx.list_worksheets()?;
        assert!(!worksheets.is_empty());
        Ok(())
    }

    #[test]
    fn test_get_column_names() -> Result<()> {
        let xlsx = XlsxTool::new(get_test_file())?;
        let worksheet = xlsx.get_worksheet_by_index(0)?;
        let columns = xlsx.get_column_names(worksheet)?;
        assert!(!columns.is_empty());
        println!("Columns: {:?}", columns);
        Ok(())
    }

    #[test]
    fn test_get_range() -> Result<()> {
        let xlsx = XlsxTool::new(get_test_file())?;
        let worksheet = xlsx.get_worksheet_by_index(0)?;
        let range = xlsx.get_range(worksheet, "A1:C5")?;
        assert_eq!(range.values.len(), 5);
        println!("Range data: {:?}", range);
        Ok(())
    }

    #[test]
    fn test_find_in_worksheet() -> Result<()> {
        let xlsx = XlsxTool::new(get_test_file())?;
        let worksheet = xlsx.get_worksheet_by_index(0)?;
        let matches = xlsx.find_in_worksheet(worksheet, "Government", false)?;
        assert!(!matches.is_empty());
        println!("Found matches at: {:?}", matches);
        Ok(())
    }

    #[test]
    fn test_get_cell_value() -> Result<()> {
        let xlsx = XlsxTool::new(get_test_file())?;
        let worksheet = xlsx.get_worksheet_by_index(0)?;

        // Test header cell (known value from FinancialSample.xlsx)
        let header_cell = xlsx.get_cell_value(worksheet, 1, 1)?;
        assert_eq!(header_cell.value, "Segment");
        assert!(header_cell.formula.is_none());

        // Test data cell (known value from FinancialSample.xlsx)
        let data_cell = xlsx.get_cell_value(worksheet, 2, 2)?;
        assert_eq!(data_cell.value, "Canada");
        assert!(data_cell.formula.is_none());

        println!(
            "Header cell: {:#?}\nData cell: {:#?}",
            header_cell, data_cell
        );
        Ok(())
    }
}