feat(file-tool): add pptx, ods, parquet and image actions
This commit is contained in:
@@ -57,6 +57,12 @@ Implemented in actual NODE1 stack (`services/router/*` + gateway):
|
|||||||
- `zip_bundle`
|
- `zip_bundle`
|
||||||
- `docx_create`
|
- `docx_create`
|
||||||
- `docx_update`
|
- `docx_update`
|
||||||
|
- `pptx_create`
|
||||||
|
- `pptx_update`
|
||||||
|
- `ods_create`
|
||||||
|
- `ods_update`
|
||||||
|
- `parquet_create`
|
||||||
|
- `parquet_update`
|
||||||
- `pdf_merge`
|
- `pdf_merge`
|
||||||
- `pdf_split`
|
- `pdf_split`
|
||||||
- `pdf_fill`
|
- `pdf_fill`
|
||||||
@@ -66,6 +72,9 @@ Implemented in actual NODE1 stack (`services/router/*` + gateway):
|
|||||||
- `markdown_update`
|
- `markdown_update`
|
||||||
- `xml_export`
|
- `xml_export`
|
||||||
- `html_export`
|
- `html_export`
|
||||||
|
- `image_create`
|
||||||
|
- `image_edit`
|
||||||
|
- `image_convert`
|
||||||
|
|
||||||
### Standard output contract
|
### Standard output contract
|
||||||
For file-producing tool calls, router now propagates:
|
For file-producing tool calls, router now propagates:
|
||||||
@@ -101,11 +110,15 @@ Run inside `dagi-router-node1` to validate actions deterministically:
|
|||||||
- Excel create/update
|
- Excel create/update
|
||||||
- Text/Markdown create/update
|
- Text/Markdown create/update
|
||||||
- XML/HTML export
|
- XML/HTML export
|
||||||
|
- PPTX create/update
|
||||||
|
- ODS create/update
|
||||||
|
- Parquet create/update
|
||||||
- CSV create/update
|
- CSV create/update
|
||||||
- JSON/YAML export
|
- JSON/YAML export
|
||||||
- ZIP bundle
|
- ZIP bundle
|
||||||
- DOCX create/update
|
- DOCX create/update
|
||||||
- PDF merge/split/fill
|
- PDF merge/split/fill
|
||||||
|
- Image create/edit/convert
|
||||||
|
|
||||||
Also verify infer endpoint still works:
|
Also verify infer endpoint still works:
|
||||||
- `POST http://127.0.0.1:9102/v1/agents/devtools/infer`
|
- `POST http://127.0.0.1:9102/v1/agents/devtools/infer`
|
||||||
@@ -117,6 +130,12 @@ Also verify infer endpoint still works:
|
|||||||
- `rollback_backups/file_tool_step4_tool_manager.py.bak_20260215_012309`
|
- `rollback_backups/file_tool_step4_tool_manager.py.bak_20260215_012309`
|
||||||
- `services/router/tool_manager.py.bak_20260215_020902`
|
- `services/router/tool_manager.py.bak_20260215_020902`
|
||||||
- `services/router/tool_manager.py.bak_20260215_112313`
|
- `services/router/tool_manager.py.bak_20260215_112313`
|
||||||
|
- `services/router/tool_manager.py.bak_20260215_112459`
|
||||||
|
- `services/router/requirements.txt.bak_20260215_112459`
|
||||||
|
- `services/router/tool_manager.py.bak_20260215_112652`
|
||||||
|
- `services/router/requirements.txt.bak_20260215_112652`
|
||||||
|
- `services/router/tool_manager.py.bak_20260215_112841`
|
||||||
|
- `services/router/tool_manager.py.bak_20260215_112912`
|
||||||
|
|
||||||
## Rollback (NODE1)
|
## Rollback (NODE1)
|
||||||
```bash
|
```bash
|
||||||
|
|||||||
@@ -8,6 +8,9 @@ neo4j>=5.14.0
|
|||||||
openpyxl>=3.1.2
|
openpyxl>=3.1.2
|
||||||
python-docx>=1.1.2
|
python-docx>=1.1.2
|
||||||
pypdf>=5.1.0
|
pypdf>=5.1.0
|
||||||
|
python-pptx>=0.6.23
|
||||||
|
odfpy>=1.4.1
|
||||||
|
pyarrow>=18.0.0
|
||||||
|
|
||||||
# Memory Retrieval v3.0
|
# Memory Retrieval v3.0
|
||||||
asyncpg>=0.29.0
|
asyncpg>=0.29.0
|
||||||
|
|||||||
@@ -12,6 +12,7 @@ import logging
|
|||||||
import hashlib
|
import hashlib
|
||||||
import base64
|
import base64
|
||||||
import csv
|
import csv
|
||||||
|
import tempfile
|
||||||
import httpx
|
import httpx
|
||||||
from typing import Dict, List, Any, Optional
|
from typing import Dict, List, Any, Optional
|
||||||
from dataclasses import dataclass
|
from dataclasses import dataclass
|
||||||
@@ -324,10 +325,13 @@ TOOL_DEFINITIONS = [
|
|||||||
"type": "string",
|
"type": "string",
|
||||||
"enum": [
|
"enum": [
|
||||||
"excel_create", "excel_update", "docx_create", "docx_update",
|
"excel_create", "excel_update", "docx_create", "docx_update",
|
||||||
|
"pptx_create", "pptx_update",
|
||||||
|
"ods_create", "ods_update", "parquet_create", "parquet_update",
|
||||||
"csv_create", "csv_update", "pdf_fill", "pdf_merge", "pdf_split",
|
"csv_create", "csv_update", "pdf_fill", "pdf_merge", "pdf_split",
|
||||||
"json_export", "yaml_export", "zip_bundle",
|
"json_export", "yaml_export", "zip_bundle",
|
||||||
"text_create", "text_update", "markdown_create", "markdown_update",
|
"text_create", "text_update", "markdown_create", "markdown_update",
|
||||||
"xml_export", "html_export"
|
"xml_export", "html_export",
|
||||||
|
"image_create", "image_edit", "image_convert"
|
||||||
],
|
],
|
||||||
"description": "Дія file tool"
|
"description": "Дія file tool"
|
||||||
},
|
},
|
||||||
@@ -610,6 +614,14 @@ class ToolManager:
|
|||||||
return self._file_csv_create(args)
|
return self._file_csv_create(args)
|
||||||
if action == "csv_update":
|
if action == "csv_update":
|
||||||
return self._file_csv_update(args)
|
return self._file_csv_update(args)
|
||||||
|
if action == "ods_create":
|
||||||
|
return self._file_ods_create(args)
|
||||||
|
if action == "ods_update":
|
||||||
|
return self._file_ods_update(args)
|
||||||
|
if action == "parquet_create":
|
||||||
|
return self._file_parquet_create(args)
|
||||||
|
if action == "parquet_update":
|
||||||
|
return self._file_parquet_update(args)
|
||||||
if action == "text_create":
|
if action == "text_create":
|
||||||
return self._file_text_create(args)
|
return self._file_text_create(args)
|
||||||
if action == "text_update":
|
if action == "text_update":
|
||||||
@@ -622,6 +634,12 @@ class ToolManager:
|
|||||||
return self._file_xml_export(args)
|
return self._file_xml_export(args)
|
||||||
if action == "html_export":
|
if action == "html_export":
|
||||||
return self._file_html_export(args)
|
return self._file_html_export(args)
|
||||||
|
if action == "image_create":
|
||||||
|
return self._file_image_create(args)
|
||||||
|
if action == "image_edit":
|
||||||
|
return self._file_image_edit(args)
|
||||||
|
if action == "image_convert":
|
||||||
|
return self._file_image_convert(args)
|
||||||
if action == "json_export":
|
if action == "json_export":
|
||||||
return self._file_json_export(args)
|
return self._file_json_export(args)
|
||||||
if action == "yaml_export":
|
if action == "yaml_export":
|
||||||
@@ -632,6 +650,10 @@ class ToolManager:
|
|||||||
return self._file_docx_create(args)
|
return self._file_docx_create(args)
|
||||||
if action == "docx_update":
|
if action == "docx_update":
|
||||||
return self._file_docx_update(args)
|
return self._file_docx_update(args)
|
||||||
|
if action == "pptx_create":
|
||||||
|
return self._file_pptx_create(args)
|
||||||
|
if action == "pptx_update":
|
||||||
|
return self._file_pptx_update(args)
|
||||||
if action == "pdf_merge":
|
if action == "pdf_merge":
|
||||||
return self._file_pdf_merge(args)
|
return self._file_pdf_merge(args)
|
||||||
if action == "pdf_split":
|
if action == "pdf_split":
|
||||||
@@ -712,6 +734,200 @@ class ToolManager:
|
|||||||
file_mime="text/csv",
|
file_mime="text/csv",
|
||||||
)
|
)
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def _rows_to_objects(rows_raw: Any, headers: Optional[List[str]] = None) -> List[Dict[str, Any]]:
|
||||||
|
if not isinstance(rows_raw, list):
|
||||||
|
return []
|
||||||
|
result: List[Dict[str, Any]] = []
|
||||||
|
for idx, row in enumerate(rows_raw):
|
||||||
|
if isinstance(row, dict):
|
||||||
|
result.append(dict(row))
|
||||||
|
continue
|
||||||
|
if isinstance(row, list):
|
||||||
|
if headers:
|
||||||
|
obj = {str(headers[i]): row[i] if i < len(row) else None for i in range(len(headers))}
|
||||||
|
else:
|
||||||
|
obj = {f"col_{i+1}": v for i, v in enumerate(row)}
|
||||||
|
result.append(obj)
|
||||||
|
continue
|
||||||
|
key = headers[0] if headers else "value"
|
||||||
|
result.append({str(key): row})
|
||||||
|
return result
|
||||||
|
|
||||||
|
def _file_ods_create(self, args: Dict[str, Any]) -> ToolResult:
    """Create an OpenDocument spreadsheet (.ods) from a headers/rows payload.

    All cells are written as string-typed values; ``None`` becomes an empty
    string. When rows are dicts and no headers are given, headers are derived
    from the first row's keys.

    Returns a ToolResult carrying the file as base64 with the ODS MIME type.
    """
    from odf.opendocument import OpenDocumentSpreadsheet
    from odf.table import Table, TableCell, TableRow
    from odf.text import P

    file_name = self._sanitize_file_name(args.get("file_name"), "sheet.ods", force_ext=".ods")
    headers = args.get("headers") or []
    rows_raw = args.get("rows") or []
    rows = self._normalize_rows(rows_raw, headers=headers if headers else None)
    if rows and not headers and isinstance(rows_raw[0], dict):
        # Dict rows without explicit headers: derive headers from the first row.
        headers = list(rows_raw[0].keys())
        rows = self._normalize_rows(rows_raw, headers=headers)

    doc = OpenDocumentSpreadsheet()
    table = Table(name=str(args.get("sheet_name") or "Sheet1"))

    def _append_row(values: List[Any]) -> None:
        # Add one string-typed table row to the sheet.
        trow = TableRow()
        for value in values:
            cell = TableCell(valuetype="string")
            cell.addElement(P(text="" if value is None else str(value)))
            trow.addElement(cell)
        table.addElement(trow)

    if headers:
        _append_row([str(value) for value in headers])
    for row in rows:
        _append_row(row)

    doc.spreadsheet.addElement(table)
    # Serialize in memory: odfpy's write() accepts a file object, so the
    # previous NamedTemporaryFile round-trip through the filesystem is not needed.
    buf = BytesIO()
    doc.write(buf)
    payload = buf.getvalue()

    return ToolResult(
        success=True,
        result={"message": f"ODS created: {file_name}"},
        file_base64=self._b64_from_bytes(payload),
        file_name=file_name,
        file_mime="application/vnd.oasis.opendocument.spreadsheet",
    )
|
||||||
|
|
||||||
|
def _file_ods_update(self, args: Dict[str, Any]) -> ToolResult:
    """Append to or replace the first sheet of an existing .ods payload.

    ``operation`` is ``append`` (default) or ``replace``:
      - append: existing rows of the first table are read back as strings
        and the new rows are written after them;
      - replace: the first table is dropped and headers + rows are written
        fresh.

    The first table is rebuilt in both modes to keep the update
    deterministic. Requires ``file_base64``; errors are returned as failed
    ToolResults, not raised.
    """
    from odf.opendocument import load  # OpenDocumentSpreadsheet was imported but unused
    from odf.table import Table, TableCell, TableRow
    from odf.text import P

    src_b64 = args.get("file_base64")
    if not src_b64:
        return ToolResult(success=False, result=None, error="file_base64 is required for ods_update")
    file_name = self._sanitize_file_name(args.get("file_name"), "updated.ods", force_ext=".ods")
    operation = str(args.get("operation") or "append").strip().lower()
    if operation not in {"append", "replace"}:
        return ToolResult(success=False, result=None, error="operation must be append|replace")

    headers = args.get("headers") or []
    rows_raw = args.get("rows") or []
    rows = self._normalize_rows(rows_raw, headers=headers if headers else None)
    if rows and not headers and isinstance(rows_raw[0], dict):
        # Dict rows without explicit headers: derive headers from the first row.
        headers = list(rows_raw[0].keys())
        rows = self._normalize_rows(rows_raw, headers=headers)

    # odfpy's load() accepts a file-like object, so no temp file is needed.
    doc = load(BytesIO(self._bytes_from_b64(src_b64)))

    # Rebuild first table to keep update deterministic.
    tables = doc.spreadsheet.getElementsByType(Table)
    existing: List[List[str]] = []
    if tables and operation == "append":
        first = tables[0]
        for r in first.getElementsByType(TableRow):
            vals = []
            for c in r.getElementsByType(TableCell):
                # Concatenate the text of every paragraph node in the cell.
                text_nodes = c.getElementsByType(P)
                vals.append("".join((p.firstChild.data if p.firstChild else "") for p in text_nodes))
            existing.append(vals)
        doc.spreadsheet.removeChild(first)
    elif tables:
        doc.spreadsheet.removeChild(tables[0])

    table = Table(name=str(args.get("sheet_name") or "Sheet1"))
    out_rows: List[List[Any]] = []
    if operation == "append" and existing:
        out_rows.extend(existing)
        out_rows.extend(rows)
    else:
        if headers:
            out_rows.append(headers)
        out_rows.extend(rows)

    for row in out_rows:
        trow = TableRow()
        for value in row:
            cell = TableCell(valuetype="string")
            cell.addElement(P(text="" if value is None else str(value)))
            trow.addElement(cell)
        table.addElement(trow)

    doc.spreadsheet.addElement(table)
    # Serialize in memory instead of via a temp file on disk.
    buf = BytesIO()
    doc.write(buf)
    payload = buf.getvalue()

    return ToolResult(
        success=True,
        result={"message": f"ODS updated: {file_name}"},
        file_base64=self._b64_from_bytes(payload),
        file_name=file_name,
        file_mime="application/vnd.oasis.opendocument.spreadsheet",
    )
|
||||||
|
|
||||||
|
def _file_parquet_create(self, args: Dict[str, Any]) -> ToolResult:
    """Build a Parquet file from a ``rows``/``headers`` payload.

    Rows are first coerced to dict records; an empty payload still yields a
    valid file with a single null ``value`` record so pyarrow can infer a
    schema.
    """
    import pyarrow as pa
    import pyarrow.parquet as pq

    file_name = self._sanitize_file_name(args.get("file_name"), "data.parquet", force_ext=".parquet")
    headers = args.get("headers") or []
    records = self._rows_to_objects(args.get("rows") or [], headers=headers if headers else None)
    if not records:
        # pyarrow needs at least one record to infer a schema.
        records = [{"value": None}]

    buffer = BytesIO()
    pq.write_table(pa.Table.from_pylist(records), buffer)

    return ToolResult(
        success=True,
        result={"message": f"Parquet created: {file_name}"},
        file_base64=self._b64_from_bytes(buffer.getvalue()),
        file_name=file_name,
        file_mime="application/vnd.apache.parquet",
    )
|
||||||
|
|
||||||
|
def _file_parquet_update(self, args: Dict[str, Any]) -> ToolResult:
    """Append rows to, or fully replace, an existing Parquet payload.

    ``operation`` is ``append`` (default: current rows are read back and the
    new ones land after them) or ``replace``. Requires ``file_base64``;
    validation failures come back as failed ToolResults.
    """
    import pyarrow as pa
    import pyarrow.parquet as pq

    src_b64 = args.get("file_base64")
    if not src_b64:
        return ToolResult(success=False, result=None, error="file_base64 is required for parquet_update")
    file_name = self._sanitize_file_name(args.get("file_name"), "updated.parquet", force_ext=".parquet")
    operation = str(args.get("operation") or "append").strip().lower()
    if operation not in {"append", "replace"}:
        return ToolResult(success=False, result=None, error="operation must be append|replace")

    headers = args.get("headers") or []
    incoming = self._rows_to_objects(args.get("rows") or [], headers=headers if headers else None)

    if operation == "replace":
        merged = incoming
    else:
        # Read back the current contents so the new rows land after them.
        current = pq.read_table(BytesIO(self._bytes_from_b64(src_b64))).to_pylist()
        merged = current + incoming

    if not merged:
        # pyarrow needs at least one record to infer a schema.
        merged = [{"value": None}]
    buffer = BytesIO()
    pq.write_table(pa.Table.from_pylist(merged), buffer)

    return ToolResult(
        success=True,
        result={"message": f"Parquet updated: {file_name}"},
        file_base64=self._b64_from_bytes(buffer.getvalue()),
        file_name=file_name,
        file_mime="application/vnd.apache.parquet",
    )
|
||||||
|
|
||||||
def _file_json_export(self, args: Dict[str, Any]) -> ToolResult:
|
def _file_json_export(self, args: Dict[str, Any]) -> ToolResult:
|
||||||
file_name = self._sanitize_file_name(args.get("file_name"), "export.json", force_ext=".json")
|
file_name = self._sanitize_file_name(args.get("file_name"), "export.json", force_ext=".json")
|
||||||
content = args.get("content")
|
content = args.get("content")
|
||||||
@@ -826,6 +1042,142 @@ class ToolManager:
|
|||||||
file_mime="text/html",
|
file_mime="text/html",
|
||||||
)
|
)
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def _image_format_for_name(file_name: str, fallback: str = "PNG") -> str:
|
||||||
|
suffix = PurePath(file_name).suffix.lower()
|
||||||
|
mapping = {
|
||||||
|
".png": "PNG",
|
||||||
|
".jpg": "JPEG",
|
||||||
|
".jpeg": "JPEG",
|
||||||
|
".webp": "WEBP",
|
||||||
|
".gif": "GIF",
|
||||||
|
".bmp": "BMP",
|
||||||
|
".tif": "TIFF",
|
||||||
|
".tiff": "TIFF",
|
||||||
|
}
|
||||||
|
return mapping.get(suffix, fallback)
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def _mime_for_image_format(fmt: str) -> str:
|
||||||
|
mapping = {
|
||||||
|
"PNG": "image/png",
|
||||||
|
"JPEG": "image/jpeg",
|
||||||
|
"WEBP": "image/webp",
|
||||||
|
"GIF": "image/gif",
|
||||||
|
"BMP": "image/bmp",
|
||||||
|
"TIFF": "image/tiff",
|
||||||
|
}
|
||||||
|
return mapping.get(fmt.upper(), "application/octet-stream")
|
||||||
|
|
||||||
|
def _file_image_create(self, args: Dict[str, Any]) -> ToolResult:
    """Render a solid-color RGB image, optionally stamped with text at (20, 20)."""
    from PIL import Image, ImageDraw

    file_name = self._sanitize_file_name(args.get("file_name"), "image.png")
    fmt = self._image_format_for_name(file_name, fallback=str(args.get("format") or "PNG"))
    # Dimensions default to 1024x1024 and are clamped to at least 1 px.
    width = max(1, int(args.get("width") or 1024))
    height = max(1, int(args.get("height") or 1024))
    background = args.get("background_color") or args.get("color") or "white"

    canvas = Image.new("RGB", (width, height), color=background)
    caption = args.get("text")
    if caption:
        ImageDraw.Draw(canvas).text(
            (20, 20), str(caption), fill=args.get("text_color") or "black"
        )

    buffer = BytesIO()
    canvas.save(buffer, format=fmt)

    return ToolResult(
        success=True,
        result={"message": f"Image created: {file_name}"},
        file_base64=self._b64_from_bytes(buffer.getvalue()),
        file_name=file_name,
        file_mime=self._mime_for_image_format(fmt),
    )
|
||||||
|
|
||||||
|
def _file_image_edit(self, args: Dict[str, Any]) -> ToolResult:
    """Apply a sequence of edit operations to a base64-encoded image.

    Supported operation types: ``resize``, ``crop``, ``rotate``,
    ``flip_horizontal``, ``flip_vertical``, ``draw_text``. Operations run in
    order; the first malformed or unknown one aborts with a failed ToolResult.
    """
    from PIL import Image, ImageDraw

    src_b64 = args.get("file_base64")
    operations = args.get("operations") or []
    if not src_b64:
        return ToolResult(success=False, result=None, error="file_base64 is required for image_edit")
    if not isinstance(operations, list) or not operations:
        return ToolResult(success=False, result=None, error="operations must be non-empty array")

    file_name = self._sanitize_file_name(args.get("file_name"), "edited.png")
    fmt = self._image_format_for_name(file_name, fallback=str(args.get("format") or "PNG"))
    image = Image.open(BytesIO(self._bytes_from_b64(src_b64)))

    for op in operations:
        if not isinstance(op, dict):
            return ToolResult(success=False, result=None, error="Each operation must be object")
        op_type = str(op.get("type") or "").strip().lower()
        if op_type == "resize":
            # Missing dimensions keep the current size; both clamp to >= 1 px.
            new_size = (
                max(1, int(op.get("width") or image.width)),
                max(1, int(op.get("height") or image.height)),
            )
            image = image.resize(new_size)
        elif op_type == "crop":
            # Missing edges default to the full image bounds.
            box = (
                int(op.get("left") or 0),
                int(op.get("top") or 0),
                int(op.get("right") or image.width),
                int(op.get("bottom") or image.height),
            )
            image = image.crop(box)
        elif op_type == "rotate":
            image = image.rotate(float(op.get("angle") or 0), expand=bool(op.get("expand", True)))
        elif op_type == "flip_horizontal":
            image = image.transpose(Image.FLIP_LEFT_RIGHT)
        elif op_type == "flip_vertical":
            image = image.transpose(Image.FLIP_TOP_BOTTOM)
        elif op_type == "draw_text":
            pen = ImageDraw.Draw(image)
            origin = (int(op.get("x") or 0), int(op.get("y") or 0))
            pen.text(origin, str(op.get("text") or ""), fill=op.get("color") or "black")
        else:
            return ToolResult(success=False, result=None, error=f"Unsupported image_edit operation: {op_type}")

    buffer = BytesIO()
    image.save(buffer, format=fmt)

    return ToolResult(
        success=True,
        result={"message": f"Image edited: {file_name}"},
        file_base64=self._b64_from_bytes(buffer.getvalue()),
        file_name=file_name,
        file_mime=self._mime_for_image_format(fmt),
    )
|
||||||
|
|
||||||
|
def _file_image_convert(self, args: Dict[str, Any]) -> ToolResult:
    """Re-encode a base64 image to a target format.

    ``target_format`` (or the output file name's extension) picks the format;
    ``JPG`` is normalized to ``JPEG``. An optional integer ``quality`` is
    honored for JPEG and WEBP only.
    """
    from PIL import Image

    src_b64 = args.get("file_base64")
    if not src_b64:
        return ToolResult(success=False, result=None, error="file_base64 is required for image_convert")
    file_name = self._sanitize_file_name(args.get("file_name"), "converted.png")
    fmt = str(args.get("target_format") or self._image_format_for_name(file_name)).upper()
    if fmt == "JPG":
        fmt = "JPEG"

    image = Image.open(BytesIO(self._bytes_from_b64(src_b64)))
    # JPEG cannot store alpha/palette modes, so force RGB before saving.
    if fmt == "JPEG" and image.mode not in {"RGB", "L"}:
        image = image.convert("RGB")

    save_kwargs: Dict[str, Any] = {}
    quality = args.get("quality")
    if quality is not None and fmt in {"JPEG", "WEBP"}:
        save_kwargs["quality"] = int(quality)

    buffer = BytesIO()
    image.save(buffer, format=fmt, **save_kwargs)

    return ToolResult(
        success=True,
        result={"message": f"Image converted: {file_name}"},
        file_base64=self._b64_from_bytes(buffer.getvalue()),
        file_name=file_name,
        file_mime=self._mime_for_image_format(fmt),
    )
|
||||||
|
|
||||||
def _file_yaml_export(self, args: Dict[str, Any]) -> ToolResult:
|
def _file_yaml_export(self, args: Dict[str, Any]) -> ToolResult:
|
||||||
file_name = self._sanitize_file_name(args.get("file_name"), "export.yaml", force_ext=".yaml")
|
file_name = self._sanitize_file_name(args.get("file_name"), "export.yaml", force_ext=".yaml")
|
||||||
content = args.get("content")
|
content = args.get("content")
|
||||||
@@ -1117,6 +1469,119 @@ class ToolManager:
|
|||||||
file_mime="application/vnd.openxmlformats-officedocument.wordprocessingml.document",
|
file_mime="application/vnd.openxmlformats-officedocument.wordprocessingml.document",
|
||||||
)
|
)
|
||||||
|
|
||||||
|
def _file_pptx_create(self, args: Dict[str, Any]) -> ToolResult:
    """Build a .pptx deck: optional title slide plus one slide per entry.

    Each entry in ``slides`` is a dict with a ``title`` and bullet text
    supplied as ``bullets``, ``lines``, or a single ``text`` value.
    """
    from pptx import Presentation

    def _entry_lines(entry: Dict[str, Any]) -> List[Any]:
        # Bullet text may arrive under "bullets", "lines", or a single "text".
        lines = entry.get("bullets")
        if lines is None:
            lines = entry.get("lines")
        if lines is None:
            lines = [entry.get("text")] if entry.get("text") is not None else []
        return lines

    def _fill_frame(frame, lines) -> None:
        # First line reuses the frame's initial paragraph; the rest append.
        frame.clear()
        for index, line in enumerate(lines):
            if index == 0:
                frame.text = str(line)
            else:
                frame.add_paragraph().text = str(line)

    file_name = self._sanitize_file_name(args.get("file_name"), "slides.pptx", force_ext=".pptx")
    prs = Presentation()

    title = str(args.get("title") or "").strip()
    subtitle = str(args.get("subtitle") or "").strip()
    if title or subtitle:
        cover = prs.slides.add_slide(prs.slide_layouts[0])
        if title and cover.shapes.title:
            cover.shapes.title.text = title
        if subtitle and len(cover.placeholders) > 1:
            cover.placeholders[1].text = subtitle

    for entry in args.get("slides") or []:
        if not isinstance(entry, dict):
            continue
        slide = prs.slides.add_slide(prs.slide_layouts[1])
        if slide.shapes.title:
            slide.shapes.title.text = str(entry.get("title") or "")
        if len(slide.placeholders) > 1:
            _fill_frame(slide.placeholders[1].text_frame, _entry_lines(entry))

    out = BytesIO()
    prs.save(out)
    return ToolResult(
        success=True,
        result={"message": f"PPTX created: {file_name}"},
        file_base64=self._b64_from_bytes(out.getvalue()),
        file_name=file_name,
        file_mime="application/vnd.openxmlformats-officedocument.presentationml.presentation",
    )
|
||||||
|
|
||||||
|
def _file_pptx_update(self, args: Dict[str, Any]) -> ToolResult:
    """Apply ``append_slide`` / ``replace_text`` operations to a .pptx payload.

    ``append_slide`` adds a title+bullets slide (bullet text from ``bullets``,
    ``lines``, or ``text``); ``replace_text`` substitutes ``old`` with ``new``
    in every shape that exposes text. The first malformed or unknown operation
    aborts with a failed ToolResult.
    """
    from pptx import Presentation

    src_b64 = args.get("file_base64")
    operations = args.get("operations") or []
    if not src_b64:
        return ToolResult(success=False, result=None, error="file_base64 is required for pptx_update")
    if not isinstance(operations, list) or not operations:
        return ToolResult(success=False, result=None, error="operations must be non-empty array")

    file_name = self._sanitize_file_name(args.get("file_name"), "updated.pptx", force_ext=".pptx")
    prs = Presentation(BytesIO(self._bytes_from_b64(src_b64)))

    for op in operations:
        if not isinstance(op, dict):
            return ToolResult(success=False, result=None, error="Each operation must be object")
        op_type = str(op.get("type") or "").strip().lower()

        if op_type == "append_slide":
            slide = prs.slides.add_slide(prs.slide_layouts[1])
            if slide.shapes.title:
                slide.shapes.title.text = str(op.get("title") or "")
            # Bullet text may arrive under "bullets", "lines", or a single "text".
            lines = op.get("bullets")
            if lines is None:
                lines = op.get("lines")
            if lines is None:
                lines = [op.get("text")] if op.get("text") is not None else []
            if len(slide.placeholders) > 1:
                frame = slide.placeholders[1].text_frame
                frame.clear()
                # First line reuses the frame's initial paragraph; the rest append.
                for index, line in enumerate(lines):
                    if index == 0:
                        frame.text = str(line)
                    else:
                        frame.add_paragraph().text = str(line)
        elif op_type == "replace_text":
            old = str(op.get("old") or "")
            new = str(op.get("new") or "")
            if not old:
                return ToolResult(success=False, result=None, error="replace_text requires old")
            for slide in prs.slides:
                for shape in slide.shapes:
                    if not hasattr(shape, "text"):
                        continue
                    text = shape.text or ""
                    if old in text:
                        shape.text = text.replace(old, new)
        else:
            return ToolResult(success=False, result=None, error=f"Unsupported pptx_update operation: {op_type}")

    out = BytesIO()
    prs.save(out)
    return ToolResult(
        success=True,
        result={"message": f"PPTX updated: {file_name}"},
        file_base64=self._b64_from_bytes(out.getvalue()),
        file_name=file_name,
        file_mime="application/vnd.openxmlformats-officedocument.presentationml.presentation",
    )
|
||||||
|
|
||||||
def _file_pdf_merge(self, args: Dict[str, Any]) -> ToolResult:
|
def _file_pdf_merge(self, args: Dict[str, Any]) -> ToolResult:
|
||||||
from pypdf import PdfReader, PdfWriter
|
from pypdf import PdfReader, PdfWriter
|
||||||
|
|
||||||
|
|||||||
Reference in New Issue
Block a user