unity-mcp/Server/src/services/tools/batch_execute.py

79 lines
2.8 KiB
Python
Raw Normal View History

[FEATURE] Custom Tool Fix and Add inspection window for all the tools (#414) * Update .Bat file and Bug fix on ManageScript * Update the .Bat file to include runtime folder * Fix the inconsistent EditorPrefs variable so the GUI change on Script Validation could cause real change. * Further changes String to Int for consistency * [Custom Tool] Roslyn Runtime Compilation Allows users to generate/compile codes during Playmode * Fix based on CR * Create claude_skill_unity.zip Upload the unity_claude_skill that can be uploaded to Claude for a combo of unity-mcp-skill. * Update for Custom_Tool Fix and Detection 1. Fix Original Roslyn Compilation Custom Tool to fit the V8 standard 2. Add a new panel in the GUI to see and toggle/untoggle the tools. The toggle feature will be implemented in the future, right now its implemented here to discuss with the team if this is a good feature to add; 3. Add few missing summary in certain tools * Revert "Update for Custom_Tool Fix and Detection" This reverts commit ae8cfe5e256c70ac4a16c79d50341a39cbac18ba. * Update README.md * Reapply "Update for Custom_Tool Fix and Detection" This reverts commit f423c2f25e9ccff4f3b89d1d360ee9cf13143733. * Update ManageScript.cs Fix the layout problem of manage_script in the panel * Update To comply with the current server setting * Update on Batch Tested object generation/modification with batch and it works perfectly! We should push and let users test for a while and see PS: I tried both VS Copilot and Claude Desktop. Claude Desktop works but VS Copilot does not due to the nested structure of batch. Will look into it more. * Revert "Merge pull request #1 from Scriptwonder/batching" This reverts commit 55ee76810be161d414e1f5f5abaa5ee30ddd0052, reversing changes made to ae2eedd7fb2c6a66ff008bacac481aefb1b0d176.
2025-12-08 08:38:32 +08:00
"""Defines the batch_execute tool for orchestrating multiple Unity MCP commands."""
from __future__ import annotations
from typing import Annotated, Any
from fastmcp import Context
from services.registry import mcp_for_unity_tool
from services.tools import get_unity_instance_from_context
from transport.unity_transport import send_with_unity_instance
from transport.legacy.unity_connection import async_send_command_with_retry
MAX_COMMANDS_PER_BATCH = 25
@mcp_for_unity_tool(
    name="batch_execute",
    description=(
        "Runs a list of MCP tool calls as one batch. Use it to send a full sequence of commands, "
        "inspect the results, then submit the next batch for the following step."
    ),
)
async def batch_execute(
    ctx: Context,
    commands: Annotated[list[dict[str, Any]], "List of commands with 'tool' and 'params' keys."],
    parallel: Annotated[bool | None, "Attempt to run read-only commands in parallel"] = None,
    fail_fast: Annotated[bool | None, "Stop processing after the first failure"] = None,
    max_parallelism: Annotated[int | None, "Hint for the maximum number of parallel workers"] = None,
) -> dict[str, Any]:
    """Proxy the batch_execute tool to the Unity Editor transporter.

    Validates and normalizes the incoming command list, then forwards a single
    ``batch_execute`` payload to the resolved Unity instance.

    Args:
        ctx: FastMCP request context used to resolve the target Unity instance.
        commands: Non-empty list of command specs; each entry is a dict with a
            string ``tool`` name and an optional ``params`` dict.
        parallel: Optional hint to run read-only commands concurrently.
        fail_fast: Optional flag to stop processing after the first failure.
        max_parallelism: Optional cap on the number of parallel workers.

    Returns:
        The response dict produced by the Unity transport layer.

    Raises:
        ValueError: If ``commands`` is empty, exceeds MAX_COMMANDS_PER_BATCH,
            or contains a malformed command specification.
    """
    unity_instance = get_unity_instance_from_context(ctx)
    normalized_commands = _normalize_commands(commands)
    payload: dict[str, Any] = {
        "commands": normalized_commands,
    }
    # Only include the optional knobs when the caller supplied them, so the
    # Unity side can apply its own defaults for absent keys.
    if parallel is not None:
        payload["parallel"] = bool(parallel)
    if fail_fast is not None:
        payload["failFast"] = bool(fail_fast)
    if max_parallelism is not None:
        payload["maxParallelism"] = int(max_parallelism)
    return await send_with_unity_instance(
        async_send_command_with_retry,
        unity_instance,
        "batch_execute",
        payload,
    )


def _normalize_commands(commands: list[dict[str, Any]]) -> list[dict[str, Any]]:
    """Validate a batch command list and return normalized {'tool', 'params'} dicts.

    Raises ValueError on the first malformed entry, identifying it by index
    (or by tool name once the tool name is known to be valid).
    """
    if not isinstance(commands, list) or not commands:
        raise ValueError("'commands' must be a non-empty list of command specifications")
    if len(commands) > MAX_COMMANDS_PER_BATCH:
        raise ValueError(
            f"batch_execute currently supports up to {MAX_COMMANDS_PER_BATCH} commands; received {len(commands)}"
        )
    normalized: list[dict[str, Any]] = []
    for index, command in enumerate(commands):
        if not isinstance(command, dict):
            raise ValueError(f"Command at index {index} must be an object with 'tool' and 'params' keys")
        tool_name = command.get("tool")
        if not tool_name or not isinstance(tool_name, str):
            raise ValueError(f"Command at index {index} is missing a valid 'tool' name")
        # A missing or explicit-None 'params' means "no parameters"; any other
        # non-dict value is a caller error.
        params = command.get("params", {})
        if params is None:
            params = {}
        if not isinstance(params, dict):
            raise ValueError(f"Command '{tool_name}' must specify parameters as an object/dict")
        normalized.append({
            "tool": tool_name,
            "params": params,
        })
    return normalized