refactor: rename shell tools to terminal tools

This commit is contained in:
Timothy
2026-04-30 19:41:16 -07:00
parent 0e8efa7bcc
commit 0c6f0f8aef
44 changed files with 450 additions and 446 deletions
-19
View File
@@ -1,19 +0,0 @@
#!/usr/bin/env python3
"""shell-tools MCP server entry point.
Wired into _DEFAULT_LOCAL_SERVERS in core/framework/loader/mcp_registry.py
so that running ``uv run python shell_tools_server.py --stdio`` from this
directory starts the server. The cwd of ``tools/`` puts ``src/shell_tools``
on the import path via uv's workspace setup.
Usage:
uv run python shell_tools_server.py --stdio # for agent integration
uv run python shell_tools_server.py --port 4004 # HTTP for inspection
"""
from __future__ import annotations
# Thin launcher by design: argument parsing, transport selection, and the
# lifespan hooks all live in ``shell_tools.server.main`` — keep this file
# free of logic so both invocation styles behave identically.
from shell_tools.server import main
if __name__ == "__main__":
    main()
-43
View File
@@ -1,43 +0,0 @@
"""shell-tools — Terminal/shell capabilities MCP server.

Exposes ten tools (prefix ``shell_*``) covering:

- Foreground exec with auto-promotion to background (``shell_exec``)
- Background job lifecycle (``shell_job_*``)
- Persistent PTY-backed bash sessions (``shell_pty_*``)
- Filesystem search (``shell_rg``, ``shell_find``)
- Truncation handle retrieval (``shell_output_get``)

Bash-only on POSIX. zsh is rejected at the shell-resolver level. See
``common/limits.py:_resolve_shell`` for the single enforcement point.
"""
from __future__ import annotations

from typing import TYPE_CHECKING

if TYPE_CHECKING:
    from fastmcp import FastMCP


def register_shell_tools(mcp: FastMCP) -> list[str]:
    """Wire all ten shell-tools into *mcp* and report what landed.

    Sub-module imports are deferred to call time so importing the package
    stays cheap for type-checkers and test collection.

    Returns:
        The registered tool names carrying the ``shell_`` prefix, so the
        caller can log / smoke-test how many landed.
    """
    from shell_tools.exec import register_exec_tools
    from shell_tools.jobs.tools import register_job_tools
    from shell_tools.output import register_output_tools
    from shell_tools.pty.tools import register_pty_tools
    from shell_tools.search.tools import register_search_tools

    # Registration order is part of the observable tool listing — keep it
    # stable: exec, jobs, pty, search, output.
    for registrar in (
        register_exec_tools,
        register_job_tools,
        register_pty_tools,
        register_search_tools,
        register_output_tools,
    ):
        registrar(mcp)

    return [
        tool_name
        for tool_name in mcp._tool_manager._tools
        if tool_name.startswith("shell_")
    ]


__all__ = ["register_shell_tools"]
-6
View File
@@ -1,6 +0,0 @@
"""Background job management for shell-tools."""
from shell_tools.jobs.manager import JobManager, JobRecord, get_manager
from shell_tools.jobs.tools import register_job_tools
__all__ = ["JobManager", "JobRecord", "get_manager", "register_job_tools"]
+43
View File
@@ -0,0 +1,43 @@
"""terminal-tools — Terminal capabilities MCP server.

Exposes ten tools (prefix ``terminal_*``) covering:

- Foreground exec with auto-promotion to background (``terminal_exec``)
- Background job lifecycle (``terminal_job_*``)
- Persistent PTY-backed bash sessions (``terminal_pty_*``)
- Filesystem search (``terminal_rg``, ``terminal_find``)
- Truncation handle retrieval (``terminal_output_get``)

Bash-only on POSIX. zsh is rejected at the shell-resolver level. See
``common/limits.py:_resolve_shell`` for the single enforcement point.
"""
from __future__ import annotations

from typing import TYPE_CHECKING

if TYPE_CHECKING:
    from fastmcp import FastMCP


def register_terminal_tools(mcp: FastMCP) -> list[str]:
    """Wire all ten terminal-tools into *mcp* and report what landed.

    Sub-module imports are deferred to call time so importing the package
    stays cheap for type-checkers and test collection.

    Returns:
        The registered tool names carrying the ``terminal_`` prefix, so the
        caller can log / smoke-test how many landed.
    """
    from terminal_tools.exec import register_exec_tools
    from terminal_tools.jobs.tools import register_job_tools
    from terminal_tools.output import register_output_tools
    from terminal_tools.pty.tools import register_pty_tools
    from terminal_tools.search.tools import register_search_tools

    # Registration order is part of the observable tool listing — keep it
    # stable: exec, jobs, pty, search, output.
    for registrar in (
        register_exec_tools,
        register_job_tools,
        register_pty_tools,
        register_search_tools,
        register_output_tools,
    ):
        registrar(mcp)

    return [
        tool_name
        for tool_name in mcp._tool_manager._tools
        if tool_name.startswith("terminal_")
    ]


__all__ = ["register_terminal_tools"]
@@ -1,7 +1,7 @@
"""Shell resolution + resource limits.
The single place that decides which shell binary we invoke and how to
strip zsh-specific environment leakage. Per the shell-tools security
strip zsh-specific environment leakage. Per the terminal-tools security
stance (see ``destructive_warning.py`` neighbours), zsh constructs
(``zmodload``, ``=cmd``, ``zpty``, ``ztcp``) bypass bash-shaped
checks — refusing zsh isn't aesthetic, it's a deliberate hardening
@@ -47,7 +47,7 @@ def _resolve_shell(shell: bool | str) -> str | None:
lower = shell.lower()
if "zsh" in lower:
raise ZshRefused(
f"shell={shell!r} rejected: shell-tools is bash-only on POSIX. "
f"shell={shell!r} rejected: terminal-tools is bash-only on POSIX. "
"Use shell=True (bash) or omit the shell parameter to exec directly. "
"This is a deliberate security stance — zsh has command/builtin "
"classes (zmodload, =cmd, zpty, ztcp) that bypass bash-shaped checks."
@@ -2,7 +2,7 @@
When an exec produces more output than the inline cap (default 256 KB),
the surplus is kept here under a short-lived handle. The agent passes
the handle to ``shell_output_get`` to paginate the rest. Handles
the handle to ``terminal_output_get`` to paginate the rest. Handles
expire after 5 minutes; total store size is capped at 64 MB with LRU
eviction so the server can't be DoS'd by a chatty subprocess.
@@ -9,9 +9,9 @@ from __future__ import annotations
from collections.abc import Sequence
from shell_tools.common.destructive_warning import get_warning
from shell_tools.common.output_store import get_store
from shell_tools.common.semantic_exit import classify
from terminal_tools.common.destructive_warning import get_warning
from terminal_tools.common.output_store import get_store
from terminal_tools.common.semantic_exit import classify
def _truncate_bytes(buf: bytes, max_bytes: int) -> tuple[str, int, str]:
@@ -51,11 +51,11 @@ def build_exec_envelope(
) -> dict:
"""Construct the standard exec envelope.
See ``shell-tools-foundations`` SKILL for the field semantics. The
See ``terminal-tools-foundations`` SKILL for the field semantics. The
inline ``stdout``/``stderr`` are decoded and trimmed; if either
overflows ``max_output_kb`` the *full* bytes are stashed in the
output store under ``output_handle`` for retrieval via
``shell_output_get``. Both streams share the same handle (with
``terminal_output_get``. Both streams share the same handle (with
``out_<hex>:stdout`` / ``out_<hex>:stderr`` suffixes); when both
overflow, the agent uses the suffix to pick a stream.
"""
@@ -1,4 +1,4 @@
"""``shell_exec`` — foreground exec with auto-promotion to background.
"""``terminal_exec`` — foreground exec with auto-promotion to background.
The flagship tool. Most agent terminal interactions go through here:
fast commands (<30s) return inline with the standard envelope; longer
@@ -16,7 +16,7 @@ Implementation notes:
We hand them to JobManager which spawns pump threads to fill ring
buffers from that point on. The agent sees an envelope with
``auto_backgrounded=True, exit_code=None, job_id=<>`` and
transitions to ``shell_job_logs``. **There's no early-output loss**
transitions to ``terminal_job_logs``. **There's no early-output loss**
because the pumps start before we return from the tool call.
- For pure-foreground use (``auto_background_after_sec=0``), we
fall back to ``proc.communicate(timeout=timeout_sec)`` which has
@@ -31,13 +31,19 @@ import threading
import time
from typing import TYPE_CHECKING
from shell_tools.common.limits import (
from terminal_tools.common.limits import (
ZshRefused,
_resolve_shell,
coerce_limits,
make_preexec_fn,
sanitized_env,
)
from terminal_tools.common.ring_buffer import RingBuffer
from terminal_tools.common.truncation import build_exec_envelope
from terminal_tools.jobs.manager import JobLimitExceeded, get_manager
if TYPE_CHECKING:
from fastmcp import FastMCP
# Tokens that indicate the user passed a shell-syntax command (pipes,
@@ -50,17 +56,11 @@ from shell_tools.common.limits import (
_SHELL_METACHARS: frozenset[str] = frozenset(
{"|", "&&", "||", ";", ">", "<", ">>", "<<", "&", "2>", "2>&1", "|&"}
)
from shell_tools.common.ring_buffer import RingBuffer
from shell_tools.common.truncation import build_exec_envelope
from shell_tools.jobs.manager import JobLimitExceeded, get_manager
if TYPE_CHECKING:
from fastmcp import FastMCP
def register_exec_tools(mcp: FastMCP) -> None:
@mcp.tool()
def shell_exec(
def terminal_exec(
command: str,
cwd: str | None = None,
env: dict[str, str] | None = None,
@@ -75,7 +75,7 @@ def register_exec_tools(mcp: FastMCP) -> None:
Past auto_background_after_sec, the call auto-promotes to a background
job and returns immediately with `auto_backgrounded=True, job_id=...`
poll with shell_job_logs(job_id, since_offset=...) to read the rest.
poll with terminal_job_logs(job_id, since_offset=...) to read the rest.
Set auto_background_after_sec=0 to force pure foreground (kill on
timeout_sec).
@@ -98,9 +98,9 @@ def register_exec_tools(mcp: FastMCP) -> None:
limits: Optional setrlimit caps. Keys: cpu_sec, rss_mb,
fsize_mb, nofile.
max_output_kb: Inline output cap. Overflow stashes to an
output_handle for retrieval via shell_output_get.
output_handle for retrieval via terminal_output_get.
Returns the standard envelope: see `shell-tools-foundations` skill.
Returns the standard envelope: see `terminal-tools-foundations` skill.
"""
# Auto-detect shell-syntax commands. If the agent passes
# ``shell=False`` (the default) but the command contains a pipe,
@@ -0,0 +1,6 @@
"""Background job management for terminal-tools."""
from terminal_tools.jobs.manager import JobManager, JobRecord, get_manager
from terminal_tools.jobs.tools import register_job_tools
__all__ = ["JobManager", "JobRecord", "get_manager", "register_job_tools"]
@@ -1,17 +1,17 @@
"""Background job manager.
Owns the long-lived ``Popen`` instances backing ``shell_job_*`` and
``shell_exec`` auto-promotion. Each job has up to two ring buffers
Owns the long-lived ``Popen`` instances backing ``terminal_job_*`` and
``terminal_exec`` auto-promotion. Each job has up to two ring buffers
(stdout / stderr, or one merged) fed by background pump threads.
Design notes:
- We don't use asyncio here. FastMCP's tool handlers run in a worker
thread; subprocess + threads compose more naturally with that
model than asyncio Subprocess (which would need its own loop).
- ``shell_exec`` "promotes" by adopting an already-running Popen
- ``terminal_exec`` "promotes" by adopting an already-running Popen
into the manager — it doesn't re-spawn. The pump threads were
already filling buffers in the exec path.
- Hard concurrency cap (env: ``SHELL_TOOLS_MAX_JOBS``, default 32).
- Hard concurrency cap (env: ``TERMINAL_TOOLS_MAX_JOBS``, default 32).
The cap is the only non-bypassable safety pin per the soft-
guardrails design.
- On server shutdown the lifespan hook calls ``shutdown_all()``
@@ -31,11 +31,11 @@ from collections.abc import Sequence
from dataclasses import dataclass, field
from typing import Any
from shell_tools.common.ring_buffer import RingBuffer
from terminal_tools.common.ring_buffer import RingBuffer
_MAX_JOBS_DEFAULT = 32
_DEFAULT_RING_BYTES = 4 * 1024 * 1024
_RECENT_EXIT_KEEP = 50 # exited jobs we still surface to ``shell_job_manage(action="list")``
_RECENT_EXIT_KEEP = 50 # exited jobs we still surface to ``terminal_job_manage(action="list")``
@dataclass(slots=True)
@@ -53,7 +53,7 @@ class JobRecord:
exited_at: float | None = None
exit_code: int | None = None
signaled: bool = False
# Adopted=True when the job started life as a foreground shell_exec
# Adopted=True when the job started life as a foreground terminal_exec
# and was promoted past the auto-background budget.
adopted: bool = False
@@ -88,7 +88,7 @@ class JobLimitExceeded(RuntimeError):
class JobManager:
def __init__(self, max_jobs: int | None = None, ring_bytes: int = _DEFAULT_RING_BYTES):
self._max_jobs = max_jobs or int(os.getenv("SHELL_TOOLS_MAX_JOBS", str(_MAX_JOBS_DEFAULT)))
self._max_jobs = max_jobs or int(os.getenv("TERMINAL_TOOLS_MAX_JOBS", str(_MAX_JOBS_DEFAULT)))
self._ring_bytes = ring_bytes
self._jobs: dict[str, JobRecord] = {}
# FIFO of recently-exited job_ids so list/inspect can still
@@ -116,8 +116,8 @@ class JobManager:
"""Spawn a process and start pumping its output into ring buffers."""
if self.active_count() >= self._max_jobs:
raise JobLimitExceeded(
f"shell-tools job cap reached ({self._max_jobs}). "
"Wait for a job to finish or raise SHELL_TOOLS_MAX_JOBS."
f"terminal-tools job cap reached ({self._max_jobs}). "
"Wait for a job to finish or raise TERMINAL_TOOLS_MAX_JOBS."
)
proc = self._spawn(command, cwd=cwd, env=env, shell=shell, merge_stderr=merge_stderr, preexec_fn=preexec_fn)
@@ -137,10 +137,10 @@ class JobManager:
) -> JobRecord:
"""Adopt a Popen that's already running with pumps in flight.
Used by ``shell_exec`` for auto-promotion: the foreground path
Used by ``terminal_exec`` for auto-promotion: the foreground path
had already started pump threads filling its own ring buffers.
We hand the buffers + pumps over to the manager so the agent
can resume reading via ``shell_job_logs``.
can resume reading via ``terminal_job_logs``.
"""
if self.active_count() >= self._max_jobs:
# Mid-call cap exceeded — kill and report.
@@ -149,7 +149,7 @@ class JobManager:
except Exception:
pass
raise JobLimitExceeded(
f"shell-tools job cap reached ({self._max_jobs}); foreground exec was killed during auto-promotion."
f"terminal-tools job cap reached ({self._max_jobs}); foreground exec was killed during auto-promotion."
)
record = self._wrap(
proc,
@@ -259,7 +259,7 @@ class JobManager:
) -> subprocess.Popen[bytes]:
# Resolve shell: a string shell is coerced to ``[<shell>, "-c", command]``,
# bool=True means /bin/bash with the same shape.
from shell_tools.common.limits import _resolve_shell
from terminal_tools.common.limits import _resolve_shell
resolved = _resolve_shell(shell)
if resolved is not None:
@@ -1,5 +1,5 @@
"""Job-control MCP tools: ``shell_job_start``, ``shell_job_logs``,
``shell_job_manage``.
"""Job-control MCP tools: ``terminal_job_start``, ``terminal_job_logs``,
``terminal_job_manage``.
Three tools, not seven: ``_logs`` rolls in status + wait, ``_manage``
covers list + signals + stdin so the agent has fewer tool names to
@@ -12,8 +12,8 @@ from __future__ import annotations
import signal
from typing import TYPE_CHECKING, Any
from shell_tools.common.limits import coerce_limits, make_preexec_fn, sanitized_env
from shell_tools.jobs.manager import JobLimitExceeded, get_manager
from terminal_tools.common.limits import coerce_limits, make_preexec_fn, sanitized_env
from terminal_tools.jobs.manager import JobLimitExceeded, get_manager
if TYPE_CHECKING:
from fastmcp import FastMCP
@@ -33,7 +33,7 @@ def register_job_tools(mcp: FastMCP) -> None:
manager = get_manager()
@mcp.tool()
def shell_job_start(
def terminal_job_start(
command: str,
cwd: str | None = None,
env: dict[str, str] | None = None,
@@ -42,11 +42,11 @@ def register_job_tools(mcp: FastMCP) -> None:
name: str | None = None,
limits: dict[str, int] | None = None,
) -> dict:
"""Spawn a background process. Returns a job_id you poll with shell_job_logs.
"""Spawn a background process. Returns a job_id you poll with terminal_job_logs.
Use this when work might run >1 minute, when you want to keep doing
other things while it runs, or when you need to stream logs as they
arrive. Jobs die when the shell-tools server restarts they are NOT
arrive. Jobs die when the terminal-tools server restarts — they are NOT
persistent across reboots.
Args:
@@ -60,7 +60,7 @@ def register_job_tools(mcp: FastMCP) -> None:
single ring buffer. Convenient for log-shaped output where
ordering matters.
shell: True to invoke /bin/bash -c. Refuses zsh.
name: Optional human label surfaced in shell_job_manage(action="list").
name: Optional human label surfaced in terminal_job_manage(action="list").
limits: Optional resource caps applied via setrlimit before exec.
Keys: cpu_sec, rss_mb, fsize_mb, nofile.
@@ -101,7 +101,7 @@ def register_job_tools(mcp: FastMCP) -> None:
return {"error": f"{type(e).__name__}: {e}"}
@mcp.tool()
def shell_job_logs(
def terminal_job_logs(
job_id: str,
stream: str = "stdout",
since_offset: int = 0,
@@ -117,7 +117,7 @@ def register_job_tools(mcp: FastMCP) -> None:
wait_timeout_sec elapses, then returns logs and final status.
Args:
job_id: From shell_job_start (or auto-promoted from shell_exec).
job_id: From terminal_job_start (or auto-promoted from terminal_exec).
stream: "stdout" | "stderr" | "merged". Use "merged" only when the
job was started with merge_stderr=True.
since_offset: Absolute byte offset to start reading from. Pass 0
@@ -163,7 +163,7 @@ def register_job_tools(mcp: FastMCP) -> None:
}
@mcp.tool()
def shell_job_manage(
def terminal_job_manage(
action: str,
job_id: str | None = None,
data: str | None = None,
@@ -1,10 +1,10 @@
"""``shell_output_get`` — retrieve truncated output via handle."""
"""``terminal_output_get`` — retrieve truncated output via handle."""
from __future__ import annotations
from typing import TYPE_CHECKING
from shell_tools.common.output_store import get_store
from terminal_tools.common.output_store import get_store
if TYPE_CHECKING:
from fastmcp import FastMCP
@@ -12,14 +12,14 @@ if TYPE_CHECKING:
def register_output_tools(mcp: FastMCP) -> None:
@mcp.tool()
def shell_output_get(
def terminal_output_get(
output_handle: str,
since_offset: int = 0,
max_kb: int = 64,
) -> dict:
"""Retrieve a slice of truncated output by handle.
When shell_exec or shell_job_logs returns more output than fits inline,
When terminal_exec or terminal_job_logs returns more output than fits inline,
you'll see `output_handle: "out_<hex>"`. Pass it here with successive
offsets to paginate. The full output is preserved (combined stdout+stderr
with `--- stdout ---` / `--- stderr ---` separators) for 5 minutes.
@@ -1,5 +1,5 @@
"""Persistent PTY-backed shell sessions."""
from shell_tools.pty.tools import register_pty_tools
from terminal_tools.pty.tools import register_pty_tools
__all__ = ["register_pty_tools"]
@@ -33,8 +33,8 @@ import threading
import time
import uuid
from shell_tools.common.limits import _resolve_shell, sanitized_env
from shell_tools.common.ring_buffer import RingBuffer
from terminal_tools.common.limits import _resolve_shell, sanitized_env
from terminal_tools.common.ring_buffer import RingBuffer
_BUF_BYTES = 2 * 1024 * 1024
@@ -63,7 +63,7 @@ class PtySession:
self.session_id = "pty_" + uuid.uuid4().hex[:10]
self.shell_path = _resolve_shell(shell) or "/bin/bash"
self._sentinel_token = uuid.uuid4().hex
self._sentinel = f"__SHELLTOOLS_PROMPT_{self._sentinel_token}__"
self._sentinel = f"__TERMINALTOOLS_PROMPT_{self._sentinel_token}__"
self._sentinel_re = re.compile(re.escape(self._sentinel))
# Build env: zsh leakage stripped, prompt set to our sentinel.
@@ -89,7 +89,7 @@ class PtySession:
argv = [self.shell_path, "--norc", "--noprofile", "-i"]
os.execve(self.shell_path, argv, merged_env)
except Exception as e: # pragma: no cover — child exec
os.write(2, f"shell-tools pty: exec failed: {e}\n".encode())
os.write(2, f"terminal-tools pty: exec failed: {e}\n".encode())
os._exit(127)
# Parent
@@ -1,6 +1,6 @@
"""Three PTY tools: ``shell_pty_open``, ``shell_pty_run``, ``shell_pty_close``.
"""Three PTY tools: ``terminal_pty_open``, ``terminal_pty_run``, ``terminal_pty_close``.
Per-server hard cap on concurrent sessions (env: ``SHELL_TOOLS_MAX_PTY``,
Per-server hard cap on concurrent sessions (env: ``TERMINAL_TOOLS_MAX_PTY``,
default 8) prevents PTY exhaustion. Idle sessions older than
``idle_timeout_sec`` are reaped lazily on every ``_open`` so an
abandoned session can't leak a bash forever.
@@ -14,7 +14,7 @@ import threading
import time
from typing import TYPE_CHECKING
from shell_tools.common.limits import ZshRefused
from terminal_tools.common.limits import ZshRefused
if TYPE_CHECKING:
from fastmcp import FastMCP
@@ -27,7 +27,7 @@ class _PtyRegistry:
def __init__(self):
self._sessions: dict[str, PtySession] = {} # noqa: F821
self._lock = threading.Lock()
self._max = int(os.getenv("SHELL_TOOLS_MAX_PTY", str(_MAX_PTY_DEFAULT)))
self._max = int(os.getenv("TERMINAL_TOOLS_MAX_PTY", str(_MAX_PTY_DEFAULT)))
def reap_idle(self) -> None:
"""Drop sessions whose idle time exceeded their idle_timeout_sec."""
@@ -55,8 +55,8 @@ class _PtyRegistry:
if len(self._sessions) >= self._max:
# Caller should have reaped first; treat as cap.
raise RuntimeError(
f"shell-tools PTY cap reached ({self._max}). "
"Close idle sessions or raise SHELL_TOOLS_MAX_PTY."
f"terminal-tools PTY cap reached ({self._max}). "
"Close idle sessions or raise TERMINAL_TOOLS_MAX_PTY."
)
self._sessions[sess.session_id] = sess
@@ -95,31 +95,31 @@ def register_pty_tools(mcp: FastMCP) -> None:
# Register stub tools that report unsupported; keeps the tool
# surface uniform across platforms even when PTY is unavailable.
@mcp.tool()
def shell_pty_open(*args, **kwargs) -> dict:
def terminal_pty_open(*args, **kwargs) -> dict:
"""Persistent PTY-backed bash session. POSIX-only.
Windows is not supported in v1 use shell_exec / shell_job_*
Windows is not supported in v1 use terminal_exec / terminal_job_*
for non-interactive work. The PTY tools require stdlib pty,
which exists only on Linux + macOS.
"""
return {"error": "shell_pty_* tools are POSIX-only; not supported on Windows"}
return {"error": "terminal_pty_* tools are POSIX-only; not supported on Windows"}
@mcp.tool()
def shell_pty_run(*args, **kwargs) -> dict: # noqa: D401
def terminal_pty_run(*args, **kwargs) -> dict: # noqa: D401
"""Persistent PTY-backed bash session. POSIX-only."""
return {"error": "shell_pty_* tools are POSIX-only; not supported on Windows"}
return {"error": "terminal_pty_* tools are POSIX-only; not supported on Windows"}
@mcp.tool()
def shell_pty_close(*args, **kwargs) -> dict: # noqa: D401
def terminal_pty_close(*args, **kwargs) -> dict: # noqa: D401
"""Persistent PTY-backed bash session. POSIX-only."""
return {"error": "shell_pty_* tools are POSIX-only; not supported on Windows"}
return {"error": "terminal_pty_* tools are POSIX-only; not supported on Windows"}
return
from shell_tools.pty.session import PtySession, SessionBusy
from terminal_tools.pty.session import PtySession, SessionBusy
@mcp.tool()
def shell_pty_open(
def terminal_pty_open(
cwd: str | None = None,
env: dict[str, str] | None = None,
cols: int = 120,
@@ -130,11 +130,11 @@ def register_pty_tools(mcp: FastMCP) -> None:
Use a session when you need state across calls building env vars,
navigating with cd, driving REPLs, or responding to interactive
prompts (sudo, ssh, mysql). For one-shot work, use shell_exec
prompts (sudo, ssh, mysql). For one-shot work, use terminal_exec
instead.
The session runs vanilla bash (--norc --noprofile) so dotfiles
don't surprise you. A unique PS1 sentinel is set so shell_pty_run
don't surprise you. A unique PS1 sentinel is set so terminal_pty_run
can unambiguously detect command completion. macOS users: this
is /bin/bash, not zsh, by deliberate policy — explicit
shell="/bin/zsh" overrides are rejected.
@@ -166,7 +166,7 @@ def register_pty_tools(mcp: FastMCP) -> None:
}
@mcp.tool()
def shell_pty_run(
def terminal_pty_run(
session_id: str,
command: str | None = None,
expect: str | None = None,
@@ -187,7 +187,7 @@ def register_pty_tools(mcp: FastMCP) -> None:
Typically follows raw_send.
Args:
session_id: From shell_pty_open.
session_id: From terminal_pty_open.
command: The text to send. None when read_only=True.
expect: Regex to wait for INSTEAD of the default prompt sentinel.
Useful when the command launches a REPL with its own prompt.
@@ -222,12 +222,12 @@ def register_pty_tools(mcp: FastMCP) -> None:
return {"error": str(e)}
@mcp.tool()
def shell_pty_close(session_id: str, force: bool = False) -> dict:
def terminal_pty_close(session_id: str, force: bool = False) -> dict:
"""Terminate a PTY session. Always do this when you're done — leaked
sessions count against the per-server PTY cap.
Args:
session_id: From shell_pty_open.
session_id: From terminal_pty_open.
force: Skip the graceful "exit\\n" attempt and SIGTERM/SIGKILL.
Returns: {exit_code, final_output, already_closed}
@@ -1,5 +1,5 @@
"""Filesystem search tools (rg + find)."""
from shell_tools.search.tools import register_search_tools
from terminal_tools.search.tools import register_search_tools
__all__ = ["register_search_tools"]
@@ -1,4 +1,4 @@
"""``shell_rg`` and ``shell_find`` — structured wrappers over ripgrep / find.
"""``terminal_rg`` and ``terminal_find`` — structured wrappers over ripgrep / find.
Distinct from ``files-tools.search_files`` (project-relative,
code-editor-tuned) these accept arbitrary paths and surface the
@@ -23,7 +23,7 @@ _MAX_OUTPUT_BYTES = 256 * 1024
def register_search_tools(mcp: FastMCP) -> None:
@mcp.tool()
def shell_rg(
def terminal_rg(
pattern: str,
path: str = ".",
glob: str | None = None,
@@ -128,7 +128,7 @@ def register_search_tools(mcp: FastMCP) -> None:
}
@mcp.tool()
def shell_find(
def terminal_find(
path: str,
name: str | None = None,
iname: str | None = None,
@@ -141,7 +141,7 @@ def register_search_tools(mcp: FastMCP) -> None:
) -> dict:
"""Run `find` with structured predicates.
For tree views or stat-like info on a single path, use shell_exec
For tree views or stat-like info on a single path, use terminal_exec
("ls -la", "tree -L 2", "stat foo"). This tool is for predicate-driven
searches (find me .log files modified in the last 7 days bigger than 1MB).
@@ -1,8 +1,8 @@
"""shell-tools FastMCP server — entry module.
"""terminal-tools FastMCP server — entry module.
Run via:
uv run python -m shell_tools.server --stdio
uv run python shell_tools_server.py --stdio (preferred, see _DEFAULT_LOCAL_SERVERS)
uv run python -m terminal_tools.server --stdio
uv run python terminal_tools_server.py --stdio (preferred, see _DEFAULT_LOCAL_SERVERS)
"""
from __future__ import annotations
@@ -23,7 +23,7 @@ def setup_logger() -> None:
if not logger.handlers:
stream = sys.stderr if "--stdio" in sys.argv else sys.stdout
handler = logging.StreamHandler(stream)
handler.setFormatter(logging.Formatter("[shell-tools] %(message)s"))
handler.setFormatter(logging.Formatter("[terminal-tools] %(message)s"))
logger.addHandler(handler)
logger.setLevel(logging.INFO)
@@ -45,9 +45,9 @@ if "--stdio" in sys.argv:
from fastmcp import FastMCP # noqa: E402
from shell_tools import register_shell_tools # noqa: E402
from shell_tools.jobs.manager import get_manager # noqa: E402
from shell_tools.pty.tools import get_registry as get_pty_registry # noqa: E402
from terminal_tools import register_terminal_tools # noqa: E402
from terminal_tools.jobs.manager import get_manager # noqa: E402
from terminal_tools.pty.tools import get_registry as get_pty_registry # noqa: E402
@asynccontextmanager
@@ -93,7 +93,7 @@ async def _parent_watchdog(parent_pid: int) -> None:
while True:
await asyncio.sleep(2.0)
if not _is_alive(parent_pid):
logger.warning("Parent PID %d gone — shell-tools exiting", parent_pid)
logger.warning("Parent PID %d gone — terminal-tools exiting", parent_pid)
try:
get_manager().shutdown_all(grace_sec=1.0)
except Exception:
@@ -119,25 +119,25 @@ def _atexit_reap() -> None:
atexit.register(_atexit_reap)
mcp = FastMCP("shell-tools", lifespan=_lifespan)
mcp = FastMCP("terminal-tools", lifespan=_lifespan)
def main() -> None:
parser = argparse.ArgumentParser(description="shell-tools MCP server")
parser.add_argument("--port", type=int, default=int(os.getenv("SHELL_TOOLS_PORT", "4004")))
parser = argparse.ArgumentParser(description="terminal-tools MCP server")
parser.add_argument("--port", type=int, default=int(os.getenv("TERMINAL_TOOLS_PORT", "4004")))
parser.add_argument("--host", default="0.0.0.0")
parser.add_argument("--stdio", action="store_true")
args = parser.parse_args()
tools = register_shell_tools(mcp)
tools = register_terminal_tools(mcp)
if not args.stdio:
logger.info("Registered %d shell-tools: %s", len(tools), tools)
logger.info("Registered %d terminal-tools: %s", len(tools), tools)
if args.stdio:
mcp.run(transport="stdio")
else:
logger.info("Starting shell-tools on %s:%d", args.host, args.port)
logger.info("Starting terminal-tools on %s:%d", args.host, args.port)
asyncio.run(mcp.run_async(transport="http", host=args.host, port=args.port))
+19
View File
@@ -0,0 +1,19 @@
#!/usr/bin/env python3
"""terminal-tools MCP server entry point.
Wired into _DEFAULT_LOCAL_SERVERS in core/framework/loader/mcp_registry.py
so that running ``uv run python terminal_tools_server.py --stdio`` from this
directory starts the server. The cwd of ``tools/`` puts ``src/terminal_tools``
on the import path via uv's workspace setup.
Usage:
uv run python terminal_tools_server.py --stdio # for agent integration
uv run python terminal_tools_server.py --port 4004 # HTTP for inspection
"""
from __future__ import annotations
# Thin launcher by design: argument parsing, transport selection, and the
# lifespan hooks all live in ``terminal_tools.server.main`` — keep this file
# free of logic so both invocation styles behave identically.
from terminal_tools.server import main
if __name__ == "__main__":
    main()
-33
View File
@@ -1,33 +0,0 @@
"""Smoke test: load the server module, register tools, assert all 10 land."""
from __future__ import annotations
# The full expected tool surface — one entry per MCP tool the server
# registers. Must be updated in lockstep with any tool rename so the
# smoke tests below catch a drifted registration list.
EXPECTED_TOOLS = {
    "shell_exec",
    "shell_job_start",
    "shell_job_logs",
    "shell_job_manage",
    "shell_pty_open",
    "shell_pty_run",
    "shell_pty_close",
    "shell_rg",
    "shell_find",
    "shell_output_get",
}
def test_register_shell_tools_lands_all_ten(mcp):
    """Registration must land exactly the ten expected tool names."""
    from shell_tools import register_shell_tools

    names = register_shell_tools(mcp)
    # Compare as sets so the failure message can spell out both directions
    # of the mismatch (tools that vanished vs. tools that appeared).
    got = set(names)
    assert got == EXPECTED_TOOLS, (
        f"missing: {EXPECTED_TOOLS - got}, extra: {got - EXPECTED_TOOLS}"
    )
def test_all_tools_have_shell_prefix(mcp):
    """Every registered name must carry the ``shell_`` prefix."""
    from shell_tools import register_shell_tools

    # Iterate the return value directly — the registrar already promises
    # a list of names, so no intermediate binding is needed.
    for n in register_shell_tools(mcp):
        assert n.startswith("shell_"), f"tool {n!r} missing shell_ prefix"
@@ -1,4 +1,4 @@
"""shell_exec — envelope shape, semantic exits, warnings, auto-promotion."""
"""terminal_exec — envelope shape, semantic exits, warnings, auto-promotion."""
from __future__ import annotations
@@ -9,10 +9,10 @@ import pytest
@pytest.fixture
def exec_tool(mcp):
from shell_tools.exec import register_exec_tools
from terminal_tools.exec import register_exec_tools
register_exec_tools(mcp)
return mcp._tool_manager._tools["shell_exec"].fn
return mcp._tool_manager._tools["terminal_exec"].fn
def test_envelope_shape_simple_echo(exec_tool):
@@ -85,7 +85,7 @@ def test_zsh_refused(exec_tool):
def test_zsh_string_refused():
"""Calling _resolve_shell with zsh path raises ZshRefused."""
from shell_tools.common.limits import ZshRefused, _resolve_shell
from terminal_tools.common.limits import ZshRefused, _resolve_shell
with pytest.raises(ZshRefused):
_resolve_shell("/bin/zsh")
@@ -108,10 +108,10 @@ def test_truncation_via_handle(exec_tool):
def test_output_handle_round_trip(exec_tool, mcp):
from shell_tools.output import register_output_tools
from terminal_tools.output import register_output_tools
register_output_tools(mcp)
output_get = mcp._tool_manager._tools["shell_output_get"].fn
output_get = mcp._tool_manager._tools["terminal_output_get"].fn
result = exec_tool(
command="python3 -c 'import sys; sys.stdout.write(\"x\" * 300_000)'",
@@ -202,7 +202,7 @@ def test_explicit_shell_true_unchanged(exec_tool):
def test_auto_promotion(exec_tool, mcp):
"""Past auto_background_after_sec, the call returns auto_backgrounded=True."""
from shell_tools.jobs.tools import register_job_tools
from terminal_tools.jobs.tools import register_job_tools
register_job_tools(mcp)
# Use a 1s budget so the test runs quickly.
@@ -218,8 +218,8 @@ def test_auto_promotion(exec_tool, mcp):
assert result["exit_code"] is None
assert elapsed < 3, "auto-promotion should return quickly past the budget"
# Take over via shell_job_logs
job_logs = mcp._tool_manager._tools["shell_job_logs"].fn
# Take over via terminal_job_logs
job_logs = mcp._tool_manager._tools["terminal_job_logs"].fn
log_result = job_logs(job_id=result["job_id"], wait_until_exit=True, wait_timeout_sec=10)
assert log_result["status"] == "exited"
assert log_result["exit_code"] == 0
@@ -9,13 +9,13 @@ import pytest
@pytest.fixture
def job_tools(mcp):
from shell_tools.jobs.tools import register_job_tools
from terminal_tools.jobs.tools import register_job_tools
register_job_tools(mcp)
return {
"start": mcp._tool_manager._tools["shell_job_start"].fn,
"logs": mcp._tool_manager._tools["shell_job_logs"].fn,
"manage": mcp._tool_manager._tools["shell_job_manage"].fn,
"start": mcp._tool_manager._tools["terminal_job_start"].fn,
"logs": mcp._tool_manager._tools["terminal_job_logs"].fn,
"manage": mcp._tool_manager._tools["terminal_job_manage"].fn,
}
@@ -12,27 +12,27 @@ pytestmark = pytest.mark.skipif(sys.platform == "win32", reason="PTY is POSIX-on
@pytest.fixture
def pty_tools(mcp):
from shell_tools.pty.tools import register_pty_tools
from terminal_tools.pty.tools import register_pty_tools
register_pty_tools(mcp)
return {
"open": mcp._tool_manager._tools["shell_pty_open"].fn,
"run": mcp._tool_manager._tools["shell_pty_run"].fn,
"close": mcp._tool_manager._tools["shell_pty_close"].fn,
"open": mcp._tool_manager._tools["terminal_pty_open"].fn,
"run": mcp._tool_manager._tools["terminal_pty_run"].fn,
"close": mcp._tool_manager._tools["terminal_pty_close"].fn,
}
def test_open_close_basic(pty_tools):
opened = pty_tools["open"]()
assert "session_id" in opened
assert opened["shell"] == "/bin/bash", "shell-tools must default to bash, not zsh"
assert opened["shell"] == "/bin/bash", "terminal-tools must default to bash, not zsh"
closed = pty_tools["close"](session_id=opened["session_id"])
assert closed.get("already_closed") in (False, None)
def test_bash_on_darwin():
"""Even on macOS, the resolved shell is /bin/bash, not /bin/zsh."""
from shell_tools.common.limits import _resolve_shell
from terminal_tools.common.limits import _resolve_shell
assert _resolve_shell(True) == "/bin/bash"
@@ -1,4 +1,4 @@
"""shell_rg + shell_find — basic functionality, structured output."""
"""terminal_rg + terminal_find — basic functionality, structured output."""
from __future__ import annotations
@@ -9,12 +9,12 @@ import pytest
@pytest.fixture
def search_tools(mcp):
from shell_tools.search.tools import register_search_tools
from terminal_tools.search.tools import register_search_tools
register_search_tools(mcp)
return {
"rg": mcp._tool_manager._tools["shell_rg"].fn,
"find": mcp._tool_manager._tools["shell_find"].fn,
"rg": mcp._tool_manager._tools["terminal_rg"].fn,
"find": mcp._tool_manager._tools["terminal_find"].fn,
}
@@ -6,7 +6,7 @@ import pytest
def test_resolve_shell_rejects_zsh():
from shell_tools.common.limits import ZshRefused, _resolve_shell
from terminal_tools.common.limits import ZshRefused, _resolve_shell
for path in ("/bin/zsh", "/usr/bin/zsh", "/usr/local/bin/zsh", "ZSH"):
with pytest.raises(ZshRefused):
@@ -14,7 +14,7 @@ def test_resolve_shell_rejects_zsh():
def test_resolve_shell_accepts_bash():
from shell_tools.common.limits import _resolve_shell
from terminal_tools.common.limits import _resolve_shell
assert _resolve_shell(True) == "/bin/bash"
assert _resolve_shell("/bin/bash") == "/bin/bash"
@@ -22,7 +22,7 @@ def test_resolve_shell_accepts_bash():
def test_sanitized_env_strips_zsh_vars(monkeypatch):
from shell_tools.common.limits import sanitized_env
from terminal_tools.common.limits import sanitized_env
monkeypatch.setenv("ZDOTDIR", "/some/path")
monkeypatch.setenv("ZSH_VERSION", "5.9")
@@ -38,7 +38,7 @@ def test_sanitized_env_strips_zsh_vars(monkeypatch):
def test_destructive_warning_catalog():
from shell_tools.common.destructive_warning import get_warning
from terminal_tools.common.destructive_warning import get_warning
cases = [
("rm -rf /tmp/foo", "force-remove"),
@@ -59,14 +59,14 @@ def test_destructive_warning_catalog():
def test_destructive_warning_clean_commands():
from shell_tools.common.destructive_warning import get_warning
from terminal_tools.common.destructive_warning import get_warning
for cmd in ["ls -la", "echo hi", "git status", "git commit -m 'x'"]:
assert get_warning(cmd) is None, f"unexpected warning for {cmd!r}"
def test_semantic_exit_grep():
from shell_tools.common.semantic_exit import classify
from terminal_tools.common.semantic_exit import classify
status, msg = classify("grep foo /tmp/x", 0)
assert status == "ok"
@@ -78,7 +78,7 @@ def test_semantic_exit_grep():
def test_semantic_exit_default():
from shell_tools.common.semantic_exit import classify
from terminal_tools.common.semantic_exit import classify
status, msg = classify("ls", 0)
assert status == "ok"
@@ -88,14 +88,14 @@ def test_semantic_exit_default():
def test_semantic_exit_signaled():
from shell_tools.common.semantic_exit import classify
from terminal_tools.common.semantic_exit import classify
status, msg = classify("sleep 999", -15, signaled=True)
assert status == "signal"
def test_semantic_exit_timed_out():
from shell_tools.common.semantic_exit import classify
from terminal_tools.common.semantic_exit import classify
status, msg = classify("sleep 999", None, timed_out=True)
assert status == "error"
+33
View File
@@ -0,0 +1,33 @@
"""Smoke test: load the server module, register tools, assert all 10 land."""
from __future__ import annotations
# The complete tool surface of the terminal-tools MCP server (ten tools).
# The smoke tests below compare registration output against this set, so any
# tool added, dropped, or renamed in the server must be reflected here too.
EXPECTED_TOOLS = {
    "terminal_exec",
    "terminal_job_start",
    "terminal_job_logs",
    "terminal_job_manage",
    "terminal_pty_open",
    "terminal_pty_run",
    "terminal_pty_close",
    "terminal_rg",
    "terminal_find",
    "terminal_output_get",
}
def test_register_terminal_tools_lands_all_ten(mcp):
    """Registering the server lands exactly the ten expected terminal_* tools."""
    from terminal_tools import register_terminal_tools

    # Compare as sets: registration order is irrelevant, membership is not.
    registered = set(register_terminal_tools(mcp))
    missing = EXPECTED_TOOLS - registered
    extra = registered - EXPECTED_TOOLS
    assert registered == EXPECTED_TOOLS, f"missing: {missing}, extra: {extra}"
def test_all_tools_have_terminal_prefix(mcp):
    """Every registered tool name carries the ``terminal_`` namespace prefix."""
    from terminal_tools import register_terminal_tools

    registered_names = register_terminal_tools(mcp)
    for tool_name in registered_names:
        # Asserting per-name (rather than all() over the list) pinpoints the
        # first offender in the failure message.
        assert tool_name.startswith("terminal_"), (
            f"tool {tool_name!r} missing terminal_ prefix"
        )