fix: all agent path prefixed by HIVE_HOME

This commit is contained in:
Timothy
2026-04-28 19:16:35 -07:00
parent dd69a53de1
commit ae2aa30edf
24 changed files with 235 additions and 105 deletions
@@ -657,8 +657,10 @@ def write_compaction_debug_log(
level: str, level: str,
inventory: list[dict[str, Any]] | None, inventory: list[dict[str, Any]] | None,
) -> None: ) -> None:
"""Write detailed compaction analysis to ~/.hive/compaction_log/.""" """Write detailed compaction analysis to $HIVE_HOME/compaction_log/."""
log_dir = Path.home() / ".hive" / "compaction_log" from framework.config import HIVE_HOME
log_dir = HIVE_HOME / "compaction_log"
log_dir.mkdir(parents=True, exist_ok=True) log_dir.mkdir(parents=True, exist_ok=True)
ts = datetime.now(UTC).strftime("%Y%m%dT%H%M%S_%f") ts = datetime.now(UTC).strftime("%Y%m%dT%H%M%S_%f")
@@ -560,7 +560,9 @@ class CredentialTesterAgent:
if self._selected_account is None: if self._selected_account is None:
raise RuntimeError("No account selected. Call select_account() first.") raise RuntimeError("No account selected. Call select_account() first.")
self._storage_path = Path.home() / ".hive" / "agents" / "credential_tester" from framework.config import HIVE_HOME
self._storage_path = HIVE_HOME / "agents" / "credential_tester"
self._storage_path.mkdir(parents=True, exist_ok=True) self._storage_path.mkdir(parents=True, exist_ok=True)
self._tool_registry = ToolRegistry() self._tool_registry = ToolRegistry()
+9 -3
View File
@@ -66,7 +66,9 @@ def _get_last_active(agent_path: Path) -> str | None:
latest: str | None = None latest: str | None = None
# 1. Worker sessions # 1. Worker sessions
sessions_dir = Path.home() / ".hive" / "agents" / agent_name / "sessions" from framework.config import HIVE_HOME
sessions_dir = HIVE_HOME / "agents" / agent_name / "sessions"
if sessions_dir.exists(): if sessions_dir.exists():
for session_dir in sessions_dir.iterdir(): for session_dir in sessions_dir.iterdir():
if not session_dir.is_dir() or not session_dir.name.startswith("session_"): if not session_dir.is_dir() or not session_dir.name.startswith("session_"):
@@ -115,7 +117,9 @@ def _get_last_active(agent_path: Path) -> str | None:
def _count_sessions(agent_name: str) -> int: def _count_sessions(agent_name: str) -> int:
"""Count session directories under ~/.hive/agents/{agent_name}/sessions/.""" """Count session directories under ~/.hive/agents/{agent_name}/sessions/."""
sessions_dir = Path.home() / ".hive" / "agents" / agent_name / "sessions" from framework.config import HIVE_HOME
sessions_dir = HIVE_HOME / "agents" / agent_name / "sessions"
if not sessions_dir.exists(): if not sessions_dir.exists():
return 0 return 0
return sum(1 for d in sessions_dir.iterdir() if d.is_dir() and d.name.startswith("session_")) return sum(1 for d in sessions_dir.iterdir() if d.is_dir() and d.name.startswith("session_"))
@@ -123,7 +127,9 @@ def _count_sessions(agent_name: str) -> int:
def _count_runs(agent_name: str) -> int: def _count_runs(agent_name: str) -> int:
"""Count unique run_ids across all sessions for an agent.""" """Count unique run_ids across all sessions for an agent."""
sessions_dir = Path.home() / ".hive" / "agents" / agent_name / "sessions" from framework.config import HIVE_HOME
sessions_dir = HIVE_HOME / "agents" / agent_name / "sessions"
if not sessions_dir.exists(): if not sessions_dir.exists():
return 0 return 0
run_ids: set[str] = set() run_ids: set[str] = set()
+4 -2
View File
@@ -6,8 +6,10 @@ from pathlib import Path
def _load_preferred_model() -> str: def _load_preferred_model() -> str:
"""Load preferred model from ~/.hive/configuration.json.""" """Load preferred model from $HIVE_HOME/configuration.json."""
config_path = Path.home() / ".hive" / "configuration.json" from framework.config import HIVE_HOME
config_path = HIVE_HOME / "configuration.json"
if config_path.exists(): if config_path.exists():
try: try:
with open(config_path, encoding="utf-8") as f: with open(config_path, encoding="utf-8") as f:
+6 -1
View File
@@ -18,7 +18,12 @@ from pathlib import Path
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
CREDENTIAL_KEY_PATH = Path.home() / ".hive" / "secrets" / "credential_key" # Resolved once at module import. ``framework.config.HIVE_HOME`` reads
# the desktop's ``HIVE_HOME`` env var at its own import time, so the
# runtime always sees the per-user root before this constant is computed.
from framework.config import HIVE_HOME as _HIVE_HOME
CREDENTIAL_KEY_PATH = _HIVE_HOME / "secrets" / "credential_key"
CREDENTIAL_KEY_ENV_VAR = "HIVE_CREDENTIAL_KEY" CREDENTIAL_KEY_ENV_VAR = "HIVE_CREDENTIAL_KEY"
ADEN_CREDENTIAL_ID = "aden_api_key" ADEN_CREDENTIAL_ID = "aden_api_key"
ADEN_ENV_VAR = "ADEN_API_KEY" ADEN_ENV_VAR = "ADEN_API_KEY"
+3 -1
View File
@@ -751,7 +751,9 @@ class CredentialStore:
# Determine local storage path # Determine local storage path
if local_path is None: if local_path is None:
local_path = str(Path.home() / ".hive" / "credentials") from framework.config import HIVE_HOME
local_path = str(HIVE_HOME / "credentials")
local_storage = EncryptedFileStorage(base_path=local_path) local_storage = EncryptedFileStorage(base_path=local_path)
+3 -1
View File
@@ -42,7 +42,9 @@ def _open_event_log() -> IO[str] | None:
return None return None
raw = _DEBUG_EVENTS_RAW raw = _DEBUG_EVENTS_RAW
if raw.lower() in ("1", "true", "full"): if raw.lower() in ("1", "true", "full"):
log_dir = Path.home() / ".hive" / "event_logs" from framework.config import HIVE_HOME
log_dir = HIVE_HOME / "event_logs"
else: else:
log_dir = Path(raw) log_dir = Path(raw)
log_dir.mkdir(parents=True, exist_ok=True) log_dir.mkdir(parents=True, exist_ok=True)
+3 -1
View File
@@ -264,7 +264,9 @@ def ensure_all_colony_dbs(colonies_root: Path | None = None) -> list[Path]:
run the stale-claim reclaimer on all of them in one pass. run the stale-claim reclaimer on all of them in one pass.
""" """
if colonies_root is None: if colonies_root is None:
colonies_root = Path.home() / ".hive" / "colonies" from framework.config import COLONIES_DIR
colonies_root = COLONIES_DIR
if not colonies_root.is_dir(): if not colonies_root.is_dir():
return [] return []
+4 -2
View File
@@ -50,8 +50,10 @@ _ENDPOINTS = [
_DEFAULT_PROJECT_ID = "rising-fact-p41fc" _DEFAULT_PROJECT_ID = "rising-fact-p41fc"
_TOKEN_REFRESH_BUFFER_SECS = 60 _TOKEN_REFRESH_BUFFER_SECS = 60
# Credentials file in ~/.hive/ (native implementation) # Credentials file in $HIVE_HOME (native implementation)
_ACCOUNTS_FILE = Path.home() / ".hive" / "antigravity-accounts.json" from framework.config import HIVE_HOME as _HIVE_HOME
_ACCOUNTS_FILE = _HIVE_HOME / "antigravity-accounts.json"
_IDE_STATE_DB_MAC = ( _IDE_STATE_DB_MAC = (
Path.home() / "Library" / "Application Support" / "Antigravity" / "User" / "globalStorage" / "state.vscdb" Path.home() / "Library" / "Application Support" / "Antigravity" / "User" / "globalStorage" / "state.vscdb"
) )
+33 -38
View File
@@ -45,17 +45,19 @@ logging.getLogger("httpcore").setLevel(logging.WARNING)
def _api_base_needs_bearer_auth(api_base: str | None) -> bool: def _api_base_needs_bearer_auth(api_base: str | None) -> bool:
"""Return True when ``api_base`` points at an Anthropic-compatible endpoint """Return True when api_base points at an Anthropic-compatible endpoint
that authenticates via ``Authorization: Bearer`` rather than ``x-api-key``. that authenticates via ``Authorization: Bearer`` rather than ``x-api-key``.
The hive-llm proxy (Rust service in hive-backend/llm/) speaks the Anthropic The Hive LLM proxy (Rust service in hive-backend/llm/) speaks the
Messages API but mints user-scoped JWTs and validates them via Bearer auth. Anthropic Messages API but mints user-scoped JWTs and validates them
Default upstream Anthropic endpoints (api.anthropic.com, Kimi's via Bearer auth. Default upstream Anthropic endpoints (api.anthropic.com,
api.kimi.com/coding) keep using x-api-key, so the override is scoped to the Kimi's api.kimi.com/coding) keep using x-api-key, so the override is
known hive-proxy hosts plus the env-configured override. scoped to known hive-proxy hosts plus the env-configured override.
""" """
if not api_base: if not api_base:
return False return False
# Strip protocol, port, and path so a plain hostname compare is enough
# for the common cases.
lowered = api_base.lower() lowered = api_base.lower()
for host in ("adenhq.com", "open-hive.com", "127.0.0.1:8890", "localhost:8890"): for host in ("adenhq.com", "open-hive.com", "127.0.0.1:8890", "localhost:8890"):
if host in lowered: if host in lowered:
@@ -67,19 +69,16 @@ def _api_base_needs_bearer_auth(api_base: str | None) -> bool:
def _patch_litellm_anthropic_oauth() -> None: def _patch_litellm_anthropic_oauth() -> None:
"""Patch litellm's Anthropic header construction to fix OAuth/JWT token handling. """Patch litellm's Anthropic header construction to fix OAuth token handling.
Two cases are remapped: litellm bug: validate_environment() puts the OAuth token into x-api-key,
1. **Anthropic OAuth tokens** (``sk-ant-oat`` prefix). litellm puts the token but Anthropic's API rejects OAuth tokens in x-api-key. They must be sent
into ``x-api-key`` but Anthropic's API requires it on via Authorization: Bearer only, with x-api-key omitted entirely.
``Authorization: Bearer`` only see BerriAI/litellm#19618.
2. **Hive LLM proxy bearer tokens** (any JWT). The Rust proxy at
hive-backend/llm/ speaks the Anthropic Messages API but authenticates
with ``Authorization: Bearer <jwt>``; litellm's default ``x-api-key``
would 401.
Both cases share the same fix: promote whatever's in ``x-api-key`` to This patch wraps validate_environment to remove x-api-key when the
``Authorization: Bearer`` and drop ``x-api-key``. Authorization header carries an OAuth token (sk-ant-oat prefix).
See: https://github.com/BerriAI/litellm/issues/19618
""" """
try: try:
from litellm.llms.anthropic.common_utils import AnthropicModelInfo from litellm.llms.anthropic.common_utils import AnthropicModelInfo
@@ -120,15 +119,6 @@ def _patch_litellm_anthropic_oauth() -> None:
oauth_prefix = f"Bearer {ANTHROPIC_OAUTH_TOKEN_PREFIX}" oauth_prefix = f"Bearer {ANTHROPIC_OAUTH_TOKEN_PREFIX}"
auth_is_oauth = auth.startswith(oauth_prefix) auth_is_oauth = auth.startswith(oauth_prefix)
key_is_oauth = x_api_key.startswith(ANTHROPIC_OAUTH_TOKEN_PREFIX) key_is_oauth = x_api_key.startswith(ANTHROPIC_OAUTH_TOKEN_PREFIX)
# The hive-llm proxy speaks the Anthropic Messages API but authenticates
# via Authorization: Bearer <jwt>; x-api-key is ignored and returns
# missing_auth. Promote x-api-key → Authorization for those endpoints
# so hive's per-user stream JWT actually authenticates the request.
hive_needs_remap = (
x_api_key
and not auth
and _api_base_needs_bearer_auth(api_base)
)
if auth_is_oauth or key_is_oauth: if auth_is_oauth or key_is_oauth:
token = x_api_key if key_is_oauth else auth.removeprefix("Bearer ").strip() token = x_api_key if key_is_oauth else auth.removeprefix("Bearer ").strip()
result.pop("x-api-key", None) result.pop("x-api-key", None)
@@ -139,9 +129,6 @@ def _patch_litellm_anthropic_oauth() -> None:
if ANTHROPIC_OAUTH_BETA_HEADER not in beta_parts: if ANTHROPIC_OAUTH_BETA_HEADER not in beta_parts:
beta_parts.append(ANTHROPIC_OAUTH_BETA_HEADER) beta_parts.append(ANTHROPIC_OAUTH_BETA_HEADER)
result["anthropic-beta"] = ",".join(beta_parts) result["anthropic-beta"] = ",".join(beta_parts)
elif hive_needs_remap:
result.pop("x-api-key", None)
result["authorization"] = f"Bearer {x_api_key}"
return result return result
AnthropicModelInfo.validate_environment = _patched_validate_environment AnthropicModelInfo.validate_environment = _patched_validate_environment
@@ -390,10 +377,16 @@ OPENROUTER_TOOL_COMPAT_MODEL_CACHE: dict[str, float] = {}
# from rate-limit retries — 3 retries is sufficient for connection failures. # from rate-limit retries — 3 retries is sufficient for connection failures.
STREAM_TRANSIENT_MAX_RETRIES = 3 STREAM_TRANSIENT_MAX_RETRIES = 3
# Directory for dumping failed requests # Directory for dumping failed requests. Resolved lazily so HIVE_HOME
FAILED_REQUESTS_DIR = Path.home() / ".hive" / "failed_requests" # overrides (set by the desktop shell) take effect even if this module
# is imported before framework.config picks up the override.
def _failed_requests_dir() -> Path:
from framework.config import HIVE_HOME
# Maximum number of dump files to retain in ~/.hive/failed_requests/. return HIVE_HOME / "failed_requests"
# Maximum number of dump files to retain in $HIVE_HOME/failed_requests/.
# Older files are pruned automatically to prevent unbounded disk growth. # Older files are pruned automatically to prevent unbounded disk growth.
MAX_FAILED_REQUEST_DUMPS = 50 MAX_FAILED_REQUEST_DUMPS = 50
@@ -585,7 +578,7 @@ def _prune_failed_request_dumps(max_files: int = MAX_FAILED_REQUEST_DUMPS) -> No
""" """
try: try:
all_dumps = sorted( all_dumps = sorted(
FAILED_REQUESTS_DIR.glob("*.json"), _failed_requests_dir().glob("*.json"),
key=lambda f: f.stat().st_mtime, key=lambda f: f.stat().st_mtime,
) )
excess = len(all_dumps) - max_files excess = len(all_dumps) - max_files
@@ -620,11 +613,12 @@ def _dump_failed_request(
) -> str: ) -> str:
"""Dump failed request to a file for debugging. Returns the file path.""" """Dump failed request to a file for debugging. Returns the file path."""
try: try:
FAILED_REQUESTS_DIR.mkdir(parents=True, exist_ok=True) dump_dir = _failed_requests_dir()
dump_dir.mkdir(parents=True, exist_ok=True)
timestamp = datetime.now().strftime("%Y%m%d_%H%M%S_%f") timestamp = datetime.now().strftime("%Y%m%d_%H%M%S_%f")
filename = f"{error_type}_{model.replace('/', '_')}_{timestamp}.json" filename = f"{error_type}_{model.replace('/', '_')}_{timestamp}.json"
filepath = FAILED_REQUESTS_DIR / filename filepath = dump_dir / filename
# Build dump data # Build dump data
messages = kwargs.get("messages", []) messages = kwargs.get("messages", [])
@@ -654,7 +648,7 @@ def _dump_failed_request(
return str(filepath) return str(filepath)
except OSError as e: except OSError as e:
logger.warning(f"Failed to dump request debug log to {FAILED_REQUESTS_DIR}: {e}") logger.warning(f"Failed to dump request debug log to {_failed_requests_dir()}: {e}")
return "log_write_failed" return "log_write_failed"
@@ -2207,9 +2201,10 @@ class LiteLLMProvider(LLMProvider):
if logger.isEnabledFor(logging.DEBUG) and full_messages: if logger.isEnabledFor(logging.DEBUG) and full_messages:
import json as _json import json as _json
from datetime import datetime as _dt from datetime import datetime as _dt
from pathlib import Path as _Path
_debug_dir = _Path.home() / ".hive" / "debug_logs" from framework.config import HIVE_HOME as _HIVE_HOME
_debug_dir = _HIVE_HOME / "debug_logs"
_debug_dir.mkdir(parents=True, exist_ok=True) _debug_dir.mkdir(parents=True, exist_ok=True)
_ts = _dt.now().strftime("%Y%m%d_%H%M%S_%f") _ts = _dt.now().strftime("%Y%m%d_%H%M%S_%f")
_dump_file = _debug_dir / f"llm_request_{_ts}.json" _dump_file = _debug_dir / f"llm_request_{_ts}.json"
+4 -2
View File
@@ -558,7 +558,9 @@ ANTIGRAVITY_IDE_STATE_DB = (
# Linux fallback for the IDE state DB # Linux fallback for the IDE state DB
ANTIGRAVITY_IDE_STATE_DB_LINUX = Path.home() / ".config" / "Antigravity" / "User" / "globalStorage" / "state.vscdb" ANTIGRAVITY_IDE_STATE_DB_LINUX = Path.home() / ".config" / "Antigravity" / "User" / "globalStorage" / "state.vscdb"
# Antigravity credentials stored by native OAuth implementation # Antigravity credentials stored by native OAuth implementation
ANTIGRAVITY_AUTH_FILE = Path.home() / ".hive" / "antigravity-accounts.json" from framework.config import HIVE_HOME as _HIVE_HOME
ANTIGRAVITY_AUTH_FILE = _HIVE_HOME / "antigravity-accounts.json"
ANTIGRAVITY_OAUTH_TOKEN_URL = "https://oauth2.googleapis.com/token" ANTIGRAVITY_OAUTH_TOKEN_URL = "https://oauth2.googleapis.com/token"
_ANTIGRAVITY_TOKEN_LIFETIME_SECS = 3600 # Google access tokens expire in 1 hour _ANTIGRAVITY_TOKEN_LIFETIME_SECS = 3600 # Google access tokens expire in 1 hour
@@ -1389,7 +1391,7 @@ class AgentLoader:
) )
if storage_path is None: if storage_path is None:
storage_path = Path.home() / ".hive" / "agents" / agent_path.name / worker_name storage_path = _HIVE_HOME / "agents" / agent_path.name / worker_name
storage_path.mkdir(parents=True, exist_ok=True) storage_path.mkdir(parents=True, exist_ok=True)
runner = cls( runner = cls(
+6 -2
View File
@@ -62,10 +62,14 @@ _STALE_DEFAULT_ALIASES: dict[str, str] = {
class MCPRegistry: class MCPRegistry:
"""Manages local MCP server state in ~/.hive/mcp_registry/.""" """Manages local MCP server state in $HIVE_HOME/mcp_registry/."""
def __init__(self, base_path: Path | None = None): def __init__(self, base_path: Path | None = None):
self._base = base_path or Path.home() / ".hive" / "mcp_registry" if base_path is None:
from framework.config import HIVE_HOME
base_path = HIVE_HOME / "mcp_registry"
self._base = base_path
self._installed_path = self._base / "installed.json" self._installed_path = self._base / "installed.json"
self._config_path = self._base / "config.json" self._config_path = self._base / "config.json"
self._cache_dir = self._base / "cache" self._cache_dir = self._base / "cache"
+3 -1
View File
@@ -515,7 +515,9 @@ class ToolRegistry:
if "--project-root" not in args: if "--project-root" not in args:
args.extend(["--project-root", project_root]) args.extend(["--project-root", project_root])
if "--write-root" not in args: if "--write-root" not in args:
_write_root = Path.home() / ".hive" / "workspace" from framework.config import HIVE_HOME
_write_root = HIVE_HOME / "workspace"
_write_root.mkdir(parents=True, exist_ok=True) _write_root.mkdir(parents=True, exist_ok=True)
args.extend(["--write-root", str(_write_root)]) args.extend(["--write-root", str(_write_root)])
config["args"] = args config["args"] = args
+59 -12
View File
@@ -1,5 +1,6 @@
"""aiohttp Application factory for the Hive HTTP API server.""" """aiohttp Application factory for the Hive HTTP API server."""
import hmac
import logging import logging
import os import os
from pathlib import Path from pathlib import Path
@@ -21,7 +22,9 @@ _ALLOWED_AGENT_ROOTS: tuple[Path, ...] | None = None
def _has_encrypted_credentials() -> bool: def _has_encrypted_credentials() -> bool:
"""Return True when an encrypted credential store already exists on disk.""" """Return True when an encrypted credential store already exists on disk."""
cred_dir = Path.home() / ".hive" / "credentials" / "credentials" from framework.config import HIVE_HOME
cred_dir = HIVE_HOME / "credentials" / "credentials"
return cred_dir.is_dir() and any(cred_dir.glob("*.enc")) return cred_dir.is_dir() and any(cred_dir.glob("*.enc"))
@@ -30,17 +33,18 @@ def _get_allowed_agent_roots() -> tuple[Path, ...]:
Roots are anchored to the repository root (derived from ``__file__``) Roots are anchored to the repository root (derived from ``__file__``)
so the allowlist is correct regardless of the process's working so the allowlist is correct regardless of the process's working
directory. directory. The hive-home subtrees honour ``HIVE_HOME`` so the desktop's
per-user root is allowed in addition to (or instead of) ``~/.hive``.
""" """
global _ALLOWED_AGENT_ROOTS global _ALLOWED_AGENT_ROOTS
if _ALLOWED_AGENT_ROOTS is None: if _ALLOWED_AGENT_ROOTS is None:
from framework.config import COLONIES_DIR from framework.config import COLONIES_DIR, HIVE_HOME
_ALLOWED_AGENT_ROOTS = ( _ALLOWED_AGENT_ROOTS = (
COLONIES_DIR.resolve(), # ~/.hive/colonies/ COLONIES_DIR.resolve(), # $HIVE_HOME/colonies/
(_REPO_ROOT / "exports").resolve(), # compat fallback (_REPO_ROOT / "exports").resolve(), # compat fallback
(_REPO_ROOT / "examples").resolve(), (_REPO_ROOT / "examples").resolve(),
(Path.home() / ".hive" / "agents").resolve(), (HIVE_HOME / "agents").resolve(),
) )
return _ALLOWED_AGENT_ROOTS return _ALLOWED_AGENT_ROOTS
@@ -62,7 +66,8 @@ def validate_agent_path(agent_path: str | Path) -> Path:
if resolved.is_relative_to(root) and resolved != root: if resolved.is_relative_to(root) and resolved != root:
return resolved return resolved
raise ValueError( raise ValueError(
"agent_path must be inside an allowed directory (~/.hive/colonies/, exports/, examples/, or ~/.hive/agents/)" "agent_path must be inside an allowed directory "
"($HIVE_HOME/colonies/, exports/, examples/, or $HIVE_HOME/agents/)"
) )
@@ -94,13 +99,15 @@ def resolve_session(request: web.Request):
def sessions_dir(session: Session) -> Path: def sessions_dir(session: Session) -> Path:
"""Resolve the worker sessions directory for a session. """Resolve the worker sessions directory for a session.
Storage layout: ~/.hive/agents/{agent_name}/sessions/ Storage layout: $HIVE_HOME/agents/{agent_name}/sessions/
Requires a worker to be loaded (worker_path must be set). Requires a worker to be loaded (worker_path must be set).
""" """
if session.worker_path is None: if session.worker_path is None:
raise ValueError("No worker loaded — no worker sessions directory") raise ValueError("No worker loaded — no worker sessions directory")
from framework.config import HIVE_HOME
agent_name = session.worker_path.name agent_name = session.worker_path.name
return Path.home() / ".hive" / "agents" / agent_name / "sessions" return HIVE_HOME / "agents" / agent_name / "sessions"
# Allowed CORS origins (localhost on any port) # Allowed CORS origins (localhost on any port)
@@ -159,6 +166,28 @@ async def no_cache_api_middleware(request: web.Request, handler):
return response return response
# ---------------------------------------------------------------------------
# Desktop shared-secret auth middleware.
#
# When the runtime is spawned by the Electron main process, a fresh random
# token is passed via ``HIVE_DESKTOP_TOKEN``. Every request from main must
# carry the matching ``X-Hive-Token`` header. If the env var is unset (e.g.
# running ``hive serve`` directly from a terminal), the check is skipped —
# OSS behaviour is preserved.
# ---------------------------------------------------------------------------
_EXPECTED_DESKTOP_TOKEN: str | None = os.environ.get("HIVE_DESKTOP_TOKEN") or None
@web.middleware
async def desktop_auth_middleware(request: web.Request, handler):
if _EXPECTED_DESKTOP_TOKEN is None:
return await handler(request)
provided = request.headers.get("X-Hive-Token", "")
if not hmac.compare_digest(provided, _EXPECTED_DESKTOP_TOKEN):
return web.json_response({"error": "unauthorized"}, status=401)
return await handler(request)
@web.middleware @web.middleware
async def error_middleware(request: web.Request, handler): async def error_middleware(request: web.Request, handler):
"""Catch exceptions and return JSON error responses. """Catch exceptions and return JSON error responses.
@@ -287,7 +316,12 @@ def create_app(model: str | None = None) -> web.Application:
Returns: Returns:
Configured aiohttp Application ready to run. Configured aiohttp Application ready to run.
""" """
app = web.Application(middlewares=[cors_middleware, no_cache_api_middleware, error_middleware]) # Desktop mode: the runtime is always a subprocess of the Electron main
# process, which reaches it via IPC and the `hive://` custom protocol.
# There is no browser origin to authorize, so CORS is unnecessary.
# The auth middleware enforces the shared-secret token when the env var
# is set (i.e. when Electron spawned us); it is a no-op otherwise.
app = web.Application(middlewares=[desktop_auth_middleware, no_cache_api_middleware, error_middleware])
# Initialize credential store (before SessionManager so it can be shared) # Initialize credential store (before SessionManager so it can be shared)
from framework.credentials.store import CredentialStore from framework.credentials.store import CredentialStore
@@ -392,9 +426,22 @@ def create_app(model: str | None = None) -> web.Application:
register_skills_routes(app) register_skills_routes(app)
register_task_routes(app) register_task_routes(app)
# Static file serving — Option C production mode # Commercial extensions (optional — only present in hive-desktop-runtime).
# If frontend/dist/ exists, serve built frontend files on / # Imports lazily so an OSS install without the `commercial` package keeps
_setup_static_serving(app) # working unchanged.
try:
from commercial.middleware import setup_commercial_middleware
from commercial.routes import register_routes as register_commercial_routes
setup_commercial_middleware(app)
register_commercial_routes(app)
logger.info("Commercial extensions loaded")
except ImportError:
pass
# Desktop mode: no static file serving. The frontend lives in the
# Electron renderer process and is loaded from file:// (or the Vite
# dev server in dev mode) — not from this aiohttp app.
return app return app
@@ -294,7 +294,9 @@ def _resolve_progress_db_by_name(colony_name: str) -> Path | None:
""" """
if not _COLONY_NAME_RE.match(colony_name): if not _COLONY_NAME_RE.match(colony_name):
return None return None
db_path = Path.home() / ".hive" / "colonies" / colony_name / "data" / "progress.db" from framework.config import COLONIES_DIR
db_path = COLONIES_DIR / colony_name / "data" / "progress.db"
return db_path if db_path.exists() else None return db_path if db_path.exists() else None
+6 -2
View File
@@ -1245,7 +1245,9 @@ async def fork_session_into_colony(
# would wrongly flag every fresh colony as "already-exists" if we # would wrongly flag every fresh colony as "already-exists" if we
# used ``not colony_dir.exists()``. A colony is "new" until its # used ``not colony_dir.exists()``. A colony is "new" until its
# worker config has actually been written. # worker config has actually been written.
colony_dir = Path.home() / ".hive" / "colonies" / colony_name from framework.config import COLONIES_DIR
colony_dir = COLONIES_DIR / colony_name
worker_name = "worker" worker_name = "worker"
worker_config_path = colony_dir / f"{worker_name}.json" worker_config_path = colony_dir / f"{worker_name}.json"
is_new = not worker_config_path.exists() is_new = not worker_config_path.exists()
@@ -1469,7 +1471,9 @@ async def fork_session_into_colony(
compaction_status.mark_in_progress(dest_queen_dir) compaction_status.mark_in_progress(dest_queen_dir)
_worker_storage = Path.home() / ".hive" / "agents" / colony_name / worker_name from framework.config import HIVE_HOME
_worker_storage = HIVE_HOME / "agents" / colony_name / worker_name
_dest_queen_dir = dest_queen_dir _dest_queen_dir = dest_queen_dir
_queen_ctx = queen_ctx _queen_ctx = queen_ctx
_queen_loop = queen_loop _queen_loop = queen_loop
+4 -2
View File
@@ -1094,8 +1094,10 @@ async def handle_delete_agent(request: web.Request) -> web.Response:
except ValueError as exc: except ValueError as exc:
return web.json_response({"error": str(exc)}, status=400) return web.json_response({"error": str(exc)}, status=400)
# Reject deletion of framework agents (~/.hive/agents/) — those are internal # Reject deletion of framework agents ($HIVE_HOME/agents/) — those are internal
hive_agents_dir = Path.home() / ".hive" / "agents" from framework.config import HIVE_HOME
hive_agents_dir = HIVE_HOME / "agents"
if resolved.is_relative_to(hive_agents_dir): if resolved.is_relative_to(hive_agents_dir):
return web.json_response({"error": "Cannot delete framework agents"}, status=403) return web.json_response({"error": "Cannot delete framework agents"}, status=403)
+2 -2
View File
@@ -67,10 +67,10 @@ async def handle_list_nodes(request: web.Request) -> web.Response:
worker_session_id = request.query.get("session_id") worker_session_id = request.query.get("session_id")
if worker_session_id and session.worker_path: if worker_session_id and session.worker_path:
worker_session_id = safe_path_segment(worker_session_id) worker_session_id = safe_path_segment(worker_session_id)
from pathlib import Path from framework.config import HIVE_HOME
state_path = ( state_path = (
Path.home() / ".hive" / "agents" / session.worker_path.name / "sessions" / worker_session_id / "state.json" HIVE_HOME / "agents" / session.worker_path.name / "sessions" / worker_session_id / "state.json"
) )
if state_path.exists(): if state_path.exists():
try: try:
+7 -3
View File
@@ -546,8 +546,10 @@ class SessionManager:
session.colony_name = colony_id session.colony_name = colony_id
session.worker_path = agent_path session.worker_path = agent_path
# Worker storage: ~/.hive/agents/{colony_name}/{worker_name}/ # Worker storage: $HIVE_HOME/agents/{colony_name}/{worker_name}/
worker_storage = Path.home() / ".hive" / "agents" / colony_id / worker_name from framework.config import HIVE_HOME
worker_storage = HIVE_HOME / "agents" / colony_id / worker_name
worker_storage.mkdir(parents=True, exist_ok=True) worker_storage.mkdir(parents=True, exist_ok=True)
# Copy conversations from colony if fresh # Copy conversations from colony if fresh
@@ -927,7 +929,9 @@ class SessionManager:
that process is still running on the host. If it is, the session is that process is still running on the host. If it is, the session is
owned by another healthy worker process, so leave it alone. owned by another healthy worker process, so leave it alone.
""" """
sessions_path = Path.home() / ".hive" / "agents" / agent_path.name / "sessions" from framework.config import HIVE_HOME
sessions_path = HIVE_HOME / "agents" / agent_path.name / "sessions"
if not sessions_path.exists(): if not sessions_path.exists():
return return
+22 -10
View File
@@ -18,11 +18,22 @@ from pathlib import Path
from framework.skills.parser import ParsedSkill from framework.skills.parser import ParsedSkill
from framework.skills.skill_errors import SkillError, SkillErrorCode from framework.skills.skill_errors import SkillError, SkillErrorCode
# Default install destination for user-scope skills # Default install destination for user-scope skills + sentinel file for
USER_SKILLS_DIR = Path.home() / ".hive" / "skills" # the one-time security notice on first install (NFR-5). Computed via
# helpers so HIVE_HOME (set by the desktop shell to a per-user dir)
# is honoured. ``framework.config.HIVE_HOME`` is module-global and
# resolved at first import — so a single call here is enough; we don't
# need to re-resolve on every access.
def _user_skills_dir() -> Path:
from framework.config import HIVE_HOME
# Sentinel file for the one-time security notice on first install (NFR-5) return HIVE_HOME / "skills"
INSTALL_NOTICE_SENTINEL = Path.home() / ".hive" / ".install_notice_shown"
def _install_notice_sentinel() -> Path:
from framework.config import HIVE_HOME
return HIVE_HOME / ".install_notice_shown"
_INSTALL_NOTICE = """\ _INSTALL_NOTICE = """\
@@ -44,15 +55,16 @@ _INSTALL_NOTICE = """\
def maybe_show_install_notice() -> None: def maybe_show_install_notice() -> None:
"""Print a one-time security notice before the first skill install (NFR-5). """Print a one-time security notice before the first skill install (NFR-5).
Touches a sentinel file in ~/.hive/ after showing the notice so it is Touches a sentinel file in $HIVE_HOME after showing the notice so it is
only displayed once across all future installs. only displayed once across all future installs.
""" """
if INSTALL_NOTICE_SENTINEL.exists(): sentinel = _install_notice_sentinel()
if sentinel.exists():
return return
print(_INSTALL_NOTICE, flush=True) print(_INSTALL_NOTICE, flush=True)
try: try:
INSTALL_NOTICE_SENTINEL.parent.mkdir(parents=True, exist_ok=True) sentinel.parent.mkdir(parents=True, exist_ok=True)
INSTALL_NOTICE_SENTINEL.touch() sentinel.touch()
except OSError: except OSError:
pass # If we can't write the sentinel, just show the notice every time pass # If we can't write the sentinel, just show the notice every time
@@ -93,7 +105,7 @@ def install_from_git(
fix="Install git (https://git-scm.com/) and retry.", fix="Install git (https://git-scm.com/) and retry.",
) )
dest = (target_dir or USER_SKILLS_DIR) / skill_name dest = (target_dir or _user_skills_dir()) / skill_name
if dest.exists(): if dest.exists():
raise SkillError( raise SkillError(
code=SkillErrorCode.SKILL_ACTIVATION_FAILED, code=SkillErrorCode.SKILL_ACTIVATION_FAILED,
@@ -194,7 +206,7 @@ def remove_skill(name: str, skills_dir: Path | None = None) -> bool:
Raises: Raises:
SkillError: If the directory exists but cannot be removed. SkillError: If the directory exists but cannot be removed.
""" """
target = (skills_dir or USER_SKILLS_DIR) / name target = (skills_dir or _user_skills_dir()) / name
if not target.exists(): if not target.exists():
return False return False
try: try:
+13 -4
View File
@@ -26,9 +26,18 @@ _DEFAULT_REGISTRY_URL = (
"https://raw.githubusercontent.com/hive-skill-registry/hive-skill-registry/main/skill_index.json" "https://raw.githubusercontent.com/hive-skill-registry/hive-skill-registry/main/skill_index.json"
) )
def _cache_dir() -> Path:
    """Return the registry cache root, ``$HIVE_HOME/registry_cache``.

    Resolved lazily so overrides of ``framework.config.HIVE_HOME`` apply.
    """
    from framework.config import HIVE_HOME

    return HIVE_HOME / "registry_cache"
def _cache_index_path() -> Path:
    """Location of the cached skill index JSON inside the cache root."""
    return _cache_dir() / "skill_index.json"
def _cache_metadata_path() -> Path:
    """Location of the cache metadata JSON inside the cache root."""
    return _cache_dir() / "metadata.json"
_CACHE_TTL_SECONDS = 3600 # 1 hour _CACHE_TTL_SECONDS = 3600 # 1 hour
@@ -46,7 +55,7 @@ class RegistryClient:
cache_dir: Path | None = None, cache_dir: Path | None = None,
) -> None: ) -> None:
self._url = registry_url or os.environ.get("HIVE_REGISTRY_URL", _DEFAULT_REGISTRY_URL) self._url = registry_url or os.environ.get("HIVE_REGISTRY_URL", _DEFAULT_REGISTRY_URL)
cache_root = cache_dir or _CACHE_DIR cache_root = cache_dir or _cache_dir()
self._index_path = cache_root / "skill_index.json" self._index_path = cache_root / "skill_index.json"
self._metadata_path = cache_root / "metadata.json" self._metadata_path = cache_root / "metadata.json"
+18 -7
View File
@@ -30,8 +30,16 @@ _ENV_TRUST_ALL = "HIVE_TRUST_PROJECT_SKILLS"
# Env var for comma-separated own-remote glob patterns (e.g. "github.com/myorg/*"). # Env var for comma-separated own-remote glob patterns (e.g. "github.com/myorg/*").
_ENV_OWN_REMOTES = "HIVE_OWN_REMOTES" _ENV_OWN_REMOTES = "HIVE_OWN_REMOTES"
def _trusted_repos_path() -> Path:
    """Return ``$HIVE_HOME/trusted_repos.json`` (permanently-trusted repo keys)."""
    from framework.config import HIVE_HOME

    return HIVE_HOME / "trusted_repos.json"
def _notice_sentinel_path() -> Path:
    """Return the sentinel path marking that the skill-trust notice was shown."""
    from framework.config import HIVE_HOME

    return HIVE_HOME / ".skill_trust_notice_shown"
# --------------------------------------------------------------------------- # ---------------------------------------------------------------------------
@@ -50,7 +58,7 @@ class TrustedRepoStore:
"""Persists permanently-trusted repo keys to ~/.hive/trusted_repos.json.""" """Persists permanently-trusted repo keys to ~/.hive/trusted_repos.json."""
    def __init__(self, path: Path | None = None) -> None:
        """Create a store backed by *path*.

        Defaults to ``_trusted_repos_path()`` ($HIVE_HOME/trusted_repos.json)
        when no explicit path is given.
        """
        self._path = path or _trusted_repos_path()
        # Repo key -> entry; stays empty here — _loaded is False until the
        # entries are actually read from disk elsewhere.
        self._entries: dict[str, TrustedRepoEntry] = {}
        self._loaded = False
@@ -224,7 +232,9 @@ class ProjectTrustDetector:
patterns.extend(p.strip() for p in raw.split(",") if p.strip()) patterns.extend(p.strip() for p in raw.split(",") if p.strip())
# From $HIVE_HOME/own_remotes file # From $HIVE_HOME/own_remotes file
own_remotes_file = Path.home() / ".hive" / "own_remotes" from framework.config import HIVE_HOME
own_remotes_file = HIVE_HOME / "own_remotes"
if own_remotes_file.is_file(): if own_remotes_file.is_file():
try: try:
for line in own_remotes_file.read_text(encoding="utf-8").splitlines(): for line in own_remotes_file.read_text(encoding="utf-8").splitlines():
@@ -415,7 +425,8 @@ class TrustGate:
def _maybe_show_security_notice(self, Colors) -> None: # noqa: N803 def _maybe_show_security_notice(self, Colors) -> None: # noqa: N803
"""Show the one-time security notice if not already shown (NFR-5).""" """Show the one-time security notice if not already shown (NFR-5)."""
if _NOTICE_SENTINEL_PATH.exists(): sentinel = _notice_sentinel_path()
if sentinel.exists():
return return
self._print("") self._print("")
self._print( self._print(
@@ -427,8 +438,8 @@ class TrustGate:
) )
self._print("") self._print("")
try: try:
_NOTICE_SENTINEL_PATH.parent.mkdir(parents=True, exist_ok=True) sentinel.parent.mkdir(parents=True, exist_ok=True)
_NOTICE_SENTINEL_PATH.touch() sentinel.touch()
except OSError: except OSError:
pass pass
@@ -1699,7 +1699,9 @@ def register_queen_lifecycle_tools(
# INSIDE it (project scope, colony-local). fork_session_into_colony # INSIDE it (project scope, colony-local). fork_session_into_colony
# keys "is_new" off worker.json rather than the dir itself, so # keys "is_new" off worker.json rather than the dir itself, so
# pre-creating here does not wrongly flag fresh colonies as "old". # pre-creating here does not wrongly flag fresh colonies as "old".
colony_dir = Path.home() / ".hive" / "colonies" / cn from framework.config import COLONIES_DIR
colony_dir = COLONIES_DIR / cn
try: try:
colony_dir.mkdir(parents=True, exist_ok=True) colony_dir.mkdir(parents=True, exist_ok=True)
except OSError as e: except OSError as e:
@@ -2479,7 +2481,9 @@ def register_queen_lifecycle_tools(
ensure_progress_db as _ensure_db, ensure_progress_db as _ensure_db,
) )
colony_dir = _Path.home() / ".hive" / "colonies" / cn from framework.config import COLONIES_DIR as _COLONIES_DIR
colony_dir = _COLONIES_DIR / cn
if not colony_dir.is_dir(): if not colony_dir.is_dir():
return json.dumps({"error": f"colony '{cn}' not found"}) return json.dumps({"error": f"colony '{cn}' not found"})
+10 -3
View File
@@ -15,7 +15,13 @@ from typing import IO, Any
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
def _llm_debug_dir() -> Path:
    """Return ``$HIVE_HOME/llm_logs``, resolving HIVE_HOME at call time.

    Deliberately not a module-level constant: a constant would freeze
    whatever HIVE_HOME was at import time and miss late-bound overrides
    (e.g. the env override set by the desktop, or test overrides).
    """
    from framework.config import HIVE_HOME

    return HIVE_HOME / "llm_logs"
_log_file: IO[str] | None = None _log_file: IO[str] | None = None
_log_ready = False # lazy init guard _log_ready = False # lazy init guard
@@ -23,9 +29,10 @@ _log_ready = False # lazy init guard
def _open_log() -> IO[str] | None:
    """Open a fresh JSONL debug-log file for this process.

    Creates ``$HIVE_HOME/llm_logs`` if needed and opens an append-mode text
    file named after the current timestamp.

    Returns:
        The open text handle, to be cached by the lazy-init caller.

    Raises:
        OSError: If the directory or file cannot be created.
    """
    from datetime import UTC, datetime

    debug_dir = _llm_debug_dir()
    debug_dir.mkdir(parents=True, exist_ok=True)
    # UTC (not naive local time) so filenames sort consistently across
    # timezone/DST changes, matching the UTC timestamps used by the
    # compaction debug log.
    ts = datetime.now(UTC).strftime("%Y%m%d_%H%M%S")
    path = debug_dir / f"{ts}.jsonl"
    logger.info("LLM debug log → %s", path)
    return open(path, "a", encoding="utf-8")  # noqa: SIM115