chore: lint
@@ -16,7 +16,6 @@ import os
import re
import time
from datetime import UTC, datetime
from pathlib import Path
from typing import Any

from framework.agent_loop.conversation import Message, NodeConversation

@@ -2,7 +2,6 @@

import json
from dataclasses import dataclass, field
from pathlib import Path


def _load_preferred_model() -> str:

@@ -16,13 +16,13 @@ import os
import stat
from pathlib import Path

logger = logging.getLogger(__name__)

# Resolved once at module import. ``framework.config.HIVE_HOME`` reads
# the desktop's ``HIVE_HOME`` env var at its own import time, so the
# runtime always sees the per-user root before this constant is computed.
from framework.config import HIVE_HOME as _HIVE_HOME

logger = logging.getLogger(__name__)

CREDENTIAL_KEY_PATH = _HIVE_HOME / "secrets" / "credential_key"
CREDENTIAL_KEY_ENV_VAR = "HIVE_CREDENTIAL_KEY"
ADEN_CREDENTIAL_ID = "aden_api_key"

@@ -745,7 +745,6 @@ class CredentialStore:
        token = store.get_key("hubspot", "access_token")
    """
    import os
    from pathlib import Path

    from .storage import EncryptedFileStorage


@@ -23,6 +23,7 @@ from collections.abc import AsyncIterator, Callable, Iterator
from pathlib import Path
from typing import Any

from framework.config import HIVE_HOME as _HIVE_HOME
from framework.llm.provider import LLMProvider, LLMResponse, Tool
from framework.llm.stream_events import (
    FinishEvent,
@@ -51,8 +52,6 @@ _DEFAULT_PROJECT_ID = "rising-fact-p41fc"
_TOKEN_REFRESH_BUFFER_SECS = 60

# Credentials file in $HIVE_HOME (native implementation)
from framework.config import HIVE_HOME as _HIVE_HOME

_ACCOUNTS_FILE = _HIVE_HOME / "antigravity-accounts.json"
_IDE_STATE_DB_MAC = (
    Path.home() / "Library" / "Application Support" / "Antigravity" / "User" / "globalStorage" / "state.vscdb"

@@ -377,6 +377,7 @@ OPENROUTER_TOOL_COMPAT_MODEL_CACHE: dict[str, float] = {}
# from rate-limit retries — 3 retries is sufficient for connection failures.
STREAM_TRANSIENT_MAX_RETRIES = 3


# Directory for dumping failed requests. Resolved lazily so HIVE_HOME
# overrides (set by the desktop shell) take effect even if this module
# is imported before framework.config picks up the override.
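The two comment blocks above bind the same setting in opposite ways: one module captures HIVE_HOME once at import (safe because the desktop shell exports the env var before framework.config is imported), while this one re-resolves it on every call. A minimal sketch of the difference, with framework.config stubbed by a plain environment lookup; the names here are illustrative, not from the codebase:

import os
from pathlib import Path

# Import-time binding: evaluated exactly once, when the module is first
# imported. Correct only if HIVE_HOME is already set at that point.
HIVE_HOME_AT_IMPORT = Path(os.environ.get("HIVE_HOME", str(Path.home() / ".hive")))


def failed_request_dir() -> Path:
    # Lazy binding: evaluated per call, so a later HIVE_HOME override
    # (desktop shell, test monkeypatch) is still honored.
    hive_home = Path(os.environ.get("HIVE_HOME", str(Path.home() / ".hive")))
    return hive_home / "llm_debug"
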
@@ -9,7 +9,7 @@ from datetime import UTC
from pathlib import Path
from typing import Any

from framework.config import get_hive_config, get_preferred_model
from framework.config import HIVE_HOME as _HIVE_HOME, get_hive_config, get_preferred_model
from framework.credentials.validation import (
    ensure_credential_key_env as _ensure_credential_key_env,
)
@@ -558,8 +558,6 @@ ANTIGRAVITY_IDE_STATE_DB = (
# Linux fallback for the IDE state DB
ANTIGRAVITY_IDE_STATE_DB_LINUX = Path.home() / ".config" / "Antigravity" / "User" / "globalStorage" / "state.vscdb"
# Antigravity credentials stored by native OAuth implementation
from framework.config import HIVE_HOME as _HIVE_HOME

ANTIGRAVITY_AUTH_FILE = _HIVE_HOME / "antigravity-accounts.json"

ANTIGRAVITY_OAUTH_TOKEN_URL = "https://oauth2.googleapis.com/token"

@@ -652,12 +652,7 @@ async def create_queen(
        _has_vision,
    )
    phase_state.prompt_incubating = finalize_queen_prompt(
        (
            _queen_character_core
            + _queen_role_incubating
            + _queen_tools_incubating
            + _queen_behavior_always
        ),
        (_queen_character_core + _queen_role_incubating + _queen_tools_incubating + _queen_behavior_always),
        _has_vision,
    )
    phase_state.prompt_working = finalize_queen_prompt(
@@ -665,12 +660,7 @@ async def create_queen(
        _has_vision,
    )
    phase_state.prompt_reviewing = finalize_queen_prompt(
        (
            _queen_character_core
            + _queen_role_reviewing
            + _queen_tools_reviewing
            + _queen_behavior_always
        ),
        (_queen_character_core + _queen_role_reviewing + _queen_tools_reviewing + _queen_behavior_always),
        _has_vision,
    )


@@ -57,6 +57,7 @@ _SESSION_SEGMENT_RE = re.compile(r"^[a-z0-9_]+$")
# pushed wholesale anyway.
_MAX_UPLOAD_BYTES = 100 * 1024 * 1024


def _agents_dir() -> Path:
    """``COLONIES_DIR`` resolves to ``HIVE_HOME/colonies``; ``agents/`` is
    the sibling. Resolved per-call so tests that monkeypatch
@@ -129,9 +130,7 @@ def _normalise_member_name(name: str) -> str:
    return name


def _safe_extract_tar(
    tf: tarfile.TarFile, dest: Path, *, strip_prefix: str
) -> tuple[int, str | None]:
def _safe_extract_tar(tf: tarfile.TarFile, dest: Path, *, strip_prefix: str) -> tuple[int, str | None]:
    """Extract every member of ``tf`` whose name starts with ``strip_prefix/``
    into ``dest``, with the prefix stripped off.

@@ -159,7 +158,7 @@ def _safe_extract_tar(
        if not name.startswith(prefix_with_sep):
            # Belongs to a different root in a multi-root tar; skip.
            continue
        rel = name[len(prefix_with_sep):]
        rel = name[len(prefix_with_sep) :]
    else:
        rel = name
    if not rel:
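The hunks above show only _safe_extract_tar's signature, docstring, and prefix-stripping branch; the safety checks implied by its name sit outside this diff. A self-contained sketch of the same idea, stripping the prefix and refusing any member that would escape dest; every detail here is an assumption, not the real implementation (which also returns an error string rather than raising):

import tarfile
from pathlib import Path


def extract_with_prefix(tf: tarfile.TarFile, dest: Path, strip_prefix: str) -> int:
    """Extract members under ``strip_prefix/`` into ``dest``; return a file count."""
    prefix_with_sep = strip_prefix + "/"
    count = 0
    for member in tf.getmembers():
        if not member.name.startswith(prefix_with_sep):
            continue  # different root in a multi-root tar, or the root entry itself
        rel = member.name[len(prefix_with_sep) :]
        if not rel:
            continue
        target = (dest / rel).resolve()
        if not target.is_relative_to(dest.resolve()):
            raise ValueError(f"unsafe member path: {member.name}")  # traversal guard
        if member.isdir():
            target.mkdir(parents=True, exist_ok=True)
        elif member.isfile():
            target.parent.mkdir(parents=True, exist_ok=True)
            fileobj = tf.extractfile(member)
            if fileobj is not None:
                target.write_bytes(fileobj.read())
                count += 1
    return count
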
@@ -299,9 +298,7 @@ async def _read_upload(
) -> tuple[bytes | None, str | None, dict[str, str], web.Response | None]:
    """Drain the multipart upload. Returns ``(bytes, filename, form, error)``."""
    if not request.content_type.startswith("multipart/"):
        return None, None, {}, web.json_response(
            {"error": "expected multipart/form-data"}, status=400
        )
        return None, None, {}, web.json_response({"error": "expected multipart/form-data"}, status=400)
    reader = await request.multipart()
    upload: bytes | None = None
    upload_filename: str | None = None
@@ -318,18 +315,21 @@ async def _read_upload(
                    break
                buf.write(chunk)
                if buf.tell() > _MAX_UPLOAD_BYTES:
                    return None, None, {}, web.json_response(
                        {"error": f"upload exceeds {_MAX_UPLOAD_BYTES} bytes"},
                        status=413,
                    return (
                        None,
                        None,
                        {},
                        web.json_response(
                            {"error": f"upload exceeds {_MAX_UPLOAD_BYTES} bytes"},
                            status=413,
                        ),
                    )
            upload = buf.getvalue()
            upload_filename = part.filename or ""
        else:
            form[part.name or ""] = (await part.text()).strip()
    if upload is None:
        return None, None, {}, web.json_response(
            {"error": "missing 'file' part"}, status=400
        )
        return None, None, {}, web.json_response({"error": "missing 'file' part"}, status=400)
    return upload, upload_filename, form, None


@@ -346,18 +346,12 @@ async def handle_import_colony(request: web.Request) -> web.Response:
    try:
        tf = tarfile.open(fileobj=io.BytesIO(upload), mode="r:*")
    except tarfile.TarError as err:
        return web.json_response(
            {"error": f"invalid tar archive: {err}"}, status=400
        )
        return web.json_response({"error": f"invalid tar archive: {err}"}, status=400)

    try:
        if _has_multi_root_prefix(tf):
            return await _import_multi_root(
                tf, replace_existing, upload_filename, len(upload)
            )
        return await _import_legacy_single_root(
            tf, name_override, replace_existing, upload_filename, len(upload)
        )
            return await _import_multi_root(tf, replace_existing, upload_filename, len(upload))
        return await _import_legacy_single_root(tf, name_override, replace_existing, upload_filename, len(upload))
    finally:
        tf.close()

@@ -478,15 +472,11 @@ async def _import_multi_root(
    for kind in ("colonies", "agents_worker", "agents_queen"):
        for prefix, dest in plan[kind].items():
            target = Path(dest)
            files_extracted, extract_err = _safe_extract_tar(
                tf, target, strip_prefix=prefix
            )
            files_extracted, extract_err = _safe_extract_tar(tf, target, strip_prefix=prefix)
            if extract_err:
                return _abort(extract_err)
            summary.setdefault(kind, {"files": 0})
            summary[kind]["files"] = (
                int(summary[kind].get("files", 0)) + files_extracted
            )
            summary[kind]["files"] = int(summary[kind].get("files", 0)) + files_extracted
            extracted_dests.append(target)

    total_files = sum(int(v.get("files", 0)) for v in summary.values())

@@ -1181,7 +1181,6 @@ async def fork_session_into_colony(
    import json
    import shutil
    from datetime import datetime
    from pathlib import Path

    from framework.agent_loop.agent_loop import AgentLoop, LoopConfig
    from framework.agent_loop.types import AgentContext

@@ -69,9 +69,7 @@ async def handle_list_nodes(request: web.Request) -> web.Response:
    worker_session_id = safe_path_segment(worker_session_id)
    from framework.config import HIVE_HOME

    state_path = (
        HIVE_HOME / "agents" / session.worker_path.name / "sessions" / worker_session_id / "state.json"
    )
    state_path = HIVE_HOME / "agents" / session.worker_path.name / "sessions" / worker_session_id / "state.json"
    if state_path.exists():
        try:
            state = json.loads(state_path.read_text(encoding="utf-8"))

@@ -105,9 +105,7 @@ async def test_happy_path_imports_colony(colonies_dir: Path) -> None:
async def test_name_override(colonies_dir: Path) -> None:
    archive = _build_tar({"x_daily/": None, "x_daily/file.txt": b"hi"})
    async with await _client(_app()) as c:
        resp = await c.post(
            "/api/colonies/import", data=_form(archive, name="other_name")
        )
        resp = await c.post("/api/colonies/import", data=_form(archive, name="other_name"))
        assert resp.status == 201
        body = await resp.json()
        assert body["name"] == "other_name"
@@ -202,7 +200,9 @@ async def test_rejects_invalid_colony_name(colonies_dir: Path) -> None:
@pytest.mark.asyncio
async def test_rejects_non_multipart(colonies_dir: Path) -> None:
    async with await _client(_app()) as c:
        resp = await c.post("/api/colonies/import", data=b"not multipart", headers={"Content-Type": "application/octet-stream"})
        resp = await c.post(
            "/api/colonies/import", data=b"not multipart", headers={"Content-Type": "application/octet-stream"}
        )
        assert resp.status == 400


@@ -266,7 +266,9 @@ async def test_multi_root_unpacks_three_subtrees(colonies_dir: Path) -> None:
    assert (colonies_dir / "x_daily" / "data" / "progress.db").exists()
    # Worker conversations under HIVE_HOME/agents/<colony>/worker/
    hive_home = colonies_dir.parent
    assert (hive_home / "agents" / "x_daily" / "worker" / "conversations" / "0001.json").read_bytes() == b'{"role":"user"}'
    assert (
        hive_home / "agents" / "x_daily" / "worker" / "conversations" / "0001.json"
    ).read_bytes() == b'{"role":"user"}'
    # Queen forked session under HIVE_HOME/agents/queens/<queen>/sessions/<sid>/
    assert (hive_home / "agents" / "queens" / "queen_alpha" / "sessions" / "session_x" / "queen.json").exists()
    # Summary in response

@@ -18,6 +18,7 @@ from pathlib import Path
from framework.skills.parser import ParsedSkill
from framework.skills.skill_errors import SkillError, SkillErrorCode


# Default install destination for user-scope skills + sentinel file for
# the one-time security notice on first install (NFR-5). Computed via
# helpers so HIVE_HOME (set by the desktop shell to a per-user dir)
@@ -35,6 +36,7 @@ def _install_notice_sentinel() -> Path:

    return HIVE_HOME / ".install_notice_shown"


_INSTALL_NOTICE = """\
─────────────────────────────────────────────────────────────
Security Notice: Installing Third-Party Skills

@@ -26,6 +26,7 @@ _DEFAULT_REGISTRY_URL = (
    "https://raw.githubusercontent.com/hive-skill-registry/hive-skill-registry/main/skill_index.json"
)


def _cache_dir() -> Path:
    from framework.config import HIVE_HOME

@@ -38,6 +39,8 @@ def _cache_index_path() -> Path:

def _cache_metadata_path() -> Path:
    return _cache_dir() / "metadata.json"


_CACHE_TTL_SECONDS = 3600  # 1 hour

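The hunk gives the cache layout (an index file beside metadata.json under a HIVE_HOME-derived cache dir) and a one-hour TTL, but not the freshness check itself. One plausible shape for that check; the fetched_at field and the function name are assumptions, not code from this commit:

import json
import time
from pathlib import Path


def load_cached_index(index_path: Path, metadata_path: Path, ttl: int = 3600) -> dict | None:
    """Return the cached registry index, or None when the cache is stale or absent."""
    try:
        meta = json.loads(metadata_path.read_text(encoding="utf-8"))
        if time.time() - float(meta["fetched_at"]) > ttl:
            return None  # stale: caller should re-fetch skill_index.json
        return json.loads(index_path.read_text(encoding="utf-8"))
    except (OSError, ValueError, KeyError):
        return None  # a missing or corrupt cache is treated as a miss
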
@@ -30,6 +30,7 @@ _ENV_TRUST_ALL = "HIVE_TRUST_PROJECT_SKILLS"
# Env var for comma-separated own-remote glob patterns (e.g. "github.com/myorg/*").
_ENV_OWN_REMOTES = "HIVE_OWN_REMOTES"


def _trusted_repos_path() -> Path:
    from framework.config import HIVE_HOME

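The comment documents HIVE_OWN_REMOTES as comma-separated glob patterns, but the matching code is outside this diff. A small sketch using the stdlib's fnmatch; the function name and the normalized remote format are assumptions:

import os
from fnmatch import fnmatch


def is_own_remote(remote: str) -> bool:
    """True when a normalized remote such as "github.com/myorg/repo" matches
    any glob listed in HIVE_OWN_REMOTES."""
    raw = os.environ.get("HIVE_OWN_REMOTES", "")
    patterns = [p.strip() for p in raw.split(",") if p.strip()]
    return any(fnmatch(remote, pattern) for pattern in patterns)


# With HIVE_OWN_REMOTES="github.com/myorg/*":
#   is_own_remote("github.com/myorg/tools")  -> True
#   is_own_remote("github.com/other/tools")  -> False
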
@@ -248,9 +248,7 @@ class TaskStore:
        rejected before any write. The doc model makes "atomic-or-none"
        free — we mutate one in-memory document and write it once.
        """
        return await asyncio.to_thread(
            self._create_tasks_batch_sync, task_list_id, specs
        )
        return await asyncio.to_thread(self._create_tasks_batch_sync, task_list_id, specs)

    async def create_task(
        self,
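The docstring's "atomic-or-none" guarantee rests on mutating one in-memory document and performing a single write. The diff does not show how that write is made crash-safe; the usual pattern is write-to-temp-then-rename, sketched below (whether TaskStore does exactly this is an assumption):

import json
import os
from pathlib import Path


def write_doc_atomically(doc_path: Path, doc: dict) -> None:
    """Publish a whole-document update with one atomic rename."""
    tmp = doc_path.with_suffix(".tmp")
    tmp.write_text(json.dumps(doc, indent=2), encoding="utf-8")
    # os.replace is atomic on POSIX and Windows: readers see the old
    # document or the new one, never a partially written file.
    os.replace(tmp, doc_path)
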
@@ -401,9 +399,7 @@ class TaskStore:
        doc_path = self._doc_path(task_list_id)
        if doc_path.exists():
            try:
                return TaskListDocument.model_validate_json(
                    doc_path.read_text(encoding="utf-8")
                )
                return TaskListDocument.model_validate_json(doc_path.read_text(encoding="utf-8"))
            except Exception:
                logger.warning("Corrupt tasks.json at %s", doc_path, exc_info=True)
                # Fall through — legacy fallback may rescue us.
@@ -414,9 +410,7 @@ class TaskStore:
        # finished migrating, in which case we read the new doc.
        if doc_path.exists():
            try:
                return TaskListDocument.model_validate_json(
                    doc_path.read_text(encoding="utf-8")
                )
                return TaskListDocument.model_validate_json(doc_path.read_text(encoding="utf-8"))
            except Exception:
                logger.warning(
                    "Corrupt tasks.json at %s (post-lock)",
@@ -438,9 +432,7 @@ class TaskStore:
        doc_path = self._doc_path(task_list_id)
        if doc_path.exists():
            try:
                return TaskListDocument.model_validate_json(
                    doc_path.read_text(encoding="utf-8")
                )
                return TaskListDocument.model_validate_json(doc_path.read_text(encoding="utf-8"))
            except Exception:
                logger.warning("Corrupt tasks.json at %s", doc_path, exc_info=True)
        if self._has_legacy_artifacts(task_list_id):
@@ -464,11 +456,7 @@ class TaskStore:
        """Fold legacy artifacts into a TaskListDocument. Caller MUST hold lock."""
        meta = self._read_legacy_meta(task_list_id)
        if meta is None:
            inferred_role = (
                TaskListRole.TEMPLATE
                if task_list_id.startswith("colony:")
                else TaskListRole.SESSION
            )
            inferred_role = TaskListRole.TEMPLATE if task_list_id.startswith("colony:") else TaskListRole.SESSION
            meta = TaskListMeta(task_list_id=task_list_id, role=inferred_role)

        tasks: list[TaskRecord] = []
@@ -621,14 +609,8 @@ class TaskStore:
        with self._list_lock(task_list_id):
            doc = self._read_doc_unsafe(task_list_id)
            if doc is None:
                inferred_role = (
                    TaskListRole.TEMPLATE
                    if task_list_id.startswith("colony:")
                    else TaskListRole.SESSION
                )
                doc = TaskListDocument(
                    meta=TaskListMeta(task_list_id=task_list_id, role=inferred_role)
                )
                inferred_role = TaskListRole.TEMPLATE if task_list_id.startswith("colony:") else TaskListRole.SESSION
                doc = TaskListDocument(meta=TaskListMeta(task_list_id=task_list_id, role=inferred_role))
            new_id = self._next_id_for_doc(doc)
            now = time.time()
            record = TaskRecord(
@@ -664,14 +646,8 @@ class TaskStore:
        with self._list_lock(task_list_id):
            doc = self._read_doc_unsafe(task_list_id)
            if doc is None:
                inferred_role = (
                    TaskListRole.TEMPLATE
                    if task_list_id.startswith("colony:")
                    else TaskListRole.SESSION
                )
                doc = TaskListDocument(
                    meta=TaskListMeta(task_list_id=task_list_id, role=inferred_role)
                )
                inferred_role = TaskListRole.TEMPLATE if task_list_id.startswith("colony:") else TaskListRole.SESSION
                doc = TaskListDocument(meta=TaskListMeta(task_list_id=task_list_id, role=inferred_role))

            base_id = self._next_id_for_doc(doc)
            now = time.time()
@@ -726,14 +702,18 @@ class TaskStore:
            target = next((r for r in doc.tasks if r.id == task_id), None)
            if target is None:
                return None, []
            new, changed = self._update_task_in_doc(doc, target, subject=subject,
                                                    description=description,
                                                    active_form=active_form,
                                                    owner=owner,
                                                    status=status,
                                                    add_blocks=add_blocks,
                                                    add_blocked_by=add_blocked_by,
                                                    metadata_patch=metadata_patch)
            new, changed = self._update_task_in_doc(
                doc,
                target,
                subject=subject,
                description=description,
                active_form=active_form,
                owner=owner,
                status=status,
                add_blocks=add_blocks,
                add_blocked_by=add_blocked_by,
                metadata_patch=metadata_patch,
            )
            if changed:
                self._write_doc_unsafe(task_list_id, doc)
            return new, changed

@@ -125,8 +125,7 @@ def _create_batch_schema() -> dict[str, Any]:
            "type": "array",
            "minItems": 1,
            "description": (
                "Array of task specs. Each becomes one task with a "
                "sequential id. Atomic — all created or none."
                "Array of task specs. Each becomes one task with a sequential id. Atomic — all created or none."
            ),
            "items": {
                "type": "object",
@@ -138,9 +137,7 @@ def _create_batch_schema() -> dict[str, Any]:
                "description": {"type": "string"},
                "active_form": {
                    "type": "string",
                    "description": (
                        "Present-continuous label shown while in_progress."
                    ),
                    "description": ("Present-continuous label shown while in_progress."),
                },
                "metadata": {"type": "object"},
            },
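Read together, the two wrapped hunks above belong to one JSON-schema parameter block for the batch-create tool. Assembled here for readability; the outer property name, the subject field, and both required lists are inferred, not shown in the diff:

CREATE_BATCH_PARAMETERS = {
    "type": "object",
    "properties": {
        "tasks": {
            "type": "array",
            "minItems": 1,
            "description": "Array of task specs. Each becomes one task with a sequential id. Atomic — all created or none.",
            "items": {
                "type": "object",
                "properties": {
                    "subject": {"type": "string"},
                    "description": {"type": "string"},
                    "active_form": {
                        "type": "string",
                        "description": "Present-continuous label shown while in_progress.",
                    },
                    "metadata": {"type": "object"},
                },
                "required": ["subject"],
            },
        },
    },
    "required": ["tasks"],
}
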
@@ -317,10 +314,7 @@ def _make_create_batch_executor(store: TaskStore):
                await store.delete_task(list_id, r.id)
            return {
                "success": False,
                "error": (
                    f"Hook blocked task #{rec.id} ({rec.subject!r}); "
                    f"entire batch rolled back: {exc}"
                ),
                "error": (f"Hook blocked task #{rec.id} ({rec.subject!r}); entire batch rolled back: {exc}"),
            }

        for rec in recs:
@@ -339,10 +333,7 @@ def _make_create_batch_executor(store: TaskStore):
            "success": True,
            "task_list_id": list_id,
            "task_ids": ids,
            "message": (
                f"Created {len(ids)} task(s): {range_label}. "
                f"Mark #{ids[0]} in_progress before starting it."
            ),
            "message": (f"Created {len(ids)} task(s): {range_label}. Mark #{ids[0]} in_progress before starting it."),
            "tasks": [_serialize_task(r) for r in recs],
        }


@@ -2440,15 +2440,13 @@ def register_queen_lifecycle_tools(
        if not _COLONY_NAME_RE.match(cn):
            return json.dumps({"error": "colony_name must be lowercase alphanumeric with underscores"})

        from pathlib import Path as _Path

        from framework.config import COLONIES_DIR as _COLONIES_DIR
        from framework.host.progress_db import (
            enqueue_task as _enqueue_task,
            ensure_progress_db as _ensure_db,
        )

        from framework.config import COLONIES_DIR as _COLONIES_DIR

        colony_dir = _COLONIES_DIR / cn
        if not colony_dir.is_dir():
            return json.dumps({"error": f"colony '{cn}' not found"})

@@ -15,6 +15,7 @@ from typing import IO, Any

logger = logging.getLogger(__name__)


def _llm_debug_dir() -> Path:
    """Resolve $HIVE_HOME/llm_logs lazily so the env override (set by the
    desktop) takes effect. A module-level constant would freeze whatever
@@ -23,6 +24,7 @@ def _llm_debug_dir() -> Path:

    return HIVE_HOME / "llm_logs"


_log_file: IO[str] | None = None
_log_ready = False  # lazy init guard

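_log_ready is a once-only guard: the handle is opened lazily on first use, after _llm_debug_dir() resolves, never at import time. A sketch of how such a guard typically works; the helper name and log filename are illustrative, not taken from this commit:

import logging
from pathlib import Path
from typing import IO

_log_file: IO[str] | None = None
_log_ready = False  # lazy init guard


def _ensure_log_file(debug_dir: Path) -> IO[str] | None:
    """Open the debug log on first call; later calls reuse (or skip) the handle."""
    global _log_file, _log_ready
    if not _log_ready:
        _log_ready = True  # set even on failure so we only attempt once
        try:
            debug_dir.mkdir(parents=True, exist_ok=True)
            _log_file = open(debug_dir / "requests.log", "a", encoding="utf-8")
        except OSError:
            logging.getLogger(__name__).warning("LLM debug log unavailable", exc_info=True)
    return _log_file
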
@@ -25,9 +25,7 @@ def test_is_client_facing() -> None:
    assert session_summary.is_client_facing({"role": "user", "content": "hi"})
    assert session_summary.is_client_facing({"role": "assistant", "content": "ok"})
    assert not session_summary.is_client_facing({"role": "tool", "content": "x"})
    assert not session_summary.is_client_facing(
        {"role": "assistant", "content": "", "tool_calls": [{"id": "1"}]}
    )
    assert not session_summary.is_client_facing({"role": "assistant", "content": "", "tool_calls": [{"id": "1"}]})
    assert not session_summary.is_client_facing({"is_transition_marker": True})


@@ -68,9 +68,7 @@ def _is_test_session(execution_id: str, records: list[dict[str, Any]]) -> bool:
    if execution_id.startswith("<MagicMock"):
        return True
    models = {
        str(r.get("token_counts", {}).get("model", ""))
        for r in records
        if isinstance(r.get("token_counts"), dict)
        str(r.get("token_counts", {}).get("model", "")) for r in records if isinstance(r.get("token_counts"), dict)
    }
    models.discard("")
    if models and models <= {"mock"}:
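models <= {"mock"} is Python's set subset operator, so the guard reads "at least one model was recorded and every one of them is 'mock'". Concretely:

models = {"mock"}
assert models and models <= {"mock"}  # only mock traffic: flagged as a test session

models = {"mock", "claude-sonnet"}  # second entry is a hypothetical real model name
assert not (models and models <= {"mock"})  # any real model disqualifies

models = set()
assert not (models and models <= {"mock"})  # empty set is a subset, but falsy
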
@@ -80,9 +78,7 @@ def _is_test_session(execution_id: str, records: list[dict[str, Any]]) -> bool:
    return False


def _discover_session_summaries(
    logs_dir: Path, limit_files: int, include_tests: bool
) -> list[SessionSummary]:
def _discover_session_summaries(logs_dir: Path, limit_files: int, include_tests: bool) -> list[SessionSummary]:
    if not logs_dir.exists():
        raise FileNotFoundError(f"log directory not found: {logs_dir}")

@@ -120,9 +116,7 @@ def _discover_session_summaries(
        continue

    if not include_tests:
        by_session = {
            eid: recs for eid, recs in by_session.items() if not _is_test_session(eid, recs)
        }
        by_session = {eid: recs for eid, recs in by_session.items() if not _is_test_session(eid, recs)}

    summaries: list[SessionSummary] = []
    for eid, recs in by_session.items():
@@ -141,8 +135,7 @@ def _discover_session_summaries(
            {
                str(r.get("token_counts", {}).get("model", ""))
                for r in recs
                if isinstance(r.get("token_counts"), dict)
                and r.get("token_counts", {}).get("model")
                if isinstance(r.get("token_counts"), dict) and r.get("token_counts", {}).get("model")
            }
        ),
    )
@@ -152,9 +145,7 @@ def _discover_session_summaries(
    return summaries


def _load_session_data(
    logs_dir: Path, session_id: str, limit_files: int
) -> list[dict[str, Any]] | None:
def _load_session_data(logs_dir: Path, session_id: str, limit_files: int) -> list[dict[str, Any]] | None:
    if not logs_dir.exists():
        return None