fix: windows compatibility

This commit is contained in:
Timothy Zhang
2026-02-27 19:55:48 -08:00
parent 6ff4ec3643
commit de27bfe76f
14 changed files with 35 additions and 44 deletions
@@ -7,9 +7,9 @@ from framework.graph import NodeSpec
# Load reference docs at import time so they're always in the system prompt.
# No voluntary read_file() calls needed — the LLM gets everything upfront.
_ref_dir = Path(__file__).parent.parent / "reference"
_framework_guide = (_ref_dir / "framework_guide.md").read_text()
_file_templates = (_ref_dir / "file_templates.md").read_text()
_anti_patterns = (_ref_dir / "anti_patterns.md").read_text()
_framework_guide = (_ref_dir / "framework_guide.md").read_text(encoding="utf-8")
_file_templates = (_ref_dir / "file_templates.md").read_text(encoding="utf-8")
_anti_patterns = (_ref_dir / "anti_patterns.md").read_text(encoding="utf-8")
# Shared appendices — appended to every coding node's system prompt.
_appendices = (
+1 -1
View File
@@ -761,7 +761,7 @@ class GraphBuilder:
path = self.storage_path / f"{session_id}.json"
if not path.exists():
raise FileNotFoundError(f"Session not found: {session_id}")
return BuildSession.model_validate_json(path.read_text())
return BuildSession.model_validate_json(path.read_text(encoding="utf-8"))
@classmethod
def list_sessions(cls, storage_path: Path | str | None = None) -> list[str]:
+1 -1
View File
@@ -164,7 +164,7 @@ def _read_credential_key_file() -> str | None:
"""Read the credential key from ``~/.hive/secrets/credential_key``."""
try:
if CREDENTIAL_KEY_PATH.is_file():
value = CREDENTIAL_KEY_PATH.read_text().strip()
value = CREDENTIAL_KEY_PATH.read_text(encoding="utf-8").strip()
if value:
return value
except Exception:
+3 -3
View File
@@ -1864,7 +1864,7 @@ def import_from_export(
return json.dumps({"success": False, "error": f"File not found: {agent_json_path}"})
try:
data = json.loads(path.read_text())
data = json.loads(path.read_text(encoding="utf-8"))
except json.JSONDecodeError as e:
return json.dumps({"success": False, "error": f"Invalid JSON: {e}"})
@@ -2986,7 +2986,7 @@ def debug_test(
# Find which file contains the test
test_file = None
for py_file in tests_dir.glob("test_*.py"):
content = py_file.read_text()
content = py_file.read_text(encoding="utf-8")
if f"def {test_name}" in content or f"async def {test_name}" in content:
test_file = py_file
break
@@ -3138,7 +3138,7 @@ def list_tests(
tests = []
for test_file in sorted(tests_dir.glob("test_*.py")):
try:
content = test_file.read_text()
content = test_file.read_text(encoding="utf-8")
tree = ast.parse(content)
# Find all async function definitions that start with "test_"
+2 -2
View File
@@ -428,7 +428,7 @@ def _load_resume_state(
if not cp_path.exists():
return None
try:
cp_data = json.loads(cp_path.read_text())
cp_data = json.loads(cp_path.read_text(encoding="utf-8"))
except (json.JSONDecodeError, OSError):
return None
return {
@@ -444,7 +444,7 @@ def _load_resume_state(
if not state_path.exists():
return None
try:
state_data = json.loads(state_path.read_text())
state_data = json.loads(state_path.read_text(encoding="utf-8"))
except (json.JSONDecodeError, OSError):
return None
progress = state_data.get("progress", {})
+1 -1
View File
@@ -190,7 +190,7 @@ async def handle_resume(request: web.Request) -> web.Response:
return web.json_response({"error": "Session not found"}, status=404)
try:
state = json.loads(state_path.read_text())
state = json.loads(state_path.read_text(encoding="utf-8"))
except (json.JSONDecodeError, OSError) as e:
return web.json_response({"error": f"Failed to read session: {e}"}, status=500)
+1 -1
View File
@@ -79,7 +79,7 @@ async def handle_list_nodes(request: web.Request) -> web.Response:
)
if state_path.exists():
try:
state = json.loads(state_path.read_text())
state = json.loads(state_path.read_text(encoding="utf-8"))
progress = state.get("progress", {})
visit_counts = progress.get("node_visit_counts", {})
failures = progress.get("nodes_with_failures", [])
+5 -5
View File
@@ -369,7 +369,7 @@ async def handle_list_worker_sessions(request: web.Request) -> web.Response:
state_path = d / "state.json"
if state_path.exists():
try:
state = json.loads(state_path.read_text())
state = json.loads(state_path.read_text(encoding="utf-8"))
entry["status"] = state.get("status", "unknown")
entry["started_at"] = state.get("started_at")
entry["completed_at"] = state.get("completed_at")
@@ -408,7 +408,7 @@ async def handle_get_worker_session(request: web.Request) -> web.Response:
return web.json_response({"error": "Session not found"}, status=404)
try:
state = json.loads(state_path.read_text())
state = json.loads(state_path.read_text(encoding="utf-8"))
except (json.JSONDecodeError, OSError) as e:
return web.json_response({"error": f"Failed to read session: {e}"}, status=500)
@@ -436,7 +436,7 @@ async def handle_list_checkpoints(request: web.Request) -> web.Response:
if f.suffix != ".json":
continue
try:
data = json.loads(f.read_text())
data = json.loads(f.read_text(encoding="utf-8"))
checkpoints.append(
{
"checkpoint_id": f.stem,
@@ -546,7 +546,7 @@ async def handle_messages(request: web.Request) -> web.Response:
if part_file.suffix != ".json":
continue
try:
part = json.loads(part_file.read_text())
part = json.loads(part_file.read_text(encoding="utf-8"))
part["_node_id"] = node_dir.name
all_messages.append(part)
except (json.JSONDecodeError, OSError):
@@ -600,7 +600,7 @@ async def handle_queen_messages(request: web.Request) -> web.Response:
if part_file.suffix != ".json":
continue
try:
part = json.loads(part_file.read_text())
part = json.loads(part_file.read_text(encoding="utf-8"))
part["_node_id"] = node_dir.name
all_messages.append(part)
except (json.JSONDecodeError, OSError):
+2 -2
View File
@@ -277,13 +277,13 @@ class SessionManager:
if not state_path.exists():
continue
try:
state = json.loads(state_path.read_text())
state = json.loads(state_path.read_text(encoding="utf-8"))
if state.get("status") != "active":
continue
state["status"] = "cancelled"
state.setdefault("result", {})["error"] = "Stale session: runtime restarted"
state.setdefault("timestamps", {})["updated_at"] = datetime.now().isoformat()
state_path.write_text(json.dumps(state, indent=2))
state_path.write_text(json.dumps(state, indent=2), encoding="utf-8")
logger.info(
"Marked stale session '%s' as cancelled for agent '%s'", d.name, agent_path.name
)
+2 -2
View File
@@ -95,7 +95,7 @@ class CheckpointStore:
return None
try:
return Checkpoint.model_validate_json(checkpoint_path.read_text())
return Checkpoint.model_validate_json(checkpoint_path.read_text(encoding="utf-8"))
except Exception as e:
logger.error(f"Failed to load checkpoint {checkpoint_id}: {e}")
return None
@@ -123,7 +123,7 @@ class CheckpointStore:
return None
try:
return CheckpointIndex.model_validate_json(self.index_path.read_text())
return CheckpointIndex.model_validate_json(self.index_path.read_text(encoding="utf-8"))
except Exception as e:
logger.error(f"Failed to load checkpoint index: {e}")
return None
+2 -2
View File
@@ -114,7 +114,7 @@ class SessionStore:
if not state_path.exists():
return None
return SessionState.model_validate_json(state_path.read_text())
return SessionState.model_validate_json(state_path.read_text(encoding="utf-8"))
return await asyncio.to_thread(_read)
@@ -151,7 +151,7 @@ class SessionStore:
continue
try:
state = SessionState.model_validate_json(state_path.read_text())
state = SessionState.model_validate_json(state_path.read_text(encoding="utf-8"))
# Apply filters
if status and state.status != status:
+2 -2
View File
@@ -190,7 +190,7 @@ def cmd_test_debug(args: argparse.Namespace) -> int:
# Find which file contains the test
test_file = None
for py_file in tests_dir.glob("test_*.py"):
content = py_file.read_text()
content = py_file.read_text(encoding="utf-8")
if f"def {test_name}" in content or f"async def {test_name}" in content:
test_file = py_file
break
@@ -238,7 +238,7 @@ def _scan_test_files(tests_dir: Path) -> list[dict]:
for test_file in sorted(tests_dir.glob("test_*.py")):
try:
content = test_file.read_text()
content = test_file.read_text(encoding="utf-8")
tree = ast.parse(content)
for node in ast.walk(tree):
+4 -4
View File
@@ -53,7 +53,7 @@ def _get_last_active(agent_name: str) -> str | None:
if not state_file.exists():
continue
try:
data = json.loads(state_file.read_text())
data = json.loads(state_file.read_text(encoding="utf-8"))
ts = data.get("timestamps", {}).get("updated_at")
if ts and (latest is None or ts > latest):
latest = ts
@@ -84,7 +84,7 @@ def _extract_agent_stats(agent_path: Path) -> tuple[int, int, list[str]]:
agent_py = agent_path / "agent.py"
if agent_py.exists():
try:
tree = ast.parse(agent_py.read_text())
tree = ast.parse(agent_py.read_text(encoding="utf-8"))
for node in ast.walk(tree):
# Find `nodes = [...]` assignment
if isinstance(node, ast.Assign):
@@ -99,7 +99,7 @@ def _extract_agent_stats(agent_path: Path) -> tuple[int, int, list[str]]:
agent_json = agent_path / "agent.json"
if agent_json.exists():
try:
data = json.loads(agent_json.read_text())
data = json.loads(agent_json.read_text(encoding="utf-8"))
json_nodes = data.get("nodes", [])
if node_count == 0:
node_count = len(json_nodes)
@@ -150,7 +150,7 @@ def discover_agents() -> dict[str, list[AgentEntry]]:
agent_json = path / "agent.json"
if agent_json.exists():
try:
data = json.loads(agent_json.read_text())
data = json.loads(agent_json.read_text(encoding="utf-8"))
meta = data.get("agent", {})
name = meta.get("name", name)
desc = meta.get("description", desc)
+6 -15
View File
@@ -307,8 +307,9 @@ Write-Host ""
Write-Step -Number "1" -Text "Step 1: Checking Python..."
# On Windows "python3.x" aliases don't exist; prefer "python" then "python3"
$PythonCmd = $null
foreach ($candidate in @("python3.13", "python3.12", "python3.11", "python3", "python")) {
foreach ($candidate in @("python", "python3", "python3.13", "python3.12", "python3.11")) {
try {
$ver = & $candidate -c "import sys; print(f'{sys.version_info.major}.{sys.version_info.minor}')" 2>$null
if ($LASTEXITCODE -eq 0 -and $ver) {
@@ -326,17 +327,7 @@ foreach ($candidate in @("python3.13", "python3.12", "python3.11", "python3", "p
}
if (-not $PythonCmd) {
# Try plain "python" as final fallback (common on Windows)
try {
$ver = & python -c "import sys; print(f'{sys.version_info.major}.{sys.version_info.minor}')" 2>$null
if ($LASTEXITCODE -eq 0) {
Write-Color -Text "Python $ver found but 3.11+ is required." -Color Red
} else {
Write-Color -Text "Python is not installed." -Color Red
}
} catch {
Write-Color -Text "Python is not installed." -Color Red
}
Write-Color -Text "Python 3.11+ is not installed or not on PATH." -Color Red
Write-Host ""
Write-Host "Please install Python 3.11+ from https://python.org"
Write-Host " - Make sure to check 'Add Python to PATH' during installation"
@@ -673,7 +664,7 @@ $imports = @(
$modulesToCheck = @("framework", "aden_tools", "litellm", "framework.mcp.agent_builder_server")
try {
$checkOutput = & uv run $PythonCmd scripts/check_requirements.py @modulesToCheck 2>&1 | Out-String
$checkOutput = & uv run python scripts/check_requirements.py @modulesToCheck 2>&1 | Out-String
$resultJson = $null
# Try to parse JSON result
@@ -1197,7 +1188,7 @@ $verifyErrors = 0
$verifyModules = @("framework", "aden_tools")
try {
$verifyOutput = & uv run $PythonCmd scripts/check_requirements.py @verifyModules 2>&1 | Out-String
$verifyOutput = & uv run python scripts/check_requirements.py @verifyModules 2>&1 | Out-String
$verifyJson = $null
try {
@@ -1207,7 +1198,7 @@ try {
# Fall back to basic checks if JSON parsing fails
foreach ($mod in $verifyModules) {
Write-Host " $([char]0x2B21) $mod... " -NoNewline
$null = & uv run $PythonCmd -c "import $mod" 2>&1
$null = & uv run python -c "import $mod" 2>&1
if ($LASTEXITCODE -eq 0) { Write-Ok "ok" }
else { Write-Fail "failed"; $verifyErrors++ }
}