feat: worker model loading

This commit is contained in:
Richard Tang
2026-03-18 12:14:02 -07:00
parent b9d529d94e
commit 23c66d1059
5 changed files with 1606 additions and 269 deletions
+4 -1
View File
@@ -1141,7 +1141,10 @@ class AgentRunner:
# Create LLM provider
# Uses LiteLLM which auto-detects the provider from model name
if self.mock_mode:
# Skip if already injected (e.g. worker agents with a pre-built LLM)
if self._llm is not None:
pass # LLM already configured externally
elif self.mock_mode:
# Use mock LLM for testing without real API calls
from framework.llm.mock import MockLLMProvider
+34 -4
View File
@@ -289,9 +289,15 @@ class SessionManager:
loop = asyncio.get_running_loop()
# Prioritize: explicit model arg > worker-specific model > session default
from framework.config import get_preferred_worker_model
from framework.config import (
get_preferred_worker_model,
get_worker_api_base,
get_worker_api_key,
get_worker_llm_extra_kwargs,
)
resolved_model = model or get_preferred_worker_model() or self._model
worker_model = get_preferred_worker_model()
resolved_model = model or worker_model or self._model
runner = await loop.run_in_executor(
None,
lambda: AgentRunner.load(
@@ -303,6 +309,22 @@ class SessionManager:
),
)
# If a worker-specific model is configured, build an LLM provider
# with the correct worker credentials so _setup() doesn't fall back
# to the queen's llm config (which may be a different provider).
if worker_model and not model:
from framework.llm.litellm import LiteLLMProvider
worker_api_key = get_worker_api_key()
worker_api_base = get_worker_api_base()
worker_extra = get_worker_llm_extra_kwargs()
runner._llm = LiteLLMProvider(
model=resolved_model,
api_key=worker_api_key,
api_base=worker_api_base,
**worker_extra,
)
# Setup with session's event bus
if runner._agent_runtime is None:
await loop.run_in_executor(
@@ -927,6 +949,7 @@ class SessionManager:
# then use max+1 as offset so resumed sessions produce monotonically
# increasing iteration values — preventing frontend message ID collisions.
iteration_offset = 0
last_phase = ""
events_path = queen_dir / "events.jsonl"
try:
if events_path.exists():
@@ -938,17 +961,24 @@ class SessionManager:
continue
try:
evt = json.loads(line)
it = evt.get("data", {}).get("iteration")
data = evt.get("data", {})
it = data.get("iteration")
if isinstance(it, int) and it > max_iter:
max_iter = it
# Track the latest queen phase from QUEEN_PHASE_CHANGED events
if evt.get("type") == "queen_phase_changed":
phase = data.get("phase")
if phase:
last_phase = phase
except (json.JSONDecodeError, TypeError):
continue
if max_iter >= 0:
iteration_offset = max_iter + 1
logger.info(
"Session '%s' resuming with iteration_offset=%d (from events.jsonl max)",
"Session '%s' resuming with iteration_offset=%d (from events.jsonl max), last phase: %s",
session.id,
iteration_offset,
last_phase or "unknown",
)
except OSError:
pass
-8
View File
@@ -60,7 +60,6 @@
"integrity": "sha512-CGOfOJqWjg2qW/Mb6zNsDm+u5vFQ8DxXfbM09z69p5Z6+mE1ikP2jUXw+j42Pf1XTYED2Rni5f95npYeuwMDQA==",
"dev": true,
"license": "MIT",
"peer": true,
"dependencies": {
"@babel/code-frame": "^7.29.0",
"@babel/generator": "^7.29.0",
@@ -1557,7 +1556,6 @@
"integrity": "sha512-4K3bqJpXpqfg2XKGK9bpDTc6xO/xoUP/RBWS7AtRMug6zZFaRekiLzjVtAoZMquxoAbzBvy5nxQ7veS5eYzf8A==",
"dev": true,
"license": "MIT",
"peer": true,
"dependencies": {
"undici-types": "~7.18.0"
}
@@ -1573,7 +1571,6 @@
"resolved": "https://registry.npmjs.org/@types/react/-/react-18.3.28.tgz",
"integrity": "sha512-z9VXpC7MWrhfWipitjNdgCauoMLRdIILQsAEV+ZesIzBq/oUlxk0m3ApZuMFCXdnS4U7KrI+l3WRUEGQ8K1QKw==",
"license": "MIT",
"peer": true,
"dependencies": {
"@types/prop-types": "*",
"csstype": "^3.2.2"
@@ -1786,7 +1783,6 @@
}
],
"license": "MIT",
"peer": true,
"dependencies": {
"baseline-browser-mapping": "^2.9.0",
"caniuse-lite": "^1.0.30001759",
@@ -3564,7 +3560,6 @@
"integrity": "sha512-5gTmgEY/sqK6gFXLIsQNH19lWb4ebPDLA4SdLP7dsWkIXHWlG66oPuVvXSGFPppYZz8ZDZq0dYYrbHfBCVUb1Q==",
"dev": true,
"license": "MIT",
"peer": true,
"engines": {
"node": ">=12"
},
@@ -3616,7 +3611,6 @@
"resolved": "https://registry.npmjs.org/react/-/react-18.3.1.tgz",
"integrity": "sha512-wS+hAgJShR0KhEvPJArfuPVN1+Hz1t0Y6n5jLrGQbkb4urgPE/0Rve+1kMB1v/oWgHgm4WIcV+i7F2pTVj+2iQ==",
"license": "MIT",
"peer": true,
"dependencies": {
"loose-envify": "^1.1.0"
},
@@ -3629,7 +3623,6 @@
"resolved": "https://registry.npmjs.org/react-dom/-/react-dom-18.3.1.tgz",
"integrity": "sha512-5m4nQKp+rZRb09LNH59GM4BxTh9251/ylbKIbpe7TpGxfJ+9kv6BLkLBXIjjspbgbnIBNqlI23tRnTWT0snUIw==",
"license": "MIT",
"peer": true,
"dependencies": {
"loose-envify": "^1.1.0",
"scheduler": "^0.23.2"
@@ -4190,7 +4183,6 @@
"integrity": "sha512-+Oxm7q9hDoLMyJOYfUYBuHQo+dkAloi33apOPP56pzj+vsdJDzr+j1NISE5pyaAuKL4A3UD34qd0lx5+kfKp2g==",
"dev": true,
"license": "MIT",
"peer": true,
"dependencies": {
"esbuild": "^0.25.0",
"fdir": "^6.4.4",
File diff suppressed because it is too large Load Diff
+755 -99
View File
@@ -7,6 +7,8 @@
# ~/.hive/configuration.json. If no worker model is configured, workers
# fall back to the default (queen) model.
#
# The provider selection flow is identical to quickstart.sh.
#
set -e
@@ -27,28 +29,36 @@ BOLD='\033[1m'
DIM='\033[2m'
NC='\033[0m'
# Hive LLM endpoint
HIVE_LLM_ENDPOINT="https://api.adenhq.com"
# Get the directory where this script is located, then the project root
SCRIPT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
PROJECT_DIR="$( cd "$SCRIPT_DIR/.." && pwd )"
HIVE_CONFIG_FILE="$HOME/.hive/configuration.json"
HIVE_CONFIG_DIR="$HOME/.hive"
HIVE_CONFIG_FILE="$HIVE_CONFIG_DIR/configuration.json"
# Helper function for prompts
prompt_yes_no() {
local prompt="$1"
local default="${2:-y}"
local response
if [ "$default" = "y" ]; then
prompt="$prompt [Y/n] "
else
prompt="$prompt [y/N] "
# ── Detect Python ─────────────────────────────────────────────────────
PYTHON_CMD=""
for CANDIDATE in python3.11 python3.12 python3.13 python3 python; do
if command -v "$CANDIDATE" &> /dev/null; then
PYTHON_MAJOR=$("$CANDIDATE" -c 'import sys; print(sys.version_info.major)')
PYTHON_MINOR=$("$CANDIDATE" -c 'import sys; print(sys.version_info.minor)')
if [ "$PYTHON_MAJOR" -eq 3 ] && [ "$PYTHON_MINOR" -ge 11 ]; then
PYTHON_CMD="$CANDIDATE"
break
fi
fi
read -r -p "$prompt" response
response="${response:-$default}"
[[ "$response" =~ ^[Yy] ]]
}
done
# ── Provider / model definitions (same as quickstart) ────────────────
if [ -z "$PYTHON_CMD" ]; then
PYTHON_CMD="python3"
if ! command -v python3 &> /dev/null; then
PYTHON_CMD="python"
fi
fi
# ── Provider / model definitions (identical to quickstart) ────────────
if [ "$USE_ASSOC_ARRAYS" = true ]; then
declare -A PROVIDER_NAMES=(
@@ -228,42 +238,185 @@ else
get_model_choice_maxcontexttokens() { local idx=$(_mc_nth "$1" "$2"); echo "${MC_MAXCONTEXTTOKENS[$idx]}"; }
fi
# ── Model selection prompt ───────────────────────────────────────────
# ── Detect user's shell rc file ──────────────────────────────────────
select_model() {
detect_shell_rc() {
    # Resolve the rc file that matches the user's login shell, so exported
    # keys land where a new interactive shell will actually pick them up.
    # Unrecognized shells fall back to ~/.profile.
    local login_shell
    login_shell=$(basename "$SHELL")
    if [ "$login_shell" = "zsh" ]; then
        # Prefer .zshrc when it exists; .zshenv is always sourced otherwise.
        if [ -f "$HOME/.zshrc" ]; then
            echo "$HOME/.zshrc"
        else
            echo "$HOME/.zshenv"
        fi
    elif [ "$login_shell" = "bash" ]; then
        # Prefer .bashrc, then .bash_profile, then the generic .profile.
        if [ -f "$HOME/.bashrc" ]; then
            echo "$HOME/.bashrc"
        elif [ -f "$HOME/.bash_profile" ]; then
            echo "$HOME/.bash_profile"
        else
            echo "$HOME/.profile"
        fi
    else
        echo "$HOME/.profile"
    fi
}
SHELL_RC_FILE=$(detect_shell_rc)
# ── Normalize OpenRouter model IDs ───────────────────────────────────
normalize_openrouter_model_id() {
    # Canonicalize a user-supplied OpenRouter model id: strip surrounding
    # whitespace, then drop a leading "openrouter/" prefix (matched
    # case-insensitively) so "openrouter/x-ai/grok" becomes "x-ai/grok".
    local model_id="$1"
    # Remove leading whitespace, then trailing whitespace.
    model_id="${model_id#"${model_id%%[![:space:]]*}"}"
    model_id="${model_id%"${model_id##*[![:space:]]}"}"
    # Only strip the prefix when something non-empty follows the slash.
    if [[ "$model_id" =~ ^[Oo][Pp][Ee][Nn][Rr][Oo][Uu][Tt][Ee][Rr]/(.+)$ ]]; then
        model_id="${BASH_REMATCH[1]}"
    fi
    printf '%s' "$model_id"
}
# ── Model selection prompt (identical to quickstart) ─────────────────
prompt_model_selection() {
local provider_id="$1"
if [ "$provider_id" = "openrouter" ]; then
local default_model=""
if [ -n "$PREV_MODEL" ] && [ "$provider_id" = "$PREV_PROVIDER" ]; then
default_model="$(normalize_openrouter_model_id "$PREV_MODEL")"
fi
echo ""
echo -e "${BOLD}Enter your OpenRouter model id:${NC}"
echo -e " ${DIM}Paste from openrouter.ai (example: x-ai/grok-4.20-beta)${NC}"
echo -e " ${DIM}If calls fail with guardrail/privacy errors: openrouter.ai/settings/privacy${NC}"
echo ""
local input_model=""
while true; do
if [ -n "$default_model" ]; then
read -r -p "Model id [$default_model]: " input_model || true
input_model="${input_model:-$default_model}"
else
read -r -p "Model id: " input_model || true
fi
local normalized_model
normalized_model="$(normalize_openrouter_model_id "$input_model")"
if [ -n "$normalized_model" ]; then
local openrouter_key=""
if [ -n "${SELECTED_ENV_VAR:-}" ]; then
openrouter_key="${!SELECTED_ENV_VAR:-}"
fi
if [ -n "$openrouter_key" ]; then
local model_hc_result=""
local model_hc_valid=""
local model_hc_msg=""
local model_hc_canonical=""
local model_hc_base="${SELECTED_API_BASE:-https://openrouter.ai/api/v1}"
echo -n " Verifying model id... "
model_hc_result="$(cd "$PROJECT_DIR" && uv run python "$PROJECT_DIR/scripts/check_llm_key.py" "openrouter" "$openrouter_key" "$model_hc_base" "$normalized_model" 2>/dev/null)" || true
model_hc_valid="$(echo "$model_hc_result" | $PYTHON_CMD -c "import json,sys; print(json.loads(sys.stdin.read()).get('valid',''))" 2>/dev/null)" || true
model_hc_msg="$(echo "$model_hc_result" | $PYTHON_CMD -c "import json,sys; print(json.loads(sys.stdin.read()).get('message',''))" 2>/dev/null)" || true
model_hc_canonical="$(echo "$model_hc_result" | $PYTHON_CMD -c "import json,sys; print(json.loads(sys.stdin.read()).get('model',''))" 2>/dev/null)" || true
if [ "$model_hc_valid" = "True" ]; then
if [ -n "$model_hc_canonical" ]; then
normalized_model="$model_hc_canonical"
fi
echo -e "${GREEN}ok${NC}"
elif [ "$model_hc_valid" = "False" ]; then
echo -e "${RED}failed${NC}"
echo -e " ${YELLOW}$model_hc_msg${NC}"
echo ""
continue
else
echo -e "${YELLOW}--${NC}"
echo -e " ${DIM}Could not verify model id (network issue). Continuing with your selection.${NC}"
fi
else
echo -e " ${DIM}Skipping model verification (OpenRouter key not available in current shell).${NC}"
fi
SELECTED_MODEL="$normalized_model"
SELECTED_MAX_TOKENS=8192
SELECTED_MAX_CONTEXT_TOKENS=120000
echo ""
echo -e "${GREEN}${NC} Model: ${DIM}$SELECTED_MODEL${NC}"
return
fi
echo -e "${RED}Model id cannot be empty.${NC}"
done
fi
local count
count=$(get_model_choice_count "$provider_id")
count="$(get_model_choice_count "$provider_id")"
if [ "$count" -eq 0 ]; then
# No curated choices for this provider (e.g. Mistral, DeepSeek)
SELECTED_MODEL="$(get_default_model "$provider_id")"
SELECTED_MAX_TOKENS=8192
SELECTED_MAX_CONTEXT_TOKENS=120000
echo ""
echo -e "${GREEN}${NC} Worker model: ${DIM}$SELECTED_MODEL${NC}"
return
fi
if [ "$count" -eq 1 ]; then
# Only one choice — auto-select
SELECTED_MODEL="$(get_model_choice_id "$provider_id" 0)"
SELECTED_MAX_TOKENS="$(get_model_choice_maxtokens "$provider_id" 0)"
SELECTED_MAX_CONTEXT_TOKENS="$(get_model_choice_maxcontexttokens "$provider_id" 0)"
return
fi
# Multiple choices — show menu
echo ""
echo -e "${BOLD}Select worker model:${NC}"
echo -e "${BOLD}Select a model:${NC}"
echo ""
# Find default index from previous model (if same provider)
local default_idx=""
if [ -n "$PREV_MODEL" ] && [ "$provider_id" = "$PREV_PROVIDER" ]; then
local j=0
while [ $j -lt "$count" ]; do
if [ "$(get_model_choice_id "$provider_id" "$j")" = "$PREV_MODEL" ]; then
default_idx=$((j + 1))
break
fi
j=$((j + 1))
done
fi
local i=0
while [ "$i" -lt "$count" ]; do
echo -e " ${CYAN}$((i+1)))${NC} $(get_model_choice_label "$provider_id" "$i")"
while [ $i -lt "$count" ]; do
local label
label="$(get_model_choice_label "$provider_id" "$i")"
local mid
mid="$(get_model_choice_id "$provider_id" "$i")"
local num=$((i + 1))
echo -e " ${CYAN}$num)${NC} $label ${DIM}($mid)${NC}"
i=$((i + 1))
done
echo ""
local choice
while true; do
local default_idx=1
read -r -p "Enter choice (1-$count) [$default_idx]: " choice || true
choice="${choice:-$default_idx}"
if [ -n "$default_idx" ]; then
read -r -p "Enter choice (1-$count) [$default_idx]: " choice || true
choice="${choice:-$default_idx}"
else
read -r -p "Enter choice (1-$count): " choice || true
fi
if [[ "$choice" =~ ^[0-9]+$ ]] && [ "$choice" -ge 1 ] && [ "$choice" -le "$count" ]; then
local idx=$((choice - 1))
SELECTED_MODEL="$(get_model_choice_id "$provider_id" "$idx")"
SELECTED_MAX_TOKENS="$(get_model_choice_maxtokens "$provider_id" "$idx")"
SELECTED_MAX_CONTEXT_TOKENS="$(get_model_choice_maxcontexttokens "$provider_id" "$idx")"
echo ""
echo -e "${GREEN}${NC} Worker model: ${DIM}$SELECTED_MODEL${NC}"
echo -e "${GREEN}${NC} Model: ${DIM}$SELECTED_MODEL${NC}"
return
fi
echo -e "${RED}Invalid choice. Please enter 1-$count${NC}"
@@ -271,6 +424,7 @@ select_model() {
}
# ── Save worker_llm section to configuration.json ────────────────────
# Args: provider_id env_var model max_tokens max_context_tokens [use_claude_code_sub] [api_base] [use_codex_sub]
save_worker_configuration() {
local provider_id="$1"
@@ -278,7 +432,9 @@ save_worker_configuration() {
local model="$3"
local max_tokens="$4"
local max_context_tokens="$5"
local api_base="${6:-}"
local use_claude_code_sub="${6:-}"
local api_base="${7:-}"
local use_codex_sub="${8:-}"
if [ -z "$model" ]; then
model="$(get_default_model "$provider_id")"
@@ -293,7 +449,9 @@ save_worker_configuration() {
"$model" \
"$max_tokens" \
"$max_context_tokens" \
"$api_base" 2>/dev/null <<'PY'
"$use_claude_code_sub" \
"$api_base" \
"$use_codex_sub" 2>/dev/null <<'PY'
import json
import sys
from pathlib import Path
@@ -304,8 +462,10 @@ from pathlib import Path
model,
max_tokens,
max_context_tokens,
use_claude_code_sub,
api_base,
) = sys.argv[1:7]
use_codex_sub,
) = sys.argv[1:9]
cfg_path = Path.home() / ".hive" / "configuration.json"
cfg_path.parent.mkdir(parents=True, exist_ok=True)
@@ -321,11 +481,28 @@ config["worker_llm"] = {
"model": model,
"max_tokens": int(max_tokens),
"max_context_tokens": int(max_context_tokens),
"api_key_env_var": env_var,
}
if env_var:
config["worker_llm"]["api_key_env_var"] = env_var
if use_claude_code_sub == "true":
config["worker_llm"]["use_claude_code_subscription"] = True
config["worker_llm"].pop("api_key_env_var", None)
else:
config["worker_llm"].pop("use_claude_code_subscription", None)
if use_codex_sub == "true":
config["worker_llm"]["use_codex_subscription"] = True
config["worker_llm"].pop("api_key_env_var", None)
else:
config["worker_llm"].pop("use_codex_subscription", None)
if api_base:
config["worker_llm"]["api_base"] = api_base
else:
config["worker_llm"].pop("api_base", None)
if not env_var:
config["worker_llm"].pop("api_key_env_var", None)
tmp_path = cfg_path.with_name(cfg_path.name + ".tmp")
with open(tmp_path, "w", encoding="utf-8") as f:
@@ -360,100 +537,579 @@ print(f'Worker: {wm if wm else \"(same as queen)\"}')
fi
fi
# Source shell rc to pick up env vars
SHELL_RC_FILE="$HOME/.bashrc"
if [ -n "$ZSH_VERSION" ] || [ "$SHELL" = "/bin/zsh" ]; then
SHELL_RC_FILE="$HOME/.zshrc"
fi
# Source shell rc file to pick up existing env vars (temporarily disable set -e)
set +e
if [ -f "$SHELL_RC_FILE" ]; then
eval "$(grep -E '^export [A-Z_]+=' "$SHELL_RC_FILE" 2>/dev/null)"
fi
set -e
# Detect available providers
AVAIL_PROVIDERS=()
AVAIL_ENV_VARS=()
# Find all available API keys
FOUND_PROVIDERS=() # Display names for UI
FOUND_ENV_VARS=() # Corresponding env var names
SELECTED_PROVIDER_ID="" # Will hold the chosen provider ID
SELECTED_ENV_VAR="" # Will hold the chosen env var
SELECTED_MODEL="" # Will hold the chosen model ID
SELECTED_MAX_TOKENS=8192 # Will hold the chosen max_tokens (output limit)
SELECTED_MAX_CONTEXT_TOKENS=120000 # Will hold the chosen max_context_tokens (input history budget)
SUBSCRIPTION_MODE="" # "claude_code" | "codex" | "zai_code" | ""
ENV_VARS_TO_CHECK=(ANTHROPIC_API_KEY OPENAI_API_KEY GEMINI_API_KEY GOOGLE_API_KEY GROQ_API_KEY CEREBRAS_API_KEY OPENROUTER_API_KEY MISTRAL_API_KEY TOGETHER_API_KEY DEEPSEEK_API_KEY)
# ── Credential detection (silent — just set flags) ───────────
CLAUDE_CRED_DETECTED=false
if command -v security &>/dev/null && security find-generic-password -s "Claude Code-credentials" &>/dev/null 2>&1; then
CLAUDE_CRED_DETECTED=true
elif [ -f "$HOME/.claude/.credentials.json" ]; then
CLAUDE_CRED_DETECTED=true
fi
for ev in "${ENV_VARS_TO_CHECK[@]}"; do
if [ -n "${!ev:-}" ]; then
AVAIL_PROVIDERS+=("$(get_provider_name "$ev")")
AVAIL_ENV_VARS+=("$ev")
CODEX_CRED_DETECTED=false
if command -v security &>/dev/null && security find-generic-password -s "Codex Auth" &>/dev/null 2>&1; then
CODEX_CRED_DETECTED=true
elif [ -f "$HOME/.codex/auth.json" ]; then
CODEX_CRED_DETECTED=true
fi
ZAI_CRED_DETECTED=false
if [ -n "${ZAI_API_KEY:-}" ]; then
ZAI_CRED_DETECTED=true
fi
MINIMAX_CRED_DETECTED=false
if [ -n "${MINIMAX_API_KEY:-}" ]; then
MINIMAX_CRED_DETECTED=true
fi
KIMI_CRED_DETECTED=false
if [ -f "$HOME/.kimi/config.toml" ]; then
KIMI_CRED_DETECTED=true
elif [ -n "${KIMI_API_KEY:-}" ]; then
KIMI_CRED_DETECTED=true
fi
HIVE_CRED_DETECTED=false
if [ -n "${HIVE_API_KEY:-}" ]; then
HIVE_CRED_DETECTED=true
fi
# Detect API key providers
if [ "$USE_ASSOC_ARRAYS" = true ]; then
for env_var in "${!PROVIDER_NAMES[@]}"; do
if [ -n "${!env_var}" ]; then
FOUND_PROVIDERS+=("$(get_provider_name "$env_var")")
FOUND_ENV_VARS+=("$env_var")
fi
done
else
for env_var in "${PROVIDER_ENV_VARS[@]}"; do
if [ -n "${!env_var}" ]; then
FOUND_PROVIDERS+=("$(get_provider_name "$env_var")")
FOUND_ENV_VARS+=("$env_var")
fi
done
fi
# ── Read previous worker configuration (if any) ──────────────────────
PREV_PROVIDER=""
PREV_MODEL=""
PREV_ENV_VAR=""
PREV_SUB_MODE=""
if [ -f "$HIVE_CONFIG_FILE" ]; then
eval "$(cd "$PROJECT_DIR" && uv run python - 2>/dev/null <<'PY'
import json
from pathlib import Path
cfg_path = Path.home() / ".hive" / "configuration.json"
try:
with open(cfg_path, encoding="utf-8-sig") as f:
c = json.load(f)
llm = c.get("worker_llm", {})
print(f"PREV_PROVIDER={llm.get('provider', '')}")
print(f"PREV_MODEL={llm.get('model', '')}")
print(f"PREV_ENV_VAR={llm.get('api_key_env_var', '')}")
sub = ""
if llm.get("use_claude_code_subscription"):
sub = "claude_code"
elif llm.get("use_codex_subscription"):
sub = "codex"
elif llm.get("use_kimi_code_subscription"):
sub = "kimi_code"
elif llm.get("provider", "") == "minimax" or "api.minimax.io" in llm.get("api_base", ""):
sub = "minimax_code"
elif llm.get("provider", "") == "hive" or "adenhq.com" in llm.get("api_base", ""):
sub = "hive_llm"
elif "api.z.ai" in llm.get("api_base", ""):
sub = "zai_code"
print(f"PREV_SUB_MODE={sub}")
except Exception:
pass
PY
)" || true
fi
# Compute default menu number from previous config (only if credential is still valid)
DEFAULT_CHOICE=""
if [ -n "$PREV_SUB_MODE" ] || [ -n "$PREV_PROVIDER" ]; then
PREV_CRED_VALID=false
case "$PREV_SUB_MODE" in
claude_code) [ "$CLAUDE_CRED_DETECTED" = true ] && PREV_CRED_VALID=true ;;
zai_code) [ "$ZAI_CRED_DETECTED" = true ] && PREV_CRED_VALID=true ;;
codex) [ "$CODEX_CRED_DETECTED" = true ] && PREV_CRED_VALID=true ;;
kimi_code) [ "$KIMI_CRED_DETECTED" = true ] && PREV_CRED_VALID=true ;;
hive_llm) [ "$HIVE_CRED_DETECTED" = true ] && PREV_CRED_VALID=true ;;
*)
# API key provider — check if the env var is set
if [ -n "$PREV_ENV_VAR" ] && [ -n "${!PREV_ENV_VAR}" ]; then
PREV_CRED_VALID=true
fi
;;
esac
if [ "$PREV_CRED_VALID" = true ]; then
case "$PREV_SUB_MODE" in
claude_code) DEFAULT_CHOICE=1 ;;
zai_code) DEFAULT_CHOICE=2 ;;
codex) DEFAULT_CHOICE=3 ;;
minimax_code) DEFAULT_CHOICE=4 ;;
kimi_code) DEFAULT_CHOICE=5 ;;
hive_llm) DEFAULT_CHOICE=6 ;;
esac
if [ -z "$DEFAULT_CHOICE" ]; then
case "$PREV_PROVIDER" in
anthropic) DEFAULT_CHOICE=7 ;;
openai) DEFAULT_CHOICE=8 ;;
gemini) DEFAULT_CHOICE=9 ;;
groq) DEFAULT_CHOICE=10 ;;
cerebras) DEFAULT_CHOICE=11 ;;
openrouter) DEFAULT_CHOICE=12 ;;
minimax) DEFAULT_CHOICE=4 ;;
kimi) DEFAULT_CHOICE=5 ;;
hive) DEFAULT_CHOICE=6 ;;
esac
fi
fi
fi
# ── Show unified provider selection menu ─────────────────────
echo -e "${BOLD}Select your worker LLM provider:${NC}"
echo ""
echo -e " ${CYAN}${BOLD}Subscription modes (no API key purchase needed):${NC}"
# 1) Claude Code
if [ "$CLAUDE_CRED_DETECTED" = true ]; then
echo -e " ${CYAN}1)${NC} Claude Code Subscription ${DIM}(use your Claude Max/Pro plan)${NC} ${GREEN}(credential detected)${NC}"
else
echo -e " ${CYAN}1)${NC} Claude Code Subscription ${DIM}(use your Claude Max/Pro plan)${NC}"
fi
# 2) ZAI Code
if [ "$ZAI_CRED_DETECTED" = true ]; then
echo -e " ${CYAN}2)${NC} ZAI Code Subscription ${DIM}(use your ZAI Code plan)${NC} ${GREEN}(credential detected)${NC}"
else
echo -e " ${CYAN}2)${NC} ZAI Code Subscription ${DIM}(use your ZAI Code plan)${NC}"
fi
# 3) Codex
if [ "$CODEX_CRED_DETECTED" = true ]; then
echo -e " ${CYAN}3)${NC} OpenAI Codex Subscription ${DIM}(use your Codex/ChatGPT Plus plan)${NC} ${GREEN}(credential detected)${NC}"
else
echo -e " ${CYAN}3)${NC} OpenAI Codex Subscription ${DIM}(use your Codex/ChatGPT Plus plan)${NC}"
fi
# 4) MiniMax
if [ "$MINIMAX_CRED_DETECTED" = true ]; then
echo -e " ${CYAN}4)${NC} MiniMax Coding Key ${DIM}(use your MiniMax coding key)${NC} ${GREEN}(credential detected)${NC}"
else
echo -e " ${CYAN}4)${NC} MiniMax Coding Key ${DIM}(use your MiniMax coding key)${NC}"
fi
# 5) Kimi Code
if [ "$KIMI_CRED_DETECTED" = true ]; then
echo -e " ${CYAN}5)${NC} Kimi Code Subscription ${DIM}(use your Kimi Code plan)${NC} ${GREEN}(credential detected)${NC}"
else
echo -e " ${CYAN}5)${NC} Kimi Code Subscription ${DIM}(use your Kimi Code plan)${NC}"
fi
# 6) Hive LLM
if [ "$HIVE_CRED_DETECTED" = true ]; then
echo -e " ${CYAN}6)${NC} Hive LLM ${DIM}(use your Hive API key)${NC} ${GREEN}(credential detected)${NC}"
else
echo -e " ${CYAN}6)${NC} Hive LLM ${DIM}(use your Hive API key)${NC}"
fi
echo ""
echo -e " ${CYAN}${BOLD}API key providers:${NC}"
# 7-12) API key providers — show (credential detected) if key already set
PROVIDER_MENU_ENVS=(ANTHROPIC_API_KEY OPENAI_API_KEY GEMINI_API_KEY GROQ_API_KEY CEREBRAS_API_KEY OPENROUTER_API_KEY)
PROVIDER_MENU_NAMES=("Anthropic (Claude) - Recommended" "OpenAI (GPT)" "Google Gemini - Free tier available" "Groq - Fast, free tier" "Cerebras - Fast, free tier" "OpenRouter - Bring any OpenRouter model")
for idx in "${!PROVIDER_MENU_ENVS[@]}"; do
num=$((idx + 7))
env_var="${PROVIDER_MENU_ENVS[$idx]}"
if [ -n "${!env_var}" ]; then
echo -e " ${CYAN}$num)${NC} ${PROVIDER_MENU_NAMES[$idx]} ${GREEN}(credential detected)${NC}"
else
echo -e " ${CYAN}$num)${NC} ${PROVIDER_MENU_NAMES[$idx]}"
fi
done
if [ ${#AVAIL_PROVIDERS[@]} -eq 0 ]; then
echo -e "${RED}No API keys found.${NC}"
echo -e "Run ${CYAN}./quickstart.sh${NC} first to set up your LLM provider."
exit 1
SKIP_CHOICE=$((7 + ${#PROVIDER_MENU_ENVS[@]}))
echo -e " ${CYAN}$SKIP_CHOICE)${NC} Skip for now"
echo ""
if [ -n "$DEFAULT_CHOICE" ]; then
echo -e " ${DIM}Previously configured: ${PREV_PROVIDER}/${PREV_MODEL}. Press Enter to keep.${NC}"
echo ""
fi
# Pick provider
SELECTED_PROVIDER_ID=""
SELECTED_ENV_VAR=""
SELECTED_MODEL=""
SELECTED_MAX_TOKENS=8192
SELECTED_MAX_CONTEXT_TOKENS=120000
SELECTED_API_BASE=""
while true; do
if [ -n "$DEFAULT_CHOICE" ]; then
read -r -p "Enter choice (1-$SKIP_CHOICE) [$DEFAULT_CHOICE]: " choice || true
choice="${choice:-$DEFAULT_CHOICE}"
else
read -r -p "Enter choice (1-$SKIP_CHOICE): " choice || true
fi
if [[ "$choice" =~ ^[0-9]+$ ]] && [ "$choice" -ge 1 ] && [ "$choice" -le "$SKIP_CHOICE" ]; then
break
fi
echo -e "${RED}Invalid choice. Please enter 1-$SKIP_CHOICE${NC}"
done
if [ ${#AVAIL_PROVIDERS[@]} -eq 1 ]; then
SELECTED_ENV_VAR="${AVAIL_ENV_VARS[0]}"
SELECTED_PROVIDER_ID="$(get_provider_id "$SELECTED_ENV_VAR")"
echo -e "${GREEN}${NC} Provider: $(get_provider_name "$SELECTED_ENV_VAR")"
else
echo -e "${BOLD}Select provider for worker agents:${NC}"
echo ""
local_i=1
for prov in "${AVAIL_PROVIDERS[@]}"; do
echo -e " ${CYAN}${local_i})${NC} $prov"
local_i=$((local_i + 1))
done
echo ""
while true; do
read -r -p "Enter choice (1-${#AVAIL_PROVIDERS[@]}): " choice || true
if [[ "$choice" =~ ^[0-9]+$ ]] && [ "$choice" -ge 1 ] && [ "$choice" -le "${#AVAIL_PROVIDERS[@]}" ]; then
idx=$((choice - 1))
SELECTED_ENV_VAR="${AVAIL_ENV_VARS[$idx]}"
SELECTED_PROVIDER_ID="$(get_provider_id "$SELECTED_ENV_VAR")"
case $choice in
1)
# Claude Code Subscription
if [ "$CLAUDE_CRED_DETECTED" = false ]; then
echo ""
echo -e "${GREEN}${NC} Provider: ${AVAIL_PROVIDERS[$idx]}"
echo -e "${YELLOW} ~/.claude/.credentials.json not found.${NC}"
echo -e " Run ${CYAN}claude${NC} first to authenticate with your Claude subscription,"
echo -e " then run this script again."
echo ""
exit 1
else
SUBSCRIPTION_MODE="claude_code"
SELECTED_PROVIDER_ID="anthropic"
SELECTED_MODEL="claude-opus-4-6"
SELECTED_MAX_TOKENS=32768
SELECTED_MAX_CONTEXT_TOKENS=960000 # Claude — 1M context window
echo ""
echo -e "${GREEN}${NC} Using Claude Code subscription"
fi
;;
2)
# ZAI Code Subscription
SUBSCRIPTION_MODE="zai_code"
SELECTED_PROVIDER_ID="openai"
SELECTED_ENV_VAR="ZAI_API_KEY"
SELECTED_MODEL="glm-5"
SELECTED_MAX_TOKENS=32768
SELECTED_MAX_CONTEXT_TOKENS=180000 # GLM-5 — 200k context window
PROVIDER_NAME="ZAI"
echo ""
echo -e "${GREEN}${NC} Using ZAI Code subscription"
echo -e " ${DIM}Model: glm-5 | API: api.z.ai${NC}"
;;
3)
# OpenAI Codex Subscription
if [ "$CODEX_CRED_DETECTED" = false ]; then
echo ""
echo -e "${YELLOW} Codex credentials not found. Starting OAuth login...${NC}"
echo ""
if cd "$PROJECT_DIR" && uv run python "$PROJECT_DIR/core/codex_oauth.py"; then
CODEX_CRED_DETECTED=true
else
echo ""
echo -e "${RED} OAuth login failed or was cancelled.${NC}"
echo ""
echo -e " To authenticate manually, visit:"
echo -e " ${CYAN}https://auth.openai.com/authorize?client_id=app_EMoamEEZ73f0CkXaXp7hrann&response_type=code&redirect_uri=http://localhost:1455/auth/callback&scope=openid%20profile%20email%20offline_access${NC}"
echo ""
echo -e " Or run ${CYAN}codex${NC} to authenticate, then run this script again."
echo ""
SELECTED_PROVIDER_ID=""
fi
fi
if [ "$CODEX_CRED_DETECTED" = true ]; then
SUBSCRIPTION_MODE="codex"
SELECTED_PROVIDER_ID="openai"
SELECTED_MODEL="gpt-5.3-codex"
SELECTED_MAX_TOKENS=16384
SELECTED_MAX_CONTEXT_TOKENS=120000 # GPT Codex — 128k context window
echo ""
echo -e "${GREEN}${NC} Using OpenAI Codex subscription"
fi
;;
4)
# MiniMax Coding Key
SUBSCRIPTION_MODE="minimax_code"
SELECTED_ENV_VAR="MINIMAX_API_KEY"
SELECTED_PROVIDER_ID="minimax"
SELECTED_MODEL="MiniMax-M2.5"
SELECTED_MAX_TOKENS=32768
SELECTED_MAX_CONTEXT_TOKENS=900000 # MiniMax M2.5 — 1M context window
SELECTED_API_BASE="https://api.minimax.io/v1"
PROVIDER_NAME="MiniMax"
SIGNUP_URL="https://platform.minimax.io/user-center/basic-information/interface-key"
echo ""
echo -e "${GREEN}${NC} Using MiniMax coding key"
echo -e " ${DIM}Model: MiniMax-M2.5 | API: api.minimax.io${NC}"
;;
5)
# Kimi Code Subscription
SUBSCRIPTION_MODE="kimi_code"
SELECTED_PROVIDER_ID="kimi"
SELECTED_ENV_VAR="KIMI_API_KEY"
SELECTED_MODEL="kimi-k2.5"
SELECTED_MAX_TOKENS=32768
SELECTED_MAX_CONTEXT_TOKENS=240000 # Kimi K2.5 — 256k context window
SELECTED_API_BASE="https://api.kimi.com/coding"
PROVIDER_NAME="Kimi"
SIGNUP_URL="https://www.kimi.com/code"
echo ""
echo -e "${GREEN}${NC} Using Kimi Code subscription"
echo -e " ${DIM}Model: kimi-k2.5 | API: api.kimi.com/coding${NC}"
;;
6)
# Hive LLM
SUBSCRIPTION_MODE="hive_llm"
SELECTED_PROVIDER_ID="hive"
SELECTED_ENV_VAR="HIVE_API_KEY"
SELECTED_MAX_TOKENS=32768
SELECTED_MAX_CONTEXT_TOKENS=180000
SELECTED_API_BASE="$HIVE_LLM_ENDPOINT"
PROVIDER_NAME="Hive"
SIGNUP_URL="https://discord.com/invite/hQdU7QDkgR"
echo ""
echo -e "${GREEN}${NC} Using Hive LLM"
echo ""
echo -e " Select a model:"
echo -e " ${CYAN}1)${NC} queen ${DIM}(default — Hive flagship)${NC}"
echo -e " ${CYAN}2)${NC} kimi-2.5"
echo -e " ${CYAN}3)${NC} GLM-5"
echo ""
read -r -p " Enter model choice (1-3) [1]: " hive_model_choice || true
hive_model_choice="${hive_model_choice:-1}"
case "$hive_model_choice" in
2) SELECTED_MODEL="kimi-2.5" ;;
3) SELECTED_MODEL="GLM-5" ;;
*) SELECTED_MODEL="queen" ;;
esac
echo -e " ${DIM}Model: $SELECTED_MODEL | API: ${HIVE_LLM_ENDPOINT}${NC}"
;;
7)
SELECTED_ENV_VAR="ANTHROPIC_API_KEY"
SELECTED_PROVIDER_ID="anthropic"
PROVIDER_NAME="Anthropic"
SIGNUP_URL="https://console.anthropic.com/settings/keys"
;;
8)
SELECTED_ENV_VAR="OPENAI_API_KEY"
SELECTED_PROVIDER_ID="openai"
PROVIDER_NAME="OpenAI"
SIGNUP_URL="https://platform.openai.com/api-keys"
;;
9)
SELECTED_ENV_VAR="GEMINI_API_KEY"
SELECTED_PROVIDER_ID="gemini"
PROVIDER_NAME="Google Gemini"
SIGNUP_URL="https://aistudio.google.com/apikey"
;;
10)
SELECTED_ENV_VAR="GROQ_API_KEY"
SELECTED_PROVIDER_ID="groq"
PROVIDER_NAME="Groq"
SIGNUP_URL="https://console.groq.com/keys"
;;
11)
SELECTED_ENV_VAR="CEREBRAS_API_KEY"
SELECTED_PROVIDER_ID="cerebras"
PROVIDER_NAME="Cerebras"
SIGNUP_URL="https://cloud.cerebras.ai/"
;;
12)
SELECTED_ENV_VAR="OPENROUTER_API_KEY"
SELECTED_PROVIDER_ID="openrouter"
SELECTED_API_BASE="https://openrouter.ai/api/v1"
PROVIDER_NAME="OpenRouter"
SIGNUP_URL="https://openrouter.ai/keys"
;;
"$SKIP_CHOICE")
echo ""
echo -e "${YELLOW}Skipped.${NC} Worker model not configured."
echo -e "Run this script again when ready."
echo ""
exit 0
;;
esac
# For API-key providers: prompt for key (allow replacement if already set)
if { [ -z "$SUBSCRIPTION_MODE" ] || [ "$SUBSCRIPTION_MODE" = "minimax_code" ] || [ "$SUBSCRIPTION_MODE" = "kimi_code" ] || [ "$SUBSCRIPTION_MODE" = "hive_llm" ]; } && [ -n "$SELECTED_ENV_VAR" ]; then
while true; do
CURRENT_KEY="${!SELECTED_ENV_VAR}"
if [ -n "$CURRENT_KEY" ]; then
# Key exists — offer to keep or replace
MASKED_KEY="${CURRENT_KEY:0:4}...${CURRENT_KEY: -4}"
echo ""
echo -e " ${GREEN}${NC} Current key: ${DIM}$MASKED_KEY${NC}"
read -r -p " Press Enter to keep, or paste a new key to replace: " API_KEY
else
# No key — prompt for one
echo ""
echo -e "Get your API key from: ${CYAN}$SIGNUP_URL${NC}"
echo ""
read -r -p "Paste your $PROVIDER_NAME API key (or press Enter to skip): " API_KEY
fi
if [ -n "$API_KEY" ]; then
# Remove old export line(s) for this env var from shell rc, then append new
sed -i.bak "/^export ${SELECTED_ENV_VAR}=/d" "$SHELL_RC_FILE" && rm -f "${SHELL_RC_FILE}.bak"
echo "" >> "$SHELL_RC_FILE"
echo "# Hive Agent Framework - $PROVIDER_NAME API key" >> "$SHELL_RC_FILE"
echo "export $SELECTED_ENV_VAR=\"$API_KEY\"" >> "$SHELL_RC_FILE"
export "$SELECTED_ENV_VAR=$API_KEY"
echo ""
echo -e "${GREEN}${NC} API key saved to $SHELL_RC_FILE"
# Health check the new key
echo -n " Verifying API key... "
if [ -n "${SELECTED_API_BASE:-}" ]; then
HC_RESULT=$(cd "$PROJECT_DIR" && uv run python "$PROJECT_DIR/scripts/check_llm_key.py" "$SELECTED_PROVIDER_ID" "$API_KEY" "$SELECTED_API_BASE" 2>/dev/null) || true
else
HC_RESULT=$(cd "$PROJECT_DIR" && uv run python "$PROJECT_DIR/scripts/check_llm_key.py" "$SELECTED_PROVIDER_ID" "$API_KEY" 2>/dev/null) || true
fi
HC_VALID=$(echo "$HC_RESULT" | $PYTHON_CMD -c "import json,sys; print(json.loads(sys.stdin.read()).get('valid',''))" 2>/dev/null) || true
HC_MSG=$(echo "$HC_RESULT" | $PYTHON_CMD -c "import json,sys; print(json.loads(sys.stdin.read()).get('message',''))" 2>/dev/null) || true
if [ "$HC_VALID" = "True" ]; then
echo -e "${GREEN}ok${NC}"
break
elif [ "$HC_VALID" = "False" ]; then
echo -e "${RED}failed${NC}"
echo -e " ${YELLOW}$HC_MSG${NC}"
# Undo the save so the user can retry cleanly
sed -i.bak "/^export ${SELECTED_ENV_VAR}=/d" "$SHELL_RC_FILE" && rm -f "${SHELL_RC_FILE}.bak"
# Remove the comment line we just added
sed -i.bak "/^# Hive Agent Framework - $PROVIDER_NAME API key$/d" "$SHELL_RC_FILE" && rm -f "${SHELL_RC_FILE}.bak"
unset "$SELECTED_ENV_VAR"
echo ""
read -r -p " Press Enter to try again: " _
# Loop back to key prompt
else
echo -e "${YELLOW}--${NC}"
echo -e " ${DIM}Could not verify key (network issue). The key has been saved.${NC}"
break
fi
elif [ -z "$CURRENT_KEY" ]; then
# No existing key and user skipped — abort provider
echo ""
echo -e "${YELLOW}Skipped.${NC} Add your API key to $SHELL_RC_FILE when ready."
SELECTED_ENV_VAR=""
SELECTED_PROVIDER_ID=""
break
else
# User pressed Enter with existing key — keep it, proceed normally
break
fi
echo -e "${RED}Invalid choice.${NC}"
done
fi
# OpenRouter: custom api_base
if [ "$SELECTED_PROVIDER_ID" = "openrouter" ]; then
SELECTED_API_BASE="https://openrouter.ai/api/v1"
# For ZAI subscription: prompt for API key (allow replacement if already set)
# Loops until a key verifies, the user keeps an existing key, or the user
# skips (which aborts the whole provider selection).
if [ "$SUBSCRIPTION_MODE" = "zai_code" ]; then
    while true; do
        if [ "$ZAI_CRED_DETECTED" = true ] && [ -n "$ZAI_API_KEY" ]; then
            # Key exists — offer to keep or replace
            # Show only the first/last 4 characters of the stored key
            MASKED_KEY="${ZAI_API_KEY:0:4}...${ZAI_API_KEY: -4}"
            echo ""
            echo -e " ${GREEN}${NC} Current ZAI key: ${DIM}$MASKED_KEY${NC}"
            read -r -p " Press Enter to keep, or paste a new key to replace: " API_KEY
        else
            # No key — prompt for one
            echo ""
            read -r -p "Paste your ZAI API key (or press Enter to skip): " API_KEY
        fi
        if [ -n "$API_KEY" ]; then
            # Replace any previous export line before appending the new one,
            # so repeated runs never leave duplicate entries in the rc file.
            sed -i.bak "/^export ZAI_API_KEY=/d" "$SHELL_RC_FILE" && rm -f "${SHELL_RC_FILE}.bak"
            echo "" >> "$SHELL_RC_FILE"
            echo "# Hive Agent Framework - ZAI Code subscription API key" >> "$SHELL_RC_FILE"
            echo "export ZAI_API_KEY=\"$API_KEY\"" >> "$SHELL_RC_FILE"
            export ZAI_API_KEY="$API_KEY"
            echo ""
            echo -e "${GREEN}${NC} ZAI API key saved to $SHELL_RC_FILE"
            # Health check the new key
            # check_llm_key.py is expected to print JSON with "valid"/"message";
            # each step is `|| true` so a failure degrades to the "--" branch.
            echo -n " Verifying ZAI API key... "
            HC_RESULT=$(cd "$PROJECT_DIR" && uv run python "$PROJECT_DIR/scripts/check_llm_key.py" "zai" "$API_KEY" "https://api.z.ai/api/coding/paas/v4" 2>/dev/null) || true
            HC_VALID=$(echo "$HC_RESULT" | $PYTHON_CMD -c "import json,sys; print(json.loads(sys.stdin.read()).get('valid',''))" 2>/dev/null) || true
            HC_MSG=$(echo "$HC_RESULT" | $PYTHON_CMD -c "import json,sys; print(json.loads(sys.stdin.read()).get('message',''))" 2>/dev/null) || true
            if [ "$HC_VALID" = "True" ]; then
                echo -e "${GREEN}ok${NC}"
                break
            elif [ "$HC_VALID" = "False" ]; then
                echo -e "${RED}failed${NC}"
                echo -e " ${YELLOW}$HC_MSG${NC}"
                # Undo the save so the user can retry cleanly
                sed -i.bak "/^export ZAI_API_KEY=/d" "$SHELL_RC_FILE" && rm -f "${SHELL_RC_FILE}.bak"
                sed -i.bak "/^# Hive Agent Framework - ZAI Code subscription API key$/d" "$SHELL_RC_FILE" && rm -f "${SHELL_RC_FILE}.bak"
                unset ZAI_API_KEY
                # Mark the credential as gone so the next iteration asks fresh
                ZAI_CRED_DETECTED=false
                echo ""
                read -r -p " Press Enter to try again: " _
                # Loop back to key prompt
            else
                # Verdict was neither True nor False — treat as unverifiable
                echo -e "${YELLOW}--${NC}"
                echo -e " ${DIM}Could not verify key (network issue). The key has been saved.${NC}"
                break
            fi
        elif [ "$ZAI_CRED_DETECTED" = false ] || [ -z "$ZAI_API_KEY" ]; then
            # No existing key and user skipped — abort provider
            # (clearing SUBSCRIPTION_MODE too, unlike the generic path above)
            echo ""
            echo -e "${YELLOW}Skipped.${NC} Add your ZAI API key to $SHELL_RC_FILE when ready:"
            echo -e " ${CYAN}echo 'export ZAI_API_KEY=\"your-key\"' >> $SHELL_RC_FILE${NC}"
            SELECTED_ENV_VAR=""
            SELECTED_PROVIDER_ID=""
            SUBSCRIPTION_MODE=""
            break
        else
            # User pressed Enter with existing key — keep it, proceed normally
            break
        fi
    done
fi
# Pick the model for the chosen provider; select_model may leave
# SELECTED_MODEL empty (e.g. a manually entered provider), in which case
# we fall back to an interactive prompt.
select_model "$SELECTED_PROVIDER_ID"
if [ -z "$SELECTED_MODEL" ] && [ -n "$SELECTED_PROVIDER_ID" ]; then
    prompt_model_selection "$SELECTED_PROVIDER_ID"
fi
# Confirm before persisting the worker model configuration
echo ""
if prompt_yes_no "Save this worker model configuration?"; then
    # Save worker configuration if a provider was selected
    # (SELECTED_PROVIDER_ID is empty when the user skipped key entry above)
    if [ -n "$SELECTED_PROVIDER_ID" ]; then
        echo ""
        echo -n " Saving worker model configuration... "
# Persist the worker configuration. On success, subscription providers get
# a second save with their mode-specific argument layout (blank env var,
# oauth flag, or pinned api_base); SAVE_OK tracks that second write.
if save_worker_configuration "$SELECTED_PROVIDER_ID" "$SELECTED_ENV_VAR" "$SELECTED_MODEL" "$SELECTED_MAX_TOKENS" "$SELECTED_MAX_CONTEXT_TOKENS" "$SELECTED_API_BASE" > /dev/null; then
    echo -e "${GREEN}done${NC}"
    echo -e " ${DIM}~/.hive/configuration.json (worker_llm section)${NC}"
    SAVE_OK=true
    if [ "$SUBSCRIPTION_MODE" = "claude_code" ]; then
        save_worker_configuration "$SELECTED_PROVIDER_ID" "" "$SELECTED_MODEL" "$SELECTED_MAX_TOKENS" "$SELECTED_MAX_CONTEXT_TOKENS" "true" "" > /dev/null || SAVE_OK=false
    elif [ "$SUBSCRIPTION_MODE" = "codex" ]; then
        save_worker_configuration "$SELECTED_PROVIDER_ID" "" "$SELECTED_MODEL" "$SELECTED_MAX_TOKENS" "$SELECTED_MAX_CONTEXT_TOKENS" "" "" "true" > /dev/null || SAVE_OK=false
    elif [ "$SUBSCRIPTION_MODE" = "zai_code" ]; then
        save_worker_configuration "$SELECTED_PROVIDER_ID" "$SELECTED_ENV_VAR" "$SELECTED_MODEL" "$SELECTED_MAX_TOKENS" "$SELECTED_MAX_CONTEXT_TOKENS" "" "https://api.z.ai/api/coding/paas/v4" > /dev/null || SAVE_OK=false
    elif [ "$SUBSCRIPTION_MODE" = "minimax_code" ]; then
        save_worker_configuration "$SELECTED_PROVIDER_ID" "$SELECTED_ENV_VAR" "$SELECTED_MODEL" "$SELECTED_MAX_TOKENS" "$SELECTED_MAX_CONTEXT_TOKENS" "" "$SELECTED_API_BASE" > /dev/null || SAVE_OK=false
    elif [ "$SUBSCRIPTION_MODE" = "kimi_code" ]; then
        save_worker_configuration "$SELECTED_PROVIDER_ID" "$SELECTED_ENV_VAR" "$SELECTED_MODEL" "$SELECTED_MAX_TOKENS" "$SELECTED_MAX_CONTEXT_TOKENS" "" "$SELECTED_API_BASE" > /dev/null || SAVE_OK=false
    elif [ "$SUBSCRIPTION_MODE" = "hive_llm" ]; then
        save_worker_configuration "$SELECTED_PROVIDER_ID" "$SELECTED_ENV_VAR" "$SELECTED_MODEL" "$SELECTED_MAX_TOKENS" "$SELECTED_MAX_CONTEXT_TOKENS" "" "$SELECTED_API_BASE" > /dev/null || SAVE_OK=false
    elif [ "$SELECTED_PROVIDER_ID" = "openrouter" ]; then
        save_worker_configuration "$SELECTED_PROVIDER_ID" "$SELECTED_ENV_VAR" "$SELECTED_MODEL" "$SELECTED_MAX_TOKENS" "$SELECTED_MAX_CONTEXT_TOKENS" "" "$SELECTED_API_BASE" > /dev/null || SAVE_OK=false
    else
        save_worker_configuration "$SELECTED_PROVIDER_ID" "$SELECTED_ENV_VAR" "$SELECTED_MODEL" "$SELECTED_MAX_TOKENS" "$SELECTED_MAX_CONTEXT_TOKENS" > /dev/null || SAVE_OK=false
    fi
    if [ "$SAVE_OK" = false ]; then
        echo -e "${RED}failed${NC}"
        echo -e "${YELLOW} Could not write ~/.hive/configuration.json. Please rerun this script.${NC}"
        exit 1
    fi
else
    # BUGFIX: this branch runs when the initial save FAILED, but it
    # previously printed the green success banner and exited 0 with a
    # "Cancelled" message (apparent merge artifact duplicating the success
    # path). Report the failure and exit non-zero, mirroring the SAVE_OK
    # handling above.
    echo -e "${RED}failed${NC}"
    echo -e "${YELLOW} Could not write ~/.hive/configuration.json. Please rerun this script.${NC}"
    exit 1
fi
# Final success summary — reached only after save_worker_configuration
# (and any subscription-specific re-save) succeeded above.
echo ""
echo -e "${GREEN}${NC} Worker model configured successfully."
echo -e " ${DIM}Worker agents will now use: ${SELECTED_PROVIDER_ID}/${SELECTED_MODEL}${NC}"
echo -e " ${DIM}Run this script again to change, or remove the worker_llm section${NC}"
echo -e " ${DIM}from ~/.hive/configuration.json to revert to the default.${NC}"
echo ""