Fix default LLM model to claude-3-haiku (compatible with API key tier)

This commit is contained in:
2026-04-13 15:49:22 +00:00
parent a1454a0d05
commit 4ee0dc4c11
4 changed files with 84 additions and 2 deletions
+2 -2
View File
@@ -13,9 +13,9 @@ from shared import DASHBOARD_API, api_request
 # Default models per provider
 DEFAULT_MODELS = {
-    "anthropic": "claude-sonnet-4-5-20250514",
+    "anthropic": "claude-3-haiku-20240307",
     "openai": "gpt-4o-mini",
-    "litellm": "anthropic/claude-sonnet-4-5-20250514",
+    "litellm": "anthropic/claude-3-haiku-20240307",
     "ollama": "llama3",
 }
+18
View File
@@ -0,0 +1,18 @@
#!/usr/bin/env python3
"""Debug script: dump user 2's stored LLM config, then try one completion.

Prints the dashboard-side configuration, the compiled-in default Anthropic
model, and either the completion text or the error raised.
"""
import sys

# Make the agent modules importable when run outside the app package.
sys.path.insert(0, "/app/agents")

from shared import api_request, DASHBOARD_API
from llm_client import get_llm_config, DEFAULT_MODELS

# Fetch and display the per-user LLM configuration from the dashboard API.
config = api_request(f"{DASHBOARD_API}/api/users/2/llm", retries=1)
print(f"LLM config: {config}")
print(f"Default model for anthropic: {DEFAULT_MODELS.get('anthropic')}")

# Exercise the full completion path with a trivial prompt.
from llm_client import complete

try:
    result = complete(2, "Say hello in one sentence.", max_tokens=50)
    print(f"Success: {result}")
except Exception as e:
    # Broad catch is deliberate: this is a one-off diagnostic, report and exit.
    print(f"Error: {e}")
+38
View File
@@ -0,0 +1,38 @@
#!/usr/bin/env python3
"""Probe which Anthropic model names the configured API key can access.

Sends a minimal /v1/messages request for each candidate model and stops at
the first one that succeeds; failures print the exception for inspection.
"""
import json
import os
from urllib import request

# SECURITY: the API key was previously hardcoded here and committed to the
# repo — that key must be rotated. Read it from the environment instead.
API_KEY = os.environ.get("ANTHROPIC_API_KEY", "")
if not API_KEY:
    raise SystemExit("Set ANTHROPIC_API_KEY before running this probe.")

# Candidate model names, newest first (duplicate entry removed).
models = [
    "claude-sonnet-4-5-20250514",
    "claude-3-5-sonnet-20241022",
    "claude-3-haiku-20240307",
]

for model in models:
    try:
        body = json.dumps({
            "model": model,
            "max_tokens": 20,
            "messages": [{"role": "user", "content": "Say hi"}],
        }).encode()
        req = request.Request(
            "https://api.anthropic.com/v1/messages",
            data=body,
            headers={
                "x-api-key": API_KEY,
                "anthropic-version": "2023-06-01",
                "content-type": "application/json",
            },
            method="POST",
        )
        with request.urlopen(req, timeout=30) as resp:
            result = json.loads(resp.read().decode())
        # Messages API returns content as a list of blocks; take the first text.
        text = result.get("content", [{}])[0].get("text", "")
        print(f" {model}: OK - {text}")
        break
    except Exception as e:
        # Broad catch is deliberate: report per-model failure and keep probing.
        print(f" {model}: {e}")
+26
View File
@@ -0,0 +1,26 @@
#!/usr/bin/env python3
"""Quick test for the project monitor agent."""
import sys
import json  # NOTE(review): unused in the visible code — presumably kept for ad-hoc debugging

# Make the agent modules importable when run outside the app package.
sys.path.insert(0, "/app/agents")

# Instance configuration mirroring a real dashboard entry for user 2.
config = {
    "project_name": "AI Agents",
    "wiki_collection_id": "9d9e471c-84cd-4ba7-bae5-c70f61805228",
    "wiki_doc_ids": "",
    "gitea_repo": "eric/ai-agents",
    "custom_urls": "",
    "custom_notes": "",
    "report_collection_id": "",
    "include_in_briefing": "true",
}

print("Starting project monitor test...")
try:
    # Imported inside the try so an import failure is reported like a run failure.
    from project_monitor import run

    section, summary = run(config, user_id=2, instance_id=5)
    print("SUCCESS")
    print(f"Summary: {summary[:200]}")
except Exception as e:
    print(f"FAILED: {type(e).__name__}: {e}")
    import traceback

    traceback.print_exc()