import requests
from typing import Optional
class ResilientRouter:
    """Smart routing with automatic fallback to a fixed model.

    Sends the message to Eden AI's "@edenai" smart-routing endpoint first;
    if that request fails, retries the same message against a fixed
    fallback model before giving up.
    """
    def __init__(
        self,
        api_key: str,
        fallback_model: str = "openai/gpt-4o"
    ):
        # Bearer token for the Eden AI API.
        self.api_key = api_key
        # Model id used when smart routing fails.
        self.fallback_model = fallback_model
        self.url = "https://api.edenai.run/v3/llm/chat/completions"
    def chat(
        self,
        message: str,
        candidates: Optional[list] = None,
        timeout: int = 30
    ) -> dict:
        """Chat with smart routing, falling back to a fixed model on error.

        Args:
            message: User message to send as a single-turn conversation.
            candidates: Optional list of model ids the router may choose
                from (sent as "router_candidates" when non-empty).
            timeout: Per-request timeout in seconds.

        Returns:
            dict with "content" (reply text), "model" (model that
            answered), and "method" ("smart_routing" or "fallback").

        Raises:
            RuntimeError: If both the smart-routing and the fallback
                requests fail; chained from the fallback error.
        """
        headers = {
            "Authorization": f"Bearer {self.api_key}",
            "Content-Type": "application/json"
        }
        # Step 1: Try smart routing
        try:
            payload = {
                "model": "@edenai",
                "messages": [{"role": "user", "content": message}]
            }
            if candidates:
                payload["router_candidates"] = candidates
            response = requests.post(
                self.url, headers=headers, json=payload, timeout=timeout
            )
            response.raise_for_status()
            data = response.json()
            return {
                "content": data["choices"][0]["message"]["content"],
                "model": data["model"],
                "method": "smart_routing"
            }
        # Catch only expected failures — transport/HTTP errors and a
        # malformed response body — so real programming bugs still surface.
        except (requests.RequestException, KeyError, IndexError, ValueError) as e:
            print(f"Smart routing failed: {e}")
        # Step 2: Fall back to fixed model
        try:
            payload = {
                "model": self.fallback_model,
                "messages": [{"role": "user", "content": message}]
            }
            response = requests.post(
                self.url, headers=headers, json=payload, timeout=timeout
            )
            response.raise_for_status()
            data = response.json()
            return {
                "content": data["choices"][0]["message"]["content"],
                "model": self.fallback_model,
                "method": "fallback"
            }
        except (requests.RequestException, KeyError, IndexError, ValueError) as fallback_error:
            # Chain the cause so the original traceback is preserved.
            raise RuntimeError(
                f"All attempts failed. Last error: {fallback_error}"
            ) from fallback_error
# Usage example: route one prompt, then report how it was answered.
router = ResilientRouter("YOUR_API_KEY", fallback_model="openai/gpt-4o")
prompt = "Explain neural networks"
preferred = ["anthropic/claude-sonnet-4-5", "google/gemini-2.5-pro"]
result = router.chat(prompt, candidates=preferred)
print(f"Method: {result['method']}")
print(f"Model: {result['model']}")
print(result["content"])