The open-source router
for your LLM stack
Bring your own keys. Smart routing based on task complexity, automated fallbacks, and cost optimization. Drop the LLMora SDK into your app and make your LLM stack resilient in minutes.
from llmora import LLMora
import os
# Wire up the router, pulling each provider's credential from the environment.
client = LLMora(
    providers={
        "openai": os.getenv("OPENAI_API_KEY"),
        "anthropic": os.getenv("ANTHROPIC_API_KEY"),
    },
)
# Let the router pick a model for the request; if the chosen model fails,
# the listed fallbacks are tried in order.
response = client.chat.completions.create(
    model="router:auto",
    messages=[{"role": "user", "content": "Optimize this query."}],
    fallback=["gpt-5.4", "claude-4.6-opus"],
)