Usage

A router is a collection of LLMs that you can route queries to. You can create a router in the Neutrino dashboard or use the default router, which includes all supported models.

You can treat a router as if it were a single LLM: point the OpenAI client at the Neutrino base URL and call the chat completions endpoint as usual.

from openai import OpenAI

client = OpenAI(
    base_url="https://router.neutrinoapp.com/api/llm-router",
    api_key="<Neutrino-API-key>"
)

response = client.chat.completions.create(
    # Instead of a specific model, set this to the ID of your Neutrino Router
    model="your-neutrino-router-id",  # (or 'default')
    messages=[
        {"role": "system", "content": "You are a helpful AI assistant. Your job is to be helpful and respond to user requests."},
        {"role": "user", "content": "Explain what artificial intelligence is"},
    ],
)

print(f"Optimal model: {response.model}")
print(response.choices[0].message.content)
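
Because the router speaks the standard OpenAI chat completions API, the usual response fields (model, choices, usage) are all available. As a minimal sketch of wrapping a call with basic error handling (the helper name route_query is ours for illustration, not part of any Neutrino SDK), you might do:

import openai
from openai import OpenAI

client = OpenAI(
    base_url="https://router.neutrinoapp.com/api/llm-router",
    api_key="<Neutrino-API-key>"
)

def route_query(prompt: str, router_id: str = "default") -> tuple[str, str]:
    """Send a prompt through a Neutrino router; return (selected model, reply)."""
    try:
        response = client.chat.completions.create(
            model=router_id,
            messages=[{"role": "user", "content": prompt}],
        )
    except openai.APIError as e:
        # Surface router/provider errors instead of failing silently
        raise RuntimeError(f"Router request failed: {e}") from e
    return response.model, response.choices[0].message.content

model, answer = route_query("Explain what artificial intelligence is")
print(f"Optimal model: {model}")
print(answer)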

Streaming Responses

from openai import OpenAI

client = OpenAI(
    base_url="https://router.neutrinoapp.com/api/llm-router",
    api_key="<Neutrino-API-key>"
)

response = client.chat.completions.create(
    # Instead of a specific model, set this to the ID of your Neutrino Router
    model="your-neutrino-router-id",  # (or 'default')
    messages=[
        {"role": "system", "content": "You are a helpful AI assistant. Your job is to be helpful and respond to user requests."},
        {"role": "user", "content": "Explain what artificial intelligence is"},
    ],
    stream=True
)

for i, chunk in enumerate(response):
    if i == 0:
        print(f"Optimal model: {chunk.model}")
    # Some chunks (e.g. the final one) carry no content in their delta
    if chunk.choices and chunk.choices[0].delta.content is not None:
        print(chunk.choices[0].delta.content, end="", flush=True)
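
If you also need the complete message once streaming finishes (for logging or caching, say), you can accumulate the deltas as they arrive. A minimal sketch, reusing the client from the snippet above:

chunks = []
stream = client.chat.completions.create(
    model="your-neutrino-router-id",  # (or 'default')
    messages=[
        {"role": "user", "content": "Explain what artificial intelligence is"},
    ],
    stream=True,
)
for chunk in stream:
    # Skip chunks whose delta has no content (role-only or final chunks)
    if chunk.choices and chunk.choices[0].delta.content is not None:
        chunks.append(chunk.choices[0].delta.content)

full_text = "".join(chunks)
print(full_text)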