OpenAI SDK
The OpenAI Python SDK works out of the box with ARouter.
Installation
pip install openai
Basic Usage
from openai import OpenAI

# Point the standard OpenAI client at the ARouter endpoint; everything
# else about the SDK works unchanged.
client = OpenAI(
    api_key="lr_live_xxxx",
    base_url="https://api.arouter.com/v1",
)

completion = client.chat.completions.create(
    model="gpt-4o",
    messages=[
        {"role": "user", "content": "Explain quantum computing in one sentence."},
    ],
)
print(completion.choices[0].message.content)
Multi-Provider Routing
Switch providers by changing the model string:
# One client, many upstream providers: only the ``model`` string changes.

# Anthropic model, still through the OpenAI client
response = client.chat.completions.create(
    messages=[{"role": "user", "content": "Hello!"}],
    model="anthropic/claude-sonnet-4-20250514",
)

# DeepSeek model, same client
response = client.chat.completions.create(
    messages=[{"role": "user", "content": "Hello!"}],
    model="deepseek/deepseek-chat",
)

# Gemini model, same client
response = client.chat.completions.create(
    messages=[{"role": "user", "content": "Hello!"}],
    model="google/gemini-2.0-flash",
)
Streaming
stream = client.chat.completions.create(
    model="gpt-4o",
    messages=[{"role": "user", "content": "Write a haiku about code."}],
    stream=True,
)
for chunk in stream:
    # Some stream events carry an empty ``choices`` list (for example the
    # trailing usage chunk when stream_options={"include_usage": True}),
    # so guard before indexing; ``delta.content`` may also be None.
    if chunk.choices and chunk.choices[0].delta.content:
        print(chunk.choices[0].delta.content, end="")
Async
import asyncio
from openai import AsyncOpenAI

# The async client takes the same two overrides as the sync one.
client = AsyncOpenAI(
    api_key="lr_live_xxxx",
    base_url="https://api.arouter.com/v1",
)

async def main():
    # Identical call shape to the sync API — just awaited.
    reply = await client.chat.completions.create(
        model="gpt-4o",
        messages=[{"role": "user", "content": "Hello!"}],
    )
    print(reply.choices[0].message.content)

asyncio.run(main())
Embeddings
# Embeddings go through the same client; provider-prefixed model names
# ("openai/...") are accepted here as well.
# NOTE(review): this snippet assumes the synchronous OpenAI client from
# "Basic Usage" — the AsyncOpenAI client defined just above would need
# ``await``; confirm which client this section intends.
response = client.embeddings.create(
    model="openai/text-embedding-3-small",
    input="The quick brown fox jumps over the lazy dog",
)
# Each item in response.data carries an ``embedding`` vector; show 5 dims.
print(response.data[0].embedding[:5])
Anthropic SDK
The Anthropic Python SDK works natively.
Installation
pip install anthropic
Basic Usage
import anthropic

# Native Anthropic client pointed at ARouter. Note there is no "/v1"
# suffix here — the SDK appends "/v1/messages" on its own.
client = anthropic.Anthropic(
    api_key="lr_live_xxxx",
    base_url="https://api.arouter.com",
)

reply = client.messages.create(
    max_tokens=1024,
    model="claude-sonnet-4-20250514",
    messages=[{"role": "user", "content": "Hello!"}],
)
print(reply.content[0].text)
For the Anthropic SDK, set base_url to https://api.arouter.com (without /v1).
The SDK adds /v1/messages automatically.
Streaming
# Stream the reply piece by piece; the context manager tears down the
# underlying connection when the block exits.
with client.messages.stream(
    max_tokens=1024,
    model="claude-sonnet-4-20250514",
    messages=[{"role": "user", "content": "Write a story."}],
) as stream:
    for piece in stream.text_stream:
        print(piece, end="")
Gemini SDK
The Google Generative AI Python SDK works with ARouter once its API endpoint is pointed at the router.
Installation
pip install google-generativeai
Basic Usage
import google.generativeai as genai

# REST transport is mandatory: the SDK defaults to gRPC, which ARouter
# does not support.
genai.configure(
    client_options={"api_endpoint": "https://api.arouter.com"},
    transport="rest",
    api_key="lr_live_xxxx",
)

gemini = genai.GenerativeModel("gemini-2.0-flash")
response = gemini.generate_content("Hello!")
print(response.text)
The transport="rest" parameter is required. The Gemini SDK defaults to gRPC,
which is not supported by ARouter.