import OpenAI from "openai";
// 1. Initialize with LLM Router credentials.
// Fail fast with an actionable message if the key is absent — otherwise the
// missing credential only surfaces later as an opaque 401 from the gateway.
const apiKey = process.env.LLM_ROUTER_API_KEY;
if (!apiKey) {
  throw new Error("LLM_ROUTER_API_KEY environment variable is not set");
}
const client = new OpenAI({
  baseURL: "https://api.llmrouter.app/v1",
  apiKey,
});
/**
 * Send a single chat completion through the LLM Router gateway and print
 * the assistant's reply to stdout.
 */
async function main() {
  // 2. Use ANY supported model (prefixed with provider slug)
  const completion = await client.chat.completions.create({
    model: "anthropic/claude-3-5-sonnet",
    messages: [{ role: "user", content: "Explain quantum physics." }],
    // 3. (Optional) Pass LLM Router features natively at the root
    // @ts-expect-error - Custom LLM Router extension
    gateway: {
      chatHistoryCompression: { enabled: true, score: 0.6 },
      redact: { token: true },
    },
  });

  const [firstChoice] = completion.choices;
  console.log(firstChoice.message.content);
}
main();