Fast LLM inference with an OpenAI-compatible API. Simple to integrate and easy to scale — start building in minutes.
import OpenAI from "openai";
// Query a Groq-hosted model through the OpenAI SDK's Responses API.
// Groq exposes an OpenAI-compatible endpoint, so the official SDK works
// unchanged once `baseURL` points at Groq and a Groq API key is supplied.

// Fail fast with a clear message instead of an opaque 401 from the API.
if (!process.env.GROQ_API_KEY) {
  throw new Error("GROQ_API_KEY environment variable is not set");
}

const client = new OpenAI({
  apiKey: process.env.GROQ_API_KEY,
  baseURL: "https://api.groq.com/openai/v1",
});

try {
  const response = await client.responses.create({
    model: "openai/gpt-oss-20b",
    input: "Explain the importance of fast language models",
  });
  // `output_text` is the SDK's convenience aggregation of the response's
  // text output items.
  console.log(response.output_text);
} catch (err) {
  // Wrap API/network failures with context rather than letting the
  // top-level await surface an unexplained unhandled rejection.
  throw new Error("Groq responses.create request failed", { cause: err });
}