OpenAI
// Requires: OPENAI_API_KEY=sk-...
import { Agent, openai } from "@radaros/core";

// Minimal agent backed by OpenAI's GPT-4o model.
const openaiAgent = new Agent({
  name: "openai-agent",
  model: openai("gpt-4o"),
  instructions: "You are a helpful assistant.",
});

// Run a single prompt and print the reply.
const { output } = await openaiAgent.run("What is quantum computing?");
console.log(output);
Anthropic
// Requires: ANTHROPIC_API_KEY=sk-ant-...
import { Agent, anthropic } from "@radaros/core";

// Minimal agent backed by Anthropic's Claude Sonnet model.
const claudeAgent = new Agent({
  name: "anthropic-agent",
  model: anthropic("claude-sonnet-4-20250514"),
  instructions: "You are a helpful assistant.",
});

// Run a single prompt and print the reply.
const { output } = await claudeAgent.run("Explain recursion simply.");
console.log(output);
Google Gemini
// Requires: GOOGLE_API_KEY=AIza...
import { Agent, google } from "@radaros/core";

// Minimal agent backed by Google's Gemini 2.5 Flash model.
const geminiAgent = new Agent({
  name: "gemini-agent",
  model: google("gemini-2.5-flash"),
  instructions: "You are a helpful assistant.",
});

// Run a single prompt and print the reply.
const { output } = await geminiAgent.run("Summarize the theory of relativity.");
console.log(output);
Vertex AI
// Requires: GOOGLE_CLOUD_PROJECT=my-gcp-project
import { Agent, vertex } from "@radaros/core";

// Gemini served through Vertex AI; project and location pick the GCP deployment.
const vertexModel = vertex("gemini-2.5-flash", {
  project: process.env.GOOGLE_CLOUD_PROJECT!,
  location: "us-central1",
});

const vertexAgent = new Agent({
  name: "vertex-agent",
  model: vertexModel,
  instructions: "You are a helpful assistant.",
});

// Run a single prompt and print the reply.
const { output } = await vertexAgent.run("What are the benefits of cloud computing?");
console.log(output);
Ollama (Local)
// No API key needed — just a running Ollama instance.
import { Agent, ollama } from "@radaros/core";

// Local model served by Ollama at its default host and port.
const localAgent = new Agent({
  name: "ollama-agent",
  model: ollama("llama3.2", { host: "http://localhost:11434" }),
  instructions: "You are a helpful assistant.",
});

// Run a single prompt and print the reply.
const { output } = await localAgent.run("Write a haiku about coding.");
console.log(output);
DeepSeek
// Requires: DEEPSEEK_API_KEY=sk-...
import { Agent, deepseek } from "@radaros/core";

// Minimal agent backed by DeepSeek's chat model.
const deepseekAgent = new Agent({
  name: "deepseek-agent",
  model: deepseek("deepseek-chat"),
  instructions: "You are a helpful assistant.",
});

// Run a single prompt and print the reply.
const { output } = await deepseekAgent.run("Explain the transformer architecture.");
console.log(output);
Mistral
// Requires: MISTRAL_API_KEY=...
import { Agent, mistral } from "@radaros/core";

// Minimal agent backed by Mistral's flagship model.
const mistralAgent = new Agent({
  name: "mistral-agent",
  model: mistral("mistral-large-latest"),
  instructions: "You are a helpful assistant.",
});

// Run a single prompt and print the reply.
const { output } = await mistralAgent.run("What is the capital of France?");
console.log(output);
xAI (Grok)
// Requires: XAI_API_KEY=xai-...
import { Agent, xai } from "@radaros/core";

// Minimal agent backed by xAI's Grok model.
const grokAgent = new Agent({
  name: "grok-agent",
  model: xai("grok-3"),
  instructions: "You are a helpful assistant.",
});

// Run a single prompt and print the reply.
const { output } = await grokAgent.run("Tell me something surprising about space.");
console.log(output);
Perplexity
// Requires: PERPLEXITY_API_KEY=pplx-...
import { Agent, perplexity } from "@radaros/core";

// Minimal agent backed by Perplexity's Sonar Pro model.
const sonarAgent = new Agent({
  name: "perplexity-agent",
  model: perplexity("sonar-pro"),
  instructions: "You are a helpful assistant.",
});

// Run a single prompt and print the reply.
const { output } = await sonarAgent.run("What happened in tech news today?");
console.log(output);
Cohere
// Requires: COHERE_API_KEY=...
import { Agent, cohere } from "@radaros/core";

// Minimal agent backed by Cohere's Command R+ model.
const cohereAgent = new Agent({
  name: "cohere-agent",
  model: cohere("command-r-plus"),
  instructions: "You are a helpful assistant.",
});

// Run a single prompt and print the reply.
const { output } = await cohereAgent.run("Compare SQL and NoSQL databases.");
console.log(output);
Meta (Llama)
// Requires: META_API_KEY=...
import { Agent, meta } from "@radaros/core";

// Minimal agent backed by Meta's Llama 4 Maverick model.
const llamaAgent = new Agent({
  name: "meta-agent",
  model: meta("Llama-4-Maverick-17B-128E-Instruct-FP8"),
  instructions: "You are a helpful assistant.",
});

// Run a single prompt and print the reply.
const { output } = await llamaAgent.run("Explain gradient descent.");
console.log(output);
AWS Bedrock
// Requires:
//   AWS_REGION=us-east-1
//   AWS_ACCESS_KEY_ID=AKIA...
//   AWS_SECRET_ACCESS_KEY=...
import { Agent, awsBedrock } from "@radaros/core";

// Amazon Nova Pro via AWS Bedrock; credentials are read from the environment.
const bedrockAgent = new Agent({
  name: "bedrock-agent",
  model: awsBedrock("amazon.nova-pro-v1:0", {
    region: process.env.AWS_REGION!,
  }),
  instructions: "You are a helpful assistant.",
});

// Run a single prompt and print the reply.
const { output } = await bedrockAgent.run("What is serverless computing?");
console.log(output);
AWS Claude
// Requires:
//   AWS_REGION=us-east-1
//   AWS_ACCESS_KEY_ID=AKIA...
//   AWS_SECRET_ACCESS_KEY=...
import { Agent, awsClaude } from "@radaros/core";

// Claude Sonnet via AWS Bedrock; credentials are read from the environment.
const awsClaudeAgent = new Agent({
  name: "aws-claude-agent",
  model: awsClaude("claude-sonnet-4-20250514", {
    region: process.env.AWS_REGION!,
  }),
  instructions: "You are a helpful assistant.",
});

// Run a single prompt and print the reply.
const { output } = await awsClaudeAgent.run("Explain microservices vs monoliths.");
console.log(output);
Azure OpenAI
// Requires: AZURE_OPENAI_API_KEY=...
import { Agent, azureOpenai } from "@radaros/core";

// GPT-4o on Azure OpenAI; resource and deployment names identify your instance.
const azureAgent = new Agent({
  name: "azure-openai-agent",
  model: azureOpenai("gpt-4o", {
    resourceName: "my-resource",
    deploymentName: "my-gpt4o-deployment",
    apiKey: process.env.AZURE_OPENAI_API_KEY!,
  }),
  instructions: "You are a helpful assistant.",
});

// Run a single prompt and print the reply.
const { output } = await azureAgent.run("What is zero-trust security?");
console.log(output);
Azure AI Foundry
// Requires: AZURE_FOUNDRY_API_KEY=...
import { Agent, azureFoundry } from "@radaros/core";

// Phi-4 served from an Azure AI Foundry endpoint.
const foundryAgent = new Agent({
  name: "azure-foundry-agent",
  model: azureFoundry("Phi-4", {
    endpoint: "https://my-foundry.azure.com",
    apiKey: process.env.AZURE_FOUNDRY_API_KEY!,
  }),
  instructions: "You are a helpful assistant.",
});

// Run a single prompt and print the reply.
const { output } = await foundryAgent.run("What is edge computing?");
console.log(output);
Vercel v0
// Requires: VERCEL_API_KEY=...
import { Agent, vercel } from "@radaros/core";

// Minimal agent backed by Vercel's v0 model.
const v0Agent = new Agent({
  name: "vercel-agent",
  model: vercel("v0-1.0-md"),
  instructions: "You are a helpful assistant.",
});

// Run a single prompt and print the reply.
const { output } = await v0Agent.run("Generate a React button component.");
console.log(output);
OpenAI-Compatible Providers
Any provider that exposes an OpenAI-compatible API works out of the box with the `openai` factory by passing a custom `baseURL`.
Together AI
// Requires: TOGETHER_API_KEY=...
import { Agent, openai } from "@radaros/core";

// Together AI speaks the OpenAI protocol, so the openai factory works
// with a custom baseURL and Together's API key.
const togetherAgent = new Agent({
  name: "together-agent",
  model: openai("meta-llama/Llama-3.1-70B", {
    apiKey: process.env.TOGETHER_API_KEY!,
    baseURL: "https://api.together.xyz/v1",
  }),
  instructions: "You are a helpful assistant.",
});

// Run a single prompt and print the reply.
const { output } = await togetherAgent.run("Explain MapReduce.");
console.log(output);
Groq
// Requires: GROQ_API_KEY=gsk_...
import { Agent, openai } from "@radaros/core";

// Groq exposes an OpenAI-compatible endpoint, so the openai factory works
// with a custom baseURL and Groq's API key.
const groqAgent = new Agent({
  name: "groq-agent",
  model: openai("llama-3.3-70b-versatile", {
    apiKey: process.env.GROQ_API_KEY!,
    baseURL: "https://api.groq.com/openai/v1",
  }),
  instructions: "You are a helpful assistant.",
});

// Run a single prompt and print the reply.
const { output } = await groqAgent.run("What is WebAssembly?");
console.log(output);
Fireworks
// Requires: FIREWORKS_API_KEY=...
import { Agent, openai } from "@radaros/core";

// Fireworks exposes an OpenAI-compatible endpoint, so the openai factory works
// with a custom baseURL and Fireworks' API key.
const fireworksAgent = new Agent({
  name: "fireworks-agent",
  model: openai("accounts/fireworks/models/llama-v3p1-70b-instruct", {
    apiKey: process.env.FIREWORKS_API_KEY!,
    baseURL: "https://api.fireworks.ai/inference/v1",
  }),
  instructions: "You are a helpful assistant.",
});

// Run a single prompt and print the reply.
const { output } = await fireworksAgent.run("Describe the actor model in concurrency.");
console.log(output);
OpenRouter
// Requires: OPENROUTER_API_KEY=sk-or-...
import { Agent, openai } from "@radaros/core";

// OpenRouter exposes an OpenAI-compatible endpoint, so the openai factory works
// with a custom baseURL and OpenRouter's API key.
const routerAgent = new Agent({
  name: "openrouter-agent",
  model: openai("anthropic/claude-sonnet-4-20250514", {
    apiKey: process.env.OPENROUTER_API_KEY!,
    baseURL: "https://openrouter.ai/api/v1",
  }),
  instructions: "You are a helpful assistant.",
});

// Run a single prompt and print the reply.
const { output } = await routerAgent.run("What is functional programming?");
console.log(output);
Model Switching
Swap models at runtime without changing any other agent configuration.

import { Agent, openai, anthropic, google } from "@radaros/core";
const agent = new Agent({
name: "switchable-agent",
model: openai("gpt-4o"),
instructions: "You are a helpful assistant.",
});
// Run with the default model
const r1 = await agent.run("Hello from OpenAI!");
console.log(r1.output);
// Switch to Anthropic for the next run
agent.model = anthropic("claude-sonnet-4-20250514");
const r2 = await agent.run("Hello from Anthropic!");
console.log(r2.output);
// Switch to Google Gemini
agent.model = google("gemini-2.5-flash");
const r3 = await agent.run("Hello from Gemini!");
console.log(r3.output);
Streaming
Use `agent.stream()` to process output as it arrives — it works with every provider.
import { Agent, openai } from "@radaros/core";

// Any provider can stream; this example uses OpenAI.
const streamingAgent = new Agent({
  name: "streaming-agent",
  model: openai("gpt-4o"),
  instructions: "You are a helpful assistant.",
});

// Write each chunk to stdout as soon as it arrives.
const stream = await streamingAgent.stream("Write a short poem about TypeScript.");
for await (const chunk of stream) {
  process.stdout.write(chunk.content);
}