Text Generation Quickstart
This quickstart walks you through making your first text generation request with AI Gateway.
Create a new directory and initialize a Node.js project:
Terminal
mkdir ai-text-demo
cd ai-text-demo
pnpm init
Install the AI SDK and development dependencies:
Terminal
npm install ai dotenv @types/node tsx typescript
Terminal
yarn add ai dotenv @types/node tsx typescript
Terminal
pnpm add ai dotenv @types/node tsx typescript
Terminal
bun add ai dotenv @types/node tsx typescript
Go to the AI Gateway API Keys page in your Vercel dashboard and click Create key to generate a new API key.
Create a .env.local file and save your API key:
.env.local
AI_GATEWAY_API_KEY=your_ai_gateway_api_key
Instead of using an API key, you can use OIDC tokens to authenticate your requests.
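If your project is linked to Vercel and OIDC is enabled (an assumption about your setup, not a required step in this quickstart), one common approach is to pull a short-lived token into .env.local with the Vercel CLI:
Terminal
vercel env pull
The pulled VERCEL_OIDC_TOKEN can then stand in for AI_GATEWAY_API_KEY; see the AI Gateway documentation on OIDC authentication for details.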
Create an index.ts file:
index.ts
import { streamText } from 'ai';
import 'dotenv/config';

async function main() {
  const result = streamText({
    model: 'openai/gpt-5.2',
    prompt: 'Invent a new holiday and describe its traditions.',
  });

  for await (const textPart of result.textStream) {
    process.stdout.write(textPart);
  }

  console.log();
  console.log('Token usage:', await result.usage);
  console.log('Finish reason:', await result.finishReason);
}

main().catch(console.error);
Run your script:
Terminal
pnpm tsx index.ts
You should see the AI model's response stream to your terminal.
Next steps:
- Learn about provider and model routing with fallbacks (see the sketch after this list)
- Explore the AI SDK documentation for more configuration options
- Try other APIs: OpenAI-compatible, Anthropic-compatible, or OpenResponses
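As a taste of routing, the gateway lets you express a provider preference order and fall back automatically when a provider is unavailable. The sketch below assumes the AI SDK's gateway provider options and uses illustrative provider slugs ('anthropic', 'bedrock'); check the routing docs for the exact option names your setup supports.
TypeScript
import { streamText } from 'ai';
import 'dotenv/config';

async function main() {
  const result = streamText({
    model: 'anthropic/claude-sonnet-4.6',
    prompt: 'Invent a new holiday and describe its traditions.',
    providerOptions: {
      // Assumption: the gateway accepts an ordered list of provider slugs
      // and tries the next entry when the preferred provider fails.
      gateway: {
        order: ['anthropic', 'bedrock'],
      },
    },
  });

  for await (const textPart of result.textStream) {
    process.stdout.write(textPart);
  }
}

main().catch(console.error);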
Use any OpenAI-compatible SDK or HTTP client with AI Gateway:
TypeScript
import OpenAI from 'openai';
import 'dotenv/config';

const client = new OpenAI({
  apiKey: process.env.AI_GATEWAY_API_KEY,
  baseURL: 'https://ai-gateway.vercel.sh/v1',
});

async function main() {
  const response = await client.chat.completions.create({
    model: 'anthropic/claude-sonnet-4.6',
    messages: [
      {
        role: 'user',
        content: 'Invent a new holiday and describe its traditions.',
      },
    ],
  });

  console.log(response.choices[0].message.content);
}

main().catch(console.error);
Python
import os
from openai import OpenAI
from dotenv import load_dotenv

load_dotenv()

client = OpenAI(
    api_key=os.getenv('AI_GATEWAY_API_KEY'),
    base_url='https://ai-gateway.vercel.sh/v1',
)

response = client.chat.completions.create(
    model='anthropic/claude-sonnet-4.6',
    messages=[
        {
            'role': 'user',
            'content': 'Invent a new holiday and describe its traditions.',
        },
    ],
)

print(response.choices[0].message.content)
Learn more in the OpenAI-Compatible API docs.
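Streaming also works through this endpoint. A minimal sketch using the OpenAI Node SDK's streaming interface, assuming the same client configuration as above and that the gateway passes the stream parameter through:
TypeScript
import OpenAI from 'openai';
import 'dotenv/config';

const client = new OpenAI({
  apiKey: process.env.AI_GATEWAY_API_KEY,
  baseURL: 'https://ai-gateway.vercel.sh/v1',
});

async function main() {
  // With stream: true the SDK returns an async iterable of chunks
  // instead of a single completed response.
  const stream = await client.chat.completions.create({
    model: 'anthropic/claude-sonnet-4.6',
    messages: [
      { role: 'user', content: 'Invent a new holiday and describe its traditions.' },
    ],
    stream: true,
  });

  for await (const chunk of stream) {
    process.stdout.write(chunk.choices[0]?.delta?.content ?? '');
  }
  console.log();
}

main().catch(console.error);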
Use any Anthropic-compatible SDK or HTTP client with AI Gateway:
TypeScript
import Anthropic from '@anthropic-ai/sdk';
import 'dotenv/config';

const client = new Anthropic({
  apiKey: process.env.AI_GATEWAY_API_KEY,
  baseURL: 'https://ai-gateway.vercel.sh',
});

async function main() {
  const message = await client.messages.create({
    model: 'anthropic/claude-sonnet-4.6',
    max_tokens: 1024,
    messages: [
      {
        role: 'user',
        content: 'Invent a new holiday and describe its traditions.',
      },
    ],
  });

  console.log(message.content[0].text);
}

main().catch(console.error);
Python
import os
import anthropic
from dotenv import load_dotenv

load_dotenv()

client = anthropic.Anthropic(
    api_key=os.getenv('AI_GATEWAY_API_KEY'),
    base_url='https://ai-gateway.vercel.sh',
)

message = client.messages.create(
    model='anthropic/claude-sonnet-4.6',
    max_tokens=1024,
    messages=[
        {
            'role': 'user',
            'content': 'Invent a new holiday and describe its traditions.',
        },
    ],
)

print(message.content[0].text)
Learn more in the Anthropic-Compatible API docs.
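Streaming is available here as well. A minimal sketch using the Anthropic TypeScript SDK's streaming helper, assuming the same client configuration as above:
TypeScript
import Anthropic from '@anthropic-ai/sdk';
import 'dotenv/config';

const client = new Anthropic({
  apiKey: process.env.AI_GATEWAY_API_KEY,
  baseURL: 'https://ai-gateway.vercel.sh',
});

async function main() {
  // messages.stream() emits text deltas as they arrive.
  const stream = client.messages.stream({
    model: 'anthropic/claude-sonnet-4.6',
    max_tokens: 1024,
    messages: [
      { role: 'user', content: 'Invent a new holiday and describe its traditions.' },
    ],
  });

  stream.on('text', (text) => process.stdout.write(text));

  // Wait for the full message before exiting.
  await stream.finalMessage();
  console.log();
}

main().catch(console.error);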
Use the OpenResponses API, an open standard for AI model interactions:
TypeScript
import 'dotenv/config';

async function main() {
  const response = await fetch('https://ai-gateway.vercel.sh/v1/responses', {
    method: 'POST',
    headers: {
      'Content-Type': 'application/json',
      Authorization: `Bearer ${process.env.AI_GATEWAY_API_KEY}`,
    },
    body: JSON.stringify({
      model: 'anthropic/claude-sonnet-4.6',
      input: [
        {
          type: 'message',
          role: 'user',
          content: 'Invent a new holiday and describe its traditions.',
        },
      ],
    }),
  });

  const result = await response.json();
  console.log(result.output[0].content[0].text);
}

main().catch(console.error);
Python
import os
import requests
from dotenv import load_dotenv

load_dotenv()

response = requests.post(
    'https://ai-gateway.vercel.sh/v1/responses',
    headers={
        'Content-Type': 'application/json',
        'Authorization': f'Bearer {os.getenv("AI_GATEWAY_API_KEY")}',
    },
    json={
        'model': 'anthropic/claude-sonnet-4.6',
        'input': [
            {
                'type': 'message',
                'role': 'user',
                'content': 'Invent a new holiday and describe its traditions.',
            },
        ],
    },
)

result = response.json()
print(result['output'][0]['content'][0]['text'])
cURL
curl -X POST "https://ai-gateway.vercel.sh/v1/responses" \
  -H "Authorization: Bearer $AI_GATEWAY_API_KEY" \
  -H "Content-Type: application/json" \
  -d '{
    "model": "anthropic/claude-sonnet-4.6",
    "input": [
      {
        "type": "message",
        "role": "user",
        "content": "Invent a new holiday and describe its traditions."
      }
    ]
  }'
Learn more in the OpenResponses API docs.