
Text Generation Quickstart

Last updated February 19, 2026

This quickstart walks you through making your first text generation request with AI Gateway.

  1. Create a new directory and initialize a Node.js project:

    Terminal
    mkdir ai-text-demo
    cd ai-text-demo
    pnpm init
  2. Install the AI SDK and development dependencies using whichever package manager you prefer (the following commands are equivalent):

    Terminal
    npm install ai dotenv @types/node tsx typescript
    Terminal
    yarn add ai dotenv @types/node tsx typescript
    Terminal
    pnpm add ai dotenv @types/node tsx typescript
    Terminal
    bun add ai dotenv @types/node tsx typescript
  3. Go to the AI Gateway API Keys page in your Vercel dashboard and click Create key to generate a new API key.

    Create a .env.local file and save your API key:

    .env.local
    AI_GATEWAY_API_KEY=your_ai_gateway_api_key

    Instead of using an API key, you can use OIDC tokens to authenticate your requests.

  4. Create an index.ts file:

    index.ts
    import { streamText } from 'ai';
    import 'dotenv/config';
     
    async function main() {
      const result = streamText({
        model: 'openai/gpt-5.2',
        prompt: 'Invent a new holiday and describe its traditions.',
      });
     
      for await (const textPart of result.textStream) {
        process.stdout.write(textPart);
      }
     
      console.log();
      console.log('Token usage:', await result.usage);
      console.log('Finish reason:', await result.finishReason);
    }
     
    main().catch(console.error);

    Run your script:

    Terminal
    pnpm tsx index.ts

    You should see the AI model's response stream to your terminal.
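
If you would rather receive the complete response at once instead of streaming it, the AI SDK's generateText function follows the same pattern. A minimal sketch, assuming the same API key setup and model as in the steps above (the filename generate.ts is illustrative):

generate.ts
import { generateText } from 'ai';
import 'dotenv/config';
 
async function main() {
  // generateText resolves once the full response is available,
  // rather than streaming tokens as they arrive.
  const { text, usage, finishReason } = await generateText({
    model: 'openai/gpt-5.2',
    prompt: 'Invent a new holiday and describe its traditions.',
  });
 
  console.log(text);
  console.log('Token usage:', usage);
  console.log('Finish reason:', finishReason);
}
 
main().catch(console.error);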

Use any OpenAI-compatible SDK or HTTP client with AI Gateway:

index.ts
import OpenAI from 'openai';
import 'dotenv/config';
 
const client = new OpenAI({
  apiKey: process.env.AI_GATEWAY_API_KEY,
  baseURL: 'https://ai-gateway.vercel.sh/v1',
});
 
async function main() {
  const response = await client.chat.completions.create({
    model: 'anthropic/claude-sonnet-4.6',
    messages: [
      {
        role: 'user',
        content: 'Invent a new holiday and describe its traditions.',
      },
    ],
  });
 
  console.log(response.choices[0].message.content);
}
 
main().catch(console.error);
main.py
import os
from openai import OpenAI
from dotenv import load_dotenv
 
load_dotenv()
 
client = OpenAI(
    api_key=os.getenv('AI_GATEWAY_API_KEY'),
    base_url='https://ai-gateway.vercel.sh/v1',
)
 
response = client.chat.completions.create(
    model='anthropic/claude-sonnet-4.6',
    messages=[
        {
            'role': 'user',
            'content': 'Invent a new holiday and describe its traditions.',
        },
    ],
)
 
print(response.choices[0].message.content)
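
For a plain HTTP client, you can POST to the chat completions endpoint directly. This is a sketch that assumes the endpoint path is the base URL shown above plus the standard /chat/completions route:

Terminal
curl -X POST "https://ai-gateway.vercel.sh/v1/chat/completions" \
  -H "Authorization: Bearer $AI_GATEWAY_API_KEY" \
  -H "Content-Type: application/json" \
  -d '{
    "model": "anthropic/claude-sonnet-4.6",
    "messages": [
      {
        "role": "user",
        "content": "Invent a new holiday and describe its traditions."
      }
    ]
  }'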

Learn more in the OpenAI-Compatible API docs.

Use any Anthropic-compatible SDK or HTTP client with AI Gateway:

index.ts
import Anthropic from '@anthropic-ai/sdk';
import 'dotenv/config';
 
const client = new Anthropic({
  apiKey: process.env.AI_GATEWAY_API_KEY,
  baseURL: 'https://ai-gateway.vercel.sh',
});
 
async function main() {
  const message = await client.messages.create({
    model: 'anthropic/claude-sonnet-4.6',
    max_tokens: 1024,
    messages: [
      {
        role: 'user',
        content: 'Invent a new holiday and describe its traditions.',
      },
    ],
  });
 
  console.log(message.content[0].text);
}
 
main().catch(console.error);
main.py
import os
import anthropic
from dotenv import load_dotenv
 
load_dotenv()
 
client = anthropic.Anthropic(
    api_key=os.getenv('AI_GATEWAY_API_KEY'),
    base_url='https://ai-gateway.vercel.sh',
)
 
message = client.messages.create(
    model='anthropic/claude-sonnet-4.6',
    max_tokens=1024,
    messages=[
        {
            'role': 'user',
            'content': 'Invent a new holiday and describe its traditions.',
        },
    ],
)
 
print(message.content[0].text)
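
Streaming works through the same client. The sketch below uses the Anthropic SDK's messages.stream() helper with the same configuration as above; the filename stream.ts is illustrative, and it assumes the gateway streams responses the same way Anthropic's native API does:

stream.ts
import Anthropic from '@anthropic-ai/sdk';
import 'dotenv/config';
 
const client = new Anthropic({
  apiKey: process.env.AI_GATEWAY_API_KEY,
  baseURL: 'https://ai-gateway.vercel.sh',
});
 
async function main() {
  // Request a streamed response and print text deltas as they arrive.
  const stream = client.messages.stream({
    model: 'anthropic/claude-sonnet-4.6',
    max_tokens: 1024,
    messages: [
      {
        role: 'user',
        content: 'Invent a new holiday and describe its traditions.',
      },
    ],
  });
 
  stream.on('text', (text) => process.stdout.write(text));
 
  // finalMessage() resolves once the stream has finished.
  await stream.finalMessage();
  console.log();
}
 
main().catch(console.error);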

Learn more in the Anthropic-Compatible API docs.

Use the OpenResponses API, an open standard for AI model interactions:

index.ts
import 'dotenv/config';
 
async function main() {
  const response = await fetch('https://ai-gateway.vercel.sh/v1/responses', {
    method: 'POST',
    headers: {
      'Content-Type': 'application/json',
      Authorization: `Bearer ${process.env.AI_GATEWAY_API_KEY}`,
    },
    body: JSON.stringify({
      model: 'anthropic/claude-sonnet-4.6',
      input: [
        {
          type: 'message',
          role: 'user',
          content: 'Invent a new holiday and describe its traditions.',
        },
      ],
    }),
  });
 
  const result = await response.json();
  console.log(result.output[0].content[0].text);
}
 
main().catch(console.error);
main.py
import os
import requests
from dotenv import load_dotenv
 
load_dotenv()
 
response = requests.post(
    'https://ai-gateway.vercel.sh/v1/responses',
    headers={
        'Content-Type': 'application/json',
        'Authorization': f'Bearer {os.getenv("AI_GATEWAY_API_KEY")}',
    },
    json={
        'model': 'anthropic/claude-sonnet-4.6',
        'input': [
            {
                'type': 'message',
                'role': 'user',
                'content': 'Invent a new holiday and describe its traditions.',
            },
        ],
    },
)
 
result = response.json()
print(result['output'][0]['content'][0]['text'])
Terminal
curl -X POST "https://ai-gateway.vercel.sh/v1/responses" \
  -H "Authorization: Bearer $AI_GATEWAY_API_KEY" \
  -H "Content-Type: application/json" \
  -d '{
    "model": "anthropic/claude-sonnet-4.6",
    "input": [
      {
        "type": "message",
        "role": "user",
        "content": "Invent a new holiday and describe its traditions."
      }
    ]
  }'

Learn more in the OpenResponses API docs.

