TypeScript SDK

The official OpenAI Node.js SDK works seamlessly with ModelBridge. Change the base URL, keep everything else the same.

Installation

Install the official OpenAI SDK from npm:

npm install openai

Configuration

Set up your environment variables — if you're signed in, select your API key to auto-fill:

# Point the OpenAI SDK at ModelBridge and authenticate with your ModelBridge key.
export OPENAI_BASE_URL="https://api.modelbridge.dev/v1"
export OPENAI_API_KEY="mb_live_your_key_here"

Then create a client that reads from environment variables:

import OpenAI from "openai";

// With no constructor options, the SDK picks up both the base URL and the
// key from the environment variables exported above.
const client = new OpenAI(); // reads OPENAI_BASE_URL and OPENAI_API_KEY from env

Or configure the client explicitly — select your model and API key to auto-fill:

import OpenAI from "openai";

// Explicit configuration: point the OpenAI SDK at ModelBridge.
const client = new OpenAI({
  baseURL: "https://api.modelbridge.dev/v1",
  apiKey: "mb_live_your_key_here",
});

const response = await client.chat.completions.create({
  // Use any model available through ModelBridge.
  model: "claude-sonnet-4-6",
  messages: [
    { role: "system", content: "You are a helpful assistant." },
    { role: "user", content: "Hello! What can you help me with?" },
  ],
});

console.log(response.choices[0].message.content);

Streaming

import OpenAI from "openai";

const client = new OpenAI({
  baseURL: "https://api.modelbridge.dev/v1",
  apiKey: "mb_live_your_key_here",
});

// With stream: true the SDK returns an async iterable of chunks
// instead of a single completed response.
const stream = await client.chat.completions.create({
  model: "claude-sonnet-4-6",
  messages: [{ role: "user", content: "Write a short poem about code." }],
  stream: true,
});

for await (const chunk of stream) {
  // Each chunk carries an incremental delta; content may be undefined
  // on role/finish chunks, so guard before printing.
  const content = chunk.choices[0]?.delta?.content;
  if (content) {
    process.stdout.write(content);
  }
}
console.log(); // trailing newline after the streamed text

Tool calling

import OpenAI from "openai";

const client = new OpenAI({
  baseURL: "https://api.modelbridge.dev/v1",
  apiKey: "mb_live_your_key_here",
});

const response = await client.chat.completions.create({
  model: "claude-sonnet-4-6",
  messages: [{ role: "user", content: "What's the weather in Tokyo?" }],
  tools: [
    {
      type: "function",
      function: {
        name: "get_weather",
        description: "Get the current weather for a location",
        // JSON Schema describing the function's parameters.
        parameters: {
          type: "object",
          properties: {
            location: { type: "string", description: "City name" },
          },
          required: ["location"],
        },
      },
    },
  ],
  tool_choice: "auto", // let the model decide whether to call the tool
});

const message = response.choices[0].message;
if (message.tool_calls) {
  const toolCall = message.tool_calls[0];
  console.log(`Function: ${toolCall.function.name}`);
  // `arguments` is a JSON string. Parse it before use; stringify again for
  // display — interpolating the parsed object directly would print
  // "[object Object]".
  const args = JSON.parse(toolCall.function.arguments);
  console.log(`Arguments: ${JSON.stringify(args)}`);
}

Error handling

import OpenAI from "openai";

// maxRetries and timeout (ms) are SDK-level options; the base URL and API
// key are read from OPENAI_BASE_URL / OPENAI_API_KEY in the environment.
const client = new OpenAI({ maxRetries: 3, timeout: 120000 });

try {
  const response = await client.chat.completions.create({
    model: "claude-sonnet-4-6",
    messages: [{ role: "user", content: "Hello" }],
  });
  console.log(response.choices[0].message.content);
} catch (error) {
  // Check the most specific error classes first; APIError is their base class.
  if (error instanceof OpenAI.AuthenticationError) {
    console.error("Invalid API key");
  } else if (error instanceof OpenAI.RateLimitError) {
    console.error("Spending limit reached");
  } else if (error instanceof OpenAI.APIError) {
    console.error("API error:", error.message);
  } else {
    // Not an API error (network failure, programming bug, …) — rethrow
    // rather than swallowing it silently.
    throw error;
  }
}

Usage with frameworks

Next.js API route

import OpenAI from "openai";
import { NextResponse } from "next/server";

// One client for the whole route module; the key never leaves the server.
const client = new OpenAI({
  baseURL: "https://api.modelbridge.dev/v1",
  apiKey: process.env.MODELBRIDGE_API_KEY,
});

// Accepts { message } in the request body and responds with { reply }.
export async function POST(request: Request) {
  const body = await request.json();

  const completion = await client.chat.completions.create({
    model: "claude-sonnet-4-6",
    messages: [{ role: "user", content: body.message }],
  });

  const reply = completion.choices[0].message.content;
  return NextResponse.json({ reply });
}

Streaming with Next.js

import OpenAI from "openai";

// Shared client configured for ModelBridge; the key comes from the server env.
const client = new OpenAI({
  baseURL: "https://api.modelbridge.dev/v1",
  apiKey: process.env.MODELBRIDGE_API_KEY,
});

// Relays model output to the browser as it arrives from the API.
export async function POST(request: Request) {
  const { message } = await request.json();

  const completion = await client.chat.completions.create({
    model: "claude-sonnet-4-6",
    messages: [{ role: "user", content: message }],
    stream: true,
  });

  const textEncoder = new TextEncoder();

  // Bridge the SDK's async iterable into a web ReadableStream: forward each
  // content delta as a UTF-8 chunk, then close when the model finishes.
  const body = new ReadableStream({
    async start(controller) {
      for await (const part of completion) {
        const delta = part.choices[0]?.delta?.content;
        if (delta) {
          controller.enqueue(textEncoder.encode(delta));
        }
      }
      controller.close();
    },
  });

  return new Response(body);
}

Was this page helpful?