Gradio with OpenAI API (via OpenRouter)

Open In Colab Download .ipynb

Install required packages

!uv pip install gradio==5.49.1 openai
Using Python 3.13.1 environment at: /Users/simon/Dev/CS-394/.venv

Audited 2 packages in 14ms

Set the OpenRouter API Key from Colab Secrets

# Colab-only: read the API key from this notebook's Colab Secrets store.
# (google.colab is unavailable outside Colab; use the next cell locally.)
from google.colab import userdata
OPENROUTER_API_KEY = userdata.get('OPENROUTER_API_KEY')

(Alternatively, if running locally instead of Colab, load the OpenRouter API key from a `.env` file or an environment variable)

# Local-run alternative: pull the key from a .env file / the environment.
import os

from dotenv import load_dotenv

# Populate os.environ from a .env file in the working directory, if present.
load_dotenv()

# None when the variable is unset; the client below will then fail to auth.
OPENROUTER_API_KEY = os.getenv("OPENROUTER_API_KEY")

Initialize the OpenAI client

import openai

# Initialize OpenAI client
# Point the OpenAI SDK at OpenRouter's OpenAI-compatible endpoint; every
# chat-completions call below is routed through OpenRouter.
client = openai.OpenAI(
    base_url='https://openrouter.ai/api/v1',
    api_key=OPENROUTER_API_KEY,  # set in one of the key-loading cells above
)

Example 1: Basic Gradio interface

import gradio as gr

def image_classifier(inp):
    """Stub classifier: ignore the input and return fixed label scores."""
    scores = {'cat': 0.3, 'dog': 0.7}
    return scores

# Wire the stub classifier into an image-in, label-out UI and start the
# local Gradio server (prints the local URL; blocks until stopped).
demo = gr.Interface(fn=image_classifier, inputs="image", outputs="label")
demo.launch()
* Running on local URL:  http://127.0.0.1:7862
* To create a public link, set `share=True` in `launch()`.

Example 2: Basic chat interface with conversation history

import gradio as gr

def chat_with_history(message, history):
    """Return the assistant's reply to *message*, given prior turns.

    Args:
        message: The user's latest message (str).
        history: Prior conversation in Gradio ``type="messages"`` form —
            a list of dicts with ``role``/``content`` keys (entries may
            also carry extra keys such as ``metadata``).

    Returns:
        The assistant's reply text (str).
    """
    # Gradio "messages" history entries can include extra keys (e.g.
    # "metadata") beyond role/content; forward only the two fields the
    # chat-completions API expects so the request is not rejected.
    messages = [
        {"role": turn["role"], "content": turn["content"]}
        for turn in history
    ]
    # Add current message
    messages.append({"role": "user", "content": message})

    # Get response from API (non-streaming: full reply in one response)
    response = client.chat.completions.create(
        model='openai/gpt-5.2-chat',
        messages=messages,
    )

    return response.choices[0].message.content

# Create a chat interface
# type="messages" makes Gradio pass `history` as a list of role/content
# dicts (OpenAI-style) rather than the legacy [user, bot] pair format.
demo = gr.ChatInterface(
    fn=chat_with_history,
    title="Basic Chat with Conversation History",
    type="messages"
)

# Starts the local Gradio server (prints the URL; blocks until stopped).
demo.launch()
* Running on local URL:  http://127.0.0.1:7863
* To create a public link, set `share=True` in `launch()`.

Example 3: Streaming chat interface

import gradio as gr

def chat_with_streaming(message, history):
    """Stream the assistant's reply to *message*, given prior turns.

    Yields progressively longer prefixes of the reply so Gradio can
    render tokens as they arrive.

    Args:
        message: The user's latest message (str).
        history: Prior conversation in Gradio ``type="messages"`` form.

    Yields:
        str: The reply accumulated so far, once per content delta.
    """
    # Forward only role/content: Gradio "messages" history entries can
    # carry extra keys (e.g. "metadata") that the API may reject.
    messages = [
        {"role": turn["role"], "content": turn["content"]}
        for turn in history
    ]
    messages.append({"role": "user", "content": message})

    # Stream the response
    stream = client.chat.completions.create(
        model='openai/gpt-5.2-chat',
        messages=messages,
        stream=True,
    )

    response_text = ""
    for chunk in stream:
        # Some providers (including OpenRouter) emit keep-alive/usage
        # chunks with an empty `choices` list — guard before indexing.
        if not chunk.choices:
            continue
        token = chunk.choices[0].delta.content
        # Role-only / final chunks carry no content delta.
        if token is not None:
            response_text += token
            yield response_text

# Create streaming chat interface
demo = gr.ChatInterface(
    fn=chat_with_streaming,
    title="AI Chat with Streaming",
    type="messages"
)

demo.launch()
* Running on local URL:  http://127.0.0.1:7864
* To create a public link, set `share=True` in `launch()`.