WebLLM Examples
Overview
This page provides complete, working examples that use WebLLM for browser-native AI execution. All examples run entirely in the browser, with no server costs or API key management needed.
Choose your preferred integration approach:
Prerequisites
Before running these examples, ensure you have:
- Installed the WebLLM extension in your browser
- Configured at least one AI provider (OpenAI, Anthropic, or local)
- Granted necessary permissions when prompted
Basic Chat Application
A simple client-side chat application with conversation history. WebLLM intelligently routes each request to the user’s best available provider.
'use client';

import { useState } from 'react';
import { generateText } from 'webllm';

interface Message {
  role: 'user' | 'assistant';
  content: string;
}

/**
 * Minimal chat UI backed directly by the WebLLM browser SDK.
 * The full conversation is kept in component state and sent with
 * every request so the model always has the dialogue context.
 */
export default function ChatPage() {
  const [messages, setMessages] = useState<Message[]>([]);
  const [input, setInput] = useState('');
  const [isLoading, setIsLoading] = useState(false);

  const handleSend = async (e: React.FormEvent) => {
    e.preventDefault();
    // Ignore empty submissions and double-sends while a request is in flight.
    if (!input.trim() || isLoading) return;

    const userMessage: Message = { role: 'user', content: input };
    setMessages(prev => [...prev, userMessage]);
    setInput('');
    setIsLoading(true);

    try {
      // WebLLM selects best provider based on task and hints
      const result = await generateText({
        task: 'general',
        hints: { speed: 'balanced', quality: 'standard' },
        messages: [...messages, userMessage]
      });
      setMessages(prev => [...prev, { role: 'assistant', content: result.text }]);
    } catch (error) {
      console.error('Chat failed:', error);
      alert('Failed to get response. Please try again.');
    } finally {
      setIsLoading(false);
    }
  };

  return (
    <div className="max-w-2xl mx-auto p-4">
      <h1 className="text-2xl font-bold mb-4">AI Chat</h1>

      {/* Scrollable transcript */}
      <div className="border rounded-lg p-4 mb-4 h-96 overflow-y-auto">
        {messages.length === 0 ? (
          <p className="text-gray-500">Start a conversation...</p>
        ) : (
          messages.map((msg, i) => (
            <div
              key={i}
              className={`mb-4 ${msg.role === 'user' ? 'text-right' : 'text-left'}`}
            >
              <div
                className={`inline-block p-3 rounded-lg ${
                  msg.role === 'user'
                    ? 'bg-blue-500 text-white'
                    : 'bg-gray-200 text-gray-900'
                }`}
              >
                {msg.content}
              </div>
            </div>
          ))
        )}
      </div>

      {/* Composer */}
      <form onSubmit={handleSend} className="flex gap-2">
        <input
          type="text"
          value={input}
          onChange={(e) => setInput(e.target.value)}
          disabled={isLoading}
          placeholder="Type your message..."
          className="flex-1 border rounded-lg px-4 py-2"
        />
        <button
          type="submit"
          disabled={isLoading || !input.trim()}
          className="bg-blue-500 text-white px-6 py-2 rounded-lg disabled:opacity-50"
        >
          {isLoading ? 'Sending...' : 'Send'}
        </button>
      </form>
    </div>
  );
}
'use client';
import { useState } from 'react';
import { generateText } from 'ai';
import { webllm } from 'webllm-ai-provider';

interface Message {
  role: 'user' | 'assistant';
  content: string;
}

/**
 * Same chat UI as the direct-SDK variant, but routed through the
 * Vercel AI SDK: WebLLM is plugged in as an AI SDK model provider.
 */
export default function ChatPage() {
  const [messages, setMessages] = useState<Message[]>([]);
  const [input, setInput] = useState('');
  const [isLoading, setIsLoading] = useState(false);

  const handleSend = async (e: React.FormEvent) => {
    e.preventDefault();
    // Ignore empty submissions and double-sends while a request is in flight.
    if (!input.trim() || isLoading) return;

    const userMessage: Message = { role: 'user', content: input };
    setMessages(prev => [...prev, userMessage]);
    setInput('');
    setIsLoading(true);

    try {
      // WebLLM selects best provider based on task and hints
      const result = await generateText({
        model: webllm({
          task: 'general',
          hints: { speed: 'balanced', quality: 'standard' }
        }),
        messages: [...messages, userMessage]
      });
      setMessages(prev => [...prev, { role: 'assistant', content: result.text }]);
    } catch (error) {
      console.error('Chat failed:', error);
      alert('Failed to get response. Please try again.');
    } finally {
      setIsLoading(false);
    }
  };

  return (
    <div className="max-w-2xl mx-auto p-4">
      <h1 className="text-2xl font-bold mb-4">AI Chat</h1>

      {/* Scrollable transcript */}
      <div className="border rounded-lg p-4 mb-4 h-96 overflow-y-auto">
        {messages.length === 0 ? (
          <p className="text-gray-500">Start a conversation...</p>
        ) : (
          messages.map((msg, i) => (
            <div
              key={i}
              className={`mb-4 ${msg.role === 'user' ? 'text-right' : 'text-left'}`}
            >
              <div
                className={`inline-block p-3 rounded-lg ${
                  msg.role === 'user'
                    ? 'bg-blue-500 text-white'
                    : 'bg-gray-200 text-gray-900'
                }`}
              >
                {msg.content}
              </div>
            </div>
          ))
        )}
      </div>

      {/* Composer */}
      <form onSubmit={handleSend} className="flex gap-2">
        <input
          type="text"
          value={input}
          onChange={(e) => setInput(e.target.value)}
          disabled={isLoading}
          placeholder="Type your message..."
          className="flex-1 border rounded-lg px-4 py-2"
        />
        <button
          type="submit"
          disabled={isLoading || !input.trim()}
          className="bg-blue-500 text-white px-6 py-2 rounded-lg disabled:opacity-50"
        >
          {isLoading ? 'Sending...' : 'Send'}
        </button>
      </form>
    </div>
  );
}
Document Summarizer
Summarize documents intelligently using reasoning-optimized models.
'use client';
import { useState } from 'react';import { generateText } from 'ai';import { webllm } from 'webllm-ai-provider';
export default function SummarizePage() { const [document, setDocument] = useState(''); const [summary, setSummary] = useState(''); const [isLoading, setIsLoading] = useState(false);
const handleSummarize = async () => { if (!document.trim()) return;
setIsLoading(true); setSummary('');
try { const result = await generateText({ model: webllm({ task: 'summarization', hints: { quality: 'best', capabilities: { reasoning: true } } }), messages: [ { role: 'system', content: 'You are an expert at summarizing documents. Provide concise, accurate summaries that capture key points.' }, { role: 'user', content: `Summarize this document in 3-5 bullet points:\n\n${document}` } ] });
setSummary(result.text); } catch (error) { console.error('Summarization failed:', error); alert('Failed to summarize. Please try again.'); } finally { setIsLoading(false); } };
return ( <div className="max-w-4xl mx-auto p-4"> <h1 className="text-2xl font-bold mb-4">Document Summarizer</h1>
<div className="grid grid-cols-2 gap-4"> <div> <h2 className="font-semibold mb-2">Original Document</h2> <textarea value={document} onChange={(e) => setDocument(e.target.value)} placeholder="Paste your document here..." className="w-full h-96 border rounded-lg p-4 resize-none" /> <button onClick={handleSummarize} disabled={isLoading || !document.trim()} className="mt-2 bg-blue-500 text-white px-6 py-2 rounded-lg disabled:opacity-50" > {isLoading ? 'Summarizing...' : 'Summarize'} </button> </div>
<div> <h2 className="font-semibold mb-2">Summary</h2> <div className="w-full h-96 border rounded-lg p-4 bg-gray-50 overflow-y-auto"> {summary ? ( <div className="prose prose-sm">{summary}</div> ) : ( <p className="text-gray-500">Summary will appear here...</p> )} </div> </div> </div> </div> );}Code Review Assistant
Analyze code with strong reasoning capabilities.
'use client';
import { useState } from 'react';import { generateText } from 'ai';import { webllm } from 'webllm-ai-provider';
export default function CodeReviewPage() { const [code, setCode] = useState(''); const [review, setReview] = useState(''); const [isLoading, setIsLoading] = useState(false);
const handleReview = async () => { if (!code.trim()) return;
setIsLoading(true); setReview('');
try { const result = await generateText({ model: webllm({ task: 'coding', hints: { quality: 'best', capabilities: { reasoning: true, codeGeneration: true } } }), messages: [ { role: 'system', content: `You are an expert code reviewer. Analyze code for:- Bugs and potential issues- Security vulnerabilities- Performance optimizations- Code quality and best practices- Readability improvements
Provide specific, actionable feedback.` }, { role: 'user', content: `Review this code:\n\n\`\`\`\n${code}\n\`\`\`` } ], temperature: 0.3 // Lower temperature for more precise analysis });
setReview(result.text); } catch (error) { console.error('Code review failed:', error); alert('Failed to review code. Please try again.'); } finally { setIsLoading(false); } };
return ( <div className="max-w-6xl mx-auto p-4"> <h1 className="text-2xl font-bold mb-4">Code Review Assistant</h1>
<div className="grid grid-cols-2 gap-4"> <div> <h2 className="font-semibold mb-2">Your Code</h2> <textarea value={code} onChange={(e) => setCode(e.target.value)} placeholder="Paste your code here..." className="w-full h-96 border rounded-lg p-4 font-mono text-sm resize-none" /> <button onClick={handleReview} disabled={isLoading || !code.trim()} className="mt-2 bg-blue-500 text-white px-6 py-2 rounded-lg disabled:opacity-50" > {isLoading ? 'Reviewing...' : 'Review Code'} </button> </div>
<div> <h2 className="font-semibold mb-2">Review</h2> <div className="w-full h-96 border rounded-lg p-4 bg-gray-50 overflow-y-auto"> {review ? ( <div className="prose prose-sm whitespace-pre-wrap">{review}</div> ) : ( <p className="text-gray-500">Review will appear here...</p> )} </div> </div> </div> </div> );}Quick FAQ Bot
Get fast responses to simple questions using speed-optimized routing.
'use client';

import { useState } from 'react';
import { generateText } from 'ai';
import { webllm } from 'webllm-ai-provider';

// Preset topics rendered as one-click question chips.
const FAQ_TOPICS = [
  'Product Features',
  'Pricing',
  'Technical Support',
  'Account Management',
  'Billing'
];

/**
 * Lightweight FAQ bot. Uses the fastest/draft routing hints so
 * simple questions get low-latency answers.
 */
export default function FAQPage() {
  const [question, setQuestion] = useState('');
  const [answer, setAnswer] = useState('');
  const [isLoading, setIsLoading] = useState(false);

  const handleAsk = async (e: React.FormEvent) => {
    e.preventDefault();
    if (!question.trim()) return;

    setIsLoading(true);
    setAnswer('');

    try {
      const result = await generateText({
        model: webllm({
          task: 'qa',
          hints: {
            speed: 'fastest', // Prioritize fast responses
            quality: 'draft'
          }
        }),
        temperature: 0.5, // Balanced creativity
        messages: [
          {
            role: 'system',
            content: `You are a helpful customer support assistant. Provide brief, accurate answers to frequently asked questions. If you don't know the answer, say so.`
          },
          { role: 'user', content: question }
        ]
      });
      setAnswer(result.text);
    } catch (error) {
      console.error('FAQ failed:', error);
      alert('Failed to get answer. Please try again.');
    } finally {
      setIsLoading(false);
    }
  };

  // Pre-fill the question box from a topic chip.
  const handleQuickQuestion = (topic: string) => {
    setQuestion(`What should I know about ${topic}?`);
  };

  return (
    <div className="max-w-2xl mx-auto p-4">
      <h1 className="text-2xl font-bold mb-4">Quick FAQ</h1>

      {/* Topic chips */}
      <div className="mb-4">
        <p className="text-sm text-gray-600 mb-2">Quick topics:</p>
        <div className="flex flex-wrap gap-2">
          {FAQ_TOPICS.map(topic => (
            <button
              key={topic}
              onClick={() => handleQuickQuestion(topic)}
              className="px-3 py-1 bg-gray-200 hover:bg-gray-300 rounded-full text-sm"
            >
              {topic}
            </button>
          ))}
        </div>
      </div>

      {/* Question form */}
      <form onSubmit={handleAsk} className="mb-4">
        <textarea
          value={question}
          onChange={(e) => setQuestion(e.target.value)}
          placeholder="Ask a question..."
          className="w-full border rounded-lg p-4 mb-2 resize-none"
          rows={3}
        />
        <button
          type="submit"
          disabled={isLoading || !question.trim()}
          className="bg-blue-500 text-white px-6 py-2 rounded-lg disabled:opacity-50"
        >
          {isLoading ? 'Getting answer...' : 'Ask'}
        </button>
      </form>

      {answer && (
        <div className="border rounded-lg p-4 bg-blue-50">
          <h3 className="font-semibold mb-2">Answer:</h3>
          <p>{answer}</p>
        </div>
      )}
    </div>
  );
}
Creative Writing Assistant
Generate creative content with depth-optimized models.
'use client';
import { useState } from 'react';import { generateText } from 'ai';import { webllm } from 'webllm-ai-provider';
const WRITING_STYLES = ['Story', 'Poem', 'Blog Post', 'Email', 'Social Media'];
export default function WritingPage() { const [prompt, setPrompt] = useState(''); const [style, setStyle] = useState('Story'); const [content, setContent] = useState(''); const [isLoading, setIsLoading] = useState(false);
const handleGenerate = async () => { if (!prompt.trim()) return;
setIsLoading(true); setContent('');
try { const result = await generateText({ model: webllm({ task: 'creative', hints: { quality: 'high' } }), temperature: 0.9, // Higher creativity messages: [ { role: 'system', content: `You are a creative writing assistant. Generate engaging, well-written ${style.toLowerCase()} content.` }, { role: 'user', content: prompt } ], maxTokens: 2048 });
setContent(result.text); } catch (error) { console.error('Generation failed:', error); alert('Failed to generate content. Please try again.'); } finally { setIsLoading(false); } };
return ( <div className="max-w-4xl mx-auto p-4"> <h1 className="text-2xl font-bold mb-4">Creative Writing Assistant</h1>
<div className="mb-4"> <label className="block font-semibold mb-2">Writing Style</label> <div className="flex gap-2"> {WRITING_STYLES.map(s => ( <button key={s} onClick={() => setStyle(s)} className={`px-4 py-2 rounded-lg ${ style === s ? 'bg-blue-500 text-white' : 'bg-gray-200 hover:bg-gray-300' }`} > {s} </button> ))} </div> </div>
<div className="mb-4"> <label className="block font-semibold mb-2">Your Prompt</label> <textarea value={prompt} onChange={(e) => setPrompt(e.target.value)} placeholder="What would you like to write about?" className="w-full border rounded-lg p-4 resize-none" rows={4} /> <button onClick={handleGenerate} disabled={isLoading || !prompt.trim()} className="mt-2 bg-blue-500 text-white px-6 py-2 rounded-lg disabled:opacity-50" > {isLoading ? 'Generating...' : 'Generate'} </button> </div>
{content && ( <div className="border rounded-lg p-6 bg-gray-50"> <h3 className="font-semibold mb-4">Generated {style}:</h3> <div className="prose prose-lg whitespace-pre-wrap">{content}</div> </div> )} </div> );}Extension Availability Check
Always check that WebLLM is available before using it.
'use client';
import { useEffect, useState } from 'react';
/**
 * Returns true when the WebLLM browser extension has injected its
 * `navigator.llm` API and the value is truthy.
 *
 * `llm` is not part of the standard `Navigator` type, so the original
 * `navigator.llm` access fails under strict TypeScript. We widen the
 * type with an intersection rather than `any` to keep type safety.
 * Also guards `typeof navigator` so the check is safe during SSR.
 */
function isWebLLMAvailable(): boolean {
  if (typeof navigator === 'undefined') return false;
  const nav = navigator as Navigator & { llm?: unknown };
  return 'llm' in nav && Boolean(nav.llm);
}
export function WebLLMCheck({ children }: { children: React.ReactNode }) { const [isReady, setIsReady] = useState(false); const [isChecking, setIsChecking] = useState(true);
useEffect(() => { const checkAvailability = () => { const available = isWebLLMAvailable(); setIsReady(available); setIsChecking(false); };
// Check immediately checkAvailability();
// Listen for extension loading const handler = () => { checkAvailability(); };
window.addEventListener('webllm:ready', handler); return () => window.removeEventListener('webllm:ready', handler); }, []);
if (isChecking) { return ( <div className="flex items-center justify-center min-h-screen"> <div className="text-center"> <div className="animate-spin rounded-full h-12 w-12 border-b-2 border-blue-500 mx-auto mb-4"></div> <p>Checking WebLLM availability...</p> </div> </div> ); }
if (!isReady) { return ( <div className="flex items-center justify-center min-h-screen"> <div className="max-w-md text-center p-8 border rounded-lg"> <h2 className="text-2xl font-bold mb-4">WebLLM Extension Required</h2> <p className="mb-4"> This application requires the WebLLM extension to function. </p> <ol className="text-left mb-6 space-y-2"> <li>1. Install the WebLLM extension</li> <li>2. Configure your preferred AI provider</li> <li>3. Reload this page</li> </ol> <a href="https://chrome.google.com/webstore" target="_blank" rel="noopener noreferrer" className="inline-block bg-blue-500 text-white px-6 py-2 rounded-lg hover:bg-blue-600" > Install Extension </a> </div> </div> ); }
return <>{children}</>;}Usage in Layout
Wrap your entire app with the availability check:
import { WebLLMCheck } from '@/components/WebLLMCheck';
export default function RootLayout({ children,}: { children: React.ReactNode;}) { return ( <html lang="en"> <body> <WebLLMCheck> {children} </WebLLMCheck> </body> </html> );}Next Steps
- Explore Model Preferences for advanced routing
- Learn about Provider Management
- Check the full Vercel AI Provider Guide