diff --git a/fern/examples/docs-agent.mdx b/fern/assistants/examples/docs-agent.mdx
similarity index 99%
rename from fern/examples/docs-agent.mdx
rename to fern/assistants/examples/docs-agent.mdx
index 1b21f4ae..6326858c 100644
--- a/fern/examples/docs-agent.mdx
+++ b/fern/assistants/examples/docs-agent.mdx
@@ -1,7 +1,7 @@
---
title: Documentation agent
subtitle: Build a voice assistant that answers questions about your docs
-slug: examples/docs-agent
+slug: assistants/examples/docs-agent
---
Try our live implementation using the voice widget in the bottom-right corner of this page.
diff --git a/fern/examples/inbound-support.mdx b/fern/assistants/examples/inbound-support.mdx
similarity index 97%
rename from fern/examples/inbound-support.mdx
rename to fern/assistants/examples/inbound-support.mdx
index d8628ade..ab8fbac3 100644
--- a/fern/examples/inbound-support.mdx
+++ b/fern/assistants/examples/inbound-support.mdx
@@ -1,7 +1,7 @@
---
title: Inbound customer support
subtitle: Build a banking customer support agent that can process inbound phone calls and assist with common banking issues.
-slug: examples/inbound-support
+slug: assistants/examples/inbound-support
description: Build a voice AI banking support agent with tools for account lookup, balance and transaction retrieval.
---
@@ -33,10 +33,10 @@ We will be creating a customer support agent for VapiBank, a bank that wants to
-
+
-
+
@@ -48,7 +48,7 @@ We will be creating a customer support agent for VapiBank, a bank that wants to
2. Click **Choose file** and upload both `accounts.csv` and `transactions.csv`
3. Note the file IDs for use in creating tools
-
+
```typescript
@@ -132,7 +132,7 @@ We will be creating a customer support agent for VapiBank, a bank that wants to
- Select `Blank Template` as your starting point.
- Change assistant name to `Tom`.
-
+
@@ -527,7 +527,7 @@ You have access to CSV files with account and transaction data:
Click `Talk to Assistant` to test it out.
-
+
```typescript
@@ -662,7 +662,7 @@ You have access to CSV files with account and transaction data:
- In the expanded accordion, add `get_balance` and `get_recent_transactions` tools.
- Click `Publish` to save your changes.
-
+
@@ -915,7 +915,7 @@ You have access to CSV files with account and transaction data:
- Under `Inbound Settings` find `Assistant` dropdown and select `Tom` from the list.
- Changes are saved automatically.
-
+
@@ -998,7 +998,7 @@ You have access to CSV files with account and transaction data:
- Accept the generated test case.
- Click `Run Test Suite` to execute the tests.
-
+
Click `Run Tests` to execute the tests.
diff --git a/fern/examples/voice-widget.mdx b/fern/assistants/examples/voice-widget.mdx
similarity index 99%
rename from fern/examples/voice-widget.mdx
rename to fern/assistants/examples/voice-widget.mdx
index 1623631a..1f2fb4d1 100644
--- a/fern/examples/voice-widget.mdx
+++ b/fern/assistants/examples/voice-widget.mdx
@@ -3,7 +3,7 @@ title: Web Snippet
subtitle: >-
Easily integrate the Vapi Voice Widget into your website for enhanced user
interaction.
-slug: examples/voice-widget
+slug: assistants/examples/voice-widget
---
Improve your website's user interaction with the Vapi Voice Widget. This robust tool enables your visitors to engage with a voice assistant for support and interaction, offering a smooth and contemporary way to connect with your services.
diff --git a/fern/docs.yml b/fern/docs.yml
index 5b3214ce..b900b683 100644
--- a/fern/docs.yml
+++ b/fern/docs.yml
@@ -104,27 +104,9 @@ navigation:
- page: Web calls
icon: fa-light fa-browser
path: quickstart/web.mdx
- - section: How Vapi works
- icon: fa-light fa-diagram-project
- contents:
- - page: Core models
- path: quickstart.mdx
- icon: fa-light fa-microchip-ai
- - page: Orchestration models
- icon: fa-light fa-network-wired
- path: how-vapi-works.mdx
- - section: Examples
- icon: fa-light fa-code
- contents:
- - page: Inbound support
- path: examples/inbound-support.mdx
- icon: fa-light fa-phone-volume
- - page: Voice widget
- path: examples/voice-widget.mdx
- icon: fa-light fa-window-maximize
- - page: Documentation agent
- path: examples/docs-agent.mdx
- icon: fa-light fa-microphone
+ - page: Example Library
+ icon: fa-light fa-book-open
+ path: examples.mdx
- section: Assistants
contents:
@@ -214,6 +196,18 @@ navigation:
path: customization/custom-llm/using-your-server.mdx
- page: Tool calling integration
path: customization/custom-llm/tool-calling-integration.mdx
+ - section: Examples
+ icon: fa-light fa-code
+ contents:
+ - page: Inbound support
+ path: assistants/examples/inbound-support.mdx
+ icon: fa-light fa-phone-volume
+ - page: Voice widget
+ path: assistants/examples/voice-widget.mdx
+ icon: fa-light fa-window-maximize
+ - page: Documentation agent
+ path: assistants/examples/docs-agent.mdx
+ icon: fa-light fa-microphone
- section: Workflows
contents:
@@ -382,6 +376,15 @@ navigation:
- page: FAQ
path: faq.mdx
icon: fa-light fa-question
+ - section: How Vapi works
+ icon: fa-light fa-diagram-project
+ contents:
+ - page: Core models
+ path: quickstart.mdx
+ icon: fa-light fa-microchip-ai
+ - page: Orchestration models
+ icon: fa-light fa-network-wired
+ path: how-vapi-works.mdx
- section: Integrations
collapsed: true
icon: fa-light fa-link
@@ -575,29 +578,29 @@ navigation:
layout:
- section: Client SDKs
contents:
- - page: Web
- path: sdk/web.mdx
- icon: fa-light fa-globe
+ - link: Web SDK
+ href: https://github.com/VapiAI/web
+ icon: fa-brands fa-js
- link: iOS
href: https://github.com/VapiAI/ios
- icon: fa-brands fa-app-store-ios
+ icon: fa-brands fa-apple
- link: Flutter
href: https://github.com/VapiAI/flutter
- icon: fa-brands fa-google
+ icon: fa-light fa-mobile
- link: React Native
- href: https://github.com/VapiAI/react-native-sdk
+ href: https://github.com/VapiAI/react-native
icon: fa-brands fa-react
- link: Python
href: https://github.com/VapiAI/python
icon: fa-brands fa-python
- section: Server SDKs
contents:
- - link: Python
- href: https://github.com/VapiAI/server-sdk-python
- icon: fa-brands fa-python
- link: TypeScript
href: https://github.com/VapiAI/server-sdk-typescript
icon: fa-brands fa-js
+ - link: Python
+ href: https://github.com/VapiAI/server-sdk-python
+ icon: fa-brands fa-python
- link: Java
href: https://github.com/VapiAI/server-sdk-java
icon: fa-brands fa-java
@@ -606,7 +609,7 @@ navigation:
icon: fa-light fa-gem
- link: C#
href: https://github.com/VapiAI/server-sdk-csharp
- icon: fa-light fa-hashtag
+ icon: fa-light fa-brackets-curly
- link: Go
href: https://github.com/VapiAI/server-sdk-go
icon: fa-brands fa-golang
@@ -811,3 +814,9 @@ redirects:
destination: /workflows/quickstart
- source: /web-integration
destination: /web
+ - source: /examples/inbound-support
+ destination: /assistants/examples/inbound-support
+ - source: /examples/voice-widget
+ destination: /assistants/examples/voice-widget
+ - source: /examples/docs-agent
+ destination: /assistants/examples/docs-agent
diff --git a/fern/examples.mdx b/fern/examples.mdx
new file mode 100644
index 00000000..62a83184
--- /dev/null
+++ b/fern/examples.mdx
@@ -0,0 +1,64 @@
+---
+title: Examples Library
+subtitle: Explore complete examples with step-by-step instructions to build with Vapi
+slug: examples
+---
+
+
+
+
+
+
+
+ Built with Assistants
+
+ Build a docs agent that can answer questions about your documentation
+
+
+
+
+
+
+ Built with Assistants
+
+ Build a technical support assistant that remembers where you left off between calls
+
+
+
+
+
+
+ Built with Assistants
+
+ Easily integrate the Vapi Voice Widget into your website for enhanced user interaction
+
+
+
+
+
+
+ Built with Workflows
+
+ Build an appointment scheduling assistant that can schedule appointments for a barbershop
+
+
+
+
+
+
+ Built with Workflows
+
+ Build a medical triage and scheduling assistant that can triage patients and schedule appointments for a clinic
+
+
+
+
+
+
+ Built with Workflows
+
+ Build an ecommerce order management assistant that can track orders and process returns
+
+
+
+
+
+
+ Built with Workflows
+
+ Create an outbound sales agent that can schedule appointments automatically
+
+
diff --git a/fern/overview.mdx b/fern/overview.mdx
index 82a5ea1e..1ee54c95 100644
--- a/fern/overview.mdx
+++ b/fern/overview.mdx
@@ -42,9 +42,9 @@ Each layer is highly customizable and we support dozens of models across STT, LL
The easiest way to start with Vapi. Build a voice agent in 5 minutes.
- Integrate voice calls into your web application.
+ title="Web Calls"
+ href="/quickstart/web">
+ Quickly get started making web calls with our SDKs.
@@ -58,7 +58,7 @@ Explore end-to-end examples for some common voice workflows:
icon="phone"
iconType="solid"
color="#f5ac4c"
- href="/examples/inbound-support"
+ href="/assistants/examples/inbound-support"
>
We'll build a customer support agent that can process inbound phone calls and assist with common banking issues.
diff --git a/fern/quickstart/introduction.mdx b/fern/quickstart/introduction.mdx
index 543763c4..2b9574af 100644
--- a/fern/quickstart/introduction.mdx
+++ b/fern/quickstart/introduction.mdx
@@ -121,7 +121,7 @@ Vapi offers two main primitives for building voice agents, each designed for dif
title="Customer Support"
icon="headset"
iconType="solid"
- href="/examples/inbound-support"
+ href="/assistants/examples/inbound-support"
>
@@ -156,9 +156,6 @@ Vapi offers two main primitives for building voice agents, each designed for dif
Handle booking requests, check availability, and confirm appointments with conditional routing.
-
-
-
+
+
+
+
+ See our collection of examples covering a wide range of use cases.
+
diff --git a/fern/quickstart/web.mdx b/fern/quickstart/web.mdx
index 3c1f29b7..b83834ef 100644
--- a/fern/quickstart/web.mdx
+++ b/fern/quickstart/web.mdx
@@ -1,200 +1,1270 @@
---
-title: Web integration
-subtitle: Integrate voice calls into your web application
+title: Web calls
+subtitle: Build voice interfaces and backend integrations using Vapi's Web and Server SDKs
slug: quickstart/web
---
## Overview
-This guide shows you how to integrate live, two-way voice conversations into any web application. Use Vapi with plain JavaScript, React, Next.js, or any other web framework to add voice capabilities directly to your app.
+Build powerful voice applications that work across web browsers, mobile apps, and backend systems. This guide covers both client-side voice interfaces and server-side call management using Vapi's comprehensive SDK ecosystem.
-**In this guide, you'll learn to:**
-- Install and configure the Vapi Web SDK
-- Connect to existing assistants from your dashboard
-- Handle call lifecycle events in your application
+**In this quickstart, you'll learn to:**
+- Create real-time voice interfaces for web and mobile
+- Build automated outbound and inbound call systems
+- Handle events and webhooks for call management
+- Implement voice widgets and backend integrations
-See the full Next.js [demo here on v0](https://v0.dev/chat/vapi-quickstart-nextjs-z3lv02T7Dd5). To try it live and make edits, follow these steps:
+## Choose your integration approach
-1. Fork the app in v0
-2. Go to settings → environment variables
-3. Create a new environment variable called `NEXT_PUBLIC_VAPI_API_KEY`
-4. Add your [public API key from the dashboard](https://dashboard.vapi.ai/org/api-keys)
+
+
+ **Best for:** User-facing applications, voice widgets, mobile apps
+ - Browser-based voice assistants and widgets
+ - Real-time voice conversations
+ - Mobile voice applications (iOS, Android, React Native, Flutter)
+ - Direct user interaction with assistants
+
+
+ **Best for:** Backend automation, bulk operations, system integrations
+ - Automated outbound call campaigns (see the sketch below)
+ - Inbound call routing and management
+ - CRM integrations and bulk operations
+ - Webhook processing and real-time events
+
+
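+
+ If you are leaning toward the server-side approach, here is a minimal sketch of what an outbound call looks like with the TypeScript Server SDK. The package name (`@vapi-ai/server-sdk`), the `VapiClient` constructor, and the `calls.create` method are assumptions to verify against the [server-sdk-typescript](https://github.com/VapiAI/server-sdk-typescript) repository; replace the placeholder IDs with values from your dashboard.
+
+ ```typescript
+ // Minimal server-side sketch: create one outbound call with an existing assistant.
+ // Client and method names are assumptions; check the server-sdk-typescript repo.
+ import { VapiClient } from '@vapi-ai/server-sdk';
+
+ // Server-side requests use your private API key; keep it out of browser code.
+ const client = new VapiClient({ token: 'YOUR_PRIVATE_API_KEY' });
+
+ async function placeOutboundCall(): Promise<void> {
+   const call = await client.calls.create({
+     assistantId: 'YOUR_ASSISTANT_ID',      // assistant configured in the dashboard
+     phoneNumberId: 'YOUR_PHONE_NUMBER_ID', // a phone number connected to Vapi
+     customer: { number: '+15551234567' },  // destination number in E.164 format
+   });
+   console.log('Created call:', call);
+ }
+
+ placeOutboundCall().catch(console.error);
+ ```
+
+ The next section walks through the client-side setup in detail.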
-## Installation
+## Web voice interfaces
-
- ### Install the SDK
-
+Build browser-based voice assistants and widgets for real-time user interaction.
- ### Import the SDK
-
-
-
-## Integration approaches
+### Installation and setup
-
-
-
- First, create and configure an assistant in the [Vapi dashboard](https://dashboard.vapi.ai). Follow the [Phone calls quickstart](/quickstart/phone) to set up your assistant.
-
-
- Copy your assistant's ID from the dashboard:
-
-
-
-
-
-
- ```javascript
- // Start a call with your pre-configured assistant
- vapi.start("YOUR_ASSISTANT_ID_FROM_THE_DASHBOARD");
- ```
-
-
- Customize settings or pass template variables at runtime:
-
- ```javascript
- const assistantOverrides = {
- transcriber: {
- provider: "deepgram",
- model: "nova-2",
- language: "en-US",
- },
- recordingEnabled: false,
- variableValues: {
- customerName: "John",
- accountType: "premium"
+
+ Build browser-based voice interfaces:
+
+
+ ```bash title="npm"
+ npm install @vapi-ai/web
+ ```
+
+ ```bash title="yarn"
+ yarn add @vapi-ai/web
+ ```
+
+ ```bash title="pnpm"
+ pnpm add @vapi-ai/web
+ ```
+
+ ```bash title="bun"
+ bun add @vapi-ai/web
+ ```
+
+
+ ```typescript
+ import Vapi from '@vapi-ai/web';
+
+ const vapi = new Vapi('YOUR_PUBLIC_API_KEY');
+
+ // Start voice conversation
+ vapi.start('YOUR_ASSISTANT_ID');
+
+ // Listen for events
+ vapi.on('call-start', () => console.log('Call started'));
+ vapi.on('call-end', () => console.log('Call ended'));
+ vapi.on('message', (message) => {
+ if (message.type === 'transcript') {
+ console.log(`${message.role}: ${message.transcript}`);
+ }
+ });
+ ```
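+
+ The `start` method also accepts an optional second argument with per-call overrides, which is useful for passing template variables or adjusting transcription settings at runtime. The snippet below is a minimal sketch; double-check the available override fields against the Web SDK reference.
+
+ ```typescript
+ // Per-call overrides: a minimal sketch. Verify field names against the Web SDK reference.
+ const assistantOverrides = {
+   transcriber: {
+     provider: 'deepgram',
+     model: 'nova-2',
+     language: 'en-US',
+   },
+   recordingEnabled: false,
+   variableValues: {
+     customerName: 'John',   // template variables available to your assistant's prompts
+     accountType: 'premium',
+   },
+ };
+
+ vapi.start('YOUR_ASSISTANT_ID', assistantOverrides);
+ ```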
+
+
+
+ Build voice-enabled mobile apps:
+
+ ```bash
+ npm install @vapi-ai/react-native
+ ```
+
+ ```jsx
+ import { VapiProvider, useVapi } from '@vapi-ai/react-native';
+
+ const VoiceApp = () => {
+ const { start, stop, isConnected } = useVapi();
+
+ return (
+
+
+ );
+ };
+
+ export default () => (
+
+
+
+ );
+ ```
+
+
+
+ Create voice apps with Flutter:
+
+ ```yaml
+ dependencies:
+ vapi_flutter: ^1.0.0
+ ```
+
+ ```dart
+ import 'package:vapi_flutter/vapi_flutter.dart';
+
+ class VoiceWidget extends StatefulWidget {
+ @override
+ _VoiceWidgetState createState() => _VoiceWidgetState();
+ }
+
+ class _VoiceWidgetState extends State<VoiceWidget> {
+ final VapiClient _vapi = VapiClient('YOUR_PUBLIC_API_KEY');
+ bool _isConnected = false;
+
+ @override
+ Widget build(BuildContext context) {
+ return ElevatedButton(
+ onPressed: () {
+ if (_isConnected) {
+ _vapi.stop();
+ } else {
+ _vapi.start('YOUR_ASSISTANT_ID');
+ }
+ // Toggle the button label; ideally drive this from SDK call events instead.
+ setState(() => _isConnected = !_isConnected);
},
+ child: Text(_isConnected ? 'End Call' : 'Start Call'),
+ );
+ }
+ }
+ ```
+
+
+
+ Build native iOS voice apps:
+
+ ```swift
+ import VapiSDK
+
+ class VoiceViewController: UIViewController {
+ private let vapi = VapiClient(apiKey: "YOUR_PUBLIC_API_KEY")
+
+ @IBAction func startCallTapped(_ sender: UIButton) {
+ vapi.start(assistantId: "YOUR_ASSISTANT_ID")
+ }
+
+ override func viewDidLoad() {
+ super.viewDidLoad()
+ vapi.delegate = self
+ }
+ }
+
+ extension VoiceViewController: VapiClientDelegate {
+ func vapiCallDidStart() {
+ print("Call started")
+ }
+
+ func vapiCallDidEnd() {
+ print("Call ended")
+ }
+ }
+ ```
+
+
+
+### Voice widget implementation
+
+Create a voice widget for your website:
+
+
+
+ This is the fastest way to get started. Copy this snippet into your website:
+
+ ```html
+
+ ```
+
- vapi.start("YOUR_ASSISTANT_ID", assistantOverrides);
- ```
-
-
-
-
-
-
- Create an assistant configuration directly in your code:
-
- ```javascript
- const assistantOptions = {
- name: "Customer Support Assistant",
- firstMessage: "Hi! How can I help you today?",
- transcriber: {
- provider: "deepgram",
- model: "nova-2",
- language: "en-US",
- },
- voice: {
- provider: "playht",
- voice_id: "jennifer",
- },
- model: {
- provider: "openai",
- model: "gpt-4o",
- messages: [
- {
- role: "system",
- content: `You are a helpful customer support assistant. Keep responses brief and friendly since this is a voice conversation.`,
- },
- ],
- },
+
+ Build a complete React voice widget:
+
+ ```tsx
+ import React, { useState, useEffect } from 'react';
+ import Vapi from '@vapi-ai/web';
+
+ interface VapiWidgetProps {
+ apiKey: string;
+ assistantId: string;
+ config?: Record<string, unknown>;
+ }
+
+ const VapiWidget: React.FC<VapiWidgetProps> = ({
+ apiKey,
+ assistantId,
+ config = {}
+ }) => {
+ const [vapi, setVapi] = useState<Vapi | null>(null);
+ const [isConnected, setIsConnected] = useState(false);
+ const [isSpeaking, setIsSpeaking] = useState(false);
+ const [transcript, setTranscript] = useState<Array<{ role: string; text: string }>>([]);
+
+ useEffect(() => {
+ const vapiInstance = new Vapi(apiKey);
+ setVapi(vapiInstance);
+
+ // Event listeners
+ vapiInstance.on('call-start', () => {
+ console.log('Call started');
+ setIsConnected(true);
+ });
+
+ vapiInstance.on('call-end', () => {
+ console.log('Call ended');
+ setIsConnected(false);
+ setIsSpeaking(false);
+ });
+
+ vapiInstance.on('speech-start', () => {
+ console.log('Assistant started speaking');
+ setIsSpeaking(true);
+ });
+
+ vapiInstance.on('speech-end', () => {
+ console.log('Assistant stopped speaking');
+ setIsSpeaking(false);
+ });
+
+ vapiInstance.on('message', (message) => {
+ if (message.type === 'transcript') {
+ setTranscript(prev => [...prev, {
+ role: message.role,
+ text: message.transcript
+ }]);
+ }
+ });
+
+ vapiInstance.on('error', (error) => {
+ console.error('Vapi error:', error);
+ });
+
+ return () => {
+ vapiInstance?.stop();
+ };
+ }, [apiKey]);
+
+ const startCall = () => {
+ if (vapi) {
+ vapi.start(assistantId);
+ }
+ };
+
+ const endCall = () => {
+ if (vapi) {
+ vapi.stop();
+ }
+ };
+
+ return (
+