Add comprehensive streaming responses documentation to Custom HTTP API Endpoints #3192
@@ -175,6 +175,238 @@ export const fooBar: FooBar = async (req, res, context) => {
The object `context.entities.Task` exposes `prisma.task` from [Prisma's CRUD API](https://www.prisma.io/docs/reference/tools-and-interfaces/prisma-client/crud).

## Streaming Responses

You can use streaming responses to send data to the client in chunks as it becomes available. This is useful for:

- **LLM responses** - Stream AI-generated content as it's produced
- **Long-running processes** - Show progress updates in real-time (see the sketch right after this list)
- **Large datasets** - Send data incrementally to avoid timeouts
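
For instance, a long-running process can report its progress line by line instead of making the client wait for one final response. The sketch below is illustrative only: it assumes a hypothetical `api jobProgress` declaration in `main.wasp` (which would generate the `JobProgress` type), and the fake work loop stands in for your actual processing. The full, real example follows in the next subsection.

```ts title="src/jobProgress.ts" auto-js
import type { JobProgress } from "wasp/server/api";

// Hypothetical handler for an `api jobProgress` declaration with
// `httpRoute: (POST, "/api/job-progress")`. All names here are made up for illustration.
export const reportJobProgress: JobProgress = async (req, res) => {
  // Tell the client we're sending plain text in chunks.
  res.setHeader("Content-Type", "text/plain; charset=utf-8");
  res.setHeader("Transfer-Encoding", "chunked");

  const totalSteps = 5;
  for (let step = 1; step <= totalSteps; step++) {
    // Stand-in for a unit of real work (e.g. processing a batch of records).
    await new Promise((resolve) => setTimeout(resolve, 1000));
    // Each write is sent to the client as it happens instead of being buffered until the end.
    res.write(`Completed step ${step}/${totalSteps}\n`);
  }

  res.end();
};
```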

### Creating a Streaming API

To create a streaming API, write a function that uses Express response methods like `res.write()` and `res.end()`:

```wasp title="main.wasp"
api streamingText {
  httpRoute: (POST, "/api/streaming-example"),
  fn: import { getStreamingText } from "@src/streaming",
}
```

<small>
  Don't forget to set up the CORS middleware. See the [section explaining CORS](#making-sure-cors-works) for details.
</small>

```ts title="src/streaming.ts" auto-js
import OpenAI from "openai";
import type { StreamingText } from "wasp/server/api";

const client = new OpenAI({
  apiKey: process.env.OPENAI_API_KEY,
});

export const getStreamingText: StreamingText<
  never,
  string,
  { message: string }
> = async (req, res) => {
  const { message } = req.body;

  // Set appropriate headers for streaming.
  res.setHeader("Content-Type", "text/plain; charset=utf-8");
  res.setHeader("Transfer-Encoding", "chunked");

  const stream = await client.responses.create({
    model: "gpt-5",
    input: `Funny response to "${message}"`,
    stream: true,
  });

  for await (const chunk of stream) {
    if (chunk.type === "response.output_text.delta") {
      // Write each chunk to the response as it arrives.
      res.write(chunk.delta);
    }
  }

  // End the response.
  res.end();
};
```
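
As the note above mentions, the API route also needs CORS configured so the browser can call it from your client app. The usual approach is to declare an `apiNamespace` covering the route in `main.wasp` and point it at a middleware config function. Here's a rough sketch of such a function, based on the pattern described in the CORS section (the function name is made up here, so check that section for the exact setup):

```ts title="src/streaming.ts" auto-js
import cors from "cors";
import { config, type MiddlewareConfigFn } from "wasp/server";

// Hypothetical middleware config fn, referenced from an `apiNamespace` declaration
// in main.wasp. It allows cross-origin requests from the client app's URL.
export const getStreamingApiMiddlewareConfig: MiddlewareConfigFn = (middlewareConfig) => {
  middlewareConfig.set("cors", cors({ origin: [config.frontendUrl] }));
  return middlewareConfig;
};
```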

### Consuming Streaming Responses

#### Using the Fetch API (Recommended)

Here's an example showing how to consume streaming responses on the client:

```tsx title="src/StreamingPage.tsx" auto-js
import { useEffect, useState } from "react";
import { config } from "wasp/client";
import { getSessionId } from "wasp/client/api";

export function StreamingPage() {
  const { response } = useTextStream("/api/streaming-example", {
    message: "Best Office episode?",
  });

  return (
    <div>
      <h1>Streaming Example</h1>
      <pre>{response}</pre>
    </div>
  );
}

function useTextStream(path: string, payload: { message: string }) {
  const [response, setResponse] = useState("");

  useEffect(() => {
    const controller = new AbortController();

    fetchStream(
      path,
      payload,
      (chunk) => {
        // Append each incoming chunk to the text shown so far.
        setResponse((prev) => prev + chunk);
      },
      controller.signal
    );

    return () => {
      controller.abort();
    };
  }, [path]);

  return { response };
}

async function fetchStream(
  path: string,
  payload: { message: string },
  onData: (data: string) => void,
  signal: AbortSignal
) {
  const sessionId = getSessionId();

  try {
    const response = await fetch(config.apiUrl + path, {
      method: "POST",
      headers: {
        "Content-Type": "application/json",
        ...(sessionId && { Authorization: `Bearer ${sessionId}` }),
      },
      body: JSON.stringify(payload),
      signal,
    });

    if (!response.ok) {
      throw new Error(`HTTP error! status: ${response.status}`);
    }

    if (response.body === null) {
      throw new Error("Stream body is null");
    }

    // Decode the incoming byte stream into text and read it chunk by chunk.
    const stream = response.body.pipeThrough(new TextDecoderStream());
    const reader = stream.getReader();
    while (true) {
      const { done, value } = await reader.read();
      if (done) {
        break;
      }
      onData(value);
    }
  } catch (error: unknown) {
    if (error instanceof Error) {
      if (error.name === "AbortError") {
        // Fetch was aborted, no need to log an error.
        return;
      }
      console.error("Fetch error:", error.message);
    } else {
      throw error;
    }
  }
}
```

#### Using Axios

Axios does not natively support streaming responses, but you can simulate it by leveraging the `onDownloadProgress` callback. On the other hand, you don't have to provide the session token manually, as the Axios wrapper from `wasp/client/api` does that for you.

Here's how you can do it:

```tsx title="src/AxiosStreamingPage.tsx" auto-js
import { useEffect, useState } from "react";
import { api } from "wasp/client/api";

export function StreamingPage() {
  const { response } = useAxiosTextStream("/api/streaming-example", {
    message: "Best Office episode?",
  });

  return (
    <div>
      <h1>Axios Streaming</h1>
      <pre>{response}</pre>
    </div>
  );
}

function useAxiosTextStream(path: string, payload: { message: string }) {
  const [response, setResponse] = useState("");

  useEffect(() => {
    const controller = new AbortController();

    fetchAxiosStream(
      path,
      payload,
      (data) => {
        // `data` is the full response text received so far, so replace the state.
        setResponse(data);
      },
      controller.signal
    );

    return () => {
      controller.abort();
    };
  }, [path]);

  return { response };
}

async function fetchAxiosStream(
  path: string,
  payload: { message: string },
  onData: (data: string) => void,
  signal: AbortSignal
) {
  try {
    return await api.post(path, payload, {
      responseType: "stream",
      signal,
      onDownloadProgress: (progressEvent) => {
        // The underlying XHR exposes everything received so far in `responseText`.
        const xhr = progressEvent.event.target;
        onData(xhr.responseText);
      },
    });
  } catch (error: unknown) {
    if (error instanceof Error) {
      if (error.name === "CanceledError") {
        // Request was cancelled, no action needed.
      } else {
        console.error("Fetch error:", error);
      }
    } else {
      throw error;
    }
  }
}
```

## API Reference

```wasp title="main.wasp"