zod-stream
A client for Node or the browser to generate and consume streaming JSON.
Installation
with pnpm
$ pnpm add zod-stream zod openai
with npm
$ npm install zod-stream zod openai
with bun
$ bun add zod-stream zod openai
Basic Usage
Creating an endpoint that calls OpenAI with a defined response model (Next.js app router, route handler example):
/api/get-stream
import { OAIStream } from "zod-stream/OAIStream"
import { withResponseModel } from "zod-stream/response-model"
import OpenAI from "openai"
import { z } from "zod"

const oai = new OpenAI({
  apiKey: process.env["OPENAI_API_KEY"] ?? undefined,
  organization: process.env["OPENAI_ORG_ID"] ?? undefined
})

export async function POST(request: Request) {
  const { messages } = await request.json()

  // Map the Zod schema onto the OpenAI request params for the chosen mode.
  const params = withResponseModel({
    response_model: { schema: z.object({ content: z.string() }), name: "Content response" },
    params: {
      messages,
      model: "gpt-4"
    },
    mode: "TOOLS"
  })

  const extractionStream = await oai.chat.completions.create({
    ...params,
    stream: true
  })

  // OAIStream wraps the OpenAI completion stream in a ReadableStream that can
  // be returned directly from the route handler.
  return new Response(
    OAIStream({
      res: extractionStream
    })
  )
}
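
Before reaching for the typed client, the route can be smoke-tested with a plain fetch and the response body read directly. This is only a sketch: it assumes the handler above is mounted at /api/get-stream and that the streamed body is text chunks of the partial JSON being generated.

// Sketch only: reads the raw stream with standard web APIs.
const response = await fetch("/api/get-stream", {
  method: "POST",
  body: JSON.stringify({ messages: [{ role: "user", content: "Say hello" }] })
})

const reader = response.body!.getReader()
const decoder = new TextDecoder()

while (true) {
  const { done, value } = await reader.read()
  if (done) break
  // Each chunk should be a fragment of the JSON the model is producing.
  console.log(decoder.decode(value, { stream: true }))
}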
Consuming the structured stream elsewhere, for example in the browser:
import ZodStream from "zod-stream"
import { z } from "zod"

const client = new ZodStream()

const stream = await client.create({
  completionPromise: async () => {
    const response = await fetch("/api/get-stream", {
      body: JSON.stringify({ messages: [] }),
      method: "POST"
    })

    return response.body
  },
  // should match the model expected to be returned by the completion
  response_model: {
    schema: z.object({
      content: z.string()
    })
  }
})

for await (const chunk of stream) {
  console.log(chunk) // safe to parse partial JSON
}
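
Because each chunk is a partial version of the final object, a richer schema streams in field by field, which lets a UI render whatever has arrived so far. A minimal sketch, assuming a variant of the /api/get-stream route whose server-side response_model uses the same multi-field schema (ArticleSummary and its fields are made up for illustration):

import ZodStream from "zod-stream"
import { z } from "zod"

// Hypothetical multi-field schema; the server's response_model would need the same shape.
const ArticleSummary = z.object({
  title: z.string(),
  summary: z.string(),
  tags: z.array(z.string())
})

const client = new ZodStream()

const stream = await client.create({
  completionPromise: async () => {
    const response = await fetch("/api/get-stream", {
      body: JSON.stringify({ messages: [{ role: "user", content: "Summarize this article..." }] }),
      method: "POST"
    })

    return response.body
  },
  response_model: { schema: ArticleSummary }
})

for await (const partial of stream) {
  // Early chunks may only carry the title; later chunks fill in summary and tags.
  console.log({
    title: partial.title ?? "(pending)",
    summary: partial.summary ?? "(pending)",
    tags: partial.tags ?? []
  })
}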