feat: support streaming

This commit is contained in:
Joel
2023-06-10 14:28:27 +08:00
parent cfd0c9532f
commit a8be513d4b
4 changed files with 4 additions and 31 deletions

View File

@ -1,6 +1,5 @@
import { type NextRequest } from 'next/server'
import { client, getInfo } from '@/app/api/utils/common'
import { OpenAIStream } from '@/app/api/utils/stream'
export async function POST(request: NextRequest) {
const body = await request.json()
@ -12,6 +11,5 @@ export async function POST(request: NextRequest) {
} = body
const { user } = getInfo(request)
const res = await client.createChatMessage(inputs, query, user, responseMode, conversationId)
const stream = await OpenAIStream(res as any)
return new Response(stream as any)
return new Response(res.data as any)
}

View File

@ -1,25 +0,0 @@
/**
 * Wraps a fetch-style response (anything exposing a `body` with `getReader()`)
 * in a new ReadableStream that relays each chunk to the consumer as it
 * arrives, enabling incremental (streaming) delivery to the client.
 *
 * @param res - response-like object whose `body` is a readable byte stream
 * @returns a ReadableStream emitting the upstream chunks in order
 *
 * https://developer.mozilla.org/en-US/docs/Web/API/Streams_API/Using_readable_streams
 */
export async function OpenAIStream(res: { body: any }) {
  const reader = res.body.getReader()
  return new ReadableStream({
    // Pull chunks from the upstream reader until it signals completion.
    // A rejected read propagates out of start() and errors the stream,
    // matching the behavior of the original recursive pump().
    async start(controller) {
      for (;;) {
        const { done, value } = await reader.read()
        if (done) {
          // No more data to consume — close our side of the stream.
          controller.close()
          return
        }
        controller.enqueue(value)
      }
    },
    // Fix: propagate consumer-side cancellation to the underlying reader so
    // the upstream connection is released instead of leaking (the original
    // stream had no cancel handler).
    cancel(reason: unknown) {
      return reader.cancel(reason)
    },
  })
}