I have the following function, which should do the following:
- Take a ReadableStream
- Read it chunk by chunk
- Before the last chunk (openai indicates this with a "DONE" string), an extra chunk with my extensionPayload should be added.
The issue is that the data of the last chunk from the original/openai stream + my extension data are merged into one chunk. But to process the chunks in the client, I need them to be separate chunks.
export async function extendOpenAIStream(
openaiStream: ReadableStream<Uint8Array>,
extensionPayload: JSONValue
) {
const encoder = new TextEncoder()
const decoder = new TextDecoder()
const reader = openaiStream.getReader()
const stream = new ReadableStream({
cancel() {
reader.cancel()
},
async start(controller) {
while (true) {
const { done, value } = await reader.read()
const dataString = decoder.decode(value)
// https://beta.openai.com/docs/api-reference/completions/create#completions/create-stream
if (done || dataString.includes('[DONE]')) {
// Enque our extension
const extendedValue = encoder.encode(
`data: ${JSON.stringify(extensionPayload)}\n\n`
)
controller.enqueue(extendedValue)
// Enque the original chunk
controller.enqueue(value)
// Close the stream
controller.close()
break
}
controller.enqueue(value)
}
},
})
return stream
}
Expected chunks (separated chunks):
data: {"extensionPayload": {...}}
data: {"id":"...,"object":"chat.completion.chunk","created":1684486791,"model":"gpt-3.5-turbo-0301","choices":[{"delta":{},"index":0,"finish_reason":"stop"}]}
data: [DONE]
Actual chunks (merged into one chunk):
data: {"extensionPayload": {...}}
data: {"id":"...,"object":"chat.completion.chunk","created":1684486791,"model":"gpt-3.5-turbo-0301","choices":[{"delta":{},"index":0,"finish_reason":"stop"}]}
data: [DONE]