
I've heard that you can get streamed answers from GPT-4, but I can't find much information about this. If I ask ChatGPT itself, it tells me that, as of its 2021 knowledge cutoff, there is no such feature. Can I use streaming, and how?

peter_the_oak
  • I find it interesting that ChatGPT didn't have correct information about itself, which makes me question the quality and authority of answers from ChatGPT. – Rob Jul 10 '23 at 14:27
  • @Rob Despite all the legends, IMO you should always check what you get from ChatGPT. It's our responsibility. – peter_the_oak Jul 10 '23 at 14:31

1 Answer


Yes, you can.

First, the API reference is here: https://platform.openai.com/docs/api-reference/chat/create

Scroll down to "stream". It's a boolean.
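When you set stream to true, the API does not send back one JSON document. Instead it streams server-sent events, each carrying a small "delta" of the answer, and finishes with a [DONE] marker. A chunk looks roughly like this (the id and content values here are only illustrative):

data: {"id":"chatcmpl-abc123","object":"chat.completion.chunk","created":1700000000,"model":"gpt-4","choices":[{"index":0,"delta":{"content":"Hello"},"finish_reason":null}]}

data: [DONE]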

Second, here is an out-of-the-box JavaScript example with the most basic tools. Ugly, but it works. If you run it, you will see the deltas drop in and get printed to the console. Have fun.

(function() {
// Define the API endpoint
const apiEndpoint = "https://api.openai.com/v1/chat/completions";
const headers = {
  "Content-Type": "application/json",
  "Authorization": "Bearer xxxxx", // replace with your actual OpenAI key
};

// Define the data to be sent
const data = {
  model: "gpt-4",
  messages: [{
    role: "user", 
    content: "Tell me more about XYZ." // <<<<< This is your prompt.
  }],
  stream: true  // <<<< Peek-a-boo
};

// Make the API request
fetch(apiEndpoint, {
  method: "POST",
  headers: headers,
  body: JSON.stringify(data)
})
  .then((response) => {
    const reader = response.body.getReader();
    return new ReadableStream({
      start(controller) {
        return pump();
        function pump() {
          return reader.read().then(({ done, value }) => {
            // When no more data needs to be consumed, close the stream
            if (done) {
              controller.close();
              return;
            }
            // Enqueue the next data chunk into our target stream
            controller.enqueue(value);

            // Decode this chunk. Each chunk holds one or more server-sent
            // events of the form "data: {...}", terminated by a final
            // "data: [DONE]". (This assumes every chunk contains complete
            // events; a robust version would buffer partial lines across reads.)
            let chars = new TextDecoder().decode(value);
            let lines = chars.split('data:');
            for (let i = 0; i < lines.length; i++) {
                let line = lines[i].trim();
                if (!line || line === '[DONE]') {
                    continue; // skip empty fragments and the end-of-stream marker
                }
                let obj = JSON.parse(line);
                if (obj.choices && obj.choices[0].delta) {
                    let deltaS = obj.choices[0].delta.content;
                    if (deltaS !== undefined) {
                        console.log(deltaS); // print each delta as it arrives
                    }
                }
            }
            return pump();
          });
        }
      },
    });
  })
  .then((stream) => new Response(stream))
  // The stream carries the raw SSE text, not JSON, so read it back as text.
  .then((response) => response.text())
  .then((fullText) => {
    // Here you get the complete raw transcript (every "data: {...}" line),
    // in case you want to post-process it after streaming has finished.
    //console.log(fullText);
  })
  .catch((err) => console.error(err));
})();
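
If you would rather not parse the SSE stream yourself, the official openai Node package (v4 or later) wraps the same endpoint and does the parsing for you. Here is a minimal sketch assuming that SDK; the API key and prompt are placeholders:

import OpenAI from "openai";

const openai = new OpenAI({ apiKey: "xxxxx" }); // replace with your actual OpenAI key

(async function () {
  const stream = await openai.chat.completions.create({
    model: "gpt-4",
    messages: [{ role: "user", content: "Tell me more about XYZ." }],
    stream: true // the same flag as in the raw fetch example above
  });

  // The SDK exposes the streamed response as an async iterable of chunks.
  for await (const chunk of stream) {
    process.stdout.write(chunk.choices[0]?.delta?.content ?? "");
  }
})();

The raw fetch version above is still handy in the browser, or anywhere you don't want the extra dependency.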
  
peter_the_oak