LangFlux supports streaming responses back to your front-end application when the final node is a Chain or an OpenAI Function Agent.
First, install the socket.io-client package in your front-end project:

```bash
yarn add socket.io-client
```

or, with npm:

```bash
npm install socket.io-client
```
Import the client and open a connection to your LangFlux instance:

```javascript
import socketIOClient from 'socket.io-client'

const socket = socketIOClient("http://localhost:3000") // your LangFlux URL
```
Listen for the connection and store the socket ID; it is sent along with each prediction request so LangFlux knows which client to stream tokens to:

```javascript
import { useState } from 'react'

const [socketIOClientId, setSocketIOClientId] = useState('');

socket.on('connect', () => {
    setSocketIOClientId(socket.id)
});
```
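In a React component you would typically register this listener inside a `useEffect` hook and clean it up on unmount. A minimal sketch (the `Chat` component name is illustrative, not part of the LangFlux API):

```javascript
import { useEffect, useState } from 'react'
import socketIOClient from 'socket.io-client'

function Chat() {
    const [socketIOClientId, setSocketIOClientId] = useState('')

    useEffect(() => {
        // Assumes LangFlux is running locally on the default port
        const socket = socketIOClient("http://localhost:3000")

        socket.on('connect', () => {
            setSocketIOClientId(socket.id)
        })

        // Close the socket connection when the component unmounts
        return () => {
            socket.disconnect()
        }
    }, [])

    // ...render the chat UI here
    return null
}
```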
Call the prediction endpoint as usual, making sure the request body is sent as JSON:

```javascript
async function query(data) {
    const response = await fetch(
        "http://localhost:3000/api/v1/prediction/<chatflow-id>",
        {
            method: "POST",
            headers: {
                "Content-Type": "application/json"
            },
            // The data object must be serialized before sending
            body: JSON.stringify(data)
        }
    );
    const result = await response.json();
    return result;
}
```
Then send the question together with the `socketIOClientId`:

```javascript
query({
    "question": "Hey, how are you?",
    "socketIOClientId": socketIOClientId
}).then((response) => {
    console.log(response);
});
```
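Note that `socket.id` is only available after the `connect` event fires, so in practice you should wait until `socketIOClientId` is set before sending. A hypothetical submit handler might look like this (the `handleSubmit` name is an assumption for illustration):

```javascript
async function handleSubmit(question) {
    // Don't send until the socket connection has been established
    if (!socketIOClientId) return

    const response = await query({
        question: question,
        socketIOClientId: socketIOClientId
    })

    // The HTTP response still resolves once generation has finished
    console.log(response)
}
```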
Finally, listen for the streaming events emitted by LangFlux over the socket:

```javascript
// Fired once when the response starts streaming
socket.on('start', () => {
    console.log('start');
});

// Fired for every new token as it is generated
socket.on('token', (token) => {
    console.log('token:', token);
});

// Fired with the source documents, if the chatflow returns them
socket.on('sourceDocuments', (sourceDocuments) => {
    console.log('sourceDocuments:', sourceDocuments);
});

// Fired once when the response is complete
socket.on('end', () => {
    console.log('end');
});
```
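In a real UI you would usually append each token to component state rather than logging it, so the answer renders incrementally. A minimal sketch, assuming an `answer` state variable (not part of the LangFlux API):

```javascript
const [answer, setAnswer] = useState('')

socket.on('start', () => {
    // Clear the previous answer when a new response begins
    setAnswer('')
})

socket.on('token', (token) => {
    // Append each streamed token as it arrives
    setAnswer((prev) => prev + token)
})

socket.on('end', () => {
    // Optionally close the connection once streaming is finished
    socket.disconnect()
})
```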