messages streaming

This commit is contained in:
Dhravya 2024-04-08 18:39:32 -07:00
parent f04fa3faf7
commit afd14a5dcd
3 changed files with 54 additions and 10 deletions

View file

@@ -31,22 +31,25 @@ export async function POST(req: NextRequest) {
chatHistory: ChatHistory[]
};
console.log("CHathistory", chatHistory)
if (!query) {
return new Response(JSON.stringify({ message: "Invalid query" }), { status: 400 });
}
const resp = await fetch(`https://cf-ai-backend.dhravya.workers.dev/chat?q=${query}&user=${session.user.email ?? session.user.name}&sourcesOnly=${sourcesOnly}`, {
headers: {
"X-Custom-Auth-Key": env.BACKEND_SECURITY_KEY,
},
method: "POST",
body: JSON.stringify({
chatHistory
chatHistory: chatHistory.chatHistory ?? []
})
})
console.log(resp.status)
console.log(resp.statusText)
if (resp.status !== 200 || !resp.ok) {
const errorData = await resp.json();

View file

@@ -54,7 +54,7 @@ export default function Main({ sidebarOpen }: { sidebarOpen: boolean }) {
// This is the streamed AI response we get from the server.
const [aiResponse, setAIResponse] = useState('');
const [toBeParsed, setToBeParsed] = useState('');
const textArea = useRef<HTMLTextAreaElement>(null);
@@ -105,7 +105,36 @@ export default function Main({ sidebarOpen }: { sidebarOpen: boolean }) {
remainingData = part;
} else if (parsedPart && parsedPart.response) {
// If the part is parsable and has the "response" field, update the AI response state
setAIResponse((prev) => prev + parsedPart.response);
// setAIResponse((prev) => prev + parsedPart.response);
// appendToChatHistory('model', parsedPart.response);
// Append to chat history in this way:
// If the last message was from the model, append to that message
// Otherwise, Start a new message from the model and append to that
if (
chatHistory.length > 0 &&
chatHistory[chatHistory.length - 1].role === 'model'
) {
setChatHistory((prev: any) => {
const lastMessage = prev[prev.length - 1];
const newParts = [
...lastMessage.parts,
{ text: parsedPart.response },
];
return [
...prev.slice(0, prev.length - 1),
{ ...lastMessage, parts: newParts },
];
});
} else {
setChatHistory((prev) => [
...prev,
{
role: 'model',
parts: [{ text: parsedPart.response }],
},
]);
}
}
} catch (error) {
// If parsing fails and it's not the last part, it's a malformed JSON
@@ -137,8 +166,16 @@ export default function Main({ sidebarOpen }: { sidebarOpen: boolean }) {
e.preventDefault();
setIsAiLoading(true);
appendToChatHistory('user', value);
const sourcesResponse = await fetch(
`/api/query?sourcesOnly=true&q=${value}`,
`/api/chat?sourcesOnly=true&q=${value}`,
{
method: 'POST',
body: JSON.stringify({
chatHistory,
}),
},
);
const sourcesInJson = (await sourcesResponse.json()) as {
@@ -147,7 +184,13 @@ export default function Main({ sidebarOpen }: { sidebarOpen: boolean }) {
setSearchResults(sourcesInJson.ids);
const response = await fetch(`/api/query?q=${value}`);
// TODO: PASS THE `SPACE` TO THE API
const response = await fetch(`/api/chat?q=${value}`, {
method: 'POST',
body: JSON.stringify({
chatHistory,
}),
});
if (response.status !== 200) {
setIsAiLoading(false);
@@ -162,8 +205,8 @@ export default function Main({ sidebarOpen }: { sidebarOpen: boolean }) {
// @ts-ignore
reader.read().then(function processText({ done, value }) {
if (done) {
// setSearchResults(JSON.parse(result.replace('data: ', '')));
// setIsAiLoading(false);
setIsAiLoading(false);
setToBeParsed('');
return;
}
@@ -187,7 +230,7 @@ export default function Main({ sidebarOpen }: { sidebarOpen: boolean }) {
{chatHistory.map((chat, index) => (
<ChatMessage
key={index}
message={chat.parts[0].text}
message={chat.parts.map((part) => part.text).join('')}
user={chat.role === 'model' ? 'ai' : session?.user!}
/>
))}

View file

@@ -31,8 +31,6 @@ export const MemoryProvider: React.FC<
[spaces],
);
console.log(spaces);
return (
<MemoryContext.Provider value={{ spaces, addSpace, deleteSpace }}>
{children}