mirror of
https://github.com/supermemoryai/supermemory.git
synced 2026-05-14 16:36:59 +00:00
Merge pull request #111 from Dhravya/kush/misc-be-improvements
show chunks separations in database, proper error handling and prettier config
This commit is contained in:
commit
b487eea712
4 changed files with 56 additions and 37 deletions
5
.prettierrc
Normal file
5
.prettierrc
Normal file
|
|
@ -0,0 +1,5 @@
|
|||
{
|
||||
"tabWidth": 2,
|
||||
"trailingComma": "all",
|
||||
"useTabs": false
|
||||
}
|
||||
|
|
@ -150,6 +150,7 @@ export async function batchCreateChunksAndEmbeddings({
|
|||
|
||||
const allIds = await context.env.KV.list({ prefix: uuid });
|
||||
|
||||
let pageContent = "";
|
||||
// If some chunks for that content already exist, we'll just update the metadata to include
|
||||
// the user.
|
||||
if (allIds.keys.length > 0) {
|
||||
|
|
@ -168,12 +169,15 @@ export async function batchCreateChunksAndEmbeddings({
|
|||
return acc;
|
||||
}, {}),
|
||||
};
|
||||
|
||||
const content =
|
||||
vector.metadata.content.toString().split("Content: ")[1] ||
|
||||
vector.metadata.content;
|
||||
pageContent += `<---chunkId: ${vector.id}\n${content}\n---->`;
|
||||
return vector;
|
||||
});
|
||||
|
||||
await context.env.VECTORIZE_INDEX.upsert(newVectors);
|
||||
return;
|
||||
return pageContent; //Return the page content that goes to d1 db
|
||||
}
|
||||
|
||||
for (let i = 0; i < chunks.length; i++) {
|
||||
|
|
@ -209,5 +213,7 @@ export async function batchCreateChunksAndEmbeddings({
|
|||
console.log("Docs added: ", docs);
|
||||
|
||||
await context.env.KV.put(chunkId, ourID);
|
||||
pageContent += `<---chunkId: ${chunkId}\n${chunk}\n---->`;
|
||||
}
|
||||
return pageContent; // Return the pageContent that goes to the d1 db
|
||||
}
|
||||
|
|
|
|||
|
|
@ -15,7 +15,6 @@ import { zValidator } from "@hono/zod-validator";
|
|||
import chunkText from "./utils/chonker";
|
||||
import { systemPrompt, template } from "./prompts/prompt1";
|
||||
import { swaggerUI } from "@hono/swagger-ui";
|
||||
import { createOpenAI } from "@ai-sdk/openai";
|
||||
|
||||
const app = new Hono<{ Bindings: Env }>();
|
||||
|
||||
|
|
@ -65,14 +64,18 @@ app.post("/api/add", zValidator("json", vectorObj), async (c) => {
|
|||
const { store } = await initQuery(c);
|
||||
|
||||
console.log(body.spaces);
|
||||
await batchCreateChunksAndEmbeddings({
|
||||
const chunks = chunkText(body.pageContent, 1536);
|
||||
if (chunks.length > 20) {
|
||||
return c.json({ status: "error", message: "We are unable to process documents this size just yet, try something smaller" });
|
||||
}
|
||||
const chunkedInput = await batchCreateChunksAndEmbeddings({
|
||||
store,
|
||||
body,
|
||||
chunks: chunkText(body.pageContent, 1536),
|
||||
chunks: chunks,
|
||||
context: c,
|
||||
});
|
||||
|
||||
return c.json({ status: "ok" });
|
||||
return c.json({ status: "ok", chunkedInput });
|
||||
});
|
||||
|
||||
app.post(
|
||||
|
|
|
|||
|
|
@ -7,7 +7,6 @@ import {
|
|||
chatHistory,
|
||||
chatThreads,
|
||||
contentToSpace,
|
||||
sessions,
|
||||
space,
|
||||
spacesAccess,
|
||||
storedContent,
|
||||
|
|
@ -19,11 +18,9 @@ import { Tweet } from "react-tweet/api";
|
|||
import { getMetaData } from "@/lib/get-metadata";
|
||||
import { and, eq, inArray, sql } from "drizzle-orm";
|
||||
import { LIMITS } from "@/lib/constants";
|
||||
import { z } from "zod";
|
||||
import { AddFromAPIType, ChatHistory } from "@repo/shared-types";
|
||||
import { ChatHistory } from "@repo/shared-types";
|
||||
import { decipher } from "@/server/encrypt";
|
||||
import { redirect } from "next/navigation";
|
||||
import { ensureAuth } from "../api/ensureAuth";
|
||||
import { tweetToMd } from "@repo/shared-types/utils";
|
||||
|
||||
export const createSpace = async (
|
||||
|
|
@ -265,6 +262,36 @@ export const createMemory = async (input: {
|
|||
|
||||
let contentId: number | undefined;
|
||||
|
||||
const response = (await vectorSaveResponse.json()) as {
|
||||
status: string;
|
||||
chunkedInput: string;
|
||||
message?: string;
|
||||
};
|
||||
|
||||
try {
|
||||
if (response.status !== "ok") {
|
||||
if (response.status === "error") {
|
||||
return {
|
||||
success: false,
|
||||
data: 0,
|
||||
error: response.message,
|
||||
};
|
||||
} else {
|
||||
return {
|
||||
success: false,
|
||||
data: 0,
|
||||
error: `Failed to save to vector store. Backend returned error: ${response.message}`,
|
||||
};
|
||||
}
|
||||
}
|
||||
} catch (e) {
|
||||
return {
|
||||
success: false,
|
||||
data: 0,
|
||||
error: `Failed to save to vector store. Backend returned error: ${e}`,
|
||||
};
|
||||
}
|
||||
|
||||
const saveToDbUrl =
|
||||
(metadata.baseUrl.split("#supermemory-user-")[0] ?? metadata.baseUrl) +
|
||||
"#supermemory-user-" +
|
||||
|
|
@ -275,7 +302,7 @@ export const createMemory = async (input: {
|
|||
const insertResponse = await db
|
||||
.insert(storedContent)
|
||||
.values({
|
||||
content: pageContent,
|
||||
content: response.chunkedInput,
|
||||
title: metadata.title,
|
||||
description: metadata.description,
|
||||
url: saveToDbUrl,
|
||||
|
|
@ -349,32 +376,10 @@ export const createMemory = async (input: {
|
|||
);
|
||||
}
|
||||
|
||||
try {
|
||||
const response = await vectorSaveResponse.json();
|
||||
|
||||
const expectedResponse = z.object({ status: z.literal("ok") });
|
||||
|
||||
const parsedResponse = expectedResponse.safeParse(response);
|
||||
|
||||
if (!parsedResponse.success) {
|
||||
return {
|
||||
success: false,
|
||||
data: 0,
|
||||
error: `Failed to save to vector store. Backend returned error: ${parsedResponse.error.message}`,
|
||||
};
|
||||
}
|
||||
|
||||
return {
|
||||
success: true,
|
||||
data: 1,
|
||||
};
|
||||
} catch (e) {
|
||||
return {
|
||||
success: false,
|
||||
data: 0,
|
||||
error: `Failed to save to vector store. Backend returned error: ${e}`,
|
||||
};
|
||||
}
|
||||
return {
|
||||
success: true,
|
||||
data: 1,
|
||||
};
|
||||
};
|
||||
|
||||
export const createChatThread = async (
|
||||
|
|
|
|||
Loading…
Add table
Add a link
Reference in a new issue